hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 958k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f731902b159345554e51c2ec9585cd8075fabbd5 | 846 | py | Python | setup.py | kenlowrie/avscript | 7e3550adc46a01785ea89cdcd9ddaa5ac35bdbdd | [
"Apache-2.0"
] | 1 | 2021-09-30T19:29:30.000Z | 2021-09-30T19:29:30.000Z | setup.py | kenlowrie/avscript | 7e3550adc46a01785ea89cdcd9ddaa5ac35bdbdd | [
"Apache-2.0"
] | 7 | 2018-07-16T22:52:55.000Z | 2020-05-20T23:48:36.000Z | setup.py | kenlowrie/avscript | 7e3550adc46a01785ea89cdcd9ddaa5ac35bdbdd | [
"Apache-2.0"
] | 1 | 2018-05-24T22:58:44.000Z | 2018-05-24T22:58:44.000Z | from setuptools import setup
from sys import version_info
setup(name='avscript',
version='0.3.8',
description='Audio/Visual Script Parser',
url='https://github.com/kenlowrie/avscript',
author='Ken Lowrie',
author_email='ken@kenlowrie.com',
license='Apache',
packages=['avscript', 'avscript.avs'],
install_requires=['kenl380.pylib'],
entry_points = {
'console_scripts': ['avscript=avscript.avscript_md:av_parse_file',
'avscript{}=avscript.avscript_md:av_parse_file'.format(version_info.major),
'mkavscript=avscript.mkavscript_md:mkavscript_md',
'mkavscript{}=avscript.mkavscript_md:mkavscript_md'.format(version_info.major),
],
},
include_package_data=True,
zip_safe=False)
| 38.454545 | 107 | 0.630024 | from setuptools import setup
from sys import version_info
setup(name='avscript',
version='0.3.8',
description='Audio/Visual Script Parser',
url='https://github.com/kenlowrie/avscript',
author='Ken Lowrie',
author_email='ken@kenlowrie.com',
license='Apache',
packages=['avscript', 'avscript.avs'],
install_requires=['kenl380.pylib'],
entry_points = {
'console_scripts': ['avscript=avscript.avscript_md:av_parse_file',
'avscript{}=avscript.avscript_md:av_parse_file'.format(version_info.major),
'mkavscript=avscript.mkavscript_md:mkavscript_md',
'mkavscript{}=avscript.mkavscript_md:mkavscript_md'.format(version_info.major),
],
},
include_package_data=True,
zip_safe=False)
| true | true |
f73190671897a2b17be262536826df2b282c304f | 24,050 | py | Python | test/galaxy_selenium/navigates_galaxy.py | Galaxyinternship/Galaxy | 204be086a8c16d6684584cefa9053ed7c86a1784 | [
"CC-BY-3.0"
] | null | null | null | test/galaxy_selenium/navigates_galaxy.py | Galaxyinternship/Galaxy | 204be086a8c16d6684584cefa9053ed7c86a1784 | [
"CC-BY-3.0"
] | null | null | null | test/galaxy_selenium/navigates_galaxy.py | Galaxyinternship/Galaxy | 204be086a8c16d6684584cefa9053ed7c86a1784 | [
"CC-BY-3.0"
] | null | null | null | """A mixing that extends a HasDriver class with Galaxy-specific utilities.
Implementer must provide a self.build_url method to target Galaxy.
"""
from __future__ import print_function
import contextlib
import random
import string
import time
from functools import partial, wraps
import requests
import yaml
from .data import NAVIGATION_DATA
from .has_driver import exception_indicates_stale_element, HasDriver
from . import sizzle
# Test case data
DEFAULT_PASSWORD = '123456'
class NullTourCallback(object):
def handle_step(self, step, step_index):
pass
def retry_call_during_transitions(f, attempts=5, sleep=.1):
previous_attempts = 0
while True:
try:
return f()
except Exception as e:
if previous_attempts > attempts:
raise
if not exception_indicates_stale_element(e):
raise
time.sleep(sleep)
previous_attempts += 1
def retry_during_transitions(f, attempts=5, sleep=.1):
@wraps(f)
def _retry(*args, **kwds):
retry_call_during_transitions(partial(f, *args, **kwds), attempts=attempts, sleep=sleep)
return _retry
class NavigatesGalaxy(HasDriver):
default_password = DEFAULT_PASSWORD
def get(self, url=""):
full_url = self.build_url(url)
return self.driver.get(full_url)
@property
def navigation_data(self):
return NAVIGATION_DATA
def home(self):
self.get()
self.wait_for_selector_visible("#masthead")
self.wait_for_selector_visible("#current-history-panel")
def switch_to_main_panel(self):
self.driver.switch_to.frame(self.navigation_data["selectors"]["frames"]["main"])
@contextlib.contextmanager
def main_panel(self):
try:
self.switch_to_main_panel()
yield
finally:
self.driver.switch_to.default_content
def api_get(self, endpoint, data={}, raw=False):
full_url = self.build_url("api/" + endpoint, for_selenium=False)
response = requests.get(full_url, data=data, cookies=self.selenium_to_requests_cookies())
if raw:
return response
else:
return response.json()
def get_galaxy_session(self):
for cookie in self.driver.get_cookies():
if cookie["name"] == "galaxysession":
return cookie["value"]
def selenium_to_requests_cookies(self):
return {
'galaxysession': self.get_galaxy_session()
}
def history_panel_name_element(self):
name_selector = self.test_data["historyPanel"]["selectors"]["history"]["name"]
return self.wait_for_selector(name_selector)
def current_history(self):
history = self.api_get("histories")[0]
return history
def current_history_id(self):
return self.current_history()["id"]
def current_history_contents(self):
current_history_id = self.current_history_id()
history_contents = self.api_get("histories/%s/contents" % current_history_id)
return history_contents
def latest_history_item(self):
history_contents = self.current_history_contents()
assert len(history_contents) > 0
return history_contents[-1]
def wait_for_history(self, timeout=30, assert_ok=True):
def history_becomes_terminal(driver):
current_history_id = self.current_history_id()
state = self.api_get("histories/%s" % current_history_id)["state"]
if state not in ["running", "queued", "new", "ready"]:
return state
else:
return None
final_state = self.wait(timeout).until(history_becomes_terminal)
if assert_ok:
assert final_state == "ok", final_state
return final_state
def history_panel_wait_for_hid_ok(self, hid, timeout=60):
self.history_panel_wait_for_hid_state(hid, 'ok', timeout=timeout)
def history_panel_wait_for_hid_visible(self, hid, timeout=60):
current_history_id = self.current_history_id()
def history_has_hid(driver):
contents = self.api_get("histories/%s/contents" % current_history_id)
return any([d for d in contents if d["hid"] == hid])
self.wait(timeout).until(history_has_hid)
contents = self.api_get("histories/%s/contents" % current_history_id)
history_item = [d for d in contents if d["hid"] == hid][0]
history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
self.wait_for_selector_visible(history_item_selector)
return history_item_selector
def history_panel_wait_for_hid_hidden(self, hid, timeout=60):
current_history_id = self.current_history_id()
contents = self.api_get("histories/%s/contents" % current_history_id)
history_item = [d for d in contents if d["hid"] == hid][0]
history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
self.wait_for_selector_absent(history_item_selector)
return history_item_selector
def history_panel_wait_for_hid_state(self, hid, state, timeout=60):
history_item_selector = self.history_panel_wait_for_hid_visible(hid, timeout=timeout)
history_item_selector_state = "%s.state-%s" % (history_item_selector, state)
try:
self.wait_for_selector_visible(history_item_selector_state)
except self.TimeoutException as e:
history_item = self.driver.find_element_by_css_selector(history_item_selector)
current_state = "UNKNOWN"
classes = history_item.get_attribute("class").split(" ")
for clazz in classes:
if clazz.startswith("state-"):
current_state = clazz[len("state-"):]
template = "Failed waiting on history item %d state to change to [%s] current state [%s]. "
message = template % (hid, state, current_state)
raise self.prepend_timeout_message(e, message)
def get_logged_in_user(self):
return self.api_get("users/current")
def is_logged_in(self):
return "email" in self.get_logged_in_user()
def _get_random_name(self, prefix=None, suffix=None, len=10):
return '%s%s%s' % (
prefix or '',
''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(len)),
suffix or '',
)
def _get_random_email(self, username=None, domain=None):
username = username or 'test'
domain = domain or 'test.test'
return self._get_random_name(prefix=username, suffix="@" + domain)
def submit_login(self, email, password=None):
if password is None:
password = self.default_password
login_info = {
'login': email,
'password': password,
}
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["login"])
with self.main_panel():
form = self.wait_for_selector(self.navigation_data["selectors"]["loginPage"]["form"])
self.fill(form, login_info)
self.click_submit(form)
def register(self, email=None, password=None, username=None, confirm=None, assert_valid=True):
if email is None:
email = self._get_random_email()
if password is None:
password = self.default_password
if confirm is None:
confirm = password
if username is None:
username = email.split("@")[0]
self.home()
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["register"])
with self.main_panel():
register_form_id = self.navigation_data["selectors"]["registrationPage"]["form"]
form = self.wait_for_id(register_form_id)
self.fill(form, dict(
email=email,
password=password,
username=username,
confirm=confirm
))
self.click_xpath(self.navigation_data["selectors"]["registrationPage"]["submit_xpath"])
if assert_valid:
self.home()
self.click_masthead_user()
user_email_element = self.wait_for_xpath_visible(self.navigation_data["selectors"]["masthead"]["userMenu"]["userEmail_xpath"])
text = user_email_element.text
assert email in text
assert self.get_logged_in_user()["email"] == email
# Hide masthead menu click
self.click_center()
def click_center(self):
action_chains = self.action_chains()
center_element = self.driver.find_element_by_css_selector("#center")
action_chains.move_to_element(center_element).click().perform()
def perform_upload(self, test_path, ext=None, genome=None, ext_all=None, genome_all=None):
self.home()
upload_button = self.wait_for_selector_clickable(".upload-button")
upload_button.click()
if ext_all is not None:
self.wait_for_selector_visible('.upload-footer-extension')
self.select2_set_value(".upload-footer-extension", ext_all)
if genome_all is not None:
self.wait_for_selector_visible('.upload-footer-genome')
self.select2_set_value(".upload-footer-genome", genome_all)
local_upload_button = self.wait_for_selector_clickable("button#btn-local")
local_upload_button.click()
file_upload = self.wait_for_selector('input[type="file"]')
file_upload.send_keys(test_path)
if ext is not None:
self.wait_for_selector_visible('.upload-extension')
self.select2_set_value(".upload-extension", ext)
if genome is not None:
self.wait_for_selector_visible('.upload-genome')
self.select2_set_value(".upload-genome", genome)
start_button = self.wait_for_selector_clickable("button#btn-start")
start_button.click()
close_button = self.wait_for_selector_clickable("button#btn-close")
close_button.click()
def workflow_index_open(self):
self.home()
self.click_masthead_workflow()
def workflow_index_table_elements(self):
self.wait_for_selector_visible(".manage-table tbody")
table_elements = self.driver.find_elements_by_css_selector(".manage-table tbody > tr")
# drop header
return table_elements[1:]
def workflow_index_click_option(self, option_title, workflow_index=0):
table_elements = self.workflow_index_table_elements()
workflow_row = table_elements[workflow_index]
workflow_button = workflow_row.find_element_by_css_selector(".menubutton")
workflow_button.click()
menu_element = self.wait_for_selector_visible(".popmenu-wrapper .dropdown-menu")
menu_options = menu_element.find_elements_by_css_selector("li a")
found_option = False
for menu_option in menu_options:
if option_title in menu_option.text:
menu_option.click()
found_option = True
break
if not found_option:
raise AssertionError("Failed to find workflow action option with title [%s]" % option_title)
def workflow_run_submit(self):
button = self.wait_for_selector(".ui-form-header button")
button.click()
def tool_open(self, tool_id):
link_element = self.wait_for_selector('a[href$="tool_runner?tool_id=%s"]' % tool_id)
link_element.click()
def tool_parameter_div(self, expanded_parameter_id):
return self.wait_for_selector("div.ui-form-element[tour_id$='%s']" % expanded_parameter_id)
def tool_set_value(self, expanded_parameter_id, value, expected_type=None, test_data_resolver=None):
div_element = self.tool_parameter_div(expanded_parameter_id)
assert div_element
if expected_type == "data":
div_selector = "div.ui-form-element[tour_id$='%s']" % expanded_parameter_id
self.select2_set_value(div_selector, value)
else:
input_element = div_element.find_element_by_css_selector("input")
# Clear default value
input_element.clear()
input_element.send_keys(value)
def tool_execute(self):
execute_button = self.wait_for_selector("button#execute")
execute_button.click()
def click_masthead_user(self):
self.click_xpath(self.navigation_data["selectors"]["masthead"]["user"])
def click_masthead_workflow(self):
self.click_xpath(self.navigation_data["selectors"]["masthead"]["workflow"])
def click_button_new_workflow(self):
self.click_selector(self.navigation_data["selectors"]["workflows"]["new_button"])
def click_history_options(self):
history_options_button_selector = self.test_data["historyOptions"]["selectors"]["button"]
history_options_element = self.wait_for_selector(history_options_button_selector)
assert history_options_element.is_displayed()
history_options_button_icon_selector = self.test_data["historyOptions"]["selectors"]["buttonIcon"]
history_options_button_icon_element = self.wait_for_selector(history_options_button_icon_selector)
assert history_options_button_icon_element.is_displayed()
history_options_element.click()
def click_history_option(self, option_label):
# Open menu
self.click_history_options()
# Click labelled option
menu_selector = self.history_options_menu_selector()
menu_element = self.wait_for_selector(menu_selector)
menu_selection_element = menu_element.find_element_by_xpath('//ul[@id="history-options-button-menu"]/li/a[text()[contains(.,"%s")]]' % option_label)
menu_selection_element.click()
def history_options_menu_selector(self):
menu_selector = self.test_data["historyOptions"]["selectors"]["menu"]
return menu_selector
@retry_during_transitions
def history_panel_refresh_click(self):
refresh_item = self.wait_for_selector_clickable("#history-refresh-button")
refresh_item.click()
def history_panel_multi_operations_selector(self):
return self.test_data["historyPanel"]["selectors"]["history"]["multiOperationsIcon"]
def history_panel_multi_operations_show(self):
operations_selector = self.history_panel_multi_operations_selector()
operations_element = self.wait_for_selector_clickable(operations_selector)
operations_element.click()
@retry_during_transitions
def history_panel_muli_operation_select_hid(self, hid):
item_selector = self.history_panel_item_selector(hid, wait=True)
operation_radio_selector = "%s .selector" % item_selector
element = self.wait_for_selector_clickable(operation_radio_selector)
element.click()
def history_panel_multi_operation_action_selector(self):
return self.test_data["historyPanel"]["selectors"]["history"]["multiOperationsActionBtn"]
def history_panel_multi_operation_action_click(self, action):
time.sleep(5)
button_element = self.wait_for_selector_clickable(self.history_panel_multi_operation_action_selector())
button_element.click()
menu_element = self.wait_for_selector_visible(".list-action-menu.open")
action_element = menu_element.find_element_by_link_text(action)
action_element.click()
def history_panel_item_selector(self, hid, wait=False):
current_history_id = self.current_history_id()
contents = self.api_get("histories/%s/contents" % current_history_id)
try:
history_item = [d for d in contents if d["hid"] == hid][0]
except IndexError:
raise Exception("Could not find history item with hid [%s] in contents [%s]" % (hid, contents))
history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
if wait:
self.wait_for_selector_visible(history_item_selector)
return history_item_selector
def modal_body_selector(self):
return ".modal-body"
def history_panel_item_body_selector(self, hid, wait=False):
selector = "%s %s" % (self.history_panel_item_selector(hid), self.test_data["historyPanel"]["selectors"]["hda"]["body"])
if wait:
self.wait_for_selector_visible(selector)
return selector
def hda_div_selector(self, hda_id):
return "#dataset-%s" % hda_id
def hda_body_selector(self, hda_id):
return "%s %s" % (self.hda_div_selector(hda_id), self.test_data["historyPanel"]["selectors"]["hda"]["body"])
def hda_click_primary_action_button(self, hid, button_key):
self.history_panel_click_item_title(hid=hid, wait=True)
body_selector = self.history_panel_item_body_selector(hid=hid, wait=True)
buttons_selector = body_selector + " " + self.test_data["historyPanel"]["selectors"]["hda"]["primaryActionButtons"]
self.wait_for_selector_visible(buttons_selector)
button_def = self.test_data["historyPanel"]["hdaPrimaryActionButtons"][button_key]
button_selector = button_def["selector"]
button_item = self.wait_for_selector_visible("%s %s" % (buttons_selector, button_selector))
return button_item.click()
def history_panel_click_item_title(self, **kwds):
if "hda_id" in kwds:
item_selector = self.hda_div_selector(kwds["hda_id"])
else:
item_selector = self.history_panel_item_selector(kwds["hid"])
title_selector = "%s .title" % item_selector
title_element = self.wait_for_selector(title_selector)
title_element.click()
if kwds.get("wait", False):
# Find a better way to wait for transition
time.sleep(.5)
def click_hda_title(self, hda_id, wait=False):
# TODO: Replace with calls to history_panel_click_item_title.
return self.history_panel_click_item_title(hda_id=hda_id, wait=wait)
def collection_builder_set_name(self, name):
name_element = self.wait_for_selector_visible("input.collection-name")
name_element.send_keys(name)
def collection_builder_hide_originals(self):
hide_element = self.wait_for_selector_clickable("input.hide-originals")
hide_element.click()
def collection_builder_create(self):
create_element = self.wait_for_selector_clickable("button.create-collection")
create_element.click()
def logout_if_needed(self):
if self.is_logged_in():
self.home()
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["logout"])
self.click_label('go to the home page')
assert not self.is_logged_in()
def run_tour(self, path, skip_steps=[], sleep_on_steps={}, tour_callback=None):
if tour_callback is None:
tour_callback = NullTourCallback()
self.home()
with open(path, "r") as f:
tour_dict = yaml.load(f)
steps = tour_dict["steps"]
for i, step in enumerate(steps):
title = step.get("title", None)
skip = False
if skip_steps:
for skip_step in skip_steps:
if title == skip_step:
skip = True
if title in sleep_on_steps:
time.sleep(sleep_on_steps[title])
if skip:
continue
self.run_tour_step(step, i, tour_callback)
def tour_wait_for_clickable_element(self, selector):
wait = self.wait()
element = wait.until(sizzle.sizzle_selector_clickable(selector))
return element
def tour_wait_for_element_present(self, selector):
wait = self.wait()
element = wait.until(sizzle.sizzle_presence_of_selector(selector))
return element
def get_tooltip_text(self, element, sleep=0, click_away=True):
tooltip_selector = self.test_data["selectors"]["tooltipBalloon"]
self.wait_for_selector_absent(tooltip_selector)
action_chains = self.action_chains()
action_chains.move_to_element(element)
action_chains.perform()
if sleep > 0:
time.sleep(sleep)
tooltip_element = self.wait_for_selector_visible(tooltip_selector)
text = tooltip_element.text
if click_away:
self.click_center()
return text
def assert_tooltip_text(self, element, expected, sleep=0, click_away=True):
text = self.get_tooltip_text(element, sleep=sleep, click_away=click_away)
assert text == expected, "Tooltip text [%s] was not expected text [%s]." % (text, expected)
def assert_error_message(self, contains=None):
return self._assert_message("error", contains=contains)
def assert_warning_message(self, contains=None):
return self._assert_message("warning", contains=contains)
def _assert_message(self, type, contains=None):
element = self.wait_for_selector(self.test_data["selectors"]["messages"][type])
assert element, "No error message found, one expected."
if contains is not None:
assert contains in element.text
def assert_no_error_message(self):
self.assert_selector_absent(self.test_data["selectors"]["messages"]["error"])
def run_tour_step(self, step, step_index, tour_callback):
preclick = step.get("preclick", [])
for preclick_selector in preclick:
print("(Pre)Clicking %s" % preclick_selector)
element = self.tour_wait_for_clickable_element(preclick_selector)
element.click()
element_str = step.get("element", None)
if element_str is not None:
print("Waiting for element %s" % element_str)
element = self.tour_wait_for_element_present(element_str)
assert element is not None
textinsert = step.get("textinsert", None)
if textinsert is not None:
element.send_keys(textinsert)
tour_callback.handle_step(step, step_index)
postclick = step.get("postclick", [])
for postclick_selector in postclick:
print("(Post)Clicking %s" % postclick_selector)
element = self.tour_wait_for_clickable_element(postclick_selector)
element.click()
def select2_set_value(self, container_selector, value, with_click=True):
# There are two hacky was to select things from the select2 widget -
# with_click=True: This simulates the mouse click after the suggestion contains
# only the selected value.
# with_click=False: This presses enter on the selection. Not sure
# why.
# with_click seems to work in all situtations - the enter methods
# doesn't seem to work with the tool form for some reason.
container_elem = self.wait_for_selector(container_selector)
text_element = container_elem.find_element_by_css_selector("input[type='text']")
text_element.send_keys(value)
# Wait for select2 options to load and then click to add this one.
drop_elem = self.wait_for_selector_visible("#select2-drop")
# Sleep seems to be needed - at least for send_enter.
time.sleep(.5)
if not with_click:
# Wait for select2 options to load and then click to add this one.
self.send_enter(text_element)
else:
select_elem = drop_elem.find_elements_by_css_selector(".select2-result-label")[0]
action_chains = self.action_chains()
action_chains.move_to_element(select_elem).click().perform()
self.wait_for_selector_absent_or_hidden("#select2-drop")
| 40.083333 | 156 | 0.669023 | from __future__ import print_function
import contextlib
import random
import string
import time
from functools import partial, wraps
import requests
import yaml
from .data import NAVIGATION_DATA
from .has_driver import exception_indicates_stale_element, HasDriver
from . import sizzle
DEFAULT_PASSWORD = '123456'
class NullTourCallback(object):
def handle_step(self, step, step_index):
pass
def retry_call_during_transitions(f, attempts=5, sleep=.1):
previous_attempts = 0
while True:
try:
return f()
except Exception as e:
if previous_attempts > attempts:
raise
if not exception_indicates_stale_element(e):
raise
time.sleep(sleep)
previous_attempts += 1
def retry_during_transitions(f, attempts=5, sleep=.1):
@wraps(f)
def _retry(*args, **kwds):
retry_call_during_transitions(partial(f, *args, **kwds), attempts=attempts, sleep=sleep)
return _retry
class NavigatesGalaxy(HasDriver):
default_password = DEFAULT_PASSWORD
def get(self, url=""):
full_url = self.build_url(url)
return self.driver.get(full_url)
@property
def navigation_data(self):
return NAVIGATION_DATA
def home(self):
self.get()
self.wait_for_selector_visible("#masthead")
self.wait_for_selector_visible("#current-history-panel")
def switch_to_main_panel(self):
self.driver.switch_to.frame(self.navigation_data["selectors"]["frames"]["main"])
@contextlib.contextmanager
def main_panel(self):
try:
self.switch_to_main_panel()
yield
finally:
self.driver.switch_to.default_content
def api_get(self, endpoint, data={}, raw=False):
full_url = self.build_url("api/" + endpoint, for_selenium=False)
response = requests.get(full_url, data=data, cookies=self.selenium_to_requests_cookies())
if raw:
return response
else:
return response.json()
def get_galaxy_session(self):
for cookie in self.driver.get_cookies():
if cookie["name"] == "galaxysession":
return cookie["value"]
def selenium_to_requests_cookies(self):
return {
'galaxysession': self.get_galaxy_session()
}
def history_panel_name_element(self):
name_selector = self.test_data["historyPanel"]["selectors"]["history"]["name"]
return self.wait_for_selector(name_selector)
def current_history(self):
history = self.api_get("histories")[0]
return history
def current_history_id(self):
return self.current_history()["id"]
def current_history_contents(self):
current_history_id = self.current_history_id()
history_contents = self.api_get("histories/%s/contents" % current_history_id)
return history_contents
def latest_history_item(self):
history_contents = self.current_history_contents()
assert len(history_contents) > 0
return history_contents[-1]
def wait_for_history(self, timeout=30, assert_ok=True):
def history_becomes_terminal(driver):
current_history_id = self.current_history_id()
state = self.api_get("histories/%s" % current_history_id)["state"]
if state not in ["running", "queued", "new", "ready"]:
return state
else:
return None
final_state = self.wait(timeout).until(history_becomes_terminal)
if assert_ok:
assert final_state == "ok", final_state
return final_state
def history_panel_wait_for_hid_ok(self, hid, timeout=60):
self.history_panel_wait_for_hid_state(hid, 'ok', timeout=timeout)
def history_panel_wait_for_hid_visible(self, hid, timeout=60):
current_history_id = self.current_history_id()
def history_has_hid(driver):
contents = self.api_get("histories/%s/contents" % current_history_id)
return any([d for d in contents if d["hid"] == hid])
self.wait(timeout).until(history_has_hid)
contents = self.api_get("histories/%s/contents" % current_history_id)
history_item = [d for d in contents if d["hid"] == hid][0]
history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
self.wait_for_selector_visible(history_item_selector)
return history_item_selector
def history_panel_wait_for_hid_hidden(self, hid, timeout=60):
current_history_id = self.current_history_id()
contents = self.api_get("histories/%s/contents" % current_history_id)
history_item = [d for d in contents if d["hid"] == hid][0]
history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
self.wait_for_selector_absent(history_item_selector)
return history_item_selector
def history_panel_wait_for_hid_state(self, hid, state, timeout=60):
history_item_selector = self.history_panel_wait_for_hid_visible(hid, timeout=timeout)
history_item_selector_state = "%s.state-%s" % (history_item_selector, state)
try:
self.wait_for_selector_visible(history_item_selector_state)
except self.TimeoutException as e:
history_item = self.driver.find_element_by_css_selector(history_item_selector)
current_state = "UNKNOWN"
classes = history_item.get_attribute("class").split(" ")
for clazz in classes:
if clazz.startswith("state-"):
current_state = clazz[len("state-"):]
template = "Failed waiting on history item %d state to change to [%s] current state [%s]. "
message = template % (hid, state, current_state)
raise self.prepend_timeout_message(e, message)
def get_logged_in_user(self):
return self.api_get("users/current")
def is_logged_in(self):
return "email" in self.get_logged_in_user()
def _get_random_name(self, prefix=None, suffix=None, len=10):
return '%s%s%s' % (
prefix or '',
''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(len)),
suffix or '',
)
def _get_random_email(self, username=None, domain=None):
username = username or 'test'
domain = domain or 'test.test'
return self._get_random_name(prefix=username, suffix="@" + domain)
def submit_login(self, email, password=None):
if password is None:
password = self.default_password
login_info = {
'login': email,
'password': password,
}
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["login"])
with self.main_panel():
form = self.wait_for_selector(self.navigation_data["selectors"]["loginPage"]["form"])
self.fill(form, login_info)
self.click_submit(form)
def register(self, email=None, password=None, username=None, confirm=None, assert_valid=True):
if email is None:
email = self._get_random_email()
if password is None:
password = self.default_password
if confirm is None:
confirm = password
if username is None:
username = email.split("@")[0]
self.home()
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["register"])
with self.main_panel():
register_form_id = self.navigation_data["selectors"]["registrationPage"]["form"]
form = self.wait_for_id(register_form_id)
self.fill(form, dict(
email=email,
password=password,
username=username,
confirm=confirm
))
self.click_xpath(self.navigation_data["selectors"]["registrationPage"]["submit_xpath"])
if assert_valid:
self.home()
self.click_masthead_user()
user_email_element = self.wait_for_xpath_visible(self.navigation_data["selectors"]["masthead"]["userMenu"]["userEmail_xpath"])
text = user_email_element.text
assert email in text
assert self.get_logged_in_user()["email"] == email
self.click_center()
def click_center(self):
action_chains = self.action_chains()
center_element = self.driver.find_element_by_css_selector("#center")
action_chains.move_to_element(center_element).click().perform()
def perform_upload(self, test_path, ext=None, genome=None, ext_all=None, genome_all=None):
self.home()
upload_button = self.wait_for_selector_clickable(".upload-button")
upload_button.click()
if ext_all is not None:
self.wait_for_selector_visible('.upload-footer-extension')
self.select2_set_value(".upload-footer-extension", ext_all)
if genome_all is not None:
self.wait_for_selector_visible('.upload-footer-genome')
self.select2_set_value(".upload-footer-genome", genome_all)
local_upload_button = self.wait_for_selector_clickable("button#btn-local")
local_upload_button.click()
file_upload = self.wait_for_selector('input[type="file"]')
file_upload.send_keys(test_path)
if ext is not None:
self.wait_for_selector_visible('.upload-extension')
self.select2_set_value(".upload-extension", ext)
if genome is not None:
self.wait_for_selector_visible('.upload-genome')
self.select2_set_value(".upload-genome", genome)
start_button = self.wait_for_selector_clickable("button#btn-start")
start_button.click()
close_button = self.wait_for_selector_clickable("button#btn-close")
close_button.click()
def workflow_index_open(self):
    """Navigate to the workflow index (home page, then masthead workflow link)."""
    self.home()
    self.click_masthead_workflow()
def workflow_index_table_elements(self):
    """Return the workflow table body rows.

    The first row is dropped (presumably a header row -- confirm against
    the rendered markup).
    """
    self.wait_for_selector_visible(".manage-table tbody")
    rows = self.driver.find_elements_by_css_selector(".manage-table tbody > tr")
    return rows[1:]
def workflow_index_click_option(self, option_title, workflow_index=0):
    """Open the row menu of the workflow at ``workflow_index`` and click the
    menu entry whose text contains ``option_title``.

    Raises AssertionError when no matching entry is found.
    """
    rows = self.workflow_index_table_elements()
    target_row = rows[workflow_index]
    target_row.find_element_by_css_selector(".menubutton").click()
    menu = self.wait_for_selector_visible(".popmenu-wrapper .dropdown-menu")
    for entry in menu.find_elements_by_css_selector("li a"):
        if option_title in entry.text:
            entry.click()
            break
    else:
        raise AssertionError("Failed to find workflow action option with title [%s]" % option_title)
def workflow_run_submit(self):
    """Click the submit button in the workflow run form header."""
    button = self.wait_for_selector(".ui-form-header button")
    button.click()
def tool_open(self, tool_id):
    """Open the tool form for ``tool_id`` via its tool panel link."""
    link_element = self.wait_for_selector('a[href$="tool_runner?tool_id=%s"]' % tool_id)
    link_element.click()
def tool_parameter_div(self, expanded_parameter_id):
    """Return the tool form element div for the given expanded parameter id."""
    return self.wait_for_selector("div.ui-form-element[tour_id$='%s']" % expanded_parameter_id)
def tool_set_value(self, expanded_parameter_id, value, expected_type=None, test_data_resolver=None):
    """Set a tool form parameter value.

    Data parameters (``expected_type == "data"``) go through the select2
    widget; everything else is typed into the parameter's text input.
    ``test_data_resolver`` is accepted but unused in this implementation.
    """
    div_element = self.tool_parameter_div(expanded_parameter_id)
    assert div_element
    if expected_type == "data":
        div_selector = "div.ui-form-element[tour_id$='%s']" % expanded_parameter_id
        self.select2_set_value(div_selector, value)
    else:
        input_element = div_element.find_element_by_css_selector("input")
        input_element.clear()
        input_element.send_keys(value)
def tool_execute(self):
    """Click the tool form execute button."""
    execute_button = self.wait_for_selector("button#execute")
    execute_button.click()
def click_masthead_user(self):
    """Click the user menu entry in the masthead."""
    self.click_xpath(self.navigation_data["selectors"]["masthead"]["user"])
def click_masthead_workflow(self):
    """Click the workflow entry in the masthead."""
    self.click_xpath(self.navigation_data["selectors"]["masthead"]["workflow"])
def click_button_new_workflow(self):
    """Click the new-workflow button on the workflows page."""
    self.click_selector(self.navigation_data["selectors"]["workflows"]["new_button"])
def click_history_options(self):
    """Open the history options (gear) menu, asserting button and icon are displayed."""
    history_options_button_selector = self.test_data["historyOptions"]["selectors"]["button"]
    history_options_element = self.wait_for_selector(history_options_button_selector)
    assert history_options_element.is_displayed()
    history_options_button_icon_selector = self.test_data["historyOptions"]["selectors"]["buttonIcon"]
    history_options_button_icon_element = self.wait_for_selector(history_options_button_icon_selector)
    assert history_options_button_icon_element.is_displayed()
    history_options_element.click()
def click_history_option(self, option_label):
    """Open the history options menu and click the entry containing ``option_label``."""
    self.click_history_options()
    menu_selector = self.history_options_menu_selector()
    menu_element = self.wait_for_selector(menu_selector)
    # XPath text() match: first anchor whose text contains the label.
    menu_selection_element = menu_element.find_element_by_xpath('//ul[@id="history-options-button-menu"]/li/a[text()[contains(.,"%s")]]' % option_label)
    menu_selection_element.click()
def history_options_menu_selector(self):
    """Return the CSS selector for the history options dropdown menu."""
    return self.test_data["historyOptions"]["selectors"]["menu"]
@retry_during_transitions
def history_panel_refresh_click(self):
    """Click the history panel refresh button (retried across DOM transitions)."""
    refresh_item = self.wait_for_selector_clickable("#history-refresh-button")
    refresh_item.click()
def history_panel_multi_operations_selector(self):
    """Return the selector for the history panel multi-operations icon."""
    return self.test_data["historyPanel"]["selectors"]["history"]["multiOperationsIcon"]
def history_panel_multi_operations_show(self):
    """Enable the multi-operations (item selection) mode of the history panel."""
    operations_selector = self.history_panel_multi_operations_selector()
    operations_element = self.wait_for_selector_clickable(operations_selector)
    operations_element.click()
@retry_during_transitions
def history_panel_muli_operation_select_hid(self, hid):
    """Check the multi-operation selector for the history item with ``hid``.

    NOTE(review): "muli" looks like a typo for "multi"; the name is kept
    for caller compatibility.
    """
    item_selector = self.history_panel_item_selector(hid, wait=True)
    operation_radio_selector = "%s .selector" % item_selector
    element = self.wait_for_selector_clickable(operation_radio_selector)
    element.click()
def history_panel_multi_operation_action_selector(self):
    """Return the selector for the multi-operations action button."""
    return self.test_data["historyPanel"]["selectors"]["history"]["multiOperationsActionBtn"]
def history_panel_multi_operation_action_click(self, action):
    """Click ``action`` in the history panel multi-operations dropdown."""
    # Hard-coded pause before the action button becomes usable --
    # NOTE(review): presumably a workaround for a UI transition; a
    # targeted explicit wait would be preferable. TODO confirm.
    time.sleep(5)
    button_element = self.wait_for_selector_clickable(self.history_panel_multi_operation_action_selector())
    button_element.click()
    menu_element = self.wait_for_selector_visible(".list-action-menu.open")
    action_element = menu_element.find_element_by_link_text(action)
    action_element.click()
def history_panel_item_selector(self, hid, wait=False):
    """Return the CSS id selector for the history item with ``hid``.

    The item is resolved through the API contents of the current history;
    an Exception is raised when no entry carries that hid. With
    ``wait=True`` the selector is also waited on to become visible.
    """
    history_id = self.current_history_id()
    contents = self.api_get("histories/%s/contents" % history_id)
    match = next((entry for entry in contents if entry["hid"] == hid), None)
    if match is None:
        raise Exception("Could not find history item with hid [%s] in contents [%s]" % (hid, contents))
    selector = "#%s-%s" % (match["history_content_type"], match["id"])
    if wait:
        self.wait_for_selector_visible(selector)
    return selector
def modal_body_selector(self):
    """Return the CSS selector for the body of the active modal dialog."""
    return ".modal-body"
def history_panel_item_body_selector(self, hid, wait=False):
    """Return the selector for the expanded body of the history item with ``hid``."""
    selector = "%s %s" % (self.history_panel_item_selector(hid), self.test_data["historyPanel"]["selectors"]["hda"]["body"])
    if wait:
        self.wait_for_selector_visible(selector)
    return selector
def hda_div_selector(self, hda_id):
    """Return the CSS selector for the dataset div with encoded id ``hda_id``."""
    return "#dataset-{0}".format(hda_id)
def hda_body_selector(self, hda_id):
    """Return the selector for the expanded body of the dataset div ``hda_id``."""
    return "%s %s" % (self.hda_div_selector(hda_id), self.test_data["historyPanel"]["selectors"]["hda"]["body"])
def hda_click_primary_action_button(self, hid, button_key):
    """Expand the dataset with ``hid`` and click one of its primary action buttons.

    ``button_key`` indexes the ``hdaPrimaryActionButtons`` entries of the
    historyPanel test data.
    """
    self.history_panel_click_item_title(hid=hid, wait=True)
    body_selector = self.history_panel_item_body_selector(hid=hid, wait=True)
    buttons_selector = body_selector + " " + self.test_data["historyPanel"]["selectors"]["hda"]["primaryActionButtons"]
    self.wait_for_selector_visible(buttons_selector)
    button_def = self.test_data["historyPanel"]["hdaPrimaryActionButtons"][button_key]
    button_selector = button_def["selector"]
    button_item = self.wait_for_selector_visible("%s %s" % (buttons_selector, button_selector))
    return button_item.click()
def history_panel_click_item_title(self, **kwds):
    """Click the title of a history panel item.

    Accepts either ``hda_id=...`` (encoded dataset id) or ``hid=...``;
    pass ``wait=True`` to pause briefly afterwards.
    """
    if "hda_id" in kwds:
        base_selector = self.hda_div_selector(kwds["hda_id"])
    else:
        base_selector = self.history_panel_item_selector(kwds["hid"])
    self.wait_for_selector("%s .title" % base_selector).click()
    if kwds.get("wait", False):
        # Brief pause -- presumably for the expand/collapse animation.
        time.sleep(.5)
def click_hda_title(self, hda_id, wait=False):
    """Click a dataset title by encoded ``hda_id`` (delegates to history_panel_click_item_title)."""
    return self.history_panel_click_item_title(hda_id=hda_id, wait=wait)
def collection_builder_set_name(self, name):
    """Type ``name`` into the collection builder name input."""
    name_element = self.wait_for_selector_visible("input.collection-name")
    name_element.send_keys(name)
def collection_builder_hide_originals(self):
    """Toggle the collection builder's hide-originals checkbox."""
    hide_element = self.wait_for_selector_clickable("input.hide-originals")
    hide_element.click()
def collection_builder_create(self):
    """Click the collection builder's create button."""
    create_element = self.wait_for_selector_clickable("button.create-collection")
    create_element.click()
def logout_if_needed(self):
    """Log out via the masthead user menu if a user is currently logged in."""
    if self.is_logged_in():
        self.home()
        self.click_masthead_user()
        self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["logout"])
        self.click_label('go to the home page')
        assert not self.is_logged_in()
def run_tour(self, path, skip_steps=None, sleep_on_steps=None, tour_callback=None):
    """Run a YAML tour file step by step.

    :param path: path of the tour YAML file
    :param skip_steps: iterable of step titles to skip (default: none)
    :param sleep_on_steps: mapping of step title -> seconds to sleep before
        executing that step (applies even to skipped steps, as before)
    :param tour_callback: object with ``handle_step(step, index)``; a no-op
        ``NullTourCallback`` is used when omitted
    """
    # Fix: the previous signature used mutable defaults ([] / {}), which
    # are shared across calls; use None sentinels instead.
    skip_steps = [] if skip_steps is None else skip_steps
    sleep_on_steps = {} if sleep_on_steps is None else sleep_on_steps
    if tour_callback is None:
        tour_callback = NullTourCallback()
    self.home()
    with open(path, "r") as f:
        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary objects; tours are trusted local files, but safe_load
        # would be preferable if tours never rely on custom tags -- confirm.
        tour_dict = yaml.load(f)
    steps = tour_dict["steps"]
    for i, step in enumerate(steps):
        title = step.get("title", None)
        # Sleep first (matching the original order), then honor skips.
        if title in sleep_on_steps:
            time.sleep(sleep_on_steps[title])
        if title in skip_steps:
            continue
        self.run_tour_step(step, i, tour_callback)
def tour_wait_for_clickable_element(self, selector):
    """Wait for a sizzle selector to become clickable and return the element."""
    return self.wait().until(sizzle.sizzle_selector_clickable(selector))
def tour_wait_for_element_present(self, selector):
    """Wait for a sizzle selector to be present in the DOM and return the element."""
    return self.wait().until(sizzle.sizzle_presence_of_selector(selector))
def get_tooltip_text(self, element, sleep=0, click_away=True):
    """Hover over ``element`` and return the text of the tooltip balloon.

    :param sleep: extra seconds to wait after hovering before reading
    :param click_away: click the center panel afterwards to dismiss the tooltip
    """
    tooltip_selector = self.test_data["selectors"]["tooltipBalloon"]
    # Make sure no stale tooltip is still on screen before hovering.
    self.wait_for_selector_absent(tooltip_selector)
    action_chains = self.action_chains()
    action_chains.move_to_element(element)
    action_chains.perform()
    if sleep > 0:
        time.sleep(sleep)
    tooltip_element = self.wait_for_selector_visible(tooltip_selector)
    text = tooltip_element.text
    if click_away:
        self.click_center()
    return text
def assert_tooltip_text(self, element, expected, sleep=0, click_away=True):
    """Hover ``element`` and assert its tooltip text equals ``expected``."""
    text = self.get_tooltip_text(element, sleep=sleep, click_away=click_away)
    assert text == expected, "Tooltip text [%s] was not expected text [%s]." % (text, expected)
def assert_error_message(self, contains=None):
    """Assert an error message element is shown (optionally containing ``contains``)."""
    return self._assert_message("error", contains=contains)
def assert_warning_message(self, contains=None):
    """Assert a warning message element is shown (optionally containing ``contains``)."""
    return self._assert_message("warning", contains=contains)
def _assert_message(self, type, contains=None):
    """Assert a message element of the given type (e.g. "error", "warning") is shown.

    NOTE(review): parameter ``type`` shadows the builtin; renaming it would
    change the keyword interface, so it is left as-is.
    """
    element = self.wait_for_selector(self.test_data["selectors"]["messages"][type])
    assert element, "No error message found, one expected."
    if contains is not None:
        assert contains in element.text
def assert_no_error_message(self):
    """Assert no error message element is present on the page."""
    self.assert_selector_absent(self.test_data["selectors"]["messages"]["error"])
def run_tour_step(self, step, step_index, tour_callback):
    """Execute a single tour step: preclicks, element wait, text insertion,
    callback notification, then postclicks.

    NOTE(review): when "textinsert" is present without "element", the
    ``element`` variable left over from the preclick loop receives the
    keystrokes -- looks accidental, confirm before relying on it.
    """
    preclick = step.get("preclick", [])
    for preclick_selector in preclick:
        print("(Pre)Clicking %s" % preclick_selector)
        element = self.tour_wait_for_clickable_element(preclick_selector)
        element.click()
    element_str = step.get("element", None)
    if element_str is not None:
        print("Waiting for element %s" % element_str)
        element = self.tour_wait_for_element_present(element_str)
        assert element is not None
    textinsert = step.get("textinsert", None)
    if textinsert is not None:
        element.send_keys(textinsert)
    tour_callback.handle_step(step, step_index)
    postclick = step.get("postclick", [])
    for postclick_selector in postclick:
        print("(Post)Clicking %s" % postclick_selector)
        element = self.tour_wait_for_clickable_element(postclick_selector)
        element.click()
def select2_set_value(self, container_selector, value, with_click=True):
    """Type ``value`` into the select2 widget under ``container_selector``
    and commit it, either by clicking the first suggestion (default) or by
    sending an enter keypress (``with_click=False``)."""
    container_elem = self.wait_for_selector(container_selector)
    text_element = container_elem.find_element_by_css_selector("input[type='text']")
    text_element.send_keys(value)
    # Wait for select2 options to load and then click to add this one.
    drop_elem = self.wait_for_selector_visible("#select2-drop")
    # Sleep seems to be needed - at least for send_enter.
    time.sleep(.5)
    if not with_click:
        # Wait for select2 options to load and then click to add this one.
        self.send_enter(text_element)
    else:
        select_elem = drop_elem.find_elements_by_css_selector(".select2-result-label")[0]
        action_chains = self.action_chains()
        action_chains.move_to_element(select_elem).click().perform()
    self.wait_for_selector_absent_or_hidden("#select2-drop")
| true | true |
f7319258f8ed765fde12cec23303820fd0ad703d | 14,348 | py | Python | Lib/warnings.py | adamwen829/cpython | 0f1c7c760c6b2804f5d05cae9ca045d1fdf3d667 | [
"PSF-2.0"
] | 2 | 2017-05-05T02:07:59.000Z | 2017-08-18T09:24:48.000Z | Lib/warnings.py | adamwen829/cpython | 0f1c7c760c6b2804f5d05cae9ca045d1fdf3d667 | [
"PSF-2.0"
] | null | null | null | Lib/warnings.py | adamwen829/cpython | 0f1c7c760c6b2804f5d05cae9ca045d1fdf3d667 | [
"PSF-2.0"
] | 3 | 2016-04-21T07:58:27.000Z | 2016-05-06T21:34:44.000Z | """Python part of the warnings subsystem."""
import sys
__all__ = ["warn", "warn_explicit", "showwarning",
"formatwarning", "filterwarnings", "simplefilter",
"resetwarnings", "catch_warnings"]
def showwarning(message, category, filename, lineno, file=None, line=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno, line))
except OSError:
pass # the file (probably stderr) is invalid - this warning gets lost.
def formatwarning(message, category, filename, lineno, line=None):
    """Function to format a warning the standard way."""
    import linecache
    result = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
    if line is None:
        line = linecache.getline(filename, lineno)
    if line:
        # Append the (stripped) offending source line when available.
        result += " %s\n" % line.strip()
    return result
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
                   append=False):
    """Insert an entry into the list of warnings filters (at the front).

    'action' -- one of "error", "ignore", "always", "default", "module",
                or "once"
    'message' -- a regex that the warning message must match
    'category' -- a class that the warning must be a subclass of
    'module' -- a regex that the module name must match
    'lineno' -- an integer line number, 0 matches all warnings
    'append' -- if true, append to the list of filters
    """
    import re
    assert action in ("error", "ignore", "always", "default", "module",
                      "once"), "invalid action: %r" % (action,)
    assert isinstance(message, str), "message must be a string"
    assert isinstance(category, type), "category must be a class"
    assert issubclass(category, Warning), "category must be a Warning subclass"
    assert isinstance(module, str), "module must be a string"
    assert isinstance(lineno, int) and lineno >= 0, \
        "lineno must be an int >= 0"
    # Note: the message pattern is compiled case-insensitively, the module
    # pattern is not.
    item = (action, re.compile(message, re.I), category,
            re.compile(module), lineno)
    if append:
        filters.append(item)
    else:
        filters.insert(0, item)
    # Bump the filters version so per-module warning registries invalidate.
    _filters_mutated()
def simplefilter(action, category=Warning, lineno=0, append=False):
    """Insert a simple entry into the list of warnings filters (at the front).

    A simple filter matches all modules and messages.
    'action' -- one of "error", "ignore", "always", "default", "module",
                or "once"
    'category' -- a class that the warning must be a subclass of
    'lineno' -- an integer line number, 0 matches all warnings
    'append' -- if true, append to the list of filters
    """
    assert action in ("error", "ignore", "always", "default", "module",
                      "once"), "invalid action: %r" % (action,)
    assert isinstance(lineno, int) and lineno >= 0, \
        "lineno must be an int >= 0"
    # None message/module patterns match any warning (see warn_explicit).
    item = (action, None, category, None, lineno)
    if append:
        filters.append(item)
    else:
        filters.insert(0, item)
    _filters_mutated()
def resetwarnings():
    """Clear the list of warning filters, so that no filters are active."""
    del filters[:]
    _filters_mutated()
# Raised internally when a -W command-line option cannot be parsed.
class _OptionError(Exception):
    """Exception used by option processing helpers."""
    pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
    """Apply each -W option string; report invalid ones on stderr."""
    for arg in args:
        try:
            _setoption(arg)
        except _OptionError as msg:
            print("Invalid -W option ignored:", msg, file=sys.stderr)
# Helper for _processoptions()
def _setoption(arg):
    """Parse one -W option string "action:message:category:module:lineno"
    and install the corresponding filter."""
    import re
    parts = arg.split(':')
    if len(parts) > 5:
        raise _OptionError("too many fields (max 5): %r" % (arg,))
    while len(parts) < 5:
        parts.append('')
    action, message, category, module, lineno = [s.strip()
                                                 for s in parts]
    action = _getaction(action)
    # message/module come from the command line as literal text; escape
    # them for the regex-based filter machinery.
    message = re.escape(message)
    category = _getcategory(category)
    module = re.escape(module)
    if module:
        # Anchor so the module field matches the whole module name.
        module = module + '$'
    if lineno:
        try:
            lineno = int(lineno)
            if lineno < 0:
                raise ValueError
        except (ValueError, OverflowError):
            raise _OptionError("invalid lineno %r" % (lineno,))
    else:
        lineno = 0
    filterwarnings(action, message, category, module, lineno)
# Helper for _setoption()
def _getaction(action):
if not action:
return "default"
if action == "all": return "always" # Alias
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
    """Resolve a -W category field to a Warning subclass.

    Bare names (restricted by regex to identifier characters, which keeps
    the ``eval`` below from running arbitrary expressions) are evaluated;
    dotted names are imported as module.klass.
    """
    import re
    if not category:
        return Warning
    if re.match("^[a-zA-Z0-9_]+$", category):
        try:
            cat = eval(category)
        except NameError:
            raise _OptionError("unknown warning category: %r" % (category,))
    else:
        # Dotted name: import the module, then fetch the class attribute.
        i = category.rfind(".")
        module = category[:i]
        klass = category[i+1:]
        try:
            m = __import__(module, None, None, [klass])
        except ImportError:
            raise _OptionError("invalid module name: %r" % (module,))
        try:
            cat = getattr(m, klass)
        except AttributeError:
            raise _OptionError("unknown warning category: %r" % (category,))
    if not issubclass(cat, Warning):
        raise _OptionError("invalid warning category: %r" % (category,))
    return cat
# Code typically replaced by _warnings
def warn(message, category=None, stacklevel=1):
    """Issue a warning, or maybe ignore it or raise an exception."""
    # Check if message is already a Warning object
    if isinstance(message, Warning):
        category = message.__class__
    # Check category argument
    if category is None:
        category = UserWarning
    if not (isinstance(category, type) and issubclass(category, Warning)):
        raise TypeError("category must be a Warning subclass, "
                        "not '{:s}'".format(type(category).__name__))
    # Get context information
    try:
        caller = sys._getframe(stacklevel)
    except ValueError:
        # stacklevel exceeds the stack depth; fall back to sys's namespace.
        globals = sys.__dict__
        lineno = 1
    else:
        globals = caller.f_globals
        lineno = caller.f_lineno
    if '__name__' in globals:
        module = globals['__name__']
    else:
        module = "<string>"
    filename = globals.get('__file__')
    if filename:
        fnl = filename.lower()
        if fnl.endswith((".pyc", ".pyo")):
            # Report the .py source file, not the compiled file.
            filename = filename[:-1]
    else:
        if module == "__main__":
            try:
                filename = sys.argv[0]
            except AttributeError:
                # embedded interpreters don't have sys.argv, see bug #839151
                filename = '__main__'
        if not filename:
            filename = module
    registry = globals.setdefault("__warningregistry__", {})
    warn_explicit(message, category, filename, lineno, module, registry,
                  globals)
def warn_explicit(message, category, filename, lineno,
                  module=None, registry=None, module_globals=None):
    """Low-level warning dispatch: consult the filters and apply the action.

    ``registry`` is the per-module __warningregistry__ dict used to
    deduplicate warnings according to the matched filter's action.
    """
    lineno = int(lineno)
    if module is None:
        module = filename or "<unknown>"
        if module[-3:].lower() == ".py":
            module = module[:-3] # XXX What about leading pathname?
    if registry is None:
        registry = {}
    if registry.get('version', 0) != _filters_version:
        # Filters changed since this registry was populated; start fresh.
        registry.clear()
        registry['version'] = _filters_version
    if isinstance(message, Warning):
        text = str(message)
        category = message.__class__
    else:
        text = message
        message = category(message)
    key = (text, category, lineno)
    # Quick test for common case
    if registry.get(key):
        return
    # Search the filters
    for item in filters:
        action, msg, cat, mod, ln = item
        if ((msg is None or msg.match(text)) and
            issubclass(category, cat) and
            (mod is None or mod.match(module)) and
            (ln == 0 or lineno == ln)):
            break
    else:
        action = defaultaction
    # Early exit actions
    if action == "ignore":
        registry[key] = 1
        return

    # Prime the linecache for formatting, in case the
    # "file" is actually in a zipfile or something.
    import linecache
    linecache.getlines(filename, module_globals)

    if action == "error":
        raise message
    # Other actions
    if action == "once":
        registry[key] = 1
        oncekey = (text, category)
        if onceregistry.get(oncekey):
            return
        onceregistry[oncekey] = 1
    elif action == "always":
        pass
    elif action == "module":
        registry[key] = 1
        altkey = (text, category, 0)
        if registry.get(altkey):
            return
        registry[altkey] = 1
    elif action == "default":
        registry[key] = 1
    else:
        # Unrecognized actions are errors
        raise RuntimeError(
              "Unrecognized action (%r) in warnings.filters:\n %s" %
              (action, item))
    if not callable(showwarning):
        raise TypeError("warnings.showwarning() must be set to a "
                        "function or method")
    # Print message and context
    showwarning(message, category, filename, lineno)
class WarningMessage(object):
    """Holds the result of a single showwarning() call."""

    _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
                        "line")

    def __init__(self, message, category, filename, lineno, file=None,
                 line=None):
        # Explicit assignments instead of the locals()-driven loop; the
        # attribute set matches _WARNING_DETAILS exactly.
        self.message = message
        self.category = category
        self.filename = filename
        self.lineno = lineno
        self.file = file
        self.line = line
        self._category_name = category.__name__ if category else None

    def __str__(self):
        return ("{message : %r, category : %r, filename : %r, lineno : %s, "
                "line : %r}" % (self.message, self._category_name,
                                self.filename, self.lineno, self.line))
class catch_warnings(object):
    """A context manager that copies and restores the warnings filter upon
    exiting the context.

    The 'record' argument specifies whether warnings should be captured by a
    custom implementation of warnings.showwarning() and be appended to a list
    returned by the context manager. Otherwise None is returned by the context
    manager. The objects appended to the list are arguments whose attributes
    mirror the arguments to showwarning().

    The 'module' argument is to specify an alternative module to the module
    named 'warnings' and imported under that name. This argument is only useful
    when testing the warnings module itself.
    """

    def __init__(self, *, record=False, module=None):
        """Specify whether to record warnings and if an alternative module
        should be used other than sys.modules['warnings'].

        For compatibility with Python 3.0, please consider all arguments to be
        keyword-only.
        """
        self._record = record
        self._module = sys.modules['warnings'] if module is None else module
        self._entered = False  # guards against nested use of one instance

    def __repr__(self):
        args = []
        if self._record:
            args.append("record=True")
        if self._module is not sys.modules['warnings']:
            args.append("module=%r" % self._module)
        name = type(self).__name__
        return "%s(%s)" % (name, ", ".join(args))

    def __enter__(self):
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Save the filter list and install a private copy so changes made
        # inside the context do not leak out.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._module._filters_mutated()
        self._showwarning = self._module.showwarning
        if self._record:
            log = []
            def showwarning(*args, **kwargs):
                log.append(WarningMessage(*args, **kwargs))
            self._module.showwarning = showwarning
            return log
        else:
            return None

    def __exit__(self, *exc_info):
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        # Restore the saved filter list and showwarning hook.
        self._module.filters = self._filters
        self._module._filters_mutated()
        self._module.showwarning = self._showwarning
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warning, or 0 to mean any line
# If either if the compiled regexs are None, match anything.
_warnings_defaults = False
try:
    # Prefer the C implementation of the core machinery when available.
    from _warnings import (filters, _defaultaction, _onceregistry,
                           warn, warn_explicit, _filters_mutated)
    defaultaction = _defaultaction
    onceregistry = _onceregistry
    _warnings_defaults = True
except ImportError:
    # Pure-Python fallback state used by the functions defined above.
    filters = []
    defaultaction = "default"
    onceregistry = {}

    _filters_version = 1

    def _filters_mutated():
        global _filters_version
        _filters_version += 1


# Module initialization
_processoptions(sys.warnoptions)
if not _warnings_defaults:
    # Install the default filters only when the C module did not.
    silence = [ImportWarning, PendingDeprecationWarning]
    silence.append(DeprecationWarning)
    for cls in silence:
        simplefilter("ignore", category=cls)
    bytes_warning = sys.flags.bytes_warning
    if bytes_warning > 1:
        bytes_action = "error"
    elif bytes_warning:
        bytes_action = "default"
    else:
        bytes_action = "ignore"
    simplefilter(bytes_action, category=BytesWarning, append=1)
    # resource usage warnings are enabled by default in pydebug mode
    if hasattr(sys, 'gettotalrefcount'):
        resource_action = "always"
    else:
        resource_action = "ignore"
    simplefilter(resource_action, category=ResourceWarning, append=1)

del _warnings_defaults
| 34.995122 | 79 | 0.617299 |
import sys
__all__ = ["warn", "warn_explicit", "showwarning",
"formatwarning", "filterwarnings", "simplefilter",
"resetwarnings", "catch_warnings"]
def showwarning(message, category, filename, lineno, file=None, line=None):
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno, line))
except OSError:
pass
def formatwarning(message, category, filename, lineno, line=None):
import linecache
s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
line = linecache.getline(filename, lineno) if line is None else line
if line:
line = line.strip()
s += " %s\n" % line
return s
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
append=False):
import re
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(message, str), "message must be a string"
assert isinstance(category, type), "category must be a class"
assert issubclass(category, Warning), "category must be a Warning subclass"
assert isinstance(module, str), "module must be a string"
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, re.compile(message, re.I), category,
re.compile(module), lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
_filters_mutated()
def simplefilter(action, category=Warning, lineno=0, append=False):
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, None, category, None, lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
_filters_mutated()
def resetwarnings():
filters[:] = []
_filters_mutated()
class _OptionError(Exception):
pass
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError as msg:
print("Invalid -W option ignored:", msg, file=sys.stderr)
def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (ValueError, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
def _getaction(action):
if not action:
return "default"
if action == "all": return "always"
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
def warn(message, category=None, stacklevel=1):
if isinstance(message, Warning):
category = message.__class__
if category is None:
category = UserWarning
if not (isinstance(category, type) and issubclass(category, Warning)):
raise TypeError("category must be a Warning subclass, "
"not '{:s}'".format(type(category).__name__))
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
else:
if module == "__main__":
try:
filename = sys.argv[0]
except AttributeError:
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry,
globals)
def warn_explicit(message, category, filename, lineno,
module=None, registry=None, module_globals=None):
lineno = int(lineno)
if module is None:
module = filename or "<unknown>"
if module[-3:].lower() == ".py":
module = module[:-3] # XXX What about leading pathname?
if registry is None:
registry = {}
if registry.get('version', 0) != _filters_version:
registry.clear()
registry['version'] = _filters_version
if isinstance(message, Warning):
text = str(message)
category = message.__class__
else:
text = message
message = category(message)
key = (text, category, lineno)
# Quick test for common case
if registry.get(key):
return
# Search the filters
for item in filters:
action, msg, cat, mod, ln = item
if ((msg is None or msg.match(text)) and
issubclass(category, cat) and
(mod is None or mod.match(module)) and
(ln == 0 or lineno == ln)):
break
else:
action = defaultaction
# Early exit actions
if action == "ignore":
registry[key] = 1
return
# Prime the linecache for formatting, in case the
# "file" is actually in a zipfile or something.
import linecache
linecache.getlines(filename, module_globals)
if action == "error":
raise message
# Other actions
if action == "once":
registry[key] = 1
oncekey = (text, category)
if onceregistry.get(oncekey):
return
onceregistry[oncekey] = 1
elif action == "always":
pass
elif action == "module":
registry[key] = 1
altkey = (text, category, 0)
if registry.get(altkey):
return
registry[altkey] = 1
elif action == "default":
registry[key] = 1
else:
# Unrecognized actions are errors
raise RuntimeError(
"Unrecognized action (%r) in warnings.filters:\n %s" %
(action, item))
if not callable(showwarning):
raise TypeError("warnings.showwarning() must be set to a "
"function or method")
# Print message and context
showwarning(message, category, filename, lineno)
class WarningMessage(object):
_WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
"line")
def __init__(self, message, category, filename, lineno, file=None,
line=None):
local_values = locals()
for attr in self._WARNING_DETAILS:
setattr(self, attr, local_values[attr])
self._category_name = category.__name__ if category else None
def __str__(self):
return ("{message : %r, category : %r, filename : %r, lineno : %s, "
"line : %r}" % (self.message, self._category_name,
self.filename, self.lineno, self.line))
class catch_warnings(object):
def __init__(self, *, record=False, module=None):
self._record = record
self._module = sys.modules['warnings'] if module is None else module
self._entered = False
def __repr__(self):
args = []
if self._record:
args.append("record=True")
if self._module is not sys.modules['warnings']:
args.append("module=%r" % self._module)
name = type(self).__name__
return "%s(%s)" % (name, ", ".join(args))
def __enter__(self):
if self._entered:
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
self._filters = self._module.filters
self._module.filters = self._filters[:]
self._module._filters_mutated()
self._showwarning = self._module.showwarning
if self._record:
log = []
def showwarning(*args, **kwargs):
log.append(WarningMessage(*args, **kwargs))
self._module.showwarning = showwarning
return log
else:
return None
def __exit__(self, *exc_info):
if not self._entered:
raise RuntimeError("Cannot exit %r without entering first" % self)
self._module.filters = self._filters
self._module._filters_mutated()
self._module.showwarning = self._showwarning
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warning, or 0 to mean any line
# If either if the compiled regexs are None, match anything.
_warnings_defaults = False
# Prefer the C-accelerated _warnings module; fall back to pure-Python state
# (an empty filter list and a Python-level mutation counter) if unavailable.
try:
    from _warnings import (filters, _defaultaction, _onceregistry,
                           warn, warn_explicit, _filters_mutated)
    defaultaction = _defaultaction
    onceregistry = _onceregistry
    _warnings_defaults = True
except ImportError:
    filters = []
    defaultaction = "default"
    onceregistry = {}
    _filters_version = 1
    def _filters_mutated():
        # Pure-Python stand-in: bump a version counter on every change.
        global _filters_version
        _filters_version += 1
# Module initialization
_processoptions(sys.warnoptions)
# The C module installs its own default filters; only the pure-Python
# fallback needs them set up here.
if not _warnings_defaults:
    silence = [ImportWarning, PendingDeprecationWarning]
    silence.append(DeprecationWarning)
    for cls in silence:
        simplefilter("ignore", category=cls)
    bytes_warning = sys.flags.bytes_warning
    if bytes_warning > 1:
        bytes_action = "error"
    elif bytes_warning:
        bytes_action = "default"
    else:
        bytes_action = "ignore"
    simplefilter(bytes_action, category=BytesWarning, append=1)
    # resource usage warnings are enabled by default in pydebug mode
    if hasattr(sys, 'gettotalrefcount'):
        resource_action = "always"
    else:
        resource_action = "ignore"
    simplefilter(resource_action, category=ResourceWarning, append=1)
del _warnings_defaults
| true | true |
f731926d323d02ff6850d4318c6001330ae3775c | 737 | py | Python | benchmarks/__init__.py | cedric05/dothttp | ae68f2c3dbb8f1053a0feeab0ac829aa5d3a2a6e | [
"Apache-2.0"
] | 10 | 2021-02-09T17:21:49.000Z | 2021-12-05T19:10:53.000Z | benchmarks/__init__.py | cedric05/dothttp | ae68f2c3dbb8f1053a0feeab0ac829aa5d3a2a6e | [
"Apache-2.0"
] | 60 | 2021-02-07T17:20:47.000Z | 2022-03-12T06:26:23.000Z | benchmarks/__init__.py | cedric05/dothttp | ae68f2c3dbb8f1053a0feeab0ac829aa5d3a2a6e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import os
from dothttp import Config
from dothttp.request_base import RequestCompiler
def run_model():
    """Benchmark loading an example .http file into the textx model.

    Changes to http.tx (the grammar) will raise or lower this benchmark's
    run time.
    """
    example_path = os.path.join(os.path.dirname(__file__), "../examples/example.http")
    config = Config(
        file=example_path,
        env=[],
        properties=[],
        curl=False,
        property_file=None,
        debug=True,
        no_cookie=False,
        format=False,
        info=False,
        target="1",
    )
    compiler = RequestCompiler(config)
    compiler.load()
    compiler.load_def()
| 28.346154 | 110 | 0.671642 |
import os
from dothttp import Config
from dothttp.request_base import RequestCompiler
def run_model():
filename = os.path.join(os.path.dirname(__file__), "../examples/example.http")
envs = []
target = "1"
nocookie = False
curl = False
properties = []
config = Config(file=filename, env=envs, properties=properties, curl=curl, property_file=None, debug=True,
no_cookie=nocookie, format=False, info=False, target=target)
comp = RequestCompiler(config)
comp.load()
comp.load_def()
return
| true | true |
f73192bc3cb337f8236fb50957755b303a00785e | 6,757 | py | Python | cheating/client.py | totu/challenge-fi-2021 | 7e41581e3dd5a61b838ecb7aadfde501b94802c5 | [
"Unlicense"
] | null | null | null | cheating/client.py | totu/challenge-fi-2021 | 7e41581e3dd5a61b838ecb7aadfde501b94802c5 | [
"Unlicense"
] | null | null | null | cheating/client.py | totu/challenge-fi-2021 | 7e41581e3dd5a61b838ecb7aadfde501b94802c5 | [
"Unlicense"
] | null | null | null | import pygame
from pygame.locals import *
from _thread import *
import os
import random
import socket
import json
import math
from gamelogic import objects, world, common
# Number of parallel connections opened to the game server.
# NOTE(review): presumably used so each sent event is delivered more than
# once (cheat client) -- confirm against the send loop below.
SOCK_COUNT = 2
pygame.init()
# Window size in pixels.
W, H = 800, 437
win = pygame.display.set_mode((W, H))
pygame.display.set_caption("Projekt Kyyber 2021 Client")
# Load the seven parallax background layers (images/bg1/1.png .. 7.png).
bg_orig = [
    pygame.image.load(os.path.join("images/bg1/", str(x) + ".png")).convert_alpha()
    for x in range(1, 8)
]
# bg_orig = [pygame.image.load(os.path.join('images/bg2/', str(x) + '.png')).convert_alpha() for x in range(1,7)]
bg = []
# Width of each layer after scaling it to the window height.
BGW = int(bg_orig[0].get_width() * (H / float(bg_orig[0].get_height())))
for i in bg_orig:
    bg.append(pygame.transform.scale(i, (BGW, H)))
# Reverse the draw order of the layers.
bg.reverse()
clock = pygame.time.Clock()
camera_pos = 0
gameoverfade = 0
def redrawWindow():
    """Draw one frame: parallax background, players, enemies, bosses,
    projectiles and the HUD / game-over overlay.

    Reads the module-level globals (win, bg, gamestate, my_id, ...) and
    mutates camera_pos / gameoverfade.
    """
    global camera_pos, gameoverfade
    largeFont = pygame.font.SysFont("comicsans", 30)
    hugeFont = pygame.font.SysFont("comicsans", 90)
    gameover = False
    # Find our own player to center the camera and build the HUD text.
    for player in gamestate.players:
        if player.id == my_id:
            camera_pos = player.x - W / 2
            # Clamp the camera to the world bounds.
            camera_pos = min(camera_pos, world.WORLD_SIZE - W)
            camera_pos = max(camera_pos, 0)
            text = largeFont.render(
                "AMMO: {}, RELOAD: {}, HP: {}".format(
                    player.ammo, player.reloadleft, player.hp
                ),
                1,
                (255, 255, 255),
            )
            break
    else:
        # Our player id is absent from the state: show the game-over text.
        text = hugeFont.render("TRY HARDER!", 1, (255, 255, 255))
        gameover = True
    # Parallax: each layer scrolls at a fraction of the camera speed.
    for j, layer in enumerate(bg):
        for i in range(0, W * 2, BGW):
            camera_pos_bg = (camera_pos * (float(j) / float(len(bg)))) % BGW
            win.blit(bg[j], (i - camera_pos_bg, 0))
    for player in gamestate.players:
        player.draw(win, camera_pos, my_id)
    sorted_enemies = sorted(gamestate.enemies, key=lambda i: i.y_pos)
    sorted_enemies.reverse() # Closest ones to front
    for enemy in sorted_enemies:
        enemy.draw(win, camera_pos)
    for boss in gamestate.bosses:
        boss.draw(win, camera_pos)
    for projectile in gamestate.projectiles:
        projectile.draw(win, camera_pos)
    if gameover:
        # Fade to black, then draw the game-over text centered.
        veil = pygame.Surface((W, H))
        veil.fill((0, 0, 0))
        veil.set_alpha(gameoverfade)
        gameoverfade += 0.1
        if gameoverfade > 255:
            gameoverfade = 255
        win.blit(veil, (0, 0))
        win.blit(text, (W / 2 - text.get_width() / 2, H / 2 - text.get_height() / 2))
    else:
        win.blit(text, (20, 400))
    # Achievements list in the top-left corner, one line each.
    for i, achievement in enumerate(achievements):
        win.blit(
            largeFont.render(
                str(achievement) + ": " + str(achievements[achievement]),
                1,
                (255, 255, 255),
            ),
            (10, 10 + 30 * i),
        )
    pygame.display.update()
def update_gamestate_thread():
    """Background thread: advance local physics and redraw at world.SPEED fps.

    Runs forever and rebinds the module-level `gamestate`.
    """
    global gamestate
    while True:
        clock.tick(world.SPEED)
        gamestate = common.physics(gamestate)
        redrawWindow()
# Initial client state, then spawn the physics/render thread and connect
# SOCK_COUNT sockets to the server.  `main` is the first socket; it is the
# one we read gamestate updates and the player id from.
gamestate = objects.gamestate()
achievements = []
my_id = 0
sendevent = []
start_new_thread(update_gamestate_thread, ())
sockets = set()
main = None
for i in range(SOCK_COUNT):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sockets.add(sock)
# NOTE(review): the localhost address below is dead -- immediately
# overwritten by the remote address on the next line.
serveraddr = "127.0.0.1"
serveraddr = "challenge.fi" # Join remote server
for sock in sockets:
    if main is None:
        main = sock
    sock.connect((serveraddr, 9999))
data = main.recv(1024).strip()
my_id = json.loads(data)["player_id"]
# Main client loop: collect input, queue events in `sendevent`, send them
# over every socket, then block until a complete gamestate JSON arrives.
while True:
    me = None
    for player in gamestate.players:
        if player.id == my_id:
            me = player
    if me:
        # NOTE(review): cheat -- an extra hard-coded 'shoot' event is
        # queued unconditionally every frame while we are alive.
        sendevent.append(
            [
            ['shoot', 4900, 100, 0.38]
            ])
    # NOTE(review): everything below runs even when `me` is None; lines
    # that touch `me.*` will raise once our player has died.
    keys = pygame.key.get_pressed()
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            pygame.quit()
        if keys[pygame.K_x]:
            # Debug shot straight from the player position (angle kept,
            # offset math commented out).
            eka = me.x #+ math.cos(me.mouseDir(camera_pos)) * 60
            toka = me.y + 60 #- math.sin(me.mouseDir(camera_pos)) * 60
            mouse = me.mouseDir(camera_pos)
            sendevent.append(
                [
                    "shoot", eka, toka, mouse
                ]
            )
            print(f"{eka}, {toka}, {mouse}")
        if keys[pygame.K_r]:
            # Hard-coded shot at fixed world coordinates (cheat testing).
            eka = 4900
            sendevent.append(
                # ['shoot', 4931.029795212544, 260.67432742841345, 1.7208623491309805]
                #['shoot', 4754.656506303356, 100, 0]
                ['shoot', 4900, 100, 0.38]
                #205.57772405728508, 297.39211224788403, 0.38633721482131445]
            )
        if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
            # Normal aimed shot: spawn slightly in front of the player,
            # towards the mouse cursor.
            eka = me.x - 20 + math.cos(me.mouseDir(camera_pos)) * 60
            print(eka)
            toka = me.y - math.sin(me.mouseDir(camera_pos)) * 60
            mouse = me.mouseDir(camera_pos)
            sendevent.append(
                [
                    "shoot", eka, toka, mouse
                ]
            )
            print(f"{eka}, {toka}, {mouse}")
        if keys[pygame.K_SPACE]:
            sendevent.append(["jump"])
        if keys[pygame.K_m]:
            # Macro key: jump + reload + jump in one frame.
            sendevent.append(["jump"])
            sendevent.append(["reload"])
            sendevent.append(["jump"])
        if keys[pygame.K_a] and not me.dir == 1:
            sendevent.append(["left"])
        elif keys[pygame.K_d] and not me.dir == -1:
            sendevent.append(["right"])
        elif not me.dir == 0:
            sendevent.append(["stop"])
        # Apply the events locally for prediction, then send them on EVERY
        # socket (so the server receives them SOCK_COUNT times).
        common.parse_clientevents(my_id, json.dumps(sendevent), gamestate)
        # for boss in gamestate.bosses:
        #     print(f"{boss.id} {boss.x},{boss.y}, hp:{boss.hp}")
        for sock in sockets:
            sock.sendall(bytes(json.dumps(sendevent), "utf-8"))
    # if gamestate.projectiles:
    #     for projectile in gamestate.projectiles:
    #         print(f"{projectile.x},{projectile.y} d:{projectile.dir} r:{projectile.moveremaining}")
    if sendevent:
        print(sendevent)
        print(f"{me.x},{me.y}")
    sendevent = []
    for boss in gamestate.bosses:
        print(boss.x-me.x, boss.y-me.y, boss.hp)
    # receive packets until a valid json can be formed
    response = b""
    while True:
        chunk = main.recv(1000)
        response += chunk
        try:
            gamestate_dict, achievements = json.loads(response)
            gamestate = common.update_gamestate_from_dict(
                gamestate, gamestate_dict
            )
            break
        except Exception as e:
            pass
| 31.282407 | 113 | 0.547432 | import pygame
from pygame.locals import *
from _thread import *
import os
import random
import socket
import json
import math
from gamelogic import objects, world, common
SOCK_COUNT = 2
pygame.init()
W, H = 800, 437
win = pygame.display.set_mode((W, H))
pygame.display.set_caption("Projekt Kyyber 2021 Client")
bg_orig = [
pygame.image.load(os.path.join("images/bg1/", str(x) + ".png")).convert_alpha()
for x in range(1, 8)
]
bg = []
BGW = int(bg_orig[0].get_width() * (H / float(bg_orig[0].get_height())))
for i in bg_orig:
bg.append(pygame.transform.scale(i, (BGW, H)))
bg.reverse()
clock = pygame.time.Clock()
camera_pos = 0
gameoverfade = 0
def redrawWindow():
global camera_pos, gameoverfade
largeFont = pygame.font.SysFont("comicsans", 30)
hugeFont = pygame.font.SysFont("comicsans", 90)
gameover = False
for player in gamestate.players:
if player.id == my_id:
camera_pos = player.x - W / 2
camera_pos = min(camera_pos, world.WORLD_SIZE - W)
camera_pos = max(camera_pos, 0)
text = largeFont.render(
"AMMO: {}, RELOAD: {}, HP: {}".format(
player.ammo, player.reloadleft, player.hp
),
1,
(255, 255, 255),
)
break
else:
text = hugeFont.render("TRY HARDER!", 1, (255, 255, 255))
gameover = True
for j, layer in enumerate(bg):
for i in range(0, W * 2, BGW):
camera_pos_bg = (camera_pos * (float(j) / float(len(bg)))) % BGW
win.blit(bg[j], (i - camera_pos_bg, 0))
for player in gamestate.players:
player.draw(win, camera_pos, my_id)
sorted_enemies = sorted(gamestate.enemies, key=lambda i: i.y_pos)
sorted_enemies.reverse()
for enemy in sorted_enemies:
enemy.draw(win, camera_pos)
for boss in gamestate.bosses:
boss.draw(win, camera_pos)
for projectile in gamestate.projectiles:
projectile.draw(win, camera_pos)
if gameover:
veil = pygame.Surface((W, H))
veil.fill((0, 0, 0))
veil.set_alpha(gameoverfade)
gameoverfade += 0.1
if gameoverfade > 255:
gameoverfade = 255
win.blit(veil, (0, 0))
win.blit(text, (W / 2 - text.get_width() / 2, H / 2 - text.get_height() / 2))
else:
win.blit(text, (20, 400))
for i, achievement in enumerate(achievements):
win.blit(
largeFont.render(
str(achievement) + ": " + str(achievements[achievement]),
1,
(255, 255, 255),
),
(10, 10 + 30 * i),
)
pygame.display.update()
def update_gamestate_thread():
global gamestate
while True:
clock.tick(world.SPEED)
gamestate = common.physics(gamestate)
redrawWindow()
gamestate = objects.gamestate()
achievements = []
my_id = 0
sendevent = []
start_new_thread(update_gamestate_thread, ())
sockets = set()
main = None
for i in range(SOCK_COUNT):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sockets.add(sock)
serveraddr = "127.0.0.1"
serveraddr = "challenge.fi"
for sock in sockets:
if main is None:
main = sock
sock.connect((serveraddr, 9999))
data = main.recv(1024).strip()
my_id = json.loads(data)["player_id"]
while True:
me = None
for player in gamestate.players:
if player.id == my_id:
me = player
if me:
sendevent.append(
[
['shoot', 4900, 100, 0.38]
])
keys = pygame.key.get_pressed()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
if keys[pygame.K_x]:
eka = me.x
toka = me.y + 60
mouse = me.mouseDir(camera_pos)
sendevent.append(
[
"shoot", eka, toka, mouse
]
)
print(f"{eka}, {toka}, {mouse}")
if keys[pygame.K_r]:
eka = 4900
sendevent.append(
['shoot', 4900, 100, 0.38]
)
if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
eka = me.x - 20 + math.cos(me.mouseDir(camera_pos)) * 60
print(eka)
toka = me.y - math.sin(me.mouseDir(camera_pos)) * 60
mouse = me.mouseDir(camera_pos)
sendevent.append(
[
"shoot", eka, toka, mouse
]
)
print(f"{eka}, {toka}, {mouse}")
if keys[pygame.K_SPACE]:
sendevent.append(["jump"])
if keys[pygame.K_m]:
sendevent.append(["jump"])
sendevent.append(["reload"])
sendevent.append(["jump"])
if keys[pygame.K_a] and not me.dir == 1:
sendevent.append(["left"])
elif keys[pygame.K_d] and not me.dir == -1:
sendevent.append(["right"])
elif not me.dir == 0:
sendevent.append(["stop"])
common.parse_clientevents(my_id, json.dumps(sendevent), gamestate)
for sock in sockets:
sock.sendall(bytes(json.dumps(sendevent), "utf-8"))
if sendevent:
print(sendevent)
print(f"{me.x},{me.y}")
sendevent = []
for boss in gamestate.bosses:
print(boss.x-me.x, boss.y-me.y, boss.hp)
response = b""
while True:
chunk = main.recv(1000)
response += chunk
try:
gamestate_dict, achievements = json.loads(response)
gamestate = common.update_gamestate_from_dict(
gamestate, gamestate_dict
)
break
except Exception as e:
pass
| true | true |
f73192e61c63b58e1cc57017f11c01d7531dc764 | 3,051 | py | Python | pyaz/pyaz_utils.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/pyaz_utils.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/pyaz_utils.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | 1 | 2022-02-03T09:12:01.000Z | 2022-02-03T09:12:01.000Z | """Utility functions for the pyaz generated code to use."""
import json
import logging
import shlex
import shutil
import subprocess
from typing import Dict
def _call_az(command: str, parameters: Dict) -> object:
    """
    Call an az command (supplied as a string, and parameters as dictionary).

    Calls az cli via a subprocess and returns the az cli JSON output
    converted to a Python object, or the raw stdout text when the output
    is not valid JSON.

    Raises:
        subprocess.CalledProcessError: if az exits with a non-zero status
            (subprocess.run is invoked with check=True).
        Exception: if az produced no stdout but wrote to stderr.

    Example:
        `
        _call_az("az group create", locals())
        `
    """
    # format the parameters into a list and append them to the command words
    params = _get_params(parameters)
    commands = command.split()
    commands.extend(params)
    full_command = " ".join(commands)
    print(f"Executing command: {full_command}")
    logging.info("Executing command: %s", full_command)
    # split full command using shlex rules so quoted parameter values
    # survive as single arguments
    commands = shlex.split(full_command)
    # strip off az and replace it with the full path to az to accommodate
    # Windows, where the bare name may not resolve from a subprocess
    commands.pop(0)
    commands.insert(0, shutil.which("az"))
    output = subprocess.run(
        commands,
        shell=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=True,
    )
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        try:
            return json.loads(stdout)
        except ValueError:
            # Some az commands emit plain text rather than JSON; return it
            # as-is.  ValueError covers json.JSONDecodeError without
            # swallowing unrelated errors like the old bare `except:` did.
            return stdout
    elif stderr:
        raise Exception(stderr)
def _get_cli_param_name(name: str) -> str:
"""
Convert parameter name back to cli format from pythonic version.
- Strips trailing underscore from keywords
- Converts remaining underscores to dashes
- Adds leading dashes
"""
if name[-1] == "_":
name = name[0:-1]
name = name.replace("_", "-")
name = f"--{name}"
return name
def _get_params(params: Dict) -> list:
    """
    Convert a locals()-style dict into a flat list of az cli arguments.

    Parameters with falsy values are skipped entirely.  Booleans become
    bare flags with no value token; everything else becomes a flag
    followed by its double-quoted value.  The special "tags" parameter
    may hold several space-separated tags, each emitted as its own
    quoted token.
    """
    args = []
    for name, value in params.items():
        if not value:
            continue
        args.append(_get_cli_param_name(name))
        if isinstance(value, bool):
            # flags carry no value token
            continue
        if name == "tags":
            # multiple space-separated tags -> one quoted token each
            args.extend(f'"{tag}"' for tag in shlex.split(value))
        else:
            args.append(f'"{value}"')
    return args
| 28.514019 | 87 | 0.621763 |
import json
import logging
import shlex
import shutil
import subprocess
from typing import Dict
def _call_az(command: str, parameters: Dict) -> object:
params = _get_params(parameters)
commands = command.split()
commands.extend(params)
full_command = " ".join(commands)
print(f"Executing command: {full_command}")
logging.info("Executing command: %s", full_command)
commands = shlex.split(full_command)
commands.pop(0)
commands.insert(0, shutil.which("az"))
output = subprocess.run(
commands,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=True,
)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
try:
return json.loads(stdout)
except:
return stdout
elif stderr:
raise Exception(stderr)
def _get_cli_param_name(name: str) -> str:
if name[-1] == "_":
name = name[0:-1]
name = name.replace("_", "-")
name = f"--{name}"
return name
def _get_params(params: Dict) -> list:
output = []
for param in params:
if params[param]:
if isinstance(params[param], bool):
output.append(_get_cli_param_name(param))
else:
output.append(_get_cli_param_name(param))
# special case to handle tags, need to apply shlex.split
# to handle case where there are multiple tags
if param == "tags":
param_values = shlex.split(params[param])
param_values = [f'"{value}"' for value in param_values]
output.extend(param_values)
else:
# wrap parameter value in quotes
output.append(f'"{params[param]}"')
return output
| true | true |
f731932dfc82782e68c3b57938cdc030d69e56ef | 53,591 | py | Python | electrum/wallet_db.py | L-47/qtum-electrum | dd1b0a8b6ef6e96015a6210de36b23949eaad359 | [
"MIT"
] | 1 | 2020-07-21T18:37:59.000Z | 2020-07-21T18:37:59.000Z | electrum/wallet_db.py | L-47/qtum-electrum | dd1b0a8b6ef6e96015a6210de36b23949eaad359 | [
"MIT"
] | null | null | null | electrum/wallet_db.py | L-47/qtum-electrum | dd1b0a8b6ef6e96015a6210de36b23949eaad359 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import ast
import json
import copy
import threading
from collections import defaultdict
from functools import reduce
from typing import Dict, Optional, List, Tuple, Set, Iterable, NamedTuple, Sequence, TYPE_CHECKING, Union
import binascii
from . import util, bitcoin
from .util import profiler, WalletFileException, multisig_type, TxMinedInfo, bfh
from .invoices import PR_TYPE_ONCHAIN, Invoice
from .keystore import bip44_derivation
from .transaction import Transaction, TxOutpoint, tx_from_any, PartialTransaction, PartialTxOutput
from .logging import Logger
from .lnutil import LOCAL, REMOTE, FeeUpdate, UpdateAddHtlc, LocalConfig, RemoteConfig, Keypair, OnlyPubkeyKeypair, RevocationStore, ChannelBackupStorage
from .lnutil import ChannelConstraints, Outpoint, ShachainElement
from .json_db import StoredDict, JsonDB, locked, modifier
from .plugin import run_hook, plugin_loaders
from .paymentrequest import PaymentRequest
from .submarine_swaps import SwapData
from .bitcoin import Token, Delegation
if TYPE_CHECKING:
from .storage import WalletStorage
# seed_version is now used for the version of the wallet file
# (see WalletDB.upgrade() for the per-version migration steps)
OLD_SEED_VERSION = 4 # electrum versions < 2.0
NEW_SEED_VERSION = 11 # electrum versions >= 2.0
FINAL_SEED_VERSION = 32 # electrum >= 2.7 will set this to prevent
# old versions from overwriting new format
class TxFeesValue(NamedTuple):
    """Cached fee information for a single transaction.

    fee: the fee in satoshis, or None if unknown.
    is_calculated_by_us: presumably True when we computed the fee locally
        rather than learning it elsewhere -- TODO confirm at call sites.
    num_inputs: number of inputs of the tx, or None if unknown.
    """
    fee: Optional[int] = None
    is_calculated_by_us: bool = False
    num_inputs: Optional[int] = None
class WalletDB(JsonDB):
    def __init__(self, raw, *, manual_upgrades: bool):
        """Create a wallet DB from serialized wallet-file text.

        raw: the wallet file contents; empty/falsy means a brand-new wallet.
        manual_upgrades: if True, old file formats are NOT auto-upgraded;
            the caller must invoke upgrade() explicitly.
        """
        JsonDB.__init__(self, {})
        self._manual_upgrades = manual_upgrades
        # Guards the required ordering: upgrade() before _after_upgrade_tasks().
        self._called_after_upgrade_tasks = False
        if raw: # loading existing db
            self.load_data(raw)
            self.load_plugins()
        else: # creating new db
            self.put('seed_version', FINAL_SEED_VERSION)
            self._after_upgrade_tasks()
    def load_data(self, s):
        """Parse the wallet file text into self.data and trigger upgrades.

        Tries JSON first; very old wallets were a Python-literal dict, so
        ast.literal_eval is the fallback, keeping only JSON-serializable
        entries.  Raises WalletFileException on unreadable/malformed files
        or on multi-account wallets that must be split first.
        """
        try:
            self.data = json.loads(s)
        except:
            try:
                d = ast.literal_eval(s)
                # NOTE(review): `labels` is unused -- it only probes that
                # `d` behaves like a dict before we iterate it.
                labels = d.get('labels', {})
            except Exception as e:
                raise WalletFileException("Cannot read wallet file. (parsing failed)")
            self.data = {}
            for key, value in d.items():
                try:
                    # Keep only entries that round-trip through JSON.
                    json.dumps(key)
                    json.dumps(value)
                except:
                    self.logger.info(f'Failed to convert label to json format: {key}')
                    continue
                self.data[key] = value
        if not isinstance(self.data, dict):
            raise WalletFileException("Malformed wallet file (not dict)")
        if not self._manual_upgrades and self.requires_split():
            raise WalletFileException("This wallet has multiple accounts and must be split")
        # Either finish loading directly, or run the format upgrade chain
        # (which calls _after_upgrade_tasks itself).
        if not self.requires_upgrade():
            self._after_upgrade_tasks()
        elif not self._manual_upgrades:
            self.upgrade()
def requires_split(self):
d = self.get('accounts', {})
return len(d) > 1
    def get_split_accounts(self):
        """Split a legacy multi-account wallet into per-account data dicts.

        Returns a list of wallet-data dicts (each with a 'suffix' for the
        new file name), or None when there is nothing to split.  Raises
        WalletFileException for unsupported multi-account wallet types.
        """
        result = []
        # backward compatibility with old wallets
        d = self.get('accounts', {})
        if len(d) < 2:
            return
        wallet_type = self.get('wallet_type')
        if wallet_type == 'old':
            # Old-seed wallets have exactly a deterministic account ('0')
            # and an imported account ('/x').
            assert len(d) == 2
            data1 = copy.deepcopy(self.data)
            data1['accounts'] = {'0': d['0']}
            data1['suffix'] = 'deterministic'
            data2 = copy.deepcopy(self.data)
            data2['accounts'] = {'/x': d['/x']}
            data2['seed'] = None
            data2['seed_version'] = None
            data2['master_public_key'] = None
            data2['wallet_type'] = 'imported'
            data2['suffix'] = 'imported'
            result = [data1, data2]
        elif wallet_type in ['bip44', 'trezor', 'keepkey', 'ledger', 'btchip', 'digitalbitbox', 'safe_t']:
            mpk = self.get('master_public_keys')
            for k in d.keys():
                i = int(k)
                x = d[k]
                if x.get("pending"):
                    continue
                xpub = mpk["x/%d'"%i]
                new_data = copy.deepcopy(self.data)
                # save account, derivation and xpub at index 0
                new_data['accounts'] = {'0': x}
                new_data['master_public_keys'] = {"x/0'": xpub}
                new_data['derivation'] = bip44_derivation(k)
                new_data['suffix'] = k
                result.append(new_data)
        else:
            raise WalletFileException("This wallet has multiple accounts and must be split")
        return result
def requires_upgrade(self):
return self.get_seed_version() < FINAL_SEED_VERSION
    @profiler
    def upgrade(self):
        """Run all wallet-file format upgrade steps up to FINAL_SEED_VERSION.

        Each _convert_* method internally checks (via
        _is_upgrade_method_needed) whether it applies, so calling them all
        unconditionally is safe -- but their ORDER must not be changed.
        """
        self.logger.info('upgrading wallet format')
        if self._called_after_upgrade_tasks:
            # we need strict ordering between upgrade() and after_upgrade_tasks()
            raise Exception("'after_upgrade_tasks' must NOT be called before 'upgrade'")
        self._convert_imported()
        self._convert_wallet_type()
        self._convert_account()
        self._convert_version_13_b()
        self._convert_version_14()
        self._convert_version_15()
        self._convert_version_16()
        self._convert_version_17()
        self._convert_version_18()
        self._convert_version_19()
        self._convert_version_20()
        self._convert_version_21()
        self._convert_version_22()
        self._convert_version_23()
        self._convert_version_24()
        self._convert_version_25()
        self._convert_version_26()
        self._convert_version_27()
        self._convert_version_28()
        self._convert_version_29()
        self._convert_version_30()
        self._convert_version_31()
        self._convert_version_32()
        self.put('seed_version', FINAL_SEED_VERSION)  # just to be sure
        self._after_upgrade_tasks()
    def _after_upgrade_tasks(self):
        """Finish loading once the file format is current.

        Must run exactly once, strictly AFTER any upgrade() call.
        """
        self._called_after_upgrade_tasks = True
        self._load_transactions()
    def _convert_wallet_type(self):
        """Upgrade pre-2.0-style wallets to the keystore-based layout.

        Translates the old top-level keys (master_public_key(s),
        master_private_keys, seed, keypairs, ...) into one or more
        'keystore' dicts and removes the legacy keys afterwards.
        """
        if not self._is_upgrade_method_needed(0, 13):
            return
        wallet_type = self.get('wallet_type')
        if wallet_type == 'btchip': wallet_type = 'ledger'
        # Already in the new format (or a new-style imported wallet):
        # nothing to convert.
        if self.get('keystore') or self.get('x1/') or wallet_type=='imported':
            return False
        assert not self.requires_split()
        seed_version = self.get_seed_version()
        seed = self.get('seed')
        xpubs = self.get('master_public_keys')
        xprvs = self.get('master_private_keys', {})
        mpk = self.get('master_public_key')
        keypairs = self.get('keypairs')
        key_type = self.get('key_type')
        if seed_version == OLD_SEED_VERSION or wallet_type == 'old':
            d = {
                'type': 'old',
                'seed': seed,
                'mpk': mpk,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        elif key_type == 'imported':
            d = {
                'type': 'imported',
                'keypairs': keypairs,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        elif wallet_type in ['xpub', 'standard']:
            xpub = xpubs["x/"]
            xprv = xprvs.get("x/")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
                'seed': seed,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        elif wallet_type in ['bip44']:
            xpub = xpubs["x/0'"]
            xprv = xprvs.get("x/0'")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        elif wallet_type in ['trezor', 'keepkey', 'ledger', 'digitalbitbox', 'safe_t']:
            xpub = xpubs["x/0'"]
            derivation = self.get('derivation', bip44_derivation(0))
            d = {
                'type': 'hardware',
                'hw_type': wallet_type,
                'xpub': xpub,
                'derivation': derivation,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        elif (wallet_type == '2fa') or multisig_type(wallet_type):
            # Multisig / 2fa: one keystore per cosigner, stored under the
            # legacy 'x<i>/' keys; the seed (if any) belongs to x1/.
            for key in xpubs.keys():
                d = {
                    'type': 'bip32',
                    'xpub': xpubs[key],
                    'xprv': xprvs.get(key),
                }
                if key == 'x1/' and seed:
                    d['seed'] = seed
                self.put(key, d)
        else:
            raise WalletFileException('Unable to tell wallet type. Is this even a wallet file?')
        # remove junk
        self.put('master_public_key', None)
        self.put('master_public_keys', None)
        self.put('master_private_keys', None)
        self.put('derivation', None)
        self.put('seed', None)
        self.put('keypairs', None)
        self.put('key_type', None)
    def _convert_version_13_b(self):
        # version 13 is ambiguous, and has an earlier and a later structure
        if not self._is_upgrade_method_needed(0, 13):
            return
        # For standard wallets with imported keystores, rebuild the
        # receiving-address list from the stored pubkeys.
        if self.get('wallet_type') == 'standard':
            if self.get('keystore').get('type') == 'imported':
                pubkeys = self.get('keystore').get('keypairs').keys()
                d = {'change': []}
                receiving_addresses = []
                for pubkey in pubkeys:
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    receiving_addresses.append(addr)
                d['receiving'] = receiving_addresses
                self.put('addresses', d)
                self.put('pubkeys', None)
        self.put('seed_version', 13)
    def _convert_version_14(self):
        # convert imported wallets for 3.0
        if not self._is_upgrade_method_needed(13, 13):
            return
        if self.get('wallet_type') =='imported':
            # 'addresses' becomes a dict of addr -> details (details
            # unknown for legacy entries, hence None).
            addresses = self.get('addresses')
            if type(addresses) is list:
                addresses = dict([(x, None) for x in addresses])
                self.put('addresses', addresses)
        elif self.get('wallet_type') == 'standard':
            # standard+imported-keystore wallets become 'imported' wallets
            # with per-address pubkey/script metadata.
            if self.get('keystore').get('type')=='imported':
                addresses = set(self.get('addresses').get('receiving'))
                pubkeys = self.get('keystore').get('keypairs').keys()
                assert len(addresses) == len(pubkeys)
                d = {}
                for pubkey in pubkeys:
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    assert addr in addresses
                    d[addr] = {
                        'pubkey': pubkey,
                        'redeem_script': None,
                        'type': 'p2pkh'
                    }
                self.put('addresses', d)
                self.put('pubkeys', None)
                self.put('wallet_type', 'imported')
        self.put('seed_version', 14)
def _convert_version_15(self):
if not self._is_upgrade_method_needed(14, 14):
return
if self.get('seed_type') == 'segwit':
# should not get here; get_seed_version should have caught this
raise Exception('unsupported derivation (development segwit, v14)')
self.put('seed_version', 15)
    def _convert_version_16(self):
        # fixes issue #3193 for Imported_Wallets with addresses
        # also, previous versions allowed importing any garbage as an address
        # which we now try to remove, see pr #3191
        if not self._is_upgrade_method_needed(15, 15):
            return
        def remove_address(addr):
            # Purge all traces of `addr` from the auxiliary maps/lists.
            def remove_from_dict(dict_name):
                d = self.get(dict_name, None)
                if d is not None:
                    d.pop(addr, None)
                    self.put(dict_name, d)
            def remove_from_list(list_name):
                lst = self.get(list_name, None)
                if lst is not None:
                    s = set(lst)
                    s -= {addr}
                    self.put(list_name, list(s))
            # note: we don't remove 'addr' from self.get('addresses')
            remove_from_dict('addr_history')
            remove_from_dict('labels')
            remove_from_dict('payment_requests')
            remove_from_list('frozen_addresses')
        if self.get('wallet_type') == 'imported':
            addresses = self.get('addresses')
            assert isinstance(addresses, dict)
            addresses_new = dict()
            for address, details in addresses.items():
                # Drop entries that are not valid addresses at all.
                if not bitcoin.is_address(address):
                    remove_address(address)
                    continue
                if details is None:
                    addresses_new[address] = {}
                else:
                    addresses_new[address] = details
            self.put('addresses', addresses_new)
        self.put('seed_version', 16)
    def _convert_version_17(self):
        # delete pruned_txo; construct spent_outpoints
        if not self._is_upgrade_method_needed(16, 16):
            return
        self.put('pruned_txo', None)
        transactions = self.get('transactions', {}) # txid -> raw_tx
        # Map prevout txid -> {out_idx -> spending txid} for every
        # non-coinbase input of every known transaction.
        spent_outpoints = defaultdict(dict)
        for txid, raw_tx in transactions.items():
            tx = Transaction(raw_tx)
            for txin in tx.inputs():
                if txin.is_coinbase_input():
                    continue
                prevout_hash = txin.prevout.txid.hex()
                prevout_n = txin.prevout.out_idx
                spent_outpoints[prevout_hash][str(prevout_n)] = txid
        self.put('spent_outpoints', spent_outpoints)
        # Qtum-specific: rebuild the tokens map keyed by Token.get_key().
        tokens = self.get('tokens', {}) # contractAddr_bindAddr -> [name, symbol, decimals, balance]
        new_tokens = {}
        for key, value in tokens.items():
            contract_addr, bind_addr = key.split('_')
            new_token = Token(contract_addr, bind_addr, value[0], value[1], value[2], value[3])
            new_tokens[new_token.get_key()] = new_token
        self.put('tokens', new_tokens)
        self.put('seed_version', 17)
    def _convert_version_18(self):
        # delete verified_tx3 as its structure changed
        # (the data is rebuilt by re-verifying transactions)
        if not self._is_upgrade_method_needed(17, 17):
            return
        self.put('verified_tx3', None)
        self.put('seed_version', 18)
    def _convert_version_19(self):
        # delete tx_fees as its structure changed
        # (the fee cache is repopulated on the fly)
        if not self._is_upgrade_method_needed(18, 18):
            return
        self.put('tx_fees', None)
        self.put('seed_version', 19)
    def _convert_version_20(self):
        # store 'derivation' (prefix) and 'root_fingerprint' in all xpub-based keystores.
        # store explicit None values if we cannot retroactively determine them
        if not self._is_upgrade_method_needed(19, 19):
            return
        from .bip32 import BIP32Node, convert_bip32_intpath_to_strpath
        # note: This upgrade method reimplements bip32.root_fp_and_der_prefix_from_xkey.
        # This is done deliberately, to avoid introducing that method as a dependency to this upgrade.
        # Covers the single 'keystore' plus multisig cosigner slots 'x1/'..'x15/'.
        for ks_name in ('keystore', *['x{}/'.format(i) for i in range(1, 16)]):
            ks = self.get(ks_name, None)
            if ks is None: continue
            xpub = ks.get('xpub', None)
            if xpub is None: continue
            bip32node = BIP32Node.from_xkey(xpub)
            # derivation prefix
            derivation_prefix = ks.get('derivation', None)
            if derivation_prefix is None:
                # Only determinable for depth 0 ('m') or depth 1; deeper
                # nodes are stored with an explicit None.
                assert bip32node.depth >= 0, bip32node.depth
                if bip32node.depth == 0:
                    derivation_prefix = 'm'
                elif bip32node.depth == 1:
                    child_number_int = int.from_bytes(bip32node.child_number, 'big')
                    derivation_prefix = convert_bip32_intpath_to_strpath([child_number_int])
                ks['derivation'] = derivation_prefix
            # root fingerprint
            root_fingerprint = ks.get('ckcc_xfp', None)
            if root_fingerprint is not None:
                # Legacy Coldcard field stored the fp as a little-endian int.
                root_fingerprint = root_fingerprint.to_bytes(4, byteorder="little", signed=False).hex().lower()
            if root_fingerprint is None:
                if bip32node.depth == 0:
                    root_fingerprint = bip32node.calc_fingerprint_of_this_node().hex().lower()
                elif bip32node.depth == 1:
                    root_fingerprint = bip32node.fingerprint.hex()
            ks['root_fingerprint'] = root_fingerprint
            ks.pop('ckcc_xfp', None)
            self.put(ks_name, ks)
        self.put('seed_version', 20)
    def _convert_version_21(self):
        # 20 -> 21: give every lightning channel an explicit 'state',
        # initialized to 'OPENING'.
        if not self._is_upgrade_method_needed(20, 20):
            return
        channels = self.get('channels')
        if channels:
            for channel in channels:
                channel['state'] = 'OPENING'
            self.put('channels', channels)
        self.put('seed_version', 21)
def _convert_version_22(self):
    """Upgrade storage v21 -> v22: build the 'prevouts_by_scripthash' index
    (scripthash -> list of (outpoint, value)) from stored transactions."""
    # construct prevouts_by_scripthash
    if not self._is_upgrade_method_needed(21, 21):
        return
    from .bitcoin import script_to_scripthash
    transactions = self.get('transactions', {})  # txid -> raw_tx
    prevouts_by_scripthash = defaultdict(list)
    for txid, raw_tx in transactions.items():
        tx = Transaction(raw_tx)
        for idx, txout in enumerate(tx.outputs()):
            outpoint = f"{txid}:{idx}"
            scripthash = script_to_scripthash(txout.scriptpubkey.hex())
            prevouts_by_scripthash[scripthash].append((outpoint, txout.value))
    self.put('prevouts_by_scripthash', prevouts_by_scripthash)
    self.put('seed_version', 22)
def _convert_version_23(self):
    """Upgrade storage v22 -> v23 (lightning channels): move the revocation
    store out of remote_config, and convert each side's fee-update list
    into a str-indexed dict with flattened ctn fields."""
    if not self._is_upgrade_method_needed(22, 22):
        return
    channels = self.get('channels', [])
    # local int constants; deliberately shadow the module-level LOCAL/REMOTE
    # imports so this upgrade does not depend on the enum's current values
    LOCAL = 1
    REMOTE = -1
    for c in channels:
        # move revocation store from remote_config
        r = c['remote_config'].pop('revocation_store')
        c['revocation_store'] = r
        # convert fee updates
        log = c.get('log', {})
        for sub in LOCAL, REMOTE:
            l = log[str(sub)]['fee_updates']
            d = {}
            for i, fu in enumerate(l):
                d[str(i)] = {
                    'rate':fu['rate'],
                    'ctn_local':fu['ctns'][str(LOCAL)],
                    'ctn_remote':fu['ctns'][str(REMOTE)]
                }
            log[str(int(sub))]['fee_updates'] = d
    self.data['channels'] = channels
    self.data['seed_version'] = 23
def _convert_version_24(self):
    """Upgrade storage v23 -> v24: convert several list-based structures to
    dicts — revocation-store buckets, the channels list (keyed by
    channel_id), and the inner txi/txo lists."""
    if not self._is_upgrade_method_needed(23, 23):
        return
    channels = self.get('channels', [])
    for c in channels:
        # convert revocation store to dict; drop empty (None) buckets.
        # 49 is the fixed bucket count of the shachain revocation store.
        r = c['revocation_store']
        d = {}
        for i in range(49):
            v = r['buckets'][i]
            if v is not None:
                d[str(i)] = v
        r['buckets'] = d
        c['revocation_store'] = r
    # convert channels to dict
    self.data['channels'] = { x['channel_id']: x for x in channels }
    # convert txi & txo
    txi = self.get('txi', {})
    for tx_hash, d in txi.items():
        # txi: list of (prevout_ser, value) -> {prevout_ser: value}
        d2 = {}
        for addr, l in d.items():
            d2[addr] = {}
            for ser, v in l:
                d2[addr][ser] = v
        txi[tx_hash] = d2
    self.data['txi'] = txi
    txo = self.get('txo', {})
    for tx_hash, d in txo.items():
        # txo: list of (n, value, is_coinbase) -> {str(n): (value, is_coinbase)}
        d2 = {}
        for addr, l in d.items():
            d2[addr] = {}
            for n, v, cb in l:
                d2[addr][str(n)] = (v, cb)
        txo[tx_hash] = d2
    self.data['txo'] = txo
    self.data['seed_version'] = 24
def _convert_version_25(self):
    """Upgrade storage v24 -> v25: normalise on-chain payment requests to a
    fixed schema with a 'type' field, and expand stored BIP70 invoices
    (hex blobs) into structured dicts."""
    if not self._is_upgrade_method_needed(24, 24):
        return
    # add 'type' field to onchain requests
    requests = self.data.get('payment_requests', {})
    for k, r in list(requests.items()):
        # on-chain requests are keyed by their own address
        if r.get('address') == k:
            requests[k] = {
                'address': r['address'],
                'amount': r.get('amount'),
                'exp': r.get('exp'),
                'id': r.get('id'),
                'memo': r.get('memo'),
                'time': r.get('time'),
                'type': PR_TYPE_ONCHAIN,
            }
    # convert bip70 invoices
    invoices = self.data.get('invoices', {})
    for k, r in list(invoices.items()):
        data = r.get("hex")
        if data:
            pr = PaymentRequest(bytes.fromhex(data))
            # skip entries whose key does not match the parsed request id
            if pr.id != k:
                continue
            invoices[k] = {
                'type': PR_TYPE_ONCHAIN,
                'amount': pr.get_amount(),
                'bip70': data,
                'exp': pr.get_expiration_date() - pr.get_time(),
                'id': pr.id,
                'message': pr.get_memo(),
                'outputs': [x.to_legacy_tuple() for x in pr.get_outputs()],
                'time': pr.get_time(),
                'requestor': pr.get_requestor(),
            }
    self.data['seed_version'] = 25
def _convert_version_26(self):
    """Upgrade storage v25 -> v26: fold the separate
    'lightning_channel_timestamps' dict into each channel as
    'funding_height' / 'closing_height' triples."""
    if not self._is_upgrade_method_needed(25, 25):
        return
    channels = self.data.get('channels', {})
    channel_timestamps = self.data.pop('lightning_channel_timestamps', {})
    for channel_id, c in channels.items():
        item = channel_timestamps.get(channel_id)
        if item:
            funding_txid, funding_height, funding_timestamp, closing_txid, closing_height, closing_timestamp = item
            if funding_txid:
                c['funding_height'] = funding_txid, funding_height, funding_timestamp
            if closing_txid:
                c['closing_height'] = closing_txid, closing_height, closing_timestamp
    self.data['seed_version'] = 26
def _convert_version_27(self):
if not self._is_upgrade_method_needed(26, 26):
return
channels = self.data.get('channels', {})
for channel_id, c in channels.items():
c['local_config']['htlc_minimum_msat'] = 1
self.data['seed_version'] = 27
def _convert_version_28(self):
if not self._is_upgrade_method_needed(27, 27):
return
channels = self.data.get('channels', {})
for channel_id, c in channels.items():
c['local_config']['channel_seed'] = None
self.data['seed_version'] = 28
def _convert_version_29(self):
    """Upgrade storage v28 -> v29: normalise invoices and payment requests
    to a common schema; on-chain entries get an 'outputs' list (synthesised
    from 'address' when present), lightning entries keep rhash/invoice."""
    if not self._is_upgrade_method_needed(28, 28):
        return
    requests = self.data.get('payment_requests', {})
    invoices = self.data.get('invoices', {})
    for d in [invoices, requests]:
        for key, r in list(d.items()):
            _type = r.get('type', 0)
            # fields common to both on-chain and lightning entries
            item = {
                'type': _type,
                'message': r.get('message') or r.get('memo', ''),
                'amount': r.get('amount'),
                'exp': r.get('exp') or 0,
                'time': r.get('time', 0),
            }
            if _type == PR_TYPE_ONCHAIN:
                address = r.pop('address', None)
                if address:
                    # legacy single-address request: synthesise one output;
                    # leading 0 is the legacy TYPE_ADDRESS output-type tag
                    outputs = [(0, address, r.get('amount'))]
                else:
                    outputs = r.get('outputs')
                item.update({
                    'outputs': outputs,
                    'id': r.get('id'),
                    'bip70': r.get('bip70'),
                    'requestor': r.get('requestor'),
                })
            else:
                item.update({
                    'rhash': r['rhash'],
                    'invoice': r['invoice'],
                })
            d[key] = item
    self.data['seed_version'] = 29
def _convert_version_30(self):
    """Upgrade storage v29 -> v30: rename 'amount' to 'amount_sat' for
    on-chain entries and to 'amount_msat' (x1000) for lightning entries,
    dropping fields lightning entries no longer store."""
    if not self._is_upgrade_method_needed(29, 29):
        return
    from .invoices import PR_TYPE_ONCHAIN, PR_TYPE_LN
    requests = self.data.get('payment_requests', {})
    invoices = self.data.get('invoices', {})
    for d in [invoices, requests]:
        for key, item in list(d.items()):
            _type = item['type']
            if _type == PR_TYPE_ONCHAIN:
                item['amount_sat'] = item.pop('amount')
            elif _type == PR_TYPE_LN:
                amount_sat = item.pop('amount')
                item['amount_msat'] = 1000 * amount_sat if amount_sat is not None else None
                # these are now derived from the bolt11 invoice itself
                item.pop('exp')
                item.pop('message')
                item.pop('rhash')
                item.pop('time')
            else:
                raise Exception(f"unknown invoice type: {_type}")
    self.data['seed_version'] = 30
def _convert_version_31(self):
    """Upgrade storage v30 -> v31: coerce None amount/exp/time of on-chain
    invoices and requests to 0."""
    if not self._is_upgrade_method_needed(30, 30):
        return
    from .invoices import PR_TYPE_ONCHAIN
    requests = self.data.get('payment_requests', {})
    invoices = self.data.get('invoices', {})
    for d in [invoices, requests]:
        for key, item in list(d.items()):
            if item['type'] == PR_TYPE_ONCHAIN:
                item['amount_sat'] = item['amount_sat'] or 0
                item['exp'] = item['exp'] or 0
                item['time'] = item['time'] or 0
    self.data['seed_version'] = 31
def _convert_version_32(self):
    """Upgrade storage v31 -> v32: drop on-chain invoices that have no
    outputs (they cannot be paid)."""
    if not self._is_upgrade_method_needed(31, 31):
        return
    from .invoices import PR_TYPE_ONCHAIN
    kept = {}
    for key, item in self.data.get('invoices', {}).items():
        if item['type'] == PR_TYPE_ONCHAIN and item['outputs'] is None:
            continue
        kept[key] = item
    self.data['invoices'] = kept
    self.data['seed_version'] = 32
def _convert_imported(self):
    """Upgrade (pre-v14): split the legacy '/x' imported account into either
    a watching-only address list or an imported-privkey keystore.

    A wallet mixing watch-only addresses with private keys is rejected.
    """
    if not self._is_upgrade_method_needed(0, 13):
        return
    # '/x' is the internal ID for imported accounts
    d = self.get('accounts', {}).get('/x', {}).get('imported',{})
    if not d:
        return False
    addresses = []
    keypairs = {}
    for addr, v in d.items():
        pubkey, privkey = v
        if privkey:
            keypairs[pubkey] = privkey
        else:
            addresses.append(addr)
    if addresses and keypairs:
        raise WalletFileException('mixed addresses and privkeys')
    elif addresses:
        self.put('addresses', addresses)
        self.put('accounts', None)
    elif keypairs:
        self.put('wallet_type', 'standard')
        self.put('key_type', 'imported')
        self.put('keypairs', keypairs)
        self.put('accounts', None)
    else:
        raise WalletFileException('no addresses or privkeys')
def _convert_account(self):
if not self._is_upgrade_method_needed(0, 13):
return
self.put('accounts', None)
def _is_upgrade_method_needed(self, min_version, max_version):
assert min_version <= max_version
cur_version = self.get_seed_version()
if cur_version > max_version:
return False
elif cur_version < min_version:
raise WalletFileException(
'storage upgrade: unexpected version {} (should be {}-{})'
.format(cur_version, min_version, max_version))
else:
return True
@locked
def get_seed_version(self):
    """Return the wallet's storage/seed version, inferring it for very old
    files and raising for versions this code cannot handle.

    Raises WalletFileException if the file is newer than
    FINAL_SEED_VERSION, or is one of the known-unsupported old versions.
    """
    seed_version = self.get('seed_version')
    if not seed_version:
        # pre-versioned files: a 128-hex-char master_public_key marks an "old"-seed wallet
        seed_version = OLD_SEED_VERSION if len(self.get('master_public_key','')) == 128 else NEW_SEED_VERSION
    if seed_version > FINAL_SEED_VERSION:
        raise WalletFileException('This version of Electrum is too old to open this wallet.\n'
                                  '(highest supported storage version: {}, version of this file: {})'
                                  .format(FINAL_SEED_VERSION, seed_version))
    if seed_version==14 and self.get('seed_type') == 'segwit':
        # version 14 segwit seeds came from an unreleased/experimental branch
        self._raise_unsupported_version(seed_version)
    if seed_version >=12:
        return seed_version
    if seed_version not in [OLD_SEED_VERSION, NEW_SEED_VERSION]:
        self._raise_unsupported_version(seed_version)
    return seed_version
def _raise_unsupported_version(self, seed_version):
    """Raise WalletFileException with guidance tailored to the given
    unsupported seed version."""
    msg = f"Your wallet has an unsupported seed version: {seed_version}."
    if seed_version in [5, 7, 8, 9, 10, 14]:
        msg += "\n\nTo open this wallet, try 'git checkout seed_v%d'"%seed_version
    if seed_version == 6:
        # version 1.9.8 created v6 wallets when an incorrect seed was entered in the restore dialog
        msg += '\n\nThis file was created because of a bug in version 1.9.8.'
        if self.get('master_public_keys') is None and self.get('master_private_keys') is None and self.get('imported_keys') is None:
            # pbkdf2 (at that time an additional dependency) was not included with the binaries, and wallet creation aborted.
            msg += "\nIt does not contain any keys, and can safely be removed."
        else:
            # creation was complete if electrum was run from source
            msg += "\nPlease open this file with Electrum 1.9.8, and move your coins to a new wallet."
    raise WalletFileException(msg)
@locked
def get_txi_addresses(self, tx_hash: str) -> List[str]:
    """Return the is_mine addresses that appear as inputs of tx `tx_hash`."""
    assert isinstance(tx_hash, str)
    return [addr for addr in self.txi.get(tx_hash, {})]
@locked
def get_txo_addresses(self, tx_hash: str) -> List[str]:
    """Return the is_mine addresses that appear as outputs of tx `tx_hash`."""
    assert isinstance(tx_hash, str)
    return [addr for addr in self.txo.get(tx_hash, {})]
@locked
def get_txi_addr(self, tx_hash: str, address: str) -> Iterable[Tuple[str, int]]:
    """Return (prev_outpoint, value) pairs: inputs of `tx_hash` spending
    from `address`."""
    assert isinstance(tx_hash, str)
    assert isinstance(address, str)
    entries = self.txi.get(tx_hash, {}).get(address, {})
    return [(outpoint, value) for outpoint, value in entries.items()]
@locked
def get_txo_addr(self, tx_hash: str, address: str) -> Iterable[Tuple[int, int, bool]]:
    """Return (output_index, value, is_coinbase) triples: outputs of
    `tx_hash` paying to `address`."""
    assert isinstance(tx_hash, str)
    assert isinstance(address, str)
    entries = self.txo.get(tx_hash, {}).get(address, {})
    result = []
    for n, (value, is_coinbase) in entries.items():
        result.append((int(n), value, is_coinbase))
    return result
@modifier
def add_txi_addr(self, tx_hash: str, addr: str, ser: str, v: int) -> None:
    """Record that tx `tx_hash` has an input spending `v` sats from `addr`;
    `ser` is the spent prevout serialised as "txid:n"."""
    assert isinstance(tx_hash, str)
    assert isinstance(addr, str)
    assert isinstance(ser, str)
    assert isinstance(v, int)
    # membership test + item assignment rather than setdefault — presumably
    # so inserts go through StoredDict's __setitem__ hook; verify before changing
    if tx_hash not in self.txi:
        self.txi[tx_hash] = {}
    d = self.txi[tx_hash]
    if addr not in d:
        d[addr] = {}
    d[addr][ser] = v
@modifier
def add_txo_addr(self, tx_hash: str, addr: str, n: Union[int, str], v: int, is_coinbase: bool) -> None:
    """Record that output `n` of tx `tx_hash` pays `v` sats to `addr`.

    Output indices are stored as strings (JSON dict keys).
    """
    n = str(n)
    assert isinstance(tx_hash, str)
    assert isinstance(addr, str)
    assert isinstance(n, str)
    assert isinstance(v, int)
    assert isinstance(is_coinbase, bool)
    # membership test + item assignment rather than setdefault — presumably
    # so inserts go through StoredDict's __setitem__ hook; verify before changing
    if tx_hash not in self.txo:
        self.txo[tx_hash] = {}
    d = self.txo[tx_hash]
    if addr not in d:
        d[addr] = {}
    d[addr][n] = (v, is_coinbase)
@locked
def list_txi(self) -> Sequence[str]:
    """Return all txids that have recorded is_mine inputs."""
    return [tx_hash for tx_hash in self.txi]
@locked
def list_txo(self) -> Sequence[str]:
    """Return all txids that have recorded is_mine outputs."""
    return [tx_hash for tx_hash in self.txo]
@modifier
def remove_txi(self, tx_hash: str) -> None:
    """Forget all recorded is_mine inputs of `tx_hash` (no-op if unknown)."""
    assert isinstance(tx_hash, str)
    self.txi.pop(tx_hash, None)
@modifier
def remove_txo(self, tx_hash: str) -> None:
    """Forget all recorded is_mine outputs of `tx_hash` (no-op if unknown)."""
    assert isinstance(tx_hash, str)
    self.txo.pop(tx_hash, None)
@locked
def list_spent_outpoints(self) -> Sequence[Tuple[str, str]]:
    """Return every known spent outpoint as (prevout_hash, prevout_n) pairs
    (prevout_n is a string)."""
    result = []
    for prevout_hash in self.spent_outpoints.keys():
        for prevout_n in self.get_spent_outpoints(prevout_hash):
            result.append((prevout_hash, prevout_n))
    return result
@locked
def get_spent_outpoints(self, prevout_hash: str) -> Sequence[str]:
    """Return the spent output indices of `prevout_hash`, as strings."""
    assert isinstance(prevout_hash, str)
    return [n for n in self.spent_outpoints.get(prevout_hash, {})]
@locked
def get_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> Optional[str]:
    """Return the txid that spends outpoint `prevout_hash:prevout_n`, or
    None if it is not known to be spent."""
    assert isinstance(prevout_hash, str)
    spenders = self.spent_outpoints.get(prevout_hash, {})
    return spenders.get(str(prevout_n))
@modifier
def remove_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> None:
    """Forget the spender of `prevout_hash:prevout_n`; drops the whole
    prevout_hash entry when no spent indices remain.

    Note: raises KeyError if prevout_hash itself is unknown.
    """
    assert isinstance(prevout_hash, str)
    prevout_n = str(prevout_n)
    self.spent_outpoints[prevout_hash].pop(prevout_n, None)
    if not self.spent_outpoints[prevout_hash]:
        self.spent_outpoints.pop(prevout_hash)
@modifier
def set_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str], tx_hash: str) -> None:
    """Record that outpoint `prevout_hash:prevout_n` is spent by `tx_hash`."""
    assert isinstance(prevout_hash, str)
    assert isinstance(tx_hash, str)
    prevout_n = str(prevout_n)
    # membership test + item assignment rather than setdefault — presumably
    # so inserts go through StoredDict's __setitem__ hook; verify before changing
    if prevout_hash not in self.spent_outpoints:
        self.spent_outpoints[prevout_hash] = {}
    self.spent_outpoints[prevout_hash][prevout_n] = tx_hash
@modifier
def add_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
    """Index `prevout` (worth `value` sats) under `scripthash`.

    Outpoints are stored in string form; the per-scripthash container is a set,
    so repeated adds are idempotent.
    """
    assert isinstance(scripthash, str)
    assert isinstance(prevout, TxOutpoint)
    assert isinstance(value, int)
    if scripthash not in self._prevouts_by_scripthash:
        self._prevouts_by_scripthash[scripthash] = set()
    self._prevouts_by_scripthash[scripthash].add((prevout.to_str(), value))
@modifier
def remove_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
    """Remove `prevout` from the `scripthash` index; drops the scripthash
    entry entirely when its set becomes empty.

    Note: raises KeyError if scripthash itself is unknown.
    """
    assert isinstance(scripthash, str)
    assert isinstance(prevout, TxOutpoint)
    assert isinstance(value, int)
    self._prevouts_by_scripthash[scripthash].discard((prevout.to_str(), value))
    if not self._prevouts_by_scripthash[scripthash]:
        self._prevouts_by_scripthash.pop(scripthash)
@locked
def get_prevouts_by_scripthash(self, scripthash: str) -> Set[Tuple[TxOutpoint, int]]:
    """Return the set of (TxOutpoint, value) pairs indexed under
    `scripthash` (empty set if unknown)."""
    assert isinstance(scripthash, str)
    entries = self._prevouts_by_scripthash.get(scripthash, set())
    return set((TxOutpoint.from_str(prevout), value) for prevout, value in entries)
@modifier
def add_transaction(self, tx_hash: str, tx: Transaction) -> None:
    """Store `tx` under `tx_hash`.

    Raises if `tx_hash` is falsy or does not match tx.txid(). A complete
    transaction is never overwritten by a partial one.
    """
    assert isinstance(tx_hash, str)
    assert isinstance(tx, Transaction), tx
    # note that tx might be a PartialTransaction
    if not tx_hash:
        raise Exception("trying to add tx to db without txid")
    if tx_hash != tx.txid():
        raise Exception(f"trying to add tx to db with inconsistent txid: {tx_hash} != {tx.txid()}")
    # don't allow overwriting complete tx with partial tx
    tx_we_already_have = self.transactions.get(tx_hash, None)
    if tx_we_already_have is None or isinstance(tx_we_already_have, PartialTransaction):
        self.transactions[tx_hash] = tx
@modifier
def remove_transaction(self, tx_hash: str) -> Optional[Transaction]:
    """Remove and return the stored tx for `tx_hash` (None if unknown)."""
    assert isinstance(tx_hash, str)
    return self.transactions.pop(tx_hash, None)
@locked
def get_transaction(self, tx_hash: Optional[str]) -> Optional[Transaction]:
    """Return the stored Transaction for `tx_hash`; a None txid maps to None."""
    if tx_hash is not None:
        assert isinstance(tx_hash, str)
        return self.transactions.get(tx_hash)
    return None
@locked
def list_transactions(self) -> Sequence[str]:
    """Return the txids of all stored transactions."""
    return [tx_hash for tx_hash in self.transactions]
@locked
def get_history(self) -> Sequence[str]:
    """Return all addresses that have an addr_history entry."""
    return [addr for addr in self.history]
def is_addr_in_history(self, addr: str) -> bool:
    """Whether `addr` has an addr_history entry at all."""
    # does not mean history is non-empty!
    assert isinstance(addr, str)
    return addr in self.history
@locked
def get_addr_history(self, addr: str) -> Sequence[Tuple[str, int]]:
    """Return the (txid, height) history of `addr` (empty list if unknown)."""
    assert isinstance(addr, str)
    return self.history.get(addr, [])
@modifier
def set_addr_history(self, addr: str, hist) -> None:
    """Replace the stored history of `addr` with `hist`
    (a list of (txid, height) pairs)."""
    assert isinstance(addr, str)
    self.history[addr] = hist
@modifier
def remove_addr_history(self, addr: str) -> None:
    """Forget the stored history of `addr` (no-op if unknown)."""
    assert isinstance(addr, str)
    self.history.pop(addr, None)
@locked
def list_verified_tx(self) -> Sequence[str]:
    """Return the txids of all SPV-verified transactions."""
    return [txid for txid in self.verified_tx]
@locked
def get_verified_tx(self, txid: str) -> Optional[TxMinedInfo]:
    """Return TxMinedInfo for a verified tx, or None if not verified.

    conf is left as None: confirmation count depends on the current chain
    tip and is computed by the caller.
    """
    assert isinstance(txid, str)
    if txid not in self.verified_tx:
        return None
    height, timestamp, txpos, header_hash = self.verified_tx[txid]
    return TxMinedInfo(height=height,
                       conf=None,
                       timestamp=timestamp,
                       txpos=txpos,
                       header_hash=header_hash)
@modifier
def add_verified_tx(self, txid: str, info: TxMinedInfo):
    """Mark `txid` as SPV-verified, storing its mined position."""
    assert isinstance(txid, str)
    assert isinstance(info, TxMinedInfo)
    record = (info.height, info.timestamp, info.txpos, info.header_hash)
    self.verified_tx[txid] = record
@modifier
def remove_verified_tx(self, txid: str):
    """Un-verify `txid` (no-op if it was not verified)."""
    assert isinstance(txid, str)
    self.verified_tx.pop(txid, None)
def is_in_verified_tx(self, txid: str) -> bool:
    """Whether `txid` has been SPV-verified."""
    assert isinstance(txid, str)
    return txid in self.verified_tx
@modifier
def add_tx_fee_from_server(self, txid: str, fee_sat: Optional[int]) -> None:
    """Store a server-reported fee for `txid`.

    A fee we calculated ourselves always wins and is never overwritten.
    Passing fee_sat=None clears a previously stored server value.
    """
    assert isinstance(txid, str)
    # note: when called with (fee_sat is None), rm currently saved value
    if txid not in self.tx_fees:
        self.tx_fees[txid] = TxFeesValue()
    tx_fees_value = self.tx_fees[txid]
    if tx_fees_value.is_calculated_by_us:
        return
    self.tx_fees[txid] = tx_fees_value._replace(fee=fee_sat, is_calculated_by_us=False)
@modifier
def add_tx_fee_we_calculated(self, txid: str, fee_sat: Optional[int]) -> None:
    """Store a fee for `txid` that we computed ourselves (trusted; overrides
    any server-reported value). None is silently ignored."""
    assert isinstance(txid, str)
    if fee_sat is None:
        return
    assert isinstance(fee_sat, int)
    if txid not in self.tx_fees:
        self.tx_fees[txid] = TxFeesValue()
    self.tx_fees[txid] = self.tx_fees[txid]._replace(fee=fee_sat, is_calculated_by_us=True)
@locked
def get_tx_fee(self, txid: str, *, trust_server: bool = False) -> Optional[int]:
    """Return the known fee (in sat) for `txid`, or None.

    Fees we calculated ourselves are always returned; a server-reported
    fee is only returned when `trust_server` is True.

    Fix: the docstring used to sit *after* the assert, making it a no-op
    string expression rather than the function's docstring.
    """
    assert isinstance(txid, str)
    tx_fees_value = self.tx_fees.get(txid)
    if tx_fees_value is None:
        return None
    if not trust_server and not tx_fees_value.is_calculated_by_us:
        return None
    return tx_fees_value.fee
@modifier
def add_num_inputs_to_tx(self, txid: str, num_inputs: int) -> None:
    """Record the total number of inputs of `txid` (used for fee estimates
    on txs we only partially know)."""
    assert isinstance(txid, str)
    assert isinstance(num_inputs, int)
    if txid not in self.tx_fees:
        self.tx_fees[txid] = TxFeesValue()
    self.tx_fees[txid] = self.tx_fees[txid]._replace(num_inputs=num_inputs)
@locked
def get_num_all_inputs_of_tx(self, txid: str) -> Optional[int]:
    """Return the recorded total input count of `txid`, or None if unknown."""
    assert isinstance(txid, str)
    fees_entry = self.tx_fees.get(txid)
    return fees_entry.num_inputs if fees_entry is not None else None
@locked
def get_num_ismine_inputs_of_tx(self, txid: str) -> int:
    """Return how many inputs of `txid` spend from our own addresses."""
    assert isinstance(txid, str)
    per_addr = self.txi.get(txid, {})
    return sum(len(entries) for entries in per_addr.values())
@modifier
def remove_tx_fee(self, txid: str) -> None:
    """Forget all fee bookkeeping for `txid` (no-op if unknown)."""
    assert isinstance(txid, str)
    self.tx_fees.pop(txid, None)
@locked
def get_dict(self, name):
    """Return self.data[name], creating an empty dict there if missing.

    The returned object is the live stored dict, not a copy.
    """
    # Warning: interacts un-intuitively with 'put': certain parts
    # of 'data' will have pointers saved as separate variables.
    if name not in self.data:
        self.data[name] = {}
    return self.data[name]
@locked
def num_change_addresses(self) -> int:
    """Number of derived change addresses."""
    return len(self.change_addresses)
@locked
def num_receiving_addresses(self) -> int:
    """Number of derived receiving addresses."""
    return len(self.receiving_addresses)
@locked
def get_change_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
    """Return (a slice of) the change addresses, in derivation order."""
    # note: slicing makes a shallow copy
    return self.change_addresses[slice_start:slice_stop]
@locked
def get_receiving_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
    """Return (a slice of) the receiving addresses, in derivation order."""
    # note: slicing makes a shallow copy
    return self.receiving_addresses[slice_start:slice_stop]
@modifier
def add_change_address(self, addr: str) -> None:
    """Append `addr` to the change chain and index it as (is_change=1, i)."""
    assert isinstance(addr, str)
    # index must be computed before the append
    self._addr_to_addr_index[addr] = (1, len(self.change_addresses))
    self.change_addresses.append(addr)
@modifier
def add_receiving_address(self, addr: str) -> None:
    """Append `addr` to the receiving chain and index it as (is_change=0, i)."""
    assert isinstance(addr, str)
    # index must be computed before the append
    self._addr_to_addr_index[addr] = (0, len(self.receiving_addresses))
    self.receiving_addresses.append(addr)
@locked
def get_address_index(self, address: str) -> Optional[Sequence[int]]:
    """Return (is_change, index) for a derived address, or None if unknown."""
    assert isinstance(address, str)
    return self._addr_to_addr_index.get(address)
@modifier
def add_imported_address(self, addr: str, d: dict) -> None:
    """Store metadata dict `d` for imported address `addr`."""
    assert isinstance(addr, str)
    self.imported_addresses[addr] = d
@modifier
def remove_imported_address(self, addr: str) -> None:
    """Remove imported address `addr`.

    Note: no default is passed to pop — raises KeyError if addr is unknown.
    """
    assert isinstance(addr, str)
    self.imported_addresses.pop(addr)
@locked
def has_imported_address(self, addr: str) -> bool:
    """Whether `addr` is one of the imported addresses."""
    assert isinstance(addr, str)
    return addr in self.imported_addresses
@locked
def get_imported_addresses(self) -> Sequence[str]:
    """Return all imported addresses, sorted."""
    return sorted(self.imported_addresses.keys())
@locked
def get_imported_address(self, addr: str) -> Optional[dict]:
    """Return the metadata dict of imported address `addr`, or None."""
    assert isinstance(addr, str)
    return self.imported_addresses.get(addr)
def load_addresses(self, wallet_type):
    """Bind address-related attributes to the live 'addresses' stored dict.

    Called from Abstract_Wallet.__init__. For imported wallets the dict
    maps address -> metadata; for deterministic wallets it holds the
    'receiving'/'change' lists, from which the address->(is_change, index)
    lookup table is rebuilt in memory.
    """
    if wallet_type == 'imported':
        self.imported_addresses = self.get_dict('addresses')  # type: Dict[str, dict]
    else:
        self.get_dict('addresses')
        for name in ['receiving', 'change']:
            if name not in self.data['addresses']:
                self.data['addresses'][name] = []
        self.change_addresses = self.data['addresses']['change']
        self.receiving_addresses = self.data['addresses']['receiving']
        self._addr_to_addr_index = {}  # type: Dict[str, Sequence[int]]  # key: address, value: (is_change, index)
        for i, addr in enumerate(self.receiving_addresses):
            self._addr_to_addr_index[addr] = (0, i)
        for i, addr in enumerate(self.change_addresses):
            self._addr_to_addr_index[addr] = (1, i)
@profiler
def _load_transactions(self):
    """Wrap self.data in a StoredDict, bind the per-topic sub-dicts to
    attributes, and garbage-collect unreferenced txs and spent outpoints.

    Must run after upgrades: the attribute bindings alias live sub-dicts
    of self.data.
    """
    self.data = StoredDict(self.data, self, [])
    # references in self.data
    # TODO make all these private
    # txid -> address -> prev_outpoint -> value
    self.txi = self.get_dict('txi')  # type: Dict[str, Dict[str, Dict[str, int]]]
    # txid -> address -> output_index -> (value, is_coinbase)
    self.txo = self.get_dict('txo')  # type: Dict[str, Dict[str, Dict[str, Tuple[int, bool]]]]
    self.transactions = self.get_dict('transactions')  # type: Dict[str, Transaction]
    self.spent_outpoints = self.get_dict('spent_outpoints')  # txid -> output_index -> next_txid
    self.history = self.get_dict('addr_history')  # address -> list of (txid, height)
    self.verified_tx = self.get_dict('verified_tx3')  # txid -> (height, timestamp, txpos, header_hash)
    self.tx_fees = self.get_dict('tx_fees')  # type: Dict[str, TxFeesValue]
    # scripthash -> set of (outpoint, value)
    self._prevouts_by_scripthash = self.get_dict('prevouts_by_scripthash')  # type: Dict[str, Set[Tuple[str, int]]]
    # remove unreferenced tx
    for tx_hash in list(self.transactions.keys()):
        if not self.get_txi_addresses(tx_hash) and not self.get_txo_addresses(tx_hash):
            self.logger.info(f"removing unreferenced tx: {tx_hash}")
            self.transactions.pop(tx_hash)
    # remove unreferenced outpoints
    for prevout_hash in self.spent_outpoints.keys():
        d = self.spent_outpoints[prevout_hash]
        for prevout_n, spending_txid in list(d.items()):
            if spending_txid not in self.transactions:
                self.logger.info("removing unreferenced spent outpoint")
                d.pop(prevout_n)
    # Qtum-specific tables:
    self.tokens = self.get_dict('tokens')
    # contract_addr + '_' + b58addr -> list(txid, height, log_index)
    self.token_history = self.get_dict('addr_token_history')
    # txid -> tx receipt
    self.tx_receipt = self.get_dict('tx_receipt')
    # txid -> raw tx
    self.token_txs = self.get_dict('token_txs')
    self.smart_contracts = self.get_dict('smart_contracts')
    self.delegations = self.get_dict('delegations')
    if self.token_history:
        token_hist_txids = [x2[0] for x2 in reduce(lambda x1, y1: x1+y1, self.token_history.values())]
    else:
        token_hist_txids = []
    # deserialize raw token txs that are referenced by the token history
    for tx_hash, raw in self.token_txs.items():
        if tx_hash in token_hist_txids:
            tx = Transaction(raw)
            self.token_txs[tx_hash] = tx
@modifier
def set_token(self, token: Token):
    """Store/overwrite `token`, keyed by token.get_key()."""
    self.tokens[token.get_key()] = token
@modifier
def delete_token(self, key: str):
    """Remove token `key` and its history (no-op if unknown)."""
    self.tokens.pop(key, None)
    self.token_history.pop(key, None)
@locked
def get_token(self, key: str) -> Optional[Token]:
    """Return the Token stored under `key`, or None if unknown.

    Fix: `Token(*self.tokens.get(key))` raised TypeError for a missing key
    instead of returning None as the Optional annotation promises.
    """
    token_data = self.tokens.get(key)
    if token_data is None:
        return None
    return Token(*token_data)
@locked
def list_tokens(self) -> list:
    """Return the keys of all stored tokens."""
    return [key for key in self.tokens]
@modifier
def set_token_history(self, key: str, hist: list):
    """Replace the stored history list for token `key`."""
    self.token_history[key] = hist
@modifier
def delete_token_history(self, key: str):
    """Forget the history of token `key` (no-op if unknown)."""
    self.token_history.pop(key, None)
@locked
def get_token_history(self, key: str) -> list:
    """Return the history list of token `key` (empty list if unknown)."""
    return self.token_history.get(key, [])
@locked
def list_token_histories(self) -> list:
    """Return the keys of all token histories."""
    return [key for key in self.token_history]
@modifier
def set_token_tx(self, txid: str, raw: str):
    """Store raw token tx `raw` under `txid`."""
    self.token_txs[txid] = raw
@modifier
def delete_token_tx(self, txid: str):
    """Forget token tx `txid` (no-op if unknown)."""
    self.token_txs.pop(txid, None)
@locked
def get_token_tx(self, txid: str):
    """Return the stored token tx for `txid` (raw str or deserialized
    Transaction — see _load_transactions), or None."""
    return self.token_txs.get(txid)
@locked
def list_token_txs(self) -> list:
    """Return the txids of all stored token txs."""
    return [txid for txid in self.token_txs]
@modifier
def set_tx_receipt(self, txid: str, receipt: list):
    """Store the EVM receipt for `txid`."""
    self.tx_receipt[txid] = receipt
@modifier
def delete_tx_receipt(self, txid: str):
    """Remove and return the receipt of `txid` (None if unknown).

    NOTE(review): unlike the other delete_* methods this one returns the
    popped value — callers may rely on it.
    """
    return self.tx_receipt.pop(txid, None)
@locked
def get_tx_receipt(self, txid: str) -> list:
    """Return the receipt of `txid` (empty list if unknown)."""
    return self.tx_receipt.get(txid, [])
@locked
def list_tx_receipts(self) -> list:
    """Return the txids of all stored receipts."""
    return [txid for txid in self.tx_receipt]
@modifier
def set_delegation(self, dele: Delegation):
    """Store delegation `dele`, keyed by its address, as [staker, fee]."""
    self.delegations[dele.addr] = [dele.staker, dele.fee]
@modifier
def delete_delegation(self, addr: str):
    """Forget the delegation of `addr` (no-op if unknown)."""
    self.delegations.pop(addr, None)
@locked
def get_delegation(self, addr: str) -> Optional[Delegation]:
    """Return the Delegation for `addr`, or None if absent or malformed."""
    record = self.delegations.get(addr, [])
    if len(record) == 2:
        return Delegation(addr=addr, staker=record[0], fee=record[1])
    return None
@locked
def list_delegations(self) -> Sequence[str]:
    """Return the addresses of all stored delegations."""
    return [addr for addr in self.delegations]
@modifier
def clear_history(self):
    """Wipe all on-chain history state (used when rebuilding from the
    server): txi/txo, spent outpoints, transactions, address histories,
    verification data, fees, and the Qtum token/receipt tables."""
    self.txi.clear()
    self.txo.clear()
    self.spent_outpoints.clear()
    self.transactions.clear()
    self.history.clear()
    self.verified_tx.clear()
    self.tx_fees.clear()
    self.token_txs.clear()
    self.token_history.clear()
    self.tx_receipt.clear()
    self._prevouts_by_scripthash.clear()
def _convert_dict(self, path, key, v):
    """StoredDict hook: convert the raw JSON dict stored under `key` into a
    dict of rich objects, based solely on the key name.

    NOTE(review): the first three tests are independent `if`s while the
    rest form an `elif` chain; behavior is the same because the key
    equalities are mutually exclusive, but the style is inconsistent.
    """
    if key == 'transactions':
        # note: for performance, "deserialize=False" so that we will deserialize these on-demand
        v = dict((k, tx_from_any(x, deserialize=False)) for k, x in v.items())
    if key == 'invoices':
        v = dict((k, Invoice.from_json(x)) for k, x in v.items())
    if key == 'payment_requests':
        v = dict((k, Invoice.from_json(x)) for k, x in v.items())
    elif key == 'adds':
        v = dict((k, UpdateAddHtlc.from_tuple(*x)) for k, x in v.items())
    elif key == 'fee_updates':
        v = dict((k, FeeUpdate(**x)) for k, x in v.items())
    elif key == 'submarine_swaps':
        v = dict((k, SwapData(**x)) for k, x in v.items())
    elif key == 'channel_backups':
        v = dict((k, ChannelBackupStorage(**x)) for k, x in v.items())
    elif key == 'tx_fees':
        v = dict((k, TxFeesValue(*x)) for k, x in v.items())
    elif key == 'prevouts_by_scripthash':
        # JSON stores lists; restore the set-of-pairs shape
        v = dict((k, {(prevout, value) for (prevout, value) in x}) for k, x in v.items())
    elif key == 'buckets':
        v = dict((k, ShachainElement(bfh(x[0]), int(x[1]))) for k, x in v.items())
    elif key == 'data_loss_protect_remote_pcp':
        v = dict((k, bfh(x)) for k, x in v.items())
    return v
def _convert_value(self, path, key, v):
    """StoredDict hook: convert a single raw JSON value stored under `key`
    into its rich type; unknown keys pass through unchanged."""
    constructors = {
        'local_config': LocalConfig,
        'remote_config': RemoteConfig,
        'constraints': ChannelConstraints,
        'funding_outpoint': Outpoint,
    }
    make = constructors.get(key)
    return make(**v) if make is not None else v
def write(self, storage: 'WalletStorage'):
    """Thread-safe wrapper around _write."""
    with self.lock:
        self._write(storage)
def _write(self, storage: 'WalletStorage'):
if threading.currentThread().isDaemon():
self.logger.warning('daemon thread cannot write db')
return
if not self.modified():
return
storage.write(self.dump())
self.set_modified(False)
def is_ready_to_be_used_by_wallet(self):
    """Whether the DB is fully upgraded and post-upgrade tasks have run."""
    return not self.requires_upgrade() and self._called_after_upgrade_tasks
def split_accounts(self, root_path):
    """Split a legacy multi-account wallet into one file per account.

    Each new file is written next to `root_path` with the account's suffix
    appended; returns the list of paths created.
    """
    from .storage import WalletStorage
    out = []
    result = self.get_split_accounts()
    for data in result:
        path = root_path + '.' + data['suffix']
        storage = WalletStorage(path)
        db = WalletDB(json.dumps(data), manual_upgrades=False)
        # force upgrade() to run even though load_data may have upgraded already
        db._called_after_upgrade_tasks = False
        db.upgrade()
        db.write(storage)
        out.append(path)
    return out
def get_action(self):
    """Ask plugins (via the 'get_action' hook) whether wallet creation
    still needs a further step; returns None when complete."""
    action = run_hook('get_action', self)
    return action
def load_plugins(self):
    """Trigger the plugin loader registered for this wallet_type, if any."""
    wallet_type = self.get('wallet_type')
    if wallet_type in plugin_loaders:
        plugin_loaders[wallet_type]()
def set_keystore_encryption(self, enable):
    """Persist whether the keystore is password-encrypted."""
    self.put('use_encryption', enable)
| 39.088986 | 153 | 0.582038 |
import os
import ast
import json
import copy
import threading
from collections import defaultdict
from functools import reduce
from typing import Dict, Optional, List, Tuple, Set, Iterable, NamedTuple, Sequence, TYPE_CHECKING, Union
import binascii
from . import util, bitcoin
from .util import profiler, WalletFileException, multisig_type, TxMinedInfo, bfh
from .invoices import PR_TYPE_ONCHAIN, Invoice
from .keystore import bip44_derivation
from .transaction import Transaction, TxOutpoint, tx_from_any, PartialTransaction, PartialTxOutput
from .logging import Logger
from .lnutil import LOCAL, REMOTE, FeeUpdate, UpdateAddHtlc, LocalConfig, RemoteConfig, Keypair, OnlyPubkeyKeypair, RevocationStore, ChannelBackupStorage
from .lnutil import ChannelConstraints, Outpoint, ShachainElement
from .json_db import StoredDict, JsonDB, locked, modifier
from .plugin import run_hook, plugin_loaders
from .paymentrequest import PaymentRequest
from .submarine_swaps import SwapData
from .bitcoin import Token, Delegation
if TYPE_CHECKING:
from .storage import WalletStorage
# Storage-format version markers. Files without an explicit 'seed_version'
# are inferred (see get_seed_version); the upgrade methods bring any older
# file up to FINAL_SEED_VERSION.
OLD_SEED_VERSION = 4    # wallets with pre-2.0 "old" seeds
NEW_SEED_VERSION = 11   # baseline for 2.x wallets
FINAL_SEED_VERSION = 32  # version written by this release
class TxFeesValue(NamedTuple):
    """Per-tx fee bookkeeping stored in the 'tx_fees' table."""
    fee: Optional[int] = None  # fee in satoshis, if known
    is_calculated_by_us: bool = False  # True if we derived the fee ourselves (trusted over server values)
    num_inputs: Optional[int] = None  # total input count of the tx, if known
class WalletDB(JsonDB):
def __init__(self, raw, *, manual_upgrades: bool):
    """Create a wallet DB from file contents `raw` (empty for a new wallet).

    manual_upgrades: when True, an out-of-date file is loaded but not
    upgraded automatically; the caller must invoke upgrade() itself.
    """
    JsonDB.__init__(self, {})
    self._manual_upgrades = manual_upgrades
    self._called_after_upgrade_tasks = False
    if raw:
        self.load_data(raw)
        self.load_plugins()
    else:
        # brand-new wallet: start at the current format version
        self.put('seed_version', FINAL_SEED_VERSION)
        self._after_upgrade_tasks()
def load_data(self, s):
    """Parse wallet-file contents `s` into self.data and run split/upgrade
    checks.

    `s` is normally JSON; very old wallets used a Python-literal format,
    salvaged via ast.literal_eval keeping only JSON-serialisable entries.

    Raises WalletFileException when the file cannot be parsed, is not a
    dict, or needs a manual multi-account split.

    Fix: the two bare `except:` clauses also swallowed SystemExit and
    KeyboardInterrupt; narrowed to `except Exception:`.
    """
    try:
        self.data = json.loads(s)
    except Exception:
        # not JSON -- try the legacy Python-literal wallet format
        try:
            d = ast.literal_eval(s)
            labels = d.get('labels', {})  # probe: raises if d is not dict-like
        except Exception as e:
            raise WalletFileException("Cannot read wallet file. (parsing failed)")
        self.data = {}
        for key, value in d.items():
            try:
                json.dumps(key)
                json.dumps(value)
            except Exception:
                self.logger.info(f'Failed to convert label to json format: {key}')
                continue
            self.data[key] = value
    if not isinstance(self.data, dict):
        raise WalletFileException("Malformed wallet file (not dict)")
    if not self._manual_upgrades and self.requires_split():
        raise WalletFileException("This wallet has multiple accounts and must be split")
    if not self.requires_upgrade():
        self._after_upgrade_tasks()
    elif not self._manual_upgrades:
        self.upgrade()
def requires_split(self):
    """Whether this is a legacy multi-account wallet that must be split
    into separate files before use."""
    accounts = self.get('accounts', {})
    return len(accounts) > 1
def get_split_accounts(self):
    """Return a list of data dicts, one per account, suitable for writing
    as standalone wallet files (each tagged with a 'suffix').

    Only 'old' two-account wallets and bip44/hardware multi-account
    wallets can be split; anything else raises WalletFileException.
    Returns None when there is nothing to split.
    """
    result = []
    d = self.get('accounts', {})
    if len(d) < 2:
        return
    wallet_type = self.get('wallet_type')
    if wallet_type == 'old':
        # old wallets have at most a deterministic account '0' and an imported '/x'
        assert len(d) == 2
        data1 = copy.deepcopy(self.data)
        data1['accounts'] = {'0': d['0']}
        data1['suffix'] = 'deterministic'
        data2 = copy.deepcopy(self.data)
        data2['accounts'] = {'/x': d['/x']}
        data2['seed'] = None
        data2['seed_version'] = None
        data2['master_public_key'] = None
        data2['wallet_type'] = 'imported'
        data2['suffix'] = 'imported'
        result = [data1, data2]
    elif wallet_type in ['bip44', 'trezor', 'keepkey', 'ledger', 'btchip', 'digitalbitbox', 'safe_t']:
        mpk = self.get('master_public_keys')
        for k in d.keys():
            i = int(k)
            x = d[k]
            if x.get("pending"):
                continue
            xpub = mpk["x/%d'"%i]
            new_data = copy.deepcopy(self.data)
            # save account, derivation and xpub at index 0
            new_data['accounts'] = {'0': x}
            new_data['master_public_keys'] = {"x/0'": xpub}
            new_data['derivation'] = bip44_derivation(k)
            new_data['suffix'] = k
            result.append(new_data)
    else:
        raise WalletFileException("This wallet has multiple accounts and must be split")
    return result
def requires_upgrade(self):
    """Whether the file's storage version is older than this release's."""
    return self.get_seed_version() < FINAL_SEED_VERSION
@profiler
def upgrade(self):
    """Run every storage upgrade step, in order, then post-upgrade tasks.

    Each _convert_* method checks internally whether it applies to the
    current version, so the full chain is always invoked. The order of
    calls is significant and must not be changed.
    """
    self.logger.info('upgrading wallet format')
    if self._called_after_upgrade_tasks:
        # _after_upgrade_tasks replaces self.data with a StoredDict;
        # upgrades must run on the plain dict
        raise Exception("'after_upgrade_tasks' must NOT be called before 'upgrade'")
    self._convert_imported()
    self._convert_wallet_type()
    self._convert_account()
    self._convert_version_13_b()
    self._convert_version_14()
    self._convert_version_15()
    self._convert_version_16()
    self._convert_version_17()
    self._convert_version_18()
    self._convert_version_19()
    self._convert_version_20()
    self._convert_version_21()
    self._convert_version_22()
    self._convert_version_23()
    self._convert_version_24()
    self._convert_version_25()
    self._convert_version_26()
    self._convert_version_27()
    self._convert_version_28()
    self._convert_version_29()
    self._convert_version_30()
    self._convert_version_31()
    self._convert_version_32()
    self.put('seed_version', FINAL_SEED_VERSION)
    self._after_upgrade_tasks()
def _after_upgrade_tasks(self):
    """Post-upgrade initialisation: mark done and bind live sub-dicts
    (wraps self.data in a StoredDict via _load_transactions)."""
    self._called_after_upgrade_tasks = True
    self._load_transactions()
    def _convert_wallet_type(self):
        """Upgrade pre-v13 storage to the keystore-based layout.

        Builds a 'keystore' dict from the legacy master key / seed fields,
        then deletes the legacy fields. Returns False (early) when the file
        already has a keystore or is of the 'imported' type.
        """
        if not self._is_upgrade_method_needed(0, 13):
            return
        wallet_type = self.get('wallet_type')
        # 'btchip' was the old name of the Ledger plugin
        if wallet_type == 'btchip': wallet_type = 'ledger'
        if self.get('keystore') or self.get('x1/') or wallet_type=='imported':
            return False
        assert not self.requires_split()
        seed_version = self.get_seed_version()
        seed = self.get('seed')
        xpubs = self.get('master_public_keys')
        xprvs = self.get('master_private_keys', {})
        mpk = self.get('master_public_key')
        keypairs = self.get('keypairs')
        key_type = self.get('key_type')
        if seed_version == OLD_SEED_VERSION or wallet_type == 'old':
            d = {
                'type': 'old',
                'seed': seed,
                'mpk': mpk,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif key_type == 'imported':
            d = {
                'type': 'imported',
                'keypairs': keypairs,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif wallet_type in ['xpub', 'standard']:
            xpub = xpubs["x/"]
            xprv = xprvs.get("x/")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
                'seed': seed,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif wallet_type in ['bip44']:
            xpub = xpubs["x/0'"]
            xprv = xprvs.get("x/0'")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif wallet_type in ['trezor', 'keepkey', 'ledger', 'digitalbitbox', 'safe_t']:
            xpub = xpubs["x/0'"]
            derivation = self.get('derivation', bip44_derivation(0))
            d = {
                'type': 'hardware',
                'hw_type': wallet_type,
                'xpub': xpub,
                'derivation': derivation,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif (wallet_type == '2fa') or multisig_type(wallet_type):
            # one keystore per cosigner, stored under its legacy 'xN/' key
            for key in xpubs.keys():
                d = {
                    'type': 'bip32',
                    'xpub': xpubs[key],
                    'xprv': xprvs.get(key),
                }
                if key == 'x1/' and seed:
                    d['seed'] = seed
                self.put(key, d)
        else:
            raise WalletFileException('Unable to tell wallet type. Is this even a wallet file?')
        # remove junk
        self.put('master_public_key', None)
        self.put('master_public_keys', None)
        self.put('master_private_keys', None)
        self.put('derivation', None)
        self.put('seed', None)
        self.put('keypairs', None)
        self.put('key_type', None)
    def _convert_version_13_b(self):
        # version 13 is ambiguous, and has an earlier and a later structure
        # For imported standard wallets, derive the receiving-address list
        # from the stored pubkeys and store it under 'addresses'.
        if not self._is_upgrade_method_needed(0, 13):
            return
        if self.get('wallet_type') == 'standard':
            if self.get('keystore').get('type') == 'imported':
                pubkeys = self.get('keystore').get('keypairs').keys()
                d = {'change': []}
                receiving_addresses = []
                for pubkey in pubkeys:
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    receiving_addresses.append(addr)
                d['receiving'] = receiving_addresses
                self.put('addresses', d)
                self.put('pubkeys', None)
        self.put('seed_version', 13)
    def _convert_version_14(self):
        # convert imported wallets for 3.0
        # 'imported' wallets: address list -> dict; imported keystores become
        # wallet_type 'imported' with per-address pubkey metadata.
        if not self._is_upgrade_method_needed(13, 13):
            return
        if self.get('wallet_type') =='imported':
            addresses = self.get('addresses')
            if type(addresses) is list:
                addresses = dict([(x, None) for x in addresses])
                self.put('addresses', addresses)
        elif self.get('wallet_type') == 'standard':
            if self.get('keystore').get('type')=='imported':
                addresses = set(self.get('addresses').get('receiving'))
                pubkeys = self.get('keystore').get('keypairs').keys()
                assert len(addresses) == len(pubkeys)
                d = {}
                for pubkey in pubkeys:
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    assert addr in addresses
                    d[addr] = {
                        'pubkey': pubkey,
                        'redeem_script': None,
                        'type': 'p2pkh'
                    }
                self.put('addresses', d)
                self.put('pubkeys', None)
                self.put('wallet_type', 'imported')
        self.put('seed_version', 14)
    def _convert_version_15(self):
        """Reject development segwit seeds from v14; otherwise just bump the version."""
        if not self._is_upgrade_method_needed(14, 14):
            return
        if self.get('seed_type') == 'segwit':
            # should not get here; get_seed_version should have caught this
            raise Exception('unsupported derivation (development segwit, v14)')
        self.put('seed_version', 15)
    def _convert_version_16(self):
        # fixes issue #3193 for Imported_Wallets with addresses
        # also, previous versions allowed importing any garbage as an address
        # which we now try to remove, see pr #3191
        if not self._is_upgrade_method_needed(15, 15):
            return
        def remove_address(addr):
            # Scrub every per-address record of an invalid address.
            def remove_from_dict(dict_name):
                d = self.get(dict_name, None)
                if d is not None:
                    d.pop(addr, None)
                    self.put(dict_name, d)
            def remove_from_list(list_name):
                lst = self.get(list_name, None)
                if lst is not None:
                    s = set(lst)
                    s -= {addr}
                    self.put(list_name, list(s))
            # note: we don't remove 'addr' from self.get('addresses')
            remove_from_dict('addr_history')
            remove_from_dict('labels')
            remove_from_dict('payment_requests')
            remove_from_list('frozen_addresses')
        if self.get('wallet_type') == 'imported':
            addresses = self.get('addresses')
            assert isinstance(addresses, dict)
            addresses_new = dict()
            for address, details in addresses.items():
                if not bitcoin.is_address(address):
                    remove_address(address)
                    continue
                if details is None:
                    addresses_new[address] = {}
                else:
                    addresses_new[address] = details
            self.put('addresses', addresses_new)
        self.put('seed_version', 16)
    def _convert_version_17(self):
        """Rebuild 'spent_outpoints' from stored transactions; convert token storage keys."""
        if not self._is_upgrade_method_needed(16, 16):
            return
        self.put('pruned_txo', None)
        transactions = self.get('transactions', {})
        spent_outpoints = defaultdict(dict)
        for txid, raw_tx in transactions.items():
            tx = Transaction(raw_tx)
            for txin in tx.inputs():
                if txin.is_coinbase_input():
                    continue
                prevout_hash = txin.prevout.txid.hex()
                prevout_n = txin.prevout.out_idx
                spent_outpoints[prevout_hash][str(prevout_n)] = txid
        self.put('spent_outpoints', spent_outpoints)
        # tokens were keyed '<contract>_<bind_addr>' with a plain list value;
        # re-key them via Token.get_key()
        tokens = self.get('tokens', {})
        new_tokens = {}
        for key, value in tokens.items():
            contract_addr, bind_addr = key.split('_')
            new_token = Token(contract_addr, bind_addr, value[0], value[1], value[2], value[3])
            new_tokens[new_token.get_key()] = new_token
        self.put('tokens', new_tokens)
        self.put('seed_version', 17)
    def _convert_version_18(self):
        """Drop the obsolete 'verified_tx3' cache; it is rebuilt by verification."""
        if not self._is_upgrade_method_needed(17, 17):
            return
        self.put('verified_tx3', None)
        self.put('seed_version', 18)
    def _convert_version_19(self):
        """Drop the obsolete 'tx_fees' cache; it is rebuilt from the network."""
        if not self._is_upgrade_method_needed(18, 18):
            return
        self.put('tx_fees', None)
        self.put('seed_version', 19)
    def _convert_version_20(self):
        """Add 'derivation' prefix and 'root_fingerprint' to every keystore.

        Both can be reconstructed from the stored xpub when its BIP32 depth is
        0 or 1; the Coldcard-specific 'ckcc_xfp' field is folded into
        'root_fingerprint' and removed.
        """
        if not self._is_upgrade_method_needed(19, 19):
            return
        from .bip32 import BIP32Node, convert_bip32_intpath_to_strpath
        # 'keystore' for standard wallets; 'x1/'..'x15/' for multisig cosigners
        for ks_name in ('keystore', *['x{}/'.format(i) for i in range(1, 16)]):
            ks = self.get(ks_name, None)
            if ks is None: continue
            xpub = ks.get('xpub', None)
            if xpub is None: continue
            bip32node = BIP32Node.from_xkey(xpub)
            # derivation prefix
            derivation_prefix = ks.get('derivation', None)
            if derivation_prefix is None:
                assert bip32node.depth >= 0, bip32node.depth
                if bip32node.depth == 0:
                    derivation_prefix = 'm'
                elif bip32node.depth == 1:
                    child_number_int = int.from_bytes(bip32node.child_number, 'big')
                    derivation_prefix = convert_bip32_intpath_to_strpath([child_number_int])
                ks['derivation'] = derivation_prefix
            # root fingerprint
            root_fingerprint = ks.get('ckcc_xfp', None)
            if root_fingerprint is not None:
                root_fingerprint = root_fingerprint.to_bytes(4, byteorder="little", signed=False).hex().lower()
            if root_fingerprint is None:
                if bip32node.depth == 0:
                    root_fingerprint = bip32node.calc_fingerprint_of_this_node().hex().lower()
                elif bip32node.depth == 1:
                    root_fingerprint = bip32node.fingerprint.hex()
            ks['root_fingerprint'] = root_fingerprint
            ks.pop('ckcc_xfp', None)
            self.put(ks_name, ks)
        self.put('seed_version', 20)
    def _convert_version_21(self):
        """Add an explicit 'state' field (initially OPENING) to lightning channels."""
        if not self._is_upgrade_method_needed(20, 20):
            return
        channels = self.get('channels')
        if channels:
            for channel in channels:
                channel['state'] = 'OPENING'
            self.put('channels', channels)
        self.put('seed_version', 21)
    def _convert_version_22(self):
        """Build the 'prevouts_by_scripthash' index from stored transactions."""
        if not self._is_upgrade_method_needed(21, 21):
            return
        from .bitcoin import script_to_scripthash
        transactions = self.get('transactions', {})
        prevouts_by_scripthash = defaultdict(list)
        for txid, raw_tx in transactions.items():
            tx = Transaction(raw_tx)
            for idx, txout in enumerate(tx.outputs()):
                outpoint = f"{txid}:{idx}"
                scripthash = script_to_scripthash(txout.scriptpubkey.hex())
                prevouts_by_scripthash[scripthash].append((outpoint, txout.value))
        self.put('prevouts_by_scripthash', prevouts_by_scripthash)
        self.put('seed_version', 22)
    def _convert_version_23(self):
        """Restructure channel storage: move the revocation store to the channel
        top level and re-key per-side fee_updates lists into dicts."""
        if not self._is_upgrade_method_needed(22, 22):
            return
        channels = self.get('channels', [])
        LOCAL = 1
        REMOTE = -1
        for c in channels:
            # move revocation store from remote_config to channel
            r = c['remote_config'].pop('revocation_store')
            c['revocation_store'] = r
            # convert fee updates
            log = c.get('log', {})
            for sub in LOCAL, REMOTE:
                l = log[str(sub)]['fee_updates']
                d = {}
                for i, fu in enumerate(l):
                    d[str(i)] = {
                        'rate':fu['rate'],
                        'ctn_local':fu['ctns'][str(LOCAL)],
                        'ctn_remote':fu['ctns'][str(REMOTE)]
                    }
                log[str(int(sub))]['fee_updates'] = d
        self.data['channels'] = channels
        self.data['seed_version'] = 23
    def _convert_version_24(self):
        """Convert list-based storage to dict-based: channels keyed by id,
        revocation buckets keyed by index, and txi/txo inner lists to dicts."""
        if not self._is_upgrade_method_needed(23, 23):
            return
        channels = self.get('channels', [])
        for c in channels:
            # convert revocation store to dict (drop empty buckets)
            r = c['revocation_store']
            d = {}
            for i in range(49):
                v = r['buckets'][i]
                if v is not None:
                    d[str(i)] = v
            r['buckets'] = d
            c['revocation_store'] = r
        # convert channels to dict
        self.data['channels'] = { x['channel_id']: x for x in channels }
        # convert txi & txo
        txi = self.get('txi', {})
        for tx_hash, d in txi.items():
            d2 = {}
            for addr, l in d.items():
                d2[addr] = {}
                for ser, v in l:
                    d2[addr][ser] = v
            txi[tx_hash] = d2
        self.data['txi'] = txi
        txo = self.get('txo', {})
        for tx_hash, d in txo.items():
            d2 = {}
            for addr, l in d.items():
                d2[addr] = {}
                for n, v, cb in l:
                    d2[addr][str(n)] = (v, cb)
            txo[tx_hash] = d2
        self.data['txo'] = txo
        self.data['seed_version'] = 24
    def _convert_version_25(self):
        """Normalize payment requests and BIP70 invoices into typed dicts."""
        if not self._is_upgrade_method_needed(24, 24):
            return
        # add 'type' field to onchain requests
        requests = self.data.get('payment_requests', {})
        for k, r in list(requests.items()):
            if r.get('address') == k:
                requests[k] = {
                    'address': r['address'],
                    'amount': r.get('amount'),
                    'exp': r.get('exp'),
                    'id': r.get('id'),
                    'memo': r.get('memo'),
                    'time': r.get('time'),
                    'type': PR_TYPE_ONCHAIN,
                }
        # convert bip70 invoices
        invoices = self.data.get('invoices', {})
        for k, r in list(invoices.items()):
            data = r.get("hex")
            if data:
                pr = PaymentRequest(bytes.fromhex(data))
                if pr.id != k:
                    continue
                invoices[k] = {
                    'type': PR_TYPE_ONCHAIN,
                    'amount': pr.get_amount(),
                    'bip70': data,
                    'exp': pr.get_expiration_date() - pr.get_time(),
                    'id': pr.id,
                    'message': pr.get_memo(),
                    'outputs': [x.to_legacy_tuple() for x in pr.get_outputs()],
                    'time': pr.get_time(),
                    'requestor': pr.get_requestor(),
                }
        self.data['seed_version'] = 25
    def _convert_version_26(self):
        """Fold 'lightning_channel_timestamps' into per-channel funding/closing heights."""
        if not self._is_upgrade_method_needed(25, 25):
            return
        channels = self.data.get('channels', {})
        channel_timestamps = self.data.pop('lightning_channel_timestamps', {})
        for channel_id, c in channels.items():
            item = channel_timestamps.get(channel_id)
            if item:
                funding_txid, funding_height, funding_timestamp, closing_txid, closing_height, closing_timestamp = item
                if funding_txid:
                    c['funding_height'] = funding_txid, funding_height, funding_timestamp
                if closing_txid:
                    c['closing_height'] = closing_txid, closing_height, closing_timestamp
        self.data['seed_version'] = 26
    def _convert_version_27(self):
        """Add the 'htlc_minimum_msat' field (fixed at 1) to channel local configs."""
        if not self._is_upgrade_method_needed(26, 26):
            return
        channels = self.data.get('channels', {})
        for channel_id, c in channels.items():
            c['local_config']['htlc_minimum_msat'] = 1
        self.data['seed_version'] = 27
    def _convert_version_28(self):
        """Add the 'channel_seed' field (initially None) to channel local configs."""
        if not self._is_upgrade_method_needed(27, 27):
            return
        channels = self.data.get('channels', {})
        for channel_id, c in channels.items():
            c['local_config']['channel_seed'] = None
        self.data['seed_version'] = 28
    def _convert_version_29(self):
        """Normalize invoices and requests to a common schema with defaulted
        'message'/'exp'/'time' fields and type-specific extras."""
        if not self._is_upgrade_method_needed(28, 28):
            return
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, r in list(d.items()):
                _type = r.get('type', 0)
                item = {
                    'type': _type,
                    'message': r.get('message') or r.get('memo', ''),
                    'amount': r.get('amount'),
                    'exp': r.get('exp') or 0,
                    'time': r.get('time', 0),
                }
                if _type == PR_TYPE_ONCHAIN:
                    # a bare 'address' becomes a single-output list
                    address = r.pop('address', None)
                    if address:
                        outputs = [(0, address, r.get('amount'))]
                    else:
                        outputs = r.get('outputs')
                    item.update({
                        'outputs': outputs,
                        'id': r.get('id'),
                        'bip70': r.get('bip70'),
                        'requestor': r.get('requestor'),
                    })
                else:
                    item.update({
                        'rhash': r['rhash'],
                        'invoice': r['invoice'],
                    })
                d[key] = item
        self.data['seed_version'] = 29
    def _convert_version_30(self):
        """Rename 'amount' to unit-explicit fields: sat for onchain, msat for
        lightning; lightning invoices drop fields derivable from the bolt11 string."""
        if not self._is_upgrade_method_needed(29, 29):
            return
        from .invoices import PR_TYPE_ONCHAIN, PR_TYPE_LN
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, item in list(d.items()):
                _type = item['type']
                if _type == PR_TYPE_ONCHAIN:
                    item['amount_sat'] = item.pop('amount')
                elif _type == PR_TYPE_LN:
                    amount_sat = item.pop('amount')
                    item['amount_msat'] = 1000 * amount_sat if amount_sat is not None else None
                    item.pop('exp')
                    item.pop('message')
                    item.pop('rhash')
                    item.pop('time')
                else:
                    raise Exception(f"unknown invoice type: {_type}")
        self.data['seed_version'] = 30
    def _convert_version_31(self):
        """Replace None with 0 for onchain amount/exp/time fields."""
        if not self._is_upgrade_method_needed(30, 30):
            return
        from .invoices import PR_TYPE_ONCHAIN
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, item in list(d.items()):
                if item['type'] == PR_TYPE_ONCHAIN:
                    item['amount_sat'] = item['amount_sat'] or 0
                    item['exp'] = item['exp'] or 0
                    item['time'] = item['time'] or 0
        self.data['seed_version'] = 31
    def _convert_version_32(self):
        """Drop malformed onchain invoices that have no outputs."""
        if not self._is_upgrade_method_needed(31, 31):
            return
        from .invoices import PR_TYPE_ONCHAIN
        invoices_old = self.data.get('invoices', {})
        invoices_new = {k: item for k, item in invoices_old.items()
                        if not (item['type'] == PR_TYPE_ONCHAIN and item['outputs'] is None)}
        self.data['invoices'] = invoices_new
        self.data['seed_version'] = 32
    def _convert_imported(self):
        """Convert the legacy accounts['/x']['imported'] structure (pre-v13).

        Watch-only imports become an 'addresses' list; key imports become a
        'keypairs' dict. A mix of both is not representable and raises.
        """
        if not self._is_upgrade_method_needed(0, 13):
            return
        # '/x' is the legacy account used for imported addresses/keys
        d = self.get('accounts', {}).get('/x', {}).get('imported',{})
        if not d:
            return False
        addresses = []
        keypairs = {}
        for addr, v in d.items():
            pubkey, privkey = v
            if privkey:
                keypairs[pubkey] = privkey
            else:
                addresses.append(addr)
        if addresses and keypairs:
            raise WalletFileException('mixed addresses and privkeys')
        elif addresses:
            self.put('addresses', addresses)
            self.put('accounts', None)
        elif keypairs:
            self.put('wallet_type', 'standard')
            self.put('key_type', 'imported')
            self.put('keypairs', keypairs)
            self.put('accounts', None)
        else:
            raise WalletFileException('no addresses or privkeys')
    def _convert_account(self):
        """Delete the legacy 'accounts' field (pre-v13 storage)."""
        if not self._is_upgrade_method_needed(0, 13):
            return
        self.put('accounts', None)
    def _is_upgrade_method_needed(self, min_version, max_version):
        """Whether a conversion targeting versions [min_version, max_version] applies.

        Returns False if the file is already newer than max_version; raises if
        the file is older than min_version (an earlier conversion was skipped).
        """
        assert min_version <= max_version
        cur_version = self.get_seed_version()
        if cur_version > max_version:
            return False
        elif cur_version < min_version:
            raise WalletFileException(
                'storage upgrade: unexpected version {} (should be {}-{})'
                .format(cur_version, min_version, max_version))
        else:
            return True
    @locked
    def get_seed_version(self):
        """Return the storage-format version, inferring it for very old files.

        Raises WalletFileException for files newer than this release supports,
        and for known-unsupported historical versions.
        """
        seed_version = self.get('seed_version')
        if not seed_version:
            # pre-versioned files: a 128-hex master_public_key marks the old format
            seed_version = OLD_SEED_VERSION if len(self.get('master_public_key','')) == 128 else NEW_SEED_VERSION
        if seed_version > FINAL_SEED_VERSION:
            raise WalletFileException('This version of Electrum is too old to open this wallet.\n'
                                      '(highest supported storage version: {}, version of this file: {})'
                                      .format(FINAL_SEED_VERSION, seed_version))
        if seed_version==14 and self.get('seed_type') == 'segwit':
            # development segwit seeds were never supported in release builds
            self._raise_unsupported_version(seed_version)
        if seed_version >=12:
            return seed_version
        if seed_version not in [OLD_SEED_VERSION, NEW_SEED_VERSION]:
            self._raise_unsupported_version(seed_version)
        return seed_version
    def _raise_unsupported_version(self, seed_version):
        """Raise WalletFileException with migration advice for *seed_version*."""
        msg = f"Your wallet has an unsupported seed version: {seed_version}."
        if seed_version in [5, 7, 8, 9, 10, 14]:
            msg += "\n\nTo open this wallet, try 'git checkout seed_v%d'"%seed_version
        if seed_version == 6:
            # version 1.9.8 created v6 wallets when an incorrect
            # seed was entered in the restore dialog
            msg += '\n\nThis file was created because of a bug in version 1.9.8.'
            if self.get('master_public_keys') is None and self.get('master_private_keys') is None and self.get('imported_keys') is None:
                # pbkdf2 (at that time an additional dependency) was not included
                # with the binaries, and wallet creation aborted.
                msg += "\nIt does not contain any keys, and can safely be removed."
            else:
                # creation was complete if electrum was run from source
                msg += "\nPlease open this file with Electrum 1.9.8, and move your coins to a new wallet."
        raise WalletFileException(msg)
@locked
def get_txi_addresses(self, tx_hash: str) -> List[str]:
assert isinstance(tx_hash, str)
return list(self.txi.get(tx_hash, {}).keys())
@locked
def get_txo_addresses(self, tx_hash: str) -> List[str]:
assert isinstance(tx_hash, str)
return list(self.txo.get(tx_hash, {}).keys())
@locked
def get_txi_addr(self, tx_hash: str, address: str) -> Iterable[Tuple[str, int]]:
assert isinstance(tx_hash, str)
assert isinstance(address, str)
d = self.txi.get(tx_hash, {}).get(address, {})
return list(d.items())
@locked
def get_txo_addr(self, tx_hash: str, address: str) -> Iterable[Tuple[int, int, bool]]:
assert isinstance(tx_hash, str)
assert isinstance(address, str)
d = self.txo.get(tx_hash, {}).get(address, {})
return [(int(n), v, cb) for (n, (v, cb)) in d.items()]
    @modifier
    def add_txi_addr(self, tx_hash: str, addr: str, ser: str, v: int) -> None:
        """Record that an input of *tx_hash* spends outpoint *ser* (worth *v* sat) from *addr*.

        Note: intermediate dicts are created via explicit __setitem__ and then
        re-read, so the StoredDict wrapper sees every mutation; do not replace
        this with dict.setdefault.
        """
        assert isinstance(tx_hash, str)
        assert isinstance(addr, str)
        assert isinstance(ser, str)
        assert isinstance(v, int)
        if tx_hash not in self.txi:
            self.txi[tx_hash] = {}
        d = self.txi[tx_hash]
        if addr not in d:
            d[addr] = {}
        d[addr][ser] = v
    @modifier
    def add_txo_addr(self, tx_hash: str, addr: str, n: Union[int, str], v: int, is_coinbase: bool) -> None:
        """Record that output *n* of *tx_hash* pays *v* sat to *addr*.

        Same StoredDict caveat as add_txi_addr: keep the explicit
        create-then-re-read pattern.
        """
        n = str(n)
        assert isinstance(tx_hash, str)
        assert isinstance(addr, str)
        assert isinstance(n, str)
        assert isinstance(v, int)
        assert isinstance(is_coinbase, bool)
        if tx_hash not in self.txo:
            self.txo[tx_hash] = {}
        d = self.txo[tx_hash]
        if addr not in d:
            d[addr] = {}
        d[addr][n] = (v, is_coinbase)
@locked
def list_txi(self) -> Sequence[str]:
return list(self.txi.keys())
@locked
def list_txo(self) -> Sequence[str]:
return list(self.txo.keys())
@modifier
def remove_txi(self, tx_hash: str) -> None:
assert isinstance(tx_hash, str)
self.txi.pop(tx_hash, None)
@modifier
def remove_txo(self, tx_hash: str) -> None:
assert isinstance(tx_hash, str)
self.txo.pop(tx_hash, None)
@locked
def list_spent_outpoints(self) -> Sequence[Tuple[str, str]]:
return [(h, n)
for h in self.spent_outpoints.keys()
for n in self.get_spent_outpoints(h)
]
@locked
def get_spent_outpoints(self, prevout_hash: str) -> Sequence[str]:
assert isinstance(prevout_hash, str)
return list(self.spent_outpoints.get(prevout_hash, {}).keys())
@locked
def get_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> Optional[str]:
assert isinstance(prevout_hash, str)
prevout_n = str(prevout_n)
return self.spent_outpoints.get(prevout_hash, {}).get(prevout_n)
    @modifier
    def remove_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> None:
        """Forget the spender of one outpoint, pruning the outer entry if empty."""
        assert isinstance(prevout_hash, str)
        prevout_n = str(prevout_n)
        self.spent_outpoints[prevout_hash].pop(prevout_n, None)  # raises if prevout_hash unknown
        if not self.spent_outpoints[prevout_hash]:
            self.spent_outpoints.pop(prevout_hash)
    @modifier
    def set_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str], tx_hash: str) -> None:
        """Record that *tx_hash* spends outpoint (*prevout_hash*, *prevout_n*).

        Keeps the explicit create-then-index pattern so the StoredDict wrapper
        observes the nested-dict creation.
        """
        assert isinstance(prevout_hash, str)
        assert isinstance(tx_hash, str)
        prevout_n = str(prevout_n)
        if prevout_hash not in self.spent_outpoints:
            self.spent_outpoints[prevout_hash] = {}
        self.spent_outpoints[prevout_hash][prevout_n] = tx_hash
    @modifier
    def add_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
        """Index (prevout, value) under *scripthash* (stored as a string pair)."""
        assert isinstance(scripthash, str)
        assert isinstance(prevout, TxOutpoint)
        assert isinstance(value, int)
        if scripthash not in self._prevouts_by_scripthash:
            self._prevouts_by_scripthash[scripthash] = set()
        self._prevouts_by_scripthash[scripthash].add((prevout.to_str(), value))
    @modifier
    def remove_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
        """Drop (prevout, value) from the *scripthash* index, pruning empty entries."""
        assert isinstance(scripthash, str)
        assert isinstance(prevout, TxOutpoint)
        assert isinstance(value, int)
        # discard (not remove): the pair may legitimately be absent
        self._prevouts_by_scripthash[scripthash].discard((prevout.to_str(), value))
        if not self._prevouts_by_scripthash[scripthash]:
            self._prevouts_by_scripthash.pop(scripthash)
@locked
def get_prevouts_by_scripthash(self, scripthash: str) -> Set[Tuple[TxOutpoint, int]]:
assert isinstance(scripthash, str)
prevouts_and_values = self._prevouts_by_scripthash.get(scripthash, set())
return {(TxOutpoint.from_str(prevout), value) for prevout, value in prevouts_and_values}
@modifier
def add_transaction(self, tx_hash: str, tx: Transaction) -> None:
assert isinstance(tx_hash, str)
assert isinstance(tx, Transaction), tx
if not tx_hash:
raise Exception("trying to add tx to db without txid")
if tx_hash != tx.txid():
raise Exception(f"trying to add tx to db with inconsistent txid: {tx_hash} != {tx.txid()}")
tx_we_already_have = self.transactions.get(tx_hash, None)
if tx_we_already_have is None or isinstance(tx_we_already_have, PartialTransaction):
self.transactions[tx_hash] = tx
@modifier
def remove_transaction(self, tx_hash: str) -> Optional[Transaction]:
assert isinstance(tx_hash, str)
return self.transactions.pop(tx_hash, None)
@locked
def get_transaction(self, tx_hash: Optional[str]) -> Optional[Transaction]:
if tx_hash is None:
return None
assert isinstance(tx_hash, str)
return self.transactions.get(tx_hash)
@locked
def list_transactions(self) -> Sequence[str]:
return list(self.transactions.keys())
@locked
def get_history(self) -> Sequence[str]:
return list(self.history.keys())
def is_addr_in_history(self, addr: str) -> bool:
# does not mean history is non-empty!
assert isinstance(addr, str)
return addr in self.history
    @locked
    def get_addr_history(self, addr: str) -> Sequence[Tuple[str, int]]:
        """(txid, height) pairs recorded for *addr* ([] if none)."""
        assert isinstance(addr, str)
        return self.history.get(addr, [])
    @modifier
    def set_addr_history(self, addr: str, hist) -> None:
        """Replace the stored history of *addr* with *hist*."""
        assert isinstance(addr, str)
        self.history[addr] = hist
    @modifier
    def remove_addr_history(self, addr: str) -> None:
        """Drop the history entry of *addr* (no-op if absent)."""
        assert isinstance(addr, str)
        self.history.pop(addr, None)
@locked
def list_verified_tx(self) -> Sequence[str]:
return list(self.verified_tx.keys())
@locked
def get_verified_tx(self, txid: str) -> Optional[TxMinedInfo]:
assert isinstance(txid, str)
if txid not in self.verified_tx:
return None
height, timestamp, txpos, header_hash = self.verified_tx[txid]
return TxMinedInfo(height=height,
conf=None,
timestamp=timestamp,
txpos=txpos,
header_hash=header_hash)
@modifier
def add_verified_tx(self, txid: str, info: TxMinedInfo):
assert isinstance(txid, str)
assert isinstance(info, TxMinedInfo)
self.verified_tx[txid] = (info.height, info.timestamp, info.txpos, info.header_hash)
@modifier
def remove_verified_tx(self, txid: str):
assert isinstance(txid, str)
self.verified_tx.pop(txid, None)
def is_in_verified_tx(self, txid: str) -> bool:
assert isinstance(txid, str)
return txid in self.verified_tx
    @modifier
    def add_tx_fee_from_server(self, txid: str, fee_sat: Optional[int]) -> None:
        """Store a server-reported fee for *txid*.

        A fee we calculated ourselves always takes precedence and is never
        overwritten by server data.
        """
        assert isinstance(txid, str)
        # note: when called with (fee_sat is None), rm currently saved value
        if txid not in self.tx_fees:
            self.tx_fees[txid] = TxFeesValue()
        tx_fees_value = self.tx_fees[txid]
        if tx_fees_value.is_calculated_by_us:
            return
        self.tx_fees[txid] = tx_fees_value._replace(fee=fee_sat, is_calculated_by_us=False)
    @modifier
    def add_tx_fee_we_calculated(self, txid: str, fee_sat: Optional[int]) -> None:
        """Store a locally calculated fee for *txid*; None is ignored."""
        assert isinstance(txid, str)
        if fee_sat is None:
            return
        assert isinstance(fee_sat, int)
        if txid not in self.tx_fees:
            self.tx_fees[txid] = TxFeesValue()
        self.tx_fees[txid] = self.tx_fees[txid]._replace(fee=fee_sat, is_calculated_by_us=True)
@locked
def get_tx_fee(self, txid: str, *, trust_server: bool = False) -> Optional[int]:
assert isinstance(txid, str)
tx_fees_value = self.tx_fees.get(txid)
if tx_fees_value is None:
return None
if not trust_server and not tx_fees_value.is_calculated_by_us:
return None
return tx_fees_value.fee
    @modifier
    def add_num_inputs_to_tx(self, txid: str, num_inputs: int) -> None:
        """Record the total input count of *txid* (used for fee estimation)."""
        assert isinstance(txid, str)
        assert isinstance(num_inputs, int)
        if txid not in self.tx_fees:
            self.tx_fees[txid] = TxFeesValue()
        self.tx_fees[txid] = self.tx_fees[txid]._replace(num_inputs=num_inputs)
@locked
def get_num_all_inputs_of_tx(self, txid: str) -> Optional[int]:
assert isinstance(txid, str)
tx_fees_value = self.tx_fees.get(txid)
if tx_fees_value is None:
return None
return tx_fees_value.num_inputs
@locked
def get_num_ismine_inputs_of_tx(self, txid: str) -> int:
assert isinstance(txid, str)
txins = self.txi.get(txid, {})
return sum([len(tupls) for addr, tupls in txins.items()])
    @modifier
    def remove_tx_fee(self, txid: str) -> None:
        """Forget fee metadata for *txid* (no-op if absent)."""
        assert isinstance(txid, str)
        self.tx_fees.pop(txid, None)
    @locked
    def get_dict(self, name):
        """Return (creating if needed) the top-level dict stored under *name*."""
        # Warning: interacts un-intuitively with 'put': certain parts
        # of 'data' will have pointers saved as separate variables.
        # The explicit membership test + assignment (rather than setdefault)
        # ensures StoredDict.__setitem__ wraps newly created sub-dicts.
        if name not in self.data:
            self.data[name] = {}
        return self.data[name]
    @locked
    def num_change_addresses(self) -> int:
        """Number of derived change addresses."""
        return len(self.change_addresses)
    @locked
    def num_receiving_addresses(self) -> int:
        """Number of derived receiving addresses."""
        return len(self.receiving_addresses)
    @locked
    def get_change_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
        """Change addresses, optionally sliced [slice_start:slice_stop]."""
        # note: slicing makes a shallow copy
        return self.change_addresses[slice_start:slice_stop]
    @locked
    def get_receiving_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
        """Receiving addresses, optionally sliced [slice_start:slice_stop]."""
        # note: slicing makes a shallow copy
        return self.receiving_addresses[slice_start:slice_stop]
    @modifier
    def add_change_address(self, addr: str) -> None:
        """Append *addr* to the change chain and index it as (1, position)."""
        assert isinstance(addr, str)
        # index must use the pre-append length so it equals the final position
        self._addr_to_addr_index[addr] = (1, len(self.change_addresses))
        self.change_addresses.append(addr)
    @modifier
    def add_receiving_address(self, addr: str) -> None:
        """Append *addr* to the receiving chain and index it as (0, position)."""
        assert isinstance(addr, str)
        self._addr_to_addr_index[addr] = (0, len(self.receiving_addresses))
        self.receiving_addresses.append(addr)
    @locked
    def get_address_index(self, address: str) -> Optional[Sequence[int]]:
        """(is_change, position) for a derived address, or None if unknown."""
        assert isinstance(address, str)
        return self._addr_to_addr_index.get(address)
    @modifier
    def add_imported_address(self, addr: str, d: dict) -> None:
        """Store metadata dict *d* (pubkey/type/...) for imported address *addr*."""
        assert isinstance(addr, str)
        self.imported_addresses[addr] = d
    @modifier
    def remove_imported_address(self, addr: str) -> None:
        """Remove imported address *addr*; raises KeyError if it is not present."""
        assert isinstance(addr, str)
        self.imported_addresses.pop(addr)
    @locked
    def has_imported_address(self, addr: str) -> bool:
        """Whether *addr* was imported into this wallet."""
        assert isinstance(addr, str)
        return addr in self.imported_addresses
    @locked
    def get_imported_addresses(self) -> Sequence[str]:
        """All imported addresses, sorted."""
        return list(sorted(self.imported_addresses.keys()))
    @locked
    def get_imported_address(self, addr: str) -> Optional[dict]:
        """Metadata dict of imported address *addr*, or None."""
        assert isinstance(addr, str)
        return self.imported_addresses.get(addr)
    def load_addresses(self, wallet_type):
        """Bind the address storage to convenience attributes.

        'imported' wallets keep a single dict; deterministic wallets keep
        'receiving'/'change' lists plus a reverse index addr -> (is_change, i).
        """
        if wallet_type == 'imported':
            self.imported_addresses = self.get_dict('addresses')  # type: Dict[str, dict]
        else:
            self.get_dict('addresses')
            for name in ['receiving', 'change']:
                if name not in self.data['addresses']:
                    self.data['addresses'][name] = []
            self.change_addresses = self.data['addresses']['change']
            self.receiving_addresses = self.data['addresses']['receiving']
            self._addr_to_addr_index = {}  # type: Dict[str, Sequence[int]]  # key: address, value: (is_change, index)
            for i, addr in enumerate(self.receiving_addresses):
                self._addr_to_addr_index[addr] = (0, i)
            for i, addr in enumerate(self.change_addresses):
                self._addr_to_addr_index[addr] = (1, i)
    @profiler
    def _load_transactions(self):
        """Wrap self.data in a StoredDict and bind/clean all tx-related indices."""
        self.data = StoredDict(self.data, self, [])
        # references in self.data
        # TODO make all these private
        # txid -> address -> prev_outpoint -> value
        self.txi = self.get_dict('txi')  # type: Dict[str, Dict[str, Dict[str, int]]]
        # txid -> address -> output_index -> (value, is_coinbase)
        self.txo = self.get_dict('txo')  # type: Dict[str, Dict[str, Dict[str, Tuple[int, bool]]]]
        self.transactions = self.get_dict('transactions')  # type: Dict[str, Transaction]
        self.spent_outpoints = self.get_dict('spent_outpoints')  # txid -> output_index -> next_txid
        self.history = self.get_dict('addr_history')  # address -> list of (txid, height)
        self.verified_tx = self.get_dict('verified_tx3')  # txid -> (height, timestamp, txpos, header_hash)
        self.tx_fees = self.get_dict('tx_fees')  # type: Dict[str, TxFeesValue]
        # scripthash -> set of (outpoint, value)
        self._prevouts_by_scripthash = self.get_dict('prevouts_by_scripthash')  # type: Dict[str, Set[Tuple[str, int]]]
        # remove unreferenced tx
        for tx_hash in list(self.transactions.keys()):
            if not self.get_txi_addresses(tx_hash) and not self.get_txo_addresses(tx_hash):
                self.logger.info(f"removing unreferenced tx: {tx_hash}")
                self.transactions.pop(tx_hash)
        # remove unreferenced outpoints
        for prevout_hash in self.spent_outpoints.keys():
            d = self.spent_outpoints[prevout_hash]
            for prevout_n, spending_txid in list(d.items()):
                if spending_txid not in self.transactions:
                    self.logger.info("removing unreferenced spent outpoint")
                    d.pop(prevout_n)
        # token storage (Qtum-specific)
        self.tokens = self.get_dict('tokens')
        # contract_addr + '_' + b58addr -> list(txid, height, log_index)
        self.token_history = self.get_dict('addr_token_history')
        # txid -> tx receipt
        self.tx_receipt = self.get_dict('tx_receipt')
        # txid -> raw tx
        self.token_txs = self.get_dict('token_txs')
        self.smart_contracts = self.get_dict('smart_contracts')
        self.delegations = self.get_dict('delegations')
        if self.token_history:
            token_hist_txids = [x2[0] for x2 in reduce(lambda x1, y1: x1+y1, self.token_history.values())]
        else:
            token_hist_txids = []
        # deserialize raw token txs that are referenced by the token history
        for tx_hash, raw in self.token_txs.items():
            if tx_hash in token_hist_txids:
                tx = Transaction(raw)
                self.token_txs[tx_hash] = tx
    @modifier
    def set_token(self, token: Token):
        """Store (or overwrite) *token* under its canonical key."""
        self.tokens[token.get_key()] = token
    @modifier
    def delete_token(self, key: str):
        """Remove the token and its history for *key* (no-op if absent)."""
        self.tokens.pop(key, None)
        self.token_history.pop(key, None)
@locked
def get_token(self, key: str) -> Optional[Token]:
return Token(*self.tokens.get(key))
    @locked
    def list_tokens(self) -> list:
        """All token keys."""
        return list(self.tokens.keys())
    @modifier
    def set_token_history(self, key: str, hist: list):
        """Replace the history list of token *key*."""
        self.token_history[key] = hist
    @modifier
    def delete_token_history(self, key: str):
        """Drop the history of token *key* (no-op if absent)."""
        self.token_history.pop(key, None)
    @locked
    def get_token_history(self, key: str) -> list:
        """History list of token *key* ([] if none)."""
        return self.token_history.get(key, [])
    @locked
    def list_token_histories(self) -> list:
        """All token keys that have history."""
        return list(self.token_history.keys())
    @modifier
    def set_token_tx(self, txid: str, raw: str):
        """Store a raw token transaction under *txid*."""
        self.token_txs[txid] = raw
    @modifier
    def delete_token_tx(self, txid: str):
        """Drop the token tx stored under *txid* (no-op if absent)."""
        self.token_txs.pop(txid, None)
    @locked
    def get_token_tx(self, txid: str):
        """Token tx stored under *txid* (raw or deserialized), or None."""
        return self.token_txs.get(txid)
    @locked
    def list_token_txs(self) -> list:
        """All token tx ids."""
        return list(self.token_txs.keys())
    @modifier
    def set_tx_receipt(self, txid: str, receipt: list):
        """Store the EVM receipt of *txid*."""
        self.tx_receipt[txid] = receipt
    @modifier
    def delete_tx_receipt(self, txid: str):
        """Drop the receipt of *txid*, returning it (None if absent)."""
        return self.tx_receipt.pop(txid, None)
    @locked
    def get_tx_receipt(self, txid: str) -> list:
        """Receipt of *txid* ([] if none)."""
        return self.tx_receipt.get(txid, [])
    @locked
    def list_tx_receipts(self) -> list:
        """All txids that have receipts."""
        return list(self.tx_receipt.keys())
@modifier
def set_delegation(self, dele: Delegation):
self.delegations[dele.addr] = [dele.staker, dele.fee]
@modifier
def delete_delegation(self, addr: str):
self.delegations.pop(addr, None)
@locked
def get_delegation(self, addr: str) -> Optional[Delegation]:
dele = self.delegations.get(addr, [])
if len(dele) != 2:
return None
return Delegation(addr=addr, staker=dele[0], fee=dele[1])
@locked
def list_delegations(self) -> Sequence[str]:
return list(self.delegations.keys())
@modifier
def clear_history(self):
self.txi.clear()
self.txo.clear()
self.spent_outpoints.clear()
self.transactions.clear()
self.history.clear()
self.verified_tx.clear()
self.tx_fees.clear()
self.token_txs.clear()
self.token_history.clear()
self.tx_receipt.clear()
self._prevouts_by_scripthash.clear()
    def _convert_dict(self, path, key, v):
        """Hook for StoredDict: rehydrate JSON dict values into rich objects,
        dispatched on the key under which the dict is stored."""
        if key == 'transactions':
            # note: for performance, "deserialize=False" so that we will deserialize these on-demand
            v = dict((k, tx_from_any(x, deserialize=False)) for k, x in v.items())
        if key == 'invoices':
            v = dict((k, Invoice.from_json(x)) for k, x in v.items())
        if key == 'payment_requests':
            v = dict((k, Invoice.from_json(x)) for k, x in v.items())
        elif key == 'adds':
            v = dict((k, UpdateAddHtlc.from_tuple(*x)) for k, x in v.items())
        elif key == 'fee_updates':
            v = dict((k, FeeUpdate(**x)) for k, x in v.items())
        elif key == 'submarine_swaps':
            v = dict((k, SwapData(**x)) for k, x in v.items())
        elif key == 'channel_backups':
            v = dict((k, ChannelBackupStorage(**x)) for k, x in v.items())
        elif key == 'tx_fees':
            v = dict((k, TxFeesValue(*x)) for k, x in v.items())
        elif key == 'prevouts_by_scripthash':
            # JSON lists back to sets of (outpoint, value) tuples
            v = dict((k, {(prevout, value) for (prevout, value) in x}) for k, x in v.items())
        elif key == 'buckets':
            v = dict((k, ShachainElement(bfh(x[0]), int(x[1]))) for k, x in v.items())
        elif key == 'data_loss_protect_remote_pcp':
            v = dict((k, bfh(x)) for k, x in v.items())
        return v
def _convert_value(self, path, key, v):
if key == 'local_config':
v = LocalConfig(**v)
elif key == 'remote_config':
v = RemoteConfig(**v)
elif key == 'constraints':
v = ChannelConstraints(**v)
elif key == 'funding_outpoint':
v = Outpoint(**v)
return v
def write(self, storage: 'WalletStorage'):
with self.lock:
self._write(storage)
def _write(self, storage: 'WalletStorage'):
if threading.currentThread().isDaemon():
self.logger.warning('daemon thread cannot write db')
return
if not self.modified():
return
storage.write(self.dump())
self.set_modified(False)
def is_ready_to_be_used_by_wallet(self):
return not self.requires_upgrade() and self._called_after_upgrade_tasks
def split_accounts(self, root_path):
from .storage import WalletStorage
out = []
result = self.get_split_accounts()
for data in result:
path = root_path + '.' + data['suffix']
storage = WalletStorage(path)
db = WalletDB(json.dumps(data), manual_upgrades=False)
db._called_after_upgrade_tasks = False
db.upgrade()
db.write(storage)
out.append(path)
return out
def get_action(self):
action = run_hook('get_action', self)
return action
def load_plugins(self):
wallet_type = self.get('wallet_type')
if wallet_type in plugin_loaders:
plugin_loaders[wallet_type]()
def set_keystore_encryption(self, enable):
self.put('use_encryption', enable)
| true | true |
f7319439ecbf5ae98691f76abcedaa8bb60c23b1 | 4,513 | py | Python | lib/bindings/samples/server/API/social_api.py | tlalexander/stitchEm | cdff821ad2c500703e6cb237ec61139fce7bf11c | [
"MIT"
] | 182 | 2019-04-19T12:38:30.000Z | 2022-03-20T16:48:20.000Z | lib/bindings/samples/server/API/social_api.py | doymcc/stitchEm | 20693a55fa522d7a196b92635e7a82df9917c2e2 | [
"MIT"
] | 107 | 2019-04-23T10:49:35.000Z | 2022-03-02T18:12:28.000Z | lib/bindings/samples/server/API/social_api.py | doymcc/stitchEm | 20693a55fa522d7a196b92635e7a82df9917c2e2 | [
"MIT"
] | 59 | 2019-06-04T11:27:25.000Z | 2022-03-17T23:49:49.000Z | from concurrent.futures import ThreadPoolExecutor
from tornado.concurrent import run_on_executor
from API.handlers import APIHandler
from API.schema import api
import errors
from social.social_factory import SOCIAL_NETWORKS
class SocialAPI(APIHandler):
"""REST interface related to social networks
"""
executor = ThreadPoolExecutor(1)
def __init__(self, extra):
"""Init
"""
super(SocialAPI, self).__init__(extra)
"""
self.server = extra["server"]
self.project_manager = extra["project_manager"]
self.output_manager = extra["output_manager"]
"""
@api(name="MakeLink",
endpoint="social.make_link",
description="Link the box to a user account on a social network",
parameters={
"type": "object",
"properties":
{
"social_network": {
"type": "string",
"enum": ["facebook"]
},
"token": {
"type": "string"
}
},
"required": ["social_network", "token"]
}
)
@run_on_executor
def make_link(self, parameters):
social_network_name = parameters.get("social_network")
token = parameters.get("token")
if social_network_name not in SOCIAL_NETWORKS:
raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
if not SOCIAL_NETWORKS[social_network_name].is_Disconnected():
raise errors.InvalidParameter("cannot link to social network {} as it is in state {}".format(
social_network_name, SOCIAL_NETWORKS[social_network_name].state))
SOCIAL_NETWORKS[social_network_name].t_make_connection(token)
@api(name="MakeLink",
endpoint="social.unmake_link",
description="Remove the link between the box and a user account on a social network",
parameters={
"type": "object",
"properties":
{
"social_network": {
"type": "string",
"enum": ["facebook"]
},
},
"required": ["social_network"]
}
)
@run_on_executor
def unmake_link(self, parameters):
social_network_name = parameters.get("social_network")
if social_network_name not in SOCIAL_NETWORKS:
raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
if not SOCIAL_NETWORKS[social_network_name].is_Connected():
raise errors.InvalidParameter("cannot unlink from social network {} as it is in state {}".format(
social_network_name, SOCIAL_NETWORKS[social_network_name].state))
SOCIAL_NETWORKS[social_network_name].t_remove_connection()
@api(name="Call",
endpoint="social.call",
description="Make a call to a social network",
parameters={
"type": "object",
"properties":
{
"social_network": {
"type": "string",
"enum": ["facebook"]
},
"endpoint" : {
"type": "string"
},
"parameters" : {
"type": "object"
}
},
"required": ["social_network", "endpoint"]
},
result={
"type": "object"
}
)
@run_on_executor
def call(self, parameters):
social_network_name = parameters.get("social_network")
endpoint = parameters.get("endpoint")
call_parameters = parameters.get("parameters")
if social_network_name not in SOCIAL_NETWORKS:
raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
if not SOCIAL_NETWORKS[social_network_name].is_Connected():
raise errors.InvalidParameter("cannot make API call to social network {} as it is in state {}".format(
social_network_name, SOCIAL_NETWORKS[social_network_name].state))
result = SOCIAL_NETWORKS[social_network_name].call_api(endpoint, call_parameters)
return result if result is not None else {} | 39.587719 | 114 | 0.561489 | from concurrent.futures import ThreadPoolExecutor
from tornado.concurrent import run_on_executor
from API.handlers import APIHandler
from API.schema import api
import errors
from social.social_factory import SOCIAL_NETWORKS
class SocialAPI(APIHandler):
executor = ThreadPoolExecutor(1)
def __init__(self, extra):
super(SocialAPI, self).__init__(extra)
@api(name="MakeLink",
endpoint="social.make_link",
description="Link the box to a user account on a social network",
parameters={
"type": "object",
"properties":
{
"social_network": {
"type": "string",
"enum": ["facebook"]
},
"token": {
"type": "string"
}
},
"required": ["social_network", "token"]
}
)
@run_on_executor
def make_link(self, parameters):
social_network_name = parameters.get("social_network")
token = parameters.get("token")
if social_network_name not in SOCIAL_NETWORKS:
raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
if not SOCIAL_NETWORKS[social_network_name].is_Disconnected():
raise errors.InvalidParameter("cannot link to social network {} as it is in state {}".format(
social_network_name, SOCIAL_NETWORKS[social_network_name].state))
SOCIAL_NETWORKS[social_network_name].t_make_connection(token)
@api(name="MakeLink",
endpoint="social.unmake_link",
description="Remove the link between the box and a user account on a social network",
parameters={
"type": "object",
"properties":
{
"social_network": {
"type": "string",
"enum": ["facebook"]
},
},
"required": ["social_network"]
}
)
@run_on_executor
def unmake_link(self, parameters):
social_network_name = parameters.get("social_network")
if social_network_name not in SOCIAL_NETWORKS:
raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
if not SOCIAL_NETWORKS[social_network_name].is_Connected():
raise errors.InvalidParameter("cannot unlink from social network {} as it is in state {}".format(
social_network_name, SOCIAL_NETWORKS[social_network_name].state))
SOCIAL_NETWORKS[social_network_name].t_remove_connection()
@api(name="Call",
endpoint="social.call",
description="Make a call to a social network",
parameters={
"type": "object",
"properties":
{
"social_network": {
"type": "string",
"enum": ["facebook"]
},
"endpoint" : {
"type": "string"
},
"parameters" : {
"type": "object"
}
},
"required": ["social_network", "endpoint"]
},
result={
"type": "object"
}
)
@run_on_executor
def call(self, parameters):
social_network_name = parameters.get("social_network")
endpoint = parameters.get("endpoint")
call_parameters = parameters.get("parameters")
if social_network_name not in SOCIAL_NETWORKS:
raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
if not SOCIAL_NETWORKS[social_network_name].is_Connected():
raise errors.InvalidParameter("cannot make API call to social network {} as it is in state {}".format(
social_network_name, SOCIAL_NETWORKS[social_network_name].state))
result = SOCIAL_NETWORKS[social_network_name].call_api(endpoint, call_parameters)
return result if result is not None else {} | true | true |
f7319447be1c583c56c689293955c02433bab7f1 | 4,941 | py | Python | docs/source/conf.py | Geofroy/meerkat_dev | ebff9d16b3edc6efdc580f940ca8d60e733d9da1 | [
"MIT"
] | null | null | null | docs/source/conf.py | Geofroy/meerkat_dev | ebff9d16b3edc6efdc580f940ca8d60e733d9da1 | [
"MIT"
] | 4 | 2017-11-06T13:11:03.000Z | 2018-05-04T14:57:18.000Z | docs/source/conf.py | meerkat-code/meerkat_dev | c3f4bef39a72aa99a460ed1a0022c7ea594e037c | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
import os
import sys
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('../../'))
# -- Project information -----------------------------------------------------
project = 'meerkat_dev'
copyright = '2018, Meerkat Developers'
author = 'Jonathan Berry'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.autohttp.flask',
'sphinxcontrib.napoleon'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'meerkat_authdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'meerkat_dev.tex', 'meerkat\\_dev Documentation',
'Jonathan Berry', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'meerkat_dev', 'meerkat_dev Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'meerkat_dev', 'meerkat_dev Documentation',
author, 'meerkat_dev', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
| 30.5 | 79 | 0.65088 |
import os
import sys
project = 'meerkat_dev'
copyright = '2018, Meerkat Developers'
author = 'Jonathan Berry'
version = ''
release = ''
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.autohttp.flask',
'sphinxcontrib.napoleon'
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
language = None
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_static_path = ['_static']
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'meerkat_authdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'meerkat_dev.tex', 'meerkat\\_dev Documentation',
'Jonathan Berry', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'meerkat_dev', 'meerkat_dev Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'meerkat_dev', 'meerkat_dev Documentation',
author, 'meerkat_dev', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
| true | true |
f731958aa03e65b62779fd052badafaa8d11eb1d | 8,117 | py | Python | 07_train/privacy/tensorflow_privacy/privacy/membership_inference_attack/membership_inference_attack.py | ichen20/oreilly_book | 8098d8096d9decca6aa5afbb267b9f05ce0570f2 | [
"Apache-2.0"
] | 2,327 | 2020-03-01T09:47:34.000Z | 2021-11-25T12:38:42.000Z | 07_train/privacy/tensorflow_privacy/privacy/membership_inference_attack/membership_inference_attack.py | ichen20/oreilly_book | 8098d8096d9decca6aa5afbb267b9f05ce0570f2 | [
"Apache-2.0"
] | 209 | 2020-03-01T17:14:12.000Z | 2021-11-08T20:35:42.000Z | 07_train/privacy/tensorflow_privacy/privacy/membership_inference_attack/membership_inference_attack.py | ichen20/oreilly_book | 8098d8096d9decca6aa5afbb267b9f05ce0570f2 | [
"Apache-2.0"
] | 686 | 2020-03-03T17:24:51.000Z | 2021-11-25T23:39:12.000Z | # Copyright 2020, The TensorFlow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Code that runs membership inference attacks based on the model outputs.
This file belongs to the new API for membership inference attacks. This file
will be renamed to membership_inference_attack.py after the old API is removed.
"""
from typing import Iterable
import numpy as np
from sklearn import metrics
from tensorflow_privacy.privacy.membership_inference_attack import models
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackInputData
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackResults
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackType
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import \
PrivacyReportMetadata
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import RocCurve
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SingleAttackResult
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SingleSliceSpec
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SlicingSpec
from tensorflow_privacy.privacy.membership_inference_attack.dataset_slicing import get_single_slice_specs
from tensorflow_privacy.privacy.membership_inference_attack.dataset_slicing import get_slice
def _get_slice_spec(data: AttackInputData) -> SingleSliceSpec:
if hasattr(data, 'slice_spec'):
return data.slice_spec
return SingleSliceSpec()
def _run_trained_attack(attack_input: AttackInputData,
                        attack_type: AttackType,
                        balance_attacker_training: bool = True):
  """Run a trained (ML-model based) membership inference attack.

  Trains an attacker model of the requested type on the prepared attacker
  data and evaluates it on the held-out (permuted) examples, returning the
  resulting ROC curve wrapped in a SingleAttackResult.
  """
  if attack_type == AttackType.LOGISTIC_REGRESSION:
    attacker = models.LogisticRegressionAttacker()
  elif attack_type == AttackType.MULTI_LAYERED_PERCEPTRON:
    attacker = models.MultilayerPerceptronAttacker()
  elif attack_type == AttackType.RANDOM_FOREST:
    attacker = models.RandomForestAttacker()
  elif attack_type == AttackType.K_NEAREST_NEIGHBORS:
    attacker = models.KNearestNeighborsAttacker()
  else:
    raise NotImplementedError('Attack type %s not implemented yet.' %
                              attack_type)

  attacker_data = models.create_attacker_data(
      attack_input, balance=balance_attacker_training)
  attacker.train_model(attacker_data.features_train,
                       attacker_data.is_training_labels_train)

  # Score the held-out examples and derive the ROC curve.
  test_scores = attacker.predict(attacker_data.features_test)
  fpr, tpr, thresholds = metrics.roc_curve(
      attacker_data.is_training_labels_test, test_scores)

  return SingleAttackResult(
      slice_spec=_get_slice_spec(attack_input),
      attack_type=attack_type,
      roc_curve=RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds))
def _run_threshold_attack(attack_input: AttackInputData):
  """Run the loss-threshold attack and return its ROC curve result."""
  num_train = attack_input.get_train_size()
  num_test = attack_input.get_test_size()
  # Label 0 for training examples, 1 for test examples; score is the loss.
  membership_labels = np.concatenate(
      (np.zeros(num_train), np.ones(num_test)))
  loss_scores = np.concatenate(
      (attack_input.get_loss_train(), attack_input.get_loss_test()))
  fpr, tpr, thresholds = metrics.roc_curve(membership_labels, loss_scores)
  return SingleAttackResult(
      slice_spec=_get_slice_spec(attack_input),
      attack_type=AttackType.THRESHOLD_ATTACK,
      roc_curve=RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds))
def _run_threshold_entropy_attack(attack_input: AttackInputData):
  """Run the entropy-threshold attack and return its ROC curve result."""
  num_train = attack_input.get_train_size()
  num_test = attack_input.get_test_size()
  # Label 0 for training examples, 1 for test examples; score is the entropy.
  membership_labels = np.concatenate(
      (np.zeros(num_train), np.ones(num_test)))
  entropy_scores = np.concatenate(
      (attack_input.get_entropy_train(), attack_input.get_entropy_test()))
  fpr, tpr, thresholds = metrics.roc_curve(membership_labels, entropy_scores)
  return SingleAttackResult(
      slice_spec=_get_slice_spec(attack_input),
      attack_type=AttackType.THRESHOLD_ENTROPY_ATTACK,
      roc_curve=RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds))
def _run_attack(attack_input: AttackInputData,
                attack_type: AttackType,
                balance_attacker_training: bool = True):
  """Validate the input and dispatch a single attack of *attack_type*."""
  attack_input.validate()
  if attack_type.is_trained_attack:
    result = _run_trained_attack(attack_input, attack_type,
                                 balance_attacker_training)
  elif attack_type == AttackType.THRESHOLD_ENTROPY_ATTACK:
    result = _run_threshold_entropy_attack(attack_input)
  else:
    result = _run_threshold_attack(attack_input)
  return result
def run_attacks(attack_input: AttackInputData,
                slicing_spec: SlicingSpec = None,
                attack_types: Iterable[AttackType] = (
                    AttackType.THRESHOLD_ATTACK,),
                privacy_report_metadata: PrivacyReportMetadata = None,
                balance_attacker_training: bool = True) -> AttackResults:
  """Runs membership inference attacks on a classification model.

  Each slice of attack_input described by slicing_spec is attacked with every
  attack in attack_types, and all individual results are collected.

  Args:
    attack_input: input data for running an attack
    slicing_spec: specifies attack_input slices to run attack on
    attack_types: attacks to run
    privacy_report_metadata: the metadata of the model under attack.
    balance_attacker_training: Whether the training and test sets for the
      membership inference attacker should have a balanced (roughly equal)
      number of samples from the training and test sets used to develop
      the model under attack.

  Returns:
    the attack result.
  """
  attack_input.validate()
  if slicing_spec is None:
    slicing_spec = SlicingSpec(entire_dataset=True)

  attack_results = []
  for slice_spec in get_single_slice_specs(slicing_spec,
                                           attack_input.num_classes):
    # Slice once per spec, then run every requested attack on it.
    sliced_input = get_slice(attack_input, slice_spec)
    for attack_type in attack_types:
      attack_results.append(
          _run_attack(sliced_input, attack_type, balance_attacker_training))

  metadata = _compute_missing_privacy_report_metadata(
      privacy_report_metadata, attack_input)
  return AttackResults(single_attack_results=attack_results,
                       privacy_report_metadata=metadata)
def _compute_missing_privacy_report_metadata(
    metadata: PrivacyReportMetadata,
    attack_input: AttackInputData) -> PrivacyReportMetadata:
  """Populates metadata fields if they are missing."""
  metadata = PrivacyReportMetadata() if metadata is None else metadata
  if metadata.accuracy_train is None:
    metadata.accuracy_train = _get_accuracy(attack_input.logits_train,
                                            attack_input.labels_train)
  if metadata.accuracy_test is None:
    metadata.accuracy_test = _get_accuracy(attack_input.logits_test,
                                           attack_input.labels_test)
  # Fill average losses the same way, one field per split.
  for loss_field, loss_getter in (
      ('loss_train', attack_input.get_loss_train),
      ('loss_test', attack_input.get_loss_test)):
    if getattr(metadata, loss_field) is None:
      setattr(metadata, loss_field, np.average(loss_getter()))
  return metadata
def _get_accuracy(logits, labels):
"""Computes the accuracy if it is missing."""
if logits is None or labels is None:
return None
return metrics.accuracy_score(labels, np.argmax(logits, axis=1))
| 41.203046 | 105 | 0.756684 |
from typing import Iterable
import numpy as np
from sklearn import metrics
from tensorflow_privacy.privacy.membership_inference_attack import models
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackInputData
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackResults
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackType
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import \
PrivacyReportMetadata
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import RocCurve
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SingleAttackResult
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SingleSliceSpec
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SlicingSpec
from tensorflow_privacy.privacy.membership_inference_attack.dataset_slicing import get_single_slice_specs
from tensorflow_privacy.privacy.membership_inference_attack.dataset_slicing import get_slice
def _get_slice_spec(data: AttackInputData) -> SingleSliceSpec:
if hasattr(data, 'slice_spec'):
return data.slice_spec
return SingleSliceSpec()
def _run_trained_attack(attack_input: AttackInputData,
attack_type: AttackType,
balance_attacker_training: bool = True):
attacker = None
if attack_type == AttackType.LOGISTIC_REGRESSION:
attacker = models.LogisticRegressionAttacker()
elif attack_type == AttackType.MULTI_LAYERED_PERCEPTRON:
attacker = models.MultilayerPerceptronAttacker()
elif attack_type == AttackType.RANDOM_FOREST:
attacker = models.RandomForestAttacker()
elif attack_type == AttackType.K_NEAREST_NEIGHBORS:
attacker = models.KNearestNeighborsAttacker()
else:
raise NotImplementedError('Attack type %s not implemented yet.' %
attack_type)
prepared_attacker_data = models.create_attacker_data(
attack_input, balance=balance_attacker_training)
attacker.train_model(prepared_attacker_data.features_train,
prepared_attacker_data.is_training_labels_train)
predictions_test = attacker.predict(prepared_attacker_data.features_test)
fpr, tpr, thresholds = metrics.roc_curve(
prepared_attacker_data.is_training_labels_test, predictions_test)
roc_curve = RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds)
return SingleAttackResult(
slice_spec=_get_slice_spec(attack_input),
attack_type=attack_type,
roc_curve=roc_curve)
def _run_threshold_attack(attack_input: AttackInputData):
fpr, tpr, thresholds = metrics.roc_curve(
np.concatenate((np.zeros(attack_input.get_train_size()),
np.ones(attack_input.get_test_size()))),
np.concatenate(
(attack_input.get_loss_train(), attack_input.get_loss_test())))
roc_curve = RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds)
return SingleAttackResult(
slice_spec=_get_slice_spec(attack_input),
attack_type=AttackType.THRESHOLD_ATTACK,
roc_curve=roc_curve)
def _run_threshold_entropy_attack(attack_input: AttackInputData):
fpr, tpr, thresholds = metrics.roc_curve(
np.concatenate((np.zeros(attack_input.get_train_size()),
np.ones(attack_input.get_test_size()))),
np.concatenate(
(attack_input.get_entropy_train(), attack_input.get_entropy_test())))
roc_curve = RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds)
return SingleAttackResult(
slice_spec=_get_slice_spec(attack_input),
attack_type=AttackType.THRESHOLD_ENTROPY_ATTACK,
roc_curve=roc_curve)
def _run_attack(attack_input: AttackInputData,
attack_type: AttackType,
balance_attacker_training: bool = True):
attack_input.validate()
if attack_type.is_trained_attack:
return _run_trained_attack(attack_input, attack_type,
balance_attacker_training)
if attack_type == AttackType.THRESHOLD_ENTROPY_ATTACK:
return _run_threshold_entropy_attack(attack_input)
return _run_threshold_attack(attack_input)
def run_attacks(attack_input: AttackInputData,
slicing_spec: SlicingSpec = None,
attack_types: Iterable[AttackType] = (
AttackType.THRESHOLD_ATTACK,),
privacy_report_metadata: PrivacyReportMetadata = None,
balance_attacker_training: bool = True) -> AttackResults:
attack_input.validate()
attack_results = []
if slicing_spec is None:
slicing_spec = SlicingSpec(entire_dataset=True)
input_slice_specs = get_single_slice_specs(slicing_spec,
attack_input.num_classes)
for single_slice_spec in input_slice_specs:
attack_input_slice = get_slice(attack_input, single_slice_spec)
for attack_type in attack_types:
attack_results.append(
_run_attack(attack_input_slice, attack_type,
balance_attacker_training))
privacy_report_metadata = _compute_missing_privacy_report_metadata(
privacy_report_metadata, attack_input)
return AttackResults(
single_attack_results=attack_results,
privacy_report_metadata=privacy_report_metadata)
def _compute_missing_privacy_report_metadata(
metadata: PrivacyReportMetadata,
attack_input: AttackInputData) -> PrivacyReportMetadata:
if metadata is None:
metadata = PrivacyReportMetadata()
if metadata.accuracy_train is None:
metadata.accuracy_train = _get_accuracy(attack_input.logits_train,
attack_input.labels_train)
if metadata.accuracy_test is None:
metadata.accuracy_test = _get_accuracy(attack_input.logits_test,
attack_input.labels_test)
if metadata.loss_train is None:
metadata.loss_train = np.average(attack_input.get_loss_train())
if metadata.loss_test is None:
metadata.loss_test = np.average(attack_input.get_loss_test())
return metadata
def _get_accuracy(logits, labels):
if logits is None or labels is None:
return None
return metrics.accuracy_score(labels, np.argmax(logits, axis=1))
| true | true |
f73196366aa14046b85680925a5d5d15177b1086 | 1,779 | py | Python | chainer/snap2model.py | fujibo/poseHG | e582a6ca1badc9a894b8b7e2a5e0acf9eb348c5c | [
"BSD-3-Clause"
] | null | null | null | chainer/snap2model.py | fujibo/poseHG | e582a6ca1badc9a894b8b7e2a5e0acf9eb348c5c | [
"BSD-3-Clause"
] | null | null | null | chainer/snap2model.py | fujibo/poseHG | e582a6ca1badc9a894b8b7e2a5e0acf9eb348c5c | [
"BSD-3-Clause"
] | 1 | 2018-06-30T07:07:25.000Z | 2018-06-30T07:07:25.000Z | import numpy as np
import tempfile
def snap2model_parser(path_snapshot, path_model=None):
    """Extract model parameters from a chainer trainer snapshot by key parsing.

    Keeps only entries stored under ``updater/optimizer:main/model/...`` and
    strips that prefix (and the trailing key component) from each entry name.

    :param path_snapshot: str, path to the trainer snapshot (.npz)
    :param path_model: str, default None
    :return: file descriptor (path_model is None) or None (otherwise)
    """
    model_prefix = ['updater', 'optimizer:main', 'model']
    model = {}
    # Context manager closes the npz archive (the previous version leaked the
    # handle); comparing a slice also avoids IndexError on keys with fewer
    # than three '/'-separated components.
    with np.load(path_snapshot) as snapshot:
        for key in snapshot.keys():
            parts = key.split('/')
            if parts[:3] == model_prefix:
                model_key = '/'.join(parts[3:-1])
                model[model_key] = snapshot[key]
    if path_model is None:
        outfile = tempfile.TemporaryFile()
        np.savez(outfile, **model)
        outfile.seek(0)
        return outfile
    else:
        np.savez(path_model, **model)
        return None
def snap2model_trainer(path_snapshot, path_model=None):
    """Extract the model from a trainer snapshot by rebuilding the trainer.

    Reconstructs a trainer object of the same shape as the one that produced
    the snapshot, loads the snapshot into it, then serializes only the model.

    :param path_snapshot: str, path to the trainer snapshot
    :param path_model: str, default None
    :return: file descriptor (path_model is None) or None (otherwise)
    """
    import chainer
    from dataset import MPIIDataset
    from train import TrainChain
    from net import StackedHG

    model = StackedHG(16)
    train_chain = TrainChain(model)
    optimizer = chainer.optimizers.RMSprop(lr=2.5e-4)
    optimizer.setup(train_chain)

    dataset = MPIIDataset(split='train')
    # NOTE(review): batch size is 1 here while the original training used 6 —
    # presumably only the object structure matters for deserialization; confirm.
    iterator = chainer.iterators.SerialIterator(dataset, 1, repeat=True, shuffle=True)
    updater = chainer.training.StandardUpdater(iterator, optimizer, device=-1)
    trainer = chainer.training.Trainer(updater, (100, 'epoch'), out='')
    chainer.serializers.load_npz(path_snapshot, trainer)

    if path_model is not None:
        chainer.serializers.save_npz(path_model, model)
        return None
    outfile = tempfile.TemporaryFile()
    chainer.serializers.save_npz(outfile, model)
    outfile.seek(0)
    return outfile
| 29.65 | 91 | 0.658797 | import numpy as np
import tempfile
def snap2model_parser(path_snapshot, path_model=None):
snapshot = np.load(path_snapshot)
model = dict()
for key in snapshot.keys():
parse = key.split('/')
if parse[0] == 'updater' and parse[1] == 'optimizer:main':
if parse[2] == 'model':
model_key = '/'.join(parse[3:-1])
model[model_key] = snapshot[key]
if path_model is None:
outfile = tempfile.TemporaryFile()
np.savez(outfile, **model)
outfile.seek(0)
return outfile
else:
np.savez(path_model, **model)
return None
def snap2model_trainer(path_snapshot, path_model=None):
import chainer
from dataset import MPIIDataset
from train import TrainChain
from net import StackedHG
train_data = MPIIDataset(split='train')
model = StackedHG(16)
train_chain = TrainChain(model)
optimizer = chainer.optimizers.RMSprop(lr=2.5e-4)
optimizer.setup(train_chain)
train_iter = chainer.iterators.SerialIterator(train_data, 1, repeat=True, shuffle=True)
updater = chainer.training.StandardUpdater(train_iter, optimizer, device=-1)
trainer = chainer.training.Trainer(updater, (100, 'epoch'), out='')
chainer.serializers.load_npz(path_snapshot, trainer)
if path_model is None:
outfile = tempfile.TemporaryFile()
chainer.serializers.save_npz(outfile, model)
outfile.seek(0)
return outfile
else:
chainer.serializers.save_npz(path_model, model)
return None
| true | true |
f73196906eb86fee01bdd6f96402a0ac464b613b | 1,524 | py | Python | symengine/__init__.py | Meldanya/symengine.py | ec3ce7aef387878abd10a3991e4c3610c98cc70d | [
"MIT"
] | null | null | null | symengine/__init__.py | Meldanya/symengine.py | ec3ce7aef387878abd10a3991e4c3610c98cc70d | [
"MIT"
] | 1 | 2015-10-17T13:22:48.000Z | 2015-10-18T11:43:13.000Z | symengine/__init__.py | Meldanya/symengine.py | ec3ce7aef387878abd10a3991e4c3610c98cc70d | [
"MIT"
] | null | null | null | from .lib.symengine_wrapper import (
have_mpfr, have_mpc, have_flint, have_piranha, have_llvm,
I, E, pi, oo, zoo, nan, Symbol, Dummy, S, sympify, SympifyError,
Integer, Rational, Float, Number, RealNumber, RealDouble, ComplexDouble,
add, Add, Mul, Pow, function_symbol,
Max, Min, DenseMatrix, Matrix,
ImmutableMatrix, ImmutableDenseMatrix, MutableDenseMatrix,
MatrixBase, Basic, DictBasic, symarray, series, diff, zeros,
eye, diag, ones, Derivative, Subs, expand, has_symbol,
UndefFunction, Function, latex,
have_numpy, true, false, Equality, Unequality, GreaterThan,
LessThan, StrictGreaterThan, StrictLessThan, Eq, Ne, Ge, Le,
Gt, Lt, And, Or, Not, Nand, Nor, Xor, Xnor, perfect_power, integer_nthroot,
isprime, sqrt_mod, Expr, cse, count_ops, ccode, Piecewise, Contains, Interval, FiniteSet,
FunctionSymbol as AppliedUndef,
golden_ratio as GoldenRatio,
catalan as Catalan,
eulergamma as EulerGamma
)
from .utilities import var, symbols
from .functions import *
from .printing import init_printing
if have_mpfr:
from .lib.symengine_wrapper import RealMPFR
if have_mpc:
from .lib.symengine_wrapper import ComplexMPC
if have_numpy:
from .lib.symengine_wrapper import (Lambdify, LambdifyCSE)
    def lambdify(args, exprs, **kwargs):
        """Build a Lambdify callable from *args* and an iterable *exprs*.

        Thin convenience wrapper: each expression in *exprs* is forwarded
        as a separate positional argument to Lambdify.
        """
        return Lambdify(args, *exprs, **kwargs)
__version__ = "0.4.0"
def test():
    """Run the test suite that lives beside this module.

    Returns True when every test passes (pytest exit status 0).
    """
    import os
    import pytest
    package_dir = os.path.dirname(os.path.abspath(__file__))
    return not pytest.cmdline.main([package_dir])
| 33.866667 | 93 | 0.723097 | from .lib.symengine_wrapper import (
have_mpfr, have_mpc, have_flint, have_piranha, have_llvm,
I, E, pi, oo, zoo, nan, Symbol, Dummy, S, sympify, SympifyError,
Integer, Rational, Float, Number, RealNumber, RealDouble, ComplexDouble,
add, Add, Mul, Pow, function_symbol,
Max, Min, DenseMatrix, Matrix,
ImmutableMatrix, ImmutableDenseMatrix, MutableDenseMatrix,
MatrixBase, Basic, DictBasic, symarray, series, diff, zeros,
eye, diag, ones, Derivative, Subs, expand, has_symbol,
UndefFunction, Function, latex,
have_numpy, true, false, Equality, Unequality, GreaterThan,
LessThan, StrictGreaterThan, StrictLessThan, Eq, Ne, Ge, Le,
Gt, Lt, And, Or, Not, Nand, Nor, Xor, Xnor, perfect_power, integer_nthroot,
isprime, sqrt_mod, Expr, cse, count_ops, ccode, Piecewise, Contains, Interval, FiniteSet,
FunctionSymbol as AppliedUndef,
golden_ratio as GoldenRatio,
catalan as Catalan,
eulergamma as EulerGamma
)
from .utilities import var, symbols
from .functions import *
from .printing import init_printing
if have_mpfr:
from .lib.symengine_wrapper import RealMPFR
if have_mpc:
from .lib.symengine_wrapper import ComplexMPC
if have_numpy:
from .lib.symengine_wrapper import (Lambdify, LambdifyCSE)
def lambdify(args, exprs, **kwargs):
return Lambdify(args, *exprs, **kwargs)
__version__ = "0.4.0"
def test():
import pytest
import os
return not pytest.cmdline.main(
[os.path.dirname(os.path.abspath(__file__))])
| true | true |
f731975854283587ddab9488f9c9c75d42aa8ed2 | 7,107 | py | Python | databricks/koalas/missing/series.py | dennyglee/koalas | 9781c1ece43b1fa2dce78a0813602e03db0b1fa3 | [
"Apache-2.0"
] | 1 | 2019-05-29T05:46:19.000Z | 2019-05-29T05:46:19.000Z | databricks/koalas/missing/series.py | dennyglee/koalas | 9781c1ece43b1fa2dce78a0813602e03db0b1fa3 | [
"Apache-2.0"
] | null | null | null | databricks/koalas/missing/series.py | dennyglee/koalas | 9781c1ece43b1fa2dce78a0813602e03db0b1fa3 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (C) 2019 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from databricks.koalas.missing import _unsupported_function, _unsupported_property, common
def unsupported_function(method_name, deprecated=False, reason=""):
    """Build a stub for a pd.Series method that is not supported here."""
    return _unsupported_function(
        class_name='pd.Series', method_name=method_name,
        deprecated=deprecated, reason=reason)
def unsupported_property(property_name, deprecated=False, reason=""):
    """Build a stub for a pd.Series property that is not supported here."""
    return _unsupported_property(
        class_name='pd.Series', property_name=property_name,
        deprecated=deprecated, reason=reason)
class _MissingPandasLikeSeries(object):
    """Placeholders for pandas Series APIs not implemented by Koalas.

    Each attribute is replaced by a stub built via unsupported_property /
    unsupported_function (or the shared helpers in ``common``), so touching
    it raises an explicit "unsupported" error instead of an AttributeError.
    """
    # Properties not yet implemented
    axes = unsupported_property('axes')
    iat = unsupported_property('iat')
    # Properties deprecated in pandas itself
    blocks = unsupported_property('blocks', deprecated=True)
    ftypes = unsupported_property('ftypes', deprecated=True)
    ftype = unsupported_property('ftype', deprecated=True)
    is_copy = unsupported_property('is_copy', deprecated=True)
    ix = unsupported_property('ix', deprecated=True)
    asobject = unsupported_property('asobject', deprecated=True)
    strides = unsupported_property('strides', deprecated=True)
    imag = unsupported_property('imag', deprecated=True)
    itemsize = unsupported_property('itemsize', deprecated=True)
    data = unsupported_property('data', deprecated=True)
    base = unsupported_property('base', deprecated=True)
    flags = unsupported_property('flags', deprecated=True)
    # Functions not yet implemented
    align = unsupported_function('align')
    argsort = unsupported_function('argsort')
    asfreq = unsupported_function('asfreq')
    asof = unsupported_function('asof')
    at_time = unsupported_function('at_time')
    autocorr = unsupported_function('autocorr')
    between_time = unsupported_function('between_time')
    bfill = unsupported_function('bfill')
    combine = unsupported_function('combine')
    combine_first = unsupported_function('combine_first')
    cov = unsupported_function('cov')
    divmod = unsupported_function('divmod')
    dot = unsupported_function('dot')
    droplevel = unsupported_function('droplevel')
    duplicated = unsupported_function('duplicated')
    ewm = unsupported_function('ewm')
    factorize = unsupported_function('factorize')
    ffill = unsupported_function('ffill')
    filter = unsupported_function('filter')
    first = unsupported_function('first')
    get = unsupported_function('get')
    infer_objects = unsupported_function('infer_objects')
    interpolate = unsupported_function('interpolate')
    items = unsupported_function('items')
    iteritems = unsupported_function('iteritems')
    last = unsupported_function('last')
    last_valid_index = unsupported_function('last_valid_index')
    mad = unsupported_function('mad')
    pct_change = unsupported_function('pct_change')
    prod = unsupported_function('prod')
    product = unsupported_function('product')
    ravel = unsupported_function('ravel')
    rdivmod = unsupported_function('rdivmod')
    reindex = unsupported_function('reindex')
    reindex_like = unsupported_function('reindex_like')
    rename_axis = unsupported_function('rename_axis')
    reorder_levels = unsupported_function('reorder_levels')
    repeat = unsupported_function('repeat')
    resample = unsupported_function('resample')
    searchsorted = unsupported_function('searchsorted')
    sem = unsupported_function('sem')
    set_axis = unsupported_function('set_axis')
    slice_shift = unsupported_function('slice_shift')
    squeeze = unsupported_function('squeeze')
    swapaxes = unsupported_function('swapaxes')
    swaplevel = unsupported_function('swaplevel')
    tail = unsupported_function('tail')
    take = unsupported_function('take')
    to_hdf = unsupported_function('to_hdf')
    to_period = unsupported_function('to_period')
    to_sql = unsupported_function('to_sql')
    to_timestamp = unsupported_function('to_timestamp')
    tshift = unsupported_function('tshift')
    tz_convert = unsupported_function('tz_convert')
    tz_localize = unsupported_function('tz_localize')
    unstack = unsupported_function('unstack')
    view = unsupported_function('view')
    # Functions deprecated in pandas itself
    as_blocks = unsupported_function('as_blocks', deprecated=True)
    as_matrix = unsupported_function('as_matrix', deprecated=True)
    clip_lower = unsupported_function('clip_lower', deprecated=True)
    clip_upper = unsupported_function('clip_upper', deprecated=True)
    compress = unsupported_function('compress', deprecated=True)
    convert_objects = unsupported_function('convert_objects', deprecated=True)
    get_ftype_counts = unsupported_function('get_ftype_counts', deprecated=True)
    get_value = unsupported_function('get_value', deprecated=True)
    nonzero = unsupported_function('nonzero', deprecated=True)
    reindex_axis = unsupported_function('reindex_axis', deprecated=True)
    select = unsupported_function('select', deprecated=True)
    set_value = unsupported_function('set_value', deprecated=True)
    valid = unsupported_function('valid', deprecated=True)
    get_values = unsupported_function('get_values', deprecated=True)
    to_dense = unsupported_function('to_dense', deprecated=True)
    to_sparse = unsupported_function('to_sparse', deprecated=True)
    to_msgpack = unsupported_function('to_msgpack', deprecated=True)
    compound = unsupported_function('compound', deprecated=True)
    put = unsupported_function('put', deprecated=True)
    item = unsupported_function('item', deprecated=True)
    ptp = unsupported_function('ptp', deprecated=True)
    argmax = unsupported_function('argmax', deprecated=True)
    argmin = unsupported_function('argmin', deprecated=True)
    # Properties we won't support (custom stubs with an explanation).
    values = common.values(unsupported_property)
    array = common.array(unsupported_property)
    real = unsupported_property(
        'real',
        reason="If you want to collect your data as an NumPy array, use 'to_numpy()' instead.")
    nbytes = unsupported_property(
        'nbytes',
        reason="'nbytes' requires to compute whole dataset. You can calculate manually it, "
               "with its 'itemsize', by explicitly executing its count. Use Spark's web UI "
               "to monitor disk and memory usage of your application in general.")
    # Functions we won't support (custom stubs with an explanation).
    memory_usage = common.memory_usage(unsupported_function)
    to_pickle = common.to_pickle(unsupported_function)
    to_xarray = common.to_xarray(unsupported_function)
    __iter__ = common.__iter__(unsupported_function)
| 47.066225 | 95 | 0.74307 |
from databricks.koalas.missing import _unsupported_function, _unsupported_property, common
def unsupported_function(method_name, deprecated=False, reason=""):
return _unsupported_function(class_name='pd.Series', method_name=method_name,
deprecated=deprecated, reason=reason)
def unsupported_property(property_name, deprecated=False, reason=""):
return _unsupported_property(class_name='pd.Series', property_name=property_name,
deprecated=deprecated, reason=reason)
class _MissingPandasLikeSeries(object):
axes = unsupported_property('axes')
iat = unsupported_property('iat')
blocks = unsupported_property('blocks', deprecated=True)
ftypes = unsupported_property('ftypes', deprecated=True)
ftype = unsupported_property('ftype', deprecated=True)
is_copy = unsupported_property('is_copy', deprecated=True)
ix = unsupported_property('ix', deprecated=True)
asobject = unsupported_property('asobject', deprecated=True)
strides = unsupported_property('strides', deprecated=True)
imag = unsupported_property('imag', deprecated=True)
itemsize = unsupported_property('itemsize', deprecated=True)
data = unsupported_property('data', deprecated=True)
base = unsupported_property('base', deprecated=True)
flags = unsupported_property('flags', deprecated=True)
align = unsupported_function('align')
argsort = unsupported_function('argsort')
asfreq = unsupported_function('asfreq')
asof = unsupported_function('asof')
at_time = unsupported_function('at_time')
autocorr = unsupported_function('autocorr')
between_time = unsupported_function('between_time')
bfill = unsupported_function('bfill')
combine = unsupported_function('combine')
combine_first = unsupported_function('combine_first')
cov = unsupported_function('cov')
divmod = unsupported_function('divmod')
dot = unsupported_function('dot')
droplevel = unsupported_function('droplevel')
duplicated = unsupported_function('duplicated')
ewm = unsupported_function('ewm')
factorize = unsupported_function('factorize')
ffill = unsupported_function('ffill')
filter = unsupported_function('filter')
first = unsupported_function('first')
get = unsupported_function('get')
infer_objects = unsupported_function('infer_objects')
interpolate = unsupported_function('interpolate')
items = unsupported_function('items')
iteritems = unsupported_function('iteritems')
last = unsupported_function('last')
last_valid_index = unsupported_function('last_valid_index')
mad = unsupported_function('mad')
pct_change = unsupported_function('pct_change')
prod = unsupported_function('prod')
product = unsupported_function('product')
ravel = unsupported_function('ravel')
rdivmod = unsupported_function('rdivmod')
reindex = unsupported_function('reindex')
reindex_like = unsupported_function('reindex_like')
rename_axis = unsupported_function('rename_axis')
reorder_levels = unsupported_function('reorder_levels')
repeat = unsupported_function('repeat')
resample = unsupported_function('resample')
searchsorted = unsupported_function('searchsorted')
sem = unsupported_function('sem')
set_axis = unsupported_function('set_axis')
slice_shift = unsupported_function('slice_shift')
squeeze = unsupported_function('squeeze')
swapaxes = unsupported_function('swapaxes')
swaplevel = unsupported_function('swaplevel')
tail = unsupported_function('tail')
take = unsupported_function('take')
to_hdf = unsupported_function('to_hdf')
to_period = unsupported_function('to_period')
to_sql = unsupported_function('to_sql')
to_timestamp = unsupported_function('to_timestamp')
tshift = unsupported_function('tshift')
tz_convert = unsupported_function('tz_convert')
tz_localize = unsupported_function('tz_localize')
unstack = unsupported_function('unstack')
view = unsupported_function('view')
as_blocks = unsupported_function('as_blocks', deprecated=True)
as_matrix = unsupported_function('as_matrix', deprecated=True)
clip_lower = unsupported_function('clip_lower', deprecated=True)
clip_upper = unsupported_function('clip_upper', deprecated=True)
compress = unsupported_function('compress', deprecated=True)
convert_objects = unsupported_function('convert_objects', deprecated=True)
get_ftype_counts = unsupported_function('get_ftype_counts', deprecated=True)
get_value = unsupported_function('get_value', deprecated=True)
nonzero = unsupported_function('nonzero', deprecated=True)
reindex_axis = unsupported_function('reindex_axis', deprecated=True)
select = unsupported_function('select', deprecated=True)
set_value = unsupported_function('set_value', deprecated=True)
valid = unsupported_function('valid', deprecated=True)
get_values = unsupported_function('get_values', deprecated=True)
to_dense = unsupported_function('to_dense', deprecated=True)
to_sparse = unsupported_function('to_sparse', deprecated=True)
to_msgpack = unsupported_function('to_msgpack', deprecated=True)
compound = unsupported_function('compound', deprecated=True)
put = unsupported_function('put', deprecated=True)
item = unsupported_function('item', deprecated=True)
ptp = unsupported_function('ptp', deprecated=True)
argmax = unsupported_function('argmax', deprecated=True)
argmin = unsupported_function('argmin', deprecated=True)
values = common.values(unsupported_property)
array = common.array(unsupported_property)
real = unsupported_property(
'real',
reason="If you want to collect your data as an NumPy array, use 'to_numpy()' instead.")
nbytes = unsupported_property(
'nbytes',
reason="'nbytes' requires to compute whole dataset. You can calculate manually it, "
"with its 'itemsize', by explicitly executing its count. Use Spark's web UI "
"to monitor disk and memory usage of your application in general.")
memory_usage = common.memory_usage(unsupported_function)
to_pickle = common.to_pickle(unsupported_function)
to_xarray = common.to_xarray(unsupported_function)
__iter__ = common.__iter__(unsupported_function)
| true | true |
f73197e08ced9d0164ad4b85df778656c785b84a | 3,279 | py | Python | cube_root.py | scottwedge/CSC110 | 8d1e39de183e3ea477dac993d0fa9b721feca1ca | [
"MIT"
] | null | null | null | cube_root.py | scottwedge/CSC110 | 8d1e39de183e3ea477dac993d0fa9b721feca1ca | [
"MIT"
] | 1 | 2020-09-04T02:04:33.000Z | 2020-09-04T02:04:33.000Z | cube_root.py | scottwedge/CSC110 | 8d1e39de183e3ea477dac993d0fa9b721feca1ca | [
"MIT"
] | 1 | 2020-09-04T01:51:55.000Z | 2020-09-04T01:51:55.000Z |
# Sample program illustrating programmer-defined functions
# along with code that calls the functions
#
# The 'cube_root' function uses a 'return' statement. Its
# only job is to calculate and "return" the cube root of a
# number. It does not print anything. Notice that statements
# that "call" the 'cube_root' function need to USE the value
# returned. They can do this by a) saving it in a variable,
# b) using it in a 'print' statement, or c) using the value in
# ANY general expression. Imagine that the value returned by
# the function REPLACES the function call wherever it occurs.
# This is EXACTLY the same way you use built-in functions like
# 'input()', 'abs()', 'round()', etc.
#
# The 'show_table' function does NOT use a 'return' statement.
# It's job is to PRINT a table. Different functions may be
# used in different ways.
#
# CSC 110
# Winter 2013
# The cube_root function calculates and RETURNS the cube root
# of a number. If the value of 'x' is negative, a negative
# real number is returned.
def cube_root(x):
    """Return the real cube root of x.

    A negative argument yields a negative real result (instead of the
    complex value that raising a negative number to 1/3 would give).
    """
    if x < 0:
        return -((-x) ** (1.0 / 3.0))
    return x ** (1.0 / 3.0)
# Main program
def main():
    """Demonstrate several ways of calling the cube_root function."""
    print('Let\'s examine the cube roots of some numbers.\n')
    # Call the function and keep the returned value in a variable:
    num = 27
    root = cube_root(num)                      # the argument is a variable
    print('Cube root of ' + str(num) + ' is ' + format(root, '.1f'))
    root = cube_root(num + 98)                 # the argument is an expression
    print(root)
    # A call can also sit directly inside a 'print' statement:
    print('Cube root of ' + str(num) + ' is ' + format(cube_root(num), '.1f'))
    # ...or be combined inside a larger expression:
    print('The answer is', cube_root(8) + cube_root(1000) / 2)
    print('The answer (rounded) is', round(cube_root(8) + cube_root(1000) / 2))
    # A short table of cube roots:
    print('\n n cube_root(n)')  # header row
    for num in (8, 31, 1727, 1728, 1729):
        print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
    # The same table produced by iterating over a list:
    print("using loop")
    for num in [8, 31, 1727, 1728, 1729]:
        print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
    # Two longer tables:
    show_table(0, 1000, 10)
    show_table(42875, 1000000, 20)
# This function shows a table of cube roots.
# The first two parameters are the minimum and maximum values for 'n'.
# The third parameter is the number of rows to show in the table.
def show_table(minN, maxN, rows):
    """Print a table of cube roots for 'rows' evenly spaced values.

    minN and maxN bound the sampled range; 'rows' is the number of table
    rows, so there are (rows - 1) equal intervals between samples.
    """
    interval = (maxN - minN) / (rows - 1.0)
    print('\n n cube_root(n)')  # header row
    for row in range(rows):
        value = minN + row * interval  # sample for this row
        print(format(value, '12.3f'), format(cube_root(value), '8.3f'))
# Run the program (executes on import as well as when run directly).
main()
# used in different ways.
#
# CSC 110
# Winter 2013
# The cube_root function calculates and RETURNS the cube root
# of a number. If the value of 'x' is negative, a negative
# real number is returned.
def cube_root(x):
if x < 0:
result = -( (-x)**(1.0/3.0) )
else:
result = x ** (1.0/3.0)
return result
# Main program
def main():
print('Let\'s examine the cube roots of some numbers.\n')
num = 27
root = cube_root(num)
print('Cube root of ' + str(num) + ' is ' + format(root, '.1f'))
root = cube_root(num + 98)
print(root)
print('Cube root of ' + str(num) + ' is ' + format(cube_root(num), '.1f'))
print('The answer is', cube_root(8) + cube_root(1000) / 2)
print('The answer (rounded) is', round(cube_root(8) + cube_root(1000) / 2))
print('\n n cube_root(n)')
num = 8
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
num = 31
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
num = 1727
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
num = 1728
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
num = 1729
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
print("using loop")
x = [8,31,1727,1728,1729]
for num in x:
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
show_table(0, 1000, 10)
show_table(42875, 1000000, 20)
def show_table(minN, maxN, rows):
step = (maxN - minN) / (rows - 1.0)
print('\n n cube_root(n)')
for i in range(rows):
n = minN + i * step
print(format(n, '12.3f'), format(cube_root(n), '8.3f'))
main() | true | true |
f73198cc95d621ef4641d770730e34b065a75e7a | 1,470 | py | Python | MultiMediaAnalyse/Task2/main.py | wsh-nie/Assignments | e1f800ca9abe54218f9b695dce843ec96a99710c | [
"MIT"
] | null | null | null | MultiMediaAnalyse/Task2/main.py | wsh-nie/Assignments | e1f800ca9abe54218f9b695dce843ec96a99710c | [
"MIT"
] | null | null | null | MultiMediaAnalyse/Task2/main.py | wsh-nie/Assignments | e1f800ca9abe54218f9b695dce843ec96a99710c | [
"MIT"
] | null | null | null | import cv2
def SIFT(imgname1, imgname2):
    """Match SIFT keypoints between two images and draw the good matches.

    Returns an image with the matches that pass the 0.70 ratio test drawn
    between the two inputs.
    """
    detector = cv2.xfeatures2d.SIFT_create()
    img1 = cv2.imread(imgname1)
    img2 = cv2.imread(imgname2)
    kp1, des1 = detector.detectAndCompute(img1, None)
    kp2, des2 = detector.detectAndCompute(img2, None)
    FLANN_INDEX_KDTREE = 0
    matcher = cv2.FlannBasedMatcher(
        dict(algorithm=FLANN_INDEX_KDTREE, trees=5), dict(checks=50))
    matches = matcher.knnMatch(des1, des2, k=2)
    # Ratio test: keep a match only when it is clearly better than the
    # second-best candidate.
    good = [[best] for best, second in matches
            if best.distance < 0.70 * second.distance]
    return cv2.drawMatchesKnn(img1, kp1, img2, kp2, good, None, flags=2)
def ORB(imgname1, imgname2):
    """Match ORB keypoints between two images and draw the good matches.

    Uses brute-force matching plus a 0.8 ratio test.
    """
    detector = cv2.ORB_create()
    img1 = cv2.imread(imgname1)
    img2 = cv2.imread(imgname2)
    kp1, des1 = detector.detectAndCompute(img1, None)
    kp2, des2 = detector.detectAndCompute(img2, None)
    matcher = cv2.BFMatcher()
    matches = matcher.knnMatch(des1, des2, k=2)
    good = [[best] for best, second in matches
            if best.distance < 0.8 * second.distance]
    return cv2.drawMatchesKnn(img1, kp1, img2, kp2, good, None, flags=2)
if __name__ == '__main__':
    # Compare both feature matchers on the same image pair, display the
    # results, and also save them to disk.
    left, right = '1.jpg', '2.jpg'
    sift_result = SIFT(left, right)
    orb_result = ORB(left, right)
    cv2.imshow("SIFT", sift_result)
    cv2.imwrite("sift.jpg", sift_result)
    cv2.imshow("ORB", orb_result)
    cv2.imwrite("orb.jpg", orb_result)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
| 25.344828 | 66 | 0.646259 | import cv2
def SIFT(imgname1, imgname2):
sift = cv2.xfeatures2d.SIFT_create()
img1 = cv2.imread(imgname1)
img2 = cv2.imread(imgname2)
kp1, des1 = sift.detectAndCompute(img1, None)
kp2, des2 = sift.detectAndCompute(img2, None)
FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
search_params = dict(checks=50)
flann = cv2.FlannBasedMatcher(index_params, search_params)
matches = flann.knnMatch(des1,des2,k=2)
good = []
for m,n in matches:
if m.distance < 0.70*n.distance:
good.append([m])
img3 = cv2.drawMatchesKnn(img1,kp1,img2,kp2,good,None,flags=2)
return img3
def ORB(imgname1, imgname2):
orb = cv2.ORB_create()
img1 = cv2.imread(imgname1)
img2 = cv2.imread(imgname2)
kp1, des1 = orb.detectAndCompute(img1,None)
kp2, des2 = orb.detectAndCompute(img2,None)
bf = cv2.BFMatcher()
matches = bf.knnMatch(des1,des2, k=2)
good = []
for m,n in matches:
if m.distance < 0.8*n.distance:
good.append([m])
img3 = cv2.drawMatchesKnn(img1,kp1,img2,kp2,good,None,flags=2)
return img3
if __name__ == '__main__':
imgname1 = '1.jpg'
imgname2 = '2.jpg'
img3 = SIFT(imgname1, imgname2)
img4 = ORB(imgname1, imgname2)
cv2.imshow("SIFT", img3)
cv2.imwrite("sift.jpg",img3)
cv2.imshow("ORB", img4)
cv2.imwrite("orb.jpg",img4)
cv2.waitKey(0)
cv2.destroyAllWindows()
| true | true |
f73198d4417ffcba32dc97836964e3ca95693c56 | 3,900 | py | Python | romp/lib/loss_funcs/params_loss.py | vltmedia/ROMP | 1d2d96bd39f67a0a86ce7e397e3af856b3c5ee00 | [
"MIT"
] | 385 | 2021-03-30T15:40:31.000Z | 2022-03-31T21:52:52.000Z | romp/lib/loss_funcs/params_loss.py | vltmedia/ROMP | 1d2d96bd39f67a0a86ce7e397e3af856b3c5ee00 | [
"MIT"
] | 154 | 2021-03-31T11:28:27.000Z | 2022-03-31T08:00:45.000Z | romp/lib/loss_funcs/params_loss.py | vltmedia/ROMP | 1d2d96bd39f67a0a86ce7e397e3af856b3c5ee00 | [
"MIT"
] | 102 | 2021-04-15T06:43:00.000Z | 2022-03-31T12:40:08.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
import torch.nn as nn
import time
import pickle
import numpy as np
import config
import constants
from config import args
from utils import batch_rodrigues, rotation_matrix_to_angle_axis
def batch_l2_loss(real, predict):
    """Mean per-sample L2 distance between *real* and *predict* along dim 1."""
    per_sample = torch.norm(real - predict, p=2, dim=1)
    return per_sample.mean()
def batch_l2_loss_param(real, predict):
    """Per-sample distance between pose parameters, measured in rotation space.

    Converting the axis-angle parameters to rotation matrices first maps
    angles that differ by a full period onto the same rotation before the
    distance is taken.
    """
    batch_size = real.shape[0]
    # (N*J)*3 axis-angle -> (N*J)*3*3 rotation matrices
    real_rot = batch_rodrigues(real.reshape(-1, 3)).contiguous()
    pred_rot = batch_rodrigues(predict.reshape(-1, 3)).contiguous()
    # Frobenius norm of the per-joint matrix difference, averaged per sample.
    per_joint = torch.norm((real_rot - pred_rot).view(-1, 9), p=2, dim=-1)
    return per_joint.reshape(batch_size, -1).mean(-1)
def _calc_MPJAE(rel_pose_pred, rel_pose_real):
    """Per-joint angle error (degrees, numpy array) between two relative poses."""
    global_pred = trans_relative_rot_to_global_rotmat(
        rel_pose_pred, with_global_rot=True)
    global_real = trans_relative_rot_to_global_rotmat(
        rel_pose_real, with_global_rot=True)
    return _calc_joint_angle_error(global_pred, global_real).cpu().numpy()
def trans_relative_rot_to_global_rotmat(params, with_global_rot=False):
    '''Accumulate per-joint relative rotations into global rotation matrices.

    Calculates the absolute rotation matrix in the global coordinate frame
    for K = 9 selected body parts; each rotation maps the local bone
    coordinate frame to the global one.  Part order:
    root (JOINT 0), left hip (1), right hip (2), left knee (4),
    right knee (5), left shoulder (16), right shoulder (17),
    left elbow (18), right elbow (19).
    Parent kinetic tree:
    [-1, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 9, 12, 13, 14, 16, 17,
     18, 19, 20, 21]

    :param params: (batch, 3*J) flattened axis-angle parameters.
    :param with_global_rot: when True the root rotation seeds the chain;
        when False the chain starts from the identity (root ignored).
    :return: (batch, 9, 3, 3) tensor of global rotation matrices.
    '''
    batch_size, param_num = params.shape[0], params.shape[1] // 3
    pose_rotmat = batch_rodrigues(params.reshape(-1, 3)).view(
        batch_size, param_num, 3, 3).contiguous()
    # `np.int` was deprecated in NumPy 1.20 and removed in 1.24; use a
    # concrete integer dtype for the index arrays instead.
    if with_global_rot:
        sellect_joints = np.array([0, 1, 2, 4, 5, 16, 17, 18, 19],
                                  dtype=np.int64)
        results = [pose_rotmat[:, 0]]
        for idx in range(param_num - 1):
            i_val = int(idx + 1)
            joint_rot = pose_rotmat[:, i_val]
            parent = constants.kintree_parents[i_val]
            # Chain: global(joint) = global(parent) @ relative(joint)
            glob_transf_mat = torch.matmul(results[parent], joint_rot)
            results.append(glob_transf_mat)
    else:
        sellect_joints = np.array([1, 2, 4, 5, 16, 17, 18, 19],
                                  dtype=np.int64) - 1
        # NOTE(review): hard-codes CUDA for the identity seed; this path
        # fails on CPU-only hosts — confirm whether that is intended.
        results = [torch.eye(3, 3)[None].cuda().repeat(batch_size, 1, 1)]
        for i_val in range(param_num - 1):
            joint_rot = pose_rotmat[:, i_val]
            parent = constants.kintree_parents[i_val + 1]
            glob_transf_mat = torch.matmul(results[parent], joint_rot)
            results.append(glob_transf_mat)
    global_rotmat = torch.stack(results, axis=1)[:, sellect_joints].contiguous()
    return global_rotmat
def _calc_joint_angle_error(pred_mat, gt_mat, return_axis_angle=False):
    """Geodesic distance (in degrees) between corresponding rotation matrices.

    :param pred_mat: predicted rotation matrices. Shape: (Seq, 9, 3, 3)
    :param gt_mat: ground truth rotation matrices. Shape: (Seq, 9, 3, 3)
    :param return_axis_angle: also return the relative axis-angle vectors.
    :return: angle tensor, or (angles, axis_angles) when requested.
    """
    # Flatten to B x 3 x 3 and form the relative rotation r1 * r2^T;
    # for a perfect match this product is the identity.
    pred = pred_mat.reshape(-1, 3, 3)
    gt_transposed = gt_mat.reshape(-1, 3, 3).permute(0, 2, 1)
    relative = torch.matmul(pred, gt_transposed)
    # The norm of the axis-angle representation is the rotation angle.
    axis_angles = rotation_matrix_to_angle_axis(relative)
    angles = torch.norm(axis_angles, dim=-1) * (180. / np.pi)
    if return_axis_angle:
        return angles, axis_angles
    return angles
| 42.857143 | 120 | 0.691538 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
import torch.nn as nn
import time
import pickle
import numpy as np
import config
import constants
from config import args
from utils import batch_rodrigues, rotation_matrix_to_angle_axis
def batch_l2_loss(real,predict):
loss_batch = torch.norm(real-predict, p=2, dim=1)
return loss_batch.mean()
def batch_l2_loss_param(real,predict):
batch_size = real.shape[0]
real = batch_rodrigues(real.reshape(-1,3)).contiguous()
predict = batch_rodrigues(predict.reshape(-1,3)).contiguous()
loss = torch.norm((real-predict).view(-1,9), p=2, dim=-1) loss = loss.reshape(batch_size, -1).mean(-1)
return loss
def _calc_MPJAE(rel_pose_pred,rel_pose_real):
global_pose_rotmat_pred = trans_relative_rot_to_global_rotmat(rel_pose_pred, with_global_rot=True)
global_pose_rotmat_real = trans_relative_rot_to_global_rotmat(rel_pose_real, with_global_rot=True)
MPJAE_error = _calc_joint_angle_error(global_pose_rotmat_pred, global_pose_rotmat_real).cpu().numpy()
return MPJAE_error
def trans_relative_rot_to_global_rotmat(params, with_global_rot=False):
batch_size, param_num = params.shape[0], params.shape[1]//3
pose_rotmat = batch_rodrigues(params.reshape(-1,3)).view(batch_size, param_num, 3, 3).contiguous()
if with_global_rot:
sellect_joints = np.array([0,1,2,4,5,16,17,18,19],dtype=np.int)
results = [pose_rotmat[:, 0]]
for idx in range(param_num-1):
i_val = int(idx + 1)
joint_rot = pose_rotmat[:, i_val]
parent = constants.kintree_parents[i_val]
glob_transf_mat = torch.matmul(results[parent], joint_rot)
results.append(glob_transf_mat)
else:
sellect_joints = np.array([1,2,4,5,16,17,18,19],dtype=np.int)-1
results = [torch.eye(3,3)[None].cuda().repeat(batch_size,1,1)]
for i_val in range(param_num-1):
joint_rot = pose_rotmat[:, i_val]
parent = constants.kintree_parents[i_val+1]
glob_transf_mat = torch.matmul(results[parent], joint_rot)
results.append(glob_transf_mat)
global_rotmat = torch.stack(results, axis=1)[:, sellect_joints].contiguous()
return global_rotmat
def _calc_joint_angle_error(pred_mat, gt_mat, return_axis_angle=False):
    """Angle (in degrees) of the relative rotation between matrix sets.

    Computes R_rel = R_pred @ R_gt^T for each joint; the magnitude of its
    axis-angle representation is the angular error.

    :param pred_mat: predicted rotation matrices, reshapeable to (-1, 3, 3).
    :param gt_mat: ground-truth rotation matrices, same shape convention.
    :param return_axis_angle: if True, also return the axis-angle vectors.
    :returns: angles in degrees, optionally with the axis-angle tensor.
    """
    pred = pred_mat.reshape(-1, 3, 3)
    gt = gt_mat.reshape(-1, 3, 3)
    relative = torch.matmul(pred, gt.permute(0, 2, 1))
    axis_angles = rotation_matrix_to_angle_axis(relative)
    angles = torch.norm(axis_angles, dim=-1) * (180. / np.pi)
    if return_axis_angle:
        return angles, axis_angles
    return angles
| true | true |
f7319a140d0f0b8ef8a302e45f0ea738b572ba7a | 63,805 | py | Python | ironic/drivers/modules/drac/raid.py | inmotionhosting/ironic | 1c7b5f82592e23ab66dddca56e0b059d3cb0710b | [
"Apache-2.0"
] | 1 | 2021-02-27T02:48:59.000Z | 2021-02-27T02:48:59.000Z | ironic/drivers/modules/drac/raid.py | inmotionhosting/ironic | 1c7b5f82592e23ab66dddca56e0b059d3cb0710b | [
"Apache-2.0"
] | null | null | null | ironic/drivers/modules/drac/raid.py | inmotionhosting/ironic | 1c7b5f82592e23ab66dddca56e0b059d3cb0710b | [
"Apache-2.0"
] | null | null | null | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC RAID specific methods
"""
from collections import defaultdict
import math
from futurist import periodics
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import units
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import raid as raid_common
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.conf import CONF
from ironic.drivers import base
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules.drac import common as drac_common
from ironic.drivers.modules.drac import job as drac_job
# python-dracclient is an optional dependency; import it lazily so this
# module can still be imported when the library is not installed.
drac_exceptions = importutils.try_import('dracclient.exceptions')
drac_constants = importutils.try_import('dracclient.constants')
LOG = logging.getLogger(__name__)
METRICS = metrics_utils.get_metrics_logger(__name__)
# iDRAC RAID attribute names used to read and request the controller mode.
_CURRENT_RAID_CONTROLLER_MODE = "RAIDCurrentControllerMode"
_REQUESTED_RAID_CONTROLLER_MODE = "RAIDRequestedControllerMode"
# Controller mode attribute values.
_EHBA_MODE = "Enhanced HBA"
_RAID_MODE = "RAID"
# Properties of the RAID levels supported by this driver:
#   min_disks / max_disks - allowed physical-disk counts for the level,
#   type - 'simple' (single span) or 'spanned' (spans of a nested level),
#   overhead - disks' worth of capacity consumed by parity/mirroring,
#   span_type - for spanned levels, the RAID level of each span.
RAID_LEVELS = {
    '0': {
        'min_disks': 1,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 0
    },
    '1': {
        'min_disks': 2,
        'max_disks': 2,
        'type': 'simple',
        'overhead': 1
    },
    '5': {
        'min_disks': 3,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 1
    },
    '6': {
        'min_disks': 4,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 2
    },
    '1+0': {
        'type': 'spanned',
        'span_type': '1'
    },
    '5+0': {
        'type': 'spanned',
        'span_type': '5'
    },
    '6+0': {
        'type': 'spanned',
        'span_type': '6'
    }
}
def list_raid_controllers(node):
    """Return the RAID controllers present on a node.

    :param node: an ironic node object.
    :returns: a list of RAIDController objects from dracclient.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        controllers = client.list_raid_controllers()
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to get the list of RAID controllers '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid, 'error': err})
        raise exception.DracOperationError(error=err)
    return controllers
def list_virtual_disks(node):
    """Return the virtual disks configured on a node.

    :param node: an ironic node object.
    :returns: a list of VirtualDisk objects from dracclient.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        disks = client.list_virtual_disks()
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to get the list of virtual disks '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid, 'error': err})
        raise exception.DracOperationError(error=err)
    return disks
def list_physical_disks(node):
    """Return the physical disks attached to a node.

    :param node: an ironic node object.
    :returns: a list of PhysicalDisk objects from dracclient.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        disks = client.list_physical_disks()
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to get the list of physical disks '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid, 'error': err})
        raise exception.DracOperationError(error=err)
    return disks
def _is_raid_controller(node, raid_controller_fqdd, raid_controllers=None):
    """Check whether an FQDD identifies a RAID controller.

    :param node: an ironic node object.
    :param raid_controller_fqdd: FQDD of the device being tested.
    :param raid_controllers: optional list of RAIDController objects used
        to check for the presence of BOSS cards; when None, the iDRAC is
        queried for the controller list.
    :returns: True if the device is a RAID controller, False otherwise.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        return client.is_raid_controller(raid_controller_fqdd,
                                         raid_controllers)
    except drac_exceptions.BaseClientException as err:
        LOG.error('Unable to determine if controller %(raid_controller_fqdd)s '
                  'on node %(node_uuid)s is a RAID controller. '
                  'Reason: %(error)s. ',
                  {'raid_controller_fqdd': raid_controller_fqdd,
                   'node_uuid': node.uuid, 'error': err})
        raise exception.DracOperationError(error=err)
def _validate_job_queue(node, raid_controller=None):
    """Ensure there are no pending config jobs, optionally per controller.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller; when given, only
        jobs for that controller are considered.
    """
    if raid_controller:
        drac_job.validate_job_queue(
            node, name_prefix="Config:RAID:%s" % raid_controller)
    else:
        drac_job.validate_job_queue(node)
def create_virtual_disk(node, raid_controller, physical_disks, raid_level,
                        size_mb, disk_name=None, span_length=None,
                        span_depth=None):
    """Create a single virtual disk on a RAID controller.

    The virtual disk is created in pending state; the DRAC card performs
    the actual configuration only after the changes are committed with
    ``commit_config``.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :param physical_disks: ids of the physical disks.
    :param raid_level: RAID level of the virtual disk.
    :param size_mb: size of the virtual disk in megabytes.
    :param disk_name: name of the virtual disk. (optional)
    :param span_length: Number of disks per span. (optional)
    :param span_depth: Number of spans in virtual disk. (optional)
    :returns: a dictionary containing the commit_needed key with a boolean
        value indicating whether a config job must be created for the
        values to be applied.
    :raises: DracOperationError on an error from python-dracclient.
    """
    # A pending config job (such as one setting the boot mode) would make
    # this configuration fail, so validate the job queue first.
    _validate_job_queue(node, raid_controller)
    client = drac_common.get_drac_client(node)
    try:
        return client.create_virtual_disk(raid_controller, physical_disks,
                                          raid_level, size_mb, disk_name,
                                          span_length, span_depth)
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to create virtual disk for node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid,
                   'error': err})
        raise exception.DracOperationError(error=err)
def delete_virtual_disk(node, virtual_disk):
    """Delete a single virtual disk on a RAID controller.

    The deletion is queued in pending state; the DRAC card applies it only
    after the changes are committed with ``commit_config``.

    :param node: an ironic node object.
    :param virtual_disk: id of the virtual disk.
    :returns: a dictionary containing the commit_needed key with a boolean
        value indicating whether a config job must be created for the
        values to be applied.
    :raises: DracOperationError on an error from python-dracclient.
    """
    # NOTE(mgoddard): Cannot specify raid_controller as we don't know it.
    _validate_job_queue(node)
    client = drac_common.get_drac_client(node)
    try:
        return client.delete_virtual_disk(virtual_disk)
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to delete virtual disk '
                  '%(virtual_disk_fqdd)s for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'virtual_disk_fqdd': virtual_disk,
                   'node_uuid': node.uuid,
                   'error': err})
        raise exception.DracOperationError(error=err)
def _reset_raid_config(node, raid_controller):
    """Delete all virtual disks and unassign all hotspare physical disks.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :returns: a dictionary containing
        - The is_commit_required needed key with a boolean value
          indicating whether a config job must be created for the
          values to be applied.
        - The is_reboot_required key with a RebootRequired enumerated
          value indicating whether the server must be rebooted to
          reset configuration.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        _validate_job_queue(node, raid_controller)
        client = drac_common.get_drac_client(node)
        return client.reset_raid_config(raid_controller)
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to delete all virtual disk '
                  'and unassign all hotspares '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': err})
        raise exception.DracOperationError(error=err)
def clear_foreign_config(node, raid_controller):
    """Free up drives that carry a foreign RAID configuration.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :returns: a dictionary containing
        - The is_commit_required needed key with a boolean value
          indicating whether a config job must be created for the
          values to be applied.
        - The is_reboot_required key with a RebootRequired enumerated
          value indicating whether the server must be rebooted to
          clear foreign configuration.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        _validate_job_queue(node, raid_controller)
        client = drac_common.get_drac_client(node)
        return client.clear_foreign_config(raid_controller)
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to free foreign driver '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': err})
        raise exception.DracOperationError(error=err)
def set_raid_settings(node, controller_fqdd, settings):
    """Set pending values for RAID configuration attributes.

    The pending_value of each attribute is updated; a config job must
    then be created for the values to actually be applied.

    :param node: an ironic node object.
    :param controller_fqdd: the ID of the RAID controller.
    :param settings: a dictionary mapping attribute names to their
        proposed values.
    :returns: a dictionary containing:
        - The is_commit_required key with a boolean value indicating
          whether a config job must be created for the values to be
          applied.
        - The is_reboot_required key with a RebootRequired enumerated
          value indicating whether the server must be rebooted for the
          values to be applied. Possible values are true and false.
    :raises: DRACOperationFailed on error reported back by the DRAC
        interface
    """
    try:
        drac_job.validate_job_queue(node)
        client = drac_common.get_drac_client(node)
        return client.set_raid_settings(controller_fqdd, settings)
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to set raid settings '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'raid_controller_fqdd': controller_fqdd,
                   'node_uuid': node.uuid,
                   'error': err})
        raise exception.DracOperationError(error=err)
def list_raid_settings(node):
    """Return the RAID configuration settings of a node.

    :param node: an ironic node object.
    :returns: a dictionary keyed by InstanceID; values are
        RAIDEnumerableAttribute, RAIDStringAttribute and
        RAIDIntegerAttribute objects.
    :raises: DRACOperationFailed on error reported back by the DRAC
        interface
    """
    try:
        drac_job.validate_job_queue(node)
        client = drac_common.get_drac_client(node)
        return client.list_raid_settings()
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to list raid settings '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'node_uuid': node.uuid,
                   'error': err})
        raise exception.DracOperationError(error=err)
def change_physical_disk_state(node, mode=None,
                               controllers_to_physical_disk_ids=None):
    """Convert physical disks between RAID and JBOD status.

    Only disks that are not already in the requested state are converted.

    :param node: an ironic node object.
    :param mode: the target mode, either RAID or JBOD.
    :param controllers_to_physical_disk_ids: dictionary mapping controller
        ids to the disk ids that should be converted.
    :return: a dictionary containing:
        - conversion_results, a dictionary that maps controller ids
          to the conversion results for that controller.
          The conversion results are a dict that contains:
          - The is_commit_required key with the value always set to
            True indicating that a config job must be created to
            complete disk conversion.
          - The is_reboot_required key with a RebootRequired
            enumerated value indicating whether the server must be
            rebooted to complete disk conversion.
    :raises: DRACOperationError on an error from python-dracclient.
    """
    try:
        drac_job.validate_job_queue(node)
        client = drac_common.get_drac_client(node)
        return client.change_physical_disk_state(
            mode, controllers_to_physical_disk_ids)
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to change physical drives '
                  'to %(mode)s mode for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'mode': mode, 'node_uuid': node.uuid, 'error': err})
        raise exception.DracOperationError(error=err)
def commit_config(node, raid_controller, reboot=False, realtime=False):
    """Apply all pending changes on a RAID controller.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :param reboot: whether a reboot job should be automatically created
        along with the config job. (optional, defaults to False)
    :param realtime: whether the RAID controller supports realtime
        configuration. (optional, defaults to False)
    :returns: id of the created job
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        job_id = client.commit_pending_raid_changes(
            raid_controller=raid_controller,
            reboot=reboot,
            realtime=realtime)
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to commit pending RAID config for'
                  ' controller %(raid_controller_fqdd)s on node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': err})
        raise exception.DracOperationError(error=err)
    return job_id
def _change_physical_disk_mode(node, mode=None,
                               controllers_to_physical_disk_ids=None,
                               substep="completed"):
    """Convert physical drives from RAID to JBOD or vice-versa.

    :param node: an ironic node object.
    :param mode: the mode to change the disks to, either RAID or JBOD.
    :param controllers_to_physical_disk_ids: dictionary mapping controller
        ids to the disk ids that should be converted.
    :param substep: sub cleaning or deploy step executed after the raid
        configuration job completes. (defaults to "completed")
    :returns: states.CLEANWAIT if conversion is in progress asynchronously
        or None if it is completed.
    """
    conversion = change_physical_disk_state(
        node, mode, controllers_to_physical_disk_ids)
    # Flatten the per-controller conversion results into the shape
    # expected by _commit_to_controllers.
    controllers = [
        {'raid_controller': controller_id,
         'is_reboot_required': result['is_reboot_required'],
         'is_commit_required': result['is_commit_required']}
        for controller_id, result
        in conversion['conversion_results'].items()]
    return _commit_to_controllers(node, controllers, substep=substep)
def abandon_config(node, raid_controller):
    """Discard all pending changes on a RAID controller.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        client.abandon_pending_raid_changes(raid_controller)
    except drac_exceptions.BaseClientException as err:
        LOG.error('DRAC driver failed to delete pending RAID config '
                  'for controller %(raid_controller_fqdd)s on node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': err})
        raise exception.DracOperationError(error=err)
def _calculate_spans(raid_level, disks_count):
"""Calculates number of spans for a RAID level given a physical disk count
:param raid_level: RAID level of the virtual disk.
:param disk_count: number of physical disks used for the virtual disk.
:returns: number of spans.
"""
if raid_level in ['0', '1', '5', '6']:
return 1
elif raid_level in ['5+0', '6+0']:
return 2
elif raid_level in ['1+0']:
return disks_count >> 1
else:
reason = (_('Cannot calculate spans for RAID level "%s"') %
raid_level)
raise exception.DracOperationError(error=reason)
def _usable_disks_count(raid_level, disks_count):
"""Calculates the number of disks usable for a RAID level
...given a physical disk count
:param raid_level: RAID level of the virtual disk.
:param disk_count: number of physical disks used for the virtual disk.
:returns: number of disks.
"""
if raid_level in ['0', '1', '5', '6']:
return disks_count
elif raid_level in ['5+0', '6+0', '1+0']:
# largest even number less than disk_count
return (disks_count >> 1) << 1
else:
reason = (_('RAID level %(raid_level)s is not supported by the '
'driver. Supported RAID levels: %(supported_raid_levels)s')
% {'raid_level': raid_level,
'supported_raid_levels': list(RAID_LEVELS)})
raise exception.DracOperationError(error=reason)
def _resolve_raid_level_info(raid_level, spans_count):
    """Resolve the RAID_LEVELS entry for a level, unwrapping spanned levels.

    :param raid_level: RAID level name (e.g. '5' or '1+0').
    :param spans_count: number of spans; must be > 1 for spanned levels.
    :returns: the RAID_LEVELS info dict (for spanned levels, the info of
        the per-span level).
    :raises: DracOperationError for an unsupported level or an invalid
        span count.
    """
    try:
        raid_level_info = RAID_LEVELS[raid_level]
    except KeyError:
        reason = (_('RAID level %(raid_level)s is not supported by the '
                    'driver. Supported RAID levels: %(supported_raid_levels)s')
                  % {'raid_level': raid_level,
                     'supported_raid_levels': list(RAID_LEVELS)})
        raise exception.DracOperationError(error=reason)
    if raid_level_info['type'] == 'spanned':
        if spans_count <= 1:
            reason = _('Spanned RAID volumes cannot contain a single span')
            raise exception.DracOperationError(error=reason)
        raid_level_info = RAID_LEVELS[raid_level_info['span_type']]
    return raid_level_info


def _raid_level_min_disks(raid_level, spans_count=1):
    """Minimum number of physical disks required for a RAID level.

    :raises: DracOperationError for an unsupported level or span count.
    """
    # NOTE: the level lookup/validation was duplicated verbatim across
    # three functions; it is factored into _resolve_raid_level_info.
    return _resolve_raid_level_info(raid_level,
                                    spans_count)['min_disks'] * spans_count


def _raid_level_max_disks(raid_level, spans_count=1):
    """Maximum number of physical disks allowed for a RAID level.

    :raises: DracOperationError for an unsupported level or span count.
    """
    return _resolve_raid_level_info(raid_level,
                                    spans_count)['max_disks'] * spans_count


def _raid_level_overhead(raid_level, spans_count=1):
    """Number of disks' worth of capacity lost to parity/mirroring.

    :raises: DracOperationError for an unsupported level or span count.
    """
    return _resolve_raid_level_info(raid_level,
                                    spans_count)['overhead'] * spans_count
def _max_volume_size_mb(raid_level, physical_disks, free_space_mb,
                        spans_count=1, stripe_size_kb=64 * units.Ki):
    """Largest volume size (MiB) creatable on the given physical disks.

    The volume is limited by the disk with the least free space; the size
    is rounded down to whole stripes so it never exceeds available space.

    :param raid_level: RAID level of the volume.
    :param physical_disks: disks the volume will be placed on.
    :param free_space_mb: dict mapping each disk to its free space in MiB.
    :param spans_count: number of spans in the volume.
    :param stripe_size_kb: stripe size in KiB.
    :returns: maximum volume size in MiB.
    """
    smallest_free_kb = min(
        free_space_mb[disk] for disk in physical_disks) * units.Ki
    # floor: never report more stripes than actually fit on the disk
    stripes_per_disk = int(math.floor(float(smallest_free_kb)
                                      / stripe_size_kb))
    data_disks = len(physical_disks) - _raid_level_overhead(raid_level,
                                                            spans_count)
    return int(stripes_per_disk * stripe_size_kb * data_disks / units.Ki)
def _volume_usage_per_disk_mb(logical_disk, physical_disks, spans_count=1,
                              stripe_size_kb=64 * units.Ki):
    """Space (MiB) a logical disk consumes on each physical disk.

    Uses ceiling arithmetic so the reported usage is the largest possible,
    which avoids over-committing disk space.

    :param logical_disk: logical disk definition with 'raid_level' and
        'size_mb'.
    :param physical_disks: disks the volume is placed on.
    :param spans_count: number of spans in the volume.
    :param stripe_size_kb: stripe size in KiB.
    :returns: per-disk usage in MiB.
    """
    data_disks_count = len(physical_disks) - _raid_level_overhead(
        logical_disk['raid_level'], spans_count)
    volume_size_kb = logical_disk['size_mb'] * units.Ki
    stripes_per_volume = math.ceil(float(volume_size_kb) / stripe_size_kb)
    stripes_per_disk = math.ceil(float(stripes_per_volume) / data_disks_count)
    return int(stripes_per_disk * stripe_size_kb / units.Ki)
def _find_configuration(logical_disks, physical_disks, pending_delete):
    """Find RAID configuration.

    This method transforms the RAID configuration defined in Ironic to a format
    that is required by dracclient. This includes matching the physical disks
    to RAID volumes when it's not pre-defined, or in general calculating
    missing properties.

    :param logical_disks: list of logical disk definitions.
    :param physical_disks: list of physical disk definitions.
    :param pending_delete: Whether there is a pending deletion of virtual
        disks that should be accounted for.
    :returns: list of logical disk definitions with all properties
        (physical disks, size, spans, controller) resolved.
    :raises: DracOperationError if no matching disks can be found.
    """
    # shared physical disks of RAID volumes size_gb='MAX' should be
    # deprioritized during the matching process to reserve as much space as
    # possible. Reserved means it won't be used during matching.
    volumes_with_reserved_physical_disks = [
        volume for volume in logical_disks
        if ('physical_disks' in volume and volume['size_mb'] == 'MAX'
            and volume.get('share_physical_disks', False))]
    reserved_physical_disks = [
        disk for disk in physical_disks
        for volume in volumes_with_reserved_physical_disks
        if disk.id in volume['physical_disks']]
    # we require each logical disk contain only homogeneous physical disks, so
    # sort them by type
    physical_disks_by_type = {}
    reserved_physical_disks_by_type = {}
    free_space_mb = {}
    for disk in physical_disks:
        # calculate free disk space
        free_space_mb[disk] = _get_disk_free_size_mb(disk, pending_delete)
        # disks are homogeneous when controller, media, interface and size
        # all match
        disk_type = (disk.controller, disk.media_type, disk.interface_type,
                     disk.size_mb)
        if disk_type not in physical_disks_by_type:
            physical_disks_by_type[disk_type] = []
            reserved_physical_disks_by_type[disk_type] = []
        if disk in reserved_physical_disks:
            reserved_physical_disks_by_type[disk_type].append(disk)
        else:
            physical_disks_by_type[disk_type].append(disk)
    # exclude non-shared physical disks (predefined by the user) from
    # physical_disks_by_type because they are not going to be used during
    # matching
    for volume in logical_disks:
        if ('physical_disks' in volume
                and not volume.get('share_physical_disks', False)):
            for disk in physical_disks:
                if disk.id in volume['physical_disks']:
                    disk_type = (disk.controller, disk.media_type,
                                 disk.interface_type, disk.size_mb)
                    if disk in physical_disks_by_type[disk_type]:
                        physical_disks_by_type[disk_type].remove(disk)
    processed_volumes = []
    # step 1 - process volumes with predefined disks and exact size
    for volume in [volume for volume in logical_disks
                   if ('physical_disks' in volume
                       and volume['size_mb'] != 'MAX')]:
        _calculate_volume_props(volume, physical_disks, free_space_mb)
        processed_volumes.append(volume)
    # step 2 - process volumes without predefined disks
    volumes_without_disks = [disk for disk in logical_disks
                             if 'physical_disks' not in disk]
    if volumes_without_disks:
        result, free_space_mb = (
            _assign_disks_to_volume(volumes_without_disks,
                                    physical_disks_by_type, free_space_mb,
                                    pending_delete))
        if not result:
            # try again using the reserved physical disks in addition
            for disk_type, disks in physical_disks_by_type.items():
                physical_disks_by_type[disk_type] += (
                    reserved_physical_disks_by_type[disk_type])
            result, free_space_mb = (
                _assign_disks_to_volume(volumes_without_disks,
                                        physical_disks_by_type,
                                        free_space_mb,
                                        pending_delete))
            if not result:
                error_msg = _('failed to find matching physical disks for all '
                              'logical disks')
                LOG.error('DRAC driver failed to create RAID '
                          'configuration. Reason: %(error)s.',
                          {'error': error_msg})
                raise exception.DracOperationError(error=error_msg)
        processed_volumes += volumes_without_disks
    # step 3 - process volumes with predefined disks and size_mb == 'MAX'
    # (last, so that 'MAX' sees the space remaining after steps 1 and 2)
    for volume in [volume for volume in logical_disks
                   if ('physical_disks' in volume
                       and volume['size_mb'] == 'MAX')]:
        _calculate_volume_props(volume, physical_disks, free_space_mb)
        processed_volumes.append(volume)
    return processed_volumes
def _calculate_volume_props(logical_disk, physical_disks, free_space_mb):
    """Fill in span/size/controller properties of a logical disk in place.

    Mutates ``logical_disk`` (span_depth, span_length, size_mb, controller)
    and deducts the per-disk usage from ``free_space_mb``.

    :param logical_disk: logical disk definition; must contain
        'physical_disks', 'raid_level' and 'size_mb'.
    :param physical_disks: list of all physical disk objects.
    :param free_space_mb: dict mapping each disk to its free space in MiB;
        updated in place.
    :raises: DracOperationError if the disk selection or sizes are invalid.
    """
    selected_disks = [disk for disk in physical_disks
                      if disk.id in logical_disk['physical_disks']]
    spans_count = _calculate_spans(
        logical_disk['raid_level'], len(selected_disks))
    # every span must contain the same number of disks
    if len(selected_disks) % spans_count != 0:
        error_msg = _('invalid number of physical disks was provided')
        raise exception.DracOperationError(error=error_msg)
    disks_per_span = int(len(selected_disks) / spans_count)
    # Best practice is to not pass span_length and span_depth when creating a
    # RAID10. The iDRAC will dynamically calculate these values using maximum
    # values obtained from the RAID controller.
    logical_disk['span_depth'] = None
    logical_disk['span_length'] = None
    if logical_disk['raid_level'] != '1+0':
        logical_disk['span_depth'] = spans_count
        logical_disk['span_length'] = disks_per_span
    max_volume_size_mb = _max_volume_size_mb(
        logical_disk['raid_level'], selected_disks, free_space_mb,
        spans_count=spans_count)
    if logical_disk['size_mb'] == 'MAX':
        if max_volume_size_mb == 0:
            error_msg = _("size set to 'MAX' but could not allocate physical "
                          "disk space")
            raise exception.DracOperationError(error=error_msg)
        logical_disk['size_mb'] = max_volume_size_mb
    elif max_volume_size_mb < logical_disk['size_mb']:
        if max_volume_size_mb == 0:
            error_msg = _('not enough physical disk space for the logical '
                          'disk')
            raise exception.DracOperationError(error=error_msg)
    disk_usage = _volume_usage_per_disk_mb(logical_disk, selected_disks,
                                           spans_count=spans_count)
    # charge every selected disk for the volume's per-disk usage
    for disk in selected_disks:
        if free_space_mb[disk] < disk_usage:
            error_msg = _('not enough free space on physical disks for the '
                          'logical disk')
            raise exception.DracOperationError(error=error_msg)
        else:
            free_space_mb[disk] -= disk_usage
    if 'controller' not in logical_disk:
        logical_disk['controller'] = selected_disks[0].controller
def _assign_disks_to_volume(logical_disks, physical_disks_by_type,
                            free_space_mb, pending_delete):
    """Recursively match physical disks to volumes without predefined disks.

    Pops the first volume from ``logical_disks``, tries every disk
    type/count combination for it, and recurses for the remaining volumes;
    backtracks (restoring the queue) on failure.

    :param logical_disks: queue of volumes still needing disks; mutated
        during recursion but restored on failure, and on success contains
        the fully-assigned volumes.
    :param physical_disks_by_type: dict mapping
        (controller, media_type, interface_type, size_mb) to candidate
        disks of that type.
    :param free_space_mb: dict mapping each disk to its free space in MiB.
    :param pending_delete: whether pending virtual disk deletions should
        be accounted for when computing free space.
    :returns: tuple (success, free_space_mb after assignment).
    """
    logical_disk = logical_disks.pop(0)
    raid_level = logical_disk['raid_level']
    # iterate over all possible configurations
    for (controller, disk_type,
         interface_type, size_mb), disks in physical_disks_by_type.items():
        # honor explicit disk_type/interface_type constraints, if any
        if ('disk_type' in logical_disk
                and logical_disk['disk_type'] != disk_type):
            continue
        if ('interface_type' in logical_disk
                and logical_disk['interface_type'] != interface_type):
            continue
        # filter out disks without free disk space
        disks = [disk for disk in disks if free_space_mb[disk] > 0]
        # sort disks by free size which is important if we have max disks limit
        # on a volume
        disks = sorted(
            disks,
            key=lambda disk: free_space_mb[disk])
        # filter out disks already in use if sharing is disabled
        if ('share_physical_disks' not in logical_disk
                or not logical_disk['share_physical_disks']):
            initial_free_size_mb = {
                disk: _get_disk_free_size_mb(disk, pending_delete)
                for disk in disks
            }
            # a disk is unused iff its free space equals its initial free
            # space
            disks = [disk for disk in disks
                     if initial_free_size_mb[disk] == free_space_mb[disk]]
        max_spans = _calculate_spans(raid_level, len(disks))
        min_spans = min([2, max_spans])
        min_disks = _raid_level_min_disks(raid_level,
                                          spans_count=min_spans)
        max_disks = _raid_level_max_disks(raid_level,
                                          spans_count=max_spans)
        candidate_max_disks = min([max_disks, len(disks)])
        for disks_count in range(min_disks, candidate_max_disks + 1):
            if ('number_of_physical_disks' in logical_disk
                    and (logical_disk['number_of_physical_disks']
                         != disks_count)):
                continue
            # skip invalid disks_count
            if disks_count != _usable_disks_count(logical_disk['raid_level'],
                                                  disks_count):
                continue
            selected_disks = disks[0:disks_count]
            # work on copies so a failed candidate doesn't corrupt state
            candidate_volume = logical_disk.copy()
            candidate_free_space_mb = free_space_mb.copy()
            candidate_volume['physical_disks'] = [disk.id for disk
                                                  in selected_disks]
            try:
                _calculate_volume_props(candidate_volume, selected_disks,
                                        candidate_free_space_mb)
            except exception.DracOperationError:
                continue
            if len(logical_disks) > 0:
                # recurse for the remaining volumes
                result, candidate_free_space_mb = (
                    _assign_disks_to_volume(logical_disks,
                                            physical_disks_by_type,
                                            candidate_free_space_mb,
                                            pending_delete))
                if result:
                    logical_disks.append(candidate_volume)
                    return (True, candidate_free_space_mb)
            else:
                logical_disks.append(candidate_volume)
                return (True, candidate_free_space_mb)
    else:
        # put back the logical_disk to queue
        logical_disks.insert(0, logical_disk)
        return (False, free_space_mb)
def _filter_logical_disks(logical_disks, include_root_volume,
include_nonroot_volumes):
filtered_disks = []
for disk in logical_disks:
if include_root_volume and disk.get('is_root_volume'):
filtered_disks.append(disk)
if include_nonroot_volumes and not disk.get('is_root_volume'):
filtered_disks.append(disk)
return filtered_disks
def _create_config_job(node, controller, reboot=False, realtime=False,
                       raid_config_job_ids=None,
                       raid_config_parameters=None):
    """Commit pending changes on a controller and record the job id.

    :param node: an ironic node object.
    :param controller: id of the RAID controller.
    :param reboot: whether to create a reboot job with the config job.
    :param realtime: whether the controller supports realtime config.
    :param raid_config_job_ids: list accumulating created job ids;
        a fresh list is used when not supplied.
    :param raid_config_parameters: list accumulating controller ids;
        a fresh list is used when not supplied.
    :returns: dict with the updated 'raid_config_job_ids' and
        'raid_config_parameters' lists.
    """
    # NOTE: mutable default arguments ([]) are evaluated once and shared
    # across calls in Python, so the accumulators would leak between
    # invocations; default to None and create fresh lists instead.
    if raid_config_job_ids is None:
        raid_config_job_ids = []
    if raid_config_parameters is None:
        raid_config_parameters = []
    job_id = commit_config(node, raid_controller=controller,
                           reboot=reboot, realtime=realtime)
    raid_config_job_ids.append(job_id)
    if controller not in raid_config_parameters:
        raid_config_parameters.append(controller)
    LOG.info('Change has been committed to RAID controller '
             '%(controller)s on node %(node)s. '
             'DRAC job id: %(job_id)s',
             {'controller': controller, 'node': node.uuid,
              'job_id': job_id})
    return {'raid_config_job_ids': raid_config_job_ids,
            'raid_config_parameters': raid_config_parameters}
def _validate_volume_size(node, logical_disks):
    """Clamp logical disk sizes to what the current physical disks allow.

    Re-reads the node's physical disks and, for every logical disk whose
    requested size exceeds the currently achievable maximum, shrinks it and
    recomputes its volume properties in place.

    :param node: an ironic node object.
    :param logical_disks: list of logical disk definitions; entries may be
        mutated (size_mb, span properties).
    :returns: the list of recomputed volumes if any disk was resized,
        otherwise the original logical_disks list.
    """
    new_physical_disks = list_physical_disks(node)
    free_space_mb = {}
    new_processed_volumes = []
    for disk in new_physical_disks:
        free_space_mb[disk] = disk.free_size_mb
    for logical_disk in logical_disks:
        selected_disks = [disk for disk in new_physical_disks
                          if disk.id in logical_disk['physical_disks']]
        spans_count = _calculate_spans(
            logical_disk['raid_level'], len(selected_disks))
        new_max_vol_size_mb = _max_volume_size_mb(
            logical_disk['raid_level'],
            selected_disks,
            free_space_mb,
            spans_count=spans_count)
        # requested size no longer fits: shrink and recompute properties
        if logical_disk['size_mb'] > new_max_vol_size_mb:
            logical_disk['size_mb'] = new_max_vol_size_mb
            LOG.info("Logical size does not match so calculating volume "
                     "properties for current logical_disk")
            _calculate_volume_props(
                logical_disk, new_physical_disks, free_space_mb)
            new_processed_volumes.append(logical_disk)
    if new_processed_volumes:
        return new_processed_volumes
    return logical_disks
def _switch_to_raid_mode(node, controller_fqdd):
    """Request switching a controller from Enhanced HBA to RAID mode.

    :param node: an ironic node object
    :param controller_fqdd: the ID of the RAID controller.
    :returns: a dictionary containing
        - The raid_controller key with a ID of the
          RAID controller value.
        - The is_commit_required needed key with a
          boolean value indicating whether a config job must be created
          for the values to be applied.
        - The is_reboot_required key with a RebootRequired enumerated
          value indicating whether the server must be rebooted to
          switch the controller mode to RAID.
    """
    # wait for pending jobs to complete
    drac_job.wait_for_job_completion(node)
    mode_attribute = "{}:{}".format(controller_fqdd,
                                    _REQUESTED_RAID_CONTROLLER_MODE)
    settings_results = set_raid_settings(
        node, controller_fqdd, {mode_attribute: _RAID_MODE})
    return {
        'raid_controller': controller_fqdd,
        'is_reboot_required': settings_results['is_reboot_required'],
        'is_commit_required': settings_results['is_commit_required']}
def _commit_to_controllers(node, controllers, substep="completed"):
    """Commit changes to RAID controllers on the node.

    :param node: an ironic node object
    :param controllers: a list of dictionaries, each containing
        - the raid_controller key with a RAID controller FQDD value
          indicating on which controller a configuration job must run,
        - the is_commit_required key with a boolean value indicating
          whether a config job must be created,
        - the is_reboot_required key with a RebootRequired enumerated
          value indicating whether the server must be rebooted (only if
          the RAID controller does not support realtime application).
    :param substep: sub cleaning/deploy step recorded on the node; the
        periodic job poller executes it once the config jobs finish.
        (defaults to "completed")
    :returns: states.CLEANWAIT (cleaning) or states.DEPLOYWAIT (deployment) if
        configuration is in progress asynchronously or None if it is
        completed.
    """
    # remove controller which does not require configuration job
    controllers = [controller for controller in controllers
                   if controller['is_commit_required']]
    if not controllers:
        LOG.debug('No changes on any of the controllers on node %s',
                  node.uuid)
        # Nothing to commit: record the substep so polling can finish it.
        driver_internal_info = node.driver_internal_info
        driver_internal_info['raid_config_substep'] = substep
        driver_internal_info['raid_config_parameters'] = []
        node.driver_internal_info = driver_internal_info
        node.save()
        return
    driver_internal_info = node.driver_internal_info
    driver_internal_info['raid_config_substep'] = substep
    driver_internal_info['raid_config_parameters'] = []
    if 'raid_config_job_ids' not in driver_internal_info:
        driver_internal_info['raid_config_job_ids'] = []
    optional = drac_constants.RebootRequired.optional
    # all realtime controllers (and none in eHBA mode): no reboot needed.
    all_realtime = all(
        (cntlr['is_reboot_required'] == optional)
        and not(cntlr.get('is_ehba_mode'))
        for cntlr in controllers)
    # check any controller with ehba mode
    any_ehba_controllers = any(
        cntrl.get('is_ehba_mode') is True for cntrl in controllers)
    raid_config_job_ids = []
    raid_config_parameters = []
    if all_realtime:
        # Realtime-capable everywhere: apply without rebooting.
        for controller in controllers:
            realtime_controller = controller['raid_controller']
            job_details = _create_config_job(
                node, controller=realtime_controller,
                reboot=False, realtime=True,
                raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)
    elif any_ehba_controllers:
        # eHBA controllers are first committed realtime and switched to
        # RAID mode; the mode switch itself needs a job with a reboot.
        commit_to_ehba_controllers = []
        for controller in controllers:
            if controller.get('is_ehba_mode'):
                job_details = _create_config_job(
                    node, controller=controller['raid_controller'],
                    reboot=False, realtime=True,
                    raid_config_job_ids=raid_config_job_ids,
                    raid_config_parameters=raid_config_parameters)
                ehba_controller = _switch_to_raid_mode(
                    node, controller['raid_controller'])
                commit_to_ehba_controllers.append(
                    ehba_controller['raid_controller'])
            else:
                job_details = _create_config_job(
                    node, controller=controller['raid_controller'],
                    reboot=False, realtime=False,
                    raid_config_job_ids=raid_config_job_ids,
                    raid_config_parameters=raid_config_parameters)
        for controller in commit_to_ehba_controllers:
            LOG.debug("Create job with Reboot to apply configuration "
                      "changes for ehba controllers")
            # Only the last job triggers the actual reboot.
            job_details = _create_config_job(
                node, controller=controller,
                reboot=(controller == commit_to_ehba_controllers[-1]),
                realtime=False, raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)
    else:
        # Mixed capabilities: queue jobs, reboot once with the last one.
        for controller in controllers:
            mix_controller = controller['raid_controller']
            reboot = (controller == controllers[-1])
            job_details = _create_config_job(
                node, controller=mix_controller,
                reboot=reboot, realtime=False,
                raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)
    driver_internal_info['raid_config_job_ids'].extend(job_details[
        'raid_config_job_ids'])
    driver_internal_info['raid_config_parameters'].extend(job_details[
        'raid_config_parameters'])
    node.driver_internal_info = driver_internal_info
    # Signal whether the node has been rebooted, that we do not need to execute
    # the step again, and that this completion of this step is triggered
    # through async polling.
    # NOTE(mgoddard): set_async_step_flags calls node.save().
    deploy_utils.set_async_step_flags(
        node,
        reboot=not all_realtime,
        skip_current_step=True,
        polling=True)
    return deploy_utils.get_async_step_return_state(node)
def _create_virtual_disks(task, node):
    """Create the virtual disks recorded in driver_internal_info.

    Reads 'logical_disks_to_create' from the node's driver_internal_info,
    re-validates volume sizes when a prior physical-disk mode conversion may
    have changed the available free space, creates each virtual disk, then
    commits the pending changes per controller.

    :param task: a TaskManager instance containing the node to act on.
    :param node: an ironic node object.
    :returns: the async wait state from _commit_to_controllers, or None.
    """
    logical_disks_to_create = node.driver_internal_info[
        'logical_disks_to_create']
    # Check valid properties attached to volume after drives conversion
    isVolValidationNeeded = node.driver_internal_info[
        'volume_validation']
    if isVolValidationNeeded:
        logical_disks_to_create = _validate_volume_size(
            node, logical_disks_to_create)
    controllers = list()
    for logical_disk in logical_disks_to_create:
        controller = dict()
        controller_cap = create_virtual_disk(
            node,
            raid_controller=logical_disk['controller'],
            physical_disks=logical_disk['physical_disks'],
            raid_level=logical_disk['raid_level'],
            size_mb=logical_disk['size_mb'],
            disk_name=logical_disk.get('name'),
            span_length=logical_disk.get('span_length'),
            span_depth=logical_disk.get('span_depth'))
        controller['raid_controller'] = logical_disk['controller']
        controller['is_reboot_required'] = controller_cap[
            'is_reboot_required']
        controller['is_commit_required'] = controller_cap[
            'is_commit_required']
        # Commit once per controller, not once per logical disk.
        if controller not in controllers:
            controllers.append(controller)
    return _commit_to_controllers(node, controllers)
def _controller_in_hba_mode(raid_settings, controller_fqdd):
    """Check whether the controller is currently in Enhanced HBA mode."""
    mode_attribute = '{}:{}'.format(controller_fqdd,
                                    _CURRENT_RAID_CONTROLLER_MODE)
    current_mode = raid_settings.get(mode_attribute)
    return _EHBA_MODE in current_mode.current_value
def _controller_supports_ehba_mode(settings, controller_fqdd):
    """Check whether the controller can be put into Enhanced HBA mode."""
    mode_attribute = "{}:{}".format(controller_fqdd,
                                    _CURRENT_RAID_CONTROLLER_MODE)
    mode_setting = settings.get(mode_attribute)
    if mode_setting:
        return _EHBA_MODE in mode_setting.possible_values
    # Controller does not expose the mode attribute at all.
    return False
def _get_disk_free_size_mb(disk, pending_delete):
"""Return the size of free space on the disk in MB.
:param disk: a PhysicalDisk object.
:param pending_delete: Whether there is a pending deletion of all virtual
disks.
"""
return disk.size_mb if pending_delete else disk.free_size_mb
class DracWSManRAID(base.RAIDInterface):
    """WS-Man based RAID interface for Dell iDRAC.

    Drives RAID configuration through python-dracclient.  Configuration
    changes are committed as asynchronous iDRAC jobs; a periodic task
    polls those jobs and advances the recorded 'raid_config_substep'
    until the clean/deploy step completes.
    """

    def get_properties(self):
        """Return the properties of the interface."""
        return drac_common.COMMON_PROPERTIES

    @base.deploy_step(priority=0,
                      argsinfo=base.RAID_APPLY_CONFIGURATION_ARGSINFO)
    def apply_configuration(self, task, raid_config, create_root_volume=True,
                            create_nonroot_volumes=False,
                            delete_existing=True):
        """Deploy-step entry point; delegates to the generic implementation."""
        return super(DracWSManRAID, self).apply_configuration(
            task, raid_config, create_root_volume=create_root_volume,
            create_nonroot_volumes=create_nonroot_volumes,
            delete_existing=delete_existing)

    @METRICS.timer('DracRAID.create_configuration')
    @base.clean_step(priority=0, abortable=False, argsinfo={
        'create_root_volume': {
            'description': (
                'This specifies whether to create the root volume. '
                'Defaults to `True`.'
            ),
            'required': False
        },
        'create_nonroot_volumes': {
            'description': (
                'This specifies whether to create the non-root volumes. '
                'Defaults to `True`.'
            ),
            'required': False
        },
        "delete_existing": {
            "description": (
                "Setting this to 'True' indicates to delete existing RAID "
                "configuration prior to creating the new configuration. "
                "Default value is 'False'."
            ),
            "required": False,
        }
    })
    def create_configuration(self, task,
                             create_root_volume=True,
                             create_nonroot_volumes=True,
                             delete_existing=False):
        """Create the RAID configuration.

        This method creates the RAID configuration on the given node.

        :param task: a TaskManager instance containing the node to act on.
        :param create_root_volume: If True, a root volume is created
            during RAID configuration. Otherwise, no root volume is
            created. Default is True.
        :param create_nonroot_volumes: If True, non-root volumes are
            created. If False, no non-root volumes are created. Default
            is True.
        :param delete_existing: Setting this to True indicates to delete RAID
            configuration prior to creating the new configuration. Default is
            False.
        :returns: states.CLEANWAIT (cleaning) or states.DEPLOYWAIT (deployment)
            if creation is in progress asynchronously or None if it is
            completed.
        :raises: MissingParameterValue, if node.target_raid_config is missing
            or empty.
        :raises: DracOperationError on an error from python-dracclient.
        """
        node = task.node
        logical_disks = node.target_raid_config['logical_disks']
        for disk in logical_disks:
            # 'MAX' sizing needs explicit backing disks so the maximum
            # size can actually be computed.
            if disk['size_gb'] == 'MAX' and 'physical_disks' not in disk:
                raise exception.InvalidParameterValue(
                    _("create_configuration called with invalid "
                      "target_raid_configuration for node %(node_id)s. "
                      "'physical_disks' is missing from logical_disk while "
                      "'size_gb'='MAX' was requested: "
                      "%(logical_disk)s") % {'node_id': node.uuid,
                                             'logical_disk': disk})
            # Internally everything is tracked in MB ('size_mb').
            if disk['size_gb'] == 'MAX':
                disk['size_mb'] = 'MAX'
            else:
                disk['size_mb'] = disk['size_gb'] * units.Ki
            del disk['size_gb']
        if delete_existing:
            self._delete_configuration_no_commit(task)
        physical_disks = list_physical_disks(node)
        logical_disks = _find_configuration(logical_disks, physical_disks,
                                            pending_delete=delete_existing)
        logical_disks_to_create = _filter_logical_disks(
            logical_disks, create_root_volume, create_nonroot_volumes)
        # Group backing physical-disk IDs per controller for the
        # RAID-mode conversion below.
        controllers_to_physical_disk_ids = defaultdict(list)
        for logical_disk in logical_disks_to_create:
            # Not applicable to JBOD logical disks.
            if logical_disk['raid_level'] == 'JBOD':
                continue
            for physical_disk_name in logical_disk['physical_disks']:
                controllers_to_physical_disk_ids[
                    logical_disk['controller']].append(
                    physical_disk_name)
        # adding logical_disks to driver_internal_info to create virtual disks
        driver_internal_info = node.driver_internal_info
        driver_internal_info[
            "logical_disks_to_create"] = logical_disks_to_create
        commit_results = None
        if logical_disks_to_create:
            LOG.debug(
                "Converting physical disks configured to back RAID "
                "logical disks to RAID mode for node %(node_uuid)s ",
                {"node_uuid": node.uuid})
            raid_mode = drac_constants.RaidStatus.raid
            commit_results = _change_physical_disk_mode(
                node, raid_mode,
                controllers_to_physical_disk_ids,
                substep="create_virtual_disks")
        # A conversion job implies free space may change; flag so the
        # 'create_virtual_disks' substep re-validates volume sizes.
        volume_validation = True if commit_results else False
        driver_internal_info['volume_validation'] = volume_validation
        node.driver_internal_info = driver_internal_info
        node.save()
        if commit_results:
            return commit_results
        else:
            LOG.debug("Controller does not support drives conversion "
                      "so creating virtual disks")
            return _create_virtual_disks(task, node)

    @METRICS.timer('DracRAID.delete_configuration')
    @base.clean_step(priority=0)
    @base.deploy_step(priority=0)
    def delete_configuration(self, task):
        """Delete the RAID configuration.

        :param task: a TaskManager instance containing the node to act on.
        :returns: states.CLEANWAIT (cleaning) or states.DEPLOYWAIT (deployment)
            if deletion is in progress asynchronously or None if it is
            completed.
        :raises: DracOperationError on an error from python-dracclient.
        """
        controllers = self._delete_configuration_no_commit(task)
        return _commit_to_controllers(task.node, controllers,
                                      substep="delete_foreign_config")

    @METRICS.timer('DracRAID.get_logical_disks')
    def get_logical_disks(self, task):
        """Get the RAID configuration of the node.

        :param task: a TaskManager instance containing the node to act on.
        :returns: A dictionary of properties.
        :raises: DracOperationError on an error from python-dracclient.
        """
        node = task.node
        logical_disks = []
        for disk in list_virtual_disks(node):
            logical_disk = {
                'id': disk.id,
                'controller': disk.controller,
                'size_gb': int(disk.size_mb / units.Ki),
                'raid_level': disk.raid_level
            }
            if disk.name is not None:
                logical_disk['name'] = disk.name
            logical_disks.append(logical_disk)
        return {'logical_disks': logical_disks}

    @METRICS.timer('DracRAID._query_raid_config_job_status')
    @periodics.periodic(
        spacing=CONF.drac.query_raid_config_job_status_interval)
    def _query_raid_config_job_status(self, manager, context):
        """Periodic task to check the progress of running RAID config jobs."""
        filters = {'reserved': False, 'maintenance': False}
        fields = ['driver_internal_info']
        node_list = manager.iter_nodes(fields=fields, filters=filters)
        for (node_uuid, driver, conductor_group,
             driver_internal_info) in node_list:
            try:
                lock_purpose = 'checking async raid configuration jobs'
                # Shared lock suffices for the checks; _check_node_raid_jobs
                # upgrades it before mutating the node.
                with task_manager.acquire(context, node_uuid,
                                          purpose=lock_purpose,
                                          shared=True) as task:
                    if not isinstance(task.driver.raid, DracWSManRAID):
                        continue
                    job_ids = driver_internal_info.get('raid_config_job_ids')
                    if not job_ids:
                        continue
                    self._check_node_raid_jobs(task)
            except exception.NodeNotFound:
                LOG.info("During query_raid_config_job_status, node "
                         "%(node)s was not found and presumed deleted by "
                         "another process.", {'node': node_uuid})
            except exception.NodeLocked:
                LOG.info("During query_raid_config_job_status, node "
                         "%(node)s was already locked by another process. "
                         "Skip.", {'node': node_uuid})

    @METRICS.timer('DracRAID._check_node_raid_jobs')
    def _check_node_raid_jobs(self, task):
        """Check the progress of running RAID config jobs of a node."""
        node = task.node
        raid_config_job_ids = node.driver_internal_info['raid_config_job_ids']
        finished_job_ids = []
        for config_job_id in raid_config_job_ids:
            config_job = drac_job.get_job(node, job_id=config_job_id)
            if config_job is None or config_job.status == 'Completed':
                finished_job_ids.append(config_job_id)
            elif config_job.status == 'Failed':
                finished_job_ids.append(config_job_id)
                self._set_raid_config_job_failure(node)
        if not finished_job_ids:
            return
        task.upgrade_lock()
        self._delete_cached_config_job_id(node, finished_job_ids)
        if not node.driver_internal_info.get('raid_config_job_failure',
                                             False):
            # All jobs so far succeeded: advance the recorded substep.
            if 'raid_config_substep' in node.driver_internal_info:
                substep = node.driver_internal_info['raid_config_substep']
                if substep == 'delete_foreign_config':
                    foreign_drives = self._execute_foreign_drives(task, node)
                    if foreign_drives is None:
                        return self._convert_drives(task, node)
                elif substep == 'physical_disk_conversion':
                    self._convert_drives(task, node)
                elif substep == "create_virtual_disks":
                    return _create_virtual_disks(task, node)
                elif substep == 'completed':
                    self._complete_raid_substep(task, node)
            else:
                self._complete_raid_substep(task, node)
        else:
            self._clear_raid_substep(node)
            self._clear_raid_config_job_failure(node)
            # NOTE(review): config_job is the last job examined by the loop
            # above, which is not necessarily the job that failed -- confirm
            # before relying on the reported job id/message.
            self._set_failed(task, config_job)

    def _execute_foreign_drives(self, task, node):
        """Clear foreign config on the controllers recorded for this substep.

        :returns: None when no controller required a commit (no foreign
            drives found); otherwise the async state returned by
            _commit_to_controllers for the 'physical_disk_conversion'
            substep.
        """
        controllers = list()
        jobs_required = False
        for controller_id in node.driver_internal_info[
                'raid_config_parameters']:
            controller_cap = clear_foreign_config(
                node, controller_id)
            controller = {
                'raid_controller': controller_id,
                'is_reboot_required': controller_cap['is_reboot_required'],
                'is_commit_required': controller_cap['is_commit_required']}
            controllers.append(controller)
            jobs_required = jobs_required or controller_cap[
                'is_commit_required']
        if not jobs_required:
            LOG.info(
                "No foreign drives detected, so "
                "resume %s", "cleaning" if node.clean_step else "deployment")
            return None
        else:
            return _commit_to_controllers(
                node,
                controllers,
                substep='physical_disk_conversion')

    def _complete_raid_substep(self, task, node):
        # Final substep: clear bookkeeping and resume cleaning/deployment.
        self._clear_raid_substep(node)
        self._resume(task)

    def _convert_drives(self, task, node):
        """Convert physical drives back to JBOD mode, when supported."""
        jbod = drac_constants.RaidStatus.jbod
        drives_results = _change_physical_disk_mode(
            node, mode=jbod)
        if drives_results is None:
            LOG.debug("Controller does not support drives "
                      "conversion on %(node_uuid)s",
                      {'node_uuid': node.uuid})
            self._complete_raid_substep(task, node)

    def _clear_raid_substep(self, node):
        """Drop the RAID substep bookkeeping from driver_internal_info."""
        driver_internal_info = node.driver_internal_info
        driver_internal_info.pop('raid_config_substep', None)
        driver_internal_info.pop('raid_config_parameters', None)
        node.driver_internal_info = driver_internal_info
        node.save()

    def _set_raid_config_job_failure(self, node):
        """Record that at least one RAID config job failed."""
        driver_internal_info = node.driver_internal_info
        driver_internal_info['raid_config_job_failure'] = True
        node.driver_internal_info = driver_internal_info
        node.save()

    def _clear_raid_config_job_failure(self, node):
        """Remove the RAID config job failure marker."""
        driver_internal_info = node.driver_internal_info
        del driver_internal_info['raid_config_job_failure']
        node.driver_internal_info = driver_internal_info
        node.save()

    def _delete_cached_config_job_id(self, node, finished_config_job_ids=None):
        """Remove finished job IDs from the cached raid_config_job_ids."""
        if finished_config_job_ids is None:
            finished_config_job_ids = []
        driver_internal_info = node.driver_internal_info
        unfinished_job_ids = [job_id for job_id
                              in driver_internal_info['raid_config_job_ids']
                              if job_id not in finished_config_job_ids]
        driver_internal_info['raid_config_job_ids'] = unfinished_job_ids
        node.driver_internal_info = driver_internal_info
        node.save()

    def _set_failed(self, task, config_job):
        """Fail the current clean or deploy step for a failed config job."""
        error_msg = (_("Failed config job: %(config_job_id)s. "
                       "Message: '%(message)s'.") %
                     {'config_job_id': config_job.id,
                      'message': config_job.message})
        log_msg = ("RAID configuration job failed for node %(node)s. "
                   "%(error)s" %
                   {'node': task.node.uuid, 'error': error_msg})
        if task.node.clean_step:
            manager_utils.cleaning_error_handler(task, error_msg)
        else:
            manager_utils.deploying_error_handler(task, log_msg, error_msg)

    def _resume(self, task):
        """Refresh node.raid_config and notify the conductor to resume."""
        raid_common.update_raid_info(
            task.node, self.get_logical_disks(task))
        if task.node.clean_step:
            manager_utils.notify_conductor_resume_clean(task)
        else:
            manager_utils.notify_conductor_resume_deploy(task)

    def _delete_configuration_no_commit(self, task):
        """Delete existing RAID configuration without committing the change.

        :param task: A TaskManager instance.
        :returns: A set of names of RAID controllers which need RAID changes to
            be committed.
        """
        node = task.node
        controllers = list()
        drac_raid_controllers = list_raid_controllers(node)
        drac_raid_settings = list_raid_settings(node)
        for cntrl in drac_raid_controllers:
            if _is_raid_controller(node, cntrl.id, drac_raid_controllers):
                controller = dict()
                # Remember eHBA controllers; they need a RAID-mode switch
                # when committing (see _commit_to_controllers).
                if _controller_supports_ehba_mode(
                        drac_raid_settings,
                        cntrl.id) and _controller_in_hba_mode(
                            drac_raid_settings, cntrl.id):
                    controller['is_ehba_mode'] = True
                controller_cap = _reset_raid_config(node, cntrl.id)
                controller["raid_controller"] = cntrl.id
                controller["is_reboot_required"] = controller_cap[
                    "is_reboot_required"]
                controller["is_commit_required"] = controller_cap[
                    "is_commit_required"]
                controllers.append(controller)
        return controllers
class DracRAID(DracWSManRAID):
    """Class alias of class DracWSManRAID.

    This class provides ongoing support of the deprecated 'idrac' RAID
    interface implementation entrypoint.

    All bug fixes and new features should be implemented in its base
    class, DracWSManRAID. That makes them available to both the
    deprecated 'idrac' and new 'idrac-wsman' entrypoints. Such changes
    should not be made to this class.
    """

    def __init__(self):
        super(DracRAID, self).__init__()
        # Warn on instantiation so operators migrate to 'idrac-wsman'.
        LOG.warning("RAID interface 'idrac' is deprecated and may be removed "
                    "in a future release. Use 'idrac-wsman' instead.")
| 40.926876 | 79 | 0.635797 |
from collections import defaultdict
import math
from futurist import periodics
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import units
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import raid as raid_common
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.conf import CONF
from ironic.drivers import base
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules.drac import common as drac_common
from ironic.drivers.modules.drac import job as drac_job
# python-dracclient is an optional dependency; these are None if absent.
drac_exceptions = importutils.try_import('dracclient.exceptions')
drac_constants = importutils.try_import('dracclient.constants')
# Module logger and metrics collector.
LOG = logging.getLogger(__name__)
METRICS = metrics_utils.get_metrics_logger(__name__)
# iDRAC RAID attribute names used to read and request the controller mode.
_CURRENT_RAID_CONTROLLER_MODE = "RAIDCurrentControllerMode"
_REQUESTED_RAID_CONTROLLER_MODE = "RAIDRequestedControllerMode"
# Controller mode values: Enhanced HBA (pass-through) vs. RAID.
_EHBA_MODE = "Enhanced HBA"
_RAID_MODE = "RAID"
# Per-RAID-level constraints: disk count limits and parity overhead for
# 'simple' levels; 'spanned' levels are built from spans of 'span_type'.
RAID_LEVELS = {
    '0': {
        'min_disks': 1,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 0
    },
    '1': {
        'min_disks': 2,
        'max_disks': 2,
        'type': 'simple',
        'overhead': 1
    },
    '5': {
        'min_disks': 3,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 1
    },
    '6': {
        'min_disks': 4,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 2
    },
    '1+0': {
        'type': 'spanned',
        'span_type': '1'
    },
    '5+0': {
        'type': 'spanned',
        'span_type': '5'
    },
    '6+0': {
        'type': 'spanned',
        'span_type': '6'
    }
}
def list_raid_controllers(node):
    """Return the RAID controllers reported by the node's iDRAC.

    :param node: an ironic node object.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        return client.list_raid_controllers()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to get the list of RAID controllers '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid, 'error': exc})
        raise exception.DracOperationError(error=exc)
def list_virtual_disks(node):
    """Return the virtual disks reported by the node's iDRAC.

    :param node: an ironic node object.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        return client.list_virtual_disks()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to get the list of virtual disks '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid, 'error': exc})
        raise exception.DracOperationError(error=exc)
def list_physical_disks(node):
    """Return the physical disks reported by the node's iDRAC.

    :param node: an ironic node object.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        return client.list_physical_disks()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to get the list of physical disks '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid, 'error': exc})
        raise exception.DracOperationError(error=exc)
def _is_raid_controller(node, raid_controller_fqdd, raid_controllers=None):
    """Return whether the given FQDD identifies a RAID controller.

    :param node: an ironic node object.
    :param raid_controller_fqdd: FQDD of the controller to check.
    :param raid_controllers: optional pre-fetched controller list to avoid
        an extra WS-Man round trip.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        return client.is_raid_controller(raid_controller_fqdd,
                                         raid_controllers)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('Unable to determine if controller %(raid_controller_fqdd)s '
                  'on node %(node_uuid)s is a RAID controller. '
                  'Reason: %(error)s. ',
                  {'raid_controller_fqdd': raid_controller_fqdd,
                   'node_uuid': node.uuid, 'error': exc})
        raise exception.DracOperationError(error=exc)
def _validate_job_queue(node, raid_controller=None):
    """Validate that no disruptive jobs are pending on the node.

    :param node: an ironic node object.
    :param raid_controller: when given, only RAID config jobs of this
        controller are considered blocking.
    """
    kwargs = {}
    if raid_controller:
        # Limit the check to this controller's RAID config jobs.
        kwargs["name_prefix"] = "Config:RAID:%s" % raid_controller
    drac_job.validate_job_queue(node, **kwargs)
def create_virtual_disk(node, raid_controller, physical_disks, raid_level,
                        size_mb, disk_name=None, span_length=None,
                        span_depth=None):
    """Create a (pending) virtual disk on a RAID controller.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the controller to create the disk on.
    :param physical_disks: IDs of the backing physical disks.
    :param raid_level: RAID level of the virtual disk.
    :param size_mb: requested size in MB.
    :param disk_name: optional name for the virtual disk.
    :param span_length: optional number of disks per span.
    :param span_depth: optional number of spans.
    :raises: DracOperationError on an error from python-dracclient.
    """
    _validate_job_queue(node, raid_controller)
    client = drac_common.get_drac_client(node)
    try:
        return client.create_virtual_disk(raid_controller, physical_disks,
                                          raid_level, size_mb, disk_name,
                                          span_length, span_depth)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to create virtual disk for node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def delete_virtual_disk(node, virtual_disk):
    """Mark a virtual disk for (pending) deletion.

    :param node: an ironic node object.
    :param virtual_disk: FQDD of the virtual disk to delete.
    :raises: DracOperationError on an error from python-dracclient.
    """
    _validate_job_queue(node)
    client = drac_common.get_drac_client(node)
    try:
        return client.delete_virtual_disk(virtual_disk)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to delete virtual disk '
                  '%(virtual_disk_fqdd)s for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'virtual_disk_fqdd': virtual_disk,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def _reset_raid_config(node, raid_controller):
    """Delete all virtual disks and unassign all hotspares on a controller.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the controller to reset.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        _validate_job_queue(node, raid_controller)
        client = drac_common.get_drac_client(node)
        return client.reset_raid_config(raid_controller)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to delete all virtual disk '
                  'and unassign all hotspares '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def clear_foreign_config(node, raid_controller):
    """Clear foreign (imported) drive configuration on a controller.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the controller to clear.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        _validate_job_queue(node, raid_controller)
        client = drac_common.get_drac_client(node)
        return client.clear_foreign_config(raid_controller)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to free foreign driver '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def set_raid_settings(node, controller_fqdd, settings):
    """Apply a set of RAID attribute values on a controller.

    :param node: an ironic node object.
    :param controller_fqdd: FQDD of the controller to configure.
    :param settings: mapping of attribute name to requested value.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        drac_job.validate_job_queue(node)
        client = drac_common.get_drac_client(node)
        return client.set_raid_settings(controller_fqdd, settings)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to set raid settings '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'raid_controller_fqdd': controller_fqdd,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def list_raid_settings(node):
    """Return the RAID attribute settings reported by the node's iDRAC.

    :param node: an ironic node object.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        drac_job.validate_job_queue(node)
        client = drac_common.get_drac_client(node)
        return client.list_raid_settings()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to list raid settings '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def change_physical_disk_state(node, mode=None,
                               controllers_to_physical_disk_ids=None):
    """Convert physical disks to RAID or JBOD mode via the iDRAC.

    :param node: an ironic node object.
    :param mode: a RaidStatus enumerated value (raid or jbod).
    :param controllers_to_physical_disk_ids: mapping of controller FQDD to
        the physical disk IDs to convert; None converts all eligible disks.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        drac_job.validate_job_queue(node)
        client = drac_common.get_drac_client(node)
        return client.change_physical_disk_state(
            mode, controllers_to_physical_disk_ids)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to change physical drives '
                  'to %(mode)s mode for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'mode': mode, 'node_uuid': node.uuid, 'error': exc})
        raise exception.DracOperationError(error=exc)
def commit_config(node, raid_controller, reboot=False, realtime=False):
    """Commit pending RAID changes on a controller as a config job.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the controller to commit.
    :param reboot: whether to reboot the node to run the job.
    :param realtime: whether to apply the changes in realtime (no reboot).
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        return client.commit_pending_raid_changes(
            raid_controller=raid_controller,
            reboot=reboot,
            realtime=realtime)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to commit pending RAID config for'
                  ' controller %(raid_controller_fqdd)s on node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def _change_physical_disk_mode(node, mode=None,
                               controllers_to_physical_disk_ids=None,
                               substep="completed"):
    """Convert physical disks and commit the change per controller.

    :param node: an ironic node object.
    :param mode: a RaidStatus enumerated value (raid or jbod).
    :param controllers_to_physical_disk_ids: mapping of controller FQDD to
        the physical disk IDs to convert; None converts all eligible disks.
    :param substep: substep recorded for the follow-up config jobs.
    :returns: whatever _commit_to_controllers returns (async wait state,
        or None when no controller required a conversion job).
    """
    change_disk_state = change_physical_disk_state(
        node, mode, controllers_to_physical_disk_ids)
    controllers = list()
    conversion_results = change_disk_state['conversion_results']
    for controller_id, result in conversion_results.items():
        controller = {'raid_controller': controller_id,
                      'is_reboot_required': result['is_reboot_required'],
                      'is_commit_required': result['is_commit_required']}
        controllers.append(controller)
    return _commit_to_controllers(
        node,
        controllers, substep=substep)
def abandon_config(node, raid_controller):
    """Discard pending RAID changes on a controller.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the controller to abandon changes on.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)
    try:
        client.abandon_pending_raid_changes(raid_controller)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to delete pending RAID config '
                  'for controller %(raid_controller_fqdd)s on node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def _calculate_spans(raid_level, disks_count):
if raid_level in ['0', '1', '5', '6']:
return 1
elif raid_level in ['5+0', '6+0']:
return 2
elif raid_level in ['1+0']:
return disks_count >> 1
else:
reason = (_('Cannot calculate spans for RAID level "%s"') %
raid_level)
raise exception.DracOperationError(error=reason)
def _usable_disks_count(raid_level, disks_count):
if raid_level in ['0', '1', '5', '6']:
return disks_count
elif raid_level in ['5+0', '6+0', '1+0']:
# largest even number less than disk_count
return (disks_count >> 1) << 1
else:
reason = (_('RAID level %(raid_level)s is not supported by the '
'driver. Supported RAID levels: %(supported_raid_levels)s')
% {'raid_level': raid_level,
'supported_raid_levels': list(RAID_LEVELS)})
raise exception.DracOperationError(error=reason)
def _raid_level_min_disks(raid_level, spans_count=1):
    """Return the minimum number of disks required by a RAID level.

    :param raid_level: RAID level key into RAID_LEVELS.
    :param spans_count: number of spans for spanned (nested) levels.
    :raises: DracOperationError on unknown levels or invalid span counts.
    """
    try:
        raid_level_info = RAID_LEVELS[raid_level]
    except KeyError:
        reason = (_('RAID level %(raid_level)s is not supported by the '
                    'driver. Supported RAID levels: %(supported_raid_levels)s')
                  % {'raid_level': raid_level,
                     'supported_raid_levels': list(RAID_LEVELS)})
        raise exception.DracOperationError(error=reason)
    if raid_level_info['type'] == 'spanned':
        if spans_count <= 1:
            reason = _('Spanned RAID volumes cannot contain a single span')
            raise exception.DracOperationError(error=reason)
        # Spanned level: per-span constraints come from the base level.
        span_type = raid_level_info['span_type']
        raid_level_info = RAID_LEVELS[span_type]
    return raid_level_info['min_disks'] * spans_count
def _raid_level_max_disks(raid_level, spans_count=1):
    """Return the maximum number of disks usable by a RAID level.

    :param raid_level: RAID level key into RAID_LEVELS.
    :param spans_count: number of spans for spanned (nested) levels.
    :raises: DracOperationError on unknown levels or invalid span counts.
    """
    try:
        raid_level_info = RAID_LEVELS[raid_level]
    except KeyError:
        reason = (_('RAID level %(raid_level)s is not supported by the '
                    'driver. Supported RAID levels: %(supported_raid_levels)s')
                  % {'raid_level': raid_level,
                     'supported_raid_levels': list(RAID_LEVELS)})
        raise exception.DracOperationError(error=reason)
    if raid_level_info['type'] == 'spanned':
        if spans_count <= 1:
            reason = _('Spanned RAID volumes cannot contain a single span')
            raise exception.DracOperationError(error=reason)
        # Spanned level: per-span constraints come from the base level.
        span_type = raid_level_info['span_type']
        raid_level_info = RAID_LEVELS[span_type]
    return raid_level_info['max_disks'] * spans_count
def _raid_level_overhead(raid_level, spans_count=1):
    """Return the number of disks consumed by parity/mirroring overhead.

    :param raid_level: RAID level key into RAID_LEVELS.
    :param spans_count: number of spans for spanned (nested) levels.
    :raises: DracOperationError on unknown levels or invalid span counts.
    """
    try:
        raid_level_info = RAID_LEVELS[raid_level]
    except KeyError:
        reason = (_('RAID level %(raid_level)s is not supported by the '
                    'driver. Supported RAID levels: %(supported_raid_levels)s')
                  % {'raid_level': raid_level,
                     'supported_raid_levels': list(RAID_LEVELS)})
        raise exception.DracOperationError(error=reason)
    if raid_level_info['type'] == 'spanned':
        if spans_count <= 1:
            reason = _('Spanned RAID volumes cannot contain a single span')
            raise exception.DracOperationError(error=reason)
        # Spanned level: overhead accrues once per span of the base level.
        span_type = raid_level_info['span_type']
        raid_level_info = RAID_LEVELS[span_type]
    return raid_level_info['overhead'] * spans_count
def _max_volume_size_mb(raid_level, physical_disks, free_space_mb,
                        spans_count=1, stripe_size_kb=64 * units.Ki):
    """Return the largest volume size (in MB) the given disks can back.

    :param raid_level: RAID level key into RAID_LEVELS.
    :param physical_disks: the physical disks backing the volume.
    :param free_space_mb: mapping of physical disk to free space in MB.
    :param spans_count: number of spans for spanned levels.
    :param stripe_size_kb: stripe size in KiB used to align the result.
    """
    # restrict the size to the smallest available space
    free_spaces = [free_space_mb[disk] for disk in physical_disks]
    size_kb = min(free_spaces) * units.Ki
    # NOTE(ifarkas): using math.floor so we get a volume size that does not
    #                exceed the available space
    stripes_per_disk = int(math.floor(float(size_kb) / stripe_size_kb))
    disks_count = len(physical_disks)
    overhead_disks_count = _raid_level_overhead(raid_level, spans_count)
    return int(stripes_per_disk * stripe_size_kb
               * (disks_count - overhead_disks_count) / units.Ki)
def _volume_usage_per_disk_mb(logical_disk, physical_disks, spans_count=1,
                              stripe_size_kb=64 * units.Ki):
    """Return how much space (MB) the volume consumes on each disk.

    :param logical_disk: logical-disk dict with 'size_mb' and 'raid_level'.
    :param physical_disks: the physical disks backing the volume.
    :param spans_count: number of spans for spanned levels.
    :param stripe_size_kb: stripe size in KiB used to align the result.
    """
    disks_count = len(physical_disks)
    overhead_disks_count = _raid_level_overhead(logical_disk['raid_level'],
                                                spans_count)
    volume_size_kb = logical_disk['size_mb'] * units.Ki
    # NOTE(ifarkas): using math.ceil so we get the largest disk usage
    #                possible, so we can avoid over-committing
    stripes_per_volume = math.ceil(float(volume_size_kb) / stripe_size_kb)
    stripes_per_disk = math.ceil(
        float(stripes_per_volume) / (disks_count - overhead_disks_count))
    return int(stripes_per_disk * stripe_size_kb / units.Ki)
def _find_configuration(logical_disks, physical_disks, pending_delete):
    """Find RAID configuration.

    Matches the requested logical disks to physical disks, filling in
    'physical_disks', sizes and span properties on each logical disk.

    :param logical_disks: list of requested logical disk dicts.
    :param physical_disks: list of physical disk objects on the node.
    :param pending_delete: True if the current configuration is scheduled
        for deletion (whole disks then count as free).
    :returns: list of processed logical disk dicts.
    :raises: exception.DracOperationError if no matching assignment exists.
    """
    # shared physical disks of RAID volumes size_gb='MAX' should be
    # deprioritized during the matching process to reserve as much space as
    # possible. Reserved means it won't be used during matching.
    volumes_with_reserved_physical_disks = [
        volume for volume in logical_disks
        if ('physical_disks' in volume and volume['size_mb'] == 'MAX'
            and volume.get('share_physical_disks', False))]
    reserved_physical_disks = [
        disk for disk in physical_disks
        for volume in volumes_with_reserved_physical_disks
        if disk.id in volume['physical_disks']]
    # bucket disks by (controller, media, interface, size) so candidates
    # for a volume are always homogeneous
    physical_disks_by_type = {}
    reserved_physical_disks_by_type = {}
    free_space_mb = {}
    for disk in physical_disks:
        free_space_mb[disk] = _get_disk_free_size_mb(disk, pending_delete)
        disk_type = (disk.controller, disk.media_type, disk.interface_type,
                     disk.size_mb)
        if disk_type not in physical_disks_by_type:
            physical_disks_by_type[disk_type] = []
            reserved_physical_disks_by_type[disk_type] = []
        if disk in reserved_physical_disks:
            reserved_physical_disks_by_type[disk_type].append(disk)
        else:
            physical_disks_by_type[disk_type].append(disk)
    # remove disks explicitly claimed by non-sharing volumes from the pool
    for volume in logical_disks:
        if ('physical_disks' in volume
                and not volume.get('share_physical_disks', False)):
            for disk in physical_disks:
                if disk.id in volume['physical_disks']:
                    disk_type = (disk.controller, disk.media_type,
                                 disk.interface_type, disk.size_mb)
                    if disk in physical_disks_by_type[disk_type]:
                        physical_disks_by_type[disk_type].remove(disk)
    processed_volumes = []
    # step 1: process volumes with a fixed size and explicit disks
    for volume in [volume for volume in logical_disks
                   if ('physical_disks' in volume
                       and volume['size_mb'] != 'MAX')]:
        _calculate_volume_props(volume, physical_disks, free_space_mb)
        processed_volumes.append(volume)
    # step 2: assign disks to volumes that did not name any
    volumes_without_disks = [disk for disk in logical_disks
                             if 'physical_disks' not in disk]
    if volumes_without_disks:
        result, free_space_mb = (
            _assign_disks_to_volume(volumes_without_disks,
                                    physical_disks_by_type, free_space_mb,
                                    pending_delete))
        if not result:
            # re-try with the previously reserved disks added to the pool
            for disk_type, disks in physical_disks_by_type.items():
                physical_disks_by_type[disk_type] += (
                    reserved_physical_disks_by_type[disk_type])
            result, free_space_mb = (
                _assign_disks_to_volume(volumes_without_disks,
                                        physical_disks_by_type,
                                        free_space_mb,
                                        pending_delete))
            if not result:
                error_msg = _('failed to find matching physical disks for all '
                              'logical disks')
                LOG.error('DRAC driver failed to create RAID '
                          'configuration. Reason: %(error)s.',
                          {'error': error_msg})
                raise exception.DracOperationError(error=error_msg)
        processed_volumes += volumes_without_disks
    # step 3: 'MAX'-sized volumes last, so they get whatever space is left
    for volume in [volume for volume in logical_disks
                   if ('physical_disks' in volume
                       and volume['size_mb'] == 'MAX')]:
        _calculate_volume_props(volume, physical_disks, free_space_mb)
        processed_volumes.append(volume)
    return processed_volumes
def _calculate_volume_props(logical_disk, physical_disks, free_space_mb):
    """Calculate span and size properties of a logical disk, in place.

    Mutates *logical_disk* (sets 'span_depth', 'span_length', resolves a
    'MAX' size, defaults 'controller') and subtracts the resulting usage
    from *free_space_mb*.

    :param logical_disk: logical disk dict naming its 'physical_disks'.
    :param physical_disks: all physical disk objects of the node.
    :param free_space_mb: mapping of disk -> free space in MiB; updated.
    :raises: exception.DracOperationError on invalid disk counts or
        insufficient space.
    """
    selected_disks = [disk for disk in physical_disks
                      if disk.id in logical_disk['physical_disks']]
    spans_count = _calculate_spans(
        logical_disk['raid_level'], len(selected_disks))
    # every span must contain the same number of disks
    if len(selected_disks) % spans_count != 0:
        error_msg = _('invalid number of physical disks was provided')
        raise exception.DracOperationError(error=error_msg)
    disks_per_span = int(len(selected_disks) / spans_count)
    # span hints are left unset for RAID 1+0; the controller derives them
    logical_disk['span_depth'] = None
    logical_disk['span_length'] = None
    if logical_disk['raid_level'] != '1+0':
        logical_disk['span_depth'] = spans_count
        logical_disk['span_length'] = disks_per_span
    max_volume_size_mb = _max_volume_size_mb(
        logical_disk['raid_level'], selected_disks, free_space_mb,
        spans_count=spans_count)
    if logical_disk['size_mb'] == 'MAX':
        if max_volume_size_mb == 0:
            error_msg = _("size set to 'MAX' but could not allocate physical "
                          "disk space")
            raise exception.DracOperationError(error=error_msg)
        # 'MAX' resolves to the largest size the selected disks allow
        logical_disk['size_mb'] = max_volume_size_mb
    elif max_volume_size_mb < logical_disk['size_mb']:
        if max_volume_size_mb == 0:
            error_msg = _('not enough physical disk space for the logical '
                          'disk')
            raise exception.DracOperationError(error=error_msg)
    disk_usage = _volume_usage_per_disk_mb(logical_disk, selected_disks,
                                           spans_count=spans_count)
    # charge the usage against every member disk
    for disk in selected_disks:
        if free_space_mb[disk] < disk_usage:
            error_msg = _('not enough free space on physical disks for the '
                          'logical disk')
            raise exception.DracOperationError(error=error_msg)
        else:
            free_space_mb[disk] -= disk_usage
    if 'controller' not in logical_disk:
        logical_disk['controller'] = selected_disks[0].controller
def _assign_disks_to_volume(logical_disks, physical_disks_by_type,
                            free_space_mb, pending_delete):
    """Recursively assign physical disks to the pending logical disks.

    Backtracking search: pops the first volume, tries every disk-type
    bucket and disk count, and recurses for the remaining volumes.  On
    success the volume (with 'physical_disks' filled in) is appended back
    to *logical_disks*; on failure it is re-inserted at the front and the
    original *free_space_mb* is returned unchanged.

    :returns: tuple (success, free_space_mb mapping).
    """
    logical_disk = logical_disks.pop(0)
    raid_level = logical_disk['raid_level']
    # candidate physical disks have to be members of the same collection
    for (controller, disk_type,
         interface_type, size_mb), disks in physical_disks_by_type.items():
        if ('disk_type' in logical_disk
                and logical_disk['disk_type'] != disk_type):
            continue
        if ('interface_type' in logical_disk
                and logical_disk['interface_type'] != interface_type):
            continue
        # filter out disks without free disk space
        disks = [disk for disk in disks if free_space_mb[disk] > 0]
        # sort disks by free size which is important if we have max disks limit
        # on a volume
        disks = sorted(
            disks,
            key=lambda disk: free_space_mb[disk])
        # filter out disks already in use if sharing is disabled
        if ('share_physical_disks' not in logical_disk
                or not logical_disk['share_physical_disks']):
            initial_free_size_mb = {
                disk: _get_disk_free_size_mb(disk, pending_delete)
                for disk in disks
            }
            disks = [disk for disk in disks
                     if initial_free_size_mb[disk] == free_space_mb[disk]]
        max_spans = _calculate_spans(raid_level, len(disks))
        min_spans = min([2, max_spans])
        min_disks = _raid_level_min_disks(raid_level,
                                          spans_count=min_spans)
        max_disks = _raid_level_max_disks(raid_level,
                                          spans_count=max_spans)
        candidate_max_disks = min([max_disks, len(disks)])
        for disks_count in range(min_disks, candidate_max_disks + 1):
            if ('number_of_physical_disks' in logical_disk
                    and (logical_disk['number_of_physical_disks']
                         != disks_count)):
                continue
            # skip invalid disks_count
            if disks_count != _usable_disks_count(logical_disk['raid_level'],
                                                  disks_count):
                continue
            selected_disks = disks[0:disks_count]
            candidate_volume = logical_disk.copy()
            candidate_free_space_mb = free_space_mb.copy()
            candidate_volume['physical_disks'] = [disk.id for disk
                                                  in selected_disks]
            try:
                _calculate_volume_props(candidate_volume, selected_disks,
                                        candidate_free_space_mb)
            except exception.DracOperationError:
                continue
            if len(logical_disks) > 0:
                result, candidate_free_space_mb = (
                    _assign_disks_to_volume(logical_disks,
                                            physical_disks_by_type,
                                            candidate_free_space_mb,
                                            pending_delete))
                if result:
                    logical_disks.append(candidate_volume)
                    return (True, candidate_free_space_mb)
            else:
                logical_disks.append(candidate_volume)
                return (True, candidate_free_space_mb)
    else:
        # put back the logical_disk to queue
        logical_disks.insert(0, logical_disk)
        return (False, free_space_mb)
def _filter_logical_disks(logical_disks, include_root_volume,
include_nonroot_volumes):
filtered_disks = []
for disk in logical_disks:
if include_root_volume and disk.get('is_root_volume'):
filtered_disks.append(disk)
if include_nonroot_volumes and not disk.get('is_root_volume'):
filtered_disks.append(disk)
return filtered_disks
def _create_config_job(node, controller, reboot=False, realtime=False,
                       raid_config_job_ids=None,
                       raid_config_parameters=None):
    """Commit pending changes on one RAID controller and record the job.

    :param node: the node the controller belongs to.
    :param controller: FQDD of the RAID controller.
    :param reboot: whether the config job should trigger a reboot.
    :param realtime: whether to apply the changes in realtime.
    :param raid_config_job_ids: accumulator list of created job ids;
        a fresh list is used when not supplied.
    :param raid_config_parameters: accumulator list of touched
        controllers; a fresh list is used when not supplied.
    :returns: dict with 'raid_config_job_ids' and 'raid_config_parameters'.
    """
    # NOTE: the previous mutable-default arguments ([]) were shared
    # between calls, accumulating job ids across unrelated invocations.
    if raid_config_job_ids is None:
        raid_config_job_ids = []
    if raid_config_parameters is None:
        raid_config_parameters = []
    job_id = commit_config(node, raid_controller=controller,
                           reboot=reboot, realtime=realtime)
    raid_config_job_ids.append(job_id)
    if controller not in raid_config_parameters:
        raid_config_parameters.append(controller)
    LOG.info('Change has been committed to RAID controller '
             '%(controller)s on node %(node)s. '
             'DRAC job id: %(job_id)s',
             {'controller': controller, 'node': node.uuid,
              'job_id': job_id})
    return {'raid_config_job_ids': raid_config_job_ids,
            'raid_config_parameters': raid_config_parameters}
def _validate_volume_size(node, logical_disks):
    """Re-check requested volume sizes against current free disk space.

    Re-reads the physical disks and, for every logical disk whose
    requested size no longer fits, shrinks it to the new maximum and
    recalculates its volume properties in place.

    :param node: the node being configured.
    :param logical_disks: logical disk dicts with 'physical_disks' set.
    :returns: the adjusted volumes if any were changed, otherwise the
        original *logical_disks* list.
    """
    new_physical_disks = list_physical_disks(node)
    free_space_mb = {}
    new_processed_volumes = []
    for disk in new_physical_disks:
        free_space_mb[disk] = disk.free_size_mb
    for logical_disk in logical_disks:
        selected_disks = [disk for disk in new_physical_disks
                          if disk.id in logical_disk['physical_disks']]
        spans_count = _calculate_spans(
            logical_disk['raid_level'], len(selected_disks))
        new_max_vol_size_mb = _max_volume_size_mb(
            logical_disk['raid_level'],
            selected_disks,
            free_space_mb,
            spans_count=spans_count)
        if logical_disk['size_mb'] > new_max_vol_size_mb:
            # shrink to what currently fits and recompute the properties
            logical_disk['size_mb'] = new_max_vol_size_mb
            LOG.info("Logical size does not match so calculating volume "
                     "properties for current logical_disk")
            _calculate_volume_props(
                logical_disk, new_physical_disks, free_space_mb)
            new_processed_volumes.append(logical_disk)
    if new_processed_volumes:
        return new_processed_volumes
    return logical_disks
def _switch_to_raid_mode(node, controller_fqdd):
    """Request RAID mode on the given controller.

    :param node: the node the controller belongs to.
    :param controller_fqdd: FQDD of the controller to switch.
    :returns: dict describing the controller and whether a reboot and/or
        a commit is required to apply the change.
    """
    # wait for pending jobs before touching controller settings
    drac_job.wait_for_job_completion(node)
    attr_name = "{}:{}".format(controller_fqdd,
                               _REQUESTED_RAID_CONTROLLER_MODE)
    results = set_raid_settings(
        node, controller_fqdd, {attr_name: _RAID_MODE})
    return {
        'raid_controller': controller_fqdd,
        'is_reboot_required': results['is_reboot_required'],
        'is_commit_required': results['is_commit_required']}
def _commit_to_controllers(node, controllers, substep="completed"):
    """Commit pending RAID changes on the given controllers.

    Creates config jobs (realtime where possible, with a reboot on the
    last controller otherwise), records the job ids and the next substep
    in driver_internal_info, and flags the async step.

    :param node: the node being configured.
    :param controllers: dicts with 'raid_controller',
        'is_reboot_required', 'is_commit_required' and optionally
        'is_ehba_mode'.
    :param substep: name of the substep to resume at once jobs finish.
    :returns: None when nothing needed committing, otherwise the async
        step return state.
    """
    # drop controllers that have nothing to commit
    controllers = [controller for controller in controllers
                   if controller['is_commit_required']]
    if not controllers:
        LOG.debug('No changes on any of the controllers on node %s',
                  node.uuid)
        driver_internal_info = node.driver_internal_info
        driver_internal_info['raid_config_substep'] = substep
        driver_internal_info['raid_config_parameters'] = []
        node.driver_internal_info = driver_internal_info
        node.save()
        return
    driver_internal_info = node.driver_internal_info
    driver_internal_info['raid_config_substep'] = substep
    driver_internal_info['raid_config_parameters'] = []
    if 'raid_config_job_ids' not in driver_internal_info:
        driver_internal_info['raid_config_job_ids'] = []
    optional = drac_constants.RebootRequired.optional
    # realtime path only when every controller supports it and none is
    # in eHBA mode
    all_realtime = all(
        (cntlr['is_reboot_required'] == optional)
        and not(cntlr.get('is_ehba_mode'))
        for cntlr in controllers)
    any_ehba_controllers = any(
        cntrl.get('is_ehba_mode') is True for cntrl in controllers)
    raid_config_job_ids = []
    raid_config_parameters = []
    if all_realtime:
        for controller in controllers:
            realtime_controller = controller['raid_controller']
            job_details = _create_config_job(
                node, controller=realtime_controller,
                reboot=False, realtime=True,
                raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)
    elif any_ehba_controllers:
        commit_to_ehba_controllers = []
        for controller in controllers:
            if controller.get('is_ehba_mode'):
                # commit, then request RAID mode; the mode switch itself
                # is committed in the reboot pass below
                job_details = _create_config_job(
                    node, controller=controller['raid_controller'],
                    reboot=False, realtime=True,
                    raid_config_job_ids=raid_config_job_ids,
                    raid_config_parameters=raid_config_parameters)
                ehba_controller = _switch_to_raid_mode(
                    node, controller['raid_controller'])
                commit_to_ehba_controllers.append(
                    ehba_controller['raid_controller'])
            else:
                job_details = _create_config_job(
                    node, controller=controller['raid_controller'],
                    reboot=False, realtime=False,
                    raid_config_job_ids=raid_config_job_ids,
                    raid_config_parameters=raid_config_parameters)
        for controller in commit_to_ehba_controllers:
            LOG.debug("Create job with Reboot to apply configuration "
                      "changes for ehba controllers")
            # only the last job carries the reboot flag
            job_details = _create_config_job(
                node, controller=controller,
                reboot=(controller == commit_to_ehba_controllers[-1]),
                realtime=False, raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)
    else:
        for controller in controllers:
            mix_controller = controller['raid_controller']
            reboot = (controller == controllers[-1])
            job_details = _create_config_job(
                node, controller=mix_controller,
                reboot=reboot, realtime=False,
                raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)
    driver_internal_info['raid_config_job_ids'].extend(job_details[
        'raid_config_job_ids'])
    driver_internal_info['raid_config_parameters'].extend(job_details[
        'raid_config_parameters'])
    node.driver_internal_info = driver_internal_info
    # a reboot is only needed when the realtime path was not taken
    deploy_utils.set_async_step_flags(
        node,
        reboot=not all_realtime,
        skip_current_step=True,
        polling=True)
    return deploy_utils.get_async_step_return_state(node)
def _create_virtual_disks(task, node):
    """Create the virtual disks recorded in driver_internal_info.

    :param task: the TaskManager task for the node.
    :param node: the node to create the virtual disks on.
    :returns: the result of committing the changes to the controllers.
    """
    info = node.driver_internal_info
    pending_disks = info['logical_disks_to_create']
    if info['volume_validation']:
        # re-check requested sizes against the current free space first
        pending_disks = _validate_volume_size(node, pending_disks)
    controllers = []
    for spec in pending_disks:
        caps = create_virtual_disk(
            node,
            raid_controller=spec['controller'],
            physical_disks=spec['physical_disks'],
            raid_level=spec['raid_level'],
            size_mb=spec['size_mb'],
            disk_name=spec.get('name'),
            span_length=spec.get('span_length'),
            span_depth=spec.get('span_depth'))
        entry = {
            'raid_controller': spec['controller'],
            'is_reboot_required': caps['is_reboot_required'],
            'is_commit_required': caps['is_commit_required'],
        }
        # one entry per controller/capability combination
        if entry not in controllers:
            controllers.append(entry)
    return _commit_to_controllers(node, controllers)
def _controller_in_hba_mode(raid_settings, controller_fqdd):
    """Return True when the controller is currently in eHBA mode.

    :param raid_settings: mapping of RAID setting names to attributes.
    :param controller_fqdd: FQDD of the controller to check.
    """
    mode_attr = '{}:{}'.format(controller_fqdd,
                               _CURRENT_RAID_CONTROLLER_MODE)
    current_mode = raid_settings.get(mode_attr)
    return _EHBA_MODE in current_mode.current_value
def _controller_supports_ehba_mode(settings, controller_fqdd):
    """Return True when the controller can be switched to eHBA mode.

    :param settings: mapping of RAID setting names to attributes.
    :param controller_fqdd: FQDD of the controller to check.
    """
    mode_attr = '{}:{}'.format(controller_fqdd,
                               _CURRENT_RAID_CONTROLLER_MODE)
    current_mode = settings.get(mode_attr)
    # no mode attribute at all means the controller cannot do eHBA
    if not current_mode:
        return False
    return _EHBA_MODE in current_mode.possible_values
def _get_disk_free_size_mb(disk, pending_delete):
return disk.size_mb if pending_delete else disk.free_size_mb
class DracWSManRAID(base.RAIDInterface):
    """RAID interface for iDRAC nodes driven over WS-Man."""
    def get_properties(self):
        """Return the driver properties common to the DRAC interfaces."""
        return drac_common.COMMON_PROPERTIES
    @base.deploy_step(priority=0,
                      argsinfo=base.RAID_APPLY_CONFIGURATION_ARGSINFO)
    def apply_configuration(self, task, raid_config, create_root_volume=True,
                            create_nonroot_volumes=False,
                            delete_existing=True):
        """Deploy step: apply *raid_config* via the generic base logic."""
        return super(DracWSManRAID, self).apply_configuration(
            task, raid_config, create_root_volume=create_root_volume,
            create_nonroot_volumes=create_nonroot_volumes,
            delete_existing=delete_existing)
    @METRICS.timer('DracRAID.create_configuration')
    @base.clean_step(priority=0, abortable=False, argsinfo={
        'create_root_volume': {
            'description': (
                'This specifies whether to create the root volume. '
                'Defaults to `True`.'
            ),
            'required': False
        },
        'create_nonroot_volumes': {
            'description': (
                'This specifies whether to create the non-root volumes. '
                'Defaults to `True`.'
            ),
            'required': False
        },
        "delete_existing": {
            "description": (
                "Setting this to 'True' indicates to delete existing RAID "
                "configuration prior to creating the new configuration. "
                "Default value is 'False'."
            ),
            "required": False,
        }
    })
    def create_configuration(self, task,
                             create_root_volume=True,
                             create_nonroot_volumes=True,
                             delete_existing=False):
        """Clean step: build the node's target RAID configuration.

        Resolves target_raid_config into concrete volumes, converts the
        backing physical disks to RAID mode where supported, and creates
        the virtual disks (possibly asynchronously via config jobs).

        :raises: exception.InvalidParameterValue when a 'MAX'-sized
            volume does not name its physical disks.
        """
        node = task.node
        logical_disks = node.target_raid_config['logical_disks']
        for disk in logical_disks:
            if disk['size_gb'] == 'MAX' and 'physical_disks' not in disk:
                raise exception.InvalidParameterValue(
                    _("create_configuration called with invalid "
                      "target_raid_configuration for node %(node_id)s. "
                      "'physical_disks' is missing from logical_disk while "
                      "'size_gb'='MAX' was requested: "
                      "%(logical_disk)s") % {'node_id': node.uuid,
                                             'logical_disk': disk})
            # internally everything is handled in MiB
            if disk['size_gb'] == 'MAX':
                disk['size_mb'] = 'MAX'
            else:
                disk['size_mb'] = disk['size_gb'] * units.Ki
            del disk['size_gb']
        if delete_existing:
            self._delete_configuration_no_commit(task)
        physical_disks = list_physical_disks(node)
        logical_disks = _find_configuration(logical_disks, physical_disks,
                                            pending_delete=delete_existing)
        logical_disks_to_create = _filter_logical_disks(
            logical_disks, create_root_volume, create_nonroot_volumes)
        # group the backing disk ids per controller for the mode change
        controllers_to_physical_disk_ids = defaultdict(list)
        for logical_disk in logical_disks_to_create:
            # JBOD volumes need no conversion to RAID mode
            if logical_disk['raid_level'] == 'JBOD':
                continue
            for physical_disk_name in logical_disk['physical_disks']:
                controllers_to_physical_disk_ids[
                    logical_disk['controller']].append(
                    physical_disk_name)
        driver_internal_info = node.driver_internal_info
        driver_internal_info[
            "logical_disks_to_create"] = logical_disks_to_create
        commit_results = None
        if logical_disks_to_create:
            LOG.debug(
                "Converting physical disks configured to back RAID "
                "logical disks to RAID mode for node %(node_uuid)s ",
                {"node_uuid": node.uuid})
            raid_mode = drac_constants.RaidStatus.raid
            commit_results = _change_physical_disk_mode(
                node, raid_mode,
                controllers_to_physical_disk_ids,
                substep="create_virtual_disks")
        # a mode change implies the sizes must be validated again later
        volume_validation = True if commit_results else False
        driver_internal_info['volume_validation'] = volume_validation
        node.driver_internal_info = driver_internal_info
        node.save()
        if commit_results:
            return commit_results
        else:
            LOG.debug("Controller does not support drives conversion "
                      "so creating virtual disks")
            return _create_virtual_disks(task, node)
    @METRICS.timer('DracRAID.delete_configuration')
    @base.clean_step(priority=0)
    @base.deploy_step(priority=0)
    def delete_configuration(self, task):
        """Delete the RAID configuration on every RAID controller."""
        controllers = self._delete_configuration_no_commit(task)
        return _commit_to_controllers(task.node, controllers,
                                      substep="delete_foreign_config")
    @METRICS.timer('DracRAID.get_logical_disks')
    def get_logical_disks(self, task):
        """Return the node's current virtual disks as a raid_config dict."""
        node = task.node
        logical_disks = []
        for disk in list_virtual_disks(node):
            logical_disk = {
                'id': disk.id,
                'controller': disk.controller,
                'size_gb': int(disk.size_mb / units.Ki),
                'raid_level': disk.raid_level
            }
            if disk.name is not None:
                logical_disk['name'] = disk.name
            logical_disks.append(logical_disk)
        return {'logical_disks': logical_disks}
    @METRICS.timer('DracRAID._query_raid_config_job_status')
    @periodics.periodic(
        spacing=CONF.drac.query_raid_config_job_status_interval)
    def _query_raid_config_job_status(self, manager, context):
        """Periodic task polling outstanding RAID config jobs on nodes."""
        filters = {'reserved': False, 'maintenance': False}
        fields = ['driver_internal_info']
        node_list = manager.iter_nodes(fields=fields, filters=filters)
        for (node_uuid, driver, conductor_group,
             driver_internal_info) in node_list:
            try:
                lock_purpose = 'checking async raid configuration jobs'
                with task_manager.acquire(context, node_uuid,
                                          purpose=lock_purpose,
                                          shared=True) as task:
                    # only nodes using this RAID interface are relevant
                    if not isinstance(task.driver.raid, DracWSManRAID):
                        continue
                    job_ids = driver_internal_info.get('raid_config_job_ids')
                    if not job_ids:
                        continue
                    self._check_node_raid_jobs(task)
            except exception.NodeNotFound:
                LOG.info("During query_raid_config_job_status, node "
                         "%(node)s was not found and presumed deleted by "
                         "another process.", {'node': node_uuid})
            except exception.NodeLocked:
                LOG.info("During query_raid_config_job_status, node "
                         "%(node)s was already locked by another process. "
                         "Skip.", {'node': node_uuid})
    @METRICS.timer('DracRAID._check_node_raid_jobs')
    def _check_node_raid_jobs(self, task):
        """Check job status and advance the RAID substep state machine."""
        node = task.node
        raid_config_job_ids = node.driver_internal_info['raid_config_job_ids']
        finished_job_ids = []
        for config_job_id in raid_config_job_ids:
            config_job = drac_job.get_job(node, job_id=config_job_id)
            if config_job is None or config_job.status == 'Completed':
                finished_job_ids.append(config_job_id)
            elif config_job.status == 'Failed':
                finished_job_ids.append(config_job_id)
                self._set_raid_config_job_failure(node)
        if not finished_job_ids:
            return
        # an exclusive lock is needed before mutating the node below
        task.upgrade_lock()
        self._delete_cached_config_job_id(node, finished_job_ids)
        if not node.driver_internal_info.get('raid_config_job_failure',
                                             False):
            if 'raid_config_substep' in node.driver_internal_info:
                substep = node.driver_internal_info['raid_config_substep']
                if substep == 'delete_foreign_config':
                    foreign_drives = self._execute_foreign_drives(task, node)
                    if foreign_drives is None:
                        return self._convert_drives(task, node)
                elif substep == 'physical_disk_conversion':
                    self._convert_drives(task, node)
                elif substep == "create_virtual_disks":
                    return _create_virtual_disks(task, node)
                elif substep == 'completed':
                    self._complete_raid_substep(task, node)
            else:
                self._complete_raid_substep(task, node)
        else:
            self._clear_raid_substep(node)
            self._clear_raid_config_job_failure(node)
            self._set_failed(task, config_job)
    def _execute_foreign_drives(self, task, node):
        """Clear foreign config on recorded controllers.

        :returns: None when no clearing jobs were needed, otherwise the
            result of committing the clearing jobs.
        """
        controllers = list()
        jobs_required = False
        for controller_id in node.driver_internal_info[
                'raid_config_parameters']:
            controller_cap = clear_foreign_config(
                node, controller_id)
            controller = {
                'raid_controller': controller_id,
                'is_reboot_required': controller_cap['is_reboot_required'],
                'is_commit_required': controller_cap['is_commit_required']}
            controllers.append(controller)
            jobs_required = jobs_required or controller_cap[
                'is_commit_required']
        if not jobs_required:
            LOG.info(
                "No foreign drives detected, so "
                "resume %s", "cleaning" if node.clean_step else "deployment")
            return None
        else:
            return _commit_to_controllers(
                node,
                controllers,
                substep='physical_disk_conversion')
    def _complete_raid_substep(self, task, node):
        """Clear substep bookkeeping and resume the clean/deploy step."""
        self._clear_raid_substep(node)
        self._resume(task)
    def _convert_drives(self, task, node):
        """Convert drives back to JBOD mode, then finish the substep."""
        jbod = drac_constants.RaidStatus.jbod
        drives_results = _change_physical_disk_mode(
            node, mode=jbod)
        if drives_results is None:
            LOG.debug("Controller does not support drives "
                      "conversion on %(node_uuid)s",
                      {'node_uuid': node.uuid})
            self._complete_raid_substep(task, node)
    def _clear_raid_substep(self, node):
        """Remove the RAID substep bookkeeping from the node."""
        driver_internal_info = node.driver_internal_info
        driver_internal_info.pop('raid_config_substep', None)
        driver_internal_info.pop('raid_config_parameters', None)
        node.driver_internal_info = driver_internal_info
        node.save()
    def _set_raid_config_job_failure(self, node):
        """Flag that at least one RAID config job failed."""
        driver_internal_info = node.driver_internal_info
        driver_internal_info['raid_config_job_failure'] = True
        node.driver_internal_info = driver_internal_info
        node.save()
    def _clear_raid_config_job_failure(self, node):
        """Remove the RAID config job failure flag."""
        driver_internal_info = node.driver_internal_info
        del driver_internal_info['raid_config_job_failure']
        node.driver_internal_info = driver_internal_info
        node.save()
    def _delete_cached_config_job_id(self, node, finished_config_job_ids=None):
        """Drop finished job ids from the cached raid_config_job_ids."""
        if finished_config_job_ids is None:
            finished_config_job_ids = []
        driver_internal_info = node.driver_internal_info
        unfinished_job_ids = [job_id for job_id
                              in driver_internal_info['raid_config_job_ids']
                              if job_id not in finished_config_job_ids]
        driver_internal_info['raid_config_job_ids'] = unfinished_job_ids
        node.driver_internal_info = driver_internal_info
        node.save()
    def _set_failed(self, task, config_job):
        """Fail the current clean or deploy step with the job details."""
        error_msg = (_("Failed config job: %(config_job_id)s. "
                       "Message: '%(message)s'.") %
                     {'config_job_id': config_job.id,
                      'message': config_job.message})
        log_msg = ("RAID configuration job failed for node %(node)s. "
                   "%(error)s" %
                   {'node': task.node.uuid, 'error': error_msg})
        if task.node.clean_step:
            manager_utils.cleaning_error_handler(task, error_msg)
        else:
            manager_utils.deploying_error_handler(task, log_msg, error_msg)
    def _resume(self, task):
        """Publish the resulting raid_config and resume clean/deploy."""
        raid_common.update_raid_info(
            task.node, self.get_logical_disks(task))
        if task.node.clean_step:
            manager_utils.notify_conductor_resume_clean(task)
        else:
            manager_utils.notify_conductor_resume_deploy(task)
    def _delete_configuration_no_commit(self, task):
        """Reset RAID config on each controller without committing jobs.

        :returns: list of controller dicts describing the reboot/commit
            requirements of the reset, with 'is_ehba_mode' set where the
            controller currently runs in eHBA mode.
        """
        node = task.node
        controllers = list()
        drac_raid_controllers = list_raid_controllers(node)
        drac_raid_settings = list_raid_settings(node)
        for cntrl in drac_raid_controllers:
            if _is_raid_controller(node, cntrl.id, drac_raid_controllers):
                controller = dict()
                if _controller_supports_ehba_mode(
                        drac_raid_settings,
                        cntrl.id) and _controller_in_hba_mode(
                            drac_raid_settings, cntrl.id):
                    controller['is_ehba_mode'] = True
                controller_cap = _reset_raid_config(node, cntrl.id)
                controller["raid_controller"] = cntrl.id
                controller["is_reboot_required"] = controller_cap[
                    "is_reboot_required"]
                controller["is_commit_required"] = controller_cap[
                    "is_commit_required"]
                controllers.append(controller)
        return controllers
class DracRAID(DracWSManRAID):
    """Deprecated alias kept for the legacy 'idrac' RAID interface name."""
    def __init__(self):
        super(DracRAID, self).__init__()
        # warn once per instantiation so operators migrate their config
        LOG.warning("RAID interface 'idrac' is deprecated and may be removed "
                    "in a future release. Use 'idrac-wsman' instead.")
| true | true |
f7319a3273ea4a13ac90d44d92e546e36cb45026 | 20,249 | py | Python | Decompiler/BattleActionScript.py | AGraber/EDDecompiler | 6e00c7c6ba8a12cea7722fcef34ed8ff4ac6bc66 | [
"MIT"
] | 5 | 2021-05-23T19:34:38.000Z | 2021-12-05T05:57:36.000Z | Decompiler/BattleActionScript.py | AGraber/EDDecompiler | 6e00c7c6ba8a12cea7722fcef34ed8ff4ac6bc66 | [
"MIT"
] | null | null | null | Decompiler/BattleActionScript.py | AGraber/EDDecompiler | 6e00c7c6ba8a12cea7722fcef34ed8ff4ac6bc66 | [
"MIT"
] | 1 | 2021-05-06T05:50:14.000Z | 2021-05-06T05:50:14.000Z | from Assembler.Assembler2s import *
from Base.EDAOBase import *
import Instruction.ActionOpTableEDAO as edao
import BattleMonsterStatus as MSFile
INVALID_ACTION_OFFSET = 0xFFFF
EMPTY_ACTION = INVALID_ACTION_OFFSET
class CharacterPositionFactor:
def __init__(self, fs = None):
if fs == None:
return
self.X = fs.ReadByte()
self.Y = fs.ReadByte()
class BattleActionScriptInfo:
    """Reader/decompiler for battle action script files (as*.dat)."""
    # file kinds, detected from the file name in open()
    ActionFileType_Normal = 0
    ActionFileType_Arts = 1
    ActionFileType_Item = 2
    def __init__(self):
        self.ChrPosFactorOffset = 0
        self.ActionListOffset = 0
        self.UnknownTableOffset = 0
        self.ActionStartOffset = 0
        self.PreloadChipList = []       # ChipFileIndex entries
        self.ModelFileList = []
        self.UnknownTableList = []
        self.ActionList = []            # per-action offsets into the file
        self.ChrPosFactor = []          # CharacterPositionFactor entries
        self.CraftActions = []          # disassembled CodeBlock entries
        self.GlobalLabelTable = {}
        self.ChrName = None
        self.ASFileName = ''
        self.ActionFileType = self.ActionFileType_Normal
    def open(self, asname):
        """Parse the action file *asname* and disassemble its crafts.

        :raises: Exception when no action offsets could be read.
        """
        fs = fileio.FileStream()
        fs.Open(asname)
        self.ASFileName = asname
        # the file kind is keyed off the (case-insensitive) file name
        asname = os.path.basename(asname).lower()
        if asname == 'as90000.dat':
            self.ActionFileType = self.ActionFileType_Arts
        elif asname == 'as90001.dat':
            self.ActionFileType = self.ActionFileType_Item
        else:
            self.ActionFileType = self.ActionFileType_Normal
        self.ActionListOffset = fs.ReadUShort()
        if self.ActionFileType == self.ActionFileType_Normal:
            self.ChrPosFactorOffset = fs.ReadUShort()
            self.UnknownTableOffset = fs.ReadUShort()
            # preload chip list is 0xFFFFFFFF-terminated
            while True:
                index = fs.ReadULong()
                if index == 0xFFFFFFFF:
                    break
                self.PreloadChipList.append(ChipFileIndex(index))
            fs.seek(self.ChrPosFactorOffset)
            for i in range(8):
                self.ChrPosFactor.append(CharacterPositionFactor(fs))
        minoffset = 0xFFFFFFFF
        fs.seek(self.ActionListOffset)
        # read offsets until the table runs into the first action body
        while True:
            if fs.tell() >= minoffset:
                break
            offset = fs.ReadUShort()
            if offset == 0:
                break
            minoffset = min(minoffset, offset)
            self.ActionList.append(offset)
        if len(self.ActionList) == 0:
            raise Exception('action number == 0')
        self.CraftActions = self.DisassembleCraftActions(fs)
        return
        # NOTE(review): everything below the bare return is unreachable
        # debug leftovers (references an undefined 'offsetlist' here).
        for i in range(0x69, fs.Length):
            if i not in offsetlist:
                print('%X' % i)
                #input()
        input()
    def GetBuiltinNames(self):
        """Return well-known names for the actions of this file type.

        Arts files pull display names out of t_magic._dt; normal files
        use the fixed system-craft slot names; item files get none.
        """
        if self.ActionFileType == self.ActionFileType_Arts:
            BuiltinArtsNames = []
            # NOTE(review): bare except below deliberately degrades to an
            # empty name list when t_magic._dt is missing or malformed.
            try:
                offsetlist = []
                t_magic = os.path.abspath(os.path.dirname(os.path.abspath(self.ASFileName)) + '\\..\\..\\text\\t_magic._dt')
                if not os.path.exists(t_magic):
                    # fall back from patch dirs to the base data dir
                    if t_magic.endswith('\\patch\\text\\t_magic._dt'):
                        t_magic = t_magic.replace('\\patch\\text\\t_magic._dt', '\\data\\text\\t_magic._dt')
                    elif t_magic.endswith('\\patch2\\text\\t_magic._dt'):
                        t_magic = t_magic.replace('\\patch2\\text\\t_magic._dt', '\\data\\text\\t_magic._dt')
                magic = fileio.FileStream()
                magic.Open(t_magic)
                for i in range(len(self.ActionList)):
                    offsetlist.append(magic.ReadUShort())
                    BuiltinArtsNames.append('')
                NameConflict = {}
                for i in range(len(offsetlist)):
                    offset = offsetlist[i]
                    # skip truncated records (< 0x1C bytes)
                    if i != len(offsetlist) - 1 and offsetlist[i + 1] - offset < 0x1C:
                        continue
                    #print('%X' % offset)
                    magic.seek(offset + 0x18)
                    offset = magic.ReadUShort()
                    if offset == 0:
                        continue
                    magic.seek(offset)
                    name = magic.ReadMultiByte().replace(' ', '')
                    if name == '':
                        continue
                    # de-duplicate repeated names with a numeric suffix
                    if name not in NameConflict:
                        NameConflict[name] = 1
                    else:
                        NameConflict[name] += 1
                        name += '_%d' % NameConflict[name]
                    BuiltinArtsNames[i] = name
            except:
                BuiltinArtsNames = []
            return BuiltinArtsNames
        elif self.ActionFileType == self.ActionFileType_Normal:
            # fixed slot assignments of the engine's system crafts
            BuiltinCraftNames = \
            [
                'SysCraft_Init',            # 00 0
                'SysCraft_Stand',           # 01 1
                'SysCraft_Move',            # 02 2
                'SysCraft_UnderAttack',     # 03 3
                'SysCraft_Dead',            # 04 4
                'SysCraft_NormalAttack',    # 05 5
                'SysCraft_ArtsAria',        # 06 6
                'SysCraft_ArtsCast',        # 07 7
                'SysCraft_Win',             # 08 8
                'SysCraft_EnterBattle',     # 09 9
                'SysCraft_UseItem',         # 0A 10
                'SysCraft_Stun',            # 0B 11
                'SysCraft_Unknown2',        # 0C 12
                'SysCraft_Reserve1',        # 0D 13
                'SysCraft_Reserve2',        # 0E 14
                'SysCraft_Counter',         # 0F 15
                '',                         # 10 16
                '',                         # 11 17
                '',                         # 12 18
                '',                         # 13 19
                '',                         # 14 20
                '',                         # 15 21
                '',                         # 16 22
                '',                         # 17 23
                '',                         # 18 24
                '',                         # 19 25
                '',                         # 1A 26
                '',                         # 1B 27
                '',                         # 1C 28
                '',                         # 1D 29
                'SysCraft_TeamRushInit',    # 1E 30
                'SysCraft_TeamRushAction',  # 1F 31
            ]
            return BuiltinCraftNames
        return []
    def DiasmInstructionCallback(self, data):
        # hook for subclasses; intentionally a no-op here
        return
    def DisassembleCraftActions(self, fs):
        """Disassemble every entry in ActionList into a CodeBlock.

        Duplicate offsets reuse the same block; names come from the
        builtin table or are synthesized (optionally with the craft name
        from the matching ms*.dat monster status file).
        """
        CraftNameMap = {}
        msfile = None
        # NOTE(review): bare except keeps going without craft names when
        # the ms*.dat companion file cannot be parsed.
        try:
            msfile = MSFile.BattleMonsterStatus()
            msfile.open(os.path.dirname(self.ASFileName) + '\\ms' + os.path.basename(self.ASFileName)[2:])
            self.ChrName = None if msfile.Name == '' or msfile.Name == ' ' else msfile.Name
        except:
            msfile = None
        BuiltinCraftNames = self.GetBuiltinNames()
        disasm = Disassembler(edao.edao_as_op_table, self.DiasmInstructionCallback)
        index = -1
        codeblocks = []
        blockoffsetmap = {}
        for func in self.ActionList:
            index += 1
            if func == INVALID_ACTION_OFFSET:
                codeblocks.append(CodeBlock(INVALID_ACTION_OFFSET))
                continue
            # the same offset may be referenced by several slots
            if func in blockoffsetmap:
                codeblocks.append(blockoffsetmap[func])
                continue
            fs.seek(func)
            data = Disassembler.DisasmData()
            data.Stream = fs
            data.GlobalLabelTable = self.GlobalLabelTable
            block = disasm.DisasmBlock2(data)
            if index >= len(BuiltinCraftNames) or BuiltinCraftNames[index] == '':
                name = 'Craft_%X_%d_%X' % (index, index, block.Offset)
                if msfile != None:
                    craft = msfile.FindCraftByActionIndex(index)
                    if craft != None:
                        if craft.Name != '' and craft.Name != ' ':
                            name += '_' + craft.Name.replace(' ', '_').replace(' ', '_').replace('·', '')
                block.Name = name
            else:
                block.Name = BuiltinCraftNames[index]
            codeblocks.append(block)
            blockoffsetmap[func] = block
        return codeblocks
    def FormatCodeBlocks(self):
        """Format the disassembled blocks into source-text line lists."""
        disasm = Disassembler(edao.edao_as_op_table)
        blocks = []
        blockoffsetmap = {}
        # emit each distinct block once, ordered by file offset
        for block in sorted(self.CraftActions, key=lambda x: x.Offset):
            if block.Offset == INVALID_ACTION_OFFSET:
                continue
            if block.Offset in blockoffsetmap:
                continue
            blockoffsetmap[block.Offset] = True
            data = Disassembler.FormatData()
            data.Block = block
            data.Block.Instructions = sorted(data.Block.Instructions, key=lambda x: x.Offset)
            data.GlobalLabelTable = self.GlobalLabelTable
            name = GetValidLabelName(block.Name)
            if not name.startswith('Craft_'): name = 'Craft_' + name
            blocks.append(['def %s(): pass' % name])
            blocks.append(disasm.FormatCodeBlock2(data))
        #for x in disasmtbl: print('%08X' % x)
        #input()
        return blocks
    def SaveToFile(self, filename):
        """Write the decompiled script as a runnable Python source file."""
        lines = []
        #lines.append('from %s import *' % os.path.splitext(os.path.basename(__file__))[0])
        lines.append('from ActionHelper import *')
        lines.append('')
        lines.append('SetCodePage("%s")' % edao.CODE_PAGE)
        lines.append('')
        name = os.path.splitext(os.path.basename(filename))[0]
        name = os.path.splitext(name)[0]
        if self.ActionFileType == self.ActionFileType_Arts:
            lines.append('CreateArtsAction("%s")' % (name + '.dat'))
        else:
            tmp = []
            for pos in self.ChrPosFactor:
                tmp.append('(%d, %d)' % (pos.X, pos.Y))
            lines.append('CreateBattleAction("%s", (%s))' % (name + '.dat', ', '.join(tmp)))
        lines.append('')
        lines.append('AddPreloadChip((')
        index = 0
        for chip in self.PreloadChipList:
            x = ljust_cn('    "%s",' % chip.Name(), 30)
            x += ' # %02X %d' % (index, index)
            lines.append(x)
            index += 1
        lines.append('))')
        lines.append('')
        lines.append('CraftAction((')
        index = 0
        for craft in self.CraftActions:
            name = ('"%s"'% craft.Name) if craft.Offset != INVALID_ACTION_OFFSET else 'EMPTY_ACTION'
            lines.append( ljust_cn('    %s,' % name, 40) + ('# %02X %d' % (index, index)))
            index += 1
        lines.append('))')
        lines.append('')
        blocks = self.FormatCodeBlocks()
        for block in blocks:
            lines += block
        lines.append('SaveToFile()')
        lines.append('')
        # wrap everything (after the two header lines) in a main() body
        txt = '\r\n'.join(lines)
        lines = txt.replace('\r\n', '\n').replace('\r', '\n').split('\n')
        for i in range(2, len(lines)):
            if lines[i] != '':
                lines[i] = '    %s' % lines[i]
        lines.insert(2, 'def main():')
        lines.append('Try(main)')
        lines.append('')
        if self.ChrName != None:
            lines.insert(2, '# %s' % self.ChrName)
            lines.insert(3, '')
        fs = open(filename, 'wb')
        fs.write(''.encode('utf_8_sig'))
        fs.write('\r\n'.join(lines).encode('UTF8'))
############################################################################################
# support functions
############################################################################################
class BattleActionScriptInfoPort(BattleActionScriptInfo):
    """Assembler-side variant of BattleActionScriptInfo.

    Adds the mutable state needed while re-assembling a script that was
    previously disassembled: the output stream, the resolved label table
    and the delayed fixups patched once all offsets are known.
    """
    def __init__(self):
        super().__init__()
        self.FileName = ''
        self.Labels = {}  # map<name, offset>
        self.DelayFixLabels = []  # list of LabelEntry
        # Set while one opcode handler is nested inside another, so writes
        # are redirected into the outer handler's temporary buffer.
        self.PrevousHandlerData = None
        # Output stream (fileio.FileStream) opened by Create*Action().
        self.fs = None
# Module-level handle to the action file currently being (re)assembled;
# set by CreateBattleAction/CreateArtsAction and used by every helper below.
actionfile = None
def label(labelname):
    """Record the current output offset under *labelname*.

    The offset accounts for any nested handler buffer still being filled.
    Re-defining an existing label at a different offset is an error.
    """
    position = actionfile.fs.tell()
    nested = actionfile.PrevousHandlerData
    if nested is not None:
        position += nested.FileStream.tell()
    plog('%08X: %s' % (position, labelname))
    # Default to the new position so a first definition never conflicts.
    known = actionfile.Labels.get(labelname, position)
    if known != position:
        raise Exception('label name conflict: %s' % labelname)
    actionfile.Labels[labelname] = position
def getlabel(name):
    """Return the offset previously recorded for *name* (KeyError if unknown)."""
    return actionfile.Labels[name]
def CreateBattleAction(filename, ChrPosFactorList = None, ModelFileList = None, UnknownTableList = None):
    """Begin assembling a normal per-character action file.

    Called from generated scripts.  Parses optional --cp=/--cppy= code-page
    arguments from sys.argv, opens the output stream and seeds the global
    `actionfile` state used by the rest of the assembler helpers.

    ChrPosFactorList -- required list of (X, Y) tuples for character slots.
    ModelFileList / UnknownTableList -- optional header tables.
    """
    if not IsTupleOrList(ChrPosFactorList):
        raise Exception('ChrPosFactorList must be list')
    global actionfile
    actionfile = BattleActionScriptInfoPort()
    start_argv = 1
    global CODE_PAGE
    cp = CODE_PAGE
    # NOTE(review): assumes sys.argv[1] exists -- confirm generated scripts
    # are always invoked with at least one argument.
    if sys.argv[1].startswith('--cp='):
        cp = sys.argv[1][5:]
        start_argv = 2
    elif sys.argv[1].startswith('--cppy='):
        # Load a custom codec module from the given path and register it.
        cppy = os.path.abspath(sys.argv[1][7:])
        ccode = importlib.machinery.SourceFileLoader(os.path.basename(cppy).split('.')[0], cppy).load_module()
        ccode.register()
        cp = ccode.get_name()
        start_argv = 2
    if cp == 'NotSet':
        cp = 'gbk'
    # Propagate the chosen code page to every text-handling table.
    CODE_PAGE = cp
    edao.CODE_PAGE = cp
    edao.edao_as_op_table.CodePage = cp
    # Remaining argument, if any, is the output directory.
    if len(sys.argv) > start_argv:
        filename = os.path.join(sys.argv[start_argv], filename)
    actionfile.fs = fileio.FileStream()
    actionfile.fs.Open(filename, 'wb+')
    actionfile.FileName = filename
    for factor in ChrPosFactorList:
        f = CharacterPositionFactor()
        f.X = factor[0]
        f.Y = factor[1]
        actionfile.ChrPosFactor.append(f)
    if IsTupleOrList(ModelFileList):
        actionfile.ModelFileList = ModelFileList
    if IsTupleOrList(UnknownTableList):
        actionfile.UnknownTableList = UnknownTableList
    # File type is determined purely by the output file name.
    asname = os.path.basename(filename).lower()
    if asname == 'as90000.dat':
        actionfile.ActionFileType = actionfile.ActionFileType_Arts
    elif asname == 'as90001.dat':
        actionfile.ActionFileType = actionfile.ActionFileType_Item
    else:
        actionfile.ActionFileType = actionfile.ActionFileType_Normal
def CreateArtsAction(filename):
    """Start assembling an arts action file (as90000.dat style).

    Opens *filename* for writing and emits the 2-byte header, which for an
    arts file is just the offset of the action list (always 2).
    """
    global actionfile
    target = BattleActionScriptInfoPort()
    target.ActionFileType = BattleActionScriptInfoPort.ActionFileType_Arts
    target.ActionListOffset = 2
    stream = fileio.FileStream()
    stream.Open(filename, 'wb+')
    target.fs = stream
    target.fs.WriteUShort(target.ActionListOffset)
    actionfile = target
def AddPreloadChip(ChipFileList):
    """Register the chip files the action preloads (stored as a fresh list)."""
    if IsTupleOrList(ChipFileList):
        actionfile.ChipFileList = list(ChipFileList)
    else:
        raise Exception('ChipFileList must be list')
def CraftAction(CraftNameList):
    """Write the file header tables and the craft offset table.

    Layout written here (matching BattleActionScriptInfo.open):
      normal file: u16 ActionListOffset, u16 ChrPosFactorOffset,
                   u16 UnknownTableOffset, chip list (u32, 0xFFFFFFFF
                   terminated), model strings, optional unknown table,
                   craft offset table, position factors;
      arts file:   u16 ActionListOffset then the craft offset table.
    Each craft slot gets a 0xFFFF placeholder plus a DelayFixLabel so the
    real offset is patched in SaveToFile().
    """
    if not IsTupleOrList(CraftNameList):
        raise Exception('CraftNameList must be list')
    actionfile.ActionList = list(CraftNameList)
    fs = actionfile.fs
    if actionfile.ActionFileType == actionfile.ActionFileType_Normal:
        # Skip the 3 u16 header fields; they are back-filled below.
        fs.seek(6)
        for chip in actionfile.ChipFileList:
            fs.WriteULong(ChipFileIndex(chip).Index())
        fs.WriteULong(0xFFFFFFFF)  # chip list terminator
        for model in actionfile.ModelFileList:
            fs.WriteMultiByte(model, "cp932")
            fs.WriteByte(0)  # NUL terminator per model string
        fs.WriteByte(0)  # empty string terminates the model list
        if len(actionfile.UnknownTableList) > 0:
            actionfile.UnknownTableOffset = fs.tell()
            for factor in actionfile.UnknownTableList:
                fs.WriteUShort(factor)
    else:
        # Arts/item header is just the 2-byte action list offset.
        fs.seek(2)
    actionfile.ActionListOffset = fs.tell()
    for craft in CraftNameList:
        if craft != INVALID_ACTION_OFFSET:
            # Remember where the placeholder sits; patched in SaveToFile().
            actionfile.DelayFixLabels.append(LabelEntry(craft, fs.tell()))
        fs.WriteUShort(INVALID_ACTION_OFFSET)
    fs.write(b'\x00\x00')  # zero word terminates the offset table
    actionfile.ChrPosFactorOffset = fs.tell()
    for factor in actionfile.ChrPosFactor:
        fs.WriteByte(factor.X)
        fs.WriteByte(factor.Y)
    actionfile.ActionStartOffset = fs.tell()
    # Back-fill the header offsets now that everything is positioned.
    fs.seek(0)
    fs.WriteUShort(actionfile.ActionListOffset)
    if actionfile.ActionFileType == actionfile.ActionFileType_Normal:
        fs.WriteUShort(actionfile.ChrPosFactorOffset)
        fs.WriteUShort(actionfile.UnknownTableOffset)
    fs.seek(actionfile.ActionStartOffset)
# Generate one module-level assembler function per opcode so generated
# scripts can call instructions by mnemonic; each wrapper forwards to
# OpCodeHandler with its opcode byte.
for op, inst in edao.edao_as_op_table.items():
    func = []
    func.append('def %s(*args):' % inst.OpName)
    func.append(' return OpCodeHandler(0x%02X, args)' % inst.OpCode)
    func.append('')
    exec('\r\n'.join(func))
    # Also expose a generic AS_XX alias when the mnemonic differs from it.
    opx = 'AS_%02X' % inst.OpCode
    if inst.OpName != opx:
        func[0] = 'def %s(*args):' % opx
        exec('\r\n'.join(func))
def AssembleForExec(expr):
    """Evaluate *expr*, a Python expression string from a script being assembled.

    NOTE: uses eval(); the expression is trusted generated-script content,
    not external input.
    """
    value = eval(expr)
    return value
def OpCodeHandler(op, args):
    """Assemble one instruction invocation from a generated script.

    Builds a HandlerData for the opcode and runs OpCodeHandlerPrivate.
    The outermost call buffers its bytes in a temporary stream; nested
    calls (operands that are themselves instructions) share that buffer.
    Only the outermost call flushes to the real file and registers the
    instruction's label fixups, rebased to the file position.
    """
    entry = edao.edao_as_op_table[op]
    data = HandlerData(HANDLER_REASON_ASSEMBLE)
    data.Instruction = Instruction(op)
    data.Arguments = list(args)
    data.TableEntry = entry
    data.Assemble = AssembleForExec
    data.Instruction.OperandFormat = entry.Operand
    # True when we are a nested invocation inside another handler.
    UsePrevous = bool(actionfile.PrevousHandlerData != None)
    if UsePrevous:
        # Share the outer handler's buffer and label list.
        data.FileStream = actionfile.PrevousHandlerData.FileStream
        data.Instruction.Labels = actionfile.PrevousHandlerData.Instruction.Labels
    else:
        # Outermost call: assemble into a fresh in-memory stream first.
        data.FileStream = fileio.FileStream(b'')
        actionfile.PrevousHandlerData = data
    #print(entry.OpName)
    inst = OpCodeHandlerPrivate(data)
    if UsePrevous:
        return inst
    actionfile.PrevousHandlerData = None
    # Rebase buffered label offsets onto the real file position, then flush.
    offset = actionfile.fs.tell()
    for lb in inst.Labels:
        actionfile.DelayFixLabels.append(LabelEntry(lb.Label, lb.Offset + offset))
    data.FileStream.seek(0)
    actionfile.fs.write(data.FileStream.read())
    return inst
def SaveToFile():
    """Finalize the assembled file by patching every delayed label fixup.

    Each LabelEntry records where a 16-bit placeholder was written; seek
    back and overwrite it with the label's resolved offset.
    """
    # Example of a fixup source in a generated script:
    #     if has_target: jump label
    #     Jc(0x16, 0x1, 0x0, "loc_A4A")
    stream = actionfile.fs
    for entry in actionfile.DelayFixLabels:
        stream.seek(entry.Offset)
        stream.WriteUShort(getlabel(entry.Label))
def procfile(file, cp=None):
    """Disassemble one as*.dat file into a .py script and return its path."""
    if cp:
        # Propagate the requested code page before any text is parsed.
        edao.CODE_PAGE = cp
        edao.edao_op_table.CodePage = cp
    console.setTitle(os.path.basename(file))
    script = BattleActionScriptInfo()
    script.open(file)
    target = os.path.splitext(file)[0] + ".py"
    plog('SAVE %s' % target)
    script.SaveToFile(target)
    return target
if __name__ == '__main__':
    # Batch-disassemble every as*.dat under the given paths.
    # Usage: script.py [--cp=CODEPAGE | --cppy=codec.py] dir_or_file...
    # iterlib.forEachFileMP(procfile, sys.argv[1:], 'as*.dat')
    cp = 'gbk'
    start_argv = 1
    # NOTE(review): assumes sys.argv[1] exists -- running with no arguments
    # raises IndexError before any usage message.
    if sys.argv[1].startswith('--cp='):
        cp = sys.argv[1][5:]
        start_argv = 2
    elif sys.argv[1].startswith('--cppy='):
        # Load and register a custom codec module from the given path.
        cppy = os.path.abspath(sys.argv[1][7:])
        ccode = importlib.machinery.SourceFileLoader(os.path.basename(cppy).split('.')[0], cppy).load_module()
        ccode.register()
        cp = ccode.get_name()
        start_argv = 2
    # NOTE(review): this sets edao_as_op_table.CodePage while procfile()
    # sets edao_op_table.CodePage -- confirm whether both tables need it.
    edao.CODE_PAGE = cp
    edao.edao_as_op_table.CodePage = cp
    files = iterlib.forEachGetFiles(sys.argv[start_argv:], 'as*.dat')
    #Log.OpenLog(sys.argv[start_argv] + '\..\log.txt')
    for file in files:
        plog('START %s' % file)
        procfile(file)
        plog('FINISHED %s' % file)
    #Log.CloseLog()
| 31.056748 | 125 | 0.521754 | from Assembler.Assembler2s import *
from Base.EDAOBase import *
import Instruction.ActionOpTableEDAO as edao
import BattleMonsterStatus as MSFile
# Sentinel offset marking an absent craft slot in the 16-bit action table.
INVALID_ACTION_OFFSET = 0xFFFF
# Alias used by generated scripts to declare an empty craft entry.
EMPTY_ACTION = INVALID_ACTION_OFFSET
class CharacterPositionFactor:
    """X/Y position factor for one character slot, read from the file header."""

    def __init__(self, fs=None):
        """Read the two factor bytes from *fs*.

        With no stream the instance is left blank so the caller can fill
        X/Y manually (as CreateBattleAction does).
        """
        if fs is None:  # identity check is the idiomatic None test
            return
        self.X = fs.ReadByte()
        self.Y = fs.ReadByte()
class BattleActionScriptInfo:
    """Disassembler-side model of an as*.dat battle/arts/item action script.

    open() parses the binary file into header tables plus disassembled
    craft code blocks; SaveToFile() renders everything as a runnable
    Python script that re-assembles the identical .dat via ActionHelper.
    """

    # File flavour, chosen purely by file name (see open()).
    ActionFileType_Normal = 0  # per-character as*.dat with chip/model tables
    ActionFileType_Arts = 1    # as90000.dat
    ActionFileType_Item = 2    # as90001.dat

    def __init__(self):
        # Header offsets read from / written to the file.
        self.ChrPosFactorOffset = 0
        self.ActionListOffset = 0
        self.UnknownTableOffset = 0
        self.ActionStartOffset = 0
        # Header tables.
        self.PreloadChipList = []  # ChipFileIndex entries (0xFFFFFFFF-terminated on disk)
        self.ModelFileList = []
        self.UnknownTableList = []
        self.ActionList = []       # u16 file offset per craft slot
        self.ChrPosFactor = []     # 8 CharacterPositionFactor pairs
        self.CraftActions = []     # CodeBlock per craft, filled by open()
        self.GlobalLabelTable = {}
        self.ChrName = None        # character name from the matching ms*.dat, if found
        self.ASFileName = ''
        self.ActionFileType = self.ActionFileType_Normal

    def open(self, asname):
        """Parse *asname* into header tables and disassembled craft blocks."""
        fs = fileio.FileStream()
        fs.Open(asname)
        self.ASFileName = asname
        asname = os.path.basename(asname).lower()
        # The two system scripts have fixed names; everything else is per-character.
        if asname == 'as90000.dat':
            self.ActionFileType = self.ActionFileType_Arts
        elif asname == 'as90001.dat':
            self.ActionFileType = self.ActionFileType_Item
        else:
            self.ActionFileType = self.ActionFileType_Normal
        self.ActionListOffset = fs.ReadUShort()
        if self.ActionFileType == self.ActionFileType_Normal:
            # Normal files carry two more header offsets, the preload chip
            # list (u32 entries, 0xFFFFFFFF terminated) and 8 position factors.
            self.ChrPosFactorOffset = fs.ReadUShort()
            self.UnknownTableOffset = fs.ReadUShort()
            while True:
                index = fs.ReadULong()
                if index == 0xFFFFFFFF:
                    break
                self.PreloadChipList.append(ChipFileIndex(index))
            fs.seek(self.ChrPosFactorOffset)
            for i in range(8):
                self.ChrPosFactor.append(CharacterPositionFactor(fs))
        # Read craft offsets until the cursor reaches the first craft body
        # (smallest offset seen) or a zero terminator word.
        minoffset = 0xFFFFFFFF
        fs.seek(self.ActionListOffset)
        while True:
            if fs.tell() >= minoffset:
                break
            offset = fs.ReadUShort()
            if offset == 0:
                break
            minoffset = min(minoffset, offset)
            self.ActionList.append(offset)
        if len(self.ActionList) == 0:
            raise Exception('action number == 0')
        self.CraftActions = self.DisassembleCraftActions(fs)
        return
        # NOTE(review): unreachable leftover debug code below (after the
        # return); it also references an undefined name `offsetlist`.
        for i in range(0x69, fs.Length):
            if i not in offsetlist:
                print('%X' % i)
                input()

    def GetBuiltinNames(self):
        """Return default display names for each craft slot.

        Arts files: best-effort arts names from ../../text/t_magic._dt
        (any failure yields an empty list).  Normal files: the fixed
        engine slot names below.  Item files: no builtin names.
        """
        if self.ActionFileType == self.ActionFileType_Arts:
            BuiltinArtsNames = []
            try:
                offsetlist = []
                # NOTE(review): assumes Windows path separators and the
                # standard data/patch directory layout -- confirm on port.
                t_magic = os.path.abspath(os.path.dirname(os.path.abspath(self.ASFileName)) + '\\..\\..\\text\\t_magic._dt')
                if not os.path.exists(t_magic):
                    if t_magic.endswith('\\patch\\text\\t_magic._dt'):
                        t_magic = t_magic.replace('\\patch\\text\\t_magic._dt', '\\data\\text\\t_magic._dt')
                    elif t_magic.endswith('\\patch2\\text\\t_magic._dt'):
                        t_magic = t_magic.replace('\\patch2\\text\\t_magic._dt', '\\data\\text\\t_magic._dt')
                magic = fileio.FileStream()
                magic.Open(t_magic)
                for i in range(len(self.ActionList)):
                    offsetlist.append(magic.ReadUShort())
                    BuiltinArtsNames.append('')
                NameConflict = {}  # name -> occurrence count, for _N suffixes
                for i in range(len(offsetlist)):
                    offset = offsetlist[i]
                    # Records shorter than 0x1C bytes have no name field.
                    if i != len(offsetlist) - 1 and offsetlist[i + 1] - offset < 0x1C:
                        continue
                    magic.seek(offset + 0x18)  # name-pointer field of the record
                    offset = magic.ReadUShort()
                    if offset == 0:
                        continue
                    magic.seek(offset)
                    name = magic.ReadMultiByte().replace(' ', '')
                    if name == '':
                        continue
                    # Disambiguate duplicate names with a numeric suffix.
                    if name not in NameConflict:
                        NameConflict[name] = 1
                    else:
                        NameConflict[name] += 1
                        name += '_%d' % NameConflict[name]
                    BuiltinArtsNames[i] = name
            except:
                # Best effort only: fall back to generated Craft_* names.
                BuiltinArtsNames = []
            return BuiltinArtsNames
        elif self.ActionFileType == self.ActionFileType_Normal:
            # Fixed engine slot names; empty strings are unnamed slots.
            BuiltinCraftNames = \
            [
                'SysCraft_Init',
                'SysCraft_Stand',
                'SysCraft_Move',
                'SysCraft_UnderAttack',
                'SysCraft_Dead',
                'SysCraft_NormalAttack',
                'SysCraft_ArtsAria',
                'SysCraft_ArtsCast',
                'SysCraft_Win',
                'SysCraft_EnterBattle',
                'SysCraft_UseItem',
                'SysCraft_Stun',
                'SysCraft_Unknown2',
                'SysCraft_Reserve1',
                'SysCraft_Reserve2',
                'SysCraft_Counter',
                '',
                '',
                '',
                '',
                '',
                '',
                '',
                '',
                '',
                '',
                '',
                '',
                '',
                '',
                'SysCraft_TeamRushInit',
                'SysCraft_TeamRushAction',
            ]
            return BuiltinCraftNames
        return []

    def DiasmInstructionCallback(self, data):
        # Hook for subclasses; the base disassembler needs no per-instruction work.
        return

    def DisassembleCraftActions(self, fs):
        """Disassemble every craft offset in self.ActionList into CodeBlocks.

        Slots sharing an offset share the same block; names come from the
        builtin slot names, the matching ms*.dat craft names, or a
        generated Craft_<idx>_<idx>_<offset> fallback.
        """
        CraftNameMap = {}  # NOTE(review): appears unused
        msfile = None
        try:
            # Companion monster-status file: asXXXXX.dat -> msXXXXX.dat.
            msfile = MSFile.BattleMonsterStatus()
            msfile.open(os.path.dirname(self.ASFileName) + '\\ms' + os.path.basename(self.ASFileName)[2:])
            self.ChrName = None if msfile.Name == '' or msfile.Name == ' ' else msfile.Name
        except:
            msfile = None
        BuiltinCraftNames = self.GetBuiltinNames()
        disasm = Disassembler(edao.edao_as_op_table, self.DiasmInstructionCallback)
        index = -1
        codeblocks = []
        blockoffsetmap = {}  # offset -> CodeBlock, to dedupe shared crafts
        for func in self.ActionList:
            index += 1
            if func == INVALID_ACTION_OFFSET:
                codeblocks.append(CodeBlock(INVALID_ACTION_OFFSET))
                continue
            if func in blockoffsetmap:
                codeblocks.append(blockoffsetmap[func])
                continue
            fs.seek(func)
            data = Disassembler.DisasmData()
            data.Stream = fs
            data.GlobalLabelTable = self.GlobalLabelTable
            block = disasm.DisasmBlock2(data)
            if index >= len(BuiltinCraftNames) or BuiltinCraftNames[index] == '':
                # No builtin name: synthesize one, optionally appending the
                # sanitized craft name from the ms file.
                name = 'Craft_%X_%d_%X' % (index, index, block.Offset)
                if msfile != None:
                    craft = msfile.FindCraftByActionIndex(index)
                    if craft != None:
                        if craft.Name != '' and craft.Name != ' ':
                            name += '_' + craft.Name.replace(' ', '_').replace(' ', '_').replace('·', '')
                block.Name = name
            else:
                block.Name = BuiltinCraftNames[index]
            codeblocks.append(block)
            blockoffsetmap[func] = block
        return codeblocks

    def FormatCodeBlocks(self):
        """Format every unique craft block as generated source lines.

        Returns a list of line-lists: for each block, a one-line stub
        `def Craft_X(): pass` header followed by the formatted body.
        """
        disasm = Disassembler(edao.edao_as_op_table)
        blocks = []
        blockoffsetmap = {}  # offsets already emitted (shared crafts)
        for block in sorted(self.CraftActions, key=lambda x: x.Offset):
            if block.Offset == INVALID_ACTION_OFFSET:
                continue
            if block.Offset in blockoffsetmap:
                continue
            blockoffsetmap[block.Offset] = True
            data = Disassembler.FormatData()
            data.Block = block
            data.Block.Instructions = sorted(data.Block.Instructions, key=lambda x: x.Offset)
            data.GlobalLabelTable = self.GlobalLabelTable
            name = GetValidLabelName(block.Name)
            if not name.startswith('Craft_'): name = 'Craft_' + name
            blocks.append(['def %s(): pass' % name])
            blocks.append(disasm.FormatCodeBlock2(data))
        return blocks

    def SaveToFile(self, filename):
        """Render this parsed script as a runnable Python source file.

        The generated file re-creates the original .dat when executed: the
        Create*Action header, the preload-chip and craft tables, every
        formatted code block, then SaveToFile().
        """
        lines = []
        lines.append('from ActionHelper import *')
        lines.append('')
        lines.append('SetCodePage("%s")' % edao.CODE_PAGE)
        lines.append('')
        # Strip two extensions (e.g. "as00000.dat.py" -> "as00000").
        name = os.path.splitext(os.path.basename(filename))[0]
        name = os.path.splitext(name)[0]
        if self.ActionFileType == self.ActionFileType_Arts:
            lines.append('CreateArtsAction("%s")' % (name + '.dat'))
        else:
            tmp = []
            for pos in self.ChrPosFactor:
                tmp.append('(%d, %d)' % (pos.X, pos.Y))
            lines.append('CreateBattleAction("%s", (%s))' % (name + '.dat', ', '.join(tmp)))
        lines.append('')
        lines.append('AddPreloadChip((')
        index = 0
        for chip in self.PreloadChipList:
            # Slot index in hex and decimal, for cross-reference.
            x = ljust_cn(' "%s",' % chip.Name(), 30)
            x += ' # %02X %d' % (index, index)
            lines.append(x)
            index += 1
        lines.append('))')
        lines.append('')
        lines.append('CraftAction((')
        index = 0
        for craft in self.CraftActions:
            name = ('"%s"'% craft.Name) if craft.Offset != INVALID_ACTION_OFFSET else 'EMPTY_ACTION'
            lines.append( ljust_cn(' %s,' % name, 40) + ('# %02X %d' % (index, index)))
            index += 1
        lines.append('))')
        lines.append('')
        blocks = self.FormatCodeBlocks()
        for block in blocks:
            lines += block
        lines.append('SaveToFile()')
        lines.append('')
        # Normalize line endings, then indent the body under a main()
        # wrapper (the first two lines stay at top level).
        txt = '\r\n'.join(lines)
        lines = txt.replace('\r\n', '\n').replace('\r', '\n').split('\n')
        for i in range(2, len(lines)):
            if lines[i] != '':
                lines[i] = ' %s' % lines[i]
        lines.insert(2, 'def main():')
        lines.append('Try(main)')
        lines.append('')
        if self.ChrName != None:
            lines.insert(2, '# %s' % self.ChrName)
            lines.insert(3, '')
        # NOTE(review): the file object is never closed; consider a
        # 'with open(...)' to guarantee flush/close.
        fs = open(filename, 'wb')
        fs.write(''.encode('utf_8_sig'))  # UTF-8 BOM prefix
        fs.write('\r\n'.join(lines).encode('UTF8'))
| true | true |
f7319b025a15cca212a3758b38a253304ad2ede4 | 2,382 | py | Python | src/streamlink/plugins/huya.py | melmorabity/streamlink | 24c59a23103922977991acc28741a323d8efa7a1 | [
"BSD-2-Clause"
] | null | null | null | src/streamlink/plugins/huya.py | melmorabity/streamlink | 24c59a23103922977991acc28741a323d8efa7a1 | [
"BSD-2-Clause"
] | null | null | null | src/streamlink/plugins/huya.py | melmorabity/streamlink | 24c59a23103922977991acc28741a323d8efa7a1 | [
"BSD-2-Clause"
] | null | null | null | import base64
import logging
import re
from html import unescape as html_unescape
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream import HTTPStream
from streamlink.utils.parse import parse_json
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
r'https?://(?:www\.)?huya\.com/(?P<channel>[^/]+)'
))
class Huya(Plugin):
_re_stream = re.compile(r'"stream"\s?:\s?"([^"]+)"')
_schema_data = validate.Schema(
{
# 'status': int,
# 'msg': validate.any(None, str),
'data': [{
'gameStreamInfoList': [{
'sCdnType': str,
'sStreamName': str,
'sFlvUrl': str,
'sFlvUrlSuffix': str,
'sFlvAntiCode': validate.all(str, validate.transform(lambda v: html_unescape(v))),
# 'sHlsUrl': str,
# 'sHlsUrlSuffix': str,
# 'sHlsAntiCode': validate.all(str, validate.transform(lambda v: html_unescape(v))),
validate.optional('iIsMultiStream'): int,
'iPCPriorityRate': int,
}]
}],
# 'vMultiStreamInfo': [{
# 'sDisplayName': str,
# 'iBitRate': int,
# }],
},
validate.get('data'),
validate.get(0),
validate.get('gameStreamInfoList'),
)
QUALITY_WEIGHTS = {}
@classmethod
def stream_weight(cls, key):
weight = cls.QUALITY_WEIGHTS.get(key)
if weight:
return weight, 'huya'
return Plugin.stream_weight(key)
def _get_streams(self):
res = self.session.http.get(self.url)
data = self._re_stream.search(res.text)
if not data:
return
data = parse_json(base64.b64decode(data.group(1)), schema=self._schema_data)
for info in data:
log.trace(f'{info!r}')
flv_url = f'{info["sFlvUrl"]}/{info["sStreamName"]}.{info["sFlvUrlSuffix"]}?{info["sFlvAntiCode"]}'
name = f'source_{info["sCdnType"].lower()}'
self.QUALITY_WEIGHTS[name] = info['iPCPriorityRate']
yield name, HTTPStream(self.session, flv_url)
log.debug(f'QUALITY_WEIGHTS: {self.QUALITY_WEIGHTS!r}')
__plugin__ = Huya
| 31.76 | 111 | 0.554156 | import base64
import logging
import re
from html import unescape as html_unescape
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream import HTTPStream
from streamlink.utils.parse import parse_json
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
r'https?://(?:www\.)?huya\.com/(?P<channel>[^/]+)'
))
class Huya(Plugin):
_re_stream = re.compile(r'"stream"\s?:\s?"([^"]+)"')
_schema_data = validate.Schema(
{
# 'status': int,
# 'msg': validate.any(None, str),
'data': [{
'gameStreamInfoList': [{
'sCdnType': str,
'sStreamName': str,
'sFlvUrl': str,
'sFlvUrlSuffix': str,
'sFlvAntiCode': validate.all(str, validate.transform(lambda v: html_unescape(v))),
# 'sHlsUrl': str,
# 'sHlsUrlSuffix': str,
# 'sHlsAntiCode': validate.all(str, validate.transform(lambda v: html_unescape(v))),
validate.optional('iIsMultiStream'): int,
'iPCPriorityRate': int,
}]
}],
# 'vMultiStreamInfo': [{
# 'sDisplayName': str,
# 'iBitRate': int,
# }],
},
validate.get('data'),
validate.get(0),
validate.get('gameStreamInfoList'),
)
QUALITY_WEIGHTS = {}
@classmethod
def stream_weight(cls, key):
weight = cls.QUALITY_WEIGHTS.get(key)
if weight:
return weight, 'huya'
return Plugin.stream_weight(key)
def _get_streams(self):
res = self.session.http.get(self.url)
data = self._re_stream.search(res.text)
if not data:
return
data = parse_json(base64.b64decode(data.group(1)), schema=self._schema_data)
for info in data:
log.trace(f'{info!r}')
flv_url = f'{info["sFlvUrl"]}/{info["sStreamName"]}.{info["sFlvUrlSuffix"]}?{info["sFlvAntiCode"]}'
name = f'source_{info["sCdnType"].lower()}'
self.QUALITY_WEIGHTS[name] = info['iPCPriorityRate']
yield name, HTTPStream(self.session, flv_url)
log.debug(f'QUALITY_WEIGHTS: {self.QUALITY_WEIGHTS!r}')
__plugin__ = Huya
| true | true |
f7319b3ce9b7c689f47a1cdeca8dd710ca242644 | 6,175 | py | Python | salt/netapi/rest_cherrypy/event_processor.py | styro/salt | d087d94dca02ca8bf53a6c21b94944bc7957522c | [
"Apache-2.0"
] | 3 | 2016-09-03T06:26:42.000Z | 2019-06-30T13:04:53.000Z | salt/netapi/rest_cherrypy/event_processor.py | styro/salt | d087d94dca02ca8bf53a6c21b94944bc7957522c | [
"Apache-2.0"
] | null | null | null | salt/netapi/rest_cherrypy/event_processor.py | styro/salt | d087d94dca02ca8bf53a6c21b94944bc7957522c | [
"Apache-2.0"
] | 1 | 2021-12-02T15:30:00.000Z | 2021-12-02T15:30:00.000Z | # encoding: utf-8
from __future__ import absolute_import
import json
import logging
import salt.ext.six as six
import salt.netapi
logger = logging.getLogger(__name__)
class SaltInfo(object):
    '''
    Class to handle processing and publishing of "real time" Salt updates.
    '''
    def __init__(self, handler):
        '''
        handler is expected to be the server side end of a websocket
        connection.
        '''
        self.handler = handler

        # These represent a "real time" view into Salt's jobs.
        self.jobs = {}

        # This represents a "real time" view of minions connected to Salt.
        self.minions = {}

    def publish_minions(self):
        '''
        Publishes minions as a list of dicts.
        '''
        minions = []

        for minion, minion_info in six.iteritems(self.minions):
            curr_minion = {}
            curr_minion.update(minion_info)
            curr_minion.update({'id': minion})
            minions.append(curr_minion)

        ret = {'minions': minions}
        self.handler.send(json.dumps(ret), False)

    def publish(self, key, data):
        '''
        Publishes the data to the event stream.
        '''
        publish_data = {key: data}
        self.handler.send(json.dumps(publish_data), False)

    def process_minion_update(self, event_data):
        '''
        Associate grains data with a minion and publish minion update
        '''
        tag = event_data['tag']
        event_info = event_data['data']

        # Tag format: salt/job/<jid>/ret/<minion_id>
        _, _, _, _, mid = tag.split('/')

        if not self.minions.get(mid, None):
            self.minions[mid] = {}

        minion = self.minions[mid]
        minion.update({'grains': event_info['return']})
        self.publish_minions()

    def process_ret_job_event(self, event_data):
        '''
        Process a /ret event returned by Salt for a particular minion.
        These events contain the returned results from a particular execution.
        '''
        tag = event_data['tag']
        event_info = event_data['data']

        # Tag format: salt/job/<jid>/ret/<minion_id>
        _, _, jid, _, mid = tag.split('/')

        job = self.jobs.setdefault(jid, {})
        minion = job.setdefault('minions', {}).setdefault(mid, {})

        minion.update({'return': event_info['return']})
        minion.update({'retcode': event_info['retcode']})
        minion.update({'success': event_info['success']})

        # The job is complete once every participating minion succeeded.
        job_complete = all([minion['success'] for mid, minion
                            in six.iteritems(job['minions'])])

        if job_complete:
            job['state'] = 'complete'

        self.publish('jobs', self.jobs)

    def process_new_job_event(self, event_data):
        '''
        Creates a new job with properties from the event data
        like jid, function, args, timestamp.

        Also sets the initial state to started.

        Minions that are participating in this job are also noted.
        '''
        event_info = event_data['data']
        minions = {}
        for mid in event_info['minions']:
            minions[mid] = {'success': False}

        job = {
            'jid': event_info['jid'],
            'start_time': event_info['_stamp'],
            'minions': minions,  # is a dictionary keyed by mids
            'fun': event_info['fun'],
            'tgt': event_info['tgt'],
            'tgt_type': event_info['tgt_type'],
            'state': 'running',
        }

        self.jobs[event_info['jid']] = job
        self.publish('jobs', self.jobs)

    def process_key_event(self, event_data):
        '''
        Tag: salt/key
        Data:
        {'_stamp': '2014-05-20T22:45:04.345583',
         'act': 'delete',
         'id': 'compute.home',
         'result': True}
        '''
        event_info = event_data['data']

        if event_info['act'] == 'delete':
            self.minions.pop(event_info['id'], None)
        elif event_info['act'] == 'accept':
            self.minions.setdefault(event_info['id'], {})

        self.publish_minions()

    def process_presence_events(self, event_data, token, opts):
        '''
        Check if any minions have connected or dropped.
        Send a message to the client if they have.
        '''
        event_info = event_data['data']

        minions_detected = event_info['present']
        curr_minions = self.minions.keys()
        changed = False

        # check if any connections were dropped
        dropped_minions = set(curr_minions) - set(minions_detected)

        for minion in dropped_minions:
            changed = True
            self.minions.pop(minion, None)

        # check if any new connections were made; fetch their grains so
        # process_minion_update can fill in the details asynchronously
        new_minions = set(minions_detected) - set(curr_minions)
        tgt = ','.join(new_minions)

        if tgt:
            changed = True
            client = salt.netapi.NetapiClient(opts)
            client.run(
                {
                    'fun': 'grains.items',
                    'tgt': tgt,
                    'expr_type': 'list',
                    'mode': 'client',
                    'client': 'local',
                    'async': 'local_async',
                    'token': token,
                })

        if changed:
            self.publish_minions()

    def process(self, salt_data, token, opts):
        '''
        Process events and publish data
        '''
        parts = salt_data['tag'].split('/')
        if len(parts) < 2:
            return

        # TBD: Simplify these conditional expressions
        if parts[1] == 'job':
            if parts[3] == 'new':
                self.process_new_job_event(salt_data)
                # A fresh grains.items sweep resets the minion view; the
                # /ret events below repopulate it.
                if salt_data['data']['fun'] == 'grains.items':
                    self.minions = {}
            elif parts[3] == 'ret':
                self.process_ret_job_event(salt_data)
                if salt_data['data']['fun'] == 'grains.items':
                    self.process_minion_update(salt_data)
        if parts[1] == 'key':
            self.process_key_event(salt_data)
        if parts[1] == 'presence':
            self.process_presence_events(salt_data, token, opts)
| 29.830918 | 78 | 0.545263 |
from __future__ import absolute_import
import json
import logging
import salt.ext.six as six
import salt.netapi
logger = logging.getLogger(__name__)
class SaltInfo(object):
def __init__(self, handler):
self.handler = handler
self.jobs = {}
# This represents a "real time" view of minions connected to Salt.
self.minions = {}
def publish_minions(self):
minions = []
for minion, minion_info in six.iteritems(self.minions):
curr_minion = {}
curr_minion.update(minion_info)
curr_minion.update({'id': minion})
minions.append(curr_minion)
ret = {'minions': minions}
self.handler.send(json.dumps(ret), False)
def publish(self, key, data):
publish_data = {key: data}
self.handler.send(json.dumps(publish_data), False)
def process_minion_update(self, event_data):
tag = event_data['tag']
event_info = event_data['data']
_, _, _, _, mid = tag.split('/')
if not self.minions.get(mid, None):
self.minions[mid] = {}
minion = self.minions[mid]
minion.update({'grains': event_info['return']})
self.publish_minions()
def process_ret_job_event(self, event_data):
tag = event_data['tag']
event_info = event_data['data']
_, _, jid, _, mid = tag.split('/')
job = self.jobs.setdefault(jid, {})
minion = job.setdefault('minions', {}).setdefault(mid, {})
minion.update({'return': event_info['return']})
minion.update({'retcode': event_info['retcode']})
minion.update({'success': event_info['success']})
job_complete = all([minion['success'] for mid, minion
in six.iteritems(job['minions'])])
if job_complete:
job['state'] = 'complete'
self.publish('jobs', self.jobs)
def process_new_job_event(self, event_data):
job = None
tag = event_data['tag']
event_info = event_data['data']
minions = {}
for mid in event_info['minions']:
minions[mid] = {'success': False}
job = {
'jid': event_info['jid'],
'start_time': event_info['_stamp'],
'minions': minions, # is a dictionary keyed by mids
'fun': event_info['fun'],
'tgt': event_info['tgt'],
'tgt_type': event_info['tgt_type'],
'state': 'running',
}
self.jobs[event_info['jid']] = job
self.publish('jobs', self.jobs)
def process_key_event(self, event_data):
tag = event_data['tag']
event_info = event_data['data']
if event_info['act'] == 'delete':
self.minions.pop(event_info['id'], None)
elif event_info['act'] == 'accept':
self.minions.setdefault(event_info['id'], {})
self.publish_minions()
def process_presence_events(self, event_data, token, opts):
tag = event_data['tag']
event_info = event_data['data']
minions_detected = event_info['present']
curr_minions = self.minions.keys()
changed = False
# check if any connections were dropped
dropped_minions = set(curr_minions) - set(minions_detected)
for minion in dropped_minions:
changed = True
self.minions.pop(minion, None)
# check if any new connections were made
new_minions = set(minions_detected) - set(curr_minions)
tgt = ','.join(new_minions)
if tgt:
changed = True
client = salt.netapi.NetapiClient(opts)
client.run(
{
'fun': 'grains.items',
'tgt': tgt,
'expr_type': 'list',
'mode': 'client',
'client': 'local',
'async': 'local_async',
'token': token,
})
if changed:
self.publish_minions()
def process(self, salt_data, token, opts):
parts = salt_data['tag'].split('/')
if len(parts) < 2:
return
# TBD: Simplify these conditional expressions
if parts[1] == 'job':
if parts[3] == 'new':
self.process_new_job_event(salt_data)
if salt_data['data']['fun'] == 'grains.items':
self.minions = {}
elif parts[3] == 'ret':
self.process_ret_job_event(salt_data)
if salt_data['data']['fun'] == 'grains.items':
self.process_minion_update(salt_data)
if parts[1] == 'key':
self.process_key_event(salt_data)
if parts[1] == 'presence':
self.process_presence_events(salt_data, token, opts)
| true | true |
f7319b609eb62f710cfd393ed8f31297d6216369 | 2,636 | py | Python | equality/wallet/puzzles/prefarm/spend_prefarm.py | grayfallstown/equality-blockchain | 019425b703f6b013e441481ac43389a80415f2f1 | [
"Apache-2.0"
] | 10 | 2021-07-04T15:14:12.000Z | 2021-10-17T14:52:56.000Z | equality/wallet/puzzles/prefarm/spend_prefarm.py | grayfallstown/equality-blockchain | 019425b703f6b013e441481ac43389a80415f2f1 | [
"Apache-2.0"
] | 11 | 2021-07-04T19:31:36.000Z | 2022-01-11T02:46:23.000Z | equality/wallet/puzzles/prefarm/spend_prefarm.py | grayfallstown/equality-blockchain | 019425b703f6b013e441481ac43389a80415f2f1 | [
"Apache-2.0"
] | 11 | 2021-07-04T21:49:17.000Z | 2021-10-04T17:45:38.000Z | import asyncio
from blspy import G2Element
from clvm_tools import binutils
from equality.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from equality.rpc.full_node_rpc_client import FullNodeRpcClient
from equality.types.blockchain_format.program import Program
from equality.types.coin_solution import CoinSolution
from equality.types.spend_bundle import SpendBundle
from equality.util.bech32m import decode_puzzle_hash
from equality.util.config import load_config
from equality.util.default_root import DEFAULT_ROOT_PATH
from equality.util.ints import uint32, uint16
async def main() -> None:
    """Split the two height-1 prefarm reward coins between two addresses.

    Connects to the local full node RPC, rebuilds the farmer/pool prefarm
    coin spends with a hard-coded CLVM solution that pays half of each
    coin to address1 and half to address2, and pushes the farmer bundle.
    """
    rpc_port: uint16 = uint16(8555)
    self_hostname = "localhost"
    path = DEFAULT_ROOT_PATH
    config = load_config(path, "config.yaml")
    client = await FullNodeRpcClient.create(self_hostname, rpc_port, path, config)
    try:
        # Block 1 incorporates both genesis prefarm claims.
        farmer_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[1]
        pool_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[0]

        # Each prefarm coin is split 50/50 between the two keys.
        pool_amounts = int(calculate_pool_reward(uint32(0)) / 2)
        farmer_amounts = int(calculate_base_farmer_reward(uint32(0)) / 2)
        print(farmer_prefarm.amount, farmer_amounts)
        assert farmer_amounts == farmer_prefarm.amount // 2
        assert pool_amounts == pool_prefarm.amount // 2
        address1 = "xeq1rdatypul5c642jkeh4yp933zu3hw8vv8tfup8ta6zfampnyhjnusxdgns6"  # Key 1
        address2 = "xeq1duvy5ur5eyj7lp5geetfg84cj2d7xgpxt7pya3lr2y6ke3696w9qvda66e"  # Key 2

        ph1 = decode_puzzle_hash(address1)
        ph2 = decode_puzzle_hash(address2)
        # Quoted CLVM: two CREATE_COIN (opcode 51) conditions per spend.
        p_farmer_2 = Program.to(
            binutils.assemble(f"(q . ((51 0x{ph1.hex()} {farmer_amounts}) (51 0x{ph2.hex()} {farmer_amounts})))")
        )
        p_pool_2 = Program.to(
            binutils.assemble(f"(q . ((51 0x{ph1.hex()} {pool_amounts}) (51 0x{ph2.hex()} {pool_amounts})))")
        )
        p_solution = Program.to(binutils.assemble("()"))

        sb_farmer = SpendBundle([CoinSolution(farmer_prefarm, p_farmer_2, p_solution)], G2Element())
        sb_pool = SpendBundle([CoinSolution(pool_prefarm, p_pool_2, p_solution)], G2Element())

        print(sb_pool, sb_farmer)
        # Only the farmer bundle is pushed; enable the pool push as needed.
        res = await client.push_tx(sb_farmer)
        # res = await client.push_tx(sb_pool)

        print(res)
        up = await client.get_coin_records_by_puzzle_hash(farmer_prefarm.puzzle_hash, True)
        uf = await client.get_coin_records_by_puzzle_hash(pool_prefarm.puzzle_hash, True)
        print(up)
        print(uf)
    finally:
        client.close()


asyncio.run(main())
| 41.1875 | 113 | 0.725721 | import asyncio
from blspy import G2Element
from clvm_tools import binutils
from equality.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from equality.rpc.full_node_rpc_client import FullNodeRpcClient
from equality.types.blockchain_format.program import Program
from equality.types.coin_solution import CoinSolution
from equality.types.spend_bundle import SpendBundle
from equality.util.bech32m import decode_puzzle_hash
from equality.util.config import load_config
from equality.util.default_root import DEFAULT_ROOT_PATH
from equality.util.ints import uint32, uint16
async def main() -> None:
rpc_port: uint16 = uint16(8555)
self_hostname = "localhost"
path = DEFAULT_ROOT_PATH
config = load_config(path, "config.yaml")
client = await FullNodeRpcClient.create(self_hostname, rpc_port, path, config)
try:
farmer_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[1]
pool_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[0]
pool_amounts = int(calculate_pool_reward(uint32(0)) / 2)
farmer_amounts = int(calculate_base_farmer_reward(uint32(0)) / 2)
print(farmer_prefarm.amount, farmer_amounts)
assert farmer_amounts == farmer_prefarm.amount // 2
assert pool_amounts == pool_prefarm.amount // 2
address1 = "xeq1rdatypul5c642jkeh4yp933zu3hw8vv8tfup8ta6zfampnyhjnusxdgns6"
address2 = "xeq1duvy5ur5eyj7lp5geetfg84cj2d7xgpxt7pya3lr2y6ke3696w9qvda66e"
ph1 = decode_puzzle_hash(address1)
ph2 = decode_puzzle_hash(address2)
p_farmer_2 = Program.to(
binutils.assemble(f"(q . ((51 0x{ph1.hex()} {farmer_amounts}) (51 0x{ph2.hex()} {farmer_amounts})))")
)
p_pool_2 = Program.to(
binutils.assemble(f"(q . ((51 0x{ph1.hex()} {pool_amounts}) (51 0x{ph2.hex()} {pool_amounts})))")
)
p_solution = Program.to(binutils.assemble("()"))
sb_farmer = SpendBundle([CoinSolution(farmer_prefarm, p_farmer_2, p_solution)], G2Element())
sb_pool = SpendBundle([CoinSolution(pool_prefarm, p_pool_2, p_solution)], G2Element())
print(sb_pool, sb_farmer)
res = await client.push_tx(sb_farmer)
print(res)
up = await client.get_coin_records_by_puzzle_hash(farmer_prefarm.puzzle_hash, True)
uf = await client.get_coin_records_by_puzzle_hash(pool_prefarm.puzzle_hash, True)
print(up)
print(uf)
finally:
client.close()
asyncio.run(main())
| true | true |
f7319cdf9d302bcce6db295df0177ccd74b1ba86 | 457 | py | Python | server/src/views/tests/test_delete.py | budtmo/GIoT | 4cfdeacbe53b2bee10613519d86fa1bb0d9057b4 | [
"Apache-2.0"
] | null | null | null | server/src/views/tests/test_delete.py | budtmo/GIoT | 4cfdeacbe53b2bee10613519d86fa1bb0d9057b4 | [
"Apache-2.0"
] | null | null | null | server/src/views/tests/test_delete.py | budtmo/GIoT | 4cfdeacbe53b2bee10613519d86fa1bb0d9057b4 | [
"Apache-2.0"
] | null | null | null | from src.views.tests import BaseTest
class TestDeleteDevice(BaseTest):
    """Tests for removing a device through the DELETE /device/<id> endpoint."""
    def test_delete_device(self):
        """A registered device is deleted and the API answers 204 No Content."""
        self.register_device()
        device_id = 1
        response = self.test_app.delete('/device/{id}'.format(id=device_id))
        self.assertEqual(response.status_code, 204)
    def test_delete_non_existing_device(self):
        """Deleting an id that was never registered answers 404 Not Found."""
        missing_id = 5
        response = self.test_app.delete('/device/{id}'.format(id=missing_id))
        self.assertEqual(response.status_code, 404)
| 30.466667 | 63 | 0.682713 | from src.views.tests import BaseTest
class TestDeleteDevice(BaseTest):
def test_delete_device(self):
self.register_device()
res = self.test_app.delete('/device/{id}'.format(id=1))
self.assertEqual(res.status_code, 204)
def test_delete_non_existing_device(self):
res = self.test_app.delete('/device/{id}'.format(id=5))
self.assertEqual(res.status_code, 404)
| true | true |
f7319ce68a4eb20de0f2eff9b0d27c720c5108bd | 596 | py | Python | sherlockpipe/search_zones/SearchZone.py | LuisCerdenoMota/SHERLOCK | 5fb52795d3ab44e27bc7dbc6f2c2e6c214995ba1 | [
"MIT"
] | 1 | 2021-01-14T16:44:48.000Z | 2021-01-14T16:44:48.000Z | sherlockpipe/search_zones/SearchZone.py | martindevora/SHERLOCK | 5e7492552cbce29e960684a44fd6ad875c8cf60e | [
"MIT"
] | null | null | null | sherlockpipe/search_zones/SearchZone.py | martindevora/SHERLOCK | 5e7492552cbce29e960684a44fd6ad875c8cf60e | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from sherlockpipe.star.starinfo import StarInfo
class SearchZone(ABC):
    """
    Abstract class to be implemented for calculating minimum and maximum search periods for an input star.
    """
    def __init__(self):
        # No state of its own; subclasses may add configuration.
        pass
    @abstractmethod
    def calculate_period_range(self, star_info: StarInfo):
        """
        Calculates the minimum and maximum periods for the given star_info
        @param star_info: the star where the range should be calculated
        @return: a tuple of minimum_period and maximum_period
        """
        pass
| 28.380952 | 106 | 0.692953 | from abc import ABC, abstractmethod
from sherlockpipe.star.starinfo import StarInfo
class SearchZone(ABC):
def __init__(self):
pass
@abstractmethod
def calculate_period_range(self, star_info: StarInfo):
pass
| true | true |
f7319d88946b78f26293dd74a23f8a820010f76c | 4,775 | py | Python | .environment/lib/python3.8/site-packages/docplex/mp/engine_factory.py | LuisMi1245/QPath-and-Snakes | 48f784da67d9720c955890a28543c9863e02a455 | [
"MIT"
] | null | null | null | .environment/lib/python3.8/site-packages/docplex/mp/engine_factory.py | LuisMi1245/QPath-and-Snakes | 48f784da67d9720c955890a28543c9863e02a455 | [
"MIT"
] | null | null | null | .environment/lib/python3.8/site-packages/docplex/mp/engine_factory.py | LuisMi1245/QPath-and-Snakes | 48f784da67d9720c955890a28543c9863e02a455 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------
# Source file provided under Apache License, Version 2.0, January 2004,
# http://www.apache.org/licenses/
# (c) Copyright IBM Corp. 2015, 2016
# --------------------------------------------------------------------------
# gendoc: ignore
from docplex.mp.engine import NoSolveEngine, ZeroSolveEngine
from docplex.mp.utils import is_string
from docplex.mp.error_handler import docplex_fatal
class EngineFactory(object):
    """ A factory class that manages creation of solver instances.

    Agent names (lower-cased strings) are mapped to engine classes in
    ``_engine_types_by_agent``; the "cplex" entry is added lazily once a
    CPLEX runtime has been detected via ``_resolve_cplex``.
    """
    # Built-in agents; "cplex" is registered later only if the runtime is present.
    _default_engine_map = {"nosolve": NoSolveEngine,
                           "zero": ZeroSolveEngine
                           }
    def __init__(self, env=None):
        # Per-instance copy so `extend` never mutates the class-level map.
        self._engine_types_by_agent = self._default_engine_map.copy()
        # no cplex engine type yet?
        if env is not None:
            self._resolve_cplex(env)
    def _get_engine_type_from_agent(self, agent, default_engine, default_engine_name):
        """Map `agent` to an engine class.

        Accepted forms:
          - None: return `default_engine`;
          - a known agent name (case-insensitive);
          - "cplex" when no runtime was registered: warn, fall back to the default;
          - a dotted qualified name: imported dynamically (ValueError re-raised on failure);
          - a class object: returned as-is.
        Anything else aborts via `docplex_fatal`.
        """
        if agent is None:
            return default_engine
        elif is_string(agent):
            agent_key = agent.lower()
            engine_type = self._engine_types_by_agent.get(agent_key)
            if engine_type:
                return engine_type
            elif 'cplex' == agent_key:
                print('* warning: CPLEX runtime not found in path, using {0} instead'.format(default_engine_name))
                return self._engine_types_by_agent.get(default_engine_name)
            elif '.' in agent:
                # assuming a qualified name, e.g. com.ibm.docplex.quantum.QuantumEngine
                from docplex.mp.internal.mloader import import_class
                try:
                    agent_class = import_class(agent)
                    return agent_class
                except ValueError as ve:
                    print(
                        "Cannot load agent class {0}, expecting 'cplex' or valid class path, error: {1}".format(
                            agent, str(ve)))
                    raise ve
            else:
                docplex_fatal("Unexpected agent name: {0}, expecting 'cplex' or valid class path", agent)
        else:
            # try a class type
            try:
                # noinspection PyUnresolvedReferences
                from inspect import isclass
                if isclass(agent):
                    return agent
            except ImportError:
                # NOTE(review): `inspect` is stdlib, so this fallback is effectively
                # dead on CPython; kept as-is to preserve behavior.
                if type(agent) == type:
                    return agent
            # agent cannot be mapped to any class.
            docplex_fatal("* unexpected agent: {0!r} -expecting 'cplex', class or class name", agent)
    def _is_cplex_resolved(self):
        # Resolution is marked by the mere presence of the attribute,
        # whose value may still be None (runtime absent).
        return hasattr(self, "_cplex_engine_type")
    def _resolve_cplex(self, env):
        # INTERNAL
        # Detect a CPLEX runtime once; sets `_cplex_engine_type` to the engine
        # class, or to None when `env` reports no CPLEX.
        if env is None:
            docplex_fatal("need an environment to resolve cplex, got None")
        if not self._is_cplex_resolved():
            if env.has_cplex:
                env.check_cplex_version()
                from docplex.mp.cplex_engine import CplexEngine
                self._cplex_engine_type = CplexEngine
                # noinspection PyTypeChecker
                self._engine_types_by_agent["cplex"] = CplexEngine
            else:
                self._cplex_engine_type = None
    def _ensure_cplex_resolved(self, env):
        # Idempotent: resolve on first call only.
        if not self._is_cplex_resolved():
            self._resolve_cplex(env)
        assert self._is_cplex_resolved()
    def new_engine(self, agent, env, model, context=None):
        """Instantiate an engine for `model`, choosing the class from `agent`.

        Defaults to CPLEX when the runtime is available, else to "nosolve".
        `context`, when given, is forwarded as a keyword argument.
        """
        self._ensure_cplex_resolved(env)
        # compute a default engine and kwargs to use..
        kwargs = {}
        if self._cplex_engine_type:
            # default is CPLEX if we have it
            default_engine_type = self._cplex_engine_type
            default_engine_name = 'cplex'
        else:
            default_engine_type = NoSolveEngine
            default_engine_name = 'nosolve'
        if context is not None:
            kwargs['context'] = context
        engine_type = self._get_engine_type_from_agent(agent=agent,
                                                       default_engine=default_engine_type,
                                                       default_engine_name=default_engine_name)
        assert engine_type is not None
        try:
            return engine_type(model, **kwargs)
        except TypeError:
            docplex_fatal("agent: {0!s} failed to create instance from model, kwargs.", agent)
    def extend(self, new_agent, new_engine):
        # INTERNAL
        # Register (or override) an agent-name -> engine-class mapping on this instance.
        assert new_engine is not None
        self._engine_types_by_agent[new_agent] = new_engine
| 39.46281 | 115 | 0.553927 |
from docplex.mp.engine import NoSolveEngine, ZeroSolveEngine
from docplex.mp.utils import is_string
from docplex.mp.error_handler import docplex_fatal
class EngineFactory(object):
_default_engine_map = {"nosolve": NoSolveEngine,
"zero": ZeroSolveEngine
}
def __init__(self, env=None):
self._engine_types_by_agent = self._default_engine_map.copy()
if env is not None:
self._resolve_cplex(env)
def _get_engine_type_from_agent(self, agent, default_engine, default_engine_name):
if agent is None:
return default_engine
elif is_string(agent):
agent_key = agent.lower()
engine_type = self._engine_types_by_agent.get(agent_key)
if engine_type:
return engine_type
elif 'cplex' == agent_key:
print('* warning: CPLEX runtime not found in path, using {0} instead'.format(default_engine_name))
return self._engine_types_by_agent.get(default_engine_name)
elif '.' in agent:
from docplex.mp.internal.mloader import import_class
try:
agent_class = import_class(agent)
return agent_class
except ValueError as ve:
print(
"Cannot load agent class {0}, expecting 'cplex' or valid class path, error: {1}".format(
agent, str(ve)))
raise ve
else:
docplex_fatal("Unexpected agent name: {0}, expecting 'cplex' or valid class path", agent)
else:
try:
from inspect import isclass
if isclass(agent):
return agent
except ImportError:
if type(agent) == type:
return agent
docplex_fatal("* unexpected agent: {0!r} -expecting 'cplex', class or class name", agent)
def _is_cplex_resolved(self):
return hasattr(self, "_cplex_engine_type")
def _resolve_cplex(self, env):
if env is None:
docplex_fatal("need an environment to resolve cplex, got None")
if not self._is_cplex_resolved():
if env.has_cplex:
env.check_cplex_version()
from docplex.mp.cplex_engine import CplexEngine
self._cplex_engine_type = CplexEngine
self._engine_types_by_agent["cplex"] = CplexEngine
else:
self._cplex_engine_type = None
def _ensure_cplex_resolved(self, env):
if not self._is_cplex_resolved():
self._resolve_cplex(env)
assert self._is_cplex_resolved()
def new_engine(self, agent, env, model, context=None):
self._ensure_cplex_resolved(env)
kwargs = {}
if self._cplex_engine_type:
default_engine_type = self._cplex_engine_type
default_engine_name = 'cplex'
else:
default_engine_type = NoSolveEngine
default_engine_name = 'nosolve'
if context is not None:
kwargs['context'] = context
engine_type = self._get_engine_type_from_agent(agent=agent,
default_engine=default_engine_type,
default_engine_name=default_engine_name)
assert engine_type is not None
try:
return engine_type(model, **kwargs)
except TypeError:
docplex_fatal("agent: {0!s} failed to create instance from model, kwargs.", agent)
def extend(self, new_agent, new_engine):
assert new_engine is not None
self._engine_types_by_agent[new_agent] = new_engine
| true | true |
f7319da39f77cefd4e939a349a31747373e82aa7 | 4,711 | py | Python | simple_api/django_object/django_object.py | karlosss/simple_api | 03f87035c648f161d5e7a59b24f4e04bd34399f1 | [
"MIT"
] | 2 | 2020-11-13T14:00:06.000Z | 2020-12-19T11:50:22.000Z | simple_api/django_object/django_object.py | karlosss/simple_api | 03f87035c648f161d5e7a59b24f4e04bd34399f1 | [
"MIT"
] | 5 | 2021-02-04T14:27:43.000Z | 2021-06-04T23:22:24.000Z | simple_api/django_object/django_object.py | karlosss/simple_api | 03f87035c648f161d5e7a59b24f4e04bd34399f1 | [
"MIT"
] | 1 | 2021-01-06T13:54:38.000Z | 2021-01-06T13:54:38.000Z | from copy import deepcopy
from simple_api.django_object.actions import DetailAction, ListAction, CreateAction, UpdateAction, DeleteAction
from simple_api.django_object.datatypes import create_associated_list_type
from simple_api.django_object.filters import generate_filters
from simple_api.django_object.converter import determine_simple_api_fields
from simple_api.django_object.utils import get_pk_field
from simple_api.object.datatypes import StringType
from simple_api.object.object import Object, ObjectMeta
from simple_api.object.registry import object_storage
from simple_api.django_object.registry import model_django_object_storage
from simple_api.utils import ClassStub
class DjangoObjectMeta(type):
    """Metaclass that turns a `DjangoObject` subclass into a simple_api `Object`.

    For each concrete subclass it derives fields from the Django model,
    builds filter and list types, converts the declared actions, and stores
    the generated `Object` class on `cls._object`.
    """
    # Key of the abstract base class itself, which must be skipped below.
    base_class = "simple_api.django_object.django_object.DjangoObject"
    def __new__(mcs, name, bases, attrs, **kwargs):
        """Build the class, then (unless skipped or it is the base class)
        generate and attach the backing `Object` class."""
        cls = super().__new__(mcs, name, bases, attrs)
        if kwargs.get("skip", False) or object_storage.key_for_class(attrs["__module__"], name) == mcs.base_class:
            return cls
        object_stub = ClassStub(name=cls.__name__, bases=(Object,))
        # set the module of the generated Object class to match the module of the user class
        object_stub.add_attr("__module__", cls.__module__)
        assert cls.model is not None, "`model` must be set."
        # if the class is meant to resolve relations, store it for the particular model
        if cls.class_for_related:
            model_django_object_storage.store(cls.model, cls)
        cls.pk_field_name, cls.pk_field = get_pk_field(cls.model)
        object_stub.add_attr("pk_field", cls.pk_field_name)
        # make sure the primary key is included, otherwise `ModelObjectAction`s would just not work
        if cls.only_fields and cls.pk_field_name not in cls.only_fields:
            cls.only_fields = cls.only_fields + (cls.pk_field_name,)
        elif cls.exclude_fields and cls.pk_field_name in cls.exclude_fields:
            cls.exclude_fields = (f for f in cls.exclude_fields if f != cls.pk_field_name)
        fields, input_fields, output_fields, field_validators = determine_simple_api_fields(
            cls.model,
            cls.only_fields, cls.exclude_fields,
            cls.custom_fields, cls.input_custom_fields, cls.output_custom_fields,
        )
        # Expose the model's __str__ as an output-only string field.
        output_fields["__str__"] = StringType(resolver=lambda *a, **kw: kw["parent_val"]())
        # Custom input/output fields must not shadow the model-derived fields.
        for f in input_fields:
            assert f not in fields, "Redefinition of `{}` field.".format(f)
        cls.in_fields = {**fields, **input_fields}
        for f in output_fields:
            assert f not in fields, "Redefinition of `{}` field.".format(f)
        cls.out_fields = {**fields, **output_fields}
        object_stub.add_attr("fields", fields)
        object_stub.add_attr("input_fields", input_fields)
        object_stub.add_attr("output_fields", output_fields)
        # create filters and List type for potential listing actions
        cls.filter_type = ObjectMeta("{}Filters".format(cls.__name__), (Object,), {"fields": generate_filters(cls),
                                                                                  "hidden": True})
        object_stub.add_attr("filter_type", cls.filter_type)
        create_associated_list_type(cls)
        # Collect the enabled CRUD actions (deepcopy so class-level defaults
        # shared between subclasses are never mutated), then custom ones.
        actions = {}
        if cls.detail_action is not None:
            actions["detail"] = deepcopy(cls.detail_action)
        if cls.list_action is not None:
            actions["list"] = deepcopy(cls.list_action)
        if cls.create_action is not None:
            actions["create"] = deepcopy(cls.create_action)
        if cls.update_action is not None:
            actions["update"] = deepcopy(cls.update_action)
        if cls.delete_action is not None:
            actions["delete"] = deepcopy(cls.delete_action)
        actions.update(cls.custom_actions)
        converted_actions = {}
        for action_name, action in actions.items():
            action.set_parent_class(cls)
            action.set_name(action_name)
            converted_actions[action_name] = action.to_action()
        object_stub.add_attr("actions", converted_actions)
        cls._object = object_stub.build(ObjectMeta)
        return cls
class DjangoObject(metaclass=DjangoObjectMeta):
    """Declarative base for exposing a Django model via simple_api.

    Subclasses set `model` and may override the attributes below; the
    metaclass then generates the backing `Object` class.
    """
    # The Django model to expose; required (asserted by the metaclass).
    model = None
    # presumably toggles automatic primary-key handling -- not read in this file; TODO confirm
    auto_pk = True
    # When True, the metaclass registers this class as the resolver for relations to `model`.
    class_for_related = True
    # Whitelist / blacklist of model field names (the pk is always kept).
    only_fields = None
    exclude_fields = None
    # Extra fields: both directions, input-only, and output-only.
    custom_fields = {}
    input_custom_fields = {}
    output_custom_fields = {}
    # Default CRUD actions; set any of them to None in a subclass to disable it.
    detail_action = DetailAction()
    list_action = ListAction()
    create_action = CreateAction()
    update_action = UpdateAction()
    delete_action = DeleteAction()
    # Additional actions keyed by name; merged over the defaults.
    custom_actions = {}
    @classmethod
    def to_object(cls):
        # Return the Object class generated by DjangoObjectMeta.
        return cls._object
| 39.923729 | 115 | 0.68478 | from copy import deepcopy
from simple_api.django_object.actions import DetailAction, ListAction, CreateAction, UpdateAction, DeleteAction
from simple_api.django_object.datatypes import create_associated_list_type
from simple_api.django_object.filters import generate_filters
from simple_api.django_object.converter import determine_simple_api_fields
from simple_api.django_object.utils import get_pk_field
from simple_api.object.datatypes import StringType
from simple_api.object.object import Object, ObjectMeta
from simple_api.object.registry import object_storage
from simple_api.django_object.registry import model_django_object_storage
from simple_api.utils import ClassStub
class DjangoObjectMeta(type):
base_class = "simple_api.django_object.django_object.DjangoObject"
def __new__(mcs, name, bases, attrs, **kwargs):
cls = super().__new__(mcs, name, bases, attrs)
if kwargs.get("skip", False) or object_storage.key_for_class(attrs["__module__"], name) == mcs.base_class:
return cls
object_stub = ClassStub(name=cls.__name__, bases=(Object,))
object_stub.add_attr("__module__", cls.__module__)
assert cls.model is not None, "`model` must be set."
if cls.class_for_related:
model_django_object_storage.store(cls.model, cls)
cls.pk_field_name, cls.pk_field = get_pk_field(cls.model)
object_stub.add_attr("pk_field", cls.pk_field_name)
if cls.only_fields and cls.pk_field_name not in cls.only_fields:
cls.only_fields = cls.only_fields + (cls.pk_field_name,)
elif cls.exclude_fields and cls.pk_field_name in cls.exclude_fields:
cls.exclude_fields = (f for f in cls.exclude_fields if f != cls.pk_field_name)
fields, input_fields, output_fields, field_validators = determine_simple_api_fields(
cls.model,
cls.only_fields, cls.exclude_fields,
cls.custom_fields, cls.input_custom_fields, cls.output_custom_fields,
)
output_fields["__str__"] = StringType(resolver=lambda *a, **kw: kw["parent_val"]())
for f in input_fields:
assert f not in fields, "Redefinition of `{}` field.".format(f)
cls.in_fields = {**fields, **input_fields}
for f in output_fields:
assert f not in fields, "Redefinition of `{}` field.".format(f)
cls.out_fields = {**fields, **output_fields}
object_stub.add_attr("fields", fields)
object_stub.add_attr("input_fields", input_fields)
object_stub.add_attr("output_fields", output_fields)
cls.filter_type = ObjectMeta("{}Filters".format(cls.__name__), (Object,), {"fields": generate_filters(cls),
"hidden": True})
object_stub.add_attr("filter_type", cls.filter_type)
create_associated_list_type(cls)
actions = {}
if cls.detail_action is not None:
actions["detail"] = deepcopy(cls.detail_action)
if cls.list_action is not None:
actions["list"] = deepcopy(cls.list_action)
if cls.create_action is not None:
actions["create"] = deepcopy(cls.create_action)
if cls.update_action is not None:
actions["update"] = deepcopy(cls.update_action)
if cls.delete_action is not None:
actions["delete"] = deepcopy(cls.delete_action)
actions.update(cls.custom_actions)
converted_actions = {}
for action_name, action in actions.items():
action.set_parent_class(cls)
action.set_name(action_name)
converted_actions[action_name] = action.to_action()
object_stub.add_attr("actions", converted_actions)
cls._object = object_stub.build(ObjectMeta)
return cls
class DjangoObject(metaclass=DjangoObjectMeta):
model = None
auto_pk = True
class_for_related = True
only_fields = None
exclude_fields = None
custom_fields = {}
input_custom_fields = {}
output_custom_fields = {}
detail_action = DetailAction()
list_action = ListAction()
create_action = CreateAction()
update_action = UpdateAction()
delete_action = DeleteAction()
custom_actions = {}
@classmethod
def to_object(cls):
return cls._object
| true | true |
f7319df270d89691854f3a1b9a62c1231d3c7bfe | 8,177 | py | Python | python/paddle/fluid/tests/unittests/xpu/test_expand_v2_op_xpu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
] | 11 | 2016-08-29T07:43:26.000Z | 2016-08-29T07:51:24.000Z | python/paddle/fluid/tests/unittests/xpu/test_expand_v2_op_xpu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/xpu/test_expand_v2_op_xpu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
] | 1 | 2021-12-09T08:59:17.000Z | 2021-12-09T08:59:17.000Z | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import sys
import numpy as np
sys.path.append("..")
from op_test import OpTest
from op_test_xpu import XPUOpTest
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
import paddle
from xpu.get_test_cover_info import create_test_class, get_xpu_op_support_types, XPUOpTestWrapper
# These op tests build static graphs, so switch Paddle to static mode first.
paddle.enable_static()
# Fix the NumPy seed so the randomly generated test inputs are reproducible.
np.random.seed(10)
# CANN Op Support X: float32, int32, int64
# Situation 1: shape is a list(without tensor)
class XPUTestExpandV2Op(XPUOpTestWrapper):
    """Wrapper that groups the expand_v2 XPU op tests; `create_test_class`
    instantiates the nested test cases once per supported dtype."""
    def __init__(self):
        self.op_name = 'expand_v2'
        self.use_dynamic_create_class = False
    class TestExpandV2XPUOp(XPUOpTest):
        """Base case: target shape given as an op attribute (list, no tensors)."""
        def setUp(self):
            self.init_dtype()
            self.set_xpu()
            self.op_type = "expand_v2"
            self.place = paddle.XPUPlace(0)
            self.init_data()
            self.inputs = {
                'X': np.random.random(self.ori_shape).astype(self.dtype)
            }
            self.attrs = {'shape': self.shape}
            # Reference output computed on CPU with numpy's tile.
            output = np.tile(self.inputs['X'], self.expand_times)
            self.outputs = {'Out': output}
        def init_dtype(self):
            # `in_type` is injected per-dtype by create_test_class.
            self.dtype = self.in_type
        def set_xpu(self):
            self.__class__.use_xpu = True
            self.__class__.no_need_check_grad = True
        def init_data(self):
            # Overridden by subclasses to vary shapes; `expand_times` is the
            # tiling factor matching `shape` for the numpy reference.
            self.ori_shape = [100]
            self.shape = [100]
            self.expand_times = [1]
        def test_check_output(self):
            self.check_output_with_place(self.place)
    class TestExpandV2OpRank2_DimExpanding(TestExpandV2XPUOp):
        """Expanding adds a leading dimension."""
        def init_data(self):
            self.ori_shape = [120]
            self.shape = [2, 120]
            self.expand_times = [2, 1]
    class TestExpandV2OpRank2(TestExpandV2XPUOp):
        """Broadcast a size-1 leading dimension."""
        def init_data(self):
            self.ori_shape = [1, 140]
            self.shape = [12, 140]
            self.expand_times = [12, 1]
    class TestExpandV2OpRank3_Corner(TestExpandV2XPUOp):
        """Corner case: target shape equals the input shape (no-op expand)."""
        def init_data(self):
            self.ori_shape = (2, 10, 5)
            self.shape = (2, 10, 5)
            self.expand_times = (1, 1, 1)
    class TestExpandV2OpRank4(TestExpandV2XPUOp):
        """All -1 entries: every dimension kept as-is."""
        def init_data(self):
            self.ori_shape = (2, 4, 5, 7)
            self.shape = (-1, -1, -1, -1)
            self.expand_times = (1, 1, 1, 1)
    class TestExpandV2OpRank5(TestExpandV2XPUOp):
        """Mix of -1 (keep) and explicit broadcast of a size-1 dimension."""
        def init_data(self):
            self.ori_shape = (2, 4, 1, 15)
            self.shape = (2, -1, 4, -1)
            self.expand_times = (1, 1, 4, 1)
    class TestExpandV2OpRank6(TestExpandV2XPUOp):
        """Rank is increased and an inserted dimension is broadcast."""
        def init_data(self):
            self.ori_shape = (4, 1, 30)
            self.shape = (2, -1, 4, 30)
            self.expand_times = (2, 1, 4, 1)
    # Situation 2: shape is a list(with tensor)
    class TestExpandV2OpXPURank1_tensor_attr(TestExpandV2XPUOp):
        """Target shape passed as a list of 1-element int32 tensors."""
        def setUp(self):
            self.set_xpu()
            self.place = paddle.XPUPlace(0)
            self.op_type = "expand_v2"
            self.init_data()
            self.dtype = np.float32
            # One ("x<i>", tensor) pair per target dimension.
            expand_shapes_tensor = []
            for index, ele in enumerate(self.expand_shape):
                expand_shapes_tensor.append(("x" + str(index), np.ones(
                    (1)).astype('int32') * ele))
            self.inputs = {
                'X': np.random.random(self.ori_shape).astype(self.dtype),
                'expand_shapes_tensor': expand_shapes_tensor,
            }
            # The attribute only carries the statically inferable part (-1 = unknown).
            self.attrs = {"shape": self.infer_expand_shape}
            output = np.tile(self.inputs['X'], self.expand_times)
            self.outputs = {'Out': output}
        def init_data(self):
            self.ori_shape = [100]
            self.expand_times = [1]
            self.expand_shape = [100]
            self.infer_expand_shape = [-1]
    class TestExpandV2OpRank2_Corner_tensor_attr(
            TestExpandV2OpXPURank1_tensor_attr):
        """Rank-2 variant of the tensor-list shape input."""
        def init_data(self):
            self.ori_shape = [12, 14]
            self.expand_times = [1, 1]
            self.expand_shape = [12, 14]
            self.infer_expand_shape = [12, -1]
    # Situation 3: shape is a tensor
    class TestExpandV2XPUOp_tensor(TestExpandV2XPUOp):
        """Target shape passed as a single int32 'Shape' input tensor."""
        def setUp(self):
            self.set_xpu()
            self.place = paddle.XPUPlace(0)
            self.op_type = "expand_v2"
            self.init_data()
            self.dtype = np.float32
            self.inputs = {
                'X': np.random.random(self.ori_shape).astype(self.dtype),
                'Shape': np.array(self.expand_shape).astype("int32"),
            }
            self.attrs = {}
            output = np.tile(self.inputs['X'], self.expand_times)
            self.outputs = {'Out': output}
        def init_data(self):
            self.ori_shape = [100]
            self.expand_times = [2, 1]
            self.expand_shape = [2, 100]
# Situation 5: input x is int32
# skip grad check for int32
class TestExpandV2OpInteger(XPUOpTest):
    """expand_v2 with int32 input; gradient check is skipped for integers."""
    def init_type(self):
        self.dtype = 'int32'
    def setUp(self):
        self.set_xpu()
        self.init_type()
        self.place = paddle.XPUPlace(0)
        self.op_type = "expand_v2"
        self.inputs = {
            'X': np.random.randint(10, size=(2, 4, 20)).astype(self.dtype)
        }
        # Target shape equals the input shape, so the op is a no-op expand.
        self.attrs = {'shape': [2, 4, 20]}
        output = np.tile(self.inputs['X'], (1, 1, 1))
        self.outputs = {'Out': output}
    def set_xpu(self):
        self.__class__.use_xpu = True
    def test_check_output(self):
        self.check_output_with_place(self.place)
    def test_check_grad(self):
        # Intentionally empty: no gradient is defined/checked for int32 input.
        pass
# Test python API
class TestExpandV2API(unittest.TestCase):
    """Exercise the Python-level paddle.expand API in static-graph mode."""
    def test_static(self):
        """Shape given as a literal list, as a list with a tensor element,
        and as a whole tensor must all reproduce the input (expand to the
        same 12x14 shape is an identity tile)."""
        with fluid.program_guard(fluid.Program(), fluid.Program()):
            input = np.random.random([12, 14]).astype("float32")
            x = fluid.layers.data(name='x',
                                  shape=[12, 14],
                                  append_batch_size=False,
                                  dtype="float32")
            positive_2 = fluid.layers.fill_constant([1], "int32", 12)
            expand_shape = fluid.layers.data(name="expand_shape",
                                             shape=[2],
                                             append_batch_size=False,
                                             dtype="int32")
            out_1 = paddle.expand(x, shape=[12, 14])
            out_2 = paddle.expand(x, shape=[positive_2, 14])
            out_3 = paddle.expand(x, shape=expand_shape)
            # Build the backward graph for out_2; the value itself is unused,
            # this only checks that gradient construction does not raise.
            g0 = fluid.backward.calc_gradient(out_2, x)
            exe = fluid.Executor(place=paddle.XPUPlace(0))
            res_1, res_2, res_3 = exe.run(fluid.default_main_program(),
                                          feed={
                                              "x":
                                              input,
                                              "expand_shape":
                                              np.array([12, 14]).astype("int32")
                                          },
                                          fetch_list=[out_1, out_2, out_3])
            assert np.array_equal(res_1, np.tile(input, (1, 1)))
            assert np.array_equal(res_2, np.tile(input, (1, 1)))
            assert np.array_equal(res_3, np.tile(input, (1, 1)))
# Register one concrete TestCase class per dtype supported by the XPU
# 'expand_v2' kernel on this build.
support_types = get_xpu_op_support_types('expand_v2')
for stype in support_types:
    create_test_class(globals(), XPUTestExpandV2Op, stype)
if __name__ == "__main__":
    unittest.main()
| 33.239837 | 97 | 0.559863 |
from __future__ import print_function
import unittest
import sys
import numpy as np
sys.path.append("..")
from op_test import OpTest
from op_test_xpu import XPUOpTest
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
import paddle
from xpu.get_test_cover_info import create_test_class, get_xpu_op_support_types, XPUOpTestWrapper
paddle.enable_static()
np.random.seed(10)
class XPUTestExpandV2Op(XPUOpTestWrapper):
def __init__(self):
self.op_name = 'expand_v2'
self.use_dynamic_create_class = False
class TestExpandV2XPUOp(XPUOpTest):
def setUp(self):
self.init_dtype()
self.set_xpu()
self.op_type = "expand_v2"
self.place = paddle.XPUPlace(0)
self.init_data()
self.inputs = {
'X': np.random.random(self.ori_shape).astype(self.dtype)
}
self.attrs = {'shape': self.shape}
output = np.tile(self.inputs['X'], self.expand_times)
self.outputs = {'Out': output}
def init_dtype(self):
self.dtype = self.in_type
def set_xpu(self):
self.__class__.use_xpu = True
self.__class__.no_need_check_grad = True
def init_data(self):
self.ori_shape = [100]
self.shape = [100]
self.expand_times = [1]
def test_check_output(self):
self.check_output_with_place(self.place)
class TestExpandV2OpRank2_DimExpanding(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = [120]
self.shape = [2, 120]
self.expand_times = [2, 1]
class TestExpandV2OpRank2(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = [1, 140]
self.shape = [12, 140]
self.expand_times = [12, 1]
class TestExpandV2OpRank3_Corner(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = (2, 10, 5)
self.shape = (2, 10, 5)
self.expand_times = (1, 1, 1)
class TestExpandV2OpRank4(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = (2, 4, 5, 7)
self.shape = (-1, -1, -1, -1)
self.expand_times = (1, 1, 1, 1)
class TestExpandV2OpRank5(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = (2, 4, 1, 15)
self.shape = (2, -1, 4, -1)
self.expand_times = (1, 1, 4, 1)
class TestExpandV2OpRank6(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = (4, 1, 30)
self.shape = (2, -1, 4, 30)
self.expand_times = (2, 1, 4, 1)
class TestExpandV2OpXPURank1_tensor_attr(TestExpandV2XPUOp):
def setUp(self):
self.set_xpu()
self.place = paddle.XPUPlace(0)
self.op_type = "expand_v2"
self.init_data()
self.dtype = np.float32
expand_shapes_tensor = []
for index, ele in enumerate(self.expand_shape):
expand_shapes_tensor.append(("x" + str(index), np.ones(
(1)).astype('int32') * ele))
self.inputs = {
'X': np.random.random(self.ori_shape).astype(self.dtype),
'expand_shapes_tensor': expand_shapes_tensor,
}
self.attrs = {"shape": self.infer_expand_shape}
output = np.tile(self.inputs['X'], self.expand_times)
self.outputs = {'Out': output}
def init_data(self):
self.ori_shape = [100]
self.expand_times = [1]
self.expand_shape = [100]
self.infer_expand_shape = [-1]
class TestExpandV2OpRank2_Corner_tensor_attr(
TestExpandV2OpXPURank1_tensor_attr):
def init_data(self):
self.ori_shape = [12, 14]
self.expand_times = [1, 1]
self.expand_shape = [12, 14]
self.infer_expand_shape = [12, -1]
class TestExpandV2XPUOp_tensor(TestExpandV2XPUOp):
def setUp(self):
self.set_xpu()
self.place = paddle.XPUPlace(0)
self.op_type = "expand_v2"
self.init_data()
self.dtype = np.float32
self.inputs = {
'X': np.random.random(self.ori_shape).astype(self.dtype),
'Shape': np.array(self.expand_shape).astype("int32"),
}
self.attrs = {}
output = np.tile(self.inputs['X'], self.expand_times)
self.outputs = {'Out': output}
def init_data(self):
self.ori_shape = [100]
self.expand_times = [2, 1]
self.expand_shape = [2, 100]
class TestExpandV2OpInteger(XPUOpTest):
def init_type(self):
self.dtype = 'int32'
def setUp(self):
self.set_xpu()
self.init_type()
self.place = paddle.XPUPlace(0)
self.op_type = "expand_v2"
self.inputs = {
'X': np.random.randint(10, size=(2, 4, 20)).astype(self.dtype)
}
self.attrs = {'shape': [2, 4, 20]}
output = np.tile(self.inputs['X'], (1, 1, 1))
self.outputs = {'Out': output}
def set_xpu(self):
self.__class__.use_xpu = True
def test_check_output(self):
self.check_output_with_place(self.place)
def test_check_grad(self):
pass
class TestExpandV2API(unittest.TestCase):
def test_static(self):
with fluid.program_guard(fluid.Program(), fluid.Program()):
input = np.random.random([12, 14]).astype("float32")
x = fluid.layers.data(name='x',
shape=[12, 14],
append_batch_size=False,
dtype="float32")
positive_2 = fluid.layers.fill_constant([1], "int32", 12)
expand_shape = fluid.layers.data(name="expand_shape",
shape=[2],
append_batch_size=False,
dtype="int32")
out_1 = paddle.expand(x, shape=[12, 14])
out_2 = paddle.expand(x, shape=[positive_2, 14])
out_3 = paddle.expand(x, shape=expand_shape)
g0 = fluid.backward.calc_gradient(out_2, x)
exe = fluid.Executor(place=paddle.XPUPlace(0))
res_1, res_2, res_3 = exe.run(fluid.default_main_program(),
feed={
"x":
input,
"expand_shape":
np.array([12, 14]).astype("int32")
},
fetch_list=[out_1, out_2, out_3])
assert np.array_equal(res_1, np.tile(input, (1, 1)))
assert np.array_equal(res_2, np.tile(input, (1, 1)))
assert np.array_equal(res_3, np.tile(input, (1, 1)))
support_types = get_xpu_op_support_types('expand_v2')
for stype in support_types:
create_test_class(globals(), XPUTestExpandV2Op, stype)
if __name__ == "__main__":
unittest.main()
| true | true |
f7319df5d2cfd54a36d3021fcea5ac6d789dee56 | 7,191 | py | Python | mapillary_tools/commands/process.py | kudarisenmon/mapillary_tools | ef3201847ece95a0bbeb95187e38a0a84309a2b7 | [
"BSD-2-Clause"
] | null | null | null | mapillary_tools/commands/process.py | kudarisenmon/mapillary_tools | ef3201847ece95a0bbeb95187e38a0a84309a2b7 | [
"BSD-2-Clause"
] | null | null | null | mapillary_tools/commands/process.py | kudarisenmon/mapillary_tools | ef3201847ece95a0bbeb95187e38a0a84309a2b7 | [
"BSD-2-Clause"
] | null | null | null | import inspect
from mapillary_tools.process_user_properties import process_user_properties
from mapillary_tools.process_import_meta_properties import process_import_meta_properties
from mapillary_tools.process_geotag_properties import process_geotag_properties
from mapillary_tools.process_sequence_properties import process_sequence_properties
from mapillary_tools.process_upload_params import process_upload_params
from mapillary_tools.insert_MAPJson import insert_MAPJson
class Command:
name = 'process'
help = 'Main tool : Process image meta data and insert it in image EXIF.'
    def add_basic_arguments(self, parser):
        """Register the basic (non-advanced) CLI options for `process` on `parser`:
        rerun flag, required user name, organization identity, privacy flag,
        and subfolder skipping."""
        parser.add_argument(
            '--rerun', help='rerun the processing', action='store_true', required=False)
        # user properties
        # user name for the import
        parser.add_argument("--user_name", help="user name", required=True)
        # organization level parameters
        parser.add_argument(
            '--organization_username', help="Specify organization user name", default=None, required=False)
        parser.add_argument(
            '--organization_key', help="Specify organization key", default=None, required=False)
        parser.add_argument('--private',
                            help="Specify whether the import is private", action='store_true', default=False, required=False)
        parser.add_argument(
            '--skip_subfolders', help='Skip all subfolders and import only the images in the given directory path.', action='store_true', default=False, required=False)
    def add_advanced_arguments(self, parser):
        """Register the advanced CLI options on *parser*.

        Groups: master upload, import metadata overrides, geotagging
        sources/offsets, sequence splitting/deduplication, and EXIF
        insertion behavior.
        """
        # master upload
        parser.add_argument('--master_upload', help='Process images with a master key, note: only used by Mapillary employees',
                            action='store_true', default=False, required=False)
        #import meta
        parser.add_argument(
            "--device_make", help="Specify device manufacturer. Note this input has precedence over the input read from the import source file.", default=None, required=False)
        parser.add_argument(
            "--device_model", help="Specify device model. Note this input has precedence over the input read from the import source file.", default=None, required=False)
        parser.add_argument(
            '--add_file_name', help="Add original file name to EXIF. Note this input has precedence over the input read from the import source file.", action='store_true', required=False)
        parser.add_argument(
            '--add_import_date', help="Add import date.", action='store_true', required=False)
        parser.add_argument('--orientation', help='Specify the image orientation in degrees. Note this might result in image rotation. Note this input has precedence over the input read from the import source file.',
                            choices=[0, 90, 180, 270], type=int, default=None, required=False)
        parser.add_argument(
            "--GPS_accuracy", help="GPS accuracy in meters. Note this input has precedence over the input read from the import source file.", default=None, required=False)
        # geotagging
        parser.add_argument('--geotag_source', help='Provide the source of date/time and gps information needed for geotagging.', action='store',
                            choices=['exif', 'gpx', 'gopro_video', 'nmea'], default="exif", required=False)
        parser.add_argument(
            '--geotag_source_path', help='Provide the path to the file source of date/time and gps information needed for geotagging.', action='store',
            default=None, required=False)
        parser.add_argument(
            '--local_time', help='Assume image timestamps are in your local time', action='store_true', default=False, required=False)
        parser.add_argument('--sub_second_interval',
                            help='Sub second time between shots. Used to set image times with sub-second precision',
                            type=float, default=0.0, required=False)
        parser.add_argument('--offset_time', default=0., type=float,
                            help='time offset between the camera and the gps device, in seconds.', required=False)
        parser.add_argument('--offset_angle', default=0., type=float,
                            help='offset camera angle (90 for right facing, 180 for rear facing, -90 for left facing)', required=False)
        parser.add_argument("--use_gps_start_time",
                            help="Use GPS trace starting time in case of derivating timestamp from filename.", action="store_true", default=False, required=False)
        # sequence
        parser.add_argument('--cutoff_distance', default=600., type=float,
                            help='maximum gps distance in meters within a sequence', required=False)
        parser.add_argument('--cutoff_time', default=60., type=float,
                            help='maximum time interval in seconds within a sequence', required=False)
        parser.add_argument('--interpolate_directions',
                            help='perform interploation of directions', action='store_true', required=False)
        parser.add_argument('--flag_duplicates',
                            help='flag duplicates', action='store_true', required=False)
        parser.add_argument('--duplicate_distance',
                            help='max distance for two images to be considered duplicates in meters', type=float, default=0.1, required=False)
        parser.add_argument(
            '--duplicate_angle', help='max angle for two images to be considered duplicates in degrees', type=float, default=5, required=False)
        # EXIF insert
        parser.add_argument('--skip_EXIF_insert', help='Skip inserting the extracted data into image EXIF.',
                            action='store_true', default=False, required=False)
        parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.',
                            action='store_true', default=False, required=False)
    def run(self, args):
        """Dispatch the parsed CLI namespace through each processing stage.

        Each stage is called with only the subset of parsed arguments whose
        names appear in that stage's own signature (keyword filtering via
        ``inspect.getargspec``).

        :param args: ``argparse.Namespace`` produced from the options
                     registered by ``add_basic_arguments`` /
                     ``add_advanced_arguments``.
        """
        vars_args = vars(args)
        # NOTE(review): dict.iteritems() and inspect.getargspec() are
        # Python 2-era APIs (removed / deprecated in Python 3); this file
        # only runs on Python 2 as written.
        process_user_properties(**({k: v for k, v in vars_args.iteritems()
                                    if k in inspect.getargspec(process_user_properties).args}))
        process_import_meta_properties(
            **({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_import_meta_properties).args}))
        process_geotag_properties(
            **({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_geotag_properties).args}))
        process_sequence_properties(
            **({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_sequence_properties).args}))
        process_upload_params(**({k: v for k, v in vars_args.iteritems()
                                  if k in inspect.getargspec(process_upload_params).args}))
        insert_MAPJson(**({k: v for k, v in vars_args.iteritems()
                           if k in inspect.getargspec(insert_MAPJson).args}))
print("Process done.") | 67.205607 | 216 | 0.667779 | import inspect
from mapillary_tools.process_user_properties import process_user_properties
from mapillary_tools.process_import_meta_properties import process_import_meta_properties
from mapillary_tools.process_geotag_properties import process_geotag_properties
from mapillary_tools.process_sequence_properties import process_sequence_properties
from mapillary_tools.process_upload_params import process_upload_params
from mapillary_tools.insert_MAPJson import insert_MAPJson
class Command:
name = 'process'
help = 'Main tool : Process image meta data and insert it in image EXIF.'
def add_basic_arguments(self, parser):
parser.add_argument(
'--rerun', help='rerun the processing', action='store_true', required=False)
parser.add_argument("--user_name", help="user name", required=True)
parser.add_argument(
'--organization_username', help="Specify organization user name", default=None, required=False)
parser.add_argument(
'--organization_key', help="Specify organization key", default=None, required=False)
parser.add_argument('--private',
help="Specify whether the import is private", action='store_true', default=False, required=False)
parser.add_argument(
'--skip_subfolders', help='Skip all subfolders and import only the images in the given directory path.', action='store_true', default=False, required=False)
def add_advanced_arguments(self, parser):
parser.add_argument('--master_upload', help='Process images with a master key, note: only used by Mapillary employees',
action='store_true', default=False, required=False)
parser.add_argument(
"--device_make", help="Specify device manufacturer. Note this input has precedence over the input read from the import source file.", default=None, required=False)
parser.add_argument(
"--device_model", help="Specify device model. Note this input has precedence over the input read from the import source file.", default=None, required=False)
parser.add_argument(
'--add_file_name', help="Add original file name to EXIF. Note this input has precedence over the input read from the import source file.", action='store_true', required=False)
parser.add_argument(
'--add_import_date', help="Add import date.", action='store_true', required=False)
parser.add_argument('--orientation', help='Specify the image orientation in degrees. Note this might result in image rotation. Note this input has precedence over the input read from the import source file.',
choices=[0, 90, 180, 270], type=int, default=None, required=False)
parser.add_argument(
"--GPS_accuracy", help="GPS accuracy in meters. Note this input has precedence over the input read from the import source file.", default=None, required=False)
parser.add_argument('--geotag_source', help='Provide the source of date/time and gps information needed for geotagging.', action='store',
choices=['exif', 'gpx', 'gopro_video', 'nmea'], default="exif", required=False)
parser.add_argument(
'--geotag_source_path', help='Provide the path to the file source of date/time and gps information needed for geotagging.', action='store',
default=None, required=False)
parser.add_argument(
'--local_time', help='Assume image timestamps are in your local time', action='store_true', default=False, required=False)
parser.add_argument('--sub_second_interval',
help='Sub second time between shots. Used to set image times with sub-second precision',
type=float, default=0.0, required=False)
parser.add_argument('--offset_time', default=0., type=float,
help='time offset between the camera and the gps device, in seconds.', required=False)
parser.add_argument('--offset_angle', default=0., type=float,
help='offset camera angle (90 for right facing, 180 for rear facing, -90 for left facing)', required=False)
parser.add_argument("--use_gps_start_time",
help="Use GPS trace starting time in case of derivating timestamp from filename.", action="store_true", default=False, required=False)
parser.add_argument('--cutoff_distance', default=600., type=float,
help='maximum gps distance in meters within a sequence', required=False)
parser.add_argument('--cutoff_time', default=60., type=float,
help='maximum time interval in seconds within a sequence', required=False)
parser.add_argument('--interpolate_directions',
help='perform interploation of directions', action='store_true', required=False)
parser.add_argument('--flag_duplicates',
help='flag duplicates', action='store_true', required=False)
parser.add_argument('--duplicate_distance',
help='max distance for two images to be considered duplicates in meters', type=float, default=0.1, required=False)
parser.add_argument(
'--duplicate_angle', help='max angle for two images to be considered duplicates in degrees', type=float, default=5, required=False)
parser.add_argument('--skip_EXIF_insert', help='Skip inserting the extracted data into image EXIF.',
action='store_true', default=False, required=False)
parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.',
action='store_true', default=False, required=False)
def run(self, args):
vars_args = vars(args)
process_user_properties(**({k: v for k, v in vars_args.iteritems()
if k in inspect.getargspec(process_user_properties).args}))
process_import_meta_properties(
**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_import_meta_properties).args}))
process_geotag_properties(
**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_geotag_properties).args}))
process_sequence_properties(
**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_sequence_properties).args}))
process_upload_params(**({k: v for k, v in vars_args.iteritems()
if k in inspect.getargspec(process_upload_params).args}))
insert_MAPJson(**({k: v for k, v in vars_args.iteritems()
if k in inspect.getargspec(insert_MAPJson).args}))
print("Process done.") | true | true |
f7319e3e7b4ea41930c29754506d03bd52eac137 | 1,166 | py | Python | envs/cell_place_gym/native/acp_state.py | Yvette1993/spinningup | 5094cf291fa24cf93d58b4507dab56dafe73dac1 | [
"MIT"
] | null | null | null | envs/cell_place_gym/native/acp_state.py | Yvette1993/spinningup | 5094cf291fa24cf93d58b4507dab56dafe73dac1 | [
"MIT"
] | null | null | null | envs/cell_place_gym/native/acp_state.py | Yvette1993/spinningup | 5094cf291fa24cf93d58b4507dab56dafe73dac1 | [
"MIT"
] | null | null | null | import numpy as np
from cell_place_gym.native.acp_placement import *
class acp_placement_state (object):
    """Dense pairwise connectivity snapshot of a cell placement.

    Maintains two (instance x instance) 0/1 matrices rebuilt from the
    design's net list:

    * ``c_matrix`` -- connections whose endpoints are NOT both placed yet
      ("Non-Placed Nets Matrix" in the original comments).
    * ``n_matrix`` -- connections whose endpoints are both placed
      ("Placed Nets Matrix").
    """

    def __init__ (self, place):
        """Allocate zeroed matrices sized by the design's instance count.

        :param place: placement object; must expose ``.design`` with
                      ``.instances`` (sized) and ``.nets`` (iterated).
        """
        self.place = place
        self.design = place.design
        l_inst_count = len (self.design.instances)
        # Non-Placed Nets Matrix
        self.c_matrix = np.zeros (shape = (l_inst_count, l_inst_count))
        # Placed Nets Matrix
        self.n_matrix = np.zeros (shape = (l_inst_count, l_inst_count))

    def get_state (self):
        """Rebuild both matrices from scratch by walking every net.

        For each (source, dest) pin pair: mark ``n_matrix`` when both
        endpoints are placed, otherwise mark ``c_matrix``.
        """
        nets = self.design.nets
        # Clear previous snapshot before re-accumulating.
        self.c_matrix [:,:] = 0
        self.n_matrix [:,:] = 0
        for n in nets:
            src = n.net_source
            src_id = src.get_inst_id ()
            src_placed = src.is_placed ()
            # Positions are fetched but currently unused by this method;
            # kept for parity with the pin accessor protocol.
            src_position = src.get_position ()
            for dst in n.net_dests:
                dst_id = dst.get_inst_id ()
                dst_placed = dst.is_placed ()
                dst_position = dst.get_position ()
                if src_placed and dst_placed:
                    self.n_matrix [src_id][dst_id] = 1
                else:
                    # BUG FIX: the original wrote n_matrix in both branches,
                    # leaving c_matrix permanently zero. Not-yet-placed
                    # connections belong in the non-placed matrix.
                    self.c_matrix [src_id][dst_id] = 1
        return
| 34.294118 | 71 | 0.542882 | import numpy as np
from cell_place_gym.native.acp_placement import *
class acp_placement_state (object):
def __init__ (self, place):
self.place = place
self.design = place.design
l_inst_count = len (self.design.instances)
self.c_matrix = np.zeros (shape = (l_inst_count, l_inst_count))
self.n_matrix = np.zeros (shape = (l_inst_count, l_inst_count))
def get_state (self):
nets = self.design.nets
self.c_matrix [:,:] = 0
self.n_matrix [:,:] = 0
for n in nets:
src = n.net_source
src_id = src.get_inst_id ()
src_placed = src.is_placed ()
src_position = src.get_position ()
for dst in n.net_dests:
dst_id = dst.get_inst_id ()
dst_placed = dst.is_placed ()
dst_position = dst.get_position ()
if src_placed and dst_placed:
self.n_matrix [src_id][dst_id] = 1
else:
self.n_matrix [src_id][dst_id] = 1
return
| true | true |
f7319e3e83dd42729fdb8ee14c594cab83ad45a6 | 873 | py | Python | reddit_scraper.py | ZER-0-NE/Real-Time-Sentiment-Analysis-from-Memes | 7d95ca63f0643e85f49c8b3bc37e6362ae51c76b | [
"MIT"
] | 3 | 2021-05-24T11:23:44.000Z | 2022-03-19T14:01:47.000Z | reddit_scraper.py | ZER-0-NE/Real-Time-Sentiment-Analysis-from-Memes | 7d95ca63f0643e85f49c8b3bc37e6362ae51c76b | [
"MIT"
] | null | null | null | reddit_scraper.py | ZER-0-NE/Real-Time-Sentiment-Analysis-from-Memes | 7d95ca63f0643e85f49c8b3bc37e6362ae51c76b | [
"MIT"
] | 3 | 2020-04-05T22:06:50.000Z | 2020-06-25T18:59:06.000Z | import urllib
import re
import subprocess
import time
import sys
urls="https://old.reddit.com/r/dankmemes/"
i=0
num = input("How many pages would you like to download(25 memes per page)?")
if num<1:
sys.exit("Number of pages should be > 0")
else :
while i<num:
these_regex="data-url=\"(.+?)\""
pattern=re.compile(these_regex)
htmlfile=urllib.urlopen(urls)
htmltext=htmlfile.read()
titles=re.findall(pattern,htmltext)
for s in titles:
com = "wget " + s
subprocess.call(com,shell=True)
regex1 = "next-button.+?\"(.+?)\""
pattern1 = re.compile(regex1)
link1=re.findall(pattern1,htmltext)
if(len(link1)==0):
print "Something went wrong for i = %d. trying again..."%i
time.sleep(2)
else:
urls = link1[3]
i+=1 | 28.16129 | 76 | 0.57732 | import urllib
import re
import subprocess
import time
import sys
urls="https://old.reddit.com/r/dankmemes/"
i=0
num = input("How many pages would you like to download(25 memes per page)?")
if num<1:
sys.exit("Number of pages should be > 0")
else :
while i<num:
these_regex="data-url=\"(.+?)\""
pattern=re.compile(these_regex)
htmlfile=urllib.urlopen(urls)
htmltext=htmlfile.read()
titles=re.findall(pattern,htmltext)
for s in titles:
com = "wget " + s
subprocess.call(com,shell=True)
regex1 = "next-button.+?\"(.+?)\""
pattern1 = re.compile(regex1)
link1=re.findall(pattern1,htmltext)
if(len(link1)==0):
print "Something went wrong for i = %d. trying again..."%i
time.sleep(2)
else:
urls = link1[3]
i+=1 | false | true |
f7319e483d3b8d072192a8a4e21ee30c26c57e8a | 2,638 | py | Python | homeassistant/components/sonos/entity.py | DavidDeSloovere/core | 909a20b36d4df6724c955c2ae28cb82fe6d50c2e | [
"Apache-2.0"
] | 1 | 2019-08-19T18:18:50.000Z | 2019-08-19T18:18:50.000Z | homeassistant/components/sonos/entity.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 70 | 2020-08-05T07:20:00.000Z | 2022-03-31T06:01:46.000Z | homeassistant/components/sonos/entity.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 1 | 2021-10-29T22:10:26.000Z | 2021-10-29T22:10:26.000Z | """Entity representing a Sonos player."""
from __future__ import annotations
import logging
from pysonos.core import SoCo
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import DeviceInfo, Entity
from .const import (
DOMAIN,
SONOS_ENTITY_CREATED,
SONOS_ENTITY_UPDATE,
SONOS_STATE_UPDATED,
)
from .speaker import SonosSpeaker
_LOGGER = logging.getLogger(__name__)
class SonosEntity(Entity):
    """Base entity backed by a single Sonos speaker."""

    def __init__(self, speaker: SonosSpeaker) -> None:
        """Store the backing speaker for this entity."""
        self.speaker = speaker

    async def async_added_to_hass(self) -> None:
        """Handle common setup when added to hass."""
        await self.speaker.async_seen()
        # Wire up dispatcher signals; both connections are torn down
        # automatically when the entity is removed.
        subscriptions = (
            (SONOS_ENTITY_UPDATE, self.async_update),  # pylint: disable=no-member
            (SONOS_STATE_UPDATED, self.async_write_ha_state),
        )
        for signal, handler in subscriptions:
            unsub = async_dispatcher_connect(
                self.hass, f"{signal}-{self.soco.uid}", handler
            )
            self.async_on_remove(unsub)

    @property
    def soco(self) -> SoCo:
        """Return the speaker SoCo instance."""
        return self.speaker.soco

    @property
    def device_info(self) -> DeviceInfo:
        """Return information about the device."""
        speaker = self.speaker
        return {
            "identifiers": {(DOMAIN, self.soco.uid)},
            "name": speaker.zone_name,
            "model": speaker.model_name.replace("Sonos ", ""),
            "sw_version": speaker.version,
            "connections": {(dr.CONNECTION_NETWORK_MAC, speaker.mac_address)},
            "manufacturer": "Sonos",
            "suggested_area": speaker.zone_name,
        }

    @property
    def available(self) -> bool:
        """Return whether this device is available."""
        return self.speaker.available

    @property
    def should_poll(self) -> bool:
        """Return that we should not be polled (we handle that internally)."""
        return False
class SonosSensorEntity(SonosEntity):
    """Representation of a Sonos sensor entity."""

    async def async_added_to_hass(self) -> None:
        """Handle common setup when added to hass."""
        await super().async_added_to_hass()
        # Announce creation of this entity (keyed by speaker uid) so other
        # listeners in the integration know this platform's entity exists.
        async_dispatcher_send(
            self.hass, f"{SONOS_ENTITY_CREATED}-{self.soco.uid}", self.platform.domain
        )
| 29.311111 | 86 | 0.628886 | from __future__ import annotations
import logging
from pysonos.core import SoCo
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import DeviceInfo, Entity
from .const import (
DOMAIN,
SONOS_ENTITY_CREATED,
SONOS_ENTITY_UPDATE,
SONOS_STATE_UPDATED,
)
from .speaker import SonosSpeaker
_LOGGER = logging.getLogger(__name__)
class SonosEntity(Entity):
def __init__(self, speaker: SonosSpeaker) -> None:
self.speaker = speaker
async def async_added_to_hass(self) -> None:
await self.speaker.async_seen()
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{SONOS_ENTITY_UPDATE}-{self.soco.uid}",
self.async_update,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{SONOS_STATE_UPDATED}-{self.soco.uid}",
self.async_write_ha_state,
)
)
@property
def soco(self) -> SoCo:
return self.speaker.soco
@property
def device_info(self) -> DeviceInfo:
return {
"identifiers": {(DOMAIN, self.soco.uid)},
"name": self.speaker.zone_name,
"model": self.speaker.model_name.replace("Sonos ", ""),
"sw_version": self.speaker.version,
"connections": {(dr.CONNECTION_NETWORK_MAC, self.speaker.mac_address)},
"manufacturer": "Sonos",
"suggested_area": self.speaker.zone_name,
}
@property
def available(self) -> bool:
return self.speaker.available
@property
def should_poll(self) -> bool:
return False
class SonosSensorEntity(SonosEntity):
async def async_added_to_hass(self) -> None:
await super().async_added_to_hass()
async_dispatcher_send(
self.hass, f"{SONOS_ENTITY_CREATED}-{self.soco.uid}", self.platform.domain
)
| true | true |
f7319eef72e8d23633c8d8b5b9c928a276dda952 | 912 | py | Python | vimfiles/bundle/vim-python/submodules/pylint/tests/functional/n/not_in_loop.py | ciskoinch8/vimrc | 5bf77a7e7bc70fac5173ab2e9ea05d7dda3e52b8 | [
"MIT"
] | 463 | 2015-01-15T08:17:42.000Z | 2022-03-28T15:10:20.000Z | vimfiles/bundle/vim-python/submodules/pylint/tests/functional/n/not_in_loop.py | ciskoinch8/vimrc | 5bf77a7e7bc70fac5173ab2e9ea05d7dda3e52b8 | [
"MIT"
] | 52 | 2015-01-06T02:43:59.000Z | 2022-03-14T11:15:21.000Z | vimfiles/bundle/vim-python/submodules/pylint/tests/functional/n/not_in_loop.py | ciskoinch8/vimrc | 5bf77a7e7bc70fac5173ab2e9ea05d7dda3e52b8 | [
"MIT"
] | 249 | 2015-01-07T22:49:49.000Z | 2022-03-18T02:32:06.000Z | """Test that not-in-loop is detected properly."""
# pylint: disable=missing-docstring, invalid-name, too-few-public-methods
# pylint: disable=useless-else-on-loop, using-constant-test, useless-object-inheritance
# pylint: disable=no-else-continue
while True:
def ala():
continue # [not-in-loop]
while True:
pass
else:
continue # [not-in-loop]
def lala():
continue # [not-in-loop]
while True:
class A(object):
continue # [not-in-loop]
for _ in range(10):
pass
else:
continue # [not-in-loop]
for _ in range(42):
pass
else:
break # [not-in-loop]
if True:
continue # [not-in-loop]
else:
break # [not-in-loop]
for _ in range(10):
for _ in range(20):
pass
else:
continue
while True:
while True:
break
else:
break
break
else:
pass
for _ in range(1):
continue
for _ in range(42):
break
| 16.285714 | 87 | 0.612939 |
while True:
def ala():
continue
while True:
pass
else:
continue
def lala():
continue
while True:
class A(object):
continue
for _ in range(10):
pass
else:
continue
for _ in range(42):
pass
else:
break
if True:
continue
else:
break
for _ in range(10):
for _ in range(20):
pass
else:
continue
while True:
while True:
break
else:
break
break
else:
pass
for _ in range(1):
continue
for _ in range(42):
break
| true | true |
f7319f7dedc4d4bfa733e08b15bd1217c7611f0d | 860 | py | Python | wagtail_localize/migrations/0004_one_source_per_objectlocale.py | KalobTaulien/wagtail-localize | e513d18dea5f76f6941f1acf55f945150de767eb | [
"BSD-3-Clause"
] | 123 | 2019-11-21T12:55:04.000Z | 2022-03-23T08:08:47.000Z | wagtail_localize/migrations/0004_one_source_per_objectlocale.py | KalobTaulien/wagtail-localize | e513d18dea5f76f6941f1acf55f945150de767eb | [
"BSD-3-Clause"
] | 334 | 2019-11-20T10:40:08.000Z | 2022-03-27T17:33:01.000Z | wagtail_localize/migrations/0004_one_source_per_objectlocale.py | KalobTaulien/wagtail-localize | e513d18dea5f76f6941f1acf55f945150de767eb | [
"BSD-3-Clause"
] | 41 | 2020-01-16T17:24:52.000Z | 2022-03-28T13:09:59.000Z | # Generated by Django 3.0.8 on 2020-08-05 09:33
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated by Django 3.0.8; constrains TranslationSource to one
    # source per (object, locale) pair.

    dependencies = [
        ('wagtail_localize', '0003_delete_translation_sources'),
    ]

    operations = [
        # New tracking column; a concrete default (now) is required to
        # backfill existing rows, then dropped from the field definition.
        migrations.AddField(
            model_name='translationsource',
            name='last_updated_at',
            field=models.DateTimeField(default=django.utils.timezone.now),
            preserve_default=False,
        ),
        # created_at becomes set-once at insert time.
        migrations.AlterField(
            model_name='translationsource',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True),
        ),
        # Enforce at most one TranslationSource per object/locale pair.
        migrations.AlterUniqueTogether(
            name='translationsource',
            unique_together={('object', 'locale')},
        ),
    ]
| 27.741935 | 74 | 0.626744 |
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('wagtail_localize', '0003_delete_translation_sources'),
]
operations = [
migrations.AddField(
model_name='translationsource',
name='last_updated_at',
field=models.DateTimeField(default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AlterField(
model_name='translationsource',
name='created_at',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterUniqueTogether(
name='translationsource',
unique_together={('object', 'locale')},
),
]
| true | true |
f731a03ce0e6675d8bd15187040163106262826b | 23,980 | py | Python | awx/main/models/notifications.py | sganesh999/awx | 783a6dc5111e1757982bf7f5e1bb4bb197d4f199 | [
"Apache-2.0"
] | null | null | null | awx/main/models/notifications.py | sganesh999/awx | 783a6dc5111e1757982bf7f5e1bb4bb197d4f199 | [
"Apache-2.0"
] | null | null | null | awx/main/models/notifications.py | sganesh999/awx | 783a6dc5111e1757982bf7f5e1bb4bb197d4f199 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
from copy import deepcopy
import datetime
import logging
import json
from django.db import models
from django.conf import settings
from django.core.mail.message import EmailMessage
from django.db import connection
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_str, force_text
from jinja2 import sandbox
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
# AWX
from awx.api.versioning import reverse
from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel, prevent_search
from awx.main.utils import encrypt_field, decrypt_field, set_environ
from awx.main.notifications.email_backend import CustomEmailBackend
from awx.main.notifications.slack_backend import SlackBackend
from awx.main.notifications.twilio_backend import TwilioBackend
from awx.main.notifications.pagerduty_backend import PagerDutyBackend
from awx.main.notifications.hipchat_backend import HipChatBackend
from awx.main.notifications.webhook_backend import WebhookBackend
from awx.main.notifications.mattermost_backend import MattermostBackend
from awx.main.notifications.grafana_backend import GrafanaBackend
from awx.main.notifications.rocketchat_backend import RocketChatBackend
from awx.main.notifications.irc_backend import IrcBackend
from awx.main.fields import JSONField
logger = logging.getLogger('awx.main.models.notifications')
__all__ = ['NotificationTemplate', 'Notification']
class NotificationTemplate(CommonModelNameNotUnique):
    """Org-scoped definition of how and where to deliver notifications.

    Pairs a delivery backend type (email, Slack, webhook, ...) with its
    configuration and optional per-event custom messages. Password-typed
    configuration fields are stored encrypted (see ``save``).
    """

    # (db value, human label, backend class) triples for every supported type.
    NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
                          ('slack', _('Slack'), SlackBackend),
                          ('twilio', _('Twilio'), TwilioBackend),
                          ('pagerduty', _('Pagerduty'), PagerDutyBackend),
                          ('grafana', _('Grafana'), GrafanaBackend),
                          ('hipchat', _('HipChat'), HipChatBackend),
                          ('webhook', _('Webhook'), WebhookBackend),
                          ('mattermost', _('Mattermost'), MattermostBackend),
                          ('rocketchat', _('Rocket.Chat'), RocketChatBackend),
                          ('irc', _('IRC'), IrcBackend)]
    # Derived lookups: model-field choices and type -> backend class map.
    NOTIFICATION_TYPE_CHOICES = sorted([(x[0], x[1]) for x in NOTIFICATION_TYPES])
    CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES])

    class Meta:
        app_label = 'main'
        unique_together = ('organization', 'name')
        ordering = ("name",)

    # Owning organization; deleting it cascades to its templates.
    organization = models.ForeignKey(
        'Organization',
        blank=False,
        null=True,
        on_delete=models.CASCADE,
        related_name='notification_templates',
    )

    notification_type = models.CharField(
        max_length = 32,
        choices=NOTIFICATION_TYPE_CHOICES,
    )

    # Backend-specific settings; excluded from search because it can
    # contain credentials.
    notification_configuration = prevent_search(JSONField(blank=False))

    def default_messages():
        # Callable default for ``messages`` below. Deliberately takes no
        # ``self``: Django invokes field defaults with no arguments.
        return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}

    messages = JSONField(
        null=True,
        blank=True,
        default=default_messages,
        help_text=_('Optional custom messages for notification template.'))

    def has_message(self, condition):
        """Return True if a non-empty custom message exists for *condition*."""
        potential_template = self.messages.get(condition, {})
        if potential_template == {}:
            return False
        if potential_template.get('message', {}) == {}:
            return False
        return True

    def get_message(self, condition):
        """Return the custom message dict for *condition* ({} if unset)."""
        return self.messages.get(condition, {})

    def get_absolute_url(self, request=None):
        return reverse('api:notification_template_detail', kwargs={'pk': self.pk}, request=request)

    @property
    def notification_class(self):
        """Backend class corresponding to this template's notification_type."""
        return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type]

    def save(self, *args, **kwargs):
        """Persist the template, merging messages and encrypting passwords.

        Two responsibilities beyond the normal save:
        1. On update, carry forward existing custom messages that the new
           ``messages`` payload does not overwrite (including the nested
           workflow_approval sub-events).
        2. Encrypt password-typed configuration fields. New instances are
           first saved with those fields blanked (plaintext stashed on the
           instance), then re-saved once a pk exists so the encrypted value
           is written -- presumably because encryption is keyed off the pk
           (TODO confirm against encrypt_field).
        """
        new_instance = not bool(self.pk)
        update_fields = kwargs.get('update_fields', [])

        # preserve existing notification messages if not overwritten by new messages
        if not new_instance:
            old_nt = NotificationTemplate.objects.get(pk=self.id)
            old_messages = old_nt.messages
            new_messages = self.messages

            def merge_messages(local_old_messages, local_new_messages, local_event):
                # Copy over 'message'/'body' from the old event payload when
                # the new payload leaves them unset.
                if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
                    local_old_event_msgs = local_old_messages[local_event]
                    local_new_event_msgs = local_new_messages[local_event]
                    for msg_type in ['message', 'body']:
                        if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
                            local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
            if old_messages is not None and new_messages is not None:
                for event in ('started', 'success', 'error', 'workflow_approval'):
                    if not new_messages.get(event, {}) and old_messages.get(event, {}):
                        # Event entirely absent in the new payload: keep old.
                        new_messages[event] = old_messages[event]
                        continue
                    if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
                        # workflow_approval nests one payload per sub-event.
                        new_messages.setdefault('workflow_approval', {})
                        for subevent in ('running', 'approved', 'timed_out', 'denied'):
                            old_wfa_messages = old_messages['workflow_approval']
                            new_wfa_messages = new_messages['workflow_approval']
                            if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
                                new_wfa_messages[subevent] = old_wfa_messages[subevent]
                                continue
                            if old_wfa_messages:
                                merge_messages(old_wfa_messages, new_wfa_messages, subevent)
                    else:
                        merge_messages(old_messages, new_messages, event)
                    new_messages.setdefault(event, None)

        # Handle every password-typed field declared by the backend.
        for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                            self.notification_class.init_parameters):
            if self.notification_configuration[field].startswith("$encrypted$"):
                # Already encrypted (or the placeholder echoed back) -- leave as-is.
                continue
            if new_instance:
                # Stash plaintext on the instance and blank the stored value
                # until the follow-up save below.
                value = self.notification_configuration[field]
                setattr(self, '_saved_{}_{}'.format("config", field), value)
                self.notification_configuration[field] = ''
            else:
                encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
                self.notification_configuration[field] = encrypted
                if 'notification_configuration' not in update_fields:
                    update_fields.append('notification_configuration')
        super(NotificationTemplate, self).save(*args, **kwargs)
        if new_instance:
            # Second pass: restore stashed plaintext and re-save; the
            # recursive save now takes the non-new branch and encrypts.
            update_fields = []
            for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                                self.notification_class.init_parameters):
                saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '')
                self.notification_configuration[field] = saved_value
                if 'notification_configuration' not in update_fields:
                    update_fields.append('notification_configuration')
            self.save(update_fields=update_fields)

    @property
    def recipients(self):
        """Configured recipient value, under the backend's parameter name."""
        return self.notification_configuration[self.notification_class.recipient_parameter]

    def generate_notification(self, msg, body):
        """Create and persist a Notification event for this template."""
        notification = Notification(notification_template=self,
                                    notification_type=self.notification_type,
                                    recipients=smart_str(self.recipients),
                                    subject=msg,
                                    body=body)
        notification.save()
        return notification

    def send(self, subject, body):
        """Deliver *subject*/*body* through this template's backend.

        Decrypts password fields, extracts recipients/sender, fills in
        backend parameter defaults, and sends under AWX_TASK_ENV.
        NOTE(review): recipient/sender are pop()'d from the live
        notification_configuration dict, mutating the in-memory instance.
        """
        for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                            self.notification_class.init_parameters):
            if field in self.notification_configuration:
                self.notification_configuration[field] = decrypt_field(self,
                                                                       'notification_configuration',
                                                                       subfield=field)
        recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter)
        if not isinstance(recipients, list):
            recipients = [recipients]
        sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None)
        notification_configuration = deepcopy(self.notification_configuration)
        for field, params in self.notification_class.init_parameters.items():
            if field not in notification_configuration:
                if 'default' in params:
                    notification_configuration[field] = params['default']
        backend_obj = self.notification_class(**notification_configuration)
        notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients)
        with set_environ(**settings.AWX_TASK_ENV):
            return backend_obj.send_messages([notification_obj])

    def display_notification_configuration(self):
        """Copy of the configuration with encrypted values masked for display."""
        field_val = self.notification_configuration.copy()
        for field in self.notification_class.init_parameters:
            if field in field_val and force_text(field_val[field]).startswith('$encrypted$'):
                field_val[field] = '$encrypted$'
        return field_val
class Notification(CreatedModifiedModel):
    '''
    A notification event emitted when a NotificationTemplate is run
    '''

    # Delivery lifecycle of a single emitted notification.
    NOTIFICATION_STATE_CHOICES = [
        ('pending', _('Pending')),
        ('successful', _('Successful')),
        ('failed', _('Failed')),
    ]

    class Meta:
        app_label = 'main'
        ordering = ('pk',)

    # Template that produced this event; removed along with the template.
    notification_template = models.ForeignKey(
        'NotificationTemplate',
        related_name='notifications',
        on_delete=models.CASCADE,
        editable=False
    )
    # Current delivery state (see NOTIFICATION_STATE_CHOICES).
    status = models.CharField(
        max_length=20,
        choices=NOTIFICATION_STATE_CHOICES,
        default='pending',
        editable=False,
    )
    # Backend error text captured when delivery fails.
    error = models.TextField(
        blank=True,
        default='',
        editable=False,
    )
    # Count of messages actually sent by the backend.
    notifications_sent = models.IntegerField(
        default=0,
        editable=False,
    )
    # Snapshot of the template's type at emission time.
    notification_type = models.CharField(
        max_length = 32,
        choices=NotificationTemplate.NOTIFICATION_TYPE_CHOICES,
    )
    # Snapshot of the recipients the message was addressed to.
    recipients = models.TextField(
        blank=True,
        default='',
        editable=False,
    )
    subject = models.TextField(
        blank=True,
        default='',
        editable=False,
    )
    # Rendered message payload (structure depends on the backend).
    body = JSONField(blank=True)

    def get_absolute_url(self, request=None):
        return reverse('api:notification_detail', kwargs={'pk': self.pk}, request=request)
class JobNotificationMixin(object):
STATUS_TO_TEMPLATE_TYPE = {'succeeded': 'success',
'running': 'started',
'failed': 'error'}
# Tree of fields that can be safely referenced in a notification message
JOB_FIELDS_WHITELIST = ['id', 'type', 'url', 'created', 'modified', 'name', 'description', 'job_type', 'playbook',
'forks', 'limit', 'verbosity', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task',
'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
'approval_status', 'approval_node_name', 'workflow_url',
{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
'processed', 'rescued', 'ignored']},
{'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
'total_hosts', 'hosts_with_active_failures', 'total_groups',
'has_inventory_sources',
'total_inventory_sources', 'inventory_sources_with_failures',
'organization_id', 'kind']},
{'project': ['id', 'name', 'description', 'status', 'scm_type']},
{'job_template': ['id', 'name', 'description']},
{'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
{'instance_group': ['name', 'id']},
{'created_by': ['id', 'username', 'first_name', 'last_name']},
{'labels': ['count', 'results']}]}]
@classmethod
def context_stub(cls):
"""Returns a stub context that can be used for validating notification messages.
Context has the same structure as the context that will actually be used to render
a notification message."""
context = {'job': {'allow_simultaneous': False,
'controller_node': 'foo_controller',
'created': datetime.datetime(2018, 11, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
'custom_virtualenv': 'my_venv',
'description': 'Sample job description',
'diff_mode': False,
'elapsed': 0.403018,
'execution_node': 'awx',
'failed': False,
'finished': False,
'force_handlers': False,
'forks': 0,
'host_status_counts': {'skipped': 1, 'ok': 5, 'changed': 3, 'failures': 0, 'dark': 0, 'failed': False, 'processed': 0, 'rescued': 0},
'id': 42,
'job_explanation': 'Sample job explanation',
'job_slice_count': 1,
'job_slice_number': 0,
'job_tags': '',
'job_type': 'run',
'launch_type': 'workflow',
'limit': 'bar_limit',
'modified': datetime.datetime(2018, 12, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
'name': 'Stub JobTemplate',
'playbook': 'ping.yml',
'scm_revision': '',
'skip_tags': '',
'start_at_task': '',
'started': '2019-07-29T17:38:14.137461Z',
'status': 'running',
'summary_fields': {'created_by': {'first_name': '',
'id': 1,
'last_name': '',
'username': 'admin'},
'instance_group': {'id': 1, 'name': 'tower'},
'inventory': {'description': 'Sample inventory description',
'has_active_failures': False,
'has_inventory_sources': False,
'hosts_with_active_failures': 0,
'id': 17,
'inventory_sources_with_failures': 0,
'kind': '',
'name': 'Stub Inventory',
'organization_id': 121,
'total_groups': 0,
'total_hosts': 1,
'total_inventory_sources': 0},
'job_template': {'description': 'Sample job template description',
'id': 39,
'name': 'Stub JobTemplate'},
'labels': {'count': 0, 'results': []},
'project': {'description': 'Sample project description',
'id': 38,
'name': 'Stub project',
'scm_type': 'git',
'status': 'successful'},
'unified_job_template': {'description': 'Sample unified job template description',
'id': 39,
'name': 'Stub Job Template',
'unified_job_type': 'job'}},
'timeout': 0,
'type': 'job',
'url': '/api/v2/jobs/13/',
'use_fact_cache': False,
'verbosity': 0},
'job_friendly_name': 'Job',
'url': 'https://towerhost/#/jobs/playbook/1010',
'approval_status': 'approved',
'approval_node_name': 'Approve Me',
'workflow_url': 'https://towerhost/#/workflows/1010',
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
'traceback': '',
'status': 'running',
'started': '2019-08-07T21:46:38.362630+00:00',
'project': 'Stub project',
'playbook': 'ping.yml',
'name': 'Stub Job Template',
'limit': '',
'inventory': 'Stub Inventory',
'id': 42,
'hosts': {},
'friendly_name': 'Job',
'finished': False,
'credential': 'Stub credential',
'created_by': 'admin'}"""}
return context
def context(self, serialized_job):
"""Returns a dictionary that can be used for rendering notification messages.
The context will contain whitelisted content retrieved from a serialized job object
(see JobNotificationMixin.JOB_FIELDS_WHITELIST), the job's friendly name,
and a url to the job run."""
job_context = {'host_status_counts': {}}
summary = None
if hasattr(self, 'job_host_summaries'):
summary = self.job_host_summaries.first()
if summary:
from awx.api.serializers import JobHostSummarySerializer
summary_data = JobHostSummarySerializer(summary).to_representation(summary)
job_context['host_status_counts'] = summary_data
context = {
'job': job_context,
'job_friendly_name': self.get_notification_friendly_name(),
'url': self.get_ui_url(),
'job_metadata': json.dumps(self.notification_data(), indent=4)
}
def build_context(node, fields, whitelisted_fields):
for safe_field in whitelisted_fields:
if type(safe_field) is dict:
field, whitelist_subnode = safe_field.copy().popitem()
# ensure content present in job serialization
if field not in fields:
continue
subnode = fields[field]
node[field] = {}
build_context(node[field], subnode, whitelist_subnode)
else:
# ensure content present in job serialization
if safe_field not in fields:
continue
node[safe_field] = fields[safe_field]
build_context(context['job'], serialized_job, self.JOB_FIELDS_WHITELIST)
return context
def get_notification_templates(self):
raise RuntimeError("Define me")
def get_notification_friendly_name(self):
raise RuntimeError("Define me")
def notification_data(self):
raise RuntimeError("Define me")
def build_notification_message(self, nt, status):
env = sandbox.ImmutableSandboxedEnvironment()
from awx.api.serializers import UnifiedJobSerializer
job_serialization = UnifiedJobSerializer(self).to_representation(self)
context = self.context(job_serialization)
msg_template = body_template = None
msg = body = ''
# Use custom template if available
if nt.messages:
template = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
msg_template = template.get('message', None)
body_template = template.get('body', None)
# If custom template not provided, look up default template
default_template = nt.notification_class.default_messages[self.STATUS_TO_TEMPLATE_TYPE[status]]
if not msg_template:
msg_template = default_template.get('message', None)
if not body_template:
body_template = default_template.get('body', None)
if msg_template:
try:
msg = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
msg = ''
if body_template:
try:
body = env.from_string(body_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
body = ''
return (msg, body)
def send_notification_templates(self, status):
from awx.main.tasks import send_notifications # avoid circular import
if status not in ['running', 'succeeded', 'failed']:
raise ValueError(_("status must be either running, succeeded or failed"))
try:
notification_templates = self.get_notification_templates()
except Exception:
logger.warn("No notification template defined for emitting notification")
return
if not notification_templates:
return
for nt in set(notification_templates.get(self.STATUS_TO_TEMPLATE_TYPE[status], [])):
(msg, body) = self.build_notification_message(nt, status)
# Use kwargs to force late-binding
# https://stackoverflow.com/a/3431699/10669572
def send_it(local_nt=nt, local_msg=msg, local_body=body):
def _func():
send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id],
job_id=self.id)
return _func
connection.on_commit(send_it())
| 49.240246 | 160 | 0.544287 |
from copy import deepcopy
import datetime
import logging
import json
from django.db import models
from django.conf import settings
from django.core.mail.message import EmailMessage
from django.db import connection
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_str, force_text
from jinja2 import sandbox
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
from awx.api.versioning import reverse
from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel, prevent_search
from awx.main.utils import encrypt_field, decrypt_field, set_environ
from awx.main.notifications.email_backend import CustomEmailBackend
from awx.main.notifications.slack_backend import SlackBackend
from awx.main.notifications.twilio_backend import TwilioBackend
from awx.main.notifications.pagerduty_backend import PagerDutyBackend
from awx.main.notifications.hipchat_backend import HipChatBackend
from awx.main.notifications.webhook_backend import WebhookBackend
from awx.main.notifications.mattermost_backend import MattermostBackend
from awx.main.notifications.grafana_backend import GrafanaBackend
from awx.main.notifications.rocketchat_backend import RocketChatBackend
from awx.main.notifications.irc_backend import IrcBackend
from awx.main.fields import JSONField
logger = logging.getLogger('awx.main.models.notifications')
__all__ = ['NotificationTemplate', 'Notification']
class NotificationTemplate(CommonModelNameNotUnique):
NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
('slack', _('Slack'), SlackBackend),
('twilio', _('Twilio'), TwilioBackend),
('pagerduty', _('Pagerduty'), PagerDutyBackend),
('grafana', _('Grafana'), GrafanaBackend),
('hipchat', _('HipChat'), HipChatBackend),
('webhook', _('Webhook'), WebhookBackend),
('mattermost', _('Mattermost'), MattermostBackend),
('rocketchat', _('Rocket.Chat'), RocketChatBackend),
('irc', _('IRC'), IrcBackend)]
NOTIFICATION_TYPE_CHOICES = sorted([(x[0], x[1]) for x in NOTIFICATION_TYPES])
CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES])
class Meta:
app_label = 'main'
unique_together = ('organization', 'name')
ordering = ("name",)
organization = models.ForeignKey(
'Organization',
blank=False,
null=True,
on_delete=models.CASCADE,
related_name='notification_templates',
)
notification_type = models.CharField(
max_length = 32,
choices=NOTIFICATION_TYPE_CHOICES,
)
notification_configuration = prevent_search(JSONField(blank=False))
def default_messages():
return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}
messages = JSONField(
null=True,
blank=True,
default=default_messages,
help_text=_('Optional custom messages for notification template.'))
def has_message(self, condition):
potential_template = self.messages.get(condition, {})
if potential_template == {}:
return False
if potential_template.get('message', {}) == {}:
return False
return True
def get_message(self, condition):
return self.messages.get(condition, {})
def get_absolute_url(self, request=None):
return reverse('api:notification_template_detail', kwargs={'pk': self.pk}, request=request)
@property
def notification_class(self):
return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type]
def save(self, *args, **kwargs):
new_instance = not bool(self.pk)
update_fields = kwargs.get('update_fields', [])
if not new_instance:
old_nt = NotificationTemplate.objects.get(pk=self.id)
old_messages = old_nt.messages
new_messages = self.messages
def merge_messages(local_old_messages, local_new_messages, local_event):
if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
local_old_event_msgs = local_old_messages[local_event]
local_new_event_msgs = local_new_messages[local_event]
for msg_type in ['message', 'body']:
if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
if old_messages is not None and new_messages is not None:
for event in ('started', 'success', 'error', 'workflow_approval'):
if not new_messages.get(event, {}) and old_messages.get(event, {}):
new_messages[event] = old_messages[event]
continue
if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
new_messages.setdefault('workflow_approval', {})
for subevent in ('running', 'approved', 'timed_out', 'denied'):
old_wfa_messages = old_messages['workflow_approval']
new_wfa_messages = new_messages['workflow_approval']
if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
new_wfa_messages[subevent] = old_wfa_messages[subevent]
continue
if old_wfa_messages:
merge_messages(old_wfa_messages, new_wfa_messages, subevent)
else:
merge_messages(old_messages, new_messages, event)
new_messages.setdefault(event, None)
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
if self.notification_configuration[field].startswith("$encrypted$"):
continue
if new_instance:
value = self.notification_configuration[field]
setattr(self, '_saved_{}_{}'.format("config", field), value)
self.notification_configuration[field] = ''
else:
encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
self.notification_configuration[field] = encrypted
if 'notification_configuration' not in update_fields:
update_fields.append('notification_configuration')
super(NotificationTemplate, self).save(*args, **kwargs)
if new_instance:
update_fields = []
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '')
self.notification_configuration[field] = saved_value
if 'notification_configuration' not in update_fields:
update_fields.append('notification_configuration')
self.save(update_fields=update_fields)
@property
def recipients(self):
return self.notification_configuration[self.notification_class.recipient_parameter]
def generate_notification(self, msg, body):
notification = Notification(notification_template=self,
notification_type=self.notification_type,
recipients=smart_str(self.recipients),
subject=msg,
body=body)
notification.save()
return notification
def send(self, subject, body):
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
if field in self.notification_configuration:
self.notification_configuration[field] = decrypt_field(self,
'notification_configuration',
subfield=field)
recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter)
if not isinstance(recipients, list):
recipients = [recipients]
sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None)
notification_configuration = deepcopy(self.notification_configuration)
for field, params in self.notification_class.init_parameters.items():
if field not in notification_configuration:
if 'default' in params:
notification_configuration[field] = params['default']
backend_obj = self.notification_class(**notification_configuration)
notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients)
with set_environ(**settings.AWX_TASK_ENV):
return backend_obj.send_messages([notification_obj])
def display_notification_configuration(self):
field_val = self.notification_configuration.copy()
for field in self.notification_class.init_parameters:
if field in field_val and force_text(field_val[field]).startswith('$encrypted$'):
field_val[field] = '$encrypted$'
return field_val
class Notification(CreatedModifiedModel):
NOTIFICATION_STATE_CHOICES = [
('pending', _('Pending')),
('successful', _('Successful')),
('failed', _('Failed')),
]
class Meta:
app_label = 'main'
ordering = ('pk',)
notification_template = models.ForeignKey(
'NotificationTemplate',
related_name='notifications',
on_delete=models.CASCADE,
editable=False
)
status = models.CharField(
max_length=20,
choices=NOTIFICATION_STATE_CHOICES,
default='pending',
editable=False,
)
error = models.TextField(
blank=True,
default='',
editable=False,
)
notifications_sent = models.IntegerField(
default=0,
editable=False,
)
notification_type = models.CharField(
max_length = 32,
choices=NotificationTemplate.NOTIFICATION_TYPE_CHOICES,
)
recipients = models.TextField(
blank=True,
default='',
editable=False,
)
subject = models.TextField(
blank=True,
default='',
editable=False,
)
body = JSONField(blank=True)
def get_absolute_url(self, request=None):
return reverse('api:notification_detail', kwargs={'pk': self.pk}, request=request)
class JobNotificationMixin(object):
STATUS_TO_TEMPLATE_TYPE = {'succeeded': 'success',
'running': 'started',
'failed': 'error'}
JOB_FIELDS_WHITELIST = ['id', 'type', 'url', 'created', 'modified', 'name', 'description', 'job_type', 'playbook',
'forks', 'limit', 'verbosity', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task',
'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
'approval_status', 'approval_node_name', 'workflow_url',
{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
'processed', 'rescued', 'ignored']},
{'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
'total_hosts', 'hosts_with_active_failures', 'total_groups',
'has_inventory_sources',
'total_inventory_sources', 'inventory_sources_with_failures',
'organization_id', 'kind']},
{'project': ['id', 'name', 'description', 'status', 'scm_type']},
{'job_template': ['id', 'name', 'description']},
{'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
{'instance_group': ['name', 'id']},
{'created_by': ['id', 'username', 'first_name', 'last_name']},
{'labels': ['count', 'results']}]}]
@classmethod
def context_stub(cls):
context = {'job': {'allow_simultaneous': False,
'controller_node': 'foo_controller',
'created': datetime.datetime(2018, 11, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
'custom_virtualenv': 'my_venv',
'description': 'Sample job description',
'diff_mode': False,
'elapsed': 0.403018,
'execution_node': 'awx',
'failed': False,
'finished': False,
'force_handlers': False,
'forks': 0,
'host_status_counts': {'skipped': 1, 'ok': 5, 'changed': 3, 'failures': 0, 'dark': 0, 'failed': False, 'processed': 0, 'rescued': 0},
'id': 42,
'job_explanation': 'Sample job explanation',
'job_slice_count': 1,
'job_slice_number': 0,
'job_tags': '',
'job_type': 'run',
'launch_type': 'workflow',
'limit': 'bar_limit',
'modified': datetime.datetime(2018, 12, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
'name': 'Stub JobTemplate',
'playbook': 'ping.yml',
'scm_revision': '',
'skip_tags': '',
'start_at_task': '',
'started': '2019-07-29T17:38:14.137461Z',
'status': 'running',
'summary_fields': {'created_by': {'first_name': '',
'id': 1,
'last_name': '',
'username': 'admin'},
'instance_group': {'id': 1, 'name': 'tower'},
'inventory': {'description': 'Sample inventory description',
'has_active_failures': False,
'has_inventory_sources': False,
'hosts_with_active_failures': 0,
'id': 17,
'inventory_sources_with_failures': 0,
'kind': '',
'name': 'Stub Inventory',
'organization_id': 121,
'total_groups': 0,
'total_hosts': 1,
'total_inventory_sources': 0},
'job_template': {'description': 'Sample job template description',
'id': 39,
'name': 'Stub JobTemplate'},
'labels': {'count': 0, 'results': []},
'project': {'description': 'Sample project description',
'id': 38,
'name': 'Stub project',
'scm_type': 'git',
'status': 'successful'},
'unified_job_template': {'description': 'Sample unified job template description',
'id': 39,
'name': 'Stub Job Template',
'unified_job_type': 'job'}},
'timeout': 0,
'type': 'job',
'url': '/api/v2/jobs/13/',
'use_fact_cache': False,
'verbosity': 0},
'job_friendly_name': 'Job',
'url': 'https://towerhost/#/jobs/playbook/1010',
'approval_status': 'approved',
'approval_node_name': 'Approve Me',
'workflow_url': 'https://towerhost/#/workflows/1010',
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
'traceback': '',
'status': 'running',
'started': '2019-08-07T21:46:38.362630+00:00',
'project': 'Stub project',
'playbook': 'ping.yml',
'name': 'Stub Job Template',
'limit': '',
'inventory': 'Stub Inventory',
'id': 42,
'hosts': {},
'friendly_name': 'Job',
'finished': False,
'credential': 'Stub credential',
'created_by': 'admin'}"""}
return context
def context(self, serialized_job):
job_context = {'host_status_counts': {}}
summary = None
if hasattr(self, 'job_host_summaries'):
summary = self.job_host_summaries.first()
if summary:
from awx.api.serializers import JobHostSummarySerializer
summary_data = JobHostSummarySerializer(summary).to_representation(summary)
job_context['host_status_counts'] = summary_data
context = {
'job': job_context,
'job_friendly_name': self.get_notification_friendly_name(),
'url': self.get_ui_url(),
'job_metadata': json.dumps(self.notification_data(), indent=4)
}
def build_context(node, fields, whitelisted_fields):
for safe_field in whitelisted_fields:
if type(safe_field) is dict:
field, whitelist_subnode = safe_field.copy().popitem()
if field not in fields:
continue
subnode = fields[field]
node[field] = {}
build_context(node[field], subnode, whitelist_subnode)
else:
if safe_field not in fields:
continue
node[safe_field] = fields[safe_field]
build_context(context['job'], serialized_job, self.JOB_FIELDS_WHITELIST)
return context
def get_notification_templates(self):
raise RuntimeError("Define me")
def get_notification_friendly_name(self):
raise RuntimeError("Define me")
def notification_data(self):
raise RuntimeError("Define me")
def build_notification_message(self, nt, status):
env = sandbox.ImmutableSandboxedEnvironment()
from awx.api.serializers import UnifiedJobSerializer
job_serialization = UnifiedJobSerializer(self).to_representation(self)
context = self.context(job_serialization)
msg_template = body_template = None
msg = body = ''
if nt.messages:
template = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
msg_template = template.get('message', None)
body_template = template.get('body', None)
default_template = nt.notification_class.default_messages[self.STATUS_TO_TEMPLATE_TYPE[status]]
if not msg_template:
msg_template = default_template.get('message', None)
if not body_template:
body_template = default_template.get('body', None)
if msg_template:
try:
msg = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
msg = ''
if body_template:
try:
body = env.from_string(body_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
body = ''
return (msg, body)
def send_notification_templates(self, status):
from awx.main.tasks import send_notifications
if status not in ['running', 'succeeded', 'failed']:
raise ValueError(_("status must be either running, succeeded or failed"))
try:
notification_templates = self.get_notification_templates()
except Exception:
logger.warn("No notification template defined for emitting notification")
return
if not notification_templates:
return
for nt in set(notification_templates.get(self.STATUS_TO_TEMPLATE_TYPE[status], [])):
(msg, body) = self.build_notification_message(nt, status)
def send_it(local_nt=nt, local_msg=msg, local_body=body):
def _func():
send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id],
job_id=self.id)
return _func
connection.on_commit(send_it())
| true | true |
f731a055821f656efbcb5c83ad2f7ff8e59dd572 | 818 | py | Python | manage.py | neosergio/WisdomBox | f41bd828f5e264c7ad05262b29c8f02cf904b54a | [
"MIT"
] | null | null | null | manage.py | neosergio/WisdomBox | f41bd828f5e264c7ad05262b29c8f02cf904b54a | [
"MIT"
] | 2 | 2017-02-09T14:52:43.000Z | 2017-02-10T19:31:38.000Z | manage.py | neosergio/WisdomBox | f41bd828f5e264c7ad05262b29c8f02cf904b54a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wisdom.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django # noqa: F401
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| 35.565217 | 77 | 0.640587 |
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wisdom.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| true | true |
f731a0e7d3248ef87117117d791e52aa967f0a12 | 2,465 | py | Python | forecaster.py | Adri1bo/HEMS_API | dca26e55696f9f2e36f29968a8c3a90871d6bc16 | [
"MIT"
] | 1 | 2021-02-17T08:47:47.000Z | 2021-02-17T08:47:47.000Z | forecaster.py | Adri1bo/HEMS_API | dca26e55696f9f2e36f29968a8c3a90871d6bc16 | [
"MIT"
] | null | null | null | forecaster.py | Adri1bo/HEMS_API | dca26e55696f9f2e36f29968a8c3a90871d6bc16 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sun Jun 7 17:58:57 2020
@author: adria.bove
"""
from BBDD import BBDD
import analytics
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import datetime as dt
class consumption:
def __init__(self, nom):
self.nom_BBDD=nom
def forecaster(self,timestamp):
self.timestamp=str(timestamp)
# get the df
this_BBDD=BBDD(self.nom_BBDD)
a=pd.Series(dtype='float64')
k=0
list_weekdays=[(dt.datetime.now()+dt.timedelta(days=i+k)).weekday() for i in range(5)]
for weekday in list_weekdays:
df=this_BBDD.extract_weekday(weekday)
#send it to the mean_day function que farà el dia tipus
a=a.append(self.mean_day(df,k))
k=k+1
del(a[0])
a['dt']=a.index
a = a.reset_index(drop=True)
self.store_data(a,'loads_forecast')
return a
def mean_day(self,df,k): #amb el weekday he de fer millor de date_range i fer-lo sobre el valor del timer
df['timer'] = df.dt.apply(self.massive_rounder,groupsize=1,groups=int(60))
df = df.rename(columns={'P_load [kW]': 'P_load'})
df.P_load=pd.to_numeric(df.P_load)
mean_DAY=df.groupby('timer').P_load.mean()
mean_DAY=mean_DAY.to_frame()
start_date=dt.datetime.combine(dt.date.today(), dt.datetime.min.time())#+dt.timedelta(days=1)
mean_DAY['dta']=mean_DAY.index
mean_DAY.dta=mean_DAY.dta.apply(lambda x: dt.timedelta(minutes=x) + start_date + dt.timedelta(days=k))
mean_DAY.index=mean_DAY.dta
del(mean_DAY['dta'])
new_mean_DAY=mean_DAY.resample(self.timestamp+'T').pad()
return new_mean_DAY
def massive_rounder(self, element, groupsize, groups):
for i in range(groups):
if element.time().minute < (groupsize*(range(groups)[i]+1)):
return range(groups)[i] + element.time().hour*groups
def store_data(self,data,name):
this_BBDD=BBDD(name)
this_BBDD.store_data(data)
if __name__ == '__main__':
consumption_forecast=consumption('grid_BBDD')
b=consumption_forecast.forecaster(timestamp=15)
plt.plot(-b.P_load) | 30.8125 | 111 | 0.577688 |
from BBDD import BBDD
import analytics
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import datetime as dt
class consumption:
def __init__(self, nom):
self.nom_BBDD=nom
def forecaster(self,timestamp):
self.timestamp=str(timestamp)
this_BBDD=BBDD(self.nom_BBDD)
a=pd.Series(dtype='float64')
k=0
list_weekdays=[(dt.datetime.now()+dt.timedelta(days=i+k)).weekday() for i in range(5)]
for weekday in list_weekdays:
df=this_BBDD.extract_weekday(weekday)
a=a.append(self.mean_day(df,k))
k=k+1
del(a[0])
a['dt']=a.index
a = a.reset_index(drop=True)
self.store_data(a,'loads_forecast')
return a
def mean_day(self,df,k):
df['timer'] = df.dt.apply(self.massive_rounder,groupsize=1,groups=int(60))
df = df.rename(columns={'P_load [kW]': 'P_load'})
df.P_load=pd.to_numeric(df.P_load)
mean_DAY=df.groupby('timer').P_load.mean()
mean_DAY=mean_DAY.to_frame()
start_date=dt.datetime.combine(dt.date.today(), dt.datetime.min.time())
mean_DAY['dta']=mean_DAY.index
mean_DAY.dta=mean_DAY.dta.apply(lambda x: dt.timedelta(minutes=x) + start_date + dt.timedelta(days=k))
mean_DAY.index=mean_DAY.dta
del(mean_DAY['dta'])
new_mean_DAY=mean_DAY.resample(self.timestamp+'T').pad()
return new_mean_DAY
def massive_rounder(self, element, groupsize, groups):
for i in range(groups):
if element.time().minute < (groupsize*(range(groups)[i]+1)):
return range(groups)[i] + element.time().hour*groups
def store_data(self,data,name):
this_BBDD=BBDD(name)
this_BBDD.store_data(data)
if __name__ == '__main__':
consumption_forecast=consumption('grid_BBDD')
b=consumption_forecast.forecaster(timestamp=15)
plt.plot(-b.P_load) | true | true |
f731a16e0fc215d4c3817488810ef1839361e054 | 49,894 | py | Python | cogs/TwitchAlert.py | SnowyJaguar1034/KoalaBot | 840a5e30476492c60157687804a2445903279207 | [
"MIT"
] | null | null | null | cogs/TwitchAlert.py | SnowyJaguar1034/KoalaBot | 840a5e30476492c60157687804a2445903279207 | [
"MIT"
] | 13 | 2021-07-02T04:11:14.000Z | 2022-03-25T04:08:55.000Z | cogs/TwitchAlert.py | SnowyJaguar1034/KoalaBot | 840a5e30476492c60157687804a2445903279207 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Koala Bot Base Cog code and additional base cog functions
Commented using reStructuredText (reST)
"""
# Futures
# Built-in/Generic Imports
import os
import time
import re
import aiohttp
import logging
from concurrent.futures import ThreadPoolExecutor
logging.basicConfig(filename='TwitchAlert.log')
# Own modules
import KoalaBot
from utils.KoalaColours import *
from utils.KoalaUtils import error_embed, is_channel_in_guild, extract_id
from utils import KoalaDBManager
# Libs
from discord.ext import commands, tasks
from dotenv import load_dotenv
import asyncio
# Constants
# Load environment variables (Twitch API credentials) from a local .env file.
load_dotenv()
# Fallback notification text used when an alert has no custom message.
DEFAULT_MESSAGE = ""
# Twitch logo shown as the thumbnail/icon in alert embeds.
TWITCH_ICON = "https://cdn3.iconfinder.com/data/icons/social-messaging-ui-color-shapes-2-free" \
              "/128/social-twitch-circle-512.png"
# Twitch API credentials pulled from the environment (note: client ID is
# stored under the 'TWITCH_TOKEN' variable name).
TWITCH_CLIENT_ID = os.environ.get('TWITCH_TOKEN')
TWITCH_SECRET = os.environ.get('TWITCH_SECRET')
# Valid Twitch login name: 4-25 characters, lowercase alphanumerics and
# underscores, and the first character may not be an underscore.
TWITCH_USERNAME_REGEX = "^[a-z0-9][a-z0-9_]{3,24}$"
# Delays between background-loop iterations (units depend on the
# discord.ext.tasks loop decorators, which are outside this view —
# presumably minutes; TODO confirm against the @tasks.loop definitions).
LOOP_CHECK_LIVE_DELAY = 1
TEAMS_LOOP_CHECK_LIVE_DELAY = 1
REFRESH_TEAMS_DELAY = 5
def twitch_is_enabled(ctx):
    """
    Check whether the guild for this context has the TwitchAlert extension
    enabled. Intended for use as a command check, e.g.
    @commands.check(twitch_is_enabled)

    :param ctx: The context of the message
    :return: True if the guild has TwitchAlert enabled, False otherwise
    """
    try:
        return KoalaBot.check_guild_has_ext(ctx, "TwitchAlert")
    except PermissionError:
        # Guild lookup refused -> treat the extension as disabled.
        return False
class TwitchAlert(commands.Cog):
    """
    A discord.py cog for alerting when someone goes live on twitch
    """

    def __init__(self, bot, database_manager=None):
        """
        Initialises local variables
        :param bot: The bot client for this cog
        """
        if not database_manager:
            database_manager = KoalaBot.database_manager
        self.bot = bot
        database_manager.create_base_tables()
        database_manager.insert_extension("TwitchAlert", 0, True, True)
        # All TwitchAlert-specific DB access goes through this manager.
        self.ta_database_manager = TwitchAlertDBManager(database_manager, bot)
        self.ta_database_manager.create_tables()
        self.loop_thread = None
        self.loop_team_thread = None
        # True while the three background task loops are started (see start_loops/end_loops).
        self.running = False
        self.stop_loop = False

    @commands.command(name="twitchEditMsg", aliases=["edit_default_message"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def edit_default_message(self, ctx, raw_channel_id, *default_live_message):
        """
        Edit the default message put in a Twitch Alert Notification
        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID where the twitch alert is being used
        :param default_live_message: The default live message of users within this Twitch Alert,
        leave empty for program default
        :return:
        """
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            # raw_channel_id was not an ID: treat it as the first word of the message
            # and fall back to the channel the command was issued in.
            channel_id = ctx.message.channel.id
            default_live_message = (raw_channel_id,) + default_live_message
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        # Assigning default message if provided
        if default_live_message is not None and default_live_message != (None,):
            default_message = " ".join(default_live_message)
            if len(default_message) > 1000:
                await ctx.send(embed=error_embed(
                    "custom_message is too long, try something with less than 1000 characters"))
                return
        else:
            default_message = None
        # Creates a new Twitch Alert with the used guild ID and default message if provided
        default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id, default_message,
                                                          replace=True)
        # Returns an embed with information altered
        new_embed = discord.Embed(title="Default Message Edited", colour=KOALA_GREEN,
                                  description=f"Guild: {ctx.message.guild.id}\n"
                                              f"Channel: {channel_id}\n"
                                              f"Default Message: {default_message}")
        await ctx.send(embed=new_embed)

    @commands.command(name="twitchViewMsg", aliases=["view_default_message"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def view_default_message(self, ctx, raw_channel_id=None):
        """
        Shows the current default message for Twitch Alerts
        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID where the twitch alert is being used
        leave empty for program default
        :return:
        """
        if raw_channel_id is None:
            channel_id = ctx.message.channel.id
        else:
            channel_id = extract_id(raw_channel_id)
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        # Look up the stored default message; indexes [0][0], so a channel with no
        # alert row would raise IndexError here.
        default_message = self.ta_database_manager.get_default_message(channel_id)[0][0]
        # Returns an embed with information altered
        new_embed = discord.Embed(title="Default Message", colour=KOALA_GREEN,
                                  description=f"Guild: {ctx.message.guild.id}\n"
                                              f"Channel: {channel_id}\n"
                                              f"Default Message: {default_message}")
        # new_embed.set_footer(text=f"Twitch Alert ID: {new_id}")
        await ctx.send(embed=new_embed)

    @commands.command(name="twitchAdd", aliases=['add_user_to_twitch_alert'])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def add_user_to_twitch_alert(self, ctx, raw_channel_id, twitch_username=None, *custom_live_message):
        """
        Add a Twitch user to a Twitch Alert
        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID where the twitch alert is being used
        :param twitch_username: The Twitch Username of the user being added (lowercase)
        :param custom_live_message: the custom live message for this user's alert
        :return:
        """
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            # No channel ID given: shift the positional arguments one place right
            # and use the invoking channel.
            custom_live_message = (twitch_username,) + custom_live_message
            twitch_username = raw_channel_id
            channel_id = ctx.message.channel.id
        if twitch_username is None:
            raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.")
        elif not re.search(TWITCH_USERNAME_REGEX, twitch_username):
            raise discord.errors.InvalidArgument(
                "The given twitch_username is not a valid username (please use lowercase)")
        # Check the channel specified is in this guild
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id)
        # Setting the custom message as required
        if custom_live_message is not None and custom_live_message != (None,):
            custom_message = " ".join(custom_live_message)
            default_message = custom_message
            if len(default_message) > 1000:
                await ctx.send(embed=error_embed(
                    "custom_message is too long, try something with less than 1000 characters"))
                return
        else:
            custom_message = None
        self.ta_database_manager.add_user_to_ta(channel_id, twitch_username, custom_message, ctx.message.guild.id)
        # Response Message
        new_embed = discord.Embed(title="Added User to Twitch Alert", colour=KOALA_GREEN,
                                  description=f"Channel: {channel_id}\n"
                                              f"User: {twitch_username}\n"
                                              f"Message: {default_message}")
        await ctx.send(embed=new_embed)

    @commands.command(name="twitchRemove", aliases=['remove_user_from_twitch_alert'])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def remove_user_from_twitch_alert(self, ctx, raw_channel_id, twitch_username=None):
        """
        Removes a user from a Twitch Alert
        :param ctx: the discord context
        :param raw_channel_id: The discord channel ID of the Twitch Alert
        :param twitch_username: The username of the user to be removed
        :return:
        """
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            # No channel ID given: the first argument is the username.
            twitch_username = raw_channel_id
            channel_id = ctx.message.channel.id
        if twitch_username is None:
            raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.")
        # Check the channel specified is in this guild
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        await self.ta_database_manager.remove_user_from_ta(channel_id, twitch_username)
        # Response Message
        new_embed = discord.Embed(title="Removed User from Twitch Alert", colour=KOALA_GREEN,
                                  description=f"Channel: {channel_id}\n"
                                              f"User: {twitch_username}")
        await ctx.send(embed=new_embed)

    @commands.command(name="twitchAddTeam", aliases=["add_team_to_twitch_alert"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def add_team_to_twitch_alert(self, ctx, raw_channel_id, team_name=None, *custom_live_message):
        """
        Add a Twitch team to a Twitch Alert
        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID where the twitch alert is being used
        :param team_name: The Twitch team being added (lowercase)
        :param custom_live_message: the custom live message for this team's alert
        :return:
        """
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            # No channel ID given: shift the positional arguments one place right
            # and use the invoking channel.
            custom_live_message = (team_name,) + custom_live_message
            team_name = raw_channel_id
            channel_id = ctx.message.channel.id
        if team_name is None:
            raise discord.errors.InvalidArgument("team_name is a required argument that is missing.")
        elif not re.search(TWITCH_USERNAME_REGEX, team_name):
            raise discord.errors.InvalidArgument(
                "The given team_name is not a valid twitch team name (please use lowercase)")
        # Check the channel specified is in this guild
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id)
        # Setting the custom message as required
        if custom_live_message is not None and custom_live_message != (None,):
            default_message = " ".join(custom_live_message)
            if len(default_message) > 1000:
                await ctx.send(embed=error_embed(
                    "custom_message is too long, try something with less than 1000 characters"))
                return
        else:
            default_message = DEFAULT_MESSAGE
        self.ta_database_manager.add_team_to_ta(channel_id, team_name, default_message, ctx.message.guild.id)
        # Response Message
        new_embed = discord.Embed(title="Added Team to Twitch Alert", colour=KOALA_GREEN,
                                  description=f"Channel: {channel_id}\n"
                                              f"Team: {team_name}\n"
                                              f"Message: {default_message}")
        # new_embed.set_footer(text=f"Twitch Alert ID: {channel_id}")
        await ctx.send(embed=new_embed)

    @commands.command(name="twitchRemoveTeam", aliases=["remove_team_from_twitch_alert"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def remove_team_from_twitch_alert(self, ctx, raw_channel_id, team_name=None):
        """
        Removes a team from a Twitch Alert
        :param ctx: the discord context
        :param raw_channel_id: The discord channel ID of the Twitch Alert
        :param team_name: The Twitch team being added (lowercase)
        :return:
        """
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            # No channel ID given: the first argument is the team name.
            team_name = raw_channel_id
            channel_id = ctx.message.channel.id
        if team_name is None:
            raise discord.errors.InvalidArgument("team_name is a required argument that is missing.")
        # Check the channel specified is in this guild
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        await self.ta_database_manager.remove_team_from_ta(channel_id, team_name)
        # Response Message
        new_embed = discord.Embed(title="Removed Team from Twitch Alert", colour=KOALA_GREEN,
                                  description=f"Channel: {channel_id}\n"
                                              f"Team: {team_name}")
        await ctx.send(embed=new_embed)

    @commands.command(name="twitchList", aliases=["list_twitch_alert"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def list_twitch_alert(self, ctx, raw_channel_id=None):
        """
        Shows all current TwitchAlert users and teams in a channel
        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID of the Twitch Alert, defaults to the invoking channel
        :return:
        """
        if raw_channel_id is None:
            channel_id = ctx.message.channel.id
        else:
            channel_id = extract_id(raw_channel_id)
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        embed = discord.Embed()
        embed.title = "Twitch Alerts"
        embed.colour = KOALA_GREEN
        embed.set_footer(text=f"Channel ID: {channel_id}")
        # Individually tracked users
        results = self.ta_database_manager.get_users_in_ta(channel_id)
        if results:
            users = ""
            for result in results:
                users += f"{result[0]}\n"
            embed.add_field(name=":bust_in_silhouette: Users", value=users)
        else:
            embed.add_field(name=":bust_in_silhouette: Users", value="None")
        # Tracked teams
        results = self.ta_database_manager.get_teams_in_ta(channel_id)
        if results:
            teams = ""
            for result in results:
                teams += f"{result[0]}\n"
            embed.add_field(name=":busts_in_silhouette: Teams", value=teams)
        else:
            embed.add_field(name=":busts_in_silhouette: Teams", value="None")
        await ctx.send(embed=embed)

    @commands.Cog.listener()
    async def on_ready(self):
        """
        When the bot is started up, the loop begins
        :return:
        """
        if not self.running:
            self.start_loops()

    def start_loops(self):
        """Starts the three background task loops and marks the cog as running."""
        self.loop_update_teams.start()
        self.loop_check_team_live.start()
        self.loop_check_live.start()
        self.running = True

    def end_loops(self):
        """Cancels the three background task loops and marks the cog as stopped."""
        self.loop_update_teams.cancel()
        self.loop_check_team_live.cancel()
        self.loop_check_live.cancel()
        self.running = False

    @tasks.loop(minutes=LOOP_CHECK_LIVE_DELAY)
    async def loop_check_live(self):
        """
        A loop that continually checks the live status of users and
        sends alerts when online, removing them when offline
        :return:
        """
        start = time.time()
        # logging.info("TwitchAlert: User Loop Started")
        # Only users belonging to guilds with the TwitchAlert (or All) extension enabled.
        sql_find_users = "SELECT twitch_username " \
                         "FROM UserInTwitchAlert " \
                         "JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \
                         "JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
                         "WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE on TA.guild_id = GE.guild_id;"
        users = self.ta_database_manager.database_manager.db_execute_select(sql_find_users)
        usernames = []
        for user in users:
            # Purge rows whose stored username can never match a Twitch login.
            if not re.search(TWITCH_USERNAME_REGEX, user[0]):
                sql_remove_invalid_user = "DELETE FROM UserInTwitchAlert WHERE twitch_username = ?"
                self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[0]])
            else:
                usernames.append(user[0])
        # user_streams = self.ta_database_manager.twitch_handler.get_streams_data(usernames)
        if not usernames:
            return
        user_streams = await self.ta_database_manager.twitch_handler.get_streams_data(usernames)
        if user_streams is None:
            return
        # Deals with online streams
        for streams_details in user_streams:
            try:
                if streams_details.get('type') == "live":
                    current_username = str.lower(streams_details.get("user_name"))
                    # Names still in `usernames` after this loop are treated as offline below.
                    usernames.remove(current_username)
                    sql_find_message_id = \
                        "SELECT UserInTwitchAlert.channel_id, message_id, custom_message, default_message " \
                        "FROM UserInTwitchAlert " \
                        "JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \
                        "JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
                        "WHERE extension_id = 'TwitchAlert' " \
                        "  OR extension_id = 'All') GE on TA.guild_id = GE.guild_id " \
                        "WHERE twitch_username = ?;"
                    results = self.ta_database_manager.database_manager.db_execute_select(
                        sql_find_message_id, args=[current_username])
                    new_message_embed = None
                    for result in results:
                        channel_id = result[0]
                        message_id = result[1]
                        custom_message = result[2]
                        channel_default_message = result[3]
                        channel = self.bot.get_channel(id=channel_id)
                        try:
                            # If no Alert is posted
                            if message_id is None:
                                # NOTE(review): the embed is built lazily from the FIRST channel
                                # needing one and reused for later channels, even if their
                                # custom/default messages differ — confirm this is intended.
                                if new_message_embed is None:
                                    if custom_message is not None:
                                        message = custom_message
                                    else:
                                        message = channel_default_message
                                    new_message_embed = await self.create_alert_embed(streams_details, message)
                                if new_message_embed is not None and channel is not None:
                                    new_message = await channel.send(embed=new_message_embed)
                                    # Record the sent message so it can be deleted on offline.
                                    sql_update_message_id = """
                                    UPDATE UserInTwitchAlert
                                    SET message_id = ?
                                    WHERE channel_id = ?
                                    AND twitch_username = ?"""
                                    self.ta_database_manager.database_manager.db_execute_commit(
                                        sql_update_message_id, args=[new_message.id, result[0], current_username])
                        except discord.errors.Forbidden as err:
                            # Bot cannot post in this channel: drop the whole alert for it.
                            logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}")
                            sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
                            self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel,
                                                                                        args=[channel.id])
            except Exception as err:
                # Keep the loop alive on any per-stream failure; log and continue.
                logging.error(f"TwitchAlert: User Loop error {err}")
        # Deals with remaining offline streams
        await self.ta_database_manager.delete_all_offline_streams(False, usernames)
        time_diff = time.time() - start
        if time_diff > 5:
            logging.warning(f"TwitchAlert: User Loop Finished in > 5s | {time_diff}s")

    async def create_alert_embed(self, stream_data, message):
        """
        Creates an embed for an alert message from Twitch stream data
        :param stream_data: The twitch stream data to have in the message
        :param message: The custom message to be added as a description
        :return: The embed built from the stream, user and game data
        """
        user_details = await self.ta_database_manager.twitch_handler.get_user_data(
            stream_data.get("user_name"))
        game_details = await self.ta_database_manager.twitch_handler.get_game_data(
            stream_data.get("game_id"))
        return create_live_embed(stream_data, user_details, game_details, message)

    @tasks.loop(minutes=REFRESH_TEAMS_DELAY)
    async def loop_update_teams(self):
        """
        A loop that periodically refreshes the stored member lists of all tracked
        Twitch teams through the database manager.
        :return:
        """
        start = time.time()
        # logging.info("TwitchAlert: Started Update Teams")
        await self.ta_database_manager.update_all_teams_members()
        time_diff = time.time() - start
        if time_diff > 5:
            logging.warning(f"TwitchAlert: Teams updated in > 5s | {time_diff}s")

    @tasks.loop(minutes=TEAMS_LOOP_CHECK_LIVE_DELAY)
    async def loop_check_team_live(self):
        """
        A loop to repeatedly send messages if a member of a team is live, and remove it when they are not
        :return:
        """
        start = time.time()
        # logging.info("TwitchAlert: Team Loop Started")
        # Only team members belonging to guilds with the TwitchAlert (or All) extension enabled.
        sql_select_team_users = "SELECT twitch_username, twitch_team_name " \
                                "FROM UserInTwitchTeam " \
                                "JOIN TeamInTwitchAlert TITA " \
                                "  ON UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id " \
                                "JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id " \
                                "JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
                                "WHERE extension_id = 'TwitchAlert' " \
                                "  OR extension_id = 'All') GE on TA.guild_id = GE.guild_id "
        users_and_teams = self.ta_database_manager.database_manager.db_execute_select(sql_select_team_users)
        usernames = []
        for user in users_and_teams:
            # Validates the TEAM name (user[1]); invalid teams are purged entirely.
            if not re.search(TWITCH_USERNAME_REGEX, user[1]):
                sql_remove_invalid_user = "DELETE FROM TeamInTwitchAlert WHERE twitch_team_name = ?"
                self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[1]])
            else:
                usernames.append(user[0])
        if not usernames:
            return
        streams_data = await self.ta_database_manager.twitch_handler.get_streams_data(usernames)
        if streams_data is None:
            return
        # Deals with online streams
        # NOTE(review): if the same user appears in several tracked teams, `usernames`
        # holds duplicates but remove() below only strips one occurrence — the leftover
        # copy is then treated as offline. Confirm whether this is intended.
        for stream_data in streams_data:
            try:
                if stream_data.get('type') == "live":
                    current_username = str.lower(stream_data.get("user_name"))
                    usernames.remove(current_username)
                    sql_find_message_id = """
                    SELECT TITA.channel_id, UserInTwitchTeam.message_id, TITA.team_twitch_alert_id, custom_message,
                    default_message
                    FROM UserInTwitchTeam
                    JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id
                    JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id
                    JOIN (SELECT extension_id, guild_id
                          FROM GuildExtensions
                          WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE ON TA.guild_id = GE.guild_id
                    WHERE twitch_username = ?"""
                    results = self.ta_database_manager.database_manager.db_execute_select(
                        sql_find_message_id, args=[current_username])
                    new_message_embed = None
                    for result in results:
                        channel_id = result[0]
                        message_id = result[1]
                        team_twitch_alert_id = result[2]
                        custom_message = result[3]
                        channel_default_message = result[4]
                        channel = self.bot.get_channel(id=channel_id)
                        try:
                            # If no Alert is posted
                            if message_id is None:
                                # Embed is built once from the first channel needing one and reused.
                                if new_message_embed is None:
                                    if custom_message is not None:
                                        message = custom_message
                                    else:
                                        message = channel_default_message
                                    new_message_embed = await self.create_alert_embed(stream_data, message)
                                if new_message_embed is not None and channel is not None:
                                    new_message = await channel.send(embed=new_message_embed)
                                    # Record the sent message so it can be deleted on offline.
                                    sql_update_message_id = """
                                    UPDATE UserInTwitchTeam
                                    SET message_id = ?
                                    WHERE team_twitch_alert_id = ?
                                    AND twitch_username = ?"""
                                    self.ta_database_manager.database_manager.db_execute_commit(
                                        sql_update_message_id,
                                        args=[new_message.id, team_twitch_alert_id, current_username])
                        except discord.errors.Forbidden as err:
                            # Bot cannot post in this channel: drop the whole alert for it.
                            logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}")
                            sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
                            self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel,
                                                                                        args=[channel.id])
            except Exception as err:
                # Keep the loop alive on any per-stream failure; log and continue.
                logging.error(f"TwitchAlert: Team Loop error {err}")
        # Deals with remaining offline streams
        await self.ta_database_manager.delete_all_offline_streams(True, usernames)
        time_diff = time.time() - start
        if time_diff > 5:
            logging.warning(f"TwitchAlert: Teams Loop Finished in > 5s | {time_diff}s")
def create_live_embed(stream_info, user_info, game_info, message):
    """
    Builds the go-live announcement embed for a stream
    :param stream_info: The stream data from the Twitch API
    :param user_info: The user data for this streamer from the Twitch API
    :param game_info: The game data for this game from the Twitch API (or None)
    :param message: The custom message to be added as a description
    :return: The embed created
    """
    live_embed = discord.Embed(colour=KOALA_GREEN)
    # Only set a description when a non-empty message was supplied.
    if not (message is None or message == ""):
        live_embed.description = message
    streamer_name = stream_info.get("user_name")
    live_embed.set_author(name=streamer_name + " is now streaming!", icon_url=TWITCH_ICON)
    live_embed.title = "https://twitch.tv/" + str.lower(streamer_name)
    live_embed.add_field(name="Stream Title", value=stream_info.get("title"))
    live_embed.add_field(name="Playing",
                         value="No Category" if game_info is None else game_info.get("name"))
    live_embed.set_thumbnail(url=user_info.get("profile_image_url"))
    return live_embed
class TwitchAPIHandler:
    """
    A wrapper to interact with the twitch API
    """

    def __init__(self, client_id: str, client_secret: str):
        # Credentials for the OAuth2 client-credentials grant.
        self.client_id = client_id
        self.client_secret = client_secret
        self.params = {'client_id': self.client_id,
                       'client_secret': self.client_secret,
                       'grant_type': 'client_credentials'}
        # Latest token response from Twitch; empty dict until the first request.
        self.token = {}

    @property
    def base_headers(self):
        # Headers required by Twitch Helix endpoints: bearer token + client id.
        return {
            'Authorization': f'Bearer {self.token.get("access_token")}',
            'Client-ID': self.client_id
        }

    async def get_new_twitch_oauth(self):
        """
        Get a new OAuth2 token from twitch using client_id and client_secret
        :return: The new OAuth2 token
        """
        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client:
            async with client.post('https://id.twitch.tv/oauth2/token', params=self.params) as response:
                if response.status > 399:
                    # NOTE(review): on error the body is still parsed and stored in
                    # self.token below, overwriting the {} set here — confirm intended.
                    logging.critical(f'TwitchAlert: Error {response.status} while getting Oauth token')
                    self.token = {}
                response_json = await response.json()
                try:
                    # Convert the relative lifetime into an absolute expiry timestamp,
                    # which requests_get compares against time.time().
                    response_json['expires_in'] += time.time()
                except KeyError:
                    # probably shouldn't need this, but catch just in case
                    logging.warning('TwitchAlert: Failed to set token expiration time')
                self.token = response_json
                return self.token

    async def requests_get(self, url, headers=None, params=None):
        """
        Gets a response from a curl get request to the given url using headers of this object
        :param headers: the Headers required for the request, will use self.headers by default
        :param url: The URL to send the request to
        :param params: The parameters of the request
        :return: The response of the request
        """
        # Refresh the token if it is missing or about to expire.
        if self.token.get('expires_in', 0) <= time.time() + 1 or not self.token:
            await self.get_new_twitch_oauth()
        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client:
            async with client.get(url=url, headers=headers if headers else self.base_headers, params=params) as \
                    response:
                if response.status == 401:
                    # NOTE(review): retries by recursing with no depth limit — if the
                    # token stays invalid this can recurse indefinitely. Confirm acceptable.
                    logging.info(f"TwitchAlert: {response.status}, getting new oauth and retrying")
                    await self.get_new_twitch_oauth()
                    return await self.requests_get(url, headers, params)
                elif response.status > 399:
                    logging.warning(f'TwitchAlert: {response.status} while getting requesting URL:{url}')
                return await response.json()

    async def get_streams_data(self, usernames):
        """
        Gets all stream information from a list of given usernames
        :param usernames: The list of usernames
        :return: The JSON data of the request
        """
        url = 'https://api.twitch.tv/helix/streams?'
        # Helix accepts at most 100 user_login parameters per request, so batch.
        next_hundred_users = usernames[:100]
        usernames = usernames[100:]
        result = (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get("data")
        while usernames:
            next_hundred_users = usernames[:100]
            usernames = usernames[100:]
            result += (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get(
                "data")
        return result

    async def get_user_data(self, username):
        """
        Gets the user information of a given user
        :param username: The display twitch username of the user
        :return: The JSON information of the user's data
        """
        url = 'https://api.twitch.tv/helix/users?login=' + username
        return (await self.requests_get(url)).get("data")[0]

    async def get_game_data(self, game_id):
        """
        Gets the game information of a given game
        :param game_id: The twitch game ID of a game
        :return: The JSON information of the game's data, or None when game_id is empty
        """
        if game_id != "":
            url = 'https://api.twitch.tv/helix/games?id=' + game_id
            game_data = await self.requests_get(url)
            return game_data.get("data")[0]
        else:
            return None

    async def get_team_users(self, team_id):
        """
        Gets the users data about a given team
        :param team_id: The team name of the twitch team
        :return: the JSON information of the users
        """
        url = 'https://api.twitch.tv/helix/teams?name=' + team_id
        return (
            await self.requests_get(url)).get("data")[0].get("users")
class TwitchAlertDBManager:
"""
A class for interacting with the Koala twitch database
"""
def __init__(self, database_manager: KoalaDBManager.KoalaDBManager, bot_client: discord.client):
"""
Initialises local variables
:param database_manager:
:param bot_client:
"""
self.database_manager = database_manager
self.twitch_handler = TwitchAPIHandler(TWITCH_CLIENT_ID, TWITCH_SECRET)
self.bot = bot_client
def get_parent_database_manager(self):
"""
A getter for the database manager of this object
:return:
"""
return self.database_manager
def create_tables(self):
"""
Creates all the tables associated with the twitch alert extension
:return:
"""
# TwitchAlerts
sql_create_twitch_alerts_table = """
CREATE TABLE IF NOT EXISTS TwitchAlerts (
guild_id integer NOT NULL,
channel_id integer NOT NULL,
default_message text NOT NULL,
PRIMARY KEY (guild_id, channel_id),
CONSTRAINT fk_guild
FOREIGN KEY (guild_id)
REFERENCES GuildExtensions (guild_id)
ON DELETE CASCADE
);"""
# UserInTwitchAlert
sql_create_user_in_twitch_alert_table = """
CREATE TABLE IF NOT EXISTS UserInTwitchAlert (
channel_id integer NOT NULL,
twitch_username text NOT NULL,
custom_message text,
message_id integer,
PRIMARY KEY (channel_id, twitch_username),
CONSTRAINT fk_channel
FOREIGN KEY (channel_id)
REFERENCES TwitchAlerts (channel_id)
ON DELETE CASCADE
);"""
# TeamInTwitchAlert
sql_create_team_in_twitch_alert_table = """
CREATE TABLE IF NOT EXISTS TeamInTwitchAlert (
team_twitch_alert_id integer PRIMARY KEY AUTOINCREMENT,
channel_id integer NOT NULL,
twitch_team_name text NOT NULL,
custom_message text,
CONSTRAINT fk_channel
FOREIGN KEY (channel_id)
REFERENCES TwitchAlerts (channel_id)
ON DELETE CASCADE
);"""
# UserInTwitchTeam
sql_create_user_in_twitch_team_table = """
CREATE TABLE IF NOT EXISTS UserInTwitchTeam (
team_twitch_alert_id text NOT NULL,
twitch_username text NOT NULL,
message_id integer,
PRIMARY KEY (team_twitch_alert_id, twitch_username),
CONSTRAINT fk_twitch_team_alert
FOREIGN KEY (team_twitch_alert_id)
REFERENCES TeamInTwitchAlert (team_twitch_alert_id)
ON DELETE CASCADE
);"""
# Create Tables
self.database_manager.db_execute_commit(sql_create_twitch_alerts_table)
self.database_manager.db_execute_commit(sql_create_user_in_twitch_alert_table)
self.database_manager.db_execute_commit(sql_create_team_in_twitch_alert_table)
self.database_manager.db_execute_commit(sql_create_user_in_twitch_team_table)
def new_ta(self, guild_id, channel_id, default_message=None, replace=False):
"""
Creates a new Twitch Alert and gives the ID associated with it
:param guild_id: The discord guild ID where the Twitch Alert is located
:param channel_id: The discord channel ID of the twitch Alert
:param default_message: The default message of users in the Twitch Alert
:param replace: True if the new ta should replace the current if exists
:return: The new default_message
"""
sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id=?"
message = self.database_manager.db_execute_select(sql_find_ta, args=[channel_id])
if message and not replace:
return message[0][0]
# Sets the default message if not provided
if default_message is None:
default_message = DEFAULT_MESSAGE
# Insert new Twitch Alert to database
if replace:
sql_insert_twitch_alert = """
REPLACE INTO TwitchAlerts(guild_id, channel_id, default_message)
VALUES(?,?,?)
"""
else:
sql_insert_twitch_alert = """
INSERT INTO TwitchAlerts(guild_id, channel_id, default_message)
VALUES(?,?,?)
"""
self.database_manager.db_execute_commit(sql_insert_twitch_alert, args=[guild_id, channel_id, default_message])
return default_message
def get_default_message(self, channel_id):
"""
Get the set default message for the twitch alert
:param channel_id: The discord channel ID of the twitch Alert
:return: The current default_message
"""
sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id= ?"
return self.database_manager.db_execute_select(sql_find_ta, args=[channel_id])
def add_user_to_ta(self, channel_id, twitch_username, custom_message, guild_id=None):
"""
Add a twitch user to a given Twitch Alert
:param channel_id: The discord channel ID of the twitch Alert
:param twitch_username: The Twitch username of the user to be added
:param custom_message: The custom Message of the user's live notification.
None = use default Twitch Alert message
:param guild_id: The guild ID of the channel
:return:
:raises: KeyError if channel ID is not defined in TwitchAlerts and guild_id is not provided
"""
self.new_ta(guild_id, channel_id)
if custom_message:
sql_insert_user_twitch_alert = """
INSERT INTO UserInTwitchAlert(channel_id, twitch_username, custom_message)
VALUES(?, ?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username), custom_message])
else:
sql_insert_user_twitch_alert = """
INSERT INTO UserInTwitchAlert(channel_id, twitch_username)
VALUES(?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username)])
async def remove_user_from_ta(self, channel_id, twitch_username):
"""
Removes a user from a given Twitch Alert
:param channel_id: The discord channel ID of the twitch Alert
:param twitch_username: The Twitch username of the user to be added
:return:
"""
sql_get_message_id = "SELECT message_id " \
"FROM UserInTwitchAlert " \
"WHERE twitch_username = ? " \
"AND channel_id = ? "
message_id = self.database_manager.db_execute_select(sql_get_message_id,
args=[twitch_username, channel_id])[0][0]
if message_id is not None:
await self.delete_message(message_id, channel_id)
sql_remove_entry = """DELETE FROM UserInTwitchAlert
WHERE twitch_username = ? AND channel_id = ?"""
self.database_manager.db_execute_commit(sql_remove_entry, args=[twitch_username, channel_id])
async def delete_message(self, message_id, channel_id):
"""
Deletes a given discord message
:param message_id: discord message ID of the message to delete
:param channel_id: discord channel ID which has the message
:return:
"""
try:
channel = self.bot.get_channel(int(channel_id))
if channel is None:
logging.warning(f"TwitchAlert: Channel ID {channel_id} does not exist, removing from database")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id])
return
message = await channel.fetch_message(message_id)
await message.delete()
except discord.errors.NotFound as err:
logging.warning(f"TwitchAlert: Message ID {message_id} does not exist, skipping \nError: {err}")
except discord.errors.Forbidden as err:
logging.warning(f"TwitchAlert: {err} Channel ID: {channel_id}")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id])
def get_users_in_ta(self, channel_id):
"""
Returns all users in a given Twitch Alert
:param channel_id: The channel ID of the Twitch Alert
:return: The sql results of the users
"""
sql_get_users = "SELECT twitch_username FROM UserInTwitchAlert WHERE channel_id = ?"
return self.database_manager.db_execute_select(sql_get_users, args=[channel_id])
def get_teams_in_ta(self, channel_id):
"""
Returns all teams in a given Twitch Alert
:param channel_id: The channel ID of the Twitch Alert
:return: The sql results of the teams
"""
sql_get_teams = "SELECT twitch_team_name FROM TeamInTwitchAlert WHERE channel_id = ?"
return self.database_manager.db_execute_select(sql_get_teams, args=[channel_id])
def add_team_to_ta(self, channel_id, twitch_team, custom_message, guild_id=None):
"""
Add a twitch team to a given Twitch Alert
:param channel_id: The discord channel ID of the twitch Alert
:param twitch_team: The Twitch team to be added
:param custom_message: The custom Message of the team's live notification.
None = use default Twitch Alert message
:param guild_id: The guild ID of the channel
:return:
:raises: KeyError if channel ID is not defined in TwitchAlerts and guild_id is not provided
"""
self.new_ta(guild_id, channel_id)
if custom_message:
sql_insert_team_twitch_alert = """
INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name, custom_message)
VALUES(?, ?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team), custom_message])
else:
sql_insert_team_twitch_alert = """
INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name)
VALUES(?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team)])
async def remove_team_from_ta(self, channel_id, team_name):
"""
Removes a team from a given twitch alert
:param channel_id: The channel ID of the Twitch Alert
:param team_name: The team name of the team to be removed
:return:
"""
sql_get_team_alert_id = "SELECT team_twitch_alert_id " \
"FROM TeamInTwitchAlert " \
"WHERE twitch_team_name = ? " \
" AND channel_id = ?"
result = self.database_manager.db_execute_select(sql_get_team_alert_id, args=[team_name, channel_id])
if not result:
raise AttributeError("Team name not found")
team_alert_id = result[0][0]
sql_get_message_id = """SELECT UserInTwitchTeam.message_id
FROM UserInTwitchTeam
WHERE team_twitch_alert_id = ?"""
message_ids = self.database_manager.db_execute_select(sql_get_message_id, args=[team_alert_id])
if message_ids is not None:
for message_id in message_ids:
if message_id[0] is not None:
await self.delete_message(message_id[0], channel_id)
sql_remove_users = """DELETE FROM UserInTwitchTeam WHERE team_twitch_alert_id = ?"""
sql_remove_team = """DELETE FROM TeamInTwitchAlert WHERE team_twitch_alert_id = ?"""
self.database_manager.db_execute_commit(sql_remove_users, args=[team_alert_id])
self.database_manager.db_execute_commit(sql_remove_team, args=[team_alert_id])
async def update_team_members(self, twitch_team_id, team_name):
"""
Users in a team are updated to ensure they are assigned to the correct team
:param twitch_team_id: the team twitch alert id
:param team_name: the name of the team
:return:
"""
if re.search(TWITCH_USERNAME_REGEX, team_name):
users = await self.twitch_handler.get_team_users(team_name)
for user in users:
sql_add_user = """INSERT OR IGNORE INTO UserInTwitchTeam(team_twitch_alert_id, twitch_username)
VALUES(?, ?)"""
try:
self.database_manager.db_execute_commit(sql_add_user, args=[twitch_team_id, user.get("user_login")],
pass_errors=True)
except KoalaDBManager.sqlite3.IntegrityError as err:
logging.error(f"Twitch Alert: 1034: {err}")
pass
async def update_all_teams_members(self):
"""
Updates all teams with the current team members
:return:
"""
sql_get_teams = """SELECT team_twitch_alert_id, twitch_team_name FROM TeamInTwitchAlert"""
teams_info = self.database_manager.db_execute_select(sql_get_teams)
for team_info in teams_info:
await self.update_team_members(team_info[0], team_info[1])
async def delete_all_offline_streams(self, team: bool, usernames):
"""
A method that deletes all currently offline streams
:param team: True if the users are from teams, false if individuals
:param usernames: The usernames of the team members
:return:
"""
if team:
sql_select_offline_streams_with_message_ids = f"""
SELECT channel_id, message_id
FROM UserInTwitchTeam
JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id
WHERE message_id NOT NULL
AND twitch_username in ({','.join(['?'] * len(usernames))})"""
sql_update_offline_streams = f"""
UPDATE UserInTwitchTeam
SET message_id = NULL
WHERE twitch_username in ({','.join(['?'] * len(usernames))})"""
else:
sql_select_offline_streams_with_message_ids = f"""
SELECT channel_id, message_id
FROM UserInTwitchAlert
WHERE message_id NOT NULL
AND twitch_username in ({','.join(['?'] * len(usernames))})"""
sql_update_offline_streams = f"""
UPDATE UserInTwitchAlert
SET message_id = NULL
WHERE twitch_username in ({','.join(['?'] * len(usernames))})"""
results = self.database_manager.db_execute_select(
sql_select_offline_streams_with_message_ids, usernames)
for result in results:
await self.delete_message(result[1], result[0])
self.database_manager.db_execute_commit(sql_update_offline_streams, usernames)
def setup(bot: KoalaBot) -> None:
    """
    Load this cog to the KoalaBot.
    :param bot: the bot client for KoalaBot
    """
    credentials_present = TWITCH_SECRET is not None and TWITCH_CLIENT_ID is not None
    if credentials_present:
        bot.add_cog(TwitchAlert(bot))
        logging.info("TwitchAlert is ready.")
        print("TwitchAlert is ready.")
    else:
        # Without both API keys the cog cannot poll Twitch; register the
        # extension as unavailable instead of loading it.
        logging.error("TwitchAlert not started. API keys not found in environment.")
        print("TwitchAlert not started. API keys not found in environment.")
        KoalaBot.database_manager.insert_extension("TwitchAlert", 0, False, False)
| 44.94955 | 120 | 0.615445 |
import os
import time
import re
import aiohttp
import logging
from concurrent.futures import ThreadPoolExecutor
logging.basicConfig(filename='TwitchAlert.log')
import KoalaBot
from utils.KoalaColours import *
from utils.KoalaUtils import error_embed, is_channel_in_guild, extract_id
from utils import KoalaDBManager
from discord.ext import commands, tasks
from dotenv import load_dotenv
import asyncio
load_dotenv()
DEFAULT_MESSAGE = ""
TWITCH_ICON = "https://cdn3.iconfinder.com/data/icons/social-messaging-ui-color-shapes-2-free" \
"/128/social-twitch-circle-512.png"
TWITCH_CLIENT_ID = os.environ.get('TWITCH_TOKEN')
TWITCH_SECRET = os.environ.get('TWITCH_SECRET')
TWITCH_USERNAME_REGEX = "^[a-z0-9][a-z0-9_]{3,24}$"
LOOP_CHECK_LIVE_DELAY = 1
TEAMS_LOOP_CHECK_LIVE_DELAY = 1
REFRESH_TEAMS_DELAY = 5
def twitch_is_enabled(ctx):
try:
result = KoalaBot.check_guild_has_ext(ctx, "TwitchAlert")
except PermissionError:
result = False
return result
class TwitchAlert(commands.Cog):
def __init__(self, bot, database_manager=None):
if not database_manager:
database_manager = KoalaBot.database_manager
self.bot = bot
database_manager.create_base_tables()
database_manager.insert_extension("TwitchAlert", 0, True, True)
self.ta_database_manager = TwitchAlertDBManager(database_manager, bot)
self.ta_database_manager.create_tables()
self.loop_thread = None
self.loop_team_thread = None
self.running = False
self.stop_loop = False
@commands.command(name="twitchEditMsg", aliases=["edit_default_message"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def edit_default_message(self, ctx, raw_channel_id, *default_live_message):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
channel_id = ctx.message.channel.id
default_live_message = (raw_channel_id,) + default_live_message
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
if default_live_message is not None and default_live_message != (None,):
default_message = " ".join(default_live_message)
if len(default_message) > 1000:
await ctx.send(embed=error_embed(
"custom_message is too long, try something with less than 1000 characters"))
return
else:
default_message = None
default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id, default_message,
replace=True)
new_embed = discord.Embed(title="Default Message Edited", colour=KOALA_GREEN,
description=f"Guild: {ctx.message.guild.id}\n"
f"Channel: {channel_id}\n"
f"Default Message: {default_message}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchViewMsg", aliases=["view_default_message"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def view_default_message(self, ctx, raw_channel_id=None):
if raw_channel_id is None:
channel_id = ctx.message.channel.id
else:
channel_id = extract_id(raw_channel_id)
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
default_message = self.ta_database_manager.get_default_message(channel_id)[0][0]
new_embed = discord.Embed(title="Default Message", colour=KOALA_GREEN,
description=f"Guild: {ctx.message.guild.id}\n"
f"Channel: {channel_id}\n"
f"Default Message: {default_message}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchAdd", aliases=['add_user_to_twitch_alert'])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def add_user_to_twitch_alert(self, ctx, raw_channel_id, twitch_username=None, *custom_live_message):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
custom_live_message = (twitch_username,) + custom_live_message
twitch_username = raw_channel_id
channel_id = ctx.message.channel.id
if twitch_username is None:
raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.")
elif not re.search(TWITCH_USERNAME_REGEX, twitch_username):
raise discord.errors.InvalidArgument(
"The given twitch_username is not a valid username (please use lowercase)")
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id)
if custom_live_message is not None and custom_live_message != (None,):
custom_message = " ".join(custom_live_message)
default_message = custom_message
if len(default_message) > 1000:
await ctx.send(embed=error_embed(
"custom_message is too long, try something with less than 1000 characters"))
return
else:
custom_message = None
self.ta_database_manager.add_user_to_ta(channel_id, twitch_username, custom_message, ctx.message.guild.id)
new_embed = discord.Embed(title="Added User to Twitch Alert", colour=KOALA_GREEN,
description=f"Channel: {channel_id}\n"
f"User: {twitch_username}\n"
f"Message: {default_message}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchRemove", aliases=['remove_user_from_twitch_alert'])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def remove_user_from_twitch_alert(self, ctx, raw_channel_id, twitch_username=None):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
twitch_username = raw_channel_id
channel_id = ctx.message.channel.id
if twitch_username is None:
raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.")
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
await self.ta_database_manager.remove_user_from_ta(channel_id, twitch_username)
new_embed = discord.Embed(title="Removed User from Twitch Alert", colour=KOALA_GREEN,
description=f"Channel: {channel_id}\n"
f"User: {twitch_username}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchAddTeam", aliases=["add_team_to_twitch_alert"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def add_team_to_twitch_alert(self, ctx, raw_channel_id, team_name=None, *custom_live_message):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
custom_live_message = (team_name,) + custom_live_message
team_name = raw_channel_id
channel_id = ctx.message.channel.id
if team_name is None:
raise discord.errors.InvalidArgument("team_name is a required argument that is missing.")
elif not re.search(TWITCH_USERNAME_REGEX, team_name):
raise discord.errors.InvalidArgument(
"The given team_name is not a valid twitch team name (please use lowercase)")
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id)
if custom_live_message is not None and custom_live_message != (None,):
default_message = " ".join(custom_live_message)
if len(default_message) > 1000:
await ctx.send(embed=error_embed(
"custom_message is too long, try something with less than 1000 characters"))
return
else:
default_message = DEFAULT_MESSAGE
self.ta_database_manager.add_team_to_ta(channel_id, team_name, default_message, ctx.message.guild.id)
new_embed = discord.Embed(title="Added Team to Twitch Alert", colour=KOALA_GREEN,
description=f"Channel: {channel_id}\n"
f"Team: {team_name}\n"
f"Message: {default_message}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchRemoveTeam", aliases=["remove_team_from_twitch_alert"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def remove_team_from_twitch_alert(self, ctx, raw_channel_id, team_name=None):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
team_name = raw_channel_id
channel_id = ctx.message.channel.id
if team_name is None:
raise discord.errors.InvalidArgument("team_name is a required argument that is missing.")
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
await self.ta_database_manager.remove_team_from_ta(channel_id, team_name)
new_embed = discord.Embed(title="Removed Team from Twitch Alert", colour=KOALA_GREEN,
description=f"Channel: {channel_id}\n"
f"Team: {team_name}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchList", aliases=["list_twitch_alert"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def list_twitch_alert(self, ctx, raw_channel_id=None):
if raw_channel_id is None:
channel_id = ctx.message.channel.id
else:
channel_id = extract_id(raw_channel_id)
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
embed = discord.Embed()
embed.title = "Twitch Alerts"
embed.colour = KOALA_GREEN
embed.set_footer(text=f"Channel ID: {channel_id}")
results = self.ta_database_manager.get_users_in_ta(channel_id)
if results:
users = ""
for result in results:
users += f"{result[0]}\n"
embed.add_field(name=":bust_in_silhouette: Users", value=users)
else:
embed.add_field(name=":bust_in_silhouette: Users", value="None")
results = self.ta_database_manager.get_teams_in_ta(channel_id)
if results:
teams = ""
for result in results:
teams += f"{result[0]}\n"
embed.add_field(name=":busts_in_silhouette: Teams", value=teams)
else:
embed.add_field(name=":busts_in_silhouette: Teams", value="None")
await ctx.send(embed=embed)
@commands.Cog.listener()
async def on_ready(self):
if not self.running:
self.start_loops()
def start_loops(self):
self.loop_update_teams.start()
self.loop_check_team_live.start()
self.loop_check_live.start()
self.running = True
def end_loops(self):
self.loop_update_teams.cancel()
self.loop_check_team_live.cancel()
self.loop_check_live.cancel()
self.running = False
@tasks.loop(minutes=LOOP_CHECK_LIVE_DELAY)
async def loop_check_live(self):
start = time.time()
sql_find_users = "SELECT twitch_username " \
"FROM UserInTwitchAlert " \
"JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \
"JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
"WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE on TA.guild_id = GE.guild_id;"
users = self.ta_database_manager.database_manager.db_execute_select(sql_find_users)
usernames = []
for user in users:
if not re.search(TWITCH_USERNAME_REGEX, user[0]):
sql_remove_invalid_user = "DELETE FROM UserInTwitchAlert WHERE twitch_username = ?"
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[0]])
else:
usernames.append(user[0])
if not usernames:
return
user_streams = await self.ta_database_manager.twitch_handler.get_streams_data(usernames)
if user_streams is None:
return
for streams_details in user_streams:
try:
if streams_details.get('type') == "live":
current_username = str.lower(streams_details.get("user_name"))
usernames.remove(current_username)
sql_find_message_id = \
"SELECT UserInTwitchAlert.channel_id, message_id, custom_message, default_message " \
"FROM UserInTwitchAlert " \
"JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \
"JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
"WHERE extension_id = 'TwitchAlert' " \
" OR extension_id = 'All') GE on TA.guild_id = GE.guild_id " \
"WHERE twitch_username = ?;"
results = self.ta_database_manager.database_manager.db_execute_select(
sql_find_message_id, args=[current_username])
new_message_embed = None
for result in results:
channel_id = result[0]
message_id = result[1]
custom_message = result[2]
channel_default_message = result[3]
channel = self.bot.get_channel(id=channel_id)
try:
if message_id is None:
if new_message_embed is None:
if custom_message is not None:
message = custom_message
else:
message = channel_default_message
new_message_embed = await self.create_alert_embed(streams_details, message)
if new_message_embed is not None and channel is not None:
new_message = await channel.send(embed=new_message_embed)
sql_update_message_id = """
UPDATE UserInTwitchAlert
SET message_id = ?
WHERE channel_id = ?
AND twitch_username = ?"""
self.ta_database_manager.database_manager.db_execute_commit(
sql_update_message_id, args=[new_message.id, result[0], current_username])
except discord.errors.Forbidden as err:
logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel,
args=[channel.id])
except Exception as err:
logging.error(f"TwitchAlert: User Loop error {err}")
await self.ta_database_manager.delete_all_offline_streams(False, usernames)
time_diff = time.time() - start
if time_diff > 5:
logging.warning(f"TwitchAlert: User Loop Finished in > 5s | {time_diff}s")
async def create_alert_embed(self, stream_data, message):
user_details = await self.ta_database_manager.twitch_handler.get_user_data(
stream_data.get("user_name"))
game_details = await self.ta_database_manager.twitch_handler.get_game_data(
stream_data.get("game_id"))
return create_live_embed(stream_data, user_details, game_details, message)
@tasks.loop(minutes=REFRESH_TEAMS_DELAY)
async def loop_update_teams(self):
start = time.time()
await self.ta_database_manager.update_all_teams_members()
time_diff = time.time() - start
if time_diff > 5:
logging.warning(f"TwitchAlert: Teams updated in > 5s | {time_diff}s")
@tasks.loop(minutes=TEAMS_LOOP_CHECK_LIVE_DELAY)
async def loop_check_team_live(self):
start = time.time()
sql_select_team_users = "SELECT twitch_username, twitch_team_name " \
"FROM UserInTwitchTeam " \
"JOIN TeamInTwitchAlert TITA " \
" ON UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id " \
"JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id " \
"JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
"WHERE extension_id = 'TwitchAlert' " \
" OR extension_id = 'All') GE on TA.guild_id = GE.guild_id "
users_and_teams = self.ta_database_manager.database_manager.db_execute_select(sql_select_team_users)
usernames = []
for user in users_and_teams:
if not re.search(TWITCH_USERNAME_REGEX, user[1]):
sql_remove_invalid_user = "DELETE FROM TeamInTwitchAlert WHERE twitch_team_name = ?"
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[1]])
else:
usernames.append(user[0])
if not usernames:
return
streams_data = await self.ta_database_manager.twitch_handler.get_streams_data(usernames)
if streams_data is None:
return
for stream_data in streams_data:
try:
if stream_data.get('type') == "live":
current_username = str.lower(stream_data.get("user_name"))
usernames.remove(current_username)
sql_find_message_id = """
SELECT TITA.channel_id, UserInTwitchTeam.message_id, TITA.team_twitch_alert_id, custom_message,
default_message
FROM UserInTwitchTeam
JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id
JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id
JOIN (SELECT extension_id, guild_id
FROM GuildExtensions
WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE ON TA.guild_id = GE.guild_id
WHERE twitch_username = ?"""
results = self.ta_database_manager.database_manager.db_execute_select(
sql_find_message_id, args=[current_username])
new_message_embed = None
for result in results:
channel_id = result[0]
message_id = result[1]
team_twitch_alert_id = result[2]
custom_message = result[3]
channel_default_message = result[4]
channel = self.bot.get_channel(id=channel_id)
try:
if message_id is None:
if new_message_embed is None:
if custom_message is not None:
message = custom_message
else:
message = channel_default_message
new_message_embed = await self.create_alert_embed(stream_data, message)
if new_message_embed is not None and channel is not None:
new_message = await channel.send(embed=new_message_embed)
sql_update_message_id = """
UPDATE UserInTwitchTeam
SET message_id = ?
WHERE team_twitch_alert_id = ?
AND twitch_username = ?"""
self.ta_database_manager.database_manager.db_execute_commit(
sql_update_message_id,
args=[new_message.id, team_twitch_alert_id, current_username])
except discord.errors.Forbidden as err:
logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel,
args=[channel.id])
except Exception as err:
logging.error(f"TwitchAlert: Team Loop error {err}")
await self.ta_database_manager.delete_all_offline_streams(True, usernames)
time_diff = time.time() - start
if time_diff > 5:
logging.warning(f"TwitchAlert: Teams Loop Finished in > 5s | {time_diff}s")
def create_live_embed(stream_info, user_info, game_info, message):
embed = discord.Embed(colour=KOALA_GREEN)
if message is not None and message != "":
embed.description = message
embed.set_author(name=stream_info.get("user_name") + " is now streaming!",
icon_url=TWITCH_ICON)
embed.title = "https://twitch.tv/" + str.lower(stream_info.get("user_name"))
embed.add_field(name="Stream Title", value=stream_info.get("title"))
if game_info is None:
embed.add_field(name="Playing", value="No Category")
else:
embed.add_field(name="Playing", value=game_info.get("name"))
embed.set_thumbnail(url=user_info.get("profile_image_url"))
return embed
class TwitchAPIHandler:
def __init__(self, client_id: str, client_secret: str):
self.client_id = client_id
self.client_secret = client_secret
self.params = {'client_id': self.client_id,
'client_secret': self.client_secret,
'grant_type': 'client_credentials'}
self.token = {}
@property
def base_headers(self):
return {
'Authorization': f'Bearer {self.token.get("access_token")}',
'Client-ID': self.client_id
}
async def get_new_twitch_oauth(self):
async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client:
async with client.post('https://id.twitch.tv/oauth2/token', params=self.params) as response:
if response.status > 399:
logging.critical(f'TwitchAlert: Error {response.status} while getting Oauth token')
self.token = {}
response_json = await response.json()
try:
response_json['expires_in'] += time.time()
except KeyError:
logging.warning('TwitchAlert: Failed to set token expiration time')
self.token = response_json
return self.token
async def requests_get(self, url, headers=None, params=None):
if self.token.get('expires_in', 0) <= time.time() + 1 or not self.token:
await self.get_new_twitch_oauth()
async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client:
async with client.get(url=url, headers=headers if headers else self.base_headers, params=params) as \
response:
if response.status == 401:
logging.info(f"TwitchAlert: {response.status}, getting new oauth and retrying")
await self.get_new_twitch_oauth()
return await self.requests_get(url, headers, params)
elif response.status > 399:
logging.warning(f'TwitchAlert: {response.status} while getting requesting URL:{url}')
return await response.json()
async def get_streams_data(self, usernames):
url = 'https://api.twitch.tv/helix/streams?'
next_hundred_users = usernames[:100]
usernames = usernames[100:]
result = (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get("data")
while usernames:
next_hundred_users = usernames[:100]
usernames = usernames[100:]
result += (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get(
"data")
return result
async def get_user_data(self, username):
url = 'https://api.twitch.tv/helix/users?login=' + username
return (await self.requests_get(url)).get("data")[0]
async def get_game_data(self, game_id):
if game_id != "":
url = 'https://api.twitch.tv/helix/games?id=' + game_id
game_data = await self.requests_get(url)
return game_data.get("data")[0]
else:
return None
async def get_team_users(self, team_id):
url = 'https://api.twitch.tv/helix/teams?name=' + team_id
return (
await self.requests_get(url)).get("data")[0].get("users")
class TwitchAlertDBManager:
def __init__(self, database_manager: KoalaDBManager.KoalaDBManager, bot_client: discord.client):
self.database_manager = database_manager
self.twitch_handler = TwitchAPIHandler(TWITCH_CLIENT_ID, TWITCH_SECRET)
self.bot = bot_client
def get_parent_database_manager(self):
return self.database_manager
def create_tables(self):
# TwitchAlerts
sql_create_twitch_alerts_table = """
CREATE TABLE IF NOT EXISTS TwitchAlerts (
guild_id integer NOT NULL,
channel_id integer NOT NULL,
default_message text NOT NULL,
PRIMARY KEY (guild_id, channel_id),
CONSTRAINT fk_guild
FOREIGN KEY (guild_id)
REFERENCES GuildExtensions (guild_id)
ON DELETE CASCADE
);"""
# UserInTwitchAlert
sql_create_user_in_twitch_alert_table = """
CREATE TABLE IF NOT EXISTS UserInTwitchAlert (
channel_id integer NOT NULL,
twitch_username text NOT NULL,
custom_message text,
message_id integer,
PRIMARY KEY (channel_id, twitch_username),
CONSTRAINT fk_channel
FOREIGN KEY (channel_id)
REFERENCES TwitchAlerts (channel_id)
ON DELETE CASCADE
);"""
# TeamInTwitchAlert
sql_create_team_in_twitch_alert_table = """
CREATE TABLE IF NOT EXISTS TeamInTwitchAlert (
team_twitch_alert_id integer PRIMARY KEY AUTOINCREMENT,
channel_id integer NOT NULL,
twitch_team_name text NOT NULL,
custom_message text,
CONSTRAINT fk_channel
FOREIGN KEY (channel_id)
REFERENCES TwitchAlerts (channel_id)
ON DELETE CASCADE
);"""
# UserInTwitchTeam
sql_create_user_in_twitch_team_table = """
CREATE TABLE IF NOT EXISTS UserInTwitchTeam (
team_twitch_alert_id text NOT NULL,
twitch_username text NOT NULL,
message_id integer,
PRIMARY KEY (team_twitch_alert_id, twitch_username),
CONSTRAINT fk_twitch_team_alert
FOREIGN KEY (team_twitch_alert_id)
REFERENCES TeamInTwitchAlert (team_twitch_alert_id)
ON DELETE CASCADE
);"""
# Create Tables
self.database_manager.db_execute_commit(sql_create_twitch_alerts_table)
self.database_manager.db_execute_commit(sql_create_user_in_twitch_alert_table)
self.database_manager.db_execute_commit(sql_create_team_in_twitch_alert_table)
self.database_manager.db_execute_commit(sql_create_user_in_twitch_team_table)
def new_ta(self, guild_id, channel_id, default_message=None, replace=False):
sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id=?"
message = self.database_manager.db_execute_select(sql_find_ta, args=[channel_id])
if message and not replace:
return message[0][0]
# Sets the default message if not provided
if default_message is None:
default_message = DEFAULT_MESSAGE
# Insert new Twitch Alert to database
if replace:
sql_insert_twitch_alert = """
REPLACE INTO TwitchAlerts(guild_id, channel_id, default_message)
VALUES(?,?,?)
"""
else:
sql_insert_twitch_alert = """
INSERT INTO TwitchAlerts(guild_id, channel_id, default_message)
VALUES(?,?,?)
"""
self.database_manager.db_execute_commit(sql_insert_twitch_alert, args=[guild_id, channel_id, default_message])
return default_message
def get_default_message(self, channel_id):
sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id= ?"
return self.database_manager.db_execute_select(sql_find_ta, args=[channel_id])
def add_user_to_ta(self, channel_id, twitch_username, custom_message, guild_id=None):
self.new_ta(guild_id, channel_id)
if custom_message:
sql_insert_user_twitch_alert = """
INSERT INTO UserInTwitchAlert(channel_id, twitch_username, custom_message)
VALUES(?, ?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username), custom_message])
else:
sql_insert_user_twitch_alert = """
INSERT INTO UserInTwitchAlert(channel_id, twitch_username)
VALUES(?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username)])
async def remove_user_from_ta(self, channel_id, twitch_username):
sql_get_message_id = "SELECT message_id " \
"FROM UserInTwitchAlert " \
"WHERE twitch_username = ? " \
"AND channel_id = ? "
message_id = self.database_manager.db_execute_select(sql_get_message_id,
args=[twitch_username, channel_id])[0][0]
if message_id is not None:
await self.delete_message(message_id, channel_id)
sql_remove_entry = """DELETE FROM UserInTwitchAlert
WHERE twitch_username = ? AND channel_id = ?"""
self.database_manager.db_execute_commit(sql_remove_entry, args=[twitch_username, channel_id])
async def delete_message(self, message_id, channel_id):
try:
channel = self.bot.get_channel(int(channel_id))
if channel is None:
logging.warning(f"TwitchAlert: Channel ID {channel_id} does not exist, removing from database")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id])
return
message = await channel.fetch_message(message_id)
await message.delete()
except discord.errors.NotFound as err:
logging.warning(f"TwitchAlert: Message ID {message_id} does not exist, skipping \nError: {err}")
except discord.errors.Forbidden as err:
logging.warning(f"TwitchAlert: {err} Channel ID: {channel_id}")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id])
def get_users_in_ta(self, channel_id):
sql_get_users = "SELECT twitch_username FROM UserInTwitchAlert WHERE channel_id = ?"
return self.database_manager.db_execute_select(sql_get_users, args=[channel_id])
def get_teams_in_ta(self, channel_id):
sql_get_teams = "SELECT twitch_team_name FROM TeamInTwitchAlert WHERE channel_id = ?"
return self.database_manager.db_execute_select(sql_get_teams, args=[channel_id])
def add_team_to_ta(self, channel_id, twitch_team, custom_message, guild_id=None):
self.new_ta(guild_id, channel_id)
if custom_message:
sql_insert_team_twitch_alert = """
INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name, custom_message)
VALUES(?, ?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team), custom_message])
else:
sql_insert_team_twitch_alert = """
INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name)
VALUES(?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team)])
async def remove_team_from_ta(self, channel_id, team_name):
sql_get_team_alert_id = "SELECT team_twitch_alert_id " \
"FROM TeamInTwitchAlert " \
"WHERE twitch_team_name = ? " \
" AND channel_id = ?"
result = self.database_manager.db_execute_select(sql_get_team_alert_id, args=[team_name, channel_id])
if not result:
raise AttributeError("Team name not found")
team_alert_id = result[0][0]
sql_get_message_id = """SELECT UserInTwitchTeam.message_id
FROM UserInTwitchTeam
WHERE team_twitch_alert_id = ?"""
message_ids = self.database_manager.db_execute_select(sql_get_message_id, args=[team_alert_id])
if message_ids is not None:
for message_id in message_ids:
if message_id[0] is not None:
await self.delete_message(message_id[0], channel_id)
sql_remove_users = """DELETE FROM UserInTwitchTeam WHERE team_twitch_alert_id = ?"""
sql_remove_team = """DELETE FROM TeamInTwitchAlert WHERE team_twitch_alert_id = ?"""
self.database_manager.db_execute_commit(sql_remove_users, args=[team_alert_id])
self.database_manager.db_execute_commit(sql_remove_team, args=[team_alert_id])
async def update_team_members(self, twitch_team_id, team_name):
if re.search(TWITCH_USERNAME_REGEX, team_name):
users = await self.twitch_handler.get_team_users(team_name)
for user in users:
sql_add_user = """INSERT OR IGNORE INTO UserInTwitchTeam(team_twitch_alert_id, twitch_username)
VALUES(?, ?)"""
try:
self.database_manager.db_execute_commit(sql_add_user, args=[twitch_team_id, user.get("user_login")],
pass_errors=True)
except KoalaDBManager.sqlite3.IntegrityError as err:
logging.error(f"Twitch Alert: 1034: {err}")
pass
async def update_all_teams_members(self):
sql_get_teams = """SELECT team_twitch_alert_id, twitch_team_name FROM TeamInTwitchAlert"""
teams_info = self.database_manager.db_execute_select(sql_get_teams)
for team_info in teams_info:
await self.update_team_members(team_info[0], team_info[1])
async def delete_all_offline_streams(self, team: bool, usernames):
if team:
sql_select_offline_streams_with_message_ids = f"""
SELECT channel_id, message_id
FROM UserInTwitchTeam
JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id
WHERE message_id NOT NULL
AND twitch_username in ({','.join(['?'] * len(usernames))})"""
sql_update_offline_streams = f"""
UPDATE UserInTwitchTeam
SET message_id = NULL
WHERE twitch_username in ({','.join(['?'] * len(usernames))})"""
else:
sql_select_offline_streams_with_message_ids = f"""
SELECT channel_id, message_id
FROM UserInTwitchAlert
WHERE message_id NOT NULL
AND twitch_username in ({','.join(['?'] * len(usernames))})"""
sql_update_offline_streams = f"""
UPDATE UserInTwitchAlert
SET message_id = NULL
WHERE twitch_username in ({','.join(['?'] * len(usernames))})"""
results = self.database_manager.db_execute_select(
sql_select_offline_streams_with_message_ids, usernames)
for result in results:
await self.delete_message(result[1], result[0])
self.database_manager.db_execute_commit(sql_update_offline_streams, usernames)
def setup(bot: KoalaBot) -> None:
    """Load the TwitchAlert cog if Twitch API credentials are configured.

    When credentials are missing the extension is recorded as unavailable
    instead of being loaded.

    :param bot: the KoalaBot client this extension is added to
    """
    credentials_present = TWITCH_SECRET is not None and TWITCH_CLIENT_ID is not None
    if credentials_present:
        bot.add_cog(TwitchAlert(bot))
        logging.info("TwitchAlert is ready.")
        print("TwitchAlert is ready.")
    else:
        logging.error("TwitchAlert not started. API keys not found in environment.")
        print("TwitchAlert not started. API keys not found in environment.")
        KoalaBot.database_manager.insert_extension("TwitchAlert", 0, False, False)
| true | true |
f731a1ae7561e1b6b771c407eeb69465e2d4177b | 3,929 | py | Python | openstack/tests/unit/cloud/test_limits.py | NeCTAR-RC/openstacksdk | 60a24f6c4717a1f9a0e545c9a07e68afaedc5a27 | [
"Apache-2.0"
] | 99 | 2018-03-28T15:41:45.000Z | 2022-01-23T17:22:13.000Z | openstack/tests/unit/cloud/test_limits.py | NeCTAR-RC/openstacksdk | 60a24f6c4717a1f9a0e545c9a07e68afaedc5a27 | [
"Apache-2.0"
] | 5 | 2018-05-25T16:54:23.000Z | 2021-11-21T02:27:16.000Z | openstack/tests/unit/cloud/test_limits.py | NeCTAR-RC/openstacksdk | 60a24f6c4717a1f9a0e545c9a07e68afaedc5a27 | [
"Apache-2.0"
] | 104 | 2018-04-06T14:33:54.000Z | 2022-03-01T01:58:09.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.unit import base
class TestLimits(base.TestCase):
    """Tests for ``get_compute_limits`` against a mocked compute ``/limits`` API."""

    @staticmethod
    def _absolute_limits_response():
        """Return a fresh copy of the canned Nova ``GET /limits`` payload.

        A new dict is built on every call so one test cannot mutate the
        fixture seen by another.  Previously this 25-line literal was
        duplicated verbatim in both test methods.
        """
        return {
            "limits": {
                "absolute": {
                    "maxImageMeta": 128,
                    "maxPersonality": 5,
                    "maxPersonalitySize": 10240,
                    "maxSecurityGroupRules": 20,
                    "maxSecurityGroups": 10,
                    "maxServerMeta": 128,
                    "maxTotalCores": 20,
                    "maxTotalFloatingIps": 10,
                    "maxTotalInstances": 10,
                    "maxTotalKeypairs": 100,
                    "maxTotalRAMSize": 51200,
                    "maxServerGroups": 10,
                    "maxServerGroupMembers": 10,
                    "totalCoresUsed": 0,
                    "totalInstancesUsed": 0,
                    "totalRAMUsed": 0,
                    "totalSecurityGroupsUsed": 0,
                    "totalFloatingIpsUsed": 0,
                    "totalServerGroupsUsed": 0
                },
                "rate": []
            }
        }

    def test_get_compute_limits(self):
        """Current-project limits hit ``/limits`` with no query string."""
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'compute', 'public', append=['limits']),
                 json=self._absolute_limits_response()),
        ])
        self.cloud.get_compute_limits()
        self.assert_calls()

    def test_other_get_compute_limits(self):
        """Another project's limits pass ``tenant_id`` as a query parameter."""
        project = self.mock_for_keystone_projects(project_count=1,
                                                  list_get=True)[0]
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'compute', 'public', append=['limits'],
                     qs_elements=[
                         'tenant_id={id}'.format(id=project.project_id)
                     ]),
                 json=self._absolute_limits_response()),
        ])
        self.cloud.get_compute_limits(project.project_id)
        self.assert_calls()
| 40.927083 | 75 | 0.423772 |
from openstack.tests.unit import base
class TestLimits(base.TestCase):
def test_get_compute_limits(self):
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'compute', 'public', append=['limits']),
json={
"limits": {
"absolute": {
"maxImageMeta": 128,
"maxPersonality": 5,
"maxPersonalitySize": 10240,
"maxSecurityGroupRules": 20,
"maxSecurityGroups": 10,
"maxServerMeta": 128,
"maxTotalCores": 20,
"maxTotalFloatingIps": 10,
"maxTotalInstances": 10,
"maxTotalKeypairs": 100,
"maxTotalRAMSize": 51200,
"maxServerGroups": 10,
"maxServerGroupMembers": 10,
"totalCoresUsed": 0,
"totalInstancesUsed": 0,
"totalRAMUsed": 0,
"totalSecurityGroupsUsed": 0,
"totalFloatingIpsUsed": 0,
"totalServerGroupsUsed": 0
},
"rate": []
}
}),
])
self.cloud.get_compute_limits()
self.assert_calls()
def test_other_get_compute_limits(self):
project = self.mock_for_keystone_projects(project_count=1,
list_get=True)[0]
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'compute', 'public', append=['limits'],
qs_elements=[
'tenant_id={id}'.format(id=project.project_id)
]),
json={
"limits": {
"absolute": {
"maxImageMeta": 128,
"maxPersonality": 5,
"maxPersonalitySize": 10240,
"maxSecurityGroupRules": 20,
"maxSecurityGroups": 10,
"maxServerMeta": 128,
"maxTotalCores": 20,
"maxTotalFloatingIps": 10,
"maxTotalInstances": 10,
"maxTotalKeypairs": 100,
"maxTotalRAMSize": 51200,
"maxServerGroups": 10,
"maxServerGroupMembers": 10,
"totalCoresUsed": 0,
"totalInstancesUsed": 0,
"totalRAMUsed": 0,
"totalSecurityGroupsUsed": 0,
"totalFloatingIpsUsed": 0,
"totalServerGroupsUsed": 0
},
"rate": []
}
}),
])
self.cloud.get_compute_limits(project.project_id)
self.assert_calls()
| true | true |
f731a2121c3590ca3b044f3b321708cb81eb5991 | 9,878 | py | Python | GlutWrapper.py | kosystem/PythonGlutWrapper | e619b62927d6875f1bd3d2da8b5f2291487c8920 | [
"MIT"
] | null | null | null | GlutWrapper.py | kosystem/PythonGlutWrapper | e619b62927d6875f1bd3d2da8b5f2291487c8920 | [
"MIT"
] | null | null | null | GlutWrapper.py | kosystem/PythonGlutWrapper | e619b62927d6875f1bd3d2da8b5f2291487c8920 | [
"MIT"
] | null | null | null | from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
import sys
from math import *
import time
ESCAPE = '\033'  # ASCII ESC character; compared against the raw key in keyboard()
class Camera(object):
    """Orbiting camera: a look-at point plus pan/tilt/distance offsets."""
    def __init__(self):
        # Point the camera looks at (and orbits around).
        self.lock_x = self.lock_y = self.lock_z = 0
        # Spherical position of the eye relative to that point.
        self.distance = 300
        self.pan = self.tilt = 0.0
class MouseState(object):
    """Most recently observed mouse state: button, press flag, cursor position."""
    def __init__(self):
        self.button = self.pressed = 0
        self.x = self.y = 0
class GlutWrapper(object):
    """Small object-oriented wrapper around GLUT.

    Creates a window, wires every GLUT callback to a method, and drives a
    fixed-rate redisplay loop.  Subclasses override the hook methods in the
    "User overwrite" / "User interface" sections (``display``, ``keyboard``,
    ``mouse`` ...); the ``draw*`` and ``overlayString`` methods are drawing
    helpers for those hooks.
    """
    def __init__(self):
        # Initial window geometry and title (kept as bytes, the form passed
        # to glutCreateWindow below).
        self.windowWidth = 640
        self.windowHeight = 480
        self.windowPositionX = 100
        self.windowPositionY = 100
        self.title = b"Glut Wrapper"
        self.camera = Camera()
        self.mouseState = MouseState()
        # Timing state (seconds) for the fixed-rate idle/redisplay loop.
        self.frameElapsed = 0.0
        self.displayElapsed = 0.0
        self.elapsedTime = 0.0
        self.frameTime = 1.0/20.0
    def startFramework(self):
        """Initialize GLUT, register all callbacks, and enter the main loop."""
        glutInit(sys.argv)
        glutInitDisplayMode(GLUT_RGB | GLUT_DEPTH)
        glutInitWindowPosition(self.windowPositionX, self.windowPositionY)
        glutInitWindowSize(self.windowWidth, self.windowHeight)
        glutCreateWindow(self.title)
        glutDisplayFunc(self.displayFramework)
        glutReshapeFunc(self.reshape)
        glutIdleFunc(self.idle)
        glutMouseFunc(self.mouse)
        glutMotionFunc(self.motion)
        glutPassiveMotionFunc(self.passiveMotion)
        # glutMouseWheelFunc(self.mouseWheel)
        glutKeyboardFunc(self.keyboard)
        glutKeyboardUpFunc(self.keyboardUp)
        glutSpecialFunc(self.special)
        glutSpecialUpFunc(self.specialUp)
        self.initialize()
        self.load()
        glutMainLoop()
    def displayFramework(self):
        """Per-frame driver: clear, set lights and camera, call display(), swap."""
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        self.setLights()
        self.setCamera()
        self.display(self.getDisplayElapsed())
        glutSwapBuffers()
    def setCamera(self):
        """Apply a perspective projection and orbit the eye around the lock point."""
        width = glutGet(GLUT_WINDOW_WIDTH)
        height = glutGet(GLUT_WINDOW_HEIGHT)
        glViewport(0, 0, width, height)
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        gluPerspective(30.0, float(width) / height, 0.5, 10000)
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
        distance = self.camera.distance
        tilt = self.camera.tilt
        pan = self.camera.pan
        lock_x = self.camera.lock_x
        lock_y = self.camera.lock_y
        lock_z = self.camera.lock_z
        # Spherical -> Cartesian eye position, looking at the lock point, +Y up.
        gluLookAt(
            distance*sin(pan)*cos(tilt) + lock_x,
            distance*sin(tilt) + lock_y,
            distance*cos(pan)*cos(tilt) + lock_z,
            lock_x, lock_y, lock_z,
            0.0,
            1.0,
            0.0)
    def setLights(self):
        """Configure two directional lights pointing in opposite directions."""
        light1_position = (0.0, 1.0, 1.0, 0.0)
        light2_position = (0.0, -1.0, -1.0, 0.0)
        white_light = (1.0, 1.0, 1.0, 1.0)
        lmodel_ambient = (0.2, 0.2, 0.2, 1.0)
        ambient_light = (0.4, 0.4, 0.4, 1.0)
        glLight(GL_LIGHT0, GL_POSITION, light1_position)
        glLight(GL_LIGHT0, GL_AMBIENT, ambient_light)
        glLight(GL_LIGHT0, GL_DIFFUSE, white_light)
        glLight(GL_LIGHT0, GL_SPECULAR, white_light)
        glLight(GL_LIGHT1, GL_POSITION, light2_position)
        glLight(GL_LIGHT1, GL_AMBIENT, lmodel_ambient)
        glLight(GL_LIGHT1, GL_DIFFUSE, ambient_light)
        glLight(GL_LIGHT1, GL_SPECULAR, lmodel_ambient)
        # glLightModel(GL_LIGHT_MODEL_AMBIENT, lmodel_ambient)
    def getFrameElapsed(self):
        """Return seconds since the previous call (0.0 on the very first call)."""
        now = time.time()
        if self.frameElapsed == 0.0:
            self.frameElapsed = now
        elapsed = now - self.frameElapsed
        self.frameElapsed = now
        return elapsed
    def getDisplayElapsed(self):
        """Return seconds since the previously displayed frame (0.0 on the first)."""
        now = time.time()
        if self.displayElapsed == 0.0:
            self.displayElapsed = now
        elapsed = now - self.displayElapsed
        self.displayElapsed = now
        return elapsed
    # User overwrite hooks --------------------------------
    def display(self, deltaTime):
        """User hook: draw the scene; the default draws a teapot.

        :param deltaTime: seconds elapsed since the previous frame
        """
        glMaterial(GL_FRONT, GL_AMBIENT, (0.8, 0.6, 0.5, 1.0))
        glMaterial(GL_FRONT, GL_DIFFUSE, (0.8, 0.6, 0.5, 1.0))
        glutSolidTeapot(50)
    def idle(self):
        """Accumulate wall-clock time; request a redraw every frameTime seconds."""
        self.elapsedTime += self.getFrameElapsed()
        if self.elapsedTime >= self.frameTime:
            glutPostRedisplay()
            self.elapsedTime -= self.frameTime
    def reshape(self, w, h):
        """GLUT resize callback: keep the viewport matched to the window size."""
        glViewport(0, 0, w, h)
    def initialize(self):
        """One-time GL state setup: clear color, lighting, smooth shading, depth."""
        glClearColor(0.4, 0.5, 0.5, 1.0)
        glEnable(GL_LIGHTING)
        glEnable(GL_LIGHT0)
        glEnable(GL_LIGHT1)
        glShadeModel(GL_SMOOTH)
        glEnable(GL_DEPTH_TEST)
    def load(self):
        """User hook for loading assets before the main loop starts."""
        # NOTE: model data load
        pass
    # User interface hooks -----------------------------
    def mouse(self, button, state, x, y):
        """User hook: mouse button press/release."""
        #print "MousePress: button: %d, x: %d, y:%d" % (button, x, y)
        pass
    def motion(self, x, y):
        """User hook: mouse moved while a button is held down."""
        #print "MouseMove: x: %d, y: %d" % (x, y)
        pass
    def passiveMotion(self, x, y):
        """Track the cursor position while no button is pressed."""
        self.mouseState.x = x
        self.mouseState.y = y
    def keyboard(self, key, x, y):
        """User hook: key press; ESC exits the process.

        NOTE(review): under Python 3 PyOpenGL delivers ``key`` as bytes, so
        the comparison with the str constant ESCAPE may never match -- verify.
        """
        #print "KeyboardPress: %s" % key
        if key == ESCAPE:
            sys.exit()
    def keyboardUp(self, key, x, y):
        """User hook: key release."""
        #print "KeyboardUp: %s" % key
        pass
    def special(self, key, x, y):
        """User hook: special (arrow/function) key press."""
        #print "SpecialKeyPress: %s" % key
        pass
    def specialUp(self, key, x, y):
        """User hook: special key release."""
        #print "SpecialKeyUp: %s" % key
        pass
    # Basic Draw helpers ---------------------------------------
    def drawAxis(self, length):
        """Draw red/green/blue X/Y/Z axis lines of the given length.

        Saves and restores lighting, color and depth-test state around the draw.
        """
        lighting = glGetBoolean(GL_LIGHTING)
        light0 = glGetBoolean(GL_LIGHT0)
        light1 = glGetBoolean(GL_LIGHT1)
        color = glGetFloatv(GL_CURRENT_COLOR)
        depth = glGetBoolean(GL_DEPTH_TEST)
        glEnable(GL_DEPTH_TEST)
        glDisable(GL_LIGHTING)
        glDisable(GL_LIGHT0)
        glLineWidth(1.0)
        glBegin(GL_LINES)
        glColor(1.0, 0.0, 0.0, 1.0)
        glVertex(0.0, 0.0, 0.0)
        glVertex(length, 0.0, 0.0)
        glColor(0.0, 1.0, 0.0, 1.0)
        glVertex(0.0, 0.0, 0.0)
        glVertex(0.0, length, 0.0)
        glColor(0.0, 0.0, 1.0, 1.0)
        glVertex(0.0, 0.0, 0.0)
        glVertex(0.0, 0.0, length)
        glEnd()
        if lighting:
            glEnable(GL_LIGHTING)
        if light0:
            glEnable(GL_LIGHT0)
        if light1:
            glEnable(GL_LIGHT1)
        if not depth:
            glDisable(GL_DEPTH_TEST)
        glColor(color)
    def drawHorizon(self, x, y, xTick, yTick):
        """Draw a grid on the XZ plane spanning [-x, x] x [-y, y].

        All four parameters must be integers (they feed ``range``).  State is
        saved/restored as in drawAxis.
        """
        lighting = glGetBoolean(GL_LIGHTING)
        light0 = glGetBoolean(GL_LIGHT0)
        light1 = glGetBoolean(GL_LIGHT1)
        color = glGetFloatv(GL_CURRENT_COLOR)
        depth = glGetBoolean(GL_DEPTH_TEST)
        glEnable(GL_DEPTH_TEST)
        glDisable(GL_LIGHTING)
        glDisable(GL_LIGHT0)
        glLineWidth(1.0)
        glBegin(GL_LINES)
        glColor(0.7, 0.7, 0.7, 1.0)
        for xi in range(-x, x+xTick, xTick):
            glVertex(xi, 0.0, -y)
            glVertex(xi, 0.0, y)
        for yi in range(-y, y+yTick, yTick):
            glVertex(-x, 0.0, yi)
            glVertex(x, 0.0, yi)
        glEnd()
        if lighting:
            glEnable(GL_LIGHTING)
        if light0:
            glEnable(GL_LIGHT0)
        if light1:
            glEnable(GL_LIGHT1)
        if not depth:
            glDisable(GL_DEPTH_TEST)
        glColor(color)
    def overlayString(self, string, x, y, color=(1, 1, 1)):
        """Draw bitmap text as a 2D overlay at pixel position (x, y).

        Negative x/y measure from the right/bottom window edge.  Projection
        and modelview matrices are replaced for the overlay and left reset
        (the push/pop calls are commented out), lighting/depth state is
        restored afterwards.
        """
        lighting = glGetBoolean(GL_LIGHTING)
        light0 = glGetBoolean(GL_LIGHT0)
        light1 = glGetBoolean(GL_LIGHT1)
        currentcolor = glGetFloatv(GL_CURRENT_COLOR)
        depth = glGetBoolean(GL_DEPTH_TEST)
        glEnable(GL_DEPTH_TEST)
        glDisable(GL_LIGHTING)
        glDisable(GL_LIGHT0)
        glDisable(GL_LIGHT1)
        glLineWidth(1.0)
        glMatrixMode(GL_PROJECTION)
        # glPushMatrix()
        glLoadIdentity()
        glOrtho(0.0, 2.0, 2.0, 0.0, -1.0, 1.0)
        glMatrixMode(GL_MODELVIEW)
        # glPushMatrix()
        glLoadIdentity()
        # glPushAttrib(GL_ENABLE_BIT)
        glDisable(GL_DEPTH_TEST)
        glDisable(GL_CULL_FACE)
        width = glutGet(GLUT_WINDOW_WIDTH)
        height = glutGet(GLUT_WINDOW_HEIGHT)/2
        glColor(color)
        if x >= 0:
            positionX = x/width*2.0
        else:
            positionX = (width + x)/width*2.0
        if y >= 0:
            positionY = (y + 10.0)/height*2.0
        else:
            positionY = (height + y)/height*2.0
        glRasterPos3f(positionX, positionY, 0.0)
        # NOTE(review): the loop variable reuses the parameter name `x`.
        for x in string:
            glutBitmapCharacter(GLUT_BITMAP_HELVETICA_12, ord(x))
        # glPopAttrib()
        # glPopMatrix()
        glMatrixMode(GL_PROJECTION)
        # glPopMatrix()
        glMatrixMode(GL_MODELVIEW)
        if lighting:
            glEnable(GL_LIGHTING)
        if light0:
            glEnable(GL_LIGHT0)
        if light1:
            glEnable(GL_LIGHT1)
        if depth:
            glEnable(GL_DEPTH_TEST)
        glColor(currentcolor)
    def drawBlock(self, w, h, d):
        """Draw a solid axis-aligned box of size w x h x d centered at the origin."""
        glPushMatrix()
        glScale(w/100, h/100, d/100 )
        glutSolidCube(100)
        glPopMatrix()
    def drawSquer(self, w, h):
        """Draw a filled 2w x 2h rectangle in the XY plane centered at the origin."""
        glPushMatrix()
        glBegin(GL_QUADS)
        glVertex(w, h, 0)
        glVertex(-w, h, 0)
        glVertex(-w, -h, 0)
        glVertex(w, -h, 0)
        glEnd()
        glPopMatrix()
    def setColor(self, color):
        """Set the immediate color plus front-face ambient/diffuse material."""
        glColor(color[0], color[1], color[2])
        glMaterial(GL_FRONT, GL_AMBIENT, color)
        glMaterial(GL_FRONT, GL_DIFFUSE, color)
if __name__ == '__main__':
    # Launch a demo window; press ESC to quit.
    app = GlutWrapper()
    app.title = b"Tracer"
    app.startFramework()
| 27.983003 | 74 | 0.571877 | from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
import sys
from math import *
import time
ESCAPE = '\033'
class Camera(object):
def __init__(self):
self.lock_x = 0
self.lock_y = 0
self.lock_z = 0
self.distance = 300
self.pan = 0.0
self.tilt = 0.0
class MouseState(object):
def __init__(self):
self.button = 0
self.pressed = 0
self.x = 0
self.y = 0
class GlutWrapper(object):
def __init__(self):
self.windowWidth = 640
self.windowHeight = 480
self.windowPositionX = 100
self.windowPositionY = 100
self.title = b"Glut Wrapper"
self.camera = Camera()
self.mouseState = MouseState()
self.frameElapsed = 0.0
self.displayElapsed = 0.0
self.elapsedTime = 0.0
self.frameTime = 1.0/20.0
def startFramework(self):
glutInit(sys.argv)
glutInitDisplayMode(GLUT_RGB | GLUT_DEPTH)
glutInitWindowPosition(self.windowPositionX, self.windowPositionY)
glutInitWindowSize(self.windowWidth, self.windowHeight)
glutCreateWindow(self.title)
glutDisplayFunc(self.displayFramework)
glutReshapeFunc(self.reshape)
glutIdleFunc(self.idle)
glutMouseFunc(self.mouse)
glutMotionFunc(self.motion)
glutPassiveMotionFunc(self.passiveMotion)
glutKeyboardFunc(self.keyboard)
glutKeyboardUpFunc(self.keyboardUp)
glutSpecialFunc(self.special)
glutSpecialUpFunc(self.specialUp)
self.initialize()
self.load()
glutMainLoop()
def displayFramework(self):
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
self.setLights()
self.setCamera()
self.display(self.getDisplayElapsed())
glutSwapBuffers()
def setCamera(self):
width = glutGet(GLUT_WINDOW_WIDTH)
height = glutGet(GLUT_WINDOW_HEIGHT)
glViewport(0, 0, width, height)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(30.0, float(width) / height, 0.5, 10000)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
distance = self.camera.distance
tilt = self.camera.tilt
pan = self.camera.pan
lock_x = self.camera.lock_x
lock_y = self.camera.lock_y
lock_z = self.camera.lock_z
gluLookAt(
distance*sin(pan)*cos(tilt) + lock_x,
distance*sin(tilt) + lock_y,
distance*cos(pan)*cos(tilt) + lock_z,
lock_x, lock_y, lock_z,
0.0,
1.0,
0.0)
def setLights(self):
light1_position = (0.0, 1.0, 1.0, 0.0)
light2_position = (0.0, -1.0, -1.0, 0.0)
white_light = (1.0, 1.0, 1.0, 1.0)
lmodel_ambient = (0.2, 0.2, 0.2, 1.0)
ambient_light = (0.4, 0.4, 0.4, 1.0)
glLight(GL_LIGHT0, GL_POSITION, light1_position)
glLight(GL_LIGHT0, GL_AMBIENT, ambient_light)
glLight(GL_LIGHT0, GL_DIFFUSE, white_light)
glLight(GL_LIGHT0, GL_SPECULAR, white_light)
glLight(GL_LIGHT1, GL_POSITION, light2_position)
glLight(GL_LIGHT1, GL_AMBIENT, lmodel_ambient)
glLight(GL_LIGHT1, GL_DIFFUSE, ambient_light)
glLight(GL_LIGHT1, GL_SPECULAR, lmodel_ambient)
def getFrameElapsed(self):
now = time.time()
if self.frameElapsed == 0.0:
self.frameElapsed = now
elapsed = now - self.frameElapsed
self.frameElapsed = now
return elapsed
def getDisplayElapsed(self):
now = time.time()
if self.displayElapsed == 0.0:
self.displayElapsed = now
elapsed = now - self.displayElapsed
self.displayElapsed = now
return elapsed
def display(self, deltaTime):
glMaterial(GL_FRONT, GL_AMBIENT, (0.8, 0.6, 0.5, 1.0))
glMaterial(GL_FRONT, GL_DIFFUSE, (0.8, 0.6, 0.5, 1.0))
glutSolidTeapot(50)
def idle(self):
self.elapsedTime += self.getFrameElapsed()
if self.elapsedTime >= self.frameTime:
glutPostRedisplay()
self.elapsedTime -= self.frameTime
def reshape(self, w, h):
glViewport(0, 0, w, h)
def initialize(self):
glClearColor(0.4, 0.5, 0.5, 1.0)
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
glEnable(GL_LIGHT1)
glShadeModel(GL_SMOOTH)
glEnable(GL_DEPTH_TEST)
def load(self):
pass
def mouse(self, button, state, x, y):
pass
def motion(self, x, y):
pass
def passiveMotion(self, x, y):
self.mouseState.x = x
self.mouseState.y = y
def keyboard(self, key, x, y):
if key == ESCAPE:
sys.exit()
def keyboardUp(self, key, x, y):
pass
def special(self, key, x, y):
pass
def specialUp(self, key, x, y):
pass
def drawAxis(self, length):
lighting = glGetBoolean(GL_LIGHTING)
light0 = glGetBoolean(GL_LIGHT0)
light1 = glGetBoolean(GL_LIGHT1)
color = glGetFloatv(GL_CURRENT_COLOR)
depth = glGetBoolean(GL_DEPTH_TEST)
glEnable(GL_DEPTH_TEST)
glDisable(GL_LIGHTING)
glDisable(GL_LIGHT0)
glLineWidth(1.0)
glBegin(GL_LINES)
glColor(1.0, 0.0, 0.0, 1.0)
glVertex(0.0, 0.0, 0.0)
glVertex(length, 0.0, 0.0)
glColor(0.0, 1.0, 0.0, 1.0)
glVertex(0.0, 0.0, 0.0)
glVertex(0.0, length, 0.0)
glColor(0.0, 0.0, 1.0, 1.0)
glVertex(0.0, 0.0, 0.0)
glVertex(0.0, 0.0, length)
glEnd()
if lighting:
glEnable(GL_LIGHTING)
if light0:
glEnable(GL_LIGHT0)
if light1:
glEnable(GL_LIGHT1)
if not depth:
glDisable(GL_DEPTH_TEST)
glColor(color)
def drawHorizon(self, x, y, xTick, yTick):
lighting = glGetBoolean(GL_LIGHTING)
light0 = glGetBoolean(GL_LIGHT0)
light1 = glGetBoolean(GL_LIGHT1)
color = glGetFloatv(GL_CURRENT_COLOR)
depth = glGetBoolean(GL_DEPTH_TEST)
glEnable(GL_DEPTH_TEST)
glDisable(GL_LIGHTING)
glDisable(GL_LIGHT0)
glLineWidth(1.0)
glBegin(GL_LINES)
glColor(0.7, 0.7, 0.7, 1.0)
for xi in range(-x, x+xTick, xTick):
glVertex(xi, 0.0, -y)
glVertex(xi, 0.0, y)
for yi in range(-y, y+yTick, yTick):
glVertex(-x, 0.0, yi)
glVertex(x, 0.0, yi)
glEnd()
if lighting:
glEnable(GL_LIGHTING)
if light0:
glEnable(GL_LIGHT0)
if light1:
glEnable(GL_LIGHT1)
if not depth:
glDisable(GL_DEPTH_TEST)
glColor(color)
def overlayString(self, string, x, y, color=(1, 1, 1)):
lighting = glGetBoolean(GL_LIGHTING)
light0 = glGetBoolean(GL_LIGHT0)
light1 = glGetBoolean(GL_LIGHT1)
currentcolor = glGetFloatv(GL_CURRENT_COLOR)
depth = glGetBoolean(GL_DEPTH_TEST)
glEnable(GL_DEPTH_TEST)
glDisable(GL_LIGHTING)
glDisable(GL_LIGHT0)
glDisable(GL_LIGHT1)
glLineWidth(1.0)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0.0, 2.0, 2.0, 0.0, -1.0, 1.0)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glDisable(GL_DEPTH_TEST)
glDisable(GL_CULL_FACE)
width = glutGet(GLUT_WINDOW_WIDTH)
height = glutGet(GLUT_WINDOW_HEIGHT)/2
glColor(color)
if x >= 0:
positionX = x/width*2.0
else:
positionX = (width + x)/width*2.0
if y >= 0:
positionY = (y + 10.0)/height*2.0
else:
positionY = (height + y)/height*2.0
glRasterPos3f(positionX, positionY, 0.0)
for x in string:
glutBitmapCharacter(GLUT_BITMAP_HELVETICA_12, ord(x))
glMatrixMode(GL_PROJECTION)
glMatrixMode(GL_MODELVIEW)
if lighting:
glEnable(GL_LIGHTING)
if light0:
glEnable(GL_LIGHT0)
if light1:
glEnable(GL_LIGHT1)
if depth:
glEnable(GL_DEPTH_TEST)
glColor(currentcolor)
def drawBlock(self, w, h, d):
glPushMatrix()
glScale(w/100, h/100, d/100 )
glutSolidCube(100)
glPopMatrix()
def drawSquer(self, w, h):
glPushMatrix()
glBegin(GL_QUADS)
glVertex(w, h, 0)
glVertex(-w, h, 0)
glVertex(-w, -h, 0)
glVertex(w, -h, 0)
glEnd()
glPopMatrix()
def setColor(self, color):
glColor(color[0], color[1], color[2])
glMaterial(GL_FRONT, GL_AMBIENT, color)
glMaterial(GL_FRONT, GL_DIFFUSE, color)
if __name__ == '__main__':
gl = GlutWrapper()
gl.title = b"Tracer"
gl.startFramework()
| true | true |
f731a2862378f2a67c66b8effa16a59cf7ae26f1 | 867 | py | Python | py/pe/pe8.py | kittttttan/pe | 6f87e4527793198c393700fedbdd52274fec5b44 | [
"MIT"
] | null | null | null | py/pe/pe8.py | kittttttan/pe | 6f87e4527793198c393700fedbdd52274fec5b44 | [
"MIT"
] | null | null | null | py/pe/pe8.py | kittttttan/pe | 6f87e4527793198c393700fedbdd52274fec5b44 | [
"MIT"
] | 1 | 2016-09-01T22:47:28.000Z | 2016-09-01T22:47:28.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Discover the largest product of five consecutive digits
in the 1000-digit number.
"""
def pe8(fname="../../res/pe8.txt", n=5):
    """
    Discover the largest product of five consecutive digits
    in the 1000-digit number.
    >>> pe8()
    40824
    """
    # Read the digit string and drop line breaks.
    with open(fname, 'r') as f:
        digits = f.read().replace('\n', '')
    total = len(digits)
    if total < n:
        raise ValueError
    # Slide a window of n digits and keep the best product seen.
    best = 0
    for start in range(total - n + 1):
        product = 1
        for ch in digits[start:start + n]:
            product *= int(ch)
        if product > best:
            best = product
    return best
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    # Simple REPL: read a window size per line and print the answer;
    # any of these interrupt/EOF conditions ends the loop quietly.
    try:
        while True:
            size = int(input('> '))
            print(pe8(n=size))
    except (SyntaxError, EOFError, KeyboardInterrupt, NameError):
        pass
| 20.642857 | 65 | 0.500577 |
def pe8(fname="../../res/pe8.txt", n=5):
with open(fname, 'r') as f:
s = f.read()
s = s.replace('\n', '')
ls = len(s)
if ls < n:
raise ValueError
m = 0
for x in range(ls - n + 1):
t = 1
for y in range(n):
t *= int(s[x + y])
if m < t:
m = t
return(m)
if __name__ == "__main__":
import doctest
doctest.testmod()
try:
while True:
s = input('> ')
n = int(s)
print(pe8(n=n))
except (SyntaxError, EOFError, KeyboardInterrupt, NameError):
pass
| true | true |
f731a3593f56de78afad7e6ff95b896326f0909e | 2,885 | py | Python | ExcelPixelator.py | nibble-4bits/excel-pixel-art | 793c04c69f5035034dc42e26948b3e6390c81668 | [
"MIT"
] | 2 | 2021-01-02T02:46:30.000Z | 2021-06-30T00:37:06.000Z | ExcelPixelator.py | nibble-4bits/excel-pixel-art | 793c04c69f5035034dc42e26948b3e6390c81668 | [
"MIT"
] | null | null | null | ExcelPixelator.py | nibble-4bits/excel-pixel-art | 793c04c69f5035034dc42e26948b3e6390c81668 | [
"MIT"
] | 1 | 2021-06-30T00:37:13.000Z | 2021-06-30T00:37:13.000Z | from openpyxl import Workbook
from openpyxl.utils import get_column_letter
from openpyxl.styles import PatternFill
from PIL import Image
class ExcelPixelator:
    """Render an image as Excel "pixel art" by coloring worksheet cells.

    The image is divided into ``pixel_size`` x ``pixel_size`` squares; each
    square's average RGB color becomes the fill of one cell in the workbook.
    """
    def __init__(self, input_path, output_path, file_name, cell_size, pixel_size):
        """Load the image and validate that pixel_size tiles it exactly.

        :param input_path: path of the source image (converted to RGB)
        :param output_path: directory the .xlsx file is written to
        :param file_name: output workbook name, without extension
        :param cell_size: edge length of each worksheet cell, in pixels
        :param pixel_size: edge length (in image pixels) of each averaged square
        """
        self.image = Image.open(input_path).convert('RGB')
        self.output_path = output_path
        self.file_name = file_name
        self.cell_size = cell_size  # size of the cell in pixels
        self.pixel_size = pixel_size
        self.__is_pixelsize_common_factor()
    def create_pixel_art(self):
        """Build the workbook: one filled, roughly square cell per averaged square."""
        default_excel_font_size = 16
        wb = Workbook()
        ws = wb.create_sheet('Pixel-Art')
        wb.remove(wb['Sheet'])  # remove default worksheet
        width, height = self.image.size
        pixel_map = self.get_pixel_map(width, height)
        for row in range(len(pixel_map)):
            # Row height is in points; this scales cell_size (pixels) so cells
            # come out square under the default font -- TODO confirm the factor.
            ws.row_dimensions[row + 1].height = self.cell_size * 10 / default_excel_font_size
            for col in range(len(pixel_map[row])):
                curr_col = get_column_letter(col + 1)
                ws.column_dimensions[curr_col].width = self.cell_size / 9
                rgbTuple = pixel_map[row][col]
                fill_color = self.__rgbToHex(rgbTuple)
                ws[f'{curr_col}{row + 1}'].fill = PatternFill(start_color=fill_color, end_color=fill_color, fill_type='solid')
        wb.save(f'{self.output_path}/{self.file_name}.xlsx')
        wb.close()
    def get_pixel_map(self, w, h):
        """Average the image into an (h // pixel_size) x (w // pixel_size) grid.

        Squares are visited row-major across the image width; (i, j) track the
        matching cell of ``pixel_map``.

        :param w: image width in pixels
        :param h: image height in pixels
        :return: nested list of (r, g, b) average tuples
        """
        pixel_map = [[0 for x in range(w // self.pixel_size)] for y in range(h // self.pixel_size)]
        squares = w * h // self.pixel_size ** 2
        i, j = 0, 0
        for sq in range(squares):
            rAvg, gAvg, bAvg = 0, 0, 0
            # BUG FIX: the row of square `sq` is sq*pixel_size // w (squares run
            # row-major across the *width*); the original divided by h, which
            # misread or overran the image whenever it was not square.
            row_start = (sq * self.pixel_size // w) * self.pixel_size
            row_end = row_start + self.pixel_size
            col_start = sq * self.pixel_size % w
            col_end = col_start + self.pixel_size
            for row in range(row_start, row_end):
                for col in range(col_start, col_end):
                    r, g, b = self.image.getpixel((col, row))
                    rAvg += r
                    gAvg += g
                    bAvg += b
            rAvg //= self.pixel_size ** 2
            gAvg //= self.pixel_size ** 2
            bAvg //= self.pixel_size ** 2
            pixel_map[i][j] = (rAvg, gAvg, bAvg)
            i = i + 1 if j >= (w // self.pixel_size) - 1 else i
            j = (j + 1) % (w // self.pixel_size)
        return pixel_map
    def __rgbToHex(self, rgbTuple):
        """Format an (r, g, b) tuple as an uppercase RRGGBB hex string."""
        return ('%02x%02x%02x' % rgbTuple).upper()
    def __is_pixelsize_common_factor(self):
        """Abort unless pixel_size divides both image dimensions exactly.

        NOTE(review): exiting the process from a constructor is hostile to
        library use; raising ValueError would be friendlier.  Kept as-is to
        preserve behavior.
        """
        width, height = self.image.size
        if width % self.pixel_size != 0 or height % self.pixel_size != 0:
            print('ERROR: Pixel size must be a number divisible exactly by both the image width and height')
            exit(1)
| 38.986486 | 126 | 0.586828 | from openpyxl import Workbook
from openpyxl.utils import get_column_letter
from openpyxl.styles import PatternFill
from PIL import Image
class ExcelPixelator:
def __init__(self, input_path, output_path, file_name, cell_size, pixel_size):
self.image = Image.open(input_path).convert('RGB')
self.output_path = output_path
self.file_name = file_name
self.cell_size = cell_size
self.pixel_size = pixel_size
self.__is_pixelsize_common_factor()
def create_pixel_art(self):
default_excel_font_size = 16
wb = Workbook()
ws = wb.create_sheet('Pixel-Art')
wb.remove(wb['Sheet'])
width, height = self.image.size
pixel_map = self.get_pixel_map(width, height)
for row in range(len(pixel_map)):
ws.row_dimensions[row + 1].height = self.cell_size * 10 / default_excel_font_size
for col in range(len(pixel_map[row])):
curr_col = get_column_letter(col + 1)
ws.column_dimensions[curr_col].width = self.cell_size / 9
rgbTuple = pixel_map[row][col]
fill_color = self.__rgbToHex(rgbTuple)
ws[f'{curr_col}{row + 1}'].fill = PatternFill(start_color=fill_color, end_color=fill_color, fill_type='solid')
wb.save(f'{self.output_path}/{self.file_name}.xlsx')
wb.close()
def get_pixel_map(self, w, h):
pixel_map = [[0 for x in range(w // self.pixel_size)] for y in range(h // self.pixel_size)]
squares = w * h // self.pixel_size ** 2
i, j = 0, 0
for sq in range(squares):
rAvg, gAvg, bAvg = 0, 0, 0
row_start = (sq * self.pixel_size // h) * self.pixel_size
row_end = row_start + self.pixel_size
col_start = sq * self.pixel_size % w
col_end = col_start + self.pixel_size
for row in range(row_start, row_end):
for col in range(col_start, col_end):
r, g, b = self.image.getpixel((col, row))
rAvg += r
gAvg += g
bAvg += b
rAvg //= self.pixel_size ** 2
gAvg //= self.pixel_size ** 2
bAvg //= self.pixel_size ** 2
pixel_map[i][j] = (rAvg, gAvg, bAvg)
i = i + 1 if j >= (w // self.pixel_size) - 1 else i
j = (j + 1) % (w // self.pixel_size)
return pixel_map
def __rgbToHex(self, rgbTuple):
return ('%02x%02x%02x' % rgbTuple).upper()
def __is_pixelsize_common_factor(self):
width, height = self.image.size
if width % self.pixel_size != 0 or height % self.pixel_size != 0:
print('ERROR: Pixel size must be a number divisible exactly by both the image width and height')
exit(1)
| true | true |
f731a40b7f102fd430b7e6b6d58a594d1b5f7c7b | 3,806 | py | Python | scripts/spectral_clustering_demo.py | vipavlovic/pyprobml | 59a2edc682d0163955db5e2f27491ad772b60141 | [
"MIT"
] | 4,895 | 2016-08-17T22:28:34.000Z | 2022-03-31T17:07:15.000Z | scripts/spectral_clustering_demo.py | vipavlovic/pyprobml | 59a2edc682d0163955db5e2f27491ad772b60141 | [
"MIT"
] | 446 | 2016-09-17T14:35:29.000Z | 2022-03-31T19:59:33.000Z | scripts/spectral_clustering_demo.py | vipavlovic/pyprobml | 59a2edc682d0163955db5e2f27491ad772b60141 | [
"MIT"
] | 1,160 | 2016-08-18T23:19:27.000Z | 2022-03-31T12:44:07.000Z | import superimport
import itertools
import matplotlib.pyplot as plt
import numpy as np
from scipy.linalg import eigh
from sklearn.cluster import KMeans
from sklearn.metrics.pairwise import rbf_kernel
import pyprobml_utils as pml
plt.style.use('classic')  # render all figures with Matplotlib's 'classic' (pre-2.0 default) style
def spectral_clustering_demo():
    """Compare k-means with normalized spectral clustering on two toy datasets
    (concentric circles and a spiral) that plain k-means cannot separate.

    Saves four figures via plot_data and finally shows them.
    """
    np.random.seed(0)
    num_clusters = 2
    for data_type, data in (('circle', sample_circle(num_clusters)),
                            ('spiral', sample_spiral())):
        kmeans = KMeans(n_clusters=num_clusters, random_state=0)
        kmeans.fit(data)
        assignments = kmeans.predict(data)
        plot_data(data, assignments, 'k-means clustering', data_type)
        # RBF affinity W and its symmetric normalization D^-1/2 W D^-1/2.
        sigma = 0.1
        gamma = 1 / (2 * sigma ** 2)
        W = rbf_kernel(data, gamma=gamma)
        d = np.sum(W, 1, keepdims=True)
        sqrt_d = np.sqrt(d)
        normalized_W = (W / sqrt_d) / sqrt_d.T
        paranoid_assert(W, normalized_W, False)
        # We select the largest eigen values of normalized_W, rather
        # than the smallest eigenvalues of I - normalized_W. The two
        # problems are equivalent. The eigen values can be converted
        # between the two problems via `1 - eigen_values`. The eigen
        # vectors are the same between both problems.
        eigen_values, eigen_vectors = eigh(
            normalized_W,
            # Get only the top num_clusters eigenvalues.  The `eigvals` kwarg
            # used here previously was deprecated in SciPy 1.5 and removed in
            # 1.9; subset_by_index selects the same inclusive index range.
            subset_by_index=(data.shape[0] - num_clusters, data.shape[0] - 1))
        eigen_vectors = eigen_vectors / np.linalg.norm(eigen_vectors, axis=1, keepdims=True)
        kmeans.fit(eigen_vectors)
        assignments = kmeans.predict(eigen_vectors)
        plot_data(data, assignments, 'spectral clustering', data_type)
    plt.show()
def paranoid_assert(W, normalized_W, enable):
    """Sanity-check the graph Laplacian and the normalized affinity matrix.

    :param W: (n, n) symmetric affinity matrix
    :param normalized_W: candidate for D^{-1/2} W D^{-1/2}
    :param enable: skip the checks entirely when False
    :raises AssertionError: if either invariant fails
    """
    if not enable:
        return
    D = np.diag(np.sum(W, 1))
    L = D - W
    D_inv_sqrt = np.diag(1 / np.diag(np.sqrt(D)))
    np.testing.assert_almost_equal(np.sum(L, 1), 0, err_msg="Rows of Laplacian must sum to 0.")
    # BUG FIX: the original compared against the *elementwise* product
    # D_inv_sqrt * W * D_inv_sqrt (which zeroes every off-diagonal entry) and
    # hid the mismatch behind atol=1, making the check vacuous.  Use true
    # matrix products and a tight tolerance.
    np.testing.assert_allclose(normalized_W, D_inv_sqrt @ W @ D_inv_sqrt, rtol=0, atol=1e-8)
def sample_circle(num_clusters):
points_per_cluster = 500
bandwidth = 0.1
data = np.zeros((num_clusters * points_per_cluster, 2))
for k, n in itertools.product(range(num_clusters), range(points_per_cluster)):
theta = 2 * np.pi * np.random.uniform()
rho = k + 1 + np.random.randn() * bandwidth
x, y = pol2cart(theta, rho)
idx = k * points_per_cluster + n
data[idx, 0] = x
data[idx, 1] = y
data = data.reshape((num_clusters * points_per_cluster, 2))
return data
def pol2cart(theta, rho):
x = rho * np.cos(theta)
y = rho * np.sin(theta)
return(x, y)
def sample_spiral():
# Only 2 clusters in this case. This is hard-coded.
points_per_cluster = 500
bandwidth = 0.1
data = np.empty((points_per_cluster, 2))
w = np.arange(1, points_per_cluster + 1).astype(np.float32) / points_per_cluster
data[:,0] = (4 * w + 1) * np.cos(2*np.pi * w) + np.random.randn(points_per_cluster) * bandwidth
data[:,1] = (4 * w + 1) * np.sin(2*np.pi * w) + np.random.randn(points_per_cluster) * bandwidth
data = np.vstack((data, -data))
return data
def plot_data(data, assignments, title, data_type):
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(data[assignments == 0, 0], data[assignments == 0, 1], 'o', color='r')
ax.plot(data[assignments == 1, 0], data[assignments == 1, 1], 'o', color='b')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.axis('square')
ax.grid(True)
ax.set_title(title)
plt.tight_layout()
pml.savefig(f"{data_type}_{title.replace(' ', '_')}.pdf")
if __name__ == '__main__':
spectral_clustering_demo()
| 35.90566 | 99 | 0.633211 | import superimport
import itertools
import matplotlib.pyplot as plt
import numpy as np
from scipy.linalg import eigh
from sklearn.cluster import KMeans
from sklearn.metrics.pairwise import rbf_kernel
import pyprobml_utils as pml
plt.style.use('classic')
def spectral_clustering_demo():
np.random.seed(0)
num_clusters = 2
for data_type, data in (('circle', sample_circle(num_clusters)),
('spiral', sample_spiral())):
kmeans = KMeans(n_clusters=num_clusters, random_state=0)
kmeans.fit(data)
assignments = kmeans.predict(data)
plot_data(data, assignments, 'k-means clustering', data_type)
sigma = 0.1
gamma = 1 / (2 * sigma ** 2)
W = rbf_kernel(data, gamma=gamma)
d = np.sum(W, 1, keepdims=True)
sqrt_d = np.sqrt(d)
normalized_W = (W / sqrt_d) / sqrt_d.T
paranoid_assert(W, normalized_W, False)
eigen_values, eigen_vectors = eigh(normalized_W,
eigvals=(data.shape[0] - num_clusters, data.shape[0]-1))
eigen_vectors = eigen_vectors / np.linalg.norm(eigen_vectors, axis=1, keepdims=True)
kmeans.fit(eigen_vectors)
assignments = kmeans.predict(eigen_vectors)
plot_data(data, assignments, 'spectral clustering', data_type)
plt.show()
def paranoid_assert(W, normalized_W, enable):
if not enable:
return
D = np.diag(np.sum(W, 1))
L = D - W
D_inv_sqrt = np.diag(1 / np.diag(np.sqrt(D)))
np.testing.assert_almost_equal(np.sum(L, 1), 0, err_msg="Rows of Laplacian must sum to 0.")
np.testing.assert_allclose(normalized_W, D_inv_sqrt * W * D_inv_sqrt, rtol=0, atol=1)
def sample_circle(num_clusters):
points_per_cluster = 500
bandwidth = 0.1
data = np.zeros((num_clusters * points_per_cluster, 2))
for k, n in itertools.product(range(num_clusters), range(points_per_cluster)):
theta = 2 * np.pi * np.random.uniform()
rho = k + 1 + np.random.randn() * bandwidth
x, y = pol2cart(theta, rho)
idx = k * points_per_cluster + n
data[idx, 0] = x
data[idx, 1] = y
data = data.reshape((num_clusters * points_per_cluster, 2))
return data
def pol2cart(theta, rho):
x = rho * np.cos(theta)
y = rho * np.sin(theta)
return(x, y)
def sample_spiral():
points_per_cluster = 500
bandwidth = 0.1
data = np.empty((points_per_cluster, 2))
w = np.arange(1, points_per_cluster + 1).astype(np.float32) / points_per_cluster
data[:,0] = (4 * w + 1) * np.cos(2*np.pi * w) + np.random.randn(points_per_cluster) * bandwidth
data[:,1] = (4 * w + 1) * np.sin(2*np.pi * w) + np.random.randn(points_per_cluster) * bandwidth
data = np.vstack((data, -data))
return data
def plot_data(data, assignments, title, data_type):
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(data[assignments == 0, 0], data[assignments == 0, 1], 'o', color='r')
ax.plot(data[assignments == 1, 0], data[assignments == 1, 1], 'o', color='b')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.axis('square')
ax.grid(True)
ax.set_title(title)
plt.tight_layout()
pml.savefig(f"{data_type}_{title.replace(' ', '_')}.pdf")
if __name__ == '__main__':
spectral_clustering_demo()
| true | true |
f731a4104b7ef8d6f39fe8aa8ecf2aad84c9136c | 5,770 | py | Python | run_fits.py | ruizca/pseudoXspec | a8ad2ca3744fe73e5ace78aedbabc49e32e2202a | [
"MIT"
] | null | null | null | run_fits.py | ruizca/pseudoXspec | a8ad2ca3744fe73e5ace78aedbabc49e32e2202a | [
"MIT"
] | null | null | null | run_fits.py | ruizca/pseudoXspec | a8ad2ca3744fe73e5ace78aedbabc49e32e2202a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Created on Fri Jun 19 10:46:32 2015
@author: ruizca
"""
import argparse
import logging
import subprocess
from itertools import count
from pathlib import Path
from astropy.coordinates.sky_coordinate import SkyCoord
from astropy.table import Table
from astropy.units import UnitTypeError
from gdpyc import GasMap
from tqdm.contrib import tzip
logging.basicConfig(level=logging.INFO)
def get_last_source_fit(last_source_file):
try:
with last_source_file.open("r") as fp:
first_source = int(fp.readline())
except FileNotFoundError:
first_source = 0
return first_source
def update_last_source_fit(last_source, last_source_file):
with last_source_file.open("w") as fp:
fp.write(str(last_source))
def check_results_folder(results_folder):
if results_folder.exists():
raise FileExistsError(f"results_folder '{results_folder}' already exists!")
else:
results_folder.mkdir()
def _get_redshift(sample, zcol):
if not zcol:
zcol = "DEFAULT_REDSHIFT"
sample[zcol] = 0.0
return sample[zcol]
def _set_coords(ra, dec, unit):
try:
coords = SkyCoord(ra, dec)
except UnitTypeError:
coords = SkyCoord(ra, dec, unit=unit)
return coords
def _get_nhgal(sample, nhcol, racol, deccol, unit="deg"):
if not nhcol:
nhcol = "NHGAL"
coords = _set_coords(sample[racol], sample[deccol], unit)
sample[nhcol] = GasMap.nh(coords, nhmap="LAB")
return sample[nhcol]
def get_sources_data(sample_file, racol, deccol, zcol=None, nhcol=None, first_source=0):
sample = Table.read(sample_file)
sample = sample[first_source:]
obsid = sample["OBS_ID"]
detid = sample["DETID"]
z = _get_redshift(sample, zcol)
nhgal = _get_nhgal(sample, nhcol, racol, deccol)
return obsid, detid, z, nhgal
def stack_spectra(obsid, detid, spec_folder):
# Find spectra of interest for this detection
obs_path = spec_folder.joinpath(obsid)
spec_files = obs_path.glob(f"{detid}_SRSPEC_*.pha")
# Create stack file for existing spectra in the observation
stack_file = Path(f"spec_{detid}.lis")
with stack_file.open("w") as fp:
for spec in spec_files:
fp.write(spec.resolve().as_posix() + "\n")
return stack_file
def remove_stack_spectra(stack_file):
try:
stack_file.unlink()
except FileNotFoundError:
logging.warning("No stack file!")
def fit_detection(z, nh, obsid, detid, results_folder, spectra_folder, fixgamma=True):
task = "./fit_Xspec.py"
args = [
"--redshift",
f"{z:f}",
"--nh",
str(nh),
"--obsid",
obsid,
"--detid",
str(detid),
"--output_folder",
results_folder.as_posix(),
"--spectra_folder",
spectra_folder.as_posix(),
]
if fixgamma:
args += ["--fixGamma"]
logging.debug(" ".join([task] + args))
subprocess.check_output([task] + args, stderr=subprocess.STDOUT)
def main(args):
spec_folder = Path(args.spec_folder)
results_folder = Path(args.results_folder)
lastsource_file = Path(args.file_lastsource)
first_source = get_last_source_fit(lastsource_file)
if first_source == 0:
check_results_folder(results_folder)
obsids, detids, redshifts, nhgals = get_sources_data(
args.sources_table, args.racol, args.deccol, args.zcol, args.nhcol, first_source
)
for obsid, detid, z, nh, current_source in tzip(
obsids, detids, redshifts, nhgals, count(first_source)
):
try:
fit_detection(z, nh, obsid, detid, results_folder, spec_folder, args.fixgamma)
update_last_source_fit(current_source + 1, lastsource_file)
except Exception as e:
logging.error(e)
logging.error(f"Something went wrong fitting detection {detid}")
if __name__ == "__main__":
# Parser for shell parameters
parser = argparse.ArgumentParser(description="Fitting X-ray pseudospectra")
parser.add_argument(
"--catalogue",
dest="sources_table",
action="store",
default=None,
help="Full route to the detections catalogue.",
)
parser.add_argument(
"--spec_folder",
dest="spec_folder",
action="store",
default="./data/spectra/",
help="Folder of the pseudospectra.",
)
parser.add_argument(
"--results_folder",
dest="results_folder",
action="store",
default="./fit_results/",
help="Folder for saving the fit results.",
)
parser.add_argument(
"--racol",
dest="racol",
action="store",
default="XMM_RA",
help="Name of the RA column in the catalogue.",
)
parser.add_argument(
"--deccol",
dest="deccol",
action="store",
default="XMM_DEC",
help="Name of the Dec column in the catalogue.",
)
parser.add_argument(
"--zcol",
dest="zcol",
action="store",
default=None,
help="Name of the redshift column in the catalogue.",
)
parser.add_argument(
"--nhcol",
dest="nhcol",
action="store",
default=None,
help="Name of the Galactic NH column in the catalogue.",
)
parser.add_argument(
"--lsf",
dest="file_lastsource",
action="store",
default="last_source.dat",
help="File to store the last fitted source.",
)
parser.add_argument(
"--fixGamma",
dest="fixgamma",
action="store_true",
default=False,
help="Fit with a fixed photon index (1.9).",
)
main(parser.parse_args())
| 25.874439 | 90 | 0.629116 |
import argparse
import logging
import subprocess
from itertools import count
from pathlib import Path
from astropy.coordinates.sky_coordinate import SkyCoord
from astropy.table import Table
from astropy.units import UnitTypeError
from gdpyc import GasMap
from tqdm.contrib import tzip
logging.basicConfig(level=logging.INFO)
def get_last_source_fit(last_source_file):
try:
with last_source_file.open("r") as fp:
first_source = int(fp.readline())
except FileNotFoundError:
first_source = 0
return first_source
def update_last_source_fit(last_source, last_source_file):
with last_source_file.open("w") as fp:
fp.write(str(last_source))
def check_results_folder(results_folder):
if results_folder.exists():
raise FileExistsError(f"results_folder '{results_folder}' already exists!")
else:
results_folder.mkdir()
def _get_redshift(sample, zcol):
if not zcol:
zcol = "DEFAULT_REDSHIFT"
sample[zcol] = 0.0
return sample[zcol]
def _set_coords(ra, dec, unit):
try:
coords = SkyCoord(ra, dec)
except UnitTypeError:
coords = SkyCoord(ra, dec, unit=unit)
return coords
def _get_nhgal(sample, nhcol, racol, deccol, unit="deg"):
if not nhcol:
nhcol = "NHGAL"
coords = _set_coords(sample[racol], sample[deccol], unit)
sample[nhcol] = GasMap.nh(coords, nhmap="LAB")
return sample[nhcol]
def get_sources_data(sample_file, racol, deccol, zcol=None, nhcol=None, first_source=0):
sample = Table.read(sample_file)
sample = sample[first_source:]
obsid = sample["OBS_ID"]
detid = sample["DETID"]
z = _get_redshift(sample, zcol)
nhgal = _get_nhgal(sample, nhcol, racol, deccol)
return obsid, detid, z, nhgal
def stack_spectra(obsid, detid, spec_folder):
obs_path = spec_folder.joinpath(obsid)
spec_files = obs_path.glob(f"{detid}_SRSPEC_*.pha")
stack_file = Path(f"spec_{detid}.lis")
with stack_file.open("w") as fp:
for spec in spec_files:
fp.write(spec.resolve().as_posix() + "\n")
return stack_file
def remove_stack_spectra(stack_file):
try:
stack_file.unlink()
except FileNotFoundError:
logging.warning("No stack file!")
def fit_detection(z, nh, obsid, detid, results_folder, spectra_folder, fixgamma=True):
task = "./fit_Xspec.py"
args = [
"--redshift",
f"{z:f}",
"--nh",
str(nh),
"--obsid",
obsid,
"--detid",
str(detid),
"--output_folder",
results_folder.as_posix(),
"--spectra_folder",
spectra_folder.as_posix(),
]
if fixgamma:
args += ["--fixGamma"]
logging.debug(" ".join([task] + args))
subprocess.check_output([task] + args, stderr=subprocess.STDOUT)
def main(args):
spec_folder = Path(args.spec_folder)
results_folder = Path(args.results_folder)
lastsource_file = Path(args.file_lastsource)
first_source = get_last_source_fit(lastsource_file)
if first_source == 0:
check_results_folder(results_folder)
obsids, detids, redshifts, nhgals = get_sources_data(
args.sources_table, args.racol, args.deccol, args.zcol, args.nhcol, first_source
)
for obsid, detid, z, nh, current_source in tzip(
obsids, detids, redshifts, nhgals, count(first_source)
):
try:
fit_detection(z, nh, obsid, detid, results_folder, spec_folder, args.fixgamma)
update_last_source_fit(current_source + 1, lastsource_file)
except Exception as e:
logging.error(e)
logging.error(f"Something went wrong fitting detection {detid}")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Fitting X-ray pseudospectra")
parser.add_argument(
"--catalogue",
dest="sources_table",
action="store",
default=None,
help="Full route to the detections catalogue.",
)
parser.add_argument(
"--spec_folder",
dest="spec_folder",
action="store",
default="./data/spectra/",
help="Folder of the pseudospectra.",
)
parser.add_argument(
"--results_folder",
dest="results_folder",
action="store",
default="./fit_results/",
help="Folder for saving the fit results.",
)
parser.add_argument(
"--racol",
dest="racol",
action="store",
default="XMM_RA",
help="Name of the RA column in the catalogue.",
)
parser.add_argument(
"--deccol",
dest="deccol",
action="store",
default="XMM_DEC",
help="Name of the Dec column in the catalogue.",
)
parser.add_argument(
"--zcol",
dest="zcol",
action="store",
default=None,
help="Name of the redshift column in the catalogue.",
)
parser.add_argument(
"--nhcol",
dest="nhcol",
action="store",
default=None,
help="Name of the Galactic NH column in the catalogue.",
)
parser.add_argument(
"--lsf",
dest="file_lastsource",
action="store",
default="last_source.dat",
help="File to store the last fitted source.",
)
parser.add_argument(
"--fixGamma",
dest="fixgamma",
action="store_true",
default=False,
help="Fit with a fixed photon index (1.9).",
)
main(parser.parse_args())
| true | true |
f731a426c81b0dc8e44312a6025cd8a35b8138fd | 1,329 | py | Python | week9/tests/test_calc_class.py | zzsza/kyle-school | 8cf6cffd3d86a25c29f914a9d4802cdb8e6dd478 | [
"MIT"
] | 189 | 2019-11-15T11:33:50.000Z | 2022-03-27T08:23:35.000Z | week9/tests/test_calc_class.py | zzsza/kyle-school | 8cf6cffd3d86a25c29f914a9d4802cdb8e6dd478 | [
"MIT"
] | 3 | 2020-05-29T03:26:32.000Z | 2021-07-11T15:46:07.000Z | week9/tests/test_calc_class.py | zzsza/kyle-school | 8cf6cffd3d86a25c29f914a9d4802cdb8e6dd478 | [
"MIT"
] | 39 | 2019-11-16T04:02:06.000Z | 2022-03-21T04:18:14.000Z | import pytest
from calc_class import Calculator
# 상수
NUMBER_1 = 3.0
NUMBER_2 = 2.0
# Fixtures
@pytest.fixture
def calculator():
return Calculator()
def verify_answer(expected, answer, last_answer):
assert expected == answer
assert expected == last_answer
# ======Test Cases 시작======
def test_last_answer_init(calculator):
# TODO : Test Code
def test_add(calculator):
# TODO: Use NUMBER_1, NUMBER_2을 사용해 Test
def test_subtract(calculator):
# TODO: Use NUMBER_1, NUMBER_2을 사용해 Test
def test_subtract_negative(calculator):
# TODO: Use NUMBER_1, NUMBER_2을 사용해 Test
def test_multiply(calculator):
# TODO: Use NUMBER_1, NUMBER_2을 사용해 Test
def test_divide(calculator):
# TODO: Use NUMBER_1, NUMBER_2을 사용해 Test
def test_divide_by_zero(calculator):
# TODO : ZeroDivisionError가 나오는지 확인하는 Test
@pytest.mark.parametrize("a,b,expected", [
(NUMBER_1, NUMBER_2, NUMBER_1),
(NUMBER_2, NUMBER_1, NUMBER_1),
(NUMBER_1, NUMBER_1, NUMBER_1),
])
def test_maximum(calculator, a, b, expected):
# TODO : parametrize를 사용해 파라미터를 주입
@pytest.mark.parametrize("a,b,expected", [
(NUMBER_1, NUMBER_2, NUMBER_2),
(NUMBER_2, NUMBER_1, NUMBER_2),
(NUMBER_2, NUMBER_2, NUMBER_2),
])
def test_minimum(calculator, a, b, expected):
# TODO : parametrize를 사용해 파라미터를 주입
| 19.26087 | 49 | 0.705794 | import pytest
from calc_class import Calculator
NUMBER_1 = 3.0
NUMBER_2 = 2.0
@pytest.fixture
def calculator():
return Calculator()
def verify_answer(expected, answer, last_answer):
assert expected == answer
assert expected == last_answer
def test_last_answer_init(calculator):
def test_add(calculator):
def test_subtract(calculator):
def test_subtract_negative(calculator):
def test_multiply(calculator):
def test_divide(calculator):
def test_divide_by_zero(calculator):
@pytest.mark.parametrize("a,b,expected", [
(NUMBER_1, NUMBER_2, NUMBER_1),
(NUMBER_2, NUMBER_1, NUMBER_1),
(NUMBER_1, NUMBER_1, NUMBER_1),
])
def test_maximum(calculator, a, b, expected):
@pytest.mark.parametrize("a,b,expected", [
(NUMBER_1, NUMBER_2, NUMBER_2),
(NUMBER_2, NUMBER_1, NUMBER_2),
(NUMBER_2, NUMBER_2, NUMBER_2),
])
def test_minimum(calculator, a, b, expected):
| false | true |
f731a56cb0c8590006223c1be11722be665011fc | 1,998 | py | Python | nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnssoarec_args.py | culbertm/NSttyPython | ff9f6aedae3fb8495342cd0fc4247c819cf47397 | [
"Apache-2.0"
] | null | null | null | nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnssoarec_args.py | culbertm/NSttyPython | ff9f6aedae3fb8495342cd0fc4247c819cf47397 | [
"Apache-2.0"
] | null | null | null | nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnssoarec_args.py | culbertm/NSttyPython | ff9f6aedae3fb8495342cd0fc4247c819cf47397 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2008-2016 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class dnssoarec_args :
r""" Provides additional arguments required for fetching the dnssoarec resource.
"""
def __init__(self) :
self._type = None
self._nodeid = None
@property
def type(self) :
r"""Type of records to display. Available settings function as follows:
* ADNS - Display all authoritative address records.
* PROXY - Display all proxy address records.
* ALL - Display all address records.<br/>Possible values = ALL, ADNS, PROXY.
"""
try :
return self._type
except Exception as e:
raise e
@type.setter
def type(self, type) :
r"""Type of records to display. Available settings function as follows:
* ADNS - Display all authoritative address records.
* PROXY - Display all proxy address records.
* ALL - Display all address records.<br/>Possible values = ALL, ADNS, PROXY
"""
try :
self._type = type
except Exception as e:
raise e
@property
def nodeid(self) :
r"""Unique number that identifies the cluster node.<br/>Minimum value = 0<br/>Maximum value = 31.
"""
try :
return self._nodeid
except Exception as e:
raise e
@nodeid.setter
def nodeid(self, nodeid) :
r"""Unique number that identifies the cluster node.<br/>Minimum value = 0<br/>Maximum value = 31
"""
try :
self._nodeid = nodeid
except Exception as e:
raise e
class Type:
ALL = "ALL"
ADNS = "ADNS"
PROXY = "PROXY"
| 27.75 | 101 | 0.698699 |
class dnssoarec_args :
def __init__(self) :
self._type = None
self._nodeid = None
@property
def type(self) :
try :
return self._type
except Exception as e:
raise e
@type.setter
def type(self, type) :
try :
self._type = type
except Exception as e:
raise e
@property
def nodeid(self) :
try :
return self._nodeid
except Exception as e:
raise e
@nodeid.setter
def nodeid(self, nodeid) :
try :
self._nodeid = nodeid
except Exception as e:
raise e
class Type:
ALL = "ALL"
ADNS = "ADNS"
PROXY = "PROXY"
| true | true |
f731a5771e4fe79a045785785375a014bc97a2e2 | 717 | py | Python | bot_v_bot.py | garybake/man_alphagb | e69c954e3f42dd556f9b5b7b9edcbf2856389d2f | [
"MIT"
] | null | null | null | bot_v_bot.py | garybake/man_alphagb | e69c954e3f42dd556f9b5b7b9edcbf2856389d2f | [
"MIT"
] | null | null | null | bot_v_bot.py | garybake/man_alphagb | e69c954e3f42dd556f9b5b7b9edcbf2856389d2f | [
"MIT"
] | null | null | null | import time
# from dlgo.agent import naive
from dlgo.agent import naive
from dlgo import gotypes
from dlgo import goboard_slow as goboard
from dlgo.utils import print_board, print_move
def main():
board_size = 9
game = goboard.GameState.new_game(board_size)
bots = {
gotypes.Player.black: naive.RandomBot(),
gotypes.Player.white: naive.RandomBot()
}
while not game.is_over():
time.sleep(0.3)
# print(chr(27) + "[2J")
# print_board(game.board)
bot_move = bots[game.next_player].select_move(game)
print_move(game.next_player, bot_move)
game = game.apply_move(bot_move)
if __name__ == '__main__':
main()
# x = naive.RandomBot() | 25.607143 | 59 | 0.666667 | import time
from dlgo.agent import naive
from dlgo import gotypes
from dlgo import goboard_slow as goboard
from dlgo.utils import print_board, print_move
def main():
board_size = 9
game = goboard.GameState.new_game(board_size)
bots = {
gotypes.Player.black: naive.RandomBot(),
gotypes.Player.white: naive.RandomBot()
}
while not game.is_over():
time.sleep(0.3)
bot_move = bots[game.next_player].select_move(game)
print_move(game.next_player, bot_move)
game = game.apply_move(bot_move)
if __name__ == '__main__':
main()
| true | true |
f731a5baeb0a5f98871e45535782937905736879 | 187,564 | bzl | Python | go_third_party.bzl | enp0s3/project-infra | a53795f5df0dbb22c94ba04da47e9ba2fdfe60ed | [
"Apache-2.0"
] | 16 | 2017-07-30T10:51:51.000Z | 2021-09-02T17:58:03.000Z | go_third_party.bzl | enp0s3/project-infra | a53795f5df0dbb22c94ba04da47e9ba2fdfe60ed | [
"Apache-2.0"
] | 1,946 | 2017-07-03T13:00:30.000Z | 2022-03-31T18:55:19.000Z | go_third_party.bzl | enp0s3/project-infra | a53795f5df0dbb22c94ba04da47e9ba2fdfe60ed | [
"Apache-2.0"
] | 103 | 2017-07-03T09:53:36.000Z | 2022-03-22T20:12:00.000Z | load("@bazel_gazelle//:deps.bzl", "go_repository")
def go_deps():
go_repository(
name = "ag_pack_amqp",
build_file_proto_mode = "disable",
importpath = "pack.ag/amqp",
sum = "h1:cuNDWLUTbKRtEZwhB0WQBXf9pGbm87pUBXQhvcFxBWg=",
version = "v0.11.2",
)
go_repository(
name = "cc_mvdan_interfacer",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/interfacer",
sum = "h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I=",
version = "v0.0.0-20180901003855-c20040233aed",
)
go_repository(
name = "cc_mvdan_lint",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/lint",
sum = "h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo=",
version = "v0.0.0-20170908181259-adc824a0674b",
)
go_repository(
name = "cc_mvdan_unparam",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/unparam",
sum = "h1:kAREL6MPwpsk1/PQPFD3Eg7WAQR5mPTWZJaBiG5LDbY=",
version = "v0.0.0-20200501210554-b37ab49443f7",
)
go_repository(
name = "cc_mvdan_xurls_v2",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/xurls/v2",
sum = "h1:r1zSOSNS/kqtpmATyMMMvaZ4/djsesbYz5kr0+qMRWc=",
version = "v2.0.0",
)
go_repository(
name = "co_honnef_go_tools",
build_file_proto_mode = "disable",
importpath = "honnef.co/go/tools",
sum = "h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=",
version = "v0.0.1-2020.1.4",
)
go_repository(
name = "com_github_agnivade_levenshtein",
build_file_proto_mode = "disable",
importpath = "github.com/agnivade/levenshtein",
sum = "h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_alcortesm_tgz",
build_file_proto_mode = "disable",
importpath = "github.com/alcortesm/tgz",
sum = "h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=",
version = "v0.0.0-20161220082320-9c5fe88206d7",
)
go_repository(
name = "com_github_alecthomas_kingpin",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/kingpin",
sum = "h1:5svnBTFgJjZvGKyYBtMB0+m5wvrbUHiqye8wRJMlnYI=",
version = "v2.2.6+incompatible",
)
go_repository(
name = "com_github_alecthomas_template",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/template",
sum = "h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM=",
version = "v0.0.0-20190718012654-fb15b899a751",
)
go_repository(
name = "com_github_alecthomas_units",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/units",
sum = "h1:UQZhZ2O0vMHr2cI+DC1Mbh0TJxzA3RcLoMsFw+aXw7E=",
version = "v0.0.0-20190924025748-f65c72e2690d",
)
go_repository(
name = "com_github_andreyvit_diff",
build_file_proto_mode = "disable",
importpath = "github.com/andreyvit/diff",
sum = "h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=",
version = "v0.0.0-20170406064948-c7f18ee00883",
)
go_repository(
name = "com_github_andybalholm_brotli",
build_file_proto_mode = "disable",
importpath = "github.com/andybalholm/brotli",
sum = "h1:bZ28Hqta7TFAK3Q08CMvv8y3/8ATaEqv2nGoc6yff6c=",
version = "v0.0.0-20190621154722-5f990b63d2d6",
)
go_repository(
name = "com_github_andygrunwald_go_gerrit",
build_file_proto_mode = "disable",
importpath = "github.com/andygrunwald/go-gerrit",
sum = "h1:7gokoTWteZhP1t2f0OzrFFXlyL8o0+b0r4ZaRV9PXOs=",
version = "v0.0.0-20210709065208-9d38b0be0268",
)
go_repository(
name = "com_github_andygrunwald_go_jira",
build_file_proto_mode = "disable",
importpath = "github.com/andygrunwald/go-jira",
sum = "h1:vvIImGgX32bHfoiyUwkNo+/YrPnRczNarvhLOncP6dE=",
version = "v1.13.0",
)
go_repository(
name = "com_github_anmitsu_go_shlex",
build_file_proto_mode = "disable",
importpath = "github.com/anmitsu/go-shlex",
sum = "h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=",
version = "v0.0.0-20161002113705-648efa622239",
)
go_repository(
name = "com_github_antihax_optional",
build_file_proto_mode = "disable",
importpath = "github.com/antihax/optional",
sum = "h1:uZuxRZCz65cG1o6K/xUqImNcYKtmk9ylqaH0itMSvzA=",
version = "v0.0.0-20180407024304-ca021399b1a6",
)
go_repository(
name = "com_github_apache_thrift",
build_file_proto_mode = "disable",
importpath = "github.com/apache/thrift",
sum = "h1:pODnxUFNcjP9UTLZGTdeh+j16A8lJbRvD3rOtrk/7bs=",
version = "v0.12.0",
)
go_repository(
name = "com_github_apex_log",
build_file_proto_mode = "disable",
importpath = "github.com/apex/log",
sum = "h1:1fyfbPvUwD10nMoh3hY6MXzvZShJQn9/ck7ATgAt5pA=",
version = "v1.3.0",
)
go_repository(
name = "com_github_apex_logs",
build_file_proto_mode = "disable",
importpath = "github.com/apex/logs",
sum = "h1:KmEBVwfDUOTFcBO8cfkJYwdQ5487UZSN+GteOGPmiro=",
version = "v0.0.4",
)
go_repository(
name = "com_github_aphistic_golf",
build_file_proto_mode = "disable",
importpath = "github.com/aphistic/golf",
sum = "h1:2KLQMJ8msqoPHIPDufkxVcoTtcmE5+1sL9950m4R9Pk=",
version = "v0.0.0-20180712155816-02c07f170c5a",
)
go_repository(
name = "com_github_aphistic_sweet",
build_file_proto_mode = "disable",
importpath = "github.com/aphistic/sweet",
sum = "h1:I4z+fAUqvKfvZV/CHi5dV0QuwbmIvYYFDjG0Ss5QpAs=",
version = "v0.2.0",
)
go_repository(
name = "com_github_armon_circbuf",
build_file_proto_mode = "disable",
importpath = "github.com/armon/circbuf",
sum = "h1:QEF07wC0T1rKkctt1RINW/+RMTVmiwxETico2l3gxJA=",
version = "v0.0.0-20150827004946-bbbad097214e",
)
go_repository(
name = "com_github_armon_consul_api",
build_file_proto_mode = "disable",
importpath = "github.com/armon/consul-api",
sum = "h1:G1bPvciwNyF7IUmKXNt9Ak3m6u9DE1rF+RmtIkBpVdA=",
version = "v0.0.0-20180202201655-eb2c6b5be1b6",
)
go_repository(
name = "com_github_armon_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-metrics",
sum = "h1:8GUt8eRujhVEGZFFEjBj46YV4rDjvGrNxb0KMWYkL2I=",
version = "v0.0.0-20180917152333-f0300d1749da",
)
go_repository(
name = "com_github_armon_go_radix",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-radix",
sum = "h1:BUAU3CGlLvorLI26FmByPp2eC2qla6E1Tw+scpcg/to=",
version = "v0.0.0-20180808171621-7fddfc383310",
)
go_repository(
name = "com_github_armon_go_socks5",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-socks5",
sum = "h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=",
version = "v0.0.0-20160902184237-e75332964ef5",
)
go_repository(
name = "com_github_asaskevich_govalidator",
build_file_proto_mode = "disable",
importpath = "github.com/asaskevich/govalidator",
sum = "h1:zV3ejI06GQ59hwDQAvmK1qxOQGB3WuVTRoY0okPTAv0=",
version = "v0.0.0-20200108200545-475eaeb16496",
)
go_repository(
name = "com_github_aws_aws_k8s_tester",
build_file_proto_mode = "disable",
importpath = "github.com/aws/aws-k8s-tester",
sum = "h1:Zr5NWiRK5fhmRIlhrsTwrY8yB488FyN6iulci2D7VaI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_aws_aws_sdk_go",
build_file_proto_mode = "disable",
importpath = "github.com/aws/aws-sdk-go",
sum = "h1:cyZp8TvUbH9rrShdrwULtCj4pB5szddrw9aKHUsw1Ic=",
version = "v1.37.22",
)
go_repository(
name = "com_github_aybabtme_rgbterm",
build_file_proto_mode = "disable",
importpath = "github.com/aybabtme/rgbterm",
sum = "h1:WWB576BN5zNSZc/M9d/10pqEx5VHNhaQ/yOVAkmj5Yo=",
version = "v0.0.0-20170906152045-cc83f3b3ce59",
)
go_repository(
name = "com_github_azure_azure_amqp_common_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-amqp-common-go/v2",
sum = "h1:+QbFgmWCnPzdaRMfsI0Yb6GrRdBj5jVL8N3EXuEUcBQ=",
version = "v2.1.0",
)
go_repository(
name = "com_github_azure_azure_pipeline_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-pipeline-go",
sum = "h1:6oiIS9yaG6XCCzhgAgKFfIWyo4LLCiDhZot6ltoThhY=",
version = "v0.2.2",
)
go_repository(
name = "com_github_azure_azure_sdk_for_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-sdk-for-go",
sum = "h1:m4oQOm3HXtQh2Ipata+pLSS1kGUD/7ikkvNq81XM/7s=",
version = "v46.3.0+incompatible",
)
go_repository(
name = "com_github_azure_azure_service_bus_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-service-bus-go",
sum = "h1:G1qBLQvHCFDv9pcpgwgFkspzvnGknJRR0PYJ9ytY/JA=",
version = "v0.9.1",
)
# --------------------------------------------------------------------------
# Auto-generated Go dependency pins (gazelle-style `go_repository` rules).
# Each entry declares one external Go module for Bazel:
#   name       - Bazel repository name derived from the import path
#   importpath - canonical Go module path
#   sum        - go.sum-format ("h1:") module checksum for verification
#   version    - pinned module version (pseudo-versions encode a commit date
#                and hash; "+incompatible" marks pre-module v2+ releases)
# build_file_proto_mode = "disable" turns off proto rule generation for the
# generated BUILD files of every repository below.
# Do not hand-edit sums/versions; regenerate from go.mod/go.sum instead.
# --------------------------------------------------------------------------
go_repository(
name = "com_github_azure_azure_storage_blob_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-storage-blob-go",
sum = "h1:53qhf0Oxa0nOjgbDeeYPUeyiNmafAFEY95rZLK0Tj6o=",
version = "v0.8.0",
)
go_repository(
name = "com_github_azure_go_ansiterm",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-ansiterm",
sum = "h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8=",
version = "v0.0.0-20170929234023-d6e3b3328b78",
)
# NOTE: the only entry here with a `replace` directive — mirrors a go.mod
# replace pinning Azure/go-autorest to itself at v14.2.0+incompatible.
go_repository(
name = "com_github_azure_go_autorest",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest",
replace = "github.com/Azure/go-autorest",
sum = "h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=",
version = "v14.2.0+incompatible",
)
go_repository(
name = "com_github_azure_go_autorest_autorest",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest",
sum = "h1:gI8ytXbxMfI+IVbI9mP2JGCTXIuhHLgRlvQ9X4PsnHE=",
version = "v0.11.12",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_adal",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/adal",
sum = "h1:Y3bBUV4rTuxenJJs41HU3qmqsb+auo+a3Lz+PlJPpL0=",
version = "v0.9.5",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_azure_auth",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/azure/auth",
sum = "h1:iM6UAvjR97ZIeR93qTcwpKNMpV+/FTWjwEbuPD495Tk=",
version = "v0.4.2",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_azure_cli",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/azure/cli",
sum = "h1:LXl088ZQlP0SBppGFsRZonW6hSvwgL5gRByMbvUbx8U=",
version = "v0.3.1",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_date",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/date",
sum = "h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw=",
version = "v0.3.0",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_mocks",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/mocks",
sum = "h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk=",
version = "v0.4.1",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_to",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/to",
sum = "h1:oXVqrxakqqV1UZdSazDOPOLvOIz+XA683u8EctwboHk=",
version = "v0.4.0",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_validation",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/validation",
sum = "h1:3I9AAI63HfcLtphd9g39ruUwRI+Ca+z/f36KHPFRUss=",
version = "v0.3.0",
)
go_repository(
name = "com_github_azure_go_autorest_logger",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/logger",
sum = "h1:e4RVHVZKC5p6UANLJHkM4OfR1UKZPj8Wt8Pcx+3oqrE=",
version = "v0.2.0",
)
go_repository(
name = "com_github_azure_go_autorest_tracing",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/tracing",
sum = "h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo=",
version = "v0.6.0",
)
go_repository(
name = "com_github_beorn7_perks",
build_file_proto_mode = "disable",
importpath = "github.com/beorn7/perks",
sum = "h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=",
version = "v1.0.1",
)
go_repository(
name = "com_github_bgentry_speakeasy",
build_file_proto_mode = "disable",
importpath = "github.com/bgentry/speakeasy",
sum = "h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=",
version = "v0.1.0",
)
go_repository(
name = "com_github_bitly_go_simplejson",
build_file_proto_mode = "disable",
importpath = "github.com/bitly/go-simplejson",
sum = "h1:6IH+V8/tVMab511d5bn4M7EwGXZf9Hj6i2xSwkNEM+Y=",
version = "v0.5.0",
)
go_repository(
name = "com_github_bketelsen_crypt",
build_file_proto_mode = "disable",
importpath = "github.com/bketelsen/crypt",
sum = "h1:+0HFd5KSZ/mm3JmhmrDukiId5iR6w4+BdFtfSy4yWIc=",
version = "v0.0.3-0.20200106085610-5cbc8cc4026c",
)
go_repository(
name = "com_github_blakesmith_ar",
build_file_proto_mode = "disable",
importpath = "github.com/blakesmith/ar",
sum = "h1:m935MPodAbYS46DG4pJSv7WO+VECIWUQ7OJYSoTrMh4=",
version = "v0.0.0-20190502131153-809d4375e1fb",
)
go_repository(
name = "com_github_blang_semver",
build_file_proto_mode = "disable",
importpath = "github.com/blang/semver",
sum = "h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ=",
version = "v3.5.1+incompatible",
)
go_repository(
name = "com_github_bmizerany_assert",
build_file_proto_mode = "disable",
importpath = "github.com/bmizerany/assert",
sum = "h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=",
version = "v0.0.0-20160611221934-b7ed37b82869",
)
go_repository(
name = "com_github_bmizerany_perks",
build_file_proto_mode = "disable",
importpath = "github.com/bmizerany/perks",
sum = "h1:AP/Y7sqYicnjGDfD5VcY4CIfh1hRXBUavxrvELjTiOE=",
version = "v0.0.0-20141205001514-d9a9656a3a4b",
)
go_repository(
name = "com_github_bombsimon_wsl_v2",
build_file_proto_mode = "disable",
importpath = "github.com/bombsimon/wsl/v2",
sum = "h1:/DdSteYCq4lPX+LqDg7mdoxm14UxzZPoDT0taYc3DTU=",
version = "v2.2.0",
)
go_repository(
name = "com_github_bombsimon_wsl_v3",
build_file_proto_mode = "disable",
importpath = "github.com/bombsimon/wsl/v3",
sum = "h1:E5SRssoBgtVFPcYWUOFJEcgaySgdtTNYzsSKDOY7ss8=",
version = "v3.1.0",
)
go_repository(
name = "com_github_bshuster_repo_logrus_logstash_hook",
build_file_proto_mode = "disable",
importpath = "github.com/bshuster-repo/logrus-logstash-hook",
sum = "h1:pgAtgj+A31JBVtEHu2uHuEx0n+2ukqUJnS2vVe5pQNA=",
version = "v0.4.1",
)
go_repository(
name = "com_github_bugsnag_bugsnag_go",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/bugsnag-go",
sum = "h1:rFt+Y/IK1aEZkEHchZRSq9OQbsSzIT/OrI8YFFmRIng=",
version = "v0.0.0-20141110184014-b1d153021fcd",
)
go_repository(
name = "com_github_bugsnag_osext",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/osext",
sum = "h1:otBG+dV+YK+Soembjv71DPz3uX/V/6MMlSyD9JBQ6kQ=",
version = "v0.0.0-20130617224835-0dd3f918b21b",
)
go_repository(
name = "com_github_bugsnag_panicwrap",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/panicwrap",
sum = "h1:nvj0OLI3YqYXer/kZD8Ri1aaunCxIEsOst1BVJswV0o=",
version = "v0.0.0-20151223152923-e2c28503fcd0",
)
go_repository(
name = "com_github_burntsushi_toml",
build_file_proto_mode = "disable",
importpath = "github.com/BurntSushi/toml",
sum = "h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=",
version = "v0.3.1",
)
go_repository(
name = "com_github_burntsushi_xgb",
build_file_proto_mode = "disable",
importpath = "github.com/BurntSushi/xgb",
sum = "h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc=",
version = "v0.0.0-20160522181843-27f122750802",
)
# NOTE(review): bare "v0.0.0" (no pseudo-version suffix) is unusual among
# these pins — presumably intentional, but verify against go.mod/go.sum.
go_repository(
name = "com_github_bwmarrin_snowflake",
build_file_proto_mode = "disable",
importpath = "github.com/bwmarrin/snowflake",
sum = "h1:dRbqXFjM10uA3wdrVZ8Kh19uhciRMOroUYJ7qAqDLhY=",
version = "v0.0.0",
)
go_repository(
name = "com_github_caarlos0_ctrlc",
build_file_proto_mode = "disable",
importpath = "github.com/caarlos0/ctrlc",
sum = "h1:2DtF8GSIcajgffDFJzyG15vO+1PuBWOMUdFut7NnXhw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_campoy_unique",
build_file_proto_mode = "disable",
importpath = "github.com/campoy/unique",
sum = "h1:V9a67dfYqPLAvzk5hMQOXYJlZ4SLIXgyKIE+ZiHzgGQ=",
version = "v0.0.0-20180121183637-88950e537e7e",
)
go_repository(
name = "com_github_cavaliercoder_go_cpio",
build_file_proto_mode = "disable",
importpath = "github.com/cavaliercoder/go-cpio",
sum = "h1:hHg27A0RSSp2Om9lubZpiMgVbvn39bsUmW9U5h0twqc=",
version = "v0.0.0-20180626203310-925f9528c45e",
)
go_repository(
name = "com_github_census_instrumentation_opencensus_proto",
build_file_proto_mode = "disable",
importpath = "github.com/census-instrumentation/opencensus-proto",
sum = "h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk=",
version = "v0.2.1",
)
go_repository(
name = "com_github_cespare_xxhash",
build_file_proto_mode = "disable",
importpath = "github.com/cespare/xxhash",
sum = "h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=",
version = "v1.1.0",
)
go_repository(
name = "com_github_cespare_xxhash_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cespare/xxhash/v2",
sum = "h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=",
version = "v2.1.1",
)
go_repository(
name = "com_github_chai2010_gettext_go",
build_file_proto_mode = "disable",
importpath = "github.com/chai2010/gettext-go",
sum = "h1:7aWHqerlJ41y6FOsEUvknqgXnGmJyJSbjhAWq5pO4F8=",
version = "v0.0.0-20160711120539-c6fed771bfd5",
)
go_repository(
name = "com_github_chzyer_logex",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/logex",
sum = "h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=",
version = "v1.1.10",
)
go_repository(
name = "com_github_chzyer_readline",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/readline",
sum = "h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=",
version = "v0.0.0-20180603132655-2972be24d48e",
)
go_repository(
name = "com_github_chzyer_test",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/test",
sum = "h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=",
version = "v0.0.0-20180213035817-a1ea475d72b1",
)
go_repository(
name = "com_github_cihub_seelog",
build_file_proto_mode = "disable",
importpath = "github.com/cihub/seelog",
sum = "h1:kHaBemcxl8o/pQ5VM1c8PVE1PubbNx3mjUr09OqWGCs=",
version = "v0.0.0-20170130134532-f561c5e57575",
)
go_repository(
name = "com_github_clarketm_json",
build_file_proto_mode = "disable",
importpath = "github.com/clarketm/json",
sum = "h1:0JketcMdLC16WGnRGJiNmTXuQznDEQaiknxSPRBxg+k=",
version = "v1.13.4",
)
go_repository(
name = "com_github_client9_misspell",
build_file_proto_mode = "disable",
importpath = "github.com/client9/misspell",
sum = "h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=",
version = "v0.3.4",
)
go_repository(
name = "com_github_cloudevents_sdk_go",
build_file_proto_mode = "disable",
importpath = "github.com/cloudevents/sdk-go",
sum = "h1:gS5I0s2qPmdc4GBPlUmzZU7RH30BaiOdcRJ1RkXnPrc=",
version = "v1.0.0",
)
go_repository(
name = "com_github_cloudevents_sdk_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cloudevents/sdk-go/v2",
sum = "h1:AUdGJwaSUnA+VvepKqgjy6XDkPcf0hf/3L7icEs1ibs=",
version = "v2.0.0",
)
go_repository(
name = "com_github_cloudflare_cloudflare_go",
build_file_proto_mode = "disable",
importpath = "github.com/cloudflare/cloudflare-go",
sum = "h1:bhMGoNhAg21DuqJjU9jQepRRft6vYfo6pejT3NN4V6A=",
version = "v0.13.2",
)
go_repository(
name = "com_github_cncf_udpa_go",
build_file_proto_mode = "disable",
importpath = "github.com/cncf/udpa/go",
sum = "h1:WBZRG4aNOuI15bLRrCgN8fCq8E5Xuty6jGbmSNEvSsU=",
version = "v0.0.0-20191209042840-269d4d468f6f",
)
go_repository(
name = "com_github_cockroachdb_datadriven",
build_file_proto_mode = "disable",
importpath = "github.com/cockroachdb/datadriven",
sum = "h1:OaNxuTZr7kxeODyLWsRMC+OD03aFUH+mW6r2d+MWa5Y=",
version = "v0.0.0-20190809214429-80d97fb3cbaa",
)
go_repository(
name = "com_github_containerd_cgroups",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/cgroups",
sum = "h1:tSNMc+rJDfmYntojat8lljbt1mgKNpTxUZJsSzJ9Y1s=",
version = "v0.0.0-20190919134610-bf292b21730f",
)
go_repository(
name = "com_github_containerd_console",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/console",
sum = "h1:uict5mhHFTzKLUCufdSLym7z/J0CbBJT59lYbP9wtbg=",
version = "v0.0.0-20180822173158-c12b1e7919c1",
)
go_repository(
name = "com_github_containerd_containerd",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/containerd",
sum = "h1:LoIzb5y9x5l8VKAlyrbusNPXqBY0+kviRloxFUMFwKc=",
version = "v1.3.3",
)
go_repository(
name = "com_github_containerd_continuity",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/continuity",
sum = "h1:kIFnQBO7rQ0XkMe6xEwbybYHBEaWmh/f++laI6Emt7M=",
version = "v0.0.0-20200107194136-26c1120b8d41",
)
go_repository(
name = "com_github_containerd_fifo",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/fifo",
sum = "h1:PUD50EuOMkXVcpBIA/R95d56duJR9VxhwncsFbNnxW4=",
version = "v0.0.0-20190226154929-a9fb20d87448",
)
go_repository(
name = "com_github_containerd_go_runc",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/go-runc",
sum = "h1:esQOJREg8nw8aXj6uCN5dfW5cKUBiEJ/+nni1Q/D/sw=",
version = "v0.0.0-20180907222934-5a6d9f37cfa3",
)
go_repository(
name = "com_github_containerd_ttrpc",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/ttrpc",
sum = "h1:dlfGmNcE3jDAecLqwKPMNX6nk2qh1c1Vg1/YTzpOOF4=",
version = "v0.0.0-20190828154514-0e0f228740de",
)
go_repository(
name = "com_github_containerd_typeurl",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/typeurl",
sum = "h1:JNn81o/xG+8NEo3bC/vx9pbi/g2WI8mtP2/nXzu297Y=",
version = "v0.0.0-20180627222232-a93fcdb778cd",
)
go_repository(
name = "com_github_coreos_bbolt",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/bbolt",
sum = "h1:n6AiVyVRKQFNb6mJlwESEvvLoDyiTzXX7ORAUlkeBdY=",
version = "v1.3.3",
)
go_repository(
name = "com_github_coreos_etcd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/etcd",
sum = "h1:f/Z3EoDSx1yjaIjLQGo1diYUlQYSBrrAQ5vP8NjwXwo=",
version = "v3.3.17+incompatible",
)
go_repository(
name = "com_github_coreos_go_etcd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-etcd",
sum = "h1:bXhRBIXoTm9BYHS3gE0TtQuyNZyeEMux2sDi4oo5YOo=",
version = "v2.0.0+incompatible",
)
go_repository(
name = "com_github_coreos_go_oidc",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-oidc",
sum = "h1:sdJrfw8akMnCuUlaZU3tE/uYXFgfqom8DBE9so9EBsM=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_coreos_go_semver",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-semver",
sum = "h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM=",
version = "v0.3.0",
)
go_repository(
name = "com_github_coreos_go_systemd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-systemd",
sum = "h1:Wf6HqHfScWJN9/ZjdUKyjop4mf3Qdd+1TvvltAvM3m8=",
version = "v0.0.0-20190321100706-95778dfbb74e",
)
go_repository(
name = "com_github_coreos_pkg",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/pkg",
sum = "h1:lBNOc5arjvs8E5mO2tbpBpLoyyu8B6e44T7hJy6potg=",
version = "v0.0.0-20180928190104-399ea9e2e55f",
)
go_repository(
name = "com_github_cpu_goacmedns",
build_file_proto_mode = "disable",
importpath = "github.com/cpu/goacmedns",
sum = "h1:QOeMpIEsIdm1LSASSswjaTf8CXmzcrgy5OeCfHjppA4=",
version = "v0.0.3",
)
go_repository(
name = "com_github_cpuguy83_go_md2man",
build_file_proto_mode = "disable",
importpath = "github.com/cpuguy83/go-md2man",
sum = "h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=",
version = "v1.0.10",
)
go_repository(
name = "com_github_cpuguy83_go_md2man_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cpuguy83/go-md2man/v2",
sum = "h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=",
version = "v2.0.0",
)
go_repository(
name = "com_github_creack_pty",
build_file_proto_mode = "disable",
importpath = "github.com/creack/pty",
sum = "h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw=",
version = "v1.1.11",
)
go_repository(
name = "com_github_cyphar_filepath_securejoin",
build_file_proto_mode = "disable",
importpath = "github.com/cyphar/filepath-securejoin",
sum = "h1:jCwT2GTP+PY5nBz3c/YL5PAIbusElVrPujOBSCj8xRg=",
version = "v0.2.2",
)
go_repository(
name = "com_github_datadog_zstd",
build_file_proto_mode = "disable",
importpath = "github.com/DataDog/zstd",
sum = "h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM=",
version = "v1.4.1",
)
go_repository(
name = "com_github_davecgh_go_spew",
build_file_proto_mode = "disable",
importpath = "github.com/davecgh/go-spew",
sum = "h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=",
version = "v1.1.1",
)
go_repository(
name = "com_github_daviddengcn_go_colortext",
build_file_proto_mode = "disable",
importpath = "github.com/daviddengcn/go-colortext",
sum = "h1:uVsMphB1eRx7xB1njzL3fuMdWRN8HtVzoUOItHMwv5c=",
version = "v0.0.0-20160507010035-511bcaf42ccd",
)
go_repository(
name = "com_github_deislabs_oras",
build_file_proto_mode = "disable",
importpath = "github.com/deislabs/oras",
sum = "h1:If674KraJVpujYR00rzdi0QAmW4BxzMJPVAZJKuhQ0c=",
version = "v0.8.1",
)
go_repository(
name = "com_github_denisenkom_go_mssqldb",
build_file_proto_mode = "disable",
importpath = "github.com/denisenkom/go-mssqldb",
sum = "h1:83Wprp6ROGeiHFAP8WJdI2RoxALQYgdllERc3N5N2DM=",
version = "v0.0.0-20191124224453-732737034ffd",
)
go_repository(
name = "com_github_denverdino_aliyungo",
build_file_proto_mode = "disable",
importpath = "github.com/denverdino/aliyungo",
sum = "h1:p6poVbjHDkKa+wtC8frBMwQtT3BmqGYBjzMwJ63tuR4=",
version = "v0.0.0-20190125010748-a747050bb1ba",
)
go_repository(
name = "com_github_devigned_tab",
build_file_proto_mode = "disable",
importpath = "github.com/devigned/tab",
sum = "h1:3mD6Kb1mUOYeLpJvTVSDwSg5ZsfSxfvxGRTxRsJsITA=",
version = "v0.1.1",
)
go_repository(
name = "com_github_dgrijalva_jwt_go",
build_file_proto_mode = "disable",
importpath = "github.com/dgrijalva/jwt-go",
sum = "h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=",
version = "v3.2.0+incompatible",
)
go_repository(
name = "com_github_dgrijalva_jwt_go_v4",
build_file_proto_mode = "disable",
importpath = "github.com/dgrijalva/jwt-go/v4",
sum = "h1:CaO/zOnF8VvUfEbhRatPcwKVWamvbYd8tQGRWacE9kU=",
version = "v4.0.0-preview1",
)
go_repository(
name = "com_github_dgryski_go_gk",
build_file_proto_mode = "disable",
importpath = "github.com/dgryski/go-gk",
sum = "h1:XOPLOMn/zT4jIgxfxSsoXPxkrzz0FaCHwp33x5POJ+Q=",
version = "v0.0.0-20200319235926-a69029f61654",
)
go_repository(
name = "com_github_dgryski_go_sip13",
build_file_proto_mode = "disable",
importpath = "github.com/dgryski/go-sip13",
sum = "h1:RMLoZVzv4GliuWafOuPuQDKSm1SJph7uCRnnS61JAn4=",
version = "v0.0.0-20181026042036-e10d5fee7954",
)
go_repository(
name = "com_github_digitalocean_godo",
build_file_proto_mode = "disable",
importpath = "github.com/digitalocean/godo",
sum = "h1:IMElzMUpO1dVR8qjSg53+5vDkOLzMbhJt4yTAq7NGCQ=",
version = "v1.44.0",
)
go_repository(
name = "com_github_dimchansky_utfbom",
build_file_proto_mode = "disable",
importpath = "github.com/dimchansky/utfbom",
sum = "h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4=",
version = "v1.1.0",
)
go_repository(
name = "com_github_djarvur_go_err113",
build_file_proto_mode = "disable",
importpath = "github.com/Djarvur/go-err113",
sum = "h1:uCRZZOdMQ0TZPHYTdYpoC0bLYJKPEHPUJ8MeAa51lNU=",
version = "v0.1.0",
)
go_repository(
name = "com_github_djherbis_atime",
build_file_proto_mode = "disable",
importpath = "github.com/djherbis/atime",
sum = "h1:ySLvBAM0EvOGaX7TI4dAM5lWj+RdJUCKtGSEHN8SGBg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_dnaeon_go_vcr",
build_file_proto_mode = "disable",
importpath = "github.com/dnaeon/go-vcr",
sum = "h1:r8L/HqC0Hje5AXMu1ooW8oyQyOFv4GxqpL0nRP7SLLY=",
version = "v1.0.1",
)
go_repository(
name = "com_github_docker_cli",
build_file_proto_mode = "disable",
importpath = "github.com/docker/cli",
sum = "h1:AbI1uj9w4yt6TvfKHfRu7G55KuQe7NCvWPQRKDoXggE=",
version = "v0.0.0-20200210162036-a4bedce16568",
)
go_repository(
name = "com_github_docker_distribution",
build_file_proto_mode = "disable",
importpath = "github.com/docker/distribution",
sum = "h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug=",
version = "v2.7.1+incompatible",
)
go_repository(
name = "com_github_docker_docker",
build_file_proto_mode = "disable",
importpath = "github.com/docker/docker",
sum = "h1:IkZjBSIc8hBjLpqeAbeE5mca5mNgeatLHBy3GO78BWo=",
version = "v1.13.1",
)
go_repository(
name = "com_github_docker_docker_credential_helpers",
build_file_proto_mode = "disable",
importpath = "github.com/docker/docker-credential-helpers",
sum = "h1:zI2p9+1NQYdnG6sMU26EX4aVGlqbInSQxQXLvzJ4RPQ=",
version = "v0.6.3",
)
go_repository(
name = "com_github_docker_go_connections",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-connections",
sum = "h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=",
version = "v0.4.0",
)
go_repository(
name = "com_github_docker_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-metrics",
sum = "h1:yWHOI+vFjEsAakUTSrtqc/SAHrhSkmn48pqjidZX3QA=",
version = "v0.0.0-20180209012529-399ea8c73916",
)
go_repository(
name = "com_github_docker_go_units",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-units",
sum = "h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=",
version = "v0.4.0",
)
go_repository(
name = "com_github_docker_libtrust",
build_file_proto_mode = "disable",
importpath = "github.com/docker/libtrust",
sum = "h1:ZClxb8laGDf5arXfYcAtECDFgAgHklGI8CxgjHnXKJ4=",
version = "v0.0.0-20150114040149-fa567046d9b1",
)
go_repository(
name = "com_github_docker_spdystream",
build_file_proto_mode = "disable",
importpath = "github.com/docker/spdystream",
sum = "h1:cenwrSVm+Z7QLSV/BsnenAOcDXdX4cMv4wP0B/5QbPg=",
version = "v0.0.0-20160310174837-449fdfce4d96",
)
go_repository(
name = "com_github_docopt_docopt_go",
build_file_proto_mode = "disable",
importpath = "github.com/docopt/docopt-go",
sum = "h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=",
version = "v0.0.0-20180111231733-ee0de3bc6815",
)
go_repository(
name = "com_github_dsnet_compress",
build_file_proto_mode = "disable",
importpath = "github.com/dsnet/compress",
sum = "h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=",
version = "v0.0.1",
)
go_repository(
name = "com_github_dsnet_golib",
build_file_proto_mode = "disable",
importpath = "github.com/dsnet/golib",
sum = "h1:tFh1tRc4CA31yP6qDcu+Trax5wW5GuMxvkIba07qVLY=",
version = "v0.0.0-20171103203638-1ea166775780",
)
go_repository(
name = "com_github_dustin_go_humanize",
build_file_proto_mode = "disable",
importpath = "github.com/dustin/go-humanize",
sum = "h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_eapache_go_resiliency",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/go-resiliency",
sum = "h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q=",
version = "v1.2.0",
)
go_repository(
name = "com_github_eapache_go_xerial_snappy",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/go-xerial-snappy",
sum = "h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw=",
version = "v0.0.0-20180814174437-776d5712da21",
)
go_repository(
name = "com_github_eapache_queue",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/queue",
sum = "h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc=",
version = "v1.1.0",
)
go_repository(
name = "com_github_elazarl_goproxy",
build_file_proto_mode = "disable",
importpath = "github.com/elazarl/goproxy",
sum = "h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc=",
version = "v0.0.0-20180725130230-947c36da3153",
)
go_repository(
name = "com_github_emicklei_go_restful",
build_file_proto_mode = "disable",
importpath = "github.com/emicklei/go-restful",
sum = "h1:spTtZBk5DYEvbxMVutUuTyh1Ao2r4iyvLdACqsl/Ljk=",
version = "v2.9.5+incompatible",
)
go_repository(
name = "com_github_emirpasic_gods",
build_file_proto_mode = "disable",
importpath = "github.com/emirpasic/gods",
sum = "h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=",
version = "v1.12.0",
)
go_repository(
name = "com_github_envoyproxy_go_control_plane",
build_file_proto_mode = "disable",
importpath = "github.com/envoyproxy/go-control-plane",
sum = "h1:rEvIZUSZ3fx39WIi3JkQqQBitGwpELBIYWeBVh6wn+E=",
version = "v0.9.4",
)
go_repository(
name = "com_github_envoyproxy_protoc_gen_validate",
build_file_proto_mode = "disable",
importpath = "github.com/envoyproxy/protoc-gen-validate",
sum = "h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A=",
version = "v0.1.0",
)
go_repository(
name = "com_github_erikstmartin_go_testdb",
build_file_proto_mode = "disable",
importpath = "github.com/erikstmartin/go-testdb",
sum = "h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=",
version = "v0.0.0-20160219214506-8d10e4a1bae5",
)
go_repository(
name = "com_github_evanphx_json_patch",
build_file_proto_mode = "disable",
importpath = "github.com/evanphx/json-patch",
sum = "h1:glyUF9yIYtMHzn8xaKw5rMhdWcwsYV8dZHIq5567/xs=",
version = "v4.11.0+incompatible",
)
go_repository(
name = "com_github_exponent_io_jsonpath",
build_file_proto_mode = "disable",
importpath = "github.com/exponent-io/jsonpath",
sum = "h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM=",
version = "v0.0.0-20151013193312-d6023ce2651d",
)
go_repository(
name = "com_github_fatih_camelcase",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/camelcase",
sum = "h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_fatih_color",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/color",
sum = "h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=",
version = "v1.9.0",
)
go_repository(
name = "com_github_fatih_structs",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/structs",
sum = "h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=",
version = "v1.1.0",
)
go_repository(
name = "com_github_felixge_fgprof",
build_file_proto_mode = "disable",
importpath = "github.com/felixge/fgprof",
sum = "h1:E6FUJ2Mlv043ipLOCFqo8+cHo9MhQ203E2cdEK/isEs=",
version = "v0.9.1",
)
go_repository(
name = "com_github_flynn_go_shlex",
build_file_proto_mode = "disable",
importpath = "github.com/flynn/go-shlex",
sum = "h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=",
version = "v0.0.0-20150515145356-3f9db97f8568",
)
go_repository(
name = "com_github_form3tech_oss_jwt_go",
build_file_proto_mode = "disable",
importpath = "github.com/form3tech-oss/jwt-go",
sum = "h1:TcekIExNqud5crz4xD2pavyTgWiPvpYe4Xau31I0PRk=",
version = "v3.2.2+incompatible",
)
go_repository(
name = "com_github_fortytw2_leaktest",
build_file_proto_mode = "disable",
importpath = "github.com/fortytw2/leaktest",
sum = "h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=",
version = "v1.3.0",
)
go_repository(
name = "com_github_frankban_quicktest",
build_file_proto_mode = "disable",
importpath = "github.com/frankban/quicktest",
sum = "h1:PvpJR0Uq8SdX+zagCMsarBMlhz6ysGTf1+pRmCsRXqY=",
version = "v1.8.1",
)
go_repository(
name = "com_github_fsnotify_fsnotify",
build_file_proto_mode = "disable",
importpath = "github.com/fsnotify/fsnotify",
sum = "h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=",
version = "v1.4.9",
)
go_repository(
name = "com_github_fsouza_fake_gcs_server",
build_file_proto_mode = "disable",
importpath = "github.com/fsouza/fake-gcs-server",
sum = "h1:3bRRh/rQnB2XbrMolHAj9oX/PFiWVQFVVfPR5y2pxb8=",
version = "v1.19.4",
)
go_repository(
name = "com_github_fvbommel_sortorder",
build_file_proto_mode = "disable",
importpath = "github.com/fvbommel/sortorder",
sum = "h1:dSnXLt4mJYH25uDDGa3biZNQsozaUWDSWeKJ0qqFfzE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_garyburd_redigo",
build_file_proto_mode = "disable",
importpath = "github.com/garyburd/redigo",
sum = "h1:LofdAjjjqCSXMwLGgOgnE+rdPuvX9DxCqaHwKy7i/ko=",
version = "v0.0.0-20150301180006-535138d7bcd7",
)
go_repository(
name = "com_github_ghodss_yaml",
build_file_proto_mode = "disable",
importpath = "github.com/ghodss/yaml",
sum = "h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_gliderlabs_ssh",
build_file_proto_mode = "disable",
importpath = "github.com/gliderlabs/ssh",
sum = "h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=",
version = "v0.2.2",
)
go_repository(
name = "com_github_globalsign_mgo",
build_file_proto_mode = "disable",
importpath = "github.com/globalsign/mgo",
sum = "h1:DujepqpGd1hyOd7aW59XpK7Qymp8iy83xq74fLr21is=",
version = "v0.0.0-20181015135952-eeefdecb41b8",
)
go_repository(
name = "com_github_go_bindata_go_bindata_v3",
build_file_proto_mode = "disable",
importpath = "github.com/go-bindata/go-bindata/v3",
sum = "h1:F0nVttLC3ws0ojc7p60veTurcOm//D4QBODNM7EGrCI=",
version = "v3.1.3",
)
go_repository(
name = "com_github_go_critic_go_critic",
build_file_proto_mode = "disable",
importpath = "github.com/go-critic/go-critic",
sum = "h1:sGEEdiuvLV0OC7/yC6MnK3K6LCPBplspK45B0XVdFAc=",
version = "v0.4.3",
)
go_repository(
name = "com_github_go_git_gcfg",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/gcfg",
sum = "h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=",
version = "v1.5.0",
)
go_repository(
name = "com_github_go_git_go_billy_v5",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-billy/v5",
sum = "h1:4pl5BV4o7ZG/lterP4S6WzJ6xr49Ba5ET9ygheTYahk=",
version = "v5.1.0",
)
go_repository(
name = "com_github_go_git_go_git_fixtures_v4",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-git-fixtures/v4",
sum = "h1:PbKy9zOy4aAKrJ5pibIRpVO2BXnK1Tlcg+caKI7Ox5M=",
version = "v4.0.2-0.20200613231340-f56387b50c12",
)
go_repository(
name = "com_github_go_git_go_git_v5",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-git/v5",
sum = "h1:8WKMtJR2j8RntEXR/uvTKagfEt4GYlwQ7mntE4+0GWc=",
version = "v5.3.0",
)
go_repository(
name = "com_github_go_gl_glfw",
build_file_proto_mode = "disable",
importpath = "github.com/go-gl/glfw",
sum = "h1:QbL/5oDUmRBzO9/Z7Seo6zf912W/a6Sr4Eu0G/3Jho0=",
version = "v0.0.0-20190409004039-e6da0acd62b1",
)
go_repository(
name = "com_github_go_gl_glfw_v3_3_glfw",
build_file_proto_mode = "disable",
importpath = "github.com/go-gl/glfw/v3.3/glfw",
sum = "h1:WtGNWLvXpe6ZudgnXrq0barxBImvnnJoMEhXAzcbM0I=",
version = "v0.0.0-20200222043503-6f7a984d4dc4",
)
go_repository(
name = "com_github_go_ini_ini",
build_file_proto_mode = "disable",
importpath = "github.com/go-ini/ini",
sum = "h1:0wVcG9udk2C3TGgmdIGKK9ScOZHZB5nbG+gwji9fhhc=",
version = "v1.55.0",
)
go_repository(
name = "com_github_go_kit_kit",
build_file_proto_mode = "disable",
importpath = "github.com/go-kit/kit",
sum = "h1:wDJmvq38kDhkVxi50ni9ykkdUr1PKgqKOoi01fa0Mdk=",
version = "v0.9.0",
)
go_repository(
name = "com_github_go_kit_log",
build_file_proto_mode = "disable",
importpath = "github.com/go-kit/log",
sum = "h1:DGJh0Sm43HbOeYDNnVZFl8BvcYVvjD5bqYJvp0REbwQ=",
version = "v0.1.0",
)
go_repository(
name = "com_github_go_ldap_ldap",
build_file_proto_mode = "disable",
importpath = "github.com/go-ldap/ldap",
sum = "h1:kD5HQcAzlQ7yrhfn+h+MSABeAy/jAJhvIJ/QDllP44g=",
version = "v3.0.2+incompatible",
)
go_repository(
name = "com_github_go_lintpack_lintpack",
build_file_proto_mode = "disable",
importpath = "github.com/go-lintpack/lintpack",
sum = "h1:DI5mA3+eKdWeJ40nU4d6Wc26qmdG8RCi/btYq0TuRN0=",
version = "v0.5.2",
)
go_repository(
name = "com_github_go_logfmt_logfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-logfmt/logfmt",
sum = "h1:TrB8swr/68K7m9CcGut2g3UOihhbcbiMAYiuTXdEih4=",
version = "v0.5.0",
)
go_repository(
name = "com_github_go_logr_logr",
build_file_proto_mode = "disable",
importpath = "github.com/go-logr/logr",
sum = "h1:K7/B1jt6fIBQVd4Owv2MqGQClcgf0R266+7C/QjRcLc=",
version = "v0.4.0",
)
go_repository(
name = "com_github_go_logr_zapr",
build_file_proto_mode = "disable",
importpath = "github.com/go-logr/zapr",
sum = "h1:uc1uML3hRYL9/ZZPdgHS/n8Nzo+eaYL/Efxkkamf7OM=",
version = "v0.4.0",
)
go_repository(
name = "com_github_go_ole_go_ole",
build_file_proto_mode = "disable",
importpath = "github.com/go-ole/go-ole",
sum = "h1:2lOsA72HgjxAuMlKpFiCbHTvu44PIVkZ5hqm3RSdI/E=",
version = "v1.2.1",
)
go_repository(
name = "com_github_go_openapi_analysis",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/analysis",
sum = "h1:8b2ZgKfKIUTVQpTb77MoRDIMEIwvDVw40o3aOXdfYzI=",
version = "v0.19.5",
)
go_repository(
name = "com_github_go_openapi_errors",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/errors",
sum = "h1:a2kIyV3w+OS3S97zxUndRVD46+FhGOUBDFY7nmu4CsY=",
version = "v0.19.2",
)
go_repository(
name = "com_github_go_openapi_jsonpointer",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/jsonpointer",
sum = "h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_jsonreference",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/jsonreference",
sum = "h1:5cxNfTy0UVC3X8JL5ymxzyoUZmo8iZb+jeTWn7tUa8o=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_loads",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/loads",
sum = "h1:5I4CCSqoWzT+82bBkNIvmLc0UOsoKKQ4Fz+3VxOB7SY=",
version = "v0.19.4",
)
go_repository(
name = "com_github_go_openapi_runtime",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/runtime",
sum = "h1:csnOgcgAiuGoM/Po7PEpKDoNulCcF3FGbSnbHfxgjMI=",
version = "v0.19.4",
)
go_repository(
name = "com_github_go_openapi_spec",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/spec",
sum = "h1:rMMMj8cV38KVXK7SFc+I2MWClbEfbK705+j+dyqun5g=",
version = "v0.19.6",
)
go_repository(
name = "com_github_go_openapi_strfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/strfmt",
sum = "h1:eRfyY5SkaNJCAwmmMcADjY31ow9+N7MCLW7oRkbsINA=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_swag",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/swag",
sum = "h1:VRuXN2EnMSsZdauzdss6JBC29YotDqG59BZ+tdlIL1s=",
version = "v0.19.7",
)
go_repository(
name = "com_github_go_openapi_validate",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/validate",
sum = "h1:QhCBKRYqZR+SKo4gl1lPhPahope8/RLt6EVgY8X80w0=",
version = "v0.19.5",
)
go_repository(
name = "com_github_go_sql_driver_mysql",
build_file_proto_mode = "disable",
importpath = "github.com/go-sql-driver/mysql",
sum = "h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=",
version = "v1.5.0",
)
go_repository(
name = "com_github_go_stack_stack",
build_file_proto_mode = "disable",
importpath = "github.com/go-stack/stack",
sum = "h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=",
version = "v1.8.0",
)
go_repository(
name = "com_github_go_task_slim_sprig",
build_file_proto_mode = "disable",
importpath = "github.com/go-task/slim-sprig",
sum = "h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I=",
version = "v0.0.0-20210107165309-348f09dbbbc0",
)
go_repository(
name = "com_github_go_test_deep",
build_file_proto_mode = "disable",
importpath = "github.com/go-test/deep",
sum = "h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M=",
version = "v1.0.7",
)
go_repository(
name = "com_github_go_toolsmith_astcast",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astcast",
sum = "h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astcopy",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astcopy",
sum = "h1:OMgl1b1MEpjFQ1m5ztEO06rz5CUd3oBv9RF7+DyvdG8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astequal",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astequal",
sum = "h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astfmt",
sum = "h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astinfo",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astinfo",
sum = "h1:wP6mXeB2V/d1P1K7bZ5vDUO3YqEzcvOREOxZPEu3gVI=",
version = "v0.0.0-20180906194353-9809ff7efb21",
)
go_repository(
name = "com_github_go_toolsmith_astp",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astp",
sum = "h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_pkgload",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/pkgload",
sum = "h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_strparse",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/strparse",
sum = "h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_typep",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/typep",
sum = "h1:8xdsa1+FSIH/RhEkgnD1j2CJOy5mNllW1Q9tRiYwvlk=",
version = "v1.0.2",
)
go_repository(
name = "com_github_go_xmlfmt_xmlfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-xmlfmt/xmlfmt",
sum = "h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo=",
version = "v0.0.0-20191208150333-d5b6f63a941b",
)
go_repository(
name = "com_github_go_yaml_yaml",
build_file_proto_mode = "disable",
importpath = "github.com/go-yaml/yaml",
sum = "h1:RYi2hDdss1u4YE7GwixGzWwVo47T8UQwnTLB6vQiq+o=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_gobuffalo_envy",
build_file_proto_mode = "disable",
importpath = "github.com/gobuffalo/envy",
sum = "h1:OQl5ys5MBea7OGCdvPbBJWRgnhC/fGona6QKfvFeau8=",
version = "v1.7.1",
)
go_repository(
name = "com_github_gobuffalo_flect",
build_file_proto_mode = "disable",
importpath = "github.com/gobuffalo/flect",
sum = "h1:EWCvMGGxOjsgwlWaP+f4+Hh6yrrte7JeFL2S6b+0hdM=",
version = "v0.2.0",
)
go_repository(
name = "com_github_gobwas_glob",
build_file_proto_mode = "disable",
importpath = "github.com/gobwas/glob",
sum = "h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=",
version = "v0.2.3",
)
go_repository(
name = "com_github_godbus_dbus",
build_file_proto_mode = "disable",
importpath = "github.com/godbus/dbus",
sum = "h1:BWhy2j3IXJhjCbC68FptL43tDKIq8FladmaTs3Xs7Z8=",
version = "v0.0.0-20190422162347-ade71ed3457e",
)
go_repository(
name = "com_github_gofrs_flock",
build_file_proto_mode = "disable",
importpath = "github.com/gofrs/flock",
sum = "h1:DP+LD/t0njgoPBvT5MJLeliUIVQR03hiKR6vezdwHlc=",
version = "v0.7.1",
)
go_repository(
name = "com_github_gogo_googleapis",
build_file_proto_mode = "disable",
importpath = "github.com/gogo/googleapis",
sum = "h1:kFkMAZBNAn4j7K0GiZr8cRYzejq68VbheufiV3YuyFI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_gogo_protobuf",
build_file_proto_mode = "disable",
importpath = "github.com/gogo/protobuf",
sum = "h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=",
version = "v1.3.2",
)
go_repository(
name = "com_github_golang_gddo",
build_file_proto_mode = "disable",
importpath = "github.com/golang/gddo",
sum = "h1:KRMr9A3qfbVM7iV/WcLY/rL5LICqwMHLhwRXKu99fXw=",
version = "v0.0.0-20190419222130-af0f2af80721",
)
go_repository(
name = "com_github_golang_glog",
build_file_proto_mode = "disable",
importpath = "github.com/golang/glog",
sum = "h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=",
version = "v0.0.0-20160126235308-23def4e6c14b",
)
go_repository(
name = "com_github_golang_groupcache",
build_file_proto_mode = "disable",
importpath = "github.com/golang/groupcache",
sum = "h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=",
version = "v0.0.0-20200121045136-8c9f03a8e57e",
)
go_repository(
name = "com_github_golang_lint",
build_file_proto_mode = "disable",
importpath = "github.com/golang/lint",
sum = "h1:2hRPrmiwPrp3fQX967rNJIhQPtiGXdlQWAxKbKw3VHA=",
version = "v0.0.0-20180702182130-06c8688daad7",
)
go_repository(
name = "com_github_golang_mock",
build_file_proto_mode = "disable",
importpath = "github.com/golang/mock",
sum = "h1:jlYHihg//f7RRwuPfptm04yp4s7O6Kw8EZiVYIGcH0g=",
version = "v1.5.0",
)
go_repository(
name = "com_github_golang_protobuf",
build_file_proto_mode = "disable",
importpath = "github.com/golang/protobuf",
sum = "h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=",
version = "v1.5.2",
)
go_repository(
name = "com_github_golang_snappy",
build_file_proto_mode = "disable",
importpath = "github.com/golang/snappy",
sum = "h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=",
version = "v0.0.1",
)
go_repository(
name = "com_github_golang_sql_civil",
build_file_proto_mode = "disable",
importpath = "github.com/golang-sql/civil",
sum = "h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY=",
version = "v0.0.0-20190719163853-cb61b32ac6fe",
)
go_repository(
name = "com_github_golangci_check",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/check",
sum = "h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0=",
version = "v0.0.0-20180506172741-cfe4005ccda2",
)
go_repository(
name = "com_github_golangci_dupl",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/dupl",
sum = "h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM=",
version = "v0.0.0-20180902072040-3e9179ac440a",
)
go_repository(
name = "com_github_golangci_errcheck",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/errcheck",
sum = "h1:YYWNAGTKWhKpcLLt7aSj/odlKrSrelQwlovBpDuf19w=",
version = "v0.0.0-20181223084120-ef45e06d44b6",
)
go_repository(
name = "com_github_golangci_go_misc",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/go-misc",
sum = "h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw=",
version = "v0.0.0-20180628070357-927a3d87b613",
)
go_repository(
name = "com_github_golangci_goconst",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/goconst",
sum = "h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8=",
version = "v0.0.0-20180610141641-041c5f2b40f3",
)
go_repository(
name = "com_github_golangci_gocyclo",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/gocyclo",
sum = "h1:pXTK/gkVNs7Zyy7WKgLXmpQ5bHTrq5GDsp8R9Qs67g0=",
version = "v0.0.0-20180528144436-0a533e8fa43d",
)
go_repository(
name = "com_github_golangci_gofmt",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/gofmt",
sum = "h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks=",
version = "v0.0.0-20190930125516-244bba706f1a",
)
go_repository(
name = "com_github_golangci_golangci_lint",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/golangci-lint",
sum = "h1:VYLx63qb+XJsHdZ27PMS2w5JZacN0XG8ffUwe7yQomo=",
version = "v1.27.0",
)
go_repository(
name = "com_github_golangci_ineffassign",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/ineffassign",
sum = "h1:gLLhTLMk2/SutryVJ6D4VZCU3CUqr8YloG7FPIBWFpI=",
version = "v0.0.0-20190609212857-42439a7714cc",
)
go_repository(
name = "com_github_golangci_lint_1",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/lint-1",
sum = "h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA=",
version = "v0.0.0-20191013205115-297bf364a8e0",
)
go_repository(
name = "com_github_golangci_maligned",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/maligned",
sum = "h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA=",
version = "v0.0.0-20180506175553-b1d89398deca",
)
go_repository(
name = "com_github_golangci_misspell",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/misspell",
sum = "h1:pLzmVdl3VxTOncgzHcvLOKirdvcx/TydsClUQXTehjo=",
version = "v0.3.5",
)
go_repository(
name = "com_github_golangci_prealloc",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/prealloc",
sum = "h1:leSNB7iYzLYSSx3J/s5sVf4Drkc68W2wm4Ixh/mr0us=",
version = "v0.0.0-20180630174525-215b22d4de21",
)
go_repository(
name = "com_github_golangci_revgrep",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/revgrep",
sum = "h1:XQKc8IYQOeRwVs36tDrEmTgDgP88d5iEURwpmtiAlOM=",
version = "v0.0.0-20180812185044-276a5c0a1039",
)
go_repository(
name = "com_github_golangci_unconvert",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/unconvert",
sum = "h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys=",
version = "v0.0.0-20180507085042-28b1c447d1f4",
)
go_repository(
name = "com_github_golangplus_bytes",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/bytes",
sum = "h1:7xqw01UYS+KCI25bMrPxwNYkSns2Db1ziQPpVq99FpE=",
version = "v0.0.0-20160111154220-45c989fe5450",
)
go_repository(
name = "com_github_golangplus_fmt",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/fmt",
sum = "h1:f5gsjBiF9tRRVomCvrkGMMWI8W1f2OBFar2c5oakAP0=",
version = "v0.0.0-20150411045040-2a5d6d7d2995",
)
go_repository(
name = "com_github_golangplus_testing",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/testing",
sum = "h1:KhcknUwkWHKZPbFy2P7jH5LKJ3La+0ZeknkkmrSgqb0=",
version = "v0.0.0-20180327235837-af21d9c3145e",
)
go_repository(
name = "com_github_gomodule_redigo",
build_file_proto_mode = "disable",
importpath = "github.com/gomodule/redigo",
sum = "h1:nRAxCa+SVsyjSBrtZmG/cqb6VbTmuRzpg/PoTFlpumc=",
version = "v1.8.5",
)
go_repository(
name = "com_github_google_btree",
build_file_proto_mode = "disable",
importpath = "github.com/google/btree",
sum = "h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_google_go_cmp",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-cmp",
sum = "h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=",
version = "v0.5.5",
)
go_repository(
name = "com_github_google_go_containerregistry",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-containerregistry",
sum = "h1:AG8FSAfXglim2l5qSrqp5VK2Xl03PiBf25NiTGGamws=",
version = "v0.1.1",
)
go_repository(
name = "com_github_google_go_github",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github",
sum = "h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY=",
version = "v17.0.0+incompatible",
)
go_repository(
name = "com_github_google_go_github_v27",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v27",
sum = "h1:oiOZuBmGHvrGM1X9uNUAUlLgp5r1UUO/M/KnbHnLRlQ=",
version = "v27.0.6",
)
go_repository(
name = "com_github_google_go_github_v28",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v28",
sum = "h1:kORf5ekX5qwXO2mGzXXOjMe/g6ap8ahVe0sBEulhSxo=",
version = "v28.1.1",
)
go_repository(
name = "com_github_google_go_github_v29",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v29",
sum = "h1:IktKCTwU//aFHnpA+2SLIi7Oo9uhAzgsdZNbcAqhgdc=",
version = "v29.0.3",
)
go_repository(
name = "com_github_google_go_github_v32",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v32",
sum = "h1:q74KVb22spUq0U5HqZ9VCYqQz8YRuOtL/39ZnfwO+NM=",
version = "v32.0.0",
)
go_repository(
name = "com_github_google_go_licenses",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-licenses",
sum = "h1:eVR9gT5gBPTHXeyGAcA8OF/SKNUFFg+a0BJqfx4z5eE=",
version = "v0.0.0-20200227160636-0fa8c766a591",
)
go_repository(
name = "com_github_google_go_querystring",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-querystring",
sum = "h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=",
version = "v1.1.0",
)
go_repository(
name = "com_github_google_go_replayers_grpcreplay",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-replayers/grpcreplay",
sum = "h1:eNb1y9rZFmY4ax45uEEECSa8fsxGRU+8Bil52ASAwic=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_go_replayers_httpreplay",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-replayers/httpreplay",
sum = "h1:AX7FUb4BjrrzNvblr/OlgwrmFiep6soj5K2QSDW7BGk=",
version = "v0.1.0",
)
# NOTE(review): this is the only entry in the generated list that sets
# build_file_proto_mode = "disable_global" (all siblings use "disable"),
# and the only one pinned to a pre-release pseudo-version rather than a
# tagged release. Presumably a deliberate manual override of the Gazelle
# output — confirm, and be aware `gazelle update-repos` may revert it.
go_repository(
    name = "com_github_google_gofuzz",
    build_file_proto_mode = "disable_global",
    importpath = "github.com/google/gofuzz",
    sum = "h1:VcIYpAGBae3Z6BVncE0OnTE/ZjlDXqtYhOZky88neLM=",
    version = "v1.2.1-0.20210504230335-f78f29fc09ea",
)
go_repository(
name = "com_github_google_licenseclassifier",
build_file_proto_mode = "disable",
importpath = "github.com/google/licenseclassifier",
sum = "h1:OggOMmdI0JLwg1FkOKH9S7fVHF0oEm8PX6S8kAdpOps=",
version = "v0.0.0-20200402202327-879cb1424de0",
)
go_repository(
name = "com_github_google_mako",
build_file_proto_mode = "disable",
importpath = "github.com/google/mako",
sum = "h1:/o5e44nTD/QEEiWPGSFT3bSqcq3Qg7q27N9bv4gKh5M=",
version = "v0.0.0-20190821191249-122f8dcef9e3",
)
go_repository(
name = "com_github_google_martian",
build_file_proto_mode = "disable",
importpath = "github.com/google/martian",
sum = "h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE=",
version = "v2.1.1-0.20190517191504-25dcb96d9e51+incompatible",
)
go_repository(
name = "com_github_google_martian_v3",
build_file_proto_mode = "disable",
importpath = "github.com/google/martian/v3",
sum = "h1:pMen7vLs8nvgEYhywH3KDWJIJTeEr2ULsVWHWYHQyBs=",
version = "v3.0.0",
)
go_repository(
name = "com_github_google_pprof",
build_file_proto_mode = "disable",
importpath = "github.com/google/pprof",
sum = "h1:k+KkMRk8mGOu1xG38StS7dQ+Z6oW1i9n3dgrAVU9Q/E=",
version = "v0.0.0-20200905233945-acf8798be1f7",
)
go_repository(
name = "com_github_google_renameio",
build_file_proto_mode = "disable",
importpath = "github.com/google/renameio",
sum = "h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_rpmpack",
build_file_proto_mode = "disable",
importpath = "github.com/google/rpmpack",
sum = "h1:BW6OvS3kpT5UEPbCZ+KyX/OB4Ks9/MNMhWjqPPkZxsE=",
version = "v0.0.0-20191226140753-aa36bfddb3a0",
)
go_repository(
name = "com_github_google_subcommands",
build_file_proto_mode = "disable",
importpath = "github.com/google/subcommands",
sum = "h1:/eqq+otEXm5vhfBrbREPCSVQbvofip6kIz+mX5TUH7k=",
version = "v1.0.1",
)
go_repository(
name = "com_github_google_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/google/uuid",
sum = "h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs=",
version = "v1.2.0",
)
go_repository(
name = "com_github_google_wire",
build_file_proto_mode = "disable",
importpath = "github.com/google/wire",
sum = "h1:kXcsA/rIGzJImVqPdhfnr6q0xsS9gU0515q1EPpJ9fE=",
version = "v0.4.0",
)
go_repository(
name = "com_github_googleapis_gax_go",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gax-go",
sum = "h1:silFMLAnr330+NRuag/VjIGF7TLp/LBrV2CJKFLWEww=",
version = "v2.0.2+incompatible",
)
go_repository(
name = "com_github_googleapis_gax_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gax-go/v2",
sum = "h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=",
version = "v2.0.5",
)
# NOTE(review): gnostic is special-cased relative to every sibling entry:
# build_file_generation = "on" forces Gazelle to (re)generate BUILD files
# for this repository, and `replace` targets the same import path — which
# presumably exists to pin the module at exactly v0.4.1 regardless of what
# the module graph would otherwise select. Confirm both overrides survive
# a `gazelle update-repos` regeneration.
go_repository(
    name = "com_github_googleapis_gnostic",
    build_file_generation = "on",
    build_file_proto_mode = "disable",
    importpath = "github.com/googleapis/gnostic",
    replace = "github.com/googleapis/gnostic",
    sum = "h1:DLJCy1n/vrD4HPjOvYcT8aYQXpPIzoRZONaYwyycI+I=",
    version = "v0.4.1",
)
go_repository(
name = "com_github_googlecloudplatform_cloud_builders_gcs_fetcher",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/cloud-builders/gcs-fetcher",
sum = "h1:Pjo3SOZigEnIGevhFqcbFndnqyCH8WimcREd3hRM9vU=",
version = "v0.0.0-20191203181535-308b93ad1f39",
)
go_repository(
name = "com_github_googlecloudplatform_cloudsql_proxy",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/cloudsql-proxy",
sum = "h1:sTOp2Ajiew5XIH92YSdwhYc+bgpUX5j5TKK/Ac8Saw8=",
version = "v0.0.0-20191009163259-e802c2cb94ae",
)
go_repository(
name = "com_github_googlecloudplatform_k8s_cloud_provider",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/k8s-cloud-provider",
sum = "h1:N7lSsF+R7wSulUADi36SInSQA3RvfO/XclHQfedr0qk=",
version = "v0.0.0-20190822182118-27a4ced34534",
)
go_repository(
name = "com_github_googlecloudplatform_testgrid",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/testgrid",
sum = "h1:qs3/BQpz3j3qsgnfjV8aVBfPopkGxp/TnWjjiboUVf8=",
version = "v0.0.68",
)
go_repository(
name = "com_github_gookit_color",
build_file_proto_mode = "disable",
importpath = "github.com/gookit/color",
sum = "h1:xOYBan3Fwlrqj1M1UN2TlHOCRiek3bGzWf/vPnJ1roE=",
version = "v1.2.4",
)
go_repository(
name = "com_github_gophercloud_gophercloud",
build_file_proto_mode = "disable",
importpath = "github.com/gophercloud/gophercloud",
sum = "h1:P/nh25+rzXouhytV2pUHBb65fnds26Ghl8/391+sT5o=",
version = "v0.1.0",
)
go_repository(
name = "com_github_gopherjs_gopherjs",
build_file_proto_mode = "disable",
importpath = "github.com/gopherjs/gopherjs",
sum = "h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=",
version = "v0.0.0-20181017120253-0766667cb4d1",
)
go_repository(
name = "com_github_goreleaser_goreleaser",
build_file_proto_mode = "disable",
importpath = "github.com/goreleaser/goreleaser",
sum = "h1:Z+7XPrfGK11s/Sp+a06sx2FzGuCjTBdxN2ubpGvQbjY=",
version = "v0.136.0",
)
go_repository(
name = "com_github_goreleaser_nfpm",
build_file_proto_mode = "disable",
importpath = "github.com/goreleaser/nfpm",
sum = "h1:BPwIomC+e+yuDX9poJowzV7JFVcYA0+LwGSkbAPs2Hw=",
version = "v1.3.0",
)
go_repository(
name = "com_github_gorilla_context",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/context",
sum = "h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=",
version = "v1.1.1",
)
go_repository(
name = "com_github_gorilla_csrf",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/csrf",
sum = "h1:QqQ/OWwuFp4jMKgBFAzJVW3FMULdyUW7JoM4pEWuqKg=",
version = "v1.6.2",
)
go_repository(
name = "com_github_gorilla_handlers",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/handlers",
sum = "h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg=",
version = "v1.4.2",
)
go_repository(
name = "com_github_gorilla_mux",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/mux",
sum = "h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=",
version = "v1.8.0",
)
go_repository(
name = "com_github_gorilla_securecookie",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/securecookie",
sum = "h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=",
version = "v1.1.1",
)
go_repository(
name = "com_github_gorilla_sessions",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/sessions",
sum = "h1:S7P+1Hm5V/AT9cjEcUD5uDaQSX0OE577aCXgoaKpYbQ=",
version = "v1.2.0",
)
go_repository(
name = "com_github_gorilla_websocket",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/websocket",
sum = "h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=",
version = "v1.4.2",
)
go_repository(
name = "com_github_gostaticanalysis_analysisutil",
build_file_proto_mode = "disable",
importpath = "github.com/gostaticanalysis/analysisutil",
sum = "h1:iwp+5/UAyzQSFgQ4uR2sni99sJ8Eo9DEacKWM5pekIg=",
version = "v0.0.3",
)
go_repository(
name = "com_github_gosuri_uitable",
build_file_proto_mode = "disable",
importpath = "github.com/gosuri/uitable",
sum = "h1:IG2xLKRvErL3uhY6e1BylFzG+aJiwQviDDTfOKeKTpY=",
version = "v0.0.4",
)
go_repository(
name = "com_github_gotestyourself_gotestyourself",
build_file_proto_mode = "disable",
importpath = "github.com/gotestyourself/gotestyourself",
sum = "h1:AQwinXlbQR2HvPjQZOmDhRqsv5mZf+Jb1RnSLxcqZcI=",
version = "v2.2.0+incompatible",
)
go_repository(
name = "com_github_gregjones_httpcache",
build_file_proto_mode = "disable",
importpath = "github.com/gregjones/httpcache",
sum = "h1:f8eY6cV/x1x+HLjOp4r72s/31/V2aTUtg5oKRRPf8/Q=",
version = "v0.0.0-20190212212710-3befbb6ad0cc",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_middleware",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/go-grpc-middleware",
sum = "h1:z53tR0945TRRQO/fLEVPI6SMv7ZflF0TEaTAoU7tOzg=",
version = "v1.0.1-0.20190118093823-f849b5445de4",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_prometheus",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/go-grpc-prometheus",
sum = "h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=",
version = "v1.2.0",
)
go_repository(
name = "com_github_grpc_ecosystem_grpc_gateway",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/grpc-gateway",
sum = "h1:D0EVSTwQoQOyfY35QNSuPJA4jpZRtkoGYWQMB7XNg5o=",
version = "v1.12.2",
)
go_repository(
name = "com_github_h2non_gock",
build_file_proto_mode = "disable",
importpath = "github.com/h2non/gock",
sum = "h1:17gCehSo8ZOgEsFKpQgqHiR7VLyjxdAG3lkhVvO9QZU=",
version = "v1.0.9",
)
go_repository(
name = "com_github_hashicorp_consul_api",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/consul/api",
sum = "h1:BNQPM9ytxj6jbjjdRPioQ94T6YXriSopn0i8COv6SRA=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_consul_sdk",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/consul/sdk",
sum = "h1:LnuDWGNsoajlhGyHJvuWW6FVqRl8JOTPqS6CPTsYjhY=",
version = "v0.1.1",
)
go_repository(
name = "com_github_hashicorp_errwrap",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/errwrap",
sum = "h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_cleanhttp",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-cleanhttp",
sum = "h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=",
version = "v0.5.1",
)
go_repository(
name = "com_github_hashicorp_go_hclog",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-hclog",
sum = "h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=",
version = "v0.9.2",
)
go_repository(
name = "com_github_hashicorp_go_immutable_radix",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-immutable-radix",
sum = "h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_msgpack",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-msgpack",
sum = "h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4=",
version = "v0.5.3",
)
go_repository(
name = "com_github_hashicorp_go_multierror",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-multierror",
sum = "h1:B9UzwGQJehnUY1yNrnwREHc3fGbC2xefo8g4TbElacI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_go_net",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go.net",
sum = "h1:sNCoNyDEvN1xa+X0baata4RdcpKwcMS6DH+xwfqPgjw=",
version = "v0.0.1",
)
go_repository(
name = "com_github_hashicorp_go_plugin",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-plugin",
sum = "h1:4OtAfUGbnKC6yS48p0CtMX2oFYtzFZVv6rok3cRWgnE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_retryablehttp",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-retryablehttp",
sum = "h1:HJunrbHTDDbBb/ay4kxa1n+dLmttUlnP3V9oNE4hmsM=",
version = "v0.6.6",
)
go_repository(
name = "com_github_hashicorp_go_rootcerts",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-rootcerts",
sum = "h1:DMo4fmknnz0E0evoNYnV48RjWndOsmd6OW+09R3cEP8=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_sockaddr",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-sockaddr",
sum = "h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc=",
version = "v1.0.2",
)
go_repository(
name = "com_github_hashicorp_go_syslog",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-syslog",
sum = "h1:KaodqZuhUoZereWVIYmpUgZysurB1kBLX2j0MwMrUAE=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-uuid",
sum = "h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_version",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-version",
sum = "h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E=",
version = "v1.2.0",
)
go_repository(
name = "com_github_hashicorp_golang_lru",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/golang-lru",
sum = "h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=",
version = "v0.5.4",
)
go_repository(
name = "com_github_hashicorp_hcl",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/hcl",
sum = "h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_logutils",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/logutils",
sum = "h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_mdns",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/mdns",
sum = "h1:WhIgCr5a7AaVH6jPUwjtRuuE7/RDufnUvzIr48smyxs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_memberlist",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/memberlist",
sum = "h1:EmmoJme1matNzb+hMpDuR/0sbJSUisxyqBGG676r31M=",
version = "v0.1.3",
)
go_repository(
name = "com_github_hashicorp_serf",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/serf",
sum = "h1:YZ7UKsJv+hKjqGVUUbtE3HNj79Eln2oQ75tniF6iPt0=",
version = "v0.8.2",
)
go_repository(
name = "com_github_hashicorp_vault_api",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/vault/api",
sum = "h1:j08Or/wryXT4AcHj1oCbMd7IijXcKzYUGw59LGu9onU=",
version = "v1.0.4",
)
go_repository(
name = "com_github_hashicorp_vault_sdk",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/vault/sdk",
sum = "h1:mOEPeOhT7jl0J4AMl1E705+BcmeRs1VmKNb9F0sMLy8=",
version = "v0.1.13",
)
go_repository(
name = "com_github_hashicorp_yamux",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/yamux",
sum = "h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ=",
version = "v0.0.0-20181012175058-2f1d1f20f75d",
)
go_repository(
name = "com_github_howeyc_gopass",
build_file_proto_mode = "disable",
importpath = "github.com/howeyc/gopass",
sum = "h1:kQWxfPIHVLbgLzphqk3QUflDy9QdksZR4ygR807bpy0=",
version = "v0.0.0-20170109162249-bf9dde6d0d2c",
)
go_repository(
name = "com_github_hpcloud_tail",
build_file_proto_mode = "disable",
importpath = "github.com/hpcloud/tail",
sum = "h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_huandu_xstrings",
build_file_proto_mode = "disable",
importpath = "github.com/huandu/xstrings",
sum = "h1:yPeWdRnmynF7p+lLYz0H2tthW9lqhMJrQV/U7yy4wX0=",
version = "v1.2.0",
)
go_repository(
name = "com_github_ianlancetaylor_demangle",
build_file_proto_mode = "disable",
importpath = "github.com/ianlancetaylor/demangle",
sum = "h1:UDMh68UUwekSh5iP2OMhRRZJiiBccgV7axzUG8vi56c=",
version = "v0.0.0-20181102032728-5e5cf60278f6",
)
go_repository(
name = "com_github_imdario_mergo",
build_file_proto_mode = "disable",
importpath = "github.com/imdario/mergo",
sum = "h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=",
version = "v0.3.12",
)
go_repository(
name = "com_github_inconshreveable_mousetrap",
build_file_proto_mode = "disable",
importpath = "github.com/inconshreveable/mousetrap",
sum = "h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_influxdata_influxdb",
build_file_proto_mode = "disable",
importpath = "github.com/influxdata/influxdb",
sum = "h1:AciJ2ei/llFRundm7CtqwF6B2aOds1A7QG3sMW8QiaQ=",
version = "v0.0.0-20161215172503-049f9b42e9a5",
)
go_repository(
name = "com_github_influxdata_tdigest",
build_file_proto_mode = "disable",
importpath = "github.com/influxdata/tdigest",
sum = "h1:MHTrDWmQpHq/hkq+7cw9oYAt2PqUw52TZazRA0N7PGE=",
version = "v0.0.0-20181121200506-bf2b5ad3c0a9",
)
go_repository(
name = "com_github_jarcoal_httpmock",
build_file_proto_mode = "disable",
importpath = "github.com/jarcoal/httpmock",
sum = "h1:cHtVEcTxRSX4J0je7mWPfc9BpDpqzXSJ5HbymZmyHck=",
version = "v1.0.5",
)
go_repository(
name = "com_github_jbenet_go_context",
build_file_proto_mode = "disable",
importpath = "github.com/jbenet/go-context",
sum = "h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=",
version = "v0.0.0-20150711004518-d14ea06fba99",
)
go_repository(
name = "com_github_jcmturner_gofork",
build_file_proto_mode = "disable",
importpath = "github.com/jcmturner/gofork",
sum = "h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_jenkins_x_go_scm",
build_file_proto_mode = "disable",
importpath = "github.com/jenkins-x/go-scm",
sum = "h1:D7d1sDWUU+xocCNLQVoYKpMjVKnQvsPva+hPzruchbM=",
version = "v1.5.117",
)
go_repository(
name = "com_github_jessevdk_go_flags",
build_file_proto_mode = "disable",
importpath = "github.com/jessevdk/go-flags",
sum = "h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=",
version = "v1.5.0",
)
go_repository(
name = "com_github_jetstack_cert_manager",
build_file_proto_mode = "disable",
importpath = "github.com/jetstack/cert-manager",
sum = "h1:gEhBV9I83m+kpQShDhNO4+J8O2qfNDjvAEL27pThGmg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_jingyugao_rowserrcheck",
build_file_proto_mode = "disable",
importpath = "github.com/jingyugao/rowserrcheck",
sum = "h1:GmsqmapfzSJkm28dhRoHz2tLRbJmqhU86IPgBtN3mmk=",
version = "v0.0.0-20191204022205-72ab7603b68a",
)
go_repository(
name = "com_github_jinzhu_gorm",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/gorm",
sum = "h1:Drgk1clyWT9t9ERbzHza6Mj/8FY/CqMyVzOiHviMo6Q=",
version = "v1.9.12",
)
go_repository(
name = "com_github_jinzhu_inflection",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/inflection",
sum = "h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=",
version = "v1.0.0",
)
go_repository(
name = "com_github_jinzhu_now",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/now",
sum = "h1:g39TucaRWyV3dwDO++eEc6qf8TVIQ/Da48WmqjZ3i7E=",
version = "v1.1.1",
)
go_repository(
name = "com_github_jirfag_go_printf_func_name",
build_file_proto_mode = "disable",
importpath = "github.com/jirfag/go-printf-func-name",
sum = "h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48=",
version = "v0.0.0-20200119135958-7558a9eaa5af",
)
go_repository(
name = "com_github_jmespath_go_jmespath",
build_file_proto_mode = "disable",
importpath = "github.com/jmespath/go-jmespath",
sum = "h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=",
version = "v0.4.0",
)
go_repository(
name = "com_github_jmespath_go_jmespath_internal_testify",
build_file_proto_mode = "disable",
importpath = "github.com/jmespath/go-jmespath/internal/testify",
sum = "h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=",
version = "v1.5.1",
)
go_repository(
name = "com_github_jmoiron_sqlx",
build_file_proto_mode = "disable",
importpath = "github.com/jmoiron/sqlx",
sum = "h1:lrdPtrORjGv1HbbEvKWDUAy97mPpFm4B8hp77tcCUJY=",
version = "v1.2.1-0.20190826204134-d7d95172beb5",
)
go_repository(
name = "com_github_joefitzgerald_rainbow_reporter",
build_file_proto_mode = "disable",
importpath = "github.com/joefitzgerald/rainbow-reporter",
sum = "h1:AuMG652zjdzI0YCCnXAqATtRBpGXMcAnrajcaTrSeuo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_joho_godotenv",
build_file_proto_mode = "disable",
importpath = "github.com/joho/godotenv",
sum = "h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=",
version = "v1.3.0",
)
go_repository(
name = "com_github_jonboulle_clockwork",
build_file_proto_mode = "disable",
importpath = "github.com/jonboulle/clockwork",
sum = "h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_joshdk_go_junit",
build_file_proto_mode = "disable",
importpath = "github.com/joshdk/go-junit",
sum = "h1:Bp5LAZasx/ev9wUmIIC74+MsXgwD99VjV1JmDVbpJm8=",
version = "v0.0.0-20190428045703-ad7e11aa49ff",
)
go_repository(
name = "com_github_jpillora_backoff",
build_file_proto_mode = "disable",
importpath = "github.com/jpillora/backoff",
sum = "h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_json_iterator_go",
build_file_proto_mode = "disable",
importpath = "github.com/json-iterator/go",
sum = "h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ=",
version = "v1.1.11",
)
go_repository(
name = "com_github_jstemmer_go_junit_report",
build_file_proto_mode = "disable",
importpath = "github.com/jstemmer/go-junit-report",
sum = "h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=",
version = "v0.9.1",
)
go_repository(
name = "com_github_jtolds_gls",
build_file_proto_mode = "disable",
importpath = "github.com/jtolds/gls",
sum = "h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=",
version = "v4.20.0+incompatible",
)
go_repository(
name = "com_github_julienschmidt_httprouter",
build_file_proto_mode = "disable",
importpath = "github.com/julienschmidt/httprouter",
sum = "h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U=",
version = "v1.3.0",
)
go_repository(
name = "com_github_kballard_go_shellquote",
build_file_proto_mode = "disable",
importpath = "github.com/kballard/go-shellquote",
sum = "h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=",
version = "v0.0.0-20180428030007-95032a82bc51",
)
go_repository(
name = "com_github_kelseyhightower_envconfig",
build_file_proto_mode = "disable",
importpath = "github.com/kelseyhightower/envconfig",
sum = "h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=",
version = "v1.4.0",
)
go_repository(
name = "com_github_kevinburke_ssh_config",
build_file_proto_mode = "disable",
importpath = "github.com/kevinburke/ssh_config",
sum = "h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck=",
version = "v0.0.0-20201106050909-4977a11b4351",
)
go_repository(
name = "com_github_kisielk_errcheck",
build_file_proto_mode = "disable",
importpath = "github.com/kisielk/errcheck",
sum = "h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY=",
version = "v1.5.0",
)
go_repository(
name = "com_github_kisielk_gotool",
build_file_proto_mode = "disable",
importpath = "github.com/kisielk/gotool",
sum = "h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_klauspost_compress",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/compress",
sum = "h1:Znfn6hXZAHaLPNnlqUYRrBSReFHYybslgv4PTiyz6P0=",
version = "v1.10.2",
)
go_repository(
name = "com_github_klauspost_cpuid",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/cpuid",
sum = "h1:1xAgYebNnsb9LKCdLOvFWtAxGU/33mjJtyOVbmUa0Us=",
version = "v1.2.2",
)
go_repository(
name = "com_github_klauspost_pgzip",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/pgzip",
sum = "h1:oIPZROsWuPHpOdMVWLuJZXwgjhrW8r1yEX8UqMyeNHM=",
version = "v1.2.1",
)
go_repository(
name = "com_github_knative_build",
build_file_proto_mode = "disable",
importpath = "github.com/knative/build",
sum = "h1:o/VYWA3HKyZlNqdU2hDE5LHpanBe8gazgPKL97XJ6bo=",
version = "v0.1.2",
)
go_repository(
name = "com_github_konsorten_go_windows_terminal_sequences",
build_file_proto_mode = "disable",
importpath = "github.com/konsorten/go-windows-terminal-sequences",
sum = "h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=",
version = "v1.0.3",
)
go_repository(
name = "com_github_kr_logfmt",
build_file_proto_mode = "disable",
importpath = "github.com/kr/logfmt",
sum = "h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=",
version = "v0.0.0-20140226030751-b84e30acd515",
)
go_repository(
name = "com_github_kr_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/kr/pretty",
sum = "h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=",
version = "v0.2.1",
)
go_repository(
name = "com_github_kr_pty",
build_file_proto_mode = "disable",
importpath = "github.com/kr/pty",
sum = "h1:AkaSdXYQOWeaO3neb8EM634ahkXXe3jYbVh/F9lq+GI=",
version = "v1.1.8",
)
go_repository(
name = "com_github_kr_text",
build_file_proto_mode = "disable",
importpath = "github.com/kr/text",
sum = "h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=",
version = "v0.2.0",
)
go_repository(
name = "com_github_lib_pq",
build_file_proto_mode = "disable",
importpath = "github.com/lib/pq",
sum = "h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=",
version = "v1.2.0",
)
go_repository(
name = "com_github_liggitt_tabwriter",
build_file_proto_mode = "disable",
importpath = "github.com/liggitt/tabwriter",
sum = "h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0=",
version = "v0.0.0-20181228230101-89fcab3d43de",
)
go_repository(
name = "com_github_lightstep_tracecontext_go",
build_file_proto_mode = "disable",
importpath = "github.com/lightstep/tracecontext.go",
sum = "h1:+2b6iGRJe3hvV/yVXrd41yVEjxuFHxasJqDhkIjS4gk=",
version = "v0.0.0-20181129014701-1757c391b1ac",
)
go_repository(
name = "com_github_lithammer_dedent",
build_file_proto_mode = "disable",
importpath = "github.com/lithammer/dedent",
sum = "h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY=",
version = "v1.1.0",
)
go_repository(
name = "com_github_logrusorgru_aurora",
build_file_proto_mode = "disable",
importpath = "github.com/logrusorgru/aurora",
sum = "h1:9MlwzLdW7QSDrhDjFlsEYmxpFyIoXmYRon3dt0io31k=",
version = "v0.0.0-20181002194514-a7b3b318ed4e",
)
go_repository(
name = "com_github_lyft_protoc_gen_validate",
build_file_proto_mode = "disable",
importpath = "github.com/lyft/protoc-gen-validate",
sum = "h1:KNt/RhmQTOLr7Aj8PsJ7mTronaFyx80mRTT9qF261dA=",
version = "v0.0.13",
)
go_repository(
name = "com_github_magiconair_properties",
build_file_proto_mode = "disable",
importpath = "github.com/magiconair/properties",
sum = "h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=",
version = "v1.8.1",
)
go_repository(
name = "com_github_mailru_easyjson",
build_file_proto_mode = "disable",
importpath = "github.com/mailru/easyjson",
sum = "h1:jcoUdG1TzY/M/eM5BLFLP8DJeMximx5NQYSlLL9YeWc=",
version = "v0.7.1-0.20191009090205-6c0755d89d1e",
)
go_repository(
name = "com_github_makenowjust_heredoc",
build_file_proto_mode = "disable",
importpath = "github.com/MakeNowJust/heredoc",
sum = "h1:sjQovDkwrZp8u+gxLtPgKGjk5hCxuy2hrRejBTA9xFU=",
version = "v0.0.0-20170808103936-bb23615498cd",
)
go_repository(
name = "com_github_maratori_testpackage",
build_file_proto_mode = "disable",
importpath = "github.com/maratori/testpackage",
sum = "h1:QtJ5ZjqapShm0w5DosRjg0PRlSdAdlx+W6cCKoALdbQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_markbates_inflect",
build_file_proto_mode = "disable",
importpath = "github.com/markbates/inflect",
sum = "h1:5fh1gzTFhfae06u3hzHYO9xe3l3v3nW5Pwt3naLTP5g=",
version = "v1.0.4",
)
go_repository(
name = "com_github_marstr_guid",
build_file_proto_mode = "disable",
importpath = "github.com/marstr/guid",
sum = "h1:/M4H/1G4avsieL6BbUwCOBzulmoeKVP5ux/3mQNnbyI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_masterminds_goutils",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/goutils",
sum = "h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RPBiVg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_masterminds_semver",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/semver",
sum = "h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=",
version = "v1.5.0",
)
go_repository(
name = "com_github_masterminds_semver_v3",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/semver/v3",
sum = "h1:Y2lUDsFKVRSYGojLJ1yLxSXdMmMYTYls0rCvoqmMUQk=",
version = "v3.1.0",
)
go_repository(
name = "com_github_masterminds_sprig_v3",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/sprig/v3",
sum = "h1:wz22D0CiSctrliXiI9ZO3HoNApweeRGftyDN+BQa3B8=",
version = "v3.0.2",
)
go_repository(
name = "com_github_masterminds_vcs",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/vcs",
sum = "h1:NL3G1X7/7xduQtA2sJLpVpfHTNBALVNSjob6KEjPXNQ=",
version = "v1.13.1",
)
go_repository(
name = "com_github_matoous_godox",
build_file_proto_mode = "disable",
importpath = "github.com/matoous/godox",
sum = "h1:RHba4YImhrUVQDHUCe2BNSOz4tVy2yGyXhvYDvxGgeE=",
version = "v0.0.0-20190911065817-5d6d842e92eb",
)
go_repository(
name = "com_github_mattbaird_jsonpatch",
build_file_proto_mode = "disable",
importpath = "github.com/mattbaird/jsonpatch",
sum = "h1:+J2gw7Bw77w/fbK7wnNJJDKmw1IbWft2Ul5BzrG1Qm8=",
version = "v0.0.0-20171005235357-81af80346b1a",
)
go_repository(
name = "com_github_mattn_go_colorable",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-colorable",
sum = "h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=",
version = "v0.1.6",
)
go_repository(
name = "com_github_mattn_go_ieproxy",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-ieproxy",
sum = "h1:qiyop7gCflfhwCzGyeT0gro3sF9AIg9HU98JORTkqfI=",
version = "v0.0.1",
)
go_repository(
name = "com_github_mattn_go_isatty",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-isatty",
sum = "h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=",
version = "v0.0.12",
)
go_repository(
name = "com_github_mattn_go_runewidth",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-runewidth",
sum = "h1:3tS41NlGYSmhhe/8fhGRzc+z3AYCw1Fe1WAyLuujKs0=",
version = "v0.0.8",
)
go_repository(
name = "com_github_mattn_go_shellwords",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-shellwords",
sum = "h1:Y7Xqm8piKOO3v10Thp7Z36h4FYFjt5xB//6XvOrs2Gw=",
version = "v1.0.10",
)
go_repository(
name = "com_github_mattn_go_sqlite3",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-sqlite3",
sum = "h1:xQ15muvnzGBHpIpdrNi1DA5x0+TcBZzsIDwmw9uTHzw=",
version = "v2.0.1+incompatible",
)
go_repository(
name = "com_github_mattn_go_zglob",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-zglob",
sum = "h1:0qT24o2wsZ8cOXQAERwBX6s+rPMs/bJTKxLVVtgfDXc=",
version = "v0.0.2",
)
go_repository(
name = "com_github_mattn_goveralls",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/goveralls",
sum = "h1:7eJB6EqsPhRVxvwEXGnqdO2sJI0PTsrWoTMXEk9/OQc=",
version = "v0.0.2",
)
go_repository(
name = "com_github_matttproud_golang_protobuf_extensions",
build_file_proto_mode = "disable",
importpath = "github.com/matttproud/golang_protobuf_extensions",
sum = "h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI=",
version = "v1.0.2-0.20181231171920-c182affec369",
)
go_repository(
name = "com_github_maxbrunsfeld_counterfeiter_v6",
build_file_proto_mode = "disable",
importpath = "github.com/maxbrunsfeld/counterfeiter/v6",
sum = "h1:g+4J5sZg6osfvEfkRZxJ1em0VT95/UOZgi/l7zi1/oE=",
version = "v6.2.2",
)
go_repository(
name = "com_github_mgutz_ansi",
build_file_proto_mode = "disable",
importpath = "github.com/mgutz/ansi",
sum = "h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=",
version = "v0.0.0-20170206155736-9520e82c474b",
)
go_repository(
name = "com_github_mholt_archiver_v3",
build_file_proto_mode = "disable",
importpath = "github.com/mholt/archiver/v3",
sum = "h1:vWjhY8SQp5yzM9P6OJ/eZEkmi3UAbRrxCq48MxjAzig=",
version = "v3.3.0",
)
go_repository(
name = "com_github_microsoft_go_winio",
build_file_proto_mode = "disable",
importpath = "github.com/Microsoft/go-winio",
sum = "h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk=",
version = "v0.4.16",
)
go_repository(
name = "com_github_microsoft_hcsshim",
build_file_proto_mode = "disable",
importpath = "github.com/Microsoft/hcsshim",
sum = "h1:ptnOoufxGSzauVTsdE+wMYnCWA301PdoN4xg5oRdZpg=",
version = "v0.8.7",
)
go_repository(
name = "com_github_miekg_dns",
build_file_proto_mode = "disable",
importpath = "github.com/miekg/dns",
sum = "h1:sJFOl9BgwbYAWOGEwr61FU28pqsBNdpRBnhGXtO06Oo=",
version = "v1.1.31",
)
go_repository(
name = "com_github_mitchellh_cli",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/cli",
sum = "h1:iGBIsUe3+HZ/AD/Vd7DErOt5sU9fa8Uj7A2s1aggv1Y=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_copystructure",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/copystructure",
sum = "h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_go_homedir",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-homedir",
sum = "h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=",
version = "v1.1.0",
)
go_repository(
name = "com_github_mitchellh_go_ps",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-ps",
sum = "h1:9+ke9YJ9KGWw5ANXK6ozjoK47uI3uNbXv4YVINBnGm8=",
version = "v0.0.0-20190716172923-621e5597135b",
)
go_repository(
name = "com_github_mitchellh_go_testing_interface",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-testing-interface",
sum = "h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_go_wordwrap",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-wordwrap",
sum = "h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_gox",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/gox",
sum = "h1:lfGJxY7ToLJQjHHwi0EX6uYBdK78egf954SQl13PQJc=",
version = "v0.4.0",
)
go_repository(
name = "com_github_mitchellh_iochan",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/iochan",
sum = "h1:C+X3KsSTLFVBr/tK1eYN/vs4rJcvsiLU338UhYPJWeY=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_ioprogress",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/ioprogress",
sum = "h1:Qa6dnn8DlasdXRnacluu8HzPts0S1I9zvvUPDbBnXFI=",
version = "v0.0.0-20180201004757-6a23b12fa88e",
)
go_repository(
name = "com_github_mitchellh_mapstructure",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/mapstructure",
sum = "h1:cCBH2gTD2K0OtLlv/Y5H01VQCqmlDxz30kS5Y5bqfLA=",
version = "v1.3.1",
)
go_repository(
name = "com_github_mitchellh_osext",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/osext",
sum = "h1:2+myh5ml7lgEU/51gbeLHfKGNfgEQQIWrlbdaOsidbQ=",
version = "v0.0.0-20151018003038-5e2d6d41470f",
)
go_repository(
name = "com_github_mitchellh_reflectwalk",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/reflectwalk",
sum = "h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=",
version = "v1.0.0",
)
go_repository(
name = "com_github_moby_spdystream",
build_file_proto_mode = "disable",
importpath = "github.com/moby/spdystream",
sum = "h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8=",
version = "v0.2.0",
)
go_repository(
name = "com_github_moby_term",
build_file_proto_mode = "disable",
importpath = "github.com/moby/term",
sum = "h1:rzf0wL0CHVc8CEsgyygG0Mn9CNCCPZqOPaz8RiiHYQk=",
version = "v0.0.0-20201216013528-df9cb8a40635",
)
go_repository(
name = "com_github_modern_go_concurrent",
build_file_proto_mode = "disable",
importpath = "github.com/modern-go/concurrent",
sum = "h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=",
version = "v0.0.0-20180306012644-bacd9c7ef1dd",
)
go_repository(
name = "com_github_modern_go_reflect2",
build_file_proto_mode = "disable",
importpath = "github.com/modern-go/reflect2",
sum = "h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=",
version = "v1.0.1",
)
go_repository(
name = "com_github_mohae_deepcopy",
build_file_proto_mode = "disable",
importpath = "github.com/mohae/deepcopy",
sum = "h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=",
version = "v0.0.0-20170929034955-c48cc78d4826",
)
go_repository(
name = "com_github_morikuni_aec",
build_file_proto_mode = "disable",
importpath = "github.com/morikuni/aec",
sum = "h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mozilla_tls_observatory",
build_file_proto_mode = "disable",
importpath = "github.com/mozilla/tls-observatory",
sum = "h1:1xJ+Xi9lYWLaaP4yB67ah0+548CD3110mCPWhVVjFkI=",
version = "v0.0.0-20200317151703-4fa42e1c2dee",
)
go_repository(
name = "com_github_munnerz_crd_schema_fuzz",
build_file_proto_mode = "disable",
importpath = "github.com/munnerz/crd-schema-fuzz",
sum = "h1:8erI9yzEnOGw9K5O+a8zZdoo8N/OwrFi7c7SjBtkHAs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_munnerz_goautoneg",
build_file_proto_mode = "disable",
importpath = "github.com/munnerz/goautoneg",
sum = "h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=",
version = "v0.0.0-20191010083416-a7dc8b61c822",
)
go_repository(
name = "com_github_mwitkow_go_conntrack",
build_file_proto_mode = "disable",
importpath = "github.com/mwitkow/go-conntrack",
sum = "h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=",
version = "v0.0.0-20190716064945-2f068394615f",
)
go_repository(
name = "com_github_mxk_go_flowrate",
build_file_proto_mode = "disable",
importpath = "github.com/mxk/go-flowrate",
sum = "h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus=",
version = "v0.0.0-20140419014527-cca7078d478f",
)
go_repository(
name = "com_github_nakabonne_nestif",
build_file_proto_mode = "disable",
importpath = "github.com/nakabonne/nestif",
sum = "h1:+yOViDGhg8ygGrmII72nV9B/zGxY188TYpfolntsaPw=",
version = "v0.3.0",
)
go_repository(
name = "com_github_natefinch_lumberjack",
build_file_proto_mode = "disable",
importpath = "github.com/natefinch/lumberjack",
sum = "h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM=",
version = "v2.0.0+incompatible",
)
go_repository(
name = "com_github_nats_io_gnatsd",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/gnatsd",
sum = "h1:RconcfDeWpKCD6QIIwiVFcvForlXpWeJP7i5/lDLy44=",
version = "v1.4.1",
)
go_repository(
name = "com_github_nats_io_go_nats",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/go-nats",
sum = "h1:oQOfHcLr8hb43QG8yeVyY2jtarIaTjOv41CGdF3tTvQ=",
version = "v1.7.0",
)
go_repository(
name = "com_github_nats_io_jwt",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/jwt",
sum = "h1:+RB5hMpXUUA2dfxuhBTEkMOrYmM+gKIZYS1KjSostMI=",
version = "v0.3.2",
)
go_repository(
name = "com_github_nats_io_nats_go",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nats.go",
sum = "h1:ik3HbLhZ0YABLto7iX80pZLPw/6dx3T+++MZJwLnMrQ=",
version = "v1.9.1",
)
go_repository(
name = "com_github_nats_io_nats_server_v2",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nats-server/v2",
sum = "h1:i2Ly0B+1+rzNZHHWtD4ZwKi+OU5l+uQo1iDHZ2PmiIc=",
version = "v2.1.2",
)
go_repository(
name = "com_github_nats_io_nkeys",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nkeys",
sum = "h1:6JrEfig+HzTH85yxzhSVbjHRJv9cn0p6n3IngIcM5/k=",
version = "v0.1.3",
)
go_repository(
name = "com_github_nats_io_nuid",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nuid",
sum = "h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=",
version = "v1.0.1",
)
go_repository(
name = "com_github_nbio_st",
build_file_proto_mode = "disable",
importpath = "github.com/nbio/st",
sum = "h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4=",
version = "v0.0.0-20140626010706-e9e8d9816f32",
)
go_repository(
name = "com_github_nbutton23_zxcvbn_go",
build_file_proto_mode = "disable",
importpath = "github.com/nbutton23/zxcvbn-go",
sum = "h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E=",
version = "v0.0.0-20180912185939-ae427f1e4c1d",
)
go_repository(
name = "com_github_ncw_swift",
build_file_proto_mode = "disable",
importpath = "github.com/ncw/swift",
sum = "h1:4DQRPj35Y41WogBxyhOXlrI37nzGlyEcsforeudyYPQ=",
version = "v1.0.47",
)
go_repository(
name = "com_github_niemeyer_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/niemeyer/pretty",
sum = "h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=",
version = "v0.0.0-20200227124842-a10e7caefd8e",
)
go_repository(
name = "com_github_nwaples_rardecode",
build_file_proto_mode = "disable",
importpath = "github.com/nwaples/rardecode",
sum = "h1:r7vGuS5akxOnR4JQSkko62RJ1ReCMXxQRPtxsiFMBOs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_nxadm_tail",
build_file_proto_mode = "disable",
importpath = "github.com/nxadm/tail",
sum = "h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=",
version = "v1.4.8",
)
go_repository(
name = "com_github_nytimes_gziphandler",
build_file_proto_mode = "disable",
importpath = "github.com/NYTimes/gziphandler",
sum = "h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=",
version = "v1.1.1",
)
go_repository(
name = "com_github_octago_sflags",
build_file_proto_mode = "disable",
importpath = "github.com/octago/sflags",
sum = "h1:XceYzkRXGAHa/lSFmKLcaxSrsh4MTuOMQdIGsUD0wlk=",
version = "v0.2.0",
)
go_repository(
name = "com_github_oklog_run",
build_file_proto_mode = "disable",
importpath = "github.com/oklog/run",
sum = "h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_oklog_ulid",
build_file_proto_mode = "disable",
importpath = "github.com/oklog/ulid",
sum = "h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=",
version = "v1.3.1",
)
go_repository(
name = "com_github_olekukonko_tablewriter",
build_file_proto_mode = "disable",
importpath = "github.com/olekukonko/tablewriter",
sum = "h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=",
version = "v0.0.4",
)
go_repository(
name = "com_github_oneofone_xxhash",
build_file_proto_mode = "disable",
importpath = "github.com/OneOfOne/xxhash",
sum = "h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=",
version = "v1.2.2",
)
go_repository(
name = "com_github_onsi_ginkgo",
build_file_proto_mode = "disable",
importpath = "github.com/onsi/ginkgo",
sum = "h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc=",
version = "v1.16.4",
)
go_repository(
name = "com_github_onsi_gomega",
build_file_proto_mode = "disable",
importpath = "github.com/onsi/gomega",
sum = "h1:7lLHu94wT9Ij0o6EWWclhu0aOh32VxhkwEJvzuWPeak=",
version = "v1.13.0",
)
go_repository(
name = "com_github_op_go_logging",
build_file_proto_mode = "disable",
importpath = "github.com/op/go-logging",
sum = "h1:lDH9UUVJtmYCjyT0CI4q8xvlXPxeZ0gYCVvWbmPlp88=",
version = "v0.0.0-20160315200505-970db520ece7",
)
go_repository(
name = "com_github_opencontainers_go_digest",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/go-digest",
sum = "h1:WzifXhOVOEOuFYOJAW6aQqW0TooG2iki3E3Ii+WN7gQ=",
version = "v1.0.0-rc1",
)
go_repository(
name = "com_github_opencontainers_image_spec",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/image-spec",
sum = "h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI=",
version = "v1.0.1",
)
go_repository(
name = "com_github_opencontainers_runc",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runc",
sum = "h1:GlxAyO6x8rfZYN9Tt0Kti5a/cP41iuiO2yYT0IJGY8Y=",
version = "v0.1.1",
)
go_repository(
name = "com_github_opencontainers_runtime_spec",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runtime-spec",
sum = "h1:eNUVfm/RFLIi1G7flU5/ZRTHvd4kcVuzfRnL6OFlzCI=",
version = "v0.1.2-0.20190507144316-5b71a03e2700",
)
go_repository(
name = "com_github_opencontainers_runtime_tools",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runtime-tools",
sum = "h1:H7DMc6FAjgwZZi8BRqjrAAHWoqEr5e5L6pS4V0ezet4=",
version = "v0.0.0-20181011054405-1d69bd0f9c39",
)
go_repository(
name = "com_github_openpeedeep_depguard",
build_file_proto_mode = "disable",
importpath = "github.com/OpenPeeDeeP/depguard",
sum = "h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us=",
version = "v1.0.1",
)
go_repository(
name = "com_github_openzipkin_zipkin_go",
build_file_proto_mode = "disable",
importpath = "github.com/openzipkin/zipkin-go",
sum = "h1:nY8Hti+WKaP0cRsSeQ026wU03QsM762XBeCXBb9NAWI=",
version = "v0.2.2",
)
go_repository(
name = "com_github_otiai10_copy",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/copy",
sum = "h1:DDNipYy6RkIkjMwy+AWzgKiNTyj2RUI9yEMeETEpVyc=",
version = "v1.0.2",
)
go_repository(
name = "com_github_otiai10_curr",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/curr",
sum = "h1:+OLn68pqasWca0z5ryit9KGfp3sUsW4Lqg32iRMJyzs=",
version = "v0.0.0-20150429015615-9b4961190c95",
)
go_repository(
name = "com_github_otiai10_mint",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/mint",
sum = "h1:Ady6MKVezQwHBkGzLFbrsywyp09Ah7rkmfjV3Bcr5uc=",
version = "v1.3.0",
)
go_repository(
name = "com_github_pascaldekloe_goe",
build_file_proto_mode = "disable",
importpath = "github.com/pascaldekloe/goe",
sum = "h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=",
version = "v0.1.0",
)
go_repository(
name = "com_github_pavel_v_chernykh_keystore_go",
build_file_proto_mode = "disable",
importpath = "github.com/pavel-v-chernykh/keystore-go",
sum = "h1:Jd6xfriVlJ6hWPvYOE0Ni0QWcNTLRehfGPFxr3eSL80=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_pborman_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/pborman/uuid",
sum = "h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g=",
version = "v1.2.0",
)
go_repository(
name = "com_github_pelletier_go_buffruneio",
build_file_proto_mode = "disable",
importpath = "github.com/pelletier/go-buffruneio",
sum = "h1:U4t4R6YkofJ5xHm3dJzuRpPZ0mr5MMCoAWooScCR7aA=",
version = "v0.2.0",
)
go_repository(
name = "com_github_pelletier_go_toml",
build_file_proto_mode = "disable",
importpath = "github.com/pelletier/go-toml",
sum = "h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw=",
version = "v1.8.0",
)
go_repository(
name = "com_github_peterbourgon_diskv",
build_file_proto_mode = "disable",
importpath = "github.com/peterbourgon/diskv",
sum = "h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI=",
version = "v2.0.1+incompatible",
)
go_repository(
name = "com_github_phayes_checkstyle",
build_file_proto_mode = "disable",
importpath = "github.com/phayes/checkstyle",
sum = "h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA=",
version = "v0.0.0-20170904204023-bfd46e6a821d",
)
go_repository(
name = "com_github_phayes_freeport",
build_file_proto_mode = "disable",
importpath = "github.com/phayes/freeport",
sum = "h1:JhzVVoYvbOACxoUmOs6V/G4D5nPVUW73rKvXxP4XUJc=",
version = "v0.0.0-20180830031419-95f893ade6f2",
)
go_repository(
name = "com_github_pierrec_lz4",
build_file_proto_mode = "disable",
importpath = "github.com/pierrec/lz4",
sum = "h1:6aCX4/YZ9v8q69hTyiR7dNLnTA3fgtKHVVW5BCd5Znw=",
version = "v2.2.6+incompatible",
)
go_repository(
name = "com_github_pkg_errors",
build_file_proto_mode = "disable",
importpath = "github.com/pkg/errors",
sum = "h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=",
version = "v0.9.1",
)
go_repository(
name = "com_github_pkg_profile",
build_file_proto_mode = "disable",
importpath = "github.com/pkg/profile",
sum = "h1:F++O52m40owAmADcojzM+9gyjmMOY/T4oYJkgFDH8RE=",
version = "v1.2.1",
)
go_repository(
name = "com_github_pmezard_go_difflib",
build_file_proto_mode = "disable",
importpath = "github.com/pmezard/go-difflib",
sum = "h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_posener_complete",
build_file_proto_mode = "disable",
importpath = "github.com/posener/complete",
sum = "h1:ccV59UEOTzVDnDUEFdT95ZzHVZ+5+158q8+SJb2QV5w=",
version = "v1.1.1",
)
go_repository(
name = "com_github_pquerna_cachecontrol",
build_file_proto_mode = "disable",
importpath = "github.com/pquerna/cachecontrol",
sum = "h1:0XM1XL/OFFJjXsYXlG30spTkV/E9+gmd5GD1w2HE8xM=",
version = "v0.0.0-20171018203845-0dec1b30a021",
)
go_repository(
name = "com_github_prometheus_client_golang",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/client_golang",
sum = "h1:HNkLOAEQMIDv/K+04rukrLx6ch7msSRwf3/SASFAGtQ=",
version = "v1.11.0",
)
go_repository(
name = "com_github_prometheus_client_model",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/client_model",
sum = "h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=",
version = "v0.2.0",
)
go_repository(
name = "com_github_prometheus_common",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/common",
sum = "h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ=",
version = "v0.26.0",
)
go_repository(
name = "com_github_prometheus_procfs",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/procfs",
sum = "h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4=",
version = "v0.6.0",
)
go_repository(
name = "com_github_prometheus_tsdb",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/tsdb",
sum = "h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA=",
version = "v0.7.1",
)
go_repository(
name = "com_github_puerkitobio_purell",
build_file_proto_mode = "disable",
importpath = "github.com/PuerkitoBio/purell",
sum = "h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=",
version = "v1.1.1",
)
go_repository(
name = "com_github_puerkitobio_urlesc",
build_file_proto_mode = "disable",
importpath = "github.com/PuerkitoBio/urlesc",
sum = "h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=",
version = "v0.0.0-20170810143723-de5bf2ad4578",
)
go_repository(
name = "com_github_quasilyte_go_consistent",
build_file_proto_mode = "disable",
importpath = "github.com/quasilyte/go-consistent",
sum = "h1:JoUA0uz9U0FVFq5p4LjEq4C0VgQ0El320s3Ms0V4eww=",
version = "v0.0.0-20190521200055-c6f3937de18c",
)
go_repository(
name = "com_github_quasilyte_go_ruleguard",
build_file_proto_mode = "disable",
importpath = "github.com/quasilyte/go-ruleguard",
sum = "h1:DvnesvLtRPQOvaUbfXfh0tpMHg29by0H7F2U+QIkSu8=",
version = "v0.1.2-0.20200318202121-b00d7a75d3d8",
)
go_repository(
name = "com_github_rcrowley_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/rcrowley/go-metrics",
sum = "h1:eUm8ma4+yPknhXtkYlWh3tMkE6gBjXZToDned9s2gbQ=",
version = "v0.0.0-20190706150252-9beb055b7962",
)
go_repository(
name = "com_github_remyoudompheng_bigfft",
build_file_proto_mode = "disable",
importpath = "github.com/remyoudompheng/bigfft",
sum = "h1:/NRJ5vAYoqz+7sG51ubIDHXeWO8DlTSrToPu6q11ziA=",
version = "v0.0.0-20170806203942-52369c62f446",
)
go_repository(
name = "com_github_rogpeppe_fastuuid",
build_file_proto_mode = "disable",
importpath = "github.com/rogpeppe/fastuuid",
sum = "h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s=",
version = "v1.2.0",
)
go_repository(
name = "com_github_rogpeppe_go_internal",
build_file_proto_mode = "disable",
importpath = "github.com/rogpeppe/go-internal",
sum = "h1:qLvObTrvO/XRCqmkKxUlOBc48bI3efyDuAZe25QiF0w=",
version = "v1.5.2",
)
go_repository(
name = "com_github_rubiojr_go_vhd",
build_file_proto_mode = "disable",
importpath = "github.com/rubiojr/go-vhd",
sum = "h1:ht7N4d/B7Ezf58nvMNVF3OlvDlz9pp+WHVcRNS0nink=",
version = "v0.0.0-20160810183302-0bfd3b39853c",
)
go_repository(
name = "com_github_russross_blackfriday",
build_file_proto_mode = "disable",
importpath = "github.com/russross/blackfriday",
sum = "h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=",
version = "v1.5.2",
)
go_repository(
name = "com_github_russross_blackfriday_v2",
build_file_proto_mode = "disable",
importpath = "github.com/russross/blackfriday/v2",
sum = "h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=",
version = "v2.0.1",
)
go_repository(
name = "com_github_rwcarlsen_goexif",
build_file_proto_mode = "disable",
importpath = "github.com/rwcarlsen/goexif",
sum = "h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=",
version = "v0.0.0-20190401172101-9e8deecbddbd",
)
go_repository(
name = "com_github_ryancurrah_gomodguard",
build_file_proto_mode = "disable",
importpath = "github.com/ryancurrah/gomodguard",
sum = "h1:DWbye9KyMgytn8uYpuHkwf0RHqAYO6Ay/D0TbCpPtVU=",
version = "v1.1.0",
)
go_repository(
name = "com_github_ryanuber_columnize",
build_file_proto_mode = "disable",
importpath = "github.com/ryanuber/columnize",
sum = "h1:j1Wcmh8OrK4Q7GXY+V7SVSY8nUWQxHW5TkBe7YUl+2s=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_ryanuber_go_glob",
build_file_proto_mode = "disable",
importpath = "github.com/ryanuber/go-glob",
sum = "h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_sassoftware_go_rpmutils",
build_file_proto_mode = "disable",
importpath = "github.com/sassoftware/go-rpmutils",
sum = "h1:+gCnWOZV8Z/8jehJ2CdqB47Z3S+SREmQcuXkRFLNsiI=",
version = "v0.0.0-20190420191620-a8f1baeba37b",
)
go_repository(
name = "com_github_satori_go_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/satori/go.uuid",
sum = "h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=",
version = "v1.2.0",
)
go_repository(
name = "com_github_sclevine_spec",
build_file_proto_mode = "disable",
importpath = "github.com/sclevine/spec",
sum = "h1:1Jwdf9jSfDl9NVmt8ndHqbTZ7XCCPbh1jI3hkDBHVYA=",
version = "v1.2.0",
)
go_repository(
name = "com_github_sean_seed",
build_file_proto_mode = "disable",
importpath = "github.com/sean-/seed",
sum = "h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=",
version = "v0.0.0-20170313163322-e2103e2c3529",
)
go_repository(
name = "com_github_securego_gosec",
build_file_proto_mode = "disable",
importpath = "github.com/securego/gosec",
sum = "h1:rq2/kILQnPtq5oL4+IAjgVOjh5e2yj2aaCYi7squEvI=",
version = "v0.0.0-20200401082031-e946c8c39989",
)
go_repository(
name = "com_github_securego_gosec_v2",
build_file_proto_mode = "disable",
importpath = "github.com/securego/gosec/v2",
sum = "h1:y/9mCF2WPDbSDpL3QDWZD3HHGrSYw0QSHnCqTfs4JPE=",
version = "v2.3.0",
)
go_repository(
name = "com_github_sergi_go_diff",
build_file_proto_mode = "disable",
importpath = "github.com/sergi/go-diff",
sum = "h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=",
version = "v1.1.0",
)
go_repository(
name = "com_github_shirou_gopsutil",
build_file_proto_mode = "disable",
importpath = "github.com/shirou/gopsutil",
sum = "h1:WokF3GuxBeL+n4Lk4Fa8v9mbdjlrl7bHuneF4N1bk2I=",
version = "v0.0.0-20190901111213-e4ec7b275ada",
)
go_repository(
name = "com_github_shirou_w32",
build_file_proto_mode = "disable",
importpath = "github.com/shirou/w32",
sum = "h1:udFKJ0aHUL60LboW/A+DfgoHVedieIzIXE8uylPue0U=",
version = "v0.0.0-20160930032740-bb4de0191aa4",
)
go_repository(
name = "com_github_shopify_logrus_bugsnag",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/logrus-bugsnag",
sum = "h1:UrqY+r/OJnIp5u0s1SbQ8dVfLCZJsnvazdBP5hS4iRs=",
version = "v0.0.0-20171204204709-577dee27f20d",
)
go_repository(
name = "com_github_shopify_sarama",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/sarama",
sum = "h1:XxJBCZEoWJtoWjf/xRbmGUpAmTZGnuuF0ON0EvxxBrs=",
version = "v1.23.1",
)
go_repository(
name = "com_github_shopify_toxiproxy",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/toxiproxy",
sum = "h1:TKdv8HiTLgE5wdJuEML90aBgNWsokNbMijUGhmcoBJc=",
version = "v2.1.4+incompatible",
)
go_repository(
name = "com_github_shurcool_githubv4",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/githubv4",
sum = "h1:N5B+JgvM/DVYIxreItPJMM3yWrNO/GB2q4nESrtBisM=",
version = "v0.0.0-20210725200734-83ba7b4c9228",
)
go_repository(
name = "com_github_shurcool_go",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/go",
sum = "h1:MZM7FHLqUHYI0Y/mQAt3d2aYa0SiNms/hFqC9qJYolM=",
version = "v0.0.0-20180423040247-9e1955d9fb6e",
)
go_repository(
name = "com_github_shurcool_go_goon",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/go-goon",
sum = "h1:llrF3Fs4018ePo4+G/HV/uQUqEI1HMDjCeOf2V6puPc=",
version = "v0.0.0-20170922171312-37c2f522c041",
)
go_repository(
name = "com_github_shurcool_graphql",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/graphql",
sum = "h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk=",
version = "v0.0.0-20181231061246-d48a9a75455f",
)
go_repository(
name = "com_github_shurcool_sanitized_anchor_name",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/sanitized_anchor_name",
sum = "h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_sirupsen_logrus",
build_file_proto_mode = "disable",
importpath = "github.com/sirupsen/logrus",
sum = "h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=",
version = "v1.8.1",
)
go_repository(
name = "com_github_smartystreets_assertions",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/assertions",
sum = "h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=",
version = "v1.2.0",
)
go_repository(
name = "com_github_smartystreets_go_aws_auth",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/go-aws-auth",
sum = "h1:hp2CYQUINdZMHdvTdXtPOY2ainKl4IoMcpAXEf2xj3Q=",
version = "v0.0.0-20180515143844-0c1422d1fdb9",
)
go_repository(
name = "com_github_smartystreets_goconvey",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/goconvey",
sum = "h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=",
version = "v1.6.4",
)
go_repository(
name = "com_github_smartystreets_gunit",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/gunit",
sum = "h1:RyPDUFcJbvtXlhJPk7v+wnxZRY2EUokhEYl2EJOPToI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_soheilhy_cmux",
build_file_proto_mode = "disable",
importpath = "github.com/soheilhy/cmux",
sum = "h1:0HKaf1o97UwFjHH9o5XsHUOF+tqmdA7KEzXLpiyaw0E=",
version = "v0.1.4",
)
go_repository(
name = "com_github_sourcegraph_go_diff",
build_file_proto_mode = "disable",
importpath = "github.com/sourcegraph/go-diff",
sum = "h1:lhIKJ2nXLZZ+AfbHpYxTn0pXpNTTui0DX7DO3xeb1Zs=",
version = "v0.5.3",
)
go_repository(
name = "com_github_spaolacci_murmur3",
build_file_proto_mode = "disable",
importpath = "github.com/spaolacci/murmur3",
sum = "h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=",
version = "v0.0.0-20180118202830-f09979ecbc72",
)
go_repository(
name = "com_github_spf13_afero",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/afero",
sum = "h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=",
version = "v1.2.2",
)
go_repository(
name = "com_github_spf13_cast",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/cast",
sum = "h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=",
version = "v1.3.1",
)
go_repository(
name = "com_github_spf13_cobra",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/cobra",
sum = "h1:xghbfqPkxzxP3C/f3n5DdpAbdKLj4ZE4BWQI362l53M=",
version = "v1.1.3",
)
go_repository(
name = "com_github_spf13_jwalterweatherman",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/jwalterweatherman",
sum = "h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=",
version = "v1.1.0",
)
go_repository(
name = "com_github_spf13_pflag",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/pflag",
sum = "h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=",
version = "v1.0.5",
)
go_repository(
name = "com_github_spf13_viper",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/viper",
sum = "h1:pM5oEahlgWv/WnHXpgbKz7iLIxRf65tye2Ci+XFK5sk=",
version = "v1.7.1",
)
go_repository(
name = "com_github_src_d_gcfg",
build_file_proto_mode = "disable",
importpath = "github.com/src-d/gcfg",
sum = "h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=",
version = "v1.4.0",
)
go_repository(
name = "com_github_stackexchange_wmi",
build_file_proto_mode = "disable",
importpath = "github.com/StackExchange/wmi",
sum = "h1:fLjPD/aNc3UIOA6tDi6QXUemppXK3P9BI7mr2hd6gx8=",
version = "v0.0.0-20180116203802-5d049714c4a6",
)
go_repository(
name = "com_github_streadway_amqp",
build_file_proto_mode = "disable",
importpath = "github.com/streadway/amqp",
sum = "h1:0ngsPmuP6XIjiFRNFYlvKwSr5zff2v+uPHaffZ6/M4k=",
version = "v0.0.0-20190404075320-75d898a42a94",
)
go_repository(
name = "com_github_streadway_quantile",
build_file_proto_mode = "disable",
importpath = "github.com/streadway/quantile",
sum = "h1:7z3LSn867ex6VSaahyKadf4WtSsJIgne6A1WLOAGM8A=",
version = "v0.0.0-20150917103942-b0c588724d25",
)
go_repository(
name = "com_github_stretchr_objx",
build_file_proto_mode = "disable",
importpath = "github.com/stretchr/objx",
sum = "h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=",
version = "v0.2.0",
)
go_repository(
name = "com_github_stretchr_testify",
build_file_proto_mode = "disable",
importpath = "github.com/stretchr/testify",
sum = "h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=",
version = "v1.7.0",
)
go_repository(
name = "com_github_subosito_gotenv",
build_file_proto_mode = "disable",
importpath = "github.com/subosito/gotenv",
sum = "h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=",
version = "v1.2.0",
)
go_repository(
name = "com_github_syndtr_gocapability",
build_file_proto_mode = "disable",
importpath = "github.com/syndtr/gocapability",
sum = "h1:zLV6q4e8Jv9EHjNg/iHfzwDkCve6Ua5jCygptrtXHvI=",
version = "v0.0.0-20170704070218-db04d3cc01c8",
)
go_repository(
name = "com_github_tdakkota_asciicheck",
build_file_proto_mode = "disable",
importpath = "github.com/tdakkota/asciicheck",
sum = "h1:HxLVTlqcHhFAz3nWUcuvpH7WuOMv8LQoCWmruLfFH2U=",
version = "v0.0.0-20200416200610-e657995f937b",
)
go_repository(
name = "com_github_tektoncd_pipeline",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/pipeline",
sum = "h1:hWdWj5bDjkSGYLlJS+u+Kh9ZktBJgs2JNUv/kP0LVOA=",
version = "v0.13.1-0.20200625065359-44f22a067b75",
)
go_repository(
name = "com_github_tektoncd_plumbing",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/plumbing",
sum = "h1:crv70CBAJ2gZFSbf13aRVwdbjR2GYwTms/ZEok/SnFM=",
version = "v0.0.0-20200430135134-e53521e1d887",
)
go_repository(
name = "com_github_tektoncd_plumbing_pipelinerun_logs",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/plumbing/pipelinerun-logs",
sum = "h1:9qeyrQsoPZbHOyOPt0OeB1TCYXfYb5swrxlFWzTIYYk=",
version = "v0.0.0-20191206114338-712d544c2c21",
)
go_repository(
name = "com_github_tetafro_godot",
build_file_proto_mode = "disable",
importpath = "github.com/tetafro/godot",
sum = "h1:Dib7un+rYJFUi8vN0Bk6EHheKy6fv6ZzFURHw75g6m8=",
version = "v0.4.2",
)
go_repository(
name = "com_github_tidwall_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/tidwall/pretty",
sum = "h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_timakin_bodyclose",
build_file_proto_mode = "disable",
importpath = "github.com/timakin/bodyclose",
sum = "h1:ig99OeTyDwQWhPe2iw9lwfQVF1KB3Q4fpP3X7/2VBG8=",
version = "v0.0.0-20200424151742-cb6215831a94",
)
go_repository(
name = "com_github_tj_assert",
build_file_proto_mode = "disable",
importpath = "github.com/tj/assert",
sum = "h1:Rw8kxzWo1mr6FSaYXjQELRe88y2KdfynXdnK72rdjtA=",
version = "v0.0.0-20171129193455-018094318fb0",
)
go_repository(
name = "com_github_tj_go_elastic",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-elastic",
sum = "h1:eGaGNxrtoZf/mBURsnNQKDR7u50Klgcf2eFDQEnc8Bc=",
version = "v0.0.0-20171221160941-36157cbbebc2",
)
go_repository(
name = "com_github_tj_go_kinesis",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-kinesis",
sum = "h1:m74UWYy+HBs+jMFR9mdZU6shPewugMyH5+GV6LNgW8w=",
version = "v0.0.0-20171128231115-08b17f58cb1b",
)
go_repository(
name = "com_github_tj_go_spin",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-spin",
sum = "h1:lhdWZsvImxvZ3q1C5OIB7d72DuOwP4O2NdBg9PyzNds=",
version = "v1.1.0",
)
go_repository(
name = "com_github_tmc_grpc_websocket_proxy",
build_file_proto_mode = "disable",
importpath = "github.com/tmc/grpc-websocket-proxy",
sum = "h1:LnC5Kc/wtumK+WB441p7ynQJzVuNRJiqddSIE3IlSEQ=",
version = "v0.0.0-20190109142713-0ad062ec5ee5",
)
go_repository(
name = "com_github_tommy_muehle_go_mnd",
build_file_proto_mode = "disable",
importpath = "github.com/tommy-muehle/go-mnd",
sum = "h1:RC4maTWLKKwb7p1cnoygsbKIgNlJqSYBeAFON3Ar8As=",
version = "v1.3.1-0.20200224220436-e6f9a994e8fa",
)
go_repository(
name = "com_github_trivago_tgo",
build_file_proto_mode = "disable",
importpath = "github.com/trivago/tgo",
sum = "h1:bxatjJIXNIpV18bucU4Uk/LaoxvxuOlp/oowRHyncLQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_tsenart_vegeta",
build_file_proto_mode = "disable",
importpath = "github.com/tsenart/vegeta",
sum = "h1:ErZrHhRveAoznVW80gbrxz+qxJNydpA2fcQxTPHkZbU=",
version = "v12.7.1-0.20190725001342-b5f4fca92137+incompatible",
)
go_repository(
name = "com_github_ugorji_go",
build_file_proto_mode = "disable",
importpath = "github.com/ugorji/go",
sum = "h1:j4s+tAvLfL3bZyefP2SEWmhBzmuIlH/eqNuPdFPgngw=",
version = "v1.1.4",
)
go_repository(
name = "com_github_ugorji_go_codec",
build_file_proto_mode = "disable",
importpath = "github.com/ugorji/go/codec",
sum = "h1:3SVOIvH7Ae1KRYyQWRjXWJEA9sS/c/pjvH++55Gr648=",
version = "v0.0.0-20181204163529-d75b2dcb6bc8",
)
go_repository(
name = "com_github_ulikunitz_xz",
build_file_proto_mode = "disable",
importpath = "github.com/ulikunitz/xz",
sum = "h1:YvTNdFzX6+W5m9msiYg/zpkSURPPtOlzbqYjrFn7Yt4=",
version = "v0.5.7",
)
go_repository(
name = "com_github_ultraware_funlen",
build_file_proto_mode = "disable",
importpath = "github.com/ultraware/funlen",
sum = "h1:Av96YVBwwNSe4MLR7iI/BIa3VyI7/djnto/pK3Uxbdo=",
version = "v0.0.2",
)
go_repository(
name = "com_github_ultraware_whitespace",
build_file_proto_mode = "disable",
importpath = "github.com/ultraware/whitespace",
sum = "h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg=",
version = "v0.0.4",
)
go_repository(
name = "com_github_urfave_cli",
build_file_proto_mode = "disable",
importpath = "github.com/urfave/cli",
sum = "h1:u7tSpNPPswAFymm8IehJhy4uJMlUuU/GmqSkvJ1InXA=",
version = "v1.22.4",
)
go_repository(
name = "com_github_urfave_cli_v2",
build_file_proto_mode = "disable",
importpath = "github.com/urfave/cli/v2",
sum = "h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k=",
version = "v2.1.1",
)
go_repository(
name = "com_github_uudashr_gocognit",
build_file_proto_mode = "disable",
importpath = "github.com/uudashr/gocognit",
sum = "h1:MoG2fZ0b/Eo7NXoIwCVFLG5JED3qgQz5/NEE+rOsjPs=",
version = "v1.0.1",
)
go_repository(
name = "com_github_valyala_bytebufferpool",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/bytebufferpool",
sum = "h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_valyala_fasthttp",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/fasthttp",
sum = "h1:dzZJf2IuMiclVjdw0kkT+f9u4YdrapbNyGAN47E/qnk=",
version = "v1.2.0",
)
go_repository(
name = "com_github_valyala_quicktemplate",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/quicktemplate",
sum = "h1:BaO1nHTkspYzmAjPXj0QiDJxai96tlcZyKcI9dyEGvM=",
version = "v1.2.0",
)
go_repository(
name = "com_github_valyala_tcplisten",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/tcplisten",
sum = "h1:0R4NLDRDZX6JcmhJgXi5E4b8Wg84ihbmUKp/GvSPEzc=",
version = "v0.0.0-20161114210144-ceec8f93295a",
)
go_repository(
name = "com_github_vdemeester_k8s_pkg_credentialprovider",
build_file_proto_mode = "disable",
importpath = "github.com/vdemeester/k8s-pkg-credentialprovider",
sum = "h1:czKEIG2Q3YRTgs6x/8xhjVMJD5byPo6cZuostkbTM74=",
version = "v1.17.4",
)
go_repository(
name = "com_github_vektah_gqlparser",
build_file_proto_mode = "disable",
importpath = "github.com/vektah/gqlparser",
sum = "h1:ZsyLGn7/7jDNI+y4SEhI4yAxRChlv15pUHMjijT+e68=",
version = "v1.1.2",
)
go_repository(
name = "com_github_venafi_vcert_v4",
build_file_proto_mode = "disable",
importpath = "github.com/Venafi/vcert/v4",
sum = "h1:37gfyjS9v5YvZcIABwNPo1fAC31lIZT7glVK1vfUxk4=",
version = "v4.11.0",
)
go_repository(
name = "com_github_vmware_govmomi",
build_file_proto_mode = "disable",
importpath = "github.com/vmware/govmomi",
sum = "h1:gpw/0Ku+6RgF3jsi7fnCLmlcikBHfKBCUcu1qgc16OU=",
version = "v0.20.3",
)
go_repository(
name = "com_github_xanzy_go_gitlab",
build_file_proto_mode = "disable",
importpath = "github.com/xanzy/go-gitlab",
sum = "h1:tBm+OXv1t+KBsqlXkSDFz+YUjRM0GFsjpOWYOod3Ebs=",
version = "v0.32.0",
)
go_repository(
name = "com_github_xanzy_ssh_agent",
build_file_proto_mode = "disable",
importpath = "github.com/xanzy/ssh-agent",
sum = "h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=",
version = "v0.3.0",
)
go_repository(
name = "com_github_xdg_scram",
build_file_proto_mode = "disable",
importpath = "github.com/xdg/scram",
sum = "h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk=",
version = "v0.0.0-20180814205039-7eeb5667e42c",
)
go_repository(
name = "com_github_xdg_stringprep",
build_file_proto_mode = "disable",
importpath = "github.com/xdg/stringprep",
sum = "h1:d9X0esnoa3dFsV0FG35rAT0RIhYFlPq7MiP+DW89La0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_xeipuuv_gojsonpointer",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonpointer",
sum = "h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=",
version = "v0.0.0-20180127040702-4e3ac2762d5f",
)
go_repository(
name = "com_github_xeipuuv_gojsonreference",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonreference",
sum = "h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=",
version = "v0.0.0-20180127040603-bd5ef7bd5415",
)
go_repository(
name = "com_github_xeipuuv_gojsonschema",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonschema",
sum = "h1:ngVtJC9TY/lg0AA/1k48FYhBrhRoFlEmWzsehpNAaZg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_xi2_xz",
build_file_proto_mode = "disable",
importpath = "github.com/xi2/xz",
sum = "h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=",
version = "v0.0.0-20171230120015-48954b6210f8",
)
go_repository(
name = "com_github_xiang90_probing",
build_file_proto_mode = "disable",
importpath = "github.com/xiang90/probing",
sum = "h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8=",
version = "v0.0.0-20190116061207-43a291ad63a2",
)
go_repository(
name = "com_github_xlab_handysort",
build_file_proto_mode = "disable",
importpath = "github.com/xlab/handysort",
sum = "h1:j2hhcujLRHAg872RWAV5yaUrEjHEObwDv3aImCaNLek=",
version = "v0.0.0-20150421192137-fb3537ed64a1",
)
go_repository(
name = "com_github_xordataexchange_crypt",
build_file_proto_mode = "disable",
importpath = "github.com/xordataexchange/crypt",
sum = "h1:ESFSdwYZvkeru3RtdrYueztKhOBCSAAzS4Gf+k0tEow=",
version = "v0.0.3-0.20170626215501-b2862e3d0a77",
)
go_repository(
name = "com_github_yuin_goldmark",
build_file_proto_mode = "disable",
importpath = "github.com/yuin/goldmark",
sum = "h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM=",
version = "v1.2.1",
)
go_repository(
name = "com_github_yvasiyarov_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/go-metrics",
sum = "h1:+lm10QQTNSBd8DVTNGHx7o/IKu9HYDvLMffDhbyLccI=",
version = "v0.0.0-20140926110328-57bccd1ccd43",
)
go_repository(
name = "com_github_yvasiyarov_gorelic",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/gorelic",
sum = "h1:hlE8//ciYMztlGpl/VA+Zm1AcTPHYkHJPbHqE6WJUXE=",
version = "v0.0.0-20141212073537-a9bba5b9ab50",
)
go_repository(
name = "com_github_yvasiyarov_newrelic_platform_go",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/newrelic_platform_go",
sum = "h1:ERexzlUfuTvpE74urLSbIQW0Z/6hF9t8U4NsJLaioAY=",
version = "v0.0.0-20140908184405-b21fdbd4370f",
)
go_repository(
name = "com_google_cloud_go",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go",
sum = "h1:DZeAkuQGQqnm9Xv36SbMJEU8aFBz4wL04UpMWPWwjzg=",
version = "v0.66.0",
)
go_repository(
name = "com_google_cloud_go_bigquery",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/bigquery",
sum = "h1:PQcPefKFdaIzjQFbiyOgAqyx8q5djaE7x9Sqe712DPA=",
version = "v1.8.0",
)
go_repository(
name = "com_google_cloud_go_datastore",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/datastore",
sum = "h1:/May9ojXjRkPBNVrq+oWLqmWCkr4OU5uRY29bu0mRyQ=",
version = "v1.1.0",
)
go_repository(
name = "com_google_cloud_go_firestore",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/firestore",
sum = "h1:9x7Bx0A9R5/M9jibeJeZWqjeVEIxYW9fZYqB9a70/bY=",
version = "v1.1.0",
)
go_repository(
name = "com_google_cloud_go_logging",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/logging",
sum = "h1:kaunpnoEh9L4hu6JUsBa8Y20LBfKnCuDhKUgdZp7oK8=",
version = "v1.0.0",
)
go_repository(
name = "com_google_cloud_go_pubsub",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/pubsub",
sum = "h1:76oR7VBOkL7ivoIrFKyW0k7YDCRelrlxktIzQiIUGgg=",
version = "v1.4.0",
)
go_repository(
name = "com_google_cloud_go_storage",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/storage",
sum = "h1:4y3gHptW1EHVtcPAVE0eBBlFuGqEejTTG3KdIE0lUX4=",
version = "v1.12.0",
)
go_repository(
name = "com_shuralyov_dmitri_gpu_mtl",
build_file_proto_mode = "disable",
importpath = "dmitri.shuralyov.com/gpu/mtl",
sum = "h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY=",
version = "v0.0.0-20190408044501-666a987793e9",
)
go_repository(
name = "com_sourcegraph_sqs_pbtypes",
build_file_proto_mode = "disable",
importpath = "sourcegraph.com/sqs/pbtypes",
sum = "h1:f7lAwqviDEGvON4kRv0o5V7FT/IQK+tbkF664XMbP3o=",
version = "v1.0.0",
)
go_repository(
name = "com_sslmate_software_src_go_pkcs12",
build_file_proto_mode = "disable",
importpath = "software.sslmate.com/src/go-pkcs12",
sum = "h1:AVd6O+azYjVQYW1l55IqkbL8/JxjrLtO6q4FCmV8N5c=",
version = "v0.0.0-20200830195227-52f69702a001",
)
go_repository(
name = "dev_gocloud",
build_file_proto_mode = "disable",
importpath = "gocloud.dev",
sum = "h1:EDRyaRAnMGSq/QBto486gWFxMLczAfIYUmusV7XLNBM=",
version = "v0.19.0",
)
go_repository(
name = "dev_knative_caching",
build_file_proto_mode = "disable",
importpath = "knative.dev/caching",
sum = "h1:mxrur6DsVK8uIjhIq7c1OMls4YjBcRlyvnh3Vx13a0M=",
version = "v0.0.0-20200116200605-67bca2c83dfa",
)
go_repository(
name = "dev_knative_eventing_contrib",
build_file_proto_mode = "disable",
importpath = "knative.dev/eventing-contrib",
sum = "h1:xncT+JrokPG+hPUFJwue8ubPpzmziV9GUIZqYt01JDo=",
version = "v0.11.2",
)
go_repository(
name = "dev_knative_pkg",
build_file_proto_mode = "disable",
importpath = "knative.dev/pkg",
sum = "h1:NDQS+236vhwCP9oiBBGvQ5WGzbD0Y8Pcv9dtE2stg+Q=",
version = "v0.0.0-20200711004937-22502028e31a",
)
go_repository(
name = "dev_knative_test_infra",
build_file_proto_mode = "disable",
importpath = "knative.dev/test-infra",
sum = "h1:wNlGK4f5Ykqh3KLC5RlyR9kvzvRgo/LwJQNsZWGVHnU=",
version = "v0.0.0-20200707183444-aed09e56ddc7",
)
go_repository(
name = "in_gopkg_airbrake_gobrake_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/airbrake/gobrake.v2",
sum = "h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo=",
version = "v2.0.9",
)
go_repository(
name = "in_gopkg_alecthomas_kingpin_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/alecthomas/kingpin.v2",
sum = "h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=",
version = "v2.2.6",
)
go_repository(
name = "in_gopkg_asn1_ber_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/asn1-ber.v1",
sum = "h1:TxyelI5cVkbREznMhfzycHdkp5cLA7DpE+GKjSslYhM=",
version = "v1.0.0-20181015200546-f715ec2f112d",
)
go_repository(
name = "in_gopkg_check_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/check.v1",
sum = "h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=",
version = "v1.0.0-20201130134442-10cb98267c6c",
)
go_repository(
name = "in_gopkg_cheggaaa_pb_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/cheggaaa/pb.v1",
sum = "h1:Ev7yu1/f6+d+b3pi5vPdRPc6nNtP1umSfcWiEfRqv6I=",
version = "v1.0.25",
)
go_repository(
name = "in_gopkg_errgo_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/errgo.v2",
sum = "h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8=",
version = "v2.1.0",
)
go_repository(
name = "in_gopkg_fsnotify_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/fsnotify.v1",
sum = "h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=",
version = "v1.4.7",
)
go_repository(
name = "in_gopkg_gcfg_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/gcfg.v1",
sum = "h1:0HIbH907iBTAntm+88IJV2qmJALDAh8sPekI9Vc1fm0=",
version = "v1.2.0",
)
go_repository(
name = "in_gopkg_gemnasium_logrus_airbrake_hook_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/gemnasium/logrus-airbrake-hook.v2",
sum = "h1:OAj3g0cR6Dx/R07QgQe8wkA9RNjB2u4i700xBkIT4e0=",
version = "v2.1.2",
)
go_repository(
name = "in_gopkg_inf_v0",
build_file_proto_mode = "disable",
importpath = "gopkg.in/inf.v0",
sum = "h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=",
version = "v0.9.1",
)
go_repository(
name = "in_gopkg_ini_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/ini.v1",
sum = "h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y=",
version = "v1.56.0",
)
go_repository(
name = "in_gopkg_jcmturner_aescts_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/aescts.v1",
sum = "h1:cVVZBK2b1zY26haWB4vbBiZrfFQnfbTVrE3xZq6hrEw=",
version = "v1.0.1",
)
go_repository(
name = "in_gopkg_jcmturner_dnsutils_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/dnsutils.v1",
sum = "h1:cIuC1OLRGZrld+16ZJvvZxVJeKPsvd5eUIvxfoN5hSM=",
version = "v1.0.1",
)
go_repository(
name = "in_gopkg_jcmturner_gokrb5_v7",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/gokrb5.v7",
sum = "h1:0709Jtq/6QXEuWRfAm260XqlpcwL1vxtO1tUE2qK8Z4=",
version = "v7.3.0",
)
go_repository(
name = "in_gopkg_jcmturner_rpc_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/rpc.v1",
sum = "h1:QHIUxTX1ISuAv9dD2wJ9HWQVuWDX/Zc0PfeC2tjc4rU=",
version = "v1.1.0",
)
go_repository(
name = "in_gopkg_natefinch_lumberjack_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/natefinch/lumberjack.v2",
sum = "h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=",
version = "v2.0.0",
)
go_repository(
name = "in_gopkg_resty_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/resty.v1",
sum = "h1:CuXP0Pjfw9rOuY6EP+UvtNvt5DSqHpIxILZKT/quCZI=",
version = "v1.12.0",
)
go_repository(
name = "in_gopkg_robfig_cron_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/robfig/cron.v2",
sum = "h1:E846t8CnR+lv5nE+VuiKTDG/v1U2stad0QzddfJC7kY=",
version = "v2.0.0-20150107220207-be2e0b0deed5",
)
go_repository(
name = "in_gopkg_square_go_jose_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/square/go-jose.v2",
sum = "h1:SK5KegNXmKmqE342YYN2qPHEnUYeoMiXXl1poUlI+o4=",
version = "v2.3.1",
)
go_repository(
name = "in_gopkg_src_d_go_billy_v4",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-billy.v4",
sum = "h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=",
version = "v4.3.2",
)
go_repository(
name = "in_gopkg_src_d_go_git_fixtures_v3",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-git-fixtures.v3",
sum = "h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=",
version = "v3.5.0",
)
go_repository(
name = "in_gopkg_src_d_go_git_v4",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-git.v4",
sum = "h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=",
version = "v4.13.1",
)
go_repository(
name = "in_gopkg_tomb_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/tomb.v1",
sum = "h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=",
version = "v1.0.0-20141024135613-dd632973f1e7",
)
go_repository(
name = "in_gopkg_warnings_v0",
build_file_proto_mode = "disable",
importpath = "gopkg.in/warnings.v0",
sum = "h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=",
version = "v0.1.2",
)
go_repository(
name = "in_gopkg_yaml_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v1",
sum = "h1:POO/ycCATvegFmVuPpQzZFJ+pGZeX22Ufu6fibxDVjU=",
version = "v1.0.0-20140924161607-9f9df34309c0",
)
go_repository(
name = "in_gopkg_yaml_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v2",
sum = "h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=",
version = "v2.4.0",
)
go_repository(
name = "in_gopkg_yaml_v3",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v3",
sum = "h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=",
version = "v3.0.0-20210107192922-496545a6307b",
)
go_repository(
name = "io_etcd_go_bbolt",
build_file_proto_mode = "disable",
importpath = "go.etcd.io/bbolt",
sum = "h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0=",
version = "v1.3.5",
)
go_repository(
name = "io_etcd_go_etcd",
build_file_proto_mode = "disable",
importpath = "go.etcd.io/etcd",
sum = "h1:1JFLBqwIgdyHN1ZtgjTBwO+blA6gVOmZurpiMEsETKo=",
version = "v0.5.0-alpha.5.0.20200910180754-dd1b699fc489",
)
go_repository(
name = "io_gitea_code_sdk_gitea",
build_file_proto_mode = "disable",
importpath = "code.gitea.io/sdk/gitea",
sum = "h1:hvDCz4wtFvo7rf5Ebj8tGd4aJ4wLPKX3BKFX9Dk1Pgs=",
version = "v0.12.0",
)
    # --- k8s.io component repositories (gazelle-generated version pins) ---
    # NOTE(review): most entries here use build_file_proto_mode = "disable",
    # but api, apimachinery, and client-go use "disable_global" — confirm the
    # mix is intentional and not drift from separate `gazelle update-repos` runs.
    go_repository(
        name = "io_k8s_api",
        build_file_proto_mode = "disable_global",
        importpath = "k8s.io/api",
        sum = "h1:94bbZ5NTjdINJEdzOkpS4vdPhkb1VFpTYC9zh43f75c=",
        version = "v0.21.1",
    )
    go_repository(
        name = "io_k8s_apiextensions_apiserver",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/apiextensions-apiserver",
        sum = "h1:AA+cnsb6w7SZ1vD32Z+zdgfXdXY8X9uGX5bN6EoPEIo=",
        version = "v0.21.1",
    )
    go_repository(
        name = "io_k8s_apimachinery",
        build_file_proto_mode = "disable_global",
        importpath = "k8s.io/apimachinery",
        sum = "h1:Q6XuHGlj2xc+hlMCvqyYfbv3H7SRGn2c8NycxJquDVs=",
        version = "v0.21.1",
    )
    go_repository(
        name = "io_k8s_apiserver",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/apiserver",
        sum = "h1:wTRcid53IhxhbFt4KTrFSw8tAncfr01EP91lzfcygVg=",
        version = "v0.21.1",
    )
    go_repository(
        name = "io_k8s_cli_runtime",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/cli-runtime",
        sum = "h1:wLe+osHSqcItyS3MYQXVyGFa54fppORVA8Jn7DBGSWw=",
        version = "v0.19.0",
    )
    go_repository(
        name = "io_k8s_client_go",
        build_file_proto_mode = "disable_global",
        importpath = "k8s.io/client-go",
        # `replace` points at the same module path; presumably this mirrors a
        # go.mod replace directive pinning client-go to v0.21.1 — verify
        # against the source go.mod before changing.
        replace = "k8s.io/client-go",
        sum = "h1:bhblWYLZKUu+pm50plvQF8WpY6TXdRRtcS/K9WauOj4=",
        version = "v0.21.1",
    )
    go_repository(
        name = "io_k8s_cloud_provider",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/cloud-provider",
        sum = "h1:ELMIQwweSNu8gfVEnLDypxd9034S1sZJg6QcdWJOvMI=",
        version = "v0.17.4",
    )
    go_repository(
        name = "io_k8s_code_generator",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/code-generator",
        sum = "h1:jvcxHpVu5dm/LMXr3GOj/jroiP8+v2YnJE9i2OVRenk=",
        version = "v0.21.1",
    )
    go_repository(
        name = "io_k8s_component_base",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/component-base",
        sum = "h1:iLpj2btXbR326s/xNQWmPNGu0gaYSjzn7IN/5i28nQw=",
        version = "v0.21.1",
    )
    go_repository(
        name = "io_k8s_csi_translation_lib",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/csi-translation-lib",
        sum = "h1:bP9yGfCJDknP7tklCwizZtwgJNRePMVcEaFIfeA11ho=",
        version = "v0.17.4",
    )
    go_repository(
        name = "io_k8s_gengo",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/gengo",
        sum = "h1:Uusb3oh8XcdzDF/ndlI4ToKTYVlkCSJP39SRY2mfRAw=",
        version = "v0.0.0-20201214224949-b6c5ce23f027",
    )
    go_repository(
        name = "io_k8s_klog",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/klog",
        sum = "h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8=",
        version = "v1.0.0",
    )
    go_repository(
        name = "io_k8s_klog_v2",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/klog/v2",
        sum = "h1:Q3gmuM9hKEjefWFFYF0Mat+YyFJvsUyYuwyNNJ5C9Ts=",
        version = "v2.8.0",
    )
    go_repository(
        name = "io_k8s_kube_aggregator",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/kube-aggregator",
        sum = "h1:rL4fsftMaqkKjaibArYDaBeqN41CHaJzgRJjUB9IrIg=",
        version = "v0.19.0",
    )
    go_repository(
        name = "io_k8s_kube_openapi",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/kube-openapi",
        sum = "h1:vEx13qjvaZ4yfObSSXW7BrMc/KQBBT/Jyee8XtLf4x0=",
        version = "v0.0.0-20210305001622-591a79e4bda7",
    )
    go_repository(
        name = "io_k8s_kubectl",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/kubectl",
        sum = "h1:t9uxaZzGvqc2jY96mjnPSjFHtaKOxoUegeGZdaGT6aw=",
        version = "v0.19.0",
    )
    # NOTE(review): pinned at v1.14.7 while the k8s.io staging modules above
    # are at v0.21.1 / v0.19.0 / v0.17.4 — confirm this old pin is intentional.
    go_repository(
        name = "io_k8s_kubernetes",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/kubernetes",
        sum = "h1:wJx/r2HuPVaaBeCUk/P47GSK0eyrj3mI/kESRFBp6/A=",
        version = "v1.14.7",
    )
    go_repository(
        name = "io_k8s_legacy_cloud_providers",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/legacy-cloud-providers",
        sum = "h1:VvFqJGiYAr2gIdoNuqbeZLEdxIFeN4Yt6OLJS9l2oIE=",
        version = "v0.17.4",
    )
    go_repository(
        name = "io_k8s_metrics",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/metrics",
        sum = "h1:cKq0+Z7wg5qkK1n8dryNffKfU22DBX83JguGpR+TCk0=",
        version = "v0.19.0",
    )
    # --- sigs.k8s.io repositories ---
    # Note: structured-merge-diff is pinned four times (v1, v2, v3, v4 module
    # paths) — Go modules treat each major version as a distinct module.
    go_repository(
        name = "io_k8s_sigs_apiserver_network_proxy_konnectivity_client",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/apiserver-network-proxy/konnectivity-client",
        sum = "h1:4uqm9Mv+w2MmBYD+F4qf/v6tDFUdPOk29C095RbU5mY=",
        version = "v0.0.15",
    )
    go_repository(
        name = "io_k8s_sigs_boskos",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/boskos",
        sum = "h1:OCr84Jrq4HgrYxP9wrfSsGioR1VSpTZMh/RXMu5sm+8=",
        version = "v0.0.0-20210730172138-093b54882439",
    )
    go_repository(
        name = "io_k8s_sigs_controller_runtime",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/controller-runtime",
        sum = "h1:ZIZ/dtpboPSbZYY7uUz2OzrkaBTOThx2yekLtpGB+zY=",
        version = "v0.9.0",
    )
    go_repository(
        name = "io_k8s_sigs_controller_tools",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/controller-tools",
        sum = "h1:PXOHvyYAjWfO0UfQvaUo33HpXNCOilV3i/Vjc7iM1/A=",
        version = "v0.2.9-0.20200414181213-645d44dca7c0",
    )
    go_repository(
        name = "io_k8s_sigs_kustomize",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/kustomize",
        sum = "h1:JUufWFNlI44MdtnjUqVnvh29rR37PQFzPbLXqhyOyX0=",
        version = "v2.0.3+incompatible",
    )
    go_repository(
        name = "io_k8s_sigs_structured_merge_diff",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/structured-merge-diff",
        sum = "h1:LOs1LZWMsz1xs77Phr/pkB4LFaavH7IVq/3+WTN9XTA=",
        version = "v1.0.1",
    )
    go_repository(
        name = "io_k8s_sigs_structured_merge_diff_v2",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/structured-merge-diff/v2",
        sum = "h1:I0h4buiCqDtPztO3NOiyoNMtqSIfld49D4Wj3UBXYZA=",
        version = "v2.0.1",
    )
    go_repository(
        name = "io_k8s_sigs_structured_merge_diff_v3",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/structured-merge-diff/v3",
        sum = "h1:dOmIZBMfhcHS09XZkMyUgkq5trg3/jRyJYFZUiaOp8E=",
        version = "v3.0.0",
    )
    go_repository(
        name = "io_k8s_sigs_structured_merge_diff_v4",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/structured-merge-diff/v4",
        sum = "h1:C4r9BgJ98vrKnnVCjwCSXcWjWe0NKcUQkmzDXZXGwH8=",
        version = "v4.1.0",
    )
    go_repository(
        name = "io_k8s_sigs_testing_frameworks",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/testing_frameworks",
        sum = "h1:vK0+tvjF0BZ/RYFeZ1E6BYBwHJJXhjuZ3TdsEKH+UQM=",
        version = "v0.1.2",
    )
    go_repository(
        name = "io_k8s_sigs_yaml",
        build_file_proto_mode = "disable",
        importpath = "sigs.k8s.io/yaml",
        sum = "h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q=",
        version = "v1.2.0",
    )
    # --- k8s.io test-infra / utils, plus OpenCensus core and contrib repos ---
    go_repository(
        name = "io_k8s_test_infra",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/test-infra",
        sum = "h1:g5GLdRIOMs9vnEM/ZWG67f1Stn8cW1dER+MNK9P7Xn8=",
        version = "v0.0.0-20210903101950-5c7809e9c5e9",
    )
    go_repository(
        name = "io_k8s_utils",
        build_file_proto_mode = "disable",
        importpath = "k8s.io/utils",
        sum = "h1:MSqsVQ3pZvPGTqCjptfimO2WjG7A9un2zcpiHkA6M/s=",
        version = "v0.0.0-20210527160623-6fdb442a123b",
    )
    go_repository(
        name = "io_opencensus_go",
        build_file_proto_mode = "disable",
        importpath = "go.opencensus.io",
        sum = "h1:LYy1Hy3MJdrCdMwwzxA/dRok4ejH+RwNGbuoD9fCjto=",
        version = "v0.22.4",
    )
    go_repository(
        name = "io_opencensus_go_contrib_exporter_aws",
        build_file_proto_mode = "disable",
        importpath = "contrib.go.opencensus.io/exporter/aws",
        sum = "h1:YsbWYxDZkC7x2OxlsDEYvvEXZ3cBI3qBgUK5BqkZvRw=",
        version = "v0.0.0-20181029163544-2befc13012d0",
    )
    go_repository(
        name = "io_opencensus_go_contrib_exporter_ocagent",
        build_file_proto_mode = "disable",
        importpath = "contrib.go.opencensus.io/exporter/ocagent",
        sum = "h1:Z1n6UAyr0QwM284yUuh5Zd8JlvxUGAhFZcgMJkMPrGM=",
        version = "v0.6.0",
    )
    go_repository(
        name = "io_opencensus_go_contrib_exporter_prometheus",
        build_file_proto_mode = "disable",
        importpath = "contrib.go.opencensus.io/exporter/prometheus",
        sum = "h1:SByaIoWwNgMdPSgl5sMqM2KDE5H/ukPWBRo314xiDvg=",
        version = "v0.1.0",
    )
    go_repository(
        name = "io_opencensus_go_contrib_exporter_stackdriver",
        build_file_proto_mode = "disable",
        importpath = "contrib.go.opencensus.io/exporter/stackdriver",
        sum = "h1:RX9W6FelAqTVnBi/bRXJLXr9n18v4QkQwZYIdnNS51I=",
        version = "v0.13.1",
    )
    go_repository(
        name = "io_opencensus_go_contrib_exporter_zipkin",
        build_file_proto_mode = "disable",
        importpath = "contrib.go.opencensus.io/exporter/zipkin",
        sum = "h1:PR+1zWqY8ceXs1qDQQIlgXe+sdiwCf0n32bH4+Epk8g=",
        version = "v0.1.1",
    )
    go_repository(
        name = "io_opencensus_go_contrib_integrations_ocsql",
        build_file_proto_mode = "disable",
        importpath = "contrib.go.opencensus.io/integrations/ocsql",
        sum = "h1:kfg5Yyy1nYUrqzyfW5XX+dzMASky8IJXhtHe0KTYNS4=",
        version = "v0.1.4",
    )
    go_repository(
        name = "io_opencensus_go_contrib_resource",
        build_file_proto_mode = "disable",
        importpath = "contrib.go.opencensus.io/resource",
        sum = "h1:4r2CANuYhKGmYWP02+5E94rLRcS/YeD+KlxSrOsMxk0=",
        version = "v0.1.1",
    )
    # --- rsc.io, vbom.ml, Apache Thrift, bazil.org/fuse, go4.org ---
    go_repository(
        name = "io_rsc_binaryregexp",
        build_file_proto_mode = "disable",
        importpath = "rsc.io/binaryregexp",
        sum = "h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=",
        version = "v0.2.0",
    )
    go_repository(
        name = "io_rsc_letsencrypt",
        build_file_proto_mode = "disable",
        importpath = "rsc.io/letsencrypt",
        sum = "h1:H7xDfhkaFFSYEJlKeq38RwX2jYcnTeHuDQyT+mMNMwM=",
        version = "v0.0.3",
    )
    go_repository(
        name = "io_rsc_quote_v3",
        build_file_proto_mode = "disable",
        importpath = "rsc.io/quote/v3",
        sum = "h1:9JKUTTIUgS6kzR9mK1YuGKv6Nl+DijDNIc0ghT58FaY=",
        version = "v3.1.0",
    )
    go_repository(
        name = "io_rsc_sampler",
        build_file_proto_mode = "disable",
        importpath = "rsc.io/sampler",
        sum = "h1:7uVkIFmeBqHfdjD+gZwtXXI+RODJ2Wc4O7MPEh/QiW4=",
        version = "v1.3.0",
    )
    go_repository(
        name = "ml_vbom_util",
        build_file_proto_mode = "disable",
        importpath = "vbom.ml/util",
        sum = "h1:O69FD9pJA4WUZlEwYatBEEkRWKQ5cKodWpdKTrCS/iQ=",
        version = "v0.0.0-20180919145318-efcd4e0f9787",
    )
    go_repository(
        name = "org_apache_git_thrift_git",
        build_file_proto_mode = "disable",
        importpath = "git.apache.org/thrift.git",
        sum = "h1:CMxsZlAmxKs+VAZMlDDL0wXciMblJcutQbEe3A9CYUM=",
        version = "v0.12.0",
    )
    go_repository(
        name = "org_bazil_fuse",
        build_file_proto_mode = "disable",
        importpath = "bazil.org/fuse",
        sum = "h1:FNCRpXiquG1aoyqcIWVFmpTSKVcx2bQD38uZZeGtdlw=",
        version = "v0.0.0-20180421153158-65cc252bf669",
    )
    go_repository(
        name = "org_go4",
        build_file_proto_mode = "disable",
        importpath = "go4.org",
        sum = "h1:iqAGo78tVOJXELHQFRjR6TMwItrvXH4hrGJ32I/NFF8=",
        version = "v0.0.0-20201209231011-d4a079459e60",
    )
    # --- google.golang.org repositories (API clients, gRPC, protobuf) ---
    go_repository(
        name = "org_golang_google_api",
        build_file_proto_mode = "disable",
        importpath = "google.golang.org/api",
        sum = "h1:Le77IccnTqEa8ryp9wIpX5W3zYm7Gf9LhOp9PHcwFts=",
        version = "v0.32.0",
    )
    go_repository(
        name = "org_golang_google_appengine",
        build_file_proto_mode = "disable",
        importpath = "google.golang.org/appengine",
        sum = "h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=",
        version = "v1.6.7",
    )
    go_repository(
        name = "org_golang_google_cloud",
        build_file_proto_mode = "disable",
        importpath = "google.golang.org/cloud",
        sum = "h1:Cpp2P6TPjujNoC5M2KHY6g7wfyLYfIWRZaSdIKfDasA=",
        version = "v0.0.0-20151119220103-975617b05ea8",
    )
    go_repository(
        name = "org_golang_google_genproto",
        build_file_proto_mode = "disable",
        importpath = "google.golang.org/genproto",
        sum = "h1:pOwg4OoaRYScjmR4LlLgdtnyoHYTSAVhhqe5uPdpII8=",
        version = "v0.0.0-20201110150050-8816d57aaa9a",
    )
    go_repository(
        name = "org_golang_google_grpc",
        build_file_proto_mode = "disable",
        importpath = "google.golang.org/grpc",
        sum = "h1:zWTV+LMdc3kaiJMSTOFz2UgSBgx8RNQoTGiZu3fR9S0=",
        version = "v1.32.0",
    )
    go_repository(
        name = "org_golang_google_protobuf",
        build_file_proto_mode = "disable",
        importpath = "google.golang.org/protobuf",
        sum = "h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=",
        version = "v1.26.0",
    )
    # --- golang.org/x repositories ---
    go_repository(
        name = "org_golang_x_crypto",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/crypto",
        sum = "h1:It14KIkyBFYkHkwZ7k45minvA9aorojkyjGk9KJ5B/w=",
        version = "v0.0.0-20210322153248-0c34fe9e7dc2",
    )
    go_repository(
        name = "org_golang_x_exp",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/exp",
        sum = "h1:QE6XYQK6naiK1EPAe1g/ILLxN5RBoH5xkJk3CqlMI/Y=",
        version = "v0.0.0-20200224162631-6cc2880d07d6",
    )
    go_repository(
        name = "org_golang_x_image",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/image",
        sum = "h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=",
        version = "v0.0.0-20190802002840-cff245a6509b",
    )
    go_repository(
        name = "org_golang_x_lint",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/lint",
        # `replace` points at the same module path; presumably mirrors a go.mod
        # replace directive pinning x/lint to this 2019 pseudo-version — verify.
        replace = "golang.org/x/lint",
        sum = "h1:QzoH/1pFpZguR8NrRHLcO6jKqfv2zpuSqZLgdm7ZmjI=",
        version = "v0.0.0-20190409202823-959b441ac422",
    )
    go_repository(
        name = "org_golang_x_mobile",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/mobile",
        sum = "h1:b373EGXtj0o+ssqkOkdVphTCZ/fVg2LwhctJn2QQbqA=",
        version = "v0.0.0-20190806162312-597adff16ade",
    )
    go_repository(
        name = "org_golang_x_mod",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/mod",
        sum = "h1:8pl+sMODzuvGJkmj2W4kZihvVb5mKm8pB/X44PIQHv8=",
        version = "v0.4.0",
    )
    go_repository(
        name = "org_golang_x_net",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/net",
        sum = "h1:DzZ89McO9/gWPsQXS/FVKAlG02ZjaQ6AlZRBimEYOd0=",
        version = "v0.0.0-20210428140749-89ef3d95e781",
    )
    go_repository(
        name = "org_golang_x_oauth2",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/oauth2",
        sum = "h1:ld7aEMNHoBnnDAX15v1T6z31v8HwR2A9FYOuAhWqkwc=",
        version = "v0.0.0-20200902213428-5d25da1a8d43",
    )
    go_repository(
        name = "org_golang_x_sync",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/sync",
        sum = "h1:DcqTD9SDLc+1P/r1EmRBwnVsrOwW+kk2vWf9n+1sGhs=",
        version = "v0.0.0-20201207232520-09787c993a3a",
    )
    go_repository(
        name = "org_golang_x_sys",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/sys",
        sum = "h1:JWgyZ1qgdTaF3N3oxC+MdTV7qvEEgHo3otj+HB5CM7Q=",
        version = "v0.0.0-20210603081109-ebe580a85c40",
    )
    go_repository(
        name = "org_golang_x_term",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/term",
        sum = "h1:SZxvLBoTP5yHO3Frd4z4vrF+DBX9vMVanchswa69toE=",
        version = "v0.0.0-20210220032956-6a3ed077a48d",
    )
    go_repository(
        name = "org_golang_x_text",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/text",
        sum = "h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=",
        version = "v0.3.6",
    )
    go_repository(
        name = "org_golang_x_time",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/time",
        sum = "h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE=",
        version = "v0.0.0-20210220033141-f8bda1e9f3ba",
    )
    go_repository(
        name = "org_golang_x_tools",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/tools",
        sum = "h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=",
        version = "v0.1.0",
    )
    go_repository(
        name = "org_golang_x_xerrors",
        build_file_proto_mode = "disable",
        importpath = "golang.org/x/xerrors",
        sum = "h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=",
        version = "v0.0.0-20200804184101-5ec99f83aff1",
    )
    # --- gonum, modernc.org, MongoDB driver, go.uber.org, Helm, gotest.tools ---
    go_repository(
        name = "org_gonum_v1_gonum",
        build_file_proto_mode = "disable",
        importpath = "gonum.org/v1/gonum",
        sum = "h1:OB/uP/Puiu5vS5QMRPrXCDWUPb+kt8f1KW8oQzFejQw=",
        version = "v0.0.0-20190331200053-3d26580ed485",
    )
    go_repository(
        name = "org_gonum_v1_netlib",
        build_file_proto_mode = "disable",
        importpath = "gonum.org/v1/netlib",
        sum = "h1:jRyg0XfpwWlhEV8mDfdNGBeSJM2fuyh9Yjrnd8kF2Ts=",
        version = "v0.0.0-20190331212654-76723241ea4e",
    )
    go_repository(
        name = "org_modernc_cc",
        build_file_proto_mode = "disable",
        importpath = "modernc.org/cc",
        sum = "h1:nPibNuDEx6tvYrUAtvDTTw98rx5juGsa5zuDnKwEEQQ=",
        version = "v1.0.0",
    )
    go_repository(
        name = "org_modernc_golex",
        build_file_proto_mode = "disable",
        importpath = "modernc.org/golex",
        sum = "h1:wWpDlbK8ejRfSyi0frMyhilD3JBvtcx2AdGDnU+JtsE=",
        version = "v1.0.0",
    )
    go_repository(
        name = "org_modernc_mathutil",
        build_file_proto_mode = "disable",
        importpath = "modernc.org/mathutil",
        sum = "h1:93vKjrJopTPrtTNpZ8XIovER7iCIH1QU7wNbOQXC60I=",
        version = "v1.0.0",
    )
    go_repository(
        name = "org_modernc_strutil",
        build_file_proto_mode = "disable",
        importpath = "modernc.org/strutil",
        sum = "h1:XVFtQwFVwc02Wk+0L/Z/zDDXO81r5Lhe6iMKmGX3KhE=",
        version = "v1.0.0",
    )
    go_repository(
        name = "org_modernc_xc",
        build_file_proto_mode = "disable",
        importpath = "modernc.org/xc",
        sum = "h1:7ccXrupWZIS3twbUGrtKmHS2DXY6xegFua+6O3xgAFU=",
        version = "v1.0.0",
    )
    go_repository(
        name = "org_mongodb_go_mongo_driver",
        build_file_proto_mode = "disable",
        importpath = "go.mongodb.org/mongo-driver",
        sum = "h1:jxcFYjlkl8xaERsgLo+RNquI0epW6zuy/ZRQs6jnrFA=",
        version = "v1.1.2",
    )
    go_repository(
        name = "org_uber_go_atomic",
        build_file_proto_mode = "disable",
        importpath = "go.uber.org/atomic",
        sum = "h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=",
        version = "v1.7.0",
    )
    go_repository(
        name = "org_uber_go_goleak",
        build_file_proto_mode = "disable",
        importpath = "go.uber.org/goleak",
        sum = "h1:z+mqJhf6ss6BSfSM671tgKyZBFPTTJM+HLxnhPC3wu0=",
        version = "v1.1.10",
    )
    go_repository(
        name = "org_uber_go_multierr",
        build_file_proto_mode = "disable",
        importpath = "go.uber.org/multierr",
        sum = "h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=",
        version = "v1.6.0",
    )
    go_repository(
        name = "org_uber_go_tools",
        build_file_proto_mode = "disable",
        importpath = "go.uber.org/tools",
        sum = "h1:0mgffUl7nfd+FpvXMVz4IDEaUSmT1ysygQC7qYo7sG4=",
        version = "v0.0.0-20190618225709-2cfd321de3ee",
    )
    go_repository(
        name = "org_uber_go_zap",
        build_file_proto_mode = "disable",
        importpath = "go.uber.org/zap",
        sum = "h1:MTjgFu6ZLKvY6Pvaqk97GlxNBuMpV4Hy/3P6tRGlI2U=",
        version = "v1.17.0",
    )
    go_repository(
        name = "sh_helm_helm_v3",
        build_file_proto_mode = "disable",
        importpath = "helm.sh/helm/v3",
        sum = "h1:aykwPMVyQyncZ8iLNVMXgJ1l3c6W0+LSOPmqp8JdCjs=",
        version = "v3.1.1",
    )
    go_repository(
        name = "tools_gotest",
        build_file_proto_mode = "disable",
        importpath = "gotest.tools",
        sum = "h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=",
        version = "v2.2.0+incompatible",
    )
    go_repository(
        name = "tools_gotest_v3",
        build_file_proto_mode = "disable",
        importpath = "gotest.tools/v3",
        sum = "h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0=",
        version = "v3.0.3",
    )
    go_repository(
        name = "xyz_gomodules_jsonpatch_v2",
        build_file_proto_mode = "disable",
        importpath = "gomodules.xyz/jsonpatch/v2",
        sum = "h1:4pT439QV83L+G9FkcCriY6EkpcK6r6bK+A5FBUMI7qY=",
        version = "v2.2.0",
    )
| 38.474667 | 81 | 0.641536 | load("@bazel_gazelle//:deps.bzl", "go_repository")
def go_deps():
go_repository(
name = "ag_pack_amqp",
build_file_proto_mode = "disable",
importpath = "pack.ag/amqp",
sum = "h1:cuNDWLUTbKRtEZwhB0WQBXf9pGbm87pUBXQhvcFxBWg=",
version = "v0.11.2",
)
go_repository(
name = "cc_mvdan_interfacer",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/interfacer",
sum = "h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I=",
version = "v0.0.0-20180901003855-c20040233aed",
)
go_repository(
name = "cc_mvdan_lint",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/lint",
sum = "h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo=",
version = "v0.0.0-20170908181259-adc824a0674b",
)
go_repository(
name = "cc_mvdan_unparam",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/unparam",
sum = "h1:kAREL6MPwpsk1/PQPFD3Eg7WAQR5mPTWZJaBiG5LDbY=",
version = "v0.0.0-20200501210554-b37ab49443f7",
)
go_repository(
name = "cc_mvdan_xurls_v2",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/xurls/v2",
sum = "h1:r1zSOSNS/kqtpmATyMMMvaZ4/djsesbYz5kr0+qMRWc=",
version = "v2.0.0",
)
go_repository(
name = "co_honnef_go_tools",
build_file_proto_mode = "disable",
importpath = "honnef.co/go/tools",
sum = "h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=",
version = "v0.0.1-2020.1.4",
)
go_repository(
name = "com_github_agnivade_levenshtein",
build_file_proto_mode = "disable",
importpath = "github.com/agnivade/levenshtein",
sum = "h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_alcortesm_tgz",
build_file_proto_mode = "disable",
importpath = "github.com/alcortesm/tgz",
sum = "h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=",
version = "v0.0.0-20161220082320-9c5fe88206d7",
)
go_repository(
name = "com_github_alecthomas_kingpin",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/kingpin",
sum = "h1:5svnBTFgJjZvGKyYBtMB0+m5wvrbUHiqye8wRJMlnYI=",
version = "v2.2.6+incompatible",
)
go_repository(
name = "com_github_alecthomas_template",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/template",
sum = "h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM=",
version = "v0.0.0-20190718012654-fb15b899a751",
)
go_repository(
name = "com_github_alecthomas_units",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/units",
sum = "h1:UQZhZ2O0vMHr2cI+DC1Mbh0TJxzA3RcLoMsFw+aXw7E=",
version = "v0.0.0-20190924025748-f65c72e2690d",
)
go_repository(
name = "com_github_andreyvit_diff",
build_file_proto_mode = "disable",
importpath = "github.com/andreyvit/diff",
sum = "h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=",
version = "v0.0.0-20170406064948-c7f18ee00883",
)
go_repository(
name = "com_github_andybalholm_brotli",
build_file_proto_mode = "disable",
importpath = "github.com/andybalholm/brotli",
sum = "h1:bZ28Hqta7TFAK3Q08CMvv8y3/8ATaEqv2nGoc6yff6c=",
version = "v0.0.0-20190621154722-5f990b63d2d6",
)
go_repository(
name = "com_github_andygrunwald_go_gerrit",
build_file_proto_mode = "disable",
importpath = "github.com/andygrunwald/go-gerrit",
sum = "h1:7gokoTWteZhP1t2f0OzrFFXlyL8o0+b0r4ZaRV9PXOs=",
version = "v0.0.0-20210709065208-9d38b0be0268",
)
go_repository(
name = "com_github_andygrunwald_go_jira",
build_file_proto_mode = "disable",
importpath = "github.com/andygrunwald/go-jira",
sum = "h1:vvIImGgX32bHfoiyUwkNo+/YrPnRczNarvhLOncP6dE=",
version = "v1.13.0",
)
go_repository(
name = "com_github_anmitsu_go_shlex",
build_file_proto_mode = "disable",
importpath = "github.com/anmitsu/go-shlex",
sum = "h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=",
version = "v0.0.0-20161002113705-648efa622239",
)
go_repository(
name = "com_github_antihax_optional",
build_file_proto_mode = "disable",
importpath = "github.com/antihax/optional",
sum = "h1:uZuxRZCz65cG1o6K/xUqImNcYKtmk9ylqaH0itMSvzA=",
version = "v0.0.0-20180407024304-ca021399b1a6",
)
go_repository(
name = "com_github_apache_thrift",
build_file_proto_mode = "disable",
importpath = "github.com/apache/thrift",
sum = "h1:pODnxUFNcjP9UTLZGTdeh+j16A8lJbRvD3rOtrk/7bs=",
version = "v0.12.0",
)
go_repository(
name = "com_github_apex_log",
build_file_proto_mode = "disable",
importpath = "github.com/apex/log",
sum = "h1:1fyfbPvUwD10nMoh3hY6MXzvZShJQn9/ck7ATgAt5pA=",
version = "v1.3.0",
)
go_repository(
name = "com_github_apex_logs",
build_file_proto_mode = "disable",
importpath = "github.com/apex/logs",
sum = "h1:KmEBVwfDUOTFcBO8cfkJYwdQ5487UZSN+GteOGPmiro=",
version = "v0.0.4",
)
go_repository(
name = "com_github_aphistic_golf",
build_file_proto_mode = "disable",
importpath = "github.com/aphistic/golf",
sum = "h1:2KLQMJ8msqoPHIPDufkxVcoTtcmE5+1sL9950m4R9Pk=",
version = "v0.0.0-20180712155816-02c07f170c5a",
)
go_repository(
name = "com_github_aphistic_sweet",
build_file_proto_mode = "disable",
importpath = "github.com/aphistic/sweet",
sum = "h1:I4z+fAUqvKfvZV/CHi5dV0QuwbmIvYYFDjG0Ss5QpAs=",
version = "v0.2.0",
)
go_repository(
name = "com_github_armon_circbuf",
build_file_proto_mode = "disable",
importpath = "github.com/armon/circbuf",
sum = "h1:QEF07wC0T1rKkctt1RINW/+RMTVmiwxETico2l3gxJA=",
version = "v0.0.0-20150827004946-bbbad097214e",
)
go_repository(
name = "com_github_armon_consul_api",
build_file_proto_mode = "disable",
importpath = "github.com/armon/consul-api",
sum = "h1:G1bPvciwNyF7IUmKXNt9Ak3m6u9DE1rF+RmtIkBpVdA=",
version = "v0.0.0-20180202201655-eb2c6b5be1b6",
)
go_repository(
name = "com_github_armon_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-metrics",
sum = "h1:8GUt8eRujhVEGZFFEjBj46YV4rDjvGrNxb0KMWYkL2I=",
version = "v0.0.0-20180917152333-f0300d1749da",
)
go_repository(
name = "com_github_armon_go_radix",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-radix",
sum = "h1:BUAU3CGlLvorLI26FmByPp2eC2qla6E1Tw+scpcg/to=",
version = "v0.0.0-20180808171621-7fddfc383310",
)
go_repository(
name = "com_github_armon_go_socks5",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-socks5",
sum = "h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=",
version = "v0.0.0-20160902184237-e75332964ef5",
)
go_repository(
name = "com_github_asaskevich_govalidator",
build_file_proto_mode = "disable",
importpath = "github.com/asaskevich/govalidator",
sum = "h1:zV3ejI06GQ59hwDQAvmK1qxOQGB3WuVTRoY0okPTAv0=",
version = "v0.0.0-20200108200545-475eaeb16496",
)
go_repository(
name = "com_github_aws_aws_k8s_tester",
build_file_proto_mode = "disable",
importpath = "github.com/aws/aws-k8s-tester",
sum = "h1:Zr5NWiRK5fhmRIlhrsTwrY8yB488FyN6iulci2D7VaI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_aws_aws_sdk_go",
build_file_proto_mode = "disable",
importpath = "github.com/aws/aws-sdk-go",
sum = "h1:cyZp8TvUbH9rrShdrwULtCj4pB5szddrw9aKHUsw1Ic=",
version = "v1.37.22",
)
go_repository(
name = "com_github_aybabtme_rgbterm",
build_file_proto_mode = "disable",
importpath = "github.com/aybabtme/rgbterm",
sum = "h1:WWB576BN5zNSZc/M9d/10pqEx5VHNhaQ/yOVAkmj5Yo=",
version = "v0.0.0-20170906152045-cc83f3b3ce59",
)
go_repository(
name = "com_github_azure_azure_amqp_common_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-amqp-common-go/v2",
sum = "h1:+QbFgmWCnPzdaRMfsI0Yb6GrRdBj5jVL8N3EXuEUcBQ=",
version = "v2.1.0",
)
go_repository(
name = "com_github_azure_azure_pipeline_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-pipeline-go",
sum = "h1:6oiIS9yaG6XCCzhgAgKFfIWyo4LLCiDhZot6ltoThhY=",
version = "v0.2.2",
)
go_repository(
name = "com_github_azure_azure_sdk_for_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-sdk-for-go",
sum = "h1:m4oQOm3HXtQh2Ipata+pLSS1kGUD/7ikkvNq81XM/7s=",
version = "v46.3.0+incompatible",
)
go_repository(
name = "com_github_azure_azure_service_bus_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-service-bus-go",
sum = "h1:G1qBLQvHCFDv9pcpgwgFkspzvnGknJRR0PYJ9ytY/JA=",
version = "v0.9.1",
)
go_repository(
name = "com_github_azure_azure_storage_blob_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-storage-blob-go",
sum = "h1:53qhf0Oxa0nOjgbDeeYPUeyiNmafAFEY95rZLK0Tj6o=",
version = "v0.8.0",
)
go_repository(
name = "com_github_azure_go_ansiterm",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-ansiterm",
sum = "h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8=",
version = "v0.0.0-20170929234023-d6e3b3328b78",
)
go_repository(
name = "com_github_azure_go_autorest",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest",
replace = "github.com/Azure/go-autorest",
sum = "h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=",
version = "v14.2.0+incompatible",
)
go_repository(
name = "com_github_azure_go_autorest_autorest",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest",
sum = "h1:gI8ytXbxMfI+IVbI9mP2JGCTXIuhHLgRlvQ9X4PsnHE=",
version = "v0.11.12",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_adal",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/adal",
sum = "h1:Y3bBUV4rTuxenJJs41HU3qmqsb+auo+a3Lz+PlJPpL0=",
version = "v0.9.5",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_azure_auth",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/azure/auth",
sum = "h1:iM6UAvjR97ZIeR93qTcwpKNMpV+/FTWjwEbuPD495Tk=",
version = "v0.4.2",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_azure_cli",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/azure/cli",
sum = "h1:LXl088ZQlP0SBppGFsRZonW6hSvwgL5gRByMbvUbx8U=",
version = "v0.3.1",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_date",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/date",
sum = "h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw=",
version = "v0.3.0",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_mocks",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/mocks",
sum = "h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk=",
version = "v0.4.1",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_to",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/to",
sum = "h1:oXVqrxakqqV1UZdSazDOPOLvOIz+XA683u8EctwboHk=",
version = "v0.4.0",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_validation",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/validation",
sum = "h1:3I9AAI63HfcLtphd9g39ruUwRI+Ca+z/f36KHPFRUss=",
version = "v0.3.0",
)
go_repository(
name = "com_github_azure_go_autorest_logger",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/logger",
sum = "h1:e4RVHVZKC5p6UANLJHkM4OfR1UKZPj8Wt8Pcx+3oqrE=",
version = "v0.2.0",
)
go_repository(
name = "com_github_azure_go_autorest_tracing",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/tracing",
sum = "h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo=",
version = "v0.6.0",
)
go_repository(
name = "com_github_beorn7_perks",
build_file_proto_mode = "disable",
importpath = "github.com/beorn7/perks",
sum = "h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=",
version = "v1.0.1",
)
go_repository(
name = "com_github_bgentry_speakeasy",
build_file_proto_mode = "disable",
importpath = "github.com/bgentry/speakeasy",
sum = "h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=",
version = "v0.1.0",
)
go_repository(
name = "com_github_bitly_go_simplejson",
build_file_proto_mode = "disable",
importpath = "github.com/bitly/go-simplejson",
sum = "h1:6IH+V8/tVMab511d5bn4M7EwGXZf9Hj6i2xSwkNEM+Y=",
version = "v0.5.0",
)
go_repository(
name = "com_github_bketelsen_crypt",
build_file_proto_mode = "disable",
importpath = "github.com/bketelsen/crypt",
sum = "h1:+0HFd5KSZ/mm3JmhmrDukiId5iR6w4+BdFtfSy4yWIc=",
version = "v0.0.3-0.20200106085610-5cbc8cc4026c",
)
go_repository(
name = "com_github_blakesmith_ar",
build_file_proto_mode = "disable",
importpath = "github.com/blakesmith/ar",
sum = "h1:m935MPodAbYS46DG4pJSv7WO+VECIWUQ7OJYSoTrMh4=",
version = "v0.0.0-20190502131153-809d4375e1fb",
)
go_repository(
name = "com_github_blang_semver",
build_file_proto_mode = "disable",
importpath = "github.com/blang/semver",
sum = "h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ=",
version = "v3.5.1+incompatible",
)
go_repository(
name = "com_github_bmizerany_assert",
build_file_proto_mode = "disable",
importpath = "github.com/bmizerany/assert",
sum = "h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=",
version = "v0.0.0-20160611221934-b7ed37b82869",
)
go_repository(
name = "com_github_bmizerany_perks",
build_file_proto_mode = "disable",
importpath = "github.com/bmizerany/perks",
sum = "h1:AP/Y7sqYicnjGDfD5VcY4CIfh1hRXBUavxrvELjTiOE=",
version = "v0.0.0-20141205001514-d9a9656a3a4b",
)
go_repository(
name = "com_github_bombsimon_wsl_v2",
build_file_proto_mode = "disable",
importpath = "github.com/bombsimon/wsl/v2",
sum = "h1:/DdSteYCq4lPX+LqDg7mdoxm14UxzZPoDT0taYc3DTU=",
version = "v2.2.0",
)
go_repository(
name = "com_github_bombsimon_wsl_v3",
build_file_proto_mode = "disable",
importpath = "github.com/bombsimon/wsl/v3",
sum = "h1:E5SRssoBgtVFPcYWUOFJEcgaySgdtTNYzsSKDOY7ss8=",
version = "v3.1.0",
)
go_repository(
name = "com_github_bshuster_repo_logrus_logstash_hook",
build_file_proto_mode = "disable",
importpath = "github.com/bshuster-repo/logrus-logstash-hook",
sum = "h1:pgAtgj+A31JBVtEHu2uHuEx0n+2ukqUJnS2vVe5pQNA=",
version = "v0.4.1",
)
go_repository(
name = "com_github_bugsnag_bugsnag_go",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/bugsnag-go",
sum = "h1:rFt+Y/IK1aEZkEHchZRSq9OQbsSzIT/OrI8YFFmRIng=",
version = "v0.0.0-20141110184014-b1d153021fcd",
)
go_repository(
name = "com_github_bugsnag_osext",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/osext",
sum = "h1:otBG+dV+YK+Soembjv71DPz3uX/V/6MMlSyD9JBQ6kQ=",
version = "v0.0.0-20130617224835-0dd3f918b21b",
)
go_repository(
name = "com_github_bugsnag_panicwrap",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/panicwrap",
sum = "h1:nvj0OLI3YqYXer/kZD8Ri1aaunCxIEsOst1BVJswV0o=",
version = "v0.0.0-20151223152923-e2c28503fcd0",
)
go_repository(
name = "com_github_burntsushi_toml",
build_file_proto_mode = "disable",
importpath = "github.com/BurntSushi/toml",
sum = "h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=",
version = "v0.3.1",
)
go_repository(
name = "com_github_burntsushi_xgb",
build_file_proto_mode = "disable",
importpath = "github.com/BurntSushi/xgb",
sum = "h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc=",
version = "v0.0.0-20160522181843-27f122750802",
)
go_repository(
name = "com_github_bwmarrin_snowflake",
build_file_proto_mode = "disable",
importpath = "github.com/bwmarrin/snowflake",
sum = "h1:dRbqXFjM10uA3wdrVZ8Kh19uhciRMOroUYJ7qAqDLhY=",
version = "v0.0.0",
)
go_repository(
name = "com_github_caarlos0_ctrlc",
build_file_proto_mode = "disable",
importpath = "github.com/caarlos0/ctrlc",
sum = "h1:2DtF8GSIcajgffDFJzyG15vO+1PuBWOMUdFut7NnXhw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_campoy_unique",
build_file_proto_mode = "disable",
importpath = "github.com/campoy/unique",
sum = "h1:V9a67dfYqPLAvzk5hMQOXYJlZ4SLIXgyKIE+ZiHzgGQ=",
version = "v0.0.0-20180121183637-88950e537e7e",
)
go_repository(
name = "com_github_cavaliercoder_go_cpio",
build_file_proto_mode = "disable",
importpath = "github.com/cavaliercoder/go-cpio",
sum = "h1:hHg27A0RSSp2Om9lubZpiMgVbvn39bsUmW9U5h0twqc=",
version = "v0.0.0-20180626203310-925f9528c45e",
)
go_repository(
name = "com_github_census_instrumentation_opencensus_proto",
build_file_proto_mode = "disable",
importpath = "github.com/census-instrumentation/opencensus-proto",
sum = "h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk=",
version = "v0.2.1",
)
go_repository(
name = "com_github_cespare_xxhash",
build_file_proto_mode = "disable",
importpath = "github.com/cespare/xxhash",
sum = "h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=",
version = "v1.1.0",
)
go_repository(
name = "com_github_cespare_xxhash_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cespare/xxhash/v2",
sum = "h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=",
version = "v2.1.1",
)
go_repository(
name = "com_github_chai2010_gettext_go",
build_file_proto_mode = "disable",
importpath = "github.com/chai2010/gettext-go",
sum = "h1:7aWHqerlJ41y6FOsEUvknqgXnGmJyJSbjhAWq5pO4F8=",
version = "v0.0.0-20160711120539-c6fed771bfd5",
)
go_repository(
name = "com_github_chzyer_logex",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/logex",
sum = "h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=",
version = "v1.1.10",
)
go_repository(
name = "com_github_chzyer_readline",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/readline",
sum = "h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=",
version = "v0.0.0-20180603132655-2972be24d48e",
)
go_repository(
name = "com_github_chzyer_test",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/test",
sum = "h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=",
version = "v0.0.0-20180213035817-a1ea475d72b1",
)
go_repository(
name = "com_github_cihub_seelog",
build_file_proto_mode = "disable",
importpath = "github.com/cihub/seelog",
sum = "h1:kHaBemcxl8o/pQ5VM1c8PVE1PubbNx3mjUr09OqWGCs=",
version = "v0.0.0-20170130134532-f561c5e57575",
)
go_repository(
name = "com_github_clarketm_json",
build_file_proto_mode = "disable",
importpath = "github.com/clarketm/json",
sum = "h1:0JketcMdLC16WGnRGJiNmTXuQznDEQaiknxSPRBxg+k=",
version = "v1.13.4",
)
go_repository(
name = "com_github_client9_misspell",
build_file_proto_mode = "disable",
importpath = "github.com/client9/misspell",
sum = "h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=",
version = "v0.3.4",
)
go_repository(
name = "com_github_cloudevents_sdk_go",
build_file_proto_mode = "disable",
importpath = "github.com/cloudevents/sdk-go",
sum = "h1:gS5I0s2qPmdc4GBPlUmzZU7RH30BaiOdcRJ1RkXnPrc=",
version = "v1.0.0",
)
go_repository(
name = "com_github_cloudevents_sdk_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cloudevents/sdk-go/v2",
sum = "h1:AUdGJwaSUnA+VvepKqgjy6XDkPcf0hf/3L7icEs1ibs=",
version = "v2.0.0",
)
go_repository(
name = "com_github_cloudflare_cloudflare_go",
build_file_proto_mode = "disable",
importpath = "github.com/cloudflare/cloudflare-go",
sum = "h1:bhMGoNhAg21DuqJjU9jQepRRft6vYfo6pejT3NN4V6A=",
version = "v0.13.2",
)
go_repository(
name = "com_github_cncf_udpa_go",
build_file_proto_mode = "disable",
importpath = "github.com/cncf/udpa/go",
sum = "h1:WBZRG4aNOuI15bLRrCgN8fCq8E5Xuty6jGbmSNEvSsU=",
version = "v0.0.0-20191209042840-269d4d468f6f",
)
go_repository(
name = "com_github_cockroachdb_datadriven",
build_file_proto_mode = "disable",
importpath = "github.com/cockroachdb/datadriven",
sum = "h1:OaNxuTZr7kxeODyLWsRMC+OD03aFUH+mW6r2d+MWa5Y=",
version = "v0.0.0-20190809214429-80d97fb3cbaa",
)
go_repository(
name = "com_github_containerd_cgroups",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/cgroups",
sum = "h1:tSNMc+rJDfmYntojat8lljbt1mgKNpTxUZJsSzJ9Y1s=",
version = "v0.0.0-20190919134610-bf292b21730f",
)
go_repository(
name = "com_github_containerd_console",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/console",
sum = "h1:uict5mhHFTzKLUCufdSLym7z/J0CbBJT59lYbP9wtbg=",
version = "v0.0.0-20180822173158-c12b1e7919c1",
)
go_repository(
name = "com_github_containerd_containerd",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/containerd",
sum = "h1:LoIzb5y9x5l8VKAlyrbusNPXqBY0+kviRloxFUMFwKc=",
version = "v1.3.3",
)
go_repository(
name = "com_github_containerd_continuity",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/continuity",
sum = "h1:kIFnQBO7rQ0XkMe6xEwbybYHBEaWmh/f++laI6Emt7M=",
version = "v0.0.0-20200107194136-26c1120b8d41",
)
go_repository(
name = "com_github_containerd_fifo",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/fifo",
sum = "h1:PUD50EuOMkXVcpBIA/R95d56duJR9VxhwncsFbNnxW4=",
version = "v0.0.0-20190226154929-a9fb20d87448",
)
go_repository(
name = "com_github_containerd_go_runc",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/go-runc",
sum = "h1:esQOJREg8nw8aXj6uCN5dfW5cKUBiEJ/+nni1Q/D/sw=",
version = "v0.0.0-20180907222934-5a6d9f37cfa3",
)
go_repository(
name = "com_github_containerd_ttrpc",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/ttrpc",
sum = "h1:dlfGmNcE3jDAecLqwKPMNX6nk2qh1c1Vg1/YTzpOOF4=",
version = "v0.0.0-20190828154514-0e0f228740de",
)
go_repository(
name = "com_github_containerd_typeurl",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/typeurl",
sum = "h1:JNn81o/xG+8NEo3bC/vx9pbi/g2WI8mtP2/nXzu297Y=",
version = "v0.0.0-20180627222232-a93fcdb778cd",
)
go_repository(
name = "com_github_coreos_bbolt",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/bbolt",
sum = "h1:n6AiVyVRKQFNb6mJlwESEvvLoDyiTzXX7ORAUlkeBdY=",
version = "v1.3.3",
)
go_repository(
name = "com_github_coreos_etcd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/etcd",
sum = "h1:f/Z3EoDSx1yjaIjLQGo1diYUlQYSBrrAQ5vP8NjwXwo=",
version = "v3.3.17+incompatible",
)
go_repository(
name = "com_github_coreos_go_etcd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-etcd",
sum = "h1:bXhRBIXoTm9BYHS3gE0TtQuyNZyeEMux2sDi4oo5YOo=",
version = "v2.0.0+incompatible",
)
go_repository(
name = "com_github_coreos_go_oidc",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-oidc",
sum = "h1:sdJrfw8akMnCuUlaZU3tE/uYXFgfqom8DBE9so9EBsM=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_coreos_go_semver",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-semver",
sum = "h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM=",
version = "v0.3.0",
)
go_repository(
name = "com_github_coreos_go_systemd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-systemd",
sum = "h1:Wf6HqHfScWJN9/ZjdUKyjop4mf3Qdd+1TvvltAvM3m8=",
version = "v0.0.0-20190321100706-95778dfbb74e",
)
go_repository(
name = "com_github_coreos_pkg",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/pkg",
sum = "h1:lBNOc5arjvs8E5mO2tbpBpLoyyu8B6e44T7hJy6potg=",
version = "v0.0.0-20180928190104-399ea9e2e55f",
)
go_repository(
name = "com_github_cpu_goacmedns",
build_file_proto_mode = "disable",
importpath = "github.com/cpu/goacmedns",
sum = "h1:QOeMpIEsIdm1LSASSswjaTf8CXmzcrgy5OeCfHjppA4=",
version = "v0.0.3",
)
go_repository(
name = "com_github_cpuguy83_go_md2man",
build_file_proto_mode = "disable",
importpath = "github.com/cpuguy83/go-md2man",
sum = "h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=",
version = "v1.0.10",
)
go_repository(
name = "com_github_cpuguy83_go_md2man_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cpuguy83/go-md2man/v2",
sum = "h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=",
version = "v2.0.0",
)
go_repository(
name = "com_github_creack_pty",
build_file_proto_mode = "disable",
importpath = "github.com/creack/pty",
sum = "h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw=",
version = "v1.1.11",
)
go_repository(
name = "com_github_cyphar_filepath_securejoin",
build_file_proto_mode = "disable",
importpath = "github.com/cyphar/filepath-securejoin",
sum = "h1:jCwT2GTP+PY5nBz3c/YL5PAIbusElVrPujOBSCj8xRg=",
version = "v0.2.2",
)
go_repository(
name = "com_github_datadog_zstd",
build_file_proto_mode = "disable",
importpath = "github.com/DataDog/zstd",
sum = "h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM=",
version = "v1.4.1",
)
go_repository(
name = "com_github_davecgh_go_spew",
build_file_proto_mode = "disable",
importpath = "github.com/davecgh/go-spew",
sum = "h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=",
version = "v1.1.1",
)
go_repository(
name = "com_github_daviddengcn_go_colortext",
build_file_proto_mode = "disable",
importpath = "github.com/daviddengcn/go-colortext",
sum = "h1:uVsMphB1eRx7xB1njzL3fuMdWRN8HtVzoUOItHMwv5c=",
version = "v0.0.0-20160507010035-511bcaf42ccd",
)
go_repository(
name = "com_github_deislabs_oras",
build_file_proto_mode = "disable",
importpath = "github.com/deislabs/oras",
sum = "h1:If674KraJVpujYR00rzdi0QAmW4BxzMJPVAZJKuhQ0c=",
version = "v0.8.1",
)
go_repository(
name = "com_github_denisenkom_go_mssqldb",
build_file_proto_mode = "disable",
importpath = "github.com/denisenkom/go-mssqldb",
sum = "h1:83Wprp6ROGeiHFAP8WJdI2RoxALQYgdllERc3N5N2DM=",
version = "v0.0.0-20191124224453-732737034ffd",
)
go_repository(
name = "com_github_denverdino_aliyungo",
build_file_proto_mode = "disable",
importpath = "github.com/denverdino/aliyungo",
sum = "h1:p6poVbjHDkKa+wtC8frBMwQtT3BmqGYBjzMwJ63tuR4=",
version = "v0.0.0-20190125010748-a747050bb1ba",
)
go_repository(
name = "com_github_devigned_tab",
build_file_proto_mode = "disable",
importpath = "github.com/devigned/tab",
sum = "h1:3mD6Kb1mUOYeLpJvTVSDwSg5ZsfSxfvxGRTxRsJsITA=",
version = "v0.1.1",
)
go_repository(
name = "com_github_dgrijalva_jwt_go",
build_file_proto_mode = "disable",
importpath = "github.com/dgrijalva/jwt-go",
sum = "h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=",
version = "v3.2.0+incompatible",
)
go_repository(
name = "com_github_dgrijalva_jwt_go_v4",
build_file_proto_mode = "disable",
importpath = "github.com/dgrijalva/jwt-go/v4",
sum = "h1:CaO/zOnF8VvUfEbhRatPcwKVWamvbYd8tQGRWacE9kU=",
version = "v4.0.0-preview1",
)
go_repository(
name = "com_github_dgryski_go_gk",
build_file_proto_mode = "disable",
importpath = "github.com/dgryski/go-gk",
sum = "h1:XOPLOMn/zT4jIgxfxSsoXPxkrzz0FaCHwp33x5POJ+Q=",
version = "v0.0.0-20200319235926-a69029f61654",
)
go_repository(
name = "com_github_dgryski_go_sip13",
build_file_proto_mode = "disable",
importpath = "github.com/dgryski/go-sip13",
sum = "h1:RMLoZVzv4GliuWafOuPuQDKSm1SJph7uCRnnS61JAn4=",
version = "v0.0.0-20181026042036-e10d5fee7954",
)
go_repository(
name = "com_github_digitalocean_godo",
build_file_proto_mode = "disable",
importpath = "github.com/digitalocean/godo",
sum = "h1:IMElzMUpO1dVR8qjSg53+5vDkOLzMbhJt4yTAq7NGCQ=",
version = "v1.44.0",
)
go_repository(
name = "com_github_dimchansky_utfbom",
build_file_proto_mode = "disable",
importpath = "github.com/dimchansky/utfbom",
sum = "h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4=",
version = "v1.1.0",
)
go_repository(
name = "com_github_djarvur_go_err113",
build_file_proto_mode = "disable",
importpath = "github.com/Djarvur/go-err113",
sum = "h1:uCRZZOdMQ0TZPHYTdYpoC0bLYJKPEHPUJ8MeAa51lNU=",
version = "v0.1.0",
)
go_repository(
name = "com_github_djherbis_atime",
build_file_proto_mode = "disable",
importpath = "github.com/djherbis/atime",
sum = "h1:ySLvBAM0EvOGaX7TI4dAM5lWj+RdJUCKtGSEHN8SGBg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_dnaeon_go_vcr",
build_file_proto_mode = "disable",
importpath = "github.com/dnaeon/go-vcr",
sum = "h1:r8L/HqC0Hje5AXMu1ooW8oyQyOFv4GxqpL0nRP7SLLY=",
version = "v1.0.1",
)
go_repository(
name = "com_github_docker_cli",
build_file_proto_mode = "disable",
importpath = "github.com/docker/cli",
sum = "h1:AbI1uj9w4yt6TvfKHfRu7G55KuQe7NCvWPQRKDoXggE=",
version = "v0.0.0-20200210162036-a4bedce16568",
)
go_repository(
name = "com_github_docker_distribution",
build_file_proto_mode = "disable",
importpath = "github.com/docker/distribution",
sum = "h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug=",
version = "v2.7.1+incompatible",
)
go_repository(
name = "com_github_docker_docker",
build_file_proto_mode = "disable",
importpath = "github.com/docker/docker",
sum = "h1:IkZjBSIc8hBjLpqeAbeE5mca5mNgeatLHBy3GO78BWo=",
version = "v1.13.1",
)
go_repository(
name = "com_github_docker_docker_credential_helpers",
build_file_proto_mode = "disable",
importpath = "github.com/docker/docker-credential-helpers",
sum = "h1:zI2p9+1NQYdnG6sMU26EX4aVGlqbInSQxQXLvzJ4RPQ=",
version = "v0.6.3",
)
go_repository(
name = "com_github_docker_go_connections",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-connections",
sum = "h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=",
version = "v0.4.0",
)
go_repository(
name = "com_github_docker_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-metrics",
sum = "h1:yWHOI+vFjEsAakUTSrtqc/SAHrhSkmn48pqjidZX3QA=",
version = "v0.0.0-20180209012529-399ea8c73916",
)
go_repository(
name = "com_github_docker_go_units",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-units",
sum = "h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=",
version = "v0.4.0",
)
go_repository(
name = "com_github_docker_libtrust",
build_file_proto_mode = "disable",
importpath = "github.com/docker/libtrust",
sum = "h1:ZClxb8laGDf5arXfYcAtECDFgAgHklGI8CxgjHnXKJ4=",
version = "v0.0.0-20150114040149-fa567046d9b1",
)
go_repository(
name = "com_github_docker_spdystream",
build_file_proto_mode = "disable",
importpath = "github.com/docker/spdystream",
sum = "h1:cenwrSVm+Z7QLSV/BsnenAOcDXdX4cMv4wP0B/5QbPg=",
version = "v0.0.0-20160310174837-449fdfce4d96",
)
go_repository(
name = "com_github_docopt_docopt_go",
build_file_proto_mode = "disable",
importpath = "github.com/docopt/docopt-go",
sum = "h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=",
version = "v0.0.0-20180111231733-ee0de3bc6815",
)
go_repository(
name = "com_github_dsnet_compress",
build_file_proto_mode = "disable",
importpath = "github.com/dsnet/compress",
sum = "h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=",
version = "v0.0.1",
)
go_repository(
name = "com_github_dsnet_golib",
build_file_proto_mode = "disable",
importpath = "github.com/dsnet/golib",
sum = "h1:tFh1tRc4CA31yP6qDcu+Trax5wW5GuMxvkIba07qVLY=",
version = "v0.0.0-20171103203638-1ea166775780",
)
go_repository(
name = "com_github_dustin_go_humanize",
build_file_proto_mode = "disable",
importpath = "github.com/dustin/go-humanize",
sum = "h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_eapache_go_resiliency",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/go-resiliency",
sum = "h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q=",
version = "v1.2.0",
)
go_repository(
name = "com_github_eapache_go_xerial_snappy",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/go-xerial-snappy",
sum = "h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw=",
version = "v0.0.0-20180814174437-776d5712da21",
)
go_repository(
name = "com_github_eapache_queue",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/queue",
sum = "h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc=",
version = "v1.1.0",
)
go_repository(
name = "com_github_elazarl_goproxy",
build_file_proto_mode = "disable",
importpath = "github.com/elazarl/goproxy",
sum = "h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc=",
version = "v0.0.0-20180725130230-947c36da3153",
)
go_repository(
name = "com_github_emicklei_go_restful",
build_file_proto_mode = "disable",
importpath = "github.com/emicklei/go-restful",
sum = "h1:spTtZBk5DYEvbxMVutUuTyh1Ao2r4iyvLdACqsl/Ljk=",
version = "v2.9.5+incompatible",
)
go_repository(
name = "com_github_emirpasic_gods",
build_file_proto_mode = "disable",
importpath = "github.com/emirpasic/gods",
sum = "h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=",
version = "v1.12.0",
)
go_repository(
name = "com_github_envoyproxy_go_control_plane",
build_file_proto_mode = "disable",
importpath = "github.com/envoyproxy/go-control-plane",
sum = "h1:rEvIZUSZ3fx39WIi3JkQqQBitGwpELBIYWeBVh6wn+E=",
version = "v0.9.4",
)
go_repository(
name = "com_github_envoyproxy_protoc_gen_validate",
build_file_proto_mode = "disable",
importpath = "github.com/envoyproxy/protoc-gen-validate",
sum = "h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A=",
version = "v0.1.0",
)
go_repository(
name = "com_github_erikstmartin_go_testdb",
build_file_proto_mode = "disable",
importpath = "github.com/erikstmartin/go-testdb",
sum = "h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=",
version = "v0.0.0-20160219214506-8d10e4a1bae5",
)
go_repository(
name = "com_github_evanphx_json_patch",
build_file_proto_mode = "disable",
importpath = "github.com/evanphx/json-patch",
sum = "h1:glyUF9yIYtMHzn8xaKw5rMhdWcwsYV8dZHIq5567/xs=",
version = "v4.11.0+incompatible",
)
go_repository(
name = "com_github_exponent_io_jsonpath",
build_file_proto_mode = "disable",
importpath = "github.com/exponent-io/jsonpath",
sum = "h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM=",
version = "v0.0.0-20151013193312-d6023ce2651d",
)
go_repository(
name = "com_github_fatih_camelcase",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/camelcase",
sum = "h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_fatih_color",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/color",
sum = "h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=",
version = "v1.9.0",
)
go_repository(
name = "com_github_fatih_structs",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/structs",
sum = "h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=",
version = "v1.1.0",
)
go_repository(
name = "com_github_felixge_fgprof",
build_file_proto_mode = "disable",
importpath = "github.com/felixge/fgprof",
sum = "h1:E6FUJ2Mlv043ipLOCFqo8+cHo9MhQ203E2cdEK/isEs=",
version = "v0.9.1",
)
go_repository(
name = "com_github_flynn_go_shlex",
build_file_proto_mode = "disable",
importpath = "github.com/flynn/go-shlex",
sum = "h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=",
version = "v0.0.0-20150515145356-3f9db97f8568",
)
go_repository(
name = "com_github_form3tech_oss_jwt_go",
build_file_proto_mode = "disable",
importpath = "github.com/form3tech-oss/jwt-go",
sum = "h1:TcekIExNqud5crz4xD2pavyTgWiPvpYe4Xau31I0PRk=",
version = "v3.2.2+incompatible",
)
go_repository(
name = "com_github_fortytw2_leaktest",
build_file_proto_mode = "disable",
importpath = "github.com/fortytw2/leaktest",
sum = "h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=",
version = "v1.3.0",
)
go_repository(
name = "com_github_frankban_quicktest",
build_file_proto_mode = "disable",
importpath = "github.com/frankban/quicktest",
sum = "h1:PvpJR0Uq8SdX+zagCMsarBMlhz6ysGTf1+pRmCsRXqY=",
version = "v1.8.1",
)
go_repository(
name = "com_github_fsnotify_fsnotify",
build_file_proto_mode = "disable",
importpath = "github.com/fsnotify/fsnotify",
sum = "h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=",
version = "v1.4.9",
)
go_repository(
name = "com_github_fsouza_fake_gcs_server",
build_file_proto_mode = "disable",
importpath = "github.com/fsouza/fake-gcs-server",
sum = "h1:3bRRh/rQnB2XbrMolHAj9oX/PFiWVQFVVfPR5y2pxb8=",
version = "v1.19.4",
)
go_repository(
name = "com_github_fvbommel_sortorder",
build_file_proto_mode = "disable",
importpath = "github.com/fvbommel/sortorder",
sum = "h1:dSnXLt4mJYH25uDDGa3biZNQsozaUWDSWeKJ0qqFfzE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_garyburd_redigo",
build_file_proto_mode = "disable",
importpath = "github.com/garyburd/redigo",
sum = "h1:LofdAjjjqCSXMwLGgOgnE+rdPuvX9DxCqaHwKy7i/ko=",
version = "v0.0.0-20150301180006-535138d7bcd7",
)
go_repository(
name = "com_github_ghodss_yaml",
build_file_proto_mode = "disable",
importpath = "github.com/ghodss/yaml",
sum = "h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_gliderlabs_ssh",
build_file_proto_mode = "disable",
importpath = "github.com/gliderlabs/ssh",
sum = "h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=",
version = "v0.2.2",
)
go_repository(
name = "com_github_globalsign_mgo",
build_file_proto_mode = "disable",
importpath = "github.com/globalsign/mgo",
sum = "h1:DujepqpGd1hyOd7aW59XpK7Qymp8iy83xq74fLr21is=",
version = "v0.0.0-20181015135952-eeefdecb41b8",
)
go_repository(
name = "com_github_go_bindata_go_bindata_v3",
build_file_proto_mode = "disable",
importpath = "github.com/go-bindata/go-bindata/v3",
sum = "h1:F0nVttLC3ws0ojc7p60veTurcOm//D4QBODNM7EGrCI=",
version = "v3.1.3",
)
go_repository(
name = "com_github_go_critic_go_critic",
build_file_proto_mode = "disable",
importpath = "github.com/go-critic/go-critic",
sum = "h1:sGEEdiuvLV0OC7/yC6MnK3K6LCPBplspK45B0XVdFAc=",
version = "v0.4.3",
)
go_repository(
name = "com_github_go_git_gcfg",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/gcfg",
sum = "h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=",
version = "v1.5.0",
)
go_repository(
name = "com_github_go_git_go_billy_v5",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-billy/v5",
sum = "h1:4pl5BV4o7ZG/lterP4S6WzJ6xr49Ba5ET9ygheTYahk=",
version = "v5.1.0",
)
go_repository(
name = "com_github_go_git_go_git_fixtures_v4",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-git-fixtures/v4",
sum = "h1:PbKy9zOy4aAKrJ5pibIRpVO2BXnK1Tlcg+caKI7Ox5M=",
version = "v4.0.2-0.20200613231340-f56387b50c12",
)
go_repository(
name = "com_github_go_git_go_git_v5",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-git/v5",
sum = "h1:8WKMtJR2j8RntEXR/uvTKagfEt4GYlwQ7mntE4+0GWc=",
version = "v5.3.0",
)
go_repository(
name = "com_github_go_gl_glfw",
build_file_proto_mode = "disable",
importpath = "github.com/go-gl/glfw",
sum = "h1:QbL/5oDUmRBzO9/Z7Seo6zf912W/a6Sr4Eu0G/3Jho0=",
version = "v0.0.0-20190409004039-e6da0acd62b1",
)
go_repository(
name = "com_github_go_gl_glfw_v3_3_glfw",
build_file_proto_mode = "disable",
importpath = "github.com/go-gl/glfw/v3.3/glfw",
sum = "h1:WtGNWLvXpe6ZudgnXrq0barxBImvnnJoMEhXAzcbM0I=",
version = "v0.0.0-20200222043503-6f7a984d4dc4",
)
go_repository(
name = "com_github_go_ini_ini",
build_file_proto_mode = "disable",
importpath = "github.com/go-ini/ini",
sum = "h1:0wVcG9udk2C3TGgmdIGKK9ScOZHZB5nbG+gwji9fhhc=",
version = "v1.55.0",
)
go_repository(
name = "com_github_go_kit_kit",
build_file_proto_mode = "disable",
importpath = "github.com/go-kit/kit",
sum = "h1:wDJmvq38kDhkVxi50ni9ykkdUr1PKgqKOoi01fa0Mdk=",
version = "v0.9.0",
)
go_repository(
name = "com_github_go_kit_log",
build_file_proto_mode = "disable",
importpath = "github.com/go-kit/log",
sum = "h1:DGJh0Sm43HbOeYDNnVZFl8BvcYVvjD5bqYJvp0REbwQ=",
version = "v0.1.0",
)
go_repository(
name = "com_github_go_ldap_ldap",
build_file_proto_mode = "disable",
importpath = "github.com/go-ldap/ldap",
sum = "h1:kD5HQcAzlQ7yrhfn+h+MSABeAy/jAJhvIJ/QDllP44g=",
version = "v3.0.2+incompatible",
)
go_repository(
name = "com_github_go_lintpack_lintpack",
build_file_proto_mode = "disable",
importpath = "github.com/go-lintpack/lintpack",
sum = "h1:DI5mA3+eKdWeJ40nU4d6Wc26qmdG8RCi/btYq0TuRN0=",
version = "v0.5.2",
)
go_repository(
name = "com_github_go_logfmt_logfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-logfmt/logfmt",
sum = "h1:TrB8swr/68K7m9CcGut2g3UOihhbcbiMAYiuTXdEih4=",
version = "v0.5.0",
)
go_repository(
name = "com_github_go_logr_logr",
build_file_proto_mode = "disable",
importpath = "github.com/go-logr/logr",
sum = "h1:K7/B1jt6fIBQVd4Owv2MqGQClcgf0R266+7C/QjRcLc=",
version = "v0.4.0",
)
go_repository(
name = "com_github_go_logr_zapr",
build_file_proto_mode = "disable",
importpath = "github.com/go-logr/zapr",
sum = "h1:uc1uML3hRYL9/ZZPdgHS/n8Nzo+eaYL/Efxkkamf7OM=",
version = "v0.4.0",
)
go_repository(
name = "com_github_go_ole_go_ole",
build_file_proto_mode = "disable",
importpath = "github.com/go-ole/go-ole",
sum = "h1:2lOsA72HgjxAuMlKpFiCbHTvu44PIVkZ5hqm3RSdI/E=",
version = "v1.2.1",
)
go_repository(
name = "com_github_go_openapi_analysis",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/analysis",
sum = "h1:8b2ZgKfKIUTVQpTb77MoRDIMEIwvDVw40o3aOXdfYzI=",
version = "v0.19.5",
)
go_repository(
name = "com_github_go_openapi_errors",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/errors",
sum = "h1:a2kIyV3w+OS3S97zxUndRVD46+FhGOUBDFY7nmu4CsY=",
version = "v0.19.2",
)
go_repository(
name = "com_github_go_openapi_jsonpointer",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/jsonpointer",
sum = "h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_jsonreference",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/jsonreference",
sum = "h1:5cxNfTy0UVC3X8JL5ymxzyoUZmo8iZb+jeTWn7tUa8o=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_loads",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/loads",
sum = "h1:5I4CCSqoWzT+82bBkNIvmLc0UOsoKKQ4Fz+3VxOB7SY=",
version = "v0.19.4",
)
go_repository(
name = "com_github_go_openapi_runtime",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/runtime",
sum = "h1:csnOgcgAiuGoM/Po7PEpKDoNulCcF3FGbSnbHfxgjMI=",
version = "v0.19.4",
)
go_repository(
name = "com_github_go_openapi_spec",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/spec",
sum = "h1:rMMMj8cV38KVXK7SFc+I2MWClbEfbK705+j+dyqun5g=",
version = "v0.19.6",
)
go_repository(
name = "com_github_go_openapi_strfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/strfmt",
sum = "h1:eRfyY5SkaNJCAwmmMcADjY31ow9+N7MCLW7oRkbsINA=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_swag",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/swag",
sum = "h1:VRuXN2EnMSsZdauzdss6JBC29YotDqG59BZ+tdlIL1s=",
version = "v0.19.7",
)
go_repository(
name = "com_github_go_openapi_validate",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/validate",
sum = "h1:QhCBKRYqZR+SKo4gl1lPhPahope8/RLt6EVgY8X80w0=",
version = "v0.19.5",
)
go_repository(
name = "com_github_go_sql_driver_mysql",
build_file_proto_mode = "disable",
importpath = "github.com/go-sql-driver/mysql",
sum = "h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=",
version = "v1.5.0",
)
go_repository(
name = "com_github_go_stack_stack",
build_file_proto_mode = "disable",
importpath = "github.com/go-stack/stack",
sum = "h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=",
version = "v1.8.0",
)
go_repository(
name = "com_github_go_task_slim_sprig",
build_file_proto_mode = "disable",
importpath = "github.com/go-task/slim-sprig",
sum = "h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I=",
version = "v0.0.0-20210107165309-348f09dbbbc0",
)
go_repository(
name = "com_github_go_test_deep",
build_file_proto_mode = "disable",
importpath = "github.com/go-test/deep",
sum = "h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M=",
version = "v1.0.7",
)
go_repository(
name = "com_github_go_toolsmith_astcast",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astcast",
sum = "h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astcopy",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astcopy",
sum = "h1:OMgl1b1MEpjFQ1m5ztEO06rz5CUd3oBv9RF7+DyvdG8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astequal",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astequal",
sum = "h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astfmt",
sum = "h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astinfo",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astinfo",
sum = "h1:wP6mXeB2V/d1P1K7bZ5vDUO3YqEzcvOREOxZPEu3gVI=",
version = "v0.0.0-20180906194353-9809ff7efb21",
)
go_repository(
name = "com_github_go_toolsmith_astp",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astp",
sum = "h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_pkgload",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/pkgload",
sum = "h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_strparse",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/strparse",
sum = "h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_typep",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/typep",
sum = "h1:8xdsa1+FSIH/RhEkgnD1j2CJOy5mNllW1Q9tRiYwvlk=",
version = "v1.0.2",
)
go_repository(
name = "com_github_go_xmlfmt_xmlfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-xmlfmt/xmlfmt",
sum = "h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo=",
version = "v0.0.0-20191208150333-d5b6f63a941b",
)
go_repository(
name = "com_github_go_yaml_yaml",
build_file_proto_mode = "disable",
importpath = "github.com/go-yaml/yaml",
sum = "h1:RYi2hDdss1u4YE7GwixGzWwVo47T8UQwnTLB6vQiq+o=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_gobuffalo_envy",
build_file_proto_mode = "disable",
importpath = "github.com/gobuffalo/envy",
sum = "h1:OQl5ys5MBea7OGCdvPbBJWRgnhC/fGona6QKfvFeau8=",
version = "v1.7.1",
)
go_repository(
name = "com_github_gobuffalo_flect",
build_file_proto_mode = "disable",
importpath = "github.com/gobuffalo/flect",
sum = "h1:EWCvMGGxOjsgwlWaP+f4+Hh6yrrte7JeFL2S6b+0hdM=",
version = "v0.2.0",
)
go_repository(
name = "com_github_gobwas_glob",
build_file_proto_mode = "disable",
importpath = "github.com/gobwas/glob",
sum = "h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=",
version = "v0.2.3",
)
go_repository(
name = "com_github_godbus_dbus",
build_file_proto_mode = "disable",
importpath = "github.com/godbus/dbus",
sum = "h1:BWhy2j3IXJhjCbC68FptL43tDKIq8FladmaTs3Xs7Z8=",
version = "v0.0.0-20190422162347-ade71ed3457e",
)
go_repository(
name = "com_github_gofrs_flock",
build_file_proto_mode = "disable",
importpath = "github.com/gofrs/flock",
sum = "h1:DP+LD/t0njgoPBvT5MJLeliUIVQR03hiKR6vezdwHlc=",
version = "v0.7.1",
)
go_repository(
name = "com_github_gogo_googleapis",
build_file_proto_mode = "disable",
importpath = "github.com/gogo/googleapis",
sum = "h1:kFkMAZBNAn4j7K0GiZr8cRYzejq68VbheufiV3YuyFI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_gogo_protobuf",
build_file_proto_mode = "disable",
importpath = "github.com/gogo/protobuf",
sum = "h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=",
version = "v1.3.2",
)
go_repository(
name = "com_github_golang_gddo",
build_file_proto_mode = "disable",
importpath = "github.com/golang/gddo",
sum = "h1:KRMr9A3qfbVM7iV/WcLY/rL5LICqwMHLhwRXKu99fXw=",
version = "v0.0.0-20190419222130-af0f2af80721",
)
go_repository(
name = "com_github_golang_glog",
build_file_proto_mode = "disable",
importpath = "github.com/golang/glog",
sum = "h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=",
version = "v0.0.0-20160126235308-23def4e6c14b",
)
go_repository(
name = "com_github_golang_groupcache",
build_file_proto_mode = "disable",
importpath = "github.com/golang/groupcache",
sum = "h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=",
version = "v0.0.0-20200121045136-8c9f03a8e57e",
)
go_repository(
name = "com_github_golang_lint",
build_file_proto_mode = "disable",
importpath = "github.com/golang/lint",
sum = "h1:2hRPrmiwPrp3fQX967rNJIhQPtiGXdlQWAxKbKw3VHA=",
version = "v0.0.0-20180702182130-06c8688daad7",
)
go_repository(
name = "com_github_golang_mock",
build_file_proto_mode = "disable",
importpath = "github.com/golang/mock",
sum = "h1:jlYHihg//f7RRwuPfptm04yp4s7O6Kw8EZiVYIGcH0g=",
version = "v1.5.0",
)
go_repository(
name = "com_github_golang_protobuf",
build_file_proto_mode = "disable",
importpath = "github.com/golang/protobuf",
sum = "h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=",
version = "v1.5.2",
)
go_repository(
name = "com_github_golang_snappy",
build_file_proto_mode = "disable",
importpath = "github.com/golang/snappy",
sum = "h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=",
version = "v0.0.1",
)
go_repository(
name = "com_github_golang_sql_civil",
build_file_proto_mode = "disable",
importpath = "github.com/golang-sql/civil",
sum = "h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY=",
version = "v0.0.0-20190719163853-cb61b32ac6fe",
)
go_repository(
name = "com_github_golangci_check",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/check",
sum = "h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0=",
version = "v0.0.0-20180506172741-cfe4005ccda2",
)
go_repository(
name = "com_github_golangci_dupl",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/dupl",
sum = "h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM=",
version = "v0.0.0-20180902072040-3e9179ac440a",
)
go_repository(
name = "com_github_golangci_errcheck",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/errcheck",
sum = "h1:YYWNAGTKWhKpcLLt7aSj/odlKrSrelQwlovBpDuf19w=",
version = "v0.0.0-20181223084120-ef45e06d44b6",
)
go_repository(
name = "com_github_golangci_go_misc",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/go-misc",
sum = "h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw=",
version = "v0.0.0-20180628070357-927a3d87b613",
)
go_repository(
name = "com_github_golangci_goconst",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/goconst",
sum = "h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8=",
version = "v0.0.0-20180610141641-041c5f2b40f3",
)
go_repository(
name = "com_github_golangci_gocyclo",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/gocyclo",
sum = "h1:pXTK/gkVNs7Zyy7WKgLXmpQ5bHTrq5GDsp8R9Qs67g0=",
version = "v0.0.0-20180528144436-0a533e8fa43d",
)
go_repository(
name = "com_github_golangci_gofmt",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/gofmt",
sum = "h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks=",
version = "v0.0.0-20190930125516-244bba706f1a",
)
go_repository(
name = "com_github_golangci_golangci_lint",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/golangci-lint",
sum = "h1:VYLx63qb+XJsHdZ27PMS2w5JZacN0XG8ffUwe7yQomo=",
version = "v1.27.0",
)
go_repository(
name = "com_github_golangci_ineffassign",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/ineffassign",
sum = "h1:gLLhTLMk2/SutryVJ6D4VZCU3CUqr8YloG7FPIBWFpI=",
version = "v0.0.0-20190609212857-42439a7714cc",
)
go_repository(
name = "com_github_golangci_lint_1",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/lint-1",
sum = "h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA=",
version = "v0.0.0-20191013205115-297bf364a8e0",
)
go_repository(
name = "com_github_golangci_maligned",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/maligned",
sum = "h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA=",
version = "v0.0.0-20180506175553-b1d89398deca",
)
go_repository(
name = "com_github_golangci_misspell",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/misspell",
sum = "h1:pLzmVdl3VxTOncgzHcvLOKirdvcx/TydsClUQXTehjo=",
version = "v0.3.5",
)
go_repository(
name = "com_github_golangci_prealloc",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/prealloc",
sum = "h1:leSNB7iYzLYSSx3J/s5sVf4Drkc68W2wm4Ixh/mr0us=",
version = "v0.0.0-20180630174525-215b22d4de21",
)
go_repository(
name = "com_github_golangci_revgrep",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/revgrep",
sum = "h1:XQKc8IYQOeRwVs36tDrEmTgDgP88d5iEURwpmtiAlOM=",
version = "v0.0.0-20180812185044-276a5c0a1039",
)
go_repository(
name = "com_github_golangci_unconvert",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/unconvert",
sum = "h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys=",
version = "v0.0.0-20180507085042-28b1c447d1f4",
)
go_repository(
name = "com_github_golangplus_bytes",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/bytes",
sum = "h1:7xqw01UYS+KCI25bMrPxwNYkSns2Db1ziQPpVq99FpE=",
version = "v0.0.0-20160111154220-45c989fe5450",
)
go_repository(
name = "com_github_golangplus_fmt",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/fmt",
sum = "h1:f5gsjBiF9tRRVomCvrkGMMWI8W1f2OBFar2c5oakAP0=",
version = "v0.0.0-20150411045040-2a5d6d7d2995",
)
go_repository(
name = "com_github_golangplus_testing",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/testing",
sum = "h1:KhcknUwkWHKZPbFy2P7jH5LKJ3La+0ZeknkkmrSgqb0=",
version = "v0.0.0-20180327235837-af21d9c3145e",
)
go_repository(
name = "com_github_gomodule_redigo",
build_file_proto_mode = "disable",
importpath = "github.com/gomodule/redigo",
sum = "h1:nRAxCa+SVsyjSBrtZmG/cqb6VbTmuRzpg/PoTFlpumc=",
version = "v1.8.5",
)
go_repository(
name = "com_github_google_btree",
build_file_proto_mode = "disable",
importpath = "github.com/google/btree",
sum = "h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_google_go_cmp",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-cmp",
sum = "h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=",
version = "v0.5.5",
)
go_repository(
name = "com_github_google_go_containerregistry",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-containerregistry",
sum = "h1:AG8FSAfXglim2l5qSrqp5VK2Xl03PiBf25NiTGGamws=",
version = "v0.1.1",
)
go_repository(
name = "com_github_google_go_github",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github",
sum = "h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY=",
version = "v17.0.0+incompatible",
)
go_repository(
name = "com_github_google_go_github_v27",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v27",
sum = "h1:oiOZuBmGHvrGM1X9uNUAUlLgp5r1UUO/M/KnbHnLRlQ=",
version = "v27.0.6",
)
go_repository(
name = "com_github_google_go_github_v28",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v28",
sum = "h1:kORf5ekX5qwXO2mGzXXOjMe/g6ap8ahVe0sBEulhSxo=",
version = "v28.1.1",
)
go_repository(
name = "com_github_google_go_github_v29",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v29",
sum = "h1:IktKCTwU//aFHnpA+2SLIi7Oo9uhAzgsdZNbcAqhgdc=",
version = "v29.0.3",
)
go_repository(
name = "com_github_google_go_github_v32",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v32",
sum = "h1:q74KVb22spUq0U5HqZ9VCYqQz8YRuOtL/39ZnfwO+NM=",
version = "v32.0.0",
)
go_repository(
name = "com_github_google_go_licenses",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-licenses",
sum = "h1:eVR9gT5gBPTHXeyGAcA8OF/SKNUFFg+a0BJqfx4z5eE=",
version = "v0.0.0-20200227160636-0fa8c766a591",
)
go_repository(
name = "com_github_google_go_querystring",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-querystring",
sum = "h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=",
version = "v1.1.0",
)
go_repository(
name = "com_github_google_go_replayers_grpcreplay",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-replayers/grpcreplay",
sum = "h1:eNb1y9rZFmY4ax45uEEECSa8fsxGRU+8Bil52ASAwic=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_go_replayers_httpreplay",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-replayers/httpreplay",
sum = "h1:AX7FUb4BjrrzNvblr/OlgwrmFiep6soj5K2QSDW7BGk=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_gofuzz",
build_file_proto_mode = "disable_global",
importpath = "github.com/google/gofuzz",
sum = "h1:VcIYpAGBae3Z6BVncE0OnTE/ZjlDXqtYhOZky88neLM=",
version = "v1.2.1-0.20210504230335-f78f29fc09ea",
)
go_repository(
name = "com_github_google_licenseclassifier",
build_file_proto_mode = "disable",
importpath = "github.com/google/licenseclassifier",
sum = "h1:OggOMmdI0JLwg1FkOKH9S7fVHF0oEm8PX6S8kAdpOps=",
version = "v0.0.0-20200402202327-879cb1424de0",
)
go_repository(
name = "com_github_google_mako",
build_file_proto_mode = "disable",
importpath = "github.com/google/mako",
sum = "h1:/o5e44nTD/QEEiWPGSFT3bSqcq3Qg7q27N9bv4gKh5M=",
version = "v0.0.0-20190821191249-122f8dcef9e3",
)
go_repository(
name = "com_github_google_martian",
build_file_proto_mode = "disable",
importpath = "github.com/google/martian",
sum = "h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE=",
version = "v2.1.1-0.20190517191504-25dcb96d9e51+incompatible",
)
go_repository(
name = "com_github_google_martian_v3",
build_file_proto_mode = "disable",
importpath = "github.com/google/martian/v3",
sum = "h1:pMen7vLs8nvgEYhywH3KDWJIJTeEr2ULsVWHWYHQyBs=",
version = "v3.0.0",
)
go_repository(
name = "com_github_google_pprof",
build_file_proto_mode = "disable",
importpath = "github.com/google/pprof",
sum = "h1:k+KkMRk8mGOu1xG38StS7dQ+Z6oW1i9n3dgrAVU9Q/E=",
version = "v0.0.0-20200905233945-acf8798be1f7",
)
go_repository(
name = "com_github_google_renameio",
build_file_proto_mode = "disable",
importpath = "github.com/google/renameio",
sum = "h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_rpmpack",
build_file_proto_mode = "disable",
importpath = "github.com/google/rpmpack",
sum = "h1:BW6OvS3kpT5UEPbCZ+KyX/OB4Ks9/MNMhWjqPPkZxsE=",
version = "v0.0.0-20191226140753-aa36bfddb3a0",
)
go_repository(
name = "com_github_google_subcommands",
build_file_proto_mode = "disable",
importpath = "github.com/google/subcommands",
sum = "h1:/eqq+otEXm5vhfBrbREPCSVQbvofip6kIz+mX5TUH7k=",
version = "v1.0.1",
)
go_repository(
name = "com_github_google_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/google/uuid",
sum = "h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs=",
version = "v1.2.0",
)
go_repository(
name = "com_github_google_wire",
build_file_proto_mode = "disable",
importpath = "github.com/google/wire",
sum = "h1:kXcsA/rIGzJImVqPdhfnr6q0xsS9gU0515q1EPpJ9fE=",
version = "v0.4.0",
)
go_repository(
name = "com_github_googleapis_gax_go",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gax-go",
sum = "h1:silFMLAnr330+NRuag/VjIGF7TLp/LBrV2CJKFLWEww=",
version = "v2.0.2+incompatible",
)
go_repository(
name = "com_github_googleapis_gax_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gax-go/v2",
sum = "h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=",
version = "v2.0.5",
)
go_repository(
name = "com_github_googleapis_gnostic",
build_file_generation = "on",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gnostic",
replace = "github.com/googleapis/gnostic",
sum = "h1:DLJCy1n/vrD4HPjOvYcT8aYQXpPIzoRZONaYwyycI+I=",
version = "v0.4.1",
)
go_repository(
name = "com_github_googlecloudplatform_cloud_builders_gcs_fetcher",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/cloud-builders/gcs-fetcher",
sum = "h1:Pjo3SOZigEnIGevhFqcbFndnqyCH8WimcREd3hRM9vU=",
version = "v0.0.0-20191203181535-308b93ad1f39",
)
go_repository(
name = "com_github_googlecloudplatform_cloudsql_proxy",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/cloudsql-proxy",
sum = "h1:sTOp2Ajiew5XIH92YSdwhYc+bgpUX5j5TKK/Ac8Saw8=",
version = "v0.0.0-20191009163259-e802c2cb94ae",
)
go_repository(
name = "com_github_googlecloudplatform_k8s_cloud_provider",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/k8s-cloud-provider",
sum = "h1:N7lSsF+R7wSulUADi36SInSQA3RvfO/XclHQfedr0qk=",
version = "v0.0.0-20190822182118-27a4ced34534",
)
go_repository(
name = "com_github_googlecloudplatform_testgrid",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/testgrid",
sum = "h1:qs3/BQpz3j3qsgnfjV8aVBfPopkGxp/TnWjjiboUVf8=",
version = "v0.0.68",
)
go_repository(
name = "com_github_gookit_color",
build_file_proto_mode = "disable",
importpath = "github.com/gookit/color",
sum = "h1:xOYBan3Fwlrqj1M1UN2TlHOCRiek3bGzWf/vPnJ1roE=",
version = "v1.2.4",
)
go_repository(
name = "com_github_gophercloud_gophercloud",
build_file_proto_mode = "disable",
importpath = "github.com/gophercloud/gophercloud",
sum = "h1:P/nh25+rzXouhytV2pUHBb65fnds26Ghl8/391+sT5o=",
version = "v0.1.0",
)
go_repository(
name = "com_github_gopherjs_gopherjs",
build_file_proto_mode = "disable",
importpath = "github.com/gopherjs/gopherjs",
sum = "h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=",
version = "v0.0.0-20181017120253-0766667cb4d1",
)
go_repository(
name = "com_github_goreleaser_goreleaser",
build_file_proto_mode = "disable",
importpath = "github.com/goreleaser/goreleaser",
sum = "h1:Z+7XPrfGK11s/Sp+a06sx2FzGuCjTBdxN2ubpGvQbjY=",
version = "v0.136.0",
)
go_repository(
name = "com_github_goreleaser_nfpm",
build_file_proto_mode = "disable",
importpath = "github.com/goreleaser/nfpm",
sum = "h1:BPwIomC+e+yuDX9poJowzV7JFVcYA0+LwGSkbAPs2Hw=",
version = "v1.3.0",
)
go_repository(
name = "com_github_gorilla_context",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/context",
sum = "h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=",
version = "v1.1.1",
)
go_repository(
name = "com_github_gorilla_csrf",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/csrf",
sum = "h1:QqQ/OWwuFp4jMKgBFAzJVW3FMULdyUW7JoM4pEWuqKg=",
version = "v1.6.2",
)
go_repository(
name = "com_github_gorilla_handlers",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/handlers",
sum = "h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg=",
version = "v1.4.2",
)
go_repository(
name = "com_github_gorilla_mux",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/mux",
sum = "h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=",
version = "v1.8.0",
)
go_repository(
name = "com_github_gorilla_securecookie",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/securecookie",
sum = "h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=",
version = "v1.1.1",
)
go_repository(
name = "com_github_gorilla_sessions",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/sessions",
sum = "h1:S7P+1Hm5V/AT9cjEcUD5uDaQSX0OE577aCXgoaKpYbQ=",
version = "v1.2.0",
)
go_repository(
name = "com_github_gorilla_websocket",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/websocket",
sum = "h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=",
version = "v1.4.2",
)
go_repository(
name = "com_github_gostaticanalysis_analysisutil",
build_file_proto_mode = "disable",
importpath = "github.com/gostaticanalysis/analysisutil",
sum = "h1:iwp+5/UAyzQSFgQ4uR2sni99sJ8Eo9DEacKWM5pekIg=",
version = "v0.0.3",
)
go_repository(
name = "com_github_gosuri_uitable",
build_file_proto_mode = "disable",
importpath = "github.com/gosuri/uitable",
sum = "h1:IG2xLKRvErL3uhY6e1BylFzG+aJiwQviDDTfOKeKTpY=",
version = "v0.0.4",
)
go_repository(
name = "com_github_gotestyourself_gotestyourself",
build_file_proto_mode = "disable",
importpath = "github.com/gotestyourself/gotestyourself",
sum = "h1:AQwinXlbQR2HvPjQZOmDhRqsv5mZf+Jb1RnSLxcqZcI=",
version = "v2.2.0+incompatible",
)
go_repository(
name = "com_github_gregjones_httpcache",
build_file_proto_mode = "disable",
importpath = "github.com/gregjones/httpcache",
sum = "h1:f8eY6cV/x1x+HLjOp4r72s/31/V2aTUtg5oKRRPf8/Q=",
version = "v0.0.0-20190212212710-3befbb6ad0cc",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_middleware",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/go-grpc-middleware",
sum = "h1:z53tR0945TRRQO/fLEVPI6SMv7ZflF0TEaTAoU7tOzg=",
version = "v1.0.1-0.20190118093823-f849b5445de4",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_prometheus",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/go-grpc-prometheus",
sum = "h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=",
version = "v1.2.0",
)
go_repository(
name = "com_github_grpc_ecosystem_grpc_gateway",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/grpc-gateway",
sum = "h1:D0EVSTwQoQOyfY35QNSuPJA4jpZRtkoGYWQMB7XNg5o=",
version = "v1.12.2",
)
go_repository(
name = "com_github_h2non_gock",
build_file_proto_mode = "disable",
importpath = "github.com/h2non/gock",
sum = "h1:17gCehSo8ZOgEsFKpQgqHiR7VLyjxdAG3lkhVvO9QZU=",
version = "v1.0.9",
)
go_repository(
name = "com_github_hashicorp_consul_api",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/consul/api",
sum = "h1:BNQPM9ytxj6jbjjdRPioQ94T6YXriSopn0i8COv6SRA=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_consul_sdk",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/consul/sdk",
sum = "h1:LnuDWGNsoajlhGyHJvuWW6FVqRl8JOTPqS6CPTsYjhY=",
version = "v0.1.1",
)
go_repository(
name = "com_github_hashicorp_errwrap",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/errwrap",
sum = "h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_cleanhttp",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-cleanhttp",
sum = "h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=",
version = "v0.5.1",
)
go_repository(
name = "com_github_hashicorp_go_hclog",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-hclog",
sum = "h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=",
version = "v0.9.2",
)
go_repository(
name = "com_github_hashicorp_go_immutable_radix",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-immutable-radix",
sum = "h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_msgpack",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-msgpack",
sum = "h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4=",
version = "v0.5.3",
)
go_repository(
name = "com_github_hashicorp_go_multierror",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-multierror",
sum = "h1:B9UzwGQJehnUY1yNrnwREHc3fGbC2xefo8g4TbElacI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_go_net",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go.net",
sum = "h1:sNCoNyDEvN1xa+X0baata4RdcpKwcMS6DH+xwfqPgjw=",
version = "v0.0.1",
)
go_repository(
name = "com_github_hashicorp_go_plugin",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-plugin",
sum = "h1:4OtAfUGbnKC6yS48p0CtMX2oFYtzFZVv6rok3cRWgnE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_retryablehttp",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-retryablehttp",
sum = "h1:HJunrbHTDDbBb/ay4kxa1n+dLmttUlnP3V9oNE4hmsM=",
version = "v0.6.6",
)
go_repository(
name = "com_github_hashicorp_go_rootcerts",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-rootcerts",
sum = "h1:DMo4fmknnz0E0evoNYnV48RjWndOsmd6OW+09R3cEP8=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_sockaddr",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-sockaddr",
sum = "h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc=",
version = "v1.0.2",
)
go_repository(
name = "com_github_hashicorp_go_syslog",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-syslog",
sum = "h1:KaodqZuhUoZereWVIYmpUgZysurB1kBLX2j0MwMrUAE=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-uuid",
sum = "h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_version",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-version",
sum = "h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E=",
version = "v1.2.0",
)
go_repository(
name = "com_github_hashicorp_golang_lru",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/golang-lru",
sum = "h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=",
version = "v0.5.4",
)
go_repository(
name = "com_github_hashicorp_hcl",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/hcl",
sum = "h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_logutils",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/logutils",
sum = "h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_mdns",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/mdns",
sum = "h1:WhIgCr5a7AaVH6jPUwjtRuuE7/RDufnUvzIr48smyxs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_memberlist",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/memberlist",
sum = "h1:EmmoJme1matNzb+hMpDuR/0sbJSUisxyqBGG676r31M=",
version = "v0.1.3",
)
go_repository(
name = "com_github_hashicorp_serf",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/serf",
sum = "h1:YZ7UKsJv+hKjqGVUUbtE3HNj79Eln2oQ75tniF6iPt0=",
version = "v0.8.2",
)
go_repository(
name = "com_github_hashicorp_vault_api",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/vault/api",
sum = "h1:j08Or/wryXT4AcHj1oCbMd7IijXcKzYUGw59LGu9onU=",
version = "v1.0.4",
)
go_repository(
name = "com_github_hashicorp_vault_sdk",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/vault/sdk",
sum = "h1:mOEPeOhT7jl0J4AMl1E705+BcmeRs1VmKNb9F0sMLy8=",
version = "v0.1.13",
)
go_repository(
name = "com_github_hashicorp_yamux",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/yamux",
sum = "h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ=",
version = "v0.0.0-20181012175058-2f1d1f20f75d",
)
go_repository(
name = "com_github_howeyc_gopass",
build_file_proto_mode = "disable",
importpath = "github.com/howeyc/gopass",
sum = "h1:kQWxfPIHVLbgLzphqk3QUflDy9QdksZR4ygR807bpy0=",
version = "v0.0.0-20170109162249-bf9dde6d0d2c",
)
go_repository(
name = "com_github_hpcloud_tail",
build_file_proto_mode = "disable",
importpath = "github.com/hpcloud/tail",
sum = "h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_huandu_xstrings",
build_file_proto_mode = "disable",
importpath = "github.com/huandu/xstrings",
sum = "h1:yPeWdRnmynF7p+lLYz0H2tthW9lqhMJrQV/U7yy4wX0=",
version = "v1.2.0",
)
go_repository(
name = "com_github_ianlancetaylor_demangle",
build_file_proto_mode = "disable",
importpath = "github.com/ianlancetaylor/demangle",
sum = "h1:UDMh68UUwekSh5iP2OMhRRZJiiBccgV7axzUG8vi56c=",
version = "v0.0.0-20181102032728-5e5cf60278f6",
)
go_repository(
name = "com_github_imdario_mergo",
build_file_proto_mode = "disable",
importpath = "github.com/imdario/mergo",
sum = "h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=",
version = "v0.3.12",
)
go_repository(
name = "com_github_inconshreveable_mousetrap",
build_file_proto_mode = "disable",
importpath = "github.com/inconshreveable/mousetrap",
sum = "h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_influxdata_influxdb",
build_file_proto_mode = "disable",
importpath = "github.com/influxdata/influxdb",
sum = "h1:AciJ2ei/llFRundm7CtqwF6B2aOds1A7QG3sMW8QiaQ=",
version = "v0.0.0-20161215172503-049f9b42e9a5",
)
go_repository(
name = "com_github_influxdata_tdigest",
build_file_proto_mode = "disable",
importpath = "github.com/influxdata/tdigest",
sum = "h1:MHTrDWmQpHq/hkq+7cw9oYAt2PqUw52TZazRA0N7PGE=",
version = "v0.0.0-20181121200506-bf2b5ad3c0a9",
)
go_repository(
name = "com_github_jarcoal_httpmock",
build_file_proto_mode = "disable",
importpath = "github.com/jarcoal/httpmock",
sum = "h1:cHtVEcTxRSX4J0je7mWPfc9BpDpqzXSJ5HbymZmyHck=",
version = "v1.0.5",
)
go_repository(
name = "com_github_jbenet_go_context",
build_file_proto_mode = "disable",
importpath = "github.com/jbenet/go-context",
sum = "h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=",
version = "v0.0.0-20150711004518-d14ea06fba99",
)
go_repository(
name = "com_github_jcmturner_gofork",
build_file_proto_mode = "disable",
importpath = "github.com/jcmturner/gofork",
sum = "h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_jenkins_x_go_scm",
build_file_proto_mode = "disable",
importpath = "github.com/jenkins-x/go-scm",
sum = "h1:D7d1sDWUU+xocCNLQVoYKpMjVKnQvsPva+hPzruchbM=",
version = "v1.5.117",
)
go_repository(
name = "com_github_jessevdk_go_flags",
build_file_proto_mode = "disable",
importpath = "github.com/jessevdk/go-flags",
sum = "h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=",
version = "v1.5.0",
)
go_repository(
name = "com_github_jetstack_cert_manager",
build_file_proto_mode = "disable",
importpath = "github.com/jetstack/cert-manager",
sum = "h1:gEhBV9I83m+kpQShDhNO4+J8O2qfNDjvAEL27pThGmg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_jingyugao_rowserrcheck",
build_file_proto_mode = "disable",
importpath = "github.com/jingyugao/rowserrcheck",
sum = "h1:GmsqmapfzSJkm28dhRoHz2tLRbJmqhU86IPgBtN3mmk=",
version = "v0.0.0-20191204022205-72ab7603b68a",
)
go_repository(
name = "com_github_jinzhu_gorm",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/gorm",
sum = "h1:Drgk1clyWT9t9ERbzHza6Mj/8FY/CqMyVzOiHviMo6Q=",
version = "v1.9.12",
)
go_repository(
name = "com_github_jinzhu_inflection",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/inflection",
sum = "h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=",
version = "v1.0.0",
)
go_repository(
name = "com_github_jinzhu_now",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/now",
sum = "h1:g39TucaRWyV3dwDO++eEc6qf8TVIQ/Da48WmqjZ3i7E=",
version = "v1.1.1",
)
go_repository(
name = "com_github_jirfag_go_printf_func_name",
build_file_proto_mode = "disable",
importpath = "github.com/jirfag/go-printf-func-name",
sum = "h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48=",
version = "v0.0.0-20200119135958-7558a9eaa5af",
)
go_repository(
name = "com_github_jmespath_go_jmespath",
build_file_proto_mode = "disable",
importpath = "github.com/jmespath/go-jmespath",
sum = "h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=",
version = "v0.4.0",
)
go_repository(
name = "com_github_jmespath_go_jmespath_internal_testify",
build_file_proto_mode = "disable",
importpath = "github.com/jmespath/go-jmespath/internal/testify",
sum = "h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=",
version = "v1.5.1",
)
go_repository(
name = "com_github_jmoiron_sqlx",
build_file_proto_mode = "disable",
importpath = "github.com/jmoiron/sqlx",
sum = "h1:lrdPtrORjGv1HbbEvKWDUAy97mPpFm4B8hp77tcCUJY=",
version = "v1.2.1-0.20190826204134-d7d95172beb5",
)
go_repository(
name = "com_github_joefitzgerald_rainbow_reporter",
build_file_proto_mode = "disable",
importpath = "github.com/joefitzgerald/rainbow-reporter",
sum = "h1:AuMG652zjdzI0YCCnXAqATtRBpGXMcAnrajcaTrSeuo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_joho_godotenv",
build_file_proto_mode = "disable",
importpath = "github.com/joho/godotenv",
sum = "h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=",
version = "v1.3.0",
)
go_repository(
name = "com_github_jonboulle_clockwork",
build_file_proto_mode = "disable",
importpath = "github.com/jonboulle/clockwork",
sum = "h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_joshdk_go_junit",
build_file_proto_mode = "disable",
importpath = "github.com/joshdk/go-junit",
sum = "h1:Bp5LAZasx/ev9wUmIIC74+MsXgwD99VjV1JmDVbpJm8=",
version = "v0.0.0-20190428045703-ad7e11aa49ff",
)
go_repository(
name = "com_github_jpillora_backoff",
build_file_proto_mode = "disable",
importpath = "github.com/jpillora/backoff",
sum = "h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_json_iterator_go",
build_file_proto_mode = "disable",
importpath = "github.com/json-iterator/go",
sum = "h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ=",
version = "v1.1.11",
)
go_repository(
name = "com_github_jstemmer_go_junit_report",
build_file_proto_mode = "disable",
importpath = "github.com/jstemmer/go-junit-report",
sum = "h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=",
version = "v0.9.1",
)
go_repository(
name = "com_github_jtolds_gls",
build_file_proto_mode = "disable",
importpath = "github.com/jtolds/gls",
sum = "h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=",
version = "v4.20.0+incompatible",
)
go_repository(
name = "com_github_julienschmidt_httprouter",
build_file_proto_mode = "disable",
importpath = "github.com/julienschmidt/httprouter",
sum = "h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U=",
version = "v1.3.0",
)
go_repository(
name = "com_github_kballard_go_shellquote",
build_file_proto_mode = "disable",
importpath = "github.com/kballard/go-shellquote",
sum = "h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=",
version = "v0.0.0-20180428030007-95032a82bc51",
)
go_repository(
name = "com_github_kelseyhightower_envconfig",
build_file_proto_mode = "disable",
importpath = "github.com/kelseyhightower/envconfig",
sum = "h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=",
version = "v1.4.0",
)
go_repository(
name = "com_github_kevinburke_ssh_config",
build_file_proto_mode = "disable",
importpath = "github.com/kevinburke/ssh_config",
sum = "h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck=",
version = "v0.0.0-20201106050909-4977a11b4351",
)
go_repository(
name = "com_github_kisielk_errcheck",
build_file_proto_mode = "disable",
importpath = "github.com/kisielk/errcheck",
sum = "h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY=",
version = "v1.5.0",
)
go_repository(
name = "com_github_kisielk_gotool",
build_file_proto_mode = "disable",
importpath = "github.com/kisielk/gotool",
sum = "h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_klauspost_compress",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/compress",
sum = "h1:Znfn6hXZAHaLPNnlqUYRrBSReFHYybslgv4PTiyz6P0=",
version = "v1.10.2",
)
go_repository(
name = "com_github_klauspost_cpuid",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/cpuid",
sum = "h1:1xAgYebNnsb9LKCdLOvFWtAxGU/33mjJtyOVbmUa0Us=",
version = "v1.2.2",
)
go_repository(
name = "com_github_klauspost_pgzip",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/pgzip",
sum = "h1:oIPZROsWuPHpOdMVWLuJZXwgjhrW8r1yEX8UqMyeNHM=",
version = "v1.2.1",
)
go_repository(
name = "com_github_knative_build",
build_file_proto_mode = "disable",
importpath = "github.com/knative/build",
sum = "h1:o/VYWA3HKyZlNqdU2hDE5LHpanBe8gazgPKL97XJ6bo=",
version = "v0.1.2",
)
go_repository(
name = "com_github_konsorten_go_windows_terminal_sequences",
build_file_proto_mode = "disable",
importpath = "github.com/konsorten/go-windows-terminal-sequences",
sum = "h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=",
version = "v1.0.3",
)
go_repository(
name = "com_github_kr_logfmt",
build_file_proto_mode = "disable",
importpath = "github.com/kr/logfmt",
sum = "h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=",
version = "v0.0.0-20140226030751-b84e30acd515",
)
go_repository(
name = "com_github_kr_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/kr/pretty",
sum = "h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=",
version = "v0.2.1",
)
go_repository(
name = "com_github_kr_pty",
build_file_proto_mode = "disable",
importpath = "github.com/kr/pty",
sum = "h1:AkaSdXYQOWeaO3neb8EM634ahkXXe3jYbVh/F9lq+GI=",
version = "v1.1.8",
)
go_repository(
name = "com_github_kr_text",
build_file_proto_mode = "disable",
importpath = "github.com/kr/text",
sum = "h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=",
version = "v0.2.0",
)
go_repository(
name = "com_github_lib_pq",
build_file_proto_mode = "disable",
importpath = "github.com/lib/pq",
sum = "h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=",
version = "v1.2.0",
)
go_repository(
name = "com_github_liggitt_tabwriter",
build_file_proto_mode = "disable",
importpath = "github.com/liggitt/tabwriter",
sum = "h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0=",
version = "v0.0.0-20181228230101-89fcab3d43de",
)
go_repository(
name = "com_github_lightstep_tracecontext_go",
build_file_proto_mode = "disable",
importpath = "github.com/lightstep/tracecontext.go",
sum = "h1:+2b6iGRJe3hvV/yVXrd41yVEjxuFHxasJqDhkIjS4gk=",
version = "v0.0.0-20181129014701-1757c391b1ac",
)
go_repository(
name = "com_github_lithammer_dedent",
build_file_proto_mode = "disable",
importpath = "github.com/lithammer/dedent",
sum = "h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY=",
version = "v1.1.0",
)
go_repository(
name = "com_github_logrusorgru_aurora",
build_file_proto_mode = "disable",
importpath = "github.com/logrusorgru/aurora",
sum = "h1:9MlwzLdW7QSDrhDjFlsEYmxpFyIoXmYRon3dt0io31k=",
version = "v0.0.0-20181002194514-a7b3b318ed4e",
)
go_repository(
name = "com_github_lyft_protoc_gen_validate",
build_file_proto_mode = "disable",
importpath = "github.com/lyft/protoc-gen-validate",
sum = "h1:KNt/RhmQTOLr7Aj8PsJ7mTronaFyx80mRTT9qF261dA=",
version = "v0.0.13",
)
go_repository(
name = "com_github_magiconair_properties",
build_file_proto_mode = "disable",
importpath = "github.com/magiconair/properties",
sum = "h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=",
version = "v1.8.1",
)
go_repository(
name = "com_github_mailru_easyjson",
build_file_proto_mode = "disable",
importpath = "github.com/mailru/easyjson",
sum = "h1:jcoUdG1TzY/M/eM5BLFLP8DJeMximx5NQYSlLL9YeWc=",
version = "v0.7.1-0.20191009090205-6c0755d89d1e",
)
go_repository(
name = "com_github_makenowjust_heredoc",
build_file_proto_mode = "disable",
importpath = "github.com/MakeNowJust/heredoc",
sum = "h1:sjQovDkwrZp8u+gxLtPgKGjk5hCxuy2hrRejBTA9xFU=",
version = "v0.0.0-20170808103936-bb23615498cd",
)
go_repository(
name = "com_github_maratori_testpackage",
build_file_proto_mode = "disable",
importpath = "github.com/maratori/testpackage",
sum = "h1:QtJ5ZjqapShm0w5DosRjg0PRlSdAdlx+W6cCKoALdbQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_markbates_inflect",
build_file_proto_mode = "disable",
importpath = "github.com/markbates/inflect",
sum = "h1:5fh1gzTFhfae06u3hzHYO9xe3l3v3nW5Pwt3naLTP5g=",
version = "v1.0.4",
)
go_repository(
name = "com_github_marstr_guid",
build_file_proto_mode = "disable",
importpath = "github.com/marstr/guid",
sum = "h1:/M4H/1G4avsieL6BbUwCOBzulmoeKVP5ux/3mQNnbyI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_masterminds_goutils",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/goutils",
sum = "h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RPBiVg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_masterminds_semver",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/semver",
sum = "h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=",
version = "v1.5.0",
)
go_repository(
name = "com_github_masterminds_semver_v3",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/semver/v3",
sum = "h1:Y2lUDsFKVRSYGojLJ1yLxSXdMmMYTYls0rCvoqmMUQk=",
version = "v3.1.0",
)
go_repository(
name = "com_github_masterminds_sprig_v3",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/sprig/v3",
sum = "h1:wz22D0CiSctrliXiI9ZO3HoNApweeRGftyDN+BQa3B8=",
version = "v3.0.2",
)
go_repository(
name = "com_github_masterminds_vcs",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/vcs",
sum = "h1:NL3G1X7/7xduQtA2sJLpVpfHTNBALVNSjob6KEjPXNQ=",
version = "v1.13.1",
)
go_repository(
name = "com_github_matoous_godox",
build_file_proto_mode = "disable",
importpath = "github.com/matoous/godox",
sum = "h1:RHba4YImhrUVQDHUCe2BNSOz4tVy2yGyXhvYDvxGgeE=",
version = "v0.0.0-20190911065817-5d6d842e92eb",
)
go_repository(
name = "com_github_mattbaird_jsonpatch",
build_file_proto_mode = "disable",
importpath = "github.com/mattbaird/jsonpatch",
sum = "h1:+J2gw7Bw77w/fbK7wnNJJDKmw1IbWft2Ul5BzrG1Qm8=",
version = "v0.0.0-20171005235357-81af80346b1a",
)
go_repository(
name = "com_github_mattn_go_colorable",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-colorable",
sum = "h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=",
version = "v0.1.6",
)
go_repository(
name = "com_github_mattn_go_ieproxy",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-ieproxy",
sum = "h1:qiyop7gCflfhwCzGyeT0gro3sF9AIg9HU98JORTkqfI=",
version = "v0.0.1",
)
go_repository(
name = "com_github_mattn_go_isatty",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-isatty",
sum = "h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=",
version = "v0.0.12",
)
go_repository(
name = "com_github_mattn_go_runewidth",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-runewidth",
sum = "h1:3tS41NlGYSmhhe/8fhGRzc+z3AYCw1Fe1WAyLuujKs0=",
version = "v0.0.8",
)
go_repository(
name = "com_github_mattn_go_shellwords",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-shellwords",
sum = "h1:Y7Xqm8piKOO3v10Thp7Z36h4FYFjt5xB//6XvOrs2Gw=",
version = "v1.0.10",
)
go_repository(
name = "com_github_mattn_go_sqlite3",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-sqlite3",
sum = "h1:xQ15muvnzGBHpIpdrNi1DA5x0+TcBZzsIDwmw9uTHzw=",
version = "v2.0.1+incompatible",
)
go_repository(
name = "com_github_mattn_go_zglob",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-zglob",
sum = "h1:0qT24o2wsZ8cOXQAERwBX6s+rPMs/bJTKxLVVtgfDXc=",
version = "v0.0.2",
)
go_repository(
name = "com_github_mattn_goveralls",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/goveralls",
sum = "h1:7eJB6EqsPhRVxvwEXGnqdO2sJI0PTsrWoTMXEk9/OQc=",
version = "v0.0.2",
)
go_repository(
name = "com_github_matttproud_golang_protobuf_extensions",
build_file_proto_mode = "disable",
importpath = "github.com/matttproud/golang_protobuf_extensions",
sum = "h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI=",
version = "v1.0.2-0.20181231171920-c182affec369",
)
go_repository(
name = "com_github_maxbrunsfeld_counterfeiter_v6",
build_file_proto_mode = "disable",
importpath = "github.com/maxbrunsfeld/counterfeiter/v6",
sum = "h1:g+4J5sZg6osfvEfkRZxJ1em0VT95/UOZgi/l7zi1/oE=",
version = "v6.2.2",
)
go_repository(
name = "com_github_mgutz_ansi",
build_file_proto_mode = "disable",
importpath = "github.com/mgutz/ansi",
sum = "h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=",
version = "v0.0.0-20170206155736-9520e82c474b",
)
go_repository(
name = "com_github_mholt_archiver_v3",
build_file_proto_mode = "disable",
importpath = "github.com/mholt/archiver/v3",
sum = "h1:vWjhY8SQp5yzM9P6OJ/eZEkmi3UAbRrxCq48MxjAzig=",
version = "v3.3.0",
)
go_repository(
name = "com_github_microsoft_go_winio",
build_file_proto_mode = "disable",
importpath = "github.com/Microsoft/go-winio",
sum = "h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk=",
version = "v0.4.16",
)
go_repository(
name = "com_github_microsoft_hcsshim",
build_file_proto_mode = "disable",
importpath = "github.com/Microsoft/hcsshim",
sum = "h1:ptnOoufxGSzauVTsdE+wMYnCWA301PdoN4xg5oRdZpg=",
version = "v0.8.7",
)
go_repository(
name = "com_github_miekg_dns",
build_file_proto_mode = "disable",
importpath = "github.com/miekg/dns",
sum = "h1:sJFOl9BgwbYAWOGEwr61FU28pqsBNdpRBnhGXtO06Oo=",
version = "v1.1.31",
)
go_repository(
name = "com_github_mitchellh_cli",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/cli",
sum = "h1:iGBIsUe3+HZ/AD/Vd7DErOt5sU9fa8Uj7A2s1aggv1Y=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_copystructure",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/copystructure",
sum = "h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_go_homedir",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-homedir",
sum = "h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=",
version = "v1.1.0",
)
go_repository(
name = "com_github_mitchellh_go_ps",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-ps",
sum = "h1:9+ke9YJ9KGWw5ANXK6ozjoK47uI3uNbXv4YVINBnGm8=",
version = "v0.0.0-20190716172923-621e5597135b",
)
go_repository(
name = "com_github_mitchellh_go_testing_interface",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-testing-interface",
sum = "h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_go_wordwrap",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-wordwrap",
sum = "h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_gox",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/gox",
sum = "h1:lfGJxY7ToLJQjHHwi0EX6uYBdK78egf954SQl13PQJc=",
version = "v0.4.0",
)
go_repository(
name = "com_github_mitchellh_iochan",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/iochan",
sum = "h1:C+X3KsSTLFVBr/tK1eYN/vs4rJcvsiLU338UhYPJWeY=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_ioprogress",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/ioprogress",
sum = "h1:Qa6dnn8DlasdXRnacluu8HzPts0S1I9zvvUPDbBnXFI=",
version = "v0.0.0-20180201004757-6a23b12fa88e",
)
go_repository(
name = "com_github_mitchellh_mapstructure",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/mapstructure",
sum = "h1:cCBH2gTD2K0OtLlv/Y5H01VQCqmlDxz30kS5Y5bqfLA=",
version = "v1.3.1",
)
go_repository(
name = "com_github_mitchellh_osext",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/osext",
sum = "h1:2+myh5ml7lgEU/51gbeLHfKGNfgEQQIWrlbdaOsidbQ=",
version = "v0.0.0-20151018003038-5e2d6d41470f",
)
go_repository(
name = "com_github_mitchellh_reflectwalk",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/reflectwalk",
sum = "h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=",
version = "v1.0.0",
)
go_repository(
name = "com_github_moby_spdystream",
build_file_proto_mode = "disable",
importpath = "github.com/moby/spdystream",
sum = "h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8=",
version = "v0.2.0",
)
go_repository(
name = "com_github_moby_term",
build_file_proto_mode = "disable",
importpath = "github.com/moby/term",
sum = "h1:rzf0wL0CHVc8CEsgyygG0Mn9CNCCPZqOPaz8RiiHYQk=",
version = "v0.0.0-20201216013528-df9cb8a40635",
)
go_repository(
name = "com_github_modern_go_concurrent",
build_file_proto_mode = "disable",
importpath = "github.com/modern-go/concurrent",
sum = "h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=",
version = "v0.0.0-20180306012644-bacd9c7ef1dd",
)
go_repository(
name = "com_github_modern_go_reflect2",
build_file_proto_mode = "disable",
importpath = "github.com/modern-go/reflect2",
sum = "h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=",
version = "v1.0.1",
)
go_repository(
name = "com_github_mohae_deepcopy",
build_file_proto_mode = "disable",
importpath = "github.com/mohae/deepcopy",
sum = "h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=",
version = "v0.0.0-20170929034955-c48cc78d4826",
)
go_repository(
name = "com_github_morikuni_aec",
build_file_proto_mode = "disable",
importpath = "github.com/morikuni/aec",
sum = "h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mozilla_tls_observatory",
build_file_proto_mode = "disable",
importpath = "github.com/mozilla/tls-observatory",
sum = "h1:1xJ+Xi9lYWLaaP4yB67ah0+548CD3110mCPWhVVjFkI=",
version = "v0.0.0-20200317151703-4fa42e1c2dee",
)
go_repository(
name = "com_github_munnerz_crd_schema_fuzz",
build_file_proto_mode = "disable",
importpath = "github.com/munnerz/crd-schema-fuzz",
sum = "h1:8erI9yzEnOGw9K5O+a8zZdoo8N/OwrFi7c7SjBtkHAs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_munnerz_goautoneg",
build_file_proto_mode = "disable",
importpath = "github.com/munnerz/goautoneg",
sum = "h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=",
version = "v0.0.0-20191010083416-a7dc8b61c822",
)
go_repository(
name = "com_github_mwitkow_go_conntrack",
build_file_proto_mode = "disable",
importpath = "github.com/mwitkow/go-conntrack",
sum = "h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=",
version = "v0.0.0-20190716064945-2f068394615f",
)
go_repository(
name = "com_github_mxk_go_flowrate",
build_file_proto_mode = "disable",
importpath = "github.com/mxk/go-flowrate",
sum = "h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus=",
version = "v0.0.0-20140419014527-cca7078d478f",
)
go_repository(
name = "com_github_nakabonne_nestif",
build_file_proto_mode = "disable",
importpath = "github.com/nakabonne/nestif",
sum = "h1:+yOViDGhg8ygGrmII72nV9B/zGxY188TYpfolntsaPw=",
version = "v0.3.0",
)
go_repository(
name = "com_github_natefinch_lumberjack",
build_file_proto_mode = "disable",
importpath = "github.com/natefinch/lumberjack",
sum = "h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM=",
version = "v2.0.0+incompatible",
)
go_repository(
name = "com_github_nats_io_gnatsd",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/gnatsd",
sum = "h1:RconcfDeWpKCD6QIIwiVFcvForlXpWeJP7i5/lDLy44=",
version = "v1.4.1",
)
go_repository(
name = "com_github_nats_io_go_nats",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/go-nats",
sum = "h1:oQOfHcLr8hb43QG8yeVyY2jtarIaTjOv41CGdF3tTvQ=",
version = "v1.7.0",
)
go_repository(
name = "com_github_nats_io_jwt",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/jwt",
sum = "h1:+RB5hMpXUUA2dfxuhBTEkMOrYmM+gKIZYS1KjSostMI=",
version = "v0.3.2",
)
go_repository(
name = "com_github_nats_io_nats_go",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nats.go",
sum = "h1:ik3HbLhZ0YABLto7iX80pZLPw/6dx3T+++MZJwLnMrQ=",
version = "v1.9.1",
)
go_repository(
name = "com_github_nats_io_nats_server_v2",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nats-server/v2",
sum = "h1:i2Ly0B+1+rzNZHHWtD4ZwKi+OU5l+uQo1iDHZ2PmiIc=",
version = "v2.1.2",
)
go_repository(
name = "com_github_nats_io_nkeys",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nkeys",
sum = "h1:6JrEfig+HzTH85yxzhSVbjHRJv9cn0p6n3IngIcM5/k=",
version = "v0.1.3",
)
go_repository(
name = "com_github_nats_io_nuid",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nuid",
sum = "h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=",
version = "v1.0.1",
)
go_repository(
name = "com_github_nbio_st",
build_file_proto_mode = "disable",
importpath = "github.com/nbio/st",
sum = "h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4=",
version = "v0.0.0-20140626010706-e9e8d9816f32",
)
go_repository(
name = "com_github_nbutton23_zxcvbn_go",
build_file_proto_mode = "disable",
importpath = "github.com/nbutton23/zxcvbn-go",
sum = "h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E=",
version = "v0.0.0-20180912185939-ae427f1e4c1d",
)
go_repository(
name = "com_github_ncw_swift",
build_file_proto_mode = "disable",
importpath = "github.com/ncw/swift",
sum = "h1:4DQRPj35Y41WogBxyhOXlrI37nzGlyEcsforeudyYPQ=",
version = "v1.0.47",
)
go_repository(
name = "com_github_niemeyer_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/niemeyer/pretty",
sum = "h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=",
version = "v0.0.0-20200227124842-a10e7caefd8e",
)
go_repository(
name = "com_github_nwaples_rardecode",
build_file_proto_mode = "disable",
importpath = "github.com/nwaples/rardecode",
sum = "h1:r7vGuS5akxOnR4JQSkko62RJ1ReCMXxQRPtxsiFMBOs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_nxadm_tail",
build_file_proto_mode = "disable",
importpath = "github.com/nxadm/tail",
sum = "h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=",
version = "v1.4.8",
)
go_repository(
name = "com_github_nytimes_gziphandler",
build_file_proto_mode = "disable",
importpath = "github.com/NYTimes/gziphandler",
sum = "h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=",
version = "v1.1.1",
)
go_repository(
name = "com_github_octago_sflags",
build_file_proto_mode = "disable",
importpath = "github.com/octago/sflags",
sum = "h1:XceYzkRXGAHa/lSFmKLcaxSrsh4MTuOMQdIGsUD0wlk=",
version = "v0.2.0",
)
go_repository(
name = "com_github_oklog_run",
build_file_proto_mode = "disable",
importpath = "github.com/oklog/run",
sum = "h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_oklog_ulid",
build_file_proto_mode = "disable",
importpath = "github.com/oklog/ulid",
sum = "h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=",
version = "v1.3.1",
)
go_repository(
name = "com_github_olekukonko_tablewriter",
build_file_proto_mode = "disable",
importpath = "github.com/olekukonko/tablewriter",
sum = "h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=",
version = "v0.0.4",
)
go_repository(
name = "com_github_oneofone_xxhash",
build_file_proto_mode = "disable",
importpath = "github.com/OneOfOne/xxhash",
sum = "h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=",
version = "v1.2.2",
)
go_repository(
name = "com_github_onsi_ginkgo",
build_file_proto_mode = "disable",
importpath = "github.com/onsi/ginkgo",
sum = "h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc=",
version = "v1.16.4",
)
go_repository(
name = "com_github_onsi_gomega",
build_file_proto_mode = "disable",
importpath = "github.com/onsi/gomega",
sum = "h1:7lLHu94wT9Ij0o6EWWclhu0aOh32VxhkwEJvzuWPeak=",
version = "v1.13.0",
)
go_repository(
name = "com_github_op_go_logging",
build_file_proto_mode = "disable",
importpath = "github.com/op/go-logging",
sum = "h1:lDH9UUVJtmYCjyT0CI4q8xvlXPxeZ0gYCVvWbmPlp88=",
version = "v0.0.0-20160315200505-970db520ece7",
)
go_repository(
name = "com_github_opencontainers_go_digest",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/go-digest",
sum = "h1:WzifXhOVOEOuFYOJAW6aQqW0TooG2iki3E3Ii+WN7gQ=",
version = "v1.0.0-rc1",
)
go_repository(
name = "com_github_opencontainers_image_spec",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/image-spec",
sum = "h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI=",
version = "v1.0.1",
)
go_repository(
name = "com_github_opencontainers_runc",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runc",
sum = "h1:GlxAyO6x8rfZYN9Tt0Kti5a/cP41iuiO2yYT0IJGY8Y=",
version = "v0.1.1",
)
go_repository(
name = "com_github_opencontainers_runtime_spec",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runtime-spec",
sum = "h1:eNUVfm/RFLIi1G7flU5/ZRTHvd4kcVuzfRnL6OFlzCI=",
version = "v0.1.2-0.20190507144316-5b71a03e2700",
)
go_repository(
name = "com_github_opencontainers_runtime_tools",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runtime-tools",
sum = "h1:H7DMc6FAjgwZZi8BRqjrAAHWoqEr5e5L6pS4V0ezet4=",
version = "v0.0.0-20181011054405-1d69bd0f9c39",
)
go_repository(
name = "com_github_openpeedeep_depguard",
build_file_proto_mode = "disable",
importpath = "github.com/OpenPeeDeeP/depguard",
sum = "h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us=",
version = "v1.0.1",
)
go_repository(
name = "com_github_openzipkin_zipkin_go",
build_file_proto_mode = "disable",
importpath = "github.com/openzipkin/zipkin-go",
sum = "h1:nY8Hti+WKaP0cRsSeQ026wU03QsM762XBeCXBb9NAWI=",
version = "v0.2.2",
)
go_repository(
name = "com_github_otiai10_copy",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/copy",
sum = "h1:DDNipYy6RkIkjMwy+AWzgKiNTyj2RUI9yEMeETEpVyc=",
version = "v1.0.2",
)
go_repository(
name = "com_github_otiai10_curr",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/curr",
sum = "h1:+OLn68pqasWca0z5ryit9KGfp3sUsW4Lqg32iRMJyzs=",
version = "v0.0.0-20150429015615-9b4961190c95",
)
go_repository(
name = "com_github_otiai10_mint",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/mint",
sum = "h1:Ady6MKVezQwHBkGzLFbrsywyp09Ah7rkmfjV3Bcr5uc=",
version = "v1.3.0",
)
go_repository(
name = "com_github_pascaldekloe_goe",
build_file_proto_mode = "disable",
importpath = "github.com/pascaldekloe/goe",
sum = "h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=",
version = "v0.1.0",
)
go_repository(
name = "com_github_pavel_v_chernykh_keystore_go",
build_file_proto_mode = "disable",
importpath = "github.com/pavel-v-chernykh/keystore-go",
sum = "h1:Jd6xfriVlJ6hWPvYOE0Ni0QWcNTLRehfGPFxr3eSL80=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_pborman_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/pborman/uuid",
sum = "h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g=",
version = "v1.2.0",
)
go_repository(
name = "com_github_pelletier_go_buffruneio",
build_file_proto_mode = "disable",
importpath = "github.com/pelletier/go-buffruneio",
sum = "h1:U4t4R6YkofJ5xHm3dJzuRpPZ0mr5MMCoAWooScCR7aA=",
version = "v0.2.0",
)
go_repository(
name = "com_github_pelletier_go_toml",
build_file_proto_mode = "disable",
importpath = "github.com/pelletier/go-toml",
sum = "h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw=",
version = "v1.8.0",
)
go_repository(
name = "com_github_peterbourgon_diskv",
build_file_proto_mode = "disable",
importpath = "github.com/peterbourgon/diskv",
sum = "h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI=",
version = "v2.0.1+incompatible",
)
go_repository(
name = "com_github_phayes_checkstyle",
build_file_proto_mode = "disable",
importpath = "github.com/phayes/checkstyle",
sum = "h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA=",
version = "v0.0.0-20170904204023-bfd46e6a821d",
)
go_repository(
name = "com_github_phayes_freeport",
build_file_proto_mode = "disable",
importpath = "github.com/phayes/freeport",
sum = "h1:JhzVVoYvbOACxoUmOs6V/G4D5nPVUW73rKvXxP4XUJc=",
version = "v0.0.0-20180830031419-95f893ade6f2",
)
go_repository(
name = "com_github_pierrec_lz4",
build_file_proto_mode = "disable",
importpath = "github.com/pierrec/lz4",
sum = "h1:6aCX4/YZ9v8q69hTyiR7dNLnTA3fgtKHVVW5BCd5Znw=",
version = "v2.2.6+incompatible",
)
go_repository(
name = "com_github_pkg_errors",
build_file_proto_mode = "disable",
importpath = "github.com/pkg/errors",
sum = "h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=",
version = "v0.9.1",
)
go_repository(
name = "com_github_pkg_profile",
build_file_proto_mode = "disable",
importpath = "github.com/pkg/profile",
sum = "h1:F++O52m40owAmADcojzM+9gyjmMOY/T4oYJkgFDH8RE=",
version = "v1.2.1",
)
go_repository(
name = "com_github_pmezard_go_difflib",
build_file_proto_mode = "disable",
importpath = "github.com/pmezard/go-difflib",
sum = "h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_posener_complete",
build_file_proto_mode = "disable",
importpath = "github.com/posener/complete",
sum = "h1:ccV59UEOTzVDnDUEFdT95ZzHVZ+5+158q8+SJb2QV5w=",
version = "v1.1.1",
)
go_repository(
name = "com_github_pquerna_cachecontrol",
build_file_proto_mode = "disable",
importpath = "github.com/pquerna/cachecontrol",
sum = "h1:0XM1XL/OFFJjXsYXlG30spTkV/E9+gmd5GD1w2HE8xM=",
version = "v0.0.0-20171018203845-0dec1b30a021",
)
go_repository(
name = "com_github_prometheus_client_golang",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/client_golang",
sum = "h1:HNkLOAEQMIDv/K+04rukrLx6ch7msSRwf3/SASFAGtQ=",
version = "v1.11.0",
)
go_repository(
name = "com_github_prometheus_client_model",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/client_model",
sum = "h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=",
version = "v0.2.0",
)
go_repository(
name = "com_github_prometheus_common",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/common",
sum = "h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ=",
version = "v0.26.0",
)
go_repository(
name = "com_github_prometheus_procfs",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/procfs",
sum = "h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4=",
version = "v0.6.0",
)
go_repository(
name = "com_github_prometheus_tsdb",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/tsdb",
sum = "h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA=",
version = "v0.7.1",
)
go_repository(
name = "com_github_puerkitobio_purell",
build_file_proto_mode = "disable",
importpath = "github.com/PuerkitoBio/purell",
sum = "h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=",
version = "v1.1.1",
)
go_repository(
name = "com_github_puerkitobio_urlesc",
build_file_proto_mode = "disable",
importpath = "github.com/PuerkitoBio/urlesc",
sum = "h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=",
version = "v0.0.0-20170810143723-de5bf2ad4578",
)
go_repository(
name = "com_github_quasilyte_go_consistent",
build_file_proto_mode = "disable",
importpath = "github.com/quasilyte/go-consistent",
sum = "h1:JoUA0uz9U0FVFq5p4LjEq4C0VgQ0El320s3Ms0V4eww=",
version = "v0.0.0-20190521200055-c6f3937de18c",
)
go_repository(
name = "com_github_quasilyte_go_ruleguard",
build_file_proto_mode = "disable",
importpath = "github.com/quasilyte/go-ruleguard",
sum = "h1:DvnesvLtRPQOvaUbfXfh0tpMHg29by0H7F2U+QIkSu8=",
version = "v0.1.2-0.20200318202121-b00d7a75d3d8",
)
go_repository(
name = "com_github_rcrowley_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/rcrowley/go-metrics",
sum = "h1:eUm8ma4+yPknhXtkYlWh3tMkE6gBjXZToDned9s2gbQ=",
version = "v0.0.0-20190706150252-9beb055b7962",
)
go_repository(
name = "com_github_remyoudompheng_bigfft",
build_file_proto_mode = "disable",
importpath = "github.com/remyoudompheng/bigfft",
sum = "h1:/NRJ5vAYoqz+7sG51ubIDHXeWO8DlTSrToPu6q11ziA=",
version = "v0.0.0-20170806203942-52369c62f446",
)
go_repository(
name = "com_github_rogpeppe_fastuuid",
build_file_proto_mode = "disable",
importpath = "github.com/rogpeppe/fastuuid",
sum = "h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s=",
version = "v1.2.0",
)
go_repository(
name = "com_github_rogpeppe_go_internal",
build_file_proto_mode = "disable",
importpath = "github.com/rogpeppe/go-internal",
sum = "h1:qLvObTrvO/XRCqmkKxUlOBc48bI3efyDuAZe25QiF0w=",
version = "v1.5.2",
)
go_repository(
name = "com_github_rubiojr_go_vhd",
build_file_proto_mode = "disable",
importpath = "github.com/rubiojr/go-vhd",
sum = "h1:ht7N4d/B7Ezf58nvMNVF3OlvDlz9pp+WHVcRNS0nink=",
version = "v0.0.0-20160810183302-0bfd3b39853c",
)
go_repository(
name = "com_github_russross_blackfriday",
build_file_proto_mode = "disable",
importpath = "github.com/russross/blackfriday",
sum = "h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=",
version = "v1.5.2",
)
go_repository(
name = "com_github_russross_blackfriday_v2",
build_file_proto_mode = "disable",
importpath = "github.com/russross/blackfriday/v2",
sum = "h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=",
version = "v2.0.1",
)
go_repository(
name = "com_github_rwcarlsen_goexif",
build_file_proto_mode = "disable",
importpath = "github.com/rwcarlsen/goexif",
sum = "h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=",
version = "v0.0.0-20190401172101-9e8deecbddbd",
)
go_repository(
name = "com_github_ryancurrah_gomodguard",
build_file_proto_mode = "disable",
importpath = "github.com/ryancurrah/gomodguard",
sum = "h1:DWbye9KyMgytn8uYpuHkwf0RHqAYO6Ay/D0TbCpPtVU=",
version = "v1.1.0",
)
go_repository(
name = "com_github_ryanuber_columnize",
build_file_proto_mode = "disable",
importpath = "github.com/ryanuber/columnize",
sum = "h1:j1Wcmh8OrK4Q7GXY+V7SVSY8nUWQxHW5TkBe7YUl+2s=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_ryanuber_go_glob",
build_file_proto_mode = "disable",
importpath = "github.com/ryanuber/go-glob",
sum = "h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_sassoftware_go_rpmutils",
build_file_proto_mode = "disable",
importpath = "github.com/sassoftware/go-rpmutils",
sum = "h1:+gCnWOZV8Z/8jehJ2CdqB47Z3S+SREmQcuXkRFLNsiI=",
version = "v0.0.0-20190420191620-a8f1baeba37b",
)
go_repository(
name = "com_github_satori_go_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/satori/go.uuid",
sum = "h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=",
version = "v1.2.0",
)
go_repository(
name = "com_github_sclevine_spec",
build_file_proto_mode = "disable",
importpath = "github.com/sclevine/spec",
sum = "h1:1Jwdf9jSfDl9NVmt8ndHqbTZ7XCCPbh1jI3hkDBHVYA=",
version = "v1.2.0",
)
go_repository(
name = "com_github_sean_seed",
build_file_proto_mode = "disable",
importpath = "github.com/sean-/seed",
sum = "h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=",
version = "v0.0.0-20170313163322-e2103e2c3529",
)
go_repository(
name = "com_github_securego_gosec",
build_file_proto_mode = "disable",
importpath = "github.com/securego/gosec",
sum = "h1:rq2/kILQnPtq5oL4+IAjgVOjh5e2yj2aaCYi7squEvI=",
version = "v0.0.0-20200401082031-e946c8c39989",
)
go_repository(
name = "com_github_securego_gosec_v2",
build_file_proto_mode = "disable",
importpath = "github.com/securego/gosec/v2",
sum = "h1:y/9mCF2WPDbSDpL3QDWZD3HHGrSYw0QSHnCqTfs4JPE=",
version = "v2.3.0",
)
go_repository(
name = "com_github_sergi_go_diff",
build_file_proto_mode = "disable",
importpath = "github.com/sergi/go-diff",
sum = "h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=",
version = "v1.1.0",
)
go_repository(
name = "com_github_shirou_gopsutil",
build_file_proto_mode = "disable",
importpath = "github.com/shirou/gopsutil",
sum = "h1:WokF3GuxBeL+n4Lk4Fa8v9mbdjlrl7bHuneF4N1bk2I=",
version = "v0.0.0-20190901111213-e4ec7b275ada",
)
go_repository(
name = "com_github_shirou_w32",
build_file_proto_mode = "disable",
importpath = "github.com/shirou/w32",
sum = "h1:udFKJ0aHUL60LboW/A+DfgoHVedieIzIXE8uylPue0U=",
version = "v0.0.0-20160930032740-bb4de0191aa4",
)
go_repository(
name = "com_github_shopify_logrus_bugsnag",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/logrus-bugsnag",
sum = "h1:UrqY+r/OJnIp5u0s1SbQ8dVfLCZJsnvazdBP5hS4iRs=",
version = "v0.0.0-20171204204709-577dee27f20d",
)
go_repository(
name = "com_github_shopify_sarama",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/sarama",
sum = "h1:XxJBCZEoWJtoWjf/xRbmGUpAmTZGnuuF0ON0EvxxBrs=",
version = "v1.23.1",
)
go_repository(
name = "com_github_shopify_toxiproxy",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/toxiproxy",
sum = "h1:TKdv8HiTLgE5wdJuEML90aBgNWsokNbMijUGhmcoBJc=",
version = "v2.1.4+incompatible",
)
go_repository(
name = "com_github_shurcool_githubv4",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/githubv4",
sum = "h1:N5B+JgvM/DVYIxreItPJMM3yWrNO/GB2q4nESrtBisM=",
version = "v0.0.0-20210725200734-83ba7b4c9228",
)
go_repository(
name = "com_github_shurcool_go",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/go",
sum = "h1:MZM7FHLqUHYI0Y/mQAt3d2aYa0SiNms/hFqC9qJYolM=",
version = "v0.0.0-20180423040247-9e1955d9fb6e",
)
go_repository(
name = "com_github_shurcool_go_goon",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/go-goon",
sum = "h1:llrF3Fs4018ePo4+G/HV/uQUqEI1HMDjCeOf2V6puPc=",
version = "v0.0.0-20170922171312-37c2f522c041",
)
go_repository(
name = "com_github_shurcool_graphql",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/graphql",
sum = "h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk=",
version = "v0.0.0-20181231061246-d48a9a75455f",
)
go_repository(
name = "com_github_shurcool_sanitized_anchor_name",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/sanitized_anchor_name",
sum = "h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_sirupsen_logrus",
build_file_proto_mode = "disable",
importpath = "github.com/sirupsen/logrus",
sum = "h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=",
version = "v1.8.1",
)
go_repository(
name = "com_github_smartystreets_assertions",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/assertions",
sum = "h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=",
version = "v1.2.0",
)
go_repository(
name = "com_github_smartystreets_go_aws_auth",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/go-aws-auth",
sum = "h1:hp2CYQUINdZMHdvTdXtPOY2ainKl4IoMcpAXEf2xj3Q=",
version = "v0.0.0-20180515143844-0c1422d1fdb9",
)
go_repository(
name = "com_github_smartystreets_goconvey",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/goconvey",
sum = "h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=",
version = "v1.6.4",
)
go_repository(
name = "com_github_smartystreets_gunit",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/gunit",
sum = "h1:RyPDUFcJbvtXlhJPk7v+wnxZRY2EUokhEYl2EJOPToI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_soheilhy_cmux",
build_file_proto_mode = "disable",
importpath = "github.com/soheilhy/cmux",
sum = "h1:0HKaf1o97UwFjHH9o5XsHUOF+tqmdA7KEzXLpiyaw0E=",
version = "v0.1.4",
)
go_repository(
name = "com_github_sourcegraph_go_diff",
build_file_proto_mode = "disable",
importpath = "github.com/sourcegraph/go-diff",
sum = "h1:lhIKJ2nXLZZ+AfbHpYxTn0pXpNTTui0DX7DO3xeb1Zs=",
version = "v0.5.3",
)
go_repository(
name = "com_github_spaolacci_murmur3",
build_file_proto_mode = "disable",
importpath = "github.com/spaolacci/murmur3",
sum = "h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=",
version = "v0.0.0-20180118202830-f09979ecbc72",
)
go_repository(
name = "com_github_spf13_afero",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/afero",
sum = "h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=",
version = "v1.2.2",
)
go_repository(
name = "com_github_spf13_cast",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/cast",
sum = "h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=",
version = "v1.3.1",
)
go_repository(
name = "com_github_spf13_cobra",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/cobra",
sum = "h1:xghbfqPkxzxP3C/f3n5DdpAbdKLj4ZE4BWQI362l53M=",
version = "v1.1.3",
)
go_repository(
name = "com_github_spf13_jwalterweatherman",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/jwalterweatherman",
sum = "h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=",
version = "v1.1.0",
)
go_repository(
name = "com_github_spf13_pflag",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/pflag",
sum = "h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=",
version = "v1.0.5",
)
go_repository(
name = "com_github_spf13_viper",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/viper",
sum = "h1:pM5oEahlgWv/WnHXpgbKz7iLIxRf65tye2Ci+XFK5sk=",
version = "v1.7.1",
)
go_repository(
name = "com_github_src_d_gcfg",
build_file_proto_mode = "disable",
importpath = "github.com/src-d/gcfg",
sum = "h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=",
version = "v1.4.0",
)
go_repository(
name = "com_github_stackexchange_wmi",
build_file_proto_mode = "disable",
importpath = "github.com/StackExchange/wmi",
sum = "h1:fLjPD/aNc3UIOA6tDi6QXUemppXK3P9BI7mr2hd6gx8=",
version = "v0.0.0-20180116203802-5d049714c4a6",
)
go_repository(
name = "com_github_streadway_amqp",
build_file_proto_mode = "disable",
importpath = "github.com/streadway/amqp",
sum = "h1:0ngsPmuP6XIjiFRNFYlvKwSr5zff2v+uPHaffZ6/M4k=",
version = "v0.0.0-20190404075320-75d898a42a94",
)
go_repository(
name = "com_github_streadway_quantile",
build_file_proto_mode = "disable",
importpath = "github.com/streadway/quantile",
sum = "h1:7z3LSn867ex6VSaahyKadf4WtSsJIgne6A1WLOAGM8A=",
version = "v0.0.0-20150917103942-b0c588724d25",
)
go_repository(
name = "com_github_stretchr_objx",
build_file_proto_mode = "disable",
importpath = "github.com/stretchr/objx",
sum = "h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=",
version = "v0.2.0",
)
go_repository(
name = "com_github_stretchr_testify",
build_file_proto_mode = "disable",
importpath = "github.com/stretchr/testify",
sum = "h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=",
version = "v1.7.0",
)
go_repository(
name = "com_github_subosito_gotenv",
build_file_proto_mode = "disable",
importpath = "github.com/subosito/gotenv",
sum = "h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=",
version = "v1.2.0",
)
go_repository(
name = "com_github_syndtr_gocapability",
build_file_proto_mode = "disable",
importpath = "github.com/syndtr/gocapability",
sum = "h1:zLV6q4e8Jv9EHjNg/iHfzwDkCve6Ua5jCygptrtXHvI=",
version = "v0.0.0-20170704070218-db04d3cc01c8",
)
go_repository(
name = "com_github_tdakkota_asciicheck",
build_file_proto_mode = "disable",
importpath = "github.com/tdakkota/asciicheck",
sum = "h1:HxLVTlqcHhFAz3nWUcuvpH7WuOMv8LQoCWmruLfFH2U=",
version = "v0.0.0-20200416200610-e657995f937b",
)
go_repository(
name = "com_github_tektoncd_pipeline",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/pipeline",
sum = "h1:hWdWj5bDjkSGYLlJS+u+Kh9ZktBJgs2JNUv/kP0LVOA=",
version = "v0.13.1-0.20200625065359-44f22a067b75",
)
go_repository(
name = "com_github_tektoncd_plumbing",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/plumbing",
sum = "h1:crv70CBAJ2gZFSbf13aRVwdbjR2GYwTms/ZEok/SnFM=",
version = "v0.0.0-20200430135134-e53521e1d887",
)
go_repository(
name = "com_github_tektoncd_plumbing_pipelinerun_logs",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/plumbing/pipelinerun-logs",
sum = "h1:9qeyrQsoPZbHOyOPt0OeB1TCYXfYb5swrxlFWzTIYYk=",
version = "v0.0.0-20191206114338-712d544c2c21",
)
go_repository(
name = "com_github_tetafro_godot",
build_file_proto_mode = "disable",
importpath = "github.com/tetafro/godot",
sum = "h1:Dib7un+rYJFUi8vN0Bk6EHheKy6fv6ZzFURHw75g6m8=",
version = "v0.4.2",
)
go_repository(
name = "com_github_tidwall_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/tidwall/pretty",
sum = "h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_timakin_bodyclose",
build_file_proto_mode = "disable",
importpath = "github.com/timakin/bodyclose",
sum = "h1:ig99OeTyDwQWhPe2iw9lwfQVF1KB3Q4fpP3X7/2VBG8=",
version = "v0.0.0-20200424151742-cb6215831a94",
)
go_repository(
name = "com_github_tj_assert",
build_file_proto_mode = "disable",
importpath = "github.com/tj/assert",
sum = "h1:Rw8kxzWo1mr6FSaYXjQELRe88y2KdfynXdnK72rdjtA=",
version = "v0.0.0-20171129193455-018094318fb0",
)
go_repository(
name = "com_github_tj_go_elastic",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-elastic",
sum = "h1:eGaGNxrtoZf/mBURsnNQKDR7u50Klgcf2eFDQEnc8Bc=",
version = "v0.0.0-20171221160941-36157cbbebc2",
)
go_repository(
name = "com_github_tj_go_kinesis",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-kinesis",
sum = "h1:m74UWYy+HBs+jMFR9mdZU6shPewugMyH5+GV6LNgW8w=",
version = "v0.0.0-20171128231115-08b17f58cb1b",
)
go_repository(
name = "com_github_tj_go_spin",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-spin",
sum = "h1:lhdWZsvImxvZ3q1C5OIB7d72DuOwP4O2NdBg9PyzNds=",
version = "v1.1.0",
)
go_repository(
name = "com_github_tmc_grpc_websocket_proxy",
build_file_proto_mode = "disable",
importpath = "github.com/tmc/grpc-websocket-proxy",
sum = "h1:LnC5Kc/wtumK+WB441p7ynQJzVuNRJiqddSIE3IlSEQ=",
version = "v0.0.0-20190109142713-0ad062ec5ee5",
)
go_repository(
name = "com_github_tommy_muehle_go_mnd",
build_file_proto_mode = "disable",
importpath = "github.com/tommy-muehle/go-mnd",
sum = "h1:RC4maTWLKKwb7p1cnoygsbKIgNlJqSYBeAFON3Ar8As=",
version = "v1.3.1-0.20200224220436-e6f9a994e8fa",
)
go_repository(
name = "com_github_trivago_tgo",
build_file_proto_mode = "disable",
importpath = "github.com/trivago/tgo",
sum = "h1:bxatjJIXNIpV18bucU4Uk/LaoxvxuOlp/oowRHyncLQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_tsenart_vegeta",
build_file_proto_mode = "disable",
importpath = "github.com/tsenart/vegeta",
sum = "h1:ErZrHhRveAoznVW80gbrxz+qxJNydpA2fcQxTPHkZbU=",
version = "v12.7.1-0.20190725001342-b5f4fca92137+incompatible",
)
go_repository(
name = "com_github_ugorji_go",
build_file_proto_mode = "disable",
importpath = "github.com/ugorji/go",
sum = "h1:j4s+tAvLfL3bZyefP2SEWmhBzmuIlH/eqNuPdFPgngw=",
version = "v1.1.4",
)
go_repository(
name = "com_github_ugorji_go_codec",
build_file_proto_mode = "disable",
importpath = "github.com/ugorji/go/codec",
sum = "h1:3SVOIvH7Ae1KRYyQWRjXWJEA9sS/c/pjvH++55Gr648=",
version = "v0.0.0-20181204163529-d75b2dcb6bc8",
)
go_repository(
name = "com_github_ulikunitz_xz",
build_file_proto_mode = "disable",
importpath = "github.com/ulikunitz/xz",
sum = "h1:YvTNdFzX6+W5m9msiYg/zpkSURPPtOlzbqYjrFn7Yt4=",
version = "v0.5.7",
)
go_repository(
name = "com_github_ultraware_funlen",
build_file_proto_mode = "disable",
importpath = "github.com/ultraware/funlen",
sum = "h1:Av96YVBwwNSe4MLR7iI/BIa3VyI7/djnto/pK3Uxbdo=",
version = "v0.0.2",
)
go_repository(
name = "com_github_ultraware_whitespace",
build_file_proto_mode = "disable",
importpath = "github.com/ultraware/whitespace",
sum = "h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg=",
version = "v0.0.4",
)
go_repository(
name = "com_github_urfave_cli",
build_file_proto_mode = "disable",
importpath = "github.com/urfave/cli",
sum = "h1:u7tSpNPPswAFymm8IehJhy4uJMlUuU/GmqSkvJ1InXA=",
version = "v1.22.4",
)
go_repository(
name = "com_github_urfave_cli_v2",
build_file_proto_mode = "disable",
importpath = "github.com/urfave/cli/v2",
sum = "h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k=",
version = "v2.1.1",
)
go_repository(
name = "com_github_uudashr_gocognit",
build_file_proto_mode = "disable",
importpath = "github.com/uudashr/gocognit",
sum = "h1:MoG2fZ0b/Eo7NXoIwCVFLG5JED3qgQz5/NEE+rOsjPs=",
version = "v1.0.1",
)
go_repository(
name = "com_github_valyala_bytebufferpool",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/bytebufferpool",
sum = "h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_valyala_fasthttp",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/fasthttp",
sum = "h1:dzZJf2IuMiclVjdw0kkT+f9u4YdrapbNyGAN47E/qnk=",
version = "v1.2.0",
)
go_repository(
name = "com_github_valyala_quicktemplate",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/quicktemplate",
sum = "h1:BaO1nHTkspYzmAjPXj0QiDJxai96tlcZyKcI9dyEGvM=",
version = "v1.2.0",
)
go_repository(
name = "com_github_valyala_tcplisten",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/tcplisten",
sum = "h1:0R4NLDRDZX6JcmhJgXi5E4b8Wg84ihbmUKp/GvSPEzc=",
version = "v0.0.0-20161114210144-ceec8f93295a",
)
go_repository(
name = "com_github_vdemeester_k8s_pkg_credentialprovider",
build_file_proto_mode = "disable",
importpath = "github.com/vdemeester/k8s-pkg-credentialprovider",
sum = "h1:czKEIG2Q3YRTgs6x/8xhjVMJD5byPo6cZuostkbTM74=",
version = "v1.17.4",
)
go_repository(
name = "com_github_vektah_gqlparser",
build_file_proto_mode = "disable",
importpath = "github.com/vektah/gqlparser",
sum = "h1:ZsyLGn7/7jDNI+y4SEhI4yAxRChlv15pUHMjijT+e68=",
version = "v1.1.2",
)
go_repository(
name = "com_github_venafi_vcert_v4",
build_file_proto_mode = "disable",
importpath = "github.com/Venafi/vcert/v4",
sum = "h1:37gfyjS9v5YvZcIABwNPo1fAC31lIZT7glVK1vfUxk4=",
version = "v4.11.0",
)
go_repository(
name = "com_github_vmware_govmomi",
build_file_proto_mode = "disable",
importpath = "github.com/vmware/govmomi",
sum = "h1:gpw/0Ku+6RgF3jsi7fnCLmlcikBHfKBCUcu1qgc16OU=",
version = "v0.20.3",
)
go_repository(
name = "com_github_xanzy_go_gitlab",
build_file_proto_mode = "disable",
importpath = "github.com/xanzy/go-gitlab",
sum = "h1:tBm+OXv1t+KBsqlXkSDFz+YUjRM0GFsjpOWYOod3Ebs=",
version = "v0.32.0",
)
go_repository(
name = "com_github_xanzy_ssh_agent",
build_file_proto_mode = "disable",
importpath = "github.com/xanzy/ssh-agent",
sum = "h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=",
version = "v0.3.0",
)
go_repository(
name = "com_github_xdg_scram",
build_file_proto_mode = "disable",
importpath = "github.com/xdg/scram",
sum = "h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk=",
version = "v0.0.0-20180814205039-7eeb5667e42c",
)
go_repository(
name = "com_github_xdg_stringprep",
build_file_proto_mode = "disable",
importpath = "github.com/xdg/stringprep",
sum = "h1:d9X0esnoa3dFsV0FG35rAT0RIhYFlPq7MiP+DW89La0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_xeipuuv_gojsonpointer",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonpointer",
sum = "h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=",
version = "v0.0.0-20180127040702-4e3ac2762d5f",
)
go_repository(
name = "com_github_xeipuuv_gojsonreference",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonreference",
sum = "h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=",
version = "v0.0.0-20180127040603-bd5ef7bd5415",
)
go_repository(
name = "com_github_xeipuuv_gojsonschema",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonschema",
sum = "h1:ngVtJC9TY/lg0AA/1k48FYhBrhRoFlEmWzsehpNAaZg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_xi2_xz",
build_file_proto_mode = "disable",
importpath = "github.com/xi2/xz",
sum = "h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=",
version = "v0.0.0-20171230120015-48954b6210f8",
)
go_repository(
name = "com_github_xiang90_probing",
build_file_proto_mode = "disable",
importpath = "github.com/xiang90/probing",
sum = "h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8=",
version = "v0.0.0-20190116061207-43a291ad63a2",
)
go_repository(
name = "com_github_xlab_handysort",
build_file_proto_mode = "disable",
importpath = "github.com/xlab/handysort",
sum = "h1:j2hhcujLRHAg872RWAV5yaUrEjHEObwDv3aImCaNLek=",
version = "v0.0.0-20150421192137-fb3537ed64a1",
)
go_repository(
name = "com_github_xordataexchange_crypt",
build_file_proto_mode = "disable",
importpath = "github.com/xordataexchange/crypt",
sum = "h1:ESFSdwYZvkeru3RtdrYueztKhOBCSAAzS4Gf+k0tEow=",
version = "v0.0.3-0.20170626215501-b2862e3d0a77",
)
go_repository(
name = "com_github_yuin_goldmark",
build_file_proto_mode = "disable",
importpath = "github.com/yuin/goldmark",
sum = "h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM=",
version = "v1.2.1",
)
go_repository(
name = "com_github_yvasiyarov_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/go-metrics",
sum = "h1:+lm10QQTNSBd8DVTNGHx7o/IKu9HYDvLMffDhbyLccI=",
version = "v0.0.0-20140926110328-57bccd1ccd43",
)
go_repository(
name = "com_github_yvasiyarov_gorelic",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/gorelic",
sum = "h1:hlE8//ciYMztlGpl/VA+Zm1AcTPHYkHJPbHqE6WJUXE=",
version = "v0.0.0-20141212073537-a9bba5b9ab50",
)
go_repository(
name = "com_github_yvasiyarov_newrelic_platform_go",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/newrelic_platform_go",
sum = "h1:ERexzlUfuTvpE74urLSbIQW0Z/6hF9t8U4NsJLaioAY=",
version = "v0.0.0-20140908184405-b21fdbd4370f",
)
go_repository(
name = "com_google_cloud_go",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go",
sum = "h1:DZeAkuQGQqnm9Xv36SbMJEU8aFBz4wL04UpMWPWwjzg=",
version = "v0.66.0",
)
go_repository(
name = "com_google_cloud_go_bigquery",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/bigquery",
sum = "h1:PQcPefKFdaIzjQFbiyOgAqyx8q5djaE7x9Sqe712DPA=",
version = "v1.8.0",
)
go_repository(
name = "com_google_cloud_go_datastore",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/datastore",
sum = "h1:/May9ojXjRkPBNVrq+oWLqmWCkr4OU5uRY29bu0mRyQ=",
version = "v1.1.0",
)
go_repository(
name = "com_google_cloud_go_firestore",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/firestore",
sum = "h1:9x7Bx0A9R5/M9jibeJeZWqjeVEIxYW9fZYqB9a70/bY=",
version = "v1.1.0",
)
go_repository(
name = "com_google_cloud_go_logging",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/logging",
sum = "h1:kaunpnoEh9L4hu6JUsBa8Y20LBfKnCuDhKUgdZp7oK8=",
version = "v1.0.0",
)
go_repository(
name = "com_google_cloud_go_pubsub",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/pubsub",
sum = "h1:76oR7VBOkL7ivoIrFKyW0k7YDCRelrlxktIzQiIUGgg=",
version = "v1.4.0",
)
go_repository(
name = "com_google_cloud_go_storage",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/storage",
sum = "h1:4y3gHptW1EHVtcPAVE0eBBlFuGqEejTTG3KdIE0lUX4=",
version = "v1.12.0",
)
go_repository(
name = "com_shuralyov_dmitri_gpu_mtl",
build_file_proto_mode = "disable",
importpath = "dmitri.shuralyov.com/gpu/mtl",
sum = "h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY=",
version = "v0.0.0-20190408044501-666a987793e9",
)
go_repository(
name = "com_sourcegraph_sqs_pbtypes",
build_file_proto_mode = "disable",
importpath = "sourcegraph.com/sqs/pbtypes",
sum = "h1:f7lAwqviDEGvON4kRv0o5V7FT/IQK+tbkF664XMbP3o=",
version = "v1.0.0",
)
go_repository(
name = "com_sslmate_software_src_go_pkcs12",
build_file_proto_mode = "disable",
importpath = "software.sslmate.com/src/go-pkcs12",
sum = "h1:AVd6O+azYjVQYW1l55IqkbL8/JxjrLtO6q4FCmV8N5c=",
version = "v0.0.0-20200830195227-52f69702a001",
)
go_repository(
name = "dev_gocloud",
build_file_proto_mode = "disable",
importpath = "gocloud.dev",
sum = "h1:EDRyaRAnMGSq/QBto486gWFxMLczAfIYUmusV7XLNBM=",
version = "v0.19.0",
)
go_repository(
name = "dev_knative_caching",
build_file_proto_mode = "disable",
importpath = "knative.dev/caching",
sum = "h1:mxrur6DsVK8uIjhIq7c1OMls4YjBcRlyvnh3Vx13a0M=",
version = "v0.0.0-20200116200605-67bca2c83dfa",
)
go_repository(
name = "dev_knative_eventing_contrib",
build_file_proto_mode = "disable",
importpath = "knative.dev/eventing-contrib",
sum = "h1:xncT+JrokPG+hPUFJwue8ubPpzmziV9GUIZqYt01JDo=",
version = "v0.11.2",
)
go_repository(
name = "dev_knative_pkg",
build_file_proto_mode = "disable",
importpath = "knative.dev/pkg",
sum = "h1:NDQS+236vhwCP9oiBBGvQ5WGzbD0Y8Pcv9dtE2stg+Q=",
version = "v0.0.0-20200711004937-22502028e31a",
)
go_repository(
name = "dev_knative_test_infra",
build_file_proto_mode = "disable",
importpath = "knative.dev/test-infra",
sum = "h1:wNlGK4f5Ykqh3KLC5RlyR9kvzvRgo/LwJQNsZWGVHnU=",
version = "v0.0.0-20200707183444-aed09e56ddc7",
)
go_repository(
name = "in_gopkg_airbrake_gobrake_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/airbrake/gobrake.v2",
sum = "h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo=",
version = "v2.0.9",
)
go_repository(
name = "in_gopkg_alecthomas_kingpin_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/alecthomas/kingpin.v2",
sum = "h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=",
version = "v2.2.6",
)
go_repository(
name = "in_gopkg_asn1_ber_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/asn1-ber.v1",
sum = "h1:TxyelI5cVkbREznMhfzycHdkp5cLA7DpE+GKjSslYhM=",
version = "v1.0.0-20181015200546-f715ec2f112d",
)
go_repository(
name = "in_gopkg_check_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/check.v1",
sum = "h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=",
version = "v1.0.0-20201130134442-10cb98267c6c",
)
go_repository(
name = "in_gopkg_cheggaaa_pb_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/cheggaaa/pb.v1",
sum = "h1:Ev7yu1/f6+d+b3pi5vPdRPc6nNtP1umSfcWiEfRqv6I=",
version = "v1.0.25",
)
go_repository(
name = "in_gopkg_errgo_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/errgo.v2",
sum = "h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8=",
version = "v2.1.0",
)
go_repository(
name = "in_gopkg_fsnotify_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/fsnotify.v1",
sum = "h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=",
version = "v1.4.7",
)
go_repository(
name = "in_gopkg_gcfg_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/gcfg.v1",
sum = "h1:0HIbH907iBTAntm+88IJV2qmJALDAh8sPekI9Vc1fm0=",
version = "v1.2.0",
)
go_repository(
name = "in_gopkg_gemnasium_logrus_airbrake_hook_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/gemnasium/logrus-airbrake-hook.v2",
sum = "h1:OAj3g0cR6Dx/R07QgQe8wkA9RNjB2u4i700xBkIT4e0=",
version = "v2.1.2",
)
go_repository(
name = "in_gopkg_inf_v0",
build_file_proto_mode = "disable",
importpath = "gopkg.in/inf.v0",
sum = "h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=",
version = "v0.9.1",
)
go_repository(
name = "in_gopkg_ini_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/ini.v1",
sum = "h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y=",
version = "v1.56.0",
)
go_repository(
name = "in_gopkg_jcmturner_aescts_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/aescts.v1",
sum = "h1:cVVZBK2b1zY26haWB4vbBiZrfFQnfbTVrE3xZq6hrEw=",
version = "v1.0.1",
)
go_repository(
name = "in_gopkg_jcmturner_dnsutils_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/dnsutils.v1",
sum = "h1:cIuC1OLRGZrld+16ZJvvZxVJeKPsvd5eUIvxfoN5hSM=",
version = "v1.0.1",
)
go_repository(
name = "in_gopkg_jcmturner_gokrb5_v7",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/gokrb5.v7",
sum = "h1:0709Jtq/6QXEuWRfAm260XqlpcwL1vxtO1tUE2qK8Z4=",
version = "v7.3.0",
)
go_repository(
name = "in_gopkg_jcmturner_rpc_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/rpc.v1",
sum = "h1:QHIUxTX1ISuAv9dD2wJ9HWQVuWDX/Zc0PfeC2tjc4rU=",
version = "v1.1.0",
)
go_repository(
name = "in_gopkg_natefinch_lumberjack_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/natefinch/lumberjack.v2",
sum = "h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=",
version = "v2.0.0",
)
go_repository(
name = "in_gopkg_resty_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/resty.v1",
sum = "h1:CuXP0Pjfw9rOuY6EP+UvtNvt5DSqHpIxILZKT/quCZI=",
version = "v1.12.0",
)
go_repository(
name = "in_gopkg_robfig_cron_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/robfig/cron.v2",
sum = "h1:E846t8CnR+lv5nE+VuiKTDG/v1U2stad0QzddfJC7kY=",
version = "v2.0.0-20150107220207-be2e0b0deed5",
)
go_repository(
name = "in_gopkg_square_go_jose_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/square/go-jose.v2",
sum = "h1:SK5KegNXmKmqE342YYN2qPHEnUYeoMiXXl1poUlI+o4=",
version = "v2.3.1",
)
go_repository(
name = "in_gopkg_src_d_go_billy_v4",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-billy.v4",
sum = "h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=",
version = "v4.3.2",
)
go_repository(
name = "in_gopkg_src_d_go_git_fixtures_v3",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-git-fixtures.v3",
sum = "h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=",
version = "v3.5.0",
)
go_repository(
name = "in_gopkg_src_d_go_git_v4",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-git.v4",
sum = "h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=",
version = "v4.13.1",
)
go_repository(
name = "in_gopkg_tomb_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/tomb.v1",
sum = "h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=",
version = "v1.0.0-20141024135613-dd632973f1e7",
)
go_repository(
name = "in_gopkg_warnings_v0",
build_file_proto_mode = "disable",
importpath = "gopkg.in/warnings.v0",
sum = "h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=",
version = "v0.1.2",
)
go_repository(
name = "in_gopkg_yaml_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v1",
sum = "h1:POO/ycCATvegFmVuPpQzZFJ+pGZeX22Ufu6fibxDVjU=",
version = "v1.0.0-20140924161607-9f9df34309c0",
)
go_repository(
name = "in_gopkg_yaml_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v2",
sum = "h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=",
version = "v2.4.0",
)
go_repository(
name = "in_gopkg_yaml_v3",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v3",
sum = "h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=",
version = "v3.0.0-20210107192922-496545a6307b",
)
go_repository(
name = "io_etcd_go_bbolt",
build_file_proto_mode = "disable",
importpath = "go.etcd.io/bbolt",
sum = "h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0=",
version = "v1.3.5",
)
go_repository(
name = "io_etcd_go_etcd",
build_file_proto_mode = "disable",
importpath = "go.etcd.io/etcd",
sum = "h1:1JFLBqwIgdyHN1ZtgjTBwO+blA6gVOmZurpiMEsETKo=",
version = "v0.5.0-alpha.5.0.20200910180754-dd1b699fc489",
)
go_repository(
name = "io_gitea_code_sdk_gitea",
build_file_proto_mode = "disable",
importpath = "code.gitea.io/sdk/gitea",
sum = "h1:hvDCz4wtFvo7rf5Ebj8tGd4aJ4wLPKX3BKFX9Dk1Pgs=",
version = "v0.12.0",
)
go_repository(
name = "io_k8s_api",
# NOTE: "disable_global" (unlike the "disable" used by most entries here) also
# suppresses proto rule generation for this repo's transitive dependencies;
# k8s.io/api vendors .proto files that would otherwise produce broken rules.
build_file_proto_mode = "disable_global",
importpath = "k8s.io/api",
sum = "h1:94bbZ5NTjdINJEdzOkpS4vdPhkb1VFpTYC9zh43f75c=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_apiextensions_apiserver",
build_file_proto_mode = "disable",
importpath = "k8s.io/apiextensions-apiserver",
sum = "h1:AA+cnsb6w7SZ1vD32Z+zdgfXdXY8X9uGX5bN6EoPEIo=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_apimachinery",
build_file_proto_mode = "disable_global",
importpath = "k8s.io/apimachinery",
sum = "h1:Q6XuHGlj2xc+hlMCvqyYfbv3H7SRGn2c8NycxJquDVs=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_apiserver",
build_file_proto_mode = "disable",
importpath = "k8s.io/apiserver",
sum = "h1:wTRcid53IhxhbFt4KTrFSw8tAncfr01EP91lzfcygVg=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_cli_runtime",
build_file_proto_mode = "disable",
importpath = "k8s.io/cli-runtime",
sum = "h1:wLe+osHSqcItyS3MYQXVyGFa54fppORVA8Jn7DBGSWw=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_client_go",
build_file_proto_mode = "disable_global",
importpath = "k8s.io/client-go",
# NOTE(review): this replace directive targets the same module path as
# importpath, which appears to be a no-op — a replace normally redirects to a
# fork or pinned alternate path. Presumably it mirrors a `replace` line in
# go.mod that forces the v0.21.1 pin over k8s transitive requirements; confirm
# against go.mod before removing.
replace = "k8s.io/client-go",
sum = "h1:bhblWYLZKUu+pm50plvQF8WpY6TXdRRtcS/K9WauOj4=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_cloud_provider",
build_file_proto_mode = "disable",
importpath = "k8s.io/cloud-provider",
sum = "h1:ELMIQwweSNu8gfVEnLDypxd9034S1sZJg6QcdWJOvMI=",
version = "v0.17.4",
)
go_repository(
name = "io_k8s_code_generator",
build_file_proto_mode = "disable",
importpath = "k8s.io/code-generator",
sum = "h1:jvcxHpVu5dm/LMXr3GOj/jroiP8+v2YnJE9i2OVRenk=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_component_base",
build_file_proto_mode = "disable",
importpath = "k8s.io/component-base",
sum = "h1:iLpj2btXbR326s/xNQWmPNGu0gaYSjzn7IN/5i28nQw=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_csi_translation_lib",
build_file_proto_mode = "disable",
importpath = "k8s.io/csi-translation-lib",
sum = "h1:bP9yGfCJDknP7tklCwizZtwgJNRePMVcEaFIfeA11ho=",
version = "v0.17.4",
)
go_repository(
name = "io_k8s_gengo",
build_file_proto_mode = "disable",
importpath = "k8s.io/gengo",
sum = "h1:Uusb3oh8XcdzDF/ndlI4ToKTYVlkCSJP39SRY2mfRAw=",
version = "v0.0.0-20201214224949-b6c5ce23f027",
)
go_repository(
name = "io_k8s_klog",
build_file_proto_mode = "disable",
importpath = "k8s.io/klog",
sum = "h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8=",
version = "v1.0.0",
)
go_repository(
name = "io_k8s_klog_v2",
build_file_proto_mode = "disable",
importpath = "k8s.io/klog/v2",
sum = "h1:Q3gmuM9hKEjefWFFYF0Mat+YyFJvsUyYuwyNNJ5C9Ts=",
version = "v2.8.0",
)
go_repository(
name = "io_k8s_kube_aggregator",
build_file_proto_mode = "disable",
importpath = "k8s.io/kube-aggregator",
sum = "h1:rL4fsftMaqkKjaibArYDaBeqN41CHaJzgRJjUB9IrIg=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_kube_openapi",
build_file_proto_mode = "disable",
importpath = "k8s.io/kube-openapi",
sum = "h1:vEx13qjvaZ4yfObSSXW7BrMc/KQBBT/Jyee8XtLf4x0=",
version = "v0.0.0-20210305001622-591a79e4bda7",
)
go_repository(
name = "io_k8s_kubectl",
build_file_proto_mode = "disable",
importpath = "k8s.io/kubectl",
sum = "h1:t9uxaZzGvqc2jY96mjnPSjFHtaKOxoUegeGZdaGT6aw=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_kubernetes",
build_file_proto_mode = "disable",
importpath = "k8s.io/kubernetes",
sum = "h1:wJx/r2HuPVaaBeCUk/P47GSK0eyrj3mI/kESRFBp6/A=",
version = "v1.14.7",
)
go_repository(
name = "io_k8s_legacy_cloud_providers",
build_file_proto_mode = "disable",
importpath = "k8s.io/legacy-cloud-providers",
sum = "h1:VvFqJGiYAr2gIdoNuqbeZLEdxIFeN4Yt6OLJS9l2oIE=",
version = "v0.17.4",
)
go_repository(
name = "io_k8s_metrics",
build_file_proto_mode = "disable",
importpath = "k8s.io/metrics",
sum = "h1:cKq0+Z7wg5qkK1n8dryNffKfU22DBX83JguGpR+TCk0=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_sigs_apiserver_network_proxy_konnectivity_client",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/apiserver-network-proxy/konnectivity-client",
sum = "h1:4uqm9Mv+w2MmBYD+F4qf/v6tDFUdPOk29C095RbU5mY=",
version = "v0.0.15",
)
go_repository(
name = "io_k8s_sigs_boskos",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/boskos",
sum = "h1:OCr84Jrq4HgrYxP9wrfSsGioR1VSpTZMh/RXMu5sm+8=",
version = "v0.0.0-20210730172138-093b54882439",
)
go_repository(
name = "io_k8s_sigs_controller_runtime",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/controller-runtime",
sum = "h1:ZIZ/dtpboPSbZYY7uUz2OzrkaBTOThx2yekLtpGB+zY=",
version = "v0.9.0",
)
go_repository(
name = "io_k8s_sigs_controller_tools",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/controller-tools",
sum = "h1:PXOHvyYAjWfO0UfQvaUo33HpXNCOilV3i/Vjc7iM1/A=",
version = "v0.2.9-0.20200414181213-645d44dca7c0",
)
go_repository(
name = "io_k8s_sigs_kustomize",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/kustomize",
sum = "h1:JUufWFNlI44MdtnjUqVnvh29rR37PQFzPbLXqhyOyX0=",
version = "v2.0.3+incompatible",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff",
sum = "h1:LOs1LZWMsz1xs77Phr/pkB4LFaavH7IVq/3+WTN9XTA=",
version = "v1.0.1",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff_v2",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff/v2",
sum = "h1:I0h4buiCqDtPztO3NOiyoNMtqSIfld49D4Wj3UBXYZA=",
version = "v2.0.1",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff_v3",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff/v3",
sum = "h1:dOmIZBMfhcHS09XZkMyUgkq5trg3/jRyJYFZUiaOp8E=",
version = "v3.0.0",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff_v4",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff/v4",
sum = "h1:C4r9BgJ98vrKnnVCjwCSXcWjWe0NKcUQkmzDXZXGwH8=",
version = "v4.1.0",
)
go_repository(
name = "io_k8s_sigs_testing_frameworks",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/testing_frameworks",
sum = "h1:vK0+tvjF0BZ/RYFeZ1E6BYBwHJJXhjuZ3TdsEKH+UQM=",
version = "v0.1.2",
)
go_repository(
name = "io_k8s_sigs_yaml",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/yaml",
sum = "h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q=",
version = "v1.2.0",
)
go_repository(
name = "io_k8s_test_infra",
build_file_proto_mode = "disable",
importpath = "k8s.io/test-infra",
sum = "h1:g5GLdRIOMs9vnEM/ZWG67f1Stn8cW1dER+MNK9P7Xn8=",
version = "v0.0.0-20210903101950-5c7809e9c5e9",
)
go_repository(
name = "io_k8s_utils",
build_file_proto_mode = "disable",
importpath = "k8s.io/utils",
sum = "h1:MSqsVQ3pZvPGTqCjptfimO2WjG7A9un2zcpiHkA6M/s=",
version = "v0.0.0-20210527160623-6fdb442a123b",
)
go_repository(
name = "io_opencensus_go",
build_file_proto_mode = "disable",
importpath = "go.opencensus.io",
sum = "h1:LYy1Hy3MJdrCdMwwzxA/dRok4ejH+RwNGbuoD9fCjto=",
version = "v0.22.4",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_aws",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/aws",
sum = "h1:YsbWYxDZkC7x2OxlsDEYvvEXZ3cBI3qBgUK5BqkZvRw=",
version = "v0.0.0-20181029163544-2befc13012d0",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_ocagent",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/ocagent",
sum = "h1:Z1n6UAyr0QwM284yUuh5Zd8JlvxUGAhFZcgMJkMPrGM=",
version = "v0.6.0",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_prometheus",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/prometheus",
sum = "h1:SByaIoWwNgMdPSgl5sMqM2KDE5H/ukPWBRo314xiDvg=",
version = "v0.1.0",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_stackdriver",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/stackdriver",
sum = "h1:RX9W6FelAqTVnBi/bRXJLXr9n18v4QkQwZYIdnNS51I=",
version = "v0.13.1",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_zipkin",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/zipkin",
sum = "h1:PR+1zWqY8ceXs1qDQQIlgXe+sdiwCf0n32bH4+Epk8g=",
version = "v0.1.1",
)
go_repository(
name = "io_opencensus_go_contrib_integrations_ocsql",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/integrations/ocsql",
sum = "h1:kfg5Yyy1nYUrqzyfW5XX+dzMASky8IJXhtHe0KTYNS4=",
version = "v0.1.4",
)
go_repository(
name = "io_opencensus_go_contrib_resource",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/resource",
sum = "h1:4r2CANuYhKGmYWP02+5E94rLRcS/YeD+KlxSrOsMxk0=",
version = "v0.1.1",
)
go_repository(
name = "io_rsc_binaryregexp",
build_file_proto_mode = "disable",
importpath = "rsc.io/binaryregexp",
sum = "h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=",
version = "v0.2.0",
)
go_repository(
name = "io_rsc_letsencrypt",
build_file_proto_mode = "disable",
importpath = "rsc.io/letsencrypt",
sum = "h1:H7xDfhkaFFSYEJlKeq38RwX2jYcnTeHuDQyT+mMNMwM=",
version = "v0.0.3",
)
go_repository(
name = "io_rsc_quote_v3",
build_file_proto_mode = "disable",
importpath = "rsc.io/quote/v3",
sum = "h1:9JKUTTIUgS6kzR9mK1YuGKv6Nl+DijDNIc0ghT58FaY=",
version = "v3.1.0",
)
go_repository(
name = "io_rsc_sampler",
build_file_proto_mode = "disable",
importpath = "rsc.io/sampler",
sum = "h1:7uVkIFmeBqHfdjD+gZwtXXI+RODJ2Wc4O7MPEh/QiW4=",
version = "v1.3.0",
)
go_repository(
name = "ml_vbom_util",
build_file_proto_mode = "disable",
importpath = "vbom.ml/util",
sum = "h1:O69FD9pJA4WUZlEwYatBEEkRWKQ5cKodWpdKTrCS/iQ=",
version = "v0.0.0-20180919145318-efcd4e0f9787",
)
go_repository(
name = "org_apache_git_thrift_git",
build_file_proto_mode = "disable",
importpath = "git.apache.org/thrift.git",
sum = "h1:CMxsZlAmxKs+VAZMlDDL0wXciMblJcutQbEe3A9CYUM=",
version = "v0.12.0",
)
go_repository(
name = "org_bazil_fuse",
build_file_proto_mode = "disable",
importpath = "bazil.org/fuse",
sum = "h1:FNCRpXiquG1aoyqcIWVFmpTSKVcx2bQD38uZZeGtdlw=",
version = "v0.0.0-20180421153158-65cc252bf669",
)
go_repository(
name = "org_go4",
build_file_proto_mode = "disable",
importpath = "go4.org",
sum = "h1:iqAGo78tVOJXELHQFRjR6TMwItrvXH4hrGJ32I/NFF8=",
version = "v0.0.0-20201209231011-d4a079459e60",
)
go_repository(
name = "org_golang_google_api",
build_file_proto_mode = "disable",
importpath = "google.golang.org/api",
sum = "h1:Le77IccnTqEa8ryp9wIpX5W3zYm7Gf9LhOp9PHcwFts=",
version = "v0.32.0",
)
go_repository(
name = "org_golang_google_appengine",
build_file_proto_mode = "disable",
importpath = "google.golang.org/appengine",
sum = "h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=",
version = "v1.6.7",
)
go_repository(
name = "org_golang_google_cloud",
build_file_proto_mode = "disable",
importpath = "google.golang.org/cloud",
sum = "h1:Cpp2P6TPjujNoC5M2KHY6g7wfyLYfIWRZaSdIKfDasA=",
version = "v0.0.0-20151119220103-975617b05ea8",
)
go_repository(
name = "org_golang_google_genproto",
build_file_proto_mode = "disable",
importpath = "google.golang.org/genproto",
sum = "h1:pOwg4OoaRYScjmR4LlLgdtnyoHYTSAVhhqe5uPdpII8=",
version = "v0.0.0-20201110150050-8816d57aaa9a",
)
go_repository(
name = "org_golang_google_grpc",
build_file_proto_mode = "disable",
importpath = "google.golang.org/grpc",
sum = "h1:zWTV+LMdc3kaiJMSTOFz2UgSBgx8RNQoTGiZu3fR9S0=",
version = "v1.32.0",
)
go_repository(
name = "org_golang_google_protobuf",
build_file_proto_mode = "disable",
importpath = "google.golang.org/protobuf",
sum = "h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=",
version = "v1.26.0",
)
go_repository(
name = "org_golang_x_crypto",
build_file_proto_mode = "disable",
importpath = "golang.org/x/crypto",
sum = "h1:It14KIkyBFYkHkwZ7k45minvA9aorojkyjGk9KJ5B/w=",
version = "v0.0.0-20210322153248-0c34fe9e7dc2",
)
go_repository(
name = "org_golang_x_exp",
build_file_proto_mode = "disable",
importpath = "golang.org/x/exp",
sum = "h1:QE6XYQK6naiK1EPAe1g/ILLxN5RBoH5xkJk3CqlMI/Y=",
version = "v0.0.0-20200224162631-6cc2880d07d6",
)
go_repository(
name = "org_golang_x_image",
build_file_proto_mode = "disable",
importpath = "golang.org/x/image",
sum = "h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=",
version = "v0.0.0-20190802002840-cff245a6509b",
)
go_repository(
name = "org_golang_x_lint",
build_file_proto_mode = "disable",
importpath = "golang.org/x/lint",
replace = "golang.org/x/lint",
sum = "h1:QzoH/1pFpZguR8NrRHLcO6jKqfv2zpuSqZLgdm7ZmjI=",
version = "v0.0.0-20190409202823-959b441ac422",
)
go_repository(
name = "org_golang_x_mobile",
build_file_proto_mode = "disable",
importpath = "golang.org/x/mobile",
sum = "h1:b373EGXtj0o+ssqkOkdVphTCZ/fVg2LwhctJn2QQbqA=",
version = "v0.0.0-20190806162312-597adff16ade",
)
go_repository(
name = "org_golang_x_mod",
build_file_proto_mode = "disable",
importpath = "golang.org/x/mod",
sum = "h1:8pl+sMODzuvGJkmj2W4kZihvVb5mKm8pB/X44PIQHv8=",
version = "v0.4.0",
)
go_repository(
name = "org_golang_x_net",
build_file_proto_mode = "disable",
importpath = "golang.org/x/net",
sum = "h1:DzZ89McO9/gWPsQXS/FVKAlG02ZjaQ6AlZRBimEYOd0=",
version = "v0.0.0-20210428140749-89ef3d95e781",
)
go_repository(
name = "org_golang_x_oauth2",
build_file_proto_mode = "disable",
importpath = "golang.org/x/oauth2",
sum = "h1:ld7aEMNHoBnnDAX15v1T6z31v8HwR2A9FYOuAhWqkwc=",
version = "v0.0.0-20200902213428-5d25da1a8d43",
)
go_repository(
name = "org_golang_x_sync",
build_file_proto_mode = "disable",
importpath = "golang.org/x/sync",
sum = "h1:DcqTD9SDLc+1P/r1EmRBwnVsrOwW+kk2vWf9n+1sGhs=",
version = "v0.0.0-20201207232520-09787c993a3a",
)
go_repository(
name = "org_golang_x_sys",
build_file_proto_mode = "disable",
importpath = "golang.org/x/sys",
sum = "h1:JWgyZ1qgdTaF3N3oxC+MdTV7qvEEgHo3otj+HB5CM7Q=",
version = "v0.0.0-20210603081109-ebe580a85c40",
)
go_repository(
name = "org_golang_x_term",
build_file_proto_mode = "disable",
importpath = "golang.org/x/term",
sum = "h1:SZxvLBoTP5yHO3Frd4z4vrF+DBX9vMVanchswa69toE=",
version = "v0.0.0-20210220032956-6a3ed077a48d",
)
go_repository(
name = "org_golang_x_text",
build_file_proto_mode = "disable",
importpath = "golang.org/x/text",
sum = "h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=",
version = "v0.3.6",
)
go_repository(
name = "org_golang_x_time",
build_file_proto_mode = "disable",
importpath = "golang.org/x/time",
sum = "h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE=",
version = "v0.0.0-20210220033141-f8bda1e9f3ba",
)
go_repository(
name = "org_golang_x_tools",
build_file_proto_mode = "disable",
importpath = "golang.org/x/tools",
sum = "h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=",
version = "v0.1.0",
)
go_repository(
name = "org_golang_x_xerrors",
build_file_proto_mode = "disable",
importpath = "golang.org/x/xerrors",
sum = "h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=",
version = "v0.0.0-20200804184101-5ec99f83aff1",
)
go_repository(
name = "org_gonum_v1_gonum",
build_file_proto_mode = "disable",
importpath = "gonum.org/v1/gonum",
sum = "h1:OB/uP/Puiu5vS5QMRPrXCDWUPb+kt8f1KW8oQzFejQw=",
version = "v0.0.0-20190331200053-3d26580ed485",
)
go_repository(
name = "org_gonum_v1_netlib",
build_file_proto_mode = "disable",
importpath = "gonum.org/v1/netlib",
sum = "h1:jRyg0XfpwWlhEV8mDfdNGBeSJM2fuyh9Yjrnd8kF2Ts=",
version = "v0.0.0-20190331212654-76723241ea4e",
)
go_repository(
name = "org_modernc_cc",
build_file_proto_mode = "disable",
importpath = "modernc.org/cc",
sum = "h1:nPibNuDEx6tvYrUAtvDTTw98rx5juGsa5zuDnKwEEQQ=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_golex",
build_file_proto_mode = "disable",
importpath = "modernc.org/golex",
sum = "h1:wWpDlbK8ejRfSyi0frMyhilD3JBvtcx2AdGDnU+JtsE=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_mathutil",
build_file_proto_mode = "disable",
importpath = "modernc.org/mathutil",
sum = "h1:93vKjrJopTPrtTNpZ8XIovER7iCIH1QU7wNbOQXC60I=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_strutil",
build_file_proto_mode = "disable",
importpath = "modernc.org/strutil",
sum = "h1:XVFtQwFVwc02Wk+0L/Z/zDDXO81r5Lhe6iMKmGX3KhE=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_xc",
build_file_proto_mode = "disable",
importpath = "modernc.org/xc",
sum = "h1:7ccXrupWZIS3twbUGrtKmHS2DXY6xegFua+6O3xgAFU=",
version = "v1.0.0",
)
go_repository(
name = "org_mongodb_go_mongo_driver",
build_file_proto_mode = "disable",
importpath = "go.mongodb.org/mongo-driver",
sum = "h1:jxcFYjlkl8xaERsgLo+RNquI0epW6zuy/ZRQs6jnrFA=",
version = "v1.1.2",
)
go_repository(
name = "org_uber_go_atomic",
build_file_proto_mode = "disable",
importpath = "go.uber.org/atomic",
sum = "h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=",
version = "v1.7.0",
)
go_repository(
name = "org_uber_go_goleak",
build_file_proto_mode = "disable",
importpath = "go.uber.org/goleak",
sum = "h1:z+mqJhf6ss6BSfSM671tgKyZBFPTTJM+HLxnhPC3wu0=",
version = "v1.1.10",
)
go_repository(
name = "org_uber_go_multierr",
build_file_proto_mode = "disable",
importpath = "go.uber.org/multierr",
sum = "h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=",
version = "v1.6.0",
)
go_repository(
name = "org_uber_go_tools",
build_file_proto_mode = "disable",
importpath = "go.uber.org/tools",
sum = "h1:0mgffUl7nfd+FpvXMVz4IDEaUSmT1ysygQC7qYo7sG4=",
version = "v0.0.0-20190618225709-2cfd321de3ee",
)
go_repository(
name = "org_uber_go_zap",
build_file_proto_mode = "disable",
importpath = "go.uber.org/zap",
sum = "h1:MTjgFu6ZLKvY6Pvaqk97GlxNBuMpV4Hy/3P6tRGlI2U=",
version = "v1.17.0",
)
go_repository(
name = "sh_helm_helm_v3",
build_file_proto_mode = "disable",
importpath = "helm.sh/helm/v3",
sum = "h1:aykwPMVyQyncZ8iLNVMXgJ1l3c6W0+LSOPmqp8JdCjs=",
version = "v3.1.1",
)
go_repository(
name = "tools_gotest",
build_file_proto_mode = "disable",
importpath = "gotest.tools",
sum = "h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=",
version = "v2.2.0+incompatible",
)
go_repository(
name = "tools_gotest_v3",
build_file_proto_mode = "disable",
importpath = "gotest.tools/v3",
sum = "h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0=",
version = "v3.0.3",
)
go_repository(
name = "xyz_gomodules_jsonpatch_v2",
build_file_proto_mode = "disable",
importpath = "gomodules.xyz/jsonpatch/v2",
sum = "h1:4pT439QV83L+G9FkcCriY6EkpcK6r6bK+A5FBUMI7qY=",
version = "v2.2.0",
)
| true | true |
f731a62bb6c4d06bd43c8c0f54f539302a734f73 | 539 | py | Python | regexlib/python_re2_test_file/regexlib_7018.py | yetingli/ReDoS-Benchmarks | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | [
"MIT"
] | 1 | 2022-01-24T14:43:23.000Z | 2022-01-24T14:43:23.000Z | regexlib/python_re2_test_file/regexlib_7018.py | yetingli/ReDoS-Benchmarks | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | [
"MIT"
] | null | null | null | regexlib/python_re2_test_file/regexlib_7018.py | yetingli/ReDoS-Benchmarks | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | [
"MIT"
] | null | null | null | # 7018
# ^([a-zA-Z](?:(?:(?:\w[\.\_]?)*)\w)+)([a-zA-Z0-9])$
# EXPONENT
# nums:5
# EXPONENT AttackString:"a"+"_"*32+"!1 __EOA(iii)"
import re2 as re
from time import perf_counter
regex = """^([a-zA-Z](?:(?:(?:\w[\.\_]?)*)\w)+)([a-zA-Z0-9])$"""
REGEX = re.compile(regex)
for i in range(0, 150000):
ATTACK = "a" + "_" * i * 1 + "!1 __EOA(iii)"
LEN = len(ATTACK)
BEGIN = perf_counter()
m = REGEX.search(ATTACK)
# m = REGEX.match(ATTACK)
DURATION = perf_counter() - BEGIN
print(f"{i *1}: took {DURATION} seconds!") | 28.368421 | 64 | 0.541744 |
import re2 as re
from time import perf_counter
regex = """^([a-zA-Z](?:(?:(?:\w[\.\_]?)*)\w)+)([a-zA-Z0-9])$"""
REGEX = re.compile(regex)
for i in range(0, 150000):
ATTACK = "a" + "_" * i * 1 + "!1 __EOA(iii)"
LEN = len(ATTACK)
BEGIN = perf_counter()
m = REGEX.search(ATTACK)
DURATION = perf_counter() - BEGIN
print(f"{i *1}: took {DURATION} seconds!") | true | true |
f731a7aa0c3b474639722781f113b0d34999a1c2 | 1,011 | py | Python | nova/api/openstack/compute/legacy_v2/contrib/used_limits_for_admin.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | 7 | 2015-09-22T11:27:16.000Z | 2015-11-02T12:33:46.000Z | nova/api/openstack/compute/legacy_v2/contrib/used_limits_for_admin.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | 9 | 2015-05-20T11:20:17.000Z | 2017-07-27T08:21:33.000Z | nova/api/openstack/compute/legacy_v2/contrib/used_limits_for_admin.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | 13 | 2015-05-05T09:34:04.000Z | 2017-11-08T02:03:46.000Z | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
class Used_limits_for_admin(extensions.ExtensionDescriptor):
    """Provide data to admin on limited resources used by other tenants."""

    # Display name of the extension as exposed through the extensions API.
    name = "UsedLimitsForAdmin"
    # URL alias under which the extension is registered.
    alias = "os-used-limits-for-admin"
    # Namespace URI for this (legacy v2 API) extension.
    namespace = ("http://docs.openstack.org/compute/ext/used_limits_for_admin"
                 "/api/v1.1")
    # Timestamp of the last update to this extension definition (ISO 8601).
    updated = "2013-05-02T00:00:00Z"
| 38.884615 | 78 | 0.721068 |
from nova.api.openstack import extensions
class Used_limits_for_admin(extensions.ExtensionDescriptor):
name = "UsedLimitsForAdmin"
alias = "os-used-limits-for-admin"
namespace = ("http://docs.openstack.org/compute/ext/used_limits_for_admin"
"/api/v1.1")
updated = "2013-05-02T00:00:00Z"
| true | true |
f731a834130281feea54b8acb4fe90404145a770 | 1,064 | py | Python | checkov/terraform/checks/resource/aws/PasswordPolicyExpiration.py | gustavotabares/checkov | 79250dba0fc5227ef3027b3c1300739f063b0d05 | [
"Apache-2.0"
] | null | null | null | checkov/terraform/checks/resource/aws/PasswordPolicyExpiration.py | gustavotabares/checkov | 79250dba0fc5227ef3027b3c1300739f063b0d05 | [
"Apache-2.0"
] | null | null | null | checkov/terraform/checks/resource/aws/PasswordPolicyExpiration.py | gustavotabares/checkov | 79250dba0fc5227ef3027b3c1300739f063b0d05 | [
"Apache-2.0"
] | null | null | null | from checkov.common.models.enums import CheckResult, CheckCategories
from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
class PasswordPolicyExpiration(BaseResourceCheck):
    """CKV_AWS_9: the IAM account password policy must expire passwords
    within 90 days or less (``max_password_age`` <= 90)."""

    def __init__(self):
        name = "Ensure IAM password policy expires passwords within 90 days or less"
        id = "CKV_AWS_9"
        supported_resources = ['aws_iam_account_password_policy']
        categories = [CheckCategories.IAM]
        super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)

    def scan_resource_conf(self, conf):
        """
        validates iam password policy
        https://www.terraform.io/docs/providers/aws/r/iam_account_password_policy.html
        :param conf: aws_iam_account_password_policy configuration
        :return: <CheckResult>
        """
        key = 'max_password_age'
        if key in conf.keys():
            # Passwords must expire within 90 days or less.  The previous
            # comparison (`>= 90`) passed arbitrarily long expiration
            # periods (e.g. 365 days), contradicting the check's intent.
            if conf[key][0] <= 90:
                return CheckResult.PASSED
        return CheckResult.FAILED
check = PasswordPolicyExpiration()
| 38 | 106 | 0.698308 | from checkov.common.models.enums import CheckResult, CheckCategories
from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
class PasswordPolicyExpiration(BaseResourceCheck):
def __init__(self):
name = "Ensure IAM password policy expires passwords within 90 days or less"
id = "CKV_AWS_9"
supported_resources = ['aws_iam_account_password_policy']
categories = [CheckCategories.IAM]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf):
key = 'max_password_age'
if key in conf.keys():
if conf[key][0] >= 90:
return CheckResult.PASSED
return CheckResult.FAILED
check = PasswordPolicyExpiration()
| true | true |
f731aa3598934a92decae817cbccea150452d087 | 1,898 | py | Python | python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_execution_plan.py | JPeer264/dagster-fork | 32cc87a36134be7c442fa85d6867eb1d3301aea0 | [
"Apache-2.0"
] | 1 | 2020-09-19T16:35:59.000Z | 2020-09-19T16:35:59.000Z | python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_execution_plan.py | JPeer264/dagster-fork | 32cc87a36134be7c442fa85d6867eb1d3301aea0 | [
"Apache-2.0"
] | null | null | null | python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_execution_plan.py | JPeer264/dagster-fork | 32cc87a36134be7c442fa85d6867eb1d3301aea0 | [
"Apache-2.0"
] | null | null | null | from dagster.api.snapshot_execution_plan import sync_get_external_execution_plan
from dagster.core.snap.execution_plan_snapshot import ExecutionPlanSnapshot
from .utils import get_foo_pipeline_handle
def test_execution_plan_snapshot_api():
    """A full-plan snapshot contains both compute steps of the foo pipeline."""
    handle = get_foo_pipeline_handle()
    snapshot = sync_get_external_execution_plan(
        handle.get_origin(), environment_dict={}, mode="default", snapshot_id="12345",
    )
    assert isinstance(snapshot, ExecutionPlanSnapshot)
    expected_keys = ['do_something.compute', 'do_input.compute']
    assert snapshot.step_keys_to_execute == expected_keys
    assert len(snapshot.steps) == 2
def test_execution_plan_with_step_keys_to_execute_snapshot_api():
    """Restricting step_keys_to_execute narrows the keys but keeps every step."""
    handle = get_foo_pipeline_handle()
    requested_keys = ['do_something.compute']
    snapshot = sync_get_external_execution_plan(
        handle.get_origin(),
        environment_dict={},
        mode="default",
        snapshot_id="12345",
        step_keys_to_execute=requested_keys,
    )
    assert isinstance(snapshot, ExecutionPlanSnapshot)
    assert snapshot.step_keys_to_execute == requested_keys
    assert len(snapshot.steps) == 2
def test_execution_plan_with_subset_snapshot_api():
    """Selecting a single solid yields a plan with exactly one step."""
    handle = get_foo_pipeline_handle()
    snapshot = sync_get_external_execution_plan(
        handle.get_origin(),
        environment_dict={'solids': {'do_input': {'inputs': {'x': {'value': "test"}}}}},
        mode="default",
        snapshot_id="12345",
        solid_selection=["do_input"],
    )
    assert isinstance(snapshot, ExecutionPlanSnapshot)
    assert snapshot.step_keys_to_execute == ['do_input.compute']
    assert len(snapshot.steps) == 1
| 33.892857 | 95 | 0.74236 | from dagster.api.snapshot_execution_plan import sync_get_external_execution_plan
from dagster.core.snap.execution_plan_snapshot import ExecutionPlanSnapshot
from .utils import get_foo_pipeline_handle
def test_execution_plan_snapshot_api():
pipeline_handle = get_foo_pipeline_handle()
execution_plan_snapshot = sync_get_external_execution_plan(
pipeline_handle.get_origin(), environment_dict={}, mode="default", snapshot_id="12345",
)
assert isinstance(execution_plan_snapshot, ExecutionPlanSnapshot)
assert execution_plan_snapshot.step_keys_to_execute == [
'do_something.compute',
'do_input.compute',
]
assert len(execution_plan_snapshot.steps) == 2
def test_execution_plan_with_step_keys_to_execute_snapshot_api():
pipeline_handle = get_foo_pipeline_handle()
execution_plan_snapshot = sync_get_external_execution_plan(
pipeline_handle.get_origin(),
environment_dict={},
mode="default",
snapshot_id="12345",
step_keys_to_execute=['do_something.compute'],
)
assert isinstance(execution_plan_snapshot, ExecutionPlanSnapshot)
assert execution_plan_snapshot.step_keys_to_execute == [
'do_something.compute',
]
assert len(execution_plan_snapshot.steps) == 2
def test_execution_plan_with_subset_snapshot_api():
pipeline_handle = get_foo_pipeline_handle()
execution_plan_snapshot = sync_get_external_execution_plan(
pipeline_handle.get_origin(),
environment_dict={'solids': {'do_input': {'inputs': {'x': {'value': "test"}}}}},
mode="default",
snapshot_id="12345",
solid_selection=["do_input"],
)
assert isinstance(execution_plan_snapshot, ExecutionPlanSnapshot)
assert execution_plan_snapshot.step_keys_to_execute == [
'do_input.compute',
]
assert len(execution_plan_snapshot.steps) == 1
| true | true |
f731aa9055ffb5d097e1f239a5baee16564f7fec | 1,564 | py | Python | improved-ver1/6.Real_Estate_Own_Data_Prediction_improved1.py | ARAN1218/RealEstateRentPrediction_AI | da537f3204fa1bc80a499a03b2fd015926ccc755 | [
"MIT"
] | 1 | 2021-06-26T04:44:14.000Z | 2021-06-26T04:44:14.000Z | improved-ver1/6.Real_Estate_Own_Data_Prediction_improved1.py | ARAN1218/RealEstateRentPrediction_AI | da537f3204fa1bc80a499a03b2fd015926ccc755 | [
"MIT"
] | 1 | 2021-08-06T15:58:27.000Z | 2021-11-27T14:59:00.000Z | improved-ver1/6.Real_Estate_Own_Data_Prediction_improved1.py | ARAN1218/RealEstateRentPrediction_AI | da537f3204fa1bc80a499a03b2fd015926ccc755 | [
"MIT"
] | null | null | null | #必要なライブラリをインポート
import numpy as np
import pandas as pd
import xgboost as xgb
from sklearn.preprocessing import LabelEncoder
# Lift numpy's print truncation so the full category lists are shown below
np.set_printoptions(threshold=np.inf)
# Assumes the model was already built from the training data via REDP:
#   model = RERL(df_l)
# Because of label encoding, the address / train-line / layout fields can
# only take values that appeared in the training/test data, so print the
# previously collected usable categories here.  The three fields below must
# be chosen from this output.  (Running this in a separate cell makes the
# lists easier to read.)
print(adressC,'\n','\n',stationC,'\n','\n',layoutC)
# Function that predicts the rent for arbitrary input data
# (within the range the model was trained on).
# REODP = Real_Estate_Own_Data_Prediction
def REODP(address,station,access,mc_fees,k_fees,s_fees,area,layout,age):
    """Predict the rent for one manually specified property and print it.

    Relies on module-level globals created earlier: the trained model
    ``model`` and the fitted label encoders ``LE1`` (address), ``LE2``
    (train line) and ``LE3`` (layout).  Address, line and layout values
    must come from the encoders' known categories (printed above),
    otherwise ``transform`` raises.
    """
    # Store the entered data in the dictionary d_try
    d_try = {
        '住所':address,
        '路線':station,
        '交通':access,
        '管理共益費':mc_fees,
        '礼金':k_fees,
        '敷金':s_fees,
        '専有面積':area,
        '間取り':layout,
        '築年数':age
    }
    # Convert the dictionary d_try into the DataFrame df_try
    df_try = pd.DataFrame(d_try,index=['own'])
    # Show the input so it can be visually confirmed (Jupyter `display`)
    display(df_try)
    # Label-encode the string columns into numeric codes
    df_try.住所 = LE1.transform(df_try.住所)
    df_try.路線 = LE2.transform(df_try.路線)
    df_try.間取り = LE3.transform(df_try.間取り)
    # Unify all dtypes as float64
    df_try = df_try.astype('float64')
    # Print the predicted rent rounded to two decimal places
    # (unit: 万円 = 10,000 yen)
    df_try = xgb.DMatrix(df_try)
    return print('予想賃料:',round(float(model.predict(df_try)),2),'万円')
# REODP(address, train line, access, management/common-service fees,
#       key money, deposit, floor area, layout, building age)
# Mind the data type of each argument.
# Address, line and layout must be chosen from the values printed above,
# since label encoding only knows categories seen in the train/test data.
REODP(address=''
      ,station=''
      ,access=0
      ,mc_fees=0
      ,k_fees=0
      ,s_fees=0
      ,area=0
      ,layout=''
      ,age=0
     )
| 22.342857 | 74 | 0.671995 |
import numpy as np
import pandas as pd
import xgboost as xgb
from sklearn.preprocessing import LabelEncoder
np.set_printoptions(threshold=np.inf)
print(adressC,'\n','\n',stationC,'\n','\n',layoutC)
def REODP(address,station,access,mc_fees,k_fees,s_fees,area,layout,age):
d_try = {
'住所':address,
'路線':station,
'交通':access,
'管理共益費':mc_fees,
'礼金':k_fees,
'敷金':s_fees,
'専有面積':area,
'間取り':layout,
'築年数':age
}
df_try = pd.DataFrame(d_try,index=['own'])
display(df_try)
df_try.住所 = LE1.transform(df_try.住所)
df_try.路線 = LE2.transform(df_try.路線)
df_try.間取り = LE3.transform(df_try.間取り)
df_try = df_try.astype('float64')
df_try = xgb.DMatrix(df_try)
return print('予想賃料:',round(float(model.predict(df_try)),2),'万円')
REODP(address=''
,station=''
,access=0
,mc_fees=0
,k_fees=0
,s_fees=0
,area=0
,layout=''
,age=0
)
| true | true |
f731aaf8a13f76df61485cd65cdaf166bf910a5e | 7,617 | py | Python | modules/filter_bed.py | hillerlab/TOGA | c4bce48c760d51739a8a63132316f49de77e0c71 | [
"MIT"
] | 32 | 2020-07-20T03:18:28.000Z | 2022-03-28T09:04:48.000Z | modules/filter_bed.py | hillerlab/TOGA | c4bce48c760d51739a8a63132316f49de77e0c71 | [
"MIT"
] | 28 | 2020-07-20T14:03:55.000Z | 2022-02-28T08:08:39.000Z | modules/filter_bed.py | hillerlab/TOGA | c4bce48c760d51739a8a63132316f49de77e0c71 | [
"MIT"
] | 5 | 2020-07-23T13:03:32.000Z | 2021-09-16T15:39:23.000Z | #!/usr/bin/env python3
"""Filter bed-12 file.
Remove:
- incomplete annotations
- genes without CDS
"""
import argparse
import sys
import re
from collections import Counter
# Import shared helpers; fall back to a flat layout when the "modules"
# package prefix is absent (e.g. when the script is run from inside the
# modules directory itself).
try:
    from modules.common import die
    from modules.common import eprint
except ImportError:
    from common import die
    # fixed: was "from commom import eprint" — a typo that raised
    # ModuleNotFoundError whenever this fallback branch was taken
    from common import eprint
__author__ = "Bogdan Kirilenko, 2020."
__version__ = "1.0"
__email__ = "bogdan.kirilenko@senckenberg.de"
__credits__ = ["Michael Hiller", "Virag Sharma", "David Jebb"]
ALLOWED_CHARSET = "a-zA-Z0-9._-"
ALLOWED_CHARSET_RE = rf"[^{ALLOWED_CHARSET}]"
def parse_args():
    """Parse and validate command-line arguments.

    Returns:
        argparse.Namespace with:
            input        -- path to the bed-12 annotation file
            output       -- output path, or "stdout" (default) to print
            out_of_frame -- keep out-of-frame genes if True
    """
    app = argparse.ArgumentParser()
    app.add_argument("input", help="Bed-12 formatted annotation track.")
    # fixed: a required positional ignores `default`, so "stdout" was
    # unreachable; nargs="?" makes the argument optional as documented
    app.add_argument(
        "output",
        nargs="?",
        default="stdout",
        help="Output destination, stdout as default",
    )
    app.add_argument(
        "--out_of_frame",
        action="store_true",
        dest="out_of_frame",
        help="Do not skip out-of-frame genes.",
    )
    # print help if there are no args
    if len(sys.argv) < 2:
        app.print_help()
        sys.exit(0)
    args = app.parse_args()
    return args
def prepare_bed_file(bed_file, output, ouf=False, save_rejected=None, only_chrom=None):
    """Filter the bed file given and save the updated version.

    Drops non-coding transcripts (no CDS), aborts on malformed or duplicated
    transcript names and, unless ``ouf`` is set, skips out-of-frame
    transcripts whose CDS length is not divisible by 3.  UTRs are preserved
    in the surviving lines; CDS blocks are only computed for the frame check.

    :param bed_file: path to the input bed-12 annotation file
    :param output: output path, or "stdout" to write to standard output
    :param ouf: keep out-of-frame transcripts if True
    :param save_rejected: optional path for rejected IDs + rejection reason
    :param only_chrom: if set, keep only transcripts on this chromosome
    """
    new_lines = []  # keep updated lines
    rejected = []  # keep IDs of skipped transcripts + the reason why
    names = Counter()  # we need to make sure that all names are unique
    allowed_re = re.compile(ALLOWED_CHARSET_RE).search
    broken_names = []

    f = open(bed_file, "r")
    for num, line in enumerate(f, 1):
        # parse bed file according to specification
        line_data = line.rstrip().split("\t")
        if len(line_data) != 12:
            f.close()  # this is for sure an error
            # it is possible only if something except a bed12 was provided
            # (fixed: the message was missing its f-prefix and printed the
            # "{len(line_data)}" placeholder literally)
            die(
                f"Error! Bed 12 file is required! Got a file with {len(line_data)} fields instead"
            )

        chrom = line_data[0]
        if only_chrom and chrom != only_chrom:
            # TOGA allows to perform the analysis on a specific chromosome only
            # if so, we can skip all transcripts located on other chromosomes
            continue
        chromStart = int(line_data[1])
        chromEnd = int(line_data[2])
        name = line_data[3]  # gene_name usually
        corr_name = not bool(allowed_re(name))
        if corr_name is False:
            broken_names.append(name)
        # bed_score (field 5) and strand (field 6) are never used here
        thickStart = int(line_data[6])
        thickEnd = int(line_data[7])
        # itemRgb (field 9) is never used
        blockCount = int(line_data[9])
        blockSizes = [int(x) for x in line_data[10].split(",") if x != ""]
        blockStarts = [int(x) for x in line_data[11].split(",") if x != ""]
        blockEnds = [blockStarts[i] + blockSizes[i] for i in range(blockCount)]
        blockAbsStarts = [blockStarts[i] + chromStart for i in range(blockCount)]
        blockAbsEnds = [blockEnds[i] + chromStart for i in range(blockCount)]
        blockNewStarts, blockNewEnds = [], []
        names[name] += 1

        if thickStart > thickEnd:
            f.close()  # according to bed12 specification this should never happen
            sys.stderr.write(f"Problem occurred at line {num}, gene {name}\n")
            die("Error! Bed file is corrupted, thickEnd MUST be >= thickStart")
        elif thickStart == thickEnd:
            # this means that this is a non-coding transcript
            # TOGA cannot process them: we can skip it
            rejected.append((name, "No CDS"))
            continue

        if thickStart < chromStart or thickEnd > chromEnd:
            # a very strange (but still possible) case
            f.close()  # for sure an error with input data
            sys.stderr.write(f"Problem occurred at line {num}, gene {name}\n")
            die("Error! Bed file is corrupted, thickRange is outside chromRange!")

        # now select CDS only
        # we keep UTRs in the filtered file
        # however, we need CDS to check whether it's correct (% 3 == 0)
        for block_num in range(blockCount):
            blockStart = blockAbsStarts[block_num]
            blockEnd = blockAbsEnds[block_num]

            # skip the block if it is entirely UTR
            if blockEnd <= thickStart:
                continue
            elif blockStart >= thickEnd:
                continue

            # if we are here: this is not an entirely UTR exon
            # it might intersect the CDS border or lie in the CDS entirely
            # remove UTRs: block start must be >= CDS_start (thickStart)
            # block end must be <= CDS_end (thickEnd)
            blockNewStart = blockStart if blockStart >= thickStart else thickStart
            blockNewEnd = blockEnd if blockEnd <= thickEnd else thickEnd
            blockNewStarts.append(blockNewStart - thickStart)
            blockNewEnds.append(blockNewEnd - thickStart)

        if len(blockNewStarts) == 0:
            # even if thickStart != thickEnd this transcript can still be non-coding
            # but if there are no blocks in the CDS -> we can catch this
            rejected.append((name, "No CDS"))
            continue

        block_new_count = len(blockNewStarts)
        blockNewSizes = [
            blockNewEnds[i] - blockNewStarts[i] for i in range(block_new_count)
        ]

        if sum(blockNewSizes) % 3 != 0 and not ouf:
            # this is an out-of-frame (or incomplete) transcript
            # ideally CDS length should be divisible by 3
            # "ouf" means the caller wants to keep such transcripts anyway
            rejected.append((name, "Out-of-frame gene"))
            continue

        # we keep this transcript: add it to the list
        new_line = "\t".join([str(x) for x in line_data])
        new_lines.append(new_line)
    f.close()

    # if not allowed characters in transcript names: list them
    if len(broken_names) > 0:
        eprint("Error! Some transcript names contain not allowed characters")
        for t in broken_names:
            eprint(t)
        die(f"Allowed characters are: {ALLOWED_CHARSET}")

    # if there are non-unique transcript IDs: die
    # killed here, not earlier, to show them all at once
    if any(v > 1 for v in names.values()):
        eprint("Error! There are non-uniq transcript IDs:")
        duplicates = [k for k, v in names.items() if v > 1]
        for d in duplicates:
            eprint(d)
        die("Abort")

    if len(new_lines) == 0:
        # no transcripts pass the filter: probably an input data mistake
        sys.exit(
            "Error! No reference annotation tracks left after filtering procedure! Abort"
        )

    # write transcripts that passed the filter to the output file
    f = open(output, "w") if output != "stdout" else sys.stdout
    f.write("\n".join(new_lines) + "\n")
    if output != "stdout":
        f.close()

    if save_rejected:
        # save transcripts that didn't pass the filter + reason why
        f = open(save_rejected, "w")
        for elem in rejected:
            f.write(f"{elem[0]}\t{elem[1]}\n")
        f.close()
def main():
    """Command-line entry point: parse arguments and run the bed filter."""
    cli_args = parse_args()
    prepare_bed_file(cli_args.input, cli_args.output, cli_args.out_of_frame)
    sys.exit(0)


if __name__ == "__main__":
    main()
| 37.895522 | 97 | 0.616516 |
import argparse
import sys
import re
from collections import Counter
try:
from modules.common import die
from modules.common import eprint
except ImportError:
from common import die
from commom import eprint
__author__ = "Bogdan Kirilenko, 2020."
__version__ = "1.0"
__email__ = "bogdan.kirilenko@senckenberg.de"
__credits__ = ["Michael Hiller", "Virag Sharma", "David Jebb"]
ALLOWED_CHARSET = "a-zA-Z0-9._-"
ALLOWED_CHARSET_RE = rf"[^{ALLOWED_CHARSET}]"
def parse_args():
app = argparse.ArgumentParser()
app.add_argument("input", help="Bed-12 formatted annotation track.")
app.add_argument(
"output", default="stdout", help="Output destination, stdout as default"
)
app.add_argument(
"--out_of_frame",
action="store_true",
dest="out_of_frame",
help="Do not skip out-of-frame genes.",
)
if len(sys.argv) < 2:
app.print_help()
sys.exit(0)
args = app.parse_args()
return args
def prepare_bed_file(bed_file, output, ouf=False, save_rejected=None, only_chrom=None):
    """Filter a bed-12 annotation track, keeping coding, in-frame transcripts.

    Args:
        bed_file: path to the input bed-12 file
        output: output path, or "stdout" to write to standard output
        ouf: if True, keep out-of-frame transcripts (CDS length % 3 != 0)
        save_rejected: optional path; rejected transcripts + reason are written there
        only_chrom: optional chromosome name; if set, keep only that chromosome
    """
    new_lines = []  # transcripts that pass the filter
    rejected = []  # (name, reason) pairs for rejected transcripts
    names = Counter()  # transcript name -> number of occurrences
    allowed_re = re.compile(ALLOWED_CHARSET_RE).search
    broken_names = []  # names containing disallowed characters
    f = open(bed_file, "r")
    for num, line in enumerate(f, 1):
        line_data = line.rstrip().split("\t")
        if len(line_data) != 12:
            f.close()
            # fixed: message was missing the f-prefix and printed the
            # literal "{len(line_data)}" instead of the field count
            die(
                f"Error! Bed 12 file is required! Got a file with {len(line_data)} fields instead"
            )
        chrom = line_data[0]
        if only_chrom and chrom != only_chrom:
            # skip transcripts on other chromosomes
            continue
        chromStart = int(line_data[1])
        chromEnd = int(line_data[2])
        name = line_data[3]
        corr_name = not bool(allowed_re(name))
        if corr_name is False:
            broken_names.append(name)
        # bed-12 block fields; thickStart/thickEnd delimit the CDS
        thickStart = int(line_data[6])
        thickEnd = int(line_data[7])
        # fixed: was 'ckCount', leaving 'blockCount' undefined below (NameError)
        blockCount = int(line_data[9])
        blockSizes = [int(x) for x in line_data[10].split(",") if x != ""]
        blockStarts = [int(x) for x in line_data[11].split(",") if x != ""]
        blockEnds = [blockStarts[i] + blockSizes[i] for i in range(blockCount)]
        blockAbsStarts = [blockStarts[i] + chromStart for i in range(blockCount)]
        blockAbsEnds = [blockEnds[i] + chromStart for i in range(blockCount)]
        blockNewStarts, blockNewEnds = [], []
        names[name] += 1
        if thickStart > thickEnd:
            f.close()
            sys.stderr.write(f"Problem occurred at line {num}, gene {name}\n")
            die("Error! Bed file is corrupted, thickEnd MUST be >= thickStart")
        elif thickStart == thickEnd:
            # thickStart == thickEnd means there is no CDS -> non-coding
            rejected.append((name, "No CDS"))
            continue
        if thickStart < chromStart or thickEnd > chromEnd:
            f.close()
            sys.stderr.write(f"Problem occurred at line {num}, gene {name}\n")
            die("Error! Bed file is corrupted, thickRange is outside chromRange!")
        for block_num in range(blockCount):
            blockStart = blockAbsStarts[block_num]
            blockEnd = blockAbsEnds[block_num]
            # skip the block if it is entirely UTR
            if blockEnd <= thickStart:
                continue
            elif blockStart >= thickEnd:
                continue
            # if we are here: this is not an entirely UTR exon,
            # so clip the block to the CDS range [thickStart, thickEnd]
            blockNewStart = blockStart if blockStart >= thickStart else thickStart
            blockNewEnd = blockEnd if blockEnd <= thickEnd else thickEnd
            blockNewStarts.append(blockNewStart - thickStart)
            blockNewEnds.append(blockNewEnd - thickStart)
        if len(blockNewStarts) == 0:
            # even if thickStart != thickEnd this transcript can be non-coding:
            # no blocks intersect the CDS
            rejected.append((name, "No CDS"))
            continue
        block_new_count = len(blockNewStarts)
        blockNewSizes = [
            blockNewEnds[i] - blockNewStarts[i] for i in range(block_new_count)
        ]
        if sum(blockNewSizes) % 3 != 0 and not ouf:
            # out-of-frame (or incomplete) transcript: CDS length should be
            # divisible by 3; 'ouf' keeps such transcripts anyway
            rejected.append((name, "Out-of-frame gene"))
            continue
        # we keep this transcript: add it to the list
        new_line = "\t".join([str(x) for x in line_data])
        new_lines.append(new_line)
    f.close()
    # if not allowed characters in transcript names: list them all, then die
    if len(broken_names) > 0:
        eprint("Error! Some transcript names contain not allowed characters")
        for t in broken_names:
            eprint(t)
        die(f"Allowed characters are: {ALLOWED_CHARSET}")
    # non-unique transcript IDs are fatal; report all duplicates at once
    if any(v > 1 for v in names.values()):
        eprint("Error! There are non-uniq transcript IDs:")
        duplicates = [k for k, v in names.items() if v > 1]
        for d in duplicates:
            eprint(d)
        die("Abort")
    if len(new_lines) == 0:
        # no transcripts pass the filter: probably an input data mistake
        sys.exit(
            "Error! No reference annotation tracks left after filtering procedure! Abort"
        )
    # write transcripts that passed the filter to the output file
    f = open(output, "w") if output != "stdout" else sys.stdout
    f.write("\n".join(new_lines) + "\n")
    f.close() if output != "stdout" else None
    if save_rejected:
        # save transcripts that didn't pass the filter + reason why
        f = open(save_rejected, "w")
        for elem in rejected:
            f.write(f"{elem[0]}\t{elem[1]}\n")
        f.close()
def main():
    """Entry point."""
    cli_args = parse_args()
    prepare_bed_file(cli_args.input, cli_args.output, cli_args.out_of_frame)
    sys.exit(0)
if __name__ == "__main__":
    main()
| true | true |
f731abd403e3a3f072d4bde30a655735bb7f1420 | 8,680 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_09_01/operations/_default_security_rules_operations.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | [
"MIT"
] | 2 | 2019-05-17T21:24:53.000Z | 2020-02-12T11:13:42.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_09_01/operations/_default_security_rules_operations.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | [
"MIT"
] | 15 | 2019-07-12T18:18:04.000Z | 2019-07-25T20:55:51.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_09_01/operations/_default_security_rules_operations.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | [
"MIT"
] | 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DefaultSecurityRulesOperations(object):
    """DefaultSecurityRulesOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2017_09_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        resource_group_name,  # type: str
        network_security_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.SecurityRuleListResult"]
        """Gets all default security rules in a network security group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SecurityRuleListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2017_09_01.models.SecurityRuleListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SecurityRuleListResult"]
        # 404/409 map to typed exceptions; callers may extend via 'error_map' kwarg
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-09-01"
        def prepare_request(next_link=None):
            # Build the first-page request, or follow the server-supplied next_link
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = 'application/json'
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            deserialized = self._deserialize('SecurityRuleListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        network_security_group_name,  # type: str
        default_security_rule_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.SecurityRule"
        """Get the specified default network security rule.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :param default_security_rule_name: The name of the default security rule.
        :type default_security_rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SecurityRule, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2017_09_01.models.SecurityRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SecurityRule"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-09-01"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'defaultSecurityRuleName': self._serialize.url("default_security_rule_name", default_security_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'
        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('SecurityRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules/{defaultSecurityRuleName}'}  # type: ignore
| 48.222222 | 236 | 0.673272 |
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DefaultSecurityRulesOperations(object):
    """Operations for reading default security rules (auto-generated SDK code).
    Do not instantiate directly; the service client creates and attaches it.
    """
    # Alias to the model classes used by this operation group.
    models = models
    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client plus (de)serializers injected by the service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        resource_group_name,
        network_security_group_name,
        **kwargs
    ):
        """Return an ItemPaged over all default security rules of the given
        network security group; HTTP calls happen lazily as pages are consumed.
        """
        cls = kwargs.pop('cls', None)
        # 404/409 map to typed exceptions; callers may extend via 'error_map' kwarg
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-09-01"
        def prepare_request(next_link=None):
            # Build the first-page request, or follow the server-supplied next_link.
            if not next_link:
                url = self.list.metadata['url']
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Deserialize one page and hand back (next_link, items) to ItemPaged.
            deserialized = self._deserialize('SecurityRuleListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Execute one page request through the client pipeline.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules'}
    def get(
        self,
        resource_group_name,
        network_security_group_name,
        default_security_rule_name,
        **kwargs
    ):
        """Fetch a single default security rule by name; raises HttpResponseError
        on non-200 responses.
        """
        cls = kwargs.pop('cls', None)
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-09-01"
        url = self.get.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'defaultSecurityRuleName': self._serialize.url("default_security_rule_name", default_security_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('SecurityRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules/{defaultSecurityRuleName}'}
| true | true |
f731ac74f48315280995265c0525ff9726f39e0d | 1,156 | py | Python | 02_ukb/src/01_data_preparation/02_qdec_table/01_creat_qdec_table.py | Lifebrain/p025_education_brain | 507cca3514b8ddbf65df7a047dba5bae1295badf | [
"MIT"
] | null | null | null | 02_ukb/src/01_data_preparation/02_qdec_table/01_creat_qdec_table.py | Lifebrain/p025_education_brain | 507cca3514b8ddbf65df7a047dba5bae1295badf | [
"MIT"
] | null | null | null | 02_ukb/src/01_data_preparation/02_qdec_table/01_creat_qdec_table.py | Lifebrain/p025_education_brain | 507cca3514b8ddbf65df7a047dba5bae1295badf | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Purpose: Create qdec table
import pandas as pd
import numpy as np
import glob
import os.path as op
import os
# Input table and output destination are resolved from environment variables.
data_csv = op.join(os.environ['TABULAR_DATA_DIR'],'data.csv')
output_file = op.join(os.environ['QDEC_DATA_DIR'],'qdec.table.dat')
# Root of the FreeSurfer longitudinal recon outputs (cluster path).
fs_dir = "/cluster/projects/p23/data/open_datasets/ukb_long/bids/derivatives/freesurfer.7.1.0/recon"
df = pd.read_csv(data_csv)
def extract_fs_long_dir(id,timepoint):
    """Extract the FreeSurfer longitudinal directory name for id/timepoint.

    Returns np.nan when no matching '*.long.*' directory exists under fs_dir.
    """
    # timepoint in the table is 0-based; directory names use timepoint+1
    search = glob.glob(op.join(fs_dir,"*"+str(id)+"*"+str(timepoint+1)+"*"+".long.*"))
    try:
        return op.basename(search[0])
    except IndexError:
        # fixed: bare 'except:' also hid unrelated errors; only a missing
        # match (empty glob result) should map to NaN
        return np.nan
# Resolve each (subject, timepoint) row to its FreeSurfer *.long.* directory.
df['fs_long_dir'] = df[['eid','mr_timepoint']].apply(lambda x: extract_fs_long_dir(x.eid,x.mr_timepoint), axis=1)
# Drop rows with any missing value (including subjects without a recon dir).
df = df.dropna()
# fsid / fsid_base are the two halves of "<fsid>.long.<fsid_base>".
df['fsid'] = df['fs_long_dir'].apply(lambda x: x.split(".long.")[0])
df['fsid_base'] = df['fs_long_dir'].apply(lambda x: x.split(".long.")[1])
# Binary education coding: 1 stays 1, everything else becomes 0.
df['edu_coded'] = df['education'].apply(lambda x: 1 if x==1 else 0)
df['sex'] = df['sex'].apply(lambda x: int(x))
df[['fsid','fsid_base','int','bl_age','sex','edu_coded']].to_csv(output_file, sep=" ", index=False) | 31.243243 | 113 | 0.675606 |
import pandas as pd
import numpy as np
import glob
import os.path as op
import os
# Input table and output destination are resolved from environment variables.
data_csv = op.join(os.environ['TABULAR_DATA_DIR'],'data.csv')
output_file = op.join(os.environ['QDEC_DATA_DIR'],'qdec.table.dat')
# Root of the FreeSurfer longitudinal recon outputs (cluster path).
fs_dir = "/cluster/projects/p23/data/open_datasets/ukb_long/bids/derivatives/freesurfer.7.1.0/recon"
df = pd.read_csv(data_csv)
def extract_fs_long_dir(id,timepoint):
    """Extract the FreeSurfer longitudinal directory name for id/timepoint.

    Returns np.nan when no matching '*.long.*' directory exists under fs_dir.
    """
    # timepoint in the table is 0-based; directory names use timepoint+1
    search = glob.glob(op.join(fs_dir,"*"+str(id)+"*"+str(timepoint+1)+"*"+".long.*"))
    try:
        return op.basename(search[0])
    except IndexError:
        # fixed: bare 'except:' also hid unrelated errors; only a missing
        # match (empty glob result) should map to NaN
        return np.nan
# Resolve each (subject, timepoint) row to its FreeSurfer *.long.* directory.
df['fs_long_dir'] = df[['eid','mr_timepoint']].apply(lambda x: extract_fs_long_dir(x.eid,x.mr_timepoint), axis=1)
# Drop rows with any missing value (including subjects without a recon dir).
df = df.dropna()
# fsid / fsid_base are the two halves of "<fsid>.long.<fsid_base>".
df['fsid'] = df['fs_long_dir'].apply(lambda x: x.split(".long.")[0])
df['fsid_base'] = df['fs_long_dir'].apply(lambda x: x.split(".long.")[1])
# Binary education coding: 1 stays 1, everything else becomes 0.
df['edu_coded'] = df['education'].apply(lambda x: 1 if x==1 else 0)
df['sex'] = df['sex'].apply(lambda x: int(x))
df[['fsid','fsid_base','int','bl_age','sex','edu_coded']].to_csv(output_file, sep=" ", index=False) | true | true |
f731ad39fbfd6798ad047b1f2307e945dfddf5eb | 14,114 | py | Python | utime/callbacks/callbacks.py | amiyapatanaik/U-Time | a9ed4892da77d165a71dbfef1d069d782c909757 | [
"MIT"
] | 1 | 2022-03-15T12:31:30.000Z | 2022-03-15T12:31:30.000Z | utime/callbacks/callbacks.py | amiyapatanaik/U-Time | a9ed4892da77d165a71dbfef1d069d782c909757 | [
"MIT"
] | null | null | null | utime/callbacks/callbacks.py | amiyapatanaik/U-Time | a9ed4892da77d165a71dbfef1d069d782c909757 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
from carbontracker.tracker import CarbonTracker
from tensorflow.keras.callbacks import Callback
from utime.utils import get_memory_usage
from mpunet.utils import highlighted
from mpunet.logging import ScreenLogger
from collections import defaultdict
from datetime import timedelta
class Validation(Callback):
    """
    Validation computation callback.
    Samples a number of validation batches from a deepsleep
    ValidationMultiSequence object
    and computes for all tasks:
       - Batch-wise validation loss
       - Batch-wise metrics as specified in model.metrics_tensors
       - Epoch-wise pr-class and average precision
       - Epoch-wise pr-class and average recall
       - Epoch-wise pr-class and average dice coefficients
    ... and adds all results to the log dict
    Note: The purpose of this callback over the default tf.keras evaluation
    mechanism is to calculate certain metrics over the entire epoch of data as
    opposed to averaged batch-wise computations.
    """
    def __init__(self,
                 val_sequence,
                 max_val_studies_per_dataset=20,
                 logger=None, verbose=True):
        """
        Args:
            val_sequence: A deepsleep ValidationMultiSequence object
            logger:       An instance of a MultiPlanar Logger that prints to
                          screen and/or file
            verbose:      Print progress to screen - OBS does not use Logger
        """
        super().__init__()
        self.logger = logger or ScreenLogger()
        self.sequences = val_sequence.sequences
        self.verbose = verbose
        self.max_studies = max_val_studies_per_dataset
        self.n_classes = val_sequence.n_classes
        self.IDs = val_sequence.IDs
        # Rounding used for on-screen printing vs. values stored in the logs
        self.print_round = 3
        self.log_round = 4
    def _compute_counts(self, pred, true, ignore_class=None):
        # Argmax and CM elements
        pred = pred.argmax(-1).ravel()
        true = true.ravel()
        if ignore_class:
            mask = np.where(true != ignore_class)
            true = true[mask]
            pred = pred[mask]
        # Compute relevant CM elements
        # We select the number following the largest class integer when
        # y != pred, then bincount and remove the added dummy class
        tps = np.bincount(np.where(true == pred, true, self.n_classes),
                          minlength=self.n_classes+1)[:-1].astype(np.uint64)
        rel = np.bincount(true, minlength=self.n_classes).astype(np.uint64)
        sel = np.bincount(pred, minlength=self.n_classes).astype(np.uint64)
        return tps, rel, sel
    def predict(self):
        """Run prediction over up to self.max_studies studies per dataset and
        accumulate confusion-matrix counts and per-study metric values."""
        # Get tensors to run and their names
        metrics = self.model.loss_functions + self.model.metrics
        metrics_names = self.model.metrics_names
        self.model.reset_metrics()
        assert len(metrics_names) == len(metrics)
        # Prepare arrays for CM summary stats
        true_pos, relevant, selected, metrics_results = {}, {}, {}, {}
        for id_, sequence in zip(self.IDs, self.sequences):
            # Add count arrays to the result dictionaries
            true_pos[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
            relevant[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
            selected[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
            # Get validation sleep study loader
            n_val = min(len(sequence.dataset_queue), self.max_studies)
            study_iterator = sequence.dataset_queue.get_study_iterator(n_val)
            # Predict and evaluate on all studies
            per_study_metrics = defaultdict(list)
            for i, sleep_study_context in enumerate(study_iterator):
                if self.verbose:
                    s = "   {}Validation subject: {}/{}".format(f"[{id_}] "
                                                                if id_ else "",
                                                                i+1,
                                                                n_val)
                    print(s, end="\r", flush=True)
                with sleep_study_context as ss:
                    x, y = sequence.get_single_study_full_seq(ss.identifier,
                                                              reshape=True)
                    pred = self.model.predict_on_batch(x)
                # Compute counts
                # ignore_class=5 excludes that label from the counts
                # (presumably an 'unknown/out-of-range' stage - TODO confirm)
                tps, rel, sel = self._compute_counts(pred=pred.numpy(),
                                                     true=y,
                                                     ignore_class=5)
                true_pos[id_] += tps
                relevant[id_] += rel
                selected[id_] += sel
                # Run all metrics
                for metric, name in zip(metrics, metrics_names):
                    per_study_metrics[name].append(metric(y, pred).numpy())
            # Compute mean metrics for the dataset
            metrics_results[id_] = {}
            for metric, name in zip(metrics, metrics_names):
                metrics_results[id_][name] = np.mean(per_study_metrics[name])
            self.model.reset_metrics()
        self.logger("")
        self.logger("")
        return true_pos, relevant, selected, metrics_results
    @staticmethod
    def _compute_dice(tp, rel, sel):
        # Get data masks (to avoid div. by zero warnings)
        # We set precision, recall, dice to 0 in for those particular cls.
        sel_mask = sel > 0
        rel_mask = rel > 0
        # prepare arrays
        precisions = np.zeros(shape=tp.shape, dtype=np.float32)
        recalls = np.zeros_like(precisions)
        dices = np.zeros_like(precisions)
        # Compute precisions, recalls
        precisions[sel_mask] = tp[sel_mask] / sel[sel_mask]
        recalls[rel_mask] = tp[rel_mask] / rel[rel_mask]
        # Compute dice
        intrs = (2 * precisions * recalls)
        union = (precisions + recalls)
        dice_mask = union > 0
        dices[dice_mask] = intrs[dice_mask] / union[dice_mask]
        return precisions, recalls, dices
    def _print_val_results(self, precisions, recalls, dices, metrics, epoch,
                           name, classes):
        # Log the results
        # We add them to a pd dataframe just for the pretty print output
        index = ["cls %i" % i for i in classes]
        metric_keys, metric_vals = map(list, list(zip(*metrics.items())))
        col_order = metric_keys + ["precision", "recall", "dice"]
        nan_arr = np.empty(shape=len(precisions))
        nan_arr[:] = np.nan
        value_dict = {"precision": precisions,
                      "recall": recalls,
                      "dice": dices}
        value_dict.update({key: nan_arr for key in metrics})
        val_results = pd.DataFrame(value_dict,
                                   index=index).loc[:, col_order]  # ensure order
        # Transpose the results to have metrics in rows
        val_results = val_results.T
        # Add mean and set in first row
        means = metric_vals + [precisions.mean(), recalls.mean(), dices.mean()]
        val_results["mean"] = means
        cols = list(val_results.columns)
        cols.insert(0, cols.pop(cols.index('mean')))
        val_results = val_results.loc[:, cols]
        # Print the df to screen
        self.logger(highlighted(("[%s] Validation Results for "
                                 "Epoch %i" % (name, epoch)).lstrip(" ")))
        print_string = val_results.round(self.print_round).to_string()
        self.logger(print_string.replace("NaN", "---") + "\n")
    def on_epoch_end(self, epoch, logs={}):
        """Compute epoch-wise validation metrics and add them to 'logs'."""
        self.logger("\n")
        # Predict and get CM
        TPs, relevant, selected, metrics = self.predict()
        for id_ in self.IDs:
            tp, rel, sel = TPs[id_], relevant[id_], selected[id_]
            precisions, recalls, dices = self._compute_dice(tp=tp, sel=sel, rel=rel)
            classes = np.arange(len(dices))
            # Add to log
            n = (id_ + "_") if len(self.IDs) > 1 else ""
            logs[f"{n}val_dice"] = dices.mean().round(self.log_round)
            logs[f"{n}val_precision"] = precisions.mean().round(self.log_round)
            logs[f"{n}val_recall"] = recalls.mean().round(self.log_round)
            for m_name, value in metrics[id_].items():
                logs[f"{n}val_{m_name}"] = value.round(self.log_round)
            if self.verbose:
                self._print_val_results(precisions=precisions,
                                        recalls=recalls,
                                        dices=dices,
                                        metrics=metrics[id_],
                                        epoch=epoch,
                                        name=id_,
                                        classes=classes)
        if len(self.IDs) > 1:
            # Print cross-dataset mean values
            if self.verbose:
                self.logger(highlighted(f"[ALL DATASETS] Means Across Classes"
                                        f" for Epoch {epoch}"))
            fetch = ("val_dice", "val_precision", "val_recall")
            m_fetch = tuple(["val_" + s for s in self.model.metrics_names])
            to_print = {}
            for f in fetch + m_fetch:
                scores = [logs["%s_%s" % (name, f)] for name in self.IDs]
                res = np.mean(scores)
                logs[f] = res.round(self.log_round)  # Add to log file
                to_print[f.split("_")[-1]] = list(scores) + [res]
            if self.verbose:
                df = pd.DataFrame(to_print)
                df.index = self.IDs + ["mean"]
                self.logger(df.round(self.print_round))
            self.logger("")
class MemoryConsumption(Callback):
    """Logs process memory usage after each epoch; optionally stops training.

    Adds 'memory_usage_gib' to the epoch logs. If 'max_gib' is set and the
    measured usage reaches it, training is stopped. With set_limit=True an
    OS-level address-space limit (RLIMIT_AS) is also installed at init time.
    """
    def __init__(self, max_gib=None, round_=2, logger=None, set_limit=False):
        """
        Args:
            max_gib:   Stop training when usage reaches this many GiB (None
                       disables the check).
            round_:    Number of decimals for the logged GiB value.
            logger:    Optional logger; defaults to ScreenLogger (fix: a None
                       logger previously crashed when a message was emitted).
            set_limit: If True, install a hard RLIMIT_AS limit of max_gib.
        """
        super().__init__()  # fix: base Callback was never initialised
        self.max_gib = max_gib
        self.logger = logger or ScreenLogger()
        self.round_ = round_
        if set_limit:
            import resource
            _, hard = resource.getrlimit(resource.RLIMIT_AS)
            resource.setrlimit(resource.RLIMIT_AS,
                               (self._gib_to_bytes(max_gib), hard))
            self.logger("Setting memory limit to {} GiB".format(max_gib))
    @staticmethod
    def _gib_to_bytes(gib):
        # GiB -> bytes
        return gib * (1024 ** 3)
    @staticmethod
    def _bytes_to_gib(bytes):
        # bytes -> GiB
        return bytes / (1024 ** 3)
    def on_epoch_end(self, epoch, logs={}):
        """Log current memory usage; stop training if the limit is reached."""
        mem_bytes = get_memory_usage()
        mem_gib = round(self._bytes_to_gib(mem_bytes), self.round_)
        logs['memory_usage_gib'] = mem_gib
        if self.max_gib and mem_gib >= self.max_gib:
            self.logger.warn("Stopping training from callback 'MemoryConsumption'! "
                             "Total memory consumption of {} GiB exceeds limitation"
                             " (self.max_gib = {}) ".format(mem_gib, self.max_gib))
            self.model.stop_training = True
class MaxTrainingTime(Callback):
    """Stops training when accumulated training time exceeds a limit.

    Reads the elapsed-time string stored in the epoch logs under 'log_name'
    (a fixed-width string with days at [0:2], hours [4:6], minutes [8:10],
    seconds [12:14] - presumably written by a separate timing callback;
    confirm against the producer) and stops training when the total exceeds
    'max_minutes'.
    """
    def __init__(self, max_minutes, log_name='train_time_total', logger=None):
        """
        Args:
            max_minutes: Maximum total training time in minutes.
            log_name:    Key in the epoch logs holding the elapsed-time string.
            logger:      Optional logger; defaults to ScreenLogger.
        """
        super().__init__()
        self.max_minutes = int(max_minutes)
        self.log_name = log_name
        self.logger = logger or ScreenLogger()
    def on_epoch_end(self, epochs, logs={}):
        """Parse the elapsed-time entry and stop training if over the limit."""
        train_time_str = logs.get(self.log_name, None)
        if not train_time_str:
            self.logger.warn("Did not find log entry '{}' (needed in callback "
                             "'MaxTrainingTime')".format(self.log_name))
            return
        # Fixed character positions: DD??HH??MM??SS
        train_time_m = timedelta(
            days=int(train_time_str[:2]),
            hours=int(train_time_str[4:6]),
            minutes=int(train_time_str[8:10]),
            seconds=int(train_time_str[12:14])
        ).total_seconds() / 60
        if train_time_m >= self.max_minutes:
            # Stop training
            # fix: was 'self.warn(...)' which raised AttributeError
            self.logger.warn("Stopping training from callback 'MaxTrainingTime'! "
                             "Total training length of {} minutes exceeded (now {}) "
                             "".format(self.max_minutes, train_time_m))
            self.model.stop_training = True
class CarbonUsageTracking(Callback):
    """
    tf.keras Callback for the Carbontracker package.
    See https://github.com/lfwa/carbontracker.
    """
    def __init__(self, epochs, add_to_logs=True, monitor_epochs=-1,
                 epochs_before_pred=-1, devices_by_pid=True, **additional_tracker_kwargs):
        """
        Accepts parameters as per CarbonTracker.__init__
        Sets other default values for key parameters.
        Args:
            add_to_logs: bool, Add total_energy_kwh and total_co2_g to the keras logs after each epoch
            For other arguments, please refer to CarbonTracker.__init__
        """
        super().__init__()
        # Tracker is created lazily on the first epoch (see on_epoch_begin)
        self.tracker = None
        self.add_to_logs = bool(add_to_logs)
        self.parameters = {"epochs": epochs,
                           "monitor_epochs": monitor_epochs,
                           "epochs_before_pred": epochs_before_pred,
                           "devices_by_pid": devices_by_pid}
        self.parameters.update(additional_tracker_kwargs)
    def on_train_end(self, logs={}):
        """ Ensure actual consumption is reported """
        self.tracker.stop()
    def on_epoch_begin(self, epoch, logs={}):
        """ Start tracking this epoch """
        if self.tracker is None:
            # At this point all CPUs should be discoverable
            self.tracker = CarbonTracker(**self.parameters)
        self.tracker.epoch_start()
    def on_epoch_end(self, epoch, logs={}):
        """ End tracking this epoch """
        self.tracker.epoch_end()
        if self.add_to_logs:
            # NOTE(review): '_co2eq' is a private CarbonTracker API and may
            # break on package upgrades - verify when bumping the dependency.
            energy_kwh = self.tracker.tracker.total_energy_per_epoch().sum()
            co2eq_g = self.tracker._co2eq(energy_kwh)
            logs["total_energy_kwh"] = round(energy_kwh, 6)
            logs["total_co2_g"] = round(co2eq_g, 6)
| 41.511765 | 102 | 0.568372 | import numpy as np
import pandas as pd
from carbontracker.tracker import CarbonTracker
from tensorflow.keras.callbacks import Callback
from utime.utils import get_memory_usage
from mpunet.utils import highlighted
from mpunet.logging import ScreenLogger
from collections import defaultdict
from datetime import timedelta
class Validation(Callback):
def __init__(self,
val_sequence,
max_val_studies_per_dataset=20,
logger=None, verbose=True):
super().__init__()
self.logger = logger or ScreenLogger()
self.sequences = val_sequence.sequences
self.verbose = verbose
self.max_studies = max_val_studies_per_dataset
self.n_classes = val_sequence.n_classes
self.IDs = val_sequence.IDs
self.print_round = 3
self.log_round = 4
def _compute_counts(self, pred, true, ignore_class=None):
pred = pred.argmax(-1).ravel()
true = true.ravel()
if ignore_class:
mask = np.where(true != ignore_class)
true = true[mask]
pred = pred[mask]
tps = np.bincount(np.where(true == pred, true, self.n_classes),
minlength=self.n_classes+1)[:-1].astype(np.uint64)
rel = np.bincount(true, minlength=self.n_classes).astype(np.uint64)
sel = np.bincount(pred, minlength=self.n_classes).astype(np.uint64)
return tps, rel, sel
def predict(self):
metrics = self.model.loss_functions + self.model.metrics
metrics_names = self.model.metrics_names
self.model.reset_metrics()
assert len(metrics_names) == len(metrics)
true_pos, relevant, selected, metrics_results = {}, {}, {}, {}
for id_, sequence in zip(self.IDs, self.sequences):
true_pos[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
relevant[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
selected[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
n_val = min(len(sequence.dataset_queue), self.max_studies)
study_iterator = sequence.dataset_queue.get_study_iterator(n_val)
per_study_metrics = defaultdict(list)
for i, sleep_study_context in enumerate(study_iterator):
if self.verbose:
s = " {}Validation subject: {}/{}".format(f"[{id_}] "
if id_ else "",
i+1,
n_val)
print(s, end="\r", flush=True)
with sleep_study_context as ss:
x, y = sequence.get_single_study_full_seq(ss.identifier,
reshape=True)
pred = self.model.predict_on_batch(x)
tps, rel, sel = self._compute_counts(pred=pred.numpy(),
true=y,
ignore_class=5)
true_pos[id_] += tps
relevant[id_] += rel
selected[id_] += sel
for metric, name in zip(metrics, metrics_names):
per_study_metrics[name].append(metric(y, pred).numpy())
metrics_results[id_] = {}
for metric, name in zip(metrics, metrics_names):
metrics_results[id_][name] = np.mean(per_study_metrics[name])
self.model.reset_metrics()
self.logger("")
self.logger("")
return true_pos, relevant, selected, metrics_results
@staticmethod
def _compute_dice(tp, rel, sel):
sel_mask = sel > 0
rel_mask = rel > 0
precisions = np.zeros(shape=tp.shape, dtype=np.float32)
recalls = np.zeros_like(precisions)
dices = np.zeros_like(precisions)
precisions[sel_mask] = tp[sel_mask] / sel[sel_mask]
recalls[rel_mask] = tp[rel_mask] / rel[rel_mask]
intrs = (2 * precisions * recalls)
union = (precisions + recalls)
dice_mask = union > 0
dices[dice_mask] = intrs[dice_mask] / union[dice_mask]
return precisions, recalls, dices
def _print_val_results(self, precisions, recalls, dices, metrics, epoch,
name, classes):
index = ["cls %i" % i for i in classes]
metric_keys, metric_vals = map(list, list(zip(*metrics.items())))
col_order = metric_keys + ["precision", "recall", "dice"]
nan_arr = np.empty(shape=len(precisions))
nan_arr[:] = np.nan
value_dict = {"precision": precisions,
"recall": recalls,
"dice": dices}
value_dict.update({key: nan_arr for key in metrics})
val_results = pd.DataFrame(value_dict,
index=index).loc[:, col_order]
val_results = val_results.T
means = metric_vals + [precisions.mean(), recalls.mean(), dices.mean()]
val_results["mean"] = means
cols = list(val_results.columns)
cols.insert(0, cols.pop(cols.index('mean')))
val_results = val_results.loc[:, cols]
self.logger(highlighted(("[%s] Validation Results for "
"Epoch %i" % (name, epoch)).lstrip(" ")))
print_string = val_results.round(self.print_round).to_string()
self.logger(print_string.replace("NaN", "---") + "\n")
def on_epoch_end(self, epoch, logs={}):
self.logger("\n")
TPs, relevant, selected, metrics = self.predict()
for id_ in self.IDs:
tp, rel, sel = TPs[id_], relevant[id_], selected[id_]
precisions, recalls, dices = self._compute_dice(tp=tp, sel=sel, rel=rel)
classes = np.arange(len(dices))
n = (id_ + "_") if len(self.IDs) > 1 else ""
logs[f"{n}val_dice"] = dices.mean().round(self.log_round)
logs[f"{n}val_precision"] = precisions.mean().round(self.log_round)
logs[f"{n}val_recall"] = recalls.mean().round(self.log_round)
for m_name, value in metrics[id_].items():
logs[f"{n}val_{m_name}"] = value.round(self.log_round)
if self.verbose:
self._print_val_results(precisions=precisions,
recalls=recalls,
dices=dices,
metrics=metrics[id_],
epoch=epoch,
name=id_,
classes=classes)
if len(self.IDs) > 1:
if self.verbose:
self.logger(highlighted(f"[ALL DATASETS] Means Across Classes"
f" for Epoch {epoch}"))
fetch = ("val_dice", "val_precision", "val_recall")
m_fetch = tuple(["val_" + s for s in self.model.metrics_names])
to_print = {}
for f in fetch + m_fetch:
scores = [logs["%s_%s" % (name, f)] for name in self.IDs]
res = np.mean(scores)
logs[f] = res.round(self.log_round)
to_print[f.split("_")[-1]] = list(scores) + [res]
if self.verbose:
df = pd.DataFrame(to_print)
df.index = self.IDs + ["mean"]
self.logger(df.round(self.print_round))
self.logger("")
class MemoryConsumption(Callback):
def __init__(self, max_gib=None, round_=2, logger=None, set_limit=False):
self.max_gib = max_gib
self.logger = logger
self.round_ = round_
if set_limit:
import resource
_, hard = resource.getrlimit(resource.RLIMIT_AS)
resource.setrlimit(resource.RLIMIT_AS,
(self._gib_to_bytes(max_gib), hard))
self.logger("Setting memory limit to {} GiB".format(max_gib))
@staticmethod
def _gib_to_bytes(gib):
return gib * (1024 ** 3)
@staticmethod
def _bytes_to_gib(bytes):
return bytes / (1024 ** 3)
def on_epoch_end(self, epoch, logs={}):
mem_bytes = get_memory_usage()
mem_gib = round(self._bytes_to_gib(mem_bytes), self.round_)
logs['memory_usage_gib'] = mem_gib
if self.max_gib and mem_gib >= self.max_gib:
self.logger.warn("Stopping training from callback 'MemoryConsumption'! "
"Total memory consumption of {} GiB exceeds limitation"
" (self.max_gib = {}) ".format(mem_gib, self.max_gib))
self.model.stop_training = True
class MaxTrainingTime(Callback):
def __init__(self, max_minutes, log_name='train_time_total', logger=None):
super().__init__()
self.max_minutes = int(max_minutes)
self.log_name = log_name
self.logger = logger or ScreenLogger()
def on_epoch_end(self, epochs, logs={}):
train_time_str = logs.get(self.log_name, None)
if not train_time_str:
self.logger.warn("Did not find log entry '{}' (needed in callback "
"'MaxTrainingTime')".format(self.log_name))
return
train_time_m = timedelta(
days=int(train_time_str[:2]),
hours=int(train_time_str[4:6]),
minutes=int(train_time_str[8:10]),
seconds=int(train_time_str[12:14])
).total_seconds() / 60
if train_time_m >= self.max_minutes:
self.warn("Stopping training from callback 'MaxTrainingTime'! "
"Total training length of {} minutes exceeded (now {}) "
"".format(self.max_minutes, train_time_m))
self.model.stop_training = True
class CarbonUsageTracking(Callback):
def __init__(self, epochs, add_to_logs=True, monitor_epochs=-1,
epochs_before_pred=-1, devices_by_pid=True, **additional_tracker_kwargs):
super().__init__()
self.tracker = None
self.add_to_logs = bool(add_to_logs)
self.parameters = {"epochs": epochs,
"monitor_epochs": monitor_epochs,
"epochs_before_pred": epochs_before_pred,
"devices_by_pid": devices_by_pid}
self.parameters.update(additional_tracker_kwargs)
def on_train_end(self, logs={}):
self.tracker.stop()
def on_epoch_begin(self, epoch, logs={}):
if self.tracker is None:
self.tracker = CarbonTracker(**self.parameters)
self.tracker.epoch_start()
def on_epoch_end(self, epoch, logs={}):
self.tracker.epoch_end()
if self.add_to_logs:
energy_kwh = self.tracker.tracker.total_energy_per_epoch().sum()
co2eq_g = self.tracker._co2eq(energy_kwh)
logs["total_energy_kwh"] = round(energy_kwh, 6)
logs["total_co2_g"] = round(co2eq_g, 6)
| true | true |
f731ad77a89b21efe9c339549c926df276f7e654 | 3,152 | py | Python | stackalytics/processor/config.py | duongmn89/stackalytics | 7b2412cddf5f48f42f92bc3400754a6c21742f0c | [
"Apache-2.0"
] | null | null | null | stackalytics/processor/config.py | duongmn89/stackalytics | 7b2412cddf5f48f42f92bc3400754a6c21742f0c | [
"Apache-2.0"
] | null | null | null | stackalytics/processor/config.py | duongmn89/stackalytics | 7b2412cddf5f48f42f92bc3400754a6c21742f0c | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from oslo_config import cfg
# Options controlling how the processor connects to its runtime storage.
CONNECTION_OPTS = [
    cfg.StrOpt('runtime-storage-uri', default='memcached://127.0.0.1:11211',
               help='Storage URI'),
]

# Options controlling data sources and processing behaviour
# (default data location, review system endpoints, credentials, retries).
PROCESSOR_OPTS = [
    cfg.StrOpt('default-data-uri',
               default='https://raw.githubusercontent.com/stackalytics/'
                       'default_data/master/default_data.json',
               help='URI for default data. A local file can be used with the '
                    'prefix "file://". For example, '
                    'default_data_uri = file:///path/to/default_data.json'),
    cfg.StrOpt('sources-root', default='/var/local/stackalytics',
               help='The folder that holds all project sources to analyze'),
    cfg.IntOpt('days_to_update_members', default=30,
               help='Number of days to update members'),
    cfg.StrOpt('corrections-uri',
               default=('https://git.openstack.org/cgit/'
                        'openstack/stackalytics/plain/etc/corrections.json'),
               help='The address of file with corrections data'),
    cfg.StrOpt('review-uri', default='gerrit://review.openstack.org',
               help='URI of review system'),
    cfg.StrOpt('git-base-uri', default='git://git.openstack.org',
               help='git base location'),
    cfg.StrOpt('ssh-key-filename', default='/home/user/.ssh/id_rsa',
               help='SSH key for gerrit review system access'),
    cfg.StrOpt('ssh-username', default='user',
               help='SSH username for gerrit review system access'),
    cfg.StrOpt('github-login', default=None,
               help='Login for github access'),
    cfg.StrOpt('github-password', default=None,
               help='Password for github access'),
    cfg.StrOpt('translation-team-uri',
               default='https://git.openstack.org/cgit/openstack/i18n/'
                       'plain/tools/zanata/translation_team.yaml',
               help='URI of translation team data'),
    cfg.StrOpt("fetching-user-source", default='launchpad',
               choices=['launchpad', '<None>'],
               help="Source for fetching user profiles"),
    cfg.IntOpt('members-look-ahead', default=250,
               help='How many member profiles to look ahead after the last'),
    cfg.IntOpt('read-timeout', default=120,
               help='Number of seconds to wait for remote response'),
    cfg.IntOpt('gerrit-retry', default=10,
               help='How many times to retry after Gerrit errors'),
]
def list_opts():
    """Yield (group, options) pairs for oslo.config option discovery."""
    all_opts = CONNECTION_OPTS + PROCESSOR_OPTS
    # Deep-copy so callers cannot mutate the module-level option definitions.
    yield None, copy.deepcopy(all_opts)
| 44.394366 | 78 | 0.63769 |
import copy
from oslo_config import cfg
CONNECTION_OPTS = [
cfg.StrOpt('runtime-storage-uri', default='memcached://127.0.0.1:11211',
help='Storage URI'),
]
PROCESSOR_OPTS = [
cfg.StrOpt('default-data-uri',
default='https://raw.githubusercontent.com/stackalytics/'
'default_data/master/default_data.json',
help='URI for default data. A local file can be used with the '
'prefix "file://". For example, '
'default_data_uri = file:///path/to/default_data.json'),
cfg.StrOpt('sources-root', default='/var/local/stackalytics',
help='The folder that holds all project sources to analyze'),
cfg.IntOpt('days_to_update_members', default=30,
help='Number of days to update members'),
cfg.StrOpt('corrections-uri',
default=('https://git.openstack.org/cgit/'
'openstack/stackalytics/plain/etc/corrections.json'),
help='The address of file with corrections data'),
cfg.StrOpt('review-uri', default='gerrit://review.openstack.org',
help='URI of review system'),
cfg.StrOpt('git-base-uri', default='git://git.openstack.org',
help='git base location'),
cfg.StrOpt('ssh-key-filename', default='/home/user/.ssh/id_rsa',
help='SSH key for gerrit review system access'),
cfg.StrOpt('ssh-username', default='user',
help='SSH username for gerrit review system access'),
cfg.StrOpt('github-login', default=None,
help='Login for github access'),
cfg.StrOpt('github-password', default=None,
help='Password for github access'),
cfg.StrOpt('translation-team-uri',
default='https://git.openstack.org/cgit/openstack/i18n/'
'plain/tools/zanata/translation_team.yaml',
help='URI of translation team data'),
cfg.StrOpt("fetching-user-source", default='launchpad',
choices=['launchpad', '<None>'],
help="Source for fetching user profiles"),
cfg.IntOpt('members-look-ahead', default=250,
help='How many member profiles to look ahead after the last'),
cfg.IntOpt('read-timeout', default=120,
help='Number of seconds to wait for remote response'),
cfg.IntOpt('gerrit-retry', default=10,
help='How many times to retry after Gerrit errors'),
]
def list_opts():
yield (None, copy.deepcopy(CONNECTION_OPTS + PROCESSOR_OPTS))
| true | true |
f731ad9e1174078405c48bb42fee69eb69fa5af1 | 1,103 | py | Python | piqa/model/tokenizers.py | AndrzejGretkowski/masters-piqa | 5924cb5261bbcc2bb9970622c12c29f49bdea6e7 | [
"MIT"
] | null | null | null | piqa/model/tokenizers.py | AndrzejGretkowski/masters-piqa | 5924cb5261bbcc2bb9970622c12c29f49bdea6e7 | [
"MIT"
] | null | null | null | piqa/model/tokenizers.py | AndrzejGretkowski/masters-piqa | 5924cb5261bbcc2bb9970622c12c29f49bdea6e7 | [
"MIT"
] | null | null | null | from transformers import AlbertTokenizerFast, RobertaTokenizerFast, DistilBertTokenizerFast
from piqa.model.tokenizers_base import BaseTokenizerPIQA
class PIQATokenizer(object):
    """Registry mapping pretrained-model names to PIQA tokenizer classes.

    Tokenizer classes register themselves for one or more model names via
    the ``register`` decorator and are looked up again with ``get``.
    """

    # model-name -> registered tokenizer class
    tokenizer_mapping = {}

    @classmethod
    def register(cls, *args):
        """Decorator registering the decorated object under every name in *args."""
        def decorator(fn):
            cls.tokenizer_mapping.update((name, fn) for name in args)
            return fn
        return decorator

    @classmethod
    def get(cls, tokenizer):
        """Return the registered entry for *tokenizer*, or None if unknown."""
        try:
            return cls.tokenizer_mapping[tokenizer]
        except KeyError:
            return None
@PIQATokenizer.register('roberta-base', 'roberta-large')
class RobertaPIQATokenizer(BaseTokenizerPIQA):
    """PIQA tokenizer wrapper for RoBERTa checkpoints."""

    @property
    def get_tokenizer(self):
        # HuggingFace fast-tokenizer class the base wrapper should instantiate
        return RobertaTokenizerFast
@PIQATokenizer.register('albert-base-v2', 'albert-large-v2', 'albert-xlarge-v2', 'albert-xxlarge-v2')
class AlbertPIQATokenizer(BaseTokenizerPIQA):
    """PIQA tokenizer wrapper for ALBERT v2 checkpoints (all sizes)."""

    @property
    def get_tokenizer(self):
        # HuggingFace fast-tokenizer class the base wrapper should instantiate
        return AlbertTokenizerFast
@PIQATokenizer.register('distilbert-base-uncased')
class DistilPIQATokenizer(BaseTokenizerPIQA):
    """PIQA tokenizer wrapper for DistilBERT checkpoints."""

    @property
    def get_tokenizer(self):
        # HuggingFace fast-tokenizer class the base wrapper should instantiate
        return DistilBertTokenizerFast
| 27.575 | 101 | 0.724388 | from transformers import AlbertTokenizerFast, RobertaTokenizerFast, DistilBertTokenizerFast
from piqa.model.tokenizers_base import BaseTokenizerPIQA
class PIQATokenizer(object):
tokenizer_mapping = dict()
@classmethod
def register(cls, *args):
def decorator(fn):
for arg in args:
cls.tokenizer_mapping[arg] = fn
return fn
return decorator
@classmethod
def get(cls, tokenizer):
return cls.tokenizer_mapping.get(tokenizer)
@PIQATokenizer.register('roberta-base', 'roberta-large')
class RobertaPIQATokenizer(BaseTokenizerPIQA):
@property
def get_tokenizer(self):
return RobertaTokenizerFast
@PIQATokenizer.register('albert-base-v2', 'albert-large-v2', 'albert-xlarge-v2', 'albert-xxlarge-v2')
class AlbertPIQATokenizer(BaseTokenizerPIQA):
@property
def get_tokenizer(self):
return AlbertTokenizerFast
@PIQATokenizer.register('distilbert-base-uncased')
class DistilPIQATokenizer(BaseTokenizerPIQA):
@property
def get_tokenizer(self):
return DistilBertTokenizerFast
| true | true |
f731ae5909a825418b54595ca18abd70d6832fb9 | 1,895 | py | Python | home/urls.py | Paphra/recruitment-cms | 2894d5285a6da4ff47c114377968c8fda95c24b6 | [
"MIT"
] | 2 | 2021-02-07T12:08:47.000Z | 2021-02-22T07:12:53.000Z | home/urls.py | Paphra/recruitment-cms | 2894d5285a6da4ff47c114377968c8fda95c24b6 | [
"MIT"
] | null | null | null | home/urls.py | Paphra/recruitment-cms | 2894d5285a6da4ff47c114377968c8fda95c24b6 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
from files import views as files
from finances import views as finances
from others import views as others
from operations import views as operations
# URL routes for the recruitment CMS, grouped by feature area; most views come
# from app-specific modules imported above (operations, finances, files, others).
urlpatterns = [
    # client registration and fee-payment flows
    path('clients/<int:client_id>/pay-reg-fee', views.pay_reg_fee, name='pay-reg-fee'),
    path('clients/<int:client_id>/register', views.register, name='register'),
    path('clients', views.clients, name='clients'),
    path('jobs', views.jobs, name='jobs'),
    path('branches', views.branches, name='branches'),
    # operations
    path('clearances', operations.clearances, name='clearances'),
    path('contracts', operations.contracts, name='contracts'),
    path('interpols', operations.interpols, name='interpols'),
    path('interviews', operations.interviews, name='interviews'),
    path('trainings', operations.trainings, name='trainings'),
    path('medicals', operations.medicals, name='medicals'),
    path('passports', operations.passports, name='passports'),
    path('tickets', operations.tickets, name='tickets'),
    path('vettings', operations.vettings, name='vettings'),
    path('vetting-album', operations.vetting_album, name='vetting-album'),
    path('visas', operations.visas, name='visas'),
    path('travel-plans', operations.travel_plans, name='travel-plans'),
    path('other-ops', operations.other_ops, name='other-ops'),
    # finances
    path('payments', finances.payments, name='payments'),
    path('expenses', finances.expenses, name='expenses'),
    path('fees', finances.fees, name='fees'),
    # files
    path('clearance-files', files.clearance_files, name='clearance-files'),
    path('client-files', files.client_files, name='client-files'),
    # others
    path('tasks', others.tasks, name='tasks'),
    path('recruitments', others.recruitments, name='recruitments'),
    # site root
    path('', views.index, name="index"),
]
| 39.479167 | 87 | 0.692876 | from django.urls import path
from . import views
from files import views as files
from finances import views as finances
from others import views as others
from operations import views as operations
urlpatterns = [
path('clients/<int:client_id>/pay-reg-fee', views.pay_reg_fee, name='pay-reg-fee'),
path('clients/<int:client_id>/register', views.register, name='register'),
path('clients', views.clients, name='clients'),
path('jobs', views.jobs, name='jobs'),
path('branches', views.branches, name='branches'),
path('clearances', operations.clearances, name='clearances'),
path('contracts', operations.contracts, name='contracts'),
path('interpols', operations.interpols, name='interpols'),
path('interviews', operations.interviews, name='interviews'),
path('trainings', operations.trainings, name='trainings'),
path('medicals', operations.medicals, name='medicals'),
path('passports', operations.passports, name='passports'),
path('tickets', operations.tickets, name='tickets'),
path('vettings', operations.vettings, name='vettings'),
path('vetting-album', operations.vetting_album, name='vetting-album'),
path('visas', operations.visas, name='visas'),
path('travel-plans', operations.travel_plans, name='travel-plans'),
path('other-ops', operations.other_ops, name='other-ops'),
path('payments', finances.payments, name='payments'),
path('expenses', finances.expenses, name='expenses'),
path('fees', finances.fees, name='fees'),
path('clearance-files', files.clearance_files, name='clearance-files'),
path('client-files', files.client_files, name='client-files'),
path('tasks', others.tasks, name='tasks'),
path('recruitments', others.recruitments, name='recruitments'),
path('', views.index, name="index"),
]
| true | true |
f731afae0e47cb38488bcaa0764e4ae4aadbc6ac | 6,801 | py | Python | kerberos.py | bbhunter/kerberoast | e983ba1d5290e94c71e04fc15dc2cee482873f18 | [
"Apache-2.0"
] | 1,011 | 2015-03-17T21:52:07.000Z | 2022-03-31T09:25:41.000Z | kerberos.py | bbhunter/kerberoast | e983ba1d5290e94c71e04fc15dc2cee482873f18 | [
"Apache-2.0"
] | 16 | 2015-07-08T09:12:27.000Z | 2022-03-05T19:04:26.000Z | kerberos.py | bbhunter/kerberoast | e983ba1d5290e94c71e04fc15dc2cee482873f18 | [
"Apache-2.0"
] | 274 | 2015-04-28T21:24:23.000Z | 2022-03-29T18:37:27.000Z | #!/usr/bin/env python3 -tt
import hashlib
import hmac
from pyasn1.type import univ, char, useful, tag
from pyasn1.codec.ber import encoder, decoder
import datetime
import base64
import sys
#REF: http://tools.ietf.org/id/draft-brezak-win2k-krb-rc4-hmac-03.txt
#T = 1 for TS-ENC-TS in the AS-Request
#T = 8 for the AS-Reply
#T = 7 for the Authenticator in the TGS-Request
#T = 8 for the TGS-Reply
#T = 2 for the Server Ticket in the AP-Request
#T = 11 for the Authenticator in the AP-Request
#T = 12 for the Server returned AP-Reply
#T = 15 in the generation of checksum for the MIC token
#T = 0 in the generation of sequence number for the MIC token
#T = 13 in the generation of checksum for the WRAP token
#T = 0 in the generation of sequence number for the WRAP token
#T = 0 in the generation of encrypted data for the WRAPPED token
def ntlmhash(s):
    """Return the NTLM hash (MD4 over the UTF-16LE password) as raw bytes.

    NOTE(review): relies on OpenSSL exposing the legacy 'md4' algorithm;
    on recent distributions hashlib.new('md4') may raise ValueError -- confirm
    the deployment environment.
    """
    hash = hashlib.new('md4', s.encode('utf-16le')).digest()
    return hash
    #return binascii.hexlify(hash)
def rc4crypt(key, data):
    """RC4-encrypt/decrypt *data* (bytes) with *key* (bytes).

    RC4 is symmetric, so the same call performs both directions.
    """
    # Key-scheduling algorithm (KSA)
    state = list(range(256))
    j = 0
    for i in range(256):
        j = (j + state[i] + key[i % len(key)]) % 256
        state[i], state[j] = state[j], state[i]
    # Pseudo-random generation algorithm (PRGA): XOR keystream into the data
    i = j = 0
    result = bytearray()
    for byte in data:
        i = (i + 1) % 256
        j = (j + state[i]) % 256
        state[i], state[j] = state[j], state[i]
        result.append(byte ^ state[(state[i] + state[j]) % 256])
    return bytes(result)
#print decoder.decode(enc)
#define KERB_ETYPE_RC4_HMAC 23
KERB_ETYPE_RC4_HMAC = 23
#define KERB_ETYPE_RC4_HMAC_EXP 24
def decrypt(key, messagetype, edata):
    """RC4-HMAC (RFC 4757) decryption.

    key         -- 16-byte RC4-HMAC key (the NT hash)
    messagetype -- Kerberos key-usage number T (see the table at the top of the file)
    edata       -- 16-byte HMAC-MD5 checksum followed by RC4(confounder + data)

    Returns (plaintext, 8-byte confounder), or (None, None) if the recomputed
    checksum does not match the one embedded in edata.
    """
    #DECRYPT (K, fRC4_EXP, T, edata, edata_len, data, data_len)
    #{
    #    if (fRC4_EXP){
    #        *((DWORD *)(L40+10)) = T;
    #        HMAC (K, L40, 14, K1);
    #    }else{
    #        HMAC (K, &T, 4, K1);
    #    }
    #K1 = hmac.new(key, chr(messagetype) + "\x00\x00\x00", hashlib.md5).digest() # \x0b = 11
    K1 = hmac.new(key, bytes([messagetype]) + b"\x00\x00\x00", hashlib.md5).digest() # \x0b = 11
    #    memcpy (K2, K1, 16);
    K2 = K1
    #    if (fRC4_EXP) memset (K1+7, 0xAB, 9);
    #    HMAC (K1, edata, 16, K3); // checksum is at edata
    K3 = hmac.new(K1, edata[:16], hashlib.md5).digest()
    #    RC4(K3, edata + 16, edata_len - 16, edata + 16);
    ddata = rc4crypt(K3, edata[16:])
    #    data_len = edata_len - 16 - 8;
    #    memcpy (data, edata + 16 + 8, data_len);
    #
    #    // verify generated and received checksums
    #    HMAC (K2, edata + 16, edata_len - 16, checksum);
    checksum = hmac.new(K2, ddata, hashlib.md5).digest()
    #    if (memcmp(edata, checksum, 16) != 0)
    #        printf("CHECKSUM ERROR !!!!!!\n");
    #}
    if checksum == edata[:16]:
        #print "Decrypt Checksum: %s" % str(checksum).encode('hex') # == edata[:16])
        #print "Checksum Calc: %s" % str(checksum).encode('hex')
        #print "Checksum Pkct: %s" % str(edata[:16]).encode('hex')
        #print messagetype
        #print data
        #print "Nonce: %s" % ddata[:8].encode('hex')
        #return ddata[8:] # first 8 bytes are nonce, the rest is data
        #return {
        #    'data': ddata[8:],
        #    'nonce': ddata[:8]
        #}
        # first 8 bytes are the random confounder; the rest is the plaintext
        return ddata[8:], ddata[:8]
    else:
        #print "CHECKSUM ERROR!"
        return None, None
def encrypt(key, messagetype, data, nonce):
    """RC4-HMAC (RFC 4757) encryption: the inverse of decrypt() above.

    key         -- 16-byte RC4-HMAC key (the NT hash)
    messagetype -- Kerberos key-usage number T (see the table at the top of the file)
    data        -- plaintext bytes
    nonce       -- 8-byte confounder prepended to the plaintext

    Returns the 16-byte HMAC-MD5 checksum followed by RC4(confounder + data).
    """
    # K1 = HMAC-MD5(key, T as 32-bit little-endian).  Uses bytes like decrypt()
    # does; the old `chr(messagetype) + "\x00\x00\x00"` was a Python 2 leftover
    # that raised TypeError under Python 3 (str message with a bytes key).
    K1 = hmac.new(key, bytes([messagetype]) + b"\x00\x00\x00", hashlib.md5).digest()
    # memcpy (K2, K1, 16);
    K2 = K1
    # Confounder (nonce) + plaintext, as fed to both the checksum and the cipher
    ddata = nonce + data
    # checksum = HMAC-MD5(K2, confounder + plaintext)
    checksum = hmac.new(K2, ddata, hashlib.md5).digest()
    # Per-message RC4 key: K3 = HMAC-MD5(K1, checksum)
    K3 = hmac.new(K1, checksum, hashlib.md5).digest()
    edata = rc4crypt(K3, ddata)
    # edata_len = 16 + 8 + data_len
    return checksum + edata
def zerosigs(data):
    """Return *data* with both embedded 16-byte HMAC signatures zeroed.

    Bytes len-20 .. len-5 (the KDC signature) and len-44 .. len-29 (the server
    signature) are set to zero so checksums can be recomputed over the token.
    Works on bytes/bytearray and returns bytes; the previous
    map(ord)/"".join(map(chr)) implementation was Python 2-only and raised
    TypeError on Python 3 byte strings.
    """
    d = bytearray(data)
    for i in range(5, 21):   # zero out the 16-byte sig, KDC
        d[len(d) - i] = 0
    for i in range(29, 45):  # zero out the 16-byte sig, Server
        d[len(d) - i] = 0
    return bytes(d)
def chksum(K, T, data):
    """MIC/WRAP token checksum per RFC 4757: HMAC-MD5(Ksign, MD5(T || data)).

    K    -- key bytes
    T    -- the message type, already encoded as a little-endian four-byte value
    data -- token bytes; both embedded signature fields are zeroed first
    """
    data = zerosigs(data)
    # Ksign = HMAC(K, "signaturekey")  // includes zero octet at end.
    # Must be bytes: the old str literal raised TypeError with a bytes key on Python 3.
    SIGNATUREKEY = b'signaturekey\x00'
    Ksign = hmac.new(K, SIGNATUREKEY, hashlib.md5).digest()
    # tmp = MD5(concat(T, data))
    tmp = hashlib.md5(T + data).digest()
    # CHKSUM = HMAC(Ksign, tmp)
    chksum = hmac.new(Ksign, tmp, hashlib.md5).digest()
    return chksum
def getservsig(encchunk):
    """Return the 16-byte server signature field (offsets -44..-29) of a chunk.

    NOTE(review): str() around a bytes slice yields its repr on Python 3;
    this looks like a Python 2 leftover -- confirm what callers expect.
    """
    return str(encchunk[-44:-28])
def getprivsig(encchunk):
    """Return the 16-byte KDC/private signature field (offsets -20..-5) of a chunk.

    NOTE(review): str() around a bytes slice yields its repr on Python 3;
    this looks like a Python 2 leftover -- confirm what callers expect.
    """
    return str(encchunk[-20:-4])
def printdecode(kerbpayload, ktype=2):
    """Pretty-print selected fields of a BER-decoded Kerberos payload.

    ktype 32 dumps message/realm/principal/ticket fields; ktype 2 (the
    default) is effectively a placeholder that only prints "a".
    """
    d = decoder.decode(kerbpayload)
    if ktype == 32:
        #print "Protocol Version (pvno): " + str(d[0][0])
        print("Message Type: " + str(d[0][1]))
        print("Realm: " + str(d[0][2]))
        print("Principal: " + str(d[0][3][1][0]))
        print("Ticket Version (tkt-vno): " + str(d[0][4][0]))
        print("Ticket Realm: " + str(d[0][4][1]))
        #print "Name-Type (Service & Instance): " + str(d[0][4][2][0])
        print("Server, Name: " + str(d[0][4][2][1][0]))
        print("Server, Name: " + str(d[0][4][2][1][1]))
        #print "Data: " + str(d[0][4][3][2]).encode('hex')
        #print "Encryption Type: : " + str(d[0][5][0])
        #print "Data: " + str(d[0])
        #print "Server Realm: " + str(d[0][4][2][4])
    elif ktype == 2:
        print("a")
def extract_ticket_from_kirbi(filename):
    """Read a .kirbi file and return its encrypted ticket blob."""
    with open(filename, 'rb') as handle:
        raw = handle.read()
    return extract_ticket(raw)
def extract_ticket(data):
    """Return the encrypted-part octets of a Kerberos ticket from raw DER bytes.

    Two framings are recognised by their leading DER tag byte:
    - 0x76: KRB-CRED (APPLICATION 22), e.g. a .kirbi file or a RAM dump
    - 0x6d: TGS-REP (APPLICATION 13), e.g. a reply carved from a pcap
    """
    if data[0] == 0x76:
        # KRB-CRED: tickets[0].enc-part.cipher
        return (decoder.decode(data)[0][2][0][3][2]).asOctets()
    elif data[0] == 0x6d:
        # TGS-REP: ticket.enc-part.cipher.
        # The old branch compared raw bytes to the literal b'6d' (a hex-string
        # check from the Python 2 version) and referenced an undefined name
        # `ticket` with .decode('hex'); fixed to test the tag byte and decode
        # `data` directly.
        return (decoder.decode(data)[0][4][3][2]).asOctets()
| 34.522843 | 97 | 0.551242 |
import hashlib
import hmac
from pyasn1.type import univ, char, useful, tag
from pyasn1.codec.ber import encoder, decoder
import datetime
import base64
import sys
def ntlmhash(s):
hash = hashlib.new('md4', s.encode('utf-16le')).digest()
return hash
def rc4crypt(key, data):
x = 0
box = list(range(256))
for i in range(256):
x = (x + box[i] + (key[i % len(key)])) % 256
box[i], box[x] = box[x], box[i]
x = 0
y = 0
out = b''
for char in data:
x = (x + 1) % 256
y = (y + box[x]) % 256
box[x], box[y] = box[y], box[x]
out += bytes([char ^ box[(box[x] + box[y]) % 256]])
return out
KERB_ETYPE_RC4_HMAC = 23
def decrypt(key, messagetype, edata):
mac.new(key, bytes([messagetype]) + b"\x00\x00\x00", hashlib.md5).digest()
K2 = K1
K3 = hmac.new(K1, edata[:16], hashlib.md5).digest()
ddata = rc4crypt(K3, edata[16:])
checksum = hmac.new(K2, ddata, hashlib.md5).digest()
if checksum == edata[:16]:
return ddata[8:], ddata[:8]
else:
return None, None
def encrypt(key, messagetype, data, nonce):
K1 = hmac.new(key, chr(messagetype) + "\x00\x00\x00", hashlib.md5).digest()
K2 = K1
ddata = nonce + data
checksum = hmac.new(K2, ddata, hashlib.md5).digest()
K3 = hmac.new(K1, checksum, hashlib.md5).digest()
edata = rc4crypt(K3, ddata)
return checksum + edata
def zerosigs(data):
d = list(map(ord, data))
for i in range(5, 21):
d[len(d) - i] = 0
for i in range(29, 45):
d[len(d) - i] = 0
retval = "".join(map(chr, d))
return retval
def chksum(K, T, data):
data = zerosigs(data)
SIGNATUREKEY = 'signaturekey\x00'
Ksign = hmac.new(K, SIGNATUREKEY, hashlib.md5).digest()
tmp = hashlib.md5(T + data).digest()
chksum = hmac.new(Ksign, tmp, hashlib.md5).digest()
return chksum
def getservsig(encchunk):
return str(encchunk[-44:-28])
def getprivsig(encchunk):
return str(encchunk[-20:-4])
def printdecode(kerbpayload, ktype=2):
d = decoder.decode(kerbpayload)
if ktype == 32:
print("Message Type: " + str(d[0][1]))
print("Realm: " + str(d[0][2]))
print("Principal: " + str(d[0][3][1][0]))
print("Ticket Version (tkt-vno): " + str(d[0][4][0]))
print("Ticket Realm: " + str(d[0][4][1]))
print("Server, Name: " + str(d[0][4][2][1][0]))
print("Server, Name: " + str(d[0][4][2][1][1]))
elif ktype == 2:
print("a")
def extract_ticket_from_kirbi(filename):
with open(filename, 'rb') as fd:
data = fd.read()
return extract_ticket(data)
def extract_ticket(data):
if data[0] == 0x76:
return (decoder.decode(data)[0][2][0][3][2]).asOctets()
elif data[:2] == b'6d':
return (decoder.decode(ticket.decode('hex'))[0][4][3][2]).asOctets()
| true | true |
f731b06c3942398040ef6910dde20647365a1923 | 992 | py | Python | IPAddressPool/IPAddressPool/ip/IpAcquire.py | ruxuezhuimeng/spider | badf61a66e99cf1936b5c404ad85f1b00838943e | [
"MIT"
] | 1 | 2020-06-17T05:56:19.000Z | 2020-06-17T05:56:19.000Z | IPAddressPool/IPAddressPool/ip/IpAcquire.py | ruxuezhuimeng/spider | badf61a66e99cf1936b5c404ad85f1b00838943e | [
"MIT"
] | null | null | null | IPAddressPool/IPAddressPool/ip/IpAcquire.py | ruxuezhuimeng/spider | badf61a66e99cf1936b5c404ad85f1b00838943e | [
"MIT"
] | null | null | null | from config.SourceUrl import getUrl
from ip.Ip2Db import insert
import threading
import requests
from Log import log
# Browser-like User-Agent so the proxy provider does not reject the request.
header = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
}
def acquireIp():
    """Fetch a batch of proxy IPs from the configured source URL and parse them."""
    aUrl = getUrl()
    log.info('获取ip地址:{}'.format(aUrl))
    try:
        reponse = requests.get(aUrl, headers=header, timeout=5)
        if reponse.status_code == 200:
            parseHtml(reponse.text)
    # Catch only request-level failures (timeout, DNS, connection errors)
    # instead of the old bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit and hid programming errors.
    except requests.RequestException:
        # traceback.print_exc()
        log.error('请求ip异常:{}'.format(aUrl))
def parseHtml(html):
    """Parse the provider's response body and insert each IP line into the DB.

    NOTE(review): the chained replace() also strips every literal 'b'
    character -- presumably meant to undo a Python bytes repr (b'...');
    confirm against the actual provider response format.
    """
    html = html.replace('\'', '').replace('b', '').replace('<r/>', '').replace('\r', '')
    ips = html.split("\n")
    for ip in ips:
        ip = ip.strip()
        if 'false' in ip:
            # Provider signals the daily quota is exhausted
            # NOTE(review): `log.war` -- custom logger method? verify spelling.
            log.war('您的套餐今日已到达上限')
            return
        elif '' == ip:
            # Blank line terminates the batch
            return
        else:
            if '.' in ip:
                # Insert asynchronously so slow DB writes don't block parsing
                threading.Thread(target=insert, args=(ip,)).start()
| 26.810811 | 134 | 0.566532 | from config.SourceUrl import getUrl
from ip.Ip2Db import insert
import threading
import requests
from Log import log
header = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
}
def acquireIp():
aUrl = getUrl()
log.info('获取ip地址:{}'.format(aUrl))
try:
reponse = requests.get(aUrl, headers=header, timeout=5)
if reponse.status_code == 200:
parseHtml(reponse.text)
except:
log.error('请求ip异常:{}'.format(aUrl))
def parseHtml(html):
html = html.replace('\'', '').replace('b', '').replace('<r/>', '').replace('\r', '')
ips = html.split("\n")
for ip in ips:
ip = ip.strip()
if 'false' in ip:
log.war('您的套餐今日已到达上限')
return
elif '' == ip:
return
else:
if '.' in ip:
threading.Thread(target=insert, args=(ip,)).start()
| true | true |
f731b08e6a086de9df0318dd73e3fdb7854f1ed6 | 1,413 | py | Python | models/diabetes/train.py | aditya9126/pipelines-azureml | 0c747f12e02ee3d3976746663bd1da0ab5935887 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | models/diabetes/train.py | aditya9126/pipelines-azureml | 0c747f12e02ee3d3976746663bd1da0ab5935887 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | models/diabetes/train.py | aditya9126/pipelines-azureml | 0c747f12e02ee3d3976746663bd1da0ab5935887 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | # new update
import pickle
import os
import numpy as np
from sklearn.datasets import load_diabetes
from sklearn.linear_model import Ridge
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from azureml.core.run import Run
from utils import mylib
# Azure ML training script: sweep Ridge regression over a range of alphas on
# the sklearn diabetes dataset, logging metrics and saving each model.
os.makedirs('./outputs', exist_ok=True)
X, y = load_diabetes(return_X_y=True)
# Run context ties logged metrics/artifacts to the current AML run
run = Run.get_context()
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                    test_size=0.2,
                                                    random_state=0)
data = {"train": {"X": X_train, "y": y_train},
        "test": {"X": X_test, "y": y_test}}
# list of numbers from 0.0 to 1.0 with a 0.05 interval
alphas = mylib.get_alphas()
for alpha in alphas:
    # Use Ridge algorithm to create a regression model
    reg = Ridge(alpha=alpha)
    reg.fit(data["train"]["X"], data["train"]["y"])
    preds = reg.predict(data["test"]["X"])
    mse = mean_squared_error(preds, data["test"]["y"])
    # Log the regularisation strength and its test error to the AML run
    run.log('alpha', alpha)
    run.log('mse', mse)
    # Save model in the outputs folder so it automatically get uploaded when running on AML Compute
    model_file_name = 'ridge_{0:.2f}.pkl'.format(alpha)
    with open(os.path.join('./outputs/', model_file_name), 'wb') as file:
        pickle.dump(reg, file)
    print('alpha is {0:.2f}, and mse is {1:0.2f}'.format(alpha, mse))
# Ridge-regression sweep over alphas on the diabetes dataset; logs alpha/mse
# to the Azure ML run and pickles each model into ./outputs.
import pickle
import os
import numpy as np
from sklearn.datasets import load_diabetes
from sklearn.linear_model import Ridge
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from azureml.core.run import Run
from utils import mylib
os.makedirs('./outputs', exist_ok=True)
X, y = load_diabetes(return_X_y=True)
run = Run.get_context()
# Deterministic 80/20 train/test split.
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                    test_size=0.2,
                                                    random_state=0)
data = {"train": {"X": X_train, "y": y_train},
        "test": {"X": X_test, "y": y_test}}
alphas = mylib.get_alphas()
for alpha in alphas:
    reg = Ridge(alpha=alpha)
    reg.fit(data["train"]["X"], data["train"]["y"])
    preds = reg.predict(data["test"]["X"])
    mse = mean_squared_error(preds, data["test"]["y"])
    run.log('alpha', alpha)
    run.log('mse', mse)
    # One pickle per alpha; ./outputs is uploaded automatically on AML Compute.
    model_file_name = 'ridge_{0:.2f}.pkl'.format(alpha)
    with open(os.path.join('./outputs/', model_file_name), 'wb') as file:
        pickle.dump(reg, file)
    print('alpha is {0:.2f}, and mse is {1:0.2f}'.format(alpha, mse))
| true | true |
f731b0a7f4cf43d68c7253dd502f524c7bafe510 | 1,522 | py | Python | src/ewaluacja2021/management/commands/raport_3n_to_xlsx.py | iplweb/bpp | f027415cc3faf1ca79082bf7bacd4be35b1a6fdf | [
"BSD-3-Clause"
] | null | null | null | src/ewaluacja2021/management/commands/raport_3n_to_xlsx.py | iplweb/bpp | f027415cc3faf1ca79082bf7bacd4be35b1a6fdf | [
"BSD-3-Clause"
] | 41 | 2019-11-07T00:07:02.000Z | 2022-02-27T22:09:39.000Z | src/ewaluacja2021/management/commands/raport_3n_to_xlsx.py | iplweb/bpp | f027415cc3faf1ca79082bf7bacd4be35b1a6fdf | [
"BSD-3-Clause"
] | null | null | null | # -*- encoding: utf-8 -*-
import os
from argparse import FileType
from django.core.management import BaseCommand
from ewaluacja2021.reports import load_data, rekordy
from ewaluacja2021.util import autor2fn
from ewaluacja2021.xlsy import AutorskiXLSX, CalosciowyXLSX
from bpp.models import Autor
from bpp.util import pbar
class Command(BaseCommand):
    """Render a 3N report JSON file into XLSX workbooks: one overall
    summary plus one workbook per author appearing in the report."""

    def add_arguments(self, parser):
        # Positional input file (JSON report) and optional output directory.
        parser.add_argument("wejscie", type=FileType("r"))
        parser.add_argument("--katalog-wyjsciowy", type=str, default=None)

    def handle(self, wejscie, katalog_wyjsciowy, liczba_n=None, *args, **options):
        data = load_data(wejscie)
        # Default the output directory from the input file name.
        outdir = katalog_wyjsciowy
        if outdir is None:
            outdir = wejscie.name.replace(".json", "_output")
        if not os.path.exists(outdir):
            os.mkdir(outdir)
        all_records = rekordy(data)
        # Summary workbook covering every record in the report.
        CalosciowyXLSX(
            "AAA_rekordy",
            rekordy=all_records,
            dane=data,
            katalog_wyjsciowy=outdir,
        ).zrob()
        # One workbook per author referenced by the report's records.
        author_qs = Autor.objects.filter(
            pk__in=(x.autor_id for x in rekordy(data))
        )
        for autor in pbar(author_qs, label="Dane autorow..."):
            AutorskiXLSX(
                autor=autor,
                title=autor2fn(autor),
                rekordy=all_records.filter(autor_id=autor.id),
                dane=data,
                katalog_wyjsciowy=outdir,
            ).zrob()
| 30.44 | 82 | 0.635348 |
import os
from argparse import FileType
from django.core.management import BaseCommand
from ewaluacja2021.reports import load_data, rekordy
from ewaluacja2021.util import autor2fn
from ewaluacja2021.xlsy import AutorskiXLSX, CalosciowyXLSX
from bpp.models import Autor
from bpp.util import pbar
class Command(BaseCommand):
    """Convert a 3N report JSON file into XLSX workbooks: one overall
    summary workbook plus one workbook per author found in the report."""
    def add_arguments(self, parser):
        # Positional input file plus an optional output-directory override.
        parser.add_argument("wejscie", type=FileType("r"))
        parser.add_argument("--katalog-wyjsciowy", type=str, default=None)
    def handle(self, wejscie, katalog_wyjsciowy, liczba_n=None, *args, **options):
        dane = load_data(wejscie)
        # Default the output directory from the input file name.
        if katalog_wyjsciowy is None:
            katalog_wyjsciowy = wejscie.name.replace(".json", "_output")
        if not os.path.exists(katalog_wyjsciowy):
            os.mkdir(katalog_wyjsciowy)
        rekordy_danych = rekordy(dane)
        # Summary workbook covering every record in the report.
        CalosciowyXLSX(
            "AAA_rekordy",
            rekordy=rekordy_danych,
            dane=dane,
            katalog_wyjsciowy=katalog_wyjsciowy,
        ).zrob()
        # One workbook per author referenced by the report's records.
        for autor in pbar(
            Autor.objects.filter(pk__in=(x.autor_id for x in rekordy(dane))),
            label="Dane autorow...",
        ):
            rekordy_autora = rekordy_danych.filter(autor_id=autor.id)
            AutorskiXLSX(
                autor=autor,
                title=autor2fn(autor),
                rekordy=rekordy_autora,
                dane=dane,
                katalog_wyjsciowy=katalog_wyjsciowy,
            ).zrob()
| true | true |
f731b0e7750a7bea8517170165c3c68c3dc22cf8 | 844 | py | Python | setup.py | jbarbadillo/pycoinmon | 035e64652c533a569c6b236f54e12aff35ad82b1 | [
"MIT"
] | null | null | null | setup.py | jbarbadillo/pycoinmon | 035e64652c533a569c6b236f54e12aff35ad82b1 | [
"MIT"
] | null | null | null | setup.py | jbarbadillo/pycoinmon | 035e64652c533a569c6b236f54e12aff35ad82b1 | [
"MIT"
] | null | null | null | from setuptools import setup
from pycoinmon.metadata import Metadata
metadata = Metadata()
setup(
name = 'pycoinmon',
packages = ['pycoinmon'],
version = metadata.get_version(),
license = 'MIT',
description = 'Python Port Based on COINMON',
url = 'https://github.com/RDCH106/pycoinmon',
keywords = ['bitcoin', 'criptocurrency', 'crypto', 'ticker', 'python', 'cli', 'price-tracker', 'command-line'],
classifiers = ['Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
) | 40.190476 | 115 | 0.575829 | from setuptools import setup
from pycoinmon.metadata import Metadata
metadata = Metadata()
setup(
name = 'pycoinmon',
packages = ['pycoinmon'],
version = metadata.get_version(),
license = 'MIT',
description = 'Python Port Based on COINMON',
url = 'https://github.com/RDCH106/pycoinmon',
keywords = ['bitcoin', 'criptocurrency', 'crypto', 'ticker', 'python', 'cli', 'price-tracker', 'command-line'],
classifiers = ['Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
) | true | true |
f731b187b5cb80cd697134e200a8839565bad37f | 453 | py | Python | card.py | yehudareisler/risky-game | ea919bd07a2acf75dfd184b5c59ad80d41f47428 | [
"MIT"
] | 3 | 2021-01-21T02:06:12.000Z | 2022-03-14T10:26:43.000Z | card.py | yehudareisler/risky-game | ea919bd07a2acf75dfd184b5c59ad80d41f47428 | [
"MIT"
] | null | null | null | card.py | yehudareisler/risky-game | ea919bd07a2acf75dfd184b5c59ad80d41f47428 | [
"MIT"
] | 1 | 2021-08-29T07:47:12.000Z | 2021-08-29T07:47:12.000Z | from enum import Enum
class Card:
    """A card pairing a territory with a card type (typically a CardType)."""

    # Class-level defaults; both are overwritten per-instance in __init__.
    card_type = None
    territory_name = ''

    def __init__(self, territory_name, card_type):
        self.territory_name = territory_name
        self.card_type = card_type

    def __str__(self):
        return f'Card of {self.territory_name} with {self.card_type} type'

    def __repr__(self):
        # Unambiguous form for debugging, e.g. Card('Alaska', <CardType.INFANTRY: 1>).
        return f'{type(self).__name__}({self.territory_name!r}, {self.card_type!r})'
class CardType(Enum):
    """Card type constants; values are fixed ints used by the game."""
    WILDCARD = 0
    INFANTRY = 1
    CAVALRY = 2
    ARTILLERY = 3
    def __str__(self):
        # Render as the bare member name (e.g. 'INFANTRY') for display.
        return self.name
| 18.875 | 74 | 0.646799 | from enum import Enum
class Card:
    """A game card tied to one territory and one card type."""

    # Defaults at class level; instances set their own values in __init__.
    card_type = None
    territory_name = ''

    def __init__(self, territory_name, card_type):
        self.card_type = card_type
        self.territory_name = territory_name

    def __str__(self):
        return f'Card of {self.territory_name} with {self.card_type} type'
class CardType(Enum):
    """Card type constants; values are fixed ints used by the game."""
    WILDCARD = 0
    INFANTRY = 1
    CAVALRY = 2
    ARTILLERY = 3
    def __str__(self):
        # Display the plain member name, e.g. 'WILDCARD'.
        return self.name
| true | true |
f731b1ea9b5a643b7fe01d0c7b531d8169710a1a | 401 | py | Python | mmtbx/command_line/ntc_validation.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 155 | 2016-11-23T12:52:16.000Z | 2022-03-31T15:35:44.000Z | mmtbx/command_line/ntc_validation.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 590 | 2016-12-10T11:31:18.000Z | 2022-03-30T23:10:09.000Z | mmtbx/command_line/ntc_validation.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 115 | 2016-11-15T08:17:28.000Z | 2022-02-09T15:30:14.000Z | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
# LIBTBX_SET_DISPATCHER_NAME cctbx.development.ntc_validation
from iotbx.cli_parser import run_program
from mmtbx.programs.ntc_validation import Program
# =============================================================================
# Command-line entry point: delegate argument parsing and execution of the
# NtC validation program to the shared cctbx CLI runner.
if (__name__ == '__main__'):
  results = run_program(program_class=Program)
| 33.416667 | 79 | 0.645885 |
from __future__ import absolute_import, division, print_function
from iotbx.cli_parser import run_program
from mmtbx.programs.ntc_validation import Program
# Entry point: hand off CLI parsing/execution to the shared cctbx runner.
if (__name__ == '__main__'):
  results = run_program(program_class=Program)
| true | true |
f731b29f030996794fb1c57d9250faa45d387432 | 811 | py | Python | 1 Scripto/07.py | peterszerzo/rhino-pythonscript-tutorials | 880861eecc4e7dd5897695aae532e81c9bcacf94 | [
"MIT"
] | 2 | 2019-10-26T17:25:57.000Z | 2020-12-29T14:52:45.000Z | 1 Scripto/07.py | peterszerzo/rhino-pythonscript-tutorials | 880861eecc4e7dd5897695aae532e81c9bcacf94 | [
"MIT"
] | 1 | 2019-04-23T02:17:35.000Z | 2019-04-23T10:24:51.000Z | 1 Scripto/07.py | peterszerzo/rhino-pythonscript-tutorials | 880861eecc4e7dd5897695aae532e81c9bcacf94 | [
"MIT"
] | null | null | null | """
Rhino Python Script Tutorial
Exercise 07
Let's reorganize the previous code to store the coordinates of our points in a list.
This list is called an array.
The following lesson explains why this is useful.
"""
import rhinoscriptsyntax as rs
import math
def Main():
    """Plot a spiral of n points: the angle sweeps 1.5 turns (3*pi radians)
    while the radius grows linearly from radius_0 to 6 * radius_0. Points
    are added to the Rhino document and collected in a list."""
    n = 50
    radius_0 = 3
    points = []
    for i in range(0, n): # notice how range() accepts variables
        # as i goes from 0 to n-1, k goes from 0.0 to 1.0.
        # float() keeps this correct under Python 2 (Rhino's IronPython),
        # where i / (n - 1) would be integer division and leave k == 0
        # for every step but the last.
        k = float(i) / (n - 1)
        # sweep one and a half turns
        angle = k * 3 * math.pi
        # increase radius
        radius = radius_0 * (1 + 5 * k)
        point = [radius * math.cos(angle), radius * math.sin(angle), 0]
        rs.AddPoint(point)
        points.append(point)
Main() | 19.309524 | 84 | 0.574599 | """
Rhino Python Script Tutorial
Exercise 07
Let's reorganize the previous code to store the coordinates of our points in a list.
This list is called an array.
The following lesson explains why this is useful.
"""
import rhinoscriptsyntax as rs
import math
def Main():
    """Plot a spiral of n points with linearly growing radius."""
    n = 50
    radius_0 = 3
    points = []
    for i in range(0, n): # notice how range() accepts variables
        # a little trick here: as i goes from 0 to n-1, k will go from 0 to 1
        # NOTE(review): under Python 2 (Rhino's IronPython) this is integer
        # division and k stays 0 until the last step — confirm interpreter.
        k = i / (n - 1)
        # sweep the angle; 3 * pi is one and a half turns, not a full circle
        angle = k * 3 * math.pi
        # increase radius
        radius = radius_0 * (1 + 5 * k)
        point = [radius * math.cos(angle), radius * math.sin(angle),0]
        rs.AddPoint(point)
        points.append(point)
Main() | false | true |
f731b41b93060edbb8a60d852567a8c02dfe18c9 | 19,457 | py | Python | hive/indexer/cached_post.py | RailCoin/hivemind | cd78b952b55911b63a59c762d1d4530a25d7d8f5 | [
"MIT"
] | null | null | null | hive/indexer/cached_post.py | RailCoin/hivemind | cd78b952b55911b63a59c762d1d4530a25d7d8f5 | [
"MIT"
] | null | null | null | hive/indexer/cached_post.py | RailCoin/hivemind | cd78b952b55911b63a59c762d1d4530a25d7d8f5 | [
"MIT"
] | 1 | 2020-03-24T12:26:08.000Z | 2020-03-24T12:26:08.000Z | """Manages cached post data."""
import math
import collections
import logging
import ujson as json
from toolz import partition_all
from hive.db.adapter import Db
from hive.utils.post import post_basic, post_legacy, post_payout, post_stats
from hive.utils.timer import Timer
from hive.indexer.accounts import Accounts
log = logging.getLogger(__name__)
DB = Db.instance()
# levels of post dirtiness, in order of decreasing priority
LEVELS = ['insert', 'payout', 'update', 'upvote', 'recount']
def _keyify(items):
return dict(map(lambda x: ("val_%d" % x[0], x[1]), enumerate(items)))
class CachedPost:
    """Maintain update queue and writing to `hive_posts_cache`.

    All state lives at class level and every method is a classmethod, so
    this behaves as a process-wide singleton buffering dirty posts until
    `flush()` writes them out in batches.
    """
    # cursor signifying upper bound of cached post span
    _last_id = -1
    # cached id map
    _ids = {}
    # urls which are missing from id map
    _noids = set()
    # dirty posts; {key: dirty_level}
    _queue = collections.OrderedDict()
    # new promoted values, pending write
    _pending_promoted = {}
    @classmethod
    def update_promoted_amount(cls, post_id, amount):
        """Set a new pending amount for a post for its next update."""
        cls._pending_promoted[post_id] = amount
    @classmethod
    def _dirty(cls, level, author, permlink, pid=None):
        """Mark a post as dirty."""
        assert level in LEVELS, "invalid level {}".format(level)
        mode = LEVELS.index(level)
        url = author + '/' + permlink
        # add to appropriate queue.
        if url not in cls._queue:
            cls._queue[url] = mode
        # upgrade priority if needed
        elif cls._queue[url] > mode:
            cls._queue[url] = mode
        # add to id map, or register missing
        if pid and url in cls._ids:
            assert pid == cls._ids[url], "pid map conflict #78"
        elif pid:
            cls._ids[url] = pid
        else:
            cls._noids.add(url)
    @classmethod
    def _get_id(cls, url):
        """Given a post url, get its id."""
        if url in cls._ids:
            return cls._ids[url]
        raise Exception("requested id for %s not in map" % url)
    @classmethod
    def recount(cls, author, permlink, pid=None):
        """Force a child re-count."""
        cls._dirty('recount', author, permlink, pid)
    @classmethod
    def vote(cls, author, permlink, pid=None):
        """Handle a post dirtied by a `vote` op."""
        cls._dirty('upvote', author, permlink, pid)
        Accounts.dirty(set([author])) # rep changed
    @classmethod
    def insert(cls, author, permlink, pid):
        """Handle a post created by a `comment` op."""
        cls._dirty('insert', author, permlink, pid)
    @classmethod
    def update(cls, author, permlink, pid):
        """Handle a post updated by a `comment` op."""
        cls._dirty('update', author, permlink, pid)
    @classmethod
    def delete(cls, post_id, author, permlink):
        """Handle a post deleted by a `delete_comment` op.
        With steemd, posts can be 'deleted' or unallocated in certain
        conditions. It requires foregoing convenient assumptions, e.g.:
        - author/permlink is unique and always references the same post
        - you can always get_content on any author/permlink you see in an op
        """
        DB.query("DELETE FROM hive_posts_cache WHERE post_id = :id", id=post_id)
        # if it was queued for a write, remove it
        url = author+'/'+permlink
        if url in cls._queue:
            del cls._queue[url]
        if url in cls._ids:
            del cls._ids[url]
    @classmethod
    def undelete(cls, post_id, author, permlink):
        """Handle a post 'undeleted' by a `comment` op.
        'Undeletion' occurs when hive detects that a previously deleted
        author/permlink combination has been reused on a new post. Hive
        does not delete hive_posts entries because they are currently
        irreplaceable in case of a fork. Instead, we reuse the slot.
        It's important to immediately insert a placeholder in the cache
        table since hive only scans forward. This row's properties push
        it to the front of update-immediately queue.
        Alternate ways of handling undeletes:
        - delete row from hive_posts so that it can be re-indexed (re-id'd)
          - comes at a risk of losing expensive entry on fork (and no undo)
        - create undo table for hive_posts, hive_follows, etc, & link to block
        - rely on steemd's post.id instead of database autoincrement
          - requires way to query steemd post objects by id to be useful
          - batch get_content_by_ids in steemd would be /huge/ speedup
        - create a consistent cache queue table or dirty flag col
        """
        # do not force-write unless cache spans this id.
        if post_id > cls.last_id():
            cls.insert(author, permlink, post_id)
            return
        # force-create dummy row to ensure cache is aware. only needed when
        # cache already spans this id, in case in-mem buffer is lost. default
        # value for payout_at ensures that it will get picked up for update.
        DB.query(cls._insert({
            'post_id': post_id,
            'author': author,
            'permlink': permlink}))
        cls.update(author, permlink, post_id)
    @classmethod
    def flush(cls, steem, trx=False, spread=1, full_total=None):
        """Process all posts which have been marked as dirty."""
        cls._load_noids() # load missing ids
        assert spread == 1, "not fully tested, use with caution"
        counts = {}
        tuples = []
        for level in LEVELS:
            tups = cls._get_tuples_for_level(level, spread)
            counts[level] = len(tups)
            tuples.extend(tups)
        if trx or len(tuples) > 250:
            changed = filter(lambda t: t[1], counts.items())
            summary = list(map(lambda group: "%d %ss" % group[::-1], changed))
            summary = ', '.join(summary) if summary else 'none'
            log.info("[PREP] posts cache process: %s", summary)
        cls._update_batch(steem, tuples, trx, full_total=full_total)
        # drop the flushed entries from the dirty queue and id map
        for url, _, _ in tuples:
            del cls._queue[url]
            if url in cls._ids:
                del cls._ids[url]
        return counts
    @classmethod
    def _get_tuples_for_level(cls, level, fraction=1):
        """Query tuples to be updated.
        Given a specific flush level (insert, payout, update, upvote),
        returns a list of tuples to be passed to _update_batch, in the
        form of: `[(url, id, level)*]`
        """
        mode = LEVELS.index(level)
        urls = [url for url, i in cls._queue.items() if i == mode]
        if fraction > 1 and level != 'insert': # inserts must be full flush
            urls = urls[0:math.ceil(len(urls) / fraction)]
        return [(url, cls._get_id(url), level) for url in urls]
    @classmethod
    def _load_noids(cls):
        """Load ids for posts we don't know the ids of.
        When posts are marked dirty, specifying the id is optional
        because a successive call might be able to provide it "for
        free". Before flushing changes this method should be called
        to fill in any gaps.
        """
        from hive.indexer.posts import Posts
        noids = cls._noids - set(cls._ids.keys())
        tuples = [(Posts.get_id(*url.split('/')), url) for url in noids]
        for pid, url in tuples:
            assert pid, "WARNING: missing id for %s" % url
            cls._ids[url] = pid
        cls._noids = set()
        return len(tuples)
    @classmethod
    def _select_paidout_tuples(cls, date):
        """Query hive_posts_cache for payout sweep.
        Select all posts which should have been paid out before `date`
        yet do not have the `is_paidout` flag set. We perform this
        sweep to ensure that we always have accurate final payout
        state. Since payout values vary even between votes, we'd have
        stale data if we didn't sweep, and only waited for incoming
        votes before an update.
        """
        from hive.indexer.posts import Posts
        sql = """SELECT post_id FROM hive_posts_cache
                  WHERE is_paidout = '0' AND payout_at <= :date"""
        ids = DB.query_col(sql, date=date)
        if not ids:
            return []
        sql = """SELECT id, author, permlink
                 FROM hive_posts WHERE id IN :ids"""
        results = DB.query_all(sql, ids=tuple(ids))
        return Posts.save_ids_from_tuples(results)
    @classmethod
    def dirty_paidouts(cls, date):
        """Mark dirty all paidout posts not yet updated in db."""
        paidout = cls._select_paidout_tuples(date)
        authors = set()
        for (pid, author, permlink) in paidout:
            authors.add(author)
            cls._dirty('payout', author, permlink, pid)
        Accounts.dirty(authors) # force-update accounts on payout
        if len(paidout) > 200:
            log.info("[PREP] Found %d payouts for %d authors since %s",
                     len(paidout), len(authors), date)
        return len(paidout)
    @classmethod
    def _select_missing_tuples(cls, last_cached_id, limit=1000000):
        """Fetch posts inserted into main posts table but not cache."""
        from hive.indexer.posts import Posts
        sql = """SELECT id, author, permlink, promoted FROM hive_posts
                  WHERE is_deleted = '0' AND id > :id
               ORDER BY id LIMIT :limit"""
        results = DB.query_all(sql, id=last_cached_id, limit=limit)
        return Posts.save_ids_from_tuples(results)
    @classmethod
    def dirty_missing(cls, limit=250000):
        """Mark dirty all hive_posts records not yet written to cache."""
        from hive.indexer.posts import Posts
        # cached posts inserted sequentially, so compare MAX(id)'s
        last_cached_id = cls.last_id()
        last_post_id = Posts.last_id()
        gap = last_post_id - last_cached_id
        if gap:
            missing = cls._select_missing_tuples(last_cached_id, limit)
            for pid, author, permlink, promoted in missing:
                if promoted > 0: # ensure we don't miss promote amount
                    cls.update_promoted_amount(pid, promoted)
                cls._dirty('insert', author, permlink, pid)
        return gap
    @classmethod
    def recover_missing_posts(cls, steem):
        """Startup routine that cycles through missing posts.
        This is used for (1) initial sync, and (2) recovering missing
        cache records upon launch if hive fast-sync was interrupted.
        """
        gap = cls.dirty_missing()
        log.info("[INIT] %d missing post cache entries", gap)
        while cls.flush(steem, trx=True, full_total=gap)['insert']:
            gap = cls.dirty_missing()
    @classmethod
    def _update_batch(cls, steem, tuples, trx=True, full_total=None):
        """Fetch, process, and write a batch of posts.
        Given a set of posts, fetch from steemd and write them to the
        db. The `tuples` arg is the form of `[(url, id, level)*]`
        representing posts which are to be fetched from steemd and
        updated in cache.
        Regarding _bump_last_id: there's a rare edge case when the last
        hive_post entry has been deleted "in the future" (ie, we haven't
        seen the delete op yet). So even when the post is not found
        (i.e. `not post['author']`), it's important to advance _last_id,
        because this cursor is used to deduce any missing cache entries.
        """
        timer = Timer(total=len(tuples), entity='post',
                      laps=['rps', 'wps'], full_total=full_total)
        tuples = sorted(tuples, key=lambda x: x[1]) # enforce ASC id's
        for tups in partition_all(1000, tuples):
            timer.batch_start()
            buffer = []
            post_args = [tup[0].split('/') for tup in tups]
            posts = steem.get_content_batch(post_args)
            post_ids = [tup[1] for tup in tups]
            post_levels = [tup[2] for tup in tups]
            for pid, post, level in zip(post_ids, posts, post_levels):
                if post['author']:
                    buffer.extend(cls._sql(pid, post, level=level))
                else:
                    # When a post has been deleted (or otherwise DNE),
                    # steemd simply returns a blank post object w/ all
                    # fields blank. While it's best to not try to cache
                    # already-deleted posts, it can happen during missed
                    # post sweep and while using `trail_blocks` > 0.
                    pass
                cls._bump_last_id(pid)
            timer.batch_lap()
            DB.batch_queries(buffer, trx)
            timer.batch_finish(len(posts))
            if len(tuples) >= 1000:
                log.info(timer.batch_status())
    @classmethod
    def last_id(cls):
        """Retrieve the latest post_id that was cached."""
        if cls._last_id == -1:
            # after initial query, we maintain last_id w/ _bump_last_id()
            sql = "SELECT COALESCE(MAX(post_id), 0) FROM hive_posts_cache"
            cls._last_id = DB.query_one(sql)
        return cls._last_id
    @classmethod
    def _bump_last_id(cls, next_id):
        """Update our last_id based on a recent insert."""
        last_id = cls.last_id()
        if next_id <= last_id:
            return
        if next_id - last_id > 2:
            cls._ensure_safe_gap(last_id, next_id)
            if next_id - last_id > 4:
                # gap of 2 is common due to deletions. report on larger gaps.
                log.warning("skipping post ids %d -> %d", last_id, next_id)
        cls._last_id = next_id
    @classmethod
    def _ensure_safe_gap(cls, last_id, next_id):
        """Paranoid check of important operating assumption."""
        sql = """
            SELECT COUNT(*) FROM hive_posts
            WHERE id BETWEEN :x1 AND :x2 AND is_deleted = '0'
        """
        missing_posts = DB.query_one(sql, x1=(last_id + 1), x2=(next_id - 1))
        if not missing_posts:
            return
        raise Exception("found large cache gap: %d --> %d (%d)"
                        % (last_id, next_id, missing_posts))
    @classmethod
    def _sql(cls, pid, post, level=None):
        """Given a post and "update level", generate SQL edit statement.
        Valid levels are:
        - `insert`: post does not yet exist in cache
        - `update`: post was modified
        - `payout`: post was paidout
        - `upvote`: post payout/votes changed
        """
        #pylint: disable=bad-whitespace
        assert post['author'], "post {} is blank".format(pid)
        # last-minute sanity check to ensure `pid` is correct #78
        pid2 = cls._get_id(post['author']+'/'+post['permlink'])
        assert pid == pid2, "hpc id %d maps to %d" % (pid, pid2)
        # inserts always sequential. if pid > last_id, this operation
        # *must* be an insert; so `level` must not be any form of update.
        if pid > cls.last_id() and level != 'insert':
            raise Exception("WARNING: new pid, but level=%s. #%d vs %d, %s"
                            % (level, pid, cls.last_id(), repr(post)))
        # start building the queries
        tag_sqls = []
        values = [('post_id', pid)]
        # immutable; write only once (*edge case: undeleted posts)
        if level == 'insert':
            values.extend([
                ('author', post['author']),
                ('permlink', post['permlink']),
                ('category', post['category']),
                ('depth', post['depth'])])
        # always write, unless simple vote update
        if level in ['insert', 'payout', 'update']:
            basic = post_basic(post)
            values.extend([
                ('created_at', post['created']), # immutable*
                ('updated_at', post['last_update']),
                ('title', post['title']),
                ('payout_at', basic['payout_at']), # immutable*
                ('preview', basic['preview']),
                ('body', basic['body']),
                ('img_url', basic['image']),
                ('is_nsfw', basic['is_nsfw']),
                ('is_declined', basic['is_payout_declined']),
                ('is_full_power', basic['is_full_power']),
                ('is_paidout', basic['is_paidout']),
                ('json', json.dumps(basic['json_metadata'])),
                ('raw_json', json.dumps(post_legacy(post))),
            ])
        # update tags if action is insert/update and is root post
        if level in ['insert', 'update'] and not post['depth']:
            diff = level != 'insert' # do not attempt tag diff on insert
            tag_sqls.extend(cls._tag_sqls(pid, basic['tags'], diff=diff))
        # if there's a pending promoted value to write, pull it out
        if pid in cls._pending_promoted:
            bal = cls._pending_promoted.pop(pid)
            values.append(('promoted', bal))
        # update unconditionally
        payout = post_payout(post)
        stats = post_stats(post)
        values.extend([
            ('payout', "%f" % payout['payout']),
            ('rshares', "%d" % payout['rshares']),
            ('votes', "%s" % payout['csvotes']),
            ('sc_trend', "%f" % payout['sc_trend']),
            ('sc_hot', "%f" % payout['sc_hot']),
            ('flag_weight', "%f" % stats['flag_weight']),
            ('total_votes', "%d" % stats['total_votes']),
            ('up_votes', "%d" % stats['up_votes']),
            ('is_hidden', "%d" % stats['hide']),
            ('is_grayed', "%d" % stats['gray']),
            ('author_rep', "%f" % stats['author_rep']),
            ('children', "%d" % min(post['children'], 32767)),
        ])
        # if recounting, update the parent next pass.
        if level == 'recount' and post['depth']:
            cls.recount(post['parent_author'], post['parent_permlink'])
        # build the post insert/update SQL, add tag SQLs
        if level == 'insert':
            sql = cls._insert(values)
        else:
            sql = cls._update(values)
        return [sql] + tag_sqls
    @classmethod
    def _tag_sqls(cls, pid, tags, diff=True):
        """Generate SQL "deltas" for a post_id's associated tags."""
        next_tags = set(tags)
        curr_tags = set()
        if diff:
            sql = "SELECT tag FROM hive_post_tags WHERE post_id = :id"
            curr_tags = set(DB.query_col(sql, id=pid))
        to_rem = (curr_tags - next_tags)
        if to_rem:
            sql = "DELETE FROM hive_post_tags WHERE post_id = :id AND tag IN :tags"
            yield (sql, dict(id=pid, tags=tuple(to_rem)))
        to_add = (next_tags - curr_tags)
        if to_add:
            params = _keyify(to_add)
            vals = ["(:id, :%s)" % key for key in params.keys()]
            sql = "INSERT INTO hive_post_tags (post_id, tag) VALUES %s"
            sql += " ON CONFLICT DO NOTHING" # (conflicts due to collation)
            yield (sql % ','.join(vals), {'id': pid, **params})
    @classmethod
    def _insert(cls, values):
        """Build an INSERT for `hive_posts_cache` from (column, value) pairs."""
        return DB.build_insert('hive_posts_cache', values, pk='post_id')
    @classmethod
    def _update(cls, values):
        """Build an UPDATE for `hive_posts_cache` from (column, value) pairs."""
        return DB.build_update('hive_posts_cache', values, pk='post_id')
| 38.991984 | 83 | 0.582412 |
import math
import collections
import logging
import ujson as json
from toolz import partition_all
from hive.db.adapter import Db
from hive.utils.post import post_basic, post_legacy, post_payout, post_stats
from hive.utils.timer import Timer
from hive.indexer.accounts import Accounts
log = logging.getLogger(__name__)
DB = Db.instance()
LEVELS = ['insert', 'payout', 'update', 'upvote', 'recount']
def _keyify(items):
return dict(map(lambda x: ("val_%d" % x[0], x[1]), enumerate(items)))
class CachedPost:
_last_id = -1
_ids = {}
_noids = set()
_queue = collections.OrderedDict()
_pending_promoted = {}
@classmethod
def update_promoted_amount(cls, post_id, amount):
cls._pending_promoted[post_id] = amount
@classmethod
def _dirty(cls, level, author, permlink, pid=None):
assert level in LEVELS, "invalid level {}".format(level)
mode = LEVELS.index(level)
url = author + '/' + permlink
if url not in cls._queue:
cls._queue[url] = mode
elif cls._queue[url] > mode:
cls._queue[url] = mode
if pid and url in cls._ids:
assert pid == cls._ids[url], "pid map conflict #78"
elif pid:
cls._ids[url] = pid
else:
cls._noids.add(url)
@classmethod
def _get_id(cls, url):
if url in cls._ids:
return cls._ids[url]
raise Exception("requested id for %s not in map" % url)
@classmethod
def recount(cls, author, permlink, pid=None):
cls._dirty('recount', author, permlink, pid)
@classmethod
def vote(cls, author, permlink, pid=None):
cls._dirty('upvote', author, permlink, pid)
Accounts.dirty(set([author]))
@classmethod
def insert(cls, author, permlink, pid):
cls._dirty('insert', author, permlink, pid)
@classmethod
def update(cls, author, permlink, pid):
cls._dirty('update', author, permlink, pid)
@classmethod
def delete(cls, post_id, author, permlink):
DB.query("DELETE FROM hive_posts_cache WHERE post_id = :id", id=post_id)
url = author+'/'+permlink
if url in cls._queue:
del cls._queue[url]
if url in cls._ids:
del cls._ids[url]
@classmethod
def undelete(cls, post_id, author, permlink):
if post_id > cls.last_id():
cls.insert(author, permlink, post_id)
return
DB.query(cls._insert({
'post_id': post_id,
'author': author,
'permlink': permlink}))
cls.update(author, permlink, post_id)
@classmethod
def flush(cls, steem, trx=False, spread=1, full_total=None):
cls._load_noids()
assert spread == 1, "not fully tested, use with caution"
counts = {}
tuples = []
for level in LEVELS:
tups = cls._get_tuples_for_level(level, spread)
counts[level] = len(tups)
tuples.extend(tups)
if trx or len(tuples) > 250:
changed = filter(lambda t: t[1], counts.items())
summary = list(map(lambda group: "%d %ss" % group[::-1], changed))
summary = ', '.join(summary) if summary else 'none'
log.info("[PREP] posts cache process: %s", summary)
cls._update_batch(steem, tuples, trx, full_total=full_total)
for url, _, _ in tuples:
del cls._queue[url]
if url in cls._ids:
del cls._ids[url]
return counts
@classmethod
def _get_tuples_for_level(cls, level, fraction=1):
mode = LEVELS.index(level)
urls = [url for url, i in cls._queue.items() if i == mode]
if fraction > 1 and level != 'insert':
urls = urls[0:math.ceil(len(urls) / fraction)]
return [(url, cls._get_id(url), level) for url in urls]
@classmethod
def _load_noids(cls):
from hive.indexer.posts import Posts
noids = cls._noids - set(cls._ids.keys())
tuples = [(Posts.get_id(*url.split('/')), url) for url in noids]
for pid, url in tuples:
assert pid, "WARNING: missing id for %s" % url
cls._ids[url] = pid
cls._noids = set()
return len(tuples)
@classmethod
def _select_paidout_tuples(cls, date):
from hive.indexer.posts import Posts
sql = """SELECT post_id FROM hive_posts_cache
WHERE is_paidout = '0' AND payout_at <= :date"""
ids = DB.query_col(sql, date=date)
if not ids:
return []
sql = """SELECT id, author, permlink
FROM hive_posts WHERE id IN :ids"""
results = DB.query_all(sql, ids=tuple(ids))
return Posts.save_ids_from_tuples(results)
@classmethod
def dirty_paidouts(cls, date):
paidout = cls._select_paidout_tuples(date)
authors = set()
for (pid, author, permlink) in paidout:
authors.add(author)
cls._dirty('payout', author, permlink, pid)
Accounts.dirty(authors)
if len(paidout) > 200:
log.info("[PREP] Found %d payouts for %d authors since %s",
len(paidout), len(authors), date)
return len(paidout)
@classmethod
def _select_missing_tuples(cls, last_cached_id, limit=1000000):
from hive.indexer.posts import Posts
sql = """SELECT id, author, permlink, promoted FROM hive_posts
WHERE is_deleted = '0' AND id > :id
ORDER BY id LIMIT :limit"""
results = DB.query_all(sql, id=last_cached_id, limit=limit)
return Posts.save_ids_from_tuples(results)
@classmethod
def dirty_missing(cls, limit=250000):
from hive.indexer.posts import Posts
last_cached_id = cls.last_id()
last_post_id = Posts.last_id()
gap = last_post_id - last_cached_id
if gap:
missing = cls._select_missing_tuples(last_cached_id, limit)
for pid, author, permlink, promoted in missing:
if promoted > 0: # ensure we don't miss promote amount
cls.update_promoted_amount(pid, promoted)
cls._dirty('insert', author, permlink, pid)
return gap
@classmethod
def recover_missing_posts(cls, steem):
gap = cls.dirty_missing()
log.info("[INIT] %d missing post cache entries", gap)
while cls.flush(steem, trx=True, full_total=gap)['insert']:
gap = cls.dirty_missing()
@classmethod
def _update_batch(cls, steem, tuples, trx=True, full_total=None):
timer = Timer(total=len(tuples), entity='post',
laps=['rps', 'wps'], full_total=full_total)
tuples = sorted(tuples, key=lambda x: x[1])
for tups in partition_all(1000, tuples):
timer.batch_start()
buffer = []
post_args = [tup[0].split('/') for tup in tups]
posts = steem.get_content_batch(post_args)
post_ids = [tup[1] for tup in tups]
post_levels = [tup[2] for tup in tups]
for pid, post, level in zip(post_ids, posts, post_levels):
if post['author']:
buffer.extend(cls._sql(pid, post, level=level))
else:
# When a post has been deleted (or otherwise DNE),
# steemd simply returns a blank post object w/ all
# fields blank. While it's best to not try to cache
pass
cls._bump_last_id(pid)
timer.batch_lap()
DB.batch_queries(buffer, trx)
timer.batch_finish(len(posts))
if len(tuples) >= 1000:
log.info(timer.batch_status())
@classmethod
def last_id(cls):
if cls._last_id == -1:
sql = "SELECT COALESCE(MAX(post_id), 0) FROM hive_posts_cache"
cls._last_id = DB.query_one(sql)
return cls._last_id
@classmethod
def _bump_last_id(cls, next_id):
last_id = cls.last_id()
if next_id <= last_id:
return
if next_id - last_id > 2:
cls._ensure_safe_gap(last_id, next_id)
if next_id - last_id > 4:
log.warning("skipping post ids %d -> %d", last_id, next_id)
cls._last_id = next_id
@classmethod
def _ensure_safe_gap(cls, last_id, next_id):
sql = """
SELECT COUNT(*) FROM hive_posts
WHERE id BETWEEN :x1 AND :x2 AND is_deleted = '0'
"""
missing_posts = DB.query_one(sql, x1=(last_id + 1), x2=(next_id - 1))
if not missing_posts:
return
raise Exception("found large cache gap: %d --> %d (%d)"
% (last_id, next_id, missing_posts))
    @classmethod
    def _sql(cls, pid, post, level=None):
        """Build the SQL for one post; return [post_sql, *tag_sqls].

        `level` selects which columns are (re)written:
          - 'insert':  immutable cols + content cols + payout/stat cols
          - 'payout'/'update': content cols + payout/stat cols
          - 'recount' (and any other): payout/stat cols only
        """
        assert post['author'], "post {} is blank".format(pid)
        # sanity check: the author/permlink url must map back to this id
        pid2 = cls._get_id(post['author']+'/'+post['permlink'])
        assert pid == pid2, "hpc id %d maps to %d" % (pid, pid2)
        # a pid beyond the cached head is only legal for a fresh insert
        if pid > cls.last_id() and level != 'insert':
            raise Exception("WARNING: new pid, but level=%s. #%d vs %d, %s"
                            % (level, pid, cls.last_id(), repr(post)))
        tag_sqls = []
        values = [('post_id', pid)]
        # immutable post fields -- only written on initial insert
        if level == 'insert':
            values.extend([
                ('author', post['author']),
                ('permlink', post['permlink']),
                ('category', post['category']),
                ('depth', post['depth'])])
        # content fields -- rewritten whenever the post body may have changed
        if level in ['insert', 'payout', 'update']:
            basic = post_basic(post)
            values.extend([
                ('created_at', post['created']),
                ('updated_at', post['last_update']),
                ('title', post['title']),
                ('payout_at', basic['payout_at']),
                ('preview', basic['preview']),
                ('body', basic['body']),
                ('img_url', basic['image']),
                ('is_nsfw', basic['is_nsfw']),
                ('is_declined', basic['is_payout_declined']),
                ('is_full_power', basic['is_full_power']),
                ('is_paidout', basic['is_paidout']),
                ('json', json.dumps(basic['json_metadata'])),
                ('raw_json', json.dumps(post_legacy(post))),
            ])
        # tags apply to top-level posts only (depth == 0); diff against the
        # stored tags except on first insert, where there is nothing stored
        if level in ['insert', 'update'] and not post['depth']:
            diff = level != 'insert'
            tag_sqls.extend(cls._tag_sqls(pid, basic['tags'], diff=diff))
        # consume any promotion balance queued by update_promoted_amount
        if pid in cls._pending_promoted:
            bal = cls._pending_promoted.pop(pid)
            values.append(('promoted', bal))
        # update unconditionally
        payout = post_payout(post)
        stats = post_stats(post)
        values.extend([
            ('payout', "%f" % payout['payout']),
            ('rshares', "%d" % payout['rshares']),
            ('votes', "%s" % payout['csvotes']),
            ('sc_trend', "%f" % payout['sc_trend']),
            ('sc_hot', "%f" % payout['sc_hot']),
            ('flag_weight', "%f" % stats['flag_weight']),
            ('total_votes', "%d" % stats['total_votes']),
            ('up_votes', "%d" % stats['up_votes']),
            ('is_hidden', "%d" % stats['hide']),
            ('is_grayed', "%d" % stats['gray']),
            ('author_rep', "%f" % stats['author_rep']),
            ('children', "%d" % min(post['children'], 32767)),  # smallint cap
        ])
        # if recounting, update the parent next pass.
        if level == 'recount' and post['depth']:
            cls.recount(post['parent_author'], post['parent_permlink'])
        # build the post insert/update SQL, add tag SQLs
        if level == 'insert':
            sql = cls._insert(values)
        else:
            sql = cls._update(values)
        return [sql] + tag_sqls
    @classmethod
    def _tag_sqls(cls, pid, tags, diff=True):
        """Yield (sql, params) pairs syncing hive_post_tags to `tags`.

        With diff=True the currently stored tags are read first so only
        the delta (deletes + inserts) is emitted; with diff=False every
        tag is inserted (used on first insert, when nothing is stored).
        """
        next_tags = set(tags)
        curr_tags = set()
        if diff:
            sql = "SELECT tag FROM hive_post_tags WHERE post_id = :id"
            curr_tags = set(DB.query_col(sql, id=pid))
        # tags removed from the post
        to_rem = (curr_tags - next_tags)
        if to_rem:
            sql = "DELETE FROM hive_post_tags WHERE post_id = :id AND tag IN :tags"
            yield (sql, dict(id=pid, tags=tuple(to_rem)))
        # tags newly added to the post
        to_add = (next_tags - curr_tags)
        if to_add:
            params = _keyify(to_add)
            vals = ["(:id, :%s)" % key for key in params.keys()]
            sql = "INSERT INTO hive_post_tags (post_id, tag) VALUES %s"
            sql += " ON CONFLICT DO NOTHING"  # (conflicts due to collation)
            yield (sql % ','.join(vals), {'id': pid, **params})
    @classmethod
    def _insert(cls, values):
        """Build an INSERT for hive_posts_cache from (column, value) pairs."""
        return DB.build_insert('hive_posts_cache', values, pk='post_id')
    @classmethod
    def _update(cls, values):
        """Build an UPDATE for hive_posts_cache keyed on post_id."""
        return DB.build_update('hive_posts_cache', values, pk='post_id')
| true | true |
f731b44538e1b9c630d1af353fa35d3677a745d9 | 1,210 | py | Python | BotClean_Large.py | Aditya148/Hackerrank-Artificial-Intelligence | 5abbe561115b75cb28397661b7b0d7d53486ffee | [
"MIT"
] | null | null | null | BotClean_Large.py | Aditya148/Hackerrank-Artificial-Intelligence | 5abbe561115b75cb28397661b7b0d7d53486ffee | [
"MIT"
] | null | null | null | BotClean_Large.py | Aditya148/Hackerrank-Artificial-Intelligence | 5abbe561115b75cb28397661b7b0d7d53486ffee | [
"MIT"
] | null | null | null | import math
def update_position(posr, posc, dirties):
    """Return the dirty cells sorted by Euclidean distance from (posr, posc).

    Ties in distance fall back to comparing the [row, col] cells themselves,
    matching sorted() tuple ordering.
    """
    def distance_to(cell):
        # straight-line (Euclidean) distance from the bot to the cell
        return math.sqrt((cell[0] - posr) ** 2 + (cell[1] - posc) ** 2)
    return [cell for _, cell in sorted((distance_to(c), c) for c in dirties)]
# Decide (and print) the bot's next action for this turn.
def next_move(posr, posc, x, y, board):
    """Print one move toward the nearest dirty cell, or CLEAN when on it.

    Horizontal movement (LEFT/RIGHT) is preferred over vertical (UP/DOWN).
    """
    dirt_cells = [[r, c] for r in range(x) for c in range(y) if board[r][c] == 'd']
    target = update_position(posr, posc, dirt_cells)[0]
    if target[1] < posc:
        print('LEFT')
    elif target[1] > posc:
        print('RIGHT')
    elif target[0] < posr:
        print('UP')
    elif target[0] > posr:
        print('DOWN')
    else:
        print('CLEAN')
if __name__ == "__main__":
    # Read bot position (row col), grid dimensions (rows cols), then the
    # board itself, and emit exactly one move for this turn.
    pos = [int(i) for i in input().strip().split()]
    dim = [int(i) for i in input().strip().split()]
    board = [[j for j in input().strip()] for i in range(dim[0])]
    next_move(pos[0], pos[1], dim[0], dim[1], board)
'''
Sample Input
0 0
5 5
b---d
-d--d
--dd-
--d--
----d
Sample Output
RIGHT
'''
| 22.407407 | 89 | 0.539669 | import math
def update_position(posr, posc, dirties):
    """Return the dirty cells sorted by Euclidean distance from (posr, posc)."""
    nearest_dirt = []
    for i in range(len(dirties)):
        # Euclidean distance from the bot to dirt cell i
        result = math.sqrt(((dirties[i][0] - posr) ** 2) + ((dirties[i][1] - posc) ** 2))
        nearest_dirt.append(result)
    return [x for (y,x) in sorted(zip(nearest_dirt,dirties))]
def next_move(posr, posc, x, y, board):
    """Print one move toward the nearest dirty cell, or CLEAN when on it."""
    # collect every dirty cell on the x-by-y board
    dirties = []
    for i in range(x):
        for j in range(y):
            if board[i][j] == 'd':
                dirties.append([i, j])
    # nearest dirt first; horizontal moves are checked before vertical
    next_dirt = update_position(posr, posc, dirties)
    if next_dirt[0][1] < posc:
        print('LEFT')
    elif next_dirt[0][1] > posc:
        print('RIGHT')
    elif next_dirt[0][0] < posr:
        print('UP')
    elif next_dirt[0][0] > posr:
        print('DOWN')
    else:
        print('CLEAN')
if __name__ == "__main__":
    # Read bot position, grid dimensions, and the board; emit one move.
    pos = [int(i) for i in input().strip().split()]
    dim = [int(i) for i in input().strip().split()]
    board = [[j for j in input().strip()] for i in range(dim[0])]
    next_move(pos[0], pos[1], dim[0], dim[1], board)
| true | true |
f731b4a38a9bc8969c194157663764cf285ad1bc | 4,079 | py | Python | AC_Network.py | pasindubawantha/just-copy | 919b1723c87cadc5946f891da53f4abc7d50ff6e | [
"Apache-2.0"
] | 1 | 2020-07-30T19:29:06.000Z | 2020-07-30T19:29:06.000Z | AC_Network.py | pasindubawantha/just-copy | 919b1723c87cadc5946f891da53f4abc7d50ff6e | [
"Apache-2.0"
] | null | null | null | AC_Network.py | pasindubawantha/just-copy | 919b1723c87cadc5946f891da53f4abc7d50ff6e | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
import tensorflow.contrib.slim as slim
#import tensorflow.nn as slim
import numpy as np
from helpers import *
class AC_Network():
    """A3C actor-critic network (TF1 graph): conv encoder -> LSTM -> policy/value heads."""
    def __init__(self,s_size,a_size,scope,trainer,s_shape):
        """Build the graph under variable scope `scope`.

        Args:
            s_size: flattened observation length.
            a_size: number of discrete actions.
            scope: 'global' for the shared net, otherwise a worker scope
                (workers additionally get loss and gradient ops).
            trainer: TF optimizer used to apply worker grads to 'global'.
            s_shape: (height, width, channels) to unflatten observations.
        """
        with tf.variable_scope(scope):
            #Input and visual encoding layers
            self.inputs = tf.placeholder(shape=[None,s_size],dtype=tf.float32)
            self.imageIn = tf.reshape(self.inputs,shape=[-1,s_shape[0],s_shape[1],s_shape[2]])
            self.conv1 = slim.conv2d(activation_fn=tf.nn.elu,
                inputs=self.imageIn,num_outputs=16,
                kernel_size=[8,8],stride=[4,4],padding='VALID')
            self.conv2 = slim.conv2d(activation_fn=tf.nn.elu,
                inputs=self.conv1,num_outputs=32,
                kernel_size=[4,4],stride=[2,2],padding='VALID')
            hidden = slim.fully_connected(slim.flatten(self.conv2),256,activation_fn=tf.nn.elu)
            #Recurrent network for temporal dependencies
            lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(256,state_is_tuple=True)
            c_init = np.zeros((1, lstm_cell.state_size.c), np.float32)
            h_init = np.zeros((1, lstm_cell.state_size.h), np.float32)
            self.state_init = [c_init, h_init]
            c_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.c])
            h_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.h])
            self.state_in = (c_in, h_in)
            rnn_in = tf.expand_dims(hidden, [0])
            step_size = tf.shape(self.imageIn)[:1]
            state_in = tf.nn.rnn_cell.LSTMStateTuple(c_in, h_in)
            lstm_outputs, lstm_state = tf.nn.dynamic_rnn(
                lstm_cell, rnn_in, initial_state=state_in, sequence_length=step_size,
                time_major=False)
            lstm_c, lstm_h = lstm_state
            self.state_out = (lstm_c[:1, :], lstm_h[:1, :])
            rnn_out = tf.reshape(lstm_outputs, [-1, 256])
            #Output layers for policy and value estimations
            self.policy = slim.fully_connected(rnn_out,a_size,
                activation_fn=tf.nn.softmax,
                weights_initializer=normalized_columns_initializer(0.01),
                biases_initializer=None)
            self.value = slim.fully_connected(rnn_out,1,
                activation_fn=None,
                weights_initializer=normalized_columns_initializer(1.0),
                biases_initializer=None)
            #Only the worker network need ops for loss functions and gradient updating.
            if scope != 'global':
                self.actions = tf.placeholder(shape=[None],dtype=tf.int32)
                self.actions_onehot = tf.one_hot(self.actions,a_size,dtype=tf.float32)
                self.target_v = tf.placeholder(shape=[None],dtype=tf.float32)
                self.advantages = tf.placeholder(shape=[None],dtype=tf.float32)
                self.responsible_outputs = tf.reduce_sum(self.policy * self.actions_onehot, [1])
                #Loss functions
                self.value_loss = 0.5 * tf.reduce_sum(tf.square(self.target_v - tf.reshape(self.value,[-1])))
                self.entropy = - tf.reduce_sum(self.policy * tf.log(self.policy))
                self.policy_loss = -tf.reduce_sum(tf.log(self.responsible_outputs)*self.advantages)
                self.loss = 0.5 * self.value_loss + self.policy_loss - self.entropy * 0.01
                #Get gradients from local network using local losses
                local_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope)
                self.gradients = tf.gradients(self.loss,local_vars)
                self.var_norms = tf.global_norm(local_vars)
                self.grads,self.grad_norms = tf.clip_by_global_norm(self.gradients,40.0)
                #Apply local gradients to global network
                self.global_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'global')
self.apply_grads = trainer.apply_gradients(zip(self.grads,self.global_vars)) | 53.671053 | 109 | 0.617308 | import tensorflow as tf
import tensorflow.contrib.slim as slim
import numpy as np
from helpers import *
class AC_Network():
    """A3C actor-critic network (TF1 graph): conv encoder -> LSTM -> policy/value heads."""
    def __init__(self,s_size,a_size,scope,trainer,s_shape):
        """Build the graph under `scope`; non-'global' scopes also get loss/grad ops."""
        with tf.variable_scope(scope):
            # input encoding: unflatten observation and run two conv layers
            self.inputs = tf.placeholder(shape=[None,s_size],dtype=tf.float32)
            self.imageIn = tf.reshape(self.inputs,shape=[-1,s_shape[0],s_shape[1],s_shape[2]])
            self.conv1 = slim.conv2d(activation_fn=tf.nn.elu,
                inputs=self.imageIn,num_outputs=16,
                kernel_size=[8,8],stride=[4,4],padding='VALID')
            self.conv2 = slim.conv2d(activation_fn=tf.nn.elu,
                inputs=self.conv1,num_outputs=32,
                kernel_size=[4,4],stride=[2,2],padding='VALID')
            hidden = slim.fully_connected(slim.flatten(self.conv2),256,activation_fn=tf.nn.elu)
            # recurrent layer for temporal dependencies (state fed in/out)
            lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(256,state_is_tuple=True)
            c_init = np.zeros((1, lstm_cell.state_size.c), np.float32)
            h_init = np.zeros((1, lstm_cell.state_size.h), np.float32)
            self.state_init = [c_init, h_init]
            c_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.c])
            h_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.h])
            self.state_in = (c_in, h_in)
            rnn_in = tf.expand_dims(hidden, [0])
            step_size = tf.shape(self.imageIn)[:1]
            state_in = tf.nn.rnn_cell.LSTMStateTuple(c_in, h_in)
            lstm_outputs, lstm_state = tf.nn.dynamic_rnn(
                lstm_cell, rnn_in, initial_state=state_in, sequence_length=step_size,
                time_major=False)
            lstm_c, lstm_h = lstm_state
            self.state_out = (lstm_c[:1, :], lstm_h[:1, :])
            rnn_out = tf.reshape(lstm_outputs, [-1, 256])
            # output heads: softmax policy over actions and scalar value
            self.policy = slim.fully_connected(rnn_out,a_size,
                activation_fn=tf.nn.softmax,
                weights_initializer=normalized_columns_initializer(0.01),
                biases_initializer=None)
            self.value = slim.fully_connected(rnn_out,1,
                activation_fn=None,
                weights_initializer=normalized_columns_initializer(1.0),
                biases_initializer=None)
            # only worker networks need loss and gradient-update ops
            if scope != 'global':
                self.actions = tf.placeholder(shape=[None],dtype=tf.int32)
                self.actions_onehot = tf.one_hot(self.actions,a_size,dtype=tf.float32)
                self.target_v = tf.placeholder(shape=[None],dtype=tf.float32)
                self.advantages = tf.placeholder(shape=[None],dtype=tf.float32)
                self.responsible_outputs = tf.reduce_sum(self.policy * self.actions_onehot, [1])
                # A3C losses: value MSE, entropy bonus, policy gradient
                self.value_loss = 0.5 * tf.reduce_sum(tf.square(self.target_v - tf.reshape(self.value,[-1])))
                self.entropy = - tf.reduce_sum(self.policy * tf.log(self.policy))
                self.policy_loss = -tf.reduce_sum(tf.log(self.responsible_outputs)*self.advantages)
                self.loss = 0.5 * self.value_loss + self.policy_loss - self.entropy * 0.01
                # clipped local gradients, applied to the 'global' variables
                local_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope)
                self.gradients = tf.gradients(self.loss,local_vars)
                self.var_norms = tf.global_norm(local_vars)
                self.grads,self.grad_norms = tf.clip_by_global_norm(self.gradients,40.0)
                self.global_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'global')
self.apply_grads = trainer.apply_gradients(zip(self.grads,self.global_vars)) | true | true |
f731b554fa6750e8f70b9ce4eeb1ca9577514c53 | 7,291 | py | Python | tests/pipeline/test_node_run.py | andmikey/kedro | 9b4e4135720609d44ffdf5248246fe805f0b5469 | [
"Apache-2.0"
] | 1 | 2021-11-19T05:36:47.000Z | 2021-11-19T05:36:47.000Z | tests/pipeline/test_node_run.py | andmikey/kedro | 9b4e4135720609d44ffdf5248246fe805f0b5469 | [
"Apache-2.0"
] | null | null | null | tests/pipeline/test_node_run.py | andmikey/kedro | 9b4e4135720609d44ffdf5248246fe805f0b5469 | [
"Apache-2.0"
] | 1 | 2021-11-19T05:36:49.000Z | 2021-11-19T05:36:49.000Z | # Copyright 2018-2019 QuantumBlack Visual Analytics Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND
# NONINFRINGEMENT. IN NO EVENT WILL THE LICENSOR OR OTHER CONTRIBUTORS
# BE LIABLE FOR ANY CLAIM, DAMAGES, OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF, OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# The QuantumBlack Visual Analytics Limited ("QuantumBlack") name and logo
# (either separately or in combination, "QuantumBlack Trademarks") are
# trademarks of QuantumBlack. The License does not grant you any right or
# license to the QuantumBlack Trademarks. You may not use the QuantumBlack
# Trademarks or any confusingly similar mark as a trademark for your product,
# or use the QuantumBlack Trademarks in any other manner that might cause
# confusion in the marketplace, including but not limited to in advertising,
# on websites, or on software.
#
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import pytest
from kedro.io import LambdaDataSet
from kedro.pipeline import node
@pytest.fixture
def mocked_dataset(mocker):
    """LambdaDataSet stub whose load() always yields 42 and save() is a mock."""
    return LambdaDataSet(mocker.Mock(return_value=42), mocker.Mock())
def one_in_one_out(arg):
    """Identity helper: return the single input unchanged."""
    return arg
def one_in_dict_out(arg):
    """Wrap the single input in a dict under key 'ret'."""
    return dict(ret=arg)
def two_in_first_out(arg1, arg2):
    """Return the first of two inputs; the second is ignored."""
    return arg1
@pytest.fixture
def valid_nodes_with_inputs():
    """(node, inputs) pairs covering string, dict and list input specs."""
    return [
        (node(one_in_one_out, "ds1", "dsOut"), dict(ds1=42)),
        (node(one_in_dict_out, dict(arg="ds1"), dict(ret="dsOut")), dict(ds1=42)),
        (node(two_in_first_out, ["ds1", "ds2"], "dsOut"), dict(ds1=42, ds2=58)),
    ]
def test_valid_nodes(valid_nodes_with_inputs):
    """Check if node.run works as expected."""
    for node_, input_ in valid_nodes_with_inputs:
        # every valid pair should surface 42 under the 'dsOut' key
        assert node_.run(input_)["dsOut"] == 42
def test_run_got_dataframe(mocked_dataset):
    """Check an exception when non-dictionary (class object) is passed."""
    # the error message embeds the offending class's qualified name
    pattern = r"Node.run\(\) expects a dictionary or None, "
    pattern += r"but got <class \'kedro.io.lambda_data_set.LambdaDataSet\'> instead"
    with pytest.raises(ValueError, match=pattern):
        node(one_in_one_out, dict(arg="ds1"), "A").run(mocked_dataset)
class TestNodeRunInvalidInput:
    """Node.run must reject inputs that do not match the node's input spec."""
    def test_unresolved(self):
        """Pass no input when one is expected."""
        with pytest.raises(ValueError, match=r"expected one input"):
            node(one_in_one_out, "unresolved", "ds1").run(None)
    def test_no_inputs_node_error(self, mocked_dataset):
        """Pass one input when none is expected."""
        with pytest.raises(ValueError, match=r"expected no inputs"):
            node(lambda: 1, None, "A").run(dict(unexpected=mocked_dataset))
    def test_one_input_error(self, mocked_dataset):
        """Pass a different input."""
        pattern = r"expected one input named 'ds1', but got the "
        pattern += r"following 1 input\(s\) instead: \['arg'\]"
        with pytest.raises(ValueError, match=pattern):
            node(one_in_dict_out, "ds1", dict(ret="B", ans="C")).run(
                dict(arg=mocked_dataset)
            )
    def test_run_diff_size_lists(self, mocked_dataset):
        """Pass only one dict input when two (list) are expected."""
        pattern = r"expected 2 input\(s\) \['ds1', 'ds2'\], but "
        pattern += r"got the following 1 input\(s\) instead."
        with pytest.raises(ValueError, match=pattern):
            node(two_in_first_out, ["ds1", "ds2"], "A").run(dict(ds1=mocked_dataset))
    def test_run_diff_size_list_dict(self, mocked_dataset):
        """Pass two dict inputs when one (list) are expected."""
        pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
        pattern += r"following 2 input\(s\) instead: \['ds1', 'ds2'\]\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, ["ds1"], "A").run(dict(ds1=mocked_dataset, ds2=2))
    def test_run_list_dict_unavailable(self, mocked_dataset):
        """Pass one dict which is different from expected."""
        pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
        pattern += r"following 1 input\(s\) instead: \['ds2'\]\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, ["ds1"], "A").run(dict(ds2=mocked_dataset))
    def test_run_dict_unavailable(self, mocked_dataset):
        """Pass one dict which is different from expected."""
        pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
        pattern += r"following 1 input\(s\) instead: \['ds2'\]\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, dict(arg="ds1"), "A").run(dict(ds2=mocked_dataset))
    def test_run_dict_diff_size(self, mocked_dataset):
        """Pass two dict inputs when one is expected."""
        pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
        pattern += r"following 2 input\(s\) instead: \['ds1', 'ds2'\]\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, dict(arg="ds1"), "A").run(
                dict(ds1=mocked_dataset, ds2=2)
            )
class TestNodeRunInvalidOutput:
    """Node.run must reject function results that do not match the output spec."""
    def test_miss_matching_output_types(self, mocked_dataset):
        # dict output spec, but the function returns a non-dict
        pattern = r"The node output is a dictionary, whereas the function "
        pattern += r"output is not\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, "ds1", dict(a="ds")).run(dict(ds1=mocked_dataset))
    def test_miss_matching_output_keys(self, mocked_dataset):
        # spec declares keys {ret, ans}; function only returns {ret}
        pattern = r"The node's output keys {'ret'} do not match "
        pattern += r"with the returned output's keys"
        with pytest.raises(ValueError, match=pattern):
            node(one_in_dict_out, "ds1", dict(ret="B", ans="C")).run(
                dict(ds1=mocked_dataset)
            )
    def test_node_not_list_output(self, mocked_dataset):
        # list output spec, but the function returns a single dataset
        pattern = r"The node definition contains a list of outputs "
        pattern += r"\['B', 'C'\], whereas the node function returned "
        pattern += r"a `LambdaDataSet`"
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, "ds1", ["B", "C"]).run(dict(ds1=mocked_dataset))
    def test_node_wrong_num_of_outputs(self, mocker, mocked_dataset):
        # function returns two datasets while the spec declares three outputs
        def one_in_two_out(arg):
            load = mocker.Mock(return_value=42)
            save = mocker.Mock()
            return [LambdaDataSet(load, save), LambdaDataSet(load, save)]
        pattern = r"The node function returned 2 output\(s\), whereas "
        pattern += r"the node definition contains 3 output\(s\)\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_two_out, "ds1", ["A", "B", "C"]).run(dict(ds1=mocked_dataset))
| 42.888235 | 86 | 0.665478 |
import pytest
from kedro.io import LambdaDataSet
from kedro.pipeline import node
@pytest.fixture
def mocked_dataset(mocker):
    """LambdaDataSet stub: load() always yields 42, save() is a mock."""
    load = mocker.Mock(return_value=42)
    save = mocker.Mock()
    return LambdaDataSet(load, save)
def one_in_one_out(arg):
    """Identity helper: return the single input unchanged."""
    return arg
def one_in_dict_out(arg):
    """Wrap the single input in a dict under key 'ret'."""
    return dict(ret=arg)
def two_in_first_out(arg1, arg2):
    """Return the first of two inputs; the second is ignored."""
    return arg1
@pytest.fixture
def valid_nodes_with_inputs():
    """(node, inputs) pairs covering string, dict and list input specs."""
    return [
        (node(one_in_one_out, "ds1", "dsOut"), dict(ds1=42)),
        (node(one_in_dict_out, dict(arg="ds1"), dict(ret="dsOut")), dict(ds1=42)),
        (node(two_in_first_out, ["ds1", "ds2"], "dsOut"), dict(ds1=42, ds2=58)),
    ]
def test_valid_nodes(valid_nodes_with_inputs):
    """Every valid (node, inputs) pair must yield 42 under key 'dsOut'."""
    for node_, input_ in valid_nodes_with_inputs:
        output = node_.run(input_)
        assert output["dsOut"] == 42
def test_run_got_dataframe(mocked_dataset):
    """Passing a non-dictionary (a dataset object) must raise ValueError."""
    pattern = r"Node.run\(\) expects a dictionary or None, "
    pattern += r"but got <class \'kedro.io.lambda_data_set.LambdaDataSet\'> instead"
    with pytest.raises(ValueError, match=pattern):
        node(one_in_one_out, dict(arg="ds1"), "A").run(mocked_dataset)
class TestNodeRunInvalidInput:
    """Node.run must reject inputs that do not match the node's input spec."""
    def test_unresolved(self):
        """Pass no input when one is expected."""
        with pytest.raises(ValueError, match=r"expected one input"):
            node(one_in_one_out, "unresolved", "ds1").run(None)
    def test_no_inputs_node_error(self, mocked_dataset):
        """Pass one input when none is expected."""
        with pytest.raises(ValueError, match=r"expected no inputs"):
            node(lambda: 1, None, "A").run(dict(unexpected=mocked_dataset))
    def test_one_input_error(self, mocked_dataset):
        """Pass an input under the wrong name."""
        pattern = r"expected one input named 'ds1', but got the "
        pattern += r"following 1 input\(s\) instead: \['arg'\]"
        with pytest.raises(ValueError, match=pattern):
            node(one_in_dict_out, "ds1", dict(ret="B", ans="C")).run(
                dict(arg=mocked_dataset)
            )
    def test_run_diff_size_lists(self, mocked_dataset):
        """Pass only one dict input when two (list) are expected."""
        pattern = r"expected 2 input\(s\) \['ds1', 'ds2'\], but "
        pattern += r"got the following 1 input\(s\) instead."
        with pytest.raises(ValueError, match=pattern):
            node(two_in_first_out, ["ds1", "ds2"], "A").run(dict(ds1=mocked_dataset))
    def test_run_diff_size_list_dict(self, mocked_dataset):
        """Pass two dict inputs when one (list) is expected."""
        pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
        pattern += r"following 2 input\(s\) instead: \['ds1', 'ds2'\]\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, ["ds1"], "A").run(dict(ds1=mocked_dataset, ds2=2))
    def test_run_list_dict_unavailable(self, mocked_dataset):
        """Pass one dict whose key differs from the expected (list) name."""
        pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
        pattern += r"following 1 input\(s\) instead: \['ds2'\]\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, ["ds1"], "A").run(dict(ds2=mocked_dataset))
    def test_run_dict_unavailable(self, mocked_dataset):
        """Pass one dict whose key differs from the expected (dict) name."""
        pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
        pattern += r"following 1 input\(s\) instead: \['ds2'\]\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, dict(arg="ds1"), "A").run(dict(ds2=mocked_dataset))
    def test_run_dict_diff_size(self, mocked_dataset):
        """Pass two dict inputs when one is expected."""
        pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
        pattern += r"following 2 input\(s\) instead: \['ds1', 'ds2'\]\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, dict(arg="ds1"), "A").run(
                dict(ds1=mocked_dataset, ds2=2)
            )
class TestNodeRunInvalidOutput:
    """Node.run must reject function results that do not match the output spec."""
    def test_miss_matching_output_types(self, mocked_dataset):
        """Dict output spec, but the function returns a non-dict."""
        pattern = r"The node output is a dictionary, whereas the function "
        pattern += r"output is not\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, "ds1", dict(a="ds")).run(dict(ds1=mocked_dataset))
    def test_miss_matching_output_keys(self, mocked_dataset):
        """Spec declares keys {ret, ans}; the function only returns {ret}."""
        pattern = r"The node's output keys {'ret'} do not match "
        pattern += r"with the returned output's keys"
        with pytest.raises(ValueError, match=pattern):
            node(one_in_dict_out, "ds1", dict(ret="B", ans="C")).run(
                dict(ds1=mocked_dataset)
            )
    def test_node_not_list_output(self, mocked_dataset):
        """List output spec, but the function returns a single dataset."""
        pattern = r"The node definition contains a list of outputs "
        pattern += r"\['B', 'C'\], whereas the node function returned "
        pattern += r"a `LambdaDataSet`"
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, "ds1", ["B", "C"]).run(dict(ds1=mocked_dataset))
    def test_node_wrong_num_of_outputs(self, mocker, mocked_dataset):
        """Function returns two datasets while the spec declares three."""
        def one_in_two_out(arg):
            load = mocker.Mock(return_value=42)
            save = mocker.Mock()
            return [LambdaDataSet(load, save), LambdaDataSet(load, save)]
        pattern = r"The node function returned 2 output\(s\), whereas "
        pattern += r"the node definition contains 3 output\(s\)\."
        with pytest.raises(ValueError, match=pattern):
            node(one_in_two_out, "ds1", ["A", "B", "C"]).run(dict(ds1=mocked_dataset))
| true | true |
f731b5fbc7cc5db0a0a57a78bcbce234f1e07f3c | 2,103 | py | Python | beta_rec/recommenders/userKNN.py | mengzaiqiao/TVBR | cdac86a753c41f8f3c55a025be8d88dd305325f5 | [
"MIT"
] | 126 | 2020-03-19T02:30:23.000Z | 2022-03-15T11:10:46.000Z | beta_rec/recommenders/userKNN.py | mengzaiqiao/TVBR | cdac86a753c41f8f3c55a025be8d88dd305325f5 | [
"MIT"
] | 174 | 2020-03-15T17:28:10.000Z | 2022-03-15T22:38:51.000Z | beta_rec/recommenders/userKNN.py | mengzaiqiao/TVBR | cdac86a753c41f8f3c55a025be8d88dd305325f5 | [
"MIT"
] | 38 | 2020-03-19T00:38:47.000Z | 2022-02-24T11:03:12.000Z | import os
import time
from munch import munchify
from ray import tune
from ..core.recommender import Recommender
from ..models.userKNN import UserKNNEngine
from ..utils.monitor import Monitor
def tune_train(config):
    """Train the model with a hyper-parameter tuner (ray).

    Args:
        config (dict): All the parameters for the model; must contain a
            "data" entry holding the Dataset object.
    """
    data = config["data"]
    train_engine = UserKNN(munchify(config))
    result = train_engine.train(data)
    # busy-wait until all asynchronous evaluation workers have drained
    while train_engine.eval_engine.n_worker > 0:
        time.sleep(20)
    # NOTE(review): UserKNN.train in this module returns the string
    # "data loaded", so indexing result["valid_metric"] below would fail —
    # confirm which train() implementation is actually dispatched here.
    tune.report(
        valid_metric=result["valid_metric"],
        model_save_dir=result["model_save_dir"],
    )
class UserKNN(Recommender):
    """The User-based K Nearest Neighbour Model."""
    def __init__(self, config):
        """Initialize the recommender.

        Args:
            config: full configuration object (system/model sections).
        """
        super(UserKNN, self).__init__(config, name="UserKNN")
    def init_engine(self, data):
        """Create the UserKNNEngine sized for the given dataset.

        Args:
            data: the Dataset object (provides n_users / n_items).
        """
        self.config["model"]["n_users"] = data.n_users
        self.config["model"]["n_items"] = data.n_items
        self.engine = UserKNNEngine(self.config)
    def train(self, data):
        """Prepare the model on the given dataset.

        Args:
            data: the Dataset object.

        Returns:
            str: the literal "data loaded".
            NOTE(review): tune_train() in this module expects a dict with
            "valid_metric"/"model_save_dir" keys — confirm the intended
            return contract.
        """
        # select the compute device before building the engine
        self.gpu_id, self.config["device_str"] = self.get_device()
        self.config["model"]["n_users"] = data.n_users
        self.config["model"]["n_items"] = data.n_items
        self.monitor = Monitor(
            log_dir=self.config["system"]["run_dir"], delay=1, gpu_id=self.gpu_id
        )
        self.init_engine(data)
        print(type(data.train))
        print(data.train.head())
        self.engine.model.prepare_model(data)
        self.model_save_dir = os.path.join(
            self.config["system"]["model_save_dir"], self.config["model"]["save_name"]
        )
        self.config["run_time"] = self.monitor.stop()
        return "data loaded"
| 27.311688 | 86 | 0.611032 | import os
import time
from munch import munchify
from ray import tune
from ..core.recommender import Recommender
from ..models.userKNN import UserKNNEngine
from ..utils.monitor import Monitor
def tune_train(config):
    """Train UserKNN under a ray-tune trial and report its metrics."""
    data = config["data"]
    train_engine = UserKNN(munchify(config))
    result = train_engine.train(data)
    # busy-wait until all asynchronous evaluation workers have drained
    while train_engine.eval_engine.n_worker > 0:
        time.sleep(20)
    # NOTE(review): train() in this module returns a string, which would
    # make the indexing below fail — confirm the intended contract.
    tune.report(
        valid_metric=result["valid_metric"],
        model_save_dir=result["model_save_dir"],
    )
class UserKNN(Recommender):
    """The User-based K Nearest Neighbour recommender."""
    def __init__(self, config):
        """Register this recommender under the name "UserKNN"."""
        super(UserKNN, self).__init__(config, name="UserKNN")
    def init_engine(self, data):
        """Create the UserKNNEngine sized for the given dataset."""
        self.config["model"]["n_users"] = data.n_users
        self.config["model"]["n_items"] = data.n_items
        self.engine = UserKNNEngine(self.config)
    def train(self, data):
        """Prepare the model on `data`; returns the literal "data loaded"."""
        # select the compute device before building the engine
        self.gpu_id, self.config["device_str"] = self.get_device()
        self.config["model"]["n_users"] = data.n_users
        self.config["model"]["n_items"] = data.n_items
        self.monitor = Monitor(
            log_dir=self.config["system"]["run_dir"], delay=1, gpu_id=self.gpu_id
        )
        self.init_engine(data)
        print(type(data.train))
        print(data.train.head())
        self.engine.model.prepare_model(data)
        self.model_save_dir = os.path.join(
            self.config["system"]["model_save_dir"], self.config["model"]["save_name"]
        )
        self.config["run_time"] = self.monitor.stop()
        return "data loaded"
| true | true |
f731b60bb39975a9d09169527228d3f67c163104 | 2,647 | py | Python | sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py | yrbahn/feast | b5648f0cb6109594fcc8881237ddd583960d244f | [
"Apache-2.0"
] | null | null | null | sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py | yrbahn/feast | b5648f0cb6109594fcc8881237ddd583960d244f | [
"Apache-2.0"
] | null | null | null | sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py | yrbahn/feast | b5648f0cb6109594fcc8881237ddd583960d244f | [
"Apache-2.0"
] | null | null | null | from typing import Dict
import pandas as pd
from feast import RedshiftSource
from feast.data_source import DataSource
from feast.infra.offline_stores.redshift import RedshiftOfflineStoreConfig
from feast.infra.utils import aws_utils
from feast.repo_config import FeastConfigBaseModel
from tests.integration.feature_repos.universal.data_source_creator import (
DataSourceCreator,
)
class RedshiftDataSourceCreator(DataSourceCreator):
    """DataSourceCreator backed by AWS Redshift for integration tests.

    Fix: `tables` used to be a shared mutable *class* attribute, so every
    instance appended to (and tore down) the same list — one creator's
    teardown() could drop tables registered by another. Each instance now
    gets its own list in __init__; the class attribute is kept only for
    backward compatibility with any external reads.
    """

    tables = []

    def __init__(self, project_name: str):
        """Set up AWS clients and the Redshift offline-store config."""
        super().__init__()
        self.project_name = project_name
        # per-instance registry of tables created by *this* creator,
        # dropped again in teardown()
        self.tables = []
        self.client = aws_utils.get_redshift_data_client("us-west-2")
        self.s3 = aws_utils.get_s3_resource("us-west-2")
        self.offline_store_config = RedshiftOfflineStoreConfig(
            cluster_id="feast-integration-tests",
            region="us-west-2",
            user="admin",
            database="feast",
            s3_staging_location="s3://feast-integration-tests/redshift/tests/ingestion",
            iam_role="arn:aws:iam::402087665549:role/redshift_s3_access_role",
        )

    def create_data_sources(
        self,
        destination: str,
        df: pd.DataFrame,
        event_timestamp_column="ts",
        created_timestamp_column="created_ts",
        field_mapping: Dict[str, str] = None,
    ) -> DataSource:
        """Upload `df` to Redshift table `destination` and return its source."""
        # stage the dataframe through S3, then COPY it into Redshift
        aws_utils.upload_df_to_redshift(
            self.client,
            self.offline_store_config.cluster_id,
            self.offline_store_config.database,
            self.offline_store_config.user,
            self.s3,
            f"{self.offline_store_config.s3_staging_location}/copy/{destination}.parquet",
            self.offline_store_config.iam_role,
            destination,
            df,
        )
        # remember the table so teardown() can drop it
        self.tables.append(destination)
        return RedshiftSource(
            table=destination,
            event_timestamp_column=event_timestamp_column,
            created_timestamp_column=created_timestamp_column,
            date_partition_column="",
            field_mapping=field_mapping or {"ts_1": "ts"},
        )

    def create_offline_store_config(self) -> FeastConfigBaseModel:
        """Return the Redshift offline-store configuration."""
        return self.offline_store_config

    def get_prefixed_table_name(self, name: str, suffix: str) -> str:
        """Return the conventional `name_suffix` table name."""
        return f"{name}_{suffix}"

    def teardown(self):
        """Drop every table this creator instance uploaded."""
        for table in self.tables:
            aws_utils.execute_redshift_statement(
                self.client,
                self.offline_store_config.cluster_id,
                self.offline_store_config.database,
                self.offline_store_config.user,
                f"DROP TABLE IF EXISTS {table}",
            )
| 33.0875 | 90 | 0.656215 | from typing import Dict
import pandas as pd
from feast import RedshiftSource
from feast.data_source import DataSource
from feast.infra.offline_stores.redshift import RedshiftOfflineStoreConfig
from feast.infra.utils import aws_utils
from feast.repo_config import FeastConfigBaseModel
from tests.integration.feature_repos.universal.data_source_creator import (
DataSourceCreator,
)
class RedshiftDataSourceCreator(DataSourceCreator):
    """DataSourceCreator backed by AWS Redshift for integration tests."""
    # NOTE(review): shared mutable *class* attribute — every instance appends
    # to and tears down the same list; confirm whether per-instance state
    # (assigned in __init__) was intended.
    tables = []
    def __init__(self, project_name: str):
        """Set up AWS clients and the Redshift offline-store config."""
        super().__init__()
        self.project_name = project_name
        self.client = aws_utils.get_redshift_data_client("us-west-2")
        self.s3 = aws_utils.get_s3_resource("us-west-2")
        self.offline_store_config = RedshiftOfflineStoreConfig(
            cluster_id="feast-integration-tests",
            region="us-west-2",
            user="admin",
            database="feast",
            s3_staging_location="s3://feast-integration-tests/redshift/tests/ingestion",
            iam_role="arn:aws:iam::402087665549:role/redshift_s3_access_role",
        )
    def create_data_sources(
        self,
        destination: str,
        df: pd.DataFrame,
        event_timestamp_column="ts",
        created_timestamp_column="created_ts",
        field_mapping: Dict[str, str] = None,
    ) -> DataSource:
        """Upload `df` to Redshift table `destination` and return its source."""
        # stage the dataframe through S3, then COPY it into Redshift
        aws_utils.upload_df_to_redshift(
            self.client,
            self.offline_store_config.cluster_id,
            self.offline_store_config.database,
            self.offline_store_config.user,
            self.s3,
            f"{self.offline_store_config.s3_staging_location}/copy/{destination}.parquet",
            self.offline_store_config.iam_role,
            destination,
            df,
        )
        # remember the table so teardown() can drop it
        self.tables.append(destination)
        return RedshiftSource(
            table=destination,
            event_timestamp_column=event_timestamp_column,
            created_timestamp_column=created_timestamp_column,
            date_partition_column="",
            field_mapping=field_mapping or {"ts_1": "ts"},
        )
    def create_offline_store_config(self) -> FeastConfigBaseModel:
        """Return the Redshift offline-store configuration."""
        return self.offline_store_config
    def get_prefixed_table_name(self, name: str, suffix: str) -> str:
        """Return the conventional `name_suffix` table name."""
        return f"{name}_{suffix}"
    def teardown(self):
        """Drop every table registered in `self.tables`."""
        for table in self.tables:
            aws_utils.execute_redshift_statement(
                self.client,
                self.offline_store_config.cluster_id,
                self.offline_store_config.database,
                self.offline_store_config.user,
                f"DROP TABLE IF EXISTS {table}",
            )
| true | true |
f731b61e890831d1cd4fbf37958278bb584384cb | 2,988 | py | Python | setup.py | NeilBotelho/pip | d01bfcfaa13a4f06fa0ce61fa18cf06012f2e78f | [
"MIT"
] | null | null | null | setup.py | NeilBotelho/pip | d01bfcfaa13a4f06fa0ce61fa18cf06012f2e78f | [
"MIT"
] | 1 | 2021-10-04T12:25:25.000Z | 2021-10-05T07:30:54.000Z | setup.py | NeilBotelho/pip | d01bfcfaa13a4f06fa0ce61fa18cf06012f2e78f | [
"MIT"
] | 1 | 2020-06-01T19:13:16.000Z | 2020-06-01T19:13:16.000Z | # The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import codecs
import os
import sys
from setuptools import find_packages, setup
def read(rel_path):
here = os.path.abspath(os.path.dirname(__file__))
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
with codecs.open(os.path.join(here, rel_path), 'r') as fp:
return fp.read()
def get_version(rel_path):
for line in read(rel_path).splitlines():
if line.startswith('__version__'):
# __version__ = "0.9"
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
else:
raise RuntimeError("Unable to find version string.")
long_description = read('README.rst')
setup(
name="pip",
version=get_version("src/pip/__init__.py"),
description="The PyPA recommended tool for installing Python packages.",
long_description=long_description,
license='MIT',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Software Development :: Build Tools",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
url='https://pip.pypa.io/',
keywords='distutils easy_install egg setuptools wheel virtualenv',
project_urls={
"Documentation": "https://pip.pypa.io",
"Source": "https://github.com/pypa/pip",
"Changelog": "https://pip.pypa.io/en/stable/news/",
},
author='The pip developers',
author_email='distutils-sig@python.org',
package_dir={"": "src"},
packages=find_packages(
where="src",
exclude=["contrib", "docs", "tests*", "tasks"],
),
package_data={
"pip._vendor": ["vendor.txt"],
"pip._vendor.certifi": ["*.pem"],
"pip._vendor.requests": ["*.pem"],
"pip._vendor.distlib._backport": ["sysconfig.cfg"],
"pip._vendor.distlib": ["t32.exe", "t64.exe", "w32.exe", "w64.exe"],
},
entry_points={
"console_scripts": [
"pip=pip._internal.cli.main:main",
"pip{}=pip._internal.cli.main:main".format(sys.version_info[0]),
"pip{}.{}=pip._internal.cli.main:main".format(
*sys.version_info[:2]
),
],
},
zip_safe=False,
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
)
| 33.2 | 76 | 0.598728 |
import codecs
import os
import sys
from setuptools import find_packages, setup
def read(rel_path):
here = os.path.abspath(os.path.dirname(__file__))
(os.path.join(here, rel_path), 'r') as fp:
return fp.read()
def get_version(rel_path):
for line in read(rel_path).splitlines():
if line.startswith('__version__'):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
else:
raise RuntimeError("Unable to find version string.")
long_description = read('README.rst')
setup(
name="pip",
version=get_version("src/pip/__init__.py"),
description="The PyPA recommended tool for installing Python packages.",
long_description=long_description,
license='MIT',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Software Development :: Build Tools",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
url='https://pip.pypa.io/',
keywords='distutils easy_install egg setuptools wheel virtualenv',
project_urls={
"Documentation": "https://pip.pypa.io",
"Source": "https://github.com/pypa/pip",
"Changelog": "https://pip.pypa.io/en/stable/news/",
},
author='The pip developers',
author_email='distutils-sig@python.org',
package_dir={"": "src"},
packages=find_packages(
where="src",
exclude=["contrib", "docs", "tests*", "tasks"],
),
package_data={
"pip._vendor": ["vendor.txt"],
"pip._vendor.certifi": ["*.pem"],
"pip._vendor.requests": ["*.pem"],
"pip._vendor.distlib._backport": ["sysconfig.cfg"],
"pip._vendor.distlib": ["t32.exe", "t64.exe", "w32.exe", "w64.exe"],
},
entry_points={
"console_scripts": [
"pip=pip._internal.cli.main:main",
"pip{}=pip._internal.cli.main:main".format(sys.version_info[0]),
"pip{}.{}=pip._internal.cli.main:main".format(
*sys.version_info[:2]
),
],
},
zip_safe=False,
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
)
| true | true |
f731b631d25327869866e36afed8e0d2ed3b20d4 | 184 | py | Python | domain/message.py | wazatoki/IotLoggerClientSample | e69f0f5f4caa3714041f4d5e8a16cde03bbbd912 | [
"MIT"
] | null | null | null | domain/message.py | wazatoki/IotLoggerClientSample | e69f0f5f4caa3714041f4d5e8a16cde03bbbd912 | [
"MIT"
] | 2 | 2021-04-06T18:18:36.000Z | 2021-06-02T02:26:19.000Z | domain/message.py | wazatoki/IotLoggerClientSample | e69f0f5f4caa3714041f4d5e8a16cde03bbbd912 | [
"MIT"
] | null | null | null | class message_data:
device_id = ""
message = ""
def get_Data(self):
return {
"deviceID": self.device_id,
"message": self.message
} | 18.4 | 39 | 0.505435 | class message_data:
device_id = ""
message = ""
def get_Data(self):
return {
"deviceID": self.device_id,
"message": self.message
} | true | true |
f731b70415e7f66bf51834ee97e333db6e5dc6c6 | 52,857 | py | Python | Lib/test/test_array.py | chexca/cpython | cfc6ce4d40f2f01314b7e283fb972a7bb3ed3faa | [
"CNRI-Python-GPL-Compatible"
] | 1,318 | 2019-07-11T10:34:39.000Z | 2022-03-29T15:05:19.000Z | Lib/test/test_array.py | chexca/cpython | cfc6ce4d40f2f01314b7e283fb972a7bb3ed3faa | [
"CNRI-Python-GPL-Compatible"
] | 387 | 2020-12-15T14:54:04.000Z | 2022-03-31T07:00:21.000Z | Lib/test/test_array.py | chexca/cpython | cfc6ce4d40f2f01314b7e283fb972a7bb3ed3faa | [
"CNRI-Python-GPL-Compatible"
] | 66 | 2019-11-11T15:33:12.000Z | 2022-03-01T07:55:55.000Z | """Test the arraymodule.
Roger E. Masse
"""
import unittest
from test import support
from test.support import _2G
import weakref
import pickle
import operator
import struct
import sys
import array
from array import _array_reconstructor as array_reconstructor
sizeof_wchar = array.array('u').itemsize
class ArraySubclass(array.array):
pass
class ArraySubclassWithKwargs(array.array):
def __init__(self, typecode, newarg=None):
array.array.__init__(self)
typecodes = 'ubBhHiIlLfdqQ'
class MiscTest(unittest.TestCase):
def test_bad_constructor(self):
self.assertRaises(TypeError, array.array)
self.assertRaises(TypeError, array.array, spam=42)
self.assertRaises(TypeError, array.array, 'xx')
self.assertRaises(ValueError, array.array, 'x')
def test_empty(self):
# Exercise code for handling zero-length arrays
a = array.array('B')
a[:] = a
self.assertEqual(len(a), 0)
self.assertEqual(len(a + a), 0)
self.assertEqual(len(a * 3), 0)
a += a
self.assertEqual(len(a), 0)
# Machine format codes.
#
# Search for "enum machine_format_code" in Modules/arraymodule.c to get the
# authoritative values.
UNKNOWN_FORMAT = -1
UNSIGNED_INT8 = 0
SIGNED_INT8 = 1
UNSIGNED_INT16_LE = 2
UNSIGNED_INT16_BE = 3
SIGNED_INT16_LE = 4
SIGNED_INT16_BE = 5
UNSIGNED_INT32_LE = 6
UNSIGNED_INT32_BE = 7
SIGNED_INT32_LE = 8
SIGNED_INT32_BE = 9
UNSIGNED_INT64_LE = 10
UNSIGNED_INT64_BE = 11
SIGNED_INT64_LE = 12
SIGNED_INT64_BE = 13
IEEE_754_FLOAT_LE = 14
IEEE_754_FLOAT_BE = 15
IEEE_754_DOUBLE_LE = 16
IEEE_754_DOUBLE_BE = 17
UTF16_LE = 18
UTF16_BE = 19
UTF32_LE = 20
UTF32_BE = 21
class ArrayReconstructorTest(unittest.TestCase):
def test_error(self):
self.assertRaises(TypeError, array_reconstructor,
"", "b", 0, b"")
self.assertRaises(TypeError, array_reconstructor,
str, "b", 0, b"")
self.assertRaises(TypeError, array_reconstructor,
array.array, "b", '', b"")
self.assertRaises(TypeError, array_reconstructor,
array.array, "b", 0, "")
self.assertRaises(ValueError, array_reconstructor,
array.array, "?", 0, b"")
self.assertRaises(ValueError, array_reconstructor,
array.array, "b", UNKNOWN_FORMAT, b"")
self.assertRaises(ValueError, array_reconstructor,
array.array, "b", 22, b"")
self.assertRaises(ValueError, array_reconstructor,
array.array, "d", 16, b"a")
def test_numbers(self):
testcases = (
(['B', 'H', 'I', 'L'], UNSIGNED_INT8, '=BBBB',
[0x80, 0x7f, 0, 0xff]),
(['b', 'h', 'i', 'l'], SIGNED_INT8, '=bbb',
[-0x80, 0x7f, 0]),
(['H', 'I', 'L'], UNSIGNED_INT16_LE, '<HHHH',
[0x8000, 0x7fff, 0, 0xffff]),
(['H', 'I', 'L'], UNSIGNED_INT16_BE, '>HHHH',
[0x8000, 0x7fff, 0, 0xffff]),
(['h', 'i', 'l'], SIGNED_INT16_LE, '<hhh',
[-0x8000, 0x7fff, 0]),
(['h', 'i', 'l'], SIGNED_INT16_BE, '>hhh',
[-0x8000, 0x7fff, 0]),
(['I', 'L'], UNSIGNED_INT32_LE, '<IIII',
[1<<31, (1<<31)-1, 0, (1<<32)-1]),
(['I', 'L'], UNSIGNED_INT32_BE, '>IIII',
[1<<31, (1<<31)-1, 0, (1<<32)-1]),
(['i', 'l'], SIGNED_INT32_LE, '<iii',
[-1<<31, (1<<31)-1, 0]),
(['i', 'l'], SIGNED_INT32_BE, '>iii',
[-1<<31, (1<<31)-1, 0]),
(['L'], UNSIGNED_INT64_LE, '<QQQQ',
[1<<31, (1<<31)-1, 0, (1<<32)-1]),
(['L'], UNSIGNED_INT64_BE, '>QQQQ',
[1<<31, (1<<31)-1, 0, (1<<32)-1]),
(['l'], SIGNED_INT64_LE, '<qqq',
[-1<<31, (1<<31)-1, 0]),
(['l'], SIGNED_INT64_BE, '>qqq',
[-1<<31, (1<<31)-1, 0]),
# The following tests for INT64 will raise an OverflowError
# when run on a 32-bit machine. The tests are simply skipped
# in that case.
(['L'], UNSIGNED_INT64_LE, '<QQQQ',
[1<<63, (1<<63)-1, 0, (1<<64)-1]),
(['L'], UNSIGNED_INT64_BE, '>QQQQ',
[1<<63, (1<<63)-1, 0, (1<<64)-1]),
(['l'], SIGNED_INT64_LE, '<qqq',
[-1<<63, (1<<63)-1, 0]),
(['l'], SIGNED_INT64_BE, '>qqq',
[-1<<63, (1<<63)-1, 0]),
(['f'], IEEE_754_FLOAT_LE, '<ffff',
[16711938.0, float('inf'), float('-inf'), -0.0]),
(['f'], IEEE_754_FLOAT_BE, '>ffff',
[16711938.0, float('inf'), float('-inf'), -0.0]),
(['d'], IEEE_754_DOUBLE_LE, '<dddd',
[9006104071832581.0, float('inf'), float('-inf'), -0.0]),
(['d'], IEEE_754_DOUBLE_BE, '>dddd',
[9006104071832581.0, float('inf'), float('-inf'), -0.0])
)
for testcase in testcases:
valid_typecodes, mformat_code, struct_fmt, values = testcase
arraystr = struct.pack(struct_fmt, *values)
for typecode in valid_typecodes:
try:
a = array.array(typecode, values)
except OverflowError:
continue # Skip this test case.
b = array_reconstructor(
array.array, typecode, mformat_code, arraystr)
self.assertEqual(a, b,
msg="{0!r} != {1!r}; testcase={2!r}".format(a, b, testcase))
def test_unicode(self):
teststr = "Bonne Journ\xe9e \U0002030a\U00020347"
testcases = (
(UTF16_LE, "UTF-16-LE"),
(UTF16_BE, "UTF-16-BE"),
(UTF32_LE, "UTF-32-LE"),
(UTF32_BE, "UTF-32-BE")
)
for testcase in testcases:
mformat_code, encoding = testcase
a = array.array('u', teststr)
b = array_reconstructor(
array.array, 'u', mformat_code, teststr.encode(encoding))
self.assertEqual(a, b,
msg="{0!r} != {1!r}; testcase={2!r}".format(a, b, testcase))
class BaseTest:
# Required class attributes (provided by subclasses
# typecode: the typecode to test
# example: an initializer usable in the constructor for this type
# smallerexample: the same length as example, but smaller
# biggerexample: the same length as example, but bigger
# outside: An entry that is not in example
# minitemsize: the minimum guaranteed itemsize
def assertEntryEqual(self, entry1, entry2):
self.assertEqual(entry1, entry2)
def badtypecode(self):
# Return a typecode that is different from our own
return typecodes[(typecodes.index(self.typecode)+1) % len(typecodes)]
def test_constructor(self):
a = array.array(self.typecode)
self.assertEqual(a.typecode, self.typecode)
self.assertGreaterEqual(a.itemsize, self.minitemsize)
self.assertRaises(TypeError, array.array, self.typecode, None)
def test_len(self):
a = array.array(self.typecode)
a.append(self.example[0])
self.assertEqual(len(a), 1)
a = array.array(self.typecode, self.example)
self.assertEqual(len(a), len(self.example))
def test_buffer_info(self):
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.buffer_info, 42)
bi = a.buffer_info()
self.assertIsInstance(bi, tuple)
self.assertEqual(len(bi), 2)
self.assertIsInstance(bi[0], int)
self.assertIsInstance(bi[1], int)
self.assertEqual(bi[1], len(a))
def test_byteswap(self):
if self.typecode == 'u':
example = '\U00100100'
else:
example = self.example
a = array.array(self.typecode, example)
self.assertRaises(TypeError, a.byteswap, 42)
if a.itemsize in (1, 2, 4, 8):
b = array.array(self.typecode, example)
b.byteswap()
if a.itemsize==1:
self.assertEqual(a, b)
else:
self.assertNotEqual(a, b)
b.byteswap()
self.assertEqual(a, b)
def test_copy(self):
import copy
a = array.array(self.typecode, self.example)
b = copy.copy(a)
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
def test_deepcopy(self):
import copy
a = array.array(self.typecode, self.example)
b = copy.deepcopy(a)
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
def test_reduce_ex(self):
a = array.array(self.typecode, self.example)
for protocol in range(3):
self.assertIs(a.__reduce_ex__(protocol)[0], array.array)
for protocol in range(3, pickle.HIGHEST_PROTOCOL + 1):
self.assertIs(a.__reduce_ex__(protocol)[0], array_reconstructor)
def test_pickle(self):
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
a = array.array(self.typecode, self.example)
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
a = ArraySubclass(self.typecode, self.example)
a.x = 10
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
self.assertEqual(a.x, b.x)
self.assertEqual(type(a), type(b))
def test_pickle_for_empty_array(self):
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
a = array.array(self.typecode)
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
a = ArraySubclass(self.typecode)
a.x = 10
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
self.assertEqual(a.x, b.x)
self.assertEqual(type(a), type(b))
def test_iterator_pickle(self):
orig = array.array(self.typecode, self.example)
data = list(orig)
data2 = data[::-1]
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
# initial iterator
itorig = iter(orig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(type(it), type(itorig))
self.assertEqual(list(it), data + data2)
# running iterator
next(itorig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(type(it), type(itorig))
self.assertEqual(list(it), data[1:] + data2)
# empty iterator
for i in range(1, len(data)):
next(itorig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(type(it), type(itorig))
self.assertEqual(list(it), data2)
# exhausted iterator
self.assertRaises(StopIteration, next, itorig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(list(it), [])
def test_exhausted_iterator(self):
a = array.array(self.typecode, self.example)
self.assertEqual(list(a), list(self.example))
exhit = iter(a)
empit = iter(a)
for x in exhit: # exhaust the iterator
next(empit) # not exhausted
a.append(self.outside)
self.assertEqual(list(exhit), [])
self.assertEqual(list(empit), [self.outside])
self.assertEqual(list(a), list(self.example) + [self.outside])
def test_insert(self):
a = array.array(self.typecode, self.example)
a.insert(0, self.example[0])
self.assertEqual(len(a), 1+len(self.example))
self.assertEqual(a[0], a[1])
self.assertRaises(TypeError, a.insert)
self.assertRaises(TypeError, a.insert, None)
self.assertRaises(TypeError, a.insert, 0, None)
a = array.array(self.typecode, self.example)
a.insert(-1, self.example[0])
self.assertEqual(
a,
array.array(
self.typecode,
self.example[:-1] + self.example[:1] + self.example[-1:]
)
)
a = array.array(self.typecode, self.example)
a.insert(-1000, self.example[0])
self.assertEqual(
a,
array.array(self.typecode, self.example[:1] + self.example)
)
a = array.array(self.typecode, self.example)
a.insert(1000, self.example[0])
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[:1])
)
def test_tofromfile(self):
a = array.array(self.typecode, 2*self.example)
self.assertRaises(TypeError, a.tofile)
support.unlink(support.TESTFN)
f = open(support.TESTFN, 'wb')
try:
a.tofile(f)
f.close()
b = array.array(self.typecode)
f = open(support.TESTFN, 'rb')
self.assertRaises(TypeError, b.fromfile)
b.fromfile(f, len(self.example))
self.assertEqual(b, array.array(self.typecode, self.example))
self.assertNotEqual(a, b)
self.assertRaises(EOFError, b.fromfile, f, len(self.example)+1)
self.assertEqual(a, b)
f.close()
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
def test_fromfile_ioerror(self):
# Issue #5395: Check if fromfile raises a proper OSError
# instead of EOFError.
a = array.array(self.typecode)
f = open(support.TESTFN, 'wb')
try:
self.assertRaises(OSError, a.fromfile, f, len(self.example))
finally:
f.close()
support.unlink(support.TESTFN)
def test_filewrite(self):
a = array.array(self.typecode, 2*self.example)
f = open(support.TESTFN, 'wb')
try:
f.write(a)
f.close()
b = array.array(self.typecode)
f = open(support.TESTFN, 'rb')
b.fromfile(f, len(self.example))
self.assertEqual(b, array.array(self.typecode, self.example))
self.assertNotEqual(a, b)
b.fromfile(f, len(self.example))
self.assertEqual(a, b)
f.close()
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
def test_tofromlist(self):
a = array.array(self.typecode, 2*self.example)
b = array.array(self.typecode)
self.assertRaises(TypeError, a.tolist, 42)
self.assertRaises(TypeError, b.fromlist)
self.assertRaises(TypeError, b.fromlist, 42)
self.assertRaises(TypeError, b.fromlist, [None])
b.fromlist(a.tolist())
self.assertEqual(a, b)
def test_tofrombytes(self):
a = array.array(self.typecode, 2*self.example)
b = array.array(self.typecode)
self.assertRaises(TypeError, a.tobytes, 42)
self.assertRaises(TypeError, b.frombytes)
self.assertRaises(TypeError, b.frombytes, 42)
b.frombytes(a.tobytes())
c = array.array(self.typecode, bytearray(a.tobytes()))
self.assertEqual(a, b)
self.assertEqual(a, c)
if a.itemsize>1:
self.assertRaises(ValueError, b.frombytes, b"x")
def test_fromarray(self):
a = array.array(self.typecode, self.example)
b = array.array(self.typecode, a)
self.assertEqual(a, b)
def test_repr(self):
a = array.array(self.typecode, 2*self.example)
self.assertEqual(a, eval(repr(a), {"array": array.array}))
a = array.array(self.typecode)
self.assertEqual(repr(a), "array('%s')" % self.typecode)
def test_str(self):
a = array.array(self.typecode, 2*self.example)
str(a)
def test_cmp(self):
a = array.array(self.typecode, self.example)
self.assertIs(a == 42, False)
self.assertIs(a != 42, True)
self.assertIs(a == a, True)
self.assertIs(a != a, False)
self.assertIs(a < a, False)
self.assertIs(a <= a, True)
self.assertIs(a > a, False)
self.assertIs(a >= a, True)
al = array.array(self.typecode, self.smallerexample)
ab = array.array(self.typecode, self.biggerexample)
self.assertIs(a == 2*a, False)
self.assertIs(a != 2*a, True)
self.assertIs(a < 2*a, True)
self.assertIs(a <= 2*a, True)
self.assertIs(a > 2*a, False)
self.assertIs(a >= 2*a, False)
self.assertIs(a == al, False)
self.assertIs(a != al, True)
self.assertIs(a < al, False)
self.assertIs(a <= al, False)
self.assertIs(a > al, True)
self.assertIs(a >= al, True)
self.assertIs(a == ab, False)
self.assertIs(a != ab, True)
self.assertIs(a < ab, True)
self.assertIs(a <= ab, True)
self.assertIs(a > ab, False)
self.assertIs(a >= ab, False)
def test_add(self):
a = array.array(self.typecode, self.example) \
+ array.array(self.typecode, self.example[::-1])
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[::-1])
)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.__add__, b)
self.assertRaises(TypeError, a.__add__, "bad")
def test_iadd(self):
a = array.array(self.typecode, self.example[::-1])
b = a
a += array.array(self.typecode, 2*self.example)
self.assertIs(a, b)
self.assertEqual(
a,
array.array(self.typecode, self.example[::-1]+2*self.example)
)
a = array.array(self.typecode, self.example)
a += a
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example)
)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.__add__, b)
self.assertRaises(TypeError, a.__iadd__, "bad")
def test_mul(self):
a = 5*array.array(self.typecode, self.example)
self.assertEqual(
a,
array.array(self.typecode, 5*self.example)
)
a = array.array(self.typecode, self.example)*5
self.assertEqual(
a,
array.array(self.typecode, self.example*5)
)
a = 0*array.array(self.typecode, self.example)
self.assertEqual(
a,
array.array(self.typecode)
)
a = (-1)*array.array(self.typecode, self.example)
self.assertEqual(
a,
array.array(self.typecode)
)
a = 5 * array.array(self.typecode, self.example[:1])
self.assertEqual(
a,
array.array(self.typecode, [a[0]] * 5)
)
self.assertRaises(TypeError, a.__mul__, "bad")
def test_imul(self):
a = array.array(self.typecode, self.example)
b = a
a *= 5
self.assertIs(a, b)
self.assertEqual(
a,
array.array(self.typecode, 5*self.example)
)
a *= 0
self.assertIs(a, b)
self.assertEqual(a, array.array(self.typecode))
a *= 1000
self.assertIs(a, b)
self.assertEqual(a, array.array(self.typecode))
a *= -1
self.assertIs(a, b)
self.assertEqual(a, array.array(self.typecode))
a = array.array(self.typecode, self.example)
a *= -1
self.assertEqual(a, array.array(self.typecode))
self.assertRaises(TypeError, a.__imul__, "bad")
def test_getitem(self):
a = array.array(self.typecode, self.example)
self.assertEntryEqual(a[0], self.example[0])
self.assertEntryEqual(a[0], self.example[0])
self.assertEntryEqual(a[-1], self.example[-1])
self.assertEntryEqual(a[-1], self.example[-1])
self.assertEntryEqual(a[len(self.example)-1], self.example[-1])
self.assertEntryEqual(a[-len(self.example)], self.example[0])
self.assertRaises(TypeError, a.__getitem__)
self.assertRaises(IndexError, a.__getitem__, len(self.example))
self.assertRaises(IndexError, a.__getitem__, -len(self.example)-1)
def test_setitem(self):
a = array.array(self.typecode, self.example)
a[0] = a[-1]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[0] = a[-1]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[-1] = a[0]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[-1] = a[0]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[len(self.example)-1] = a[0]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[-len(self.example)] = a[-1]
self.assertEntryEqual(a[0], a[-1])
self.assertRaises(TypeError, a.__setitem__)
self.assertRaises(TypeError, a.__setitem__, None)
self.assertRaises(TypeError, a.__setitem__, 0, None)
self.assertRaises(
IndexError,
a.__setitem__,
len(self.example), self.example[0]
)
self.assertRaises(
IndexError,
a.__setitem__,
-len(self.example)-1, self.example[0]
)
def test_delitem(self):
a = array.array(self.typecode, self.example)
del a[0]
self.assertEqual(
a,
array.array(self.typecode, self.example[1:])
)
a = array.array(self.typecode, self.example)
del a[-1]
self.assertEqual(
a,
array.array(self.typecode, self.example[:-1])
)
a = array.array(self.typecode, self.example)
del a[len(self.example)-1]
self.assertEqual(
a,
array.array(self.typecode, self.example[:-1])
)
a = array.array(self.typecode, self.example)
del a[-len(self.example)]
self.assertEqual(
a,
array.array(self.typecode, self.example[1:])
)
self.assertRaises(TypeError, a.__delitem__)
self.assertRaises(TypeError, a.__delitem__, None)
self.assertRaises(IndexError, a.__delitem__, len(self.example))
self.assertRaises(IndexError, a.__delitem__, -len(self.example)-1)
def test_getslice(self):
a = array.array(self.typecode, self.example)
self.assertEqual(a[:], a)
self.assertEqual(
a[1:],
array.array(self.typecode, self.example[1:])
)
self.assertEqual(
a[:1],
array.array(self.typecode, self.example[:1])
)
self.assertEqual(
a[:-1],
array.array(self.typecode, self.example[:-1])
)
self.assertEqual(
a[-1:],
array.array(self.typecode, self.example[-1:])
)
self.assertEqual(
a[-1:-1],
array.array(self.typecode)
)
self.assertEqual(
a[2:1],
array.array(self.typecode)
)
self.assertEqual(
a[1000:],
array.array(self.typecode)
)
self.assertEqual(a[-1000:], a)
self.assertEqual(a[:1000], a)
self.assertEqual(
a[:-1000],
array.array(self.typecode)
)
self.assertEqual(a[-1000:1000], a)
self.assertEqual(
a[2000:1000],
array.array(self.typecode)
)
def test_extended_getslice(self):
# Test extended slicing by comparing with list slicing
# (Assumes list conversion works correctly, too)
a = array.array(self.typecode, self.example)
indices = (0, None, 1, 3, 19, 100, sys.maxsize, -1, -2, -31, -100)
for start in indices:
for stop in indices:
# Everything except the initial 0 (invalid step)
for step in indices[1:]:
self.assertEqual(list(a[start:stop:step]),
list(a)[start:stop:step])
def test_setslice(self):
a = array.array(self.typecode, self.example)
a[:1] = a
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[1:])
)
a = array.array(self.typecode, self.example)
a[:-1] = a
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[-1:])
)
a = array.array(self.typecode, self.example)
a[-1:] = a
self.assertEqual(
a,
array.array(self.typecode, self.example[:-1] + self.example)
)
a = array.array(self.typecode, self.example)
a[1:] = a
self.assertEqual(
a,
array.array(self.typecode, self.example[:1] + self.example)
)
a = array.array(self.typecode, self.example)
a[1:-1] = a
self.assertEqual(
a,
array.array(
self.typecode,
self.example[:1] + self.example + self.example[-1:]
)
)
a = array.array(self.typecode, self.example)
a[1000:] = a
self.assertEqual(
a,
array.array(self.typecode, 2*self.example)
)
a = array.array(self.typecode, self.example)
a[-1000:] = a
self.assertEqual(
a,
array.array(self.typecode, self.example)
)
a = array.array(self.typecode, self.example)
a[:1000] = a
self.assertEqual(
a,
array.array(self.typecode, self.example)
)
a = array.array(self.typecode, self.example)
a[:-1000] = a
self.assertEqual(
a,
array.array(self.typecode, 2*self.example)
)
a = array.array(self.typecode, self.example)
a[1:0] = a
self.assertEqual(
a,
array.array(self.typecode, self.example[:1] + self.example + self.example[1:])
)
a = array.array(self.typecode, self.example)
a[2000:1000] = a
self.assertEqual(
a,
array.array(self.typecode, 2*self.example)
)
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.__setitem__, slice(0, 0), None)
self.assertRaises(TypeError, a.__setitem__, slice(0, 1), None)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.__setitem__, slice(0, 0), b)
self.assertRaises(TypeError, a.__setitem__, slice(0, 1), b)
def test_extended_set_del_slice(self):
indices = (0, None, 1, 3, 19, 100, sys.maxsize, -1, -2, -31, -100)
for start in indices:
for stop in indices:
# Everything except the initial 0 (invalid step)
for step in indices[1:]:
a = array.array(self.typecode, self.example)
L = list(a)
# Make sure we have a slice of exactly the right length,
# but with (hopefully) different data.
data = L[start:stop:step]
data.reverse()
L[start:stop:step] = data
a[start:stop:step] = array.array(self.typecode, data)
self.assertEqual(a, array.array(self.typecode, L))
del L[start:stop:step]
del a[start:stop:step]
self.assertEqual(a, array.array(self.typecode, L))
def test_index(self):
example = 2*self.example
a = array.array(self.typecode, example)
self.assertRaises(TypeError, a.index)
for x in example:
self.assertEqual(a.index(x), example.index(x))
self.assertRaises(ValueError, a.index, None)
self.assertRaises(ValueError, a.index, self.outside)
def test_count(self):
example = 2*self.example
a = array.array(self.typecode, example)
self.assertRaises(TypeError, a.count)
for x in example:
self.assertEqual(a.count(x), example.count(x))
self.assertEqual(a.count(self.outside), 0)
self.assertEqual(a.count(None), 0)
def test_remove(self):
for x in self.example:
example = 2*self.example
a = array.array(self.typecode, example)
pos = example.index(x)
example2 = example[:pos] + example[pos+1:]
a.remove(x)
self.assertEqual(a, array.array(self.typecode, example2))
a = array.array(self.typecode, self.example)
self.assertRaises(ValueError, a.remove, self.outside)
self.assertRaises(ValueError, a.remove, None)
def test_pop(self):
a = array.array(self.typecode)
self.assertRaises(IndexError, a.pop)
a = array.array(self.typecode, 2*self.example)
self.assertRaises(TypeError, a.pop, 42, 42)
self.assertRaises(TypeError, a.pop, None)
self.assertRaises(IndexError, a.pop, len(a))
self.assertRaises(IndexError, a.pop, -len(a)-1)
self.assertEntryEqual(a.pop(0), self.example[0])
self.assertEqual(
a,
array.array(self.typecode, self.example[1:]+self.example)
)
self.assertEntryEqual(a.pop(1), self.example[2])
self.assertEqual(
a,
array.array(self.typecode, self.example[1:2]+self.example[3:]+self.example)
)
self.assertEntryEqual(a.pop(0), self.example[1])
self.assertEntryEqual(a.pop(), self.example[-1])
self.assertEqual(
a,
array.array(self.typecode, self.example[3:]+self.example[:-1])
)
def test_reverse(self):
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.reverse, 42)
a.reverse()
self.assertEqual(
a,
array.array(self.typecode, self.example[::-1])
)
def test_extend(self):
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.extend)
a.extend(array.array(self.typecode, self.example[::-1]))
self.assertEqual(
a,
array.array(self.typecode, self.example+self.example[::-1])
)
a = array.array(self.typecode, self.example)
a.extend(a)
self.assertEqual(
a,
array.array(self.typecode, self.example+self.example)
)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.extend, b)
a = array.array(self.typecode, self.example)
a.extend(self.example[::-1])
self.assertEqual(
a,
array.array(self.typecode, self.example+self.example[::-1])
)
    def test_constructor_with_iterable_argument(self):
        """The constructor consumes any iterable and propagates errors
        raised while iterating it."""
        a = array.array(self.typecode, iter(self.example))
        b = array.array(self.typecode, self.example)
        self.assertEqual(a, b)
        # non-iterable argument
        self.assertRaises(TypeError, array.array, self.typecode, 10)
        # pass through errors raised in __iter__
        class A:
            def __iter__(self):
                raise UnicodeError
        self.assertRaises(UnicodeError, array.array, self.typecode, A())
        # pass through errors raised in next()
        def B():
            raise UnicodeError
            yield None
        self.assertRaises(UnicodeError, array.array, self.typecode, B())
def test_coveritertraverse(self):
try:
import gc
except ImportError:
self.skipTest('gc module not available')
a = array.array(self.typecode)
l = [iter(a)]
l.append(l)
gc.collect()
    def test_buffer(self):
        """While a memoryview export is live, every resizing operation on
        the array must raise BufferError and leave the data untouched."""
        a = array.array(self.typecode, self.example)
        m = memoryview(a)
        expected = m.tobytes()
        self.assertEqual(a.tobytes(), expected)
        self.assertEqual(a.tobytes()[0], expected[0])
        # Resizing is forbidden when there are buffer exports.
        # For issue 4509, we also check after each error that
        # the array was not modified.
        self.assertRaises(BufferError, a.append, a[0])
        self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, a.extend, a[0:1])
        self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, a.remove, a[0])
        self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, a.pop, 0)
        self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, a.fromlist, a.tolist())
        self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, a.frombytes, a.tobytes())
        self.assertEqual(m.tobytes(), expected)
        if self.typecode == 'u':
            self.assertRaises(BufferError, a.fromunicode, a.tounicode())
            self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, operator.imul, a, 2)
        self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, operator.imul, a, 0)
        self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, operator.setitem, a, slice(0, 0), a)
        self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, operator.delitem, a, 0)
        self.assertEqual(m.tobytes(), expected)
        self.assertRaises(BufferError, operator.delitem, a, slice(0, 1))
        self.assertEqual(m.tobytes(), expected)
    def test_weakref(self):
        """Arrays support weak references; a proxy to a collected array
        raises ReferenceError."""
        s = array.array(self.typecode, self.example)
        p = weakref.proxy(s)
        self.assertEqual(p.tobytes(), s.tobytes())
        # Drop the only strong reference; on CPython refcounting reclaims
        # the array immediately and the proxy goes dead.
        s = None
        self.assertRaises(ReferenceError, len, p)
    @unittest.skipUnless(hasattr(sys, 'getrefcount'),
                         'test needs sys.getrefcount()')
    def test_bug_782369(self):
        """Regression for SF bug #782369: constructing arrays from range()
        must not leak references (checked via the cached int 10)."""
        # Warm-up loop so any one-time allocations happen first.
        for i in range(10):
            b = array.array('B', range(64))
        rc = sys.getrefcount(10)
        for i in range(10):
            b = array.array('B', range(64))
        self.assertEqual(rc, sys.getrefcount(10))
    def test_subclass_with_kwargs(self):
        """Subclasses taking keyword arguments must be constructible."""
        # SF bug #1486663 -- this used to erroneously raise a TypeError
        ArraySubclassWithKwargs('b', newarg=1)
    def test_create_from_bytes(self):
        """A bytes initializer is interpreted as raw machine values."""
        # XXX This test probably needs to be moved in a subclass or
        # generalized to use self.typecode.
        a = array.array('H', b"1234")
        # Four input bytes make len(a) items of itemsize bytes each.
        self.assertEqual(len(a) * a.itemsize, 4)
    @support.cpython_only
    def test_sizeof_with_buffer(self):
        """__sizeof__ of a non-empty array = object header + buffer."""
        a = array.array(self.typecode, self.example)
        # 'Pn2Pi' describes the C-level arrayobject layout -- TODO confirm
        # against Modules/arrayobject struct if it changes.
        basesize = support.calcvobjsize('Pn2Pi')
        buffer_size = a.buffer_info()[1] * a.itemsize
        support.check_sizeof(self, a, basesize + buffer_size)
    @support.cpython_only
    def test_sizeof_without_buffer(self):
        """__sizeof__ of an empty array is just the object header."""
        a = array.array(self.typecode)
        basesize = support.calcvobjsize('Pn2Pi')
        support.check_sizeof(self, a, basesize)
    def test_initialize_with_unicode(self):
        """Only typecode 'u' accepts str or unicode-array initializers."""
        if self.typecode != 'u':
            with self.assertRaises(TypeError) as cm:
                a = array.array(self.typecode, 'foo')
            self.assertIn("cannot use a str", str(cm.exception))
            with self.assertRaises(TypeError) as cm:
                a = array.array(self.typecode, array.array('u', 'foo'))
            self.assertIn("cannot use a unicode array", str(cm.exception))
        else:
            # For 'u' both initializer forms are accepted.
            a = array.array(self.typecode, "foo")
            a = array.array(self.typecode, array.array('u', 'foo'))
    @support.cpython_only
    def test_obsolete_write_lock(self):
        """A getbuffer call returning a null view must raise BufferError."""
        # _testcapi is a CPython-internal helper module.
        from _testcapi import getbuffer_with_null_view
        a = array.array('B', b"")
        self.assertRaises(BufferError, getbuffer_with_null_view, a)
    def test_free_after_iterating(self):
        """Freeing the array while an (exhausted) iterator exists must not
        crash, for both forward and reverse iteration."""
        support.check_free_after_iterating(self, iter, array.array,
                                           (self.typecode,))
        support.check_free_after_iterating(self, reversed, array.array,
                                           (self.typecode,))
class StringTest(BaseTest):
    """Extra checks for string-like typecodes (currently only 'u')."""
    def test_setitem(self):
        """Assigning a multi-character string to a single slot must fail."""
        super().test_setitem()
        a = array.array(self.typecode, self.example)
        self.assertRaises(TypeError, a.__setitem__, 0, self.example[:2])
class UnicodeTest(StringTest, unittest.TestCase):
    """Tests for the 'u' (wchar_t) typecode."""
    typecode = 'u'
    # Example data includes a NUL, a BMP char and a byte-order mark.
    example = '\x01\u263a\x00\ufeff'
    smallerexample = '\x01\u263a\x00\ufefe'
    biggerexample = '\x01\u263a\x01\ufeff'
    outside = str('\x33')
    # wchar_t is at least 16 bits on all supported platforms.
    minitemsize = 2
    def test_unicode(self):
        """fromunicode()/tounicode() round-trip, including empty strings,
        and repr() escapes correctly."""
        self.assertRaises(TypeError, array.array, 'b', 'foo')
        a = array.array('u', '\xa0\xc2\u1234')
        a.fromunicode(' ')
        a.fromunicode('')
        a.fromunicode('')
        a.fromunicode('\x11abc\xff\u1234')
        s = a.tounicode()
        self.assertEqual(s, '\xa0\xc2\u1234 \x11abc\xff\u1234')
        self.assertEqual(a.itemsize, sizeof_wchar)
        # String containing quote, backslash and control characters to
        # stress repr() escaping.
        s = '\x00="\'a\\b\x80\xff\u0000\u0001\u1234'
        a = array.array('u', s)
        self.assertEqual(
            repr(a),
            "array('u', '\\x00=\"\\'a\\\\b\\x80\xff\\x00\\x01\u1234')")
        self.assertRaises(TypeError, a.fromunicode)
    def test_issue17223(self):
        """Invalid code points must raise ValueError, not crash."""
        # this used to crash
        if sizeof_wchar == 4:
            # U+FFFFFFFF is an invalid code point in Unicode 6.0
            invalid_str = b'\xff\xff\xff\xff'
        else:
            # PyUnicode_FromUnicode() cannot fail with 16-bit wchar_t
            self.skipTest("specific to 32-bit wchar_t")
        a = array.array('u', invalid_str)
        self.assertRaises(ValueError, a.tounicode)
        self.assertRaises(ValueError, str, a)
class NumberTest(BaseTest):
    """Checks shared by all numeric typecodes (integer and float)."""
    def test_extslice(self):
        """Extended (stepped) slicing, including out-of-range bounds."""
        a = array.array(self.typecode, range(5))
        self.assertEqual(a[::], a)
        self.assertEqual(a[::2], array.array(self.typecode, [0,2,4]))
        self.assertEqual(a[1::2], array.array(self.typecode, [1,3]))
        self.assertEqual(a[::-1], array.array(self.typecode, [4,3,2,1,0]))
        self.assertEqual(a[::-2], array.array(self.typecode, [4,2,0]))
        self.assertEqual(a[3::-2], array.array(self.typecode, [3,1]))
        self.assertEqual(a[-100:100:], a)
        self.assertEqual(a[100:-100:-1], a[::-1])
        self.assertEqual(a[-100:100:2], array.array(self.typecode, [0,2,4]))
        self.assertEqual(a[1000:2000:2], array.array(self.typecode, []))
        self.assertEqual(a[-1000:-2000:-2], array.array(self.typecode, []))
    def test_delslice(self):
        """Deleting extended slices, including huge step values."""
        a = array.array(self.typecode, range(5))
        del a[::2]
        self.assertEqual(a, array.array(self.typecode, [1,3]))
        a = array.array(self.typecode, range(5))
        del a[1::2]
        self.assertEqual(a, array.array(self.typecode, [0,2,4]))
        a = array.array(self.typecode, range(5))
        del a[1::-2]
        self.assertEqual(a, array.array(self.typecode, [0,2,3,4]))
        a = array.array(self.typecode, range(10))
        del a[::1000]
        self.assertEqual(a, array.array(self.typecode, [1,2,3,4,5,6,7,8,9]))
        # test issue7788
        a = array.array(self.typecode, range(10))
        del a[9::1<<333]
    def test_assignment(self):
        """Assigning into extended slices, including self-assignment."""
        a = array.array(self.typecode, range(10))
        a[::2] = array.array(self.typecode, [42]*5)
        self.assertEqual(a, array.array(self.typecode, [42, 1, 42, 3, 42, 5, 42, 7, 42, 9]))
        a = array.array(self.typecode, range(10))
        a[::-4] = array.array(self.typecode, [10]*3)
        self.assertEqual(a, array.array(self.typecode, [0, 10, 2, 3, 4, 10, 6, 7, 8 ,10]))
        a = array.array(self.typecode, range(4))
        # Assigning an array to its own reversed slice must work.
        a[::-1] = a
        self.assertEqual(a, array.array(self.typecode, [3, 2, 1, 0]))
        a = array.array(self.typecode, range(10))
        b = a[:]
        c = a[:]
        ins = array.array(self.typecode, range(2))
        a[2:3] = ins
        b[slice(2,3)] = ins
        c[2:3:] = ins
    def test_iterationcontains(self):
        """Iteration order and the 'in' operator."""
        a = array.array(self.typecode, range(10))
        self.assertEqual(list(a), list(range(10)))
        b = array.array(self.typecode, [20])
        self.assertEqual(a[-1] in a, True)
        self.assertEqual(b[0] not in a, True)
    def check_overflow(self, lower, upper):
        """Assert that values just outside [lower, upper] overflow and the
        limits themselves do not."""
        # method to be used by subclasses
        # should not overflow assigning lower limit
        a = array.array(self.typecode, [lower])
        a[0] = lower
        # should overflow assigning less than lower limit
        self.assertRaises(OverflowError, array.array, self.typecode, [lower-1])
        self.assertRaises(OverflowError, a.__setitem__, 0, lower-1)
        # should not overflow assigning upper limit
        a = array.array(self.typecode, [upper])
        a[0] = upper
        # should overflow assigning more than upper limit
        self.assertRaises(OverflowError, array.array, self.typecode, [upper+1])
        self.assertRaises(OverflowError, a.__setitem__, 0, upper+1)
    def test_subclassing(self):
        """Subclasses with __slots__ work; unknown attributes are blocked."""
        typecode = self.typecode
        class ExaggeratingArray(array.array):
            __slots__ = ['offset']
            def __new__(cls, typecode, data, offset):
                return array.array.__new__(cls, typecode, data)
            def __init__(self, typecode, data, offset):
                self.offset = offset
            def __getitem__(self, i):
                return array.array.__getitem__(self, i) + self.offset
        a = ExaggeratingArray(self.typecode, [3, 6, 7, 11], 4)
        self.assertEntryEqual(a[0], 7)
        # __slots__ means arbitrary attributes cannot be set.
        self.assertRaises(AttributeError, setattr, a, "color", "blue")
    def test_frombytearray(self):
        """A 'b' array used as initializer copies values, not bytes."""
        a = array.array('b', range(10))
        b = array.array(self.typecode, a)
        self.assertEqual(a, b)
class IntegerNumberTest(NumberTest):
    """Checks shared by all integer typecodes."""
    def test_type_error(self):
        """Floats are rejected: integer arrays require integral values."""
        a = array.array(self.typecode)
        a.append(42)
        with self.assertRaises(TypeError):
            a.append(42.0)
        with self.assertRaises(TypeError):
            a[0] = 42.0
class Intable:
    """An integer-like object supporting __index__, __int__, + and -.

    Used by the overflow tests to verify that arrays accept arbitrary
    objects implementing the index protocol, not just built-in ints.
    """
    def __init__(self, value):
        self._value = value
    def __index__(self):
        return self._value
    def __int__(self):
        return self._value
    def __sub__(self, other):
        return Intable(int(self) - int(other))
    def __add__(self, other):
        return Intable(int(self) + int(other))
class SignedNumberTest(IntegerNumberTest):
    """Checks shared by signed integer typecodes."""
    example = [-1, 0, 1, 42, 0x7f]
    smallerexample = [-1, 0, 1, 42, 0x7e]
    biggerexample = [-1, 0, 1, 43, 0x7f]
    outside = 23
    def test_overflow(self):
        """Two's-complement limits derived from the actual itemsize."""
        a = array.array(self.typecode)
        lower = -1 * int(pow(2, a.itemsize * 8 - 1))
        upper = int(pow(2, a.itemsize * 8 - 1)) - 1
        self.check_overflow(lower, upper)
        # Index-protocol objects must be accepted as well.
        self.check_overflow(Intable(lower), Intable(upper))
class UnsignedNumberTest(IntegerNumberTest):
    """Checks shared by unsigned integer typecodes."""
    example = [0, 1, 17, 23, 42, 0xff]
    smallerexample = [0, 1, 17, 23, 42, 0xfe]
    biggerexample = [0, 1, 17, 23, 43, 0xff]
    outside = 0xaa
    def test_overflow(self):
        """Unsigned range [0, 2**bits - 1] from the actual itemsize."""
        a = array.array(self.typecode)
        lower = 0
        upper = int(pow(2, a.itemsize * 8)) - 1
        self.check_overflow(lower, upper)
        self.check_overflow(Intable(lower), Intable(upper))
    def test_bytes_extend(self):
        """extend() accepts bytes and bytearray as value sources."""
        s = bytes(self.example)
        a = array.array(self.typecode, self.example)
        a.extend(s)
        self.assertEqual(
            a,
            array.array(self.typecode, self.example+self.example)
        )
        a = array.array(self.typecode, self.example)
        a.extend(bytearray(reversed(s)))
        self.assertEqual(
            a,
            array.array(self.typecode, self.example+self.example[::-1])
        )
class ByteTest(SignedNumberTest, unittest.TestCase):
    # signed char: at least 1 byte
    typecode = 'b'
    minitemsize = 1
class UnsignedByteTest(UnsignedNumberTest, unittest.TestCase):
    # unsigned char: at least 1 byte
    typecode = 'B'
    minitemsize = 1
class ShortTest(SignedNumberTest, unittest.TestCase):
    # signed short: at least 2 bytes
    typecode = 'h'
    minitemsize = 2
class UnsignedShortTest(UnsignedNumberTest, unittest.TestCase):
    # unsigned short: at least 2 bytes
    typecode = 'H'
    minitemsize = 2
class IntTest(SignedNumberTest, unittest.TestCase):
    # C int is only guaranteed to be at least 2 bytes
    typecode = 'i'
    minitemsize = 2
class UnsignedIntTest(UnsignedNumberTest, unittest.TestCase):
    # C unsigned int is only guaranteed to be at least 2 bytes
    typecode = 'I'
    minitemsize = 2
class LongTest(SignedNumberTest, unittest.TestCase):
    # signed long: at least 4 bytes
    typecode = 'l'
    minitemsize = 4
class UnsignedLongTest(UnsignedNumberTest, unittest.TestCase):
    # unsigned long: at least 4 bytes
    typecode = 'L'
    minitemsize = 4
class LongLongTest(SignedNumberTest, unittest.TestCase):
    # signed long long: at least 8 bytes
    typecode = 'q'
    minitemsize = 8
class UnsignedLongLongTest(UnsignedNumberTest, unittest.TestCase):
    # unsigned long long: at least 8 bytes
    typecode = 'Q'
    minitemsize = 8
class FPTest(NumberTest):
    """Checks shared by the floating-point typecodes 'f' and 'd'."""
    example = [-42.0, 0, 42, 1e5, -1e10]
    smallerexample = [-42.0, 0, 42, 1e5, -2e10]
    biggerexample = [-42.0, 0, 42, 1e5, 1e10]
    outside = 23
    def assertEntryEqual(self, entry1, entry2):
        # Storing through 'f' rounds to single precision, so compare
        # approximately instead of exactly.
        self.assertAlmostEqual(entry1, entry2)
    def test_nan(self):
        """Per IEEE 754, NaN compares unequal to everything, even itself."""
        a = array.array(self.typecode, [float('nan')])
        b = array.array(self.typecode, [float('nan')])
        self.assertIs(a != b, True)
        self.assertIs(a == b, False)
        self.assertIs(a > b, False)
        self.assertIs(a >= b, False)
        self.assertIs(a < b, False)
        self.assertIs(a <= b, False)
    def test_byteswap(self):
        """byteswap() twice restores the original bit patterns."""
        a = array.array(self.typecode, self.example)
        self.assertRaises(TypeError, a.byteswap, 42)
        if a.itemsize in (1, 2, 4, 8):
            b = array.array(self.typecode, self.example)
            b.byteswap()
            if a.itemsize==1:
                self.assertEqual(a, b)
            else:
                # On alphas treating the byte swapped bit patterns as
                # floats/doubles results in floating point exceptions
                # => compare the 8bit string values instead
                self.assertNotEqual(a.tobytes(), b.tobytes())
            b.byteswap()
            self.assertEqual(a, b)
class FloatTest(FPTest, unittest.TestCase):
    # C float: at least 4 bytes (IEEE 754 single on common platforms)
    typecode = 'f'
    minitemsize = 4
class DoubleTest(FPTest, unittest.TestCase):
    # C double: at least 8 bytes (IEEE 754 double on common platforms)
    typecode = 'd'
    minitemsize = 8
    def test_alloc_overflow(self):
        """Repetitions whose byte size exceeds sys.maxsize must raise
        MemoryError instead of overflowing the size computation."""
        from sys import maxsize
        a = array.array('d', [-1]*65536)
        try:
            # In-place repeat past maxsize items.
            a *= maxsize//65536 + 1
        except MemoryError:
            pass
        else:
            self.fail("Array of size > maxsize created - MemoryError expected")
        b = array.array('d', [ 2.71828183, 3.14159265, -1])
        try:
            # Plain repeat past maxsize items.
            b * (maxsize//3 + 1)
        except MemoryError:
            pass
        else:
            self.fail("Array of size > maxsize created - MemoryError expected")
class LargeArrayTest(unittest.TestCase):
    """Big-memory tests exercising arrays with more than 2**31 items.

    Each test builds an array of ``size + 4`` signed bytes: ``size`` items
    cycling 0..7 (padded with 99), followed by the sentinel tail
    [8, 9, 10, 11] used to check positions near the end.
    Skipped unless run with a large enough -M memory limit.
    """
    typecode = 'b'
    def example(self, size):
        # We assess a base memuse of <=2.125 for constructing this array
        base = array.array(self.typecode, [0, 1, 2, 3, 4, 5, 6, 7]) * (size // 8)
        base += array.array(self.typecode, [99]*(size % 8) + [8, 9, 10, 11])
        return base
    @support.bigmemtest(_2G, memuse=2.125)
    def test_example_data(self, size):
        """Sanity check of the fixture itself."""
        example = self.example(size)
        self.assertEqual(len(example), size+4)
    @support.bigmemtest(_2G, memuse=2.125)
    def test_access(self, size):
        """Indexing with >2G positive and negative indices."""
        example = self.example(size)
        self.assertEqual(example[0], 0)
        self.assertEqual(example[-(size+4)], 0)
        self.assertEqual(example[size], 8)
        self.assertEqual(example[-4], 8)
        self.assertEqual(example[size+3], 11)
        self.assertEqual(example[-1], 11)
    @support.bigmemtest(_2G, memuse=2.125+1)
    def test_slice(self, size):
        """Plain and stepped slicing across the 2G boundary."""
        example = self.example(size)
        self.assertEqual(list(example[:4]), [0, 1, 2, 3])
        self.assertEqual(list(example[-4:]), [8, 9, 10, 11])
        part = example[1:-1]
        self.assertEqual(len(part), size+2)
        self.assertEqual(part[0], 1)
        self.assertEqual(part[-1], 10)
        del part
        part = example[::2]
        self.assertEqual(len(part), (size+5)//2)
        self.assertEqual(list(part[:4]), [0, 2, 4, 6])
        if size % 2:
            self.assertEqual(list(part[-2:]), [9, 11])
        else:
            self.assertEqual(list(part[-2:]), [8, 10])
    @support.bigmemtest(_2G, memuse=2.125)
    def test_count(self, size):
        """count() over >2G items."""
        example = self.example(size)
        self.assertEqual(example.count(0), size//8)
        self.assertEqual(example.count(11), 1)
    @support.bigmemtest(_2G, memuse=2.125)
    def test_append(self, size):
        """append() past the 2G boundary."""
        example = self.example(size)
        example.append(12)
        self.assertEqual(example[-1], 12)
    @support.bigmemtest(_2G, memuse=2.125)
    def test_extend(self, size):
        """extend() from an iterator past the 2G boundary."""
        example = self.example(size)
        example.extend(iter([12, 13, 14, 15]))
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11, 12, 13, 14, 15])
    @support.bigmemtest(_2G, memuse=2.125)
    def test_frombytes(self, size):
        """frombytes() appending past the 2G boundary."""
        example = self.example(size)
        example.frombytes(b'abcd')
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11] + list(b'abcd'))
    @support.bigmemtest(_2G, memuse=2.125)
    def test_fromlist(self, size):
        """fromlist() appending past the 2G boundary."""
        example = self.example(size)
        example.fromlist([12, 13, 14, 15])
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11, 12, 13, 14, 15])
    @support.bigmemtest(_2G, memuse=2.125)
    def test_index(self, size):
        """index() of values located before and after the 2G boundary."""
        example = self.example(size)
        self.assertEqual(example.index(0), 0)
        self.assertEqual(example.index(1), 1)
        self.assertEqual(example.index(7), 7)
        self.assertEqual(example.index(11), size+3)
    @support.bigmemtest(_2G, memuse=2.125)
    def test_insert(self, size):
        """insert() at the front, a small index and a >2G index."""
        example = self.example(size)
        example.insert(0, 12)
        example.insert(10, 13)
        example.insert(size+1, 14)
        self.assertEqual(len(example), size+7)
        self.assertEqual(example[0], 12)
        self.assertEqual(example[10], 13)
        self.assertEqual(example[size+1], 14)
    @support.bigmemtest(_2G, memuse=2.125)
    def test_pop(self, size):
        """pop() at the front, a >2G index, a small index and the end."""
        example = self.example(size)
        self.assertEqual(example.pop(0), 0)
        self.assertEqual(example[0], 1)
        self.assertEqual(example.pop(size+1), 10)
        self.assertEqual(example[size+1], 11)
        self.assertEqual(example.pop(1), 2)
        self.assertEqual(example[1], 3)
        self.assertEqual(len(example), size+1)
        self.assertEqual(example.pop(), 11)
        self.assertEqual(len(example), size)
    @support.bigmemtest(_2G, memuse=2.125)
    def test_remove(self, size):
        """remove() of values before and after the 2G boundary."""
        example = self.example(size)
        example.remove(0)
        self.assertEqual(len(example), size+3)
        self.assertEqual(example[0], 1)
        example.remove(10)
        self.assertEqual(len(example), size+2)
        self.assertEqual(example[size], 9)
        self.assertEqual(example[size+1], 11)
    @support.bigmemtest(_2G, memuse=2.125)
    def test_reverse(self, size):
        """reverse() twice restores the original >2G array."""
        example = self.example(size)
        example.reverse()
        self.assertEqual(len(example), size+4)
        self.assertEqual(example[0], 11)
        self.assertEqual(example[3], 8)
        self.assertEqual(example[-1], 0)
        example.reverse()
        self.assertEqual(len(example), size+4)
        self.assertEqual(list(example[:4]), [0, 1, 2, 3])
        self.assertEqual(list(example[-4:]), [8, 9, 10, 11])
    # list takes about 9 bytes per element
    @support.bigmemtest(_2G, memuse=2.125+9)
    def test_tolist(self, size):
        """tolist() of a >2G array preserves length and ends."""
        example = self.example(size)
        ls = example.tolist()
        self.assertEqual(len(ls), len(example))
        self.assertEqual(ls[:8], list(example[:8]))
        self.assertEqual(ls[-8:], list(example[-8:]))
if __name__ == "__main__":
    # Allow running this test file directly.
    unittest.main()
| 34.751479 | 92 | 0.572999 |
import unittest
from test import support
from test.support import _2G
import weakref
import pickle
import operator
import struct
import sys
import array
from array import _array_reconstructor as array_reconstructor
# Width in bytes of a 'u' (wchar_t) item -- 2 or 4 depending on platform.
sizeof_wchar = array.array('u').itemsize
class ArraySubclass(array.array):
    # Minimal subclass used by the pickling tests.
    pass
class ArraySubclassWithKwargs(array.array):
    """Subclass whose __init__ takes a keyword argument (SF #1486663).

    newarg is deliberately ignored; the test only checks construction.
    """
    def __init__(self, typecode, newarg=None):
        array.array.__init__(self)
# All typecodes exercised by the per-type test classes.
typecodes = 'ubBhHiIlLfdqQ'
class MiscTest(unittest.TestCase):
    """Checks independent of any particular typecode."""
    def test_bad_constructor(self):
        """Missing, keyword-only, multi-char and unknown typecodes fail."""
        bad_calls = [
            (TypeError, (), {}),
            (TypeError, (), {'spam': 42}),
            (TypeError, ('xx',), {}),
            (ValueError, ('x',), {}),
        ]
        for exc, args, kwargs in bad_calls:
            with self.assertRaises(exc):
                array.array(*args, **kwargs)
    def test_empty(self):
        """Every operation on an empty array yields an empty array."""
        empty = array.array('B')
        empty[:] = empty
        self.assertEqual(len(empty), 0)
        self.assertEqual(len(empty + empty), 0)
        self.assertEqual(len(empty * 3), 0)
        empty += empty
        self.assertEqual(len(empty), 0)
# Machine-format codes accepted by array._array_reconstructor(); these
# mirror the mformat_code enum in CPython's Modules/arraymodule.c.
UNKNOWN_FORMAT = -1
UNSIGNED_INT8 = 0
SIGNED_INT8 = 1
UNSIGNED_INT16_LE = 2
UNSIGNED_INT16_BE = 3
SIGNED_INT16_LE = 4
SIGNED_INT16_BE = 5
UNSIGNED_INT32_LE = 6
UNSIGNED_INT32_BE = 7
SIGNED_INT32_LE = 8
SIGNED_INT32_BE = 9
UNSIGNED_INT64_LE = 10
UNSIGNED_INT64_BE = 11
SIGNED_INT64_LE = 12
SIGNED_INT64_BE = 13
IEEE_754_FLOAT_LE = 14
IEEE_754_FLOAT_BE = 15
IEEE_754_DOUBLE_LE = 16
IEEE_754_DOUBLE_BE = 17
UTF16_LE = 18
UTF16_BE = 19
UTF32_LE = 20
UTF32_BE = 21
class ArrayReconstructorTest(unittest.TestCase):
    """Tests for array._array_reconstructor, the pickle helper that
    rebuilds an array from (type, typecode, mformat_code, bytes)."""
    def test_error(self):
        """Bad argument types and out-of-range format codes are rejected."""
        self.assertRaises(TypeError, array_reconstructor,
                          "", "b", 0, b"")
        self.assertRaises(TypeError, array_reconstructor,
                          str, "b", 0, b"")
        self.assertRaises(TypeError, array_reconstructor,
                          array.array, "b", '', b"")
        self.assertRaises(TypeError, array_reconstructor,
                          array.array, "b", 0, "")
        self.assertRaises(ValueError, array_reconstructor,
                          array.array, "?", 0, b"")
        self.assertRaises(ValueError, array_reconstructor,
                          array.array, "b", UNKNOWN_FORMAT, b"")
        self.assertRaises(ValueError, array_reconstructor,
                          array.array, "b", 22, b"")
        # Payload length not a multiple of the item size.
        self.assertRaises(ValueError, array_reconstructor,
                          array.array, "d", 16, b"a")
    def test_numbers(self):
        """Round-trip every numeric machine format through struct-packed
        byte strings, in both endiannesses."""
        # Each entry: (compatible typecodes, machine format code,
        # struct format used to build the payload, values).
        testcases = (
            (['B', 'H', 'I', 'L'], UNSIGNED_INT8, '=BBBB',
             [0x80, 0x7f, 0, 0xff]),
            (['b', 'h', 'i', 'l'], SIGNED_INT8, '=bbb',
             [-0x80, 0x7f, 0]),
            (['H', 'I', 'L'], UNSIGNED_INT16_LE, '<HHHH',
             [0x8000, 0x7fff, 0, 0xffff]),
            (['H', 'I', 'L'], UNSIGNED_INT16_BE, '>HHHH',
             [0x8000, 0x7fff, 0, 0xffff]),
            (['h', 'i', 'l'], SIGNED_INT16_LE, '<hhh',
             [-0x8000, 0x7fff, 0]),
            (['h', 'i', 'l'], SIGNED_INT16_BE, '>hhh',
             [-0x8000, 0x7fff, 0]),
            (['I', 'L'], UNSIGNED_INT32_LE, '<IIII',
             [1<<31, (1<<31)-1, 0, (1<<32)-1]),
            (['I', 'L'], UNSIGNED_INT32_BE, '>IIII',
             [1<<31, (1<<31)-1, 0, (1<<32)-1]),
            (['i', 'l'], SIGNED_INT32_LE, '<iii',
             [-1<<31, (1<<31)-1, 0]),
            (['i', 'l'], SIGNED_INT32_BE, '>iii',
             [-1<<31, (1<<31)-1, 0]),
            (['L'], UNSIGNED_INT64_LE, '<QQQQ',
             [1<<31, (1<<31)-1, 0, (1<<32)-1]),
            (['L'], UNSIGNED_INT64_BE, '>QQQQ',
             [1<<31, (1<<31)-1, 0, (1<<32)-1]),
            (['l'], SIGNED_INT64_LE, '<qqq',
             [-1<<31, (1<<31)-1, 0]),
            (['l'], SIGNED_INT64_BE, '>qqq',
             [-1<<31, (1<<31)-1, 0]),
            (['L'], UNSIGNED_INT64_LE, '<QQQQ',
             [1<<63, (1<<63)-1, 0, (1<<64)-1]),
            (['L'], UNSIGNED_INT64_BE, '>QQQQ',
             [1<<63, (1<<63)-1, 0, (1<<64)-1]),
            (['l'], SIGNED_INT64_LE, '<qqq',
             [-1<<63, (1<<63)-1, 0]),
            (['l'], SIGNED_INT64_BE, '>qqq',
             [-1<<63, (1<<63)-1, 0]),
            (['f'], IEEE_754_FLOAT_LE, '<ffff',
             [16711938.0, float('inf'), float('-inf'), -0.0]),
            (['f'], IEEE_754_FLOAT_BE, '>ffff',
             [16711938.0, float('inf'), float('-inf'), -0.0]),
            (['d'], IEEE_754_DOUBLE_LE, '<dddd',
             [9006104071832581.0, float('inf'), float('-inf'), -0.0]),
            (['d'], IEEE_754_DOUBLE_BE, '>dddd',
             [9006104071832581.0, float('inf'), float('-inf'), -0.0])
        )
        for testcase in testcases:
            valid_typecodes, mformat_code, struct_fmt, values = testcase
            arraystr = struct.pack(struct_fmt, *values)
            for typecode in valid_typecodes:
                try:
                    a = array.array(typecode, values)
                except OverflowError:
                    # Values too wide for this typecode on this platform.
                    continue
                b = array_reconstructor(
                    array.array, typecode, mformat_code, arraystr)
                self.assertEqual(a, b,
                    msg="{0!r} != {1!r}; testcase={2!r}".format(a, b, testcase))
    def test_unicode(self):
        """Round-trip 'u' arrays through all four UTF machine formats."""
        teststr = "Bonne Journ\xe9e \U0002030a\U00020347"
        testcases = (
            (UTF16_LE, "UTF-16-LE"),
            (UTF16_BE, "UTF-16-BE"),
            (UTF32_LE, "UTF-32-LE"),
            (UTF32_BE, "UTF-32-BE")
        )
        for testcase in testcases:
            mformat_code, encoding = testcase
            a = array.array('u', teststr)
            b = array_reconstructor(
                array.array, 'u', mformat_code, teststr.encode(encoding))
            self.assertEqual(a, b,
                msg="{0!r} != {1!r}; testcase={2!r}".format(a, b, testcase))
class BaseTest:
def assertEntryEqual(self, entry1, entry2):
self.assertEqual(entry1, entry2)
def badtypecode(self):
return typecodes[(typecodes.index(self.typecode)+1) % len(typecodes)]
def test_constructor(self):
a = array.array(self.typecode)
self.assertEqual(a.typecode, self.typecode)
self.assertGreaterEqual(a.itemsize, self.minitemsize)
self.assertRaises(TypeError, array.array, self.typecode, None)
def test_len(self):
a = array.array(self.typecode)
a.append(self.example[0])
self.assertEqual(len(a), 1)
a = array.array(self.typecode, self.example)
self.assertEqual(len(a), len(self.example))
def test_buffer_info(self):
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.buffer_info, 42)
bi = a.buffer_info()
self.assertIsInstance(bi, tuple)
self.assertEqual(len(bi), 2)
self.assertIsInstance(bi[0], int)
self.assertIsInstance(bi[1], int)
self.assertEqual(bi[1], len(a))
def test_byteswap(self):
if self.typecode == 'u':
example = '\U00100100'
else:
example = self.example
a = array.array(self.typecode, example)
self.assertRaises(TypeError, a.byteswap, 42)
if a.itemsize in (1, 2, 4, 8):
b = array.array(self.typecode, example)
b.byteswap()
if a.itemsize==1:
self.assertEqual(a, b)
else:
self.assertNotEqual(a, b)
b.byteswap()
self.assertEqual(a, b)
def test_copy(self):
import copy
a = array.array(self.typecode, self.example)
b = copy.copy(a)
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
def test_deepcopy(self):
import copy
a = array.array(self.typecode, self.example)
b = copy.deepcopy(a)
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
def test_reduce_ex(self):
a = array.array(self.typecode, self.example)
for protocol in range(3):
self.assertIs(a.__reduce_ex__(protocol)[0], array.array)
for protocol in range(3, pickle.HIGHEST_PROTOCOL + 1):
self.assertIs(a.__reduce_ex__(protocol)[0], array_reconstructor)
def test_pickle(self):
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
a = array.array(self.typecode, self.example)
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
a = ArraySubclass(self.typecode, self.example)
a.x = 10
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
self.assertEqual(a.x, b.x)
self.assertEqual(type(a), type(b))
def test_pickle_for_empty_array(self):
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
a = array.array(self.typecode)
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
a = ArraySubclass(self.typecode)
a.x = 10
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
self.assertEqual(a.x, b.x)
self.assertEqual(type(a), type(b))
def test_iterator_pickle(self):
orig = array.array(self.typecode, self.example)
data = list(orig)
data2 = data[::-1]
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
itorig = iter(orig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(type(it), type(itorig))
self.assertEqual(list(it), data + data2)
next(itorig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(type(it), type(itorig))
self.assertEqual(list(it), data[1:] + data2)
for i in range(1, len(data)):
next(itorig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(type(it), type(itorig))
self.assertEqual(list(it), data2)
self.assertRaises(StopIteration, next, itorig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(list(it), [])
def test_exhausted_iterator(self):
a = array.array(self.typecode, self.example)
self.assertEqual(list(a), list(self.example))
exhit = iter(a)
empit = iter(a)
for x in exhit:
next(empit)
a.append(self.outside)
self.assertEqual(list(exhit), [])
self.assertEqual(list(empit), [self.outside])
self.assertEqual(list(a), list(self.example) + [self.outside])
def test_insert(self):
a = array.array(self.typecode, self.example)
a.insert(0, self.example[0])
self.assertEqual(len(a), 1+len(self.example))
self.assertEqual(a[0], a[1])
self.assertRaises(TypeError, a.insert)
self.assertRaises(TypeError, a.insert, None)
self.assertRaises(TypeError, a.insert, 0, None)
a = array.array(self.typecode, self.example)
a.insert(-1, self.example[0])
self.assertEqual(
a,
array.array(
self.typecode,
self.example[:-1] + self.example[:1] + self.example[-1:]
)
)
a = array.array(self.typecode, self.example)
a.insert(-1000, self.example[0])
self.assertEqual(
a,
array.array(self.typecode, self.example[:1] + self.example)
)
a = array.array(self.typecode, self.example)
a.insert(1000, self.example[0])
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[:1])
)
def test_tofromfile(self):
a = array.array(self.typecode, 2*self.example)
self.assertRaises(TypeError, a.tofile)
support.unlink(support.TESTFN)
f = open(support.TESTFN, 'wb')
try:
a.tofile(f)
f.close()
b = array.array(self.typecode)
f = open(support.TESTFN, 'rb')
self.assertRaises(TypeError, b.fromfile)
b.fromfile(f, len(self.example))
self.assertEqual(b, array.array(self.typecode, self.example))
self.assertNotEqual(a, b)
self.assertRaises(EOFError, b.fromfile, f, len(self.example)+1)
self.assertEqual(a, b)
f.close()
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
def test_fromfile_ioerror(self):
f = open(support.TESTFN, 'wb')
try:
self.assertRaises(OSError, a.fromfile, f, len(self.example))
finally:
f.close()
support.unlink(support.TESTFN)
def test_filewrite(self):
a = array.array(self.typecode, 2*self.example)
f = open(support.TESTFN, 'wb')
try:
f.write(a)
f.close()
b = array.array(self.typecode)
f = open(support.TESTFN, 'rb')
b.fromfile(f, len(self.example))
self.assertEqual(b, array.array(self.typecode, self.example))
self.assertNotEqual(a, b)
b.fromfile(f, len(self.example))
self.assertEqual(a, b)
f.close()
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
def test_tofromlist(self):
a = array.array(self.typecode, 2*self.example)
b = array.array(self.typecode)
self.assertRaises(TypeError, a.tolist, 42)
self.assertRaises(TypeError, b.fromlist)
self.assertRaises(TypeError, b.fromlist, 42)
self.assertRaises(TypeError, b.fromlist, [None])
b.fromlist(a.tolist())
self.assertEqual(a, b)
def test_tofrombytes(self):
a = array.array(self.typecode, 2*self.example)
b = array.array(self.typecode)
self.assertRaises(TypeError, a.tobytes, 42)
self.assertRaises(TypeError, b.frombytes)
self.assertRaises(TypeError, b.frombytes, 42)
b.frombytes(a.tobytes())
c = array.array(self.typecode, bytearray(a.tobytes()))
self.assertEqual(a, b)
self.assertEqual(a, c)
if a.itemsize>1:
self.assertRaises(ValueError, b.frombytes, b"x")
def test_fromarray(self):
a = array.array(self.typecode, self.example)
b = array.array(self.typecode, a)
self.assertEqual(a, b)
def test_repr(self):
a = array.array(self.typecode, 2*self.example)
self.assertEqual(a, eval(repr(a), {"array": array.array}))
a = array.array(self.typecode)
self.assertEqual(repr(a), "array('%s')" % self.typecode)
def test_str(self):
a = array.array(self.typecode, 2*self.example)
str(a)
def test_cmp(self):
a = array.array(self.typecode, self.example)
self.assertIs(a == 42, False)
self.assertIs(a != 42, True)
self.assertIs(a == a, True)
self.assertIs(a != a, False)
self.assertIs(a < a, False)
self.assertIs(a <= a, True)
self.assertIs(a > a, False)
self.assertIs(a >= a, True)
al = array.array(self.typecode, self.smallerexample)
ab = array.array(self.typecode, self.biggerexample)
self.assertIs(a == 2*a, False)
self.assertIs(a != 2*a, True)
self.assertIs(a < 2*a, True)
self.assertIs(a <= 2*a, True)
self.assertIs(a > 2*a, False)
self.assertIs(a >= 2*a, False)
self.assertIs(a == al, False)
self.assertIs(a != al, True)
self.assertIs(a < al, False)
self.assertIs(a <= al, False)
self.assertIs(a > al, True)
self.assertIs(a >= al, True)
self.assertIs(a == ab, False)
self.assertIs(a != ab, True)
self.assertIs(a < ab, True)
self.assertIs(a <= ab, True)
self.assertIs(a > ab, False)
self.assertIs(a >= ab, False)
def test_add(self):
a = array.array(self.typecode, self.example) \
+ array.array(self.typecode, self.example[::-1])
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[::-1])
)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.__add__, b)
self.assertRaises(TypeError, a.__add__, "bad")
def test_iadd(self):
a = array.array(self.typecode, self.example[::-1])
b = a
a += array.array(self.typecode, 2*self.example)
self.assertIs(a, b)
self.assertEqual(
a,
array.array(self.typecode, self.example[::-1]+2*self.example)
)
a = array.array(self.typecode, self.example)
a += a
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example)
)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.__add__, b)
self.assertRaises(TypeError, a.__iadd__, "bad")
def test_mul(self):
a = 5*array.array(self.typecode, self.example)
self.assertEqual(
a,
array.array(self.typecode, 5*self.example)
)
a = array.array(self.typecode, self.example)*5
self.assertEqual(
a,
array.array(self.typecode, self.example*5)
)
a = 0*array.array(self.typecode, self.example)
self.assertEqual(
a,
array.array(self.typecode)
)
a = (-1)*array.array(self.typecode, self.example)
self.assertEqual(
a,
array.array(self.typecode)
)
a = 5 * array.array(self.typecode, self.example[:1])
self.assertEqual(
a,
array.array(self.typecode, [a[0]] * 5)
)
self.assertRaises(TypeError, a.__mul__, "bad")
def test_imul(self):
a = array.array(self.typecode, self.example)
b = a
a *= 5
self.assertIs(a, b)
self.assertEqual(
a,
array.array(self.typecode, 5*self.example)
)
a *= 0
self.assertIs(a, b)
self.assertEqual(a, array.array(self.typecode))
a *= 1000
self.assertIs(a, b)
self.assertEqual(a, array.array(self.typecode))
a *= -1
self.assertIs(a, b)
self.assertEqual(a, array.array(self.typecode))
a = array.array(self.typecode, self.example)
a *= -1
self.assertEqual(a, array.array(self.typecode))
self.assertRaises(TypeError, a.__imul__, "bad")
def test_getitem(self):
    # Indexing with positive and negative indices; out-of-range raises
    # IndexError, missing argument raises TypeError.  Repeated lookups
    # are intentional (they exercise the same path twice).
    a = array.array(self.typecode, self.example)
    self.assertEntryEqual(a[0], self.example[0])
    self.assertEntryEqual(a[0], self.example[0])
    self.assertEntryEqual(a[-1], self.example[-1])
    self.assertEntryEqual(a[-1], self.example[-1])
    self.assertEntryEqual(a[len(self.example)-1], self.example[-1])
    self.assertEntryEqual(a[-len(self.example)], self.example[0])
    self.assertRaises(TypeError, a.__getitem__)
    self.assertRaises(IndexError, a.__getitem__, len(self.example))
    self.assertRaises(IndexError, a.__getitem__, -len(self.example)-1)
def test_setitem(self):
    # Item assignment via positive/negative indices; bad index types,
    # bad values and out-of-range indices all raise.
    a = array.array(self.typecode, self.example)
    a[0] = a[-1]
    self.assertEntryEqual(a[0], a[-1])
    a = array.array(self.typecode, self.example)
    a[0] = a[-1]
    self.assertEntryEqual(a[0], a[-1])
    a = array.array(self.typecode, self.example)
    a[-1] = a[0]
    self.assertEntryEqual(a[0], a[-1])
    a = array.array(self.typecode, self.example)
    a[-1] = a[0]
    self.assertEntryEqual(a[0], a[-1])
    a = array.array(self.typecode, self.example)
    a[len(self.example)-1] = a[0]
    self.assertEntryEqual(a[0], a[-1])
    a = array.array(self.typecode, self.example)
    a[-len(self.example)] = a[-1]
    self.assertEntryEqual(a[0], a[-1])
    self.assertRaises(TypeError, a.__setitem__)
    self.assertRaises(TypeError, a.__setitem__, None)
    self.assertRaises(TypeError, a.__setitem__, 0, None)
    self.assertRaises(
        IndexError,
        a.__setitem__,
        len(self.example), self.example[0]
    )
    self.assertRaises(
        IndexError,
        a.__setitem__,
        -len(self.example)-1, self.example[0]
    )
def test_delitem(self):
    # 'del a[i]' removes one item for positive and negative indices;
    # bad index types / out-of-range indices raise.
    a = array.array(self.typecode, self.example)
    del a[0]
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[1:])
    )
    a = array.array(self.typecode, self.example)
    del a[-1]
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[:-1])
    )
    a = array.array(self.typecode, self.example)
    del a[len(self.example)-1]
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[:-1])
    )
    a = array.array(self.typecode, self.example)
    del a[-len(self.example)]
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[1:])
    )
    self.assertRaises(TypeError, a.__delitem__)
    self.assertRaises(TypeError, a.__delitem__, None)
    self.assertRaises(IndexError, a.__delitem__, len(self.example))
    self.assertRaises(IndexError, a.__delitem__, -len(self.example)-1)
def test_getslice(self):
    # Simple (non-extended) slicing matches list slicing semantics,
    # including out-of-range bounds clamping to an empty array.
    a = array.array(self.typecode, self.example)
    self.assertEqual(a[:], a)
    self.assertEqual(
        a[1:],
        array.array(self.typecode, self.example[1:])
    )
    self.assertEqual(
        a[:1],
        array.array(self.typecode, self.example[:1])
    )
    self.assertEqual(
        a[:-1],
        array.array(self.typecode, self.example[:-1])
    )
    self.assertEqual(
        a[-1:],
        array.array(self.typecode, self.example[-1:])
    )
    self.assertEqual(
        a[-1:-1],
        array.array(self.typecode)
    )
    self.assertEqual(
        a[2:1],
        array.array(self.typecode)
    )
    self.assertEqual(
        a[1000:],
        array.array(self.typecode)
    )
    self.assertEqual(a[-1000:], a)
    self.assertEqual(a[:1000], a)
    self.assertEqual(
        a[:-1000],
        array.array(self.typecode)
    )
    self.assertEqual(a[-1000:1000], a)
    self.assertEqual(
        a[2000:1000],
        array.array(self.typecode)
    )
def test_extended_getslice(self):
    # Extended slices (with a step) must agree with list slicing for a
    # wide grid of start/stop/step combinations (step skips 0).
    a = array.array(self.typecode, self.example)
    indices = (0, None, 1, 3, 19, 100, sys.maxsize, -1, -2, -31, -100)
    for start in indices:
        for stop in indices:
            for step in indices[1:]:
                self.assertEqual(list(a[start:stop:step]),
                                 list(a)[start:stop:step])
def test_setslice(self):
    # Simple slice assignment, including self-assignment, boundary
    # clamping, empty target slices, and type errors for bad values.
    a = array.array(self.typecode, self.example)
    a[:1] = a
    self.assertEqual(
        a,
        array.array(self.typecode, self.example + self.example[1:])
    )
    a = array.array(self.typecode, self.example)
    a[:-1] = a
    self.assertEqual(
        a,
        array.array(self.typecode, self.example + self.example[-1:])
    )
    a = array.array(self.typecode, self.example)
    a[-1:] = a
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[:-1] + self.example)
    )
    a = array.array(self.typecode, self.example)
    a[1:] = a
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[:1] + self.example)
    )
    a = array.array(self.typecode, self.example)
    a[1:-1] = a
    self.assertEqual(
        a,
        array.array(
            self.typecode,
            self.example[:1] + self.example + self.example[-1:]
        )
    )
    a = array.array(self.typecode, self.example)
    a[1000:] = a
    self.assertEqual(
        a,
        array.array(self.typecode, 2*self.example)
    )
    a = array.array(self.typecode, self.example)
    a[-1000:] = a
    self.assertEqual(
        a,
        array.array(self.typecode, self.example)
    )
    a = array.array(self.typecode, self.example)
    a[:1000] = a
    self.assertEqual(
        a,
        array.array(self.typecode, self.example)
    )
    a = array.array(self.typecode, self.example)
    a[:-1000] = a
    self.assertEqual(
        a,
        array.array(self.typecode, 2*self.example)
    )
    a = array.array(self.typecode, self.example)
    a[1:0] = a
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[:1] + self.example + self.example[1:])
    )
    a = array.array(self.typecode, self.example)
    a[2000:1000] = a
    self.assertEqual(
        a,
        array.array(self.typecode, 2*self.example)
    )
    a = array.array(self.typecode, self.example)
    self.assertRaises(TypeError, a.__setitem__, slice(0, 0), None)
    self.assertRaises(TypeError, a.__setitem__, slice(0, 1), None)
    b = array.array(self.badtypecode())
    self.assertRaises(TypeError, a.__setitem__, slice(0, 0), b)
    self.assertRaises(TypeError, a.__setitem__, slice(0, 1), b)
def test_extended_set_del_slice(self):
    # Extended-slice assignment and deletion must track list semantics
    # across a grid of start/stop/step combinations.
    indices = (0, None, 1, 3, 19, 100, sys.maxsize, -1, -2, -31, -100)
    for start in indices:
        for stop in indices:
            for step in indices[1:]:
                a = array.array(self.typecode, self.example)
                L = list(a)
                # Make sure we have a slice of exactly the right length,
                # but with (hopefully) different data.
                data = L[start:stop:step]
                data.reverse()
                L[start:stop:step] = data
                a[start:stop:step] = array.array(self.typecode, data)
                self.assertEqual(a, array.array(self.typecode, L))
                del L[start:stop:step]
                del a[start:stop:step]
                self.assertEqual(a, array.array(self.typecode, L))
def test_index(self):
    # index() must agree with list.index for every present value and
    # raise ValueError for anything not stored in the array.
    values = 2 * self.example
    arr = array.array(self.typecode, values)
    self.assertRaises(TypeError, arr.index)
    for value in values:
        self.assertEqual(arr.index(value), values.index(value))
    self.assertRaises(ValueError, arr.index, None)
    self.assertRaises(ValueError, arr.index, self.outside)
def test_count(self):
    # count() must agree with list.count; absent values count as zero.
    values = 2 * self.example
    arr = array.array(self.typecode, values)
    self.assertRaises(TypeError, arr.count)
    for value in values:
        self.assertEqual(arr.count(value), values.count(value))
    self.assertEqual(arr.count(self.outside), 0)
    self.assertEqual(arr.count(None), 0)
def test_remove(self):
    # remove() deletes only the first occurrence; removing an absent
    # value raises ValueError.
    for x in self.example:
        example = 2*self.example
        a = array.array(self.typecode, example)
        pos = example.index(x)
        example2 = example[:pos] + example[pos+1:]
        a.remove(x)
        self.assertEqual(a, array.array(self.typecode, example2))
    a = array.array(self.typecode, self.example)
    self.assertRaises(ValueError, a.remove, self.outside)
    self.assertRaises(ValueError, a.remove, None)
def test_pop(self):
    # pop() from an empty array raises IndexError; otherwise it removes
    # and returns the item at the given index (default: last).
    a = array.array(self.typecode)
    self.assertRaises(IndexError, a.pop)
    a = array.array(self.typecode, 2*self.example)
    self.assertRaises(TypeError, a.pop, 42, 42)
    self.assertRaises(TypeError, a.pop, None)
    self.assertRaises(IndexError, a.pop, len(a))
    self.assertRaises(IndexError, a.pop, -len(a)-1)
    self.assertEntryEqual(a.pop(0), self.example[0])
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[1:]+self.example)
    )
    self.assertEntryEqual(a.pop(1), self.example[2])
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[1:2]+self.example[3:]+self.example)
    )
    self.assertEntryEqual(a.pop(0), self.example[1])
    self.assertEntryEqual(a.pop(), self.example[-1])
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[3:]+self.example[:-1])
    )
def test_reverse(self):
    # reverse() takes no arguments and reverses the array in place.
    arr = array.array(self.typecode, self.example)
    self.assertRaises(TypeError, arr.reverse, 42)
    arr.reverse()
    expected = array.array(self.typecode, self.example[::-1])
    self.assertEqual(arr, expected)
def test_extend(self):
    # extend() accepts another array of the same typecode, the array
    # itself, or any iterable; a mismatched typecode raises TypeError.
    a = array.array(self.typecode, self.example)
    self.assertRaises(TypeError, a.extend)
    a.extend(array.array(self.typecode, self.example[::-1]))
    self.assertEqual(
        a,
        array.array(self.typecode, self.example+self.example[::-1])
    )
    a = array.array(self.typecode, self.example)
    a.extend(a)
    self.assertEqual(
        a,
        array.array(self.typecode, self.example+self.example)
    )
    b = array.array(self.badtypecode())
    self.assertRaises(TypeError, a.extend, b)
    a = array.array(self.typecode, self.example)
    a.extend(self.example[::-1])
    self.assertEqual(
        a,
        array.array(self.typecode, self.example+self.example[::-1])
    )
def test_constructor_with_iterable_argument(self):
    # The constructor accepts any iterable; exceptions raised while
    # iterating (here UnicodeError) must propagate unchanged.
    a = array.array(self.typecode, iter(self.example))
    b = array.array(self.typecode, self.example)
    self.assertEqual(a, b)
    # non-iterable argument
    self.assertRaises(TypeError, array.array, self.typecode, 10)
    # pass through errors raised in __iter__
    class A:
        def __iter__(self):
            raise UnicodeError
    self.assertRaises(UnicodeError, array.array, self.typecode, A())
    # pass through errors raised in next()
    def B():
        raise UnicodeError
        yield None
    self.assertRaises(UnicodeError, array.array, self.typecode, B())
def test_coveritertraverse(self):
    # Build a reference cycle holding an array iterator so that a GC
    # pass exercises the iterator's traversal slot.
    try:
        import gc
    except ImportError:
        self.skipTest('gc module not available')
    arr = array.array(self.typecode)
    cycle = [iter(arr)]
    cycle.append(cycle)
    gc.collect()
def test_buffer(self):
    # While a memoryview export is alive, every mutating operation must
    # raise BufferError and leave the underlying bytes untouched.
    a = array.array(self.typecode, self.example)
    m = memoryview(a)
    expected = m.tobytes()
    self.assertEqual(a.tobytes(), expected)
    self.assertEqual(a.tobytes()[0], expected[0])
    # Resizing is forbidden when there are buffer exports.
    # For issue 4509, test that appending does not change the buffer.
    self.assertRaises(BufferError, a.append, a[0])
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.extend, a[0:1])
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.remove, a[0])
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.pop, 0)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.fromlist, a.tolist())
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.frombytes, a.tobytes())
    self.assertEqual(m.tobytes(), expected)
    if self.typecode == 'u':
        self.assertRaises(BufferError, a.fromunicode, a.tounicode())
        self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, operator.imul, a, 2)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, operator.imul, a, 0)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, operator.setitem, a, slice(0, 0), a)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, operator.delitem, a, 0)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, operator.delitem, a, slice(0, 1))
    self.assertEqual(m.tobytes(), expected)
def test_weakref(self):
    # Arrays support weak references; once the referent is dropped, the
    # proxy must raise ReferenceError.
    target = array.array(self.typecode, self.example)
    proxy = weakref.proxy(target)
    self.assertEqual(proxy.tobytes(), target.tobytes())
    target = None
    self.assertRaises(ReferenceError, len, proxy)
@unittest.skipUnless(hasattr(sys, 'getrefcount'),
                     'test needs sys.getrefcount()')
def test_bug_782369(self):
    # Regression check: repeatedly constructing arrays from a range
    # must not leak references to the range's items.
    for i in range(10):
        b = array.array('B', range(64))
    rc = sys.getrefcount(10)
    for i in range(10):
        b = array.array('B', range(64))
    self.assertEqual(rc, sys.getrefcount(10))
def test_subclass_with_kwargs(self):
ef test_create_from_bytes(self):
a = array.array('H', b"1234")
self.assertEqual(len(a) * a.itemsize, 4)
@support.cpython_only
def test_sizeof_with_buffer(self):
    # sys.getsizeof must account for the allocated item buffer.
    a = array.array(self.typecode, self.example)
    basesize = support.calcvobjsize('Pn2Pi')
    buffer_size = a.buffer_info()[1] * a.itemsize
    support.check_sizeof(self, a, basesize + buffer_size)
@support.cpython_only
def test_sizeof_without_buffer(self):
    # An empty array has no item buffer, only the base object size.
    a = array.array(self.typecode)
    basesize = support.calcvobjsize('Pn2Pi')
    support.check_sizeof(self, a, basesize)
def test_initialize_with_unicode(self):
    # Only 'u' arrays may be built from str / unicode arrays; all other
    # typecodes must reject them with a descriptive TypeError.
    if self.typecode != 'u':
        with self.assertRaises(TypeError) as cm:
            a = array.array(self.typecode, 'foo')
        self.assertIn("cannot use a str", str(cm.exception))
        with self.assertRaises(TypeError) as cm:
            a = array.array(self.typecode, array.array('u', 'foo'))
        self.assertIn("cannot use a unicode array", str(cm.exception))
    else:
        a = array.array(self.typecode, "foo")
        a = array.array(self.typecode, array.array('u', 'foo'))
@support.cpython_only
def test_obsolete_write_lock(self):
    # A getbuffer call that supplies a NULL view must raise BufferError.
    from _testcapi import getbuffer_with_null_view
    a = array.array('B', b"")
    self.assertRaises(BufferError, getbuffer_with_null_view, a)
def test_free_after_iterating(self):
    # Freeing the array while iterators (forward and reversed) are
    # still alive must not crash.
    support.check_free_after_iterating(self, iter, array.array,
                                       (self.typecode,))
    support.check_free_after_iterating(self, reversed, array.array,
                                       (self.typecode,))
class StringTest(BaseTest):
    """Extra checks shared by string-like array typecodes."""

    def test_setitem(self):
        # On top of the base checks, assigning a multi-character value
        # to a single slot must fail.
        super().test_setitem()
        a = array.array(self.typecode, self.example)
        self.assertRaises(TypeError, a.__setitem__, 0, self.example[:2])
class UnicodeTest(StringTest, unittest.TestCase):
    """Tests for 'u' arrays, which store wchar_t code units."""

    typecode = 'u'
    example = '\x01\u263a\x00\ufeff'
    smallerexample = '\x01\u263a\x00\ufefe'
    biggerexample = '\x01\u263a\x01\ufeff'
    outside = str('\x33')
    minitemsize = 2

    def test_unicode(self):
        # fromunicode()/tounicode() round-trip and repr() escaping.
        self.assertRaises(TypeError, array.array, 'b', 'foo')
        a = array.array('u', '\xa0\xc2\u1234')
        a.fromunicode(' ')
        a.fromunicode('')
        a.fromunicode('')
        a.fromunicode('\x11abc\xff\u1234')
        s = a.tounicode()
        self.assertEqual(s, '\xa0\xc2\u1234 \x11abc\xff\u1234')
        self.assertEqual(a.itemsize, sizeof_wchar)
        s = '\x00="\'a\\b\x80\xff\u0000\u0001\u1234'
        a = array.array('u', s)
        self.assertEqual(
            repr(a),
            "array('u', '\\x00=\"\\'a\\\\b\\x80\xff\\x00\\x01\u1234')")
        self.assertRaises(TypeError, a.fromunicode)

    def test_issue17223(self):
        # An invalid wchar_t sequence must raise on conversion to str.
        if sizeof_wchar == 4:
            # U+FFFFFFFF is an invalid code point on 32-bit wchar_t.
            invalid_str = b'\xff\xff\xff\xff'
        else:
            self.skipTest("specific to 32-bit wchar_t")
        a = array.array('u', invalid_str)
        self.assertRaises(ValueError, a.tounicode)
        self.assertRaises(ValueError, str, a)
class NumberTest(BaseTest):
    """Checks shared by all numeric array typecodes."""

    def test_extslice(self):
        # Extended (stepped) read slices match list behavior.
        a = array.array(self.typecode, range(5))
        self.assertEqual(a[::], a)
        self.assertEqual(a[::2], array.array(self.typecode, [0,2,4]))
        self.assertEqual(a[1::2], array.array(self.typecode, [1,3]))
        self.assertEqual(a[::-1], array.array(self.typecode, [4,3,2,1,0]))
        self.assertEqual(a[::-2], array.array(self.typecode, [4,2,0]))
        self.assertEqual(a[3::-2], array.array(self.typecode, [3,1]))
        self.assertEqual(a[-100:100:], a)
        self.assertEqual(a[100:-100:-1], a[::-1])
        self.assertEqual(a[-100:100:2], array.array(self.typecode, [0,2,4]))
        self.assertEqual(a[1000:2000:2], array.array(self.typecode, []))
        self.assertEqual(a[-1000:-2000:-2], array.array(self.typecode, []))

    def test_delslice(self):
        # Extended-slice deletion, including huge steps.
        a = array.array(self.typecode, range(5))
        del a[::2]
        self.assertEqual(a, array.array(self.typecode, [1,3]))
        a = array.array(self.typecode, range(5))
        del a[1::2]
        self.assertEqual(a, array.array(self.typecode, [0,2,4]))
        a = array.array(self.typecode, range(5))
        del a[1::-2]
        self.assertEqual(a, array.array(self.typecode, [0,2,3,4]))
        a = array.array(self.typecode, range(10))
        del a[::1000]
        self.assertEqual(a, array.array(self.typecode, [1,2,3,4,5,6,7,8,9]))
        # test issue7788
        a = array.array(self.typecode, range(10))
        del a[9::1<<333]

    def test_assignment(self):
        # Extended-slice assignment, including reversed self-assignment
        # and equivalent spellings of the same simple slice.
        a = array.array(self.typecode, range(10))
        a[::2] = array.array(self.typecode, [42]*5)
        self.assertEqual(a, array.array(self.typecode, [42, 1, 42, 3, 42, 5, 42, 7, 42, 9]))
        a = array.array(self.typecode, range(10))
        a[::-4] = array.array(self.typecode, [10]*3)
        self.assertEqual(a, array.array(self.typecode, [0, 10, 2, 3, 4, 10, 6, 7, 8, 10]))
        a = array.array(self.typecode, range(4))
        a[::-1] = a
        self.assertEqual(a, array.array(self.typecode, [3, 2, 1, 0]))
        a = array.array(self.typecode, range(10))
        b = a[:]
        c = a[:]
        ins = array.array(self.typecode, range(2))
        a[2:3] = ins
        b[slice(2,3)] = ins
        c[2:3:] = ins

    def test_iterationcontains(self):
        # Iteration yields the stored values; 'in' works both ways.
        a = array.array(self.typecode, range(10))
        self.assertEqual(list(a), list(range(10)))
        b = array.array(self.typecode, [20])
        self.assertEqual(a[-1] in a, True)
        self.assertEqual(b[0] not in a, True)

    def check_overflow(self, lower, upper):
        # Helper: values at the type's bounds succeed, one past raises.
        # method-level
        a = array.array(self.typecode, [lower])
        a[0] = lower
        # List-level
        self.assertRaises(OverflowError, array.array, self.typecode, [lower-1])
        self.assertRaises(OverflowError, a.__setitem__, 0, lower-1)
        a = array.array(self.typecode, [upper])
        a[0] = upper
        self.assertRaises(OverflowError, array.array, self.typecode, [upper+1])
        self.assertRaises(OverflowError, a.__setitem__, 0, upper+1)

    def test_subclassing(self):
        # array.array supports subclassing with __slots__ and overridden
        # __getitem__.
        typecode = self.typecode
        class ExaggeratingArray(array.array):
            __slots__ = ['offset']
            def __new__(cls, typecode, data, offset):
                return array.array.__new__(cls, typecode, data)
            def __init__(self, typecode, data, offset):
                self.offset = offset
            def __getitem__(self, i):
                return array.array.__getitem__(self, i) + self.offset
        a = ExaggeratingArray(self.typecode, [3, 6, 7, 11], 4)
        self.assertEntryEqual(a[0], 7)
        self.assertRaises(AttributeError, setattr, a, "color", "blue")

    def test_frombytearray(self):
        # Construction from another array of a compatible typecode.
        a = array.array('b', range(10))
        b = array.array(self.typecode, a)
        self.assertEqual(a, b)
class IntegerNumberTest(NumberTest):
    """Checks shared by all integer array typecodes."""

    def test_type_error(self):
        # Integer arrays must reject floats on append and item set.
        a = array.array(self.typecode)
        a.append(42)
        with self.assertRaises(TypeError):
            a.append(42.0)
        with self.assertRaises(TypeError):
            a[0] = 42.0
class Intable:
    """Integer-like wrapper: convertible via __index__/__int__ and
    closed under + and -, used to exercise array's integer coercion."""

    def __init__(self, num):
        self._value = num

    def __index__(self):
        return self._value

    def __int__(self):
        return self._value

    def __add__(self, other):
        return Intable(int(self) + int(other))

    def __sub__(self, other):
        return Intable(int(self) - int(other))
class SignedNumberTest(IntegerNumberTest):
    """Checks shared by signed integer typecodes."""

    example = [-1, 0, 1, 42, 0x7f]
    smallerexample = [-1, 0, 1, 42, 0x7e]
    biggerexample = [-1, 0, 1, 43, 0x7f]
    outside = 23

    def test_overflow(self):
        # Bounds for a signed type of a.itemsize bytes:
        # [-2**(n-1), 2**(n-1) - 1]; also checked via __index__ objects.
        a = array.array(self.typecode)
        lower = -1 * int(pow(2, a.itemsize * 8 - 1))
        upper = int(pow(2, a.itemsize * 8 - 1)) - 1
        self.check_overflow(lower, upper)
        self.check_overflow(Intable(lower), Intable(upper))
class UnsignedNumberTest(IntegerNumberTest):
    """Checks shared by unsigned integer typecodes."""

    example = [0, 1, 17, 23, 42, 0xff]
    smallerexample = [0, 1, 17, 23, 42, 0xfe]
    biggerexample = [0, 1, 17, 23, 43, 0xff]
    outside = 0xaa

    def test_overflow(self):
        # Bounds for an unsigned type of a.itemsize bytes: [0, 2**n - 1].
        a = array.array(self.typecode)
        lower = 0
        upper = int(pow(2, a.itemsize * 8)) - 1
        self.check_overflow(lower, upper)
        self.check_overflow(Intable(lower), Intable(upper))

    def test_bytes_extend(self):
        # extend() accepts bytes and bytearray for unsigned typecodes.
        s = bytes(self.example)
        a = array.array(self.typecode, self.example)
        a.extend(s)
        self.assertEqual(
            a,
            array.array(self.typecode, self.example+self.example)
        )
        a = array.array(self.typecode, self.example)
        a.extend(bytearray(reversed(s)))
        self.assertEqual(
            a,
            array.array(self.typecode, self.example+self.example[::-1])
        )
class ByteTest(SignedNumberTest, unittest.TestCase):
    # 'b': signed char, at least 1 byte.
    typecode = 'b'
    minitemsize = 1
class UnsignedByteTest(UnsignedNumberTest, unittest.TestCase):
    # 'B': unsigned char, at least 1 byte.
    typecode = 'B'
    minitemsize = 1
class ShortTest(SignedNumberTest, unittest.TestCase):
    # 'h': signed short, at least 2 bytes.
    typecode = 'h'
    minitemsize = 2
class UnsignedShortTest(UnsignedNumberTest, unittest.TestCase):
    # 'H': unsigned short, at least 2 bytes.
    typecode = 'H'
    minitemsize = 2
class IntTest(SignedNumberTest, unittest.TestCase):
    # 'i': signed int; C guarantees only >= 2 bytes.
    typecode = 'i'
    minitemsize = 2
class UnsignedIntTest(UnsignedNumberTest, unittest.TestCase):
    # 'I': unsigned int; C guarantees only >= 2 bytes.
    typecode = 'I'
    minitemsize = 2
class LongTest(SignedNumberTest, unittest.TestCase):
    # 'l': signed long, at least 4 bytes.
    typecode = 'l'
    minitemsize = 4
class UnsignedLongTest(UnsignedNumberTest, unittest.TestCase):
    # 'L': unsigned long, at least 4 bytes.
    typecode = 'L'
    minitemsize = 4
class LongLongTest(SignedNumberTest, unittest.TestCase):
    # 'q': signed long long, at least 8 bytes.
    typecode = 'q'
    minitemsize = 8
class UnsignedLongLongTest(UnsignedNumberTest, unittest.TestCase):
    # 'Q': unsigned long long, at least 8 bytes.
    typecode = 'Q'
    minitemsize = 8
class FPTest(NumberTest):
    """Checks shared by floating-point array typecodes."""

    example = [-42.0, 0, 42, 1e5, -1e10]
    smallerexample = [-42.0, 0, 42, 1e5, -2e10]
    biggerexample = [-42.0, 0, 42, 1e5, 1e10]
    outside = 23

    def assertEntryEqual(self, entry1, entry2):
        # Float storage may lose precision, so compare approximately.
        self.assertAlmostEqual(entry1, entry2)

    def test_nan(self):
        # NaN compares unequal to everything, including another NaN.
        a = array.array(self.typecode, [float('nan')])
        b = array.array(self.typecode, [float('nan')])
        self.assertIs(a != b, True)
        self.assertIs(a == b, False)
        self.assertIs(a > b, False)
        self.assertIs(a >= b, False)
        self.assertIs(a < b, False)
        self.assertIs(a <= b, False)

    def test_byteswap(self):
        # byteswap() is its own inverse; for itemsize 1 it is a no-op.
        a = array.array(self.typecode, self.example)
        self.assertRaises(TypeError, a.byteswap, 42)
        if a.itemsize in (1, 2, 4, 8):
            b = array.array(self.typecode, self.example)
            b.byteswap()
            if a.itemsize==1:
                self.assertEqual(a, b)
            else:
                self.assertNotEqual(a.tobytes(), b.tobytes())
            b.byteswap()
            self.assertEqual(a, b)
class FloatTest(FPTest, unittest.TestCase):
    # 'f': C float, at least 4 bytes.
    typecode = 'f'
    minitemsize = 4
class DoubleTest(FPTest, unittest.TestCase):
    # 'd': C double, at least 8 bytes.
    typecode = 'd'
    minitemsize = 8

    def test_alloc_overflow(self):
        # Growing an array beyond sys.maxsize items must raise
        # MemoryError rather than overflow the size computation.
        from sys import maxsize
        a = array.array('d', [-1]*65536)
        try:
            a *= maxsize//65536 + 1
        except MemoryError:
            pass
        else:
            self.fail("Array of size > maxsize created - MemoryError expected")
        b = array.array('d', [ 2.71828183, 3.14159265, -1])
        try:
            b * (maxsize//3 + 1)
        except MemoryError:
            pass
        else:
            self.fail("Array of size > maxsize created - MemoryError expected")
class LargeArrayTest(unittest.TestCase):
    """Big-memory tests: operations on arrays of > 2 GiB items."""

    typecode = 'b'

    def example(self, size):
        # Builds an array of `size` + 4 items: a repeating 0..7 pattern,
        # optional 99-padding for the remainder, then the sentinel tail
        # [8, 9, 10, 11] used by the assertions below.
        base = array.array(self.typecode, [0, 1, 2, 3, 4, 5, 6, 7]) * (size // 8)
        base += array.array(self.typecode, [99]*(size % 8) + [8, 9, 10, 11])
        return base

    @support.bigmemtest(_2G, memuse=2.125)
    def test_example_data(self, size):
        example = self.example(size)
        self.assertEqual(len(example), size+4)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_access(self, size):
        # Indexing with positive/negative indices past the 2 GiB mark.
        example = self.example(size)
        self.assertEqual(example[0], 0)
        self.assertEqual(example[-(size+4)], 0)
        self.assertEqual(example[size], 8)
        self.assertEqual(example[-4], 8)
        self.assertEqual(example[size+3], 11)
        self.assertEqual(example[-1], 11)

    @support.bigmemtest(_2G, memuse=2.125+1)
    def test_slice(self, size):
        example = self.example(size)
        self.assertEqual(list(example[:4]), [0, 1, 2, 3])
        self.assertEqual(list(example[-4:]), [8, 9, 10, 11])
        part = example[1:-1]
        self.assertEqual(len(part), size+2)
        self.assertEqual(part[0], 1)
        self.assertEqual(part[-1], 10)
        del part
        part = example[::2]
        self.assertEqual(len(part), (size+5)//2)
        self.assertEqual(list(part[:4]), [0, 2, 4, 6])
        if size % 2:
            self.assertEqual(list(part[-2:]), [9, 11])
        else:
            self.assertEqual(list(part[-2:]), [8, 10])

    @support.bigmemtest(_2G, memuse=2.125)
    def test_count(self, size):
        example = self.example(size)
        self.assertEqual(example.count(0), size//8)
        self.assertEqual(example.count(11), 1)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_append(self, size):
        example = self.example(size)
        example.append(12)
        self.assertEqual(example[-1], 12)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_extend(self, size):
        example = self.example(size)
        example.extend(iter([12, 13, 14, 15]))
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11, 12, 13, 14, 15])

    @support.bigmemtest(_2G, memuse=2.125)
    def test_frombytes(self, size):
        example = self.example(size)
        example.frombytes(b'abcd')
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11] + list(b'abcd'))

    @support.bigmemtest(_2G, memuse=2.125)
    def test_fromlist(self, size):
        example = self.example(size)
        example.fromlist([12, 13, 14, 15])
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11, 12, 13, 14, 15])

    @support.bigmemtest(_2G, memuse=2.125)
    def test_index(self, size):
        example = self.example(size)
        self.assertEqual(example.index(0), 0)
        self.assertEqual(example.index(1), 1)
        self.assertEqual(example.index(7), 7)
        self.assertEqual(example.index(11), size+3)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_insert(self, size):
        example = self.example(size)
        example.insert(0, 12)
        example.insert(10, 13)
        example.insert(size+1, 14)
        self.assertEqual(len(example), size+7)
        self.assertEqual(example[0], 12)
        self.assertEqual(example[10], 13)
        self.assertEqual(example[size+1], 14)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_pop(self, size):
        example = self.example(size)
        self.assertEqual(example.pop(0), 0)
        self.assertEqual(example[0], 1)
        self.assertEqual(example.pop(size+1), 10)
        self.assertEqual(example[size+1], 11)
        self.assertEqual(example.pop(1), 2)
        self.assertEqual(example[1], 3)
        self.assertEqual(len(example), size+1)
        self.assertEqual(example.pop(), 11)
        self.assertEqual(len(example), size)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_remove(self, size):
        example = self.example(size)
        example.remove(0)
        self.assertEqual(len(example), size+3)
        self.assertEqual(example[0], 1)
        example.remove(10)
        self.assertEqual(len(example), size+2)
        self.assertEqual(example[size], 9)
        self.assertEqual(example[size+1], 11)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_reverse(self, size):
        example = self.example(size)
        example.reverse()
        self.assertEqual(len(example), size+4)
        self.assertEqual(example[0], 11)
        self.assertEqual(example[3], 8)
        self.assertEqual(example[-1], 0)
        example.reverse()
        self.assertEqual(len(example), size+4)
        self.assertEqual(list(example[:4]), [0, 1, 2, 3])
        self.assertEqual(list(example[-4:]), [8, 9, 10, 11])

    @support.bigmemtest(_2G, memuse=2.125+9)
    def test_tolist(self, size):
        # tolist() on a huge array; memuse accounts for the list copy.
        example = self.example(size)
        ls = example.tolist()
        self.assertEqual(len(ls), len(example))
        self.assertEqual(ls[:8], list(example[:8]))
        self.assertEqual(ls[-8:], list(example[-8:]))
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| true | true |
f731b7992515f911392bdff02585fa6debdfa83b | 1,504 | py | Python | apps/contrib/models/mixins.py | vicobits/django-wise | 3fdc01eabdff459b31e016f9f6d1cafc19c5a292 | [
"MIT"
] | 5 | 2020-04-11T20:11:48.000Z | 2021-03-16T23:58:01.000Z | apps/contrib/models/mixins.py | victoraguilarc/django-wise | 3fdc01eabdff459b31e016f9f6d1cafc19c5a292 | [
"MIT"
] | 5 | 2020-04-11T20:17:56.000Z | 2021-06-16T19:18:29.000Z | apps/contrib/models/mixins.py | victoraguilarc/django-wise | 3fdc01eabdff459b31e016f9f6d1cafc19c5a292 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import uuid
from django.db import models
class TimeStampedModelMixin(models.Model):
    """Timestamp extra fields.

    An abstract base class model that provides self-updating
    'created_at' and 'updated_at' fields.
    https://docs.djangoproject.com/en/2.1/ref/models/fields/#django.db.models.DateField.auto_now_add
    """

    # Set once when the row is first inserted.
    created_at = models.DateTimeField(auto_now_add=True)
    # Refreshed on every save().
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True
        # Newest records first by default.
        ordering = ['-created_at', '-updated_at']
class UUIDModelMixin(models.Model):
    """An abstract base class model that provides a unique uuid field
    (in addition to the default primary key)."""

    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)

    class Meta:
        abstract = True
class SlugModelMixin(models.Model):
    """An abstract base class model that provides a unique slug field."""

    slug = models.SlugField(max_length=255, unique=True)

    class Meta:
        abstract = True
class UUIDPrimaryKeyModelMixin(models.Model):
    """An abstract base class model whose primary key is a uuid field."""

    uuid = models.UUIDField(
        verbose_name='UUID',
        primary_key=True,
        default=uuid.uuid4,
        editable=False,
    )

    class Meta:
        abstract = True
class UUIDWithTimestampMixin(UUIDPrimaryKeyModelMixin, TimeStampedModelMixin):
    """An abstract base class model combining a uuid primary key with
    created/updated timestamps."""

    class Meta:
        abstract = True
| 25.491525 | 100 | 0.691489 |
import uuid
from django.db import models
class TimeStampedModelMixin(models.Model):
    """Abstract model with self-updating created/updated timestamps."""

    # Set once on insert.
    created_at = models.DateTimeField(auto_now_add=True)
    # Refreshed on every save().
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True
        ordering = ['-created_at', '-updated_at']
class UUIDModelMixin(models.Model):
    """Abstract model providing a unique, auto-generated uuid field."""

    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)

    class Meta:
        abstract = True
class SlugModelMixin(models.Model):
    """Abstract model providing a unique slug field."""

    slug = models.SlugField(max_length=255, unique=True)

    class Meta:
        abstract = True
class UUIDPrimaryKeyModelMixin(models.Model):
    """Abstract model using an auto-generated uuid as the primary key."""

    uuid = models.UUIDField(
        verbose_name='UUID',
        primary_key=True,
        default=uuid.uuid4,
        editable=False,
    )

    class Meta:
        abstract = True
class UUIDWithTimestampMixin(UUIDPrimaryKeyModelMixin, TimeStampedModelMixin):
    """Abstract model combining uuid primary key and timestamps."""

    class Meta:
        abstract = True
| true | true |
f731b7d1d6104774ffc3c228a51322e1885b01cf | 18,640 | py | Python | meta_dataset/analysis/select_best_model.py | jishnujayakumar/meta-dataset | fac43975e7e8931bd9c9a9171268758e26469646 | [
"Apache-2.0"
] | 643 | 2019-03-05T18:42:45.000Z | 2022-03-27T21:20:46.000Z | meta_dataset/analysis/select_best_model.py | jishnujayakumar/meta-dataset | fac43975e7e8931bd9c9a9171268758e26469646 | [
"Apache-2.0"
] | 80 | 2019-03-16T15:42:49.000Z | 2022-03-30T22:22:31.000Z | meta_dataset/analysis/select_best_model.py | jishnujayakumar/meta-dataset | fac43975e7e8931bd9c9a9171268758e26469646 | [
"Apache-2.0"
] | 126 | 2019-03-14T01:36:32.000Z | 2022-03-26T17:33:11.000Z | # coding=utf-8
# Copyright 2021 The Meta-Dataset Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
r"""A script for choosing the best variant of a model automatically.
It takes as input the root directory of all experiments, and a list of names of
directories in that root, each storing the data of an experiment with multiple
variants accross which we want to select the best. Each experiment directory
should contain a directoy named 'summaries' that hosts subdirectories for the
different runs with each one containing event files. These event files are read
to figure out which is best in terms of mean validation accuracy, and at which
step of that run this best value occurs in.
For each of the experiment directories provided, the output information is saved
in a 'best.pklz' file in that directory. This file contains a dict with keys
'best_variant', 'best_valid_acc', and 'best_update_num' where the name of the
variant is simply the name of the sub-directory corresponding to that variant.
Example directory structure (after the script is ran):
Root contains: 'Exp1', 'Exp2'.
Exp1 contains: 'checkpoints', 'summaries', and best.pklz
summaries contains: '1', '2', '3', ..., '20'
'1' contains event files
'2' contains event files
...
'20' contains event files
Sample command:
# pylint: disable=line-too-long
python -m meta_dataset.analysis.select_best_model \
--alsologtostderr \
--all_experiments_root=<experiments_root> \
--experiment_dir_basenames=baseline_imagenet_icml2019_1/3602170,baselinefinetune_imagenet_icml2019_1/3581340
# pylint: enable=line-too-long
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
from absl import logging
import numpy as np
from six.moves import range
from six.moves import zip
import six.moves.cPickle as pkl
import tensorflow.compat.v1 as tf
# Command-line flags controlling which experiments/variants are considered
# and how the winner is selected.
FLAGS = tf.flags.FLAGS

tf.flags.DEFINE_string(
    'all_experiments_root',
    '',
    'The overall experiments directory root.')

tf.flags.DEFINE_string(
    'experiment_dir_basenames', ''
    'baseline_imagenet_icml2019_1/3602170,'
    'baselinefinetune_imagenet_icml2019_1/3581340',
    'A comma-separated list of directory basenames. Adding each basename as a '
    'suffix to FLAGS.all_experiments_root forms a path that stores the data of '
    'an experiment with multiple variants accross which we want to select the '
    'best. Each such path is expected to host a directory named "summaries" '
    'that contains subdirectories for the different runs with each such '
    'subdirectory containing event files.')

# TODO(etriantafillou): This assumes the variants to omit are the same for all
# experiments that model selection will be ran for which doesn't make much
# sense. Maybe just remove this altogether?
tf.flags.DEFINE_string(
    'restrict_to_variants', '', 'A comma-separated list of '
    'variants to restrict to for model selection. This is '
    'useful for example for finding the best out of all '
    'variants that use a specific embedding or image size.')

tf.flags.DEFINE_string(
    'restrict_to_variants_by_range', '', 'A comma-separated list of '
    'two integers that represent the start and end range (both inclusive) '
    'of variant ids to restrict to.')

tf.flags.DEFINE_string(
    'description', 'best', 'The description for the output. The output will '
    'then be named as description.pklz and description.txt. For example, this '
    'can be used to reflect that some variants were omitted.')

# The following two flags assume that the parameters of the experiments have
# been logged (they attempt to read from them). If this is not the case, the
# restrict_to_variants flag should be used instead.
tf.flags.DEFINE_string(
    'restrict_to_architectures', '', 'The comma-separated names of the '
    'embedding networks to restrict to for model selection.')

tf.flags.DEFINE_enum(
    'restrict_to_pretrained_source', '', ['', 'scratch', 'imagenet'],
    'The name of a pretrained_source to '
    'restrict to for model selection.')

tf.flags.DEFINE_integer(
    'smooth_window', 1, 'rolling average window to be '
    'applied before the best model selection. '
    'Set 1 for no smoothing.')

# Summary tags (tried in order) under which validation accuracy may have
# been logged by different trainer versions.
VALIDATION_ACCURACY_TAGS = (
    'valid_acc/mean',
    'mean valid acc',
    'mean acc',  # TODO(doersch): rather unclear tag written by trainer.py
)
def get_value_from_params_dir(params_dir, param_names):
  """Gets the first found value from `param_names` in `params_dir`.

  Args:
    params_dir: A str. A directory expected to contain a params.json and/or
      params.pkl file recording an experiment's parameters.
    param_names: A sequence of str parameter names, tried in order. A single
      str is also accepted and treated as a one-element sequence.

  Returns:
    The value recorded under the first of `param_names` that is found in
    params.json (preferred) or params.pkl.

  Raises:
    ValueError: If none of `param_names` is recorded in `params_dir`.
  """
  # Some callers (e.g. get_variant_pretrained_source) pass a single name as a
  # bare string; without this normalization the loop below would iterate it
  # character-by-character and could never match a recorded parameter name.
  if isinstance(param_names, str):
    param_names = (param_names,)

  def _load_params(param_name, params_file, loader, mode):
    # Loads `params_file` with `loader` and returns the value of `param_name`;
    # raises KeyError if the key is absent, NotFoundError if the file is.
    with tf.io.gfile.GFile(params_file, mode) as f:
      params = loader(f)
    logging.info('Found params file %s', params_file)
    return params[param_name]

  for param_name in param_names:
    try:
      try:
        return _load_params(param_name, os.path.join(params_dir, 'params.json'),
                            json.load, 'r')
      except tf.errors.NotFoundError:
        logging.info('%s does not exist in %s', 'params.json', params_dir)
      try:
        return _load_params(param_name, os.path.join(params_dir, 'params.pkl'),
                            pkl.load, 'rb')
      except tf.errors.NotFoundError:
        logging.info('%s does not exist in %s', 'params.pkl', params_dir)
    except KeyError:
      # Key absent in this file; fall through to the next candidate name.
      pass
  raise ValueError('Did not find any of the following keys: %s' % param_names)
def get_paths_to_events(root_dir,
                        restrict_to_architectures,
                        restrict_to_pretrained_source,
                        restrict_to_variants=None):
  """Returns a dict that maps each variant name to its event file.

  The name of the variant is the basename of the directory where it's stored.
  Assumes the following directory organization root_dir contains a sub-directory
  for every variant where event files can be found.

  There may be more than one event file for each variant, e.g. a new one will be
  created upon restarting an experiment that was pre-empted. So later event
  files contain the summaries for larger values of 'step'. We need all of them
  for determining the global 'best'.

  Args:
    root_dir: A str. The root directory of experiments of all models variants.
    restrict_to_architectures: A list of names of architectures to restrict to
      when choosing the best variant.
    restrict_to_pretrained_source: A string. The pretrained_source to restrict
      to when choosing the best variant.
    restrict_to_variants: Optionally, a set of variant names to restrict to.

  Returns:
    A dict mapping each kept variant name to the list of paths of its
    'events.out.tfevents*' files. Variants with no event files are skipped
    (with a warning).

  Raises:
    ValueError: If no variant subdirectory survives the restrictions, or if a
      restriction is requested but a variant has no recorded params.
  """
  params_dir = os.path.join(root_dir, 'params')
  summary_dir = os.path.join(root_dir, 'summaries')
  logging.info('Looking for parameters in params_dir: %s', params_dir)
  logging.info('Looking for summaries in summary_dir: %s', summary_dir)
  def get_variant_architecture(name):
    """Return the architecture of the given variant if recorded; o/w None."""
    variant_params_dir = os.path.join(params_dir, name)
    architecture = get_value_from_params_dir(
        variant_params_dir,
        (
            '_gin.Learner.embedding_fn',
            # The following are for backwards compatibility.
            '_gin.Trainer.embedding_network',
            '_gin.LearnerConfig.embedding_network',
        ))
    return architecture
  def get_variant_pretrained_source(name):
    """Return the pretrained src of the given variant if recorded; o/w None."""
    variant_params_dir = os.path.join(params_dir, name)
    pretrained_source = get_value_from_params_dir(
        variant_params_dir, '_gin.Trainer.pretrained_source')
    if not pretrained_source:
      # Backwards compatibility.
      pretrained_source = get_value_from_params_dir(
          variant_params_dir, '_gin.LearnerConfig.pretrained_source')
    return pretrained_source
  def keep_variant(name):
    """Determine if the variant in directory name should be considered."""
    value_error_msg = (
        'Requested to restrict to an architecture or '
        'pretrained_source but the given experiment does not '
        'have its params recorded. Looked in: {}'.format(params_dir))
    if restrict_to_architectures:
      architecture = get_variant_architecture(name)
      if architecture is None:
        raise ValueError(value_error_msg)
    # Empty restriction list means every architecture is acceptable.
    valid_architecture = (not restrict_to_architectures or
                          architecture in restrict_to_architectures)
    if restrict_to_pretrained_source:
      pretrained_source = get_variant_pretrained_source(name)
      if pretrained_source is None:
        raise ValueError(value_error_msg)
    valid_pretrained_source = (
        not restrict_to_pretrained_source or
        pretrained_source == restrict_to_pretrained_source)
    valid_variant_name = True
    if restrict_to_variants is not None:
      valid_variant_name = name in restrict_to_variants
    return (valid_architecture and valid_pretrained_source and
            valid_variant_name)
  variant_names = [
      fname for fname in tf.io.gfile.listdir(summary_dir)
      if tf.io.gfile.isdir(os.path.join(summary_dir, fname))
  ]
  if not variant_names:
    # Maybe there are no variants, and we are already in the directory that
    # contains the summaries. In this case, we consider that the current
    # directory (.) is the only variant.
    variant_names = ['.']
  # Further filter variant names based on the given restrictions.
  variant_names = [name for name in variant_names if keep_variant(name)]
  if not variant_names:
    raise ValueError('Found no subdirectories in {}. Was expecting a '
                     'subdirectory per variant.'.format(summary_dir))
  variant_paths = [
      os.path.join(summary_dir, variant_dir) for variant_dir in variant_names
  ]
  event_paths = {}
  for variant_path, variant_name in zip(variant_paths, variant_names):
    event_filenames = [
        f_name for f_name in tf.io.gfile.listdir(variant_path)
        if f_name.startswith('events.out.tfevents')
    ]
    # Variants without event files are skipped rather than treated as errors.
    if len(event_filenames) < 1:
      logging.warn('Skipping empty variant %s.', variant_path)
      logging.info(
          'Was expecting at least one event file '
          'in directory %s. Instead, found %d.', variant_path,
          len(event_filenames))
      continue
    event_paths[variant_name] = [
        os.path.join(variant_path, event_filename)
        for event_filename in event_filenames
    ]
  logging.info('Found event files for variants: %s', list(event_paths.keys()))
  return event_paths
# TODO(crisnv): add smooth_type='uniform' that defines the smooth policy
def moving_average(x, smooth_window):
  """Smoothes `x` with a centered rolling average of width `smooth_window`.

  The normalized averaging filter is cached on the function object so repeated
  calls with the same window size reuse it.

  Args:
    x: The array to smooth.
    smooth_window: An integer that defines the neighborhood to be used in
      smoothing.

  Returns:
    A numpy array of the same length as `x` holding the smoothed values.
  """
  cached = getattr(moving_average, 'conv_filter', None)
  if cached is None or moving_average.conv_filter_size != smooth_window:
    moving_average.conv_filter = np.full((smooth_window,), 1.0 / smooth_window)
    moving_average.conv_filter_size = smooth_window
  # Reflect-pad so the 'valid' convolution preserves the stream length, with
  # asymmetric padding when smooth_window is even.
  left_pad = smooth_window // 2
  right_pad = smooth_window - 1 - left_pad
  padded = np.pad(x, (left_pad, right_pad), 'reflect')
  return np.convolve(padded, moving_average.conv_filter, mode='valid')
def extract_best_from_event_file(event_path, smooth_window, log_details=False):
  """Returns the best accuracy and the step it occurs in in the given events.

  This searches the summaries written in a given event file, which may be only a
  subset of the total summaries of a run, since the summaries of a run are
  sometimes split into multiple event files.

  Args:
    event_path: A string. The path to an event file.
    smooth_window: An integer that defines the neighborhood to be used in
      smoothing before the argmax (use <=1 for no smoothing)
    log_details: A boolean. Whether to log details regarding skipped event paths
      in which locating the validation accuracy tag failed.

  Returns:
    A (best_acc, best_step) tuple, or (0, 0) if the file is unreadable
    (DataLossError) or contains no validation accuracy summaries.
  """
  steps, valid_accs = [], []
  try:
    for event in tf.train.summary_iterator(event_path):
      step = event.step
      for value in event.summary.value:
        # Substring match against the known validation-accuracy tag names.
        if any(
            valid_tag in value.tag for valid_tag in VALIDATION_ACCURACY_TAGS):
          steps.append(step)
          valid_accs.append(value.simple_value)
  except tf.errors.DataLossError:
    # Truncated/corrupt event files (e.g. from preemption) are skipped.
    if log_details:
      tf.logging.info(
          'Omitting events from event_path {} because '
          'tf.train.summary_iterator(event_path) failed.'.format(event_path))
    return 0, 0
  if not valid_accs:
    # Could happen if there is no DataLossError above but for some reason
    # there is no validation accuracy tag found in the summary values.
    tf.logging.info(
        'Did not find any validation accuracy tags ({}) in event_path {}'
        .format(' or '.join(VALIDATION_ACCURACY_TAGS), event_path))
    return 0, 0
  if smooth_window > 1:
    valid_accs = moving_average(valid_accs, smooth_window)
  argmax_ind = np.argmax(valid_accs)
  best_acc = valid_accs[argmax_ind]
  best_step = steps[argmax_ind]
  if log_details:
    tf.logging.info('Successfully read event_path {} with best_acc {}'.format(
        event_path, best_acc))
  return best_acc, best_step
def extract_best_from_variant(event_paths, smooth_window):
  """Returns the best accuracy and the step it occurs in for the given run.

  Args:
    event_paths: A list of strings. The event files of the given run.
    smooth_window: An integer that defines the neighborhood to be used in
      smoothing before the argmax (use <=1 for no smoothing)

  Raises:
    RuntimeError: No 'valid' event file for the given variant ('valid' here
      refers to an event file that has a validation accuracy tag).
  """
  best_acc, best_step = -1, -1
  # Take the per-file maximum over every event file of the run.
  for path in event_paths:
    acc, step = extract_best_from_event_file(path, smooth_window)
    if acc > best_acc:
      best_acc, best_step = acc, step
  if best_acc <= 0:
    raise RuntimeError('Something went wrong with the summary event reading.')
  return best_acc, best_step
def main(argv):
  """Selects and records the best variant for each requested experiment root.

  For every experiment directory (FLAGS.all_experiments_root joined with each
  basename in FLAGS.experiment_dir_basenames), finds the variant with the
  highest smoothed validation accuracy and writes the result as both a
  description.pklz pickle and a description.txt summary in that directory.
  """
  del argv
  experiment_paths = [
      os.path.join(FLAGS.all_experiments_root, basename)
      for basename in FLAGS.experiment_dir_basenames.split(',')
  ]
  # Perform model selection for each provided experiment root.
  for root_experiment_dir in experiment_paths:
    stars_string = '\n**************************************\n'
    architecture_string = ''
    if FLAGS.restrict_to_architectures:
      architecture_string = ' out of the {} variants'.format(
          FLAGS.restrict_to_architectures)
    logging.info('%sSelecting the best variant for: %s%s.%s', stars_string,
                 root_experiment_dir, architecture_string, stars_string)
    if FLAGS.restrict_to_variants_by_range and FLAGS.restrict_to_variants:
      raise ValueError('Please provide only one of '
                       'FLAGS.restrict_to_variants_by_range and '
                       'FLAGS.restrict_to_variants, not both.')
    restrict_to_variants = None
    if FLAGS.restrict_to_variants_by_range:
      start, end = FLAGS.restrict_to_variants_by_range.split(',')
      start, end = int(start), int(end)
      # Variant directories are named by integer id; compare as strings.
      restrict_to_variants = set(
          [str(variant_id) for variant_id in range(start, end + 1)])
    if FLAGS.restrict_to_variants:
      restrict_to_variants = set(FLAGS.restrict_to_variants.split(','))
    restrict_to_architectures = []
    if FLAGS.restrict_to_architectures:
      restrict_to_architectures = FLAGS.restrict_to_architectures.split(',')
    smooth_window = FLAGS.smooth_window
    event_paths = get_paths_to_events(
        root_experiment_dir,
        restrict_to_architectures,
        FLAGS.restrict_to_pretrained_source,
        restrict_to_variants=restrict_to_variants)
    # Read the event file of each variant to find the highest mean validation
    # accuracy reached with it.
    best_variant = ''
    best_valid_acc = -1
    best_step = -1
    for variant_name, event_path in event_paths.items():
      best_valid_acc_, best_step_ = extract_best_from_variant(
          event_path, smooth_window)
      if best_valid_acc_ > best_valid_acc:
        best_variant = variant_name
        best_valid_acc = best_valid_acc_
        best_step = best_step_
    output_dict = {
        'best_variant': best_variant,
        'best_valid_acc': best_valid_acc,
        'best_update_num': best_step
    }
    # Create a more informative description if necessary.
    description = FLAGS.description
    if FLAGS.restrict_to_architectures and FLAGS.description == 'best':
      description += '_{}'.format(FLAGS.restrict_to_architectures)
    if (FLAGS.restrict_to_pretrained_source and FLAGS.description == 'best'):
      if FLAGS.restrict_to_pretrained_source == 'scratch':
        description += '_trained_from_scratch'
      else:
        description += '_pretrained_on_{}'.format(
            FLAGS.restrict_to_pretrained_source)
    if FLAGS.smooth_window > 1:
      description += '_smoothed_by_window_{}'.format(smooth_window)
    output_path_pklz = os.path.join(root_experiment_dir,
                                    '{}.pklz'.format(description))
    with tf.io.gfile.GFile(output_path_pklz, 'wb') as f:
      pkl.dump(output_dict, f, protocol=pkl.HIGHEST_PROTOCOL)
    # Also write this info as a .txt file for easier reading.
    output_path_txt = os.path.join(root_experiment_dir,
                                   '{}.txt'.format(description))
    with tf.io.gfile.GFile(output_path_txt, 'w') as f:
      f.write(
          'best_variant: {}\nbest_valid_acc: {}\nbest_update_num: {}\n'.format(
              best_variant, best_valid_acc, best_step))
    logging.info(
        'Best variant: %s. Best valid acc: %s. Best update num: %d. '
        'Just wrote this info to %s and %s', best_variant, best_valid_acc,
        best_step, output_path_pklz, output_path_txt)
# Script entry point: enable INFO logging, then run model selection.
if __name__ == '__main__':
  logging.set_verbosity(logging.INFO)
  tf.app.run(main)
| 39.82906 | 110 | 0.712554 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
from absl import logging
import numpy as np
from six.moves import range
from six.moves import zip
import six.moves.cPickle as pkl
import tensorflow.compat.v1 as tf
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_string(
    'all_experiments_root',
    '',
    'The overall experiments directory root.')
tf.flags.DEFINE_string(
    'experiment_dir_basenames', ''
    'baseline_imagenet_icml2019_1/3602170,'
    'baselinefinetune_imagenet_icml2019_1/3581340',
    'A comma-separated list of directory basenames. Adding each basename as a '
    'suffix to FLAGS.all_experiments_root forms a path that stores the data of '
    'an experiment with multiple variants accross which we want to select the '
    'best. Each such path is expected to host a directory named "summaries" '
    'that contains subdirectories for the different runs with each such '
    'subdirectory containing event files.')
# TODO(etriantafillou): This assumes the variants to omit are the same for all
# experiments that model selection will be ran for which doesn't make much
# sense. Maybe just remove this altogether?
tf.flags.DEFINE_string(
    'restrict_to_variants', '', 'A comma-separated list of '
    'variants to restrict to for model selection. This is '
    'useful for example for finding the best out of all '
    'variants that use a specific embedding or image size.')
tf.flags.DEFINE_string(
    'restrict_to_variants_by_range', '', 'A comma-separated list of '
    'two integers that represent the start and end range (both inclusive) '
    'of variant ids to restrict to.')
tf.flags.DEFINE_string(
    'description', 'best', 'The description for the output. The output will '
    'then be named as description.pklz and description.txt. For example, this '
    'can be used to reflect that some variants were omitted.')
# The following two flags assume that the parameters of the experiments have
# been logged (they attempt to read from them). If this is not the case, the
# restrict_to_variants flag should be used instead.
tf.flags.DEFINE_string(
    'restrict_to_architectures', '', 'The comma-separated names of the '
    'embedding networks to restrict to for model selection.')
tf.flags.DEFINE_enum(
    'restrict_to_pretrained_source', '', ['', 'scratch', 'imagenet'],
    'The name of a pretrained_source to '
    'restrict to for model selection.')
tf.flags.DEFINE_integer(
    'smooth_window', 1, 'rolling average window to be '
    'applied before the best model selection. '
    'Set 1 for no smoothing.')
# Summary tags treated as reporting validation accuracy; a summary value whose
# tag contains any of these substrings is collected during event-file reading.
VALIDATION_ACCURACY_TAGS = (
    'valid_acc/mean',
    'mean valid acc',
    'mean acc',  # TODO(doersch): rather unclear tag written by trainer.py
)
def get_value_from_params_dir(params_dir, param_names):
  """Gets the first found value from `param_names` in `params_dir`.

  Tries params.json first, then params.pkl, for each name in `param_names`.

  NOTE(review): callers sometimes pass a bare string as `param_names`; the
  loop below then iterates it character-by-character — confirm intended.

  Raises:
    ValueError: If none of `param_names` is found.
  """
  def _load_params(param_name, params_file, loader, mode):
    # Loads `params_file` with `loader` and returns the value of `param_name`.
    with tf.io.gfile.GFile(params_file, mode) as f:
      params = loader(f)
    logging.info('Found params file %s', params_file)
    return params[param_name]
  for param_name in param_names:
    try:
      try:
        return _load_params(param_name, os.path.join(params_dir, 'params.json'),
                            json.load, 'r')
      except tf.errors.NotFoundError:
        logging.info('%s does not exist in %s', 'params.json', params_dir)
      try:
        return _load_params(param_name, os.path.join(params_dir, 'params.pkl'),
                            pkl.load, 'rb')
      except tf.errors.NotFoundError:
        logging.info('%s does not exist in %s', 'params.pkl', params_dir)
    except KeyError:
      # Key absent in this file; try the next candidate name.
      pass
  raise ValueError('Did not find any of the following keys: %s' % param_names)
def get_paths_to_events(root_dir,
                        restrict_to_architectures,
                        restrict_to_pretrained_source,
                        restrict_to_variants=None):
  """Returns a dict that maps each variant name to its event file paths.

  The name of the variant is the basename of the directory where it's stored.
  `root_dir` is expected to contain a 'summaries' sub-directory with one
  sub-directory per variant; each variant may have several event files (e.g.
  after preemption restarts), all of which are returned.

  Args:
    root_dir: A str. The root directory of experiments of all model variants.
    restrict_to_architectures: A list of architecture names to restrict to.
    restrict_to_pretrained_source: A string. The pretrained_source to restrict
      to when choosing the best variant.
    restrict_to_variants: Optionally, a set of variant names to restrict to.

  Raises:
    ValueError: If no variant subdirectory survives the restrictions, or if a
      restriction is requested but a variant has no recorded params.
  """
  params_dir = os.path.join(root_dir, 'params')
  summary_dir = os.path.join(root_dir, 'summaries')
  logging.info('Looking for parameters in params_dir: %s', params_dir)
  logging.info('Looking for summaries in summary_dir: %s', summary_dir)
  def get_variant_architecture(name):
    """Return the architecture of the given variant if recorded; o/w None."""
    variant_params_dir = os.path.join(params_dir, name)
    architecture = get_value_from_params_dir(
        variant_params_dir,
        (
            '_gin.Learner.embedding_fn',
            # The following are for backwards compatibility.
            '_gin.Trainer.embedding_network',
            '_gin.LearnerConfig.embedding_network',
        ))
    return architecture
  def get_variant_pretrained_source(name):
    """Return the pretrained src of the given variant if recorded; o/w None."""
    variant_params_dir = os.path.join(params_dir, name)
    pretrained_source = get_value_from_params_dir(
        variant_params_dir, '_gin.Trainer.pretrained_source')
    if not pretrained_source:
      # Backwards compatibility.
      pretrained_source = get_value_from_params_dir(
          variant_params_dir, '_gin.LearnerConfig.pretrained_source')
    return pretrained_source
  def keep_variant(name):
    """Determine if the variant in directory `name` should be considered."""
    value_error_msg = (
        'Requested to restrict to an architecture or '
        'pretrained_source but the given experiment does not '
        'have its params recorded. Looked in: {}'.format(params_dir))
    if restrict_to_architectures:
      architecture = get_variant_architecture(name)
      if architecture is None:
        raise ValueError(value_error_msg)
    # Empty restriction list means every architecture is acceptable.
    valid_architecture = (not restrict_to_architectures or
                          architecture in restrict_to_architectures)
    if restrict_to_pretrained_source:
      pretrained_source = get_variant_pretrained_source(name)
      if pretrained_source is None:
        raise ValueError(value_error_msg)
    valid_pretrained_source = (
        not restrict_to_pretrained_source or
        pretrained_source == restrict_to_pretrained_source)
    valid_variant_name = True
    if restrict_to_variants is not None:
      valid_variant_name = name in restrict_to_variants
    return (valid_architecture and valid_pretrained_source and
            valid_variant_name)
  variant_names = [
      fname for fname in tf.io.gfile.listdir(summary_dir)
      if tf.io.gfile.isdir(os.path.join(summary_dir, fname))
  ]
  if not variant_names:
    # Maybe there are no variants, and we are already in the directory that
    # contains the summaries. In this case, we consider that the current
    # directory (.) is the only variant.
    variant_names = ['.']
  # Further filter variant names based on the given restrictions.
  variant_names = [name for name in variant_names if keep_variant(name)]
  if not variant_names:
    raise ValueError('Found no subdirectories in {}. Was expecting a '
                     'subdirectory per variant.'.format(summary_dir))
  variant_paths = [
      os.path.join(summary_dir, variant_dir) for variant_dir in variant_names
  ]
  event_paths = {}
  for variant_path, variant_name in zip(variant_paths, variant_names):
    event_filenames = [
        f_name for f_name in tf.io.gfile.listdir(variant_path)
        if f_name.startswith('events.out.tfevents')
    ]
    # Variants with no event files are skipped rather than treated as errors.
    if len(event_filenames) < 1:
      logging.warn('Skipping empty variant %s.', variant_path)
      logging.info(
          'Was expecting at least one event file '
          'in directory %s. Instead, found %d.', variant_path,
          len(event_filenames))
      continue
    event_paths[variant_name] = [
        os.path.join(variant_path, event_filename)
        for event_filename in event_filenames
    ]
  logging.info('Found event files for variants: %s', list(event_paths.keys()))
  return event_paths
# TODO(crisnv): add smooth_type='uniform' that defines the smooth policy
def moving_average(x, smooth_window):
  """Returns `x` smoothed with a rolling average of width `smooth_window`.

  The normalized averaging filter is cached as an attribute on the function
  object so repeated calls with the same window size reuse it.
  """
  conv_filter = getattr(moving_average, 'conv_filter', None)
  if conv_filter is None or (moving_average.conv_filter_size != smooth_window):
    moving_average.conv_filter = np.ones((smooth_window,)) / smooth_window
    moving_average.conv_filter_size = smooth_window
  # if smooth_window is even, pad accordingly to keep stream size
  x = np.pad(x, (smooth_window // 2, smooth_window - 1 - (smooth_window // 2)),
             'reflect')
  return np.convolve(x, moving_average.conv_filter, mode='valid')
def extract_best_from_event_file(event_path, smooth_window, log_details=False):
  """Returns the best validation accuracy and its step in the given event file.

  Args:
    event_path: A string. The path to an event file.
    smooth_window: An integer smoothing window applied before the argmax
      (use <=1 for no smoothing).
    log_details: A boolean. Whether to log details about skipped event paths.

  Returns:
    A (best_acc, best_step) tuple, or (0, 0) if the file is unreadable
    (DataLossError) or contains no validation accuracy summaries.
  """
  steps, valid_accs = [], []
  try:
    for event in tf.train.summary_iterator(event_path):
      step = event.step
      for value in event.summary.value:
        # Substring match against the known validation-accuracy tag names.
        if any(
            valid_tag in value.tag for valid_tag in VALIDATION_ACCURACY_TAGS):
          steps.append(step)
          valid_accs.append(value.simple_value)
  except tf.errors.DataLossError:
    # Truncated/corrupt event files are skipped.
    if log_details:
      tf.logging.info(
          'Omitting events from event_path {} because '
          'tf.train.summary_iterator(event_path) failed.'.format(event_path))
    return 0, 0
  if not valid_accs:
    # Could happen if there is no DataLossError above but for some reason
    # there is no validation accuracy tag found in the summary values.
    tf.logging.info(
        'Did not find any validation accuracy tags ({}) in event_path {}'
        .format(' or '.join(VALIDATION_ACCURACY_TAGS), event_path))
    return 0, 0
  if smooth_window > 1:
    valid_accs = moving_average(valid_accs, smooth_window)
  argmax_ind = np.argmax(valid_accs)
  best_acc = valid_accs[argmax_ind]
  best_step = steps[argmax_ind]
  if log_details:
    tf.logging.info('Successfully read event_path {} with best_acc {}'.format(
        event_path, best_acc))
  return best_acc, best_step
def extract_best_from_variant(event_paths, smooth_window):
  """Returns the best accuracy and the step it occurs in for the given run.

  Args:
    event_paths: A list of strings. The event files of the given run.
    smooth_window: An integer smoothing window applied before the argmax
      (use <=1 for no smoothing).

  Raises:
    RuntimeError: If no event file yielded a positive best accuracy.
  """
  best_step = best_acc = -1
  for event_path in event_paths:
    best_acc_, best_step_ = extract_best_from_event_file(
        event_path, smooth_window)
    if best_acc_ > best_acc:
      best_acc = best_acc_
      best_step = best_step_
  if best_acc <= 0:
    raise RuntimeError('Something went wrong with the summary event reading.')
  return best_acc, best_step
def main(argv):
  """Selects and records the best variant for each requested experiment root.

  Writes the winner as both a description.pklz pickle and a description.txt
  summary inside each experiment directory.
  """
  del argv
  experiment_paths = [
      os.path.join(FLAGS.all_experiments_root, basename)
      for basename in FLAGS.experiment_dir_basenames.split(',')
  ]
  # Perform model selection for each provided experiment root.
  for root_experiment_dir in experiment_paths:
    stars_string = '\n**************************************\n'
    architecture_string = ''
    if FLAGS.restrict_to_architectures:
      architecture_string = ' out of the {} variants'.format(
          FLAGS.restrict_to_architectures)
    logging.info('%sSelecting the best variant for: %s%s.%s', stars_string,
                 root_experiment_dir, architecture_string, stars_string)
    if FLAGS.restrict_to_variants_by_range and FLAGS.restrict_to_variants:
      raise ValueError('Please provide only one of '
                       'FLAGS.restrict_to_variants_by_range and '
                       'FLAGS.restrict_to_variants, not both.')
    restrict_to_variants = None
    if FLAGS.restrict_to_variants_by_range:
      start, end = FLAGS.restrict_to_variants_by_range.split(',')
      start, end = int(start), int(end)
      # Variant directories are named by integer id; compare as strings.
      restrict_to_variants = set(
          [str(variant_id) for variant_id in range(start, end + 1)])
    if FLAGS.restrict_to_variants:
      restrict_to_variants = set(FLAGS.restrict_to_variants.split(','))
    restrict_to_architectures = []
    if FLAGS.restrict_to_architectures:
      restrict_to_architectures = FLAGS.restrict_to_architectures.split(',')
    smooth_window = FLAGS.smooth_window
    event_paths = get_paths_to_events(
        root_experiment_dir,
        restrict_to_architectures,
        FLAGS.restrict_to_pretrained_source,
        restrict_to_variants=restrict_to_variants)
    # Read the event file of each variant to find the highest mean validation
    # accuracy reached with it.
    best_variant = ''
    best_valid_acc = -1
    best_step = -1
    for variant_name, event_path in event_paths.items():
      best_valid_acc_, best_step_ = extract_best_from_variant(
          event_path, smooth_window)
      if best_valid_acc_ > best_valid_acc:
        best_variant = variant_name
        best_valid_acc = best_valid_acc_
        best_step = best_step_
    output_dict = {
        'best_variant': best_variant,
        'best_valid_acc': best_valid_acc,
        'best_update_num': best_step
    }
    # Create a more informative description if necessary.
    description = FLAGS.description
    if FLAGS.restrict_to_architectures and FLAGS.description == 'best':
      description += '_{}'.format(FLAGS.restrict_to_architectures)
    if (FLAGS.restrict_to_pretrained_source and FLAGS.description == 'best'):
      if FLAGS.restrict_to_pretrained_source == 'scratch':
        description += '_trained_from_scratch'
      else:
        description += '_pretrained_on_{}'.format(
            FLAGS.restrict_to_pretrained_source)
    if FLAGS.smooth_window > 1:
      description += '_smoothed_by_window_{}'.format(smooth_window)
    output_path_pklz = os.path.join(root_experiment_dir,
                                    '{}.pklz'.format(description))
    with tf.io.gfile.GFile(output_path_pklz, 'wb') as f:
      pkl.dump(output_dict, f, protocol=pkl.HIGHEST_PROTOCOL)
    # Also write this info as a .txt file for easier reading.
    output_path_txt = os.path.join(root_experiment_dir,
                                   '{}.txt'.format(description))
    with tf.io.gfile.GFile(output_path_txt, 'w') as f:
      f.write(
          'best_variant: {}\nbest_valid_acc: {}\nbest_update_num: {}\n'.format(
              best_variant, best_valid_acc, best_step))
    logging.info(
        'Best variant: %s. Best valid acc: %s. Best update num: %d. '
        'Just wrote this info to %s and %s', best_variant, best_valid_acc,
        best_step, output_path_pklz, output_path_txt)
# Script entry point: enable INFO logging, then run model selection.
if __name__ == '__main__':
  logging.set_verbosity(logging.INFO)
  tf.app.run(main)
| true | true |
f731b83aa10a8eac18891fcac6e5c14b63d9aa57 | 355 | py | Python | nikebot/__version__.py | olegaobini/NikeBot | fec0943c9dd215d05403bedfc69ef8a8ac0b228c | [
"MIT"
] | null | null | null | nikebot/__version__.py | olegaobini/NikeBot | fec0943c9dd215d05403bedfc69ef8a8ac0b228c | [
"MIT"
] | null | null | null | nikebot/__version__.py | olegaobini/NikeBot | fec0943c9dd215d05403bedfc69ef8a8ac0b228c | [
"MIT"
] | null | null | null | __logo__ = """
NikeBot
"""
__title__ = 'nikebotandroid'
__description__ = 'A retail automation bot for the Nike mobile app'
__url__ = 'https: // github.com/olegaobini/NikeBot'
__version__ = '0.0.1'
__debug_mode__ = False
__author__ = 'Olega Obini'
__author_email__ = 'obiniolega@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2022 Olega Obini'
| 27.307692 | 67 | 0.752113 | __logo__ = """
NikeBot
"""
__title__ = 'nikebotandroid'
__description__ = 'A retail automation bot for the Nike mobile app'
__url__ = 'https: // github.com/olegaobini/NikeBot'
__version__ = '0.0.1'
__debug_mode__ = False
__author__ = 'Olega Obini'
__author_email__ = 'obiniolega@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2022 Olega Obini'
| true | true |
f731b843168340ad5c3d5f4674b0c14240c39751 | 5,587 | py | Python | gremlin-python/src/main/python/tests/process/test_traversal.py | helpspace-co/tinkerpop | a31e8f402237de3491397d4f8bbd6a9761cd9068 | [
"Apache-2.0"
] | 1,425 | 2016-06-13T06:08:39.000Z | 2022-03-28T09:02:43.000Z | gremlin-python/src/main/python/tests/process/test_traversal.py | helpspace-co/tinkerpop | a31e8f402237de3491397d4f8bbd6a9761cd9068 | [
"Apache-2.0"
] | 1,049 | 2016-06-10T10:25:59.000Z | 2022-03-30T11:25:44.000Z | gremlin-python/src/main/python/tests/process/test_traversal.py | helpspace-co/tinkerpop | a31e8f402237de3491397d4f8bbd6a9761cd9068 | [
"Apache-2.0"
] | 732 | 2016-06-13T20:53:51.000Z | 2022-03-30T06:49:29.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
__author__ = 'Marko A. Rodriguez (http://markorodriguez.com)'
from pytest import fail
from gremlin_python.structure.graph import Graph
from gremlin_python.process.anonymous_traversal import traversal
from gremlin_python.process.traversal import P
from gremlin_python.process.traversal import Binding, Bindings
from gremlin_python.process.graph_traversal import __
class TestTraversal(object):
    """Unit tests for GraphTraversal bytecode construction and semantics."""

    def test_bytecode(self):
        """Bytecode should record source/step instructions and bindings."""
        g = traversal().withGraph(Graph())
        bytecode = g.V().out("created").bytecode
        assert 0 == len(bytecode.bindings.keys())
        assert 0 == len(bytecode.source_instructions)
        assert 2 == len(bytecode.step_instructions)
        assert "V" == bytecode.step_instructions[0][0]
        assert "out" == bytecode.step_instructions[1][0]
        assert "created" == bytecode.step_instructions[1][1]
        assert 1 == len(bytecode.step_instructions[0])
        assert 2 == len(bytecode.step_instructions[1])
        ##
        bytecode = g.withSack(1).E().groupCount().by("weight").bytecode
        assert 0 == len(bytecode.bindings.keys())
        assert 1 == len(bytecode.source_instructions)
        assert "withSack" == bytecode.source_instructions[0][0]
        assert 1 == bytecode.source_instructions[0][1]
        assert 3 == len(bytecode.step_instructions)
        assert "E" == bytecode.step_instructions[0][0]
        assert "groupCount" == bytecode.step_instructions[1][0]
        assert "by" == bytecode.step_instructions[2][0]
        assert "weight" == bytecode.step_instructions[2][1]
        assert 1 == len(bytecode.step_instructions[0])
        assert 1 == len(bytecode.step_instructions[1])
        assert 2 == len(bytecode.step_instructions[2])
        ##
        bytecode = g.V(Bindings.of('a', [1, 2, 3])) \
            .out(Bindings.of('b', 'created')) \
            .where(__.in_(Bindings.of('c', 'created'), Bindings.of('d', 'knows')) \
            .count().is_(Bindings.of('e', P.gt(2)))).bytecode
        assert 5 == len(bytecode.bindings.keys())
        assert [1,2,3] == bytecode.bindings['a']
        assert 'created' == bytecode.bindings['b']
        assert 'created' == bytecode.bindings['c']
        assert 'knows' == bytecode.bindings['d']
        assert P.gt(2) == bytecode.bindings['e']
        assert Binding('b', 'created') == bytecode.step_instructions[1][1]
        assert 'binding[b=created]' == str(bytecode.step_instructions[1][1])
        assert isinstance(hash(bytecode.step_instructions[1][1]), int)

    def test_P(self):
        """Predicate composition should respect the order of operations."""
        # verify that the order of operations is respected
        assert "and(eq(a),lt(b))" == str(P.eq("a").and_(P.lt("b")))
        assert "and(or(lt(b),gt(c)),neq(d))" == str(P.lt("b").or_(P.gt("c")).and_(P.neq("d")))
        assert "and(or(lt(b),gt(c)),or(neq(d),gte(e)))" == str(
            P.lt("b").or_(P.gt("c")).and_(P.neq("d").or_(P.gte("e"))))

    def test_anonymous_traversal(self):
        """Anonymous traversals should produce bytecode without a source."""
        bytecode = __.__(1).bytecode
        assert 0 == len(bytecode.bindings.keys())
        assert 0 == len(bytecode.source_instructions)
        assert 1 == len(bytecode.step_instructions)
        assert "inject" == bytecode.step_instructions[0][0]
        assert 1 == bytecode.step_instructions[0][1]
        ##
        bytecode = __.start().bytecode
        assert 0 == len(bytecode.bindings.keys())
        assert 0 == len(bytecode.source_instructions)
        assert 0 == len(bytecode.step_instructions)

    def test_clone_traversal(self):
        """clone() should yield independent traversals with copied bytecode."""
        g = traversal().withGraph(Graph())
        original = g.V().out("created")
        clone = original.clone().out("knows")
        cloneClone = clone.clone().out("created")
        assert 2 == len(original.bytecode.step_instructions)
        assert 3 == len(clone.bytecode.step_instructions)
        assert 4 == len(cloneClone.bytecode.step_instructions)
        # Mutating one traversal must not affect its clones.
        original.has("person", "name", "marko")
        clone.V().out()
        assert 3 == len(original.bytecode.step_instructions)
        assert 5 == len(clone.bytecode.step_instructions)
        assert 4 == len(cloneClone.bytecode.step_instructions)

    def test_no_sugar_for_magic_methods(self):
        """Dunder names must not be resolvable via attribute sugar."""
        g = traversal().withGraph(Graph())
        t = g.V().age
        assert 2 == len(t.bytecode.step_instructions)
        try:
            t = g.V().__len__
            fail("can't do sugar with magic")
        except AttributeError as err:
            assert str(err) == 'Python magic methods or keys starting with double underscore cannot be used for Gremlin sugar - prefer values(__len__)'

    def test_enforce_anonymous_child_traversal(self):
        """Child traversals must be anonymous (__), not spawned from g."""
        g = traversal().withGraph(Graph())
        g.V(0).addE("self").to(__.V(1))
        try:
            g.V(0).addE("self").to(g.V(1))
            # The original used `assert false`, which raised a NameError rather
            # than a failed assertion; use pytest's fail() explicitly.
            fail("should not allow a child traversal spawned from g")
        except TypeError:
            pass
| 42.325758 | 151 | 0.642742 |
__author__ = 'Marko A. Rodriguez (http://markorodriguez.com)'
from pytest import fail
from gremlin_python.structure.graph import Graph
from gremlin_python.process.anonymous_traversal import traversal
from gremlin_python.process.traversal import P
from gremlin_python.process.traversal import Binding, Bindings
from gremlin_python.process.graph_traversal import __
class TestTraversal(object):
    """Unit tests for Gremlin traversal construction and bytecode."""

    def test_bytecode(self):
        """Bindings, source and step instructions accumulate correctly."""
        g = traversal().withGraph(Graph())
        bytecode = g.V().out("created").bytecode
        assert 0 == len(bytecode.bindings.keys())
        assert 0 == len(bytecode.source_instructions)
        assert 2 == len(bytecode.step_instructions)
        assert "V" == bytecode.step_instructions[0][0]
        assert "out" == bytecode.step_instructions[1][0]
        assert "created" == bytecode.step_instructions[1][1]
        assert 1 == len(bytecode.step_instructions[0])
        assert 2 == len(bytecode.step_instructions[1])
        # withSack() lands in source_instructions, not step_instructions
        bytecode = g.withSack(1).E().groupCount().by("weight").bytecode
        assert 0 == len(bytecode.bindings.keys())
        assert 1 == len(bytecode.source_instructions)
        assert "withSack" == bytecode.source_instructions[0][0]
        assert 1 == bytecode.source_instructions[0][1]
        assert 3 == len(bytecode.step_instructions)
        assert "E" == bytecode.step_instructions[0][0]
        assert "groupCount" == bytecode.step_instructions[1][0]
        assert "by" == bytecode.step_instructions[2][0]
        assert "weight" == bytecode.step_instructions[2][1]
        assert 1 == len(bytecode.step_instructions[0])
        assert 1 == len(bytecode.step_instructions[1])
        assert 2 == len(bytecode.step_instructions[2])
        # Bindings.of() entries are collected into bytecode.bindings
        bytecode = g.V(Bindings.of('a', [1, 2, 3])) \
            .out(Bindings.of('b', 'created')) \
            .where(__.in_(Bindings.of('c', 'created'), Bindings.of('d', 'knows')) \
            .count().is_(Bindings.of('e', P.gt(2)))).bytecode
        assert 5 == len(bytecode.bindings.keys())
        assert [1,2,3] == bytecode.bindings['a']
        assert 'created' == bytecode.bindings['b']
        assert 'created' == bytecode.bindings['c']
        assert 'knows' == bytecode.bindings['d']
        assert P.gt(2) == bytecode.bindings['e']
        assert Binding('b', 'created') == bytecode.step_instructions[1][1]
        assert 'binding[b=created]' == str(bytecode.step_instructions[1][1])
        assert isinstance(hash(bytecode.step_instructions[1][1]), int)

    def test_P(self):
        """Predicate composition renders in and/or prefix form."""
        assert "and(eq(a),lt(b))" == str(P.eq("a").and_(P.lt("b")))
        assert "and(or(lt(b),gt(c)),neq(d))" == str(P.lt("b").or_(P.gt("c")).and_(P.neq("d")))
        assert "and(or(lt(b),gt(c)),or(neq(d),gte(e)))" == str(
            P.lt("b").or_(P.gt("c")).and_(P.neq("d").or_(P.gte("e"))))

    def test_anonymous_traversal(self):
        """Anonymous ``__`` traversals compile to step-only bytecode."""
        bytecode = __.__(1).bytecode
        assert 0 == len(bytecode.bindings.keys())
        assert 0 == len(bytecode.source_instructions)
        assert 1 == len(bytecode.step_instructions)
        assert "inject" == bytecode.step_instructions[0][0]
        assert 1 == bytecode.step_instructions[0][1]
        bytecode = __.start().bytecode
        assert 0 == len(bytecode.bindings.keys())
        assert 0 == len(bytecode.source_instructions)
        assert 0 == len(bytecode.step_instructions)

    def test_clone_traversal(self):
        """clone() yields independent traversals."""
        g = traversal().withGraph(Graph())
        original = g.V().out("created")
        clone = original.clone().out("knows")
        cloneClone = clone.clone().out("created")
        assert 2 == len(original.bytecode.step_instructions)
        assert 3 == len(clone.bytecode.step_instructions)
        assert 4 == len(cloneClone.bytecode.step_instructions)
        original.has("person", "name", "marko")
        clone.V().out()
        assert 3 == len(original.bytecode.step_instructions)
        assert 5 == len(clone.bytecode.step_instructions)
        assert 4 == len(cloneClone.bytecode.step_instructions)

    def test_no_sugar_for_magic_methods(self):
        """Dunder names must not be treated as values() sugar."""
        g = traversal().withGraph(Graph())
        t = g.V().age
        assert 2 == len(t.bytecode.step_instructions)
        try:
            t = g.V().__len__
            fail("can't do sugar with magic")
        except AttributeError as err:
            assert str(err) == 'Python magic methods or keys starting with double underscore cannot be used for Gremlin sugar - prefer values(__len__)'

    def test_enforce_anonymous_child_traversal(self):
        """Child traversals spawned from a source must be rejected."""
        g = traversal().withGraph(Graph())
        g.V(0).addE("self").to(__.V(1))
        try:
            g.V(0).addE("self").to(g.V(1))
            # was `assert false`: `false` is undefined, so a NameError
            # (not AssertionError) would mask the real failure
            assert False
        except TypeError:
            pass
| true | true |
f731b86cd45adfb32aa7de95f369a6218bdbc653 | 42,404 | py | Python | tests/test_data.py | sdrobert/pydrobert-pytorch | 7abad0dbb2e80b4267aebcee492aa9fd7d83ea3f | [
"Apache-2.0"
] | 14 | 2019-01-04T21:19:55.000Z | 2021-01-06T16:01:03.000Z | tests/test_data.py | sdrobert/pydrobert-pytorch | 7abad0dbb2e80b4267aebcee492aa9fd7d83ea3f | [
"Apache-2.0"
] | 6 | 2021-04-17T23:34:57.000Z | 2022-02-11T00:49:41.000Z | tests/test_data.py | sdrobert/pydrobert-pytorch | 7abad0dbb2e80b4267aebcee492aa9fd7d83ea3f | [
"Apache-2.0"
] | 1 | 2020-05-19T08:03:43.000Z | 2020-05-19T08:03:43.000Z | # Copyright 2021 Sean Robertson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from itertools import repeat
from io import StringIO
import pytest
import torch
import torch.utils.data
import pydrobert.torch.data as data
from pydrobert.torch import INDEX_PAD_VALUE
@pytest.mark.cpu
@pytest.mark.parametrize("left", [0, 1, 100])
@pytest.mark.parametrize("right", [0, 1, 100])
@pytest.mark.parametrize("T", [1, 5, 10])
def test_extract_window(left, right, T):
    """Each extracted window has width ``1 + left + right`` and is
    edge-padded with the first/last frame wherever it overruns the
    ``T``-frame signal."""
    signal = torch.arange(T).view(-1, 1).expand(-1, 10)
    width = 1 + left + right
    for t in range(T):
        window = data.extract_window(signal, t, left, right)
        assert tuple(window.shape) == (width, 10)
        n_left = max(0, left - t)
        n_right = max(0, t + right + 1 - T)
        if n_left:
            # frames before the signal start replicate frame 0
            assert torch.all(window[:n_left] == torch.tensor([0]))
        if n_right:
            # frames past the signal end replicate frame T - 1
            assert torch.all(window[-n_right:] == torch.tensor([T - 1]))
        middle = (
            torch.arange(t - left + n_left, t + right - n_right + 1)
            .view(-1, 1)
            .expand(-1, 10)
        )
        assert torch.all(window[n_left : width - n_right] == middle)
@pytest.mark.cpu
@pytest.mark.parametrize("num_utts", [1, 2, 10])
@pytest.mark.parametrize("file_prefix", ["prefix_", ""])
@pytest.mark.parametrize("eos", [1000, None])
@pytest.mark.parametrize("sos", [2000, None])
@pytest.mark.parametrize("feat_dtype", [torch.float, torch.int])
def test_valid_spect_data_set(
    temp_dir, num_utts, file_prefix, populate_torch_dir, sos, eos, feat_dtype
):
    """SpectDataSet indexes a populated directory correctly: prefix
    filtering, optional ali/ref, sos/eos token insertion, and
    subset_ids restriction."""
    feats, _, _, _, _, utt_ids = populate_torch_dir(
        temp_dir,
        num_utts,
        file_prefix=file_prefix,
        include_ali=False,
        include_ref=False,
        feat_dtype=feat_dtype,
    )
    # note that this'll just resave the same features if there's no file
    # prefix. If there is, these ought to be ignored by the data set
    populate_torch_dir(
        temp_dir, num_utts, include_ali=False, include_ref=False, feat_dtype=feat_dtype
    )
    # a subdirectory inside feat/ must be ignored when listing utterances
    if not os.path.isdir(os.path.join(temp_dir, "feat", "fake")):
        os.makedirs(os.path.join(temp_dir, "feat", "fake"))
    torch.save(
        torch.randint(100, (10, 5), dtype=feat_dtype),
        os.path.join(temp_dir, "feat", "fake", file_prefix + "fake.pt"),
    )
    data_set = data.SpectDataSet(temp_dir, file_prefix=file_prefix, eos=eos)
    assert not data_set.has_ali and not data_set.has_ref
    assert len(utt_ids) == len(data_set.utt_ids)
    assert all(utt_a == utt_b for (utt_a, utt_b) in zip(utt_ids, data_set.utt_ids))
    assert all(
        ali_b is None and ref_b is None and torch.allclose(feat_a, feat_b)
        for (feat_a, (feat_b, ali_b, ref_b)) in zip(feats, data_set)
    )
    # repopulate with alignments and references
    feats, alis, refs, _, _, utt_ids = populate_torch_dir(
        temp_dir, num_utts, file_prefix=file_prefix, feat_dtype=feat_dtype
    )
    # the data set is expected to prepend [sos, -1, -1] / append
    # [eos, -1, -1] rows to each (R, 3) reference
    if sos is not None:
        sos_sym = torch.full((3,), -1, dtype=torch.long)
        sos_sym[0] = sos
        sos_sym = sos_sym.unsqueeze(0)
        refs = [torch.cat([sos_sym, x]) for x in refs]
    if eos is not None:
        eos_sym = torch.full((3,), -1, dtype=torch.long)
        eos_sym[0] = eos
        eos_sym = eos_sym.unsqueeze(0)
        refs = [torch.cat([x, eos_sym]) for x in refs]
    data_set = data.SpectDataSet(temp_dir, file_prefix=file_prefix, sos=sos, eos=eos)
    assert data_set.has_ali and data_set.has_ref
    assert len(utt_ids) == len(data_set.utt_ids)
    assert all(utt_a == utt_b for (utt_a, utt_b) in zip(utt_ids, data_set.utt_ids))
    assert all(
        torch.all(ali_a == ali_b)
        and torch.all(ref_a == ref_b)
        and feat_a.dtype == feat_b.dtype
        and torch.allclose(feat_a, feat_b)
        for ((feat_a, ali_a, ref_a), (feat_b, ali_b, ref_b)) in zip(
            zip(feats, alis, refs), data_set
        )
    )
    # subset_ids restricts the data set to the named utterances
    subset_ids = data_set.utt_ids[: num_utts // 2]
    data_set = data.SpectDataSet(
        temp_dir, file_prefix=file_prefix, subset_ids=set(subset_ids), sos=sos, eos=eos
    )
    assert all(utt_a == utt_b for (utt_a, utt_b) in zip(subset_ids, data_set.utt_ids))
    assert all(
        torch.all(ali_a == ali_b)
        and torch.all(ref_a == ref_b)
        and torch.allclose(feat_a, feat_b)
        for ((feat_a, ali_a, ref_a), (feat_b, ali_b, ref_b)) in zip(
            zip(feats[: num_utts // 2], alis[: num_utts // 2], refs[: num_utts // 2]),
            data_set,
        )
    )
@pytest.mark.cpu
def test_spect_data_set_warnings(temp_dir):
    """Utterances missing either feats or ali are dropped; with
    ``warn_on_missing`` (the default) a UserWarning names each one."""
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    # 'a' has feats only, 'c' has ali only, 'b' has both
    torch.save(torch.rand(3, 3), os.path.join(feat_dir, "a.pt"))
    torch.save(torch.rand(4, 3), os.path.join(feat_dir, "b.pt"))
    torch.save(torch.randint(10, (4,), dtype=torch.long), os.path.join(ali_dir, "b.pt"))
    torch.save(torch.randint(10, (5,), dtype=torch.long), os.path.join(ali_dir, "c.pt"))
    data_set = data.SpectDataSet(temp_dir, warn_on_missing=False)
    assert data_set.has_ali
    assert data_set.utt_ids == ("b",)
    with pytest.warns(UserWarning) as warnings:
        data_set = data.SpectDataSet(temp_dir)
    assert len(warnings) == 2
    assert any(str(x.message) == "Missing ali for uttid: 'a'" for x in warnings)
    assert any(str(x.message) == "Missing feat for uttid: 'c'" for x in warnings)
def test_spect_data_write_pdf(temp_dir, device):
    """write_pdf stores a CPU float tensor under pdfs/<utt>.pt, accepts
    either an utterance id or an index, and honours ``pdfs_dir``."""
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    os.makedirs(feat_dir)
    torch.save(torch.rand(3, 3), os.path.join(feat_dir, "a.pt"))
    data_set = data.SpectDataSet(temp_dir)
    z = torch.randint(10, (4, 5), dtype=torch.long)
    # a CUDA long tensor must come back as a CPU FloatTensor on load
    if device == "cuda":
        data_set.write_pdf("b", z.cuda())
    else:
        data_set.write_pdf("b", z)
    zp = torch.load(os.path.join(temp_dir, "pdfs", "b.pt"))
    assert isinstance(zp, torch.FloatTensor)
    assert torch.allclose(zp, z.float())
    # integer index 0 refers to utterance 'a'
    data_set.write_pdf(0, torch.rand(10, 4))
    assert os.path.exists(os.path.join(temp_dir, "pdfs", "a.pt"))
    data_set.write_pdf("c", z, pdfs_dir=os.path.join(temp_dir, "foop"))
    assert os.path.exists(os.path.join(temp_dir, "foop", "c.pt"))
@pytest.mark.parametrize("eos", [None, -1])
@pytest.mark.parametrize("sos", [None, -2])
def test_spect_data_write_hyp(temp_dir, device, sos, eos):
    """write_hyp stores a CPU long tensor under hyp/<utt>.pt, stripping
    any leading sos / trailing eos rows the data set was built with."""
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    os.makedirs(feat_dir)
    torch.save(torch.rand(3, 3), os.path.join(feat_dir, "a.pt"))
    data_set = data.SpectDataSet(temp_dir, sos=sos, eos=eos)
    z = torch.randint(10, (4, 3), dtype=torch.float)
    # surround z with full-sos/full-eos rows; write_hyp should remove them
    zz = z
    if sos:
        zz = torch.cat([torch.full_like(zz, sos), zz])
    if eos:
        zz = torch.cat([zz, torch.full_like(z, eos)])
    if device == "cuda":
        data_set.write_hyp("b", zz.cuda())
    else:
        data_set.write_hyp("b", zz)
    zp = torch.load(os.path.join(temp_dir, "hyp", "b.pt"))
    assert isinstance(zp, torch.LongTensor)
    assert torch.all(zp == z.long())
    # integer index 0 refers to utterance 'a'
    data_set.write_hyp(0, torch.randint(10, (11, 3)))
    assert os.path.exists(os.path.join(temp_dir, "hyp", "a.pt"))
    data_set.write_hyp("c", z, hyp_dir=os.path.join(temp_dir, "foop"))
    assert os.path.exists(os.path.join(temp_dir, "foop", "c.pt"))
@pytest.mark.cpu
@pytest.mark.parametrize("eos", [None, 10000])
def test_spect_data_set_validity(temp_dir, eos):
    """validate_spect_data_set rejects each kind of malformed tensor
    (wrong dtype, rank, width, length, boundaries) and, when asked to
    fix, repairs the repairable ones with a warning."""
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    ref_dir = os.path.join(temp_dir, "ref")
    feats_a_pt = os.path.join(feat_dir, "a.pt")
    feats_b_pt = os.path.join(feat_dir, "b.pt")
    ali_a_pt = os.path.join(ali_dir, "a.pt")
    ali_b_pt = os.path.join(ali_dir, "b.pt")
    ref_a_pt = os.path.join(ref_dir, "a.pt")
    ref_b_pt = os.path.join(ref_dir, "b.pt")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    os.makedirs(ref_dir)
    # start from a fully valid layout
    torch.save(torch.rand(10, 4), feats_a_pt)
    torch.save(torch.rand(4, 4), feats_b_pt)
    torch.save(torch.randint(10, (10,), dtype=torch.long), ali_a_pt)
    torch.save(torch.randint(10, (4,), dtype=torch.long), ali_b_pt)
    torch.save(
        torch.cat(
            [
                torch.randint(10, (11, 1), dtype=torch.long),
                torch.full((11, 2), -1, dtype=torch.long),
            ],
            -1,
        ),
        ref_a_pt,
    )
    torch.save(torch.tensor([[0, 3, 4], [1, 1, 2]]), ref_b_pt)
    data_set = data.SpectDataSet(temp_dir, eos=eos)
    data.validate_spect_data_set(data_set)
    # each save below corrupts one file, then checks the error/repair
    torch.save(torch.rand(4, 4).long(), feats_b_pt)
    with pytest.raises(ValueError, match="not the same tensor type"):
        data.validate_spect_data_set(data_set)
    torch.save(
        torch.rand(
            4,
        ),
        feats_b_pt,
    )
    with pytest.raises(ValueError, match="does not have two dimensions"):
        data.validate_spect_data_set(data_set)
    torch.save(torch.rand(4, 3), feats_b_pt)
    with pytest.raises(ValueError, match="has second dimension of size 3.*"):
        data.validate_spect_data_set(data_set)
    torch.save(torch.rand(4, 4), feats_b_pt)
    data.validate_spect_data_set(data_set)
    torch.save(torch.randint(10, (4,)).int(), ali_b_pt)
    with pytest.raises(ValueError, match="is not a long tensor"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)  # will fix bad type
    data.validate_spect_data_set(data_set)  # fine after correction
    torch.save(torch.randint(10, (4, 1), dtype=torch.long), ali_b_pt)
    with pytest.raises(ValueError, match="does not have one dimension"):
        data.validate_spect_data_set(data_set)
    torch.save(torch.randint(10, (3,), dtype=torch.long), ali_b_pt)
    with pytest.raises(ValueError, match="does not have the same first"):
        data.validate_spect_data_set(data_set)
    torch.save(torch.randint(10, (4,), dtype=torch.long), ali_b_pt)
    data.validate_spect_data_set(data_set)
    torch.save(torch.Tensor([[0, 1, 2]]).int(), ref_b_pt)
    with pytest.raises(ValueError, match="is not a long tensor"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)  # convert to long
    data.validate_spect_data_set(data_set)
    torch.save(torch.tensor([[0, -1, 2], [1, 1, 2]]), ref_b_pt)
    with pytest.raises(ValueError, match="invalid boundaries"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)  # will remove end bound
    data.validate_spect_data_set(data_set)
    torch.save(torch.tensor([[0, 0, 1], [1, 3, 5]]), ref_b_pt)
    with pytest.raises(ValueError, match="invalid boundaries"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)  # will trim 5 to 4
    data.validate_spect_data_set(data_set)
    torch.save(torch.tensor([[0, 0, 1], [1, 4, 5]]), ref_b_pt)
    with pytest.raises(ValueError, match="invalid boundaries"):
        data.validate_spect_data_set(data_set, True)  # will not trim b/c causes s == e
    torch.save(torch.tensor([1, 2, 3]), ref_b_pt)
    # mixing 1D and 2D refs within a data set is invalid
    with pytest.raises(ValueError, match="were 2D"):
        data.validate_spect_data_set(data_set)
    torch.save(torch.tensor([10, 4, 2, 5]), ref_a_pt)
    data.validate_spect_data_set(data_set)
@pytest.mark.gpu
def test_validate_spect_data_set_cuda(temp_dir):
    """Tensors saved on a CUDA device are invalid; with fix=True they
    are moved to CPU (with a warning) and then validate cleanly."""
    torch.manual_seed(29)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    ref_dir = os.path.join(temp_dir, "ref")
    feats_pt = os.path.join(feat_dir, "a.pt")
    ali_pt = os.path.join(ali_dir, "a.pt")
    ref_pt = os.path.join(ref_dir, "a.pt")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    os.makedirs(ref_dir)
    torch.save(torch.rand(10, 5), feats_pt)
    torch.save(torch.randint(10, (10,), dtype=torch.long), ali_pt)
    torch.save(torch.tensor([1, 2, 3]), ref_pt)
    data_set = data.SpectDataSet(temp_dir)
    data.validate_spect_data_set(data_set)
    # a single CUDA tensor is enough to fail validation
    torch.save(torch.rand(10, 5).cuda(), feats_pt)
    with pytest.raises(ValueError, match="cuda"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)  # to CPU
    data.validate_spect_data_set(data_set)
    # all three on CUDA: same repair path
    torch.save(torch.rand(10, 5).cuda(), feats_pt)
    torch.save(torch.randint(10, (10,), dtype=torch.long).cuda(), ali_pt)
    torch.save(torch.tensor([1, 2, 3]).cuda(), ref_pt)
    with pytest.raises(ValueError, match="cuda"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)  # to CPU
    data.validate_spect_data_set(data_set)
@pytest.mark.cpu
@pytest.mark.parametrize("processes", [0, 2])
def test_read_trn(processes):
    """read_trn parses NIST trn lines (including alternate groups in
    braces) into (utt_id, transcript) pairs, serially or with worker
    processes."""
    trn = StringIO()
    trn.write(
        """\
here is a simple example (a)
nothing should go wrong (b)
"""
    )
    trn.seek(0)
    act = data.read_trn(trn, processes=processes, chunk_size=1)
    assert act == [
        ("a", ["here", "is", "a", "simple", "example"]),
        ("b", ["nothing", "should", "go", "wrong"]),
    ]
    # seek(0) then write: the longer second document overwrites the first
    trn.seek(0)
    trn.write(
        """\
here is an { example /with} some alternates (a)
} and /here/ is {something really / {really}} (stupid) { ignore this (b)
(c)
a11 (d)
"""
    )
    trn.seek(0)
    # warn=False: malformed brace groups are tolerated, not warned about
    act = data.read_trn(trn, warn=False, processes=processes)
    assert act == [
        (
            "a",
            [
                "here",
                "is",
                "an",
                ([["example"], ["with"]], -1, -1),
                "some",
                "alternates",
            ],
        ),
        (
            "b",
            [
                "}",
                "and",
                "/here/",
                "is",
                ([["something", "really"], [[["really"]]]], -1, -1),
                "(stupid)",
            ],
        ),
        ("c", []),
        ("d", ["a11"]),
    ]
@pytest.mark.cpu
def test_read_ctm():
    """read_ctm sorts tokens, converts (start, duration) into
    (start, end), honours (utt, channel) remapping, and rejects
    negative start times."""
    contents = """\
utt1 A 0.0 0.1 a
utt1 A 0.5 0.1 c ;; ctm files should always be ordered, but we tolerate
;; different orders
utt2 B 0.1 1.0 d
utt1 B 0.4 0.3 b
;; utt2 A 0.2 1.0 f
"""
    ctm = StringIO()
    ctm.write(contents)
    ctm.seek(0)
    transcripts = data.read_ctm(ctm)
    assert transcripts == [
        ("utt1", [("a", 0.0, 0.1), ("b", 0.4, 0.7), ("c", 0.5, 0.6)]),
        ("utt2", [("d", 0.1, 1.1)]),
    ]
    ctm.seek(0)
    # (utterance, channel) pairs may be remapped to new utterance ids
    remap = {("utt1", "A"): "foo", ("utt1", "B"): "bar", ("utt2", "B"): "baz"}
    transcripts = data.read_ctm(ctm, remap)
    assert transcripts == [
        ("foo", [("a", 0.0, 0.1), ("c", 0.5, 0.6)]),
        ("baz", [("d", 0.1, 1.1)]),
        ("bar", [("b", 0.4, 0.7)]),
    ]
    with pytest.raises(ValueError):
        # a negative start time must raise
        ctm.write("utt3 -0.1 1.0 woop\n")
        ctm.seek(0)
        data.read_ctm(ctm)
@pytest.mark.cpu
def test_write_trn():
    """write_trn renders transcripts as trn lines, flattening timing
    tuples and writing nested alternates with braces and slashes."""
    trn = StringIO()
    transcripts = [
        ("a", ["again", "a", "simple", "example"]),
        ("b", ["should", "get", "right", "no", "prob"]),
    ]
    data.write_trn(transcripts, trn)
    trn.seek(0)
    assert (
        """\
again a simple example (a)
should get right no prob (b)
"""
        == trn.read()
    )
    trn.seek(0)
    trn.truncate()
    # timing info is dropped; nested alternates become brace groups;
    # the utterance id is written verbatim (including spaces)
    transcripts = [
        (
            " c ",
            [
                ("unnecessary", -1, -1),
                ([["complexity", [["can"]]], ["also", "be"]], 10, 4),
                "handled",
            ],
        ),
        ("d", []),
        ("e", ["a11"]),
    ]
    data.write_trn(transcripts, trn)
    trn.seek(0)
    assert (
        """\
unnecessary { complexity { can } / also be } handled ( c )
(d)
a11 (e)
"""
        == trn.read()
    )
@pytest.mark.cpu
def test_write_ctm():
    """write_ctm sorts tokens by utterance and start time, writes
    (start, duration) columns, supports per-utterance (id, channel)
    remapping, and rejects tokens with end < start."""
    ctm = StringIO()
    transcripts = [
        (
            "c",
            [
                ("here", 0.1, 0.2),
                ("are", 0.3, 0.5),
                ("some", 0.2, 0.4),
                ("unordered", 0.5, 0.5),
                ("tokens", 10.0, 1000),
            ],
        ),
        ("b", []),
        ("a", [("hullo", 0.0, 10.0111)]),
    ]
    data.write_ctm(transcripts, ctm)
    ctm.seek(0)
    # default channel is 'A'; empty transcripts produce no lines
    assert (
        """\
a A 0.0 10.0111 hullo
c A 0.1 0.1 here
c A 0.2 0.2 some
c A 0.3 0.2 are
c A 0.5 0.0 unordered
c A 10.0 990.0 tokens
"""
        == ctm.read()
    )
    ctm.seek(0)
    ctm.truncate()
    data.write_ctm(
        transcripts,
        ctm,
        {"a": ("last", "A"), "b": ("middle", "B"), "c": ("first", "C")},
    )
    ctm.seek(0)
    assert (
        """\
first C 0.1 0.1 here
first C 0.2 0.2 some
first C 0.3 0.2 are
first C 0.5 0.0 unordered
first C 10.0 990.0 tokens
last A 0.0 10.0111 hullo
"""
        == ctm.read()
    )
    # token ('b', 0.2, 0.1) has end < start -> invalid
    transcripts.append(("foo", [("a", 0.1, 0.2), ("b", 0.2, 0.1)]))
    with pytest.raises(ValueError):
        data.write_ctm(transcripts, ctm)
@pytest.mark.cpu
@pytest.mark.parametrize(
    "transcript,token2id,unk,skip_frame_times,exp",
    [
        # empty transcript -> empty (0, 3) tensor
        ([], None, None, False, torch.LongTensor(0, 3)),
        # skip_frame_times drops the start/end columns entirely
        (
            [1, 2, 3, 4],
            None,
            None,
            True,
            torch.LongTensor([1, 2, 3, 4]),
        ),
        # token2id maps strings; (tok, s, e) tuples keep their frame times
        (
            [1, ("a", 4, 10), "a", 3],
            {"a": 2},
            None,
            False,
            torch.LongTensor([[1, -1, -1], [2, 4, 10], [2, -1, -1], [3, -1, -1]]),
        ),
        # unmapped tokens fall back to the unk id
        (
            ["foo", 1, "bar"],
            {"foo": 0, "baz": 3},
            "baz",
            False,
            torch.LongTensor([[0, -1, -1], [3, -1, -1], [3, -1, -1]]),
        ),
    ],
)
def test_transcript_to_token(transcript, token2id, unk, skip_frame_times, exp):
    """transcript_to_token converts token/tuple transcripts into long
    tensors, mapping strings via token2id and falling back to unk."""
    act = data.transcript_to_token(
        transcript, token2id, unk=unk, skip_frame_times=skip_frame_times
    )
    assert torch.all(exp == act)
    # an unmappable token with no unk must raise
    transcript = ["foo"] + transcript
    with pytest.raises(Exception):
        data.transcript_to_token(transcript, token2id)
@pytest.mark.cpu
def test_transcript_to_token_frame_shift():
    """Second->frame conversion: start/end times are scaled by
    frames-per-second (1000 / frame_shift_ms) and rounded to ints."""
    trans = [(12, 0.5, 0.81), 420, (1, 2.1, 2.2), (3, 2.8, 2.815), (12, 2.9, 3.0025)]
    # 10 ms frame shift -> 100 frames per second; half-frames round up,
    # quarter-frames round down
    expected = torch.LongTensor(
        [[12, 50, 81], [420, -1, -1], [1, 210, 220], [3, 280, 282], [12, 290, 300]]
    )
    assert torch.allclose(data.transcript_to_token(trans, frame_shift_ms=10), expected)
    # raw samples at 8 kHz: one "frame" per sample, so
    # frames/msec = 1000 / 8000 = 1 / 8
    expected = torch.LongTensor(
        [
            [12, 4000, 6480],
            [420, -1, -1],
            [1, 16800, 17600],
            [3, 22400, 22520],
            [12, 23200, 24020],
        ]
    )
    assert torch.allclose(
        data.transcript_to_token(trans, frame_shift_ms=1 / 8), expected
    )
@pytest.mark.cpu
@pytest.mark.parametrize(
    "tok,id2token,exp",
    [
        # empty (0, 3) tensor -> empty transcript
        (torch.LongTensor(0, 3), None, []),
        # negative frame times collapse entries to bare ids
        (
            torch.LongTensor([[1, -1, -1], [2, -1, -1], [3, -1, -1], [4, -1, -1]]),
            None,
            [1, 2, 3, 4],
        ),
        # id2token maps known ids to strings; unknown ids pass through
        (
            torch.LongTensor([[1, 3, 4], [3, 4, 5], [2, -1, -1]]),
            {1: "a", 2: "b"},
            [("a", 3, 4), (3, 4, 5), "b"],
        ),
        # 1D tensors and (N, 1) tensors are both treated as bare ids
        (torch.tensor(range(10)), None, list(range(10))),
        (torch.tensor(range(5)).unsqueeze(-1), None, list(range(5))),
    ],
)
def test_token_to_transcript(tok, id2token, exp):
    """token_to_transcript converts token tensors back into
    token/tuple transcripts, the inverse of transcript_to_token."""
    act = data.token_to_transcript(tok, id2token)
    assert exp == act
@pytest.mark.cpu
def test_token_to_transcript_frame_shift():
    """Frame->second conversion: frame indices are divided by
    frames-per-second (1000 / frame_shift_ms)."""
    tok = torch.LongTensor([[1, -1, 10], [2, 1000, 2000], [3, 12345, 678910]])
    # 10 ms shift: divide frame numbers by 100; a negative start drops
    # the segment times altogether
    assert data.token_to_transcript(tok, frame_shift_ms=10) == [
        1,
        (2, 10.0, 20.0),
        (3, 123.45, 6789.10),
    ]
    # raw samples at 8 kHz: 8 samples per msec, so divide by 8000
    expected = [1, (2, 1000 / 8000, 2000 / 8000), (3, 12345 / 8000, 678910 / 8000)]
    assert data.token_to_transcript(tok, frame_shift_ms=1 / 8) == expected
@pytest.mark.cpu
@pytest.mark.parametrize("reverse", [True, False])
def test_context_window_data_set(temp_dir, reverse):
    """ContextWindowDataSet yields per-frame windows of shape
    (T, 1 + left + right, F), edge-padded, optionally time-reversed."""
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    os.makedirs(feat_dir)
    a = torch.rand(2, 10)
    torch.save(a, os.path.join(feat_dir, "a.pt"))
    data_set = data.ContextWindowDataSet(temp_dir, 1, 1, reverse=reverse)
    windowed, _ = data_set[0]
    assert tuple(windowed.shape) == (2, 3, 10)
    if reverse:
        # [[a1, a0, a0], [a1, a1, a0]]
        assert torch.allclose(a[0], windowed[0, 1:])
        assert torch.allclose(a[1], windowed[0, 0])
        assert torch.allclose(a[0], windowed[1, 2])
        assert torch.allclose(a[1], windowed[1, :2])
    else:
        # [[a0, a0, a1], [a0, a1, a1]]
        assert torch.allclose(a[0], windowed[0, :2])
        assert torch.allclose(a[1], windowed[0, 2])
        assert torch.allclose(a[0], windowed[1, 0])
        assert torch.allclose(a[1], windowed[1, 1:])
@pytest.mark.cpu
def test_epoch_random_sampler(temp_dir):
    """EpochRandomSampler gives a distinct permutation per epoch, is
    reproducible per (base_seed, epoch), and falls back to the global
    torch seed when no base_seed is given."""
    data_source = torch.utils.data.TensorDataset(torch.arange(100))
    sampler = data.EpochRandomSampler(data_source, base_seed=1)
    samples_ep0 = tuple(sampler)
    samples_ep1 = tuple(sampler)
    # different permutations across epochs, each a full permutation
    assert samples_ep0 != samples_ep1
    assert sorted(samples_ep0) == list(range(100))
    assert sorted(samples_ep1) == list(range(100))
    assert samples_ep0 == tuple(sampler.get_samples_for_epoch(0))
    assert samples_ep1 == tuple(sampler.get_samples_for_epoch(1))
    # init_epoch does not change what each epoch number produces
    sampler = data.EpochRandomSampler(data_source, init_epoch=10, base_seed=1)
    assert samples_ep0 == tuple(sampler.get_samples_for_epoch(0))
    assert samples_ep1 == tuple(sampler.get_samples_for_epoch(1))
    # should be reproducible if we set torch manual seed
    torch.manual_seed(5)
    sampler = data.EpochRandomSampler(data_source)
    samples_ep0 = tuple(sampler)
    torch.manual_seed(5)
    sampler = data.EpochRandomSampler(data_source)
    assert samples_ep0 == tuple(sampler)
@pytest.mark.cpu
@pytest.mark.parametrize(
    "feat_sizes",
    [((3, 5, 4), (4, 5, 4), (1, 5, 4)), ((2, 10, 5),) * 10],
    ids=["short", "long"],
)
@pytest.mark.parametrize("include_ali", [True, False])
def test_context_window_seq_to_batch(feat_sizes, include_ali):
    """context_window_seq_to_batch concatenates windowed feats (and
    alis, if present) along the frame axis; missing alis yield None."""
    torch.manual_seed(1)
    feats = tuple(torch.rand(*x) for x in feat_sizes)
    if include_ali:
        alis = tuple(torch.randint(10, (x[0],), dtype=torch.long) for x in feat_sizes)
    else:
        alis = repeat(None)
    seq = zip(feats, alis)
    batch_feats, batch_ali = data.context_window_seq_to_batch(seq)
    assert torch.allclose(torch.cat(feats), batch_feats)
    if include_ali:
        assert torch.all(torch.cat(alis) == batch_ali)
    else:
        assert batch_ali is None
@pytest.mark.cpu
@pytest.mark.parametrize("include_ali", [True, False])
@pytest.mark.parametrize(
    "include_ref,include_frame_shift", [(True, True), (True, False), (False, None)]
)
@pytest.mark.parametrize("batch_first", [True, False])
def test_spect_seq_to_batch(include_ali, include_ref, batch_first, include_frame_shift):
    """spect_seq_to_batch sorts utterances by descending feat length,
    pads feats with 0 and ali/ref with INDEX_PAD_VALUE, and returns
    None for absent ali/ref."""
    torch.manual_seed(1)
    feat_sizes = tuple(
        torch.randint(1, 30, (1,)).item()
        for _ in range(torch.randint(3, 10, (1,)).item())
    )
    feats = tuple(torch.randn(x, 5) for x in feat_sizes)
    if include_ali:
        alis = tuple(torch.randint(100, (x,), dtype=torch.long) for x in feat_sizes)
    else:
        alis = repeat(None)
    if include_ref:
        ref_sizes = tuple(
            torch.randint(1, 30, (1,)).item() for _ in range(len(feat_sizes))
        )
        # refs are (R, 3) with frame times, or (R,) without
        extra_dim = (3,) if include_frame_shift else tuple()
        refs = tuple(
            torch.randint(100, (x,) + extra_dim, dtype=torch.long) for x in ref_sizes
        )
    else:
        ref_sizes = repeat(None)
        refs = repeat(None)
    (
        batch_feats,
        batch_ali,
        batch_ref,
        batch_feat_sizes,
        batch_ref_sizes,
    ) = data.spect_seq_to_batch(zip(feats, alis, refs), batch_first=batch_first)
    # reference order: descending by feat size, mirroring the batch
    feat_sizes, feats, alis, refs, ref_sizes = zip(
        *sorted(zip(feat_sizes, feats, alis, refs, ref_sizes), key=lambda x: -x[0])
    )
    assert torch.all(torch.tensor(feat_sizes) == batch_feat_sizes)
    if not batch_first:
        batch_feats = batch_feats.transpose(0, 1)
        if include_ali:
            batch_ali = batch_ali.transpose(0, 1)
        if include_ref:
            batch_ref = batch_ref.transpose(0, 1)
    assert all(
        torch.allclose(a[: b.shape[0]], b)
        and torch.allclose(a[b.shape[0] :], torch.tensor([0.0]))
        for (a, b) in zip(batch_feats, feats)
    )
    if include_ali:
        assert all(
            torch.all(a[: b.shape[0]] == b)
            and torch.all(a[b.shape[0] :] == torch.tensor([INDEX_PAD_VALUE]))
            for (a, b) in zip(batch_ali, alis)
        )
    else:
        assert batch_ali is None
    if include_ref:
        assert torch.all(torch.tensor(ref_sizes) == batch_ref_sizes)
        assert all(
            torch.all(a[: b.shape[0]] == b)
            and torch.all(a[b.shape[0] :] == torch.tensor([INDEX_PAD_VALUE]))
            for (a, b) in zip(batch_ref, refs)
        )
    else:
        assert batch_ref is None
        assert batch_ref_sizes is None
@pytest.mark.cpu
@pytest.mark.parametrize("eos", [None, -1])
@pytest.mark.parametrize("sos", [None, -2])
@pytest.mark.parametrize("split_params", [True, False])
@pytest.mark.parametrize("include_frame_shift", [True, False])
@pytest.mark.parametrize("feat_dtype", [torch.float, torch.int])
def test_spect_training_data_loader(
    temp_dir,
    populate_torch_dir,
    sos,
    eos,
    split_params,
    include_frame_shift,
    feat_dtype,
):
    """SpectTrainingDataLoader: per-epoch shuffling is seeded and
    reproducible across epoch resets, worker counts, and batch_first
    settings; missing ali/ref yield None batch entries."""
    torch.manual_seed(40)
    num_utts, batch_size, num_filts = 20, 5, 11
    populate_torch_dir(
        temp_dir,
        num_utts,
        num_filts=num_filts,
        include_frame_shift=include_frame_shift,
        feat_dtype=feat_dtype,
    )
    if split_params:
        params = data.DataSetParams(batch_size=batch_size)
        data_params = data.SpectDataParams(sos=sos, eos=eos)
    else:
        params = data.SpectDataSetParams(batch_size=batch_size, sos=sos, eos=eos)
        data_params = None
    # check missing either ali or ref gives None in batches
    data_loader = data.SpectTrainingDataLoader(
        temp_dir, params, data_params=data_params, ali_subdir=None, seed=2
    )
    assert next(iter(data_loader))[1] is None
    data_loader = data.SpectTrainingDataLoader(
        temp_dir, params, data_params=data_params, ref_subdir=None, seed=2
    )
    assert next(iter(data_loader))[2] is None
    assert next(iter(data_loader))[4] is None
    data_loader = data.SpectTrainingDataLoader(
        temp_dir, params, data_params=data_params, seed=2
    )
    # collect a whole epoch, padding every utterance to the epoch-wide
    # max lengths so epochs can be compared element-wise; sort=True puts
    # utterances in a canonical order so shuffled epochs can be compared
    def _get_epoch(sort):
        ep_feats, ep_ali, ep_ref = [], [], []
        ep_feat_sizes, ep_ref_sizes = [], []
        max_T = 0
        max_R = 0
        batch_first = data_loader.batch_first
        for b_feats, b_ali, b_ref, b_feat_sizes, b_ref_sizes in data_loader:
            if not batch_first:
                b_feats = b_feats.transpose(0, 1)
                b_ali = b_ali.transpose(0, 1)
                b_ref = b_ref.transpose(0, 1)
            max_T = max(max_T, b_feat_sizes[0])
            R_star = max(b_ref_sizes)
            max_R = max(max_R, R_star)
            assert b_feats.shape[0] == batch_size
            assert b_ali.shape[0] == batch_size
            assert b_ref.shape[0] == batch_size
            assert b_feats.shape[-1] == num_filts
            assert b_feats.shape[1] == b_feat_sizes[0]
            assert b_ali.shape[1] == b_feat_sizes[0]
            assert b_ref.shape[1] == R_star
            assert b_ref.dim() == (3 if include_frame_shift else 2)
            ep_feats += tuple(b_feats)
            ep_ali += tuple(b_ali)
            ep_ref += tuple(b_ref)
            ep_feat_sizes += tuple(b_feat_sizes)
            ep_ref_sizes += tuple(b_ref_sizes)
        assert len(ep_feats) == num_utts
        assert len(ep_ali) == num_utts
        for i in range(num_utts):
            ep_feats[i] = torch.nn.functional.pad(
                ep_feats[i], (0, 0, 0, max_T - ep_ali[i].shape[0])
            )
            ep_ali[i] = torch.nn.functional.pad(
                ep_ali[i], (0, max_T - ep_ali[i].shape[0]), value=INDEX_PAD_VALUE
            )
            if include_frame_shift:
                ep_ref[i] = torch.nn.functional.pad(
                    ep_ref[i],
                    (0, 0, 0, max_R - ep_ref[i].shape[0]),
                    value=INDEX_PAD_VALUE,
                )
            else:
                ep_ref[i] = torch.nn.functional.pad(
                    ep_ref[i], (0, max_R - ep_ref[i].shape[0]), value=INDEX_PAD_VALUE
                )
        if sort:
            ep_feats, ep_ali, ep_ref, ep_feat_sizes, ep_ref_sizes = zip(
                *sorted(
                    zip(ep_feats, ep_ali, ep_ref, ep_feat_sizes, ep_ref_sizes),
                    key=lambda x: (-x[3], -x[4], x[0][0, 0]),
                )
            )
        return ep_feats, ep_ali, ep_ref, ep_feat_sizes, ep_ref_sizes
    # element-wise equality (same=True) or inequality (same=False) of two
    # collected epochs
    def _compare_epochs(ep_a, ep_b, same):
        a_feats, a_ali, a_ref, a_feat_sizes, a_ref_sizes = ep_a
        b_feats, b_ali, b_ref, b_feat_sizes, b_ref_sizes = ep_b
        a_feats, b_feats = torch.stack(a_feats), torch.stack(b_feats)
        a_ali, b_ali = torch.stack(a_ali), torch.stack(b_ali)
        a_ref, b_ref = torch.stack(a_ref), torch.stack(b_ref)
        if same:
            assert a_feat_sizes == b_feat_sizes
            assert a_ref_sizes == b_ref_sizes
            assert torch.allclose(a_feats, b_feats)
            assert torch.all(a_ali == b_ali)
            assert torch.all(a_ref == b_ref)
        else:
            assert a_feat_sizes != b_feat_sizes
            assert a_ref_sizes != b_ref_sizes
            assert not torch.allclose(a_feats, b_feats)
            assert torch.any(a_ali != b_ali)
            assert torch.any(a_ref != b_ref)
    ep0 = _get_epoch(False)
    ep1 = _get_epoch(False)
    _compare_epochs(ep0, ep1, False)  # could be same by fluke
    _compare_epochs(_get_epoch(True), _get_epoch(True), True)
    data_loader.epoch = 1
    _compare_epochs(ep1, _get_epoch(False), True)
    # XXX(sdrobert): warning spit out on CI if num_workers > 2
    data_loader = data.SpectTrainingDataLoader(
        temp_dir, params, data_params=data_params, num_workers=2, seed=2
    )
    _compare_epochs(ep0, _get_epoch(False), True)
    _compare_epochs(ep1, _get_epoch(False), True)
    data_loader.batch_first = False
    data_loader.epoch = 0
    _compare_epochs(ep0, _get_epoch(False), True)
    _compare_epochs(ep1, _get_epoch(False), True)
@pytest.mark.cpu
@pytest.mark.parametrize("eos", [None, -1])
@pytest.mark.parametrize("sos", [None, -2])
@pytest.mark.parametrize("split_params", [True, False])
@pytest.mark.parametrize("include_frame_shift", [True, False])
@pytest.mark.parametrize("feat_dtype", [torch.float, torch.int])
def test_spect_evaluation_data_loader(
    temp_dir,
    populate_torch_dir,
    sos,
    eos,
    split_params,
    include_frame_shift,
    feat_dtype,
):
    """SpectEvaluationDataLoader: deterministic ordering (within-batch
    sort by descending feat length), correct padding, utterance ids in
    each batch, and None entries for missing ali/ref."""
    torch.manual_seed(41)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    batch_size = 5
    if split_params:
        params = data.DataSetParams(batch_size=batch_size)
        data_params = data.SpectDataParams(sos=sos, eos=eos)
    else:
        params = data.SpectDataSetParams(batch_size=batch_size, sos=sos, eos=eos)
        data_params = None
    feats, ali, ref, feat_sizes, ref_sizes, utt_ids = populate_torch_dir(
        temp_dir, 20, include_frame_shift=include_frame_shift, feat_dtype=feat_dtype
    )
    # build the expected refs with sos/eos rows attached
    if sos is not None:
        if include_frame_shift:
            sos_sym = torch.full((3,), -1, dtype=torch.long)
            sos_sym[0] = sos
            sos_sym = sos_sym.unsqueeze(0)
        else:
            sos_sym = torch.full((1,), sos, dtype=torch.long)
        ref = [torch.cat([sos_sym, x], 0) for x in ref]
        ref_sizes = [x + 1 for x in ref_sizes]
    if eos is not None:
        if include_frame_shift:
            # NOTE(review): unlike the sos branch (and
            # test_valid_spect_data_set), the extra columns here are
            # filled with eos rather than -1 (the following [0] = eos is
            # then redundant) — presumably matches how the loader pads
            # the eos row; confirm against data.py
            eos_sym = torch.full((3,), eos, dtype=torch.long)
            eos_sym[0] = eos
            eos_sym = eos_sym.unsqueeze(0)
        else:
            eos_sym = torch.full((1,), eos, dtype=torch.long)
        ref = [torch.cat([x, eos_sym], 0) for x in ref]
        ref_sizes = [x + 1 for x in ref_sizes]
    # check that ali and ref can be missing
    data_loader = data.SpectEvaluationDataLoader(
        temp_dir, params, data_params=data_params, ali_subdir=None, ref_subdir=None
    )
    assert next(iter(data_loader))[1:3] == (None, None)
    assert next(iter(data_loader))[4] is None
    data_loader = data.SpectEvaluationDataLoader(
        temp_dir, params, data_params=data_params
    )
    # walk all 4 batches and compare against the master lists (each
    # 5-utterance slice re-sorted by descending feat size)
    def _compare_data_loader():
        batch_first = data_loader.batch_first
        assert len(data_loader) == 4
        cur_idx = 0
        for (
            b_feats,
            b_ali,
            b_ref,
            b_feat_sizes,
            b_ref_sizes,
            b_utt_ids,
        ) in data_loader:
            if not batch_first:
                b_feats = b_feats.transpose(0, 1)
                b_ali = b_ali.transpose(0, 1)
                b_ref = b_ref.transpose(0, 1)
            R_star = max(b_ref_sizes)
            assert tuple(b_feats.shape) == (5, b_feat_sizes[0], 5)
            assert tuple(b_ali.shape) == (5, b_feat_sizes[0])
            if include_frame_shift:
                assert tuple(b_ref.shape) == (5, R_star, 3)
            else:
                assert tuple(b_ref.shape) == (5, R_star)
            # sort the sub-section of the master list by feature size
            s_feats, s_ali, s_ref, s_feat_sizes, s_ref_sizes, s_utt_ids = zip(
                *sorted(
                    zip(
                        feats[cur_idx : cur_idx + 5],
                        ali[cur_idx : cur_idx + 5],
                        ref[cur_idx : cur_idx + 5],
                        feat_sizes[cur_idx : cur_idx + 5],
                        ref_sizes[cur_idx : cur_idx + 5],
                        utt_ids[cur_idx : cur_idx + 5],
                    ),
                    key=lambda x: -x[3],
                )
            )
            assert b_utt_ids == s_utt_ids
            assert tuple(b_feat_sizes) == s_feat_sizes
            assert tuple(b_ref_sizes) == s_ref_sizes
            for a, b in zip(b_feats, s_feats):
                assert torch.allclose(a[: b.shape[0]], b)
                assert torch.allclose(
                    a[b.shape[0] :], torch.tensor([0], dtype=feat_dtype)
                )
            for a, b in zip(b_ali, s_ali):
                assert torch.all(a[: b.shape[0]] == b)
                assert torch.all(a[b.shape[0] :] == torch.tensor([INDEX_PAD_VALUE]))
            for a, b in zip(b_ref, s_ref):
                assert torch.all(a[: b.shape[0]] == b)
                assert torch.all(a[b.shape[0] :] == torch.tensor([INDEX_PAD_VALUE]))
            cur_idx += 5
    _compare_data_loader()
    _compare_data_loader()  # order should not change
    data_loader = data.SpectEvaluationDataLoader(
        temp_dir, params, data_params=data_params, num_workers=2
    )
    _compare_data_loader()  # order should still not change
    data_loader.batch_first = False
    _compare_data_loader()
@pytest.mark.cpu
@pytest.mark.parametrize("split_params", [True, False])
def test_window_training_data_loader(temp_dir, populate_torch_dir, split_params):
    """ContextWindowTrainingDataLoader shuffles per epoch but reproducibly.

    Epoch 1's batches must be identical whether reached by iterating a loader
    twice, by constructing one with ``init_epoch=1``, or by setting ``epoch``.
    """
    populate_torch_dir(temp_dir, 5, num_filts=2)
    seed, batch_size, context_left, context_right = 2, 5, 1, 1
    if split_params:
        params = data.DataSetParams(batch_size=batch_size, drop_last=True)
        data_params = data.ContextWindowDataParams(
            context_left=context_left, context_right=context_right
        )
    else:
        params = data.ContextWindowDataSetParams(
            context_left=context_left,
            context_right=context_right,
            batch_size=batch_size,
            drop_last=True,
        )
        data_params = None
    data_loader = data.ContextWindowTrainingDataLoader(
        temp_dir, params, data_params=data_params, seed=seed
    )
    total_windows_ep0 = 0
    for feat, ali in data_loader:
        windows = feat.shape[0]
        assert tuple(feat.shape) == (windows, 3, 2)
        assert tuple(ali.shape) == (windows,)
        total_windows_ep0 += windows
    assert total_windows_ep0 >= batch_size
    feats_ep1_a, alis_ep1_a = [], []
    total_windows_ep1 = 0
    for feats, alis in data_loader:
        # FIX: previously read the stale ``feat``/``ali`` left over from the
        # epoch-0 loop above, so epoch-1 shapes and window counts were never
        # actually checked
        windows = feats.shape[0]
        assert tuple(feats.shape) == (windows, 3, 2)
        assert tuple(alis.shape) == (windows,)
        feats_ep1_a.append(feats)
        alis_ep1_a.append(alis)
        total_windows_ep1 += windows
    # reshuffling must not change the total number of windows served
    assert total_windows_ep0 == total_windows_ep1
    data_loader = data.ContextWindowTrainingDataLoader(
        temp_dir,
        params,
        init_epoch=1,
        data_params=data_params,
        num_workers=2,
        seed=seed,
    )
    feats_ep1_b, alis_ep1_b = [], []
    for feats, alis in data_loader:
        feats_ep1_b.append(feats)
        alis_ep1_b.append(alis)
    # a fresh loader started at epoch 1 reproduces the same epoch-1 batches
    assert all(
        torch.allclose(feats_a, feats_b)
        for (feats_a, feats_b) in zip(feats_ep1_a, feats_ep1_b)
    )
    assert all(
        torch.all(alis_a == alis_b) for (alis_a, alis_b) in zip(alis_ep1_a, alis_ep1_b)
    )
    # rewinding via the ``epoch`` attribute reproduces them as well
    data_loader.epoch = 1
    feats_ep1_c, alis_ep1_c = [], []
    for feats, alis in data_loader:
        feats_ep1_c.append(feats)
        alis_ep1_c.append(alis)
    assert all(
        torch.allclose(feats_a, feats_c)
        for (feats_a, feats_c) in zip(feats_ep1_a, feats_ep1_c)
    )
    assert all(
        torch.all(alis_a == alis_c) for (alis_a, alis_c) in zip(alis_ep1_a, alis_ep1_c)
    )
@pytest.mark.cpu
@pytest.mark.parametrize("split_params", [True, False])
def test_window_evaluation_data_loader(temp_dir, populate_torch_dir, split_params):
    """ContextWindowEvaluationDataLoader preserves on-disk utterance order.

    Each batch concatenates the windowed frames of 5 utterances; the centre
    frame of every window (index 1 with context 1/1) must equal the raw frame.
    """
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    if split_params:
        params = data.DataSetParams(batch_size=5)
        data_params = data.ContextWindowDataParams(context_left=1, context_right=1)
    else:
        params = data.ContextWindowDataSetParams(
            context_left=1, context_right=1, batch_size=5
        )
        data_params = None
    feats, alis, _, feat_sizes, _, utt_ids = populate_torch_dir(
        temp_dir, 20, include_ref=False
    )
    def _compare_data_loader(data_loader):
        assert len(data_loader) == 4
        cur_idx = 0
        for b_feats, b_alis, b_feat_sizes, b_utt_ids in data_loader:
            # windows across the batch are flattened into the first dimension
            assert tuple(b_feats.shape[1:]) == (3, 5)
            assert b_feats.shape[0] == sum(b_feat_sizes)
            assert tuple(b_utt_ids) == tuple(utt_ids[cur_idx : cur_idx + 5])
            # centre frame of each window equals the original frame
            assert torch.allclose(
                b_feats[:, 1], torch.cat(feats[cur_idx : cur_idx + 5])
            )
            assert torch.all(b_alis == torch.cat(alis[cur_idx : cur_idx + 5]))
            cur_idx += 5
    data_loader = data.ContextWindowEvaluationDataLoader(
        temp_dir, params, data_params=data_params, ali_subdir=None
    )
    # check batching works when alignments are empty
    assert next(iter(data_loader))[1] is None
    data_loader = data.ContextWindowEvaluationDataLoader(
        temp_dir, params, data_params=data_params
    )
    _compare_data_loader(data_loader)
    _compare_data_loader(data_loader)  # order should not change
    data_loader = data.ContextWindowEvaluationDataLoader(
        temp_dir, params, data_params=data_params, num_workers=2
    )
    _compare_data_loader(data_loader)  # order should still not change
@pytest.mark.cpu
def test_pydrobert_param_optuna_hooks():
    """The param classes plug into pydrobert.param's optuna tuning machinery.

    Skipped unless both ``pydrobert.param.optuna`` and ``optuna`` importable.
    """
    poptuna = pytest.importorskip("pydrobert.param.optuna")
    optuna = pytest.importorskip("optuna")
    for class_ in (
        data.DataSetParams,
        data.SpectDataSetParams,
        data.ContextWindowDataParams,
        data.ContextWindowDataSetParams,
    ):
        assert issubclass(class_, poptuna.TunableParameterized)
    global_dict = {
        "data_set": data.DataSetParams(),
        "spect_data": data.SpectDataParams(),
        "spect_data_set": data.SpectDataSetParams(),
        "context_window_data": data.ContextWindowDataParams(),
        "context_window_data_set": data.ContextWindowDataSetParams(),
    }
    # all listed names except spect_data.eos must be reported as tunable
    assert {
        "data_set.batch_size",
        "spect_data.eos",
        "spect_data_set.batch_size",
        "context_window_data.reverse",
        "context_window_data_set.batch_size",
    } - poptuna.get_param_dict_tunable(global_dict) == {"spect_data.eos"}
    def objective(trial):
        param_dict = poptuna.suggest_param_dict(trial, global_dict)
        return param_dict["data_set"].batch_size
    sampler = optuna.samplers.RandomSampler(seed=5)
    study = optuna.create_study(sampler=sampler)
    study.optimize(objective, n_trials=10)
    # the tunable hyperparameters all show up in the optimized study
    assert (
        not {
            "data_set.batch_size",
            "spect_data_set.batch_size",
            "context_window_data.reverse",
            "context_window_data_set.batch_size",
        }
        - set(study.best_params)
    )
    # the study minimizes batch_size, so the best value should be small
    assert study.best_params["data_set.batch_size"] < 7
| 36.027188 | 88 | 0.603905 |
import os
from itertools import repeat
from io import StringIO
import pytest
import torch
import torch.utils.data
import pydrobert.torch.data as data
from pydrobert.torch import INDEX_PAD_VALUE
@pytest.mark.cpu
@pytest.mark.parametrize("left", [0, 1, 100])
@pytest.mark.parametrize("right", [0, 1, 100])
@pytest.mark.parametrize("T", [1, 5, 10])
def test_extract_window(left, right, T):
    """extract_window pads with edge frames outside the signal boundary."""
    # frame t of the signal is the constant row [t] * 10, so values identify frames
    signal = torch.arange(T).view(-1, 1).expand(-1, 10)
    for t in range(T):
        window = data.extract_window(signal, t, left, right)
        pad_lo = max(0, left - t)
        pad_hi = max(0, t + right + 1 - T)
        width = 1 + left + right
        assert tuple(window.shape) == (width, 10)
        if pad_lo:
            # leading pad replicates the first frame (value 0)
            assert torch.all(window[:pad_lo] == torch.tensor([0]))
        if pad_hi:
            # trailing pad replicates the last frame (value T - 1)
            assert torch.all(window[-pad_hi:] == torch.tensor([T - 1]))
        # the unpadded middle is a verbatim slice of the signal
        expected = (
            torch.arange(t - left + pad_lo, t + right - pad_hi + 1)
            .view(-1, 1)
            .expand(-1, 10)
        )
        assert torch.all(window[pad_lo : width - pad_hi] == expected)
@pytest.mark.cpu
@pytest.mark.parametrize("num_utts", [1, 2, 10])
@pytest.mark.parametrize("file_prefix", ["prefix_", ""])
@pytest.mark.parametrize("eos", [1000, None])
@pytest.mark.parametrize("sos", [2000, None])
@pytest.mark.parametrize("feat_dtype", [torch.float, torch.int])
def test_valid_spect_data_set(
    temp_dir, num_utts, file_prefix, populate_torch_dir, sos, eos, feat_dtype
):
    """SpectDataSet indexes exactly the matching utterances on disk."""
    feats, _, _, _, _, utt_ids = populate_torch_dir(
        temp_dir,
        num_utts,
        file_prefix=file_prefix,
        include_ali=False,
        include_ref=False,
        feat_dtype=feat_dtype,
    )
    # second population uses the fixture's default prefix — presumably
    # producing distractor files; verify against the fixture's default
    populate_torch_dir(
        temp_dir, num_utts, include_ali=False, include_ref=False, feat_dtype=feat_dtype
    )
    # subdirectories of the feat dir must be ignored by the data set
    if not os.path.isdir(os.path.join(temp_dir, "feat", "fake")):
        os.makedirs(os.path.join(temp_dir, "feat", "fake"))
    torch.save(
        torch.randint(100, (10, 5), dtype=feat_dtype),
        os.path.join(temp_dir, "feat", "fake", file_prefix + "fake.pt"),
    )
    data_set = data.SpectDataSet(temp_dir, file_prefix=file_prefix, eos=eos)
    # no ali/ref dirs populated yet
    assert not data_set.has_ali and not data_set.has_ref
    assert len(utt_ids) == len(data_set.utt_ids)
    assert all(utt_a == utt_b for (utt_a, utt_b) in zip(utt_ids, data_set.utt_ids))
    assert all(
        ali_b is None and ref_b is None and torch.allclose(feat_a, feat_b)
        for (feat_a, (feat_b, ali_b, ref_b)) in zip(feats, data_set)
    )
    feats, alis, refs, _, _, utt_ids = populate_torch_dir(
        temp_dir, num_utts, file_prefix=file_prefix, feat_dtype=feat_dtype
    )
    # mirror sos/eos handling: tokens are added as (token, -1, -1) triples
    if sos is not None:
        sos_sym = torch.full((3,), -1, dtype=torch.long)
        sos_sym[0] = sos
        sos_sym = sos_sym.unsqueeze(0)
        refs = [torch.cat([sos_sym, x]) for x in refs]
    if eos is not None:
        eos_sym = torch.full((3,), -1, dtype=torch.long)
        eos_sym[0] = eos
        eos_sym = eos_sym.unsqueeze(0)
        refs = [torch.cat([x, eos_sym]) for x in refs]
    data_set = data.SpectDataSet(temp_dir, file_prefix=file_prefix, sos=sos, eos=eos)
    assert data_set.has_ali and data_set.has_ref
    assert len(utt_ids) == len(data_set.utt_ids)
    assert all(utt_a == utt_b for (utt_a, utt_b) in zip(utt_ids, data_set.utt_ids))
    assert all(
        torch.all(ali_a == ali_b)
        and torch.all(ref_a == ref_b)
        and feat_a.dtype == feat_b.dtype
        and torch.allclose(feat_a, feat_b)
        for ((feat_a, ali_a, ref_a), (feat_b, ali_b, ref_b)) in zip(
            zip(feats, alis, refs), data_set
        )
    )
    # subset_ids restricts the data set to the named utterances, in order
    subset_ids = data_set.utt_ids[: num_utts // 2]
    data_set = data.SpectDataSet(
        temp_dir, file_prefix=file_prefix, subset_ids=set(subset_ids), sos=sos, eos=eos
    )
    assert all(utt_a == utt_b for (utt_a, utt_b) in zip(subset_ids, data_set.utt_ids))
    assert all(
        torch.all(ali_a == ali_b)
        and torch.all(ref_a == ref_b)
        and torch.allclose(feat_a, feat_b)
        for ((feat_a, ali_a, ref_a), (feat_b, ali_b, ref_b)) in zip(
            zip(feats[: num_utts // 2], alis[: num_utts // 2], refs[: num_utts // 2]),
            data_set,
        )
    )
@pytest.mark.cpu
def test_spect_data_set_warnings(temp_dir):
    """Half-missing utterances are dropped, warning unless warn_on_missing=False."""
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    # 'a' has only features, 'c' has only alignments; only 'b' has both
    torch.save(torch.rand(3, 3), os.path.join(feat_dir, "a.pt"))
    torch.save(torch.rand(4, 3), os.path.join(feat_dir, "b.pt"))
    torch.save(torch.randint(10, (4,), dtype=torch.long), os.path.join(ali_dir, "b.pt"))
    torch.save(torch.randint(10, (5,), dtype=torch.long), os.path.join(ali_dir, "c.pt"))
    # silent mode: the partial utterances are dropped without complaint
    quiet_set = data.SpectDataSet(temp_dir, warn_on_missing=False)
    assert quiet_set.has_ali
    assert quiet_set.utt_ids == ("b",)
    # default mode: one warning per half-missing utterance
    with pytest.warns(UserWarning) as warnings:
        data.SpectDataSet(temp_dir)
    assert len(warnings) == 2
    messages = {str(x.message) for x in warnings}
    assert "Missing ali for uttid: 'a'" in messages
    assert "Missing feat for uttid: 'c'" in messages
def test_spect_data_write_pdf(temp_dir, device):
    """write_pdf saves CPU float tensors under <temp_dir>/pdfs/<uttid>.pt."""
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    os.makedirs(feat_dir)
    torch.save(torch.rand(3, 3), os.path.join(feat_dir, "a.pt"))
    data_set = data.SpectDataSet(temp_dir)
    z = torch.randint(10, (4, 5), dtype=torch.long)
    if device == "cuda":
        data_set.write_pdf("b", z.cuda())
    else:
        data_set.write_pdf("b", z)
    zp = torch.load(os.path.join(temp_dir, "pdfs", "b.pt"))
    # the stored tensor is a CPU FloatTensor regardless of input dtype/device
    assert isinstance(zp, torch.FloatTensor)
    assert torch.allclose(zp, z.float())
    # an integer utterance argument indexes utt_ids ('a' is index 0)
    data_set.write_pdf(0, torch.rand(10, 4))
    assert os.path.exists(os.path.join(temp_dir, "pdfs", "a.pt"))
    # pdfs_dir overrides the default output directory
    data_set.write_pdf("c", z, pdfs_dir=os.path.join(temp_dir, "foop"))
    assert os.path.exists(os.path.join(temp_dir, "foop", "c.pt"))
@pytest.mark.parametrize("eos", [None, -1])
@pytest.mark.parametrize("sos", [None, -2])
def test_spect_data_write_hyp(temp_dir, device, sos, eos):
    """write_hyp strips sos/eos rows and saves CPU long tensors under hyp/."""
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    os.makedirs(feat_dir)
    torch.save(torch.rand(3, 3), os.path.join(feat_dir, "a.pt"))
    data_set = data.SpectDataSet(temp_dir, sos=sos, eos=eos)
    z = torch.randint(10, (4, 3), dtype=torch.float)
    zz = z
    # surround the hypothesis with sos/eos rows; the write must strip them
    # (sos=-2 and eos=-1 are truthy, so the bare ``if`` works here)
    if sos:
        zz = torch.cat([torch.full_like(zz, sos), zz])
    if eos:
        zz = torch.cat([zz, torch.full_like(z, eos)])
    if device == "cuda":
        data_set.write_hyp("b", zz.cuda())
    else:
        data_set.write_hyp("b", zz)
    zp = torch.load(os.path.join(temp_dir, "hyp", "b.pt"))
    # stored as a CPU LongTensor with the sos/eos rows removed
    assert isinstance(zp, torch.LongTensor)
    assert torch.all(zp == z.long())
    # an integer utterance argument indexes utt_ids ('a' is index 0)
    data_set.write_hyp(0, torch.randint(10, (11, 3)))
    assert os.path.exists(os.path.join(temp_dir, "hyp", "a.pt"))
    # hyp_dir overrides the default output directory
    data_set.write_hyp("c", z, hyp_dir=os.path.join(temp_dir, "foop"))
    assert os.path.exists(os.path.join(temp_dir, "foop", "c.pt"))
@pytest.mark.cpu
@pytest.mark.parametrize("eos", [None, 10000])
def test_spect_data_set_validity(temp_dir, eos):
    """validate_spect_data_set raises on each kind of malformed tensor.

    Each stanza corrupts one saved file, checks the specific ValueError, and
    (where supported) that validating with the second argument True instead
    warns and repairs the file so that a plain re-validation passes.
    """
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    ref_dir = os.path.join(temp_dir, "ref")
    feats_a_pt = os.path.join(feat_dir, "a.pt")
    feats_b_pt = os.path.join(feat_dir, "b.pt")
    ali_a_pt = os.path.join(ali_dir, "a.pt")
    ali_b_pt = os.path.join(ali_dir, "b.pt")
    ref_a_pt = os.path.join(ref_dir, "a.pt")
    ref_b_pt = os.path.join(ref_dir, "b.pt")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    os.makedirs(ref_dir)
    # a well-formed pair of utterances to start from
    torch.save(torch.rand(10, 4), feats_a_pt)
    torch.save(torch.rand(4, 4), feats_b_pt)
    torch.save(torch.randint(10, (10,), dtype=torch.long), ali_a_pt)
    torch.save(torch.randint(10, (4,), dtype=torch.long), ali_b_pt)
    torch.save(
        torch.cat(
            [
                torch.randint(10, (11, 1), dtype=torch.long),
                torch.full((11, 2), -1, dtype=torch.long),
            ],
            -1,
        ),
        ref_a_pt,
    )
    torch.save(torch.tensor([[0, 3, 4], [1, 1, 2]]), ref_b_pt)
    data_set = data.SpectDataSet(temp_dir, eos=eos)
    data.validate_spect_data_set(data_set)
    # features: wrong tensor type
    torch.save(torch.rand(4, 4).long(), feats_b_pt)
    with pytest.raises(ValueError, match="not the same tensor type"):
        data.validate_spect_data_set(data_set)
    # features: wrong rank
    torch.save(
        torch.rand(
            4,
        ),
        feats_b_pt,
    )
    with pytest.raises(ValueError, match="does not have two dimensions"):
        data.validate_spect_data_set(data_set)
    # features: filter count disagrees with the other utterance
    torch.save(torch.rand(4, 3), feats_b_pt)
    with pytest.raises(ValueError, match="has second dimension of size 3.*"):
        data.validate_spect_data_set(data_set)
    torch.save(torch.rand(4, 4), feats_b_pt)
    data.validate_spect_data_set(data_set)
    # alignments: int instead of long is fixable
    torch.save(torch.randint(10, (4,)).int(), ali_b_pt)
    with pytest.raises(ValueError, match="is not a long tensor"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)
    data.validate_spect_data_set(data_set)
    # alignments: wrong rank, then wrong length — not fixable
    torch.save(torch.randint(10, (4, 1), dtype=torch.long), ali_b_pt)
    with pytest.raises(ValueError, match="does not have one dimension"):
        data.validate_spect_data_set(data_set)
    torch.save(torch.randint(10, (3,), dtype=torch.long), ali_b_pt)
    with pytest.raises(ValueError, match="does not have the same first"):
        data.validate_spect_data_set(data_set)
    torch.save(torch.randint(10, (4,), dtype=torch.long), ali_b_pt)
    data.validate_spect_data_set(data_set)
    # references: int instead of long is fixable
    torch.save(torch.Tensor([[0, 1, 2]]).int(), ref_b_pt)
    with pytest.raises(ValueError, match="is not a long tensor"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)
    data.validate_spect_data_set(data_set)
    # references: negative start frame is fixable
    torch.save(torch.tensor([[0, -1, 2], [1, 1, 2]]), ref_b_pt)
    with pytest.raises(ValueError, match="invalid boundaries"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)
    data.validate_spect_data_set(data_set)
    # references: end frame just past the feature length is fixable
    torch.save(torch.tensor([[0, 0, 1], [1, 3, 5]]), ref_b_pt)
    with pytest.raises(ValueError, match="invalid boundaries"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)
    data.validate_spect_data_set(data_set)
    # references: boundaries too far out of range are not fixable
    torch.save(torch.tensor([[0, 0, 1], [1, 4, 5]]), ref_b_pt)
    with pytest.raises(ValueError, match="invalid boundaries"):
        data.validate_spect_data_set(data_set, True)
    # references: 1D and 2D transcripts must not be mixed
    torch.save(torch.tensor([1, 2, 3]), ref_b_pt)
    with pytest.raises(ValueError, match="were 2D"):
        data.validate_spect_data_set(data_set)
    torch.save(torch.tensor([10, 4, 2, 5]), ref_a_pt)
    data.validate_spect_data_set(data_set)
@pytest.mark.gpu
def test_validate_spect_data_set_cuda(temp_dir):
    """Validation rejects CUDA tensors on disk; the fix flag moves them to CPU.

    After validating with the second argument True (which warns), a plain
    re-validation passes, i.e. the files were rewritten as CPU tensors.
    """
    torch.manual_seed(29)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    ref_dir = os.path.join(temp_dir, "ref")
    feats_pt = os.path.join(feat_dir, "a.pt")
    ali_pt = os.path.join(ali_dir, "a.pt")
    ref_pt = os.path.join(ref_dir, "a.pt")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    os.makedirs(ref_dir)
    torch.save(torch.rand(10, 5), feats_pt)
    torch.save(torch.randint(10, (10,), dtype=torch.long), ali_pt)
    torch.save(torch.tensor([1, 2, 3]), ref_pt)
    data_set = data.SpectDataSet(temp_dir)
    data.validate_spect_data_set(data_set)
    # a single CUDA tensor is enough to fail validation
    torch.save(torch.rand(10, 5).cuda(), feats_pt)
    with pytest.raises(ValueError, match="cuda"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)
    data.validate_spect_data_set(data_set)
    # all three files on CUDA at once
    torch.save(torch.rand(10, 5).cuda(), feats_pt)
    torch.save(torch.randint(10, (10,), dtype=torch.long).cuda(), ali_pt)
    torch.save(torch.tensor([1, 2, 3]).cuda(), ref_pt)
    with pytest.raises(ValueError, match="cuda"):
        data.validate_spect_data_set(data_set)
    with pytest.warns(UserWarning):
        data.validate_spect_data_set(data_set, True)
    data.validate_spect_data_set(data_set)
@pytest.mark.cpu
@pytest.mark.parametrize("processes", [0, 2])
def test_read_trn(processes):
    """read_trn parses NIST trn lines, including {a / b} alternate blocks.

    Runs both serially (processes=0) and with a worker pool (processes=2).
    """
    trn = StringIO()
    trn.write(
        """\
here is a simple example (a)
nothing should go wrong (b)
"""
    )
    trn.seek(0)
    act = data.read_trn(trn, processes=processes, chunk_size=1)
    assert act == [
        ("a", ["here", "is", "a", "simple", "example"]),
        ("b", ["nothing", "should", "go", "wrong"]),
    ]
    # overwrite from the start with malformed/tricky alternate syntax;
    # warn=False suppresses complaints about the unparseable bits
    trn.seek(0)
    trn.write(
        """\
here is an { example /with} some alternates (a)
} and /here/ is {something really / {really}} (stupid) { ignore this (b)
(c)
a11 (d)
"""
    )
    trn.seek(0)
    act = data.read_trn(trn, warn=False, processes=processes)
    # alternates come back as ([branch, branch], -1, -1) triples; stray
    # braces/slashes outside a block are kept as literal tokens
    assert act == [
        (
            "a",
            [
                "here",
                "is",
                "an",
                ([["example"], ["with"]], -1, -1),
                "some",
                "alternates",
            ],
        ),
        (
            "b",
            [
                "}",
                "and",
                "/here/",
                "is",
                ([["something", "really"], [[["really"]]]], -1, -1),
                "(stupid)",
            ],
        ),
        ("c", []),
        ("d", ["a11"]),
    ]
@pytest.mark.cpu
def test_read_ctm():
    """read_ctm parses ctm rows into (utt, [(token, start, end)]) sorted lists."""
    ctm = StringIO()
    ctm.write(
        """\
utt1 A 0.0 0.1 a
utt1 A 0.5 0.1 c ;; ctm files should always be ordered, but we tolerate
;; different orders
utt2 B 0.1 1.0 d
utt1 B 0.4 0.3 b
;; utt2 A 0.2 1.0 f
"""
    )
    ctm.seek(0)
    act = data.read_ctm(ctm)
    # rows give (start, duration); output gives (start, end), sorted per utt
    assert act == [
        ("utt1", [("a", 0.0, 0.1), ("b", 0.4, 0.7), ("c", 0.5, 0.6)]),
        ("utt2", [("d", 0.1, 1.1)]),
    ]
    ctm.seek(0)
    # a (wavefile, channel) -> utterance map splits utt1's channels apart
    act = data.read_ctm(
        ctm, {("utt1", "A"): "foo", ("utt1", "B"): "bar", ("utt2", "B"): "baz"}
    )
    assert act == [
        ("foo", [("a", 0.0, 0.1), ("c", 0.5, 0.6)]),
        ("baz", [("d", 0.1, 1.1)]),
        ("bar", [("b", 0.4, 0.7)]),
    ]
    # a negative start time is rejected
    with pytest.raises(ValueError):
        ctm.write("utt3 -0.1 1.0 woop\n")
        ctm.seek(0)
        data.read_ctm(ctm)
@pytest.mark.cpu
def test_write_trn():
    """write_trn emits trn lines, rendering nested alternates as { a / b }."""
    trn = StringIO()
    transcripts = [
        ("a", ["again", "a", "simple", "example"]),
        ("b", ["should", "get", "right", "no", "prob"]),
    ]
    data.write_trn(transcripts, trn)
    trn.seek(0)
    assert (
        """\
again a simple example (a)
should get right no prob (b)
"""
        == trn.read()
    )
    trn.seek(0)
    trn.truncate()
    # timing info on tokens is dropped; nested alternate lists are flattened
    # into brace syntax; odd utterance ids (" c ") are emitted verbatim
    transcripts = [
        (
            " c ",
            [
                ("unnecessary", -1, -1),
                ([["complexity", [["can"]]], ["also", "be"]], 10, 4),
                "handled",
            ],
        ),
        ("d", []),
        ("e", ["a11"]),
    ]
    data.write_trn(transcripts, trn)
    trn.seek(0)
    assert (
        """\
unnecessary { complexity { can } / also be } handled ( c )
(d)
a11 (e)
"""
        == trn.read()
    )
@pytest.mark.cpu
def test_write_ctm():
    """write_ctm emits globally sorted rows of (utt, chan, start, dur, token)."""
    ctm = StringIO()
    transcripts = [
        (
            "c",
            [
                ("here", 0.1, 0.2),
                ("are", 0.3, 0.5),
                ("some", 0.2, 0.4),
                ("unordered", 0.5, 0.5),
                ("tokens", 10.0, 1000),
            ],
        ),
        ("b", []),
        ("a", [("hullo", 0.0, 10.0111)]),
    ]
    data.write_ctm(transcripts, ctm)
    ctm.seek(0)
    # (start, end) pairs become (start, duration); default channel is "A";
    # rows are ordered by utterance then start time, empty utts are omitted
    assert (
        """\
a A 0.0 10.0111 hullo
c A 0.1 0.1 here
c A 0.2 0.2 some
c A 0.3 0.2 are
c A 0.5 0.0 unordered
c A 10.0 990.0 tokens
"""
        == ctm.read()
    )
    ctm.seek(0)
    ctm.truncate()
    # an utterance -> (wavefile, channel) map renames utts in the output
    data.write_ctm(
        transcripts,
        ctm,
        {"a": ("last", "A"), "b": ("middle", "B"), "c": ("first", "C")},
    )
    ctm.seek(0)
    assert (
        """\
first C 0.1 0.1 here
first C 0.2 0.2 some
first C 0.3 0.2 are
first C 0.5 0.0 unordered
first C 10.0 990.0 tokens
last A 0.0 10.0111 hullo
"""
        == ctm.read()
    )
    # a token whose end precedes its start is rejected
    transcripts.append(("foo", [("a", 0.1, 0.2), ("b", 0.2, 0.1)]))
    with pytest.raises(ValueError):
        data.write_ctm(transcripts, ctm)
@pytest.mark.cpu
@pytest.mark.parametrize(
    "transcript,token2id,unk,skip_frame_times,exp",
    [
        # empty transcript -> empty (0, 3) tensor
        ([], None, None, False, torch.LongTensor(0, 3)),
        # skip_frame_times=True drops the frame columns entirely
        (
            [1, 2, 3, 4],
            None,
            None,
            True,
            torch.LongTensor([1, 2, 3, 4]),
        ),
        # string tokens map through token2id; bare tokens get (-1, -1) frames
        (
            [1, ("a", 4, 10), "a", 3],
            {"a": 2},
            None,
            False,
            torch.LongTensor([[1, -1, -1], [2, 4, 10], [2, -1, -1], [3, -1, -1]]),
        ),
        # out-of-vocabulary tokens fall back to the unk id
        (
            ["foo", 1, "bar"],
            {"foo": 0, "baz": 3},
            "baz",
            False,
            torch.LongTensor([[0, -1, -1], [3, -1, -1], [3, -1, -1]]),
        ),
    ],
)
def test_transcript_to_token(transcript, token2id, unk, skip_frame_times, exp):
    """transcript_to_token maps mixed token/(token, s, e) lists to tensors."""
    act = data.transcript_to_token(
        transcript, token2id, unk=unk, skip_frame_times=skip_frame_times
    )
    assert torch.all(exp == act)
    # an unmappable token without an unk fallback must raise
    transcript = ["foo"] + transcript
    with pytest.raises(Exception):
        data.transcript_to_token(transcript, token2id)
@pytest.mark.cpu
def test_transcript_to_token_frame_shift():
    """frame_shift_ms converts second-valued times into frame indices."""
    trans = [(12, 0.5, 0.81), 420, (1, 2.1, 2.2), (3, 2.8, 2.815), (12, 2.9, 3.0025)]
    # 10ms frames: t seconds -> round(t * 100) frames; bare tokens keep (-1, -1)
    tok = data.transcript_to_token(trans, frame_shift_ms=10)
    assert torch.allclose(
        tok,
        torch.LongTensor(
            [[12, 50, 81], [420, -1, -1], [1, 210, 220], [3, 280, 282], [12, 290, 300]]
        ),
    )
    # fractional shifts work too: 1/8 ms frames -> t seconds * 8000 frames
    tok = data.transcript_to_token(trans, frame_shift_ms=1 / 8)
    assert torch.allclose(
        tok,
        torch.LongTensor(
            [
                [12, 4000, 6480],
                [420, -1, -1],
                [1, 16800, 17600],
                [3, 22400, 22520],
                [12, 23200, 24020],
            ]
        ),
    )
@pytest.mark.cpu
@pytest.mark.parametrize(
    "tok,id2token,exp",
    [
        # empty (0, 3) tensor -> empty transcript
        (torch.LongTensor(0, 3), None, []),
        # (-1, -1) frame columns are dropped, leaving bare ids
        (
            torch.LongTensor([[1, -1, -1], [2, -1, -1], [3, -1, -1], [4, -1, -1]]),
            None,
            [1, 2, 3, 4],
        ),
        # ids map through id2token where present; valid frames are kept
        (
            torch.LongTensor([[1, 3, 4], [3, 4, 5], [2, -1, -1]]),
            {1: "a", 2: "b"},
            [("a", 3, 4), (3, 4, 5), "b"],
        ),
        # 1D tensors and (N, 1) tensors both decode to plain id lists
        (torch.tensor(range(10)), None, list(range(10))),
        (torch.tensor(range(5)).unsqueeze(-1), None, list(range(5))),
    ],
)
def test_token_to_transcript(tok, id2token, exp):
    """token_to_transcript is the inverse mapping of transcript_to_token."""
    act = data.token_to_transcript(tok, id2token)
    assert exp == act
@pytest.mark.cpu
def test_token_to_transcript_frame_shift():
    """frame_shift_ms converts frame indices back into second-valued times."""
    token_tensor = torch.LongTensor([[1, -1, 10], [2, 1000, 2000], [3, 12345, 678910]])
    # 10ms frames: frame n -> n / 100 seconds; a -1 start drops the times
    actual = data.token_to_transcript(token_tensor, frame_shift_ms=10)
    assert actual == [1, (2, 10.0, 20.0), (3, 123.45, 6789.10)]
    # fractional shift: 1/8 ms frames -> frame n maps to n / 8000 seconds
    actual = data.token_to_transcript(token_tensor, frame_shift_ms=1 / 8)
    expected = [
        1,
        (2, 1000 / 8000, 2000 / 8000),
        (3, 12345 / 8000, 678910 / 8000),
    ]
    assert actual == expected
@pytest.mark.cpu
@pytest.mark.parametrize("reverse", [True, False])
def test_context_window_data_set(temp_dir, reverse):
    """ContextWindowDataSet yields one edge-padded window per frame.

    With a 2-frame signal and context 1/1, each window is 3 frames; the
    ``reverse`` flag flips the frame order within each window.
    """
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    os.makedirs(feat_dir)
    a = torch.rand(2, 10)
    torch.save(a, os.path.join(feat_dir, "a.pt"))
    data_set = data.ContextWindowDataSet(temp_dir, 1, 1, reverse=reverse)
    windowed, _ = data_set[0]
    assert tuple(windowed.shape) == (2, 3, 10)
    if reverse:
        # forward windows would be [a0 a0 a1], [a0 a1 a1]; reversed instead
        assert torch.allclose(a[0], windowed[0, 1:])
        assert torch.allclose(a[1], windowed[0, 0])
        assert torch.allclose(a[0], windowed[1, 2])
        assert torch.allclose(a[1], windowed[1, :2])
    else:
        assert torch.allclose(a[0], windowed[0, :2])
        assert torch.allclose(a[1], windowed[0, 2])
        assert torch.allclose(a[0], windowed[1, 0])
        assert torch.allclose(a[1], windowed[1, 1:])
@pytest.mark.cpu
def test_epoch_random_sampler(temp_dir):
    """EpochRandomSampler permutes per epoch, reproducibly given a seed."""
    data_source = torch.utils.data.TensorDataset(torch.arange(100))
    sampler = data.EpochRandomSampler(data_source, base_seed=1)
    samples_ep0 = tuple(sampler)
    samples_ep1 = tuple(sampler)
    # consecutive epochs differ but are each a full permutation of 0..99
    assert samples_ep0 != samples_ep1
    assert sorted(samples_ep0) == list(range(100))
    assert sorted(samples_ep1) == list(range(100))
    assert samples_ep0 == tuple(sampler.get_samples_for_epoch(0))
    assert samples_ep1 == tuple(sampler.get_samples_for_epoch(1))
    # epoch permutations depend only on base_seed, not on init_epoch
    sampler = data.EpochRandomSampler(data_source, init_epoch=10, base_seed=1)
    assert samples_ep0 == tuple(sampler.get_samples_for_epoch(0))
    assert samples_ep1 == tuple(sampler.get_samples_for_epoch(1))
    # without base_seed, the sampler draws its seed from torch's global RNG
    torch.manual_seed(5)
    sampler = data.EpochRandomSampler(data_source)
    samples_ep0 = tuple(sampler)
    torch.manual_seed(5)
    sampler = data.EpochRandomSampler(data_source)
    assert samples_ep0 == tuple(sampler)
@pytest.mark.cpu
@pytest.mark.parametrize(
    "feat_sizes",
    [((3, 5, 4), (4, 5, 4), (1, 5, 4)), ((2, 10, 5),) * 10],
    ids=["short", "long"],
)
@pytest.mark.parametrize("include_ali", [True, False])
def test_context_window_seq_to_batch(feat_sizes, include_ali):
    """context_window_seq_to_batch concatenates windows along the first dim."""
    torch.manual_seed(1)
    feats = tuple(torch.rand(*x) for x in feat_sizes)
    if include_ali:
        alis = tuple(torch.randint(10, (x[0],), dtype=torch.long) for x in feat_sizes)
    else:
        # a (feat, None) pair per utterance when alignments are absent
        alis = repeat(None)
    seq = zip(feats, alis)
    batch_feats, batch_ali = data.context_window_seq_to_batch(seq)
    # batching is plain concatenation — no padding needed for windows
    assert torch.allclose(torch.cat(feats), batch_feats)
    if include_ali:
        assert torch.all(torch.cat(alis) == batch_ali)
    else:
        assert batch_ali is None
@pytest.mark.cpu
@pytest.mark.parametrize("include_ali", [True, False])
@pytest.mark.parametrize(
    "include_ref,include_frame_shift", [(True, True), (True, False), (False, None)]
)
@pytest.mark.parametrize("batch_first", [True, False])
def test_spect_seq_to_batch(include_ali, include_ref, batch_first, include_frame_shift):
    """spect_seq_to_batch pads and sorts a sequence into stacked batch tensors.

    Output is sorted by decreasing feature length; features pad with 0 and
    ali/ref with INDEX_PAD_VALUE. Missing ali/ref come back as None.
    """
    torch.manual_seed(1)
    # random number of utterances (3-9), each with a random length (1-29)
    feat_sizes = tuple(
        torch.randint(1, 30, (1,)).item()
        for _ in range(torch.randint(3, 10, (1,)).item())
    )
    feats = tuple(torch.randn(x, 5) for x in feat_sizes)
    if include_ali:
        alis = tuple(torch.randint(100, (x,), dtype=torch.long) for x in feat_sizes)
    else:
        alis = repeat(None)
    if include_ref:
        ref_sizes = tuple(
            torch.randint(1, 30, (1,)).item() for _ in range(len(feat_sizes))
        )
        # refs are (R,) without frame shifts, (R, 3) with them
        extra_dim = (3,) if include_frame_shift else tuple()
        refs = tuple(
            torch.randint(100, (x,) + extra_dim, dtype=torch.long) for x in ref_sizes
        )
    else:
        ref_sizes = repeat(None)
        refs = repeat(None)
    (
        batch_feats,
        batch_ali,
        batch_ref,
        batch_feat_sizes,
        batch_ref_sizes,
    ) = data.spect_seq_to_batch(zip(feats, alis, refs), batch_first=batch_first)
    # sort our local copies the same way the batch is sorted
    feat_sizes, feats, alis, refs, ref_sizes = zip(
        *sorted(zip(feat_sizes, feats, alis, refs, ref_sizes), key=lambda x: -x[0])
    )
    assert torch.all(torch.tensor(feat_sizes) == batch_feat_sizes)
    # normalize to batch-first for the element-wise checks below
    if not batch_first:
        batch_feats = batch_feats.transpose(0, 1)
        if include_ali:
            batch_ali = batch_ali.transpose(0, 1)
        if include_ref:
            batch_ref = batch_ref.transpose(0, 1)
    assert all(
        torch.allclose(a[: b.shape[0]], b)
        and torch.allclose(a[b.shape[0] :], torch.tensor([0.0]))
        for (a, b) in zip(batch_feats, feats)
    )
    if include_ali:
        assert all(
            torch.all(a[: b.shape[0]] == b)
            and torch.all(a[b.shape[0] :] == torch.tensor([INDEX_PAD_VALUE]))
            for (a, b) in zip(batch_ali, alis)
        )
    else:
        assert batch_ali is None
    if include_ref:
        assert torch.all(torch.tensor(ref_sizes) == batch_ref_sizes)
        assert all(
            torch.all(a[: b.shape[0]] == b)
            and torch.all(a[b.shape[0] :] == torch.tensor([INDEX_PAD_VALUE]))
            for (a, b) in zip(batch_ref, refs)
        )
    else:
        assert batch_ref is None
        assert batch_ref_sizes is None
@pytest.mark.cpu
@pytest.mark.parametrize("eos", [None, -1])
@pytest.mark.parametrize("sos", [None, -2])
@pytest.mark.parametrize("split_params", [True, False])
@pytest.mark.parametrize("include_frame_shift", [True, False])
@pytest.mark.parametrize("feat_dtype", [torch.float, torch.int])
def test_spect_training_data_loader(
    temp_dir,
    populate_torch_dir,
    sos,
    eos,
    split_params,
    include_frame_shift,
    feat_dtype,
):
    """SpectTrainingDataLoader reshuffles per epoch but reproducibly per seed.

    Raw epochs differ; once each epoch's utterances are padded to a common
    length and sorted into a canonical order, equal epoch numbers must yield
    equal data — across seeds set via ``epoch``, worker counts, and
    ``batch_first`` settings.
    """
    torch.manual_seed(40)
    num_utts, batch_size, num_filts = 20, 5, 11
    populate_torch_dir(
        temp_dir,
        num_utts,
        num_filts=num_filts,
        include_frame_shift=include_frame_shift,
        feat_dtype=feat_dtype,
    )
    if split_params:
        params = data.DataSetParams(batch_size=batch_size)
        data_params = data.SpectDataParams(sos=sos, eos=eos)
    else:
        params = data.SpectDataSetParams(batch_size=batch_size, sos=sos, eos=eos)
        data_params = None
    # ali and ref subdirs may each be disabled independently
    data_loader = data.SpectTrainingDataLoader(
        temp_dir, params, data_params=data_params, ali_subdir=None, seed=2
    )
    assert next(iter(data_loader))[1] is None
    data_loader = data.SpectTrainingDataLoader(
        temp_dir, params, data_params=data_params, ref_subdir=None, seed=2
    )
    assert next(iter(data_loader))[2] is None
    assert next(iter(data_loader))[4] is None
    data_loader = data.SpectTrainingDataLoader(
        temp_dir, params, data_params=data_params, seed=2
    )
    def _get_epoch(sort):
        # drain one epoch from the enclosing ``data_loader``, checking batch
        # shapes, then pad all utterances to the epoch-wide max lengths; with
        # sort=True, also order them canonically so epochs become comparable
        ep_feats, ep_ali, ep_ref = [], [], []
        ep_feat_sizes, ep_ref_sizes = [], []
        max_T = 0
        max_R = 0
        batch_first = data_loader.batch_first
        for b_feats, b_ali, b_ref, b_feat_sizes, b_ref_sizes in data_loader:
            if not batch_first:
                b_feats = b_feats.transpose(0, 1)
                b_ali = b_ali.transpose(0, 1)
                b_ref = b_ref.transpose(0, 1)
            max_T = max(max_T, b_feat_sizes[0])
            R_star = max(b_ref_sizes)
            max_R = max(max_R, R_star)
            assert b_feats.shape[0] == batch_size
            assert b_ali.shape[0] == batch_size
            assert b_ref.shape[0] == batch_size
            assert b_feats.shape[-1] == num_filts
            # batches are padded to their own longest utterance
            assert b_feats.shape[1] == b_feat_sizes[0]
            assert b_ali.shape[1] == b_feat_sizes[0]
            assert b_ref.shape[1] == R_star
            assert b_ref.dim() == (3 if include_frame_shift else 2)
            ep_feats += tuple(b_feats)
            ep_ali += tuple(b_ali)
            ep_ref += tuple(b_ref)
            ep_feat_sizes += tuple(b_feat_sizes)
            ep_ref_sizes += tuple(b_ref_sizes)
        assert len(ep_feats) == num_utts
        assert len(ep_ali) == num_utts
        for i in range(num_utts):
            # re-pad every utterance from batch-level to epoch-level lengths
            ep_feats[i] = torch.nn.functional.pad(
                ep_feats[i], (0, 0, 0, max_T - ep_ali[i].shape[0])
            )
            ep_ali[i] = torch.nn.functional.pad(
                ep_ali[i], (0, max_T - ep_ali[i].shape[0]), value=INDEX_PAD_VALUE
            )
            if include_frame_shift:
                ep_ref[i] = torch.nn.functional.pad(
                    ep_ref[i],
                    (0, 0, 0, max_R - ep_ref[i].shape[0]),
                    value=INDEX_PAD_VALUE,
                )
            else:
                ep_ref[i] = torch.nn.functional.pad(
                    ep_ref[i], (0, max_R - ep_ref[i].shape[0]), value=INDEX_PAD_VALUE
                )
        if sort:
            ep_feats, ep_ali, ep_ref, ep_feat_sizes, ep_ref_sizes = zip(
                *sorted(
                    zip(ep_feats, ep_ali, ep_ref, ep_feat_sizes, ep_ref_sizes),
                    key=lambda x: (-x[3], -x[4], x[0][0, 0]),
                )
            )
        return ep_feats, ep_ali, ep_ref, ep_feat_sizes, ep_ref_sizes
    def _compare_epochs(ep_a, ep_b, same):
        # assert two drained epochs are (entirely) equal or (somewhere) unequal
        a_feats, a_ali, a_ref, a_feat_sizes, a_ref_sizes = ep_a
        b_feats, b_ali, b_ref, b_feat_sizes, b_ref_sizes = ep_b
        a_feats, b_feats = torch.stack(a_feats), torch.stack(b_feats)
        a_ali, b_ali = torch.stack(a_ali), torch.stack(b_ali)
        a_ref, b_ref = torch.stack(a_ref), torch.stack(b_ref)
        if same:
            assert a_feat_sizes == b_feat_sizes
            assert a_ref_sizes == b_ref_sizes
            assert torch.allclose(a_feats, b_feats)
            assert torch.all(a_ali == b_ali)
            assert torch.all(a_ref == b_ref)
        else:
            assert a_feat_sizes != b_feat_sizes
            assert a_ref_sizes != b_ref_sizes
            assert not torch.allclose(a_feats, b_feats)
            assert torch.any(a_ali != b_ali)
            assert torch.any(a_ref != b_ref)
    # raw epoch order differs between epochs 0 and 1 ...
    ep0 = _get_epoch(False)
    ep1 = _get_epoch(False)
    _compare_epochs(ep0, ep1, False)
    # ... but the sorted contents are the same utterances
    _compare_epochs(_get_epoch(True), _get_epoch(True), True)
    # rewinding to epoch 1 reproduces epoch 1 exactly
    data_loader.epoch = 1
    _compare_epochs(ep1, _get_epoch(False), True)
    # a fresh loader with the same seed reproduces both epochs, workers or not
    data_loader = data.SpectTrainingDataLoader(
        temp_dir, params, data_params=data_params, num_workers=2, seed=2
    )
    _compare_epochs(ep0, _get_epoch(False), True)
    _compare_epochs(ep1, _get_epoch(False), True)
    # batch_first only changes layout; _get_epoch untransposes it
    data_loader.batch_first = False
    data_loader.epoch = 0
    _compare_epochs(ep0, _get_epoch(False), True)
    _compare_epochs(ep1, _get_epoch(False), True)
@pytest.mark.cpu
@pytest.mark.parametrize("eos", [None, -1])
@pytest.mark.parametrize("sos", [None, -2])
@pytest.mark.parametrize("split_params", [True, False])
@pytest.mark.parametrize("include_frame_shift", [True, False])
@pytest.mark.parametrize("feat_dtype", [torch.float, torch.int])
def test_spect_evaluation_data_loader(
    temp_dir,
    populate_torch_dir,
    sos,
    eos,
    split_params,
    include_frame_shift,
    feat_dtype,
):
    """SpectEvaluationDataLoader yields batches in on-disk utterance order,
    each batch sorted by decreasing feature length and padded, with optional
    sos/eos tokens applied to the references."""
    torch.manual_seed(41)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    batch_size = 5
    if split_params:
        params = data.DataSetParams(batch_size=batch_size)
        data_params = data.SpectDataParams(sos=sos, eos=eos)
    else:
        params = data.SpectDataSetParams(batch_size=batch_size, sos=sos, eos=eos)
        data_params = None
    feats, ali, ref, feat_sizes, ref_sizes, utt_ids = populate_torch_dir(
        temp_dir, 20, include_frame_shift=include_frame_shift, feat_dtype=feat_dtype
    )
    # build the expected references: prepend sos / append eos like the loader does
    if sos is not None:
        if include_frame_shift:
            # token triple: non-token slots filled with -1 — presumably
            # "no frame shift" for the start marker; confirm against loader
            sos_sym = torch.full((3,), -1, dtype=torch.long)
            sos_sym[0] = sos
            sos_sym = sos_sym.unsqueeze(0)
        else:
            sos_sym = torch.full((1,), sos, dtype=torch.long)
        ref = [torch.cat([sos_sym, x], 0) for x in ref]
        ref_sizes = [x + 1 for x in ref_sizes]
    if eos is not None:
        if include_frame_shift:
            eos_sym = torch.full((3,), eos, dtype=torch.long)
            eos_sym[0] = eos
            eos_sym = eos_sym.unsqueeze(0)
        else:
            eos_sym = torch.full((1,), eos, dtype=torch.long)
        ref = [torch.cat([x, eos_sym], 0) for x in ref]
        ref_sizes = [x + 1 for x in ref_sizes]
    # without ali/ref subdirectories, ali, ref and ref sizes come back as None
    data_loader = data.SpectEvaluationDataLoader(
        temp_dir, params, data_params=data_params, ali_subdir=None, ref_subdir=None
    )
    assert next(iter(data_loader))[1:3] == (None, None)
    assert next(iter(data_loader))[4] is None
    data_loader = data.SpectEvaluationDataLoader(
        temp_dir, params, data_params=data_params
    )
    def _compare_data_loader():
        """Walk one full pass and compare every batch against the on-disk data."""
        batch_first = data_loader.batch_first
        assert len(data_loader) == 4
        cur_idx = 0
        for (
            b_feats,
            b_ali,
            b_ref,
            b_feat_sizes,
            b_ref_sizes,
            b_utt_ids,
        ) in data_loader:
            if not batch_first:
                b_feats = b_feats.transpose(0, 1)
                b_ali = b_ali.transpose(0, 1)
                b_ref = b_ref.transpose(0, 1)
            R_star = max(b_ref_sizes)
            assert tuple(b_feats.shape) == (5, b_feat_sizes[0], 5)
            assert tuple(b_ali.shape) == (5, b_feat_sizes[0])
            if include_frame_shift:
                assert tuple(b_ref.shape) == (5, R_star, 3)
            else:
                assert tuple(b_ref.shape) == (5, R_star)
            # expected batch: next 5 utterances, sorted by decreasing feat size
            s_feats, s_ali, s_ref, s_feat_sizes, s_ref_sizes, s_utt_ids = zip(
                *sorted(
                    zip(
                        feats[cur_idx : cur_idx + 5],
                        ali[cur_idx : cur_idx + 5],
                        ref[cur_idx : cur_idx + 5],
                        feat_sizes[cur_idx : cur_idx + 5],
                        ref_sizes[cur_idx : cur_idx + 5],
                        utt_ids[cur_idx : cur_idx + 5],
                    ),
                    key=lambda x: -x[3],
                )
            )
            assert b_utt_ids == s_utt_ids
            assert tuple(b_feat_sizes) == s_feat_sizes
            assert tuple(b_ref_sizes) == s_ref_sizes
            # padding: feats padded with 0, ali/ref with INDEX_PAD_VALUE
            for a, b in zip(b_feats, s_feats):
                assert torch.allclose(a[: b.shape[0]], b)
                assert torch.allclose(
                    a[b.shape[0] :], torch.tensor([0], dtype=feat_dtype)
                )
            for a, b in zip(b_ali, s_ali):
                assert torch.all(a[: b.shape[0]] == b)
                assert torch.all(a[b.shape[0] :] == torch.tensor([INDEX_PAD_VALUE]))
            for a, b in zip(b_ref, s_ref):
                assert torch.all(a[: b.shape[0]] == b)
                assert torch.all(a[b.shape[0] :] == torch.tensor([INDEX_PAD_VALUE]))
            cur_idx += 5
    # evaluation order must be deterministic across passes, worker counts and
    # batch_first settings
    _compare_data_loader()
    _compare_data_loader()
    data_loader = data.SpectEvaluationDataLoader(
        temp_dir, params, data_params=data_params, num_workers=2
    )
    _compare_data_loader()
    data_loader.batch_first = False
    _compare_data_loader()
@pytest.mark.cpu
@pytest.mark.parametrize("split_params", [True, False])
def test_window_training_data_loader(temp_dir, populate_torch_dir, split_params):
    """ContextWindowTrainingDataLoader yields consistently-shaped window
    batches, and a given (seed, epoch) pair reproduces the same batches
    regardless of worker count or how the loader was constructed."""
    populate_torch_dir(temp_dir, 5, num_filts=2)
    seed, batch_size, context_left, context_right = 2, 5, 1, 1
    if split_params:
        params = data.DataSetParams(batch_size=batch_size, drop_last=True)
        data_params = data.ContextWindowDataParams(
            context_left=context_left, context_right=context_right
        )
    else:
        params = data.ContextWindowDataSetParams(
            context_left=context_left,
            context_right=context_right,
            batch_size=batch_size,
            drop_last=True,
        )
        data_params = None
    data_loader = data.ContextWindowTrainingDataLoader(
        temp_dir, params, data_params=data_params, seed=seed
    )
    # epoch 0: every batch is (windows, 1 + left + right, num_filts)
    total_windows_ep0 = 0
    for feat, ali in data_loader:
        windows = feat.shape[0]
        assert tuple(feat.shape) == (windows, 3, 2)
        assert tuple(ali.shape) == (windows,)
        total_windows_ep0 += windows
    assert total_windows_ep0 >= batch_size
    feats_ep1_a, alis_ep1_a = [], []
    total_windows_ep1 = 0
    for feats, alis in data_loader:
        # FIX: the shape checks previously read the stale `feat`/`ali`
        # left over from the epoch-0 loop, making these asserts (and the
        # window count) vacuous
        windows = feats.shape[0]
        assert tuple(feats.shape) == (windows, 3, 2)
        assert tuple(alis.shape) == (windows,)
        feats_ep1_a.append(feats)
        alis_ep1_a.append(alis)
        total_windows_ep1 += windows
    # shuffling must not change the total number of windows per epoch
    assert total_windows_ep0 == total_windows_ep1
    # a fresh loader initialized at epoch 1 (with workers) replays epoch 1
    data_loader = data.ContextWindowTrainingDataLoader(
        temp_dir,
        params,
        init_epoch=1,
        data_params=data_params,
        num_workers=2,
        seed=seed,
    )
    feats_ep1_b, alis_ep1_b = [], []
    for feats, alis in data_loader:
        feats_ep1_b.append(feats)
        alis_ep1_b.append(alis)
    assert all(
        torch.allclose(feats_a, feats_b)
        for (feats_a, feats_b) in zip(feats_ep1_a, feats_ep1_b)
    )
    assert all(
        torch.all(alis_a == alis_b) for (alis_a, alis_b) in zip(alis_ep1_a, alis_ep1_b)
    )
    # rewinding the epoch counter replays the same epoch again
    data_loader.epoch = 1
    feats_ep1_c, alis_ep1_c = [], []
    for feats, alis in data_loader:
        feats_ep1_c.append(feats)
        alis_ep1_c.append(alis)
    assert all(
        torch.allclose(feats_a, feats_c)
        for (feats_a, feats_c) in zip(feats_ep1_a, feats_ep1_c)
    )
    assert all(
        torch.all(alis_a == alis_c) for (alis_a, alis_c) in zip(alis_ep1_a, alis_ep1_c)
    )
@pytest.mark.cpu
@pytest.mark.parametrize("split_params", [True, False])
def test_window_evaluation_data_loader(temp_dir, populate_torch_dir, split_params):
    """ContextWindowEvaluationDataLoader yields windowed batches in on-disk
    utterance order, deterministically across passes and worker counts."""
    torch.manual_seed(1)
    feat_dir = os.path.join(temp_dir, "feat")
    ali_dir = os.path.join(temp_dir, "ali")
    os.makedirs(feat_dir)
    os.makedirs(ali_dir)
    if split_params:
        params = data.DataSetParams(batch_size=5)
        data_params = data.ContextWindowDataParams(context_left=1, context_right=1)
    else:
        params = data.ContextWindowDataSetParams(
            context_left=1, context_right=1, batch_size=5
        )
        data_params = None
    feats, alis, _, feat_sizes, _, utt_ids = populate_torch_dir(
        temp_dir, 20, include_ref=False
    )
    def _compare_data_loader(data_loader):
        """Walk one full pass and compare every batch against the on-disk data."""
        assert len(data_loader) == 4
        cur_idx = 0
        for b_feats, b_alis, b_feat_sizes, b_utt_ids in data_loader:
            # each window is (1 + left + right) frames of 5 filters
            assert tuple(b_feats.shape[1:]) == (3, 5)
            assert b_feats.shape[0] == sum(b_feat_sizes)
            assert tuple(b_utt_ids) == tuple(utt_ids[cur_idx : cur_idx + 5])
            # the center frame of every window equals the original feature frame
            assert torch.allclose(
                b_feats[:, 1], torch.cat(feats[cur_idx : cur_idx + 5])
            )
            assert torch.all(b_alis == torch.cat(alis[cur_idx : cur_idx + 5]))
            cur_idx += 5
    # with no ali subdirectory, the alignment slot is None
    data_loader = data.ContextWindowEvaluationDataLoader(
        temp_dir, params, data_params=data_params, ali_subdir=None
    )
    assert next(iter(data_loader))[1] is None
    data_loader = data.ContextWindowEvaluationDataLoader(
        temp_dir, params, data_params=data_params
    )
    _compare_data_loader(data_loader)
    _compare_data_loader(data_loader)
    data_loader = data.ContextWindowEvaluationDataLoader(
        temp_dir, params, data_params=data_params, num_workers=2
    )
    _compare_data_loader(data_loader)
@pytest.mark.cpu
def test_pydrobert_param_optuna_hooks():
    """The parameter classes plug into pydrobert.param's optuna integration:
    they are TunableParameterized, expose the expected tunable names, and an
    optuna study can optimize over them."""
    # skip cleanly when the optional optuna integration isn't installed
    poptuna = pytest.importorskip("pydrobert.param.optuna")
    optuna = pytest.importorskip("optuna")
    for class_ in (
        data.DataSetParams,
        data.SpectDataSetParams,
        data.ContextWindowDataParams,
        data.ContextWindowDataSetParams,
    ):
        assert issubclass(class_, poptuna.TunableParameterized)
    global_dict = {
        "data_set": data.DataSetParams(),
        "spect_data": data.SpectDataParams(),
        "spect_data_set": data.SpectDataSetParams(),
        "context_window_data": data.ContextWindowDataParams(),
        "context_window_data_set": data.ContextWindowDataSetParams(),
    }
    # every expected name is tunable except spect_data.eos
    assert {
        "data_set.batch_size",
        "spect_data.eos",
        "spect_data_set.batch_size",
        "context_window_data.reverse",
        "context_window_data_set.batch_size",
    } - poptuna.get_param_dict_tunable(global_dict) == {"spect_data.eos"}
    def objective(trial):
        param_dict = poptuna.suggest_param_dict(trial, global_dict)
        return param_dict["data_set"].batch_size
    sampler = optuna.samplers.RandomSampler(seed=5)
    study = optuna.create_study(sampler=sampler)
    study.optimize(objective, n_trials=10)
    # all tunable names show up in the best trial's parameters
    assert (
        not {
            "data_set.batch_size",
            "spect_data_set.batch_size",
            "context_window_data.reverse",
            "context_window_data_set.batch_size",
        }
        - set(study.best_params)
    )
    assert study.best_params["data_set.batch_size"] < 7
| true | true |
f731b8a53d9c151e77995f19a47fbebe678c89cd | 769 | py | Python | create.py | keatonkraiger/pysot-mot | b1447dc9569339592a09b25b097b363cff9f6de4 | [
"Apache-2.0"
] | null | null | null | create.py | keatonkraiger/pysot-mot | b1447dc9569339592a09b25b097b363cff9f6de4 | [
"Apache-2.0"
] | null | null | null | create.py | keatonkraiger/pysot-mot | b1447dc9569339592a09b25b097b363cff9f6de4 | [
"Apache-2.0"
] | null | null | null | import os
import re
import imageio
from glob import glob
from PIL import Image
# Assemble the numbered demo frames into an animated GIF.
# NOTE(review): `imageio` is imported but never used, and `image_folder` is
# computed but the glob below uses a hard-coded relative path instead.
SAVE_FORMAT = 'gif'
video_name = 'ants1'
image_folder = os.path.join(os.getcwd(), 'demo/demo_images/')
#images = [img for img in os.listdir(image_folder) if img.endswith(".jpg")]
#images.sort(key=lambda var:[int(x) if x.isdigit() else x for
#                            x in re.findall(r'[^0-9]|[0-9]+', var)])
if SAVE_FORMAT == 'gif':
    images = []
    images = glob("demo/demo_images/*.jpg")
    # natural sort: split names into digit runs (compared numerically) and
    # non-digit characters so frame10 sorts after frame2
    images.sort(key=lambda var:[int(x) if x.isdigit() else x for
                x in re.findall(r'[^0-9]|[0-9]+', var)])
    # first frame is the base image; the rest are appended as GIF frames
    img, *imgs = [Image.open(f) for f in images]
    img.save(fp='demo/output/{}.gif'.format(video_name), format='GIF', append_images=imgs,
            save_all=True, duration=10, loop=0)
| 24.03125 | 90 | 0.638492 | import os
import re
import imageio
from glob import glob
from PIL import Image
SAVE_FORMAT = 'gif'
video_name = 'ants1'
image_folder = os.path.join(os.getcwd(), 'demo/demo_images/')
if SAVE_FORMAT == 'gif':
images = []
images = glob("demo/demo_images/*.jpg")
images.sort(key=lambda var:[int(x) if x.isdigit() else x for
x in re.findall(r'[^0-9]|[0-9]+', var)])
img, *imgs = [Image.open(f) for f in images]
img.save(fp='demo/output/{}.gif'.format(video_name), format='GIF', append_images=imgs,
save_all=True, duration=10, loop=0)
| true | true |
f731b9052df0f2dc9c9b090455306ea466ce939c | 253 | py | Python | cmdline_provenance/__init__.py | znicholls/cmdline_provenance | c9df55a9e4b0e7435499993c39eb9ff7e360b1c6 | [
"MIT"
] | null | null | null | cmdline_provenance/__init__.py | znicholls/cmdline_provenance | c9df55a9e4b0e7435499993c39eb9ff7e360b1c6 | [
"MIT"
] | null | null | null | cmdline_provenance/__init__.py | znicholls/cmdline_provenance | c9df55a9e4b0e7435499993c39eb9ff7e360b1c6 | [
"MIT"
] | null | null | null | """Utilities for capturing the history of commands used to produce a given output"""
from .cmdline_provenance import new_log
from .cmdline_provenance import read_log
from .cmdline_provenance import write_log
__all__ = [new_log, read_log, write_log]
| 25.3 | 84 | 0.810277 |
from .cmdline_provenance import new_log
from .cmdline_provenance import read_log
from .cmdline_provenance import write_log
__all__ = [new_log, read_log, write_log]
| true | true |
f731b912d65c6127ea588331a16ed36a53def65b | 3,387 | py | Python | homeassistant/components/tmb/sensor.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 6 | 2020-07-18T16:33:25.000Z | 2021-09-26T09:52:04.000Z | homeassistant/components/tmb/sensor.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 58 | 2020-08-03T07:33:02.000Z | 2022-03-31T06:02:05.000Z | homeassistant/components/tmb/sensor.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 14 | 2018-08-19T16:28:26.000Z | 2021-09-02T18:26:53.000Z | """Support for TMB (Transports Metropolitans de Barcelona) Barcelona public transport."""
from datetime import timedelta
import logging
from requests import HTTPError
from tmb import IBus
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, TIME_MINUTES
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Transport Metropolitans de Barcelona"
ICON = "mdi:bus-clock"
CONF_APP_ID = "app_id"
CONF_APP_KEY = "app_key"
CONF_LINE = "line"
CONF_BUS_STOP = "stop"
CONF_BUS_STOPS = "stops"
ATTR_BUS_STOP = "stop"
ATTR_LINE = "line"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
LINE_STOP_SCHEMA = vol.Schema(
{
vol.Required(CONF_BUS_STOP): cv.string,
vol.Required(CONF_LINE): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_APP_ID): cv.string,
vol.Required(CONF_APP_KEY): cv.string,
vol.Required(CONF_BUS_STOPS): vol.All(cv.ensure_list, [LINE_STOP_SCHEMA]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the sensors."""
    client = IBus(config[CONF_APP_ID], config[CONF_APP_KEY])
    entities = []
    for entry in config.get(CONF_BUS_STOPS):
        line = entry[CONF_LINE]
        stop = entry[CONF_BUS_STOP]
        custom = entry.get(CONF_NAME)
        # use the user-supplied label when given, otherwise "line - stop"
        name = f"{line} - {custom} ({stop})" if custom else f"{line} - {stop}"
        entities.append(TMBSensor(client, stop, line, name))
    add_entities(entities, True)
class TMBSensor(Entity):
    """Implementation of a TMB line/stop Sensor."""
    def __init__(self, ibus_client, stop, line, name):
        """Initialize the sensor."""
        self._ibus_client = ibus_client
        self._stop = stop
        # normalize casing so unique_id/display are stable regardless of how
        # the line was written in the config
        self._line = line.upper()
        self._name = name
        self._unit = TIME_MINUTES
        # last forecast; None until the first successful update
        self._state = None
    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name
    @property
    def icon(self):
        """Return the icon for the frontend."""
        return ICON
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit
    @property
    def unique_id(self):
        """Return a unique, HASS-friendly identifier for this entity."""
        return f"{self._stop}_{self._line}"
    @property
    def state(self):
        """Return the next departure time."""
        return self._state
    @property
    def device_state_attributes(self):
        """Return the state attributes of the last update."""
        return {
            ATTR_ATTRIBUTION: ATTRIBUTION,
            ATTR_BUS_STOP: self._stop,
            ATTR_LINE: self._line,
        }
    # Throttle rate-limits polling of the iBus API to once per
    # MIN_TIME_BETWEEN_UPDATES
    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the next bus information."""
        try:
            self._state = self._ibus_client.get_stop_forecast(self._stop, self._line)
        except HTTPError:
            # keep the previous state on API failure; just log the error
            _LOGGER.error(
                "Unable to fetch data from TMB API. Please check your API keys are valid"
            )
| 27.991736 | 89 | 0.660171 | from datetime import timedelta
import logging
from requests import HTTPError
from tmb import IBus
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, TIME_MINUTES
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Transport Metropolitans de Barcelona"
ICON = "mdi:bus-clock"
CONF_APP_ID = "app_id"
CONF_APP_KEY = "app_key"
CONF_LINE = "line"
CONF_BUS_STOP = "stop"
CONF_BUS_STOPS = "stops"
ATTR_BUS_STOP = "stop"
ATTR_LINE = "line"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
LINE_STOP_SCHEMA = vol.Schema(
{
vol.Required(CONF_BUS_STOP): cv.string,
vol.Required(CONF_LINE): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_APP_ID): cv.string,
vol.Required(CONF_APP_KEY): cv.string,
vol.Required(CONF_BUS_STOPS): vol.All(cv.ensure_list, [LINE_STOP_SCHEMA]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
ibus_client = IBus(config[CONF_APP_ID], config[CONF_APP_KEY])
sensors = []
for line_stop in config.get(CONF_BUS_STOPS):
line = line_stop[CONF_LINE]
stop = line_stop[CONF_BUS_STOP]
if line_stop.get(CONF_NAME):
name = f"{line} - {line_stop[CONF_NAME]} ({stop})"
else:
name = f"{line} - {stop}"
sensors.append(TMBSensor(ibus_client, stop, line, name))
add_entities(sensors, True)
class TMBSensor(Entity):
def __init__(self, ibus_client, stop, line, name):
self._ibus_client = ibus_client
self._stop = stop
self._line = line.upper()
self._name = name
self._unit = TIME_MINUTES
self._state = None
@property
def name(self):
return self._name
@property
def icon(self):
return ICON
@property
def unit_of_measurement(self):
return self._unit
@property
def unique_id(self):
return f"{self._stop}_{self._line}"
@property
def state(self):
return self._state
@property
def device_state_attributes(self):
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_BUS_STOP: self._stop,
ATTR_LINE: self._line,
}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
try:
self._state = self._ibus_client.get_stop_forecast(self._stop, self._line)
except HTTPError:
_LOGGER.error(
"Unable to fetch data from TMB API. Please check your API keys are valid"
)
| true | true |
f731b91eec02eb0ee49fd3752978d19259b17ce2 | 5,140 | py | Python | my_memory_card.py | all0ws/memorecard | ba90d4fd0663f11c963907290c19ad1305ba67cb | [
"CC0-1.0"
] | null | null | null | my_memory_card.py | all0ws/memorecard | ba90d4fd0663f11c963907290c19ad1305ba67cb | [
"CC0-1.0"
] | null | null | null | my_memory_card.py | all0ws/memorecard | ba90d4fd0663f11c963907290c19ad1305ba67cb | [
"CC0-1.0"
] | null | null | null |
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QApplication, QWidget, QHBoxLayout, QVBoxLayout, QGroupBox, QButtonGroup, QRadioButton, QPushButton, QLabel)
from random import shuffle,randint
class Question():
    """A single quiz card: the question text plus one correct answer and
    three distractors."""

    def __init__(self, question, right_answer, wrong1, wrong2, wrong3):
        self.question = question
        self.right_answer = right_answer
        self.wrong1 = wrong1
        self.wrong2 = wrong2
        self.wrong3 = wrong3
question_list=[]
question_list.append (Question('Когда основали nvidia?','1993','2000','1874','2007'))
question_list.append (Question('Как зовут Путина?',' Вова','Владимер','Путин','Вася'))
question_list.append (Question('Государственный язык Кореи','Корейский ','Японский','Китайский','Русский'))
question_list.append (Question('Кто написал стих Ода','Ломоносов','Толстой','Царь батюшка','Я'))
question_list.append (Question('какой iphone последний?',' 12','11','xr','13'))
question_list.append (Question('Когда умер Стив Джобс?',' 2007','2020','1999','2000'))
question_list.append (Question('Кто такой Путин?','Призедент','Сталин','человек','что-то'))
question_list.append (Question('Сколько букв в Русском алфавите','33','32','26','23'))
question_list.append (Question('Сколько букв в английком алфавите?','26','33','22','31'))
question_list.append (Question('Сколько символов в Корейском алфавите','51','21','33','41'))
app = QApplication([])
btn_OK = QPushButton('Ответить')
lb_Question = QLabel('Самый сложный вопрос в мире!')
RadioGroupBox = QGroupBox("Варианты ответов")
rbtn_1 = QRadioButton('Вариант 1')
rbtn_2 = QRadioButton('Вариант 2')
rbtn_3 = QRadioButton('Вариант 3')
rbtn_4 = QRadioButton('Вариант 4')
RadioGroup = QButtonGroup()
RadioGroup.addButton(rbtn_1)
RadioGroup.addButton(rbtn_2)
RadioGroup.addButton(rbtn_3)
RadioGroup.addButton(rbtn_4)
layout_ans1 = QHBoxLayout()
layout_ans2 = QVBoxLayout()
layout_ans3 = QVBoxLayout()
layout_ans2.addWidget(rbtn_1)
layout_ans2.addWidget(rbtn_2)
layout_ans3.addWidget(rbtn_3)
layout_ans3.addWidget(rbtn_4)
layout_ans1.addLayout(layout_ans2)
layout_ans1.addLayout(layout_ans3)
RadioGroupBox.setLayout(layout_ans1)
AnsGroupBox = QGroupBox("Результат теста")
lb_Result = QLabel('прав ты или нет?')
lb_Correct = QLabel('ответ будет тут!')
layout_res = QVBoxLayout()
layout_res.addWidget(lb_Result, alignment=(Qt.AlignLeft | Qt.AlignTop))
layout_res.addWidget(lb_Correct, alignment=Qt.AlignHCenter, stretch=2)
AnsGroupBox.setLayout(layout_res)
layout_line1 = QHBoxLayout()
layout_line2 = QHBoxLayout()
layout_line3 = QHBoxLayout()
layout_line1.addWidget(lb_Question, alignment=(Qt.AlignHCenter | Qt.AlignVCenter))
layout_line2.addWidget(RadioGroupBox)
layout_line2.addWidget(AnsGroupBox)
AnsGroupBox.hide()
layout_line3.addStretch(1)
layout_line3.addWidget(btn_OK, stretch=2)
layout_line3.addStretch(1)
layout_card = QVBoxLayout()
layout_card.addLayout(layout_line1, stretch=2)
layout_card.addLayout(layout_line2, stretch=8)
layout_card.addStretch(1)
layout_card.addLayout(layout_line3, stretch=1)
layout_card.addStretch(1)
layout_card.setSpacing(5)
def show_result():
    ''' show the result panel (and relabel the button for the next question) '''
    RadioGroupBox.hide()
    AnsGroupBox.show()
    btn_OK.setText('Следующий вопрос')
def show_question():
    ''' show the question panel and clear any previous answer selection '''
    RadioGroupBox.show()
    AnsGroupBox.hide()
    btn_OK.setText('Ответить')
    # temporarily disable exclusivity so every radio button can be unchecked
    RadioGroup.setExclusive(False)
    rbtn_1.setChecked(False)
    rbtn_2.setChecked(False)
    rbtn_3.setChecked(False)
    rbtn_4.setChecked(False)
    RadioGroup.setExclusive(True)
answers = [rbtn_1, rbtn_2, rbtn_3, rbtn_4]
def ask(q: Question):
    ''' write the question and its answers into the widgets; the answer
    options are distributed over the radio buttons in random order '''
    shuffle(answers)
    # after shuffling, answers[0] always holds the button carrying the
    # correct option (check_answer relies on this)
    answers[0].setText(q.right_answer)
    answers[1].setText(q.wrong1)
    answers[2].setText(q.wrong2)
    answers[3].setText(q.wrong3)
    lb_Question.setText(q.question)
    lb_Correct.setText(q.right_answer)
    show_question()
def show_correct(res):
    ''' show the result: set the given text in the "result" label and
    switch to the answer panel '''
    lb_Result.setText(res)
    show_result()
def check_answer():
    ''' if an answer option is selected, grade it and show the answer panel '''
    # answers[0] is the button that was given the correct text in ask()
    if answers[0].isChecked():
        show_correct('Правильно!')
    else:
        if answers[1].isChecked() or answers[2].isChecked() or answers[3].isChecked():
            show_correct('Неверно!')
def next_question():
    # pick a random card from the pool and display it
    cur_question = randint(0,len(question_list)-1)
    q= question_list[cur_question]
    ask(q)
def click_OK():
    # the button doubles as "answer" / "next question" depending on its label
    if btn_OK.text()=='Ответить':
        check_answer()
    else:
        next_question()
window = QWidget()
window.setLayout(layout_card)
window.setWindowTitle('Memo Card')
btn_OK.clicked.connect(click_OK)
next_question()
window.show()
app.exec() | 31.533742 | 138 | 0.706809 |
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QApplication, QWidget, QHBoxLayout, QVBoxLayout, QGroupBox, QButtonGroup, QRadioButton, QPushButton, QLabel)
from random import shuffle,randint
class Question():
def __init__(self,question,right_answer,wrong1,wrong2 ,wrong3):
self.right_answer =right_answer
self.wrong1 =wrong1
self.wrong2 =wrong2
self.wrong3 = wrong3
self.question =question
question_list=[]
question_list.append (Question('Когда основали nvidia?','1993','2000','1874','2007'))
question_list.append (Question('Как зовут Путина?',' Вова','Владимер','Путин','Вася'))
question_list.append (Question('Государственный язык Кореи','Корейский ','Японский','Китайский','Русский'))
question_list.append (Question('Кто написал стих Ода','Ломоносов','Толстой','Царь батюшка','Я'))
question_list.append (Question('какой iphone последний?',' 12','11','xr','13'))
question_list.append (Question('Когда умер Стив Джобс?',' 2007','2020','1999','2000'))
question_list.append (Question('Кто такой Путин?','Призедент','Сталин','человек','что-то'))
question_list.append (Question('Сколько букв в Русском алфавите','33','32','26','23'))
question_list.append (Question('Сколько букв в английком алфавите?','26','33','22','31'))
question_list.append (Question('Сколько символов в Корейском алфавите','51','21','33','41'))
app = QApplication([])
btn_OK = QPushButton('Ответить')
lb_Question = QLabel('Самый сложный вопрос в мире!')
RadioGroupBox = QGroupBox("Варианты ответов")
rbtn_1 = QRadioButton('Вариант 1')
rbtn_2 = QRadioButton('Вариант 2')
rbtn_3 = QRadioButton('Вариант 3')
rbtn_4 = QRadioButton('Вариант 4')
RadioGroup = QButtonGroup()
RadioGroup.addButton(rbtn_1)
RadioGroup.addButton(rbtn_2)
RadioGroup.addButton(rbtn_3)
RadioGroup.addButton(rbtn_4)
layout_ans1 = QHBoxLayout()
layout_ans2 = QVBoxLayout()
layout_ans3 = QVBoxLayout()
layout_ans2.addWidget(rbtn_1)
layout_ans2.addWidget(rbtn_2)
layout_ans3.addWidget(rbtn_3)
layout_ans3.addWidget(rbtn_4)
layout_ans1.addLayout(layout_ans2)
layout_ans1.addLayout(layout_ans3)
RadioGroupBox.setLayout(layout_ans1)
AnsGroupBox = QGroupBox("Результат теста")
lb_Result = QLabel('прав ты или нет?')
lb_Correct = QLabel('ответ будет тут!')
layout_res = QVBoxLayout()
layout_res.addWidget(lb_Result, alignment=(Qt.AlignLeft | Qt.AlignTop))
layout_res.addWidget(lb_Correct, alignment=Qt.AlignHCenter, stretch=2)
AnsGroupBox.setLayout(layout_res)
layout_line1 = QHBoxLayout()
layout_line2 = QHBoxLayout()
layout_line3 = QHBoxLayout()
layout_line1.addWidget(lb_Question, alignment=(Qt.AlignHCenter | Qt.AlignVCenter))
layout_line2.addWidget(RadioGroupBox)
layout_line2.addWidget(AnsGroupBox)
AnsGroupBox.hide()
layout_line3.addStretch(1)
layout_line3.addWidget(btn_OK, stretch=2)
layout_line3.addStretch(1)
layout_card = QVBoxLayout()
layout_card.addLayout(layout_line1, stretch=2)
layout_card.addLayout(layout_line2, stretch=8)
layout_card.addStretch(1)
layout_card.addLayout(layout_line3, stretch=1)
layout_card.addStretch(1)
layout_card.setSpacing(5)
def show_result():
RadioGroupBox.hide()
AnsGroupBox.show()
btn_OK.setText('Следующий вопрос')
def show_question():
RadioGroupBox.show()
AnsGroupBox.hide()
btn_OK.setText('Ответить')
RadioGroup.setExclusive(False)
rbtn_1.setChecked(False)
rbtn_2.setChecked(False)
rbtn_3.setChecked(False)
rbtn_4.setChecked(False)
RadioGroup.setExclusive(True)
answers = [rbtn_1, rbtn_2, rbtn_3, rbtn_4]
def ask(q: Question):
shuffle(answers)
answers[0].setText(q.right_answer)
answers[1].setText(q.wrong1)
answers[2].setText(q.wrong2)
answers[3].setText(q.wrong3)
lb_Question.setText(q.question)
lb_Correct.setText(q.right_answer)
show_question()
def show_correct(res):
lb_Result.setText(res)
show_result()
def check_answer():
if answers[0].isChecked():
show_correct('Правильно!')
else:
if answers[1].isChecked() or answers[2].isChecked() or answers[3].isChecked():
show_correct('Неверно!')
def next_question():
cur_question = randint(0,len(question_list)-1)
q= question_list[cur_question]
ask(q)
def click_OK():
if btn_OK.text()=='Ответить':
check_answer()
else:
next_question()
window = QWidget()
window.setLayout(layout_card)
window.setWindowTitle('Memo Card')
btn_OK.clicked.connect(click_OK)
next_question()
window.show()
app.exec() | true | true |
f731baf2c2d2eefe7ec99e5e55ef74d57635e126 | 1,624 | py | Python | tools/mo/unit_tests/mo/ops/dft_signal_size_canonicalization_test.py | ryanloney/openvino-1 | 4e0a740eb3ee31062ba0df88fcf438564f67edb7 | [
"Apache-2.0"
] | 1,127 | 2018-10-15T14:36:58.000Z | 2020-04-20T09:29:44.000Z | tools/mo/unit_tests/mo/ops/dft_signal_size_canonicalization_test.py | ryanloney/openvino-1 | 4e0a740eb3ee31062ba0df88fcf438564f67edb7 | [
"Apache-2.0"
] | 439 | 2018-10-20T04:40:35.000Z | 2020-04-19T05:56:25.000Z | tools/mo/unit_tests/mo/ops/dft_signal_size_canonicalization_test.py | ryanloney/openvino-1 | 4e0a740eb3ee31062ba0df88fcf438564f67edb7 | [
"Apache-2.0"
] | 414 | 2018-10-17T05:53:46.000Z | 2020-04-16T17:29:53.000Z | # Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import unittest
import numpy as np
from generator import generator, generate
from openvino.tools.mo.ops.dft import FFTBase
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
@generator
class DFTSignalSizeCanonicalizationTest(unittest.TestCase):
@generate(*[
(int64_array([-1, 77]), int64_array([1, 2]), int64_array([2, 180, 180, 2]), int64_array([180, 77])),
(int64_array([390, 87]), int64_array([2, 0]), int64_array([2, 180, 180, 2]), int64_array([390, 87])),
(int64_array([600, -1, 40]),
int64_array([3, 0, 1]),
int64_array([7, 50, 130, 400, 2]),
int64_array([600, 7, 40])),
(int64_array([-1, 16, -1]),
int64_array([3, 0, 2]),
int64_array([7, 50, 130, 400, 2]),
int64_array([400, 16, 130])),
(int64_array([16, -1, -1]),
int64_array([3, 0, 2]),
int64_array([7, 50, 130, 400, 2]),
int64_array([16, 7, 130])),
(int64_array([-1, -1, 16]),
int64_array([3, 0, 2]),
int64_array([7, 50, 130, 400, 2]),
int64_array([400, 7, 16])),
(int64_array([-1, -1, -1]),
int64_array([3, 0, 2]),
int64_array([7, 50, 130, 400, 2]),
int64_array([400, 7, 130])),
])
def test_canonicalization(self, signal_size, axes, input_shape, expected_result):
canonicalized_signal_size = FFTBase.canonicalize_signal_size(signal_size, axes, input_shape)
self.assertTrue(np.array_equal(canonicalized_signal_size, expected_result))
| 38.666667 | 109 | 0.606527 |
import unittest
import numpy as np
from generator import generator, generate
from openvino.tools.mo.ops.dft import FFTBase
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
@generator
class DFTSignalSizeCanonicalizationTest(unittest.TestCase):
@generate(*[
(int64_array([-1, 77]), int64_array([1, 2]), int64_array([2, 180, 180, 2]), int64_array([180, 77])),
(int64_array([390, 87]), int64_array([2, 0]), int64_array([2, 180, 180, 2]), int64_array([390, 87])),
(int64_array([600, -1, 40]),
int64_array([3, 0, 1]),
int64_array([7, 50, 130, 400, 2]),
int64_array([600, 7, 40])),
(int64_array([-1, 16, -1]),
int64_array([3, 0, 2]),
int64_array([7, 50, 130, 400, 2]),
int64_array([400, 16, 130])),
(int64_array([16, -1, -1]),
int64_array([3, 0, 2]),
int64_array([7, 50, 130, 400, 2]),
int64_array([16, 7, 130])),
(int64_array([-1, -1, 16]),
int64_array([3, 0, 2]),
int64_array([7, 50, 130, 400, 2]),
int64_array([400, 7, 16])),
(int64_array([-1, -1, -1]),
int64_array([3, 0, 2]),
int64_array([7, 50, 130, 400, 2]),
int64_array([400, 7, 130])),
])
def test_canonicalization(self, signal_size, axes, input_shape, expected_result):
canonicalized_signal_size = FFTBase.canonicalize_signal_size(signal_size, axes, input_shape)
self.assertTrue(np.array_equal(canonicalized_signal_size, expected_result))
| true | true |
f731bcca089c5e02b3aa0fbb5699fe5db61f89d1 | 30,419 | py | Python | back of code/RSCFN/rsden/models/rsn_cluster # without 0 fuse cluster.py | lidongyv/Monocular-depth-esitimation-with-region-support-cvpr | 7715c91b9c9f88de5c0233923c3a073edf9b2ca8 | [
"Apache-2.0"
] | null | null | null | back of code/RSCFN/rsden/models/rsn_cluster # without 0 fuse cluster.py | lidongyv/Monocular-depth-esitimation-with-region-support-cvpr | 7715c91b9c9f88de5c0233923c3a073edf9b2ca8 | [
"Apache-2.0"
] | null | null | null | back of code/RSCFN/rsden/models/rsn_cluster # without 0 fuse cluster.py | lidongyv/Monocular-depth-esitimation-with-region-support-cvpr | 7715c91b9c9f88de5c0233923c3a073edf9b2ca8 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# @Author: lidong
# @Date: 2018-03-20 18:01:52
# @Last Modified by: yulidong
# @Last Modified time: 2018-11-06 20:45:11
import torch
import numpy as np
import torch.nn as nn
import math
from math import ceil
from torch.autograd import Variable
from rsden.cluster_loss import *
from rsden import caffe_pb2
from rsden.models.utils import *
import time
cuda_id=3
group_dim=1
def mean_shift(feature, mean, bandwidth, iters=10):
    """Refine a cluster center by iterative mean shift over an embedding map.

    Args:
        feature: embedding tensor of shape (C, H, W) on GPU `cuda_id`.
        mean: current center, broadcastable against `feature` (C, 1, 1).
        bandwidth: radius in embedding space; only pixels within it
            contribute to the updated mean.
        iters: number of mean-shift iterations to run (previously a
            hard-coded 10; kept as the default for backward compatibility).

    Returns:
        Updated mean of shape (C, 1, 1).
    """
    for _ in range(iters):
        # per-pixel distance from the current center
        dis = torch.norm(feature - mean, dim=0)
        # indicator of pixels inside the bandwidth ball
        mask = torch.where(dis < bandwidth, torch.tensor(1).cuda(cuda_id), torch.tensor(0).cuda(cuda_id)).float()
        # new center = mean embedding of the in-ball pixels
        mean = torch.sum((feature * mask).view(feature.shape[0], feature.shape[1] * feature.shape[2]), dim=1) / torch.sum(mask)
        mean = mean.view([feature.shape[0], 1, 1])
    return mean
def get_mask(feature, mean, bandwidth):
    """Return a float {0,1} mask of pixels whose embedding lies within
    `bandwidth` of `mean`."""
    center = mean.view([mean.shape[0], 1, 1])
    distance = torch.norm(feature - center, dim=0)
    inside = torch.where(
        distance < bandwidth,
        torch.tensor(1).cuda(cuda_id),
        torch.tensor(0).cuda(cuda_id),
    )
    return inside.float()
def re_label(mask, area, bandwidth):
    """Offset per-channel region labels so they are globally unique, then
    collapse them into one label image.

    FIX: removed a leftover debug `print(torch.sum(area))` (and the unused
    `index` it printed) from library code.

    Args:
        mask: per-region label tensor of shape (K, H, W); positive entries
            are labels local to each channel.
        area: per-channel label counts; channel i's labels are shifted by
            the cumulative count of earlier channels.
        bandwidth: unused here; kept for interface compatibility.

    Returns:
        (mask, area, final): the globally-relabeled mask, the untouched
        areas, and the averaged 2-D label image scaled by 1/255.
    """
    count = torch.tensor(0).float().cuda(cuda_id)
    for i in range(area.shape[0]):
        # shift only labeled (positive) pixels; zeros stay background
        mask[i, :, :] = torch.where(mask[i, :, :] > 0, mask[i, :, :] + count, mask[i, :, :])
        count += area[i]
    segment = torch.where(mask > 0, torch.tensor(1).cuda(cuda_id), torch.tensor(0).cuda(cuda_id)).float()
    # average the label values over the channels that claim each pixel
    final = torch.sum(mask, dim=0) / torch.sum(segment, dim=0)
    final = torch.squeeze(final)
    final = final / 255
    return mask, area, final
def refine_mask(mask):
    """Split a large region mask into 80x80 tiles and give each sufficiently
    populated tile its own positive integer label."""
    pixels=mask.nonzero()
    # small regions (< 400 px) are returned unchanged
    if torch.sum(mask)<400:
        return mask
    # bounding box of the region's pixels
    minx=torch.min(pixels[:,0])
    maxx=torch.max(pixels[:,0])
    miny=torch.min(pixels[:,1])
    maxy=torch.max(pixels[:,1])
    # walk the bounding box in 80x80 tiles; tiles holding > 400 region pixels
    # get the provisional label i*j
    # NOTE(review): i*j collides for e.g. tiles (1,2) and (2,1), which would
    # merge those tiles into one label — confirm this is intended
    for i in range(1,torch.ceil((maxx-minx).float()/80).int()+1):
        for j in range(1,torch.ceil((maxy-miny).float()/80).int()+1):
            if torch.sum(mask[minx+80*(i-1):minx+80*i,miny+80*(j-1):miny+80*j])>400:
                mask[minx+80*(i-1):minx+80*i,miny+80*(j-1):miny+80*j]*=i*j
    # compress provisional labels into consecutive ids 1..K; new ids are
    # written negated first so they cannot collide with remaining old labels
    areas=torch.unique(mask).sort()[0]
    for i in range(1,len(areas)):
        mask=torch.where(mask==areas[i],-torch.ones(1).float().cuda(cuda_id)*i,mask)
    mask=-mask
    return mask.float()
def fuse_mask(n_mask,r_mask):
base=torch.where(n_mask>0,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
areas=torch.max(n_mask)
#for i in range(1,torch.max(r_mask).long()+1):
i=1
shift=torch.where(r_mask==i,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
non_overlap=torch.where(base-shift==-1,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
overlap=shift-non_overlap
if torch.sum(non_overlap)/torch.sum(shift)>0.4:
areas+=1
n_mask=torch.where(non_overlap==1,areas,n_mask)
base=torch.where(n_mask>0,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
#print(areas)
else:
area_num=torch.argmax(torch.bincount(torch.where(overlap.long()==1,n_mask.long(),torch.tensor(0).cuda(cuda_id)).view(-1))[1:]).float()+1
n_mask=torch.where(non_overlap==1,area_num,n_mask)
base=torch.where(n_mask>0,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
#print(areas)
# areas_nums=torch.tensor(1).float().cuda(cuda_id)
# for i in range(1,torch.max(n_mask).long()+1):
# region=torch.where(n_mask==i,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
# pixels=region.nonzero()
# if pixels.shape[0]>0:
# minx=torch.min(pixels[:,0])
# maxx=torch.max(pixels[:,0])
# miny=torch.min(pixels[:,1])
# maxy=torch.max(pixels[:,1])
# for i in range(1,torch.ceil((maxx-minx).float()/80).int()+1):
# for j in range(1,torch.ceil((maxy-miny).float()/80).int()+1):
# if torch.sum(region[minx+80*(i-1):minx+80*i,miny+80*(j-1):miny+80*j])>400:
# region[minx+80*(i-1):minx+80*i,miny+80*(j-1):miny+80*j]*=i*j
# areas=torch.unique(region).sort()[0]
# for i in range(1,len(areas)):
# region=torch.where(region==areas[i],-areas_nums,region)
# areas_nums+=1
# n_mask=torch.where(n_mask==i,region,n_mask)
# n_mask=-n_mask
return n_mask
def fast_cluster(feature,bandwidth=0.16):
masks=[]
areas=[]
segments=[]
#start_time=time.time()
for i in range(feature.shape[0]):
n_mask=0
n_feature=feature[i,...]
label=torch.zeros(n_feature.shape[1],n_feature.shape[2]).cuda(cuda_id).float()
check=0
count=0
while(torch.min(label)==0):
candidate=torch.where(label==0,torch.tensor(1).float().cuda(cuda_id),torch.tensor(0).float().cuda(cuda_id)).nonzero()
#print(len(candidate))
seed=torch.randint(len(candidate),(1,))[0].long()
mean=n_feature[:,candidate[seed][0].long(),candidate[seed][1].long()].view(n_feature.shape[0],1,1)
mean=mean_shift(n_feature, mean, bandwidth)
t_masks=get_mask(n_feature, mean, bandwidth)
#print(len(candidate),n_mask)
label=label+t_masks
if n_mask==0:
#r_masks=refine_mask(t_masks)
n_masks=t_masks
n_mask=torch.max(n_masks)
else:
#r_masks=refine_mask(t_masks)
n_masks=fuse_mask(n_masks,t_masks)
n_mask=torch.max(n_masks)
#print(torch.max(n_masks))
if len(candidate)==check:
count+=1
else:
check=len(candidate)
if count>3:
bandwidth=bandwidth*1.1
count=0
if n_mask==50:
bandwidth=bandwidth*1.1
if n_mask==60:
bandwidth=bandwidth*1.1
# if n_mask==70:
# bandwidth=bandwidth*1.1
# if n_mask==100:
# bandwidth=bandwidth*1.1
if n_mask>70:
#n_masks=fuse_mask(n_masks,torch.where(label==0,torch.tensor(1).float().cuda(cuda_id),torch.tensor(0).float().cuda(cuda_id)))
break
#print(time.time()-start_time)
return n_masks
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
pad=nn.ReplicationPad2d(1)
padding=0
conv_mod = nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=padding, bias=False)
return nn.Sequential(pad,conv_mod)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.gn1 = nn.GroupNorm(group_dim,planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.gn2 = nn.GroupNorm(group_dim,planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.gn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.gn2(out)
if self.downsample is not None:
residual = self.downsample(x)
# print(residual.shape)
# print(out.shape)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.gn1 = nn.GroupNorm(group_dim,planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.gn2 = nn.GroupNorm(group_dim,planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.gn3 = nn.GroupNorm(group_dim,planes * 4)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.gn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.gn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.gn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class rsn_cluster(nn.Module):
def __init__(self,
n_classes=64,
block_config=[3, 16, 3, 3],
input_size= (480, 640),
version='scene'):
super(rsn_cluster, self).__init__()
self.inplanes = 64
layers=[4, 10, 5, 5]
block=BasicBlock
# Encoder
self.conv1=conv2DGroupNormRelu(3, 32, k_size=3,
padding=1, stride=1, bias=False)
self.conv2=conv2DGroupNormRelu(32, 64, k_size=3,
padding=1, stride=1, bias=False)
self.layer1 = self._make_layer(block, 64, layers[0],stride=1)
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 256, layers[3], stride=1)
# self.layer5 = conv2DGroupNormRelu(in_channels=128, k_size=3, n_filters=256,
# padding=1, stride=1, bias=False,group_dim=group_dim)
# Pyramid Pooling Module
#we need to modify the padding to keep the diminsion
#remove 1 ,because the error of bn
self.pyramid_pooling = pyramidPoolingGroupNorm(256, [[30,40],[12,16],[3,4],[1,1]],group_dim=group_dim)
#self.global_pooling = globalPooling(256, 1)
# Final conv layers
#self.cbr_final = conv2DBatchNormRelu(512, 256, 3, 1, 1, False)
#self.dropout = nn.Dropout2d(p=0.1, inplace=True)
self.fuse0 = conv2DGroupNormRelu(in_channels=512, k_size=3, n_filters=256,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.fuse1 = conv2DGroupNormRelu(in_channels=256, k_size=3, n_filters=128,
padding=1, stride=1, bias=False,group_dim=group_dim)
#we need to replace the upsampling unit with nearest and deconv2d
self.deconv1 = deconv2DGroupNormRelu(in_channels=128, n_filters=128, k_size=4,
stride=2, padding=1,output_padding=0, bias=False,group_dim=group_dim)
self.fuse2 = conv2DGroupNormRelu(in_channels=256, k_size=3, n_filters=192,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.deconv2 = deconv2DGroupNormRelu(in_channels=192, n_filters=192, k_size=4,
stride=2, padding=1,output_padding=0, bias=False,group_dim=group_dim)
self.fuse3 = conv2DGroupNormRelu(in_channels=256, k_size=3, n_filters=256,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.inplanes = 257
self.regress1 = self._make_layer(block,128, 4, stride=1)
self.regress2 = conv2DGroupNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.regress3 = conv2DGroupNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.regress4 = conv2DRelu(in_channels=32, k_size=3, n_filters=1,
padding=1, stride=1, bias=False)
self.class0= conv2DGroupNormRelu(in_channels=258, k_size=1, n_filters=128,
padding=0, stride=1, bias=False,group_dim=group_dim)
self.class1= conv2DGroupNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.class2= conv2DRelu(in_channels=64, k_size=3, n_filters=64,
padding=1, stride=1, bias=False)
self.class3= conv2DRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1, bias=False)
self.class4= conv2D(in_channels=32, k_size=1, n_filters=16,
padding=0, stride=1, bias=False)
# self.outrefine1=conv2DGroupNormRelu(in_channels=514, k_size=1, n_filters=128,
# padding=0, stride=1, bias=False,group_dim=group_dim)
# self.outrefine2=conv2DGroupNormRelu(in_channels=128, k_size=1, n_filters=64,
# padding=0, stride=1, bias=False,group_dim=group_dim)
# self.outrefine3=conv2DRelu(in_channels=64, k_size=3, n_filters=32,
# padding=1, stride=1, bias=False)
# self.outrefine4= conv2D(in_channels=32, k_size=1, n_filters=1,
# padding=0, stride=1, bias=False)
self.inrefine1=conv2DGroupNormRelu(in_channels=513, k_size=3, n_filters=128,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.inrefine2=conv2DGroupNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.inrefine3=conv2DGroupNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.inrefine4= conv2DRelu(in_channels=32, k_size=1, n_filters=16,
padding=0, stride=1, bias=False)
self.inrefine5= conv2D(in_channels=16, k_size=1, n_filters=1,
padding=0, stride=1, bias=False)
self.reliable1=conv2DGroupNormRelu(in_channels=513, k_size=3, n_filters=128,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.reliable2=conv2DGroupNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.reliable3= conv2DGroupNormRelu(in_channels=64, k_size=1, n_filters=32,
padding=0, stride=1, bias=False,group_dim=group_dim)
self.reliable4= conv2DGroupNormRelu(in_channels=32, k_size=1, n_filters=16,
padding=0, stride=1, bias=False)
self.reliable5= conv2D(in_channels=16, k_size=1, n_filters=1,
padding=0, stride=1, bias=False)
self.output=nn.ReLU(inplace=True)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.GroupNorm):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.ConvTranspose2d):
m.weight.data.fill_(1)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.ReplicationPad2d(0),
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False,padding=0),
nn.GroupNorm(group_dim,planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
#print(self.inplanes)
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x,segments,labels,flag,task):
#print(x.shape)
location_map=torch.cat([(torch.arange(x.shape[-1])/x.shape[-1]).unsqueeze(0).expand(x.shape[-2],x.shape[-1]).unsqueeze(0), \
(torch.arange(x.shape[-2])/x.shape[-2]).unsqueeze(0).transpose(1,0).expand(x.shape[-2],x.shape[-1]).unsqueeze(0)],0).unsqueeze(0).float().cuda(cuda_id)
#x=torch.cat([x,location_map],1)
zero=torch.zeros(1).cuda(cuda_id)
one=torch.ones(1).cuda(cuda_id)
x = self.conv1(x)
x=self.conv2(x)
x1 = self.layer1(x)
#half resolution
x2 = self.layer2(x1)
#print(x.shape)
x = self.layer3(x2)
#print(x.shape)
x = self.layer4(x)
#print(x.shape)
# H, W -> H/2, W/2
x = self.pyramid_pooling(x)
#x = self.cbr_final(x)
#x = self.dropout(x)
x = self.fuse0(x)
x = self.fuse1(x)
#print(x.shape)
x = self.deconv1(x)
#print(x.shape,x2.shape)
x = self.fuse2(torch.cat((x,x2),1))
x = self.deconv2(x)
#print(x.shape)
x_share = self.fuse3(torch.cat((x,x1),1))
# x=self.regress1(x_share)
# #print(x.shape)
# x=self.regress2(x)
# x=self.regress3(x)
# depth=self.regress4(x)
# accurate_depth=depth
# return depth,accurate_depth
#clustering feature
#accurate_depth=depth*reliable
if flag==0:
x_fuse=torch.cat([x_share,location_map],1)
y=self.class0(x_fuse)
y=self.class1(y)
y=self.class2(y)
y=self.class3(y)
y=self.class4(y)
with torch.no_grad():
masks=fast_cluster(y).view(1,1,x_share.shape[-2],x_share.shape[-1])
#masks=segments.view(1,1,x_share.shape[-2],x_share.shape[-1])
x=self.regress1(torch.cat([x_share,masks],1))
#x=self.regress1(torch.cat([x_share,masks],1))
#print(x.shape)
x=self.regress2(x)
x=self.regress3(x)
depth=self.regress4(x)
with torch.no_grad():
#masks=fast_cluster(y).view_as(depth)
#masks=segments.view_as(depth)
labels=labels.view_as(depth)
#coarse depth
coarse_depth=depth+0
coarse_feature=x_share+0
mean_features=torch.zeros(1,x_share.shape[1],torch.max(masks).long()+1).cuda(cuda_id)
mean_depth=torch.zeros(torch.max(masks).long()+1).cuda(cuda_id)
#print(torch.max(masks))
for i in range(1,torch.max(masks).int()+1):
index_r=torch.where(masks==i,one,zero)
mean_d=torch.sum(index_r*depth)/torch.sum(index_r)
mean_depth[i]=mean_d
coarse_depth=torch.where(masks==i,mean_d,coarse_depth)
mean_f=torch.sum((index_r*x_share).view(x_share.shape[0],x_share.shape[1],-1),dim=-1)/torch.sum(index_r)
#print(mean_f.shape,mean_features[...,i].shape)
mean_features[...,i]=mean_f
coarse_feature=torch.where(masks==i,mean_f.view(x_share.shape[0],x_share.shape[1],1,1),coarse_feature)
# #refine outer
# outer_feature=torch.zeros(1,2*x_share.shape[1]+2,torch.max(masks).long()+1,torch.max(masks).long()+1).cuda(cuda_id)
# for i in range(torch.min(masks).int(),torch.max(masks).int()+1):
# for j in range(torch.min(masks).int(),torch.max(masks).int()+1):
# if i!=j:
# #print(outer_feature[...,i,j].shape,mean_depth[i].view(1,1).shape,mean_features[...,i].shape)
# outer_feature[...,i,j]=torch.cat([mean_depth[i].view(1,1),mean_features[...,i],mean_depth[j].view(1,1),mean_features[...,j]],dim=-1)
# outer=self.outrefine1(outer_feature)
# outer=self.outrefine2(outer)
# outer=self.outrefine3(outer)
# outer_variance=self.outrefine4(outer)
# outer_depth=torch.zeros(torch.max(masks).long()+1).cuda(cuda_id)
# # #mean_depth_map=coarse_depth+0
# #with torch.no_grad():
# for i in range(torch.min(masks).int(),torch.max(masks).int()+1):
# outer_depth[i]=(torch.sum(mean_depth*outer_variance[...,i,:])+mean_depth[i])/torch.sum(outer_variance[...,i,:]+1)
# #outer_depth[i]=(torch.sum(mean_depth*outer_variance[...,i,:])+mean_depth[i])
# coarse_depth=torch.where(masks==i,outer_depth[i],coarse_depth)+0
#refine inner
#coarse_depth=self.output(coarse_depth)
inner_feature= torch.cat([coarse_depth,x_share,coarse_feature],1)
inner=self.inrefine1(inner_feature)
inner=self.inrefine2(inner)
inner=self.inrefine3(inner)
inner=self.inrefine4(inner)
inner_variance=self.inrefine5(inner)
reliable_feature= torch.cat([depth,x_share,coarse_feature],1)
reliable=self.inrefine1(reliable_feature)
reliable=self.inrefine2(reliable)
reliable=self.inrefine3(reliable)
reliable=self.inrefine4(reliable)
reliable_variance=self.inrefine5(reliable)
# #inner_variance[:,0,...]=inner_variance[:,0,...]/torch.max(inner_variance[:,0,...])
# reliable_to_depth=(inner_variance[:,0,...]/torch.max(inner_variance[:,0,...])).unsqueeze(1)
# variance_on_cosrse=inner_variance[:,1,...].unsqueeze(1)
# #print(inner_variance.shape)
# accurate_depth=depth*reliable_to_depth+(coarse_depth*variance_on_cosrse)*(1-reliable_to_depth)
loss_var,loss_dis,loss_reg = cluster_loss(y,segments.long(),device_id=cuda_id)
loss_var=loss_var.reshape((y.shape[0],1))
loss_dis=loss_dis.reshape((y.shape[0],1))
loss_reg=loss_reg.reshape((y.shape[0],1))
accurate_depth=self.output(inner_variance+coarse_depth)
depth=self.output(reliable_variance+depth)
accurate_depth=torch.where(masks>0,(depth+accurate_depth)/2,depth)
#print(torch.mean(depth).item(),torch.mean(coarse_depth).item())
return masks,accurate_depth,loss_var,loss_dis,loss_reg
else:
if task=='train':
with torch.no_grad():
masks=fast_cluster(y).view_as(depth)
print(torch.max(masks))
loss_var,loss_dis,loss_reg = cluster_loss(y,segments.long())
loss_var=loss_var.reshape((y.shape[0],1))
loss_dis=loss_dis.reshape((y.shape[0],1))
loss_reg=loss_reg.reshape((y.shape[0],1))
return depth,masks,loss_var,loss_dis,loss_reg
elif task=='test':
loss_var,loss_dis,loss_reg = cluster_loss(y,segments.long())
loss_var=loss_var.reshape((y.shape[0],1))
loss_dis=loss_dis.reshape((y.shape[0],1))
loss_reg=loss_reg.reshape((y.shape[0],1))
return depth,loss_var,loss_dis,loss_reg
elif task=='eval':
x_fuse=torch.cat([x_share,location_map],1)
masks=segments.view_as(depth)
#coarse depth
coarse_depth=depth+0
coarse_feature=x_fuse+0
mean_features=torch.zeros(1,x_fuse.shape[1],torch.max(masks).long()+1).cuda(cuda_id)
mean_depth=torch.zeros(torch.max(masks).long()+1).cuda(cuda_id)
for i in range(torch.min(masks).int(),torch.max(masks).int()+1):
index_r=torch.where(masks==i,one,zero)
mean_d=torch.sum(index_r*depth)/torch.sum(index_r)
mean_depth[i]=mean_d+0
coarse_depth=torch.where(masks==i,mean_depth[i],coarse_depth)
mean_f=torch.sum((index_r*x_fuse).view(x_fuse.shape[0],x_fuse.shape[1],-1),dim=-1)/torch.sum(index_r)
#print(mean_f.shape,mean_features[...,i].shape)
mean_features[...,i]=mean_f
coarse_feature=torch.where(masks==i,mean_f.view(x_fuse.shape[0],x_fuse.shape[1],1,1),coarse_feature)
#refine outer
# outer_feature=torch.zeros(1,2*x_fuse.shape[1]+2,torch.max(masks).long()-torch.min(masks).long()+1,torch.max(masks).long()-torch.min(masks).long()+1).cuda(cuda_id)
# for i in range(torch.min(masks).int(),torch.max(masks).int()+1):
# for j in range(torch.min(masks).int(),torch.max(masks).int()+1):
# if i!=j:
# #print(outer_feature[...,i,j].shape,mean_depth[i].view(1,1).shape,mean_features[...,i].shape)
# outer_feature[...,i,j]=torch.cat([mean_depth[i].view(1,1),mean_features[...,i],mean_depth[j].view(1,1),mean_features[...,j]],dim=-1)
# outer=self.outrefine1(outer_feature)
# outer=self.outrefine2(outer)
# outer=self.outrefine3(outer)
# outer_variance=self.outrefine4(outer)
# outer_depth=torch.zeros(torch.max(masks).long()-torch.min(masks).long()+1).cuda(cuda_id)
# #mean_depth_map=coarse_depth+0
# # print(torch.min(masks))
# # print(torch.sum(torch.where(masks==0,torch.ones(1).cuda(cuda_id),torch.zeros(1).cuda(cuda_id))))
# for i in range(torch.min(masks).int(),torch.max(masks).int()+1):
# outer_depth[i]=(torch.sum(mean_depth*outer_variance[...,i,:])+mean_depth[i])/(torch.sum(outer_variance[...,i,:])+1)
# #outer_depth[i]=(torch.sum(mean_depth*outer_variance[...,i,:])+mean_depth[i])
# coarse_depth=torch.where(masks==i,outer_depth[i],coarse_depth)+0
#print(torch.max(coarse_depth),torch.mean(mean_depth),torch.mean(outer_depth),torch.max(outer_variance))
#mean_depth_map=coarse_depth+0
#refine inner
inner_feature= torch.cat([coarse_depth,x_fuse-coarse_feature],1)
#print('inner_feature',torch.max(inner_feature).item())
inner=self.inrefine1(inner_feature)
#print('inner_1',torch.max(inner).item())
inner=self.inrefine2(inner)
#print('inner_2',torch.max(inner).item())
inner=self.inrefine3(inner)
#print('inner_3',torch.max(inner).item())
inner=self.inrefine4(inner)
inner_variance=self.inrefine5(inner)
accurate_depth=inner_variance
# inner_feature= torch.cat([depth,x_share],1)
# relialbe=self.reliable1(inner_feature)
# relialbe=self.reliable2(relialbe)
# relialbe=self.reliable3(relialbe)
# relialbe=self.reliable4(relialbe)
# relialbe=self.reliable5(relialbe)
# accurate_depth=relialbe
# print('inner_variance',torch.max(inner_variance).item())
# inner_variance[:,0,...]=inner_variance[:,0,...]/torch.max(inner_variance[:,0,...])
# reliable_to_depth=(torch.exp(-relialbe[:,0,...])).unsqueeze(1)
# reliable_to_coarse=(torch.exp(-inner_variance[:,0,...])).unsqueeze(1)
# variance_on_depth=relialbe[:,1,...].unsqueeze(1)
# variance_on_cosrse=inner_variance[:,1,...].unsqueeze(1)
# print('reliable_depth: %.2f reliable_coarse: %.2f variance_depth %.2f variance_coarse %.2f'%(torch.mean(reliable_to_depth).item(), \
# torch.mean(reliable_to_coarse).item(),torch.mean(variance_on_depth).item(),torch.mean(variance_on_cosrse).item()))
# #print('variance %.2f'%(torch.mean(inner_variance).item()))
# relialbe_weights=reliable_to_coarse+reliable_to_depth
# # #print(inner_variance.shape)
# accurate_depth=(depth*variance_on_depth*reliable_to_coarse+coarse_depth*variance_on_cosrse*reliable_to_coarse)/ \
# (torch.where(relialbe_weights==0,torch.ones(1).cuda(cuda_id),relialbe_weights))
# refined_depth=depth*variance_on_depth
# coarse_depth=coarse_depth*variance_on_cosrse
# accurate_depth=(coarse_depth*reliable_to_coarse+refined_depth*(1-reliable_to_coarse))
# accurate_depth=refined_depth*reliable_to_depth
# print('depth',torch.max(depth).item())
# print('coarse',torch.max(coarse_depth).item())
# print('accurate',torch.max(accurate_depth).item())
# loss_var,loss_dis,loss_reg = cluster_loss(y,segments.long())
# loss_var=loss_var.reshape((y.shape[0],1))
# loss_dis=loss_dis.reshape((y.shape[0],1))
# loss_reg=loss_reg.reshape((y.shape[0],1))
# accurate_depth=inner_variance
# simple refinement
# x_fuse=x_share+depth.expand_as(x_share)
# inner=self.inrefine1(x_fuse)
# inner=self.inrefine2(inner)
# inner=self.inrefine3(inner)
# inner=self.inrefine4(inner)
# accurate_depth=self.inrefine5(inner)
accurate_depth=depth
return depth,accurate_depth
| 49.704248 | 180 | 0.571715 |
import torch
import numpy as np
import torch.nn as nn
import math
from math import ceil
from torch.autograd import Variable
from rsden.cluster_loss import *
from rsden import caffe_pb2
from rsden.models.utils import *
import time
cuda_id=3
group_dim=1
def mean_shift(feature,mean,bandwidth):
for t in range(10):
dis=feature-mean
dis=torch.norm(dis,dim=0)
mask=torch.where(dis<bandwidth,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
mean=torch.sum((feature*mask).view(feature.shape[0],feature.shape[1]*feature.shape[2]),dim=1)/torch.sum(mask)
mean=mean.view([feature.shape[0],1,1])
return mean
def get_mask(feature,mean,bandwidth):
mean=mean.view([mean.shape[0],1,1])
dis=feature-mean
dis=torch.norm(dis,dim=0)
mask=torch.where(dis<bandwidth,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id))
return mask.float()
def re_label(mask,area,bandwidth):
index=torch.sum(area)
print(index)
count=torch.tensor(0).float().cuda(cuda_id)
for i in range(area.shape[0]):
mask[i,:,:]=torch.where(mask[i,:,:]>0,mask[i,:,:]+count,mask[i,:,:])
count+=area[i]
segment=torch.where(mask>0,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
final=torch.sum(mask,dim=0)/torch.sum(segment,dim=0)
final=torch.squeeze(final)
final=final/255
return mask,area,final
def refine_mask(mask):
pixels=mask.nonzero()
if torch.sum(mask)<400:
return mask
minx=torch.min(pixels[:,0])
maxx=torch.max(pixels[:,0])
miny=torch.min(pixels[:,1])
maxy=torch.max(pixels[:,1])
for i in range(1,torch.ceil((maxx-minx).float()/80).int()+1):
for j in range(1,torch.ceil((maxy-miny).float()/80).int()+1):
if torch.sum(mask[minx+80*(i-1):minx+80*i,miny+80*(j-1):miny+80*j])>400:
mask[minx+80*(i-1):minx+80*i,miny+80*(j-1):miny+80*j]*=i*j
areas=torch.unique(mask).sort()[0]
for i in range(1,len(areas)):
mask=torch.where(mask==areas[i],-torch.ones(1).float().cuda(cuda_id)*i,mask)
mask=-mask
return mask.float()
def fuse_mask(n_mask,r_mask):
base=torch.where(n_mask>0,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
areas=torch.max(n_mask)
i=1
shift=torch.where(r_mask==i,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
non_overlap=torch.where(base-shift==-1,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
overlap=shift-non_overlap
if torch.sum(non_overlap)/torch.sum(shift)>0.4:
areas+=1
n_mask=torch.where(non_overlap==1,areas,n_mask)
base=torch.where(n_mask>0,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
else:
area_num=torch.argmax(torch.bincount(torch.where(overlap.long()==1,n_mask.long(),torch.tensor(0).cuda(cuda_id)).view(-1))[1:]).float()+1
n_mask=torch.where(non_overlap==1,area_num,n_mask)
base=torch.where(n_mask>0,torch.tensor(1).cuda(cuda_id),torch.tensor(0).cuda(cuda_id)).float()
return n_mask
def fast_cluster(feature,bandwidth=0.16):
masks=[]
areas=[]
segments=[]
for i in range(feature.shape[0]):
n_mask=0
n_feature=feature[i,...]
label=torch.zeros(n_feature.shape[1],n_feature.shape[2]).cuda(cuda_id).float()
check=0
count=0
while(torch.min(label)==0):
candidate=torch.where(label==0,torch.tensor(1).float().cuda(cuda_id),torch.tensor(0).float().cuda(cuda_id)).nonzero()
seed=torch.randint(len(candidate),(1,))[0].long()
mean=n_feature[:,candidate[seed][0].long(),candidate[seed][1].long()].view(n_feature.shape[0],1,1)
mean=mean_shift(n_feature, mean, bandwidth)
t_masks=get_mask(n_feature, mean, bandwidth)
label=label+t_masks
if n_mask==0:
n_masks=t_masks
n_mask=torch.max(n_masks)
else:
n_masks=fuse_mask(n_masks,t_masks)
n_mask=torch.max(n_masks)
if len(candidate)==check:
count+=1
else:
check=len(candidate)
if count>3:
bandwidth=bandwidth*1.1
count=0
if n_mask==50:
bandwidth=bandwidth*1.1
if n_mask==60:
bandwidth=bandwidth*1.1
if n_mask>70:
break
return n_masks
def conv3x3(in_planes, out_planes, stride=1):
pad=nn.ReplicationPad2d(1)
padding=0
conv_mod = nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=padding, bias=False)
return nn.Sequential(pad,conv_mod)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.gn1 = nn.GroupNorm(group_dim,planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.gn2 = nn.GroupNorm(group_dim,planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.gn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.gn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.gn1 = nn.GroupNorm(group_dim,planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.gn2 = nn.GroupNorm(group_dim,planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.gn3 = nn.GroupNorm(group_dim,planes * 4)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.gn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.gn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.gn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class rsn_cluster(nn.Module):
def __init__(self,
n_classes=64,
block_config=[3, 16, 3, 3],
input_size= (480, 640),
version='scene'):
super(rsn_cluster, self).__init__()
self.inplanes = 64
layers=[4, 10, 5, 5]
block=BasicBlock
self.conv1=conv2DGroupNormRelu(3, 32, k_size=3,
padding=1, stride=1, bias=False)
self.conv2=conv2DGroupNormRelu(32, 64, k_size=3,
padding=1, stride=1, bias=False)
self.layer1 = self._make_layer(block, 64, layers[0],stride=1)
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 256, layers[3], stride=1)
self.pyramid_pooling = pyramidPoolingGroupNorm(256, [[30,40],[12,16],[3,4],[1,1]],group_dim=group_dim)
self.fuse0 = conv2DGroupNormRelu(in_channels=512, k_size=3, n_filters=256,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.fuse1 = conv2DGroupNormRelu(in_channels=256, k_size=3, n_filters=128,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.deconv1 = deconv2DGroupNormRelu(in_channels=128, n_filters=128, k_size=4,
stride=2, padding=1,output_padding=0, bias=False,group_dim=group_dim)
self.fuse2 = conv2DGroupNormRelu(in_channels=256, k_size=3, n_filters=192,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.deconv2 = deconv2DGroupNormRelu(in_channels=192, n_filters=192, k_size=4,
stride=2, padding=1,output_padding=0, bias=False,group_dim=group_dim)
self.fuse3 = conv2DGroupNormRelu(in_channels=256, k_size=3, n_filters=256,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.inplanes = 257
self.regress1 = self._make_layer(block,128, 4, stride=1)
self.regress2 = conv2DGroupNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.regress3 = conv2DGroupNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.regress4 = conv2DRelu(in_channels=32, k_size=3, n_filters=1,
padding=1, stride=1, bias=False)
self.class0= conv2DGroupNormRelu(in_channels=258, k_size=1, n_filters=128,
padding=0, stride=1, bias=False,group_dim=group_dim)
self.class1= conv2DGroupNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.class2= conv2DRelu(in_channels=64, k_size=3, n_filters=64,
padding=1, stride=1, bias=False)
self.class3= conv2DRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1, bias=False)
self.class4= conv2D(in_channels=32, k_size=1, n_filters=16,
padding=0, stride=1, bias=False)
self.inrefine1=conv2DGroupNormRelu(in_channels=513, k_size=3, n_filters=128,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.inrefine2=conv2DGroupNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.inrefine3=conv2DGroupNormRelu(in_channels=64, k_size=3, n_filters=32,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.inrefine4= conv2DRelu(in_channels=32, k_size=1, n_filters=16,
padding=0, stride=1, bias=False)
self.inrefine5= conv2D(in_channels=16, k_size=1, n_filters=1,
padding=0, stride=1, bias=False)
self.reliable1=conv2DGroupNormRelu(in_channels=513, k_size=3, n_filters=128,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.reliable2=conv2DGroupNormRelu(in_channels=128, k_size=3, n_filters=64,
padding=1, stride=1, bias=False,group_dim=group_dim)
self.reliable3= conv2DGroupNormRelu(in_channels=64, k_size=1, n_filters=32,
padding=0, stride=1, bias=False,group_dim=group_dim)
self.reliable4= conv2DGroupNormRelu(in_channels=32, k_size=1, n_filters=16,
padding=0, stride=1, bias=False)
self.reliable5= conv2D(in_channels=16, k_size=1, n_filters=1,
padding=0, stride=1, bias=False)
self.output=nn.ReLU(inplace=True)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.GroupNorm):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.ConvTranspose2d):
m.weight.data.fill_(1)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.ReplicationPad2d(0),
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False,padding=0),
nn.GroupNorm(group_dim,planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x, segments, labels, flag, task):
    """Forward pass: shared encoder/decoder, then flag/task-dependent heads.

    Args:
        x: input image batch, presumably (N, C, H, W) — TODO confirm.
        segments: instance segmentation map consumed by ``cluster_loss``
            and, for ``task='eval'``, used directly as the mask source.
        labels: ground-truth depth; only reshaped here, not consumed.
        flag: 0 selects the full refinement path (cluster masks +
            per-instance mean pooling + variance refinement); any other
            value selects the simpler ``task``-dependent paths.
        task: one of 'train', 'test', 'eval' (only used when flag != 0).

    Returns:
        A tuple whose contents depend on ``flag``/``task`` (see branches).

    NOTE(review): three statements were truncated in the recovered source
    ("ure],1)", "gments.long(),device_id=cuda_id)", "],1)"). They are
    reconstructed below from the parallel branches of this same method;
    confirm against the upstream repository.
    """
    # Per-pixel normalized (x, y) coordinate planes so the embedding head
    # is location aware; shape (1, 2, H, W).
    location_map = torch.cat(
        [(torch.arange(x.shape[-1]) / x.shape[-1]).unsqueeze(0).expand(x.shape[-2], x.shape[-1]).unsqueeze(0),
         (torch.arange(x.shape[-2]) / x.shape[-2]).unsqueeze(0).transpose(1, 0).expand(x.shape[-2], x.shape[-1]).unsqueeze(0)],
        0).unsqueeze(0).float().cuda(cuda_id)
    zero = torch.zeros(1).cuda(cuda_id)
    one = torch.ones(1).cuda(cuda_id)

    # Encoder.
    x = self.conv1(x)
    x = self.conv2(x)
    x1 = self.layer1(x)
    x2 = self.layer2(x1)
    x = self.layer3(x2)
    x = self.layer4(x)
    x = self.pyramid_pooling(x)
    # Decoder with skip connections from layer2 / layer1.
    x = self.fuse0(x)
    x = self.fuse1(x)
    x = self.deconv1(x)
    x = self.fuse2(torch.cat((x, x2), 1))
    x = self.deconv2(x)
    x_share = self.fuse3(torch.cat((x, x1), 1))

    if flag == 0:
        # Instance-embedding head on features + coordinates.
        x_fuse = torch.cat([x_share, location_map], 1)
        y = self.class0(x_fuse)
        y = self.class1(y)
        y = self.class2(y)
        y = self.class3(y)
        y = self.class4(y)
        with torch.no_grad():
            masks = fast_cluster(y).view(1, 1, x_share.shape[-2], x_share.shape[-1])
        # Mask-conditioned depth regression.
        x = self.regress1(torch.cat([x_share, masks], 1))
        x = self.regress2(x)
        x = self.regress3(x)
        depth = self.regress4(x)
        with torch.no_grad():
            labels = labels.view_as(depth)
        coarse_depth = depth + 0      # "+ 0" forces a copy before per-mask updates
        coarse_feature = x_share + 0
        mean_features = torch.zeros(1, x_share.shape[1], torch.max(masks).long() + 1).cuda(cuda_id)
        mean_depth = torch.zeros(torch.max(masks).long() + 1).cuda(cuda_id)
        # Replace every instance region by its mean depth / mean feature.
        for i in range(1, torch.max(masks).int() + 1):
            index_r = torch.where(masks == i, one, zero)
            mean_d = torch.sum(index_r * depth) / torch.sum(index_r)
            mean_depth[i] = mean_d
            coarse_depth = torch.where(masks == i, mean_d, coarse_depth)
            mean_f = torch.sum((index_r * x_share).view(x_share.shape[0], x_share.shape[1], -1), dim=-1) / torch.sum(index_r)
            mean_features[..., i] = mean_f
            coarse_feature = torch.where(masks == i, mean_f.view(x_share.shape[0], x_share.shape[1], 1, 1), coarse_feature)
        # RECONSTRUCTED (source truncated to "ure],1)") — mirrors the
        # reliable_feature concat below; 1 + C + C channels should match
        # self.inrefine1's 513 inputs. TODO confirm upstream.
        inner_feature = torch.cat([coarse_depth, x_share, coarse_feature], 1)
        inner = self.inrefine1(inner_feature)
        inner = self.inrefine2(inner)
        inner = self.inrefine3(inner)
        inner = self.inrefine4(inner)
        inner_variance = self.inrefine5(inner)
        reliable_feature = torch.cat([depth, x_share, coarse_feature], 1)
        # NOTE(review): this path reuses the inrefine* stack although
        # dedicated self.reliable1..5 layers exist in __init__; kept as-is
        # to preserve behavior, but it looks like a latent upstream bug.
        reliable = self.inrefine1(reliable_feature)
        reliable = self.inrefine2(reliable)
        reliable = self.inrefine3(reliable)
        reliable = self.inrefine4(reliable)
        reliable_variance = self.inrefine5(reliable)
        # RECONSTRUCTED (source truncated to "gments.long(),device_id=cuda_id)"):
        loss_var, loss_dis, loss_reg = cluster_loss(y, segments.long(), device_id=cuda_id)
        loss_var = loss_var.reshape((y.shape[0], 1))
        loss_dis = loss_dis.reshape((y.shape[0], 1))
        loss_reg = loss_reg.reshape((y.shape[0], 1))
        # Fuse refined coarse depth with the direct regression, averaging
        # the two inside instance regions (masks > 0).
        accurate_depth = self.output(inner_variance + coarse_depth)
        depth = self.output(reliable_variance + depth)
        accurate_depth = torch.where(masks > 0, (depth + accurate_depth) / 2, depth)
        return masks, accurate_depth, loss_var, loss_dis, loss_reg
    else:
        # NOTE(review): `y` and `depth` are referenced below but, in this
        # recovered copy, are only assigned inside the `flag == 0` branch
        # above. The original indentation (i.e. where the class*/regress*
        # heads run) was lost in extraction — restore it from upstream.
        if task == 'train':
            with torch.no_grad():
                masks = fast_cluster(y).view_as(depth)
                print(torch.max(masks))  # debug print left from upstream
            loss_var, loss_dis, loss_reg = cluster_loss(y, segments.long())
            loss_var = loss_var.reshape((y.shape[0], 1))
            loss_dis = loss_dis.reshape((y.shape[0], 1))
            loss_reg = loss_reg.reshape((y.shape[0], 1))
            return depth, masks, loss_var, loss_dis, loss_reg
        elif task == 'test':
            loss_var, loss_dis, loss_reg = cluster_loss(y, segments.long())
            loss_var = loss_var.reshape((y.shape[0], 1))
            loss_dis = loss_dis.reshape((y.shape[0], 1))
            loss_reg = loss_reg.reshape((y.shape[0], 1))
            return depth, loss_var, loss_dis, loss_reg
        elif task == 'eval':
            # Ground-truth segments replace the clustered masks.
            x_fuse = torch.cat([x_share, location_map], 1)
            masks = segments.view_as(depth)
            coarse_depth = depth + 0
            coarse_feature = x_fuse + 0
            mean_features = torch.zeros(1, x_fuse.shape[1], torch.max(masks).long() + 1).cuda(cuda_id)
            mean_depth = torch.zeros(torch.max(masks).long() + 1).cuda(cuda_id)
            for i in range(torch.min(masks).int(), torch.max(masks).int() + 1):
                index_r = torch.where(masks == i, one, zero)
                mean_d = torch.sum(index_r * depth) / torch.sum(index_r)
                mean_depth[i] = mean_d + 0
                coarse_depth = torch.where(masks == i, mean_depth[i], coarse_depth)
                mean_f = torch.sum((index_r * x_fuse).view(x_fuse.shape[0], x_fuse.shape[1], -1), dim=-1) / torch.sum(index_r)
                mean_features[..., i] = mean_f
                coarse_feature = torch.where(masks == i, mean_f.view(x_fuse.shape[0], x_fuse.shape[1], 1, 1), coarse_feature)
            # RECONSTRUCTED (source truncated to "],1)") — mirrors the
            # flag == 0 branch; note coarse_feature derives from x_fuse here,
            # so the channel count vs self.inrefine1 (513) needs verifying.
            inner_feature = torch.cat([coarse_depth, x_share, coarse_feature], 1)
            inner = self.inrefine1(inner_feature)
            inner = self.inrefine2(inner)
            inner = self.inrefine3(inner)
            inner = self.inrefine4(inner)
            inner_variance = self.inrefine5(inner)
            accurate_depth = inner_variance  # dead store in the recovered source —
            accurate_depth = depth           # immediately overwritten; kept as-is.
            return depth, accurate_depth
| true | true |
f731bcccc4e6d8aaa6addf7cc5d2ba13d7d25f7c | 907 | py | Python | feed_generator.py | chand1012/static-rss-generator | 931ae40b156232d783202520c34bd175f615d4a4 | [
"MIT"
] | 1 | 2021-07-31T14:55:05.000Z | 2021-07-31T14:55:05.000Z | feed_generator.py | chand1012/static-rss-generator | 931ae40b156232d783202520c34bd175f615d4a4 | [
"MIT"
] | null | null | null | feed_generator.py | chand1012/static-rss-generator | 931ae40b156232d783202520c34bd175f615d4a4 | [
"MIT"
] | null | null | null | from datetime import datetime
import random
import json
import arrow
import feedparser
from rfeed import Feed, Item
# returns the feed string given the JSON object
def generate_feed(link_data: list[dict], rss_link: str) -> str:
    """Build an aggregate RSS feed string from several source feeds.

    Each element of `link_data` is a single-entry mapping of
    {feed_url: number_of_items_to_take}. The selected items from every
    feed are pooled, shuffled, and wrapped in one combined feed whose
    self-link is `rss_link`. Returns the rendered RSS XML string.
    """
    pooled_items = []
    for source in link_data:
        url = list(source.keys())[0]
        take = list(source.values())[0]
        parsed = feedparser.parse(url)
        # Deliberately index (not slice): a `take` larger than the feed
        # raises IndexError, matching the original behavior.
        for idx in range(take):
            post = parsed['entries'][idx]
            pooled_items.append(Item(
                title=post['title'],
                pubDate=arrow.get(post['published'], 'DD MMM YYYY HH:mm:ss'),
                description=post['summary'],
                link=post['link'],
            ))
    random.shuffle(pooled_items)
    aggregate = Feed(
        title='Aggregate RSS Feed',
        description='Aggregate RSS Feed',
        link=rss_link,
        lastBuildDate=datetime.now(),
        items=pooled_items,
    )
    return aggregate.rss()
import random
import json
import arrow
import feedparser
from rfeed import Feed, Item
def generate_feed(link_data: list[dict], rss_link: str) -> str:
data = []
for link in link_data:
feed = feedparser.parse(list(link.keys())[0])
for i in range(list(link.values())[0]):
newest = feed['entries'][i]
data.append(Item(
title=newest['title'],
pubDate=arrow.get(newest['published'], 'DD MMM YYYY HH:mm:ss'),
description=newest['summary'],
link=newest['link']
))
random.shuffle(data)
return_feed = Feed(
title='Aggregate RSS Feed',
description='Aggregate RSS Feed',
link=rss_link,
lastBuildDate=datetime.now(),
items=data,
)
return return_feed.rss() | true | true |
f731bd9216657521387add8186e360b1be8b9acc | 20,089 | py | Python | tests/unit/containers/test_declarative_py2_py3.py | whysage/python-dependency-injector | cef6d35cfdf5f39438a89f000d11a21860bc8c5f | [
"BSD-3-Clause"
] | 1,997 | 2016-04-26T13:41:45.000Z | 2022-03-31T16:17:53.000Z | tests/unit/containers/test_declarative_py2_py3.py | whysage/python-dependency-injector | cef6d35cfdf5f39438a89f000d11a21860bc8c5f | [
"BSD-3-Clause"
] | 399 | 2016-05-16T07:20:07.000Z | 2022-03-31T18:23:49.000Z | tests/unit/containers/test_declarative_py2_py3.py | whysage/python-dependency-injector | cef6d35cfdf5f39438a89f000d11a21860bc8c5f | [
"BSD-3-Clause"
] | 162 | 2016-05-16T09:21:43.000Z | 2022-03-30T23:00:26.000Z | """Dependency injector declarative container unit tests."""
import collections
import unittest
from dependency_injector import (
containers,
providers,
errors,
)
class ContainerA(containers.DeclarativeContainer):
    """Base declarative container fixture used throughout the tests below."""

    # Two plain providers inherited by ContainerB/ContainerC.
    p11 = providers.Provider()
    p12 = providers.Provider()
class ContainerB(ContainerA):
    """First-level subclass fixture: adds p21/p22 on top of ContainerA's providers."""

    p21 = providers.Provider()
    p22 = providers.Provider()
class ContainerC(ContainerB):
    """Second-level subclass fixture: adds p31/p32 on top of ContainerB's providers."""

    p31 = providers.Provider()
    p32 = providers.Provider()
class DeclarativeContainerTests(unittest.TestCase):
def test_providers_attribute(self):
self.assertEqual(ContainerA.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerB.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22))
self.assertEqual(ContainerC.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22,
p31=ContainerC.p31,
p32=ContainerC.p32))
def test_providers_attribute_with_redefinition(self):
p1 = providers.Provider()
p2 = providers.Provider()
class ContainerA2(ContainerA):
p11 = p1
p12 = p2
self.assertEqual(
ContainerA.providers,
{
'p11': ContainerA.p11,
'p12': ContainerA.p12,
},
)
self.assertEqual(
ContainerA2.providers,
{
'p11': p1,
'p12': p2,
},
)
def test_cls_providers_attribute(self):
self.assertEqual(ContainerA.cls_providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerB.cls_providers, dict(p21=ContainerB.p21,
p22=ContainerB.p22))
self.assertEqual(ContainerC.cls_providers, dict(p31=ContainerC.p31,
p32=ContainerC.p32))
def test_inherited_providers_attribute(self):
self.assertEqual(ContainerA.inherited_providers, dict())
self.assertEqual(ContainerB.inherited_providers,
dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerC.inherited_providers,
dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22))
def test_dependencies_attribute(self):
class ContainerD(ContainerC):
p41 = providers.Dependency()
p42 = providers.DependenciesContainer()
class ContainerE(ContainerD):
p51 = providers.Dependency()
p52 = providers.DependenciesContainer()
self.assertEqual(
ContainerD.dependencies,
{
'p41': ContainerD.p41,
'p42': ContainerD.p42,
},
)
self.assertEqual(
ContainerE.dependencies,
{
'p41': ContainerD.p41,
'p42': ContainerD.p42,
'p51': ContainerE.p51,
'p52': ContainerE.p52,
},
)
def test_set_get_del_providers(self):
a_p13 = providers.Provider()
b_p23 = providers.Provider()
ContainerA.p13 = a_p13
ContainerB.p23 = b_p23
self.assertEqual(ContainerA.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p13=a_p13))
self.assertEqual(ContainerB.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22,
p23=b_p23))
self.assertEqual(ContainerA.cls_providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p13=a_p13))
self.assertEqual(ContainerB.cls_providers, dict(p21=ContainerB.p21,
p22=ContainerB.p22,
p23=b_p23))
del ContainerA.p13
del ContainerB.p23
self.assertEqual(ContainerA.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerB.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22))
self.assertEqual(ContainerA.cls_providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerB.cls_providers, dict(p21=ContainerB.p21,
p22=ContainerB.p22))
def test_declare_with_valid_provider_type(self):
class _Container(containers.DeclarativeContainer):
provider_type = providers.Object
px = providers.Object(object())
self.assertIsInstance(_Container.px, providers.Object)
def test_declare_with_invalid_provider_type(self):
with self.assertRaises(errors.Error):
class _Container(containers.DeclarativeContainer):
provider_type = providers.Object
px = providers.Provider()
def test_seth_valid_provider_type(self):
class _Container(containers.DeclarativeContainer):
provider_type = providers.Object
_Container.px = providers.Object(object())
self.assertIsInstance(_Container.px, providers.Object)
def test_set_invalid_provider_type(self):
class _Container(containers.DeclarativeContainer):
provider_type = providers.Object
with self.assertRaises(errors.Error):
_Container.px = providers.Provider()
def test_override(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer1(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer2(containers.DeclarativeContainer):
p11 = providers.Provider()
p12 = providers.Provider()
_Container.override(_OverridingContainer1)
_Container.override(_OverridingContainer2)
self.assertEqual(_Container.overridden,
(_OverridingContainer1,
_OverridingContainer2))
self.assertEqual(_Container.p11.overridden,
(_OverridingContainer1.p11,
_OverridingContainer2.p11))
def test_override_with_itself(self):
with self.assertRaises(errors.Error):
ContainerA.override(ContainerA)
def test_override_with_parent(self):
with self.assertRaises(errors.Error):
ContainerB.override(ContainerA)
def test_override_decorator(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Provider()
@containers.override(_Container)
class _OverridingContainer1(containers.DeclarativeContainer):
p11 = providers.Provider()
@containers.override(_Container)
class _OverridingContainer2(containers.DeclarativeContainer):
p11 = providers.Provider()
p12 = providers.Provider()
self.assertEqual(_Container.overridden,
(_OverridingContainer1,
_OverridingContainer2))
self.assertEqual(_Container.p11.overridden,
(_OverridingContainer1.p11,
_OverridingContainer2.p11))
def test_reset_last_overriding(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer1(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer2(containers.DeclarativeContainer):
p11 = providers.Provider()
p12 = providers.Provider()
_Container.override(_OverridingContainer1)
_Container.override(_OverridingContainer2)
_Container.reset_last_overriding()
self.assertEqual(_Container.overridden,
(_OverridingContainer1,))
self.assertEqual(_Container.p11.overridden,
(_OverridingContainer1.p11,))
def test_reset_last_overriding_when_not_overridden(self):
with self.assertRaises(errors.Error):
ContainerA.reset_last_overriding()
def test_reset_override(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer1(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer2(containers.DeclarativeContainer):
p11 = providers.Provider()
p12 = providers.Provider()
_Container.override(_OverridingContainer1)
_Container.override(_OverridingContainer2)
_Container.reset_override()
self.assertEqual(_Container.overridden, tuple())
self.assertEqual(_Container.p11.overridden, tuple())
def test_copy(self):
@containers.copy(ContainerA)
class _Container1(ContainerA):
pass
@containers.copy(ContainerA)
class _Container2(ContainerA):
pass
self.assertIsNot(ContainerA.p11, _Container1.p11)
self.assertIsNot(ContainerA.p12, _Container1.p12)
self.assertIsNot(ContainerA.p11, _Container2.p11)
self.assertIsNot(ContainerA.p12, _Container2.p12)
self.assertIsNot(_Container1.p11, _Container2.p11)
self.assertIsNot(_Container1.p12, _Container2.p12)
def test_copy_with_replacing(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Object(0)
p12 = providers.Factory(dict, p11=p11)
@containers.copy(_Container)
class _Container1(_Container):
p11 = providers.Object(1)
p13 = providers.Object(11)
@containers.copy(_Container)
class _Container2(_Container):
p11 = providers.Object(2)
p13 = providers.Object(22)
self.assertIsNot(_Container.p11, _Container1.p11)
self.assertIsNot(_Container.p12, _Container1.p12)
self.assertIsNot(_Container.p11, _Container2.p11)
self.assertIsNot(_Container.p12, _Container2.p12)
self.assertIsNot(_Container1.p11, _Container2.p11)
self.assertIsNot(_Container1.p12, _Container2.p12)
self.assertEqual(_Container.p12(), {'p11': 0})
self.assertEqual(_Container1.p12(), {'p11': 1})
self.assertEqual(_Container2.p12(), {'p11': 2})
self.assertEqual(_Container1.p13(), 11)
self.assertEqual(_Container2.p13(), 22)
def test_copy_with_parent_dependency(self):
# See: https://github.com/ets-labs/python-dependency-injector/issues/477
class Base(containers.DeclarativeContainer):
p11 = providers.Object(0)
p12 = providers.Factory(dict, p11=p11)
@containers.copy(Base)
class New(Base):
p13 = providers.Factory(dict, p12=Base.p12)
new1 = New()
new2 = New(p11=1)
new3 = New(p11=2)
self.assertEqual(new1.p13(), {'p12': {'p11': 0}})
self.assertEqual(new2.p13(), {'p12': {'p11': 1}})
self.assertEqual(new3.p13(), {'p12': {'p11': 2}})
def test_copy_with_replacing_subcontainer_providers(self):
# See: https://github.com/ets-labs/python-dependency-injector/issues/374
class X(containers.DeclarativeContainer):
foo = providers.Dependency(instance_of=str)
def build_x():
return X(foo='1')
class A(containers.DeclarativeContainer):
x = providers.DependenciesContainer(**X.providers)
y = x.foo
@containers.copy(A)
class B1(A):
x = providers.Container(build_x)
b1 = B1()
self.assertEqual(b1.y(), '1')
def test_containers_attribute(self):
class Container(containers.DeclarativeContainer):
class Container1(containers.DeclarativeContainer):
pass
class Container2(containers.DeclarativeContainer):
pass
Container3 = containers.DynamicContainer()
self.assertEqual(Container.containers,
dict(Container1=Container.Container1,
Container2=Container.Container2,
Container3=Container.Container3))
def test_init_with_overriding_providers(self):
p1 = providers.Provider()
p2 = providers.Provider()
container = ContainerA(p11=p1, p12=p2)
self.assertIs(container.p11.last_overriding, p1)
self.assertIs(container.p12.last_overriding, p2)
def test_init_with_overridden_dependency(self):
# Bug:
# https://github.com/ets-labs/python-dependency-injector/issues/198
class _Container(containers.DeclarativeContainer):
p1 = providers.Dependency(instance_of=int)
p2 = providers.Dependency(object)
p2.override(providers.Factory(dict, p1=p1))
container = _Container(p1=1)
self.assertEqual(container.p2(), {'p1': 1})
self.assertIs(
container.p2.last_overriding.kwargs['p1'],
container.p1,
)
self.assertIsNot(
container.p2.last_overriding.kwargs['p1'],
_Container.p1,
)
self.assertIs(
_Container.p2.last_overriding.kwargs['p1'],
_Container.p1,
)
def test_init_with_chained_dependency(self):
# Bug:
# https://github.com/ets-labs/python-dependency-injector/issues/200
class _Container(containers.DeclarativeContainer):
p1 = providers.Dependency(instance_of=int)
p2 = providers.Factory(p1)
container = _Container(p1=1)
self.assertEqual(container.p2(), 1)
self.assertIs(container.p2.cls, container.p1)
self.assertIs(_Container.p2.cls, _Container.p1)
self.assertIsNot(container.p2.cls, _Container.p1)
def test_init_with_dependency_delegation(self):
# Bug:
# https://github.com/ets-labs/python-dependency-injector/issues/235
A = collections.namedtuple('A', [])
B = collections.namedtuple('B', ['fa'])
C = collections.namedtuple('B', ['a'])
class Services(containers.DeclarativeContainer):
a = providers.Dependency()
c = providers.Factory(C, a=a)
b = providers.Factory(B, fa=a.provider)
a = providers.Factory(A)
assert isinstance(Services(a=a).c().a, A) # ok
Services(a=a).b().fa()
def test_init_with_grand_child_provider(self):
# Bug:
# https://github.com/ets-labs/python-dependency-injector/issues/350
provider = providers.Provider()
container = ContainerC(p11=provider)
self.assertIsInstance(container.p11, providers.Provider)
self.assertIsInstance(container.p12, providers.Provider)
self.assertIsInstance(container.p21, providers.Provider)
self.assertIsInstance(container.p22, providers.Provider)
self.assertIsInstance(container.p31, providers.Provider)
self.assertIsInstance(container.p32, providers.Provider)
self.assertIs(container.p11.last_overriding, provider)
def test_parent_set_in__new__(self):
class Container(containers.DeclarativeContainer):
dependency = providers.Dependency()
dependencies_container = providers.DependenciesContainer()
container = providers.Container(ContainerA)
self.assertIs(Container.dependency.parent, Container)
self.assertIs(Container.dependencies_container.parent, Container)
self.assertIs(Container.container.parent, Container)
def test_parent_set_in__setattr__(self):
class Container(containers.DeclarativeContainer):
pass
Container.dependency = providers.Dependency()
Container.dependencies_container = providers.DependenciesContainer()
Container.container = providers.Container(ContainerA)
self.assertIs(Container.dependency.parent, Container)
self.assertIs(Container.dependencies_container.parent, Container)
self.assertIs(Container.container.parent, Container)
def test_resolve_provider_name(self):
self.assertEqual(ContainerA.resolve_provider_name(ContainerA.p11), 'p11')
def test_resolve_provider_name_no_provider(self):
with self.assertRaises(errors.Error):
ContainerA.resolve_provider_name(providers.Provider())
def test_child_dependency_parent_name(self):
class Container(containers.DeclarativeContainer):
dependency = providers.Dependency()
with self.assertRaises(errors.Error) as context:
Container.dependency()
self.assertEqual(
str(context.exception),
'Dependency "Container.dependency" is not defined',
)
def test_child_dependencies_container_parent_name(self):
class Container(containers.DeclarativeContainer):
dependencies_container = providers.DependenciesContainer()
with self.assertRaises(errors.Error) as context:
Container.dependencies_container.dependency()
self.assertEqual(
str(context.exception),
'Dependency "Container.dependencies_container.dependency" is not defined',
)
def test_child_container_parent_name(self):
class ChildContainer(containers.DeclarativeContainer):
dependency = providers.Dependency()
class Container(containers.DeclarativeContainer):
child_container = providers.Container(ChildContainer)
with self.assertRaises(errors.Error) as context:
Container.child_container.dependency()
self.assertEqual(
str(context.exception),
'Dependency "Container.child_container.dependency" is not defined',
)
class DeclarativeContainerWithCustomStringTests(unittest.TestCase):
    """Regression tests for setattr/delattr with str-subclass attribute names.

    See: https://github.com/ets-labs/python-dependency-injector/issues/479
    """

    class CustomString(str):
        # str subclass used as the attribute name to reproduce the issue.
        pass

    class CustomClass:
        thing = None

    class CustomContainer(containers.DeclarativeContainer):
        pass

    def setUp(self):
        # Fresh provider per test; the container class itself is shared.
        self.container = self.CustomContainer
        self.provider = providers.Provider()

    def test_setattr(self):
        # Setting via a str subclass must register the provider normally.
        setattr(self.container, self.CustomString('test_attr'), self.provider)
        self.assertIs(self.container.test_attr, self.provider)

    def test_delattr(self):
        # Deleting via a str subclass must fully remove the provider.
        setattr(self.container, self.CustomString('test_attr'), self.provider)
        delattr(self.container, self.CustomString('test_attr'))
        with self.assertRaises(AttributeError):
            self.container.test_attr
| 37.201852 | 86 | 0.603116 |
import collections
import unittest
from dependency_injector import (
containers,
providers,
errors,
)
class ContainerA(containers.DeclarativeContainer):
p11 = providers.Provider()
p12 = providers.Provider()
class ContainerB(ContainerA):
p21 = providers.Provider()
p22 = providers.Provider()
class ContainerC(ContainerB):
p31 = providers.Provider()
p32 = providers.Provider()
class DeclarativeContainerTests(unittest.TestCase):
def test_providers_attribute(self):
self.assertEqual(ContainerA.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerB.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22))
self.assertEqual(ContainerC.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22,
p31=ContainerC.p31,
p32=ContainerC.p32))
def test_providers_attribute_with_redefinition(self):
p1 = providers.Provider()
p2 = providers.Provider()
class ContainerA2(ContainerA):
p11 = p1
p12 = p2
self.assertEqual(
ContainerA.providers,
{
'p11': ContainerA.p11,
'p12': ContainerA.p12,
},
)
self.assertEqual(
ContainerA2.providers,
{
'p11': p1,
'p12': p2,
},
)
def test_cls_providers_attribute(self):
self.assertEqual(ContainerA.cls_providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerB.cls_providers, dict(p21=ContainerB.p21,
p22=ContainerB.p22))
self.assertEqual(ContainerC.cls_providers, dict(p31=ContainerC.p31,
p32=ContainerC.p32))
def test_inherited_providers_attribute(self):
self.assertEqual(ContainerA.inherited_providers, dict())
self.assertEqual(ContainerB.inherited_providers,
dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerC.inherited_providers,
dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22))
def test_dependencies_attribute(self):
class ContainerD(ContainerC):
p41 = providers.Dependency()
p42 = providers.DependenciesContainer()
class ContainerE(ContainerD):
p51 = providers.Dependency()
p52 = providers.DependenciesContainer()
self.assertEqual(
ContainerD.dependencies,
{
'p41': ContainerD.p41,
'p42': ContainerD.p42,
},
)
self.assertEqual(
ContainerE.dependencies,
{
'p41': ContainerD.p41,
'p42': ContainerD.p42,
'p51': ContainerE.p51,
'p52': ContainerE.p52,
},
)
def test_set_get_del_providers(self):
a_p13 = providers.Provider()
b_p23 = providers.Provider()
ContainerA.p13 = a_p13
ContainerB.p23 = b_p23
self.assertEqual(ContainerA.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p13=a_p13))
self.assertEqual(ContainerB.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22,
p23=b_p23))
self.assertEqual(ContainerA.cls_providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p13=a_p13))
self.assertEqual(ContainerB.cls_providers, dict(p21=ContainerB.p21,
p22=ContainerB.p22,
p23=b_p23))
del ContainerA.p13
del ContainerB.p23
self.assertEqual(ContainerA.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerB.providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12,
p21=ContainerB.p21,
p22=ContainerB.p22))
self.assertEqual(ContainerA.cls_providers, dict(p11=ContainerA.p11,
p12=ContainerA.p12))
self.assertEqual(ContainerB.cls_providers, dict(p21=ContainerB.p21,
p22=ContainerB.p22))
def test_declare_with_valid_provider_type(self):
class _Container(containers.DeclarativeContainer):
provider_type = providers.Object
px = providers.Object(object())
self.assertIsInstance(_Container.px, providers.Object)
def test_declare_with_invalid_provider_type(self):
with self.assertRaises(errors.Error):
class _Container(containers.DeclarativeContainer):
provider_type = providers.Object
px = providers.Provider()
def test_seth_valid_provider_type(self):
class _Container(containers.DeclarativeContainer):
provider_type = providers.Object
_Container.px = providers.Object(object())
self.assertIsInstance(_Container.px, providers.Object)
def test_set_invalid_provider_type(self):
class _Container(containers.DeclarativeContainer):
provider_type = providers.Object
with self.assertRaises(errors.Error):
_Container.px = providers.Provider()
def test_override(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer1(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer2(containers.DeclarativeContainer):
p11 = providers.Provider()
p12 = providers.Provider()
_Container.override(_OverridingContainer1)
_Container.override(_OverridingContainer2)
self.assertEqual(_Container.overridden,
(_OverridingContainer1,
_OverridingContainer2))
self.assertEqual(_Container.p11.overridden,
(_OverridingContainer1.p11,
_OverridingContainer2.p11))
def test_override_with_itself(self):
with self.assertRaises(errors.Error):
ContainerA.override(ContainerA)
def test_override_with_parent(self):
with self.assertRaises(errors.Error):
ContainerB.override(ContainerA)
def test_override_decorator(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Provider()
@containers.override(_Container)
class _OverridingContainer1(containers.DeclarativeContainer):
p11 = providers.Provider()
@containers.override(_Container)
class _OverridingContainer2(containers.DeclarativeContainer):
p11 = providers.Provider()
p12 = providers.Provider()
self.assertEqual(_Container.overridden,
(_OverridingContainer1,
_OverridingContainer2))
self.assertEqual(_Container.p11.overridden,
(_OverridingContainer1.p11,
_OverridingContainer2.p11))
def test_reset_last_overriding(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer1(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer2(containers.DeclarativeContainer):
p11 = providers.Provider()
p12 = providers.Provider()
_Container.override(_OverridingContainer1)
_Container.override(_OverridingContainer2)
_Container.reset_last_overriding()
self.assertEqual(_Container.overridden,
(_OverridingContainer1,))
self.assertEqual(_Container.p11.overridden,
(_OverridingContainer1.p11,))
def test_reset_last_overriding_when_not_overridden(self):
with self.assertRaises(errors.Error):
ContainerA.reset_last_overriding()
def test_reset_override(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer1(containers.DeclarativeContainer):
p11 = providers.Provider()
class _OverridingContainer2(containers.DeclarativeContainer):
p11 = providers.Provider()
p12 = providers.Provider()
_Container.override(_OverridingContainer1)
_Container.override(_OverridingContainer2)
_Container.reset_override()
self.assertEqual(_Container.overridden, tuple())
self.assertEqual(_Container.p11.overridden, tuple())
def test_copy(self):
@containers.copy(ContainerA)
class _Container1(ContainerA):
pass
@containers.copy(ContainerA)
class _Container2(ContainerA):
pass
self.assertIsNot(ContainerA.p11, _Container1.p11)
self.assertIsNot(ContainerA.p12, _Container1.p12)
self.assertIsNot(ContainerA.p11, _Container2.p11)
self.assertIsNot(ContainerA.p12, _Container2.p12)
self.assertIsNot(_Container1.p11, _Container2.p11)
self.assertIsNot(_Container1.p12, _Container2.p12)
def test_copy_with_replacing(self):
class _Container(containers.DeclarativeContainer):
p11 = providers.Object(0)
p12 = providers.Factory(dict, p11=p11)
@containers.copy(_Container)
class _Container1(_Container):
p11 = providers.Object(1)
p13 = providers.Object(11)
@containers.copy(_Container)
class _Container2(_Container):
p11 = providers.Object(2)
p13 = providers.Object(22)
self.assertIsNot(_Container.p11, _Container1.p11)
self.assertIsNot(_Container.p12, _Container1.p12)
self.assertIsNot(_Container.p11, _Container2.p11)
self.assertIsNot(_Container.p12, _Container2.p12)
self.assertIsNot(_Container1.p11, _Container2.p11)
self.assertIsNot(_Container1.p12, _Container2.p12)
self.assertEqual(_Container.p12(), {'p11': 0})
self.assertEqual(_Container1.p12(), {'p11': 1})
self.assertEqual(_Container2.p12(), {'p11': 2})
self.assertEqual(_Container1.p13(), 11)
self.assertEqual(_Container2.p13(), 22)
def test_copy_with_parent_dependency(self):
class Base(containers.DeclarativeContainer):
p11 = providers.Object(0)
p12 = providers.Factory(dict, p11=p11)
@containers.copy(Base)
class New(Base):
p13 = providers.Factory(dict, p12=Base.p12)
new1 = New()
new2 = New(p11=1)
new3 = New(p11=2)
self.assertEqual(new1.p13(), {'p12': {'p11': 0}})
self.assertEqual(new2.p13(), {'p12': {'p11': 1}})
self.assertEqual(new3.p13(), {'p12': {'p11': 2}})
def test_copy_with_replacing_subcontainer_providers(self):
class X(containers.DeclarativeContainer):
foo = providers.Dependency(instance_of=str)
def build_x():
return X(foo='1')
class A(containers.DeclarativeContainer):
x = providers.DependenciesContainer(**X.providers)
y = x.foo
@containers.copy(A)
class B1(A):
x = providers.Container(build_x)
b1 = B1()
self.assertEqual(b1.y(), '1')
def test_containers_attribute(self):
class Container(containers.DeclarativeContainer):
class Container1(containers.DeclarativeContainer):
pass
class Container2(containers.DeclarativeContainer):
pass
Container3 = containers.DynamicContainer()
self.assertEqual(Container.containers,
dict(Container1=Container.Container1,
Container2=Container.Container2,
Container3=Container.Container3))
def test_init_with_overriding_providers(self):
p1 = providers.Provider()
p2 = providers.Provider()
container = ContainerA(p11=p1, p12=p2)
self.assertIs(container.p11.last_overriding, p1)
self.assertIs(container.p12.last_overriding, p2)
def test_init_with_overridden_dependency(self):
class _Container(containers.DeclarativeContainer):
p1 = providers.Dependency(instance_of=int)
p2 = providers.Dependency(object)
p2.override(providers.Factory(dict, p1=p1))
container = _Container(p1=1)
self.assertEqual(container.p2(), {'p1': 1})
self.assertIs(
container.p2.last_overriding.kwargs['p1'],
container.p1,
)
self.assertIsNot(
container.p2.last_overriding.kwargs['p1'],
_Container.p1,
)
self.assertIs(
_Container.p2.last_overriding.kwargs['p1'],
_Container.p1,
)
def test_init_with_chained_dependency(self):
class _Container(containers.DeclarativeContainer):
p1 = providers.Dependency(instance_of=int)
p2 = providers.Factory(p1)
container = _Container(p1=1)
self.assertEqual(container.p2(), 1)
self.assertIs(container.p2.cls, container.p1)
self.assertIs(_Container.p2.cls, _Container.p1)
self.assertIsNot(container.p2.cls, _Container.p1)
def test_init_with_dependency_delegation(self):
A = collections.namedtuple('A', [])
B = collections.namedtuple('B', ['fa'])
C = collections.namedtuple('B', ['a'])
class Services(containers.DeclarativeContainer):
a = providers.Dependency()
c = providers.Factory(C, a=a)
b = providers.Factory(B, fa=a.provider)
a = providers.Factory(A)
assert isinstance(Services(a=a).c().a, A)
Services(a=a).b().fa()
    def test_init_with_grand_child_provider(self):
        # Providers inherited through two levels of container subclassing are
        # all copied onto the instance; the overridden one links to the
        # provider supplied at instantiation.
        provider = providers.Provider()
        container = ContainerC(p11=provider)
        self.assertIsInstance(container.p11, providers.Provider)
        self.assertIsInstance(container.p12, providers.Provider)
        self.assertIsInstance(container.p21, providers.Provider)
        self.assertIsInstance(container.p22, providers.Provider)
        self.assertIsInstance(container.p31, providers.Provider)
        self.assertIsInstance(container.p32, providers.Provider)
        self.assertIs(container.p11.last_overriding, provider)
    def test_parent_set_in__new__(self):
        # Providers declared in the class body get ``parent`` bound to the
        # container class at class-creation time.
        class Container(containers.DeclarativeContainer):
            dependency = providers.Dependency()
            dependencies_container = providers.DependenciesContainer()
            container = providers.Container(ContainerA)
        self.assertIs(Container.dependency.parent, Container)
        self.assertIs(Container.dependencies_container.parent, Container)
        self.assertIs(Container.container.parent, Container)
    def test_parent_set_in__setattr__(self):
        # Providers attached *after* class creation also get ``parent`` set,
        # via the container's __setattr__.
        class Container(containers.DeclarativeContainer):
            pass
        Container.dependency = providers.Dependency()
        Container.dependencies_container = providers.DependenciesContainer()
        Container.container = providers.Container(ContainerA)
        self.assertIs(Container.dependency.parent, Container)
        self.assertIs(Container.dependencies_container.parent, Container)
        self.assertIs(Container.container.parent, Container)
def test_resolve_provider_name(self):
self.assertEqual(ContainerA.resolve_provider_name(ContainerA.p11), 'p11')
    def test_resolve_provider_name_no_provider(self):
        # Asking for the name of a provider the container does not own raises.
        with self.assertRaises(errors.Error):
            ContainerA.resolve_provider_name(providers.Provider())
    def test_child_dependency_parent_name(self):
        # An undefined Dependency reports its fully-qualified name
        # ("<Container>.<attribute>") in the error message.
        class Container(containers.DeclarativeContainer):
            dependency = providers.Dependency()
        with self.assertRaises(errors.Error) as context:
            Container.dependency()
        self.assertEqual(
            str(context.exception),
            'Dependency "Container.dependency" is not defined',
        )
    def test_child_dependencies_container_parent_name(self):
        # The qualified name includes the intermediate DependenciesContainer.
        class Container(containers.DeclarativeContainer):
            dependencies_container = providers.DependenciesContainer()
        with self.assertRaises(errors.Error) as context:
            Container.dependencies_container.dependency()
        self.assertEqual(
            str(context.exception),
            'Dependency "Container.dependencies_container.dependency" is not defined',
        )
    def test_child_container_parent_name(self):
        # Dependency names are qualified through nested Container providers.
        class ChildContainer(containers.DeclarativeContainer):
            dependency = providers.Dependency()
        class Container(containers.DeclarativeContainer):
            child_container = providers.Container(ChildContainer)
        with self.assertRaises(errors.Error) as context:
            Container.child_container.dependency()
        self.assertEqual(
            str(context.exception),
            'Dependency "Container.child_container.dependency" is not defined',
        )
class DeclarativeContainerWithCustomStringTests(unittest.TestCase):
    """Containers must accept ``str`` subclasses as attribute names."""
    class CustomString(str):
        pass
    # NOTE(review): CustomClass appears unused by these tests -- verify
    # whether it can be removed.
    class CustomClass:
        thing = None
    class CustomContainer(containers.DeclarativeContainer):
        pass
    def setUp(self):
        # Fresh provider per test; the container class itself is shared.
        self.container = self.CustomContainer
        self.provider = providers.Provider()
    def test_setattr(self):
        # setattr with a str-subclass key must behave like a plain str key.
        setattr(self.container, self.CustomString('test_attr'), self.provider)
        self.assertIs(self.container.test_attr, self.provider)
    def test_delattr(self):
        # delattr with a str-subclass key removes the attribute again.
        setattr(self.container, self.CustomString('test_attr'), self.provider)
        delattr(self.container, self.CustomString('test_attr'))
        with self.assertRaises(AttributeError):
            self.container.test_attr
| true | true |
f731bddc26f0944e091eb4201d1fed5699a2d7df | 359 | py | Python | sdk/python/pulumi_azure/notificationhub/__init__.py | kenny-wealth/pulumi-azure | e57e3a81f95bf622e7429c53f0bff93e33372aa1 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure/notificationhub/__init__.py | kenny-wealth/pulumi-azure | e57e3a81f95bf622e7429c53f0bff93e33372aa1 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure/notificationhub/__init__.py | kenny-wealth/pulumi-azure | e57e3a81f95bf622e7429c53f0bff93e33372aa1 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from .hub import *
from .authorization_rule import *
from .namespace import *
from .get_hub import *
from .get_namespace import *
| 32.636364 | 87 | 0.727019 |
# Export this package's modules as members:
from .hub import *
from .authorization_rule import *
from .namespace import *
from .get_hub import *
from .get_namespace import *
| true | true |
f731be3829420df3473b875de15a7e7c4ab77ba2 | 653 | py | Python | synlib/descriptions/ADDFXL.py | vhnatyk/vlsistuff | 0981097bd19a0c482728dcc5048a3615ac9a9a90 | [
"MIT"
] | 26 | 2018-03-17T18:14:22.000Z | 2022-03-14T07:23:13.000Z | synlib/descriptions/ADDFXL.py | psumesh/vlsistuff | 1fe64b093d0581d99c7d826b74c31b8655fa0b31 | [
"MIT"
] | 1 | 2019-10-16T10:31:11.000Z | 2019-10-17T04:14:53.000Z | synlib/descriptions/ADDFXL.py | psumesh/vlsistuff | 1fe64b093d0581d99c7d826b74c31b8655fa0b31 | [
"MIT"
] | 7 | 2018-07-16T07:51:25.000Z | 2022-02-15T14:22:54.000Z | Desc = cellDescClass("ADDFXL")
Desc.properties["cell_footprint"] = "addf"
Desc.properties["area"] = "69.854400"
Desc.properties["cell_leakage_power"] = "3632.360760"
Desc.pinOrder = ['A', 'B', 'CI', 'CO', 'S']
Desc.add_arc("A","S","combi")
Desc.add_arc("B","S","combi")
Desc.add_arc("CI","S","combi")
Desc.add_arc("A","CO","combi")
Desc.add_arc("B","CO","combi")
Desc.add_arc("CI","CO","combi")
Desc.add_param("area",69.854400);
Desc.add_pin("A","input")
Desc.add_pin("B","input")
Desc.add_pin("CI","input")
Desc.add_pin("S","output")
Desc.add_pin_func("S","unknown")
Desc.add_pin("CO","output")
Desc.add_pin_func("CO","unknown")
CellLib["ADDFXL"]=Desc
| 31.095238 | 53 | 0.666156 | Desc = cellDescClass("ADDFXL")
# Cell description attributes for ADDFXL (``Desc`` is created just above).
Desc.properties["cell_footprint"] = "addf"
Desc.properties["area"] = "69.854400"
Desc.properties["cell_leakage_power"] = "3632.360760"
Desc.pinOrder = ['A', 'B', 'CI', 'CO', 'S']
# Combinational timing arcs: each input drives both outputs S and CO.
Desc.add_arc("A","S","combi")
Desc.add_arc("B","S","combi")
Desc.add_arc("CI","S","combi")
Desc.add_arc("A","CO","combi")
Desc.add_arc("B","CO","combi")
Desc.add_arc("CI","CO","combi")
Desc.add_param("area",69.854400);
Desc.add_pin("A","input")
Desc.add_pin("B","input")
Desc.add_pin("CI","input")
Desc.add_pin("S","output")
Desc.add_pin_func("S","unknown")
Desc.add_pin("CO","output")
Desc.add_pin_func("CO","unknown")
# Register the finished description in the global cell library.
CellLib["ADDFXL"]=Desc
| true | true |
f731beda637d2a5569a79ee173f7c5968ddc0fe2 | 1,688 | py | Python | base/log.py | testtuantuan/appTest | 2717b30b2cc63080cb0c68d72f4a772daf49e5c3 | [
"BSD-3-Clause"
] | null | null | null | base/log.py | testtuantuan/appTest | 2717b30b2cc63080cb0c68d72f4a772daf49e5c3 | [
"BSD-3-Clause"
] | null | null | null | base/log.py | testtuantuan/appTest | 2717b30b2cc63080cb0c68d72f4a772daf49e5c3 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
# coding=utf-8
import datetime
import logging
import functools
import os
import traceback
import inspect
# Create the log directory (relative to the process cwd) if needed.
# makedirs(exist_ok=True) replaces the original listdir-membership check,
# which was racy and failed if '../logs' existed as a file.
os.makedirs('../logs', exist_ok=True)
# One timestamped log file per run.
now = datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%S')
_log_fp = "../logs/" + now + ".log"
# File handler: everything at DEBUG and above goes into the log file.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename=_log_fp,
                    filemode='w')
# Console handler: mirror INFO and above in a shorter format.
_console = logging.StreamHandler()
_console.setLevel(logging.INFO)
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
_console.setFormatter(formatter)
LOGGER = logging.getLogger('czb test')
LOGGER.addHandler(_console)
def logged(method):
    """Decorator logging the wrapped callable's args, result and duration.

    Exceptions raised by ``method`` are logged and swallowed (the wrapper
    then returns ``None``), preserving the original best-effort behaviour.
    NOTE(review): consider re-raising instead of swallowing -- silent
    failure can hide bugs in the decorated code.
    """
    @functools.wraps(method)
    def inner(*args, **kwargs):
        # The result must be local to each call: the original kept it as a
        # decorator-level ``nonlocal``, so a raising call returned the
        # *previous* call's value.
        return_value = None
        start = datetime.datetime.now()
        try:
            return_value = method(*args, **kwargs)
        except Exception:
            LOGGER.error('Exception:{}'.format(traceback.format_exc()))
        delta = datetime.datetime.now() - start
        LOGGER.info('调用 {}函数;\n 传入参数: {}\n 或许还有: {},\n 返回结果: {} ;\n'
                    .format(inspect.stack()[1][3], str(args), str(kwargs), return_value))
        # The format string has three placeholders; the original passed a
        # stray fourth argument, which str.format silently ignored.
        LOGGER.warning('调用 {}函数;\n 时间 {};\n 执行时间 {} ;\n'
                       .format(inspect.stack()[1][3], start, delta))
        return return_value
    return inner
| 28.133333 | 97 | 0.584716 |
import datetime
import logging
import functools
import os
import traceback
import inspect
# Ensure the (relative) log directory exists.
# NOTE(review): os.makedirs('../logs', exist_ok=True) would be the
# idiomatic, race-free form of this check.
if "logs" in os.listdir('../'):
    pass
else:
    os.mkdir('../logs')
# One timestamped log file per run.
now = datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%S')
_log_fp = "../logs/" + now + ".log"
# File handler: DEBUG and above into the log file.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename=_log_fp,
                    filemode='w')
# Console handler: INFO and above in a shorter format.
_console = logging.StreamHandler()
_console.setLevel(logging.INFO)
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
_console.setFormatter(formatter)
LOGGER = logging.getLogger('czb test')
LOGGER.addHandler(_console)
def logged(method):
    """Log the wrapped callable's arguments, result and wall-clock duration.

    NOTE(review): ``return_value`` is a decorator-level ``nonlocal`` shared
    across calls -- if ``method`` raises, the *previous* call's value is
    returned. Exceptions are logged and swallowed.
    """
    return_value = None
    @functools.wraps(method)
    def inner(*args, **kwargs):
        start = datetime.datetime.now()
        try:
            nonlocal return_value
            return_value = method(*args, **kwargs)
        except Exception:
            e = traceback.format_exc()
            LOGGER.error('Exception:{}'.format(e))
        finally:
            pass
        end = datetime.datetime.now()
        delta = end - start
        LOGGER.info('调用 {}函数;\n 传入参数: {}\n 或许还有: {},\n 返回结果: {} ;\n'
                    .format(inspect.stack()[1][3], str(args), str(kwargs), return_value))
        LOGGER.warning('调用 {}函数;\n 时间 {};\n 执行时间 {} ;\n'
                       .format(inspect.stack()[1][3], start, delta, return_value))
        return return_value
    return inner
| true | true |
f731bfab7f7356124c08783189c0a2d6c9d964d1 | 4,425 | py | Python | test/functional/wallet_part_segwit_scripts.py | bleach86/ghost-core | 59824a5e00fbc500eeec28950999a05967bad608 | [
"MIT"
] | null | null | null | test/functional/wallet_part_segwit_scripts.py | bleach86/ghost-core | 59824a5e00fbc500eeec28950999a05967bad608 | [
"MIT"
] | null | null | null | test/functional/wallet_part_segwit_scripts.py | bleach86/ghost-core | 59824a5e00fbc500eeec28950999a05967bad608 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2019 The Particl Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_particl import GhostTestFramework
class SegwitScriptsTest(GhostTestFramework):
    """Exercise segwit (bech32 / p2sh-segwit) addresses and multisig scripts."""
    def set_test_params(self):
        # Three fresh nodes; -reservebalance presumably keeps coins out of
        # staking during setup (TODO confirm against wallet docs).
        self.setup_clean_chain = True
        self.num_nodes = 3
        self.extra_args = [ ['-debug','-noacceptnonstdtxn','-reservebalance=10000000'] for i in range(self.num_nodes)]
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    def setup_network(self, split=False):
        # Star topology around node 0.
        self.add_nodes(self.num_nodes, extra_args=self.extra_args)
        self.start_nodes()
        self.connect_nodes_bi(0, 1)
        self.connect_nodes_bi(0, 2)
        self.sync_all()
    def run_test(self):
        nodes = self.nodes
        # Fund nodes 0/1 from the two genesis key sets; node 2 starts empty
        # with a fresh master key and receives all test payments.
        self.import_genesis_coins_a(nodes[0])
        self.import_genesis_coins_b(nodes[1])
        nodes[2].extkeyimportmaster(nodes[2].mnemonic('new')['master'])
        addr_part_native = nodes[2].getnewaddress('addr_part_native')
        nodes[1].sendtoaddress(addr_part_native, 1)
        self.log.info('Test Bitcoin native segwit, p2wpkh')
        addr_sw_bech32 = nodes[2].getnewaddress('segwit script', False, False, False, 'bech32')
        nodes[2].manageaddressbook('newsend', addr_sw_bech32)
        nodes[1].sendtoaddress(addr_sw_bech32, 2)
        self.log.info('Test Bitcoin embedded segwit')
        addr_sw_p2sh = nodes[2].getnewaddress('segwit script', False, False, False, 'p2sh-segwit')
        nodes[2].manageaddressbook('newsend', addr_sw_p2sh)
        nodes[1].sendtoaddress(addr_sw_p2sh, 3)
        # Collect the three pubkeys and sanity-check each address type.
        ro = nodes[2].getaddressinfo(addr_part_native)
        assert(ro['iswitness'] == False)
        pk0 = ro['pubkey']
        ro = nodes[2].getaddressinfo(addr_sw_bech32)
        assert(ro['witness_version'] == 0)
        pk1 = ro['pubkey']
        ro = nodes[2].getaddressinfo(addr_sw_p2sh)
        assert(ro['script'] == 'witness_v0_keyhash')
        pk2 = ro['pubkey']
        self.log.info('Test P2SH')
        # 2-of-2 multisigs in each address encoding, mixing the three keys.
        ms_standard = nodes[2].addmultisigaddress_part(2, [pk0, pk1])
        ms_p2shsegwit = nodes[2].addmultisigaddress_part(2, [pk0, pk2], 'ms_p2shsegwit', False, False, 'p2sh-segwit')
        ms_btcnative = nodes[2].addmultisigaddress_part(2, [pk1, pk2], 'ms_btcnative', False, False, 'bech32')
        ro = nodes[2].getaddressinfo(ms_standard['address'])
        assert(ro['iswitness'] == False)
        # decodescript must round-trip each redeem script to its address form.
        script = nodes[2].decodescript(ms_standard['redeemScript'])
        assert(ms_standard['address'] == script['p2sh'])
        script = nodes[2].decodescript(ms_p2shsegwit['redeemScript'])
        assert(ms_p2shsegwit['address'] == script['segwit']['p2sh-segwit'])
        script = nodes[2].decodescript(ms_btcnative['redeemScript'])
        assert(ms_btcnative['address'] in script['segwit']['addresses'])
        nodes[1].sendtoaddress(ms_standard['address'], 4)
        nodes[1].sendtoaddress(ms_p2shsegwit['address'], 5)
        nodes[1].sendtoaddress(ms_btcnative['address'], 6)
        self.sync_all()
        txns = nodes[2].filtertransactions()
        assert(len(txns) == 6)
        walletinfo = nodes[2].getwalletinfo()
        assert(walletinfo['balance'] == 0.0)
        assert(walletinfo['unconfirmed_balance'] == 21.0)
        # Stake one block so the six payments (1+2+3+4+5+6 = 21) confirm.
        self.stakeBlocks(1)
        walletinfo = nodes[2].getwalletinfo()
        assert(walletinfo['balance'] == 21.0)
        assert(walletinfo['unconfirmed_balance'] == 0.0)
        self.log.info('Test p2wpkh changeaddress')
        addr_p2wpkh = nodes[1].getnewaddress('p2wpkh change addr', False, False, False, 'bech32')
        assert(addr_p2wpkh.startswith('rtpw1'))
        rv = nodes[1].walletsettings('changeaddress', {'address_standard': addr_p2wpkh})
        assert(rv['changeaddress']['address_standard'] == addr_p2wpkh)
        txid = nodes[1].sendtoaddress(ms_standard['address'], 7)
        wtx = nodes[1].gettransaction(txid)
        # addr_p2wpkh was derived from the external chain and won't be seen as change.
        assert(len(wtx['details']) == 3)
        addrs = set()
        for i in range(3):
            addrs.add(wtx['details'][i]['address'])
        # Two distinct addresses: the multisig destination and the change addr.
        assert(len(addrs) == 2)
        assert(ms_standard['address'] in addrs)
        assert(addr_p2wpkh in addrs)
if __name__ == '__main__':
    SegwitScriptsTest().main()
| 40.59633 | 118 | 0.654011 |
from test_framework.test_particl import GhostTestFramework
class SegwitScriptsTest(GhostTestFramework):
    """Exercise segwit (bech32 / p2sh-segwit) addresses and multisig scripts."""
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 3
        self.extra_args = [ ['-debug','-noacceptnonstdtxn','-reservebalance=10000000'] for i in range(self.num_nodes)]
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    def setup_network(self, split=False):
        # Star topology around node 0.
        self.add_nodes(self.num_nodes, extra_args=self.extra_args)
        self.start_nodes()
        self.connect_nodes_bi(0, 1)
        self.connect_nodes_bi(0, 2)
        self.sync_all()
    def run_test(self):
        nodes = self.nodes
        # Nodes 0/1 get genesis funds; node 2 receives all test payments.
        self.import_genesis_coins_a(nodes[0])
        self.import_genesis_coins_b(nodes[1])
        nodes[2].extkeyimportmaster(nodes[2].mnemonic('new')['master'])
        addr_part_native = nodes[2].getnewaddress('addr_part_native')
        nodes[1].sendtoaddress(addr_part_native, 1)
        self.log.info('Test Bitcoin native segwit, p2wpkh')
        addr_sw_bech32 = nodes[2].getnewaddress('segwit script', False, False, False, 'bech32')
        nodes[2].manageaddressbook('newsend', addr_sw_bech32)
        nodes[1].sendtoaddress(addr_sw_bech32, 2)
        self.log.info('Test Bitcoin embedded segwit')
        addr_sw_p2sh = nodes[2].getnewaddress('segwit script', False, False, False, 'p2sh-segwit')
        nodes[2].manageaddressbook('newsend', addr_sw_p2sh)
        nodes[1].sendtoaddress(addr_sw_p2sh, 3)
        # Collect the three pubkeys and sanity-check each address type.
        ro = nodes[2].getaddressinfo(addr_part_native)
        assert(ro['iswitness'] == False)
        pk0 = ro['pubkey']
        ro = nodes[2].getaddressinfo(addr_sw_bech32)
        assert(ro['witness_version'] == 0)
        pk1 = ro['pubkey']
        ro = nodes[2].getaddressinfo(addr_sw_p2sh)
        assert(ro['script'] == 'witness_v0_keyhash')
        pk2 = ro['pubkey']
        self.log.info('Test P2SH')
        # 2-of-2 multisigs in each address encoding, mixing the three keys.
        ms_standard = nodes[2].addmultisigaddress_part(2, [pk0, pk1])
        ms_p2shsegwit = nodes[2].addmultisigaddress_part(2, [pk0, pk2], 'ms_p2shsegwit', False, False, 'p2sh-segwit')
        ms_btcnative = nodes[2].addmultisigaddress_part(2, [pk1, pk2], 'ms_btcnative', False, False, 'bech32')
        ro = nodes[2].getaddressinfo(ms_standard['address'])
        assert(ro['iswitness'] == False)
        # decodescript must round-trip each redeem script to its address form.
        script = nodes[2].decodescript(ms_standard['redeemScript'])
        assert(ms_standard['address'] == script['p2sh'])
        script = nodes[2].decodescript(ms_p2shsegwit['redeemScript'])
        assert(ms_p2shsegwit['address'] == script['segwit']['p2sh-segwit'])
        script = nodes[2].decodescript(ms_btcnative['redeemScript'])
        assert(ms_btcnative['address'] in script['segwit']['addresses'])
        nodes[1].sendtoaddress(ms_standard['address'], 4)
        nodes[1].sendtoaddress(ms_p2shsegwit['address'], 5)
        nodes[1].sendtoaddress(ms_btcnative['address'], 6)
        self.sync_all()
        txns = nodes[2].filtertransactions()
        assert(len(txns) == 6)
        walletinfo = nodes[2].getwalletinfo()
        assert(walletinfo['balance'] == 0.0)
        assert(walletinfo['unconfirmed_balance'] == 21.0)
        # Stake one block so the six payments (1+2+3+4+5+6 = 21) confirm.
        self.stakeBlocks(1)
        walletinfo = nodes[2].getwalletinfo()
        assert(walletinfo['balance'] == 21.0)
        assert(walletinfo['unconfirmed_balance'] == 0.0)
        self.log.info('Test p2wpkh changeaddress')
        addr_p2wpkh = nodes[1].getnewaddress('p2wpkh change addr', False, False, False, 'bech32')
        assert(addr_p2wpkh.startswith('rtpw1'))
        rv = nodes[1].walletsettings('changeaddress', {'address_standard': addr_p2wpkh})
        assert(rv['changeaddress']['address_standard'] == addr_p2wpkh)
        txid = nodes[1].sendtoaddress(ms_standard['address'], 7)
        wtx = nodes[1].gettransaction(txid)
        # Destination + change address; the external-chain p2wpkh address is
        # one of the two distinct addresses in the details.
        assert(len(wtx['details']) == 3)
        addrs = set()
        for i in range(3):
            addrs.add(wtx['details'][i]['address'])
        assert(len(addrs) == 2)
        assert(ms_standard['address'] in addrs)
        assert(addr_p2wpkh in addrs)
if __name__ == '__main__':
    SegwitScriptsTest().main()
| true | true |
f731c0b87e19e1774f95606b9d89e1dd2dc40b0a | 15,296 | py | Python | venv/lib/python3.6/site-packages/feedgen/ext/dc.py | jannahuang/blog | e1d8cfa9d79ac06097a0e55531bba9421fcbf283 | [
"MIT"
] | null | null | null | venv/lib/python3.6/site-packages/feedgen/ext/dc.py | jannahuang/blog | e1d8cfa9d79ac06097a0e55531bba9421fcbf283 | [
"MIT"
] | null | null | null | venv/lib/python3.6/site-packages/feedgen/ext/dc.py | jannahuang/blog | e1d8cfa9d79ac06097a0e55531bba9421fcbf283 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''
feedgen.ext.dc
~~~~~~~~~~~~~~~~~~~
Extends the FeedGenerator to add Dubline Core Elements to the feeds.
Descriptions partly taken from
http://dublincore.org/documents/dcmi-terms/#elements-coverage
:copyright: 2013-2017, Lars Kiesow <lkiesow@uos.de>
:license: FreeBSD and LGPL, see license.* for more details.
'''
from feedgen.ext.base import BaseExtension
from feedgen.util import xml_elem
class DcBaseExtension(BaseExtension):
    '''Dublin Core elements extension.

    Holds the fifteen Dublin Core Metadata Element Set values
    (http://dublincore.org/documents/dces/) and serializes them as
    ``dc:*`` children of an RSS channel or Atom feed/entry.
    '''

    def __init__(self):
        # One list-of-strings slot per DC element; None until first set.
        # References:
        # http://dublincore.org/documents/usageguide/elements.shtml
        # http://dublincore.org/documents/dcmi-terms/
        self._dcelem_contributor = None
        self._dcelem_coverage = None
        self._dcelem_creator = None
        self._dcelem_date = None
        self._dcelem_description = None
        self._dcelem_format = None
        self._dcelem_identifier = None
        self._dcelem_language = None
        self._dcelem_publisher = None
        self._dcelem_relation = None
        self._dcelem_rights = None
        self._dcelem_source = None
        self._dcelem_subject = None
        self._dcelem_title = None
        self._dcelem_type = None

    def extend_ns(self):
        '''Return the XML namespace map contributed by this extension.'''
        return {'dc': 'http://purl.org/dc/elements/1.1/'}

    def _extend_xml(self, xml_element):
        '''Append a ``dc:<elem>`` child to *xml_element* for every set value.

        :param xml_element: etree element to extend in place.
        '''
        DCELEMENTS_NS = 'http://purl.org/dc/elements/1.1/'
        for elem in ['contributor', 'coverage', 'creator', 'date',
                     'description', 'language', 'publisher', 'relation',
                     'rights', 'source', 'subject', 'title', 'type', 'format',
                     'identifier']:
            for val in getattr(self, '_dcelem_%s' % elem, None) or []:
                node = xml_elem('{%s}%s' % (DCELEMENTS_NS, elem), xml_element)
                node.text = val

    def extend_atom(self, atom_feed):
        '''Extend an Atom feed with the set DC fields.

        :param atom_feed: The feed root element.
        :returns: The feed root element.
        '''
        self._extend_xml(atom_feed)
        return atom_feed

    def extend_rss(self, rss_feed):
        '''Extend an RSS feed with the set DC fields.

        :param rss_feed: The feed root element.
        :returns: The feed root element.
        '''
        channel = rss_feed[0]  # <channel> is the first child of <rss>
        self._extend_xml(channel)
        return rss_feed

    def _dc_elem(self, name, value, replace):
        '''Shared accessor backing all ``dc_*`` getter/setter methods.

        :param name: DC element name, e.g. ``'title'``.
        :param value: Single value or list to add; None to only get.
        :param replace: Discard previously set values before adding.
        :returns: The current list of values (or None if never set).
        '''
        attr = '_dcelem_' + name
        if value is not None:
            if not isinstance(value, list):
                value = [value]
            if replace or not getattr(self, attr):
                setattr(self, attr, [])
            setattr(self, attr, getattr(self, attr) + value)
        return getattr(self, attr)

    def dc_contributor(self, contributor=None, replace=False):
        '''Get or set dc:contributor -- an entity responsible for making
        contributions to the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-contributor

        :param contributor: Contributor or list of contributors.
        :param replace: Replace already set contributors (default: False).
        :returns: List of contributors.
        '''
        return self._dc_elem('contributor', contributor, replace)

    def dc_coverage(self, coverage=None, replace=True):
        '''Get or set dc:coverage -- the spatial or temporal topic of the
        resource, its spatial applicability, or the jurisdiction under
        which it is relevant.
        http://dublincore.org/documents/dcmi-terms/#elements-coverage

        :param coverage: Coverage or list of coverages.
        :param replace: Replace already set coverage (default: True).
        :returns: List of coverages.
        '''
        # Bug fix: this previously *assigned* instead of appending, so
        # dc_coverage(..., replace=False) still discarded earlier values.
        return self._dc_elem('coverage', coverage, replace)

    def dc_creator(self, creator=None, replace=False):
        '''Get or set dc:creator -- an entity primarily responsible for
        making the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-creator

        :param creator: Creator or list of creators.
        :param replace: Replace already set creators (default: False).
        :returns: List of creators.
        '''
        return self._dc_elem('creator', creator, replace)

    def dc_date(self, date=None, replace=True):
        '''Get or set dc:date -- a point or period of time associated with
        an event in the lifecycle of the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-date

        :param date: Date or list of dates.
        :param replace: Replace already set dates (default: True).
        :returns: List of dates.
        '''
        return self._dc_elem('date', date, replace)

    def dc_description(self, description=None, replace=True):
        '''Get or set dc:description -- an account of the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-description

        :param description: Description or list of descriptions.
        :param replace: Replace already set descriptions (default: True).
        :returns: List of descriptions.
        '''
        return self._dc_elem('description', description, replace)

    def dc_format(self, format=None, replace=True):
        '''Get or set dc:format -- the file format, physical medium, or
        dimensions of the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-format

        :param format: Format of the resource or list of formats.
        :param replace: Replace already set formats (default: True).
        :returns: List of formats.
        '''
        return self._dc_elem('format', format, replace)

    def dc_identifier(self, identifier=None, replace=True):
        '''Get or set dc:identifier -- an unambiguous reference to the
        resource within a given context.
        http://dublincore.org/documents/dcmi-terms/#elements-identifier

        :param identifier: Identifier of the resource or list of identifiers.
        :param replace: Replace already set identifiers (default: True).
        :returns: List of identifiers.
        '''
        return self._dc_elem('identifier', identifier, replace)

    def dc_language(self, language=None, replace=True):
        '''Get or set dc:language -- a language of the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-language

        :param language: Language or list of languages.
        :param replace: Replace already set languages (default: True).
        :returns: List of languages.
        '''
        return self._dc_elem('language', language, replace)

    def dc_publisher(self, publisher=None, replace=False):
        '''Get or set dc:publisher -- an entity responsible for making the
        resource available.
        http://dublincore.org/documents/dcmi-terms/#elements-publisher

        :param publisher: Publisher or list of publishers.
        :param replace: Replace already set publishers (default: False).
        :returns: List of publishers.
        '''
        return self._dc_elem('publisher', publisher, replace)

    def dc_relation(self, relation=None, replace=False):
        '''Get or set dc:relation -- a related resource.
        http://dublincore.org/documents/dcmi-terms/#elements-relation

        :param relation: Relation or list of relations.
        :param replace: Replace already set relations (default: False).
        :returns: List of relations.
        '''
        return self._dc_elem('relation', relation, replace)

    def dc_rights(self, rights=None, replace=False):
        '''Get or set dc:rights -- information about rights held in and
        over the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-rights

        :param rights: Rights information or list thereof.
        :param replace: Replace already set rights (default: False).
        :returns: List of rights information.
        '''
        return self._dc_elem('rights', rights, replace)

    def dc_source(self, source=None, replace=False):
        '''Get or set dc:source -- a related resource from which the
        described resource is derived, in whole or in part.
        http://dublincore.org/documents/dcmi-terms/#elements-source

        :param source: Source or list of sources.
        :param replace: Replace already set sources (default: False).
        :returns: List of sources.
        '''
        return self._dc_elem('source', source, replace)

    def dc_subject(self, subject=None, replace=False):
        '''Get or set dc:subject -- the topic of the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-subject

        :param subject: Subject or list of subjects.
        :param replace: Replace already set subjects (default: False).
        :returns: List of subjects.
        '''
        return self._dc_elem('subject', subject, replace)

    def dc_title(self, title=None, replace=True):
        '''Get or set dc:title -- a name given to the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-title

        :param title: Title or list of titles.
        :param replace: Replace already set titles (default: True).
        :returns: List of titles.
        '''
        return self._dc_elem('title', title, replace)

    def dc_type(self, type=None, replace=False):
        '''Get or set dc:type -- the nature or genre of the resource.
        http://dublincore.org/documents/dcmi-terms/#elements-type

        :param type: Type or list of types.
        :param replace: Replace already set types (default: False).
        :returns: List of types.
        '''
        return self._dc_elem('type', type, replace)
class DcExtension(DcBaseExtension):
    '''Dublin Core elements extension -- presumably the feed-level variant
    (cf. DcEntryExtension for entries); behaviour is inherited unchanged.
    '''
class DcEntryExtension(DcBaseExtension):
    '''Dublin Core elements extension for individual feed entries/items.
    '''
    def extend_atom(self, entry):
        '''Add dc elements to an Atom entry. Alters the entry in place.

        :param entry: An atom entry element.
        :returns: The entry element.
        '''
        self._extend_xml(entry)
        return entry
    def extend_rss(self, item):
        '''Add dc elements to an RSS item. Alters the item in place.

        :param item: A RSS item element.
        :returns: The item element.
        '''
        self._extend_xml(item)
        return item
| 37.490196 | 79 | 0.622973 |
from feedgen.ext.base import BaseExtension
from feedgen.util import xml_elem
class DcBaseExtension(BaseExtension):
    '''Dublin Core Elements extension for podcasts (shared base class).'''
    def __init__(self):
        # One slot per Dublin Core element; each is None until set, then a
        # list of string values.
        self._dcelem_contributor = None
        self._dcelem_coverage = None
        self._dcelem_creator = None
        self._dcelem_date = None
        self._dcelem_description = None
        self._dcelem_format = None
        self._dcelem_identifier = None
        self._dcelem_language = None
        self._dcelem_publisher = None
        self._dcelem_relation = None
        self._dcelem_rights = None
        self._dcelem_source = None
        self._dcelem_subject = None
        self._dcelem_title = None
        self._dcelem_type = None
    def extend_ns(self):
        '''Return the XML namespace prefix mapping added by this extension.'''
        return {'dc': 'http://purl.org/dc/elements/1.1/'}
    def _extend_xml(self, xml_element):
        '''Append a dc:* child node to xml_element for every stored value.'''
        DCELEMENTS_NS = 'http://purl.org/dc/elements/1.1/'
        for elem in ['contributor', 'coverage', 'creator', 'date',
                     'description', 'language', 'publisher', 'relation',
                     'rights', 'source', 'subject', 'title', 'type', 'format',
                     'identifier']:
            if hasattr(self, '_dcelem_%s' % elem):
                for val in getattr(self, '_dcelem_%s' % elem) or []:
                    node = xml_elem('{%s}%s' % (DCELEMENTS_NS, elem),
                                    xml_element)
                    node.text = val
    def extend_atom(self, atom_feed):
        '''Add stored dc elements to an atom feed root; returns the feed.'''
        self._extend_xml(atom_feed)
        return atom_feed
    def extend_rss(self, rss_feed):
        '''Add stored dc elements to the RSS channel (first child of the feed).'''
        channel = rss_feed[0]
        self._extend_xml(channel)
        return rss_feed
    def dc_contributor(self, contributor=None, replace=False):
        '''Get or set dc:contributor values; replace=True overwrites, else appends.'''
        if contributor is not None:
            if not isinstance(contributor, list):
                contributor = [contributor]
            if replace or not self._dcelem_contributor:
                self._dcelem_contributor = []
            self._dcelem_contributor += contributor
        return self._dcelem_contributor
    def dc_coverage(self, coverage=None, replace=True):
        '''Get or set dc:coverage values.'''
        if coverage is not None:
            if not isinstance(coverage, list):
                coverage = [coverage]
            if replace or not self._dcelem_coverage:
                self._dcelem_coverage = []
            # NOTE(review): plain assignment (not +=) means new values always
            # replace any existing ones regardless of the replace flag --
            # confirm against upstream feedgen intent.
            self._dcelem_coverage = coverage
        return self._dcelem_coverage
    def dc_creator(self, creator=None, replace=False):
        '''Get or set dc:creator values; replace=True overwrites, else appends.'''
        if creator is not None:
            if not isinstance(creator, list):
                creator = [creator]
            if replace or not self._dcelem_creator:
                self._dcelem_creator = []
            self._dcelem_creator += creator
        return self._dcelem_creator
    def dc_date(self, date=None, replace=True):
        '''Get or set dc:date values; replace=True (default) overwrites.'''
        if date is not None:
            if not isinstance(date, list):
                date = [date]
            if replace or not self._dcelem_date:
                self._dcelem_date = []
            self._dcelem_date += date
        return self._dcelem_date
    def dc_description(self, description=None, replace=True):
        '''Get or set dc:description values; replace=True (default) overwrites.'''
        if description is not None:
            if not isinstance(description, list):
                description = [description]
            if replace or not self._dcelem_description:
                self._dcelem_description = []
            self._dcelem_description += description
        return self._dcelem_description
    def dc_format(self, format=None, replace=True):
        '''Get or set dc:format values; replace=True (default) overwrites.'''
        if format is not None:
            if not isinstance(format, list):
                format = [format]
            if replace or not self._dcelem_format:
                self._dcelem_format = []
            self._dcelem_format += format
        return self._dcelem_format
    def dc_identifier(self, identifier=None, replace=True):
        '''Get or set dc:identifier values; replace=True (default) overwrites.'''
        if identifier is not None:
            if not isinstance(identifier, list):
                identifier = [identifier]
            if replace or not self._dcelem_identifier:
                self._dcelem_identifier = []
            self._dcelem_identifier += identifier
        return self._dcelem_identifier
    def dc_language(self, language=None, replace=True):
        '''Get or set dc:language values; replace=True (default) overwrites.'''
        if language is not None:
            if not isinstance(language, list):
                language = [language]
            if replace or not self._dcelem_language:
                self._dcelem_language = []
            self._dcelem_language += language
        return self._dcelem_language
    def dc_publisher(self, publisher=None, replace=False):
        '''Get or set dc:publisher values; replace=True overwrites, else appends.'''
        if publisher is not None:
            if not isinstance(publisher, list):
                publisher = [publisher]
            if replace or not self._dcelem_publisher:
                self._dcelem_publisher = []
            self._dcelem_publisher += publisher
        return self._dcelem_publisher
    def dc_relation(self, relation=None, replace=False):
        '''Get or set dc:relation values; replace=True overwrites, else appends.'''
        if relation is not None:
            if not isinstance(relation, list):
                relation = [relation]
            if replace or not self._dcelem_relation:
                self._dcelem_relation = []
            self._dcelem_relation += relation
        return self._dcelem_relation
    def dc_rights(self, rights=None, replace=False):
        '''Get or set dc:rights values; replace=True overwrites, else appends.'''
        if rights is not None:
            if not isinstance(rights, list):
                rights = [rights]
            if replace or not self._dcelem_rights:
                self._dcelem_rights = []
            self._dcelem_rights += rights
        return self._dcelem_rights
    def dc_source(self, source=None, replace=False):
        '''Get or set dc:source values; replace=True overwrites, else appends.'''
        if source is not None:
            if not isinstance(source, list):
                source = [source]
            if replace or not self._dcelem_source:
                self._dcelem_source = []
            self._dcelem_source += source
        return self._dcelem_source
    def dc_subject(self, subject=None, replace=False):
        '''Get or set dc:subject values; replace=True overwrites, else appends.'''
        if subject is not None:
            if not isinstance(subject, list):
                subject = [subject]
            if replace or not self._dcelem_subject:
                self._dcelem_subject = []
            self._dcelem_subject += subject
        return self._dcelem_subject
    def dc_title(self, title=None, replace=True):
        '''Get or set dc:title values; replace=True (default) overwrites.'''
        if title is not None:
            if not isinstance(title, list):
                title = [title]
            if replace or not self._dcelem_title:
                self._dcelem_title = []
            self._dcelem_title += title
        return self._dcelem_title
    def dc_type(self, type=None, replace=False):
        '''Get or set dc:type values; replace=True overwrites, else appends.'''
        if type is not None:
            if not isinstance(type, list):
                type = [type]
            if replace or not self._dcelem_type:
                self._dcelem_type = []
            self._dcelem_type += type
        return self._dcelem_type
class DcExtension(DcBaseExtension):
    # Fix: the class statement had no body at all (its docstring was stripped),
    # which is a SyntaxError in Python.  Restore a docstring as the body.
    '''Dublin Core Elements extension for podcasts (feed level).'''
class DcEntryExtension(DcBaseExtension):
    '''Dublin Core Elements extension applied to individual entries/items.'''
    def extend_atom(self, entry):
        '''Add stored dc elements to an atom entry in place; returns the entry.'''
        self._extend_xml(entry)
        return entry
    def extend_rss(self, item):
        '''Add stored dc elements to an RSS item in place; returns the item.'''
        self._extend_xml(item)
        return item
| true | true |
f731c242461116c49eda6d5115c4af06fc3b920c | 602 | py | Python | var/spack/repos/builtin/packages/opendx/package.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2,360 | 2017-11-06T08:47:01.000Z | 2022-03-31T14:45:33.000Z | var/spack/repos/builtin/packages/opendx/package.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 13,838 | 2017-11-04T07:49:45.000Z | 2022-03-31T23:38:39.000Z | var/spack/repos/builtin/packages/opendx/package.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1,793 | 2017-11-04T07:45:50.000Z | 2022-03-30T14:31:53.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class Opendx(AutotoolsPackage):
    """Open Visualization Data Explorer."""
    homepage = "https://github.com/Mwoolsey/OpenDX"
    git = "https://github.com/Mwoolsey/OpenDX.git"
    version('master', branch='master')
    depends_on('motif')  # lesstif also works, but exhibits odd behaviors
    depends_on('gl')
    # Run 'make distclean' before the autoreconf phase; presumably the
    # checked-out tree ships stale generated build files -- TODO confirm.
    @run_before('autoreconf')
    def distclean(self):
        make('distclean')
| 28.666667 | 73 | 0.697674 |
class Opendx(AutotoolsPackage):
    """Open Visualization Data Explorer (Spack package)."""
    homepage = "https://github.com/Mwoolsey/OpenDX"
    git = "https://github.com/Mwoolsey/OpenDX.git"
    version('master', branch='master')
    depends_on('motif')  # lesstif also works, but exhibits odd behaviors
    depends_on('gl')
    # Wipe any pre-existing build products before the autoreconf phase runs.
    @run_before('autoreconf')
    def distclean(self):
        make('distclean')
| true | true |
f731c2a3b3050eec2782a2b33492c47117028a37 | 2,974 | py | Python | hatanaka/test/test_general_compression.py | valgur/hatanaka | 2f2f413050a922a507841546ba9ac5610a0dd882 | [
"MIT"
] | 5 | 2021-04-14T15:37:48.000Z | 2022-02-11T08:59:34.000Z | hatanaka/test/test_general_compression.py | valgur/hatanaka | 2f2f413050a922a507841546ba9ac5610a0dd882 | [
"MIT"
] | null | null | null | hatanaka/test/test_general_compression.py | valgur/hatanaka | 2f2f413050a922a507841546ba9ac5610a0dd882 | [
"MIT"
] | 1 | 2021-11-10T22:28:50.000Z | 2021-11-10T22:28:50.000Z | import gzip
import io
import shutil
import pytest
from hatanaka import compress, compress_on_disk, decompress, decompress_on_disk
from .conftest import clean, compress_pairs, decompress_pairs, get_data_path
@pytest.mark.parametrize(
    'input_suffix, expected_suffix',
    decompress_pairs
)
def test_decompress(tmp_path, crx_sample, rnx_bytes, input_suffix, expected_suffix):
    """Each supported compressed flavor decompresses to the reference RINEX."""
    # prepare: copy the packaged sample into a writable temp dir
    sample_path = tmp_path / ('sample' + input_suffix)
    in_file = 'sample' + input_suffix
    shutil.copy(get_data_path(in_file), sample_path)
    # decompress from a path
    converted = decompress(sample_path)
    # check
    assert clean(converted) == clean(rnx_bytes)
    # decompress() also accepts raw bytes, not just a path
    converted = decompress(sample_path.read_bytes())
    assert clean(converted) == clean(rnx_bytes)
def make_nav(txt):
    """Return *txt* with the RINEX file-type token changed to navigation.

    'NAVIGATION ' carries a trailing space so it is the same width as
    'OBSERVATION' and header columns stay aligned.
    """
    observation, navigation = b'OBSERVATION', b'NAVIGATION '
    return txt.replace(observation, navigation)
@pytest.mark.parametrize(
    'input_suffix',
    ['.rnx', '.RNX', '.21n']
)
def test_decompress_non_obs(tmp_path, rnx_bytes, input_suffix):
    """Non-observation (e.g. navigation) files are gunzipped but not Hatanaka-decoded."""
    # prepare a gzipped navigation-flavored file
    txt = make_nav(rnx_bytes)
    sample_path = tmp_path / ('sample' + input_suffix + '.gz')
    sample_path.write_bytes(gzip.compress(txt))
    # decompress
    out_path = decompress_on_disk(sample_path)
    # check: only the .gz suffix is stripped, content unchanged
    assert out_path.exists()
    assert out_path == tmp_path / ('sample' + input_suffix)
    assert clean(out_path.read_bytes()) == clean(txt)
@pytest.mark.parametrize(
    'input_suffix, compression, expected_suffix',
    compress_pairs
)
def test_compress(tmp_path, crx_sample, rnx_bytes, input_suffix, compression, expected_suffix):
    """Compression is lossless: output round-trips back to the reference RINEX."""
    # prepare
    in_file = 'sample' + input_suffix
    sample_path = tmp_path / in_file
    shutil.copy(get_data_path(in_file), sample_path)
    # compress from a path
    converted = compress(sample_path, compression=compression)
    # check by round-tripping through decompress()
    assert clean(decompress(converted)) == clean(rnx_bytes)
    # compress() also accepts raw bytes
    converted = compress(sample_path.read_bytes(), compression=compression)
    assert clean(decompress(converted)) == clean(rnx_bytes)
@pytest.mark.parametrize(
    'input_suffix',
    ['.rnx', '.RNX', '.21n']
)
def test_compress_non_obs(tmp_path, rnx_bytes, input_suffix):
    """Non-observation files are gzipped without Hatanaka encoding."""
    # prepare a navigation-flavored file
    txt = make_nav(rnx_bytes)
    sample_path = tmp_path / ('sample' + input_suffix)
    sample_path.write_bytes(txt)
    # compress
    out_path = compress_on_disk(sample_path)
    # check: a .gz suffix is appended and the content round-trips
    assert out_path.exists()
    assert out_path == tmp_path / ('sample' + input_suffix + '.gz')
    assert clean(decompress(out_path)) == clean(txt)
def test_invalid_input(crx_str, rnx_bytes):
    """File-like objects are rejected; only paths and bytes are accepted."""
    with pytest.raises(ValueError):
        decompress(io.BytesIO(rnx_bytes))
    with pytest.raises(ValueError):
        compress(io.BytesIO(rnx_bytes))
def test_invalid_name(tmp_path, rnx_sample):
    """decompress_on_disk() refuses a file whose name lacks a RINEX suffix."""
    sample_path = tmp_path / 'sample'
    shutil.copy(rnx_sample, sample_path)
    with pytest.raises(ValueError) as excinfo:
        decompress_on_disk(sample_path)
    # the error message should identify the offending name
    msg = excinfo.value.args[0]
    assert msg.endswith('is not a valid RINEX file name')
| 30.659794 | 95 | 0.718225 | import gzip
import io
import shutil
import pytest
from hatanaka import compress, compress_on_disk, decompress, decompress_on_disk
from .conftest import clean, compress_pairs, decompress_pairs, get_data_path
@pytest.mark.parametrize(
    'input_suffix, expected_suffix',
    decompress_pairs
)
def test_decompress(tmp_path, crx_sample, rnx_bytes, input_suffix, expected_suffix):
    """Each supported compressed flavor decompresses to the reference RINEX."""
    # copy the packaged sample into a writable temp dir
    sample_path = tmp_path / ('sample' + input_suffix)
    in_file = 'sample' + input_suffix
    shutil.copy(get_data_path(in_file), sample_path)
    # decompress() accepts a path ...
    converted = decompress(sample_path)
    assert clean(converted) == clean(rnx_bytes)
    # ... and raw bytes
    converted = decompress(sample_path.read_bytes())
    assert clean(converted) == clean(rnx_bytes)
def make_nav(txt):
    """Convert RINEX observation-file bytes into navigation-file bytes."""
    old_token = b'OBSERVATION'
    new_token = b'NAVIGATION '  # padded to the same width as the old token
    return txt.replace(old_token, new_token)
@pytest.mark.parametrize(
    'input_suffix',
    ['.rnx', '.RNX', '.21n']
)
def test_decompress_non_obs(tmp_path, rnx_bytes, input_suffix):
    """Non-observation files are only gunzipped; content passes through unchanged."""
    txt = make_nav(rnx_bytes)
    sample_path = tmp_path / ('sample' + input_suffix + '.gz')
    sample_path.write_bytes(gzip.compress(txt))
    out_path = decompress_on_disk(sample_path)
    # only the .gz suffix is stripped
    assert out_path.exists()
    assert out_path == tmp_path / ('sample' + input_suffix)
    assert clean(out_path.read_bytes()) == clean(txt)
@pytest.mark.parametrize(
    'input_suffix, compression, expected_suffix',
    compress_pairs
)
def test_compress(tmp_path, crx_sample, rnx_bytes, input_suffix, compression, expected_suffix):
    """Compression is lossless: round-trips back to the reference RINEX."""
    in_file = 'sample' + input_suffix
    sample_path = tmp_path / in_file
    shutil.copy(get_data_path(in_file), sample_path)
    # compress() accepts a path ...
    converted = compress(sample_path, compression=compression)
    assert clean(decompress(converted)) == clean(rnx_bytes)
    # ... and raw bytes
    converted = compress(sample_path.read_bytes(), compression=compression)
    assert clean(decompress(converted)) == clean(rnx_bytes)
@pytest.mark.parametrize(
    'input_suffix',
    ['.rnx', '.RNX', '.21n']
)
def test_compress_non_obs(tmp_path, rnx_bytes, input_suffix):
    """Non-observation files are gzipped without Hatanaka encoding."""
    txt = make_nav(rnx_bytes)
    sample_path = tmp_path / ('sample' + input_suffix)
    sample_path.write_bytes(txt)
    out_path = compress_on_disk(sample_path)
    # a .gz suffix is appended to the original name
    assert out_path.exists()
    assert out_path == tmp_path / ('sample' + input_suffix + '.gz')
    assert clean(decompress(out_path)) == clean(txt)
def test_invalid_input(crx_str, rnx_bytes):
    """File-like objects are rejected; only paths and bytes are accepted."""
    with pytest.raises(ValueError):
        decompress(io.BytesIO(rnx_bytes))
    with pytest.raises(ValueError):
        compress(io.BytesIO(rnx_bytes))
def test_invalid_name(tmp_path, rnx_sample):
    """A file without a recognizable RINEX suffix raises a descriptive ValueError."""
    sample_path = tmp_path / 'sample'
    shutil.copy(rnx_sample, sample_path)
    with pytest.raises(ValueError) as excinfo:
        decompress_on_disk(sample_path)
    msg = excinfo.value.args[0]
    assert msg.endswith('is not a valid RINEX file name')
| true | true |
f731c31f75ea7cd476db15705457c015ff3032c0 | 17,905 | py | Python | python/apogee/aspcap/teff.py | sdss/apogee | e134409dc14b20f69e68a0d4d34b2c1b5056a901 | [
"BSD-3-Clause"
] | 5 | 2019-04-11T13:35:24.000Z | 2019-11-14T06:12:51.000Z | python/apogee/aspcap/teff.py | sdss/apogee | e134409dc14b20f69e68a0d4d34b2c1b5056a901 | [
"BSD-3-Clause"
] | null | null | null | python/apogee/aspcap/teff.py | sdss/apogee | e134409dc14b20f69e68a0d4d34b2c1b5056a901 | [
"BSD-3-Clause"
] | 5 | 2018-09-20T22:07:43.000Z | 2021-01-15T07:13:38.000Z | # routines for calibrating/comparing effective temperatures with photometric sample
from apogee.utils import apload
from apogee.utils import apselect
from astropy.io import fits, ascii
from tools import match
from tools import plots
from tools import fit
from apogee.utils import bitmask
from apogee.aspcap import err
import pdb
import matplotlib.pyplot as plt
import numpy as np
import os
import matplotlib
def bindata(xdata, ydata, bins, median=True):
    """
    Median (default) or mean of ydata within each open x-bin (bins[i], bins[i+1]).

    The returned array has the same length as bins; only the first
    len(bins)-1 entries are filled, the last element stays 0.
    An empty bin yields NaN (numpy emits a runtime warning).
    """
    stat = np.median if median else np.mean
    result = bins * 0.
    for i, (lo, hi) in enumerate(zip(bins[:-1], bins[1:])):
        inbin = np.where((xdata > lo) & (xdata < hi))[0]
        result[i] = stat(ydata[inbin])
    return result
def ghb(allstar,glatmin=30.,ebvmax=0.03,trange=[3750,5500],loggrange=[-1,6],mhrange=[-2.5,0.75],alpha=False,out='teffcomp',yr=[-500,500],
        calib=False,dr13=False,grid=None,cmap='rainbow',doerr=True) :
    """
    Compares allstar ASPCAP Teff with photometric Teff from the Gonzalez
    Hernandez & Bonifacio (GHB) color-Teff relation for a low-extinction
    sample (|GLAT|>glatmin, SFD_EBV<ebvmax), makes diagnostic plots, and fits
    the Teff residuals as functions of [M/H] (1D) and ([M/H], Teff) (2D).
    Args:
        allstar : allStar structure
    Keyword args:
        glatmin (float) : minimum |GLAT| for sample (default=30)
        ebvmax (float) : maximum SFD_EBV for sample (default=0.03)
        trange, loggrange, mhrange : parameter selection windows
        alpha (bool) : color-code by [alpha/M] and add low/high-alpha fits
        out (str) : root name for output plot files (default='teffcomp')
        yr (list) : y-axis range for residual plots
        calib (bool) : compare PARAM (calibrated) instead of FPARAM
        dr13 (bool) : reproduce DR13-style plot ranges and fit overlay
        grid (int) : if set, use FPARAM_CLASS[:,grid,:] parameters
        cmap (str) : matplotlib colormap name
        doerr (bool) : fit the empirical uncertainty model (default=True)
    Returns:
        dict of calibration ranges, 1D/2D fit parameters, rms, and error-model
        parameters, suitable for the Teff calibration file read by cal().
    """
    # select data to use
    badtarg=['YOUNG','EMBEDDED','EXTENDED','M31','M33','EMISSION','RRLYR','DSPH','MAGCLOUD']
    # plots using Berger isochrone Teff for informational purposes
    if calib : param='PARAM'
    else : param = 'FPARAM'
    berger=fits.open(os.environ['APOGEE_DIR']+'/data/calib/teff_berger.fits')[1].data
    gd=apselect.select(allstar,badval=['STAR_BAD'],badstar=['MULTIPLE_SUSPECT'],badtarg=badtarg,raw=True)
    i1,i2=match.match(allstar['APOGEE_ID'][gd],berger['APOGEE_ID'])
    fig,ax=plots.multi(1,1,figsize=(12,6))
    plots.plotc(ax,allstar[param][gd[i1],3],allstar[param][gd[i1],0]-berger['TEFF'][i2],allstar[param][gd[i1],0],
                xt='[M/H]',yt='ASPCAP-Berger',zt='Teff',xr=[-3,1],yr=[-500,500],zr=[4500,7000],colorbar=True)
    ax.grid()
    fig.savefig(out+'_berger_mh.png')
    plt.close()
    fig,ax=plots.multi(1,1,figsize=(12,6))
    plots.plotc(ax,allstar[param][gd[i1],0],allstar[param][gd[i1],1],allstar[param][gd[i1],0]-berger['TEFF'][i2],
                xt='Teff',yt='log ',zt='ASPCAP-Berger',xr=[8000,3000],yr=[6,-1],zr=[-250,250],colorbar=True)
    ax.grid()
    fig.savefig(out+'_berger_hr.png')
    plt.close()
    # main photometric sample: parameter cuts plus low-extinction,
    # high-latitude, and valid 2MASS J/K photometry
    gd=apselect.select(allstar,badval=['STAR_BAD'],badstar=['MULTIPLE_SUSPECT'],badtarg=badtarg,teff=trange,mh=mhrange,logg=loggrange,raw=True)
    allstar=allstar[gd]
    #if dr13 :
    #  j=np.where((abs(allstar['GLAT'])>glatmin)&(allstar['SFD_EBV']<ebvmax))[0]
    #else :
    j=np.where((abs(allstar['GLAT'])>glatmin)&(allstar['SFD_EBV']>-0.01)&(allstar['SFD_EBV']<ebvmax)&(abs(allstar['J'])<90)&(abs(allstar['K'])<90))[0]
    # remove second gen GC stars
    #if not dr13 :
    gcstars = ascii.read(os.environ['APOGEE_DIR']+'/data/calib/gc_szabolcs.dat')
    bd=np.where(gcstars['pop'] != 1)[0]
    j = [x for x in j if allstar[x]['APOGEE_ID'] not in gcstars['id'][bd]]
    allstar=allstar[j]
    # photometric Teff and its (J-K) derivative from the GHB relations
    ghb,dtdjk=cte_ghb(allstar['J']-allstar['K'],allstar['FPARAM'][:,3],dwarf=False)
    ghb_dwarf,dtdjk_dwarf=cte_ghb(allstar['J']-allstar['K'],allstar['FPARAM'][:,3],dwarf=True)
    # use dwarf relation for dwarfs
    dw=np.where(allstar['FPARAM'][:,1] > 3.8)[0]
    ghb[dw]=ghb_dwarf[dw]
    dtdjk[dw]=dtdjk_dwarf[dw]
    # reject gross outliers (>500 K from the photometric scale)
    gd=np.where(abs(allstar['FPARAM'][:,0]-ghb) < 500)[0]
    ghb=ghb[gd]
    dtdjk=dtdjk[gd]
    allstar=allstar[gd]
    print('Teff calibration, number of stars: ', len(allstar))
    # choose which parameter array to compare against the photometric scale
    if calib :
        param='PARAM'
        teff=allstar[param][:,0]
        logg=allstar[param][:,1]
        mh=allstar[param][:,3]
        am=allstar[param][:,6]
    elif grid is None :
        param='FPARAM'
        teff=allstar[param][:,0]
        logg=allstar[param][:,1]
        mh=allstar[param][:,3]
        am=allstar[param][:,6]
    else :
        param='FPARAM_CLASS'
        teff=allstar[param][:,grid,0]
        logg=allstar[param][:,grid,1]
        mh=allstar[param][:,grid,3]
        am=allstar[param][:,grid,6]
        out=out+'_grid{:1d}'.format(grid)
    # HR diagram plot of differences
    fig,ax=plots.multi(1,1,figsize=(12,6))
    plots.plotc(ax,teff,logg,teff-ghb, xt='Teff',yt='log ',zt='ASPCAP-GHB',xr=[8000,3000],yr=[6,-1],zr=[-250,250],colorbar=True)
    ax.grid()
    fig.savefig(out+'_ghb_hr.png')
    plt.close()
    # plot Teff difference against metallicity, color-code by temperature
    fig,ax=plots.multi(1,1,hspace=0.001,wspace=0.001,figsize=(12,6))
    xr=[-3.0,1.0]
    zr=trange
    if dr13: zr=[3500,5500]
    binsize=0.25
    bins=np.arange(-2.5,0.75,binsize)
    # diff color-coded by [alpha/M] or Teff as f([M/H])
    if alpha :
        plots.plotc(ax,mh,teff-ghb,am,zr=[-0.1,0.4],xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff',
                    colorbar=True,zt=r'[$\alpha$/M]',rasterized=True,cmap=cmap)
    else :
        plots.plotc(ax,mh,teff-ghb,teff,xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff',
                    colorbar=True,zt='$T_{eff}$',rasterized=True,zr=trange,cmap=cmap)
    ax.grid()
    # binned mean (default color) and median (blue) of the residuals
    mean=bindata(mh,teff-ghb,bins,median=False)
    if not dr13: plots.plotp(ax,bins+binsize/2.,mean,marker='o',size=40)
    mean=bindata(mh,teff-ghb,bins,median=True)
    if not dr13: plots.plotp(ax,bins+binsize/2.,mean,marker='o',size=40,color='b')
    ax.text(0.1,0.9,'E(B-V)<{:6.2f}'.format(ebvmax),transform=ax.transAxes)
    gd=np.where(np.isfinite(mean))[0]
    tefit = fit.fit1d(bins[gd]+binsize/2.,mean[gd],degree=2,reject=0)
    # 1D quadratic fit as a function of metallicity
    allfit = fit.fit1d(mh,teff-ghb,ydata=teff,degree=2,reject=0)
    fig2,ax2=plots.multi(1,1)
    # 2D fit in ([M/H], Teff); its parameters go into the returned dict
    tefit2 = fit.fit2d(mh,teff,teff-ghb,reject=0,plot=ax2,zr=[-500,200],xt='[M/H]',yt=['Teff'],zt='$\Delta Teff$')
    #pfit = fit.fit2d(allstar[param][:,3],allstar[param][:,0],allstar[param][:,0]-ghb,plot=ax[0,0],zr=[-500,200],xt='[M/H]',yt=['Teff'],zt='$\Delta Teff$')
    #ejk=np.clip(np.sqrt(allstar['J_ERR']**2+allstar['K_ERR']**2),0.,0.02)
    #errpar = err.errfit(teff,allstar['SNR'],mh,teff-tefit(mh)-ghb,title='Teff',out=out+'_phot',zr=[0,250],meanerr=abs(dtdjk)*ejk)
    if doerr:
        errpar = err.errfit(teff,allstar['SNR'],mh,teff-tefit(mh)-ghb,title='Teff',out=out,zr=[0,150])
    else: errpar=0.
    x=np.linspace(-3,1,200)
    rms = (teff-tefit(mh)-ghb).std()
    if dr13:
        # overlay the fixed DR13 calibration polynomial
        plots.plotl(ax,x,-36.17+95.97*x-15.09*x**2,color='k')
        print(allfit)
    else :
        plots.plotl(ax,x,tefit(x),color='k')
        ax.text(0.98,0.9,'rms: {:6.1f}'.format(rms),transform=ax.transAxes,ha='right')
        cmap = matplotlib.cm.get_cmap(cmap)
        # draw the 2D fit at a few fixed temperatures, colored by Teff
        for t in np.arange(trange[0],trange[1],500.) :
            rgba=cmap((t-trange[0])/(trange[1]-trange[0]))
            y=x*0.+t
            plots.plotl(ax,x,tefit2(x,y),color=rgba)
    # stash data on the plots module so interactive events can identify stars
    plots._data_x = mh
    plots._data_y = teff-ghb
    plots._data = allstar
    plots.event(fig)
    # separate fits for low/hi alpha/M if requested
    if alpha :
        gdlo=apselect.select(allstar,badval=['STAR_BAD'],teff=trange,mh=mhrange,logg=[0,3.8],alpha=[-0.1,0.1],raw=True)
        mean=bindata(mh[gdlo],teff[gdlo]-ghb[gdlo],bins)
        plots.plotp(ax,bins,mean,marker='o',size=40,color='g')
        tmpfit = fit.fit1d(mh[gdlo],teff[gdlo]-ghb[gdlo],ydata=teff[gdlo],degree=2)
        plots.plotl(ax,x,tmpfit(x))
        print('low alpha: ', len(gdlo))
        gdhi=apselect.select(allstar,badval=['STAR_BAD'],teff=trange,mh=mhrange,logg=[0,3.8],alpha=[0.1,0.5],raw=True)
        mean=bindata(mh[gdhi],teff[gdhi]-ghb[gdhi],bins)
        plots.plotp(ax,bins,mean,marker='o',size=40,color='b')
        tmpfit = fit.fit1d(mh[gdhi],teff[gdhi]-ghb[gdhi],ydata=teff[gdhi],degree=2)
        plots.plotl(ax,x,tmpfit(x))
        print('hi alpha: ', len(gdhi))
    fig.tight_layout()
    fig.savefig(out+'.png')
    plt.close()
    # re-save with larger fonts for the PDF version
    plt.rc('font',size=14)
    plt.rc('axes',titlesize=14)
    plt.rc('axes',labelsize=14)
    fig.savefig(out+'.pdf')
    plt.close()
    # auxiliary plots with different color-codings
    try:
        meanfib=allstar['MEANFIB']
    except:
        meanfib=teff*0.
    fig,ax=plots.multi(2,2,hspace=0.001,wspace=0.001,figsize=(12,8))
    plots.plotc(ax[0,0],mh,teff-ghb,logg,zr=[0,5],xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff',colorbar=True,zt='log g',size=2)
    plots.plotc(ax[0,1],mh,teff-ghb,meanfib,zr=[0,300],xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff',colorbar=True,zt='mean fiber',size=2)
    pfit = fit.fit1d(mh,teff-ghb,ydata=teff,plot=ax[1,0],zr=[-500,200],xt='[M/H]',yt='$\Delta Teff$',xr=[-2.7,0.9],yr=[3500,5000],colorbar=True,zt='Teff')
    pfit = fit.fit1d(teff,teff-ghb,ydata=mh,plot=ax[1,1],zr=[-500,200],xt='Teff',yt='$\Delta Teff$',xr=trange,yr=[-2.5,0.5],colorbar=True,zt='[M/H]')
    fig.tight_layout()
    fig.savefig(out+'_b.png')
    plt.close()
    # do some test 2D and 1D fits and plots
    #fig,ax=plots.multi(2,2,hspace=0.5,wspace=0.001)
    #ax[0,1].xaxis.set_visible(False)
    #ax[0,1].yaxis.set_visible(False)
    #pfit = fit.fit2d(allstar[param][:,3],allstar[param][:,0],allstar[param][:,0]-ghb,plot=ax[0,0],zr=[-500,200],xt='[M/H]',yt=['Teff'],zt='$\Delta Teff$')
    #pfit = fit.fit1d(allstar[param][:,3],allstar[param][:,0]-ghb,ydata=allstar[param][:,0],plot=ax[1,0],zr=[-500,200],xt='[M/H]',yt='$\Delta Teff$',xr=[-2.7,0.9],yr=[3500,5000])
    #pfit = fit.fit1d(allstar[param][:,0],allstar[param][:,0]-ghb,ydata=allstar[param][:,3],plot=ax[1,1],zr=[-500,200],xt='Teff',xr=[3900,5100],yr=[-2.5,0.5])
    plt.draw()
    return {'caltemin': 3000., 'caltemax': 100000., 'temin' : trange[0], 'temax': trange[1],
            'mhmin': mhrange[0], 'mhmax' : mhrange[1],
            'par': tefit.parameters, 'rms' :rms, 'par2d': tefit2.parameters, 'errpar' : errpar}
def irfm(allstar,trange=[4000,5000],mhrange=[-2.5,0.75],out='dteff') :
    '''
    Compares allstar ASPCAP Teff with IRFM photometric Teff from the JAJ
    compilation (SAGA, CL, TH, SFD subsamples), plots residuals vs [M/H]
    and Teff, and does 1D/2D fits to the SFD subsample.
    Args:
        allstar : allStar structure
    Keyword args:
        trange (list) : Teff selection window (default=[4000,5000])
        mhrange (list) : [M/H] selection window (default=[-2.5,0.75])
        out (str) : root name for output plot files (default='dteff')
    '''
    # select stars
    gd=apselect.select(allstar,badval=['STAR_BAD'],teff=trange,mh=mhrange,raw=True)
    allstar=allstar[gd]
    # get IRFM data
    irfm=fits.open(os.environ['APOGEE_DIR']+'/data/calib/irfm_temp.fits')[1].data
    # get the subsamples and match. Note that we have to do this separately for each subsample because some
    # stars appear in more than one subsample
    saga=np.where(irfm['SOURCE'] == 'SAGA')[0]
    saga1,saga2=match.match(np.chararray.strip(allstar['APOGEE_ID']),np.chararray.strip(irfm['2MASS ID'][saga]))
    cl=np.where(irfm['SOURCE'] == 'CL')[0]
    cl1,cl2=match.match(np.chararray.strip(allstar['APOGEE_ID']),np.chararray.strip(irfm['2MASS ID'][cl]))
    th=np.where(irfm['SOURCE'] == 'TH')[0]
    th1,th2=match.match(np.chararray.strip(allstar['APOGEE_ID']),np.chararray.strip(irfm['2MASS ID'][th]))
    sfd=np.where(irfm['SOURCE'] == 'SFD')[0]
    sfd1,sfd2=match.match(np.chararray.strip(allstar['APOGEE_ID']),np.chararray.strip(irfm['2MASS ID'][sfd]))
    # plot diff color-coded by Teff as f([M/H]), one panel per subsample
    fig,ax=plots.multi(2,2,hspace=0.001,wspace=0.001)
    xr=[-3.0,1.0]
    yr=[-400,300]
    zr=[3500,6000]
    bins=np.arange(-2.5,0.75,0.25)
    # SAGA
    plots.plotc(ax[0,0],allstar['FPARAM'][saga1,3],allstar['FPARAM'][saga1,0]-irfm['IRFM TEFF'][saga[saga2]],allstar['FPARAM'][saga1,0],zr=zr,xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff')
    mean=bindata(allstar['FPARAM'][saga1,3],allstar['FPARAM'][saga1,0]-irfm['IRFM TEFF'][saga[saga2]],bins)
    plots.plotp(ax[0,0],bins,mean,marker='o',size=40)
    ax[0,0].text(0.1,0.9,'SAGA',transform=ax[0,0].transAxes)
    # CL
    plots.plotc(ax[0,1],allstar['FPARAM'][cl1,3],allstar['FPARAM'][cl1,0]-irfm['IRFM TEFF'][cl[cl2]],allstar['FPARAM'][cl1,0],zr=zr,xr=xr,yr=yr,xt='[M/H]')
    mean=bindata(allstar['FPARAM'][cl1,3],(allstar['FPARAM'][cl1,0]-irfm['IRFM TEFF'][cl[cl2]]),bins)
    plots.plotp(ax[0,1],bins,mean,marker='o',size=40)
    ax[0,1].text(0.1,0.9,'CL',transform=ax[0,1].transAxes)
    # TH
    plots.plotc(ax[1,0],allstar['FPARAM'][th1,3],allstar['FPARAM'][th1,0]-irfm['IRFM TEFF'][th[th2]],allstar['FPARAM'][th1,0],zr=zr,xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff')
    mean=bindata(allstar['FPARAM'][th1,3],(allstar['FPARAM'][th1,0]-irfm['IRFM TEFF'][th[th2]]),bins)
    plots.plotp(ax[1,0],bins,mean,marker='o',size=40)
    ax[1,0].text(0.1,0.9,'TH',transform=ax[1,0].transAxes)
    # SFD
    plots.plotc(ax[1,1],allstar['FPARAM'][sfd1,3],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],allstar['FPARAM'][sfd1,0],zr=zr,xr=xr,yr=yr,xt='[M/H]')
    mean=bindata(allstar['FPARAM'][sfd1,3],(allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]]),bins)
    plots.plotp(ax[1,1],bins,mean,marker='o',size=40)
    ax[1,1].text(0.1,0.9,'SFD',transform=ax[1,1].transAxes)
    fig.savefig(out+'_mh.png')
    # plot diff color-coded by [M/H] as f(Teff), one panel per subsample
    fig,ax=plots.multi(2,2,hspace=0.001,wspace=0.001)
    zr=[-2.0,0.5]
    yr=[-400,300]
    xr=[6000,3500]
    bins=np.arange(3500,5500,250)
    # SAGA
    plots.plotc(ax[0,0],allstar['FPARAM'][saga1,0],allstar['FPARAM'][saga1,0]-irfm['IRFM TEFF'][saga[saga2]],allstar['FPARAM'][saga1,3],zr=zr,xr=xr,yr=yr,xt='Teff',yt='ASPCAP-photometric Teff')
    mean=bindata(allstar['FPARAM'][saga1,0],(allstar['FPARAM'][saga1,0]-irfm['IRFM TEFF'][saga[saga2]]),bins)
    plots.plotp(ax[0,0],bins,mean,marker='o',size=40)
    ax[0,0].text(0.1,0.9,'SAGA',transform=ax[0,0].transAxes)
    # CL
    plots.plotc(ax[0,1],allstar['FPARAM'][cl1,0],allstar['FPARAM'][cl1,0]-irfm['IRFM TEFF'][cl[cl2]],allstar['FPARAM'][cl1,3],zr=zr,xr=xr,yr=yr,xt='Teff')
    mean=bindata(allstar['FPARAM'][cl1,0],(allstar['FPARAM'][cl1,0]-irfm['IRFM TEFF'][cl[cl2]]),bins)
    plots.plotp(ax[0,1],bins,mean,marker='o',size=40)
    ax[0,1].text(0.1,0.9,'CL',transform=ax[0,1].transAxes)
    # TH
    plots.plotc(ax[1,0],allstar['FPARAM'][th1,0],allstar['FPARAM'][th1,0]-irfm['IRFM TEFF'][th[th2]],allstar['FPARAM'][th1,3],zr=zr,xr=xr,yr=yr,xt='Teff',yt='ASPCAP-photometric Teff')
    mean=bindata(allstar['FPARAM'][th1,0],(allstar['FPARAM'][th1,0]-irfm['IRFM TEFF'][th[th2]]),bins)
    plots.plotp(ax[1,0],bins,mean,marker='o',size=40)
    ax[1,0].text(0.1,0.9,'TH',transform=ax[1,0].transAxes)
    # SFD
    plots.plotc(ax[1,1],allstar['FPARAM'][sfd1,0],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],allstar['FPARAM'][sfd1,3],zr=zr,xr=xr,yr=yr,xt='Teff')
    mean=bindata(allstar['FPARAM'][sfd1,0],(allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]]),bins)
    plots.plotp(ax[1,1],bins,mean,marker='o',size=40)
    ax[1,1].text(0.1,0.9,'SFD',transform=ax[1,1].transAxes)
    fig.savefig(out+'_teff.png')
    # do 2D fits with Teff and [M/H], and 1D fits with each
    # NOTE(review): these fit.fit2d/fit1d calls pass the axis as the first
    # positional argument with plot=True, unlike the plot=ax keyword usage in
    # ghb() -- confirm against the fit module's current API.
    fig,ax=plots.multi(2,2,hspace=0.5,wspace=0.001)
    ax[0,1].xaxis.set_visible(False)
    ax[0,1].yaxis.set_visible(False)
    pfit = fit.fit2d(ax[0,0],allstar['FPARAM'][sfd1,3],allstar['FPARAM'][sfd1,0],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],plot=True,zr=[-500,200],xt='[M/H]',yt=['Teff'],zt='$\Delta Teff$')
    pfit = fit.fit1d(ax[1,0],allstar['FPARAM'][sfd1,3],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],ydata=allstar['FPARAM'][sfd1,0],plot=True,zr=[-500,200],xt='[M/H]',yt='$\Delta Teff$',xr=[-2.7,0.9],yr=[3500,5000])
    pfit = fit.fit1d(ax[1,1],allstar['FPARAM'][sfd1,0],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],ydata=allstar['FPARAM'][sfd1,3],plot=True,zr=[-500,200],xt='Teff',xr=[3900,5100],yr=[-2.5,0.5])
    # drops into the debugger for interactive inspection before returning
    pdb.set_trace()
    return pfit
def cte_ghb(jk0, feh, dwarf=False):
    """
    Color-temperature relation from Gonzalez Hernandez & Bonifacio (2009):
    (J-K)_0 - Teff.

    Returns (Teff, dTeff/d(J-K)) for the given dereddened J-K color and [Fe/H].
    """
    if dwarf:
        coeff = (0.6524, 0.5813, 0.1225, -0.0646, 0.0370, 0.0016)   # dwarfs
    else:
        coeff = (0.6517, 0.6312, 0.0168, -0.0381, 0.0256, 0.0013)   # giants
    b0, b1, b2, b3, b4, b5 = coeff
    # theta = 5040/Teff as a polynomial in color and metallicity
    theta = b0 + b1*jk0 + b2*jk0**2 + b3*jk0*feh + b4*feh + b5*feh**2
    # chain rule: dTeff/d(J-K) = -5040/theta^2 * dtheta/d(J-K)
    dtheta_djk = b1 + 2*b2*jk0 + b3*feh
    return 5040./theta, -5040./theta**2*dtheta_djk
def cal(a,caldir='cal/'):
    """ Apply Teff calibration

    Populates a['PARAM'][:,0] from a['FPARAM'][:,0] using the 2D ([M/H], Teff)
    correction stored in caldir/tecal.fits, and updates the PARAMFLAG
    CALRANGE_BAD bits.  With caldir='none', FPARAM is copied through
    uncalibrated.  Modifies the input structure in place.
    """
    aspcapmask=bitmask.AspcapBitMask()
    parammask=bitmask.ParamBitMask()
    starmask=bitmask.StarBitMask()
    #populate PARAM[0] for stars w/o STAR_BAD (change to ALL with >=0)
    # NOTE(review): the first gd selection is immediately overwritten by the
    # second, so only the NO_ASPCAP_RESULT cut is actually in effect.
    gd=np.where( ((a['ASPCAPFLAG']&aspcapmask.badval()) >= 0) )[0]
    gd=np.where( ((a['ASPCAPFLAG']&aspcapmask.getval('NO_ASPCAP_RESULT')) == 0) )[0]
    #initial values
    a['PARAM'][:,0] = np.nan
    a['PARAMFLAG'][gd,0] |= parammask.getval('CALRANGE_BAD')
    if caldir == 'none' :
        a['PARAM'][gd,0] = a['FPARAM'][gd,0]
        a['PARAMFLAG'][gd,0] &= ~parammask.getval('CALRANGE_BAD')
        return
    # calibration coefficients and validity ranges (written from ghb()'s fit)
    calpars=fits.open(caldir+'/tecal.fits')[1].data[0]
    calteffmin=calpars['caltemin']
    calteffmax=calpars['caltemax']
    # clip the fit inputs to the range over which the calibration was derived
    teff=np.clip(a['FPARAM'][gd,0],calpars['temin'],calpars['temax'])
    mh=np.clip(a['FPARAM'][gd,3],calpars['mhmin'],calpars['mhmax'])
    try: snr=np.clip(a['SNREV'][gd],0,200.)
    except:
        print('No SNREV, continnue with SNR?')
        pdb.set_trace()
        snr=np.clip(a['SNR'][gd],0,200.)
    ok =np.where((a['FPARAM'][gd,0] >= calteffmin) & (a['FPARAM'][gd,0] <= calteffmax) )[0]
    # subtract the 2D correction: par2d = [const, [M/H] slope, Teff slope]
    a['PARAM'][gd[ok],0] = a['FPARAM'][gd[ok],0] - (calpars['par2d'][0]+calpars['par2d'][1]*mh[ok]+calpars['par2d'][2]*teff[ok])
    # populate uncertainties with err.apply()
    #a['PARAM_COV'][gd[ok],0,0] = err.elemerr(calpars['errpar'],a['FPARAM'][gd[ok],0]-4500.,snr[ok]-100.,a['FPARAM'][gd[ok],3])**2
    a['PARAMFLAG'][gd[ok],0] &= ~parammask.getval('CALRANGE_BAD')
    return
| 46.26615 | 225 | 0.628093 |
from apogee.utils import apload
from apogee.utils import apselect
from astropy.io import fits, ascii
from tools import match
from tools import plots
from tools import fit
from apogee.utils import bitmask
from apogee.aspcap import err
import pdb
import matplotlib.pyplot as plt
import numpy as np
import os
import matplotlib
def bindata(xdata, ydata, bins, median=True):
    """Median (default) or mean of ydata within each open x-bin (bins[i], bins[i+1]).

    Output has len(bins) entries; the final slot is left at zero.
    """
    out = bins * 0.
    nbin = len(bins) - 1
    for i in range(nbin):
        sel = (xdata > bins[i]) & (xdata < bins[i + 1])
        vals = ydata[np.where(sel)[0]]
        out[i] = np.median(vals) if median else vals.mean()
    return out
def ghb(allstar,glatmin=30.,ebvmax=0.03,trange=[3750,5500],loggrange=[-1,6],mhrange=[-2.5,0.75],alpha=False,out='teffcomp',yr=[-500,500],
        calib=False,dr13=False,grid=None,cmap='rainbow',doerr=True) :
    """ Compare ASPCAP Teff with photometric Teff from the GHB J-K color
        relation (via cte_ghb) for a low-extinction, high-latitude sample,
        plot the differences, fit them vs [M/H] (1D) and vs ([M/H],Teff)
        (2D), and return the calibration parameters.

        :param allstar: allStar-like structured array
        :param glatmin: minimum |galactic latitude| for the clean sample
        :param ebvmax: maximum SFD E(B-V) reddening
        :param trange,loggrange,mhrange: parameter selection windows
        :param alpha: color points by [alpha/M] and add low/high-alpha fits
        :param out: output file root for the PNG/PDF plots
        :param calib/grid: choose PARAM, FPARAM, or FPARAM_CLASS[grid] values
        :param dr13: overplot the DR13 relation instead of the new fit
        :param doerr: fit the uncertainty model with err.errfit
        :returns: dict with calibration ranges, 1D/2D fit parameters, rms,
                  and error-model parameters (shape consumed by cal())
    """
    badtarg=['YOUNG','EMBEDDED','EXTENDED','M31','M33','EMISSION','RRLYR','DSPH','MAGCLOUD']
    if calib : param='PARAM'
    else : param = 'FPARAM'
    # comparison against the Berger et al. Teff compilation
    berger=fits.open(os.environ['APOGEE_DIR']+'/data/calib/teff_berger.fits')[1].data
    gd=apselect.select(allstar,badval=['STAR_BAD'],badstar=['MULTIPLE_SUSPECT'],badtarg=badtarg,raw=True)
    i1,i2=match.match(allstar['APOGEE_ID'][gd],berger['APOGEE_ID'])
    fig,ax=plots.multi(1,1,figsize=(12,6))
    plots.plotc(ax,allstar[param][gd[i1],3],allstar[param][gd[i1],0]-berger['TEFF'][i2],allstar[param][gd[i1],0],
                xt='[M/H]',yt='ASPCAP-Berger',zt='Teff',xr=[-3,1],yr=[-500,500],zr=[4500,7000],colorbar=True)
    ax.grid()
    fig.savefig(out+'_berger_mh.png')
    plt.close()
    fig,ax=plots.multi(1,1,figsize=(12,6))
    plots.plotc(ax,allstar[param][gd[i1],0],allstar[param][gd[i1],1],allstar[param][gd[i1],0]-berger['TEFF'][i2],
                xt='Teff',yt='log ',zt='ASPCAP-Berger',xr=[8000,3000],yr=[6,-1],zr=[-250,250],colorbar=True)
    ax.grid()
    fig.savefig(out+'_berger_hr.png')
    plt.close()
    # main calibration sample: good stars within the parameter windows
    gd=apselect.select(allstar,badval=['STAR_BAD'],badstar=['MULTIPLE_SUSPECT'],badtarg=badtarg,teff=trange,mh=mhrange,logg=loggrange,raw=True)
    allstar=allstar[gd]
    # low-extinction, high-latitude stars with valid J and K photometry
    j=np.where((abs(allstar['GLAT'])>glatmin)&(allstar['SFD_EBV']>-0.01)&(allstar['SFD_EBV']<ebvmax)&(abs(allstar['J'])<90)&(abs(allstar['K'])<90))[0]
    # remove non-first-population globular cluster members
    gcstars = ascii.read(os.environ['APOGEE_DIR']+'/data/calib/gc_szabolcs.dat')
    bd=np.where(gcstars['pop'] != 1)[0]
    j = [x for x in j if allstar[x]['APOGEE_ID'] not in gcstars['id'][bd]]
    allstar=allstar[j]
    # photometric Teff: giant relation, replaced by dwarf relation at logg>3.8
    ghb,dtdjk=cte_ghb(allstar['J']-allstar['K'],allstar['FPARAM'][:,3],dwarf=False)
    ghb_dwarf,dtdjk_dwarf=cte_ghb(allstar['J']-allstar['K'],allstar['FPARAM'][:,3],dwarf=True)
    dw=np.where(allstar['FPARAM'][:,1] > 3.8)[0]
    ghb[dw]=ghb_dwarf[dw]
    dtdjk[dw]=dtdjk_dwarf[dw]
    # reject gross outliers (>500 K from the photometric value)
    gd=np.where(abs(allstar['FPARAM'][:,0]-ghb) < 500)[0]
    ghb=ghb[gd]
    dtdjk=dtdjk[gd]
    allstar=allstar[gd]
    print('Teff calibration, number of stars: ', len(allstar))
    # choose which parameter array to compare against the photometric Teff
    if calib :
        param='PARAM'
        teff=allstar[param][:,0]
        logg=allstar[param][:,1]
        mh=allstar[param][:,3]
        am=allstar[param][:,6]
    elif grid is None :
        param='FPARAM'
        teff=allstar[param][:,0]
        logg=allstar[param][:,1]
        mh=allstar[param][:,3]
        am=allstar[param][:,6]
    else :
        param='FPARAM_CLASS'
        teff=allstar[param][:,grid,0]
        logg=allstar[param][:,grid,1]
        mh=allstar[param][:,grid,3]
        am=allstar[param][:,grid,6]
        out=out+'_grid{:1d}'.format(grid)
    fig,ax=plots.multi(1,1,figsize=(12,6))
    plots.plotc(ax,teff,logg,teff-ghb, xt='Teff',yt='log ',zt='ASPCAP-GHB',xr=[8000,3000],yr=[6,-1],zr=[-250,250],colorbar=True)
    ax.grid()
    fig.savefig(out+'_ghb_hr.png')
    plt.close()
    # main difference plot vs [M/H], with binned medians/means overlaid
    fig,ax=plots.multi(1,1,hspace=0.001,wspace=0.001,figsize=(12,6))
    xr=[-3.0,1.0]
    zr=trange
    if dr13: zr=[3500,5500]
    binsize=0.25
    bins=np.arange(-2.5,0.75,binsize)
    if alpha :
        plots.plotc(ax,mh,teff-ghb,am,zr=[-0.1,0.4],xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff',
                    colorbar=True,zt=r'[$\alpha$/M]',rasterized=True,cmap=cmap)
    else :
        plots.plotc(ax,mh,teff-ghb,teff,xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff',
                    colorbar=True,zt='$T_{eff}$',rasterized=True,zr=trange,cmap=cmap)
    ax.grid()
    mean=bindata(mh,teff-ghb,bins,median=False)
    if not dr13: plots.plotp(ax,bins+binsize/2.,mean,marker='o',size=40)
    mean=bindata(mh,teff-ghb,bins,median=True)
    if not dr13: plots.plotp(ax,bins+binsize/2.,mean,marker='o',size=40,color='b')
    ax.text(0.1,0.9,'E(B-V)<{:6.2f}'.format(ebvmax),transform=ax.transAxes)
    # quadratic fit to the binned medians (tefit) and to all points (allfit)
    gd=np.where(np.isfinite(mean))[0]
    tefit = fit.fit1d(bins[gd]+binsize/2.,mean[gd],degree=2,reject=0)
    allfit = fit.fit1d(mh,teff-ghb,ydata=teff,degree=2,reject=0)
    fig2,ax2=plots.multi(1,1)
    # 2D fit of the difference as a function of ([M/H], Teff)
    tefit2 = fit.fit2d(mh,teff,teff-ghb,reject=0,plot=ax2,zr=[-500,200],xt='[M/H]',yt=['Teff'],zt='$\Delta Teff$')
    if doerr:
        errpar = err.errfit(teff,allstar['SNR'],mh,teff-tefit(mh)-ghb,title='Teff',out=out,zr=[0,150])
    else: errpar=0.
    x=np.linspace(-3,1,200)
    rms = (teff-tefit(mh)-ghb).std()
    if dr13:
        # DR13 published relation for reference
        plots.plotl(ax,x,-36.17+95.97*x-15.09*x**2,color='k')
        print(allfit)
    else :
        plots.plotl(ax,x,tefit(x),color='k')
    ax.text(0.98,0.9,'rms: {:6.1f}'.format(rms),transform=ax.transAxes,ha='right')
    # overlay the 2D fit at a few fixed Teff values, colored by temperature
    cmap = matplotlib.cm.get_cmap(cmap)
    for t in np.arange(trange[0],trange[1],500.) :
        rgba=cmap((t-trange[0])/(trange[1]-trange[0]))
        y=x*0.+t
        plots.plotl(ax,x,tefit2(x,y),color=rgba)
    # hook interactive point identification into the plots module
    plots._data_x = mh
    plots._data_y = teff-ghb
    plots._data = allstar
    plots.event(fig)
    if alpha :
        # separate binned fits for low- and high-alpha subsamples
        gdlo=apselect.select(allstar,badval=['STAR_BAD'],teff=trange,mh=mhrange,logg=[0,3.8],alpha=[-0.1,0.1],raw=True)
        mean=bindata(mh[gdlo],teff[gdlo]-ghb[gdlo],bins)
        plots.plotp(ax,bins,mean,marker='o',size=40,color='g')
        tmpfit = fit.fit1d(mh[gdlo],teff[gdlo]-ghb[gdlo],ydata=teff[gdlo],degree=2)
        plots.plotl(ax,x,tmpfit(x))
        print('low alpha: ', len(gdlo))
        gdhi=apselect.select(allstar,badval=['STAR_BAD'],teff=trange,mh=mhrange,logg=[0,3.8],alpha=[0.1,0.5],raw=True)
        mean=bindata(mh[gdhi],teff[gdhi]-ghb[gdhi],bins)
        plots.plotp(ax,bins,mean,marker='o',size=40,color='b')
        tmpfit = fit.fit1d(mh[gdhi],teff[gdhi]-ghb[gdhi],ydata=teff[gdhi],degree=2)
        plots.plotl(ax,x,tmpfit(x))
        print('hi alpha: ', len(gdhi))
    fig.tight_layout()
    fig.savefig(out+'.png')
    plt.close()
    plt.rc('font',size=14)
    plt.rc('axes',titlesize=14)
    plt.rc('axes',labelsize=14)
    fig.savefig(out+'.pdf')
    plt.close()
    try:
        meanfib=allstar['MEANFIB']
    except:
        # MEANFIB not present in this file; use zeros so the plot still works
        meanfib=teff*0.
    # diagnostic 2x2 panel: differences vs log g, mean fiber, and 1D fits
    fig,ax=plots.multi(2,2,hspace=0.001,wspace=0.001,figsize=(12,8))
    plots.plotc(ax[0,0],mh,teff-ghb,logg,zr=[0,5],xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff',colorbar=True,zt='log g',size=2)
    plots.plotc(ax[0,1],mh,teff-ghb,meanfib,zr=[0,300],xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff',colorbar=True,zt='mean fiber',size=2)
    pfit = fit.fit1d(mh,teff-ghb,ydata=teff,plot=ax[1,0],zr=[-500,200],xt='[M/H]',yt='$\Delta Teff$',xr=[-2.7,0.9],yr=[3500,5000],colorbar=True,zt='Teff')
    pfit = fit.fit1d(teff,teff-ghb,ydata=mh,plot=ax[1,1],zr=[-500,200],xt='Teff',yt='$\Delta Teff$',xr=trange,yr=[-2.5,0.5],colorbar=True,zt='[M/H]')
    fig.tight_layout()
    fig.savefig(out+'_b.png')
    plt.close()
    plt.draw()
    # calibration dictionary consumed by cal(); note the wide-open
    # caltemin/caltemax applied at calibration time vs. the fit range
    return {'caltemin': 3000., 'caltemax': 100000., 'temin' : trange[0], 'temax': trange[1],
            'mhmin': mhrange[0], 'mhmax' : mhrange[1],
            'par': tefit.parameters, 'rms' :rms, 'par2d': tefit2.parameters, 'errpar' : errpar}
def irfm(allstar,trange=[4000,5000],mhrange=[-2.5,0.75],out='dteff') :
    """ Compare ASPCAP raw (FPARAM) Teff against IRFM temperature
        compilations (SAGA, CL, TH, SFD samples), plotting differences vs
        [M/H] and vs Teff, and fit the SFD-sample differences.

        :param allstar: allStar-like structured array
        :param trange: Teff selection window
        :param mhrange: [M/H] selection window
        :param out: output file root for the plots
        :returns: the last 1D fit object (SFD differences vs Teff)
    """
    gd=apselect.select(allstar,badval=['STAR_BAD'],teff=trange,mh=mhrange,raw=True)
    allstar=allstar[gd]
    # IRFM compilation; SOURCE column tags each literature sample
    irfm=fits.open(os.environ['APOGEE_DIR']+'/data/calib/irfm_temp.fits')[1].data
    # match each IRFM sub-sample to the APOGEE IDs
    saga=np.where(irfm['SOURCE'] == 'SAGA')[0]
    saga1,saga2=match.match(np.chararray.strip(allstar['APOGEE_ID']),np.chararray.strip(irfm['2MASS ID'][saga]))
    cl=np.where(irfm['SOURCE'] == 'CL')[0]
    cl1,cl2=match.match(np.chararray.strip(allstar['APOGEE_ID']),np.chararray.strip(irfm['2MASS ID'][cl]))
    th=np.where(irfm['SOURCE'] == 'TH')[0]
    th1,th2=match.match(np.chararray.strip(allstar['APOGEE_ID']),np.chararray.strip(irfm['2MASS ID'][th]))
    sfd=np.where(irfm['SOURCE'] == 'SFD')[0]
    sfd1,sfd2=match.match(np.chararray.strip(allstar['APOGEE_ID']),np.chararray.strip(irfm['2MASS ID'][sfd]))
    # 2x2 panel: ASPCAP-IRFM difference vs [M/H], one panel per sample
    fig,ax=plots.multi(2,2,hspace=0.001,wspace=0.001)
    xr=[-3.0,1.0]
    yr=[-400,300]
    zr=[3500,6000]
    bins=np.arange(-2.5,0.75,0.25)
    plots.plotc(ax[0,0],allstar['FPARAM'][saga1,3],allstar['FPARAM'][saga1,0]-irfm['IRFM TEFF'][saga[saga2]],allstar['FPARAM'][saga1,0],zr=zr,xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff')
    mean=bindata(allstar['FPARAM'][saga1,3],allstar['FPARAM'][saga1,0]-irfm['IRFM TEFF'][saga[saga2]],bins)
    plots.plotp(ax[0,0],bins,mean,marker='o',size=40)
    ax[0,0].text(0.1,0.9,'SAGA',transform=ax[0,0].transAxes)
    plots.plotc(ax[0,1],allstar['FPARAM'][cl1,3],allstar['FPARAM'][cl1,0]-irfm['IRFM TEFF'][cl[cl2]],allstar['FPARAM'][cl1,0],zr=zr,xr=xr,yr=yr,xt='[M/H]')
    mean=bindata(allstar['FPARAM'][cl1,3],(allstar['FPARAM'][cl1,0]-irfm['IRFM TEFF'][cl[cl2]]),bins)
    plots.plotp(ax[0,1],bins,mean,marker='o',size=40)
    ax[0,1].text(0.1,0.9,'CL',transform=ax[0,1].transAxes)
    plots.plotc(ax[1,0],allstar['FPARAM'][th1,3],allstar['FPARAM'][th1,0]-irfm['IRFM TEFF'][th[th2]],allstar['FPARAM'][th1,0],zr=zr,xr=xr,yr=yr,xt='[M/H]',yt='ASPCAP-photometric Teff')
    mean=bindata(allstar['FPARAM'][th1,3],(allstar['FPARAM'][th1,0]-irfm['IRFM TEFF'][th[th2]]),bins)
    plots.plotp(ax[1,0],bins,mean,marker='o',size=40)
    ax[1,0].text(0.1,0.9,'TH',transform=ax[1,0].transAxes)
    plots.plotc(ax[1,1],allstar['FPARAM'][sfd1,3],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],allstar['FPARAM'][sfd1,0],zr=zr,xr=xr,yr=yr,xt='[M/H]')
    mean=bindata(allstar['FPARAM'][sfd1,3],(allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]]),bins)
    plots.plotp(ax[1,1],bins,mean,marker='o',size=40)
    ax[1,1].text(0.1,0.9,'SFD',transform=ax[1,1].transAxes)
    fig.savefig(out+'_mh.png')
    # second 2x2 panel: the same differences plotted against Teff
    fig,ax=plots.multi(2,2,hspace=0.001,wspace=0.001)
    zr=[-2.0,0.5]
    yr=[-400,300]
    xr=[6000,3500]
    bins=np.arange(3500,5500,250)
    plots.plotc(ax[0,0],allstar['FPARAM'][saga1,0],allstar['FPARAM'][saga1,0]-irfm['IRFM TEFF'][saga[saga2]],allstar['FPARAM'][saga1,3],zr=zr,xr=xr,yr=yr,xt='Teff',yt='ASPCAP-photometric Teff')
    mean=bindata(allstar['FPARAM'][saga1,0],(allstar['FPARAM'][saga1,0]-irfm['IRFM TEFF'][saga[saga2]]),bins)
    plots.plotp(ax[0,0],bins,mean,marker='o',size=40)
    ax[0,0].text(0.1,0.9,'SAGA',transform=ax[0,0].transAxes)
    plots.plotc(ax[0,1],allstar['FPARAM'][cl1,0],allstar['FPARAM'][cl1,0]-irfm['IRFM TEFF'][cl[cl2]],allstar['FPARAM'][cl1,3],zr=zr,xr=xr,yr=yr,xt='Teff')
    mean=bindata(allstar['FPARAM'][cl1,0],(allstar['FPARAM'][cl1,0]-irfm['IRFM TEFF'][cl[cl2]]),bins)
    plots.plotp(ax[0,1],bins,mean,marker='o',size=40)
    ax[0,1].text(0.1,0.9,'CL',transform=ax[0,1].transAxes)
    plots.plotc(ax[1,0],allstar['FPARAM'][th1,0],allstar['FPARAM'][th1,0]-irfm['IRFM TEFF'][th[th2]],allstar['FPARAM'][th1,3],zr=zr,xr=xr,yr=yr,xt='Teff',yt='ASPCAP-photometric Teff')
    mean=bindata(allstar['FPARAM'][th1,0],(allstar['FPARAM'][th1,0]-irfm['IRFM TEFF'][th[th2]]),bins)
    plots.plotp(ax[1,0],bins,mean,marker='o',size=40)
    ax[1,0].text(0.1,0.9,'TH',transform=ax[1,0].transAxes)
    plots.plotc(ax[1,1],allstar['FPARAM'][sfd1,0],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],allstar['FPARAM'][sfd1,3],zr=zr,xr=xr,yr=yr,xt='Teff')
    mean=bindata(allstar['FPARAM'][sfd1,0],(allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]]),bins)
    plots.plotp(ax[1,1],bins,mean,marker='o',size=40)
    ax[1,1].text(0.1,0.9,'SFD',transform=ax[1,1].transAxes)
    fig.savefig(out+'_teff.png')
    # fits of the SFD-sample differences
    # NOTE(review): here fit.fit2d/fit1d receive an axis as the first
    # positional argument, unlike the calls in ghb() — confirm the fit
    # module supports this calling convention.
    fig,ax=plots.multi(2,2,hspace=0.5,wspace=0.001)
    ax[0,1].xaxis.set_visible(False)
    ax[0,1].yaxis.set_visible(False)
    pfit = fit.fit2d(ax[0,0],allstar['FPARAM'][sfd1,3],allstar['FPARAM'][sfd1,0],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],plot=True,zr=[-500,200],xt='[M/H]',yt=['Teff'],zt='$\Delta Teff$')
    pfit = fit.fit1d(ax[1,0],allstar['FPARAM'][sfd1,3],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],ydata=allstar['FPARAM'][sfd1,0],plot=True,zr=[-500,200],xt='[M/H]',yt='$\Delta Teff$',xr=[-2.7,0.9],yr=[3500,5000])
    pfit = fit.fit1d(ax[1,1],allstar['FPARAM'][sfd1,0],allstar['FPARAM'][sfd1,0]-irfm['IRFM TEFF'][sfd[sfd2]],ydata=allstar['FPARAM'][sfd1,3],plot=True,zr=[-500,200],xt='Teff',xr=[3900,5100],yr=[-2.5,0.5])
    pdb.set_trace()
    return pfit
def cte_ghb(jk0,feh,dwarf=False) :
    """Photometric effective temperature from (J-K)_0 color and [Fe/H].

    Implements the Gonzalez Hernandez & Bonifacio (2009) color-temperature
    relation theta = 5040/Teff = b0 + b1*(J-K) + b2*(J-K)^2
    + b3*(J-K)*[Fe/H] + b4*[Fe/H] + b5*[Fe/H]^2, using the dwarf or giant
    coefficient set.  Works elementwise on scalars or numpy arrays.

    :param jk0: dereddened J-K color
    :param feh: metallicity [Fe/H]
    :param dwarf: use the dwarf coefficients instead of the giant ones
    :returns: tuple (Teff, dTeff/d(J-K))
    """
    if dwarf:
        b0, b1, b2, b3, b4, b5 = 0.6524, 0.5813, 0.1225, -0.0646, 0.0370, 0.0016
    else:
        b0, b1, b2, b3, b4, b5 = 0.6517, 0.6312, 0.0168, -0.0381, 0.0256, 0.0013
    theta = b0 + b1*jk0 + b2*jk0**2 + b3*jk0*feh + b4*feh + b5*feh**2
    # derivative of theta w.r.t. color, then chain rule for Teff = 5040/theta
    dtheta_djk = b1 + 2*b2*jk0 + b3*feh
    dteff_djk = -5040./theta**2*dtheta_djk
    return 5040./theta, dteff_djk
def cal(a,caldir='cal/'):
    """ Apply Teff calibration to an allStar-like structured array.

        Populates a['PARAM'][:,0] and a['PARAMFLAG'][:,0] in place from the
        raw fit values in a['FPARAM'][:,0], using the 2D (Teff, [M/H])
        calibration stored in caldir/tecal.fits.  With caldir='none' the
        raw values are copied through unchanged.
    """
    aspcapmask=bitmask.AspcapBitMask()
    parammask=bitmask.ParamBitMask()
    starmask=bitmask.StarBitMask()
    # select all stars (>= 0 matches everything); NOTE(review): this first
    # selection is dead — it is immediately overwritten by the next line,
    # which keeps stars without the NO_ASPCAP_RESULT flag
    gd=np.where( ((a['ASPCAPFLAG']&aspcapmask.badval()) >= 0) )[0]
    gd=np.where( ((a['ASPCAPFLAG']&aspcapmask.getval('NO_ASPCAP_RESULT')) == 0) )[0]
    # initialize calibrated Teff to NaN and flag all selected stars as
    # out-of-range; the flag is cleared below once a value is assigned
    a['PARAM'][:,0] = np.nan
    a['PARAMFLAG'][gd,0] |= parammask.getval('CALRANGE_BAD')
    if caldir == 'none' :
        # passthrough mode: copy raw values and clear the range flag
        a['PARAM'][gd,0] = a['FPARAM'][gd,0]
        a['PARAMFLAG'][gd,0] &= ~parammask.getval('CALRANGE_BAD')
        return
    # calibration coefficients and validity ranges from the FITS table
    calpars=fits.open(caldir+'/tecal.fits')[1].data[0]
    calteffmin=calpars['caltemin']
    calteffmax=calpars['caltemax']
    # clip the polynomial inputs to the range over which the fit is valid
    teff=np.clip(a['FPARAM'][gd,0],calpars['temin'],calpars['temax'])
    mh=np.clip(a['FPARAM'][gd,3],calpars['mhmin'],calpars['mhmax'])
    try: snr=np.clip(a['SNREV'][gd],0,200.)
    except:
        # older files lack SNREV; drop into the debugger before falling back
        print('No SNREV, continnue with SNR?')
        pdb.set_trace()
        snr=np.clip(a['SNR'][gd],0,200.)
    # only stars within the calibration Teff range get a calibrated value
    ok =np.where((a['FPARAM'][gd,0] >= calteffmin) & (a['FPARAM'][gd,0] <= calteffmax) )[0]
    # subtract the 2D (constant + [M/H] + Teff) calibration surface
    a['PARAM'][gd[ok],0] = a['FPARAM'][gd[ok],0] - (calpars['par2d'][0]+calpars['par2d'][1]*mh[ok]+calpars['par2d'][2]*teff[ok])
    a['PARAMFLAG'][gd[ok],0] &= ~parammask.getval('CALRANGE_BAD')
    return
| true | true |
f731c428457d805737a8bac6b920e623e8b9e75c | 34,668 | py | Python | superset/security/manager.py | Visortech-Solutions/incubator-superset | 4b33597e521e07d1ec74cdbda761e103814f60a2 | [
"Apache-2.0"
] | 1 | 2020-08-31T17:22:25.000Z | 2020-08-31T17:22:25.000Z | superset/security/manager.py | Visortech-Solutions/incubator-superset | 4b33597e521e07d1ec74cdbda761e103814f60a2 | [
"Apache-2.0"
] | 1 | 2020-08-02T04:42:57.000Z | 2020-08-02T04:42:57.000Z | superset/security/manager.py | Visortech-Solutions/incubator-superset | 4b33597e521e07d1ec74cdbda761e103814f60a2 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-few-public-methods
"""A set of constants and methods to manage permissions and security"""
import logging
from typing import Any, Callable, cast, List, Optional, Set, Tuple, TYPE_CHECKING, Union
from flask import current_app, g
from flask_appbuilder import Model
from flask_appbuilder.security.sqla.manager import SecurityManager
from flask_appbuilder.security.sqla.models import (
assoc_permissionview_role,
assoc_user_role,
PermissionView,
)
from flask_appbuilder.security.views import (
PermissionModelView,
PermissionViewModelView,
RoleModelView,
UserModelView,
ViewMenuModelView,
)
from flask_appbuilder.widgets import ListWidget
from sqlalchemy import or_
from sqlalchemy.engine.base import Connection
from sqlalchemy.orm.mapper import Mapper
from sqlalchemy.orm.query import Query as SqlaQuery
from superset import sql_parse
from superset.connectors.connector_registry import ConnectorRegistry
from superset.constants import RouteMethod
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SupersetSecurityException
from superset.utils.core import DatasourceName
if TYPE_CHECKING:
from superset.common.query_context import QueryContext
from superset.connectors.base.models import BaseDatasource
from superset.connectors.druid.models import DruidCluster
from superset.models.core import Database
from superset.models.sql_lab import Query
from superset.sql_parse import Table
from superset.viz import BaseViz
logger = logging.getLogger(__name__)
class SupersetSecurityListWidget(ListWidget):
    """
    List widget pointing FAB's security list views at Superset's own
    template; redeclared here to avoid circular imports.
    """

    template = "superset/fab_overrides/list.html"
class SupersetRoleListWidget(ListWidget):
    """
    FAB's role model view already ships its own custom list widget, so this
    class overrides that override with Superset's role-list template.
    """

    template = "superset/fab_overrides/list_role.html"

    def __init__(self, **kwargs: Any) -> None:
        # Inject the application's appbuilder so the template can resolve it.
        kwargs["appbuilder"] = current_app.appbuilder
        super().__init__(**kwargs)
# Swap Superset's list widgets onto the FAB security model views so the
# security pages render with Superset's templates.
UserModelView.list_widget = SupersetSecurityListWidget
RoleModelView.list_widget = SupersetRoleListWidget
PermissionViewModelView.list_widget = SupersetSecurityListWidget
PermissionModelView.list_widget = SupersetSecurityListWidget
# Limiting routes on FAB model views
UserModelView.include_route_methods = RouteMethod.CRUD_SET | {
    RouteMethod.ACTION,
    RouteMethod.API_READ,
    RouteMethod.ACTION_POST,
    "userinfo",
}
RoleModelView.include_route_methods = RouteMethod.CRUD_SET
# Permission/view-menu model views are list-only in Superset.
PermissionViewModelView.include_route_methods = {RouteMethod.LIST}
PermissionModelView.include_route_methods = {RouteMethod.LIST}
ViewMenuModelView.include_route_methods = {RouteMethod.LIST}
# Trim the role CRUD columns to the fields Superset exposes.
RoleModelView.list_columns = ["name"]
RoleModelView.edit_columns = ["name", "permissions", "user"]
RoleModelView.related_views = []
class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
    SecurityManager
):
    """Superset's security manager; extends FAB's SecurityManager with
    database/schema/datasource-level access checks and role syncing."""
    # Overridden FAB view; disabled in Superset.
    userstatschartview = None
    # Model views whose write operations are restricted (read-only otherwise).
    READ_ONLY_MODEL_VIEWS = {"DatabaseAsync", "DatabaseView", "DruidClusterModelView"}
    # All FAB user model views, regardless of auth backend.
    USER_MODEL_VIEWS = {
        "UserDBModelView",
        "UserLDAPModelView",
        "UserOAuthModelView",
        "UserOIDModelView",
        "UserRemoteUserModelView",
    }
    # Views Gamma users may only read.
    GAMMA_READ_ONLY_MODEL_VIEWS = {
        "SqlMetricInlineView",
        "TableColumnInlineView",
        "TableModelView",
        "DruidColumnInlineView",
        "DruidDatasourceModelView",
        "DruidMetricInlineView",
        "Datasource",
    } | READ_ONLY_MODEL_VIEWS
    # View-menus restricted to Admin users only.
    ADMIN_ONLY_VIEW_MENUS = {
        "AccessRequestsModelView",
        "SQL Lab",
        "Refresh Druid Metadata",
        "ResetPasswordView",
        "RoleModelView",
        "LogModelView",
        "Security",
        "RowLevelSecurityFiltersModelView",
    } | USER_MODEL_VIEWS
    # View-menus available to Alpha (and Admin) users.
    ALPHA_ONLY_VIEW_MENUS = {
        "Manage",
        "CSS Templates",
        "Queries",
        "Import dashboards",
        "Upload a CSV",
    }
    # Permissions restricted to Admin users only.
    # NOTE(review): "can_override_role_permissions" is listed twice; the
    # duplicate is harmless in a set literal but could be removed.
    ADMIN_ONLY_PERMISSIONS = {
        "can_sql_json",  # TODO: move can_sql_json to sql_lab role
        "can_override_role_permissions",
        "can_sync_druid_source",
        "can_override_role_permissions",
        "can_approve",
        "can_update_role",
        "all_query_access",
    }
    # Permissions considered read-only on restricted model views.
    READ_ONLY_PERMISSION = {"can_show", "can_list", "can_get", "can_external_metadata"}
    # Permissions available to Alpha (and Admin) users.
    ALPHA_ONLY_PERMISSIONS = {
        "muldelete",
        "all_database_access",
        "all_datasource_access",
    }
    # Object-level permissions created per datasource/schema/database.
    OBJECT_SPEC_PERMISSIONS = {
        "database_access",
        "schema_access",
        "datasource_access",
        "metric_access",
    }
    # Permissions every authenticated user can exercise.
    ACCESSIBLE_PERMS = {"can_userinfo"}
def get_schema_perm( # pylint: disable=no-self-use
self, database: Union["Database", str], schema: Optional[str] = None
) -> Optional[str]:
"""
Return the database specific schema permission.
:param database: The Superset database or database name
:param schema: The Superset schema name
:return: The database specific schema permission
"""
if schema:
return f"[{database}].[{schema}]"
return None
def unpack_schema_perm( # pylint: disable=no-self-use
self, schema_permission: str
) -> Tuple[str, str]:
# [database_name].[schema_name]
schema_name = schema_permission.split(".")[1][1:-1]
database_name = schema_permission.split(".")[0][1:-1]
return database_name, schema_name
def can_access(self, permission_name: str, view_name: str) -> bool:
"""
Return True if the user can access the FAB permission/view, False otherwise.
Note this method adds protection from has_access failing from missing
permission/view entries.
:param permission_name: The FAB permission name
:param view_name: The FAB view-menu name
:returns: Whether the user can access the FAB permission/view
"""
user = g.user
if user.is_anonymous:
return self.is_item_public(permission_name, view_name)
return self._has_view_access(user, permission_name, view_name)
def can_access_all_queries(self) -> bool:
"""
Return True if the user can access all SQL Lab queries, False otherwise.
:returns: Whether the user can access all queries
"""
return self.can_access("all_query_access", "all_query_access")
def can_access_all_datasources(self) -> bool:
"""
Return True if the user can fully access all the Superset datasources, False
otherwise.
:returns: Whether the user can fully access all Superset datasources
"""
return self.can_access("all_datasource_access", "all_datasource_access")
def can_access_all_databases(self) -> bool:
"""
Return True if the user can fully access all the Superset databases, False
otherwise.
:returns: Whether the user can fully access all Superset databases
"""
return self.can_access("all_database_access", "all_database_access")
def can_access_database(self, database: Union["Database", "DruidCluster"]) -> bool:
"""
Return True if the user can fully access the Superset database, False otherwise.
Note for Druid the database is akin to the Druid cluster.
:param database: The Superset database
:returns: Whether the user can fully access the Superset database
"""
return (
self.can_access_all_datasources()
or self.can_access_all_databases()
or self.can_access("database_access", database.perm) # type: ignore
)
def can_access_schema(self, datasource: "BaseDatasource") -> bool:
"""
Return True if the user can fully access the schema associated with the Superset
datasource, False otherwise.
Note for Druid datasources the database and schema are akin to the Druid cluster
and datasource name prefix respectively, i.e., [schema.]datasource.
:param datasource: The Superset datasource
:returns: Whether the user can fully access the datasource's schema
"""
return (
self.can_access_all_datasources()
or self.can_access_database(datasource.database)
or self.can_access("schema_access", datasource.schema_perm or "")
)
def can_access_datasource(self, datasource: "BaseDatasource") -> bool:
"""
Return True if the user can fully access of the Superset datasource, False
otherwise.
:param datasource: The Superset datasource
:returns: Whether the user can fully access the Superset datasource
"""
try:
self.raise_for_access(datasource=datasource)
except SupersetSecurityException:
return False
return True
    @staticmethod
    def get_datasource_access_error_msg(datasource: "BaseDatasource") -> str:
        """
        Return the error message for the denied Superset datasource.

        :param datasource: The denied Superset datasource
        :returns: The error message
        """
        # The triple-quoted f-string embeds a newline and leading spaces in
        # the returned message; the indentation below is intentional.
        return f"""This endpoint requires the datasource {datasource.name}, database or
            `all_datasource_access` permission"""
@staticmethod
def get_datasource_access_link( # pylint: disable=unused-argument
datasource: "BaseDatasource",
) -> Optional[str]:
"""
Return the link for the denied Superset datasource.
:param datasource: The denied Superset datasource
:returns: The access URL
"""
from superset import conf
return conf.get("PERMISSION_INSTRUCTIONS_LINK")
def get_datasource_access_error_object( # pylint: disable=invalid-name
self, datasource: "BaseDatasource"
) -> SupersetError:
"""
Return the error object for the denied Superset datasource.
:param datasource: The denied Superset datasource
:returns: The error object
"""
return SupersetError(
error_type=SupersetErrorType.DATASOURCE_SECURITY_ACCESS_ERROR,
message=self.get_datasource_access_error_msg(datasource),
level=ErrorLevel.ERROR,
extra={
"link": self.get_datasource_access_link(datasource),
"datasource": datasource.name,
},
)
    def get_table_access_error_msg(  # pylint: disable=no-self-use
        self, tables: Set["Table"]
    ) -> str:
        """
        Return the error message for the denied SQL tables.

        :param tables: The set of denied SQL tables
        :returns: The error message
        """
        quoted_tables = [f"`{table}`" for table in tables]
        # The triple-quoted f-string embeds a newline and leading spaces in
        # the returned message; the indentation below is intentional.
        return f"""You need access to the following tables: {", ".join(quoted_tables)},
            `all_database_access` or `all_datasource_access` permission"""
def get_table_access_error_object(self, tables: Set["Table"]) -> SupersetError:
"""
Return the error object for the denied SQL tables.
:param tables: The set of denied SQL tables
:returns: The error object
"""
return SupersetError(
error_type=SupersetErrorType.TABLE_SECURITY_ACCESS_ERROR,
message=self.get_table_access_error_msg(tables),
level=ErrorLevel.ERROR,
extra={
"link": self.get_table_access_link(tables),
"tables": [str(table) for table in tables],
},
)
def get_table_access_link( # pylint: disable=unused-argument,no-self-use
self, tables: Set["Table"]
) -> Optional[str]:
"""
Return the access link for the denied SQL tables.
:param tables: The set of denied SQL tables
:returns: The access URL
"""
from superset import conf
return conf.get("PERMISSION_INSTRUCTIONS_LINK")
def can_access_table(self, database: "Database", table: "Table") -> bool:
"""
Return True if the user can access the SQL table, False otherwise.
:param database: The SQL database
:param table: The SQL table
:returns: Whether the user can access the SQL table
"""
try:
self.raise_for_access(database=database, table=table)
except SupersetSecurityException:
return False
return True
def get_public_role(self) -> Optional[Any]: # Optional[self.role_model]
from superset import conf
if not conf.get("PUBLIC_ROLE_LIKE_GAMMA", False):
return None
return (
self.get_session.query(self.role_model)
.filter_by(name="Public")
.one_or_none()
)
    def user_view_menu_names(self, permission_name: str) -> Set[str]:
        """
        Return the set of FAB view-menu names the current user holds the
        given permission on, resolved through the user's roles (or through
        the public role for anonymous users).

        :param permission_name: The FAB permission name (e.g. "schema_access")
        :returns: The set of view-menu names
        """
        # base join chain: view-menu -> permission-view -> permission -> role
        base_query = (
            self.get_session.query(self.viewmenu_model.name)
            .join(self.permissionview_model)
            .join(self.permission_model)
            .join(assoc_permissionview_role)
            .join(self.role_model)
        )
        if not g.user.is_anonymous:
            # filter by user id
            view_menu_names = (
                base_query.join(assoc_user_role)
                .join(self.user_model)
                .filter(self.user_model.id == g.user.id)
                .filter(self.permission_model.name == permission_name)
            ).all()
            return {s.name for s in view_menu_names}
        # Properly treat anonymous user
        public_role = self.get_public_role()
        if public_role:
            # filter by public role
            view_menu_names = (
                base_query.filter(self.role_model.id == public_role.id).filter(
                    self.permission_model.name == permission_name
                )
            ).all()
            return {s.name for s in view_menu_names}
        # anonymous user and no public role: no accessible view-menus
        return set()
    def get_schemas_accessible_by_user(
        self, database: "Database", schemas: List[str], hierarchical: bool = True
    ) -> List[str]:
        """
        Return the list of SQL schemas accessible by the user.

        :param database: The SQL database
        :param schemas: The list of eligible SQL schemas
        :param hierarchical: Whether to check using the hierarchical permission logic
            (database-level access grants every schema)
        :returns: The list of accessible SQL schemas
        """
        from superset.connectors.sqla.models import SqlaTable
        if hierarchical and self.can_access_database(database):
            return schemas
        # schema_access: schemas explicitly granted on this database
        accessible_schemas = {
            self.unpack_schema_perm(s)[1]
            for s in self.user_view_menu_names("schema_access")
            if s.startswith(f"[{database}].")
        }
        # datasource_access: schemas reachable through table-level grants
        perms = self.user_view_menu_names("datasource_access")
        if perms:
            tables = (
                self.get_session.query(SqlaTable.schema)
                .filter(SqlaTable.database_id == database.id)
                .filter(SqlaTable.schema.isnot(None))
                .filter(SqlaTable.schema != "")
                .filter(or_(SqlaTable.perm.in_(perms)))
                .distinct()
            )
            accessible_schemas.update([table.schema for table in tables])
        # preserve the order of the eligible schemas list
        return [s for s in schemas if s in accessible_schemas]
    def get_datasources_accessible_by_user(  # pylint: disable=invalid-name
        self,
        database: "Database",
        datasource_names: List[DatasourceName],
        schema: Optional[str] = None,
    ) -> List[DatasourceName]:
        """
        Return the list of SQL tables accessible by the user.

        :param database: The SQL database
        :param datasource_names: The list of eligible SQL tables w/ schema
        :param schema: The fallback SQL schema if not present in the table name
        :returns: The list of accessible SQL tables w/ schema
        """
        # database-level access grants everything
        if self.can_access_database(database):
            return datasource_names
        # schema-level access grants every table within the schema
        if schema:
            schema_perm = self.get_schema_perm(database, schema)
            if schema_perm and self.can_access("schema_access", schema_perm):
                return datasource_names
        # otherwise intersect with the user's datasource/schema grants
        user_perms = self.user_view_menu_names("datasource_access")
        schema_perms = self.user_view_menu_names("schema_access")
        user_datasources = ConnectorRegistry.query_datasources_by_permissions(
            database, user_perms, schema_perms
        )
        if schema:
            # compare unqualified table names within the given schema
            names = {d.table_name for d in user_datasources if d.schema == schema}
            return [d for d in datasource_names if d in names]
        # compare fully qualified "[database].[table]" names
        full_names = {d.full_name for d in user_datasources}
        return [d for d in datasource_names if f"[{database}].[{d}]" in full_names]
def merge_perm(self, permission_name: str, view_menu_name: str) -> None:
"""
Add the FAB permission/view-menu.
:param permission_name: The FAB permission name
:param view_menu_names: The FAB view-menu name
:see: SecurityManager.add_permission_view_menu
"""
logger.warning(
"This method 'merge_perm' is deprecated use add_permission_view_menu"
)
self.add_permission_view_menu(permission_name, view_menu_name)
def _is_user_defined_permission(self, perm: Model) -> bool:
"""
Return True if the FAB permission is user defined, False otherwise.
:param perm: The FAB permission
:returns: Whether the FAB permission is user defined
"""
return perm.permission.name in self.OBJECT_SPEC_PERMISSIONS
def create_custom_permissions(self) -> None:
"""
Create custom FAB permissions.
"""
self.add_permission_view_menu("all_datasource_access", "all_datasource_access")
self.add_permission_view_menu("all_database_access", "all_database_access")
self.add_permission_view_menu("all_query_access", "all_query_access")
def create_missing_perms(self) -> None:
"""
Creates missing FAB permissions for datasources, schemas and metrics.
"""
from superset.connectors.base.models import BaseMetric
from superset.models import core as models
logger.info("Fetching a set of all perms to lookup which ones are missing")
all_pvs = set()
for pv in self.get_session.query(self.permissionview_model).all():
if pv.permission and pv.view_menu:
all_pvs.add((pv.permission.name, pv.view_menu.name))
def merge_pv(view_menu: str, perm: str) -> None:
"""Create permission view menu only if it doesn't exist"""
if view_menu and perm and (view_menu, perm) not in all_pvs:
self.add_permission_view_menu(view_menu, perm)
logger.info("Creating missing datasource permissions.")
datasources = ConnectorRegistry.get_all_datasources()
for datasource in datasources:
merge_pv("datasource_access", datasource.get_perm())
merge_pv("schema_access", datasource.get_schema_perm())
logger.info("Creating missing database permissions.")
databases = self.get_session.query(models.Database).all()
for database in databases:
merge_pv("database_access", database.perm)
logger.info("Creating missing metrics permissions")
metrics: List[BaseMetric] = []
for datasource_class in ConnectorRegistry.sources.values():
metrics += list(self.get_session.query(datasource_class.metric_class).all())
def clean_perms(self) -> None:
"""
Clean up the FAB faulty permissions.
"""
logger.info("Cleaning faulty perms")
sesh = self.get_session
pvms = sesh.query(PermissionView).filter(
or_(
PermissionView.permission # pylint: disable=singleton-comparison
== None,
PermissionView.view_menu # pylint: disable=singleton-comparison
== None,
)
)
deleted_count = pvms.delete()
sesh.commit()
if deleted_count:
logger.info("Deleted %i faulty permissions", deleted_count)
def sync_role_definitions(self) -> None:
"""
Initialize the Superset application with security roles and such.
"""
from superset import conf
logger.info("Syncing role definition")
self.create_custom_permissions()
# Creating default roles
self.set_role("Admin", self._is_admin_pvm)
self.set_role("Alpha", self._is_alpha_pvm)
self.set_role("Gamma", self._is_gamma_pvm)
self.set_role("granter", self._is_granter_pvm)
self.set_role("sql_lab", self._is_sql_lab_pvm)
if conf.get("PUBLIC_ROLE_LIKE_GAMMA", False):
self.set_role("Public", self._is_gamma_pvm)
self.create_missing_perms()
# commit role and view menu updates
self.get_session.commit()
self.clean_perms()
def set_role(
self, role_name: str, pvm_check: Callable[[PermissionView], bool]
) -> None:
"""
Set the FAB permission/views for the role.
:param role_name: The FAB role name
:param pvm_check: The FAB permission/view check
"""
logger.info("Syncing %s perms", role_name)
sesh = self.get_session
pvms = sesh.query(PermissionView).all()
pvms = [p for p in pvms if p.permission and p.view_menu]
role = self.add_role(role_name)
role_pvms = [p for p in pvms if pvm_check(p)]
role.permissions = role_pvms
sesh.merge(role)
sesh.commit()
def _is_admin_only(self, pvm: Model) -> bool:
"""
Return True if the FAB permission/view is accessible to only Admin users,
False otherwise.
Note readonly operations on read only model views are allowed only for admins.
:param pvm: The FAB permission/view
:returns: Whether the FAB object is accessible to only Admin users
"""
if (
pvm.view_menu.name in self.READ_ONLY_MODEL_VIEWS
and pvm.permission.name not in self.READ_ONLY_PERMISSION
):
return True
return (
pvm.view_menu.name in self.ADMIN_ONLY_VIEW_MENUS
or pvm.permission.name in self.ADMIN_ONLY_PERMISSIONS
)
def _is_alpha_only(self, pvm: PermissionModelView) -> bool:
"""
Return True if the FAB permission/view is accessible to only Alpha users,
False otherwise.
:param pvm: The FAB permission/view
:returns: Whether the FAB object is accessible to only Alpha users
"""
if (
pvm.view_menu.name in self.GAMMA_READ_ONLY_MODEL_VIEWS
and pvm.permission.name not in self.READ_ONLY_PERMISSION
):
return True
return (
pvm.view_menu.name in self.ALPHA_ONLY_VIEW_MENUS
or pvm.permission.name in self.ALPHA_ONLY_PERMISSIONS
)
def _is_accessible_to_all(self, pvm: PermissionModelView) -> bool:
"""
Return True if the FAB permission/view is accessible to all, False
otherwise.
:param pvm: The FAB permission/view
:returns: Whether the FAB object is accessible to all users
"""
return pvm.permission.name in self.ACCESSIBLE_PERMS
def _is_admin_pvm(self, pvm: PermissionModelView) -> bool:
"""
Return True if the FAB permission/view is Admin user related, False
otherwise.
:param pvm: The FAB permission/view
:returns: Whether the FAB object is Admin related
"""
return not self._is_user_defined_permission(pvm)
def _is_alpha_pvm(self, pvm: PermissionModelView) -> bool:
"""
Return True if the FAB permission/view is Alpha user related, False
otherwise.
:param pvm: The FAB permission/view
:returns: Whether the FAB object is Alpha related
"""
return not (
self._is_user_defined_permission(pvm) or self._is_admin_only(pvm)
) or self._is_accessible_to_all(pvm)
def _is_gamma_pvm(self, pvm: PermissionModelView) -> bool:
"""
Return True if the FAB permission/view is Gamma user related, False
otherwise.
:param pvm: The FAB permission/view
:returns: Whether the FAB object is Gamma related
"""
return not (
self._is_user_defined_permission(pvm)
or self._is_admin_only(pvm)
or self._is_alpha_only(pvm)
) or self._is_accessible_to_all(pvm)
def _is_sql_lab_pvm(self, pvm: PermissionModelView) -> bool:
"""
Return True if the FAB permission/view is SQL Lab related, False
otherwise.
:param pvm: The FAB permission/view
:returns: Whether the FAB object is SQL Lab related
"""
return (
pvm.view_menu.name
in {"SQL Lab", "SQL Editor", "Query Search", "Saved Queries"}
or pvm.permission.name
in {
"can_sql_json",
"can_csv",
"can_search_queries",
"can_sqllab_viz",
"can_sqllab_table_viz",
"can_sqllab",
}
or (
pvm.view_menu.name in self.USER_MODEL_VIEWS
and pvm.permission.name == "can_list"
)
)
def _is_granter_pvm( # pylint: disable=no-self-use
self, pvm: PermissionModelView
) -> bool:
"""
Return True if the user can grant the FAB permission/view, False
otherwise.
:param pvm: The FAB permission/view
:returns: Whether the user can grant the FAB permission/view
"""
return pvm.permission.name in {"can_override_role_permissions", "can_approve"}
    def set_perm(  # pylint: disable=no-self-use,unused-argument
        self, mapper: Mapper, connection: Connection, target: "BaseDatasource"
    ) -> None:
        """
        Set the datasource permissions.

        Keeps the persisted ``perm``/``schema_perm`` columns in sync with the
        computed values and ensures matching permission and view-menu rows
        exist. NOTE(review): writes go through the raw ``connection`` rather
        than the ORM session -- presumably because this runs as a SQLAlchemy
        mapper event during flush; confirm against the event registration.

        :param mapper: The table mapper
        :param connection: The DB-API connection
        :param target: The mapped instance being persisted
        """
        link_table = target.__table__  # pylint: disable=no-member
        # Refresh the stored perm string if the computed one changed.
        if target.perm != target.get_perm():
            connection.execute(
                link_table.update()
                .where(link_table.c.id == target.id)
                .values(perm=target.get_perm())
            )
        # Same for schema_perm, on models that have it.
        if (
            hasattr(target, "schema_perm")
            and target.schema_perm != target.get_schema_perm()
        ):
            connection.execute(
                link_table.update()
                .where(link_table.c.id == target.id)
                .values(schema_perm=target.get_schema_perm())
            )
        # Permission/view-menu pairs that must exist for this object.
        pvm_names = []
        if target.__tablename__ in {"dbs", "clusters"}:
            pvm_names.append(("database_access", target.get_perm()))
        else:
            pvm_names.append(("datasource_access", target.get_perm()))
            if target.schema:
                pvm_names.append(("schema_access", target.get_schema_perm()))
        # TODO(bogdan): modify slice permissions as well.
        for permission_name, view_menu_name in pvm_names:
            permission = self.find_permission(permission_name)
            view_menu = self.find_view_menu(view_menu_name)
            pv = None
            # Insert any missing permission row, then re-read it to get its id.
            if not permission:
                permission_table = (
                    self.permission_model.__table__  # pylint: disable=no-member
                )
                connection.execute(
                    permission_table.insert().values(name=permission_name)
                )
                permission = self.find_permission(permission_name)
            # Likewise for the view-menu row.
            if not view_menu:
                view_menu_table = (
                    self.viewmenu_model.__table__  # pylint: disable=no-member
                )
                connection.execute(view_menu_table.insert().values(name=view_menu_name))
                view_menu = self.find_view_menu(view_menu_name)
            if permission and view_menu:
                pv = (
                    self.get_session.query(self.permissionview_model)
                    .filter_by(permission=permission, view_menu=view_menu)
                    .first()
                )
            # Link permission and view menu if not already linked.
            if not pv and permission and view_menu:
                permission_view_table = (
                    self.permissionview_model.__table__  # pylint: disable=no-member
                )
                connection.execute(
                    permission_view_table.insert().values(
                        permission_id=permission.id, view_menu_id=view_menu.id
                    )
                )
    def raise_for_access(  # pylint: disable=too-many-arguments,too-many-branches
        self,
        database: Optional["Database"] = None,
        datasource: Optional["BaseDatasource"] = None,
        query: Optional["Query"] = None,
        query_context: Optional["QueryContext"] = None,
        table: Optional["Table"] = None,
        viz: Optional["BaseViz"] = None,
    ) -> None:
        """
        Raise an exception if the user cannot access the resource.

        :param database: The Superset database
        :param datasource: The Superset datasource
        :param query: The SQL Lab query
        :param query_context: The query context
        :param table: The Superset table (requires database)
        :param viz: The visualization
        :raises SupersetSecurityException: If the user cannot access the resource
        """
        from superset.connectors.sqla.models import SqlaTable
        from superset.sql_parse import Table
        # NOTE: precedence makes this ``(database and table) or query`` -- a
        # query supplies its own database below.
        if database and table or query:
            if query:
                database = query.database
            database = cast("Database", database)
            # Database-wide access covers every table in it.
            if self.can_access_database(database):
                return
            if query:
                # Check every table referenced by the SQL statement; tables
                # without an explicit schema fall back to the query's schema.
                tables = {
                    Table(table_.table, table_.schema or query.schema)
                    for table_ in sql_parse.ParsedQuery(query.sql).tables
                }
            elif table:
                tables = {table}
            denied = set()
            for table_ in tables:
                schema_perm = self.get_schema_perm(database, schema=table_.schema)
                if not (schema_perm and self.can_access("schema_access", schema_perm)):
                    datasources = SqlaTable.query_datasources_by_name(
                        database, table_.table, schema=table_.schema
                    )
                    # Access to any one matching datasource suffices.
                    for datasource_ in datasources:
                        if self.can_access("datasource_access", datasource_.perm):
                            break
                    else:
                        denied.add(table_)
            if denied:
                raise SupersetSecurityException(
                    self.get_table_access_error_object(denied)
                )
        if datasource or query_context or viz:
            # Resolve the datasource from whichever wrapper was supplied.
            if query_context:
                datasource = query_context.datasource
            elif viz:
                datasource = viz.datasource
            assert datasource
            if not (
                self.can_access_schema(datasource)
                or self.can_access("datasource_access", datasource.perm or "")
            ):
                raise SupersetSecurityException(
                    self.get_datasource_access_error_object(datasource)
                )
    def get_rls_filters(  # pylint: disable=no-self-use
        self, table: "BaseDatasource"
    ) -> List[SqlaQuery]:
        """
        Retrieves the appropriate row level security filters for the current user and
        the passed table.

        Only filters linked both to the given table and to at least one of the
        current user's roles are returned; contexts without a logged-in user
        get no filters.

        :param table: The table to check against
        :returns: A list of ``(id, clause)`` filter rows
        """
        if hasattr(g, "user") and hasattr(g.user, "id"):
            from superset.connectors.sqla.models import (
                RLSFilterRoles,
                RLSFilterTables,
                RowLevelSecurityFilter,
            )
            # IDs of the roles held by the current user.
            user_roles = (
                self.get_session.query(assoc_user_role.c.role_id)
                .filter(assoc_user_role.c.user_id == g.user.id)
                .subquery()
            )
            # RLS filters attached to any of those roles.
            filter_roles = (
                self.get_session.query(RLSFilterRoles.c.rls_filter_id)
                .filter(RLSFilterRoles.c.role_id.in_(user_roles))
                .subquery()
            )
            # RLS filters attached to the requested table.
            filter_tables = (
                self.get_session.query(RLSFilterTables.c.rls_filter_id)
                .filter(RLSFilterTables.c.table_id == table.id)
                .subquery()
            )
            # Intersection: filters matching both the table and a user role.
            query = (
                self.get_session.query(
                    RowLevelSecurityFilter.id, RowLevelSecurityFilter.clause
                )
                .filter(RowLevelSecurityFilter.id.in_(filter_tables))
                .filter(RowLevelSecurityFilter.id.in_(filter_roles))
            )
            return query.all()
        return []
def get_rls_ids(self, table: "BaseDatasource") -> List[int]:
"""
Retrieves the appropriate row level security filters IDs for the current user
and the passed table.
:param table: The table to check against
:returns: A list of IDs
"""
ids = [f.id for f in self.get_rls_filters(table)]
ids.sort() # Combinations rather than permutations
return ids
| 35.089069 | 88 | 0.625937 |
import logging
from typing import Any, Callable, cast, List, Optional, Set, Tuple, TYPE_CHECKING, Union
from flask import current_app, g
from flask_appbuilder import Model
from flask_appbuilder.security.sqla.manager import SecurityManager
from flask_appbuilder.security.sqla.models import (
assoc_permissionview_role,
assoc_user_role,
PermissionView,
)
from flask_appbuilder.security.views import (
PermissionModelView,
PermissionViewModelView,
RoleModelView,
UserModelView,
ViewMenuModelView,
)
from flask_appbuilder.widgets import ListWidget
from sqlalchemy import or_
from sqlalchemy.engine.base import Connection
from sqlalchemy.orm.mapper import Mapper
from sqlalchemy.orm.query import Query as SqlaQuery
from superset import sql_parse
from superset.connectors.connector_registry import ConnectorRegistry
from superset.constants import RouteMethod
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SupersetSecurityException
from superset.utils.core import DatasourceName
if TYPE_CHECKING:
from superset.common.query_context import QueryContext
from superset.connectors.base.models import BaseDatasource
from superset.connectors.druid.models import DruidCluster
from superset.models.core import Database
from superset.models.sql_lab import Query
from superset.sql_parse import Table
from superset.viz import BaseViz
logger = logging.getLogger(__name__)
class SupersetSecurityListWidget(ListWidget):
template = "superset/fab_overrides/list.html"
class SupersetRoleListWidget(ListWidget):
template = "superset/fab_overrides/list_role.html"
def __init__(self, **kwargs: Any) -> None:
kwargs["appbuilder"] = current_app.appbuilder
super().__init__(**kwargs)
UserModelView.list_widget = SupersetSecurityListWidget
RoleModelView.list_widget = SupersetRoleListWidget
PermissionViewModelView.list_widget = SupersetSecurityListWidget
PermissionModelView.list_widget = SupersetSecurityListWidget
UserModelView.include_route_methods = RouteMethod.CRUD_SET | {
RouteMethod.ACTION,
RouteMethod.API_READ,
RouteMethod.ACTION_POST,
"userinfo",
}
RoleModelView.include_route_methods = RouteMethod.CRUD_SET
PermissionViewModelView.include_route_methods = {RouteMethod.LIST}
PermissionModelView.include_route_methods = {RouteMethod.LIST}
ViewMenuModelView.include_route_methods = {RouteMethod.LIST}
RoleModelView.list_columns = ["name"]
RoleModelView.edit_columns = ["name", "permissions", "user"]
RoleModelView.related_views = []
class SupersetSecurityManager(
SecurityManager
):
userstatschartview = None
READ_ONLY_MODEL_VIEWS = {"DatabaseAsync", "DatabaseView", "DruidClusterModelView"}
USER_MODEL_VIEWS = {
"UserDBModelView",
"UserLDAPModelView",
"UserOAuthModelView",
"UserOIDModelView",
"UserRemoteUserModelView",
}
GAMMA_READ_ONLY_MODEL_VIEWS = {
"SqlMetricInlineView",
"TableColumnInlineView",
"TableModelView",
"DruidColumnInlineView",
"DruidDatasourceModelView",
"DruidMetricInlineView",
"Datasource",
} | READ_ONLY_MODEL_VIEWS
ADMIN_ONLY_VIEW_MENUS = {
"AccessRequestsModelView",
"SQL Lab",
"Refresh Druid Metadata",
"ResetPasswordView",
"RoleModelView",
"LogModelView",
"Security",
"RowLevelSecurityFiltersModelView",
} | USER_MODEL_VIEWS
ALPHA_ONLY_VIEW_MENUS = {
"Manage",
"CSS Templates",
"Queries",
"Import dashboards",
"Upload a CSV",
}
ADMIN_ONLY_PERMISSIONS = {
"can_sql_json",
"can_override_role_permissions",
"can_sync_druid_source",
"can_override_role_permissions",
"can_approve",
"can_update_role",
"all_query_access",
}
READ_ONLY_PERMISSION = {"can_show", "can_list", "can_get", "can_external_metadata"}
ALPHA_ONLY_PERMISSIONS = {
"muldelete",
"all_database_access",
"all_datasource_access",
}
OBJECT_SPEC_PERMISSIONS = {
"database_access",
"schema_access",
"datasource_access",
"metric_access",
}
ACCESSIBLE_PERMS = {"can_userinfo"}
def get_schema_perm(
self, database: Union["Database", str], schema: Optional[str] = None
) -> Optional[str]:
if schema:
return f"[{database}].[{schema}]"
return None
def unpack_schema_perm(
self, schema_permission: str
) -> Tuple[str, str]:
schema_name = schema_permission.split(".")[1][1:-1]
database_name = schema_permission.split(".")[0][1:-1]
return database_name, schema_name
def can_access(self, permission_name: str, view_name: str) -> bool:
user = g.user
if user.is_anonymous:
return self.is_item_public(permission_name, view_name)
return self._has_view_access(user, permission_name, view_name)
def can_access_all_queries(self) -> bool:
return self.can_access("all_query_access", "all_query_access")
def can_access_all_datasources(self) -> bool:
return self.can_access("all_datasource_access", "all_datasource_access")
def can_access_all_databases(self) -> bool:
return self.can_access("all_database_access", "all_database_access")
def can_access_database(self, database: Union["Database", "DruidCluster"]) -> bool:
return (
self.can_access_all_datasources()
or self.can_access_all_databases()
or self.can_access("database_access", database.perm)
)
def can_access_schema(self, datasource: "BaseDatasource") -> bool:
return (
self.can_access_all_datasources()
or self.can_access_database(datasource.database)
or self.can_access("schema_access", datasource.schema_perm or "")
)
def can_access_datasource(self, datasource: "BaseDatasource") -> bool:
try:
self.raise_for_access(datasource=datasource)
except SupersetSecurityException:
return False
return True
@staticmethod
def get_datasource_access_error_msg(datasource: "BaseDatasource") -> str:
return f"""This endpoint requires the datasource {datasource.name}, database or
`all_datasource_access` permission"""
@staticmethod
def get_datasource_access_link(
datasource: "BaseDatasource",
) -> Optional[str]:
from superset import conf
return conf.get("PERMISSION_INSTRUCTIONS_LINK")
def get_datasource_access_error_object(
self, datasource: "BaseDatasource"
) -> SupersetError:
return SupersetError(
error_type=SupersetErrorType.DATASOURCE_SECURITY_ACCESS_ERROR,
message=self.get_datasource_access_error_msg(datasource),
level=ErrorLevel.ERROR,
extra={
"link": self.get_datasource_access_link(datasource),
"datasource": datasource.name,
},
)
def get_table_access_error_msg(
self, tables: Set["Table"]
) -> str:
quoted_tables = [f"`{table}`" for table in tables]
return f"""You need access to the following tables: {", ".join(quoted_tables)},
`all_database_access` or `all_datasource_access` permission"""
def get_table_access_error_object(self, tables: Set["Table"]) -> SupersetError:
return SupersetError(
error_type=SupersetErrorType.TABLE_SECURITY_ACCESS_ERROR,
message=self.get_table_access_error_msg(tables),
level=ErrorLevel.ERROR,
extra={
"link": self.get_table_access_link(tables),
"tables": [str(table) for table in tables],
},
)
def get_table_access_link(
self, tables: Set["Table"]
) -> Optional[str]:
from superset import conf
return conf.get("PERMISSION_INSTRUCTIONS_LINK")
def can_access_table(self, database: "Database", table: "Table") -> bool:
try:
self.raise_for_access(database=database, table=table)
except SupersetSecurityException:
return False
return True
def get_public_role(self) -> Optional[Any]:
from superset import conf
if not conf.get("PUBLIC_ROLE_LIKE_GAMMA", False):
return None
return (
self.get_session.query(self.role_model)
.filter_by(name="Public")
.one_or_none()
)
def user_view_menu_names(self, permission_name: str) -> Set[str]:
base_query = (
self.get_session.query(self.viewmenu_model.name)
.join(self.permissionview_model)
.join(self.permission_model)
.join(assoc_permissionview_role)
.join(self.role_model)
)
if not g.user.is_anonymous:
view_menu_names = (
base_query.join(assoc_user_role)
.join(self.user_model)
.filter(self.user_model.id == g.user.id)
.filter(self.permission_model.name == permission_name)
).all()
return {s.name for s in view_menu_names}
public_role = self.get_public_role()
if public_role:
view_menu_names = (
base_query.filter(self.role_model.id == public_role.id).filter(
self.permission_model.name == permission_name
)
).all()
return {s.name for s in view_menu_names}
return set()
def get_schemas_accessible_by_user(
self, database: "Database", schemas: List[str], hierarchical: bool = True
) -> List[str]:
from superset.connectors.sqla.models import SqlaTable
if hierarchical and self.can_access_database(database):
return schemas
accessible_schemas = {
self.unpack_schema_perm(s)[1]
for s in self.user_view_menu_names("schema_access")
if s.startswith(f"[{database}].")
}
perms = self.user_view_menu_names("datasource_access")
if perms:
tables = (
self.get_session.query(SqlaTable.schema)
.filter(SqlaTable.database_id == database.id)
.filter(SqlaTable.schema.isnot(None))
.filter(SqlaTable.schema != "")
.filter(or_(SqlaTable.perm.in_(perms)))
.distinct()
)
accessible_schemas.update([table.schema for table in tables])
return [s for s in schemas if s in accessible_schemas]
def get_datasources_accessible_by_user(
self,
database: "Database",
datasource_names: List[DatasourceName],
schema: Optional[str] = None,
) -> List[DatasourceName]:
if self.can_access_database(database):
return datasource_names
if schema:
schema_perm = self.get_schema_perm(database, schema)
if schema_perm and self.can_access("schema_access", schema_perm):
return datasource_names
user_perms = self.user_view_menu_names("datasource_access")
schema_perms = self.user_view_menu_names("schema_access")
user_datasources = ConnectorRegistry.query_datasources_by_permissions(
database, user_perms, schema_perms
)
if schema:
names = {d.table_name for d in user_datasources if d.schema == schema}
return [d for d in datasource_names if d in names]
full_names = {d.full_name for d in user_datasources}
return [d for d in datasource_names if f"[{database}].[{d}]" in full_names]
def merge_perm(self, permission_name: str, view_menu_name: str) -> None:
logger.warning(
"This method 'merge_perm' is deprecated use add_permission_view_menu"
)
self.add_permission_view_menu(permission_name, view_menu_name)
def _is_user_defined_permission(self, perm: Model) -> bool:
return perm.permission.name in self.OBJECT_SPEC_PERMISSIONS
def create_custom_permissions(self) -> None:
self.add_permission_view_menu("all_datasource_access", "all_datasource_access")
self.add_permission_view_menu("all_database_access", "all_database_access")
self.add_permission_view_menu("all_query_access", "all_query_access")
def create_missing_perms(self) -> None:
from superset.connectors.base.models import BaseMetric
from superset.models import core as models
logger.info("Fetching a set of all perms to lookup which ones are missing")
all_pvs = set()
for pv in self.get_session.query(self.permissionview_model).all():
if pv.permission and pv.view_menu:
all_pvs.add((pv.permission.name, pv.view_menu.name))
def merge_pv(view_menu: str, perm: str) -> None:
if view_menu and perm and (view_menu, perm) not in all_pvs:
self.add_permission_view_menu(view_menu, perm)
logger.info("Creating missing datasource permissions.")
datasources = ConnectorRegistry.get_all_datasources()
for datasource in datasources:
merge_pv("datasource_access", datasource.get_perm())
merge_pv("schema_access", datasource.get_schema_perm())
logger.info("Creating missing database permissions.")
databases = self.get_session.query(models.Database).all()
for database in databases:
merge_pv("database_access", database.perm)
logger.info("Creating missing metrics permissions")
metrics: List[BaseMetric] = []
for datasource_class in ConnectorRegistry.sources.values():
metrics += list(self.get_session.query(datasource_class.metric_class).all())
def clean_perms(self) -> None:
logger.info("Cleaning faulty perms")
sesh = self.get_session
pvms = sesh.query(PermissionView).filter(
or_(
PermissionView.permission
== None,
PermissionView.view_menu
== None,
)
)
deleted_count = pvms.delete()
sesh.commit()
if deleted_count:
logger.info("Deleted %i faulty permissions", deleted_count)
def sync_role_definitions(self) -> None:
from superset import conf
logger.info("Syncing role definition")
self.create_custom_permissions()
self.set_role("Admin", self._is_admin_pvm)
self.set_role("Alpha", self._is_alpha_pvm)
self.set_role("Gamma", self._is_gamma_pvm)
self.set_role("granter", self._is_granter_pvm)
self.set_role("sql_lab", self._is_sql_lab_pvm)
if conf.get("PUBLIC_ROLE_LIKE_GAMMA", False):
self.set_role("Public", self._is_gamma_pvm)
self.create_missing_perms()
self.get_session.commit()
self.clean_perms()
def set_role(
self, role_name: str, pvm_check: Callable[[PermissionView], bool]
) -> None:
logger.info("Syncing %s perms", role_name)
sesh = self.get_session
pvms = sesh.query(PermissionView).all()
pvms = [p for p in pvms if p.permission and p.view_menu]
role = self.add_role(role_name)
role_pvms = [p for p in pvms if pvm_check(p)]
role.permissions = role_pvms
sesh.merge(role)
sesh.commit()
def _is_admin_only(self, pvm: Model) -> bool:
if (
pvm.view_menu.name in self.READ_ONLY_MODEL_VIEWS
and pvm.permission.name not in self.READ_ONLY_PERMISSION
):
return True
return (
pvm.view_menu.name in self.ADMIN_ONLY_VIEW_MENUS
or pvm.permission.name in self.ADMIN_ONLY_PERMISSIONS
)
def _is_alpha_only(self, pvm: PermissionModelView) -> bool:
if (
pvm.view_menu.name in self.GAMMA_READ_ONLY_MODEL_VIEWS
and pvm.permission.name not in self.READ_ONLY_PERMISSION
):
return True
return (
pvm.view_menu.name in self.ALPHA_ONLY_VIEW_MENUS
or pvm.permission.name in self.ALPHA_ONLY_PERMISSIONS
)
def _is_accessible_to_all(self, pvm: PermissionModelView) -> bool:
return pvm.permission.name in self.ACCESSIBLE_PERMS
def _is_admin_pvm(self, pvm: PermissionModelView) -> bool:
return not self._is_user_defined_permission(pvm)
def _is_alpha_pvm(self, pvm: PermissionModelView) -> bool:
return not (
self._is_user_defined_permission(pvm) or self._is_admin_only(pvm)
) or self._is_accessible_to_all(pvm)
def _is_gamma_pvm(self, pvm: PermissionModelView) -> bool:
return not (
self._is_user_defined_permission(pvm)
or self._is_admin_only(pvm)
or self._is_alpha_only(pvm)
) or self._is_accessible_to_all(pvm)
def _is_sql_lab_pvm(self, pvm: PermissionModelView) -> bool:
return (
pvm.view_menu.name
in {"SQL Lab", "SQL Editor", "Query Search", "Saved Queries"}
or pvm.permission.name
in {
"can_sql_json",
"can_csv",
"can_search_queries",
"can_sqllab_viz",
"can_sqllab_table_viz",
"can_sqllab",
}
or (
pvm.view_menu.name in self.USER_MODEL_VIEWS
and pvm.permission.name == "can_list"
)
)
def _is_granter_pvm(
self, pvm: PermissionModelView
) -> bool:
return pvm.permission.name in {"can_override_role_permissions", "can_approve"}
def set_perm(
self, mapper: Mapper, connection: Connection, target: "BaseDatasource"
) -> None:
link_table = target.__table__
if target.perm != target.get_perm():
connection.execute(
link_table.update()
.where(link_table.c.id == target.id)
.values(perm=target.get_perm())
)
if (
hasattr(target, "schema_perm")
and target.schema_perm != target.get_schema_perm()
):
connection.execute(
link_table.update()
.where(link_table.c.id == target.id)
.values(schema_perm=target.get_schema_perm())
)
pvm_names = []
if target.__tablename__ in {"dbs", "clusters"}:
pvm_names.append(("database_access", target.get_perm()))
else:
pvm_names.append(("datasource_access", target.get_perm()))
if target.schema:
pvm_names.append(("schema_access", target.get_schema_perm()))
for permission_name, view_menu_name in pvm_names:
permission = self.find_permission(permission_name)
view_menu = self.find_view_menu(view_menu_name)
pv = None
if not permission:
permission_table = (
self.permission_model.__table__
)
connection.execute(
permission_table.insert().values(name=permission_name)
)
permission = self.find_permission(permission_name)
if not view_menu:
view_menu_table = (
self.viewmenu_model.__table__
)
connection.execute(view_menu_table.insert().values(name=view_menu_name))
view_menu = self.find_view_menu(view_menu_name)
if permission and view_menu:
pv = (
self.get_session.query(self.permissionview_model)
.filter_by(permission=permission, view_menu=view_menu)
.first()
)
if not pv and permission and view_menu:
permission_view_table = (
self.permissionview_model.__table__
)
connection.execute(
permission_view_table.insert().values(
permission_id=permission.id, view_menu_id=view_menu.id
)
)
def raise_for_access(
self,
database: Optional["Database"] = None,
datasource: Optional["BaseDatasource"] = None,
query: Optional["Query"] = None,
query_context: Optional["QueryContext"] = None,
table: Optional["Table"] = None,
viz: Optional["BaseViz"] = None,
) -> None:
from superset.connectors.sqla.models import SqlaTable
from superset.sql_parse import Table
if database and table or query:
if query:
database = query.database
database = cast("Database", database)
if self.can_access_database(database):
return
if query:
tables = {
Table(table_.table, table_.schema or query.schema)
for table_ in sql_parse.ParsedQuery(query.sql).tables
}
elif table:
tables = {table}
denied = set()
for table_ in tables:
schema_perm = self.get_schema_perm(database, schema=table_.schema)
if not (schema_perm and self.can_access("schema_access", schema_perm)):
datasources = SqlaTable.query_datasources_by_name(
database, table_.table, schema=table_.schema
)
for datasource_ in datasources:
if self.can_access("datasource_access", datasource_.perm):
break
else:
denied.add(table_)
if denied:
raise SupersetSecurityException(
self.get_table_access_error_object(denied)
)
if datasource or query_context or viz:
if query_context:
datasource = query_context.datasource
elif viz:
datasource = viz.datasource
assert datasource
if not (
self.can_access_schema(datasource)
or self.can_access("datasource_access", datasource.perm or "")
):
raise SupersetSecurityException(
self.get_datasource_access_error_object(datasource)
)
def get_rls_filters(
self, table: "BaseDatasource"
) -> List[SqlaQuery]:
if hasattr(g, "user") and hasattr(g.user, "id"):
from superset.connectors.sqla.models import (
RLSFilterRoles,
RLSFilterTables,
RowLevelSecurityFilter,
)
user_roles = (
self.get_session.query(assoc_user_role.c.role_id)
.filter(assoc_user_role.c.user_id == g.user.id)
.subquery()
)
filter_roles = (
self.get_session.query(RLSFilterRoles.c.rls_filter_id)
.filter(RLSFilterRoles.c.role_id.in_(user_roles))
.subquery()
)
filter_tables = (
self.get_session.query(RLSFilterTables.c.rls_filter_id)
.filter(RLSFilterTables.c.table_id == table.id)
.subquery()
)
query = (
self.get_session.query(
RowLevelSecurityFilter.id, RowLevelSecurityFilter.clause
)
.filter(RowLevelSecurityFilter.id.in_(filter_tables))
.filter(RowLevelSecurityFilter.id.in_(filter_roles))
)
return query.all()
return []
def get_rls_ids(self, table: "BaseDatasource") -> List[int]:
ids = [f.id for f in self.get_rls_filters(table)]
ids.sort()
return ids
| true | true |
f731c50ed0f870cecde73f61bbdb8ad20a18c647 | 7,795 | py | Python | api_tests/users/views/test_user_preprints_list.py | listinc/osf.io | b9a0357f3e9b6e905b732e750a16e9452c459d78 | [
"Apache-2.0"
] | 1 | 2019-12-23T04:30:20.000Z | 2019-12-23T04:30:20.000Z | api_tests/users/views/test_user_preprints_list.py | listinc/osf.io | b9a0357f3e9b6e905b732e750a16e9452c459d78 | [
"Apache-2.0"
] | 20 | 2020-03-24T16:48:03.000Z | 2022-03-08T22:38:38.000Z | api_tests/users/views/test_user_preprints_list.py | listinc/osf.io | b9a0357f3e9b6e905b732e750a16e9452c459d78 | [
"Apache-2.0"
] | null | null | null | import pytest
from api.base.settings.defaults import API_BASE
from api_tests.preprints.filters.test_filters import PreprintsListFilteringMixin
from api_tests.preprints.views.test_preprint_list_mixin import PreprintIsPublishedListMixin, PreprintIsValidListMixin
from osf_tests.factories import (
ProjectFactory,
PreprintFactory,
AuthUserFactory,
PreprintProviderFactory,
)
from osf.utils import permissions
@pytest.mark.django_db
class TestUserPreprints:
    """Tests for GET /users/<id>/preprints/ under several auth states."""

    @pytest.fixture()
    def user_one(self):
        return AuthUserFactory()

    @pytest.fixture()
    def user_two(self):
        return AuthUserFactory()

    @pytest.fixture()
    def preprint(self, user_one):
        return PreprintFactory(title='Preprint User One', creator=user_one)

    @pytest.fixture()
    def project_public(self, user_one):
        return ProjectFactory(
            title='Public Project User One',
            is_public=True,
            creator=user_one)

    @pytest.fixture()
    def project_private(self, user_one):
        return ProjectFactory(
            title='Private Project User One',
            is_public=False,
            creator=user_one)

    def test_gets(
            self, app, user_one, user_two, preprint,
            project_public, project_private):
        # Every scenario below hits the same list URL.
        url = '/{}users/{}/preprints/'.format(API_BASE, user_one._id)

        # test_authorized_in_gets_200
        res = app.get(url, auth=user_one.auth)
        assert res.status_code == 200
        assert res.content_type == 'application/vnd.api+json'

        # test_anonymous_gets_200
        res = app.get(url)
        assert res.status_code == 200
        assert res.content_type == 'application/vnd.api+json'

        # test_get_preprints_logged_in
        res = app.get(url, auth=user_one.auth)
        returned_ids = [entry['id'] for entry in res.json['data']]
        assert preprint._id in returned_ids
        assert project_public._id not in returned_ids
        assert project_private._id not in returned_ids

        # test_get_projects_not_logged_in
        res = app.get(url)
        returned_ids = [entry['id'] for entry in res.json['data']]
        assert preprint._id in returned_ids
        assert project_public._id not in returned_ids
        assert project_private._id not in returned_ids

        # test_get_projects_logged_in_as_different_user
        res = app.get(url, auth=user_two.auth)
        returned_ids = [entry['id'] for entry in res.json['data']]
        assert preprint._id in returned_ids
        assert project_public._id not in returned_ids
        assert project_private._id not in returned_ids
class TestUserPreprintsListFiltering(PreprintsListFilteringMixin):
    """Runs PreprintsListFilteringMixin against the user preprints endpoint."""

    @pytest.fixture()
    def user(self):
        return AuthUserFactory()

    @pytest.fixture()
    def provider_one(self):
        return PreprintProviderFactory(name='Sockarxiv')

    @pytest.fixture()
    def provider_two(self):
        return PreprintProviderFactory(name='Piratearxiv')

    @pytest.fixture()
    def provider_three(self, provider_one):
        # Deliberately the same provider instance as provider_one.
        return provider_one

    @pytest.fixture()
    def project_one(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def project_two(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def project_three(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def url(self, user):
        return '/{}users/{}/preprints/?version=2.2&'.format(API_BASE, user._id)

    def test_provider_filter_equals_returns_one(
            self, app, user, provider_two, preprint_two, provider_url):
        # Filtering on provider_two must match exactly its one preprint.
        filter_url = '{}{}'.format(provider_url, provider_two._id)
        res = app.get(filter_url, auth=user.auth)
        returned = [item['id'] for item in res.json['data']]
        assert returned == [preprint_two._id]
class TestUserPreprintIsPublishedList(PreprintIsPublishedListMixin):
    """Runs PreprintIsPublishedListMixin against the user preprints endpoint.

    Fixtures below override the mixin's hooks; ``user_write_contrib`` and
    ``subject`` come from the mixin itself.
    """
    @pytest.fixture()
    def user_admin_contrib(self):
        # Admin contributor on the projects/preprints under test.
        return AuthUserFactory()
    @pytest.fixture()
    def provider_one(self):
        return PreprintProviderFactory()
    @pytest.fixture()
    def provider_two(self, provider_one):
        # Intentionally the same provider as provider_one.
        return provider_one
    @pytest.fixture()
    def project_published(self, user_admin_contrib):
        return ProjectFactory(creator=user_admin_contrib, is_public=True)
    @pytest.fixture()
    def project_public(self, user_admin_contrib, user_write_contrib):
        # Public project with an extra write contributor from the mixin.
        project_public = ProjectFactory(
            creator=user_admin_contrib, is_public=True)
        project_public.add_contributor(
            user_write_contrib,
            permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS,
            save=True)
        return project_public
    @pytest.fixture()
    def url(self, user_admin_contrib):
        return '/{}users/{}/preprints/?version=2.2&'.format(
            API_BASE, user_admin_contrib._id)
    @pytest.fixture()
    def preprint_unpublished(
            self, user_admin_contrib, provider_one,
            project_public, subject):
        # An unpublished preprint only admins should see in their own list.
        return PreprintFactory(
            creator=user_admin_contrib,
            filename='mgla.pdf',
            provider=provider_one,
            subjects=[[subject._id]],
            project=project_public,
            is_published=False)
    def test_unpublished_visible_to_admins(
            self, app, user_admin_contrib, preprint_unpublished,
            preprint_published, url):
        res = app.get(url, auth=user_admin_contrib.auth)
        assert len(res.json['data']) == 2
        assert preprint_unpublished._id in [d['id'] for d in res.json['data']]
    def test_unpublished_invisible_to_write_contribs(
            self, app, user_write_contrib, preprint_unpublished,
            preprint_published, url):
        # Write contribs querying the admin's list see only the published one.
        res = app.get(url, auth=user_write_contrib.auth)
        assert len(res.json['data']) == 1
        assert preprint_unpublished._id not in [
            d['id'] for d in res.json['data']]
    def test_filter_published_false_write_contrib(
            self, app, user_write_contrib, preprint_unpublished, url):
        res = app.get(
            '{}filter[is_published]=false'.format(url),
            auth=user_write_contrib.auth)
        assert len(res.json['data']) == 0
class TestUserPreprintIsValidList(PreprintIsValidListMixin):
    """Validity checks for preprints on the user preprints endpoint."""

    @pytest.fixture()
    def user_admin_contrib(self):
        return AuthUserFactory()

    @pytest.fixture()
    def project(self, user_admin_contrib, user_write_contrib):
        proj = ProjectFactory(creator=user_admin_contrib, is_public=True)
        proj.add_contributor(
            user_write_contrib,
            permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS,
            save=True)
        return proj

    @pytest.fixture()
    def provider(self):
        return PreprintProviderFactory()

    @pytest.fixture()
    def url(self, user_admin_contrib):
        return f'/{API_BASE}users/{user_admin_contrib._id}/preprints/?version=2.2&'

    # test override: user nodes/preprints routes do not show private nodes to
    # anyone but the self
    def test_preprint_private_visible_write(
            self, app, user_write_contrib, project, preprint, url):
        # Visible while the project is public...
        res = app.get(url, auth=user_write_contrib.auth)
        assert len(res.json['data']) == 1
        # ...and hidden once it becomes private.
        project.is_public = False
        project.save()
        res = app.get(url, auth=user_write_contrib.auth)
        assert len(res.json['data']) == 0
| 32.615063 | 117 | 0.655805 | import pytest
from api.base.settings.defaults import API_BASE
from api_tests.preprints.filters.test_filters import PreprintsListFilteringMixin
from api_tests.preprints.views.test_preprint_list_mixin import PreprintIsPublishedListMixin, PreprintIsValidListMixin
from osf_tests.factories import (
ProjectFactory,
PreprintFactory,
AuthUserFactory,
PreprintProviderFactory,
)
from osf.utils import permissions
@pytest.mark.django_db
class TestUserPreprints:
    """Tests for the /users/<id>/preprints/ list endpoint."""

    @pytest.fixture()
    def user_one(self):
        return AuthUserFactory()

    @pytest.fixture()
    def user_two(self):
        return AuthUserFactory()

    @pytest.fixture()
    def preprint(self, user_one):
        return PreprintFactory(title='Preprint User One', creator=user_one)

    @pytest.fixture()
    def project_public(self, user_one):
        return ProjectFactory(
            title='Public Project User One',
            is_public=True,
            creator=user_one)

    @pytest.fixture()
    def project_private(self, user_one):
        return ProjectFactory(
            title='Private Project User One',
            is_public=False,
            creator=user_one)

    def test_gets(
            self, app, user_one, user_two, preprint,
            project_public, project_private):
        """Exercise several GET scenarios against the same endpoint.

        The URL was previously rebuilt identically six times; it is hoisted
        once here since every scenario hits the same route.
        """
        url = '/{}users/{}/preprints/'.format(API_BASE, user_one._id)

        # test_authorized_gets_200
        res = app.get(url, auth=user_one.auth)
        assert res.status_code == 200
        assert res.content_type == 'application/vnd.api+json'

        # test_anonymous_gets_200
        res = app.get(url)
        assert res.status_code == 200
        assert res.content_type == 'application/vnd.api+json'

        # test_get_preprints_logged_in: only preprints show, not projects
        res = app.get(url, auth=user_one.auth)
        ids = [each['id'] for each in res.json['data']]
        assert preprint._id in ids
        assert project_public._id not in ids
        assert project_private._id not in ids

        # test_get_preprints_not_logged_in
        res = app.get(url)
        ids = [each['id'] for each in res.json['data']]
        assert preprint._id in ids
        assert project_public._id not in ids
        assert project_private._id not in ids

        # test_get_projects_logged_in_as_different_user
        res = app.get(url, auth=user_two.auth)
        ids = [each['id'] for each in res.json['data']]
        assert preprint._id in ids
        assert project_public._id not in ids
        assert project_private._id not in ids
class TestUserPreprintsListFiltering(PreprintsListFilteringMixin):
    """Filtering behavior of the user preprints list endpoint."""

    @pytest.fixture()
    def user(self):
        return AuthUserFactory()

    @pytest.fixture()
    def provider_one(self):
        return PreprintProviderFactory(name='Sockarxiv')

    @pytest.fixture()
    def provider_two(self):
        return PreprintProviderFactory(name='Piratearxiv')

    @pytest.fixture()
    def provider_three(self, provider_one):
        # Deliberately the same provider instance as provider_one.
        return provider_one

    @pytest.fixture()
    def project_one(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def project_two(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def project_three(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def url(self, user):
        return f'/{API_BASE}users/{user._id}/preprints/?version=2.2&'

    def test_provider_filter_equals_returns_one(
            self, app, user, provider_two, preprint_two, provider_url):
        # Filtering on provider_two's id must return exactly its preprint.
        res = app.get(f'{provider_url}{provider_two._id}', auth=user.auth)
        returned = [item['id'] for item in res.json['data']]
        assert returned == [preprint_two._id]
class TestUserPreprintIsPublishedList(PreprintIsPublishedListMixin):
    """Visibility of unpublished preprints on the user preprints endpoint."""

    @pytest.fixture()
    def user_admin_contrib(self):
        return AuthUserFactory()

    @pytest.fixture()
    def provider_one(self):
        return PreprintProviderFactory()

    @pytest.fixture()
    def provider_two(self, provider_one):
        # Deliberately the same provider instance as provider_one.
        return provider_one

    @pytest.fixture()
    def project_published(self, user_admin_contrib):
        return ProjectFactory(creator=user_admin_contrib, is_public=True)

    @pytest.fixture()
    def project_public(self, user_admin_contrib, user_write_contrib):
        project = ProjectFactory(creator=user_admin_contrib, is_public=True)
        project.add_contributor(
            user_write_contrib,
            permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS,
            save=True)
        return project

    @pytest.fixture()
    def url(self, user_admin_contrib):
        return f'/{API_BASE}users/{user_admin_contrib._id}/preprints/?version=2.2&'

    @pytest.fixture()
    def preprint_unpublished(
            self, user_admin_contrib, provider_one,
            project_public, subject):
        return PreprintFactory(
            creator=user_admin_contrib,
            filename='mgla.pdf',
            provider=provider_one,
            subjects=[[subject._id]],
            project=project_public,
            is_published=False)

    def test_unpublished_visible_to_admins(
            self, app, user_admin_contrib, preprint_unpublished,
            preprint_published, url):
        res = app.get(url, auth=user_admin_contrib.auth)
        returned = [item['id'] for item in res.json['data']]
        assert len(returned) == 2
        assert preprint_unpublished._id in returned

    def test_unpublished_invisible_to_write_contribs(
            self, app, user_write_contrib, preprint_unpublished,
            preprint_published, url):
        res = app.get(url, auth=user_write_contrib.auth)
        returned = [item['id'] for item in res.json['data']]
        assert len(returned) == 1
        assert preprint_unpublished._id not in returned

    def test_filter_published_false_write_contrib(
            self, app, user_write_contrib, preprint_unpublished, url):
        res = app.get(
            f'{url}filter[is_published]=false',
            auth=user_write_contrib.auth)
        assert len(res.json['data']) == 0
class TestUserPreprintIsValidList(PreprintIsValidListMixin):
    """Validity checks for preprints on the user preprints endpoint."""

    @pytest.fixture()
    def user_admin_contrib(self):
        return AuthUserFactory()

    @pytest.fixture()
    def project(self, user_admin_contrib, user_write_contrib):
        proj = ProjectFactory(creator=user_admin_contrib, is_public=True)
        proj.add_contributor(
            user_write_contrib,
            permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS,
            save=True)
        return proj

    @pytest.fixture()
    def provider(self):
        return PreprintProviderFactory()

    @pytest.fixture()
    def url(self, user_admin_contrib):
        return f'/{API_BASE}users/{user_admin_contrib._id}/preprints/?version=2.2&'

    # test override: user nodes/preprints routes do not show private nodes to
    # anyone but the self
    def test_preprint_private_visible_write(
            self, app, user_write_contrib, project, preprint, url):
        # Visible while the project is public...
        res = app.get(url, auth=user_write_contrib.auth)
        assert len(res.json['data']) == 1
        # ...and hidden once it becomes private.
        project.is_public = False
        project.save()
        res = app.get(url, auth=user_write_contrib.auth)
        assert len(res.json['data']) == 0
| true | true |
f731c668ffccf5b4cbde0be62847a563f4f61341 | 14,260 | py | Python | packages/syft/src/syft/grid/duet/webrtc_duet.py | pculliton/PySyft | 23a0d1442d3d901b1139aeabe079ccf4177ebc0d | [
"Apache-2.0"
] | null | null | null | packages/syft/src/syft/grid/duet/webrtc_duet.py | pculliton/PySyft | 23a0d1442d3d901b1139aeabe079ccf4177ebc0d | [
"Apache-2.0"
] | null | null | null | packages/syft/src/syft/grid/duet/webrtc_duet.py | pculliton/PySyft | 23a0d1442d3d901b1139aeabe079ccf4177ebc0d | [
"Apache-2.0"
] | null | null | null | """
PySyft Duet (WebRTC)
This class aims to implement the PySyft Duet concept by using WebRTC protocol as a
connection channel in order to allow two different users to establish a direct
connection with high-quality Real-time Communication using private addresses.
The most common example showing how it can be used is the notebook demo example:
Two different jupyter / collab notebooks in different machines using private addresses
behind routers, proxies and firewalls can connect using a full-duplex channel
to perform machine learning and data science tasks, working as a client
and server at the same time.
PS 1: You need a signaling server running somewhere.
If you don't know any public address running this service, or want to set up your own
signaling network you can use PyGrid's network app.
For local development you can run:
$ python src/syft/grid/example_nodes/network.py
PS 2: The PyGrid repo has a complimentary branch that matches the current PySyft release.
To use this feature you must use the correct PyGrid branch.
(https://github.com/OpenMined/PyGrid/)
You can get more details about all this process, in the syft/grid/connections/webrtc.py
source code.
"""
# stdlib
import asyncio
from typing import Optional
# third party
from nacl.signing import SigningKey
# relative
from ... import serialize
from ...core.io.route import SoloRoute
from ...core.node.common.metadata import Metadata
from ...core.node.domain.client import DomainClient
from ...core.node.domain.domain import Domain
from ...logger import error
from ...logger import traceback_and_raise
from ..connections.webrtc import WebRTCConnection
from ..services.signaling_service import AnswerPullRequestMessage
from ..services.signaling_service import InvalidLoopBackRequest
from ..services.signaling_service import OfferPullRequestMessage
from ..services.signaling_service import SignalingAnswerMessage
from ..services.signaling_service import SignalingOfferMessage
from .signaling_client import SignalingClient
class Duet(DomainClient):
    """Peer-to-peer Duet client backed by a WebRTC bidirectional channel.

    Runs the WebRTC signaling handshake (offer/answer exchange through a
    signaling server) and, once the channel is established, initializes the
    DomainClient base class with a SoloRoute over the connection, so both
    peers can act as client and server over the same channel.
    """

    def __init__(
        self,
        node: Domain,
        target_id: str,
        signaling_client: SignalingClient,
        offer: bool = True,
    ):
        """Establish a Duet connection with the peer identified by target_id.

        :param node: local Domain node injected into the WebRTC connection.
        :param target_id: Duet id of the remote peer.
        :param signaling_client: client used to exchange signaling messages.
        :param offer: if True this peer starts the handshake by sending an
            offer; otherwise it waits for the remote peer's offer.
        """
        # Signing key pair used to authenticate this peer.
        self.signing_key = SigningKey.generate()
        self.verify_key = self.signing_key.verify_key
        # Async queues used to enqueue/dequeue messages exchanged with the
        # signaling server.
        self._push_msg_queue: asyncio.Queue = asyncio.Queue()
        self._pull_msg_queue: asyncio.Queue = asyncio.Queue()
        # Node instance injected into the bidirectional connection so the
        # same channel can serve requests as both client and server.
        self.node = node
        # WebRTCConnection instance (bidirectional connection).
        self.connection = WebRTCConnection(node=self.node)
        # Client used to exchange signaling messages in order to establish
        # the connection.
        self.signaling_client = signaling_client
        if not offer:
            # Wait for the remote peer: enqueue an OfferPullRequest to check
            # whether the desired address pushed an offer to connect with us.
            # This triggers the pull task to check signaling notifications.
            self._pull_msg_queue.put_nowait(
                OfferPullRequestMessage(
                    address=self.signaling_client.address,
                    target_peer=target_id,
                    host_peer=self.signaling_client.duet_id,
                    reply_to=self.signaling_client.address,
                )
            )
        else:
            # Start the signaling process by pushing a WebRTC offer request.
            self.send_offer(target_id=target_id)
        # While True, the push/pull tasks keep running. Set to False when
        # the signaling process ends (checked by _update_availability()) or
        # any exception is raised in those tasks.
        self._available = True
        # BUGFIX: pre-initialize the exception slot. Previously it was only
        # assigned inside the push()/pull() error handlers, so the failure
        # branch below could raise AttributeError when signaling ended
        # without metadata and without any task exception.
        self._exception: Optional[Exception] = None
        # Filled during the signaling exchange with the remote node metadata
        # used to create a SoloRoute for both sides.
        self._client_metadata: Optional[Metadata] = None
        try:
            # Run the async push/pull tasks until one of them finishes,
            # either because signaling ended or an exception was raised.
            asyncio.run(self.notify())
            # If client_metadata != None the connection was created
            # successfully.
            if self._client_metadata is not None:
                # Deserialize the remote client's metadata to obtain the
                # PySyft location structure.
                (  # type: ignore
                    spec_location,
                    name,
                    _,
                ) = DomainClient.deserialize_client_metadata_from_node(
                    metadata=serialize(self._client_metadata)
                )
                # Route all messages for the remote domain through the
                # WebRTC channel.
                route = SoloRoute(destination=spec_location, connection=self.connection)
                super().__init__(
                    domain=spec_location,
                    name=name,
                    routes=[route],
                    signing_key=self.signing_key,
                    verify_key=self.verify_key,
                )
                self.connection._client_address = self.address
            else:
                # Any exception raised during push/pull was stored in
                # self._exception (None if signaling simply never completed).
                traceback_and_raise(
                    Exception(
                        f"Something went wrong during the Duet init process. {self._exception}"
                    )
                )
        except Exception as e:
            traceback_and_raise(e)

    async def notify(self) -> None:
        """Run the push/pull tasks until the first one completes."""
        try:
            push_task = asyncio.ensure_future(self.push())
            pull_task = asyncio.ensure_future(self.pull())
            # Wait until one task finishes, then cancel the pending one.
            done, pending = await asyncio.wait(
                [pull_task, push_task], return_when=asyncio.FIRST_COMPLETED
            )
            for task in pending:
                task.cancel()
        except Exception as e:
            traceback_and_raise(e)

    def close(self) -> None:
        """Close the underlying WebRTC connection."""
        self.connection.close()

    async def push(self) -> None:
        """Send enqueued offer/answer messages to the signaling server."""
        try:
            while self._available:
                # Await the next SignalingOfferMessage/SignalingAnswerMessage
                # and forward it to the signaling server.
                msg = await self._push_msg_queue.get()
                self.signaling_client.send_immediate_msg_without_reply(msg=msg)
        except Exception as e:
            log = f"Got an exception in Duet push. {e}"
            error(log)
            # Finish all async tasks gracefully and keep the exception.
            self._available = False
            self._exception = e

    async def pull(self) -> None:
        """Poll the signaling server and react to signaling responses."""
        try:
            while self._available:
                # Await the next pull request and ask the signaling server
                # for a response.
                msg = await self._pull_msg_queue.get()
                _response = self.signaling_client.send_immediate_msg_with_reply(msg=msg)
                if isinstance(_response, SignalingOfferMessage):
                    # The remote peer sent an offer: answer it.
                    await self._send_answer(msg=_response)
                elif isinstance(_response, SignalingAnswerMessage):
                    # The remote peer answered our offer: acknowledge it.
                    await self._ack(msg=_response)
                elif isinstance(_response, InvalidLoopBackRequest):
                    traceback_and_raise(
                        Exception(
                            "You can't perform p2p connection using your current node address as a destination peer."
                        )
                    )
                else:
                    # No signaling message yet: re-enqueue the request to be
                    # processed later.
                    self._pull_msg_queue.put_nowait(msg)
                # Check whether the signaling process is over.
                self._available = self._update_availability()
                await asyncio.sleep(0.5)
        except Exception as e:
            log = f"Got an exception in Duet pull. {e}"
            error(log)
            # Finish all async tasks gracefully and keep the exception.
            self._available = False
            self._exception = e

    def send_offer(self, target_id: str) -> None:
        """Start a new signaling process by creating a new
        offer message and pushing it to the Signaling Server."""
        try:
            # Generate an offer payload containing the local network
            # description data/metadata (IP, MAC, mask, etc...).
            payload = asyncio.run(self.connection._set_offer())
            signaling_offer = SignalingOfferMessage(
                address=self.signaling_client.address,  # Target's address
                payload=payload,  # Offer payload
                host_metadata=self.node.get_metadata_for_client(),  # Own node metadata
                target_peer=target_id,
                host_peer=self.signaling_client.duet_id,  # Own node ID
            )
            # Enqueue the offer to be sent to the signaling server.
            self._push_msg_queue.put_nowait(signaling_offer)
            # Enqueue an AnswerPullRequest to wait for the signaling answer.
            self._pull_msg_queue.put_nowait(
                AnswerPullRequestMessage(
                    address=self.signaling_client.address,
                    target_peer=target_id,
                    host_peer=self.signaling_client.duet_id,
                    reply_to=self.signaling_client.address,
                )
            )
        except Exception as e:
            traceback_and_raise(e)

    async def _send_answer(self, msg: SignalingOfferMessage) -> None:
        """Process SignalingOfferMessage and create a new
        SignalingAnswerMessage as a response"""
        try:
            # Process the received offer, updating the target's remote
            # address, and generate the local answer payload.
            # NOTE(review): asyncio.run inside a coroutine raises
            # RuntimeError if a loop is already running — confirm that
            # _set_answer is safe to invoke this way here.
            payload = asyncio.run(self.connection._set_answer(payload=msg.payload))
            # Save the remote node's metadata in order to create a SoloRoute.
            self._client_metadata = msg.host_metadata
            signaling_answer = SignalingAnswerMessage(
                address=self.signaling_client.address,
                payload=payload,  # Signaling answer payload
                host_metadata=self.node.get_metadata_for_client(),  # Own node metadata
                target_peer=msg.host_peer,  # Remote node ID
                host_peer=self.signaling_client.duet_id,
            )
            # Enqueue the answer to be sent to the signaling server.
            await self._push_msg_queue.put(signaling_answer)
        except Exception as e:
            traceback_and_raise(e)

    async def _ack(self, msg: SignalingAnswerMessage) -> None:
        """Last signaling message: store the remote node
        metadata and update the target's remote address."""
        try:
            # Save the remote node's metadata in order to create a SoloRoute.
            self._client_metadata = msg.host_metadata
            # Process the received answer, updating the target's remote address.
            await self.connection._process_answer(payload=msg.payload)
        except Exception as e:
            traceback_and_raise(e)

    def _update_availability(self) -> bool:
        """Method used to check if the signaling process is over.

        :return: Boolean flag, True if it's NOT over, and False if it's over.
        :rtype: Boolean
        """
        available = False
        try:
            available = (
                not self._pull_msg_queue.empty()
                and self.connection.peer_connection is not None
            )
        except Exception as e:
            traceback_and_raise(e)
        return available
| 42.822823 | 117 | 0.629523 |
import asyncio
from typing import Optional
from nacl.signing import SigningKey
from ... import serialize
from ...core.io.route import SoloRoute
from ...core.node.common.metadata import Metadata
from ...core.node.domain.client import DomainClient
from ...core.node.domain.domain import Domain
from ...logger import error
from ...logger import traceback_and_raise
from ..connections.webrtc import WebRTCConnection
from ..services.signaling_service import AnswerPullRequestMessage
from ..services.signaling_service import InvalidLoopBackRequest
from ..services.signaling_service import OfferPullRequestMessage
from ..services.signaling_service import SignalingAnswerMessage
from ..services.signaling_service import SignalingOfferMessage
from .signaling_client import SignalingClient
class Duet(DomainClient):
    """Peer-to-peer Duet client backed by a WebRTC bidirectional channel.

    Runs the WebRTC signaling handshake (offer/answer exchange through a
    signaling server) and, once the channel is established, initializes the
    DomainClient base class with a SoloRoute over the connection, so both
    peers can act as client and server over the same channel.
    """

    def __init__(
        self,
        node: Domain,
        target_id: str,
        signaling_client: SignalingClient,
        offer: bool = True,
    ):
        """Establish a Duet connection with the peer identified by target_id.

        :param node: local Domain node injected into the WebRTC connection.
        :param target_id: Duet id of the remote peer.
        :param signaling_client: client used to exchange signaling messages.
        :param offer: if True this peer starts the handshake by sending an
            offer; otherwise it waits for the remote peer's offer.
        """
        # Signing key pair used to authenticate this peer.
        self.signing_key = SigningKey.generate()
        self.verify_key = self.signing_key.verify_key
        # Async queues used to enqueue/dequeue messages exchanged with the
        # signaling server.
        self._push_msg_queue: asyncio.Queue = asyncio.Queue()
        self._pull_msg_queue: asyncio.Queue = asyncio.Queue()
        # Node instance injected into the bidirectional connection so the
        # same channel can serve requests as both client and server.
        self.node = node
        # WebRTCConnection instance (bidirectional connection).
        self.connection = WebRTCConnection(node=self.node)
        # Client used to exchange signaling messages in order to establish
        # the connection.
        self.signaling_client = signaling_client
        if not offer:
            # Wait for the remote peer: enqueue an OfferPullRequest to check
            # whether the desired address pushed an offer to connect with us.
            self._pull_msg_queue.put_nowait(
                OfferPullRequestMessage(
                    address=self.signaling_client.address,
                    target_peer=target_id,
                    host_peer=self.signaling_client.duet_id,
                    reply_to=self.signaling_client.address,
                )
            )
        else:
            # Start the signaling process by pushing a WebRTC offer request.
            self.send_offer(target_id=target_id)
        # While True, the push/pull tasks keep running. Set to False when
        # the signaling process ends or a task raises an exception.
        self._available = True
        # BUGFIX: pre-initialize the exception slot. Previously it was only
        # assigned inside the push()/pull() error handlers, so the failure
        # branch below could raise AttributeError when signaling ended
        # without metadata and without any task exception.
        self._exception: Optional[Exception] = None
        # Filled during the signaling exchange with the remote node metadata
        # used to create a SoloRoute for both sides.
        self._client_metadata: Optional[Metadata] = None
        try:
            # Run the async push/pull tasks until one of them finishes.
            asyncio.run(self.notify())
            if self._client_metadata is not None:
                # Deserialize the remote client's metadata to obtain the
                # PySyft location structure.
                (
                    spec_location,
                    name,
                    _,
                ) = DomainClient.deserialize_client_metadata_from_node(
                    metadata=serialize(self._client_metadata)
                )
                # Route all messages for the remote domain through the
                # WebRTC channel.
                route = SoloRoute(destination=spec_location, connection=self.connection)
                super().__init__(
                    domain=spec_location,
                    name=name,
                    routes=[route],
                    signing_key=self.signing_key,
                    verify_key=self.verify_key,
                )
                self.connection._client_address = self.address
            else:
                # Any exception raised during push/pull was stored in
                # self._exception (None if signaling simply never completed).
                traceback_and_raise(
                    Exception(
                        f"Something went wrong during the Duet init process. {self._exception}"
                    )
                )
        except Exception as e:
            traceback_and_raise(e)

    async def notify(self) -> None:
        """Run the push/pull tasks until the first one completes."""
        try:
            push_task = asyncio.ensure_future(self.push())
            pull_task = asyncio.ensure_future(self.pull())
            # Wait until one task finishes, then cancel the pending one.
            done, pending = await asyncio.wait(
                [pull_task, push_task], return_when=asyncio.FIRST_COMPLETED
            )
            for task in pending:
                task.cancel()
        except Exception as e:
            traceback_and_raise(e)

    def close(self) -> None:
        """Close the underlying WebRTC connection."""
        self.connection.close()

    async def push(self) -> None:
        """Send enqueued offer/answer messages to the signaling server."""
        try:
            while self._available:
                msg = await self._push_msg_queue.get()
                self.signaling_client.send_immediate_msg_without_reply(msg=msg)
        except Exception as e:
            log = f"Got an exception in Duet push. {e}"
            error(log)
            # Finish all async tasks gracefully and keep the exception.
            self._available = False
            self._exception = e

    async def pull(self) -> None:
        """Poll the signaling server and react to signaling responses."""
        try:
            while self._available:
                msg = await self._pull_msg_queue.get()
                _response = self.signaling_client.send_immediate_msg_with_reply(msg=msg)
                if isinstance(_response, SignalingOfferMessage):
                    # The remote peer sent an offer: answer it.
                    await self._send_answer(msg=_response)
                elif isinstance(_response, SignalingAnswerMessage):
                    # The remote peer answered our offer: acknowledge it.
                    await self._ack(msg=_response)
                elif isinstance(_response, InvalidLoopBackRequest):
                    traceback_and_raise(
                        Exception(
                            "You can't perform p2p connection using your current node address as a destination peer."
                        )
                    )
                # If Signaling Message weren't found
                else:
                    # Re-enqueue the request to be processed later.
                    self._pull_msg_queue.put_nowait(msg)
                # Check whether the signaling process is over.
                self._available = self._update_availability()
                await asyncio.sleep(0.5)
        except Exception as e:
            log = f"Got an exception in Duet pull. {e}"
            error(log)
            # Finish all async tasks gracefully and keep the exception.
            self._available = False
            self._exception = e

    def send_offer(self, target_id: str) -> None:
        """Start a new signaling process by pushing an offer to the server."""
        try:
            # Generate an offer payload containing the local network
            # description data/metadata (IP, MAC, mask, etc...).
            payload = asyncio.run(self.connection._set_offer())
            signaling_offer = SignalingOfferMessage(
                address=self.signaling_client.address,  # Target's address
                payload=payload,
                host_metadata=self.node.get_metadata_for_client(),
                target_peer=target_id,
                host_peer=self.signaling_client.duet_id,
            )
            # Enqueue the offer to be sent to the signaling server.
            self._push_msg_queue.put_nowait(signaling_offer)
            # Enqueue an AnswerPullRequest to wait for the signaling answer.
            self._pull_msg_queue.put_nowait(
                AnswerPullRequestMessage(
                    address=self.signaling_client.address,
                    target_peer=target_id,
                    host_peer=self.signaling_client.duet_id,
                    reply_to=self.signaling_client.address,
                )
            )
        except Exception as e:
            traceback_and_raise(e)

    async def _send_answer(self, msg: SignalingOfferMessage) -> None:
        """Process a SignalingOfferMessage and reply with an answer."""
        try:
            # Generates an answer request payload containing
            # local network description data/metadata (IP, MAC, Mask, etc...)
            # NOTE(review): asyncio.run inside a coroutine raises
            # RuntimeError if a loop is already running — confirm that
            # _set_answer is safe to invoke this way here.
            payload = asyncio.run(self.connection._set_answer(payload=msg.payload))
            # Save remote node's metadata in roder to create a SoloRoute.
            self._client_metadata = msg.host_metadata
            signaling_answer = SignalingAnswerMessage(
                address=self.signaling_client.address,
                payload=payload,
                host_metadata=self.node.get_metadata_for_client(),
                target_peer=msg.host_peer,
                host_peer=self.signaling_client.duet_id,
            )
            # Enqueue the answer to be sent to the signaling server.
            await self._push_msg_queue.put(signaling_answer)
        except Exception as e:
            traceback_and_raise(e)

    async def _ack(self, msg: SignalingAnswerMessage) -> None:
        """Handle the final signaling message of the handshake."""
        try:
            # Save the remote node's metadata in order to create a SoloRoute.
            self._client_metadata = msg.host_metadata
            # Process received offer message updating target's remote address
            await self.connection._process_answer(payload=msg.payload)
        except Exception as e:
            traceback_and_raise(e)

    def _update_availability(self) -> bool:
        """Return True while the signaling process is still in progress."""
        available = False
        try:
            available = (
                not self._pull_msg_queue.empty()
                and self.connection.peer_connection is not None
            )
        except Exception as e:
            traceback_and_raise(e)
        return available
| true | true |
f731c670570a5ddaf363e2a308eba6f30d1c3ed7 | 4,328 | py | Python | model-optimizer/extensions/middle/GatherNdNormalizer.py | calvinfeng/openvino | 11f591c16852637506b1b40d083b450e56d0c8ac | [
"Apache-2.0"
] | null | null | null | model-optimizer/extensions/middle/GatherNdNormalizer.py | calvinfeng/openvino | 11f591c16852637506b1b40d083b450e56d0c8ac | [
"Apache-2.0"
] | 19 | 2021-03-26T08:11:00.000Z | 2022-02-21T13:06:26.000Z | model-optimizer/extensions/middle/GatherNdNormalizer.py | calvinfeng/openvino | 11f591c16852637506b1b40d083b450e56d0c8ac | [
"Apache-2.0"
] | 1 | 2021-07-28T17:30:46.000Z | 2021-07-28T17:30:46.000Z | """
Copyright (C) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging as log
import numpy as np
from extensions.ops.gather import Gather
from mo.front.common.partial_infer.utils import int64_array
from mo.front.tf.graph_utils import create_op_node_with_second_input, create_op_with_const_inputs
from mo.graph.graph import Graph, rename_node
from mo.middle.replacement import MiddleReplacementPattern
from mo.ops.reshape import Reshape
class GatherNDNormalize(MiddleReplacementPattern):
    """
    Hot fix for new speech-to-text model enabling while GatherND is not implemented in IE.
    We can replace GatherND to Reshape + Gather in case when GatherND indices have just one
    meaningful dimension.
    TODO: Investigate whether we must replace GatherND with Reshape + Gather always (due to performance benefits)
    for this particular case or only if the plugin does not support GatherND.
    And the best place for the transformation is nGraph so we need to move it.
    """
    enabled = True
    force_clean_up = True

    def run_before(self):
        from extensions.middle.BlockLSTMtoLSTMSequence import BlockLSTMtoLSTMSequence
        return [BlockLSTMtoLSTMSequence]

    def run_after(self):
        from extensions.middle.pass_separator import MiddleStart
        return [MiddleStart]

    def pattern(self):
        return dict(
            nodes=[('GatherND', dict(kind='op', op='GatherND', batch_dims=0))],
            edges=[]
        )

    @staticmethod
    def indices_check(indices: np.array, input_shape: tuple):
        """
        Check that indices have just one meaningful dimension and all other dimensions of input have size 1.

        :return: the index of the single meaningful dimension, or None when
            the GatherND -> Gather replacement is not applicable.
        """
        n_dims = indices.shape[-1]
        non_zero = None
        for i in range(n_dims):
            # BUGFIX: use np.all instead of the builtin all(). For indices of
            # rank > 2 the builtin iterates sub-arrays and raises
            # "truth value of an array ... is ambiguous"; np.all reduces over
            # every element regardless of rank and is equivalent for rank 2.
            if not np.all(np.take(indices, indices=[i], axis=-1) == 0):
                if non_zero is None:
                    non_zero = i
                else:
                    # More than one meaningful dimension: not applicable.
                    return None
            else:
                if input_shape[i] != 1:
                    return None
        return non_zero

    def replace_pattern(self, graph: Graph, match: dict):
        """Replace an applicable GatherND node with Reshape + Gather."""
        gather = match['GatherND']
        gather_name = gather.soft_get('name', gather.id)
        input_shape = gather.in_node(0).shape
        indices = gather.in_node(1).value
        if indices is None:
            # We can't do such special pass without indices value
            return

        # 0. All needed checks that we can replace GatherND by Gather
        gather_idx = self.indices_check(indices, input_shape)
        if gather_idx is None:
            log.warning('Node {} with op=GatherND can\'t be normalized to op=Gather.'.format(gather_name))
            return

        # 1. Add Reshape and connect
        new_shape = int64_array([-1] + list(input_shape[indices.shape[-1]:]))
        reshape = create_op_node_with_second_input(graph, Reshape, new_shape,
                                                   {'name': gather_name + '/Reshape_for_GatherND/'})
        gather.in_port(0).get_connection().set_destination(reshape.in_port(0))

        # 2. Change indices from Nd to 1d:
        new_indices = np.reshape(np.take(indices, indices=[gather_idx], axis=-1), [-1])
        rename_node(gather, gather_name + '/to_delete')

        # 3. Create new Gather operation and reconnect all inputs/outputs
        new_gather = create_op_with_const_inputs(graph, Gather, {1: new_indices, 2: int64_array(0)},
                                                 {'name': gather_name})
        rename_node(new_gather, gather_name)
        reshape.out_port(0).connect(new_gather.in_port(0))
        gather.out_port(0).get_connection().set_source(new_gather.out_port(0))

        # 4. Remove old Gather node
        graph.remove_node(gather.id)
| 39.706422 | 113 | 0.661738 | import logging as log
import numpy as np
from extensions.ops.gather import Gather
from mo.front.common.partial_infer.utils import int64_array
from mo.front.tf.graph_utils import create_op_node_with_second_input, create_op_with_const_inputs
from mo.graph.graph import Graph, rename_node
from mo.middle.replacement import MiddleReplacementPattern
from mo.ops.reshape import Reshape
class GatherNDNormalize(MiddleReplacementPattern):
enabled = True
force_clean_up = True
def run_before(self):
from extensions.middle.BlockLSTMtoLSTMSequence import BlockLSTMtoLSTMSequence
return [BlockLSTMtoLSTMSequence]
def run_after(self):
from extensions.middle.pass_separator import MiddleStart
return [MiddleStart]
def pattern(self):
return dict(
nodes=[('GatherND', dict(kind='op', op='GatherND', batch_dims=0))],
edges=[]
)
@staticmethod
def indices_check(indices: np.array, input_shape: tuple):
n_dims = indices.shape[-1]
non_zero = None
for i in range(n_dims):
if not all(np.take(indices, indices=[i], axis=-1) == 0):
if non_zero is None:
non_zero = i
else:
return None
else:
if input_shape[i] != 1:
return None
return non_zero
def replace_pattern(self, graph: Graph, match: dict):
gather = match['GatherND']
gather_name = gather.soft_get('name', gather.id)
input_shape = gather.in_node(0).shape
indices = gather.in_node(1).value
if indices is None:
return
# 0. All needed checks that we can replace GatherND by Gather
gather_idx = self.indices_check(indices, input_shape)
if gather_idx is None:
log.warning('Node {} with op=GatherND can\'t be normalized to op=Gather.'.format(gather_name))
return
new_shape = int64_array([-1] + list(input_shape[indices.shape[-1]:]))
reshape = create_op_node_with_second_input(graph, Reshape, new_shape,
{'name': gather_name + '/Reshape_for_GatherND/'})
gather.in_port(0).get_connection().set_destination(reshape.in_port(0))
new_indices = np.reshape(np.take(indices, indices=[gather_idx], axis=-1), [-1])
rename_node(gather, gather_name + '/to_delete')
new_gather = create_op_with_const_inputs(graph, Gather, {1: new_indices, 2: int64_array(0)},
{'name': gather_name})
rename_node(new_gather, gather_name)
reshape.out_port(0).connect(new_gather.in_port(0))
gather.out_port(0).get_connection().set_source(new_gather.out_port(0))
graph.remove_node(gather.id)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.