# pw_console/py/pw_console/log_pane.py (Robor-Electronics/pigweed)
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""LogPane class."""
import functools
import logging
import re
from typing import Any, List, Optional, Union, TYPE_CHECKING
from prompt_toolkit.application.current import get_app
from prompt_toolkit.filters import (
Condition,
has_focus,
)
from prompt_toolkit.formatted_text import StyleAndTextTuples
from prompt_toolkit.key_binding import (
KeyBindings,
KeyPressEvent,
KeyBindingsBase,
)
from prompt_toolkit.layout import (
ConditionalContainer,
Float,
FloatContainer,
UIContent,
UIControl,
VerticalAlign,
Window,
)
from prompt_toolkit.mouse_events import MouseEvent, MouseEventType, MouseButton
import pw_console.widgets.checkbox
import pw_console.style
from pw_console.log_view import LogView
from pw_console.log_pane_toolbars import (
LineInfoBar,
TableToolbar,
)
from pw_console.log_pane_saveas_dialog import LogPaneSaveAsDialog
from pw_console.log_pane_selection_dialog import LogPaneSelectionDialog
from pw_console.search_toolbar import SearchToolbar
from pw_console.filter_toolbar import FilterToolbar
from pw_console.widgets import (
ToolbarButton,
WindowPane,
WindowPaneHSplit,
WindowPaneToolbar,
)
if TYPE_CHECKING:
from pw_console.console_app import ConsoleApp
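# Number of lines to scroll per mouse wheel event (see mouse_handler below).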
_LOG_OUTPUT_SCROLL_AMOUNT = 5
_LOG = logging.getLogger(__package__)
class LogContentControl(UIControl):
"""LogPane prompt_toolkit UIControl for displaying LogContainer lines."""
def __init__(self, log_pane: 'LogPane') -> None:
# pylint: disable=too-many-locals
self.log_pane = log_pane
self.log_view = log_pane.log_view
# Mouse drag visual selection flags.
self.visual_select_mode_drag_start = False
self.visual_select_mode_drag_stop = False
self.uicontent: Optional[UIContent] = None
self.lines: List[StyleAndTextTuples] = []
# Key bindings.
key_bindings = KeyBindings()
@key_bindings.add('w')
def _toggle_wrap_lines(_event: KeyPressEvent) -> None:
"""Toggle log line wrapping."""
self.log_pane.toggle_wrap_lines()
@key_bindings.add('t')
def _toggle_table_view(_event: KeyPressEvent) -> None:
"""Toggle table view."""
self.log_pane.toggle_table_view()
@key_bindings.add('insert')
def _duplicate(_event: KeyPressEvent) -> None:
"""Duplicate this log pane."""
self.log_pane.duplicate()
@key_bindings.add('delete')
def _delete(_event: KeyPressEvent) -> None:
"""Remove log pane."""
if self.log_pane.is_a_duplicate:
self.log_pane.application.window_manager.remove_pane(
self.log_pane)
@key_bindings.add('C')
def _clear_history(_event: KeyPressEvent) -> None:
"""Clear log pane history."""
self.log_pane.clear_history()
@key_bindings.add('g')
def _scroll_to_top(_event: KeyPressEvent) -> None:
"""Scroll to top."""
self.log_view.scroll_to_top()
@key_bindings.add('G')
def _scroll_to_bottom(_event: KeyPressEvent) -> None:
"""Scroll to bottom."""
self.log_view.scroll_to_bottom()
@key_bindings.add('f')
def _toggle_follow(_event: KeyPressEvent) -> None:
"""Toggle log line following."""
self.log_pane.toggle_follow()
@key_bindings.add('up')
@key_bindings.add('k')
def _up(_event: KeyPressEvent) -> None:
"""Move cursor up."""
self.log_view.scroll_up()
@key_bindings.add('down')
@key_bindings.add('j')
def _down(_event: KeyPressEvent) -> None:
"""Move cursor down."""
self.log_view.scroll_down()
@key_bindings.add('s-up')
def _visual_select_up(_event: KeyPressEvent) -> None:
"""Select previous log line."""
self.log_view.visual_select_up()
@key_bindings.add('s-down')
def _visual_select_down(_event: KeyPressEvent) -> None:
"""Select next log line."""
self.log_view.visual_select_down()
@key_bindings.add('pageup')
def _pageup(_event: KeyPressEvent) -> None:
"""Scroll the logs up by one page."""
self.log_view.scroll_up_one_page()
@key_bindings.add('pagedown')
def _pagedown(_event: KeyPressEvent) -> None:
"""Scroll the logs down by one page."""
self.log_view.scroll_down_one_page()
@key_bindings.add('c-o')
def _start_saveas(_event: KeyPressEvent) -> None:
"""Save logs to a file."""
self.log_pane.start_saveas()
@key_bindings.add('/')
@key_bindings.add('c-f')
def _start_search(_event: KeyPressEvent) -> None:
"""Start searching."""
self.log_pane.start_search()
@key_bindings.add('n')
@key_bindings.add('c-s')
@key_bindings.add('c-g')
def _next_search(_event: KeyPressEvent) -> None:
"""Next search match."""
self.log_view.search_forwards()
@key_bindings.add('N')
@key_bindings.add('c-r')
def _previous_search(_event: KeyPressEvent) -> None:
"""Previous search match."""
self.log_view.search_backwards()
@key_bindings.add('c-l')
def _clear_search_highlight(_event: KeyPressEvent) -> None:
"""Remove search highlighting."""
self.log_pane.log_view.clear_search_highlighting()
@key_bindings.add('escape', 'c-f') # Alt-Ctrl-f
def _apply_filter(_event: KeyPressEvent) -> None:
"""Apply current search as a filter."""
self.log_view.apply_filter()
@key_bindings.add('escape', 'c-r') # Alt-Ctrl-r
def _clear_filter(_event: KeyPressEvent) -> None:
"""Reset / erase active filters."""
self.log_view.clear_filters()
self.key_bindings = key_bindings
def is_focusable(self) -> bool:
return True
def get_key_bindings(self) -> Optional[KeyBindingsBase]:
return self.key_bindings
def preferred_width(self, max_available_width: int) -> int:
"""Return the width of the longest line."""
line_lengths = [len(l) for l in self.lines]
return max(line_lengths, default=0)
def preferred_height(
self,
width: int,
max_available_height: int,
wrap_lines: bool,
get_line_prefix,
) -> Optional[int]:
"""Return the preferred height for the log lines."""
content = self.create_content(width, None)
return content.line_count
def create_content(self, width: int, height: Optional[int]) -> UIContent:
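"""Return UIContent built from the LogView's currently rendered lines."""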
# Update lines to render
self.lines = self.log_view.render_content()
# Create a UIContent instance if none exists
if self.uicontent is None:
self.uicontent = UIContent(get_line=lambda i: self.lines[i],
line_count=len(self.lines),
show_cursor=False)
# Update line_count
self.uicontent.line_count = len(self.lines)
return self.uicontent
def mouse_handler(self, mouse_event: MouseEvent):
"""Mouse handler for this control."""
mouse_position = mouse_event.position
# On left mouse button release:
# 1. Check if a mouse drag just completed.
# 2. If not in focus, switch focus to this log pane.
# 3. If already in focus, move the cursor to the clicked position.
if (mouse_event.event_type == MouseEventType.MOUSE_UP
and mouse_event.button == MouseButton.LEFT):
# If a drag was in progress and this is the first release after it,
# set the stop flag.
if (self.visual_select_mode_drag_start
and not self.visual_select_mode_drag_stop):
self.visual_select_mode_drag_stop = True
if not has_focus(self)():
# Focus the save as dialog if open.
if self.log_pane.saveas_dialog_active:
get_app().layout.focus(self.log_pane.saveas_dialog)
# Focus the search bar if open.
elif self.log_pane.search_bar_active:
get_app().layout.focus(self.log_pane.search_toolbar)
# Otherwise, focus on the log pane content.
else:
get_app().layout.focus(self)
# Mouse event handled, return None.
return None
# Log pane in focus already, move the cursor to the position of the
# mouse click.
self.log_pane.log_view.scroll_to_position(mouse_position)
# Mouse event handled, return None.
return None
# Mouse drag with left button should start selecting lines.
# The log pane does not need to be in focus to start this.
if (mouse_event.event_type == MouseEventType.MOUSE_MOVE
and mouse_event.button == MouseButton.LEFT):
# If a previous mouse drag was completed, clear the selection.
if (self.visual_select_mode_drag_start
and self.visual_select_mode_drag_stop):
self.log_pane.log_view.clear_visual_selection()
# Drag select in progress, set flags accordingly.
self.visual_select_mode_drag_start = True
self.visual_select_mode_drag_stop = False
self.log_pane.log_view.visual_select_line(mouse_position)
# Mouse event handled, return None.
return None
# Mouse wheel events should move the cursor +/- some amount of lines
# even if this pane is not in focus.
if mouse_event.event_type == MouseEventType.SCROLL_DOWN:
self.log_pane.log_view.scroll_down(lines=_LOG_OUTPUT_SCROLL_AMOUNT)
# Mouse event handled, return None.
return None
if mouse_event.event_type == MouseEventType.SCROLL_UP:
self.log_pane.log_view.scroll_up(lines=_LOG_OUTPUT_SCROLL_AMOUNT)
# Mouse event handled, return None.
return None
# Mouse event not handled, return NotImplemented.
return NotImplemented
class LogPane(WindowPane):
"""LogPane class."""
# pylint: disable=too-many-instance-attributes,too-many-public-methods
def __init__(
self,
application: Any,
pane_title: str = 'Logs',
):
super().__init__(application, pane_title)
# TODO(tonymd): Read these settings from a project (or user) config.
self.wrap_lines = False
self._table_view = True
self.is_a_duplicate = False
# Create the log container which stores and handles incoming logs.
self.log_view: LogView = LogView(self, self.application)
# Log pane size variables. These are updated just before rendering the
# pane by the WindowPaneHSplit class.
self.current_log_pane_width = 0
self.current_log_pane_height = 0
self.last_log_pane_width = None
self.last_log_pane_height = None
# Search tracking
self.search_bar_active = False
self.search_toolbar = SearchToolbar(self)
self.filter_toolbar = FilterToolbar(self)
self.saveas_dialog = LogPaneSaveAsDialog(self)
self.saveas_dialog_active = False
self.visual_selection_bar = LogPaneSelectionDialog(self)
# Table header bar, only shown if table view is active.
self.table_header_toolbar = TableToolbar(self)
# Create the bottom toolbar for the whole log pane.
self.bottom_toolbar = WindowPaneToolbar(self)
self.bottom_toolbar.add_button(
ToolbarButton('/', 'Search', self.start_search))
self.bottom_toolbar.add_button(
ToolbarButton('Ctrl-o', 'Save', self.start_saveas))
self.bottom_toolbar.add_button(
ToolbarButton('f',
'Follow',
self.toggle_follow,
is_checkbox=True,
checked=lambda: self.log_view.follow))
self.bottom_toolbar.add_button(
ToolbarButton('t',
'Table',
self.toggle_table_view,
is_checkbox=True,
checked=lambda: self.table_view))
self.bottom_toolbar.add_button(
ToolbarButton('w',
'Wrap',
self.toggle_wrap_lines,
is_checkbox=True,
checked=lambda: self.wrap_lines))
self.bottom_toolbar.add_button(
ToolbarButton('C', 'Clear', self.clear_history))
self.log_content_control = LogContentControl(self)
self.log_display_window = Window(
content=self.log_content_control,
# Scrolling is handled by LogScreen
allow_scroll_beyond_bottom=False,
# Line wrapping is handled by LogScreen
wrap_lines=False,
# Selected line highlighting is handled by LogScreen
cursorline=False,
# Don't make the window taller to fill the parent split container.
# Window should match the height of the log line content. This will
# also allow the parent HSplit to justify the content to the bottom
dont_extend_height=True,
# Window width should be extended to make background highlighting
# extend to the end of the container. Otherwise background colors
# will only appear until the end of the log line.
dont_extend_width=False,
# Needed for log lines ANSI sequences that don't specify foreground
# or background colors.
style=functools.partial(pw_console.style.get_pane_style, self),
)
# Root level container
self.container = ConditionalContainer(
FloatContainer(
# Horizontal split containing the log lines and the toolbar.
WindowPaneHSplit(
self, # LogPane reference
[
self.table_header_toolbar,
self.log_display_window,
self.filter_toolbar,
self.search_toolbar,
self.bottom_toolbar,
],
# Align content with the bottom of the container.
align=VerticalAlign.BOTTOM,
height=lambda: self.height,
width=lambda: self.width,
style=functools.partial(pw_console.style.get_pane_style,
self),
),
floats=[
# Floating LineInfoBar
Float(top=0, right=0, height=1, content=LineInfoBar(self)),
Float(top=0,
right=0,
height=LogPaneSelectionDialog.DIALOG_HEIGHT,
content=self.visual_selection_bar),
Float(top=3,
left=2,
right=2,
height=LogPaneSaveAsDialog.DIALOG_HEIGHT + 2,
content=self.saveas_dialog),
]),
filter=Condition(lambda: self.show_pane))
@property
def table_view(self):
return self._table_view
@table_view.setter
def table_view(self, table_view):
self._table_view = table_view
def menu_title(self):
"""Return the title to display in the Window menu."""
title = self.pane_title()
# List active filters
if self.log_view.filtering_on:
title += ' (FILTERS: '
title += ' '.join([
log_filter.pattern()
for log_filter in self.log_view.filters.values()
])
title += ')'
return title
def append_pane_subtitle(self, text):
if not self._pane_subtitle:
self._pane_subtitle = text
else:
self._pane_subtitle = self._pane_subtitle + ', ' + text
def pane_subtitle(self):
if not self._pane_subtitle:
return ', '.join(self.log_view.log_store.channel_counts.keys())
# --- android_test_inspector: EMSE report-generation script ---
import time
from datetime import datetime
from os.path import join as path_join
from math import log, floor
import click
import matplotlib
import matplotlib.ticker as ticker
matplotlib.rcParams['font.family'] = 'serif'
matplotlib.rcParams['mathtext.fontset'] = 'cm'
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import pandas
import numpy as np
from tabulate import tabulate
import tabulate as T
from scipy.stats import mannwhitneyu
from scipy.stats import ks_2samp
from scipy.stats import shapiro
from scipy.stats import ttest_ind
from scipy.stats import zscore
from statsmodels.sandbox.stats.multicomp import multipletests
from scipy.stats import chi2_contingency
from android_test_inspector.cles import cles_brute as cles
ui_automation_frameworks = [
"androidviewclient",
'appium',
'calabash',
'espresso',
'monkeyrunner',
'pythonuiautomator',
'robotium',
'uiautomator',
]
cloud_test_services = [
'projectquantum',
'qmetry',
'saucelabs',
'firebase',
'perfecto',
'bitbar',
]
unit_test_frameworks = [
'junit',
'androidjunitrunner',
'roboelectric',
'robospock',
]
ci_services = [
'travis',
'circleci',
'app_veyor',
'codeship',
'codefresh',
'wercker',
]
downloads_scale = [
'1 - 5',
'10 - 50',
'50 - 100',
'100 - 500',
'500 - 1,000',
'1,000 - 5,000',
'5,000 - 10,000',
'10,000 - 50,000',
'50,000 - 100,000',
'100,000 - 500,000',
'500,000 - 1,000,000',
'1,000,000 - 5,000,000',
'5,000,000 - 10,000,000',
'10,000,000 - 50,000,000',
'50,000,000 - 100,000,000',
'100,000,000 - 500,000,000',
'500,000,000 - 1,000,000,000',
'1,000,000,000 - 5,000,000,000',
'5,000,000,000 - 10,000,000,000',
]
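# Format a count with metric suffixes (assumes number >= 1), e.g.
# human_format(5000) -> '5K', human_format(2000000) -> '2M'.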
def human_format(number):
units = ['', 'K', 'M', 'G', 'T', 'P']
k = 1000.0
magnitude = int(floor(log(number, k)))
return '%.0f%s' % (number / k**magnitude, units[magnitude])
@click.command()
@click.option('-i','--results_input', default=".", type=click.Path(exists=True))
@click.option('-o','--results_output', default="./reports", type=click.Path(exists=True))
def reports(results_input, results_output):
"""Generate reports for EMSE paper."""
now = pandas.Timestamp(2017, 9, 30, 12)
df = pandas.read_csv(
path_join(results_input, "results_with_coverage.csv"),
parse_dates=[0, 10]
)
df_googleplay = pandas.read_csv(
path_join(results_input, "googleplay.csv"),
index_col='package'
)
df = df.join(df_googleplay, on="app_id")
df_sonar = pandas.read_csv(path_join(results_input, "results_sonar.csv"), index_col='package')
df_sonar.fillna(0, inplace=True)
df_sonar = df_sonar.add_prefix('sonar_')
df = df.join(df_sonar, on="app_id")
#Feature engineering
df['tests'] = df[unit_test_frameworks+ui_automation_frameworks+cloud_test_services].any(axis=1)
df['no_tests'] = ~df['tests']
df['unit_tests'] = df[unit_test_frameworks].apply(any, axis=1)
df['ui_tests'] = df[ui_automation_frameworks].apply(any, axis=1)
df["cloud_tests"] = df[cloud_test_services].apply(any, axis=1)
df["ci/cd"] = df[ci_services].apply(any, axis=1)
df['age'] = (now - df['created_at'])
df['age_numeric'] = (now - df['created_at']).astype('<m8[Y]').astype('int')
df['time_since_last_update'] = (now - df['last_updated'])
df['time_since_last_update_numeric'] = df['time_since_last_update'].astype('<m8[Y]').astype('int')
df_old = df[df['age_numeric']>=2]
df["downloads"] = df["downloads"].astype("category", categories=downloads_scale, ordered=True)
df['sonar_issues_ratio'] = df['sonar_issues'].divide(df['sonar_files_processed'])
df['sonar_blocker_issues_ratio'] = df['sonar_blocker_issues'].divide(df['sonar_files_processed'])
df['sonar_critical_issues_ratio'] = df['sonar_critical_issues'].divide(df['sonar_files_processed'])
df['sonar_major_issues_ratio'] = df['sonar_major_issues'].divide(df['sonar_files_processed'])
df['sonar_minor_issues_ratio'] = df['sonar_minor_issues'].divide(df['sonar_files_processed'])
df_with_google_data = df[~df["rating_count"].isnull()]
df_with_tests = df[df['tests']]
df_without_tests = df[~df['tests']]
df.to_csv("results_merged.csv")
# from android_test_inspector.corr_analysis import correlation_matrix
# correlation_matrix(df, output_file=path_join(results_output, "corr_matrix.pdf"))
colors_dict = {
'any': 'C0',
'unit_test_frameworks': 'C1',
'ui_automation_frameworks': 'C2',
'cloud_test_services': 'C3',
'ci_services': 'C4',
}
marker_dict = {
'any': 'o',
'unit_test_frameworks': 'v',
'ui_automation_frameworks': '*',
'cloud_test_services': 'H',
'ci_services': 's',
}
linestyle_dict = {
'any': '-',
'unit_test_frameworks': ':',
'ui_automation_frameworks': '--',
'cloud_test_services': '-.',
}
# --- Number of projects by year --- #
figure, ax = plt.subplots(figsize=(4, 2.5))
df.groupby('age_numeric')['age_numeric'].count().plot.bar(
color='black',
width=0.25,
ax=ax,
)
ax.tick_params(direction='out', top='off')
ax.set_xlabel("Age")
ax.set_ylabel("Number of apps")
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.yaxis.grid(linestyle='dotted')
figure.tight_layout()
figure.savefig(path_join(results_output, "app_age_count.pdf"))
# --- Number of projects by framework --- #
columns = (
['tests']
+ ['unit_tests'] + unit_test_frameworks
+ ['ui_tests'] + ui_automation_frameworks
+ ['cloud_tests'] + cloud_test_services
# + ['ci/cd'] + ci_services
)
colors = (
[colors_dict['any']] +
[colors_dict['unit_test_frameworks']] * (len(unit_test_frameworks) + 1)
+ [colors_dict['ui_automation_frameworks']] * (len(ui_automation_frameworks) + 1)
+ [colors_dict['cloud_test_services']] * (len(cloud_test_services) + 1)
+ [colors_dict['ci_services']] * (len(ci_services) + 1)
)
highlights = [
'tests',
'unit_tests',
'ui_tests',
'cloud_tests',
'ci/cd',
]
sums = df[columns].sum()
labels = ("• All " + label if label in highlights else label for label in columns)
labels = [label.title().replace("_", " ") for label in labels]
heights = sums.values
figure, ax = plt.subplots(1, 1)
ax.bar(
range(len(labels)),
heights,
0.5,
color=colors,
edgecolor='k',
linewidth=[0.9 if column in highlights else 0.0 for column in columns]
)
ax.set_xticklabels(labels, rotation='vertical')
ax.set_xticks(range(len(labels)))
ax.tick_params(direction='out', top='off')
# ax.set_title("Number of projects by test framework")
ax.set_ylabel("Number of projects (out of {})".format(len(df.index)))
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.yaxis.grid(linestyle='dotted')
# ax2 = ax.twinx()
# ax2.grid(False)
# ax2.set_ylim(ax.get_ylim())
# ax2.set_yticklabels(["{:.0%}".format(tick/len(df)) for tick in ax2.get_yticks()])
# ax2.spines['right'].set_visible(False)
# ax2.spines['top'].set_visible(False)
# ax2.spines['left'].set_visible(False)
# ax2.set_ylabel("Percentage of projects")
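# Draw a labeled |-| range marker above the bars, spanning [xmin, xmax].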
def draw_range(ax, xmin, xmax, label):
y=400
ax.annotate('', xy=(xmin, y), xytext=(xmax, y), xycoords='data', textcoords='data',
arrowprops={'arrowstyle': '|-|', 'color':'black', 'linewidth': 0.5})
xcenter = xmin + (xmax-xmin)/2
ytext = y + ( ax.get_ylim()[1] - ax.get_ylim()[0] ) / 22
ax.annotate(label, xy=(xcenter,ytext), ha='center', va='center', fontsize=9)
draw_range(ax, 0.5, 5.5, "Unit testing")
draw_range(ax, 5.5, 14.5, "GUI testing")
draw_range(ax, 14.5, 21.5, "Cloud testing")
# draw_range(ax, 21.5, 26.5, "CI/CD")
figure.tight_layout()
figure.savefig(path_join(results_output, "framework_count.pdf"))
# --------------------------------------- #
# --- Percentage of Android tests over the age of the apps --- #
def tests_in_projects_by_time_of_creation(df_projects, frameworks, label=None,
title=None,
zorder=1, color=None,
verbose=False, **kwargs):
portions = []
n_projects_with_tests_history = []
total_projects_history = []
age_max = df_projects['age_numeric'].max()+1
for age in range(age_max):
n_projects_with_tests = df_projects[df_projects['age_numeric']==age][frameworks].apply(any, axis=1).sum()
n_projects_with_tests_history.append(n_projects_with_tests)
total_projects = len(df_projects[df_projects['age_numeric']==age].index)
total_projects_history.append(total_projects)
if total_projects == 0:
portion = 0
else:
portion = n_projects_with_tests/total_projects
portions.append(portion)
if verbose:
print("Age {}:".format(age))
print("{} out of {} projects ({:.1%}).".format(n_projects_with_tests, total_projects, portion))
plt.plot(range(age_max), portions, label=label, zorder=zorder, **kwargs)
plt.scatter(range(age_max), portions, total_projects_history, marker='o', linewidth=1, zorder=zorder)
ax = plt.gca()
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.set_xticks(range(age_max))
ax.set_yticklabels(["{:.0%}".format(label) for label in ax.get_yticks()])
ax.set_ylabel("Percentage of projects")
ax.yaxis.grid(linestyle='dotted', color='gray')
if label:
legend = ax.legend(loc='upper center', shadow=False)
if title:
ax.set_title(title)
figure, ax = plt.subplots(1,1)
tests_in_projects_by_time_of_creation(df, unit_test_frameworks+ui_automation_frameworks+cloud_test_services, label="Any", color=colors_dict['any'], zorder=2, linestyle=linestyle_dict['any'])
tests_in_projects_by_time_of_creation(df, unit_test_frameworks, label="Unit testing", color=colors_dict['unit_test_frameworks'], zorder=3, linestyle=linestyle_dict['unit_test_frameworks'])
tests_in_projects_by_time_of_creation(df, ui_automation_frameworks, label="GUI testing", color=colors_dict['ui_automation_frameworks'], zorder=4, linestyle=linestyle_dict['ui_automation_frameworks'])
tests_in_projects_by_time_of_creation(df, cloud_test_services, label="Cloud testing", color=colors_dict['cloud_test_services'], zorder=5, linestyle=linestyle_dict['cloud_test_services'])
ax.set_xlabel("Years since first commit")
ax.axvspan(0,2, color='darkgreen', alpha=0.1)
figure.tight_layout()
figure.savefig(path_join(results_output, "tests_by_age.pdf"))
ax.invert_xaxis()
figure.savefig(path_join(results_output, "tests_by_age_i.pdf"))
# ------------------------------------------------------------ #
# --- Percentage of Android tests over the age of the apps (cumulated) --- #
def tests_in_projects_by_time_of_creation_cumm(df_projects, frameworks,
title=None, verbose=False, **kwargs):
project_with_test_per_age = []
total_projects_per_age = []
n_projects_with_tests_history = []
total_projects_history = []
age_max = df_projects['age_numeric'].max()+1
for age in range(age_max)[::-1]:
n_projects_with_tests = df_projects[df_projects['age_numeric']==age][frameworks].apply(any, axis=1).sum()
n_projects_with_tests_history.append(n_projects_with_tests)
total_projects = len(df_projects[df_projects['age_numeric']==age].index)
total_projects_history.append(total_projects)
project_with_test_per_age.append(n_projects_with_tests)
total_projects_per_age.append(total_projects)
if verbose:
portion = n_projects_with_tests / total_projects if total_projects else 0
print("Age {}:".format(age))
print("{} out of {} projects ({:.1%}).".format(n_projects_with_tests, total_projects, portion))
project_with_test_per_age_cum = [sum(project_with_test_per_age[:index+1]) for index in range(len(project_with_test_per_age))]
total_projects_per_age_cum = [sum(total_projects_per_age[:index+1]) for index in range(len(total_projects_per_age))]
portions = []
for with_tests, total in zip(project_with_test_per_age_cum, total_projects_per_age_cum):
if total > 0:
portions.append(with_tests/len(df_projects))
else:
portions.append(0)
plt.plot(range(age_max)[::-1], portions, **kwargs)
# plt.scatter(range(age_max)[::-1], portions, total_projects_history, marker='o', linewidth=1, zorder=kwargs.get('zorder'))
plt.scatter(range(age_max)[::-1], portions, marker='.', linewidth=1, zorder=kwargs.get('zorder'))
ax = plt.gca()
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.set_xticks(range(age_max))
ax.set_yticklabels(["{:.0%}".format(label) for label in ax.get_yticks()])
ax.set_ylabel("Percentage of projects")
ax.yaxis.grid(linestyle='dotted', color='gray')
ax.legend(loc='upper center', shadow=False)
if title:
ax.set_title(title)
figure, ax = plt.subplots(1,1)
tests_in_projects_by_time_of_creation_cumm(
df,
unit_test_frameworks+ui_automation_frameworks+cloud_test_services,
label="Any", color=colors_dict['any'], zorder=2,
linestyle=linestyle_dict['any'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
unit_test_frameworks,
label="Unit testing", color=colors_dict['unit_test_frameworks'], zorder=3,
linestyle=linestyle_dict['unit_test_frameworks'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
ui_automation_frameworks,
label="GUI testing", color=colors_dict['ui_automation_frameworks'], zorder=4,
linestyle=linestyle_dict['ui_automation_frameworks'],
)
tests_in_projects_by_time_of_creation_cumm(
df,
cloud_test_services,
label="Cloud testing", color=colors_dict['cloud_test_services'], zorder=5,
linestyle=linestyle_dict['cloud_test_services'],
)
ax.set_xlabel("Year")
ax.axvspan(0,2, color='darkgreen', alpha=0.1)
figure.tight_layout()
figure.savefig(path_join(results_output, "tests_by_age_cumm.pdf"))
ax.invert_xaxis()
figure.savefig(path_join(results_output, "tests_by_age_cumm_i.pdf"))
# ------------------------------------------------------------ #
# --- Percentage of 2+years apps with tests grouped by time since last update --- #
def tests_in_projects_by_time_of_update(df_projects, frameworks, label=None,
title=None,
verbose=False, zorder=None, color=None, **kwargs):
portions = []
n_projects_with_tests_history = []
total_projects_history = []
age_max = df_projects['time_since_last_update_numeric'].max()+1
for age in range(age_max):
n_projects_with_tests = df_projects[df_projects['time_since_last_update_numeric']==age][frameworks].apply(any, axis=1).sum()
n_projects_with_tests_history.append(n_projects_with_tests)
total_projects = len(df_projects[df_projects['time_since_last_update_numeric']==age].index)
total_projects_history.append(total_projects)
if total_projects == 0:
portion = 0
else:
portion = n_projects_with_tests/total_projects
portions.append(portion)
if verbose:
print("Age {}:".format(age))
print("{} out of {} projects ({:.1%}).".format(n_projects_with_tests, total_projects, portion))
plt.plot(range(age_max), portions, label=label, zorder=zorder, **kwargs)
plt.scatter(range(age_max), portions, total_projects_history, marker='o', linewidth=1, zorder=zorder)
ax = plt.gca()
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['bottom'].set_visible(True)
ax.set_xticks(range(age_max))
ax.set_yticklabels(["{:.0%}".format(label) for label in ax.get_yticks()])
ax.set_ylabel("Percentage of projects")
ax.yaxis.grid(linestyle='dotted', color='gray')
if label:
legend = ax.legend(loc='upper center', shadow=False)
if title:
plt.title(title)
figure, ax = plt.subplots(1,1)
tests_in_projects_by_time_of_update(df_old, unit_test_frameworks+ui_automation_frameworks+cloud_test_services, label="Any", color=colors_dict['any'], linestyle=linestyle_dict['any'], zorder=1)
tests_in_projects_by_time_of_update(df_old, unit_test_frameworks, label="Unit testing", color=colors_dict['unit_test_frameworks'], linestyle=linestyle_dict['unit_test_frameworks'], zorder=2)
tests_in_projects_by_time_of_update(df_old, ui_automation_frameworks, label="GUI testing", color=colors_dict['ui_automation_frameworks'], linestyle=linestyle_dict['ui_automation_frameworks'], zorder=3)
tests_in_projects_by_time_of_update(df_old, cloud_test_services, label="Cloud testing", color=colors_dict['cloud_test_services'], linestyle=linestyle_dict['cloud_test_services'], zorder=4)
ax.set_xlabel("Years since last update")
figure.tight_layout()
figure.savefig(path_join(results_output, "mature_tests_by_update.pdf"))
ax.invert_xaxis()
figure.savefig(path_join(results_output, "mature_tests_by_update_i.pdf"))
# ------------------------------------------------------------------------------- #
# --- Descriptive stats for popularity metrics --- #
dictionary = {
"count": "$N$",
"mean": "$\\bar{x}$",
"std": "$s$",
"min": "$min$",
"max": "$max$",
"rating_value": "Rating"
}
metrics = ['stars','forks', 'contributors', 'commits', 'rating_value', 'rating_count']
def outliers_modified_z_score(ys):
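# Modified z-score (Iglewicz & Hoaglin): 0.6745 is the 0.75 quantile of the
# standard normal, which scales the MAD to be comparable to the standard deviation.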
threshold = 3.5
median_y = np.median(ys)
median_absolute_deviation_y = np.median([np.abs(y - median_y) for y in ys])
modified_z_scores = [0.6745 * (y - median_y) / median_absolute_deviation_y
for y in ys]
return (np.abs(modified_z_scores) > threshold)
def outliers_z_score(ys):
return np.abs(zscore(ys)) < 3
def remove_outliers_df(df, metric):
df = df.dropna(subset=[metric])
return df[outliers_z_score(df[metric])]
def remove_outliers(series):
series = series[~series.isnull()]
return series[outliers_z_score(series)]
# return series[np.abs(zscore(series)) < 3]
def _descriptive_stats(series):
return (
series.count(),
series.mean(),
series.std(),
series.min(),
series.quantile(0.25),
series.median(),
series.quantile(0.75),
series.max(),
shapiro(series)[1] < 0.01 and "$p < 0.01$",
)
stats = []
for metric in metrics:
metric_title = metric.title().replace("_", " ")
df_tmp = remove_outliers_df(df, metric)
df_tmp_tests = df_tmp[df_tmp['tests']]
stats.append((
f"\\multirow{{2}}{{*}}{{{metric_title}}}",
'$W$',
*_descriptive_stats(df_tmp_tests[metric])
))
df_tmp_wo_tests = df_tmp[~df_tmp['tests']]
stats.append((
"",
'$WO$',
*_descriptive_stats(df_tmp_wo_tests[metric])
))
old_escape_rules = T.LATEX_ESCAPE_RULES
T.LATEX_ESCAPE_RULES = {'%': '\\%'}
table = tabulate(
stats,
headers=['', 'Tests', '$N$', '$\\bar{x}$', '$s$', '$min$', '$25%$',
'$50%$', '$75%$', '$max$', 'Normality'],  # last four headers reconstructed (assumed) to match _descriptive_stats
)
# gui.py (abhish3k-11/Biomedical-research)
# imports
from tkinter import *
# from Tkinter import messagebox
from PIL import Image, ImageTk
import datetime
import threading
from imutils.video import WebcamVideoStream
import cv2
import time
import numpy as np
import math
import socket
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
from os import listdir
from os.path import isfile, join
from functools import reduce
# from googleapiclient.http import MediaFileUpload
import time
import os
import csv
from Gooogle import Create_Service
from googleapiclient.http import MediaFileUpload
import barcode
from barcode.writer import ImageWriter
# creating class Window inheriting Frame from tkinter
class Window(Frame):
# creating constructor function
def __init__(self, master=None):
self.frame = None
self.panel = None
self.vs = None
Frame.__init__(self, master)
self.master = master
# Loading required logos
load = Image.open("power.png")
load = load.resize((45, 45), Image.ANTIALIAS)
self.power_img = ImageTk.PhotoImage(load)
load = Image.open("cor.png")
load = load.resize((45, 45), Image.ANTIALIAS)
self.cor_img = ImageTk.PhotoImage(load)
load = Image.open("wrong.png")
load = load.resize((45, 45), Image.ANTIALIAS)
self.wrong_img = ImageTk.PhotoImage(load)
load = Image.open("add_p1.png")
load = load.resize((50, 50), Image.ANTIALIAS)
self.add_p_img = ImageTk.PhotoImage(load)
load = Image.open("manual.png")
load = load.resize((32, 32), Image.ANTIALIAS)
self.manual_img = ImageTk.PhotoImage(load)
load = Image.open("warning.png")
load = load.resize((30, 30), Image.ANTIALIAS)
self.warning_img = ImageTk.PhotoImage(load)
load = Image.open("home.png")
load = load.resize((55, 60), Image.ANTIALIAS)
self.home_img = ImageTk.PhotoImage(load)
load = Image.open("spacing.png")
load = load.resize((474, 146), Image.ANTIALIAS)
self.spacing_img = ImageTk.PhotoImage(load)
# initializing vars
self.unactive_color = 'LightBlue3'
self.active_color = 'LightGray'
self.PatientID_StrVar = StringVar(value='PID: ')
self.left_eye = 0
self.right_eye = 0
self.left_nails = 0
self.right_nails = 0
self.mucous = 0
self.palm = 0
self.video_thread = False
self.date_time_thread = False
self.rgb_frame = 0
self.patient_name = StringVar(value="Name: ")
self.patient_age = StringVar(value="Age: ")
self.patient_gender = StringVar(value="Gender: ")
self.patient_height = StringVar(value="Height: ")
self.patient_weight = StringVar(value="Weight: ")
self.CLIENT_SECRET_FILE = 'client_secret.json'
self.API_NAME = 'drive'
self.API_VERSION = 'v3'
self.SCOPES = ['https://www.googleapis.com/auth/drive']
self.service = None
self.connect_to_drive()
self.folder_id = '1C1eAzE_VkUu09hAn67BcHPmm90JhtLX0'
self.init_window(-1)
def connect_to_drive(self):
try:
self.service = Create_Service(
self.CLIENT_SECRET_FILE, self.API_NAME, self.API_VERSION, self.SCOPES)
except Exception:
print("cannot connect to drive")
def is_connected(self, host="8.8.8.8", port=53, timeout=0.8):
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect(
(host, port))
return True
except socket.error as ex:
return False
def init_window(self, page_no):
self.remove_widgets(page_no)
print("O page_no init_window")
# removing previous page widgets
# self.remove_widgets(page_no)
time.sleep(0.5)
self.master.title("GUI")
# packing the frame
self.pack(fill=BOTH, expand=1)
if page_no == -1:
time.sleep(0.5)
page_no = 0
# left_eye = 0
# right_eye = 0
# left_nails = 0
# right_nails = 0
# mucous = 0
# palm = 0
# date Label
self.date_l = Label(self, text=datetime.datetime.now().strftime(
'%d/%m/%Y'), font=("Times New Roman", 17, "bold"), bg="white", anchor='w')
self.date_l.place(x=10, y=5, width=150)
# time label
self.time_l = Label(self, text=datetime.datetime.now().strftime(
'%H:%M:%S'), font=("Times New Roman", 17, "bold"), bg="white", anchor='e')
self.time_l.place(x=320, y=5, width=150)
# creating date time thread
self.dt_thread = threading.Thread(target=self.date_time)
# New patient button
self.add_p_btn = Button(self, text='Add \nPatient', compound=LEFT, bg='LightBlue3',
activebackground='LightBlue3', border='1', font=("Times New Roman", 17, 'bold'), command=lambda: self.new_patients(page_no))
self.add_p_btn.place(x=300, y=150, width=126, height=60)
# upload btn
self.upload_btn = Button(self, text='Upload \nData', compound=LEFT, bg='LightBlue3',
activebackground='LightBlue3', border='1', font=("Times New Roman", 17, 'bold'), command=lambda: self.uploadFun(page_no))
self.upload_btn.place(x=170, y=150, width=126, height=60)
# power button
self.power_btn = Button(self, text='Power \nOff', compound=LEFT, bg='LightBlue3',
activebackground='LightBlue3', border='1', font=("Times New Roman", 17, 'bold'), command=self.powerFun)
self.power_btn.place(x=40, y=150, width=126, height=60)
# making date time thread active
self.date_time_thread = True
self.dt_thread.start()
def uploadFun(self, page_no):
self.uploading_message = Label(self, text="Processing. Please wait....", font=(
'Times New Roman', 22, "bold"), bg="white")
self.uploading_message.place(x=10, y=10, width=460, height=300)
print("Entered")
# if self.is_connected():
# if self.service == None:
# self.connect_to_drive()
# folder_names = [f for f in listdir("patients_data/")]
# #try:
# for folder_name in folder_names:
# print(folder_name)
# file_names = [f for f in os.listdir("patients_data/{}/".format(folder_name))]
# mime_types = ['image/jpeg' for names in file_names]
# for file_name, mime_type in zip(file_names, mime_types):
# print(file_names)
# file_metadata = {
# 'name' : file_name,
# 'parents': [self.folder_id]
# }
# print(file_name)
# media = MediaFileUpload("patients_data/{}/{}".format(folder_name, file_name), mimetype=mime_type)
# #print()
# self.service.files().create(
# body = file_metadata,
# media_body=media,
# fields='id'
# ).execute()
#os.remove("patients_data/{}/{}".format(folder_name, file_name))
# except :
# print("error while uploading")
time.sleep(0.5)
self.uploading_message.place_forget()
print("Exiting")
time.sleep(0.5)
self.init_window(page_no)
def powerFun(self):
os.system("sudo shutdown -h now")
def shiftFun(self, page_no):
self.remove_widgets(page_no)
# current page no
page_no = 12
# packing Frame
self.entryText.place(x=8, y=8, height=56, width=307)
self.shift_btn = Button(self, text='123', bg='LightBlue3', activebackground='LightGray', font=(
"Times New Roman", 25), command=lambda: self.fetch_patient_data(page_no))
self.shift_btn.place(x=323, y=10, height=52, width=147)
# alphabets button
# The A-Z keys form a 6-column grid: columns at x = 10 + 78*col (the last
# column is 70 px wide, the rest 71 px) and rows at y = 67 + 50*row.
for index, letter in enumerate('ABCDEFGHIJKLMNOPQRSTUVWXYZ'):
    grid_row, grid_col = divmod(index, 6)
    letter_btn = Button(self, text=letter, bg='LightBlue3',
                        activebackground='LightGray',
                        font=("Times New Roman", 25),
                        # bind the current letter (default arg avoids late binding)
                        command=lambda ch=letter: self.numFun(ch))
    letter_btn.place(x=10 + 78 * grid_col, y=67 + 50 * grid_row,
                     height=43, width=70 if grid_col == 5 else 71)
    setattr(self, letter.lower() + '_btn', letter_btn)
self.clear_btn = Button(self, text='Clear', bg='LightBlue3', activebackground='LightGray', font=(
"Times New Roman", 25), command=lambda: self.clearFun())
self.clear_btn.place(x=166, y=267, height=43, width=149)
self.enter_btn = Button(self, text='Enter', bg='LightBlue3', activebackground='LightGray', font=(
"Times New Roman", 25), command=lambda: self.fetching(page_no))
self.enter_btn.place(x=322, y=267, height=43, width=148)
def date_time(self):
while self.date_time_thread:
self.date_l.configure(
text=datetime.datetime.now().strftime('%d/%m/%Y'))
self.time_l.configure(
text=datetime.datetime.now().strftime('%H:%M:%S'))
time.sleep(0.2)  # avoid busy-waiting; sub-second refresh keeps the clock responsive
def stop_date_time(self):
self.date_time_thread = False
# function to add new patients
def new_patients(self, page_no):
self.remove_widgets(page_no)
self.stop_date_time()
# 11 for fetch and 12 for new patients
# current page no
page_no = 120
# 120 is for 12's first page i.e for names
print("In new patients page add name")
self.txtVar = StringVar(value="Name :")
# packing the frame
# entry text
self.entryText = Entry(self, textvariable=self.txtVar, font=(
'Times New Roman', 20), border='3', width=27)
self.entryText.place(x=8, y=8, height=56, width=464)
# alphabets button
self.a_btn = Button(self, text='A', bg='LightBlue3', activebackground='LightGray', font=(
"Times New Roman", 25), width=4, command=lambda: self.numFun('A'))
self.a_btn.place(x=10, y=67, height=43, width=71)
self.b_btn = Button(self, text='B', bg='LightBlue3', activebackground='LightGray', font=(
"Times New Roman", 25), width=4, command=lambda: self.numFun('B'))
self.b_btn.place(x=88, y=67, height=43, width=71)
self.c_btn = Button(self, text='C', bg='LightBlue3', activebackground='LightGray', font=(
"Times New Roman", 25), width=4, command=lambda: self.numFun('C'))
self.c_btn.place(x=166, y=67, height=43, width=71)
self.d_btn = Button(self, text='D', bg='LightBlue3', activebackground='LightGray', font=(
"Times New Roman", 25), width=4, command=lambda: self.numFun('D'))
self.d_btn.place(x=244, y=67, height=43, width=71)
self.e_btn = Button(self, text='E', bg='LightBlue3', activebackground='LightGray', font=(
"Times New Roman", 25), width=4, command=lambda: self.numFun('E'))
self.e_btn.place(x=322, y=67, height=43, width=71)
#
# Copyright (c) 2020, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import collections
import enum
import functools
import json
import logging
import math
import os
import queue
import random
import threading
import warnings
from collections import defaultdict
from io import BytesIO
from uuid import uuid4
import cudf
import cupy as cp
import dask
import dask_cudf
import numba.cuda as cuda
import numpy as np
import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq
from cudf._lib.nvtx import annotate
from cudf.io.parquet import ParquetWriter as pwriter
from dask.base import tokenize
from dask.dataframe.core import new_dd_object
from dask.dataframe.io.parquet.utils import _analyze_paths
from dask.delayed import Delayed
from dask.highlevelgraph import HighLevelGraph
from dask.utils import natural_sort_key, parse_bytes
from fsspec.core import get_fs_token_paths
from fsspec.utils import stringify_path
from nvtabular.worker import clean_worker_cache, get_worker_cache
LOG = logging.getLogger("nvtabular")
class Shuffle(enum.Enum):
PER_PARTITION = 0
PER_WORKER = 1
FULL = 2
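# Shuffle granularity: PER_PARTITION shuffles rows within each output partition,
# PER_WORKER shuffles within each worker's in-memory buffer, and FULL (a global
# shuffle) is rejected as unsupported in _check_shuffle_arg below.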
#
# Helper Function definitions
#
def _check_shuffle_arg(shuffle):
if shuffle is None:
return shuffle
if isinstance(shuffle, Shuffle):
if shuffle == Shuffle.FULL:
raise ValueError('`shuffle="full"` is not yet supported.')
elif shuffle is True:
shuffle = Shuffle.PER_WORKER
warnings.warn("`shuffle=True` is deprecated. Using `PER_WORKER`.", DeprecationWarning)
elif shuffle is False:
shuffle = None
else:
raise ValueError(f"`shuffle={shuffle}` not recognized.")
return shuffle
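# Largest number of rows (at least 1) whose total size fits within the
# requested fraction of currently-free device memory.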
def _allowable_batch_size(gpu_memory_frac, row_size):
free_mem = device_mem_size(kind="free")
gpu_memory = free_mem * gpu_memory_frac
return max(int(gpu_memory / row_size), 1)
def _shuffle_gdf(gdf, gdf_size=None):
""" Shuffles a cudf dataframe, returning a new dataframe with randomly
ordered rows """
gdf_size = gdf_size or len(gdf)
arr = cp.arange(gdf_size)
cp.random.shuffle(arr)
return gdf.iloc[arr]
def device_mem_size(kind="total"):
if kind not in ["free", "total"]:
raise ValueError("{0} not a supported option for device_mem_size.".format(kind))
try:
if kind == "free":
return int(cuda.current_context().get_memory_info()[0])
else:
return int(cuda.current_context().get_memory_info()[1])
except NotImplementedError:
import pynvml
pynvml.nvmlInit()
if kind == "free":
warnings.warn("get_memory_info is not supported. Using total device memory from NVML.")
size = int(pynvml.nvmlDeviceGetMemoryInfo(pynvml.nvmlDeviceGetHandleByIndex(0)).total)
pynvml.nvmlShutdown()
return size
def guid():
""" Simple utility function to get random hex string
"""
return uuid4().hex
def _merge_general_metadata(meta_list):
""" Combine list of "general" metadata dicts into
a single dict
"""
if not meta_list:
return {}
meta = None
for md in meta_list:
if meta:
meta["data_paths"] += md["data_paths"]
meta["file_stats"] += md["file_stats"]
else:
meta = md.copy()
return meta
def _write_pq_metadata_file(md_list, fs, path):
""" Converts list of parquet metadata objects into
a single shared _metadata file.
"""
if md_list:
metadata_path = fs.sep.join([path, "_metadata"])
_meta = cudf.io.merge_parquet_filemetadata(md_list) if len(md_list) > 1 else md_list[0]
with fs.open(metadata_path, "wb") as fil:
_meta.tofile(fil)
return
def _set_dtypes(chunk, dtypes):
for col, dtype in dtypes.items():
if type(dtype) is str:
if "hex" in dtype and chunk[col].dtype == "object":
chunk[col] = chunk[col].str.htoi()
chunk[col] = chunk[col].astype(np.int32)
else:
chunk[col] = chunk[col].astype(dtype)
return chunk
def _detect_format(data):
""" Utility to detect the format of `data`
"""
if isinstance(data, cudf.DataFrame):
return "cudf"
elif isinstance(data, pd.DataFrame):
return "pandas"
elif isinstance(data, pa.Table):
return "arrow"
else:
file_type = str(data).split(".")[-1]
if file_type not in ("parquet", "csv"):
raise ValueError("Data format not recognized.")
return file_type
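# e.g. _detect_format(cudf.DataFrame()) -> "cudf"; _detect_format("data.csv") -> "csv"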
#
# Writer Definitions
#
def _writer_cls_factory(output_format, output_path):
if output_format == "parquet":
writer_cls = ParquetWriter
elif output_format == "hugectr":
writer_cls = HugeCTRWriter
else:
raise ValueError("Output format not yet supported.")
fs = get_fs_token_paths(output_path)[0]
return writer_cls, fs
def writer_factory(
output_format,
output_path,
out_files_per_proc,
shuffle,
use_guid=False,
bytes_io=False,
num_threads=0,
):
if output_format is None:
return None
writer_cls, fs = _writer_cls_factory(output_format, output_path)
return writer_cls(
output_path,
num_out_files=out_files_per_proc,
shuffle=shuffle,
fs=fs,
use_guid=use_guid,
bytes_io=bytes_io,
num_threads=num_threads,
)
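# A minimal usage sketch (paths and argument values below are illustrative
# assumptions, not part of this module):
#   writer = writer_factory("parquet", "/tmp/out", out_files_per_proc=8,
#                           shuffle=Shuffle.PER_WORKER)
#   writer.add_data(gdf)                     # gdf: a cudf.DataFrame
#   general_md, special_md = writer.close()  # collect metadata, flush buffers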
class Writer:
def __init__(self):
pass
def add_data(self, gdf):
raise NotImplementedError()
def package_general_metadata(self):
raise NotImplementedError()
@classmethod
def write_general_metadata(cls, data, fs, out_dir):
raise NotImplementedError()
@classmethod
def write_special_metadata(cls, data, fs, out_dir):
raise NotImplementedError()
def close(self):
pass
class ThreadedWriter(Writer):
def __init__(
self,
out_dir,
num_out_files=30,
num_threads=0,
cats=None,
conts=None,
labels=None,
shuffle=None,
fs=None,
use_guid=False,
bytes_io=False,
):
# set variables
self.out_dir = out_dir
self.cats = cats
self.conts = conts
self.labels = labels
self.shuffle = shuffle
self.column_names = None
if labels and conts:
self.column_names = labels + conts
self.col_idx = {}
self.num_threads = num_threads
self.num_out_files = num_out_files
self.num_samples = [0] * num_out_files
self.data_paths = None
self.need_cal_col_names = True
self.use_guid = use_guid
self.bytes_io = bytes_io
# Resolve file system
self.fs = fs or get_fs_token_paths(str(out_dir))[0]
# Only use threading if num_threads > 1
self.queue = None
if self.num_threads > 1:
# create thread queue and locks
self.queue = queue.Queue(num_threads)
self.write_locks = [threading.Lock() for _ in range(num_out_files)]
# signifies that end-of-data and that the thread should shut down
self._eod = object()
# create and start threads
for _ in range(num_threads):
write_thread = threading.Thread(target=self._write_thread, daemon=True)
write_thread.start()
def set_col_names(self, labels, cats, conts):
self.cats = cats
self.conts = conts
self.labels = labels
self.column_names = labels + conts
def _write_table(self, idx, data):
return
def _write_thread(self):
return
@annotate("add_data", color="orange", domain="nvt_python")
def add_data(self, gdf):
# Populate columns idxs
if not self.col_idx:
for i, x in enumerate(gdf.columns.values):
self.col_idx[str(x)] = i
# Generate `ind` array to map each row to an output file.
# This approach is certainly more optimized for shuffling
# than it is for non-shuffling, but using a single code
# path is probably worth the (possible) minor overhead.
nrows = gdf.shape[0]
typ = np.min_scalar_type(nrows * 2)
if self.shuffle:
ind = cp.random.choice(cp.arange(self.num_out_files, dtype=typ), nrows)
else:
ind = cp.arange(nrows, dtype=typ)
cp.floor_divide(ind, math.ceil(nrows / self.num_out_files), out=ind)
for x, group in enumerate(
gdf.scatter_by_map(ind, map_size=self.num_out_files, keep_index=False)
):
self.num_samples[x] += len(group)
# It seems that the `copy()` operations here are necessary
# (test_io.py::test_mulifile_parquet fails otherwise)...
if self.num_threads > 1:
self.queue.put((x, group.copy()))
else:
self._write_table(x, group.copy())
# wait for all writes to finish before exiting
# (so that we aren't using memory)
if self.num_threads > 1:
self.queue.join()
def package_general_metadata(self):
data = {}
if self.cats is None:
return data
data["data_paths"] = self.data_paths
data["file_stats"] = []
for i, path in enumerate(self.data_paths):
fn = path.split(self.fs.sep)[-1]
data["file_stats"].append({"file_name": fn, "num_rows": self.num_samples[i]})
# cats
data["cats"] = []
for c in self.cats:
data["cats"].append({"col_name": c, "index": self.col_idx[c]})
# conts
data["conts"] = []
for c in self.conts:
data["conts"].append({"col_name": c, "index": self.col_idx[c]})
# labels
data["labels"] = []
for c in self.labels:
data["labels"].append({"col_name": c, "index": self.col_idx[c]})
return data
@classmethod
def write_general_metadata(cls, data, fs, out_dir):
if not data:
return
data_paths = data.pop("data_paths", [])
num_out_files = len(data_paths)
# Write file_list
file_list_writer = fs.open(fs.sep.join([out_dir, "_file_list.txt"]), "w")
file_list_writer.write(str(num_out_files) + "\n")
for f in data_paths:
file_list_writer.write(f + "\n")
file_list_writer.close()
# Write metadata json
metadata_writer = fs.open(fs.sep.join([out_dir, "_metadata.json"]), "w")
json.dump(data, metadata_writer)
metadata_writer.close()
@classmethod
def write_special_metadata(cls, data, fs, out_dir):
pass
def _close_writers(self):
for writer in self.data_writers:
writer.close()
return None
def close(self):
if self.num_threads > 1:
# wake up all the worker threads and signal for them to exit
for _ in range(self.num_threads):
self.queue.put(self._eod)
# wait for pending writes to finish
self.queue.join()
# Close writers and collect various metadata
_general_meta = self.package_general_metadata()
_special_meta = self._close_writers()
# Move in-memory files to disk
if self.bytes_io:
self._bytesio_to_disk()
return _general_meta, _special_meta
def _bytesio_to_disk(self):
raise NotImplementedError("In-memory buffering/shuffling not implemented for this format.")
class ParquetWriter(ThreadedWriter):
def __init__(self, out_dir, **kwargs):
super().__init__(out_dir, **kwargs)
self.data_paths = []
self.data_writers = []
self.data_bios = []
for i in range(self.num_out_files):
if self.use_guid:
fn = f"{i}.{guid()}.parquet"
else:
fn = f"{i}.parquet"
path = os.path.join(out_dir, fn)
self.data_paths.append(path)
if self.bytes_io:
bio = BytesIO()
self.data_bios.append(bio)
self.data_writers.append(pwriter(bio, compression=None))
else:
self.data_writers.append(pwriter(path, compression=None))
def _write_table(self, idx, data):
self.data_writers[idx].write_table(data)
def _write_thread(self):
while True:
item = self.queue.get()
try:
if item is self._eod:
break
idx, data = item
with self.write_locks[idx]:
self._write_table(idx, data)
finally:
self.queue.task_done()
@classmethod
def write_special_metadata(cls, md, fs, out_dir):
# Sort metadata by file name and convert list of
# tuples to a list of metadata byte-blobs
md_list = [m[1] for m in sorted(list(md.items()), key=lambda x: natural_sort_key(x[0]))]
# Aggregate metadata and write _metadata file
_write_pq_metadata_file(md_list, fs, out_dir)
def _close_writers(self):
md_dict = {}
for writer, path in zip(self.data_writers, self.data_paths):
fn = path.split(self.fs.sep)[-1]
md_dict[fn] = writer.close(metadata_file_path=fn)
return md_dict
def _bytesio_to_disk(self):
for bio, path in zip(self.data_bios, self.data_paths):
gdf = cudf.io.read_parquet(bio, index=False)
bio.close()
if self.shuffle == Shuffle.PER_WORKER:
gdf = _shuffle_gdf(gdf)
gdf.to_parquet(path, compression=None, index=False)
return
class HugeCTRWriter(ThreadedWriter):
def __init__(self, out_dir, **kwargs):
super().__init__(out_dir, **kwargs)
self.data_paths = [os.path.join(out_dir, f"{i}.data") for i in range(self.num_out_files)]
self.data_writers = [open(f, "ab") for f in self.data_paths]
def _write_table(self, idx, data):
ones = np.array(([1] * data.shape[0]), dtype=np.intc)
df = data[self.column_names].to_pandas().astype(np.single)
for i
# BSD 3-Clause License; see https://github.com/jpivarski/doremi/blob/main/LICENSE
from fractions import Fraction
from dataclasses import dataclass, field
from typing import List, Tuple, Dict, Optional, Union, Generator
import lark
import doremi.parsing
def is_rest(word: str) -> bool:
return all(x == "_" for x in word)
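# e.g. is_rest("____") -> True, is_rest("do") -> False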
@dataclass
class AbstractNote:
start: float
stop: float
word: "Word"
emphasis: int = field(default=0)
octave: int = field(default=0)
augmentations: Tuple["Augmentation"] = field(default=())
def copy(self) -> "AbstractNote":
return AbstractNote(
self.start,
self.stop,
self.word,
self.emphasis,
self.octave,
self.augmentations,
)
def inplace_shift(self, shift: float) -> None:
self.start += shift
self.stop += shift
def inplace_scale(self, scale: float) -> None:
self.start *= scale
self.stop *= scale
@dataclass
class Scope:
symbols: Dict[lark.lexer.Token, "NamedPassage"]
def has(self, symbol: lark.lexer.Token) -> bool:
return symbol in self.symbols
def get(self, symbol: lark.lexer.Token) -> Optional["NamedPassage"]:
return self.symbols.get(symbol)
def add(self, passage: "NamedPassage"):
self.symbols[passage.assignment.function.val] = passage
@dataclass
class SubScope(Scope):
parent: Scope
def has(self, symbol: lark.lexer.Token) -> bool:
if symbol in self.symbols:
return True
else:
return self.parent.has(symbol)
def get(self, symbol: lark.lexer.Token) -> Optional["NamedPassage"]:
out = self.symbols.get(symbol)
if out is not None:
return out
else:
return self.parent.get(symbol)
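# Illustrative lookup chain: SubScope({"f": ...}, Scope({"g": ...})).has("g")
# is True because misses fall through to the parent scope, giving lexical
# scoping for passage calls.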
class AST:
pass
class Expression(AST):
pass
@dataclass
class Word(Expression):
val: lark.lexer.Token
@dataclass
class Call(Expression):
function: Word
args: List[Expression]
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
class Augmentation(AST):
pass
@dataclass
class AugmentStep(Augmentation):
amount: int
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
@dataclass
class AugmentDegree(Augmentation):
amount: int
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
@dataclass
class AugmentRatio(Augmentation):
amount: Fraction
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
@dataclass
class Duration(AST):
amount: Fraction
is_scaling: bool
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
@dataclass
class Modified(AST):
expression: Union[Expression, List[Expression]]
emphasis: int
absolute: int
octave: int
augmentation: Augmentation
duration: Optional[Duration]
repetition: int
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
@dataclass
class Line(AST):
modified: List[Modified]
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
@dataclass
class Assignment(AST):
function: Word
args: List[Word]
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
class Passage(AST):
pass
@dataclass
class NamedPassage(Passage):
assignment: Assignment
lines: List[Line]
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
@dataclass
class UnnamedPassage(Passage):
lines: List[Line]
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
def evaluate(
node: Union[list, Word, Call, Modified, Line, Passage],
scope: Scope,
emphasis: int,
octave: int,
augmentations: Tuple[Augmentation],
breadcrumbs: Tuple[str],
) -> Tuple[float, List[AbstractNote]]:
if isinstance(node, list):
last_stop = 0.0
all_notes = []
for subnode in node:
duration, notes = evaluate(
subnode, scope, emphasis, octave, augmentations, breadcrumbs
)
for note in notes:
note.inplace_shift(last_stop)
all_notes.extend(notes)
last_stop += duration
return last_stop, all_notes
elif isinstance(node, Word):
if scope.has(node.val):
return evaluate(
Call(node, []), scope, emphasis, octave, augmentations, breadcrumbs
)
elif is_rest(node.val):
return float(len(node.val)), []
else:
note = AbstractNote(
0.0,
1.0,
node,
emphasis,
octave,
augmentations,
)
return 1.0, [note]
elif isinstance(node, Call):
if node.function.val in breadcrumbs:
raise RecursiveFunction(node.function.val)
namedpassage = scope.get(node.function.val)
if namedpassage is None:
raise UndefinedSymbol(node.function.val)
parameters = namedpassage.assignment.args
arguments = node.args
if len(parameters) != len(arguments):
raise MismatchingArguments(node.function.val)
subscope = SubScope(
{
param.val: NamedPassage(Assignment(param, []), [arg])
for param, arg in zip(parameters, arguments)
},
scope,
)
breadcrumbs = breadcrumbs + (node.function.val,)
return evaluate(
namedpassage, subscope, emphasis, octave, augmentations, breadcrumbs
)
elif isinstance(node, Modified):
if node.absolute > 0:
augmentations = augmentations[: -node.absolute]
if node.augmentation is not None:
augmentations = augmentations + (node.augmentation,)
if isinstance(node.expression, Expression):
natural_duration, notes = evaluate(
node.expression,
scope,
emphasis + node.emphasis,
octave + node.octave,
augmentations,
breadcrumbs,
)
else:
natural_duration, notes = evaluate(
node.expression,
scope,
emphasis + node.emphasis,
octave + node.octave,
augmentations,
breadcrumbs,
)
if node.duration is not None:
if node.duration.is_scaling:
factor = float(node.duration.amount)
natural_duration = natural_duration * factor
else:
factor = float(node.duration.amount) / natural_duration
natural_duration = float(node.duration.amount)
for note in notes:
note.inplace_scale(factor)
if node.repetition == 1:
duration = natural_duration
else:
all_notes = list(notes)
for i in range(1, node.repetition):
new_notes = [x.copy() for x in notes]
for note in new_notes:
note.inplace_shift(i * natural_duration)
all_notes.extend(new_notes)
duration = node.repetition * natural_duration
notes = all_notes
return duration, notes
elif isinstance(node, Line):
return evaluate(
node.modified, scope, emphasis, octave, augmentations, breadcrumbs
)
elif isinstance(node, Passage):
max_duration = 0.0
all_notes = []
for line in node.lines:
duration, notes = evaluate(
line, scope, emphasis, octave, augmentations, breadcrumbs
)
all_notes.extend(notes)
if max_duration < duration:
max_duration = duration
return max_duration, all_notes
else:
raise AssertionError(repr(node))
@dataclass
class Collection(AST):
passages: List[Passage]
comments: Optional[List[lark.lexer.Token]] = field(
default=None, repr=False, compare=False, hash=False
)
parsingtree: Optional[lark.tree.Tree] = field(
default=None, repr=False, compare=False, hash=False
)
source: Optional[str] = field(default=None, repr=False, compare=False, hash=False)
def evaluate(
self, scope: Optional[Scope]
) -> Tuple[float, List[AbstractNote], Scope]:
if scope is None:
scope = Scope({})
unnamed_passages: List[UnnamedPassage] = []
for passage in self.passages:
if isinstance(passage, NamedPassage):
scope.add(passage)
else:
unnamed_passages.append(passage)
try:
duration, notes = evaluate(unnamed_passages, scope, 0, 0, (), ())
except DoremiError as err:
err.source = self.source
raise
return duration, notes, scope
def get_comments(
node: Union[lark.tree.Tree, lark.lexer.Token]
) -> Generator[str, None, None]:
if isinstance(node, lark.tree.Tree):
if node.data == "start":
for child in node.children:
yield from get_comments(child)
elif node.data == "assign_passage":
for child in node.children:
yield from get_comments(child)
elif node.data == "passage":
for child in node.children:
yield from get_comments(child)
elif node.data == "line":
pass
elif node.data == "assign":
pass
else:
raise AssertionError(repr(node))
else:
if node.type == "BLANK" or node.type == "BLANK_END":
yield node
else:
raise AssertionError(repr(node))
def to_ast(node: Union[lark.tree.Tree, lark.lexer.Token]) -> AST:
if isinstance(node, lark.tree.Tree):
if node.data == "assign_passage":
subnodes = [x for x in node.children if isinstance(x, lark.tree.Tree)]
passage = subnodes[-1]
assert isinstance(passage, lark.tree.Tree) and passage.data == "passage"
lines = [
to_ast(x)
for x in passage.children
if not isinstance(x, lark.lexer.Token)
]
if len(subnodes) == 2:
return NamedPassage(to_ast(subnodes[0]), lines, node)
else:
assert len(subnodes) == 1
return UnnamedPassage(lines, node)
elif node.data == "assign":
assert 1 <= len(node.children) <= 2
subnode1 = node.children[0]
assert isinstance(subnode1, lark.lexer.Token) and subnode1.type == "WORD"
if is_rest(subnode1):
raise SymbolAllUnderscores(subnode1)
function = Word(subnode1)
if len(node.children) == 2:
subnode2 = node.children[1]
assert (
isinstance(subnode2, lark.tree.Tree) and subnode2.data == "defargs"
)
assert all(
isinstance(x, lark.lexer.Token) and x.type == "WORD"
for x in subnode2.children
)
args = [Word(x) for x in subnode2.children]
else:
args = []
return Assignment(function, args, node)
elif node.data == "line":
return Line([to_ast(x) for x in node.children], node)
elif node.data == "modified":
assert all(isinstance(x, lark.tree.Tree) for x in node.children)
assert 1 <= len(node.children) <= 6
index = 0
if node.children[index].data == "emphasis":
emphasis = len(node.children[index].children)
index += 1
else:
emphasis = 0
if node.children[index].data == "absolute":
absolute = len(node.children[index].children)
index += 1
else:
absolute = 0
subnode = node.children[index]
assert subnode.data == "expression"
if isinstance(subnode.children[0], lark.lexer.Token):
if len(subnode.children) == 1:
expression = to_ast(subnode.children[0])
else:
function = to_ast(subnode.children[0])
subsubnode = subnode.children[1]
assert (
isinstance(subsubnode, lark.tree.Tree)
and subsubnode.data == "args"
)
args = [to_ast(x) for x in subsubnode.children]
expression = Call(function, args, subnode)
else:
expression = [to_ast(x) for x in subnode.children]
index = -1
if node.children[index].data == "repetition":
repetition = int(node.children[index].children[0])
index -= 1
else:
repetition = 1
if node.children[index].data == "duration":
subnode = node.children[index].children[0]
assert isinstance(subnode, lark.tree.Tree)
if subnode.data == "dot_duration":
duration = Duration(Fraction(len(subnode.children), 1), False)
elif (
subnode.data == "ratio_duration" or subnode.data == "scale_duration"
):
ints = subnode.children[0].children
assert all(
isinstance(x, lark.lexer.Token) and x.type == "POSITIVE_INT"
for x in ints
)
if len(ints) == 1:
ratio = Fraction(int(ints[0]), 1)
elif len(ints) == 2:
ratio = Fraction(int(ints[0]), int(ints[1]))
else:
raise AssertionError(subnode.children[0])
duration = Duration(
ratio, subnode.data == "scale_duration", subnode
)
else:
raise AssertionError(subnode)
index -= 1
else:
duration = None
if node.children[index].data == "augmentation":
subnode = node.children[index].children[0]
if subnode.data == "upward_step" or subnode.data == "downward_step":
subnodes = subnode.children
if len(subnodes) == 1:
assert isinstance(subnodes[0], lark.lexer.Token)
if subnodes[0].type == "STEP_UPS":
amount = len(subnodes[0])
elif subnodes[0].type == "STEP_DOWNS":
amount = -len(subnodes[0])
else:
raise AssertionError(repr(subnodes[0]))
elif len(subnodes) == 2:
assert isinstance(subnodes[0], lark.lexer.Token)
assert isinstance(subnodes[1], lark.lexer.Token)
assert subnodes[1].type == "INT"
if subnodes[0].type == "STEP_UP":
amount = int(subnodes[1])
elif subnodes[0].type == "STEP_DOWN":
amount = -int(subnodes[1])
else:
raise AssertionError(repr(subnodes[0]))
else:
raise AssertionError(len(subnodes))
if amount == 0:
augmentation = None
else:
augmentation = AugmentStep(amount, subnode)
elif (
subnode.data == "upward_degree" or subnode.data == "downward_degree"
):
subnodes = subnode.children
if len(subnodes) == 1:
assert isinstance(subnodes[0], lark.lexer.Token)
if subnodes[0].type == "DEGREE_UPS":
amount = len(subnodes[0])
elif subnodes[0].type == "DEGREE_DOWNS":
amount = -len(subnodes[0])
else:
raise AssertionError(repr(subnodes[0]))
elif len(subnodes) == 2:
assert isinstance(subnodes[0], lark.lexer.Token)
assert isinstance(subnodes[1], lark.lexer.Token)
assert subnodes[1].type == "INT"
if subnodes[0].type == "DEGREE_UP":
amount = int(subnodes[1])
elif | |
storage'],
'tivoli storage manager fastback center': ['tivoli storage'],
'tivoli storage manager fastback for bare': ['tivoli storage'],
'tivoli storage manager fastback for bare machine': ['tivoli storage'],
'tivoli storage manager fastback for bare machine recovery': [ 'tivoli '
'storage'],
'tivoli storage manager fastback for microsoft': ['tivoli storage'],
'tivoli storage manager fastback for microsoft exchange': [ 'tivoli '
'storage'],
'tivoli storage manager fastback for workstations': ['tivoli storage'],
'tivoli storage manager for advanced': ['tivoli storage'],
'tivoli storage manager for advanced copy': ['tivoli storage'],
'tivoli storage manager for advanced copy services': ['tivoli storage'],
'tivoli storage manager for copy': ['tivoli storage'],
'tivoli storage manager for copy services': ['tivoli storage'],
'tivoli storage manager for databases': ['tivoli storage'],
'tivoli storage manager for enterprise': ['tivoli storage'],
'tivoli storage manager for enterprise resource': ['tivoli storage'],
'tivoli storage manager for enterprise resource planning': [ 'tivoli '
'storage'],
'tivoli storage manager for hardware': ['tivoli storage'],
'tivoli storage manager for mail': ['tivoli storage'],
'tivoli storage manager for microsoft': ['tivoli storage'],
'tivoli storage manager for microsoft sharepoint': ['tivoli storage'],
'tivoli storage manager for space': ['tivoli storage'],
'tivoli storage manager for space management': ['tivoli storage'],
'tivoli storage manager for storage': ['tivoli storage'],
'tivoli storage manager for storage area': ['tivoli storage'],
'tivoli storage manager for storage area networks': ['tivoli storage'],
'tivoli storage manager for system': ['tivoli storage'],
'tivoli storage manager for system backup': ['tivoli storage'],
'tivoli storage manager for system backup and recovery': ['tivoli storage'],
'tivoli storage manager for virtual': ['tivoli storage'],
'tivoli storage manager for virtual environments': ['tivoli storage'],
'tivoli storage manager hsm': ['tivoli storage'],
'tivoli storage manager hsm for windows': ['tivoli storage'],
'tivoli storage manager suite': ['tivoli storage'],
'tivoli storage manager suite for unified': ['tivoli storage'],
'tivoli storage manager suite for unified recovery': ['tivoli storage'],
'tivoli storage manager suite for unified recovery entry': [ 'tivoli '
'storage'],
'tivoli storage optimizer': ['tivoli storage'],
'tivoli storage optimizer for z/os': ['tivoli storage'],
'tivoli storage process': ['tivoli storage'],
'tivoli storage process manager': ['tivoli storage'],
'tivoli storage productivity': ['tivoli storage'],
'tivoli storage productivity center': ['tivoli storage'],
'tivoli storage productivity center advanced': ['tivoli storage'],
'tivoli storage productivity center basic': ['tivoli storage'],
'tivoli storage productivity center basic edition': ['tivoli storage'],
'tivoli storage productivity center for data': ['tivoli storage'],
'tivoli storage productivity center for disk': ['tivoli storage'],
'tivoli storage productivity center for disk select': ['tivoli storage'],
'tivoli storage productivity center for replication': ['tivoli storage'],
'tivoli storage productivity center select': ['tivoli storage'],
'tivoli storage productivity center select edition': ['tivoli storage'],
'tivoli storage productivity center standard': ['tivoli storage'],
'tivoli storage productivity center standard edition': ['tivoli storage'],
'tivoli storage resource': ['tivoli storage'],
'tivoli storage resource manager': ['tivoli storage'],
'tivoli storage resource manager for chargeback': ['tivoli storage'],
'tivoli storage resource manager for databases': ['tivoli storage'],
'tivoli switch analyzer': ['tivoli'],
'tivoli system': ['tivoli'],
'tivoli system automation': ['tivoli system'],
'tivoli system automation application': ['tivoli system'],
'tivoli system automation application manager': ['tivoli system'],
'tivoli system automation for integrated': ['tivoli system'],
'tivoli system automation for integrated operations': ['tivoli system'],
'tivoli system automation for integrated operations management': [ 'tivoli '
'system'],
'tivoli system automation for multiplatforms': ['tivoli system'],
'tivoli system automation for z/os': ['tivoli system'],
'tivoli tape': ['tivoli'],
'tivoli tape optimizer': ['tivoli tape'],
'tivoli tape optimizer on z/os': ['tivoli tape'],
'tivoli unified': ['tivoli'],
'tivoli unified process': ['tivoli unified'],
'tivoli unified process composer': ['tivoli unified'],
'tivoli unified single': ['tivoli unified'],
'tivoli unified single sign-on': ['tivoli unified'],
'tivoli universal agent': ['tivoli'],
'tivoli usage': ['tivoli'],
'tivoli usage and accounting': ['tivoli usage'],
'tivoli usage and accounting manager': ['tivoli usage'],
'tivoli usage and accounting manager enterprise': ['tivoli usage'],
'tivoli usage and accounting manager enterprise edition': ['tivoli usage'],
'tivoli usage and accounting manager for z/os': ['tivoli usage'],
'tivoli user administration': ['tivoli'],
'tivoli web': ['tivoli'],
'tivoli web access': ['tivoli web'],
'tivoli web access for information': ['tivoli web'],
'tivoli web access for information management': ['tivoli web'],
'tivoli web availability': ['tivoli web'],
'tivoli web availability monitor': ['tivoli web'],
'tivoli web response': ['tivoli web'],
'tivoli web response monitor': ['tivoli web'],
'tivoli web segment': ['tivoli web'],
'tivoli web segment analyzer': ['tivoli web'],
'tivoli web site': ['tivoli web'],
'tivoli web site analyzer': ['tivoli web'],
'tivoli workload': ['tivoli'],
'tivoli workload scheduler': ['tivoli workload'],
'tivoli workload scheduler for applications': ['tivoli workload'],
'tivoli workload scheduler for virtualized': ['tivoli workload'],
'tivoli workload scheduler for virtualized data': ['tivoli workload'],
'tivoli workload scheduler for virtualized data centers': [ 'tivoli '
'workload'],
'tivoli workload scheduler for z/os': ['tivoli workload'],
'tivoli workload scheduler loadleveler': ['tivoli workload'],
'tivoli zsecure': ['tivoli'],
'tivoli zsecure admin': ['tivoli zsecure'],
'tivoli zsecure alert': ['tivoli zsecure'],
'tivoli zsecure alert for acf2': ['tivoli zsecure'],
'tivoli zsecure alert for racf': ['tivoli zsecure'],
'tivoli zsecure audit': ['tivoli zsecure'],
'tivoli zsecure audit for acf2': ['tivoli zsecure'],
'tivoli zsecure audit for racf': ['tivoli zsecure'],
'tivoli zsecure audit for top': ['tivoli zsecure'],
'tivoli zsecure audit for top secret': ['tivoli zsecure'],
'tivoli zsecure cics': ['tivoli zsecure'],
'tivoli zsecure cics toolkit': ['tivoli zsecure'],
'tivoli zsecure command': ['tivoli zsecure'],
'tivoli zsecure command verifier': ['tivoli zsecure'],
'tivoli zsecure manager': ['tivoli zsecure'],
'tivoli zsecure manager for racf': ['tivoli zsecure'],
'tivoli zsecure manager for racf z/vm': ['tivoli zsecure'],
'tivoli zsecure visual': ['tivoli zsecure'],
'tkinter': ['python library'],
'tls': ['network protocol'],
'tolerance of change and uncertainty': ['critical thinking'],
'tolerant': ['job specific soft skills'],
'tomcat': ['apache software', 'web server'],
'tool training': ['workplace training'],
'topic model': ['unsupervised learning'],
'topology': ['math skill'],
'total productive maintenance': ['manufacturing method'],
'total quality management': ['manufacturing method'],
'totalstorage': ['software'],
'totalstorage productivity': ['totalstorage'],
'totalstorage productivity center': ['totalstorage productivity'],
'totalstorage productivity center for fabric': [ 'totalstorage '
'productivity'],
'tpf': ['software'],
'tpf toolkit': ['tpf'],
'tracert': ['network tool'],
'train': ['industry', 'job specific soft skills'],
'train the trainers': ['training'],
'trainability': ['work ethic'],
'trainable': ['job specific soft skills'],
'trained classifier': ['machine learning'],
'training': ['industry', 'job specific soft skills'],
'transaction data': ['data'],
'transaction processing': ['capability'],
'transaction processing facility': ['real time operating system'],
'transfer': ['activity'],
'transformation model': ['feature extraction'],
'transformation role': ['business role'],
'transformation skill': ['business skill'],
'transistor': ['electrical device'],
'transition manager': ['manager'],
'translation': ['activity'],
'translation cloud': ['cloud computing platform'],
'transmission control protocol': ['control protocol'],
'transportation': ['industry'],
'travel': ['activity'],
'travel visa': ['legal document'],
'travis': ['control framework'],
'treasury with sap s/4hana': ['sap certified application associate'],
'treatment': ['situation'],
'tree based model': ['supervised learning'],
'tree ensemble': ['tree based model'],
'trello': ['software'],
'trigonometry': ['math skill'],
'triple store': ['graph database'],
'tririga': ['software'],
'tririga application platform': ['tririga'],
'tririga cad integrator/publisher': ['tririga'],
'tririga energy optimization': ['tririga'],
'tririga portfolio': ['tririga'],
'tririga portfolio data': ['tririga portfolio'],
'tririga portfolio data manager': ['tririga portfolio'],
'troubleshooting': [ 'activity',
'job specific soft skills',
'technical skill'],
'true negative': ['testing outcome'],
'true positive': ['testing outcome'],
'truncated singular value decomposition': [ 'linear dimension reduction '
'model'],
'trust': ['positive emotion'],
'twitter': ['social media company'],
'txseries': ['software'],
'txseries for multiplatforms': ['txseries'],
'type system': ['design pattern'],
'type theory': ['computer science', 'math skill'],
'typescript': ['programming language'],
'typing skill': ['business skill'],
'ubuntu linux': ['debian linux'],
'udacity': ['mooc'],
'udp': ['network protocol'],
'ui design': ['design thinking'],
'ultraseek': ['search engine'],
'uml': ['modeling language'],
'unapproachable': ['team culture', 'work culture'],
'unavailability': ['state'],
'under-specified query': ['query'],
'undergraduate degree': ['degree'],
'unica': ['software'],
'unica campaign': ['unica'],
'unica customerinsight': ['unica'],
'unica detect': ['unica'],
'unica distributed marketing': ['unica'],
'unica emessage': ['unica'],
'unica interact': ['unica'],
'unica interactive': ['unica'],
'unica interactive marketing': ['unica interactive'],
'unica interactive marketing ondemand': ['unica interactive'],
'unica leads': ['unica'],
'unica marketing': ['unica'],
'unica marketing operations': ['unica marketing'],
'unica marketing platform': ['unica marketing'],
'unica netinsight ondemand': ['unica'],
'unica optimize': ['unica'],
'unica predictiveinsight': ['unica'],
'unicredit': ['financial company'],
'unified messaging': ['software'],
'unified messaging for websphere': ['unified messaging'],
'unified messaging for websphere voice': ['unified messaging'],
'unified messaging for websphere voice response': ['unified messaging'],
'unified method framework': ['technical framework'],
'uninstall': ['activity'],
'unit testing': ['test strategy'],
'universal windows platform': ['mcsa certification'],
'university': ['school'],
'unix': ['operating system'],
'unix administrator': ['system administrator'],
'unix filesystem': ['file system'],
'unsatisfied': ['negative emotion'],
'unstructured data': ['data'],
'unsupervised learning': ['machine learning'],
'upgrade': ['activity'],
'upgrade java se 7 to java se 8 ocp programmer': ['java certification'],
'upgrade ocp java 6, 7 & 8 to java se 11 developer': ['java certification'],
'upgrade | |
$DigitValue[text.charCodeAt(p)];
accum |= k << bits_in_accum;
bits_in_accum += bits_per_char;
if (bits_in_accum >= PyLong_SHIFT) {
this.ob_digit[pdigit] = accum & PyLong_MASK;
pdigit++;
accum >>>= PyLong_SHIFT;
bits_in_accum -= PyLong_SHIFT;
}
}
if (bits_in_accum) {
this.ob_digit[pdigit++] = accum;
}
while (pdigit < n) {
this.ob_digit[pdigit++] = 0;
}
long_normalize(this);
} else {
// Non-binary bases (such as radix == 10)
var c, i, convwidth, convmultmax, convmult, pz, pzstop, scan, size_z;
if ($log_base_PyLong_BASE[radix] == 0.0) {
var i = 1;
var convmax = radix;
$log_base_PyLong_BASE[radix] = Math.log(radix) / Math.log(PyLong_BASE);
while (1) {
var next = convmax * radix;
if (next > PyLong_BASE) break;
convmax = next;
++i;
}
$convmultmax_base[radix] = convmax;
$convwidth_base[radix] = i;
}
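// Illustrative (assuming PyLong_SHIFT is 15, i.e. PyLong_BASE == 32768): for
// radix 10 this caches convwidth = 4 and convmultmax = 10000, the largest
// power of the base that still fits in a single digit, so the parser below
// can consume input characters in groups of four per multi-precision pass.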
scan = 0;
while ($DigitValue[text.charCodeAt(scan)] < radix)
++scan;
nchars = scan;
size_z = scan * $log_base_PyLong_BASE[radix] + 1;
for (var i = 0; i < size_z; i ++) {
this.ob_digit[i] = 0;
}
this.ob_size = 0;
convwidth = $convwidth_base[radix];
convmultmax = $convmultmax_base[radix];
for (var str = 0; str < scan;) {
c = $DigitValue[text.charCodeAt(str++)];
for (i = 1; i < convwidth && str != scan; ++i, ++str) {
c = c * radix + $DigitValue[text.charCodeAt(str)];
}
convmult = convmultmax;
if (i != convwidth) {
convmult = radix;
for ( ; i > 1; --i) convmult *= radix;
}
pz = 0;
pzstop = this.ob_size;
for (; pz < pzstop; ++pz) {
c += this.ob_digit[pz] * convmult;
this.ob_digit[pz] = c & PyLong_MASK;
c >>>= PyLong_SHIFT;
}
if (c) {
if (this.ob_size < size_z) {
this.ob_digit[pz] = c;
this.ob_size++;
} else {
this.ob_digit[this.ob_size] = c;
}
}
}
}
text = text.slice(nchars);
if (neg) this.ob_size = -this.ob_size;
if (text.charAt(0) == 'l' || text.charAt(0) == 'L') text = text.slice(1);
text = text.lstrip();
if (text.length === 0) {
return this;
}
throw $pyce(@{{ValueError}}("invalid literal for long() with base " +
@{{!radix}} + ": " + @{{!value}}));
} else {
throw $pyce(@{{TypeError}}("TypeError: long() argument must be a string or a number"));
}
if (isNaN(v) || !isFinite(v)) {
throw $pyce(@{{ValueError}}("invalid literal for long() with base " + @{{!radix}} + ": '" + @{{!v}} + "'"));
}
return this;
};
$long.__init__ = function () {};
$long.__number__ = 0x04;
$long.__name__ = 'long';
$long.prototype = $long;
$long.__class__ = $long;
$long.ob_size = 0;
$long.toExponential = function (fractionDigits) {
return (typeof fractionDigits == 'undefined' || fractionDigits === null) ? this.__v.toExponential() : this.__v.toExponential(fractionDigits);
};
$long.toFixed = function (digits) {
return (typeof digits == 'undefined' || digits === null) ? this.__v.toFixed() : this.__v.toFixed(digits);
};
$long.toLocaleString = function () {
return this.__v.toLocaleString();
};
$long.toPrecision = function (precision) {
return (typeof precision == 'undefined' || precision === null) ? this.__v.toPrecision() : this.__v.toPrecision(precision);
};
$long.toString = function (radix) {
return (typeof radix == 'undefined' || radix === null) ? Format(this, 10, false, false) : Format(this, radix, false, false, false);
};
$long.valueOf = function() {
var x, v;
x = AsScaledDouble(this);
// ldexp(a,b) == a * (2**b)
v = x[0] * Math.pow(2.0, x[1] * PyLong_SHIFT);
if (!isFinite(v)) {
throw $pyce(@{{OverflowError}}('long int too large to convert to float'));
}
return v;
};
$long.__str__ = function () {
if (typeof this == 'function') return "<type '" + this.__name__ + "'>";
return Format(this, 10, false, false);
};
$long.__repr__ = function () {
if (typeof this == 'function') return "<type '" + this.__name__ + "'>";
return Format(this, 10, true, false);
};
$long.__nonzero__ = function () {
return this.ob_size != 0;
};
$long.__cmp__ = function (b) {
var sign;
if (this.ob_size != b.ob_size) {
if (this.ob_size < b.ob_size) return -1;
return 1;
}
var i = this.ob_size < 0 ? - this.ob_size : this.ob_size;
while (--i >= 0 && this.ob_digit[i] == b.ob_digit[i])
;
if (i < 0) return 0;
if (this.ob_digit[i] < b.ob_digit[i]) {
if (this.ob_size < 0) return 1;
return -1;
}
if (this.ob_size < 0) return -1;
return 1;
};
$long.__hash__ = function () {
var s = this.__str__();
var v = this.valueOf();
if (v.toString() == s) {
return v;
}
return s;
};
$long.__invert__ = function () {
var x = this.__add__($const_long_1);
x.ob_size = -x.ob_size;
return x;
};
$long.__neg__ = function () {
var x = new $long(0);
x.ob_digit = this.ob_digit.slice(0);
x.ob_size = -this.ob_size;
return x;
};
$long.__abs__ = function () {
if (this.ob_size >= 0) return this;
var x = new $long(0);
x.ob_digit = this.ob_digit.slice(0);
x.ob_size = -this.ob_size;
return x;
};
$long.__lshift = function (y) {
var a, z, wordshift, remshift, oldsize, newsize,
accum, i, j;
if (y < 0) {
throw $pyce(@{{ValueError}}('negative shift count'));
}
if (y >= $max_float_int) {
throw $pyce(@{{ValueError}}('outrageous left shift count'));
}
a = this;
wordshift = Math.floor(y / PyLong_SHIFT);
remshift = y - wordshift * PyLong_SHIFT;
oldsize = a.ob_size < 0 ? -a.ob_size : a.ob_size;
newsize = oldsize + wordshift;
if (remshift) ++newsize;
z = new $long(0);
z.ob_size = a.ob_size < 0 ? -newsize : newsize;
for (i = 0; i < wordshift; i++) {
z.ob_digit[i] = 0;
}
accum = 0;
for (i = wordshift, j = 0; j < oldsize; i++, j++) {
accum |= a.ob_digit[j] << remshift;
z.ob_digit[i] = accum & PyLong_MASK;
accum >>>= PyLong_SHIFT;
}
if (remshift) {
z.ob_digit[newsize-1] = accum;
}
z = long_normalize(z);
return z;
};
$long.__lshift__ = function (y) {
switch (y.__number__) {
case 0x01:
if (y == Math.floor(y)) return this.__lshift(y);
break;
case 0x02:
return this.__lshift(y.__v);
case 0x04:
y = y.valueOf();
return this.__lshift(y);
}
return @{{NotImplemented}};
};
$long.__rlshift__ = function (y) {
switch (y.__number__) {
case 0x02:
return (new $long(y.__v)).__lshift(this.valueOf());
case 0x04:
return y.__lshift(this.valueOf());
}
return @{{NotImplemented}};
};
$long.__rshift = function (y) {
var a, z, size, wordshift, newsize, loshift, hishift,
lomask, himask, i, j;
if (y.__number__ != 0x01) {
y = y.valueOf();
} else {
if (y != Math.floor(y)) {
throw $pyce(@{{TypeError}}("unsupported operand type(s) for >>: 'long' and 'float'"));
}
}
if (y < 0) {
throw $pyce(@{{ValueError}}('negative shift count'));
}
if (y >= $max_float_int) {
throw $pyce(@{{ValueError}}('shift count too big'));
}
a = this;
size = this.ob_size;
if (this.ob_size < 0) {
size = -size;
a = this.__add__($const_long_1);
a.ob_size = -a.ob_size;
}
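// For negative operands, `a` above is ~this (via the identity ~x == -(x + 1)):
// the inverted magnitude is shifted logically and inverted again at the end
// of this function, reproducing CPython's arithmetic (sign-extending) shift.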
wordshift = Math.floor(y / PyLong_SHIFT);
newsize = size - wordshift;
if (newsize <= 0) {
z = $const_long_0;
} else {
loshift = y % PyLong_SHIFT;
hishift = PyLong_SHIFT - loshift;
lomask = (1 << hishift) - 1;
himask = PyLong_MASK ^ lomask;
z = new $long(0);
z.ob_size = a.ob_size < 0 ? -newsize : newsize;
for (i = 0, j = wordshift; i < newsize; i++, j++) {
z.ob_digit[i] = (a.ob_digit[j] >>> loshift) & lomask;
if (i+1 < newsize) {
z.ob_digit[i] |=
(a.ob_digit[j+1] << hishift) & himask;
}
}
z = long_normalize(z);
}
if (this.ob_size < 0) {
z = z.__add__($const_long_1);
z.ob_size = -z.ob_size;
}
return z;
};
$long.__rshift__ = function (y) {
switch (y.__number__) {
case 0x01:
if (y == Math.floor(y)) return this.__rshift(y);
break;
case 0x02:
return this.__rshift(y.__v);
case 0x04:
y = y.valueOf();
return this.__rshift(y);
}
return @{{NotImplemented}};
};
$long.__rrshift__ = function (y) {
switch (y.__number__) {
case 0x02:
return (new $long(y.__v)).__rshift(this.valueOf());
case 0x04:
return y.__rshift(this.valueOf());
}
return @{{NotImplemented}};
};
$long.__and = function (b) {
var a, maska, maskb, negz, size_a, size_b, size_z,
i, z, diga, digb, v, op;
a = this;
if (a.ob_size < 0) {
a = a.__invert__();
maska = PyLong_MASK;
} else {
maska = 0;
}
if (b.ob_size < 0) {
b = b.__invert__();
maskb = PyLong_MASK;
} else {
maskb = 0;
}
negz = 0;
op = '&';
if (maska && maskb) {
op = '|';
maska ^= PyLong_MASK;
maskb ^= PyLong_MASK;
negz = -1;
}
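// When both operands are negative, De Morgan's law rewrites a & b as
// ~(~a | ~b): the inverted magnitudes are combined with '|' and negz = -1
// marks the result for re-inversion below.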
size_a = a.ob_size;
size_b = b.ob_size;
size_z = op == '&'
? (maska
? size_b
: (maskb ? size_a : (size_a < size_b ? size_a : size_b)))
: (size_a > size_b ? size_a : size_b);
z = new $long(0);
z.ob_size = size_z;
switch (op) {
case '&':
for (i = 0; i < size_z; ++i) {
diga | |
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""This module defines the JumpStartModelsCache class."""
from __future__ import absolute_import
import datetime
from difflib import get_close_matches
from typing import List, Optional
import json
import boto3
import botocore
from packaging.version import Version
from packaging.specifiers import SpecifierSet
from sagemaker.jumpstart.constants import (
JUMPSTART_DEFAULT_MANIFEST_FILE_S3_KEY,
JUMPSTART_DEFAULT_REGION_NAME,
)
from sagemaker.jumpstart.parameters import (
JUMPSTART_DEFAULT_MAX_S3_CACHE_ITEMS,
JUMPSTART_DEFAULT_MAX_SEMANTIC_VERSION_CACHE_ITEMS,
JUMPSTART_DEFAULT_S3_CACHE_EXPIRATION_HORIZON,
JUMPSTART_DEFAULT_SEMANTIC_VERSION_CACHE_EXPIRATION_HORIZON,
)
from sagemaker.jumpstart.types import (
JumpStartCachedS3ContentKey,
JumpStartCachedS3ContentValue,
JumpStartModelHeader,
JumpStartModelSpecs,
JumpStartS3FileType,
JumpStartVersionedModelId,
)
from sagemaker.jumpstart import utils
from sagemaker.utilities.cache import LRUCache
class JumpStartModelsCache:
"""Class that implements a cache for JumpStart models manifests and specs.
The manifest and specs associated with JumpStart models provide the information necessary
for launching JumpStart models from the SageMaker SDK.
"""
# fmt: off
def __init__(
self,
region: str = JUMPSTART_DEFAULT_REGION_NAME,
max_s3_cache_items: int = JUMPSTART_DEFAULT_MAX_S3_CACHE_ITEMS,
s3_cache_expiration_horizon: datetime.timedelta =
JUMPSTART_DEFAULT_S3_CACHE_EXPIRATION_HORIZON,
max_semantic_version_cache_items: int =
JUMPSTART_DEFAULT_MAX_SEMANTIC_VERSION_CACHE_ITEMS,
semantic_version_cache_expiration_horizon: datetime.timedelta =
JUMPSTART_DEFAULT_SEMANTIC_VERSION_CACHE_EXPIRATION_HORIZON,
manifest_file_s3_key: str =
JUMPSTART_DEFAULT_MANIFEST_FILE_S3_KEY,
s3_bucket_name: Optional[str] = None,
s3_client_config: Optional[botocore.config.Config] = None,
) -> None: # fmt: on
"""Initialize a ``JumpStartModelsCache`` instance.
Args:
region (str): AWS region to associate with cache. Default: region associated
with boto3 session.
max_s3_cache_items (int): Maximum number of items to store in s3 cache.
Default: 20.
s3_cache_expiration_horizon (datetime.timedelta): Maximum time to hold
items in s3 cache before invalidation. Default: 6 hours.
max_semantic_version_cache_items (int): Maximum number of items to store in
semantic version cache. Default: 20.
semantic_version_cache_expiration_horizon (datetime.timedelta):
Maximum time to hold items in semantic version cache before invalidation.
Default: 6 hours.
manifest_file_s3_key (str): The key in S3 corresponding to the sdk metadata manifest.
s3_bucket_name (Optional[str]): S3 bucket to associate with cache.
Default: JumpStart-hosted content bucket for region.
s3_client_config (Optional[botocore.config.Config]): s3 client config to use for cache.
Default: None (no config).
"""
self._region = region
self._s3_cache = LRUCache[JumpStartCachedS3ContentKey, JumpStartCachedS3ContentValue](
max_cache_items=max_s3_cache_items,
expiration_horizon=s3_cache_expiration_horizon,
retrieval_function=self._get_file_from_s3,
)
self._model_id_semantic_version_manifest_key_cache = LRUCache[
JumpStartVersionedModelId, JumpStartVersionedModelId
](
max_cache_items=max_semantic_version_cache_items,
expiration_horizon=semantic_version_cache_expiration_horizon,
retrieval_function=self._get_manifest_key_from_model_id_semantic_version,
)
self._manifest_file_s3_key = manifest_file_s3_key
self.s3_bucket_name = (
utils.get_jumpstart_content_bucket(self._region)
if s3_bucket_name is None
else s3_bucket_name
)
self._s3_client = (
boto3.client("s3", region_name=self._region, config=s3_client_config)
if s3_client_config
else boto3.client("s3", region_name=self._region)
)
def set_region(self, region: str) -> None:
"""Set region for cache. Clears cache after new region is set."""
if region != self._region:
self._region = region
self.clear()
def get_region(self) -> str:
"""Return region for cache."""
return self._region
def set_manifest_file_s3_key(self, key: str) -> None:
"""Set manifest file s3 key. Clears cache after new key is set."""
if key != self._manifest_file_s3_key:
self._manifest_file_s3_key = key
self.clear()
def get_manifest_file_s3_key(self) -> str:
"""Return manifest file s3 key for cache."""
return self._manifest_file_s3_key
def set_s3_bucket_name(self, s3_bucket_name: str) -> None:
"""Set s3 bucket used for cache."""
if s3_bucket_name != self.s3_bucket_name:
self.s3_bucket_name = s3_bucket_name
self.clear()
def get_bucket(self) -> str:
"""Return bucket used for cache."""
return self.s3_bucket_name
def _get_manifest_key_from_model_id_semantic_version(
self,
key: JumpStartVersionedModelId,
value: Optional[JumpStartVersionedModelId], # pylint: disable=W0613
) -> JumpStartVersionedModelId:
"""Return model id and version in manifest that matches semantic version/id.
Uses ``packaging.version`` to perform version comparison. The highest model version
that matches the semantic version and is compatible with the installed SageMaker
version is used.
Args:
key (JumpStartVersionedModelId): Key for which to fetch versioned model id.
value (Optional[JumpStartVersionedModelId]): Unused variable for current value of
old cached model id/version.
Raises:
KeyError: If the semantic version is not found in the manifest, or is found but
the SageMaker version needs to be upgraded in order for the model to be used.
"""
model_id, version = key.model_id, key.version
manifest = self._s3_cache.get(
JumpStartCachedS3ContentKey(JumpStartS3FileType.MANIFEST, self._manifest_file_s3_key)
).formatted_content
sm_version = utils.get_sagemaker_version()
versions_compatible_with_sagemaker = [
Version(header.version)
for header in manifest.values() # type: ignore
if header.model_id == model_id and Version(header.min_version) <= Version(sm_version)
]
sm_compatible_model_version = self._select_version(
version, versions_compatible_with_sagemaker
)
if sm_compatible_model_version is not None:
return JumpStartVersionedModelId(model_id, sm_compatible_model_version)
versions_incompatible_with_sagemaker = [
Version(header.version) for header in manifest.values() # type: ignore
if header.model_id == model_id
]
sm_incompatible_model_version = self._select_version(
version, versions_incompatible_with_sagemaker
)
if sm_incompatible_model_version is not None:
model_version_to_use_incompatible_with_sagemaker = sm_incompatible_model_version
sm_version_to_use_list = [
header.min_version
for header in manifest.values() # type: ignore
if header.model_id == model_id
and header.version == model_version_to_use_incompatible_with_sagemaker
]
if len(sm_version_to_use_list) != 1:
# ``manifest`` dict should already enforce this
raise RuntimeError("Found more than one incompatible SageMaker version to use.")
sm_version_to_use = sm_version_to_use_list[0]
error_msg = (
f"Unable to find model manifest for '{model_id}' with version '{version}' "
f"compatible with your SageMaker version ('{sm_version}'). "
f"Consider upgrading your SageMaker library to at least version "
f"'{sm_version_to_use}' so you can use version "
f"'{model_version_to_use_incompatible_with_sagemaker}' of '{model_id}'."
)
raise KeyError(error_msg)
error_msg = f"Unable to find model manifest for '{model_id}' with version '{version}'. "
error_msg += (
"Visit https://sagemaker.readthedocs.io/en/stable/doc_utils/jumpstart.html"
" for updated list of models. "
)
other_model_id_version = self._select_version(
"*", versions_incompatible_with_sagemaker
) # all versions here are incompatible with sagemaker
if other_model_id_version is not None:
error_msg += (
f"Consider using model ID '{model_id}' with version "
f"'{other_model_id_version}'."
)
else:
possible_model_ids = [header.model_id for header in manifest.values()]
closest_model_id = get_close_matches(model_id, possible_model_ids, n=1, cutoff=0)[0]
error_msg += f"Did you mean to use model ID '{closest_model_id}'?"
raise KeyError(error_msg)
def _get_file_from_s3(
self,
key: JumpStartCachedS3ContentKey,
value: Optional[JumpStartCachedS3ContentValue],
) -> JumpStartCachedS3ContentValue:
"""Return s3 content given a file type and s3_key in ``JumpStartCachedS3ContentKey``.
If a manifest file is being fetched, we only download the object if the md5 hash in
``head_object`` does not match the current md5 hash for the stored value. This prevents
unnecessarily downloading the full manifest when it hasn't changed.
Args:
key (JumpStartCachedS3ContentKey): key for which to fetch s3 content.
value (Optional[JumpStartVersionedModelId]): Current value of old cached
s3 content. This is used for the manifest file, so that it is only
downloaded when its content changes.
"""
file_type, s3_key = key.file_type, key.s3_key
if file_type == JumpStartS3FileType.MANIFEST:
if value is not None:
etag = self._s3_client.head_object(Bucket=self.s3_bucket_name, Key=s3_key)["ETag"]
if etag == value.md5_hash:
return value
response = self._s3_client.get_object(Bucket=self.s3_bucket_name, Key=s3_key)
formatted_body = json.loads(response["Body"].read().decode("utf-8"))
etag = response["ETag"]
return JumpStartCachedS3ContentValue(
formatted_content=utils.get_formatted_manifest(formatted_body),
md5_hash=etag,
)
if file_type == JumpStartS3FileType.SPECS:
response = self._s3_client.get_object(Bucket=self.s3_bucket_name, Key=s3_key)
formatted_body = json.loads(response["Body"].read().decode("utf-8"))
return JumpStartCachedS3ContentValue(
formatted_content=JumpStartModelSpecs(formatted_body)
)
raise ValueError(
f"Bad value for key '{key}': must be in {[JumpStartS3FileType.MANIFEST, JumpStartS3FileType.SPECS]}"
)
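# Note on the manifest branch above: value.md5_hash stores the ETag from the
# last full GET, so a lightweight head_object call is enough to detect an
# unchanged manifest and return the cached value without re-downloading it.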
def get_manifest(self) -> List[JumpStartModelHeader]:
"""Return entire JumpStart models manifest."""
manifest_dict = self._s3_cache.get(
JumpStartCachedS3ContentKey(JumpStartS3FileType.MANIFEST, self._manifest_file_s3_key)
).formatted_content
manifest = list(manifest_dict.values()) # type: ignore
return manifest
def get_header(self, model_id: str, semantic_version_str: str) -> JumpStartModelHeader:
"""Return header for a given JumpStart model id and semantic version.
Args:
model_id (str): model id for which to get a header.
semantic_version_str (str): The semantic version for which to get a
header.
"""
return self._get_header_impl(model_id, semantic_version_str=semantic_version_str)
def _select_version(
self,
semantic_version_str: str,
available_versions: List[Version],
) -> Optional[str]:
"""Perform semantic version search on available versions.
Args:
semantic_version_str (str): the semantic version for which to filter
available versions.
available_versions (List[Version]): list of available versions.
"""
if semantic_version_str == "*":
if len(available_versions) == 0:
return None
return str(max(available_versions))
spec = SpecifierSet(f"=={semantic_version_str}")
available_versions_filtered = list(spec.filter(available_versions))
return (
str(max(available_versions_filtered)) if available_versions_filtered != [] else None
)
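# Illustrative example of the rule above: with available_versions
# [Version("1.0.0"), Version("1.1.0"), Version("1.1.2")], "1.1.0" builds
# SpecifierSet("==1.1.0") and returns "1.1.0", while "*" short-circuits to
# str(max(...)) == "1.1.2".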
def _get_header_impl(
self,
model_id: str,
semantic_version_str: str,
attempt: int = 0,
) -> JumpStartModelHeader:
"""Lower-level function to return header.
Allows a single retry if the cache is old.
Args:
model_id (str): model id for which to get a header.
semantic_version_str (str): The semantic version for which to get a
header.
attempt (int): attempt number at retrieving a header.
"""
versioned_model_id = self._model_id_semantic_version_manifest_key_cache.get(
JumpStartVersionedModelId(model_id, semantic_version_str)
)
manifest = self._s3_cache.get(
JumpStartCachedS3ContentKey(JumpStartS3FileType.MANIFEST, self._manifest_file_s3_key)
).formatted_content
try:
header = manifest[versioned_model_id] # type: ignore
return header
except KeyError:
if attempt > 0:
raise
self.clear()
return self._get_header_impl(model_id, semantic_version_str, attempt + 1)
def get_specs(self, model_id: str, semantic_version_str: str) -> JumpStartModelSpecs:
"""Return specs for a given JumpStart model id and semantic version.
Args:
model_id (str): model id for which to get specs.
semantic_version_str (str): The semantic version for which to get
specs.
"""
header = self.get_header(model_id, semantic_version_str)
spec_key = header.spec_key
specs = self._s3_cache.get(
JumpStartCachedS3ContentKey(JumpStartS3FileType.SPECS, spec_key)
).formatted_content
return specs  # type: ignore
:return: string or None
"""
return self._get_aad_tenant_id(enable_validation=True)
def _get_aad_admin_group_object_ids(self, enable_validation: bool = False) -> Union[List[str], None]:
"""Internal function to obtain the value of aad_admin_group_object_ids.
This function supports the option of enable_validation. When enabled in update mode, if
aad_admin_group_object_ids is specified, while aad_profile is not set or managed aad is not enabled,
raise an InvalidArgumentValueError.
This function will normalize the parameter by default. It will split the string into a list with "," as the
delimiter.
:return: empty list or list of strings, or None
"""
# read the original value passed by the command
aad_admin_group_object_ids = self.raw_param.get("aad_admin_group_object_ids")
# In create mode, try to read the property value corresponding to the parameter from the `mc` object.
read_from_mc = False
if self.decorator_mode == DecoratorMode.CREATE:
if (
self.mc and
self.mc.aad_profile and
self.mc.aad_profile.admin_group_object_i_ds is not None
):
aad_admin_group_object_ids = self.mc.aad_profile.admin_group_object_i_ds
read_from_mc = True
# keep None as None, but empty string ("") to empty list ([])
if not read_from_mc and aad_admin_group_object_ids is not None:
aad_admin_group_object_ids = aad_admin_group_object_ids.split(',') if aad_admin_group_object_ids else []
# validation
if enable_validation:
if aad_admin_group_object_ids:
if self.decorator_mode == DecoratorMode.UPDATE:
if not check_is_managed_aad_cluster(self.mc):
raise InvalidArgumentValueError(
'Cannot specify "--aad-admin-group-object-ids" if managed AAD is not enabled'
)
return aad_admin_group_object_ids
def get_aad_admin_group_object_ids(self) -> Union[List[str], None]:
"""Obtain the value of aad_admin_group_object_ids.
This function will verify the parameter by default. In update mode, if aad_admin_group_object_ids is specified,
while aad_profile is not set or managed aad is not enabled, raise an InvalidArgumentValueError.
This function will normalize the parameter by default. It will split the string into a list with "," as the
delimiter.
:return: empty list or list of strings, or None
"""
return self._get_aad_admin_group_object_ids(enable_validation=True)
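# Normalization example (illustrative): "id1,id2" -> ["id1", "id2"],
# "" -> [], and None stays None, so "not provided" remains distinguishable
# from "provided but empty".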
def _get_disable_rbac(self, enable_validation: bool = False) -> Union[bool, None]:
"""Internal function to obtain the value of disable_rbac.
This function supports the option of enable_validation. When enabled, if the values of disable_rbac and
enable_azure_rbac are both True, a MutuallyExclusiveArgumentError will be raised. Besides, if the values of
enable_rbac and disable_rbac are both True, a MutuallyExclusiveArgumentError will be raised.
:return: bool or None
"""
# read the original value passed by the command
disable_rbac = self.raw_param.get("disable_rbac")
# try to read the property value corresponding to the parameter from the `mc` object
if (
self.mc and
self.mc.enable_rbac is not None
):
disable_rbac = not self.mc.enable_rbac
# this parameter does not need dynamic completion
# validation
if enable_validation:
if disable_rbac and self._get_enable_azure_rbac(enable_validation=False):
raise MutuallyExclusiveArgumentError(
"--enable-azure-rbac cannot be used together with --disable-rbac"
)
if disable_rbac and self._get_enable_rbac(enable_validation=False):
raise MutuallyExclusiveArgumentError("specify either '--disable-rbac' or '--enable-rbac', not both.")
return disable_rbac
def get_disable_rbac(self) -> Union[bool, None]:
"""Obtain the value of disable_rbac.
This function will verify the parameter by default. If the values of disable_rbac and enable_azure_rbac are
both True, a MutuallyExclusiveArgumentError will be raised. Besides, if the values of enable_rbac and
disable_rbac are both True, a MutuallyExclusiveArgumentError will be raised.
:return: bool or None
"""
return self._get_disable_rbac(enable_validation=True)
def _get_enable_rbac(self, enable_validation: bool = False) -> Union[bool, None]:
"""Internal function to obtain the value of enable_rbac.
This function supports the option of enable_validation. When enabled, if the values of enable_rbac and
disable_rbac are both True, a MutuallyExclusiveArgumentError will be raised.
:return: bool or None
"""
# read the original value passed by the command
enable_rbac = self.raw_param.get("enable_rbac")
# try to read the property value corresponding to the parameter from the `mc` object
if (
self.mc and
self.mc.enable_rbac is not None
):
enable_rbac = self.mc.enable_rbac
# this parameter does not need dynamic completion
# validation
if enable_validation:
if enable_rbac and self._get_disable_rbac(enable_validation=False):
raise MutuallyExclusiveArgumentError("specify either '--disable-rbac' or '--enable-rbac', not both.")
return enable_rbac
def get_enable_rbac(self) -> Union[bool, None]:
"""Obtain the value of enable_rbac.
This function will verify the parameter by default. If the values of enable_rbac and disable_rbac are both True,
a MutuallyExclusiveArgumentError will be raised.
:return: bool or None
"""
return self._get_enable_rbac(enable_validation=True)
def _get_enable_azure_rbac(self, enable_validation: bool = False) -> bool:
"""Internal function to obtain the value of enable_azure_rbac.
This function supports the option of enable_validation. When enabled and enable_azure_rbac is specified,
in create mode, if the value of enable_aad is not True, a RequiredArgumentMissingError will be raised.
If disable_rbac is specified, a MutuallyExclusiveArgumentError will be raised. In update mode, if
enable_azure_rbac is specified, while aad_profile is not set or managed aad is not enabled,
raise an InvalidArgumentValueError. If both disable_azure_rbac and enable_azure_rbac are specified,
raise a MutuallyExclusiveArgumentError.
:return: bool
"""
# read the original value passed by the command
enable_azure_rbac = self.raw_param.get("enable_azure_rbac")
# In create mode, try to read the property value corresponding to the parameter from the `mc` object.
if self.decorator_mode == DecoratorMode.CREATE:
if (
self.mc and
self.mc.aad_profile and
self.mc.aad_profile.enable_azure_rbac is not None
):
enable_azure_rbac = self.mc.aad_profile.enable_azure_rbac
# this parameter does not need dynamic completion
# validation
if enable_validation:
if enable_azure_rbac:
if self.decorator_mode == DecoratorMode.CREATE:
if not self._get_enable_aad(enable_validation=False):
raise RequiredArgumentMissingError(
"--enable-azure-rbac can only be used together with --enable-aad"
)
if self._get_disable_rbac(enable_validation=False):
raise MutuallyExclusiveArgumentError(
"--enable-azure-rbac cannot be used together with --disable-rbac"
)
elif self.decorator_mode == DecoratorMode.UPDATE:
if not check_is_managed_aad_cluster(self.mc):
raise InvalidArgumentValueError(
'Cannot specify "--enable-azure-rbac" if managed AAD is not enabled'
)
if self._get_disable_azure_rbac(enable_validation=False):
raise MutuallyExclusiveArgumentError(
'Cannot specify "--enable-azure-rbac" and "--disable-azure-rbac" at the same time'
)
return enable_azure_rbac
def get_enable_azure_rbac(self) -> bool:
"""Obtain the value of enable_azure_rbac.
This function will verify the parameter by default. If enable_azure_rbac is specified, in create mode,
if the value of enable_aad is not True, a RequiredArgumentMissingError will be raised. If disable_rbac
is specified, a MutuallyExclusiveArgumentError will be raised. In update mode, if enable_azure_rbac
is specified, while aad_profile is not set or managed aad is not enabled, raise an InvalidArgumentValueError.
If both disable_azure_rbac and enable_azure_rbac are specified, raise a MutuallyExclusiveArgumentError.
:return: bool
"""
return self._get_enable_azure_rbac(enable_validation=True)
def _get_disable_azure_rbac(self, enable_validation: bool = False) -> bool:
"""Internal function to obtain the value of disable_azure_rbac.
This function supports the option of enable_validation. When enabled, in update mode, if disable_azure_rbac
is specified, while aad_profile is not set or managed aad is not enabled, raise an InvalidArgumentValueError.
If both disable_azure_rbac and enable_azure_rbac are specified, raise a MutuallyExclusiveArgumentError.
:return: bool
"""
# read the original value passed by the command
disable_azure_rbac = self.raw_param.get("disable_azure_rbac")
# We do not support this option in create mode, therefore we do not read the value from `mc`.
# this parameter does not need dynamic completion
# validation
if enable_validation:
if disable_azure_rbac:
if self.decorator_mode == DecoratorMode.UPDATE:
if not check_is_managed_aad_cluster(self.mc):
raise InvalidArgumentValueError(
'Cannot specify "--disable-azure-rbac" if managed AAD is not enabled'
)
if self._get_enable_azure_rbac(enable_validation=False):
raise MutuallyExclusiveArgumentError(
'Cannot specify "--enable-azure-rbac" and "--disable-azure-rbac" at the same time'
)
return disable_azure_rbac
def get_disable_azure_rbac(self) -> bool:
"""Obtain the value of disable_azure_rbac.
This function will verify the parameter by default. In update mode, if disable_azure_rbac is specified,
while aad_profile is not set or managed aad is not enabled, raise an InvalidArgumentValueError.
If both disable_azure_rbac and enable_azure_rbac are specified, raise a MutuallyExclusiveArgumentError.
:return: bool
"""
return self._get_disable_azure_rbac(enable_validation=True)
def _get_api_server_authorized_ip_ranges(self, enable_validation: bool = False) -> List[str]:
"""Internal function to obtain the value of api_server_authorized_ip_ranges.
This function supports the option of enable_validation. When enabled and api_server_authorized_ip_ranges is
assigned, if load_balancer_sku equals to CONST_LOAD_BALANCER_SKU_BASIC, raise an InvalidArgumentValueError;
if enable_private_cluster is specified, raise a MutuallyExclusiveArgumentError.
This function will normalize the parameter by default. It will split the string into a list with "," as the
delimiter.
:return: empty list or list of strings
"""
# read the original value passed by the command
api_server_authorized_ip_ranges = self.raw_param.get(
"api_server_authorized_ip_ranges"
)
# In create mode, try to read the property value corresponding to the parameter from the `mc` object.
if self.decorator_mode == DecoratorMode.CREATE:
read_from_mc = False
if (
self.mc and
self.mc.api_server_access_profile and
self.mc.api_server_access_profile.authorized_ip_ranges is not None
):
api_server_authorized_ip_ranges = (
self.mc.api_server_access_profile.authorized_ip_ranges
)
read_from_mc = True
# normalize
if not read_from_mc:
api_server_authorized_ip_ranges = [
x.strip()
for x in (
api_server_authorized_ip_ranges.split(",")
if api_server_authorized_ip_ranges
else []
)
]
elif self.decorator_mode == DecoratorMode.UPDATE:
# normalize, keep None as None
if api_server_authorized_ip_ranges is not None:
api_server_authorized_ip_ranges = [
x.strip()
for x in (
api_server_authorized_ip_ranges.split(",")
if | |
if plan.to_date < from_dt or plan.from_date > to_dt:
plan = None
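# From here on, `plan` (if any) is an existing ProductPlan overlapping
# [from_dt, to_dt]. A quantity change over a sub-range splits it into up to
# three pieces: an untouched head before from_dt, the updated middle, and an
# untouched tail after to_dt.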
if qty:
if plan:
if qty != plan.quantity:
if plan.from_date >= from_dt and plan.to_date <= to_dt:
plan.quantity = qty
plan.save()
else:
if plan.from_date < from_dt:
new_to_dt = from_dt - datetime.timedelta(days=1)
earlier_plan = ProductPlan(
member=plan.member,
product=plan.product,
quantity=plan.quantity,
from_date=plan.from_date,
to_date=new_to_dt,
role=plan.role,
inventoried=plan.inventoried,
distributor=plan.distributor,
)
earlier_plan.save()
if plan.to_date > to_dt:
new_plan = ProductPlan(
member=plan.member,
product=plan.product,
quantity=qty,
from_date=from_dt,
to_date=to_dt,
role=plan.role,
inventoried=plan.inventoried,
distributor=plan.distributor,
)
new_plan.save()
plan.from_date = to_dt + datetime.timedelta(days=1)
plan.save()
else:
plan.from_date = from_dt
plan.quantity = qty
plan.save()
else:
new_plan = ProductPlan(
member=member,
product=product,
quantity=qty,
from_date=from_dt,
to_date=to_dt,
role=role,
)
new_plan.save()
if role == "producer":
listed_product, created = ProducerProduct.objects.get_or_create(
product=product, producer=member)
elif role == "consumer":
listed_product, created = CustomerProduct.objects.get_or_create(
product=product, customer=member)
else:
if plan:
if plan.from_date >= from_dt and plan.to_date <= to_dt:
plan.delete()
else:
if plan.to_date > to_dt:
early_from_dt = plan.from_date
if plan.from_date < from_dt:
early_to_dt = from_dt - datetime.timedelta(days=1)
earlier_plan = ProductPlan(
member=plan.member,
product=plan.product,
quantity=plan.quantity,
from_date=early_from_dt,
to_date=early_to_dt,
role=plan.role,
inventoried=plan.inventoried,
distributor=plan.distributor,
)
earlier_plan.save()
plan.from_date = to_dt + datetime.timedelta(days=1)
plan.save()
else:
                                plan.to_date = from_dt - datetime.timedelta(days=1)
plan.save()
wkdate = wkdate + datetime.timedelta(days=7)
response = HttpResponse(request.raw_post_data, mimetype="text/json-comment-filtered")
response['Cache-Control'] = 'no-cache'
return response
else:
try:
from_date = datetime.datetime(*time.strptime(from_date, '%Y_%m_%d')[0:5]).date()
to_date = datetime.datetime(*time.strptime(to_date, '%Y_%m_%d')[0:5]).date()
except ValueError:
raise Http404
# force from_date to Monday, to_date to Sunday
from_date = from_date - datetime.timedelta(days=datetime.date.weekday(from_date))
to_date = to_date - datetime.timedelta(days=datetime.date.weekday(to_date)+1)
to_date = to_date + datetime.timedelta(days=7)
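        # Worked example (illustrative dates): from_date 2011-06-15 is a
        # Wednesday (weekday() == 2), so it snaps back to Monday 2011-06-13;
        # to_date 2011-06-15 first snaps to Sunday 2011-06-12, then the
        # 7-day shift moves it to Sunday 2011-06-19, the end of that week.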
products = None
if list_type == "M":
if role == "consumer":
products = CustomerProduct.objects.filter(customer=member, planned=True)
else:
products = ProducerProduct.objects.filter(producer=member, planned=True)
if not products:
products = Product.objects.filter(plannable=True)
list_type = "A"
#import pdb; pdb.set_trace()
rows = plans_for_dojo(member, products, from_date, to_date)
        range_header = request.META["HTTP_RANGE"]
        range_spec = range_header.split("=")[1]
        range_parts = range_spec.split("-")
        range_start = int(range_parts[0])
        range_end = int(range_parts[1])
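        # e.g. a dojo store request sends "Range: items=0-24", which arrives
        # in request.META as HTTP_RANGE "items=0-24" and parses to
        # range_start == 0 and range_end == 24.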
count = len(rows)
if count < range_end:
range_end = count
rows = rows[range_start:range_end + 1]
data = simplejson.dumps(rows)
response = HttpResponse(data, mimetype="text/json-comment-filtered")
response['Cache-Control'] = 'no-cache'
response['Content-Range'] = "".join(["items ", str(range_start),
"-", str(range_end), "/", str(count + 1)])
return response
@login_required
def plan_update(request, prod_id):
try:
member = Party.objects.get(pk=prod_id)
except Party.DoesNotExist:
raise Http404
if request.method == "POST":
itemforms = create_plan_forms(member, request.POST)
if all([itemform.is_valid() for itemform in itemforms]):
member_id = request.POST['member-id']
member = Party.objects.get(pk=member_id)
role = "producer"
if member.is_customer():
role = "consumer"
for itemform in itemforms:
data = itemform.cleaned_data
prodname = data['prodname']
item_id = data['item_id']
from_date = data['from_date']
to_date = data['to_date']
quantity = data['quantity']
if item_id:
item = ProductPlan.objects.get(pk=item_id)
item.from_date = from_date
item.to_date = to_date
item.quantity = quantity
item.save()
else:
if quantity > 0:
prodname = data['prodname']
product = Product.objects.get(short_name__exact=prodname)
item = itemform.save(commit=False)
item.member = member
item.product = product
item.role = role
item.save()
return HttpResponseRedirect('/%s/%s/'
% ('distribution/producerplan', member_id))
else:
for itemform in itemforms:
if not itemform.is_valid():
print '**invalid**', itemform
else:
itemforms = create_plan_forms(member)
return render_to_response('distribution/plan_update.html', {'member': member, 'item_forms': itemforms})
@login_required
def inventory_selection(request):
try:
fn = food_network()
except FoodNetwork.DoesNotExist:
return render_to_response('distribution/network_error.html')
avail_date = next_delivery_date()
init = {"avail_date": avail_date,}
available = fn.all_avail_items(avail_date)
ihform = InventorySelectionForm(data=request.POST or None, initial=init)
unplanned_form = UnplannedInventoryForm(data=request.POST or None,
initial={"inventory_date": avail_date})
if request.method == "POST":
if request.POST.get('submit-planned'):
#ihform = InventorySelectionForm(request.POST)
if ihform.is_valid():
ihdata = ihform.cleaned_data
producer_id = ihdata['producer']
inv_date = ihdata['avail_date']
#import pdb; pdb.set_trace()
if int(producer_id):
return HttpResponseRedirect('/%s/%s/%s/%s/%s/'
% ('distribution/inventoryupdate', producer_id, inv_date.year, inv_date.month, inv_date.day))
else:
return HttpResponseRedirect('/%s/%s/%s/%s/'
% ('distribution/allinventoryupdate', inv_date.year, inv_date.month, inv_date.day))
if request.POST.get('submit-unplanned'):
#unplanned_form = UnplannedInventoryForm(request.POST)
if unplanned_form.is_valid():
unplanned_data = unplanned_form.cleaned_data
producer = unplanned_data['producer']
product = unplanned_data['product']
planned = unplanned_data['planned']
received = unplanned_data['received'] or Decimal("0")
remaining = planned
if received:
remaining = received
if not producer:
producer_name = unplanned_data['new_producer_name']
producer = Producer(
short_name = producer_name,
long_name = producer_name)
producer.save()
pp, created = ProducerProduct.objects.get_or_create(
producer=producer,
product=product)
lot = InventoryItem(
producer = producer,
product = product,
custodian = unplanned_data['custodian'],
freeform_lot_id = unplanned_data['freeform_lot_id'],
field_id = unplanned_data['field_id'],
inventory_date = unplanned_data['inventory_date'],
planned = planned,
received = received,
remaining = remaining,
notes = unplanned_data['notes'],
)
lot.save()
ihform = InventorySelectionForm(initial=init)
unplanned_form = UnplannedInventoryForm(initial={"inventory_date": avail_date})
#else:
# ihform = InventorySelectionForm(initial=init)
# unplanned_form = UnplannedInventoryForm(initial={"inventory_date":
# avail_date})
return render_to_response('distribution/inventory_selection.html', {
'header_form': ihform,
'unplanned_form': unplanned_form,
'available': available,
'avail_year': avail_date.year,
'avail_month': avail_date.month,
'avail_day': avail_date.day,
}, context_instance=RequestContext(request))
@login_required
def inventory_update(request, prod_id, year, month, day):
availdate = datetime.date(int(year), int(month), int(day))
try:
producer = Party.objects.get(pk=prod_id)
except Party.DoesNotExist:
raise Http404
monday = availdate - datetime.timedelta(days=datetime.date.weekday(availdate))
saturday = monday + datetime.timedelta(days=5)
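    # e.g. an availdate of 2011-06-15 (a Wednesday) gives monday 2011-06-13
    # and saturday 2011-06-18, bracketing that week's inventory.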
#import pdb; pdb.set_trace()
items = InventoryItem.objects.filter(
producer=producer,
remaining__gt=0,
inventory_date__range=(monday, saturday))
plans = ProductPlan.objects.filter(
member=producer,
from_date__lte=availdate,
to_date__gte=saturday)
if plans:
planned = True
else:
planned = False
plans = producer.producer_products.all()
itemforms = create_inventory_item_forms(
producer, availdate, plans, items, data=request.POST or None)
if request.method == "POST":
#import pdb; pdb.set_trace()
if all([itemform.is_valid() for itemform in itemforms]):
producer_id = request.POST['producer-id']
producer = Party.objects.get(pk=producer_id)
inv_date = request.POST['avail-date']
for itemform in itemforms:
data = itemform.cleaned_data
prod_id = data['prod_id']
item_id = data['item_id']
custodian = data['custodian']
inventory_date = data['inventory_date']
expiration_date = data['expiration_date']
planned = data['planned']
received = data['received']
notes = data['notes']
field_id = data['field_id']
freeform_lot_id = data['freeform_lot_id']
if item_id:
item = InventoryItem.objects.get(pk=item_id)
item.custodian = custodian
item.inventory_date = inventory_date
item.expiration_date = expiration_date
rem_change = planned - item.planned
item.planned = planned
item.remaining = item.remaining + rem_change
oh_change = received - item.received
item.received = received
item.onhand = item.onhand + oh_change
item.notes = notes
item.field_id = field_id
item.freeform_lot_id = freeform_lot_id
item.save()
else:
if planned + received > 0:
prod_id = data['prod_id']
product = Product.objects.get(pk=prod_id)
item = itemform.save(commit=False)
item.producer = producer
item.product = product
item.remaining = planned
item.onhand = received
item.save()
return HttpResponseRedirect('/%s/%s/%s/%s/%s/'
% ('distribution/produceravail', producer_id, year, month, day))
return render_to_response('distribution/inventory_update.html', {
'avail_date': availdate,
'producer': producer,
'planned': planned,
'item_forms': itemforms,
'tabnav': "distribution/tabnav.html",
}, context_instance=RequestContext(request))
@login_required
def all_inventory_update(request, year, month, day):
availdate = datetime.date(int(year), int(month), int(day))
monday = availdate - datetime.timedelta(days=datetime.date.weekday(availdate))
saturday = monday + datetime.timedelta(days=5)
#import pdb; pdb.set_trace()
items = InventoryItem.objects.select_related(depth=1).filter(
remaining__gt=0,
inventory_date=availdate)
#inventory_date__range=(monday, saturday))
plans = ProductPlan.objects.select_related(depth=1).filter(
role="producer",
from_date__lte=availdate,
to_date__gte=saturday)
if plans:
planned = True
else:
planned = False
plans = ProducerProduct.objects.select_related(depth=1).all()
itemforms = create_all_inventory_item_forms(
availdate, plans, items, data=request.POST or None)
if request.method == "POST":
#import pdb; pdb.set_trace()
if all([itemform.is_valid() for itemform in itemforms]):
inv_date = request.POST['avail-date']
for itemform in itemforms:
data = itemform.cleaned_data
producer_id = int(data['producer_id'])
producer = Party.objects.get(pk=producer_id)
prod_id = int(data['product_id'])
item_id = int(data['item_id'])
custodian = data['custodian']
inventory_date = data['inventory_date']
expiration_date = data['expiration_date']
planned = data['planned']
received = data['received']
notes = data['notes']
field_id = data['field_id']
freeform_lot_id = data['freeform_lot_id']
#import pdb; pdb.set_trace()
if item_id:
item = InventoryItem.objects.get(pk=item_id)
item.custodian = custodian
item.inventory_date = inventory_date
item.expiration_date = expiration_date
rem_change = planned - item.planned
item.planned = planned
item.remaining = item.remaining + rem_change
oh_change = received - item.received
item.received = received
item.onhand = item.onhand + oh_change
item.notes = notes
item.field_id = field_id
item.freeform_lot_id = freeform_lot_id
item.save()
else:
if planned + received > 0:
product = Product.objects.get(pk=prod_id)
item = itemform.save(commit=False)
item.producer = producer
item.product = product
item.remaining = planned
item.onhand = received
item.save()
return HttpResponseRedirect('/%s/'
% ('distribution/inventoryselection',))
return render_to_response('distribution/all_inventory_update.html', {
'avail_date': availdate,
'planned': planned,
'item_forms': itemforms}, context_instance=RequestContext(request))
@login_required
def order_selection(request):
delivery_date = next_delivery_date()
changeable_orders = Order.objects.filter(
state="Submitted",
delivery_date__lte=delivery_date,
).order_by('-delivery_date')
unpaid_orders = Order.objects.exclude(state__contains="Paid").exclude(state="Unsubmitted")
if request.method == "POST":
ihform = OrderSelectionForm(request.POST)
if ihform.is_valid():
ihdata = ihform.cleaned_data
customer_id = ihdata['customer']
ord_date = ihdata['delivery_date']
if ordering_by_lot():
return HttpResponseRedirect('/%s/%s/%s/%s/%s/'
% ('distribution/orderbylot', customer_id, ord_date.year, ord_date.month, ord_date.day))
else:
return HttpResponseRedirect('/%s/%s/%s/%s/%s/'
% ('distribution/neworder', customer_id, ord_date.year, ord_date.month, ord_date.day))
else:
ihform = OrderSelectionForm(initial={'delivery_date': delivery_date})
return render_to_response(
'distribution/order_selection.html',
{'header_form': ihform,
'changeable_orders': changeable_orders,
'unpaid_orders': unpaid_orders,
}, context_instance=RequestContext(request))
@login_required
def old_order_selection(request):
unpaid_orders = Order.objects.exclude(state__contains="Paid").exclude(state="Unsubmitted")
if request.method == "POST":
ihform = OrderSelectionForm(request.POST)
if ihform.is_valid():
ihdata = ihform.cleaned_data
customer_id = ihdata['customer']
ord_date = ihdata['delivery_date']
if ordering_by_lot():
return HttpResponseRedirect('/%s/%s/%s/%s/%s/'
% ('distribution/orderbylot', customer_id, ord_date.year, ord_date.month, ord_date.day))
else:
return HttpResponseRedirect('/%s/%s/%s/%s/%s/'
% ('distribution/orderupdate', customer_id, ord_date.year, ord_date.month, ord_date.day))
else:
ihform = OrderSelectionForm()
return render_to_response(
'distribution/order_selection.html',
{'header_form': ihform,
'unpaid_orders': unpaid_orders}, context_instance=RequestContext(request))
#todo: this whole view should be changed a la customer side
# plus, it is a logical mess...
@login_required
def order_update(request, cust_id, year, month, day):
delivery_date = datetime.date(int(year), int(month), int(day))
availdate = delivery_date
try:
fn = food_network()
except FoodNetwork.DoesNotExist:
return render_to_response('distribution/network_error.html')
cust_id = int(cust_id)
try:
customer = Customer.objects.get(pk=cust_id)
except Customer.DoesNotExist:
raise Http404
try:
order = Order.objects.get(customer=customer, delivery_date=delivery_date)
except MultipleObjectsReturned:
order = Order.objects.filter(customer=customer, delivery_date=delivery_date)[0]
except Order.DoesNotExist:
order = False
if request.method == "POST":
if order:
ordform = OrderForm(order=order, data=request.POST, instance=order)
| |
    seen = [0, 0]
for _ in sm.xrange(400):
observed = aug.augment_image(np.zeros((20, 20, 3), dtype=np.uint8))
sums = np.sum(observed, axis=2)
values = np.unique(sums)
all_values_found = all([(value in values) for value in [0, 1, 2, 3]])
if all_values_found:
seen[0] += 1
else:
seen[1] += 1
assert 150 < seen[0] < 250
assert 150 < seen[1] < 250
# test exceptions for wrong parameter types
got_exception = False
try:
aug = iaa.ReplaceElementwise(mask="test", replacement=1)
except Exception:
got_exception = True
assert got_exception
got_exception = False
try:
aug = iaa.ReplaceElementwise(mask=1, replacement=1, per_channel="test")
except Exception:
got_exception = True
assert got_exception
# test get_parameters()
aug = iaa.ReplaceElementwise(mask=1, replacement=2, per_channel=False)
params = aug.get_parameters()
assert isinstance(params[0], iap.Binomial)
assert isinstance(params[0].p, iap.Deterministic)
assert isinstance(params[1], iap.Deterministic)
assert isinstance(params[2], iap.Deterministic)
assert params[0].p.value >= 1 - 1e-8
assert params[1].value == 2
assert params[2].value == 0
def test_SaltAndPepper():
reseed()
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
aug = iaa.SaltAndPepper(p=0.5)
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
assert 0.4 < p < 0.6
aug = iaa.SaltAndPepper(p=1.0)
observed = aug.augment_image(base_img)
nb_pepper = np.sum(observed < 40)
nb_salt = np.sum(observed > 255 - 40)
assert nb_pepper > 200
assert nb_salt > 200
    # no more tests necessary here as SaltAndPepper is just a tiny wrapper around
# ReplaceElementwise
def test_CoarseSaltAndPepper():
reseed()
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
aug = iaa.CoarseSaltAndPepper(p=0.5, size_px=100)
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
assert 0.4 < p < 0.6
aug1 = iaa.CoarseSaltAndPepper(p=0.5, size_px=100)
aug2 = iaa.CoarseSaltAndPepper(p=0.5, size_px=10)
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
ps1 = []
ps2 = []
for _ in sm.xrange(100):
observed1 = aug1.augment_image(base_img)
observed2 = aug2.augment_image(base_img)
p1 = np.mean(observed1 != 128)
p2 = np.mean(observed2 != 128)
ps1.append(p1)
ps2.append(p2)
assert 0.4 < np.mean(ps2) < 0.6
assert np.std(ps1)*1.5 < np.std(ps2)
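    # size_px=10 samples the replacement mask on a coarser grid (fewer,
    # larger blobs per 100px image), so the replaced fraction varies more
    # from image to image than with the per-pixel size_px=100 mask.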
aug = iaa.CoarseSaltAndPepper(p=[0.2, 0.5], size_px=100)
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
seen = [0, 0, 0]
for _ in sm.xrange(200):
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
diff_020 = abs(0.2 - p)
diff_050 = abs(0.5 - p)
if diff_020 < 0.025:
seen[0] += 1
elif diff_050 < 0.025:
seen[1] += 1
else:
seen[2] += 1
assert seen[2] < 10
assert 75 < seen[0] < 125
assert 75 < seen[1] < 125
aug = iaa.CoarseSaltAndPepper(p=(0.0, 1.0), size_px=50)
base_img = np.zeros((50, 50, 1), dtype=np.uint8) + 128
ps = []
for _ in sm.xrange(200):
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
ps.append(p)
nb_bins = 5
hist, _ = np.histogram(ps, bins=nb_bins, range=(0.0, 1.0), density=False)
tolerance = 0.05
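    # p is drawn uniformly from (0.0, 1.0), so each of the nb_bins histogram
    # bins should hold roughly 1/nb_bins of the observed densities.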
    density_expected = 1.0 / nb_bins
    for nb_seen in hist:
        density = nb_seen / float(len(ps))
        assert density_expected - tolerance < density < density_expected + tolerance
# test exceptions for wrong parameter types
got_exception = False
try:
aug = iaa.CoarseSaltAndPepper(p="test", size_px=100)
except Exception:
got_exception = True
assert got_exception
got_exception = False
try:
aug = iaa.CoarseSaltAndPepper(p=0.5, size_px=None, size_percent=None)
except Exception:
got_exception = True
assert got_exception
def test_Salt():
reseed()
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
aug = iaa.Salt(p=0.5)
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
assert 0.4 < p < 0.6
assert np.all(observed >= 127) # Salt() occasionally replaces with 127,
# which probably should be the center-point here anyways
aug = iaa.Salt(p=1.0)
observed = aug.augment_image(base_img)
nb_pepper = np.sum(observed < 40)
nb_salt = np.sum(observed > 255 - 40)
assert nb_pepper == 0
assert nb_salt > 200
    # no more tests necessary here as Salt is just a tiny wrapper around
# ReplaceElementwise
def test_CoarseSalt():
reseed()
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
aug = iaa.CoarseSalt(p=0.5, size_px=100)
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
assert 0.4 < p < 0.6
aug1 = iaa.CoarseSalt(p=0.5, size_px=100)
aug2 = iaa.CoarseSalt(p=0.5, size_px=10)
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
ps1 = []
ps2 = []
for _ in sm.xrange(100):
observed1 = aug1.augment_image(base_img)
observed2 = aug2.augment_image(base_img)
p1 = np.mean(observed1 != 128)
p2 = np.mean(observed2 != 128)
ps1.append(p1)
ps2.append(p2)
assert 0.4 < np.mean(ps2) < 0.6
assert np.std(ps1)*1.5 < np.std(ps2)
aug = iaa.CoarseSalt(p=[0.2, 0.5], size_px=100)
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
seen = [0, 0, 0]
for _ in sm.xrange(200):
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
diff_020 = abs(0.2 - p)
diff_050 = abs(0.5 - p)
if diff_020 < 0.025:
seen[0] += 1
elif diff_050 < 0.025:
seen[1] += 1
else:
seen[2] += 1
assert seen[2] < 10
assert 75 < seen[0] < 125
assert 75 < seen[1] < 125
aug = iaa.CoarseSalt(p=(0.0, 1.0), size_px=50)
base_img = np.zeros((50, 50, 1), dtype=np.uint8) + 128
ps = []
for _ in sm.xrange(200):
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
ps.append(p)
nb_bins = 5
hist, _ = np.histogram(ps, bins=nb_bins, range=(0.0, 1.0), density=False)
tolerance = 0.05
    density_expected = 1.0 / nb_bins
    for nb_seen in hist:
        density = nb_seen / float(len(ps))
        assert density_expected - tolerance < density < density_expected + tolerance
# test exceptions for wrong parameter types
got_exception = False
try:
aug = iaa.CoarseSalt(p="test", size_px=100)
except Exception:
got_exception = True
assert got_exception
got_exception = False
try:
aug = iaa.CoarseSalt(p=0.5, size_px=None, size_percent=None)
except Exception:
got_exception = True
assert got_exception
def test_Pepper():
reseed()
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
aug = iaa.Pepper(p=0.5)
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
assert 0.4 < p < 0.6
assert np.all(observed <= 128)
aug = iaa.Pepper(p=1.0)
observed = aug.augment_image(base_img)
nb_pepper = np.sum(observed < 40)
nb_salt = np.sum(observed > 255 - 40)
assert nb_pepper > 200
assert nb_salt == 0
    # no more tests necessary here as Pepper is just a tiny wrapper around
# ReplaceElementwise
def test_CoarsePepper():
reseed()
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
aug = iaa.CoarsePepper(p=0.5, size_px=100)
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
assert 0.4 < p < 0.6
aug1 = iaa.CoarsePepper(p=0.5, size_px=100)
aug2 = iaa.CoarsePepper(p=0.5, size_px=10)
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
ps1 = []
ps2 = []
for _ in sm.xrange(100):
observed1 = aug1.augment_image(base_img)
observed2 = aug2.augment_image(base_img)
p1 = np.mean(observed1 != 128)
p2 = np.mean(observed2 != 128)
ps1.append(p1)
ps2.append(p2)
assert 0.4 < np.mean(ps2) < 0.6
assert np.std(ps1)*1.5 < np.std(ps2)
aug = iaa.CoarsePepper(p=[0.2, 0.5], size_px=100)
base_img = np.zeros((100, 100, 1), dtype=np.uint8) + 128
seen = [0, 0, 0]
for _ in sm.xrange(200):
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
diff_020 = abs(0.2 - p)
diff_050 = abs(0.5 - p)
if diff_020 < 0.025:
seen[0] += 1
elif diff_050 < 0.025:
seen[1] += 1
else:
seen[2] += 1
assert seen[2] < 10
assert 75 < seen[0] < 125
assert 75 < seen[1] < 125
aug = iaa.CoarsePepper(p=(0.0, 1.0), size_px=50)
base_img = np.zeros((50, 50, 1), dtype=np.uint8) + 128
ps = []
for _ in sm.xrange(200):
observed = aug.augment_image(base_img)
p = np.mean(observed != 128)
ps.append(p)
nb_bins = 5
hist, _ = np.histogram(ps, bins=nb_bins, range=(0.0, 1.0), density=False)
tolerance = 0.05
    density_expected = 1.0 / nb_bins
    for nb_seen in hist:
        density = nb_seen / float(len(ps))
        assert density_expected - tolerance < density < density_expected + tolerance
# test exceptions for wrong parameter types
got_exception = False
try:
aug = iaa.CoarsePepper(p="test", size_px=100)
except Exception:
got_exception = True
assert got_exception
got_exception = False
try:
aug = iaa.CoarsePepper(p=0.5, size_px=None, size_percent=None)
except Exception:
got_exception = True
assert got_exception
def test_Add():
reseed()
base_img = np.ones((3, 3, 1), dtype=np.uint8) * 100
images = np.array([base_img])
images_list = [base_img]
keypoints = [ia.KeypointsOnImage([ia.Keypoint(x=0, y=0), ia.Keypoint(x=1, y=1),
ia.Keypoint(x=2, y=2)], shape=base_img.shape)]
    # no add, shouldn't change anything
aug = iaa.Add(value=0)
aug_det = aug.to_deterministic()
observed = aug.augment_images(images)
expected = images
assert np.array_equal(observed, expected)
observed = aug.augment_images(images_list)
expected = images_list
assert array_equal_lists(observed, expected)
observed = aug_det.augment_images(images)
expected = images
assert np.array_equal(observed, expected)
observed = aug_det.augment_images(images_list)
expected = images_list
assert array_equal_lists(observed, expected)
# add > 0
aug = iaa.Add(value=1)
aug_det = aug.to_deterministic()
observed = aug.augment_images(images)
expected = images + 1
assert np.array_equal(observed, expected)
observed = aug.augment_images(images_list)
expected = [images_list[0] + 1]
assert array_equal_lists(observed, expected)
observed = aug_det.augment_images(images)
expected = images + 1
assert np.array_equal(observed, expected)
observed = aug_det.augment_images(images_list)
expected = [images_list[0] + 1]
assert array_equal_lists(observed, expected)
# add < 0
aug = iaa.Add(value=-1)
aug_det = aug.to_deterministic()
observed = aug.augment_images(images)
expected = images - 1
assert np.array_equal(observed, expected)
observed = aug.augment_images(images_list)
    expected = [images_list[0] - 1]
    assert array_equal_lists(observed, expected)
| |
<reponame>ArenaNetworks/dto-digitalmarketplace-supplier-frontend
# coding: utf-8
from __future__ import unicode_literals
import urllib2
from app.main.helpers.users import generate_supplier_invitation_token
from dmapiclient import HTTPError
from dmapiclient.audit import AuditTypes
from dmutils.email import generate_token, EmailError
from dmutils.forms import FakeCsrf
from ..helpers import BaseApplicationTest, csrf_only_request
import mock
EMAIL_EMPTY_ERROR = "Email address must be provided"
EMAIL_INVALID_ERROR = "Please enter a valid email address"
EMAIL_SENT_MESSAGE = "If the email address you've entered belongs to a Digital Marketplace account, we'll send a link to reset the password." # noqa
PASSWORD_EMPTY_ERROR = "Please enter your password"
PASSWORD_INVALID_ERROR = "Passwords must be between 10 and 50 characters"
PASSWORD_MISMATCH_ERROR = "The passwords you entered do not match"
NEW_PASSWORD_EMPTY_ERROR = "Please enter a new password"
NEW_PASSWORD_CONFIRM_EMPTY_ERROR = "Please confirm your new password"
TOKEN_CREATED_BEFORE_PASSWORD_LAST_CHANGED_ERROR = "This password reset link is invalid."
USER_LINK_EXPIRED_ERROR = "Check you’ve entered the correct link or ask the person who invited you to send a new invitation." # noqa
class TestSupplierRoleRequired(BaseApplicationTest):
def test_buyer_cannot_access_supplier_dashboard(self):
with self.app.app_context():
self.login_as_buyer()
dashboard_url = self.url_for('main.dashboard')
res = self.client.get(dashboard_url)
assert res.status_code == 302
assert res.location == self.get_login_redirect_url(dashboard_url)
self.assert_flashes('supplier-role-required', expected_category='error')
class TestInviteUser(BaseApplicationTest):
def test_should_be_an_error_for_invalid_email(self):
with self.app.app_context():
self.login()
res = self.client.post(
self.url_for('main.send_invite_user'),
data={
'csrf_token': FakeCsrf.valid_token,
'email_address': 'invalid'
}
)
assert EMAIL_INVALID_ERROR in res.get_data(as_text=True)
assert res.status_code == 400
def test_should_be_an_error_for_missing_email(self):
with self.app.app_context():
self.login()
res = self.client.post(
self.url_for('main.send_invite_user'),
data=csrf_only_request
)
assert EMAIL_EMPTY_ERROR in res.get_data(as_text=True)
assert res.status_code == 400
@mock.patch('app.main.views.login.data_api_client')
@mock.patch('app.main.views.login.send_email')
def test_should_redirect_to_list_users_on_success_invite(self, send_email, data_api_client):
with self.app.app_context():
self.login()
res = self.client.post(
self.url_for('main.send_invite_user'),
data={
'csrf_token': FakeCsrf.valid_token,
'email_address': '<EMAIL>',
}
)
assert res.status_code == 200
@mock.patch('app.main.views.login.data_api_client')
@mock.patch('app.main.views.login.send_email')
def test_should_strip_whitespace_surrounding_invite_user_email_address_field(self, send_email, data_api_client):
with self.app.app_context():
self.login()
self.client.post(
self.url_for('main.send_invite_user'),
data={
'csrf_token': FakeCsrf.valid_token,
'email_address': ' <EMAIL> ',
}
)
send_email.assert_called_once_with(
'<EMAIL>',
mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY,
)
@mock.patch('app.main.views.login.data_api_client')
@mock.patch('app.main.views.login.generate_supplier_invitation_token')
@mock.patch('app.main.views.login.send_email')
def test_should_call_generate_token_with_correct_params(self, send_email, supplier_token_mock, data_api_client):
with self.app.app_context():
self.login()
res = self.client.post(
self.url_for('main.send_invite_user'),
data={
'csrf_token': FakeCsrf.valid_token,
'email_address': '<EMAIL>',
})
assert res.status_code == 200
supplier_token_mock.assert_called_once_with(
name='',
email_address='<EMAIL>',
supplier_code=1234,
supplier_name='Supplier Name',
)
@mock.patch('app.main.views.login.send_email')
@mock.patch('app.main.views.login.generate_token')
def test_should_not_generate_token_or_send_email_if_invalid_email(self, send_email, generate_token):
with self.app.app_context():
self.login()
res = self.client.post(
self.url_for('main.send_invite_user'),
data={
'csrf_token': FakeCsrf.valid_token,
'email_address': 'total rubbish',
})
assert res.status_code == 400
assert not send_email.called
assert not generate_token.called
@mock.patch('app.main.views.login.send_email')
def test_should_be_an_error_if_send_invitation_email_fails(self, send_email):
with self.app.app_context():
self.login()
send_email.side_effect = EmailError(Exception('API is down'))
res = self.client.post(
self.url_for('main.send_invite_user'),
data={
'csrf_token': FakeCsrf.valid_token,
'email_address': '<EMAIL>',
'name': 'valid',
}
)
assert res.status_code == 503
@mock.patch('app.main.views.login.data_api_client')
@mock.patch('app.main.views.login.send_email')
def test_should_call_send_invitation_email_with_correct_params(self, send_email, data_api_client):
with self.app.app_context():
self.login()
self.app.config['INVITE_EMAIL_FROM'] = "EMAIL FROM"
self.app.config['INVITE_EMAIL_NAME'] = "EMAIL NAME"
res = self.client.post(
self.url_for('main.send_invite_user'),
data={
'csrf_token': FakeCsrf.valid_token,
'email_address': '<EMAIL>',
'name': 'valid',
}
)
assert res.status_code == 200
send_email.assert_called_once_with(
"<EMAIL>",
mock.ANY,
'Invitation to join Supplier Name as a team member',
"EMAIL FROM",
"EMAIL NAME",
)
@mock.patch('app.main.views.login.data_api_client')
@mock.patch('app.main.views.login.send_email')
def test_should_create_audit_event(self, send_email, data_api_client):
with self.app.app_context():
self.login()
res = self.client.post(
self.url_for('main.send_invite_user'),
data={
'csrf_token': FakeCsrf.valid_token,
'email_address': '<EMAIL>',
'name': 'valid',
}
)
assert res.status_code == 200
data_api_client.create_audit_event.assert_called_once_with(
audit_type=AuditTypes.invite_user,
user='<EMAIL>',
object_type='suppliers',
object_id=mock.ANY,
data={'invitedEmail': '<EMAIL>'})
class TestCreateUser(BaseApplicationTest):
def _generate_token(self, supplier_code=1234, supplier_name='Supplier Name', name='Me', email_address='<EMAIL>'): # noqa
with self.app.app_context():
return generate_supplier_invitation_token(
name=name,
email_address=email_address,
supplier_code=supplier_code,
supplier_name=supplier_name,
)
def create_user_setup(self, data_api_client):
data_api_client.create_user.return_value = self.user(123, '<EMAIL>', 'Supplier', 0, 'valid name')
def test_should_be_an_error_for_invalid_token(self):
token = "<PASSWORD>"
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 404
def test_should_be_an_error_for_missing_token(self):
res = self.client.get('/suppliers/create-user')
assert res.status_code == 404
@mock.patch('app.main.views.login.data_api_client')
def test_should_be_an_error_for_invalid_token_contents(self, data_api_client):
token = generate_token(
{
'this_is_not_expected': 1234
},
self.app.config['SECRET_KEY'],
self.app.config['SUPPLIER_INVITE_TOKEN_SALT']
)
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 404
assert data_api_client.get_user.called is False
assert data_api_client.get_supplier.called is False
def test_should_be_a_bad_request_if_token_expired(self):
res = self.client.get(
self.url_for('main.create_user', token=12345)
)
assert res.status_code == 404
assert USER_LINK_EXPIRED_ERROR in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_render_create_user_page_if_user_does_not_exist(self, data_api_client):
data_api_client.get_user.return_value = None
token = self._generate_token()
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 200
page_text = res.get_data(as_text=True).replace(' ', '')
for message in [
"Supplier Name",
"<EMAIL>",
'<input type="submit" class="button-save"',
urllib2.quote(token),
]:
assert message.replace(' ', '') in page_text
def test_should_be_an_error_if_invalid_token_on_submit(self):
res = self.client.post(
self.url_for('main.submit_create_user', token='invalidtoken'),
data={
'csrf_token': FakeCsrf.valid_token,
'password': '<PASSWORD>',
'name': 'name',
'email_address': '<EMAIL>',
'accept_terms': 'y',
}
)
assert res.status_code == 404
assert USER_LINK_EXPIRED_ERROR in res.get_data(as_text=True)
assert (
'<input type="submit" class="button-save"'
not in res.get_data(as_text=True)
)
def test_should_be_an_error_if_missing_name_and_password(self):
token = self._generate_token()
res = self.client.post(
self.url_for('main.submit_create_user', token=token),
data=csrf_only_request
)
assert res.status_code == 400
for message in [
"Please enter a name",
"Please enter a password"
]:
assert message in res.get_data(as_text=True)
def test_should_be_an_error_if_too_short_name_and_password(self):
token = self._generate_token()
res = self.client.post(
self.url_for('main.submit_create_user', token=token),
data={
'csrf_token': FakeCsrf.valid_token,
'password': "<PASSWORD>",
'name': '',
'accept_terms': 'y',
}
)
assert res.status_code == 400
for message in [
"Please enter a name",
"Passwords must be between 10 and 50 characters"
]:
assert message in res.get_data(as_text=True)
def test_should_be_an_error_if_too_long_name_and_password(self):
with self.app.app_context():
token = self._generate_token()
twofiftysix = "a" * 256
fiftyone = "a" * 51
res = self.client.post(
self.url_for('main.submit_create_user', token=token),
data={
'csrf_token': FakeCsrf.valid_token,
                    'password': fiftyone,
'name': twofiftysix,
'accept_terms': 'y',
}
)
assert res.status_code == 400
for message in [
'Names must be between 1 and 255 characters',
'Passwords must be between 10 and 50 characters',
'Create',
'<EMAIL>'
]:
assert message in res.get_data(as_text=True)
def test_require_acceptance_of_terms(self):
token = self._generate_token()
res = self.client.post(
self.url_for('main.submit_create_user', token=token),
data={
'csrf_token': FakeCsrf.valid_token,
'password': '<PASSWORD>!!!',
'name': 'Person',
# no accept_terms
}
)
assert res.status_code == 400
assert 'must accept the terms' in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_return_an_error_if_user_exists_and_is_a_buyer(self, data_api_client):
data_api_client.get_user.return_value = self.user(123, '<EMAIL>', None, None, 'Users name')
token = self._generate_token()
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 400
print("RESPONSE: {}".format(res.get_data(as_text=True)))
assert "Account already exists" in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_return_an_error_with_admin_message_if_user_is_an_admin(self, data_api_client):
data_api_client.get_user.return_value = self.user(123, '<EMAIL>', None, None, 'Users name', role='admin')
token = self._generate_token()
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 400
assert "Account already exists" in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_return_an_error_with_locked_message_if_user_is_locked(self, data_api_client):
data_api_client.get_user.return_value = self.user(
123,
'<EMAIL>',
1234,
'Supplier Name',
'Users name',
locked=True
)
token = self._generate_token()
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 400
assert "Your account has been locked" in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_return_an_error_with_inactive_message_if_user_is_not_active(self, data_api_client):
data_api_client.get_user.return_value = self.user(
123,
'<EMAIL>',
1234,
'Supplier Name',
'Users name',
active=False
)
token = self._generate_token()
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 400
assert "Your account has been deactivated" in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_return_an_error_with_wrong_supplier_message_if_invited_by_wrong_supplier(self, data_api_client): # noqa
data_api_client.get_user.return_value = self.user(
123,
'<EMAIL>',
1234,
'Supplier Name',
'Users name'
)
token = self._generate_token(
supplier_code=9999,
supplier_name='Different Supplier Name',
email_address='<EMAIL>'
)
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 400
assert u"You can only use your existing account with one company." in res.get_data(as_text=True)
assert u"You already have an existing account with Supplier Name" in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_return_an_error_if_user_is_already_a_supplier(self, data_api_client):
data_api_client.get_user.return_value = self.user(
123,
'<EMAIL>',
1234,
'Supplier Name',
'Users name'
)
token = self._generate_token()
res = self.client.get(
self.url_for('main.create_user', token=token),
follow_redirects=True
)
assert res.status_code == 400
assert "Account already exists" in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_return_an_error_if_logged_in_user_is_not_invited_user(self, data_api_client):
self.login()
data_api_client.get_user.return_value = self.user(
999,
'<EMAIL>',
1234,
'Supplier Name',
'Different users name'
)
token = self._generate_token()
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 400
assert "Account already exists" in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_return_an_error_if_user_is_already_logged_in(self, data_api_client):
self.login()
data_api_client.get_user.return_value = self.user(
123,
'<EMAIL>',
1234,
'Supplier Name',
'Users name'
)
token = self._generate_token()
res = self.client.get(
self.url_for('main.create_user', token=token)
)
assert res.status_code == 400
assert "Account already exists" in res.get_data(as_text=True)
@mock.patch('app.main.views.login.data_api_client')
def test_should_create_user_if_user_does_not_exist(self, data_api_client):
data_api_client.get_user.return_value = None
self.create_user_setup(data_api_client)
token = self._generate_token()
res = self.client.post(
self.url_for('main.submit_create_user', token=token),
data={
'csrf_token': FakeCsrf.valid_token,
'password': '<PASSWORD>',
'name': 'valid name',
'accept_terms': 'y',
}
)
data_api_client.create_user.assert_called_once_with({
'role': 'supplier',
'password': '<PASSWORD>',
'emailAddress': '<EMAIL>',
'name': 'valid name',
'supplierCode': 1234
})
assert res.status_code == 302
assert res.location == self.url_for('main.dashboard', _external=True)
self.assert_flashes('account-created', 'flag')
@mock.patch('app.main.views.login.data_api_client')
def test_should_return_an_error_if_user_exists(self, data_api_client):
data_api_client.create_user.side_effect = HTTPError(mock.Mock(status_code=409))
token = self._generate_token()
res = self.client.post(
self.url_for('main.submit_create_user', token=token),
data={
'csrf_token': FakeCsrf.valid_token,
'password': '<PASSWORD>',
'name': 'valid name',
'accept_terms': 'y',
}
)
assert res.status_code == 400
data_api_client.create_user.assert_called_once_with({
'role': 'supplier',
'password': '<PASSWORD>',
'emailAddress': '<EMAIL>',
'name': 'valid name',
'supplierCode': 1234
})
@mock.patch('app.main.views.login.data_api_client')
def test_should_strip_whitespace_surrounding_create_user_name_field(self, data_api_client):
data_api_client.get_user.return_value = None
self.create_user_setup(data_api_client)
token = self._generate_token()
res = self.client.post(
self.url_for('main.submit_create_user', token=token),
data={
'csrf_token': FakeCsrf.valid_token,
'password': '<PASSWORD>',
'name': ' valid name ',
'accept_terms': 'y',
}
)
assert res.status_code == 302
data_api_client.create_user.assert_called_once_with({
'role': mock.ANY,
'password': '<PASSWORD>',
'emailAddress': mock.ANY,
'name': 'valid name',
'supplierCode': mock.ANY
})
@mock.patch('app.main.views.login.data_api_client')
def test_should_not_strip_whitespace_surrounding_create_user_password_field(self, data_api_client):
data_api_client.get_user.return_value = None
self.create_user_setup(data_api_client)
token = self._generate_token()
res = self.client.post(
self.url_for('main.submit_create_user', token=token),
data={
'csrf_token': FakeCsrf.valid_token,
'password': ' <PASSWORD> ',
'name': 'valid name ',
'accept_terms': 'y',
}
)
assert res.status_code == 302
data_api_client.create_user.assert_called_once_with({
'role': mock.ANY,
'password': ' <PASSWORD> ',
'emailAddress': mock.ANY,
'name': 'valid name',
'supplierCode': mock.ANY
})
@mock.patch('app.main.views.login.data_api_client')
def test_should_be_a_503_if_api_fails(self, data_api_client):
        with self.app.app_context():
| |
1
self.to_be_inserted.append([iid, [the_focus, vout, 'collection']])
# self.qr.put(('PRINT', 'to be inserted ={}'.\
# format(self.to_be_inserted[-1])))
thisdir = iid
e_focus = self.trout.find(".//" + the_focus)
e_parent = etree.SubElement(e_focus, iid)
e_parent.text = 'collection'
# self.qr.put(('PRINT', 'e_focus {}, e_parent {}, text {}'.\
# format(e_focus.tag, e_parent.tag, e_parent.text)))
# self.qr.put(('PRINT', self.columns))
# self.qr.put(('PRINT', vout))
for c,v in zip(self.columns, vout):
e_parent.attrib[c] = v
# self.qr.put(('PRINT', 'got past c,v, added {}'.\
# format(e_parent.tag)))
_ff = dict()
flist = dict()
#step through a list of filepaths for all mp3 files in current dir only
for f_ in [forward_slash_path(afile) \
for afile in glob.glob(adir_path + '/*.mp3')]:
_ff[sort_key_for_filenames(os.path.basename(f_)[:-4])] = \
os.path.basename(f_)[:-4]
flist[os.path.basename(f_)[:-4]] = f_
# self.qr.put(('PRINT', 'got past f_'))
for _ll in sorted(_ff):
self._add_a_file(flist[_ff[_ll]], e_parent)
self.qr.put(('PROGSTEP', 1))
# self.qr.put(('PRINT', 'got past add a file'))
# recurse through sub-dirs
for adir in sorted([os.path.normpath(adir_path + '/' + d) \
for d in os.listdir(adir_path) \
if os.path.isdir(adir_path + '/' + d) \
and len(d) > 0]):
# self.qr.put(('PRINT', 'Unpacking{}'.format(adir)))
self.qr.put(('STATUS{}', ('Unpacking{}', adir)))
self._add_tree(thisdir, adir)
# self.qr.put(('PRINT', ' added {}'.format(adir)))
def _rename_children_of(self, parent):
"""rename all the children of parent, parents name is unchanged.
Typicaly will always call on the top level project collection"""
#rename all branches
# self.qr.put(('PRINT', 'renaming children of >{}<'.format(parent)))
e_parent = self.trout.find(".//" + parent)
if e_parent is None:
return
# self.qr.put(('PRINT', 'renaming children of {}'.format(e_parent.tag)))
parent_attribs = e_parent.attrib
# children = list(e_parent)
children = e_parent.getchildren()
# self.qr.put(('PRINT', '>{}< has {} children'.format(e_parent.tag, len(children))))
# self.qr.put(('PRINT', '{}'.format(list(children))))
ancestor_name = parent_attribs['Name']
my_isalpha = True
if ancestor_name:
if ancestor_name[-1] == '@':
my_name = '@'
else:
my_name = 1
my_isalpha = ancestor_name[-1].isdecimal()
else:
my_name = 1
if self.initial_digit:
my_isalpha = self.initial_digit[-1].isdecimal()
else:
my_name = 1
my_isalpha = False
my_num = 1
nos_chars = len(to_alpha(len(children))) if my_name == 1 else 0
nos_digits = (len(str(len(children)))-1) if my_name == 1 else 0
the_format = '{0:0' + '{}'.format(nos_digits) + 'd}'
alpha_format = '{0:A>' + '{}'.format(nos_chars) + 's}'
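        # Illustrative example (hypothetical counts): when my_name == 1 and
        # there are 12 children, nos_digits = len(str(12)) - 1 = 1, so
        # the_format is '{0:01d}'; for alpha naming, alpha_format left-pads
        # names with 'A' to the width of to_alpha(12).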
for child in children:
# self.qr.put(('PRINT', 'for {} of {}'.format(child.tag, parent)))
self.qr.put(('PROGSTEP', 1))
            #bullet-proofed in to_alpha() so it does not exceed the single-digit limit
my_str = alpha_format.format(to_alpha(my_name - 1)) \
if my_isalpha else the_format.format(my_name)
vout = list()
if child.attrib['Type'] == 'collection':
title = self._my_unidecode(child.attrib['TIT2'])
#strip out any unapproved punctuation - done in my_unidecode
child.attrib['Name'] = ancestor_name + my_str
child.text = "{0}{1}{2}-{3}".format(self.prefix, \
ancestor_name, my_str, title)
# self.qr.put(('PRINT', '{}/{} is collection'.format(child.tag, child.text)))
vout = [['Name', child.attrib['Name']], ['TIT2', title]]
self.to_be_renamed[child.tag] = [vout, child.text]
my_name += 1
# self.qr.put(('PRINT', 'rename children of {}'.format(child.tag)))
# return
self._rename_children_of(child.tag)
else: #is file so use
size = os.path.getsize(child.attrib['Location']) \
if child.attrib['Location'] != '-' \
else 0
if size == 0:
#fetch location, trim off path and '.mp3' extension,
#transliterate unicode(utf-8) to 7-bit ascii or Latin-1?
title = self._my_unidecode(os.path.basename(\
child.attrib['Location'][:-4]))
#transliterate unicode(utf-8) to 7-bit ascii or Latin-1?
#replace spaces and punctuation - done in my_unidecode
child.attrib['Name'] = ancestor_name + my_str
child.text = "{0}{1}{2}-{3}".format(self.prefix, \
ancestor_name, my_str, title)
# self.qr.put(('PRINT', 'zero length file {}'.format(child.text)))
vout = [['Name', child.attrib['Name']], ['TIT2', title]]
else: #idiot/not idiot always downgrade TIT2 to form title
tit2 = self._downgrade_data('TIT2', child)
title = self._my_unidecode(tit2)
child.attrib['Name'] = "{0}-{1:02d}".format(\
ancestor_name, my_num)
child.text="{0}{1}-{2:02d}-{3}".format(self.prefix, \
ancestor_name, my_num, title)
# self.qr.put(('PRINT', 'mp3 file {}'.format(child.text)))
if self.mode: #advanced
vout = [['Name', child.attrib['Name']],\
['TIT2', child.attrib['TIT2']]]
else: #simple
vout = [['Name', child.attrib['Name']],['TIT2', tit2]]
self.to_be_renamed[child.tag] = [vout, child.text]
my_num += 1
self.qr.put(('PROGSTEP', 1))
def _my_unidecode(self, text):
"""normalize strings to avoid unicode character which won't display
correctly or whose use in filenames may crash filesystem"""
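        # Behaviour summary: preferred == 0 transliterates aggressively with an
        # empty preferred list, preferred == 1 applies the preferred
        # character/string pairs before transliterating, and preferred == 2
        # leaves the text untouched.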
l = list()
# self._fix_eng_bug_in_unidecode()
if self.preferred == 0:
self.pref = list()
            #aggressively normalize
elif self.preferred == 1:
#use preferred list to normalize
pass
elif self.preferred == 2:
#normalization disabled
return text
else:
self.qr.put(('PRINT', "Error, unrecognised value for self.preferred=>{}< should be [0, 1, 2]".format(self.preferred)))
return text
self._fix_eng_bug_in_unidecode()
#got this far so either aggressive with 'empty' list or used preferred
#scan list of preferred character/string pairs
        for kv in self.pref:
#build list of all hits in text
l.extend([[m.start(), len(kv[0]), kv[1]] \
for m in kv[2].finditer(text)])
if l:
#now sort list of hits into sequence order
            l = sorted(l, key=lambda hit: hit[0])
result = ''
s = 0
for ll in l:
                #aggressively normalize from end of last match to start of new match, then append the replacement
result += ''.join([c if c.isalnum() or \
c in self.pref_char \
else '_' \
for c in unidecode(text[s:ll[0]])]) + ll[2]
#start of match + len of match
s = ll[0] + ll[1]
if s < len(text):
                #aggressively normalize from end of last match to end of string
result += ''.join([c if c.isalnum() or \
c in self.pref_char else '_' \
for c in unidecode(text[s:])])
return result
else:
return ''.join([c if c.isalnum() or c in self.pref_char else '_' \
for c in unidecode(text)])
def _fix_eng_bug_in_unidecode(self):
if 'ŋ' not in [v[0] for v in self.pref]:
self.pref.append(['ŋ', 'ng', re.compile('ŋ')])
if 'Ŋ' not in [v[0] for v in self.pref]:
self.pref.append(['Ŋ', 'Ng', re.compile('Ŋ')])
if "'" not in [v[0] for v in self.pref]:
self.pref.append(["'", '-', re.compile("'")])
if "-" not in [v[0] for v in self.pref]:
self.pref.append(["-", '-', re.compile("-")])
def _hash_it(self, artworkdata):
"""put artworkdata (is bytes) into hashedgraphics and return hashtag and length str"""
#so open artwork read in as bytes
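        # Illustrative usage (hypothetical data): for a 2048-byte artwork
        # payload this returns (the sha256 hex digest, "b'2Kb'"), since
        # int(2048 / 1024 + 0.5) == 2.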
m = hashlib.sha256(artworkdata)
length = "b'{}Kb'".format(int(len(artworkdata)/1024 + 0.5))
#so if the hash not a key in hashed_graphics, add it
if m.hexdigest() not in self.hashed_graphics:
self.hashed_graphics[m.hexdigest()] = artworkdata
return m.hexdigest(), length
def _read_mp3_process_atag(self, atag, k, apic_params, filepath):
"""process the (advanced) mp3 tag"""
#force utf8 encoding, which is the form all text is held in internally
atag.encoding = 3
theParameters = None
if k == 'APIC':
hash_tag, length = self._hash_it(atag.data)
theParameters = [int(atag.encoding), atag.mime, \
int(atag.type), atag.desc, \
hash_tag]
#There may be multiple APIC tags in a file
# displayed in APIC as multiple frames in sequence order
#the hash tag for each frame will be held as part of a frame
#in APIC_ in the corresponding order
apic_params.extend([str(theParameters)])
theParameters[4] = length
elif k in THE_P:
theParameters = THE_P[k](atag, True)
else:
self.qr.put(('MESSAGEBOXSHOWERRORIN', ('Error in read_mp3_process atag()', \
"{} is unrecognized MP3 tag in {}".format(\
atag, filepath))))
return theParameters
def _read_mp3_tags(self, filepath):
"""read in an mp3 files tags to Treeview wiget"""
if os.path.getsize(filepath) > 0:
audio = ID3(filepath)
result = ['file', '', filepath]
apic_params = list()
# self.qr.put(('PRINT', self.displayColumns[2:-1] ))
for k in self.displayColumns[2:-1]:
#list all instances of that tag
list_tags = audio.getall(k)
# if k in ['COMM',]:
# self.qr.put(('PRINT', "list_tags={}".format(list_tags)))
aresult = list()
if k in ['COMM',]:
langs = ['XXX', 'eng', 'fra', 'por']
comms = dict()
xresult = list()
# self.qr.put(('PRINT',"found {} COMM tags in {}".format(len(list_tags), os.path.basename(filepath))))
if list_tags: #not an empty list!
for atag in list_tags:
# if k in ['COMM',]:
# self.qr.put(('PRINT', "atag is {}".format(str(atag))))
#now for each tag instance...
theParameters = \
self._read_mp3_process_atag(atag, k, \
apic_params, filepath)
# if k in ['COMM',]:
# self.qr.put(('PRINT',"theParameters={}".format(theParameters)))
#accumulate COMM tags in comms all others in aresult
if k in ['COMM',] and theParameters:
if theParameters[1] in comms.keys():
comms[theParameters[1]][theParameters[1] + theParameters[2]] = theParameters
else:
comms[theParameters[1]] = dict()
comms[theParameters[1]][theParameters[1] + theParameters[2]] = theParameters
elif theParameters:
aresult.extend([str(theParameters)])
#now if idiot mode choose one frame and force lang='XXX'
# choice if more than one pick first XXX,
# if no XXX pick first eng,
# if no eng pick first fra,
# if no fra pick first
# else if advanced mode list langs
if k in ['COMM',]:
# self.qr.put(('PRINT', "processed all COMM tags for this file"))
# self.qr.put(('PRINT', "comms is {}".format(comms)))
# self.qr.put(('PRINT', "{} langs in COMM".format(comms.keys())))
for l in langs:
if not xresult and l in comms.keys():
keylist = sorted(comms[l].keys())
xresult = comms[l][keylist[0]]
xresult[0] = 3
xresult[1] = 'XXX'
for y in keylist:
this = [3, \
'XXX', \
comms[l][y][2], \
comms[l][y][3]]
aresult.append(this)
elif l in comms.keys():
keylist = sorted(comms[l].keys())
for y in keylist:
this = [3, \
comms[l][y][1], \
comms[l][y][2], \
comms[l][y][3]]
aresult.append(this)
for l in sorted(set(comms.keys()).difference(set(langs))):
keylist = sorted(comms[l].keys())
if not xresult:
xresult = comms[l][keylist[0]]
xresult[0] = 3
xresult[1] = 'XXX'
for y in keylist:
this = [3, \
'XXX', \
comms[l][y][2], \
comms[l][y][3]]
aresult.append(this)
else:
for y in keylist:
this = [3, \
comms[l][y][1], \
comms[l][y][2], \
comms[l][y][3]]
aresult.append(this)
if not | |
"""
ui.dialogs.race
Contains all the data and capabilities needed for race creation.
:author: <NAME>
:license: MIT, see LICENSE.txt for more details.
"""
from PySide.QtGui import QDialog
from PySide.QtGui import QBoxLayout
from PySide.QtGui import QLineEdit
from PySide.QtGui import QComboBox
from PySide.QtGui import QStackedLayout
from PySide.QtGui import QFormLayout
from PySide.QtGui import QLabel
from PySide.QtGui import QFrame
from PySide.QtGui import QToolButton
from PySide.QtGui import QCheckBox
from PySide.QtGui import QPixmap
from PySide.QtGui import QTextEdit
from PySide.QtGui import QGroupBox
from PySide.QtGui import QWidget
from PySide.QtGui import QPushButton
from PySide.QtGui import QPainter
from PySide.QtCore import Qt
from PySide.QtCore import QRect
from objects.race import Race
from parameters.race import TemperatureParameters
from parameters.race import LesserRacialTrait
from parameters.race import PrimaryRacialTrait
from parameters.race import LeftoverPointsOption
from parameters.race import TechnologyCostOption
from parameters.races import PredefinedRaces
import glob
import math
from ui import helpers
PREVIOUS_ICON = "resources/previous.png"
RACE_ICON_LOCATION = "resources/race"
class RaceIconResources():
"""
This is a temporary placeholder until a full resource manager is
developed.
"""
def __init__(self):
self.icons = []
self.discover_icons(RACE_ICON_LOCATION)
def discover_icons(self, location):
glob_search = location + "/*.png"
self.icons = glob.glob(glob_search)
def index_of(self, image_path):
index = 0
if(image_path in self.icons):
index = self.icons.index(image_path)
return index
class ColorSlider(QWidget):
def __init__(self, puck_color, min_value, max_value, start_min, start_max,
parent=None):
super(ColorSlider, self).__init__(parent)
self.slider_height = 30
self.slider_width = 250
self.puck_color = puck_color
self.setFixedHeight(self.slider_height)
self.setFixedWidth(self.slider_width)
self.slope = float(self.slider_width) / float(max_value - min_value)
self.offset = -1.0 * (float(min_value) * self.slope)
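        # Illustrative mapping (hypothetical bounds): with min_value -200,
        # max_value 200 and slider_width 250, slope = 250 / 400 = 0.625 and
        # offset = 125, so a value of 0 lands at pixel 125, mid-slider.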
self.update_value(start_min, start_max)
def update_value(self, current_low, current_high, ignore_values=False):
self.low_point = math.floor(self.slope * current_low + self.offset)
self.high_point = math.floor(self.slope * current_high + self.offset)
self.ignore_values = ignore_values
self.repaint()
def paintEvent(self, event):
painter = QPainter(self)
bg_rect = self.rect()
painter.fillRect(bg_rect, Qt.black)
if(not self.ignore_values):
width = self.high_point - self.low_point
puck_rect = QRect(self.low_point, 0, width, self.slider_height)
painter.fillRect(puck_rect, self.puck_color)
class ArrowControl(QWidget):
def __init__(self, text_before, text_after,
increment_callback, decrement_callback, parent=None):
super(ArrowControl, self).__init__(parent)
self.increment_callback = increment_callback
self.decrement_callback = decrement_callback
main_layout = QBoxLayout(QBoxLayout.LeftToRight)
main_layout.addWidget(QLabel(text_before))
self.value_label = QLabel()
main_layout.addWidget(self.value_label)
self.increment_arrow = QToolButton()
self.increment_arrow.setArrowType(Qt.UpArrow)
self.increment_arrow.setMaximumHeight(10)
self.decrement_arrow = QToolButton()
self.decrement_arrow.setArrowType(Qt.DownArrow)
self.decrement_arrow.setMaximumHeight(10)
arrow_container = QBoxLayout(QBoxLayout.TopToBottom)
arrow_container.addWidget(self.increment_arrow)
arrow_container.addWidget(self.decrement_arrow)
main_layout.addLayout(arrow_container)
main_layout.addWidget(QLabel(text_after))
main_layout.addStretch(1)
self.setLayout(main_layout)
def update_value(self, new_value):
self.value_label.setText(str(new_value))
def bind_callbacks(self):
self.increment_arrow.clicked.connect(self.increment_callback)
self.decrement_arrow.clicked.connect(self.decrement_callback)
class RaceWizard(QDialog):
"""
The race wizard for race creation/editing/viewing.
In view-only (self.read_only) mode, the race will be presented, but all
controls that allow modification will be disabled.
The primary output object of this dialog in creation/edit mode, if
accepted, is:
self.race
The UI will enforce a legal race upon dialog exit, where a legal race
is one that returns True from self.race.is_legal()
"""
HELP_BUTTON_ID = 0
CANCEL_BUTTON_ID = 1
PREVIOUS_BUTTON_ID = 2
NEXT_BUTTON_ID = 3
FINISH_BUTTON_ID = 4
def __init__(self, parent=None, race=None, readonly=False):
super(RaceWizard, self).__init__(parent)
if(race):
self.race = race
else:
self.race = Race()
self.icon_resources = RaceIconResources()
self.read_only = readonly
self.init_user_controls()
self.init_ui()
self.bind_user_controls()
if not race:
self.base_races.button(PredefinedRaces.Default).setChecked(True)
self.handle_base_race_change()
def init_user_controls(self):
"""
Sets up all important user controls on this form.
"""
self.advantage_points = QLabel()
self.race_name = QLineEdit()
self.plural_race_name = QLineEdit()
self.password = QLineEdit()
self.leftover_points = QComboBox()
self.leftover_points.addItems(LeftoverPointsOption.names())
buttons = [
"&Help",
"&Cancel",
"&Previous",
"&Next"
]
if(self.read_only):
buttons.append("&Ok")
else:
buttons.append("&Finish")
self.nav_buttons = helpers.build_push_button_group(buttons)
self.primary_racials = helpers.build_radio_group(
PrimaryRacialTrait.names())
self.lesser_racials = helpers.build_checkbox_group(
LesserRacialTrait.names())
self.lesser_racials.setExclusive(False)
self.base_races = helpers.build_radio_group(PredefinedRaces.names())
self.previous_icon_button = QToolButton()
self.previous_icon_button.setArrowType(Qt.LeftArrow)
self.next_icon_button = QToolButton()
self.next_icon_button.setArrowType(Qt.RightArrow)
self.current_race_icon = self.race.icon
self.race_icon_label = QLabel()
self.race_icon_label.setAlignment(Qt.AlignCenter)
self.prt_desc = QTextEdit()
self.prt_desc.setFixedHeight(160)
self.prt_desc.setReadOnly(True)
self.lrt_desc = QTextEdit()
self.lrt_desc.setFixedHeight(110)
self.lrt_desc.setReadOnly(True)
self.temperature_max = QLabel()
self.temperature_max.setAlignment(Qt.AlignCenter)
self.increment_temperature_range = QPushButton("<< >>")
self.increment_temperature_midpoint = QToolButton()
self.increment_temperature_midpoint.setArrowType(Qt.RightArrow)
self.decrement_temperature_range = QPushButton(">> <<")
self.decrement_temperature_midpoint = QToolButton()
self.decrement_temperature_midpoint.setArrowType(Qt.LeftArrow)
self.temperature_slider = ColorSlider(Qt.darkRed,
TemperatureParameters.Minimum,
TemperatureParameters.Maximum,
self.race.temperature_min,
self.race.temperature_max)
self.temperature_min = QLabel()
self.temperature_min.setAlignment(Qt.AlignCenter)
self.temperature_immune = QCheckBox("Immune to Temperature")
self.growth_rate = ArrowControl(
"Maximum colonist growth rate percentage per year: ",
" ",
self.handle_increment_growth_rate,
self.handle_decrement_growth_rate)
self.resource_production = ArrowControl(
"One resource is generated each year for every ",
" colonists.",
self.handle_increment_colonist_resource,
self.handle_decrement_colonist_resource)
self.factory_production = ArrowControl(
"Every 10 factories produce ", " resources each year.",
self.handle_increment_factory_production,
self.handle_decrement_factory_production)
self.factory_cost = ArrowControl(
"Factories require ", "resources to build.",
self.handle_increment_factory_cost,
self.handle_decrement_factory_cost)
self.colonists_operate_factories = ArrowControl(
"Every 10,000 colonists may operate up to ",
"factories.", self.handle_increment_colonists_operate_factories,
self.handle_decrement_colonists_operate_factories)
self.factory_cheap_germanium = QCheckBox(
"Factories cost 1kT less Germanium to build.")
self.mine_production = ArrowControl(
"Every 10 mines produce up to ",
" kT of each mineral each year.",
self.handle_increment_mine_production,
self.handle_decrement_mine_production)
self.mine_cost = ArrowControl(
"Mines require ", "resources to build.",
self.handle_increment_mine_cost,
self.handle_decrement_mine_cost)
self.colonists_operate_mines = ArrowControl(
"Every 10,000 colonists may operate up to ",
"mines.", self.handle_increment_colonists_operate_mines,
self.handle_decrement_colonists_operate_mines)
tech_cost_names = TechnologyCostOption.names()
self.energy_research_cost = helpers.build_radio_group(
tech_cost_names)
self.electronics_research_cost = helpers.build_radio_group(
tech_cost_names)
self.construction_research_cost = helpers.build_radio_group(
tech_cost_names)
self.biotech_research_cost = helpers.build_radio_group(
tech_cost_names)
self.weapons_research_cost = helpers.build_radio_group(
tech_cost_names)
self.propulsion_research_cost = helpers.build_radio_group(
tech_cost_names)
self.expensive_tech = QCheckBox(
"All 'Costs 75% extra' research fields start at Tech 4.")
if(self.read_only):
self.nav_buttons.button(self.CANCEL_BUTTON_ID).setVisible(False)
self.previous_icon_button.setEnabled(False)
self.next_icon_button.setEnabled(False)
self.tabbed_layout = QStackedLayout()
def bind_user_controls(self):
"""
Binds all user controls used by this form to their handlers.
"""
self.nav_buttons.buttonClicked.connect(self.handle_nav_button)
self.previous_icon_button.clicked.connect(self.handle_previous_icon)
self.next_icon_button.clicked.connect(self.handle_next_icon)
self.primary_racials.buttonClicked.connect(
self.handle_primary_racial_change)
self.lesser_racials.buttonClicked.connect(
self.handle_lesser_racial_change)
self.base_races.buttonClicked.connect(
self.handle_base_race_change)
self.increment_temperature_midpoint.clicked.connect(
self.handle_increment_temperature_midpoint)
self.decrement_temperature_midpoint.clicked.connect(
self.handle_decrement_temperature_midpoint)
self.increment_temperature_range.clicked.connect(
self.handle_increment_temperature_range)
self.decrement_temperature_range.clicked.connect(
self.handle_decrement_temperature_range)
self.temperature_immune.clicked.connect(
self.handle_temperature_immune_change)
self.growth_rate.bind_callbacks()
self.energy_research_cost.buttonClicked.connect(
self.handle_energy_cost_change)
self.weapons_research_cost.buttonClicked.connect(
self.handle_weapons_cost_change)
self.biotech_research_cost.buttonClicked.connect(
self.handle_biotech_cost_change)
self.electronics_research_cost.buttonClicked.connect(
self.handle_electronics_cost_change)
self.propulsion_research_cost.buttonClicked.connect(
self.handle_propulsion_cost_change)
self.construction_research_cost.buttonClicked.connect(
self.handle_construction_cost_change)
self.expensive_tech.clicked.connect(self.handle_expensive_tech)
self.resource_production.bind_callbacks()
self.factory_production.bind_callbacks()
self.factory_cost.bind_callbacks()
self.colonists_operate_factories.bind_callbacks()
self.factory_cheap_germanium.clicked.connect(
self.handle_factory_cheap_germanium_change)
self.mine_production.bind_callbacks()
self.mine_cost.bind_callbacks()
self.colonists_operate_mines.bind_callbacks()
def init_ui(self):
"""
Builds up the user interface - laying out the user controls on this
form, any relevant tabbed frames, titles, icons, etc.
"""
if(self.read_only):
self.setWindowTitle('View Race Details')
else:
self.setWindowTitle('Custom Race Wizard')
self.tabbed_layout.addWidget(self.create_race_general_details_page())
self.tabbed_layout.addWidget(self.create_primary_trait_page())
self.tabbed_layout.addWidget(self.create_lesser_trait_page())
self.tabbed_layout.addWidget(self.create_environmental_habitat_page())
self.tabbed_layout.addWidget(self.create_economy_page())
self.tabbed_layout.addWidget(self.create_research_cost_page())
self.manage_navigation_state(0)
button_box = QBoxLayout(QBoxLayout.LeftToRight)
button_box.addWidget(self.nav_buttons.button(self.HELP_BUTTON_ID))
button_box.addWidget(self.nav_buttons.button(self.CANCEL_BUTTON_ID))
button_box.addWidget(self.nav_buttons.button(self.PREVIOUS_BUTTON_ID))
button_box.addWidget(self.nav_buttons.button(self.NEXT_BUTTON_ID))
button_box.addWidget(self.nav_buttons.button(self.FINISH_BUTTON_ID))
main_layout = QBoxLayout(QBoxLayout.TopToBottom, self)
self.advantage_points.setAlignment(Qt.AlignRight)
main_layout.addWidget(self.advantage_points)
main_layout.addLayout(self.tabbed_layout)
main_layout.addLayout(button_box)
self.update_advantage_points()
def handle_base_race_change(self):
selected_race = self.base_races.checkedId()
if(selected_race != -1):
self.race = PredefinedRaces.race(selected_race)
self.apply_race_settings()
def apply_race_settings(self):
self.race_name.setText(self.race.name)
self.plural_race_name.setText(self.race.plural_name)
self.manage_icon_state(self.race.icon)
self.leftover_points.setCurrentIndex(self.race.leftover_points)
self.primary_racials.button(
self.race.primary_racial_trait).setChecked(True)
self.handle_primary_racial_change()
for button in self.lesser_racials.buttons():
button.setChecked(False)
for trait in self.race.lesser_racial_traits:
self.lesser_racials.button(trait).setChecked(True)
self.update_temperature_labels()
self.temperature_immune.setChecked(self.race.temperature_immune)
self.growth_rate.update_value(self.race.growth_rate)
self.resource_production.update_value(
self.race.resource_production)
self.factory_production.update_value(
self.race.factory_production)
self.factory_cost.update_value(self.race.factory_cost)
self.colonists_operate_factories.update_value(
self.race.colonists_operate_factories)
self.factory_cheap_germanium.setChecked(
self.race.factory_cheap_germanium)
self.mine_production.update_value(self.race.mine_production)
self.mine_cost.update_value(self.race.mine_cost)
self.colonists_operate_mines.update_value(
self.race.colonists_operate_mines)
self.energy_research_cost.button(
self.race.energy_cost).setChecked(True)
self.propulsion_research_cost.button(
self.race.propulsion_cost).setChecked(True)
self.electronics_research_cost.button(
self.race.electronics_cost).setChecked(True)
self.construction_research_cost.button(
self.race.construction_cost).setChecked(True)
self.biotech_research_cost.button(
self.race.biotechnology_cost).setChecked(True)
self.weapons_research_cost.button(
self.race.weapons_cost).setChecked(True)
self.expensive_tech.setChecked(self.race.expensive_tech_boost)
self.temperature_slider.update_value(self.race.temperature_min,
self.race.temperature_max,
self.race.temperature_immune)
self.update_advantage_points()
def update_advantage_points(self):
self.race.recalculate_points()
advantage_points = self.race.advantage_points
self.advantage_points.setText('<b>Race Advantage Points: ' +
str(advantage_points) + '</b>')
def handle_primary_racial_change(self):
selected_prt = self.primary_racials.checkedId()
self.prt_desc.setText(PrimaryRacialTrait.descriptions()[selected_prt])
self.race.primary_racial_trait = selected_prt
self.update_advantage_points()
def handle_lesser_racial_change(self, button):
lrt_id = self.lesser_racials.id(button)
if(lrt_id == LesserRacialTrait.Only_Basic_Remote_Mining):
self.lesser_racials.button(
LesserRacialTrait.Advanced_Remote_Mining).setChecked(False)
if(lrt_id == LesserRacialTrait.Advanced_Remote_Mining):
self.lesser_racials.button(
LesserRacialTrait.Only_Basic_Remote_Mining).setChecked(False)
self.lrt_desc.setText(LesserRacialTrait.descriptions()[lrt_id])
self.update_lesser_racial_state()
def update_lesser_racial_state(self):
self.race.lesser_racial_traits = []
for button in self.lesser_racials.buttons():
if(button.isChecked()):
lrt_id = self.lesser_racials.id(button)
self.race.lesser_racial_traits.append(lrt_id)
self.update_advantage_points()
def handle_previous_icon(self):
print(self.current_race_icon)
self.manage_icon_state(self.current_race_icon - 1)
def handle_next_icon(self):
self.manage_icon_state(self.current_race_icon + 1)
def manage_icon_state(self, new_index):
self.current_race_icon = new_index % len(self.icon_resources.icons)
self.race_icon_label.setPixmap(
QPixmap(self.icon_resources.icons[self.current_race_icon]))
def handle_nav_button(self, button):
button_id = self.nav_buttons.id(button)
if(button_id == self.CANCEL_BUTTON_ID):
self.reject()
elif(button_id == self.NEXT_BUTTON_ID):
new_index = self.tabbed_layout.currentIndex() + 1
self.manage_navigation_state(new_index)
elif(button_id == self.PREVIOUS_BUTTON_ID):
new_index = self.tabbed_layout.currentIndex() - 1
self.manage_navigation_state(new_index)
else:
print "some other button"
def handle_energy_cost_change(self):
self.race.energy_cost = self.energy_research_cost.checkedId()
self.update_advantage_points()
def handle_weapons_cost_change(self):
self.race.weapons_cost = self.weapons_research_cost.checkedId()
self.update_advantage_points()
def handle_biotech_cost_change(self):
self.race.biotechnology_cost = self.biotech_research_cost.checkedId()
self.update_advantage_points()
def handle_electronics_cost_change(self):
self.race.electronics_cost = self.electronics_research_cost.checkedId()
self.update_advantage_points()
def handle_propulsion_cost_change(self):
self.race.propulsion_cost = self.propulsion_research_cost.checkedId()
self.update_advantage_points()
def handle_construction_cost_change(self):
self.race.construction_cost = \
self.construction_research_cost.checkedId()
self.update_advantage_points()
def handle_expensive_tech(self):
self.race.expensive_tech_boost = self.expensive_tech.isChecked()
self.update_advantage_points()
def update_temperature_labels(self):
self.temperature_min.setText(str(self.race.temperature_min))
self.temperature_max.setText(str(self.race.temperature_max))
def handle_increment_temperature_range(self):
self.race.increment_temperature_range()
self.update_temperature_labels()
self.update_advantage_points()
self.temperature_slider.update_value(self.race.temperature_min,
self.race.temperature_max)
def handle_increment_temperature_midpoint(self):
self.race.increment_temperature_midpoint()
self.update_temperature_labels()
self.update_advantage_points()
self.temperature_slider.update_value(self.race.temperature_min,
self.race.temperature_max)
def handle_decrement_temperature_range(self):
self.race.decrement_temperature_range()
self.update_temperature_labels()
self.update_advantage_points()
self.temperature_slider.update_value(self.race.temperature_min,
self.race.temperature_max)
def handle_decrement_temperature_midpoint(self):
self.race.decrement_temperature_midpoint()
self.update_temperature_labels()
self.update_advantage_points()
self.temperature_slider.update_value(self.race.temperature_min,
self.race.temperature_max)
def handle_temperature_immune_change(self):
self.race.temperature_immune = self.temperature_immune.isChecked()
self.update_advantage_points()
self.temperature_slider.update_value(self.race.temperature_min,
self.race.temperature_max,
self.race.temperature_immune)
def handle_increment_growth_rate(self):
self.race.increment_growth_rate()
self.growth_rate.update_value(self.race.growth_rate)
self.update_advantage_points()
def handle_decrement_growth_rate(self):
self.race.decrement_growth_rate()
self.growth_rate.update_value(self.race.growth_rate)
self.update_advantage_points()
def handle_increment_colonist_resource(self):
self.race.increment_resource_production()
self.resource_production.update_value(
self.race.resource_production)
self.update_advantage_points()
def handle_decrement_colonist_resource(self):
self.race.decrement_resource_production()
self.resource_production.update_value(
self.race.resource_production)
self.update_advantage_points()
def handle_increment_factory_production(self):
self.race.increment_factory_production()
self.factory_production.update_value(
self.race.factory_production)
self.update_advantage_points()
def handle_decrement_factory_production(self):
self.race.decrement_factory_production()
self.factory_production.update_value(
self.race.factory_production)
self.update_advantage_points()
def handle_increment_factory_cost(self):
self.race.increment_factory_cost()
self.factory_cost.update_value(self.race.factory_cost)
self.update_advantage_points()
def handle_decrement_factory_cost(self):
self.race.decrement_factory_cost()
self.factory_cost.update_value(self.race.factory_cost)
self.update_advantage_points()
def handle_increment_colonists_operate_factories(self):
self.race.increment_colonists_operate_factories()
self.colonists_operate_factories.update_value(
self.race.colonists_operate_factories)
self.update_advantage_points()
def handle_decrement_colonists_operate_factories(self):
self.race.decrement_colonists_operate_factories()
self.colonists_operate_factories.update_value(
self.race.colonists_operate_factories)
self.update_advantage_points()
def handle_factory_cheap_germanium_change(self):
self.race.factory_cheap_germanium = self.factory_cheap_germanium.isChecked()
self.update_advantage_points()
def handle_increment_mine_production(self):
self.race.increment_mine_production()
self.mine_production.update_value(self.race.mine_production)
self.update_advantage_points()
def handle_decrement_mine_production(self):
self.race.decrement_mine_production()
self.mine_production.update_value(self.race.mine_production)
self.update_advantage_points()
def handle_increment_mine_cost(self):
self.race.increment_mine_cost()
self.mine_cost.update_value(self.race.mine_cost)
self.update_advantage_points()
def handle_decrement_mine_cost(self):
self.race.decrement_mine_cost()
self.mine_cost.update_value(self.race.mine_cost)
self.update_advantage_points()
def handle_increment_colonists_operate_mines(self):
self.race.increment_colonists_operate_mines()
self.colonists_operate_mines.update_value(
self.race.colonists_operate_mines)
self.update_advantage_points()
def handle_decrement_colonists_operate_mines(self):
self.race.decrement_colonists_operate_mines()
self.colonists_operate_mines.update_value(
self.race.colonists_operate_mines)
self.update_advantage_points()
def manage_navigation_state(self, new_index):
prev_button = self.nav_buttons.button(self.PREVIOUS_BUTTON_ID)
next_button = self.nav_buttons.button(self.NEXT_BUTTON_ID)
self.tabbed_layout.setCurrentIndex(new_index)
if(new_index == 0):
prev_button.setEnabled(False)
next_button.setEnabled(True)
elif(new_index == 5):  # index of the last page; six pages are added in init_ui()
prev_button.setEnabled(True)
next_button.setEnabled(False)
else:
prev_button.setEnabled(True)
next_button.setEnabled(True)
def create_race_general_details_page(self):
page = QFrame()
formLayout = QFormLayout()
formLayout.addRow("Race Name:", self.race_name)
formLayout.addRow("Plural Race Name:", self.plural_race_name)
formLayout.addRow("Password:", self.password)
base_race_group = helpers.build_button_group_box(
self.base_races, "Predefined Race Templates", 2)
leftover_box = QBoxLayout(QBoxLayout.TopToBottom)
leftover_label = QLabel("Spend up to 50 leftover advantage points on:")
leftover_box.addWidget(leftover_label)
leftover_box.addWidget(self.leftover_points)
arrow_container = QBoxLayout(QBoxLayout.LeftToRight)
arrow_container.addWidget(self.previous_icon_button)
arrow_container.addWidget(self.next_icon_button)
race_icon_box = QBoxLayout(QBoxLayout.TopToBottom)
self.manage_icon_state(self.current_race_icon)
race_icon_box.addWidget(self.race_icon_label)
race_icon_box.addLayout(arrow_container)
bottom_container = QBoxLayout(QBoxLayout.LeftToRight)
bottom_container.addLayout(leftover_box)
bottom_container.addLayout(race_icon_box)
layout = QBoxLayout(QBoxLayout.TopToBottom)
layout.addLayout(formLayout)
layout.addStretch(1)
layout.addWidget(base_race_group)
layout.addStretch(1)
layout.addLayout(bottom_container)
page.setLayout(layout)
return page
def create_primary_trait_page(self):
page = QFrame()
prt_group = helpers.build_button_group_box(
self.primary_racials, "Primary Racial Trait", 2)
description_layout = QBoxLayout(QBoxLayout.TopToBottom)
description_layout.addWidget(self.prt_desc)
description_box = QGroupBox("Description of Trait")
description_box.setLayout(description_layout)
layout | |
<reponame>edavalosanaya/SKORE<filename>Software/python/config_dialog.py
# General Utility Libraries
import sys
import os
import warnings
# PyQt5, GUI Library
from PyQt5 import QtCore, QtGui, QtWidgets
# Serial and Midi Port Library
import rtmidi
import serial
import serial.tools.list_ports
# SKORE Library
from lib_skore import read_config, update_config
import globals
#-------------------------------------------------------------------------------
# Classes
class ArduinoComboBox(QtWidgets.QComboBox):
"""
This class allows the combobox to recognize arduinos connected as soon as
the user clicks the combobox.
"""
def avaliable_arduino_com(self):
"""
This fuction returns all the available COM ports in a list of strings.
"""
ports = serial.tools.list_ports.comports(include_links=False)
results = []
for port in ports:
results.append(str(port.device))
return results
def showPopup(self):
"""
This function appends to the original showPopup function from the
QComboBox by adding the avaliable arduino com ports.
"""
avaliable_arduino_ports = self.avaliable_arduino_com()
self.clear()
for avaliable_port in avaliable_arduino_ports:
self.addItem(avaliable_port)
super(ArduinoComboBox, self).showPopup()
return None
class PianoComboBox(QtWidgets.QComboBox):
"""
This class allows the combobox to recognize pianos connected as soon as
the user clicks the combobox.
"""
def avaliable_piano_port(self):
"""
This function returns all the available MIDI ports as a list of strings.
"""
temp_midi_in = rtmidi.MidiIn()
avaliable_ports = temp_midi_in.get_ports()
results = []
for port_name in avaliable_ports:
results.append(str(port_name))
return results
def showPopup(self):
"""
This function extends QComboBox.showPopup() by first adding the
available MIDI ports to the listed items.
"""
avaliable_piano_ports = self.avaliable_piano_port()
self.clear()
for avaliable_piano_port_connected in avaliable_piano_ports:
self.addItem(avaliable_piano_port_connected)
super(PianoComboBox, self).showPopup()
return None
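# Both comboboxes above share the same idea: re-enumerate the available
# ports immediately before the dropdown opens, then defer to the base
# implementation. A minimal generic sketch of that pattern (illustrative
# only; the item_provider callable is a hypothetical parameter and is not
# part of the original SKORE code):
class RefreshingComboBox(QtWidgets.QComboBox):
    def __init__(self, item_provider, parent=None):
        super(RefreshingComboBox, self).__init__(parent)
        self._item_provider = item_provider

    def showPopup(self):
        # Refresh the item list each time the dropdown is opened.
        self.clear()
        self.addItems(self._item_provider())
        super(RefreshingComboBox, self).showPopup()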
class ConfigDialog(QtWidgets.QDialog):
"""
This class is the settings dialog that provides the user the capability
of changing the settings of the SKORE application.
"""
finish_apply_signal = QtCore.pyqtSignal()
def __init__(self):
"""
This function sets up the settings dialog: it sets the title, size, and
icon, and places the widgets.
"""
super(ConfigDialog, self).__init__()
self.setObjectName("Dialog")
self.resize(530 * globals.S_W_R, 679 * globals.S_H_R)
self.setWindowTitle("SKORE - General Configuration")
self.setWindowIcon(QtGui.QIcon('./images/skore_icon.png'))  # forward slashes avoid invalid escape sequences
self.setup_ui()
self.setup_func()
self.read_all_settings()
self.update_settings()
return None
def setup_ui(self):
"""
This function places all the widgets in the settings dialog.
"""
self.apply_close_buttonBox = QtWidgets.QDialogButtonBox(self)
self.apply_close_buttonBox.setGeometry(QtCore.QRect(310 * globals.S_W_R, 640 * globals.S_H_R, 201 * globals.S_W_R, 32 * globals.S_H_R))
self.apply_close_buttonBox.setLayoutDirection(QtCore.Qt.RightToLeft)
self.apply_close_buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.apply_close_buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Apply|QtWidgets.QDialogButtonBox.Close)
self.apply_close_buttonBox.setObjectName("apply_close_buttonBox")
#-----------------------------------------------------------------------
# Tab Widget
self.tabWidget = QtWidgets.QTabWidget(self)
self.tabWidget.setGeometry(QtCore.QRect(10 * globals.S_W_R, 10 * globals.S_H_R, 511 * globals.S_W_R, 621 * globals.S_H_R))
self.tabWidget.setLayoutDirection(QtCore.Qt.LeftToRight)
self.tabWidget.setObjectName("tabWidget")
#-----------------------------------------------------------------------#
# Tab Widget -> path_and_comm_tab
self.path_and_comm_tab = QtWidgets.QWidget()
self.path_and_comm_tab.setObjectName("path_and_comm_tab")
#-----------------------------------------------------------------------
# Tab Widget -> path_and_comm_tab -> path section
self.configure_path_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.configure_path_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 5 * globals.S_H_R, 231 * globals.S_W_R, 16 * globals.S_H_R))
self.configure_path_label.setObjectName("configure_path_label")
self.path_line = QtWidgets.QFrame(self.path_and_comm_tab)
self.path_line.setGeometry(QtCore.QRect(10 * globals.S_W_R, 20 * globals.S_H_R, 481 * globals.S_W_R, 20 * globals.S_H_R))
self.path_line.setFrameShape(QtWidgets.QFrame.HLine)
self.path_line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.path_line.setObjectName("path_line")
self.audiveris_pushButton = QtWidgets.QPushButton(self.path_and_comm_tab)
self.audiveris_pushButton.setGeometry(QtCore.QRect(400 * globals.S_W_R, 60 * globals.S_H_R, 93 * globals.S_W_R, 31 * globals.S_H_R))
self.audiveris_pushButton.setObjectName("audiveris_pushButton")
self.audiveris_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.audiveris_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 40 * globals.S_H_R, 101 * globals.S_W_R, 16 * globals.S_H_R))
self.audiveris_label.setObjectName("audiveris_label")
self.audiveris_lineEdit = QtWidgets.QLineEdit(self.path_and_comm_tab)
self.audiveris_lineEdit.setGeometry(QtCore.QRect(10 * globals.S_W_R, 60 * globals.S_H_R, 381 * globals.S_W_R, 31 * globals.S_H_R))
self.audiveris_lineEdit.setObjectName("audiveris_lineEdit")
self.amazingmidi_lineEdit = QtWidgets.QLineEdit(self.path_and_comm_tab)
self.amazingmidi_lineEdit.setGeometry(QtCore.QRect(10 * globals.S_W_R, 120 * globals.S_H_R, 381 * globals.S_W_R, 31 * globals.S_H_R))
self.amazingmidi_lineEdit.setObjectName("amazingmidi_lineEdit")
self.amazingmidi_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.amazingmidi_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 100 * globals.S_H_R, 121 * globals.S_W_R, 16 * globals.S_H_R))
self.amazingmidi_label.setObjectName("amazingmidi_label")
self.amazingmidi_pushButton = QtWidgets.QPushButton(self.path_and_comm_tab)
self.amazingmidi_pushButton.setGeometry(QtCore.QRect(400 * globals.S_W_R, 120 * globals.S_H_R, 93 * globals.S_W_R, 31 * globals.S_H_R))
self.amazingmidi_pushButton.setObjectName("amazingmidi_pushButton")
self.anthemscore_pushButton = QtWidgets.QPushButton(self.path_and_comm_tab)
self.anthemscore_pushButton.setGeometry(QtCore.QRect(400 * globals.S_W_R, 180 * globals.S_H_R, 93 * globals.S_W_R, 31 * globals.S_H_R))
self.anthemscore_pushButton.setObjectName("anthemscore_pushButton")
self.anthemscore_lineEdit = QtWidgets.QLineEdit(self.path_and_comm_tab)
self.anthemscore_lineEdit.setGeometry(QtCore.QRect(10 * globals.S_W_R, 180 * globals.S_H_R, 381 * globals.S_W_R, 31 * globals.S_H_R))
self.anthemscore_lineEdit.setObjectName("anthemscore_lineEdit")
self.anthemscore_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.anthemscore_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 160 * globals.S_H_R, 191 * globals.S_W_R, 16 * globals.S_H_R))
self.anthemscore_label.setObjectName("anthemscore_label")
self.muse_score_pushButton = QtWidgets.QPushButton(self.path_and_comm_tab)
self.muse_score_pushButton.setGeometry(QtCore.QRect(400 * globals.S_W_R, 240 * globals.S_H_R, 93 * globals.S_W_R, 31 * globals.S_H_R))
self.muse_score_pushButton.setObjectName("muse_score_pushButton")
self.muse_score_lineEdit = QtWidgets.QLineEdit(self.path_and_comm_tab)
self.muse_score_lineEdit.setGeometry(QtCore.QRect(10 * globals.S_W_R, 240 * globals.S_H_R, 381 * globals.S_W_R, 31 * globals.S_H_R))
self.muse_score_lineEdit.setObjectName("muse_score_lineEdit")
self.muse_score_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.muse_score_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 220 * globals.S_H_R, 191 * globals.S_W_R, 16 * globals.S_H_R))
self.muse_score_label.setObjectName("muse_score_label")
self.mp3_to_midi_converter_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.mp3_to_midi_converter_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 280 * globals.S_H_R, 141 * globals.S_W_R, 16 * globals.S_H_R))
self.mp3_to_midi_converter_label.setObjectName("mp3_to_midi_converter_label")
self.open_source_radioButton = QtWidgets.QRadioButton(self.path_and_comm_tab)
self.open_source_radioButton.setGeometry(QtCore.QRect(240 * globals.S_W_R, 280 * globals.S_H_R, 111 * globals.S_W_R, 20 * globals.S_H_R))
self.open_source_radioButton.setObjectName("open_source_radioButton")
self.close_source_radioButton = QtWidgets.QRadioButton(self.path_and_comm_tab)
self.close_source_radioButton.setGeometry(QtCore.QRect(380 * globals.S_W_R, 280 * globals.S_H_R, 111 * globals.S_W_R, 20 * globals.S_H_R))
self.close_source_radioButton.setObjectName("close_source_radioButton")
#-----------------------------------------------------------------------
# Tab Widget -> path_and_comm_tab -> comm section
self.comm_line = QtWidgets.QFrame(self.path_and_comm_tab)
self.comm_line.setGeometry(QtCore.QRect(10 * globals.S_W_R, 300 * globals.S_H_R, 481 * globals.S_W_R, 20 * globals.S_H_R))
self.comm_line.setFrameShape(QtWidgets.QFrame.HLine)
self.comm_line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.comm_line.setObjectName("comm_line")
self.portsettings_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.portsettings_label.setGeometry(QtCore.QRect(210 * globals.S_W_R, 320 * globals.S_H_R, 81* globals.S_W_R, 20 * globals.S_H_R))
self.portsettings_label.setObjectName("portsettings_label")
self.piano_port_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.piano_port_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 340 * globals.S_H_R, 71 * globals.S_W_R, 16 * globals.S_H_R))
self.piano_port_label.setObjectName("pianoport_label")
self.piano_port_comboBox = PianoComboBox(self.path_and_comm_tab)
self.piano_port_comboBox.setGeometry(QtCore.QRect(10 * globals.S_W_R, 360 * globals.S_H_R, 481 * globals.S_W_R, 31 * globals.S_H_R))
self.piano_port_comboBox.setObjectName("pianoport_comboBox")
self.piano_size_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.piano_size_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 400 * globals.S_H_R, 71* globals.S_W_R, 16* globals.S_H_R))
self.piano_size_label.setObjectName("pianosize_label")
self.piano_size_comboBox = QtWidgets.QComboBox(self.path_and_comm_tab)
self.piano_size_comboBox.setGeometry(QtCore.QRect(10 * globals.S_W_R, 420 * globals.S_H_R, 481 * globals.S_W_R, 31 * globals.S_H_R))
self.piano_size_comboBox.setObjectName("pianosize_comboBox")
self.arduinoport_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.arduinoport_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 460 * globals.S_H_R, 81 * globals.S_W_R, 16* globals.S_H_R))
self.arduinoport_label.setObjectName("arduinoport_label")
self.arduino_port_comboBox = ArduinoComboBox(self.path_and_comm_tab)
self.arduino_port_comboBox.setGeometry(QtCore.QRect(10 * globals.S_W_R, 480 * globals.S_H_R, 481 * globals.S_W_R, 31 * globals.S_H_R))
self.arduino_port_comboBox.setObjectName("arduinoport_comboBox")
self.arduino_baud_rate_label = QtWidgets.QLabel(self.path_and_comm_tab)
self.arduino_baud_rate_label.setGeometry(QtCore.QRect(10 * globals.S_W_R, 520 * globals.S_H_R, 200 * globals.S_W_R, 20* globals.S_H_R))
self.arduino_baud_rate_label.setText("Arduino Baud Rate")
self.arduino_baud_rate_comboBox = QtWidgets.QComboBox(self.path_and_comm_tab)
self.arduino_baud_rate_comboBox.setGeometry(QtCore.QRect(10 * globals.S_W_R, 540 * globals.S_H_R, 481* globals.S_W_R, 31 * globals.S_H_R))
self.tabWidget.addTab(self.path_and_comm_tab, "")
#-----------------------------------------------------------------------
# Tab Widget -> Lighting and Color Tab
self.color_tab = QtWidgets.QWidget()
self.color_tab.setObjectName("color_tab")
#-----------------------------------------------------------------------
# Tab Widget -> Lighting and Color Tab -> Timing Section
self.timingsettings_label = QtWidgets.QLabel(self.color_tab)
self.timingsettings_label.setGeometry(QtCore.QRect(200 * globals.S_W_R, 10 * globals.S_H_R, 151 * globals.S_W_R, 20 * globals.S_H_R))
self.timingsettings_label.setObjectName("timingsettings_label")
self.chord_tick_tolerance_label = QtWidgets.QLabel(self.color_tab)
self.chord_tick_tolerance_label.setGeometry(QtCore.QRect(20 * globals.S_W_R, 40* globals.S_H_R, 200 * globals.S_W_R, 20 * globals.S_H_R))
self.chord_tick_tolerance_label.setText("Chord Tick Tolerance:")
self.chord_tick_tolerance_lineEdit = QtWidgets.QLineEdit(self.color_tab)
self.chord_tick_tolerance_lineEdit.setGeometry(QtCore.QRect(200 * globals.S_W_R, 40 * globals.S_H_R, 280 * globals.S_W_R, 20 * globals.S_H_R))
self.chord_sum_tolerance_label = QtWidgets.QLabel(self.color_tab)
self.chord_sum_tolerance_label.setGeometry(QtCore.QRect(20 * globals.S_W_R, 80 * globals.S_H_R, 200 * globals.S_W_R, 20 * globals.S_H_R))
self.chord_sum_tolerance_label.setText("Chord Sum Tolerance:")
self.chord_sum_tolerance_lineEdit = QtWidgets.QLineEdit(self.color_tab)
self.chord_sum_tolerance_lineEdit.setGeometry(QtCore.QRect(200 * globals.S_W_R, 80 * globals.S_H_R, 280 * globals.S_W_R, 20 * globals.S_H_R))
self.record_chord_tolerance_label = QtWidgets.QLabel(self.color_tab)
self.record_chord_tolerance_label.setGeometry(QtCore.QRect(20* globals.S_W_R, 120 * globals.S_H_R, 200* globals.S_W_R, 20 * globals.S_H_R))
self.record_chord_tolerance_label.setText("Record Chord Tolerance:")
self.record_chord_tolerance_lineEdit = QtWidgets.QLineEdit(self.color_tab)
self.record_chord_tolerance_lineEdit.setGeometry(QtCore.QRect(200* globals.S_W_R, 120 * globals.S_H_R, 280 * globals.S_W_R, 20 * globals.S_H_R))
self.arduino_handshake_timeout_label = QtWidgets.QLabel(self.color_tab)
self.arduino_handshake_timeout_label.setGeometry(QtCore.QRect(20 * globals.S_W_R, 160* globals.S_H_R, 200 * globals.S_W_R, 20 * globals.S_H_R))
self.arduino_handshake_timeout_label.setText("Arduino Handshake Timeout:")
self.arduino_handshake_timeout_lineEdit = QtWidgets.QLineEdit(self.color_tab)
self.arduino_handshake_timeout_lineEdit.setGeometry(QtCore.QRect(200 * globals.S_W_R, 160 * globals.S_H_R, 280 * globals.S_W_R, 20 * globals.S_H_R))
self.line = QtWidgets.QFrame(self.color_tab)
self.line.setGeometry(QtCore.QRect(10 * globals.S_W_R, 230 * globals.S_H_R, 481 * globals.S_W_R, 16 * globals.S_H_R))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
#-----------------------------------------------------------------------
# Tab Widget -> Lighting and Color Tab -> Color Section
self.colorsettings_label = QtWidgets.QLabel(self.color_tab)
self.colorsettings_label.setGeometry(QtCore.QRect(210 * globals.S_W_R, 250 * globals.S_H_R, 81 * globals.S_W_R, 20 * globals.S_H_R))
self.colorsettings_label.setObjectName("colorsettings_label_2")
bw_y = ( 250 + 40 ) * globals.S_H_R
space = 20 * globals.S_H_R
self.black_key_label = QtWidgets.QLabel(self.color_tab)
self.black_key_label.setGeometry(QtCore.QRect(80 * globals.S_W_R, bw_y, 61 * globals.S_W_R, 16 * globals.S_H_R))
self.black_key_label.setObjectName("black_key_label")
self.black_key_pushButton = QtWidgets.QPushButton(self.color_tab)
self.black_key_pushButton.setGeometry(QtCore.QRect(40 * globals.S_W_R, bw_y + space, 141 * globals.S_W_R, 61 * globals.S_H_R))
self.black_key_pushButton.setText("")
self.black_key_pushButton.setObjectName("black_key_pushButton")
self.white_key_label = QtWidgets.QLabel(self.color_tab)
self.white_key_label.setGeometry(QtCore.QRect(360 * globals.S_W_R, bw_y, 71 * globals.S_W_R, 16 * globals.S_H_R))
self.white_key_label.setObjectName("white_key_label")
self.white_key_pushButton = QtWidgets.QPushButton(self.color_tab)
self.white_key_pushButton.setGeometry(QtCore.QRect(320 * globals.S_W_R, bw_y + space, 141 * globals.S_W_R, 61 * globals.S_H_R))  # height scales with S_H_R, matching black_key_pushButton
self.white_key_pushButton.setText("")
self.white_key_pushButton.setObjectName("white_key_pushButton")
wu_y = ( 390 + 40 ) * globals.S_H_R
self.wrong_label = QtWidgets.QLabel(self.color_tab)
self.wrong_label.setGeometry(QtCore.QRect(75 * globals.S_W_R, wu_y, 71 * globals.S_W_R, 16 * globals.S_H_R))
self.wrong_label.setObjectName("wrong_label")
self.wrong_pushButton = QtWidgets.QPushButton(self.color_tab)
self.wrong_pushButton.setGeometry(QtCore.QRect(40 * globals.S_W_R, wu_y + space, 141 * globals.S_W_R, 61 * globals.S_H_R))
self.wrong_pushButton.setText("")
self.wrong_pushButton.setObjectName("wrong_pushButton")
self.upcoming_label = QtWidgets.QLabel(self.color_tab)
self.upcoming_label.setGeometry(QtCore.QRect(350 * globals.S_W_R, wu_y, 91 * globals.S_W_R, 16 * globals.S_H_R))
self.upcoming_label.setObjectName("upcoming_label")
self.upcoming_pushButton = QtWidgets.QPushButton(self.color_tab)
self.upcoming_pushButton.setGeometry(QtCore.QRect(320 * globals.S_W_R, wu_y + space, 141 * globals.S_W_R, 61 * globals.S_H_R))
self.upcoming_pushButton.setText("")
self.upcoming_pushButton.setObjectName("upcoming_pushButton")
self.tabWidget.addTab(self.color_tab, "")
self.retranslate_ui()
self.tabWidget.setCurrentIndex(0)
self.apply_close_buttonBox.accepted.connect(self.accept)
self.apply_close_buttonBox.rejected.connect(self.close)
QtCore.QMetaObject.connectSlotsByName(self)
def setup_func(self):
"""
This function places all the slot and signals for the widgets of the
settings dialog.
"""
self.browse_button_group = QtWidgets.QButtonGroup()
self.browse_button_group.addButton(self.audiveris_pushButton)
self.browse_button_group.addButton(self.amazingmidi_pushButton)
self.browse_button_group.addButton(self.anthemscore_pushButton)
self.browse_button_group.addButton(self.muse_score_pushButton)
self.browse_button_group.buttonClicked.connect(self.upload_exe_file)
self.browse_button_dict = {
    self.audiveris_pushButton: ['', self.audiveris_lineEdit, 'audiveris'],
    self.amazingmidi_pushButton: ['', self.amazingmidi_lineEdit, 'amazing_midi'],
    self.anthemscore_pushButton: ['', self.anthemscore_lineEdit, 'anthemscore'],
    self.muse_score_pushButton: ['', self.muse_score_lineEdit, 'muse_score']}
self.port_dict = {
    self.piano_port_comboBox: ['', 'piano'],
    self.piano_size_comboBox: ['', 'piano_size'],
    self.arduino_port_comboBox: ['', 'arduino'],
    self.arduino_baud_rate_comboBox: ['', 'arduino baud rate']}
self.piano_size_comboBox.addItem('76 Key Piano')
self.piano_size_comboBox.addItem('88 Key Piano')
self.arduino_baud_rate_comboBox.addItems([
    '300', '600', '1200', '4800', '9600', '14400',
    '19200', '28800', '38400', '57600', '115200', '230400'])
self.timing_button_dict = {
    self.chord_tick_tolerance_lineEdit: ['', 'chord tick tolerance'],
    self.chord_sum_tolerance_lineEdit: ['', 'chord sum tolerance'],
    self.record_chord_tolerance_lineEdit: ['', 'record chord tolerance'],
    self.arduino_handshake_timeout_lineEdit: ['', 'count timeout']}
self.color_button_group = QtWidgets.QButtonGroup()
self.color_button_group.addButton(self.black_key_pushButton)
self.color_button_group.addButton(self.white_key_pushButton)
self.color_button_group.addButton(self.wrong_pushButton)
self.color_button_group.addButton(self.upcoming_pushButton)
self.color_button_group.buttonClicked.connect(self.color_picker)
self.color_button_dict = {
    self.black_key_pushButton: ['', 'black'],
    self.white_key_pushButton: ['', 'white'],
    self.wrong_pushButton: ['', 'wrong'],
    self.upcoming_pushButton: ['', 'upcoming']}
self.apply_close_buttonBox.button(QtWidgets.QDialogButtonBox.Apply).clicked.connect(self.apply_changes)
return None
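    # The widget-to-key dictionaries above pair each control with its name in
    # the configuration file (the first list slot appears to cache the current
    # value). A sketch of one way such a mapping could be consumed, assuming
    # read_config() returns a dict-like object keyed by those names (this
    # helper is illustrative and not part of the original module):
    def _load_port_settings_sketch(self):
        config = read_config()
        for combo_box, entry in self.port_dict.items():
            entry[0] = config[entry[1]]      # cache the configured value
            combo_box.addItem(str(entry[0]))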
#---------------------------------------------------------------------------
# Path Section Functions
def | |
<filename>tumor_seg.py
from PyQt5 import QtGui, QtWidgets
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QMainWindow, QFileDialog
import cv2 as cv
import os
from Ui_tumor import Ui_tumor_seg
import imageio
import numpy as np
import PIL.Image as Image
import torch
import torch.nn as nn
import time
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from unet import *
from WL import *
import pydicom
import mcubes
import h5py
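# The WL helper imported above (called as WL(slice, center, width) below)
# presumably applies a DICOM window/level transform before segmentation.
# A minimal sketch under that assumption, mapping the windowed Hounsfield
# range onto 0-255 (illustrative only; the real implementation may differ):
def window_level_sketch(dataset, center, width):
    # Convert stored pixel values to Hounsfield units, then window them.
    hu = dataset.pixel_array * dataset.RescaleSlope + dataset.RescaleIntercept
    low, high = center - width / 2.0, center + width / 2.0
    windowed = np.clip(hu, low, high)
    return np.uint8(255.0 * (windowed - low) / (high - low))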
class Worker(QThread):
sinOut = pyqtSignal(str, str)       # progress text; emitted from the worker thread while run() executes
sinOut2 = pyqtSignal(bytes)         # a single encoded result mask
sinOut3 = pyqtSignal(bytes, bytes)  # encoded liver and tumor masks, for display
sinOut4 = pyqtSignal(str, str)      # elapsed-time message
def __init__(self, parent=None):
super(Worker, self).__init__(parent)
self.working = True
self.num = 0
def __del__(self):
self.working = False
self.wait()
def run(self):
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
if torch.cuda.is_available():
gpu_flag = '(on GPU)'
else:
gpu_flag = '(on CPU)'
t_start = time.time()
global t_elapsed
if flag == 1:
print('flag = 1')
filenames = os.listdir(path)
results_liver = np.zeros([len(filenames), 512, 512])
results_tumor = np.zeros([len(filenames), 512, 512])
slices_liver = []
slices_tumor = []
idx = []
for i, name in enumerate(filenames):
name = os.path.join(path, name)
slice = pydicom.dcmread(name)
idx.append(int(slice.InstanceNumber))
slices_liver.append(WL(slice, 0, 2048))
slices_tumor.append(WL(slice, 100, 150))
idx_new = np.argsort(idx)
slices_liver = np.stack(slices_liver)
slices_tumor = np.stack(slices_tumor)
slices_liver = slices_liver[idx_new]
slices_tumor = slices_tumor[idx_new]
slices_liver_tensor = torch.tensor(slices_liver)
slices_liver_tensor = slices_liver_tensor.unsqueeze(1).float() / 255.
slices_tumor_tensor = torch.tensor(slices_tumor)
slices_tumor_tensor = slices_tumor_tensor.unsqueeze(1).float() / 255.
model_path = 'liver_7WL.pth'
model = torch.load(model_path, map_location=device)
model = model.to(device)
model = model.eval()
sm = nn.Softmax(dim=1)
for i in range(slices_liver_tensor.shape[0]):
self.sinOut.emit("Mark the liver: " , str(i+1)+"/" +
str(slices_liver_tensor.shape[0]) + gpu_flag)
output = model(slices_liver_tensor[i, :].unsqueeze(0).to(device))
output_sm = sm(output)
_, result = torch.max(output_sm, dim=1)
results_liver[i] = result[0, :].cpu().detach().numpy()
print(results_liver.shape)
a = results_liver.tobytes()  # tostring() is deprecated in NumPy; tobytes() is the replacement
b = results_tumor.tobytes()
t_end = time.time()
t_elapsed = t_end - t_start
self.sinOut4.emit("time consuming: ", str(round(t_elapsed, 4)))
self.sinOut3.emit(a, b)
elif flag == 3 or flag == 2:
filenames = os.listdir(path)
results_liver = np.zeros([len(filenames), 512, 512])
results_tumor = np.zeros([len(filenames), 512, 512])
slices_liver = []
slices_tumor = []
idx = []
for i, name in enumerate(filenames):
name = os.path.join(path, name)
slice = pydicom.dcmread(name)
idx.append(int(slice.InstanceNumber))
slices_liver.append(WL(slice, 0, 2048))
slices_tumor.append(WL(slice, 100, 150))
idx_new = np.argsort(idx)
slices_liver = np.stack(slices_liver)
slices_tumor = np.stack(slices_tumor)
slices_liver = slices_liver[idx_new]
slices_tumor = slices_tumor[idx_new]
slices_liver_tensor = torch.tensor(slices_liver)
slices_liver_tensor = slices_liver_tensor.unsqueeze(1).float() / 255.
slices_tumor_tensor = torch.tensor(slices_tumor)
slices_tumor_tensor = slices_tumor_tensor.unsqueeze(1).float() / 255.
model_path = 'liver_7WL.pth'
model = torch.load(model_path, map_location=device)
model = model.to(device)
model = model.eval()
sm = nn.Softmax(dim=1)
for i in range(slices_liver_tensor.shape[0]):
self.sinOut.emit("Mark the liver: " , str(i+1)+"/" +
str(slices_liver_tensor.shape[0]) + gpu_flag)
output = model(slices_liver_tensor[i, :].unsqueeze(0).to(device))
output_sm = sm(output)
_, result = torch.max(output_sm, dim=1)
results_liver[i] = result[0, :].cpu().detach().numpy()
a = results_liver.tobytes()
del(model)
del(output)
del(output_sm)
del(result)
model_path_2 = './best_tumor.pth'
model_2 = torch.load(model_path_2, map_location=device)
model_2 = model_2.to(device)
model_2 = model_2.eval()
sm = nn.Softmax(dim=1)
for i in range(slices_tumor_tensor.shape[0]):
self.sinOut.emit("Mark the tumor: " , str(i+1)+"/" +
str(slices_tumor_tensor.shape[0]) + gpu_flag)
output_2 = model_2(slices_tumor_tensor[i, :].unsqueeze(0).to(device))
output_sm_2 = sm(output_2)
_, result_2 = torch.max(output_sm_2, dim=1)
results_tumor[i] = result_2[0, :].cpu().detach().numpy()
b = results_tumor.tobytes()
t_end = time.time()
t_elapsed = t_end - t_start
self.sinOut4.emit("time consuming: " , str(round(t_elapsed, 4)))
self.sinOut3.emit(a, b)
# sinOut3 carries the encoded liver/tumor masks for display
class MainWindow(QMainWindow, Ui_tumor_seg):
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
self.setupUi(self)
self.choose_raw.mousePressEvent = self.lineedit_clicked
def show_img(self, image):
img = Image.fromarray(image.astype('uint8')).convert('RGB')
self.img_rgb = cv.cvtColor(np.asarray(img), cv.COLOR_RGB2BGR)
self.QtImg = QtGui.QImage(
self.img_rgb.data, self.img_rgb.shape[1], self.img_rgb.shape[0], QtGui.QImage.Format_RGB888)
self.show_area.setPixmap(QtGui.QPixmap.fromImage(self.QtImg))
def slotAdd3(self, a, b):
filenames = os.listdir(path)
self.slices_liver = []
self.slices_tumor = []
idx = []
for i, name in enumerate(filenames):
name = os.path.join(path, name)
slice = pydicom.dcmread(name)
idx.append(slice.InstanceNumber)
self.slices_liver.append(WL(slice, 0, 2048))
self.slices_tumor.append(WL(slice, 100, 150))
idx_new = np.argsort(idx)
self.slices_liver = np.stack(self.slices_liver)[idx_new]
self.slices_tumor = np.stack(self.slices_tumor)[idx_new]
self.liver = np.frombuffer(a).copy()  # copy(): frombuffer views on bytes are read-only
self.liver = np.reshape(self.liver, (len(filenames), 512, 512))
self.liver_backup = self.liver
self.liver *= 255.
self.tumor = np.frombuffer(b).copy()
self.tumor = np.reshape(self.tumor, (len(filenames), 512, 512))
self.tumor_backup = self.tumor
self.tumor *= 255.
global sum_number
sum_number = filenames
half_sample_num = sample_num // 2
self.num.setText(str(half_sample_num))
num_pic = self.num.text()
num2 = int(num_pic)
self.show_img(self.tumor[num2])
a = self.liver[num2]         # liver mask
b = self.slices_tumor[num2]  # windowed CT slice
c = self.tumor[num2]         # tumor mask
overlay = b
overlay = np.uint8(overlay)
overlay = cv.cvtColor(overlay, cv.COLOR_GRAY2RGB)
mask = np.uint8(c)
_, binary_pred = cv.threshold(mask, 127, 255, cv.THRESH_BINARY)
contours_pred, _ = cv.findContours(binary_pred, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(overlay, contours_pred, -1, (255, 20, 147), 2)  # pink contours mark the tumor prediction
mask_2 = np.uint8(a)
_, binary_pred = cv.threshold(mask_2, 127, 255, cv.THRESH_BINARY)
contours_pred, _ = cv.findContours(binary_pred, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(overlay, contours_pred, -1, (0, 255, 0), 2)  # green contours mark the liver prediction
self.show_img(overlay)
if flag == 1:
self.liver_layer.setEnabled(True)
self.liver_layer.setChecked(True)
elif flag == 3 or flag == 2:
self.liver_layer.setEnabled(True)
self.tumor_layer.setEnabled(True)
self.liver_layer.setChecked(True)
self.tumor_layer.setChecked(True)
self.tri_d_run.setEnabled(True)
self.left.setEnabled(True)
self.right_R.setEnabled(True)
self.show_1.setEnabled(True)
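    # The threshold/findContours/drawContours sequence above recurs in the
    # navigation handlers below; a small helper capturing it (a sketch added
    # for illustration, not present in the original file):
    def draw_mask_contours(self, image_rgb, mask, color_bgr, thickness=2):
        """Draw the outline of a binary mask onto an RGB image in place."""
        mask_u8 = np.uint8(mask)
        _, binary = cv.threshold(mask_u8, 127, 255, cv.THRESH_BINARY)
        contours, _ = cv.findContours(binary, cv.RETR_TREE,
                                      cv.CHAIN_APPROX_SIMPLE)
        cv.drawContours(image_rgb, contours, -1, color_bgr, thickness)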
def slotAdd2(self, file_inf):
self.state_show_label1.setText(
"Done. Time elapsed: " + str(format(t_elapsed, '0.2f') + 's.'))
self.state_show_label2.setText( str(format(t_elapsed, '0.2f') + 's.'))
filen = os.listdir(path)
self.results = np.frombuffer(file_inf).copy()  # copy(): frombuffer views on bytes are read-only
self.results = np.reshape(self.results, (len(filen), 512, 512))
self.results_backup = self.results
self.results *= 255.
half_sample_num = sample_num // 2
self.num.setText(str(half_sample_num))
num_pic = self.num.text()
num2 = int(num_pic)
array = self.results[num2]
self.show_img( array)
def slotAdd(self, file_inf, file_inf2):
self.state_show_label1.setText(file_inf)
self.state_show_label2.setText(file_inf2 )
def slotAdd4(self, file_inf, file_inf2):
self.state_show_label1.setText(file_inf)
self.state_show_label2.setText(file_inf2 + ' seconds')
def lineedit_clicked(self, e):
self.liver_layer.setEnabled(True)
self.tumor_layer.setEnabled(True)
self.liver_layer.setChecked(False)
self.tumor_layer.setChecked(False)
self.choose_raw.setText('')
my_file_path = QFileDialog.getExistingDirectory(self, "Select folder", ".")
self.choose_raw.setText(my_file_path)
def on_tumor_mark_clicked(self):
self.liver_mark.setChecked(True)
@pyqtSlot()
def on_seg_run_clicked(self):
test = self.choose_raw.text()
if test == '' or test == 'Select directory':
QMessageBox.information(self, "Warning", "Plese choose dir" )
else:
global flag
if self.liver_mark.isChecked() and self.tumor_mark.isChecked():
flag = 3
elif self.liver_mark.isChecked():
self.tumor_layer.setEnabled(False)
flag = 1
elif self.tumor_mark.isChecked():
    self.liver_mark.setChecked(True)
    flag = 2
else:
QMessageBox.information(self, "Warning", "Please choose .")
control_path = self.choose_raw.text()
global path
path = control_path
filenames = os.listdir(path)
global sample_num
sample_num = len(filenames)
self.thread = Worker()
self.thread.sinOut.connect(self.slotAdd)
self.thread.sinOut2.connect(self.slotAdd2)
self.thread.sinOut3.connect(self.slotAdd3)
self.thread.sinOut4.connect(self.slotAdd4)
self.thread.start()
self.state_show_label1.setText("Model initializing...")
self.state_show_label2.setText(" ")
@pyqtSlot()
def on_left_clicked(self):
num_pic = self.num.text()
new_num_pic = int(num_pic)-1
new_num_str = str(new_num_pic)
if new_num_pic>0:
self.num.setText(new_num_str)
new2_num_pic = int(self.num.text())
num2 = new2_num_pic
b1 = self.slices_tumor[num2]
b2 = self.slices_tumor[num2]
b3 = self.slices_tumor[num2]
a = self.liver[num2]
c = self.tumor[num2]
if self.liver_layer.isChecked() and self.tumor_layer.isChecked():
b1 = np.uint8(b1)
b1 = cv.cvtColor(b1, cv.COLOR_GRAY2RGB)
mask = np.uint8(c)
_, binary_pred = cv.threshold(mask, 127, 255, cv.THRESH_BINARY)
contours_pred, _ = cv.findContours(binary_pred, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(b1, contours_pred, -1, (255, 20, 147), 2)
mask_2 = np.uint8(a)
_, binary_pred = cv.threshold(mask_2, 127, 255, cv.THRESH_BINARY)
contours_pred, _ = cv.findContours(binary_pred, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(b1, contours_pred, -1, (0, 255, 0), 2)
self.show_img(b1)
elif self.liver_layer.isChecked():
b2 = np.uint8(b2)
b2 = cv.cvtColor(b2, cv.COLOR_GRAY2RGB)
mask = np.uint8(a)
_, binary_pred = cv.threshold(mask, 127, 255, cv.THRESH_BINARY)
contours_pred, _ = cv.findContours(binary_pred, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(b2, contours_pred, -1, (0, 255, 0), 2)
self.show_img(b2)
elif self.tumor_layer.isChecked():
b3 = np.uint8(b3)
b3 = cv.cvtColor(b3, cv.COLOR_GRAY2RGB)
mask = np.uint8(c)
_, binary_pred = cv.threshold(mask, 127, 255, cv.THRESH_BINARY)
contours_pred, _ = cv.findContours(binary_pred, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(b3, contours_pred, -1, (255, 20, 147), 2)
self.show_img(b3)
else:
self.show_img(self.slices_tumor[num2])
else:
QMessageBox.information(self, "Warning", "Out of index" )
@pyqtSlot()
def on_right_R_clicked(self):
num_pic = self.num.text()
new_num_pic = int(num_pic)+1
new_num_str = str(new_num_pic)
if new_num_pic+1>sample_num:
QMessageBox.information(self, "Warning", "Out of index" )
else:
self.num.setText(new_num_str)
new2_num_pic = int(self.num.text())
num2 = new2_num_pic
b1 = self.slices_tumor[num2]
b2 = self.slices_tumor[num2]
b3 = self.slices_tumor[num2]
a = self.liver[num2]
c = self.tumor[num2]
if self.liver_layer.isChecked() and self.tumor_layer.isChecked():
b1 = np.uint8(b1)
b1 = cv.cvtColor(b1, cv.COLOR_GRAY2RGB)
mask = np.uint8(c)
_, binary_pred = cv.threshold(mask, 127, 255, cv.THRESH_BINARY)
contours_pred, _ = cv.findContours(binary_pred, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(b1, contours_pred, -1, (255, 20, 147), 2)  # pink contours mark the tumor prediction
mask_2 = np.uint8(a)
_, binary_pred = cv.threshold(mask_2, 127, 255, cv.THRESH_BINARY)
contours_pred, _ = cv.findContours(binary_pred, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(b1, contours_pred, -1, (0, 255, 0), 2)  # green contours mark the liver prediction
self.show_img(b1)
elif self.liver_layer.isChecked():
b2 = np.uint8(b2)
b2 = cv.cvtColor(b2, cv.COLOR_GRAY2RGB)
mask = np.uint8(a)
_, binary_pred = cv.threshold(mask, 127, 255, cv.THRESH_BINARY)
contours_pred, _ = cv.findContours(binary_pred, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
cv.drawContours(b2, contours_pred, -1, (0, 255, 0), 2)  # green contours mark the liver prediction
self.show_img(b2)
elif self.tumor_layer.isChecked():
b3 = np.uint8(b3)
b3 = cv.cvtColor(b3, cv.COLOR_GRAY2RGB)
mask = np.uint8(c)
_, binary_pred | |
bits // 8)
print(" * encoded s({}) = {}{}{}".format(len(s_bin) * 8, color_green, xx(s_bin), color_norm))
assert sign == r_point_bin + encode_bigint_le(s, bits // 8)
pk_point = Ed25519Point.decode(public_key)
test_point = r_point + (pk_point * hash_for_s)
print(" * test point = B*r + Pubkey*hash: {}{}{}".format(color_green, test_point, color_norm))
bs_point = curve.b * s
print(" * B*s: {}{}{}".format(color_green, bs_point, color_norm))
assert test_point == bs_point
return True
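# The check above is the core Ed25519 verification identity: a signature
# (R, s) over message M with public key A = a*B verifies when
#     s*B == R + H(R || A || M)*A   (working modulo the base point order),
# because the signer computed s = r + H(R || A || M)*a. A minimal sketch
# using this module's group operations (the function name is illustrative,
# not part of the original file):
def verification_identity_holds(curve, pk_point, r_point, s, h):
    """Return True when B*s == R + A*h."""
    return curve.b * s == r_point + (pk_point * h)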
def run_cryptography_test(colorize):
"""Generate a key using Cryptography.io, with API from
https://cryptography.io/en/latest/hazmat/primitives/asymmetric/ed25519/
"""
color_red = COLOR_RED if colorize else ''
color_green = COLOR_GREEN if colorize else ''
color_norm = COLOR_NORM if colorize else ''
print("Cryptography.io key generation:")
private_key = Ed25519PrivateKey.generate()
private_bytes = private_key.private_bytes(
encoding=serialization.Encoding.Raw,
format=serialization.PrivateFormat.Raw,
encryption_algorithm=serialization.NoEncryption(),
)
print("* private key({}): {}{}{}".format(len(private_bytes) * 8, color_red, xx(private_bytes), color_norm))
loaded_private_key = Ed25519PrivateKey.from_private_bytes(private_bytes)
loaded_private_bytes = loaded_private_key.private_bytes(
encoding=serialization.Encoding.Raw,
format=serialization.PrivateFormat.Raw,
encryption_algorithm=serialization.NoEncryption(),
)
assert loaded_private_bytes == private_bytes
public_key = private_key.public_key()
public_bytes = public_key.public_bytes(
encoding=serialization.Encoding.Raw,
format=serialization.PublicFormat.Raw,
)
print("* public key({}): {}{}{}".format(len(public_bytes) * 8, color_green, xx(public_bytes), color_norm))
loaded_public_key = Ed25519PublicKey.from_public_bytes(public_bytes)
loaded_public_bytes = loaded_public_key.public_bytes(
encoding=serialization.Encoding.Raw,
format=serialization.PublicFormat.Raw,
)
assert loaded_public_bytes == public_bytes
test_message = b'Hello, world! This is a test.'
signature = private_key.sign(test_message)
print("* signature({}): {}{}{}".format(len(signature) * 8, color_green, xx(signature), color_norm))
public_key.verify(signature, test_message)
curve = Ed25519()
public_point = curve.public_point(private_bytes)
assert public_point.encode() == public_bytes
assert public_point == Ed25519Point.decode(public_bytes)
public_point_2 = curve.decode_scalar(hashlib.sha512(private_bytes).digest()) * curve.b
assert public_point_2 == public_point
my_public_key = public_point.encode()
curve.check_signature(test_message, signature, my_public_key)
return True
def run_nacl_test(colorize):
"""Compare the implementation with PyNaCl bindings"""
color_red = COLOR_RED if colorize else ''
color_green = COLOR_GREEN if colorize else ''
color_norm = COLOR_NORM if colorize else ''
print("PyNaCl operations:")
# Get the encoded base by using a libsodium API
encoded_base = bytes.fromhex("5866666666666666666666666666666666666666666666666666666666666666")
encoded_base_2 = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(b"\x01" + b"\x00" * 31)
assert encoded_base_2 == encoded_base
# nacl.util.random is os.urandom:
# https://github.com/pyca/pynacl/blob/1.4.0/src/nacl/utils.py#L69
assert nacl.bindings.crypto_core_ed25519_SCALARBYTES == 32
secret_key = os.urandom(nacl.bindings.crypto_core_ed25519_SCALARBYTES)
print("* secret key({}): {}{}{}".format(len(secret_key) * 8, color_red, xx(secret_key), color_norm))
private_key = hashlib.sha512(secret_key).digest()[:32]
print("* private key({}): {}{}{}".format(len(private_key) * 8, color_red, xx(private_key), color_norm))
public_key = nacl.bindings.crypto_scalarmult_ed25519_base(private_key)
print("* public key({}): {}{}{}".format(len(public_key) * 8, color_green, xx(public_key), color_norm))
public_key2 = nacl.bindings.crypto_scalarmult_ed25519(private_key, encoded_base)
assert public_key2 == public_key
curve = Ed25519()
my_public_key = curve.public_key(secret_key)
assert my_public_key == public_key
# scalarmult expects a scalar < order. Trying without reducing shows that the noclamp variant still clears the most significant bit
pub_noclamp = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(private_key)
print("* (trunc private)*Base({}): {}{}{}".format(len(pub_noclamp) * 8, color_green, xx(pub_noclamp), color_norm))
pub_noclamp_2 = nacl.bindings.crypto_scalarmult_ed25519_noclamp(private_key, encoded_base)
assert pub_noclamp_2 == pub_noclamp
noclamp_scalar = int.from_bytes(private_key, "little") & (~(1 << 255))
my_pub_noclamp_point = noclamp_scalar * curve.b
assert my_pub_noclamp_point.encode() == pub_noclamp
assert my_pub_noclamp_point == Ed25519Point.decode(pub_noclamp)
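    # For reference, the clamping applied by the non-noclamp variants is the
    # standard Ed25519 scalar adjustment (a sketch of what libsodium does
    # internally before the base-point multiplication):
    #     a = bytearray(private_key)
    #     a[0] &= 0xF8   # clear the 3 least significant bits
    #     a[31] &= 0x7F  # clear the most significant bit
    #     a[31] |= 0x40  # set the second most significant bit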
# Use crypto_core_ed25519_scalar_reduce too
assert nacl.bindings.crypto_core_ed25519_NONREDUCEDSCALARBYTES == 64
reduced_priv = nacl.bindings.crypto_core_ed25519_scalar_reduce(private_key + b"\x00" * 32)
print("* reduced private({}): {}{}{}".format(len(reduced_priv) * 8, color_red, xx(reduced_priv), color_norm))
assert int.from_bytes(reduced_priv, "little") == int.from_bytes(private_key, "little") % BASE_ORDER
pub_reduced = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(reduced_priv)
print("* private*Base({}): {}{}{}".format(len(pub_reduced) * 8, color_green, xx(pub_reduced), color_norm))
my_pub_point = int.from_bytes(private_key, "little") * curve.b
assert my_pub_point.encode() == pub_reduced
assert my_pub_point == Ed25519Point.decode(pub_reduced)
my_pub_point_reduced = int.from_bytes(reduced_priv, "little") * curve.b
assert my_pub_point_reduced == my_pub_point
# Test point addition and subtraction
rand_scalar_a = os.urandom(nacl.bindings.crypto_core_ed25519_SCALARBYTES)
rand_point_a = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(rand_scalar_a)
rand_point_a_pt = Ed25519Point.decode(rand_point_a)
print("* point A: {}".format(xx(rand_point_a)))
rand_scalar_b = os.urandom(nacl.bindings.crypto_core_ed25519_SCALARBYTES)
rand_point_b = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(rand_scalar_b)
rand_point_b_pt = Ed25519Point.decode(rand_point_b)
print("* point B: {}".format(xx(rand_point_b)))
sum_a_b = nacl.bindings.crypto_core_ed25519_add(rand_point_a, rand_point_b)
print("* A+B: {}".format(xx(sum_a_b)))
assert rand_point_a_pt + rand_point_b_pt == Ed25519Point.decode(sum_a_b)
diff_a_b = nacl.bindings.crypto_core_ed25519_sub(rand_point_a, rand_point_b)
print("* A-B: {}".format(xx(diff_a_b)))
assert rand_point_a_pt - rand_point_b_pt == Ed25519Point.decode(diff_a_b)
diff_a_b_sum_b = nacl.bindings.crypto_core_ed25519_add(diff_a_b, rand_point_b)
assert diff_a_b_sum_b == rand_point_a
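# Sketch (assumes PyNaCl >= 1.4 also exposes crypto_core_ed25519_scalar_add):
# scalar multiplication distributes over scalar addition, (a + b)*Base == a*Base + b*Base.
# Reduce both scalars first so that the noclamp functions see values below the order.
reduced_a = nacl.bindings.crypto_core_ed25519_scalar_reduce(rand_scalar_a + b"\x00" * 32)
reduced_b = nacl.bindings.crypto_core_ed25519_scalar_reduce(rand_scalar_b + b"\x00" * 32)
sum_scalars = nacl.bindings.crypto_core_ed25519_scalar_add(reduced_a, reduced_b)
point_sum = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(sum_scalars)
point_a_red = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(reduced_a)
point_b_red = nacl.bindings.crypto_scalarmult_ed25519_base_noclamp(reduced_b)
assert point_sum == nacl.bindings.crypto_core_ed25519_add(point_a_red, point_b_red)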
return True
def run_openssl_test(colorize):
"""Generate a key using OpenSSL and load it"""
color_red = COLOR_RED if colorize else ''
color_green = COLOR_GREEN if colorize else ''
color_norm = COLOR_NORM if colorize else ''
color_purple = COLOR_PURPLE if colorize else ''
curve = Ed25519()
temporary_dir = tempfile.mkdtemp(suffix='_openssl-test')
logger.debug("Created temporary directory %s/", temporary_dir)
priv_key_path = os.path.join(temporary_dir, 'ed25519_priv.pem')
pub_key_path = os.path.join(temporary_dir, 'ed25519_pub.pem')
try:
# Generate a private key using OpenSSL
result = run_process_with_input(
['openssl', 'genpkey', '-algorithm', 'ed25519', '-outform', 'PEM', '-out', priv_key_path],
b'', color=color_purple)
if not result:
logger.error("openssl failed, probably because ed25519 keytype is not supported. Skipping the test.")
return True
with open(priv_key_path, 'r') as fprivk:
priv_key_lines = fprivk.readlines()
print("{}{}{}".format(color_red, ''.join(priv_key_lines), color_norm))
assert priv_key_lines[0] == '-----BEGIN PRIVATE KEY-----\n'
assert priv_key_lines[-1] == '-----END PRIVATE KEY-----\n'
result = run_process_with_input(
['openssl', 'asn1parse', '-i', '-dump'],
''.join(priv_key_lines).encode('ascii'), color=color_red)
if not result:
return False
# Decode PEM-encoded ASN.1 key
# Specification: https://tools.ietf.org/html/rfc8410
# Algorithm Identifiers for Ed25519, Ed448, X25519, and X448
# for Use in the Internet X.509 Public Key Infrastructure
privkey_der = base64.b64decode(''.join(priv_key_lines[1:-1]))
privkey_asn1 = Cryptodome.Util.asn1.DerSequence()
privkey_asn1.decode(privkey_der)
assert len(privkey_asn1) == 3 # Could be more, if there were attributes
# Version
assert privkey_asn1[0] == 0
# PrivateKeyAlgorithmIdentifier: sequence of 5 bytes, OID of 3 bytes:
# 1.3.101.112 = {iso(1) identified-organization(3) thawte(101) id-Ed25519(112)}
# (no algorithm parameter)
assert privkey_asn1[1] == b'0\x05\x06\x03+ep'
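# Byte by byte: 0x30 0x05 = SEQUENCE of 5 bytes, 0x06 0x03 = OID of 3 bytes,
# 0x2b 0x65 0x70 = OID 1.3.101.112 (0x2b encodes 40*1 + 3, then 101, 112)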
# PrivateKey (OCTET STRING, 0x22 bytes, holding an OCTET STRING, 0x20 bytes)
assert privkey_asn1[2].startswith(b'\x04\x22\x04\x20')
priv_secret_bin = privkey_asn1[2][4:]
assert len(priv_secret_bin) == 0x20
print("* secret({}): {}{}{}".format(
len(priv_secret_bin) * 8, color_red, xx(priv_secret_bin), color_norm))
real_private_key = curve.private_key(priv_secret_bin)
print(" * private key({}): {}{:#x}{}".format(
real_private_key.bit_length(), color_red, real_private_key, color_norm))
print("")
# Compute the associated public key using OpenSSL
result = run_process_with_input(
['openssl', 'pkey', '-in', priv_key_path, '-pubout', '-outform', 'PEM', '-out', pub_key_path],
b'', color=color_purple)
if not result:
return False
with open(pub_key_path, 'r') as fpubk:
pub_key_lines = fpubk.readlines()
print("{}{}{}".format(color_green, ''.join(pub_key_lines), color_norm))
assert pub_key_lines[0] == '-----BEGIN PUBLIC KEY-----\n'
assert pub_key_lines[-1] == '-----END PUBLIC KEY-----\n'
result = run_process_with_input(
['openssl', 'asn1parse', '-i', '-dump'],
''.join(pub_key_lines).encode('ascii'), color=color_green)
if not result:
return False
# Decode PEM-encoded ASN.1 key
pubkey_der = base64.b64decode(''.join(pub_key_lines[1:-1]))
pubkey_asn1 = Cryptodome.Util.asn1.DerSequence()
pubkey_asn1.decode(pubkey_der)
assert len(pubkey_asn1) == 2
assert pubkey_asn1[0] == b'0\x05\x06\x03+ep' # Object ID 1.3.101.112, same as in the private key
assert pubkey_asn1[1].startswith(b'\x03\x21\x00') # BIT STRING
pubkey_pt_bin = pubkey_asn1[1][3:]
pubkey_pt = Ed25519Point.decode(pubkey_pt_bin)
print("* public key point: {}{}{}".format(color_green, pubkey_pt, color_norm))
assert pubkey_pt * BASE_ORDER == Ed25519Point(0, 1)
# Ensure consistency between public and private keys
assert curve.b * real_private_key == pubkey_pt
assert curve.public_key(priv_secret_bin) == pubkey_pt_bin
finally:
try:
os.remove(priv_key_path)
os.remove(pub_key_path)
except OSError as exc:
# If removing the files failed, the error will appear in rmdir
logger.debug("Error while removing files: %r", exc)
os.rmdir(temporary_dir)
return True
def run_ssh_test(colorize):
"""Parse Ed25519 OpenSSH keys"""
color_red = COLOR_RED if colorize else ''
color_green = COLOR_GREEN if colorize else ''
color_norm = COLOR_NORM if colorize else ''
color_purple = COLOR_PURPLE if colorize else ''
curve = Ed25519()
temporary_dir = tempfile.mkdtemp(suffix='_ssh-test')
logger.debug("Created temporary directory %s/", temporary_dir)
id_key_path = os.path.join(temporary_dir, 'id_ed25519')
id_pub_path = os.path.join(temporary_dir, 'id_ed25519.pub')
try:
try:
result = run_process_with_input([
'ssh-keygen',
'-t', 'ed25519',
'-N', '',
'-f', id_key_path,
], b'', color=color_purple)
except OSError as exc:
if exc.errno == errno.ENOENT:
print("... ssh-keygen is not installed, skipping the test.")
return True
raise
if not result:
logger.error("ssh-keygen failed, probably because ed25519 keytype is not supported. Skipping the test.")
return True
with open(id_pub_path, 'r') as fpub:
pubkey_lines = fpub.readlines()
with open(id_key_path, 'r') as fpriv:
privkey_lines = fpriv.readlines()
def pop_string(key, offset):
"""Pop a string from the private key"""
field_size = struct.unpack('>I', key[offset:offset + 4])[0]
offset += 4
assert offset + field_size <= len(key)
value = key[offset:offset + field_size]
offset += field_size
return value, offset
# The public key is a single line, with base64-encoded data
print("SSH public key: {}{}{}".format(color_green, pubkey_lines[0].rstrip(), color_norm))
assert len(pubkey_lines) == 1
assert pubkey_lines[0].startswith('ssh-ed25519 ')
public_key = base64.b64decode(pubkey_lines[0].split(' ', 2)[1])
print("SSH public key hexdump:")
hexdump(public_key, color=color_green)
print("SSH public key fingerprint: SHA256:{}".format(
base64.b64encode(hashlib.sha256(public_key).digest()).decode('ascii').rstrip('=')))
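# Same format as `ssh-keygen -lf`: SHA-256 of the key blob, base64 without '=' padding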
print("SSH public key:")
algorithm, offset = pop_string(public_key, offset=0)
print("* algorithm: {}".format(repr(algorithm.decode('ascii'))))
assert algorithm == b'ssh-ed25519'
pubkey_pt_bin, offset = pop_string(public_key, offset)
pubkey_pt = Ed25519Point.decode(pubkey_pt_bin)
print("* public key point: {}{}{}".format(color_green, pubkey_pt, color_norm))
assert offset == len(public_key)
print("")
# The private key is base64-encoded
assert privkey_lines[0] == '-----BEGIN OPENSSH PRIVATE KEY-----\n'
assert privkey_lines[-1] == '-----END OPENSSH PRIVATE KEY-----\n'
private_key = base64.b64decode(''.join(privkey_lines[1:-1]))
print("SSH private key hexdump:")
hexdump(private_key, color=color_red)
# https://cvsweb.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.key
if not private_key.startswith(b'openssh-key-v1\0'):
logger.error("Unsupported private key format")
return False
print("SSH private key:")
offset = len(b'openssh-key-v1\0')
ciphername, offset = pop_string(private_key, offset)
print("* ciphername: {}".format(repr(ciphername.decode('ascii'))))
assert ciphername == b'none'
kdfname, offset = pop_string(private_key, offset)
print("* kdfname: {}".format(repr(kdfname.decode('ascii'))))
assert kdfname == b'none'
kdfoptions, offset = pop_string(private_key, offset)
print("* kdfoptions: {}".format(repr(kdfoptions.decode('ascii'))))
assert kdfoptions == b''
numkeys = struct.unpack('>I', private_key[offset:offset + 4])[0]
offset += 4
print("* numkeys: {}".format(numkeys))
assert numkeys == 1
priv_pubkey, offset = pop_string(private_key, offset)
print("* public key:")
hexdump(priv_pubkey, color=color_green)
assert priv_pubkey == public_key
priv_privkey, offset = pop_string(private_key, offset)
from datetime import datetime, date
from unittest import skipIf
from django.db.models.functions import Upper
from django.test import TestCase
from django.utils.timezone import now
from django_pg_bulk_update.clause_operators import InClauseOperator
from django_pg_bulk_update.compatibility import jsonb_available, hstore_available, array_available, tz_utc, \
django_expressions_available
from django_pg_bulk_update.query import bulk_update
from django_pg_bulk_update.set_functions import ConcatSetFunction
from tests.models import TestModel, RelationModel, UpperCaseModel, AutoNowModel, TestModelWithSchema
class TestInputFormats(TestCase):
fixtures = ['test_model']
def test_model(self):
with self.assertRaises(TypeError):
bulk_update(123, [])
with self.assertRaises(TypeError):
bulk_update('123', [])
def test_values(self):
with self.assertRaises(TypeError):
bulk_update(TestModel, 123)
with self.assertRaises(TypeError):
bulk_update(TestModel, [123])
with self.assertRaises(ValueError):
bulk_update(TestModel, {(1, 2): {'id': 10}})
with self.assertRaises(ValueError):
bulk_update(TestModel, {1: {'id': 10}}, key_fields=('id', 'name'))
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'name': 'test'}])
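# Accepted input shapes: a list of dicts carrying both key and update fields,
# or a dict mapping key values (scalar or tuple, matching key_fields) to update dicts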
self.assertEqual(1, bulk_update(TestModel, [{'id': 1, 'name': 'abc'}]))
self.assertEqual(1, bulk_update(TestModel, [{'id': 1, 'name': 'abc', 'int_field': 2}],
key_fields=('id', 'name')))
self.assertEqual(1, bulk_update(TestModel, {1: {'name': 'abc'}}))
self.assertEqual(1, bulk_update(TestModel, {(1,): {'name': 'abc'}}))
self.assertEqual(1, bulk_update(TestModel, {(2, 'test2'): {'int_field': 2}}, key_fields=('id', 'name')))
self.assertEqual(1, bulk_update(TestModel, {('test3',): {'int_field': 2}}, key_fields='name'))
def test_key_fields(self):
values = [{
'id': 1,
'name': 'bulk_update_1'
}]
self.assertEqual(1, bulk_update(TestModel, values))
self.assertEqual(1, bulk_update(TestModel, values, key_fields='id'))
self.assertEqual(1, bulk_update(TestModel, values, key_fields=['id']))
self.assertEqual(1, bulk_update(TestModel, values, key_fields=['id', 'name']))
self.assertEqual(1, bulk_update(TestModel, values, key_fields='name'))
self.assertEqual(1, bulk_update(TestModel, values, key_fields=['name']))
def test_using(self):
values = [{
'id': 1,
'name': 'bulk_update_1'
}]
self.assertEqual(1, bulk_update(TestModel, values))
self.assertEqual(1, bulk_update(TestModel, values, using='default'))
with self.assertRaises(ValueError):
bulk_update(TestModel, values, using='invalid')
with self.assertRaises(TypeError):
bulk_update(TestModel, values, using=123)
def test_set_functions(self):
with self.assertRaises(TypeError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], set_functions=123)
with self.assertRaises(TypeError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], set_functions=[123])
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], set_functions={1: 'test'})
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], set_functions={'id': 1})
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], set_functions={'invalid': 1})
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'int_field': 1}], set_functions={'int_field': 'invalid'})
# I don't test all set functions here, as there is another TestCase for this: TestSetFunctions
self.assertEqual(1, bulk_update(TestModel, [{'id': 2, 'name': 'test1'}],
set_functions={'name': ConcatSetFunction()}))
self.assertEqual(1, bulk_update(TestModel, [{'id': 2, 'name': 'test1'}], set_functions={'name': '||'}))
def test_key_fields_ops(self):
with self.assertRaises(TypeError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], key_fields_ops=123)
with self.assertRaises(TypeError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], key_fields_ops=[123])
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], key_fields_ops={123: 'test'})
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], key_fields_ops={'id': 'invalid'})
# name is not in key_fields
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'name': ['test1']}], key_fields_ops={'name': 'in'})
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'name': ['test1']}], key_fields_ops={'name': 123})
self.assertEqual(1, bulk_update(TestModel, [{'id': [1], 'name': 'test1'}], key_fields_ops={'id': 'in'}))
self.assertEqual(1, bulk_update(TestModel, [{'id': 1, 'name': ['test1']}], key_fields='name',
key_fields_ops={'name': 'in'}))
self.assertEqual(1, bulk_update(TestModel, [{'id': 1, 'name': ['test1']}], key_fields='name',
key_fields_ops=['in']))
self.assertEqual(1, bulk_update(TestModel, [{'id': [1], 'name': 'test1'}], key_fields_ops=['in']))
self.assertEqual(1, bulk_update(TestModel, [{'id': [1], 'name': 'test1'}], key_fields_ops=[InClauseOperator()]))
self.assertEqual(1, bulk_update(TestModel, [{'id': [1], 'name': 'test1'}],
key_fields_ops={'id': InClauseOperator()}))
def test_batch(self):
with self.assertRaises(TypeError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], batch_size='abc')
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], batch_size=-2)
with self.assertRaises(TypeError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], batch_size=2.5)
with self.assertRaises(TypeError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], batch_size=1, batch_delay='abc')
with self.assertRaises(ValueError):
bulk_update(TestModel, [{'id': 1, 'name': 'test1'}], batch_size=1, batch_delay=-2)
class TestSimple(TestCase):
fixtures = ['test_model', 'm2m_relation', 'test_upper_case_model', 'auto_now_model', 'test_model_with_schema']
multi_db = True
databases = ['default', 'secondary']
def test_update(self):
res = bulk_update(TestModel, [{
'id': 1,
'name': 'bulk_update_1'
}, {
'id': 5,
'name': 'bulk_update_5'
}, {
'id': 8,
'name': 'bulk_update_8'
}])
self.assertEqual(3, res)
for pk, name, int_field in TestModel.objects.all().order_by('id').values_list('id', 'name', 'int_field'):
if pk in {1, 5, 8}:
self.assertEqual('bulk_update_%d' % pk, name)
else:
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
def test_upper_case(self):
res = bulk_update(UpperCaseModel, [{
'id': 1,
'UpperCaseName': 'BulkUpdate1'
}, {
'id': 3,
'UpperCaseName': 'BulkUpdate3'
}])
self.assertEqual(2, res)
for pk, name in UpperCaseModel.objects.all().order_by('id').values_list('id', 'UpperCaseName'):
if pk in {1, 3}:
self.assertEqual('BulkUpdate%d' % pk, name)
else:
self.assertEqual('test%d' % pk, name)
def test_empty(self):
res = bulk_update(TestModel, [])
self.assertEqual(0, res)
for pk, name, int_field in TestModel.objects.all().order_by('id').values_list('id', 'name', 'int_field'):
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
def test_quotes(self):
res = bulk_update(TestModel, [{
'id': 1,
'name': '\''
}, {
'id': 5,
'name': '"'
}])
self.assertEqual(2, res)
for pk, name, int_field in TestModel.objects.all().order_by('id').values_list('id', 'name', 'int_field'):
if pk == 1:
self.assertEqual('\'', name)
elif pk == 5:
self.assertEqual('"', name)
else:
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
def test_key_update(self):
res = bulk_update(TestModel, {
('test1',): {
'id': 1,
'name': 'bulk_update_1'
},
('test5',): {
'id': 5,
'name': 'bulk_update_5'
},
('test8',): {
'id': 8,
'name': 'bulk_update_8'
}
}, key_fields='name')
self.assertEqual(3, res)
for pk, name, int_field in TestModel.objects.all().order_by('id').values_list('id', 'name', 'int_field'):
if pk in {1, 5, 8}:
self.assertEqual('bulk_update_%d' % pk, name)
else:
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
def test_using(self):
res = bulk_update(TestModel, [{
'id': 1,
'name': 'bulk_update_1'
}, {
'id': 5,
'name': 'bulk_update_5'
}, {
'id': 8,
'name': 'bulk_update_8'
}], using='secondary')
self.assertEqual(3, res)
for pk, name, int_field in TestModel.objects.all().using('secondary').order_by('id').\
values_list('id', 'name', 'int_field'):
if pk in {1, 5, 8}:
self.assertEqual('bulk_update_%d' % pk, name)
else:
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
for pk, name, int_field in TestModel.objects.all().using('default').order_by('id').\
values_list('id', 'name', 'int_field'):
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
def test_batch(self):
res = bulk_update(TestModel, [{
'id': 1,
'name': 'bulk_update_1'
}, {
'id': 5,
'name': 'bulk_update_5'
}, {
'id': 8,
'name': 'bulk_update_8'
}], batch_size=1)
self.assertEqual(3, res)
for pk, name, int_field in TestModel.objects.all().order_by('id').values_list('id', 'name', 'int_field'):
if pk in {1, 5, 8}:
self.assertEqual('bulk_update_%d' % pk, name)
else:
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
# Test that empty values are handled correctly
res = bulk_update(TestModel, [], batch_size=10)
self.assertEqual(0, res)
def test_same_key_fields(self):
res = bulk_update(TestModel, {
(1, 3): {
"name": "first"
},
(6, 8): {
"name": "second"
}
}, key_fields=('id', 'id'), key_fields_ops=('>=', '<'))
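# Each key tuple is read as (id >= first) AND (id < second): (1, 3) matches
# ids {1, 2} and (6, 8) matches ids {6, 7}, hence 4 updated rows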
self.assertEqual(4, res)
for pk, name, int_field in TestModel.objects.all().order_by('id').values_list('id', 'name', 'int_field'):
if pk in {1, 2}:
self.assertEqual('first', name)
elif pk in {6, 7}:
self.assertEqual('second', name)
else:
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
def test_returning(self):
res = bulk_update(TestModel, [{
'id': 1,
'name': 'bulk_update_1'
}, {
'id': 5,
'name': 'bulk_update_5'
}, {
'id': 8,
'name': 'bulk_update_8'
}], returning=('id', 'name', 'int_field'))
from django_pg_returning import ReturningQuerySet
self.assertIsInstance(res, ReturningQuerySet)
self.assertSetEqual({
(1, 'bulk_update_1', 1),
(5, 'bulk_update_5', 5),
(8, 'bulk_update_8', 8)
}, set(res.values_list('id', 'name', 'int_field')))
for pk, name, int_field in TestModel.objects.all().order_by('id').values_list('id', 'name', 'int_field'):
if pk in {1, 5, 8}:
self.assertEqual('bulk_update_%d' % pk, name)
else:
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
def test_returning_empty(self):
res = bulk_update(TestModel, [{'id': 100, 'name': 'not_exist'}], returning='id')
from django_pg_returning import ReturningQuerySet
self.assertIsInstance(res, ReturningQuerySet)
self.assertEqual(0, res.count())
def test_returning_all(self):
res = bulk_update(TestModel, [{
'id': 1,
'name': 'bulk_update_1'
}, {
'id': 5,
'name': 'bulk_update_5'
}, {
'id': 8,
'name': 'bulk_update_8'
}], returning='*')
from django_pg_returning import ReturningQuerySet
self.assertIsInstance(res, ReturningQuerySet)
self.assertSetEqual({
(1, 'bulk_update_1', 1),
(5, 'bulk_update_5', 5),
(8, 'bulk_update_8', 8)
}, set(res.values_list('id', 'name', 'int_field')))
def test_returning_all_m2m(self):
res = bulk_update(RelationModel, [{
'id': 1,
'int_field': 2
}], returning='*')
from django_pg_returning import ReturningQuerySet
self.assertIsInstance(res, ReturningQuerySet)
self.assertSetEqual({(1, 2, 1, 1)}, set(res.values_list('id', 'int_field', 'fk_id', 'o2o_id')))
def test_where(self):
qs = TestModel.objects.filter(int_field__gte=5)
res = bulk_update(TestModel, [{
'id': 1,
'name': 'bulk_update_1'
}, {
'id': 5,
'name': 'bulk_update_5'
}, {
'id': 8,
'name': 'bulk_update_8'
}], where=qs.query.where)
# Only 5 and 8 elements should be updated
self.assertEqual(2, res)
for pk, name, int_field in TestModel.objects.all().order_by('id').values_list('id', 'name', 'int_field'):
if pk in {5, 8}:
self.assertEqual('bulk_update_%d' % pk, name)
else:
self.assertEqual('test%d' % pk, name)
self.assertEqual(pk, int_field)
def test_auto_now(self):
res = bulk_update(AutoNowModel, [{
'id': 1,
'checked': datetime(2020, 1, 2, 0, 0, 0, tzinfo=tz_utc)
}])
self.assertEqual(1, res)
self.assertEqual(1, AutoNowModel.objects.all().count())
instance = AutoNowModel.objects.get()
self.assertEqual(datetime(2019, 1, 1, tzinfo=tz_utc), instance.created)
self.assertEqual(datetime(2020, 1, 2, 0, 0, 0, tzinfo=tz_utc), instance.checked)
self.assertEqual(instance.updated, now().date())
def test_auto_now_given_directly(self):
res = bulk_update(AutoNowModel, [{
'id': 1,
'checked': datetime(2020, 1, 2, 0, 0, 0, tzinfo=tz_utc),
'created': now(),
'updated': now().date()
}])
self.assertEqual(1, res)
self.assertEqual(1, AutoNowModel.objects.all().count())
instance = AutoNowModel.objects.get()
self.assertEqual(datetime(2019, 1, 1, tzinfo=tz_utc), instance.created)
self.assertEqual(datetime(2020, 1, 2, 0, 0, 0, tzinfo=tz_utc), instance.checked)
self.assertEqual(instance.updated, now().date())
def test_auto_now_respects_override(self):
# Now check to make sure we can explicitly set values
# (requires passing set functions)
bulk_update(AutoNowModel, [{
'id': 1,
'created': datetime(2011, 1, 2, 0, 0, 0, tzinfo=tz_utc),
'updated': date(2011, 1, 3),
'checked': datetime(2011, 1, 4, 0, 0, 0, tzinfo=tz_utc),
}], set_functions={"created": "eq", "updated": "eq"})
instance = AutoNowModel.objects.get()
self.assertEqual(datetime(2011, 1, 2, 0, 0, 0, tzinfo=tz_utc), instance.created)
self.assertEqual(date(2011, 1, 3), instance.updated)
self.assertEqual(datetime(2011, 1, 4, 0, 0, 0, tzinfo=tz_utc), instance.checked)
def test_quoted_table_name(self):
# Test for https://github.com/M1ha-Shvn/django-pg-bulk-update/issues/63
self.assertEqual(1, bulk_update(TestModelWithSchema, [{'id': 1, 'name': 'abc'}]))
class TestReadmeExample(TestCase):
def test_example(self):
# Skip bulk_create and bulk_update_or_create sections (tested in other test)
TestModel.objects.bulk_create([TestModel(pk=i, name="item%d" % i, int_field=1) for i in range(1, 4)])
# Update by id field
updated = bulk_update(TestModel, [{
"id": 1,
"name": "updated1",
}, {
"id": 2,
"name": "updated2"
}])
self.assertEqual(2, updated)
res = bulk_update(TestModel, [{
"id": 1,
"name": "updated1",
}, {
"id": 2,
"name": "updated2"
<reponame>nutofem/fenics_helpers<filename>fenics_helpers/rk.py
# -*- coding: utf-8 -*-
"""Runge Kutta methods."""
import numpy as np
from numpy import array
from numpy.lib.scimath import sqrt
import dolfin as d
import ufl
def _change_stepsize(h, err, q, f=0.8, fmin=0.1, fmax=5.0, stepMin=0, stepMax=np.Inf):
"""
Args:
h: current stepsize
err: error indicator estimatedError/allowedError
q: theoretical convergence order
fmin: minimal relative change
fmax: maximal relative change
f: safety factor: hnew = f*hoptimal
stepMin: minimal absolute stepsize
stepMax: maximal absolute stepsize
Returns:
float: changed stepsize
"""
hopt = h * (1.0 / err) ** (1.0 / (q + 1.0))
hmin = max(fmin * hopt, stepMin)
hmax = min(fmax * hopt, stepMax)
hnew = max(min(hmax, f * hopt), hmin)
return hnew
class RKo1:
"""
Runge Kutta methods for differential equations
of 1st order in time (e.g. the heat equation) base class
"""
def __init__(self, bt, L1, L2, u, h, update=lambda t: None):
"""
Args:
bt: Butcher tableau
L1: Right hand side of the equation
expected to be a 1-form
L2: Left hand side of the equation
expected to be a 2-form
u: Function representing the values
h: stepSize
update: a function f(t) that updates relevant expressions
"""
self.bt = bt
# error control for embedded methods
try:
self.b_corrector = bt.b_corrector
except AttributeError:
self.b_corrector = None
self.order = bt.order
self.num_stages = len(bt.c)
self.u = u
self.h = d.Constant(h)
self.update = update
self.t = 0.0
def solve_stages(self, bc):
"""
Different for implicit and explicit
"""
raise Exception("To be implemented in derived classes")
def next_step(self):
"""
Returns next step from members.
"""
h = self.h.values()[0]
x1 = self.u.vector()
for j in range(self.num_stages):
x1 += h * self.bt.b[j] * self.ll[j].vector()
return x1
def next_step_with_error_estimate(self):
"""
Returns next step from members.
Uses embedded method for error estimation.
"""
h = self.h.values()[0]
x1 = 0 + self.u.vector()
x1_corr = 0 + self.u.vector()
for j in range(self.num_stages):
x1 += h * self.bt.b[j] * self.ll[j].vector()
x1_corr += h * self.bt.b_corrector[j] * self.ll[j].vector()
# Compare solution with corrector
error = d.Vector(x1 - x1_corr)
error.abs()
return x1, error
def try_adaptive_step(self, tolA, tolR, bc=None):
"""
Tries a RK step advancing values and velocities from self.t -> self.t+h.
Note: Changes the stepsize.
"""
self.solve_stages(bc)
h = self.h.values()[0]
x1, eX = self.next_step_with_error_estimate()
s1 = tolA + np.absolute(self.u.vector()[:]) * tolR
s2 = tolA + np.absolute(x1[:]) * tolR
e = eX[:]
err = max((e / s1).max(), (e / s2).max())
if err <= 1.0: # Step accepted
step_accepted = True
# Advance solution
self.u.vector().set_local(x1.get_local())
self.t += h
else:
step_accepted = False
# Change stepsize
self.h.assign(_change_stepsize(h, err, max(self.order)))
return step_accepted
def do_step(self, bc=None):
"""
Does a RK step advancing values and velocities from t -> t+h.
"""
self.solve_stages(bc)
x1 = self.next_step()
self.u.vector().set_local(x1.get_local())
self.t += self.h.values()[0]
class DIRKo1(RKo1):
"""
A diagonally implicit Runge Kutta method for differential equations
of 1st order, e.g. the heat equation.
"""
@staticmethod
def create_intermediate_forms(bt, L1, L2, u, h):
num_stages = len(bt.c)
test_func = L2.arguments()[0]
trial_func = L2.arguments()[1]
V = test_func.function_space()
ll = [d.Function(V) for i in range(num_stages)]
LL = []
for stage in range(num_stages):
xs = u
for j in range(num_stages):
xs += h * bt.A[stage, j] * ll[j]
Ls1 = ufl.replace(L1, {u: xs})
Ls2 = ufl.replace(L2, {u: xs, trial_func: ll[stage]})
Ls = Ls1 - Ls2
LL.append(Ls)
return [LL, ll]
def __init__(self, bt, L1, L2, u, h, update=lambda t: None):
"""
Args:
bt: Butcher tableau
L1: Right hand side of the equation
expected to be a 1-form
L2: Left hand side of the equation
expected to be a 2-form
u: Function representing the values
h: stepSize
update: a function f(t) that updates relevant expressions
"""
# **** Test if diagonally implicit method (bt.A lower triangular, nonzero diagonal allowed) ****
rows, columns = np.shape(bt.A)
if not rows == columns:
raise Exception("Butcher Table A has wrong shape")
for i in range(rows):
for j in range(i + 1, columns):
if not bt.A[i, j] == 0.0:
raise Exception("Only diagonally implicit methods supported.")
# **** DIRK method. Good. ****
super(DIRKo1, self).__init__(bt, L1, L2, u, h, update=update)
self.LL, self.ll = self.create_intermediate_forms(bt, L1, L2, u, self.h)
def solve_stages(self, bc):
"""
Solves intermediate steps.
Uses a full mass matrix constructed in init.
"""
# TODO don't use full mass matrix
for stage in range(self.num_stages):
ti = self.t + self.bt.c[stage] * self.h.values()[0]
self.update(ti)
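# bc may be a single DirichletBC or a 2-element sequence: bc[0] holds boundary
# values written directly into u, bc[1] is passed on to the stage solve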
try:
l = len(bc)
except TypeError:
l = 0
bcs = bc
if l == 2:
bcDict0 = bc[0].get_boundary_values()
bcs = bc[1]
for k in bcDict0.keys():
self.u.vector()[k] = bcDict0[k]
d.solve(self.LL[stage] == 0, self.ll[stage], bcs=bcs)
class ERKo1(RKo1):
"""
An explicit Runge Kutta method for differential equations
of 1st order, e.g. the heat equation.
"""
@staticmethod
def create_intermediate_forms(bt, L1, L2, u, h):
num_stages = len(bt.c)
test_func = L1.arguments()[0]
V = test_func.function_space()
ll = [d.Function(V) for i in range(num_stages)]
LL = []
for stage in range(num_stages):
xs = u
for j in range(num_stages):
xs += h * bt.A[stage, j] * ll[j]
Ls = ufl.replace(L1, {u: xs})
LL.append(Ls)
return [LL, ll]
def __init__(self, bt, L1, L2, u, h, update=lambda t: None):
"""
Args:
bt: Butcher tableau
L1: Right hand side of the equation
expected to be a 1-form
L2: Left hand side of the equation
expected to be a 2-form
u: Function representing the values
h: stepSize
update: a function f(t) that updates relevant expressions
"""
# **** Test if explicit method (bt.A strictly lower triangular, zero diagonal) ****
rows, columns = np.shape(bt.A)
if not rows == columns:
raise Exception("Butcher Table A has wrong shape")
for i in range(rows):
for j in range(i, columns):
if not bt.A[i, j] == 0.0:
raise Exception("Only explicit methods supported.")
# **** Explicit method. Good. ****
super(ERKo1, self).__init__(bt, L1, L2, u, h, update=update)
self.LL, self.ll = self.create_intermediate_forms(bt, L1, L2, u, self.h)
self.L2 = L2
self.b = None
def solve_stages(self, bc):
"""
Solves intermediate steps.
Uses a full mass matrix constructed in init.
"""
# TODO don't use full mass matrix
for stage in range(self.num_stages):
ti = self.t + self.bt.c[stage] * self.h.values()[0]
self.update(ti)
try:
l = len(bc)
except TypeError:
l = 0
bcs = bc
if l == 2:
bcDict0 = bc[0].get_boundary_values()
bcs = bc[1]
for k in bcDict0.keys():
self.u.vector()[k] = bcDict0[k]
rhs = self.LL[stage]
self.b = d.assemble(rhs, tensor=self.b)
if bcs:
bcs.apply(self.b)
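# Lazy setup: the mass matrix is assembled and LU-factorized once, on the
# first stage solve, and the factorization is reused for all later stages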
try:
self.solver.solve(self.A, self.ll[stage].vector(), self.b)
except AttributeError:
self.A = d.assemble(self.L2)
if bcs:
bcs.apply(self.A)
self.solver = d.LUSolver(self.A, method="mumps")
self.solver.parameters["symmetric"] = True
self.solver.solve(self.A, self.ll[stage].vector(), self.b)
class RKo2:
"""
Runge Kutta methods for differential equations
of 2nd order in time (e.g. the wave equation) base class
"""
def __init__(self, bt, L1, L2, u, v, h, update=lambda t: None):
"""
Args:
bt: Butcher tableau
L1: Right hand side of the equation
expected to be a 1-form
L2: Left hand side of the equation
expected to be a 2-form
u: Function representing the values
v: Function representing the velocities
h: stepSize
update: a function f(t) that updates relevant expressions
"""
# create 2nd order Butcher tableau
self.bt_a1 = bt.A
self.bt_a2 = np.dot(bt.A, bt.A)
self.bt_b1 = bt.b
self.bt_c = bt.c
self.bt_b2 = np.dot(bt.b, bt.A)
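# Why A@A and b@A: writing u'' = f as u' = v, v' = f(u) and applying the
# tableau (A, b, c) to the pair collapses to stage values
#   k_i = f(u + c_i*h*v + h**2 * (A@A)[i, j]*k_j)
# and updates
#   u1 = u + h*sum(b)*v + h**2 * (b@A)[j]*k_j,  v1 = v + h*b[j]*k_j
# (summation over j implied); next_step() below uses exactly these coefficients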
# error control for embedded methods
try:
b_corr = bt.b_corrector
self.b_corrector1 = b_corr
self.b_corrector2 = np.dot(b_corr, bt.A)
except AttributeError:
self.b_corrector1 = None
self.b_corrector2 = None
self.order = bt.order
self.num_stages = len(bt.c)
self.u = u
self.v = v
self.h = d.Constant(h)
self.update = update
self.t = 0.0
def solve_stages(self, bc):
"""
Different for implicit and explicit
"""
raise Exception("To be implemented in derived classes")
def next_step(self):
"""
Returns next step.
"""
h = self.h.values()[0]
u = self.u.vector()
v = self.v.vector()
x1 = u + np.sum(self.bt_b1) * h * v
y1 = 0 + v
for j in range(self.num_stages):
x1 += h ** 2 * self.bt_b2[j] * self.ll[j].vector()
y1 += h * self.bt_b1[j] * self.ll[j].vector()
return x1, y1
def next_step_with_error_estimate(self):
"""
Returns next step.
Uses embedded method for error estimation.
"""
u = d.Vector(self.u.vector())
v = d.Vector(self.v.vector())
h = self.h.values()[0]
# Compute solution including embedded step
x1 = u + np.sum(self.bt_b1) * h * v
y1 = 0 + v
x1_corr = u + np.sum(self.b_corrector1) * h * v
y1_corr = 0 + v
08:07:00,0.34,65.0,0.96
1482,6,28400.0,248,Work and Education,1970-01-01 08:08:00,38.5,2597.0,38.43
1483,10,27265.0,248,Leisure,1970-01-01 08:08:00,36.97,2484.0,36.76
1484,11,8355.0,248,Travel and Other,1970-01-01 08:08:00,11.33,687.0,10.17
1485,3,6159.0,248,Housework,1970-01-01 08:08:00,8.35,685.0,10.14
1486,4,3334.0,248,Child Care,1970-01-01 08:08:00,4.52,243.0,3.6
1487,5,245.0,248,Adult Care,1970-01-01 08:08:00,0.33,62.0,0.92
1488,6,28454.0,249,Work and Education,1970-01-01 08:09:00,38.58,2604.0,38.53
1489,10,27259.0,249,Leisure,1970-01-01 08:09:00,36.96,2482.0,36.73
1490,11,8317.0,249,Travel and Other,1970-01-01 08:09:00,11.28,684.0,10.12
1491,3,6156.0,249,Housework,1970-01-01 08:09:00,8.35,685.0,10.14
1492,4,3329.0,249,Child Care,1970-01-01 08:09:00,4.51,240.0,3.55
1493,5,243.0,249,Adult Care,1970-01-01 08:09:00,0.33,63.0,0.93
1494,6,28476.0,250,Work and Education,1970-01-01 08:10:00,38.61,2606.0,38.56
1495,10,27266.0,250,Leisure,1970-01-01 08:10:00,36.97,2479.0,36.68
1496,11,8291.0,250,Travel and Other,1970-01-01 08:10:00,11.24,687.0,10.17
1497,3,6166.0,250,Housework,1970-01-01 08:10:00,8.36,686.0,10.15
1498,4,3318.0,250,Child Care,1970-01-01 08:10:00,4.5,236.0,3.49
1499,5,241.0,250,Adult Care,1970-01-01 08:10:00,0.33,64.0,0.95
1500,6,29118.0,251,Work and Education,1970-01-01 08:11:00,39.48,2654.0,39.27
1501,10,27128.0,251,Leisure,1970-01-01 08:11:00,36.78,2477.0,36.65
1502,11,7859.0,251,Travel and Other,1970-01-01 08:11:00,10.66,647.0,9.57
1503,3,6109.0,251,Housework,1970-01-01 08:11:00,8.28,677.0,10.02
1504,4,3285.0,251,Child Care,1970-01-01 08:11:00,4.45,237.0,3.51
1505,5,259.0,251,Adult Care,1970-01-01 08:11:00,0.35,66.0,0.98
1506,6,29183.0,252,Work and Education,1970-01-01 08:12:00,39.57,2656.0,39.3
1507,10,27149.0,252,Leisure,1970-01-01 08:12:00,36.81,2480.0,36.7
1508,11,7802.0,252,Travel and Other,1970-01-01 08:12:00,10.58,651.0,9.63
1509,3,6118.0,252,Housework,1970-01-01 08:12:00,8.29,672.0,9.94
1510,4,3252.0,252,Child Care,1970-01-01 08:12:00,4.41,234.0,3.46
1511,5,254.0,252,Adult Care,1970-01-01 08:12:00,0.34,65.0,0.96
1512,6,29254.0,253,Work and Education,1970-01-01 08:13:00,39.66,2663.0,39.41
1513,10,27173.0,253,Leisure,1970-01-01 08:13:00,36.84,2478.0,36.67
1514,11,7729.0,253,Travel and Other,1970-01-01 08:13:00,10.48,644.0,9.53
1515,3,6137.0,253,Housework,1970-01-01 08:13:00,8.32,673.0,9.96
1516,4,3211.0,253,Child Care,1970-01-01 08:13:00,4.35,235.0,3.48
1517,5,254.0,253,Adult Care,1970-01-01 08:13:00,0.34,65.0,0.96
1518,6,29297.0,254,Work and Education,1970-01-01 08:14:00,39.72,2672.0,39.54
1519,10,27179.0,254,Leisure,1970-01-01 08:14:00,36.85,2471.0,36.56
1520,11,7707.0,254,Travel and Other,1970-01-01 08:14:00,10.45,642.0,9.5
1521,3,6135.0,254,Housework,1970-01-01 08:14:00,8.32,674.0,9.97
1522,4,3196.0,254,Child Care,1970-01-01 08:14:00,4.33,233.0,3.45
1523,5,244.0,254,Adult Care,1970-01-01 08:14:00,0.33,66.0,0.98
1524,6,29315.0,255,Work and Education,1970-01-01 08:15:00,39.74,2679.0,39.64
1525,10,27189.0,255,Leisure,1970-01-01 08:15:00,36.86,2474.0,36.61
1526,11,7683.0,255,Travel and Other,1970-01-01 08:15:00,10.42,634.0,9.38
1527,3,6142.0,255,Housework,1970-01-01 08:15:00,8.33,672.0,9.94
1528,4,3185.0,255,Child Care,1970-01-01 08:15:00,4.32,234.0,3.46
1529,5,244.0,255,Adult Care,1970-01-01 08:15:00,0.33,65.0,0.96
1530,6,30230.0,256,Work and Education,1970-01-01 08:16:00,40.99,2745.0,40.62
1531,10,26604.0,256,Leisure,1970-01-01 08:16:00,36.07,2384.0,35.28
1532,11,7404.0,256,Travel and Other,1970-01-01 08:16:00,10.04,640.0,9.47
1533,3,6114.0,256,Housework,1970-01-01 08:16:00,8.29,682.0,10.09
1534,4,3145.0,256,Child Care,1970-01-01 08:16:00,4.26,235.0,3.48
1535,5,261.0,256,Adult Care,1970-01-01 08:16:00,0.35,72.0,1.07
1536,6,30284.0,257,Work and Education,1970-01-01 08:17:00,41.06,2750.0,40.69
1537,10,26620.0,257,Leisure,1970-01-01 08:17:00,36.09,2385.0,35.29
1538,11,7386.0,257,Travel and Other,1970-01-01 08:17:00,10.01,643.0,9.51
1539,3,6116.0,257,Housework,1970-01-01 08:17:00,8.29,679.0,10.05
1540,4,3105.0,257,Child Care,1970-01-01 08:17:00,4.21,228.0,3.37
1541,5,247.0,257,Adult Care,1970-01-01 08:17:00,0.33,73.0,1.08
1542,6,30358.0,258,Work and Education,1970-01-01 08:18:00,41.16,2758.0,40.81
1543,10,26618.0,258,Leisure,1970-01-01 08:18:00,36.09,2388.0,35.34
1544,11,7339.0,258,Travel and Other,1970-01-01 08:18:00,9.95,640.0,9.47
1545,3,6116.0,258,Housework,1970-01-01 08:18:00,8.29,678.0,10.03
1546,4,3071.0,258,Child Care,1970-01-01 08:18:00,4.16,221.0,3.27
1547,5,256.0,258,Adult Care,1970-01-01 08:18:00,0.35,73.0,1.08
1548,6,30400.0,259,Work and Education,1970-01-01 08:19:00,41.22,2764.0,40.9
1549,10,26624.0,259,Leisure,1970-01-01 08:19:00,36.1,2385.0,35.29
1550,11,7295.0,259,Travel and Other,1970-01-01 08:19:00,9.89,641.0,9.49
1551,3,6126.0,259,Housework,1970-01-01 08:19:00,8.31,678.0,10.03
1552,4,3058.0,259,Child Care,1970-01-01 08:19:00,4.15,219.0,3.24
1553,5,255.0,259,Adult Care,1970-01-01 08:19:00,0.35,71.0,1.05
1554,6,30418.0,260,Work and Education,1970-01-01 08:20:00,41.24,2767.0,40.94
1555,10,26626.0,260,Leisure,1970-01-01 08:20:00,36.1,2386.0,35.31
1556,11,7286.0,260,Travel and Other,1970-01-01 08:20:00,9.88,637.0,9.43
1557,3,6124.0,260,Housework,1970-01-01 08:20:00,8.3,678.0,10.03
1558,4,3052.0,260,Child Care,1970-01-01 08:20:00,4.14,218.0,3.23
1559,5,252.0,260,Adult Care,1970-01-01 08:20:00,0.34,72.0,1.07
1560,6,30951.0,261,Work and Education,1970-01-01 08:21:00,41.96,2801.0,41.45
1561,10,26367.0,261,Leisure,1970-01-01 08:21:00,35.75,2346.0,34.71
1562,11,7058.0,261,Travel and Other,1970-01-01 08:21:00,9.57,637.0,9.43
1563,3,6130.0,261,Housework,1970-01-01 08:21:00,8.31,673.0,9.96
1564,4,2997.0,261,Child Care,1970-01-01 08:21:00,4.06,225.0,3.33
1565,5,255.0,261,Adult Care,1970-01-01 08:21:00,0.35,76.0,1.12
1566,6,30987.0,262,Work and Education,1970-01-01 08:22:00,42.01,2804.0,41.49
1567,10,26410.0,262,Leisure,1970-01-01 08:22:00,35.81,2344.0,34.68
1568,11,7008.0,262,Travel and Other,1970-01-01 08:22:00,9.5,636.0,9.41
1569,3,6117.0,262,Housework,1970-01-01 08:22:00,8.29,674.0,9.97
1570,4,2983.0,262,Child Care,1970-01-01 08:22:00,4.04,224.0,3.31
1571,5,253.0,262,Adult Care,1970-01-01 08:22:00,0.34,76.0,1.12
1572,6,31046.0,263,Work and Education,1970-01-01 08:23:00,42.09,2816.0,41.67
1573,10,26417.0,263,Leisure,1970-01-01 08:23:00,35.82,2337.0,34.58
1574,11,6939.0,263,Travel and Other,1970-01-01 08:23:00,9.41,628.0,9.29
1575,3,6113.0,263,Housework,1970-01-01 08:23:00,8.29,679.0,10.05
1576,4,2991.0,263,Child Care,1970-01-01 08:23:00,4.06,224.0,3.31
1577,5,252.0,263,Adult Care,1970-01-01 08:23:00,0.34,74.0,1.09
1578,6,31080.0,264,Work and Education,1970-01-01 08:24:00,42.14,2816.0,41.67
1579,10,26433.0,264,Leisure,1970-01-01 08:24:00,35.84,2337.0,34.58
1580,11,6896.0,264,Travel and Other,1970-01-01 08:24:00,9.35,630.0,9.32
1581,3,6119.0,264,Housework,1970-01-01 08:24:00,8.3,676.0,10.0
1582,4,2977.0,264,Child Care,1970-01-01 08:24:00,4.04,225.0,3.33
1583,5,253.0,264,Adult Care,1970-01-01 08:24:00,0.34,74.0,1.09
1584,6,31106.0,265,Work and Education,1970-01-01 08:25:00,42.17,2819.0,41.71
1585,10,26442.0,265,Leisure,1970-01-01 08:25:00,35.85,2337.0,34.58
1586,11,6869.0,265,Travel and Other,1970-01-01 08:25:00,9.31,630.0,9.32
1587,3,6122.0,265,Housework,1970-01-01 08:25:00,8.3,677.0,10.02
1588,4,2966.0,265,Child Care,1970-01-01 08:25:00,4.02,221.0,3.27
1589,5,253.0,265,Adult Care,1970-01-01 08:25:00,0.34,74.0,1.09
1590,6,31530.0,266,Work and Education,1970-01-01 08:26:00,42.75,2862.0,42.35
1591,10,26329.0,266,Leisure,1970-01-01 08:26:00,35.7,2332.0,34.51
1592,11,6620.0,266,Travel and Other,1970-01-01 08:26:00,8.98,605.0,8.95
1593,3,6086.0,266,Housework,1970-01-01 08:26:00,8.25,674.0,9.97
1594,4,2929.0,266,Child Care,1970-01-01 08:26:00,3.97,208.0,3.08
1595,5,264.0,266,Adult Care,1970-01-01 08:26:00,0.36,77.0,1.14
1596,6,31567.0,267,Work and Education,1970-01-01 08:27:00,42.8,2869.0,42.45
1597,10,26326.0,267,Leisure,1970-01-01 08:27:00,35.69,2337.0,34.58
1598,11,6617.0,267,Travel and Other,1970-01-01 08:27:00,8.97,592.0,8.76
1599,3,6091.0,267,Housework,1970-01-01 08:27:00,8.26,677.0,10.02
1600,4,2903.0,267,Child Care,1970-01-01 08:27:00,3.94,207.0,3.06
1601,5,254.0,267,Adult Care,1970-01-01 08:27:00,0.34,76.0,1.12
1602,6,31628.0,268,Work and Education,1970-01-01 08:28:00,42.88,2875.0,42.54
1603,10,26337.0,268,Leisure,1970-01-01 08:28:00,35.71,2339.0,34.61
1604,11,6564.0,268,Travel and Other,1970-01-01 08:28:00,8.9,584.0,8.64
1605,3,6091.0,268,Housework,1970-01-01 08:28:00,8.26,675.0,9.99
1606,4,2883.0,268,Child Care,1970-01-01 08:28:00,3.91,209.0,3.09
1607,5,255.0,268,Adult Care,1970-01-01 08:28:00,0.35,76.0,1.12
1608,6,31664.0,269,Work and Education,1970-01-01 08:29:00,42.93,2879.0,42.6
1609,10,26345.0,269,Leisure,1970-01-01 08:29:00,35.72,2340.0,34.63
1610,11,6534.0,269,Travel and Other,1970-01-01 08:29:00,8.86,583.0,8.63
1611,3,6086.0,269,Housework,1970-01-01 08:29:00,8.25,672.0,9.94
1612,4,2875.0,269,Child Care,1970-01-01 08:29:00,3.9,208.0,3.08
1613,5,254.0,269,Adult Care,1970-01-01 08:29:00,0.34,76.0,1.12
1614,6,31697.0,270,Work and Education,1970-01-01 08:30:00,42.97,2881.0,42.63
1615,10,26349.0,270,Leisure,1970-01-01 08:30:00,35.72,2341.0,34.64
1616,11,6506.0,270,Travel and Other,1970-01-01 08:30:00,8.82,583.0,8.63
1617,3,6094.0,270,Housework,1970-01-01 08:30:00,8.26,674.0,9.97
1618,4,2862.0,270,Child Care,1970-01-01 08:30:00,3.88,203.0,3.0
1619,5,250.0,270,Adult Care,1970-01-01 08:30:00,0.34,76.0,1.12
1620,6,32683.0,271,Work and Education,1970-01-01 08:31:00,44.31,2974.0,44.01
1621,10,24456.0,271,Leisure,1970-01-01 08:31:00,33.16,2152.0,31.84
1622,11,7195.0,271,Travel and Other,1970-01-01 08:31:00,9.75,648.0,9.59
1623,3,6500.0,271,Housework,1970-01-01 08:31:00,8.81,712.0,10.54
1624,4,2652.0,271,Child Care,1970-01-01 08:31:00,3.6,194.0,2.87
1625,5,272.0,271,Adult Care,1970-01-01 08:31:00,0.37,78.0,1.15
1626,6,32734.0,272,Work and Education,1970-01-01 08:32:00,44.38,2977.0,44.05
1627,10,24475.0,272,Leisure,1970-01-01 08:32:00,33.18,2153.0,31.86
1628,11,7148.0,272,Travel and Other,1970-01-01 08:32:00,9.69,652.0,9.65
1629,3,6518.0,272,Housework,1970-01-01 08:32:00,8.84,710.0,10.51
1630,4,2616.0,272,Child Care,1970-01-01 08:32:00,3.55,190.0,2.81
1631,5,267.0,272,Adult Care,1970-01-01 08:32:00,0.36,76.0,1.12
1632,6,32818.0,273,Work and Education,1970-01-01 08:33:00,44.49,2993.0,44.29
1633,10,24507.0,273,Leisure,1970-01-01 08:33:00,33.23,2160.0,31.96
1634,11,7040.0,273,Travel and Other,1970-01-01 08:33:00,9.54,630.0,9.32
1635,3,6505.0,273,Housework,1970-01-01 08:33:00,8.82,712.0,10.54
1636,4,2623.0,273,Child Care,1970-01-01 08:33:00,3.56,188.0,2.78
1637,5,265.0,273,Adult Care,1970-01-01 08:33:00,0.36,75.0,1.11
1638,6,32866.0,274,Work and Education,1970-01-01 08:34:00,44.56,2995.0,44.32
1639,10,24518.0,274,Leisure,1970-01-01 08:34:00,33.24,2165.0,32.04
1640,11,6979.0,274,Travel and Other,1970-01-01 08:34:00,9.46,623.0,9.22
1641,3,6501.0,274,Housework,1970-01-01 08:34:00,8.81,714.0,10.57
1642,4,2626.0,274,Child Care,1970-01-01 08:34:00,3.56,187.0,2.77
1643,5,268.0,274,Adult Care,1970-01-01 08:34:00,0.36,74.0,1.09
1644,6,32893.0,275,Work and Education,1970-01-01 08:35:00,44.6,2999.0,44.38
1645,10,24522.0,275,Leisure,1970-01-01 08:35:00,33.25,2166.0,32.05
1646,11,6946.0,275,Travel and Other,1970-01-01 08:35:00,9.42,623.0,9.22
1647,3,6508.0,275,Housework,1970-01-01 08:35:00,8.82,713.0,10.55
1648,4,2622.0,275,Child Care,1970-01-01 08:35:00,3.55,183.0,2.71
1649,5,267.0,275,Adult Care,1970-01-01 08:35:00,0.36,74.0,1.09
1650,6,33290.0,276,Work and Education,1970-01-01 08:36:00,45.13,3027.0,44.79
1651,10,24511.0,276,Leisure,1970-01-01 08:36:00,33.23,2182.0,32.29
1652,3,6525.0,276,Housework,1970-01-01 08:36:00,8.85,692.0,10.24
1653,11,6508.0,276,Travel and Other,1970-01-01 08:36:00,8.82,592.0,8.76
1654,4,2638.0,276,Child Care,1970-01-01 08:36:00,3.58,189.0,2.8
1655,5,286.0,276,Adult Care,1970-01-01 08:36:00,0.39,76.0,1.12
1656,6,33331.0,277,Work and Education,1970-01-01 08:37:00,45.19,3032.0,44.87
1657,10,24521.0,277,Leisure,1970-01-01 08:37:00,33.25,2182.0,32.29
1658,3,6514.0,277,Housework,1970-01-01 08:37:00,8.83,691.0,10.22
1659,11,6504.0,277,Travel and Other,1970-01-01 08:37:00,8.82,588.0,8.7
1660,4,2607.0,277,Child Care,1970-01-01 08:37:00,3.53,189.0,2.8
1661,5,281.0,277,Adult Care,1970-01-01 08:37:00,0.38,76.0,1.12
1662,6,33378.0,278,Work and Education,1970-01-01 08:38:00,45.25,3036.0,44.92
1663,10,24513.0,278,Leisure,1970-01-01 08:38:00,33.23,2180.0,32.26
1664,3,6526.0,278,Housework,1970-01-01 08:38:00,8.85,695.0,10.28
1665,11,6476.0,278,Travel and Other,1970-01-01 08:38:00,8.78,587.0,8.69
1666,4,2582.0,278,Child Care,1970-01-01 08:38:00,3.5,185.0,2.74
1667,5,283.0,278,Adult Care,1970-01-01 08:38:00,0.38,75.0,1.11
1668,6,33420.0,279,Work and Education,1970-01-01 08:39:00,45.31,3041.0,45.0
1669,10,24516.0,279,Leisure,1970-01-01 08:39:00,33.24,2178.0,32.23
1670,3,6525.0,279,Housework,1970-01-01 08:39:00,8.85,696.0,10.3
1671,11,6431.0,279,Travel and Other,1970-01-01 08:39:00,8.72,583.0,8.63
1672,4,2580.0,279,Child Care,1970-01-01 08:39:00,3.5,186.0,2.75
1673,5,286.0,279,Adult Care,1970-01-01 08:39:00,0.39,74.0,1.09
1674,6,33438.0,280,Work and Education,1970-01-01 08:40:00,45.33,3043.0,45.03
1675,10,24526.0,280,Leisure,1970-01-01 08:40:00,33.25,2177.0,32.21
1676,3,6528.0,280,Housework,1970-01-01 08:40:00,8.85,696.0,10.3
1677,11,6412.0,280,Travel and Other,1970-01-01 08:40:00,8.69,581.0,8.6
1678,4,2569.0,280,Child Care,1970-01-01 08:40:00,3.48,186.0,2.75
1679,5,285.0,280,Adult Care,1970-01-01 08:40:00,0.39,75.0,1.11
1680,6,33865.0,281,Work and Education,1970-01-01 08:41:00,45.91,3071.0,45.44
1681,10,24368.0,281,Leisure,1970-01-01 08:41:00,33.04,2174.0,32.17
1682,3,6552.0,281,Housework,1970-01-01 08:41:00,8.88,691.0,10.22
1683,11,6164.0,281,Travel and Other,1970-01-01 08:41:00,8.36,567.0,8.39
1684,4,2511.0,281,Child Care,1970-01-01 08:41:00,3.4,181.0,2.68
1685,5,298.0,281,Adult Care,1970-01-01 08:41:00,0.4,74.0,1.09
1686,6,33890.0,282,Work and Education,1970-01-01 08:42:00,45.95,3076.0,45.52
1687,10,24389.0,282,Leisure,1970-01-01 08:42:00,33.07,2170.0,32.11
1688,3,6566.0,282,Housework,1970-01-01 08:42:00,8.9,690.0,10.21
1689,11,6143.0,282,Travel and Other,1970-01-01 08:42:00,8.33,568.0,8.4
1690,4,2478.0,282,Child Care,1970-01-01 08:42:00,3.36,181.0,2.68
1691,5,292.0,282,Adult Care,1970-01-01 08:42:00,0.4,73.0,1.08
1692,6,33953.0,283,Work and Education,1970-01-01 08:43:00,46.03,3082.0,45.61
1693,10,24368.0,283,Leisure,1970-01-01 08:43:00,33.04,2171.0,32.12
1694,3,6574.0,283,Housework,1970-01-01 08:43:00,8.91,686.0,10.15
1695,11,6108.0,283,Travel and Other,1970-01-01 08:43:00,8.28,565.0,8.36
1696,4,2464.0,283,Child Care,1970-01-01 08:43:00,3.34,181.0,2.68
1697,5,291.0,283,Adult Care,1970-01-01 08:43:00,0.39,73.0,1.08
1698,6,33984.0,284,Work and Education,1970-01-01 08:44:00,46.08,3084.0,45.63
1699,10,24369.0,284,Leisure,1970-01-01 08:44:00,33.04,2171.0,32.12
1700,3,6581.0,284,Housework,1970-01-01 08:44:00,8.92,684.0,10.12
1701,11,6077.0,284,Travel and Other,1970-01-01 08:44:00,8.24,559.0,8.27
1702,4,2455.0,284,Child Care,1970-01-01 08:44:00,3.33,186.0,2.75
1703,5,292.0,284,Adult Care,1970-01-01 08:44:00,0.4,74.0,1.09
1704,6,34003.0,285,Work and Education,1970-01-01 08:45:00,46.1,3085.0,45.65
1705,10,24376.0,285,Leisure,1970-01-01 08:45:00,33.05,2172.0,32.14
1706,3,6573.0,285,Housework,1970-01-01 08:45:00,8.91,683.0,10.11
1707,11,6060.0,285,Travel and Other,1970-01-01 08:45:00,8.22,561.0,8.3
1708,4,2452.0,285,Child Care,1970-01-01 08:45:00,3.32,184.0,2.72
1709,5,294.0,285,Adult Care,1970-01-01 08:45:00,0.4,73.0,1.08
1710,6,34639.0,286,Work and Education,1970-01-01 08:46:00,46.96,3143.0,46.51
1711,10,23892.0,286,Leisure,1970-01-01 08:46:00,32.39,2130.0,31.52
1712,3,6565.0,286,Housework,1970-01-01 08:46:00,8.9,680.0,10.06
1713,11,6024.0,286,Travel and Other,1970-01-01 08:46:00,8.17,543.0,8.03
1714,4,2325.0,286,Child Care,1970-01-01 08:46:00,3.15,182.0,2.69
1715,5,313.0,286,Adult Care,1970-01-01 08:46:00,0.42,80.0,1.18
1716,6,34672.0,287,Work and Education,1970-01-01 08:47:00,47.01,3144.0,46.52
1717,10,23893.0,287,Leisure,1970-01-01 08:47:00,32.39,2129.0,31.5
1718,3,6567.0,287,Housework,1970-01-01 08:47:00,8.9,680.0,10.06
1719,11,6013.0,287,Travel and Other,1970-01-01 08:47:00,8.15,546.0,8.08
1720,4,2310.0,287,Child Care,1970-01-01 08:47:00,3.13,179.0,2.65
1721,5,303.0,287,Adult Care,1970-01-01 08:47:00,0.41,80.0,1.18
1722,6,34719.0,288,Work and Education,1970-01-01 08:48:00,47.07,3148.0,46.58
1723,10,23926.0,288,Leisure,1970-01-01 08:48:00,32.44,2130.0,31.52
1724,3,6568.0,288,Housework,1970-01-01 08:48:00,8.9,679.0,10.05
1725,11,5951.0,288,Travel and Other,1970-01-01 08:48:00,8.07,546.0,8.08
1726,4,2293.0,288,Child Care,1970-01-01 08:48:00,3.11,175.0,2.59
1727,5,301.0,288,Adult Care,1970-01-01 08:48:00,0.41,80.0,1.18
1728,6,34751.0,289,Work and Education,1970-01-01 08:49:00,47.11,3155.0,46.69
1729,10,23931.0,289,Leisure,1970-01-01 08:49:00,32.45,2130.0,31.52
1730,3,6565.0,289,Housework,1970-01-01 08:49:00,8.9,679.0,10.05
1731,11,5925.0,289,Travel and Other,1970-01-01 08:49:00,8.03,539.0,7.98
1732,4,2286.0,289,Child Care,1970-01-01 08:49:00,3.1,174.0,2.57
1733,5,300.0,289,Adult Care,1970-01-01 08:49:00,0.41,81.0,1.2
1734,6,34771.0,290,Work and Education,1970-01-01 08:50:00,47.14,3157.0,46.72
1735,10,23936.0,290,Leisure,1970-01-01 08:50:00,32.45,2127.0,31.47
1736,3,6573.0,290,Housework,1970-01-01 08:50:00,8.91,682.0,10.09
1737,11,5902.0,290,Travel and Other,1970-01-01 08:50:00,8.0,537.0,7.95
1738,4,2276.0,290,Child Care,1970-01-01 08:50:00,3.09,174.0,2.57
1739,5,300.0,290,Adult Care,1970-01-01 08:50:00,0.41,81.0,1.2
1740,6,35191.0,291,Work and Education,1970-01-01 08:51:00,47.71,3193.0,47.25
1741,10,23796.0,291,Leisure,1970-01-01 08:51:00,32.26,2107.0,31.18
1742,3,6557.0,291,Housework,1970-01-01 08:51:00,8.89,683.0,10.11
1743,11,5625.0,291,Travel and Other,1970-01-01 08:51:00,7.63,525.0,7.77
1744,4,2271.0,291,Child Care,1970-01-01 08:51:00,3.08,174.0,2.57
1745,5,318.0,291,Adult Care,1970-01-01 08:51:00,0.43,76.0,1.12
1746,6,35219.0,292,Work and Education,1970-01-01 08:52:00,47.75,3194.0,47.26
1747,10,23806.0,292,Leisure,1970-01-01 08:52:00,32.28,2116.0,31.31
1748,3,6571.0,292,Housework,1970-01-01 08:52:00,8.91,683.0,10.11
1749,11,5576.0,292,Travel and Other,1970-01-01 08:52:00,7.56,519.0,7.68
1750,4,2272.0,292,Child Care,1970-01-01 08:52:00,3.08,170.0,2.52
1751,5,314.0,292,Adult Care,1970-01-01 08:52:00,0.43,76.0,1.12
1752,6,35263.0,293,Work and Education,1970-01-01 08:53:00,47.81,3196.0,47.29
1753,10,23802.0,293,Leisure,1970-01-01 08:53:00,32.27,2115.0,31.3
1754,3,6582.0,293,Housework,1970-01-01 08:53:00,8.92,682.0,10.09
1755,11,5528.0,293,Travel and Other,1970-01-01 08:53:00,7.49,517.0,7.65
1756,4,2272.0,293,Child Care,1970-01-01 08:53:00,3.08,172.0,2.55
1757,5,311.0,293,Adult Care,1970-01-01 08:53:00,0.42,76.0,1.12
1758,6,35291.0,294,Work and Education,1970-01-01 08:54:00,47.85,3202.0,47.38
1759,10,23807.0,294,Leisure,1970-01-01 08:54:00,32.28,2115.0,31.3
1760,3,6583.0,294,Housework,1970-01-01 08:54:00,8.93,681.0,10.08
1761,11,5510.0,294,Travel and Other,1970-01-01 08:54:00,7.47,512.0,7.58
1762,4,2257.0,294,Child Care,1970-01-01 08:54:00,3.06,171.0,2.53
1763,5,310.0,294,Adult Care,1970-01-01 08:54:00,0.42,77.0,1.14
1764,6,35319.0,295,Work and Education,1970-01-01 08:55:00,47.88,3203.0,47.4
1765,10,23809.0,295,Leisure,1970-01-01 08:55:00,32.28,2115.0,31.3
1766,3,6582.0,295,Housework,1970-01-01 08:55:00,8.92,680.0,10.06
1767,11,5486.0,295,Travel and Other,1970-01-01 08:55:00,7.44,510.0,7.55
1768,4,2253.0,295,Child Care,1970-01-01 08:55:00,3.05,172.0,2.55
1769,5,309.0,295,Adult Care,1970-01-01 08:55:00,0.42,78.0,1.15
1770,6,35707.0,296,Work and Education,1970-01-01 08:56:00,48.41,3240.0,47.94
1771,10,23754.0,296,Leisure,1970-01-01 08:56:00,32.21,2114.0,31.28
1772,3,6592.0,296,Housework,1970-01-01 08:56:00,8.94,670.0,9.91
1773,11,5138.0,296,Travel and Other,1970-01-01 08:56:00,6.97,489.0,7.24
1774,4,2250.0,296,Child Care,1970-01-01 08:56:00,3.05,168.0,2.49
1775,5,317.0,296,Adult Care,1970-01-01 08:56:00,0.43,77.0,1.14
1776,6,35741.0,297,Work and Education,1970-01-01 08:57:00,48.46,3246.0,48.03
1777,10,23761.0,297,Leisure,1970-01-01 08:57:00,32.21,2115.0,31.3
1778,3,6600.0,297,Housework,1970-01-01 08:57:00,8.95,672.0,9.94
1779,11,5097.0,297,Travel and Other,1970-01-01 08:57:00,6.91,482.0,7.13
1780,4,2242.0,297,Child Care,1970-01-01 08:57:00,3.04,165.0,2.44
1781,5,317.0,297,Adult Care,1970-01-01 08:57:00,0.43,78.0,1.15
1782,6,35792.0,298,Work and Education,1970-01-01 08:58:00,48.53,3254.0,48.15
1783,10,23769.0,298,Leisure,1970-01-01 08:58:00,32.23,2110.0,31.22
1784,3,6594.0,298,Housework,1970-01-01 08:58:00,8.94,675.0,9.99
1785,11,5059.0,298,Travel and Other,1970-01-01 08:58:00,6.86,479.0,7.09
1786,4,2229.0,298,Child Care,1970-01-01 08:58:00,3.02,163.0,2.41
1787,5,315.0,298,Adult Care,1970-01-01 08:58:00,0.43,77.0,1.14
1788,6,35826.0,299,Work and Education,1970-01-01 08:59:00,48.57,3257.0,48.19
1789,10,23787.0,299,Leisure,1970-01-01 08:59:00,32.25,2108.0,31.19
1790,3,6589.0,299,Housework,1970-01-01 08:59:00,8.93,674.0,9.97
1791,11,5022.0,299,Travel and Other,1970-01-01 08:59:00,6.81,476.0,7.04
1792,4,2219.0,299,Child Care,1970-01-01 08:59:00,3.01,165.0,2.44
1793,5,315.0,299,Adult Care,1970-01-01 08:59:00,0.43,78.0,1.15
1794,6,35858.0,300,Work and Education,1970-01-01 09:00:00,48.62,3258.0,48.21
1795,10,23794.0,300,Leisure,1970-01-01 09:00:00,32.26,2109.0,31.21
1796,3,6590.0,300,Housework,1970-01-01 09:00:00,8.93,675.0,9.99
1797,11,4991.0,300,Travel and Other,1970-01-01 09:00:00,6.77,473.0,7.0
1798,4,2212.0,300,Child Care,1970-01-01 09:00:00,3.0,166.0,2.46
1799,5,313.0,300,Adult Care,1970-01-01 09:00:00,0.42,77.0,1.14
1800,6,35514.0,301,Work and Education,1970-01-01 09:01:00,48.15,3229.0,47.78
1801,10,22433.0,301,Leisure,1970-01-01 09:01:00,30.41,1990.0,29.45
1802,3,7472.0,301,Housework,1970-01-01 09:01:00,10.13,738.0,10.92
1803,11,5698.0,301,Travel and Other,1970-01-01 09:01:00,7.73,536.0,7.93
1804,4,2290.0,301,Child Care,1970-01-01 09:01:00,3.1,176.0,2.6
1805,5,351.0,301,Adult Care,1970-01-01 09:01:00,0.48,89.0,1.32
1806,6,35567.0,302,Work and Education,1970-01-01 09:02:00,48.22,3233.0,47.84
1807,10,22432.0,302,Leisure,1970-01-01 09:02:00,30.41,1988.0,29.42
1808,3,7477.0,302,Housework,1970-01-01 09:02:00,10.14,739.0,10.94
1809,11,5650.0,302,Travel and Other,1970-01-01 09:02:00,7.66,534.0,7.9
1810,4,2280.0,302,Child Care,1970-01-01 09:02:00,3.09,174.0,2.57
1811,5,352.0,302,Adult Care,1970-01-01 09:02:00,0.48,90.0,1.33
1812,6,35613.0,303,Work and Education,1970-01-01 09:03:00,48.28,3236.0,47.88
1813,10,22467.0,303,Leisure,1970-01-01 09:03:00,30.46,1994.0,29.51
1814,3,7465.0,303,Housework,1970-01-01 09:03:00,10.12,736.0,10.89
1815,11,5592.0,303,Travel and Other,1970-01-01 09:03:00,7.58,530.0,7.84
1816,4,2273.0,303,Child Care,1970-01-01 09:03:00,3.08,173.0,2.56
1817,5,348.0,303,Adult Care,1970-01-01 09:03:00,0.47,89.0,1.32
1818,6,35645.0,304,Work and Education,1970-01-01 09:04:00,48.33,3238.0,47.91
1819,10,22465.0,304,Leisure,1970-01-01 09:04:00,30.46,1993.0,29.49
1820,3,7462.0,304,Housework,1970-01-01 09:04:00,10.12,738.0,10.92
1821,11,5551.0,304,Travel and Other,1970-01-01 09:04:00,7.53,527.0,7.8
1822,4,2287.0,304,Child Care,1970-01-01 09:04:00,3.1,170.0,2.52
1823,5,348.0,304,Adult Care,1970-01-01 09:04:00,0.47,92.0,1.36
1824,6,35664.0,305,Work and Education,1970-01-01 09:05:00,48.35,3242.0,47.97
1825,10,22483.0,305,Leisure,1970-01-01 09:05:00,30.48,1998.0,29.56
1826,3,7461.0,305,Housework,1970-01-01 09:05:00,10.12,736.0,10.89
1827,11,5525.0,305,Travel and Other,1970-01-01 09:05:00,7.49,521.0,7.71
1828,4,2282.0,305,Child Care,1970-01-01 09:05:00,3.09,170.0,2.52
1829,5,343.0,305,Adult Care,1970-01-01 09:05:00,0.47,91.0,1.35
1830,6,35985.0,306,Work and Education,1970-01-01 09:06:00,48.79,3257.0,48.19
1831,10,22582.0,306,Leisure,1970-01-01 09:06:00,30.62,2002.0,29.62
1832,3,7461.0,306,Housework,1970-01-01 09:06:00,10.12,733.0,10.85
1833,11,5119.0,306,Travel and Other,1970-01-01 09:06:00,6.94,499.0,7.38
1834,4,2263.0,306,Child Care,1970-01-01 09:06:00,3.07,172.0,2.55
1835,5,348.0,306,Adult Care,1970-01-01 09:06:00,0.47,95.0,1.41
1836,6,36022.0,307,Work and Education,1970-01-01 09:07:00,48.84,3258.0,48.21
1837,10,22569.0,307,Leisure,1970-01-01 09:07:00,30.6,1999.0,29.58
1838,3,7472.0,307,Housework,1970-01-01 09:07:00,10.13,735.0,10.88
1839,11,5101.0,307,Travel and Other,1970-01-01 09:07:00,6.92,502.0,7.43
1840,4,2249.0,307,Child Care,1970-01-01 09:07:00,3.05,170.0,2.52
1841,5,345.0,307,Adult Care,1970-01-01 09:07:00,0.47,94.0,1.39
1842,6,36056.0,308,Work and Education,1970-01-01 09:08:00,48.88,3262.0,48.27
1843,10,22594.0,308,Leisure,1970-01-01 09:08:00,30.63,1999.0,29.58
1844,3,7479.0,308,Housework,1970-01-01 09:08:00,10.14,738.0,10.92
1845,11,5043.0,308,Travel and Other,1970-01-01 09:08:00,6.84,496.0,7.34
1846,4,2243.0,308,Child Care,1970-01-01 09:08:00,3.04,169.0,2.5
1847,5,343.0,308,Adult Care,1970-01-01 09:08:00,0.47,94.0,1.39
1848,6,36080.0,309,Work and Education,1970-01-01 09:09:00,48.92,3259.0,48.22
1849,10,22595.0,309,Leisure,1970-01-01 09:09:00,30.63,2004.0,29.65
1850,3,7476.0,309,Housework,1970-01-01 09:09:00,10.14,734.0,10.86
1851,11,5013.0,309,Travel and Other,1970-01-01 09:09:00,6.8,496.0,7.34
1852,4,2249.0,309,Child Care,1970-01-01 09:09:00,3.05,169.0,2.5
1853,5,345.0,309,Adult Care,1970-01-01 09:09:00,0.47,96.0,1.42
1854,6,36091.0,310,Work and Education,1970-01-01 09:10:00,48.93,3261.0,48.25
1855,10,22602.0,310,Leisure,1970-01-01 09:10:00,30.64,2004.0,29.65
1856,3,7471.0,310,Housework,1970-01-01 09:10:00,10.13,733.0,10.85
1857,11,5000.0,310,Travel and Other,1970-01-01 09:10:00,6.78,497.0,7.35
1858,4,2249.0,310,Child Care,1970-01-01 09:10:00,3.05,168.0,2.49
1859,5,345.0,310,Adult Care,1970-01-01 09:10:00,0.47,95.0,1.41
1860,6,36543.0,311,Work and Education,1970-01-01 09:11:00,49.54,3291.0,48.7
1861,10,22474.0,311,Leisure,1970-01-01 09:11:00,30.47,1985.0,29.37
1862,3,7437.0,311,Housework,1970-01-01 09:11:00,10.08,732.0,10.83
1863,11,4748.0,311,Travel and Other,1970-01-01 09:11:00,6.44,488.0,7.22
1864,4,2199.0,311,Child Care,1970-01-01 09:11:00,2.98,164.0,2.43
1865,5,357.0,311,Adult Care,1970-01-01 09:11:00,0.48,98.0,1.45
1866,6,36569.0,312,Work and Education,1970-01-01 09:12:00,49.58,3295.0,48.76
1867,10,22486.0,312,Leisure,1970-01-01 09:12:00,30.49,1982.0,29.33
1868,3,7436.0,312,Housework,1970-01-01 09:12:00,10.08,737.0,10.91
1869,11,4727.0,312,Travel and Other,1970-01-01 09:12:00,6.41,484.0,7.16
1870,4,2185.0,312,Child Care,1970-01-01 09:12:00,2.96,164.0,2.43
1871,5,355.0,312,Adult Care,1970-01-01 09:12:00,0.48,96.0,1.42
1872,6,36614.0,313,Work and Education,1970-01-01 09:13:00,49.64,3295.0,48.76
1873,10,22482.0,313,Leisure,1970-01-01 09:13:00,30.48,1992.0,29.48
1874,3,7438.0,313,Housework,1970-01-01 09:13:00,10.08,737.0,10.91
1875,11,4695.0,313,Travel and Other,1970-01-01 09:13:00,6.37,478.0,7.07
1876,4,2182.0,313,Child Care,1970-01-01 09:13:00,2.96,162.0,2.4
1877,5,347.0,313,Adult Care,1970-01-01 09:13:00,0.47,94.0,1.39
1878,6,36632.0,314,Work and Education,1970-01-01 09:14:00,49.67,3297.0,48.79
1879,10,22500.0,314,Leisure,1970-01-01 09:14:00,30.51,1996.0,29.54
1880,3,7434.0,314,Housework,1970-01-01 09:14:00,10.08,737.0,10.91
1881,11,4666.0,314,Travel and Other,1970-01-01 09:14:00,6.33,471.0,6.97
1882,4,2177.0,314,Child Care,1970-01-01 09:14:00,2.95,162.0,2.4
1883,5,349.0,314,Adult Care,1970-01-01 09:14:00,0.47,95.0,1.41
1884,6,36644.0,315,Work and Education,1970-01-01 09:15:00,49.68,3300.0,48.83
1885,10,22495.0,315,Leisure,1970-01-01 09:15:00,30.5,1991.0,29.46
1886,3,7443.0,315,Housework,1970-01-01 09:15:00,10.09,736.0,10.89
1887,11,4653.0,315,Travel and Other,1970-01-01 09:15:00,6.31,475.0,7.03
1888,4,2174.0,315,Child Care,1970-01-01 09:15:00,2.95,162.0,2.4
1889,5,349.0,315,Adult Care,1970-01-01 09:15:00,0.47,94.0,1.39
1890,6,37665.0,316,Work and Education,1970-01-01 09:16:00,51.07,3378.0,49.99
1891,10,21619.0,316,Leisure,1970-01-01 09:16:00,29.31,1947.0,28.81
1892,3,7452.0,316,Housework,1970-01-01 09:16:00,10.1,726.0,10.74
1893,11,4499.0,316,Travel and Other,1970-01-01 09:16:00,6.1,454.0,6.72
1894,4,2158.0,316,Child Care,1970-01-01 09:16:00,2.93,160.0,2.37
1895,5,365.0,316,Adult Care,1970-01-01 09:16:00,0.49,93.0,1.38
1896,6,37688.0,317,Work and Education,1970-01-01 09:17:00,51.1,3381.0,50.03
1897,10,21635.0,317,Leisure,1970-01-01 09:17:00,29.33,1951.0,28.87
1898,3,7444.0,317,Housework,1970-01-01 09:17:00,10.09,724.0,10.71
1899,11,4479.0,317,Travel and Other,1970-01-01 09:17:00,6.07,450.0,6.66
1900,4,2152.0,317,Child Care,1970-01-01 09:17:00,2.92,159.0,2.35
1901,5,360.0,317,Adult Care,1970-01-01 09:17:00,0.49,93.0,1.38
1902,6,37722.0,318,Work and Education,1970-01-01 09:18:00,51.14,3383.0,50.06
1903,10,21647.0,318,Leisure,1970-01-01 09:18:00,29.35,1948.0,28.83
1904,3,7439.0,318,Housework,1970-01-01 09:18:00,10.09,724.0,10.71
1905,11,4434.0,318,Travel and Other,1970-01-01 09:18:00,6.01,451.0,6.67
1906,4,2159.0,318,Child Care,1970-01-01 09:18:00,2.93,161.0,2.38
1907,5,357.0,318,Adult Care,1970-01-01 09:18:00,0.48,91.0,1.35
1908,6,37736.0,319,Work
"""
# Controller for Line-Following Robot
# This runs on an Adafruit Feather M4, with a MiniTFT board.
# It drives a TB6612 to control 2 DC Motors (in blue servo case)
# and talks over I2C to an ItsyBitsy that interfaces a Pololu
# line following sensor
#
# Author(s): <NAME>
# Module: mode_config.py generates and manages a menu that presents
# various configuration parameters and allows user to change them
#
# github: https://github.com/dnkorte/linefollower_controller
#
# MIT License
#
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""
from adafruit_display_text import label
import terminalio
import displayio
import time
import mycolors
class Mode_Config:
def __init__(self, tft_device):
self.this_tft = tft_device
        self.cur_selected_list_item = 0
        self.first_item_to_show = 0
self.menu_items = [
["Throttle", "THR"],
["Loop Speed", "LPS"],
["Rxn Rate", "RR"],
["Rxn Limit", "RL"],
["Runtime Disp", "DSP"],
]
self.num_menu_items = len(self.menu_items)
        # menu options for configuration parameters
        # note that runtime display options use about 4 ms per loop if enabled
# fmt:off
self.throttle_options = [ 0.2, 0.3, 0.4, 0.5, 0.6, 0.7 ]
self.throttle_index = 2
self.loop_speed_options = [
0.012, 0.015, 0.018, 0.02, 0.03, 0.04, 0.05, 0.1, 0.15, 0.2 ]
self.loop_speed_index = 0
self.rxn_rate_options = [
0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.2, 1.3, 1.4, 1.6, 1.8, 2.0, 3.0 ]
self.rxn_rate_index = 8
self.rxn_limit_options = [
0.8, 0.9, 1.0, 1.2, 1.3, 1.4, 1.8, 2.0, 3.0 ]
self.rxn_limit_index = 7
self.showdisp_options = [ "No", "Yes" ]
self.showdisp_index = 0
# fmt:on
# actual configuration parameters
self.throttle = self.throttle_options[self.throttle_index]
self.loop_speed = self.loop_speed_options[self.loop_speed_index]
# 1=super fast rxns; 0.2 = really sluggish
self.rxn_rate = self.rxn_rate_options[self.rxn_rate_index]
# can limit big swings even if high gain (rate)
self.rxn_limit = self.rxn_limit_options[self.rxn_limit_index]
        # turning it off saves about 8 ms per loop
self.show_runtime_display = self.showdisp_options[self.showdisp_index]
self.this_group = displayio.Group(max_size=10)
self.textbox_1 = label.Label(
terminalio.FONT, text="", max_glyphs=36, color=mycolors.YELLOW, x=2, y=0
)
self.this_group.append(self.textbox_1)
self.textbox_2 = label.Label(
terminalio.FONT, text="", max_glyphs=36, color=mycolors.YELLOW, x=2, y=14
)
self.this_group.append(self.textbox_2)
self.textbox_3 = label.Label(
terminalio.FONT, text="", max_glyphs=36, color=mycolors.GRAY, x=12, y=28
)
self.this_group.append(self.textbox_3)
self.textbox_3v = label.Label(
terminalio.FONT, text="", max_glyphs=36, color=mycolors.GRAY, x=100, y=28
)
self.this_group.append(self.textbox_3v)
self.textbox_4 = label.Label(
terminalio.FONT, text="", max_glyphs=36, color=mycolors.GRAY, x=12, y=40
)
self.this_group.append(self.textbox_4)
self.textbox_4v = label.Label(
terminalio.FONT, text="", max_glyphs=36, color=mycolors.GRAY, x=100, y=40
)
self.this_group.append(self.textbox_4v)
self.textbox_5 = label.Label(
terminalio.FONT, text="", max_glyphs=36, color=mycolors.GRAY, x=12, y=52
)
self.this_group.append(self.textbox_5)
self.textbox_5v = label.Label(
terminalio.FONT, text="", max_glyphs=36, color=mycolors.GRAY, x=100, y=52
)
self.this_group.append(self.textbox_5v)
self.textbox_6 = label.Label(
terminalio.FONT, text="", max_glyphs=36, color=mycolors.YELLOW, x=2, y=66
)
self.this_group.append(self.textbox_6)
self.textbox_3.text = self.menu_items[0][0]
self.textbox_3.color = mycolors.WHITE
self.textbox_4.text = self.menu_items[1][0]
self.textbox_5.text = self.menu_items[2][0]
self.textbox_3v.text = self._get_param(self.menu_items[0][1])
self.textbox_3v.color = mycolors.WHITE
self.textbox_4v.text = self._get_param(self.menu_items[1][1])
self.textbox_5v.text = self._get_param(self.menu_items[2][1])
def show_this_screen(self):
self.this_tft.display.show(self.this_group)
    # this function initiates the mode and runs it until done; by convention
    # run_mode() returns a text string indicating the next mode, though this
    # configuration menu simply returns once the user clicks A to exit
def run_mode(self):
self.show_this_screen()
self.textbox_1.text = "UP / DOWN select param"
self.textbox_2.text = "LEFT / RIGHT chg param"
self.textbox_6.text = "Click A to exit"
while True:
# note possibilities are buttons.up buttons.down buttons.left
# buttons.right buttons.select buttons.a buttons.b
buttons = self.this_tft.buttons
mustScrollDisplay = False
mustUpdateValues = False
if buttons.up:
# print("Button UP!")
still_pressed = True
while still_pressed:
buttons = self.this_tft.buttons
still_pressed = buttons.up
time.sleep(0.05)
# print("released")
self.cur_selected_list_item -= 1
if self.cur_selected_list_item < 0:
self.cur_selected_list_item = self.num_menu_items - 1
elif buttons.down:
# print("Button Down!")
still_pressed = True
while still_pressed:
buttons = self.this_tft.buttons
still_pressed = buttons.down
time.sleep(0.05)
# print("released")
self.cur_selected_list_item += 1
if self.cur_selected_list_item >= self.num_menu_items:
self.cur_selected_list_item = 0
            elif buttons.left:
                # print("Button LEFT!")
                still_pressed = True
                while still_pressed:
                    buttons = self.this_tft.buttons
                    still_pressed = buttons.left
                    time.sleep(0.05)
                # print("released")
temp = self._scroll_param(
self.menu_items[self.cur_selected_list_item][1], -1
)
# print(temp)
mustUpdateValues = True
            elif buttons.right:
                # print("Button RIGHT!")
                still_pressed = True
                while still_pressed:
                    buttons = self.this_tft.buttons
                    still_pressed = buttons.right
                    time.sleep(0.05)
                # print("released")
temp = self._scroll_param(
self.menu_items[self.cur_selected_list_item][1], +1
)
# print(temp)
mustUpdateValues = True
elif buttons.a:
# print("Button A!")
still_pressed = True
while still_pressed:
buttons = self.this_tft.buttons
still_pressed = buttons.a
time.sleep(0.05)
return
else:
pass
self.textbox_3.color = mycolors.GRAY
self.textbox_4.color = mycolors.GRAY
self.textbox_5.color = mycolors.GRAY
self.textbox_3v.color = mycolors.GRAY
self.textbox_4v.color = mycolors.GRAY
self.textbox_5v.color = mycolors.GRAY
if self.cur_selected_list_item - self.first_item_to_show >= 3:
# self.first_item_to_show = self.cur_selected_list_item - 4
self.first_item_to_show += 1
mustScrollDisplay = True
if self.cur_selected_list_item - self.first_item_to_show < 0:
# self.first_item_to_show = self.cur_selected_list_item - 4
self.first_item_to_show -= 1
mustScrollDisplay = True
# now if we moved beyond the 3 showable items, scroll the list on display
if mustScrollDisplay or mustUpdateValues:
self.textbox_3.text = self.menu_items[self.first_item_to_show + 0][0]
self.textbox_3v.text = self._get_param(
self.menu_items[self.first_item_to_show + 0][1]
)
if self.first_item_to_show + 1 < self.num_menu_items:
self.textbox_4.text = self.menu_items[self.first_item_to_show + 1][
0
]
self.textbox_4v.text = self._get_param(
self.menu_items[self.first_item_to_show + 1][1]
)
else:
self.textbox_4.text = ""
self.textbox_4v.text = ""
if self.first_item_to_show + 2 < self.num_menu_items:
self.textbox_5.text = self.menu_items[self.first_item_to_show + 2][
0
]
self.textbox_5v.text = self._get_param(
self.menu_items[self.first_item_to_show + 2][1]
)
else:
self.textbox_5.text = ""
self.textbox_5v.text = ""
            # and finally, move the highlight colors onto the current item
if (self.first_item_to_show + 0) == self.cur_selected_list_item:
self.textbox_3.color = mycolors.WHITE
self.textbox_3v.color = mycolors.GREEN
if (self.first_item_to_show + 1) == self.cur_selected_list_item:
self.textbox_4.color = mycolors.WHITE
self.textbox_4v.color = mycolors.GREEN
if (self.first_item_to_show + 2) == self.cur_selected_list_item:
self.textbox_5.color = mycolors.WHITE
self.textbox_5v.color = mycolors.GREEN
time.sleep(0.1)
def _scroll_param(self, param, updown):
if param == "THR":
temp = self._scroll_throttle(updown)
elif param == "LPS":
temp = self._scroll_loop_speed(updown)
elif param == "RR":
temp = self._scroll_rxn_rate(updown)
elif param == "RL":
temp = self._scroll_rxn_limit(updown)
elif param == "DSP":
temp = self._scroll_showdisp(updown)
else:
temp = 0
return temp
def _get_param(self, param):
if param == "THR":
temp = self.get_throttle()
elif param == "LPS":
temp = self.get_loop_speed()
elif param == "RR":
temp = self.get_rxn_rate()
elif param == "RL":
temp = self.get_rxn_limit()
elif param == "DSP":
temp = self.get_showdisp()
else:
temp = 0
return temp
def get_throttle(self):
return self.throttle
# updown determines direction : (-) scrolls down, (+) scrolls up
def _scroll_throttle(self, updown):
if updown < 0:
self.throttle_index -= 1
if self.throttle_index < 0:
self.throttle_index = len(self.throttle_options) - 1
else:
self.throttle_index += 1
if self.throttle_index > (len(self.throttle_options) - 1):
self.throttle_index = 0
self.throttle = self.throttle_options[self.throttle_index]
return self.throttle
def get_loop_speed(self):
return self.loop_speed
# updown determines direction : (-) scrolls down, (+) scrolls up
def _scroll_loop_speed(self, updown):
if updown < 0:
self.loop_speed_index -= 1
if self.loop_speed_index < 0:
self.loop_speed_index = len(self.loop_speed_options) - 1
else:
self.loop_speed_index += 1
if self.loop_speed_index > (len(self.loop_speed_options) - 1):
self.loop_speed_index = 0
self.loop_speed = self.loop_speed_options[self.loop_speed_index]
return self.loop_speed
def get_rxn_rate(self):
return self.rxn_rate
# updown determines direction : (-) scrolls down, (+) scrolls up
def _scroll_rxn_rate(self, updown):
if updown < 0:
self.rxn_rate_index -= 1
if self.rxn_rate_index < 0:
self.rxn_rate_index = len(self.rxn_rate_options) - 1
else:
self.rxn_rate_index += 1
if self.rxn_rate_index > (len(self.rxn_rate_options) - 1):
self.rxn_rate_index = 0
self.rxn_rate = self.rxn_rate_options[self.rxn_rate_index]
return self.rxn_rate
def get_rxn_limit(self):
return self.rxn_limit
# updown determines direction : (-) scrolls down, (+) scrolls up
def _scroll_rxn_limit(self, updown):
if updown < 0:
self.rxn_limit_index -= 1
if self.rxn_limit_index < 0:
self.rxn_limit_index = len(self.rxn_limit_options) - 1
else:
self.rxn_limit_index += 1
if self.rxn_limit_index > (len(self.rxn_limit_options) - 1):
self.rxn_limit_index = 0
self.rxn_limit = self.rxn_limit_options[self.rxn_limit_index]
return self.rxn_limit
def get_showdisp(self):
return self.show_runtime_display
def _scroll_showdisp(self, updown):
if updown < 0:
self.showdisp_index -= 1
if self.showdisp_index < 0:
self.showdisp_index = len(self.showdisp_options) - 1
else:
            self.showdisp_index += 1
            if self.showdisp_index > (len(self.showdisp_options) - 1):
                self.showdisp_index = 0
        self.show_runtime_display = self.showdisp_options[self.showdisp_index]
        return self.show_runtime_display
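
# --- Hedged sketch (not part of the original module): the five _scroll_*
# methods above repeat one wrap-around pattern; a generic helper such as the
# hypothetical cycle_index() below captures it in a single place.
def cycle_index(index, length, updown):
    # step down for updown < 0, up otherwise, wrapping at the list ends
    return (index + (-1 if updown < 0 else 1)) % length

# e.g. cycle_index(0, 6, -1) == 5 and cycle_index(5, 6, +1) == 0, matching the
# explicit boundary checks in _scroll_throttle() and friends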
'A069330', 'A049120', 'A114810', 'A054040', 'A066670',
'A144960', 'A101490', 'A037330', 'A011790', 'A073110', 'A102710', 'A149950', 'A170920', 'A027580',
'A250930', 'A094970', 'A056700', 'A031330', 'A178780', 'A090740', 'A104830', 'A105330', 'A278280',
'A033640', 'A155650', 'A123860', 'A036830', 'A120110', 'A089010', 'A357780', 'A121850', 'A050760',
'A067770', 'A051910', 'A080530', 'A041520', 'A062860', 'A108320', 'A004710', 'A095500', 'A036810',
'A065680', 'A051360'],
'공기청정기': ['A071460', 'A016920', 'A021240', 'A045520', 'A067170', 'A284740', 'A043260', 'A044340', 'A058610',
'A005930', 'A002700', 'A066570'],
'패션/의류': ['A105630', 'A194370', 'A008600', 'A298540', 'A306040', 'A111770', 'A383220', 'A064800', 'A101140',
'A069640', 'A090370', 'A036620', 'A204020', 'A111110', 'A007700', 'A102280', 'A033290', 'A900110',
'A241590', 'A008290', 'A267790', 'A007980', 'A047770', 'A002070', 'A110790', 'A093050', 'A033340',
'A065060', 'A058530', 'A004060', 'A005800', 'A031430', 'A000680', 'A016090', 'A006060', 'A014990',
'A120110', 'A005390', 'A225590', 'A001460', 'A215480', 'A005320', 'A308100', 'A084870', 'A323230',
'A011080', 'A081660', 'A039980', 'A337930', 'A028260', 'A020000', 'A093240', 'A098660', 'A009270',
'A026040', 'A088790'],
'DMZ 평화공원': ['A010820', 'A004920', 'A131180', 'A033170', 'A070960', 'A056080', 'A020710', 'A006920', 'A101670',
'A140520', 'A035890', 'A065950', 'A001840', 'A069140', 'A043910', 'A014970', 'A007110', 'A071950',
'A196700', 'A005740'],
'인터넷은행': ['A041460', 'A030790', 'A046440', 'A064260', 'A030200', 'A067920', 'A052400', 'A131090', 'A105560',
'A035720', 'A042510', 'A131370', 'A205100', 'A088350', 'A016450', 'A090850', 'A053280', 'A053300',
'A071050', 'A053350', 'A035600', 'A203650', 'A064480', 'A115310', 'A214180', 'A007070', 'A054920',
'A005940', 'A251270', 'A022100', 'A100030', 'A323410', 'A030190', 'A039290'],
'시스템반도체': ['A059120', 'A038880', 'A102120', 'A033170', 'A200710', 'A045970', 'A317120', 'A000990', 'A101490',
'A042700', 'A303030', 'A058470', 'A067310', 'A089030', 'A131970', 'A330860', 'A054450', 'A003160',
'A036540', 'A093640', 'A089530', 'A092600', 'A054630', 'A097800', 'A117670', 'A052860', 'A005930',
'A033640', 'A123860', 'A319660', 'A061970', 'A000660', 'A062860', 'A119830', 'A108320', 'A098460',
'A376190', 'A080520'],
'RFID(NFC 등)': ['A060230', 'A070300', 'A091700', 'A030200', 'A052400', 'A017670', 'A038680', 'A096690', 'A052710',
'A005930', 'A034730', 'A038620', 'A088800', 'A224110', 'A040160', 'A065450', 'A089850'],
'OLED(유기 발광 다이오드)': ['A321260', 'A096870', 'A226440', 'A272290', 'A138690', 'A131290', 'A090360', 'A213420',
'A121600', 'A090470', 'A114810', 'A054620', 'A265520', 'A317330', 'A068790', 'A239890',
'A144960', 'A141000', 'A079950', 'A117730', 'A320000', 'A108230', 'A131760', 'A065130',
'A178920', 'A088130', 'A080000', 'A351320', 'A238490', 'A083310', 'A094970', 'A171010',
'A250930', 'A336370', 'A160600', 'A257370', 'A297890', 'A104830', 'A005930', 'A009310',
'A040910', 'A090740', 'A278280', 'A155650', 'A171090', 'A240810', 'A066570', 'A083500',
'A110990', 'A059100', 'A056190', 'A347770', 'A115440', 'A255440', 'A256630', 'A187870',
'A078150', 'A256940', 'A089980', 'A143540', 'A161580', 'A014680', 'A051910', 'A080530',
'A036930', 'A122640', 'A108320', 'A063760', 'A034220', 'A177350', 'A290520', 'A272110',
'A083930', 'A049950', 'A251630', 'A267320', 'A136510'],
'사물인터넷': ['A041460', 'A101390', 'A032750', 'A060230', 'A049080', 'A042500', 'A200710', 'A093230', 'A038680',
'A036690', 'A060570', 'A078890', 'A056080', 'A131970', 'A091440', 'A264450', 'A018260', 'A069410',
'A094280', 'A054630', 'A018290', 'A117670', 'A032640', 'A015710', 'A046970', 'A223310', 'A054940',
'A039420', 'A220180', 'A131220', 'A038620', 'A039560', 'A049470', 'A049480', 'A184230', 'A049550',
'A040160', 'A192250', 'A045660', 'A039570'],
'블록체인': ['A027830', 'A041460', 'A030200', 'A286940', 'A017670', 'A131090', 'A035720', 'A042510', 'A950110',
'A018260', 'A139050', 'A000700', 'A112040', 'A003550', 'A053350', 'A039420', 'A141020', 'A203650',
'A047080', 'A034730', 'A035420', 'A054920', 'A184230', 'A044380', 'A150900', 'A068940', 'A192250',
'A085910'],
'클라우드 컴퓨팅': ['A064800', 'A304100', 'A242040', 'A030200', 'A290270', 'A017670', 'A170790', 'A046110', 'A079940',
'A036690', 'A205100', 'A053800', 'A060850', 'A018260', 'A069410', 'A094280', 'A072130', 'A010280',
'A307870', 'A012510', 'A034730', 'A012030', 'A041020', 'A049470', 'A022100', 'A100030', 'A030520',
'A184230', 'A093320', 'A023590', 'A192250'],
'웹툰': ['A236810', 'A035720', 'A048910', 'A263720', 'A020120', 'A181710', 'A207760', 'A035420'],
'인터넷 대표주': ['A035720', 'A035420'],
'화장품': ['A204630', 'A252500', 'A018700', 'A257720', 'A950140', 'A226340', 'A064090', 'A192440', 'A053980',
'A241710', 'A037270', 'A196300', 'A159580', 'A093230', 'A090370', 'A298060', 'A115960', 'A352480',
'A007390', 'A265740', 'A214420', 'A016100', 'A086710', 'A123330', 'A066980', 'A003580', 'A021240',
'A227610', 'A078140', 'A027050', 'A900300', 'A219550', 'A260930', 'A290650', 'A051900', 'A003350',
'A203690', 'A175250', 'A090430', 'A237880', 'A214370', 'A052260', 'A018290', 'A005690', 'A244460',
'A159910', 'A263920', 'A069110', 'A083660', 'A031430', 'A137940', 'A091090', 'A092730', 'A058530',
'A025620', 'A161890', 'A000970', 'A002840', 'A006620', 'A114840', 'A078520', 'A200130', 'A002720',
'A065170', 'A019660', 'A217730', 'A018250', 'A038460', 'A226320', 'A900310', 'A214150', 'A032980',
'A080530', 'A097950', 'A123690', 'A049830', 'A101240', 'A002210', 'A251970', 'A214260', 'A168330',
'A217480', 'A024720', 'A192820', 'A048410', 'A377220'],
'가상현실(VR)': ['A054210', 'A101390', 'A018700', 'A299900', 'A064260', 'A230980', 'A190510', 'A094360', 'A035620',
'A020710', 'A067160', 'A193250', 'A206560', 'A005930', 'A058610', 'A043610', 'A053110', 'A033320',
'A047080', 'A096630', 'A201490', 'A251270', 'A045340', 'A066570', 'A044380', 'A053450', 'A066310',
'A126700', 'A030350', 'A094170'],
'LED': ['A054090', 'A096870', 'A092460', 'A214310', 'A290690', 'A065690', 'A121600', 'A382800', 'A106240',
'A192650', 'A092190', 'A214330', 'A153490', 'A043260', 'A011070', 'A033050', 'A033180', 'A307180',
'A032500', 'A025540', 'A127160', 'A015890', 'A082800', 'A178780', 'A090740', 'A005930', 'A011690',
'A082850', 'A046890', 'A037950', 'A037400', 'A038060', 'A039010', 'A256630', 'A061040', 'A036170',
'A066570', 'A281740', 'A050760', 'A072950', 'A014580', 'A017900', 'A004710', 'A073540', 'A001210',
'A272110', 'A020760', 'A080520', 'A096610'],
'반도체 재료/부품': ['A095340', 'A272290', 'A195870', 'A005290', 'A219130', 'A074600', 'A092070', 'A114810', 'A317330',
'A144960', 'A101490', 'A033160', 'A101160', 'A067310', 'A077360', 'A089030', 'A102710', 'A008060',
'A170920', 'A241770', 'A171010', 'A281820', 'A064760', 'A031330', 'A104830', 'A241790', 'A252990',
'A080580', 'A059090', 'A127160', 'A278280', 'A033640', 'A036830', 'A060310', 'A093370', 'A083500',
'A147760', 'A166090', 'A357780', 'A281740', 'A092220', 'A311320', 'A024850', 'A036810', 'A098120',
'A272110', 'A052900', 'A094170'],
'5G(5세대 이동통신)': ['A069540', 'A353200', 'A007660', 'A062970', 'A175140', 'A010170', 'A035460', 'A049080', 'A242040',
'A030200', 'A017670', 'A178320', 'A008060', 'A178920', 'A215790', 'A091440', 'A065530', 'A122990',
'A264450', 'A037460', 'A056360', 'A069410', 'A192410', 'A032500', 'A105550', 'A032640', 'A127160',
'A073490', 'A340360', 'A005930', 'A046970', 'A050890', 'A088800', 'A332570', 'A173130', 'A115440',
'A230240', 'A061040', 'A066570', 'A095270', 'A138080', 'A218410', 'A073540', 'A078000', 'A065440',
'A052460'],
'기업인수목적회사(SPAC)': ['A321260', 'A264850', 'A317240', 'A331380', 'A333430', 'A330990', 'A353190', 'A339950',
'A335890', 'A306620', 'A373200', 'A369370', 'A307280', 'A386580', 'A320000', 'A367460',
'A335870', 'A340440', 'A372290', 'A344050', 'A291230', 'A351320', 'A336060', 'A323940',
'A323210', 'A337450', 'A353070', 'A393360', 'A355150', 'A377400', 'A284620', 'A307180',
'A270520', 'A368770', 'A380320', 'A397500', 'A391710', 'A347140', 'A342550', 'A363260',
'A400840', 'A388800', 'A351340', 'A367340', 'A365590', 'A380440', 'A353490', 'A310870',
'A310200', 'A258790', 'A323280', 'A328380', 'A331520', 'A333050', 'A336570', 'A340360',
'A340120', 'A377630', 'A307870', 'A322780', 'A387310', 'A367360', 'A340350', 'A366330',
'A388220', 'A332710', 'A391060', 'A341160', 'A400560', 'A299170', 'A349720', 'A343510',
'A332290', 'A329560', 'A261200', 'A397880', 'A388790', 'A309930', 'A281740', 'A287410',
'A396770', 'A353060', 'A323230', 'A273060', 'A307750', 'A373340', 'A359090', 'A367480',
'A319400', 'A267320'],
'제습기': ['A071460', 'A021240', 'A284740', 'A044340', 'A005930', 'A058610', 'A037070', 'A002700', 'A066570'],
'소매유통': ['A051160', 'A035080', 'A069920', 'A004170', 'A067830', 'A019010', 'A006370', 'A008770', 'A037710',
'A057050', 'A069960', 'A138250', 'A139480', 'A071840', 'A035760', 'A282330', 'A007070', 'A119860',
'A023530'],
'통신장비': ['A069540', 'A062970', 'A175140', 'A035460', 'A049080', 'A060540', 'A170790', 'A178320', 'A215790',
'A091440', 'A065530', 'A264450', 'A037460', 'A056360', 'A100590', 'A200230', 'A192410', 'A032500',
'A340360', 'A033790', 'A046970', 'A189300', 'A148250', 'A073490', 'A220180', 'A065770', 'A050890',
'A088800', 'A038060', 'A039560', 'A115440', 'A230240', 'A095270', 'A072950', 'A138080', 'A218410',
'A211270', 'A073540', 'A327260'],
'마이크로바이옴': ['A206650', 'A048530', 'A084650', 'A187420', 'A090430', 'A314130', 'A348150', 'A950200', 'A238200',
'A311690', 'A038290', 'A024720'],
'2021 하반기 신규상장': ['A376180', 'A270660', 'A298870', 'A137310', 'A159010', 'A376290', 'A257720', 'A382800', 'A376980',
'A195940', 'A290090', 'A099430', 'A308080', 'A099390', 'A261780', 'A386580', 'A361570', 'A365270',
'A376300', 'A329180', 'A372910', 'A260970', 'A357880', 'A372800', 'A393360', 'A400760', 'A381970',
'A259960', 'A400840', 'A391710', 'A389030', 'A397500', 'A388800', 'A139990', 'A377030', 'A377190',
'A395400', 'A387310', 'A391060', 'A388220', 'A348370', 'A114840', 'A400560', 'A199800', 'A271940',
'A404990', 'A089860', 'A396690', 'A388790', 'A397880', 'A352910', 'A323410', 'A367000', 'A200350',
'A382840', 'A396770', 'A311320', 'A382480', 'A315640', 'A222160', 'A377450', 'A203400', 'A273640',
'A377300', 'A377480', 'A377220', 'A058970'],
'북한 광물자원개발': ['A075970', 'A101170', 'A025890', 'A081150', 'A009520', 'A002360', 'A003670', 'A027580', 'A013310',
'A012160', 'A012320', 'A037950', 'A010040', 'A011760', 'A096350', 'A222420', 'A041440', 'A128660',
'A054540', 'A017550'],
'손해보험': ['A000060', 'A003690', 'A001450', 'A000540', 'A005830', 'A000370', 'A000400', 'A000810', 'A244920'],
'3D 낸드(NAND)': ['A222800', 'A074600', 'A092070', 'A084370', 'A089030', 'A281820', 'A079370', 'A104830', 'A031980',
'A005930', 'A240810', 'A319660', 'A036830', 'A045100', 'A093370', 'A095610', 'A086390', 'A000660',
'A014680'],
'NI(네트워크통합)': ['A042500', 'A038680', 'A046110', 'A033230', 'A031820', 'A029480', 'A049480', 'A173130', 'A040160'],
'핀테크(FinTech)': ['A041460', 'A046440', 'A064260', 'A030200', 'A036200', 'A067920', 'A047560', 'A052400', 'A158430',
'A079940', 'A131090', 'A035720', 'A042510', 'A950110', 'A163730', 'A094480', 'A032190', 'A052710',
'A053300', 'A053350', 'A035600', 'A032640', 'A234340', 'A064480', 'A034730', 'A214180', 'A035420',
'A054920', 'A060250', 'A053580', 'A025770', 'A027040', 'A036170', 'A184230', 'A079970', 'A377300',
'A081580', 'A039290'],
'휴대폰부품': ['A078650', 'A049070', 'A054210', 'A082660', 'A060720', 'A148150', 'A208710', 'A032580', 'A091580',
'A049630', 'A054040', 'A049080', 'A033560', 'A106240', 'A050110', 'A036010', 'A066670', 'A190510',
'A080220', 'A066900', 'A238090', 'A178320', 'A011070', 'A122990', 'A054450', 'A151910', 'A079190',
'A051370', 'A052710', 'A196450', 'A052860', 'A091120', 'A096630', 'A046890', 'A037950', 'A332570',
'A093920', 'A061040', 'A054940', 'A294140', 'A041520', 'A204270', 'A053450', 'A004710', 'A047310',
'A097520', 'A065680', 'A082210', 'A080420', 'A101330', 'A096610', 'A049520'],
'제지': ['A034810', 'A001020', 'A001810', 'A027970', 'A009200', 'A002870', 'A002200', 'A002310', 'A009770', 'A009460',
'A078130', 'A213500', 'A004540', 'A009580'],
'국내 상장 중국기업': ['A900070', 'A900270', 'A900250', 'A900300', 'A900290', 'A900110', 'A900340', 'A900280', 'A900310',
'A900260', 'A900120'],
'치매': ['A082270', 'A298380', 'A007390', 'A031860', 'A102460', 'A000220', 'A087010', 'A220100', 'A004310', 'A043100',
'A061250', 'A304840', 'A175250', 'A000100', 'A007370', 'A267790', 'A005500', 'A012690', 'A185750', 'A253840',
'A302550', 'A067080', 'A014570', 'A170900', 'A060590', 'A017180', 'A006620', 'A285130', 'A047920', 'A241820',
'A249420', 'A078160', 'A016580', 'A038460', 'A002800', 'A003850', 'A015860', 'A065650', 'A003060', 'A069620',
'A307750', 'A217600', 'A032300'], '요소수 관련주': ['A047400', 'A000910', 'A004000', 'A001390', 'A069260'],
'생명보험': ['A003690', 'A082640', 'A088350', 'A085620', 'A032830', 'A244920'],
'재택근무/스마트워크': ['A042500', 'A290270', 'A038680', 'A131370', 'A067010', 'A060850', 'A094280', 'A258790', 'A181710',
'A033320', 'A012510', 'A041020', 'A173130', 'A036630', 'A150900'],
'코로나19(진단키트)': ['A137310', 'A245620', 'A101140', 'A053980', 'A048530', 'A084650', 'A096530', 'A192650', 'A187420',
'A238090', 'A363250', 'A243070', 'A004720', 'A214610', 'A064550', 'A057880', 'A205470', 'A039860',
'A950200', 'A229000', 'A005690', 'A253840', 'A059090', 'A086040', 'A317690', 'A269620', 'A060590',
'A054180', 'A238120', 'A206640', 'A241820', 'A228760', 'A038460', 'A225220', 'A305090', 'A118000',
'A311690', 'A069620', 'A127120', 'A109820', 'A950130', 'A142280'],
'여름': ['A005300', 'A071460', 'A027740', 'A021240', 'A060570', 'A042110', 'A027710', 'A067170', 'A051900', 'A284740',
'A136480', 'A044340', 'A267790', 'A276730', 'A088910', 'A000080', 'A000890', 'A006890', 'A005180', 'A002270',
'A037070', 'A071840', 'A002700', 'A060310', 'A093370', 'A025860', 'A001550', 'A035810', 'A280360',
'A043340'],
'스마트팩토리(스마트공장)': ['A075970', 'A059120', 'A048770', 'A140670', 'A290090', 'A099440', 'A219420', 'A108230', 'A090710',
'A086960', 'A018260', 'A094280', 'A003550',
model.createElement(79, 94, 739, 934, 67, 93, 734, 929, 68)
model.createElement(80, 739, 740, 935, 934, 734, 735, 930, 929)
model.createElement(81, 740, 741, 936, 935, 735, 736, 931, 930)
model.createElement(82, 741, 742, 937, 936, 736, 737, 932, 931)
model.createElement(83, 742, 743, 938, 937, 737, 738, 933, 932)
model.createElement(84, 743, 172, 145, 938, 738, 171, 146, 933)
model.createElement(85, 95, 744, 939, 66, 94, 739, 934, 67)
model.createElement(86, 744, 745, 940, 939, 739, 740, 935, 934)
model.createElement(87, 745, 746, 941, 940, 740, 741, 936, 935)
model.createElement(88, 746, 747, 942, 941, 741, 742, 937, 936)
model.createElement(89, 747, 748, 943, 942, 742, 743, 938, 937)
model.createElement(90, 748, 173, 144, 943, 743, 172, 145, 938)
model.createElement(91, 96, 749, 944, 65, 95, 744, 939, 66)
model.createElement(92, 749, 750, 945, 944, 744, 745, 940, 939)
model.createElement(93, 750, 751, 946, 945, 745, 746, 941, 940)
model.createElement(94, 751, 752, 947, 946, 746, 747, 942, 941)
model.createElement(95, 752, 753, 948, 947, 747, 748, 943, 942)
model.createElement(96, 753, 174, 143, 948, 748, 173, 144, 943)
model.createElement(97, 97, 754, 949, 64, 96, 749, 944, 65)
model.createElement(98, 754, 755, 950, 949, 749, 750, 945, 944)
model.createElement(99, 755, 756, 951, 950, 750, 751, 946, 945)
model.createElement(100, 756, 757, 952, 951, 751, 752, 947, 946)
model.createElement(101, 757, 758, 953, 952, 752, 753, 948, 947)
model.createElement(102, 758, 175, 142, 953, 753, 174, 143, 948)
model.createElement(103, 98, 759, 954, 63, 97, 754, 949, 64)
model.createElement(104, 759, 760, 955, 954, 754, 755, 950, 949)
model.createElement(105, 760, 761, 956, 955, 755, 756, 951, 950)
model.createElement(106, 761, 762, 957, 956, 756, 757, 952, 951)
model.createElement(107, 762, 763, 958, 957, 757, 758, 953, 952)
model.createElement(108, 763, 176, 141, 958, 758, 175, 142, 953)
model.createElement(109, 99, 764, 959, 62, 98, 759, 954, 63)
model.createElement(110, 764, 765, 960, 959, 759, 760, 955, 954)
model.createElement(111, 765, 766, 961, 960, 760, 761, 956, 955)
model.createElement(112, 766, 767, 962, 961, 761, 762, 957, 956)
model.createElement(113, 767, 768, 963, 962, 762, 763, 958, 957)
model.createElement(114, 768, 177, 140, 963, 763, 176, 141, 958)
model.createElement(115, 100, 769, 964, 61, 99, 764, 959, 62)
model.createElement(116, 769, 770, 965, 964, 764, 765, 960, 959)
model.createElement(117, 770, 771, 966, 965, 765, 766, 961, 960)
model.createElement(118, 771, 772, 967, 966, 766, 767, 962, 961)
model.createElement(119, 772, 773, 968, 967, 767, 768, 963, 962)
model.createElement(120, 773, 178, 139, 968, 768, 177, 140, 963)
model.createElement(121, 101, 774, 969, 60, 100, 769, 964, 61)
model.createElement(122, 774, 775, 970, 969, 769, 770, 965, 964)
model.createElement(123, 775, 776, 971, 970, 770, 771, 966, 965)
model.createElement(124, 776, 777, 972, 971, 771, 772, 967, 966)
model.createElement(125, 777, 778, 973, 972, 772, 773, 968, 967)
model.createElement(126, 778, 179, 138, 973, 773, 178, 139, 968)
model.createElement(127, 102, 779, 974, 59, 101, 774, 969, 60)
model.createElement(128, 779, 780, 975, 974, 774, 775, 970, 969)
model.createElement(129, 780, 781, 976, 975, 775, 776, 971, 970)
model.createElement(130, 781, 782, 977, 976, 776, 777, 972, 971)
model.createElement(131, 782, 783, 978, 977, 777, 778, 973, 972)
model.createElement(132, 783, 180, 137, 978, 778, 179, 138, 973)
model.createElement(133, 103, 784, 979, 58, 102, 779, 974, 59)
model.createElement(134, 784, 785, 980, 979, 779, 780, 975, 974)
model.createElement(135, 785, 786, 981, 980, 780, 781, 976, 975)
model.createElement(136, 786, 787, 982, 981, 781, 782, 977, 976)
model.createElement(137, 787, 788, 983, 982, 782, 783, 978, 977)
model.createElement(138, 788, 181, 136, 983, 783, 180, 137, 978)
model.createElement(139, 104, 789, 984, 57, 103, 784, 979, 58)
model.createElement(140, 789, 790, 985, 984, 784, 785, 980, 979)
model.createElement(141, 790, 791, 986, 985, 785, 786, 981, 980)
model.createElement(142, 791, 792, 987, 986, 786, 787, 982, 981)
model.createElement(143, 792, 793, 988, 987, 787, 788, 983, 982)
model.createElement(144, 793, 182, 135, 988, 788, 181, 136, 983)
model.createElement(145, 105, 794, 989, 56, 104, 789, 984, 57)
model.createElement(146, 794, 795, 990, 989, 789, 790, 985, 984)
model.createElement(147, 795, 796, 991, 990, 790, 791, 986, 985)
model.createElement(148, 796, 797, 992, 991, 791, 792, 987, 986)
model.createElement(149, 797, 798, 993, 992, 792, 793, 988, 987)
model.createElement(150, 798, 183, 134, 993, 793, 182, 135, 988)
model.createElement(151, 106, 799, 994, 55, 105, 794, 989, 56)
model.createElement(152, 799, 800, 995, 994, 794, 795, 990, 989)
model.createElement(153, 800, 801, 996, 995, 795, 796, 991, 990)
model.createElement(154, 801, 802, 997, 996, 796, 797, 992, 991)
model.createElement(155, 802, 803, 998, 997, 797, 798, 993, 992)
model.createElement(156, 803, 184, 133, 998, 798, 183, 134, 993)
model.createElement(157, 107, 804, 999, 54, 106, 799, 994, 55)
model.createElement(158, 804, 805, 1000, 999, 799, 800, 995, 994)
model.createElement(159, 805, 806, 1001, 1000, 800, 801, 996, 995)
model.createElement(160, 806, 807, 1002, 1001, 801, 802, 997, 996)
model.createElement(161, 807, 808, 1003, 1002, 802, 803, 998, 997)
model.createElement(162, 808, 185, 132, 1003, 803, 184, 133, 998)
model.createElement(163, 108, 809, 1004, 53, 107, 804, 999, 54)
model.createElement(164, 809, 810, 1005, 1004, 804, 805, 1000, 999)
model.createElement(165, 810, 811, 1006, 1005, 805, 806, 1001, 1000)
model.createElement(166, 811, 812, 1007, 1006, 806, 807, 1002, 1001)
model.createElement(167, 812, 813, 1008, 1007, 807, 808, 1003, 1002)
model.createElement(168, 813, 186, 131, 1008, 808, 185, 132, 1003)
model.createElement(169, 109, 814, 1009, 52, 108, 809, 1004, 53)
model.createElement(170, 814, 815, 1010, 1009, 809, 810, 1005, 1004)
model.createElement(171, 815, 816, 1011, 1010, 810, 811, 1006, 1005)
model.createElement(172, 816, 817, 1012, 1011, 811, 812, 1007, 1006)
model.createElement(173, 817, 818, 1013, 1012, 812, 813, 1008, 1007)
model.createElement(174, 818, 187, 130, 1013, 813, 186, 131, 1008)
model.createElement(175, 110, 819, 1014, 51, 109, 814, 1009, 52)
model.createElement(176, 819, 820, 1015, 1014, 814, 815, 1010, 1009)
model.createElement(177, 820, 821, 1016, 1015, 815, 816, 1011, 1010)
model.createElement(178, 821, 822, 1017, 1016, 816, 817, 1012, 1011)
model.createElement(179, 822, 823, 1018, 1017, 817, 818, 1013, 1012)
model.createElement(180, 823, 188, 129, 1018, 818, 187, 130, 1013)
model.createElement(181, 111, 824, 1019, 50, 110, 819, 1014, 51)
model.createElement(182, 824, 825, 1020, 1019, 819, 820, 1015, 1014)
model.createElement(183, 825, 826, 1021, 1020, 820, 821, 1016, 1015)
model.createElement(184, 826, 827, 1022, 1021, 821, 822, 1017, 1016)
model.createElement(185, 827, 828, 1023, 1022, 822, 823, 1018, 1017)
model.createElement(186, 828, 189, 128, 1023, 823, 188, 129, 1018)
model.createElement(187, 112, 829, 1024, 49, 111, 824, 1019, 50)
model.createElement(188, 829, 830, 1025, 1024, 824, 825, 1020, 1019)
model.createElement(189, 830, 831, 1026, 1025, 825, 826, 1021, 1020)
model.createElement(190, 831, 832, 1027, 1026, 826, 827, 1022, 1021)
model.createElement(191, 832, 833, 1028, 1027, 827, 828, 1023, 1022)
model.createElement(192, 833, 190, 127, 1028, 828, 189, 128, 1023)
model.createElement(193, 113, 834, 1029, 48, 112, 829, 1024, 49)
model.createElement(194, 834, 835, 1030, 1029, 829, 830, 1025, 1024)
model.createElement(195, 835, 836, 1031, 1030, 830, 831, 1026, 1025)
model.createElement(196, 836, 837, 1032, 1031, 831, 832, 1027, 1026)
model.createElement(197, 837, 838, 1033, 1032, 832, 833, 1028, 1027)
model.createElement(198, 838, 191, 126, 1033, 833, 190, 127, 1028)
model.createElement(199, 114, 839, 1034, 47, 113, 834, 1029, 48)
model.createElement(200, 839, 840, 1035, 1034, 834, 835, 1030, 1029)
model.createElement(201, 840, 841, 1036, 1035, 835, 836, 1031, 1030)
model.createElement(202, 841, 842, 1037, 1036, 836, 837, 1032, 1031)
model.createElement(203, 842, 843, 1038, 1037, 837, 838, 1033, 1032)
model.createElement(204, 843, 192, 125, 1038, 838, 191, 126, 1033)
model.createElement(205, 115, 844, 1039, 46, 114, 839, 1034, 47)
model.createElement(206, 844, 845, 1040, 1039, 839, 840, 1035, 1034)
model.createElement(207, 845, 846, 1041, 1040, 840, 841, 1036, 1035)
model.createElement(208, 846, 847, 1042, 1041, 841, 842, 1037, 1036)
model.createElement(209, 847, 848, 1043, 1042, 842, 843, 1038, 1037)
model.createElement(210, 848, 193, 124, 1043, 843, 192, 125, 1038)
model.createElement(211, 116, 849, 1044, 45, 115, 844, 1039, 46)
model.createElement(212, 849, 850, 1045, 1044, 844, 845, 1040, 1039)
model.createElement(213, 850, 851, 1046, 1045, 845, 846, 1041, 1040)
model.createElement(214, 851, 852, 1047, 1046, 846, 847, 1042, 1041)
model.createElement(215, 852, 853, 1048, 1047, 847, 848, 1043, 1042)
model.createElement(216, 853, 194, 123, 1048, 848, 193, 124, 1043)
model.createElement(217, 117, 854, 1049, 44, 116, 849, 1044, 45)
model.createElement(218, 854, 855, 1050, 1049, 849, 850, 1045, 1044)
model.createElement(219, 855, 856, 1051, 1050, 850, 851, 1046, 1045)
model.createElement(220, 856, 857, 1052, 1051, 851, 852, 1047, 1046)
model.createElement(221, 857, 858, 1053, 1052, 852, 853, 1048, 1047)
model.createElement(222, 858, 195, 122, 1053, 853, 194, 123, 1048)
model.createElement(223, 118, 859, 1054, 43, 117, 854, 1049, 44)
model.createElement(224, 859, 860, 1055, 1054, 854, 855, 1050, 1049)
model.createElement(225, 860, 861, 1056, 1055, 855, 856, 1051, 1050)
model.createElement(226, 861, 862, 1057, 1056, 856, 857, 1052, 1051)
model.createElement(227, 862, 863, 1058, 1057, 857, 858, 1053, 1052)
model.createElement(228, 863, 196, 121, 1058, 858, 195, 122, 1053)
model.createElement(229, 119, 864, 1059, 42, 118, 859, 1054, 43)
model.createElement(230, 864, 865, 1060, 1059, 859, 860, 1055, 1054)
model.createElement(231, 865, 866, 1061, 1060, 860, 861, 1056, 1055)
model.createElement(232, 866, 867, 1062, 1061, 861, 862, 1057, 1056)
model.createElement(233, 867, 868, 1063, 1062, 862, 863, 1058, 1057)
model.createElement(234, 868, 197, 120, 1063, 863, 196, 121, 1058)
model.createElement(235, 5, 32, 41, 8, 119, 864, 1059, 42)
model.createElement(236, 32, 33, 40, 41, 864, 865, 1060, 1059)
model.createElement(237, 33, 34, 39, 40, 865, 866, 1061, 1060)
model.createElement(238, 34, 35, 38, 39, 866, 867, 1062, 1061)
model.createElement(239, 35, 36, 37, 38, 867, 868, 1063, 1062)
def checkBlogforEVA(self, dt):
iss_blog_url = 'https://blogs.nasa.gov/spacestation/tag/spacewalk/'
def on_success(req, data): #if blog data is successfully received, it is processed here
logWrite("Blog Success")
soup = BeautifulSoup(data, "lxml")
blog_entries = soup.find("div", {"class": "entry-content"})
blog_text = blog_entries.get_text()
iss_EVcrew_url = 'https://www.howmanypeopleareinspacerightnow.com/peopleinspace.json'
def on_success2(req2, data2):
logWrite("Successfully fetched EV crew JSON")
number_of_space = int(data2['number'])
names = []
for num in range(0, number_of_space):
names.append(str(data2['people'][num]['name']))
try:
self.checkBlog(names,blog_text)
except Exception as e:
logWrite("Error checking blog: " + str(e))
def on_redirect2(req, result):
logWrite("Warning - Get EVA crew failure (redirect)")
logWrite(result)
def on_failure2(req, result):
logWrite("Warning - Get EVA crew failure (url failure)")
def on_error2(req, result):
logWrite("Warning - Get EVA crew failure (url error)")
req2 = UrlRequest(iss_EVcrew_url, on_success2, on_redirect2, on_failure2, on_error2, timeout=1)
def on_redirect(req, result):
logWrite("Warning - Get nasa blog failure (redirect)")
def on_failure(req, result):
logWrite("Warning - Get nasa blog failure (url failure)")
def on_error(req, result):
logWrite("Warning - Get nasa blog failure (url error)")
req = UrlRequest(iss_blog_url, on_success, on_redirect, on_failure, on_error, timeout=1)
def checkBlog(self, names, blog_text): #takes the nasa blog and compares it to people in space
ev1_surname = ''
ev1_firstname = ''
ev2_surname = ''
ev2_firstname = ''
ev1name = ''
ev2name = ''
name_position = 1000000
        for name in names: #search for text in blog that matches people-in-space list, choose 1st result as likely EV1
if name in blog_text:
if blog_text.find(name) < name_position:
name_position = blog_text.find(name)
ev1name = name
name_position = 1000000
        for name in names: #search for text in blog that matches people-in-space list, choose 2nd result as likely EV2
if name in blog_text and name != ev1name:
if blog_text.find(name) < name_position:
name_position = blog_text.find(name)
ev2name = name
logWrite("Likely EV1: "+ev1name)
logWrite("Likely EV2: "+ev2name)
ev1_surname = ev1name.split()[-1]
ev1_firstname = ev1name.split()[0]
ev2_surname = ev2name.split()[-1]
ev2_firstname = ev2name.split()[0]
try:
self.check_EVA_stats(ev1_surname,ev1_firstname,ev2_surname,ev2_firstname)
except Exception as e:
logWrite("Error retrieving EVA stats: " + str(e))
def flashUS_EVAbutton(self, instance):
logWrite("Function call - flashUS_EVA")
self.eva_main.ids.US_EVA_Button.background_color = (0, 0, 1, 1)
def reset_color(*args):
self.eva_main.ids.US_EVA_Button.background_color = (1, 1, 1, 1)
Clock.schedule_once(reset_color, 0.5)
def flashRS_EVAbutton(self, instance):
logWrite("Function call - flashRS_EVA")
self.eva_main.ids.RS_EVA_Button.background_color = (0, 0, 1, 1)
def reset_color(*args):
self.eva_main.ids.RS_EVA_Button.background_color = (1, 1, 1, 1)
Clock.schedule_once(reset_color, 0.5)
def flashEVAbutton(self, instance):
logWrite("Function call - flashEVA")
self.mimic_screen.ids.EVA_button.background_color = (0, 0, 1, 1)
def reset_color(*args):
self.mimic_screen.ids.EVA_button.background_color = (1, 1, 1, 1)
Clock.schedule_once(reset_color, 0.5)
def EVA_clock(self, dt):
global seconds, minutes, hours, EVAstartTime
unixconvert = time.gmtime(time.time())
currenthours = float(unixconvert[7])*24+unixconvert[3]+float(unixconvert[4])/60+float(unixconvert[5])/3600
difference = (currenthours-EVAstartTime)*3600
minutes, seconds = divmod(difference, 60)
hours, minutes = divmod(minutes, 60)
hours = int(hours)
minutes = int(minutes)
seconds = int(seconds)
self.us_eva.ids.EVA_clock.text =(str(hours) + ":" + str(minutes).zfill(2) + ":" + str(int(seconds)).zfill(2))
self.us_eva.ids.EVA_clock.color = 0.33, 0.7, 0.18
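        # worked example (hedged): difference = 3725.0 s -> divmod(3725, 60)
        # gives (62, 5), then divmod(62, 60) gives (1, 2), so the label reads 1:02:05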
def animate(self, instance):
global new_x2, new_y2
self.main_screen.ids.ISStiny2.size_hint = 0.07, 0.07
new_x2 = new_x2+0.007
new_y2 = (math.sin(new_x2*30)/18)+0.75
if new_x2 > 1:
new_x2 = new_x2-1.0
self.main_screen.ids.ISStiny2.pos_hint = {"center_x": new_x2, "center_y": new_y2}
def animate3(self, instance):
global new_x, new_y, sizeX, sizeY, startingAnim
if new_x<0.886:
new_x = new_x+0.007
new_y = (math.sin(new_x*30)/18)+0.75
self.main_screen.ids.ISStiny.pos_hint = {"center_x": new_x, "center_y": new_y}
else:
if sizeX <= 0.15:
sizeX = sizeX + 0.01
sizeY = sizeY + 0.01
self.main_screen.ids.ISStiny.size_hint = sizeX, sizeY
else:
if startingAnim:
Clock.schedule_interval(self.animate, 0.1)
startingAnim = False
def changeColors(self, *args): #this function sets all labels on mimic screen to a certain color based on signal status
        #the signalcolor is a kv property that will update all signal-status-dependent values to whatever color is received by this function
global ScreenList
for x in ScreenList:
getattr(self, x).signalcolor = args[0], args[1], args[2]
def changeManualControlBoolean(self, *args):
global manualcontrol
manualcontrol = args[0]
def TDRSupdate(self, dt):
global TDRS12_TLE, TDRS6_TLE, TDRS10_TLE, TDRS11_TLE, TDRS7_TLE
normalizedX = self.orbit_screen.ids.OrbitMap.norm_image_size[0] / self.orbit_screen.ids.OrbitMap.texture_size[0]
normalizedY = self.orbit_screen.ids.OrbitMap.norm_image_size[1] / self.orbit_screen.ids.OrbitMap.texture_size[1]
def scaleLatLon(latitude, longitude):
#converting lat lon to x, y for orbit map
fromLatSpan = 180.0
fromLonSpan = 360.0
toLatSpan = 0.598
toLonSpan = 0.716
valueLatScaled = (float(latitude)+90.0)/float(fromLatSpan)
valueLonScaled = (float(longitude)+180.0)/float(fromLonSpan)
newLat = (0.265) + (valueLatScaled * toLatSpan)
newLon = (0.14) + (valueLonScaled * toLonSpan)
return {'newLat': newLat, 'newLon': newLon}
def scaleLatLon2(in_latitude,in_longitude):
MAP_HEIGHT = self.orbit_screen.ids.OrbitMap.texture_size[1]
MAP_WIDTH = self.orbit_screen.ids.OrbitMap.texture_size[0]
new_x = ((MAP_WIDTH / 360.0) * (180 + in_longitude))
new_y = ((MAP_HEIGHT / 180.0) * (90 + in_latitude))
return {'new_y': new_y, 'new_x': new_x}
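        # worked example (assuming a 1024x512 map texture): scaleLatLon2(0, 0)
        # returns the texture center, since new_x = (1024/360.0)*(180+0) = 512.0
        # and new_y = (512/180.0)*(90+0) = 256.0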
#TDRS East 2 sats
try:
TDRS12_TLE.compute(datetime.utcnow()) #41 West
except NameError:
TDRS12lon = -41
TDRS12lat = 0
else:
TDRS12lon = float(str(TDRS12_TLE.sublong).split(':')[0]) + float(str(TDRS12_TLE.sublong).split(':')[1])/60 + float(str(TDRS12_TLE.sublong).split(':')[2])/3600
TDRS12lat = float(str(TDRS12_TLE.sublat).split(':')[0]) + float(str(TDRS12_TLE.sublat).split(':')[1])/60 + float(str(TDRS12_TLE.sublat).split(':')[2])/3600
TDRS12_groundtrack = []
date_i = datetime.utcnow()
groundtrackdate = datetime.utcnow()
while date_i < groundtrackdate + timedelta(days=1):
TDRS12_TLE.compute(date_i)
TDRS12lon_gt = float(str(TDRS12_TLE.sublong).split(':')[0]) + float(
str(TDRS12_TLE.sublong).split(':')[1]) / 60 + float(str(TDRS12_TLE.sublong).split(':')[2]) / 3600
TDRS12lat_gt = float(str(TDRS12_TLE.sublat).split(':')[0]) + float(
str(TDRS12_TLE.sublat).split(':')[1]) / 60 + float(str(TDRS12_TLE.sublat).split(':')[2]) / 3600
TDRS12_groundtrack.append(scaleLatLon2(TDRS12lat_gt, TDRS12lon_gt)['new_x'])
TDRS12_groundtrack.append(scaleLatLon2(TDRS12lat_gt, TDRS12lon_gt)['new_y'])
date_i += timedelta(minutes=10)
self.orbit_screen.ids.TDRS12groundtrack.width = 1
self.orbit_screen.ids.TDRS12groundtrack.col = (0,0,1,1)
self.orbit_screen.ids.TDRS12groundtrack.points = TDRS12_groundtrack
try:
TDRS6_TLE.compute(datetime.utcnow()) #46 West
except NameError:
TDRS6lon = -46
TDRS6lat = 0
else:
TDRS6lon = float(str(TDRS6_TLE.sublong).split(':')[0]) + float(str(TDRS6_TLE.sublong).split(':')[1])/60 + float(str(TDRS6_TLE.sublong).split(':')[2])/3600
TDRS6lat = float(str(TDRS6_TLE.sublat).split(':')[0]) + float(str(TDRS6_TLE.sublat).split(':')[1])/60 + float(str(TDRS6_TLE.sublat).split(':')[2])/3600
TDRS6_groundtrack = []
date_i = datetime.utcnow()
groundtrackdate = datetime.utcnow()
while date_i < groundtrackdate + timedelta(days=1):
TDRS6_TLE.compute(date_i)
TDRS6lon_gt = float(str(TDRS6_TLE.sublong).split(':')[0]) + float(
str(TDRS6_TLE.sublong).split(':')[1]) / 60 + float(str(TDRS6_TLE.sublong).split(':')[2]) / 3600
TDRS6lat_gt = float(str(TDRS6_TLE.sublat).split(':')[0]) + float(
str(TDRS6_TLE.sublat).split(':')[1]) / 60 + float(str(TDRS6_TLE.sublat).split(':')[2]) / 3600
TDRS6_groundtrack.append(scaleLatLon2(TDRS6lat_gt, TDRS6lon_gt)['new_x'])
TDRS6_groundtrack.append(scaleLatLon2(TDRS6lat_gt, TDRS6lon_gt)['new_y'])
date_i += timedelta(minutes=10)
self.orbit_screen.ids.TDRS6groundtrack.width = 1
self.orbit_screen.ids.TDRS6groundtrack.col = (0,0,1,1)
self.orbit_screen.ids.TDRS6groundtrack.points = TDRS6_groundtrack
#TDRS West 2 sats
try:
TDRS11_TLE.compute(datetime.utcnow()) #171 West
except NameError:
TDRS11lon = -171
TDRS11lat = 0
else:
TDRS11lon = float(str(TDRS11_TLE.sublong).split(':')[0]) + float(str(TDRS11_TLE.sublong).split(':')[1])/60 + float(str(TDRS11_TLE.sublong).split(':')[2])/3600
TDRS11lat = float(str(TDRS11_TLE.sublat).split(':')[0]) + float(str(TDRS11_TLE.sublat).split(':')[1])/60 + float(str(TDRS11_TLE.sublat).split(':')[2])/3600
TDRS11_groundtrack = []
date_i = datetime.utcnow()
groundtrackdate = datetime.utcnow()
while date_i < groundtrackdate + timedelta(days=1):
TDRS11_TLE.compute(date_i)
TDRS11lon_gt = float(str(TDRS11_TLE.sublong).split(':')[0]) + float(
str(TDRS11_TLE.sublong).split(':')[1]) / 60 + float(str(TDRS11_TLE.sublong).split(':')[2]) / 3600
TDRS11lat_gt = float(str(TDRS11_TLE.sublat).split(':')[0]) + float(
str(TDRS11_TLE.sublat).split(':')[1]) / 60 + float(str(TDRS11_TLE.sublat).split(':')[2]) / 3600
TDRS11_groundtrack.append(scaleLatLon2(TDRS11lat_gt, TDRS11lon_gt)['new_x'])
TDRS11_groundtrack.append(scaleLatLon2(TDRS11lat_gt, TDRS11lon_gt)['new_y'])
date_i += timedelta(minutes=10)
self.orbit_screen.ids.TDRS11groundtrack.width = 1
self.orbit_screen.ids.TDRS11groundtrack.col = (0,0,1,1)
self.orbit_screen.ids.TDRS11groundtrack.points = TDRS11_groundtrack
try:
TDRS10_TLE.compute(datetime.utcnow()) #174 West
except NameError:
TDRS10lon = -174
TDRS10lat = 0
else:
TDRS10lon = float(str(TDRS10_TLE.sublong).split(':')[0]) + float(str(TDRS10_TLE.sublong).split(':')[1])/60 + float(str(TDRS10_TLE.sublong).split(':')[2])/3600
TDRS10lat = float(str(TDRS10_TLE.sublat).split(':')[0]) + float(str(TDRS10_TLE.sublat).split(':')[1])/60 + float(str(TDRS10_TLE.sublat).split(':')[2])/3600
TDRS10_groundtrack = []
date_i = datetime.utcnow()
groundtrackdate = datetime.utcnow()
while date_i < groundtrackdate + timedelta(days=1):
TDRS10_TLE.compute(date_i)
TDRS10lon_gt = float(str(TDRS10_TLE.sublong).split(':')[0]) + float(
str(TDRS10_TLE.sublong).split(':')[1]) / 60 + float(str(TDRS10_TLE.sublong).split(':')[2]) / 3600
TDRS10lat_gt = float(str(TDRS10_TLE.sublat).split(':')[0]) + float(
str(TDRS10_TLE.sublat).split(':')[1]) / 60 + float(str(TDRS10_TLE.sublat).split(':')[2]) / 3600
TDRS10_groundtrack.append(scaleLatLon2(TDRS10lat_gt, TDRS10lon_gt)['new_x'])
TDRS10_groundtrack.append(scaleLatLon2(TDRS10lat_gt, TDRS10lon_gt)['new_y'])
date_i += timedelta(minutes=10)
self.orbit_screen.ids.TDRS10groundtrack.width = 1
self.orbit_screen.ids.TDRS10groundtrack.col = (0,0,1,1)
self.orbit_screen.ids.TDRS10groundtrack.points = TDRS10_groundtrack
#ZOE TDRS-Z
try:
TDRS7_TLE.compute(datetime.utcnow()) #275 West
except NameError:
TDRS7lon = 85
TDRS7lat = 0
else:
TDRS7lon = float(str(TDRS7_TLE.sublong).split(':')[0]) + float(str(TDRS7_TLE.sublong).split(':')[1])/60 + float(str(TDRS7_TLE.sublong).split(':')[2])/3600
TDRS7lat = float(str(TDRS7_TLE.sublat).split(':')[0]) + float(str(TDRS7_TLE.sublat).split(':')[1])/60 + float(str(TDRS7_TLE.sublat).split(':')[2])/3600
TDRS7_groundtrack = []
date_i = datetime.utcnow()
groundtrackdate = datetime.utcnow()
while date_i < groundtrackdate + timedelta(days=1):
TDRS7_TLE.compute(date_i)
TDRS7lon_gt = float(str(TDRS7_TLE.sublong).split(':')[0]) + float(
str(TDRS7_TLE.sublong).split(':')[1]) / 60 + float(str(TDRS7_TLE.sublong).split(':')[2]) / 3600
TDRS7lat_gt = float(str(TDRS7_TLE.sublat).split(':')[0]) + float(
str(TDRS7_TLE.sublat).split(':')[1]) / 60 + float(str(TDRS7_TLE.sublat).split(':')[2]) / 3600
TDRS7_groundtrack.append(scaleLatLon2(TDRS7lat_gt, TDRS7lon_gt)['new_x'])
TDRS7_groundtrack.append(scaleLatLon2(TDRS7lat_gt, TDRS7lon_gt)['new_y'])
date_i += timedelta(minutes=10)
self.orbit_screen.ids.TDRS7groundtrack.width = 1
self.orbit_screen.ids.TDRS7groundtrack.col = (0,0,1,1)
self.orbit_screen.ids.TDRS7groundtrack.points = TDRS7_groundtrack
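        # --- Hedged sketch (not in the original): each TDRS block above repeats
        # the same TLE-to-groundtrack loop. A helper like this hypothetical
        # tdrs_groundtrack() would compute one 24 h track at 10-minute steps,
        # using math.degrees() on ephem's radian-valued angles instead of
        # re-parsing the deg:min:sec string (that parse mishandles negative
        # angles, since the minute and second terms are always added positive).
        def tdrs_groundtrack(tle):
            points = []
            date_i = datetime.utcnow()
            end = date_i + timedelta(days=1)
            while date_i < end:
                tle.compute(date_i)
                lat = math.degrees(tle.sublat)
                lon = math.degrees(tle.sublong)
                xy = scaleLatLon2(lat, lon)
                points.extend([xy['new_x'], xy['new_y']])
                date_i += timedelta(minutes=10)
            return points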
#draw the TDRS satellite locations
self.orbit_screen.ids.TDRS12.pos = (scaleLatLon2(TDRS12lat, TDRS12lon)['new_x']-((self.orbit_screen.ids.TDRS12.width/2)*normalizedX),scaleLatLon2(TDRS12lat, TDRS12lon)['new_y']-((self.orbit_screen.ids.TDRS12.height/2)*normalizedY))
self.orbit_screen.ids.TDRS6.pos = (scaleLatLon2(TDRS6lat, TDRS6lon)['new_x']-((self.orbit_screen.ids.TDRS6.width/2)*normalizedX),scaleLatLon2(TDRS6lat, TDRS6lon)['new_y']-((self.orbit_screen.ids.TDRS6.height/2)*normalizedY))
self.orbit_screen.ids.TDRS11.pos = (scaleLatLon2(TDRS11lat, TDRS11lon)['new_x']-((self.orbit_screen.ids.TDRS11.width/2)*normalizedX),scaleLatLon2(TDRS11lat, TDRS11lon)['new_y']-((self.orbit_screen.ids.TDRS11.height/2)*normalizedY))
self.orbit_screen.ids.TDRS10.pos = (scaleLatLon2(TDRS10lat, TDRS10lon)['new_x']-((self.orbit_screen.ids.TDRS10.width/2)*normalizedX),scaleLatLon2(TDRS10lat, TDRS10lon)['new_y']-((self.orbit_screen.ids.TDRS10.height/2)*normalizedY))
self.orbit_screen.ids.TDRS7.pos = (scaleLatLon2(TDRS7lat, TDRS7lon)['new_x']-((self.orbit_screen.ids.TDRS7.width/2)*normalizedX),scaleLatLon2(TDRS7lat, TDRS7lon)['new_y']-((self.orbit_screen.ids.TDRS7.height/2)*normalizedY))
#add labels and ZOE
self.orbit_screen.ids.TDRSeLabel.pos_hint = {"center_x": scaleLatLon(0, -41)['newLon']+0.06, "center_y": scaleLatLon(0, -41)['newLat']}
self.orbit_screen.ids.TDRSwLabel.pos_hint = {"center_x": scaleLatLon(0, -174)['newLon']+0.06, "center_y": scaleLatLon(0, -174)['newLat']}
self.orbit_screen.ids.TDRSzLabel.pos_hint = {"center_x": scaleLatLon(0, 85)['newLon']+0.05, "center_y": scaleLatLon(0, 85)['newLat']}
self.orbit_screen.ids.ZOE.pos_hint = {"center_x": scaleLatLon(0, 77)['newLon'], "center_y": scaleLatLon(0, 77)['newLat']}
self.orbit_screen.ids.ZOElabel.pos_hint = {"center_x": scaleLatLon(0, 77)['newLon'], "center_y": scaleLatLon(0, 77)['newLat']+0.1}
def orbitUpdate(self, dt):
global overcountry, ISS_TLE, ISS_TLE_Line1, ISS_TLE_Line2, ISS_TLE_Acquired, sgant_elevation, sgant_elevation_old, sgant_xelevation, aos, oldtdrs, tdrs, logged
global TDRS12_TLE, TDRS6_TLE, TDRS7_TLE, TDRS10_TLE, TDRS11_TLE, tdrs1, tdrs2, tdrs_timestamp
def scaleLatLon(latitude, longitude):
#converting lat lon to x, y for orbit map
fromLatSpan = 180.0
fromLonSpan = 360.0
toLatSpan = 0.598
toLonSpan = 0.716
valueLatScaled = (float(latitude)+90.0)/float(fromLatSpan)
valueLonScaled = (float(longitude)+180.0)/float(fromLonSpan)
newLat = (0.265) + (valueLatScaled * toLatSpan)
newLon = (0.14) + (valueLonScaled * toLonSpan)
return {'newLat': newLat, 'newLon': newLon}
def scaleLatLon2(in_latitude,in_longitude):
MAP_HEIGHT = self.orbit_screen.ids.OrbitMap.texture_size[1]
MAP_WIDTH = self.orbit_screen.ids.OrbitMap.texture_size[0]
new_x = ((MAP_WIDTH / 360.0) * (180 + in_longitude))
new_y = ((MAP_HEIGHT / 180.0) * (90 + in_latitude))
return {'new_y': new_y, 'new_x': new_x}
#copied from apexpy - copyright 2015 <NAME> MIT license
def subsolar(datetime):
year = datetime.year
doy = datetime.timetuple().tm_yday
            ut = datetime.hour * 3600 + datetime.minute * 60 + datetime.second
frequencies = C.MICMat((1, 1, H, 2*(W/2 + 1))).offload_mic().fill_zeros()
pooled_frequencies = C.MICMat((1, 1, band_H, 2*(band_W/2 + 1))).offload_mic().fill_zeros()
inputs.fft(pooled_frequencies)
pooled_frequencies.wipe_out_irrelevant_entries()
frequencies.low_pass_filter_gradient(pooled_frequencies, band_H, band_W)
real, imaginary = pooled_frequencies.real_and_imaginary()
print real
print imaginary
real, imaginary = frequencies.real_and_imaginary()
print real
print imaginary
pooled_frequencies.ifft(outputs)
    assert 2 == 1  # deliberate halt: stop the test here while debugging the FFT path above
# outputs = C.MICMat((1, 1, H, W)).offload_mic().fill_zeros()
# inputs = C.MICMat((1, 1, H, 2*(W/2 + 1))).offload_mic().fill_randn(stream, 0., 1.)
# inputs.wipe_out_irrelevant_entries()
# real, imaginary = inputs.real_and_imaginary()
# print real
# print imaginary
# inputs.ifft(outputs)
# inputs_copy = outputs.fft(inputs.deepcopy()).wipe_out_irrelevant_entries()
# print (inputs - inputs_copy).abs().mean()
# real, imaginary = inputs_copy.real_and_imaginary()
# print real
# print imaginary
# outputs = C.MICMat((1, 1, H, 2*W)).offload_mic().fill_zeros()
# inputs = C.MICMat((1, 1, H, 2*W)).offload_mic().fill_randn(stream, 0., 1.)
# inputs.update(inputs.deepcopy().conjugate())
# inputs.fft_full(outputs)
# outputs_ifft = inputs.ifft_full(outputs.deepcopy())
# inputs_copy = outputs.ifft_full(inputs.deepcopy().fill_zeros())
# real, imaginary = outputs.real_and_imaginary()
# print real
# print imaginary
# real, imaginary = outputs_ifft.real_and_imaginary()
# print real
# print imaginary
# print (outputs - outputs_ifft.conjugate()).abs().sum()
outputs = C.MICMat((1, 1, H, W)).offload_mic().fill_zeros()
outputs_full = C.MICMat((1, 1, H, 2*W)).offload_mic().fill_zeros()
consts_outputs = C.MICMat((1, 1, H, W)).offload_mic().fill_randn(stream, 0., 1.)
consts_outputs_full = C.MICMat((1, 1, H, 2*W)).offload_mic().fill_randn(stream, 0., 1.)
consts_outputs_full.update(consts_outputs_full.deepcopy().conjugate()).scale(0.5)
inputs = C.MICMat((1, 1, H, 2*(W/2 + 1))).offload_mic().fill_randn(stream, 0., 1.) #.wipe_out_irrelevant_entries()
inputs_full = C.MICMat((1, 1, H, 2*W)).offload_mic().fill_randn(stream, 0., 1.)
forward_pass = lambda inp: inp.ifft(outputs.deepcopy()).multiply(consts_outputs).sum()
R = forward_pass(inputs)
grad_R = consts_outputs.deepcopy()
# grad_inputs = grad_R.ifft_full(inputs_full.deepcopy().fill_zeros())
grad_inputs = grad_R.fft(inputs.deepcopy().fill_zeros())
# grad_inputs.conjugate()
grad_inputs.wipe_out_irrelevant_entries()
real, imaginary = grad_inputs.real_and_imaginary()
print real
print imaginary
# grad_inputs.conjugate()
# grad_inputs.wipe_out_irrelevant_entries()
grad_inputs.fft_conjugate_symmetry_scaling(W, scratch)
real, imaginary = grad_inputs.real_and_imaginary()
print real
print imaginary
print grad_inputs.deepcopy().pow(2.).sum()
epsilon = 0.0001
inputs_plus_d = inputs.deepcopy().update(grad_inputs, epsilon)
inputs_minus_d = inputs.deepcopy().update(grad_inputs, -epsilon)
R_plus = forward_pass(inputs_plus_d)
R_minus = forward_pass(inputs_minus_d)
R_diff = (R_plus - R_minus) / (2.*epsilon)
    print R_diff
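    # Finite-difference gradient check (sketch): R is linear in the inputs, so
    # perturbing along d = grad_inputs gives
    #     (R(x + eps*d) - R(x - eps*d)) / (2*eps) = <grad_R, d>,
    # and R_diff above should roughly match the squared gradient norm printed
    # earlier, up to the rfft conjugate-symmetry scaling applied above.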
# ifft_d = d_inputs.ifft(outputs.deepcopy())
# computed_gradient = ifft_d
# # computed_gradient.scale(2.)
# print computed_gradient.divide(fd_gradient)
# outputs = C.MICMat((1, 1, 4, 4)).offload_mic().fill_zeros()
# inputs = C.MICMat((1, 1, 4, 2*3)).offload_mic().fill_randn(stream, 0., 1.).wipe_out_irrelevant_entries()
# d_inputs = C.MICMat((1, 1, 4, 2*3)).offload_mic().fill_randn(stream, 0., 0.00001).wipe_out_irrelevant_entries()
# inputs_plus_d = inputs.deepcopy().update(d_inputs)
# inputs_minus_d = inputs.deepcopy().update(d_inputs, -1.)
# outputs_plus_d = inputs_plus_d.ifft(outputs.deepcopy())
# outputs_minus_d = inputs_minus_d.ifft(outputs.deepcopy())
# fd_gradient = (outputs_plus_d - outputs_minus_d).scale(0.5)
# ifft_d = d_inputs.ifft(outputs.deepcopy())
# computed_gradient = ifft_d
# # computed_gradient.scale(2.)
# print computed_gradient.divide(fd_gradient)
# inputs.wipe_out_irrelevant_entries()
# # inputs = outputs.fft()
# timer.elapsed()
# # outputs.fill_zeros()
# extra.fill_randn(stream, 0., 1.)
# timer.tic()
# inputs.ifft(outputs)
# outputs.fft(inputs)
# inputs.ifft(outputs)
# outputs.fft(inputs)
# inputs.ifft(outputs)
# timer.elapsed()
# spatials = C.MICMat((128, 64, 32, 32)).offload_mic().fill_randn(stream, 0., 1.)
# frequencies = C.MICMat((128, 64, 32, 2*17)).offload_mic().fill_randn(stream, 0., 1.)
# spatials = C.MICMat((1, 1, 6, 6)).offload_mic().fill_randn(stream, 0., 1.)
# frequencies = C.MICMat((1, 1, 6, 2*4)).offload_mic().fill_randn(stream, 0., 1.)
# extra = C.MICMat((10, 3, 32, 17)).offload_mic()
# spatials_original = spatials.deepcopy()
# timer.tic()
# spatials.fft(frequencies)
# frequencies.wipe_out_irrelevant_entries()
# # frequencies = spatials.fft()
# timer.elapsed()
# # spatials.fill_zeros()
# extra.fill_randn(stream, 0., 1.)
# timer.tic()
# frequencies.ifft(spatials)
# spatials.fft(frequencies)
# frequencies.ifft(spatials)
# spatials.fft(frequencies)
# frequencies.ifft(spatials)
# timer.elapsed()
# print (spatials - spatials_original).abs().mean()
# real, imaginary = frequencies.real_and_imaginary()
# print real
# print imaginary
# frequencies.conjugate()
# real, imaginary = frequencies.real_and_imaginary()
# print real
# print imaginary
def test_convolution(time_and_dont_test, time_and_dont_test_grad, test_gradient, offload, N, K, c, H, W, X, Y, stride, padding, pooling_radius, pooling_stride, scratch, shadow, N_block, K_block, C_block, N_block_grad, C_block_grad, H_arg_block_grad, W_arg_block_grad, Y_block_grad):
output_H = (H + 2*padding - Y + 1)/stride
output_W = (W + 2*padding - X + 1)/stride
pooled_H = int(np.ceil((output_H - pooling_radius + 1.)/pooling_stride))
pooled_W = int(np.ceil((output_W - pooling_radius + 1.)/pooling_stride))
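    # Worked example (sketch): H = W = 32, padding = 2, X = Y = 5, stride = 1
    # gives output_H = output_W = (32 + 4 - 5 + 1)/1 = 32; with
    # pooling_radius = 3 and pooling_stride = 2 the pooled maps are
    # int(np.ceil((32 - 3 + 1.)/2)) = 15 on each side.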
K_preshadow = K
if shadow:
K *= 2
num_operations = N*K*c*output_H*output_W*X*Y*2
num_operations_argmax = N*K*c*pooled_H*pooled_W*Y*X*2
num_operations_gradient = N*K*c*pooled_H*pooled_W*Y*X*2
inputs = C.MICMat((N, c, H, W))
inputs.fill_zeros()
filters = C.MICMat((K_preshadow, c, Y, X))
filters.fill_zeros()
outputs = C.MICMat((N, K, pooled_H, pooled_W))
argmaxs = outputs.deepcopy().offload_mic().astype('int')
if offload:
inputs.offload_mic()
inputs.fill_randn(stream, 0., 1.)
outputs.offload_mic()
filters.offload_mic()
filters.fill_randn(stream, 0., 1.)
print 'Computing convolution now.'
filters_rotated = scratch.reset(filters.shape)
filters_interleaved = scratch.append(filters.shape)
inputs_rotated = scratch.append(inputs.shape)
inputs_interleaved = scratch.append(inputs.shape)
outputs_interleaved = scratch.append(outputs.shape)
outputs_rotated = scratch.append(outputs.shape)
scratch.update_end()
outputs_interleaved.shape = outputs_interleaved.shape[1:] + (outputs_interleaved.shape[0],)
outputs_interleaved.shape = (outputs_interleaved.shape[-1]/N_block,) + outputs_interleaved.shape[0:-1] + (N_block,)
argmaxs.shape = argmaxs.shape[1:] + (argmaxs.shape[0],)
argmaxs.shape = (argmaxs.shape[-1]/N_block,) + argmaxs.shape[0:-1] + (N_block,)
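    # Layout sketch: the two reshapes above turn the (N, K, pooled_H, pooled_W)
    # tensor into (N/N_block, K, pooled_H, pooled_W, N_block): the batch axis is
    # rotated to the innermost position and split into blocks of N_block images,
    # so the vectorised kernel can stream over contiguous mini-batches.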
inputs.rotate_dimensions('forward', inputs_rotated)
inputs_rotated.interleave_block(N_block, inputs_interleaved)
filters.rotate_dimensions('forward', filters_rotated)
filters_rotated.interleave_block(K_block, filters_interleaved)
print 'Done data conversions.'
timer.tic()
outputs_interleaved.convolution(inputs_interleaved, filters_interleaved, argmaxs, stride, padding, pooling_radius, pooling_stride, 1, False, scratch.end)
# outputs.convolve_and_pool_replace(inputs, filters, argmaxs, stride, padding, pooling_radius, pooling_stride, 1, False, scratch, shadow)
test_time = timer.toc()
print 'Done convolution.'
outputs_interleaved.uninterleave_block(outputs_rotated)
outputs_rotated.rotate_dimensions('backward', outputs)
argmaxs.uninterleave_block(scratch.end)
argmaxs.rotate_dimensions('backward', scratch.end)
print '\n \nConvolution time: %f seconds.' % test_time
print 'Speed: %f Gflops.' % (num_operations/test_time*1e-9)
# timer.tic()
# outputs.convolve_and_pool_replace(inputs, filters, argmaxs, stride, padding, pooling_radius, pooling_stride, 1, True, scratch, shadow)
# fixed_time = timer.toc()
# print outputs
if not time_and_dont_test:
print 'Running convolution test. '
inputs_np = np.lib.pad(inputs_rotated.ndarray(), ((0, 0), (padding, padding), (padding, padding), (0, 0)), 'constant', constant_values = (0, 0))
filters_np = filters.ndarray()
outputs_np = np.zeros((K, output_H, output_W, N)) - 1e10
pooled_outputs_np = np.zeros((K, pooled_H, pooled_W, N))
for k in range(K):
for h in range(output_H):
for w in range(output_W):
for n in range(N):
outputs_np[k, h, w, n] = np.sum(np.multiply(inputs_np[:, h:h+Y, w:w+X, n], filters_np[k, :, :, :]))
for k in range(K):
for h in range(pooled_H):
for w in range(pooled_W):
for n in range(N):
h_start = h*pooling_stride
h_end = min(h_start + pooling_radius, output_H)
w_start = w*pooling_stride
w_end = min(w_start + pooling_radius, output_W)
pooled_outputs_np[k, h, w, n] = np.amax(outputs_np[k, h_start:h_end, w_start:w_end, n])
difference = np.mean(np.abs(outputs_rotated.ndarray() - pooled_outputs_np))
if difference < 1.e-6:
print 'Convolution test passed. '
else:
print 'Convolution test failed with difference %f. ' % difference
# print (outputs.ndarray() - pooled_outputs_np)[0, 0, :, :]
# print ''
# for k in range(K):
# # for n in range(N):
# # print (k, n)
# print outputs.ndarray()[k, :, :, n]
# print ''
# print pooled_outputs_np[k, :, :, n]
# print ''
# print (outputs.ndarray() - pooled_outputs_np)[k, :, :, n]
# print ''
# print outputs
# print np.reshape(pooled_outputs_np[0, 0, :, :], pooled_outputs_np.shape[2:])
if test_gradient:
print '\n\n\n'
print '='*20
print 'Computing convolution gradient now.'
gradient_filters = filters.deepcopy().fill_zeros()
gradient_inputs = inputs.deepcopy().fill_zeros()
gradient_outputs = outputs.deepcopy().fill_randn(stream, 0., 1.)
# filters_rotated = scratch.reset(filters.shape)
# filters_interleaved = scratch.append(filters.shape)
# inputs_rotated = scratch.append(inputs.shape)
# inputs_interleaved = scratch.append(inputs.shape)
# outputs_interleaved = scratch.append(outputs.shape)
# outputs_rotated = scratch.append(outputs.shape)
# scratch.update_end()
# outputs_interleaved.shape = outputs_interleaved.shape[1:] + (outputs_interleaved.shape[0],)
# outputs_interleaved.shape = (outputs_interleaved.shape[-1]/N_block,) + outputs_interleaved.shape[0:-1] + (N_block,)
# inputs.rotate_dimensions('forward', inputs_rotated)
# inputs_rotated.interleave_block(N_block, inputs_interleaved)
# filters.rotate_dimensions('forward', filters_rotated)
# filters_rotated.interleave_block(K_block, filters_interleaved)
# timer.tic()
# # Satish, the routine you're testing goes here
# gradient_filters.convolution_gradient(inputs, filters, argmaxs,
# gradient_pooled_outputs, gradient_inputs, stride, padding, pooling_radius, pooling_stride, 1, scratch)
# convolution_gradient_time = timer.toc()
# print '\n \n Time: %f seconds.' % convolution_gradient_time
# print 'Speed: %f Gflops.' % (num_operations_gradient/convolution_gradient_time*1e-9)
if not time_and_dont_test_grad:
# compute gradient as already validated
inputs_interleaved = scratch.reset(inputs.shape)
filters_permuted = scratch.append(filters.shape)
filters_interleaved = scratch.append(filters.shape)
gradient_permuted = scratch.append(gradient_filters.shape)
gradient_interleaved = scratch.append(gradient_filters.shape, fill_zeros = True)
gradient_inputs_interleaved = scratch.append(gradient_inputs.shape, fill_zeros = True)
gradient_outputs_interleaved = scratch.append(gradient_outputs.shape)
scratch.update_end()
inputs.interleave_for_gradient(C_block_grad, inputs_interleaved)
filters.permute_dimensions((2, 0, 3, 1), filters_permuted)
filters_permuted.interleave_block(C_block_grad, filters_interleaved)
gradient_outputs.permute_dimensions((0, 2, 3, 1), gradient_outputs_interleaved)
N, c, H, W = gradient_inputs_interleaved.shape
gradient_inputs_interleaved.shape = (N, c/C_block_grad, H, W, C_block_grad)
K, c, Y, X = gradient_filters.shape
gradient_interleaved.shape = (Y, K, X, c)
gradient_interleaved.shape = (gradient_interleaved.shape[-1]/C_block_grad,) + gradient_interleaved.shape[0:-1] + (C_block_grad,)
argmaxs.permute_dimensions((0, 2, 3, 1), scratch.end)
gradient_interleaved.convolution_gradient(inputs_interleaved, filters_interleaved, argmaxs,
gradient_outputs_interleaved, gradient_inputs_interleaved, stride, padding,
pooling_radius, pooling_stride,
1, scratch.end)
gradient_interleaved.uninterleave_block(gradient_permuted)
gradient_permuted.permute_dimensions((1, 3, 0, 2), gradient_filters)
difference = np.mean(np.abs(gradient_filters.ndarray() - gradient_filters_tested.ndarray()))
print '\n\n'
if difference < 1.e-4:
print 'Gradient test passed. '
else:
print 'Gradient test failed. '
# def test_convolution(time_and_dont_test, time_and_dont_test_grad, test_gradient, offload, N, K, c, H, W, X, Y, stride, padding, pooling_radius, pooling_stride, scratch, shadow, N_block, K_block, C_block, N_block_grad, C_block_grad, H_arg_block_grad, W_arg_block_grad, Y_block_grad):
# output_H = (H + 2*padding - Y + 1)/stride
# output_W = (W + 2*padding - X + 1)/stride
# pooled_H = int(np.ceil((output_H - pooling_radius + 1.)/pooling_stride))
# pooled_W = int(np.ceil((output_W - pooling_radius + 1.)/pooling_stride))
# K_preshadow = K
# if shadow:
# K *= 2
# num_operations = N*K*c*output_H*output_W*X*Y*2
# num_operations_argmax = N*K*c*pooled_H*pooled_W*Y*X*2
# num_operations_gradient = N*K*c*pooled_H*pooled_W*Y*X*2
# inputs = C.MICMat((c, H, W, N))
# inputs.fill_zeros()
+ "': for '" +
self.headopts.required_system + "', this system is '"
+ systemtype + "'")
return [ False, True ]
return [ True, False ]
def close(self, forcePreserve):
self.runfile.close()
if self.exedir and self.dircreated and \
not self.args.preserve and not forcePreserve:
os.chdir('..')
shutil.rmtree(self.exedir)
def fail(self, line, s):
# make it work if line is None or is a plain string.
try:
prefix = simplify_path(self.wttopdir, line.prefix())
except:
prefix = 'syscall.py: '
print(prefix + s, file=sys.stderr)
def failrange(self, line, lineto, s):
# make it work if line is None or is a plain string.
try:
prefix = simplify_path(self.wttopdir, line.range_prefix(lineto))
except:
prefix = 'syscall.py: '
print(prefix + s, file=sys.stderr)
def str_match(self, s1, s2):
fuzzyRight = False
if len(s1) < 2 or len(s2) < 2:
return False
if s1[-3:] == '...':
fuzzyRight = True
s1 = s1[:-3]
if s2[-3:] == '...':
s2 = s2[:-3]
if s1[0] != '"' or s1[-1] != '"' or s2[0] != '"' or s2[-1] != '"':
return False
s1 = s1[1:-1]
s2 = s2[1:-1]
# We allow a trailing \0
if s1[-2:] == '\\0':
s1 = s1[:-2]
if s2[-2:] == '\\0':
s2 = s2[:-2]
if fuzzyRight:
            return s2.startswith(s1)
else:
return s1 == s2
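    # Usage sketch: str_match('"/tmp/foo..."', '"/tmp/foobar"') fuzzy-matches on
    # the trailing '...', and str_match('"abc\\0"', '"abc"') matches because a
    # trailing \0 is stripped from either side before comparing.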
def expr_eval(self, s):
return eval(s, {}, self.variables)
def arg_match(self, a1, a2):
a1 = a1.strip()
a2 = a2.strip()
if a1 == a2:
return True
if len(a1) == 0 or len(a2) == 0:
return False
if a1[0] == '"':
return self.str_match(a1, a2)
#print(' arg_match: <' + a1 + '> <' + a2 + '>')
try:
a1value = self.expr_eval(a1)
except Exception:
self.fail(a1, 'unknown expression: ' + a1)
return False
try:
a2value = self.expr_eval(a2)
except Exception:
self.fail(a2, 'unknown expression: ' + a2)
return False
return a1value == a2value or int(a1value) == int(a2value)
def split_args(self, s):
if s[0] == '(':
s = s[1:]
if s[-1] == ')':
s = s[:-1]
return argpat.split(s)[1::2]
def args_match(self, args1, args2):
#print('args_match: ' + str(s1) + ', ' + str(s2))
pos = 0
for a1 in args1:
a1 = a1.strip()
if a1 == '...': # match anything?
return True
if pos >= len(args2):
return False
if not self.arg_match(a1, args2[pos]):
return False
pos += 1
if pos < len(args2):
return False
return True
    # func(args); is shorthand for ASSERT_EQ(func(args), xxx);
# where xxx may be 0 or may be derived from one of the args.
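    # For example, a bare close(fd); in the runfile is checked as
    # ASSERT_EQ(close(fd), 0); while pwrite(fd, buf, n); is compared against
    # its own size argument n.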
def call_compare(self, callname, result, eargs, errline):
if callname in calls_returning_zero:
return self.compare("EQ", result, "0", errline)
elif callname == 'pwrite' or callname == 'pwrite64':
return self.compare("EQ",
re.sub(pwrite_in, pwrite_out, result),
re.sub(pwrite_in, pwrite_out, eargs[2]),
errline)
else:
self.fail(errline, 'call ' + callname +
': not known, use ASSERT_EQ()')
def compare(self, compareop, left, right, errline):
l = self.expr_eval(left)
r = self.expr_eval(right)
if (compareop == "EQ" and l == r) or \
(compareop == "NE" and l != r) or \
(compareop == "LT" and l < r) or \
(compareop == "LE" and l <= r) or \
(compareop == "GT" and l > r) or \
(compareop == "GE" and l >= r):
return True
else:
self.fail(errline,
'call returned value: ' + left + ', comparison: (' +
left + ' ' + compareop + ' ' + right +
') at line: ' + errline)
return False
def match_report(self, runline, errline, verbose, skiplines, result, desc):
if result:
if verbose:
print('MATCH:')
print(' ' + runline.prefix() + runline)
print(' ' + errline.prefix() + errline)
else:
if verbose:
if not skiplines:
msg('Expecting ' + desc)
print(' ' + runline.prefix() + runline +
' does not match:')
print(' ' + errline.prefix() + errline)
else:
print(' (... match) ' + errline.prefix() + errline)
return result
def match(self, runline, errline, verbose, skiplines):
m = re.match(outputpat, runline)
if m:
outwant = m.groups()[0]
return self.match_report(runline, errline, verbose, skiplines,
errline == outwant, 'output line')
if self.args.systype == 'Linux':
em = re.match(strace_pat, errline)
elif self.args.systype == 'Darwin':
em = re.match(dtruss_pat, errline)
if not em:
self.fail(errline, 'Unknown strace/dtruss output: ' + errline)
return False
gotcall = re.sub(pwrite_in, pwrite_out, em.groups()[0])
# filtering syscalls here if needed. If it's not a match,
# mark the errline so it is retried.
if self.strip_syscalls != None and gotcall not in self.strip_syscalls:
errline.skip = True
return False
m = re.match(assignpat, runline)
if m:
if m.groups()[1] != gotcall:
return self.match_report(runline, errline, verbose, skiplines,
False, 'syscall to match assignment')
rargs = self.split_args(m.groups()[2])
eargs = self.split_args(em.groups()[1])
result = self.args_match(rargs, eargs)
if result:
self.variables[m.groups()[0]] = em.groups()[2]
return self.match_report(runline, errline, verbose, skiplines,
result, 'syscall to match assignment')
# pattern groups using example ASSERT_EQ(close(fd), 0);
# 0 : comparison op ("EQ")
# 1 : function call name "close"
# 2 : function call args "(fd)"
        # 3 : comparator "0"
m = re.match(assertpat, runline)
if m:
if m.groups()[1] != gotcall:
return self.match_report(runline, errline, verbose, skiplines,
False, 'syscall to match ASSERT')
rargs = self.split_args(m.groups()[2])
eargs = self.split_args(em.groups()[1])
result = self.args_match(rargs, eargs)
if not result:
return self.match_report(runline, errline, verbose, skiplines,
result, 'syscall to match ASSERT')
result = self.compare(m.groups()[0], em.groups()[2],
m.groups()[3], errline)
return self.match_report(runline, errline, verbose, skiplines,
result, 'ASSERT')
# A call without an enclosing ASSERT is reduced to an ASSERT,
# depending on the particular system call.
m = re.match(callpat, runline)
if m:
if m.groups()[0] != gotcall:
return self.match_report(runline, errline, verbose, skiplines,
False, 'syscall')
rargs = self.split_args(m.groups()[1])
eargs = self.split_args(em.groups()[1])
result = self.args_match(rargs, eargs)
if not result:
return self.match_report(runline, errline, verbose, skiplines,
result, 'syscall')
result = self.call_compare(m.groups()[0], em.groups()[2],
eargs, errline)
return self.match_report(runline, errline, verbose, skiplines,
result, 'syscall')
self.fail(runline, 'unrecognized pattern in runfile:' + runline)
return False
def match_lines(self):
outfile = FileReader(self.wttopdir, self.outfilename, True)
errfile = FileReader(self.wttopdir, self.errfilename, True)
if outfile.readline():
self.fail(None, 'output file has content, expected to be empty')
return False
with outfile, errfile:
runlines = self.order_runfile(self.runfile)
errline = errfile.readline()
errline = re.sub(pwrite_in, pwrite_out, errline)
if re.match(dtruss_init_pat, errline):
errline = errfile.readline()
skiplines = False
for runline in runlines:
runline = re.sub(pwrite_in, pwrite_out, runline)
if runline == '...':
skiplines = True
if self.args.verbose:
print('Fuzzy matching:')
print(' ' + runline.prefix() + runline)
continue
first_errline = errline
while errline and not self.match(runline, errline,
self.args.verbose, skiplines):
if skiplines or hasattr(errline, 'skip'):
errline = errfile.readline()
else:
self.fail(runline, "expecting " + runline)
self.failrange(first_errline, errline, "does not match")
return False
if not errline:
self.fail(runline, "failed to match line: " + runline)
self.failrange(first_errline, errline, "does not match")
return False
errline = errfile.readline()
if re.match(dtruss_init_pat, errline):
errline = errfile.readline()
skiplines = False
if errline and not skiplines:
self.fail(errline, "extra lines seen starting at " + errline)
return False
return True
def order_runfile(self, f):
# In OS X, dtruss is implemented using dtrace's apparently buffered
# printf writes to stdout, but that is all redirected to stderr.
# Because of that, the test program's writes to stderr do not
# interleave with dtruss output as it does with Linux's strace
# (which writes directly to stderr). On OS X, we get the program's
        # output first; we compensate for this by moving all the
# OUTPUT statements in the runfile to match first. This simple
# approach will break if there is more data generated by OUTPUT
# statements than a stdio buffer's size.
matchout = (self.args.systype == 'Darwin')
out = []
nonout = []
s = f.readline()
while s:
if matchout and re.match(outputpat, s):
out.append(s)
elif not re.match(discardpat, s):
nonout.append(s)
s = f.readline()
out.extend(nonout)
return out
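    # Reordering sketch: on Darwin a runfile of
    #   OUTPUT("hello"); close(fd); OUTPUT("bye");
    # is matched in the order [OUTPUT("hello"), OUTPUT("bye"), close(fd)],
    # since the program's stdout arrives before the buffered dtruss output;
    # on Linux the original order is preserved.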
def run(self):
if not self.exedir:
self.fail(None, "Execution directory not set")
return False
if not os.path.isfile(self.testexe):
msg("'" + self.testexe + "': no such file")
return False
shutil.rmtree(self.exedir, ignore_errors=True)
os.mkdir(self.exedir)
self.dircreated = True
os.chdir(self.exedir)
callargs = list(self.strace)
trace_syscalls = self.headopts.trace_syscalls
if self.args.systype == 'Linux':
callargs.extend(['-e', 'trace=' + trace_syscalls ])
elif self.args.systype == 'Darwin':
# dtrace has no option to limit the syscalls to be traced,
# so we'll filter the output.
self.strip_syscalls = re.sub(pwrite_in, pwrite_out,
self.headopts.trace_syscalls).split(',')
callargs.append(self.testexe)
callargs.extend(self.runargs)
outfile = open(self.outfilename, 'w')
errfile = open(self.errfilename, 'w')
if self.args.verbose:
print('RUNNING: ' + str(callargs))
subret = subprocess.call(callargs, stdout=outfile, stderr=errfile)
outfile.close()
errfile.close()
if subret != 0:
msg("'" | |
# Copyright 2013-2014 <NAME> Licensed under the
# Educational Community License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.osedu.org/licenses/ECL-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS"
# BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
from collections import Counter
import ast
import rethinkdb as r
from rethinkdb.errors import RqlRuntimeError
from fabric.api import task
from BanzaiDB import database
from BanzaiDB import converters
from BanzaiDB import misc
from BanzaiDB import imaging
# __version__ = 0.3.0
TABLE = 'determined_variants'
def get_required_strains(strains):
"""
Returns a list of strains stored in the database if argument strains=None
If argument strains=None we actually query the database
If argument strains is not None we actually just spit the strain string on
the space delimiter.
:param strains: a string of strain IDs
:type strains: string or None
:returns: a list of strains (if None, those all stored in the database)
"""
strains_list = []
with database.make_connection() as connection:
if strains is None:
get_strains = r.table('strains_under_investigation').pluck('StrainID').run(connection)
strains_list = [e['StrainID'].encode('ascii', 'ignore') for e in get_strains]
else:
strains_list = strains.split(' ')
return strains_list
def get_num_strains():
"""
Get the number of strains in the study
    It queries all strains in the database and factors in whether the reference
    was included in the run (removing it from the count)
:returns: the number of strains as an int
"""
strains = get_required_strains(None)
strain_count = len(strains)
with database.make_connection() as connection:
# In case reference is included in run
# Supports current reference
ref_id = get_current_reference_id()
for e in strains:
if e.find(ref_id) != -1:
strain_count = strain_count-1
break
return strain_count
def get_current_reference_id():
"""
Returns the current reference
    :returns: the current reference's primary key as a string
"""
with database.make_connection() as connection:
return r.table('references').get("current_reference").run(connection)["reference_id"]
def filter_counts(list_of_elements, minimum):
"""
Filter out elements in a list that are not observed a minimum of times
:param list_of_elements: a list of for example positions
    :param minimum: the minimum number of times a value must be observed
:type list_of_elements: list
:type minimum: int
:returns: a dictionary of value:observation key value pairs
"""
counts = Counter(list_of_elements)
lookup = {}
for k, v in counts.items():
if v >= minimum:
lookup[k] = v
return lookup
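# Usage sketch: filter_counts(['p1', 'p1', 'p2'], 2) returns {'p1': 2}; only
# values observed at least `minimum` times survive.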
def position_counter(strains):
"""
Pull all the positions that we observe changes
.. note::
This query could be sped up?
"""
with database.make_connection() as connection:
pos = []
for strain in strains:
# Get every variant position
cursor = r.table(TABLE).filter({'StrainID': strain}).pluck(
'Position').run(connection)
cur = [strain['Position'] for strain in cursor]
pos = pos+cur
common = filter_counts(pos, len(strains))
return common
def fetch_given_strain_position(strain, position):
"""
With a strainID and a 'change' position return known details
Prints the position, locus tag, product, class and subclass
:param strain: the strain ID
:param position: the position relative to the reference
:type strain: string
:type position: int
:returns: a dictionary (JSON)
"""
result = {}
with database.make_connection() as connection:
result = list(r.table(TABLE).filter({'StrainID': strain, 'Position': position}).run(connection))[0]
print str(result['Position'])+","+result['LocusTag']+","+result['Product']+","+result['Class']+","+str(result['SubClass'])
return result
@task
def get_variant_stats(strains):
"""
Return (and print) variant stats given 1 or more space delimited strain IDs
Breakdown of counts:
    * substitution (syn/non-syn)
* insertion
* deletion
"""
    results = {}
    with database.make_connection() as connection:
        # Minimal sketch (assumes each variant document carries a 'Class'
        # field such as 'substitution', 'insertion' or 'deletion', as used
        # elsewhere in this module): tally variant classes per strain.
        for strain in get_required_strains(strains):
            cursor = r.table(TABLE).filter({'StrainID': strain}).pluck('Class').run(connection)
            results[strain] = dict(Counter([e['Class'] for e in cursor]))
            print strain, results[strain]
    return results
def get_generator(strains, reference_id, start, end):
"""
Generates a list of primary keys to pass to a get all call
    Exploits the fact that SNP primary keys are in the format::
STRAINID_REFID_POSITION
ASCC880030_NC_008527_100230
:param strains: a list of strain ids
:param reference_id: the current reference id
:param start: the snp start range
:param end: the snp end range
:type strains: list
:type reference_id: string
:type start: int
:type end: int
:returns: a list of possible primary keys
"""
primary_keys = []
vals = range(start, end+1)
for val in vals:
for strain in strains:
primary_keys.append(strain+"_"+reference_id+"_"+str(val))
return primary_keys
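# Usage sketch, following the primary key format documented above:
# get_generator(['ASCC880030'], 'NC_008527', 100230, 100231) returns
# ['ASCC880030_NC_008527_100230', 'ASCC880030_NC_008527_100231'].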
@task
def get_SNPs_in_range(start, end, verbose=True,
plucking='StrainID Position LocusTag SubClass'):
"""
Return all the SNPs in given [start:end] range (inclusive of)
By default: print (in CSV) results with headers:
StrainID, Position, LocusTag, SubClass
Examples::
# All variants in the 1 Kb range of 60K-61K
fab variants.get_SNPs_in_range:60000,61000
# Nail down on a particular position and redefine the output
fab variants.get_SNPs_in_range:191,191,plucking='StrainID Position Class Product'
:param start: the genomic location start
:param end: the genomic location end
:param verbose: [def = True] toggle if printing results
:param plucking: [def = 'StrainID Position LocusTag SubClass']
toggle headers based on table values
:returns: List containing JSON elements with the data: 'StrainID',
'Position', 'LocusTag', 'SubClass' for each result
"""
verbose = ast.literal_eval(str(verbose))
plucking = plucking.split(' ')
strains = get_required_strains(None)
reference = get_current_reference_id()
possible_primary_keys = get_generator(strains, reference,
int(start), int(end))
JSON_result = []
count = 0
with database.make_connection() as connection:
for e in possible_primary_keys:
try:
# get won't give a cursor
value = r.table(TABLE).get(e).pluck(plucking).run(connection)
count += 1
except RqlRuntimeError:
value = None
if value is not None:
if count == 1:
print converters.convert_from_JSON_to_CSV(value, True)
else:
print converters.convert_from_JSON_to_CSV(value)
JSON_result.append(value)
dist = float((int(end)-int(start))+1)
density = count/dist
#print "\nAverage SNP density in region: %f" % (density/float(len(strains)))
return JSON_result
@task
def get_variants_by_keyword(regular_expression, ROW='Product', verbose=True,
plucking='StrainID Position LocusTag Class SubClass'):
"""
    Return variants whose "Product" field matches the regular_expression
Supported regular expression syntax:
https://code.google.com/p/re2/wiki/Syntax
By default: print (in CSV) results with headers:
StrainID, Position, LocusTag, Class, SubClass
:param regular_expression:
:param ROW: [def = 'Product'] toggle searching of other table headers
:param verbose: [def = True] toggle if printing results
:param plucking: [def = 'StrainID Position LocusTag Class SubClass']
toggle headers based on table headers
:returns: List containing JSON elements with the data: 'StrainID',
'Position', 'LocusTag', 'Class', 'SubClass' for each result
"""
verbose = ast.literal_eval(str(verbose))
plucking = plucking.split(' ')
JSON_result = []
with database.make_connection() as connection:
cursor = r.table(TABLE).filter(lambda row: row[ROW].match(
regular_expression)).pluck(plucking).run(connection)
for idx, document in enumerate(cursor):
if verbose:
if idx != 0:
print converters.convert_from_JSON_to_CSV(document)
else:
print converters.convert_from_JSON_to_CSV(document, True)
JSON_result.append(document)
return JSON_result
@task
def plot_variant_positions(strains):
"""
Generate a PDF of SNP positions for given strains using GenomeDiagram
    Places the reference features on the outer ring
    User has to provide a space delimited list of strains (see warning below)
    .. warning:: if you have heaps of variants this will most likely fry your
    computer.
"""
if strains.lower() == 'all':
strains = None
strains = get_required_strains(strains)
gd_data = []
with database.make_connection() as connection:
for strain in strains:
hits = r.table(TABLE).filter(lambda row: row['StrainID'].match(
strain)).pluck('Position', 'Class').run(connection)
feat = []
for hit in hits:
cur = hit['Position']
feat.append(misc.create_feature(cur, cur, hit['Class'], strand=None))
gd_data.append(feat)
imaging.plot_SNPs(gd_data, strains)
@task
def variant_hotspots(most_prevalent=100, verbose=True):
"""
Return the (default = 100) prevalent variant positions
Example usage::
fab variants.variant_hotspots
fab variants.variant_hotspots:250
:param most_prevalent: [def = 100]
"""
verbose = ast.literal_eval(str(verbose))
most_prevalent = int(most_prevalent)
ROW = 'Position'
# Fetch & store all positions
with database.make_connection() as connection:
cursor = r.table(TABLE).pluck(ROW).run(connection)
positions = [int(e[ROW]) for e in cursor]
    # Count occurrences at positions
counts = Counter(positions)
mp = counts.most_common(most_prevalent)
# Now extract out
header = "Counts,Position,LocusTag,Product"
results = []
results.append(header)
if verbose:
print header
with database.make_connection() as connection:
for element in mp:
first_hit = list(r.table(TABLE).filter(r.row[ROW] == int(element[0])).pluck('Position', 'LocusTag').run(connection))[0]
product = '"'+list(r.table('reference_features').filter({'LocusTag': first_hit['LocusTag']}).pluck('Product').run(connection))[0]['Product']+'"'
cur = '%i,%i,%s,%s' % (element[1], first_hit['Position'], first_hit['LocusTag'], product)
results.append(cur)
if verbose:
print cur
return results
@task
def variant_positions_within_atleast(minimum_at_position=None, verbose=True):
"""
Return positions that have at least this many variants
By default the minimum number will be equal to all the strains in the
study.
    Example usage::
fab variants.variant_positions_within_atleast
fab variants.variant_positions_within_atleast:16
:param minimum_at_position: [def = None] minimum number of variants
        conserved in N strains at this position
"""
verbose = ast.literal_eval(str(verbose))
if minimum_at_position is None:
minimum_at_position = get_num_strains()
else:
minimum_at_position = int(minimum_at_position)
ROW = 'Position'
# Fetch & store all positions
with database.make_connection() as connection:
cursor = r.table(TABLE).pluck(ROW).run(connection)
positions = [int(e[ROW]) for e in cursor]
    # Count occurrences at positions
counts = Counter(positions)
# Filter out those below threshold
lookup = {}
for k, v in counts.items():
if v >= minimum_at_position:
lookup[k] = v
    # Now extract
from operator import itemgetter
import collections
from collections import deque, Counter
import re
import fitz
from docx import Document
class DocumentParser:
@staticmethod
def fonts(doc, granularity=False):
"""
Extracts fonts and their usage in PDF documents
:param doc: PDF document to iterate through
:param granularity: also use 'font', 'flags' and 'color' to discriminate text
:return: list of most used fonts sorted by count, font style information
"""
styles = {}
font_counts = {}
for page in doc:
blocks = page.getText("dict")["blocks"]
for b in blocks: # iterate through the text blocks
if b['type'] == 0: # block contains text
for l in b["lines"]: # iterate through the text lines
for s in l["spans"]: # iterate through the text spans
if granularity:
identifier = "{0}_{1}_{2}_{3}".format(s['size'], s['flags'], s['font'], s['color'])
styles[identifier] = {'size': s['size'], 'flags': s['flags'], 'font': s['font'],
'color': s['color']}
else:
identifier = "{0}".format(s['size'])
styles[identifier] = {'size': s['size'], 'font': s['font']}
font_counts[identifier] = font_counts.get(identifier, 0) + 1 # count the fonts usage
font_counts = sorted(font_counts.items(), key=itemgetter(1), reverse=True)
if not font_counts:
raise ValueError("Zero discriminating fonts found!")
return font_counts, styles
@staticmethod
def font_tags(font_counts, styles):
"""
Returns dictionary with font sizes as keys and tags as value
        :param font_counts: (font_size, count) for all fonts occurring in document
:param styles: all styles found in the document
:return: all element tags based on font-sizes
"""
p_style = styles[font_counts[0][0]] # get style for most used font by count (paragraph)
p_size = p_style['size'] # get the paragraph's size
# sorting the font sizes high to low, so that we can append the right integer to each tag
font_sizes = []
for (font_size, count) in font_counts:
font_sizes.append(float(font_size))
font_sizes.sort(reverse=True)
# aggregating the tags for each font size
idx = 0
size_tag = {}
for size in font_sizes:
idx += 1
if size == p_size:
idx = 0
size_tag[size] = '<p>'
if size > p_size:
size_tag[size] = '<h{0}>'.format(idx)
elif size < p_size:
size_tag[size] = '<s{0}>'.format(idx)
return size_tag
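    # Usage sketch: with font_counts = [('12.0', 80), ('16.0', 5), ('9.0', 3)]
    # the most used size becomes the paragraph tag and the rest are ranked:
    # {16.0: '<h1>', 12.0: '<p>', 9.0: '<s1>'}.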
@staticmethod
def headers_paragraphs(doc, size_tag):
"""
        Scrapes headers & paragraphs from PDF and returns texts with element tags
:param doc: PDF document to iterate through
:param size_tag: textual element tags for each size
        :return: list of texts with prepended element tags
"""
header_para = [] # list with headers and paragraphs
        first = True  # boolean flag for the first header
previous_s = {} # previous span
for page in doc:
blocks = page.getText("dict")["blocks"]
for b in blocks: # iterate through the text blocks
if b['type'] == 0: # this block contains text
# REMEMBER: multiple fonts and sizes are possible IN one block
block_string = "" # text found in block
for l in b["lines"]: # iterate through the text lines
for s in l["spans"]: # iterate through the text spans
if s['text'].strip(): # removing whitespaces:
if first:
previous_s = s
first = False
block_string = size_tag[s['size']] + " " + s['text']
else:
if s['size'] == previous_s['size']:
if block_string and all((c == "|") for c in block_string):
# block_string only contains pipes
block_string = size_tag[s['size']] + " " + s['text']
if block_string == "":
# new block has started, so append size tag
block_string = size_tag[s['size']] + " " + s['text']
else: # in the same block, so concatenate strings
block_string += " " + s['text']
else:
header_para.append(block_string)
block_string = size_tag[s['size']] + " " + s['text']
previous_s = s
# new block started, indicating with a pipe
block_string += "|"
header_para.append(block_string)
return header_para
@staticmethod
def pdf(path):
"""
Accepts the path of the pdf file and processes it
:param path: Path of the pdf file
:return: list of sentences and dictionary structure of the document
"""
document = path
doc = fitz.open(document)
# get the allowed font sizes
font_counts, styles = DocumentParser.fonts(doc, granularity=False)
allowed_sizes = []
para = float(font_counts[0][0])
for element in font_counts:
if float(element[0]) >= para:
allowed_sizes.append(float(element[0]))
allowed_sizes.sort(reverse=True)
# get tag to size dictionary
size_dict = {}
size_dict[allowed_sizes[-1]] = "<p>"
for i in range(len(allowed_sizes) - 1):
size_dict[allowed_sizes[i]] = "<h" + str(i + 1) + ">"
no_diff_fonts = len(allowed_sizes)
highestSize = no_diff_fonts
tagtosize = {}
for i in range(no_diff_fonts):
tagtosize[size_dict[allowed_sizes[i]][1:-1]] = highestSize
highestSize -= 1
# get list of strings with tags and list of priority by number
size_tag = DocumentParser.font_tags(font_counts, styles)
elements = DocumentParser.headers_paragraphs(doc, size_tag)
elements = [i.replace('|', '') for i in elements]
elements = [i for i in elements if len(i.strip()) > 0]
elements2 = [i for i in elements if not i.replace(i[i.find("<"):i.find(">") + 1], '').strip().isdigit()]
qw = [item for item, count in collections.Counter(elements2).items() if count > 5 and '<h' not in item]
final_list = [item for item in elements2 if item not in qw]
final_list = [item for item in final_list if not '<s' in item]
doc_list = final_list
docsize_list = []
for string in doc_list:
tag = string[string.find("<") + 1:string.find(">")]
docsize_list.append(tagtosize[tag])
# remove consecutive duplicates
reducedoclist = []
preValue = -1
for element_n in docsize_list:
value = element_n
if value != 1:
reducedoclist.append(value)
else:
if preValue != value:
reducedoclist.append(value)
preValue = value
# merge continuous tags
newlist = []
sizelistReduced = []
string = doc_list[0]
tag = string[string.find("<") + 1:string.find(">")]
for element in doc_list[1:]:
tag1 = element[element.find("<") + 1:element.find(">")]
if tag1 != tag:
newlist.append(string)
sizelistReduced.append(tagtosize[tag])
string = element
tag = tag1
else:
element = re.sub('<' + tag1 + '>', '', element)
if string.strip()[-1].isalpha() or string.strip()[-1].isdigit():
string = string + ', ' + element
else:
string = string + " " + element
newlist.append(string)
sizelistReduced.append(tagtosize[tag])
# order the strings based on tags
thislist = sizelistReduced
helperstack = deque()
helperid = deque()
index = 1
arrSize = len(thislist)
treedict = {}
helperstack.append(thislist[arrSize - 1])
helperid.append(arrSize - 1)
while helperstack:
value = thislist[arrSize - index - 1]
if value > helperstack[-1]:
treedict[arrSize - index - 1] = []
while helperstack and (value > helperstack[-1]):
helperstack.pop()
treedict[arrSize - index - 1].append(helperid.pop())
helperstack.append(value)
helperid.append(arrSize - index - 1)
index += 1
if index >= arrSize:
break
return treedict, newlist
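    # Worked example (sketch): for merged sizes [3, 2, 2, 1] (h1, h2, h2, p) the
    # stack pass yields {2: [3], 0: [1, 2]}: element 0 (the h1) parents the two
    # h2 blocks, and the second h2 (index 2) parents the trailing paragraph.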
@staticmethod
def docx(path):
"""
Accepts the path of the docx file and processes it
:param path: Path of the docx file
:return: list of sentences and dictionary structure of the document
"""
doc = Document(path)
size_list = [p.style.font.size for p in doc.paragraphs]
# get the allowed font sizes
A = Counter(size_list).most_common()
para = A[0][0]
allowed_sizes = []
for s in A:
if str(s[0]).isdigit() and s[0] >= para:
allowed_sizes.append(s[0])
allowed_sizes.sort(reverse=True)
# get tag to size dictionary
size_dict = {}
sizeorder_dict = {}
size_dict[allowed_sizes[-1]] = "<p>"
for i in range(len(allowed_sizes) - 1):
size_dict[allowed_sizes[i]] = "<h" + str(i) + ">"
no_diff_fonts = len(allowed_sizes)
highestSize = no_diff_fonts
tagtosize = {}
for i in range(no_diff_fonts):
sizeorder_dict[allowed_sizes[i]] = highestSize
tagtosize[size_dict[allowed_sizes[i]][1:-1]] = highestSize
highestSize -= 1
# get list of strings with tags and list of priority by number
doc_list = []
docsize_list = []
for p in doc.paragraphs:
size = p.style.font.size
if size in size_dict:
text = p.text.strip()
if text != '':
tag = size_dict[size]
doc_list.append(tag + " " + text)
docsize_list.append(sizeorder_dict[size])
# remove consecutive duplicates
reducedoclist = []
preValue = -1
for element_n in docsize_list:
value = element_n
if value != 1:
reducedoclist.append(value)
else:
if preValue != value:
reducedoclist.append(value)
preValue = value
# merge continuous tags
newlist = []
sizelistReduced = []
string = doc_list[0]
tag = string[string.find("<") + 1:string.find(">")]
for element in doc_list[1:]:
tag1 = element[element.find("<") + 1:element.find(">")]
if tag1 != tag:
newlist.append(string)
sizelistReduced.append(tagtosize[tag])
string = element
tag = tag1
else:
element = re.sub('<' + tag1 + '>', '', element)
if string.strip()[-1].isalpha() or string.strip()[-1].isdigit():
string = string + ', ' + element
else:
string = string + " " + element
newlist.append(string)
sizelistReduced.append(tagtosize[tag])
# order the strings based on tags
thislist = sizelistReduced
helperstack = deque()
helperid = deque()
index = 1
arrSize = len(thislist)
tree_struct = {}
helperstack.append(thislist[arrSize - 1])
helperid.append(arrSize - 1)
while helperstack:
value = thislist[arrSize - index - 1]
if value > helperstack[-1]:
tree_struct[arrSize - index - 1] = []
while helperstack and (value > helperstack[-1]):
helperstack.pop()
                    tree_struct[arrSize - index - 1].append(helperid.pop())
            helperstack.append(value)
            helperid.append(arrSize - index - 1)
            index += 1
            if index >= arrSize:
                break
        return tree_struct, newlist
parte'),
('G00.0', 'Meningite por Haemophilus'),
('G00.1', 'Meningite pneumocócica'),
('G00.2', 'Meningite estreptocócica'),
('G00.3', 'Meningite estafilocócica'),
('G00.8', 'Outras meningites bacterianas'),
('G00.9', 'Meningite bacteriana não especificada'),
('G01', ' *Meningite em doenças bacterianas classificadas em outra parte'),
('G02.0', 'Meningite em doenças virais classificadas em outra parte'),
('G02.1', 'Meningite em micoses'),
('G02.8', 'Meningite em outras doenças infecciosas e parasitárias classificadas em outra parte'),
('G03.0', 'Meningite não-piogênica'),
('G03.1', 'Meningite crônica'),
('G03.2', 'Meningite recorrente benigna [Mollaret]'),
('G03.8', 'Meningite devida a outras causas especificadas'),
('G03.9', 'Meningite não especificada'),
('G04.0', 'Encefalite aguda disseminada'),
('G04.1', 'Paraplegia espástica tropical'),
('G04.2', 'Meningoencefalite e meningomielite bacterianas não classificadas em outra parte'),
('G04.8', 'Outras encefalites, mielites e encefalomielites'),
('G04.9', 'Encefalite, mielite e encefalomielite não especificada'),
('G05.0', 'Encefalite, mielite e encefalomielite em doenças bacterianas classificadas em outra parte'),
('G05.1', 'Encefalite, mielite e encefalomielite em doenças virais classificadas em outra parte'),
('G05.2', 'Encefalite, mielite e encefalomielite em outras doenças infecciosas e parasitárias classificadas em outra parte'),
('G05.8', 'Encefalite, mielite e encefalomielite em outras doenças classificadas em outra parte'),
('G06.0', 'Abscesso e granuloma intracranianos'),
('G06.1', 'Abscesso e granuloma intra-raquidianos'),
('G06.2', 'Abscesso extradural e subdural não especificados'),
('G07', ' *Abscesso e granuloma intracranianos e intraspinais em doenças classificadas em outra parte'),
('G08', ' Flebite e tromboflebite intracranianas e intra-raquidianas'),
('G09', ' Seqüelas de doenças inflamatórias do sistema nervoso central'),
('G10', ' Doença de Huntington'),
('G11.0', 'Ataxia congênita não-progressiva'),
('G11.1', 'Ataxia cerebelar de início precoce'),
('G11.2', 'Ataxia cerebelar de início tardio'),
('G11.3', 'Ataxia cerebelar com déficit na reparação do DNA'),
('G11.4', 'Paraplegia espástica hereditária'),
('G11.8', 'Outras ataxias hereditárias'),
('G11.9', 'Ataxia hereditária não especificada'),
('G12.0', 'Atrofia muscular espinal infantil tipo I [Werdnig-Hoffman]'),
('G12.1', 'Outras atrofias musculares espinais hereditárias'),
('G12.2', 'Doença do neurônio motor'),
('G12.8', 'Outras atrofias musculares espinais e síndromes musculares correlatas'),
('G12.9', 'Atrofia muscular espinal não especificada'),
('G13.0', 'Neuromiopatia e neuropatia paraneoplásicas'),
('G13.1', 'Outra atrofia sistêmica que afeta primariamente o sistema nervoso central em doenças neoplásicas'),
('G13.2', 'Atrofia sistêmica que afeta primariamente o sistema nervoso central no mixedema'),
('G13.8', 'Atrofia sistêmica que afeta primariamente o sistema nervoso central em outras doenças classificadas em outra parte'),
('G20', ' Doença de Parkinson'),
('G21.0', 'Síndrome maligna dos neurolépticos'),
('G21.1', 'Outras formas de parkinsonismo secundário induzido por drogas'),
('G21.2', 'Parkinsonismo secundário devido a outros agentes externos'),
('G21.3', 'Parkinsonismo pós-encefalítico'),
('G21.8', 'Outras formas de parkinsonismo secundário'),
('G21.9', 'Parkinsonismo secundário não especificado'),
('G22', ' *Parkinsonismo em doenças classificadas em outra parte'),
('G23.0', 'Doença de Hallervorden-Spatz'),
('G23.1', 'Oftalmoplegia supranuclear progressiva [Steele-Richardson-Olszewski]'),
('G23.2', 'Degeneração estrionígrica'),
('G23.8', 'Outras doenças degenerativas especificadas dos gânglios da base'),
('G23.9', 'Doença degenerativa dos gânglios da base, não especificada'),
('G24.0', 'Distonia induzida por drogas'),
('G24.1', 'Distonia familiar idiopática'),
('G24.2', 'Distonia não-familiar idiopática'),
('G24.3', 'Torcicolo espasmódico'),
('G24.4', 'Distonia orofacial idiopática'),
('G24.5', 'Blefaroespasmo'),
('G24.8', 'Outras distonias'),
('G24.9', 'Distonia não especificada'),
('G25.0', 'Tremor essencial'),
('G25.1', 'Tremor induzido por drogas'),
('G25.2', 'Outras formas especificadas de tremor'),
('G25.3', 'Mioclonia'),
('G25.4', 'Coréia induzida por droga'),
('G25.5', 'Outras formas de coréia'),
('G25.6', 'Tiques induzidos por droga e outros tipos de origem orgânica'),
('G25.8', 'Outras doenças extrapiramidais e transtornos dos movimentos, especificados'),
('G25.9', 'Doenças extrapiramidais e transtornos dos movimentos, não especificados'),
('G26', ' *Doenças extrapiramidais e transtornos dos movimentos em doenças classificadas em outra parte'),
('G30.0', 'Doença de Alzheimer de início precoce'),
('G30.1', 'Doença de Alzheimer de início tardio'),
('G30.8', 'Outras formas de doença de Alzheimer'),
('G30.9', 'Doença de Alzheimer não especificada'),
('G31.0', 'Atrofia cerebral circunscrita'),
('G31.1', 'Degeneração cerebral senil, não classificadas em outra parte'),
('G31.2', 'Degeneração do sistema nervoso devida ao álcool'),
('G31.8', 'Outras doenças degenerativas especificadas do sistema nervoso'),
('G31.9', 'Doença degenerativa do sistema nervoso, não especificada'),
('G32.0', 'Degeneração combinada subaguda da medula espinal em doenças classificadas em outra parte'),
('G32.8', 'Outros transtornos degenerativos especificados do sistema nervoso em doenças classificadas em outra parte'),
('G35', ' Esclerose múltipla'),
('G36.0', 'Neuromielite óptica [doença de Devic]'),
('G36.1', 'Leucoencefalite hemorrágica aguda e subaguda [Hurst]'),
('G36.8', 'Outras desmielinizações disseminadas agudas especificadas'),
('G36.9', 'Desmielinização disseminada aguda não especificada'),
('G37.0', 'Esclerose difusa'),
('G37.1', 'Desmielinização central do corpo caloso'),
('G37.2', 'Mielinólise central da ponte'),
('G37.3', 'Mielite transversa aguda em doenças desmielinizantes do sistema nervoso central'),
('G37.4', 'Mielite subaguda necrótica'),
('G37.5', 'Esclerose concêntrica [Baló]'),
('G37.8', 'Outras doenças desmielinizantes especificadas do sistema nervoso central'),
('G37.9', 'Doença desmielinizante do sistema nervoso central, não especificada'),
('G40.0', 'Epilepsia e síndromes epilépticas idiopáticas definidas por sua localização (focal) (parcial) com crises de início focal'),
('G40.1', 'Epilepsia e síndromes epilépticas sintomáticas definidas por sua localização (focal) (parcial) com crises parciais simples'),
('G40.2', 'Epilepsia e síndromes epilépticas sintomáticas definidas por sua localização (focal) (parcial) com crises parciais complexas'),
('G40.3', 'Epilepsia e síndromes epilépticas generalizadas idiopáticas'),
('G40.4', 'Outras epilepsias e síndromes epilépticas generalizadas'),
('G40.5', 'Síndromes epilépticas especiais'),
('G40.6', 'Crise de grande mal, não especificada (com ou sem pequeno mal)'),
('G40.7', 'Pequeno mal não especificado, sem crises de grande mal'),
('G40.8', 'Outras epilepsias'),
('G40.9', 'Epilepsia, não especificada'),
('G41.0', 'Estado de grande mal epiléptico'),
('G41.1', 'Estado de pequeno mal epiléptico'),
('G41.2', 'Estado de mal epiléptico parcial complexo'),
('G41.8', 'Outros estados de mal epiléptico'),
('G41.9', 'Estado de mal epiléptico, não especificado'),
('G43.0', 'Enxaqueca sem aura [enxaqueca comum]'),
('G43.1', 'Enxaqueca com aura [enxaqueca clássica]'),
('G43.2', 'Estado de mal enxaquecoso'),
('G43.3', 'Enxaqueca complicada'),
('G43.8', 'Outras formas de enxaqueca'),
('G43.9', 'Enxaqueca, sem especificação'),
('G44.0', 'Síndrome de "cluster-headache"'),
('G44.1', 'Cefaléia vascular, não classificada em outra parte'),
('G44.2', 'Cefaléia tensional'),
('G44.3', 'Cefaléia crônica pós-traumática'),
('G44.4', 'Cefaléia induzida por drogas, não classificada em outra parte'),
('G44.8', 'Outras síndromes de cefaléia especificadas'),
('G45.0', 'Síndrome da artéria vértebro-basilar'),
('G45.1', 'Síndrome da artéria carotídea (hemisférica)'),
('G45.2', 'Síndrome das artérias pré-cerebrais, múltiplas e bilaterais'),
('G45.3', 'Amaurose fugaz'),
('G45.4', 'Amnésia global transitória'),
('G45.8', 'Outros acidentes isquêmicos cerebrais transitórios e síndromes correlatas'),
('G45.9', 'Isquemia cerebral transitória não especificada'),
('G46.0', 'Síndrome da artéria cerebral média (I66.0+)'),
('G46.1', 'Síndrome da artéria cerebral anterior (I66.1+)'),
('G46.2', 'Síndrome da artéria cerebral posterior (I66.2+)'),
('G46.3', 'Síndromes vasculares do tronco cerebral (I60-I67+)'),
('G46.4', 'Síndrome vascular cerebelar (I60-I67+)'),
('G46.5', 'Síndrome lacunar motora pura (I60-I67+)'),
('G46.6', 'Síndrome lacunar sensorial pura (I60-I67+)'),
('G46.7', 'Outras síndromes lacunares (I60-I67+)'),
('G46.8', 'Outras síndromes vasculares cerebrais em doenças cerebrovasculares (I60-I67+)'),
('G47.0', 'Distúrbios do início e da manutenção do sono [insônias]'),
('G47.1', 'Distúrbios do sono por sonolência excessiva [hipersonia]'),
('G47.2', 'Distúrbios do ciclo vigília-sono'),
('G47.3', 'Apnéia de sono'),
('G47.4', 'Narcolepsia e cataplexia'),
('G47.8', 'Outros distúrbios do sono'),
('G47.9', 'Distúrbio do sono, não especificado'),
('G50.0', 'Nevralgia do trigêmeo'),
('G50.1', 'Dor facial atípica'),
('G50.8', 'Outros transtornos do nervo trigêmeo'),
('G50.9', 'Transtorno não especificado do nervo trigêmeo'),
('G51.0', 'Paralisia de Bell'),
('G51.1', 'Ganglionite geniculada'),
('G51.2', '<NAME>'),
('G51.3', 'Espasmo hemifacial clônico'),
('G51.4', 'Mioquimia facial'),
('G51.8', 'Outros transtornos do nervo facial'),
('G51.9', 'Transtorno não especificado do nervo facial'),
('G52.0', 'Transtornos do nervo olfatório'),
('G52.1', 'Transtornos do nervo glossofaríngeo'),
('G52.2', 'Transtornos do nervo vago'),
('G52.3', 'Transtornos do nervo hipoglosso'),
('G52.7', 'Transtornos de múltiplos nervos cranianos'),
('G52.8', 'Transtornos de outros nervos cranianos especificados'),
('G52.9', 'Transtorno de nervo craniano não especificado'),
('G53.0', 'Nevralgia pós-zoster (B02.2+)'),
('G53.1', 'Paralisias de múltiplos nervos cranianos em doenças infecciosas e parasitárias classificadas em outra parte (A00-B99+)'),
('G53.2', 'Paralisias de múltiplos nervos cranianos na sarcoidose (D86.8+)'),
('G53.3', 'Paralisias de múltiplos nervos cranianos em doenças neoplásicas (C00-D48+)'),
('G53.8', 'Outros transtornos de nervos cranianos em outras doenças classificadas em outra parte'),
('G54.0', 'Transtornos do plexo braquial'),
('G54.1', 'Transtornos do plexo lombossacral'),
('G54.2', 'Transtornos das raízes cervicais não classificadas em outra parte'),
('G54.3', 'Transtornos das raízes torácicas não classificadas em outra parte'),
('G54.4', 'Transtornos das raízes lombossacras não classificadas em outra parte'),
('G54.5', 'Amiotrofia nevrálgica'),
('G54.6', 'Síndrome dolorosa do membro fantasma'),
('G54.7', 'Síndrome do membro fantasma sem manifestação dolorosa'),
('G54.8', 'Outros transtornos das raízes e dos plexos nervosos'),
('G54.9', 'Transtorno não especificado das raízes e dos plexos nervosos'),
('G55.0', 'Compressões das raízes e dos plexos nervosos em doenças neoplásicas (C00-D48+)'),
('G55.1', 'Compressões das raízes e dos plexos nervosos em transtornos dos discos intervertebrais (M50-M51+)'),
('G55.2', 'Compressões das raízes e dos plexos nervosos na espondilose (M47.-+)'),
('G55.3', 'Compressões das raízes e dos plexos nervosos em outras dorsopatias (M45-M46+, M48.-+, M53-M54+)'),
('G55.8', 'Compressões das raízes e dos plexos nervosos em outras doenças classificadas em outra parte'),
('G56.0', 'Síndrome do túnel do carpo'),
('G56.1', 'Outras lesões do nervo mediano'),
('G56.2', 'Lesões do nervo cubital [ulnar]'),
('G56.3', 'Lesão do nervo radial'),
('G56.4', 'Causalgia'),
('G56.8', 'Outras mononeuropatias dos membros superiores'),
('G56.9', 'Mononeuropatia dos membros superiores, não especificada'),
('G57.0', 'Lesão do nervo ciático'),
('G57.1', 'Meralgia parestésica'),
('G57.2', 'Lesão do nervo femoral'),
('G57.3', 'Lesão do nervo poplíteo lateral'),
('G57.4', 'Lesão do nervo poplíteo medial'),
('G57.5', 'Síndrome do túnel do tarso'),
('G57.6', 'Lesão do nervo plantar'),
('G57.8', 'Outras mononeuropatias dos membros inferiores'),
('G57.9', 'Mononeuropatia dos membros inferiores, não especificada'),
('G58.0', 'Neuropatia intercostal'),
('G58.7', 'Mononeurite múltipla'),
('G58.8', 'Outras mononeuropatias especificadas'),
('G58.9', 'Mononeuropatia não especificada'),
('G59.0', 'Mononeuropatia diabética (E10-E14+ com quarto caractere comum .4)'),
('G59.8', 'Outras mononeuropatias em doenças classificadas em outra parte'),
('G60.0', 'Neuropatia hereditária motora e sensorial'),
('G60.1', 'Doença de Refsum'),
('G60.2', 'Neuropatia associada a ataxia hereditária'),
('G60.3', | |
stop=stop, delete=delete, delay=delay, overrides=overrides, info=info, snapshot=snapshot,
revert=revert, update=update)
return 0
def repo(args):
"""Create/Delete repo"""
repo = args.repo
delete = args.delete
url = args.url
update = args.update
baseconfig = Kbaseconfig(client=args.client, debug=args.debug)
if update:
if repo is None:
common.pprint("Updating all repos...", color='blue')
repos = baseconfig.list_repos()
for repo in repos:
common.pprint("Updating repo %s..." % repo, color='green')
baseconfig.update_repo(repo)
else:
common.pprint("Updating repo %s..." % repo, color='green')
baseconfig.update_repo(repo)
return
if repo is None:
common.pprint("Missing repo. Leaving...", color='red')
os._exit(1)
if delete:
common.pprint("Deleting repo %s..." % repo, color='green')
baseconfig.delete_repo(repo)
return
if url is None:
common.pprint("Missing url. Leaving...", color='red')
os._exit(1)
common.pprint("Adding repo %s..." % repo, color='green')
baseconfig.create_repo(repo, url)
return 0
def product(args):
"""Create product"""
repo = args.repo
product = args.product
latest = args.latest
plan = args.plan
group = args.group
overrides = common.get_overrides(paramfile=args.paramfile, param=args.param)
info = args.info
search = args.search
if info:
baseconfig = Kbaseconfig(client=args.client, debug=args.debug)
common.pprint("Providing information on product %s..." % product, color='green')
baseconfig.info_product(product, repo, group)
elif search:
baseconfig = Kbaseconfig(client=args.client, debug=args.debug)
products = PrettyTable(["Repo", "Group", "Product", "Description", "Numvms", "Memory"])
products.align["Repo"] = "l"
productsinfo = baseconfig.list_products(repo=repo)
for prod in sorted(productsinfo, key=lambda x: (x['repo'], x['group'], x['name'])):
name = prod['name']
repo = prod['repo']
prodgroup = prod['group']
description = prod.get('description', 'N/A')
if product.lower() not in name.lower() and product.lower() not in description.lower():
continue
if group is not None and prodgroup != group:
continue
numvms = prod.get('numvms', 'N/A')
memory = prod.get('memory', 'N/A')
            products.add_row([repo, prodgroup, name, description, numvms, memory])
print(products)
else:
config = Kconfig(client=args.client, debug=args.debug, region=args.region, zone=args.zone,
namespace=args.namespace)
common.pprint("Creating product %s..." % product, color='green')
config.create_product(product, repo=repo, group=group, plan=plan, latest=latest, overrides=overrides)
return 0
def ssh(args):
"""Ssh into vm"""
l = args.L
r = args.R
D = args.D
X = args.X
Y = args.Y
config = Kconfig(client=args.client, debug=args.debug, region=args.region, zone=args.zone, namespace=args.namespace)
name = [common.get_lastvm(config.client)] if not args.name else args.name
k = config.k
tunnel = config.tunnel
insecure = config.insecure
if len(name) > 1:
cmd = ' '.join(name[1:])
else:
cmd = None
name = name[0]
if '@' in name and len(name.split('@')) == 2:
user = name.split('@')[0]
name = name.split('@')[1]
else:
user = None
if os.path.exists("/i_am_a_container") and not os.path.exists("/root/.kcli/config.yml")\
and not os.path.exists("/root/.ssh/config"):
insecure = True
sshcommand = k.ssh(name, user=user, local=l, remote=r, tunnel=tunnel, insecure=insecure, cmd=cmd, X=X, Y=Y, D=D)
if sshcommand is not None:
if find_executable('ssh') is not None:
os.system(sshcommand)
else:
print(sshcommand)
else:
common.pprint("Couldnt ssh to %s" % name, color='red')
def scp(args):
"""Scp into vm"""
recursive = args.recursive
volumepath = args.volumepath
source = args.source[0]
source = source if not os.path.exists("/i_am_a_container") else "%s/%s" % (volumepath, source)
destination = args.destination[0]
config = Kconfig(client=args.client, debug=args.debug, region=args.region, zone=args.zone, namespace=args.namespace)
k = config.k
tunnel = config.tunnel
if len(source.split(':')) == 2:
name, source = source.split(':')
download = True
elif len(destination.split(':')) == 2:
name, destination = destination.split(':')
download = False
else:
common.pprint("Couldn't run scp", color='red')
return
if '@' in name and len(name.split('@')) == 2:
user, name = name.split('@')
else:
user = None
scpcommand = k.scp(name, user=user, source=source, destination=destination,
tunnel=tunnel, download=download, recursive=recursive)
if scpcommand is not None:
if find_executable('scp') is not None:
os.system(scpcommand)
else:
print(scpcommand)
else:
common.pprint("Couldn't run scp", color='red')
def network(args):
"""Create/Delete/List Network"""
name = args.name
delete = args.delete
isolated = args.isolated
cidr = args.cidr
vlan = args.vlan
nodhcp = args.nodhcp
domain = args.domain
pxe = args.pxe
config = Kconfig(client=args.client, debug=args.debug, region=args.region, zone=args.zone, namespace=args.namespace)
k = config.k
if name is None:
common.pprint("Missing Network", color='red')
os._exit(1)
if delete:
result = k.delete_network(name=name, cidr=cidr)
common.handle_response(result, name, element='Network ', action='deleted')
else:
if isolated:
nat = False
else:
nat = True
dhcp = not nodhcp
result = k.create_network(name=name, cidr=cidr, dhcp=dhcp, nat=nat, domain=domain, pxe=pxe, vlan=vlan)
common.handle_response(result, name, element='Network ')
def bootstrap(args):
"""Generate basic config file"""
name = args.name
host = args.host
port = args.port
user = args.user
protocol = args.protocol
url = args.url
pool = args.pool
poolpath = args.poolpath
baseconfig = Kbaseconfig(client=args.client, debug=args.debug)
baseconfig.bootstrap(name, host, port, user, protocol, url, pool, poolpath)
def container(args):
"""Create container"""
name = args.name
profile = args.profile
overrides = common.get_overrides(paramfile=args.paramfile, param=args.param)
config = Kconfig(client=args.client, debug=args.debug, region=args.region, zone=args.zone, namespace=args.namespace)
cont = Kcontainerconfig(config, client=args.containerclient).cont
if name is None:
name = nameutils.get_random_name()
if config.type == 'kubevirt':
name = name.replace('_', '-')
if profile is None:
common.pprint("Missing profile", color='red')
os._exit(1)
containerprofiles = {k: v for k, v in config.profiles.items() if 'type' in v and v['type'] == 'container'}
if profile not in containerprofiles:
common.pprint("profile %s not found. Trying to use the profile as image"
"and default values..." % profile, color='blue')
cont.create_container(name, profile, overrides=overrides)
else:
common.pprint("Deploying container %s from profile %s..." % (name, profile), color='green')
profilename = profile
profile = containerprofiles[profile]
image = next((e for e in [profile.get('image'), profile.get('template')] if e is not None), None)
if image is None:
common.pprint("Missing image in profile %s. Leaving..." % profile, color='red')
os._exit(1)
cmd = profile.get('cmd', None)
ports = profile.get('ports', None)
environment = profile.get('environment', None)
volumes = next((e for e in [profile.get('volumes'), profile.get('disks')] if e is not None), None)
cont.create_container(name, image, nets=None, cmd=cmd, ports=ports, volumes=volumes, environment=environment)
common.pprint("container %s created" % name, color='green')
return
def snapshot(args):
"""Create/Delete/Revert snapshot"""
snapshot = args.snapshot
name = args.name
revert = args.revert
delete = args.delete
listing = args.listing
config = Kconfig(client=args.client, debug=args.debug, region=args.region, zone=args.zone, namespace=args.namespace)
k = config.k
if revert:
common.pprint("Reverting snapshot of %s named %s..." % (name, snapshot), color='green')
elif delete:
common.pprint("Deleting snapshot of %s named %s..." % (name, snapshot), color='green')
elif listing:
common.pprint("Listing snapshots of %s..." % name, color='green')
snapshots = k.snapshot(snapshot, name, listing=True)
if isinstance(snapshots, dict):
common.pprint("Vm %s not found" % name, color='red')
return
else:
for snapshot in snapshots:
print(snapshot)
return
elif snapshot is None:
common.pprint("Missing snapshot name", color='red')
return 1
else:
common.pprint("Creating snapshot of %s named %s..." % (name, snapshot), color='green')
result = k.snapshot(snapshot, name, revert=revert, delete=delete)
code = common.handle_response(result, name, element='', action='snapshotted')
return code
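# Hedged sketch: the k.snapshot() dispatch used by snapshot() above, with an
# illustrative vm ('vm1') and snapshot name ('base'). listing=True returns the
# snapshot names instead of an action result.
def _snapshot_roundtrip(k):
    k.snapshot('base', 'vm1')                     # create
    print(k.snapshot(None, 'vm1', listing=True))  # list
    k.snapshot('base', 'vm1', revert=True)        # revert
    k.snapshot('base', 'vm1', delete=True)        # delete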
def report(args):
"""Report info about host"""
config = Kconfig(client=args.client, debug=args.debug, region=args.region, zone=args.zone, namespace=args.namespace)
k = config.k
k.report()
def switch(args):
"""Handle host"""
host = args.host
baseconfig = Kbaseconfig(client=args.client, debug=args.debug)
result = baseconfig.switch_host(host)
if result['result'] == 'success':
os._exit(0)
else:
os._exit(1)
def cli():
"""
"""
parser = argparse.ArgumentParser(description='Libvirt/VirtualBox/Kubevirt '
'wrapper on steroids. Check out '
'https://github.com/karmab/kcli!')
parser.add_argument('-C', '--client')
parser.add_argument('--containerclient', help='Containerclient to use')
parser.add_argument('--dnsclient', help='Dnsclient to use')
parser.add_argument('-d', '--debug', action='store_true')
parser.add_argument('-n', '--namespace', help='Namespace to use. specific to kubevirt')
parser.add_argument('-r', '--region', help='Region to use. specific to aws/gcp')
parser.add_argument('-z', '--zone', help='Zone to use. specific to gcp')
parser.add_argument('-v', '--version', action='version', version=__version__)
subparsers = parser.add_subparsers(metavar='')
bootstrap_info = 'Generate basic config file'
bootstrap_parser = subparsers.add_parser('bootstrap', help=bootstrap_info, description=bootstrap_info)
bootstrap_parser.add_argument('-n', '--name', help='Name to use', metavar='CLIENT')
bootstrap_parser.add_argument('-H', '--host', help='Host to use', metavar='HOST')
bootstrap_parser.add_argument('-p', '--port', help='Port to use', metavar='PORT')
bootstrap_parser.add_argument('-u', '--user', help='User to use', default='root', metavar='USER')
bootstrap_parser.add_argument('-P', '--protocol', help='Protocol to use', default='ssh', metavar='PROTOCOL')
bootstrap_parser.add_argument('-U', '--url', help='URL to use', metavar='URL')
bootstrap_parser.add_argument('--pool', help='Pool to use', metavar='POOL')
bootstrap_parser.add_argument('--poolpath', help='Pool Path to use', metavar='POOLPATH')
bootstrap_parser.set_defaults(func=bootstrap)
clone_info = 'Clone existing vm'
clone_parser = subparsers.add_parser('clone', description=clone_info, help=clone_info)
clone_parser.add_argument('-b', '--base', help='Base VM', metavar='BASE')
clone_parser.add_argument('-f', '--full', action='store_true', help='Full Clone')
clone_parser.add_argument('-s', '--start', action='store_true', help='Start cloned VM')
clone_parser.add_argument('name', metavar='VMNAME')
clone_parser.set_defaults(func=clone)
console_info = 'Vnc/Spice/Serial/Container console'
console_parser = subparsers.add_parser('console', description=console_info, help=console_info)
console_parser.add_argument('-s', '--serial', action='store_true')
console_parser.add_argument('--container', action='store_true')
console_parser.add_argument('name', metavar='VMNAME', nargs='?')
console_parser.set_defaults(func=console)
container_info = 'Create container'
container_parser = subparsers.add_parser('container', description=container_info, help=container_info)
container_parser.add_argument('-p', '--profile', help='Profile to use', metavar='PROFILE')
container_parser.add_argument('-P', '--param', action='append',
help='specify parameter or keyword for rendering (can specify multiple)',
metavar='PARAM')
container_parser.add_argument('--paramfile', help='Parameters file', metavar='PARAMFILE')
container_parser.add_argument('name', metavar='NAME', nargs='?')
container_parser.set_defaults(func=container)
delete_info = 'Delete vm/container'
delete_parser = subparsers.add_parser('delete', description=delete_info, help=delete_info)
delete_parser.add_argument('-y', '--yes', action='store_true', help="Don't ask for confirmation")
delete_parser.add_argument('--container', action='store_true')
delete_parser.add_argument('-t', '--template', action='store_true', help='delete template')
delete_parser.add_argument('--snapshots', action='store_true', help='Remove snapshots if needed')
delete_parser.add_argument('names', metavar='VMNAMES', nargs='*')
delete_parser.set_defaults(func=delete)
disk_info = 'Add/Delete disk of vm'
disk_parser = subparsers.add_parser('disk', description=disk_info, help=disk_info)
disk_parser.add_argument('-d', '--delete', action='store_true')
disk_parser.add_argument('-s', '--size', type=int, help='Size of the disk to add, in GB', metavar='SIZE')
disk_parser.add_argument('-n', '--diskname', help='Name or Path of the disk, when deleting', metavar='DISKNAME')
disk_parser.add_argument('-t', '--template', help='Name or Path of a Template, when adding', metavar='TEMPLATE')
disk_parser.add_argument('-p', '--pool', default='default', help='Pool', metavar='POOL')
disk_parser.add_argument('name', metavar='VMNAME', nargs='?')
disk_parser.set_defaults(func=disk)
dns_info = 'Create/Delete dns entries'
dns_parser = subparsers.add_parser('dns', description=dns_info, help=dns_info)
dns_parser.add_argument('-d', '--delete', action='store_true')
dns_parser.add_argument('-n', '--net', help='Domain where to create entry', metavar='NET')
dns_parser.add_argument('-i', '--ip', help='IP', metavar='IP')
dns_parser.add_argument('name', metavar='NAME', nargs='?')
dns_parser.set_defaults(func=dns)
download_info = 'Download template'
download_help = "Template to download. Choose between \n%s" % '\n'.join(TEMPLATES.keys())
download_parser = subparsers.add_parser('download', description=download_info, help=download_info)
# A SymbolicConstant specifying the background style to be used for all viewport windows.
# Possible values are SOLID and GRADIENT. The default value is SOLID. If
# *backgroundStyle*=SOLID, the viewport background will appear as a solid color as
# specified by *backgroundColor*. If *backgroundStyle*=GRADIENT, the viewport background
# will be drawn as a gradient beginning with the *backgroundColor* at the top of the
# viewport and gradually blending to the *backgroundBottomColor* at the bottom of the
# viewport.
backgroundStyle: SymbolicConstant = SOLID
# A Boolean specifying whether the hardware accelerated graphics driver will be used for
# off-screen rendering. The default value is ON if graphics hardware acceleration is
# available and has not been disabled via the hardwareAcceleration option, and the
# graphics driver supports the underlying technology. When set to OFF, an alternate
# (slower) technique will be used to create off-screen images. Off-screen rendering is
# used for Printing, Probe, and backing store (viewport refresh). Setting this value to
# OFF will force printed images to be rendered without hardware acceleration. This is
# useful when writing automated tests to produce raster images that you will want to
# compare across multiple machines that may have different graphics environments.
accelerateOffScreen: Boolean = OFF
# A Boolean specifying whether a backing store will be used to refresh a viewport after a
# window occluding the viewport is moved or dismissed. The default value is ON.
backingStore: Boolean = ON
# A SymbolicConstant specifying the highlight method. For the GraphicsOptions object,
# possible values of the member are HARDWARE_OVERLAY, XOR, SOFTWARE_OVERLAY, and BLEND.
highlightMethod: SymbolicConstant = None
# A Boolean specifying if the graphics hardware supports hardware overlay.
hardwareOverlayAvailable: Boolean = OFF
# A Boolean specifying if the graphics hardware supports the OpenGL Shading Language
# (GLSL).
shadersAvailable: Boolean = OFF
# An Int specifying whether speed or accuracy is more important when drawing translucent
# objects. Lower values optimize for speed while higher values optimize for accuracy. The
# actual meaning of each setting will depend on the setting of *shadersAvailable* and the
# capabilities of the graphics hardware and driver. Possible values are 1 ≤
# *translucencyMode* ≤ 6. The default value is 4.
translucencyMode: int = 4
# A Float specifying a tolerance used when computing the appropriate scale for
# transforming result (contour) values to texture values. When set too low the 'out of
# range' colors may be incorrectly shown for values near the range limits. The default
# value is 0.5×10⁻⁵.
contourRangeTexturePrecision: float = 0
# None or a GraphicsOptions object specifying the object from which values are to be
# copied. If other arguments are also supplied to setValues, they will override the values
# in the *options* member. The default value is None.
options: str = None
# A tuple of SymbolicConstants specifying a hint used to modify the highlight
# method. Possible values are:
# HARDWARE_OVERLAY, specifying a hint of hardware overlay. The best graphics
# performance is achieved using hardware overlay, but not all systems and
# graphics adapters support hardware overlay.
# XOR, specifying a hint of XOR technique. The XOR technique uses a boolean
# pixel operation to simulate the drawing operations but can produce different
# colors depending on the color of the underlying pixels.
# SOFTWARE_OVERLAY, specifying a hint of software overlay. The software overlay
# method simulates the effect of hardware overlay.
# BLEND, specifying a hint of blend method. The blend method combines the color
# of the underlying pixel with the desired color, producing an approximation of
# the transient graphics.
# The default value is (HARDWARE_OVERLAY, XOR, SOFTWARE_OVERLAY, BLEND).
# The values of this sequence are applied by Abaqus when you start a session in
# first to last order. The first successful value becomes the default highlight
# method. Not all graphics adapters support the HARDWARE_OVERLAY value and you
# must use the *highlightMethodHint* argument to provide an alternative.
# You can use a single value to set the first element of the list, or you can
# use a tuple with one to four unique values. Abaqus sets any remaining elements
# of the tuple to unique values based on the default order.
highlightMethodHint: SymbolicConstant = None
# A String specifying one of the two background colors for all viewport windows. The
# initial color is black. A list of valid color strings is in the *colors* map in the
# Session object.
backgroundColor: str = ''
# A String specifying one of the two background colors for all viewport windows. This
# color is used only if *backgroundStyle* =GRADIENT. The initial color is black. A list of
# valid color strings is in the *colors* map in the Session object.
backgroundBottomColor: str = ''
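# Hedged usage sketch (assumes an interactive Abaqus/CAE scripting session;
# kept in comments because this file only declares the options class). The
# color strings are illustrative:
#
#     from abaqus import session
#     from abaqusConstants import GRADIENT
#     session.graphicsOptions.setValues(backgroundStyle=GRADIENT,
#                                       backgroundColor='#1B2D46',
#                                       backgroundBottomColor='#A3B1CC')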
def setValues(self, graphicsDriver: SymbolicConstant = None, doubleBuffering: Boolean = ON,
displayLists: Boolean = ON, highlightMethodHint: SymbolicConstant = None,
dragMode: SymbolicConstant = AS_IS, antiAlias: Boolean = ON,
autoFitAfterRotate: Boolean = OFF, polygonOffsetConstant: float = None,
polygonOffsetSlope: float = None, printPolygonOffsetConstant: float = None,
printPolygonOffsetSlope: float = None, vertexArrays: Boolean = ON,
vertexArraysInDisplayLists: Boolean = ON, viewManipDisplayListThreshold: int = 40,
directRendering: Boolean = OFF, hardwareAcceleration: Boolean = ON,
hardwareOverlay: Boolean = OFF, textureMapping: Boolean = ON,
printTextureMapping: Boolean = ON, backgroundStyle: SymbolicConstant = SOLID,
backgroundColor: str = '', backgroundBottomColor: str = '',
backgroundOverride: Boolean = OFF, backfaceCulling: Boolean = ON,
accelerateOffScreen: Boolean = OFF, backingStore: Boolean = ON,
shadersAvailable: Boolean = OFF, translucencyMode: int = 4, options: str = None,
contourRangeTexturePrecision: float = 0):
"""This method modifies the GraphicsOptions object.
Parameters
----------
graphicsDriver
A SymbolicConstant specifying the graphics driver to use. Abaqus/CAE currently uses
OpenGL exclusively so the only possible value is OPEN_GL. OPEN_GL takes advantage of
graphics adapter hardware acceleration.
doubleBuffering
A Boolean specifying whether double buffering is used. The default value is ON.Double
buffering controls where Abaqus/CAE draws its graphics. When *doubleBuffering*=OFF,
everything is drawn directly to the screen and on many systems you can see the progress
of the drawing operations. Most users find this distracting, especially in dynamic
situations such as view manipulation or animation of results. When *doubleBuffering*=ON,
the drawing occurs in a separate graphics buffer that is displayed when all the drawing
operations are complete. This results in a much smoother display during view changes or
animation. It is recommended that you set double buffering to ON.
displayLists
A Boolean specifying whether a display list will be used to accelerate graphics
performance. The default value is ON.When *displayLists*=ON, drawing operations are
recorded in a list that can be quickly replayed. This results in faster drawing on most
systems but requires extra memory to record the drawing operations. In the Visualization
module, display lists are only used during view manipulations and then their use is
subject to the setting of *viewManipDisplayListThreshold*.
highlightMethodHint
A sequence of SymbolicConstants specifying a hint used to modify the highlight
method. Possible values are:
HARDWARE_OVERLAY, specifying a hint of hardware overlay. The best graphics
performance is achieved using hardware overlay, but not all systems and
graphics adapters support hardware overlay.
XOR, specifying a hint of XOR technique. The XOR technique uses a boolean pixel
operation to simulate the drawing operations but can produce different colors
depending on the color of the underlying pixels.
SOFTWARE_OVERLAY, specifying a hint of software overlay. The
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import collections.abc
import functools
import inspect
import re
from typing import (
TYPE_CHECKING,
Any,
Callable,
Collection,
Dict,
Generic,
Iterator,
Mapping,
Optional,
Sequence,
Set,
Type,
TypeVar,
cast,
overload,
)
import attr
import typing_extensions
from sqlalchemy.orm import Session
from airflow.compat.functools import cache, cached_property
from airflow.exceptions import AirflowException
from airflow.models.abstractoperator import DEFAULT_RETRIES, DEFAULT_RETRY_DELAY
from airflow.models.baseoperator import BaseOperator, coerce_resources, coerce_retry_delay, parse_retries
from airflow.models.dag import DAG, DagContext
from airflow.models.mappedoperator import (
MappedOperator,
ValidationSource,
ensure_xcomarg_return_value,
get_mappable_types,
prevent_duplicates,
)
from airflow.models.pool import Pool
from airflow.models.xcom_arg import XComArg
from airflow.typing_compat import Protocol
from airflow.utils import timezone
from airflow.utils.context import KNOWN_CONTEXT_KEYS, Context
from airflow.utils.task_group import TaskGroup, TaskGroupContext
from airflow.utils.types import NOTSET
if TYPE_CHECKING:
from airflow.models.mappedoperator import Mappable
def validate_python_callable(python_callable: Any) -> None:
"""
Validate that python callable can be wrapped by operator.
Raises exception if invalid.
:param python_callable: Python object to be validated
:raises: TypeError, AirflowException
"""
if not callable(python_callable):
raise TypeError('`python_callable` param must be callable')
if 'self' in inspect.signature(python_callable).parameters.keys():
raise AirflowException('@task does not support methods')
def get_unique_task_id(
task_id: str,
dag: Optional[DAG] = None,
task_group: Optional[TaskGroup] = None,
) -> str:
"""
Generate unique task id given a DAG (or if run in a DAG context)
Ids are generated by appending a unique number to the end of
the original task id.
Example:
task_id
task_id__1
task_id__2
...
task_id__20
"""
dag = dag or DagContext.get_current_dag()
if not dag:
return task_id
# We need to check if we are in the context of TaskGroup as the task_id may
# already be altered
task_group = task_group or TaskGroupContext.get_current_task_group(dag)
tg_task_id = task_group.child_id(task_id) if task_group else task_id
if tg_task_id not in dag.task_ids:
return task_id
def _find_id_suffixes(dag: DAG) -> Iterator[int]:
prefix = re.split(r"__\d+$", tg_task_id)[0]
for task_id in dag.task_ids:
match = re.match(rf"^{prefix}__(\d+)$", task_id)
if match is None:
continue
yield int(match.group(1))
yield 0 # Default if there's no matching task ID.
core = re.split(r"__\d+$", task_id)[0]
return f"{core}__{max(_find_id_suffixes(dag)) + 1}"
class DecoratedOperator(BaseOperator):
"""
Wraps a Python callable and captures args/kwargs when called for execution.
:param python_callable: A reference to an object that is callable
:param op_kwargs: a dictionary of keyword arguments that will get unpacked
in your function (templated)
:param op_args: a list of positional arguments that will get unpacked when
calling your callable (templated)
:param multiple_outputs: If set to True, the decorated function's return value will be unrolled to
multiple XCom values. Dict will unroll to XCom values with its keys as XCom keys. Defaults to False.
:param kwargs_to_upstream: For certain operators, we might need to upstream certain arguments
that would otherwise be absorbed by the DecoratedOperator (for example python_callable for the
PythonOperator). This gives a user the option to upstream kwargs as needed.
"""
template_fields: Sequence[str] = ('op_args', 'op_kwargs')
template_fields_renderers = {"op_args": "py", "op_kwargs": "py"}
# Since we won't mutate the arguments, we should just do the shallow copy;
# there are some cases where we can't deepcopy the objects (e.g. protobuf).
shallow_copy_attrs: Sequence[str] = ('python_callable',)
def __init__(
self,
*,
python_callable: Callable,
task_id: str,
op_args: Optional[Collection[Any]] = None,
op_kwargs: Optional[Mapping[str, Any]] = None,
multiple_outputs: bool = False,
kwargs_to_upstream: Optional[Dict[str, Any]] = None,
**kwargs,
) -> None:
task_id = get_unique_task_id(task_id, kwargs.get('dag'), kwargs.get('task_group'))
self.python_callable = python_callable
kwargs_to_upstream = kwargs_to_upstream or {}
op_args = op_args or []
op_kwargs = op_kwargs or {}
# Check that the arguments can be bound
inspect.signature(python_callable).bind(*op_args, **op_kwargs)
self.multiple_outputs = multiple_outputs
self.op_args = op_args
self.op_kwargs = op_kwargs
super().__init__(task_id=task_id, **kwargs_to_upstream, **kwargs)
def execute(self, context: Context):
return_value = super().execute(context)
return self._handle_output(return_value=return_value, context=context, xcom_push=self.xcom_push)
def _handle_output(self, return_value: Any, context: Context, xcom_push: Callable):
"""
Handles logic for whether a decorator needs to push a single return value or multiple return values.
:param return_value:
:param context:
:param xcom_push:
"""
if not self.multiple_outputs:
return return_value
if isinstance(return_value, dict):
for key in return_value.keys():
if not isinstance(key, str):
raise AirflowException(
'Returned dictionary keys must be strings when using '
f'multiple_outputs, found {key} ({type(key)}) instead'
)
for key, value in return_value.items():
xcom_push(context, key, value)
else:
raise AirflowException(
f'Returned output was type {type(return_value)} expected dictionary for multiple_outputs'
)
return return_value
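# Hedged illustration (comments only, since we are inside the class body):
# with multiple_outputs=True, a callable returning
#     {"path": "/tmp/out", "rows": 10}
# results in two xcom_push() calls keyed 'path' and 'rows'; returning a
# non-dict raises AirflowException, per _handle_output() above.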
def _hook_apply_defaults(self, *args, **kwargs):
if 'python_callable' not in kwargs:
return args, kwargs
python_callable = kwargs['python_callable']
default_args = kwargs.get('default_args') or {}
op_kwargs = kwargs.get('op_kwargs') or {}
f_sig = inspect.signature(python_callable)
for arg in f_sig.parameters:
if arg not in op_kwargs and arg in default_args:
op_kwargs[arg] = default_args[arg]
kwargs['op_kwargs'] = op_kwargs
return args, kwargs
Function = TypeVar("Function", bound=Callable)
OperatorSubclass = TypeVar("OperatorSubclass", bound="BaseOperator")
@attr.define(slots=False)
class _TaskDecorator(Generic[Function, OperatorSubclass]):
"""
Helper class for providing dynamic task mapping to decorated functions.
``task_decorator_factory`` returns an instance of this, instead of just a plain wrapped function.
:meta private:
"""
function: Function = attr.ib()
operator_class: Type[OperatorSubclass]
multiple_outputs: bool = attr.ib()
kwargs: Dict[str, Any] = attr.ib(factory=dict)
decorator_name: str = attr.ib(repr=False, default="task")
@multiple_outputs.default
def _infer_multiple_outputs(self):
try:
return_type = typing_extensions.get_type_hints(self.function).get("return", Any)
except Exception:  # Can't evaluate return type.
return False
ttype = getattr(return_type, "__origin__", return_type)
return ttype == dict or ttype == Dict
def __attrs_post_init__(self):
if "self" in self.function_signature.parameters:
raise TypeError(f"@{self.decorator_name} does not support methods")
self.kwargs.setdefault('task_id', self.function.__name__)
def __call__(self, *args, **kwargs) -> XComArg:
op = self.operator_class(
python_callable=self.function,
op_args=args,
op_kwargs=kwargs,
multiple_outputs=self.multiple_outputs,
**self.kwargs,
)
if self.function.__doc__:
op.doc_md = self.function.__doc__
return XComArg(op)
@cached_property
def function_signature(self):
return inspect.signature(self.function)
@cached_property
def _function_is_vararg(self):
parameters = self.function_signature.parameters
return any(v.kind == inspect.Parameter.VAR_KEYWORD for v in parameters.values())
@cached_property
def _mappable_function_argument_names(self) -> Set[str]:
"""Arguments that can be mapped against."""
return set(self.function_signature.parameters)
def _validate_arg_names(self, func: ValidationSource, kwargs: Dict[str, Any]):
# Ensure that context variables are not shadowed.
context_keys_being_mapped = KNOWN_CONTEXT_KEYS.intersection(kwargs)
if len(context_keys_being_mapped) == 1:
(name,) = context_keys_being_mapped
raise ValueError(f"cannot call {func}() on task context variable {name!r}")
elif context_keys_being_mapped:
names = ", ".join(repr(n) for n in context_keys_being_mapped)
raise ValueError(f"cannot call {func}() on task context variables {names}")
# Ensure that all arguments passed in are accounted for.
if self._function_is_vararg:
return
kwargs_left = kwargs.copy()
for arg_name in self._mappable_function_argument_names:
value = kwargs_left.pop(arg_name, NOTSET)
if func != "expand" or value is NOTSET or isinstance(value, get_mappable_types()):
continue
tname = type(value).__name__
raise ValueError(f"expand() got an unexpected type {tname!r} for keyword argument {arg_name!r}")
if len(kwargs_left) == 1:
raise TypeError(f"{func}() got an unexpected keyword argument {next(iter(kwargs_left))!r}")
elif kwargs_left:
names = ", ".join(repr(n) for n in kwargs_left)
raise TypeError(f"{func}() got unexpected keyword arguments {names}")
def expand(self, **map_kwargs: "Mappable") -> XComArg:
self._validate_arg_names("expand", map_kwargs)
prevent_duplicates(self.kwargs, map_kwargs, fail_reason="mapping already partial")
ensure_xcomarg_return_value(map_kwargs)
partial_kwargs = self.kwargs.copy()
dag = partial_kwargs.pop("dag", DagContext.get_current_dag())
task_group = partial_kwargs.pop("task_group", TaskGroupContext.get_current_task_group(dag))
user_supplied_task_id = partial_kwargs.pop("task_id")
task_id = get_unique_task_id(user_supplied_task_id, dag, task_group)
params = partial_kwargs.pop("params", None)
# Logic here should be kept in sync with BaseOperatorMeta.partial().
if "task_concurrency" in partial_kwargs:
raise TypeError("unexpected argument: task_concurrency")
if partial_kwargs.get("wait_for_downstream"):
partial_kwargs["depends_on_past"] = True
start_date = timezone.convert_to_utc(partial_kwargs.pop("start_date", None))
end_date = timezone.convert_to_utc(partial_kwargs.pop("end_date", None))
if partial_kwargs.get("pool") is None:
partial_kwargs["pool"] = Pool.DEFAULT_POOL_NAME
partial_kwargs["retries"] = parse_retries(partial_kwargs.get("retries", DEFAULT_RETRIES))
partial_kwargs["retry_delay"] = coerce_retry_delay(
partial_kwargs.get("retry_delay", DEFAULT_RETRY_DELAY),
)
partial_kwargs["resources"] = coerce_resources(partial_kwargs.get("resources"))
partial_kwargs.setdefault("executor_config", {})
partial_kwargs.setdefault("op_args", [])
partial_kwargs.setdefault("op_kwargs", {})
# Mypy does not work well with a subclassed attrs class :(
_MappedOperator = cast(Any, DecoratedMappedOperator)
operator = _MappedOperator(
operator_class=self.operator_class,
user_supplied_task_id=user_supplied_task_id,
mapped_kwargs={},
partial_kwargs=partial_kwargs,
task_id=task_id,
params=params,
deps=MappedOperator.deps_for(self.operator_class),
operator_extra_links=self.operator_class.operator_extra_links,
template_ext=self.operator_class.template_ext,
template_fields=self.operator_class.template_fields,
template_fields_renderers=self.operator_class.template_fields_renderers,
ui_color=self.operator_class.ui_color,
ui_fgcolor=self.operator_class.ui_fgcolor,
is_dummy=False,
task_module=self.operator_class.__module__,
task_type=self.operator_class.__name__,
dag=dag,
task_group=task_group,
start_date=start_date,
end_date=end_date,
multiple_outputs=self.multiple_outputs,
python_callable=self.function,
mapped_op_kwargs=map_kwargs,
)
return XComArg(operator=operator)
def partial(self, **kwargs) -> "_TaskDecorator[Function, OperatorSubclass]":
self._validate_arg_names("partial", kwargs)
op_kwargs = self.kwargs.get("op_kwargs", {})
op_kwargs = _merge_kwargs(op_kwargs, kwargs, fail_reason="duplicate partial")
return attr.evolve(self, kwargs={**self.kwargs, "op_kwargs": op_kwargs})
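# Hedged usage sketch (names illustrative, kept in comments): a @task-decorated
# callable can be partially bound and then fanned out over mapped arguments,
# which routes through _TaskDecorator.partial() and .expand() above:
#
#     @task
#     def add(x, y):
#         return x + y
#
#     added = add.partial(x=10).expand(y=[1, 2, 3])  # three mapped instances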
def _merge_kwargs(kwargs1: Dict[str, Any], kwargs2: Dict[str, Any], *, fail_reason: str) -> Dict[str, Any]:
duplicated_keys = set(kwargs1).intersection(kwargs2)
if len(duplicated_keys) == 1:
raise TypeError(f"{fail_reason} argument: {duplicated_keys.pop()}")
elif duplicated_keys:
duplicated_keys_display = ", ".join(sorted(duplicated_keys))
raise TypeError(f"{fail_reason} arguments: {duplicated_keys_display}")
return {**kwargs1, **kwargs2}
@attr.define(kw_only=True, repr=False)
class DecoratedMappedOperator(MappedOperator):
"""MappedOperator implementation for @task-decorated task function."""
multiple_outputs: bool
python_callable: Callable
# We can't save these in mapped_kwargs
parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """agent_information_option must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_agent_information_option_openconfig_relay_agent__relay_agent_dhcp_agent_information_option, is_container='container', yang_name="agent-information-option", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='container', is_config=True)""",
})
self.__agent_information_option = t
if hasattr(self, '_set'):
self._set()
def _unset_agent_information_option(self):
self.__agent_information_option = YANGDynClass(base=yc_agent_information_option_openconfig_relay_agent__relay_agent_dhcp_agent_information_option, is_container='container', yang_name="agent-information-option", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='container', is_config=True)
def _get_interfaces(self):
"""
Getter method for interfaces, mapped from YANG variable /relay_agent/dhcp/interfaces (container)
YANG Description: Enclosing container for the list of interface references.
"""
return self.__interfaces
def _set_interfaces(self, v, load=False):
"""
Setter method for interfaces, mapped from YANG variable /relay_agent/dhcp/interfaces (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_interfaces is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interfaces() directly.
YANG Description: Enclosing container for the list of interface references.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_interfaces_openconfig_relay_agent__relay_agent_dhcp_interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interfaces must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_interfaces_openconfig_relay_agent__relay_agent_dhcp_interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='container', is_config=True)""",
})
self.__interfaces = t
if hasattr(self, '_set'):
self._set()
def _unset_interfaces(self):
self.__interfaces = YANGDynClass(base=yc_interfaces_openconfig_relay_agent__relay_agent_dhcp_interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
agent_information_option = __builtin__.property(_get_agent_information_option, _set_agent_information_option)
interfaces = __builtin__.property(_get_interfaces, _set_interfaces)
_pyangbind_elements = OrderedDict([('config', config), ('state', state), ('agent_information_option', agent_information_option), ('interfaces', interfaces), ])
class yc_config_openconfig_relay_agent__relay_agent_dhcpv6_config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-relay-agent - based on the path /relay-agent/dhcpv6/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration data for global DHCPv6
"""
__slots__ = ('_path_helper', '_extmethods', '__enable_relay_agent',)
_yang_name = 'config'
_yang_namespace = 'http://openconfig.net/yang/relay-agent'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__enable_relay_agent = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-relay-agent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['relay-agent', 'dhcpv6', 'config']
def _get_enable_relay_agent(self):
"""
Getter method for enable_relay_agent, mapped from YANG variable /relay_agent/dhcpv6/config/enable_relay_agent (boolean)
YANG Description: Enables DHCP/BOOTP relay agent on all interfaces
"""
return self.__enable_relay_agent
def _set_enable_relay_agent(self, v, load=False):
"""
Setter method for enable_relay_agent, mapped from YANG variable /relay_agent/dhcpv6/config/enable_relay_agent (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable_relay_agent is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable_relay_agent() directly.
YANG Description: Enables DHCP/BOOTP relay agent on all interfaces
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-relay-agent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enable_relay_agent must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-relay-agent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=True)""",
})
self.__enable_relay_agent = t
if hasattr(self, '_set'):
self._set()
def _unset_enable_relay_agent(self):
self.__enable_relay_agent = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-relay-agent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=True)
enable_relay_agent = __builtin__.property(_get_enable_relay_agent, _set_enable_relay_agent)
_pyangbind_elements = OrderedDict([('enable_relay_agent', enable_relay_agent), ])
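# Hedged sketch: typical pyangbind interaction with the generated config class
# above; assignment routes through _set_enable_relay_agent(), and an
# incompatible value raises the ValueError built there.
def _dhcpv6_config_demo():
    cfg = yc_config_openconfig_relay_agent__relay_agent_dhcpv6_config()
    cfg.enable_relay_agent = True
    return cfg._path()  # ['relay-agent', 'dhcpv6', 'config']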
class yc_state_openconfig_relay_agent__relay_agent_dhcpv6_state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-relay-agent - based on the path /relay-agent/dhcpv6/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Operational state data for global DHCPv6
"""
__slots__ = ('_path_helper', '_extmethods', '__enable_relay_agent',)
_yang_name = 'state'
_yang_namespace = 'http://openconfig.net/yang/relay-agent'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__enable_relay_agent = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-relay-agent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['relay-agent', 'dhcpv6', 'state']
def _get_enable_relay_agent(self):
"""
Getter method for enable_relay_agent, mapped from YANG variable /relay_agent/dhcpv6/state/enable_relay_agent (boolean)
YANG Description: Enables DHCP/BOOTP relay agent on all interfaces
"""
return self.__enable_relay_agent
def _set_enable_relay_agent(self, v, load=False):
"""
Setter method for enable_relay_agent, mapped from YANG variable /relay_agent/dhcpv6/state/enable_relay_agent (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable_relay_agent is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable_relay_agent() directly.
YANG Description: Enables DHCP/BOOTP relay agent on all interfaces
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-relay-agent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enable_relay_agent must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-relay-agent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=False)""",
})
self.__enable_relay_agent = t
if hasattr(self, '_set'):
self._set()
def _unset_enable_relay_agent(self):
self.__enable_relay_agent = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-relay-agent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=False)
enable_relay_agent = __builtin__.property(_get_enable_relay_agent)
_pyangbind_elements = OrderedDict([('enable_relay_agent', enable_relay_agent), ])
class yc_config_openconfig_relay_agent__relay_agent_dhcpv6_options_config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-relay-agent - based on the path /relay-agent/dhcpv6/options/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration data
"""
__slots__ = ('_path_helper', '_extmethods', '__enable_interface_id','__enable_remote_id',)
_yang_name = 'config'
_yang_namespace = 'http://openconfig.net/yang/relay-agent'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__enable_interface_id = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-interface-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=True)
self.__enable_remote_id = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-remote-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['relay-agent', 'dhcpv6', 'options', 'config']
def _get_enable_interface_id(self):
"""
Getter method for enable_interface_id, mapped from YANG variable /relay_agent/dhcpv6/options/config/enable_interface_id (boolean)
YANG Description: Enables DHCPv6 OPTION_INTERFACE_ID (18) to identify the
interface on which the client message was received.
"""
return self.__enable_interface_id
def _set_enable_interface_id(self, v, load=False):
"""
Setter method for enable_interface_id, mapped from YANG variable /relay_agent/dhcpv6/options/config/enable_interface_id (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable_interface_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable_interface_id() directly.
YANG Description: Enables DHCPv6 OPTION_INTERFACE_ID (18) to identify the
interface on which the client message was received.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-interface-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enable_interface_id must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable-interface-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/relay-agent', defining_module='openconfig-relay-agent', yang_type='boolean', is_config=True)""",
})
self.__enable_interface_id = t
if hasattr(self, '_set'):
self._set()
def _unset_enable_interface_id(self):
self.__enable_interface_id = YANGDynClass(base=YANGBool, | |
from collections.abc import Container, Iterable, Mapping
from copy import deepcopy
from datetime import datetime
import json
import os
import shutil
import time
from fair_research_login import NativeClient
from globus_nexus_client import NexusClient
import globus_sdk
from globus_sdk.response import GlobusHTTPResponse
import jsonschema
KNOWN_SCOPES = {
"transfer": "urn:globus:auth:scope:transfer.api.globus.org:all",
"search": "urn:globus:auth:scope:search.api.globus.org:search",
"search_ingest": "urn:globus:auth:scope:search.api.globus.org:all",
"data_mdf": "urn:globus:auth:scope:data.materialsdatafacility.org:all",
"mdf_connect": "https://auth.globus.org/scopes/c17f27bb-f200-486a-b785-2a25e82af505/connect",
"petrel": "https://auth.globus.org/scopes/56ceac29-e98a-440a-a594-b41e7a084b62/all",
"groups": "urn:globus:auth:scope:nexus.api.globus.org:groups",
"dlhub": "https://auth.globus.org/scopes/81fc4156-a623-47f2-93ad-7184118226ba/auth"
}
KNOWN_TOKEN_KEYS = {
"transfer": "transfer.api.globus.org",
"search": "search.api.globus.org",
"search_ingest": "search.api.globus.org",
"data_mdf": "data.materialsdatafacility.org",
"mdf_connect": "mdf_dataset_submission",
"petrel": "petrel_https_server",
"groups": "nexus.api.globus.org",
"dlhub": "dlhub_org"
}
KNOWN_CLIENTS = {
KNOWN_SCOPES["transfer"]: globus_sdk.TransferClient,
"transfer": globus_sdk.TransferClient,
KNOWN_SCOPES["search"]: globus_sdk.SearchClient,
"search": globus_sdk.SearchClient,
KNOWN_SCOPES["search_ingest"]: globus_sdk.SearchClient,
"search_ingest": globus_sdk.SearchClient,
KNOWN_SCOPES["groups"]: NexusClient,
"groups": NexusClient
}
SEARCH_INDEX_UUIDS = {
"mdf": "1a57bbe5-5272-477f-9d31-343b8258b7a5",
"mdf-test": "5acded0c-a534-45af-84be-dcf042e36412",
"mdf-dev": "aeccc263-f083-45f5-ab1d-08ee702b3384",
"dlhub": "847c9105-18a0-4ffb-8a71-03dd76dfcc9d",
"dlhub-test": "5c89e0a9-00e5-4171-b415-814fe4d0b8af"
}
DEFAULT_APP_NAME = "UNNAMED_APP"
DEFAULT_CLIENT_ID = "984464e2-90ab-433d-8145-ac0215d26c8e"
DEFAULT_INTERVAL = 1 * 60 # 1 minute, in seconds
DEFAULT_INACTIVITY_TIME = 1 * 24 * 60 * 60 # 1 day, in seconds
STD_TIMEOUT = 5 * 60 # 5 minutes
# *************************************************
# * Authentication utilities
# *************************************************
def login(services, make_clients=True, clear_old_tokens=False, **kwargs):
"""Log in to Globus services.
Arguments:
services (list of str): The service names or scopes to authenticate to.
make_clients (bool): If ``True``, will make and return appropriate clients with
generated tokens. If ``False``, will only return authorizers.
**Default**: ``True``.
clear_old_tokens (bool): Force a login flow, even if loaded tokens are valid.
Same effect as ``force``; if either is ``True``, a new login flow is forced.
**Default**: ``False``.
Keyword Arguments:
app_name (str): Name of the app/script/client. Used for the named grant during consent,
and the local server browser page by default.
**Default**: ``'UNKNOWN_APP'``.
client_id (str): The ID of the client registered with Globus at
https://developers.globus.org
**Default**: The MDF Native Clients ID.
no_local_server (bool): Disable spinning up a local server to automatically
copy-paste the auth code. THIS IS REQUIRED if you are on a remote server.
When used locally with no_local_server=False, the domain is localhost with
a randomly chosen open port number.
**Default**: ``False``.
no_browser (bool): Do not automatically open the browser for the Globus Auth URL.
Display the URL instead and let the user navigate to that location manually.
**Default**: ``False``.
refresh_tokens (bool): Use Globus Refresh Tokens to extend login time.
**Default**: ``True``.
force (bool): Force a login flow, even if loaded tokens are valid.
Same effect as ``clear_old_tokens``. If one of these is ``True``, the effect
triggers. **Default**: ``False``.
Returns:
dict: The clients and authorizers requested, indexed by service name.
For example, if ``login()`` is told to auth with ``'search'``
then the search client will be in the ``'search'`` field.
"""
if isinstance(services, str):
services = [services]
# Set up arg defaults
app_name = kwargs.get("app_name") or DEFAULT_APP_NAME
client_id = kwargs.get("client_id") or DEFAULT_CLIENT_ID
native_client = NativeClient(client_id=client_id, app_name=app_name)
# Translate known services into scopes, existing scopes are cleaned
servs = []
for serv in services:
if isinstance(serv, str):
servs += serv.lower().strip().split(" ")
else:
servs += list(serv)
scopes = [KNOWN_SCOPES.get(sc, sc) for sc in servs]
native_client.login(requested_scopes=scopes,
no_local_server=kwargs.get("no_local_server", False),
no_browser=kwargs.get("no_browser", False),
refresh_tokens=kwargs.get("refresh_tokens", True),
force=clear_old_tokens or kwargs.get("force", False))
all_authorizers = native_client.get_authorizers_by_scope(requested_scopes=scopes)
returnables = {}
# Process authorizers (rename keys to originals, make clients)
for scope, auth in all_authorizers.items():
# User specified known_scope name and not scope directly
if scope not in servs:
try:
key = [k for k, v in KNOWN_SCOPES.items() if scope == v][0]
except IndexError: # Not a known scope(?), fallback to scope as key
key = scope
# User specified scope directly
else:
key = scope
# User wants clients and client supported
if make_clients and scope in KNOWN_CLIENTS.keys():
returnables[key] = KNOWN_CLIENTS[scope](authorizer=auth, http_timeout=STD_TIMEOUT)
# Returning authorizer only
else:
returnables[key] = auth
return returnables
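# Hedged usage sketch: a first call triggers an interactive Globus login; with
# make_clients=True the returned dict maps service names to ready clients
# ("example_app" is illustrative).
def _transfer_client_example():
    clients = login(["transfer"], app_name="example_app")
    return clients["transfer"]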
def confidential_login(services, client_id, client_secret, make_clients=True):
"""Log in to Globus services as a confidential client
(a client with its own login information, i.e. NOT a human's account).
Arguments:
services (list of str): Services to authenticate with.
client_id (str): The ID of the client.
client_secret (str): The client's secret for authentication.
make_clients (bool): If ``True``, will make and return appropriate clients
with generated tokens.
If ``False``, will only return authorizers.
**Default**: ``True``.
Returns:
dict: The clients and authorizers requested, indexed by service name.
"""
if isinstance(services, str):
services = [services]
conf_client = globus_sdk.ConfidentialAppAuthClient(client_id, client_secret)
servs = []
for serv in services:
if isinstance(serv, str):
servs += serv.lower().strip().split(" ")
else:
servs += list(serv)
# Translate services into scopes as possible
scopes = [KNOWN_SCOPES.get(sc, sc) for sc in servs]
# Make authorizers for each scope requested
all_authorizers = {}
for scope in scopes:
# TODO: Allow non-CC authorizers?
try:
all_authorizers[scope] = globus_sdk.ClientCredentialsAuthorizer(conf_client, scope)
except Exception as e:
print("Error: Cannot create authorizer for scope '{}' ({})".format(scope, str(e)))
returnables = {}
# Process authorizers (rename keys to originals, make clients)
for scope, auth in all_authorizers.items():
# User specified known_scope name and not scope directly
if scope not in servs:
try:
key = [k for k, v in KNOWN_SCOPES.items() if scope == v][0]
except IndexError: # Not a known scope(?), fallback to scope as key
key = scope
# User specified scope directly
else:
key = scope
# User wants clients and client supported
if make_clients and scope in KNOWN_CLIENTS.keys():
returnables[key] = KNOWN_CLIENTS[scope](authorizer=auth, http_timeout=STD_TIMEOUT)
# Returning authorizer only
else:
returnables[key] = auth
return returnables
def anonymous_login(services):
"""Initialize service clients without authenticating to Globus Auth.
Note:
Clients may have reduced functionality without authentication.
Arguments:
services (str or list of str): The services to initialize clients for.
Returns:
dict: The clients requested, indexed by service name.
"""
if isinstance(services, str):
services = [services]
clients = {}
# Initialize valid services
for serv in services:
try:
clients[serv] = KNOWN_CLIENTS[serv](http_timeout=STD_TIMEOUT)
except KeyError: # No known client
print("Error: No known client for '{}' service.".format(serv))
except Exception: # Other issue, probably auth
print("Error: Unable to create client for '{}' service.\n"
"Anonymous access may not be allowed.".format(serv))
return clients
def logout(app_name=None, client_id=None):
"""Revoke and delete all saved tokens for the app.
Arguments:
app_name (str): Name of the app/script/client.
**Default**: ``'UNKNOWN_APP'``.
client_id (str): The ID of the client.
**Default**: The MDF Native Clients ID.
"""
if not app_name:
app_name = DEFAULT_APP_NAME
if not client_id:
client_id = DEFAULT_CLIENT_ID
NativeClient(app_name=app_name, client_id=client_id).logout()
# *************************************************
# * File utilities
# *************************************************
def uncompress_tree(root, delete_archives=False):
"""Uncompress all tar, zip, and gzip archives under a given directory.
Archives will be extracted to a sibling directory named after the archive (minus extension).
This process can be slow, depending on the number and size of archives.
Arguments:
root (str): The path to the starting (root) directory.
        delete_archives (bool): If ``True``, will delete archive files after they are extracted.
            If ``False``, will preserve archive files.
**Default**: ``False``.
Returns:
dict: Results of the operation.
* **success** (*bool*) - If the extraction succeeded.
* **num_extracted** (*int*) - Number of archives extracted.
* **files_errored** (*list of str*) - The files that threw an unexpected
exception when extracted.
"""
num_extracted = 0
error_files = []
# Start list of dirs to extract with root
# Later, add newly-created dirs with extracted files, because os.walk will miss them
extract_dirs = [os.path.abspath(os.path.expanduser(root))]
while len(extract_dirs) > 0:
for path, dirs, files in os.walk(extract_dirs.pop()):
for filename in files:
try:
# Extract my_archive.tar to sibling dir my_archive
archive_path = os.path.join(path, filename)
extracted_files_dir = os.path.join(path, os.path.splitext(filename)[0])
shutil.unpack_archive(archive_path, extracted_files_dir)
except shutil.ReadError:
                    # ReadError means the file is not an (extractable) archive
pass
except Exception:
error_files.append(os.path.join(path, filename))
else:
num_extracted += 1
# Add new dir to list of dirs to process
extract_dirs.append(extracted_files_dir)
if delete_archives:
os.remove(archive_path)
return {
"success": True,
"num_extracted": num_extracted,
"files_errored": error_files
}
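# Usage sketch (hypothetical path): extract everything under ~/data and report
# how many archives were unpacked:
#
#   result = uncompress_tree("~/data", delete_archives=False)
#   print("Extracted {} archive(s), {} error(s)".format(
#       result["num_extracted"], len(result["files_errored"])))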
# *************************************************
# * Globus Search utilities
# *************************************************
def get_globus_id_type(uuid):
"""**Not implemented**
Determine the type of resource a Globus UUID identifies.
This utility is not comprehensive.
Arguments:
uuid (str): A Globus UUID.
Returns:
str: The type of resource identified. Types this utility can identify:
* ``identity``: A user's identity in Globus Auth
* ``group``: A Globus Group
* ``endpoint``: A Globus Transfer Endpoint
This utility cannot detect other types of UUID.
If the UUID is not one of the above types, or is invalid, the return
value will be ``unknown``.
"""
# TODO: Actually figure out if this is possible without
    # serious Auth
"2147483647",
"min_val": "64", "setting": "8192", "sourcefile": "/var/lib/pgsql/10/data/postgresql.auto.conf",
"unit": "kB", "vartype": "integer", "val_in_bytes": 4194304 } }
contains:
setting:
description: Current value of the parameter.
returned: always
type: str
sample: 49152
unit:
description: Implicit unit of the parameter.
returned: always
type: str
sample: kB
boot_val:
description:
- Parameter value assumed at server startup if the parameter is not otherwise set.
returned: always
type: str
sample: 4096
min_val:
description:
- Minimum allowed value of the parameter (null for non-numeric values).
returned: always
type: str
sample: 64
max_val:
description:
- Maximum allowed value of the parameter (null for non-numeric values).
returned: always
type: str
sample: 2147483647
sourcefile:
description:
- Configuration file the current value was set in.
- Null for values set from sources other than configuration files,
      or when examined by a user who is neither a superuser nor a member of pg_read_all_settings.
- Helpful when using include directives in configuration files.
returned: always
type: str
sample: /var/lib/pgsql/10/data/postgresql.auto.conf
context:
description:
- Context required to set the parameter's value.
- For more information see U(https://www.postgresql.org/docs/current/view-pg-settings.html).
returned: always
type: str
sample: user
vartype:
description:
- Parameter type (bool, enum, integer, real, or string).
returned: always
type: str
sample: integer
val_in_bytes:
description:
- Current value of the parameter in bytes.
returned: if supported
type: int
sample: 2147483647
pretty_val:
description:
- Value presented in the pretty form.
returned: always
type: str
sample: 2MB
pending_restart:
description:
    - True if the value has been changed in the configuration file but needs a restart; false otherwise.
- Returns only if C(settings) is passed.
returned: always
type: bool
sample: false
'''
from fnmatch import fnmatch
try:
from psycopg2.extras import DictCursor
except ImportError:
# psycopg2 is checked by connect_to_db()
# from ansible.module_utils.postgres
pass
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.database import (
check_input,
)
from ansible_collections.community.general.plugins.module_utils.postgres import (
connect_to_db,
get_conn_params,
postgres_common_argument_spec,
)
from ansible.module_utils.six import iteritems
from ansible.module_utils._text import to_native
# ===========================================
# PostgreSQL module specific support methods.
#
class PgDbConn(object):
"""Auxiliary class for working with PostgreSQL connection objects.
Arguments:
module (AnsibleModule): Object of AnsibleModule class that
contains connection parameters.
"""
def __init__(self, module):
self.module = module
self.db_conn = None
self.cursor = None
def connect(self):
"""Connect to a PostgreSQL database and return a cursor object.
Note: connection parameters are passed by self.module object.
"""
conn_params = get_conn_params(self.module, self.module.params, warn_db_default=False)
self.db_conn = connect_to_db(self.module, conn_params)
return self.db_conn.cursor(cursor_factory=DictCursor)
def reconnect(self, dbname):
"""Reconnect to another database and return a PostgreSQL cursor object.
Arguments:
dbname (string): Database name to connect to.
"""
self.db_conn.close()
self.module.params['database'] = dbname
return self.connect()
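# Usage sketch: PgDbConn hides connection setup so callers only deal with a
# DictCursor. 'module' is assumed to be an AnsibleModule built from
# postgres_common_argument_spec():
#
#   db_obj = PgDbConn(module)
#   cursor = db_obj.connect()
#   cursor.execute('SELECT version()')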
class PgClusterInfo(object):
"""Class for collection information about a PostgreSQL instance.
Arguments:
module (AnsibleModule): Object of AnsibleModule class.
db_conn_obj (psycopg2.connect): PostgreSQL connection object.
"""
def __init__(self, module, db_conn_obj):
self.module = module
self.db_obj = db_conn_obj
self.cursor = db_conn_obj.connect()
self.pg_info = {
"version": {},
"tablespaces": {},
"databases": {},
"replications": {},
"repl_slots": {},
"settings": {},
"roles": {},
"pending_restart_settings": [],
}
def collect(self, val_list=False):
"""Collect information based on 'filter' option."""
subset_map = {
"version": self.get_pg_version,
"tablespaces": self.get_tablespaces,
"databases": self.get_db_info,
"replications": self.get_repl_info,
"repl_slots": self.get_rslot_info,
"settings": self.get_settings,
"roles": self.get_role_info,
}
incl_list = []
excl_list = []
# Notice: incl_list and excl_list
# don't make sense together, therefore,
# if incl_list is not empty, we collect
# only values from it:
if val_list:
for i in val_list:
if i[0] != '!':
incl_list.append(i)
else:
excl_list.append(i.lstrip('!'))
if incl_list:
for s in subset_map:
for i in incl_list:
if fnmatch(s, i):
subset_map[s]()
break
        elif excl_list:
            # Collect info for every subset that is not excluded:
            for s in subset_map:
                found = False
                for e in excl_list:
                    if fnmatch(s, e):
                        found = True
                if not found:
                    subset_map[s]()
# Default behaviour, if include or exclude is not passed:
else:
# Just collect info for each item:
for s in subset_map:
subset_map[s]()
return self.pg_info
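    # Filter semantics, for illustration: collect(['settings', 'databases'])
    # gathers only those two subsets, collect(['!repl*']) gathers everything
    # except subsets matching 'repl*', and an empty filter gathers all subsets.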
def get_pub_info(self):
"""Get publication statistics."""
query = ("SELECT p.*, r.rolname AS ownername "
"FROM pg_catalog.pg_publication AS p "
"JOIN pg_catalog.pg_roles AS r "
"ON p.pubowner = r.oid")
result = self.__exec_sql(query)
if result:
result = [dict(row) for row in result]
else:
return {}
publications = {}
for elem in result:
if not publications.get(elem['pubname']):
publications[elem['pubname']] = {}
for key, val in iteritems(elem):
if key != 'pubname':
publications[elem['pubname']][key] = val
return publications
def get_subscr_info(self):
"""Get subscription statistics."""
query = ("SELECT s.*, r.rolname AS ownername, d.datname AS dbname "
"FROM pg_catalog.pg_subscription s "
"JOIN pg_catalog.pg_database d "
"ON s.subdbid = d.oid "
"JOIN pg_catalog.pg_roles AS r "
"ON s.subowner = r.oid")
result = self.__exec_sql(query)
if result:
result = [dict(row) for row in result]
else:
return {}
subscr_info = {}
for elem in result:
if not subscr_info.get(elem['dbname']):
subscr_info[elem['dbname']] = {}
if not subscr_info[elem['dbname']].get(elem['subname']):
subscr_info[elem['dbname']][elem['subname']] = {}
for key, val in iteritems(elem):
if key not in ('subname', 'dbname'):
subscr_info[elem['dbname']][elem['subname']][key] = val
return subscr_info
def get_tablespaces(self):
"""Get information about tablespaces."""
        # Check spcoptions exists:
opt = self.__exec_sql("SELECT column_name "
"FROM information_schema.columns "
"WHERE table_name = 'pg_tablespace' "
"AND column_name = 'spcoptions'")
if not opt:
query = ("SELECT s.spcname, a.rolname, s.spcacl "
"FROM pg_tablespace AS s "
"JOIN pg_authid AS a ON s.spcowner = a.oid")
else:
query = ("SELECT s.spcname, a.rolname, s.spcacl, s.spcoptions "
"FROM pg_tablespace AS s "
"JOIN pg_authid AS a ON s.spcowner = a.oid")
res = self.__exec_sql(query)
ts_dict = {}
for i in res:
ts_name = i[0]
ts_info = dict(
spcowner=i[1],
spcacl=i[2] if i[2] else '',
)
if opt:
ts_info['spcoptions'] = i[3] if i[3] else []
ts_dict[ts_name] = ts_info
self.pg_info["tablespaces"] = ts_dict
def get_ext_info(self):
"""Get information about existing extensions."""
# Check that pg_extension exists:
res = self.__exec_sql("SELECT EXISTS (SELECT 1 FROM "
"information_schema.tables "
"WHERE table_name = 'pg_extension')")
if not res[0][0]:
return True
query = ("SELECT e.extname, e.extversion, n.nspname, c.description "
"FROM pg_catalog.pg_extension AS e "
"LEFT JOIN pg_catalog.pg_namespace AS n "
"ON n.oid = e.extnamespace "
"LEFT JOIN pg_catalog.pg_description AS c "
"ON c.objoid = e.oid "
"AND c.classoid = 'pg_catalog.pg_extension'::pg_catalog.regclass")
res = self.__exec_sql(query)
ext_dict = {}
for i in res:
ext_ver = i[1].split('.')
ext_dict[i[0]] = dict(
extversion=dict(
major=int(ext_ver[0]),
minor=int(ext_ver[1]),
),
nspname=i[2],
description=i[3],
)
return ext_dict
def get_role_info(self):
"""Get information about roles (in PgSQL groups and users are roles)."""
query = ("SELECT r.rolname, r.rolsuper, r.rolcanlogin, "
"r.rolvaliduntil, "
"ARRAY(SELECT b.rolname "
"FROM pg_catalog.pg_auth_members AS m "
"JOIN pg_catalog.pg_roles AS b ON (m.roleid = b.oid) "
"WHERE m.member = r.oid) AS memberof "
"FROM pg_catalog.pg_roles AS r "
"WHERE r.rolname !~ '^pg_'")
res = self.__exec_sql(query)
rol_dict = {}
for i in res:
rol_dict[i[0]] = dict(
superuser=i[1],
canlogin=i[2],
valid_until=i[3] if i[3] else '',
member_of=i[4] if i[4] else [],
)
self.pg_info["roles"] = rol_dict
def get_rslot_info(self):
"""Get information about replication slots if exist."""
# Check that pg_replication_slots exists:
res = self.__exec_sql("SELECT EXISTS (SELECT 1 FROM "
"information_schema.tables "
"WHERE table_name = 'pg_replication_slots')")
if not res[0][0]:
return True
query = ("SELECT slot_name, plugin, slot_type, database, "
"active FROM pg_replication_slots")
res = self.__exec_sql(query)
# If there is no replication:
if not res:
return True
rslot_dict = {}
for i in res:
rslot_dict[i[0]] = dict(
plugin=i[1],
slot_type=i[2],
database=i[3],
active=i[4],
)
self.pg_info["repl_slots"] = rslot_dict
def get_settings(self):
"""Get server settings."""
# Check pending restart column exists:
pend_rest_col_exists = self.__exec_sql("SELECT 1 FROM information_schema.columns "
"WHERE table_name = 'pg_settings' "
"AND column_name = 'pending_restart'")
if not pend_rest_col_exists:
query = ("SELECT name, setting, unit, context, vartype, "
"boot_val, min_val, max_val, sourcefile "
"FROM pg_settings")
else:
query = ("SELECT name, setting, unit, context, vartype, "
"boot_val, min_val, max_val, sourcefile, pending_restart "
"FROM pg_settings")
res = self.__exec_sql(query)
set_dict = {}
for i in res:
val_in_bytes = None
setting = i[1]
if i[2]:
unit = i[2]
else:
unit = ''
if unit == 'kB':
val_in_bytes = int(setting) * 1024
elif unit == '8kB':
val_in_bytes = int(setting) * 1024 * 8
elif unit == 'MB':
val_in_bytes = int(setting) * 1024 * 1024
if val_in_bytes is not None and val_in_bytes < 0:
val_in_bytes = 0
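            # Worked example: shared_buffers often reports setting='16384' with
            # unit='8kB', so val_in_bytes = 16384 * 1024 * 8 = 134217728.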
setting_name = i[0]
pretty_val = self.__get_pretty_val(setting_name)
pending_restart = None
if pend_rest_col_exists:
pending_restart = i[9]
set_dict[setting_name] = dict(
setting=setting,
unit=unit,
context=i[3],
vartype=i[4],
boot_val=i[5] if i[5] else '',
min_val=i[6] if i[6] else '',
                max_val=i[7] if i[7] else '',
"""#### GUI
Module gathers all functions, classes and methods necessary to create GUI. Its
parts are divided for separate blocks represented by classes `Search`, `Form`,
`Buttons` and `Image` where each of them extends `tkinter.Frame`. `Searchbox`
is extended `tkinter.Combobox` class to application needs. `Gui` connects each
part and places them in main window which will be displayed. Modules used:
`pathlib`, `sys`, `webbrowser`, `PIL` and `tkinter` with `filedialog`,
`messagebox`, `ttk`.
#### License
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from pathlib import Path
import sys
import tkinter as tk
from tkinter.filedialog import askopenfile
import tkinter.messagebox as msg
import tkinter.ttk as ttk
import webbrowser
import PIL
import PIL.Image
from bookmeister.connection import Database
from bookmeister.record import FIELDS, Record
def show_no_connection():
"""Display error message about connection problem."""
    msg.showerror('Error', 'Could not perform operation. There may be a '
                  'problem with the database connection. Try again later.')
def create_label(container, message, row, column):
"""Display text in set `tkinter` container.
Parameters
----------
container : tk.Tk or tk.Frame
place where label will be bound
message : str
text displayed
row : int
row used with grid manager (place where label will be displayed)
column : int
column used with grid manager (place where label will be displayed)
"""
tk.Label(container, text=message, font='none 10').grid(
row=row, column=column, padx=12, pady=2, sticky='W')
class Gui(tk.Tk):
"""
Configure GUI.
Place individual widgets and allow their communication. Extend `tk.Tk`.
...
Attributes
----------
form : Form
used for communication with `Form` widget
search : Search
used for communication with `Search` widget
"""
def __init__(self, title, size):
"""Set window properties and fill it with elements.
Parameters
----------
title : str
name of application, displayed on top bar
size : str
            application size in the format 'widthxheight' (tkinter geometry string)
"""
super().__init__(className=title)
self.title(title)
self.geometry(size)
self.resizable(False, False)
self.iconphoto(False, tk.PhotoImage(file=self.get_icon()))
self.form = Form(self)
self.form.grid(row=1, column=0)
self.search = Search(self)
self.search.grid(row=0, column=0, pady=15)
Image(self).grid(row=2, column=0, padx=50, sticky='W')
Buttons(self).grid(row=3, column=0, sticky='E')
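        # Typical startup (hypothetical geometry): Gui('Bookmeister', '400x620').mainloop()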
def show_error(self, message, field=None):
"""Display error window.
        When `field` is specified, the incorrect value in that field is
        cleared. Otherwise the whole form is cleared.
Parameters
----------
message : str
text displayed in error window
field : str or None, optional
name of field where value will be cleared, default None: whole form
"""
if field is None:
self.form.clear()
else:
self.form.variables[field].set('')
message = f'Wrong value for field "{field}". {message}'
msg.showerror('Error', message)
@staticmethod
def get_icon():
"""Get icon path.
        Pick the method of getting the icon path according to the type of
        installation. It can be obtained from the module directory, or from
        `sys._MEIPASS` when the application is built into an executable.
Returns
-------
Path
`pathlib.Path` object containing information about icon path
"""
directory = getattr(sys, '_MEIPASS', Path(__file__).parent.absolute())
icon_path = Path(directory) / 'bookmeister.png'
return icon_path
class Search(tk.Frame):
"""
Create search widget. Extend `tk.Frame`.
...
Attributes
----------
box : Searchbox
used for communication with `Searchbox`
"""
def __init__(self, menu):
"""Create search bar elements.
Parameters
----------
menu : Gui
container where `Search` widget will be bound, used to pass `Form`
variables
"""
super().__init__(menu)
create_label(self, 'Search results:', 0, 0)
self.box = Searchbox(self, menu.form.variables)
self.box.grid(row=0, column=1)
class Searchbox(ttk.Combobox):
"""
    Create searchbox. Extend `ttk.Combobox`.
...
Attributes
----------
values : dict
has keys as displayed text in `Searchbox` and values as dictionaries
holding information about records
variables : dict
dictionary storing keys used in `Form` and corresponding to them
`tk.StringVar`s, modifying its values change text seen in form
"""
def __init__(self, frame, variables):
"""Configure searchbox.
Parameters
----------
frame : Search
container where `Searchbox` will be bound
variables : dict
dictionary with `Form` variables
"""
super().__init__(frame, width=50, state='readonly')
self.values = {}
self.variables = variables
self.bind('<<ComboboxSelected>>', self.do_on_select)
def assign_values(self, values):
"""Fill searchbox with values.
        Clear previously loaded elements. For each record in the passed values
        create a text entry which is placed in the searchbox. Store it as a key
        in the `self.values` dictionary together with the corresponding record
        data (a dictionary). Then pick the first record and fill the form with
        its values.
Parameters
----------
values : list
list with database records, their data stored in dictionaries
"""
self.clear()
try:
for data in values:
title = f'{data["ISBN"]} "{data["Title"]}" by {data["Author"]}'
self.values[title] = data
self['values'] = sorted(list(self.values.keys()))
self.current(0)
self.do_on_select()
except (TypeError, tk.TclError):
msg.showwarning('No records',
                            'Could not find any results for the set criteria.')
def assign_image(self, image):
"""Store image data in `self.values`.
Parameters
----------
image : str
string representing image id
"""
self.values[ttk.Combobox.get(self)]['Cover'] = image
def do_on_select(self, *_):
"""Fill form with values from selected record."""
for key in self.variables.keys():
try:
self.variables[key].set(
self.values[ttk.Combobox.get(self)][key])
except KeyError: # pragma: no cover
pass
def get(self):
"""Return record id string when selected or None and display error."""
try:
return self.values[ttk.Combobox.get(self)]['_id']
except KeyError:
msg.showerror('Error', 'No record selected. To perform operation '
'please select record first.')
def get_image(self):
"""Return image id when it exists else None."""
try:
return self.values[ttk.Combobox.get(self)]['Cover']
except KeyError:
return None
def clear(self):
"""Clear positions from searchbox."""
self.values.clear()
self.set('')
self['values'] = []
class Form(tk.Frame):
"""
Create form widget. Extend `tk.Frame`.
...
Attributes
----------
menu : Gui
used to bind `Form` widget and communication with other menu elements
variables : dict
storing `tk.StringVar`s with corresponding them names as keys
"""
def __init__(self, menu):
"""Configure form widget."""
super().__init__(menu)
self.menu = menu
self.variables = {}
for place, name in enumerate(FIELDS):
create_label(self, f'{name}:', place, 0)
self.create_entry(name, '', place, 1)
tk.Button(self, text='Clear', width=4, command=self.clear).grid(
row=place + 1, column=1, sticky='NE')
self.create_checkbutton('Hardcover', place + 1, 0)
def get(self, silent=False):
"""Return dictionary with fields names and their values from form.
Values are cast to types expected in database. When `silent` is set to
`False` display error window in case of incorrect field value.
Parameters
----------
silent : bool, optional
specifies if error windows are shown or not in case of wrong values
"""
record = Record()
for key, value in self.variables.items():
try:
record[key] = value.get()
except ValueError as error:
if not silent:
self.menu.show_error(error, key)
record.cast()
return record
def clear(self):
"""Clear each variable value and remove records from `Searchbox`."""
self.menu.search.box.clear()
for variable in self.variables.values():
try:
variable.set('')
except tk.TclError:
variable.set(False)
def create_checkbutton(self, name, row, column):
"""Display checkbutton.
Bind `tk.Checkbutton` to `Form` frame. Create `tk.BooleanVar` and store
it in `self.variables`. Display label with set name.
Parameters
----------
name : str
text displayed before checkbutton, used as key in `self.variables`
row : int
row used with grid manager (place where button will be shown)
column : int
column used with grid manager (place where button will be shown)
"""
create_label(self, f'{name}: ', row, column)
content = tk.BooleanVar(self)
tk.Checkbutton(self, variable=content).grid(
row=row, column=column + 1, padx=3, sticky='W')
self.variables[name] = content
def create_entry(self, name, value, row, column):
"""Display entry.
Bind `tk.Entry` to `Form` frame. Create `tk.StringVar` and store it in
`self.variables` with key passed in name.
Parameters
----------
name : str
used as key in `self.variables`
value : str
default value displayed in entry on application startup
row : int
row used with grid manager (place where entry will be displayed)
column : int
column used with grid manager (place where entry will be displayed)
"""
content = tk.StringVar(self, value=value)
tk.Entry(self, width=40, textvariable=content).grid(
row=row, column=column, padx=10, pady=2, sticky='W')
self.variables[name] = content
class Image(tk.Frame):
"""
Create image widget. Extend `tk.Frame`.
...
Attributes
----------
menu : Gui
used to bind `Image` widget and communication with other menu elements
"""
def __init__(self, menu):
"""Create buttons for interaction with images."""
super().__init__(menu)
self.menu = menu
tk.Button(self, text='Add cover', width=8,
command=self.add_image).grid(row=0, column=0, pady=15)
tk.Button(self, text='View cover', width=8,
command=self.view_image).grid(row=0, column=1, sticky='W')
def add_image(self):
"""Update selected record image in database.
When record is selected in `Searchbox` open window where user can pick
image file. Then convert image with `bookmeister.picture` module. If
it succeed add it to database else display error window.
"""
selected = self.menu.search.box.get()
if selected:
path = askopenfile(initialdir=Path.home())
if path:
if self.verify(path.name):
image_id = Database().upload_image(path.name)
if image_id:
if Database().update(selected, {'Cover': image_id}):
self.menu.search.box.assign_image(image_id)
msg.showinfo('Done',
'Image successfully saved.')
else:
show_no_connection()
else:
                    msg.showerror('Error', 'Wrong image file
#coding:utf-8
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
import six
from collections import namedtuple
import paddle.fluid as fluid
from paddlehub.reader import tokenization
from paddlehub.common.logger import logger
from paddlehub.common.utils import sys_stdout_encoding
from paddlehub.dataset.dataset import InputExample
from .batching import pad_batch_data
import paddlehub as hub
from .base_reader import BaseReader
class BaseNLPReader(BaseReader):
def __init__(self,
vocab_path,
dataset=None,
label_map_config=None,
max_seq_len=512,
do_lower_case=True,
random_seed=None,
use_task_id=False,
sp_model_path=None,
word_dict_path=None,
in_tokens=False):
super(BaseNLPReader, self).__init__(dataset, random_seed)
self.max_seq_len = max_seq_len
if sp_model_path and word_dict_path:
self.tokenizer = tokenization.WSSPTokenizer(
vocab_path, sp_model_path, word_dict_path, ws=True, lower=True)
else:
self.tokenizer = tokenization.FullTokenizer(
vocab_file=vocab_path, do_lower_case=do_lower_case)
self.vocab = self.tokenizer.vocab
self.pad_id = self.vocab["[PAD]"]
self.cls_id = self.vocab["[CLS]"]
self.sep_id = self.vocab["[SEP]"]
self.mask_id = self.vocab["[MASK]"]
self.in_tokens = in_tokens
self.use_task_id = use_task_id
if self.use_task_id:
logger.warning(
"use_task_id has been de discarded since PaddleHub v1.4.0, it's no necessary to feed task_ids now."
)
self.Record_With_Label_Id = namedtuple(
'Record',
['token_ids', 'text_type_ids', 'position_ids', 'label_id'])
self.Record_Wo_Label_Id = namedtuple(
'Record', ['token_ids', 'text_type_ids', 'position_ids'])
def _truncate_seq_pair(self, tokens_a, tokens_b, max_length):
"""Truncates a sequence pair in place to the maximum length."""
# This is a simple heuristic which will always truncate the longer sequence
# one token at a time. This makes more sense than truncating an equal percent
# of tokens from each, since if one sequence is very short then each token
# that's truncated likely contains more information than a longer sequence.
while True:
total_length = len(tokens_a) + len(tokens_b)
if total_length <= max_length:
break
if len(tokens_a) > len(tokens_b):
tokens_a.pop()
else:
tokens_b.pop()
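    # For illustration: with max_length=5, tokens_a=['a', 'b', 'c', 'd'] and
    # tokens_b=['x', 'y', 'z'] end up as ['a', 'b', 'c'] and ['x', 'y'];
    # the longer list loses a token first at every step.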
def _convert_example_to_record(self,
example,
max_seq_length,
tokenizer,
phase=None):
"""Converts a single `Example` into a single `Record`."""
text_a = tokenization.convert_to_unicode(example.text_a)
tokens_a = tokenizer.tokenize(text_a)
tokens_b = None
if example.text_b is not None:
#if "text_b" in example._fields:
text_b = tokenization.convert_to_unicode(example.text_b)
tokens_b = tokenizer.tokenize(text_b)
if tokens_b:
# Modifies `tokens_a` and `tokens_b` in place so that the total
# length is less than the specified length.
# Account for [CLS], [SEP], [SEP] with "- 3"
self._truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3)
else:
# Account for [CLS] and [SEP] with "- 2"
if len(tokens_a) > max_seq_length - 2:
tokens_a = tokens_a[0:(max_seq_length - 2)]
# The convention in BERT/ERNIE is:
# (a) For sequence pairs:
# tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]
# type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1
# (b) For single sequences:
# tokens: [CLS] the dog is hairy . [SEP]
# type_ids: 0 0 0 0 0 0 0
#
# Where "type_ids" are used to indicate whether this is the first
# sequence or the second sequence. The embedding vectors for `type=0` and
# `type=1` were learned during pre-training and are added to the wordpiece
# embedding vector (and position vector). This is not *strictly* necessary
# since the [SEP] token unambiguously separates the sequences, but it makes
# it easier for the model to learn the concept of sequences.
#
# For classification tasks, the first vector (corresponding to [CLS]) is
        # used as the "sentence vector". Note that this only makes sense because
# the entire model is fine-tuned.
tokens = []
text_type_ids = []
tokens.append("[CLS]")
text_type_ids.append(0)
for token in tokens_a:
tokens.append(token)
text_type_ids.append(0)
tokens.append("[SEP]")
text_type_ids.append(0)
if tokens_b:
for token in tokens_b:
tokens.append(token)
text_type_ids.append(1)
tokens.append("[SEP]")
text_type_ids.append(1)
token_ids = tokenizer.convert_tokens_to_ids(tokens)
position_ids = list(range(len(token_ids)))
if self.label_map:
if example.label not in self.label_map:
raise KeyError(
"example.label = {%s} not in label" % example.label)
label_id = self.label_map[example.label]
else:
label_id = example.label
if phase != "predict":
record = self.Record_With_Label_Id(
token_ids=token_ids,
text_type_ids=text_type_ids,
position_ids=position_ids,
label_id=label_id)
else:
record = self.Record_Wo_Label_Id(
token_ids=token_ids,
text_type_ids=text_type_ids,
position_ids=position_ids)
return record
def _pad_batch_records(self, batch_records, phase):
raise NotImplementedError
def _prepare_batch_data(self, examples, batch_size, phase=None):
"""generate batch records"""
batch_records, max_len = [], 0
for index, example in enumerate(examples):
if phase == "train":
self.current_example = index
record = self._convert_example_to_record(example, self.max_seq_len,
self.tokenizer, phase)
max_len = max(max_len, len(record.token_ids))
if self.in_tokens:
to_append = (len(batch_records) + 1) * max_len <= batch_size
else:
to_append = len(batch_records) < batch_size
if to_append:
batch_records.append(record)
else:
yield self._pad_batch_records(batch_records, phase)
batch_records, max_len = [record], len(record.token_ids)
if batch_records:
yield self._pad_batch_records(batch_records, phase)
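    # Note: when self.in_tokens is True, batch_size above acts as a token
    # budget (a batch is emitted once (len(batch_records) + 1) * max_len
    # would exceed it) rather than as a fixed number of examples.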
def data_generator(self,
batch_size=1,
phase='train',
shuffle=True,
data=None,
return_list=True):
if phase != 'predict' and not self.dataset:
raise ValueError("The dataset is None ! It isn't allowed.")
if phase == 'train':
shuffle = True
examples = self.get_train_examples()
self.num_examples['train'] = len(examples)
elif phase == 'val' or phase == 'dev':
shuffle = False
examples = self.get_dev_examples()
self.num_examples['dev'] = len(examples)
elif phase == 'test':
shuffle = False
examples = self.get_test_examples()
self.num_examples['test'] = len(examples)
elif phase == 'predict':
shuffle = False
examples = []
seq_id = 0
for item in data:
# set label in order to run the program
if self.dataset:
label = list(self.label_map.keys())[0]
else:
label = 0
if len(item) == 1:
item_i = InputExample(
guid=seq_id, text_a=item[0], label=label)
elif len(item) == 2:
item_i = InputExample(
guid=seq_id,
text_a=item[0],
text_b=item[1],
label=label)
else:
raise ValueError(
"The length of input_text is out of handling, which must be 1 or 2!"
)
examples.append(item_i)
seq_id += 1
else:
raise ValueError(
"Unknown phase, which should be in ['train', 'dev', 'test', 'predict']."
)
def wrapper():
if shuffle:
np.random.shuffle(examples)
for batch_data in self._prepare_batch_data(
examples, batch_size, phase=phase):
if return_list:
# for DataFeeder
yield [batch_data]
else:
# for DataLoader
yield batch_data
return wrapper
class ClassifyReader(BaseNLPReader):
def _pad_batch_records(self, batch_records, phase=None):
batch_token_ids = [record.token_ids for record in batch_records]
batch_text_type_ids = [record.text_type_ids for record in batch_records]
batch_position_ids = [record.position_ids for record in batch_records]
padded_token_ids, input_mask, batch_seq_lens = pad_batch_data(
batch_token_ids,
max_seq_len=self.max_seq_len,
pad_idx=self.pad_id,
return_input_mask=True,
return_seq_lens=True)
padded_text_type_ids = pad_batch_data(
batch_text_type_ids,
max_seq_len=self.max_seq_len,
pad_idx=self.pad_id)
padded_position_ids = pad_batch_data(
batch_position_ids,
max_seq_len=self.max_seq_len,
pad_idx=self.pad_id)
return_list = [
padded_token_ids, padded_position_ids, padded_text_type_ids,
input_mask, batch_seq_lens
]
if phase != "predict":
batch_labels = [record.label_id for record in batch_records]
batch_labels = np.array(batch_labels).astype("int64").reshape(
[-1, 1])
return_list += [batch_labels]
return return_list
class SequenceLabelReader(BaseNLPReader):
def __init__(self,
vocab_path,
dataset=None,
label_map_config=None,
max_seq_len=512,
do_lower_case=True,
random_seed=None,
use_task_id=False,
sp_model_path=None,
word_dict_path=None,
in_tokens=False):
super(SequenceLabelReader, self).__init__(
vocab_path=vocab_path,
dataset=dataset,
label_map_config=label_map_config,
max_seq_len=max_seq_len,
do_lower_case=do_lower_case,
random_seed=random_seed,
use_task_id=use_task_id,
sp_model_path=sp_model_path,
word_dict_path=word_dict_path,
in_tokens=in_tokens)
if sp_model_path and word_dict_path:
self.tokenizer = tokenization.FullTokenizer(
vocab_file=vocab_path,
do_lower_case=do_lower_case,
use_sentence_piece_vocab=True)
def _pad_batch_records(self, batch_records, phase=None):
batch_token_ids = [record.token_ids for record in batch_records]
batch_text_type_ids = [record.text_type_ids for record in batch_records]
batch_position_ids = [record.position_ids for record in batch_records]
# padding
padded_token_ids, input_mask, batch_seq_lens = pad_batch_data(
batch_token_ids,
pad_idx=self.pad_id,
max_seq_len=self.max_seq_len,
return_input_mask=True,
return_seq_lens=True)
padded_text_type_ids = pad_batch_data(
batch_text_type_ids,
max_seq_len=self.max_seq_len,
pad_idx=self.pad_id)
padded_position_ids = pad_batch_data(
batch_position_ids,
max_seq_len=self.max_seq_len,
pad_idx=self.pad_id)
return_list = [
padded_token_ids, padded_position_ids, padded_text_type_ids,
input_mask
]
if phase != "predict":
batch_label_ids = [record.label_id for record in batch_records]
padded_label_ids = pad_batch_data(
batch_label_ids,
max_seq_len=self.max_seq_len,
pad_idx=len(self.label_map) - 1)
return_list += [padded_label_ids, batch_seq_lens]
else:
return_list += [batch_seq_lens]
return return_list
def _reseg_token_label(self, tokens, tokenizer, phase, labels=None):
if phase != "predict":
if len(tokens) != len(labels):
raise ValueError(
"The length of tokens must be same with labels")
ret_tokens = []
ret_labels = []
for token, label in zip(tokens, labels):
sub_token = tokenizer.tokenize(token)
if len(sub_token) == 0:
continue
ret_tokens.extend(sub_token)
ret_labels.append(label)
if len(sub_token) < 2:
continue
sub_label = label
if label.startswith("B-"):
sub_label = "I-" + label[2:]
ret_labels.extend([sub_label] * (len(sub_token) - 1))
if len(ret_tokens) != len(ret_labels):
raise ValueError(
"The length of ret_tokens can't match with labels")
return ret_tokens, ret_labels
else:
ret_tokens = []
for token in tokens:
sub_token = tokenizer.tokenize(token)
if len(sub_token) == 0:
continue
ret_tokens.extend(sub_token)
if len(sub_token) < 2:
continue
return ret_tokens
def _convert_example_to_record(self,
example,
max_seq_length,
tokenizer,
phase=None):
        # Tokens in text_a are expected to be joined by the "\2" (STX) separator.
        tokens = tokenization.convert_to_unicode(example.text_a).split(u"\2")
if phase != "predict":
            labels = tokenization.convert_to_unicode(example.label).split(u"\2")
tokens, labels = self._reseg_token_label(
tokens=tokens, labels=labels, tokenizer=tokenizer, phase=phase)
if len(tokens) > max_seq_length - 2:
tokens = tokens[0:(max_seq_length - 2)]
labels = labels[0:(max_seq_length - 2)]
tokens = ["[CLS]"] + tokens + ["[SEP]"]
token_ids = tokenizer.convert_tokens_to_ids(tokens)
position_ids = list(range(len(token_ids)))
text_type_ids = [0] * len(token_ids)
no_entity_id = len(self.label_map) - 1
        label_ids =
#!/usr/bin/env python3
#
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compare emoji image file namings against unicode property data.
The intent of this script is to check if the resulting font will pass
the Android linter:
https://android.googlesource.com/platform/frameworks/base/+/master/tools/fonts/fontchain_linter.py
"""
from __future__ import print_function
import argparse
import collections
import glob
import os
from os import path
import re
import sys
from nototools import unicode_data
import add_aliases
ZWJ = 0x200d
EMOJI_VS = 0xfe0f
END_TAG = 0xe007f
def _make_tag_set():
tag_set = set()
tag_set |= set(range(0xe0030, 0xe003a)) # 0-9
tag_set |= set(range(0xe0061, 0xe007b)) # a-z
tag_set.add(END_TAG)
return tag_set
TAG_SET = _make_tag_set()
_namedata = None
def seq_name(seq):
global _namedata
if not _namedata:
def strip_vs_map(seq_map):
return {
unicode_data.strip_emoji_vs(k): v
for k, v in seq_map.items()}
_namedata = [
strip_vs_map(unicode_data.get_emoji_combining_sequences()),
strip_vs_map(unicode_data.get_emoji_flag_sequences()),
strip_vs_map(unicode_data.get_emoji_modifier_sequences()),
strip_vs_map(unicode_data.get_emoji_zwj_sequences()),
]
if len(seq) == 1:
return unicode_data.name(seq[0], None)
for data in _namedata:
if seq in data:
return data[seq]
if EMOJI_VS in seq:
non_vs_seq = unicode_data.strip_emoji_vs(seq)
for data in _namedata:
if non_vs_seq in data:
return data[non_vs_seq]
return None
def _check_no_vs(sorted_seq_to_filepath):
"""Our image data does not use emoji presentation variation selectors."""
for seq, fp in sorted_seq_to_filepath.items():
if EMOJI_VS in seq:
print(f'check no VS: {EMOJI_VS} in path: {fp}')
def _check_valid_emoji_cps(sorted_seq_to_filepath, unicode_version):
"""Ensure all cps in these sequences are valid emoji cps or specific cps
used in forming emoji sequences. This is a 'pre-check' that reports
this specific problem."""
valid_cps = set(unicode_data.get_emoji())
if unicode_version is None or unicode_version >= unicode_data.PROPOSED_EMOJI_AGE:
valid_cps |= unicode_data.proposed_emoji_cps()
else:
valid_cps = set(
cp for cp in valid_cps if unicode_data.age(cp) <= unicode_version)
valid_cps.add(0x200d) # ZWJ
valid_cps.add(0x20e3) # combining enclosing keycap
valid_cps.add(0xfe0f) # variation selector (emoji presentation)
valid_cps.add(0xfe82b) # PUA value for unknown flag
valid_cps |= TAG_SET # used in subregion tag sequences
not_emoji = {}
for seq, fp in sorted_seq_to_filepath.items():
for cp in seq:
if cp not in valid_cps:
if cp not in not_emoji:
not_emoji[cp] = []
not_emoji[cp].append(fp)
if len(not_emoji):
print(
f'check valid emoji cps: {len(not_emoji)} non-emoji cp found', file=sys.stderr)
for cp in sorted(not_emoji):
fps = not_emoji[cp]
print(
f'check valid emoji cps: {cp} (in {len(fps)} sequences)', file=sys.stderr)
def _check_zwj(sorted_seq_to_filepath):
"""Ensure zwj is only between two appropriate emoji. This is a 'pre-check'
that reports this specific problem."""
for seq, fp in sorted_seq_to_filepath.items():
if ZWJ not in seq:
continue
if seq[0] == ZWJ:
print(f'check zwj: zwj at head of sequence in {fp}', file=sys.stderr)
if len(seq) == 1:
continue
if seq[-1] == ZWJ:
print(f'check zwj: zwj at end of sequence in {fp}', file=sys.stderr)
for i, cp in enumerate(seq):
if cp == ZWJ:
if i > 0:
pcp = seq[i-1]
if pcp != EMOJI_VS and not unicode_data.is_emoji(pcp):
print(
                        f'check zwj: non-emoji {pcp} precedes ZWJ in {fp}',
file=sys.stderr)
if i < len(seq) - 1:
fcp = seq[i+1]
if not unicode_data.is_emoji(fcp):
print(
f'check zwj: non-emoji {fcp} follows ZWJ in {fp}',
file=sys.stderr)
def _check_flags(sorted_seq_to_filepath):
"""Ensure regional indicators are only in sequences of one or two, and
never mixed."""
for seq, fp in sorted_seq_to_filepath.items():
have_reg = None
for cp in seq:
is_reg = unicode_data.is_regional_indicator(cp)
            if have_reg is None:
have_reg = is_reg
elif have_reg != is_reg:
print(
f'check flags: mix of regional and non-regional in {fp}',
file=sys.stderr)
if have_reg and len(seq) > 2:
            # We provide dummy glyphs for regional indicators, so there are sequences
            # with single regional indicator symbols; the len check handles this.
print(
f'check flags: regional indicator sequence length != 2 in {fp}',
file=sys.stderr)
def _check_tags(sorted_seq_to_filepath):
"""Ensure tag sequences (for subregion flags) conform to the spec. We don't
validate against CLDR, just that there's a sequence of 2 or more tags starting
and ending with the appropriate codepoints."""
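    # For reference, a well-formed tag sequence is the England flag:
    # 1f3f4 e0067 e0062 e0065 e006e e0067 e007f (black flag, 'gbeng' tags, end tag).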
BLACK_FLAG = 0x1f3f4
BLACK_FLAG_SET = set([BLACK_FLAG])
for seq, fp in sorted_seq_to_filepath.items():
seq_set = set(cp for cp in seq)
overlap_set = seq_set & TAG_SET
if not overlap_set:
continue
if seq[0] != BLACK_FLAG:
print(f'check tags: bad start tag in {fp}')
elif seq[-1] != END_TAG:
print(f'check tags: bad end tag in {fp}')
elif len(seq) < 4:
print(f'check tags: sequence too short in {fp}')
elif seq_set - TAG_SET != BLACK_FLAG_SET:
print(f'check tags: non-tag items in {fp}')
def _check_skintone(sorted_seq_to_filepath):
"""Ensure skin tone modifiers are not applied to emoji that are not defined
to take them. May appear standalone, though. Also check that emoji that take
skin tone modifiers have a complete set."""
base_to_modifiers = collections.defaultdict(set)
for seq, fp in sorted_seq_to_filepath.items():
for i, cp in enumerate(seq):
if unicode_data.is_skintone_modifier(cp):
if i == 0:
if len(seq) > 1:
print(
f'check skintone: skin color selector first in sequence {fp}',
file=sys.stderr)
# standalone are ok
continue
pcp = seq[i-1]
if not unicode_data.is_emoji_modifier_base(pcp):
print(
f'check skintone: emoji skintone modifier applied to non-base at {i}: {fp}',
file=sys.stderr)
else:
if pcp not in base_to_modifiers:
base_to_modifiers[pcp] = set()
base_to_modifiers[pcp].add(cp)
for cp, modifiers in sorted(base_to_modifiers.items()):
if len(modifiers) != 5:
print(
'check skintone: base %04x has %d modifiers defined (%s) in %s' % (
cp, len(modifiers),
', '.join('%04x' % cp for cp in sorted(modifiers)), fp),
file=sys.stderr)
def _check_zwj_sequences(sorted_seq_to_filepath, unicode_version):
"""Verify that zwj sequences are valid for the given unicode version."""
for seq, fp in sorted_seq_to_filepath.items():
if ZWJ not in seq:
continue
age = unicode_data.get_emoji_sequence_age(seq)
if age is None or unicode_version is not None and age > unicode_version:
print(f'check zwj sequences: undefined sequence {fp}')
def _check_no_alias_sources(sorted_seq_to_filepath):
"""Check that we don't have sequences that we expect to be aliased to
some other sequence."""
aliases = add_aliases.read_default_emoji_aliases()
for seq, fp in sorted_seq_to_filepath.items():
if seq in aliases:
print(f'check no alias sources: aliased sequence {fp}')
def _check_coverage(seq_to_filepath, unicode_version):
"""Ensure we have all and only the cps and sequences that we need for the
font as of this version."""
coverage_pass = True
age = unicode_version
non_vs_to_canonical = {}
for k in seq_to_filepath:
if EMOJI_VS in k:
non_vs = unicode_data.strip_emoji_vs(k)
non_vs_to_canonical[non_vs] = k
aliases = add_aliases.read_default_emoji_aliases()
for k, v in sorted(aliases.items()):
if v not in seq_to_filepath and v not in non_vs_to_canonical:
alias_str = unicode_data.seq_to_string(k)
target_str = unicode_data.seq_to_string(v)
print(f'coverage: alias {alias_str} missing target {target_str}')
coverage_pass = False
continue
if k in seq_to_filepath or k in non_vs_to_canonical:
alias_str = unicode_data.seq_to_string(k)
target_str = unicode_data.seq_to_string(v)
print(f'coverage: alias {alias_str} already exists as {target_str} ({seq_name(v)})')
coverage_pass = False
continue
filename = seq_to_filepath.get(v) or seq_to_filepath[non_vs_to_canonical[v]]
seq_to_filepath[k] = 'alias:' + filename
# check single emoji, this includes most of the special chars
emoji = sorted(unicode_data.get_emoji())
for cp in emoji:
if tuple([cp]) not in seq_to_filepath:
print(
f'coverage: missing single {cp} ({unicode_data.name(cp)})')
coverage_pass = False
# special characters
# all but combining enclosing keycap are currently marked as emoji
for cp in [ord('*'), ord('#'), ord(u'\u20e3')] + list(range(0x30, 0x3a)):
if cp not in emoji and tuple([cp]) not in seq_to_filepath:
print(f'coverage: missing special {cp} ({unicode_data.name(cp)})')
coverage_pass = False
# combining sequences
comb_seq_to_name = sorted(
unicode_data._emoji_sequence_data.items())
for seq, name in comb_seq_to_name:
if seq not in seq_to_filepath:
# strip vs and try again
non_vs_seq = unicode_data.strip_emoji_vs(seq)
if non_vs_seq not in seq_to_filepath:
print(f'coverage: missing combining sequence {unicode_data.seq_to_string(seq)} ({name})')
coverage_pass = False
# check for 'unknown flag'
# this is either emoji_ufe82b or 'unknown_flag', but we filter out things that
# don't start with our prefix so 'unknown_flag' would be excluded by default.
if tuple([0xfe82b]) not in seq_to_filepath:
print('coverage: missing unknown flag PUA fe82b')
coverage_pass = False
if not coverage_pass:
exit("Please fix the problems metioned above or run: make BYPASS_SEQUENCE_CHECK='True'")
def check_sequence_to_filepath(seq_to_filepath, unicode_version, coverage):
sorted_seq_to_filepath = collections.OrderedDict(
sorted(seq_to_filepath.items()))
_check_no_vs(sorted_seq_to_filepath)
_check_valid_emoji_cps(sorted_seq_to_filepath, unicode_version)
_check_zwj(sorted_seq_to_filepath)
_check_flags(sorted_seq_to_filepath)
_check_tags(sorted_seq_to_filepath)
_check_skintone(sorted_seq_to_filepath)
_check_zwj_sequences(sorted_seq_to_filepath, unicode_version)
_check_no_alias_sources(sorted_seq_to_filepath)
if coverage:
_check_coverage(sorted_seq_to_filepath, unicode_version)
def create_sequence_to_filepath(name_to_dirpath, prefix, suffix):
"""Check names, and convert name to sequences for names that are ok,
returning a sequence to file path mapping. Reports bad segments
of a name to stderr."""
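    # For illustration, a name like '<prefix>1f469_200d_2764<suffix>' yields
    # the sequence (0x1f469, 0x200d, 0x2764); each underscore-separated
    # segment must match segment_re below (4 to 6 hex digits).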
segment_re = re.compile(r'^[0-9a-f]{4,6}$')
result = {}
for name, dirname in name_to_dirpath.items():
if not name.startswith(prefix):
print(f'expected prefix "{prefix}" for "{name}"')
continue
segments = name[len(prefix): -len(suffix)].split('_')
            segfail
= Var(within=Reals,bounds=(0,1),initialize=0)
m.x3157 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3158 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3159 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3160 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3161 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3162 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3163 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3164 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3165 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3166 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3167 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3168 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3169 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3170 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3171 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3172 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3173 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3174 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3175 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3176 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3177 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3178 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3179 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3180 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3181 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3182 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3183 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3184 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3185 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3186 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3187 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3188 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3189 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3190 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3191 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3192 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3193 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3194 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3195 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3196 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3197 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3198 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3199 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3200 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3201 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3202 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3203 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3204 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3205 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3206 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3207 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3208 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3209 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3210 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3211 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3212 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3213 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3214 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3215 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3216 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3217 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3218 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3219 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3220 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3221 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3222 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3223 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3224 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3225 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3226 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3227 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3228 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3229 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3230 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3231 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3232 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3233 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3234 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3235 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3236 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3237 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3238 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3239 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3240 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3241 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3242 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3243 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3244 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3245 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3246 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3247 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3248 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3249 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3250 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3251 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3252 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3253 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3254 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3255 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3256 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3257 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3258 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3259 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3260 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3261 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3262 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3263 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3264 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3265 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3266 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3267 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3268 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3269 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3270 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3271 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3272 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3273 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3274 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3275 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3276 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3277 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3278 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3279 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3280 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3281 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3282 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3283 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3284 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3285 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3286 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3287 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3288 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3289 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3290 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3291 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3292 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3293 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3294 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3295 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3296 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3297 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3298 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3299 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3300 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3301 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3302 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3303 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3304 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3305 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3306 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3307 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3308 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3309 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3310 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3311 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3312 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3313 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3314 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3315 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3316 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3317 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3318 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3319 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3320 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3321 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3322 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3323 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3324 = Var(within=Reals,bounds=(0,1),initialize=0)
m.x3325 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x3326 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x3327 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x3328 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x3329 = Var(within=Reals,bounds=(125,625),initialize=125)
m.x3330 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3331 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3332 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3333 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3334 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3335 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3336 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3337 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3338 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3339 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3340 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3341 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3342 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3343 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3344 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3345 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3346 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3347 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3348 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3349 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3350 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3351 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3352 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3353 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3354 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3355 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3356 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3357 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3358 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3359 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3360 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3361 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3362 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3363 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3364 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3365 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3366 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3367 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3368 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3369 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3370 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3371 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3372 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3373 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3374 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3375 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3376 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3377 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3378 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3379 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3380 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3381 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3382 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3383 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3384 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3385 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3386 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3387 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3388 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3389 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3390 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3391 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3392 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3393 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3394 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3395 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3396 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3397 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3398 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3399 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3400 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3401 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3402 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3403 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3404 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3405 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3406 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3407 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3408 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3409 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3410 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3411 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3412 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3413 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3414 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3415 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3416 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3417 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3418 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3419 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3420 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3421 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3422 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3423 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3424 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3425 = Var(within=Reals,bounds=(375,875),initialize=375)
m.x3426 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3427 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3428 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3429 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3430 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3431 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3432 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3433 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3434 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3435 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3436 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3437 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3438 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3439 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3440 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3441 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3442 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3443 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3444 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3445 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3446 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3447 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3448 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3449 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3450 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3451 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3452 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3453 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3454 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3455 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3456 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3457 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3458 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3459 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3460 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3461 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3462 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3463 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3464 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3465 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3466 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3467 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3468 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3469 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3470 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3471 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3472 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3473 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3474 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3475 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3476 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3477 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3478 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3479 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3480 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3481 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3482 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3483 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3484 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3485 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3486 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3487 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3488 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3489 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3490 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3491 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3492 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3493 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3494 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3495 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3496 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3497 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3498 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3499 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3500 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3501 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3502 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3503 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3504 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3505 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3506 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3507 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3508 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3509 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3510 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3511 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3512 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3513 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3514 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3515 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3516 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3517 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3518 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3519 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3520 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3521 = Var(within=Reals,bounds=(250,750),initialize=250)
m.x3522 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3523 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3524 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3525 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3526 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3527 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3528 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3529 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3530 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3531 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3532 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3533 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3534 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3535 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3536 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3537 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3538 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3539 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3540 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3541 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3542 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3543 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3544 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3545 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3546 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3547 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3548 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3549 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3550 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3551 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3552 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3553 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3554 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3555 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3556 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3557 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3558 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3559 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3560 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3561 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3562 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3563 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3564 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3565 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3566 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3567 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3568 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3569 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3570 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3571 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3572 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3573 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3574 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3575 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3576 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3577 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3578 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3579 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3580 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3581 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3582 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3583 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3584 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3585 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3586 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3587 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3588 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3589 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3590 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3591 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3592 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3593 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3594 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3595 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3596 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3597 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3598 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3599 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3600 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3601 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3602 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3603 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3604 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3605 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3606 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3607 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3608 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3609 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3610 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3611 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3612 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3613 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3614 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3615 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3616 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3617 = Var(within=Reals,bounds=(250,750),initialize=250)
m.x3618 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3619 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3620 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3621 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3622 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3623 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3624 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3625 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3626 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3627 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3628 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3629 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3630 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3631 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3632 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3633 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3634 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3635 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3636 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3637 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3638 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3639 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3640 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3641 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3642 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3643 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3644 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3645 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3646 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3647 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3648 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3649 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3650 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3651 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3652 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3653 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3654 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3655 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3656 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3657 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3658 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3659 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3660 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3661 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3662 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3663 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3664 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3665 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3666 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3667 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3668 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3669 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3670 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3671 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3672 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3673 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3674 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3675 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3676 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3677 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3678 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3679 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3680 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3681 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3682 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3683 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3684 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3685 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3686 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3687 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3688 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3689 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3690 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3691 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3692 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3693 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3694 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3695 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3696 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3697 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3698 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3699 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3700 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3701 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3702 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3703 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3704 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3705 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3706 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3707 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3708 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3709 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3710 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3711 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x3712 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.obj = Objective(expr= 50000*m.x1012 + 5000*m.x3325 + 5000*m.x3326 + 8000*m.x3327 + 8000*m.x3328
+ 0.666666666666667*m.x3329 + 0.666666666666667*m.x3330 + 0.666666666666667*m.x3331
+ 0.666666666666667*m.x3332 + 0.666666666666667*m.x3333 + 0.666666666666667*m.x3334
+ 0.666666666666666*m.x3335 + 0.666666666666667*m.x3336 + 0.666666666666667*m.x3337
+ 0.666666666666666*m.x3338 + 0.666666666666667*m.x3339 + 0.666666666666667*m.x3340
+ 0.666666666666666*m.x3341 + 0.666666666666666*m.x3342 + 0.666666666666668*m.x3343
+ 0.666666666666666*m.x3344 + 0.666666666666666*m.x3345 + 0.666666666666668*m.x3346
+ 0.666666666666666*m.x3347 + 0.666666666666666*m.x3348 + 0.666666666666668*m.x3349
+ 0.666666666666666*m.x3350 + 0.666666666666666*m.x3351 + 0.666666666666668*m.x3352
+ 0.666666666666664*m.x3353 + 0.666666666666668*m.x3354 + 0.666666666666668*m.x3355
+ 0.666666666666664*m.x3356 + 0.666666666666668*m.x3357 + 0.666666666666668*m.x3358
+ 0.666666666666664*m.x3359 + 0.666666666666668*m.x3360 + 0.666666666666668*m.x3361
+ 0.666666666666664*m.x3362 + 0.666666666666668*m.x3363 + 0.666666666666668*m.x3364
+ 0.666666666666664*m.x3365 + 0.666666666666668*m.x3366 + 0.666666666666668*m.x3367
+ 0.666666666666664*m.x3368 + 0.666666666666668*m.x3369 + 0.666666666666668*m.x3370
+ 0.666666666666664*m.x3371 + 0.666666666666668*m.x3372 + 0.666666666666668*m.x3373
+ 0.666666666666664*m.x3374 + 0.666666666666668*m.x3375 + 0.666666666666668*m.x3376
+ 0.666666666666664*m.x3377 + 0.666666666666664*m.x3378 + 0.666666666666671*m.x3379
+ 0.666666666666664*m.x3380 + 0.666666666666664*m.x3381 + 0.666666666666671*m.x3382
+ 0.666666666666664*m.x3383 + 0.666666666666664*m.x3384 + 0.666666666666671*m.x3385
+ 0.666666666666664*m.x3386 + 0.666666666666664*m.x3387 + 0.666666666666671*m.x3388
+ 0.666666666666664*m.x3389 + 0.666666666666664*m.x3390 + 0.666666666666671*m.x3391
+ 0.666666666666664*m.x3392 + 0.666666666666664*m.x3393 + 0.666666666666671*m.x3394
+ 0.666666666666664*m.x3395 + 0.666666666666664*m.x3396 + 0.666666666666671*m.x3397
+ 0.666666666666664*m.x3398 + 0.666666666666664*m.x3399 + 0.666666666666671*m.x3400
+ 0.666666666666664*m.x3401 + 0.666666666666664*m.x3402 + 0.666666666666671*m.x3403
+ 0.666666666666664*m.x3404 + 0.666666666666664*m.x3405 +
the name for a variable index, ensuring the new name is a unique string
which corresponds to the (variable, index) combination.
Parameters
----------
name : str
Unique name of the node.
Returns
-------
self : Node
This node.
Raises
------
ValueError
If a node with `name` already exists in the associated graph.
KeyError
If the current name of the node cannot be found in the associated graph.
"""
# TODO: Need a way to check whether the existing node is equal to the current node as well
if self.graph and name in self.graph.nodes:
raise ValueError(f"duplicate name '{name}' in {self.graph.name}:"
f"Existing: {self.graph.nodes[name].args}\n"
f"New: {self.args}")
if self.graph:
graph = self.graph
if self._name is not None and self._name in graph.nodes:
graph.update_graph_key(self._name, name)
else:
graph.nodes[name] = self
self._name = name
return self
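# Renaming sketch (the graph objects are hypothetical; only the semantics
# follow from the setter above): assigning a new name routes through
# graph.update_graph_key(old_name, new_name) when the node is already
# registered, stores an unregistered node under graph.nodes[new_name], and
# raises ValueError if another node already owns new_name.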
def __getitem__(self, key):
if self.is_shape_finalized() and len(self.nodes) >= np.prod(self.shape):
if isinstance(key, Integral):
key = tuple([key])
idx = np.ravel_multi_index(key, dims=self.shape, order='C')
ret = self.nodes.item_by_index(idx)
return ret
else:
if isinstance(key, (list)):
ret = var_index(self.var, tuple(key), graph=self)
elif isinstance(key, tuple):
ret = var_index(self.var, key, graph=self)
else:
ret = var_index(self.var, tuple([key]), graph=self)
return ret
def is_scalar(self, val=None):
if val is not None and (not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)):
if self.var.shape != DEFAULT_SHAPES[0] and (len(self.var.shape) == 1 and not isinstance(self.var.shape[0],Node)):
raise ValueError(f"Invalid shape var for var index {self} with variable shape {self.var.shape}")
return True
else:
return self.var.shape == DEFAULT_SHAPES[0]
def _evaluate(self, var, indices, **kwargs):
if self.is_scalar(var):
out_shape = (1,)
indices = (0,)
single = True
else:
out_shape = self.domain.shape_from_indices(indices)
indices = self.domain.compute_pairs()
single = False
if isinstance(var, (Integral, Real, str)):
var = np.asarray([var])
elif not isinstance(var, (np.ndarray, list)):
raise TypeError(f"Variable {var} with type {type(var)} is not a list or numpy array, and cannot be sliced for {self.name}")
elif isinstance(var, list):
var = np.asarray(var)
if len(var.shape) != len(out_shape) and np.prod(var.shape) == np.prod(out_shape):
if len(out_shape) > len(var.shape):
for i in range(len(out_shape)):
if out_shape[i] == 1:
var = np.expand_dims(var, axis=i)
else:
var = np.squeeze(var)
if len(var.shape) != len(out_shape) and np.prod(var.shape) != np.prod(out_shape):
raise ValueError(f"Index list does not match {var.shape} in {self.var.name} - {self.var.op_name}"
f"dimensions for slice {self.args[0].name} with {out_shape}.\n"
f"Domain: {self.domain}\n"
f"Eval Stack: {Node._eval_stack}")
if not single and not all([(idx_val - 1) >= indices[-1][idx] for idx, idx_val in enumerate(var.shape)]):
raise ValueError(f"var_index {self.name} has indices which are greater than the variable shape:\n"
f"\tArgs: {self.args}\n"
f"\tVar shape: {var.shape}\n"
f"\tNode shape: {self.var.shape}\n"
f"\tIndex Upper bounds: {indices[-1]}")
indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x, indices))
res = var[indices] if single else np.asarray([var[idx] for idx in indices]).reshape(out_shape)
if out_shape == (1,) and len(indices) == 1:
res = res[0]
self.domain.set_computed(out_shape, indices)
return res
def __add__(self, other):
return slice_op(operator.add, self, other, graph=self.graph)
def __radd__(self, other):
return slice_op(operator.add, other, self, graph=self.graph)
def __sub__(self, other):
return slice_op(operator.sub, self, other, graph=self.graph)
def __rsub__(self, other):
return slice_op(operator.sub, other, self, graph=self.graph)
def __pow__(self, other):
return slice_op(builtins.pow, self, other, graph=self.graph)
def __rpow__(self, other):
return slice_op(builtins.pow, other, self, graph=self.graph)
def __mul__(self, other):
return slice_op(operator.mul, self, other, graph=self.graph)
def __rmul__(self, other):
return slice_op(operator.mul, other, self, graph=self.graph)
def __truediv__(self, other):
return slice_op(operator.truediv, self, other, graph=self.graph)
def __rtruediv__(self, other):
return slice_op(operator.truediv, other, self, graph=self.graph)
def __floordiv__(self, other):
return slice_op(operator.floordiv, self, other, graph=self.graph)
def __rfloordiv__(self, other):
return slice_op(operator.floordiv, other, self, graph=self.graph)
def __mod__(self, other):
return slice_op(operator.mod, self, other, graph=self.graph)
def __rmod__(self, other):
return slice_op(operator.mod, other, self, graph=self.graph)
def __lshift__(self, other):
return slice_op(operator.lshift, self, other, graph=self.graph)
def __rlshift__(self, other):
return slice_op(operator.lshift, other, self, graph=self.graph)
def __rshift__(self, other):
return slice_op(operator.rshift, self, other, graph=self.graph)
def __rrshift__(self, other):
return slice_op(operator.rshift, other, self, graph=self.graph)
def __and__(self, other):
return slice_op(operator.and_, self, other, graph=self.graph)
def __rand__(self, other):
return slice_op(operator.and_, other, self, graph=self.graph)
def __or__(self, other):
return slice_op(operator.or_, self, other, graph=self.graph)
def __ror__(self, other):
return slice_op(operator.or_, other, self, graph=self.graph)
def __xor__(self, other):
return slice_op(operator.xor, self, other, graph=self.graph)
def __rxor__(self, other):
return slice_op(operator.xor, other, self, graph=self.graph)
def __lt__(self, other):
return slice_op(operator.lt, self, other, graph=self.graph)
def __le__(self, other):
return slice_op(operator.le, self, other, graph=self.graph)
def __ne__(self, other):
return slice_op(operator.ne, self, other, graph=self.graph)
def __gt__(self, other):
return slice_op(operator.gt, self, other, graph=self.graph)
def __ge__(self, other):
return slice_op(operator.ge, self, other, graph=self.graph)
def __repr__(self):
return "<var_index name=%s, index=%s>" % (self.name, self.args)
class slice_op(Node):
"""
Node representing multi-dimensional operations performed on a node.
Parameters
----------
target : cal
The multi-dimensional variable used for indexing into.
idx : tuple
Tuple of either integer values or index/index_op nodes.
"""
def __init__(self, target, *args, **kwargs):
if "domain" in kwargs:
domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) else kwargs.pop("domain")
else:
all_args = _flatten_iterable(args)
slice1_var, slice1_idx, slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1])
domain = slice1_idx.combine_set_domains(slice2_idx)
if "op_name" in kwargs:
kwargs.pop("op_name")
target_name = f"{target.__module__}.{target.__name__}"
super(slice_op, self).__init__(*args, target=target_name, domain=domain, op_name=f"slice_{target.__name__}", **kwargs)
self.target = target
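# Construction sketch (operands a[i, j] and b[i, j] are hypothetical var_index
# nodes): slice_op(operator.add, a[i, j], b[i, j]) records the target callable
# by its "<module>.<name>" string and, unless an explicit domain kwarg is
# given, derives the output domain by combining both operands' index domains
# via combine_set_domains.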
@property
def domain(self):
return self.kwargs["domain"]
def __getitem__(self, key):
if isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0:
return self
elif self.is_shape_finalized() and len(self.nodes) > 0:
if isinstance(key, (int, Node)):
key = tuple([key])
if len(key) != len(self.shape):
raise KeyError(f"Invalid key shape for {self.name}:\n"
f"Shape: {self.shape}\n"
f"Key: {key}")
name = f"{self.name}{key}"
if name not in self.nodes.keys():
raise KeyError(f"{name} not in {self.name} keys:\n"
f"Node keys: {list(self.nodes.keys())}")
ret = self.nodes[name]
return ret
else:
name = []
if isinstance(key, Node):
name.append(key.name)
elif hasattr(key, "__len__") and not isinstance(key, str):
for k in key:
if isinstance(k, Node):
name.append(k.name)
else:
name.append(str(k))
else:
name.append(key)
name = self.var.name + "[" + "][".join(name) + "]"
if name in self.graph.nodes:
return self.graph.nodes[name]
elif isinstance(key, (list)):
return var_index(self, key, name=name, graph=self.graph)
elif isinstance(key, tuple):
return var_index(self, list(key), name=name, graph=self.graph)
else:
return var_index(self, [key], name=name, graph=self.graph)
def set_shape(self, shape=None, init=False):
s = []
assert isinstance(shape, (tuple, list))
if all([isinstance(sv, Integral) for sv in shape]) and len(self.domain) == np.prod(shape) and len(shape) > 0:
self._shape = shape if isinstance(shape, tuple) else tuple(shape)
else:
for idx, d in enumerate(self.domain.dom_set):
if shape and isinstance(shape[idx], (func_op, Integral)):
s.append(shape[idx])
elif shape and isinstance(shape[idx], float):
s.append(int(shape[idx]))
elif isinstance(d, float):
s.append(int(d))
elif isinstance(d, var_index):
s.append(d.domain)
else:
s.append(d)
self._shape = tuple(s)
def is_scalar(self, val):
return not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)
def _evaluate(self, op1, op2, context=None, **kwargs):
if self.is_scalar(op1) or self.is_scalar(op2):
value = self.target(op1, op2)
else:
arg0_dom = self.args[0].domain
arg1_dom = self.args[1].domain
op1_idx = self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0], Node) else tuple([])
op2_idx = self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else tuple([])
op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape)
op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(self.domain.computed_shape)
value = self.target(op1, op2)
return value
def get_index_nodes(self, slice1_var=None, slice2_var=None):
if slice1_var is None and slice2_var is None:
slice1_var, slice2_var = self.args
if isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var, "GroupNode"):
slice1_idx = slice1_var.domain
elif _is_node_type_instance(slice1_var, "index"):
slice1_idx = slice1_var.domain
else:
slice1_idx = Domain(tuple([]))
if isinstance(slice2_var, (slice_op, var_index)) or _is_node_type_instance(slice2_var, "GroupNode"):
slice2_idx = slice2_var.domain
elif _is_node_type_instance(slice2_var, "index"):
slice2_idx = slice2_var.domain
else:
slice2_idx = Domain(tuple([]))
return slice1_var, slice1_idx, slice2_var, slice2_idx
def __add__(self, other):
return slice_op(operator.add, self, other, graph=self.graph)
def __radd__(self, other):
return slice_op(operator.add, other, self, graph=self.graph)
def __sub__(self, other):
return slice_op(operator.sub, self, other, graph=self.graph)
def __rsub__(self, other):
return slice_op(operator.sub, other, self, graph=self.graph)
def __pow__(self, other):
return slice_op(builtins.pow, self, other, graph=self.graph)
def __rpow__(self, other):
return slice_op(builtins.pow, other, self, graph=self.graph)
def __mul__(self, other):
return slice_op(operator.mul, self, other, graph=self.graph)
def __rmul__(self, other):
return slice_op(operator.mul, other, self, graph=self.graph)
def __truediv__(self, other):
return slice_op(operator.truediv, self, other, graph=self.graph)
def __rtruediv__(self, other):
return slice_op(operator.truediv, other, self, graph=self.graph)
def __floordiv__(self, other):
return slice_op(operator.floordiv, self, other, graph=self.graph)
def __rfloordiv__(self, other):
return slice_op(operator.floordiv, other, self, graph=self.graph)
def __mod__(self, other):
return slice_op(operator.mod, self, other, graph=self.graph)
def __rmod__(self, other):
return slice_op(operator.mod, other, self, graph=self.graph)
def __lshift__(self, other):
return slice_op(operator.lshift, self, other, graph=self.graph)
def __rlshift__(self, other):
return slice_op(operator.lshift, other, self, graph=self.graph)
def __rshift__(self, other):
return slice_op(operator.rshift, self, other, graph=self.graph)
def __rrshift__(self, other):
return slice_op(operator.rshift, other, self, graph=self.graph)
def __and__(self, other):
return slice_op(operator.and_, self, other, graph=self.graph)
def __rand__(self, other):
return slice_op(operator.and_, other, self, graph=self.graph)
def __or__(self, other):
return slice_op(operator.or_, self, other, graph=self.graph)
def __ror__(self, other):
return slice_op(operator.or_, other, self, graph=self.graph)
def __xor__(self, other):
return slice_op(operator.xor, self, other, graph=self.graph)
def __rxor__(self, other):
return slice_op(operator.xor, other, self, graph=self.graph)
def __lt__(self, other):
return slice_op(operator.lt, self, other, graph=self.graph)
# Mass transfer
0., # Always part of main galaxy
0., # CGM -> main galaxy -> CGM
])
expected = np.array([
1, # Merger, except in early snapshots
0, # Mass Transfer
0, # Always part of main galaxy
0, # CGM -> main galaxy -> CGM
]).astype( bool )
actual = self.classifier.identify_merger()
npt.assert_allclose( expected, actual, )
#########################################################################
def test_identify_wind( self ):
# Prerequisites
self.classifier.is_ejected = np.array([
[ 0, 0, 0, 0, ], # Merger, except in early snapshots
[ 0, 0, 0, 0, ], # Always part of main galaxy
[ 1, 0, 0, 0, ], # CGM -> main galaxy -> CGM
[ 1, 0, 1, 0, ], # Another test case
]).astype( bool )
self.classifier.n_particle = self.classifier.is_ejected.shape[0]
expected = np.array([
[ 0, 0, 0, 0, 0, ], # Merger, except in early snapshots
[ 0, 0, 0, 0, 0, ], # Always part of main galaxy
[ 1, 0, 0, 0, 0, ], # CGM -> main galaxy -> CGM
[ 1, 1, 1, 0, 0, ], # Another test case
]).astype( bool )
actual = self.classifier.identify_wind()
npt.assert_allclose( expected, actual, )
########################################################################
########################################################################
class TestFullClassifierPipeline( unittest.TestCase ):
def setUp( self ):
# Mock the code version so we don't repeatedly change test data
patcher = patch( 'galaxy_dive.utils.utilities.get_code_version' )
self.addCleanup( patcher.stop )
self.mock_code_version = patcher.start()
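# patcher.start() activates the mock for this test; addCleanup guarantees
# patcher.stop() runs even if setUp or the test body raises.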
self.classifier = classify.Classifier( **default_kwargs )
self.savefile = './tests/data/tracking_output/classifications_test_classify.hdf5'
self.events_savefile = './tests/data/tracking_output/events_test_classify.hdf5'
# Because we're skipping this step, we need to make sure we're not tossing objects around
self.classifier.ptracks_tag = self.classifier.tag
self.classifier.galids_tag = self.classifier.tag
if os.path.isfile( self.savefile ):
os.system( 'rm {}'.format( self.savefile ) )
if os.path.isfile( self.events_savefile ):
os.system( 'rm {}'.format( self.events_savefile ) )
########################################################################
def tearDown( self ):
if os.path.isfile( self.savefile ):
os.system( 'rm {}'.format( self.savefile ) )
if os.path.isfile( self.events_savefile ):
os.system( 'rm {}'.format( self.events_savefile ) )
########################################################################
def test_save_classifications( self ):
# Give it filenames to save.
self.classifier.ptrack_filename = 'test_ptrack_filename'
self.classifier.galfind_filename = 'test_galfind_filename'
self.classifier.ahf_reader = read_ahf.AHFReader( default_kwargs['halo_data_dir'] )
# Add some properties that we're not testing in this test.
self.classifier._main_mt_halo_first_snap = 10
self.classifier._ind_first_snap = 10
# Prerequisites
self.classifier.is_unaccreted = np.array([
0, # Merger, except in early snapshots
0, # Mass Transfer
0, # Always part of main galaxy
0, # CGM -> main galaxy -> CGM
]).astype( bool )
self.classifier.is_pristine = np.array([
0, # Merger, except in early snapshots
0, # Mass Transfer
1, # Always part of main galaxy
1, # CGM -> main galaxy -> CGM
]).astype( bool )
self.classifier.is_preprocessed = np.array([
1, # Merger, except in early snapshots
1, # Mass Transfer
0, # Always part of main galaxy
0, # CGM -> main galaxy -> CGM
]).astype( bool )
self.classifier.is_mass_transfer = np.array([
0, # Merger, except in early snapshots
1, # Mass Transfer
0, # Always part of main galaxy
0, # CGM -> main galaxy -> CGM
]).astype( bool )
self.classifier.is_merger = np.array([
1, # Merger, except in early snapshots
0, # Mass Transfer
0, # Always part of main galaxy
0, # CGM -> main galaxy -> CGM
]).astype( bool )
self.classifier.is_wind = np.array([
[ 0, 0, 0, 0, 0, ], # Merger, except in early snapshots
[ 0, 0, 0, 0, 0, ], # Always part of main galaxy
[ 1, 0, 0, 0, 0, ], # CGM -> main galaxy -> CGM
[ 1, 1, 1, 0, 0, ], # Another test case
]).astype( bool )
self.classifier.is_unaccreted_EP = np.array([
[ 1, 0, 0, 0, 0, ],
[ 1, 1, 1, 0, 0, ],
[ 1, 1, 0, 0, 0, ],
[ 0, 0, 0, 0, 0, ],
]).astype( bool )
self.classifier.is_unaccreted_NEP = np.array([
[ 1, 0, 0, 0, 0, ],
[ 1, 1, 1, 0, 0, ],
[ 1, 1, 0, 0, 0, ],
[ 0, 0, 0, 0, 0, ],
]).astype( bool )
self.classifier.is_hitherto_EP = np.array([
[ 1, 0, 0, 0, 0, ],
[ 1, 1, 1, 0, 0, ],
[ 1, 1, 0, 0, 0, ],
[ 0, 0, 0, 0, 0, ],
]).astype( bool )
self.classifier.is_hitherto_NEP = np.array([
[ 1, 0, 0, 0, 0, ],
[ 1, 1, 1, 0, 0, ],
[ 1, 1, 0, 0, 0, ],
[ 0, 0, 0, 0, 0, ],
]).astype( bool )
self.classifier.redshift_first_acc = np.array([ 0., -1., 0.06984665, -1. ])
# Remove the request for additional post-processing
self.classifier.pp_classifications_to_save = []
# Change values from defaults so that we save without issue
self.classifier.halo_file_tag = 'smooth'
self.classifier.mtree_halos_index = None
# The function itself.
self.classifier.save_classifications( self.classifier.classifications_to_save )
# Try to load the data
f = h5py.File( self.savefile, 'r')
npt.assert_allclose( self.classifier.is_pristine, f['is_pristine'][...] )
########################################################################
def test_save_events( self ):
self.classifier.is_ejected = np.array([
[ 0, 0, 0, 0, ], # Merger, except in early snapshots
[ 0, 0, 0, 0, ], # Always part of main galaxy
[ 1, 0, 0, 0, ], # CGM -> main galaxy -> CGM
[ 1, 0, 1, 0, ], # Another test case
]).astype( bool )
self.classifier.is_accreted = np.array([
[ 1, 0, 0, 0, ], # Merger, except in early snapshots
[ 0, 0, 0, 0, ], # Always part of main galaxy
[ 0, 1, 0, 0, ], # CGM -> main galaxy -> CGM
[ 1, 0, 1, 0, ], # Accreted twice
]).astype( bool )
self.classifier.is_in_other_gal = np.array([
[ 0, 1, 1, 0, 0, ], # Merger, except in early snapshots
[ 0, 0, 0, 0, 0, ], # Always part of main galaxy
[ 0, 0, 0, 0, 0, ], # CGM -> main galaxy -> CGM
]).astype( bool )
self.classifier.is_in_other_CGM = np.array([
[ 0, 1, 1, 0, 0, ], # Merger, except in early snapshots
[ 0, 0, 0, 0, 0, ], # Always part of main galaxy
[ 0, 0, 0, 0, 0, ], # CGM -> main galaxy -> CGM
]).astype( bool )
self.classifier.is_in_main_gal = np.array([
[ 1, 0, 0, 0, 0, ], # Merger, except in early snapshots
[ 1, 1, 1, 1, 1, ], # Always part of main galaxy
[ 0, 1, 0, 0, 0, ], # CGM -> main galaxy -> CGM
]).astype( bool )
self.classifier.gal_event_id = np.array([
[ 1, 0, 0, 0, 0, ], # Merger, except in early snapshots
[ 1, 1, 1, 1, 1, ], # Always part of main galaxy
[ 0, 1, 0, 0, 0, ], # CGM -> main galaxy -> CGM
]).astype( bool )
self.classifier.cumulative_time_in_other_gal = np.array([
[ 153., 102., 51., 51., 0, ],
[ 153., 153., 153., 102., 51., ],
[ 153., 153, 102., 51., 0, ],
])
self.classifier.time_in_other_gal_before_acc_during_interval = np.array(
[ 1., 2., 3., ]
)
self.classifier.redshift_first_acc = np.array([ 0., np.nan, 0.06984665, 0.16946003 ])
self.classifier._ind_first_acc = np.array([ 0, -99999, 1, -99999 ])
# Change values so that we save without issue
self.classifier.halo_file_tag = 'smooth'
self.classifier.mtree_halos_index = None
self.classifier.save_events( self.classifier.events_to_save )
f = h5py.File( self.events_savefile, 'r' )
for event_type in [ 'is_ejected', 'is_in_other_gal', 'is_in_main_gal', 'redshift_first_acc', ]:
assert event_type in f.keys()
########################################################################
def test_additional_postprocessing( self ):
# Change arguments
self.classifier.classifications_to_save = [ 'is_A', ]
self.classifier.pp_classifications_to_save = [ 'is_not_A', ]
# Fake original data
self.classifier.is_A = np.random.randint(
2,
size=( 4, 3 ),
).astype( bool )
self.classifier.ptrack_attrs = {
'main_mt_halo_id': 2,
}
# Add some properties that we're not testing in this test.
self.classifier._main_mt_halo_first_snap = 10
self.classifier._ind_first_snap = 10
self.classifier.halo_file_tag = 'smooth'
self.classifier.mtree_halos_index = None
# Usual event saving
self.classifier.save_classifications(
self.classifier.classifications_to_save
)
# Add an attribute to worldlines to test post-processing event saving
def calc_is_not_A( self ):
self.data['is_not_A'] = np.invert( self.get_data( 'is_A' ) )
return self.data['is_not_A']
analyze_worldlines.Worldlines.calc_is_not_A = calc_is_not_A
# Post-processing event saving
self.classifier.additional_postprocessing(
self.classifier.pp_classifications_to_save,
)
# Test results
f = h5py.File( self.savefile, 'r' )
# Fiducial
npt.assert_allclose( self.classifier.is_A, f['is_A'][...] )
# post-processing
npt.assert_allclose(
np.invert( self.classifier.is_A ),
f['is_not_A'][...]
)
########################################################################
def test_full_pipeline( self ):
'''Test that we can run the full pipeline from just the files.'''
# Force the first valid snapshot to be snapshot 10, to ensure
import numpy as np
from matplotlib import rc
## for Palatino and other serif fonts use:
rc('font',**{'family':'serif','serif':['Computer Modern Roman']})
rc('text', usetex=True)
import matplotlib.pyplot as plt
import time
#NLL plotting
def nll_plot(nll_mean_list1,nll_var_list1,nll_mean_list2,nll_var_list2,nll_mean_list3,nll_var_list3,N_test,legend=False,last_legend_label=r'GPR'):
legend_label = []
if nll_mean_list1 is not None:
plt.gca().set_prop_cycle(None)
conf_list1 = [1.96*np.sqrt(s)/np.sqrt(N_test) for s in nll_var_list1]
upper1 = [y + c for y,c in zip(nll_mean_list1,conf_list1)]
lower1 = [y - c for y,c in zip(nll_mean_list1,conf_list1)]
plt.fill_between(range(0,len(nll_mean_list1)), upper1, lower1, alpha=.2)
plt.plot(range(0,len(nll_mean_list1)),nll_mean_list1)
legend_label.append(r'ALPaCA')
plt.ylabel('Negative Log Likelihood')
if nll_mean_list2 is not None:
conf_list2 = [1.96*np.sqrt(s)/np.sqrt(N_test) for s in nll_var_list2]
upper2 = [y + c for y,c in zip(nll_mean_list2,conf_list2)]
lower2 = [y - c for y,c in zip(nll_mean_list2,conf_list2)]
plt.fill_between(range(0,len(nll_mean_list2)), upper2, lower2, alpha=.2)
plt.plot(range(0,len(nll_mean_list2)),nll_mean_list2)
legend_label.append(r'ALPaCA (no meta)')
if nll_mean_list3 is not None:
conf_list3 = [1.96*np.sqrt(s)/np.sqrt(N_test) for s in nll_var_list3]
upper3 = [y + c for y,c in zip(nll_mean_list3,conf_list3)]
lower3 = [y - c for y,c in zip(nll_mean_list3,conf_list3)]
plt.fill_between(range(0,len(nll_mean_list3)), upper3, lower3, alpha=.2)
plt.plot(range(0,len(nll_mean_list3)),nll_mean_list3)
legend_label.append(last_legend_label)
if legend==True:
plt.legend(legend_label)
plt.xlabel('Timesteps')
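# The 1.96*np.sqrt(s)/np.sqrt(N_test) terms in these helpers are 95%
# normal-approximation confidence half-widths on a mean. A standalone sketch of
# the construction (not called by the plotting functions; name is illustrative):
def _mean_confidence_band(means, variances, n_test):
    """Return (lower, upper) 95% confidence bands for per-timestep means."""
    half = [1.96 * np.sqrt(v) / np.sqrt(n_test) for v in variances]
    lower = [m - h for m, h in zip(means, half)]
    upper = [m + h for m, h in zip(means, half)]
    return lower, upper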
def mse_plot(nll_mean_list1,nll_var_list1,nll_mean_list2,nll_var_list2,nll_mean_list3,nll_var_list3,N_test,legend=False):
legend_label = []
if nll_mean_list1 is not None:
plt.gca().set_prop_cycle(None)
conf_list1 = [1.96*np.sqrt(s)/np.sqrt(N_test) for s in nll_var_list1]
upper1 = [y + c for y,c in zip(nll_mean_list1,conf_list1)]
lower1 = [y - c for y,c in zip(nll_mean_list1,conf_list1)]
plt.fill_between(range(0,len(nll_mean_list1)), upper1, lower1, alpha=.2)
l1 = plt.plot(range(0,len(nll_mean_list1)),nll_mean_list1,label=r'ALPaCA')
legend_label.append(r'ALPaCA')
plt.ylabel('MSE')
if nll_mean_list2 is not None:
conf_list2 = [1.96*np.sqrt(s)/np.sqrt(N_test) for s in nll_var_list2]
upper2 = [y + c for y,c in zip(nll_mean_list2,conf_list2)]
lower2 = [y - c for y,c in zip(nll_mean_list2,conf_list2)]
plt.fill_between(range(0,len(nll_mean_list2)), upper2, lower2, alpha=.2)
l2 = plt.plot(range(0,len(nll_mean_list2)),nll_mean_list2, label=r'MAML (1 step)')
legend_label.append(r'MAML (1 step)')
if nll_mean_list3 is not None:
conf_list3 = [1.96*np.sqrt(s)/np.sqrt(N_test) for s in nll_var_list3]
upper3 = [y + c for y,c in zip(nll_mean_list3,conf_list3)]
lower3 = [y - c for y,c in zip(nll_mean_list3,conf_list3)]
plt.fill_between(range(0,len(nll_mean_list3)), upper3, lower3, alpha=.2)
plt.plot(range(0,len(nll_mean_list3)),nll_mean_list3, label=r'MAML (5 step)')
legend_label.append(r'MAML (5 step)')
if legend==True:
plt.legend()
plt.xlabel('Timesteps')
def time_plot(nll_mean_list1,nll_var_list1,nll_mean_list2,nll_var_list2,nll_mean_list3,nll_var_list3,N_test,legend=False):
# Same signature as nll_plot for convenience.
legend_label = []
if nll_mean_list1 is not None:
plt.gca().set_prop_cycle(None)
conf_list1 = [1.96*np.sqrt(s)/np.sqrt(N_test) for s in nll_var_list1]
upper1 = [y + c for y,c in zip(nll_mean_list1,conf_list1)]
lower1 = [y - c for y,c in zip(nll_mean_list1,conf_list1)]
plt.fill_between(range(0,len(nll_mean_list1)), upper1, lower1, alpha=.2)
plt.plot(range(0,len(nll_mean_list1)),nll_mean_list1)
legend_label.append(r'ALPaCA')
plt.ylabel(r'Time (s)')
if nll_mean_list2 is not None:
conf_list2 = [1.96*np.sqrt(s)/np.sqrt(N_test) for s in nll_var_list2]
upper2 = [y + c for y,c in zip(nll_mean_list2,conf_list2)]
lower2 = [y - c for y,c in zip(nll_mean_list2,conf_list2)]
plt.fill_between(range(0,len(nll_mean_list2)), upper2, lower2, alpha=.2)
plt.plot(range(0,len(nll_mean_list2)),nll_mean_list2)
legend_label.append(r'ALPaCA (no meta)')
if nll_mean_list3 is not None:
conf_list3 = [1.96*np.sqrt(s)/np.sqrt(N_test) for s in nll_var_list3]
upper3 = [y + c for y,c in zip(nll_mean_list3,conf_list3)]
lower3 = [y - c for y,c in zip(nll_mean_list3,conf_list3)]
plt.fill_between(range(0,len(nll_mean_list3)), upper3, lower3, alpha=.2)
plt.plot(range(0,len(nll_mean_list3)),nll_mean_list3)
legend_label.append(r'GPR')
if legend==True:
plt.legend(legend_label)
plt.xlabel('Timesteps')
def sinusoid_plot(freq,phase,amp,x_list,sigma_list,y_list,X_update, Y_update,sampling_density=101,legend_labels=['Ours', 'True']):
"""
x,y,sigma should be lists
"""
#plot given data
conf_list = [1.96*np.sqrt(s) for s in sigma_list]
upper = [y + c for y,c in zip(y_list,conf_list)]
lower = [y - c for y,c in zip(y_list,conf_list)]
plt.fill_between(x_list, upper, lower, alpha=.5)
plt.plot(x_list,y_list)
#plot true sinusoid
yr_list = [amp*np.sin(freq*x + phase) for x in x_list]
plt.plot(x_list,yr_list,color='r')
# plot update points
plt.plot(X_update[0,:,0],Y_update[0,:,0],'+',color='k',markersize=10)
plt.xlim([np.min(x_list), np.max(x_list)])
#legend
if legend_labels:
plt.legend(legend_labels + ['sampled points'])
def gen_sin_fig(agent, X,Y,freq,phase,amp,upper_x=5,lower_x=-5,point_every=0.1, label=None):
y_list = []
x_list = []
s_list = []
for p in np.arange(lower_x,upper_x,point_every):
y, s = agent.test(X, Y, [[[p]]])
y_list.append(y[0,0,0])
x_list.append(p)
if s:
s_list.append(s[0,0,0,0])
else:
s_list.append(0)
legend_labels = None
if label:
legend_labels = [label, 'True']
sinusoid_plot(freq,phase,amp,x_list,s_list,y_list,X,Y, legend_labels=legend_labels)
def gen_sin_gp_fig(agent, X,Y,freq,phase,amp,upper_x=5,lower_x=-5,point_every=0.1, label=None):
x_test = np.reshape( np.arange(lower_x,upper_x,point_every), [1,-1,1] )
y,s = agent.test(X,Y,x_test)
y = y[0,:,0]
s = s[0,:]**2
legend_labels = None
if label:
legend_labels = [label, 'True']
sinusoid_plot(freq,phase,amp,x_test[0,:,0],s,y,X,Y,legend_labels=legend_labels)
def plot_bases(x,y,indices):
x = x[0,:,0]
y = y[0,:,:]
for i in indices:
plt.figure()
plt.plot(x,y[:,i])
plt.legend([r"$\phi_{"+ str(i) +r"}(x)$"])
plt.show()
def gen_sin_bases_fig(agent, sess, x, n_bases):
phi = sess.run( agent.phi, {agent.x: x} )
plot_bases(x, phi, np.random.choice(agent.config['nn_layers'][-1],n_bases))
def plot_sample_fns(x,phi,K,L,SigEps,n_samples):
x = x[0,:,0]
phi = phi[0,:,:]
mean = np.reshape(K, [-1])
cov = np.kron(SigEps, np.linalg.inv(L))
K_vec = np.random.multivariate_normal(mean,cov,n_samples)
plt.figure()
for i in range(n_samples):
K = np.reshape(K_vec[i,:], K.shape)
y = np.squeeze(phi @ K)
plt.plot(x,y)
plt.show()
# STEP FUNCTIONS
def step_plot(x_jump,x_list,sigma_list,y_list,X_update, Y_update,sampling_density=101,legend_labels=['Ours', 'True']):
"""
x,y,sigma should be lists
"""
#plot given data
conf_list = [1.96*np.sqrt(s) for s in sigma_list]
upper = [y + c for y,c in zip(y_list,conf_list)]
lower = [y - c for y,c in zip(y_list,conf_list)]
plt.fill_between(x_list, upper, lower, alpha=.5)
plt.plot(x_list,y_list)
#plot true step
yr_list = [0.5 + 0.5*np.sign(x-x_jump) for x in x_list]
plt.plot(x_list,yr_list,color='r')
# plot update points
plt.plot(X_update[0,:,0],Y_update[0,:,0],'+',color='k',markersize=10)
plt.xlim([np.min(x_list), np.max(x_list)])
plt.ylim([-1,2])
#legend
if legend_labels:
plt.legend(legend_labels + ['sampled points'])
def multistep_plot(pt_list,x_list,sigma_list,y_list,X_update, Y_update,sampling_density=101,legend_labels=['Ours', 'True']):
"""
x,y,sigma should be lists
"""
#plot given data
conf_list = [1.96*np.sqrt(s) for s in sigma_list]
upper = [y + c for y,c in zip(y_list,conf_list)]
lower = [y - c for y,c in zip(y_list,conf_list)]
plt.fill_between(x_list, upper, lower, alpha=.5)
plt.plot(x_list,y_list)
#plot true step
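# Vectorized construction of the multi-step target: XOR-reducing x > step_pts
# over the step axis gives the parity of step points below each x, so the sign
# flips at every jump.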
x = np.reshape(x_list,[1,-1])
step_pts = np.reshape(pt_list,[-1,1])
yr_list = 2.*np.logical_xor.reduce( x > step_pts, axis=0) - 1.
plt.plot(x_list,yr_list,color='r')
# plot update points
plt.plot(X_update[0,:,0],Y_update[0,:,0],'+',color='k',markersize=10)
plt.xlim([np.min(x_list), np.max(x_list)])
plt.ylim([-2,2])
#legend
if legend_labels:
plt.legend(legend_labels + ['sampled points'])
#do plotting
def gen_step_fig(agent,X,Y,x_jump,upper_x=5,lower_x=-5,point_every=0.1, label=None):
y_list = []
x_list = []
s_list = []
for p in np.arange(lower_x,upper_x,point_every):
y, s = agent.test(X, Y, [[[p]]])
y_list.append(y[0,0,0])
x_list.append(p)
if s:
s_list.append(s[0,0,0,0])
else:
s_list.append(0)
legend_labels = None
if label:
legend_labels = [label, 'True']
step_plot(x_jump,x_list,s_list,y_list,X,Y, legend_labels=legend_labels)
def gen_step_gp_fig(agent, X, Y, x_jump, upper_x=5,lower_x=-5,point_every=0.1, label=None):
x_test = np.reshape( np.arange(lower_x,upper_x,point_every), [1,-1,1] )
y,s = agent.test(X,Y,x_test)
y = y[0,:,0]
s = s[0,:]**2
legend_labels = None
if label:
legend_labels = [label, 'True']
step_plot(x_jump,x_test[0,:,0],s,y,X,Y,legend_labels=legend_labels)
def gen_multistep_fig(agent, X,Y,x_jump,upper_x=5,lower_x=-5,point_every=0.1, label=None):
y_list = []
x_list = []
s_list = []
for p in np.arange(lower_x,upper_x,point_every):
y, s = agent.test(X, Y, [[[p]]])
y_list.append(y[0,0,0])
x_list.append(p)
if s:
s_list.append(s[0,0,0,0])
else:
s_list.append(0)
legend_labels = None
if label:
legend_labels = [label, 'True']
multistep_plot(x_jump,x_list,s_list,y_list,X,Y, legend_labels=legend_labels)
def gen_multistep_gp_fig(agent, X, Y, x_jump, upper_x=5,lower_x=-5,point_every=0.1, label=None):
x_test = np.reshape( np.arange(lower_x,upper_x,point_every), [1,-1,1] )
y,s = agent.test(X,Y,x_test)
y = y[0,:,0]
s = s[0,:]**2
legend_labels = None
if label:
legend_labels = [label, 'True']
multistep_plot(x_jump,x_test[0,:,0],s,y,X,Y,legend_labels=legend_labels)
# PENDULUM
def plot_trajectory(X,Y,Y_pred,Sig_pred):
t = np.arange(Y.shape[1] + 1)
Nu = Y.shape[1] - Y_pred.shape[1]
dims = [0,1]
colors = ['b','r']
h_list = []
for i in dims:
x = np.concatenate( ( X[0,0:1,i], X[0,:,i] + Y[0,:,i] ) )
x_pred = np.concatenate( (X[0,:Nu+1,i], X[0,Nu:,i] + Y_pred[0,:,i] ) )
s = np.concatenate( ( np.zeros([Nu+1]), Sig_pred[0,:,i,i] ) )
c = 1.96*np.sqrt(s)
u = x_pred + c
l = x_pred - c
plt.fill_between(t, u, l, alpha=.2, color=colors[i])
h, = plt.plot(t, x_pred, color=colors[i],label=r"$x_{"+str(i+1)+"}$", zorder=1)
h_list.append(h)
plt.scatter(t, x, marker='+', color=colors[i],zorder=1)
plt.legend(handles=h_list)
ax = plt.gca()
ylim = ax.get_ylim()
plt.plot([Nu, Nu],ylim,linestyle=':',color='k', alpha=0.8)
plt.fill_between([0,Nu],[ylim[1],ylim[1]],[ylim[0], ylim[0]],color='white',alpha=0.7,zorder=2)
plt.xlim([t[0],t[-1]])
plt.ylim(ylim)
plt.show()
def gen_pendulum_onestep_fig(agent,sess,X,Y,Nu,T=None):
ux = X[:,:Nu,:]
uy = Y[:,:Nu,:]
y_pred, sig_pred = agent.test(sess, ux, uy, X[:,Nu:T+1,:])
plot_trajectory(X[:,:T+1,:],Y[:,:T+1,:],y_pred,sig_pred)
def sampleMN(K, L_inv, Sig):
mean = np.reshape(K.T, [-1])
cov = np.kron(Sig, L_inv)
K_vec = np.random.multivariate_normal(mean,cov)
return np.reshape(K_vec, K.T.shape).T
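# sampleMN draws from a matrix normal MN(K, L_inv, Sig): stacking K row by row
# (vec(K.T)) gives a Gaussian whose covariance is np.kron(Sig, L_inv).
# Shape sanity check (illustrative only):
#   sampleMN(np.zeros((3, 2)), np.eye(3), np.eye(2)).shape  # -> (3, 2)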
def gen_pendulum_sample_fig(agent, sess, X,Y,Nu,N_samples=10,T=None, T_rollout=10,no_update=False):
if not T:
T = Y.shape[1]
tt = np.arange(T+1)
ux = X[0:1,:Nu,:]
uy = Y[0:1,:Nu,:]
K0 = sess.run(agent.K)
L0 = sess.run(agent.L)
SigEps = sess.run(agent.SigEps)
Phi = sess.run( agent.phi, {agent.x: X} )
uPhi = Phi[0:1,:Nu,:]
Kn = K0
Ln = L0
Ln_inv = np.linalg.inv(Ln)
if Nu > 0 and not no_update:
Kn,Ln_inv = agent.batch_update_np(K0,L0,uPhi[0,:,:],uy[0,:,:])
Ln = np.linalg.inv(Ln_inv)
x = np.concatenate( ( X[0,0:1,:2], X[0,:T,:2] + Y[0,:T,:] ) )
x_pred = np.zeros([N_samples, T+1, X.shape[2]])
x_pred[:,:Nu+1,:] = X[0:1, :Nu+1, :]
for j in range(N_samples):
K = sampleMN(Kn,Ln_inv,SigEps)
# print(K)
for t in range(Nu,Nu+T_rollout):
phi_t = sess.run( agent.phi, {agent.x: x_pred[j:j+1, t:t+1, :]})
x_pred[j,t+1,:2] = x_pred[j,t,:2] + np.squeeze( phi_t[0,:,:] @ K )
dims = [0,1]
colors = ['b','r']
styles=['-',':']
for i in dims:
for j in range(N_samples):
plt.plot(tt[Nu:Nu+T_rollout], x_pred[j,Nu:Nu+T_rollout,i], color=colors[i], alpha=5.0/N_samples)
plt.plot(tt, x[:,i], linestyle=styles[i], color='k')
ax = plt.gca()
ylim = [np.min(x)-2,np.max(x)+2]
#plt.plot([Nu, Nu],ylim,linestyle=':',color='k', alpha=0.8)
#plt.fill_between([0,Nu],[ylim[1],ylim[1]],[ylim[0], ylim[0]],color='white',alpha=0.7,zorder=2)
plt.xlim([tt[0],tt[-1]])
plt.ylim(ylim)
#plt.show()
def gen_pendulum_rollout_fig(agent, xu, xp, Nu, N_samples=50, T=None, T_rollout=10, update=True):
| |
order to be converted losslessly to Decimal.
:param step: Decimal|str, the (relative) step size to set
:param dynamic_stepping: bool, flag indicating the use of dynamic stepping (True) or
constant stepping (False)
"""
try:
step = D(step)
        except TypeError:
            # Decimal cannot convert numpy scalar types directly; coerce them
            # to builtin int/float first before retrying the conversion.
if 'int' in type(step).__name__:
step = int(step)
elif 'float' in type(step).__name__:
step = float(step)
else:
raise
step = D(step)
# ignore NaN and infinity values
if not step.is_nan() and not step.is_infinite():
self.__singleStep = step
self.dynamic_stepping = dynamic_stepping
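    # Hedged usage note: with dynamic stepping the step is interpreted relative to
    # the current value (e.g. a step of '0.1' turns 9.0 into 9.9), whereas constant
    # stepping always adds the same absolute amount. Passing the step as a string
    # or Decimal keeps the conversion lossless:
    #     spinbox.setSingleStep('0.1', dynamic_stepping=True)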
def minimalStep(self):
return float(self.__minimalStep)
def setMinimalStep(self, step):
"""
Method used to set a minimal step size.
        When the absolute step size has been calculated in either dynamic or constant step mode,
        this value is checked against the minimal step size. If it is smaller, the minimal step
        size is chosen over the calculated step size. This ensures that no step taken can be
        smaller than minimalStep.
Set this value to 0 for no minimal step size.
        For maximum robustness and consistency it is strongly recommended to pass step as a Decimal
        or string so it can be converted losslessly to Decimal.
:param step: Decimal|str, the minimal step size to be set
"""
try:
step = D(step)
except TypeError:
if 'int' in type(step).__name__:
step = int(step)
elif 'float' in type(step).__name__:
step = float(step)
else:
raise
step = D(step)
# ignore NaN and infinity values
if not step.is_nan() and not step.is_infinite():
self.__minimalStep = step
def cleanText(self):
"""
Compliance method from Qt SpinBoxes.
Returns the currently shown text from the QLineEdit without prefix and suffix and stripped
from leading or trailing whitespaces.
:return: str, currently shown text stripped from suffix and prefix
"""
text = self.text().strip()
if self.__prefix and text.startswith(self.__prefix):
text = text[len(self.__prefix):]
if self.__suffix and text.endswith(self.__suffix):
text = text[:-len(self.__suffix)]
return text.strip()
def update_display(self):
"""
This helper method updates the shown text based on the current value.
        Because this method is only called upon finishing an editing procedure, any previously
        cached value gets deleted.
"""
text = self.textFromValue(self.value())
text = self.__prefix + text + self.__suffix
self.lineEdit().setText(text)
self.__cached_value = None # clear cached value
self.lineEdit().setCursorPosition(0) # Display the most significant part of the number
def keyPressEvent(self, event):
"""
This method catches all keyboard press events triggered by the user. Can be used to alter
the behaviour of certain key events from the default implementation of QAbstractSpinBox.
:param event: QKeyEvent, a Qt QKeyEvent instance holding the event information
"""
# Restore cached value upon pressing escape and lose focus.
if event.key() == QtCore.Qt.Key_Escape:
if self.__cached_value is not None:
self.__value = self.__cached_value
self.valueChanged.emit(self.value())
self.clearFocus() # This will also trigger editingFinished
return
# Update display upon pressing enter/return before processing the event in the default way.
if event.key() == QtCore.Qt.Key_Enter or event.key() == QtCore.Qt.Key_Return:
self.update_display()
if (QtCore.Qt.ControlModifier | QtCore.Qt.MetaModifier) & event.modifiers():
super(ScienDSpinBox, self).keyPressEvent(event)
return
# The rest is to avoid editing suffix and prefix
if len(event.text()) > 0:
# Allow editing of the number or SI-prefix even if part of the prefix/suffix is selected
if self.lineEdit().selectedText():
sel_start = self.lineEdit().selectionStart()
sel_end = sel_start + len(self.lineEdit().selectedText())
min_start = len(self.__prefix)
max_end = len(self.__prefix) + len(self.cleanText())
if sel_start < min_start:
sel_start = min_start
if sel_end > max_end:
sel_end = max_end
self.lineEdit().setSelection(sel_start, sel_end - sel_start)
else:
cursor_pos = self.lineEdit().cursorPosition()
begin = len(self.__prefix)
end = len(self.text()) - len(self.__suffix)
if cursor_pos < begin:
self.lineEdit().setCursorPosition(begin)
elif cursor_pos > end:
self.lineEdit().setCursorPosition(end)
if event.key() == QtCore.Qt.Key_Left:
if self.lineEdit().cursorPosition() == len(self.__prefix):
return
if event.key() == QtCore.Qt.Key_Right:
if self.lineEdit().cursorPosition() == len(self.text()) - len(self.__suffix):
return
if event.key() == QtCore.Qt.Key_Home:
self.lineEdit().setCursorPosition(len(self.__prefix))
return
if event.key() == QtCore.Qt.Key_End:
self.lineEdit().setCursorPosition(len(self.text()) - len(self.__suffix))
return
super(ScienDSpinBox, self).keyPressEvent(event)
def focusInEvent(self, event):
super(ScienDSpinBox, self).focusInEvent(event)
self.selectAll()
return
def focusOutEvent(self, event):
super(ScienDSpinBox, self).focusOutEvent(event)
self.update_display()
return
def paintEvent(self, ev):
"""
        Draw a red frame around the spinbox if the is_valid flag is False.
"""
super(ScienDSpinBox, self).paintEvent(ev)
# draw red frame if is_valid = False
# self.errorBox.setVisible(not self.is_valid)
def validate(self, text, position):
"""
Access method to the validator. See FloatValidator class for more information.
:param text: str, string to be validated.
:param position: int, current text cursor position
:return: (enum QValidator::State) the returned validator state,
(str) the input string, (int) the cursor position
"""
begin = len(self.__prefix)
end = len(text) - len(self.__suffix)
if position < begin:
position = begin
elif position > end:
position = end
if self.__prefix and text.startswith(self.__prefix):
text = text[len(self.__prefix):]
if self.__suffix and text.endswith(self.__suffix):
text = text[:-len(self.__suffix)]
state, string, position = self.validator.validate(text, position)
text = self.__prefix + string + self.__suffix
end = len(text) - len(self.__suffix)
if position > end:
position = end
value = self.valueFromText(text)
_, in_range = self.check_range(value)
self.errorBox.setVisible(not in_range)
return state, text, position
def fixup(self, text):
"""
Takes an invalid string and tries to fix it in order to pass validation.
The returned string is not guaranteed to pass validation.
:param text: str, a string that has not passed validation in need to be fixed.
:return: str, the resulting string from the fix attempt
"""
return self.validator.fixup(text)
def valueFromText(self, text):
"""
This method is responsible for converting a string displayed in the SpinBox into a Decimal.
The input string is already stripped of prefix and suffix.
Just the si-prefix may be present.
:param text: str, the display string to be converted into a numeric value.
            This string must conform to the validator.
:return: Decimal, the numeric value converted from the input string.
"""
# Check for infinite value
if 'inf' in text.lower():
if text.startswith('-'):
return D('-inf')
else:
return D('inf')
# Handle "normal" (non-infinite) input
group_dict = self.validator.get_group_dict(text)
if not group_dict:
return False
if not group_dict['mantissa']:
return False
si_prefix = group_dict['si']
if si_prefix is None:
si_prefix = ''
si_scale = self._unit_prefix_dict[si_prefix.replace('u', 'µ')]
if group_dict['sign'] is not None:
unscaled_value_str = group_dict['sign'] + group_dict['mantissa']
else:
unscaled_value_str = group_dict['mantissa']
if group_dict['exponent'] is not None:
unscaled_value_str += group_dict['exponent']
value = D(unscaled_value_str) * si_scale
# Try to extract the precision the user intends to use
if self.dynamic_precision:
split_mantissa = group_dict['mantissa'].split('.')
if len(split_mantissa) == 2:
self.setDecimals(max(len(split_mantissa[1]), 1))
else:
self.setDecimals(1) # Minimum number of digits is 1
return value
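    # Illustrative conversion (assuming the validator group names used above and
    # an SI scale of 1e3 for 'k'): the text '4.2 k' is split into mantissa '4.2'
    # and si prefix 'k', yielding D('4.2') * D('1e3') == Decimal('4200.0').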
def textFromValue(self, value):
"""
This method is responsible for the mapping of the underlying value to a string to display
in the SpinBox.
Suffix and Prefix must not be handled here, just the si-Prefix.
        The main problem here is that a scaled float with a suffix is represented with a different
        machine precision than the total value.
        This method is so complicated because it represents the actual precision of the value as
        a float and not the precision of the scaled si float.
'{:.20f}'.format(value) shows different digits than
'{:.20f} {}'.format(scaled_value, si_prefix)
:param value: float|decimal.Decimal, the numeric value to be formatted into a string
:return: str, the formatted string representing the input value
"""
# Catch infinity value
if np.isinf(float(value)):
if value < 0:
return '-inf '
else:
return 'inf '
sign = '-' if value < 0 else ''
fractional, integer = math.modf(abs(value))
integer = int(integer)
si_prefix = ''
prefix_index = 0
if integer != 0 or fractional >= 0.1:
integer_str = str(integer)
fractional_str = ''
while len(integer_str) > 3:
fractional_str = integer_str[-3:] + fractional_str
integer_str = integer_str[:-3]
if prefix_index < 8:
si_prefix = 'kMGTPEZY'[prefix_index]
else:
si_prefix = 'e{0:d}'.format(3 * (prefix_index + 1))
prefix_index += 1
# Truncate and round to set number of decimals
# Add digits from fractional if it's not already enough for set self.__decimals
if self.__decimals < len(fractional_str):
round_indicator = int(fractional_str[self.__decimals])
fractional_str = fractional_str[:self.__decimals]
if round_indicator >= 5:
if not fractional_str:
fractional_str = '1'
else:
fractional_str = str(int(fractional_str) + 1)
elif self.__decimals == len(fractional_str):
if fractional >= 0.5:
if fractional_str:
fractional_int = int(fractional_str) + 1
fractional_str = str(fractional_int)
else:
fractional_str = '1'
elif self.__decimals > len(fractional_str):
digits_to_add = self.__decimals - len(fractional_str) # number of digits to add
fractional_tmp_str = ('{0:.' + str(digits_to_add) + 'f}').format(fractional)
if fractional_tmp_str.startswith('1'):
if fractional_str:
fractional_str = str(int(fractional_str) + 1) + '0' * digits_to_add
else:
fractional_str = '1' + '0' * digits_to_add
| |
#!/usr/bin/env python
# coding: utf-8
import re
import operator
operators = {
'+': operator.add,
'-': operator.sub,
'*': operator.mul,
'/': operator.truediv,
}
op_precedences = [
# extract content between innermost parentheses - highest precedence
r'\(\s*([^()]+)\)',
# extract higher precedence operator (* or /) and its adjoining operands
r'(\d+(?:\.\d+)?)\s*([*/])\s*(-?\s*\d+(?:\.\d+)?)',
# extract lower precedence operator (+ or -) and its adjoining operands
r'(-?\s*\d+(?:\.\d+)?)\s*([+-])\s*(-?\s*\d+(?:\.\d+)?)',
]
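# Worked example of the three precedence passes above (illustrative):
#   '( 2 + 3 * 4 )' -> pass 0 extracts the parenthesised '2 + 3 * 4',
#   pass 1 reduces '3 * 4' to '12', pass 2 reduces '2 + 12' to '14'.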
def validate(expression):
"""Expression validator and reformatter
Receive a candidate expression as a string of space-separated
characters. Valid characters are all of "0123456789.+-*/() ".
Basic checking is performed as well as reformatting. If valid,
return a reformatted expression as a string, otherwise return
None.
"""
# check for characters not in a valid set, not just for the
# purpose of validating an expression but also to guard against
# malicious code injections
if re.search(r'[^\d.+\-*/() ]', expression):
return None
# disallow expressions starting with any of "+", "*" or "/"
if re.search(r'^\s*[+*/]', expression):
return None
# check parentheses for mismatch
if not check_parentheses(expression):
return None
# manage space around negation sign - subtraction operator
processed = re.sub(r'(?<=\()-\s+([\d\(])', r' -\1', expression)
processed = re.sub(r'(?<=\(\s)-\s+([\d\(])', r'-\1', processed)
# join digits separated only by space
processed = re.sub(r'(?<=\d)\s+(?=\d)', r'', processed)
# join operators separated only by space
processed = re.sub(r'(?<=[-+*/])\s+(?=[-+*/])', r'', processed)
# disallow neighboring operators
if re.search(r'[-+*/]{2,}', processed):
return None
# disallow "+","*" or "/" between two identical parentheses
if re.search(r'([\(\)])\s*[+*/]\s*\1', processed):
return None
# disallow "-" between two closing parentheses
if re.search(r'\)\s*-\s*\)', processed):
return None
# enclose everything in parentheses for convenience
processed = re.sub(r'^(.+)$', r'( \1 )', processed)
return processed
# helper function for `validate`
def check_parentheses(expression):
"""Check parentheses for mismatch
Receive a candidate expression as a string and return True if all
parentheses are properly matched. Nesting is allowed. If an error is
detected, return False. If no parentheses are present, return True.
"""
# disallow paired parentheses containing only space and/or
# operator(s) or nothing at all
if re.search(r'\([\s+*/-]*\)', expression):
return False
# construct a list of parentheses to check
parentheses = re.findall(r'[()]', expression)
# parity
count = 0
counter = {
'(': 1,
')': -1
}
for parenthesis in parentheses:
count += counter[parenthesis]
        # if at any point more parentheses have been closed than opened
if count < 0:
return False
    # if after counting through the entire expression
    # there are more opened than closed parentheses (count > 0)
if count:
return False
return True
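# Illustrative behaviour of the parity check:
#   check_parentheses('( ( 1 ) )') -> True
#   check_parentheses(') (')       -> False  (closes before it opens)
#   check_parentheses('( )')       -> False  (empty pair is rejected)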
def evaluate(expression, noparentheses=False):
"""Evaluator functionality of the solver module
Receive a validated and reformatted expression from `validate`. If
it is None, return None. If it passed validation, evaluate it in
order of operator precedence, second only to order enforced by
parentheses. If the result is a whole number cast it to int. Return the result.
"""
# skip the zero precedence if no parentheses are present
noparentheses = int(noparentheses)
# evaluate expression in order of operator precedences
for order in range(noparentheses, 3):
# "initial" update is not a thing, therefore:
updated = None
# for the current level of precedence
while True:
# call `operate` on an appropriate re.match object
updated = re.sub(op_precedences[order], operate, expression, count=noparentheses)
# cancel out even number of consecutive negative signs
updated = re.sub(r'(?:--)+', '+', updated)
            # give a leading '+' an explicit zero operand so it parses as addition
updated = re.sub(r'^\+', '0+', updated)
# when done with this level of operator precedence
if (updated == expression):
# proceed to a lower level (higher `order` count)
break
# otherwise repeat the while loop
# reducing the expression stepwise from left to right
expression = updated
# if the result is a whole number, cast it to int
result = float(expression)
if result == round(result):
result = int(result)
return result
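# Minimal usage sketch: run `validate` first, then `evaluate` on its output.
def _demo_evaluate():
    expression = validate('2 * 6 / 3 + ( 3 * 1 )')
    assert expression is not None
    return evaluate(expression)  # expected result: 7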
def operate(match):
"""Perform a single calculation on a group of strings
Receive a re.match object from `evaluate`. The object contains
subgroups of strings captured by regex groups.
In case the match contains a single subgroup, it is guaranteed not
to contain parentheses. Send the string to `evaluate` for next
level processing.
In case the match contains 3 subgroups, it is guaranteed to be a
match of a binary operation. Convert the two operand strings
(from subgroups 1 and 3) to floats. Resolve the operator string
(from subgroup 2) to the correct `operator` method. Call the
`operator` method on the operands in the correct order. If the
result is a whole number cast it to int. Return the result.
"""
# evaluate subexpression from inside parentheses, if present
if len(match.groups()) == 1:
subresult = evaluate(match.group(1).strip(), noparentheses=True)
return str(subresult)
# otherwise treat as a simple binary expression
left, right = match.group(1, 3)
# remove space between a "-" sign and operand, if present
left = re.sub(r'-\s+', '-', left)
right = re.sub(r'-\s+', '-', right)
# define the correct operator method to use
operator_ = operators[match.group(2)]
# perform the calculation
result = operator_(float(left), float(right))
if result == round(result):
result = int(result)
return str(result)
def run_tests():
tests = {
'( 3 % 2 )': None,
'3 + 2 f': None,
'( 3 $ + 2 $)': None,
'! ( 3 + 2 )': None,
'( 3 + 2 ) =': None,
'( 3 + 2 ) :': None,
'~ 1': None,
'0 ^ 1': None,
'3 + 2 \\': None,
'3 + 2 \0': None,
'3 + 2 \n': None,
'3 + 2 \r': None,
'3 + 2 \t': None,
'3 + 2 F': None,
'- 1 8 / ( 6 * - 3 ) + ( 3 * 2 )': None,
'2 * 6 / 3 + ( 3 * 1 ) )': None,
'( 2 * 6 / 3 + ( 3 * 1 )': None,
'2 * 6 / 3 + ) ( 3 * 1 ) (': None,
'3 * - ( 6 )': None,
'- 1 8 / - ( 6 * - 3 ) + ( - 3 * 2 )': None,
'( )': None,
'2 * 6 / 3 + ( + ( - 3 ) ) + ( 3 * 1 )': None,
'2 * 6 / 3 + ( * ( - 3 ) ) + ( 3 * 1 )': None,
'2 * 6 / 3 + ( / ( - 3 ) ) + ( 3 * 1 )': None,
'2 * 6 / 3 + ( ( - 3 ) * ) + ( 3 * 1 )': None,
'2 * 6 / 3 + ( ( - 3 ) / ) + ( 3 * 1 )': None,
'2 * 6 / 3 + ( ( - 3 ) - ) + ( 3 * 1 )': None,
'2 * 6 / 3 + ( ( - 3 ) + ) + ( 3 * 1 )': None,
'+ 2 + 2': None,
'* 2 + 2': None,
'/ 2 + 2': None,
'4 + ( + ) 3': None,
'4 + ( - ) 3': None,
'4 + ( * ) 3': None,
'4 + ( / ) 3': None,
'2 * 6 / 3 + ( 3 * 1 )': '?',
'2 * 6 / 3 + ( - ( - ( - ( - ( - 3 ) ) ) ) ) + ( 3 * 1 )': '?',
'2 * 6 / 3 + ( - ( - ( - 3 ) ) ) + ( 3 * 1 )': '?',
'2 * 6 / 3 + ( ( - ( - 3 ) ) ) + ( 3 * 1 )': '?',
'2 * ( 6 / 3 ) + ( 3 * 1 )': '?',
'1 8 / 6 * 3 + ( 3 * 2 )': '?',
'1 8 / ( 6 * 3 ) + ( 3 * 2 )': '?',
'- 1 8 / ( - ( 6 * ( - 3 ) ) ) | |
from copy import copy, deepcopy
from datetime import timedelta
from unittest import mock
import pytest
from ticts import TimeSeries
from ticts.utils import MAXTS, MINTS
from .conftest import CURRENT, HALFHOUR, ONEHOUR, ONEMIN
class TestTimeSeriesInit:
def test_with_dict(self, smalldict):
ts = TimeSeries(smalldict)
assert ts[CURRENT + ONEHOUR] == 1
def test_with_data_as_tuple(self):
mytuple = ((CURRENT, 0), (CURRENT + ONEHOUR, 1))
ts = TimeSeries(mytuple)
assert ts[CURRENT] == 0
assert ts[CURRENT + ONEHOUR] == 1
assert len(ts) == 2
class TestTimeSeriesSetItem:
def test_simple_setitem(self, smallts):
smallts[CURRENT] = 1000
assert smallts[CURRENT] == 1000
def test_consecutive_setitem(self, smallts):
smallts[CURRENT] = 1000
first_time = deepcopy(smallts)
smallts[CURRENT] = 1000
assert first_time == smallts
@mock.patch("ticts.TimeSeries.set_interval")
def test_setitem_on_slice_calls_set_interval(self, set_interval, smallts):
smallts[CURRENT:CURRENT + 2 * ONEHOUR] = 1000
assert set_interval.call_count == 1
class TestTimeSeriesCopy:
def test_copy(self, smallts):
copied = copy(smallts)
assert copied == smallts
def test_copy_with_default(self, smallts_withdefault):
copied = copy(smallts_withdefault)
assert copied == smallts_withdefault
def test_deepcopy(self, smallts):
deepcopied = deepcopy(smallts)
assert deepcopied == smallts
def test_deepcopy_with_default(self, smallts_withdefault):
deepcopied = deepcopy(smallts_withdefault)
assert deepcopied == smallts_withdefault
class TestTimeSeriesBoundProperties:
def test_lower_bound(self, smallts):
assert smallts.lower_bound == smallts.keys()[0]
def test_upper_bound(self, smallts):
assert smallts.upper_bound == smallts.keys()[-1]
def test_lower_bound_on_empty(self, emptyts):
assert emptyts.lower_bound == MINTS
def test_upper_bound_on_empty(self, emptyts):
assert emptyts.upper_bound == MAXTS
def test_timeseries_compact(smallts):
smallts[CURRENT + ONEMIN] = 0
assert (CURRENT + ONEMIN) not in smallts.compact().keys()
class TestTimeSeriesGetitem:
available_interpolate = ['previous', 'linear']
# tests on corner cases
def test_get_item_out_of_left_bound_with_default_zero(self):
mytuple = ((CURRENT, 0), (CURRENT + ONEHOUR, 1))
ts = TimeSeries(mytuple, default=0)
assert ts[CURRENT + ONEHOUR] == 1
assert ts[CURRENT - ONEHOUR] == 0
def test_getitem_out_of_left_bound_with_no_default_raises(self, smallts):
with pytest.raises(KeyError) as err:
smallts[CURRENT - ONEMIN]
assert 'default attribute is not set' in str(err)
@pytest.mark.parametrize('interpolate', available_interpolate)
def test_getitem_out_of_left_bound_with_default_return_default(
self, smallts_withdefault, interpolate):
value = smallts_withdefault[CURRENT - ONEMIN, interpolate]
assert value == smallts_withdefault.default
@pytest.mark.parametrize('interpolate', available_interpolate)
def test_getitem_on_empty_when_no_default_raises(self, emptyts,
interpolate):
with pytest.raises(KeyError) as err:
emptyts[CURRENT - ONEMIN, interpolate]
assert str(
"default attribute is not set and timeseries is empty") in str(err)
# tests on '_get_previous'
@mock.patch("ticts.TimeSeries._get_previous")
def test_get_on_previous_is_default_interpolate(self, _get_previous,
smallts):
smallts[CURRENT + ONEMIN]
assert _get_previous.call_count == 1
def test_get_on_previous(self, smallts):
assert smallts[CURRENT + ONEMIN] == 0
def test_get_on_previous_out_of_right_bound(self, smallts):
assert smallts[CURRENT + 10 * ONEHOUR] == 9
# tests on '_get_linear_interpolate'
@pytest.mark.parametrize('time_idx, expected', [
(CURRENT + HALFHOUR, 0.5),
(CURRENT + ONEHOUR + HALFHOUR, 1.5),
(CURRENT + 10 * ONEMIN, 0 + (1 - 0) * (10 * ONEMIN / ONEHOUR)),
])
def test_get_linear_interpolate(self, smallts, time_idx, expected):
assert smallts[time_idx, 'linear'] == expected
def test_get_linear_interpolate_out_of_right_bound(self, smallts):
assert smallts[CURRENT + 10 * ONEHOUR, 'linear'] == 9
# test on 'slice'
def test_get_on_slice_exclude_upper_bound_include_lower_bound(
self, smallts):
start = CURRENT
end = CURRENT + 1 * ONEHOUR
data = {start: 0, end: 1}
expected_ts = TimeSeries(data, default=smallts.default)
assert smallts[start:end + ONEHOUR] == expected_ts
def test_get_on_slice_add_back_previous_value_if_start_not_in_keys(
self, smallts):
start = CURRENT + HALFHOUR
end = CURRENT + ONEHOUR
data = {start: 0, end: 1}
expected_ts = TimeSeries(data, default=smallts.default)
assert smallts[start:end + ONEHOUR] == expected_ts
def test_get_on_slice_entirely_out_of_bounds_on_left_side(self, smallts):
assert smallts[CURRENT - 2 * ONEHOUR:CURRENT - 1 * ONEHOUR].empty
def test_get_on_slice_out_of_bounds_left_side(self, smallts):
start = CURRENT - 2 * ONEHOUR
end = CURRENT
data = {CURRENT: 0}
expected_ts = TimeSeries(data, default=smallts.default)
assert smallts[start:end + 1 * ONEHOUR] == expected_ts
def test_get_on_slice_entirely_out_of_bounds_on_right_side(self, smallts):
assert smallts[CURRENT + 10 * ONEHOUR:CURRENT + 12 * ONEHOUR].empty
    def test_get_on_slice_out_of_bounds_on_right_side(self, smallts):
sliced_ts = smallts[CURRENT + 9 * ONEHOUR:CURRENT + 12 * ONEHOUR]
expected_dct = {CURRENT + 9 * ONEHOUR: 9}
expected_ts = TimeSeries(expected_dct, default=smallts.default)
assert sliced_ts == expected_ts
class TestTimeSeriesSetInterval:
def test_set_interval_raises_when_no_default(self, smallts):
with pytest.raises(NotImplementedError):
smallts.set_interval(CURRENT, CURRENT + ONEHOUR, 1000)
def test_single_set_interval_end_on_last_key(self, smallts_withdefault):
smallts_withdefault.set_interval(CURRENT + ONEHOUR,
CURRENT + 9 * ONEHOUR, 1000)
expected_keys = [CURRENT, CURRENT + ONEHOUR, CURRENT + 9 * ONEHOUR]
assert list(smallts_withdefault.keys()) == expected_keys
assert smallts_withdefault[CURRENT + ONEHOUR] == 1000
def test_single_set_interval_start_on_first_key(self, smallts_withdefault):
smallts_withdefault.set_interval(CURRENT, CURRENT + 9 * ONEHOUR, 1000)
expected_keys = [CURRENT, CURRENT + 9 * ONEHOUR]
assert list(smallts_withdefault.keys()) == expected_keys
assert smallts_withdefault[CURRENT] == 1000
@pytest.mark.parametrize('start, end', [
(CURRENT + ONEHOUR, CURRENT + 10 * ONEHOUR),
(CURRENT + 4 * ONEHOUR, CURRENT + 11 * ONEHOUR),
])
def test_single_set_interval_end_over_last_key_sets_to_last_value(
self, smallts_withdefault, start, end):
last_key = smallts_withdefault.keys()[-1]
last_val = smallts_withdefault[last_key]
smallts_withdefault.set_interval(start, end, 1000)
keys_before_start = []
for key in smallts_withdefault.keys():
if key < start:
keys_before_start.append(key)
expected_keys = [*keys_before_start, start, end]
assert list(smallts_withdefault.keys()) == expected_keys
assert smallts_withdefault[end] == last_val
def test_single_set_interval_when_start_higher_than_upper_bound(
self, smallts_withdefault):
start = CURRENT + 11 * ONEHOUR
end = CURRENT + 13 * ONEHOUR
smallts_withdefault.set_interval(start, end, 1000)
assert smallts_withdefault[CURRENT + 10 * ONEHOUR] == 9
assert smallts_withdefault[start] == 1000
assert smallts_withdefault[end] == smallts_withdefault.default
def test_single_set_interval_start_before_first_key(
self, smallts_withdefault):
smallts_withdefault.set_interval(CURRENT - ONEHOUR,
CURRENT + 9 * ONEHOUR, 1000)
expected_keys = [CURRENT - ONEHOUR, CURRENT + 9 * ONEHOUR]
assert list(smallts_withdefault.keys()) == expected_keys
assert smallts_withdefault[CURRENT - 1 * ONEHOUR] == 1000
def test_single_set_interval_on_bounds_not_being_keys(
self, smallts_withdefault):
smallts_withdefault.set_interval(CURRENT + ONEMIN,
CURRENT + 9 * ONEHOUR, 1000)
expected_keys = [CURRENT, CURRENT + ONEMIN, CURRENT + 9 * ONEHOUR]
assert list(smallts_withdefault.keys()) == expected_keys
assert smallts_withdefault[CURRENT + ONEMIN] == 1000
def test_set_interval_on_empty(self, emptyts):
emptyts.default = 10
emptyts.set_interval(CURRENT, CURRENT + ONEHOUR, 1)
assert emptyts[CURRENT] == 1
        assert len(emptyts.keys()) == 1
def test_set_interval_on_empty_with_default(self, emptyts_withdefault):
emptyts_withdefault.set_interval(CURRENT, CURRENT + ONEHOUR, 1)
assert emptyts_withdefault[CURRENT] == 1
assert emptyts_withdefault[CURRENT +
ONEHOUR] == emptyts_withdefault.default
        assert len(emptyts_withdefault.keys()) == 2
def test_same_consecutive_set_interval(self, smallts_withdefault):
smallts_withdefault.set_interval(CURRENT, CURRENT + 9 * ONEHOUR, 1000)
first_time = deepcopy(smallts_withdefault)
smallts_withdefault.set_interval(CURRENT, CURRENT + 9 * ONEHOUR, 1000)
assert first_time == smallts_withdefault
def test_consecutive_set_interval_on_empty_with_default(self, emptyts):
emptyts.default = 10
emptyts.set_interval(CURRENT, CURRENT + 2 * ONEHOUR, 10)
emptyts.set_interval(CURRENT, CURRENT + ONEHOUR, 0)
emptyts.set_interval(CURRENT + ONEHOUR, CURRENT + 2 * ONEHOUR, 1)
emptyts.set_interval(CURRENT, CURRENT + 2 * ONEHOUR, 3)
assert emptyts[CURRENT] == 3
assert emptyts[CURRENT + 2 * ONEHOUR] == 10
assert list(emptyts.keys()) == [CURRENT, CURRENT + 2 * ONEHOUR]
def test_set_interval_when_no_keys_to_delete_with_default(self, emptyts):
emptyts.default = 1000
emptyts.set_interval(CURRENT, CURRENT + 1 * ONEHOUR, 0)
emptyts.set_interval(CURRENT + 3 * ONEHOUR, CURRENT + 4 * ONEHOUR, 3)
emptyts.set_interval(CURRENT + 1 * ONEHOUR + HALFHOUR,
CURRENT + 3 * ONEHOUR, 10)
expected_dct = {
CURRENT: 0,
CURRENT + 1 * ONEHOUR: 1000.,
CURRENT + 1 * ONEHOUR + HALFHOUR: 10,
CURRENT + 3 * ONEHOUR: 3,
CURRENT + 4 * ONEHOUR: 1000
}
assert list(emptyts.keys()) == list(expected_dct.keys())
for key in expected_dct:
assert emptyts[key] == expected_dct[key]
class TestTimeSeriesOperators:
def test_simple_add(self, smallts, smalldict):
ts = smallts + smallts
newdct = {key: value + value for key, value in smalldict.items()}
assert ts == TimeSeries(newdct)
def test_simple_add_one_float(self, smallts, smalldict):
ts = smallts + 1000
assert list(ts.values()) == list(range(1000, 1010))
def test_add_with_keys_differences(self, smallts_withdefault,
otherts_withdefault):
ts = smallts_withdefault + otherts_withdefault
assert ts[CURRENT + 1 * ONEHOUR] == 1 + otherts_withdefault.default
assert ts[CURRENT + 2 * ONEHOUR] == 2 + 1000
assert ts[CURRENT + 2 * ONEHOUR + HALFHOUR] == 2 + 2000
assert ts[CURRENT + 3 * ONEHOUR] == 3 + 2000
assert ts[CURRENT + 4 * ONEHOUR] == 4 + 3000
def test_add_on_list_of_timeseries(self, smallts):
lst_timeseries = [smallts, smallts, smallts]
result = sum(lst_timeseries)
for key in result:
assert result[key] == 3 * smallts[key]
def test_simple_sub(self, smallts):
ts = smallts - smallts
assert all([val == 0 for val in ts.values()])
def test_simple_sub_one_float(self, smallts):
ts = smallts - 1
assert list(ts.values()) == list(range(-1, 9))
def test_sub_with_keys_differences(self, smallts_withdefault,
otherts_withdefault):
ts = smallts_withdefault - otherts_withdefault
assert ts[CURRENT + 1 * ONEHOUR] == 1 - 900
assert ts[CURRENT + 2 * ONEHOUR] == 2 - 1000
assert ts[CURRENT + 2 * ONEHOUR + HALFHOUR] == 2 - 2000
assert ts[CURRENT + 3 * ONEHOUR] == 3 - 2000
assert ts[CURRENT + 4 * ONEHOUR] == 4 - 3000
    def test_simple_ge(self, smallts):
result = smallts >= 5
assert all([
not val for val in result[CURRENT:CURRENT + 4 * ONEHOUR].values()
])
assert all([
val for val in result[CURRENT + 5 * ONEHOUR:CURRENT +
9 * ONEHOUR].values()
])
    def test_simple_gt(self, smallts):
result = smallts > 5
assert all([
not val for val in result[CURRENT:CURRENT + 5 * ONEHOUR].values()
])
assert all([
val for val in result[CURRENT + 6 * ONEHOUR:CURRENT +
9 * ONEHOUR].values()
])
def test_floor_on_float(self, smallts):
ts = smallts.floor(2)
assert all([value <= 2 for value in ts.values()])
def test_floor_on_ts(self, smallts_withdefault, otherts_withdefault):
ts = smallts_withdefault.floor(otherts_withdefault)
assert ts[CURRENT + 1 * ONEHOUR] == | |
Union[str, Callable[[Tensor], Tensor]] = F.relu,
layer_norm_eps: float = 1e-6,
batch_first: bool = True,
norm_first: bool = True,
device=None,
dtype=None) -> None:
factory_kwargs = {'device': device, 'dtype': dtype}
super().__init__(
d_model,
nhead,
dim_feedforward=dim_feedforward,
dropout=dropout,
activation=activation,
layer_norm_eps=layer_norm_eps,
batch_first=batch_first,
norm_first=norm_first,
device=device,
dtype=dtype)
self.self_attn = MultiheadAttention(
d_model,
nhead,
dropout=dropout,
batch_first=batch_first,
bias=False,
**factory_kwargs)
# Modified to use cache for autoregressive decoding.
class TransformerDecoder(nn.Module):
r"""TransformerDecoder is a stack of N decoder layers
Args:
d_model: the number of expected features in the input (default=1024).
nhead: the number of heads in the multiheadattention models (default=16).
d_hid: the dimension of the feedforward network model
(default=4096).
dropout: the dropout value (default=0.1).
layer_norm_eps: the eps value in layer normalization components
(default=1e-6).
decoder_layer: an instance of the TransformerDecoderLayer() class (required)
num_layers: the number of sub-decoder-layers in the decoder (required).
Examples::
>>> decoder_layer = nn.TransformerDecoderLayer(d_model=512, nhead=8)
>>> transformer_decoder = nn.TransformerDecoder(decoder_layer, num_layers=6)
>>> memory = torch.rand(10, 32, 512)
>>> tgt = torch.rand(20, 32, 512)
>>> out = transformer_decoder(tgt, memory)
"""
__constants__ = ['norm']
def __init__(self, d_model, nhead, d_hid, dropout, layer_norm_eps,
num_layers):
super().__init__()
self.layers = nn.ModuleList([
TransformerDecoderLayer(
d_model, nhead, d_hid, dropout, layer_norm_eps=layer_norm_eps)
for _ in range(num_layers)
])
self.num_layers = num_layers
self.norm = nn.LayerNorm(d_model, eps=layer_norm_eps)
def forward(self,
tgt: Tensor,
memory: Tensor,
tgt_mask: Optional[Tensor] = None,
memory_mask: Optional[Tensor] = None,
tgt_key_padding_mask: Optional[Tensor] = None,
memory_key_padding_mask: Optional[Tensor] = None,
decode: bool = False,
max_len: Optional[int] = None,
cache: Optional[dict] = None) -> Tensor:
r"""Pass the inputs (and mask) through the decoder layer in turn.
Args:
tgt: the sequence to the decoder (required).
memory: the sequence from the last layer of the encoder (required).
tgt_mask: the mask for the tgt sequence (optional).
memory_mask: the mask for the memory sequence (optional).
tgt_key_padding_mask: the mask for the tgt keys per batch (optional).
memory_key_padding_mask: the mask for the memory keys per batch (optional)
            decode: whether to use cache for autoregressive decoding or not.
max_len: maximum sequence length, necessary for decoding cache.
Shape:
see the docs in Transformer class.
"""
output = tgt
for idx, mod in enumerate(self.layers):
output, cache = mod(
output,
memory,
tgt_mask=tgt_mask,
memory_mask=memory_mask,
tgt_key_padding_mask=tgt_key_padding_mask,
memory_key_padding_mask=memory_key_padding_mask,
decode=decode,
max_len=max_len,
cache=cache,
index=idx)
if self.norm is not None:
output = self.norm(output)
if decode:
return output, cache
return output
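    # Hedged calling pattern for cached autoregressive decoding (illustrative;
    # the exact cache layout is defined by the modified MultiheadAttention below):
    #     cache = None
    #     for step in range(max_len):
    #         output, cache = decoder(tgt_prefix, memory, decode=True,
    #                                 max_len=max_len, cache=cache)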
# Modified to use cache for autoregressive decoding.
class TransformerDecoderLayer(nn.TransformerDecoderLayer):
r"""TransformerDecoderLayer is made up of self-attn, multi-head-attn and
feedforward network.
This standard decoder layer is based on the paper "Attention Is All You Need".
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>, <NAME>, and <NAME>. 2017. Attention is all
you need. In Advances in Neural Information Processing Systems,
pages 6000-6010. Users may modify or implement in a different way during
application.
Args:
d_model: the number of expected features in the input (default=1024).
nhead: the number of heads in the multiheadattention models (default=16).
dim_feedforward: the dimension of the feedforward network model
(default=4096).
dropout: the dropout value (default=0.1).
activation: the activation function of the intermediate layer, can be a
string ("relu" or "gelu") or a unary callable (default=F.relu).
layer_norm_eps: the eps value in layer normalization components
(default=1e-6).
batch_first: If ``True``, then the input and output tensors are provided
as (batch, seq, feature). Default: ``True`` (batch, seq, feature).
norm_first: if ``True``, layer norm is done prior to self attention,
            multihead attention and feedforward operations, respectively.
Otherwise it's done after. Default: ``True``.
Examples::
>>> decoder_layer = nn.TransformerDecoderLayer(d_model=512, nhead=8)
>>> memory = torch.rand(10, 32, 512)
>>> tgt = torch.rand(20, 32, 512)
>>> out = decoder_layer(tgt, memory)
Alternatively, when ``batch_first`` is ``True``:
>>> decoder_layer = nn.TransformerDecoderLayer(d_model=512, nhead=8,
batch_first=True)
>>> memory = torch.rand(32, 10, 512)
>>> tgt = torch.rand(32, 20, 512)
>>> out = decoder_layer(tgt, memory)
"""
__constants__ = ['batch_first', 'norm_first']
def __init__(self,
d_model: int = 1024,
nhead: int = 16,
dim_feedforward: int = 4096,
dropout: float = 0.1,
activation: Union[str, Callable[[Tensor], Tensor]] = F.relu,
layer_norm_eps: float = 1e-6,
batch_first: bool = True,
norm_first: bool = True,
device=None,
dtype=None) -> None:
factory_kwargs = {'device': device, 'dtype': dtype}
super().__init__(
d_model,
nhead,
dim_feedforward=dim_feedforward,
dropout=dropout,
activation=activation,
layer_norm_eps=layer_norm_eps,
batch_first=batch_first,
norm_first=norm_first,
device=device,
dtype=dtype)
self.self_attn = MultiheadAttention(
d_model,
nhead,
dropout=dropout,
batch_first=batch_first,
bias=False,
**factory_kwargs)
self.multihead_attn = MultiheadAttention(
d_model,
nhead,
dropout=dropout,
batch_first=batch_first,
bias=False,
**factory_kwargs)
def forward(self,
tgt: Tensor,
memory: Tensor,
tgt_mask: Optional[Tensor] = None,
memory_mask: Optional[Tensor] = None,
tgt_key_padding_mask: Optional[Tensor] = None,
memory_key_padding_mask: Optional[Tensor] = None,
decode: bool = False,
max_len: Optional[int] = None,
cache: Optional[dict] = None,
                index: Optional[int] = None) -> Tuple[Tensor, Optional[dict]]:
r"""Pass the inputs (and mask) through the decoder layer.
Args:
tgt: the sequence to the decoder layer (required).
memory: the sequence from the last layer of the encoder (required).
tgt_mask: the mask for the tgt sequence (optional).
memory_mask: the mask for the memory sequence (optional).
tgt_key_padding_mask: the mask for the tgt keys per batch (optional).
memory_key_padding_mask: the mask for the memory keys per batch (optional)
            decode: whether to use cache for autoregressive decoding or not.
max_len: maximum sequence length, necessary for decoding cache.
Shape:
see the docs in Transformer class.
"""
# see Fig. 1 of https://arxiv.org/pdf/2002.04745v1.pdf
x = tgt
if self.norm_first:
sa_out, cache = self._sa_block(
self.norm1(x),
tgt_mask,
tgt_key_padding_mask,
decode=decode,
max_len=max_len,
cache=cache,
index=index)
x = x + sa_out
x = x + self._mha_block(
self.norm2(x), memory, memory_mask, memory_key_padding_mask)
x = x + self._ff_block(self.norm3(x))
else:
sa_out, cache = self._sa_block(
x,
tgt_mask,
tgt_key_padding_mask,
decode=decode,
max_len=max_len,
cache=cache,
index=index)
x = self.norm1(x + sa_out)
x = self.norm2(
x + self._mha_block(x, memory, memory_mask, memory_key_padding_mask))
x = self.norm3(x + self._ff_block(x))
return x, cache
# self-attention block
def _sa_block(self,
x: Tensor,
attn_mask: Optional[Tensor],
key_padding_mask: Optional[Tensor],
decode: bool = False,
max_len: Optional[int] = None,
cache: Optional[dict] = None,
                  index: Optional[int] = None) -> Tuple[Tensor, Optional[dict]]:
x, _, cache = self.self_attn(
x,
x,
x,
attn_mask=attn_mask,
key_padding_mask=key_padding_mask,
need_weights=False,
decode=decode,
max_len=max_len,
cache=cache,
index=index)
return self.dropout1(x), cache
# Only difference to standard PyTorch class is that 'self._qkv_same_embed_dim'
# is always set to 'False' and the use of a cache registered as a buffer for
# autoregressive decoding.
class MultiheadAttention(nn.MultiheadAttention):
r"""Allows the model to jointly attend to information
from different representation subspaces.
See `Attention Is All You Need <https://arxiv.org/abs/1706.03762>`_.
.. math::
\text{MultiHead}(Q, K, V) = \text{Concat}(head_1,\dots,head_h)W^O
where :math:`head_i = \text{Attention}(QW_i^Q, KW_i^K, VW_i^V)`.
Args:
embed_dim: Total dimension of the model.
num_heads: Number of parallel attention heads. Note that ``embed_dim`` will
be split across ``num_heads`` (i.e. each head will have dimension
``embed_dim // num_heads``).
dropout: Dropout probability on ``attn_output_weights``. Default: ``0.0``
(no dropout).
bias: If specified, adds bias to input / output projection layers.
Default: ``True``.
add_bias_kv: If specified, adds bias to the key and value sequences at
dim=0. Default: ``False``.
add_zero_attn: If specified, adds a new batch of zeros to the key and value
sequences at dim=1. Default: ``False``.
kdim: Total number of features for keys. Default: ``None``
(uses ``kdim=embed_dim``).
vdim: Total number of features for values. Default: ``None``
(uses ``vdim=embed_dim``).
batch_first: If ``True``, then the input and output tensors are provided
as (batch, seq, feature). Default: ``False`` (seq, batch, feature).
Examples::
>>> multihead_attn = nn.MultiheadAttention(embed_dim, num_heads)
>>> attn_output, attn_output_weights = multihead_attn(query, key, value)
"""
def __init__(self,
embed_dim,
num_heads,
dropout=0.,
bias=True,
add_bias_kv=False,
add_zero_attn=False,
kdim=None,
vdim=None,
batch_first=True,
device=None,
dtype=None) -> None:
super().__init__(
embed_dim,
num_heads,
dropout=dropout,
bias=bias,
add_bias_kv=add_bias_kv,
add_zero_attn=add_zero_attn,
kdim=kdim,
vdim=vdim,
batch_first=batch_first,
device=device,
dtype=dtype)
# This is set to 'True' for kdim == vdim == embed_dim in the standard
# PyTorch class.
self._qkv_same_embed_dim = False
factory_kwargs = {'device': device, 'dtype': dtype}
self.q_proj_weight = nn.Parameter(
torch.empty((embed_dim, embed_dim), **factory_kwargs))
self.k_proj_weight = nn.Parameter(
torch.empty((embed_dim, self.kdim), **factory_kwargs))
self.v_proj_weight = nn.Parameter(
torch.empty((embed_dim, self.vdim), **factory_kwargs))
self.register_parameter('in_proj_weight', None)
self._reset_parameters()
def forward(self,
query: Tensor,
key: Tensor,
value: Tensor,
key_padding_mask: Optional[Tensor] = None,
need_weights: bool = True,
attn_mask: Optional[Tensor] = None,
average_attn_weights: bool = True,
decode: bool = False,
max_len: Optional[int] = None,
cache: Optional[dict] = None,
index: Optional[int] = None) -> Tuple[Tensor, Optional[Tensor]]:
r"""
Args:
query: Query embeddings of shape :math:`(L, E_q)` for unbatched input,
:math:`(L, N, E_q)` when ``batch_first=False`` or :math:`(N, L, E_q)`
when ``batch_first=True``, where :math:`L` is the target sequence
length, :math:`N` is the batch size, and :math:`E_q` is the query
embedding dimension ``embed_dim``.
Queries are compared against key-value pairs to produce | |
# This program was generated by "Generative Art Synthesizer"
# Generation date: 2021-11-28 02:06:28 UTC
# GAS change date: 2021-11-28 01:31:12 UTC
# GAS md5 hash: c291ffb9de6ad6dea37797c00163f591
# Python version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# For more information visit: https://github.com/volotat/GAS
#import python libraries
import os #OS version: default
import numpy as np #Numpy version: 1.19.5
from PIL import Image #PIL version: 8.1.2
#set initial params
SIZE = 768
GRID_CHANNELS = 30
def test_values(arr):
if np.isnan(arr).any():
        raise Exception('Array has NaN elements!')
if np.amin(arr) < -1 or np.amax(arr) > 1:
        raise Exception('Values went too far! [ %.2f : %.2f ]'%(np.amin(arr), np.amax(arr)) )
return arr
#define grid transformation methods
def transit(x, t_indx, s_indx, alphas):
res = x.copy()
res[:,:,t_indx] = np.sum(x[:,:,s_indx] * alphas, axis = -1)
return test_values(res.clip(-1,1))
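# Note: transit overwrites target channel t_indx with a weighted combination of
# the source channels s_indx (the alpha weights used below sum to roughly 1),
# then clips the result back into [-1, 1].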
def sin(x, t_indx, s_indx, scale = 1, shift = 0):
res = x.copy()
res[:,:,t_indx] = np.sin(x[:,:,s_indx] * 0.5 * np.pi * scale + shift)
return test_values(res)
def power(x, t_indx, s_indx, p = 1):
res = x.copy()
res[:,:,t_indx] = np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx]) ** p
return test_values(res)
def magnitude(x, t_indx, s_indx, ord = 2):
res = x.copy()
res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis = -1, ord = ord) / np.sqrt(len(s_indx))
return test_values(res)
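# Sanity note (illustrative): magnitude divides the L2 norm by sqrt(len(s_indx)),
# so two channels that are both 1 give ||(1, 1)|| / sqrt(2) == 1, keeping values
# inside the [-1, 1] range enforced by test_values.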
#set initial grid
grid = np.zeros((SIZE, SIZE, GRID_CHANNELS))
x = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((1, SIZE)).repeat(SIZE, 0)
y = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((SIZE, 1)).repeat(SIZE, 1)
phi = np.pi / 4
rx = (x * np.cos(phi) - y * np.sin(phi)) / 1.5
ry = (x * np.cos(phi) + y * np.sin(phi)) / 1.5
for i in range(15):
grid[:,:,i * 2 + 0] = x
grid[:,:,i * 2 + 1] = y
'''
grid[:,:,0 ] = x
grid[:,:,1 ] = y
grid[:,:,2 ] = rx
grid[:,:,3 ] = ry
grid[:,:, 4] = np.sin(x * 0.5 * np.pi)
grid[:,:, 5] = np.sin(y * 0.5 * np.pi)
grid[:,:, 6: 8] = magnitude(grid[:,:,4:6], [0, 1])
grid[:,:, 8] = np.sin(-x * 0.5 * np.pi)
grid[:,:, 9] = np.sin(-y * 0.5 * np.pi)
grid[:,:,10] = np.sin(rx * 0.5 * np.pi)
grid[:,:,11] = np.sin(ry * 0.5 * np.pi)
grid[:,:,12] = np.sin(-rx * 0.5 * np.pi)
grid[:,:,13] = np.sin(-ry * 0.5 * np.pi)
grid[:,:,14] = np.sin(x * 0.5 * np.pi* 2)
grid[:,:,15] = np.sin(y * 0.5 * np.pi* 2)
grid[:,:,16:18] = magnitude(grid[:,:,14:16], [0, 1])
grid[:,:,18] = np.cos(x * 0.5 * np.pi* 2)
grid[:,:,19] = np.cos(y * 0.5 * np.pi* 2)
grid[:,:,20:22] = magnitude(grid[:,:,18:20], [0, 1])
grid[:,:,22] = np.sin(rx * 0.5 * np.pi* 2)
grid[:,:,23] = np.sin(ry * 0.5 * np.pi* 2)
grid[:,:,24:26] = magnitude(grid[:,:,22:24], [0, 1])
grid[:,:,26] = np.cos(rx * 0.5 * np.pi* 2)
grid[:,:,27] = np.cos(ry * 0.5 * np.pi* 2)
grid[:,:,28:30] = magnitude(grid[:,:,26:28], [0, 1])
'''
'''
grid[:,:,30:32] = np.sin(x * 0.5 * np.pi * 3), np.sin(y * 0.5 * np.pi * 3)
grid[:,:,32:34] = magnitude(grid, [30, 31])
grid[:,:,34:36] = np.cos(x * 0.5 * np.pi * 3), np.cos(y * 0.5 * np.pi * 3)
grid[:,:,36:38] = magnitude(grid, [34, 35])
grid[:,:,38:40] = np.sin(rx * 0.5 * np.pi * 3), np.sin(ry * 0.5 * np.pi * 3)
grid[:,:,40:42] = magnitude(grid, [40, 41])
grid[:,:,42:44] = np.cos(rx * 0.5 * np.pi * 3), np.cos(ry * 0.5 * np.pi * 3)
grid[:,:,44:46] = magnitude(grid, [44, 45])
'''
#apply transformations to the grid
grid = transit(grid, 9, [5, 24, 15, 3], [0.5342212021626492, 0.18147076392900563, 0.055633740504602715, 0.22867429340374243])
grid = transit(grid, 26, [22, 26, 29, 19, 8, 21, 7, 10, 11, 25, 9, 0], [0.05696102197584366, 0.26285222924225504, 0.0919079612051198, 0.1687527806884494, 0.11085010574249453, 0.05902176129124312, 0.011214159943138803, 0.04445927335667039, 0.12588318336806506, 0.017264056162267945, 0.02253705125837301, 0.02829641576607929])
grid = transit(grid, 11, [18, 5, 27, 23, 0, 28, 1, 7, 10, 6, 22, 20, 12, 14], [0.13683569469872395, 0.016959388954744067, 0.03663160854125827, 0.002847112263463712, 0.1157918799233711, 0.08397448090630324, 0.14414551091367567, 0.0021601192347748058, 0.03820829976675248, 0.24869202059053816, 0.03239972241226923, 0.03675275934433129, 0.02289243524271101, 0.08170896720708293])
grid = magnitude(grid, 12, [18], 2)
grid = sin(grid, 9, 24, -0.5197749708846731, 15.854217887574976)
grid = sin(grid, 0, 24, -0.9837158181933741, -71.02392680503814)
grid = transit(grid, 13, [26, 3, 15, 22, 9], [0.06742451457475923, 0.12281320801630513, 0.04161211333159292, 0.35567601456345893, 0.41247414951388384])
grid = transit(grid, 18, [28, 8, 19, 23, 5, 10, 22, 4, 0], [0.19499065845684085, 0.16530700785043909, 0.023804434336736176, 0.11381052492943354, 0.025890588409873856, 0.03293541341724035, 0.19677542129271589, 0.07944659605729233, 0.16703935524942787])
grid = sin(grid, 15, 3, -7.8029197241866, 87.90196568906993)
grid = transit(grid, 5, [27, 22, 1], [0.23158505830919715, 0.6081254056813405, 0.16028953600946239])
grid = sin(grid, 29, 16, 2.873804394920092, 27.43482722191908)
grid = transit(grid, 8, [22, 8, 3, 2, 15, 27, 4, 0], [0.07040696946829501, 0.09245037606429185, 0.08572118214761944, 0.07904402889523246, 0.17642651560212103, 0.02251048152604197, 0.2642463926831366, 0.2091940536132616])
grid = transit(grid, 23, [21], [1.0])
grid = transit(grid, 23, [23, 9, 24, 8, 22, 26, 21, 14, 10], [0.08134327652223117, 0.19291722683799986, 0.009130363704744729, 0.06972659674300093, 0.07539663295186692, 0.10945404198606352, 0.055844354010005634, 0.25921661695123077, 0.14697089029285645])
grid = transit(grid, 11, [16, 22, 0, 6, 23], [0.11937697094300885, 0.06582896844008528, 0.10741157627327866, 0.3116438045579353, 0.395738679785692])
grid = transit(grid, 6, [17, 7, 11, 15, 14, 2, 28, 10, 12], [0.1769994584035045, 0.06251707636911402, 0.24665496513570134, 0.06595276596590365, 0.11364312885348776, 0.13900251803789462, 0.07609885164897781, 0.014977661375232813, 0.10415357421018354])
grid = transit(grid, 18, [13, 28], [0.18237735040537398, 0.817622649594626])
grid = sin(grid, 14, 21, -3.2769715256005334, 33.341014144234634)
grid = transit(grid, 27, [22, 10, 0], [0.37903105988216274, 0.027903296061249153, 0.5930656440565881])
grid = sin(grid, 13, 19, -1.0083057747035085, 45.32663709059881)
grid = sin(grid, 21, 29, -2.033902738635562, 83.18637055692568)
grid = sin(grid, 21, 20, 3.724200166863936, 64.11265315372938)
grid = power(grid, 5, 28, 0.5040698006520863)
grid = transit(grid, 27, [15, 13, 0, 23, 4, 12], [0.16836214550922596, 0.09088633645917937, 0.12832532709494834, 0.19428442145545552, 0.11551692680955276, 0.3026248426716379])
grid = transit(grid, 26, [8, 28, 19, 12], [0.12254915908883034, 0.049158806834715456, 0.2548836014126827, 0.5734084326637715])
grid = sin(grid, 8, 22, 0.28963704045629785, 14.642974186104624)
grid = transit(grid, 4, [1, 6, 11, 9, 16, 5, 7], [0.2908557143577597, 0.26104994960954353, 0.10617777722136651, 0.08395560557363395, 0.0755743561440244, 0.02055115744063781, 0.161835439653034])
grid = transit(grid, 0, [9, 29, 2, 21, 0, 11, 26, 8, 24, 28, 12, 14, 5, 27], [0.11751323724268951, 0.08606364240186842, 0.08258664640517607, 0.14599005593016762, 0.023503498981713042, 0.09960938327534487, 0.10311378807858956, 0.049182823549559086, 0.07706454488212051, 0.03182325629530442, 0.0026883278635587552, 0.09205206091316889, 0.04997214485121891, 0.03883658932952038])
grid = magnitude(grid, 4, [3, 15, 14, 4, 24], 2)
grid = power(grid, 20, 21, 0.8369247988654179)
grid = transit(grid, 19, [0, 5], [0.09676039555415433, 0.9032396044458456])
grid = sin(grid, 29, 29, -0.9643673506468063, 8.967437246119388)
grid = sin(grid, 14, 1, 0.15139570426619076, -40.59378530824198)
grid = transit(grid, 28, [26, 27, 3, 25, 9, 14, 16, 19, 21, 22, 10, 7, 6, 11], [0.03846283630801191, 0.003611165070086529, 0.016386375670539105, 0.03325226481143111, 0.07248141408219415, 0.07574051112729943, 0.025136664847969954, 0.09140666537823693, 0.11688378406685693, 0.13391881271054026, 0.19792069866332154, 0.0069953919558150965, 0.10553327699478507, 0.08227013831291184])
grid = sin(grid, 14, 17, -1.6536754594921832, -38.27968476791121)
grid = transit(grid, 3, [24, 4, 21, 13, 15, 20, 5, 2, 18, 10, 8, 26, 25, 0, 22, 29], [0.043261455138004605, 0.09701099620993626, 0.02330623238287478, 0.002034833032158639, 0.07579485773048504, 0.11071509824560882, 0.009774323938956175, 0.04781055561128071, 0.048216338072515415, 0.18253805904361245, 0.12137806592285647, 0.06336045759588786, 0.0289834486908499, 0.002438501994089762, 0.08636165041317383, 0.057015125977709254])
grid = transit(grid, 4, [23, 29, 21, 2, 3], [0.13700876091088077, 0.2143789960591354, 0.21815790353798725, 0.38176010336713523, 0.04869423612486131])
grid = transit(grid, 5, [5, 0, 12, 10, 26, 1, 3, 14, 2, 15, 7, 29, 25, 16, 8], [0.08299901497093495, 0.0007990389558318802, 0.0398269346802915, 0.09815788008041128, 0.003191584435749173, 0.016639034177236122, 0.10558028964441604, 0.05996226828205053, 0.036106635758057615, 0.13934965910622088, 0.059254497545744876, 0.1279429548882897, 0.07666322625023604, 0.03461467798520952, 0.11891230323931992])
grid = magnitude(grid, 13, [13, 18, 2, 6, 0, 24], 2)
grid = transit(grid, 3, [7, 22], [0.7008249200828205, 0.2991750799171795])
grid = transit(grid, 8, [16, 14, 0, 8, 23, 12, 13, 28], [0.054554519558812804, 0.0634083121537979, 0.061763274064731144, 0.22655341867476142, 0.26362052667155217, 0.06293326171754145, 0.024153690767067026, 0.24301299639173612])
grid = transit(grid, 12, [2, 0, 25, 4, 11, 20], [0.2135727198690867, 0.2602478929854192, 0.036054564699847955, 0.029549321664014874, 0.19439620047101405, 0.26617930031061726])
grid = sin(grid, 12, 29, -2.0606158867391033, 49.041181245520846)
grid = transit(grid, 2, [1, 18, 25, 7, 4, 28, 22, 0, 2, 29, 11, 14, 8, 21, 16, 13, 3, 9, 20, 10, 27, 26, 23, 5, 6], [0.022961767783264937, 0.05216151564895142, 0.06700378538854872, 0.004868404084079884, 0.031119815532516526, 0.06817549828378006, 0.06589765610348139, 0.06192301312443225, 0.0010172298968791822, 0.05043368611785926, 0.012663403673358928, 0.04345946859628934, 0.036083635881321866, 0.017571690463611844, 0.026917561776311466, 0.03758362471596475, 0.023367838545381096, 0.05901741741171624, 0.0377305636240221, 0.1341011079951387, 0.014123583108795754, 0.06457810005715973, 0.02429131114943675, 0.008081204440561181, 0.0348671165971368])
grid = transit(grid, 15, [27, 1, 5, 2, 22, 3, 0, 12, 8, 25, 6, 15, 23, 16, 26], [0.09276096236215951, 0.055987479578727545, 0.03389928739157258, 0.10048460322530173, 0.18808250644766542, 0.15052093262678393, 0.010305629775380134, 0.02733510312887793, 0.01962295296713035, 0.058735304839641365, 0.10684676338951316, 0.0385674598842383, 0.06631728140180648, 0.031899953317147636, 0.01863377966405387])
grid = power(grid, 17, 11, 0.6101129604159466)
grid = magnitude(grid, 24, [1, 12, 11, 17, 10, 21, 25, 4, 28, 19, 20, 3, 7, 0, 15, 27, 22, 29, 9, 18, 14, 2, 6], 2)
grid = sin(grid, 12, 19, -1.2971484605178953, 43.6253219584718)
grid = transit(grid, 2, [8, 7, 29, 26, 24], [0.050019232441529594, 0.20552781686115407, 0.20508495691807016, 0.33655248905731455, 0.20281550472193155])
grid = transit(grid, 22, [26, 4, 9, 6, 28, 23], [0.12501784549867087, 0.17854573410869207, 0.151199454897825, 0.15354236191147705, 0.282732246330031, 0.10896235725330396])
grid = transit(grid, 22, [17, 6, 27, 25, 26], [0.17138763442513022, 0.385897807116071, 0.1332857834306504, 0.1272177111376481, 0.18221106389050015])
grid = magnitude(grid, 9, [26, 13, 11, 28, 7, 18, 6, 19, 5, 25, 21, 27, 16, 9, 1, 20, 0, 15, 23], 2)
grid = sin(grid, 22, 5, 4.201204510461814, 40.04612040712763)
grid = sin(grid, 19, 23, -2.014006414093969, 82.36876582998593)
grid = transit(grid, 27, [22, 23, 7], [0.45404412320349574, 0.5035642851999552, 0.042391591596549094])
grid = transit(grid, 19, [19, 28, 27], [0.06178259935534135, 0.30030960742588425, 0.6379077932187743])
grid = sin(grid, 2, 11, -1.71578256308278, -82.65630005314819)
grid = sin(grid, 19, 21, 3.1366867450792912, -84.93839154346348)
grid = sin(grid, 3, 28, 1.37302236191729, 98.05962588321242)
grid = sin(grid, | |
<gh_stars>1-10
import numpy as np
import matplotlib.pyplot as plt
from tqdm import trange
from htm.bindings.sdr import SDR
from htm.bindings.algorithms import TemporalMemory
from htm.bindings.algorithms import SpatialPooler
from itertools import product
from copy import deepcopy
import json
EPS = 1e-12
class Memory:
"""
The Memory object saves SDR representations of states and clusterizes them using the similarity measure.
The SDR representation must have fixed sparsity of active cells for correct working.
Parameters
----------
size : int
The size is the size of SDR representations, which are stored
threshold: float
        The threshold is used to determine when it is necessary to create a new cluster.
Attributes
----------
size: int
It stores size argument.
kernels : np.array
        This is the list of created cluster representations in dense form. It contains information about
        the frequency of each cell's activity (for each cluster) during operation. Its shape: (number of clusters, size).
norms: np.array
        This is the number of stored representations for each cluster. Its shape: (number of clusters, 1)
threshold: float
It stores threshold argument.
"""
def __init__(self, size, threshold=0.5):
self.kernels = None
self.norms = None
self.threshold = threshold
self.size = size
@property
def number_of_clusters(self):
if (self.kernels is not None) and (self.kernels.ndim == 2):
return self.kernels.shape[0]
else:
return 0
def add(self, state):
""" Add a new SDR representation (store and clusterize).
Parameters
----------
state: np.array
            This is the SDR representation (sparse) that we want to store and clusterize with the other stored SDRs.
Returns
-------
"""
state_dense = np.zeros(self.size)
state_dense[state] = 1
sims = self.similarity(state_dense)
if np.sum(sims > self.threshold) == 0:
if self.kernels is None:
self.kernels = state_dense.reshape((1, -1))
self.norms = np.array([[1]])
else:
self.kernels = np.vstack((self.kernels, state_dense))
self.norms = np.vstack((self.norms, [1]))
else:
self.kernels[np.argmax(sims)] += state_dense
self.norms[np.argmax(sims)] += 1
def similarity(self, state):
"""This function evaluate similarity measure between stored clusters and new state.
Parameters
----------
state: np.array
The sparse representation of the state to be compared.
Returns
-------
similarities: np.array
            The similarity measures for the given state. If the Memory object doesn't have any saved clusters, an empty
            array is returned; otherwise the returned array contains the similarities between the state and each cluster.
Its shape: (number of kernels, 1).
"""
if self.kernels is None:
return np.array([])
else:
normalised_kernels = self.kernels / self.norms
sims = normalised_kernels @ state.T / (
np.sqrt(np.sum(normalised_kernels ** 2, axis=1)) * np.sqrt(state @ state.T))
similarities = sims.T
return similarities
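    # Hedged usage sketch (illustrative sizes): two overlapping SDRs end up in a
    # single cluster because their cosine similarity (2/3) exceeds threshold=0.5:
    #     mem = Memory(size=8, threshold=0.5)
    #     mem.add(np.array([0, 1, 2]))  # first SDR creates cluster 0
    #     mem.add(np.array([0, 1, 3]))  # overlaps in 2 of 3 cells -> merged
    #     mem.number_of_clusters        # -> 1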
def adopted_kernels(self, sparsity):
"""This function normalises stored representations and cuts them by sparsity threshold.
Parameters
----------
sparsity: float
The sparsity of active cells in stored SDR representations.
Returns
-------
clusters_representations: np.array
            Normalised and cut representations of each cluster. The cutting is done by choosing the most frequently
            active cells (their number is defined by sparsity) in the kernels attribute. All elements of the array
            are in [0, 1]. The shape is (number of clusters, size).
"""
data = np.copy(self.kernels)
data[data < np.quantile(data, 1 - sparsity, axis=1).reshape((-1, 1))] = 0
clusters_representations = data / self.norms
return clusters_representations
class Empowerment:
"""
    The Empowerment object contains everything necessary to evaluate 'empowerment' using a model of the environment.
    The model is also created and learned here.
Parameters
----------
seed: int
The seed for random generator.
encode_size: int
        The size of the SDR representations taken by the model.
tm_config: dict
It contains all parameters for initialisation of the TemporalMemory without the columnDimensions.
columnDimensions is defined inside Empowerment.
sparsity: float
The sparsity of SDR representations which are used in the TemporalMemory algorithm.
sp_config (optional): dict
        It contains all parameters for initialisation of the SpatialPooler without the inputDimensions
        and localAreaDensity. They are defined inside Empowerment. By default sp_config is None, which means
        no SpatialPooler is used.
memory (optional): bool
        This parameter defines whether the Memory is used for saving and clusterizing state representations.
        By default it is False (the Memory is not used).
similarity_threshold (optional): float
        This parameter determines the threshold for cluster creation. It is used when memory is True. By default: 0.6.
evaluate (optional): bool
        This flag defines whether some statistics are stored to evaluate the learning process.
By default is True.
Attributes
----------
evaluate: bool
It stores the same parameter.
anomalies: list
        It stores the anomaly values of TM for each time step after learning. Only when evaluate is True.
IoU: list
        It stores the Intersection over Union values of TM predictions and real ones for each time step after learning.
        Only when evaluate is True.
sparsity: float
It stores the same parameter.
sp: SpatialPooler
It contains the SpatialPooler object if it was defined, else None
tm: TemporalMemory
It contains the TemporalMemory object.
size: int
It stores the encode_size parameter.
memory: Memory
It contains the Memory object if memory parameter is True, else None.
"""
def __init__(self, seed, encode_size, tm_config, sparsity,
sp_config=None,
memory=False,
similarity_threshold=0.6,
evaluate=True,
filename=None):
self.filename = filename
if self.filename is None:
self.evaluate = evaluate
if evaluate:
self.anomalies = []
self.IoU = []
self.sdr_0 = SDR(encode_size)
self.sdr_1 = SDR(encode_size)
self.sparsity = sparsity
if sp_config is not None:
self.sp = SpatialPooler(inputDimensions=[encode_size],
seed=seed,
localAreaDensity=sparsity,
**sp_config,
)
self.tm = TemporalMemory(
columnDimensions=self.sp.getColumnDimensions(),
seed=seed,
**tm_config,
)
self.sdr_sp = SDR(self.sp.getColumnDimensions())
self.size = self.sp.getColumnDimensions()[0]
else:
self.sp = None
self.tm = TemporalMemory(
columnDimensions=[encode_size],
seed=seed,
**tm_config,
)
self.size = self.tm.getColumnDimensions()[0]
if memory:
self.memory = Memory(self.tm.getColumnDimensions()[0], threshold=similarity_threshold)
else:
self.memory = None
else:
with open(self.filename) as json_file:
self.empowerment_data = json.load(json_file)
    def eval_from_file(self, position):
        """Return the precomputed empowerment value stored for the given (row, column) position."""
        return self.empowerment_data[str(position[0])][str(position[1])]
    def eval_state(self, state, horizon, use_segments=False, use_memory=False):
        """Evaluate empowerment for the given state.
        Parameters
        ----------
        state: np.array
            The SDR representation (sparse) of the state.
        horizon: int
            The horizon of evaluation for the given state. A good value is 3.
        use_segments (optional): bool
            Whether to use segments instead of cells to evaluate empowerment. By default: False.
        use_memory (optional): bool
            Whether to use the Memory object. Useful only if that object was initialised.
            By default: False.
        Returns
        -------
        empowerment: float
            The empowerment value (always >= 0).
        p: np.array
            The array of probabilities from which the empowerment was calculated.
        start_state: np.array
            The SDR representation of the given state as used by the TM (it differs from the state
            parameter only if sp is defined).
        """
if self.sp is not None:
self.sdr_0.sparse = state
self.sp.compute(self.sdr_0, learn=False, output=self.sdr_sp)
sdr = self.sdr_sp
else:
self.sdr_0.sparse = state
sdr = self.sdr_0
start_state = np.copy(sdr.sparse)
data = np.zeros(self.tm.getColumnDimensions()[0])
for actions in range(horizon):
self.tm.reset()
self.tm.compute(sdr, learn=False)
self.tm.activateDendrites(learn=False)
predictiveCells = self.tm.getPredictiveCells().sparse
predictedColumnIndices = [self.tm.columnForCell(i) for i in predictiveCells]
sdr.sparse = np.unique(predictedColumnIndices)
if use_segments:
predictedColumnIndices = map(self.tm.columnForCell,
map(self.tm.connections.cellForSegment, self.tm.getActiveSegments()))
for i in predictedColumnIndices:
data[i] += 1
if self.memory is not None and use_memory:
if (self.memory.kernels is not None) and (self.memory.kernels.size > 0):
clusters = self.memory.adopted_kernels(self.sparsity)
mask = (clusters[:, data!=0].sum(axis=1) / (self.sparsity * self.size)) < self.memory.threshold
p = np.dot(clusters, data.T) / (self.sparsity * self.size)
p[mask] = 0
total_p = p.sum()
empowerment = np.sum(-p / (total_p + EPS) * np.log(p / (total_p + EPS), where=p != 0), where=p != 0)
p = p / (total_p + EPS)
return empowerment, p, start_state
else:
return 0, None, start_state
empowerment = np.sum(-data / (data.sum() + EPS) * np.log(data / (data.sum() + EPS), where=data != 0), where=data != 0)
p = data / (data.sum() + EPS)
return empowerment, p, start_state
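    # Hedged usage sketch (tm_cfg, the encoder and the training loop are assumptions;
    # only eval_state's signature comes from the code above):
    #   emp = Empowerment(seed=0, encode_size=1000, tm_config=tm_cfg, sparsity=0.02)
    #   ... feed observed transitions to emp.learn(...) ...
    #   value, probs, sdr = emp.eval_state(state_sparse, horizon=3)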
    def eval_env(self, environment, horizon, use_segments=False, use_memory=False):
        """Evaluate empowerment for every state in a gridworld environment.
        Parameters
        ----------
        environment:
            The gridworld environment to be evaluated.
        horizon: int
            The horizon of evaluation for each state. A good value is 3.
        use_segments (optional): bool
            Whether to use segments instead of cells to evaluate empowerment. By default: False.
        use_memory (optional): bool
            Whether to use the Memory object. Useful only if that object was initialised.
            By default: False.
        Returns
        -------
        empowerment_map: np.array
            The map of the environment with an empowerment value for each state.
        """
env = deepcopy(environment)
empowerment_map = np.zeros(env.env.shape)
for i in range(env.env.shape[0]):
for j in range(env.env.shape[1]):
if not env.env.entities['obstacle'].mask[i, j]:
env.env.agent.position = (i, j)
_, s, _ = env.observe()
empowerment_map[i, j] = self.eval_state(s, horizon, use_segments, use_memory)[0]
# plt.imshow(empowerment_map)
# plt.show()
return empowerment_map
    def learn(self,
<gh_stars>100-1000
# Copyright (c) 2018-2021, NVIDIA Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy as np
import os
import torch
from isaacgym import gymtorch
from isaacgym import gymapi
from isaacgym.torch_utils import *
from collections import OrderedDict
project_dir = os.path.abspath(os.path.dirname(__file__))
from elegantrl.envs.utils.torch_jit_utils import *
from elegantrl.envs.isaac_tasks.base.vec_task import VecTask
from types import SimpleNamespace
from collections import deque
from typing import Deque, Dict, Tuple, Union
# python
import enum
import numpy as np
# ################### #
# Dimensions of robot #
# ################### #
class TrifingerDimensions(enum.Enum):
"""
Dimensions of the tri-finger robot.
    Note: While this may not seem necessary for the tri-finger robot since it is fixed-base, having
    this dimensions class is useful for floating-base systems.
"""
# general state
# cartesian position + quaternion orientation
    PoseDim = 7
    # linear velocity + angular velocity
VelocityDim = 6
# state: pose + velocity
StateDim = 13
# force + torque
WrenchDim = 6
# for robot
# number of fingers
NumFingers = 3
# for three fingers
JointPositionDim = 9
JointVelocityDim = 9
JointTorqueDim = 9
# generalized coordinates
GeneralizedCoordinatesDim = JointPositionDim
GeneralizedVelocityDim = JointVelocityDim
# for objects
ObjectPoseDim = 7
ObjectVelocityDim = 6
# ################# #
# Different objects #
# ################# #
# radius of the arena
ARENA_RADIUS = 0.195
class CuboidalObject:
"""
Fields for a cuboidal object.
@note Motivation for this class is that if domain randomization is performed over the
size of the cuboid, then its attributes are automatically updated as well.
"""
# 3D radius of the cuboid
radius_3d: float
# distance from wall to the center
max_com_distance_to_center: float
    # minimum and maximum height for spawning the object
min_height: float
max_height = 0.1
NumKeypoints = 8
ObjectPositionDim = 3
KeypointsCoordsDim = NumKeypoints * ObjectPositionDim
def __init__(self, size: Union[float, Tuple[float, float, float]]):
"""Initialize the cuboidal object.
Args:
size: The size of the object along x, y, z in meters. If a single float is provided, then it is assumed that
object is a cube.
"""
        # decide the size depending on input type
if isinstance(size, float):
self._size = (size, size, size)
else:
self._size = size
        # compute remaining attributes
self.__compute()
"""
Properties
"""
@property
def size(self) -> Tuple[float, float, float]:
"""
Returns the dimensions of the cuboid object (x, y, z) in meters.
"""
return self._size
"""
Configurations
"""
@size.setter
def size(self, size: Union[float, Tuple[float, float, float]]):
"""Set size of the object.
Args:
size: The size of the object along x, y, z in meters. If a single float is provided, then it is assumed
that object is a cube.
"""
        # decide the size depending on input type
if isinstance(size, float):
self._size = (size, size, size)
else:
self._size = size
# compute attributes
self.__compute()
"""
Private members
"""
def __compute(self):
"""Compute the attributes for the object."""
# compute 3D radius of the cuboid
max_len = max(self._size)
self.radius_3d = max_len * np.sqrt(3) / 2
# compute distance from wall to the center
self.max_com_distance_to_center = ARENA_RADIUS - self.radius_3d
# minimum height for spawning the object
self.min_height = self._size[2] / 2
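# Hedged usage sketch: the size setter recomputes the derived attributes, which is
# the motivation stated in the class docstring (values are illustrative):
#   obj = CuboidalObject(0.065)
#   obj.radius_3d                    # 0.065 * sqrt(3) / 2 ~= 0.0563
#   obj.size = (0.05, 0.05, 0.08)    # e.g. a domain-randomization step
#   obj.radius_3d                    # recomputed from the new longest side, 0.08 * sqrt(3) / 2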
class Trifinger(VecTask):
# constants
# directory where assets for the simulator are present
_trifinger_assets_dir = os.path.join(
project_dir, "../", "isaac_assets", "trifinger"
)
# robot urdf (path relative to `_trifinger_assets_dir`)
_robot_urdf_file = "robot_properties_fingers/urdf/pro/trifingerpro.urdf"
# stage urdf (path relative to `_trifinger_assets_dir`)
# _stage_urdf_file = "robot_properties_fingers/urdf/trifinger_stage.urdf"
_table_urdf_file = "robot_properties_fingers/urdf/table_without_border.urdf"
_boundary_urdf_file = "robot_properties_fingers/urdf/high_table_boundary.urdf"
# object urdf (path relative to `_trifinger_assets_dir`)
# TODO: Make object URDF configurable.
_object_urdf_file = "objects/urdf/cube_multicolor_rrc.urdf"
# physical dimensions of the object
# TODO: Make object dimensions configurable.
_object_dims = CuboidalObject(0.065)
# dimensions of the system
_dims = TrifingerDimensions
# Constants for limits
# Ref: https://github.com/rr-learning/rrc_simulation/blob/master/python/rrc_simulation/trifinger_platform.py#L68
# maximum joint torque (in N-m) applicable on each actuator
_max_torque_Nm = 0.36
# maximum joint velocity (in rad/s) on each actuator
_max_velocity_radps = 10
# History of state: Number of timesteps to save history for
# Note: Currently used only to manage history of object and frame states.
# This can be extended to other observations (as done in ANYmal).
_state_history_len = 2
# buffers to store the simulation data
# goal poses for the object [num. of instances, 7] where 7: (x, y, z, quat)
_object_goal_poses_buf: torch.Tensor
# DOF state of the system [num. of instances, num. of dof, 2] where last index: pos, vel
_dof_state: torch.Tensor
# Rigid body state of the system [num. of instances, num. of bodies, 13] where 13: (x, y, z, quat, v, omega)
_rigid_body_state: torch.Tensor
# Root prim states [num. of actors, 13] where 13: (x, y, z, quat, v, omega)
_actors_root_state: torch.Tensor
# Force-torque sensor array [num. of instances, num. of bodies * wrench]
_ft_sensors_values: torch.Tensor
# DOF position of the system [num. of instances, num. of dof]
_dof_position: torch.Tensor
# DOF velocity of the system [num. of instances, num. of dof]
_dof_velocity: torch.Tensor
# DOF torque of the system [num. of instances, num. of dof]
_dof_torque: torch.Tensor
# Fingertip links state list([num. of instances, num. of fingers, 13]) where 13: (x, y, z, quat, v, omega)
# The length of list is the history of the state: 0: t, 1: t-1, 2: t-2, ... step.
_fingertips_frames_state_history: Deque[torch.Tensor] = deque(
maxlen=_state_history_len
)
# Object prim state [num. of instances, 13] where 13: (x, y, z, quat, v, omega)
# The length of list is the history of the state: 0: t, 1: t-1, 2: t-2, ... step.
_object_state_history: Deque[torch.Tensor] = deque(maxlen=_state_history_len)
# stores the last action output
_last_action: torch.Tensor
# keeps track of the number of goal resets
_successes: torch.Tensor
# keeps track of number of consecutive successes
_consecutive_successes: float
_robot_limits: dict = {
"joint_position": SimpleNamespace(
# matches those on the real robot
low=np.array([-0.33, 0.0, -2.7] * _dims.NumFingers.value, dtype=np.float32),
high=np.array([1.0, 1.57, 0.0] * _dims.NumFingers.value, dtype=np.float32),
default=np.array(
[0.0, 0.9, -2.0] * _dims.NumFingers.value, dtype=np.float32
),
),
"joint_velocity": SimpleNamespace(
low=np.full(
_dims.JointVelocityDim.value, -_max_velocity_radps, dtype=np.float32
),
high=np.full(
_dims.JointVelocityDim.value, _max_velocity_radps, dtype=np.float32
),
default=np.zeros(_dims.JointVelocityDim.value, dtype=np.float32),
),
"joint_torque": SimpleNamespace(
low=np.full(_dims.JointTorqueDim.value, -_max_torque_Nm, dtype=np.float32),
high=np.full(_dims.JointTorqueDim.value, _max_torque_Nm, dtype=np.float32),
default=np.zeros(_dims.JointTorqueDim.value, dtype=np.float32),
),
"fingertip_position": SimpleNamespace(
low=np.array([-0.4, -0.4, 0], dtype=np.float32),
high=np.array([0.4, 0.4, 0.5], dtype=np.float32),
),
"fingertip_orientation": SimpleNamespace(
low=-np.ones(4, dtype=np.float32),
high=np.ones(4, dtype=np.float32),
),
"fingertip_velocity": SimpleNamespace(
low=np.full(_dims.VelocityDim.value, -0.2, dtype=np.float32),
high=np.full(_dims.VelocityDim.value, 0.2, dtype=np.float32),
),
"fingertip_wrench": SimpleNamespace(
low=np.full(_dims.WrenchDim.value, -1.0, dtype=np.float32),
high=np.full(_dims.WrenchDim.value, 1.0, dtype=np.float32),
),
        # used if we want to have joint stiffness/damping as parameters
"joint_stiffness": SimpleNamespace(
low=np.array([1.0, 1.0, 1.0] * _dims.NumFingers.value, dtype=np.float32),
high=np.array(
[50.0, 50.0, 50.0] * _dims.NumFingers.value, dtype=np.float32
),
),
"joint_damping": SimpleNamespace(
low=np.array(
[0.01, 0.03, 0.0001] * _dims.NumFingers.value, dtype=np.float32
),
high=np.array([1.0, 3.0, 0.01] * _dims.NumFingers.value, dtype=np.float32),
),
}
# limits of the object (mapped later: str -> torch.tensor)
_object_limits: dict = {
"position": SimpleNamespace(
low=np.array([-0.3, -0.3, 0], dtype=np.float32),
high=np.array([0.3, 0.3, 0.3], dtype=np.float32),
default=np.array([0, 0, _object_dims.min_height], dtype=np.float32),
),
# difference between two positions
"position_delta": SimpleNamespace(
            low=np.array([-0.6, -0.6,
# Copyright 2020 AstroLab Software
# Author: <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas as pd
import numpy as np
from gatspy import periodic
import java
import copy
from astropy.time import Time
import dash
from dash.dependencies import Input, Output
import plotly.graph_objects as go
from apps.utils import convert_jd, readstamp, _data_stretch, convolve
from apps.utils import apparent_flux, dc_mag
from pyLIMA import event
from pyLIMA import telescopes
from pyLIMA import microlmodels, microltoolbox
from pyLIMA.microloutputs import create_the_fake_telescopes
from app import client, app
colors_ = [
'#1f77b4', # muted blue
'#ff7f0e', # safety orange
'#2ca02c', # cooked asparagus green
'#d62728', # brick red
'#9467bd', # muted purple
'#8c564b', # chestnut brown
'#e377c2', # raspberry yogurt pink
'#7f7f7f', # middle gray
'#bcbd22', # curry yellow-green
'#17becf' # blue-teal
]
all_radio_options = {
"Difference magnitude": ["Difference magnitude", "DC magnitude", "DC apparent flux"],
"DC magnitude": ["Difference magnitude", "DC magnitude", "DC apparent flux"],
"DC apparent flux": ["Difference magnitude", "DC magnitude", "DC apparent flux"]
}
layout_lightcurve = dict(
automargin=True,
margin=dict(l=50, r=30, b=0, t=0),
hovermode="closest",
legend=dict(
font=dict(size=10),
orientation="h",
xanchor="right",
x=1,
bgcolor='rgba(0,0,0,0)'
),
xaxis={
'title': 'Observation date',
'automargin': True
},
yaxis={
'autorange': 'reversed',
'title': 'Magnitude',
'automargin': True
}
)
layout_phase = dict(
autosize=True,
automargin=True,
margin=dict(l=50, r=30, b=40, t=25),
hovermode="closest",
legend=dict(
font=dict(size=10),
orientation="h",
yanchor="bottom",
y=0.02,
xanchor="right",
x=1,
bgcolor='rgba(0,0,0,0)'
),
xaxis={
'title': 'Phase'
},
yaxis={
'autorange': 'reversed',
'title': 'Apparent DC Magnitude'
},
title={
"text": "Phased data",
"y": 1.01,
"yanchor": "bottom"
}
)
layout_mulens = dict(
autosize=True,
automargin=True,
margin=dict(l=50, r=30, b=40, t=25),
hovermode="closest",
legend=dict(
font=dict(size=10),
orientation="h",
yanchor="bottom",
y=1.02,
xanchor="right",
x=1,
bgcolor='rgba(0,0,0,0)'
),
xaxis={
'title': 'Observation date'
},
yaxis={
'autorange': 'reversed',
'title': 'DC magnitude'
},
title={
"text": "pyLIMA Fit (PSPL model)",
"y": 1.01,
"yanchor": "bottom"
}
)
layout_scores = dict(
autosize=True,
automargin=True,
margin=dict(l=50, r=30, b=0, t=0),
hovermode="closest",
legend=dict(font=dict(size=10), orientation="h"),
xaxis={
'title': 'Observation date'
},
yaxis={
'title': 'Score',
'range': [0, 1]
}
)
def extract_scores(data: java.util.TreeMap) -> pd.DataFrame:
""" Extract SN scores from the data
"""
values = ['i:jd', 'd:snn_snia_vs_nonia', 'd:snn_sn_vs_all', 'd:rfscore']
pdfs = pd.DataFrame.from_dict(data, orient='index')
if pdfs.empty:
return pdfs
return pdfs[values]
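# Hedged usage sketch (the exact query call is an assumption; only the selected
# columns come from extract_scores above):
#   data = client.scan(...)                   # HBase query result for one object
#   pdf = extract_scores(data)                # columns: i:jd + the three scores
#   latest = pdf.sort_values('i:jd').iloc[-1] if not pdf.empty else None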
@app.callback(
Output('switch-mag-flux-score', 'options'),
[Input('switch-mag-flux', 'value')])
def set_radio2_options(selected_radio):
return [{'label': i, 'value': i} for i in all_radio_options[selected_radio]]
@app.callback(
Output('switch-mag-flux-score', 'value'),
[Input('switch-mag-flux-score', 'options'), Input('switch-mag-flux', 'value')])
def set_radio1_value(available_options, value):
index = [available_options.index(i) for i in available_options if i['label'] == value][0]
return available_options[index]['value']
@app.callback(
[
Output('lightcurve_cutouts', 'figure'),
Output('lightcurve_scores', 'figure')
],
[
Input('switch-mag-flux', 'value'),
Input('switch-mag-flux-score', 'value'),
Input('url', 'pathname'),
Input('object-data', 'children'),
Input('object-upper', 'children')
])
def draw_lightcurve(switch1: str, switch2: str, pathname: str, object_data, object_upper) -> dict:
    """ Draw object lightcurve with errorbars
    Parameters
    ----------
    switch{i}: str
        Choose among:
        - "Difference magnitude"
        - "DC magnitude"
        - "DC apparent flux"
    pathname: str
        Pathname of the current webpage (should be /ZTF19...).
    Returns
    ----------
    figure: dict
    """
changed_id = [p['prop_id'] for p in dash.callback_context.triggered][0]
if 'switch-mag-flux-score' in changed_id:
switch = switch2
else:
switch = switch1
pdf_ = pd.read_json(object_data)
cols = [
'i:jd', 'i:magpsf', 'i:sigmapsf', 'i:fid',
'i:magnr', 'i:sigmagnr', 'i:magzpsci', 'i:isdiffpos'
]
pdf = pdf_.loc[:, cols]
# type conversion
jd = pdf['i:jd']
jd = jd.apply(lambda x: convert_jd(float(x), to='iso'))
# shortcuts
mag = pdf['i:magpsf']
err = pdf['i:sigmapsf']
if switch == "Difference magnitude":
layout_lightcurve['yaxis']['title'] = 'Difference magnitude'
layout_lightcurve['yaxis']['autorange'] = 'reversed'
elif switch == "DC magnitude":
# inplace replacement
mag, err = np.transpose(
[
dc_mag(*args) for args in zip(
pdf['i:fid'].values,
mag.astype(float).values,
err.astype(float).values,
pdf['i:magnr'].astype(float).values,
pdf['i:sigmagnr'].astype(float).values,
pdf['i:magzpsci'].astype(float).values,
pdf['i:isdiffpos'].values
)
]
)
layout_lightcurve['yaxis']['title'] = 'Apparent DC magnitude'
layout_lightcurve['yaxis']['autorange'] = 'reversed'
elif switch == "DC apparent flux":
# inplace replacement
mag, err = np.transpose(
[
apparent_flux(*args) for args in zip(
pdf['i:fid'].astype(int).values,
mag.astype(float).values,
err.astype(float).values,
pdf['i:magnr'].astype(float).values,
pdf['i:sigmagnr'].astype(float).values,
pdf['i:magzpsci'].astype(float).values,
pdf['i:isdiffpos'].values
)
]
)
layout_lightcurve['yaxis']['title'] = 'Apparent DC flux'
layout_lightcurve['yaxis']['autorange'] = True
figure = {
'data': [
{
'x': jd[pdf['i:fid'] == 1],
'y': mag[pdf['i:fid'] == 1],
'error_y': {
'type': 'data',
'array': err[pdf['i:fid'] == 1],
'visible': True,
'color': '#1f77b4'
},
'mode': 'markers',
'name': 'g band',
'text': jd[pdf['i:fid'] == 1],
'marker': {
'size': 12,
'color': '#1f77b4',
'symbol': 'o'}
},
{
'x': jd[pdf['i:fid'] == 2],
'y': mag[pdf['i:fid'] == 2],
'error_y': {
'type': 'data',
'array': err[pdf['i:fid'] == 2],
'visible': True,
'color': '#ff7f0e'
},
'mode': 'markers',
'name': 'r band',
'text': jd[pdf['i:fid'] == 2],
'marker': {
'size': 12,
'color': '#ff7f0e',
'symbol': 'o'}
}
],
"layout": layout_lightcurve
}
if switch == "Difference magnitude":
pdf_upper = pd.read_json(object_upper)
if not pdf_upper.empty:
pdf_upper['i:jd'] = pdf_upper['i:jd'].apply(lambda x: convert_jd(float(x), to='iso'))
figure['data'].append(
{
'x': pdf_upper['i:jd'][pdf_upper['i:fid'] == 1],
'y': pdf_upper['i:diffmaglim'][pdf_upper['i:fid'] == 1],
'mode': 'markers',
'marker': {
'color': '#1f77b4',
'symbol': 'triangle-down-open'
},
'showlegend': False
}
)
figure['data'].append(
{
'x': pdf_upper['i:jd'][pdf_upper['i:fid'] == 2],
'y': pdf_upper['i:diffmaglim'][pdf_upper['i:fid'] == 2],
'mode': 'markers',
'marker': {
'color': '#ff7f0e',
'symbol': 'triangle-down-open'
},
'showlegend': False
}
)
return figure, figure
def draw_scores(data: java.util.TreeMap) -> dict:
""" Draw scores from SNN module
Parameters
----------
data: java.util.TreeMap
Results from a HBase client query
Returns
----------
figure: dict
TODO: memoise me
"""
pdf = extract_scores(data)
jd = pdf['i:jd']
jd = jd.apply(lambda x: convert_jd(float(x), to='iso'))
figure = {
'data': [
{
'x': jd,
'y': [0.5] * len(jd),
'mode': 'lines',
'showlegend': False,
'line': {
'color': 'black',
'width': 2.5,
'dash': 'dash'
}
},
{
'x': jd,
'y': pdf['d:snn_snia_vs_nonia'],
'mode': 'markers',
'name': 'SN Ia score',
'text': jd,
'marker': {
'size': 10,
'color': '#2ca02c',
'symbol': 'circle'}
},
{
'x': jd,
'y': pdf['d:snn_sn_vs_all'],
'mode': 'markers',
'name': 'SNe score',
'text': jd,
'marker': {
'size': 10,
'color': '#d62728',
'symbol': 'square'}
},
{
'x': jd,
'y': pdf['d:rfscore'],
'mode': 'markers',
'name': '<NAME>',
'text': jd,
'marker': {
'size': 10,
'color': '#9467bd',
'symbol': 'diamond'}
}
],
"layout": layout_scores
}
return figure
def extract_cutout(object_data, time0, kind):
""" Extract cutout data from the alert
Parameters
----------
object_data: json
Jsonified pandas DataFrame
time0: str
ISO time of the cutout to extract
kind: str
science, template, or difference
Returns
----------
data: np.array
2D array containing cutout data
"""
values = [
'i:jd',
'i:fid',
'b:cutout{}_stampData'.format(kind.capitalize()),
]
pdf_ = pd.read_json(object_data)
pdfs = pdf_.loc[:, values]
pdfs = pdfs.sort_values('i:jd', ascending=False)
if time0 is None:
position = 0
else:
# Round to avoid numerical precision issues
jds = pdfs['i:jd'].apply(lambda x: np.round(x, 3)).values
jd0 = np.round(Time(time0, format='iso').jd, 3)
position = np.where(jds == jd0)[0][0]
# Grab the cutout data
cutout = readstamp(
client.repository().get(
pdfs['b:cutout{}_stampData'.format(kind.capitalize())].values[position]
)
)
return cutout
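# Hedged usage sketch (the timestamp is illustrative; the callbacks below show the
# real call sites):
#   cutout = extract_cutout(object_data, '2020-01-01 00:00:00.000', kind='science')
#   cutout.shape                              # 2D stamp array, e.g. (63, 63)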
@app.callback(
Output("science-stamps", "figure"),
[
Input('lightcurve_cutouts', 'clickData'),
Input('object-data', 'children'),
])
def draw_cutouts_science(clickData, object_data):
""" Draw science cutout data based on lightcurve data
"""
if clickData is not None:
# Draw the cutout associated to the clicked data points
jd0 = clickData['points'][0]['x']
else:
# draw the cutout of the last alert
jd0 = None
data = extract_cutout(object_data, jd0, kind='science')
return draw_cutout(data, 'science')
@app.callback(
Output("template-stamps", "figure"),
[
Input('lightcurve_cutouts', 'clickData'),
Input('object-data', 'children'),
])
def draw_cutouts_template(clickData, object_data):
""" Draw template cutout data based on lightcurve data
"""
if clickData is not None:
jd0 = clickData['points'][0]['x']
else:
jd0 = None
data = extract_cutout(object_data, jd0, kind='template')
return draw_cutout(data, 'template')
@app.callback(
Output("difference-stamps", "figure"),
[
Input('lightcurve_cutouts', 'clickData'),
Input('object-data', 'children'),
])
def draw_cutouts_difference(clickData, object_data):
""" Draw difference cutout data based on lightcurve data
"""
if clickData is not None:
jd0 = clickData['points'][0]['x']
else:
jd0 = None
data = extract_cutout(object_data, jd0, kind='difference')
return draw_cutout(data, 'difference')
def draw_cutout(data, title):
""" Draw a cutout data
"""
# Update graph data for stamps
size = len(data)
data = np.nan_to_num(data)
vmax = data[int(size / 2), int(size / 2)]
vmin = np.min(data) + 0.2 * np.median(np.abs(data - np.median(data)))
data = _data_stretch(data, vmin=vmin, vmax=vmax, stretch='asinh')
data = data[::-1]
data = convolve(data, smooth=1, kernel='gauss')
fig = go.Figure(
data=go.Heatmap(
z=data, showscale=False, colorscale='Greys_r'
)
)
# Greys_r
axis_template = dict(
autorange=True,
showgrid=False, zeroline=False,
linecolor='black', showticklabels=False,
ticks='')
fig.update_layout(
title=title,
margin=dict(t=0, r=0, b=0, l=0),
xaxis=axis_template,
yaxis=axis_template,
showlegend=True,
width=150, height=150,
autosize=False)
return fig
@app.callback(
Output('variable_plot', 'figure'),
[
Input('nterms_base', 'value'),
Input('nterms_band', 'value'),
Input('manual_period', 'value'),
Input('submit_variable', 'n_clicks'),
Input('object-data', 'children')
])
def plot_variable_star(nterms_base, nterms_band, manual_period, n_clicks, object_data):
""" Fit for the period of a star using gatspy
See https://zenodo.org/record/47887
See https://ui.adsabs.harvard.edu/abs/2015ApJ...812...18V/abstract
TODO: clean me
"""
if type(nterms_base) not in [int]:
return {'data': [], "layout": | |
__author__ = 'langoureaux-s'
import init
import os
import shutil
import unittest
class InitTestCase(unittest.TestCase):
"""Tests for `init.py`."""
#@classmethod
def setUp(self):
print("Settup unit test \n")
shutil.copytree("test/fixtures/", "test/tmp/conf/");
os.makedirs("test/tmp/bin/linux-x86-64")
shutil.copy2("test/fixtures/wrapper.conf", "test/tmp/bin/linux-x86-64/")
shutil.copy2("test/fixtures/activemq", "test/tmp/bin/linux-x86-64/")
init.ACTIVEMQ_HOME = "test/tmp";
init.ACTIVEMQ_CONF = init.ACTIVEMQ_HOME + '/conf'
#@classmethod
def tearDown(self):
print("TearDown unit test \n")
shutil.rmtree("test/tmp")
def test_do_setting_activemq_users(self):
"""Check the function do_setting_activemq_users"""
init.do_setting_activemq_users("user", "password")
file = open(init.ACTIVEMQ_HOME +'/conf/users.properties', 'r')
self.assertRegexpMatches(file.read(), "\s+user=password\s+", "Problem when add user on users.properties")
file.close()
def test_do_setting_activemq_credential(self):
""" Check the function do_setting_activemq_credential """
init.do_setting_activemq_credential("user", "password")
file = open(init.ACTIVEMQ_HOME +'/conf/credentials.properties', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "activemq\.username=user", "Problem when add user on credentials.properties")
self.assertRegexpMatches(contend, "activemq\.password=password", "Problem when add user on credentials.properties")
def test_do_setting_activemq_groups(self):
"""Check the function do_setting_activemq_groups"""
init.do_setting_activemq_groups("groups", "user1,user2")
file = open(init.ACTIVEMQ_HOME +'/conf/groups.properties', 'r')
self.assertRegexpMatches(file.read(), "\s+groups=user1,user2\s+", "Problem when add user to group on groups.properties");
file.close()
def test_do_setting_activemq_jmx_access(self):
"""Check the function do_setting_activemq_jmx_access"""
init.do_setting_activemq_jmx_access("read", "user", "password")
file = open(init.ACTIVEMQ_HOME +'/conf/jmx.password', 'r')
self.assertRegexpMatches(file.read(), "\s+user password\s+", "Problem when add jmx user");
file.close()
file = open(init.ACTIVEMQ_HOME +'/conf/jmx.access', 'r')
self.assertRegexpMatches(file.read(), "\s+user read\s+", "Problem when add jmx user to role");
file.close()
def test_do_setting_activemq_web_access(self):
"""Check the function do_setting_activemq_web_access"""
init.do_setting_activemq_web_access("role", "user", "password")
file = open(init.ACTIVEMQ_HOME +'/conf/jetty-realm.properties', 'r')
self.assertRegexpMatches(file.read(), "\s+user: password, role\s+", "Problem when add user to web console");
file.close()
def test_do_setting_activemq_wrapper(self):
"""Check the function do_setting_activemq_wrapper"""
init.do_setting_activemq_wrapper(256, 512)
file = open(init.ACTIVEMQ_HOME +'/bin/linux-x86-64/wrapper.conf', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "\s+wrapper.java.initmemory=256\s+", "Problem when add min memory to wrapper");
self.assertRegexpMatches(contend, "\s+wrapper.java.maxmemory=512\s+", "Problem when add max memory to wrapper");
def test_do_setting_activemq_log4j(self):
"""Check the function do_setting_activemq_log4j"""
init.do_setting_activemq_log4j("FATAL")
file = open(init.ACTIVEMQ_HOME +'/conf/log4j.properties', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "\s+log4j.rootLogger=FATAL, console, logfile\s+", "Problem when set the log level on root logger");
self.assertRegexpMatches(contend, "\s+log4j.logger.org.apache.activemq.audit=FATAL, audit\s+", "Problem when set the log level on audit logger");
def test_do_setting_activemq_main(self):
"""Check the function do_setting_activemq_main"""
init.do_setting_activemq_main("myServer", 500, "5 gb", "1 gb", 30, 1000, "topic1;topic2;topic3", "queue1;queue2;queue3", "true")
file = open(init.ACTIVEMQ_HOME +'/conf/activemq.xml', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "\s+brokerName=\"myServer\"\s+", "Problem when set the server name")
self.assertRegexpMatches(contend, "<constantPendingMessageLimitStrategy limit=\"500\"/>", "Problem when set the message limit")
self.assertRegexpMatches(contend, "<storeUsage limit=\"5 gb\"/>", "Problem when set the storage usage")
self.assertRegexpMatches(contend, "<tempUsage limit=\"1 gb\"/>", "Problem when set the temp usage")
self.assertRegexpMatches(contend, "<transportConnector .*\?maximumConnections=30.*/>", "Problem when set the max connection on broker")
self.assertRegexpMatches(contend, "<transportConnector .*wireFormat.maxFrameSize=1000.*/>", "Problem when set the max frame size")
self.assertRegexpMatches(contend, "<broker schedulerSupport=\"true\"", "Problem when enabled scheduler")
self.assertRegexpMatches(contend, "<destinations>\s*<topic physicalName=\"topic1\"\s*/>\s*<topic physicalName=\"topic2\"\s*/>\s*<topic physicalName=\"topic3\"\s*/>\s*<queue physicalName=\"queue1\"\s*/>\s*<queue physicalName=\"queue2\"\s*/>\s*<queue physicalName=\"queue3\"\s*/>\s*</destinations>", "Problem with static topic and queue")
rightManagement = """<plugins>
<!-- use JAAS to authenticate using the login.config file on the classpath to configure JAAS -->
<jaasAuthenticationPlugin configuration="activemq" />
<authorizationPlugin>
<map>
<authorizationMap>
<authorizationEntries>
<authorizationEntry queue=">" read="admins,reads,writes,owners" write="admins,writes,owners" admin="admins,owners" />
<authorizationEntry topic=">" read="admins,reads,writes,owners" write="admins,writes,owners" admin="admins,owners" />
<authorizationEntry topic="ActiveMQ.Advisory.>" read="admins,reads,writes,owners" write="admins,reads,writes,owners" admin="admins,reads,writes,owners"/>
</authorizationEntries>
<!-- let's assign roles to temporary destinations. comment this entry if we don't want any roles assigned to temp destinations -->
<tempDestinationAuthorizationEntry>
<tempDestinationAuthorizationEntry read="tempDestinationAdmins" write="tempDestinationAdmins" admin="tempDestinationAdmins"/>
</tempDestinationAuthorizationEntry>
</authorizationMap>
</map>
</authorizationPlugin>
</plugins>\n"""
self.assertRegexpMatches(contend, rightManagement, "Problem with inject right management")
def test_do_remove_default_account(self):
"""
Check the function do_remove_default_account
"""
init.do_remove_default_account()
# We check the default value on users.properties
file = open(init.ACTIVEMQ_HOME +'/conf/users.properties', 'r')
contend = file.read()
file.close()
self.assertNotRegexpMatches(contend, "admin=admin", "Problem when remove default value on users.properties")
# We check the default value on groups.properties
file = open(init.ACTIVEMQ_HOME +'/conf/groups.properties', 'r')
contend = file.read()
file.close()
self.assertNotRegexpMatches(contend, "admins=admin", "Problem when remove the default value on groups.properties")
# We check the default value on jetty-realm.properties
file = open(init.ACTIVEMQ_HOME +'/conf/jetty-realm.properties', 'r')
contend = file.read()
file.close()
self.assertNotRegexpMatches(contend, "admin: admin, admin", "Problem when remove the default value on jetty-realm.properties")
self.assertNotRegexpMatches(contend, "user: user, user", "Problem when remove the default value on jetty-realm.properties")
# We check the default value on jmx.access and jmx.password
file = open(init.ACTIVEMQ_HOME +'/conf/jmx.access', 'r')
contend = file.read()
file.close()
self.assertNotRegexpMatches(contend, "admin readwrite", "Problem when remove the default value on jmx.access")
file = open(init.ACTIVEMQ_HOME +'/conf/jmx.password', 'r')
contend = file.read()
file.close()
self.assertNotRegexpMatches(contend, "admin activemq", "Problem when remove the default value on jmx.password")
file = open(init.ACTIVEMQ_HOME +'/conf/credentials.properties', 'r')
contend = file.read()
file.close()
self.assertNotRegexpMatches(contend, "activemq\.username=system", "Problem when remove default user on credentials.properties")
self.assertNotRegexpMatches(contend, "activemq\.password=manager", "Problem when remove default user on credentials.properties")
self.assertNotRegexpMatches(contend, "guest\.password=password", "Problem when remove default user on credentials.properties")
def test_do_init_activemq(self):
"""
Test the function do_init_activemq
:return:
"""
init.do_init_activemq()
# We check the value on init script
file = open(init.ACTIVEMQ_HOME +'/bin/linux-x86-64/activemq', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "RUN_AS_USER=activemq", "Problem when init the init script")
# We check value on wrapper
file = open(init.ACTIVEMQ_HOME +'/bin/linux-x86-64/wrapper.conf', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "set.default.ACTIVEMQ_DATA=/data/activemq", "Problem when init the wrapper.conf")
self.assertRegexpMatches(contend, "wrapper.logfile=/var/log/activemq/wrapper.log", "Problem when init the wrapper.conf")
self.assertRegexpMatches(contend, "set.default.ACTIVEMQ_CONF=%ACTIVEMQ_BASE%/conf.tmp", "Problem when init the wrapper.conf")
# We check the value on log4j
file = open(init.ACTIVEMQ_HOME +'/conf/log4j.properties', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "/var/log/activemq/", "Problem when init the log4j")
def test_setting_all(self):
"""
Check the function setting_all
"""
# Check all default value are good
init.setting_all()
# We check the default value on users.properties
file = open(init.ACTIVEMQ_HOME +'/conf/users.properties', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "admin=admin", "Problem with default value on users.properties")
# We check the default value on groups.properties
file = open(init.ACTIVEMQ_HOME +'/conf/groups.properties', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "admins=admin", "Problem with default value on groups.properties")
# We check the default value on jetty-realm.properties
file = open(init.ACTIVEMQ_HOME +'/conf/jetty-realm.properties', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "admin: admin, admin", "Problem with default value on jetty-realm.properties")
self.assertRegexpMatches(contend, "user: user, user", "Problem with default value on jetty-realm.properties")
# We check the default value on jmx.access and jmx.password
file = open(init.ACTIVEMQ_HOME +'/conf/jmx.access', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "admin readwrite", "Problem with default value on jmx.access")
file = open(init.ACTIVEMQ_HOME +'/conf/jmx.password', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "admin activemq", "Problem with default value on jmx.password")
# We check the default value on log4.properties
file = open(init.ACTIVEMQ_HOME +'/conf/log4j.properties', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "log4j\.rootLogger=INFO, console, logfile", "Problem with default value on log4j.properties")
self.assertRegexpMatches(contend, "log4j\.logger\.org\.apache\.activemq\.audit=INFO, audit", "Problem with default value on log4j.properties")
# We check the default value on wrapper.conf
file = open(init.ACTIVEMQ_HOME +'/bin/linux-x86-64/wrapper.conf', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "wrapper.java.initmemory=128", "Problem with default value on wrapper.conf");
self.assertRegexpMatches(contend, "wrapper.java.maxmemory=1024", "Problem with default value on wrapper.conf");
# We check the default value on activemq.xml
file = open(init.ACTIVEMQ_HOME +'/conf/activemq.xml', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "\s+brokerName=\"localhost\"\s+", "Problem with the default value on activemq.xml")
self.assertRegexpMatches(contend, "<constantPendingMessageLimitStrategy limit=\"1000\"/>", "Problem with the default value on activemq.xml")
self.assertRegexpMatches(contend, "<storeUsage limit=\"100 gb\"/>", "Problem with the default value on activemq.xml")
self.assertRegexpMatches(contend, "<tempUsage limit=\"50 gb\"/>", "Problem with the default value on activemq.xml")
self.assertRegexpMatches(contend, "<transportConnector .*\?maximumConnections=1000.*/>", "Problem with the default value on activemq.xml")
self.assertRegexpMatches(contend, "<transportConnector .*wireFormat.maxFrameSize=104857600.*/>", "Problem with the default value on activemq.xml")
self.assertNotRegexpMatches(contend, "<broker schedulerSupport=\"true\"", "Problem with the default value on activemq.xml")
self.assertNotRegexpMatches(contend, "<destinations>.*</destinations>", "Problem with the default value on activemq.xml")
rightManagement = """<plugins>
<!-- use JAAS to authenticate using the login.config file on the classpath to configure JAAS -->
<jaasAuthenticationPlugin configuration="activemq" />
<authorizationPlugin>
<map>
<authorizationMap>
<authorizationEntries>
<authorizationEntry queue=">" read="admins,reads,writes,owners" write="admins,writes,owners" admin="admins,owners" />
<authorizationEntry topic=">" read="admins,reads,writes,owners" write="admins,writes,owners" admin="admins,owners" />
<authorizationEntry topic="ActiveMQ.Advisory.>" read="admins,reads,writes,owners" write="admins,reads,writes,owners" admin="admins,reads,writes,owners"/>
</authorizationEntries>
<!-- let's assign roles to temporary destinations. comment this entry if we don't want any roles assigned to temp destinations -->
<tempDestinationAuthorizationEntry>
<tempDestinationAuthorizationEntry read="tempDestinationAdmins" write="tempDestinationAdmins" admin="tempDestinationAdmins"/>
</tempDestinationAuthorizationEntry>
</authorizationMap>
</map>
</authorizationPlugin>
</plugins>\n"""
self.assertRegexpMatches(contend, rightManagement, "Problem with the default value on activemq.xml")
# We check the value on init script
file = open(init.ACTIVEMQ_HOME +'/bin/linux-x86-64/activemq', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "RUN_AS_USER=activemq", "Problem when init the init script")
# We check value on wrapper
file = open(init.ACTIVEMQ_HOME +'/bin/linux-x86-64/wrapper.conf', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "set.default.ACTIVEMQ_DATA=/data/activemq", "Problem when init the wrapper.conf")
self.assertRegexpMatches(contend, "wrapper.logfile=/var/log/activemq/wrapper.log", "Problem when init the wrapper.conf")
# We check the value on log4j
file = open(init.ACTIVEMQ_HOME +'/conf/log4j.properties', 'r')
contend = file.read()
file.close()
self.assertRegexpMatches(contend, "/var/log/activemq/", "Problem when init the log4j")
########################################################################################
# We now check all parameters
os.environ["ACTIVEMQ_NAME"] = "myTest"
os.environ["ACTIVEMQ_LOGLEVEL"] = "DEBUG"
os.environ["ACTIVEMQ_PENDING_MESSAGE_LIMIT"] = "2000"
os.environ["ACTIVEMQ_STORAGE_USAGE"] = "10 gb"
os.environ["ACTIVEMQ_TEMP_USAGE"] = "5 gb"
os.environ["ACTIVEMQ_MAX_CONNECTION"] = "10"
os.environ["ACTIVEMQ_FRAME_SIZE"] = "2000000"
os.environ["ACTIVEMQ_MIN_MEMORY"] = "256"
os.environ["ACTIVEMQ_MAX_MEMORY"] = "512"
os.environ["ACTIVEMQ_ADMIN_LOGIN"] = "admin"
os.environ["ACTIVEMQ_ADMIN_PASSWORD"] = "<PASSWORD>"
os.environ["ACTIVEMQ_USER_LOGIN"] = "disaster"
os.environ["ACTIVEMQ_USER_PASSWORD"] = "<PASSWORD>"
os.environ["ACTIVEMQ_READ_LOGIN"] | |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import torch
import torch.nn as nn
import numpy as np
import torch.optim as optim
import os
import six
from six.moves import cPickle
import pickle
bad_endings = ['with','in','on','of','a','at','to','for','an','this','his','her','that']
bad_endings += ['the']
def pickle_load(f):
    """ Load a pickle.
    Parameters
    ----------
    f: file-like object
    """
    return pickle.load(f)
def pickle_dump(obj, f):
    """ Dump a pickle.
    Parameters
    ----------
    obj: pickled object
    f: file-like object
    """
    return pickle.dump(obj, f)
def if_use_feat(caption_model):
# Decide if load attention feature according to caption model
if caption_model in ['show_tell', 'all_img', 'fc', 'newfc']:
use_att, use_fc = False, True
elif caption_model == 'language_model':
use_att, use_fc = False, False
elif caption_model == 'topdown':
use_fc, use_att = True, True
else:
use_att, use_fc = True, False
return use_fc, use_att
# Input: seq, N*D numpy array, with element 0 .. vocab_size. 0 is END token.
def decode_sequence(ix_to_word, seq):
N, D = seq.size()
out = []
for i in range(N):
txt = ''
for j in range(D):
ix = seq[i,j]
if ix > 0 :
if j >= 1:
txt = txt + ' '
txt = txt + ix_to_word[str(ix.item())]
else:
break
if int(os.getenv('REMOVE_BAD_ENDINGS', '0')):
flag = 0
words = txt.split(' ')
for j in range(len(words)):
if words[-j-1] not in bad_endings:
flag = -j
break
txt = ' '.join(words[0:len(words)+flag])
out.append(txt)
return out
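# Hedged sketch of decode_sequence (the vocabulary is an assumption):
#   ix_to_word = {'1': 'a', '2': 'cat', '3': 'with'}
#   seq = torch.tensor([[1, 2, 3, 0, 0]])   # 0 is the END token
#   decode_sequence(ix_to_word, seq)        # -> ['a cat with'], or ['a cat'] when
#                                           #    REMOVE_BAD_ENDINGS=1 ('with' is a bad ending)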
def to_contiguous(tensor):
if tensor.is_contiguous():
return tensor
else:
return tensor.contiguous()
class RewardCriterion(nn.Module):
def __init__(self):
super(RewardCriterion, self).__init__()
def forward(self, input, seq, reward, gpn_loss=None):
# seq is predicted word index, input is corresponding word log probability (negative values),
# reward is generally negative values
input = to_contiguous(input).view(-1)
reward = to_contiguous(reward).view(-1)
mask = (seq>0).float()
mask = to_contiguous(torch.cat([mask.new(mask.size(0), 1).fill_(1), mask[:, :-1]], 1)).view(-1)
if gpn_loss is None:
output = - input * reward * mask
output = torch.sum(output) / torch.sum(mask)
else: # trying attach gpn_loss to the corresponding sentence loss for the input sub-graph
gpn_loss = gpn_loss.unsqueeze(1).expand(gpn_loss.size(0),seq.size(1)).contiguous().view(-1)
#model_loss = - input + gpn_loss
#output = model_loss * reward * mask
output = (- input * reward + gpn_loss * torch.exp(reward))* mask
output = torch.sum(output) / torch.sum(mask)
return output
class LanguageModelCriterion(nn.Module):
def __init__(self):
super(LanguageModelCriterion, self).__init__()
def forward(self, input, target, mask):
# truncate to the same size
target = target[:, :input.size(1)]
mask = mask[:, :input.size(1)]
# find the log probability of the gt label and then mask out the padding part
output = -input.gather(2, target.unsqueeze(2)).squeeze(2) * mask
# average over a sentence's words
output = torch.sum(output) / torch.sum(mask)
return output
class LabelSmoothing(nn.Module):
"Implement label smoothing."
def __init__(self, size=0, padding_idx=0, smoothing=0.0):
super(LabelSmoothing, self).__init__()
self.criterion = nn.KLDivLoss(size_average=False, reduce=False)
# self.padding_idx = padding_idx
self.confidence = 1.0 - smoothing
self.smoothing = smoothing
# self.size = size
self.true_dist = None
def forward(self, input, target, mask):
# truncate to the same size
target = target[:, :input.size(1)]
mask = mask[:, :input.size(1)]
input = to_contiguous(input).view(-1, input.size(-1))
target = to_contiguous(target).view(-1)
mask = to_contiguous(mask).view(-1)
# assert x.size(1) == self.size
self.size = input.size(1)
# true_dist = x.data.clone()
true_dist = input.data.clone()
# true_dist.fill_(self.smoothing / (self.size - 2))
true_dist.fill_(self.smoothing / (self.size - 1))
true_dist.scatter_(1, target.data.unsqueeze(1), self.confidence)
# true_dist[:, self.padding_idx] = 0
# mask = torch.nonzero(target.data == self.padding_idx)
# self.true_dist = true_dist
return (self.criterion(input, true_dist).sum(1) * mask).sum() / mask.sum()
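# Numeric sketch of the smoothed target distribution built in forward(): with a
# vocabulary of size 4, smoothing 0.1 and ground-truth index 2, every entry is
# filled with 0.1 / (4 - 1) ~= 0.033 and the ground-truth entry is set to the
# confidence 0.9, giving [0.033, 0.033, 0.9, 0.033] (sums to 1).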
def set_lr(optimizer, lr):
for group in optimizer.param_groups:
group['lr'] = lr
def get_lr(optimizer):
for group in optimizer.param_groups:
return group['lr']
def clip_gradient(optimizer, grad_clip):
    for group in optimizer.param_groups:
        for param in group['params']:
            if param.grad is not None:
                param.grad.data.clamp_(-grad_clip, grad_clip)
def clip_gradient_norm(optimizer, clip_norm=10.):
    """Clip the gradient norm of all parameters held by an optimizer.
    The norm is computed over all gradients together, as if they were
    concatenated into a single vector. Gradients are modified in-place.
    Arguments:
        optimizer: optimizer whose parameters' gradients are clipped
        clip_norm (float or int): max norm of the gradients
    """
totalnorm = 0
for group in optimizer.param_groups:
for p in group['params']:
if p.requires_grad and p.grad is not None:
modulenorm = p.grad.data.norm(2)
totalnorm += modulenorm ** 2
totalnorm = totalnorm ** (1. / 2) #np.sqrt(totalnorm)
norm = clip_norm / max(totalnorm, clip_norm)
for group in optimizer.param_groups:
for p in group['params']:
if p.requires_grad and p.grad is not None:
p.grad.mul_(norm)
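# Numeric sketch: with clip_norm=10 and a total gradient norm of 40, the scale is
# 10 / max(40, 10) = 0.25 and every gradient is multiplied by 0.25; when the total
# norm is already below clip_norm the scale is 10 / 10 = 1 and gradients are unchanged.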
def optimistic_restore(network, state_dict):
mismatch = False
own_state = network.state_dict()
for name, param in state_dict.items():
if name not in own_state:
print("Unexpected key {} in state_dict with size {}".format(name, param.size()))
mismatch = True
elif param.size() == own_state[name].size():
own_state[name].copy_(param)
else:
print("Network has {} with size {}, ckpt has {}".format(name,
own_state[name].size(),
param.size()))
mismatch = True
missing = set(own_state.keys()) - set(state_dict.keys())
if len(missing) > 0:
print("We couldn't find {}".format(','.join(missing)))
mismatch = True
return not mismatch
def build_optimizer(params, opt):
if opt.optim == 'rmsprop':
return optim.RMSprop(params, opt.learning_rate, opt.optim_alpha, opt.optim_epsilon, weight_decay=opt.weight_decay)
elif opt.optim == 'adagrad':
return optim.Adagrad(params, opt.learning_rate, weight_decay=opt.weight_decay)
elif opt.optim == 'sgd':
return optim.SGD(params, opt.learning_rate, weight_decay=5e-4, momentum=0.9)#opt.weight_decay)
elif opt.optim == 'sgdm':
return optim.SGD(params, opt.learning_rate, opt.optim_alpha, weight_decay=opt.weight_decay)
elif opt.optim == 'sgdmom':
return optim.SGD(params, opt.learning_rate, opt.optim_alpha, weight_decay=opt.weight_decay, nesterov=True)
elif opt.optim == 'adam':
return optim.Adam(params, opt.learning_rate, (opt.optim_alpha, opt.optim_beta), opt.optim_epsilon, weight_decay=opt.weight_decay)
elif opt.optim == 'adamw':
return optim.AdamW(params, opt.learning_rate, weight_decay=0.01)
else:
raise Exception("bad option opt.optim: {}".format(opt.optim))
def penalty_builder(penalty_config):
if penalty_config == '':
return lambda x,y: y
pen_type, alpha = penalty_config.split('_')
alpha = float(alpha)
if pen_type == 'wu':
return lambda x,y: length_wu(x,y,alpha)
if pen_type == 'avg':
return lambda x,y: length_average(x,y,alpha)
def length_wu(length, logprobs, alpha=0.):
"""
NMT length re-ranking score from
"Google's Neural Machine Translation System" :cite:`wu2016google`.
"""
modifier = (((5 + length) ** alpha) /
((5 + 1) ** alpha))
return (logprobs / modifier)
def length_average(length, logprobs, alpha=0.):
"""
Returns the average probability of tokens in a sequence.
"""
return logprobs / length
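# Numeric sketch of the wu penalty: with alpha = 0.7 and length 20 the modifier is
# ((5 + 20) ** 0.7) / ((5 + 1) ** 0.7) ~= 9.52 / 3.51 ~= 2.71, so log-probabilities
# of longer sequences are divided by a larger factor than those of shorter ones.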
class NoamOpt(object):
"Optim wrapper that implements rate."
def __init__(self, model_size, factor, warmup, optimizer):
self.optimizer = optimizer
self._step = 0
self.warmup = warmup
self.factor = factor
self.model_size = model_size
self._rate = 0
def step(self):
"Update parameters and rate"
self._step += 1
rate = self.rate()
for p in self.optimizer.param_groups:
p['lr'] = rate
self._rate = rate
self.optimizer.step()
def rate(self, step = None):
"Implement `lrate` above"
if step is None:
step = self._step
return self.factor * \
(self.model_size ** (-0.5) *
min(step ** (-0.5), step * self.warmup ** (-1.5)))
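    # Numeric sketch of the schedule: with model_size=512, factor=1 and warmup=4000
    # the rate grows linearly as step * 4000 ** -1.5 during warmup, then decays as
    # step ** -0.5; the two branches meet at step == warmup, where
    # rate = 512 ** -0.5 * 4000 ** -0.5 ~= 7.0e-4.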
def __getattr__(self, name):
return getattr(self.optimizer, name)
class ReduceLROnPlateau(object):
"Optim wrapper that implements rate."
def __init__(self, optimizer, mode='min', factor=0.1, patience=10, verbose=False, threshold=0.0001, threshold_mode='rel', cooldown=0, min_lr=0, eps=1e-08):
self.scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode, factor, patience, verbose, threshold, threshold_mode, cooldown, min_lr, eps)
self.optimizer = optimizer
self.current_lr = get_lr(optimizer)
def step(self):
"Update parameters and rate"
self.optimizer.step()
def scheduler_step(self, val):
self.scheduler.step(val)
self.current_lr = get_lr(self.optimizer)
def state_dict(self):
return {'current_lr':self.current_lr,
'scheduler_state_dict': self.scheduler.state_dict(),
'optimizer_state_dict': self.optimizer.state_dict()}
def load_state_dict(self, state_dict):
if 'current_lr' not in state_dict:
# it's normal optimizer
self.optimizer.load_state_dict(state_dict)
            set_lr(self.optimizer, self.current_lr) # use the lr from the option
else:
            # it's a scheduler
self.current_lr = state_dict['current_lr']
self.scheduler.load_state_dict(state_dict['scheduler_state_dict'])
self.optimizer.load_state_dict(state_dict['optimizer_state_dict'])
# current_lr is actually useless in this case
    def rate(self, step=None):
        "Implement `lrate` above"
        # Note: copied from NoamOpt; _step, factor, model_size and warmup are not
        # defined on this wrapper and would be resolved (and likely fail) on the
        # wrapped optimizer via __getattr__.
        if step is None:
            step = self._step
        return self.factor * \
            (self.model_size ** (-0.5) *
            min(step ** (-0.5), step * self.warmup ** (-1.5)))
def __getattr__(self, name):
return getattr(self.optimizer, name)
def get_std_opt(model, param, factor=1, warmup=2000):
# return NoamOpt(model.tgt_embed[0].d_model, 2, 4000,
# torch.optim.Adam(model.parameters(), lr=0, betas=(0.9, 0.98), eps=1e-9))
return NoamOpt(model.model.tgt_embed[0].d_model, factor, warmup,
torch.optim.Adam(param, lr=0, betas=(0.9, 0.98), eps=1e-9))
def obj_edge_vectors(names, wv_type='glove.6B', wv_dir='data/', wv_dim=300):
wv_dict, wv_arr, wv_size = load_word_vectors(wv_dir, wv_type, wv_dim)
vectors = torch.Tensor(len(names), wv_dim)
vectors.normal_(0,1)
for i, token in enumerate(names):
if token == "brocolli":
token = "broccoli"
if token == "sandwhich":
token = "sandwich"
if token == "kneepad":
token = "knee pad"
if token == "skiis":
token = "skis"
if token == "tshirt":
token = "shirt"
wv_index = wv_dict.get(token, None)
if wv_index is not None:
vectors[i] = wv_arr[wv_index]
else:
# try average for predicate
token_list = token.split(" ")
#print(token)
got = 0
for i in range(len(token_list)):
wv_index_i = wv_dict.get(token_list[i], None)
if wv_index_i is not None:
#print("Get token: {}".format(token_list[i]))
if got == 0:
temp = wv_arr[wv_index_i]
else:
temp += wv_arr[wv_index_i]
                    got +=
<reponame>athenianco/athenian-api<filename>server/athenian/api/models/web/jira_epic_issue_common.py<gh_stars>1-10
from datetime import datetime, timedelta
from typing import Optional
from athenian.api.models.web.base_model_ import Model
class JIRAEpicIssueCommon(Model):
"""Common JIRA issue fields."""
openapi_types = {
"id": str,
"title": str,
"created": datetime,
"updated": datetime,
"work_began": Optional[datetime],
"resolved": Optional[datetime],
"lead_time": Optional[timedelta],
"life_time": timedelta,
"reporter": str,
"assignee": Optional[str],
"comments": int,
"priority": str,
"status": str,
"url": str,
}
attribute_map = {
"id": "id",
"title": "title",
"created": "created",
"updated": "updated",
"work_began": "work_began",
"resolved": "resolved",
"lead_time": "lead_time",
"life_time": "life_time",
"reporter": "reporter",
"assignee": "assignee",
"comments": "comments",
"priority": "priority",
"status": "status",
"url": "url",
}
__enable_slots__ = False
def __init__(self,
id: Optional[str] = None,
title: Optional[str] = None,
created: Optional[datetime] = None,
updated: Optional[datetime] = None,
work_began: Optional[datetime] = None,
resolved: Optional[datetime] = None,
lead_time: Optional[timedelta] = None,
life_time: Optional[timedelta] = None,
reporter: Optional[str] = None,
assignee: Optional[str] = None,
comments: Optional[int] = None,
priority: Optional[str] = None,
status: Optional[str] = None,
url: Optional[str] = None,
):
"""JIRAEpicChild - a model defined in OpenAPI
:param id: The id of this JIRAEpicIssueCommon.
:param title: The title of this JIRAEpicIssueCommon.
:param created: The created of this JIRAEpicIssueCommon.
:param updated: The updated of this JIRAEpicIssueCommon.
:param work_began: The work_began of this JIRAEpicIssueCommon.
:param resolved: The resolved of this JIRAEpicIssueCommon.
:param lead_time: The lead_time of this JIRAEpicIssueCommon.
:param life_time: The life_time of this JIRAEpicIssueCommon.
:param reporter: The reporter of this JIRAEpicIssueCommon.
:param assignee: The assignee of this JIRAEpicIssueCommon.
:param comments: The comments of this JIRAEpicIssueCommon.
:param priority: The priority of this JIRAEpicIssueCommon.
:param status: The status of this JIRAEpicIssueCommon.
:param url: The url of this JIRAEpicIssueCommon.
"""
self._id = id
self._title = title
self._created = created
self._updated = updated
self._work_began = work_began
self._resolved = resolved
self._lead_time = lead_time
self._life_time = life_time
self._reporter = reporter
self._assignee = assignee
self._comments = comments
self._priority = priority
self._status = status
self._url = url
def __lt__(self, other: "JIRAEpicIssueCommon") -> bool:
"""Support sorting."""
return self._id < other._id
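    # With __lt__ defined on the issue key, lists of issues sort with the built-in
    # sorted(); note the comparison is lexicographic on the key string, so
    # "DEV-10" orders before "DEV-2".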
@property
def id(self) -> str:
"""Gets the id of this JIRAEpicIssueCommon.
JIRA issue key `PROJECT-###`.
:return: The id of this JIRAEpicIssueCommon.
"""
return self._id
@id.setter
def id(self, id: str):
"""Sets the id of this JIRAEpicIssueCommon.
JIRA issue key `PROJECT-###`.
:param id: The id of this JIRAEpicIssueCommon.
"""
if id is None:
raise ValueError("Invalid value for `id`, must not be `None`")
self._id = id
@property
def title(self) -> str:
"""Gets the title of this JIRAEpicIssueCommon.
Title of this issue.
:return: The title of this JIRAEpicIssueCommon.
"""
return self._title
@title.setter
def title(self, title: str):
"""Sets the title of this JIRAEpicIssueCommon.
Title of this issue.
:param title: The title of this JIRAEpicIssueCommon.
"""
if title is None:
raise ValueError("Invalid value for `title`, must not be `None`")
self._title = title
@property
def created(self) -> datetime:
"""Gets the created of this JIRAEpicIssueCommon.
When this issue was created.
:return: The created of this JIRAEpicIssueCommon.
"""
return self._created
@created.setter
def created(self, created: datetime):
"""Sets the created of this JIRAEpicIssueCommon.
When this issue was created.
:param created: The created of this JIRAEpicIssueCommon.
"""
if created is None:
raise ValueError("Invalid value for `created`, must not be `None`")
self._created = created
@property
def updated(self) -> datetime:
"""Gets the updated of this JIRAEpicIssueCommon.
When this issue was updated.
:return: The updated of this JIRAEpicIssueCommon.
"""
return self._updated
@updated.setter
def updated(self, updated: datetime):
"""Sets the updated of this JIRAEpicIssueCommon.
When this issue was updated.
:param updated: The updated of this JIRAEpicIssueCommon.
"""
if updated is None:
raise ValueError("Invalid value for `updated`, must not be `None`")
self._updated = updated
@property
def work_began(self) -> Optional[datetime]:
"""Gets the work_began of this JIRAEpicIssueCommon.
When the issue entered the "In Progress" stage. This timestamp can be missing and is always
less than or equal to `resolved`.
:return: The work_began of this JIRAEpicIssueCommon.
"""
return self._work_began
@work_began.setter
def work_began(self, work_began: Optional[datetime]):
"""Sets the work_began of this JIRAEpicIssueCommon.
When the issue entered the "In Progress" stage. This timestamp can be missing and is always
less than or equal to `resolved`.
:param work_began: The work_began of this JIRAEpicIssueCommon.
"""
self._work_began = work_began
@property
def resolved(self) -> Optional[datetime]:
"""Gets the resolved of this JIRAEpicIssueCommon.
When the issue was marked as completed. This timestamp can be missing and is always greater
than or equal to `work_began`.
:return: The resolved of this JIRAEpicIssueCommon.
"""
return self._resolved
@resolved.setter
def resolved(self, resolved: Optional[datetime]):
"""Sets the resolved of this JIRAEpicIssueCommon.
When the issue was marked as completed. This timestamp can be missing and is always greater
than or equal to `work_began`.
:param resolved: The resolved of this JIRAEpicIssueCommon.
"""
self._resolved = resolved
@property
def lead_time(self) -> Optional[timedelta]:
"""Gets the lead_time of this JIRAEpicIssueCommon.
Issue's time spent between `work_began` and `resolved`. If not resolved, \
between `work_began` and `now()`.
:return: The lead_time of this JIRAEpicIssueCommon.
"""
return self._lead_time
@lead_time.setter
def lead_time(self, lead_time: Optional[timedelta]):
"""Sets the lead_time of this JIRAEpicIssueCommon.
Issue's time spent between `work_began` and `resolved`. If not resolved, \
between `work_began` and `now()`.
:param lead_time: The lead_time of this JIRAEpicIssueCommon.
"""
self._lead_time = lead_time
@property
def life_time(self) -> timedelta:
"""Gets the life_time of this JIRAEpicIssueCommon.
Issue's time spent between `created` and `resolved`. If not resolved, \
between `created` and `now()`.
:return: The life_time of this JIRAEpicIssueCommon.
"""
return self._life_time
@life_time.setter
def life_time(self, life_time: timedelta):
"""Sets the life_time of this JIRAEpicIssueCommon.
Issue's time spent between `created` and `resolved`. If not resolved, \
between `created` and `now()`.
:param life_time: The life_time of this JIRAEpicIssueCommon.
"""
if life_time is None:
raise ValueError("Invalid value for `life_time`, must not be `None`")
self._life_time = life_time
@property
def reporter(self) -> str:
"""Gets the reporter of this JIRAEpicIssueCommon.
Name of the person who reported the issue.
:return: The reporter of this JIRAEpicIssueCommon.
"""
return self._reporter
@reporter.setter
def reporter(self, reporter: str):
"""Sets the reporter of this JIRAEpicIssueCommon.
Name of the person who reported the issue.
:param reporter: The reporter of this JIRAEpicIssueCommon.
"""
if reporter is None:
raise ValueError("Invalid value for `reporter`, must not be `None`")
self._reporter = reporter
@property
def assignee(self) -> Optional[str]:
"""Gets the assignee of this JIRAEpicIssueCommon.
Name of the assigned person.
:return: The assignee of this JIRAEpicIssueCommon.
"""
return self._assignee
@assignee.setter
def assignee(self, assignee: Optional[str]):
"""Sets the assignee of this JIRAEpicIssueCommon.
Name of the assigned person.
:param assignee: The assignee of this JIRAEpicIssueCommon.
"""
self._assignee = assignee
@property
def comments(self) -> int:
"""Gets the comments of this JIRAEpicIssueCommon.
Number of comments in the issue excluding sub-tasks.
:return: The comments of this JIRAEpicIssueCommon.
"""
return self._comments
@comments.setter
def comments(self, comments: int):
"""Sets the comments of this JIRAEpicIssueCommon.
Number of comments in the issue excluding sub-tasks.
:param comments: The comments of this JIRAEpicIssueCommon.
"""
if comments is None:
raise ValueError("Invalid value for `comments`, must not be `None`")
self._comments = comments
@property
def priority(self) -> str:
"""Gets the priority of this JIRAEpicIssueCommon.
Name of the priority. The details are returned in `FilteredJIRAStuff.priorities`.
:return: The priority of this JIRAEpicIssueCommon.
"""
return self._priority
@priority.setter
def priority(self, priority: str):
"""Sets the priority of this JIRAEpicIssueCommon.
Name of the priority. The details are returned in `FilteredJIRAStuff.priorities`.
:param priority: The priority of this JIRAEpicIssueCommon.
"""
if priority is None:
raise ValueError("Invalid value for `priority`, must not be `None`")
self._priority = priority
@property
def status(self) -> str:
"""Gets the status of this JIRAEpicIssueCommon.
Name of the status. The details are returned in `FilteredJIRAStuff.statuses`.
:return: The status of this JIRAEpicIssueCommon.
"""
return self._status
@status.setter
def status(self, status: str):
"""Sets the status of this JIRAEpicIssueCommon.
Name of the status. The details are returned in `FilteredJIRAStuff.statuses`.
:param status: The status of this JIRAEpicIssueCommon.
"""
if status is None:
raise ValueError("Invalid value for `status`, must not be `None`")
self._status = status
@property
def url(self) -> str:
"""Gets the url of this JIRAEpicIssueCommon.
Link to the issue in JIRA web application.
:return: The url of this JIRAEpicIssueCommon.
"""
return self._url
@url.setter
def url(self, url: str):
"""Sets the url of this JIRAEpicIssueCommon.
Link to the issue in JIRA web application.
:param url: The url of this JIRAEpicIssueCommon.
"""
if url is None:
raise ValueError("Invalid value for `url`, must not be `None`")
self._url = url
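# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal example of how this model behaves, assuming only the class above:
# the constructor assigns the private fields directly, the setters validate
# required fields against None, and __lt__ orders instances by issue key.
# All sample values below are made up.
if __name__ == "__main__":
    from datetime import datetime, timedelta

    a = JIRAEpicIssueCommon(id="PROJ-1", title="Fix login", created=datetime(2021, 1, 1),
                            updated=datetime(2021, 1, 2), life_time=timedelta(days=1),
                            reporter="alice", comments=3, priority="High",
                            status="Done", url="https://jira.example.com/browse/PROJ-1")
    b = JIRAEpicIssueCommon(id="PROJ-2", title="Add search", created=datetime(2021, 1, 3),
                            updated=datetime(2021, 1, 3), life_time=timedelta(hours=4),
                            reporter="bob", comments=0, priority="Low",
                            status="Open", url="https://jira.example.com/browse/PROJ-2")
    assert sorted([b, a])[0].id == "PROJ-1"   # __lt__ compares the issue keys
    try:
        a.title = None                        # validating setter rejects None
    except ValueError as exc:
        print(exc)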
60.92*m.x369 - 60.92*m.x378 - 60.92*m.x395 - 47.68*m.x406
- 47.68*m.x424 - 47.68*m.x438 - 47.68*m.x449 - 1.76*m.x460 - 1.76*m.x478 - 1.76*m.x495
- 1.76*m.x502 - 1.76*m.x511 - 1.76*m.x522 - 24.98*m.x543 - 24.98*m.x558 - 24.98*m.x566
- 24.98*m.x583 + 9.12*m.x604 + 9.12*m.x628 + 9.12*m.x635 + 9.12*m.x644 + 9.12*m.x655
- 45.03*m.x666 - 45.03*m.x690 - 45.03*m.x701 - 40.42*m.x712 - 40.42*m.x736 - 40.42*m.x743
- 49.64*m.x752 - 49.64*m.x770 - 49.64*m.x778 - 49.64*m.x785 - 50.15*m.x794 - 50.15*m.x820
- 50.15*m.x828 - 50.15*m.x839 - 6.81*m.x851 - 40.39*m.x862 - 58.3*m.x905 - 58.3*m.x913
- 58.3*m.x930 - 43.27*m.x952 - 43.27*m.x967 - 22.8*m.x978 - 22.8*m.x1006 + 7.17*m.x1020
+ 7.17*m.x1037 + 7.17*m.x1048 + 6.89*m.x1083 - 40.39*m.x1211 - 58.3*m.x1217 <= 0)
m.c185 = Constraint(expr= 0.109999999999999*m.x91 + 0.109999999999999*m.x99 + 0.109999999999999*m.x123
+ 0.109999999999999*m.x132 - 19.1*m.x163 - 19.1*m.x170 - 11.04*m.x185 - 11.04*m.x203
- 11.04*m.x218 - 11.04*m.x233 - 37.9*m.x254 - 37.9*m.x267 - 37.9*m.x276 - 37.9*m.x293
- 12.23*m.x304 - 12.23*m.x322 - 12.23*m.x330 - 12.23*m.x337 - 39.02*m.x346 - 39.02*m.x362
- 39.02*m.x369 - 39.02*m.x378 - 39.02*m.x395 - 9.07*m.x406 - 9.07*m.x424 - 9.07*m.x438
- 9.07*m.x449 + 5.06*m.x460 + 5.06*m.x478 + 5.06*m.x495 + 5.06*m.x502 + 5.06*m.x511
+ 5.06*m.x522 + 5.25*m.x543 + 5.25*m.x558 + 5.25*m.x566 + 5.25*m.x583 - 16.22*m.x604
- 16.22*m.x628 - 16.22*m.x635 - 16.22*m.x644 - 16.22*m.x655 - 11.43*m.x666 - 11.43*m.x690
- 11.43*m.x701 - 6.92*m.x712 - 6.92*m.x736 - 6.92*m.x743 - 59.57*m.x752 - 59.57*m.x770
- 59.57*m.x778 - 59.57*m.x785 - 9.16*m.x794 - 9.16*m.x820 - 9.16*m.x828 - 9.16*m.x839
- 46.34*m.x851 + 9.83*m.x862 - 3.38*m.x905 - 3.38*m.x913 - 3.38*m.x930 - 63.36*m.x952
- 63.36*m.x967 - 60.66*m.x978 - 60.66*m.x1006 - 30.1*m.x1020 - 30.1*m.x1037 - 30.1*m.x1048
- 11.04*m.x1083 + 9.83*m.x1211 - 3.38*m.x1217 <= 0)
m.c186 = Constraint(expr= - 16.94*m.x91 - 16.94*m.x99 - 16.94*m.x123 - 16.94*m.x132 - 13.89*m.x163 - 13.89*m.x170
- 60.55*m.x185 - 60.55*m.x203 - 60.55*m.x218 - 60.55*m.x233 - 11.84*m.x254 - 11.84*m.x267
- 11.84*m.x276 - 11.84*m.x293 - 63.51*m.x304 - 63.51*m.x322 - 63.51*m.x330 - 63.51*m.x337
- 26.1*m.x346 - 26.1*m.x362 - 26.1*m.x369 - 26.1*m.x378 - 26.1*m.x395 - 15.71*m.x406
- 15.71*m.x424 - 15.71*m.x438 - 15.71*m.x449 - 4.76*m.x460 - 4.76*m.x478 - 4.76*m.x495
- 4.76*m.x502 - 4.76*m.x511 - 4.76*m.x522 - 54.07*m.x543 - 54.07*m.x558 - 54.07*m.x566
- 54.07*m.x583 - 25.82*m.x604 - 25.82*m.x628 - 25.82*m.x635 - 25.82*m.x644 - 25.82*m.x655
+ 3.61*m.x666 + 3.61*m.x690 + 3.61*m.x701 - 25.1*m.x712 - 25.1*m.x736 - 25.1*m.x743
+ 1.61*m.x752 + 1.61*m.x770 + 1.61*m.x778 + 1.61*m.x785 - 62.63*m.x794 - 62.63*m.x820
- 62.63*m.x828 - 62.63*m.x839 - 59.1*m.x851 - 52.7*m.x862 - 6.28*m.x905 - 6.28*m.x913
- 6.28*m.x930 - 12.36*m.x952 - 12.36*m.x967 - 47.08*m.x978 - 47.08*m.x1006 - 41.12*m.x1020
- 41.12*m.x1037 - 41.12*m.x1048 - 60.55*m.x1083 - 52.7*m.x1211 - 6.28*m.x1217 <= 0)
m.c187 = Constraint(expr= - 10.2*m.x91 - 10.2*m.x99 - 10.2*m.x123 - 10.2*m.x132 - 5.37*m.x163 - 5.37*m.x170
- 62.33*m.x185 - 62.33*m.x203 - 62.33*m.x218 - 62.33*m.x233 - 58.49*m.x254 - 58.49*m.x267
- 58.49*m.x276 - 58.49*m.x293 - 6.19*m.x304 - 6.19*m.x322 - 6.19*m.x330 - 6.19*m.x337
- 48.45*m.x346 - 48.45*m.x362 - 48.45*m.x369 - 48.45*m.x378 - 48.45*m.x395 - 67.17*m.x406
- 67.17*m.x424 - 67.17*m.x438 - 67.17*m.x449 - 31.32*m.x460 - 31.32*m.x478 - 31.32*m.x495
- 31.32*m.x502 - 31.32*m.x511 - 31.32*m.x522 - 23.94*m.x543 - 23.94*m.x558 - 23.94*m.x566
- 23.94*m.x583 + 0.140000000000001*m.x604 + 0.140000000000001*m.x628
+ 0.140000000000001*m.x635 + 0.140000000000001*m.x644 + 0.140000000000001*m.x655
- 33.46*m.x666 - 33.46*m.x690 - 33.46*m.x701 - 69.19*m.x712 - 69.19*m.x736 - 69.19*m.x743
- 17.44*m.x752 - 17.44*m.x770 - 17.44*m.x778 - 17.44*m.x785 - 13.84*m.x794 - 13.84*m.x820
- 13.84*m.x828 - 13.84*m.x839 - 68.97*m.x851 - 4.62*m.x862 - 7.11*m.x905 - 7.11*m.x913
- 7.11*m.x930 - 52.47*m.x952 - 52.47*m.x967 - 35.08*m.x978 - 35.08*m.x1006 - 2.15*m.x1020
- 2.15*m.x1037 - 2.15*m.x1048 - 62.33*m.x1083 - 4.62*m.x1211 - 7.11*m.x1217 <= 0)
m.c188 = Constraint(expr= - 74.75*m.x91 - 74.75*m.x99 - 74.75*m.x123 - 74.75*m.x132 - 54.54*m.x163 - 54.54*m.x170
- 49.34*m.x185 - 49.34*m.x203 - 49.34*m.x218 - 49.34*m.x233 - 50.1*m.x254 - 50.1*m.x267
- 50.1*m.x276 - 50.1*m.x293 - 28.43*m.x304 - 28.43*m.x322 - 28.43*m.x330 - 28.43*m.x337
- 42.45*m.x346 - 42.45*m.x362 - 42.45*m.x369 - 42.45*m.x378 - 42.45*m.x395 + 2.27*m.x406
+ 2.27*m.x424 + 2.27*m.x438 + 2.27*m.x449 - 8.33*m.x460 - 8.33*m.x478 - 8.33*m.x495
- 8.33*m.x502 - 8.33*m.x511 - 8.33*m.x522 - 28.55*m.x543 - 28.55*m.x558 - 28.55*m.x566
- 28.55*m.x583 - 29.91*m.x604 - 29.91*m.x628 - 29.91*m.x635 - 29.91*m.x644 - 29.91*m.x655
- 4.4*m.x666 - 4.4*m.x690 - 4.4*m.x701 - 53.94*m.x712 - 53.94*m.x736 - 53.94*m.x743
- 49.09*m.x752 - 49.09*m.x770 - 49.09*m.x778 - 49.09*m.x785 - 4.22*m.x794 - 4.22*m.x820
- 4.22*m.x828 - 4.22*m.x839 - 41.22*m.x851 - 63.56*m.x862 - 4.61*m.x905 - 4.61*m.x913
- 4.61*m.x930 + 0.27*m.x952 + 0.27*m.x967 - 56.05*m.x978 - 56.05*m.x1006 - 35.81*m.x1020
- 35.81*m.x1037 - 35.81*m.x1048 - 49.34*m.x1083 - 63.56*m.x1211 - 4.61*m.x1217 <= 0)
m.c189 = Constraint(expr= - 2.3*m.x91 - 2.3*m.x99 - 2.3*m.x123 - 2.3*m.x132 - 56.41*m.x163 - 56.41*m.x170 - 60.75*m.x185
- 60.75*m.x203 - 60.75*m.x218 - 60.75*m.x233 - 4.25*m.x254 - 4.25*m.x267 - 4.25*m.x276
- 4.25*m.x293 - 26.96*m.x304 - 26.96*m.x322 - 26.96*m.x330 - 26.96*m.x337 - 51.07*m.x346
- 51.07*m.x362 - 51.07*m.x369 - 51.07*m.x378 - 51.07*m.x395 - 39.64*m.x406 - 39.64*m.x424
- 39.64*m.x438 - 39.64*m.x449 - 50.63*m.x460 - 50.63*m.x478 - 50.63*m.x495 - 50.63*m.x502
- 50.63*m.x511 - 50.63*m.x522 - 5.28*m.x543 - 5.28*m.x558 - 5.28*m.x566 - 5.28*m.x583
- 67.95*m.x604 - 67.95*m.x628 - 67.95*m.x635 - 67.95*m.x644 - 67.95*m.x655 - 53.54*m.x666
- 53.54*m.x690 - 53.54*m.x701 - 19.99*m.x712 - 19.99*m.x736 - 19.99*m.x743 - 38.6*m.x752
- 38.6*m.x770 - 38.6*m.x778 - 38.6*m.x785 - 23.94*m.x794 - 23.94*m.x820 - 23.94*m.x828
- 23.94*m.x839 - 35.37*m.x851 - 32.94*m.x862 - 25.31*m.x905 - 25.31*m.x913 - 25.31*m.x930
- 2.98*m.x952 - 2.98*m.x967 - 69.32*m.x978 - 69.32*m.x1006 - 36.49*m.x1020 - 36.49*m.x1037
- 36.49*m.x1048 - 60.75*m.x1083 - 32.94*m.x1211 - 25.31*m.x1217 <= 0)
m.c190 = Constraint(expr= m.x91 + m.x99 + m.x123 + m.x132 - 28.51*m.x163 - 28.51*m.x170 - 11.3*m.x185 - 11.3*m.x203
- 11.3*m.x218 - 11.3*m.x233 - 49*m.x254 - 49*m.x267 - 49*m.x276 - 49*m.x293 + 7.94*m.x304
+ 7.94*m.x322 + 7.94*m.x330 + 7.94*m.x337 - 55.87*m.x346 - 55.87*m.x362 - 55.87*m.x369
- 55.87*m.x378 - 55.87*m.x395 - 57.96*m.x406 - 57.96*m.x424 - 57.96*m.x438 - 57.96*m.x449
- 57.02*m.x460 - 57.02*m.x478 - 57.02*m.x495 - 57.02*m.x502 - 57.02*m.x511 - 57.02*m.x522
+ 16.41*m.x543 + 16.41*m.x558 + 16.41*m.x566 + 16.41*m.x583 - 31.16*m.x604 - 31.16*m.x628
- 31.16*m.x635 - 31.16*m.x644 - 31.16*m.x655 - 58.17*m.x666 - 58.17*m.x690 - 58.17*m.x701
- 43.87*m.x712 - 43.87*m.x736 - 43.87*m.x743 - 3.02*m.x752 - 3.02*m.x770 - 3.02*m.x778
- 3.02*m.x785 + 7.17*m.x794 + 7.17*m.x820 + 7.17*m.x828 + 7.17*m.x839 - 46.52*m.x851
- 24.33*m.x862 - 0.709999999999997*m.x905 - 0.709999999999997*m.x913
- 0.709999999999997*m.x930 - 54.24*m.x952 - 54.24*m.x967 - 38.61*m.x978 - 38.61*m.x1006
- 45.57*m.x1020 - 45.57*m.x1037 - 45.57*m.x1048 - 11.3*m.x1083 - 24.33*m.x1211
- 0.709999999999997*m.x1217 <= 0)
m.c191 = Constraint(expr= 56.82*m.x92 + 56.82*m.x100 + 56.82*m.x105 + 56.82*m.x114 + 56.82*m.x133 + 56.4*m.x152
+ 56.4*m.x171 + 50.98*m.x186 + 50.98*m.x204 + 50.98*m.x209 + 50.98*m.x234 + 23.26*m.x255
+ 23.26*m.x260 + 23.26*m.x268 + 23.26*m.x294 + 19.52*m.x305 + 19.52*m.x323 + 19.52*m.x338
+ 7.29*m.x347 + 7.29*m.x353 + 7.29*m.x370 + 7.29*m.x396 + 58.34*m.x407 + 58.34*m.x425
+ 58.34*m.x430 + 58.34*m.x450 + 10.91*m.x461 + 10.91*m.x479 + 10.91*m.x484 + 10.91*m.x503
+ 10.91*m.x523 + 45.48*m.x544 + 45.48*m.x549 + 45.48*m.x584 + 6.48*m.x605 + 6.48*m.x610
+ 6.48*m.x619 + 6.48*m.x636 + 6.48*m.x656 + 36.75*m.x667 + 36.75*m.x673 + 36.75*m.x682
+ 36.75*m.x702 + 2.52*m.x713 + 2.52*m.x718 + 2.52*m.x727 + 2.52*m.x744 - 8.22*m.x753
- 8.22*m.x771 - 8.22*m.x786 + 4.32*m.x795 + 4.32*m.x811 + 4.32*m.x840 - 5.91*m.x852
+ 26.06*m.x863 + 26.06*m.x879 - 14.4*m.x894 - 14.4*m.x931 + 37.97*m.x968 + 51.29*m.x979
+ 51.29*m.x995 + 55.41*m.x1021 + 55.41*m.x1027 + 55.41*m.x1049 + 56.82*m.x1063 + 50.98*m.x1084
+ 7.29*m.x1113 + 10.91*m.x1137 + 36.75*m.x1168 - 8.22*m.x1187 + 4.32*m.x1197 + 51.29*m.x1237
<= 0)
m.c192 = Constraint(expr= - 41.97*m.x92 - 41.97*m.x100 - 41.97*m.x105 - 41.97*m.x114 - 41.97*m.x133 - 2.95*m.x152
- 2.95*m.x171 + 2.96*m.x186 + 2.96*m.x204 + 2.96*m.x209 + 2.96*m.x234 + 11.98*m.x255
+ 11.98*m.x260 + 11.98*m.x268 + 11.98*m.x294 + 20.54*m.x305 + 20.54*m.x323 + 20.54*m.x338
+ 4.71*m.x347 + 4.71*m.x353 + 4.71*m.x370 + 4.71*m.x396 - 6.91*m.x407 - 6.91*m.x425
- 6.91*m.x430 - 6.91*m.x450 - 29.07*m.x461 - 29.07*m.x479 - 29.07*m.x484 - 29.07*m.x503
- 29.07*m.x523 - 22.32*m.x544 - 22.32*m.x549 - 22.32*m.x584 - 45.67*m.x605 - 45.67*m.x610
- 45.67*m.x619 - 45.67*m.x636 - 45.67*m.x656 - 36.13*m.x667 - 36.13*m.x673 - 36.13*m.x682
- 36.13*m.x702 - 25.85*m.x713 - 25.85*m.x718 - 25.85*m.x727 - 25.85*m.x744 - 12.78*m.x753
- 12.78*m.x771 - 12.78*m.x786 + 17.21*m.x795 + 17.21*m.x811 + 17.21*m.x840 + 16.92*m.x852
+ 13.2*m.x863 + 13.2*m.x879 + 18.39*m.x894 + 18.39*m.x931 - 32.65*m.x968
+ 0.409999999999997*m.x979 + 0.409999999999997*m.x995 - 31.83*m.x1021 - 31.83*m.x1027
- 31.83*m.x1049 - 41.97*m.x1063 + 2.96*m.x1084 + 4.71*m.x1113 - 29.07*m.x1137 - 36.13*m.x1168
- 12.78*m.x1187 + 17.21*m.x1197 + 0.409999999999997*m.x1237 <= 0)
m.c193 = Constraint(expr= - 14.44*m.x92 - 14.44*m.x100 - 14.44*m.x105
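# --- Illustration (added; not part of the generated model) ---
# The m.c185..m.c193 blocks above are machine-generated Pyomo constraints. As a
# hand-written sketch of the same construct, this toy model shows how such a
# linear <=-0 constraint is declared; the variable names and coefficients here
# are invented for the example.
from pyomo.environ import ConcreteModel, Var, Constraint, NonNegativeReals

toy = ConcreteModel()
toy.x1 = Var(within=NonNegativeReals)
toy.x2 = Var(within=NonNegativeReals)
# Same shape as the constraints above: a linear expression bounded above by 0.
toy.c1 = Constraint(expr= 0.11*toy.x1 - 19.1*toy.x2 <= 0)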
{
"basetype" : "Enumeration",
"normal" : {
"nodetype" : "namednumber",
"number" : "0"
},
"immediate" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fast" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"multicastPortLeaveTimeout" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.28.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""200..6348800""",
}, # column
"multicastPortFastLeaveTimeout" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.28.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""200..6348800""",
}, # column
"multicastStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29",
}, # node
"multicastStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.1",
"status" : "current",
"description" :
"""""",
}, # table
"multicastStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.1.1",
"status" : "current",
"linkage" : [
"multicastStatusVlanID",
"multicastStatusPort",
"multicastStatusGroup",
],
"description" :
"""An entry in multicastStatusTable.""",
}, # row
"multicastStatusIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastStatusVlanID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastStatusPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastStatusGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.16.29.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpCountTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2",
"status" : "current",
"description" :
"""A count table of igmp query/report/leave message.""",
}, # table
"igmpSnpCountEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1",
"status" : "current",
"linkage" : [
"igmpSnpCountIndex",
],
"description" :
"""An entry in igmpSnpCountTable.""",
}, # row
"igmpSnpCountIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Index of IgmpCountEntry. 0 means total count in whole system""",
}, # column
"igmpSnpV2CountQueryRx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV2CountReportRx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV2CountLeaveRx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV2CountQueryRxDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.16.29.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV2CountReportRxDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV2CountLeaveRxDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV2CountQueryTx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV2CountReportTx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV2CountLeaveTx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.16.29.2.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV3CountQueryRx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV3CountReportRx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV3CountQueryRxDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV3CountReportRxDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV3CountQueryTx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.2.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpV3CountReportTx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.16.29.2.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastVlanStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.3",
"status" : "current",
"description" :
"""""",
}, # table
"multicastVlanStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.3.1",
"status" : "current",
"linkage" : [
"multicastVlanStatusVlanID",
],
"description" :
"""An entry in multicastVlanStatusTable.""",
}, # row
"multicastVlanStatusVlanID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastVlanStatusType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.16.29.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dynamic" : {
"nodetype" : "namednumber",
"number" : "1"
},
"mvr" : {
"nodetype" : "namednumber",
"number" : "2"
},
"static" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastVlanQueryPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"Q-BRIDGE-MIB", "name" : "PortList"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpSnpCountVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.4",
"status" : "current",
"description" :
"""""",
}, # table
"igmpSnpCountVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.4.1",
"status" : "current",
"linkage" : [
"igmpSnpCountVlanIndex",
],
"description" :
"""An entry in igmpGroupVlanStatus.""",
}, # row
"igmpSnpCountVlanIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.17.32.29.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Input vlan""",
}, # column
"igmpSnpV2CountVlanQueryRx" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-ES2024A-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.16.29.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""show igmpsnp Query Rx counters | |
],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x00) ], [ 0xD0 ], 11, [ (PC == 0x1BBC), (SP == 0x2BBE) ], "RET NC (jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x01) ], [ 0xD0 ], 5, [ (PC == 0x0001), (SP == 0x2BBC) ], "RET NC (no jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x40) ], [ 0xC8 ], 11, [ (PC == 0x1BBC), (SP == 0x2BBE) ], "RET Z (jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x00) ], [ 0xC8 ], 5, [ (PC == 0x0001), (SP == 0x2BBC) ], "RET Z (no jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x00) ], [ 0xC0 ], 11, [ (PC == 0x1BBC), (SP == 0x2BBE) ], "RET NZ (jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x40) ], [ 0xC0 ], 5, [ (PC == 0x0001), (SP == 0x2BBC) ], "RET NZ (no jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x04) ], [ 0xE8 ], 11, [ (PC == 0x1BBC), (SP == 0x2BBE) ], "RET PE (jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x00) ], [ 0xE8 ], 5, [ (PC == 0x0001), (SP == 0x2BBC) ], "RET PE (no jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x00) ], [ 0xE0 ], 11, [ (PC == 0x1BBC), (SP == 0x2BBE) ], "RET PO (jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x04) ], [ 0xE0 ], 5, [ (PC == 0x0001), (SP == 0x2BBC) ], "RET PO (no jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x80) ], [ 0xF8 ], 11, [ (PC == 0x1BBC), (SP == 0x2BBE) ], "RET M (jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x00) ], [ 0xF8 ], 5, [ (PC == 0x0001), (SP == 0x2BBC) ], "RET M (no jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x00) ], [ 0xF0 ], 11, [ (PC == 0x1BBC), (SP == 0x2BBE) ], "RET P (jump)" ],
[ [ SP(0x2BBC), M(0x2BBC,0xBC), M(0x2BBD,0x1B), F(0x80) ], [ 0xF0 ], 5, [ (PC == 0x0001), (SP == 0x2BBC) ], "RET P (no jump)" ],
]
for (pre, instructions, t_cycles, post, name) in tests:
self.execute_instructions(pre, instructions, t_cycles, post, name)
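# --- Harness sketch (added; an assumption about the real helper) ---
# Each test row above is (pre-actions, opcode bytes, t-cycles, post-conditions,
# name). The real execute_instructions lives elsewhere in this suite; a runner
# of the following shape is what the tables imply. make_cpu, apply, load, run
# and check are hypothetical names used only for illustration.
def execute_instructions_sketch(self, pre, instructions, t_cycles, post, name):
    cpu = self.make_cpu()              # fresh CPU + memory fixture
    for action in pre:                 # e.g. SP(0x2BBC), M(0x2BBC, 0xBC), F(0x00)
        action.apply(cpu)
    cpu.load(0x0000, instructions)     # program bytes are placed at address 0
    cpu.run(t_cycles)                  # step the core for the stated t-cycles
    for condition in post:             # e.g. (PC == 0x1BBC), (SP == 0x2BBE)
        self.assertTrue(condition.check(cpu), name)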
def test_rst(self):
# actions taken first, instructions to execute, t-cycles to run for, expected conditions post, name
tests = [
[ [ PC(0x1233), SP(0x1BBC) ], ([ 0xFF ]*0x1233) + [ 0xC7 ], 11, [ (PC == 0x0000), (SP == 0x1BBA), (M[0x1BBA] == 0x34), (M[0x1BBB] == 0x12) ], "RST 00H" ],
[ [ PC(0x1233), SP(0x1BBC) ], ([ 0xFF ]*0x1233) + [ 0xCF ], 11, [ (PC == 0x0008), (SP == 0x1BBA), (M[0x1BBA] == 0x34), (M[0x1BBB] == 0x12) ], "RST 08H" ],
[ [ PC(0x1233), SP(0x1BBC) ], ([ 0xFF ]*0x1233) + [ 0xD7 ], 11, [ (PC == 0x0010), (SP == 0x1BBA), (M[0x1BBA] == 0x34), (M[0x1BBB] == 0x12) ], "RST 10H" ],
[ [ PC(0x1233), SP(0x1BBC) ], ([ 0xFF ]*0x1233) + [ 0xDF ], 11, [ (PC == 0x0018), (SP == 0x1BBA), (M[0x1BBA] == 0x34), (M[0x1BBB] == 0x12) ], "RST 18H" ],
[ [ PC(0x1233), SP(0x1BBC) ], ([ 0xFF ]*0x1233) + [ 0xE7 ], 11, [ (PC == 0x0020), (SP == 0x1BBA), (M[0x1BBA] == 0x34), (M[0x1BBB] == 0x12) ], "RST 20H" ],
[ [ PC(0x1233), SP(0x1BBC) ], ([ 0xFF ]*0x1233) + [ 0xEF ], 11, [ (PC == 0x0028), (SP == 0x1BBA), (M[0x1BBA] == 0x34), (M[0x1BBB] == 0x12) ], "RST 28H" ],
[ [ PC(0x1233), SP(0x1BBC) ], ([ 0xFF ]*0x1233) + [ 0xF7 ], 11, [ (PC == 0x0030), (SP == 0x1BBA), (M[0x1BBA] == 0x34), (M[0x1BBB] == 0x12) ], "RST 30H" ],
[ [ PC(0x1233), SP(0x1BBC) ], ([ 0xFF ]*0x1233) + [ 0xFF ], 11, [ (PC == 0x0038), (SP == 0x1BBA), (M[0x1BBA] == 0x34), (M[0x1BBB] == 0x12) ], "RST 38H" ],
]
for (pre, instructions, t_cycles, post, name) in tests:
self.execute_instructions(pre, instructions, t_cycles, post, name)
def test_in(self):
# actions taken first, instructions to execute, t-cycles to run for, expected conditions post, name
tests = [
[ [ A(0x55), IN(0xAB) ], [ 0xDB, 0xFE ], 11, [ (A == 0xAB), (IN == 0x55) ], "IN A,FEH" ],
[ [ A(0x55), IN(0xAB) ], [ 0xDB, 0x57 ], 11, [ (A == 0x00) ], "IN A,57H" ],
[ [ B(0x55), C(0xFE), IN(0xAB) ], [ 0xED, 0x40 ], 12, [ (B == 0xAB), (IN == 0x55), (F == 0xA8) ], "IN B,(C)" ],
[ [ B(0x55), C(0xFE), IN(0xAB) ], [ 0xED, 0x48 ], 12, [ (C == 0xAB), (IN == 0x55), (F == 0xA8) ], "IN C,(C)" ],
[ [ B(0x55), C(0xFE), IN(0xAB) ], [ 0xED, 0x50 ], 12, [ (D == 0xAB), (IN == 0x55), (F == 0xA8) ], "IN D,(C)" ],
[ [ B(0x55), C(0xFE), IN(0xAB) ], [ 0xED, 0x58 ], 12, [ (E == 0xAB), (IN == 0x55), (F == 0xA8) ], "IN E,(C)" ],
[ [ B(0x55), C(0xFE), IN(0xAB) ], [ 0xED, 0x60 ], 12, [ (H == 0xAB), (IN == 0x55), (F == 0xA8) ], "IN H,(C)" ],
[ [ B(0x55), C(0xFE), IN(0xAB) ], [ 0xED, 0x68 ], 12, [ (L == 0xAB), (IN == 0x55), (F == 0xA8) ], "IN L,(C)" ],
[ [ B(0x55), C(0xFE), IN(0xAB) ], [ 0xED, 0x70 ], 12, [ (IN == 0x55), (F == 0xA9) ], "IN F,(C)" ],
[ [ B(0x55), C(0xFE), IN(0xAB) ], [ 0xED, 0x78 ], 12, [ (A == 0xAB), (IN == 0x55), (F == 0xA8) ], "IN A,(C)" ],
]
for (pre, instructions, t_cycles, post, name) in tests:
self.execute_instructions(pre, instructions, t_cycles, post, name)
def test_ini(self):
# actions taken first, instructions to execute, t-cycles to run for, expected conditions post, name
tests = [
[ [ IN(0xAB), HL(0x1BBC), B(0x2), C(0xFE) ], [ 0xED, 0xA2 ], 16, [ (M[0x1BBC] == 0xAB), (IN == 0x02), (HL == 0x1BBD), (B == 0x01), (F == 0x00) ], "INI" ],
[ [ IN(0xAB), HL(0x1BBC), B(0x1), C(0xFE) ], [ 0xED, 0xA2 ], 16, [ (M[0x1BBC] == 0xAB), (IN == 0x01), (HL == 0x1BBD), (B == 0x00), (F == 0x44) ], "INI" ],
]
for (pre, instructions, t_cycles, post, name) in tests:
self.execute_instructions(pre, instructions, t_cycles, post, name)
def test_inir(self):
# actions taken first, instructions to execute, t-cycles to run for, expected conditions post, name
tests = [
[ [ IN(0xAB), HL(0x1BBC), B(0x2), C(0xFE) ], [ 0xED, 0xB2 ], 21, [ (PC == 0x00), (M[0x1BBC] == 0xAB), (IN == 0x02), (HL == 0x1BBD), (B == 0x01), (F == 0x00) ], "INIR" ],
[ [ IN(0xAB), HL(0x1BBC), B(0x1), C(0xFE) ], [ 0xED, 0xB2 ], 16, [ (PC == 0x02), (M[0x1BBC] == 0xAB), (IN == 0x01), (HL == 0x1BBD), (B == 0x00), (F == 0x44) ], "INIR" ],
[ [ IN(0xAB), HL(0x1BBC), B(0x2), C(0xFE) ], [ 0xED, 0xB2 ], 37, [ (PC == 0x02), (M[0x1BBC] == 0xAB), (M[0x1BBD] == 0xAB), (IN == 0x01), (HL == 0x1BBE), (B == 0x00), (F == 0x44)], "INIR" ],
]
for (pre, instructions, t_cycles, post, name) in tests:
self.execute_instructions(pre, instructions, t_cycles, post, name)
def test_ind(self):
# actions taken first, instructions to execute, t-cycles to run for, expected conditions post, name
tests = [
[ [ IN(0xAB), HL(0x1BBC), B(0x2), C(0xFE) ], [ 0xED, 0xAA ], 16, [ (M[0x1BBC] == 0xAB), (IN == 0x02), (HL == 0x1BBB), (B == 0x01), (F == 0x00) ], "IND" ],
[ [ IN(0xAB), HL(0x1BBC), B(0x1), C(0xFE) ], [ 0xED, 0xAA ], 16, [ (M[0x1BBC] == 0xAB), (IN == 0x01), (HL == 0x1BBB), (B == 0x00), (F == 0x44) ], "IND" ],
]
for (pre, instructions, t_cycles, post, name) in tests:
self.execute_instructions(pre, instructions, t_cycles, post, name)
def test_indr(self):
# actions taken first, instructions to execute, t-cycles to run for, expected conditions post, name
tests = [
[ [ IN(0xAB), HL(0x1BBC), B(0x2), C(0xFE) ], [ 0xED, 0xBA ], 21, [ (PC == 0x00), (M[0x1BBC] == 0xAB), (IN == 0x02), (HL == 0x1BBB), (B == 0x01), (F == 0x00) ], "INDR" ],
[ [ IN(0xAB), HL(0x1BBC), B(0x1), C(0xFE) ], [ 0xED, 0xBA ], 16, [ (PC == 0x02), (M[0x1BBC] == 0xAB), (IN == 0x01), (HL == 0x1BBB), (B == 0x00), (F == 0x44) ], "INDR" ],
]
for (pre, instructions, t_cycles, post, name) in tests:
self.execute_instructions(pre, instructions, t_cycles, post, name)
# -*- encoding: utf-8 -*-
"""
License: MIT
Copyright (c) 2019 - present AppSeed.us
"""
from django.contrib.auth.models import Group, User
from django.contrib.auth.decorators import login_required
from django.db import models
from django.shortcuts import render, get_object_or_404, redirect
from django.template import loader
from django.http import HttpResponse
from django import template
from app.models import WorkPending , SiteList , PersanalDetaillogin , WahSubmitforcontractor , Workfromgmail , type_of_work
from app.resources import SiteListResource
from django.core.paginator import Paginator, EmptyPage,InvalidPage
from django.contrib import messages
from django.db.models import Q
from django.db.models import Count
from django.db.models.functions import Lower
import json
import requests
from django.views.decorators.csrf import csrf_exempt
from app.createflexmessage import *
from linebot.views import PushMessage , send_notify
import datetime
import arrow
from django.db.models import F, Sum
# import schedule
# date = arrow.now().format('YYYY-MM-DD')
# date_new = arrow.now()
today = datetime.datetime.now().strftime("%Y-%m-%d")
tomorrow = (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
@login_required(login_url="/login/")
def index(request):
if request.user.is_authenticated:
# Guard for the case where the system has remembered the user, so they are redirected to their own page, e.g. the contractor's page
group = request.user.groups.values_list('name', flat=True).first() # look up the group name (GroupID)
if group == 'CBRE':
status_pending ='INPRG'
count_today_planning_work=WahSubmitforcontractor.objects.filter(planned_date=str(today),status='in planing').count()
count_tomorrow_planning_work=WahSubmitforcontractor.objects.filter(planned_date=str(tomorrow),status='in planing').count()
count_wah_onsite_count=WahSubmitforcontractor.objects.filter(status='onsite').count()
count_wah_completed_count=WahSubmitforcontractor.objects.filter(status='completed').count()
request.session['count_tomorrow_planning_work'] = count_tomorrow_planning_work
request.session['count_today_planning_work'] = count_today_planning_work
request.session['count_wah_onsite_count'] = count_wah_onsite_count
request.session['count_wah_completed_count'] = count_wah_completed_count
submited_work=WahSubmitforcontractor.objects.filter(status='in planing')
today_planning_work=WahSubmitforcontractor.objects.filter(planned_date=str(today),status='in planing')
tomorrow_planning_work=WahSubmitforcontractor.objects.filter(planned_date=str(tomorrow),status='in planing')
work_at_site=WahSubmitforcontractor.objects.filter(status='onsite')
pendingworkdetailmycompany=WahSubmitforcontractor.objects.annotate(lower_title=Lower('status')).values('status').annotate(num=Count('status')).order_by('company')
pendingworkdetail2=WahSubmitforcontractor.objects.filter(status='in planing').count() + WahSubmitforcontractor.objects.filter(status='onsite').count() # total of planned plus onsite work
count_wah_onsite=WahSubmitforcontractor.objects.filter(status='onsite')
print (pendingworkdetail2)
return render(request, "index.html",{'work_at_site':work_at_site,'tomorrow_planning_work':tomorrow_planning_work,'today_planning_work':today_planning_work ,'count_wah_onsite':count_wah_onsite,'count_today_planning_work':count_today_planning_work , 'count_tomorrow_planning_work':count_tomorrow_planning_work ,'count_wah_onsite_count':count_wah_onsite_count , 'count_wah_completed_count':count_wah_completed_count , 'submited_work':submited_work , 'pendingworkdetailmycompany':pendingworkdetailmycompany})
else:
return redirect("contractor") #ส่งค่าไปแสดงผลที่ index.html
#return redirect(request,'contractor.html',{'group':group , 'id':current_user.id}) #ส่งค่าไปแสดงผลที่ index.html
@login_required(login_url="/login/")
def pages(request):
context = {}
# All resource paths end in .html.
# Pick out the html file name from the url. And load that template.
try:
load_template = request.path.split('/')[-1]
html_template = loader.get_template( load_template )
return HttpResponse(html_template.render(context, request))
except template.TemplateDoesNotExist:
html_template = loader.get_template( 'error-404.html' )
return HttpResponse(html_template.render(context, request))
except:
html_template = loader.get_template( 'error-500.html' )
return HttpResponse(html_template.render(context, request))
def export(request):
# Build the dataset once, then pick the export format from the optional
# ?format= query parameter (defaults to xls); a single response object is
# constructed so that none of the variants is silently discarded.
dataset = SiteListResource().export()
fmt = request.GET.get('format', 'xls')
if fmt == 'csv':
response = HttpResponse(dataset.csv, content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="member.csv"'
elif fmt == 'json':
response = HttpResponse(dataset.json, content_type='application/json')
response['Content-Disposition'] = 'attachment; filename="persons.json"'
else:
response = HttpResponse(dataset.xls, content_type='application/vnd.ms-excel')
response['Content-Disposition'] = 'attachment; filename="persons.xls"'
return response
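# --- Routing sketch (added; urls.py is not part of this file) ---
# One hedged way to expose the export view above; the module path and URL name
# are assumptions for illustration.
# from django.urls import path
# from app import views
#
# urlpatterns = [
#     path('export/', views.export, name='export'),  # /export/?format=csv|json|xls
# ]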
def contractorreport_ (request) :
# set up the SQL connection here
if request.user.is_authenticated:
contractor = request.user.groups.values_list('name', flat=True).first() # look up the group name (GroupID)
# contractor='KVM ENGINEERING CO.,LTD'
status_pending ='INPRG'
status_completed = 'COMP'
pendingworks=Workfromgmail.objects.filter(service_provider=contractor, status=status_pending) # fetch the records from the database where the email matches
pendingcount=Workfromgmail.objects.filter(status=status_pending,service_provider=contractor).count()
completedcount=Workfromgmail.objects.filter(status=status_completed,service_provider=contractor).count()
wah_submited=WahSubmitforcontractor.objects.filter(company=contractor,status="in planing")
wah_submitedcount=WahSubmitforcontractor.objects.filter(company=contractor,status="in planing").count()
wah_onsite=WahSubmitforcontractor.objects.filter(company=contractor,status="onsite").count()
wah_completed=WahSubmitforcontractor.objects.filter(company=contractor,status="completed").count()
# print ('CompletedWork',wah_completed)
#print (current_user.id)
paginator=Paginator(pendingworks,5) # configure pagination: 5 items per page
try :
page=int(request.GET.get('page','1'))
except:
page=1
try:
workpendingperpage=paginator.page(page)
except (EmptyPage,InvalidPage):
workpendingperpage=paginator.page(paginator.num_pages)
return render(request,'contractor.html',{'pendings':workpendingperpage ,'wahsubmits':wah_submited,'pendingcount':pendingcount,'completedcount':completedcount ,'wahsubited':wah_submitedcount ,'wah_onsite':wah_onsite , 'wah_completed':wah_completed})
def contractorreport(request) :
# set up the SQL connection here
if request.user.is_authenticated:
contractor_id = request.user.groups.values_list('id', flat=True).first() # look up the group id
contractor = request.user.groups.values_list('name', flat=True).first() # look up the group name
contractor_initials_name = request.user.groups.values_list('initials_name', flat=True).first() # look up the group's initials
request.session['contractor_id'] = contractor_id
request.session['contractor'] = contractor
request.session['contractor_initials_name'] = contractor_initials_name
# contractor='KVM ENGINEERING CO.,LTD'
pendingworks=Workfromgmail.objects.filter(service_id=request.session['contractor_id'],status_submit__isnull=True) # fetch the records from the database where the email matches
pendingcount=Workfromgmail.objects.filter(service_id=request.session['contractor_id'],status_submit__isnull=True).count()
# completedcount=WorkPending.objects.filter(status=status_completed,service_provider=contractor).count()
wah_submited=WahSubmitforcontractor.objects.filter(company_id=request.session['contractor_id'],status="in planing")
wah_submitedcount=WahSubmitforcontractor.objects.filter(company_id=request.session['contractor_id'],status="in planing").count()
wah_onsite=WahSubmitforcontractor.objects.filter(company_id=request.session['contractor_id'],status="onsite").count()
wah_completed=WahSubmitforcontractor.objects.filter(company_id=request.session['contractor_id'],status="completed").count()
# print ('CompletedWork',wah_completed)
#print (current_user.id)
paginator=Paginator(pendingworks,5) # configure pagination: 5 items per page
try :
page=int(request.GET.get('page','1'))
except:
page=1
try:
workpendingperpage=paginator.page(page)
except (EmptyPage,InvalidPage):
workpendingperpage=paginator.page(paginator.num_pages)
return render(request,'contractor.html',{'pendings':workpendingperpage ,'wahsubmits':wah_submited,'pendingcount':pendingcount,'completedcount':wah_completed ,'wahsubited':wah_submitedcount ,'wah_onsite':wah_onsite , 'wah_completed':wah_completed})
@login_required(login_url='singIn') # require login before this action can be performed
def addWAH(request,workorder):
# status_pending ='INPRG'
# global contractor_id
# contractor = request.user.groups.values_list('name', flat=True).first() # look up the group name
# contractor_id = request.user.groups.values_list('id', flat=True).first() # look up the group id
description=Workfromgmail.objects.filter(service_id=request.session['contractor_id'], workorder=workorder)
pendingcount=Workfromgmail.objects.filter(service_id=request.session['contractor_id']).count()
wah_submitedcount=WahSubmitforcontractor.objects.filter(company_id=request.session['contractor_id'],wah_status="submited").count()
wah_onsite=WahSubmitforcontractor.objects.filter(company_id=request.session['contractor_id'],status="onsite").count()
wah_completed=WahSubmitforcontractor.objects.filter(company_id=request.session['contractor_id'],status="completed").count()
fm_detail=PersanalDetaillogin.objects.filter(company='CBRE')
worktype_detail=type_of_work.objects.all()
fls_detail=PersanalDetaillogin.objects.filter(company_id=request.session['contractor_id'])
for workdescription in description :
print (workdescription)
work_detail= (workdescription.problum)
sitename = (workdescription.caller)
print (workorder)
print (description)
# new_wah.save()
return render (request,'submit_wah.html',{'worktype_detail':worktype_detail,'workorder':workorder , 'contractor':request.session['contractor'], 'workdetail':work_detail , 'sitename':sitename,'wahsubited':wah_submitedcount ,'wah_onsite':wah_onsite , 'wah_completed':wah_completed , 'pendingcount':pendingcount , 'fm_detail':fm_detail ,"fls_detail":fls_detail})
@login_required(login_url='singIn') # require login before this action can be performed
def addWAHtoDB(request):
open_work = 'open'  # status flag for a newly opened work order
wah_status = 'submited'
status_onsite='in planing'
print('inside addwahtodb')
if request.method == "POST" :
print('inside post method')
if request.POST.get('planned_date') and request.POST.get('caller') and request.POST.get('job_description') and request.POST.get('workorder') and request.POST.get('company') and request.POST.get('fls_mame_1') and request.POST.get('fls_mame_2') and request.POST.get('fls_phone') and request.POST.get('management') and request.POST.get('remark') and request.POST.get('type_job') and request.POST.get('jla_ra') and request.POST.get('any_ssw') and request.POST.get('physical') and request.POST.get('fm'):
workorder = request.POST.get('workorder')
contractor = request.POST.get('company')
token=PersanalDetaillogin.objects.filter(name=request.POST.get('fls_mame_1')).values_list('group_id')[0][0]
print('Token is',token)
fls_id_1 = PersanalDetaillogin.objects.filter(name=request.POST.get('fls_mame_1')).values_list('id')[0][0]
fls_id_2 = PersanalDetaillogin.objects.filter(name=request.POST.get('fls_mame_2')).values_list('id')[0][0]
print ('fls name is ')
save_record=WahSubmitforcontractor()
save_record.planned_date=request.POST.get('planned_date')
save_record.caller=request.POST.get('caller')
save_record.job_description=request.POST.get('job_description')
save_record.workorder=request.POST.get('workorder')
save_record.company=request.POST.get('company')
save_record.fls_mame_1=request.POST.get('fls_mame_1')
save_record.fls_mame_2=request.POST.get('fls_mame_2')
save_record.fls_id_1=fls_id_1
save_record.fls_id_2=fls_id_2
save_record.fls_phone=request.POST.get('fls_phone')
save_record.management=request.POST.get('management')
save_record.remark=request.POST.get('remark')
save_record.type_job=request.POST.get('type_job')
save_record.jla_ra=request.POST.get('jla_ra')
save_record.any_ssw=request.POST.get('any_ssw')
save_record.physical=request.POST.get('physical')
save_record.fm=request.POST.get('fm')
save_record.openned=open_work
save_record.wah_status=wah_status
save_record.status=status_onsite
save_record.company_id=request.session['contractor_id']
save_record.initials_name=request.session['contractor_initials_name']
save_record.save(request)
update_pending_to_submitted=Workfromgmail.objects.filter(workorder=workorder).update(status_submit='yes')
data_3=creatinglinemessages.submit_notify(request)
send_notify(data_3,token)
#return redirect(request,'contractor')
return redirect('contractor')
@login_required(login_url='singIn') # require login before this action can be performed
def editwah (request,id):
print ('id is ',id)
# contractor = request.user.groups.values_list('name', flat=True).first() # look up the group name
workforedit=WahSubmitforcontractor.objects.filter(company=request.session['contractor'], id=id)
for workforedit in workforedit :
id=workforedit.id
workorder=workforedit.workorder
company=workforedit.company
opended=workforedit.opended
status=workforedit.status
startwork=workforedit.startwork
completedwork=workforedit.completedwork
caller=workforedit.caller
wah_status=workforedit.wah_status
planned_date=workforedit.planned_date
job_description=workforedit.job_description
fls_mame=workforedit.fls_mame_1
fls_phone=workforedit.fls_phone
management=workforedit.management
remark=workforedit.remark
type_job=workforedit.type_job
jla_ra=workforedit.jla_ra
any_ssw=workforedit.any_ssw
physical=workforedit.physical
fm=workforedit.fm
return render (request,'edit_submitwah.html',{'id':id,'workorder':workorder ,'company':company ,'opended':opended , 'status':status , 'startwork':startwork , 'completedwork':completedwork , 'caller':caller , 'wah_status':wah_status , 'planned_date':planned_date , 'job_description':job_description , 'fls_mame':fls_mame , 'fls_phone':fls_phone , 'management':management , 'remark':remark , 'type_job':type_job , 'jla_ra':jla_ra , 'any_ssw':any_ssw , 'physical':physical , 'fm':fm })
@login_required(login_url='singIn') # require login before this action can be performed
def updatewah (request,id):
if request.method == "POST" :
if request.POST.get('fls_mame') :
print ('fls name is',request.POST.get('fls_mame'))
token=PersanalDetaillogin.objects.filter(name=request.POST.get('fls_mame')).values_list('group_id')[0][0]
print ('Token is',token)
if request.POST.get('planned_date') :
print (request.POST.get('planned_date'))
ID = request.POST.get('id')
planned_update= request.POST.get('planned_date')
udpatedatawah=WahSubmitforcontractor.objects.filter(id=id).update(planned_date=planned_update)
data_3=creatinglinemessages.updatedsubmit_notify(request)
send_notify(data_3,token)
return redirect('contractor')
@login_required(login_url='singIn') # require login before this action can be performed
def seedetail (request,id):
print ('id is ',id)
contractor = request.user.groups.values_list('name', flat=True).first() # look up the group name (GroupID)
workforedit=WahSubmitforcontractor.objects.filter(id=id)
for workforedit in workforedit :
id=workforedit.id
workorder=workforedit.workorder
company=workforedit.company
opended=workforedit.opended
status=workforedit.status
startwork=workforedit.startwork
completedwork=workforedit.completedwork
caller=workforedit.caller
wah_status=workforedit.wah_status
planned_date=workforedit.planned_date
job_description=workforedit.job_description
fls_mame=workforedit.fls_mame_1
fls_phone=workforedit.fls_phone
management=workforedit.management
remark=workforedit.remark
type_job=workforedit.type_job
jla_ra=workforedit.jla_ra
any_ssw=workforedit.any_ssw
physical=workforedit.physical
fm=workforedit.fm
return render (request,'seedetailofwork.html',{'count_wah_completed_count':request.session['count_wah_completed_count'],'count_wah_onsite_count':request.session['count_wah_onsite_count'],'count_tomorrow_planning_work':request.session['count_tomorrow_planning_work'],'count_today_planning_work':request.session['count_today_planning_work'],'workorder':workorder ,'company':company ,'opended':opended , 'status':status , 'startwork':startwork , 'completedwork':completedwork , 'caller':caller , 'wah_status':wah_status , 'planned_date':planned_date , 'job_description':job_description , 'fls_mame':fls_mame , 'fls_phone':fls_phone , 'management':management , 'remark':remark , 'type_job':type_job , 'jla_ra':jla_ra , 'any_ssw':any_ssw , 'physical':physical , 'fm':fm })
def liffpage (requests):
return render (requests,'liffpagelogin.html')
@csrf_exempt
def check_userid(request):
if request.method == "POST" :
# global user_id
user_id=request.POST['user_id']
request.session['user_id'] = user_id
return HttpResponse('OK')
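# --- Exercise sketch (added for illustration; not part of the original views) ---
# check_userid above expects a POST carrying a user_id form field, sent by the
# LIFF login page. A hedged way to drive it outside the browser is Django's
# test client; the URL path below is an assumption about the project's routing.
def _exercise_check_userid_sketch():
    from django.test import Client
    client = Client()
    resp = client.post('/check_userid/', {'user_id': 'U1234567890abcdef'})
    assert resp.status_code == 200 and resp.content == b'OK'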
def wahwork(request):
company_id = PersanalDetaillogin.objects.filter(line_id=request.session['user_id']).values_list('company_id')[0][0]
request.session['company_id'] = company_id
group = Group.objects.filter(id=company_id).values_list('name')[0][0]
if group == 'CBRE':
count_wah_submitedcount=WahSubmitforcontractor.objects.filter(status='in planing').count()
count_wah_onsite_count=WahSubmitforcontractor.objects.filter(status='onsite').count()
return render (request,'liffinformationwah.html',{"count_wah_submitedcount":count_wah_submitedcount , "count_wah_onsite_count":count_wah_onsite_count , "company_id":company_id})
def liffsubmitedwahbycontractor(request,id,type):
if id == 3 :
wahs_submited=WahSubmitforcontractor.objects.filter(status=type).values('company').annotate(dcount=Count('company'))
print(wahs_submited)
else:
print(id) #Pass for contractor
return render (request,'liffsubmitedwahbycontractor.html',{'wah_submited_detail':wahs_submited , "type":type})
def check_type_work(request,type_check):
company_id = PersanalDetaillogin.objects.filter(line_id=request.session['user_id']).values_list('company_id')[0][0]
request.session['company_id'] = company_id
imcomming_work=Workfromgmail.objects.filter(date=today,notify_contractor__isnull=True).values('service_provider').annotate(dcount=Count('service_provider'))
# print ('incomming is',imcomming_work)
# print (type_check)
today_check = datetime.datetime.now().strftime("%Y-%m-%d")
tomorrow_check = (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
if request.session['company_id'] == 3 :
if type_check == 'main_check':
worktype_detail=WahSubmitforcontractor.objects.filter(status='in planing').values('type_job').annotate(dcount=Count('type_job'))
if type_check == 'today':
worktype_detail=WahSubmitforcontractor.objects.filter(status='in planing',planned_date=today_check).values('type_job').annotate(dcount=Count('type_job'))
if type_check == 'tomorrow':
worktype_detail=WahSubmitforcontractor.objects.filter(status='in planing',planned_date=tomorrow_check).values('type_job').annotate(dcount=Count('type_job'))
if type_check == 'incomming':
# Query within a single table and let the DB sum the matching rows for each condition
# worktype_detail=Workfromgmail.objects.filter(completed_work__isnull=True).values('service_provider').annotate(date_open=Count('date', filter=Q(date=today))).annotate(submit=Count('status_submit')).annotate(todaypending=Count('date', filter=Q(date=today,status_submit__isnull=True))).annotate(notify=Count('notify_contractor')).annotate(pending=Count('date', filter= ~Q(status_submit='yes')))
worktype_detail=Workfromgmail.objects.filter(date=today).values('date').annotate(date_open=Count('date', filter=Q(date=today))).annotate(submit=Count('status_submit')).annotate(todaypending=Count('date', filter=Q(date=today,status_submit__isnull=True)))
# print(worktype_detail)
if type_check == 'submitted_check':
data_1 = []
data_2 = Workfromgmail.objects.filter(completed_work__isnull=True).values('initials_name').annotate(new_work_today=Count('date', filter=Q(date=today_check))).annotate(today_submit=Count('status_submit',filter=Q(status_submit='yes'))).annotate(todaypending=Count('date', filter=Q(status_submit__isnull=True)))
data_3 = WahSubmitforcontractor.objects.filter(planned_date=tomorrow,status='in planing').values('initials_name').annotate(planned_today=Count('planned_date',))
today_date = datetime.datetime.now().strftime("%d-%m-%Y %H:%M")
tomorrow_date = (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%d-%m-%Y")
# print ('data 2 is',data_2)
# print ('data 3 is',data_3)
# print ('today is',today_date)
for name in Group.objects.values_list('initials_name'):
data = {'name':(name[0])}
if data['name'] == 'CBRE':
print ('Cancel CBRE')
else :
data_1.append(data)
for guest_new in data_2:
for name in data_1:
if (name['name'] == guest_new['initials_name']):
name['new_work_today'] = guest_new['new_work_today']
name['today_submit'] = guest_new['today_submit']
name['todaypending'] = guest_new['todaypending']
for guest_new in data_3 :
for name in data_1:
if(name['name'] == guest_new['initials_name']):
name['planned_today'] = guest_new['planned_today']
print (data_1)
data_line=creatinglinemessages.summary_by_contractor(data_1,today_date,tomorrow_date)
# print (json.dumps(data_line))
return render (request,'showsubmitcheck.html',{"data":json.dumps(data_line),'today_date':today_date,'final':data_1})
else:
print(id) #Pass for contractor
return render (request,'showcheckworkbytype.html',{'worktype_detail':worktype_detail,'type':type_check})
def checkworktype_by_contractor(request,type_job,type_check):
print (type_check)
if type_check == 'incomming':
print ('type job is',type_job)
details=Workfromgmail.objects.filter(completed_work__isnull=True).values('service_provider').annotate(date_open=Count('date', filter=Q(date=today))).annotate(submit=Count('status_submit')).annotate(todaypending=Count('date', filter=Q(date=today,status_submit__isnull=True))).annotate(notify=Count('notify_contractor')).annotate(pending=Count('date', filter= ~Q(status_submit='yes')))
return render (request,'worktype_by_contractor.html',{'details':details ,'type_job':type_job ,'type':type_check})
# return HttpResponse (200)
else :
details=WahSubmitforcontractor.objects.filter(type_job=type_job,status='in planing').values('company').annotate(dcount=Count('company'))
return render (request,'worktype_by_contractor.html',{'details':details ,'type_job':type_job ,'type':type_check})
def detail_checkworktype_by_contractor(request,type_job,type_check):
type = 'in planing'
details=WahSubmitforcontractor.objects.filter(status='in planing',type_job=type_job)
data=creatinglinemessages.wahsubmit(details,type)
return render (request,'worktype_by_contractor_detail.html',{'type':type_check,'details':details , 'type_job':type_job , "data":json.dumps(data)})
def checkworktoday(request):
details=WahSubmitforcontractor.objects.filter(status='in planing',planned_date=today)
print(details)
return HttpResponse (200)
def liffsubmiteddetail(request,company,type):
count_wah_submit_detail=WahSubmitforcontractor.objects.filter(status=type,company=company)
data=creatinglinemessages.wahsubmit(count_wah_submit_detail,type)
return render (request,'liffsubmitedwahdetail.html',{"count_wah_submit_detail":count_wah_submit_detail , "type":type , "data":json.dumps(data)})
def sendlinebot(request,company,type,workorder):
    # Same query as liffsubmiteddetail(); the workorder argument is currently unused here.
    count_wah_submit_detail=WahSubmitforcontractor.objects.filter(status=type,company=company)
    return render (request,'liffsubmitedwahdetail.html',{"count_wah_submit_detail":count_wah_submit_detail , "type":type })
def checkinwork (request):
return render (request,'liffpage_checkin_login.html')
def checkoutwork (request):
return render (request,'liffpage_checkout_login.html')
def worklistforcheckin(request):
fls_line_id=PersanalDetaillogin.objects.filter(line_id=request.session['user_id']).values_list('id')[0][0]
work_detail=WahSubmitforcontractor.objects.filter(Q(fls_id_1=fls_line_id) | Q(fls_id_2=fls_line_id) ,startwork__isnull=True)
#work_detail=WahSubmitforcontractor.objects.filter(Q(fls_id_1=fls_line_id) | Q(fls_id_2=fls_line_id) )
if work_detail.exists():
return render (request,'liffpage_checkin_detail.html',{"work_detail":work_detail ,"type":'OK'})
else:
return render (request,'liffpage_checkin_detail.html',{"work_detail":work_detail ,"type":'NOK'})
def worklistforcheckout(request):
fls_line_id=PersanalDetaillogin.objects.filter(line_id=request.session['user_id']).values_list('id')[0][0]
work_detail=WahSubmitforcontractor.objects.filter(Q(fls_id_1=fls_line_id) | Q(fls_id_2=fls_line_id) ,status='onsite')
#work_detail=WahSubmitforcontractor.objects.filter(Q(fls_id_1=fls_line_id) | Q(fls_id_2=fls_line_id) )
if work_detail.exists():
return render (request,'liffpage_checkout_detail.html',{"work_detail":work_detail ,"type":'OK'})
else:
return render (request,'liffpage_checkout_detail.html',{"work_detail":work_detail ,"type":'NOK'})
def liffpage_checkin_confirme(request,id):
type='onsite'
fls_line_id=PersanalDetaillogin.objects.filter(line_id=request.session['user_id']).values_list('id')[0][0]
work_detail=WahSubmitforcontractor.objects.filter(Q(fls_id_1=fls_line_id) | Q(fls_id_2=fls_line_id),startwork__isnull=True,id=id)
#work_detail=WahSubmitforcontractor.objects.filter(Q(fls_id_1=fls_line_id) | Q(fls_id_2=fls_line_id))
return render (request,'liffpage_checkin_confirme.html',{"work_detail":work_detail })
def liffpage_checkout_confirme(request,id):
fls_line_id=PersanalDetaillogin.objects.filter(line_id=request.session['user_id']).values_list('id')[0][0]
work_detail=WahSubmitforcontractor.objects.filter(Q(fls_id_1=fls_line_id) | Q(fls_id_2=fls_line_id),completedwork__isnull=True,id=id)
#work_detail=WahSubmitforcontractor.objects.filter(Q(fls_id_1=fls_line_id) | Q(fls_id_2=fls_line_id))
return render (request,'liffpage_checkout_confirme.html',{"work_detail":work_detail })
def updatecheckindatabase(request,id,workorder):
# print('Work ID is ',id)
type='onsite'
type_1='admin'
type_2='fm'
today_checkin = datetime.datetime.now().strftime("%d-%m-%Y %H:%M")
fls_startwork=PersanalDetaillogin.objects.filter(line_id=request.session['user_id']).values_list('name')[0][0]
updatedatawah=WahSubmitforcontractor.objects.filter(id=id).update(startwork=today_checkin,status='onsite',fls_startwork=fls_startwork)
udpate_pending_to_status_work=Workfromgmail.objects.filter(workorder=workorder).update(completed_work='onsite')
work_detail_to_line=WahSubmitforcontractor.objects.filter(id=id)
global data_2
global data_3
data_1=creatinglinemessages.linedetailcheck(work_detail_to_line,type)
data_2=creatinglinemessages.linedetailcheck(work_detail_to_line,type_1)
data_3=creatinglinemessages.checkin_notify(work_detail_to_line)
    for item in work_detail_to_line:
        fm_name = item.fm
request.session['fm_name'] = fm_name
admin_data=PersanalDetaillogin.objects.filter(user_type='admin')
token=PersanalDetaillogin.objects.filter(line_id=request.session['user_id']).values_list('group_id')[0][0]
    for admin in admin_data:
        data_admin = admin.line_id
send_line_to_cbre=PushMessage(data_2,data_admin)
# print (send_line_to_cbre)
fm_data=PersanalDetaillogin.objects.filter(name=request.session['fm_name']).values_list('line_id')[0][0]
# print (fm_data)
send_line_to_fm=PushMessage(data_2,fm_data)
send_notify(data_3,token)
return render(request,'completedcheckin.html',{"data":json.dumps(data_1)})
def updatecheckoutdatabase(request,id,workorder):
print('Work ID is ',id)
    type='completed'
    type_1='admin2'
    today_checkout = datetime.datetime.now().strftime("%d-%m-%Y %H:%M")
"""
        if self._free_points and self.c_points:
if self.c_points.contents.alloc_points > 0:
#print("G_free(points) [%i]"%(self.c_points.contents.alloc_points))
libgis.G_free(self.c_points.contents.x)
libgis.G_free(self.c_points.contents.y)
if self.c_points.contents.z:
libgis.G_free(self.c_points.contents.z)
        if self._free_cats and self.c_cats:
if self.c_cats.contents.alloc_cats > 0:
#print("G_free(cats) [%i]"%(self.c_cats.contents.alloc_cats))
libgis.G_free(self.c_cats.contents.cat)
@property
def cat(self):
if self.c_cats.contents.cat:
return self.c_cats.contents.cat.contents.value
def has_topology(self):
if self.c_mapinfo is not None:
return self.c_mapinfo.contents.level == 2
else:
return False
@mapinfo_must_be_set
def read(self):
"""Read and set the coordinates of the centroid from the vector map,
using the centroid_id and calling the Vect_read_line C function"""
self.id, ftype, c_points, c_cats = c_read_line(self.id, self.c_mapinfo,
self.c_points,
self.c_cats)
def to_wkt(self):
"""Return a "well know text" (WKT) geometry string, this method uses
the GEOS implementation in the vector library. ::
>>> pnt = Point(10, 100)
>>> pnt.to_wkt()
'POINT (10.0000000000000000 100.0000000000000000)'
"""
return decode(libvect.Vect_line_to_wkt(self.c_points, self.gtype, not self.is2D))
def to_wkb(self):
"""Return a "well know binary" (WKB) geometry byte array, this method uses
the GEOS implementation in the vector library. ::
>>> pnt = Point(10, 100)
>>> wkb = pnt.to_wkb()
>>> len(wkb)
21
"""
size = ctypes.c_size_t()
barray = libvect.Vect_line_to_wkb(self.c_points, self.gtype,
not self.is2D, ctypes.byref(size))
        return ctypes.string_at(barray, size.value)
class Point(Geo):
"""Instantiate a Point object that could be 2 or 3D, default
parameters are 0.
::
>>> pnt = Point()
>>> pnt.x
0.0
>>> pnt.y
0.0
>>> pnt.z
>>> pnt.is2D
True
>>> pnt
Point(0.000000, 0.000000)
>>> pnt.z = 0
>>> pnt.is2D
False
>>> pnt
Point(0.000000, 0.000000, 0.000000)
>>> print(pnt)
POINT Z (0.0000000000000000 0.0000000000000000 0.0000000000000000)
>>> c_points = ctypes.pointer(libvect.line_pnts())
>>> c_cats = ctypes.pointer(libvect.line_cats())
>>> p = Point(c_points = c_points, c_cats=c_cats)
>>> del p
>>> c_points = ctypes.pointer(libvect.line_pnts())
>>> c_cats = ctypes.pointer(libvect.line_cats())
>>> p = Point(c_points=c_points, c_cats=c_cats, free_points=True,
... free_cats=True)
>>> del p
..
"""
# geometry type
gtype = libvect.GV_POINT
def __init__(self, x=0, y=0, z=None, **kargs):
super(Point, self).__init__(**kargs)
if self.id and self.c_mapinfo:
self.read()
else:
            self.is2D = z is None
z = z if z is not None else 0
libvect.Vect_append_point(self.c_points, x, y, z)
def _get_x(self):
return self.c_points.contents.x[0]
def _set_x(self, value):
self.c_points.contents.x[0] = value
x = property(fget=_get_x, fset=_set_x,
doc="Set and obtain x coordinate")
def _get_y(self):
return self.c_points.contents.y[0]
def _set_y(self, value):
self.c_points.contents.y[0] = value
y = property(fget=_get_y, fset=_set_y,
doc="Set and obtain y coordinate")
def _get_z(self):
if self.is2D:
return None
return self.c_points.contents.z[0]
def _set_z(self, value):
if value is None:
self.is2D = True
self.c_points.contents.z[0] = 0
else:
self.c_points.contents.z[0] = value
self.is2D = False
z = property(fget=_get_z, fset=_set_z,
doc="Set and obtain z coordinate")
def __str__(self):
return self.to_wkt()
def __repr__(self):
return "Point(%s)" % ', '.join(['%f' % coor for coor in self.coords()])
def __eq__(self, pnt):
"""Return True if the coordinates are the same.
>>> p0 = Point()
>>> p1 = Point()
>>> p2 = Point(1, 1)
>>> p0 == p1
True
>>> p1 == p2
False
"""
if isinstance(pnt, Point):
return pnt.coords() == self.coords()
return Point(*pnt).coords() == self.coords()
def __ne__(self, other):
return not self == other
    # Restore Python 2 hashing behaviour on Python 3
__hash__ = object.__hash__
def coords(self):
"""Return a tuple with the point coordinates. ::
>>> pnt = Point(10, 100)
>>> pnt.coords()
(10.0, 100.0)
        If the point is 2D, return an (x, y) tuple. But if we change the ``z``,
        the Point object becomes a 3D point, therefore the method returns an
        (x, y, z) tuple. ::
>>> pnt.z = 1000.
>>> pnt.coords()
(10.0, 100.0, 1000.0)
..
"""
if self.is2D:
return self.x, self.y
else:
return self.x, self.y, self.z
def to_wkt_p(self):
"""Return a "well know text" (WKT) geometry string Python implementation. ::
>>> pnt = Point(10, 100)
>>> pnt.to_wkt_p()
'POINT(10.000000 100.000000)'
.. warning::
            Only ``POINT`` (2/3D) is supported; ``POINTM`` and ``POINT`` with
            ``XYZM`` are not supported yet.
"""
return "POINT(%s)" % ' '.join(['%f' % coord
for coord in self.coords()])
def distance(self, pnt):
"""Calculate distance of 2 points, using the Vect_points_distance
C function, If one of the point have z == None, return the 2D distance.
:param pnt: the point for calculate the distance
:type pnt: a Point object or a tuple with the coordinates
>>> pnt0 = Point(0, 0, 0)
>>> pnt1 = Point(1, 0)
>>> pnt0.distance(pnt1)
1.0
>>> pnt1.z = 1
>>> pnt1
Point(1.000000, 0.000000, 1.000000)
>>> pnt0.distance(pnt1)
1.4142135623730951
"""
if self.is2D or pnt.is2D:
return libvect.Vect_points_distance(self.x, self.y, 0,
pnt.x, pnt.y, 0, 0)
else:
return libvect.Vect_points_distance(self.x, self.y, self.z,
pnt.x, pnt.y, pnt.z, 1)
def buffer(self, dist=None, dist_x=None, dist_y=None, angle=0,
round_=True, tol=0.1):
"""Return the buffer area around the point, using the
``Vect_point_buffer2`` C function.
:param dist: the distance around the point
:type dist: num
:param dist_x: the distance along x
:type dist_x: num
:param dist_y: the distance along y
:type dist_y: num
:param angle: the angle between 0x and major axis
:type angle: num
:param round_: to make corners round
:type round_: bool
:param tol: fix the maximum distance between theoretical arc and
output segments
:type tol: float
:returns: the buffer as Area object
>>> pnt = Point(0, 0)
>>> boundary, centroid = pnt.buffer(10)
>>> boundary #doctest: +ELLIPSIS
Line([Point(10.000000, 0.000000),...Point(10.000000, 0.000000)])
>>> centroid
Point(0.000000, 0.000000)
"""
if dist is not None:
dist_x = dist
dist_y = dist
elif not dist_x or not dist_y:
            raise TypeError('buffer expected at least 1 argument, got 0')
bound = Line()
p_points = ctypes.pointer(bound.c_points)
libvect.Vect_point_buffer2(self.x, self.y,
dist_x, dist_y,
angle, int(round_), tol,
p_points)
return (bound, self)
class Line(Geo):
"""Instantiate a new Line with a list of tuple, or with a list of Point. ::
>>> line = Line([(0, 0), (1, 1), (2, 0), (1, -1)])
>>> line #doctest: +NORMALIZE_WHITESPACE
Line([Point(0.000000, 0.000000),
Point(1.000000, 1.000000),
Point(2.000000, 0.000000),
Point(1.000000, -1.000000)])
..
"""
# geometry type
gtype = libvect.GV_LINE
def __init__(self, points=None, **kargs):
super(Line, self).__init__(**kargs)
if points is not None:
for pnt in points:
self.append(pnt)
def __getitem__(self, key):
"""Get line point of given index, slice allowed. ::
>>> line = Line([(0, 0), (1, 1), (2, 2), (3, 3)])
>>> line[1]
Point(1.000000, 1.000000)
>>> line[-1]
Point(3.000000, 3.000000)
>>> line[:2]
[Point(0.000000, 0.000000), Point(1.000000, 1.000000)]
..
"""
#TODO:
# line[0].x = 10 is not working
#pnt.c_px = ctypes.pointer(self.c_points.contents.x[indx])
# pnt.c_px = ctypes.cast(id(self.c_points.contents.x[indx]),
# ctypes.POINTER(ctypes.c_double))
if isinstance(key, slice):
#import pdb; pdb.set_trace()
#Get the start, stop, and step from the slice
return [Point(self.c_points.contents.x[indx],
self.c_points.contents.y[indx],
None if self.is2D else self.c_points.contents.z[indx])
for indx in range(*key.indices(len(self)))]
elif isinstance(key, int):
if key < 0: # Handle negative indices
key += self.c_points.contents.n_points
if key >= self.c_points.contents.n_points:
raise IndexError('Index out of range')
return Point(self.c_points.contents.x[key],
self.c_points.contents.y[key],
None if self.is2D else self.c_points.contents.z[key])
else:
raise ValueError("Invalid argument type: %r." % key)
def __setitem__(self, indx, pnt):
"""Change the coordinate of point. ::
>>> line = Line([(0, 0), (1, 1)])
>>> line[0] = (2, 2)
>>> line
Line([Point(2.000000, 2.000000), Point(1.000000, 1.000000)])
..
"""
x, y, z = get_xyz(pnt)
self.c_points.contents.x[indx] = x
self.c_points.contents.y[indx] = y
self.c_points.contents.z[indx] = z
def __iter__(self):
"""Return a Point generator of the Line"""
return (self.__getitem__(i) for i in range(self.__len__()))
def __len__(self):
"""Return the number of points of the line."""
return self.c_points.contents.n_points
def __str__(self):
return self.to_wkt()
def __repr__(self):
return "Line([%s])" % ', '.join([repr(pnt) for pnt in self.__iter__()])
def point_on_line(self, distance, angle=0, slope=0):
"""Return a Point object on line in the specified distance, using the
`Vect_point_on_line` C function.
Raise a ValueError If the distance exceed the Line length. ::
>>> line = Line([(0, 0), (1, 1)])
>>> line.point_on_line(5) #doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
        ValueError: The distance exceeds the length of the line,
that is: 1.414214
>>> line.point_on_line(1)
Point(0.707107, 0.707107)
..
"""
# instantiate an empty Point object
maxdist = self.length()
if distance > maxdist:
str_err = "The distance exceed the length of the line, that is: %f"
raise ValueError(str_err % maxdist)
pnt = Point(0, 0, -9999)
if not libvect.Vect_point_on_line(self.c_points, distance,
pnt.c_points.contents.x,
pnt.c_points.contents.y,
pnt.c_points.contents.z,
ctypes.pointer(ctypes.c_double(angle)),
ctypes.pointer(ctypes.c_double(slope))):
raise ValueError("Vect_point_on_line give an error.")
pnt.is2D = self.is2D
return pnt
@mapinfo_must_be_set
def alive(self):
"""Return True if this line is alive or False if this line is
dead or its index is out of range.
"""
        return bool(libvect.Vect_line_alive(self.c_mapinfo, self.id))
def append(self, pnt):
"""Appends one point to the end of a line, using the
``Vect_append_point`` C function.
:param pnt: the point to add to line
        :type pnt: a Point object or a tuple with the coordinates
_url = "https://console.jumpcloud.com/api/systemusers/" + str(user_id)
response_json = get_response_json(_url)
return response_json
def get_systemusers_json():
"""return: json get_systemusers_json_multi."""
skip = 0
limit = 100
data = get_systemusers_json_multi(skip, limit)
totalcount = data['totalCount']
resultlist = data['results']
while len(data['results']) > 0:
skip += 100
data = get_systemusers_json_multi(skip, limit=100)
resultlist.extend(data['results'])
dictdata = {'totalCount': totalcount, 'results': resultlist}
jdata = json.dumps(dictdata)
return json.loads(jdata)
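# Usage sketch (assumes the JUMPCLOUD_API_KEY environment variable is set;
# illustrative only, not part of the original module):
#   data = get_systemusers_json()
#   print(data['totalCount'], 'total users,', len(data['results']), 'fetched')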
def get_response_json(_url):
"""get: url: return json."""
response = requests.get(_url,
headers={'x-api-key': os.environ.get('JUMPCLOUD_API_KEY'),
'Content-Type': 'application/json',
'Accept': 'application/json'})
return response.json()
def get_systemusers_json_multi(skip, limit):
"""get: api systemusers json: return dict."""
_url = "https://console.jumpcloud.com/api/systemusers"
_url += "?skip=" + str(skip) + '&limit=' + str(limit)
response = get_response_json(_url)
return response
def list_users():
"""print: get_systemusers_json users."""
jdata = get_systemusers_json()
if len(jdata) == 0:
print('Zero (0) response')
for data in jdata['results']:
middlename = data.get('middlename')
if middlename == "" or middlename is None:
middlename = ' '
else:
middlename = ' ' + str(data.get('middlename')) + ' '
_line = str(data.get('_id')) + ' ' + str(data.get('username'))
_line += ' (' + str(data.get('displayname')) + ') '
_line += '["' + str(data.get('firstname'))
_line += str(middlename) + str(data.get('lastname')) + '"] '
_line += str(data.get('email'))
print(_line)
def list_users_suspended(_print=True):
"""return: dict get_systemusers_json suspended."""
thisdict = {}
jdata = get_systemusers_json()
if len(jdata) == 0:
print('Zero (0) response')
for data in jdata['results']:
suspended = data.get('suspended')
if str(suspended) == 'True':
_line = data.get('_id') + ' ' + data.get('username') + ' ' + data.get('email') + ' '
_line += 'suspended:' + str(suspended)
if _print:
print(_line)
thisdict[data.get('_id')] = data.get('email')
return thisdict
def list_users_locked(_print=True):
"""return: dict get_systemusers_json account_locked."""
thisdict = {}
jdata = get_systemusers_json()
if len(jdata) == 0:
print('Zero (0) response')
for data in jdata['results']:
account_locked = data.get('account_locked')
if str(account_locked) != 'False':
_line = data.get('_id') + ' ' + data.get('username') + ' ' + data.get('email') + ' '
_line += 'account_locked:' + str(account_locked)
if _print:
print(_line)
thisdict[data.get('_id')] = data.get('email')
return thisdict
def list_users_password_expired(_print=True):
"""return: dict get_systemusers_json password_expired."""
thisdict = {}
jdata = get_systemusers_json()
if len(jdata) == 0:
print('Zero (0) response')
for data in jdata['results']:
password_expired = data.get('password_expired')
if str(password_expired) != 'False':
_line = data.get('_id') + ' ' + data.get('username') + ' ' + data.get('email') + ' '
_line += 'password_expired:' + str(password_expired)
if _print:
print(_line)
thisdict[data.get('_id')] = data.get('email')
return thisdict
def list_users_not_activated(_print=True):
"""return: dict get_systemusers_json activated."""
thisdict = {}
jdata = get_systemusers_json()
if len(jdata) == 0:
print('Zero (0) response')
for data in jdata['results']:
activated = data.get('activated')
if str(activated) != 'True':
_line = data.get('_id') + ' ' + data.get('username') + ' ' + data.get('email') + ' '
_line += 'activated:' + str(activated)
if _print:
print(_line)
thisdict[data.get('_id')] = data.get('email')
return thisdict
def list_users_ldap_bind(_print=True):
"""return: dict get_systemusers_json ldap_bind."""
thisdict = {}
jdata = get_systemusers_json()
if len(jdata) == 0:
print('Zero (0) response')
for data in jdata['results']:
ldap_binding_user = data.get('ldap_binding_user')
if str(ldap_binding_user) == 'True':
_line = data.get('_id') + ' ' + data.get('username') + ' ' + data.get('email') + ' '
_line += 'ldap_binding_user:' + str(ldap_binding_user)
if _print:
print(_line)
thisdict[data.get('_id')] = data.get('email')
return thisdict
def list_users_mfa():
"""print: get_systemusers_json mfa."""
jdata = get_systemusers_json()
if len(jdata) == 0:
print('Zero (0) response')
for data in jdata['results']:
mfa_json = json.dumps(data.get('mfa'), sort_keys=True)
        _output = data.get('_id') + ' "' + data.get('email') + '" ' + str(mfa_json)
print(_output)
def list_users_json():
"""print: get_systemusers_json."""
response = get_systemusers_json()
if len(response) == 0:
print('Zero (0) response')
print(json.dumps(response, sort_keys=True, indent=4))
def list_systems_json(system_id=None):
"""print: get_systems_json_single."""
if system_id:
system_id = ''.join(system_id)
jdata = get_systems_json_single(system_id)
else:
jdata = get_systems_json_single()
print(json.dumps(jdata, sort_keys=True, indent=4))
def list_systems_id(skip=0):
    """print: get_systems_id_json system_id."""
    jdata = get_systems_id_json(skip, limit=100)
jdata = get_systems_id_json(skip, limit=100)
for data in jdata['results']:
print(data.get('_id'))
while len(jdata['results']) > 0:
skip += 100
jdata = get_systems_id_json(skip, limit=100)
for data in jdata['results']:
print(data.get('_id'))
def get_systems_id_json(skip, limit):
"""get: api systems."""
_url = "https://console.jumpcloud.com/api/systems?skip=" + str(skip) + '&limit=' + str(limit)
response_json = get_response_json(_url)
return response_json
def get_systems_id():
"""return: idList get_systems_id_json."""
idlist = []
skip = 0
jdata = get_systems_id_json(skip, limit=100)
for data in jdata['results']:
idlist.append(data.get('_id'))
while len(jdata['results']) > 0:
skip += 100
jdata = get_systems_id_json(skip, limit=100)
for data in jdata['results']:
idlist.append(data.get('_id'))
return idlist
def list_systeminsights_hardware():
"""print: get_systeminsights_system_info_json hardware."""
idlist = get_systems_id()
for system_id in idlist:
response = get_systeminsights_system_info_json(system_id, skip=0, limit=100)
if len(response) == 0:
print(str(system_id))
for line in response:
memgb = round(int(line['physical_memory']) / 1024 / 1024 / 1024)
_line = str(system_id) + ' ' + line['computer_name'] + ' (' + line['hostname'] + ') '
_line += line['hardware_model'] + ' (' + line['hardware_vendor'] + ') '
_line += line['cpu_type'] + ' (' + str(line['cpu_physical_cores']) + ') '
_line += line['cpu_brand'] + ' ' + str(line['physical_memory'])
_line += ' Bytes (' + str(memgb) + ' GB) ["'
_line += str(line['hardware_serial']) + '"] '
print(_line)
def list_systeminsights_hardware_csv():
"""print: get_systeminsights_system_info_json csv."""
idlist = get_systems_id()
for system_id in idlist:
response = get_systeminsights_system_info_json(system_id, skip=0, limit=100)
if len(response) == 0:
print(str(system_id))
for line in response:
memgb = round(int(line['physical_memory']) / 1024 / 1024 / 1024)
_line = str(system_id) + ',' + line['computer_name'] + ',(' + line['hostname'] + '),'
_line += str(line['hardware_model']).replace(",", " ")
_line += ',(' + line['hardware_vendor'] + '),'
_line += line['cpu_type'] + ',(' + str(line['cpu_physical_cores']) + '),'
_line += line['cpu_brand'] + ',' + str(line['physical_memory'])
_line += ' Bytes,(' + str(memgb) + ' GB),["'
_line += str(line['hardware_serial']) + '"] '
print(_line)
def list_systeminsights_hardware_json():
"""print: get_systeminsights_system_info_json."""
skip = 0
limit = 100
idlist = get_systems_id()
for system_id in idlist:
response = get_systeminsights_system_info_json(system_id, limit, skip)
if len(response) == 0:
response = {'system_id': system_id}
print(json.dumps(response, sort_keys=False, indent=4))
def get_systeminsights_system_info(system_id=None):
"""print: get_systeminsights_system_info_json system_id."""
system_id = ''.join(system_id)
jdata = get_systeminsights_system_info_json(system_id, skip=0, limit=100)
print(json.dumps(jdata, sort_keys=False, indent=4))
# GET /systeminsights/system_info
# List System Insights System Info
# Valid filter fields are system_id and cpu_subtype.
# https://docs.jumpcloud.com/2.0/system-insights/list-system-insights-system-info
def get_systeminsights_system_info_json(system_id=None, limit=100, skip=0):
    """get: api v2 systeminsights system_info limit skip filter system_id."""
system_id = ''.join(system_id)
jumpcloud_url = "https://console.jumpcloud.com/api/v2/systeminsights/system_info"
_url = jumpcloud_url + "?limit=" + str(limit) + "&skip=" + str(skip)
_url += "&filter=system_id:eq:" + str(system_id)
response_json = get_response_json(_url)
return response_json
def list_systems():
"""print: get_systems_json hostname arch."""
jdata = get_systems_json()
for data in jdata['results']:
print(str(data.get('_id')) + ' "'
+ str(data.get('displayName')) + '" ('
+ str(data.get('hostname')) + ') '
+ str(data.get('os')) + ' '
+ str(data.get('version')) + ' '
+ str(data.get('arch')))
def list_systems_hostname():
"""print: get_systems_json hostname."""
jdata = get_systems_json()
for data in jdata['results']:
print(str(data.get('_id')) + ' ' + str(data.get('hostname')))
def list_systems_os(_print=True):
"""return: dict get_systems_json os."""
thisdict = {}
jdata = get_systems_json()
for data in jdata['results']:
if _print:
print(str(data.get('_id')) + ' ' + str(data.get('os')))
thisdict[data.get('_id')] = data.get('os')
return thisdict
def get_systems_os(system_id, _print=True):
"""return: str get_systems_json_single os."""
system_id = ''.join(system_id)
jdata = get_systems_json_single(system_id)
if _print:
print(str(jdata['os']))
return jdata['os']
def list_systems_serial():
"""print: get_systems_json serialNumber."""
jdata = get_systems_json()
for data in jdata['results']:
print(str(data.get('_id')) + ' ("' + str(data.get('serialNumber')) + '") ')
def list_systems_agent():
"""print: get_systems_json agentVersion."""
jdata = get_systems_json()
for data in jdata['results']:
data_str = str(data.get('_id')) + ' ' + str(data.get('hostname'))
data_str += ' ("' + str(data.get('agentVersion')) + '") '
print(data_str)
def list_systems_os_version():
"""print: get_systems_json os version."""
jdata = get_systems_json()
for data in jdata['results']:
data_str = str(data.get('_id')) + ' ' + str(data.get('os')) + ' ' + str(data.get('version'))
data_str += ' ' + str(data.get('arch'))
print(data_str)
def list_systems_insights():
"""print: get_systems_json systemInsights."""
jdata = get_systems_json()
for data in jdata['results']:
_line = str(data.get('_id')) + ' "' + str(data.get('displayName'))
_line += '" (' + str(data.get('hostname'))
_line += ') ' + str(data.get('os')) + ' ' + str(data.get('version'))
_line += ' ' + str(data.get('arch'))
_line += ' ' + json.dumps(str(data.get('systemInsights')))
print(_line)
def list_systems_state():
"""print: get_systems_json lastContact."""
jdata = get_systems_json()
for data in jdata['results']:
_line = str(data.get('_id')) + ' "' + str(data.get('displayName'))
_line += '" (' + str(data.get('hostname'))
_line += ') ' + str(data.get('lastContact')) + ' active: '
_line += str(json.dumps(data.get('active')))
print(_line)
def list_systems_fde():
"""print: get_systems_json fde."""
jdata = get_systems_json()
if len(jdata) == 0:
print('Zero (0) response')
if len(jdata) == 1:
print(str(jdata))
print('I have spoken.') # Kuiil
return
for data in jdata['results']:
fde_json = json.dumps(data.get('fde'), sort_keys=True)
_line = str(data.get('_id')) + ' "' + str(data.get('displayName'))
_line += '" (' + str(data.get('hostname'))
_line += ') ' + str(data.get('os')) + ' ' + str(data.get('version'))
_line += ' ' + str(data.get('arch'))
_line += ' ' + str(data.get('fileSystem')) + ' [' + str(fde_json) + ']'
print(_line)
def list_systems_root_ssh():
"""print: get_systems_json allowSshRootLogin."""
jdata = get_systems_json()
for data in jdata['results']:
root_ssh = json.dumps(data.get('allowSshRootLogin'), sort_keys=True)
        _line = str(data.get('_id')) + ' ' + str(data.get('hostname'))
        _line += ' ("' + str(root_ssh) + '") '
        print(_line)
<filename>susi/SOMEstimator.py
"""SOMEstimator class.
Copyright (c) 2019-2021 <NAME>.
All rights reserved.
"""
from abc import ABC, abstractmethod
from typing import List, Optional, Sequence, Tuple, Union
import numpy as np
from sklearn.base import BaseEstimator
from sklearn.utils.validation import check_array, check_is_fitted
from tqdm import tqdm
from .SOMClustering import SOMClustering
from .SOMUtils import check_estimation_input, modify_weight_matrix_online
class SOMEstimator(SOMClustering, BaseEstimator, ABC):
"""Basic class for supervised self-organizing maps.
Parameters
----------
n_rows : int, optional (default=10)
Number of rows for the SOM grid
n_columns : int, optional (default=10)
Number of columns for the SOM grid
init_mode_unsupervised : str, optional (default="random")
Initialization mode of the unsupervised SOM
init_mode_supervised : str, optional (default="random")
Initialization mode of the supervised SOM
n_iter_unsupervised : int, optional (default=1000)
Number of iterations for the unsupervised SOM
n_iter_supervised : int, optional (default=1000)
Number of iterations for the supervised SOM
train_mode_unsupervised : str, optional (default="online")
Training mode of the unsupervised SOM
train_mode_supervised : str, optional (default="online")
Training mode of the supervised SOM
neighborhood_mode_unsupervised : str, optional (default="linear")
Neighborhood mode of the unsupervised SOM
neighborhood_mode_supervised : str, optional (default="linear")
Neighborhood mode of the supervised SOM
learn_mode_unsupervised : str, optional (default="min")
Learning mode of the unsupervised SOM
learn_mode_supervised : str, optional (default="min")
Learning mode of the supervised SOM
distance_metric : str, optional (default="euclidean")
Distance metric to compare on feature level (not SOM grid).
Possible metrics: {"euclidean", "manhattan", "mahalanobis",
"tanimoto", "spectralangle"}. Note that "tanimoto" tends to be slow.
.. versionadded:: 1.1.1
Spectral angle metric.
learning_rate_start : float, optional (default=0.5)
Learning rate start value
learning_rate_end : float, optional (default=0.05)
Learning rate end value (only needed for some lr definitions)
nbh_dist_weight_mode : str, optional (default="pseudo-gaussian")
Formula of the neighborhood distance weight. Possible formulas
are: {"pseudo-gaussian", "mexican-hat"}.
missing_label_placeholder : int or str or None, optional (default=None)
Label placeholder for datapoints with no label. This is needed for
semi-supervised learning.
n_jobs : int or None, optional (default=None)
The number of jobs to run in parallel.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
verbose : int, optional (default=0)
Controls the verbosity.
Attributes
----------
node_list_ : np.ndarray of (int, int) tuples
List of 2-dimensional coordinates of SOM nodes
radius_max_ : float, int
Maximum radius of the neighborhood function
radius_min_ : float, int
Minimum radius of the neighborhood function
unsuper_som_ : np.ndarray
Weight vectors of the unsupervised SOM
shape = (self.n_rows, self.n_columns, X.shape[1])
X_ : np.ndarray
Input data
fitted_ : bool
States if estimator is fitted to X
max_iterations_ : int
Maximum number of iterations for the current training
bmus_ : list of (int, int) tuples
List of best matching units (BMUs) of the dataset X
sample_weights_ : np.ndarray
Sample weights.
n_features_in_ : int
Number of input features
"""
def __init__(
self,
n_rows: int = 10,
n_columns: int = 10,
*,
init_mode_unsupervised: str = "random",
init_mode_supervised: str = "random",
n_iter_unsupervised: int = 1000,
n_iter_supervised: int = 1000,
train_mode_unsupervised: str = "online",
train_mode_supervised: str = "online",
neighborhood_mode_unsupervised: str = "linear",
neighborhood_mode_supervised: str = "linear",
learn_mode_unsupervised: str = "min",
learn_mode_supervised: str = "min",
distance_metric: str = "euclidean",
learning_rate_start: float = 0.5,
learning_rate_end: float = 0.05,
nbh_dist_weight_mode: str = "pseudo-gaussian",
missing_label_placeholder: Optional[Union[int, str]] = None,
n_jobs: Optional[int] = None,
random_state=None,
verbose: Optional[int] = 0,
) -> None:
"""Initialize SOMEstimator object."""
super().__init__(
n_rows=n_rows,
n_columns=n_columns,
init_mode_unsupervised=init_mode_unsupervised,
n_iter_unsupervised=n_iter_unsupervised,
train_mode_unsupervised=train_mode_unsupervised,
neighborhood_mode_unsupervised=neighborhood_mode_unsupervised,
learn_mode_unsupervised=learn_mode_unsupervised,
distance_metric=distance_metric,
learning_rate_start=learning_rate_start,
learning_rate_end=learning_rate_end,
nbh_dist_weight_mode=nbh_dist_weight_mode,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose,
)
self.init_mode_supervised = init_mode_supervised
self.n_iter_supervised = n_iter_supervised
self.train_mode_supervised = train_mode_supervised
self.neighborhood_mode_supervised = neighborhood_mode_supervised
self.learn_mode_supervised = learn_mode_supervised
self.missing_label_placeholder = missing_label_placeholder
@abstractmethod
def _init_super_som(self) -> None:
"""Initialize map."""
return None
def fit(self, X: Sequence, y: Optional[Sequence] = None):
"""Fit supervised SOM to the input data.
Parameters
----------
X : array-like matrix of shape = [n_samples, n_features]
The prediction input samples.
y : array-like matrix of shape = [n_samples, 1]
The labels (ground truth) of the input samples
Returns
-------
self : object
Examples
--------
Load the SOM and fit it to your input data `X` and the labels `y` with:
>>> import susi
>>> som = susi.SOMRegressor()
>>> som.fit(X, y)
"""
X, y = check_estimation_input(X, y)
self.X_: np.ndarray = X
self.y_: np.ndarray = y
self.n_features_in_ = self.X_.shape[1]
return self._fit_estimator()
def _fit_estimator(self):
"""Fit supervised SOM to the (checked) input data.
        Operates on the validated attributes ``self.X_`` and ``self.y_``
        that were set by :meth:`fit`; it takes no parameters of its own.
"""
np.random.seed(seed=self.random_state)
# supervised case:
if self.missing_label_placeholder is None:
self.labeled_indices_ = list(range(len(self.y_)))
self.sample_weights_ = np.full(
fill_value=1.0, shape=(len(self.X_), 1)
)
# semi-supervised case:
else:
self.labeled_indices_ = np.where(
self.y_ != self.missing_label_placeholder
)[0]
unlabeled_weight = max(
len(self.labeled_indices_) / len(self.y_), 0.1
)
self.sample_weights_ = np.full(
fill_value=unlabeled_weight, shape=(len(self.X_), 1)
)
self.sample_weights_[self.labeled_indices_] = 1.0
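            # e.g. 25 labeled samples out of 100 -> unlabeled rows get weight
            # 0.25 (never below the 0.1 floor) and labeled rows weight 1.0
            # (illustrative numbers, not from the original source)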
# train SOMs
self._train_unsupervised_som()
self._train_supervised_som()
self.fitted_ = True
return self
def predict(
self, X: Sequence, y: Optional[Sequence] = None
) -> List[float]:
"""Predict output of data X.
Parameters
----------
X : array-like matrix of shape = [n_samples, n_features]
The prediction input samples.
y : None, optional
Ignored.
Returns
-------
y_pred : list of float
List of predicted values.
Examples
--------
Fit the SOM on your data `X, y`:
>>> import susi
>>> som = susi.SOMClassifier()
>>> som.fit(X, y)
>>> y_pred = som.predict(X)
"""
# Check is fit had been called
check_is_fitted(self, ["X_", "y_"])
# Input validation
X = check_array(X, dtype=np.float64)
y_pred_list = []
for dp in tqdm(X, desc="predict", **self.tqdm_params_):
y_pred_list.append(self._calc_estimation_output(dp, proba=False))
y_pred = np.array(y_pred_list)
return y_pred
def _calc_estimation_output(
self, datapoint: np.ndarray, proba: bool = False
) -> Tuple[Union[int, str, float], np.ndarray]:
"""Get SOM output for fixed SOM.
The given datapoint doesn't have to belong to the training set of the
input SOM.
Parameters
----------
datapoint : np.ndarray, shape=(X.shape[1])
Datapoint = one row of the dataset X
proba : bool
If True, probabilities are calculated.
Returns
-------
int or str or float
Content of SOM node which is linked to the datapoint.
Classification: the label.
Regression: the target variable.
TODO Implement handling of incomplete datapoints
"""
bmu_pos = self.get_bmu(datapoint, self.unsuper_som_)
estimation_output = self.super_som_[bmu_pos[0], bmu_pos[1]][0]
if not proba:
return estimation_output
return (estimation_output, self._calc_proba(bmu_pos=bmu_pos))
def _calc_proba(self, bmu_pos: Tuple[int, int]) -> np.ndarray:
"""Calculate probabilities for datapoint related to BMU.
.. versionadded:: 1.1.3
This function is just a placeholder and should not be used.
Parameters
----------
bmu_pos : Tuple[int, int]
BMU position on the SOM grid.
Returns
-------
np.ndarray
Dummy output.
"""
return np.array([1.0])
def _modify_weight_matrix_supervised(
self,
dist_weight_matrix: np.ndarray,
true_vector: Optional[np.array] = None,
learning_rate: Optional[float] = None,
) -> np.ndarray:
"""Modify weights of the supervised SOM, either online or batch.
Parameters
----------
dist_weight_matrix : np.ndarray of float
Current distance weight of the SOM for the specific node
true_vector : np.ndarray, optional (default=None)
True vector. `None` is only valid in batch mode.
learning_rate : float, optional (default=None)
Current learning rate of the SOM. `None` is only valid in batch
mode.
Returns
-------
np.array
Weight vector of the SOM after the modification
"""
if self.train_mode_supervised == "online":
# require valid values for true_vector and learning_rate
if not isinstance(true_vector, np.ndarray) or not isinstance(
learning_rate, float
):
raise ValueError("Parameters required to be not None.")
return modify_weight_matrix_online(
som_array=self.super_som_,
dist_weight_matrix=dist_weight_matrix,
true_vector=true_vector,
learning_rate=learning_rate,
)
if self.train_mode_supervised == "batch":
return self._modify_weight_matrix_batch(
som_array=self.super_som_,
dist_weight_matrix=dist_weight_matrix[self.labeled_indices_],
data=self.y_[self.labeled_indices_],
)
raise ValueError(
"Invalid train_mode_supervised: " + str(self.train_mode_supervised)
)
def _train_supervised_som(self):
"""Train supervised SOM."""
self._set_bmus(self.X_[self.labeled_indices_])
self._init_super_som()
if self.train_mode_supervised == "online":
for it in tqdm(
range(self.n_iter_supervised),
desc="super",
**self.tqdm_params_,
):
# select one input vector & calculate best matching unit (BMU)
dp = self._get_random_datapoint()
bmu_pos = self.bmus_[dp]
# calculate learning rate and neighborhood function
learning_rate = self._calc_learning_rate(
curr_it=it, mode=self.learn_mode_supervised
)
nbh_func = self._calc_neighborhood_func(
curr_it=it, mode=self.neighborhood_mode_supervised
)
# calculate distance weight matrix and update weights
dist_weight_matrix = self._get_nbh_distance_weight_matrix(
nbh_func, bmu_pos
)
self.super_som_ = self._modify_weight_matrix_supervised(
dist_weight_matrix=dist_weight_matrix,
true_vector=self.y_[self.labeled_indices_][dp],
learning_rate=learning_rate,
)
elif self.train_mode_supervised == "batch":
for it in tqdm(
range(self.n_iter_supervised),
desc="super",
**self.tqdm_params_,
):
# calculate BMUs with the unsupervised (!) SOM
weightings of each vector field towards the search, e.g. image\_vector\_ weights 100%, whilst description\_vector\_ 50%.
Advanced search also supports filtering to only search through filtered results and facets to get the overview of products available when a minimum score is set.
Args:
collection_name:
Name of Collection
page:
Page of the results
page_size:
Size of each page of results
approx:
Used for approximate search
sum_fields:
                Whether to sum the multiple vectors similarity search score as 1 or separate
metric:
Similarity Metric, choose from ['cosine', 'l1', 'l2', 'dp']
filters:
Query for filtering the search results
facets:
Fields to include in the facets, if [] then all
min_score:
Minimum score for similarity metric
include_vector:
Include vectors in the search results
include_count:
Include count in the search results
include_facets:
Include facets in the search results
hundred_scale:
Whether to scale up the metric by 100
multivector_query:
                Query for advanced search that allows for multiple vector and field querying
Example:
>>> vi_client = ViCollectionClient(username, api_key, collection_name, url)
>>> advanced_search_query = {
'text' : {'vector': encode_question("How do I cluster?"), 'fields' : ['function_vector_']}
}
>>> vi_client.advanced_search(advanced_search_query)
"""
return requests.post(
url="{}/collection/advanced_search".format(self.url),
json={
"username": self.username,
"api_key": self.api_key,
"collection_name": collection_name,
"multivector_query": multivector_query,
"facets": facets,
"filters": filters,
"sum_fields": sum_fields,
"metric": metric,
"min_score": min_score,
"page": page,
"page_size": page_size,
"include_vector": include_vector,
"include_count": include_count,
"include_facets": include_facets,
},
).json()
def advanced_hybrid_search(
self,
collection_name: str,
text: str,
multivector_query: Dict,
text_fields: List,
sum_fields: bool = True,
facets: List = [],
filters: List = [],
metric: str = "cosine",
min_score=None,
page: int = 1,
page_size: int = 10,
include_vector=False,
include_count=True,
include_facets=False,
):
"""
Advanced Search a text field with vector and text using Vector Search and Traditional Search
Advanced Vector similarity search + Traditional Fuzzy Search with text and vector.
You can also give weightings of each vector field towards the search, e.g. image\_vector\_ weights 100%, whilst description\_vector\_ 50%.
Advanced search also supports filtering to only search through filtered results and facets to get the overview of products available when a minimum score is set.
Args:
collection_name:
Name of Collection
page:
Page of the results
page_size:
Size of each page of results
approx:
Used for approximate search
sum_fields:
                Whether to sum the multiple vectors similarity search score as 1 or separate
metric:
Similarity Metric, choose from ['cosine', 'l1', 'l2', 'dp']
filters:
Query for filtering the search results
facets:
Fields to include in the facets, if [] then all
min_score:
Minimum score for similarity metric
include_vector:
Include vectors in the search results
include_count:
Include count in the search results
include_facets:
Include facets in the search results
hundred_scale:
Whether to scale up the metric by 100
multivector_query:
                Query for advanced search that allows for multiple vector and field querying
text:
Text Search Query (not encoded as vector)
text_fields:
Text fields to search against
traditional_weight:
Multiplier of traditional search. A value of 0.025~0.1 is good.
fuzzy:
Fuzziness of the search. A value of 1-3 is good.
join:
Whether to consider cases where there is a space in the word. E.g. Go Pro vs GoPro.
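        Example:
            >>> # Illustrative sketch only -- reuses the client and encoder assumed
            >>> # in the advanced_search example; field names are hypothetical.
            >>> vi_client.advanced_hybrid_search(collection_name,
            ...     text='How do I cluster?',
            ...     multivector_query={'text': {'vector': encode_question('How do I cluster?'),
            ...                                 'fields': ['function_vector_']}},
            ...     text_fields=['function'])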
"""
return requests.post(
url="{}/collection/advanced_hybrid_search".format(self.url),
json={
"username": self.username,
"api_key": self.api_key,
"collection_name": collection_name,
"text": text,
"multivector_query": multivector_query,
"text_fields": text_fields,
"sum_fields": sum_fields,
"facets": facets,
"filters": filters,
"metric": metric,
"min_score": min_score,
"page": page,
"page_size": page_size,
"include_vector": include_vector,
"include_count": include_count,
"include_facets": include_facets,
},
).json()
def advanced_search_by_id(
self,
collection_name: str,
document_id: str,
fields: Dict,
sum_fields: bool = True,
facets: List = [],
filters: List = [],
metric: str = "cosine",
min_score=None,
page: int = 1,
page_size: int = 10,
include_vector=False,
include_count=True,
include_facets=False,
):
"""
Advanced Single Product Recommendations (Search by an id).
        For example: Search with id of a product in the database, and using the product's image and description vectors to find the most similar products by what it looks like and what it's described to do.
You can also give weightings of each vector field towards the search, e.g. image\_vector\_ weights 100%, whilst description\_vector\_ 50%.
Advanced search also supports filtering to only search through filtered results and facets to get the overview of products available when a minimum score is set.
Args:
collection_name:
Name of Collection
page:
Page of the results
page_size:
Size of each page of results
approx:
Used for approximate search
sum_fields:
                Whether to sum the multiple vectors similarity search score as 1 or separate
metric:
Similarity Metric, choose from ['cosine', 'l1', 'l2', 'dp']
filters:
Query for filtering the search results
facets:
Fields to include in the facets, if [] then all
min_score:
Minimum score for similarity metric
include_vector:
Include vectors in the search results
include_count:
Include count in the search results
include_facets:
Include facets in the search results
hundred_scale:
Whether to scale up the metric by 100
document_id:
ID of a document
search_fields:
Vector fields to search against, and the weightings for them.
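        Example:
            >>> # Illustrative sketch only; the document id and field weights
            >>> # are hypothetical.
            >>> vi_client.advanced_search_by_id(collection_name, 'product_id_1',
            ...     fields={'image_vector_': 1, 'description_vector_': 0.5})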
"""
return requests.post(
url="{}/collection/advanced_search_by_id".format(self.url),
json={
"username": self.username,
"api_key": self.api_key,
"collection_name": collection_name,
"document_id": document_id,
"search_fields": fields,
"sum_fields": sum_fields,
"facets": facets,
"filters": filters,
"metric": metric,
"min_score": min_score,
"page": page,
"page_size": page_size,
"include_vector": include_vector,
"include_count": include_count,
"include_facets": include_facets,
},
).json()
def advanced_search_by_ids(
self,
collection_name: str,
document_ids: Dict,
fields: Dict,
vector_operation: str = "mean",
sum_fields: bool = True,
facets: List = [],
filters: List = [],
metric: str = "cosine",
min_score=None,
page: int = 1,
page_size: int = 10,
include_vector=False,
include_count=True,
include_facets=False,
):
"""
Advanced Multi Product Recommendations (Search by ids).
        For example: Search with multiple ids of products in the database, and using the product's image and description vectors to find the most similar products by what it looks like and what it's described to do.
        You can also give weightings of each vector field towards the search, e.g. image\_vector\_ weights 100%, whilst description\_vector\_ 50%.
        You can also give weightings on each product as well, e.g. product ID-A weighted 100% whilst product ID-B 50%.
Advanced search also supports filtering to only search through filtered results and facets to get the overview of products available when a minimum score is set.
Args:
collection_name:
Name of Collection
page:
Page of the results
page_size:
Size of each page of results
approx:
Used for approximate search
sum_fields:
                Whether to sum the multiple vectors similarity search score as 1 or separate
metric:
Similarity Metric, choose from ['cosine', 'l1', 'l2', 'dp']
filters:
Query for filtering the search results
facets:
Fields to include in the facets, if [] then all
min_score:
Minimum score for similarity metric
include_vector:
Include vectors in the search results
include_count:
Include count in the search results
include_facets:
Include facets in the search results
hundred_scale:
Whether to scale up the metric by 100
document_ids:
Document IDs to get recommendations for, and the weightings of each document
search_fields:
Vector fields to search against, and the weightings for them.
vector_operation:
Aggregation for the vectors, choose from ['mean', 'sum', 'min', 'max']
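        Example:
            >>> # Illustrative sketch only; ids and weights are hypothetical.
            >>> vi_client.advanced_search_by_ids(collection_name,
            ...     document_ids={'product_id_1': 1, 'product_id_2': 0.5},
            ...     fields={'image_vector_': 1, 'description_vector_': 0.5})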
"""
return requests.post(
url="{}/collection/advanced_search_by_ids".format(self.url),
json={
"username": self.username,
"api_key": self.api_key,
"collection_name": collection_name,
"document_ids": document_ids,
"search_fields": fields,
"vector_operation": vector_operation,
"sum_fields": sum_fields,
"facets": facets,
"filters": filters,
"metric": metric,
"min_score": min_score,
"page": page,
"page_size": page_size,
"include_vector": include_vector,
"include_count": include_count,
"include_facets": include_facets,
},
).json()
def advanced_search_by_positive_negative_ids(
self,
collection_name: str,
positive_document_ids: Dict,
negative_document_ids: Dict,
fields: Dict,
vector_operation: str = "mean",
sum_fields: bool = True,
facets: List = [],
filters: List = [],
metric: str = "cosine",
min_score=None,
page: int = 1,
page_size: int = 10,
include_vector=False,
include_count=True,
include_facets=False,
):
"""
Advanced Multi Product Recommendations with likes and dislikes (Search by ids).
        For example: Search with multiple ids of liked and disliked products in the database. Then using the product's image and description vectors to find the most similar products by what it looks like and what it's described to do against the positives, and the most dissimilar products for the negatives.
        You can also give weightings of each vector field towards the search, e.g. image\_vector\_ weights 100%, whilst description\_vector\_ 50%.
        You can also give weightings on each product as well, e.g. product ID-A weighted 100% whilst product ID-B 50%.
Advanced search also supports filtering to only search through filtered results and facets to get the overview of products available when a minimum score is set.
Args:
collection_name:
Name of Collection
page:
Page of the results
page_size:
Size of each page of results
approx:
Used for approximate search
sum_fields:
                Whether to sum the multiple vectors similarity search score as 1 or separate
metric:
Similarity Metric, choose from ['cosine', 'l1', 'l2', 'dp']
filters:
Query for filtering the search results
facets:
Fields to include in the facets, if [] then all
min_score:
Minimum score for similarity metric
include_vector:
Include vectors in the search results
include_count:
Include count in the search results
include_facets:
Include facets in the search results
hundred_scale:
Whether to scale up the metric by 100
positive_document_ids:
Positive Document IDs to get recommendations for, and the weightings of each document
negative_document_ids:
Negative Document IDs to get recommendations for, and the weightings of each document
search_fields:
Vector fields to search against, and the weightings for them.
vector_operation:
Aggregation for the vectors, choose from ['mean', 'sum', 'min', 'max']
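        Example:
            >>> # Illustrative sketch only; ids and weights are hypothetical.
            >>> vi_client.advanced_search_by_positive_negative_ids(collection_name,
            ...     positive_document_ids={'liked_product_id': 1},
            ...     negative_document_ids={'disliked_product_id': 1},
            ...     fields={'image_vector_': 1})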
"""
return requests.post(
url="{}/collection/advanced_search_by_positive_negative_ids".format(
self.url
),
json={
"username": self.username,
"api_key": self.api_key,
"collection_name": collection_name,
# -*- coding: utf-8 -*-
"""
Find cuts of a page and annotate them based on the table separators
Copyright Naver Labs Europe 2018
<NAME>
Developed for the EU project READ. The READ project has received funding
from the European Union's Horizon 2020 research and innovation programme
under grant agreement No 674943.
"""
import sys, os
from optparse import OptionParser
import operator
from collections import defaultdict
from lxml import etree
import numpy as np
import shapely.geometry as geom
import shapely.affinity
try: #to ease the use without proper Python installation
import TranskribusDU_version
except ImportError:
sys.path.append( os.path.dirname(os.path.dirname( os.path.abspath(sys.argv[0]) )) )
import TranskribusDU_version
from common.trace import traceln
from xml_formats.PageXml import MultiPageXml, PageXml
from util.Polygon import Polygon
from util.Shape import ShapeLoader, PolygonPartition
from tasks.DU_Table.DU_ABPTableSkewed_CutAnnotator import _isBaselineNotO, _isBaselineInTable,\
computePRF
from tasks.DU_Table.DU_ABPTableRCAnnotation import computeMaxRowSpan
from util.partitionEvaluation import evalPartitions
from util.jaccard import jaccard_distance
class CutAnnotator:
"""
Cutting the page horizontally
"""
fRATIO = 0.66
def __init__(self):
pass
def get_separator_YX_from_DOM(self, root, fMinPageCoverage):
"""
get the x and y of the GT table separators
return lists of y, for horizontal and of x for vertical separators, per page
return [(y_list, x_list), ...]
"""
ltlYlX = []
for ndPage in MultiPageXml.getChildByName(root, 'Page'):
w, h = int(ndPage.get("imageWidth")), int(ndPage.get("imageHeight"))
lYi, lXi = [], []
l = MultiPageXml.getChildByName(ndPage,'TableRegion')
if len(l) != 1:
if l:
traceln("** warning ** %d TableRegion instead of expected 1" % len(l))
else:
traceln("** warning ** no TableRegion, expected 1")
if l:
for ndTR in l:
#enumerate the table separators
for ndSep in MultiPageXml.getChildByName(ndTR,'SeparatorRegion'):
sPoints=MultiPageXml.getChildByName(ndSep,'Coords')[0].get('points')
[(x1,y1),(x2,y2)] = Polygon.parsePoints(sPoints).lXY
dx, dy = abs(x2-x1), abs(y2-y1)
if dx > dy:
#horizontal table line
if dx > (fMinPageCoverage*w):
#ym = (y1+y2)/2.0 # 2.0 to support python2
lYi.append((y1,y2))
else:
if dy > (fMinPageCoverage*h):
#xm = (x1+x2)/2.0
lXi.append((x1,x2))
ltlYlX.append( (lYi, lXi) )
return ltlYlX
def getHisto(self, lNd, w, _fMinHorizProjection, h, _fMinVertiProjection
, fRatio=1.0
, fMinHLen=None):
"""
return two Numpy array reflecting the histogram of projections of objects
first array along Y axis (horizontal projection), 2nd along X axis
(vertical projection)
        when fMinHLen is given, we do not horizontally scale text shorter than fMinHLen
"""
        hy = np.zeros((h,), float)
        hx = np.zeros((w,), float)
for nd in lNd:
sPoints=MultiPageXml.getChildByName(nd,'Coords')[0].get('points')
try:
x1,y1,x2,y2 = Polygon.parsePoints(sPoints).fitRectangle()
if fMinHLen is None or abs(x2-x1) > fMinHLen:
_x1, _x2 = self.scale(x1, x2, fRatio)
else:
_x1, _x2 = x1, x2
_y1, _y2 = self.scale(y1, y2, fRatio)
hy[_y1:_y2+1] += float(x2 - x1) / w
hx[_x1:_x2+1] += float(y2 - y1) / h
except ZeroDivisionError:
pass
except ValueError:
pass
return hy, hx
@classmethod
def scale(cls, a, b, fRatio):
"""
a,b are integers
apply a scaling factor to the segment
make sure its length remains non-zero
return 2 integers
"""
if fRatio == 1.0: return (a,b) # the code below does it, but no need...
l = b - a # signed length
ll = int(round(l * fRatio)) # new signed length
dl2 = (l - ll) / 2.0
ll2a = int(round(dl2))
ll2b = (l - ll) - ll2a
return a + ll2a, b - ll2b
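    # e.g. scale(0, 10, 0.8): l = 10, ll = 8, dl2 = 1.0, so the result is
    # (1, 9) -- a centred segment of the new length (illustrative values,
    # not from the original source)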
# labels...
def _getLabel(self, i,j, liGT):
"""
        i,j are the indices of the start and end of an interval of zeros
        liGT is a list of pairs of pixel coordinates
an interval of zeros is positive if it contains either end of the
separator or its middle.
"""
for iGT, jGT in liGT:
mGT = (iGT+jGT) // 2
if i <= iGT and iGT <= j:
return "S"
elif i <= jGT and jGT <= j:
return "S"
elif i <= mGT and mGT <= j:
return "S"
return "O"
def getCentreOfZeroAreas(self, h, liGT=None):
"""
liGT is the groundtruth indices
        return a list of centres of the areas containing consecutive 0s
"""
lij = [] #list of area indices
i0 = None # index of start of a 0 area
imax = h.shape[0]
i = 0
while i < imax:
if i0 is None: # we were in a non-zero area
if h[i] <= 0: i0 = i # start of an area of 0s
else: # we were in a zero area
if h[i] > 0:
# end of area of 0s
lij.append((i0, i-1))
i0 = None
i += 1
if not i0 is None:
lij.append((i0, imax-1))
if liGT is None:
liLbl = [None] * len(lij)
else:
liLbl = [self._getLabel(i,j,liGT) for (i,j) in lij]
#take middle
li = [ (j + i) // 2 for (i,j) in lij ]
return li, liLbl
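    # e.g. h = [1, 0, 0, 0, 2] has one zero run spanning indices (1, 3),
    # whose centre is 2 (illustrative values, not from the original source)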
def getLowestOfZeroAreas(self, h, liGT=None):
"""
liGT is the groundtruth indices
        return a list of the lowest points of the areas containing consecutive 0s
"""
lijm = [] #list of area indices
i0 = None # index of start of a 0 area
imax = h.shape[0]
i = 0
minV, minI = None, None
while i < imax:
if i0 is None: # we were in a non-zero area
if h[i] <= 0:
i0 = i # start of an area of 0s
minV, minI = h[i0], i0
else: # we were in a zero area
if h[i] > 0:
# end of area of 0s
lijm.append((i0, i-1, minI))
i0 = None
else:
if h[i] <= minV: # take rightmost
minV, minI = h[i], i
i += 1
if not i0 is None:
minV, minI = h[i0], i0
i = i0 + 1
while i < imax:
                if h[i] < minV: # take leftmost
minV, minI = h[i], i
i += 1
lijm.append((i0, imax-1, minI))
if liGT is None:
liLbl = [None] * len(lijm)
else:
liLbl = [self._getLabel(i,j,liGT) for (i,j,_m) in lijm]
        #take the lowest point of each area
        li = [ m for (_i,_j, m) in lijm ]
return li, liLbl
def add_cut_to_DOM(self, root,
fMinHorizProjection=0.05,
fMinVertiProjection=0.05,
ltlYlX=[]
, fRatio = 1.0
, fMinHLen = None):
"""
for each page, compute the histogram of projection of text on Y then X
axis.
From this histogram, find cuts.
        fMinProjection determines the thresholds as a percentage of width (resp
height) of page. Any bin lower than it is considered as zero.
Map cuts to table separators to annotate them
Dynamically tune the threshold for cutting so as to reflect most separators
as a cut.
Tag them if ltlYlX is given
ltlYlX is a list of (ltY1Y2, ltX1X2) per page.
ltY1Y2 is the list of (Y1, Y2) of horizontal separators,
ltX1X2 is the list of (X1, X2) of vertical separators.
Modify the XML DOM by adding a separator cut, annotated if GT given
"""
domid = 0 #to add unique separator id
llX, llY = [], []
for iPage, ndPage in enumerate(MultiPageXml.getChildByName(root, 'Page')):
try:
lYi, lXi = ltlYlX[iPage]
#except TypeError:
except:
lYi, lXi = [], []
w, h = int(ndPage.get("imageWidth")), int(ndPage.get("imageHeight"))
#Histogram of projections
lndTexLine = MultiPageXml.getChildByName(ndPage, 'TextLine')
aYHisto, aXHisto = self.getHisto(lndTexLine,
w, fMinHorizProjection,
h, fMinVertiProjection
, fRatio
, fMinHLen=fMinHLen)
aYHisto = aYHisto - fMinHorizProjection
aXHisto = aXHisto - fMinVertiProjection
#find the centre of each area of 0s and its label
lY, lYLbl = self.getCentreOfZeroAreas(aYHisto, lYi)
# lX, lXLbl = self.getCentreOfZeroAreas(aXHisto, lXi)
lX, lXLbl = self.getLowestOfZeroAreas(aXHisto, lXi)
traceln(lY)
traceln(lX)
traceln(" - %d horizontal cuts" % len(lY))
traceln(" - %d vertical cuts" % len(lX))
#ndTR = MultiPageXml.getChildByName(ndPage,'TableRegion')[0]
# horizontal grid lines
for y, ylbl in zip(lY, lYLbl):
domid += 1
self.addPageXmlSeparator(ndPage, ylbl, 0, y, w, y, domid)
# Vertical grid lines
for x, xlbl in zip(lX, lXLbl):
domid += 1
self.addPageXmlSeparator(ndPage, xlbl, x, 0, x, h, domid)
llX.append(lX)
llY.append(lY)
return (llY, llX)
@classmethod
def addPageXmlSeparator(cls, nd, sLabel, x1, y1, x2, y2, domid):
ndSep = MultiPageXml.createPageXmlNode("CutSeparator")
if not sLabel is None:
# propagate the groundtruth info we have
ndSep.set("type", sLabel)
if abs(x2-x1) > abs(y2-y1):
ndSep.set("orient", "0")
else:
ndSep.set("orient", "90")
ndSep.set("id", "s_%d"%domid)
<gh_stars>1-10
#!/usr/bin/env python
# coding: utf-8
# # **World Cup 2018 Prediction by <NAME>**
#
# The purpose of this is to try and predict the top 3 teams for World Cup 2018 using classification models coupled with a Poisson distribution to predict the exact results of the semi-finals, third place playoff and final.
#
# ## **Final Predictions based on this notebook:**
#
# **Winner**: Germany
#
# **2nd Place**: Spain
#
# **3rd Place**: France
#
# **Final Score**: Germany VS Spain: 2-1
#
# **Third place playoff score**: France vs England: 1-1 (France win Penalty Shootout)
#
#
#
# ## **Contents:**
#
# **1. Import Necessary Packages/Datasets**
#
# **2. Data Cleaning**
#
# **3. Classification Models to predict match results (Win/Draw/Lose)**
# - Variables used:
# - Which stadium the match is played at (0 = neutral, 1 = away team's stadium, 2 = home team's stadium)
# - Whether the match is an important match or a friendly (0 = friendly, 1 = important)
# - How much the Home team's rank changes compared to the past period
# - How much the Away team's rank changes compared to the past period
# - Difference in the 2 team's ranking
# - Difference in the 2 team's mean weighted ratings over the past 3 years
#
# **4. Classification Models to predict exact goals scored by Home and Away Sides**
#
# - Variables used same as in (3)
#
# **5. Visualizing ability/potential of players of the 32 countries**
#
# **6. Adding variables to build a poisson model**
#
# - Variables used:
# - Soccer Power Index
# - Average Age
# - Average Height
# - Total World Cup Appearances
# - Average goals scored per game
# - Average goals conceded per game
# - Potential
#
# **7. Predicting World Cup 2018**
# - Detailed methods on how to simulate and predict the World Cup 2018 matches will be explained here (a minimal Poisson illustration follows below)
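# As a quick preview of the Poisson step used in sections 6-7, the sketch below (a new cell, not part of the original analysis) shows how expected goals for a fixture translate into win/draw/loss probabilities under an independent-Poisson model. The expected-goal figures are made up for illustration.
# In[ ]:
import numpy as np
from scipy.stats import poisson
home_xg, away_xg = 1.8, 1.1                  # hypothetical expected goals
goals = np.arange(0, 7)                      # consider scorelines up to 6 goals
score_matrix = np.outer(poisson.pmf(goals, home_xg), poisson.pmf(goals, away_xg))
home_win = np.tril(score_matrix, -1).sum()   # home goals > away goals
draw = np.trace(score_matrix)                # equal goals
away_win = np.triu(score_matrix, 1).sum()    # home goals < away goals
print(home_win, draw, away_win)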
# # **1. Import Necessary Packages/Datasets**
# In[ ]:
import random
import numpy as np
import scipy as sp
from scipy.stats import poisson
import matplotlib as mpl
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import pandas as pd
pd.set_option('display.width', 500)
pd.set_option('display.max_columns', 100)
pd.set_option('display.notebook_repr_html', True)
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.feature_selection import SelectKBest
from sklearn.model_selection import GridSearchCV
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeRegressor
from sklearn.linear_model import LinearRegression
import statsmodels.formula.api as sm
from sklearn.svm import SVC, LinearSVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.feature_selection import SelectFromModel
from sklearn.model_selection import KFold
from sklearn.model_selection import cross_val_score
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
import xgboost as xgb
import scikitplot as skplt
from sklearn.metrics import average_precision_score
from sklearn.metrics import precision_score
from sklearn.metrics import f1_score
from sklearn import preprocessing
import warnings
warnings.filterwarnings('ignore')
# In[ ]:
countries = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
historical = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
player_stats_18 = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
results = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
squads = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
fifa18 = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
results_so_far = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
stats = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
world_cup = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
# # **2. Data Cleaning**
# In[ ]:
squads.Player = squads.Player.apply(lambda x: x[:-10] if "captain" in x else x)
# In[ ]:
fifa18 = fifa18.replace({"Korea Republic":"South Korea"})
wc_player_stats_18 = fifa18[fifa18["name"].isin(squads.Player) | fifa18["full_name"].isin(squads.Player)]
wc_fifa18_stats = fifa18[fifa18.nationality.isin(squads.Team)]
# Updated Results as at 23/06
# In[ ]:
results_so_far = results_so_far.replace({"Korea Republic":"South Korea"})
results_so_far["Result"] = np.where(results_so_far["Home Team Goals"] < results_so_far["Away Team Goals"], 0, np.where(results_so_far["Home Team Goals"]==results_so_far["Away Team Goals"],1,2))
results_so_far["Matches"] = results_so_far["Home Team Name"] + "," + results_so_far["Away Team Name"]
results_so_far = results_so_far.dropna(how="any")
results_so_far = results_so_far.drop(["Year","Match date","Stage","Stadium","Group"],axis=1)
results_so_far["Home Team Goals"] = results_so_far["Home Team Goals"].apply(lambda x: int(x))
results_so_far["Away Team Goals"] = results_so_far["Away Team Goals"].apply(lambda x: int(x))
results_so_far.tail(2)
# In[ ]:
results = results.drop(["Unnamed: 0"],axis=1)
results.reset_index(inplace=True,drop=True)
results.tail(2)
# In[ ]:
world_cup = world_cup.loc[:, ['Team', 'Group', 'First match \nagainst', 'Second match\n against', 'Third match\n against']]
world_cup = world_cup.dropna(how='all')
world_cup = world_cup.replace({"IRAN": "Iran", "Costarica": "Costa Rica", "Porugal": "Portugal", "Columbia": "Colombia", "Korea" : "South Korea"})
world_cup = world_cup.set_index('Team')
world_cup.head(4)
# In[ ]:
wc_countries = countries[countries.team.isin(squads.Team)]
wc_countries.head(2)
# In[ ]:
squads = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
rankings = pd.read_csv("../../../input/tadhgfitzgerald_fifa-international-soccer-mens-ranking-1993now/fifa_ranking.csv")
rankings_prev = rankings[rankings["rank_date"]=="2018-05-17"]
rankings_prev = rankings_prev.replace({"IR Iran":"Iran","Korea Republic":"South Korea"})
rankings_prev = rankings_prev.loc[rankings_prev["country_full"].isin(list(squads["Team"]))]
rankings_prev=rankings_prev.drop(["country_abrv","confederation"],axis=1)
rankings_prev.reset_index(inplace=True,drop=True)
rankings_prev = rankings_prev.set_index("country_full",drop=True)
rankings_18 = rankings[rankings["rank_date"]=="2018-06-07"]
rankings_18 = rankings_18.replace({"IR Iran":"Iran","Korea Republic":"South Korea"})
rankings_18 = rankings_18.loc[rankings_18["country_full"].isin(list(squads["Team"]))]
rankings_18=rankings_18.drop(["country_abrv","confederation"],axis=1)
rankings_18.reset_index(inplace=True,drop=True)
rankings_18 = rankings_18.set_index("country_full",drop=False)
rankings_18["mean_weighted_over_years"] = (rankings_18["cur_year_avg_weighted"]+rankings_18["last_year_avg_weighted"]+ rankings_18["two_year_ago_weighted"]+rankings_18["three_year_ago_weighted"])/4
rankings_18.head()
# # **3. Classification Models to predict match results (Win/Draw/Lose)**
# **The 6 variables used to predict the results of a match are: **
# - Which stadium the match is played at (0 = neutral, 1 = away team's stadium, 2 = home team's stadium)
# - Whether the match is an important match or a friendly (0 = friendly, 1 = important)
# - How much the Home team's rank changes compared to the past period
# - How much the Away team's rank changes compared to the past period
# - Difference in the 2 team's ranking
# - Difference in the 2 team's mean weighted ratings over the past 3 years
# In[ ]:
x = results.loc[:,["country","impt","home_rank_change","away_rank_change","diff_in_ranking","diff_in_mean_weighted_over_years"]]
y = results.loc[:,"Result"]
# ## **3.1 Splitting into training and test set**
# We shall use 80% of our dataset as our training set and 20% as our test set. We will also apply 5-fold Cross Validation
# In[ ]:
x_train,x_test,y_train,y_test=train_test_split(x,y,test_size=0.2,random_state=0)
# In[ ]:
k_fold = KFold(n_splits=5, shuffle=True, random_state=0)
# Let's define some functions to evaluate our models
# In[ ]:
def train_acc_score(model):
return round(np.mean(cross_val_score(model,x_train,y_train,cv=k_fold,scoring="accuracy")),2)
def test_acc_score(model):
return round(accuracy_score(y_test, model.predict(x_test)),2)
def train_prec_score(model):
return round(precision_score(y_train,model.predict(x_train),average='macro'),2)
def test_prec_score(model):
return round(precision_score(y_test,model.predict(x_test),average='macro'),2)
def train_f1(model):
return round(f1_score(y_train,model.predict(x_train),average='macro'),2)
def test_f1(model):
return round(f1_score(y_test,model.predict(x_test),average='macro'),2)
def confusion_matrix_model(model_used):
    cm = confusion_matrix(y_test, model_used.predict(x_test))
    cm = pd.DataFrame(cm)
    cm.columns = ["Predicted Away Win", "Predicted Draw", "Predicted Home Win"]
    cm.index = ["Actual Away Win", "Actual Draw", "Actual Home Win"]
    return cm.T
def confusion_matrix_model_train(model_used):
    cm = confusion_matrix(y_train, model_used.predict(x_train))
    cm = pd.DataFrame(cm)
    cm.columns = ["Predicted Away Win", "Predicted Draw", "Predicted Home Win"]
    cm.index = ["Actual Away Win", "Actual Draw", "Actual Home Win"]
    return cm.T
def importance_of_features(model):
features = pd.DataFrame()
features['feature'] = x_train.columns
features['importance'] = model.feature_importances_
features.sort_values(by=['importance'], ascending=True, inplace=True)
features.set_index('feature', inplace=True)
return features.plot(kind='barh', figsize=(6,6))
# ## **3.2 Building classification models to predict results**
# The models will be optimised using GridSearchCV based on F1 score. F1 score is the harmonic mean of precision and recall. It tells you how precise your classifier is (how many of its positive predictions are correct), as well as how robust it is (how few actual instances it misses).
#
# I have typed in some of the optimised parameters based on the GridSearchCV output, and commented out the GridSearchCV code to make the notebook run faster, since it won't be re-optimised.
#
# Confusion matrix tables and details will only be shown for the final selected models in order to save space. There will be a summary of each model in the evaluation section below.
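# As a small illustration of macro averaging (hypothetical labels, not notebook data): macro F1 is the unweighted mean of the per-class F1 scores, so a weak "Draw" class cannot hide behind the more frequent "Win" classes.
# In[ ]:
from sklearn.metrics import f1_score
y_true_demo = [0, 0, 1, 1, 2, 2, 2]
y_pred_demo = [0, 2, 1, 1, 2, 2, 0]
per_class_f1 = f1_score(y_true_demo, y_pred_demo, average=None)   # one F1 per class
print(per_class_f1, per_class_f1.mean())                          # mean equals macro F1
print(f1_score(y_true_demo, y_pred_demo, average="macro"))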
# **3.2.1. Logistic Regression (Lasso)**
# In[ ]:
param_grid = dict(C=(0.0001,0.001,0.005,0.01,0.1,0.5,1))
log_reg1 = GridSearchCV(LogisticRegression(penalty="l1", solver="liblinear"),param_grid=param_grid,scoring="f1_macro")
#log_reg1=LogisticRegression(penalty="l1")
log_reg1.fit(x_train,y_train)
print(log_reg1.best_params_)
print("In-sample accuracy: " + str(train_acc_score(log_reg1)))
print("Test accuracy: " + str(test_acc_score(log_reg1)))
print ("In-sample Precision Score: " + str(train_prec_score(log_reg1)))
print ("Test Precision Score: " + str(test_prec_score(log_reg1)))
print ("In-sample F1 Score: " + str(train_f1(log_reg1)))
print ("Test F1 Score: " + str(test_f1(log_reg1)))
#confusion_matrix_model_train(log_reg1)
# **3.2.2. Logistic Regression (Ridge)**
# In[ ]:
param_grid = dict(C=(0.0001,0.001,0.005,0.01,0.1,0.5,1))
log_reg2 = GridSearchCV(LogisticRegression(penalty="l2"),param_grid=param_grid,scoring="f1_macro")
#log_reg2=LogisticRegression(penalty="l2",C=0.01)
log_reg2.fit(x_train,y_train)
print(log_reg2.best_params_)
print("In-sample accuracy: " + str(train_acc_score(log_reg2)))
print("Test accuracy: " + str(test_acc_score(log_reg2)))
print ("In-sample Precision Score: " + str(train_prec_score(log_reg2)))
print ("Test Precision Score: " + str(test_prec_score(log_reg2)))
print ("In-sample F1 Score: " + str(train_f1(log_reg2)))
print ("Test F1 Score: " + str(test_f1(log_reg2)))
#confusion_matrix_model_train(log_reg2)
# **3.2.3. SVM (RBF Kernel)**
# In[ ]:
#param_grid = dict(C=(0.001,0.01,0.1,0.5,1,2),gamma=(0.001,0.01,0.1,0.5,1,2))
#svc_rbf = GridSearchCV(SVC(kernel="rbf",random_state=0),param_grid=param_grid,scoring="f1_macro")
svc_rbf = SVC(kernel='rbf', gamma=0.001, C=0.5,random_state=0)
svc_rbf.fit(x_train, y_train)
#print(svc_rbf.best_params_)
print("In-sample accuracy: " + str(train_acc_score(svc_rbf)))
print("Test accuracy: " + str(test_acc_score(svc_rbf)))
print ("In-sample Precision Score: " + str(train_prec_score(svc_rbf)))
print ("Test Precision Score: " + str(test_prec_score(svc_rbf)))
print ("In-sample F1 Score: " + str(train_f1(svc_rbf)))
print ("Test F1 Score: " + str(test_f1(svc_rbf)))
#confusion_matrix_model_train(svc_rbf)
# **3.2.4. SVM (Linear Kernel)**
# In[ ]:
#param_grid = dict(C=(0.001,0.01,0.1,0.5,1,2),gamma=(0.001,0.01,0.1,0.5,1,2))
#svc_lin= GridSearchCV(SVC(kernel="linear",random_state=0),param_grid=param_grid,scoring="f1_macro")
svc_lin = SVC(kernel='linear', gamma=0.001, C=0.1,random_state=0)
svc_lin.fit(x_train, y_train)
#print(svc_lin.best_params_)
print("In-sample accuracy: " + str(train_acc_score(svc_lin)))
print("Test accuracy: " + str(test_acc_score(svc_lin)))
print ("In-sample Precision Score: " + str(train_prec_score(svc_lin)))
print ("Test Precision Score: " + str(test_prec_score(svc_lin)))
print ("In-sample F1 Score: " + str(train_f1(svc_lin)))
print ("Test F1 Score: " + str(test_f1(svc_lin)))
#confusion_matrix_model_train(svc_lin)
# **3.2.5. K-Nearest Neighbour**
# In[ ]:
#param_grid = dict(n_neighbors=np.arange(10,70),weights=("uniform","distance"),p=(1,2))
#KNN = GridSearchCV(KNeighborsClassifier(),param_grid=param_grid,scoring="f1_macro")
KNN=KNeighborsClassifier(n_neighbors=16,p=1,weights='uniform')
KNN.fit(x_train,y_train)
#print(KNN.best_params_)
print("In-sample accuracy: " + str(train_acc_score(KNN)))
print("Test accuracy: " + str(test_acc_score(KNN)))
print ("In-sample Precision Score: " + str(train_prec_score(KNN)))
print ("Test Precision Score: " + str(test_prec_score(KNN)))
print ("In-sample F1 Score: " + str(train_f1(KNN)))
print ("Test F1 Score: " + str(test_f1(KNN)))
#confusion_matrix_model_train(KNN)
# **3.2.6. Decision Tree**
# In[ ]:
#param_grid = dict(max_depth=np.arange(4,10),min_samples_leaf=np.arange(1,8),min_samples_split=np.arange(2,8),max_leaf_nodes=np.arange(30,100,10))
#Dec_tree = GridSearchCV(DecisionTreeClassifier(),param_grid=param_grid,scoring="f1_macro")
Dec_tree=DecisionTreeClassifier(max_depth= 8, max_leaf_nodes= 40, min_samples_leaf= 1, min_samples_split= 7)
Dec_tree.fit(x_train,y_train)
#print(Dec_tree.best_params_)
print("In-sample accuracy: " + str(train_acc_score(Dec_tree)))
print("Test accuracy: " + str(test_acc_score(Dec_tree)))
print ("In-sample Precision Score: " + str(train_prec_score(Dec_tree)))
print ("Test Precision Score: " + str(test_prec_score(Dec_tree)))
print ("In-sample F1 Score: " + str(train_f1(Dec_tree)))
print ("Test F1 Score: " + str(test_f1(Dec_tree)))
#confusion_matrix_model_train(Dec_tree)
# **3.2.7. Random Forest**
# In[ ]:
#param_grid = dict(max_depth=np.arange(3,10),min_samples_leaf=np.arange(1,10),min_samples_split=np.arange(2,6),max_leaf_nodes=np.arange(50,120,10))
#param_grid = dict(n_estimators = np.arange(50,500,50))
#ranfor = GridSearchCV(RandomForestClassifier(max_depth= 7, max_leaf_nodes=50, min_samples_leaf= 7, min_samples_split= 4,random_state=0),param_grid=param_grid,scoring="f1_macro")
ranfor = RandomForestClassifier(n_estimators=50,max_depth= 7, max_leaf_nodes=50, min_samples_leaf= 7, min_samples_split= 4,random_state=0)
ranfor.fit(x_train,y_train)
#print(ranfor.best_params_)
print("In-sample accuracy: " + str(train_acc_score(ranfor)))
print("Test accuracy: " + str(test_acc_score(ranfor)))
print ("In-sample Precision Score: " + str(train_prec_score(ranfor)))
print ("Test Precision Score: " + str(test_prec_score(ranfor)))
print ("In-sample F1 Score: " + str(train_f1(ranfor)))
print ("Test F1 Score: " + str(test_f1(ranfor)))
#confusion_matrix_model_train(ranfor)
# **3.2.8. XGBoosting**
# In[ ]:
#param_grid = dict(n_estimators=np.arange(50,500,50),max_depth=np.arange(6,12),learning_rate=(0.0001,0.001,0.01,0.1))
#xgclass = GridSearchCV(xgb.XGBClassifier(random_state=0),param_grid=param_grid,scoring="f1_macro")
xgclass
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'MFyolo\ui\YOLOdetect_2.0.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
import sys
import cv2
import time
import numpy as np
from PIL import Image
from yolo import YOLO
import serial
import serial.tools.list_ports  # needed by get_port_list below
import struct
class Ui_Form(object):
def setupUi(self, Form):
        # Create the YOLO detector object
self.yolo = YOLO()
Form.setObjectName("Form")
Form.resize(1420, 720)
self.label_left = QtWidgets.QLabel(Form)
self.label_left.setGeometry(QtCore.QRect(10, 30, 700, 320))
self.label_left.setObjectName("label_left")
self.label_right = QtWidgets.QLabel(Form)
self.label_right.setGeometry(QtCore.QRect(710, 30, 700, 320))
self.label_right.setObjectName("label_right")
self.groupBox = QtWidgets.QGroupBox(Form)
self.groupBox.setGeometry(QtCore.QRect(20, 370, 841, 161))
self.groupBox.setObjectName("groupBox")
self.label_3 = QtWidgets.QLabel(self.groupBox)
self.label_3.setGeometry(QtCore.QRect(30, 30, 111, 21))
font = QtGui.QFont()
font.setFamily("华光楷体一_CNKI")
font.setPointSize(11)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.open_weight_path = QtWidgets.QLineEdit(self.groupBox)
self.open_weight_path.setGeometry(QtCore.QRect(170, 30, 561, 20))
self.open_weight_path.setObjectName("open_weight_path")
        #------ default weight-file settings ------#
        self.open_weight_path.setText("D:/deeplearnITERM/yolox-pytorch/logs/not_zip/ep100-loss2.684-val_loss2.612.pth")
        self.yolo._defaults["model_path"] = self.open_weight_path.text()
        #------ default weight-file settings ------#
self.select_weith_pth_button = QtWidgets.QPushButton(self.groupBox)
self.select_weith_pth_button.setGeometry(QtCore.QRect(740, 30, 71, 20))
self.select_weith_pth_button.setObjectName("select_weith_pth_button")
self.tabWidget = QtWidgets.QTabWidget(self.groupBox)
self.tabWidget.setGeometry(QtCore.QRect(30, 80, 681, 71))
self.tabWidget.setObjectName("tabWidget")
self.tab = QtWidgets.QWidget()
self.tab.setObjectName("tab")
self.open_camera_button = QtWidgets.QPushButton(self.tab)
self.open_camera_button.setGeometry(QtCore.QRect(10, 10, 81, 31))
self.open_camera_button.setObjectName("open_camera_button")
self.tabWidget.addTab(self.tab, "")
self.tab_2 = QtWidgets.QWidget()
self.tab_2.setObjectName("tab_2")
self.open_video_path = QtWidgets.QLineEdit(self.tab_2)
self.open_video_path.setGeometry(QtCore.QRect(70, 20, 401, 20))
self.open_video_path.setObjectName("open_video_path")
self.open_video_path.setText("D:/deeplearnITERM/yolo3-pytorch/myVideoSave/origin/origin/springnorth.mp4")
self.open_video_button = QtWidgets.QPushButton(self.tab_2)
self.open_video_button.setGeometry(QtCore.QRect(490, 20, 71, 20))
self.open_video_button.setObjectName("open_video_button")
self.label_6 = QtWidgets.QLabel(self.tab_2)
self.label_6.setGeometry(QtCore.QRect(0, 20, 111, 21))
font = QtGui.QFont()
font.setFamily("华光楷体一_CNKI")
font.setPointSize(11)
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.start_video_button = QtWidgets.QPushButton(self.tab_2)
self.start_video_button.setGeometry(QtCore.QRect(580, 0, 91, 41))
self.start_video_button.setObjectName("start_video_button")
self.tabWidget.addTab(self.tab_2, "")
self.start_YOLOdetect_button = QtWidgets.QPushButton(self.groupBox)
self.start_YOLOdetect_button.setGeometry(QtCore.QRect(730, 100, 91, 41))
self.start_YOLOdetect_button.setObjectName("start_YOLOdetect_button")
self.open_classes_path = QtWidgets.QLineEdit(self.groupBox)
self.open_classes_path.setGeometry(QtCore.QRect(170, 60, 561, 20))
self.open_classes_path.setObjectName("open_classes_path")
        #------ default classes.txt settings ------#
        self.open_classes_path.setText("D:/deeplearnITERM/yolox-pytorch/model_data/my_classes.txt")
        self.yolo._defaults["classes_path"] = self.open_classes_path.text()
        #------ default classes.txt settings ------#
self.select_classes_txt_button = QtWidgets.QPushButton(self.groupBox)
self.select_classes_txt_button.setGeometry(QtCore.QRect(740, 60, 71, 20))
self.select_classes_txt_button.setObjectName("select_classes_txt_button")
self.label_7 = QtWidgets.QLabel(self.groupBox)
self.label_7.setGeometry(QtCore.QRect(30, 60, 131, 21))
font = QtGui.QFont()
font.setFamily("华光楷体一_CNKI")
font.setPointSize(11)
self.label_7.setFont(font)
self.label_7.setObjectName("label_7")
self.tabWidget.raise_()
self.label_3.raise_()
self.open_weight_path.raise_()
self.select_weith_pth_button.raise_()
self.start_YOLOdetect_button.raise_()
self.open_classes_path.raise_()
self.select_classes_txt_button.raise_()
self.label_7.raise_()
self.groupBox_2 = QtWidgets.QGroupBox(Form)
self.groupBox_2.setGeometry(QtCore.QRect(20, 550, 841, 151))
self.groupBox_2.setObjectName("groupBox_2")
self.LineEdit_uartName = QtWidgets.QLineEdit(self.groupBox_2)
self.LineEdit_uartName.setGeometry(QtCore.QRect(170, 30, 81, 20))
self.LineEdit_uartName.setObjectName("LineEdit_uartName")
self.LineEdit_uartBot = QtWidgets.QLineEdit(self.groupBox_2)
self.LineEdit_uartBot.setGeometry(QtCore.QRect(170, 70, 81, 20))
self.LineEdit_uartBot.setObjectName("LineEdit_uartBot")
self.LineEdit_uart_Timeout = QtWidgets.QLineEdit(self.groupBox_2)
self.LineEdit_uart_Timeout.setGeometry(QtCore.QRect(170, 110, 81, 20))
self.LineEdit_uart_Timeout.setObjectName("LineEdit_uart_Timeout")
self.label_8 = QtWidgets.QLabel(self.groupBox_2)
self.label_8.setGeometry(QtCore.QRect(30, 30, 61, 21))
font = QtGui.QFont()
font.setFamily("华光楷体一_CNKI")
font.setPointSize(11)
self.label_8.setFont(font)
self.label_8.setObjectName("label_8")
self.label_9 = QtWidgets.QLabel(self.groupBox_2)
self.label_9.setGeometry(QtCore.QRect(30, 70, 61, 21))
font = QtGui.QFont()
font.setFamily("华光楷体一_CNKI")
font.setPointSize(11)
self.label_9.setFont(font)
self.label_9.setObjectName("label_9")
self.label_10 = QtWidgets.QLabel(self.groupBox_2)
self.label_10.setGeometry(QtCore.QRect(30, 110, 61, 21))
font = QtGui.QFont()
font.setFamily("华光楷体一_CNKI")
font.setPointSize(11)
self.label_10.setFont(font)
self.label_10.setObjectName("label_10")
self.start_open_uart_button = QtWidgets.QPushButton(self.groupBox_2)
self.start_open_uart_button.setGeometry(QtCore.QRect(730, 60, 91, 41))
self.start_open_uart_button.setObjectName("start_open_uart_button")
self.groupBox_3 = QtWidgets.QGroupBox(Form)
self.groupBox_3.setGeometry(QtCore.QRect(870, 370, 541, 161))
self.groupBox_3.setObjectName("groupBox_3")
self.textEdit = QtWidgets.QTextEdit(self.groupBox_3)
self.textEdit.setGeometry(QtCore.QRect(10, 20, 511, 121))
self.textEdit.setObjectName("textEdit")
self.groupBox_4 = QtWidgets.QGroupBox(Form)
self.groupBox_4.setGeometry(QtCore.QRect(870, 550, 541, 151))
self.groupBox_4.setObjectName("groupBox_4")
self.textEdit_Uart = QtWidgets.QTextEdit(self.groupBox_4)
self.textEdit_Uart.setGeometry(QtCore.QRect(10, 20, 511, 131))
self.textEdit_Uart.setObjectName("textEdit_Uart")
self.groupBox_3.raise_()
self.label_left.raise_()
self.label_right.raise_()
self.groupBox.raise_()
self.groupBox_2.raise_()
self.groupBox_4.raise_()
        # Timer setup
        self.timer_camera1 = QtCore.QTimer()  # camera preview
        self.timer_camera2 = QtCore.QTimer()  # detection view
        self.timer_camera3 = QtCore.QTimer()  # video preview
        # Camera capture object
self.cap = cv2.VideoCapture(0)
#FPS
self.fps = 0.0
        # Connect timer signals to their slots
self.timer_camera1.timeout.connect(self.show_camera)
self.timer_camera3.timeout.connect(self.show_video)
self.timer_camera2.timeout.connect(self.show_Detected_camera)
        # Connect button signals to their slots
self.open_camera_button.clicked.connect(self.button_open_camera_click)
self.start_YOLOdetect_button.clicked.connect(self.button_detect_camera_click)
self.open_video_button.clicked.connect(self.open_select_video_click)
self.start_video_button.clicked.connect(self.button_open_videoPath_click)
self.select_weith_pth_button.clicked.connect(self.open_select_weight_click)
self.select_classes_txt_button.clicked.connect(self.open_select_classes_txt_click)
self.start_open_uart_button.clicked.connect(self.botton_open_ser_cliked)
        # Serial-port open flag; gates whether detection results are sent out
self.uartOpenFlag = 0
self.retranslateUi(Form)
self.tabWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "YoloV深度学习目标检测与通信平台"))
self.label_left.setText(_translate("Form", "等待摄像头连接。。。"))
self.label_right.setText(_translate("Form", "等待开始检测。。。"))
self.groupBox.setTitle(_translate("Form", "检测操作区"))
self.label_3.setText(_translate("Form", "权重文件路径:"))
self.select_weith_pth_button.setText(_translate("Form", "浏览"))
self.open_camera_button.setText(_translate("Form", "打开摄像头"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("Form", "检测摄像头画面"))
self.open_video_button.setText(_translate("Form", "浏览"))
self.label_6.setText(_translate("Form", "视频路径:"))
self.start_video_button.setText(_translate("Form", "打开视频"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("Form", "检测视频画面"))
self.start_YOLOdetect_button.setText(_translate("Form", "开始YOLO检测"))
self.select_classes_txt_button.setText(_translate("Form", "浏览"))
self.label_7.setText(_translate("Form", "classes.txt文件路径:"))
self.groupBox_2.setTitle(_translate("Form", "串口输出操作区"))
self.label_8.setText(_translate("Form", "串口名:"))
self.label_9.setText(_translate("Form", "波特率:"))
self.label_10.setText(_translate("Form", "TimeOut:"))
self.start_open_uart_button.setText(_translate("Form", "打开串口"))
self.groupBox_3.setTitle(_translate("Form", "载入信息显示"))
self.groupBox_4.setTitle(_translate("Form", "串口收发"))
        # File-selection dialogs
        self._translate = QtCore.QCoreApplication.translate
def open_select_weight_click(self):
# _translate = QtCore.QCoreApplication.translate
self.directory_weight = QFileDialog.getOpenFileName(None, "选择文件", "H:/")
weight_path = self.directory_weight[0]
if(self.directory_weight[0] != ""):
self.yolo._defaults["model_path"]=weight_path
self.open_weight_path.setText(self._translate("Form", weight_path))
def open_select_classes_txt_click(self):
# _translate = QtCore.QCoreApplication.translate
self.directory_classes = QFileDialog.getOpenFileName(None, "选择文件", "H:/")
if(self.directory_classes[0] != ""):
classes_path = self.directory_classes[0]
self.yolo._defaults["classes_path"]=classes_path
self.open_classes_path.setText(self._translate("Form", classes_path))
def open_select_video_click(self):
# _translate = QtCore.QCoreApplication.translate
self.directory1 = QFileDialog.getOpenFileName(None, "选择文件", "H:/")
if(self.directory1[0] != ""):
self.video_path = self.directory1[0]
self.open_video_path.setText(self._translate("Form", self.video_path))
def button_open_videoPath_click(self):
self.timer_camera1.stop()
self.label_left.clear()
video_path = self.open_video_path.text()
if self.timer_camera3.isActive() == False:
flag = self.cap.open(video_path)
if flag == False:
self.textEdit.append('打开视频失败!请检查视频路径是否正确!')
else:
self.timer_camera3.start(30)
self.start_video_button.setText(u'关闭视频')
else:
self.timer_camera2.stop()
self.label_right.clear()
self.timer_camera3.stop()
self.timer_camera1.stop()
self.label_left.clear()
video_path = ''
self.start_video_button.setText(u'打开视频')
self.start_YOLOdetect_button.setText(u'开始YOLO检测')
self.open_camera_button.setText(u'打开摄像头')
def button_open_camera_click(self):
self.timer_camera3.stop()
self.label_left.clear()
self.video_path = 0
if self.timer_camera1.isActive() == False:
flag = self.cap.open(self.video_path)
if flag == False:
self.textEdit.append('打开相机失败!请检测相机与电脑是否连接正确!')
else:
self.timer_camera1.start(30)
self.open_camera_button.setText(u'关闭摄像头')
else:
self.timer_camera2.stop()
self.label_right.clear()
self.timer_camera1.stop()
self.label_left.clear()
self.open_camera_button.setText(u'打开摄像头')
    def show_video(self):  # show the video frame in the left panel
# self.t1 = time.time()
flag, self.image = self.cap.read()
if not flag:
raise ValueError("未能正确读取摄像头(视频),请注意是否正确安装摄像头(是否正确填写视频路径)。")
# if self.video_save_path!="":
# fourcc = cv2.VideoWriter_fourcc(*'XVID')
# size = (int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))
# out = cv2.VideoWriter(self.video_save_path, fourcc, self.video_fps, size)
        #-----------------------#
        #   For saving frames
        #-----------------------#
# dir_path=os.getcwd()
# camera_source =dir_path+ "\\data\\test\\2.jpg"
# cv2.imwrite(camera_source, self.image)
        #------------------------------------------------------------#
        #   Set the capture width/height
        #------------------------------------------------------------#
#self.cap.set(cv2.CAP_PROP_FRAME_WIDTH,self.video_width)
#self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT,self.video_Heigth)
width = self.cap.get(3)
height = self.cap.get(4)
        # Target display resolution
width_new = 700
height_new = 320
        # Resize while preserving the aspect ratio
if width / height >= width_new / height_new:
show = cv2.resize(self.image, (width_new, int(height * width_new / width)))
else:
show = cv2.resize(self.image, (int(width * height_new / height), height_new))
show = cv2.cvtColor(show, cv2.COLOR_BGR2RGB)
        # # FPS overlay
# self.fps = ( self.fps + (1./(time.time()-self.t1)) ) / 2
# print("originFps= %.2f"%(self.fps))
# show = cv2.putText(show, "fps= %.2f"%(self.fps), (0, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
showImage = QtGui.QImage(show.data, show.shape[1], show.shape[0],3 * show.shape[1], QtGui.QImage.Format_RGB888)
self.label_left.setPixmap(QtGui.QPixmap.fromImage(showImage))
    def show_camera(self):  # show the camera frame in the left panel
#self.t1 = time.time()
flag, self.image = self.cap.read()
if not flag:
raise ValueError("未能正确读取摄像头(视频),请注意是否正确安装摄像头(是否正确填写视频路径)。")
# if self.video_save_path!="":
# fourcc = cv2.VideoWriter_fourcc(*'XVID')
# size = (int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))
# out = cv2.VideoWriter(self.video_save_path, fourcc, self.video_fps, size)
        #-----------------------#
        #   For saving frames
        #-----------------------#
# dir_path=os.getcwd()
# camera_source =dir_path+ "\\data\\test\\2.jpg"
# cv2.imwrite(camera_source, self.image)
        #------------------------------------------------------------#
        #   Set the capture width/height
        #------------------------------------------------------------#
#self.cap.set(cv2.CAP_PROP_FRAME_WIDTH,self.video_width)
#self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT,self.video_Heigth)
width = self.cap.get(3)
height = self.cap.get(4)
        # Target display resolution
width_new = 700
height_new = 320
        # Resize while preserving the aspect ratio
if width / height >= width_new / height_new:
show = cv2.resize(self.image, (width_new, int(height * width_new / width)))
else:
show = cv2.resize(self.image, (int(width * height_new / height), height_new))
show = cv2.cvtColor(show, cv2.COLOR_BGR2RGB)
        # # FPS overlay
# self.fps = ( self.fps + (1./(time.time()-self.t1)) ) / 2
# print("originFps= %.2f"%(self.fps))
# show = cv2.putText(show, "fps= %.2f"%(self.fps), (0, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
showImage = QtGui.QImage(show.data, show.shape[1], show.shape[0],3 * show.shape[1], QtGui.QImage.Format_RGB888)
self.label_left.setPixmap(QtGui.QPixmap.fromImage(showImage))
    # Detection-result view
def button_detect_camera_click(self):
if self.timer_camera1.isActive() == False and self.timer_camera3.isActive() == False :
self.textEdit.append('未获取到图像源,请检查视频或摄像头是否打开')
return
elif self.open_classes_path.text() == '' or self.open_weight_path.text() =='':
self.textEdit.append('未成功生成yolo模型。。。请检查是否选择了权重文件或者classes.txt文件路径')
return
elif self.timer_camera2.isActive() == False:
self.textEdit.append('模型加载完毕!')
self.yolo.yoloinit()
self.timer_camera2.start(30)
self.start_YOLOdetect_button.setText(u'停止YOLO检测')
else:
self.timer_camera2.stop()
self.label_right.clear()
self.timer_camera1.stop()
self.timer_camera3.stop()
self.label_left.clear()
self.start_YOLOdetect_button.setText(u'开始YOLO检测')
self.open_camera_button.setText(u'打开摄像头')
self.start_video_button.setText(u'打开视频')
def show_Detected_camera(self):
t2 = time.time()
        # Convert BGR to RGB
frame = cv2.cvtColor(self.image,cv2.COLOR_BGR2RGB)
        # Convert to a PIL Image
frame = Image.fromarray(np.uint8(frame))
        # Run detection
frame = np.array(self.yolo.detect_image(frame))
        # Convert RGB back to BGR for OpenCV display
frame = cv2.cvtColor(frame,cv2.COLOR_RGB2BGR)
        # If the serial port is open, fetch the target-detected flag and send it
if(self.uartOpenFlag==1):
isFind = self.yolo.getS()
self.send_msg(isFind)
        # Standalone window:
#cv2.imshow("Detect",frame)
        # Display in the right panel
width = self.cap.get(3)
height = self.cap.get(4)
        # Target display resolution
width_new = 700
height_new = 320
        # Resize while preserving the aspect ratio
if width / height >= width_new / height_new:
frame = cv2.resize(frame, (width_new, int(height * width_new / width)))
else:
frame = cv2.resize(frame, (int(width * height_new / height), height_new))
        # FPS overlay
self.fps = ( self.fps + (1./(time.time()-t2)) ) / 2
print("检测视频fps= %.2f"%(self.fps))
show = cv2.putText(frame, "fps= %.2f"%(self.fps), (0, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2)
show = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
showImage = QtGui.QImage(show.data, show.shape[1], show.shape[0],3 * show.shape[1], QtGui.QImage.Format_RGB888)
self.label_right.setPixmap(QtGui.QPixmap.fromImage(showImage))
    #------------------------- Serial-port module ----------------------------#
    def get_port_list(self):
        """
        Return the names of all COM ports on this system.
        """
        com_list = []                                   # collected port names
        port_list = serial.tools.list_ports.comports()  # enumerate local ports
        for port in port_list:
            com_list.append(port[0])
        return com_list
    # Convert a numeric string to a float
    def atoi(self, s):
        try:
            return float(s)
        except ValueError:
            return 0.0
def botton_open_ser_cliked(self) :
Bport = self.LineEdit_uartName.text()
Bbaudrate = self.LineEdit_uartBot.text()
Btimeout =self.LineEdit_uart_Timeout.text()
serButton_text = self.start_open_uart_button.text()
if(serButton_text == '打开串口'):
if(Bport != '' and Bbaudrate != '' and Btimeout != ''):
self.open_ser(Bport,Bbaudrate,self.atoi(Btimeout))
self.uartOpenFlag = 1
self.start_open_uart_button.setText('关闭串口')
else:
self.textEdit_Uart.append('串口信息未填写完整!')
else:
self.uartOpenFlag = 0
self.close_ser()
self.start_open_uart_button.setText('打开串口')
    # Open the serial port
    def open_ser(self, port, baudrate, timeout):
        try:
            # note: timeout must be passed by keyword, otherwise it would be
            # interpreted as the bytesize argument of serial.Serial
            self.ser = serial.Serial(port, int(baudrate), timeout=timeout)
            if self.ser.isOpen():
                self.textEdit_Uart.append("串口打开成功")
        except Exception as exc:
            self.textEdit_Uart.append('串口打开异常')
            print('串口打开异常', exc)
    # Send over the serial port
    def send_msg(self, x):
        try:
            if x == 1:
                send_datas = bytearray([0x01, 0x0d, 0x0a])
                strsend = time.strftime("%Y-%m-%d %H:%M:%S") + "已发送数据:1"
            elif x == 0:
                send_datas = bytearray([0x00, 0x0d, 0x0a])
                strsend = time.strftime("%Y-%m-%d %H:%M:%S") + "已发送数据:0"
            else:
                print('参数输入0或1')
                return
            self.ser.write(send_datas)
            # convert bytes to int if needed:
            # pre_datas = int.from_bytes(send_datas, byteorder='big', signed=False)
            self.textEdit_Uart.append(strsend)
        except Exception as exc:
            self.textEdit_Uart.append("发送异常")
            print("发送异常", exc)
    # # Receive data
# def read_msg(self):
# try:
# print("等待接收数据")
# while True:
# data = ser.read(ser.in_waiting).decode('gbk')
# if data != '':
# break
# print("已接受到数据:",data)
# except Exception as exc:
# print("读取异常",exc)
    # Close the serial port
def close_ser(self):
try:
self.ser.close()
if self.ser.isOpen():
self.textEdit_Uart.append("串口未关闭")
else:
self.textEdit_Uart.append("串口已关闭")
except Exception as exc:
print("串口关闭异常", exc)
    #------------------------ serial ----------------------------------#
if __name__ == '__main__':
app = QApplication(sys.argv)
#splash = QSplashScreen(QPixmap(".\\data\\source_image\\logo.png"))
    # set the splash-screen font
# splash.setFont(QFont('Microsoft YaHei UI', 12))
    # standard Qt entry point (assumed; the original file is truncated here)
    Form = QtWidgets.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
< ind[i]):
var = np.abs(y_linear[j]-ds[i-1])
if var > tolerance*np.abs(ds[i-1]):
out=True
j +=1
# if one point is outside the interval, use zero interpolation
# for the segment
if out:
for j in range(last_ind+1, ind[i]):
y[j] = y_zero[j]
last_ind = ind[i]
return y
def combined_fixed_recon(ds, ind, threshold, tolerance_ratio=0.1,plot=False):
""" A combination of zero and linear spline interpolation
This function combines linear and zero spline interpolation, given a
signal that has been downsampled using a by percentage downsampling. It
uses the information given by this downsampling to choose between linear or
zero interpolation.
If the linear interpolation gives at least one point that has a variation
superior to the threshold, it will use zero interpolation instead.
Parameters
----------
ds : list
the downsampled signal to reconstruct
ind : list
the list of indices corresponding to the position of
the downsampled points
threshold : float
the threshold used for the by percentage downsampling
tolerance_ratio : float
this ratio increases the interval of points where linear interpolation
is used. It prevents the algorithm to use zero if linear is closer to
the actual signal. It calculates a larger threshold, as such :
``new_threshold = threshold*(1+tolerance_ratio)``
Returns
-------
y : list
the reconstructed signal
"""
x = np.arange(ind[-1]+1)
tolerance = threshold * (1+tolerance_ratio)
f_linear = interpolate.interp1d(ind, ds, kind='linear')
f_pchip = interpolate.PchipInterpolator(ind,ds)
y = [ds[0]]
for i in range(1, len(ind)):
indx = np.arange(ind[i-1]+1, ind[i]+1, 1)
out = False
j = ind[i]-1
while (not out) and (j > ind[i-1]):
var = np.abs(f_linear(x[j]) - ds[i-1])
if var > tolerance*np.abs(ds[i-1]):
out = True
j -= 1
y_ = f_linear(indx)
if out:
#f_nearest = interpolate.interp1d([ind[i-1],ind[i]],[ds[i-1],ds[i]],kind='nearest')
#y_ = f_nearest(indx).tolist()
ind = np.array(ind,dtype=int)
ds = np.array(ds,dtype=float)
y_ = f_pchip(indx)
#y_.append(ds[i])
y = np.concatenate((y, y_))
return y
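# A minimal usage sketch: hand-made downsample points stand in for the
# output of the by-percentage downsampler (values are illustrative only).
if __name__ == '__main__':
    demo_ind = [0, 4, 9, 14]                   # kept sample positions
    demo_ds = [1.0, 1.05, 2.0, 2.02]           # kept sample values
    demo_y = combined_fixed_recon(demo_ds, demo_ind, threshold=0.05)
    assert len(demo_y) == demo_ind[-1] + 1     # one value per original index
    print(demo_y[:5])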
def combined_con_recon(
ds, ind, threshold, tolerance_ratio=0.1, min_distance=3,
max_distance=-0.5, previous_distance=3):
""" A shape preserving combination of zero and linear spline interpolation
This function is similar to `combined_recon` but adds a shape-preserving
aspect. If an interval of the input signal is considered convex or concave,
the function will generate a point in the middle of this interval, with a
value that is the average of 2 values :
    - the minimum (or maximum) possible value, i.e. one that stays within the
      percentage-rule tolerance for a convex (or concave) signal
- the value that would have been given by the `combined_recon` function
This ensures a reconstruction that preserves convexity or concavity.
Parameters
----------
ds : list
the downsampled signal to reconstruct
ind : list
the list of indices corresponding to the position of
the downsampled points
threshold : float
the threshold used for the by percentage downsampling
tolerance_ratio : float
        this ratio widens the interval of points where linear interpolation
        is used. It prevents the algorithm from using zero interpolation when
        linear is closer to the actual signal. It computes a larger
        threshold, as follows:
``new_threshold = threshold*(1+tolerance_ratio)``
min_distance : int
the minimal distance between the 2 points where we want to interpolate
when we can assume convexity or concavity
max_distance : int
the maximal distance between 2 points where we can assume convexity
        or concavity.
        If set to a negative number, it is computed as:
``new_max_distance = abs(max_distance) * ind[-1]``
previous_distance : int
the minimal distance between the last 2 points where we can assume
convexity or concavity. If the points are too close the signal is less
likely to be convex or concave
Returns
-------
y : list
the reconstructed signal
"""
if max_distance < 0:
# if negative, it will take a proportion
# of the length of the signal.
# If signal is of length 120 and max_distance = 0.5, then
# max_distance = 0.5*120 = 60
max_distance = np.abs(max_distance) * ind[-1]
x = np.arange(ind[-1]+1)
tolerance = threshold * (1+tolerance_ratio)
f_linear = interpolate.interp1d(ind, ds, kind='linear')
y = [ds[0]]
for i in range(1, len(ind)):
indx = np.arange(ind[i-1]+1, ind[i]+1, 1)
# conditions on the existence of the points we use
indx_conditions = (i-2 >= 0) and (i+1 < len(ind))
# conditions on the distance between the 2 points we want to
# interpolate, as well as the 2 points before.
# If the 2 points to interpolate are too close, linear works well
# if they are too far, the convexity assumption is less likely
# if the 2 points before were too close, the convexity assumption
# is less likely to be true
dist_conditions = ((ind[i]-ind[i-1] < max_distance) and
(ind[i]-ind[i-1] > min_distance) and
(ind[i-1]-ind[i-2] > previous_distance))
# if the function is convex or concave on the chosen interval
change_conditions = np.sign(ds[i-1]-ds[i-2]) != np.sign(ds[i]-ds[i-1])
out = False
# we go from last index to first because it is usually in
# the last points that the points are out the interval,
# so we minimize the complexity
j = ind[i]-1
while (not out) and (j > ind[i-1]):
var = np.abs(f_linear(x[j]) - ds[i-1])
if var > tolerance*np.abs(ds[i-1]):
out = True
j -= 1
if indx_conditions and dist_conditions and change_conditions :
sign_before = np.sign(ds[i-1] - ds[i-2])
ind_mid = (ind[i]+ind[i-1]) // 2
# if convex
if sign_before < 0:
bound = ds[i-1] - np.abs(ds[i-1])*tolerance
# if concave
else:
bound = ds[i-1] + np.abs(ds[i-1])*tolerance
# check if smart_linear_interpolation would use zero or linear
# interpolation
if out:
# if zero interpolation, we choose a point in the
# middle of zero and the percentage rule limit
pt_mid = (ds[i-1] + bound)/2
interp_indx = [ind[i-1], ind_mid, ind[i]-1, ind[i]]
interp_points = [ds[i-1], pt_mid, ds[i-1], ds[i]]
else:
# if linear interpolation, we choose a point in the middle
# of linear and the percentage rule limit
pt_mid = (f_linear(ind_mid)+bound) / 2
interp_indx = [ind[i-1], ind_mid, ind[i]]
interp_points = [ds[i-1], pt_mid, ds[i]]
# we connect the points using a linear interpolation
f_ = interpolate.interp1d(interp_indx, interp_points,
kind='linear')
y_ = f_(indx)
else:
# use the smart_linear_interpolation algorithm
y_ = f_linear(indx)
if out:
y_ = [ds[i-1] for k in indx[1:]]
y_.append(ds[i])
y = np.concatenate((y, y_))
return y
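# Worked example of the convexity branch: with ds = [2.0, 1.0, 1.4, 2.0] at
# ind = [0, 6, 12, 18] and threshold = 0.1, the sample at index 6 is a local
# minimum (sign change), the gaps satisfy the distance conditions, so an
# extra midpoint slightly below ds[1] is interpolated near index 9 and the
# reconstruction stays convex on [6, 12] instead of flattening.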
def combined_con_fixed_recon(ds, ind, threshold, tolerance_ratio=0.1):
ind = np.array(ind,dtype=int)
ds = np.array(ds,dtype=float)
x = np.arange(ind[-1]+1)
tolerance = threshold * (1+tolerance_ratio)
f_linear = interpolate.interp1d(ind, ds, kind='linear')
y = [ds[0]]
for i in range(1, len(ind)):
indx = np.arange(ind[i-1]+1, ind[i]+1, 1)
# conditions on the existence of the points we use
indx_conditions = (i-2 >= 0) and (i+1 < len(ind))
# conditions on the distance between the 2 points we want to
# interpolate, as well as the 2 points before.
# If the 2 points to interpolate are too close, linear works well
# if they are too far, the convexity assumption is less likely
# if the 2 points before were too close, the convexity assumption
# is less likely to be true
# if the function is convex or concave on the chosen interval
change_conditions = np.sign(ds[i-1]-ds[i-2]) != np.sign(ds[i]-ds[i-1])
out = False
# we go from last index to first because it is usually in
# the last points that the points are out the interval,
# so we minimize the complexity
j = ind[i]-1
while (not out) and (j > ind[i-1]):
var = np.abs(f_linear(x[j]) - ds[i-1])
if var > tolerance*np.abs(ds[i-1]):
out = True
j -= 1
if indx_conditions and change_conditions :
sign_before = np.sign(ds[i-1] - ds[i-2])
ind_mid = (ind[i]+ind[i-1]) // 2
# if convex
if sign_before < 0:
bound = ds[i-1] - np.abs(ds[i-1])*tolerance
# if concave
else:
bound = ds[i-1] + np.abs(ds[i-1])*tolerance
# check if smart_linear_interpolation would use zero or linear
# interpolation
if out:
                # zero-style case: run a PCHIP through the neighbouring
                # samples, holding ds[i-1] until just before ind[i]
interp_indx = [ind[i-2],ind[i-1],ind[i]-1, ind[i],ind[i+1]]
interp_points = [ds[i-2],ds[i-1],ds[i-1], ds[i],ds[i+1]]
y_ = interpolate.pchip_interpolate(interp_indx,interp_points,indx)
else:
# if linear interpolation, we choose a point in the middle
# of linear and the percentage rule limit
pt_mid = (f_linear(ind_mid)+bound) / 2
Solutions Inc.",
"001D04": "Zipit Wireless, Inc.",
"001D05": "iLight",
"001D06": "HM Electronics, Inc.",
"001D07": "Shenzhen Sang Fei Consumer Communications Co.,Ltd",
"001D08": "JIANGSU YINHE ELECTRONICS CO., LTD",
"001D09": "Dell Inc",
"001D0A": "Davis Instruments, Inc.",
"001D0B": "Power Standards Lab",
"001D0C": "MobileCompia",
"001D0D": "Sony Computer Entertainment inc.",
"001D0E": "Agapha Technology co., Ltd.",
"001D0F": "TP-LINK Technologies Co., Ltd.",
"001D10": "LightHaus Logic, Inc.",
"001D11": "Analogue & Micro Ltd",
"001D12": "ROHM CO., LTD.",
"001D13": "NextGTV",
"001D14": "SPERADTONE INFORMATION TECHNOLOGY LIMITED",
"001D15": "Shenzhen Dolphin Electronic Co., Ltd",
"001D16": "SFR",
"001D17": "Digital Sky Corporation",
"001D18": "Power Innovation GmbH",
"001D19": "Arcadyan Technology Corporation",
"001D1A": "OvisLink S.A.",
"001D1B": "Sangean Electronics Inc.",
"001D1C": "Gennet s.a.",
"001D1D": "Inter-M Corporation",
"001D1E": "KYUSHU TEN CO.,LTD",
"001D1F": "Siauliu Tauro Televizoriai, JSC",
"001D20": "COMTREND CO.",
"001D21": "Alcad SL",
"001D22": "Foss Analytical A/S",
"001D23": "SENSUS",
"001D24": "Aclara Power-Line Systems Inc.",
"001D25": "Samsung Electronics Co.,Ltd",
"001D26": "Rockridgesound Technology Co.",
"001D27": "NAC-INTERCOM",
"001D28": "Sony Ericsson Mobile Communications AB",
"001D29": "Doro AB",
"001D2A": "SHENZHEN BUL-TECH CO.,LTD.",
"001D2B": "Wuhan Pont Technology CO. , LTD",
"001D2C": "Wavetrend Technologies (Pty) Limited",
"001D2D": "Pylone, Inc.",
"001D2E": "Ruckus Wireless",
"001D2F": "QuantumVision Corporation",
"001D30": "YX Wireless S.A.",
"001D31": "HIGHPRO INTERNATIONAL R&D CO,.LTD.",
"001D32": "Longkay Communication & Technology (Shanghai) Co. Ltd",
"001D33": "Maverick Systems Inc.",
"001D34": "SYRIS Technology Corp",
"001D35": "Viconics Electronics Inc.",
"001D36": "ELECTRONICS CORPORATION OF INDIA LIMITED",
"001D37": "Thales-Panda Transportation System",
"001D38": "Seagate Technology",
"001D39": "MOOHADIGITAL CO., LTD",
"001D3A": "mh acoustics LLC",
"001D3B": "Nokia Danmark A/S",
"001D3C": "Muscle Corporation",
"001D3D": "Avidyne Corporation",
"001D3E": "SAKA TECHNO SCIENCE CO.,LTD",
"001D3F": "Mitron Pty Ltd",
"001D40": " Intel \u2013 GE Care Innovations LLC",
"001D41": "Hardy Instruments",
"001D42": "Nortel",
"001D43": "Shenzhen G-link Digital Technology Co., Ltd.",
"001D44": "KROHNE",
"001D45": "CISCO SYSTEMS, INC.",
"001D46": "CISCO SYSTEMS, INC.",
"001D47": "Covote GmbH & Co KG",
"001D48": "Sensor-Technik Wiedemann GmbH",
"001D49": "Innovation Wireless Inc.",
"001D4A": "Carestream Health, Inc.",
"001D4B": "Grid Connect Inc.",
"001D4C": "Alcatel-Lucent",
"001D4D": "Adaptive Recognition Hungary, Inc",
"001D4E": "TCM Mobile LLC",
"001D4F": "Apple",
"001D50": "SPINETIX SA",
"001D51": "Babcock & Wilcox Power Generation Group, Inc",
"001D52": "Defzone B.V.",
"001D53": "S&O Electronics (Malaysia) Sdn. Bhd.",
"001D54": "Sunnic Technology & Merchandise INC.",
"001D55": "ZANTAZ, Inc",
"001D56": "Kramer Electronics Ltd.",
"001D57": "CAETEC Messtechnik",
"001D58": "CQ Inc",
"001D59": "Mitra Energy & Infrastructure",
"001D5A": "2Wire Inc.",
"001D5B": "Tecvan Inform\u00e1tica Ltda",
"001D5C": "Tom Communication Industrial Co.,Ltd.",
"001D5D": "Control Dynamics Pty. Ltd.",
"001D5E": "COMING MEDIA CORP.",
"001D5F": "OverSpeed SARL",
"001D60": "ASUSTek COMPUTER INC.",
"001D61": "BIJ Corporation",
"001D62": "InPhase Technologies",
"001D63": "Miele & Cie. KG",
"001D64": "Adam Communications Systems Int Ltd",
"001D65": "Microwave Radio Communications",
"001D66": "Hyundai Telecom",
"001D67": "AMEC",
"001D68": "Thomson Telecom Belgium",
"001D69": "Knorr-Bremse IT-Services GmbH",
"001D6A": "Alpha Networks Inc.",
"001D6B": "ARRIS Group, Inc.",
"001D6C": "ClariPhy Communications, Inc.",
"001D6D": "Confidant International LLC",
"001D6E": "Nokia Danmark A/S",
"001D6F": "Chainzone Technology Co., Ltd",
"001D70": "CISCO SYSTEMS, INC.",
"001D71": "CISCO SYSTEMS, INC.",
"001D72": "Wistron Corporation",
"001D73": "Buffalo Inc.",
"001D74": "Tianjin China-Silicon Microelectronics Co., Ltd.",
"001D75": "Radioscape PLC",
"001D76": "Eyeheight Ltd.",
"001D77": "NSGate",
"001D78": "Invengo Information Technology Co.,Ltd",
"001D79": "SIGNAMAX LLC",
"001D7A": "Wideband Semiconductor, Inc.",
"001D7B": "Ice Energy, Inc.",
"001D7C": "ABE Elettronica S.p.A.",
"001D7D": "GIGA-BYTE TECHNOLOGY CO.,LTD.",
"001D7E": "Cisco-Linksys, LLC",
"001D7F": "Tekron International Ltd",
"001D80": "Beijing Huahuan Eletronics Co.,Ltd",
"001D81": "GUANGZHOU GATEWAY ELECTRONICS CO., LTD",
"001D82": "GN A/S (GN Netcom A/S)",
"001D83": "Emitech Corporation",
"001D84": "Gateway, Inc.",
"001D85": "Call Direct Cellular Solutions",
"001D86": "Shinwa Industries(China) Ltd.",
"001D87": "VigTech Labs Sdn Bhd",
"001D88": "Clearwire",
"001D89": "VaultStor Corporation",
"001D8A": "TechTrex Inc",
"001D8B": "ADB Broadband Italia",
"001D8C": "La Crosse Technology LTD",
"001D8D": "Raytek GmbH",
"001D8E": "Alereon, Inc.",
"001D8F": "PureWave Networks",
"001D90": "EMCO Flow Systems",
"001D91": "Digitize, Inc",
"001D92": "MICRO-STAR INT'L CO.,LTD.",
"001D93": "Modacom",
"001D94": "Climax Technology Co., Ltd",
"001D95": "Flash, Inc.",
"001D96": "WatchGuard Video",
"001D97": "Alertus Technologies LLC",
"001D98": "Nokia Danmark A/S",
"001D99": "Cyan Optic, Inc.",
"001D9A": "GODEX INTERNATIONAL CO., LTD",
"001D9B": "Hokuyo Automatic Co., Ltd.",
"001D9C": "Rockwell Automation",
"001D9D": "ARTJOY INTERNATIONAL LIMITED",
"001D9E": "AXION TECHNOLOGIES",
"001D9F": "MATT R.P.Traczynscy Sp.J.",
"001DA0": "Heng Yu Electronic Manufacturing Company Limited",
"001DA1": "CISCO SYSTEMS, INC.",
"001DA2": "CISCO SYSTEMS, INC.",
"001DA3": "SabiOso",
"001DA4": "Hangzhou System Technology CO., LTD",
"001DA5": "WB Electronics",
"001DA6": "Media Numerics Limited",
"001DA7": "Seamless Internet",
"001DA8": "Takahata Electronics Co.,Ltd",
"001DA9": "Castles Technology, Co., LTD",
"001DAA": "DrayTek Corp.",
"001DAB": "SwissQual License AG",
"001DAC": "Gigamon Systems LLC",
"001DAD": "Sinotech Engineering Consultants, Inc. Geotechnical Enginee",
"001DAE": "CHANG TSENG TECHNOLOGY CO., LTD",
"001DAF": "Nortel",
"001DB0": "FuJian HengTong Information Technology Co.,Ltd",
"001DB1": "Crescendo Networks",
"001DB2": "HOKKAIDO ELECTRIC ENGINEERING CO.,LTD.",
"001DB3": "HPN Supply Chain",
"001DB4": "KUMHO ENG CO.,LTD",
"001DB5": "Juniper networks",
"001DB6": "BestComm Networks, Inc.",
"001DB7": "Tendril Networks, Inc.",
"001DB8": "Intoto Inc.",
"001DB9": "Wellspring Wireless",
"001DBA": "Sony Corporation",
"001DBB": "Dynamic System Electronics Corp.",
"001DBC": "Nintendo Co., Ltd.",
"001DBD": "Versamed Inc.",
"001DBE": "ARRIS Group, Inc.",
"001DBF": "Radiient Technologies, Inc.",
"001DC0": "Enphase Energy",
"001DC1": "Audinate Pty L",
"001DC2": "XORTEC OY",
"001DC3": "RIKOR TV, Ltd",
"001DC4": "AIOI Systems Co., Ltd.",
"001DC5": "Beijing Jiaxun Feihong Electricial Co., Ltd.",
"001DC6": "SNR Inc.",
"001DC7": "L-3 Communications Geneva Aerospace",
"001DC8": "Navionics Research Inc., dba SCADAmetrics",
"001DC9": "GainSpan Corp.",
"001DCA": "PAV Electronics Limited",
"001DCB": "Ex\u00e9ns Development Oy",
"001DCC": "Hetra Secure Solutions",
"001DCD": "ARRIS Group, Inc.",
"001DCE": "ARRIS Group, Inc.",
"001DCF": "ARRIS Group, Inc.",
"001DD0": "ARRIS Group, Inc.",
"001DD1": "ARRIS Group, Inc.",
"001DD2": "ARRIS Group, Inc.",
"001DD3": "ARRIS Group, Inc.",
"001DD4": "ARRIS Group, Inc.",
"001DD5": "ARRIS Group, Inc.",
"001DD6": "ARRIS Group, Inc.",
"001DD7": "Algolith",
"001DD8": "Microsoft Corporation",
"001DD9": "Hon Hai Precision Ind.Co.,Ltd.",
"001DDA": "Mikroelektronika spol. s r. o.",
"001DDB": "C-BEL Corporation",
"001DDC": "HangZhou DeChangLong Tech&Info Co.,Ltd",
"001DDD": "DAT H.K. LIMITED",
"001DDE": "Zhejiang Broadcast&Television Technology Co.,Ltd.",
"001DDF": "Sunitec Enterprise Co., Ltd.",
"001DE0": "Intel Corporate",
"001DE1": "Intel Corporate",
"001DE2": "Radionor Communications",
"001DE3": "Intuicom",
"001DE4": "Visioneered Image Systems",
"001DE5": "CISCO SYSTEMS, INC.",
"001DE6": "CISCO SYSTEMS, INC.",
"001DE7": "Marine Sonic Technology, Ltd.",
"001DE8": "Nikko Denki Tsushin Corporation(NDTC)",
"001DE9": "Nokia Danmark A/S",
"001DEA": "Commtest Instruments Ltd",
"001DEB": "DINEC International",
"001DEC": "Marusys",
"001DED": "Grid Net, Inc.",
"001DEE": "NEXTVISION SISTEMAS DIGITAIS DE TELEVIS\u00c3O LTDA.",
"001DEF": "TRIMM, INC.",
"001DF0": "Vidient Systems, Inc.",
"001DF1": "Intego Systems, Inc.",
"001DF2": "Netflix, Inc.",
"001DF3": "SBS Science & Technology Co., Ltd",
"001DF4": "Magellan Technology Pty Limited",
"001DF5": "Sunshine Co,LTD",
"001DF6": "Samsung Electronics Co.,Ltd",
"001DF7": "R. STAHL Schaltger\u00e4te GmbH",
"001DF8": "Webpro Vision Technology Corporation",
"001DF9": "Cybiotronics (Far East) Limited",
"001DFA": "Fujian LANDI Commercial Equipment Co.,Ltd",
"001DFB": "NETCLEUS Systems Corporation",
"001DFC": "KSIC",
"001DFD": "Nokia Danmark A/S",
"001DFE": "Palm, Inc",
"001DFF": "Network Critical Solutions Ltd",
"001E00": "Shantou Institute of Ultrasonic Instruments",
"001E01": "Renesas Technology Sales Co., | |
        from an edge of the graph and return them as a dictionary.
node1 - The name of the first node.
node2 - The name of the second node.
"""
return self.graph.get_edge_data(node1, node2)
# attrList = self.graph.edge_attributes((node1, node2))
# for attr in attrList:
# if type(attr) == type({}):
# retval = attr
# retval['wt'] = self.graph.edge_weight((node1, node2))
# return retval
# retval = {}
# retval['wt'] = self.graph.edge_weight(node1, node2)
# return retval
def _calculateAndSetGlobalEvents(self):
"""Calculate the global events and record their names."""
self.globalEvents = []
for eventProfile in self.eventProfiles:
if eventProfile.isHappening(self.day, self.globalEvents):
self.globalEvents += [eventProfile.name]
def doTurn(self):
"""Process one turn of the game engine."""
self.day += 1
# Calculate and set all the events
self._calculateAndSetGlobalEvents()
for node in self.graph.nodes():
self._calculateAndSetNodeEvents(node)
for edge in self.graph.edges():
self._calculateAndSetEdgeEvents(edge)
for being in self.beings:
# print('\nTURN: {0}'.format(being.name))
being.doTurn(self)
self.encounterCheck(being)
# Resolve all the encounters
for encounter in self.encounters:
while True:
keepGoing = encounter.doTurn()
if not keepGoing:
break
self.encounters = []
return True
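# A minimal driver sketch (assumes a fully constructed Game instance named
# `game`; construction happens elsewhere in this module, and MAX_DAYS is a
# hypothetical stop condition):
#
#     MAX_DAYS = 100
#     while game.doTurn():
#         if game.day >= MAX_DAYS:
#             break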
class Player:
"""The interface you must define to create a new player."""
def initGame(self, playerNumber: int) -> str:
"""
Call to each player to start a game.
playerNumber - Number that is used to uniquify each player name.
        Returns the name of the being that represents the player.
"""
raise NotImplementedError("initGame is virtual and must be overridden.")
def chooseDestination(self, game: Game) -> Optional[str]:
"""
Choose a new destination node. This must be a neighbor node to your current location.
game - Game object.
Returns the name of the new destination node.
"""
raise NotImplementedError("chooseDestination is virtual and must be overridden.")
def safeTravelUpdate(self, game: Game, distanceLeft: int):
"""
You have safely and uneventfully travelled for one day.
game - Game object.
"""
raise NotImplementedError("safeTravelUpdate is virtual and must be overridden.")
def voteInitState(self, game: Game, being: 'Being') -> EncounterStateCode:
"""
Vote on what state to begin an encounter in.
being - The other being you are about to encounter.
Return one of (encounter.EncounterStateCode.COMBAT,
encounter.EncounterStateCode.TRADE,
encounter.EncounterStateCode.SEARCH).
"""
raise NotImplementedError("voteInitState is virtual and must be overridden.")
def chooseCombatAction(self, game: Game, being: 'Being', cmbt: Combat) -> CombatAction:
"""
You have been confronted by an enemy and must choose a response.
game - Game object.
being - Being object of the enemy.
cmbt - Combat object with the status of the combat.
Returns a CombatAction
"""
raise NotImplementedError("chooseCombatAction is virtual and must be overridden.")
def combatEvents(self, game: Game, events: List[CombatEvent]):
"""
A series of updates for ongoing combat.
game - Game object.
events - List of objects of type CombatEvent.
"""
raise NotImplementedError("combatEvents is virtual and must be overridden.")
def arrived(self, game: Game):
"""
You have arrived at your destination node.
game - the Game object.
"""
raise NotImplementedError("arrived is virtual and must be overridden.")
def nodeEvents(self, game: Game, events: Tuple[str]):
"""
Tell the player which events are happening at the node they are currently at.
game - the Game object.
events - A tuple of event names that are currently happening.
For a full description of the event the player can call Game::getNodeEventDescription.
"""
raise NotImplementedError("nodeEvents is virtual and must be overridden.")
def advertiseTrade(self, game: Game, meBeing: 'Being') -> Dict[str, int]:
"""
Ask the player to advertise the sell prices for all goods in his inventory.
game - the Game object.
meBeing - Being object representing you.
Returns a dictionary from goodName -> sellPrice.
"""
raise NotImplementedError("advertiseTrade is virtual and must be overridden.")
def readTradeAdvertisement(self, game: Game, prices: Dict[str, int]):
"""
        Receive advertisements about another player's inventory and prices.
game - the Game object.
prices - a dictionary from goodName -> sellPrice.
"""
raise NotImplementedError("readTradeAdvertisement is virtual and must be overridden.")
def chooseTradeAction(self, game: Game, meBeing: 'Being', themBeing: 'Being') -> Tuple[TradeAction, Optional[int], Optional[str], Optional[int]]: # noqa: E501
"""
You have an opportunity to trade commodities, weapons, or vessels.
game - the Game object.
meBeing - Being object representing you.
themBeing - Being object representing your counterpart.
Returns a tuple of (TradeAction, quantity, goodName, price).
"""
raise NotImplementedError("chooseTradeAction is virtual and must be overridden.")
def evaluateTradeRequest(self, game: Game, meBeing: 'Being', themBeing: 'Being', tradeAction: TradeAction,
quantity: int, goodName: str, price: int) -> bool:
"""
        Evaluate a trade request proposed by your counterpart.
game - the Game object.
meBeing - Being object representing you.
themBeing - Being object representing your counterpart.
tradeAction - One of TradeAction
quantity - How much of the good.
goodName - Name of the good.
price - Price per unit being offered.
Returns True or False.
"""
raise NotImplementedError("evaluateTradeRequest is virtual and must be overridden.")
def tradeEvents(self, game: Game, events: List[TradeEvent]):
"""
A series of updates for ongoing trade.
game - Game object.
events - List of objects of type TradeEvent.
"""
raise NotImplementedError("tradeEvents is virtual and must be overridden.")
def chooseSearchAction(self, game: Game, meBeing: 'Being', themBeing: 'Being') -> SearchAction:
"""
        Choose your action in a search encounter.
        game - the Game object.
meBeing - Being object representing you.
themBeing - Being object representing your counterpart.
Returns SearchAction.
"""
raise NotImplementedError("chooseSearchAction is virtual and must be overridden.")
def evaluateBoardRequest(self, game: Game, meBeing: 'Being', themBeing: 'Being') -> SearchAction:
"""
Evaluate a request to board your vessel.
game - the Game object.
meBeing - Being object representing you.
themBeing - Being object representing your counterpart.
Returns SearchAction.
"""
raise NotImplementedError("evaluateBoardRequest is virtual and must be overridden.")
def evaluateBribeSolicitation(self, game: Game, meBeing: 'Being', themBeing: 'Being') -> Tuple[SearchAction, int]:
"""
Evaluate a solicitation of a bribe.
game - the Game object.
meBeing - Being object representing you.
themBeing - Being object representing your counterpart.
Returns a tuple of ( SearchAction, bribeAmount )
"""
raise NotImplementedError("evaluateBribeSolicitation is virtual and must be overridden.")
def seize(self, game: Game, themInventory: 'Inventory') -> 'Inventory':
"""
You have successfully boarded another vessel and can seize what you like from their inventory.
game - the Game object.
themInventory - Inventory of the vessel you have boarded.
Returns a being.Inventory object of the stuff you are seizing.
"""
raise NotImplementedError("seize is virtual and must be overridden.")
def searchEvents(self, game: Game, events: List[SearchEvent]):
"""
A series of updates for ongoing search encounter.
game - Game object.
events - List of objects of type SearchEvent.
"""
raise NotImplementedError("searchEvents is virtual and must be overridden.")
def death(self, game: Game, deathReason: DeathReason):
"""
You are dead.
game - Game object.
deathReason - Code for the reason you are dead.
"""
raise NotImplementedError("death is virtual and must be overridden.")
class Inventory:
"""
"""
    def __init__(self,
                 goods: Optional[Counter] = None,
                 vessel: Optional[Vessel] = None,
                 money: int = 0):
        # A None default avoids sharing one mutable Counter across instances.
        self.goods = goods if goods is not None else Counter()
        self.vessel = vessel
        self.money = int(money)
def __str__(self):
return 'goods={0}\nvessel={1}\nmoney={2}'.format(self.goods,
self.vessel,
self.money)
def add(self, otherInventory: 'Inventory'):
"""
Add another inventory into this inventory (EXCEPT the vessel).
otherInventory - The other inventory we are adding.
"""
self.goods += otherInventory.goods
self.money += otherInventory.money
def subtract(self, otherInventory: 'Inventory'):
"""
        Subtract another inventory from this inventory (EXCEPT the vessel).
otherInventory - The other inventory we are subtracting.
"""
self.goods -= otherInventory.goods
self.money -= otherInventory.money
class Being:
"""
"""
def __init__(self, game: Game, name: str, player: Player,
inventory: Optional[Inventory] = None, initialLocation: str = ''):
self.name = name
self.player = player
if inventory:
self.inventory = inventory
else:
self.inventory = Inventory()
self.destination = ''
self.lastDestination = ''
self.currentLocation = initialLocation
self._state: BeingState = NodeBeingState(self, game)
self._dead = False
def __str__(self):
return 'name={0}\nplayer={1}\ninventory={2}\ndestination={3}\nlastDestination={4}\ncurrentLocation={5}\nstate={6}\n'.format(self.name, self.player, self.inventory, self.destination, self.lastDestination, self.currentLocation, self._state) # noqa: E501
def doTurn(self, game: Game):
return self._state.doTurn(game)
def embarking(self, game: Game, newDestination: str):
"""
        This is called to change this Being's state to travelling and to select a destination.
game - The Game object.
newDestination - The name of the destination the being is travelling to.
"""
self._state = TravelBeingState(self, game.distance(self.currentLocation, newDestination))
self.lastDestination = self.currentLocation
self.destination = newDestination
self.currentLocation = ''
def arrived(self, game: Game):
"""
        This is called to change this Being's state from travelling to being at a node.
game - The Game object.
"""
self.currentLocation = self.destination
self.lastDestination = ''
        self.destination = ''
            Time_r = 100 # Return periods (yrs) of interest
nb_steps = 1000 # Enter discretization of the circle in the normal space (optional)
# Non-Parametric Clayton copula contour generation example
Hs_Return, T_Return = NonParaClayton46022.getContours(Time_SS, Time_r,nb_steps)
'''
self.time_ss = time_ss
self.time_r = time_r
self.nb_steps = nb_steps
comp_1 = np.zeros(nb_steps)
comp_2_Clay = np.zeros(nb_steps)
# Inverse FORM
p_f = 1 / (365 * (24 / time_ss) * time_r)
beta = stats.norm.ppf((1 - p_f), loc=0, scale=1) # Reliability
# Normal Space
theta = np.linspace(0, 2 * np.pi, num = nb_steps)
U1 = beta * np.cos(theta)
U2 = beta * np.sin(theta)
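        # Inverse-FORM construction: p_f is the per-sea-state exceedance
        # probability for the desired return period (there are
        # 365 * (24 / time_ss) * time_r sea states in time_r years),
        # beta = Phi^-1(1 - p_f) is the reliability index, and (U1, U2)
        # trace the circle of radius beta in standard-normal space that is
        # mapped onto the physical contour below.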
# Copula parameters
        tau = stats.kendalltau(self.buoy.T, self.buoy.Hs)[0]  # Calculate Kendall's tau
        theta_clay = (2. * tau) / (1. - tau)
# Component 1 (Hs)
z1_Hs = stats.norm.cdf(U1)
for k in range(0,nb_steps):
for j in range(0,np.size(self.nonpara_dist_1,0)):
if z1_Hs[k] <= self.nonpara_dist_1[0,1]:
comp_1[k] = min(self.nonpara_dist_1[:,0])
break
elif z1_Hs[k] <= self.nonpara_dist_1[j,1]:
comp_1[k] = (self.nonpara_dist_1[j,0] + self.nonpara_dist_1[j-1,0])/2
break
else:
comp_1[k]= max(self.nonpara_dist_1[:,0])
# Component 2 (T)
        z2_Clay = ((1. - stats.norm.cdf(U1)**(-theta_clay) + stats.norm.cdf(U1)**(-theta_clay) / stats.norm.cdf(U2))**(theta_clay / (1. + theta_clay)))**(-1. / theta_clay)
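        # z2_Clay inverts the Clayton conditional CDF, turning the
        # independent normals (U1, U2) into uniforms whose joint behavior
        # follows the Clayton copula; each coordinate is then pushed through
        # its empirical marginal (nonpara_dist) by table lookup below.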
for k in range(0,nb_steps):
for j in range(0,np.size(self.nonpara_dist_2,0)):
if z2_Clay[k] <= self.nonpara_dist_2[0,1]:
                    comp_2_Clay[k] = min(self.nonpara_dist_2[:,0])
break
elif z2_Clay[k] <= self.nonpara_dist_2[j,1]:
comp_2_Clay[k] = (self.nonpara_dist_2[j,0] + self.nonpara_dist_2[j-1,0])/2
break
else:
comp_2_Clay[k]= max(self.nonpara_dist_2[:,0])
Hs_Return = comp_1
T_Return = comp_2_Clay
self.Hs_ReturnContours = Hs_Return
self.T_ReturnContours = T_Return
return Hs_Return, T_Return
def getSamples(self):
'''Currently not implemented in this version.'''
raise NotImplementedError
def _saveParams(self, groupObj):
groupObj.create_dataset('nonpara_dist_1', data=self.nonpara_dist_1)
groupObj.create_dataset('nonpara_dist_2', data=self.nonpara_dist_2)
class NonParaGumbelCopula(EA):
'''Create a NonParaGumbelCopula EA class for a buoy object. Contours
generated under this class will use a Gumbel copula with non-parametric
marginal distribution fits.'''
def __init__(self, buoy, Ndata = 1000, max_T=None, max_Hs=None):
'''
Parameters
----------
buoy : NDBCData
ESSC.Buoy Object
NData: int
discretization resolution used in KDE construction
        max_T: float
            Maximum T value for KDE construction, must include possible
            range of contour. Default value is 2*max(T)
        max_Hs: float
            Maximum Hs value for KDE construction, must include possible
            range of contour. Default value is 2*max(Hs)
'''
self.method = "Non-parametric Gumbel Copula"
self.buoy = buoy
self.Ndata = Ndata
self.Hs_ReturnContours = None
# self.Hs_SampleCA = None
# self.Hs_SampleFSS = None
self.T_ReturnContours = None
# self.T_SampleCA = None
# self.T_SampleFSS = None
# self.Weight_points = None
# self.coeff, self.shift, self.comp1_params, self.sigma_param, self.mu_param = self.__generateParams(size_bin)
        if max_T is None:
            max_T = max(self.buoy.T) * 2.
        if max_Hs is None:
            max_Hs = max(self.buoy.Hs) * 2.
self.max_T = max_T
self.max_Hs = max_Hs
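        # self._EA__getNonParaCopulaParams is the name-mangled form of the
        # private __getNonParaCopulaParams method defined on the EA base
        # class.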
self.nonpara_dist_1,self.nonpara_dist_2,self.nonpara_pdf_2 = self._EA__getNonParaCopulaParams(Ndata,max_T,max_Hs)
def getContours(self, time_ss, time_r, nb_steps = 1000):
        '''WDRT Extreme Sea State non-parametric Gumbel Copula Contour
function. This function calculates environmental contours of extreme
sea states using a Gumbel copula with non-parametric marginal
distribution fits and the inverse first-order reliability method.
Parameters
___________
time_ss : float
Sea state duration (hours) of measurements in input.
time_r : np.array
Desired return period (years) for calculation of environmental
contour, can be a scalar or a vector.
        nb_steps : int
Discretization of the circle in the normal space used for
inverse FORM calculation.
Returns
-------
Hs_Return : np.array
Calculated Hs values along the contour boundary following
return to original input orientation.
T_Return : np.array
Calculated T values along the contour boundary following
return to original input orientation.
Example
-------
To obtain the contours for a NDBC buoy::
import WDRT.ESSC as ESSC
# Pull spectral data from NDBC website
buoy46022 = ESSC.Buoy('46022','NDBC')
buoy46022.fetchFromWeb()
            # Create Environmental Analysis object using above parameters
NonParaGumbel46022 = ESSC.NonParaGumbelCopula(buoy46022)
# Declare required parameters
Time_SS = 1. # Sea state duration (hrs)
Time_r = 100 # Return periods (yrs) of interest
nb_steps = 1000 # Enter discretization of the circle in the normal space (optional)
# Non-Parametric Gumbel copula contour generation example
Hs_Return, T_Return = NonParaGumbel46022.getContours(Time_SS, Time_r,nb_steps)
'''
self.time_ss = time_ss
self.time_r = time_r
self.nb_steps = nb_steps
comp_1 = np.zeros(nb_steps)
comp_2_Gumb = np.zeros(nb_steps)
# Inverse FORM
p_f = 1 / (365 * (24 / time_ss) * time_r)
beta = stats.norm.ppf((1 - p_f), loc=0, scale=1) # Reliability
# Normal Space
theta = np.linspace(0, 2 * np.pi, num = nb_steps)
U1 = beta * np.cos(theta)
U2 = beta * np.sin(theta)
# Copula parameters
        tau = stats.kendalltau(self.buoy.T, self.buoy.Hs)[0]  # Calculate Kendall's tau
        theta_gum = 1. / (1. - tau)
# Component 1 (Hs)
z1_Hs = stats.norm.cdf(U1)
for k in range(0,nb_steps):
for j in range(0,np.size(self.nonpara_dist_1,0)):
if z1_Hs[k] <= self.nonpara_dist_1[0,1]:
comp_1[k] = min(self.nonpara_dist_1[:,0])
break
elif z1_Hs[k] <= self.nonpara_dist_1[j,1]:
comp_1[k] = (self.nonpara_dist_1[j,0] + self.nonpara_dist_1[j-1,0])/2
break
else:
comp_1[k]= max(self.nonpara_dist_1[:,0])
# Component 2 (T)
        fi_u1 = stats.norm.cdf(U1)
        fi_u2 = stats.norm.cdf(U2)
for k in range(0,nb_steps):
z1 = np.linspace(fi_u1[k],fi_u1[k],self.Ndata)
Z = np.array((np.transpose(z1),self.nonpara_dist_2[:,1]))
Y = self._EA__gumbelCopula(Z, theta_gum)
            Y = np.nan_to_num(Y)  # Guard against NaNs from the copula density (TODO: investigate why they occur)
p_x2_x1 = Y*self.nonpara_pdf_2[:,1]
dum = np.cumsum(p_x2_x1)
cdf = dum/(dum[self.Ndata-1])
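            # p_x2_x1 weights the marginal KDE pdf by the Gumbel copula
            # density, giving (up to normalization) the conditional density
            # of T given Hs = fi_u1[k]; its normalized cumulative sum is the
            # conditional CDF, which the table lookup below inverts.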
table = np.array((self.nonpara_pdf_2[:,0], cdf))
table = table.T
for j in range(self.Ndata):
if fi_u2[k] <= table[0,1]:
comp_2_Gumb[k] = min(table[:,0])
break
elif fi_u2[k] <= table[j,1]:
comp_2_Gumb[k] = (table[j,0]+table[j-1,0])/2
break
else:
comp_2_Gumb[k] = max(table[:,0])
Hs_Return = comp_1
T_Return = comp_2_Gumb
self.Hs_ReturnContours = Hs_Return
self.T_ReturnContours = T_Return
return Hs_Return, T_Return
def getSamples(self):
'''Currently not implemented in this version.'''
raise NotImplementedError
def _saveParams(self, groupObj):
groupObj.create_dataset('nonpara_dist_1', data=self.nonpara_dist_1)
groupObj.create_dataset('nonpara_dist_2', data=self.nonpara_dist_2)
class BivariateKDE(EA):
'''Create a BivariateKDE EA class for a buoy object. Contours
generated under this class will use a non-parametric KDE to fit the joint distribution.'''
def __init__(self, buoy, bw, NData = 100, logTransform = False, max_T=None, max_Hs=None):
'''
Parameters
----------
buoy : NDBCData
ESSC.Buoy Object
bw: np.array
Array containing KDE bandwidth for Hs and T
NData: int
Discretization resolution used in KDE construction
logTransform: Boolean
Logical. True if log transformation should be taken prior to
KDE construction. Default value is False.
        max_T: float
            Maximum T value for KDE construction, must include possible
            range of contour. Default value is 2*max(T)
        max_Hs: float
            Maximum Hs value for KDE construction, must include possible
            range of contour. Default value is 2*max(Hs)
'''
if logTransform:
self.method = "Bivariate KDE, Log Transform"
else:
self.method = "Bivariate KDE"
self.buoy = buoy
        if max_T is None:
            max_T = max(self.buoy.T) * 2.
        if max_Hs is None:
            max_Hs = max(self.buoy.Hs) * 2.
self.max_T = max_T
self.max_Hs = max_Hs
self.Hs_ReturnContours = None
self.T_ReturnContours = None
self.NData = NData
self.bw = bw
self.logTransform = logTransform
def getContours(self, time_ss, time_r):
        '''WDRT Extreme Sea State non-parametric bivariate KDE Contour
        function. This function calculates environmental contours of extreme
        sea states using a bivariate KDE to estimate the joint distribution.
        The contour is then calculated directly from the joint distribution.
Parameters
___________
time_ss : float
Sea state duration (hours) of measurements in input.
time_r : np.array
Desired return period (years) for calculation of environmental
contour, can be a scalar or a vector.
Returns
-------
Hs_Return : np.array
Calculated Hs values along the contour boundary following
return to original input orientation.
T_Return : np.array
Calculated T values along the contour boundary following
return to original input orientation.
Example
-------
To obtain the contours for a NDBC buoy::
import WDRT.ESSC as ESSC
# Pull spectral data from NDBC website
buoy46022 = ESSC.Buoy('46022','NDBC')
buoy46022.fetchFromWeb()
# Create Environmental Analysis object using above parameters
BivariateKDE46022 = ESSC.BivariateKDE(buoy46022, bw = [0.23,0.19], logTransform = False)
# Declare required parameters
Time_SS = 1. # Sea state duration (hrs)
Time_r = 100 # Return periods (yrs) of interest
# KDE contour generation example
Hs_Return, T_Return = BivariateKDE46022.getContours(Time_SS, Time_r)
'''
p_f = 1 / (365 * (24 / time_ss) * time_r)
if self.logTransform:
# Take log of both variables
logTp = np.log(self.buoy.T)
logHs = np.log(self.buoy.Hs)
ty = [logTp, logHs]
else:
ty = [self.buoy.T, self.buoy.Hs]
# Create grid of points
Ndata = self.NData
min_limit_1 = 0.01
max_limit_1 = self.max_T
min_limit_2 = 0.01
max_limit_2 = self.max_Hs
pts_tp = np.linspace(min_limit_1, max_limit_1, Ndata)
pts_hs = np.linspace(min_limit_2, max_limit_2, Ndata)
pt1,pt2 = np.meshgrid(pts_tp, pts_hs)
pts_tp = pt1.flatten()
pts_hs = pt2.flatten()
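        # The flattened meshgrid enumerates all Ndata x Ndata (T, Hs) grid
        # points at which the bivariate KDE is evaluated; per the docstring,
        # the contour is then extracted from this gridded joint density at
        # the level implied by p_f.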
# Transform gridded points using log
xi = [pts_tp, pts_hs]
if self.logTransform:
txi = [np.log(pts_tp), np.log(pts_hs)]
else:
txi = xi
m = len(txi[0])
        n
        Algorithms: file compression algorithms
        gzip: use GZIP compression
        brotli: use Brotli compression
        Note: this field may return null, indicating that no valid values can be obtained.
:type Algorithms: list of str
"""
self.Compress = None
self.FileExtensions = None
self.MinLength = None
self.MaxLength = None
self.Algorithms = None
def _deserialize(self, params):
self.Compress = params.get("Compress")
self.FileExtensions = params.get("FileExtensions")
self.MinLength = params.get("MinLength")
self.MaxLength = params.get("MaxLength")
self.Algorithms = params.get("Algorithms")
class CookieKey(AbstractModel):
"""组成CacheKey的一部分
"""
def __init__(self):
"""
        :param Switch: on | off, whether to include the Cookie as part of the CacheKey
        Note: this field may return null, indicating that no valid values can be obtained.
        :type Switch: str
        :param Value: Cookies to use, separated by ';'
        Note: this field may return null, indicating that no valid values can be obtained.
:type Value: str
"""
self.Switch = None
self.Value = None
def _deserialize(self, params):
self.Switch = params.get("Switch")
self.Value = params.get("Value")
class CreateClsLogTopicRequest(AbstractModel):
"""CreateClsLogTopic请求参数结构体
"""
def __init__(self):
"""
        :param TopicName: Log topic name
        :type TopicName: str
        :param LogsetId: Logset ID
        :type LogsetId: str
        :param Channel: Access channel, defaults to cdn
        :type Channel: str
        :param DomainAreaConfigs: Domain region configuration
        :type DomainAreaConfigs: list of DomainAreaConfig
"""
self.TopicName = None
self.LogsetId = None
self.Channel = None
self.DomainAreaConfigs = None
def _deserialize(self, params):
self.TopicName = params.get("TopicName")
self.LogsetId = params.get("LogsetId")
self.Channel = params.get("Channel")
if params.get("DomainAreaConfigs") is not None:
self.DomainAreaConfigs = []
for item in params.get("DomainAreaConfigs"):
obj = DomainAreaConfig()
obj._deserialize(item)
self.DomainAreaConfigs.append(obj)
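# Illustrative sketch: these models deserialize from plain dicts, rebuilding
# nested model lists item by item. The field values below are hypothetical:
#
#   req = CreateClsLogTopicRequest()
#   req._deserialize({"TopicName": "my-topic", "LogsetId": "logset-id",
#                     "Channel": "cdn",
#                     "DomainAreaConfigs": [{"Domain": "example.com",
#                                            "Area": ["mainland"]}]})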
class CreateClsLogTopicResponse(AbstractModel):
"""CreateClsLogTopic返回参数结构体
"""
def __init__(self):
"""
        :param TopicId: Topic ID
        Note: this field may return null, indicating that no valid values can be obtained.
        :type TopicId: str
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
        :type RequestId: str
"""
self.TopicId = None
self.RequestId = None
def _deserialize(self, params):
self.TopicId = params.get("TopicId")
self.RequestId = params.get("RequestId")
class CreateDiagnoseUrlRequest(AbstractModel):
"""CreateDiagnoseUrl请求参数结构体
"""
def __init__(self):
"""
        :param Url: URL to diagnose, e.g.: http://www.test.com/test.txt.
:type Url: str
"""
self.Url = None
def _deserialize(self, params):
self.Url = params.get("Url")
class CreateDiagnoseUrlResponse(AbstractModel):
"""CreateDiagnoseUrl返回参数结构体
"""
def __init__(self):
"""
        :param DiagnoseLink: System-generated diagnostic link; each link can be visited at most 10 times and is valid for 24h.
        :type DiagnoseLink: str
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
:type RequestId: str
"""
self.DiagnoseLink = None
self.RequestId = None
def _deserialize(self, params):
self.DiagnoseLink = params.get("DiagnoseLink")
self.RequestId = params.get("RequestId")
class CreateEdgePackTaskRequest(AbstractModel):
"""CreateEdgePackTask请求参数结构体
"""
def __init__(self):
"""
        :param CosBucket: COS bucket where the apk is stored, e.g. edgepack-xxxxxxxx
        :type CosBucket: str
        :param CosUriFrom: Storage path of the source apk file, e.g. /apk/xxxx.apk
        :type CosUriFrom: str
        :param CosUriTo: Target storage path of the expanded apk, e.g. /out/xxxx.apk
        :type CosUriTo: str
        :param BlockID: BlockID value: 1903654775 (0x71777777) for WALLE, 2282837503 (0x881155ff) for VasDolly; defaults to the WALLE scheme when 0 or omitted
:type BlockID: int
"""
self.CosBucket = None
self.CosUriFrom = None
self.CosUriTo = None
self.BlockID = None
def _deserialize(self, params):
self.CosBucket = params.get("CosBucket")
self.CosUriFrom = params.get("CosUriFrom")
self.CosUriTo = params.get("CosUriTo")
self.BlockID = params.get("BlockID")
class CreateEdgePackTaskResponse(AbstractModel):
"""CreateEdgePackTask返回参数结构体
"""
def __init__(self):
"""
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class CreateScdnLogTaskRequest(AbstractModel):
"""CreateScdnLogTask请求参数结构体
"""
def __init__(self):
"""
        :param Mode: Protection type
        Mode mapping:
        waf = "Web attack"
        cc = "CC attack"
        bot = "Bot attack"
        :type Mode: str
        :param StartTime: Query start time, e.g.: 2018-09-04 10:40:00; results at or after the specified time are returned
        :type StartTime: str
        :param EndTime: Query end time, e.g.: 2018-09-04 10:40:00; results at or before the specified time are returned
        :type EndTime: str
        :param Domain: Queries a specific domain; all domains are queried if empty
        :type Domain: str
        :param AttackType: Queries a specific attack type; all attack types are queried if empty
        AttackType mapping:
        other = 'Unknown type'
        malicious_scan = "Malicious scan"
        sql_inject = "SQL injection attack"
        xss = "XSS attack"
        cmd_inject = "Command injection attack"
        ldap_inject = "LDAP injection attack"
        ssi_inject = "SSI injection attack"
        xml_inject = "XML injection attack"
        web_service = "Web service vulnerability attack"
        web_app = "Web application vulnerability attack"
        path_traversal = "Path traversal attack"
        illegal_access_core_file = "Illegal access to core files"
        trojan_horse = "Trojan backdoor attack"
        csrf = "CSRF attack"
        malicious_file_upload = 'Malicious file upload'
        js = "JS active detection"
        cookie = "Cookie fingerprinting"
        :type AttackType: str
        :param DefenceMode: Queries a specific action taken; all actions are queried if empty
        DefenceMode mapping:
        observe = 'observe mode'
        intercept = 'interception mode'
        captcha = "<PASSWORD>"
        redirect = "redirect"
        :type DefenceMode: str
        :param Ip: All IPs are queried if empty
        :type Ip: str
        :param Domains: Domains to query; when both Domain and Domains are set, Domains is used. All domains are queried if empty; at most 5 domains can be selected for a single query
        :type Domains: list of str
        :param AttackTypes: Attack types to query; when both AttackType and AttackTypes are set, AttackTypes is used. All attack types are queried if empty
        :type AttackTypes: list of str
        :param Conditions: Query conditions
        :type Conditions: list of ScdnEventLogConditions
"""
self.Mode = None
self.StartTime = None
self.EndTime = None
self.Domain = None
self.AttackType = None
self.DefenceMode = None
self.Ip = None
self.Domains = None
self.AttackTypes = None
self.Conditions = None
def _deserialize(self, params):
self.Mode = params.get("Mode")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Domain = params.get("Domain")
self.AttackType = params.get("AttackType")
self.DefenceMode = params.get("DefenceMode")
self.Ip = params.get("Ip")
self.Domains = params.get("Domains")
self.AttackTypes = params.get("AttackTypes")
if params.get("Conditions") is not None:
self.Conditions = []
for item in params.get("Conditions"):
obj = ScdnEventLogConditions()
obj._deserialize(item)
self.Conditions.append(obj)
class CreateScdnLogTaskResponse(AbstractModel):
"""CreateScdnLogTask返回参数结构体
"""
def __init__(self):
"""
        :param Result: Creation result,
        "0" -> created successfully
        :type Result: str
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
:type RequestId: str
"""
self.Result = None
self.RequestId = None
def _deserialize(self, params):
self.Result = params.get("Result")
self.RequestId = params.get("RequestId")
class CreateVerifyRecordRequest(AbstractModel):
"""CreateVerifyRecord请求参数结构体
"""
def __init__(self):
"""
        :param Domain: Domain to retrieve
:type Domain: str
"""
self.Domain = None
def _deserialize(self, params):
self.Domain = params.get("Domain")
class CreateVerifyRecordResponse(AbstractModel):
"""CreateVerifyRecord返回参数结构体
"""
def __init__(self):
"""
        :param SubDomain: Sub-domain record
        :type SubDomain: str
        :param Record: Record value
        :type Record: str
        :param RecordType: Record type
        :type RecordType: str
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
:type RequestId: str
"""
self.SubDomain = None
self.Record = None
self.RecordType = None
self.RequestId = None
def _deserialize(self, params):
self.SubDomain = params.get("SubDomain")
self.Record = params.get("Record")
self.RecordType = params.get("RecordType")
self.RequestId = params.get("RequestId")
class DeleteCdnDomainRequest(AbstractModel):
"""DeleteCdnDomain请求参数结构体
"""
def __init__(self):
"""
        :param Domain: Domain name
        The domain must be in [Disabled] status
:type Domain: str
"""
self.Domain = None
def _deserialize(self, params):
self.Domain = params.get("Domain")
class DeleteCdnDomainResponse(AbstractModel):
"""DeleteCdnDomain返回参数结构体
"""
def __init__(self):
"""
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteClsLogTopicRequest(AbstractModel):
"""DeleteClsLogTopic请求参数结构体
"""
def __init__(self):
"""
        :param TopicId: Log topic ID
        :type TopicId: str
        :param LogsetId: Logset ID
        :type LogsetId: str
        :param Channel: Access channel, defaults to cdn
:type Channel: str
"""
self.TopicId = None
self.LogsetId = None
self.Channel = None
def _deserialize(self, params):
self.TopicId = params.get("TopicId")
self.LogsetId = params.get("LogsetId")
self.Channel = params.get("Channel")
class DeleteClsLogTopicResponse(AbstractModel):
"""DeleteClsLogTopic返回参数结构体
"""
def __init__(self):
"""
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteScdnDomainRequest(AbstractModel):
"""DeleteScdnDomain请求参数结构体
"""
def __init__(self):
"""
        :param Domain: Domain name
:type Domain: str
"""
self.Domain = None
def _deserialize(self, params):
self.Domain = params.get("Domain")
class DeleteScdnDomainResponse(AbstractModel):
"""DeleteScdnDomain返回参数结构体
"""
def __init__(self):
"""
        :param Result: Deletion result; Success indicates success
        :type Result: str
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
:type RequestId: str
"""
self.Result = None
self.RequestId = None
def _deserialize(self, params):
self.Result = params.get("Result")
self.RequestId = params.get("RequestId")
class DescribeBillingDataRequest(AbstractModel):
"""DescribeBillingData请求参数结构体
"""
def __init__(self):
"""
        :param StartTime: Query start time, e.g.: 2018-09-04 10:40:00; results at or after the specified time are returned
        The time is floored according to the specified granularity: with a start time of 2018-09-04 10:40:00 and hourly granularity, the first returned data point corresponds to 2018-09-04 10:00:00
        The interval between start and end time must be 90 days or less
        :type StartTime: str
        :param EndTime: Query end time, e.g.: 2018-09-04 10:40:00; results at or before the specified time are returned
        The time is floored according to the specified granularity: with an end time of 2018-09-04 10:40:00 and hourly granularity, the last returned data point corresponds to 2018-09-04 10:00:00
        The interval between start and end time must be 90 days or less
        :type EndTime: str
        :param Interval: Time granularity; supported modes:
        min: 1-minute granularity; the query interval must be 24 hours or less
        5min: 5-minute granularity; the query interval must be 31 days or less
        hour: 1-hour granularity; the query interval must be 31 days or less
        day: daily granularity; the query interval must be longer than 31 days
        1-minute granularity is not yet supported when the Area field is overseas
        :type Interval: str
        :param Domain: Queries billing data for the specified domain
        :type Domain: str
        :param Project: Project ID to query; [view project IDs](https://console.cloud.tencent.com/project)
        If the Domain parameter is set, billing data for that domain is returned instead of the specified project's data
        :type Project: int
        :param Area: Acceleration region to query billing data for:
        mainland: inside the Chinese mainland
        overseas: outside the Chinese mainland
        Defaults to mainland if empty
        :type Area: str
        :param District: When Area is overseas, specifies a country/region to query
        Province and country/region codes are listed in the [province code mappings](https://cloud.tencent.com/document/product/228/6316#.E7.9C.81.E4.BB.BD.E6.98.A0.E5.B0.84)
        All countries/regions are queried if empty
        :type District: int
        :param Metric: Billing statistic type
        flux: billed traffic
        bandwidth: billed bandwidth
        Defaults to bandwidth
        :type Metric: str
"""
self.StartTime = None
self.EndTime = None
self.Interval = None
self.Domain = None
self.Project = None
self.Area = None
self.District = None
self.Metric = None
def _deserialize(self, params):
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Interval = params.get("Interval")
self.Domain = params.get("Domain")
self.Project = params.get("Project")
self.Area = params.get("Area")
self.District = params.get("District")
self.Metric = params.get("Metric")
class DescribeBillingDataResponse(AbstractModel):
"""DescribeBillingData返回参数结构体
"""
def __init__(self):
"""
        :param Interval: Time granularity, as specified by the query parameter:
        min: 1-minute granularity
        5min: 5-minute granularity
        hour: 1-hour granularity
        day: daily granularity
        :type Interval: str
        :param Data: Detailed data
        :type Data: list of ResourceBillingData
        :param RequestId: The unique request ID, which is returned for each request. RequestId is required for locating a problem.
:type RequestId: str
"""
self.Interval = None
self.Data = None
self.RequestId = None
def _deserialize(self, params):
self.Interval = params.get("Interval")
if params.get("Data") is not None:
self.Data = []
for item in params.get("Data"):
obj = ResourceBillingData()
obj._deserialize(item)
self.Data.append(obj)
self.RequestId = params.get("RequestId")
class DescribeCdnDataRequest(AbstractModel):
"""DescribeCdnData请求参数结构体
"""
def __init__(self):
"""
        :param StartTime: Query start time, e.g.: 2018-09-04 10:40:00; results at or after the specified time are returned
        The time is floored according to the specified granularity: with a start time of 2018-09-04 10:40:00 and 1-hour granularity, the first returned data point corresponds to 2018-09-04 10:00:00
        The interval between start and end time must be 90 days or less
        :type StartTime: str
        :param EndTime: Query end time, e.g.: 2018-09-04 10:40:00; results at or before the specified time are returned
        The time is floored according to the specified granularity: with an end time of 2018-09-04 10:40:00 and 1-hour granularity, the last returned data point corresponds to 2018-09-04 10:00:00
        The interval between start and end time must be 90 days or less
        :type EndTime: str
        :param Metric: Metric to query; supported types:
        flux: traffic, in bytes
        bandwidth: bandwidth, in bps
        request: number of requests
        fluxHitRate: traffic hit rate, in %
        statusCode: status codes; returns aggregated 2xx, 3xx, 4xx, 5xx data
        2xx: returns aggregated 2xx data plus counts for each status code starting with 2
        3xx: returns aggregated 3xx data plus counts for each status code starting with 3
        4xx: returns aggregated 4xx data plus counts for each status code starting with 4
        5xx: returns aggregated 5xx data plus counts for each status code starting with 5
        A specific status code can also be queried; empty is returned if it never occurred
        :type Metric: str
        :param Domains: List of domains to query
        Details for up to 30 acceleration domains can be queried at once
        :type Domains: list of str
        :param Project: Project ID to query; [view project IDs](https://console.cloud.tencent.com/project)
        Used when no domain is given; if a specific domain is given, the domain takes precedence
        :type Project: int
        :param Interval: Time granularity; supported modes:
        min: 1-minute granularity; for query intervals of 24 hours or less (inclusive), returns 1-minute details (not supported when the service region is outside the Chinese mainland)
        5min: 5-minute granularity; for query intervals of 31 days or less (inclusive), returns 5-minute details
        hour: 1-hour granularity; for query intervals of 31 days or less (inclusive), returns 1-hour details
        day: daily granularity; for query intervals longer than 31 days, returns daily details
        :type Interval: str
        :param Detail: When querying multiple domains, returns aggregated data for all of them by default (false)
        Set to true to return per-domain details (not yet supported for the statusCode metric)
        :type Detail: bool
        :param Isp: When querying CDN data inside the Chinese mainland, specifies an ISP; all ISPs are queried if empty
        ISP codes are listed in the [ISP code mappings](https://cloud.tencent.com/document/product/228/6316#.E5.8C.BA.E5.9F.9F-.2F-.E8.BF.90.E8.90.A5.E5.95.86.E6.98.A0.E5.B0.84.E8.A1.A8)
        When an ISP is specified, province and IP protocol cannot be specified at the same time
        :type Isp: int
        :param District: When querying CDN data inside the Chinese mainland, specifies a province; all provinces are queried if empty
        When querying CDN data outside the Chinese mainland, specifies a country/region; all countries/regions are queried if empty
        Province and country/region codes are listed in the [province code mappings](https://cloud.tencent.com/document/product/228/6316#.E5.8C.BA.E5.9F.9F-.2F-.E8.BF.90.E8.90.A5.E5.95.86.E6.98.A0.E5.B0.84.E8.A1.A8)
        When a (mainland) province is specified, ISP and IP protocol cannot be specified at the same time
        :type District: int
        :param Protocol: Protocol to query; all protocols are queried if empty
        all: all protocols
        http: queries HTTP metrics
        https: queries HTTPS metrics
        :type Protocol: str
        :param DataSource: Data source to query; allowlist feature
        :type DataSource: str
        :param IpProtocol: IP protocol to query; all protocols are queried if empty
        all: all protocols
        ipv4: queries IPv4 metrics
        ipv6: queries IPv6 metrics
        When an IP protocol is specified, province and ISP cannot be specified at the same time
        Note: users not on the IPv6 allowlist cannot query by ipv4 or ipv6
        :type IpProtocol: str
        :param Area: Service region to query; CDN data inside the Chinese mainland is queried if empty
        mainland: queries CDN data inside the Chinese mainland
        overseas: queries CDN data outside the Chinese mainland
        :type Area: str
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.hybrid import Comparator, hybrid_method, hybrid_property
from sqlalchemy.orm import joinedload, noload
from indico.core.db.sqlalchemy import PyIntEnum, db
from indico.core.db.sqlalchemy.util.models import get_simple_column_attrs
from indico.core.permissions import get_available_permissions
from indico.util.decorators import classproperty, strict_classproperty
from indico.util.fossilize import Fossilizable, IFossil, fossilizes
from indico.util.string import format_repr, return_ascii
from indico.util.struct.enum import IndicoEnum
class PrincipalType(int, IndicoEnum):
user = 1
local_group = 2
multipass_group = 3
email = 4
network = 5
event_role = 6
category_role = 7
registration_form = 8
def _make_check(type_, allow_emails, allow_networks, allow_event_roles, allow_category_roles,
allow_registration_forms, *cols):
all_cols = {'user_id', 'local_group_id', 'mp_group_provider', 'mp_group_name'}
if allow_emails:
all_cols.add('email')
if allow_networks:
all_cols.add('ip_network_group_id')
if allow_event_roles:
all_cols.add('event_role_id')
if allow_category_roles:
all_cols.add('category_role_id')
if allow_registration_forms:
all_cols.add('registration_form_id')
required_cols = all_cols & set(cols)
forbidden_cols = all_cols - required_cols
criteria = ['{} IS NULL'.format(col) for col in sorted(forbidden_cols)]
criteria += ['{} IS NOT NULL'.format(col) for col in sorted(required_cols)]
condition = 'type != {} OR ({})'.format(type_, ' AND '.join(criteria))
return db.CheckConstraint(condition, 'valid_{}'.format(type_.name))
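# For example, _make_check(PrincipalType.user, False, False, False, False,
# False, 'user_id') yields a CHECK constraint named 'valid_user' of the form
#   type != 1 OR (local_group_id IS NULL AND mp_group_name IS NULL AND
#                 mp_group_provider IS NULL AND user_id IS NOT NULL)
# i.e. a row of a given principal type must set exactly the columns that
# type requires and no others.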
def serialize_email_principal(email):
"""Serialize email principal to a simple dict."""
return {
'_type': 'Email',
'email': email.email,
'id': email.name,
'name': email.name,
'identifier': email.identifier
}
class IEmailPrincipalFossil(IFossil):
def getId(self):
pass
getId.produce = lambda x: x.email
def getIdentifier(self):
pass
getIdentifier.produce = lambda x: 'Email:{}'.format(x.email)
def getEmail(self):
pass
getEmail.produce = lambda x: x.email
def getName(self):
pass
getName.produce = lambda x: x.name
class EmailPrincipal(Fossilizable):
"""Wrapper for email principals.
:param email: The email address.
"""
principal_type = PrincipalType.email
is_network = False
is_group = False
is_single_person = True
is_event_role = False
is_category_role = False
is_registration_form = False
principal_order = 0
fossilizes(IEmailPrincipalFossil)
def __init__(self, email):
self.email = email.lower()
@property
def name(self):
return self.email
@property
def as_legacy(self):
return self
@property
def user(self):
from indico.modules.users import User
return User.query.filter(~User.is_deleted, User.all_emails == self.email).first()
@property
def identifier(self):
return 'Email:{}'.format(self.email)
def __eq__(self, other):
return isinstance(other, EmailPrincipal) and self.email == other.email
def __ne__(self, other):
return not (self == other)
def __hash__(self):
return hash(self.email)
def __contains__(self, user):
if not user:
return False
return self.email in user.all_emails
@return_ascii
def __repr__(self):
return format_repr(self, 'email')
class PrincipalMixin(object):
#: The name of the backref added to `User` and `LocalGroup`.
#: For consistency, it is recommended to name the backref
#: ``in_foo_acl`` with *foo* describing the ACL where this
#: mixin is used.
principal_backref_name = None
#: The columns which should be included in the unique constraints.
#: If set to ``None``, no unique constraints will be added.
unique_columns = None
#: Whether it should be allowed to add a user by email address.
#: This is useful in places where no Indico user exists yet.
#: Usually adding an email address to an ACL should result in
#: an email being sent to the user, inviting him to create an
#: account with that email address.
allow_emails = False
#: Whether it should be allowed to add an IP network.
allow_networks = False
#: Whether it should be allowed to add an event role.
allow_event_roles = False
    #: Whether it should be allowed to add a category role.
    allow_category_roles = False
    #: Whether it should be allowed to add registrants.
allow_registration_forms = False
@strict_classproperty
@classmethod
def __auto_table_args(cls):
uniques = ()
if cls.unique_columns:
uniques = [db.Index('ix_uq_{}_user'.format(cls.__tablename__), 'user_id', *cls.unique_columns, unique=True,
postgresql_where=db.text('type = {}'.format(PrincipalType.user))),
db.Index('ix_uq_{}_local_group'.format(cls.__tablename__), 'local_group_id', *cls.unique_columns,
unique=True, postgresql_where=db.text('type = {}'.format(PrincipalType.local_group))),
db.Index('ix_uq_{}_mp_group'.format(cls.__tablename__), 'mp_group_provider', 'mp_group_name',
*cls.unique_columns, unique=True,
postgresql_where=db.text('type = {}'.format(PrincipalType.multipass_group)))]
if cls.allow_emails:
uniques.append(db.Index('ix_uq_{}_email'.format(cls.__tablename__), 'email', *cls.unique_columns,
unique=True, postgresql_where=db.text('type = {}'.format(PrincipalType.email))))
indexes = [db.Index(None, 'mp_group_provider', 'mp_group_name')]
checks = [_make_check(PrincipalType.user, cls.allow_emails, cls.allow_networks, cls.allow_event_roles,
cls.allow_category_roles, cls.allow_registration_forms, 'user_id'),
_make_check(PrincipalType.local_group, cls.allow_emails, cls.allow_networks, cls.allow_event_roles,
cls.allow_category_roles, cls.allow_registration_forms, 'local_group_id'),
_make_check(PrincipalType.multipass_group, cls.allow_emails, cls.allow_networks,
cls.allow_event_roles, cls.allow_category_roles, cls.allow_registration_forms,
'mp_group_provider', 'mp_group_name')]
if cls.allow_emails:
checks.append(_make_check(PrincipalType.email, cls.allow_emails, cls.allow_networks, cls.allow_event_roles,
cls.allow_category_roles, cls.allow_registration_forms, 'email'))
checks.append(db.CheckConstraint('email IS NULL OR email = lower(email)', 'lowercase_email'))
if cls.allow_networks:
checks.append(_make_check(PrincipalType.network, cls.allow_emails, cls.allow_networks,
cls.allow_event_roles, cls.allow_category_roles, cls.allow_registration_forms,
'ip_network_group_id'))
if cls.allow_event_roles:
checks.append(_make_check(PrincipalType.event_role, cls.allow_emails, cls.allow_networks,
cls.allow_event_roles, cls.allow_category_roles, cls.allow_registration_forms,
'event_role_id'))
if cls.allow_category_roles:
checks.append(_make_check(PrincipalType.category_role, cls.allow_emails, cls.allow_networks,
cls.allow_event_roles, cls.allow_category_roles, cls.allow_registration_forms,
'category_role_id'))
if cls.allow_registration_forms:
checks.append(_make_check(PrincipalType.registration_form, cls.allow_emails, cls.allow_networks,
cls.allow_event_roles, cls.allow_category_roles, cls.allow_registration_forms,
'registration_form_id'))
        # Cast each collection to a tuple before concatenating: `uniques`
        # stays an empty tuple when no unique_columns are set, while the
        # others are lists.
        return tuple(uniques) + tuple(indexes) + tuple(checks)
@declared_attr
def type(cls):
exclude_values = set()
if not cls.allow_emails:
exclude_values.add(PrincipalType.email)
if not cls.allow_networks:
exclude_values.add(PrincipalType.network)
if not cls.allow_event_roles:
exclude_values.add(PrincipalType.event_role)
if not cls.allow_category_roles:
exclude_values.add(PrincipalType.category_role)
if not cls.allow_registration_forms:
exclude_values.add(PrincipalType.registration_form)
return db.Column(
PyIntEnum(PrincipalType, exclude_values=(exclude_values or None)),
nullable=False
)
@declared_attr
def user_id(cls):
return db.Column(
db.Integer,
db.ForeignKey('users.users.id'),
nullable=True,
index=True
)
@declared_attr
def local_group_id(cls):
return db.Column(
db.Integer,
db.ForeignKey('users.groups.id'),
nullable=True,
index=True
)
@declared_attr
def multipass_group_provider(cls):
return db.Column(
'mp_group_provider', # otherwise the index name doesn't fit in 60 chars
db.String,
nullable=True
)
@declared_attr
def multipass_group_name(cls):
return db.Column(
'mp_group_name', # otherwise the index name doesn't fit in 60 chars
db.String,
nullable=True
)
@declared_attr
def email(cls):
if not cls.allow_emails:
return
return db.Column(
db.String,
nullable=True,
index=True
)
@declared_attr
def ip_network_group_id(cls):
if not cls.allow_networks:
return
return db.Column(
db.Integer,
db.ForeignKey('indico.ip_network_groups.id'),
nullable=True,
index=True
)
@declared_attr
def event_role_id(cls):
if not cls.allow_event_roles:
return
return db.Column(
db.Integer,
db.ForeignKey('events.roles.id'),
nullable=True,
index=True
)
@declared_attr
def category_role_id(cls):
if not cls.allow_category_roles:
return
return db.Column(
db.Integer,
db.ForeignKey('categories.roles.id'),
nullable=True,
index=True
)
@declared_attr
def registration_form_id(cls):
if not cls.allow_registration_forms:
return
return db.Column(
db.Integer,
db.ForeignKey('event_registration.forms.id'),
nullable=True,
index=True
)
@declared_attr
def user(cls):
assert cls.principal_backref_name
return db.relationship(
'User',
lazy=False,
backref=db.backref(
cls.principal_backref_name,
cascade='all, delete',
lazy='dynamic'
)
)
@declared_attr
def local_group(cls):
assert cls.principal_backref_name
return db.relationship(
'LocalGroup',
lazy=False,
backref=db.backref(
cls.principal_backref_name,
cascade='all, delete',
lazy='dynamic'
)
)
@declared_attr
def ip_network_group(cls):
if not cls.allow_networks:
return
assert cls.principal_backref_name
return db.relationship(
'IPNetworkGroup',
lazy=False,
backref=db.backref(
cls.principal_backref_name,
cascade='all, delete',
lazy='dynamic'
)
)
@declared_attr
def event_role(cls):
if not cls.allow_event_roles:
return
assert cls.principal_backref_name
return db.relationship(
'EventRole',
lazy=False,
backref=db.backref(
cls.principal_backref_name,
cascade='all, delete',
lazy='dynamic'
)
)
@declared_attr
def category_role(cls):
if not cls.allow_category_roles:
return
assert cls.principal_backref_name
return db.relationship(
'CategoryRole',
lazy=False,
backref=db.backref(
cls.principal_backref_name,
cascade='all, delete',
lazy='dynamic'
)
)
@declared_attr
def registration_form(cls):
if not cls.allow_registration_forms:
return
assert cls.principal_backref_name
return db.relationship(
'RegistrationForm',
lazy=False,
backref=db.backref(
cls.principal_backref_name,
cascade='all, delete',
lazy='dynamic'
)
)
@hybrid_property
def principal(self):
from indico.modules.groups import GroupProxy
if self.type == PrincipalType.user:
return self.user
elif self.type == PrincipalType.local_group:
return self.local_group.proxy
elif self.type == PrincipalType.multipass_group:
return GroupProxy(self.multipass_group_name, self.multipass_group_provider)
elif self.type == PrincipalType.email:
return EmailPrincipal(self.email)
elif self.type == PrincipalType.network:
return self.ip_network_group
elif self.type == PrincipalType.event_role:
return self.event_role
elif self.type == PrincipalType.category_role:
return self.category_role
elif self.type == PrincipalType.registration_form:
return self.registration_form
@principal.setter
def principal(self, value):
self.type = value.principal_type
self.email = None
self.user = None
self.local_group = None
self.multipass_group_provider = self.multipass_group_name = None
self.ip_network_group = None
self.event_role = None
self.category_role = None
self.registration_form = None
if self.type == PrincipalType.email:
assert self.allow_emails
self.email = value.email
elif self.type == PrincipalType.network:
assert self.allow_networks
self.ip_network_group = value
elif self.type == PrincipalType.event_role:
assert self.allow_event_roles
self.event_role = value
elif self.type == PrincipalType.category_role:
assert self.allow_category_roles
self.category_role = value
elif self.type == PrincipalType.registration_form:
assert self.allow_registration_forms
self.registration_form = value
elif self.type == PrincipalType.local_group:
self.local_group = value.group
elif self.type == PrincipalType.multipass_group:
self.multipass_group_provider = value.provider
self.multipass_group_name = value.name
elif self.type == PrincipalType.user:
self.user = value
else:
raise ValueError('Unexpected principal type: {}'.format(self.type))
@principal.comparator
def principal(cls):
return PrincipalComparator(cls)
def get_emails(self):
"""Get a set of all unique emails associated with this principal.
For users, this is just the primary email (or nothing for the system user).
For anything group-like it is the primary email address of each group member
who has an Indico account.
"""
if self.type == PrincipalType.user and not self.user.is_system:
return {self.user.email}
elif self.type in (PrincipalType.local_group, PrincipalType.multipass_group):
return {x.email for x in self.principal.get_members() if not x.is_system}
elif self.type in (PrincipalType.event_role, PrincipalType.category_role):
return {x.email for x in self.principal.members if not x.is_system}
return set()
def get_users(self):
"""Get a set of all users associated with this principal.
For users this is just the user itself. For anything group-like this
returns all members.
"""
if self.type == PrincipalType.user:
return {self.user}
elif self.type in (PrincipalType.local_group, PrincipalType.multipass_group):
return {x for x in self.principal.get_members() if not x.is_system}
elif self.type in (PrincipalType.event_role, PrincipalType.category_role):
return {x for x in self.principal.members if not x.is_system}
return set()
def merge_privs(self, other):
"""Merge the privileges of another principal.
:param other: Another principal object.
"""
# nothing to do here
def current_data(self):
return None
@classmethod
def merge_users(cls, target, source, relationship_attr):
"""Merge two users in the ACL.
        :param target: The target user of the merge.
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# ---------------------------------------------------------------------------
# ___ __ __ __ ___
# / | \ | \ | \ / Automatic
# \__ |__/ |__/ |___| \__ Annotation
# \ | | | | \ of
# ___/ | | | | ___/ Speech
#
#
# http://www.sppas.org/
#
# ---------------------------------------------------------------------------
# Laboratoire Parole et Langage, Aix-en-Provence, France
# Copyright (C) 2011-2016 <NAME>
#
# This banner notice must not be removed
# ---------------------------------------------------------------------------
# Use of this software is governed by the GNU Public License, version 3.
#
# SPPAS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SPPAS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SPPAS. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
# File: annotationctrl.py
# ----------------------------------------------------------------------------
__docformat__ = """reST"""
__authors__ = """<NAME> (<EMAIL>)"""
__copyright__ = """Copyright (C) 2011-2015 <NAME>"""
import logging
import wx
import wx.lib.newevent
from pointctrl import PointCtrl
from pointctrl import spEVT_MOVING
from pointctrl import MIN_W as pointctrlMinWidth
from labelctrl import LabelCtrl
# ----------------------------------------------------------------------------
# Constants
# ----------------------------------------------------------------------------
MIN_W = 4
MIN_H = 8
NORMAL_COLOUR = wx.Colour(0, 0, 0)
UNCERTAIN_COLOUR = wx.Colour(70, 70, 180)
STYLE = wx.NO_BORDER | wx.NO_FULL_REPAINT_ON_RESIZE
FONT_SIZE_MIN = 8
FONT_SIZE_MAX = 32
PANE_WIDTH_MIN = 10
PANE_WIDTH_MAX = 200
PANE_WIDTH = 100
BORDER_WIDTH = 2
# ----------------------------------------------------------------------------
class AnnotationCtrl(wx.Window):
"""This class is used to display an annotation.
:author: <NAME>
:contact: <EMAIL>
:license: GPL, v3
"""
def __init__(self, parent, id=wx.ID_ANY,
pos=wx.DefaultPosition,
size=wx.DefaultSize,
ann=None):
"""Constructor.
Non-wxpython related parameters:
:param ann: (sppasAnnotation) the annotation to be represented.
The size is representing the available area to draw the annotation.
The member _pxsec must be fixed for the annotation to draw inside this
area. It represents the number of pixels required for 1 second.
"""
self._pointctrl1 = None
self._pointctrl2 = None
self._labelctrl = None
self._pxsec = 0 # the number of pixels to represent 1 second of time
wx.Window.__init__(self, parent, id, pos, size, STYLE)
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self.SetDoubleBuffered(True)
# Members, Initializations
self._ann = None
if ann is not None:
self.SetAnn(ann)
self.Reset(size)
# Bind the events related to our control
wx.EVT_PAINT(self, self.OnPaint)
wx.EVT_ERASE_BACKGROUND(self, lambda event: None)
wx.EVT_MOUSE_EVENTS(self, self.OnMouseEvents)
spEVT_MOVING(self, self.OnPointMoving)
# ------------------------------------------------------------------------
def Reset(self, size=None):
"""Reset all members to their default.
        :param size: (wx.Size)
"""
self._selected = False
self.__initializeColours()
if size:
self.__initialSize(size)
# ------------------------------------------------------------------------
# Look & style
# ------------------------------------------------------------------------
def SetLabelFont(self, font):
"""Override. Set a new font."""
if self._labelctrl:
self._labelctrl.SetFont(font)
# ------------------------------------------------------------------------
def SetLabelAlign(self, value):
"""Fix the position of the text of an annotation.
:param value: is one of wx.ALIGN_LEFT, wx.ALIGN_CENTRE or wx.ALIGN_RIGHT
"""
if self._labelctrl:
self._labelctrl.SetAlign(value)
# ------------------------------------------------------------------------
def SetPointColours(self, colourmidpoint=None, colourradius=None):
"""Change the main colors of the Points."""
if self._pointctrl1:
self._pointctrl1.SetColours(colourmidpoint, colourradius)
if self._pointctrl2:
self._pointctrl2.SetColours(colourmidpoint, colourradius)
# ------------------------------------------------------------------------
def SetLabelColours(self, bgcolour=None, fontnormalcolour=None, fontuncertaincolour=None):
"""Change the main colors of the Label.
Notice that uncertain labels can be of a different color,
like links in web browsers.
:param bgcolour: (wx.Colour)
:param fontcolour: (wx.Colour)
:param fontuncertaincolour: (wx.Colour)
"""
if self._labelctrl is None: return
# if self._labelctrl.GetValue().GetSize() == 1:
self._labelctrl.SetColours(bgcolour, fontnormalcolour)
# else:
# self._labelctrl.SetColours(bgcolour, fontuncertaincolour)
# ------------------------------------------------------------------------
def SetBorderColour(self, colour):
"""Fix the color of the top/bottom lines."""
self._penbordercolor = wx.Pen(colour, 1, wx.SOLID)
# ------------------------------------------------------------------------
def GetHeight(self):
"""Return the current height."""
return self.GetSize().height
# -----------------------------------------------------------------------
def GetAnn(self):
"""Return the annotation to draw."""
return self._ann
# -----------------------------------------------------------------------
def SetAnn(self, ann):
"""Set the annotation.
        :param ann: (sppasAnnotation)
"""
loc = ann.get_location().get_best()
if loc.is_interval():
self._pointctrl1 = PointCtrl(self, id=-1, point=loc.get_begin())
self._pointctrl2 = PointCtrl(self, id=-1, point=loc.get_end())
elif loc.is_point():
self._pointctrl1 = PointCtrl(self, id=-1, point=loc)
self._pointctrl2 = None
else:
            raise NotImplementedError('Disjoint intervals are not supported yet!')
l = ann.serialize_labels(separator=" ", empty="", alt=True)
self._labelctrl = LabelCtrl(self, id=-1, label=l)
self._ann = ann
# ------------------------------------------------------------------------
def SetPxSec(self, value):
if value < 0:
raise ValueError
self._pxsec = int(value)
self.Refresh()
# ------------------------------------------------------------------------
# Methods to move/resize objects
# ------------------------------------------------------------------------
def SetHeight(self, height):
"""Set the height (int).
:param height: (int) in pixels
"""
if height < MIN_H:
height = MIN_H
w, h = self.GetSize()
if h != height:
if self._labelctrl:
self._labelctrl.SetHeight(height)
self.SetSize(wx.Size(w, height))
# ------------------------------------------------------------------------
def MoveWindow(self, pos, size):
"""Define a new position and/or size to display.
:param pos: (wx.Point)
:param size: (wx.Size)
"""
(w, h) = size
(x, y) = pos
(ow, oh) = self.GetSize()
(ox, oy) = self.GetPosition()
# New width
if ow != w:
if w < MIN_W:
w = MIN_W
self.SetSize(wx.Size(w, oh))
# New height
if oh != h:
self.SetHeight(h)
# New position (x and/or y)
if ox != x or oy != y:
self.Move(pos)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
# Callbacks
# ------------------------------------------------------------------------
def OnMouseEvents(self, event):
"""
Handles the wx.EVT_MOUSE_EVENTS event for AnnotationCtrl.
"""
if (event.Entering() and not self._selected) or \
(event.Leaving() and self._selected):
logging.debug(' event entering or leaving (selected=%s)'%self._selected)
self._selected = not self._selected
logging.debug(' --> (selected=%s)' % self._selected)
#self.OnPaint(event)
self.Refresh()
wx.PostEvent(self.GetParent(), event)
event.Skip()
# -----------------------------------------------------------------------
def OnPointMoving(self, event):
        logging.debug('ANNOTATION. OnPointMoving.')
# which point is moving and what is new size?
ptr = event.GetEventObject()
(x, y) = event.pos
(w, h) = ptr.GetSize()
logging.debug(' ... point %s: x=%d,y=%d, w=%d,h=%d' % (ptr.GetValue(), x, y, w, h))
# self coordinates
sw, sh = self.GetClientSize()
sx, sy = self.GetPosition()
# get new time value
b = ptr.get_midpoint() - ptr.get_radius()
e = ptr.get_midpoint() + ptr.get_radius()
logging.debug(' ... moving point %s: FROM b=%f,e=%f' % (ptr.GetValue(), b, e))
logging.debug(' ... ... calcT=%f' % self._calcT(x))
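        # Assumption: _calcT and _calcW are defined elsewhere in this class
        # and convert pixels to seconds and seconds to pixels respectively,
        # presumably via self._pxsec (pixels per second).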
if x < 0:
x = -x
b = b - self._calcT(x)
else:
b = b + self._calcT(x)
e = e + self._calcT(w)
midpoint = b + ((e-b)/2.)
logging.debug(' ... moving point %s: TO b=%f,e=%f' % (ptr.GetValue(), b, e))
# Create a copy of the current point, then apply the modification.
pointcopy = ptr.GetValue().Copy()
pointcopy.SetMidpoint(midpoint)
# try to fix the new point to this annotation
try:
if self._ann.get_location().get_best().is_point():
self._ann.get_location().get_best().set(pointcopy)
else:
self._ann.get_location().get_best().set_begin(pointcopy)
ptr.SetValue(pointcopy)
if ptr is self._pointctrl1:
sx = sx + event.pos.x
sw = sw - event.pos.x
logging.debug(' ---> new content sx=%d, sw=%d' % (sx, sw))
self.MoveWindow(pos=(sx, sy), size=(sw, sh))
except Exception as e:
logging.debug(' ... Exception: %s' % e)
pass
self.GetTopLevelParent().GetStatusBar().SetStatusText('Point moving: %d' % sx)
# ------------------------------------------------------------------------
# Painting
# ------------------------------------------------------------------------
def OnPaint(self, event):
"""Handles the wx.EVT_PAINT event for AnnotationCtrl."""
dc = wx.BufferedPaintDC(self)
self.Draw(dc)
# ------------------------------------------------------------------------
def Draw(self, dc):
"""Draw the AnnotationCtrl on the DC.
:param dc: (wx.DC) The device context to draw on.
"""
logging.debug('AnnotationCtrl.Draw...')
# Get the actual client size of ourselves
# Notice that the size is corresponding to the available size on screen
# for that annotation. It can often happen that the annotation-duration
# is larger than the available width.
w, h = self.GetClientSize()
# Nothing to do, we still don't have dimensions!
if w*h == 0:
return
# Initialize the DC
dc.SetBackgroundMode(wx.TRANSPARENT)
dc.Clear()
x = 0
y = 0
if self._selected is True:
# Draw borders: simply a rectangle
self.DrawBorders(dc, w, h)
# Update position and size for the points and the label
x = BORDER_WIDTH
y = BORDER_WIDTH
w = w - (2 * BORDER_WIDTH)
h = h - (2 * BORDER_WIDTH)
# Content
self.DrawContent(dc, x, y, w, h)
# ------------------------------------------------------------------------
def DrawContent(self, dc, x, y, w, h):
"""Draw the annotation on the DC.
:param dc: (PaintDC, MemoryDC, BufferedDC...)
:param x,y: (int,int) are coord. of top left corner from which drawing
:param w,h: (int,int) are width and height available for drawing.
"""
if self._ann is None:
return
# logging.debug(' Draw content for ann %s: x=%d,y=%d, w=%d, h=%d' % (self._ann, x, y, w, h))
wpt1 = max(pointctrlMinWidth, self._calcW(self._pointctrl1.GetValue().duration().get_value()))
if wpt1 > w:
wpt1 = w
if self._pointctrl2 is None:
tw = min(50, self.__getTextWidth(self._labelctrl.GetValue())+2)
if (wpt1+tw) > w: # ensure to stay in our allocated area
tw = w - wpt1 # reduce width to the available area
tw = max(0, tw)
self._labelctrl.MoveWindow(wx.Point(wpt1, y), wx.Size(tw, h))
else:
wpt2 = max(pointctrlMinWidth, self._calcW(self._pointctrl2.GetValue().duration().get_value()))
xpt2 = w-wpt2+1
tx = x + wpt1
            tw
"""Problems related to graphs such as Conway's 99 problem, finding
[cliques](https://en.wikipedia.org/wiki/Clique_(graph_theory)) of various sizes, shortest path (Dijkstra) """
from puzzle_generator import PuzzleGenerator
from typing import List
# See https://github.com/microsoft/PythonProgrammingPuzzles/wiki/How-to-add-a-puzzle to learn about adding puzzles
class Conway99(PuzzleGenerator):
"""Conway's 99-graph problem (*unsolved*, open problem)
Conway's 99-graph problem is an unsolved problem in graph theory.
In Conway's terminology, from [Five $1,000 Problems (Update 2017)](https://oeis.org/A248380/a248380.pdf)
"Is there a graph with 99 vertices in which every edge (i.e. pair of joined vertices) belongs to a unique
triangle and every nonedge (pair of unjoined vertices) to a unique quadrilateral?"
See also this [Wikipedia article](https://en.wikipedia.org/w/index.php?title=Conway%27s_99-graph_problem).
"""
@staticmethod
def sat(edges: List[List[int]]):
"""
Find an undirected graph with 99 vertices, in which each two adjacent vertices have exactly one common
neighbor, and in which each two non-adjacent vertices have exactly two common neighbors.
"""
# first compute neighbors sets, N:
N = {i: {j for j in range(99) if j != i and ([i, j] in edges or [j, i] in edges)} for i in range(99)}
return all(len(N[i].intersection(N[j])) == (1 if j in N[i] else 2) for i in range(99) for j in range(i))
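# Aside (not part of the original puzzle set): the analogous condition is known to be
# satisfiable on 9 vertices by the 3x3 rook's graph, the strongly regular graph
# srg(9, 4, 1, 2). A minimal self-contained sanity check, using only the standard library:
def _check_rooks_graph_9():
    # vertices are cells of a 3x3 grid; two cells are adjacent iff they share a row or column
    adjacent = lambda a, b: a != b and (a // 3 == b // 3 or a % 3 == b % 3)
    N = {i: {j for j in range(9) if adjacent(i, j)} for i in range(9)}
    # adjacent pairs share exactly one common neighbor; non-adjacent pairs exactly two
    return all(len(N[i] & N[j]) == (1 if j in N[i] else 2) for i in range(9) for j in range(i))
assert _check_rooks_graph_9()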
def dedup_edges(stuff):
seen = set()
return [a for a in stuff if tuple(a) not in seen and not seen.add(tuple(a))]
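# Note: seen.add(...) returns None (falsy), so each tuple is recorded the first time it
# is kept. For example, dedup_edges([[0, 1], [0, 1], [1, 0]]) == [[0, 1], [1, 0]]:
# only the exact duplicate is dropped, since (1, 0) is a distinct tuple from (0, 1).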
class AnyEdge(PuzzleGenerator):
"""Trivial [graph](https://en.wikipedia.org/w/index.php?title=Graph_(discrete_mathematics)) problem."""
@staticmethod
def sat(e: List[int], edges=[[0, 217], [40, 11], [17, 29], [11, 12], [31, 51]]):
"""Find any edge in edges."""
return e in edges
@staticmethod
def sol(edges):
return edges[0]
def gen_random(self):
n = self.random.randrange(1, self.random.choice([10, 100]))
m = self.random.randrange(1, 10 * n)
# random graph:
edges = dedup_edges([[self.random.randrange(n + 1), self.random.randrange(n + 1)] for _ in range(m)])
self.add({"edges": edges})
class AnyTriangle(PuzzleGenerator):
"""
Easy [graph](https://en.wikipedia.org/w/index.php?title=Graph_(discrete_mathematics)) problem,
see [triangle](https://en.wikipedia.org/w/index.php?title=Triangle_graph)
"""
@staticmethod
def sat(tri: List[int], edges=[[0, 17], [0, 22], [17, 22], [17, 31], [22, 31], [31, 17]]):
"""Find any triangle in the given directed graph."""
a, b, c = tri
return [a, b] in edges and [b, c] in edges and [c, a] in edges and a != b != c != a
@staticmethod
def sol(edges):
from collections import defaultdict
outs = defaultdict(set)
ins = defaultdict(set)
for i, j in edges:
if j != i:
outs[i].add(j)
ins[j].add(i)
for i in outs:
for j in outs[i]:
try:
if j in outs:
k = min(outs[j].intersection(ins[i]))
return [i, j, k]
except ValueError:  # min() of an empty intersection: no triangle through (i, j)
pass
def gen_random(self):
n = self.random.randrange(1, self.random.choice([10, 100]))
m = self.random.randrange(1, 10 * n)
# random graph:
edges = dedup_edges([[self.random.randrange(n + 1), self.random.randrange(n + 1)] for _ in range(m)])
tri = self.sol(edges)
if tri:
assert self.sat(tri, edges)
self.add({"edges": edges})
########################################################################################################################
class PlantedClique(PuzzleGenerator):
"""Find a [planted clique](https://en.wikipedia.org/w/index.php?title=Planted_clique) of a given size
in an undirected graph. Finding a polynomial-time algorithm for this problem has been *unsolved* for
some time."""
@staticmethod
def sat(nodes: List[int], size=3, edges=[[0, 17], [0, 22], [17, 22], [17, 31], [22, 31], [31, 17]]):
"""Find a clique of the given size in the given undirected graph. It is guaranteed that such a clique exists."""
assert len(nodes) == len(set(nodes)) >= size
edge_set = {(a, b) for (a, b) in edges}
for a in nodes:
for b in nodes:
assert a == b or (a, b) in edge_set or (b, a) in edge_set
return True
@staticmethod
def sol(size, edges): # brute force (finds list in increasing order), but with a tiny bit of speedup
if size == 0:
return []
from collections import defaultdict
neighbors = defaultdict(set)
n = max(max(e) for e in edges)
for (a, b) in edges:
if a != b:
neighbors[a].add(b)
neighbors[b].add(a)
pools = [list(range(n + 1))]
indices = [-1]
while pools:
indices[-1] += 1
if indices[-1] >= len(pools[-1]) - size + len(pools): # since list is increasing order
indices.pop()
pools.pop()
continue
if len(pools) == size:
return [pool[i] for pool, i in zip(pools, indices)]
a = (pools[-1])[indices[-1]]
pools.append([i for i in pools[-1] if i > a and i in neighbors[a]])
indices.append(-1)
assert False, f"No clique of size {size}"
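# Why the backtracking bound works: candidates are kept in increasing order, and at
# depth len(pools) the search still needs size - len(pools) + 1 picks (including the
# current one); once too few candidates remain past indices[-1], the pool is abandoned.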
def gen_random(self):
n = self.random.randrange(1, self.random.choice([10, 20, 50, 100]))
m = self.random.randrange(1, 10 * n)
# random graph:
edges = [[self.random.randrange(n + 1), self.random.randrange(n + 1)] for _ in range(m)]
size = self.random.randrange(min(20, n))
clique = self.random.sample(range(n), size)
for a in clique: # plant clique!
for b in clique:
if a < b:
edges.append(self.random.choice([[a, b], [b, a]]))
edges = dedup_edges(edges)
self.random.shuffle(edges)
self.add({"edges": edges, "size": size}, test=(size <= 10))
class ShortestPath(PuzzleGenerator):
"""Shortest Path, see (Dijkstra's algorithm)[https://en.wikipedia.org/w/index.php?title=Dijkstra%27s_algorithm]"""
@staticmethod
def sat(path: List[int], weights=[{1: 20, 2: 1}, {2: 2, 3: 5}, {1: 10}], bound=11):
"""
Find a path from node 0 to node 1, of length at most bound, in the given digraph.
weights[a][b] is weight on edge [a,b] for (int) nodes a, b
"""
return path[0] == 0 and path[-1] == 1 and sum(weights[a][b] for a, b in zip(path, path[1:])) <= bound
@staticmethod
def sol(weights, bound): # Dijkstra's algorithm (bound is ignored)
u, v = 0, 1 # go from 0 to 1
import heapq
queue = [(0, u, u)] # distance, node, trail
trails = {}
while queue:
dist, i, j = heapq.heappop(queue)
if i in trails:
continue
trails[i] = j
if i == v:
break
for j in weights[i]:
if j not in trails:
heapq.heappush(queue, (dist + weights[i][j], j, i))
if v in trails:
rev_path = [v]
while rev_path[-1] != u:
rev_path.append(trails[rev_path[-1]])
return rev_path[::-1]
# no path
def gen_random(self):
n = self.random.randrange(1, self.random.choice([10, 20, 50, 100]))
m = self.random.randrange(n, 5 * n)
# random graph:
weights = [{} for _ in range(n)]
for _ in range(m):
weights[self.random.randrange(n)][self.random.randrange(n)] = self.random.randrange(1000)
path = self.sol(weights, bound=None)
if path:
bound = sum(weights[a][b] for a, b in zip(path, path[1:]))
assert self.sat(path, weights, bound)
self.add(dict(weights=weights, bound=bound))
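# Worked example (illustrative, using the default inputs from sat above):
# sol([{1: 20, 2: 1}, {2: 2, 3: 5}, {1: 10}], bound=None) returns [0, 2, 1],
# since the direct edge 0 -> 1 costs 20 while 0 -> 2 -> 1 costs 1 + 10 = 11.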
class UnweightedShortestPath(PuzzleGenerator):
"""
Unweighted Shortest Path
See [Dijkstra's algorithm](https://en.wikipedia.org/w/index.php?title=Dijkstra%27s_algorithm)
"""
@staticmethod
def sat(path: List[int],
edges=[[0, 11], [0, 7], [7, 5], [0, 22], [11, 22], [11, 33], [22, 33]],
u=0,
v=33,
bound=3):
"""Find a path from node u to node v, of a bounded length, in the given digraph on vertices 0, 1,..., n."""
assert path[0] == u and path[-1] == v and all([i, j] in edges for i, j in zip(path, path[1:]))
return len(path) <= bound
@staticmethod
def sol(edges, u, v, bound): # Dijkstra's algorithm
import heapq
from collections import defaultdict
queue = [(0, u, u)] # distance, node, trail
trails = {}
neighbors = defaultdict(set)
for (i, j) in edges:
neighbors[i].add(j)
while queue:
dist, i, j = heapq.heappop(queue)
if i in trails:
continue
trails[i] = j
if i == v:
break
for j in neighbors[i]:
if j not in trails:
heapq.heappush(queue, (dist + 1, j, i))
if v in trails:
rev_path = [v]
while rev_path[-1] != u:
rev_path.append(trails[rev_path[-1]])
return rev_path[::-1]
# no path
def gen_random(self):
n = self.random.randrange(1, self.random.choice([10, 20, 50, 100]))
m = self.random.randrange(n, 5 * n)
# random graph:
edges = dedup_edges([self.random.randrange(n + 1), self.random.randrange(n + 1)] for _ in range(5 * n))
u = self.random.randrange(n)
v = self.random.randrange(n)
path = self.sol(edges, u, v, bound=None)
if path:
bound = len(path)
assert self.sat(path, edges, u, v, bound)
self.add(dict(u=u, v=v, edges=edges, bound=bound))
class AnyPath(PuzzleGenerator):
"""Any Path"""
@staticmethod
def sat(path: List[int], edges=[[0, 1], [0, 2], [1, 2], [1, 3], [2, 3]]):
""" Find any path from node 0 to node n in a given digraph on vertices 0, 1,..., n."""
for i in range(len(path) - 1):
assert [path[i], path[i + 1]] in edges
assert path[0] == 0
assert path[-1] == max(max(edge) for edge in edges)
return True
@staticmethod
def sol(edges):
n = max(max(edge) for edge in edges)
paths = {0: [0]}
for _ in range(n + 1):
for i, j in edges:
if i in paths and j not in paths:
paths[j] = paths[i] + [j]
return paths.get(n)
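# (This amounts to n + 1 rounds of edge relaxation: each round extends every known
# path by one edge, so any node reachable from 0 is reached within n rounds.)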
def gen_random(self):
n = self.random.randrange(1, self.random.choice([10, 100]))
# random graph:
edges = dedup_edges([self.random.randrange(n), self.random.randrange(n)] for _ in range(2 * n))
if self.sol(edges):
self.add(dict(edges=edges))
class EvenPath(PuzzleGenerator):
@staticmethod
def sat(path: List[int], edges=[[0, 2], [0, 1], [2, 1], [2, 3], [1, 3]]):
"""Find a path with an even number of nodes from nodes 0 to n in | |
class Shield:
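    """State machine for a shield controller (docstring added for orientation; the
    body below appears to be machine-generated combinational logic). Inputs are
    depth/key/oxygen/diver sensor bits plus action bits; s0..s7 hold the state."""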
def __init__(self):
self.s7 = 0
self.s6 = 0
self.s5 = 0
self.s4 = 0
self.s3 = 0
self.s2 = 0
self.s1 = 0
self.s0 = 0
def tick(self, inputs):
depth7 = inputs[0]
depth6 = inputs[1]
depth5 = inputs[2]
depth4 = inputs[3]
depth3 = inputs[4]
depth2 = inputs[5]
depth1 = inputs[6]
k2 = inputs[7]
k1 = inputs[8]
oxygen_low = inputs[9]
oxygen_full = inputs[10]
diver_found = inputs[11]
action4_1 = inputs[12]
action3_1 = inputs[13]
action2_1 = inputs[14]
action1_1 = inputs[15]
s7 = self.s7
s6 = self.s6
s5 = self.s5
s4 = self.s4
s3 = self.s3
s2 = self.s2
s1 = self.s1
s0 = self.s0
tmp13 = (1 - (1 if action1_1 else 0))
tmp12 = (1 if action4_1 else tmp13)
tmp15 = (1 if action2_1 else 0)
tmp14 = (tmp15 if oxygen_low else tmp12)
tmp11 = (tmp12 if oxygen_full else tmp14)
tmp10 = (tmp11 if depth7 else tmp12)
tmp9 = (tmp10 if depth6 else tmp12)
tmp19 = (tmp11 if k2 else tmp12)
tmp18 = (tmp19 if depth7 else tmp12)
tmp17 = (tmp18 if depth6 else tmp12)
tmp16 = (tmp9 if depth5 else tmp17)
tmp8 = (tmp9 if depth4 else tmp16)
tmp29 = (1 if action1_1 else 0)
tmp28 = (1 - (tmp29 if action2_1 else 0))
tmp27 = (1 if action4_1 else tmp28)
tmp26 = (tmp12 if oxygen_low else tmp27)
tmp25 = (tmp12 if oxygen_full else tmp26)
tmp24 = (tmp12 if diver_found else tmp25)
tmp23 = (tmp12 if depth7 else tmp24)
tmp22 = (tmp12 if depth6 else tmp23)
tmp21 = (tmp9 if depth5 else tmp22)
tmp20 = (tmp9 if depth4 else tmp21)
tmp7 = (tmp8 if depth3 else tmp20)
tmp32 = (tmp9 if depth5 else tmp12)
tmp31 = (tmp9 if depth4 else tmp32)
tmp30 = (tmp31 if depth3 else tmp20)
tmp6 = (tmp7 if depth2 else tmp30)
tmp39 = (tmp19 if k1 else tmp12)
tmp38 = (tmp39 if depth7 else tmp12)
tmp37 = (tmp38 if depth6 else tmp12)
tmp36 = (tmp9 if depth5 else tmp37)
tmp35 = (tmp9 if depth4 else tmp36)
tmp34 = (tmp35 if depth3 else tmp20)
tmp45 = (tmp11 if k1 else tmp19)
tmp44 = (tmp45 if depth7 else tmp12)
tmp43 = (tmp44 if depth6 else tmp12)
tmp42 = (tmp9 if depth5 else tmp43)
tmp41 = (tmp42 if depth4 else tmp21)
tmp40 = (tmp20 if depth3 else tmp41)
tmp33 = (tmp34 if depth2 else tmp40)
tmp5 = (tmp6 if depth1 else tmp33)
tmp53 = (tmp12 if oxygen_full else tmp15)
tmp52 = (tmp53 if depth7 else tmp12)
tmp51 = (tmp52 if depth6 else tmp12)
tmp58 = (tmp53 if k2 else tmp12)
tmp57 = (tmp53 if k1 else tmp58)
tmp56 = (tmp57 if depth7 else tmp12)
tmp55 = (tmp56 if depth6 else tmp12)
tmp54 = (tmp51 if depth5 else tmp55)
tmp50 = (tmp51 if depth4 else tmp54)
tmp60 = (tmp51 if depth5 else tmp12)
tmp59 = (tmp51 if depth4 else tmp60)
tmp49 = (tmp50 if depth3 else tmp59)
tmp66 = (tmp58 if k1 else tmp12)
tmp65 = (tmp66 if depth7 else tmp12)
tmp64 = (tmp65 if depth6 else tmp12)
tmp63 = (tmp51 if depth5 else tmp64)
tmp62 = (tmp51 if depth4 else tmp63)
tmp61 = (tmp62 if depth3 else tmp59)
tmp48 = (tmp49 if depth2 else tmp61)
tmp72 = (tmp58 if depth7 else tmp12)
tmp71 = (tmp72 if depth6 else tmp12)
tmp70 = (tmp51 if depth5 else tmp71)
tmp69 = (tmp51 if depth4 else tmp70)
tmp68 = (tmp69 if depth3 else tmp59)
tmp67 = (tmp68 if depth2 else tmp59)
tmp47 = (tmp48 if depth1 else tmp67)
tmp75 = (tmp51 if depth3 else tmp59)
tmp74 = (tmp75 if depth2 else tmp68)
tmp73 = (tmp74 if depth1 else tmp48)
tmp46 = (tmp47 if s0 else tmp73)
tmp4 = (tmp5 if s1 else tmp46)
tmp82 = (tmp51 if depth3 else tmp62)
tmp81 = (tmp82 if depth2 else tmp49)
tmp80 = (tmp81 if depth1 else tmp74)
tmp85 = (tmp51 if depth3 else tmp69)
tmp84 = (tmp85 if depth2 else tmp75)
tmp83 = (tmp84 if depth1 else tmp81)
tmp79 = (tmp80 if s0 else tmp83)
tmp89 = (tmp51 if depth3 else tmp50)
tmp88 = (tmp89 if depth2 else tmp82)
tmp87 = (tmp88 if depth1 else tmp84)
tmp91 = (tmp51 if depth2 else tmp85)
tmp90 = (tmp91 if depth1 else tmp88)
tmp86 = (tmp87 if s0 else tmp90)
tmp78 = (tmp79 if s1 else tmp86)
tmp99 = (tmp52 if depth6 else tmp65)
tmp98 = (tmp99 if depth5 else tmp51)
tmp97 = (tmp98 if depth4 else tmp51)
tmp96 = (tmp97 if depth3 else tmp51)
tmp95 = (tmp96 if depth2 else tmp89)
tmp94 = (tmp95 if depth1 else tmp91)
tmp105 = (tmp52 if depth6 else tmp72)
tmp104 = (tmp105 if depth5 else tmp51)
tmp103 = (tmp104 if depth4 else tmp51)
tmp102 = (tmp103 if depth3 else tmp51)
tmp101 = (tmp102 if depth2 else tmp51)
tmp100 = (tmp101 if depth1 else tmp95)
tmp93 = (tmp94 if s0 else tmp100)
tmp112 = (tmp52 if depth6 else tmp56)
tmp111 = (tmp112 if depth5 else tmp51)
tmp110 = (tmp111 if depth4 else tmp51)
tmp109 = (tmp110 if depth3 else tmp51)
tmp108 = (tmp109 if depth2 else tmp96)
tmp107 = (tmp108 if depth1 else tmp101)
tmp117 = (tmp52 if depth5 else tmp51)
tmp116 = (tmp117 if depth4 else tmp51)
tmp115 = (tmp116 if depth3 else tmp51)
tmp114 = (tmp115 if depth2 else tmp102)
tmp113 = (tmp114 if depth1 else tmp108)
tmp106 = (tmp107 if s0 else tmp113)
tmp92 = (tmp93 if s1 else tmp106)
tmp77 = (tmp78 if s2 else tmp92)
tmp123 = (tmp116 if depth3 else tmp97)
tmp122 = (tmp123 if depth2 else tmp109)
tmp121 = (tmp122 if depth1 else tmp114)
tmp126 = (tmp116 if depth3 else tmp103)
tmp125 = (tmp126 if depth2 else tmp115)
tmp124 = (tmp125 if depth1 else tmp122)
tmp120 = (tmp121 if s0 else tmp124)
tmp130 = (tmp116 if depth3 else tmp110)
tmp129 = (tmp130 if depth2 else tmp123)
tmp128 = (tmp129 if depth1 else tmp125)
tmp132 = (tmp116 if depth2 else tmp126)
tmp131 = (tmp132 if depth1 else tmp129)
tmp127 = (tmp128 if s0 else tmp131)
tmp119 = (tmp120 if s1 else tmp127)
tmp138 = (tmp117 if depth4 else tmp98)
tmp137 = (tmp138 if depth3 else tmp116)
tmp136 = (tmp137 if depth2 else tmp130)
tmp135 = (tmp136 if depth1 else tmp132)
tmp142 = (tmp117 if depth4 else tmp104)
tmp141 = (tmp142 if depth3 else tmp116)
tmp140 = (tmp141 if depth2 else tmp116)
tmp139 = (tmp140 if depth1 else tmp136)
tmp134 = (tmp135 if s0 else tmp139)
tmp147 = (tmp117 if depth4 else tmp111)
tmp146 = (tmp147 if depth3 else tmp116)
tmp145 = (tmp146 if depth2 else tmp137)
tmp144 = (tmp145 if depth1 else tmp140)
tmp150 = (tmp117 if depth3 else tmp116)
tmp149 = (tmp150 if depth2 else tmp141)
tmp148 = (tmp149 if depth1 else tmp145)
tmp143 = (tmp144 if s0 else tmp148)
tmp133 = (tmp134 if s1 else tmp143)
tmp118 = (tmp119 if s2 else tmp133)
tmp76 = (tmp77 if s3 else tmp118)
tmp3 = (tmp4 if s4 else tmp76)
tmp159 = (tmp117 if depth3 else tmp138)
tmp158 = (tmp159 if depth2 else tmp146)
tmp157 = (tmp158 if depth1 else tmp149)
tmp162 = (tmp117 if depth3 else tmp142)
tmp161 = (tmp162 if depth2 else tmp150)
tmp160 = (tmp161 if depth1 else tmp158)
tmp156 = (tmp157 if s0 else tmp160)
tmp166 = (tmp117 if depth3 else tmp147)
tmp165 = (tmp166 if depth2 else tmp159)
tmp164 = (tmp165 if depth1 else tmp161)
tmp168 = (tmp117 if depth2 else tmp162)
tmp167 = (tmp168 if depth1 else tmp165)
tmp163 = (tmp164 if s0 else tmp167)
tmp155 = (tmp156 if s1 else tmp163)
tmp175 = (tmp52 if depth5 else tmp99)
tmp174 = (tmp175 if depth4 else tmp117)
tmp173 = (tmp174 if depth3 else tmp117)
tmp172 = (tmp173 if depth2 else tmp166)
tmp171 = (tmp172 if depth1 else tmp168)
tmp180 = (tmp52 if
colours);
(3) ferrous-bearing carbonates (warm colours) potentially associated with metasomatic “alteration”;
(4) calcite/dolomite which are ferrous iron-poor (cool colours); and
(5) epidote, which is ferrous iron-poor (cool colours) – in combination with FeOH content product (high).""",
# The WMS name for the layer
"name": "aster_ferrous_iron_content_in_mgoh",
# The Datacube name for the associated data product
"product_name": "aster_ferrous_iron_content_in_mgoh",
# The Datacube name for the associated pixel-quality product (optional)
# The name of the associated Datacube pixel-quality product
# "pq_dataset": "s2b_ard_granule",
# The name of the measurement band for the pixel-quality product
# (Only required if pq_dataset is set)
# "pq_band": "pixel_quality",
# Min zoom factor - sets the zoom level where the cutover from indicative polygons
# to actual imagery occurs.
"min_zoom_factor": 10.0,
# The fill-colour of the indicative polygons when zoomed out.
# Triplets (rgb) or quadruplets (rgba) of integers 0-255.
"zoomed_out_fill_colour": [150, 180, 200, 160],
# Time Zone. In hours added to UTC (may be negative)
# Used for rounding off scene times to a date.
# 9 is a good value for imagery of Australia.
"time_zone": 9,
# Extent mask function
# Determines which portions of the dataset are potentially meaningful data.
"extent_mask_func": lambda data, band: (data[band] != data[band].attrs['nodata']),
# Flags listed here are ignored in GetFeatureInfo requests.
# (defaults to empty list)
"ignore_info_flags": [],
# Define layer wide legend graphic if no style is passed
# to GetLegendGraphic
"legend": {
# "url": ""
"styles": ["ramp"]
},
"wcs_default_bands": ["Band_1"],
# Styles.
#
# See band_mapper.py
#
# The various available spectral bands, and ways to combine them
# into a single rgb image.
# The examples here are ad hoc
#
"styles": [
# Examples of styles which are linear combinations of the available spectral bands.
#
{
"name": "ramp",
"title": "B5/B4 ",
"abstract": "",
"index_function": lambda data: data["Band_1"],
"needed_bands": ["Band_1"],
"color_ramp": [
{
"value": 0.0,
"color": "#8F3F20",
"alpha": 0.0,
"legend": {
"label": "0.1"
}
},
{
"value": 1,
"color": "#000000"
},
{
"value": 10,
"color": "#2d002b"
},
{
"value": 25,
"color": "#550071"
},
{
"value": 60,
"color": "#0400ff"
},
{
"value": 90,
"color": "#0098ff"
},
{
"value": 110,
"color": "#00ffff"
},
{
"value": 130,
"color": "#00ff94"
},
{
"value": 150,
"color": "#00ff2a"
},
{
"value": 170,
"color": "#3fff00"
},
{
"value": 210,
"color": "#ffee00"
},
{
"value": 230,
"color": "#ff8300"
},
{
"value": 255.0,
"color": "#ff0000",
"legend": {
"label": "2.0"
}
}
],
"legend": {
"units": "Blue is low ferrous iron content,\nRed is high ferrous iron content",
}
},
],
# Default style (if request does not specify style)
# MUST be defined in the styles list above.
# (Looks like Terria assumes this is the first style in the list, but this is
# not required by the standard.)
"default_style": "ramp",
}, # ASTER Ferrous Iron Content in MgOH
{
# Included as a keyword for the layer
"label": "Ferrous Iron Index",
# Included as a keyword for the layer
"type": "",
# Included as a keyword for the layer
"variant": "",
"abstract": """
Band ratio: B5/B4
Blue is low abundance,
Red is high abundance
This product can help map exposed “fresh” (un-oxidised) rocks (warm colours) especially mafic and ultramafic lithologies rich in ferrous silicates (e.g. actinolite, chlorite) and/or ferrous carbonates (e.g. ferroan dolomite, ankerite, siderite).
Applying an MgOH Group content mask to this product helps to isolate ferrous bearing non-OH bearing minerals like pyroxenes (e.g. jadeite) from OH-bearing or carbonate-bearing ferrous minerals like actinolite or ankerite, respectively.
Also combine with the FeOH Group content product to find evidence for ferrous-bearing chlorite (e.g. chamosite).
""",
# The WMS name for the layer
"name": "aster_ferrous_iron_index",
# The Datacube name for the associated data product
"product_name": "aster_ferrous_iron_index",
# The Datacube name for the associated pixel-quality product (optional)
# The name of the associated Datacube pixel-quality product
# "pq_dataset": "s2b_ard_granule",
# The name of the measurement band for the pixel-quality product
# (Only required if pq_dataset is set)
# "pq_band": "pixel_quality",
# Min zoom factor - sets the zoom level where the cutover from indicative polygons
# to actual imagery occurs.
"min_zoom_factor": 10.0,
# The fill-colour of the indicative polygons when zoomed out.
# Triplets (rgb) or quadruplets (rgba) of integers 0-255.
"zoomed_out_fill_colour": [150, 180, 200, 160],
# Time Zone. In hours added to UTC (may be negative)
# Used for rounding off scene times to a date.
# 9 is a good value for imagery of Australia.
"time_zone": 9,
# Extent mask function
# Determines which portions of the dataset are potentially meaningful data.
"extent_mask_func": lambda data, band: (data[band] != data[band].attrs['nodata']),
# Flags listed here are ignored in GetFeatureInfo requests.
# (defaults to empty list)
"ignore_info_flags": [],
# Define layer wide legend graphic if no style is passed
# to GetLegendGraphic
"legend": {
# "url": ""
"styles": ["ramp"]
},
"wcs_default_bands": ["Band_1"],
# Styles.
#
# See band_mapper.py
#
# The various available spectral bands, and ways to combine them
# into a single rgb image.
# The examples here are ad hoc
#
"styles": [
# Examples of styles which are linear combinations of the available spectral bands.
#
{
"name": "ramp",
"title": "B5/B4 ",
"abstract": "",
"index_function": lambda data: data["Band_1"],
"needed_bands": ["Band_1"],
"color_ramp": [
{
"value": 0.0,
"color": "#8F3F20",
"alpha": 0.0,
"legend": {
"label": "0.75"
}
},
{
"value": 1,
"color": "#000000"
},
{
"value": 10,
"color": "#2d002b"
},
{
"value": 25,
"color": "#550071"
},
{
"value": 60,
"color": "#0400ff"
},
{
"value": 90,
"color": "#0098ff"
},
{
"value": 110,
"color": "#00ffff"
},
{
"value": 130,
"color": "#00ff94"
},
{
"value": 150,
"color": "#00ff2a"
},
{
"value": 170,
"color": "#3fff00"
},
{
"value": 210,
"color": "#ffee00"
},
{
"value": 230,
"color": "#ff8300"
},
{
"value": 255.0,
"color": "#ff0000",
"legend": {
"label": "1.025"
}
}
],
"legend": {
"units": "Blue is low abundance,\nRed is high abundance",
}
},
],
# Default style (if request does not specify style)
# MUST be defined in the styles list above.
# (Looks like Terria assumes this is the first style in the list, but this is
# not required by the standard.)
"default_style": "ramp",
}, # ASTER Ferrous Iron Index
{
# Included as a keyword for the layer
"label": "Green Vegetation",
# Included as a keyword for the layer
"type": "",
# Included as a keyword for the layer
"variant": "",
"abstract": """
Band ratio: B3/B2
Blue is low content,
Red is high content
Use this image to help interpret the amount of “obscuring/complicating” green vegetation cover.""",
# The WMS name for the layer
"name": "aster_green_vegetation",
# The Datacube name for the associated data product
"product_name": "aster_green_vegetation",
# The Datacube name for the associated pixel-quality product (optional)
# The name of the associated Datacube pixel-quality product
# "pq_dataset": "s2b_ard_granule",
# The name of the measurement band for the pixel-quality product
# (Only required if pq_dataset is set)
# "pq_band": "pixel_quality",
# Min zoom factor - sets the zoom level where the cutover from indicative polygons
# to actual imagery occurs.
"min_zoom_factor": 10.0,
# The fill-colour of the indicative polygons when zoomed out.
# Triplets (rgb) or quadruplets (rgba) of integers 0-255.
"zoomed_out_fill_colour": [150, 180, 200, 160],
# Time Zone. In hours added to UTC (may be negative)
# Used for rounding off scene times to a date.
# 9 is a good value for imagery of Australia.
"time_zone": 9,
# Extent mask function
# Determines which portions of the dataset are potentially meaningful data.
"extent_mask_func": lambda data, band: (data[band] != data[band].attrs['nodata']),
# Flags listed here are ignored in GetFeatureInfo requests.
# (defaults to empty list)
"ignore_info_flags": [],
# Define layer wide legend graphic if no style is passed
# to GetLegendGraphic
"legend": {
# "url": ""
"styles": ["ramp"]
},
"wcs_default_bands": ["Band_1"],
# Styles.
#
# See band_mapper.py
#
# The various available spectral bands, and ways to combine them
# into a single rgb image.
# The examples here are ad hoc
#
# store nodes that have been visited
At = T # the node of current position in tracing
NodeColor[VrtxCmpnt[T]] = 1
while At not in Branching and At not in Special:
Visited.append(At)
for Nbr in TreeNbr[At]:
if not Nbr in Visited: # search toward the mainstream
Trace.append((Nbr, At))
Previous = At # the node visited before At in tracing
At = Nbr
break
# print "\t\t\tTrace ", Trace
# after the while loop, At stops at a branching node.
# block below deactivated Forrest 2011-10-04 to remove all links from Terminal
'''
if not At in Special:
NodeColor[VrtxCmpnt[At]] = 2 # just a regular branching node
TreeNbr[At].remove(Previous)
for Pair in Trace:
(Src, Dst) = Pair
if Pair in Path:
Path.remove(Pair)
else: # it is possible the order of nodes is reversed in Path
Path.remove((Dst, Src))
Degree[At] -= 1
if Degree[At] < 3:
Branching.remove(At)
elif At in Special and At in Branching:
NodeColor[VrtxCmpnt[At]] = 5
elif At in Special and At in Terminal:
NodeColor[VrtxCmpnt[At]] = 4
else: # Special only
NodeColor[VrtxCmpnt[At]] = 3
'''
# Delete all links from non-special terminals Forrest 2011-10-04
NodeColor[VrtxCmpnt[At]] = 2 # just a regular branching node
TreeNbr[At].remove(Previous)
for Pair in Trace:
(Src, Dst) = Pair
if Pair in Path:
Path.remove(Pair)
else: # it is possible the order of nodes is reversed in Path
Path.remove((Dst, Src))
if At in Branching: # may stop at a Special-only node
Degree[At] -= 1
if Degree[At] < 3:
Branching.remove(At)
# End of Delete all links from non-special terminals Forrest 2011-10-04
# print "\t\t Final path: ", Path
return Path, NodeColor
def nonZeroLn(List): # activated 2011-05-25 19:14
'''Given a 2-D list, return the number of 1-D lists that contain nonzero elements
'''
Counter = 0
for L in List:
for E in L:
if E != 0:
Counter += 1
break
return Counter
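# For example, nonZeroLn([[0, 0], [0, 3], [1, 0]]) == 2: two of the three rows
# contain at least one nonzero element.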
class Prim: # modified from unlicensed code at http://hurring.com/scott/code/python/mst_prim/v0.1/mst_prim.py
INFINITY = 2**8 # this is large enough for current problem size
vertices = 0
def __init__(self, A, r):
"""
Prepare the inputs for mst_prime
"""
self.vertices = len(A[0])
self.nonzeros = nonZeroLn(A) # a new member, activated Forrest 2011-05-25 18:56
self.init_adjacency(A)
self.remove_route(A, r)
self.degree = [0 for i in xrange(0, len(A))] # a new member , activated Forrest 2011-05-27 20:31
self.tree_nbr= [[] for i in xrange(0, len(A))] # record nbrs of each node, Forrest 2011-09-24
def mst_prim(self, A, w, i, path, degree, tree_nbr):
"""
'A' is the adjacency matrix
'w' is the list of all connected vertices (in order of discovery)
'path' is a list of tuples showing (from, to)
i : the ID of the connected component # Forrest 2011-05-26 00:31
"""
# Stop when we've added all nodes to the path
# if (w.__len__() == self.vertices): # old line. But if some nodes are not connected, it goes into infinite recursion. Deactivated Forrest 2011-05-25 19:39
if (w.__len__() == self.nonzeros): # new way, activated Forrest 2011-05-25 19:42
return (A, w, path, degree, tree_nbr)
# Find minimum path coming OUT of the known vertexes
(vfrom, vto, vcost) = self.find_min(A, w)
# increase the degree for vertexes vfrom and vto
degree[vfrom] += 1
degree[vto] += 1
# update tree_nbr list for vfrom and vto Forrest 2011-09-24 10:55
tree_nbr[vfrom].append(vto)
tree_nbr[vto].append(vfrom)
# Mark down this vertex as being a part of the MST path
w.append(vto)
#path.append((vfrom,vto,vcost, i)) # commented, Forrest 2011-09-24
path.append((vfrom, vto))
self.remove_route(A, vto)
return self.mst_prim(A, w, i, path, degree, tree_nbr)
def init_adjacency(self, A):
"""
Initialize adjacency list - set 0 = INFINITY
"""
for i in range(0, self.vertices):
for j in range(0, self.vertices):
if A[i][j] == 0:
A[i][j] = 2**8
def remove_route(self, A, v):
"""
Once we've added a node to our path, set all routes
to this node equal to INFINITY - to prevent loops
"""
for i in range(0, self.vertices):
A[i][v] = self.INFINITY
def find_min(self, A, w):
"""
Find the cheapest connection we can possibly make,
given the partially-built MST 'w'
'vfrom' vertex to connect from
'vto' vertex to connect to
'vcost' cost of connection
"""
vcost = self.INFINITY
vto = vfrom = -1
for v in w:
# Get array offset of minimum of this vertex
i = argmin(A[v])
if A[v][i] < vcost:
vcost = A[v][i]
vto = i
vfrom = v
return (vfrom, vto, vcost)
# The end of Class Prim
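# Usage sketch (illustrative, not from the original module; it assumes a numpy-style
# argmin() is in scope, as find_min() above requires):
#   A = [[0, 1, 2],
#        [1, 0, 3],
#        [2, 3, 0]]          # symmetric edge weights; 0 means "no edge"
#   M = Prim(A, 0)           # root the spanning tree at vertex 0
#   (_, w, path, degree, tree_nbr) = M.mst_prim(A, [0], 0, [], M.degree, M.tree_nbr)
#   # path == [(0, 1), (0, 2)]: vertex 1 joins at cost 1, then vertex 2 at cost 2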
def fundiLength(Vrtx, Path):
'''Estimate a fundus length by accumulating Euclidean distances along a given Path
Notes
======
Since a path now has only two nodes, the accumulation is not strictly needed.
'''
Length = 0
for i in xrange(0, len(Path)-1):
j = i+1
Length += sqrt( sum([ (Vrtx[i][k]-Vrtx[j][k])**2 for k in [0,1,2]]))
return Length
def mst(Adjs, VrtxCmpnts, SpecialGroup, NbrLst, Coordinates):
'''Using Prim algorithm to connect nodes (including fundus vertexes) within each connected component
Parameters
==========
VrtxCmpnts : list of lists of integers
VrtxCmpnts[i] is a list of vertexes in the i-th connected component
SpecialGroup : list of lists of integers
Special[i] is a list of Special vertexes in the i-th connected component
Special[i] can be empty
NbrLst : list of list of integers
neighbor list of vertexes
Path : list of lists (2-tuple) of integers
Each element of *Path* is a list of the two terminals of each pair of connected fundus vertexes
Vertexes indexed LOCALLY, i.e., they are referred by their ID in the current component
Adj : list of lists of integers
adjacency matrix of fundus vertexes in one connected component
Adjs : list of Adj's
adjacency matrices of all fundus vertexes on one hemisphere
Coordinates : list of 3-tuples
Coordinates of vertexes in GLOBAL index.
W : list of integers
vertexes that are already connected in the Prim algorithm
It is not used further.
Segs : list of lists (2-tuple) of integers
Vertexes indexed GLOBALLY, i.e., they are referred by their ID in the entire hemisphere
Degree : list of integers
Degrees of nodes in a component
TreeNbr : list of list of integers
Each element is a list of neighbors of a node. All LOCAL IDs.
Terminal : list of integers
Local IDs of nodes that are terminals.
Branching : list of integers
Local IDs of nodes that are branching nodes.
FundusLen : dictionary of integers
Each key is a GLOBAL vertex ID.
The value of each key is the length of the fundus that the key is part of.
FundusID : dictionary of integers
Each key is a GLOBAL vertex ID.
The value of each key is the ID (equal to component ID) of the fundus that the key is part of.
'''
Segs = []
Color = {}
FundusLen, FundusID = {}, {}
if len(Adjs) != len(VrtxCmpnts):
print "Error, Adjs is not as long as VrtxCmpnts"
exit()
else:
print "Connecting fundus vertexes in", len(VrtxCmpnts), "connected components."
for i in xrange(0, len(Adjs)): # For each component in the hemisphere
print "\t MST on component",i+1, ",",
if len(SpecialGroup[i]) < 2 : # this component has fewer than two special vertexes to connect
print "\t Skipped. Too few Special vertexes."
elif len(VrtxCmpnts[i]) >200: # For quick debugging ONLY. Forrest 2011-09-29 16:56
print "\t Skipped. Too many vertexes (all kinds). "
else:
# print "\t # of special points", len(SpecialGroup[i]) ,
Root = VrtxCmpnts[i].index(SpecialGroup[i][0]) # always start MST from a special vertex
# Adj = Adjs[i] # avoid creating new variable to speed up
# Cmpnt = VrtxCmpnts[i] # avoid creating new variable to speed up
Num = len(Adjs[i])
if Num > 1: # the Num < 1000 is for fast debugging
M = Prim(Adjs[i], Root) # starting from the Root
(Adj, W, Path, Degree, TreeNbr) = M.mst_prim(Adjs[i], [Root], i, [], M.degree, M.tree_nbr) # starting from the Root
# Seg = [[VrtxCmpnts[i][Idx] for Idx in Pair] for Pair in Path] # The Idx is LOCAL (i.e., within the connected component) index of a vertex.
# pruning the MST Forrest 2011-09-24
Terminal, Branching =[],
<reponame>EasternEdgeRobotics/2018<gh_stars>0
#!/usr/bin/python
import sys
import subprocess
import os.path
import math
#===============================================================================
def print_usage():
print \
"""
Usage: trim_map.py <infile> <outfile> <out-info-file> \\
-keep <bottom-lat> <top-lat> <left-lon> <right-lon> \\
-size <x-size> <y-size> \\
-origin <origin-lat> <origin-lon>
If the '-keep' option and associated parameters are supplied, this trims the
input image in a way that keeps the specified region. If '-keep' and its
parameters are not specified, then the image is simply cropped down to the
specified output size, removing the top and right parts of the image as needed.
<infile> must have a name of the form produced by the 'map_get_map.pl' script
so that this script can figure out its lat/lon boundaries.
<bottom-lat> ... <right-lon> describe the region within the input file that must
be present in the output file. (If this script is unable to meet this
requirement, the script will fail.)
<x-size> and <y-size> give the size of the output image, in pixels. If the
input image is smaller in either dimension than the output image is specified to
be, this script will fail.
<origin-lat> and <origin-lon> give the location of the origin of the (x,y)
coordinate system used in the AUV software. This script just needs this
information when writing the .info file.
The script will produce two files:
<outfile> is the name of the .jpg file that this script will produce.
This script will *not* add a .tif suffix to the filename; you need to specify
that. Outfile will be exactly <x-size> x <y-size> pixels in size.
Note that the image format of the output file is dictated by the filename
extension you use for <outfile>. So, for example, if you specify
"foo.tif", your output file will be a TIFF file.
<out-info-file> describes the details of the output file, suitable for use in
polyview. It will presumably be named something like "foo.info".
*** THIS SCRIPT WILL OVERWRITE EXISTING <outfile> AND <out-info-file> FILES! ***
"""
#===============================================================================
nautical_miles_per_degree_lat = 60
meters_per_nautical_mile = 1852
meters_per_degree_lat = nautical_miles_per_degree_lat * meters_per_nautical_mile
#===============================================================================
def get_meters_per_degree_lon(lat_degrees):
lat_radians = lat_degrees * (2 * math.pi / 360)
# This is approximate, but should be close enough.
return nautical_miles_per_degree_lat * meters_per_nautical_mile * math.cos(lat_radians)
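# Example (illustrative): at 45 degrees latitude, one degree of longitude spans
# about 60 * 1852 * cos(45 deg) ~= 78,574 meters, versus the constant
# 111,120 meters per degree of latitude.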
#===============================================================================
def get_image_xy_size(filename):
"""
Returns a tuple (x, y) giving the size in pixels of the specified image file.
If some problem is encountered, this prints an error and exits.
"""
cmd = ['identify', '-format', '%w %h', filename]
p = subprocess.Popen(args=cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(p_stdout, p_stderr) = p.communicate()
if p.returncode != 0:
print >> sys.stderr, "Command '" + ' '.join(cmd) + "' failed. Here's its output:\n"
sys.exit(p_stderr)
results = p_stdout.strip().split(' ')
assert(len(results) == 2)
return int(results[0]), int(results[1])
#===============================================================================
def crop_image(input_filename, output_filename, new_x_pixels, new_y_pixels, crop_x_offset, crop_y_offset):
cmd = ['convert',
'-gravity', 'SouthWest',
'-crop', str(new_x_pixels) + 'x' + str(new_y_pixels) \
+ '+' + str(crop_x_offset) + '+' + str(crop_y_offset),
input_filename,
output_filename]
p = subprocess.Popen(args=cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(p_stdout, p_stderr) = p.communicate()
if p.returncode != 0:
print >> sys.stderr, "Command '" + ' '.join(cmd) + "' failed. Here's its output:\n"
sys.exit(p_stderr)
results = p_stdout.strip().split(' ')
#===============================================================================
def parse_input_img_filename(filename):
f = os.path.splitext(os.path.basename(filename))[0]
fname_parts = f.split('_')
if (len(fname_parts) != 8) \
or (fname_parts[0] != 'blat') \
or (fname_parts[2] != 'tlat') \
or (fname_parts[4] != 'llon') \
or (fname_parts[6] != 'rlon'):
sys.exit("The input file's name doesn't have the required structure.")
blat = float(fname_parts[1])
tlat = float(fname_parts[3])
llon = float(fname_parts[5])
rlon = float(fname_parts[7])
return (blat, tlat, llon, rlon)
#===============================================================================
def main(argv):
if (len(argv) != 15) \
or (argv[4] != '-keep') \
or (argv[9] != '-size') \
or (argv[12] != '-origin'):
print_usage();
sys.exit(1);
try:
infile = argv[1]
outfile = argv[2]
out_info_file = argv[3]
desired_blat = float(argv[5])
desired_tlat = float(argv[6])
desired_llon = float(argv[7])
desired_rlon = float(argv[8])
desired_x_size = int(argv[10])
desired_y_size = int(argv[11])
origin_lat = float(argv[13])
origin_lon = float(argv[14])
except:
print sys.exc_info()
print "\n\n"
print_usage()
sys.exit(1)
# Confirm that the user isn't asking to grow the image...
(input_image_x, input_image_y) = get_image_xy_size(infile)
if (desired_x_size > input_image_x) or (desired_y_size > input_image_y):
sys.exit("You specified an output image of width, height=" + \
str(desired_x_size) + ", " + str(desired_y_size) + "\n" + \
"But the input image has width, height of " + \
str(input_image_x) + ", " + str(input_image_y) + "\n" + \
"The output image cannot be bigger on either axis than the input image.")
# Confirm that the desired image actually lies within the input image.
# Note that this probably only works for images that don't span the poles,
# prime meridian, etc.
#
# We also assume that the image is oriented with the increasing-y axis
# pointing north. This assures us that the bottom latitude is < the top
# latitude (as long as the image doesn't span a pole of the earth), and
# the left longitude is less than the right longitude (as long as the
# image doesn't cross the prime meridian)...
(input_blat, input_tlat, input_llon, input_rlon) = parse_input_img_filename(infile)
if ((desired_blat < input_blat) or (desired_blat > input_tlat)):
sys.exit("Problem: <bottom-lat> is outside the latitude range of the input file.")
if ((desired_tlat < input_blat) or (desired_tlat > input_tlat)):
sys.exit("Problem: <top-lat> is outside the latitude range of the input file.")
if (desired_blat > desired_tlat):
sys.exit("Problem: <bottom-lat> is greater than <top-lat>.")
if ((desired_llon < input_llon) or (desired_llon > input_rlon)):
sys.exit("Problem: <left-lon> is outside the longitude range of the input file.")
if ((desired_rlon < input_llon) or (desired_rlon > input_rlon)):
sys.exit("Problem: <right-lon> is outside the longitude range of the input file.")
if (desired_llon > desired_rlon):
sys.exit("Problem: <left-lon> is greater than <right-lon>.")
# Figure out the relationship between pixels and lat/lon.
#
# This is only an approximation, because lines of longitude aren't
# parallel. But it's safe enough at the scales we work at...
input_lat_span_degrees = input_tlat - input_blat
input_lon_span_degrees = input_rlon - input_llon
desired_lat_span_degrees = desired_tlat - desired_blat
desired_lon_span_degrees = desired_rlon - desired_llon
input_lat_degrees_per_pixel = input_lat_span_degrees / input_image_y
input_lon_degrees_per_pixel = input_lon_span_degrees / input_image_x
required_output_image_y = int(input_image_y * (desired_lat_span_degrees / input_lat_span_degrees))
required_output_image_x = int(input_image_x * (desired_lon_span_degrees / input_lon_span_degrees))
if (required_output_image_y > desired_y_size) or (required_output_image_x > desired_x_size):
sys.exit( \
"Problem: In order to produce an output image that covers the specified \n" + \
" lat/lon range, the output image would need to have width,height = " + \
str(required_output_image_x) + "," + str(required_output_image_y) + "\n\n" + \
" That's bigger than you specified the output image size to be.")
# Figure out where in the input and output files the origin is. We really
# only need to know it for the output file, but just to be helpful we'll
# report where it is in the input file.
input_origin_x_fraction = (origin_lon - input_llon) / input_lon_span_degrees
input_origin_y_fraction = (origin_lat - input_blat) / input_lat_span_degrees
input_origin_x_pixels = int(input_origin_x_fraction * input_image_x)
input_origin_y_pixels = int(input_origin_y_fraction * input_image_y)
# Figure out what pixel offset (both x- and y-) into the original image
# should be the bottom-left corner of the content that ends up in the
# output image...
crop_x_offset = int((desired_llon - input_llon) / input_lon_degrees_per_pixel)
crop_y_offset = int((desired_blat - input_blat) / input_lat_degrees_per_pixel)
# Figure out the lat/lon bounds of the *actual* output image being produced.
# This will typically exceed what the user specified with <bottom-lat>, et al
# because we need to grab extra content from the source image in order for the
# output image to have the number of pixels specified by the <x-size> and
# <y-size> command-line args...
outfile_llon = input_llon + (crop_x_offset * input_lon_degrees_per_pixel)
outfile_rlon = outfile_llon + (required_output_image_x * input_lon_degrees_per_pixel)
outfile_blat = input_blat + (crop_y_offset * input_lat_degrees_per_pixel)
outfile_tlat = outfile_blat + (required_output_image_y * input_lat_degrees_per_pixel)
outfile_lon_span_degrees = outfile_rlon - outfile_llon
outfile_lat_span_degrees = outfile_tlat - outfile_blat
# Figure out where in the output image the origin is, but fractional and
# pixel offset...
outfile_origin_x_fraction = (origin_lon - outfile_llon) / outfile_lon_span_degrees
outfile_origin_y_fraction = (origin_lat - outfile_blat) / outfile_lat_span_degrees
outfile_origin_x_pixels = int(outfile_origin_x_fraction * required_output_image_x)
outfile_origin_y_pixels = int(outfile_origin_y_fraction
None
#: Security definitions from Security Definitions Object
#:
#: key: security name, value: SecurityDefinition object
security_definitions = None
#: Represents tag descriptions from Swagger Tag Object
#:
#: key: tag name, value: dict with keys ``description`` and ``externalDocs``
tag_descriptions = None
#: Example Manager. Must be a subclass of Exampilator
exampilator = None
def __init__(self, obj, exampilator=None, examples=None):
if obj['swagger'] != '2.0':
raise ConverterError('Invalid Swagger version')
self.raw = obj
self.exampilator = exampilator or Exampilator
assert issubclass(self.exampilator, Exampilator)
if examples:
try:
self.exampilator.schema_validate(examples, examples_json_schema)
except ValidationError as err:
raise ConverterError(err.message)
self.exampilator.fill_examples(examples)
if 'definitions' in obj:
self._fill_schemas_from_definitions(obj['definitions'])
if 'parameters' in obj:
self._fill_parameter_definitions(obj['parameters'])
if 'responses' in obj:
self._fill_response_definitions(obj['responses'])
if 'securityDefinitions' in obj:
self._fill_security_definitions(obj['securityDefinitions'])
if 'security' in obj:
self._fill_securities(obj['security'])
self.info = obj['info']
self.host = obj.get('host', '')
self.base_path = obj.get('basePath', '')
self.consumes = obj.get('consumes', ['application/json'])
self.produces = obj.get('produces', ['application/json'])
self.schemes = obj.get('schemes', ['http'])
self._fill_operations()
self.external_docs = obj.get('externalDocs')
def _fill_operations(self):
self.operations = {}
self._fill_tag_descriptions()
self.tags = defaultdict(list)
for path, operations in self.raw['paths'].items():
path_params = []
for param in operations.get('parameters', []):
if param.get('$ref'):
path_params.append(self.parameter_definitions[param['$ref']])
else:
path_params.append(
Parameter(param, name=param['name'], root=self))
for method, operation in operations.items():
if method == 'parameters':
continue
op = Operation(operation, method, path, self, path_params)
self.operations[op.operation_id] = op
for tag in op.tags:
self.tags[tag].append(op)
def _fill_tag_descriptions(self):
if 'tags' in self.raw:
self.tag_descriptions = {}
for tag in self.raw['tags']:
if 'description' in tag or 'externalDocs' in tag:
self.tag_descriptions[tag['name']] = {
'description': tag.get('description'),
'externalDocs': tag.get('externalDocs')
}
def _fill_schemas_from_definitions(self, obj):
self.schemas.clear()
for name, definition in obj.items():
self.schemas.create_schema(
definition, name, SchemaTypes.DEFINITION, root=self)
def _fill_parameter_definitions(self, obj):
self.parameter_definitions = {}
for name, parameter in obj.items():
key = '#/parameters/{}'.format(name)
self.parameter_definitions[key] = Parameter(
parameter, name=parameter['name'], root=self)
def _fill_response_definitions(self, obj):
self.response_definitions = {}
for name, response in obj.items():
key = '#/responses/{}'.format(name)
self.response_definitions[key] = Response(
response, name=name, root=self)
def _fill_security_definitions(self, obj):
self.security_definitions = {
name: SecurityDefinition(name, _obj) for name, _obj in obj.items()
}
def get_type_description(self, _type, *args, **kwargs):
""" Get description of type
:param str _type:
:rtype: str
"""
return self.schemas.get_type_description(_type, *args, **kwargs)
class Operation(SecurityMixin):
""" Represents Swagger Operation Object
"""
parameters = None
responses = None
method = None
path = None
root = None #: root swagger object
def __init__(self, obj, method, path, root, path_params=None):
self.method = method
self.path = path
self.root = root
self.operation_id = obj.get(
'operationId', self.get_operation_id(method, path))
self.summary = obj.get('summary')
self.description = obj.get('description')
self.consumes = obj.get('consumes', self.root.consumes)
self.produces = obj.get('produces', self.root.produces)
self.schemes = obj.get('schemes', self.root.schemes)
self._fill_parameters(obj.get('parameters', []), path_params)
self._fill_responses(obj['responses'])
self.deprecated = obj.get('deprecated', False)
self.tags = obj.get('tags', ['default'])
self.external_docs = obj.get('externalDocs')
if 'security' in obj:
self._fill_securities(obj['security'])
@staticmethod
def get_operation_id(method, path):
op_id = '{}_{}'.format(method, path)
# copy-paste from swagger-js
op_id = re.sub(r'[\s!@#$%^&*()+=\[{\]};:<>|./?,\'"-]', '_', op_id)
op_id = re.sub(r'(_){2,}', '_', op_id)
op_id = re.sub(r'^[_]*', '', op_id)
op_id = re.sub(r'([_]*)$', '', op_id)
return op_id
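# For example (illustrative): get_operation_id('get', '/pets/{petId}') goes
# 'get_/pets/{petId}' -> 'get__pets__petId_' -> 'get_pets_petId'.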
def _fill_parameters(self, params, path_params):
self.parameters = []
for obj in params:
if '$ref' in obj:
self.parameters.append(self.root.parameter_definitions[obj['$ref']])
else:
self.parameters.append(
Parameter(obj, name=obj['name'], root=self.root))
if path_params:
self.parameters += path_params
if len(self.get_parameters_by_location(['body'])) > 1:
raise ConverterError(
'Invalid source file: More than one body parameter in %s' % self.path)
def _fill_responses(self, responses):
self.responses = {}
for code, obj in responses.items():
if '$ref' in obj:
self.responses[code] = self.root.response_definitions[obj['$ref']]
else:
self.responses[code] = Response(obj, name=code, root=self.root)
def get_parameters_by_location(self, locations=None, excludes=None):
""" Get parameters list by location
:param locations: list of locations
:type locations: list or None
:param excludes: list of excludes locations
:type excludes: list or None
:return: list of Parameter
:rtype: list
"""
result = self.parameters
if locations:
result = filter(lambda x: x.location_in in locations, result)
if excludes:
result = filter(lambda x: x.location_in not in excludes, result)
return list(result)
@cached_property
def body(self):
""" Return body request parameter
:return: Body parameter
:rtype: Parameter or None
"""
body = self.get_parameters_by_location(['body'])
return self.root.schemas.get(body[0].type) if body else None
class AbstractTypeObject(object):
_type = None
type_format = None
properties = None
item = None #: set if type is array
def __init__(self, obj, name, root, **kwargs):
self.raw = obj
self.name = name
self.root = root
def get_type_properties(self, property_obj, name):
""" Get internal properties of property
:param dict property_obj: raw property object
:param str name: name of property
:return: Type, format and internal properties of property
:rtype: tuple(str, str, dict)
"""
property_type = property_obj.get('type', 'object')
property_format = property_obj.get('format')
property_dict = dict()
if property_type in ['object', 'array']:
schema_id = self._get_object_schema_id(property_obj, SchemaTypes.INLINE)
if not ('$ref' in property_obj or SchemaObjects.get(schema_id)):
_schema = SchemaObjects.create_schema(
property_obj, name, SchemaTypes.INLINE, root=self.root)
self._after_create_schema(_schema)
property_type = schema_id
if 'default' in property_obj:
property_dict['default'] = property_obj['default']
if 'maximum' in property_obj:
property_dict['maximum'] = property_obj['maximum']
property_dict['exclusive_maximum'] = property_obj.get('exclusiveMaximum', False)
if 'minimum' in property_obj:
property_dict['minimum'] = property_obj['minimum']
property_dict['exclusive_minimum'] = property_obj.get('exclusiveMinimum', False)
if 'maxLength' in property_obj:
property_dict['max_length'] = property_obj['maxLength']
if 'minLength' in property_obj:
property_dict['min_length'] = property_obj['minLength']
if 'enum' in property_obj:
property_dict['enum'] = property_obj['enum']
return property_type, property_format, property_dict
@staticmethod
def _get_id(base):
m = md5()
m.update(base.encode('utf-8'))
return m.hexdigest()
def _get_object_schema_id(self, obj, schema_type):
if '$ref' in obj:
base = obj['$ref']
prefix = SchemaTypes.prefixes[SchemaTypes.DEFINITION]
else:
base = json.dumps(obj)
prefix = SchemaTypes.prefixes[schema_type]
return '{}_{}'.format(prefix, self._get_id(base))
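# For example (illustrative): a {'$ref': '#/definitions/Pet'} object is keyed by the
# definition prefix plus md5('#/definitions/Pet'), while an inline schema is keyed by
# the requested prefix plus the md5 of its JSON dump.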
def set_type_by_schema(self, schema_obj):
"""
Set property type by schema object
The schema will be created if it doesn't exist in the collection
:param dict schema_obj: raw schema object
"""
schema_id = self._get_object_schema_id(schema_obj, SchemaTypes.INLINE)
# TODO:
if schema_obj.get('additionalProperties'):
self._type = 'object'
return
if not SchemaObjects.contains(schema_id):
schema = SchemaObjects.create_schema(
schema_obj, self.name, SchemaTypes.INLINE, root=self.root)
assert schema.schema_id == schema_id
self._type = schema_id
def _after_create_schema(self, schema):
pass
@property
def type(self):
return self._type
@property
def is_array(self):
return self._type == 'array'
class Parameter(AbstractTypeObject):
""" Represents Swagger Parameter Object
"""
def __init__(self, obj, **kwargs):
super(Parameter, self).__init__(obj, **kwargs)
self.location_in = obj['in']
self.required = obj.get('required', False)
self.description = obj.get('description', '')
self.default = obj.get('default')
self.collection_format = obj.get('collectionFormat')
self._set_type()
def _set_type(self):
if 'type' in self.raw:
self._type = self.raw['type']
self.type_format = self.raw.get('format')
if self.is_array:
self.item = dict(zip(
('type', 'type_format', 'type_properties'),
self.get_type_properties(self.raw['items'], self.name)))
else:
_, _, self.properties = self.get_type_properties(self.raw, self.name)
elif 'schema' in self.raw:
self.set_type_by_schema(self.raw['schema'])
else:
raise ConverterError('Invalid structure')
@property
def type(self):
if self.is_array:
return 'array of {}'.format(self.item['type'])
else:
return self._type
def __repr__(self):
return '{}_{}'.format(self.location_in, self.name)
class Response(AbstractTypeObject):
""" Represents Swagger Response Object
"""
headers = None
examples = None
def __init__(self, obj, **kwargs):
super(Response, self).__init__(obj, **kwargs)
self.description = obj['description']
self.examples = obj.get('examples')
if 'schema' in obj:
self._set_type()
if 'headers' in obj:
self.headers = {name: Header(header, name=name, root=self.root)
for name, header in obj['headers'].items()}
def _set_type(self):
if 'type' in self.raw['schema'] and self.raw['schema']['type'] in PRIMITIVE_TYPES:
self._type = self.raw['schema']['type']
self.type_format = self.raw['schema'].get('format')
_, _, self.properties = self.get_type_properties(self.raw, self.name)
else:
self.set_type_by_schema(self.raw['schema'])
class Header(AbstractTypeObject):
""" Represents Swagger Header Object
"""
def __init__(self, obj, **kwargs):
super(Header, self).__init__(obj, **kwargs)
self.description = obj.get('description')
self._set_type()
def _set_type(self):
self._type = self.raw['type']
if self._type not in PRIMITIVE_TYPES and self._type != 'array':
raise ConverterError(
'Invalid type of response header {}'.format(self.name))
self.type_format = self.raw.get('format')
if self.is_array:
self.item = dict(zip(
('type', 'type_format', 'type_properties'),
self.get_type_properties(self.raw['items'], self.name)))
else:
_, _, self.properties = self.get_type_properties(self.raw, self.name)
class Schema(AbstractTypeObject):
""" Represents Swagger Schema Object
"""
schema_id = None
schema_type = None #: definition or inline
ref_path = None #: path for definition schemas
nested_schemas = None
all_of = None
def __init__(self, obj, schema_type, **kwargs):
assert schema_type in SchemaTypes.prefixes
super(Schema, self).__init__(obj, **kwargs)
self.nested_schemas = set()
self.schema_type = schema_type
self._type = obj.get('type', 'object')
# assert self._type in ('array', 'object')
self.type_format = obj.get('format')
self.schema_example = obj.get('example')
self.read_only = obj.get('readOnly', False)
self.external_docs = obj.get('externalDocs')
if self._type in PRIMITIVE_TYPES:
self.properties = [{
'name': kwargs.get('name', ''),
'description': '',
'required': obj.get('required', False),
'type': self.type,
'type_format': self.type_format,
'type_properties': self.get_type_properties(obj, '')[2],
}]
if schema_type != SchemaTypes.INLINE:
self.ref_path = '#/definitions/{}'.format(self.name)
if self.is_array:
self.item = dict(zip(
('type', 'type_format', 'type_properties'),
self.get_type_properties(obj['items'], self.name)
))
self.name += '_array'
if self.item['type'] not in PRIMITIVE_TYPES:
self.nested_schemas.add(self.item['type'])
if 'properties' in obj:
# self.example = dict()
self._set_properties()
if 'allOf' in obj:
self.all_of = []
for _obj in obj['allOf']:
_id = self._get_object_schema_id(_obj, SchemaTypes.INLINE)
if not SchemaObjects.contains(_id):
schema = SchemaObjects.create_schema(
_obj, 'inline', SchemaTypes.INLINE, self.root)
assert schema.schema_id == _id
self.all_of.append(_id)
self.nested_schemas.add(_id)
self._set_schema_id()
def _set_schema_id(self):
_id = self._get_id(self.ref_path or json.dumps(self.raw))
self.schema_id = '{}_{}'.format(
SchemaTypes.prefixes[self.schema_type], _id)
def _set_properties(self):
self.properties = []
required_fields = self.raw.get('required', [])
for name, property_obj in self.raw['properties'].items():
property_type, property_format, prop = self.get_type_properties(property_obj, name)
if property_type not in PRIMITIVE_TYPES:
self.nested_schemas.add(property_type)
_obj = {
'name': name,
'description': '',
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43008825-p43151391.7z"),
page_ids=range(43008825, 43151392),
darus_id=94898,
sha1="0cf7f6075eb60646ca10b3bc7a35ee8f63bbc365",
size=265137675,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43151392-p43249708.7z"),
page_ids=range(43151392, 43249709),
darus_id=94899,
sha1="71540d8387d9d48605bef5a55ea82eb8bd031b82",
size=210261016,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43249709-p43351690.7z"),
page_ids=range(43249709, 43351691),
darus_id=94900,
sha1="94d516addf5bc16bb7fd9ed57c85b783467eeab2",
size=212763069,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43351691-p43435317.7z"),
page_ids=range(43351691, 43435318),
darus_id=94901,
sha1="bc3c7abd38974c44b93ad188db8ae5857b617ebd",
size=175898306,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43435318-p43535735.7z"),
page_ids=range(43435318, 43535736),
darus_id=94903,
sha1="e03e55596cec9a7687822e3c1c9712bcbe7e22f8",
size=196456995,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43535736-p43645696.7z"),
page_ids=range(43535736, 43645697),
darus_id=94904,
sha1="df11056c506b1b1ed287a46db623d678369d8935",
size=213132849,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43645697-p43761276.7z"),
page_ids=range(43645697, 43761277),
darus_id=94905,
sha1="a02519ffbbfccbb3cf02795ec47dc8de1713c301",
size=231567275,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43761277-p43880515.7z"),
page_ids=range(43761277, 43880516),
darus_id=94906,
sha1="e98e94046dc554f2c4a51decb2180e47b789e839",
size=232483626,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43880516-p43980361.7z"),
page_ids=range(43880516, 43980362),
darus_id=94907,
sha1="292a929517c1d5cacfcc72c409425a90cb245bc8",
size=207880839,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p43980362-p44101941.7z"),
page_ids=range(43980362, 44101942),
darus_id=94908,
sha1="ba92aac64296e52953f5d6f49d6dedca15438359",
size=235383202,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p44101942-p44219282.7z"),
page_ids=range(44101942, 44219283),
darus_id=94911,
sha1="be381188c8219d820e37af03d67f83f7f4dfc0cc",
size=209286206,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p44219283-p44356783.7z"),
page_ids=range(44219283, 44356784),
darus_id=94913,
sha1="61a42b68183819fda3cfbc8a1ded81c48fc22bc4",
size=249167819,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p44356784-p44483765.7z"),
page_ids=range(44356784, 44483766),
darus_id=94914,
sha1="ac9dc0a977a21f4420323c0f18f60d83b7e8ee4c",
size=246147351,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p44483766-p44611606.7z"),
page_ids=range(44483766, 44611607),
darus_id=94915,
sha1="f9b9b45662f599e88068dd0672f13863b13c307a",
size=222696981,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p44611607-p44770311.7z"),
page_ids=range(44611607, 44770312),
darus_id=94916,
sha1="f68aeb7f0b9b8473c4e8ca77a68e28257a042463",
size=255165610,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p44770312-p44943742.7z"),
page_ids=range(44770312, 44943743),
darus_id=94919,
sha1="41d910a333201273ebd10bbbbd9d79fb65cf98ae",
size=281760850,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p44943743-p45122939.7z"),
page_ids=range(44943743, 45122940),
darus_id=94920,
sha1="86f87f3ca79283daca2f7f3b55652859b2c5de50",
size=287000277,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p45122940-p45289497.7z"),
page_ids=range(45122940, 45289498),
darus_id=94922,
sha1="cafb83794d1e0f24c628308dc1e51ff9940c203c",
size=275704734,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p45289498-p45461169.7z"),
page_ids=range(45289498, 45461170),
darus_id=94923,
sha1="371df62811ffceeac6dd06c85402bac5ae052cc0",
size=283139901,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p45461170-p45527040.7z"),
page_ids=range(45461170, 45527041),
darus_id=94925,
sha1="e0dd964c6d9c1f11a355fa800a4bb940c64a7130",
size=108363256,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p45527041-p45699840.7z"),
page_ids=range(45527041, 45699841),
darus_id=94928,
sha1="6affdf6c09f6710ef978c0765ab5afcac028f516",
size=283717264,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p45699841-p45873683.7z"),
page_ids=range(45699841, 45873684),
darus_id=94929,
sha1="57bf6272eb240ffc7602a8b5862049db7088b380",
size=284682737,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p45873684-p46059085.7z"),
page_ids=range(45873684, 46059086),
darus_id=94931,
sha1="84c60a0585c9ba8b14aca245fd5885beb1c27b9e",
size=294321422,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p46059086-p46223247.7z"),
page_ids=range(46059086, 46223248),
darus_id=94932,
sha1="632c569e6c7a06be95c1683b5eb17a40ab8f9627",
size=269262784,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p46223248-p46403271.7z"),
page_ids=range(46223248, 46403272),
darus_id=94933,
sha1="e85d9050260192410ede6e61358189fd1d114e8f",
size=285790025,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p46403272-p46546103.7z"),
page_ids=range(46403272, 46546104),
darus_id=94935,
sha1="348309680610251574b9c1df04f50fc8ade7fc27",
size=245367481,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p46546104-p46649408.7z"),
page_ids=range(46546104, 46649409),
darus_id=94937,
sha1="09923f561d8234c4f0e890e7a2ff6fcf82573852",
size=140312084,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p46649409-p46747270.7z"),
page_ids=range(46649409, 46747271),
darus_id=94938,
sha1="9bc31702dbea9b0006da49535aad4df737fe11c6",
size=108933945,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p46747271-p46847320.7z"),
page_ids=range(46747271, 46847321),
darus_id=94939,
sha1="2f41efc3c26c1e64f49e74714dcabee99ed70218",
size=109590322,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p46847321-p47001936.7z"),
page_ids=range(46847321, 47001937),
darus_id=94941,
sha1="cd27e50303cd965a92813753e9e99be5ee138616",
size=238348000,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p47001937-p47133463.7z"),
page_ids=range(47001937, 47133464),
darus_id=94943,
sha1="8520922e078a0e8a9f2f815c34df3ed3f81abe9b",
size=233400301,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p47133464-p47263735.7z"),
page_ids=range(47133464, 47263736),
darus_id=94944,
sha1="d45ebb7d6d1a35135de546e1d68e4ba166752e78",
size=228795473,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p47263736-p47390812.7z"),
page_ids=range(47263736, 47390813),
darus_id=94946,
sha1="256f4638ba014cace7440aead0c9b53fff4b3cf3",
size=226954880,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p47390813-p47504961.7z"),
page_ids=range(47390813, 47504962),
darus_id=94947,
sha1="5cc979fae2b3e865bafbe6c17b5d762f7ac1f499",
size=214734455,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p47504962-p47638419.7z"),
page_ids=range(47504962, 47638420),
darus_id=94948,
sha1="594664880f7ac6dfe2d4a699b1f7432eed3d7a71",
size=236813499,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p47638420-p47774069.7z"),
page_ids=range(47638420, 47774070),
darus_id=94951,
sha1="c41e669a2385923f4dfd89719eaa1a42a1a63460",
size=237544523,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p47774070-p47906796.7z"),
page_ids=range(47774070, 47906797),
darus_id=94952,
sha1="5ac554da56aa2160c23973cd28030865696fa870",
size=233162229,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p47906797-p48036547.7z"),
page_ids=range(47906797, 48036548),
darus_id=94953,
sha1="e8dce1d727ce9d8d3ec375c8fcfada16adcdae6a",
size=233341802,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48036548-p48157308.7z"),
page_ids=range(48036548, 48157309),
darus_id=94955,
sha1="0e744b271ac915d028c38d5ec133575a8d2cb9eb",
size=225829647,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48157309-p48211944.7z"),
page_ids=range(48157309, 48211945),
darus_id=94956,
sha1="036ea120b26a8d589101e9a6bdaa552f1ed69d21",
size=144562159,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48211945-p48312321.7z"),
page_ids=range(48211945, 48312322),
darus_id=94957,
sha1="bb500243fab31bd482fe3ae6fde4f06d3f604be7",
size=203088740,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48312322-p48419073.7z"),
page_ids=range(48312322, 48419074),
darus_id=94958,
sha1="70e79c3b6329a6287b5cf9650a564b6d11a4cb88",
size=209945559,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48419074-p48529609.7z"),
page_ids=range(48419074, 48529610),
darus_id=94959,
sha1="91641e772d51a275165101a6e1521f62e0f6a68f",
size=212371145,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48529610-p48641824.7z"),
page_ids=range(48529610, 48641825),
darus_id=94961,
sha1="aab7ba85f5f675ee62429bd2ba3128de1326ff5a",
size=212713965,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48641825-p48744727.7z"),
page_ids=range(48641825, 48744728),
darus_id=94962,
sha1="8af6b7c436444ccf56f5a8c0270195dd4a0e79b2",
size=204608245,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48744728-p48855563.7z"),
page_ids=range(48744728, 48855564),
darus_id=94963,
sha1="08e816d7cd01743c9f7dcd9a8f5e4ebc54720c86",
size=216066847,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48855564-p48971557.7z"),
page_ids=range(48855564, 48971558),
darus_id=94965,
sha1="4ea124badaea031d53fc932bce7b19bef84d6856",
size=222296896,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p48971558-p49088278.7z"),
page_ids=range(48971558, 49088279),
darus_id=94966,
sha1="4fe448e3cc4685b942a8029f060c4548dcd401df",
size=225752957,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p49088279-p49191572.7z"),
page_ids=range(49088279, 49191573),
darus_id=94968,
sha1="f3cf31b376f3e319a6847cb11076e849e2ced6c3",
size=215303530,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p49191573-p49295569.7z"),
page_ids=range(49191573, 49295570),
darus_id=94969,
sha1="0db5526c43662c011ba99f34cbf7a9420fbe0a35",
size=214641910,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p49295570-p49407627.7z"),
page_ids=range(49295570, 49407628),
darus_id=94971,
sha1="7016dbe21fb547d34fc685095f7b91a885d92906",
size=218532667,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p49407628-p49555852.7z"),
page_ids=range(49407628, 49555853),
darus_id=94972,
sha1="655e712d8e6c47fec29855c84c098547bc1eb6b6",
size=249825193,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p49555853-p49680990.7z"),
page_ids=range(49555853, 49680991),
darus_id=94973,
sha1="77e9bc2c3769338c5f759fbdffe984c856601c32",
size=209925639,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p49680991-p49856120.7z"),
page_ids=range(49680991, 49856121),
darus_id=94974,
sha1="5eef340b65fd6b8d34102a1ae1435bc6964e0a32",
size=261531253,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p49856121-p50016141.7z"),
page_ids=range(49856121, 50016142),
darus_id=94976,
sha1="2717ad2b79a9942dca9dfc0a45d2ad6cc2037360",
size=222763130,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p50016142-p50147937.7z"),
page_ids=range(50016142, 50147938),
darus_id=94978,
sha1="e97c3d72bfbbf7cd91f228cc24dbaf32b2e7ca7a",
size=178258520,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p50147938-p50296512.7z"),
page_ids=range(50147938, 50296513),
darus_id=94980,
sha1="d91d6e624ea0ec5dd97def3f602d7a248d471f9d",
size=174472128,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p50296513-p50420873.7z"),
page_ids=range(50296513, 50420874),
darus_id=94981,
sha1="900cd8099e8a702586059e240cf42f82763d7898",
size=154878085,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p50420874-p50526467.7z"),
page_ids=range(50420874, 50526468),
darus_id=94982,
sha1="0cecd8fde7a3a259599c30c37b9f29f8fa21c4f2",
size=137571646,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p50526468-p50636067.7z"),
page_ids=range(50526468, 50636068),
darus_id=94984,
sha1="578e9cbd81391e1f608cda18bb23933db1a08fb5",
size=142802924,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p50636068-p50690485.7z"),
page_ids=range(50636068, 50690486),
darus_id=94985,
sha1="633125995ee0ebe5d90585b839bd5b3c6dbd7d5c",
size=56785731,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p50690486-p50823114.7z"),
page_ids=range(50690486, 50823115),
darus_id=94987,
sha1="fc2decc5764b6f777321cece7a9e16fc11aa34cd",
size=150350358,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p50823115-p50942357.7z"),
page_ids=range(50823115, 50942358),
darus_id=94988,
sha1="62af2f3139b97a998cc23defce595a7e19d19f0b",
size=158286813,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p50942358-p51099019.7z"),
page_ids=range(50942358, 51099020),
darus_id=94989,
sha1="c809900fa4829ed670fdbb8b4affb3894d4e0fa5",
size=225512021,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p51099020-p51222302.7z"),
page_ids=range(51099020, 51222303),
darus_id=94991,
sha1="a51258bac8963017b5a563298ca03db19a75bd3c",
size=213635993,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p51222303-p51337270.7z"),
page_ids=range(51222303, 51337271),
darus_id=94992,
sha1="55365f68ee5aca963a80f0860acb9d4461676afb",
size=269443276,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p51337271-p51401123.7z"),
page_ids=range(51337271, 51401124),
darus_id=94993,
sha1="22e84ae5ff02e0699a83dadce4f24668faaede6e",
size=118258842,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p51401124-p51534901.7z"),
page_ids=range(51401124, 51534902),
darus_id=94995,
sha1="a66d7929dfa100cc6b0905ad2e062f15bb3204ec",
size=235679413,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p51534902-p51657569.7z"),
page_ids=range(51534902, 51657570),
darus_id=94996,
sha1="9a4017b36f5f109996f8453e0f0784ef50ce2c17",
size=220949347,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p51657570-p51724351.7z"),
page_ids=range(51657570, 51724352),
darus_id=94997,
sha1="1fe45cd466c573d7f557ee6639460fa0400af270",
size=121445883,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p51724352-p51801432.7z"),
page_ids=range(51724352, 51801433),
darus_id=94998,
sha1="ce9c8bdf8cb869195f7628de45b445b698b49255",
size=154030913,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p51801433-p51920901.7z"),
page_ids=range(51801433, 51920902),
darus_id=95000,
sha1="b0dcecb9f2dd2727ee5f4da7ab84df499581eb92",
size=223718855,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p51920902-p52047769.7z"),
page_ids=range(51920902, 52047770),
darus_id=95001,
sha1="45233d6db1c2a317160a395f3e6f43c4d6bb5b10",
size=229044138,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p52047770-p52189318.7z"),
page_ids=range(52047770, 52189319),
darus_id=95002,
sha1="168450987dba6e1ce8b1593e287afba43b6776e1",
size=234619139,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p52189319-p52335368.7z"),
page_ids=range(52189319, 52335369),
darus_id=95004,
sha1="8f2c1790bda25ab4c9532e3845f006010dce3d00",
size=243773784,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p52335369-p52437132.7z"),
page_ids=range(52335369, 52437133),
darus_id=95006,
sha1="c51cdf8c6f7d13cc8b4382b6abfece6d150f1e6c",
size=173837873,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p52437133-p52583691.7z"),
page_ids=range(52437133, 52583692),
darus_id=95007,
sha1="450471a5bbb112f5ce73bcbd75b8237f33d35cb4",
size=252240124,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p52583692-p52707566.7z"),
page_ids=range(52583692, 52707567),
darus_id=95008,
sha1="94dbb78a3bf193ec2a84cb477fdb7f9042fc4f32",
size=220151649,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p52707567-p52853042.7z"),
page_ids=range(52707567, 52853043),
darus_id=95010,
sha1="82394e3b220139964d07f6be572d1d753a554576",
size=245192864,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p52853043-p53025666.7z"),
page_ids=range(52853043, 53025667),
darus_id=95011,
sha1="8c17a41aa6e0377c2533ed2b910dfe44c4cc6708",
size=266877945,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p53025667-p53160630.7z"),
page_ids=range(53025667, 53160631),
darus_id=95013,
sha1="e1aaf63088cf0ad76bf4e6e9ac412d6d3d740e16",
size=217929079,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p53160631-p53353101.7z"),
page_ids=range(53160631, 53353102),
darus_id=95014,
sha1="82d06eedee2ef7e1c2dffb7215f9014b92b3610b",
size=276568778,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p53353102-p53469010.7z"),
page_ids=range(53353102, 53469011),
darus_id=95015,
sha1="5ac4120553693e920a7e66e456d576489ce33b9b",
size=208709612,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p53469011-p53614717.7z"),
page_ids=range(53469011, 53614718),
darus_id=95017,
sha1="5800aacc24fb5ef90fd63384253dbaf9affe5ff8",
size=235716253,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p53614718-p53792073.7z"),
page_ids=range(53614718, 53792074),
darus_id=95018,
sha1="8c6ed4bb2d16cd549dab5e6ca9a8c757522e6efa",
size=262926403,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p53792074-p53892668.7z"),
page_ids=range(53792074, 53892669),
darus_id=95019,
sha1="7731b570f2c420353a91cb9b2b466fa44ed35b81",
size=183311226,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p53892669-p53974823.7z"),
page_ids=range(53892669, 53974824),
darus_id=95022,
sha1="5cd5c007ed5606258c8dac232bf0ecb643a4292d",
size=145044112,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p53974824-p54042840.7z"),
page_ids=range(53974824, 54042841),
darus_id=95023,
sha1="6a18bfe1269d4077cbe07b856ede845a54d7ce3f",
size=118072840,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p54042841-p54081671.7z"),
page_ids=range(54042841, 54081672),
darus_id=95024,
sha1="9a8d3cbaf72a9b6a8a43ec1d6bb1905e4482637a",
size=73545331,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p54081672-p54235229.7z"),
page_ids=range(54081672, 54235230),
darus_id=95025,
sha1="ff6634fdec3fa0fecccfbe6c066bd6566401ee64",
size=243369480,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p54235230-p54423927.7z"),
page_ids=range(54235230, 54423928),
darus_id=95026,
sha1="1b91183b3844ccbb4400780f1bd967fe39ae6317",
size=276835012,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p54423928-p54580506.7z"),
page_ids=range(54423928, 54580507),
darus_id=95028,
sha1="abce7fb06d2ec86137297cfb0cb2deb2048b2360",
size=247366471,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p54580507-p54746022.7z"),
page_ids=range(54580507, 54746023),
darus_id=95029,
sha1="c7bfa89b8c58b52873c27066c6aed06503dc6684",
size=263153147,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p54746023-p54911931.7z"),
page_ids=range(54746023, 54911932),
darus_id=95030,
sha1="517d1ea085182e062dbd32fd0c077b0d9ebf855f",
size=266288999,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p54911932-p55030702.7z"),
page_ids=range(54911932, 55030703),
darus_id=95031,
sha1="bcba1f66b4f24ba979f0debba82fdac15ce99609",
size=224287806,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p55030703-p55134250.7z"),
page_ids=range(55030703, 55134251),
darus_id=95033,
sha1="7c4ac6c5d168824035cfb71ddc6beb4f980377ed",
size=201180390,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p55134251-p55255749.7z"),
page_ids=range(55134251, 55255750),
darus_id=95034,
sha1="ce03d8c756d069c0b0d6e3394725a84a0ffb6792",
size=205094181,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix + "p55255750-p55399738.7z"),
page_ids=range(55255750, 55399739),
darus_id=95035,
sha1="2dc7d44b7bbf3874a558982df6a0201cdd95eaad",
size=223543047,
auto_download=auto_download,
),
WikidatedV1_0SortedEntityStreamsFile(
archive_path=dataset_dir / (prefix | |
the setpoint.
Converts the requested energy to the real position of the alio,
and also converts that energy to eV and passes it along to
the vernier.
"""
pseudo_pos = self.PseudoPosition(*pseudo_pos)
energy = pseudo_pos.energy
alio = self.energy_to_alio(energy)
vernier = energy * 1000
return self.RealPosition(alio=alio, vernier=vernier)
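# Unit check for the conversion above: energy is requested in keV, so the
# vernier setpoint is energy * 1000 in eV -- e.g. a request of 9.75 keV gives
# vernier = 9750.0 alongside alio = self.energy_to_alio(9.75) in mm.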
def inverse(self, real_pos: namedtuple) -> namedtuple:
"""
PseudoPositioner interface function for calculating the readback.
Converts the real position of the alio to the calculated energy.
"""
real_pos = self.RealPosition(*real_pos)
alio = real_pos.alio
energy = self.alio_to_energy(alio)
return self.PseudoPosition(energy=energy)
class CCMX(SyncAxis):
"""
Combined motion of the CCM X motors.
You can use this device like a motor, and the position setpoint will be
forwarded to both x motors.
This is used to bring the CCM in and out of the beam.
Parameters
----------
prefix : str, optional
Devices are required to have a positional argument here,
but this is not used. If provided, it should be the same as
down_prefix (x1).
down_prefix : str, required keyword
The prefix for the downstream ccm x translation motor (x1).
up_prefix : str, required keyword
The prefix for the upstream ccm x translation motor (x2).
"""
down = UCpt(IMS, kind='normal',
doc='Downstream ccm x translation motor (x1).')
up = UCpt(IMS, kind='normal',
doc='Upstream ccm x translation motor (x2).')
offset_mode = SyncAxisOffsetMode.STATIC_FIXED
tab_component_names = True
def __init__(
self,
prefix: typing.Optional[str] = None,
**kwargs
):
UCpt.collect_prefixes(self, kwargs)
prefix = prefix or self.unrelated_prefixes['down_prefix']
super().__init__(prefix, **kwargs)
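# A hypothetical instantiation sketch (PV prefixes invented for illustration):
#     ccm_x = CCMX(down_prefix='XPP:MON:MMS:24', up_prefix='XPP:MON:MMS:25',
#                  name='ccm_x')
#     ccm_x.move(5.0)  # one setpoint, forwarded to both x1 and x2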
class CCMY(SyncAxis):
"""
Combined motion of the CCM Y motors.
You can use this device like a motor, and the position setpoint will be
forwarded to all three y motors.
These motors are typically powered off for RP reasons.
Parameters
----------
prefix : str, optional
Devices are required to have a positional argument here,
but this is not used. If provided, it should be the same as
down_prefix (y1).
down_prefix : str, required keyword
The prefix for the downstream ccm y translation motor (y1).
up_north_prefix : str, required keyword
The prefix for the north upstream ccm y translation motor (y2).
up_south_prefix : str, required keyword
The prefix for the south upstream ccm y translation motor (y3).
"""
down = UCpt(IMS, kind='normal',
doc='Downstream ccm y translation motor (y1).')
up_north = UCpt(IMS, kind='normal',
doc='North upstream ccm y translation motor (y2).')
up_south = UCpt(IMS, kind='normal',
doc='South upstream ccm y translation motor (y3).')
offset_mode = SyncAxisOffsetMode.STATIC_FIXED
tab_component_names = True
def __init__(
self,
prefix: typing.Optional[str] = None,
**kwargs
):
UCpt.collect_prefixes(self, kwargs)
prefix = prefix or self.unrelated_prefixes['down_prefix']
super().__init__(prefix, **kwargs)
class CCM(BaseInterface, GroupDevice, LightpathMixin, CCMConstantsMixin):
"""
The full CCM assembly.
This requires a large number of motor PV prefixes to be passed in.
Pay attention to this docstring because most of the arguments are in
the kwargs.
Parameters
----------
prefix : str, optional
Devices are required to have a positional argument here,
but this is not used. If provided, it should be the same as
alio_prefix.
in_pos : float, required keyword
The x position to consider as "inserted" into the beam.
out_pos : float, required keyword
The x position to consider as "removed" from the beam.
alio_prefix : str, required keyword
The PV prefix of the Alio motor, e.g. XPP:MON:MPZ:07A
theta2fine_prefix : str, required keyword
The PV prefix of the motor that controls the fine adjustment
of the second crystal's theta angle.
theta2coarse_prefix : str, required keyword
The PV prefix of the motor that controls the coarse adjustment
of the second crystal's theta angle.
chi2_prefix : str, required keyword
The PV prefix of the motor that controls the adjustment
of the second crystal's chi angle.
x_down_prefix : str, required keyword
The prefix for the downstream ccm x translation motor (x1).
x_up_prefix : str, required keyword
The prefix for the upstream ccm x translation motor (x2).
y_down_prefix : str, required keyword
The prefix for the downstream ccm y translation motor (y1).
y_up_north_prefix : str, required keyword
The prefix for the north upstream ccm y translation motor (y2).
y_up_south_prefix : str, required keyword
The prefix for the south upstream ccm y translation motor (y3).
"""
energy = Cpt(
CCMEnergy, '', kind='hinted',
doc=(
'PseudoPositioner that moves the alio in '
'terms of the calculated CCM energy.'
),
)
energy_with_vernier = Cpt(
CCMEnergyWithVernier, '', kind='normal',
doc=(
'PseudoPositioner that moves the alio in '
'terms of the calculated CCM energy while '
'also requesting a vernier move.'
),
)
alio = UCpt(CCMAlio, kind='normal',
doc='The motor that rotates the CCM crystal.')
theta2fine = UCpt(
CCMMotor, atol=0.01, kind='normal',
doc=(
'The motor that controls the fine adjustment '
'of the second crystal theta angle.'
),
)
theta2coarse = UCpt(
CCMPico, kind='normal',
doc=(
'The motor that controls the coarse adjustment '
'of the second crystal theta angle.'
),
)
chi2 = UCpt(
CCMPico, kind='normal',
doc=(
'The motor that controls the adjustment of the '
'second crystal chi angle.'
),
)
x = UCpt(CCMX, add_prefix=[], kind='normal',
doc='Combined motion of the CCM X motors.')
y = UCpt(CCMY, add_prefix=[], kind='normal',
doc='Combined motion of the CCM Y motors.')
lightpath_cpts = ['x']
tab_whitelist = ['x1', 'x2', 'y1', 'y2', 'y3', 'E', 'E_Vernier',
'th2fine', 'alio2E', 'E2alio', 'alio', 'home',
'kill', 'insert', 'remove', 'inserted', 'removed']
_in_pos: float
_out_pos: float
def __init__(
self,
*,
prefix: typing.Optional[str] = None,
in_pos: float,
out_pos: float,
**kwargs
):
UCpt.collect_prefixes(self, kwargs)
self._in_pos = in_pos
self._out_pos = out_pos
prefix = prefix or self.unrelated_prefixes['alio_prefix']
super().__init__(prefix, **kwargs)
# Aliases: defined by the scientists
self.x1 = self.x.down
self.x2 = self.x.up
self.y1 = self.y.down
self.y2 = self.y.up_north
self.y3 = self.y.up_south
self.E = self.energy.energy
self.E.readback.name = f'{self.name}E'
self.E_Vernier = self.energy_with_vernier.energy
self.E_Vernier.readback.name = f'{self.name}E_Vernier'
self.th2coarse = self.theta2coarse
self.th2fine = self.theta2fine
self.alio2E = self.energy.alio_to_energy
self.E2alio = self.energy.energy_to_alio
self.home = self.alio.home
self.kill = self.alio.kill
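# A hypothetical construction sketch; only alio_prefix matches the docstring
# example, every other prefix and position below is invented:
#     ccm = CCM(in_pos=3.3, out_pos=13.18,
#               alio_prefix='XPP:MON:MPZ:07A',
#               theta2fine_prefix='XPP:MON:MPZ:08A',
#               theta2coarse_prefix='XPP:MON:PIC:05',
#               chi2_prefix='XPP:MON:PIC:06',
#               x_down_prefix='XPP:MON:MMS:24',
#               x_up_prefix='XPP:MON:MMS:25',
#               y_down_prefix='XPP:MON:MMS:26',
#               y_up_north_prefix='XPP:MON:MMS:27',
#               y_up_south_prefix='XPP:MON:MMS:28',
#               name='ccm')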
def format_status_info(self, status_info: dict[str, typing.Any]) -> str:
"""
Define how we're going to format the state of the CCM for the user.
"""
# Pull out the numbers we want and format them, or show N/A if failed
alio = get_status_float(status_info, 'alio', 'position', precision=4)
theta = get_status_float(status_info, 'energy', 'theta_deg', 'value',
precision=3)
wavelength = get_status_float(status_info, 'energy', 'wavelength',
'value', precision=4)
energy = get_status_float(status_info, 'energy', 'energy', 'position',
precision=4)
res_mm = get_status_float(status_info, 'energy', 'resolution', 'value',
scale=1e3, precision=1)
res_um = get_status_float(status_info, 'energy', 'resolution', 'value',
precision=2)
x_down = get_status_float(status_info, 'x', 'down', 'position',
precision=3)
x_up = get_status_float(status_info, 'x', 'up', 'position',
precision=3)
try:
xavg = np.average([float(x_down), float(x_up)])
xavg = f'{xavg:.3f}'
# float() raises ValueError on an 'N/A' string and TypeError on None
except (TypeError, ValueError):
xavg = 'N/A'
# Fill out the text
text = f'alio (mm): {alio}\n'
text += f'angle (deg): {theta}\n'
text += f'lambda (A): {wavelength}\n'
text += f'Energy (keV): {energy}\n'
text += f'res (eV/mm): {res_mm}\n'
text += f'res (eV/um): {res_um}\n'
text += f'x @ (mm): {xavg} [x1,x2={x_down},{x_up}]\n'
return text
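# With hypothetical numbers, the block above renders as:
#     alio (mm): 1.2345
#     angle (deg): 12.345
#     lambda (A): 1.0332
#     Energy (keV): 12.0000
#     res (eV/mm): 1234.5
#     res (eV/um): 1.23
#     x @ (mm): 5.000 [x1,x2=4.999,5.001]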
def _set_lightpath_states(
self,
lightpath_values: dict[OphydObject, dict[str, typing.Any]],
) -> None:
"""
Update the fields used by the lightpath to determine in/out.
Compares the x position with the saved in and out values.
"""
x_pos = lightpath_values[self.x]['value']
self._inserted = np.isclose(x_pos, self._in_pos)
self._removed = np.isclose(x_pos, self._out_pos)
if self._removed:
self._transmission = 1
else:
# Placeholder "small attenuation" value
self._transmission = 0.9
def insert(self, wait: bool = False) -> MoveStatus:
"""
Move the x motors to the saved "in" position.
Parameters
----------
wait : bool, optional
If True, wait for the move to complete.
If False, return without waiting.
Returns
-------
move_status : MoveStatus
A status object that tells you information about the
success/failure/completion status of the move.
"""
return self.x.move(self._in_pos, wait=wait)
def remove(self, wait: bool = False) -> MoveStatus:
"""
Move the x motors to the saved "out" position.
Parameters
----------
wait : bool, optional
If True, wait for the move to complete.
If False, return without waiting.
Returns
-------
move_status : MoveStatus
A status object that tells you information about the
success/failure/completion status of the move.
"""
return self.x.move(self._out_pos, wait=wait)
# Calculations between alio position and energy, with all intermediates.
def theta_to_alio(theta: float, theta0: float, gr: float, gd: float) -> float:
"""
Converts theta angle (rad) to alio position (mm).
Theta_B: scattering angle, the angle | |
# Copyright 2015, 2017 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from pypowervm import adapter as adp
from pypowervm import exceptions as exc
from pypowervm.tasks import cna
from pypowervm.tests import test_fixtures as fx
from pypowervm.tests.test_utils import test_wrapper_abc as twrap
from pypowervm.wrappers import entry_wrapper as ewrap
from pypowervm.wrappers import logical_partition as pvm_lpar
from pypowervm.wrappers import network as pvm_net
VSWITCH_FILE = 'fake_vswitch_feed.txt'
VNET_FILE = 'fake_virtual_network_feed.txt'
class TestCNA(twrap.TestWrapper):
"""Unit Tests for creating Client Network Adapters."""
mock_adapter_fx_args = {'traits': fx.RemoteHMCTraits}
file = VSWITCH_FILE
wrapper_class_to_test = pvm_net.VSwitch
@mock.patch('pypowervm.tasks.cna._find_or_create_vnet')
def test_crt_cna(self, mock_vnet_find):
"""Tests the creation of Client Network Adapters."""
# Create a side effect that can validate the input into the create
# call.
def validate_of_create(*kargs, **kwargs):
self.assertIsNotNone(kargs[0])
self.assertEqual('LogicalPartition', kargs[1])
self.assertEqual('fake_lpar', kwargs.get('root_id'))
self.assertEqual('ClientNetworkAdapter', kwargs.get('child_type'))
return pvm_net.CNA.bld(self.adpt, 1, 'href').entry
self.adpt.create.side_effect = validate_of_create
self.adpt.read.return_value = self.resp
n_cna = cna.crt_cna(self.adpt, None, 'fake_lpar', 5)
self.assertIsNotNone(n_cna)
self.assertIsInstance(n_cna, pvm_net.CNA)
self.assertEqual(1, mock_vnet_find.call_count)
@mock.patch('pypowervm.tasks.cna._find_or_create_vnet')
def test_crt_cna_no_vnet_crt(self, mock_vnet_find):
"""Tests the creation of Client Network Adapters.
The virtual network creation shouldn't be done in this flow.
"""
# PVMish Traits
self.adptfx.set_traits(fx.LocalPVMTraits)
self.adpt.read.return_value = self.resp
# Create a side effect that can validate the input into the create
# call.
def validate_of_create(*kargs, **kwargs):
self.assertIsNotNone(kargs[0])
self.assertEqual('LogicalPartition', kargs[1])
self.assertEqual('fake_lpar', kwargs.get('root_id'))
self.assertEqual('ClientNetworkAdapter', kwargs.get('child_type'))
return pvm_net.CNA.bld(self.adpt, 1, 'href').entry
self.adpt.create.side_effect = validate_of_create
n_cna = cna.crt_cna(self.adpt, None, 'fake_lpar', 5, slot_num=1)
self.assertIsNotNone(n_cna)
self.assertIsInstance(n_cna, pvm_net.CNA)
self.assertEqual(0, mock_vnet_find.call_count)
def test_find_or_create_vswitch(self):
"""Validates that a vswitch can be created."""
self.adpt.read.return_value = self.resp
# Test that it finds the right vSwitch
vswitch_w = cna._find_or_create_vswitch(self.adpt, 'ETHERNET0', True)
self.assertIsNotNone(vswitch_w)
# Create a side effect that can validate the input into the create call
def validate_of_create(*kargs, **kwargs):
self.assertIsNotNone(kargs[0])
# Validate that this is the vSwitch create call
self.assertEqual('ManagedSystem', kargs[1])
self.assertEqual('VirtualSwitch', kwargs.get('child_type'))
# Return a previously created vSwitch...
return self.dwrap.entry
self.adpt.create.side_effect = validate_of_create
# Test the create
vswitch_w = cna._find_or_create_vswitch(self.adpt, 'Temp', True)
self.assertIsNotNone(vswitch_w)
self.assertTrue(self.adpt.create.called)
# Make sure that if the create flag is set to false, an error is thrown
# when the vswitch can't be found.
self.assertRaises(exc.Error, cna._find_or_create_vswitch, self.adpt,
'Temp', False)
class TestVNET(twrap.TestWrapper):
mock_adapter_fx_args = {'traits': fx.RemoteHMCTraits}
file = VNET_FILE
wrapper_class_to_test = pvm_net.VNet
def test_find_or_create_vnet(self):
"""Tests that the virtual network can be found/created."""
self.adpt.read.return_value = self.resp
fake_vs = mock.Mock()
fake_vs.switch_id = 0
fake_vs.name = 'ETHERNET0'
fake_vs.related_href = ('https://9.1.2.3:12443/rest/api/uom/'
'ManagedSystem/'
'67dca605-3923-34da-bd8f-26a378fc817f/'
'VirtualSwitch/'
'ec8aaa54-9837-3c23-a541-a4e4be3ae489')
# This should find a vnet.
vnet_resp = cna._find_or_create_vnet(self.adpt, '2227', fake_vs)
self.assertIsNotNone(vnet_resp)
# Now flip to a CNA that requires a create...
resp = adp.Response('reqmethod', 'reqpath', 'status', 'reason', {})
resp.entry = ewrap.EntryWrapper._bld(
self.adpt, tag='VirtualNetwork').entry
self.adpt.create.return_value = resp
vnet_resp = cna._find_or_create_vnet(self.adpt, '2228', fake_vs)
self.assertIsNotNone(vnet_resp)
self.assertEqual(1, self.adpt.create.call_count)
def test_find_free_vlan(self):
"""Tests that a free VLAN can be found."""
self.adpt.read.return_value = self.resp
# Mock data specific to the VNET File
fake_vs = mock.Mock()
fake_vs.name = 'ETHERNET0'
fake_vs.related_href = ('https://9.1.2.3:12443/rest/api/uom/'
'ManagedSystem/'
'67dca605-3923-34da-bd8f-26a378fc817f/'
'VirtualSwitch/'
'ec8aaa54-9837-3c23-a541-a4e4be3ae489')
self.assertEqual(1, cna._find_free_vlan(self.adpt, fake_vs))
@mock.patch('pypowervm.wrappers.network.VNet.wrap')
def test_find_free_vlan_mocked(self, mock_vnet_wrap):
"""Uses lots of mock data for a find vlan."""
self.adpt.read.return_value = mock.Mock()
# Helper function to build the vnets.
def build_mock_vnets(max_vlan, vswitch_uri):
vnets = []
for x in range(1, max_vlan + 1):
vnets.append(mock.Mock(vlan=x,
associated_switch_uri=vswitch_uri))
return vnets
mock_vswitch = mock.Mock(related_href='test_vs')
# Test when all the vnet's are on a single switch.
mock_vnet_wrap.return_value = build_mock_vnets(3000, 'test_vs')
self.assertEqual(3001, cna._find_free_vlan(self.adpt, mock_vswitch))
# Test with multiple switches. The second vswitch with a higher vlan
# should not impact the vswitch we're searching for.
mock_vnet_wrap.return_value = (build_mock_vnets(2000, 'test_vs') +
build_mock_vnets(4000, 'test_vs2'))
self.assertEqual(2001, cna._find_free_vlan(self.adpt, mock_vswitch))
# Test when all the VLANs are consumed
mock_vnet_wrap.return_value = build_mock_vnets(4094, 'test_vs')
self.assertRaises(exc.Error, cna._find_free_vlan, self.adpt,
mock_vswitch)
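# The 4094 ceiling exercised above is the 802.1Q limit: VLAN IDs are 12-bit
# values with 0 and 4095 reserved, so a vswitch with VLANs 1..4094 all in use
# genuinely has no free VLAN and the task raises exc.Error.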
@mock.patch('pypowervm.tasks.cna._find_free_vlan')
def test_assign_free_vlan(self, mock_find_vlan):
mock_find_vlan.return_value = 2016
mocked = mock.MagicMock()
mock_cna = mock.MagicMock(pvid=31, enabled=False)
mock_cna.update.return_value = mock_cna
updated_cna = cna.assign_free_vlan(mocked, mocked, mocked, mock_cna)
self.assertEqual(2016, updated_cna.pvid)
self.assertEqual(mock_cna.enabled, updated_cna.enabled)
updated_cna = cna.assign_free_vlan(mocked, mocked, mocked, mock_cna,
ensure_enabled=True)
self.assertEqual(True, updated_cna.enabled)
@mock.patch('pypowervm.wrappers.network.CNA.bld')
@mock.patch('pypowervm.tasks.cna._find_free_vlan')
@mock.patch('pypowervm.tasks.cna._find_or_create_vswitch')
@mock.patch('pypowervm.tasks.partition.get_partitions')
def test_crt_p2p_cna(
self, mock_get_partitions, mock_find_or_create_vswitch,
mock_find_free_vlan, mock_cna_bld):
"""Tests the crt_p2p_cna."""
# Mock out the data
mock_vswitch = mock.Mock(related_href='vswitch_href')
mock_find_or_create_vswitch.return_value = mock_vswitch
mock_find_free_vlan.return_value = 2050
# Mock the get of the VIOSes
mock_vio1 = mock.Mock(uuid='src_io_host_uuid')
mock_vio2 = mock.Mock(uuid='vios_uuid2')
mock_get_partitions.return_value = [mock_vio1, mock_vio2]
mock_cna = mock.MagicMock()
mock_trunk1, mock_trunk2 = mock.MagicMock(pvid=2050), mock.MagicMock()
mock_trunk1.create.return_value = mock_trunk1
mock_cna_bld.side_effect = [mock_trunk1, mock_trunk2, mock_cna]
# Invoke the create
mock_ext_ids = {'test': 'value', 'test2': 'value2'}
client_adpt, trunk_adpts = cna.crt_p2p_cna(
self.adpt, None, 'lpar_uuid',
['src_io_host_uuid', 'vios_uuid2'], mock_vswitch, crt_vswitch=True,
slot_num=1, mac_addr='aabbccddeeff', ovs_bridge='br-ex',
ovs_ext_ids=mock_ext_ids, configured_mtu=1450)
# Make sure the client and trunk were 'built'
mock_cna_bld.assert_any_call(self.adpt, 2050, 'vswitch_href',
slot_num=1, mac_addr='aabbccddeeff')
mock_cna_bld.assert_any_call(
self.adpt, 2050, 'vswitch_href', trunk_pri=1, dev_name=None,
ovs_bridge='br-ex', ovs_ext_ids=mock_ext_ids, configured_mtu=1450)
mock_cna_bld.assert_any_call(
self.adpt, 2050, 'vswitch_href', trunk_pri=2, dev_name=None,
ovs_bridge='br-ex', ovs_ext_ids=mock_ext_ids, configured_mtu=1450)
# Make sure they were then created
self.assertIsNotNone(client_adpt)
self.assertEqual(2, len(trunk_adpts))
mock_cna.create.assert_called_once_with(
parent_type=pvm_lpar.LPAR, parent_uuid='lpar_uuid')
mock_trunk1.create.assert_called_once_with(parent=mock_vio1)
mock_trunk2.create.assert_called_once_with(parent=mock_vio2)
@mock.patch('pypowervm.wrappers.network.CNA.bld')
@mock.patch('pypowervm.tasks.cna._find_free_vlan')
@mock.patch('pypowervm.tasks.cna._find_or_create_vswitch')
@mock.patch('pypowervm.tasks.partition.get_partitions')
def test_crt_p2p_cna_single(
self, mock_get_partitions, mock_find_or_create_vswitch,
mock_find_free_vlan, mock_cna_bld):
"""Tests the crt_p2p_cna with the mgmt lpar and a dev_name."""
# Mock out the data
mock_vswitch = mock.Mock(related_href='vswitch_href')
mock_find_or_create_vswitch.return_value = mock_vswitch
mock_find_free_vlan.return_value = 2050
# Mock the get of the VIOSes
mock_vio1 = mock.Mock(uuid='mgmt_lpar_uuid')
mock_vio2 = mock.Mock(uuid='vios_uuid2')
mock_get_partitions.return_value = [mock_vio1, mock_vio2]
mock_cna = mock.MagicMock()
mock_trunk1 = mock.MagicMock(pvid=2050)
mock_trunk1.create.return_value = mock_trunk1
mock_cna_bld.side_effect = [mock_trunk1, mock_cna]
# Invoke the create
client_adpt, trunk_adpts = cna.crt_p2p_cna(
self.adpt, None, 'lpar_uuid',
['mgmt_lpar_uuid'], mock_vswitch, crt_vswitch=True,
mac_addr='aabbccddeeff', dev_name='tap-12345')
# Make sure the client and trunk were 'built'
mock_cna_bld.assert_any_call(self.adpt, 2050, 'vswitch_href',
mac_addr='aabbccddeeff', slot_num=None)
mock_cna_bld.assert_any_call(
self.adpt, 2050, 'vswitch_href', trunk_pri=1, dev_name='tap-12345',
ovs_bridge=None, ovs_ext_ids=None, configured_mtu=None)
# Make sure they were then created
self.assertIsNotNone(client_adpt)
self.assertEqual(1, len(trunk_adpts))
mock_cna.create.assert_called_once_with(
parent_type=pvm_lpar.LPAR, parent_uuid='lpar_uuid')
mock_trunk1.create.assert_called_once_with(parent=mock_vio1)
@mock.patch('pypowervm.wrappers.network.CNA.bld')
@mock.patch('pypowervm.tasks.cna._find_free_vlan')
@mock.patch('pypowervm.tasks.cna._find_or_create_vswitch')
@mock.patch('pypowervm.tasks.partition.get_partitions')
def test_crt_trunk_with_free_vlan(
self, mock_get_partitions, mock_find_or_create_vswitch,
mock_find_free_vlan, mock_cna_bld):
"""Tests the crt_trunk_with_free_vlan on mgmt based VIOS."""
# Mock out the data
mock_vswitch = mock.Mock(related_href='vswitch_href')
mock_find_or_create_vswitch.return_value = mock_vswitch
mock_find_free_vlan.return_value = 2050
# Mock the get of the VIOSes.
mock_vio1 = mock.Mock(uuid='vios_uuid1')
mock_get_partitions.return_value = [mock_vio1]
mock_trunk1 = mock.MagicMock(pvid=2050)
mock_trunk1.create.return_value = mock_trunk1
mock_cna_bld.return_value = mock_trunk1
# Invoke the create
mock_ext_id = {'test1': 'value1', 'test2': 'value2'}
trunk_adpts = cna.crt_trunk_with_free_vlan(
self.adpt, None, ['vios_uuid1'],
mock_vswitch, crt_vswitch=True, dev_name='tap-12345',
ovs_bridge='br-int', ovs_ext_ids=mock_ext_id, configured_mtu=1450)
# Make sure the client and trunk were 'built'
mock_cna_bld.assert_any_call(
self.adpt, 2050, 'vswitch_href', trunk_pri=1, dev_name='tap-12345',
ovs_bridge='br-int', ovs_ext_ids=mock_ext_id, configured_mtu=1450)
# Make sure that the trunk was created
self.assertEqual(1, len(trunk_adpts))
mock_trunk1.create.assert_called_once_with(parent=mock_vio1)
@mock.patch('pypowervm.wrappers.network.CNA.get')
def test_find_trunk_on_lpar(self, mock_cna_get):
parent_wrap = mock.MagicMock()
m1 = mock.Mock(is_trunk=True, pvid=2, vswitch_id=2)
m2 = mock.Mock(is_trunk=False, pvid=3, vswitch_id=2)
m3 = mock.Mock(is_trunk=True, pvid=3, vswitch_id=1)
m4 = mock.Mock(is_trunk=True, pvid=3, vswitch_id=2)
mock_cna_get.return_value = [m1, m2, m3]
self.assertIsNone(cna._find_trunk_on_lpar(self.adpt, parent_wrap, m4))
self.assertTrue(mock_cna_get.called)
mock_cna_get.reset_mock()
mock_cna_get.return_value = [m1, m2, m3, m4]
self.assertEqual(m4, cna._find_trunk_on_lpar(self.adpt, parent_wrap,
m4))
self.assertTrue(mock_cna_get.called)
@mock.patch('pypowervm.tasks.cna._find_trunk_on_lpar')
@mock.patch('pypowervm.tasks.partition.get_mgmt_partition')
@mock.patch('pypowervm.wrappers.virtual_io_server.VIOS.get')
def test_find_trunks(self, mock_vios_get, mock_get_mgmt,
mock_find_trunk):
# Mocked responses can be simple, since they are just fed into the
# _find_trunk_on_lpar
mock_vios_get.return_value = [mock.MagicMock(), mock.MagicMock()]
mock_get_mgmt.return_value = mock.MagicMock()
# The responses back from the find trunk. Make it an odd trunk
# priority ordering to make sure we sort properly
v1 = mock.Mock(trunk_pri=3)
c1, c2 = mock.Mock(trunk_pri=1), mock.Mock(trunk_pri=2)
mock_find_trunk.side_effect = [v1, c1, c2]
# Invoke the method.
resp = cna.find_trunks(self.adpt, mock.Mock(pvid=2))
# Make sure there were three calls to the find trunk
self.assertEqual(3, mock_find_trunk.call_count)
# Order of the response is important. Should be based off of trunk
# priority
self.assertEqual([c1, c2, v1], resp)
@mock.patch('pypowervm.wrappers.network.CNA.get')
def test_find_all_trunks_on_lpar(self, mock_cna_get):
parent_wrap = mock.MagicMock()
m1 = mock.Mock(is_trunk=True, vswitch_id=2)
m2 = mock.Mock(is_trunk=False, vswitch_id=2)
m3 = mock.Mock(is_trunk=True, vswitch_id=1)
m4 = mock.Mock(is_trunk=True, vswitch_id=2)
mock_cna_get.return_value = [m1, m2, m3, m4]
expected = [m1, m3, m4]
self.assertEqual(expected, cna._find_all_trunks_on_lpar(self.adpt,
parent_wrap))
mock_cna_get.reset_mock()
mock_cna_get.return_value = [m1, m2, m3, m4]
self.assertEqual([m3],
cna._find_all_trunks_on_lpar(self.adpt,
parent_wrap=parent_wrap,
vswitch_id=1))
@mock.patch('pypowervm.wrappers.network.CNA.get')
@mock.patch('pypowervm.wrappers.virtual_io_server.VIOS.get')
@mock.patch('pypowervm.wrappers.logical_partition.LPAR.get')
def test_find_cna_wraps(self, mock_lpar_get, mock_vios_get, mock_cna_get):
# Mocked responses are simple since they are only used for
# pvm_net.CNA.get
mock_lpar_get.return_value = [mock.MagicMock()]
mock_vios_get.return_value = [mock.MagicMock()]
# Mocked cna_wraps
m1 = mock.Mock(uuid=2, pvid=2, vswitch_id=2)
m2 = mock.Mock(uuid=3, pvid=1, vswitch_id=1)
m3 = mock.Mock(uuid=1, pvid=1, vswitch_id=1)
mock_cna_get.side_effect = [[m1, m2], [m3]]
mock_trunk = mock.Mock(adapter=self.adpt, uuid=1, pvid=1, vswitch_id=1)
self.assertEqual([m1, m2, m3], cna._find_cna_wraps(mock_trunk))
mock_cna_get.side_effect = [[m1, m2], [m3]]
self.assertEqual([m2, m3], cna._find_cna_wraps(mock_trunk, 1))
@mock.patch('pypowervm.tasks.cna._find_cna_wraps')
def test_find_cnas_on_trunk(self, mock_find_wraps):
# Mocked cna_wraps
m1 | |
+= '#EXTINF:-1 tvg-id="%s" tvg-name="%s" tvg-logo="%s/%s/%s.png" channel-id="%s" group-title="LIVE",%s\n' % (
chan_map[pos].channum, chan_map[pos].channame, SERVER_HOST, SERVER_PATH, chan_map[pos].channum,
chan_map[pos].channum,
prog.title)
new_playlist += '%s\n' % channel_url
except Exception:
logger.exception("Exception while updating kodi playlist on channel #%s." % pos)
new_playlist += '#EXTINF:-1 tvg-id="static_refresh" tvg-name="Static Refresh" tvg-logo="%s/%s/empty.png" channel-id="0" group-title="Static RTMP",Static Refresh\n' % (
SERVER_HOST, SERVER_PATH)
new_playlist += '%s/%s/refresh.m3u8\n' % (SERVER_HOST, SERVER_PATH)
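# Sketch of one rendered entry pair (channel values hypothetical; the second
# line comes from channel_url, whose exact shape is built earlier):
#     #EXTINF:-1 tvg-id="44" tvg-name="ESPN" tvg-logo="http://host/sstv/44.png" channel-id="44" group-title="LIVE",SportsCenter
#     <channel_url>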
logger.info("Built Kodi playlist")
# if ADDONPATH and os.path.isdir(ADDONPATH):
# #lazy install, low priority tbh
# tree = ET.parse(os.path.join(ADDONPATH, 'settings.xml'))
# root = tree.getroot()
# for child in root:
# if child.attrib['id'] == 'epgUrl':
# child.attrib['value'] = '%s/%s/epg.xml' % (SERVER_HOST, SERVER_PATH)
# elif child.attrib['id'] == 'm3uUrl':
# child.attrib['value'] = '%s/%s/kodi.m3u8' % (SERVER_HOST, SERVER_PATH)
# elif child.attrib['id'] == 'epgPathType':
# child.attrib['value'] = '1'
# elif child.attrib['id'] == 'm3uPathType':
# child.attrib['value'] = '1'
# tree.write(os.path.join(ADDONPATH, 'settings.xml'))
return new_playlist
def rescan_channels():
credentials = str.encode(KODIUSER + ':' + KODIPASS)
encoded_credentials = base64.b64encode(credentials)
authorization = b'Basic ' + encoded_credentials
apiheaders = {'Content-Type': 'application/json', 'Authorization': authorization}
apidata = {"jsonrpc": "2.0", "method": "Addons.SetAddonEnabled",
"params": {"addonid": "pvr.iptvsimple", "enabled": "toggle"}, "id": 1}
apiurl = 'http://%s:%s/jsonrpc' % (request.environ.get('REMOTE_ADDR'), KODIPORT)
json_data = json.dumps(apidata)
post_data = json_data.encode('utf-8')
apirequest = urllib.request.Request(apiurl, post_data, apiheaders)
# has to happen twice to toggle off then back on
result = urllib.request.urlopen(apirequest)
result = urllib.request.urlopen(apirequest)
logger.info("Forcing Kodi to rescan, result:%s " % result.read())
############################################################
# Html
############################################################
# Change this to change the style of the web page generated
style = """
<style type="text/css">
body { background: white url("https://guide.smoothstreams.tv/assets/images/channels/150.png") no-repeat fixed center center; background-size: 500px 500px; color: black; }
h1 { color: white; background-color: black; padding: 0.5ex }
h2 { color: white; background-color: black; padding: 0.3ex }
.container {display: table; width: 100%;}
.left-half {position: absolute; left: 0px; width: 50%;}
.right-half {position: absolute; right: 0px; width: 50%;}
</style>
"""
def create_menu():
footer = '<p>Donations: PayPal to <EMAIL> or BTC - 19qvdk7JYgFruie73jE4VvW7ZJBv8uGtFb</p>'
with open("./cache/settings.html", "w") as html:
html.write("""<html>
<head>
<meta charset="UTF-8">
%s
<title>YAP</title>
</head>
<body>\n""" % (style,))
html.write('<section class="container"><div class="left-half">')
html.write("<h1>YAP Settings</h1>")
template = "<a href='{1}/{2}/{0}.html'>{3}</a>"
html.write("<p>" + " ".join(
template.format(page, SERVER_HOST, SERVER_PATH, label)
for page, label in (("settings", "Options"), ("howto", "Instructions"),
("channels", "Channels List"), ("adv_settings", "Advanced Settings"),
("paths", "Proxy Paths"))) + "</p>")
html.write('<form action="%s/%s/handle_data" method="post">' % (SERVER_HOST, SERVER_PATH))
channelmap = {}
chanindex = 0
list = ["Username", "Password", "Quality", "Stream", "Server", "Service", "IP", "Port",
"ExternalIP", "ExternalPort"]
html.write('<table width="300" border="2">')
for setting in settings_list:
if setting.lower() == 'service':
html.write('<tr><td>Service:</td><td><select name="Service" size="1">')
for option in providerList:
html.write('<option value="%s"%s>%s</option>' % (
option[0], ' selected' if SITE == option[1] else "", option[0]))
html.write('</select></td></tr>')
elif setting.lower() == 'server':
html.write('<tr><td>Server:</td><td><select name="Server" size="1">')
for option in serverList:
html.write('<option value="%s"%s>%s</option>' % (
option[0], ' selected' if SRVR == option[1] else "", option[0]))
html.write('</select></td></tr>')
elif setting.lower() == 'stream':
html.write('<tr><td>Stream:</td><td><select name="Stream" size="1">')
for option in streamtype:
html.write(
'<option value="%s"%s>%s</option>' % (option, ' selected' if STRM == option else "", option))
html.write('</select></td></tr>')
elif setting.lower() == 'quality':
html.write('<tr><td>Quality:</td><td><select name="Quality" size="1">')
for option in qualityList:
html.write('<option value="%s"%s>%s</option>' % (
option[0], ' selected' if QUAL == option[1] else "", option[0]))
html.write('</select></td></tr>')
elif setting.lower() == 'password':
html.write('<tr><td>%s:</td><td><input name="%s" type="Password" value="%s"></td></tr>' % (
setting, setting, PASS))
else:
val = "Unknown"
if setting == "Username":
val = USER
elif setting == "IP":
val = LISTEN_IP
elif setting == "Port":
val = LISTEN_PORT
elif setting == "ExternalIP":
val = EXTIP
elif setting == "ExternalPort":
val = EXTPORT
html.write(
'<tr><td>%s:</td><td><input name="%s" type="text" value="%s"></td></tr>' % (setting, setting, val))
html.write('</table>')
html.write('<input type="submit" value="Submit">')
html.write('</form>')
html.write("<p>You are running version (%s %s), the latest is %s</p>" % (type, __version__, latest_ver))
html.write("</br><p>Restarts can take a while, it is not immediate.</p>")
html.write('<form action="%s/%s/handle_data" method="post">' % (SERVER_HOST, SERVER_PATH))
html.write('<input type="hidden" name="restart" value="1">')
html.write('<input type="submit" value="Restart">')
html.write('</form>')
html.write('<form action="%s/%s/handle_data" method="post">' % (SERVER_HOST, SERVER_PATH))
html.write('<input type="hidden" name="restart" value="2">')
html.write('<input type="submit" value="Update + Restart">')
html.write('</form>')
html.write('<form action="%s/%s/handle_data" method="post">' % (SERVER_HOST, SERVER_PATH))
html.write('<input type="hidden" name="restart" value="3">')
devname = latestfile.replace('master', 'dev')
html.write('<input type="submit" value="Update(Dev Branch) + Restart">')
html.write('</form>')
html.write("<p><a href='%s'>Manual Download Master link</a></p>" % latestfile)
html.write("<p><a href='%s'>Manual Download Dev link</a></p>" % devname)
# html.write('<p> </p>')
# html.write('<p> </p>')
html.write('<p> </p>')
html.write('<p> </p>')
html.write(footer)
html.write('</div><div class="right-half"><h1>YAP Outputs</h1>')
html.write("<table><tr><td rowspan='2'>Standard Outputs</td><td>m3u8 - %s/playlist.m3u8</td></tr>" % urljoin(
SERVER_HOST, SERVER_PATH))
html.write("<tr><td>EPG - %s/epg.xml</td></tr>" % urljoin(SERVER_HOST, SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write(
"<tr><td>Sports Playlist</td><td>%s/sports.m3u8</td></tr>" % urljoin(SERVER_HOST, SERVER_PATH))
html.write(
"<tr><td>Sports EPG (Alternative)</td><td>%s/sports.xml</td></tr>" % urljoin(SERVER_HOST, SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write(
"<tr><td>Kodi RTMP supported</td><td>m3u8 - %s/kodi.m3u8</td></tr>" % urljoin(SERVER_HOST, SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write("<tr><td rowspan='2'>Plex Live<sup>1</sup></td><td>Tuner - %s</td></tr>" % urljoin(SERVER_HOST,
SERVER_PATH))
html.write("<tr><td>EPG - %s/epg.xml</td></tr>" % urljoin(SERVER_HOST, SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write("<tr><td>TVHeadend<sup>1</sup></td><td>%s/tvh.m3u8</td></tr>" % urljoin(SERVER_HOST, SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write(
"<tr><td rowspan='2'>Remote Internet access<sup>2</sup></td><td>m3u8 - %s/external.m3u8</td></tr>" % urljoin(
EXT_HOST, SERVER_PATH))
html.write("<tr><td>EPG - %s/epg.xml</td></tr>" % urljoin(EXT_HOST, SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write(
"<tr><td rowspan='2'>Combined Outputs<sup>2</sup></td><td>m3u8 - %s/combined.m3u8</td></tr>" % urljoin(
SERVER_HOST, SERVER_PATH))
html.write("<tr><td>epg - %s/combined.xml</td></tr>" % urljoin(SERVER_HOST, SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write(
"<tr><td>Static Playlist</td><td>m3u8 - %s/static.m3u8</td></tr>" % urljoin(SERVER_HOST, SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write(
"<tr><td rowspan='2'>TVHProxy<sup>3</sup></td><td>Tuner - %s</td></tr>" % urljoin(SERVER_HOST, 'tvh'))
html.write("<tr><td>EPG - http://%s:9981/xmltv/channels</td></tr>" % TVHURL)
html.write("<tr><td> </td><td> </td></tr>")
html.write("<tr><td>Test Playlist for troubleshooting</td><td>%s/test.m3u8</td></tr>" % urljoin(SERVER_HOST,
SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write(
"<tr><td>Dynamic xspf, includes currently showing programs</td><td>%s/playlist.xspf</td></tr>" % urljoin(
SERVER_HOST,
SERVER_PATH))
html.write("<tr><td>Static xspf</td><td>%s/static.xspf</td></tr>" % urljoin(SERVER_HOST,
SERVER_PATH))
html.write("<tr><td> </td><td> </td></tr>")
html.write("<tr><td>Note 1:</td><td>Requires FFMPEG installation and setup</td></tr>")
html.write("<tr><td>Note 2:</td><td>Requires External IP and port in advancedsettings</td></tr>")
html.write("<tr><td>Note 3:</td><td>Requires TVH proxy setup in advancedsettings</td></tr></table>")
html.write("</div></section></body></html>\n")
with open("./cache/adv_settings.html", "w") as html:
html.write("""<html>
<head>
<meta charset="UTF-8">
%s
<title>YAP</title>
</head>
<body>\n""" % (style,))
html.write('<section class="container"><div class="left-half">')
html.write("<h1>YAP Settings</h1>")
template = "<a href='{1}/{2}/{0}.html'>{3}</a>"
        links = [("settings", "Options"), ("howto", "Instructions"),
                 ("channels", "Channels List"), ("adv_settings", "Advanced Settings"),
                 ("paths", "Proxy Paths")]
        html.write("<p>" + " ".join(template.format(page, SERVER_HOST, SERVER_PATH, label)
                                    for page, label in links) + "</p>")
html.write('<form action="%s/%s/handle_data" method="post">' % (SERVER_HOST, SERVER_PATH))
channelmap = {}
chanindex = 0
adv_set = ["kodiuser", "kodipass", "ffmpegloc", "kodiport", "extram3u8url", "extram3u8name", "extram3u8file",
"extraxmlurl", "tvhredirect", "tvhaddress", "tvhuser", "tvhpass", "overridexml", "checkchannel",
"pipe"]
html.write('<table width="300" border="2">')
for setting in adv_set:
if setting.lower() == 'kodipass':
html.write('<tr><td>%s:</td><td><input name="%s" type="Password" value="%s"></td></tr>' % (
setting, setting, KODIPASS))
elif setting == "checkchannel":
html.write(
'<tr><td>%s:</td><td><select name="%s" size="1"><option value="True" %s>Enabled</option><option value="False" %s>Disabled</option></select></td></tr>' % (
setting, setting, ' selected' if CHECK_CHANNEL == True else "",
' selected' if CHECK_CHANNEL == False else ""))
elif setting == "pipe":
html.write(
'<tr><td>%s:</td><td><select name="%s" size="1"><option value="True" %s>Enabled</option><option value="False" %s>Disabled</option></select></td></tr>' % (
setting, setting, ' selected' if PIPE == True else "", ' selected' if PIPE == False else ""))
            else:
                # Map each advanced setting to its current global value.
                values = {
                    "kodiuser": KODIUSER, "kodiport": KODIPORT,
                    "ffmpegloc": FFMPEGLOC, "extram3u8url": EXTM3URL,
                    "extram3u8file": EXTM3UFILE, "extram3u8name": EXTM3UNAME,
                    "extraxmlurl": EXTXMLURL, "tvhredirect": TVHREDIRECT,
                    "tvhaddress": TVHURL, "tvhuser": TVHUSER,
                    "tvhpass": TVHPASS, "overridexml": OVRXML,
                }
                val = values.get(setting, "Unknown")
                # The ffmpeg location setting only applies on Windows.
                if not (setting == "ffmpegloc" and platform.system() != 'Windows'):
                    html.write('<tr><td>%s:</td><td><input name="%s" type="text" value="%s"></td></tr>' % (
                        setting, setting, val))
html.write('</table>')
html.write('<input type="submit" value="Submit">')
html.write('</form>')
html.write("<p>You are running version (%s %s), the latest is %s</p>" % (type, __version__, latest_ver))
html.write("</br><p>Restarts can take a while, it is not immediate.</p>")
html.write('<form action="%s/%s/handle_data" method="post">' % (SERVER_HOST, SERVER_PATH))
html.write('<input type="hidden" name="restart" value="1">')
html.write('<input type="submit" value="Restart">')
html.write('</form>')
html.write('<form action="%s/%s/handle_data" method="post">' % (SERVER_HOST, SERVER_PATH))
html.write('<input type="hidden" name="restart" value="2">')
html.write('<input type="submit" value="Update + Restart">')
html.write('</form>')
html.write('<form action="%s/%s/handle_data" method="post">' % (SERVER_HOST, SERVER_PATH))
html.write('<input type="hidden" name="restart" value="3">')
html.write('<input type="submit" value="Update(Dev Branch) + Restart">')
html.write('</form>')
html.write('<p> </p>')
html.write('<p> </p>')
html.write('<p> </p>')
html.write('<p> </p>')
html.write(footer)
html.write("</div></section></body></html>\n")
with open("./cache/channels.html", "w") as html:
global chan_map
html.write("""<html><head><title>YAP</title><meta charset="UTF-8">%s</head><body>\n""" % (style,))
html.write("<h1>Channel List and Upcoming Shows</h1>")
template = "<a href='{1}/{2}/{0}.html'>{3}</a>"
        links = [("settings", "Options"), ("howto", "Instructions"),
                 ("channels", "Channels List"), ("adv_settings", "Advanced Settings"),
                 ("paths", "Proxy Paths")]
        html.write("<p>" + " ".join(template.format(page, SERVER_HOST, SERVER_PATH, label)
                                    for page, label in links) + "</p>")
html.write("<a href='https://guide.smoothstreams.tv/'>Click here to go to the SmoothStreams Official Guide</a>")
html.write('<section class="container"><div class="left-half"><table width="300" border="1">')
template = "<td>{0}</td><td><a href='{2}/{3}/playlist.m3u8?ch={0}'><img src='{2}/{3}/{0}.png'></a></td></td>"
for i in chan_map:
if i % 5 == 1:
html.write("<tr>")
html.write(template.format(chan_map[i].channum, chan_map[i].channame, SERVER_HOST, SERVER_PATH))
if i % 5 == 0:
html.write("</tr>")
html.write("</table>")
html.write("</br>%s</div>" % footer)
html.write('<div class="right-half"><h3>Coming up</h3>')
template = "{0} - <a href='{2}/{3}/playlist.m3u8?ch={0}'>{1}</a></br>"
for i in chan_map:
prog = getProgram(i)
if prog.title != 'none':
try:
html.write(
template.format(chan_map[i].channum, str(prog.title).encode('utf-8'), SERVER_HOST, SERVER_PATH))
                except Exception:
                    logger.exception(prog.title)
html.write("</div></section>")
html.write("</body></html>\n")
with open("./cache/index.html", "w") as html:
html.write("""<html><head><title>YAP</title><meta charset="UTF-8">%s</head><body>\n""" % (style,))
template = "<h2><a href='{1}/{2}/{0}.html'>{3}</a></h2>"
html.write("<h1>Welcome to YAP!</h1>")
html.write(template.format("settings", SERVER_HOST, SERVER_PATH, "Options"))
html.write(template.format("howto", SERVER_HOST, SERVER_PATH, "Instructions"))
html.write(template.format("channels", SERVER_HOST, SERVER_PATH, "Channels List"))
html.write(template.format("adv_settings", SERVER_HOST, SERVER_PATH, "Advanced Settings"))
html.write(template.format("paths", SERVER_HOST, SERVER_PATH, "Proxy Paths"))
html.write(footer)
html.write("</body></html>\n")
with open("./cache/howto.html", "w") as html:
html.write("""<html><head><title>YAP</title><meta charset="UTF-8">%s</head><body>\n""" % (style,))
template = "<a href='{1}/{2}/{0}.html'>{3}</a>"
html.write("<h1>Welcome to YAP!</h1>")
        links = [("settings", "Options"), ("howto", "Instructions"),
                 ("channels", "Channels List"), ("adv_settings", "Advanced Settings")]
        html.write("<p>" + " ".join(template.format(page, SERVER_HOST, SERVER_PATH, label)
                                    for page, label in links) + "</p>")
html.write("<h2>Work in progress.</h2>")
html.write("""<h2>Commandline Arguments</h2></br><p>'install' - forces recreation of the install function which creates certain files, such as the tvh internal grabber</br></br>
'headless' - uses command line for initial setup rather than gui</br></br>
'tvh' - each call to a piped channel will return channel 01 which is a 24/7 channel so will always generate a positive result, this allows TVH to create all services</p></br>""")
html.write(
"<h2><a href='https://seo-michael.co.uk/how-to-setup-livetv-pvr-simple-xbmc-kodi/'>Kodi Setup</a></h2>")
html.write("<p>Use this information to populate the settings:</p>")
html.write("<p>m3u8 - %s/kodi.m3u8</p>" % urljoin(SERVER_HOST, SERVER_PATH))
html.write("<p>EPG - %s/epg.xml</p>" % urljoin(SERVER_HOST, SERVER_PATH))
        html.write(
            '''<p>RTMP is an issue, so there's a special playlist for it (kodi.m3u8); it has two of every channel, in both RTMP and HLS. In Kodi TV, use the left-hand menu and select group or filter, then choose dynamic (forced HLS) or static RTMP. For the static_refresh channel (151), don't use it on the guide page; use it on the channel list page, otherwise Kodi will crash. It will lock Kodi for about 20 seconds but refreshes the playlist.</p>''')
        html.write("<h2>Ensure you can get YAP working in Kodi or VLC first before attempting Plex or TVHeadend!</h2>")
html.write("<h2><a href='https://imgur.com/a/OZkN0'>Plex Setup</a></h2>")
html.write("<p></p>")
html.write("<h2>TVHeadend | |
def skip_parens(s, i):
    '''Skips from the opening ( to the matching ).
    If no matching ) is found, i is set to len(s).'''
    level = 0; n = len(s)
    assert(g.match(s, i, '('))
while i < n:
c = s[i]
if c == '(':
level += 1; i += 1
elif c == ')':
level -= 1
if level <= 0: return i
i += 1
elif c == '\'' or c == '"': i = g.skip_string(s, i)
elif g.match(s, i, "//"): i = g.skip_to_end_of_line(s, i)
elif g.match(s, i, "/*"): i = g.skip_block_comment(s, i)
else: i += 1
return i
#@+node:ekr.20031218072017.3163: *4* skip_pascal_begin_end
def skip_pascal_begin_end(s, i):
'''Skips from begin to matching end.
If found, i points to the end. Otherwise, i >= len(s)
The end keyword matches begin, case, class, record, and try.'''
assert(g.match_c_word(s, i, "begin"))
level = 1; i = g.skip_c_id(s, i) # Skip the opening begin.
while i < len(s):
ch = s[i]
if ch == '{':
i = g.skip_pascal_braces(s, i)
elif ch == '"' or ch == '\'':
i = g.skip_pascal_string(s, i)
elif g.match(s, i, "//"):
i = g.skip_line(s, i)
elif g.match(s, i, "(*"):
i = g.skip_pascal_block_comment(s, i)
elif g.match_c_word(s, i, "end"):
level -= 1
if level == 0:
return i
i = g.skip_c_id(s, i)
elif g.is_c_id(ch):
j = i; i = g.skip_c_id(s, i); name = s[j: i]
if name in ["begin", "case", "class", "record", "try"]:
level += 1
else:
i += 1
return i
#@+node:ekr.20031218072017.3164: *4* skip_pascal_block_comment
# Scans past a pascal comment delimited by (* and *).
def skip_pascal_block_comment(s, i):
j = i
assert(g.match(s, i, "(*"))
i = s.find("*)", i)
if i > -1:
return i + 2
g.scanError("Run on comment" + s[j: i])
return len(s)
#@+node:ekr.20031218072017.3165: *4* skip_pascal_string : called by tangle
def skip_pascal_string(s, i):
j = i; delim = s[i]; i += 1
assert(delim == '"' or delim == '\'')
while i < len(s):
if s[i] == delim:
return i + 1
i += 1
g.scanError("Run on string: " + s[j: i])
return i
#@+node:ekr.20031218072017.3166: *4* skip_heredoc_string : called by php import (<NAME>)
#@+at 08-SEP-2002 DTHEIN: added function skip_heredoc_string
# A heredoc string in PHP looks like:
#
# <<<EOS
# This is my string.
# It is mine. I own it.
# No one else has it.
# EOS
#
# It begins with <<< plus a token (named like a PHP variable name).
# It ends with the token on a line by itself (the token must start in the
# first column).
#
#@@c
def skip_heredoc_string(s, i):
j = i
assert(g.match(s, i, "<<<"))
    m = re.match(r"<<<([a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*)", s[i:])
if m is None:
i += 3
return i
# 14-SEP-2002 DTHEIN: needed to add \n to find word, not just string
delim = m.group(1) + '\n'
i = g.skip_line(s, i) # 14-SEP-2002 DTHEIN: look after \n, not before
n = len(s)
while i < n and not g.match(s, i, delim):
i = g.skip_line(s, i) # 14-SEP-2002 DTHEIN: move past \n
if i >= n:
g.scanError("Run on string: " + s[j: i])
elif g.match(s, i, delim):
i += len(delim)
return i
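# A worked example of the scanner above (illustrative only):
#   s = "<<<EOS\nhi\nEOS\n"
#   g.skip_heredoc_string(s, 0)  # matches the token EOS, skips the body and
#                                # the closing "EOS\n" line, returns 14 == len(s)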
#@+node:ekr.20031218072017.3167: *4* skip_pp_directive
# Now handles continuation lines and block comments.
def skip_pp_directive(s, i):
while i < len(s):
if g.is_nl(s, i):
if g.escaped(s, i): i = g.skip_nl(s, i)
else: break
elif g.match(s, i, "//"): i = g.skip_to_end_of_line(s, i)
elif g.match(s, i, "/*"): i = g.skip_block_comment(s, i)
else: i += 1
return i
#@+node:ekr.20031218072017.3168: *4* skip_pp_if
# Skips an entire if or if def statement, including any nested statements.
def skip_pp_if(s, i):
start_line = g.get_line(s, i) # used for error messages.
assert(
g.match_word(s, i, "#if") or
g.match_word(s, i, "#ifdef") or
g.match_word(s, i, "#ifndef"))
i = g.skip_line(s, i)
i, delta1 = g.skip_pp_part(s, i)
i = g.skip_ws(s, i)
if g.match_word(s, i, "#else"):
i = g.skip_line(s, i)
i = g.skip_ws(s, i)
i, delta2 = g.skip_pp_part(s, i)
if delta1 != delta2:
g.es("#if and #else parts have different braces:", start_line)
i = g.skip_ws(s, i)
if g.match_word(s, i, "#endif"):
i = g.skip_line(s, i)
else:
g.es("no matching #endif:", start_line)
return i, delta1
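# For orientation, a small example (illustrative only):
#   s = "#ifdef FOO\n{ a(); }\n#endif\n"
#   g.skip_pp_if(s, 0)  # -> (len(s), 0): scans past the #endif line; the
#                       #    brace delta is 0 since '{' and '}' cancel out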
#@+node:ekr.20031218072017.3169: *4* skip_pp_part
# Skip to an #else or #endif. The caller has eaten the #if, #ifdef, #ifndef or #else
def skip_pp_part(s, i):
delta = 0
while i < len(s):
c = s[i]
if g.match_word(s, i, "#if") or g.match_word(s, i, "#ifdef") or g.match_word(s, i, "#ifndef"):
i, delta1 = g.skip_pp_if(s, i)
delta += delta1
elif g.match_word(s, i, "#else") or g.match_word(s, i, "#endif"):
return i, delta
elif c == '\'' or c == '"': i = g.skip_string(s, i)
elif c == '{':
delta += 1; i += 1
elif c == '}':
delta -= 1; i += 1
elif g.match(s, i, "//"): i = g.skip_line(s, i)
elif g.match(s, i, "/*"): i = g.skip_block_comment(s, i)
else: i += 1
return i, delta
#@+node:ekr.20031218072017.3170: *4* skip_python_string
def skip_python_string(s, i, verbose=True):
if g.match(s, i, "'''") or g.match(s, i, '"""'):
j = i; delim = s[i] * 3; i += 3
k = s.find(delim, i)
if k > -1: return k + 3
if verbose:
g.scanError("Run on triple quoted string: " + s[j: i])
return len(s)
# 2013/09/08: honor the verbose argument.
return g.skip_string(s, i, verbose=verbose)
#@+node:ekr.20031218072017.2369: *4* skip_string (leoGlobals)
def skip_string(s, i, verbose=True):
'''Scan forward to the end of a string.
New in Leo 4.4.2 final: give error only if verbose is True'''
j = i; delim = s[i]; i += 1
assert(delim == '"' or delim == '\'')
n = len(s)
while i < n and s[i] != delim:
if s[i] == '\\': i += 2
else: i += 1
if i >= n:
if verbose:
g.scanError("Run on string: " + s[j: i])
elif s[i] == delim:
i += 1
return i
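# Example (illustrative): g.skip_string('"abc" x', 0) returns 5, the index just
# past the closing quote; backslash-escaped characters inside are stepped over.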
#@+node:ekr.20031218072017.3171: *4* skip_to_semicolon
# Skips to the next semicolon that is not in a comment or a string.
def skip_to_semicolon(s, i):
n = len(s)
while i < n:
c = s[i]
if c == ';':
return i
if c == '\'' or c == '"':
i = g.skip_string(s, i)
elif g.match(s, i, "//"):
i = g.skip_to_end_of_line(s, i)
elif g.match(s, i, "/*"):
i = g.skip_block_comment(s, i)
else:
i += 1
return i
#@+node:ekr.20031218072017.3172: *4* skip_typedef
def skip_typedef(s, i):
n = len(s)
while i < n and g.is_c_id(s[i]):
i = g.skip_c_id(s, i)
i = g.skip_ws_and_nl(s, i)
if g.match(s, i, '{'):
i = g.skip_braces(s, i)
i = g.skip_to_semicolon(s, i)
return i
#@+node:ekr.20031218072017.3173: *3* Scanners: no error messages
#@+node:ekr.20031218072017.3174: *4* escaped
# Returns True if s[i] is preceded by an odd number of backslashes.
def escaped(s, i):
count = 0
while i - 1 >= 0 and s[i - 1] == '\\':
count += 1
i -= 1
return (count % 2) == 1
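# Example (illustrative): for s = 'ab\\"' (a, b, one backslash, a quote),
# g.escaped(s, 3) is True since the quote is preceded by one backslash;
# with two backslashes before the quote it would be False.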
#@+node:ekr.20031218072017.3175: *4* find_line_start
def find_line_start(s, i):
'''Return the index in s of the start of the line containing s[i].'''
if i < 0:
return 0 # New in Leo 4.4.5: add this defensive code.
# bug fix: 11/2/02: change i to i+1 in rfind
i = s.rfind('\n', 0, i + 1) # Finds the highest index in the range.
return 0 if i == -1 else i + 1
# if i == -1: return 0
# else: return i + 1
#@+node:ekr.20031218072017.3176: *4* find_on_line
def find_on_line(s, i, pattern):
j = s.find('\n', i)
if j == -1: j = len(s)
k = s.find(pattern, i, j)
return k
#@+node:ekr.20031218072017.3177: *4* is_c_id
def is_c_id(ch):
return g.isWordChar(ch)
#@+node:ekr.20031218072017.3178: *4* is_nl
def is_nl(s, i):
return i < len(s) and (s[i] == '\n' or s[i] == '\r')
#@+node:ekr.20031218072017.3179: *4* g.is_special
def is_special(s, directive):
    '''Return a tuple (flag, offset): flag is True if the body text contains the @ directive.'''
assert(directive and directive[0] == '@')
lws = directive in ("@others", "@all")
# Most directives must start the line.
pattern = r'^\s*(%s\b)' if lws else r'^(%s\b)'
pattern = re.compile(pattern % directive, re.MULTILINE)
m = re.search(pattern, s)
if m:
return True, m.start(1)
return False, -1
#@+node:ekr.20031218072017.3180: *4* is_ws & is_ws_or_nl
def is_ws(c):
return c == '\t' or c == ' '
def is_ws_or_nl(s, i):
return g.is_nl(s, i) or (i < len(s) and g.is_ws(s[i]))
#@+node:ekr.20031218072017.3181: *4* match
# Warning: this code makes no assumptions about what follows pattern.
def match(s, i, pattern):
return s and pattern and s.find(pattern, i, i + len(pattern)) == i
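# Example (illustrative): g.match("abcdef", 2, "cd") is True and
# g.match("abcdef", 2, "cx") is False; nothing after the pattern is inspected.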
#@+node:ekr.20031218072017.3182: *4* match_c_word
def match_c_word(s, i, name):
n = len(name)
    return (
        name and
        name == s[i: i + n] and
        (i + n == len(s) or not g.is_c_id(s[i + n])))
import kivy
kivy.require('1.9.1') # replace with your current kivy version !
############
# To install the garden components:
# C:\Users\<NAME>\Downloads\WinPython-64bit-3.5.2.3Qt5\python-3.5.2.amd64\Scripts
# https://docs.scipy.org/doc/numpy/f2py/index.html
# !python garden install filename
############
from kivy.app import App
from kivy.lang import Builder
from kivy.config import Config
from kivy.core.window import Window
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import ListProperty
from kivy.uix.widget import Widget
import numpy as np
np.set_printoptions(threshold=np.inf)  # print full arrays (np.nan is rejected by newer numpy)
#from kivy.app import App
#from kivy.uix.floatlayout import FloatLayout
from kivy.factory import Factory
#from kivy.properties import ObjectProperty
from kivy.uix.popup import Popup
import os
#from kivy.uix.screenmanager import ScreenManager, Screen, FadeTransition
"""Simple widget to display a matplolib figure in kivy"""
#from kivy.uix.widget import Widget
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.backend_bases import NavigationToolbar2
from kivy.graphics.texture import Texture
from kivy.properties import ObjectProperty
from kivy.base import EventLoop
import math
from kivy.graphics import Mesh
Config.set('graphics', 'fullscreen', 1)
Window.size = (700,600)
Config.set('graphics','resizable',0)
printa=0
if printa:
print(Window.size)
if 0:
fullscreen=0
if fullscreen:
Window.size = (Window._get_width(),Window._get_height())
if printa:
print(os.getcwd())
class LblTxt(BoxLayout):
from kivy.properties import ObjectProperty
theTxt = ObjectProperty(None)
"""
class CheckLista(BoxLayout):
from kivy.uix.checkbox import CheckBox
CheckForm = CheckBox()
"""
#https://github.com/jeysonmc/kivy_matplotlib/blob/master/README.md
#http://pythonmobile.blogspot.it/2014/06/21-checkboxes.html
class MatplotFigure(Widget):
"""Widget to show a matplotlib figure in kivy.
The figure is rendered internally in an AGG backend then
the rgb data is obtained and blitted into a kivy texture.
"""
figure = ObjectProperty(None)
_box_pos = ListProperty([0, 0])
_box_size = ListProperty([0, 0])
_img_texture = ObjectProperty(None)
_bitmap = None
_pressed = False
figcanvas = ObjectProperty(None)
    # I chose composition over multiple inheritance because of name clashes
def on_figure(self, obj, value):
self.figcanvas = _FigureCanvas(self.figure, self)
self.figcanvas._isDrawn = False
l, b, w, h = self.figure.bbox.bounds
#print(l,b,w,h)
w = int(math.ceil(w))
h = int(math.ceil(h))
self.width = w
self.height = h
# Texture
self._img_texture = Texture.create(size=(w, h))
def __init__(self, figure=None, *args, **kwargs):
super(MatplotFigure, self).__init__(*args, **kwargs)
self.figure = figure
# Event binding
EventLoop.window.bind(mouse_pos=self.on_mouse_move)
self.bind(size=self._onSize)
def _draw_bitmap(self):
if self._bitmap is None:
print("No bitmap!")
return
self._img_texture = Texture.create(size=(self.bt_w, self.bt_h))
self._img_texture.blit_buffer(
self._bitmap, colorfmt="rgb", bufferfmt='ubyte')
self._img_texture.flip_vertical()
def on_mouse_move(self, window, mouse_pos):
""" Mouse move """
if self._pressed: # Do not process this event if there's a touch_move
return
x, y = mouse_pos
if self.collide_point(x, y):
real_x, real_y = x - self.pos[0], y - self.pos[1]
self.figcanvas.motion_notify_event(real_x, real_y, guiEvent=None)
def on_touch_down(self, event):
x, y = event.x, event.y
if self.collide_point(x, y):
self._pressed = True
real_x, real_y = x - self.pos[0], y - self.pos[1]
self.figcanvas.button_press_event(real_x, real_y, 1, guiEvent=event)
def on_touch_move(self, event):
""" Mouse move while pressed """
x, y = event.x, event.y
if self.collide_point(x, y):
real_x, real_y = x - self.pos[0], y - self.pos[1]
self.figcanvas.motion_notify_event(real_x, real_y, guiEvent=event)
def on_touch_up(self, event):
x, y = event.x, event.y
if self._box_size[0] > 1 or self._box_size[1] > 1:
self.reset_box()
if self.collide_point(x, y):
pos_x, pos_y = self.pos
real_x, real_y = x - pos_x, y - pos_y
self.figcanvas.button_release_event(real_x, real_y, 1, guiEvent=event)
self._pressed = False
def new_timer(self, *args, **kwargs):
pass # TODO
def _onSize(self, o, size):
if self.figure is None:
return
        # Create a new, correctly sized bitmap
self._width, self._height = size
self._isDrawn = False
if self._width <= 1 or self._height <= 1:
return
dpival = self.figure.dpi
winch = self._width / dpival
hinch = self._height / dpival
self.figure.set_size_inches(winch, hinch)
self.figcanvas.resize_event()
self.figcanvas.draw()
def reset_box(self):
self._box_size = 0, 0
self._box_pos = 0, 0
def draw_box(self, event, x0, y0, x1, y1):
pos_x, pos_y = self.pos
# Kivy coords
y0 = pos_y + y0
y1 = pos_y + y1
self._box_pos = x0, y0
self._box_size = x1 - x0, y1 - y0
class _FigureCanvas(FigureCanvasAgg):
"""Internal AGG Canvas"""
def __init__(self, figure, widget, *args, **kwargs):
self.widget = widget
super(_FigureCanvas, self).__init__(figure, *args, **kwargs)
def draw(self):
"""
Render the figure using agg.
"""
super(_FigureCanvas, self).draw()
agg = self.get_renderer()
w, h = agg.width, agg.height
self._isDrawn = True
self.widget.bt_w = w
self.widget.bt_h = h
self.widget._bitmap = agg.tostring_rgb()
self.widget._draw_bitmap()
def blit(self, bbox=None):
# TODO bbox
agg = self.get_renderer()
w, h = agg.width, agg.height
self.widget._bitmap = agg.tostring_rgb()
self.widget.bt_w = w
self.widget.bt_h = h
self.widget._draw_bitmap()
#def print_figure(self,filename, *args, **kwargs):
#http://stackoverflow.com/questions/17538235/unable-to-save-matplotlib-figure-figure-canvas-is-none
#http://answers.elteacher.info/questions/post/229454/plot-radec-polygons-with-astropy-wcs-aplpy-fitsfigure-ask-question.html
#https://www.google.it/search?q=kivy+super+print_figure&ie=utf-8&oe=utf-8&client=firefox-b-ab&gfe_rd=cr&ei=jHGxWO2YK_CEygWStrPADQ
#https://github.com/dbuscombe-usgs/lobos/blob/master/kivy_matplotlib.py
"""
finchenonlomettiapposto=0
if finchenonlomettiapposto:
super(self.print_figure, self).print_figure(filename, *args, **kwargs)
if self._isDrawn:
self.draw()
"""
class MatplotNavToolbar(BoxLayout):
"""Figure Toolbar"""
pan_btn = ObjectProperty(None)
zoom_btn = ObjectProperty(None)
home_btn = ObjectProperty(None)
info_lbl = ObjectProperty(None)
_navtoolbar = None # Internal NavToolbar logic
figure_widget = ObjectProperty(None)
def __init__(self, figure_widget=None, *args, **kwargs):
super(MatplotNavToolbar, self).__init__(*args, **kwargs)
self.figure_widget = figure_widget
def on_figure_widget(self, obj, value):
self.figure_widget.bind(figcanvas=self._canvas_ready)
def _canvas_ready(self, obj, value):
self._navtoolbar = _NavigationToolbar(value, self)
self._navtoolbar.figure_widget = obj
class _NavigationToolbar(NavigationToolbar2):
figure_widget = None
def __init__(self, canvas, widget):
self.widget = widget
super(_NavigationToolbar, self).__init__(canvas)
def _init_toolbar(self):
self.widget.home_btn.bind(on_press=self.home)
        self.widget.pan_btn.bind(on_press=self.pan)
self.widget.zoom_btn.bind(on_press=self.zoom)
def dynamic_update(self):
self.canvas.draw()
def draw_rubberband(self, event, x0, y0, x1, y1):
self.figure_widget.draw_box(event, x0, y0, x1, y1)
def set_message(self, s):
self.widget.info_lbl.text = s
class LoadDialog(BoxLayout):
load = ObjectProperty(None)
cancel = ObjectProperty(None)
class SaveDialog(BoxLayout):
save = ObjectProperty(None)
cancel = ObjectProperty(None)
Factory.register('LoadDialog', cls=LoadDialog)
Factory.register('SaveDialog', cls=SaveDialog)
Factory.register('MatplotFigure', cls=MatplotFigure)
Factory.register('MatplotNavToolbar', cls=MatplotNavToolbar)
if __name__ == '__main__':
# Example
import matplotlib as mpl
import numpy as np
class CalcolatriceApp(App):
##########################################################################
loadfile = ObjectProperty(None)
savefile = ObjectProperty(None)
text_input = ObjectProperty(None)
def build_mesh(self):
from math import sin, cos, pi
""" returns a Mesh of a rough circle. """
vertices = []
indices = []
step = 10
istep = (pi * 2) / float(step)
for i in range(step):
x = 300 + cos(istep * i) * 100
y = 300 + sin(istep * i) * 100
vertices.extend([x, y, 0, 0])
indices.append(i)
"""
Mesh:
vertices: (x1, y1, s1, v1, x2, y2, s2, v2, x3, y3, s3, v3...)
indices: (1, 2, 3...)
texture: some_texture
rgba: 1,1,1,1
mode: some_mode
"""
            # returns a closed colored area
return Mesh(vertices=vertices, indices=indices, mode='triangle_fan')
#return Mesh(vertices=vertices, indices=indices, mode='line_loop')
def dismiss_popup(self):
self._popup.dismiss()
def show_load(self):
content = LoadDialog(load=self.load, cancel=self.dismiss_popup)
self._popup = Popup(title="Carica File", content=content,
size_hint=(0.9, 0.9))
self._popup.open()
def show_save(self):
content = SaveDialog(save=self.save, cancel=self.dismiss_popup)
self._popup = Popup(title="Salva File", content=content,
size_hint=(0.9, 0.9))
self._popup.open()
def load(self, path, filename):
self.stringa=np.asmatrix(np.genfromtxt(os.path.join(path, filename[0]),delimiter=","))
print(self.stringa)
print(filename)
self.vada=np.size(self.stringa,0)-1
#print(self.vada)
self.root.ids.nomArch.theTxt.text=filename[0]
fig = mpl.figure.Figure(figsize=(self.mmma, self.mmmb))
axes = fig.gca()
from calcolatrice.stampafigura import disegna
disegna(self,self.stringa)
figure_wgt = self.root.ids['figure_wgt'] # MatplotFigure
figure_wgt.figure = fig
#with open(os.path.join(path, filename[0])) as stream:
#self.text_input.text = stream.read()
self.dismiss_popup()
def save(self, path, filename):
#with open(, 'w') as stream:
nome=self.root.ids.nomArch.theTxt.text
#print("dd"+nome+"dd")
strada=os.getcwd()+"\\" + nome
#print(os.getcwd())
#print(os.path.join(path, filename[0]))
#stream.write(self.stringa)
#print(strada)
np.savetxt(strada, self.stringa, delimiter=',', newline='\n')
self.dismiss_popup()
def salvaauto(self,*args):
if self.vada>0:
nome=self.root.ids.nomArch.theTxt.text
estensione=".csv"
strada=os.getcwd()+"\\" + nome
nomeTemp=nome
if nome=="":
k=0
nomeTemp="ciccione"+"0"+str(k)+str(estensione)
strada=os.getcwd()+"\\"+nomeTemp
while os.path.isfile(strada)==True:
nomeTemp="ciccione"+"0"+str(k)+str(estensione)
strada=os.getcwd()+"\\"+nomeTemp
k=k+1
#print(strada)
np.savetxt(strada, self.stringa, delimiter=',', newline='\n')
self.root.ids.nomArch.theTxt.text=nomeTemp
##########################################################################
title = "Disegnatore di Biancardi"
#stringa= MatrixProperty()
#Status=StringProperty()
def UndoZZZZ(self,*args):
if self.vada>0:
self.vada=self.vada-1
self.stringa=self.stringa[:-1,:]
fig = mpl.figure.Figure(figsize=(self.mmma, self.mmmb))
axes = fig.gca()
figure_wgt = self.root.ids['figure_wgt'] # MatplotFigure
figure_wgt.figure = fig
from calcolatrice.stampafigura import disegna
disegna(self,self.stringa)
self.root.ids.risoluzione.text="figure inserite %d"%self.vada
#self.stringa=np.matrix("42015.,3.,1.,48.,0.,0.,0.,0.,0.,0.,0.;4.,1.,0.,0.,0.,0.,6.,10.,6.,10.,0.;2.,-1.,0.,4.,0.,3.,0.,3.1415,0.,0.,0.")
def Resetta(self,*args):
if self.vada>0:
self.stringa=self.iniziale
#self.root.ids.schifo.text=print(self.stringa)
#print(self.stringa)
self.vada=0
#self.root.ids.schifo.text=""
self.root.ids.risoluzione.text="resettato"
fig = mpl.figure.Figure(figsize=(self.mmma, self.mmmb))
fig.clf()
figure_wgt = self.root.ids['figure_wgt'] # MatplotFigure
figure_wgt.figure = fig
self.root.ids.risoluzione.text="figure inserite %d"%self.vada
def SalvaDisegno(self,*args):
if self.vada>0:
#print(self.root.ids.figure_wgt.figure.figure)
#print(self.root.ids.figure_wgt.figure.bbox.bounds)
#print(self.root.ids.figure_wgt.figure.dpi)
#self.root.ids.figure_wgt.figure.savefig(filename)
nome=self.root.ids.nomArch.theTxt.text
estensione=".png"
strada=os.getcwd()+"\\" + nome
nomeTemp=nome
if nome=="":
k=0
nomeTemp="ciccione"+"0"+str(k)+estensione
strada=os.getcwd()+"\\"+nomeTemp
while os.path.isfile(strada)==True:
nomeTemp="ciccione"+"0"+str(k)+estensione
strada=os.getcwd()+"\\"+nomeTemp
k=k+1
#print(strada)
self.root.ids.nomArch.theTxt.text=nomeTemp
self.root.ids.figure_wgt.export_to_png(self.root.ids.nomArch.theTxt.text)
#from matplotlib.backends.backend_pdf import PdfPages
#with PdfPages('multipage_pdf.pdf') as pdf:
#pdf.savefig(self.root.ids.figure_wgt.figure)
def BottonePremutoNocciolo(self,*args):
if self.vuoifareancheilnocciolo==0:
self.vuoifareancheilnocciolo=1
self.iniziale[0,2]=1
elif self.vuoifareancheilnocciolo==1:
self.vuoifareancheilnocciolo=0
self.iniziale[0,2]=0
print('The checkbox is active')
def build(self):
# Matplotlib stuff, figure and plot
fig = mpl.figure.Figure(figsize=(self.mmma, self.mmmb))
axes = fig.gca()
#axes.set_xlim(0, 50)
#axes.grid(True)
#fig.clf()
axes.axis("on")
#axes.set_xlim(0, 50)
#axes.set_aspect('equal')
# Kivy stuff
root = Builder.load_file("nuovaForma.kv")
figure_wgt = root.ids['figure_wgt'] # MatplotFigure
figure_wgt.figure = fig
self.root=root
self.root.ids.risoluzione.text="figure inserite %d"%self.vada
return root
def __init__(self, **kwargs):
super(CalcolatriceApp, self).__init__(**kwargs)
self.mmma=2
self.mmmb=2
self.vada=0
self.scalatore=1000
self.kk=3
self.discretizzaarchicerchi=80
self.vuoifareancheilnocciolo=1
self.contorna=1
self.vuota=np.matrix("0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.")
self.iniziale=np.matrix("0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.")
self.iniziale[0,0]=42015.
self.iniziale[0,1]=self.kk
self.iniziale[0,2]=self.vuoifareancheilnocciolo
self.iniziale[0,3]=self.discretizzaarchicerchi
self.iniziale[0,4]=self.contorna
self.stringa=self.iniziale
self.vada=0
#print(self.stringa)
def Internet(*args):
"""
For documentation of the webbrowser module,
see http://docs.python.org/library/webbrowser.html
"""
import webbrowser
new = 2 # open in a new tab, if possible
# open a public URL, in this case, the webbrowser docs
url = "https://www.facebook.com/francescoscodesection\n"
webbrowser.open(url,new=new)
# open an HTML file on my own (Windows) computer
#url = "file://X:/MiscDev/language_links.html"
#webbrowser.open(url,new=new)
def Calcolalo(self,*args):
#import os
#os.chdir("C:\Users\<NAME>\Google Drive\mat2pylab\calcolatrice")
noncidevonoessereaste=1
from calcolatrice.misuras import guardasecisonoaste
noncidevonoessereaste=guardasecisonoaste(self)
if noncidevonoessereaste and 0:
self.stringa[0,2]=0
self.vuoifareancheilnocciolo=0
if self.vada>0:
import time
b0=time.clock()
self.salvaauto(self)
fig = mpl.figure.Figure(figsize=(self.mmma, self.mmmb))
axes = fig.gca()
figure_wgt = self.root.ids['figure_wgt'] # MatplotFigure
figure_wgt.figure = fig
#print(self.root.ids.figure_wgt.figure)
if self.root.ids.quantidiscreti.theTxt.text!="":
self.stringa[0,3]=int(self.root.ids.quantidiscreti.theTxt.text)
from calcolatrice.principale import codicesezione
self.root.ids.risoluzione.text=codicesezione(self)
#filename=self.root.ids.nomArch.theTxt.text
#mpl.draw()
#self.print_figure(self, filename)
b=time.clock()
print("tempo",round(b-b0,4))
#self.uscita="cacca"+"\n"
#self.root.ids.risoluzione.text=self.uscita
else:
                self.root.ids.risoluzione.text="figure inserite %d"%self.vada
"""
Reference tag taxonomy
======================
References are built up from tokens, which come in four groups: "axis",
"value", "general expression" (connector), and "named_entity". Each group
has several possible tag values:
1) axis: Specifies the type of the referred snippet.
   Implemented by class RefAxis.
   E.g.: Article, Absatz, Part, ...
2) value: Specifies the label of the reference within the corresponding
   labeling system. E.g.: (a), First, 1), ...
3) connector: Generic expressions, such as the words "and" or "or".
4) named_entity: Documents with proper names.
   In this case, the "type" attribute refers directly to the local ID of
   the corresponding document.
"""
import re
import string
from functools import lru_cache
from operator import attrgetter
from typing import List, Tuple, Iterator, Any, Dict, Iterable
from .model import NamedEntity, Connector, Value, Group, \
Axis, AxisRole
from .structures import Span, ReferenceToken, Coordinate, ReferenceTag, \
Target, StdCoordinate, InconsistentTargetError, Cycle, UnsupportedRole
from .utils import MetaPatternHandler, repeat_until_true, Reference
from .settings import LANG_2_DOMAIN
def list_enum(seq: list, reverse=False) -> Iterator[Tuple[int, Any]]:
if not reverse:
return enumerate(seq)
return reversed(list(enumerate(seq)))
class GroupPattern(MetaPatternHandler):
_base = {
'coordinates': '(axis:value|named_entity)',
'connector_value': '(?P<leader>coordinate)value*'
'(?P<buddies>(connector:value+)+)'
'(?P<after>coordinate)?',
'coordinate_connector_value':
'coordinate(connector:value)+',
'value_n': '(?P<leader>coordinate)value+',
'axis_connector': '^axis:connector',
'adjacent_coordinates': 'coordinate+'
}
@property
@lru_cache()
def mapping(self):
return {g.name: g.value for g in Group}
class TagPattern(MetaPatternHandler):
_base = {
'spoken_latin': '(?<=Group.value)LATIN',
'fourth_directive': 'SRNK[REG:DIR:DEC]',
'spoken_rank': # first and second point of ...
'SRNK(?P<followers>Group.connector:SRNK)*Group.axis',
're_reference': '[XPREVX]Group.axis',
'orphan_axes': '(?<![XPREVX:SRNK])Group.axis$',
'orphan_annex': '(?<![XPREVX:SRNK])ANX(?!Group.value)',
'generic_context': '[BRCRPL:THEREOF]',
'range_connected': # e.g. "points (k)(ii) to (v)"
'Group.coordinate:Group.value:RC:Group.value(?!Group.value)',
'of_day': 'SPCLPR:NM$', # e.g. of 12 December 2001 on Community designs
'co_and_co': '^Group.coordinate:AND:Group.coordinate$',
'co_desu_co': 'Group.coordinate:XDESUX:Group.coordinate',
'first_end': 'SRNK$',
'left_of_right':
'(?P<subs>Group.coordinate+)[SPCLPR:XDESUX]:Group.coordinate(?!Group.coordinate)',
'co_underthe_co': 'Group.coordinate:SPPLCR:Group.coordinate',
'comma_stairway': '(Group.coordinate:COM)+Group.coordinate'
}
@property
@lru_cache()
def mapping(self) -> Dict[str, str]:
t2g = Value.tag_2_group()
letters = list(string.ascii_letters + string.digits) \
+ [chr(k) for k in range(913, 938)] \
+ [chr(k) for k in range(945, 970)]
letters.reverse()
result = {}
for tag, group in t2g.items():
if group == Group.named_entity.name:
continue
letter = letters.pop()
result[tag] = letter
result[f'Group.{group}'] = \
result.get(f'Group.{group}', '') + letter
for group in Group:
try:
result[f'Group.{group.name}'] = \
"[{}]".format(result[f'Group.{group.name}'])
except KeyError:
pass
result['Group.coordinate'] = Coordinate.tag_value
result[Coordinate.tag_value] = Coordinate.tag_value # not alphabetic
return result
class TokenSequence(list):
""" A TokenSequence instance is a gathering of reference tokens. """
sep = ':'
white_spaces = re.compile(r'^\s*$')
def __init__(self, rt: ReferenceToken):
super().__init__([rt])
self.gp = GroupPattern()
self.tp = TagPattern()
self._finalized = False
@property
def groups(self):
return ''.join(child.group_tag for child in self)
@property
def values(self) -> str:
# noinspection PyTypeChecker
return ''.join(self.tp.mapping.get(child.tag_value, ' ')
for child in self)
def append(self, rt: ReferenceToken):
assert self[-1].span.end <= rt.span.start
super().append(rt)
@property
def span(self) -> Span:
return Span(self[0].span.start, self[-1].span.end)
def __repr__(self):
return type(self).__name__ + f"[{self.span}]"
def __str__(self):
return repr(self) + '\n ' + '\n '.join(map(repr, self))
def to_dict(self, text: str = None) -> dict:
result = {"span": self.span.to_dict(),
"children": [t.to_dict(text is not None) for t in self]}
if text is not None:
result['text'] = text[self.span.start:self.span.end]
return result
def single_token_2_coordinate(self, i, group):
value = self.pop(i)
coordinate = Coordinate(
axis=ReferenceToken.anonymous_axis(value.span.start, tag=group.name),
value=value)
self.insert(i, coordinate)
return coordinate
def _handle_pattern_generic_context(self):
""" Handling special kinds of back-references, such as
- "... dessen erster Paragraph"
- "... sus Articulos 7 y 8"
"""
for i, m in self.tp.finditer('generic_context', self.values):
coordinate = self.single_token_2_coordinate(i, Group.connector)
coordinate.level = 10
def finalize(self):
if self._finalized:
return
self._cleanup()
self._coordination()
if not self:
return
self._nesting()
self._finalized = True
def _handle_pattern_fourth_directive(self):
""" These have to be merged. """
for i, m in self.tp.finditer('fourth_directive', self.values):
this, axis = self[i:i+2]
self[i] = ReferenceToken(
axis.tag, Span(this.span.start, axis.span.end),
axis.text + axis.tail + this.text, tail=axis.tail)
def _handle_pattern_spoken_latin(self):
for i, m in self.tp.finditer('spoken_latin', self.values):
prev = self[i-1]
this = self[i]
suffix = Value.extract_as_number(this.text, 'LATIN', 'XX')
if prev.tag.value != 'NM':
if prev.text == prev.text.upper():
suffix = suffix.upper()
prev.span = Span(prev.span.start, this.span.end)
prev.tail = this.tail
prev.suffix = suffix
self.pop(i)
def _handle_pattern_spoken_rank(self):
for i, m in self.tp.finditer('spoken_rank', self.values):
span = Span(*m.span())
axis = self.pop(span.end - 1)
for k in range(i, span.end - 1):
if self[k].group_tag == Group.connector.value:
continue
self.insert(k, Coordinate(value=self.pop(k), axis=axis))
def _handle_pattern_re_reference(self):
for i, _ in self.tp.finditer('re_reference', self.values):
# TODO: move the self-reference and back-reference pattern to the
# axis-table and get rid of this loop.
value = self.pop(i)
self.insert(i, Coordinate(
value=ReferenceToken.quasi_value(value.tag.value,
value.span,
value.text),
axis=self.pop(i)))
def _handle_pattern_coordinates(self):
for i, m in self.gp.finditer('coordinates', self.groups):
if m.group() == Group.named_entity.value:
self.single_token_2_coordinate(i, Group.named_entity)
else:
self.insert(i, Coordinate(axis=self.pop(i), value=self.pop(i)))
def _handle_pattern_range_connected(self):
for i, m in self.tp.finditer('range_connected', self.values):
leader, first, to, last = self[i:i+4]
if not Value.compatible(first.tag.value, last.tag.value):
continue
first = Coordinate(ReferenceToken.anonymous_axis(first.span.start),
self.pop(i+1))
first.level = leader.level + 1
self.insert(i + 1, first)
self.insert(i + 3, Coordinate(first.axis, self.pop(i + 3)))
self[i+3].level = first.level
def _handle_pattern_connector_value(self):
for i, m in self.gp.finditer('connector_value', self.groups):
leader = self[i]
if m.group('after') is not None:
after = self[m.span('after')[0]]
if after.axis.tag.value == leader.axis.tag.value:
leader = self[i-1]
start, end = m.span('buddies')
for con in range(end - 1, start - 1, -1):
connector = self[con]
if connector.group_tag != Group.connector.value: # -> self.gp.mapping['connector']
continue
assert connector.tag.value in ('RC', 'COM', 'AND', 'OTHERX', 'LF')
if not Value.compatible(self[con+1].tag.value,
leader.value.tag.value):
continue
self.insert(
con + 1,
Coordinate(axis=leader.axis, value=self.pop(con + 1)))
def _handle_pattern_value_n(self):
for i, m in self.gp.finditer('value_n', self.groups):
leader = self[i]
for index in range(i + 1, m.span()[1]):
coordinate = Coordinate(
axis=ReferenceToken.anonymous_axis(self[index].span.start),
value=self.pop(index))
coordinate.level = leader.level + 1
self.insert(index, coordinate)
leader = coordinate
def _handle_pattern_coordinate_connector_value(self):
for i, m in self.gp.finditer('coordinate_connector_value', self.groups):
coordinate = self[i]
end = m.span()[1]
for j, (connector, value) in enumerate(zip(self[i+1:end:2],
self[i+2:end:2])):
if connector.tag.value not in ('RC', 'AND', 'OTHERX', 'COM'):
break
assert Value.compatible(coordinate.value.tag.value,
value.tag.value)
index = i + 2 * j + 2
sibling = Coordinate(axis=coordinate.axis,
value=self.pop(index))
sibling.level = coordinate.level
self.insert(index, sibling)
def _handle_pattern_orphan_annex(self):
for i, m in self.tp.finditer('orphan_annex', self.values):
annex = self.pop(i)
self.insert(i, Coordinate(
axis=annex,
value=ReferenceToken.quasi_value(
'ANX', annex.span, annex.text)))
@property
def coordinated(self) -> bool:
return set(self.groups).issubset({Group.coordinate.value,
Group.connector.value})
def _coordination(self):
for pattern_key in ('generic_context', 'fourth_directive', 'spoken_latin',
'spoken_rank', 'coordinates', 're_reference',
'range_connected', 'connector_value', 'value_n',
'coordinate_connector_value', 'orphan_annex'):
getattr(self, f"_handle_pattern_{pattern_key}")()
if self.coordinated:
return
def iter_coordinates(self) -> Iterable[Coordinate]:
for item in self:
if type(item) is Coordinate:
yield item
def iter_roots(self) -> Iterable[Coordinate]:
for coordinate in self.iter_coordinates():
if coordinate.parent is None:
yield coordinate
def _iter_siblings(self, leader: Coordinate) -> Iterable[Coordinate]:
""" Iterate over coordinates that share the same Axis-Token. """
for follower in self.iter_coordinates():
if follower.axis == leader.axis:
if follower.value != leader.value:
yield follower
@staticmethod
def nest_neighbours(precursor, iterator: Iterable[Coordinate]):
effect = False
for coordinate in iterator:
if precursor.level < coordinate.level:
precursor.append(coordinate)
precursor = coordinate
effect = True
elif coordinate.level < precursor.level \
and precursor.parent is None \
and Axis.compatible(coordinate.axis.tag.value,
precursor.axis.tag.value):
coordinate.append(precursor)
effect = True
else:
precursor = coordinate
return effect
def _nest_siblings(self):
effect = False
for coordinate in self.iter_coordinates():
if coordinate.parent is None:
for sibling in self._iter_siblings(coordinate):
if sibling.parent is not None:
parent = sibling.parent
coordinate.parent = parent
effect = True
break
else:
continue
else:
parent = coordinate.parent
for sibling in self._iter_siblings(coordinate):
if sibling.parent is None:
sibling.parent = parent
effect = True
return effect
@repeat_until_true(4)
def _nest_rest(self):
effect = self._nest_siblings()
effect = self.nest_neighbours(self[0], self.iter_roots()) or effect
return not effect
def _nest_adjacent(self):
for i, m in self.gp.finditer('adjacent_coordinates', self.groups):
self.nest_neighbours(self[i], self[i+1:m.span()[1]])
def _nest_co_underthe_co(self):
for i, _ in self.tp.finditer('co_underthe_co', self.values, reverse=False):
self[i].append(self[i+2])
for sibling in self._iter_siblings(self[i+2]):
self[i].append(sibling)
def _nest_left_of_right(self):
for i, m in self.tp.finditer('left_of_right', self.values):
child = self[i]
new_parent = self[m.span()[1]-1]
if len(m.group('subs')) > 1: # this is more complicated
last_sub = self[m.span('subs')[1]-1]
if last_sub in child.ancestors:
child = last_sub
else:
assert child in last_sub.ancestors
if child.parent is not None:
# noinspection PyUnresolvedReferences
if child.parent.axis.tag_value != new_parent.axis.tag_value:
continue
child.parent = new_parent
def _nest_comma_stairways(self):
""" E.g. "*Chapter 4, Section 3* of Regulation ..." """
for i, m in self.tp.finditer('comma_stairway', self.values, reverse=False):
end = m.span()[1]
if len(self) == end:
return
parent = self[i]
for j in range(i + 2, m.span()[1], 2):
if parent.level >= self[j].level:
break
parent.append(self[j])
parent = self[j]
def _nesting(self):
if len(self) == 1:
return
if self.tp['co_and_co'].match(self.values):
return # e.g. Chapter VII and Article 83
self._nest_adjacent()
self._nest_desu()
self._nest_co_underthe_co()
self._nest_siblings()
self._nest_comma_stairways()
self._nest_left_of_right()
self._nest_rest()
def _nest_desu(self):
for i, _ in self.tp.finditer('co_desu_co', self.values):
# finding referred parent coordinate
su_level = self[i+2].level
for j in range(i-1, -1, -1):
if self[j].group_tag == Group.coordinate.value:
if self[j].level < su_level:
self[j].append(self[i+2])
self[i+2].append(self[i])
def groups_representation(self, text: str) -> dict:
""" Good for debugging. """
        return {
            'text': text[self.span.start:self.span.end],
            'groups': self.groups,
        }
#!/usr/bin/env python
# coding: utf-8
# [Part I: On-policy learning and SARSA (3 points)](#Part-I:-On-policy-learning-and-SARSA-(3-points))
#
# [Part II: Experience replay (4 points)](#Part-II:-experience-replay-(4-points))
#
# [Bonus I: TD($ \lambda $) (5+ points)](#Bonus-I:-TD($\lambda$)-(5+-points))
#
# [Bonus II: More pacman (5+ points)](#Bonus-II:-More-pacman-(5+-points))
# ## Part I: On-policy learning and SARSA (3 points)
#
# _This notebook builds upon `qlearning.ipynb`, or to be exact your implementation of QLearningAgent._
#
# The policy we're going to use is the epsilon-greedy policy: the agent takes the optimal action with probability $(1-\epsilon)$ and otherwise samples an action at random. Note that the agent __can__ still pick the optimal action while sampling at random, by pure chance.
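# A minimal, self-contained sketch of the epsilon-greedy rule described above
# (the helper below is illustrative and not part of the assignment code):
import random

def epsilon_greedy(q_values, epsilon):
    """With probability epsilon pick a uniform random action, else the argmax."""
    if random.random() < epsilon:
        return random.randrange(len(q_values))
    return max(range(len(q_values)), key=lambda a: q_values[a])

# e.g. epsilon_greedy([0.1, 0.5, 0.2], epsilon=0.2) returns 1 about 87% of the time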
# In[1]:
import sys, os
if 'google.colab' in sys.modules and not os.path.exists('.setup_complete'):
get_ipython().system('wget -q https://raw.githubusercontent.com/yandexdataschool/Practical_RL/master/setup_colab.sh -O- | bash')
get_ipython().system('touch .setup_complete')
# This code creates a virtual display to draw game images on.
# It will have no effect if your machine has a monitor.
if type(os.environ.get("DISPLAY")) is not str or len(os.environ.get("DISPLAY")) == 0:
get_ipython().system('bash ../xvfb start')
os.environ['DISPLAY'] = ':1'
# In[2]:
import numpy as np
import matplotlib.pyplot as plt
get_ipython().run_line_magic('matplotlib', 'inline')
# You can copy your `QLearningAgent` implementation from previous notebook.
# In[3]:
from collections import defaultdict
import random
import math
import numpy as np
class QLearningAgent:
def __init__(self, alpha, epsilon, discount, get_legal_actions):
"""
Q-Learning Agent
based on https://inst.eecs.berkeley.edu/~cs188/sp19/projects.html
Instance variables you have access to
- self.epsilon (exploration prob)
- self.alpha (learning rate)
- self.discount (discount rate aka gamma)
Functions you should use
- self.get_legal_actions(state) {state, hashable -> list of actions, each is hashable}
which returns legal actions for a state
- self.get_qvalue(state,action)
which returns Q(state,action)
- self.set_qvalue(state,action,value)
which sets Q(state,action) := value
!!!Important!!!
Note: please avoid using self._qValues directly.
There's a special self.get_qvalue/set_qvalue for that.
"""
self.get_legal_actions = get_legal_actions
self._qvalues = defaultdict(lambda: defaultdict(lambda: 0))
self.alpha = alpha
self.epsilon = epsilon
self.discount = discount
def get_qvalue(self, state, action):
""" Returns Q(state,action) """
return self._qvalues[state][action]
def set_qvalue(self, state, action, value):
""" Sets the Qvalue for [state,action] to the given value """
self._qvalues[state][action] = value
#---------------------START OF YOUR CODE---------------------#
def get_value(self, state):
"""
Compute your agent's estimate of V(s) using current q-values
V(s) = max_over_action Q(state,action) over possible actions.
Note: please take into account that q-values can be negative.
"""
possible_actions = self.get_legal_actions(state)
# If there are no legal actions, return 0.0
if len(possible_actions) == 0:
return 0.0
value = np.max([self.get_qvalue(state, a) for a in self.get_legal_actions(state)])
return value
def update(self, state, action, reward, next_state):
"""
You should do your Q-Value update here:
Q(s,a) := (1 - alpha) * Q(s,a) + alpha * (r + gamma * V(s'))
"""
# agent parameters
gamma = self.discount
learning_rate = self.alpha
G = reward + gamma * self.get_value(next_state)
self.set_qvalue(state, action,
(1 - learning_rate) * self.get_qvalue(state, action)
+ learning_rate * G)
def get_best_action(self, state):
"""
Compute the best action to take in a state (using current q-values).
"""
possible_actions = self.get_legal_actions(state)
# If there are no legal actions, return None
if len(possible_actions) == 0:
return None
best_action = possible_actions[np.argmax([self.get_qvalue(state, a) for a in possible_actions])]
return best_action
def get_action(self, state):
"""
Compute the action to take in the current state, including exploration.
With probability self.epsilon, we should take a random action.
otherwise - the best policy action (self.get_best_action).
Note: To pick randomly from a list, use random.choice(list).
To pick True or False with a given probablity, generate uniform number in [0, 1]
and compare it with your probability
"""
# Pick Action
possible_actions = self.get_legal_actions(state)
action = None
# If there are no legal actions, return None
if len(possible_actions) == 0:
return None
# agent parameters:
epsilon = self.epsilon
take_random_action = np.random.binomial(1, epsilon, size=1)[0]
if take_random_action:
action_idx = np.random.randint(0, len(possible_actions), size=1)[0]
action = possible_actions[action_idx]
else:
action = self.get_best_action(state)
return action
# Now we're going to implement Expected Value SARSA on top of it.
# In[4]:
class EVSarsaAgent(QLearningAgent):
"""
An agent that changes some of q-learning functions to implement Expected Value SARSA.
Note: this demo assumes that your implementation of QLearningAgent.update uses get_value(next_state).
If it doesn't, please add
def update(self, state, action, reward, next_state):
and implement it for Expected Value SARSA's V(s')
"""
def get_value(self, state):
"""
Returns Vpi for current state under epsilon-greedy policy:
V_{pi}(s) = sum _{over a_i} {pi(a_i | s) * Q(s, a_i)}
Hint: all other methods from QLearningAgent are still accessible.
"""
epsilon = self.epsilon
possible_actions = self.get_legal_actions(state)
n_actions = len(possible_actions)
# If there are no legal actions, return 0.0
if n_actions == 0:
return 0.0
best_action_idx = np.argmax([self.get_qvalue(state, a) for a in possible_actions])
probas = epsilon / n_actions * np.ones(n_actions)
probas[best_action_idx] += 1 - epsilon
qs = np.array([self.get_qvalue(state, a) for a in possible_actions])
state_value = probas.dot(qs)
return state_value
# ### Cliff World
#
# Let's now see how our algorithm compares against q-learning in case where we force agent to explore all the time.
#
# <img src=https://github.com/yandexdataschool/Practical_RL/raw/master/yet_another_week/_resource/cliffworld.png width=600>
# <center><i>image by cs188</i></center>
# In[5]:
import gym
import gym.envs.toy_text
env = gym.envs.toy_text.CliffWalkingEnv()
n_actions = env.action_space.n
print(env.__doc__)
# In[6]:
# Our cliffworld has one difference from what's on the image: there is no wall.
# Agent can choose to go as close to the cliff as it wishes. x:start, T:exit, C:cliff, o: flat ground
env.render()
# In[7]:
def play_and_train(env, agent, t_max=10**4):
"""This function should
- run a full game, actions given by agent.get_action(s)
- train agent using agent.update(...) whenever possible
- return total reward"""
total_reward = 0.0
s = env.reset()
for t in range(t_max):
a = agent.get_action(s)
next_s, r, done, _ = env.step(a)
agent.update(s, a, r, next_s)
s = next_s
total_reward += r
if done:
break
return total_reward
# In[8]:
agent_sarsa = EVSarsaAgent(alpha=0.25, epsilon=0.2, discount=0.99,
get_legal_actions=lambda s: range(n_actions))
agent_ql = QLearningAgent(alpha=0.25, epsilon=0.2, discount=0.99,
get_legal_actions=lambda s: range(n_actions))
# In[9]:
from IPython.display import clear_output
import pandas as pd
def moving_average(x, span=100):
return pd.DataFrame({'x': np.asarray(x)}).x.ewm(span=span).mean().values
rewards_sarsa, rewards_ql = [], []
for i in range(5000):
rewards_sarsa.append(play_and_train(env, agent_sarsa))
rewards_ql.append(play_and_train(env, agent_ql))
# Note: agent.epsilon stays constant
if i % 100 == 0:
clear_output(True)
print('EVSARSA mean reward =', np.mean(rewards_sarsa[-100:]))
print('QLEARNING mean reward =', np.mean(rewards_ql[-100:]))
plt.title("epsilon = %s" % agent_ql.epsilon)
plt.plot(moving_average(rewards_sarsa), label='ev_sarsa')
plt.plot(moving_average(rewards_ql), label='qlearning')
plt.grid()
plt.legend()
plt.ylim(-500, 0)
plt.show()
# Let's now see what the algorithms learned by visualizing their actions at every state.
# In[10]:
def draw_policy(env, agent):
""" Prints CliffWalkingEnv policy with arrows. Hard-coded. """
n_rows, n_cols = env._cliff.shape
actions = '^>v<'
for yi in range(n_rows):
for xi in range(n_cols):
if env._cliff[yi, xi]:
print(" C ", end='')
elif (yi * n_cols + xi) == env.start_state_index:
print(" X ", end='')
elif (yi * n_cols + xi) == n_rows * n_cols - 1:
print(" T ", end='')
else:
print(" %s " %
actions[agent.get_best_action(yi * n_cols + xi)], end='')
print()
# In[11]:
print("Q-Learning")
draw_policy(env, agent_ql)
print("SARSA")
draw_policy(env, agent_sarsa)
# ### More on SARSA
#
# Here are some of the things you can do if you feel like it:
#
# * Play with epsilon. See how the learned policies change if you set epsilon to higher/lower values (e.g. 0.75).
# * Expected Value SARSA for a softmax policy __(2pts)__ (see the numeric sketch after this list):
# $$ \pi(a_i \mid s) = \operatorname{softmax} \left( \left\{ {Q(s, a_j) \over \tau} \right\}_{j=1}^n \right)_i = {\operatorname{exp} \left( Q(s,a_i) / \tau \right) \over {\sum_{j} \operatorname{exp} \left( Q(s,a_j) / \tau \right)}} $$
# * Implement N-step algorithms and TD($\lambda$): see [Sutton's book](http://incompleteideas.net/book/RLbook2020.pdf) chapter 7 and chapter 12.
# * Use those algorithms to train on CartPole in previous / next assignment for this week.
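# Quick numeric intuition for the softmax temperature tau (illustrative values):
#   Q = [1.0, 2.0], tau = 1.0 -> probabilities ~ [0.27, 0.73]   (soft, explores)
#   Q = [1.0, 2.0], tau = 0.1 -> probabilities ~ [5e-5, 0.99995] (near-greedy)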
# In[32]:
from scipy.special import softmax
class EVSoftmaxSarsaAgent(EVSarsaAgent):
"""Softmax policy SARSA agent"""
def __init__(self, alpha, tau, discount, get_legal_actions):
super().__init__(alpha, 0, discount, get_legal_actions)
self.tau = tau
def get_action(self, state):
# Pick Action
possible_actions = self.get_legal_actions(state)
action = None
# If there are no legal actions, return None
if len(possible_actions) == 0:
return None
tau = self.tau
probas = softmax([self.get_qvalue(state, a) / tau for a in possible_actions])
action = np.random.choice(possible_actions, p=probas)
return action
# In[33]:
agent_sarsa = EVSarsaAgent(alpha=0.25, epsilon=0.2, discount=0.99,
get_legal_actions=lambda s: range(n_actions))
agent_softmax_sarsa = EVSoftmaxSarsaAgent(alpha=0.25, tau=1e-4, discount=0.99,
get_legal_actions=lambda s: range(n_actions))
agent_ql = QLearningAgent(alpha=0.25, epsilon=0.2, discount=0.99,
get_legal_actions=lambda s: range(n_actions))
# In[34]:
from IPython.display import clear_output
import pandas as pd
def moving_average(x, span=100):
return pd.DataFrame({'x': np.asarray(x)}).x.ewm(span=span).mean().values
rewards_sarsa, rewards_softmax_sarsa, rewards_ql = [], [], []
for i in range(5000):
rewards_sarsa.append(play_and_train(env, agent_sarsa))
rewards_softmax_sarsa.append(play_and_train(env, agent_softmax_sarsa))
rewards_ql.append(play_and_train(env, agent_ql))
# Note: agent.epsilon stays constant
if i % 100 == 0:
clear_output(True)
print('EVSARSA mean reward =', np.mean(rewards_sarsa[-100:]))
print('EVSoftmaxSARSA mean reward =', np.mean(rewards_softmax_sarsa[-100:]))
        print('QLEARNING mean reward =', np.mean(rewards_ql[-100:]))
def stop_execution(self):
"""
Triggers the stopping of the object.
"""
if not (self._stopping or self._stopped):
for actor in self.owner.actors:
actor.stop_execution()
self._stopping = True
def is_stopping(self):
"""
Returns whether the director is in the process of stopping.
        :return: whether stopping
        :rtype: bool
"""
return self._stopping
def is_stopped(self):
"""
Returns whether the object has been stopped.
:return: whether stopped
:rtype: bool
"""
return self._stopped
def check_owner(self, owner):
"""
Checks the owner. Raises an exception if invalid.
:param owner: the owner to check
:type owner: Actor
"""
if not isinstance(owner, ActorHandler):
raise Exception("Owner is not an ActorHandler: " + owner.__name__)
def check_actors(self):
"""
Checks the actors of the owner. Raises an exception if invalid.
"""
actors = []
for actor in self.owner.actors:
if actor.skip:
continue
actors.append(actor)
if len(actors) == 0:
return
if not self.allow_source and base.is_source(actors[0]):
raise Exception("Actor '" + actors[0].full_name + "' is a source, but no sources allowed!")
for i in xrange(1, len(actors)):
if not isinstance(actors[i], InputConsumer):
raise Exception("Actor does not accept any input: " + actors[i].full_name)
def setup(self):
"""
Performs some checks.
:return: None if successful, otherwise error message.
:rtype: str
"""
result = super(SequentialDirector, self).setup()
if result is None:
try:
self.check_actors()
except Exception, e:
result = str(e)
return result
def do_execute(self):
"""
Actual execution of the director.
:return: None if successful, otherwise error message
:rtype: str
"""
self._stopped = False
self._stopping = False
not_finished_actor = self.owner.first_active
pending_actors = []
finished = False
actor_result = None
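# The loop below threads one token at a time through the active actors:
# actors that still have buffered output after a pass are pushed onto
# pending_actors, and the next iteration resumes from the most recent one.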
while not (self.is_stopping() or self.is_stopped()) and not finished:
# determing starting point of next iteration
if len(pending_actors) > 0:
start_index = self.owner.index_of(pending_actors[-1].name)
else:
start_index = self.owner.index_of(not_finished_actor.name)
not_finished_actor = None
# iterate over actors
token = None
last_active = -1
if self.owner.active > 0:
last_active = self.owner.last_active.index
for i in xrange(start_index, last_active + 1):
# do we have to stop the execution?
if self.is_stopped() or self.is_stopping():
break
curr = self.owner.actors[i]
if curr.skip:
continue
# no token? get pending one or produce new one
if token is None:
if isinstance(curr, OutputProducer) and curr.has_output():
pending_actors.pop()
else:
actor_result = curr.execute()
if actor_result is not None:
self.owner.logger.error(
curr.full_name + " generated following error output:\n" + actor_result)
break
if isinstance(curr, OutputProducer) and curr.has_output():
token = curr.output()
else:
token = None
# still more to come?
if isinstance(curr, OutputProducer) and curr.has_output():
pending_actors.append(curr)
else:
# process token
curr.input = token
actor_result = curr.execute()
if actor_result is not None:
self.owner.logger.error(
curr.full_name + " generated following error output:\n" + actor_result)
break
# was a new token produced?
if isinstance(curr, OutputProducer):
if curr.has_output():
token = curr.output()
else:
token = None
# still more to come?
if curr.has_output():
pending_actors.append(curr)
else:
token = None
# token from last actor generated? -> store
if (i == self.owner.last_active.index) and (token is not None):
if self._record_output:
self._recorded_output.append(token)
# no token produced, ignore rest of actors
if isinstance(curr, OutputProducer) and (token is None):
break
# all actors finished?
finished = (not_finished_actor is None) and (len(pending_actors) == 0)
return actor_result
class Flow(ActorHandler, StorageHandler):
"""
Root actor for defining and executing flows.
"""
def __init__(self, name=None, config=None):
"""
Initializes the sequence.
:param name: the name of the sequence
:type name: str
:param config: the dictionary with the options (str -> object).
:type config: dict
"""
super(Flow, self).__init__(name=name, config=config)
self._storage = {}
def description(self):
"""
Returns a description of the actor.
:return: the description
:rtype: str
"""
return "Root actor for defining and executing flows."
def new_director(self):
"""
Creates the director to use for handling the sub-actors.
:return: the director instance
:rtype: Director
"""
result = SequentialDirector(self)
result.record_output = False
result.allow_source = True
return result
def check_actors(self, actors):
"""
Performs checks on the actors that are to be used. Raises an exception if invalid setup.
:param actors: the actors to check
:type actors: list
"""
super(Flow, self).check_actors(actors)
actor = self.first_active
if (actor is not None) and not base.is_source(actor):
raise Exception("First active actor is not a source: " + actor.full_name)
@property
def storage(self):
"""
Returns the internal storage.
:return: the internal storage
:rtype: dict
"""
return self._storage
@classmethod
def load(cls, fname):
"""
Loads the flow from a JSON file.
:param fname: the file to load
:type fname: str
:return: the flow
:rtype: Flow
"""
with open(fname) as f:
content = f.readlines()
return Flow.from_json(''.join(content))
@classmethod
def save(cls, flow, fname):
"""
Saves the flow to a JSON file.
:param flow: the flow to save
:type flow: Flow
:param fname: the file to load
:type fname: str
:return: None if successful, otherwise error message
:rtype: str
"""
result = None
try:
f = open(fname, 'w')
f.write(flow.to_json())
f.close()
except Exception as e:
result = str(e)
return result
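# Hedged usage sketch, restricted to the (de)serialization helpers defined
# above ("flow.json" is an illustrative file name):
#
#   flow = Flow(name="example")
#   error = Flow.save(flow, "flow.json")
#   if error is None:
#       restored = Flow.load("flow.json")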
class Sequence(ActorHandler, InputConsumer):
"""
Simple sequence of actors that get executed one after the other. Accepts input.
"""
def __init__(self, name=None, config=None):
"""
Initializes the sequence.
:param name: the name of the sequence
:type name: str
:param config: the dictionary with the options (str -> object).
:type config: dict
"""
super(Sequence, self).__init__(name=name, config=config)
self._output = []
def description(self):
"""
Returns a description of the actor.
:return: the description
:rtype: str
"""
return "Simple sequence of actors that get executed one after the other. Accepts input."
def new_director(self):
"""
Creates the director to use for handling the sub-actors.
:return: the director instance
:rtype: Director
"""
result = SequentialDirector(self)
result.record_output = False
result.allow_source = False
return result
def check_actors(self, actors):
"""
Performs checks on the actors that are to be used. Raises an exception if invalid setup.
:param actors: the actors to check
:type actors: list
"""
super(Sequence, self).check_actors(actors)
actor = self.first_active
if (actor is not None) and not isinstance(actor, InputConsumer):
raise Exception("First active actor does not accept input: " + actor.full_name)
def do_execute(self):
"""
The actual execution of the actor.
:return: None if successful, otherwise error message
:rtype: str
"""
self.first_active.input = self.input
result = self._director.execute()
if result is None:
self._output.append(self.input)
return result
class Tee(ActorHandler, Transformer):
"""
'Tees off' the current token to be processed in the sub-tree before passing it on.
"""
def __init__(self, name=None, config=None):
"""
Initializes the sequence.
:param name: the name of the sequence
:type name: str
:param config: the dictionary with the options (str -> object).
:type config: dict
"""
super(Tee, self).__init__(name=name, config=config)
self._requires_active_actors = True
def description(self):
"""
Returns a description of the actor.
:return: the description
:rtype: str
"""
return "'Tees off' the current token to be processed in the sub-tree before passing it on."
@property
def quickinfo(self):
"""
Returns a short string describing some of the options of the actor.
:return: the info, None if not available
:rtype: str
"""
cond = str(self.config["condition"])
if len(cond) > 0:
return "condition: " + cond
else:
return None
def fix_config(self, options):
"""
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict
"""
options = super(Tee, self).fix_config(options)
opt = "condition"
if opt not in options:
options[opt] = "True"
if opt not in self.help:
self.help[opt] = "The (optional) condition for teeing off the tokens; uses the 'eval' method, "\
"ie the expression must evaluate to a boolean value; storage values placeholders "\
"'@{...}' get replaced with their string representations before evaluating the "\
"expression (string)."
return options
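# Illustration of the "condition" option documented above (hedged: the
# "@{count}" storage placeholder is hypothetical and assumes a matching
# storage value exists at evaluation time):
#
#   tee = Tee(name="tee")
#   tee.config["condition"] = "@{count} > 10"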
def new_director(self):
"""
Creates the director to use for handling the sub-actors.
:return: the director instance
:rtype: Director
"""
result = SequentialDirector(self)
result.record_output = False
result.allow_source = False
return result
def check_actors(self, actors):
"""
Performs checks on the actors that are to be used. Raises an exception if invalid setup.
:param actors: the actors to check
:type actors: list
"""
super(Tee, self).check_actors(actors)
actor = self.first_active
if actor is None:
if self._requires_active_actors:
raise Exception("No active actor!")
elif not isinstance(actor, InputConsumer):
raise Exception("First active actor does not accept input: " + actor.full_name)
def do_execute(self):
"""
The actual execution of the actor.
:return: None if successful, otherwise error message
:rtype: str
"""
result
base path while still using different user data directories for
# different channels (Stable, Beta, Dev). For existing users who only have
# chrome-profile, continue using CHROME_USER_DATA_DIR so they don't have to
# set up their profile again.
chrome_profile = os.path.join(CONFIG_DIR, "chrome-profile")
chrome_config_home = os.path.join(CONFIG_DIR, "chrome-config")
if (os.path.exists(chrome_profile)
and not os.path.exists(chrome_config_home)):
self.child_env["CHROME_USER_DATA_DIR"] = chrome_profile
else:
self.child_env["CHROME_CONFIG_HOME"] = chrome_config_home
# Set SSH_AUTH_SOCK to the file name to listen on.
if self.ssh_auth_sockname:
self.child_env["SSH_AUTH_SOCK"] = self.ssh_auth_sockname
if USE_XORG_ENV_VAR in os.environ:
self._launch_xorg(display, x_auth_file, extra_x_args)
else:
self._launch_xvfb(display, x_auth_file, extra_x_args)
# The remoting host expects the server to use "evdev" keycodes, but Xvfb
# starts configured to use the "base" ruleset, resulting in XKB configuring
# for "xfree86" keycodes, and screwing up some keys. See crbug.com/119013.
# Reconfigure the X server to use "evdev" keymap rules. The X server must
# be started with -noreset otherwise it'll reset as soon as the command
# completes, since there are no other X clients running yet.
exit_code = subprocess.call(["setxkbmap", "-rules", "evdev"],
env=self.child_env)
if exit_code != 0:
logging.error("Failed to set XKB to 'evdev'")
if not self.server_supports_randr:
return
# Register the screen sizes with RandR, if needed. Errors here are
# non-fatal; the X server will continue to run with the dimensions from
# the "-screen" option.
if self.randr_add_sizes:
for width, height in self.sizes:
label = "%dx%d" % (width, height)
args = ["xrandr", "--newmode", label, "0", str(width), "0", "0", "0",
str(height), "0", "0", "0"]
subprocess.call(args, env=self.child_env, stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
args = ["xrandr", "--addmode", "screen", label]
subprocess.call(args, env=self.child_env, stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
# Set the initial mode to the first size specified, otherwise the X server
# would default to (max_width, max_height), which might not even be in the
# list.
initial_size = self.sizes[0]
label = "%dx%d" % initial_size
args = ["xrandr", "-s", label]
subprocess.call(args, env=self.child_env, stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
# Set the physical size of the display so that the initial mode is running
# at approximately 96 DPI, since some desktops require the DPI to be set
# to something realistic.
args = ["xrandr", "--dpi", "96"]
subprocess.call(args, env=self.child_env, stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
# Monitor for any automatic resolution changes from the desktop
# environment.
args = [SCRIPT_PATH, "--watch-resolution", str(initial_size[0]),
str(initial_size[1])]
# It is not necessary to wait() on the process here, as this script's main
# loop will reap the exit-codes of all child processes.
subprocess.Popen(args, env=self.child_env, stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
def _launch_pre_session(self):
# Launch the pre-session script, if it exists. Returns true if the script
# was launched, false if it didn't exist.
if os.path.exists(SYSTEM_PRE_SESSION_FILE_PATH):
pre_session_command = bash_invocation_for_script(
SYSTEM_PRE_SESSION_FILE_PATH)
logging.info("Launching pre-session: %s" % pre_session_command)
self.pre_session_proc = subprocess.Popen(pre_session_command,
stdin=subprocess.DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=HOME_DIR,
env=self.child_env)
if not self.pre_session_proc.pid:
raise Exception("Could not start pre-session")
output_filter_thread = SessionOutputFilterThread(
self.pre_session_proc.stdout, "Pre-session output: ", None)
output_filter_thread.start()
return True
return False
def launch_x_session(self):
# Start desktop session.
# The /dev/null input redirection is necessary to prevent the X session
# reading from stdin. If this code runs as a shell background job in a
# terminal, any reading from stdin causes the job to be suspended.
# Daemonization would solve this problem by separating the process from the
# controlling terminal.
xsession_command = choose_x_session()
if xsession_command is None:
raise Exception("Unable to choose suitable X session command.")
logging.info("Launching X session: %s" % xsession_command)
self.session_proc = subprocess.Popen(xsession_command,
stdin=subprocess.DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=HOME_DIR,
env=self.child_env)
if not self.session_proc.pid:
raise Exception("Could not start X session")
output_filter_thread = SessionOutputFilterThread(self.session_proc.stdout,
"Session output: ", SESSION_OUTPUT_TIME_LIMIT_SECONDS)
output_filter_thread.start()
def launch_session(self, x_args):
self._init_child_env()
self._setup_pulseaudio()
self._setup_gnubby()
self._launch_x_server(x_args)
if not self._launch_pre_session():
# If there was no pre-session script, launch the session immediately.
self.launch_x_session()
def launch_host(self, host_config, extra_start_host_args):
# Start remoting host
args = [HOST_BINARY_PATH, "--host-config=-"]
if self.pulseaudio_pipe:
args.append("--audio-pipe-name=%s" % self.pulseaudio_pipe)
if self.server_supports_exact_resize:
args.append("--server-supports-exact-resize")
if self.ssh_auth_sockname:
args.append("--ssh-auth-sockname=%s" % self.ssh_auth_sockname)
args.extend(extra_start_host_args)
# Have the host process use SIGUSR1 to signal a successful start.
def sigusr1_handler(signum, frame):
_ = signum, frame
logging.info("Host ready to receive connections.")
self.host_ready = True
ParentProcessLogger.release_parent_if_connected(True)
signal.signal(signal.SIGUSR1, sigusr1_handler)
args.append("--signal-parent")
logging.info(args)
self.host_proc = subprocess.Popen(args, env=self.child_env,
stdin=subprocess.PIPE)
if not self.host_proc.pid:
raise Exception("Could not start Chrome Remote Desktop host")
try:
self.host_proc.stdin.write(json.dumps(host_config.data).encode('UTF-8'))
self.host_proc.stdin.flush()
except IOError as e:
# This can occur in rare situations, for example, if the machine is
# heavily loaded and the host process dies quickly (maybe if the X
# connection failed), the host process might be gone before this code
# writes to the host's stdin. Catch and log the exception, allowing
# the process to be retried instead of exiting the script completely.
logging.error("Failed writing to host's stdin: " + str(e))
finally:
self.host_proc.stdin.close()
def shutdown_all_procs(self):
"""Send SIGTERM to all procs and wait for them to exit. Will fallback to
SIGKILL if a process doesn't exit within 10 seconds.
"""
for proc, name in [(self.x_proc, "X server"),
(self.pre_session_proc, "pre-session"),
(self.session_proc, "session"),
(self.host_proc, "host")]:
if proc is not None:
logging.info("Terminating " + name)
try:
psutil_proc = psutil.Process(proc.pid)
psutil_proc.terminate()
# Use a short timeout, to avoid delaying service shutdown if the
# process refuses to die for some reason.
psutil_proc.wait(timeout=10)
except psutil.TimeoutExpired:
logging.error("Timed out - sending SIGKILL")
psutil_proc.kill()
except psutil.Error:
logging.error("Error terminating process")
self.x_proc = None
self.pre_session_proc = None
self.session_proc = None
self.host_proc = None
def report_offline_reason(self, host_config, reason):
"""Attempt to report the specified offline reason to the registry. This
is best effort, and requires a valid host config.
"""
logging.info("Attempting to report offline reason: " + reason)
args = [HOST_BINARY_PATH, "--host-config=-",
"--report-offline-reason=" + reason]
proc = subprocess.Popen(args, env=self.child_env, stdin=subprocess.PIPE)
proc.communicate(json.dumps(host_config.data).encode('UTF-8'))
def parse_config_arg(args):
"""Parses only the --config option from a given command-line.
Returns:
A two-tuple. The first element is the value of the --config option (or None
if it is not specified), and the second is a list containing the remaining
arguments
"""
# By default, argparse will exit the program on error. We would like it not to
# do that.
class ArgumentParserError(Exception):
pass
class ThrowingArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise ArgumentParserError(message)
parser = ThrowingArgumentParser()
parser.add_argument("--config", nargs='?', action="store")
try:
result = parser.parse_known_args(args)
return (result[0].config, result[1])
except ArgumentParserError:
return (None, list(args))
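# Example (illustrative):
#   parse_config_arg(["--config", "/tmp/host.json", "--start"])
#   => ("/tmp/host.json", ["--start"])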
def get_daemon_proc(config_file, require_child_process=False):
"""Checks if there is already an instance of this script running against
|config_file|, and returns a psutil.Process instance for it. If
|require_child_process| is true, only check for an instance with the
--child-process flag specified.
If a process is found without --config in the command line, get_daemon_proc
will fall back to the old behavior of checking whether the script path matches
the current script. This is to facilitate upgrades from previous versions.
Returns:
A Process instance for the existing daemon process, or None if the daemon
is not running.
"""
# Note: When making changes to how instances are detected, it is imperative
# that this function retains the ability to find older versions. Otherwise,
# upgrades can leave the user with two running sessions, with confusing
# results.
uid = os.getuid()
this_pid = os.getpid()
# This function should return the process with the --child-process flag if it
# exists. If there's only a process without, it might be a legacy process.
non_child_process = None
# Support new & old psutil API. This is the right way to check, according to
# http://grodola.blogspot.com/2014/01/psutil-20-porting.html
if psutil.version_info >= (2, 0):
psget = lambda x: x()
else:
psget = lambda x: x
for process in psutil.process_iter():
# Skip any processes that raise an exception, as processes may terminate
# during iteration over the list.
try:
# Skip other users' processes.
if psget(process.uids).real != uid:
continue
# Skip the process for this instance.
if process.pid == this_pid:
continue
# |cmdline| will be [python-interpreter, script-file, other arguments...]
cmdline = psget(process.cmdline)
if len(cmdline) < 2:
continue
if (os.path.basename(cmdline[0]).startswith('python') and
os.path.basename(cmdline[1]) == os.path.basename(sys.argv[0]) and
"--start" in cmdline):
process_config = parse_config_arg(cmdline[2:])[0]
# Fall back to old behavior if there is no --config argument
# TODO(rkjnsn): Consider removing this fallback once sufficient time
# has passed.
if process_config == config_file or (process_config is None and
cmdline[1] == sys.argv[0]):
if "--child-process" in cmdline:
return process
else:
non_child_process = process
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      # As noted above, processes may terminate during iteration; skip them.
      continue
  return non_child_process
import os
from datetime import datetime, timedelta, timezone, date
import numpy as np
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
from src.loss import JointsMSELoss
from src.model import *
from src.dataset import *
from src.util import *
from src.evaluate import *
import itertools
import matplotlib.pyplot as plt
MEAN = 0.5
STD = 0.5
NUM_WORKER = 0
def train(args):
## Set Hyperparameters for the Training
mode = args.mode
train_continue = args.train_continue
lr = args.lr
batch_size = args.batch_size
num_epoch = args.num_epoch
data_dir = args.data_dir
ckpt_dir = args.ckpt_dir
log_dir = args.log_dir
result_dir = args.result_dir
log_prefix = args.log_prefix
task = args.task
num_mark = args.num_mark
ny = args.ny
nx = args.nx
nch = args.nch
nker = args.nker
norm = args.norm
network = args.network
resnet_depth = args.resnet_depth
joint_weight = args.joint_weight
cuda = args.cuda
device = torch.device(cuda if torch.cuda.is_available() else 'cpu')
## Open log file and write
date_time = datetime.now().strftime("%m/%d/%Y, %H:%M:%S")
f = open(log_prefix + "-" + mode + ".txt", "a")
f.write("initiate %s loop : " % mode + date_time + "\n")
f.write("mode: %s\n" % mode)
f.write("norm: %s\n" % norm)
f.write("learning rate: %.4e\n" % lr)
f.write("batch size: %d\n" % batch_size)
f.write("number of epoch: %d\n" % num_epoch)
f.write("task: %s\n" % task)
f.write("number of markers: %s\n" % num_mark)
f.write("network: %s\n" % network)
f.write("data dir: %s\n" % data_dir)
f.write("ckpt dir: %s\n" % ckpt_dir)
f.write("log dir: %s\n" % log_dir)
f.write("result dir: %s\n" % result_dir)
f.write("device: %s\n" % device)
## Create Result Directories
result_dir_train = os.path.join(result_dir, 'train')
if not os.path.exists(result_dir_train):
os.makedirs(os.path.join(result_dir_train))
if mode == 'train':
transform_train = "3R1N" # Resize - RandomCrop - RandomFlip - Normalization
dataset_full = Dataset(data_dir=os.path.join(data_dir, 'train'),
transform=transform_train, shape=(ny, nx, nch), hm_shape=(ny, nx, num_mark))
# Set Other Variables
num_data = len(dataset_full)
num_data_train = num_data // 10 * 9
num_batch_train = np.ceil(num_data_train / batch_size)
dataset_train, dataset_val = torch.utils.data.random_split(dataset_full, [num_data_train, num_data-num_data_train])
loader_train = DataLoader(dataset_train,
batch_size=batch_size,
shuffle=True, num_workers=NUM_WORKER)
loader_val = DataLoader(dataset_val,
batch_size=batch_size,
shuffle=True, num_workers=NUM_WORKER)
if network == "PoseResNet":
netP = PoseResNet(in_channels=nch, out_channels=num_mark, nker=nker, norm=norm, num_layers=resnet_depth).to(device)
message = init_weights(netP, init_type='normal', init_gain=0.02)
f.write(message)
elif network == "PoseResNetv2":
netP = PoseResNetv2(out_channels=num_mark, num_layers=resnet_depth, pretrained=True).to(device)
f.write("initialize network with pretrained parameters\n")
## Define the Loss Functions
fn_pose = JointsMSELoss(use_target_weight=joint_weight).to(device)
## Set the Optimizers
optimP = torch.optim.Adam(netP.parameters(), lr=lr, betas=(0.5, 0.999))
## Define Other Functions
fn_tonumpy = lambda x: x.to('cpu').detach().numpy().transpose(0, 2, 3, 1)
fn_denorm = lambda x: (x * STD) + MEAN
cmap = None
## Set SummaryWriter for the Tensorboard
writer_train = SummaryWriter(log_dir=os.path.join(log_dir, 'train'))
## Train the Networks
st_epoch = 0
if mode == 'train':
if train_continue == "on":
st_epoch, netP, optimP = load(ckpt_dir=ckpt_dir,
netP=netP,
optimP=optimP)
early_stop = EarlyStopping(ckpt_dir=ckpt_dir, trace_func=f.write)
for epoch in range(st_epoch + 1, num_epoch + 1):
netP.train()
loss_P_train = []
val_data = next(iter(loader_val))
val_input = val_data["image"].to(device)
val_target = val_data["hmap"]
for batch, data in enumerate(loader_train, 1):
input_data = data["image"].to(device)
target = data["hmap"].to(device)
target_weight = None
# forward netP
output = netP(input_data)
# Build target heatmap from pose labels
# try interpolation - deprecated
# target = nn.functional.interpolate(target, (output.size()[1], output.size()[2], output.size()[3]), mode="nearest")
size = (output.size()[2], output.size()[3])
target = Resample()(size=size, target=target)
# backward netP
set_requires_grad(netP, True)
optimP.zero_grad()
loss_P = fn_pose(output, target)
loss_P.backward()
optimP.step()
# compute the losses
loss_P_train += [float(loss_P.item())]
f.write("TRAIN: EPOCH %04d / %04d | BATCH %04d / %04d | "
"POSE LOSS %.8f | \n"%
(epoch, num_epoch, batch, num_batch_train,
np.mean(loss_P_train)))
if batch % 50 == 0:
# Save to the Tensorboard
input_data = fn_tonumpy(fn_denorm(input_data)).squeeze()
output = fn_tonumpy(fn_denorm(output)).squeeze()
input_data = np.clip(input_data, a_min=0, a_max=1)
# Convert pose heatmap into image form
output = pose2image(output)
output = np.clip(output, a_min=0, a_max=1)
id = num_batch_train * (epoch - 1) + batch
if not batch_size==1:
plt.imsave(os.path.join(result_dir_train, '%04d_input.png' % id), input_data[0],
cmap=cmap)
plt.imsave(os.path.join(result_dir_train, '%04d_output.png' % id), output[0],
cmap=cmap)
writer_train.add_image('input', input_data, id, dataformats='NHWC')
writer_train.add_image('output', output, id, dataformats='NHWC')
else:
plt.imsave(os.path.join(result_dir_train, '%04d_input.png' % id), input_data,
cmap=cmap)
plt.imsave(os.path.join(result_dir_train, '%04d_output.png' % id), output,
cmap=cmap)
writer_train.add_image('input', input_data, id, dataformats='HWC')
writer_train.add_image('output', output, id, dataformats='HWC')
writer_train.add_scalar('loss_P', np.mean(loss_P_train), epoch)
if epoch % 10 == 0 or epoch == num_epoch:
save(ckpt_dir=ckpt_dir, epoch=epoch,
netP=netP, optimP=optimP)
# forward netP
with torch.no_grad():
netP.eval()
val_output = netP(val_input)
val_target = nn.functional.interpolate(val_target, (val_output.size()[2], val_output.size()[3]), mode="nearest").to(device)
# Early stop when validation loss does not reduce
val_loss = fn_pose(val_output, val_target)
early_stop(val_loss=val_loss, model=netP, optim=optimP, epoch=epoch)
if early_stop.early_stop:
break
writer_train.close()
f.close()
def test(args):
## Set Hyperparameters for the Testing
mode = args.mode
train_continue = args.train_continue
lr = args.lr
batch_size = args.batch_size
num_epoch = args.num_epoch
data_dir = args.data_dir
ckpt_dir = args.ckpt_dir
log_dir = args.log_dir
result_dir = args.result_dir
log_prefix = args.log_prefix
task = args.task
num_mark = args.num_mark
ny = args.ny
nx = args.nx
nch = args.nch
nker = args.nker
norm = args.norm
network = args.network
resnet_depth = args.resnet_depth
joint_weight = args.joint_weight
cuda = args.cuda
device = torch.device(cuda if torch.cuda.is_available() else 'cpu')
## Open log file and write
date_time = datetime.now().strftime("%m/%d/%Y, %H:%M:%S")
f = open(log_prefix + "-" + mode + ".txt", "a")
f.write("initiate %s loop : " % mode + date_time + "\n")
f.write("mode: %s\n" % mode)
f.write("norm: %s\n" % norm)
f.write("learning rate: %.4e\n" % lr)
f.write("batch size: %d\n" % batch_size)
f.write("number of epoch: %d\n" % num_epoch)
f.write("task: %s\n" % task)
f.write("number of markers: %s\n" % num_mark)
f.write("network: %s\n" % network)
f.write("data dir: %s\n" % data_dir)
f.write("ckpt dir: %s\n" % ckpt_dir)
f.write("log dir: %s\n" % log_dir)
f.write("result dir: %s\n" % result_dir)
f.write("device: %s\n" % device)
## Create Result Directories
result_dir_test = os.path.join(result_dir, 'test')
if not os.path.exists(result_dir_test):
os.makedirs(os.path.join(result_dir_test))
if mode == 'test':
transform_test = "RN" # Resize - Normalization
dataset_test = Dataset(data_dir=os.path.join(data_dir, 'test'),
transform=transform_test, shape=(ny, nx, nch), hm_shape=(ny, nx, num_mark))
loader_test = DataLoader(dataset_test,
batch_size=batch_size,
shuffle=False, num_workers=NUM_WORKER)
# Set Other Variables
num_data_test = len(dataset_test)
num_batch_test = np.ceil(num_data_test / batch_size)
if network == "PoseResNet":
netP = PoseResNet(in_channels=nch, out_channels=num_mark, nker=nker, norm=norm, num_layers=resnet_depth).to(device)
message = init_weights(netP, init_type='normal', init_gain=0.02)
f.write(message)
elif network == "PoseResNetv2":
netP = PoseResNetv2(out_channels=num_mark, num_layers=resnet_depth, pretrained=True).to(device)
f.write("initialize network with pretrained parameters\n")
## Define the Loss Functions
fn_pose = JointsMSELoss(use_target_weight=joint_weight).to(device)
## Set the Optimizers
optimP = torch.optim.Adam(netP.parameters(), lr=lr, betas=(0.5, 0.999))
## Define Other Functions
fn_tonumpy = lambda x: x.to('cpu').detach().numpy().transpose(0, 2, 3, 1)
fn_denorm = lambda x: (x * STD) + MEAN
cmap = None
## Set SummaryWriter for the Tensorboard
writer_test = SummaryWriter(log_dir=os.path.join(log_dir, 'test'))
## Inference
st_epoch = 0
if mode == 'test':
epoch, netP, optimP = load(ckpt_dir=ckpt_dir,
netP=netP,
optimP=optimP)
with torch.no_grad():
netP.eval()
loss_P = []
for batch, data in enumerate(loader_test, 1):
input_data = data["image"].to(device)
target = data["hmap"].to(device)
target_weight = None
# forward netP
output = netP(input_data)
# Build target heatmap from pose labels
# try interpolation - deprecated
# target = nn.functional.interpolate(target, (output.size()[1], output.size()[2], output.size()[3]), mode="nearest")
size = (output.size()[2], output.size()[3])
target = Resample()(size=size, target=target)
loss = fn_pose(output, target)
# compute the losses
loss_P_test = float(loss.item())
loss_P += [loss_P_test]
# Save to the Tensorboard
input_data = fn_tonumpy(fn_denorm(input_data))
output = fn_tonumpy(fn_denorm(output))
target = fn_tonumpy(fn_denorm(target))
if not batch_size==1:
for j in range(input_data.shape[0]):
id = batch_size * (batch - 1) + j
input_data_ = input_data[j]
output_ = output[j]
target_ = target[j]
input_data_ = np.clip(input_data_, a_min=0, a_max=1)
# Convert pose heatmaps into image form
output_ = reshape2image(output_)
output_ = np.clip(output_, a_min=0, a_max=1)
target_ = reshape2image(target_)
target_ = np.clip(target_, a_min=0, a_max=1)
plt.imsave(os.path.join(result_dir_test, '%04d_input.png' % id), input_data_)
plt.imsave(os.path.join(result_dir_test, '%04d_output.png' % id), output_)
plt.imsave(os.path.join(result_dir_test, '%04d_target.png' % id), target_)
writer_test.add_image('input', input_data, id, dataformats='NHWC')
writer_test.add_image('output', output, id, dataformats='NHWC')
writer_test.add_image('target', target, id, dataformats='NHWC')
f.write("TEST: BATCH %04d / %04d | POSE LOSS %.8f | \n" % (id + 1, num_data_test, np.mean(loss_P_test)))
else:
id = batch_size * (batch - 1) + 0
input_data_ = input_data
output_ = output
target_ = target
input_data_ = reshape2image(input_data_)
input_data_ = np.clip(input_data_, a_min=0, a_max=1)
# Convert pose heatmaps into image form
output_ = reshape2image(output_)
output_ = np.clip(output_, a_min=0, a_max=1)
target_ = reshape2image(target_)
target_ = np.clip(target_, a_min=0, a_max=1)
plt.imsave(os.path.join(result_dir_test, '%04d_input.png' % id), input_data_)
plt.imsave(os.path.join(result_dir_test, '%04d_output.png' % id), output_)
plt.imsave(os.path.join(result_dir_test, '%04d_target.png' % id), target_)
writer_test.add_image('input', input_data_, id, dataformats='HWC')
writer_test.add_image('output', output_, id, dataformats='HWC')
writer_test.add_image('target', target_, id, dataformats='HWC')
f.write("TEST: BATCH %04d / %04d | POSE LOSS %.8f | \n" % (id + 1, num_data_test, np.mean(loss_P_test)))
writer_test.add_scalar('loss', np.mean(loss_P), batch)
writer_test.close()
f.close()
def evaluate(args):
## Set Hyperparameters for the Evaluation
mode = args.mode
# Assemble the 12x12 element stiffness matrix k of the member connecting
# nodes i and j into the global stiffness matrix K (6 degrees of freedom per
# node: local rows/columns 0-5 map to global DOFs 6*i..6*i+5, local
# rows/columns 6-11 map to 6*j..6*j+5). The original fully unrolled
# entry-by-entry scatter-add is collapsed into one vectorized update; this
# assumes i != j and that K and k are NumPy arrays, as the tuple indexing in
# the surrounding code implies.
dofs = np.r_[6 * i:6 * i + 6, 6 * j:6 * j + 6]
K[np.ix_(dofs, dofs)] += k
# To add a new cell, type '# %%'
# To add a new markdown cell, type '# %% [markdown]'
# %% [markdown]
# # How to use custom data and implement custom models and metrics
# %% [markdown]
# ## Building a simple, first model
# %% [markdown]
# For demonstration purposes we will choose a simple fully connected model. It takes a timeseries of size `input_size` as input and outputs a new timeseries of size `output_size`. You can think of `input_size` as the number of encoding steps and `output_size` as the number of decoding/prediction steps.
# %%
import os
import warnings
# warnings.filterwarnings("ignore")
os.chdir("../../..")
# %%
import torch
from torch import nn
class FullyConnectedModule(nn.Module):
def __init__(self, input_size: int, output_size: int, hidden_size: int, n_hidden_layers: int):
super().__init__()
# input layer
module_list = [nn.Linear(input_size, hidden_size), nn.ReLU()]
# hidden layers
for _ in range(n_hidden_layers):
module_list.extend([nn.Linear(hidden_size, hidden_size), nn.ReLU()])
# output layer
module_list.append(nn.Linear(hidden_size, output_size))
self.sequential = nn.Sequential(*module_list)
def forward(self, x: torch.Tensor) -> torch.Tensor:
# x of shape: batch_size x n_timesteps_in
# output of shape batch_size x n_timesteps_out
return self.sequential(x)
# test that network works as intended
network = FullyConnectedModule(input_size=5, output_size=2, hidden_size=10, n_hidden_layers=2)
x = torch.rand(20, 5)
network(x).shape
# %%
from typing import Dict
from pytorch_forecasting.models import BaseModel
class FullyConnectedModel(BaseModel):
def __init__(self, input_size: int, output_size: int, hidden_size: int, n_hidden_layers: int, **kwargs):
# saves arguments in signature to `.hparams` attribute, mandatory call - do not skip this
self.save_hyperparameters()
# pass additional arguments to BaseModel.__init__, mandatory call - do not skip this
super().__init__(**kwargs)
self.network = FullyConnectedModule(
input_size=self.hparams.input_size,
output_size=self.hparams.output_size,
hidden_size=self.hparams.hidden_size,
n_hidden_layers=self.hparams.n_hidden_layers,
)
def forward(self, x: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
# x is a batch generated based on the TimeSeriesDataset
network_input = x["encoder_cont"].squeeze(-1)
prediction = self.network(network_input)
# We need to return a dictionary that at least contains the prediction and the target_scale.
# The parameter can be directly forwarded from the input.
return dict(prediction=prediction, target_scale=x["target_scale"])
model = FullyConnectedModel(input_size=5, output_size=2, hidden_size=10, n_hidden_layers=2)
# %% [markdown]
# This is a very basic implementation that could be readily used for training. But before we add additional features, let's first have a look at how we pass data to this model.
# %% [markdown]
# ### Passing data to a model
# %%
import numpy as np
import pandas as pd
test_data = pd.DataFrame(
dict(
value=np.random.rand(30) - 0.5,
#value=np.arange(30),
group=np.repeat(np.arange(3), 10),
time_idx=np.tile(np.arange(10), 3),
)
)
test_data
# %%
from pytorch_forecasting import TimeSeriesDataSet
# create the dataset from the pandas dataframe
dataset = TimeSeriesDataSet(
test_data,
group_ids=["group"],
target="value",
time_idx="time_idx",
min_encoder_length=5,
max_encoder_length=5,
min_prediction_length=2,
max_prediction_length=2,
time_varying_unknown_reals=["value"],
)
# %%
dataset.get_parameters()
# %% [markdown]
# Now, we take a look at the output of the dataloader. Its `x` will be fed to the model's `forward` method; that is why it is so important to understand it.
# %%
# convert the dataset to a dataloader
dataloader = dataset.to_dataloader(batch_size=4)
# and load the first batch
x, y = next(iter(dataloader))
print("x =", x)
print("\ny =", y)
print("\nsizes of x =")
for key, value in x.items():
print(f"\t{key} = {value.size()}")
# %% [markdown]
# This explains why we had to first extract the correct input in our simple `FullyConnectedModel` above before passing it to our `FullyConnectedModule`.
# As a reminder:
#
# %%
def forward(self, x: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
# x is a batch generated based on the TimeSeriesDataset
network_input = x["encoder_cont"].squeeze(-1)
prediction = self.network(network_input)
# We need to return a dictionary that at least contains the prediction and the target_scale.
# The parameter can be directly forwarded from the input.
return dict(prediction=prediction, target_scale=x["target_scale"])
# %% [markdown]
# For such a simple architecture, we can ignore most of the inputs in ``x``. You do not have to worry about moving tensors to specific GPUs, [PyTorch Lightning](https://pytorch-lightning.readthedocs.io) will take care of this for you.
#
# Now, let's check if our model works:
# %%
x, y = next(iter(dataloader))
model(x)
# %%
dataset.x_to_index(x)
# %% [markdown]
# ### Coupling datasets and models
# %%
class FullyConnectedModel(BaseModel):
def __init__(self, input_size: int, output_size: int, hidden_size: int, n_hidden_layers: int, **kwargs):
# saves arguments in signature to `.hparams` attribute, mandatory call - do not skip this
self.save_hyperparameters()
# pass additional arguments to BaseModel.__init__, mandatory call - do not skip this
super().__init__(**kwargs)
self.network = FullyConnectedModule(
input_size=self.hparams.input_size,
output_size=self.hparams.output_size,
hidden_size=self.hparams.hidden_size,
n_hidden_layers=self.hparams.n_hidden_layers,
)
def forward(self, x: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
# x is a batch generated based on the TimeSeriesDataset
network_input = x["encoder_cont"].squeeze(-1)
prediction = self.network(network_input).unsqueeze(-1)
# We need to return a dictionary that at least contains the prediction and the target_scale.
# The parameter can be directly forwarded from the input.
return dict(prediction=prediction, target_scale=x["target_scale"])
@classmethod
def from_dataset(cls, dataset: TimeSeriesDataSet, **kwargs):
new_kwargs = {
"output_size": dataset.max_prediction_length,
"input_size": dataset.max_encoder_length,
}
new_kwargs.update(kwargs) # use to pass real hyperparameters and override defaults set by dataset
# example for dataset validation
assert dataset.max_prediction_length == dataset.min_prediction_length, "Decoder only supports a fixed length"
assert dataset.min_encoder_length == dataset.max_encoder_length, "Encoder only supports a fixed length"
assert (
len(dataset.time_varying_known_categoricals) == 0
and len(dataset.time_varying_known_reals) == 0
and len(dataset.time_varying_unknown_categoricals) == 0
and len(dataset.static_categoricals) == 0
and len(dataset.static_reals) == 0
and len(dataset.time_varying_unknown_reals) == 1
and dataset.time_varying_unknown_reals[0] == dataset.target
), "Only covariate should be the target in 'time_varying_unknown_reals'"
return super().from_dataset(dataset, **new_kwargs)
# %% [markdown]
# Now, let's initialize from our dataset:
# %%
model = FullyConnectedModel.from_dataset(dataset, hidden_size=10, n_hidden_layers=2)
model.summarize("full") # print model summary
model.hparams
# %% [markdown]
# ### Defining additional hyperparameters
# %%
model.hparams
# %%
print(BaseModel.__init__.__doc__)
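# %% [markdown]
# A hedged sketch (not part of the original tutorial): generic hyperparameters
# accepted by ``BaseModel`` (see the docstring printed above), such as
# ``learning_rate``, can simply be forwarded through ``**kwargs``:
# %%
model = FullyConnectedModel.from_dataset(
    dataset, hidden_size=10, n_hidden_layers=2, learning_rate=1e-2
)
model.hparams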
# %% [markdown]
# ## Classification
# %%
classification_test_data = pd.DataFrame(
dict(
target=np.random.choice(["A", "B", "C"], size=30), # CHANGING values to predict to a categorical
value=np.random.rand(30), # INPUT values - see next section on covariates how to use categorical inputs
group=np.repeat(np.arange(3), 10),
time_idx=np.tile(np.arange(10), 3),
)
)
classification_test_data
# %%
from pytorch_forecasting.data.encoders import NaNLabelEncoder
# create the dataset from the pandas dataframe
classification_dataset = TimeSeriesDataSet(
classification_test_data,
group_ids=["group"],
target="target", # SWITCHING to categorical target
time_idx="time_idx",
min_encoder_length=5,
max_encoder_length=5,
min_prediction_length=2,
max_prediction_length=2,
time_varying_unknown_reals=["value"],
target_normalizer=NaNLabelEncoder(), # Use the NaNLabelEncoder to encode categorical target
)
x, y = next(iter(classification_dataset.to_dataloader(batch_size=4)))
y[0] # target values are encoded categories
# %% [markdown]
# The keyword argument ``target_normalizer`` is here redundant because the ``TimeSeriesDataSet`` would have detected that a categorical target is used and that therefore a :py:class:`~pytorch_forecasting.data.encoders.NaNLabelEncoder` is required.
# %%
from pytorch_forecasting.metrics import CrossEntropy
class FullyConnectedClassificationModel(BaseModel):
def __init__(
self,
input_size: int,
output_size: int,
hidden_size: int,
n_hidden_layers: int,
n_classes: int,
loss=CrossEntropy(),
**kwargs,
):
# saves arguments in signature to `.hparams` attribute, mandatory call - do not skip this
self.save_hyperparameters()
# pass additional arguments to BaseModel.__init__, mandatory call - do not skip this
super().__init__(**kwargs)
self.network = FullyConnectedModule(
input_size=self.hparams.input_size,
output_size=self.hparams.output_size * self.hparams.n_classes,
hidden_size=self.hparams.hidden_size,
n_hidden_layers=self.hparams.n_hidden_layers,
)
def forward(self, x: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
# x is a batch generated based on the TimeSeriesDataset
batch_size = x["encoder_cont"].size(0)
network_input = x["encoder_cont"].squeeze(-1)
prediction = self.network(network_input)
# RESHAPE output to batch_size x n_decoder_timesteps x n_classes
prediction = prediction.unsqueeze(-1).view(batch_size, -1, self.hparams.n_classes)
# We need to return a dictionary that at least contains the prediction and the target_scale.
# The parameter can be directly forwarded from the input.
return dict(prediction=prediction, target_scale=x["target_scale"])
@classmethod
def from_dataset(cls, dataset: TimeSeriesDataSet, **kwargs):
assert isinstance(dataset.target_normalizer, NaNLabelEncoder), "target normalizer has to encode categories"
new_kwargs = {
"n_classes": len(
dataset.target_normalizer.classes_
), # ADD number of classes as encoded by the target normalizer
"output_size": dataset.max_prediction_length,
"input_size": dataset.max_encoder_length,
}
new_kwargs.update(kwargs) # use to pass real hyperparameters and override defaults set by dataset
# example for dataset validation
assert dataset.max_prediction_length == dataset.min_prediction_length, "Decoder only supports a fixed length"
assert dataset.min_encoder_length == dataset.max_encoder_length, "Encoder only supports a fixed length"
assert (
len(dataset.time_varying_known_categoricals) == 0
and len(dataset.time_varying_known_reals) == 0
and len(dataset.time_varying_unknown_categoricals) == 0
and len(dataset.static_categoricals) == 0
and len(dataset.static_reals) == 0
and len(dataset.time_varying_unknown_reals) == 1
), "Only covariate should be in 'time_varying_unknown_reals'"
return super().from_dataset(dataset, **new_kwargs)
model = FullyConnectedClassificationModel.from_dataset(classification_dataset, hidden_size=10, n_hidden_layers=2)
model.summarize("full")
model.hparams
# %%
# passing x through model
model(x)["prediction"].shape
# %% [markdown]
# ## Predicting multiple targets at the same time
# %% [markdown]
# Training a model to predict multiple targets simultaneously is not difficult to implement. We can even employ mixed targets, i.e. a mix of categorical and continuous targets. The first step is to define a dataframe with multiple targets:
# %%
multi_target_test_data = pd.DataFrame(
dict(
target1=np.random.rand(30),
target2=np.random.rand(30),
group=np.repeat(np.arange(3), 10),
time_idx=np.tile(np.arange(10), 3),
)
)
multi_target_test_data
# %%
from pytorch_forecasting.data.encoders import EncoderNormalizer, MultiNormalizer, TorchNormalizer
# create the dataset from the pandas dataframe
multi_target_dataset = TimeSeriesDataSet(
multi_target_test_data,
group_ids=["group"],
target=["target1", "target2"], # USING two targets
time_idx="time_idx",
min_encoder_length=5,
max_encoder_length=5,
min_prediction_length=2,
max_prediction_length=2,
time_varying_unknown_reals=["target1", "target2"],
target_normalizer=MultiNormalizer(
[EncoderNormalizer(), TorchNormalizer()]
), # Use a MultiNormalizer to wrap one normalizer per target
)
x, y = next(iter(multi_target_dataset.to_dataloader(batch_size=4)))
y[0] # target values are a list of targets
# %%
from typing import List, Union
from pytorch_forecasting.metrics import MAE, SMAPE, MultiLoss
from pytorch_forecasting.utils import to_list
class FullyConnectedMultiTargetModel(BaseModel):
def __init__(
self,
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2020-2022 Barcelona Supercomputing Center (BSC), Spain
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import atexit
import json
import logging
import os
import sys
import shutil
import tempfile
from typing import cast, Sequence, Type, Union
import yaml
# We have preference for the C based loader and dumper, but the code
# should fallback to default implementations when C ones are not present
YAMLLoader: Type[Union[yaml.Loader, yaml.CLoader]]
YAMLDumper: Type[Union[yaml.Dumper, yaml.CDumper]]
try:
from yaml import CLoader as YAMLLoader, CDumper as YAMLDumper
except ImportError:
from yaml import Loader as YAMLLoader, Dumper as YAMLDumper
from wfexs_backend.wfexs_backend import WfExSBackend
from wfexs_backend.workflow import WF
from wfexs_backend import get_WfExS_version
from wfexs_backend.common import StrDocEnum, ArgsDefaultWithRawHelpFormatter
from wfexs_backend.common import CacheType as WfExS_CacheType
from wfexs_backend.common import SymbolicName
from wfexs_backend.utils.misc import DatetimeEncoder
class WfExS_Commands(StrDocEnum):
Init = ('init', 'Init local setup')
Cache = ('cache', 'Cache handling subcommands')
ConfigValidate = ('config-validate', 'Validate the configuration files to be used for staging and execution')
Stage = ('stage', 'Prepare the staging (working) directory for workflow execution, fetching dependencies and contents')
MountWorkDir = ('mount-workdir', 'Mount the encrypted staging directory on secure staging scenarios')
StagedWorkDir = ('staged-workdir', 'Staged working directories handling subcommands')
Export = ('export', 'Staged working directories export subcommands')
ExportStage = ('export-stage', 'Export the staging directory as an RO-Crate')
OfflineExecute = ('offline-execute', 'Execute an already prepared workflow in the staging directory')
Execute = ('execute', 'Execute the stage + offline-execute + export steps')
ExportResults = ('export-results', 'Export the results to a remote location, gathering their public ids')
ExportCrate = ('export-crate', 'Export an already executed workflow in the staging directory as an RO-Crate')
class WfExS_Cache_Commands(StrDocEnum):
List = ('ls', 'List the cache entries')
Inject = ('inject', 'Inject a new entry in the cache')
Fetch = ('fetch', 'Fetch a new cache entry, giving as input both the URI and optionally both a security context file and a security context name')
Remove = ('rm', 'Remove an entry from the cache')
Validate = ('validate', 'Validate the consistency of the cache')
class WfExS_Staged_WorkDir_Commands(StrDocEnum):
OfflineExecute = ('offline-exec', 'Offline execute the staged instances which match the input pattern')
List = ('ls', 'List the staged instances\n\tIt shows the instance id, nickname,\n\tencryption and whether they are damaged')
Mount = ('mount', 'Mount the staged instances which match the input pattern')
Remove = ('rm', 'Removes the staged instances which match the input pattern')
Shell = ('shell', 'Launches a command in the workdir\n\tFirst parameter is either the staged instance id or the nickname.\n\tIt launches the command specified after the id.\n\tIf there is no additional parameters, it launches a shell\n\tin the mounted working directory of the instance')
Status = ('status', 'Shows staged instances status')
# Validate = 'validate'
class WfExS_Export_Commands(StrDocEnum):
List = ('ls', 'List the public identifiers obtained from previous export actions')
Run = ('run', 'Run the different export actions, pushing the exported content and gathering the obtained permanent / public identifiers')
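# Illustrative invocations built from the enums above (hedged: the executable
# name "WfExS-backend.py" is an assumption based on the project layout):
#   WfExS-backend.py cache ls <cache-type>
#   WfExS-backend.py staged-workdir ls
#   WfExS-backend.py export ls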
DEFAULT_LOCAL_CONFIG_RELNAME = 'wfexs_config.yml'
LOGGING_FORMAT = '%(asctime)-15s - [%(levelname)s] %(message)s'
DEBUG_LOGGING_FORMAT = '%(asctime)-15s - [%(name)s %(funcName)s %(lineno)d][%(levelname)s] %(message)s'
def genParserSub(
sp: argparse._SubParsersAction,
command: WfExS_Commands,
preStageParams: bool = False,
postStageParams: bool = False,
crateParams: bool = False,
exportParams: bool = False
) -> argparse.ArgumentParser:
ap_ = sp.add_parser(
command.value,
formatter_class=ArgsDefaultWithRawHelpFormatter,
help=command.description
)
if preStageParams:
ap_.add_argument(
'-W',
'--workflow-config',
dest="workflowConfigFilename",
required=True,
help="Configuration file, describing workflow and inputs"
)
if preStageParams or exportParams:
ap_.add_argument(
'-Z',
'--creds-config',
dest="securityContextsConfigFilename",
help="Configuration file, describing security contexts, which hold credentials and similar"
)
if exportParams:
ap_.add_argument(
'-E',
'--exports-config',
dest="exportsConfigFilename",
help="Configuration file, describing exports which can be done"
)
if preStageParams:
ap_.add_argument(
'-n',
'--nickname-prefix',
dest="nickname_prefix",
help="Nickname prefix to be used on staged workdir creation"
)
if postStageParams:
ap_.add_argument(
'-J',
'--staged-job-dir',
dest='workflowWorkingDirectory',
required=True,
help="Already staged job directory"
)
if crateParams:
ap_.add_argument('--full', dest='doMaterializedROCrate', action='store_true',
help="Should the RO-Crate contain a copy of the inputs (and outputs)?")
return ap_
def processCacheCommand(wfBackend: WfExSBackend, args: argparse.Namespace, logLevel) -> int:
"""
This method processes the cache subcommands, and returns the retval
to be used with sys.exit
"""
print(f"\t- Subcommand {args.cache_command} {args.cache_type}")
cH, cPath = wfBackend.getCacheHandler(args.cache_type)
assert cPath is not None
retval = 0
if args.cache_command == WfExS_Cache_Commands.List:
if logLevel <= logging.INFO:
contentsI = sorted(map(lambda l: l[1], cH.list(cPath, *args.cache_command_args, acceptGlob=args.filesAsGlobs, cascade=args.doCacheCascade)), key=lambda x: x['stamp'])
for entryI in contentsI:
json.dump(entryI, sys.stdout, cls=DatetimeEncoder, indent=4, sort_keys=True)
print()
else:
contentsD = sorted(map(lambda l: l[0], cH.list(cPath, *args.cache_command_args, acceptGlob=args.filesAsGlobs, cascade=args.doCacheCascade)), key=lambda x: x.uri)
for entryD in contentsD:
print(entryD)
elif args.cache_command == WfExS_Cache_Commands.Remove:
print('\n'.join(map(lambda x: '\t'.join([x[0].uri, x[1]]), cH.remove(cPath, *args.cache_command_args, acceptGlob=args.filesAsGlobs, doRemoveFiles=args.doCacheRecursively, cascade=args.doCacheCascade))))
elif args.cache_command == WfExS_Cache_Commands.Inject:
if len(args.cache_command_args) == 2:
injected_uri = args.cache_command_args[0]
finalCachedFilename = args.cache_command_args[1]
# # First, remove old occurrence
# cH.remove(cPath, injected_uri)
# Then, inject new occurrence
cH.inject(cPath, injected_uri, finalCachedFilename=finalCachedFilename)
else:
print(f"ERROR: subcommand {args.cache_command} takes two positional parameters: the URI to be injected, and the path to the local content to be associated to that URI", file=sys.stderr)
retval = 1
elif args.cache_command == WfExS_Cache_Commands.Validate:
for metaUri, validated, metaStructure in cH.validate(cPath, *args.cache_command_args, acceptGlob=args.filesAsGlobs, cascade=args.doCacheCascade):
print(f"\t- {metaUri.uri} {validated}")
# pass
elif args.cache_command == WfExS_Cache_Commands.Fetch:
if len(args.cache_command_args) == 1 or len(args.cache_command_args) == 3:
uri_to_fetch = args.cache_command_args[0]
secContext = None
if len(args.cache_command_args) == 3:
secContextFilename = args.cache_command_args[1]
secContextName = args.cache_command_args[2]
if os.path.exists(secContextFilename):
numErrors, secContextBlock = wfBackend.parseAndValidateSecurityContextFile(secContextFilename)
if numErrors > 0:
print(f"ERROR: security context file {secContextFilename} has {numErrors} errors", file=sys.stderr)
retval = 1
else:
print(f"ERROR: security context file {secContextFilename} does not exist", file=sys.stderr)
retval = 1
if retval == 0:
secContext = secContextBlock.get(secContextName)
if secContext is None:
print(f"ERROR: security context file {secContextFilename} does not contain the security context {secContextName}", file=sys.stderr)
retval = 1
if retval == 0:
contentKind, abs_path, metadata, licences = wfBackend.cacheFetch(uri_to_fetch, args.cache_type, offline=False, secContext=secContext)
print(f'{contentKind}\t{abs_path}\t{licences}\t{metadata}')
else:
print(f"ERROR: subcommand {args.cache_command} takes either one or three positional parameters: the URI to be fetched, the path to a security context file and the security context to be used for the fetch operation", file=sys.stderr)
retval = 1
return retval
def processStagedWorkdirCommand(wB:WfExSBackend, args: argparse.Namespace, loglevel) -> int:
"""
This method processes the staged working directory subcommands, and returns the retval
to be used with sys.exit
"""
print(f"\t- Subcommand {args.staged_workdir_command}")
retval = 0
# This is needed to be sure the encfs instance is unmounted
#if args.staged_workdir_command != WfExS_Staged_WorkDir_Commands.Mount:
# atexit.register(wfInstance.cleanup)
if args.staged_workdir_command == WfExS_Staged_WorkDir_Commands.Mount:
if len(args.staged_workdir_command_args) > 0:
for instance_id, nickname, creation, wfSetup, wfInstance in wB.listStagedWorkflows(*args.staged_workdir_command_args, acceptGlob=args.filesAsGlobs, doCleanup=False):
if wfSetup is not None:
print(f'Mounted {instance_id} ({nickname}) at {wfSetup.work_dir}')
elif args.staged_workdir_command == WfExS_Staged_WorkDir_Commands.List:
contents = sorted(
wB.listStagedWorkflows(*args.staged_workdir_command_args, acceptGlob=args.filesAsGlobs),
key=lambda x: x[2]
)
for instance_id, nickname, creation, wfSetup, _ in contents:
is_encrypted : Union[bool, str]
if wfSetup is None:
is_damaged = True
is_encrypted = '(unknown)'
else:
is_damaged = wfSetup.is_damaged
is_encrypted = wfSetup.is_encrypted
print(f'{instance_id}\t{nickname}\t{creation.isoformat()}\t{is_encrypted}\t{is_damaged}')
elif args.staged_workdir_command == WfExS_Staged_WorkDir_Commands.Remove:
print('\n'.join(map(lambda x: 'Removed: ' + '\t'.join(x), wB.removeStagedWorkflows(*args.staged_workdir_command_args, acceptGlob=args.filesAsGlobs))))
elif args.staged_workdir_command == WfExS_Staged_WorkDir_Commands.Shell:
retval = wB.shellFirstStagedWorkflow(*args.staged_workdir_command_args, acceptGlob=args.filesAsGlobs)
elif args.staged_workdir_command == WfExS_Staged_WorkDir_Commands.OfflineExecute:
if len(args.staged_workdir_command_args) > 0:
for instance_id, nickname, creation, wfSetup, wfInstance in wB.listStagedWorkflows(*args.staged_workdir_command_args, acceptGlob=args.filesAsGlobs, doCleanup=False):
is_damaged = True if wfSetup is None else wfSetup.is_damaged
if not is_damaged and (wfInstance is not None):
try:
wfInstance.executeWorkflow(offline=True)
except Exception as e:
logging.exception(f'Error while executing {instance_id} ({nickname})')
finally:
wfInstance.cleanup()
elif args.staged_workdir_command == WfExS_Staged_WorkDir_Commands.Status:
if len(args.staged_workdir_command_args) > 0:
for instance_id, nickname, creation, wfSetup, mStatus in wB.statusStagedWorkflows(*args.staged_workdir_command_args, acceptGlob=args.filesAsGlobs):
if wfSetup is None:
is_damaged = True
is_encrypted = '(unknown)'
else:
is_damaged = wfSetup.is_damaged
is_encrypted = wfSetup.is_encrypted
print(
f"""=> Instance {instance_id} ({nickname})
* Is damaged? {is_damaged}
* Created: {creation.isoformat()}
* Secure (encrypted)? {is_encrypted}
* {repr(mStatus)}
""")
return retval
def processExportCommand(wfInstance: WF, args: argparse.Namespace, loglevel) -> int:
"""
This method processes the export subcommands, and returns the retval
to be used with sys.exit
"""
print(f"\t- Subcommand {args.export_contents_command}")
retval = 0
if args.export_contents_command == WfExS_Export_Commands.List:
for mExport in wfInstance.listMaterializedExportActions():
print(f'{mExport}')
elif | |
4.5],
[24.5, 68.0, 35.5, 112.0],
# [2, 4, 24.6, 8],
[-9.8, 0.5, 13.8, 7.5]
]) # (xmin, ymin, xmax, ymax)
boxlist2 = np.array([[1, 1, 5, 8, np.pi/16],
# [1, 1, 5, 8, np.pi/16 + np.pi/10]
# [1, 1, 10, 5, 0],
[30, 90, 12, 45, np.pi/10],
[5, 4, 26, 8.2, np.pi/2 + np.pi/10.]
])
polys2 = RotBox2Polys(boxlist2)
targets = dbbox_transform2_warp(boxlist1, polys2)
expected_targets = np.array([[0, 0, 0, 0, 0.78125],
[0, 0, 0, 0, 0.8],
[0., -3/8., np.log(26/24.6), np.log(8.2/8.), np.pi/10./(2 * np.pi)]])
np.testing.assert_almost_equal(expected_targets, targets, decimal=4)
@unittest.skip("The test need to be reconstruct")
def test_dbbox_transform2_inv_warp_multiclass(self):
# test 2 classes here
ext_rois = np.array([[-1, -2.5, 3, 4.5],
[24.5, 68.0, 35.5, 112.0],
# [2, 4, 24.6, 8],
[-9.8, 0.5, 13.8, 7.5]
]) # (xmin, ymin, xmax, ymax)
expected_results = np.array([[1, 1, 5, 8, np.pi/2, 1, 1, 5, 8, np.pi/16], # (x_ctr, y_ctr, w, h, theta)
[30, 90, 12, 45, np.pi/10, 30, 90, 12, 45, np.pi/2],
# [2, 4, 24.6, 8],
[2, 4, 24.6, 8, np.pi/2, 5, 4, 26, 8.2, np.pi/2 + np.pi/10.]
])
targets = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0.78125],
[0, 0, 0, 0, 0.8, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -3/8., np.log(26/24.6), np.log(8.2/8.), np.pi/10./(2 * np.pi)]])
outputs = dbbox_transform2_inv_warp(ext_rois, targets)
# print 'outputs:', outputs
np.testing.assert_almost_equal(outputs, expected_results, decimal=4)
def test_dbbox_transform2_encode_decode(self):
boxlist2 = np.array([[1, 1, 5, 8, np.pi/16],
[1, 1, 5, 8, np.pi/16 + np.pi/10],
[1, 1, 10, 5, 0],
[30, 90, 12, 45, np.pi/10]
])
polys2 = RotBox2Polys(boxlist2)
ex_rois = poly2bbox(polys2)
targets = dbbox_transform2_warp(ex_rois, polys2)
outputs = dbbox_transform2_inv_warp(ex_rois, targets)
np.testing.assert_almost_equal(outputs, boxlist2, decimal=5)
def test_RotBox2Polys(self):
rotboxes = np.array([[1, 1, 5, 8, np.pi/2, 1, 1, 5, 8, np.pi/16], # (x_ctr, y_ctr, w, h, theta)
[30, 90, 12, 45, np.pi/10, 30, 90, 12, 45, np.pi/2],
# [2, 4, 24.6, 8],
[2, 4, 24.6, 8, np.pi/2, 5, 4, 26, 8.2, np.pi/2 + np.pi/10.]
])
expected_polys = np.concatenate((RotBox2Polys(rotboxes[:, 0:5]), RotBox2Polys(rotboxes[:, 5:10])), axis=1)
polys = RotBox2Polys_multi_class(rotboxes)
# print 'polys:', polys
self.assertTrue(polys.shape == (3, 16))
np.testing.assert_almost_equal(expected_polys, polys)
def test_polys2xyhs(self):
# polys =
pass
@unittest.skip("The test can not be passed")
def test_xyhs2polys(self):
# xyh format: (x1, y1, x2, y2, h), x1, y1 is the first point, x2, y2 is the second point. h is the height of a bounding box
xyhs = np.array([[2, 1, 6, 3, 3],
[1.4, 8, 4.2, 6.3, 7.4]])
polys = xyhs2polys(xyhs)
inverse_xyhs = polys2xyhs(polys)
inverse_polys = xyhs2polys(inverse_xyhs)
np.testing.assert_almost_equal(xyhs, inverse_xyhs, decimal=6)
np.testing.assert_almost_equal(polys, inverse_polys, decimal=6)
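# A sketch of the geometry the xyh comment above describes (an assumption,
# not the actual xyhs2polys implementation): the box is the edge
# (x1, y1)-(x2, y2) extruded by h along one of its unit normals:
#   d = (p2 - p1) / ||p2 - p1||     # unit vector along the first edge
#   n = (-d[1], d[0])               # one of the two unit normals
#   corners = [p1, p2, p2 + h * n, p1 + h * n]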
def test_dbbox_transform3(self):
boxlist1 = np.array([[np.sqrt(3), 1, 2 * np.sqrt(3), 2, 2],
[np.sqrt(3), 1, 1 + np.sqrt(3), 1 + np.sqrt(3), 2]])
boxlist2 = np.array([[(3 * np.sqrt(3)-1)/2., (3 + np.sqrt(3))/2., (4 * np.sqrt(3) - 1)/2., (4 + np.sqrt(3))/2., 1],
[(np.sqrt(3) + 1)/2., (np.sqrt(3) + 3)/2., (np.sqrt(3) + 2)/2., (3 + 2 * np.sqrt(3))/2., 1]])
targets = dbboxtransform3(boxlist1, boxlist2)
trans_boxlist1 = np.array([[0, 0, 2, 0, 2]])
expected_targets = np.array([[0.5, 0.5, 0, 0.5, np.log(1/2.)],
[0.5, 0.5, 0, 0.5, np.log(1 / 2.)]])
np.testing.assert_almost_equal(expected_targets, targets)
def test_dbbox_transform3_inv_warp(self):
ext_rois = np.array([[2, 5, 6, 10.3]])
targets = np.array([[1/4., 0.2/5.3, 0, 0.2/5.3, np.log(5.1/5.3)]])
outputs = dbboxtransform3_inv_warp(ext_rois, targets)
expected_results = np.array([[3, 5.2, 6, 5.2, 5.1]])
# pdb.set_trace()
np.testing.assert_almost_equal(outputs, expected_results)
def test_dbbox_transform3_warp_encode_decode(self):
boxlist1 = np.array([[-1, -2.5, 3, 4.5],
[24.5, 68.0, 35.5, 112.0],
# [2, 4, 24.6, 8],
[-9.8, 0.5, 13.8, 7.5]
]) # (xmin, ymin, xmax, ymax)
boxlist2 = np.array([[1, 1, 5, 8, np.pi/16],
# [1, 1, 5, 8, np.pi/16 + np.pi/10]
# [1, 1, 10, 5, 0],
[30, 90, 12, 45, np.pi/10],
[5, 4, 26, 8.2, np.pi/2 + np.pi/10.]
])
polys2 = RotBox2Polys(boxlist2)
gt_xyhs = polys2xyhs(polys2)
targets = dbboxtransform3_warp(boxlist1, polys2)
# expected_targets = np.array([[0, 0, 0, 0, 0.78125],
# [0, 0, 0, 0, 0.8],
# [0., -3/8., np.log(26/24.6), np.log(8.2/8.), np.pi/10./(2 * np.pi)]])
targets_inverse = dbboxtransform3_inv_warp(boxlist1, targets)
np.testing.assert_almost_equal(gt_xyhs, targets_inverse)
def test_dbbox_transform3_rotation_invariant(self):
boxlist1 = np.array([[1000, 1000.8, 8000.767, 12500, np.pi/6.],
[24.5, 68.0, 23, 89.2, np.pi],
# [2, 4, 24.6, 8],
# [-9.8, 0.5, 13.8, 7.5, -np.pi/10.]
]) # (xmin, ymin, xmax, ymax)
boxlist2 = np.array([[1000, 1000.8, 5000.767, 8000, np.pi/16],
# [1, 1, 5, 8, np.pi/16 + np.pi/10]
# [1, 1, 10, 5, 0],
[24.5, 68.0, 12, 45.5, np.pi/10],
# [5, 4, 26, 8.2, np.pi/2 + np.pi/10.]
])
polys1 = RotBox2Polys(boxlist1)
polys2 = RotBox2Polys(boxlist2)
xyhs1 = polys2xyhs(polys1)
xyhs2 = polys2xyhs(polys2)
randangle = np.random.rand()
boxlist3 = copy.deepcopy(boxlist1)
boxlist3[:, 4] = boxlist3[:, 4] + randangle
polys3 = RotBox2Polys(boxlist3)
xyhs3 = polys2xyhs(polys3)
boxlist4 = copy.deepcopy(boxlist2)
boxlist4[:, 4] = boxlist4[:, 4] + randangle
polys4 = RotBox2Polys(boxlist4)
xyhs4 = polys2xyhs(polys4)
targets1 = dbboxtransform3(xyhs1, xyhs2)
targets2 = dbboxtransform3(xyhs3, xyhs4)
np.testing.assert_almost_equal(targets1, targets2, decimal=6)
def test_dbbox_transform3_inv_multi_class(self):
pass
def test_dbbox_transform3_inv_warp_multi_class(self):
"""
This is a multi-class test
:return:
"""
ext_rois = np.array([[2, 5, 6, 10.3],
])
targets = np.array([[0, 0, 0, 0, 0, 1/4., 0.2/5.3, 0, 0.2/5.3, np.log(5.1/5.3), 0, 0, 0, 0, 0]
])
outputs = dbboxtransform3_inv_warp(ext_rois, targets)
expected_results = np.array([[2, 5, 6, 5, 5.3, 3, 5.2, 6, 5.2, 5.1, 2, 5, 6, 5, 5.3]])
np.testing.assert_almost_equal(outputs, expected_results)
def test_bbox_transformxyh(self):
ext_rois = np.array([[-1, -2.5, 3, 4.5],
[24.5, 68.0, 35.5, 112.0],
[-9.8, 0.5, 13.8, 7.5]])
def test_polygonToRotRectangle_batch(self):
polygons = np.array([[0, 0, 3, 0, 3, 3, 0, 3]])
rotboxs = polygonToRotRectangle_batch(polygons)
print('rotboxs:', rotboxs)
def test_get_best_begin_point_wrapp(self):
print('test get best begin point')
poly = [7, 5, 3, 6, 1, 2, 5, 1]
expected_output = [1, 2, 5, 1, 7, 5, 3, 6]
output = get_best_begin_point_wrapp(poly)
np.testing.assert_almost_equal(np.array(output), np.array(expected_output))
def test_xyhs2polys_muli_class(self):
xyhs = np.array([[0, 0, 2, 0, 3, 3, 4.3, 6, 7, 8.4],
[2, 0, 2, 3, 2, 4.4, 5.5, 7.6, 8.2, 9]])
polys = xyhs2polys_muli_class(xyhs)
expected_polys = np.concatenate((xyhs2polys(xyhs[:, 0:5]), xyhs2polys(xyhs[:, 5:10])), axis=1)
self.assertTrue(polys.shape == (2, 16))
np.testing.assert_almost_equal(polys, expected_polys)
def test_choose_best_Rroi_batch(self):
# (x_ctr, y_ctr, w, h, angle)
Rrois = np.array([[3, 4, 2, 10, np.pi/6.],
[3, 4, 10, 2, np.pi/6. + np.pi/2.],
[3, 4, 2, 10, np.pi/6. + np.pi],
[3, 4, 10, 2, np.pi/6. + np.pi + np.pi/2.]])
results = choose_best_Rroi_batch(Rrois)
expected_results = np.array([[3, 4, 10, 2, np.pi/6. + np.pi/2.],
[3, 4, 10, 2, np.pi / 6. + np.pi / 2.],
[3, 4, 10, 2, np.pi / 6. + np.pi / 2.],
[3, 4, 10, 2, np.pi / 6. + np.pi / 2.]])
np.testing.assert_almost_equal(results, expected_results, decimal=6)
def test_choose_best_match_batch(self):
# (x_ctr, y_ctr, w, h, angle)
Rrois = np.array([[3, 4, 2, 10, np.pi/6.],
[3, 4, 10, 2, np.pi/6. + np.pi/2.],
[3, 4, 2, 10, np.pi / 6.],
[3, 4, 10, 2, np.pi / 6. + np.pi / 2.]
])
gt_rois = np.array([[3, 4, 10, 2, np.pi/6. + np.pi/2.],
[3, 4, 10, 2, np.pi / 6. + np.pi / 2.],
[3, 4, 2, 10, np.pi/6. + np.pi],
[3, 4, 10, 2, np.pi/6. + np.pi * 3 / 2.]
])
results = choose_best_match_batch(Rrois, gt_rois)
expected_results = np.array([[3, 4, 2, 10, np.pi/6.],
[3, 4, 10, 2, np.pi/6. + np.pi/2.],
[3, 4, 2, 10, np.pi / 6.],
[3, 4, 10, 2, np.pi / 6. + np.pi / 2.]
])
np.testing.assert_almost_equal(results, expected_results, decimal=6)
def test_dbbox_transform2_new(self):
boxlist1 = np.array([[1, 1, 10, 5, 0],
[1, 1, 10, 5, 0],
[1, 1, 10, 5, np.pi - np.pi/10.],
[1, 1, 10, 5, np.pi - np.pi/10.]
])
boxlist2 = np.array([[1, 1, 10, 5, -np.pi/10.],
[1, 1, 10, 5, np.pi/10],
[1, 1, 10, 5, np.pi - np.pi/10. - np.pi/20.],
[1, 1, 10, 5, np.pi - np.pi/10. - np.pi/20. + 10 * np.pi]
])
norm = np.pi / 2.
expected_results = np.array([[0, 0, 0, 0, -np.pi/10./norm],
[0, 0, 0, 0, np.pi/10./norm],
[0, 0, 0, 0, -np.pi/20./norm],
[0, 0, 0, 0, -np.pi/20./norm]])
results = dbbox_transform2_new(boxlist1, boxlist2)
np.testing.assert_almost_equal(results, expected_results)
def test_dbbox_transform2_best_match_warp(self):
boxlist1 = np.array([[1, 1, 10, 5, 0],
[1, 1, 10, 5, 0],
[1, 1, 10, 5, np.pi - np.pi/10.],
[1, 1, 10, 5, np.pi - np.pi/10.]
])
boxlist2 = np.array([[1, 1, 5, 10, -np.pi/10. + np.pi/2.],
[1, 1, 10, 5, np.pi/10 + np.pi],
[1, 1, 5, 10, np.pi - np.pi/10. - np.pi/20. - np.pi/2.],
[1, 1, 10, 5, np.pi - np.pi/10. - np.pi/20. + 10 * np.pi]
])
norm = np.pi / 2.
expected_results = np.array([[0, 0, 0, 0, -np.pi/10./norm],
[0, 0, | |
str(line)
p = locals()[name].predict(s)
predicted = np.append(predicted,p)
else:
s = dec.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
name='dec_reg_NW' + str(line)
p = locals()[name].predict(s)
predicted = np.append(predicted,p)
NWPaths_p[:,line_index] = predicted
# Residuals
residuals = predicted - y.values
export_residuals[:,line_index] = residuals
NWPaths_y[:,line_index] = y.values
# RMSE
RMSE = (np.sum((residuals**2))/len(residuals))**.5
# #R2
# a=st.pearsonr(y,predicted)
# print a[0]**2
ResidualsNWPaths = export_residuals
###############################
# Other CA PATHS
###############################
#import data
df_data1 = pd.read_excel('Synthetic_demand_pathflows/OtherCA_Path_data.xlsx',sheet_name='Daily',header=0)
#find average temps
cities = ['Salem','Seattle','Portland','Eugene','Boise','Tuscon','Phoenix','Vegas','Fresno','Oakland','LA','SanDiego','Sacramento','SanJose','SanFran']
num_cities = len(cities)
num_days = len(df_data1)
AvgT = np.zeros((num_days,num_cities))
Wind = np.zeros((num_days,num_cities))
for i in cities:
n1 = i + '_AvgT'
n2 = i + '_Wind'
j = int(cities.index(i))
AvgT[:,j] = df_data1.loc[:,n1]
Wind[:,j] = df_data1.loc[:,n2]
#convert to degree days
HDD = np.zeros((num_days,num_cities))
CDD = np.zeros((num_days,num_cities))
for i in range(0,num_days):
for j in range(0,num_cities):
HDD[i,j] = np.max((0,65-AvgT[i,j]))
CDD[i,j] = np.max((0,AvgT[i,j] - 65))
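# The double loop above is equivalent to the vectorized form (same 65 degree
# base temperature):
#   HDD = np.maximum(0, 65 - AvgT)
#   CDD = np.maximum(0, AvgT - 65)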
#separate wind speed by cooling/heating degree day
binary_CDD = CDD>0
binary_HDD = HDD>0
CDD_wind = np.multiply(Wind,binary_CDD)
HDD_wind = np.multiply(Wind,binary_HDD)
X1 = np.array(df_data1.loc[:,'Month':'Path66'])
X2 = np.column_stack((HDD,CDD,HDD_wind,CDD_wind))
cX = np.column_stack((X1,X2))
df_data = pd.DataFrame(cX)
df_data.rename(columns={0:'Month'}, inplace=True)
df_data.rename(columns={3:'Path61'}, inplace=True)
df_data.rename(columns={4:'Path42'}, inplace=True)
df_data.rename(columns={5:'Path24'}, inplace=True)
df_data.rename(columns={6:'Path45'}, inplace=True)
df_data.rename(columns={7:'BPA_wind'}, inplace=True)
jan = df_data.loc[df_data['Month'] == 1,:]
feb = df_data.loc[df_data['Month'] == 2,:]
mar = df_data.loc[df_data['Month'] == 3,:]
apr = df_data.loc[df_data['Month'] == 4,:]
may = df_data.loc[df_data['Month'] == 5,:]
jun = df_data.loc[df_data['Month'] == 6,:]
jul = df_data.loc[df_data['Month'] == 7,:]
aug = df_data.loc[df_data['Month'] == 8,:]
sep = df_data.loc[df_data['Month'] == 9,:]
oct = df_data.loc[df_data['Month'] == 10,:]
nov = df_data.loc[df_data['Month'] == 11,:]
dec = df_data.loc[df_data['Month'] == 12,:]
lines = ['Path61','Path42','Path24','Path45']
num_lines = len(lines)
export_residuals = np.zeros((len(cX),num_lines))
OtherCA_Paths_p= np.zeros((len(cX),num_lines))
OtherCA_Paths_y = np.zeros((len(cX),num_lines))
for line in lines:
y = df_data.loc[:,line]
line_index = lines.index(line)
#multivariate regression
name_1='jan_reg_CA' + str(line)
name_2='feb_reg_CA' + str(line)
name_3='mar_reg_CA' + str(line)
name_4='apr_reg_CA' + str(line)
name_5='may_reg_CA' + str(line)
name_6='jun_reg_CA' + str(line)
name_7='jul_reg_CA' + str(line)
name_8='aug_reg_CA' + str(line)
name_9='sep_reg_CA' + str(line)
name_10='oct_reg_CA' + str(line)
name_11='nov_reg_CA' + str(line)
name_12='dec_reg_CA' + str(line)
locals()[name_1] = linear_model.LinearRegression()
locals()[name_2] = linear_model.LinearRegression()
locals()[name_3] = linear_model.LinearRegression()
locals()[name_4] = linear_model.LinearRegression()
locals()[name_5] = linear_model.LinearRegression()
locals()[name_6] = linear_model.LinearRegression()
locals()[name_7] = linear_model.LinearRegression()
locals()[name_8] = linear_model.LinearRegression()
locals()[name_9] = linear_model.LinearRegression()
locals()[name_10] = linear_model.LinearRegression()
locals()[name_11] = linear_model.LinearRegression()
locals()[name_12] = linear_model.LinearRegression()
# Train the model using the training sets
locals()[name_1].fit(jan.loc[:,'BPA_wind':],jan.loc[:,line])
locals()[name_2].fit(feb.loc[:,'BPA_wind':],feb.loc[:,line])
locals()[name_3].fit(mar.loc[:,'BPA_wind':],mar.loc[:,line])
locals()[name_4].fit(apr.loc[:,'BPA_wind':],apr.loc[:,line])
locals()[name_5].fit(may.loc[:,'BPA_wind':],may.loc[:,line])
locals()[name_6].fit(jun.loc[:,'BPA_wind':],jun.loc[:,line])
locals()[name_7].fit(jul.loc[:,'BPA_wind':],jul.loc[:,line])
locals()[name_8].fit(aug.loc[:,'BPA_wind':],aug.loc[:,line])
locals()[name_9].fit(sep.loc[:,'BPA_wind':],sep.loc[:,line])
locals()[name_10].fit(oct.loc[:,'BPA_wind':],oct.loc[:,line])
locals()[name_11].fit(nov.loc[:,'BPA_wind':],nov.loc[:,line])
locals()[name_12].fit(dec.loc[:,'BPA_wind':],dec.loc[:,line])
# Make predictions using the testing set
predicted = []
rc = np.shape(jan.loc[:,'BPA_wind':])
n = rc[1]
for i in range(0,len(y)):
m = df_data.loc[i,'Month']
if m==1:
s = jan.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_1].predict(s)
predicted = np.append(predicted,p)
elif m==2:
s = feb.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_2].predict(s)
predicted = np.append(predicted,p)
elif m==3:
s = mar.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_3].predict(s)
predicted = np.append(predicted,p)
elif m==4:
s = apr.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_4].predict(s)
predicted = np.append(predicted,p)
elif m==5:
s = may.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_5].predict(s)
predicted = np.append(predicted,p)
elif m==6:
s = jun.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_6].predict(s)
predicted = np.append(predicted,p)
elif m==7:
s = jul.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_7].predict(s)
predicted = np.append(predicted,p)
elif m==8:
s = aug.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_8].predict(s)
predicted = np.append(predicted,p)
elif m==9:
s = sep.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_9].predict(s)
predicted = np.append(predicted,p)
elif m==10:
s = oct.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_10].predict(s)
predicted = np.append(predicted,p)
elif m==11:
s = nov.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_11].predict(s)
predicted = np.append(predicted,p)
else:
s = dec.loc[i,'BPA_wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_12].predict(s)
predicted = np.append(predicted,p)
OtherCA_Paths_p[:,line_index] = predicted
# Residuals
residuals = predicted - y.values
export_residuals[:,line_index] = residuals
OtherCA_Paths_y[:,line_index] = y.values
# RMSE
RMSE = (np.sum((residuals**2))/len(residuals))**.5
# #R2
# a=st.pearsonr(y,predicted)
# print a[0]**2
ResidualsOtherCA_Paths = export_residuals
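# Note: the locals()[name_*] pattern used above is fragile. A sketch of an
# equivalent structure that keeps the fitted models in a plain dict, keyed
# by month (assumes the same jan..dec frames and feature slice):
#
#   months = [jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec]
#   models = {}
#   for m, frame in enumerate(months, start=1):
#       reg = linear_model.LinearRegression()
#       reg.fit(frame.loc[:, 'BPA_wind':], frame.loc[:, line])
#       models[m] = reg
#   # prediction then collapses to one lookup per row:
#   # p = models[df_data.loc[i, 'Month']].predict(s)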
##########################
# PATH 65 & 66
##########################
#import data
df_data1 = pd.read_excel('Synthetic_demand_pathflows/Path65_66_regression_data.xlsx',sheet_name='Sheet1',header=0)
#find average temps
cities = ['Salem','Seattle','Portland','Eugene','Boise','Fresno','Oakland','LA','SanDiego','Sacramento','SanJose','SanFran']
num_cities = len(cities)
num_days = len(df_data1)
AvgT = np.zeros((num_days,num_cities))
Wind = np.zeros((num_days,num_cities))
for i in cities:
n1 = i + '_AvgT'
n2 = i + '_Wind'
j = int(cities.index(i))
AvgT[:,j] = df_data1.loc[:,n1]
Wind[:,j] = df_data1.loc[:,n2]
#convert to degree days
HDD = np.zeros((num_days,num_cities))
CDD = np.zeros((num_days,num_cities))
for i in range(0,num_days):
for j in range(0,num_cities):
HDD[i,j] = np.max((0,65-AvgT[i,j]))
CDD[i,j] = np.max((0,AvgT[i,j] - 65))
#separate wind speed by cooling/heating degree day
binary_CDD = CDD>0
binary_HDD = HDD>0
CDD_wind = np.multiply(Wind,binary_CDD)
HDD_wind = np.multiply(Wind,binary_HDD)
X1 = np.array(df_data1.loc[:,'Month':'Weekday'])
X2 = np.column_stack((HDD,CDD,HDD_wind,CDD_wind))
cX = np.column_stack((X1,X2))
df_data = pd.DataFrame(cX)
df_data.rename(columns={0:'Month'}, inplace=True)
df_data.rename(columns={3:'Path65'}, inplace=True)
df_data.rename(columns={4:'Path66'}, inplace=True)
df_data.rename(columns={5:'Wind'}, inplace=True)
jan = df_data.loc[df_data['Month'] == 1,:]
feb = df_data.loc[df_data['Month'] == 2,:]
mar = df_data.loc[df_data['Month'] == 3,:]
apr = df_data.loc[df_data['Month'] == 4,:]
may = df_data.loc[df_data['Month'] == 5,:]
jun = df_data.loc[df_data['Month'] == 6,:]
jul = df_data.loc[df_data['Month'] == 7,:]
aug = df_data.loc[df_data['Month'] == 8,:]
sep = df_data.loc[df_data['Month'] == 9,:]
oct = df_data.loc[df_data['Month'] == 10,:]
nov = df_data.loc[df_data['Month'] == 11,:]
dec = df_data.loc[df_data['Month'] == 12,:]
lines = ['Path65','Path66']
num_lines = len(lines)
export_residuals = np.zeros((len(cX),num_lines))
Path65_66_p = np.zeros((len(cX),num_lines))
Path65_66_y = np.zeros((len(cX),num_lines))
for line in lines:
y = df_data.loc[:,line]
line_index = lines.index(line)
#multivariate regression
name_1='jan_reg_6566' + str(line)
name_2='feb_reg_6566' + str(line)
name_3='mar_reg_6566' + str(line)
name_4='apr_reg_6566' + str(line)
name_5='may_reg_6566' + str(line)
name_6='jun_reg_6566' + str(line)
name_7='jul_reg_6566' + str(line)
name_8='aug_reg_6566' + str(line)
name_9='sep_reg_6566' + str(line)
name_10='oct_reg_6566' + str(line)
name_11='nov_reg_6566' + str(line)
name_12='dec_reg_6566' + str(line)
locals()[name_1] = linear_model.LinearRegression()
locals()[name_2] = linear_model.LinearRegression()
locals()[name_3] = linear_model.LinearRegression()
locals()[name_4] = linear_model.LinearRegression()
locals()[name_5] = linear_model.LinearRegression()
locals()[name_6] = linear_model.LinearRegression()
locals()[name_7] = linear_model.LinearRegression()
locals()[name_8] = linear_model.LinearRegression()
locals()[name_9] = linear_model.LinearRegression()
locals()[name_10] = linear_model.LinearRegression()
locals()[name_11] = linear_model.LinearRegression()
locals()[name_12] = linear_model.LinearRegression()
# Train the model using the training sets
locals()[name_1].fit(jan.loc[:,'Wind':],jan.loc[:,line])
locals()[name_2].fit(feb.loc[:,'Wind':],feb.loc[:,line])
locals()[name_3].fit(mar.loc[:,'Wind':],mar.loc[:,line])
locals()[name_4].fit(apr.loc[:,'Wind':],apr.loc[:,line])
locals()[name_5].fit(may.loc[:,'Wind':],may.loc[:,line])
locals()[name_6].fit(jun.loc[:,'Wind':],jun.loc[:,line])
locals()[name_7].fit(jul.loc[:,'Wind':],jul.loc[:,line])
locals()[name_8].fit(aug.loc[:,'Wind':],aug.loc[:,line])
locals()[name_9].fit(sep.loc[:,'Wind':],sep.loc[:,line])
locals()[name_10].fit(oct.loc[:,'Wind':],oct.loc[:,line])
locals()[name_11].fit(nov.loc[:,'Wind':],nov.loc[:,line])
locals()[name_12].fit(dec.loc[:,'Wind':],dec.loc[:,line])
# Make predictions using the testing set
predicted = []
rc = np.shape(jan.loc[:,'Wind':])
n = rc[1]
for i in range(0,len(y)):
m = df_data.loc[i,'Month']
if m==1:
s = jan.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_1].predict(s)
predicted = np.append(predicted,p)
elif m==2:
s = feb.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_2].predict(s)
predicted = np.append(predicted,p)
elif m==3:
s = mar.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_3].predict(s)
predicted = np.append(predicted,p)
elif m==4:
s = apr.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_4].predict(s)
predicted = np.append(predicted,p)
elif m==5:
s = may.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_5].predict(s)
predicted = np.append(predicted,p)
elif m==6:
s = jun.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_6].predict(s)
predicted = np.append(predicted,p)
elif m==7:
s = jul.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_7].predict(s)
predicted = np.append(predicted,p)
elif m==8:
s = aug.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_8].predict(s)
predicted = np.append(predicted,p)
elif m==9:
s = sep.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_9].predict(s)
predicted = np.append(predicted,p)
elif m==10:
s = oct.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_10].predict(s)
predicted = np.append(predicted,p)
elif m==11:
s = nov.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_11].predict(s)
predicted = np.append(predicted,p)
else:
s = dec.loc[i,'Wind':]
s = np.reshape(s[:,None],(1,n))
p = locals()[name_12].predict(s)
predicted = np.append(predicted,p)
Path65_66_p[:,line_index] = predicted
Path65_66_y[:,line_index] = y.values
# Residuals
residuals = predicted - y.values
export_residuals[:,line_index] = residuals
#
# RMSE
RMSE = (np.sum((residuals**2))/len(residuals))**.5
#R2
# a=st.pearsonr(y,predicted)
# print a[0]**2
Residuals65_66 = export_residuals[730:,:]
#####################################################################
# Residual Analysis
#####################################################################
R = np.column_stack((ResidualsLoad,ResidualsNWPaths,ResidualsOtherCA_Paths,Residuals46,Residuals65_66))
rc = np.shape(R)
cols = rc[1]
mus = np.zeros((cols,1))
stds = np.zeros((cols,1))
R_w = np.zeros(np.shape(R))
sim_days = len(R_w)
#whiten residuals
for i in range(0,cols):
mus[i] = np.mean(R[:,i])
stds[i] = np.std(R[:,i])
R_w[:,i] = (R[:,i] - mus[i])/stds[i]
#Vector autoregressive model on residuals
model = VAR(R_w)
results = model.fit(1)
sim_residuals = np.zeros((sim_days,cols))
errors = np.zeros((sim_days,cols))
p = results.params
y_seeds = R_w[-1]
C = results.sigma_u
means = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
E = np.random.multivariate_normal(means,C,sim_days)
ys = np.zeros((cols,1))
# Generate cross correlated residuals
for i in range(0,sim_days):
for j in range(1,cols+1):
name='y' + str(j)
locals()[name]= p[0,j-1] + p[1,j-1]*y_seeds[0]+ p[2,j-1]*y_seeds[1]+ p[3,j-1]*y_seeds[2]+ p[4,j-1]*y_seeds[3]+ p[5,j-1]*y_seeds[4]+ p[6,j-1]*y_seeds[5]+ p[7,j-1]*y_seeds[6]+ p[8,j-1]*y_seeds[7]+ p[9,j-1]*y_seeds[8]+ p[10,j-1]*y_seeds[9]+ p[11,j-1]*y_seeds[10]+ p[12,j-1]*y_seeds[11]+ p[13,j-1]*y_seeds[12]+ p[14,j-1]*y_seeds[13]+ p[15,j-1]*y_seeds[14]+E[i,j-1]
for j in range(1,cols+1):
name='y' + str(j)
y_seeds[j-1]=locals()[name]
sim_residuals[i,:] = [y1,y2,y3,y4,y5,y6,y7,y8,y9,y10,y11,y12,y13,y14,y15]
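# The element-wise recursion above is a VAR(1) update; a vectorized sketch
# (row 0 of p holds the intercepts, rows 1..15 the lag-1 coefficients):
#
#   c, A = p[0, :], p[1:, :].T
#   y_prev = R_w[-1].copy()
#   for i in range(sim_days):
#       y_prev = c + A.dot(y_prev) + E[i, :]
#       sim_residuals[i, :] = y_prev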
for i in range(0,cols):
sim_residuals[:,i] = sim_residuals[:,i]*stds[i]*(1/np.std(sim_residuals[:,i])) + mus[i]
#validation
Y = np.column_stack((np.reshape(BPA_y[0:3*365],(1095,1)),np.reshape(SDGE_y[0:3*365],(1095,1)),np.reshape(SCE_y[0:3*365],(1095,1)),np.reshape(PGEV_y[0:3*365],(1095,1)),np.reshape(PGEB_y[0:3*365],(1095,1)),NWPaths_y,OtherCA_Paths_y,np.reshape(Path46_y[730:],(1095,1)),np.reshape(Path65_66_y[730:,:],(1095,2))))
combined_BPA = np.reshape(sim_residuals[:,0],(1095,1)) + np.reshape(BPA_p[0:3*365],(1095,1))
combined_SDGE = np.reshape(sim_residuals[:,1],(1095,1)) + np.reshape(SDGE_p[0:3*365],(1095,1))
combined_SCE = np.reshape(sim_residuals[:,2],(1095,1)) + np.reshape(SCE_p[0:3*365],(1095,1))
combined_PGEV = np.reshape(sim_residuals[:,3],(1095,1)) + np.reshape(PGEV_p[0:3*365],(1095,1))
combined_PGEB = np.reshape(sim_residuals[:,4],(1095,1)) + np.reshape(PGEB_p[0:3*365],(1095,1))
combined_Path8 = np.reshape(sim_residuals[:,5],(1095,1)) + np.reshape(NWPaths_p[:,0],(1095,1))
combined_Path14 = np.reshape(sim_residuals[:,6],(1095,1)) + np.reshape(NWPaths_p[:,1],(1095,1))
combined_Path3 = np.reshape(sim_residuals[:,7],(1095,1)) + np.reshape(NWPaths_p[:,2],(1095,1))
combined_Path61 = np.reshape(sim_residuals[:,8],(1095,1)) + np.reshape(OtherCA_Paths_p[:,0],(1095,1))
combined_Path42 = np.reshape(sim_residuals[:,9],(1095,1)) + np.reshape(OtherCA_Paths_p[:,1],(1095,1))
combined_Path24 = np.reshape(sim_residuals[:,10],(1095,1)) + np.reshape(OtherCA_Paths_p[:,2],(1095,1))
combined_Path45 = np.reshape(sim_residuals[:,11],(1095,1)) + np.reshape(OtherCA_Paths_p[:,3],(1095,1))
combined_Path46 = np.reshape(sim_residuals[:,12],(1095,1)) + np.reshape(Path46_p[730:],(1095,1))
combined_Path65 = np.reshape(sim_residuals[:,13],(1095,1)) + np.reshape(Path65_66_p[730:,0],(1095,1))
combined_Path66 = np.reshape(sim_residuals[:,14],(1095,1)) + np.reshape(Path65_66_p[730:,1],(1095,1))
combined = np.column_stack((combined_BPA,combined_SDGE,combined_SCE,combined_PGEV,combined_PGEB,combined_Path8,combined_Path14,combined_Path3,combined_Path61,combined_Path42,combined_Path24,combined_Path45,combined_Path46,combined_Path65,combined_Path66))
rc = np.shape(Y)
cols = rc[1]
names = ['BPA','SDGE','SCE','PGEV','PGEB','Path8','Path14','Path3','Path61','Path42','Path24','Path45','Path46','Path65','Path66']
#for n in names:
#
# n_index = names.index(n)
#
# plt.figure()
# plt.plot(combined[:,n_index],'r')
# plt.plot(Y[:,n_index],'b')
# plt.title(n)
#
##########################################################################################################################################################
#Simulating demand and path
#########################################################################################################################################################
#Sim Residual
simulation_length=len(sim_weather)
syn_residuals = np.zeros((simulation_length,cols))
errors = np.zeros((simulation_length,cols))
y_seeds = R_w[-1]
C = results.sigma_u
means = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
E = np.random.multivariate_normal(means,C,simulation_length)
ys = np.zeros((cols,1))
for i in range(0,simulation_length):
for n in range(0,cols):
ys[n] = p[0,n] | |
= 'pending'
def _full_parse(self, filepath=None):
"""
Fully parse the input pfile.
Attempts to import pfile version specific parser from pfile submodule. Full parse is
not possible without access to the pfile submodule.
Does not work if input file is a tgz. If NIMSPfile was init'd with a tgz input, the tgz can be
unpacked into a temporary directory, and then this function can parse the unpacked pfile.
Parameters
----------
filepath : str
path to a pfile.7. Does not accept pfile.tgz.
"""
filepath = filepath or self.filepath
if tarfile.is_tarfile(filepath):
raise NIMSPFileError('_full_parse() expects a .7 or .7.gz')
log.debug('_full_parse of %s' % filepath)
try:
pfile = getattr(__import__('pfile.pfile%d' % self.version, globals()), 'pfile%d' % self.version)
except ImportError:
raise ImportError('no pfile parser for v%d' % self.version)
with gzip.open(filepath, 'rb') if is_gzip(filepath) else open(filepath, 'rb') as fileobj:
self._hdr = pfile.POOL_HEADER(fileobj)
if not self._hdr:
raise NIMSPFileError('no pfile was read', log_level=logging.WARNING)
self.data = None # data always starts as None
self.pfilename = 'P%05d' % self._hdr.rec.run_int
self.exam_no = self._hdr.exam.ex_no
self.exam_uid = unpack_uid(self._hdr.exam.study_uid)
self.series_no = self._hdr.series.se_no
self.series_desc = self._hdr.series.se_desc.split('\0', 1)[0]
self.series_uid = unpack_uid(self._hdr.series.series_uid)
self.acq_no = self._hdr.image.scanactno
self.patient_id = self._hdr.exam.patidff.split('\0', 1)[0]
self.subj_code, self.group_name, self.project_name = medimg.parse_patient_id(self.patient_id, 'ex' + str(self.exam_no))
self.subj_firstname, self.subj_lastname = medimg.parse_patient_name(self._hdr.exam.patnameff.split('\0', 1)[0])
self.subj_dob = medimg.parse_patient_dob(self._hdr.exam.dateofbirth.split('\0', 1)[0])
self.subj_sex = ('male', 'female')[self._hdr.exam.patsex-1] if self._hdr.exam.patsex in [1, 2] else None
self.psd_name = os.path.basename(self._hdr.image.psdname.partition('\x00')[0]).lower()
# self.scan_type = self._hdr.image.psd_iname.split('\0', 1)[0] # XXX is this needed, it gets overwritten by end of fullparse
if self._hdr.image.im_datetime > 0:
self.timestamp = datetime.datetime.utcfromtimestamp(self._hdr.image.im_datetime)
else: # HOShims don't have self._hdr.image.im_datetime
month, day, year = map(int, self._hdr.rec.scan_date.split('\0', 1)[0].split('/'))
hour, minute = map(int, self._hdr.rec.scan_time.split('\0', 1)[0].split(':'))
self.timestamp = datetime.datetime(year + 1900, month, day, hour, minute) # GE's epoch begins in 1900
# expose study date, study time, acquisition date, acquisition time
month, day, year = map(int, self._hdr.rec.scan_date.split('\0', 1)[0].split('/'))
hour, minute = map(int, self._hdr.rec.scan_time.split('\0', 1)[0].split(':'))
self.study_date = '%4d%02d%02d' % (year + 1900, month, day)
self.study_time = '%02d%02d%02d' % (hour, minute, 0)
self.study_datetime = self.study_date and self.study_time and datetime.datetime.strptime(self.study_date + self.study_time[:6], '%Y%m%d%H%M%S')
if self._hdr.image.im_datetime > 0:
self.acq_datetime = datetime.datetime.utcfromtimestamp(self._hdr.image.im_datetime)
self.acq_date = datetime.datetime.strftime(self.acq_datetime, '%Y%m%d')
self.acq_time = datetime.datetime.strftime(self.acq_datetime, '%H%M%S')
else:
self.acq_datetime = None
self.acq_date = None
self.acq_time = None
self.ti = self._hdr.image.ti / 1e6
self.te = self._hdr.image.te / 1e6
self.tr = self._hdr.image.tr / 1e6 # tr in seconds
self.flip_angle = float(self._hdr.image.mr_flip)
self.pixel_bandwidth = self._hdr.rec.bw
# Note: the freq/phase dir isn't meaningful for spiral trajectories.
# GE numbers the dims 1,2, so freq_dir==1 is the first dim. We'll use
# the convention where first dim = 0, second dim = 1, etc. for phase_encode.
self.phase_encode = 1 if self._hdr.image.freq_dir == 1 else 0
self.mt_offset_hz = self._hdr.image.offsetfreq
self.num_slices = self._hdr.image.slquant
self.num_averages = self._hdr.image.averages
self.num_echos = self._hdr.rec.nechoes
self.receive_coil_name = self._hdr.image.cname.split('\0', 1)[0]
self.num_receivers = self._hdr.rec.dab[0].stop_rcv - self._hdr.rec.dab[0].start_rcv + 1
self.operator = self._hdr.exam.operator_new.split('\0', 1)[0]
self.protocol_name = self._hdr.series.prtcl.split('\0', 1)[0]
self.scanner_name = self._hdr.exam.hospname.split('\0', 1)[0] + ' ' + self._hdr.exam.ex_sysid.split('\0', 1)[0]
self.scanner_type = 'GE MEDICAL SYSTEMS DISCOVERY MR750' # FIXME: don't hardcode
self.acquisition_type = None # hope this doesn't break anything...
self.size = [self._hdr.image.dim_X, self._hdr.image.dim_Y] # imatrix_Y
self.fov = [self._hdr.image.dfov, self._hdr.image.dfov_rect]
self.num_bands = 1
self.num_mux_cal_cycle = 0
self.num_timepoints = self._hdr.rec.npasses
# Some sequences (e.g., muxepi) acquire more timepoints than will be available in the resulting data file.
# The following will indicate how many to expect in the final image.
self.num_timepoints_available = self.num_timepoints
self.deltaTE = 0.0
self.scale_data = False
# Compute the voxel size rather than use image.pixsize_X/Y
self.mm_per_vox = [self.fov[0] / self.size[0], self.fov[1] / self.size[1], self._hdr.image.slthick + self._hdr.image.scanspacing]
image_tlhc = np.array([self._hdr.image.tlhc_R, self._hdr.image.tlhc_A, self._hdr.image.tlhc_S])
image_trhc = np.array([self._hdr.image.trhc_R, self._hdr.image.trhc_A, self._hdr.image.trhc_S])
image_brhc = np.array([self._hdr.image.brhc_R, self._hdr.image.brhc_A, self._hdr.image.brhc_S])
# psd-specific params get set here
self.infer_psd_type()
if self.psd_type == 'spiral':
self.num_timepoints = int(self._hdr.rec.user0) # not in self._hdr.rec.nframes for sprt
self.deltaTE = self._hdr.rec.user15
self.band_spacing = 0
self.scale_data = True
# spiral is always a square encode based on the frequency encode direction (size_x)
# Atsushi also likes to round up to the next higher power of 2.
# self.size_x = int(pow(2,ceil(log2(pf.size_x))))
# The rec.im_size field seems to have the correct reconned image size, but
# this isn't guaranteed to be correct, as Atsushi's recon does whatever it
# damn well pleases. Maybe we could add a check to infer the image size,
# assuming it's square?
self.size_x = self.size_y = self._hdr.rec.im_size
self.mm_per_vox_x = self.mm_per_vox_y = self.fov_x / self.size_x
elif self.psd_type == 'basic':
# first 6 are ref scans, so ignore those. Also, two acquired timepoints are used
# to generate each reconned time point.
self.num_timepoints = (self._hdr.rec.npasses * self._hdr.rec.nechoes - 6) / 2
self.num_echos = 1
elif self.psd_type == 'muxepi':
self.num_bands = int(self._hdr.rec.user6)
self.num_mux_cal_cycle = int(self._hdr.rec.user7)
self.band_spacing_mm = self._hdr.rec.user8
# When ARC is used with mux, the number of acquired TRs is greater than what's Rxed.
# ARC calibration uses multi-shot, so the additional TRs = num_bands*(ileaves-1)*num_mux_cal_cycle
self.num_timepoints = self._hdr.rec.npasses + self.num_bands * (self._hdr.rec.ileaves-1) * self.num_mux_cal_cycle
# The actual number of images returned by the mux recon is npasses - num_calibration_passes + num_mux_cal_cycle
self.num_timepoints_available = self._hdr.rec.npasses - self.num_bands * self.num_mux_cal_cycle + self.num_mux_cal_cycle
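# Worked example of the two formulas above: with npasses=100, num_bands=3,
# ileaves=2 and num_mux_cal_cycle=2, acquired = 100 + 3*(2-1)*2 = 106
# timepoints, while available = 100 - 3*2 + 2 = 96.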
# TODO: adjust the image.tlhc... fields to match the correct geometry.
elif self.psd_type == 'mrs':
self._hdr.image.scanspacing = 0.
self.mm_per_vox = [self._hdr.rec.roileny, self._hdr.rec.roilenx, self._hdr.rec.roilenz]
image_tlhc = np.array((-self._hdr.rec.roilocx - self.mm_per_vox[0]/2.,
self._hdr.rec.roilocy + self.mm_per_vox[1]/2.,
self._hdr.rec.roilocz - self.mm_per_vox[1]/2.))
image_trhc = image_tlhc - [self.mm_per_vox[0], 0., 0.]
image_brhc = image_trhc + [0., self.mm_per_vox[1], 0.]
# Tread carefully! Most of the stuff down here depends on various fields being corrected in the
# sequence-specific set of hacks just above. So, move things with care!
# Note: the following is true for single-shot planar acquisitions (EPI and 1-shot spiral).
# For multishot sequences, we need to multiply by the # of shots. And for non-planar acquisitions,
# we'd need to multiply by the # of phase encodes (accounting for any acceleration factors).
# Even for planar sequences, this will be wrong (under-estimate) in case of cardiac-gating.
self.prescribed_duration = self.num_timepoints * self.tr
self.total_num_slices = self.num_slices * self.num_timepoints
# The actual duration can only be computed after the data are loaded. Settled for rx duration for now.
self.duration = self.prescribed_duration
self.effective_echo_spacing = self._hdr.image.effechospace / 1e6
self.phase_encode_undersample = 1. / self._hdr.rec.ileaves
# TODO: Set this correctly! (it's in the dicom at (0x0043, 0x1083))
self.slice_encode_undersample = 1. # FIXME
self.acquisition_matrix_x, self.acquisition_matrix_y = [self._hdr.rec.rc_xres, self._hdr.rec.rc_yres]
# TODO: it looks like the pfile now has a 'grad_data' field!
# Diffusion params
self.dwi_numdirs = self._hdr.rec.numdifdirs
# You might think that the b-value for diffusion scans would be stored in self._hdr.image.b_value.
# But alas, this is GE. Apparently, that var stores the b-value of the just the first image, which is
# usually a non-dwi. So, we had to modify the PSD and stick the b-value into an rhuser CV. Sigh.
# NOTE: pre-dv24, the bvalue was stored in rec.user22.
self.dwi_bvalue = self._hdr.rec.user1 if self.version == 24 else self._hdr.rec.user22
self.is_dwi = True if self.dwi_numdirs >= 6 else False
# if bit 4 of rhtype(int16) is set, then fractional NEX (i.e., partial ky acquisition) was used.
self.partial_ky = self._hdr.rec.scan_type & np.uint16(16) > 0
# was pepolar used to flip the phase encode direction?
self.phase_encode_direction = 1 if np.bitwise_and(self._hdr.rec.dacq_ctrl,4)==4 else 0
self.caipi = self._hdr.rec.user13 # true: CAIPIRINHA-type acquisition; false: Direct aliasing of simultaneous slices.
self.cap_blip_start = self._hdr.rec.user14 # Starting index of the kz blips. 0~(mux-1) correspond to -kmax~kmax.
self.cap_blip_inc = self._hdr.rec.user15 # Increment of the kz blip index for adjacent acquired ky lines.
self.mica = self._hdr.rec.user17 # MICA bit-reverse?
self.slice_duration = self.tr / self.num_slices
lr_diff = image_trhc - image_tlhc
si_diff = image_trhc - image_brhc
if not np.all(lr_diff == 0) and not np.all(si_diff == 0):
row_cosines = lr_diff / np.sqrt(lr_diff.dot(lr_diff))
col_cosines = -si_diff / np.sqrt(si_diff.dot(si_diff))
else:
row_cosines = np.array([1., 0, 0])
col_cosines = np.array([0, -1., 0])
self.slice_order = dcm.mr.generic_mr.SLICE_ORDER_UNKNOWN
# FIXME: check that this is correct.
if self._hdr.series.se_sortorder == 0:
self.slice_order = dcm.mr.generic_mr.SLICE_ORDER_SEQ_INC
elif self._hdr.series.se_sortorder == 1:
self.slice_order = dcm.mr.generic_mr.SLICE_ORDER_ALT_INC
# header geometry is LPS, but we need RAS, so negate R and A.
slice_norm = np.array([-self._hdr.image.norm_R, -self._hdr.image.norm_A, self._hdr.image.norm_S]) | |
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from enum import Enum
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
import vlq_base128_le
class Dex(KaitaiStruct):
"""Android OS applications executables are typically stored in its own
format, optimized for more efficient execution in Dalvik virtual
machine.
This format is loosely similar to Java .class file format and
generally holds the similar set of data: i.e. classes, methods,
fields, annotations, etc.
.. seealso::
Source - https://source.android.com/devices/tech/dalvik/dex-format
"""
class ClassAccessFlags(Enum):
public = 1
private = 2
protected = 4
static = 8
final = 16
interface = 512
abstract = 1024
synthetic = 4096
annotation = 8192
enum = 16384
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.header = Dex.HeaderItem(self._io, self, self._root)
class HeaderItem(KaitaiStruct):
class EndianConstant(Enum):
endian_constant = 305419896
reverse_endian_constant = 2018915346
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.magic = self._io.read_bytes(4)
if not self.magic == b"\x64\x65\x78\x0A":
raise kaitaistruct.ValidationNotEqualError(b"\x64\x65\x78\x0A", self.magic, self._io, u"/types/header_item/seq/0")
self.version_str = (KaitaiStream.bytes_terminate(self._io.read_bytes(4), 0, False)).decode(u"ascii")
self.checksum = self._io.read_u4le()
self.signature = self._io.read_bytes(20)
self.file_size = self._io.read_u4le()
self.header_size = self._io.read_u4le()
self.endian_tag = KaitaiStream.resolve_enum(Dex.HeaderItem.EndianConstant, self._io.read_u4le())
self.link_size = self._io.read_u4le()
self.link_off = self._io.read_u4le()
self.map_off = self._io.read_u4le()
self.string_ids_size = self._io.read_u4le()
self.string_ids_off = self._io.read_u4le()
self.type_ids_size = self._io.read_u4le()
self.type_ids_off = self._io.read_u4le()
self.proto_ids_size = self._io.read_u4le()
self.proto_ids_off = self._io.read_u4le()
self.field_ids_size = self._io.read_u4le()
self.field_ids_off = self._io.read_u4le()
self.method_ids_size = self._io.read_u4le()
self.method_ids_off = self._io.read_u4le()
self.class_defs_size = self._io.read_u4le()
self.class_defs_off = self._io.read_u4le()
self.data_size = self._io.read_u4le()
self.data_off = self._io.read_u4le()
class MapList(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.size = self._io.read_u4le()
self.list = [None] * (self.size)
for i in range(self.size):
self.list[i] = Dex.MapItem(self._io, self, self._root)
class EncodedValue(KaitaiStruct):
class ValueTypeEnum(Enum):
byte = 0
short = 2
char = 3
int = 4
long = 6
float = 16
double = 17
method_type = 21
method_handle = 22
string = 23
type = 24
field = 25
method = 26
enum = 27
array = 28
annotation = 29
null = 30
boolean = 31
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.value_arg = self._io.read_bits_int_be(3)
self.value_type = KaitaiStream.resolve_enum(Dex.EncodedValue.ValueTypeEnum, self._io.read_bits_int_be(5))
self._io.align_to_byte()
_on = self.value_type
if _on == Dex.EncodedValue.ValueTypeEnum.int:
self.value = self._io.read_s4le()
elif _on == Dex.EncodedValue.ValueTypeEnum.annotation:
self.value = Dex.EncodedAnnotation(self._io, self, self._root)
elif _on == Dex.EncodedValue.ValueTypeEnum.long:
self.value = self._io.read_s8le()
elif _on == Dex.EncodedValue.ValueTypeEnum.method_handle:
self.value = self._io.read_u4le()
elif _on == Dex.EncodedValue.ValueTypeEnum.byte:
self.value = self._io.read_s1()
elif _on == Dex.EncodedValue.ValueTypeEnum.array:
self.value = Dex.EncodedArray(self._io, self, self._root)
elif _on == Dex.EncodedValue.ValueTypeEnum.method_type:
self.value = self._io.read_u4le()
elif _on == Dex.EncodedValue.ValueTypeEnum.short:
self.value = self._io.read_s2le()
elif _on == Dex.EncodedValue.ValueTypeEnum.method:
self.value = self._io.read_u4le()
elif _on == Dex.EncodedValue.ValueTypeEnum.double:
self.value = self._io.read_f8le()
elif _on == Dex.EncodedValue.ValueTypeEnum.float:
self.value = self._io.read_f4le()
elif _on == Dex.EncodedValue.ValueTypeEnum.type:
self.value = self._io.read_u4le()
elif _on == Dex.EncodedValue.ValueTypeEnum.enum:
self.value = self._io.read_u4le()
elif _on == Dex.EncodedValue.ValueTypeEnum.field:
self.value = self._io.read_u4le()
elif _on == Dex.EncodedValue.ValueTypeEnum.string:
self.value = self._io.read_u4le()
elif _on == Dex.EncodedValue.ValueTypeEnum.char:
self.value = self._io.read_u2le()
class CallSiteIdItem(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.call_site_off = self._io.read_u4le()
class MethodIdItem(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.class_idx = self._io.read_u2le()
self.proto_idx = self._io.read_u2le()
self.name_idx = self._io.read_u4le()
@property
def class_name(self):
"""the definer of this method."""
if hasattr(self, '_m_class_name'):
return self._m_class_name if hasattr(self, '_m_class_name') else None
self._m_class_name = self._root.type_ids[self.class_idx].type_name
return self._m_class_name if hasattr(self, '_m_class_name') else None
@property
def proto_desc(self):
"""the short-form descriptor of the prototype of this method."""
if hasattr(self, '_m_proto_desc'):
return self._m_proto_desc if hasattr(self, '_m_proto_desc') else None
self._m_proto_desc = self._root.proto_ids[self.proto_idx].shorty_desc
return self._m_proto_desc if hasattr(self, '_m_proto_desc') else None
@property
def method_name(self):
"""the name of this method."""
if hasattr(self, '_m_method_name'):
return self._m_method_name if hasattr(self, '_m_method_name') else None
self._m_method_name = self._root.string_ids[self.name_idx].value.data
return self._m_method_name if hasattr(self, '_m_method_name') else None
class TypeItem(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.type_idx = self._io.read_u2le()
@property
def value(self):
if hasattr(self, '_m_value'):
return self._m_value if hasattr(self, '_m_value') else None
self._m_value = self._root.type_ids[self.type_idx].type_name
return self._m_value if hasattr(self, '_m_value') else None
class TypeIdItem(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.descriptor_idx = self._io.read_u4le()
@property
def type_name(self):
if hasattr(self, '_m_type_name'):
return self._m_type_name if hasattr(self, '_m_type_name') else None
self._m_type_name = self._root.string_ids[self.descriptor_idx].value.data
return self._m_type_name if hasattr(self, '_m_type_name') else None
class AnnotationElement(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.name_idx = vlq_base128_le.VlqBase128Le(self._io)
self.value = Dex.EncodedValue(self._io, self, self._root)
class EncodedField(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.field_idx_diff = vlq_base128_le.VlqBase128Le(self._io)
self.access_flags = vlq_base128_le.VlqBase128Le(self._io)
class EncodedArrayItem(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.value = Dex.EncodedArray(self._io, self, self._root)
class ClassDataItem(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.static_fields_size = vlq_base128_le.VlqBase128Le(self._io)
self.instance_fields_size = vlq_base128_le.VlqBase128Le(self._io)
self.direct_methods_size = vlq_base128_le.VlqBase128Le(self._io)
self.virtual_methods_size = vlq_base128_le.VlqBase128Le(self._io)
self.static_fields = [None] * (self.static_fields_size.value)
for i in range(self.static_fields_size.value):
self.static_fields[i] = Dex.EncodedField(self._io, self, self._root)
self.instance_fields = [None] * (self.instance_fields_size.value)
for i in range(self.instance_fields_size.value):
self.instance_fields[i] = Dex.EncodedField(self._io, self, self._root)
self.direct_methods = [None] * (self.direct_methods_size.value)
for i in range(self.direct_methods_size.value):
self.direct_methods[i] = Dex.EncodedMethod(self._io, self, self._root)
self.virtual_methods = [None] * (self.virtual_methods_size.value)
for i in range(self.virtual_methods_size.value):
self.virtual_methods[i] = Dex.EncodedMethod(self._io, self, self._root)
class FieldIdItem(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.class_idx = self._io.read_u2le()
self.type_idx = self._io.read_u2le()
self.name_idx = self._io.read_u4le()
@property
def class_name(self):
"""the definer of this field."""
if hasattr(self, '_m_class_name'):
return self._m_class_name if hasattr(self, '_m_class_name') else None
self._m_class_name = self._root.type_ids[self.class_idx].type_name
return self._m_class_name if hasattr(self, '_m_class_name') else None
@property
def type_name(self):
"""the type of this field."""
if hasattr(self, '_m_type_name'):
return self._m_type_name if hasattr(self, '_m_type_name') else None
self._m_type_name = self._root.type_ids[self.type_idx].type_name
return self._m_type_name if hasattr(self, '_m_type_name') else None
@property
def field_name(self):
"""the name of this field."""
if hasattr(self, '_m_field_name'):
return self._m_field_name if hasattr(self, '_m_field_name') else None
self._m_field_name = self._root.string_ids[self.name_idx].value.data
return self._m_field_name if hasattr(self, '_m_field_name') else None
class EncodedAnnotation(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.type_idx = vlq_base128_le.VlqBase128Le(self._io)
self.size = vlq_base128_le.VlqBase128Le(self._io)
self.elements = [None] * (self.size.value)
for i in range(self.size.value):
self.elements[i] = Dex.AnnotationElement(self._io, self, self._root)
class ClassDefItem(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.class_idx = self._io.read_u4le()
self.access_flags = KaitaiStream.resolve_enum(Dex.ClassAccessFlags, self._io.read_u4le())
self.superclass_idx = self._io.read_u4le()
self.interfaces_off = self._io.read_u4le()
self.source_file_idx = self._io.read_u4le()
self.annotations_off = self._io.read_u4le()
self.class_data_off = self._io.read_u4le()
self.static_values_off = self._io.read_u4le()
@property
def type_name(self):
if hasattr(self, '_m_type_name'):
return self._m_type_name if hasattr(self, '_m_type_name') else None
self._m_type_name = self._root.type_ids[self.class_idx].type_name
return self._m_type_name if hasattr(self, '_m_type_name') else None
@property
def class_data(self):
if hasattr(self, '_m_class_data'):
return self._m_class_data if hasattr(self, '_m_class_data') else None
if self.class_data_off != 0:
_pos = self._io.pos()
self._io.seek(self.class_data_off)
self._m_class_data = Dex.ClassDataItem(self._io, self, self._root)
self._io.seek(_pos)
return self._m_class_data if hasattr(self, '_m_class_data') else None
@property
def static_values(self):
if hasattr(self, '_m_static_values'):
return self._m_static_values if hasattr(self, '_m_static_values') else None
if self.static_values_off != 0:
_pos = self._io.pos()
self._io.seek(self.static_values_off)
self._m_static_values = Dex.EncodedArrayItem(self._io, self, self._root)
self._io.seek(_pos)
return self._m_static_values if hasattr(self, '_m_static_values') else None
class TypeList(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.size = self._io.read_u4le()
self.list = [None] * (self.size)
for i in range(self.size):
self.list[i] = Dex.TypeItem(self._io, self, self._root) | |
'''
====================================================================
(c) 2003-2016 <NAME>. All rights reserved.
This software is licensed as described in the file LICENSE.txt,
which you should have received as part of this distribution.
====================================================================
wb_svn_ui_actions.py
'''
import pathlib
import pysvn
from PyQt5 import QtWidgets
import wb_log_history_options_dialog
import wb_ui_actions
import wb_common_dialogs
import wb_svn_project
import wb_svn_info_dialog
import wb_svn_properties_dialog
import wb_svn_dialogs
import wb_svn_commit_dialog
import wb_svn_annotate
from wb_background_thread import thread_switcher
#
# Start with the main window components interface
# and add actions used by the main window
# and the commit window
#
# then derive to add tool bars and menus
# appropriate to each context
#
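# The @thread_switcher methods below are generators: yielding
# self.switchToBackground hands execution to a worker thread and yielding
# self.switchToForeground resumes on the GUI thread. A minimal sketch of
# the shape such a method takes (do_slow_thing and showResult are
# hypothetical):
#
#   @thread_switcher
#   def exampleAction_Bg( self, checked=None ):
#       yield self.switchToBackground   # slow, non-GUI work happens here
#       result = do_slow_thing()
#       yield self.switchToForeground   # safe to touch Qt widgets again
#       self.showResult( result )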
class SvnMainWindowActions(wb_ui_actions.WbMainWindowActions):
def __init__( self, factory ):
super().__init__( 'svn', factory )
def setupDebug( self ):
self.debugLog = self.main_window.app.debug_options.debugLogSvnUi
#--- Enablers ---------------------------------------------------------
#------------------------------------------------------------
#
# tree or table actions depending on focus
#
#------------------------------------------------------------
def enablerTreeTableSvnInfo( self ):
return self.main_window.callTreeOrTableFunction( self.enablerTreeSvnInfo, self.enablerTableSvnInfo )
def enablerTreeTableSvnProperties( self ):
return self.main_window.callTreeOrTableFunction( self.enablerTreeSvnProperties, self.enablerTableSvnProperties )
def enablerTreeTableSvnDiffBaseVsWorking( self ):
return self.main_window.callTreeOrTableFunction( self.enablerTreeSvnDiffBaseVsWorking, self.enablerTableSvnDiffBaseVsWorking )
def enablerTreeTableSvnDiffHeadVsWorking( self ):
return self.main_window.callTreeOrTableFunction( self.enablerTreeSvnDiffHeadVsWorking, self.enablerTableSvnDiffHeadVsWorking )
# ------------------------------------------------------------
def treeTableActionSvnDiffBaseVsWorking( self ):
self.main_window.callTreeOrTableFunction( self.treeActionSvnDiffBaseVsWorking, self.tableActionSvnDiffBaseVsWorking )
def treeTableActionSvnDiffHeadVsWorking( self ):
self.main_window.callTreeOrTableFunction( self.treeActionSvnDiffHeadVsWorking, self.tableActionSvnDiffHeadVsWorking )
@thread_switcher
def treeTableActionSvnInfo_Bg( self, checked=None ):
yield from self.main_window.callTreeOrTableFunction_Bg( self.treeActionSvnInfo, self.tableActionSvnInfo_Bg )
@thread_switcher
def treeTableActionSvnProperties_Bg( self, checked=None ):
yield from self.main_window.callTreeOrTableFunction_Bg( self.treeActionSvnProperties_Bg, self.tableActionSvnProperties_Bg )
#------------------------------------------------------------
def enablerTreeTableSvnLogHistory( self ):
return self.main_window.callTreeOrTableFunction( self.enablerTreeSvnLogHistory, self.enablerTableSvnLogHistory )
def enablerTreeSvnLogHistory( self ):
return self._enablerTreeSvnIsControlled()
def enablerTableSvnLogHistory( self ):
return self._enablerTableSvnIsControlled()
@thread_switcher
def tableActionSvnLogHistory_Bg( self, checked=None ):
yield from self.table_view.tableActionViewRepo_Bg( self.__actionSvnLogHistory_Bg )
@thread_switcher
def treeTableActionSvnLogHistory_Bg( self, checked=None ):
yield from self.main_window.callTreeOrTableFunction_Bg( self.treeActionSvnLogHistory_Bg, self.tableActionSvnLogHistory_Bg )
@thread_switcher
def treeActionSvnLogHistory_Bg( self, checked=None ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
yield from self.__actionSvnLogHistory_Bg( self.selectedSvnProject(), tree_node.relativePath() )
@thread_switcher
def __actionSvnLogHistory_Bg( self, svn_project, filename ):
self.progress.start( T_('Finding Tags') )
yield self.switchToBackground
try:
all_tag_nodes = svn_project.cmdTagsForFile( filename )
except wb_svn_project.ClientError as e:
svn_project.logClientError( e, 'Cannot get tags for %s:%s' % (svn_project.projectName(), filename) )
all_tag_nodes = []
all_tags = [node.tag_name for node in all_tag_nodes]
yield self.switchToForeground
self.progress.end()
options = wb_log_history_options_dialog.WbLogHistoryOptions( self.app, all_tags, self.main_window )
# del options as soon as possible to attempt to avoid XCB errors
if not options.exec_():
return
self.setStatusAction( T_('Log for %(filename)s') %
{'filename': filename} )
self.progress.start( T_('Logs %(count)d') )
yield self.switchToBackground
try:
tag = options.getTag()
if tag is not None:
# find the tag node
for node in all_tag_nodes:
if node.tag_name == tag:
break
all_tag_nodes = [node]
rev = node.revision
else:
rev = None
all_commit_nodes = svn_project.cmdCommitLogForFile( filename, options.getLimit(), options.getSince(), options.getUntil(), rev )
except wb_svn_project.ClientError as e:
svn_project.logClientError( e, 'Cannot get commit logs for %s:%s' % (svn_project.projectName(), filename) )
yield self.switchToForeground
return
if len(all_commit_nodes) > 0:
all_commit_nodes.extend( all_tag_nodes )
def key( node ):
return -node['revision'].number
all_commit_nodes.sort( key=key )
yield self.switchToForeground
self.progress.end()
self.setStatusAction()
log_history_view = self.factory.logHistoryView(
self.app,
T_('Commit Log for %(project)s:%(path)s') %
{'project': svn_project.projectName()
,'path': filename} )
log_history_view.showCommitLogForFile( svn_project, filename, all_commit_nodes )
log_history_view.show()
#------------------------------------------------------------
#
# tree actions
#
#------------------------------------------------------------
def selectedSvnProject( self ):
scm_project = self.table_view.selectedScmProject()
if scm_project is None:
return None
if not isinstance( scm_project, wb_svn_project.SvnProject ):
return None
return scm_project
def enablerTreeSvnDiffBaseVsWorking( self ):
return self._enablerTreeSvnIsControlled()
def enablerTreeSvnDiffHeadVsWorking( self ):
return self._enablerTreeSvnIsControlled()
def enablerTreeSvnInfo( self ):
return self._enablerTreeSvnIsControlled()
def enablerTreeSvnMkdir( self ):
return self._enablerTreeSvnIsControlled()
def enablerTreeSvnRevert( self ):
return self._enablerTreeSvnIsControlled()
def enablerTreeSvnAdd( self ):
return not self._enablerTreeSvnIsControlled()
def enablerTreeSvnProperties( self ):
return self._enablerTreeSvnIsControlled()
def _enablerTreeSvnIsControlled( self ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return False
if not tree_node.project.hasFileState( tree_node.relativePath() ):
return False
file_state = tree_node.project.getFileState( tree_node.relativePath() )
return file_state.isControlled()
# ------------------------------------------------------------
def treeActionSvnDiffBaseVsWorking( self ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
try:
diff_text = tree_node.project.cmdDiffFolder( tree_node.relativePath(), head=False )
self.showDiffText( 'Diff Base vs. Working from %s' % (tree_node.relativePath(),), diff_text.split('\n') )
except wb_svn_project.ClientError as e:
tree_node.project.logClientError( e )
def treeActionSvnDiffHeadVsWorking( self ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
try:
diff_text = tree_node.project.cmdDiffFolder( tree_node.relativePath(), head=True )
self.showDiffText( 'Diff Head vs. Working from %s' % (tree_node.relativePath(),), diff_text.split('\n') )
except wb_svn_project.ClientError as e:
tree_node.project.logClientError( e )
@thread_switcher
def treeActionSvnAdd_Bg( self, checked=None ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
dialog = wb_svn_dialogs.WbAddFolderDialog( self.app, self.main_window, tree_node.relativePath() )
if dialog.exec_():
try:
tree_node.project.cmdAdd( tree_node.relativePath(), depth=dialog.getDepth(), force=dialog.getForce() )
except wb_svn_project.ClientError as e:
tree_node.project.logClientError( e )
yield from self.top_window.updateTableView_Bg()
@thread_switcher
def treeActionSvnRevert_Bg( self, checked=None ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
dialog = wb_svn_dialogs.WbRevertFolderDialog( self.app, self.main_window, tree_node.absolutePath() )
if dialog.exec_():
try:
tree_node.project.cmdRevert( tree_node.relativePath(), depth=dialog.getDepth() )
except wb_svn_project.ClientError as e:
tree_node.project.logClientError( e )
yield from self.top_window.updateTableView_Bg()
@thread_switcher
def treeActionSvnMkdir_Bg( self, checked=None ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
dialog = wb_common_dialogs.WbNewFolderDialog( self.app, self.main_window, tree_node.absolutePath() )
if dialog.exec_():
try:
tree_node.project.cmdMkdir( tree_node.relativePath() / dialog.getFolderName() )
except wb_svn_project.ClientError as e:
tree_node.project.logClientError( e )
yield from self.top_window.updateTableView_Bg()
def treeActionSvnInfo( self ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
try:
info = tree_node.project.cmdInfo( tree_node.relativePath() )
except wb_svn_project.ClientError as e:
tree_node.project.logClientError( e )
return
dialog = wb_svn_info_dialog.InfoDialog( self.app, self.main_window, tree_node.relativePath(), tree_node.absolutePath(), info )
dialog.exec_()
@thread_switcher
def treeActionSvnProperties_Bg( self, checked=None ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
svn_project = tree_node.project
filename = tree_node.relativePath()
prop_dict = svn_project.cmdPropList( filename )
dialog = wb_svn_properties_dialog.FolderPropertiesDialog( self.app, self.main_window, filename, prop_dict )
if dialog.exec_():
for is_present, name, value in dialog.getModifiedProperties():
try:
if not is_present:
# delete name
svn_project.cmdPropDel( name, filename )
else:
# add/update name value
svn_project.cmdPropSet( name, value, filename )
except wb_svn_project.ClientError as e:
svn_project.logClientError( e )
yield from self.top_window.updateTableView_Bg()
def treeActionSvnCleanup( self, checked=None ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
self.top_window.setStatusAction( T_('Cleanup %s') % (tree_node.project.projectName(),) )
try:
tree_node.project.cmdCleanup()
self.log.info( 'Cleanup finished for %s' % (tree_node.project.projectName(),) )
except wb_svn_project.ClientError as e:
tree_node.project.logClientError( e )
self.top_window.setStatusAction()
@thread_switcher
def treeActionSvnUpdate_Bg( self, checked=None ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
path = tree_node.relativePath()
if path == pathlib.Path( '.' ):
msg = (T_('Update %(project)s') %
{'project': tree_node.project.projectName()})
else:
msg = (T_('Update %(project)s:%(filename)s') %
{'project': tree_node.project.projectName()
,'filename': path})
self.log.infoheader( msg )
self.setStatusAction( msg )
self.progress.start( T_('Updated %(count)d') )
yield self.switchToBackground
try:
svn_project = tree_node.project
filename = tree_node.relativePath()
svn_project.initNotificationOfFilesInConflictCount()
rev_list = svn_project.cmdUpdate(
filename,
svn_project.svn_rev_head,
svn_project.svn_depth_infinity )
yield self.switchToForeground
self.__updateToRevisionProcessResults( tree_node, rev_list )
except pysvn.ClientError as e:
svn_project.logClientError( e )
yield self.switchToForeground
self.progress.end()
self.setStatusAction()
yield from self.top_window.updateTableView_Bg()
def __updateToRevisionProcessResults( self, tree_node, rev_list ):
svn_project = tree_node.project
filename = tree_node.relativePath()
if rev_list is not None:
for rev in rev_list:
if rev.number > 0:
count = self.progress.getEventCount()
if count == 0:
self.log.info( T_('Updated %(project)s:%(filename)s to revision %(rev)d, no new updates') %
{'project': svn_project.projectName()
,'filename': filename
,'rev': rev.number} )
else:
self.log.info( S_('Updated %(project)s:%(filename)s to revision %(rev)d, %(count)d new update',
'Updated %(project)s:%(filename)s to revision %(rev)d, %(count)d new updates', count) %
{'project': svn_project.projectName()
,'filename': filename
,'rev': rev.number
,'count': count} )
else:
self.log.warning( T_('Already up to date') )
files_in_conflict = self.progress.getInConflictCount()
if files_in_conflict > 0:
box = QtWidgets.QMessageBox(
QtWidgets.QMessageBox.Information,
T_('Warning'),
S_("%d file is in conflict",
"%d files are in conflict",
files_in_conflict) %
(files_in_conflict,),
QtWidgets.QMessageBox.Close,
parent=self.top_window )
box.exec_()
def treeActionSvnStatus( self ):
self.log.info( 'Not implemented yet' )
#------------------------------------------------------------
#
# table actions
#
#------------------------------------------------------------
def enablerTableSvnResolveConflict( self ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return False
if not tree_node.project.hasFileState( tree_node.relativePath() ):
return False
file_state = tree_node.project.getFileState( tree_node.relativePath() )
return file_state.isConflicted()
def enablerTableSvnLock( self ):
return self._enablerTableSvnIsControlled()
def enablerTableSvnUnlock( self ):
return self._enablerTableSvnIsControlled()
def enablerTableSvnDiffBaseVsWorking( self ):
if not self.main_window.isScmTypeActive( 'svn' ):
return False
all_file_state = self.tableSelectedAllFileStates()
if len(all_file_state) == 0:
return False
for file_state in all_file_state:
if not file_state.isModified():
return False
return True
def enablerTableSvnDiffHeadVsWorking( self ):
if not self.main_window.isScmTypeActive( 'svn' ):
return False
return True
def enablerTableSvnInfo( self ):
return self._enablerTableSvnIsControlled()
def enablerTableSvnProperties( self ):
return self._enablerTableSvnIsControlled()
def enablerTableSvnAdd( self ):
# can only add uncontrolled files
return self.__enablerTableSvnIsUncontrolled()
def enablerTableSvnRevert( self ):
# can only revert controlled files
return self._enablerTableSvnIsControlled()
def enablerSvnCommitInclude( self ):
return self._enablerTableSvnIsControlled()
def __enablerTableSvnIsUncontrolled( self ):
all_file_state = self.tableSelectedAllFileStates()
if len(all_file_state) == 0:
return False
for file_state in all_file_state:
if not file_state.isUncontrolled():
return False
return True
def _enablerTableSvnIsControlled( self ):
all_file_state = self.tableSelectedAllFileStates()
if len(all_file_state) == 0:
return False
for file_state in all_file_state:
if not file_state.isControlled():
return False
return True
def enablerSvnCheckin( self ):
tree_node = self.selectedSvnProjectTreeNode()
if tree_node is None:
return
<reponame>bmdepesa/validation-tests
from common_fixtures import * # NOQA
logger = logging.getLogger(__name__)
def create_environment_with_dns_services(client,
service_scale,
consumed_service_scale,
port, cross_linking=False,
isnetworkModeHost_svc=False,
isnetworkModeHost_consumed_svc=False):
if not isnetworkModeHost_svc and not isnetworkModeHost_consumed_svc:
env, service, consumed_service, consumed_service1, dns = \
create_env_with_2_svc_dns(
client, service_scale, consumed_service_scale, port,
cross_linking)
else:
env, service, consumed_service, consumed_service1, dns = \
create_env_with_2_svc_dns_hostnetwork(
client, service_scale, consumed_service_scale, port,
cross_linking, isnetworkModeHost_svc,
isnetworkModeHost_consumed_svc)
service.activate()
consumed_service.activate()
consumed_service1.activate()
dns.activate()
service.addservicelink(serviceLink={"serviceId": dns.id})
dns.addservicelink(serviceLink={"serviceId": consumed_service.id})
dns.addservicelink(serviceLink={"serviceId": consumed_service1.id})
service = client.wait_success(service, 120)
consumed_service = client.wait_success(consumed_service, 120)
consumed_service1 = client.wait_success(consumed_service1, 120)
dns = client.wait_success(dns, 120)
assert service.state == "active"
assert consumed_service.state == "active"
assert consumed_service1.state == "active"
validate_add_service_link(client, service, dns)
validate_add_service_link(client, dns, consumed_service)
validate_add_service_link(client, dns, consumed_service1)
return env, service, consumed_service, consumed_service1, dns
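# A minimal usage sketch for the helper above (the scales and port are
# arbitrary example values, mirroring the tests that follow):
#
#     env, service, consumed_service, consumed_service1, dns = \
#         create_environment_with_dns_services(client, 1, 2, "31100")
#     validate_dns_service(client, service,
#                          [consumed_service, consumed_service1],
#                          "31100", dns.name)
#     delete_all(client, [env])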
def test_dns_activate_svc_dns_consumed_svc_link(client):
port = "31100"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_cross_link(client):
port = "31101"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale,
port, True)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env, get_env(client, consumed_service),
get_env(client, consumed_service1), dns])
def test_dns_activate_consumed_svc_link_activate_svc(client):
port = "31102"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_env_with_2_svc_dns(
client, service_scale, consumed_service_scale, port)
link_svc(client, service, [dns])
link_svc(client, dns, [consumed_service, consumed_service1])
service = activate_svc(client, service)
consumed_service = activate_svc(client, consumed_service)
consumed_service1 = activate_svc(client, consumed_service1)
dns = activate_svc(client, dns)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_activate_svc_link_activate_consumed_svc(client):
port = "31103"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_env_with_2_svc_dns(
client, service_scale, consumed_service_scale, port)
service = activate_svc(client, service)
consumed_service = activate_svc(client, consumed_service)
consumed_service1 = activate_svc(client, consumed_service1)
link_svc(client, service, [dns])
link_svc(client, dns, [consumed_service, consumed_service1])
dns = activate_svc(client, dns)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_link_activate_consumed_svc_activate_svc(client):
port = "31104"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_env_with_2_svc_dns(
client, service_scale, consumed_service_scale, port)
dns = activate_svc(client, dns)
link_svc(client, service, [dns])
link_svc(client, dns, [consumed_service, consumed_service1])
service = activate_svc(client, service)
consumed_service = activate_svc(client, consumed_service)
consumed_service1 = activate_svc(client, consumed_service1)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_link_when_services_still_activating(client):
port = "31106"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_env_with_2_svc_dns(
client, service_scale, consumed_service_scale, port)
service.activate()
consumed_service.activate()
consumed_service1.activate()
dns.activate()
service.addservicelink(serviceLink={"serviceId": dns.id})
dns.addservicelink(serviceLink={"serviceId": consumed_service.id})
dns.addservicelink(serviceLink={"serviceId": consumed_service1.id})
service = client.wait_success(service, 120)
consumed_service = client.wait_success(consumed_service, 120)
consumed_service1 = client.wait_success(consumed_service1, 120)
dns = client.wait_success(dns, 120)
assert service.state == "active"
assert consumed_service.state == "active"
assert consumed_service1.state == "active"
validate_add_service_link(client, service, dns)
validate_add_service_link(client, dns, consumed_service)
validate_add_service_link(client, dns, consumed_service1)
validate_dns_service(client, service,
[consumed_service, consumed_service1], port, dns.name)
delete_all(client, [env])
def test_dns_service_scale_up(client):
port = "31107"
service_scale = 1
consumed_service_scale = 2
final_service_scale = 3
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
service = client.update(service, scale=final_service_scale,
name=service.name)
service = client.wait_success(service, 120)
assert service.state == "active"
assert service.scale == final_service_scale
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_services_scale_down(client):
port = "31108"
service_scale = 3
consumed_service_scale = 2
final_service_scale = 1
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
service = client.update(service, scale=final_service_scale,
name=service.name)
service = client.wait_success(service, 120)
assert service.state == "active"
assert service.scale == final_service_scale
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_consumed_services_scale_up(client):
port = "31109"
service_scale = 1
consumed_service_scale = 2
final_consumed_service_scale = 4
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
consumed_service = client.update(consumed_service,
scale=final_consumed_service_scale,
name=consumed_service.name)
consumed_service = client.wait_success(consumed_service, 120)
assert consumed_service.state == "active"
assert consumed_service.scale == final_consumed_service_scale
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_consumed_services_scale_down(client):
port = "3110"
service_scale = 2
consumed_service_scale = 3
final_consumed_service_scale = 1
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
consumed_service = client.update(consumed_service,
scale=final_consumed_service_scale,
name=consumed_service.name)
consumed_service = client.wait_success(consumed_service, 120)
assert consumed_service.state == "active"
assert consumed_service.scale == final_consumed_service_scale
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_consumed_services_stop_start_instance(client,
socat_containers):
port = "3111"
service_scale = 1
consumed_service_scale = 3
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
container_name = get_container_name(env, consumed_service, 2)
containers = client.list_container(name=container_name)
assert len(containers) == 1
container = containers[0]
# Stop instance
stop_container_from_host(client, container)
consumed_service = wait_state(client, consumed_service, "active")
wait_for_scale_to_adjust(client, consumed_service)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_consumed_services_restart_instance(client):
port = "3112"
service_scale = 1
consumed_service_scale = 3
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
container_name = get_container_name(env, consumed_service, 2)
containers = client.list_container(name=container_name)
assert len(containers) == 1
container = containers[0]
# Restart instance
container = client.wait_success(container.restart(), 120)
assert container.state == 'running'
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_consumed_services_delete_instance(client):
port = "3113"
service_scale = 1
consumed_service_scale = 3
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
container_name = get_container_name(env, consumed_service, 1)
containers = client.list_container(name=container_name)
assert len(containers) == 1
container = containers[0]
# Delete instance
container = client.wait_success(client.delete(container))
assert container.state == 'removed'
wait_for_scale_to_adjust(client, consumed_service)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_consumed_services_deactivate_activate(client):
port = "3114"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
consumed_service = consumed_service.deactivate()
consumed_service = client.wait_success(consumed_service, 120)
assert consumed_service.state == "inactive"
wait_until_instances_get_stopped(client, consumed_service)
consumed_service = consumed_service.activate()
consumed_service = client.wait_success(consumed_service, 120)
assert consumed_service.state == "active"
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_service_deactivate_activate(client):
port = "3115"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
service = service.deactivate()
service = client.wait_success(service, 120)
assert service.state == "inactive"
wait_until_instances_get_stopped(client, service)
service = service.activate()
service = client.wait_success(service, 120)
assert service.state == "active"
time.sleep(restart_sleep_interval)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_deactivate_activate_environment(client):
port = "3116"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
env = env.deactivateservices()
service = client.wait_success(service, 120)
assert service.state == "inactive"
consumed_service = client.wait_success(consumed_service, 120)
assert consumed_service.state == "inactive"
wait_until_instances_get_stopped(client, service)
wait_until_instances_get_stopped(client, consumed_service)
env = env.activateservices()
service = client.wait_success(service, 120)
assert service.state == "active"
consumed_service = client.wait_success(consumed_service, 120)
assert consumed_service.state == "active"
time.sleep(restart_sleep_interval)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
delete_all(client, [env])
def test_dns_add_remove_servicelinks(client):
port = "3117"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
# Add another service to environment
launch_config = {"imageUuid": WEB_IMAGE_UUID}
random_name = random_str()
consumed_service_name = random_name.replace("-", "")
consumed_service2 = client.create_service(name=consumed_service_name,
stackId=env.id,
launchConfig=launch_config,
scale=2)
consumed_service2 = client.wait_success(consumed_service2)
assert consumed_service2.state == "inactive"
consumed_service2 = consumed_service2.activate()
consumed_service2 = client.wait_success(consumed_service2, 120)
assert consumed_service2.state == "active"
# Add another service link
dns.addservicelink(serviceLink={"serviceId": consumed_service2.id})
validate_add_service_link(client, dns, consumed_service2)
validate_dns_service(
client, service, [consumed_service, consumed_service1,
consumed_service2], port, dns.name)
# Remove existing service link to the service
dns.removeservicelink(serviceLink={"serviceId": consumed_service.id})
validate_remove_service_link(client, dns, consumed_service)
validate_dns_service(
client, service, [consumed_service1, consumed_service2],
port, dns.name)
delete_all(client, [env])
def test_dns_services_delete_service_add_service(client):
port = "3118"
service_scale = 2
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
validate_dns_service(
client, service, [consumed_service, consumed_service1], port,
dns.name)
# Delete Service
service = client.wait_success(client.delete(service))
assert service.state == "removed"
validate_remove_service_link(client, service, dns)
port1 = "31180"
# Add another service and link to dns service
launch_config = {"imageUuid": SSH_IMAGE_UUID,
"ports": [port1+":22/tcp"]}
random_name = random_str()
service_name = random_name.replace("-", "")
service1 = client.create_service(name=service_name,
stackId=env.id,
launchConfig=launch_config,
scale=1)
service1 = client.wait_success(service1)
assert service1.state == "inactive"
service1 = service1.activate()
service1 = client.wait_success(service1, 120)
assert service1.state == "active"
service1.addservicelink(serviceLink={"serviceId": dns.id})
validate_add_service_link(client, service1, dns)
validate_dns_service(
client, service1, [consumed_service, consumed_service1], port1,
dns.name)
delete_all(client, [env])
def test_dns_services_delete_and_add_consumed_service(client):
port = "3119"
service_scale = 2
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_environment_with_dns_services(
client, service_scale, consumed_service_scale, port)
properties[idx]["rms_force"] = force
properties[idx]["rms_displacement"] = rms_displacements[idx]
if extended_opt_info:
if idx < len(max_forces):
properties[idx]["max_force"] = max_forces[idx]
if idx < len(max_displacements):
properties[idx]["max_displacement"] = max_displacements[idx]
if idx < len(max_gradients):
properties[idx]["max_gradient"] = max_gradients[idx]
if idx < len(rms_gradients):
properties[idx]["rms_gradient"] = rms_gradients[idx]
if idx < len(max_int_forces):
properties[idx]["max_internal_force"] = max_int_forces[idx]
if idx < len(rms_int_forces):
properties[idx]["rms_internal_force"] = rms_int_forces[idx]
if idx < len(delta_energy):
change_in_energy = re.sub(r"Energy=", "", delta_energy[idx])
properties[idx]["predicted_change_in_energy"] = float(change_in_energy.replace('D', 'E'))
if GaussianJobType.FREQ in job_types:
enthalpies = lines.find_parameter("thermal Enthalpies", expected_length=7, which_field=6)
if len(enthalpies) == 1:
properties[-1]["enthalpy"] = enthalpies[0]
elif len(enthalpies) > 1:
raise ValueError(f"unexpected # of enthalpies found!\nenthalpies = {enthalpies}")
gibbs_vals = lines.find_parameter("thermal Free Energies", expected_length=8, which_field=7)
if len(gibbs_vals) == 1:
properties[-1]["gibbs_free_energy"] = gibbs_vals[0]
elif len(gibbs_vals) > 1:
raise ValueError(f"unexpected # gibbs free energies found!\ngibbs free energies = {gibbs_vals}")
frequencies = []
try:
frequencies = sum(lines.find_parameter("Frequencies", expected_length=5, which_field=[2,3,4]), [])
properties[-1]["frequencies"] = sorted(frequencies)
except Exception as e:
raise ValueError("error finding frequencies")
# Temperature 298.150 Kelvin. Pressure 1.00000 Atm.
temperature = lines.find_parameter("Temperature", expected_length=6, which_field=1)
if len(temperature) == 1:
properties[-1]["temperature"] = temperature[0]
try:
corrected_free_energy = get_corrected_free_energy(gibbs_vals[0], frequencies, frequency_cutoff=100.0, temperature=temperature[0])
properties[-1]["quasiharmonic_gibbs_free_energy"] = float(f"{float(corrected_free_energy):.6f}") # yes this is dumb
except Exception as e:
pass
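# Note: the corrected value above comes from get_corrected_free_energy with a
# 100 cm**-1 cutoff, i.e. a quasiharmonic treatment in which very low
# frequencies are damped rather than taken at harmonic face value (the exact
# damping scheme is whatever the surrounding library implements); it differs
# noticeably from "gibbs_free_energy" mainly for floppy molecules with many
# low-lying modes.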
if GaussianJobType.NMR in job_types:
nmr_shifts = parse.read_nmr_shifts(lines, molecules[0].num_atoms())
if nmr_shifts is not None:
properties[-1]["isotropic_shielding"] = nmr_shifts.view(OneIndexedArray)
if re.search("nmr=mixed", f.route_card, flags=re.IGNORECASE) or re.search("nmr=spinspin", f.route_card,flags=re.IGNORECASE):
couplings = parse.read_j_couplings(lines, molecules[0].num_atoms())
if couplings is not None:
properties[-1]["j_couplings"] = couplings
if GaussianJobType.FORCE in job_types:
assert len(molecules) == 1, "force jobs should not be combined with optimizations!"
forces = parse.read_forces(lines)
properties[0]["forces"] = forces
if GaussianJobType.POP in job_types:
if re.search("hirshfeld", f.route_card) or re.search("cm5", f.route_card):
charges, spins = parse.read_hirshfeld_charges(lines)
properties[-1]["hirshfeld_charges"] = charges
properties[-1]["hirshfeld_spins"] = spins
try:
charges = parse.read_mulliken_charges(lines)
properties[-1]["mulliken_charges"] = charges
except Exception as e:
pass
try:
dipole = parse.read_dipole_moment(lines)
properties[-1]["dipole_moment"] = dipole
except Exception as e:
pass
for mol, prop in zip(molecules, properties):
f.ensemble.add_molecule(mol, properties=prop)
f.check_has_properties()
files.append(f)
if return_lines:
if len(link1_lines) == 1:
return files[0], link1_lines[0]
else:
return files, link1_lines
else:
if len(link1_lines) == 1:
return files[0]
else:
return files
@classmethod
def _read_gjf_file(cls, filename, return_lines=False):
"""
Reads a Gaussian ``.gjf`` or ``.com`` file and populates the attributes accordingly.
Args:
filename (str): path to the out file
return_lines (Bool): whether the lines of the file should be returned
Returns:
GaussianFile object
(optional) the lines of the file
"""
lines = super().read_file(filename)
header = None
link0 = {}
footer = None
header_done = False
title = None
charge = None
multip = None
in_geom = False
atomic_numbers = []
geometry = []
for idx, line in enumerate(lines):
if header is None:
if re.match("\%", line):
pieces = line[1:].split("=")
link0[pieces[0]] = pieces[1]
continue
if re.match("#", line):
header = line
continue
if (title is None) and (header is not None):
if header_done:
if len(line.strip()) > 0:
title = line
else:
if len(line.strip()) > 0:
header = header + line
else:
header_done = True
continue
if (title is not None) and (charge is None):
if len(line.strip()) > 0:
pieces = list(filter(None, line.split(" ")))
assert len(pieces) == 2, f"can't parse line {line}"
charge = int(pieces[0])
multip = int(pieces[1])
in_geom = True
continue
if in_geom == True:
if len(line.strip()) == 0:
in_geom = False
else:
pieces = list(filter(None, line.split(" ")))
assert len(pieces) == 4, f"can't parse line {line}"
atomic_numbers.append(pieces[0])
geometry.append([pieces[1], pieces[2], pieces[3]])
if (in_geom == False) and (len(geometry) > 0):
if footer:
footer = footer + "\n" + line
else:
if len(line.strip()) > 0:
footer = line
try:
atomic_numbers = np.array(atomic_numbers, dtype=np.int8)
except Exception as e:
atomic_numbers = np.array(list(map(get_number, atomic_numbers)), dtype=np.int8)
job_types = cls._assign_job_types(header)
f = GaussianFile(job_types=job_types, route_card=header, link0=link0, footer=footer, title=title)
f.ensemble.add_molecule(Molecule(atomic_numbers, geometry, charge=charge, multiplicity=multip))
if return_lines:
return f, lines
else:
return f
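# A usage sketch for _read_gjf_file (the filename and route card shown are
# hypothetical):
#
#     f = GaussianFile._read_gjf_file("methane.gjf")
#     f.route_card       # e.g. "#p opt b3lyp/6-31g(d)"
#     f.get_molecule()   # Molecule built from the charge/multiplicity/geometry block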
def get_molecule(self, num=None, properties=False):
"""
Returns the last molecule (from an optimization job) or the only molecule (from other jobs).
If ``num`` is specified, returns ``self.ensemble.molecule_list()[num]``
If ``properties`` is True, returns ``(molecule, properties)``.
"""
# some methods pass num=None, which overrides setting the default above
if num is None:
num = -1
assert isinstance(num, int), "num must be int"
if properties:
return self.ensemble.molecule_list()[num], self.ensemble.properties_list()[num]
else:
return self.ensemble.molecule_list()[num]
@classmethod
def _assign_job_types(cls, header):
"""
Assigns ``GaussianJobType`` objects from route card. ``GaussianJobType.SP`` is assigned by default.
For instance, "#p opt freq=noraman" would give an output of ``[GaussianJobType.SP, GaussianJobType.OPT, GaussianJobType.FREQ]``.
Args:
header (str): Gaussian route card
Returns:
list of ``GaussianJobType`` objects
"""
job_types = []
for name, member in GaussianJobType.__members__.items():
if re.search(f" {member.value}", str(header), re.IGNORECASE):
job_types.append(member)
if GaussianJobType.SP not in job_types:
job_types.append(GaussianJobType.SP)
return job_types
def check_has_properties(self):
"""
Checks that the file has all the appropriate properties for its job types, and raises ValueError if not.
This only checks the last molecule in ``self.ensemble``, for now.
"""
if self.successful_terminations > 0:
if self.successful_terminations == 1 and ((GaussianJobType.OPT in self.job_types) and (GaussianJobType.FREQ in self.job_types)):
return # opt freq jobs should have two terminations
for job_type in self.job_types:
for prop in EXPECTED_PROPERTIES[job_type.value]:
if not self.ensemble.has_property(-1, prop):
raise ValueError(f"expected property {prop} for job type {job_type}, but it's not there!")
else:
return
@classmethod
def write_ensemble_to_file(cls, filename, ensemble, route_card, link0={"mem": "32GB", "nprocshared": 16}, footer=None, title="title", print_symbol=False):
"""
Write each structure in the specified ensemble to a single Gaussian input file
by using the Link1 specification.
Args:
filename (str): where to write the file
ensemble (Ensemble): ``Ensemble`` object to write
route_card (str or list): to use the same route card for every link, use a single string;
otherwise, provide a list whose entries parallel the ensemble members
link0 (dict or list of dicts): to use the same memory/processors for every link, use a single dict;
otherwise, provide a list
footer (None/str or list): use None for no text after geometry, provide a str to specify a footer,
or provide some combination of the above as a list
title (str or list): use a single string to provide a generic title for every link or a list as above
print_symbol (bool or list): whether to print atomic symbols or atomic numbers in the geometry specification;
use a single bool or a list as above
"""
n_geometries = len(ensemble)
assert len(ensemble) > 0, "cannot write a blank ensemble"
if isinstance(route_card, str):
route_card = [route_card for _ in ensemble._items]
elif isinstance(route_card, list):
assert len(route_card) == n_geometries, f"expected {n_geometries} route cards but got {len(route_card)}"
for card in route_card:
assert isinstance(card, str), "expected route card to be a str"
else:
raise ValueError(f"unexpected type for route_card: {str(type(route_card))}")
if isinstance(link0, dict):
link0 = [link0 for _ in ensemble._items]
elif isinstance(link0, list):
assert len(link0) == n_geometries, f"expected {n_geometries} link0 entries, but got {len(link0)}"
for d in link0:
assert isinstance(d, dict), f"expected dict for link0 but got {str(type(d))}"
else:
raise ValueError(f"unexpected type for link0: {str(type(link0))}")
if footer is None or isinstance(footer, str):
footer = [footer for _ in ensemble._items]
elif isinstance(footer, list):
assert len(footer) == n_geometries, f"expected {n_geometries} footers, but got {len(footer)}"
for f in footer:
assert f is None or isinstance(f, str), f"expected str or None for footer but got {str(type(f))}"
else:
raise ValueError(f"unexpected type for footer: {str(type(footer))}")
if isinstance(title, str):
assert len(title.strip()) > 0, "zero-length titles not allowed"
title = [title for _ in ensemble._items]
elif isinstance(title, list):
assert len(title) == n_geometries, f"expected {n_geometries} titles but got {len(title)}"
for card in title:
assert isinstance(card, str), "expected title to be a str"
assert len(card.strip()) > 0, "zero-length titles are not allowed"
else:
raise ValueError(f"unexpected type for title: {str(type(title))}")
if isinstance(print_symbol, bool):
print_symbol = [print_symbol for _ in ensemble._items]
elif isinstance(print_symbol, list):
assert len(print_symbol) == n_geometries, f"expected {n_geometries} print_symbol entries but got {len(print_symbol)}"
for s in print_symbol:
assert isinstance(s, bool), f"expected bool for print_symbol but got {str(type(s))}"
else:
raise ValueError(f"unexpected type | |
<filename>srw_image_tools/__init__.py<gh_stars>0
import os
import warnings
import matplotlib.pyplot as plt
import numpy as np
import h5py
from pyCHX.chx_xpcs_xsvs_jupyter_V1 import *
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
def save_hdf5(data, filename='data.h5', dataset='dataset'):
''' Save BlueSky data from a CHX measurement to an HDF5 file.
:param data: array data from a CHX measurement.
:param filename='data.h5': HDF5 filename.
:param dataset='dataset': name of the dataset to create. Default is 'dataset'.
:return: string status of dataset creation
'''
h5f = h5py.File(filename, 'w')
r = h5f.create_dataset(dataset, data=data)
status = '{} created: {}'.format(r, os.path.abspath(filename))
h5f.close()
return status
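# A minimal usage sketch for save_hdf5 (the array and names are assumptions,
# not values from a real CHX scan):
#
#     import numpy as np
#     frame = np.random.poisson(lam=5.0, size=(512, 512))
#     print(save_hdf5(frame, filename='example.h5', dataset='eiger_frame'))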
def plot_profile_horiz(data, uid, y_crd=1200, dpi=80, clim=(0, 200),
cmap='afmhot', line_color='deepskyblue',
linestyles=None):
''' Show plot of intensity versus horizontal position.
:param data: HDF5 binary data from CHX measurement.
:param uid: list of unique IDs automatically assigned to the CHX measurements, one per frame.
:param y_crd=1200: add a horizontal line across the axis at a given
location on the image.
:param dpi=80: dpi (dots per inch) for output image.
:param clim=(0, 200): sets the color limits of the current image.
:param cmap='afmhot': color map (https://matplotlib.org/examples/color/colormaps_reference.html)
:param line_color='red': color of line that will show the cut location.
'''
print("\n\nHorizontal cut at row " + str(y_crd))
# What size does the figure need to be in inches to fit the image?
height, width = data[0].shape # size of a single frame; data is a sequence of 2D frames
figsize = width / float(dpi), height / float(dpi)
# Plot image with cut line
fig = plt.figure(figsize=figsize)
for d in data:
plt.imshow(d, clim=clim, cmap=cmap)
plt.axhline(y_crd, color=line_color)
plt.show()
fig.savefig(str(uid) + "_profile_horiz_image.tif", bbox_inches='tight')
# Plot intensity plot
fig2 = plt.figure(figsize=(15.0, 6.0))
warnings.filterwarnings("ignore")
for i, d in enumerate(data):
linestyle = '-' if not linestyles else linestyles[i]
plt.plot(np.log10(d[y_crd, :]), label=uid[i],
linestyle=linestyle, linewidth=1)
plt.xlabel('Transverse Position (pixel)')
plt.ylabel('Intensity')
plt.grid()
plt.legend()
plt.show()
fig2.savefig(str(uid) + "_profile_horiz_intensity.tif",
bbox_inches='tight')
def plot_profile_vert(data, uid, x_crd=1100, dpi=80, clim=(0, 200),
cmap='afmhot', line_color='deepskyblue',
linestyles=None):
''' Show plot of intensity versus vertical position.
:param data: HDF5 binary data from CHX measurement.
:param uid: list of unique IDs automatically assigned to the CHX measurements, one per frame.
:param x_crd=1100: add a vertical line across the axis at a given
location on the image.
:param dpi=80: dpi (dots per inch) for output image.
:param clim=(0, 200): sets the color limits of the current image.
:param cmap='afmhot': color map (https://matplotlib.org/examples/color/colormaps_reference.html)
:param line_color='deepskyblue': color of the line that shows the cut location.
:param linestyles=None: custom linestyles
'''
print("\n\nVertical cut at column " + str(x_crd))
# What size does the figure need to be in inches to fit the image?
height, width = data[0].shape # size of a single frame; data is a sequence of 2D frames
figsize = width / float(dpi), height / float(dpi)
# Plot image with cut line
fig = plt.figure(figsize=figsize)
for d in data:
plt.imshow(d, clim=clim, cmap=cmap)
plt.axvline(x_crd, color=line_color)
plt.show()
fig.savefig(str(uid) + "_profile_vert_image.tif", bbox_inches='tight')
# Plot intensity plot
fig2 = plt.figure(figsize=(15.0, 6.0))
warnings.filterwarnings("ignore")
for i, d in enumerate(data):
linestyle = '-' if not linestyles else linestyles[i]
plt.plot(np.log10(d[:, x_crd]), label=uid[i],
linestyle=linestyle, linewidth=1)
plt.xlabel('Transverse Position (pixel)')
plt.ylabel('Intensity')
plt.grid()
plt.legend()
plt.show()
fig2.savefig(str(uid) + "_profile_vert_intensity.tif", bbox_inches='tight')
def display_image_in_actual_size(img, uid, dpi=80, eiger_size_per_pixel=0.075,
clim=(0, 100), cmap='gist_stern'):
''' Display CHX Eiger image in full size and save the image as a TIFF with dual pixel and mm axis.
:param img: eiger detector image.
:param uid: unique ID automatically assigned to a CHX measurement.
:param dpi=80: dpi (dots per inch) for output image.
:param eiger_size_per_pixel=0.075: eiger camera has 75 um per pixel.
:param cmap='gist_stern': color map (https://matplotlib.org/examples/color/colormaps_reference.html)
:param clim: sets the color limits of the current image.
'''
img_data = img
height, width = img_data.shape
# What size does the figure need to be in inches to fit the image?
figsize = width / float(dpi), height / float(dpi)
# Create a figure of the right size with one axes that takes
# up the full figure
fig = plt.figure(figsize=figsize)
ax = fig.add_axes([0, 0, 1, 1])
# Title
plt.title("UID: " + str(uid), fontsize=30)
# Set up pixel axis
ax.axis('on')
plt.xticks(fontsize=20)
plt.yticks(fontsize=20)
plt.xlabel('Pixels', fontsize=20)
plt.ylabel('Pixels', fontsize=20)
# Display the image.
ax.imshow(img_data, cmap=cmap, clim=clim)
# Set up mm axis
ax1 = ax.twiny() # Create a twin Axes sharing the yaxis
ax2 = ax.twinx() # Create a twin Axes sharing the xaxis
# Decide the ticklabel position in the new axis,
# then convert them to the position in the old axis
newlabelX = range(0, int(eiger_size_per_pixel*width), 5) # labels of the ticklabels: the position in the new x-axis
newlabelY = range(0, int(eiger_size_per_pixel*height), 5) # labels of the ticklabels: the position in the new y-axis
pixel2micronX = lambda width: width*eiger_size_per_pixel # convert function X: from pixels to mm
pixel2micronY = lambda height: height*eiger_size_per_pixel # convert function Y: from pixels to mm
newposX = [pixel2micronX(width) for width in newlabelX] # position of the xticklabels in the old x-axis
newposY = [pixel2micronY(height) for height in newlabelY] # position of the yticklabels in the old y-axis
ax1.set_xticks(newposX)
ax1.set_xticklabels(newlabelX)
ax1.tick_params(axis="x", labelsize=20)
ax2.set_yticks(newposY)
ax2.set_yticklabels(newlabelY)
ax2.tick_params(axis="y", labelsize=20)
ax1.xaxis.set_ticks_position('bottom') # set the position of the second x-axis to bottom
ax2.yaxis.set_ticks_position('left') # set the position of the second y-axis to left
ax1.xaxis.set_label_position('bottom') # set the position of the second x-axis to bottom
ax2.yaxis.set_label_position('left') # set the position of the second y-axis to left
ax1.spines['bottom'].set_position(('outward', 80))
ax2.spines['left'].set_position(('outward', 130))
ax1.set_xlabel('mm', fontsize=20)
ax2.set_ylabel('mm', fontsize=20)
plt.draw()
plt.show()
# Save plot
fig.savefig(str(uid) + ".tif", bbox_inches='tight')
def display_cropped_image(img, uid, x1=900, x2=1650, y1=750, y2=1400, dpi=80,
eiger_size_per_pixel=0.075, clim=(0, 100),
cmap='gist_stern'):
'''Display CHX eiger image cropped to user specifications and save the
image as a TIFF with dual pixel and mm axis.
:param img: eiger detector image.
:param uid: unique ID automatically assigned to a CHX measurement.
:param x1: crop starting location along the first image axis (rows).
:param x2: crop final location along the first image axis (rows).
:param y1: crop starting location along the second image axis (columns).
:param y2: crop final location along the second image axis (columns).
:param dpi=80: dpi (dots per inch) for output image.
:param eiger_size_per_pixel=0.075: eiger camera has 75 um per pixel.
:param cmap='gist_stern': color map (https://matplotlib.org/examples/color/colormaps_reference.html)
:param clim: sets the color limits of the current image.
'''
img_data = img
height, width = img_data.shape
croppedBoxHeight, croppedBoxWidth = x2-x1, y2-y1
pixelTickMarkStepSize = 100
# Crop the image from the center of the image
img_data = img[x1:x2, y1:y2]
# What size does the figure need to be in inches to fit the image?
figsize = croppedBoxWidth / float(dpi), croppedBoxHeight / float(dpi)
# Create a figure of the right size with one axes that takes up the full figure
fig = plt.figure(figsize=figsize)
ax = fig.add_axes([0, 0, 1, 1])
# Title
plt.title("UID: " + str(uid) + " Cropped: " + str(croppedBoxWidth) + "x" + str(croppedBoxHeight), fontsize=15)
# Set up pixel axis
ax.axis('on')
plt.xticks(fontsize=15)
plt.yticks(fontsize=15)
plt.xlabel('Pixels', fontsize=15)
plt.ylabel('Pixels', fontsize=15)
ax.set_xticks(range(0, croppedBoxHeight+pixelTickMarkStepSize, pixelTickMarkStepSize))
ax.set_yticks(range(0, croppedBoxWidth+pixelTickMarkStepSize, pixelTickMarkStepSize))
ax.set_xticklabels(range(x1, x2+pixelTickMarkStepSize, pixelTickMarkStepSize))
ax.set_yticklabels(range(y1, y2+pixelTickMarkStepSize, pixelTickMarkStepSize))
# Set up mm axis
ax1 = ax.twiny() # Create a twin Axes sharing the yaxis
ax2 = ax.twinx() # Create a twin Axes sharing the xaxis
# Decide the ticklabel position in the new micron axis,
# then convert them to the position in the old axis so that the pixel and micron axis line up properly
# Scale to start at Zero; labels of the ticklabels: the position in the new axis
newlabelX = range(0, int(eiger_size_per_pixel*croppedBoxHeight), 10)
newlabelY = range(0, int(eiger_size_per_pixel*croppedBoxWidth), 10)
pixel2micronX = lambda croppedBoxHeight: croppedBoxHeight*eiger_size_per_pixel # convert function X: from pixels to mm
pixel2micronY = lambda croppedBoxWidth: croppedBoxWidth*eiger_size_per_pixel # convert function Y: from pixels to mm
newposX = [pixel2micronX(croppedBoxHeight) for croppedBoxHeight in newlabelX] # position of the xticklabels in the old x-axis
newposY = [pixel2micronY(croppedBoxWidth) for croppedBoxWidth in newlabelY] # position of the yticklabels in the old y-axis
ax1.set_xticks(newposX)
ax1.set_xticklabels(newlabelX)
ax1.tick_params(axis="x", labelsize=15)
ax2.set_yticks(newposY)
ax2.set_yticklabels(newlabelY)
ax2.tick_params(axis="y", labelsize=15)
ax1.xaxis.set_ticks_position('bottom') # set the position of the second x-axis to bottom
ax2.yaxis.set_ticks_position('left') # set the position of the second y-axis to left
ax1.xaxis.set_label_position('bottom') # set the position of the second x-axis to bottom
ax2.yaxis.set_label_position('left') # set the position of the second y-axis to left
ax1.spines['bottom'].set_position(('outward', 50))
ax2.spines['left'].set_position(('outward', 80))
ax1.set_xlabel('mm', fontsize=15)
ax2.set_ylabel('mm', fontsize=15)
# Display the image.
ax.imshow(img_data, cmap=cmap, clim=clim)
plt.savefig(str(uid) + "_cropped.tif", bbox_inches='tight')
plt.show()
# Save plot
fig.savefig(str(uid) + "_cropped.tif", bbox_inches='tight')
def plot_eiger_for_srw(uid, det='eiger4m_single_image', cmap='afmhot',
clim=(0, 100), mean=False, frame_num=0, grid=False):
'''Display CHX eiger image: fullsize, cropped to user specifications, and with horizontal and
vertical cuts, and
import hashlib
from flask import request
from assemblyline.common.isotime import now_as_iso
from assemblyline.remote.datatypes.lock import Lock
from assemblyline_ui.api.base import api_login, make_api_response, make_subapi_blueprint
from assemblyline_ui.config import CLASSIFICATION, STORAGE
SUB_API = 'safelist'
safelist_api = make_subapi_blueprint(SUB_API, api_version=4)
safelist_api._doc = "Perform operations on safelisted hashes"
class InvalidSafehash(Exception):
pass
def _merge_safe_hashes(new, old):
try:
# Check if hash types match
if new['type'] != old['type']:
raise InvalidSafehash(f"Safe hash type mismatch: {new['type']} != {old['type']}")
# Use max classification
old['classification'] = CLASSIFICATION.max_classification(old['classification'], new['classification'])
# Update updated time
old['updated'] = now_as_iso()
# Update hashes
old['hashes'].update(new['hashes'])
# Update type specific info
if old['type'] == 'file':
old.setdefault('file', {})
new_names = new.get('file', {}).pop('name', [])
if 'name' in old['file']:
for name in new_names:
if name not in old['file']['name']:
old['file']['name'].append(name)
elif new_names:
old['file']['name'] = new_names
old['file'].update(new.get('file', {}))
elif old['type'] == 'tag':
old['tag'] = new['tag']
# Merge sources
src_map = {x['name']: x for x in new['sources']}
if not src_map:
raise InvalidSafehash("No valid source found")
old_src_map = {x['name']: x for x in old['sources']}
for name, src in src_map.items():
src_cl = src.get('classification', None)
if src_cl:
old['classification'] = CLASSIFICATION.max_classification(old['classification'], src_cl)
if name not in old_src_map:
old_src_map[name] = src
else:
old_src = old_src_map[name]
if old_src['type'] != src['type']:
raise InvalidSafehash(f"Source {name} has a type conflict: {old_src['type']} != {src['type']}")
for reason in src['reason']:
if reason not in old_src['reason']:
old_src['reason'].append(reason)
old['sources'] = list(old_src_map.values())
return old
except Exception as e:
raise InvalidSafehash(f"Invalid data provided: {str(e)}")
@safelist_api.route("/", methods=["PUT", "POST"])
@api_login(require_type=['user', 'signature_importer'], allow_readonly=False, required_priv=["W"])
def add_or_update_hash(**kwargs):
"""
Add a hash in the safelist if it does not exist or update its list of sources if it does
Arguments:
None
Data Block:
{
"classification": "TLP:W", # Classification of the safe hash (Computed for the mix of sources) - Optional
"enabled": true, # Is the safe hash enabled or not
"file": { # Information about the file - Only used in file mode
"name": ["file.txt"] # Possible names for the file
"size": 12345, # Size of the file
"type": "document/text"}, # Type of the file
},
"hashes": { # Information about the safe hash - At least one hash required
"md5": "123...321", # MD5 hash of the safe hash
"sha1": "1234...4321", # SHA1 hash of the safe hash
"sha256": "12345....54321", # SHA256 of the safe hash
"sources": [ # List of sources for why the file is safelisted, dedupped on name - Required
{"classification": "TLP:W", # Classification of the source (default: TLP:W) - Optional
"name": "NSRL", # Name of external source or user who safelisted it - Required
"reason": [ # List of reasons why the source is safelisted - Required
"Found as test.txt on default windows 10 CD",
"Found as install.txt on default windows XP CD"
],
"type": "external"}, # Type or source (external or user) - Required
{"classification": "TLP:W",
"name": "admin",
"reason": ["We've seen this file many times and it leads to False positives"],
"type": "user"}
],
"signature": { # Signature information - Only used in signature mode
"name": "Avira.Eicar", # Name of signature
},
"tag": { # Tag information - Only used in tag mode
"type": "network.url", # Type of tag
"value": "google.ca" # Value of the tag
},
"type": "tag" # Type of safelist hash (tag or file)
}
Result example:
{
"success": true, # Was the hash successfully added
"op": "add" # Was it added to the system or updated
}
"""
# Load data
data = request.json
if not data:
return make_api_response({}, "No data provided", 400)
user = kwargs['user']
# Set defaults
data.setdefault('classification', CLASSIFICATION.UNRESTRICTED)
data.setdefault('hashes', {})
if data['type'] == 'tag':
tag_data = data.get('tag', None)
if tag_data is None or 'type' not in tag_data or 'value' not in tag_data:
return make_api_response(None, "Tag data not found", 400)
hashed_value = f"{tag_data['type']}: {tag_data['value']}".encode('utf8')
data['hashes']['md5'] = hashlib.md5(hashed_value).hexdigest()
data['hashes']['sha1'] = hashlib.sha1(hashed_value).hexdigest()
data['hashes']['sha256'] = hashlib.sha256(hashed_value).hexdigest()
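# e.g. for {"type": "network.url", "value": "google.ca"} the key material is
# b"network.url: google.ca"; its digests become the safelist document key below.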
data.pop('file', None)
data.pop('signature', None)
elif data['type'] == 'signature':
sig_data = data.get('signature', None)
if sig_data is None or 'name' not in sig_data:
return make_api_response(None, "Signature data not found", 400)
hashed_value = f"signature: {sig_data['name']}".encode('utf8')
data['hashes']['md5'] = hashlib.md5(hashed_value).hexdigest()
data['hashes']['sha1'] = hashlib.sha1(hashed_value).hexdigest()
data['hashes']['sha256'] = hashlib.sha256(hashed_value).hexdigest()
data.pop('tag', None)
data.pop('file', None)
elif data['type'] == 'file':
data.pop('tag', None)
data.pop('signature', None)
data.setdefault('file', {})
data['added'] = data['updated'] = now_as_iso()
# Find the best hash to use for the key
qhash = data['hashes'].get('sha256', data['hashes'].get('sha1', data['hashes'].get('md5', None)))
# Validate hash length
if not qhash:
return make_api_response(None, "No valid hash found", 400)
# Validate sources
src_map = {}
for src in data['sources']:
if src['type'] == 'user':
if src['name'] != user['uname']:
return make_api_response(
{}, f"You cannot add a source for another user. {src['name']} != {user['uname']}", 400)
else:
if 'signature_importer' not in user['type']:
return make_api_response(
{}, "You do not have sufficient priviledges to add an external source.", 403)
src_cl = src.get('classification', None)
if src_cl:
data['classification'] = CLASSIFICATION.max_classification(data['classification'], src_cl)
src_map[src['name']] = src
with Lock(f'add_or_update-safelist-{qhash}', 30):
old = STORAGE.safelist.get_if_exists(qhash, as_obj=False)
if old:
try:
# Save data to the DB
STORAGE.safelist.save(qhash, _merge_safe_hashes(data, old))
return make_api_response({'success': True, "op": "update"})
except InvalidSafehash as e:
return make_api_response({}, str(e), 400)
else:
try:
data['sources'] = list(src_map.values())
STORAGE.safelist.save(qhash, data)
return make_api_response({'success': True, "op": "add"})
except Exception as e:
return make_api_response({}, f"Invalid data provided: {str(e)}", 400)
@safelist_api.route("/add_update_many/", methods=["POST", "PUT"])
@api_login(audit=False, required_priv=['W'], allow_readonly=False, require_type=['signature_importer'])
def add_update_many_hashes(**_):
"""
Add or Update a list of the safe hashes
Variables:
None
Arguments:
None
Data Block (REQUIRED):
[ # List of Safe hash blocks
{
"classification": "TLP:W", # Classification of the safe hash (Computed for the mix of sources) - Optional
"enabled": true, # Is the safe hash enabled or not
"file": { # Information about the file - Only used in file mode
"name": ["file.txt"] # Possible names for the file
"size": 12345, # Size of the file
"type": "document/text"}, # Type of the file
},
"hashes": { # Information about the safe hash - At least one hash required
"md5": "123...321", # MD5 hash of the safe hash
"sha1": "1234...4321", # SHA1 hash of the safe hash
"sha256": "12345....54321", # SHA256 of the safe hash
"sources": [ # List of sources for why the file is safelisted, dedupped on name - Required
{"classification": "TLP:W", # Classification of the source (default: TLP:W) - Optional
"name": "NSRL", # Name of external source or user who safelisted it - Required
"reason": [ # List of reasons why the source is safelisted - Required
"Found as test.txt on default windows 10 CD",
"Found as install.txt on default windows XP CD"
],
"type": "external"}, # Type or source (external or user) - Required
{"classification": "TLP:W",
"name": "admin",
"reason": ["We've seen this file many times and it leads to False positives"],
"type": "user"}
],
"signature": { # Signature information - Only used in signature mode
"name": "Avira.Eicar", # Name of signature
},
"tag": { # Tag information - Only used in tag mode
"type": "network.url", # Type of tag
"value": "google.ca" # Value of the tag
},
"type": "tag" # Type of safelist hash (tag or file)
}
...
]
Result example:
{"success": 23, # Number of hashes that succeeded
"errors": []} # List of hashes that failed
"""
data = request.json
if not isinstance(data, list):
return make_api_response("", "Could not get the list of hashes", 400)
new_data = {}
for hash_data in data:
# Set a classification if None
hash_data.setdefault('classification', CLASSIFICATION.UNRESTRICTED)
if hash_data['type'] == 'tag':
hash_data.pop('file', None)
hash_data.pop('signature', None)
elif hash_data['type'] == 'file':
hash_data.pop('tag', None)
hash_data.pop('signature', None)
elif hash_data['type'] == 'signature':
hash_data.pop('tag', None)
hash_data.pop('file', None)
# Find the hash used for the key
key = hash_data['hashes'].get('sha256', hash_data['hashes'].get('sha1', hash_data['hashes'].get('md5', None)))
if not key:
return make_api_response("", f"Invalid hash block: {str(hash_data)}", 400)
# Save the new hash_block
new_data[key] = hash_data
# Get already existing hashes
old_data = STORAGE.safelist.multiget(list(new_data.keys()), as_dictionary=True, as_obj=False,
error_on_missing=False)
# Test signature names
plan = STORAGE.safelist.get_bulk_plan()
for key, val in new_data.items():
# Use maximum classification
old_val = old_data.get(key, {'classification': CLASSIFICATION.UNRESTRICTED,
'hashes': {}, 'sources': [], 'type': val['type']})
# Add upsert operation
try:
plan.add_upsert_operation(key, _merge_safe_hashes(val, old_val))
except InvalidSafehash as e:
return make_api_response("", str(e), 400)
if not plan.empty:
# Execute plan
res = STORAGE.safelist.bulk(plan)
return make_api_response({"success": len(res['items']), "errors": res['errors']})
return make_api_response({"success": 0, "errors": []})
@safelist_api.route("/<qhash>/", methods=["GET"])
@api_login(required_priv=["R"])
def check_hash_exists(qhash, **kwargs):
"""
Check if a hash exists in the safelist.