id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7 values |
|---|---|---|
/AltAnalyze-2.1.3.15.tar.gz/AltAnalyze-2.1.3.15/altanalyze/visualization_scripts/umap_learn/utils.py |
import numpy as np
import numba
@numba.njit("i4(i8[:])")
def tau_rand_int(state):
    """A fast (pseudo)-random number generator.

    Advances three 32-bit sub-generators kept in ``state`` (stored in int64
    slots; the ``& 0xffffffff`` masks emulate 32-bit wraparound) and returns
    the XOR of the three words.  ``state`` is mutated in place.

    Parameters
    ----------
    state: array of int64, shape (3,)
        The internal state of the rng

    Returns
    -------
    A (pseudo)-random int32 value
    """
    # Each update: mask off low bits, shift left, truncate to 32 bits, then
    # XOR with a right-shifted mix of the previous word.  The shift/mask
    # constants differ per word so the three streams decorrelate.
    state[0] = (((state[0] & 4294967294) << 12) & 0xffffffff) ^ (
        (((state[0] << 13) & 0xffffffff) ^ state[0]) >> 19
    )
    state[1] = (((state[1] & 4294967288) << 4) & 0xffffffff) ^ (
        (((state[1] << 2) & 0xffffffff) ^ state[1]) >> 25
    )
    state[2] = (((state[2] & 4294967280) << 17) & 0xffffffff) ^ (
        (((state[2] << 3) & 0xffffffff) ^ state[2]) >> 11
    )
    return state[0] ^ state[1] ^ state[2]
@numba.njit("f4(i8[:])")
def tau_rand(state):
    """A fast (pseudo)-random number generator for floats in the range [0,1]

    Parameters
    ----------
    state: array of int64, shape (3,)
        The internal state of the rng

    Returns
    -------
    A (pseudo)-random float32 in the interval [0, 1]
    """
    # Draw a raw int32 sample and rescale by the largest positive int32.
    return float(tau_rand_int(state)) / 0x7fffffff
@numba.njit()
def norm(vec):
    """Compute the (standard l2) norm of a vector.

    Parameters
    ----------
    vec: array of shape (dim,)

    Returns
    -------
    The l2 norm of vec.
    """
    # Accumulate the sum of squares explicitly (numba compiles the loop).
    total = 0.0
    for idx in range(vec.shape[0]):
        total += vec[idx] ** 2
    return np.sqrt(total)
@numba.njit()
def rejection_sample(n_samples, pool_size, rng_state):
    """Generate n_samples many integers from 0 to pool_size such that no
    integer is selected twice. The duplication constraint is achieved via
    rejection sampling.

    Parameters
    ----------
    n_samples: int
        The number of random samples to select from the pool
    pool_size: int
        The size of the total pool of candidates to sample from
    rng_state: array of int64, shape (3,)
        Internal state of the random number generator

    Returns
    -------
    sample: array of shape(n_samples,)
        The ``n_samples`` randomly selected elements from the pool.
    """
    result = np.empty(n_samples, dtype=np.int64)
    for i in range(n_samples):
        # Keep drawing until we hit a value not already chosen.
        while True:
            candidate = tau_rand_int(rng_state) % pool_size
            duplicate = False
            for k in range(i):
                if candidate == result[k]:
                    duplicate = True
                    break
            if not duplicate:
                break
        result[i] = candidate
    return result
@numba.njit("f8[:, :, :](i8,i8)")
def make_heap(n_points, size):
    """Constructor for the numba enabled heap objects. The heaps are used
    for approximate nearest neighbor search, maintaining a list of potential
    neighbors sorted by their distance. We also flag if potential neighbors
    are newly added to the list or not. Internally this is stored as
    a single ndarray; the first axis determines whether we are looking at the
    array of candidate indices, the array of distances, or the flag array for
    whether elements are new or not. Each of these arrays are of shape
    (``n_points``, ``size``)

    Parameters
    ----------
    n_points: int
        The number of data points to track in the heap.
    size: int
        The number of items to keep on the heap for each data point.

    Returns
    -------
    heap: An ndarray suitable for passing to other numba enabled heap functions.
    """
    result = np.zeros((3, int(n_points), int(size)), dtype=np.float64)
    # Layer 0: candidate indices (-1 marks an empty slot)
    # Layer 1: distances (inf marks an empty slot, so any real push wins)
    # Layer 2: "new element" flags
    result[0] = -1
    # np.inf instead of the legacy np.infty alias, which was removed in
    # NumPy 2.0; the value is identical.
    result[1] = np.inf
    result[2] = 0
    return result
@numba.jit("i8(f8[:,:,:],i8,f8,i8,i8)")
def heap_push(heap, row, weight, index, flag):
    """Push a new element onto the heap. The heap stores potential neighbors
    for each data point. The ``row`` parameter determines which data point we
    are addressing, the ``weight`` determines the distance (for heap sorting),
    the ``index`` is the element to add, and the flag determines whether this
    is to be considered a new addition.

    Parameters
    ----------
    heap: ndarray generated by ``make_heap``
        The heap object to push into
    row: int
        Which actual heap within the heap object to push to
    weight: float
        The priority value of the element to push onto the heap
    index: int
        The actual value to be pushed
    flag: int
        Whether to flag the newly added element or not.

    Returns
    -------
    success: The number of new elements successfully pushed into the heap.
    """
    row = int(row)
    # Views into the three layers of this row: candidate indices, their
    # weights (organised as a max-heap), and the "new" flags.
    indices = heap[0, row]
    weights = heap[1, row]
    is_new = heap[2, row]
    # The root holds the largest weight; anything not smaller than it
    # cannot improve the neighbor list.
    if weight >= weights[0]:
        return 0
    # break if we already have this element.
    for i in range(indices.shape[0]):
        if index == indices[i]:
            return 0
    # insert val at position zero
    weights[0] = weight
    indices[0] = index
    is_new[0] = flag
    # descend the heap, swapping values until the max heap criterion is met
    i = 0
    while True:
        ic1 = 2 * i + 1  # left child
        ic2 = ic1 + 1    # right child
        if ic1 >= heap.shape[2]:
            # no children: position found
            break
        elif ic2 >= heap.shape[2]:
            # only a left child exists
            if weights[ic1] > weight:
                i_swap = ic1
            else:
                break
        elif weights[ic1] >= weights[ic2]:
            # left child is the larger of the two
            if weight < weights[ic1]:
                i_swap = ic1
            else:
                break
        else:
            if weight < weights[ic2]:
                i_swap = ic2
            else:
                break
        # pull the larger child up and keep sifting down
        weights[i] = weights[i_swap]
        indices[i] = indices[i_swap]
        is_new[i] = is_new[i_swap]
        i = i_swap
    # final resting place of the new element
    weights[i] = weight
    indices[i] = index
    is_new[i] = flag
    return 1
@numba.jit("i8(f8[:,:,:],i8,f8,i8,i8)")
def unchecked_heap_push(heap, row, weight, index, flag):
    """Push a new element onto the heap. The heap stores potential neighbors
    for each data point. The ``row`` parameter determines which data point we
    are addressing, the ``weight`` determines the distance (for heap sorting),
    the ``index`` is the element to add, and the flag determines whether this
    is to be considered a new addition.

    Unlike ``heap_push`` this variant does NOT scan for an existing copy of
    ``index`` before inserting, so the caller is responsible for avoiding
    duplicates.

    Parameters
    ----------
    heap: ndarray generated by ``make_heap``
        The heap object to push into
    row: int
        Which actual heap within the heap object to push to
    weight: float
        The priority value of the element to push onto the heap
    index: int
        The actual value to be pushed
    flag: int
        Whether to flag the newly added element or not.

    Returns
    -------
    success: The number of new elements successfully pushed into the heap.
    """
    indices = heap[0, row]
    weights = heap[1, row]
    is_new = heap[2, row]
    # Root carries the current maximum weight; a non-improving weight is
    # rejected immediately.
    if weight >= weights[0]:
        return 0
    # insert val at position zero
    weights[0] = weight
    indices[0] = index
    is_new[0] = flag
    # descend the heap, swapping values until the max heap criterion is met
    i = 0
    while True:
        ic1 = 2 * i + 1  # left child
        ic2 = ic1 + 1    # right child
        if ic1 >= heap.shape[2]:
            break
        elif ic2 >= heap.shape[2]:
            # only a left child exists
            if weights[ic1] > weight:
                i_swap = ic1
            else:
                break
        elif weights[ic1] >= weights[ic2]:
            if weight < weights[ic1]:
                i_swap = ic1
            else:
                break
        else:
            if weight < weights[ic2]:
                i_swap = ic2
            else:
                break
        # pull the larger child up and keep sifting down
        weights[i] = weights[i_swap]
        indices[i] = indices[i_swap]
        is_new[i] = is_new[i_swap]
        i = i_swap
    weights[i] = weight
    indices[i] = index
    is_new[i] = flag
    return 1
@numba.njit()
def siftdown(heap1, heap2, elt):
    """Restore the (max-)heap property for a heap with an out of place
    element at position ``elt``. Works on a heap pair: ``heap1`` carries the
    weights and ``heap2`` holds the corresponding elements; both are swapped
    in lockstep."""
    size = heap1.shape[0]
    while True:
        left = elt * 2 + 1
        if left >= size:
            return  # leaf reached
        right = left + 1
        # Pick the largest of the element and its children.
        largest = elt
        if heap1[largest] < heap1[left]:
            largest = left
        if right < size and heap1[largest] < heap1[right]:
            largest = right
        if largest == elt:
            return  # heap property holds
        heap1[elt], heap1[largest] = heap1[largest], heap1[elt]
        heap2[elt], heap2[largest] = heap2[largest], heap2[elt]
        elt = largest
@numba.njit()
def deheap_sort(heap):
    """Given an array of heaps (of indices and weights), unpack the heap
    out to give and array of sorted lists of indices and weights by increasing
    weight. This is effectively just the second half of heap sort (the first
    half not being required since we already have the data in a heap).

    Parameters
    ----------
    heap : array of shape (3, n_samples, n_neighbors)
        The heap to turn into sorted lists.

    Returns
    -------
    indices, weights: arrays of shape (n_samples, n_neighbors)
        The indices and weights sorted by increasing weight.
    """
    indices = heap[0]
    weights = heap[1]
    for i in range(indices.shape[0]):
        # Sort each row in place: ind_heap/dist_heap are views, so the
        # swaps below modify the heap arrays directly.
        ind_heap = indices[i]
        dist_heap = weights[i]
        for j in range(ind_heap.shape[0] - 1):
            # Heapsort extraction: swap the root (current max) to the end
            # of the shrinking prefix ...
            ind_heap[0], ind_heap[ind_heap.shape[0] - j - 1] = (
                ind_heap[ind_heap.shape[0] - j - 1],
                ind_heap[0],
            )
            dist_heap[0], dist_heap[dist_heap.shape[0] - j - 1] = (
                dist_heap[dist_heap.shape[0] - j - 1],
                dist_heap[0],
            )
            # ... then restore the heap property on the remaining prefix
            # (the slices are views, so siftdown mutates in place).
            siftdown(
                dist_heap[: dist_heap.shape[0] - j - 1],
                ind_heap[: ind_heap.shape[0] - j - 1],
                0,
            )
    return indices.astype(np.int64), weights
@numba.njit("i8(f8[:, :, :],i8)")
def smallest_flagged(heap, row):
    """Search the heap for the smallest element that is
    still flagged.

    Parameters
    ----------
    heap: array of shape (3, n_samples, n_neighbors)
        The heaps to search
    row: int
        Which of the heaps to search

    Returns
    -------
    index: int
        The index of the smallest flagged element
        of the ``row``th heap, or -1 if no flagged
        elements remain in the heap.
    """
    indices = heap[0, row]
    distances = heap[1, row]
    flags = heap[2, row]
    # Linear scan for the flagged entry with the smallest distance.
    best_pos = -1
    best_dist = np.inf
    for pos in range(indices.shape[0]):
        if flags[pos] == 1 and distances[pos] < best_dist:
            best_dist = distances[pos]
            best_pos = pos
    if best_pos < 0:
        return -1
    # Clear the flag so the element is not returned again.
    flags[best_pos] = 0.0
    return int(indices[best_pos])
@numba.njit(parallel=True)
def build_candidates(current_graph, n_vertices, n_neighbors, max_candidates, rng_state):
    """Build a heap of candidate neighbors for nearest neighbor descent. For
    each vertex the candidate neighbors are any current neighbors, and any
    vertices that have the vertex as one of their nearest neighbors.

    Parameters
    ----------
    current_graph: heap
        The current state of the graph for nearest neighbor descent.
    n_vertices: int
        The total number of vertices in the graph.
    n_neighbors: int
        The number of neighbor edges per node in the current graph.
    max_candidates: int
        The maximum number of new candidate neighbors.
    rng_state: array of int64, shape (3,)
        The internal state of the rng
    Returns
    -------
    candidate_neighbors: A heap with an array of (randomly sorted) candidate
        neighbors for each vertex in the graph.
    """
    # NOTE(review): decorated parallel=True but the outer loop uses range,
    # not numba.prange, so this body appears to run serially — confirm
    # whether parallelisation was intended here.
    candidate_neighbors = make_heap(n_vertices, max_candidates)
    for i in range(n_vertices):
        for j in range(n_neighbors):
            if current_graph[0, i, j] < 0:
                continue  # empty slot (make_heap initialises indices to -1)
            idx = current_graph[0, i, j]
            isn = current_graph[2, i, j]
            # Random priority gives each vertex a random subset of its
            # candidates once the heap overflows max_candidates.
            d = tau_rand(rng_state)
            # Push the edge in both directions (i -> idx and idx -> i).
            heap_push(candidate_neighbors, i, d, idx, isn)
            heap_push(candidate_neighbors, idx, d, i, isn)
            # Mark the neighbor as no longer new.
            current_graph[2, i, j] = 0
    return candidate_neighbors
@numba.njit(parallel=True)
def new_build_candidates(
    current_graph, n_vertices, n_neighbors, max_candidates, rng_state, rho=0.5
):  # pragma: no cover
    """Build a heap of candidate neighbors for nearest neighbor descent. For
    each vertex the candidate neighbors are any current neighbors, and any
    vertices that have the vertex as one of their nearest neighbors.

    Candidates are split into "new" and "old" heaps according to their flag
    in ``current_graph``; only a random ``rho`` fraction of edges is
    considered at all.

    Parameters
    ----------
    current_graph: heap
        The current state of the graph for nearest neighbor descent.
    n_vertices: int
        The total number of vertices in the graph.
    n_neighbors: int
        The number of neighbor edges per node in the current graph.
    max_candidates: int
        The maximum number of new candidate neighbors.
    rng_state: array of int64, shape (3,)
        The internal state of the rng
    rho: float
        Probability of sampling any given edge into the candidate heaps.
    Returns
    -------
    candidate_neighbors: A heap with an array of (randomly sorted) candidate
        neighbors for each vertex in the graph.
    """
    new_candidate_neighbors = make_heap(n_vertices, max_candidates)
    old_candidate_neighbors = make_heap(n_vertices, max_candidates)
    # NOTE(review): prange iterations push into shared heaps and share
    # rng_state — presumably tolerated as a benign race by the original
    # authors; verify before relying on exact reproducibility.
    for i in numba.prange(n_vertices):
        for j in range(n_neighbors):
            if current_graph[0, i, j] < 0:
                continue  # empty slot
            idx = current_graph[0, i, j]
            isn = current_graph[2, i, j]
            d = tau_rand(rng_state)
            if tau_rand(rng_state) < rho:
                c = 0
                if isn:
                    # New neighbor: route to the "new" heap, counting
                    # successful pushes.
                    c += heap_push(new_candidate_neighbors, i, d, idx, isn)
                    c += heap_push(new_candidate_neighbors, idx, d, i, isn)
                else:
                    heap_push(old_candidate_neighbors, i, d, idx, isn)
                    heap_push(old_candidate_neighbors, idx, d, i, isn)
                # Only clear the "new" flag if the edge actually landed in
                # the new-candidates heap.
                if c > 0:
                    current_graph[2, i, j] = 0
    return new_candidate_neighbors, old_candidate_neighbors
@numba.njit(parallel=True)
def submatrix(dmat, indices_col, n_neighbors):
    """Return a submatrix given an original matrix and the indices to keep.

    Parameters
    ----------
    dmat: array, shape (n_samples_transform, n_samples_fit)
        Original matrix.
    indices_col: array, shape (n_samples_transform, n_neighbors)
        Indices to keep. Each row consists of the indices of the columns.
    n_neighbors: int
        Number of neighbors.

    Returns
    -------
    submat: array, shape (n_samples_transform, n_neighbors)
        The corresponding submatrix.
    """
    n_samples_transform, n_samples_fit = dmat.shape
    submat = np.zeros((n_samples_transform, n_neighbors), dtype=dmat.dtype)
    for i in numba.prange(n_samples_transform):
        # Plain range here: numba does not parallelise nested prange loops,
        # so the original inner prange ran sequentially anyway.
        for j in range(n_neighbors):
            submat[i, j] = dmat[i, indices_col[i, j]]
    return submat
/OASYS1-APS-Extensions-1.0.87.tar.gz/OASYS1-APS-Extensions-1.0.87/orangecontrib/aps/shadow/widgets/_not_used/hybrid_screen_error_analysis.py |
__author__ = 'labx'
import os, sys
import orangecanvas.resources as resources
from oasys.widgets import gui as oasysgui
from oasys.widgets import congruence
from orangewidget import gui, widget
from orangewidget.settings import Setting
from oasys.util.oasys_util import EmittingStream
from orangecontrib.shadow.util.shadow_util import ShadowCongruence
from orangecontrib.shadow.util.shadow_objects import ShadowBeam
from PyQt5.QtGui import QImage, QPixmap, QPalette, QFont, QColor, QTextCursor
from PyQt5.QtWidgets import QLabel, QWidget, QHBoxLayout, QMessageBox, QFileDialog
from orangecontrib.shadow.widgets.gui.ow_automatic_element import AutomaticElement
from orangecontrib.shadow.widgets.special_elements import hybrid_control
from orangecontrib.shadow.util.shadow_objects import ShadowPreProcessorData
from orangecontrib.aps.util.gui import HistogramData, StatisticalDataCollection, HistogramDataCollection, \
DoublePlotWidget, write_histo_and_stats_file
from orangecontrib.aps.shadow.util.gui import Scan3DHistoWidget, ScanHistoWidget
class HybridScreenErrorAnalysis(AutomaticElement):
    """OASYS widget that runs Shadow HYBRID calculations over a set of
    height-error-profile files and collects/plots the resulting position
    histograms and statistics."""

    # Input channels: a Shadow beam and optional pre-processor data.
    # NOTE(review): "setPreProcessorData" handler is declared here but not
    # visible in this chunk — confirm it exists further down the file.
    inputs = [("Input Beam", ShadowBeam, "setBeam"),
              ("PreProcessor Data", ShadowPreProcessorData, "setPreProcessorData")]

    # Orange widget metadata.
    name = "Hybrid Screen - Error Analysis"
    description = "Shadow HYBRID: Hybrid Screen - Error Analysis"
    icon = "icons/hybrid_screen.png"
    maintainer = "Luca Rebuffi and Xianbo Shi"
    maintainer_email = "lrebuffi(@at@)anl.gov, xshi(@at@)aps.anl.gov"
    priority = 2
    category = "HYBRID"
    keywords = ["data", "file", "load", "read"]

    want_control_area = 1
    want_main_area = 1

    # Persisted user settings (see the combo boxes built in __init__ for
    # the meaning of each index).
    ghy_diff_plane = Setting(1)        # 0=Sagittal, 1=Tangential, 2=Both (2D), 3=Both (1D+1D)
    ghy_calcType = Setting(0)          # 0=mirror size+errors, 1=grating size+errors
    focal_length_calc = Setting(0)     # 0=use O.E. focal distance, 1=specify value
    ghy_focallength = Setting(0.0)
    distance_to_image_calc = Setting(0)  # 0=use O.E. image plane distance, 1=specify value
    ghy_distance = Setting(0.0)
    ghy_nf = Setting(0)                # near-field calculation: 0=No, 1=Yes
    ghy_nbins_x = Setting(100)
    ghy_nbins_z = Setting(100)
    ghy_npeak = Setting(10)
    ghy_fftnpts = Setting(1e6)
    file_to_write_out = 0
    ghy_automatic = Setting(1)         # analyse geometry to skip useless calculations
    files_area = None
    ghy_files = Setting([""])          # height-error profile files to cycle over
    input_beam = None

    # Fixed GUI geometry (pixels).
    TABS_AREA_HEIGHT = 560
    CONTROL_AREA_WIDTH = 405
    IMAGE_WIDTH = 865
    IMAGE_HEIGHT = 605

    # Results of the last run (histograms and statistics, far/near field,
    # sagittal/tangential), used by export_error_analysis.
    current_histo_data_x_ff = None
    current_histo_data_x_nf = None
    current_histo_data_z_ff = None
    current_histo_data_z_nf = None
    current_stats_x_ff = None
    current_stats_x_nf = None
    current_stats_z_ff = None
    current_stats_z_nf = None

    plot_type = Setting(1)     # 0=2D, 1=3D
    plot_type_3D = Setting(0)  # 0=Lines, 1=Surface
    colormap = Setting(0)
    def __init__(self):
        """Build the widget GUI: run button, plot/output tabs, and the
        basic/advanced settings panels."""
        super().__init__()

        # "Run Hybrid" action, also exposed as a toolbar action.
        self.runaction = widget.OWAction("Run Hybrid", self)
        self.runaction.triggered.connect(self.run_hybrid)
        self.addAction(self.runaction)

        self.controlArea.setFixedWidth(self.CONTROL_AREA_WIDTH)

        # Prominent run button (bold, dark blue).
        button_box = oasysgui.widgetBox(self.controlArea, "", addSpace=False, orientation="horizontal")
        button = gui.button(button_box, self, "Run HYBRID", callback=self.run_hybrid)
        font = QFont(button.font())
        font.setBold(True)
        button.setFont(font)
        palette = QPalette(button.palette()) # make a copy of the palette
        palette.setColor(QPalette.ButtonText, QColor('Dark Blue'))
        button.setPalette(palette) # assign new palette
        button.setFixedHeight(45)

        # Main area: one tab for plots, one for captured stdout.
        main_tabs = oasysgui.tabWidget(self.mainArea)
        plot_tab = oasysgui.createTabPage(main_tabs, "Plots")
        out_tab = oasysgui.createTabPage(main_tabs, "Output")
        self.tabs = oasysgui.tabWidget(plot_tab)

        # Control area: basic and advanced settings tabs.
        tabs_setting = oasysgui.tabWidget(self.controlArea)
        tabs_setting.setFixedHeight(self.TABS_AREA_HEIGHT)
        tabs_setting.setFixedWidth(self.CONTROL_AREA_WIDTH-5)
        tab_bas = oasysgui.createTabPage(tabs_setting, "Basic Setting")
        tab_adv = oasysgui.createTabPage(tabs_setting, "Advanced Setting")

        # --- Basic settings: calculation parameters ---
        box_1 = oasysgui.widgetBox(tab_bas, "Calculation Parameters", addSpace=True, orientation="vertical", height=100)
        gui.comboBox(box_1, self, "ghy_diff_plane", label="Diffraction Plane", labelWidth=310,
                     items=["Sagittal", "Tangential", "Both (2D)", "Both (1D+1D)"],
                     callback=self.set_DiffPlane,
                     sendSelectedValue=False, orientation="horizontal")
        gui.comboBox(box_1, self, "ghy_calcType", label="Calculation", labelWidth=70,
                     items=["Diffraction by Mirror Size + Figure Errors",
                            "Diffraction by Grating Size + Figure Errors",],
                     callback=self.set_CalculationType,
                     sendSelectedValue=False, orientation="horizontal")
        gui.separator(box_1, 10)

        # --- Basic settings: error-profile file list ---
        box_files = oasysgui.widgetBox(tab_bas, "Height Error Profiles", addSpace=True, orientation="vertical", height=180)
        gui.button(box_files, self, "Select Height Error Profile Data Files", callback=self.select_files)
        self.files_area = oasysgui.textArea(height=120, width=360)
        self.refresh_files_text_area()
        box_files.layout().addWidget(self.files_area)

        # --- Basic settings: numerical controls ---
        box_2 = oasysgui.widgetBox(tab_bas, "Numerical Control Parameters", addSpace=True, orientation="vertical", height=140)
        self.le_nbins_x = oasysgui.lineEdit(box_2, self, "ghy_nbins_x", "Number of bins for I(Sagittal) histogram", labelWidth=260, valueType=int, orientation="horizontal")
        self.le_nbins_z = oasysgui.lineEdit(box_2, self, "ghy_nbins_z", "Number of bins for I(Tangential) histogram", labelWidth=260, valueType=int, orientation="horizontal")
        self.le_npeak = oasysgui.lineEdit(box_2, self, "ghy_npeak", "Number of diffraction peaks", labelWidth=260, valueType=int, orientation="horizontal")
        self.le_fftnpts = oasysgui.lineEdit(box_2, self, "ghy_fftnpts", "Number of points for FFT", labelWidth=260, valueType=int, orientation="horizontal")

        # --- Advanced settings: propagation parameters ---
        box_3 = oasysgui.widgetBox(tab_adv, "Propagation Parameters", addSpace=True, orientation="vertical", height=200)
        self.cb_focal_length_calc = gui.comboBox(box_3, self, "focal_length_calc", label="Focal Length", labelWidth=180,
                                                 items=["Use O.E. Focal Distance", "Specify Value"],
                                                 callback=self.set_FocalLengthCalc,
                                                 sendSelectedValue=False, orientation="horizontal")
        self.le_focal_length = oasysgui.lineEdit(box_3, self, "ghy_focallength", "Focal Length value", labelWidth=260, valueType=float, orientation="horizontal")
        gui.separator(box_3)
        self.cb_distance_to_image_calc = gui.comboBox(box_3, self, "distance_to_image_calc", label="Distance to image", labelWidth=150,
                                                      items=["Use O.E. Image Plane Distance", "Specify Value"],
                                                      callback=self.set_DistanceToImageCalc,
                                                      sendSelectedValue=False, orientation="horizontal")
        self.le_distance_to_image = oasysgui.lineEdit(box_3, self, "ghy_distance", "Distance to Image value", labelWidth=260, valueType=float, orientation="horizontal")
        gui.separator(box_3)
        self.cb_nf = gui.comboBox(box_3, self, "ghy_nf", label="Near Field Calculation", labelWidth=310,
                                  items=["No", "Yes"],
                                  sendSelectedValue=False, orientation="horizontal", callback=self.set_NF)

        # --- Advanced settings: geometry analysis toggle ---
        box_4 = oasysgui.widgetBox(tab_adv, "Geometrical Parameters", addSpace=True, orientation="vertical", height=70)
        gui.comboBox(box_4, self, "ghy_automatic", label="Analize geometry to avoid unuseful calculations", labelWidth=310,
                     items=["No", "Yes"],
                     sendSelectedValue=False, orientation="horizontal")

        # --- Advanced settings: plot appearance ---
        box_5 = oasysgui.widgetBox(tab_adv, "Plot Setting", addSpace=True, orientation="vertical", height=150)
        gui.comboBox(box_5, self, "plot_type", label="Plot Type", labelWidth=310,
                     items=["2D", "3D"],
                     sendSelectedValue=False, orientation="horizontal", callback=self.set_PlotType)
        self.box_pt_1 = oasysgui.widgetBox(box_5, "", addSpace=False, orientation="vertical", height=30)
        self.box_pt_2 = oasysgui.widgetBox(box_5, "", addSpace=False, orientation="vertical", height=30)
        gui.comboBox(self.box_pt_2, self, "plot_type_3D", label="3D Plot Aspect", labelWidth=310,
                     items=["Lines", "Surface"],
                     sendSelectedValue=False, orientation="horizontal")

        # Apply the initial enabled/visible state derived from the settings.
        self.set_DiffPlane()
        self.set_DistanceToImageCalc()
        self.set_CalculationType()
        self.set_NF()
        self.set_PlotType()
        self.initializeTabs()

        # Export button and captured-output area.
        adv_other_box = oasysgui.widgetBox(tab_bas, "Export Data", addSpace=False, orientation="vertical")
        gui.button(adv_other_box, self, "Export Error Analysis", callback=self.export_error_analysis)
        self.shadow_output = oasysgui.textArea(height=580, width=800)
        out_box = gui.widgetBox(out_tab, "System Output", addSpace=True, orientation="horizontal")
        out_box.layout().addWidget(self.shadow_output)
def after_change_workspace_units(self):
label = self.le_focal_length.parent().layout().itemAt(0).widget()
label.setText(label.text() + " [" + self.workspace_units_label + "]")
label = self.le_distance_to_image.parent().layout().itemAt(0).widget()
label.setText(label.text() + " [" + self.workspace_units_label + "]")
    def select_files(self):
        """Open a file dialog to pick one or more height-error-profile data
        files; on a non-empty selection, store it and refresh the display."""
        files, _ = QFileDialog.getOpenFileNames(self,
                                                "Select Height Error Profiles", "","Data Files (*.dat);;Sha Files (*.sha)",
                                                options=QFileDialog.Options())
        # An empty list (dialog cancelled) leaves the previous selection.
        if files:
            self.ghy_files = files
            self.refresh_files_text_area()
    def initializeTabs(self):
        """Rebuild the plot tab structure to match the current diffraction
        plane and near-field settings, and reset all plot canvases.

        ``self.tab`` becomes a list of [Position, Stats] tab-page pairs, one
        pair per plot canvas index.
        """
        self.tabs.clear()
        tabs = []
        if self.ghy_diff_plane < 2:
            # Single plane: one far-field group, plus a near-field group if enabled.
            tabs.append(oasysgui.tabWidget(gui.createTabPage(self.tabs, "Distribution of Position at Image Plane")))
            self.tab = [[gui.createTabPage(tabs[0], "Position"), gui.createTabPage(tabs[0], "Stats")]]
            if self.ghy_nf == 1:
                tabs.append(oasysgui.tabWidget(gui.createTabPage(self.tabs, "Distribution of Position at Near Field")))
                self.tab.append([gui.createTabPage(tabs[1], "Position"), gui.createTabPage(tabs[1], "Stats")])
        elif self.ghy_diff_plane >= 2:
            # Both planes: groups for sagittal (S) and tangential (T).
            if self.ghy_nf == 1:
                tabs.append(oasysgui.tabWidget(gui.createTabPage(self.tabs, "Distribution of Position at Image Plane (S)")))
                tabs.append(oasysgui.tabWidget(gui.createTabPage(self.tabs, "Distribution of Position at Near Field (S)")))
                tabs.append(oasysgui.tabWidget(gui.createTabPage(self.tabs, "Distribution of Position at Image Plane (T)")))
                tabs.append(oasysgui.tabWidget(gui.createTabPage(self.tabs, "Distribution of Position at Near Field (T)")))
                self.tab = [[gui.createTabPage(tabs[0], "Position"), gui.createTabPage(tabs[0], "Stats")],
                            [gui.createTabPage(tabs[1], "Position"), gui.createTabPage(tabs[1], "Stats")],
                            [gui.createTabPage(tabs[2], "Position"), gui.createTabPage(tabs[2], "Stats")],
                            [gui.createTabPage(tabs[3], "Position"), gui.createTabPage(tabs[3], "Stats")]
                            ]
            else:
                tabs.append(oasysgui.tabWidget(gui.createTabPage(self.tabs, "Distribution of Position at Image Plane (S)")))
                tabs.append(oasysgui.tabWidget(gui.createTabPage(self.tabs, "Distribution of Position at Image Plane (T)")))
                self.tab = [[gui.createTabPage(tabs[0], "Position"), gui.createTabPage(tabs[0], "Stats")],
                            [gui.createTabPage(tabs[1], "Position"), gui.createTabPage(tabs[1], "Stats")]
                            ]
        for tab in tabs:
            tab.setFixedHeight(self.IMAGE_HEIGHT)
            tab.setFixedWidth(self.IMAGE_WIDTH)
        # Lazily (re)created by the plotting methods.
        self.plot_canvas = [None, None, None, None]
        self.plot_canvas_stats = [None, None, None, None]
    def plot_emtpy(self, progressBarValue, plot_canvas_index):
        """Show a placeholder "no result" image on the given plot canvas and
        advance the progress bar.

        NOTE(review): the method name misspells "empty"; kept as-is because
        callers throughout this widget use this exact name.
        """
        if self.plot_canvas[plot_canvas_index] is None:
            widget = QWidget()
            widget.setLayout(QHBoxLayout())
            label = QLabel(widget)
            label.setPixmap(QPixmap(QImage(os.path.join(resources.package_dirname("orangecontrib.shadow.widgets.extension"), "icons", "no_result.png"))))
            widget.layout().addWidget(label)
            self.plot_canvas[plot_canvas_index] = widget
            self.tab[plot_canvas_index].layout().addWidget(self.plot_canvas[plot_canvas_index])
        self.progressBarSet(progressBarValue)
def setBeam(self, beam):
if ShadowCongruence.checkEmptyBeam(beam):
if ShadowCongruence.checkGoodBeam(beam):
self.input_beam = beam
if self.is_automatic_run:
self.run_hybrid()
def set_PlotType(self):
self.plot_canvas = [None, None, None, None]
self.box_pt_1.setVisible(self.plot_type==0)
self.box_pt_2.setVisible(self.plot_type==1)
def set_DiffPlane(self):
self.le_nbins_x.setEnabled(self.ghy_diff_plane == 0 or self.ghy_diff_plane == 2)
self.le_nbins_z.setEnabled(self.ghy_diff_plane == 1 or self.ghy_diff_plane == 2)
if self.ghy_diff_plane != 2:
self.cb_nf.setEnabled(True)
else:
self.cb_nf.setEnabled(False)
self.ghy_nf = 0
self.set_NF()
def set_CalculationType(self):
if self.ghy_diff_plane != 2:
self.cb_nf.setEnabled(True)
else:
self.cb_nf.setEnabled(False)
self.ghy_nf = 0
self.set_NF()
def set_NF(self):
if self.ghy_nf == 0:
self.focal_length_calc = 0
self.distance_to_image_calc = 0
self.cb_focal_length_calc.setEnabled(False)
self.le_focal_length.setEnabled(False)
else:
self.cb_focal_length_calc.setEnabled(True)
self.le_focal_length.setEnabled(True)
self.set_FocalLengthCalc()
def set_FocalLengthCalc(self):
self.le_focal_length.setEnabled(self.focal_length_calc == 1)
def set_DistanceToImageCalc(self):
self.le_distance_to_image.setEnabled(self.distance_to_image_calc == 1)
    def run_hybrid(self):
        """Run the HYBRID calculation: first a reference pass without error
        profile, then one pass per selected height-error-profile file,
        accumulating and plotting histograms and statistics for each pass.

        All errors are reported via a message box; the progress bar is
        always finalized.
        """
        try:
            self.setStatusMessage("")
            self.progressBarInit()
            self.initializeTabs()
            if ShadowCongruence.checkEmptyBeam(self.input_beam):
                if ShadowCongruence.checkGoodBeam(self.input_beam):
                    # Redirect stdout into the widget's output text area.
                    sys.stdout = EmittingStream(textWritten=self.write_stdout)
                    self.check_fields()

                    # Build the HYBRID input from the widget settings.
                    input_parameters = hybrid_control.HybridInputParameters()
                    input_parameters.ghy_lengthunit = self.workspace_units
                    input_parameters.widget = self
                    # Widget stores 0-based plane index; hybrid_control uses 1-based.
                    input_parameters.ghy_diff_plane = self.ghy_diff_plane + 1
                    # -1 tells hybrid_control to take the O.E. value.
                    if self.distance_to_image_calc == 0:
                        input_parameters.ghy_distance = -1
                    else:
                        input_parameters.ghy_distance = self.ghy_distance
                    if self.focal_length_calc == 0:
                        input_parameters.ghy_focallength = -1
                    else:
                        input_parameters.ghy_focallength = self.ghy_focallength
                    input_parameters.ghy_nf = self.ghy_nf
                    input_parameters.ghy_nbins_x = int(self.ghy_nbins_x)
                    input_parameters.ghy_nbins_z = int(self.ghy_nbins_z)
                    input_parameters.ghy_npeak = int(self.ghy_npeak)
                    input_parameters.ghy_fftnpts = int(self.ghy_fftnpts)
                    input_parameters.file_to_write_out = self.file_to_write_out
                    input_parameters.ghy_automatic = self.ghy_automatic

                    # -----------------------------------------------
                    # cycling over figure errors:
                    # add the reference (no error profile) first
                    shadow_beam = self.input_beam.duplicate()
                    history_entry = shadow_beam.getOEHistory(shadow_beam._oe_number)
                    shadow_oe = history_entry._shadow_oe_start # changes to the original object!
                    shadow_oe._oe.F_RIPPLE = 0  # disable the ripple (error profile)
                    input_parameters.ghy_calcType = 2  # size + no figure error
                    input_parameters.shadow_beam = shadow_beam

                    calculation_parameters = hybrid_control.hy_run(input_parameters)

                    # hy_run may have refined these values: write them back
                    # into the widget settings.
                    self.ghy_focallength = input_parameters.ghy_focallength
                    self.ghy_distance = input_parameters.ghy_distance
                    self.ghy_nbins_x = int(input_parameters.ghy_nbins_x)
                    self.ghy_nbins_z = int(input_parameters.ghy_nbins_z)
                    self.ghy_npeak = int(input_parameters.ghy_npeak)
                    self.ghy_fftnpts = int(input_parameters.ghy_fftnpts)

                    # Decide which planes to plot; in automatic mode skip a
                    # plane when the beam was not cut in that direction.
                    if input_parameters.ghy_calcType == 3 or input_parameters.ghy_calcType == 4:
                        do_plot_x = True
                        do_plot_z = True
                    else:
                        if self.ghy_automatic == 1:
                            do_plot_x = not calculation_parameters.beam_not_cut_in_x
                            do_plot_z = not calculation_parameters.beam_not_cut_in_z
                        else:
                            do_plot_x = True
                            do_plot_z = True
                    do_nf = input_parameters.ghy_nf == 1 and input_parameters.ghy_calcType > 1

                    if do_plot_x or do_plot_z:
                        self.setStatusMessage("Plotting Results")
                        profile = 0  # profile 0 == the no-error reference
                        self.current_histo_data_x_ff = None
                        self.current_histo_data_x_nf = None
                        self.current_histo_data_z_ff = None
                        self.current_histo_data_z_nf = None
                        self.current_stats_x_ff = None
                        self.current_stats_x_nf = None
                        self.current_stats_z_ff = None
                        self.current_stats_z_nf = None
                        histo_data_x_ff, \
                        histo_data_z_ff, \
                        histo_data_x_nf, \
                        histo_data_z_nf = self.plot_results(calculation_parameters=calculation_parameters,
                                                            do_nf=do_nf,
                                                            do_plot_x=do_plot_x,
                                                            do_plot_z=do_plot_z,
                                                            histo_data_x_ff=HistogramData(),
                                                            histo_data_z_ff=HistogramData(),
                                                            histo_data_x_nf=HistogramData(),
                                                            histo_data_z_nf=HistogramData(),
                                                            profile=profile)
                        # Start the per-profile collections from the reference.
                        if not histo_data_x_ff.bins is None: self.current_histo_data_x_ff = HistogramDataCollection(histo_data_x_ff)
                        if not histo_data_z_ff.bins is None: self.current_histo_data_z_ff = HistogramDataCollection(histo_data_z_ff)
                        if not histo_data_x_nf.bins is None: self.current_histo_data_x_nf = HistogramDataCollection(histo_data_x_nf)
                        if not histo_data_z_nf.bins is None: self.current_histo_data_z_nf = HistogramDataCollection(histo_data_z_nf)
                        stats_x_ff = StatisticalDataCollection(histo_data_x_ff)
                        stats_z_ff = StatisticalDataCollection(histo_data_z_ff)
                        stats_x_nf = StatisticalDataCollection(histo_data_x_nf)
                        stats_z_nf = StatisticalDataCollection(histo_data_z_nf)

                    # Subsequent passes: size + figure errors (calcType 3/4).
                    input_parameters.ghy_calcType = self.ghy_calcType + 3

                    for file in self.ghy_files:
                        shadow_beam = self.input_beam.duplicate()
                        history_entry = shadow_beam.getOEHistory(shadow_beam._oe_number)
                        shadow_oe = history_entry._shadow_oe_start # changes to the original object!
                        shadow_oe._oe.F_RIPPLE = 1  # enable the error profile
                        shadow_oe._oe.F_G_S = 2
                        file = congruence.checkFile(file)
                        ShadowCongruence.checkErrorProfileFile(file)
                        shadow_oe._oe.FILE_RIP = bytes(file, 'utf-8')
                        input_parameters.shadow_beam = shadow_beam
                        calculation_parameters = hybrid_control.hy_run(input_parameters)
                        if do_plot_x or do_plot_z:
                            self.setStatusMessage("Plotting Results")
                            profile += 1
                            histo_data_x_ff, \
                            histo_data_z_ff, \
                            histo_data_x_nf, \
                            histo_data_z_nf = self.plot_results(calculation_parameters,
                                                                do_nf,
                                                                do_plot_x,
                                                                do_plot_z,
                                                                histo_data_x_ff,
                                                                histo_data_z_ff,
                                                                histo_data_x_nf,
                                                                histo_data_z_nf,
                                                                profile)
                            if not histo_data_x_ff.bins is None: self.current_histo_data_x_ff.add_histogram_data(histo_data_x_ff)
                            if not histo_data_z_ff.bins is None: self.current_histo_data_z_ff.add_histogram_data(histo_data_z_ff)
                            if not histo_data_x_nf.bins is None: self.current_histo_data_x_nf.add_histogram_data(histo_data_x_nf)
                            if not histo_data_z_nf.bins is None: self.current_histo_data_z_nf.add_histogram_data(histo_data_z_nf)
                            stats_x_ff.add_statistical_data(histo_data_x_ff)
                            stats_z_ff.add_statistical_data(histo_data_z_ff)
                            stats_x_nf.add_statistical_data(histo_data_x_nf)
                            stats_z_nf.add_statistical_data(histo_data_z_nf)

                    # NOTE(review): if both do_plot flags are False these
                    # names were never bound; the resulting NameError would
                    # be swallowed by the except below — confirm intended.
                    self.current_stats_x_ff = stats_x_ff
                    self.current_stats_z_ff = stats_z_ff
                    self.current_stats_x_nf = stats_x_nf
                    self.current_stats_z_nf = stats_z_nf
                    self.add_empty_curves(do_nf,
                                          do_plot_x,
                                          do_plot_z,
                                          histo_data_x_ff,
                                          histo_data_x_nf,
                                          histo_data_z_ff,
                                          histo_data_z_nf)
                    self.plot_stats(do_nf,
                                    do_plot_x,
                                    do_plot_z,
                                    stats_x_ff,
                                    stats_z_ff,
                                    stats_x_nf,
                                    stats_z_nf,)
                else:
                    raise Exception("Input Beam with no good rays")
            else:
                raise Exception("Empty Input Beam")
        except Exception as exception:
            QMessageBox.critical(self, "Error", str(exception), QMessageBox.Ok)
            if self.IS_DEVELOP: raise exception
        self.setStatusMessage("")
        self.progressBarFinished()
    def plot_results(self,
                     calculation_parameters,
                     do_nf,
                     do_plot_x,
                     do_plot_z,
                     histo_data_x_ff,
                     histo_data_z_ff,
                     histo_data_x_nf,
                     histo_data_z_nf,
                     profile):
        """Plot the far-field (and optionally near-field) position histograms
        for the current pass, dispatching on the diffraction-plane setting.

        Column 1 is the sagittal (X) coordinate, column 3 the tangential (Z)
        one. The incoming ``histo_data_*`` objects supply the offset/xrange
        to keep successive profiles on a common axis; the updated objects
        are returned as (x_ff, z_ff, x_nf, z_nf). Branches that cannot be
        plotted show the "no result" placeholder via plot_emtpy.
        """
        if self.ghy_diff_plane == 0:
            # Sagittal only.
            if do_plot_x:
                histo_data_x_ff = self.plot_histo(calculation_parameters.ff_beam, 1, progressBarValue=88,
                                                  plot_canvas_index=0, title="X",
                                                  xtitle=r'X [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                  offset=histo_data_x_ff.offset, xrange=histo_data_x_ff.xrange)
                if do_nf:
                    histo_data_x_nf = self.plot_histo(calculation_parameters.nf_beam, 1, progressBarValue=96,
                                                      plot_canvas_index=1, title="X",
                                                      xtitle=r'X [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                      offset=histo_data_x_nf.offset, xrange=histo_data_x_nf.xrange)
            else:
                if do_nf:
                    self.plot_emtpy(88, 0)
                    self.plot_emtpy(96, 1)
                else:
                    self.plot_emtpy(88, 0)
        elif self.ghy_diff_plane == 1:
            # Tangential only.
            if do_plot_z:
                histo_data_z_ff = self.plot_histo(calculation_parameters.ff_beam, 3, progressBarValue=88,
                                                  plot_canvas_index=0, title="Z",
                                                  xtitle=r'Z [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                  offset=histo_data_z_ff.offset, xrange=histo_data_z_ff.xrange)
                if do_nf:
                    histo_data_z_nf = self.plot_histo(calculation_parameters.nf_beam, 3, progressBarValue=96,
                                                      plot_canvas_index=1, title="Z",
                                                      xtitle=r'Z [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                      offset=histo_data_z_nf.offset, xrange=histo_data_z_nf.xrange)
            else:
                self.plot_emtpy(88, 0)
                if do_nf:
                    self.plot_emtpy(96, 1)
        elif self.ghy_diff_plane >= 2:
            # Both planes: canvases 0/1 = far field (X/Z), 2/3 = near field.
            if do_plot_x and do_plot_z:
                histo_data_x_ff = self.plot_histo(calculation_parameters.ff_beam, 1, progressBarValue=88,
                                                  plot_canvas_index=0, title="X",
                                                  xtitle=r'X [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                  offset=histo_data_x_ff.offset, xrange=histo_data_x_ff.xrange)
                histo_data_z_ff = self.plot_histo(calculation_parameters.ff_beam, 3, progressBarValue=88,
                                                  plot_canvas_index=1, title="Z",
                                                  xtitle=r'Z [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                  offset=histo_data_z_ff.offset, xrange=histo_data_z_ff.xrange)
                if do_nf:
                    histo_data_x_nf = self.plot_histo(calculation_parameters.nf_beam, 1, progressBarValue=96,
                                                      plot_canvas_index=2, title="X",
                                                      xtitle=r'X [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                      offset=histo_data_x_nf.offset, xrange=histo_data_x_nf.xrange)
                    histo_data_z_nf = self.plot_histo(calculation_parameters.nf_beam, 3, progressBarValue=96,
                                                      plot_canvas_index=3, title="Z",
                                                      xtitle=r'Z [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                      offset=histo_data_z_nf.offset, xrange=histo_data_z_nf.xrange)
            else:
                # Only one plane could be plotted: reuse canvases 0/1.
                if do_plot_x:
                    histo_data_x_ff = self.plot_histo(calculation_parameters.ff_beam, 1, progressBarValue=88,
                                                      plot_canvas_index=0, title="X",
                                                      xtitle=r'X [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                      offset=histo_data_x_ff.offset, xrange=histo_data_x_ff.xrange)
                    if do_nf:
                        histo_data_x_nf = self.plot_histo(calculation_parameters.nf_beam, 1, progressBarValue=96,
                                                          plot_canvas_index=1, title="X",
                                                          xtitle=r'X [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                          offset=histo_data_x_nf.offset, xrange=histo_data_x_nf.xrange)
                elif do_plot_z:
                    histo_data_z_ff = self.plot_histo(calculation_parameters.ff_beam, 3, progressBarValue=88,
                                                      plot_canvas_index=0, title="Z",
                                                      xtitle=r'Z [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                      offset=histo_data_z_ff.offset, xrange=histo_data_z_ff.xrange)
                    if do_nf:
                        histo_data_z_nf = self.plot_histo(calculation_parameters.nf_beam, 3, progressBarValue=96,
                                                          plot_canvas_index=1, title="Z",
                                                          xtitle=r'Z [$\mu$m]', ytitle=r'Number of Rays', profile=profile,
                                                          offset=histo_data_z_nf.offset, xrange=histo_data_z_nf.xrange)
                else:
                    self.plot_emtpy(88, 0)
                    if do_nf:
                        self.plot_emtpy(96, 1)
        return histo_data_x_ff, histo_data_z_ff, histo_data_x_nf, histo_data_z_nf
def add_empty_curves(self, do_nf, do_plot_x, do_plot_z, histo_data_x_ff, histo_data_x_nf, histo_data_z_ff,
histo_data_z_nf):
if self.ghy_diff_plane == 0:
if do_plot_x:
self.plot_canvas_stats[0].add_empty_curve(histo_data_x_ff)
if do_nf:
self.plot_canvas[1].add_empty_curve(histo_data_x_nf)
elif self.ghy_diff_plane == 1:
if do_plot_z:
self.plot_canvas[0].add_empty_curve(histo_data_z_ff)
if do_nf:
self.plot_canvas[1].add_empty_curve(histo_data_z_nf)
else:
if do_plot_x and do_plot_z:
self.plot_canvas[0].add_empty_curve(histo_data_x_ff)
self.plot_canvas[1].add_empty_curve(histo_data_z_ff)
if do_nf:
self.plot_canvas[2].add_empty_curve(histo_data_x_nf)
self.plot_canvas[3].add_empty_curve(histo_data_z_nf)
else:
if do_plot_x:
self.plot_canvas[0].add_empty_curve(histo_data_x_ff)
if do_nf:
self.plot_canvas[1].add_empty_curve(histo_data_x_nf)
elif do_plot_z:
self.plot_canvas[0].add_empty_curve(histo_data_z_ff)
if do_nf:
self.plot_canvas[1].add_empty_curve(histo_data_z_nf)
def plot_stats(self, do_nf, do_plot_x, do_plot_z, stats_x_ff, stats_z_ff, stats_x_nf, stats_z_nf):
if self.ghy_diff_plane == 0:
if do_plot_x:
self.plot_stat(stats_x_ff, 0)
if do_nf:
self.plot_stat(stats_x_nf, 1)
elif self.ghy_diff_plane == 1:
if do_plot_z:
self.plot_stat(stats_z_ff, 0)
if do_nf:
self.plot_stat(stats_z_nf, 1)
else:
if do_plot_x and do_plot_z:
self.plot_stat(stats_x_ff, 0)
self.plot_stat(stats_z_ff, 1)
if do_nf:
self.plot_stat(stats_x_nf, 2)
self.plot_stat(stats_z_nf, 3)
else:
if do_plot_x:
self.plot_stat(stats_x_ff, 0)
if do_nf:
self.plot_stat(stats_x_nf, 1)
elif do_plot_z:
self.plot_stat(stats_z_ff, 0)
if do_nf:
self.plot_stat(stats_z_nf, 1)
def plot_stat(self, stats, plot_canvas_index, sigma_um="$\mu$m"):
if self.plot_canvas_stats[plot_canvas_index] is None:
self.plot_canvas_stats[plot_canvas_index] = DoublePlotWidget(parent=None)
self.tab[plot_canvas_index][1].layout().addWidget(self.plot_canvas_stats[plot_canvas_index])
self.plot_canvas_stats[plot_canvas_index].plotCurves(stats.get_scan_values(),
stats.get_sigmas(),
stats.get_relative_peak_intensities(),
"Statistics",
"Profiles",
"Sigma [" + sigma_um + "]",
"Relative Peak Intensity")
def plot_histo(self, beam, col, nbins=100, progressBarValue=80, plot_canvas_index=0, title="", xtitle="", ytitle="",
profile=1, offset=0.0, xrange=None):
if self.plot_canvas[plot_canvas_index] is None:
if self.plot_type == 0:
self.plot_canvas[plot_canvas_index] = ScanHistoWidget(self.workspace_units_to_cm)
elif self.plot_type==1:
self.plot_canvas[plot_canvas_index] = Scan3DHistoWidget(self.workspace_units_to_cm,
type=Scan3DHistoWidget.PlotType.LINES if self.plot_type_3D==0 else Scan3DHistoWidget.PlotType.SURFACE)
self.tab[plot_canvas_index][0].layout().addWidget(self.plot_canvas[plot_canvas_index])
histo_data = self.plot_canvas[plot_canvas_index].plot_histo(beam=beam,
col=col,
nbins=nbins,
title=title,
xtitle=xtitle,
ytitle=ytitle,
histo_index=profile,
scan_variable_name="Profile #",
scan_variable_value=profile,
offset=offset,
xrange=xrange)
histo_data.scan_value=profile
self.progressBarSet(progressBarValue)
return histo_data
def check_fields(self):
if self.focal_length_calc == 1:
congruence.checkPositiveNumber(self.ghy_focallength, "Focal Length value")
if self.distance_to_image_calc == 1:
congruence.checkPositiveNumber(self.ghy_distance, "Distance to image value")
if self.ghy_diff_plane == 0 or self.ghy_diff_plane == 2:
congruence.checkStrictlyPositiveNumber(self.ghy_nbins_x, "Number of bins for I(Sagittal) histogram")
if self.ghy_diff_plane == 1 or self.ghy_diff_plane == 2:
congruence.checkStrictlyPositiveNumber(self.ghy_nbins_z, "Number of bins for I(Tangential) histogram")
if self.ghy_files is None or len(self.ghy_files) == 0 or (len(self.ghy_files) == 1 and self.ghy_files[0] == ""):
raise ValueError("Height Error Profiles list is empty")
congruence.checkStrictlyPositiveNumber(self.ghy_npeak, "Number of diffraction peaks")
congruence.checkStrictlyPositiveNumber(self.ghy_fftnpts, "Number of points for FFT")
def set_progress_bar(self, value):
if value >= 100:
self.progressBarFinished()
elif value <=0:
self.progressBarInit()
else:
self.progressBarSet(value)
    def status_message(self, message):
        # Thin delegate: forward the text to the Orange widget status bar.
        self.setStatusMessage(message)
    def write_stdout(self, text):
        """Append captured stdout text to the output text area, keeping the
        cursor (and therefore the viewport) at the end of the document."""
        cursor = self.shadow_output.textCursor()
        cursor.movePosition(QTextCursor.End)
        cursor.insertText(text)
        self.shadow_output.setTextCursor(cursor)
        self.shadow_output.ensureCursorVisible()
def setPreProcessorData(self, data):
if data is not None:
if data.error_profile_data_file != ShadowPreProcessorData.NONE:
if isinstance(data.error_profile_data_file, str):
self.ghy_files.append(data.error_profile_data_file)
elif isinstance(data.error_profile_data_file, list):
self.ghy_files = data.error_profile_data_file
else:
raise ValueError("Error Profile Data File: format not recognized")
self.refresh_files_text_area()
def refresh_files_text_area(self):
text = ""
for file in self.ghy_files:
text += file + "\n"
self.files_area.setText(text)
def export_error_analysis(self):
output_folder = QFileDialog.getExistingDirectory(self, "Select Output Directory", directory=os.curdir)
if output_folder:
if not self.current_histo_data_x_ff is None:
write_histo_and_stats_file(histo_data=self.current_histo_data_x_ff,
stats=self.current_stats_x_ff,
suffix="_S_FF",
output_folder=output_folder)
if not self.current_histo_data_x_nf is None:
write_histo_and_stats_file(histo_data=self.current_histo_data_x_nf,
stats=self.current_stats_x_nf,
suffix="_S_NF",
output_folder=output_folder)
if not self.current_histo_data_z_ff is None:
write_histo_and_stats_file(histo_data=self.current_histo_data_z_ff,
stats=self.current_stats_z_ff,
suffix="_T_FF",
output_folder=output_folder)
if not self.current_histo_data_z_nf is None:
write_histo_and_stats_file(histo_data=self.current_histo_data_z_nf.bins,
stats=self.current_stats_z_nf,
suffix="_T_NF",
output_folder=output_folder)
QMessageBox.information(self, "Export Error Analysis Data", "Data saved into directory: " + output_folder, QMessageBox.Ok) | PypiClean |
/Mesa-2.1.1-py3-none-any.whl/mesa/visualization/templates/external/bootstrap-5.1.3-dist/js/bootstrap.js | (function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@popperjs/core')) :
typeof define === 'function' && define.amd ? define(['@popperjs/core'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.bootstrap = factory(global.Popper));
})(this, (function (Popper) { 'use strict';
function _interopNamespace(e) {
if (e && e.__esModule) return e;
const n = Object.create(null);
if (e) {
for (const k in e) {
if (k !== 'default') {
const d = Object.getOwnPropertyDescriptor(e, k);
Object.defineProperty(n, k, d.get ? d : {
enumerable: true,
get: () => e[k]
});
}
}
}
n.default = e;
return Object.freeze(n);
}
const Popper__namespace = /*#__PURE__*/_interopNamespace(Popper);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): util/index.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
const MAX_UID = 1000000;
const MILLISECONDS_MULTIPLIER = 1000;
const TRANSITION_END = 'transitionend'; // Shoutout AngusCroll (https://goo.gl/pxwQGp)
const toType = obj => {
if (obj === null || obj === undefined) {
return `${obj}`;
}
return {}.toString.call(obj).match(/\s([a-z]+)/i)[1].toLowerCase();
};
/**
* --------------------------------------------------------------------------
* Public Util Api
* --------------------------------------------------------------------------
*/
const getUID = prefix => {
do {
prefix += Math.floor(Math.random() * MAX_UID);
} while (document.getElementById(prefix));
return prefix;
};
const getSelector = element => {
let selector = element.getAttribute('data-bs-target');
if (!selector || selector === '#') {
let hrefAttr = element.getAttribute('href'); // The only valid content that could double as a selector are IDs or classes,
// so everything starting with `#` or `.`. If a "real" URL is used as the selector,
// `document.querySelector` will rightfully complain it is invalid.
// See https://github.com/twbs/bootstrap/issues/32273
if (!hrefAttr || !hrefAttr.includes('#') && !hrefAttr.startsWith('.')) {
return null;
} // Just in case some CMS puts out a full URL with the anchor appended
if (hrefAttr.includes('#') && !hrefAttr.startsWith('#')) {
hrefAttr = `#${hrefAttr.split('#')[1]}`;
}
selector = hrefAttr && hrefAttr !== '#' ? hrefAttr.trim() : null;
}
return selector;
};
const getSelectorFromElement = element => {
const selector = getSelector(element);
if (selector) {
return document.querySelector(selector) ? selector : null;
}
return null;
};
const getElementFromSelector = element => {
const selector = getSelector(element);
return selector ? document.querySelector(selector) : null;
};
const getTransitionDurationFromElement = element => {
if (!element) {
return 0;
} // Get transition-duration of the element
let {
transitionDuration,
transitionDelay
} = window.getComputedStyle(element);
const floatTransitionDuration = Number.parseFloat(transitionDuration);
const floatTransitionDelay = Number.parseFloat(transitionDelay); // Return 0 if element or transition duration is not found
if (!floatTransitionDuration && !floatTransitionDelay) {
return 0;
} // If multiple durations are defined, take the first
transitionDuration = transitionDuration.split(',')[0];
transitionDelay = transitionDelay.split(',')[0];
return (Number.parseFloat(transitionDuration) + Number.parseFloat(transitionDelay)) * MILLISECONDS_MULTIPLIER;
};
const triggerTransitionEnd = element => {
element.dispatchEvent(new Event(TRANSITION_END));
};
const isElement = obj => {
if (!obj || typeof obj !== 'object') {
return false;
}
if (typeof obj.jquery !== 'undefined') {
obj = obj[0];
}
return typeof obj.nodeType !== 'undefined';
};
const getElement = obj => {
if (isElement(obj)) {
// it's a jQuery object or a node element
return obj.jquery ? obj[0] : obj;
}
if (typeof obj === 'string' && obj.length > 0) {
return document.querySelector(obj);
}
return null;
};
const typeCheckConfig = (componentName, config, configTypes) => {
Object.keys(configTypes).forEach(property => {
const expectedTypes = configTypes[property];
const value = config[property];
const valueType = value && isElement(value) ? 'element' : toType(value);
if (!new RegExp(expectedTypes).test(valueType)) {
throw new TypeError(`${componentName.toUpperCase()}: Option "${property}" provided type "${valueType}" but expected type "${expectedTypes}".`);
}
});
};
const isVisible = element => {
if (!isElement(element) || element.getClientRects().length === 0) {
return false;
}
return getComputedStyle(element).getPropertyValue('visibility') === 'visible';
};
const isDisabled = element => {
if (!element || element.nodeType !== Node.ELEMENT_NODE) {
return true;
}
if (element.classList.contains('disabled')) {
return true;
}
if (typeof element.disabled !== 'undefined') {
return element.disabled;
}
return element.hasAttribute('disabled') && element.getAttribute('disabled') !== 'false';
};
const findShadowRoot = element => {
if (!document.documentElement.attachShadow) {
return null;
} // Can find the shadow root otherwise it'll return the document
if (typeof element.getRootNode === 'function') {
const root = element.getRootNode();
return root instanceof ShadowRoot ? root : null;
}
if (element instanceof ShadowRoot) {
return element;
} // when we don't find a shadow root
if (!element.parentNode) {
return null;
}
return findShadowRoot(element.parentNode);
};
const noop = () => {};
/**
* Trick to restart an element's animation
*
* @param {HTMLElement} element
* @return void
*
* @see https://www.charistheo.io/blog/2021/02/restart-a-css-animation-with-javascript/#restarting-a-css-animation
*/
const reflow = element => {
// eslint-disable-next-line no-unused-expressions
element.offsetHeight;
};
const getjQuery = () => {
const {
jQuery
} = window;
if (jQuery && !document.body.hasAttribute('data-bs-no-jquery')) {
return jQuery;
}
return null;
};
const DOMContentLoadedCallbacks = [];
const onDOMContentLoaded = callback => {
if (document.readyState === 'loading') {
// add listener on the first call when the document is in loading state
if (!DOMContentLoadedCallbacks.length) {
document.addEventListener('DOMContentLoaded', () => {
DOMContentLoadedCallbacks.forEach(callback => callback());
});
}
DOMContentLoadedCallbacks.push(callback);
} else {
callback();
}
};
const isRTL = () => document.documentElement.dir === 'rtl';
const defineJQueryPlugin = plugin => {
onDOMContentLoaded(() => {
const $ = getjQuery();
/* istanbul ignore if */
if ($) {
const name = plugin.NAME;
const JQUERY_NO_CONFLICT = $.fn[name];
$.fn[name] = plugin.jQueryInterface;
$.fn[name].Constructor = plugin;
$.fn[name].noConflict = () => {
$.fn[name] = JQUERY_NO_CONFLICT;
return plugin.jQueryInterface;
};
}
});
};
const execute = callback => {
if (typeof callback === 'function') {
callback();
}
};
const executeAfterTransition = (callback, transitionElement, waitForTransition = true) => {
if (!waitForTransition) {
execute(callback);
return;
}
const durationPadding = 5;
const emulatedDuration = getTransitionDurationFromElement(transitionElement) + durationPadding;
let called = false;
const handler = ({
target
}) => {
if (target !== transitionElement) {
return;
}
called = true;
transitionElement.removeEventListener(TRANSITION_END, handler);
execute(callback);
};
transitionElement.addEventListener(TRANSITION_END, handler);
setTimeout(() => {
if (!called) {
triggerTransitionEnd(transitionElement);
}
}, emulatedDuration);
};
/**
* Return the previous/next element of a list.
*
* @param {array} list The list of elements
* @param activeElement The active element
* @param shouldGetNext Choose to get next or previous element
* @param isCycleAllowed
* @return {Element|elem} The proper element
*/
const getNextActiveElement = (list, activeElement, shouldGetNext, isCycleAllowed) => {
let index = list.indexOf(activeElement); // if the element does not exist in the list return an element depending on the direction and if cycle is allowed
if (index === -1) {
return list[!shouldGetNext && isCycleAllowed ? list.length - 1 : 0];
}
const listLength = list.length;
index += shouldGetNext ? 1 : -1;
if (isCycleAllowed) {
index = (index + listLength) % listLength;
}
return list[Math.max(0, Math.min(index, listLength - 1))];
};
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): dom/event-handler.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
const namespaceRegex = /[^.]*(?=\..*)\.|.*/;
const stripNameRegex = /\..*/;
const stripUidRegex = /::\d+$/;
const eventRegistry = {}; // Events storage
let uidEvent = 1;
const customEvents = {
mouseenter: 'mouseover',
mouseleave: 'mouseout'
};
const customEventsRegex = /^(mouseenter|mouseleave)/i;
const nativeEvents = new Set(['click', 'dblclick', 'mouseup', 'mousedown', 'contextmenu', 'mousewheel', 'DOMMouseScroll', 'mouseover', 'mouseout', 'mousemove', 'selectstart', 'selectend', 'keydown', 'keypress', 'keyup', 'orientationchange', 'touchstart', 'touchmove', 'touchend', 'touchcancel', 'pointerdown', 'pointermove', 'pointerup', 'pointerleave', 'pointercancel', 'gesturestart', 'gesturechange', 'gestureend', 'focus', 'blur', 'change', 'reset', 'select', 'submit', 'focusin', 'focusout', 'load', 'unload', 'beforeunload', 'resize', 'move', 'DOMContentLoaded', 'readystatechange', 'error', 'abort', 'scroll']);
/**
* ------------------------------------------------------------------------
* Private methods
* ------------------------------------------------------------------------
*/
function getUidEvent(element, uid) {
return uid && `${uid}::${uidEvent++}` || element.uidEvent || uidEvent++;
}
function getEvent(element) {
const uid = getUidEvent(element);
element.uidEvent = uid;
eventRegistry[uid] = eventRegistry[uid] || {};
return eventRegistry[uid];
}
function bootstrapHandler(element, fn) {
return function handler(event) {
event.delegateTarget = element;
if (handler.oneOff) {
EventHandler.off(element, event.type, fn);
}
return fn.apply(element, [event]);
};
}
function bootstrapDelegationHandler(element, selector, fn) {
return function handler(event) {
const domElements = element.querySelectorAll(selector);
for (let {
target
} = event; target && target !== this; target = target.parentNode) {
for (let i = domElements.length; i--;) {
if (domElements[i] === target) {
event.delegateTarget = target;
if (handler.oneOff) {
EventHandler.off(element, event.type, selector, fn);
}
return fn.apply(target, [event]);
}
}
} // To please ESLint
return null;
};
}
function findHandler(events, handler, delegationSelector = null) {
const uidEventList = Object.keys(events);
for (let i = 0, len = uidEventList.length; i < len; i++) {
const event = events[uidEventList[i]];
if (event.originalHandler === handler && event.delegationSelector === delegationSelector) {
return event;
}
}
return null;
}
function normalizeParams(originalTypeEvent, handler, delegationFn) {
const delegation = typeof handler === 'string';
const originalHandler = delegation ? delegationFn : handler;
let typeEvent = getTypeEvent(originalTypeEvent);
const isNative = nativeEvents.has(typeEvent);
if (!isNative) {
typeEvent = originalTypeEvent;
}
return [delegation, originalHandler, typeEvent];
}
function addHandler(element, originalTypeEvent, handler, delegationFn, oneOff) {
if (typeof originalTypeEvent !== 'string' || !element) {
return;
}
if (!handler) {
handler = delegationFn;
delegationFn = null;
} // in case of mouseenter or mouseleave wrap the handler within a function that checks for its DOM position
// this prevents the handler from being dispatched the same way as mouseover or mouseout does
if (customEventsRegex.test(originalTypeEvent)) {
const wrapFn = fn => {
return function (event) {
if (!event.relatedTarget || event.relatedTarget !== event.delegateTarget && !event.delegateTarget.contains(event.relatedTarget)) {
return fn.call(this, event);
}
};
};
if (delegationFn) {
delegationFn = wrapFn(delegationFn);
} else {
handler = wrapFn(handler);
}
}
const [delegation, originalHandler, typeEvent] = normalizeParams(originalTypeEvent, handler, delegationFn);
const events = getEvent(element);
const handlers = events[typeEvent] || (events[typeEvent] = {});
const previousFn = findHandler(handlers, originalHandler, delegation ? handler : null);
if (previousFn) {
previousFn.oneOff = previousFn.oneOff && oneOff;
return;
}
const uid = getUidEvent(originalHandler, originalTypeEvent.replace(namespaceRegex, ''));
const fn = delegation ? bootstrapDelegationHandler(element, handler, delegationFn) : bootstrapHandler(element, handler);
fn.delegationSelector = delegation ? handler : null;
fn.originalHandler = originalHandler;
fn.oneOff = oneOff;
fn.uidEvent = uid;
handlers[uid] = fn;
element.addEventListener(typeEvent, fn, delegation);
}
function removeHandler(element, events, typeEvent, handler, delegationSelector) {
const fn = findHandler(events[typeEvent], handler, delegationSelector);
if (!fn) {
return;
}
element.removeEventListener(typeEvent, fn, Boolean(delegationSelector));
delete events[typeEvent][fn.uidEvent];
}
function removeNamespacedHandlers(element, events, typeEvent, namespace) {
const storeElementEvent = events[typeEvent] || {};
Object.keys(storeElementEvent).forEach(handlerKey => {
if (handlerKey.includes(namespace)) {
const event = storeElementEvent[handlerKey];
removeHandler(element, events, typeEvent, event.originalHandler, event.delegationSelector);
}
});
}
function getTypeEvent(event) {
// allow to get the native events from namespaced events ('click.bs.button' --> 'click')
event = event.replace(stripNameRegex, '');
return customEvents[event] || event;
}
const EventHandler = {
on(element, event, handler, delegationFn) {
addHandler(element, event, handler, delegationFn, false);
},
one(element, event, handler, delegationFn) {
addHandler(element, event, handler, delegationFn, true);
},
off(element, originalTypeEvent, handler, delegationFn) {
if (typeof originalTypeEvent !== 'string' || !element) {
return;
}
const [delegation, originalHandler, typeEvent] = normalizeParams(originalTypeEvent, handler, delegationFn);
const inNamespace = typeEvent !== originalTypeEvent;
const events = getEvent(element);
const isNamespace = originalTypeEvent.startsWith('.');
if (typeof originalHandler !== 'undefined') {
// Simplest case: handler is passed, remove that listener ONLY.
if (!events || !events[typeEvent]) {
return;
}
removeHandler(element, events, typeEvent, originalHandler, delegation ? handler : null);
return;
}
if (isNamespace) {
Object.keys(events).forEach(elementEvent => {
removeNamespacedHandlers(element, events, elementEvent, originalTypeEvent.slice(1));
});
}
const storeElementEvent = events[typeEvent] || {};
Object.keys(storeElementEvent).forEach(keyHandlers => {
const handlerKey = keyHandlers.replace(stripUidRegex, '');
if (!inNamespace || originalTypeEvent.includes(handlerKey)) {
const event = storeElementEvent[keyHandlers];
removeHandler(element, events, typeEvent, event.originalHandler, event.delegationSelector);
}
});
},
trigger(element, event, args) {
if (typeof event !== 'string' || !element) {
return null;
}
const $ = getjQuery();
const typeEvent = getTypeEvent(event);
const inNamespace = event !== typeEvent;
const isNative = nativeEvents.has(typeEvent);
let jQueryEvent;
let bubbles = true;
let nativeDispatch = true;
let defaultPrevented = false;
let evt = null;
if (inNamespace && $) {
jQueryEvent = $.Event(event, args);
$(element).trigger(jQueryEvent);
bubbles = !jQueryEvent.isPropagationStopped();
nativeDispatch = !jQueryEvent.isImmediatePropagationStopped();
defaultPrevented = jQueryEvent.isDefaultPrevented();
}
if (isNative) {
evt = document.createEvent('HTMLEvents');
evt.initEvent(typeEvent, bubbles, true);
} else {
evt = new CustomEvent(event, {
bubbles,
cancelable: true
});
} // merge custom information in our event
if (typeof args !== 'undefined') {
Object.keys(args).forEach(key => {
Object.defineProperty(evt, key, {
get() {
return args[key];
}
});
});
}
if (defaultPrevented) {
evt.preventDefault();
}
if (nativeDispatch) {
element.dispatchEvent(evt);
}
if (evt.defaultPrevented && typeof jQueryEvent !== 'undefined') {
jQueryEvent.preventDefault();
}
return evt;
}
};
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): dom/data.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
const elementMap = new Map();
const Data = {
set(element, key, instance) {
if (!elementMap.has(element)) {
elementMap.set(element, new Map());
}
const instanceMap = elementMap.get(element); // make it clear we only want one instance per element
// can be removed later when multiple key/instances are fine to be used
if (!instanceMap.has(key) && instanceMap.size !== 0) {
// eslint-disable-next-line no-console
console.error(`Bootstrap doesn't allow more than one instance per element. Bound instance: ${Array.from(instanceMap.keys())[0]}.`);
return;
}
instanceMap.set(key, instance);
},
get(element, key) {
if (elementMap.has(element)) {
return elementMap.get(element).get(key) || null;
}
return null;
},
remove(element, key) {
if (!elementMap.has(element)) {
return;
}
const instanceMap = elementMap.get(element);
instanceMap.delete(key); // free up element references if there are no instances left for an element
if (instanceMap.size === 0) {
elementMap.delete(element);
}
}
};
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): base-component.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
const VERSION = '5.1.3';
class BaseComponent {
constructor(element) {
element = getElement(element);
if (!element) {
return;
}
this._element = element;
Data.set(this._element, this.constructor.DATA_KEY, this);
}
dispose() {
Data.remove(this._element, this.constructor.DATA_KEY);
EventHandler.off(this._element, this.constructor.EVENT_KEY);
Object.getOwnPropertyNames(this).forEach(propertyName => {
this[propertyName] = null;
});
}
_queueCallback(callback, element, isAnimated = true) {
executeAfterTransition(callback, element, isAnimated);
}
/** Static */
static getInstance(element) {
return Data.get(getElement(element), this.DATA_KEY);
}
static getOrCreateInstance(element, config = {}) {
return this.getInstance(element) || new this(element, typeof config === 'object' ? config : null);
}
static get VERSION() {
return VERSION;
}
static get NAME() {
throw new Error('You have to implement the static method "NAME", for each component!');
}
static get DATA_KEY() {
return `bs.${this.NAME}`;
}
static get EVENT_KEY() {
return `.${this.DATA_KEY}`;
}
}
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): util/component-functions.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
const enableDismissTrigger = (component, method = 'hide') => {
const clickEvent = `click.dismiss${component.EVENT_KEY}`;
const name = component.NAME;
EventHandler.on(document, clickEvent, `[data-bs-dismiss="${name}"]`, function (event) {
if (['A', 'AREA'].includes(this.tagName)) {
event.preventDefault();
}
if (isDisabled(this)) {
return;
}
const target = getElementFromSelector(this) || this.closest(`.${name}`);
const instance = component.getOrCreateInstance(target); // Method argument is left, for Alert and only, as it doesn't implement the 'hide' method
instance[method]();
});
};
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): alert.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
const NAME$d = 'alert';
const DATA_KEY$c = 'bs.alert';
const EVENT_KEY$c = `.${DATA_KEY$c}`;
const EVENT_CLOSE = `close${EVENT_KEY$c}`;
const EVENT_CLOSED = `closed${EVENT_KEY$c}`;
const CLASS_NAME_FADE$5 = 'fade';
const CLASS_NAME_SHOW$8 = 'show';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
class Alert extends BaseComponent {
// Getters
static get NAME() {
return NAME$d;
} // Public
close() {
const closeEvent = EventHandler.trigger(this._element, EVENT_CLOSE);
if (closeEvent.defaultPrevented) {
return;
}
this._element.classList.remove(CLASS_NAME_SHOW$8);
const isAnimated = this._element.classList.contains(CLASS_NAME_FADE$5);
this._queueCallback(() => this._destroyElement(), this._element, isAnimated);
} // Private
_destroyElement() {
this._element.remove();
EventHandler.trigger(this._element, EVENT_CLOSED);
this.dispose();
} // Static
static jQueryInterface(config) {
return this.each(function () {
const data = Alert.getOrCreateInstance(this);
if (typeof config !== 'string') {
return;
}
if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {
throw new TypeError(`No method named "${config}"`);
}
data[config](this);
});
}
}
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
enableDismissTrigger(Alert, 'close');
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
* add .Alert to jQuery only if jQuery is present
*/
defineJQueryPlugin(Alert);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): button.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
const NAME$c = 'button';
const DATA_KEY$b = 'bs.button';
const EVENT_KEY$b = `.${DATA_KEY$b}`;
const DATA_API_KEY$7 = '.data-api';
const CLASS_NAME_ACTIVE$3 = 'active';
const SELECTOR_DATA_TOGGLE$5 = '[data-bs-toggle="button"]';
const EVENT_CLICK_DATA_API$6 = `click${EVENT_KEY$b}${DATA_API_KEY$7}`;
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
class Button extends BaseComponent {
// Getters
static get NAME() {
return NAME$c;
} // Public
toggle() {
// Toggle class and sync the `aria-pressed` attribute with the return value of the `.toggle()` method
this._element.setAttribute('aria-pressed', this._element.classList.toggle(CLASS_NAME_ACTIVE$3));
} // Static
static jQueryInterface(config) {
return this.each(function () {
const data = Button.getOrCreateInstance(this);
if (config === 'toggle') {
data[config]();
}
});
}
}
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
EventHandler.on(document, EVENT_CLICK_DATA_API$6, SELECTOR_DATA_TOGGLE$5, event => {
event.preventDefault();
const button = event.target.closest(SELECTOR_DATA_TOGGLE$5);
const data = Button.getOrCreateInstance(button);
data.toggle();
});
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
* add .Button to jQuery only if jQuery is present
*/
defineJQueryPlugin(Button);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): dom/manipulator.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
function normalizeData(val) {
if (val === 'true') {
return true;
}
if (val === 'false') {
return false;
}
if (val === Number(val).toString()) {
return Number(val);
}
if (val === '' || val === 'null') {
return null;
}
return val;
}
function normalizeDataKey(key) {
return key.replace(/[A-Z]/g, chr => `-${chr.toLowerCase()}`);
}
// Helper for reading/writing `data-bs-*` attributes plus two small
// geometry accessors used by other components.
const Manipulator = {
  setDataAttribute(element, key, value) {
    element.setAttribute(`data-bs-${normalizeDataKey(key)}`, value);
  },
  removeDataAttribute(element, key) {
    element.removeAttribute(`data-bs-${normalizeDataKey(key)}`);
  },
  // Collect every `data-bs-*` attribute of `element` into a plain object,
  // with keys stripped of the 'bs' prefix and values normalized.
  getDataAttributes(element) {
    if (!element) {
      return {};
    }
    const attributes = {};
    for (const key of Object.keys(element.dataset)) {
      if (!key.startsWith('bs')) {
        continue;
      }
      // Drop the 'bs' prefix and lower-case the first remaining character.
      const stripped = key.slice(2);
      const pureKey = stripped.charAt(0).toLowerCase() + stripped.slice(1);
      attributes[pureKey] = normalizeData(element.dataset[key]);
    }
    return attributes;
  },
  getDataAttribute(element, key) {
    return normalizeData(element.getAttribute(`data-bs-${normalizeDataKey(key)}`));
  },
  // Document-relative position: viewport rect shifted by the scroll offsets.
  offset(element) {
    const { top, left } = element.getBoundingClientRect();
    return {
      top: top + window.pageYOffset,
      left: left + window.pageXOffset
    };
  },
  // Position relative to the offset parent.
  position(element) {
    return {
      top: element.offsetTop,
      left: element.offsetLeft
    };
  }
};
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): dom/selector-engine.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
const NODE_TEXT = 3;
// Thin wrappers over querySelector/querySelectorAll and DOM traversal,
// shared by all components.
const SelectorEngine = {
  find(selector, element = document.documentElement) {
    return Array.from(Element.prototype.querySelectorAll.call(element, selector));
  },
  findOne(selector, element = document.documentElement) {
    return Element.prototype.querySelector.call(element, selector);
  },
  // Direct children of `element` matching `selector`.
  children(element, selector) {
    return Array.from(element.children).filter(child => child.matches(selector));
  },
  // Every ancestor element matching `selector`, closest first.
  parents(element, selector) {
    const matched = [];
    // NOTE(review): the NODE_TEXT comparison is redundant after the
    // ELEMENT_NODE check (1 can never equal 3); kept to mirror the
    // original loop condition exactly.
    for (let ancestor = element.parentNode; ancestor && ancestor.nodeType === Node.ELEMENT_NODE && ancestor.nodeType !== NODE_TEXT; ancestor = ancestor.parentNode) {
      if (ancestor.matches(selector)) {
        matched.push(ancestor);
      }
    }
    return matched;
  },
  // Nearest preceding sibling matching `selector`, as a 0/1-element array.
  prev(element, selector) {
    for (let sibling = element.previousElementSibling; sibling; sibling = sibling.previousElementSibling) {
      if (sibling.matches(selector)) {
        return [sibling];
      }
    }
    return [];
  },
  // Nearest following sibling matching `selector`, as a 0/1-element array.
  next(element, selector) {
    for (let sibling = element.nextElementSibling; sibling; sibling = sibling.nextElementSibling) {
      if (sibling.matches(selector)) {
        return [sibling];
      }
    }
    return [];
  },
  // Visible, enabled, focusable descendants (tabindex^="-" excluded).
  focusableChildren(element) {
    const focusables = ['a', 'button', 'input', 'textarea', 'select', 'details', '[tabindex]', '[contenteditable="true"]'].map(selector => `${selector}:not([tabindex^="-"])`).join(', ');
    return this.find(focusables, element).filter(el => !isDisabled(el) && isVisible(el));
  }
};
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): carousel.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
const NAME$b = 'carousel';
const DATA_KEY$a = 'bs.carousel';
const EVENT_KEY$a = `.${DATA_KEY$a}`;
const DATA_API_KEY$6 = '.data-api';
const ARROW_LEFT_KEY = 'ArrowLeft';
const ARROW_RIGHT_KEY = 'ArrowRight';
const TOUCHEVENT_COMPAT_WAIT = 500; // Time for mouse compat events to fire after touch
const SWIPE_THRESHOLD = 40;
// Option defaults; each is overridable via data-bs-* attributes or JS config
// (merged and validated in Carousel._getConfig).
const Default$a = {
  interval: 5000,
  keyboard: true,
  slide: false,
  pause: 'hover',
  wrap: true,
  touch: true
};
const DefaultType$a = {
  interval: '(number|boolean)',
  keyboard: 'boolean',
  slide: '(boolean|string)',
  pause: '(string|boolean)',
  wrap: 'boolean',
  touch: 'boolean'
};
const ORDER_NEXT = 'next';
const ORDER_PREV = 'prev';
const DIRECTION_LEFT = 'left';
const DIRECTION_RIGHT = 'right';
// Arrow keys map to visual directions; the RTL swap happens later in
// Carousel._directionToOrder.
const KEY_TO_DIRECTION = {
  [ARROW_LEFT_KEY]: DIRECTION_RIGHT,
  [ARROW_RIGHT_KEY]: DIRECTION_LEFT
};
const EVENT_SLIDE = `slide${EVENT_KEY$a}`;
const EVENT_SLID = `slid${EVENT_KEY$a}`;
const EVENT_KEYDOWN = `keydown${EVENT_KEY$a}`;
const EVENT_MOUSEENTER = `mouseenter${EVENT_KEY$a}`;
const EVENT_MOUSELEAVE = `mouseleave${EVENT_KEY$a}`;
const EVENT_TOUCHSTART = `touchstart${EVENT_KEY$a}`;
const EVENT_TOUCHMOVE = `touchmove${EVENT_KEY$a}`;
const EVENT_TOUCHEND = `touchend${EVENT_KEY$a}`;
const EVENT_POINTERDOWN = `pointerdown${EVENT_KEY$a}`;
const EVENT_POINTERUP = `pointerup${EVENT_KEY$a}`;
const EVENT_DRAG_START = `dragstart${EVENT_KEY$a}`;
const EVENT_LOAD_DATA_API$2 = `load${EVENT_KEY$a}${DATA_API_KEY$6}`;
const EVENT_CLICK_DATA_API$5 = `click${EVENT_KEY$a}${DATA_API_KEY$6}`;
const CLASS_NAME_CAROUSEL = 'carousel';
const CLASS_NAME_ACTIVE$2 = 'active';
const CLASS_NAME_SLIDE = 'slide';
const CLASS_NAME_END = 'carousel-item-end';
const CLASS_NAME_START = 'carousel-item-start';
const CLASS_NAME_NEXT = 'carousel-item-next';
const CLASS_NAME_PREV = 'carousel-item-prev';
const CLASS_NAME_POINTER_EVENT = 'pointer-event';
const SELECTOR_ACTIVE$1 = '.active';
const SELECTOR_ACTIVE_ITEM = '.active.carousel-item';
const SELECTOR_ITEM = '.carousel-item';
const SELECTOR_ITEM_IMG = '.carousel-item img';
const SELECTOR_NEXT_PREV = '.carousel-item-next, .carousel-item-prev';
const SELECTOR_INDICATORS = '.carousel-indicators';
const SELECTOR_INDICATOR = '[data-bs-target]';
const SELECTOR_DATA_SLIDE = '[data-bs-slide], [data-bs-slide-to]';
const SELECTOR_DATA_RIDE = '[data-bs-ride="carousel"]';
const POINTER_TYPE_TOUCH = 'touch';
const POINTER_TYPE_PEN = 'pen';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
// Bootstrap carousel component: cycles through .carousel-item elements with
// optional autoplay, keyboard navigation, and touch/pointer swipe support.
class Carousel extends BaseComponent {
  // element: the .carousel root; config: overrides merged over Default$a and
  // the element's data-bs-* attributes (see _getConfig).
  constructor(element, config) {
    super(element);
    // _items is a lazily refreshed cache of sibling .carousel-item elements
    // (populated as a side effect of _getItemIndex).
    this._items = null;
    this._interval = null;
    this._activeElement = null;
    this._isPaused = false;
    this._isSliding = false;
    this.touchTimeout = null;
    this.touchStartX = 0;
    this.touchDeltaX = 0;
    this._config = this._getConfig(config);
    this._indicatorsElement = SelectorEngine.findOne(SELECTOR_INDICATORS, this._element);
    this._touchSupported = 'ontouchstart' in document.documentElement || navigator.maxTouchPoints > 0;
    this._pointerEvent = Boolean(window.PointerEvent);
    this._addEventListeners();
  } // Getters
  static get Default() {
    return Default$a;
  }
  static get NAME() {
    return NAME$b;
  } // Public
  // Advance to the next slide.
  next() {
    this._slide(ORDER_NEXT);
  }
  nextWhenVisible() {
    // Don't call next when the page isn't visible
    // or the carousel or its parent isn't visible
    if (!document.hidden && isVisible(this._element)) {
      this.next();
    }
  }
  // Go back to the previous slide.
  prev() {
    this._slide(ORDER_PREV);
  }
  // Stop cycling. When called without an event (programmatic use) the paused
  // flag is also set so cycle() will not restart until explicitly resumed.
  pause(event) {
    if (!event) {
      this._isPaused = true;
    }
    // If a transition is in flight, force it to finish before stopping.
    if (SelectorEngine.findOne(SELECTOR_NEXT_PREV, this._element)) {
      triggerTransitionEnd(this._element);
      this.cycle(true);
    }
    clearInterval(this._interval);
    this._interval = null;
  }
  // (Re)start automatic cycling unless paused or interval is disabled.
  cycle(event) {
    if (!event) {
      this._isPaused = false;
    }
    if (this._interval) {
      clearInterval(this._interval);
      this._interval = null;
    }
    if (this._config && this._config.interval && !this._isPaused) {
      this._updateInterval();
      this._interval = setInterval((document.visibilityState ? this.nextWhenVisible : this.next).bind(this), this._config.interval);
    }
  }
  // Jump to the slide at `index` (out-of-range indexes are ignored; a call
  // during an in-flight slide is deferred until that slide completes).
  // NOTE(review): dataApiClickHandler passes the raw data-bs-slide-to string
  // as `index`, so the strict `activeIndex === index` comparison below never
  // matches in that path — confirm this is the intended upstream behavior.
  to(index) {
    this._activeElement = SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM, this._element);
    const activeIndex = this._getItemIndex(this._activeElement);
    if (index > this._items.length - 1 || index < 0) {
      return;
    }
    if (this._isSliding) {
      EventHandler.one(this._element, EVENT_SLID, () => this.to(index));
      return;
    }
    if (activeIndex === index) {
      this.pause();
      this.cycle();
      return;
    }
    const order = index > activeIndex ? ORDER_NEXT : ORDER_PREV;
    this._slide(order, this._items[index]);
  } // Private
  // Merge defaults, data attributes and the caller's config, then validate.
  _getConfig(config) {
    config = { ...Default$a,
      ...Manipulator.getDataAttributes(this._element),
      ...(typeof config === 'object' ? config : {})
    };
    typeCheckConfig(NAME$b, config, DefaultType$a);
    return config;
  }
  // Translate the accumulated horizontal touch delta into a slide direction,
  // ignoring movements at or below SWIPE_THRESHOLD pixels.
  _handleSwipe() {
    const absDeltax = Math.abs(this.touchDeltaX);
    if (absDeltax <= SWIPE_THRESHOLD) {
      return;
    }
    // +1 for a rightward swipe, -1 for leftward.
    const direction = absDeltax / this.touchDeltaX;
    this.touchDeltaX = 0;
    if (!direction) {
      return;
    }
    this._slide(direction > 0 ? DIRECTION_RIGHT : DIRECTION_LEFT);
  }
  _addEventListeners() {
    if (this._config.keyboard) {
      EventHandler.on(this._element, EVENT_KEYDOWN, event => this._keydown(event));
    }
    if (this._config.pause === 'hover') {
      EventHandler.on(this._element, EVENT_MOUSEENTER, event => this.pause(event));
      EventHandler.on(this._element, EVENT_MOUSELEAVE, event => this.cycle(event));
    }
    if (this._config.touch && this._touchSupported) {
      this._addTouchEventListeners();
    }
  }
  // Wire up pointer events (when supported) or raw touch events for swiping.
  _addTouchEventListeners() {
    const hasPointerPenTouch = event => {
      return this._pointerEvent && (event.pointerType === POINTER_TYPE_PEN || event.pointerType === POINTER_TYPE_TOUCH);
    };
    const start = event => {
      if (hasPointerPenTouch(event)) {
        this.touchStartX = event.clientX;
      } else if (!this._pointerEvent) {
        this.touchStartX = event.touches[0].clientX;
      }
    };
    const move = event => {
      // ensure swiping with one touch and not pinching
      this.touchDeltaX = event.touches && event.touches.length > 1 ? 0 : event.touches[0].clientX - this.touchStartX;
    };
    const end = event => {
      if (hasPointerPenTouch(event)) {
        this.touchDeltaX = event.clientX - this.touchStartX;
      }
      this._handleSwipe();
      if (this._config.pause === 'hover') {
        // If it's a touch-enabled device, mouseenter/leave are fired as
        // part of the mouse compatibility events on first tap - the carousel
        // would stop cycling until user tapped out of it;
        // here, we listen for touchend, explicitly pause the carousel
        // (as if it's the second time we tap on it, mouseenter compat event
        // is NOT fired) and after a timeout (to allow for mouse compatibility
        // events to fire) we explicitly restart cycling
        this.pause();
        if (this.touchTimeout) {
          clearTimeout(this.touchTimeout);
        }
        this.touchTimeout = setTimeout(event => this.cycle(event), TOUCHEVENT_COMPAT_WAIT + this._config.interval);
      }
    };
    // Prevent native image dragging from hijacking swipes.
    SelectorEngine.find(SELECTOR_ITEM_IMG, this._element).forEach(itemImg => {
      EventHandler.on(itemImg, EVENT_DRAG_START, event => event.preventDefault());
    });
    if (this._pointerEvent) {
      EventHandler.on(this._element, EVENT_POINTERDOWN, event => start(event));
      EventHandler.on(this._element, EVENT_POINTERUP, event => end(event));
      this._element.classList.add(CLASS_NAME_POINTER_EVENT);
    } else {
      EventHandler.on(this._element, EVENT_TOUCHSTART, event => start(event));
      EventHandler.on(this._element, EVENT_TOUCHMOVE, event => move(event));
      EventHandler.on(this._element, EVENT_TOUCHEND, event => end(event));
    }
  }
  _keydown(event) {
    // Ignore keystrokes typed into form fields inside the carousel.
    if (/input|textarea/i.test(event.target.tagName)) {
      return;
    }
    const direction = KEY_TO_DIRECTION[event.key];
    if (direction) {
      event.preventDefault();
      this._slide(direction);
    }
  }
  // Returns the index of `element` among its sibling items; refreshes the
  // this._items cache as a side effect.
  _getItemIndex(element) {
    this._items = element && element.parentNode ? SelectorEngine.find(SELECTOR_ITEM, element.parentNode) : [];
    return this._items.indexOf(element);
  }
  _getItemByOrder(order, activeElement) {
    const isNext = order === ORDER_NEXT;
    return getNextActiveElement(this._items, activeElement, isNext, this._config.wrap);
  }
  // Fire the cancelable 'slide' event; callers bail if it was prevented.
  _triggerSlideEvent(relatedTarget, eventDirectionName) {
    const targetIndex = this._getItemIndex(relatedTarget);
    const fromIndex = this._getItemIndex(SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM, this._element));
    return EventHandler.trigger(this._element, EVENT_SLIDE, {
      relatedTarget,
      direction: eventDirectionName,
      from: fromIndex,
      to: targetIndex
    });
  }
  // Move the 'active' class / aria-current to the indicator whose
  // data-bs-slide-to matches the given item's index.
  _setActiveIndicatorElement(element) {
    if (this._indicatorsElement) {
      const activeIndicator = SelectorEngine.findOne(SELECTOR_ACTIVE$1, this._indicatorsElement);
      activeIndicator.classList.remove(CLASS_NAME_ACTIVE$2);
      activeIndicator.removeAttribute('aria-current');
      const indicators = SelectorEngine.find(SELECTOR_INDICATOR, this._indicatorsElement);
      for (let i = 0; i < indicators.length; i++) {
        if (Number.parseInt(indicators[i].getAttribute('data-bs-slide-to'), 10) === this._getItemIndex(element)) {
          indicators[i].classList.add(CLASS_NAME_ACTIVE$2);
          indicators[i].setAttribute('aria-current', 'true');
          break;
        }
      }
    }
  }
  // Honor a per-slide data-bs-interval override, restoring the remembered
  // default interval when the active slide has none.
  _updateInterval() {
    const element = this._activeElement || SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM, this._element);
    if (!element) {
      return;
    }
    const elementInterval = Number.parseInt(element.getAttribute('data-bs-interval'), 10);
    if (elementInterval) {
      this._config.defaultInterval = this._config.defaultInterval || this._config.interval;
      this._config.interval = elementInterval;
    } else {
      this._config.interval = this._config.defaultInterval || this._config.interval;
    }
  }
  // Core slide routine: resolves direction to order, fires the cancelable
  // slide event, swaps the active classes (animated via _queueCallback when
  // the root has the 'slide' class) and finally fires the slid event.
  _slide(directionOrOrder, element) {
    const order = this._directionToOrder(directionOrOrder);
    const activeElement = SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM, this._element);
    const activeElementIndex = this._getItemIndex(activeElement);
    const nextElement = element || this._getItemByOrder(order, activeElement);
    const nextElementIndex = this._getItemIndex(nextElement);
    const isCycling = Boolean(this._interval);
    const isNext = order === ORDER_NEXT;
    const directionalClassName = isNext ? CLASS_NAME_START : CLASS_NAME_END;
    const orderClassName = isNext ? CLASS_NAME_NEXT : CLASS_NAME_PREV;
    const eventDirectionName = this._orderToDirection(order);
    if (nextElement && nextElement.classList.contains(CLASS_NAME_ACTIVE$2)) {
      this._isSliding = false;
      return;
    }
    if (this._isSliding) {
      return;
    }
    const slideEvent = this._triggerSlideEvent(nextElement, eventDirectionName);
    if (slideEvent.defaultPrevented) {
      return;
    }
    if (!activeElement || !nextElement) {
      // Some weirdness is happening, so we bail
      return;
    }
    this._isSliding = true;
    if (isCycling) {
      this.pause();
    }
    this._setActiveIndicatorElement(nextElement);
    this._activeElement = nextElement;
    const triggerSlidEvent = () => {
      EventHandler.trigger(this._element, EVENT_SLID, {
        relatedTarget: nextElement,
        direction: eventDirectionName,
        from: activeElementIndex,
        to: nextElementIndex
      });
    };
    if (this._element.classList.contains(CLASS_NAME_SLIDE)) {
      nextElement.classList.add(orderClassName);
      reflow(nextElement);
      activeElement.classList.add(directionalClassName);
      nextElement.classList.add(directionalClassName);
      const completeCallBack = () => {
        nextElement.classList.remove(directionalClassName, orderClassName);
        nextElement.classList.add(CLASS_NAME_ACTIVE$2);
        activeElement.classList.remove(CLASS_NAME_ACTIVE$2, orderClassName, directionalClassName);
        this._isSliding = false;
        setTimeout(triggerSlidEvent, 0);
      };
      this._queueCallback(completeCallBack, activeElement, true);
    } else {
      activeElement.classList.remove(CLASS_NAME_ACTIVE$2);
      nextElement.classList.add(CLASS_NAME_ACTIVE$2);
      this._isSliding = false;
      triggerSlidEvent();
    }
    if (isCycling) {
      this.cycle();
    }
  }
  // Map a left/right direction onto next/prev order, honoring RTL; any other
  // value (already an order) passes through unchanged.
  _directionToOrder(direction) {
    if (![DIRECTION_RIGHT, DIRECTION_LEFT].includes(direction)) {
      return direction;
    }
    if (isRTL()) {
      return direction === DIRECTION_LEFT ? ORDER_PREV : ORDER_NEXT;
    }
    return direction === DIRECTION_LEFT ? ORDER_NEXT : ORDER_PREV;
  }
  // Inverse of _directionToOrder.
  _orderToDirection(order) {
    if (![ORDER_NEXT, ORDER_PREV].includes(order)) {
      return order;
    }
    if (isRTL()) {
      return order === ORDER_PREV ? DIRECTION_LEFT : DIRECTION_RIGHT;
    }
    return order === ORDER_PREV ? DIRECTION_RIGHT : DIRECTION_LEFT;
  } // Static
  // Shared entry point for the data API and jQuery interface: dispatches a
  // numeric config to to(), a string action to the matching method, or
  // starts cycling when `interval` and `ride` are configured.
  static carouselInterface(element, config) {
    const data = Carousel.getOrCreateInstance(element, config);
    let {
      _config
    } = data;
    if (typeof config === 'object') {
      _config = { ..._config,
        ...config
      };
    }
    const action = typeof config === 'string' ? config : _config.slide;
    if (typeof config === 'number') {
      data.to(config);
    } else if (typeof action === 'string') {
      if (typeof data[action] === 'undefined') {
        throw new TypeError(`No method named "${action}"`);
      }
      data[action]();
    } else if (_config.interval && _config.ride) {
      data.pause();
      data.cycle();
    }
  }
  static jQueryInterface(config) {
    return this.each(function () {
      Carousel.carouselInterface(this, config);
    });
  }
  // Delegated click handler for [data-bs-slide] / [data-bs-slide-to]
  // triggers; `this` is the clicked trigger element.
  static dataApiClickHandler(event) {
    const target = getElementFromSelector(this);
    if (!target || !target.classList.contains(CLASS_NAME_CAROUSEL)) {
      return;
    }
    const config = { ...Manipulator.getDataAttributes(target),
      ...Manipulator.getDataAttributes(this)
    };
    const slideIndex = this.getAttribute('data-bs-slide-to');
    if (slideIndex) {
      // An explicit slide target suppresses autoplay for this interaction.
      config.interval = false;
    }
    Carousel.carouselInterface(target, config);
    if (slideIndex) {
      Carousel.getInstance(target).to(slideIndex);
    }
    event.preventDefault();
  }
}
/**
 * ------------------------------------------------------------------------
 * Data Api implementation
 * ------------------------------------------------------------------------
 */
// Delegated clicks on slide controls/indicators drive the target carousel.
EventHandler.on(document, EVENT_CLICK_DATA_API$5, SELECTOR_DATA_SLIDE, Carousel.dataApiClickHandler);
// On window load, auto-initialize every carousel marked data-bs-ride="carousel".
EventHandler.on(window, EVENT_LOAD_DATA_API$2, () => {
  const carousels = SelectorEngine.find(SELECTOR_DATA_RIDE);
  for (let i = 0, len = carousels.length; i < len; i++) {
    Carousel.carouselInterface(carousels[i], Carousel.getInstance(carousels[i]));
  }
});
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 * add .Carousel to jQuery only if jQuery is present
 */
defineJQueryPlugin(Carousel);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): collapse.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
const NAME$a = 'collapse';
const DATA_KEY$9 = 'bs.collapse';
const EVENT_KEY$9 = `.${DATA_KEY$9}`;
const DATA_API_KEY$5 = '.data-api';
// Option defaults; overridable via data-bs-* attributes or JS config
// (merged and validated in Collapse._getConfig).
const Default$9 = {
  toggle: true,
  parent: null
};
const DefaultType$9 = {
  toggle: 'boolean',
  parent: '(null|element)'
};
const EVENT_SHOW$5 = `show${EVENT_KEY$9}`;
const EVENT_SHOWN$5 = `shown${EVENT_KEY$9}`;
const EVENT_HIDE$5 = `hide${EVENT_KEY$9}`;
const EVENT_HIDDEN$5 = `hidden${EVENT_KEY$9}`;
const EVENT_CLICK_DATA_API$4 = `click${EVENT_KEY$9}${DATA_API_KEY$5}`;
const CLASS_NAME_SHOW$7 = 'show';
const CLASS_NAME_COLLAPSE = 'collapse';
const CLASS_NAME_COLLAPSING = 'collapsing';
const CLASS_NAME_COLLAPSED = 'collapsed';
// Matches collapses nested more than one level below a given scope; used to
// exclude deeper children when resolving accordion siblings.
const CLASS_NAME_DEEPER_CHILDREN = `:scope .${CLASS_NAME_COLLAPSE} .${CLASS_NAME_COLLAPSE}`;
const CLASS_NAME_HORIZONTAL = 'collapse-horizontal';
const WIDTH = 'width';
const HEIGHT = 'height';
const SELECTOR_ACTIVES = '.collapse.show, .collapse.collapsing';
const SELECTOR_DATA_TOGGLE$4 = '[data-bs-toggle="collapse"]';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
// Bootstrap collapse component: animates expanding/collapsing of an element
// (vertically, or horizontally with .collapse-horizontal) and keeps its
// trigger buttons' collapsed class and aria-expanded attribute in sync.
class Collapse extends BaseComponent {
  constructor(element, config) {
    super(element);
    this._isTransitioning = false;
    this._config = this._getConfig(config);
    this._triggerArray = [];
    // Collect every toggle on the page whose target selector matches this
    // element, so their collapsed/aria state can be updated together.
    const toggleList = SelectorEngine.find(SELECTOR_DATA_TOGGLE$4);
    for (let i = 0, len = toggleList.length; i < len; i++) {
      const elem = toggleList[i];
      const selector = getSelectorFromElement(elem);
      const filterElement = SelectorEngine.find(selector).filter(foundElem => foundElem === this._element);
      if (selector !== null && filterElement.length) {
        this._selector = selector;
        this._triggerArray.push(elem);
      }
    }
    this._initializeChildren();
    if (!this._config.parent) {
      this._addAriaAndCollapsedClass(this._triggerArray, this._isShown());
    }
    if (this._config.toggle) {
      this.toggle();
    }
  } // Getters
  static get Default() {
    return Default$9;
  }
  static get NAME() {
    return NAME$a;
  } // Public
  toggle() {
    if (this._isShown()) {
      this.hide();
    } else {
      this.show();
    }
  }
  // Expand the element; in accordion mode (config.parent set) any open
  // sibling collapses at the same nesting depth are hidden first.
  show() {
    if (this._isTransitioning || this._isShown()) {
      return;
    }
    let actives = [];
    let activesData;
    if (this._config.parent) {
      const children = SelectorEngine.find(CLASS_NAME_DEEPER_CHILDREN, this._config.parent);
      actives = SelectorEngine.find(SELECTOR_ACTIVES, this._config.parent).filter(elem => !children.includes(elem)); // remove children if greater depth
    }
    const container = SelectorEngine.findOne(this._selector);
    if (actives.length) {
      const tempActiveData = actives.find(elem => container !== elem);
      activesData = tempActiveData ? Collapse.getInstance(tempActiveData) : null;
      // Don't interrupt a sibling that is mid-transition.
      if (activesData && activesData._isTransitioning) {
        return;
      }
    }
    const startEvent = EventHandler.trigger(this._element, EVENT_SHOW$5);
    if (startEvent.defaultPrevented) {
      return;
    }
    actives.forEach(elemActive => {
      if (container !== elemActive) {
        Collapse.getOrCreateInstance(elemActive, {
          toggle: false
        }).hide();
      }
      if (!activesData) {
        Data.set(elemActive, DATA_KEY$9, null);
      }
    });
    const dimension = this._getDimension();
    // Animate from 0 to the element's scroll size along that dimension.
    this._element.classList.remove(CLASS_NAME_COLLAPSE);
    this._element.classList.add(CLASS_NAME_COLLAPSING);
    this._element.style[dimension] = 0;
    this._addAriaAndCollapsedClass(this._triggerArray, true);
    this._isTransitioning = true;
    const complete = () => {
      this._isTransitioning = false;
      this._element.classList.remove(CLASS_NAME_COLLAPSING);
      this._element.classList.add(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);
      this._element.style[dimension] = '';
      EventHandler.trigger(this._element, EVENT_SHOWN$5);
    };
    const capitalizedDimension = dimension[0].toUpperCase() + dimension.slice(1);
    const scrollSize = `scroll${capitalizedDimension}`;
    this._queueCallback(complete, this._element, true);
    this._element.style[dimension] = `${this._element[scrollSize]}px`;
  }
  // Collapse the element, animating from its current rendered size to zero.
  hide() {
    if (this._isTransitioning || !this._isShown()) {
      return;
    }
    const startEvent = EventHandler.trigger(this._element, EVENT_HIDE$5);
    if (startEvent.defaultPrevented) {
      return;
    }
    const dimension = this._getDimension();
    // Pin the current size so removing 'show' has a start value to
    // transition from.
    this._element.style[dimension] = `${this._element.getBoundingClientRect()[dimension]}px`;
    reflow(this._element);
    this._element.classList.add(CLASS_NAME_COLLAPSING);
    this._element.classList.remove(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);
    const triggerArrayLength = this._triggerArray.length;
    for (let i = 0; i < triggerArrayLength; i++) {
      const trigger = this._triggerArray[i];
      const elem = getElementFromSelector(trigger);
      if (elem && !this._isShown(elem)) {
        this._addAriaAndCollapsedClass([trigger], false);
      }
    }
    this._isTransitioning = true;
    const complete = () => {
      this._isTransitioning = false;
      this._element.classList.remove(CLASS_NAME_COLLAPSING);
      this._element.classList.add(CLASS_NAME_COLLAPSE);
      EventHandler.trigger(this._element, EVENT_HIDDEN$5);
    };
    this._element.style[dimension] = '';
    this._queueCallback(complete, this._element, true);
  }
  _isShown(element = this._element) {
    return element.classList.contains(CLASS_NAME_SHOW$7);
  } // Private
  // Merge defaults, data attributes and the caller's config, coerce the
  // toggle flag, resolve the parent element reference, then validate.
  _getConfig(config) {
    config = { ...Default$9,
      ...Manipulator.getDataAttributes(this._element),
      ...config
    };
    config.toggle = Boolean(config.toggle); // Coerce string values
    config.parent = getElement(config.parent);
    typeCheckConfig(NAME$a, config, DefaultType$9);
    return config;
  }
  _getDimension() {
    return this._element.classList.contains(CLASS_NAME_HORIZONTAL) ? WIDTH : HEIGHT;
  }
  // In accordion mode, sync the collapsed/aria state of the direct child
  // toggles (deeper nested toggles belong to nested accordions).
  _initializeChildren() {
    if (!this._config.parent) {
      return;
    }
    const children = SelectorEngine.find(CLASS_NAME_DEEPER_CHILDREN, this._config.parent);
    SelectorEngine.find(SELECTOR_DATA_TOGGLE$4, this._config.parent).filter(elem => !children.includes(elem)).forEach(element => {
      const selected = getElementFromSelector(element);
      if (selected) {
        this._addAriaAndCollapsedClass([element], this._isShown(selected));
      }
    });
  }
  // Toggle the 'collapsed' class and aria-expanded on each trigger to
  // reflect whether the target is open.
  _addAriaAndCollapsedClass(triggerArray, isOpen) {
    if (!triggerArray.length) {
      return;
    }
    triggerArray.forEach(elem => {
      if (isOpen) {
        elem.classList.remove(CLASS_NAME_COLLAPSED);
      } else {
        elem.classList.add(CLASS_NAME_COLLAPSED);
      }
      elem.setAttribute('aria-expanded', isOpen);
    });
  } // Static
  static jQueryInterface(config) {
    return this.each(function () {
      const _config = {};
      // Explicit show/hide calls must not auto-toggle on instantiation.
      if (typeof config === 'string' && /show|hide/.test(config)) {
        _config.toggle = false;
      }
      const data = Collapse.getOrCreateInstance(this, _config);
      if (typeof config === 'string') {
        if (typeof data[config] === 'undefined') {
          throw new TypeError(`No method named "${config}"`);
        }
        data[config]();
      }
    });
  }
}
/**
 * ------------------------------------------------------------------------
 * Data Api implementation
 * ------------------------------------------------------------------------
 */
// Delegated clicks on [data-bs-toggle="collapse"] toggle every element the
// trigger's selector matches (a trigger may control multiple collapsibles).
EventHandler.on(document, EVENT_CLICK_DATA_API$4, SELECTOR_DATA_TOGGLE$4, function (event) {
  // preventDefault only for <a> elements (which change the URL) not inside the collapsible element
  if (event.target.tagName === 'A' || event.delegateTarget && event.delegateTarget.tagName === 'A') {
    event.preventDefault();
  }
  const selector = getSelectorFromElement(this);
  const selectorElements = SelectorEngine.find(selector);
  selectorElements.forEach(element => {
    Collapse.getOrCreateInstance(element, {
      toggle: false
    }).toggle();
  });
});
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 * add .Collapse to jQuery only if jQuery is present
 */
defineJQueryPlugin(Collapse);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): dropdown.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
const NAME$9 = 'dropdown';
const DATA_KEY$8 = 'bs.dropdown';
const EVENT_KEY$8 = `.${DATA_KEY$8}`;
const DATA_API_KEY$4 = '.data-api';
const ESCAPE_KEY$2 = 'Escape';
const SPACE_KEY = 'Space';
const TAB_KEY$1 = 'Tab';
const ARROW_UP_KEY = 'ArrowUp';
const ARROW_DOWN_KEY = 'ArrowDown';
const RIGHT_MOUSE_BUTTON = 2; // MouseEvent.button value for the secondary button, usually the right button
const REGEXP_KEYDOWN = new RegExp(`${ARROW_UP_KEY}|${ARROW_DOWN_KEY}|${ESCAPE_KEY$2}`);
const EVENT_HIDE$4 = `hide${EVENT_KEY$8}`;
const EVENT_HIDDEN$4 = `hidden${EVENT_KEY$8}`;
const EVENT_SHOW$4 = `show${EVENT_KEY$8}`;
const EVENT_SHOWN$4 = `shown${EVENT_KEY$8}`;
const EVENT_CLICK_DATA_API$3 = `click${EVENT_KEY$8}${DATA_API_KEY$4}`;
const EVENT_KEYDOWN_DATA_API = `keydown${EVENT_KEY$8}${DATA_API_KEY$4}`;
const EVENT_KEYUP_DATA_API = `keyup${EVENT_KEY$8}${DATA_API_KEY$4}`;
const CLASS_NAME_SHOW$6 = 'show';
const CLASS_NAME_DROPUP = 'dropup';
const CLASS_NAME_DROPEND = 'dropend';
const CLASS_NAME_DROPSTART = 'dropstart';
const CLASS_NAME_NAVBAR = 'navbar';
const SELECTOR_DATA_TOGGLE$3 = '[data-bs-toggle="dropdown"]';
const SELECTOR_MENU = '.dropdown-menu';
const SELECTOR_NAVBAR_NAV = '.navbar-nav';
const SELECTOR_VISIBLE_ITEMS = '.dropdown-menu .dropdown-item:not(.disabled):not(:disabled)';
// Popper placements; the RTL variant is resolved once at module load via
// isRTL(), not per call.
const PLACEMENT_TOP = isRTL() ? 'top-end' : 'top-start';
const PLACEMENT_TOPEND = isRTL() ? 'top-start' : 'top-end';
const PLACEMENT_BOTTOM = isRTL() ? 'bottom-end' : 'bottom-start';
const PLACEMENT_BOTTOMEND = isRTL() ? 'bottom-start' : 'bottom-end';
const PLACEMENT_RIGHT = isRTL() ? 'left-start' : 'right-start';
const PLACEMENT_LEFT = isRTL() ? 'right-start' : 'left-start';
// Option defaults; overridable via data-bs-* attributes or JS config
// (merged and validated in Dropdown._getConfig).
const Default$8 = {
  offset: [0, 2],
  boundary: 'clippingParents',
  reference: 'toggle',
  display: 'dynamic',
  popperConfig: null,
  autoClose: true
};
const DefaultType$8 = {
  offset: '(array|string|function)',
  boundary: '(string|element)',
  reference: '(string|element|object)',
  display: 'string',
  popperConfig: '(null|object|function)',
  autoClose: '(boolean|string)'
};
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
class Dropdown extends BaseComponent {
constructor(element, config) {
super(element);
this._popper = null;
this._config = this._getConfig(config);
this._menu = this._getMenuElement();
this._inNavbar = this._detectNavbar();
} // Getters
static get Default() {
return Default$8;
}
static get DefaultType() {
return DefaultType$8;
}
static get NAME() {
return NAME$9;
} // Public
toggle() {
return this._isShown() ? this.hide() : this.show();
}
show() {
if (isDisabled(this._element) || this._isShown(this._menu)) {
return;
}
const relatedTarget = {
relatedTarget: this._element
};
const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$4, relatedTarget);
if (showEvent.defaultPrevented) {
return;
}
const parent = Dropdown.getParentFromElement(this._element); // Totally disable Popper for Dropdowns in Navbar
if (this._inNavbar) {
Manipulator.setDataAttribute(this._menu, 'popper', 'none');
} else {
this._createPopper(parent);
} // If this is a touch-enabled device we add extra
// empty mouseover listeners to the body's immediate children;
// only needed because of broken event delegation on iOS
// https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html
if ('ontouchstart' in document.documentElement && !parent.closest(SELECTOR_NAVBAR_NAV)) {
[].concat(...document.body.children).forEach(elem => EventHandler.on(elem, 'mouseover', noop));
}
this._element.focus();
this._element.setAttribute('aria-expanded', true);
this._menu.classList.add(CLASS_NAME_SHOW$6);
this._element.classList.add(CLASS_NAME_SHOW$6);
EventHandler.trigger(this._element, EVENT_SHOWN$4, relatedTarget);
}
hide() {
if (isDisabled(this._element) || !this._isShown(this._menu)) {
return;
}
const relatedTarget = {
relatedTarget: this._element
};
this._completeHide(relatedTarget);
}
dispose() {
if (this._popper) {
this._popper.destroy();
}
super.dispose();
}
update() {
this._inNavbar = this._detectNavbar();
if (this._popper) {
this._popper.update();
}
} // Private
_completeHide(relatedTarget) {
const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$4, relatedTarget);
if (hideEvent.defaultPrevented) {
return;
} // If this is a touch-enabled device we remove the extra
// empty mouseover listeners we added for iOS support
if ('ontouchstart' in document.documentElement) {
[].concat(...document.body.children).forEach(elem => EventHandler.off(elem, 'mouseover', noop));
}
if (this._popper) {
this._popper.destroy();
}
this._menu.classList.remove(CLASS_NAME_SHOW$6);
this._element.classList.remove(CLASS_NAME_SHOW$6);
this._element.setAttribute('aria-expanded', 'false');
Manipulator.removeDataAttribute(this._menu, 'popper');
EventHandler.trigger(this._element, EVENT_HIDDEN$4, relatedTarget);
}
_getConfig(config) {
config = { ...this.constructor.Default,
...Manipulator.getDataAttributes(this._element),
...config
};
typeCheckConfig(NAME$9, config, this.constructor.DefaultType);
if (typeof config.reference === 'object' && !isElement(config.reference) && typeof config.reference.getBoundingClientRect !== 'function') {
// Popper virtual elements require a getBoundingClientRect method
throw new TypeError(`${NAME$9.toUpperCase()}: Option "reference" provided type "object" without a required "getBoundingClientRect" method.`);
}
return config;
}
_createPopper(parent) {
if (typeof Popper__namespace === 'undefined') {
throw new TypeError('Bootstrap\'s dropdowns require Popper (https://popper.js.org)');
}
let referenceElement = this._element;
if (this._config.reference === 'parent') {
referenceElement = parent;
} else if (isElement(this._config.reference)) {
referenceElement = getElement(this._config.reference);
} else if (typeof this._config.reference === 'object') {
referenceElement = this._config.reference;
}
const popperConfig = this._getPopperConfig();
const isDisplayStatic = popperConfig.modifiers.find(modifier => modifier.name === 'applyStyles' && modifier.enabled === false);
this._popper = Popper__namespace.createPopper(referenceElement, this._menu, popperConfig);
if (isDisplayStatic) {
Manipulator.setDataAttribute(this._menu, 'popper', 'static');
}
}
_isShown(element = this._element) {
return element.classList.contains(CLASS_NAME_SHOW$6);
}
_getMenuElement() {
return SelectorEngine.next(this._element, SELECTOR_MENU)[0];
}
_getPlacement() {
const parentDropdown = this._element.parentNode;
if (parentDropdown.classList.contains(CLASS_NAME_DROPEND)) {
return PLACEMENT_RIGHT;
}
if (parentDropdown.classList.contains(CLASS_NAME_DROPSTART)) {
return PLACEMENT_LEFT;
} // We need to trim the value because custom properties can also include spaces
const isEnd = getComputedStyle(this._menu).getPropertyValue('--bs-position').trim() === 'end';
if (parentDropdown.classList.contains(CLASS_NAME_DROPUP)) {
return isEnd ? PLACEMENT_TOPEND : PLACEMENT_TOP;
}
return isEnd ? PLACEMENT_BOTTOMEND : PLACEMENT_BOTTOM;
}
_detectNavbar() {
return this._element.closest(`.${CLASS_NAME_NAVBAR}`) !== null;
}
_getOffset() {
const {
offset
} = this._config;
if (typeof offset === 'string') {
return offset.split(',').map(val => Number.parseInt(val, 10));
}
if (typeof offset === 'function') {
return popperData => offset(popperData, this._element);
}
return offset;
}
_getPopperConfig() {
const defaultBsPopperConfig = {
placement: this._getPlacement(),
modifiers: [{
name: 'preventOverflow',
options: {
boundary: this._config.boundary
}
}, {
name: 'offset',
options: {
offset: this._getOffset()
}
}]
}; // Disable Popper if we have a static display
if (this._config.display === 'static') {
defaultBsPopperConfig.modifiers = [{
name: 'applyStyles',
enabled: false
}];
}
return { ...defaultBsPopperConfig,
...(typeof this._config.popperConfig === 'function' ? this._config.popperConfig(defaultBsPopperConfig) : this._config.popperConfig)
};
}
_selectMenuItem({
key,
target
}) {
const items = SelectorEngine.find(SELECTOR_VISIBLE_ITEMS, this._menu).filter(isVisible);
if (!items.length) {
return;
} // if target isn't included in items (e.g. when expanding the dropdown)
// allow cycling to get the last item in case key equals ARROW_UP_KEY
getNextActiveElement(items, target, key === ARROW_DOWN_KEY, !items.includes(target)).focus();
} // Static
static jQueryInterface(config) {
return this.each(function () {
const data = Dropdown.getOrCreateInstance(this, config);
if (typeof config !== 'string') {
return;
}
if (typeof data[config] === 'undefined') {
throw new TypeError(`No method named "${config}"`);
}
data[config]();
});
}
// Close every open dropdown on the page except those the triggering event
// should leave open (clicks inside the component, autoClose rules, and
// Tab/typing within contained form controls).
static clearMenus(event) {
// Right-clicks and keyups other than Tab never close menus.
if (event && (event.button === RIGHT_MOUSE_BUTTON || event.type === 'keyup' && event.key !== TAB_KEY$1)) {
return;
}
const toggles = SelectorEngine.find(SELECTOR_DATA_TOGGLE$3);
for (let i = 0, len = toggles.length; i < len; i++) {
const context = Dropdown.getInstance(toggles[i]);
// Skip toggles without an instance, or with auto-close disabled.
if (!context || context._config.autoClose === false) {
continue;
}
if (!context._isShown()) {
continue;
}
const relatedTarget = {
relatedTarget: context._element
};
if (event) {
// composedPath() sees the real target chain even across shadow DOM boundaries.
const composedPath = event.composedPath();
const isMenuTarget = composedPath.includes(context._menu);
// autoClose semantics: 'inside' only closes on clicks inside the menu,
// 'outside' only on clicks outside it; clicks on the toggle never close here.
if (composedPath.includes(context._element) || context._config.autoClose === 'inside' && !isMenuTarget || context._config.autoClose === 'outside' && isMenuTarget) {
continue;
} // Tab navigation through the dropdown menu or events from contained inputs shouldn't close the menu
if (context._menu.contains(event.target) && (event.type === 'keyup' && event.key === TAB_KEY$1 || /input|select|option|textarea|form/i.test(event.target.tagName))) {
continue;
}
// Forward the originating click so hide-event listeners can inspect it.
if (event.type === 'click') {
relatedTarget.clickEvent = event;
}
}
context._completeHide(relatedTarget);
}
}
static getParentFromElement(element) {
return getElementFromSelector(element) || element.parentNode;
}
// Keyboard entry point for dropdowns, bound to both the toggle and the menu.
// `this` is the element the delegated listener matched, not the Dropdown instance.
static dataApiKeydownHandler(event) {
// If not input/textarea:
// - And not a key in REGEXP_KEYDOWN => not a dropdown command
// If input/textarea:
// - If space key => not a dropdown command
// - If key is other than escape
// - If key is not up or down => not a dropdown command
// - If trigger inside the menu => not a dropdown command
if (/input|textarea/i.test(event.target.tagName) ? event.key === SPACE_KEY || event.key !== ESCAPE_KEY$2 && (event.key !== ARROW_DOWN_KEY && event.key !== ARROW_UP_KEY || event.target.closest(SELECTOR_MENU)) : !REGEXP_KEYDOWN.test(event.key)) {
return;
}
const isActive = this.classList.contains(CLASS_NAME_SHOW$6);
// Escape on an already-closed dropdown is a no-op.
if (!isActive && event.key === ESCAPE_KEY$2) {
return;
}
event.preventDefault();
event.stopPropagation();
if (isDisabled(this)) {
return;
}
// When the handler fired on a menu item, walk back to the toggle button.
const getToggleButton = this.matches(SELECTOR_DATA_TOGGLE$3) ? this : SelectorEngine.prev(this, SELECTOR_DATA_TOGGLE$3)[0];
const instance = Dropdown.getOrCreateInstance(getToggleButton);
if (event.key === ESCAPE_KEY$2) {
instance.hide();
return;
}
// Arrow keys open the menu if needed, then move the active item.
if (event.key === ARROW_UP_KEY || event.key === ARROW_DOWN_KEY) {
if (!isActive) {
instance.show();
}
instance._selectMenuItem(event);
return;
}
// Any other dropdown key closes open menus (Space toggles even when active).
if (!isActive || event.key === SPACE_KEY) {
Dropdown.clearMenus();
}
}
}
/**
 * ------------------------------------------------------------------------
 * Data Api implementation
 * ------------------------------------------------------------------------
 */
// Keydown is delegated from both the toggle and the menu; keyup/click at the
// document level close open menus (clearMenus decides which ones).
EventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_DATA_TOGGLE$3, Dropdown.dataApiKeydownHandler);
EventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_MENU, Dropdown.dataApiKeydownHandler);
EventHandler.on(document, EVENT_CLICK_DATA_API$3, Dropdown.clearMenus);
EventHandler.on(document, EVENT_KEYUP_DATA_API, Dropdown.clearMenus);
EventHandler.on(document, EVENT_CLICK_DATA_API$3, SELECTOR_DATA_TOGGLE$3, function (event) {
event.preventDefault();
Dropdown.getOrCreateInstance(this).toggle();
});
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 * add .Dropdown to jQuery only if jQuery is present
 */
defineJQueryPlugin(Dropdown);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): util/scrollBar.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
// Positioned elements whose padding/margin must be compensated while the
// body scrollbar is hidden (see ScrollBarHelper.hide).
const SELECTOR_FIXED_CONTENT = '.fixed-top, .fixed-bottom, .is-fixed, .sticky-top';
const SELECTOR_STICKY_CONTENT = '.sticky-top';
// Hides the document scrollbar while compensating the resulting layout
// shift, and restores everything afterwards. Original inline styles are
// stashed in data attributes so reset() can put them back.
class ScrollBarHelper {
  constructor() {
    this._element = document.body;
  }

  // Width of the viewport scrollbar in pixels.
  // https://developer.mozilla.org/en-US/docs/Web/API/Window/innerWidth#usage_notes
  getWidth() {
    return Math.abs(window.innerWidth - document.documentElement.clientWidth);
  }

  isOverflowing() {
    return this.getWidth() > 0;
  }

  // Hide the scrollbar and balance the shift: positive paddingRight on the
  // body and fixed content, negative marginRight on sticky content so it
  // keeps rendering full-width.
  hide() {
    const width = this.getWidth();
    this._disableOverFlow();
    this._setElementAttributes(this._element, 'paddingRight', value => value + width);
    this._setElementAttributes(SELECTOR_FIXED_CONTENT, 'paddingRight', value => value + width);
    this._setElementAttributes(SELECTOR_STICKY_CONTENT, 'marginRight', value => value - width);
  }

  // Undo everything hide() changed.
  reset() {
    this._resetElementAttributes(this._element, 'overflow');
    this._resetElementAttributes(this._element, 'paddingRight');
    this._resetElementAttributes(SELECTOR_FIXED_CONTENT, 'paddingRight');
    this._resetElementAttributes(SELECTOR_STICKY_CONTENT, 'marginRight');
  }

  _disableOverFlow() {
    this._saveInitialAttribute(this._element, 'overflow');
    this._element.style.overflow = 'hidden';
  }

  _setElementAttributes(selector, styleProp, callback) {
    const scrollbarWidth = this.getWidth();
    this._applyManipulationCallback(selector, element => {
      // Elements narrower than the viewport minus the scrollbar are not
      // affected by the scrollbar disappearing — leave them alone.
      if (element !== this._element && window.innerWidth > element.clientWidth + scrollbarWidth) {
        return;
      }
      this._saveInitialAttribute(element, styleProp);
      const computedValue = window.getComputedStyle(element)[styleProp];
      element.style[styleProp] = `${callback(Number.parseFloat(computedValue))}px`;
    });
  }

  // Remember an element's inline style (if any) in a data attribute.
  _saveInitialAttribute(element, styleProp) {
    const inlineValue = element.style[styleProp];
    if (inlineValue) {
      Manipulator.setDataAttribute(element, styleProp, inlineValue);
    }
  }

  _resetElementAttributes(selector, styleProp) {
    this._applyManipulationCallback(selector, element => {
      const savedValue = Manipulator.getDataAttribute(element, styleProp);
      if (typeof savedValue === 'undefined') {
        // Nothing was saved, so there was no inline value to restore.
        element.style.removeProperty(styleProp);
      } else {
        Manipulator.removeDataAttribute(element, styleProp);
        element.style[styleProp] = savedValue;
      }
    });
  }

  // Run the callback on a single element, or on every match of a selector
  // scoped to the body.
  _applyManipulationCallback(selector, callBack) {
    if (isElement(selector)) {
      callBack(selector);
    } else {
      SelectorEngine.find(selector, this._element).forEach(callBack);
    }
  }
}
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): util/backdrop.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
// Backdrop helper defaults and option types.
const Default$7 = {
className: 'modal-backdrop',
isVisible: true,
// if false, we use the backdrop helper without adding any element to the dom
isAnimated: false,
rootElement: 'body',
// give the choice to place backdrop under different elements
clickCallback: null
};
const DefaultType$7 = {
className: 'string',
isVisible: 'boolean',
isAnimated: 'boolean',
rootElement: '(element|string)',
clickCallback: '(function|null)'
};
const NAME$8 = 'backdrop';
const CLASS_NAME_FADE$4 = 'fade';
const CLASS_NAME_SHOW$5 = 'show';
const EVENT_MOUSEDOWN = `mousedown.bs.${NAME$8}`;
// Shared backdrop element used by Modal and Offcanvas. The <div> is created
// lazily, appended on first show, and removed again on dispose.
class Backdrop {
  constructor(config) {
    this._config = this._getConfig(config);
    this._isAppended = false;
    this._element = null;
  }

  // Show the backdrop (when configured visible) and invoke `callback`
  // once the show transition has finished.
  show(callback) {
    if (!this._config.isVisible) {
      execute(callback);
      return;
    }
    this._append();
    const element = this._getElement();
    if (this._config.isAnimated) {
      // Force a reflow so the subsequent class addition animates.
      reflow(element);
    }
    element.classList.add(CLASS_NAME_SHOW$5);
    this._emulateAnimation(() => execute(callback));
  }

  // Fade the backdrop out, dispose of it, then invoke `callback`.
  hide(callback) {
    if (!this._config.isVisible) {
      execute(callback);
      return;
    }
    this._getElement().classList.remove(CLASS_NAME_SHOW$5);
    this._emulateAnimation(() => {
      this.dispose();
      execute(callback);
    });
  } // Private

  // Lazily build the backdrop element.
  _getElement() {
    if (!this._element) {
      const backdrop = document.createElement('div');
      backdrop.className = this._config.className;
      if (this._config.isAnimated) {
        backdrop.classList.add(CLASS_NAME_FADE$4);
      }
      this._element = backdrop;
    }
    return this._element;
  }

  _getConfig(config) {
    config = {
      ...Default$7,
      ...(typeof config === 'object' ? config : {})
    };
    // use getElement() with the default "body" to get a fresh Element on each instantiation
    config.rootElement = getElement(config.rootElement);
    typeCheckConfig(NAME$8, config, DefaultType$7);
    return config;
  }

  _append() {
    if (this._isAppended) {
      return;
    }
    const element = this._getElement();
    this._config.rootElement.append(element);
    EventHandler.on(element, EVENT_MOUSEDOWN, () => {
      execute(this._config.clickCallback);
    });
    this._isAppended = true;
  }

  dispose() {
    if (!this._isAppended) {
      return;
    }
    EventHandler.off(this._element, EVENT_MOUSEDOWN);
    this._element.remove();
    this._isAppended = false;
  }

  // Run `callback` after the CSS transition, or immediately when not animated.
  _emulateAnimation(callback) {
    executeAfterTransition(callback, this._getElement(), this._config.isAnimated);
  }
}
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): util/focustrap.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
// FocusTrap helper defaults, option types, and event names.
const Default$6 = {
trapElement: null,
// The element to trap focus inside of
autofocus: true
};
const DefaultType$6 = {
trapElement: 'element',
autofocus: 'boolean'
};
const NAME$7 = 'focustrap';
const DATA_KEY$7 = 'bs.focustrap';
const EVENT_KEY$7 = `.${DATA_KEY$7}`;
const EVENT_FOCUSIN$1 = `focusin${EVENT_KEY$7}`;
const EVENT_KEYDOWN_TAB = `keydown.tab${EVENT_KEY$7}`;
const TAB_KEY = 'Tab';
const TAB_NAV_FORWARD = 'forward';
const TAB_NAV_BACKWARD = 'backward';
// Keeps keyboard focus inside a container (used by Modal and Offcanvas):
// whenever focus lands outside the trap element, it is pulled back to the
// first or last focusable child depending on the last Tab direction.
class FocusTrap {
  constructor(config) {
    this._config = this._getConfig(config);
    this._isActive = false;
    this._lastTabNavDirection = null;
  }

  activate() {
    const { trapElement, autofocus } = this._config;
    if (this._isActive) {
      return;
    }
    if (autofocus) {
      trapElement.focus();
    }
    // Drop any previous handlers first — guards against an infinite focus loop.
    EventHandler.off(document, EVENT_KEY$7);
    EventHandler.on(document, EVENT_FOCUSIN$1, event => this._handleFocusin(event));
    EventHandler.on(document, EVENT_KEYDOWN_TAB, event => this._handleKeydown(event));
    this._isActive = true;
  }

  deactivate() {
    if (!this._isActive) {
      return;
    }
    this._isActive = false;
    EventHandler.off(document, EVENT_KEY$7);
  } // Private

  _handleFocusin(event) {
    const { target } = event;
    const { trapElement } = this._config;
    // Focus still inside the trap (or on the document itself): nothing to do.
    if (target === document || target === trapElement || trapElement.contains(target)) {
      return;
    }
    const focusable = SelectorEngine.focusableChildren(trapElement);
    if (focusable.length === 0) {
      trapElement.focus();
    } else if (this._lastTabNavDirection === TAB_NAV_BACKWARD) {
      // Shift+Tab wrapped out of the trap: re-enter at the end.
      focusable[focusable.length - 1].focus();
    } else {
      focusable[0].focus();
    }
  }

  // Record the direction of Tab navigation for _handleFocusin.
  _handleKeydown(event) {
    if (event.key !== TAB_KEY) {
      return;
    }
    this._lastTabNavDirection = event.shiftKey ? TAB_NAV_BACKWARD : TAB_NAV_FORWARD;
  }

  _getConfig(config) {
    config = {
      ...Default$6,
      ...(typeof config === 'object' ? config : {})
    };
    typeCheckConfig(NAME$7, config, DefaultType$6);
    return config;
  }
}
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): modal.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
// Modal component constants: names, defaults, option types, namespaced
// event names, state class names, and selectors.
const NAME$6 = 'modal';
const DATA_KEY$6 = 'bs.modal';
const EVENT_KEY$6 = `.${DATA_KEY$6}`;
const DATA_API_KEY$3 = '.data-api';
const ESCAPE_KEY$1 = 'Escape';
const Default$5 = {
backdrop: true,
keyboard: true,
focus: true
};
const DefaultType$5 = {
backdrop: '(boolean|string)',
keyboard: 'boolean',
focus: 'boolean'
};
const EVENT_HIDE$3 = `hide${EVENT_KEY$6}`;
const EVENT_HIDE_PREVENTED = `hidePrevented${EVENT_KEY$6}`;
const EVENT_HIDDEN$3 = `hidden${EVENT_KEY$6}`;
const EVENT_SHOW$3 = `show${EVENT_KEY$6}`;
const EVENT_SHOWN$3 = `shown${EVENT_KEY$6}`;
const EVENT_RESIZE = `resize${EVENT_KEY$6}`;
const EVENT_CLICK_DISMISS = `click.dismiss${EVENT_KEY$6}`;
const EVENT_KEYDOWN_DISMISS$1 = `keydown.dismiss${EVENT_KEY$6}`;
const EVENT_MOUSEUP_DISMISS = `mouseup.dismiss${EVENT_KEY$6}`;
const EVENT_MOUSEDOWN_DISMISS = `mousedown.dismiss${EVENT_KEY$6}`;
const EVENT_CLICK_DATA_API$2 = `click${EVENT_KEY$6}${DATA_API_KEY$3}`;
const CLASS_NAME_OPEN = 'modal-open';
const CLASS_NAME_FADE$3 = 'fade';
const CLASS_NAME_SHOW$4 = 'show';
// Applied during the "static backdrop" shake animation.
const CLASS_NAME_STATIC = 'modal-static';
const OPEN_SELECTOR$1 = '.modal.show';
const SELECTOR_DIALOG = '.modal-dialog';
const SELECTOR_MODAL_BODY = '.modal-body';
const SELECTOR_DATA_TOGGLE$2 = '[data-bs-toggle="modal"]';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
// Modal dialog component: manages the backdrop, focus trap, body scrollbar
// compensation, show/hide transitions, and the "static backdrop" behavior.
class Modal extends BaseComponent {
constructor(element, config) {
super(element);
this._config = this._getConfig(config);
this._dialog = SelectorEngine.findOne(SELECTOR_DIALOG, this._element);
this._backdrop = this._initializeBackDrop();
this._focustrap = this._initializeFocusTrap();
this._isShown = false;
// Set when a mousedown starts inside the dialog; suppresses the backdrop
// click-dismiss when the drag ends over the backdrop (see _showBackdrop).
this._ignoreBackdropClick = false;
this._isTransitioning = false;
this._scrollBar = new ScrollBarHelper();
} // Getters
static get Default() {
return Default$5;
}
static get NAME() {
return NAME$6;
} // Public
toggle(relatedTarget) {
return this._isShown ? this.hide() : this.show(relatedTarget);
}
// Show the modal; `relatedTarget` is forwarded on the show/shown events.
// Cancellable via the show event's preventDefault().
show(relatedTarget) {
if (this._isShown || this._isTransitioning) {
return;
}
const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$3, {
relatedTarget
});
if (showEvent.defaultPrevented) {
return;
}
this._isShown = true;
if (this._isAnimated()) {
this._isTransitioning = true;
}
this._scrollBar.hide();
document.body.classList.add(CLASS_NAME_OPEN);
this._adjustDialog();
this._setEscapeEvent();
this._setResizeEvent();
// A mousedown inside the dialog followed by a mouseup over the modal root
// (a drag) must not be treated as a backdrop dismiss click.
EventHandler.on(this._dialog, EVENT_MOUSEDOWN_DISMISS, () => {
EventHandler.one(this._element, EVENT_MOUSEUP_DISMISS, event => {
if (event.target === this._element) {
this._ignoreBackdropClick = true;
}
});
});
this._showBackdrop(() => this._showElement(relatedTarget));
}
// Hide the modal. Cancellable via the hide event's preventDefault().
hide() {
if (!this._isShown || this._isTransitioning) {
return;
}
const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$3);
if (hideEvent.defaultPrevented) {
return;
}
this._isShown = false;
const isAnimated = this._isAnimated();
if (isAnimated) {
this._isTransitioning = true;
}
this._setEscapeEvent();
this._setResizeEvent();
this._focustrap.deactivate();
this._element.classList.remove(CLASS_NAME_SHOW$4);
EventHandler.off(this._element, EVENT_CLICK_DISMISS);
EventHandler.off(this._dialog, EVENT_MOUSEDOWN_DISMISS);
this._queueCallback(() => this._hideModal(), this._element, isAnimated);
}
dispose() {
[window, this._dialog].forEach(htmlElement => EventHandler.off(htmlElement, EVENT_KEY$6));
this._backdrop.dispose();
this._focustrap.deactivate();
super.dispose();
}
// Recompute scrollbar-compensation padding (e.g. after content changes).
handleUpdate() {
this._adjustDialog();
} // Private
_initializeBackDrop() {
return new Backdrop({
isVisible: Boolean(this._config.backdrop),
// 'static' option will be translated to true, and booleans will keep their value
isAnimated: this._isAnimated()
});
}
_initializeFocusTrap() {
return new FocusTrap({
trapElement: this._element
});
}
// Merge defaults, data attributes, and the passed config (in that order).
_getConfig(config) {
config = { ...Default$5,
...Manipulator.getDataAttributes(this._element),
...(typeof config === 'object' ? config : {})
};
typeCheckConfig(NAME$6, config, DefaultType$5);
return config;
}
_showElement(relatedTarget) {
const isAnimated = this._isAnimated();
const modalBody = SelectorEngine.findOne(SELECTOR_MODAL_BODY, this._dialog);
if (!this._element.parentNode || this._element.parentNode.nodeType !== Node.ELEMENT_NODE) {
// Don't move modal's DOM position
document.body.append(this._element);
}
this._element.style.display = 'block';
this._element.removeAttribute('aria-hidden');
this._element.setAttribute('aria-modal', true);
this._element.setAttribute('role', 'dialog');
// Reset scroll position of both the modal and its body on each show.
this._element.scrollTop = 0;
if (modalBody) {
modalBody.scrollTop = 0;
}
if (isAnimated) {
// Force a reflow so the show class addition animates.
reflow(this._element);
}
this._element.classList.add(CLASS_NAME_SHOW$4);
const transitionComplete = () => {
if (this._config.focus) {
this._focustrap.activate();
}
this._isTransitioning = false;
EventHandler.trigger(this._element, EVENT_SHOWN$3, {
relatedTarget
});
};
this._queueCallback(transitionComplete, this._dialog, isAnimated);
}
// Escape either hides the modal (keyboard: true) or plays the
// static-backdrop shake (keyboard: false).
_setEscapeEvent() {
if (this._isShown) {
EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS$1, event => {
if (this._config.keyboard && event.key === ESCAPE_KEY$1) {
event.preventDefault();
this.hide();
} else if (!this._config.keyboard && event.key === ESCAPE_KEY$1) {
this._triggerBackdropTransition();
}
});
} else {
EventHandler.off(this._element, EVENT_KEYDOWN_DISMISS$1);
}
}
_setResizeEvent() {
if (this._isShown) {
EventHandler.on(window, EVENT_RESIZE, () => this._adjustDialog());
} else {
EventHandler.off(window, EVENT_RESIZE);
}
}
_hideModal() {
this._element.style.display = 'none';
this._element.setAttribute('aria-hidden', true);
this._element.removeAttribute('aria-modal');
this._element.removeAttribute('role');
this._isTransitioning = false;
this._backdrop.hide(() => {
document.body.classList.remove(CLASS_NAME_OPEN);
this._resetAdjustments();
this._scrollBar.reset();
EventHandler.trigger(this._element, EVENT_HIDDEN$3);
});
}
// Wire the click-dismiss behavior for the backdrop, then show it.
_showBackdrop(callback) {
EventHandler.on(this._element, EVENT_CLICK_DISMISS, event => {
if (this._ignoreBackdropClick) {
this._ignoreBackdropClick = false;
return;
}
// Only clicks directly on the modal root (the backdrop area) count.
if (event.target !== event.currentTarget) {
return;
}
if (this._config.backdrop === true) {
this.hide();
} else if (this._config.backdrop === 'static') {
this._triggerBackdropTransition();
}
});
this._backdrop.show(callback);
}
_isAnimated() {
return this._element.classList.contains(CLASS_NAME_FADE$3);
}
// Play the "static backdrop" shake animation instead of hiding.
// Cancellable via the hidePrevented event's preventDefault().
_triggerBackdropTransition() {
const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED);
if (hideEvent.defaultPrevented) {
return;
}
const {
classList,
scrollHeight,
style
} = this._element;
const isModalOverflowing = scrollHeight > document.documentElement.clientHeight; // return if the following background transition hasn't yet completed
if (!isModalOverflowing && style.overflowY === 'hidden' || classList.contains(CLASS_NAME_STATIC)) {
return;
}
if (!isModalOverflowing) {
style.overflowY = 'hidden';
}
classList.add(CLASS_NAME_STATIC);
this._queueCallback(() => {
classList.remove(CLASS_NAME_STATIC);
if (!isModalOverflowing) {
this._queueCallback(() => {
style.overflowY = '';
}, this._dialog);
}
}, this._dialog);
this._element.focus();
} // ----------------------------------------------------------------------
// the following methods are used to handle overflowing modals
// ----------------------------------------------------------------------
// Add padding so the dialog doesn't jump when the body scrollbar is
// hidden; the side depends on overflow state and text direction (RTL).
_adjustDialog() {
const isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;
const scrollbarWidth = this._scrollBar.getWidth();
const isBodyOverflowing = scrollbarWidth > 0;
if (!isBodyOverflowing && isModalOverflowing && !isRTL() || isBodyOverflowing && !isModalOverflowing && isRTL()) {
this._element.style.paddingLeft = `${scrollbarWidth}px`;
}
if (isBodyOverflowing && !isModalOverflowing && !isRTL() || !isBodyOverflowing && isModalOverflowing && isRTL()) {
this._element.style.paddingRight = `${scrollbarWidth}px`;
}
}
_resetAdjustments() {
this._element.style.paddingLeft = '';
this._element.style.paddingRight = '';
} // Static
// jQuery bridge: $(el).modal('show', relatedTarget) etc.
static jQueryInterface(config, relatedTarget) {
return this.each(function () {
const data = Modal.getOrCreateInstance(this, config);
if (typeof config !== 'string') {
return;
}
if (typeof data[config] === 'undefined') {
throw new TypeError(`No method named "${config}"`);
}
data[config](relatedTarget);
});
}
}
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
// Delegated click handler for [data-bs-toggle="modal"] triggers.
EventHandler.on(document, EVENT_CLICK_DATA_API$2, SELECTOR_DATA_TOGGLE$2, function (event) {
const target = getElementFromSelector(this);
if (['A', 'AREA'].includes(this.tagName)) {
event.preventDefault();
}
EventHandler.one(target, EVENT_SHOW$3, showEvent => {
if (showEvent.defaultPrevented) {
// only register focus restorer if modal will actually get shown
return;
}
// Return focus to the trigger once the modal is fully hidden.
EventHandler.one(target, EVENT_HIDDEN$3, () => {
if (isVisible(this)) {
this.focus();
}
});
}); // avoid conflict when clicking modal toggler while another one is open
const allReadyOpen = SelectorEngine.findOne(OPEN_SELECTOR$1);
if (allReadyOpen) {
Modal.getInstance(allReadyOpen).hide();
}
const data = Modal.getOrCreateInstance(target);
data.toggle(this);
});
// Wire up [data-bs-dismiss="modal"] elements.
enableDismissTrigger(Modal);
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 * add .Modal to jQuery only if jQuery is present
 */
defineJQueryPlugin(Modal);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): offcanvas.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
// Offcanvas component constants: names, defaults, option types, event
// names, state class names, and selectors.
const NAME$5 = 'offcanvas';
const DATA_KEY$5 = 'bs.offcanvas';
const EVENT_KEY$5 = `.${DATA_KEY$5}`;
const DATA_API_KEY$2 = '.data-api';
const EVENT_LOAD_DATA_API$1 = `load${EVENT_KEY$5}${DATA_API_KEY$2}`;
const ESCAPE_KEY = 'Escape';
const Default$4 = {
backdrop: true,
keyboard: true,
scroll: false
};
const DefaultType$4 = {
backdrop: 'boolean',
keyboard: 'boolean',
scroll: 'boolean'
};
const CLASS_NAME_SHOW$3 = 'show';
const CLASS_NAME_BACKDROP = 'offcanvas-backdrop';
const OPEN_SELECTOR = '.offcanvas.show';
const EVENT_SHOW$2 = `show${EVENT_KEY$5}`;
const EVENT_SHOWN$2 = `shown${EVENT_KEY$5}`;
const EVENT_HIDE$2 = `hide${EVENT_KEY$5}`;
const EVENT_HIDDEN$2 = `hidden${EVENT_KEY$5}`;
const EVENT_CLICK_DATA_API$1 = `click${EVENT_KEY$5}${DATA_API_KEY$2}`;
const EVENT_KEYDOWN_DISMISS = `keydown.dismiss${EVENT_KEY$5}`;
const SELECTOR_DATA_TOGGLE$1 = '[data-bs-toggle="offcanvas"]';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
// Offcanvas (sliding side panel) component: manages its backdrop, focus
// trap, optional body-scroll locking, and show/hide transitions.
class Offcanvas extends BaseComponent {
constructor(element, config) {
super(element);
this._config = this._getConfig(config);
this._isShown = false;
this._backdrop = this._initializeBackDrop();
this._focustrap = this._initializeFocusTrap();
this._addEventListeners();
} // Getters
static get NAME() {
return NAME$5;
}
static get Default() {
return Default$4;
} // Public
toggle(relatedTarget) {
return this._isShown ? this.hide() : this.show(relatedTarget);
}
// Show the panel; cancellable via the show event's preventDefault().
show(relatedTarget) {
if (this._isShown) {
return;
}
const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$2, {
relatedTarget
});
if (showEvent.defaultPrevented) {
return;
}
this._isShown = true;
this._element.style.visibility = 'visible';
this._backdrop.show();
// Unless body scrolling is allowed, lock it (scrollbar compensation included).
if (!this._config.scroll) {
new ScrollBarHelper().hide();
}
this._element.removeAttribute('aria-hidden');
this._element.setAttribute('aria-modal', true);
this._element.setAttribute('role', 'dialog');
this._element.classList.add(CLASS_NAME_SHOW$3);
const completeCallBack = () => {
if (!this._config.scroll) {
this._focustrap.activate();
}
EventHandler.trigger(this._element, EVENT_SHOWN$2, {
relatedTarget
});
};
this._queueCallback(completeCallBack, this._element, true);
}
// Hide the panel; cancellable via the hide event's preventDefault().
hide() {
if (!this._isShown) {
return;
}
const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$2);
if (hideEvent.defaultPrevented) {
return;
}
this._focustrap.deactivate();
this._element.blur();
this._isShown = false;
this._element.classList.remove(CLASS_NAME_SHOW$3);
this._backdrop.hide();
const completeCallback = () => {
this._element.setAttribute('aria-hidden', true);
this._element.removeAttribute('aria-modal');
this._element.removeAttribute('role');
this._element.style.visibility = 'hidden';
if (!this._config.scroll) {
new ScrollBarHelper().reset();
}
EventHandler.trigger(this._element, EVENT_HIDDEN$2);
};
this._queueCallback(completeCallback, this._element, true);
}
dispose() {
this._backdrop.dispose();
this._focustrap.deactivate();
super.dispose();
} // Private
// Merge defaults, data attributes, and the passed config (in that order).
_getConfig(config) {
config = { ...Default$4,
...Manipulator.getDataAttributes(this._element),
...(typeof config === 'object' ? config : {})
};
typeCheckConfig(NAME$5, config, DefaultType$4);
return config;
}
_initializeBackDrop() {
return new Backdrop({
className: CLASS_NAME_BACKDROP,
isVisible: this._config.backdrop,
isAnimated: true,
rootElement: this._element.parentNode,
// Clicking the backdrop dismisses the panel.
clickCallback: () => this.hide()
});
}
_initializeFocusTrap() {
return new FocusTrap({
trapElement: this._element
});
}
// Escape dismisses the panel when the keyboard option is enabled.
_addEventListeners() {
EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS, event => {
if (this._config.keyboard && event.key === ESCAPE_KEY) {
this.hide();
}
});
} // Static
// jQuery bridge: $(el).offcanvas('show') etc.; private members and the
// constructor are not reachable through the string interface.
static jQueryInterface(config) {
return this.each(function () {
const data = Offcanvas.getOrCreateInstance(this, config);
if (typeof config !== 'string') {
return;
}
if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {
throw new TypeError(`No method named "${config}"`);
}
data[config](this);
});
}
}
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
// Delegated click handler for [data-bs-toggle="offcanvas"] triggers.
EventHandler.on(document, EVENT_CLICK_DATA_API$1, SELECTOR_DATA_TOGGLE$1, function (event) {
const target = getElementFromSelector(this);
if (['A', 'AREA'].includes(this.tagName)) {
event.preventDefault();
}
if (isDisabled(this)) {
return;
}
EventHandler.one(target, EVENT_HIDDEN$2, () => {
// focus on trigger when it is closed
if (isVisible(this)) {
this.focus();
}
}); // avoid conflict when clicking a toggler of an offcanvas, while another is open
const allReadyOpen = SelectorEngine.findOne(OPEN_SELECTOR);
if (allReadyOpen && allReadyOpen !== target) {
Offcanvas.getInstance(allReadyOpen).hide();
}
const data = Offcanvas.getOrCreateInstance(target);
data.toggle(this);
});
// Re-show any offcanvas that was server-rendered open.
EventHandler.on(window, EVENT_LOAD_DATA_API$1, () => SelectorEngine.find(OPEN_SELECTOR).forEach(el => Offcanvas.getOrCreateInstance(el).show()));
// Wire up [data-bs-dismiss="offcanvas"] elements.
enableDismissTrigger(Offcanvas);
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 */
defineJQueryPlugin(Offcanvas);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): util/sanitizer.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
// Attributes whose values are URLs and therefore need URL-safety checks.
const uriAttributes = new Set(['background', 'cite', 'href', 'itemtype', 'longdesc', 'poster', 'src', 'xlink:href']);
// Any aria-* attribute is allowed globally (see DefaultAllowlist['*']).
const ARIA_ATTRIBUTE_PATTERN = /^aria-[\w-]*$/i;
/**
 * A pattern that recognizes a commonly useful subset of URLs that are safe.
 *
 * Shoutout to Angular https://github.com/angular/angular/blob/12.2.x/packages/core/src/sanitization/url_sanitizer.ts
 */
const SAFE_URL_PATTERN = /^(?:(?:https?|mailto|ftp|tel|file|sms):|[^#&/:?]*(?:[#/?]|$))/i;
/**
 * A pattern that matches safe data URLs. Only matches image, video and audio types.
 *
 * Shoutout to Angular https://github.com/angular/angular/blob/12.2.x/packages/core/src/sanitization/url_sanitizer.ts
 */
const DATA_URL_PATTERN = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[\d+/a-z]+=*$/i;
// Decide whether an attribute node survives sanitization, given the merged
// allow list for its element (strings are exact names, RegExps are patterns).
const allowedAttribute = (attribute, allowedAttributeList) => {
  const attributeName = attribute.nodeName.toLowerCase();
  if (allowedAttributeList.includes(attributeName)) {
    // URI-valued attributes must additionally carry a safe URL or data: URL.
    if (uriAttributes.has(attributeName)) {
      return Boolean(SAFE_URL_PATTERN.test(attribute.nodeValue) || DATA_URL_PATTERN.test(attribute.nodeValue));
    }
    return true;
  }
  // Otherwise any RegExp entry in the allow list may validate the name.
  const regExpEntries = allowedAttributeList.filter(entry => entry instanceof RegExp);
  return regExpEntries.some(pattern => pattern.test(attributeName));
};
// Default element/attribute allow list for sanitizeHtml. Keys are element
// names; values list the attributes allowed on that element (in addition
// to the global '*' entries).
const DefaultAllowlist = {
// Global attributes allowed on any supplied element below.
'*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],
a: ['target', 'href', 'title', 'rel'],
area: [],
b: [],
br: [],
col: [],
code: [],
div: [],
em: [],
hr: [],
h1: [],
h2: [],
h3: [],
h4: [],
h5: [],
h6: [],
i: [],
img: ['src', 'srcset', 'alt', 'title', 'width', 'height'],
li: [],
ol: [],
p: [],
pre: [],
s: [],
small: [],
span: [],
sub: [],
sup: [],
strong: [],
u: [],
ul: []
};
// Strip disallowed elements and attributes from an HTML string using the
// browser's DOMParser. A custom sanitizeFn, when given, replaces the
// built-in logic entirely.
function sanitizeHtml(unsafeHtml, allowList, sanitizeFn) {
  if (!unsafeHtml.length) {
    return unsafeHtml;
  }
  if (sanitizeFn && typeof sanitizeFn === 'function') {
    return sanitizeFn(unsafeHtml);
  }
  const domParser = new window.DOMParser();
  const createdDocument = domParser.parseFromString(unsafeHtml, 'text/html');
  // Snapshot the node list so removals during iteration are safe.
  const elements = [].concat(...createdDocument.body.querySelectorAll('*'));
  for (const element of elements) {
    const elementName = element.nodeName.toLowerCase();
    // Drop any element that is not explicitly allow-listed.
    if (!Object.keys(allowList).includes(elementName)) {
      element.remove();
      continue;
    }
    // Strip attributes not allowed globally ('*') or for this element.
    const attributes = [].concat(...element.attributes);
    const allowedAttributes = [].concat(allowList['*'] || [], allowList[elementName] || []);
    for (const attribute of attributes) {
      if (!allowedAttribute(attribute, allowedAttributes)) {
        element.removeAttribute(attribute.nodeName);
      }
    }
  }
  return createdDocument.body.innerHTML;
}
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): tooltip.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
// Tooltip component constants: names, option types, defaults, placement
// map, event names, state class names, and selectors.
const NAME$4 = 'tooltip';
const DATA_KEY$4 = 'bs.tooltip';
const EVENT_KEY$4 = `.${DATA_KEY$4}`;
const CLASS_PREFIX$1 = 'bs-tooltip';
// Options that must never be read from data attributes (sanitizer bypass risk).
const DISALLOWED_ATTRIBUTES = new Set(['sanitize', 'allowList', 'sanitizeFn']);
const DefaultType$3 = {
animation: 'boolean',
template: 'string',
title: '(string|element|function)',
trigger: 'string',
delay: '(number|object)',
html: 'boolean',
selector: '(string|boolean)',
placement: '(string|function)',
offset: '(array|string|function)',
container: '(string|element|boolean)',
fallbackPlacements: 'array',
boundary: '(string|element)',
customClass: '(string|function)',
sanitize: 'boolean',
sanitizeFn: '(null|function)',
allowList: 'object',
popperConfig: '(null|object|function)'
};
// Maps placement config values to Popper placements, flipped under RTL.
const AttachmentMap = {
AUTO: 'auto',
TOP: 'top',
RIGHT: isRTL() ? 'left' : 'right',
BOTTOM: 'bottom',
LEFT: isRTL() ? 'right' : 'left'
};
const Default$3 = {
animation: true,
template: '<div class="tooltip" role="tooltip">' + '<div class="tooltip-arrow"></div>' + '<div class="tooltip-inner"></div>' + '</div>',
trigger: 'hover focus',
title: '',
delay: 0,
html: false,
selector: false,
placement: 'top',
offset: [0, 0],
container: false,
fallbackPlacements: ['top', 'right', 'bottom', 'left'],
boundary: 'clippingParents',
customClass: '',
sanitize: true,
sanitizeFn: null,
allowList: DefaultAllowlist,
popperConfig: null
};
const Event$2 = {
HIDE: `hide${EVENT_KEY$4}`,
HIDDEN: `hidden${EVENT_KEY$4}`,
SHOW: `show${EVENT_KEY$4}`,
SHOWN: `shown${EVENT_KEY$4}`,
INSERTED: `inserted${EVENT_KEY$4}`,
CLICK: `click${EVENT_KEY$4}`,
FOCUSIN: `focusin${EVENT_KEY$4}`,
FOCUSOUT: `focusout${EVENT_KEY$4}`,
MOUSEENTER: `mouseenter${EVENT_KEY$4}`,
MOUSELEAVE: `mouseleave${EVENT_KEY$4}`
};
const CLASS_NAME_FADE$2 = 'fade';
const CLASS_NAME_MODAL = 'modal';
const CLASS_NAME_SHOW$2 = 'show';
// Internal hover-intent states used with the show/hide delay timers.
const HOVER_STATE_SHOW = 'show';
const HOVER_STATE_OUT = 'out';
const SELECTOR_TOOLTIP_INNER = '.tooltip-inner';
const SELECTOR_MODAL = `.${CLASS_NAME_MODAL}`;
const EVENT_MODAL_HIDE = 'hide.bs.modal';
const TRIGGER_HOVER = 'hover';
const TRIGGER_FOCUS = 'focus';
const TRIGGER_CLICK = 'click';
const TRIGGER_MANUAL = 'manual';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
/**
 * Tooltip: shows a small overlay (the "tip") near its trigger element,
 * positioned by Popper and driven by the configured triggers
 * (hover / focus / click / manual API calls).
 */
class Tooltip extends BaseComponent {
  constructor(element, config) {
    // Popper is a hard dependency: it performs all tip positioning
    if (typeof Popper__namespace === 'undefined') {
      throw new TypeError('Bootstrap\'s tooltips require Popper (https://popper.js.org)');
    }
    super(element); // private
    this._isEnabled = true; // when false, toggle() is a no-op
    this._timeout = 0; // id of the pending delayed show/hide timer
    this._hoverState = ''; // '', HOVER_STATE_SHOW or HOVER_STATE_OUT
    this._activeTrigger = {}; // which triggers (click/focus/hover) are currently engaged
    this._popper = null; // Protected
    this._config = this._getConfig(config);
    this.tip = null; // lazily-built tip element, see getTipElement()
    this._setListeners();
  } // Getters
  static get Default() {
    return Default$3;
  }
  static get NAME() {
    return NAME$4;
  }
  static get Event() {
    return Event$2;
  }
  static get DefaultType() {
    return DefaultType$3;
  } // Public
  enable() {
    this._isEnabled = true;
  }
  disable() {
    this._isEnabled = false;
  }
  toggleEnabled() {
    this._isEnabled = !this._isEnabled;
  }
  // Toggle visibility. With an event (delegated usage) the click trigger is
  // flipped on the delegated instance; without one, current visibility decides.
  toggle(event) {
    if (!this._isEnabled) {
      return;
    }
    if (event) {
      const context = this._initializeOnDelegatedTarget(event);
      context._activeTrigger.click = !context._activeTrigger.click;
      if (context._isWithActiveTrigger()) {
        context._enter(null, context);
      } else {
        context._leave(null, context);
      }
    } else {
      if (this.getTipElement().classList.contains(CLASS_NAME_SHOW$2)) {
        this._leave(null, this);
        return;
      }
      this._enter(null, this);
    }
  }
  // Tear down timers, modal listener, tip element and the Popper instance
  dispose() {
    clearTimeout(this._timeout);
    EventHandler.off(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);
    if (this.tip) {
      this.tip.remove();
    }
    this._disposePopper();
    super.dispose();
  }
  show() {
    if (this._element.style.display === 'none') {
      throw new Error('Please use show on visible elements');
    }
    if (!(this.isWithContent() && this._isEnabled)) {
      return;
    }
    // show.bs.tooltip may be default-prevented to cancel showing
    const showEvent = EventHandler.trigger(this._element, this.constructor.Event.SHOW);
    const shadowRoot = findShadowRoot(this._element);
    const isInTheDom = shadowRoot === null ? this._element.ownerDocument.documentElement.contains(this._element) : shadowRoot.contains(this._element);
    if (showEvent.defaultPrevented || !isInTheDom) {
      return;
    } // A trick to recreate a tooltip in case a new title is given by using the NOT documented `data-bs-original-title`
    // This will be removed later in favor of a `setContent` method
    if (this.constructor.NAME === 'tooltip' && this.tip && this.getTitle() !== this.tip.querySelector(SELECTOR_TOOLTIP_INNER).innerHTML) {
      this._disposePopper();
      this.tip.remove();
      this.tip = null;
    }
    const tip = this.getTipElement();
    // Unique id so the trigger can reference the tip via aria-describedby
    const tipId = getUID(this.constructor.NAME);
    tip.setAttribute('id', tipId);
    this._element.setAttribute('aria-describedby', tipId);
    if (this._config.animation) {
      tip.classList.add(CLASS_NAME_FADE$2);
    }
    // `placement` may be a function of (tip, trigger element)
    const placement = typeof this._config.placement === 'function' ? this._config.placement.call(this, tip, this._element) : this._config.placement;
    const attachment = this._getAttachment(placement);
    this._addAttachmentClass(attachment);
    const {
      container
    } = this._config;
    Data.set(tip, this.constructor.DATA_KEY, this);
    // Insert the tip into the DOM only if it is not already attached
    if (!this._element.ownerDocument.documentElement.contains(this.tip)) {
      container.append(tip);
      EventHandler.trigger(this._element, this.constructor.Event.INSERTED);
    }
    if (this._popper) {
      this._popper.update();
    } else {
      this._popper = Popper__namespace.createPopper(this._element, tip, this._getPopperConfig(attachment));
    }
    tip.classList.add(CLASS_NAME_SHOW$2);
    const customClass = this._resolvePossibleFunction(this._config.customClass);
    if (customClass) {
      tip.classList.add(...customClass.split(' '));
    } // If this is a touch-enabled device we add extra
    // empty mouseover listeners to the body's immediate children;
    // only needed because of broken event delegation on iOS
    // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html
    if ('ontouchstart' in document.documentElement) {
      [].concat(...document.body.children).forEach(element => {
        EventHandler.on(element, 'mouseover', noop);
      });
    }
    const complete = () => {
      const prevHoverState = this._hoverState;
      this._hoverState = null;
      EventHandler.trigger(this._element, this.constructor.Event.SHOWN);
      // The pointer left while we were animating in: hide again
      if (prevHoverState === HOVER_STATE_OUT) {
        this._leave(null, this);
      }
    };
    const isAnimated = this.tip.classList.contains(CLASS_NAME_FADE$2);
    this._queueCallback(complete, this.tip, isAnimated);
  }
  hide() {
    // Never shown (or already disposed): nothing to hide
    if (!this._popper) {
      return;
    }
    const tip = this.getTipElement();
    const complete = () => {
      if (this._isWithActiveTrigger()) {
        return;
      }
      // Keep the tip in the DOM if a new show started mid-animation
      if (this._hoverState !== HOVER_STATE_SHOW) {
        tip.remove();
      }
      this._cleanTipClass();
      this._element.removeAttribute('aria-describedby');
      EventHandler.trigger(this._element, this.constructor.Event.HIDDEN);
      this._disposePopper();
    };
    // hide.bs.tooltip may be default-prevented to cancel hiding
    const hideEvent = EventHandler.trigger(this._element, this.constructor.Event.HIDE);
    if (hideEvent.defaultPrevented) {
      return;
    }
    tip.classList.remove(CLASS_NAME_SHOW$2); // If this is a touch-enabled device we remove the extra
    // empty mouseover listeners we added for iOS support
    if ('ontouchstart' in document.documentElement) {
      [].concat(...document.body.children).forEach(element => EventHandler.off(element, 'mouseover', noop));
    }
    this._activeTrigger[TRIGGER_CLICK] = false;
    this._activeTrigger[TRIGGER_FOCUS] = false;
    this._activeTrigger[TRIGGER_HOVER] = false;
    const isAnimated = this.tip.classList.contains(CLASS_NAME_FADE$2);
    this._queueCallback(complete, this.tip, isAnimated);
    this._hoverState = '';
  }
  // Ask Popper to recompute the tip position
  update() {
    if (this._popper !== null) {
      this._popper.update();
    }
  } // Protected
  isWithContent() {
    return Boolean(this.getTitle());
  }
  // Lazily build the tip element from the configured template
  getTipElement() {
    if (this.tip) {
      return this.tip;
    }
    const element = document.createElement('div');
    element.innerHTML = this._config.template;
    const tip = element.children[0];
    this.setContent(tip);
    tip.classList.remove(CLASS_NAME_FADE$2, CLASS_NAME_SHOW$2);
    this.tip = tip;
    return this.tip;
  }
  setContent(tip) {
    this._sanitizeAndSetContent(tip, this.getTitle(), SELECTOR_TOOLTIP_INNER);
  }
  // Put `content` into the part of `template` matched by `selector`,
  // or remove that part entirely when there is no content
  _sanitizeAndSetContent(template, content, selector) {
    const templateElement = SelectorEngine.findOne(selector, template);
    if (!content && templateElement) {
      templateElement.remove();
      return;
    } // we use append for html objects to maintain js events
    this.setElementContent(templateElement, content);
  }
  setElementContent(element, content) {
    if (element === null) {
      return;
    }
    if (isElement(content)) {
      content = getElement(content); // content is a DOM node or a jQuery
      if (this._config.html) {
        // Re-append only when the node is not already in place
        if (content.parentNode !== element) {
          element.innerHTML = '';
          element.append(content);
        }
      } else {
        element.textContent = content.textContent;
      }
      return;
    }
    if (this._config.html) {
      // HTML strings go through the sanitizer unless explicitly disabled
      if (this._config.sanitize) {
        content = sanitizeHtml(content, this._config.allowList, this._config.sanitizeFn);
      }
      element.innerHTML = content;
    } else {
      element.textContent = content;
    }
  }
  getTitle() {
    // `data-bs-original-title` (set by _fixTitle) wins over the config title
    const title = this._element.getAttribute('data-bs-original-title') || this._config.title;
    return this._resolvePossibleFunction(title);
  }
  // Translate Popper's left/right to Bootstrap's start/end class suffixes
  updateAttachment(attachment) {
    if (attachment === 'right') {
      return 'end';
    }
    if (attachment === 'left') {
      return 'start';
    }
    return attachment;
  } // Private
  // For delegated triggers: get/create the instance bound to the real target
  _initializeOnDelegatedTarget(event, context) {
    return context || this.constructor.getOrCreateInstance(event.delegateTarget, this._getDelegateConfig());
  }
  // Normalize the `offset` option into the form Popper's offset modifier accepts
  _getOffset() {
    const {
      offset
    } = this._config;
    if (typeof offset === 'string') {
      return offset.split(',').map(val => Number.parseInt(val, 10));
    }
    if (typeof offset === 'function') {
      return popperData => offset(popperData, this._element);
    }
    return offset;
  }
  _resolvePossibleFunction(content) {
    return typeof content === 'function' ? content.call(this._element) : content;
  }
  _getPopperConfig(attachment) {
    const defaultBsPopperConfig = {
      placement: attachment,
      modifiers: [{
        name: 'flip',
        options: {
          fallbackPlacements: this._config.fallbackPlacements
        }
      }, {
        name: 'offset',
        options: {
          offset: this._getOffset()
        }
      }, {
        name: 'preventOverflow',
        options: {
          boundary: this._config.boundary
        }
      }, {
        name: 'arrow',
        options: {
          element: `.${this.constructor.NAME}-arrow`
        }
      }, {
        name: 'onChange',
        enabled: true,
        phase: 'afterWrite',
        fn: data => this._handlePopperPlacementChange(data)
      }],
      onFirstUpdate: data => {
        if (data.options.placement !== data.placement) {
          this._handlePopperPlacementChange(data);
        }
      }
    };
    // A user-supplied popperConfig (object or function) overrides the defaults
    return { ...defaultBsPopperConfig,
      ...(typeof this._config.popperConfig === 'function' ? this._config.popperConfig(defaultBsPopperConfig) : this._config.popperConfig)
    };
  }
  _addAttachmentClass(attachment) {
    this.getTipElement().classList.add(`${this._getBasicClassPrefix()}-${this.updateAttachment(attachment)}`);
  }
  _getAttachment(placement) {
    return AttachmentMap[placement.toUpperCase()];
  }
  // Wire up the configured triggers (click / hover / focus; `manual` binds nothing)
  _setListeners() {
    const triggers = this._config.trigger.split(' ');
    triggers.forEach(trigger => {
      if (trigger === 'click') {
        EventHandler.on(this._element, this.constructor.Event.CLICK, this._config.selector, event => this.toggle(event));
      } else if (trigger !== TRIGGER_MANUAL) {
        const eventIn = trigger === TRIGGER_HOVER ? this.constructor.Event.MOUSEENTER : this.constructor.Event.FOCUSIN;
        const eventOut = trigger === TRIGGER_HOVER ? this.constructor.Event.MOUSELEAVE : this.constructor.Event.FOCUSOUT;
        EventHandler.on(this._element, eventIn, this._config.selector, event => this._enter(event));
        EventHandler.on(this._element, eventOut, this._config.selector, event => this._leave(event));
      }
    });
    // Hide the tooltip when an enclosing modal is hidden
    this._hideModalHandler = () => {
      if (this._element) {
        this.hide();
      }
    };
    EventHandler.on(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);
    if (this._config.selector) {
      // Delegated mode: the per-target instances handle their own triggers
      this._config = { ...this._config,
        trigger: 'manual',
        selector: ''
      };
    } else {
      this._fixTitle();
    }
  }
  // Move the native `title` attribute aside (into `data-bs-original-title`)
  // so the browser's built-in tooltip is suppressed
  _fixTitle() {
    const title = this._element.getAttribute('title');
    const originalTitleType = typeof this._element.getAttribute('data-bs-original-title');
    if (title || originalTitleType !== 'string') {
      this._element.setAttribute('data-bs-original-title', title || '');
      // Preserve accessibility for elements with no other accessible name
      if (title && !this._element.getAttribute('aria-label') && !this._element.textContent) {
        this._element.setAttribute('aria-label', title);
      }
      this._element.setAttribute('title', '');
    }
  }
  // Enter the "show" state, honoring the configured show delay
  _enter(event, context) {
    context = this._initializeOnDelegatedTarget(event, context);
    if (event) {
      context._activeTrigger[event.type === 'focusin' ? TRIGGER_FOCUS : TRIGGER_HOVER] = true;
    }
    if (context.getTipElement().classList.contains(CLASS_NAME_SHOW$2) || context._hoverState === HOVER_STATE_SHOW) {
      context._hoverState = HOVER_STATE_SHOW;
      return;
    }
    clearTimeout(context._timeout);
    context._hoverState = HOVER_STATE_SHOW;
    if (!context._config.delay || !context._config.delay.show) {
      context.show();
      return;
    }
    // Only show if the pointer is still over the trigger when the delay elapses
    context._timeout = setTimeout(() => {
      if (context._hoverState === HOVER_STATE_SHOW) {
        context.show();
      }
    }, context._config.delay.show);
  }
  // Enter the "out" state, honoring the configured hide delay
  _leave(event, context) {
    context = this._initializeOnDelegatedTarget(event, context);
    if (event) {
      context._activeTrigger[event.type === 'focusout' ? TRIGGER_FOCUS : TRIGGER_HOVER] = context._element.contains(event.relatedTarget);
    }
    if (context._isWithActiveTrigger()) {
      return;
    }
    clearTimeout(context._timeout);
    context._hoverState = HOVER_STATE_OUT;
    if (!context._config.delay || !context._config.delay.hide) {
      context.hide();
      return;
    }
    context._timeout = setTimeout(() => {
      if (context._hoverState === HOVER_STATE_OUT) {
        context.hide();
      }
    }, context._config.delay.hide);
  }
  // True when any trigger (click/focus/hover) is still engaged
  _isWithActiveTrigger() {
    for (const trigger in this._activeTrigger) {
      if (this._activeTrigger[trigger]) {
        return true;
      }
    }
    return false;
  }
  // Merge defaults, data attributes and the user config; normalize values
  _getConfig(config) {
    const dataAttributes = Manipulator.getDataAttributes(this._element);
    // Sanitizer-related options must not be settable from markup
    Object.keys(dataAttributes).forEach(dataAttr => {
      if (DISALLOWED_ATTRIBUTES.has(dataAttr)) {
        delete dataAttributes[dataAttr];
      }
    });
    config = { ...this.constructor.Default,
      ...dataAttributes,
      ...(typeof config === 'object' && config ? config : {})
    };
    config.container = config.container === false ? document.body : getElement(config.container);
    // A numeric delay applies to both show and hide
    if (typeof config.delay === 'number') {
      config.delay = {
        show: config.delay,
        hide: config.delay
      };
    }
    if (typeof config.title === 'number') {
      config.title = config.title.toString();
    }
    if (typeof config.content === 'number') {
      config.content = config.content.toString();
    }
    typeCheckConfig(NAME$4, config, this.constructor.DefaultType);
    if (config.sanitize) {
      config.template = sanitizeHtml(config.template, config.allowList, config.sanitizeFn);
    }
    return config;
  }
  // Config subset that differs from the defaults, passed to delegated instances
  _getDelegateConfig() {
    const config = {};
    for (const key in this._config) {
      if (this.constructor.Default[key] !== this._config[key]) {
        config[key] = this._config[key];
      }
    } // In the future can be replaced with:
    // const keysWithDifferentValues = Object.entries(this._config).filter(entry => this.constructor.Default[entry[0]] !== this._config[entry[0]])
    // `Object.fromEntries(keysWithDifferentValues)`
    return config;
  }
  // Strip any stale `bs-tooltip-*` placement classes from the tip
  _cleanTipClass() {
    const tip = this.getTipElement();
    const basicClassPrefixRegex = new RegExp(`(^|\\s)${this._getBasicClassPrefix()}\\S+`, 'g');
    const tabClass = tip.getAttribute('class').match(basicClassPrefixRegex);
    if (tabClass !== null && tabClass.length > 0) {
      tabClass.map(token => token.trim()).forEach(tClass => tip.classList.remove(tClass));
    }
  }
  _getBasicClassPrefix() {
    return CLASS_PREFIX$1;
  }
  // Keep the attachment class in sync when Popper flips the placement
  _handlePopperPlacementChange(popperData) {
    const {
      state
    } = popperData;
    if (!state) {
      return;
    }
    this.tip = state.elements.popper;
    this._cleanTipClass();
    this._addAttachmentClass(this._getAttachment(state.placement));
  }
  _disposePopper() {
    if (this._popper) {
      this._popper.destroy();
      this._popper = null;
    }
  } // Static
  static jQueryInterface(config) {
    return this.each(function () {
      const data = Tooltip.getOrCreateInstance(this, config);
      if (typeof config === 'string') {
        if (typeof data[config] === 'undefined') {
          throw new TypeError(`No method named "${config}"`);
        }
        data[config]();
      }
    });
  }
}
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 * add .Tooltip to jQuery only if jQuery is present
 */
// Registers `$.fn.tooltip` when jQuery is on the page; a no-op otherwise
defineJQueryPlugin(Tooltip);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): popover.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
// Component identifiers used for data storage and namespaced events
const NAME$3 = 'popover';
const DATA_KEY$3 = 'bs.popover';
const EVENT_KEY$3 = `.${DATA_KEY$3}`;
const CLASS_PREFIX = 'bs-popover';
// Popover inherits the Tooltip defaults and overrides a few of them,
// adding a `content` option for the popover body
const Default$2 = { ...Tooltip.Default,
  placement: 'right',
  offset: [0, 8],
  trigger: 'click',
  content: '',
  template: '<div class="popover" role="tooltip">' + '<div class="popover-arrow"></div>' + '<h3 class="popover-header"></h3>' + '<div class="popover-body"></div>' + '</div>'
};
const DefaultType$2 = { ...Tooltip.DefaultType,
  content: '(string|element|function)'
};
// Namespaced event names fired on the popover's trigger element
const Event$1 = {
  HIDE: `hide${EVENT_KEY$3}`,
  HIDDEN: `hidden${EVENT_KEY$3}`,
  SHOW: `show${EVENT_KEY$3}`,
  SHOWN: `shown${EVENT_KEY$3}`,
  INSERTED: `inserted${EVENT_KEY$3}`,
  CLICK: `click${EVENT_KEY$3}`,
  FOCUSIN: `focusin${EVENT_KEY$3}`,
  FOCUSOUT: `focusout${EVENT_KEY$3}`,
  MOUSEENTER: `mouseenter${EVENT_KEY$3}`,
  MOUSELEAVE: `mouseleave${EVENT_KEY$3}`
};
// Selectors for the two content areas of the popover template
const SELECTOR_TITLE = '.popover-header';
const SELECTOR_CONTENT = '.popover-body';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
/**
 * Popover: a Tooltip variant with a separate header and body,
 * triggered by click by default.
 */
class Popover extends Tooltip {
  // Getters
  static get NAME() {
    return NAME$3;
  }
  static get Default() {
    return Default$2;
  }
  static get DefaultType() {
    return DefaultType$2;
  }
  static get Event() {
    return Event$1;
  } // Overrides
  isWithContent() {
    // Worth showing when there is either a title or body content
    return this.getTitle() || this._getContent();
  }
  setContent(tip) {
    // Header and body are filled (or stripped when empty) independently
    this._sanitizeAndSetContent(tip, this.getTitle(), SELECTOR_TITLE);
    this._sanitizeAndSetContent(tip, this._getContent(), SELECTOR_CONTENT);
  } // Private
  _getContent() {
    // `content` may be a string, an element, or a function of the trigger
    return this._resolvePossibleFunction(this._config.content);
  }
  _getBasicClassPrefix() {
    return CLASS_PREFIX;
  } // Static
  static jQueryInterface(config) {
    return this.each(function () {
      const data = Popover.getOrCreateInstance(this, config);
      if (typeof config !== 'string') {
        return;
      }
      if (typeof data[config] === 'undefined') {
        throw new TypeError(`No method named "${config}"`);
      }
      data[config]();
    });
  }
}
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 * add .Popover to jQuery only if jQuery is present
 */
// Registers `$.fn.popover` when jQuery is on the page; a no-op otherwise
defineJQueryPlugin(Popover);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): scrollspy.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
// Component identifiers used for data storage and namespaced events
const NAME$2 = 'scrollspy';
const DATA_KEY$2 = 'bs.scrollspy';
const EVENT_KEY$2 = `.${DATA_KEY$2}`;
const DATA_API_KEY$1 = '.data-api';
// Default configuration and the expected option types
const Default$1 = {
  offset: 10,
  method: 'auto',
  target: ''
};
const DefaultType$1 = {
  offset: 'number',
  method: 'string',
  target: '(string|element)'
};
// Events: `activate` is fired on the scroll element when a link activates
const EVENT_ACTIVATE = `activate${EVENT_KEY$2}`;
const EVENT_SCROLL = `scroll${EVENT_KEY$2}`;
const EVENT_LOAD_DATA_API = `load${EVENT_KEY$2}${DATA_API_KEY$1}`;
const CLASS_NAME_DROPDOWN_ITEM = 'dropdown-item';
const CLASS_NAME_ACTIVE$1 = 'active';
// Selectors for the spied container and the link kinds that can be activated
const SELECTOR_DATA_SPY = '[data-bs-spy="scroll"]';
const SELECTOR_NAV_LIST_GROUP$1 = '.nav, .list-group';
const SELECTOR_NAV_LINKS = '.nav-link';
const SELECTOR_NAV_ITEMS = '.nav-item';
const SELECTOR_LIST_ITEMS = '.list-group-item';
const SELECTOR_LINK_ITEMS = `${SELECTOR_NAV_LINKS}, ${SELECTOR_LIST_ITEMS}, .${CLASS_NAME_DROPDOWN_ITEM}`;
const SELECTOR_DROPDOWN$1 = '.dropdown';
const SELECTOR_DROPDOWN_TOGGLE$1 = '.dropdown-toggle';
// Offset-measurement strategies (see ScrollSpy#refresh)
const METHOD_OFFSET = 'offset';
const METHOD_POSITION = 'position';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
/**
 * ScrollSpy: watches scrolling on its element (or the window when spying on
 * <body>) and toggles the `active` class on the nav / list-group link whose
 * target section is currently in view.
 */
class ScrollSpy extends BaseComponent {
  constructor(element, config) {
    super(element);
    // Scroll events come from the window when spying on <body>
    this._scrollElement = this._element.tagName === 'BODY' ? window : this._element;
    this._config = this._getConfig(config);
    this._offsets = []; // scroll offset of each observed section, sorted ascending
    this._targets = []; // selector of the link target matching each offset
    this._activeTarget = null;
    this._scrollHeight = 0;
    EventHandler.on(this._scrollElement, EVENT_SCROLL, () => this._process());
    this.refresh();
    this._process();
  } // Getters
  static get Default() {
    return Default$1;
  }
  static get NAME() {
    return NAME$2;
  } // Public
  // Rebuild the offset/target lists from the current DOM and scroll position
  refresh() {
    const autoMethod = this._scrollElement === this._scrollElement.window ? METHOD_OFFSET : METHOD_POSITION;
    const offsetMethod = this._config.method === 'auto' ? autoMethod : this._config.method;
    const offsetBase = offsetMethod === METHOD_POSITION ? this._getScrollTop() : 0;
    this._offsets = [];
    this._targets = [];
    this._scrollHeight = this._getScrollHeight();
    const targets = SelectorEngine.find(SELECTOR_LINK_ITEMS, this._config.target);
    targets.map(element => {
      const targetSelector = getSelectorFromElement(element);
      const target = targetSelector ? SelectorEngine.findOne(targetSelector) : null;
      if (target) {
        const targetBCR = target.getBoundingClientRect();
        // Skip sections that are not rendered (zero-size bounding box)
        if (targetBCR.width || targetBCR.height) {
          return [Manipulator[offsetMethod](target).top + offsetBase, targetSelector];
        }
      }
      return null;
    }).filter(item => item).sort((a, b) => a[0] - b[0]).forEach(item => {
      this._offsets.push(item[0]);
      this._targets.push(item[1]);
    });
  }
  dispose() {
    EventHandler.off(this._scrollElement, EVENT_KEY$2);
    super.dispose();
  } // Private
  // Merge defaults, data attributes and the user config; resolve the target
  _getConfig(config) {
    config = { ...Default$1,
      ...Manipulator.getDataAttributes(this._element),
      ...(typeof config === 'object' && config ? config : {})
    };
    config.target = getElement(config.target) || document.documentElement;
    typeCheckConfig(NAME$2, config, DefaultType$1);
    return config;
  }
  _getScrollTop() {
    return this._scrollElement === window ? this._scrollElement.pageYOffset : this._scrollElement.scrollTop;
  }
  _getScrollHeight() {
    return this._scrollElement.scrollHeight || Math.max(document.body.scrollHeight, document.documentElement.scrollHeight);
  }
  _getOffsetHeight() {
    return this._scrollElement === window ? window.innerHeight : this._scrollElement.getBoundingClientRect().height;
  }
  // Determine which section is in view and activate its link
  _process() {
    const scrollTop = this._getScrollTop() + this._config.offset;
    const scrollHeight = this._getScrollHeight();
    const maxScroll = this._config.offset + scrollHeight - this._getOffsetHeight();
    if (this._scrollHeight !== scrollHeight) {
      // Content height changed since last refresh: recompute offsets
      this.refresh();
    }
    if (scrollTop >= maxScroll) {
      // Scrolled to (or past) the bottom: the last target wins
      const target = this._targets[this._targets.length - 1];
      if (this._activeTarget !== target) {
        this._activate(target);
      }
      return;
    }
    if (this._activeTarget && scrollTop < this._offsets[0] && this._offsets[0] > 0) {
      // Above the first section: nothing is active
      this._activeTarget = null;
      this._clear();
      return;
    }
    for (let i = this._offsets.length; i--;) {
      const isActiveTarget = this._activeTarget !== this._targets[i] && scrollTop >= this._offsets[i] && (typeof this._offsets[i + 1] === 'undefined' || scrollTop < this._offsets[i + 1]);
      if (isActiveTarget) {
        this._activate(this._targets[i]);
      }
    }
  }
  _activate(target) {
    this._activeTarget = target;
    this._clear();
    // Match links either by data-bs-target or by href
    const queries = SELECTOR_LINK_ITEMS.split(',').map(selector => `${selector}[data-bs-target="${target}"],${selector}[href="${target}"]`);
    const link = SelectorEngine.findOne(queries.join(','), this._config.target);
    link.classList.add(CLASS_NAME_ACTIVE$1);
    if (link.classList.contains(CLASS_NAME_DROPDOWN_ITEM)) {
      // A dropdown item also activates its dropdown toggle
      SelectorEngine.findOne(SELECTOR_DROPDOWN_TOGGLE$1, link.closest(SELECTOR_DROPDOWN$1)).classList.add(CLASS_NAME_ACTIVE$1);
    } else {
      SelectorEngine.parents(link, SELECTOR_NAV_LIST_GROUP$1).forEach(listGroup => {
        // Set triggered links parents as active
        // With both <ul> and <nav> markup a parent is the previous sibling of any nav ancestor
        SelectorEngine.prev(listGroup, `${SELECTOR_NAV_LINKS}, ${SELECTOR_LIST_ITEMS}`).forEach(item => item.classList.add(CLASS_NAME_ACTIVE$1)); // Handle special case when .nav-link is inside .nav-item
        SelectorEngine.prev(listGroup, SELECTOR_NAV_ITEMS).forEach(navItem => {
          SelectorEngine.children(navItem, SELECTOR_NAV_LINKS).forEach(item => item.classList.add(CLASS_NAME_ACTIVE$1));
        });
      });
    }
    EventHandler.trigger(this._scrollElement, EVENT_ACTIVATE, {
      relatedTarget: target
    });
  }
  // Remove the `active` class from every spied link
  _clear() {
    SelectorEngine.find(SELECTOR_LINK_ITEMS, this._config.target).filter(node => node.classList.contains(CLASS_NAME_ACTIVE$1)).forEach(node => node.classList.remove(CLASS_NAME_ACTIVE$1));
  } // Static
  static jQueryInterface(config) {
    return this.each(function () {
      const data = ScrollSpy.getOrCreateInstance(this, config);
      if (typeof config !== 'string') {
        return;
      }
      if (typeof data[config] === 'undefined') {
        throw new TypeError(`No method named "${config}"`);
      }
      data[config]();
    });
  }
}
/**
 * ------------------------------------------------------------------------
 * Data Api implementation
 * ------------------------------------------------------------------------
 */
// Auto-initialize a ScrollSpy on every `[data-bs-spy="scroll"]` element at load
EventHandler.on(window, EVENT_LOAD_DATA_API, () => {
  SelectorEngine.find(SELECTOR_DATA_SPY).forEach(spy => new ScrollSpy(spy));
});
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 * add .ScrollSpy to jQuery only if jQuery is present
 */
defineJQueryPlugin(ScrollSpy);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): tab.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
// Component identifiers used for data storage and namespaced events
const NAME$1 = 'tab';
const DATA_KEY$1 = 'bs.tab';
const EVENT_KEY$1 = `.${DATA_KEY$1}`;
const DATA_API_KEY = '.data-api';
// Lifecycle events fired around showing/hiding a tab
const EVENT_HIDE$1 = `hide${EVENT_KEY$1}`;
const EVENT_HIDDEN$1 = `hidden${EVENT_KEY$1}`;
const EVENT_SHOW$1 = `show${EVENT_KEY$1}`;
const EVENT_SHOWN$1 = `shown${EVENT_KEY$1}`;
const EVENT_CLICK_DATA_API = `click${EVENT_KEY$1}${DATA_API_KEY}`;
// CSS class names toggled while switching tabs
const CLASS_NAME_DROPDOWN_MENU = 'dropdown-menu';
const CLASS_NAME_ACTIVE = 'active';
const CLASS_NAME_FADE$1 = 'fade';
const CLASS_NAME_SHOW$1 = 'show';
// Selectors for tab containers, active items, and dropdown integration
const SELECTOR_DROPDOWN = '.dropdown';
const SELECTOR_NAV_LIST_GROUP = '.nav, .list-group';
const SELECTOR_ACTIVE = '.active';
const SELECTOR_ACTIVE_UL = ':scope > li > .active';
const SELECTOR_DATA_TOGGLE = '[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]';
const SELECTOR_DROPDOWN_TOGGLE = '.dropdown-toggle';
const SELECTOR_DROPDOWN_ACTIVE_CHILD = ':scope > .dropdown-menu .active';
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
/**
 * Tab: activates the invoked tab/pill/list item, deactivates the previously
 * active sibling, and swaps the associated tab panes.
 */
class Tab extends BaseComponent {
  // Getters
  static get NAME() {
    return NAME$1;
  } // Public
  show() {
    // Already-active tabs (still attached to a live parent) need no work
    if (this._element.parentNode && this._element.parentNode.nodeType === Node.ELEMENT_NODE && this._element.classList.contains(CLASS_NAME_ACTIVE)) {
      return;
    }
    let previous;
    const target = getElementFromSelector(this._element);
    const listElement = this._element.closest(SELECTOR_NAV_LIST_GROUP);
    if (listElement) {
      // <ul>/<ol> containers need the scoped per-<li> active selector
      const itemSelector = listElement.nodeName === 'UL' || listElement.nodeName === 'OL' ? SELECTOR_ACTIVE_UL : SELECTOR_ACTIVE;
      previous = SelectorEngine.find(itemSelector, listElement);
      previous = previous[previous.length - 1];
    }
    // hide.bs.tab on the outgoing tab, show.bs.tab on the incoming one;
    // either may be default-prevented to cancel the switch
    const hideEvent = previous ? EventHandler.trigger(previous, EVENT_HIDE$1, {
      relatedTarget: this._element
    }) : null;
    const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$1, {
      relatedTarget: previous
    });
    if (showEvent.defaultPrevented || hideEvent !== null && hideEvent.defaultPrevented) {
      return;
    }
    this._activate(this._element, listElement);
    // hidden/shown fire only after the (possible) pane transition completes
    const complete = () => {
      EventHandler.trigger(previous, EVENT_HIDDEN$1, {
        relatedTarget: this._element
      });
      EventHandler.trigger(this._element, EVENT_SHOWN$1, {
        relatedTarget: previous
      });
    };
    if (target) {
      // Also activate the linked tab pane
      this._activate(target, target.parentNode, complete);
    } else {
      complete();
    }
  } // Private
  _activate(element, container, callback) {
    const activeElements = container && (container.nodeName === 'UL' || container.nodeName === 'OL') ? SelectorEngine.find(SELECTOR_ACTIVE_UL, container) : SelectorEngine.children(container, SELECTOR_ACTIVE);
    const active = activeElements[0];
    // Only wait for a CSS transition when the outgoing element is fading
    const isTransitioning = callback && active && active.classList.contains(CLASS_NAME_FADE$1);
    const complete = () => this._transitionComplete(element, active, callback);
    if (active && isTransitioning) {
      active.classList.remove(CLASS_NAME_SHOW$1);
      this._queueCallback(complete, element, true);
    } else {
      complete();
    }
  }
  _transitionComplete(element, active, callback) {
    if (active) {
      // Deactivate the outgoing item (and any active dropdown child)
      active.classList.remove(CLASS_NAME_ACTIVE);
      const dropdownChild = SelectorEngine.findOne(SELECTOR_DROPDOWN_ACTIVE_CHILD, active.parentNode);
      if (dropdownChild) {
        dropdownChild.classList.remove(CLASS_NAME_ACTIVE);
      }
      if (active.getAttribute('role') === 'tab') {
        active.setAttribute('aria-selected', false);
      }
    }
    element.classList.add(CLASS_NAME_ACTIVE);
    if (element.getAttribute('role') === 'tab') {
      element.setAttribute('aria-selected', true);
    }
    // Force a reflow so the subsequent `show` class change animates
    reflow(element);
    if (element.classList.contains(CLASS_NAME_FADE$1)) {
      element.classList.add(CLASS_NAME_SHOW$1);
    }
    let parent = element.parentNode;
    if (parent && parent.nodeName === 'LI') {
      parent = parent.parentNode;
    }
    if (parent && parent.classList.contains(CLASS_NAME_DROPDOWN_MENU)) {
      // A tab inside a dropdown also activates its dropdown toggle
      const dropdownElement = element.closest(SELECTOR_DROPDOWN);
      if (dropdownElement) {
        SelectorEngine.find(SELECTOR_DROPDOWN_TOGGLE, dropdownElement).forEach(dropdown => dropdown.classList.add(CLASS_NAME_ACTIVE));
      }
      element.setAttribute('aria-expanded', true);
    }
    if (callback) {
      callback();
    }
  } // Static
  static jQueryInterface(config) {
    return this.each(function () {
      const data = Tab.getOrCreateInstance(this);
      if (typeof config === 'string') {
        if (typeof data[config] === 'undefined') {
          throw new TypeError(`No method named "${config}"`);
        }
        data[config]();
      }
    });
  }
}
/**
 * ------------------------------------------------------------------------
 * Data Api implementation
 * ------------------------------------------------------------------------
 */
// Delegated click handler: any `[data-bs-toggle="tab|pill|list"]` element
// shows its tab, unless the element is disabled
EventHandler.on(document, EVENT_CLICK_DATA_API, SELECTOR_DATA_TOGGLE, function (event) {
  // Stop anchors/areas from navigating to their href fragment
  if (['A', 'AREA'].includes(this.tagName)) {
    event.preventDefault();
  }
  if (isDisabled(this)) {
    return;
  }
  const data = Tab.getOrCreateInstance(this);
  data.show();
});
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 * add .Tab to jQuery only if jQuery is present
 */
defineJQueryPlugin(Tab);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): toast.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
// Component identifiers used for data storage and namespaced events
const NAME = 'toast';
const DATA_KEY = 'bs.toast';
const EVENT_KEY = `.${DATA_KEY}`;
// Interaction events that pause/resume the autohide timer, plus lifecycle events
const EVENT_MOUSEOVER = `mouseover${EVENT_KEY}`;
const EVENT_MOUSEOUT = `mouseout${EVENT_KEY}`;
const EVENT_FOCUSIN = `focusin${EVENT_KEY}`;
const EVENT_FOCUSOUT = `focusout${EVENT_KEY}`;
const EVENT_HIDE = `hide${EVENT_KEY}`;
const EVENT_HIDDEN = `hidden${EVENT_KEY}`;
const EVENT_SHOW = `show${EVENT_KEY}`;
const EVENT_SHOWN = `shown${EVENT_KEY}`;
// CSS class names toggled while showing/hiding the toast
const CLASS_NAME_FADE = 'fade';
const CLASS_NAME_HIDE = 'hide'; // @deprecated - kept here only for backwards compatibility
const CLASS_NAME_SHOW = 'show';
const CLASS_NAME_SHOWING = 'showing';
// Expected option types and their default values
const DefaultType = {
  animation: 'boolean',
  autohide: 'boolean',
  delay: 'number'
};
const Default = {
  animation: true,
  autohide: true,
  delay: 5000
};
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
/**
 * Toast: a transient notification element that can auto-hide after a
 * configurable delay, pausing the timer while the user hovers or focuses it.
 */
class Toast extends BaseComponent {
  constructor(element, config) {
    super(element);
    this._config = this._getConfig(config);
    this._timeout = null; // handle of the pending autohide setTimeout, if any
    // Interaction flags; while either is set, autohide is suspended.
    this._hasMouseInteraction = false;
    this._hasKeyboardInteraction = false;

    this._setListeners();
  } // Getters

  static get DefaultType() {
    return DefaultType;
  }

  static get Default() {
    return Default;
  }

  static get NAME() {
    return NAME;
  } // Public

  // Show the toast. Fires "show" (cancellable) then "shown", and arms the
  // autohide timer once the show transition completes.
  show() {
    const showEvent = EventHandler.trigger(this._element, EVENT_SHOW);

    if (showEvent.defaultPrevented) {
      return;
    }

    this._clearTimeout();

    if (this._config.animation) {
      this._element.classList.add(CLASS_NAME_FADE);
    }

    const complete = () => {
      this._element.classList.remove(CLASS_NAME_SHOWING);
      EventHandler.trigger(this._element, EVENT_SHOWN);

      this._maybeScheduleHide();
    };

    this._element.classList.remove(CLASS_NAME_HIDE); // @deprecated

    // Force a reflow so the CSS transition actually runs.
    reflow(this._element);

    this._element.classList.add(CLASS_NAME_SHOW);
    this._element.classList.add(CLASS_NAME_SHOWING);

    this._queueCallback(complete, this._element, this._config.animation);
  }

  // Hide the toast. Fires "hide" (cancellable) then "hidden". No-op when
  // the toast is not currently shown.
  hide() {
    if (!this._element.classList.contains(CLASS_NAME_SHOW)) {
      return;
    }

    const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE);

    if (hideEvent.defaultPrevented) {
      return;
    }

    const complete = () => {
      this._element.classList.add(CLASS_NAME_HIDE); // @deprecated

      this._element.classList.remove(CLASS_NAME_SHOWING);
      this._element.classList.remove(CLASS_NAME_SHOW);
      EventHandler.trigger(this._element, EVENT_HIDDEN);
    };

    this._element.classList.add(CLASS_NAME_SHOWING);

    this._queueCallback(complete, this._element, this._config.animation);
  }

  dispose() {
    this._clearTimeout();

    if (this._element.classList.contains(CLASS_NAME_SHOW)) {
      this._element.classList.remove(CLASS_NAME_SHOW);
    }

    super.dispose();
  } // Private

  // Merge defaults, element data attributes and user config, then type-check.
  _getConfig(config) {
    config = { ...Default,
      ...Manipulator.getDataAttributes(this._element),
      ...(typeof config === 'object' && config ? config : {})
    };
    typeCheckConfig(NAME, config, this.constructor.DefaultType);
    return config;
  }

  // Arm the autohide timer, unless autohide is disabled or the user is
  // currently interacting with the toast.
  _maybeScheduleHide() {
    if (!this._config.autohide) {
      return;
    }

    if (this._hasMouseInteraction || this._hasKeyboardInteraction) {
      return;
    }

    this._timeout = setTimeout(() => {
      this.hide();
    }, this._config.delay);
  }

  // Track mouse/keyboard interaction state; pause autohide while
  // interacting and re-arm once pointer/focus fully leaves the element.
  _onInteraction(event, isInteracting) {
    switch (event.type) {
      case 'mouseover':
      case 'mouseout':
        this._hasMouseInteraction = isInteracting;
        break;

      case 'focusin':
      case 'focusout':
        this._hasKeyboardInteraction = isInteracting;
        break;
    }

    if (isInteracting) {
      this._clearTimeout();
      return;
    }

    const nextElement = event.relatedTarget;

    // Ignore transitions between descendants of the toast itself.
    if (this._element === nextElement || this._element.contains(nextElement)) {
      return;
    }

    this._maybeScheduleHide();
  }

  _setListeners() {
    EventHandler.on(this._element, EVENT_MOUSEOVER, event => this._onInteraction(event, true));
    EventHandler.on(this._element, EVENT_MOUSEOUT, event => this._onInteraction(event, false));
    EventHandler.on(this._element, EVENT_FOCUSIN, event => this._onInteraction(event, true));
    EventHandler.on(this._element, EVENT_FOCUSOUT, event => this._onInteraction(event, false));
  }

  _clearTimeout() {
    clearTimeout(this._timeout);
    this._timeout = null;
  } // Static

  // jQuery bridge: $(el).toast(config) / $(el).toast('show').
  static jQueryInterface(config) {
    return this.each(function () {
      const data = Toast.getOrCreateInstance(this, config);

      if (typeof config === 'string') {
        if (typeof data[config] === 'undefined') {
          throw new TypeError(`No method named "${config}"`);
        }

        data[config](this);
      }
    });
  }

}
// Wire up declarative dismissal via [data-bs-dismiss="toast"] triggers.
enableDismissTrigger(Toast);
/**
 * ------------------------------------------------------------------------
 * jQuery
 * ------------------------------------------------------------------------
 * add .Toast to jQuery only if jQuery is present
 */

defineJQueryPlugin(Toast);
/**
* --------------------------------------------------------------------------
* Bootstrap (v5.1.3): index.umd.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
// Public UMD export: the collected Bootstrap component classes.
const index_umd = {
  Alert,
  Button,
  Carousel,
  Collapse,
  Dropdown,
  Modal,
  Offcanvas,
  Popover,
  ScrollSpy,
  Tab,
  Toast,
  Tooltip
};
return index_umd;
}));
//# sourceMappingURL=bootstrap.js.map | PypiClean |
/Nuitka_winsvc-1.7.10-cp310-cp310-win_amd64.whl/nuitka/freezer/Onefile.py | import os
import subprocess
import sys
from nuitka import Options, OutputDirectories
from nuitka.build.SconsInterface import (
asBoolStr,
cleanSconsDirectory,
getSconsDataPath,
runScons,
setCommonSconsOptions,
)
from nuitka.Options import getOnefileTempDirSpec, isOnefileTempDirMode
from nuitka.OutputDirectories import getResultFullpath
from nuitka.plugins.Plugins import Plugins
from nuitka.PostProcessing import executePostProcessingResources
from nuitka.PythonVersions import (
getZstandardSupportingVersions,
python_version,
)
from nuitka.Tracing import onefile_logger, postprocessing_logger
from nuitka.utils.Execution import withEnvironmentVarsOverridden
from nuitka.utils.FileOperations import (
areSamePaths,
getExternalUsePath,
getFileContents,
removeDirectory,
)
from nuitka.utils.InstalledPythons import findInstalledPython
from nuitka.utils.SharedLibraries import cleanupHeaderForAndroid
from nuitka.utils.Signing import addMacOSCodeSignature
from nuitka.utils.Utils import (
isAndroidBasedLinux,
isMacOS,
isWin32OrPosixWindows,
isWin32Windows,
)
from nuitka.utils.WindowsResources import RT_RCDATA, addResourceToFile
def packDistFolderToOnefile(dist_dir):
    """Pack a standalone distribution folder into a single executable file.

    Builds the onefile binary from *dist_dir*, then notifies plugins that
    the onefile result is complete.
    """
    output_filename = getResultFullpath(onefile=True)

    packDistFolderToOnefileBootstrap(output_filename, dist_dir)

    # Plugins may post-process the finished onefile binary.
    Plugins.onOnefileFinished(output_filename)
def _runOnefileScons(onefile_compression):
    """Build the onefile bootstrap binary via Scons.

    Args:
        onefile_compression: bool, whether the attached payload will be
            zstd compressed; forwarded to the Scons build through
            environment variables.

    Exits the process with an error message if the Scons build fails.
    """
    source_dir = OutputDirectories.getSourceDirectoryPath(onefile=True)

    # Let plugins do their thing for onefile mode too.
    Plugins.writeExtraCodeFiles(onefile=True)

    options = {
        "result_name": OutputDirectories.getResultBasePath(onefile=True),
        "result_exe": OutputDirectories.getResultFullpath(onefile=True),
        "source_dir": source_dir,
        "debug_mode": asBoolStr(Options.is_debug),
        "experimental": ",".join(Options.getExperimentalIndications()),
        "trace_mode": asBoolStr(Options.shallTraceExecution()),
        "nuitka_src": getSconsDataPath(),
        "compiled_exe": OutputDirectories.getResultFullpath(onefile=False),
        "onefile_splash_screen": asBoolStr(
            Options.getWindowsSplashScreen() is not None
        ),
    }

    env_values = setCommonSconsOptions(options)
    env_values["_NUITKA_ONEFILE_TEMP_SPEC"] = getOnefileTempDirSpec()
    env_values["_NUITKA_ONEFILE_TEMP_BOOL"] = "1" if isOnefileTempDirMode() else "0"
    env_values["_NUITKA_ONEFILE_COMPRESSION_BOOL"] = "1" if onefile_compression else "0"
    # NOTE(review): the build flag mirrors the compression flag here — confirm
    # this is intentional rather than meant to track a separate build state.
    env_values["_NUITKA_ONEFILE_BUILD_BOOL"] = "1" if onefile_compression else "0"

    # Allow plugins to contribute build definitions.
    env_values.update(Plugins.getBuildDefinitions())

    if isWin32Windows() and Options.isWindowsServiceMode():
        env_values["_NUITKA_WINSVC_BOOL"] = "1"

        winsvc_metrics = Options.getWindowsServiceMetrics()

        # Service name falls back to the binary base name, and the display
        # name falls back to the service name.
        service_name = winsvc_metrics.get(
            "name", os.path.basename(options["result_name"])
        )
        env_values["_NUITKA_WINSVC_NAME_WIDE_STRING"] = service_name
        env_values["_NUITKA_WINSVC_DISPLAY_NAME_WIDE_STRING"] = winsvc_metrics.get(
            "display_name", service_name
        )

        if "description" in winsvc_metrics:
            env_values["_NUITKA_WINSVC_DESCRIPTION_WIDE_STRING"] = winsvc_metrics[
                "description"
            ]
        if "cmdline" in winsvc_metrics:
            env_values["_NUITKA_WINSVC_CMDLINE_WIDE_STRING"] = winsvc_metrics["cmdline"]

        # These keys are required; a KeyError here indicates broken options.
        env_values["_NUITKA_WINSVC_INSTALL_WIDE_STRING"] = winsvc_metrics["install"]
        env_values["_NUITKA_WINSVC_UNINSTALL_WIDE_STRING"] = winsvc_metrics["uninstall"]

    result = runScons(
        options=options,
        env_values=env_values,
        scons_filename="Onefile.scons",
    )

    # Exit if compilation failed.
    if not result:
        onefile_logger.sysexit("Error, onefile bootstrap binary build failed.")
# Cached lookup result for the compressor Python; see getCompressorPython().
_compressor_python = None


def getCompressorPython():
    """Find a Python installation usable for zstd payload compression.

    Returns the installed Python that has a suitable "zstandard" module, or
    None when compression is disabled by the user or no suitable Python was
    found (a warning is emitted in the latter case). The lookup result is
    cached in the module-level "_compressor_python" singleton.
    """
    # User may disable it.
    if Options.shallNotCompressOnefile():
        return None

    global _compressor_python  # singleton, pylint: disable=global-statement

    if _compressor_python is None:
        _compressor_python = findInstalledPython(
            python_versions=getZstandardSupportingVersions(),
            module_name="zstandard",
            module_version="0.15",
        )

        if _compressor_python is None:
            if python_version < 0x350:
                onefile_logger.warning(
                    """\
Onefile mode cannot compress without 'zstandard' module installed on \
another discoverable Python >= 3.5 on your system."""
                )
            else:
                onefile_logger.warning(
                    """\
Onefile mode cannot compress without 'zstandard' module installed."""
                )

    return _compressor_python
def runOnefileCompressor(
    compressor_python, dist_dir, onefile_output_filename, start_binary
):
    """Attach the dist folder as (optionally compressed) payload.

    Args:
        compressor_python: installed Python with the "zstandard" module to
            compress with, or None to attach the payload uncompressed.
        dist_dir: the standalone distribution folder to pack.
        onefile_output_filename: file the payload gets attached to.
        start_binary: the binary to launch out of the unpacked payload.
    """
    # Checksums are only useful for cached (non-temporary-dir) unpacking.
    file_checksums = not isOnefileTempDirMode()
    win_path_sep = isWin32OrPosixWindows()

    # Run in-process when no external Python is needed (or the compressor
    # Python is this very interpreter); otherwise delegate to a subprocess.
    if compressor_python is None or areSamePaths(
        compressor_python.getPythonExe(), sys.executable
    ):
        from nuitka.tools.onefile_compressor.OnefileCompressor import (
            attachOnefilePayload,
        )

        attachOnefilePayload(
            dist_dir=dist_dir,
            onefile_output_filename=onefile_output_filename,
            start_binary=start_binary,
            expect_compression=compressor_python is not None,
            file_checksums=file_checksums,
            win_path_sep=win_path_sep,
            low_memory=Options.isLowMemory(),
        )
    else:
        onefile_compressor_path = os.path.normpath(
            os.path.join(os.path.dirname(__file__), "..", "tools", "onefile_compressor")
        )

        # Let the spawned Python import the "nuitka" package and inherit our
        # progress bar preference.
        mapping = {
            "NUITKA_PACKAGE_HOME": os.path.dirname(
                os.path.abspath(sys.modules["nuitka"].__path__[0])
            )
        }
        mapping["NUITKA_PROGRESS_BAR"] = "1" if Options.shallUseProgressBar() else "0"

        onefile_logger.info(
            "Using external Python '%s' to compress the payload."
            % compressor_python.getPythonExe()
        )

        with withEnvironmentVarsOverridden(mapping):
            subprocess.check_call(
                [
                    compressor_python.getPythonExe(),
                    onefile_compressor_path,
                    dist_dir,
                    getExternalUsePath(onefile_output_filename, only_dirname=True),
                    start_binary,
                    str(file_checksums),
                    str(win_path_sep),
                    str(Options.isLowMemory()),
                ],
                shell=False,
            )
def packDistFolderToOnefileBootstrap(onefile_output_filename, dist_dir):
    """Build the onefile bootstrap binary and attach the payload to it.

    Args:
        onefile_output_filename: path of the resulting onefile binary.
        dist_dir: standalone distribution folder used as the payload.
    """
    postprocessing_logger.info(
        "Creating single file from dist folder, this may take a while."
    )

    onefile_logger.info("Running bootstrap binary compilation via Scons.")

    # Cleanup first.
    source_dir = OutputDirectories.getSourceDirectoryPath(onefile=True)
    cleanSconsDirectory(source_dir)

    # Used only in some configurations
    onefile_payload_filename = os.path.join(source_dir, "__payload.bin")

    # Now we need to append the payload, potentially compressing it.
    compressor_python = getCompressorPython()

    # Decide if we need the payload during build already, or if it should be
    # attached afterwards. On macOS it is consumed by the build itself.
    payload_used_in_build = isMacOS()

    if payload_used_in_build:
        runOnefileCompressor(
            compressor_python=compressor_python,
            dist_dir=dist_dir,
            onefile_output_filename=onefile_payload_filename,
            start_binary=getResultFullpath(onefile=False),
        )

    # Create the bootstrap binary for unpacking.
    _runOnefileScons(
        onefile_compression=compressor_python is not None,
    )

    if isWin32Windows():
        executePostProcessingResources(manifest=None, onefile=True)

    if isAndroidBasedLinux():
        cleanupHeaderForAndroid(onefile_output_filename)

    Plugins.onBootstrapBinary(onefile_output_filename)

    if isMacOS():
        addMacOSCodeSignature(filenames=[onefile_output_filename])

    if not payload_used_in_build:
        runOnefileCompressor(
            compressor_python=compressor_python,
            dist_dir=dist_dir,
            onefile_output_filename=(
                onefile_payload_filename
                if isWin32Windows()
                else onefile_output_filename
            ),
            start_binary=getResultFullpath(onefile=False),
        )

        if isWin32Windows():
            # On Windows the payload is stored as an RC_DATA resource of the
            # bootstrap binary rather than appended to the file.
            addResourceToFile(
                target_filename=onefile_output_filename,
                data=getFileContents(onefile_payload_filename, mode="rb"),
                resource_kind=RT_RCDATA,
                lang_id=0,
                res_name=27,
                logger=postprocessing_logger,
            )

    if Options.isRemoveBuildDir():
        onefile_logger.info("Removing onefile build directory '%s'." % source_dir)

        removeDirectory(path=source_dir, ignore_errors=False)

        assert not os.path.exists(source_dir)
    else:
        onefile_logger.info("Keeping onefile build directory '%s'." % source_dir)
/HavNegpy-1.2.tar.gz/HavNegpy-1.2/docs/_build/html/_build/html/_build/doctrees/nbsphinx/_build/doctrees/nbsphinx/_build/html/_build/html/hn_module_tutorial.ipynb | # Tutorial for the HN module of HavNegpy package
```
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
import HavNegpy as dd
%matplotlib qt
os.chdir(r'M:\Marshall_Data\mohamed_data\mohamed_data\n44')
def create_dataframe(f):
col_names = ['Freq', 'T', 'Eps1', 'Eps2']
#f = input(str("Enter the filename:"))
df = pd.read_csv(f, sep=r"\s+",index_col=False,usecols = [0,1,2,3],names=col_names,header=None,skiprows=4,encoding='unicode_escape',engine='python')
col1 = ['log f']
for start in range(0, len(df), 63):
name = df['T'][start]
#print(name)
col1.append(name)
df2 = pd.DataFrame()
f1 = df['Freq'][0:63].values
x1 = np.log10((f1))
e = pd.DataFrame(x1)
df2['log f'] = pd.concat([e],axis=1,ignore_index=True)
global Cooling,Heating
for start in range(0, len(df), 63):
f = df['Eps2'][start:start+63].values
ep = np.log10(f)
d = pd.DataFrame(ep)
df2[start] = pd.concat([d],axis=1,ignore_index=True)
df2.columns = col1
'''
a = int(len(col1)/3)
b = 2*a
c = int(len(col1)) - b
Heating1 = df2.iloc[8:,0:a+1]
Cooling = df2.iloc[8:,a+1:b+1]
Heating2 = df2.iloc[8:,b+1:]
heat1_col = col1[0:a+1]
cool_col = col1[a+1:b+1]
heat2_col = col1[b+1:]
Cooling.columns = cool_col
Heating1.columns = heat1_col
Heating2.columns = heat2_col
f2 = df['Freq'][8:59].values
x2 = np.log10((f2))
Cooling['Freq'] = x2
Heating1['Freq'] = x2
Heating2['Freq'] = x2
'''
Cooling = df2.iloc[:,0:25]
Heating = df2.iloc[:,25:]
return df,df2,Cooling,Heating #Heating2
df,df2,cool,heat = create_dataframe('EPS.TXT')
x,y = df2['log f'][9:], heat[40][9:]
plt.figure()
plt.scatter(x,y,label='data for fitting')
plt.xlabel('log f [Hz]')
plt.ylabel('log $\epsilon$"')
plt.legend()
plt.title('Example for HN fitting')
```
image of the plot we are using in this tutorial

```
''' instantiate the HN module from HavNegpy'''
hn = dd.HN()
''' select range to perform hn fitting'''
''' the select range functions pops in a separate window and allows you two clicks to select the region of interest (ROI)'''
''' In this tutorial, I'll plot the ROI and append as an image in the next cell'''
x1,y1 = hn.select_range(x,y)
''' view the data from select range'''
plt.scatter(x1,y1,label = 'Data for fitting')
plt.xlabel('log f [Hz]')
plt.ylabel('log $\epsilon$"')
plt.legend()
plt.title('ROI selected from HN module')
```
image of the ROI from HN module
```
''' dump the initial guess parameters using dump parameters method (varies for each fn), which dumps the parameters in a json file'''
''' this is required before performing the first fitting as it takes the initial guess from the json file created'''
hn.dump_parameters_hn()
''' view the initial guess for the ROI using initial_view method'''
''' I'll append the image in the next cell'''
hn.initial_view_hn(x1,y1)
```
image of the initial guess
```
''' perform least squares fitting'''
''' The image of the curve fit is added in the next cell '''
hn.fit(x1,y1)
```
Example of the fit performed using single HN function
the procedure is similar for double HN and HN with conductivity

```
'''create a file to save fit results using create_analysis file method'''
''' before saving fit results an analysis file has to be created '''
hn.create_analysis_file()
''' save the fit results using save_fit method of the corresponding fit function'''
''' takes one argument, read more on the documentation'''
hn.save_fit_hn(1)
```
| PypiClean |
/NeuroR-1.6.4.tar.gz/NeuroR-1.6.4/neuror/cut_plane/cut_leaves.py | from itertools import product
from typing import List
import morphio
import numpy as np
from neurom.core.dataformat import COLS
from neuror.cut_plane.planes import HalfSpace
def _get_cut_leaves(half_space, morphology, bin_width, percentile_threshold):
    """Compute the cut leaves of ``morphology`` for one half space.

    The leaves lying within ``bin_width`` of the half-space boundary are the
    candidate cut leaves. Their count is compared against the
    ``percentile_threshold`` percentile of per-bin leaf counts over the rest
    of the morphology; only a positive difference counts as a valid cut.

    Args:
        half_space (planes.HalfSpace): half space to search cut points
        morphology (morphio.Morphology): morphology
        bin_width (float): the bin width
        percentile_threshold (float): the minimum percentile of leaves counts in bins

    Returns:
        leaves: ndarray of dim (n, 3) with cut leaves coordinates
        quality: quality for these cut leaves
    """
    # Terminal sections and their end points.
    terminal_sections = np.array(
        [section for section in morphology.iter() if not section.children]
    )
    terminal_points = np.array(
        [section.points[-1, COLS.XYZ] for section in terminal_sections]
    )

    # Leaves within one bin of the boundary are candidate cut leaves.
    in_cut_slice = half_space.distance(terminal_points) < bin_width

    projected = half_space.project_on_directed_normal(terminal_points[~in_cut_slice])
    if projected.size == 0:
        return None, None

    # Histogram the remaining leaves into bin_width-sized slices and take the
    # requested percentile of the per-bin counts as the acceptance threshold.
    bins = np.arange(min(projected), max(projected), bin_width)
    bin_ids = np.digitize(projected, bins)
    count_threshold = np.percentile(
        np.unique(bin_ids, return_counts=True)[1], percentile_threshold
    )

    quality = len(terminal_sections[in_cut_slice]) - count_threshold
    if quality <= 0:
        return None, None

    return terminal_points[in_cut_slice], quality
def find_cut_leaves(
    morph: morphio.Morphology,
    bin_width: float = 3,
    percentile_threshold: float = 70.0,
    searched_axes: List[str] = ("Z",),
    searched_half_spaces: List[float] = (-1, 1),
):
    """Find all cut leaves for cuts with strong signal for real cut.

    The algorithm works as follows. Given the searched_axes and searched_half_spaces,
    a list of candidate cuts is created, consisting of a slice with bin_width adjusted to the most
    extreme points of the morphology in the direction of searched_axes/searched_half_spaces.
    Each cut contains a set of leaves, which are considered as cut leaves if their quality
    is positive. The quality of a cut is defined as the number of leaves in the cut minus the
    'percentile_threshold' percentile of the distribution of the number of leaves in all other
    slices of bin_width size of the morphology. More explicitly, if a cut has more leaves than most
    other possible cuts of the same size, it is likely to be a real cut from an in-vitro slice.

    Note that all cuts can be valid, thus cut leaves can be on both sides.

    Args:
        morph: morphology
        bin_width: the bin width
        percentile_threshold: the minimum percentile of leaves counts in bins
        searched_axes: x, y or z. Specify the half space for which to search the cut leaves
        searched_half_spaces: A negative value means the morphology lives
            on the negative side of the plane, and a positive one the opposite.

    Returns:
        ndarray: cut leaves
        list: list of qualities in dicts with axis and side for each
    """
    # create half spaces
    searched_axes = [axis.upper() for axis in searched_axes]
    half_spaces = [
        HalfSpace(int(axis == "X"), int(axis == "Y"), int(axis == "Z"), 0, upward=side > 0)
        for axis, side in product(searched_axes, searched_half_spaces)
    ]

    # set the half space coef_d as furthest morphology point
    for half_space, (axis, side) in zip(half_spaces, product(searched_axes, searched_half_spaces)):
        half_space.coefs[3] = -side * np.min(
            half_space.project_on_directed_normal(morph.points), axis=0
        )

    # find the cut leaves
    cuts = [
        _get_cut_leaves(half_space, morph, bin_width, percentile_threshold)
        for half_space in half_spaces
    ]

    # return only cut leaves of half spaces with valid cut
    _leaves = [leave for leave, _ in cuts if leave is not None]
    leaves = np.vstack(_leaves) if _leaves else np.array([])
    qualities = [
        {"axis": axis, "side": side, "quality": np.around(quality, 3)}
        for (_, quality), (axis, side) in zip(cuts, product(searched_axes, searched_half_spaces))
        if quality is not None
    ]
    return leaves, qualities
// jqGrid Swedish (sv) locale bundle: registers translated UI strings,
// validation messages, and number/date formats on the global $.jgrid.
;(function($){
/**
 * jqGrid Swedish Translation
 * Harald Normann harald.normann@wts.se, harald.normann@gmail.com
 * http://www.worldteamsoftware.com
 * Dual licensed under the MIT and GPL licenses:
 * http://www.opensource.org/licenses/mit-license.php
 * http://www.gnu.org/licenses/gpl.html
**/
$.jgrid = $.jgrid || {};
$.extend($.jgrid,{
	// Pager texts.
	defaults : {
		recordtext: "Visar {0} - {1} av {2}",
		emptyrecords: "Det finns inga poster att visa",
		loadtext: "Laddar...",
		pgtext : "Sida {0} av {1}"
	},
	// Search dialog labels and operator names.
	search : {
		caption: "Sök Poster - Ange sökvillkor",
		Find: "Sök",
		Reset: "Nollställ Villkor",
		odata : ['lika', 'ej lika', 'mindre', 'mindre eller lika','större','större eller lika', 'börjar med','börjar inte med','tillhör','tillhör inte','slutar med','slutar inte med','innehåller','innehåller inte'],
		groupOps: [	{ op: "AND", text: "alla" },	{ op: "OR",  text: "eller" }	],
		matchText: " träff",
		rulesText: " regler"
	},
	// Add/edit form dialog labels and validation messages.
	edit : {
		addCaption: "Ny Post",
		editCaption: "Redigera Post",
		bSubmit: "Spara",
		bCancel: "Avbryt",
		bClose: "Stäng",
		saveData: "Data har ändrats! Spara förändringar?",
		bYes : "Ja",
		bNo : "Nej",
		bExit : "Avbryt",
		msg: {
			required:"Fältet är obligatoriskt",
			number:"Välj korrekt nummer",
			minValue:"värdet måste vara större än eller lika med",
			maxValue:"värdet måste vara mindre än eller lika med",
			email: "är inte korrekt e-post adress",
			integer: "Var god ange korrekt heltal",
			date: "Var god ange korrekt datum",
			url: "är inte en korrekt URL. Prefix måste anges ('http://' or 'https://')",
			nodefined : " är inte definierad!",
			novalue : " returvärde måste anges!",
			customarray : "Custom funktion måste returnera en vektor!",
			customfcheck : "Custom funktion måste finnas om Custom kontroll sker!"
		}
	},
	view : {
		caption: "Visa Post",
		bClose: "Stäng"
	},
	del : {
		caption: "Radera",
		msg: "Radera markerad(e) post(er)?",
		bSubmit: "Radera",
		bCancel: "Avbryt"
	},
	// Navigator bar button titles and alerts.
	nav : {
		edittext: "",
		edittitle: "Redigera markerad rad",
		addtext:"",
		addtitle: "Skapa ny post",
		deltext: "",
		deltitle: "Radera markerad rad",
		searchtext: "",
		searchtitle: "Sök poster",
		refreshtext: "",
		refreshtitle: "Uppdatera data",
		alertcap: "Varning",
		alerttext: "Ingen rad är markerad",
		viewtext: "",
		viewtitle: "Visa markerad rad"
	},
	col : {
		caption: "Välj Kolumner",
		bSubmit: "OK",
		bCancel: "Avbryt"
	},
	errors : {
		errcap : "Fel",
		nourl : "URL saknas",
		norecords: "Det finns inga poster att bearbeta",
		model : "Antal colNames <> colModel!"
	},
	// Number, currency and date formatting rules.
	formatter : {
		integer : {thousandsSeparator: " ", defaultValue: '0'},
		number : {decimalSeparator:",", thousandsSeparator: " ", decimalPlaces: 2, defaultValue: '0,00'},
		currency : {decimalSeparator:",", thousandsSeparator: " ", decimalPlaces: 2, prefix: "", suffix:"Kr", defaultValue: '0,00'},
		date : {
			dayNames:   [
				"Sön", "Mån", "Tis", "Ons", "Tor", "Fre", "Lör",
				"Söndag", "Måndag", "Tisdag", "Onsdag", "Torsdag", "Fredag", "Lördag"
			],
			monthNames: [
				"Jan", "Feb", "Mar", "Apr", "Maj", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dec",
				"Januari", "Februari", "Mars", "April", "Maj", "Juni", "Juli", "Augusti", "September", "Oktober", "November", "December"
			],
			AmPm : ["fm","em","FM","EM"],
			S: function (j) {return j < 11 || j > 13 ? ['st', 'nd', 'rd', 'th'][Math.min((j - 1) % 10, 3)] : 'th'},
			srcformat: 'Y-m-d',
			newformat: 'Y-m-d',
			masks : {
				ISO8601Long:"Y-m-d H:i:s",
				ISO8601Short:"Y-m-d",
				ShortDate: "n/j/Y",
				LongDate: "l, F d, Y",
				FullDateTime: "l, F d, Y g:i:s A",
				MonthDay: "F d",
				ShortTime: "g:i A",
				LongTime: "g:i:s A",
				SortableDateTime: "Y-m-d\\TH:i:s",
				UniversalSortableDateTime: "Y-m-d H:i:sO",
				YearMonth: "F, Y"
			},
			reformatAfterEdit : false
		},
		baseLinkUrl: '',
		showAction: '',
		target: '',
		checkbox : {disabled:true},
		idName : 'id'
	}
});
})(jQuery);
/DiscordDB-0.0.6.tar.gz/DiscordDB-0.0.6/discordDB/db.py | import discord
from .models import Data
class DiscordDB(object):
    """The Discord database client.

    Stores records as embed fields in messages of a dedicated text channel,
    using the message ID as the record key.

    Parameters
    ----------
    bot : discord.ext.commands.Bot
        An instance of discord.py Client or Bot representing your discord application.
    db_channel_id : int
        An integer representing ID of Discord channel you want to be used as database.

    """

    def __init__(self, bot, db_channel_id: int):
        self.__bot = bot
        self.__channel_id = db_channel_id

    @property
    def channel(self):
        """The ``discord.TextChannel`` instance being used as database."""
        return self.__bot.get_channel(self.__channel_id)

    async def set(self, data: dict) -> int:
        """Post and save data to the database channel.

        Parameters
        ----------
        data : dict
            Dictionary representing your raw data.

        Returns
        -------
        int
            An special integer which should be saved by the client to get this same data later.

        """
        fields = [{"name": name, "value": value} for name, value in data.items()]
        message = await self.channel.send(embed=discord.Embed.from_dict({"fields": fields}))
        return message.id

    async def get(self, _id: int) -> Data:
        """Fetch previously saved data from the database channel.

        Parameters
        ----------
        _id : int
            An special integer which was received from the :py:meth:`discordDB.DiscordDB.set` method.

        Returns
        -------
        Data
            An instance of :py:class:`discordDB.models.Data`, similar to python dictionaries but also
            supports accessing of its key using . syntax.

        """
        message = await self.channel.fetch_message(_id)
        raw_fields = message.embeds[0].to_dict()["fields"]
        record = Data({field["name"]: field["value"] for field in raw_fields})
        record.created_at = message.created_at
        return record
/Flask-BrowserID-0.0.4.tar.gz/Flask-BrowserID-0.0.4/README.md | # Flask-BrowserID
A Flask extension that provides integration with Mozilla's [BrowserID](https://login.persona.org/) authentication system and Flask-Login. It exposes two routes, for login and logout, and a JavaScript authentication bundle that allows you to quickly create login and logout buttons.
# Installation
Install with **pip**:
pip install git+https://github.com/garbados/flask-browserid.git
# Quickstart
Flask-BrowserID requires that Flask-Login's LoginManager be configured and registered with the app first, like so:
from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.browserid import BrowserID
from my_stuff import get_user_by_id # finds a user by their id
from other_stuff import get_user # finds a user based on BrowserID response
app = Flask(__name__)
login_manager = LoginManager()
login_manager.user_loader(get_user_by_id)
login_manager.init_app(app)
browser_id = BrowserID()
browser_id.user_loader(get_user)
browser_id.init_app(app)
Now the routes `/api/login` and `/api/logout` have been registered with your app. A javascript bundle, `auth_script`, has also been added to the top level of your request context, so you can access it in templates like so:
[Note: `auth_script` requires JQuery and Mozilla's `include.js`]
<html>
<head>
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.min.js"></script>
<script src="https://login.persona.org/include.js" type="text/javascript"></script>
<script type="text/javascript">{{ auth_script|safe }}</script>
</head>
<body>
{% if current_user.is_authenticated() %}
<button id="browserid-logout">Logout</button>
{% else %}
<button id="browserid-login">Login</button>
{% endif %}
</body>
</html>
Thanks to `auth_script`, clicking the `Login` button on that page will attempt to log you in using BrowserID. If you're already logged in, then clicking `Logout` will log you out.
# Required Configuration
Flask-BrowserID requires a function that takes the data returned by BrowserID and uses it to find and return a user, which Flask-BrowserID then logs in using Flask-Login. If the function can't find a user, it can attempt to create a user using the data given. If a user could neither be found nor created, the function should return None. The data returned by BrowserID will look something like this if successful:
{
"status": "okay",
"email": "lloyd@example.com",
"audience": "https://mysite.com",
"expires": 1308859352261,
"issuer": "browserid.org"
}
Or this, if not:
{
"status": "failure",
"reason": "no certificate provided"
}
BrowserID's response will have already been parsed from JSON into a dict by the time it reaches your `user_loader` function.
# Optional Configuration
You can set the URLs Flask-BrowserID uses for login and logout by setting the following in your application's configuration:
* `BROWSERID_LOGIN_URL`: defaults to `/api/login`
* `BROWSERID_LOGOUT_URL`: defaults to `/api/logout`
See [Flask Configuration Handling](http://flask.pocoo.org/docs/config/) for more on how to configure your application.
# Testing
Running `python setup.py test` will run the extension's automated test suite, but some tests can only be run (presently) by manually starting up the server and clicking around. To do so, from the extension's root directory, run `python tests/__init__.py -i`. The `-i` flag tells the test suite to skip normal testing and instead run the testing application with a test template so you can click around.
# Credits
Many thanks to [Flask-Mongoengine](https://github.com/MongoEngine/flask-mongoengine), who I based the structure of this extension on, and to [Flask-Login](https://flask-login.readthedocs.org/en/latest/), for generally being a pretty sweet extension.
| PypiClean |
/NeodroidAgent-0.4.8-py36-none-any.whl/neodroidagent/common/session_factory/vertical/single_agent_environment_session.py | import inspect
import time
from contextlib import suppress
from typing import Any, Type
import torch
import torchsnooper
from draugr import CaptureEarlyStop, add_early_stopping_key_combination, sprint
from draugr.torch_utilities import TensorBoardPytorchWriter, torch_seed
from neodroidagent import PROJECT_APP_PATH
from neodroidagent.agents import Agent
from neodroidagent.utilities import NoAgent
from warg import GDKC, passes_kws_to
from warg.context_wrapper import ContextWrapper
from warg.decorators.timing import StopWatch
from .environment_session import EnvironmentSession
from .procedures.procedure_specification import Procedure
__author__ = "Christian Heider Nielsen"
__doc__ = r"""
"""
__all__ = ["SingleAgentEnvironmentSession"]
class SingleAgentEnvironmentSession(EnvironmentSession):
    """Environment session that builds a single agent and runs a procedure on it."""

    @passes_kws_to(
        add_early_stopping_key_combination,
        Agent.__init__,
        Agent.save,
        Procedure.__call__,
    )
    def __call__(
        self,
        agent: Type[Agent],
        *,
        load_time: Any,
        seed: int,
        save_ending_model: bool = False,
        continue_training: bool = True,
        train_agent: bool = True,
        debug: bool = False,
        **kwargs
    ):
        """
        Start a session, builds Agent and starts/connect environment(s), and runs Procedure

        :param args:
        :param kwargs:
        :return:
        """
        # Debug-only instrumentation; these wrappers are no-ops unless debug.
        with ContextWrapper(torchsnooper.snoop(), debug):
            with ContextWrapper(torch.autograd.detect_anomaly(), debug):
                if agent is None:
                    raise NoAgent

                if inspect.isclass(agent):
                    # Received an agent class rather than an instance: seed
                    # torch and the environment, then instantiate it.
                    sprint('Instantiating Agent', color="crimson", bold=True, italic=True)
                    torch_seed(seed)
                    self._environment.seed(seed)

                    agent = agent(load_time=load_time, seed=seed, **kwargs)

                agent_class_name = agent.__class__.__name__
                # Identifier built from the environment's space shapes; used to
                # key save/log directories per configuration.
                total_shape = "_".join(
                    [
                        str(i)
                        for i in (
                            self._environment.observation_space.shape
                            + self._environment.action_space.shape
                            + self._environment.signal_space.shape
                        )
                    ]
                )
                environment_name = f"{self._environment.environment_name}_{total_shape}"

                save_directory = (
                    PROJECT_APP_PATH.user_data / environment_name / agent_class_name
                )
                log_directory = (
                    PROJECT_APP_PATH.user_log / environment_name / agent_class_name / load_time
                )

                with TensorBoardPytorchWriter(log_directory) as metric_writer:
                    agent.build(
                        self._environment.observation_space,
                        self._environment.action_space,
                        self._environment.signal_space,
                        metric_writer=metric_writer
                    )

                    kwargs.update(
                        environment_name=(self._environment.environment_name,),
                        save_directory=save_directory,
                        log_directory=log_directory,
                        load_time=load_time,
                        seed=seed,
                        train_agent=train_agent,
                    )

                    found = False
                    if continue_training:
                        sprint(
                            "Searching for previously trained models for initialisation for this configuration "
                            "(Architecture, Action Space, Observation Space, ...)", color="crimson", bold=True, italic=True
                        )
                        found = agent.load(
                            save_directory=save_directory, evaluation=not train_agent
                        )
                        if not found:
                            sprint(
                                "Did not find any previously trained models for this configuration", color="crimson", bold=True, italic=True
                            )

                    if not train_agent:
                        agent.eval()
                    else:
                        agent.train()

                    if not found:
                        sprint("Training from new initialisation", color="crimson", bold=True, italic=True)

                    session_proc = self._procedure(agent, environment=self._environment)

                    # Early-stop hook is armed for the duration of the run;
                    # KeyboardInterrupt is swallowed so cleanup below still runs.
                    with CaptureEarlyStop(callbacks=self._procedure.stop_procedure, **kwargs):
                        with StopWatch() as timer:
                            with suppress(KeyboardInterrupt):
                                training_resume = session_proc(metric_writer=metric_writer, **kwargs)
                                if training_resume and "stats" in training_resume:
                                    training_resume.stats.save(**kwargs)

                    end_message = f"Training ended, time elapsed: {timer // 60:.0f}m {timer % 60:.0f}s"
                    line_width = 9
                    sprint(f'\n{"-" * line_width} {end_message} {"-" * line_width}\n', color="crimson", bold=True, italic=True)

                    if save_ending_model:
                        agent.save(**kwargs)

                    # Best-effort environment shutdown; a dead worker pipe is fine.
                    try:
                        self._environment.close()
                    except BrokenPipeError:
                        pass

                    # NOTE(review): exit(0) terminates the whole process at the
                    # end of a session call — surprising for library code; confirm intended.
                    exit(0)
if __name__ == "__main__":
    # Smoke check: print the session class when run as a script.
    print(SingleAgentEnvironmentSession)
/Mathics_Django-6.0.0-py3-none-any.whl/mathics_django/web/media/js/mathjax/jax/output/SVG/autoload/mmultiscripts.js | MathJax.Hub.Register.StartupHook("SVG Jax Ready",function(){var b="2.7.9";var a=MathJax.ElementJax.mml,c=MathJax.OutputJax.SVG;a.mmultiscripts.Augment({toSVG:function(G,z){this.SVGgetStyles();var B=this.SVG(),N=this.SVGgetScale(B);this.SVGhandleSpace(B);var j=(this.data[this.base]?this.SVGdataStretched(this.base,G,z):c.BBOX.G().Clean());var K=c.TeX.x_height*N,y=c.TeX.scriptspace*N*0.75;var x=this.SVGgetScripts(y);var k=x[0],e=x[1],n=x[2],i=x[3];var g=(this.data[1]||this).SVGgetScale();var C=c.TeX.sup_drop*g,A=c.TeX.sub_drop*g;var o=j.h-C,m=j.d+A,L=0,F;if(j.ic){L=j.ic}if(this.data[this.base]&&(this.data[this.base].type==="mi"||this.data[this.base].type==="mo")){if(c.isChar(this.data[this.base].data.join(""))&&j.scale===1&&!j.stretched&&!this.data[this.base].Get("largeop")){o=m=0}}var H=this.getValues("subscriptshift","superscriptshift"),E=this.SVGgetMu(B);H.subscriptshift=(H.subscriptshift===""?0:c.length2em(H.subscriptshift,E));H.superscriptshift=(H.superscriptshift===""?0:c.length2em(H.superscriptshift,E));var l=0;if(n){l=n.w+L}else{if(i){l=i.w-L}}B.Add(j,Math.max(0,l),0);if(!e&&!i){m=Math.max(m,c.TeX.sub1*N,H.subscriptshift);if(k){m=Math.max(m,k.h-(4/5)*K)}if(n){m=Math.max(m,n.h-(4/5)*K)}if(k){B.Add(k,l+j.w+y-L,-m)}if(n){B.Add(n,0,-m)}}else{if(!k&&!n){var f=this.getValues("displaystyle","texprimestyle");F=c.TeX[(f.displaystyle?"sup1":(f.texprimestyle?"sup3":"sup2"))];o=Math.max(o,F*N,H.superscriptshift);if(e){o=Math.max(o,e.d+(1/4)*K)}if(i){o=Math.max(o,i.d+(1/4)*K)}if(e){B.Add(e,l+j.w+y,o)}if(i){B.Add(i,0,o)}}else{m=Math.max(m,c.TeX.sub2*N);var w=c.TeX.rule_thickness*N;var 
I=(k||n).h,J=(e||i).d;if(n){I=Math.max(I,n.h)}if(i){J=Math.max(J,i.d)}if((o-J)-(I-m)<3*w){m=3*w-o+J+I;C=(4/5)*K-(o-J);if(C>0){o+=C;m-=C}}o=Math.max(o,H.superscriptshift);m=Math.max(m,H.subscriptshift);if(e){B.Add(e,l+j.w+y,o)}if(i){B.Add(i,l+L-i.w,o)}if(k){B.Add(k,l+j.w+y-L,-m)}if(n){B.Add(n,l-n.w,-m)}}}B.Clean();this.SVGhandleColor(B);this.SVGsaveData(B);var M=this.SVGdata;M.dx=l;M.s=y;M.u=o,M.v=m;M.delta=L;return B},SVGgetScripts:function(r){var p,d,e=[];var o=1,h=this.data.length,g=0;for(var l=0;l<4;l+=2){while(o<h&&(this.data[o]||{}).type!=="mprescripts"){var q=[null,null,null,null];for(var n=l;n<l+2;n++){if(this.data[o]&&this.data[o].type!=="none"&&this.data[o].type!=="mprescripts"){if(!e[n]){e[n]=c.BBOX.G()}q[n]=this.data[o].toSVG()}if((this.data[o]||{}).type!=="mprescripts"){o++}}var f=(l===2);if(f){g+=Math.max((q[l]||{w:0}).w,(q[l+1]||{w:0}).w)}if(q[l]){e[l].Add(q[l].With({x:g-(f?q[l].w:0)}))}if(q[l+1]){e[l+1].Add(q[l+1].With({x:g-(f?q[l+1].w:0)}))}d=e[l]||{w:0};p=e[l+1]||{w:0};d.w=p.w=g=Math.max(d.w,p.w)}o++;g=0}for(n=0;n<4;n++){if(e[n]){e[n].w+=r;e[n].Clean()}}return e}});MathJax.Hub.Startup.signal.Post("SVG mmultiscripts Ready");MathJax.Ajax.loadComplete(c.autoloadDir+"/mmultiscripts.js")}); | PypiClean |
/Oasys-Canvas-Core-1.0.7.tar.gz/Oasys-Canvas-Core-1.0.7/orangecanvas/application/settings.py | import sys
import logging
from collections import namedtuple
from .. import config
from ..utils.settings import SettingChangedEvent
from ..utils.qtcompat import QSettings, qunwrap
from ..utils.propertybindings import (
AbstractBoundProperty, PropertyBinding, BindingManager
)
from PyQt5.QtWidgets import (
QWidget, QMainWindow, QComboBox, QCheckBox, QListView, QTabWidget,
QToolBar, QAction, QStackedWidget, QVBoxLayout, QHBoxLayout,
QFormLayout, QSizePolicy,
QDialogButtonBox
)
from PyQt5.QtGui import (
QStandardItemModel, QStandardItem
)
from PyQt5.QtCore import (
Qt, QEventLoop, QAbstractItemModel, QModelIndex
)
log = logging.getLogger(__name__)
class UserDefaultsPropertyBinding(AbstractBoundProperty):
    """
    A Property binding for a setting in a
    :class:`orangecanvas.utility.settings.Settings` instance.
    """
    def __init__(self, obj, propertyName, parent=None):
        AbstractBoundProperty.__init__(self, obj, propertyName, parent)
        # Listen for SettingChangedEvents posted to the settings object so
        # the binding can propagate externally made changes.
        obj.installEventFilter(self)

    def get(self):
        # Current value of the bound setting.
        return self.obj.get(self.propertyName)

    def set(self, value):
        # Store `value` under the bound key (may raise on invalid type).
        self.obj[self.propertyName] = value

    def eventFilter(self, obj, event):
        # Forward change notifications for *this* key only; every event is
        # still passed on to the base implementation.
        if event.type() == SettingChangedEvent.SettingChanged and \
                event.key() == self.propertyName:
            self.notifyChanged()
        return AbstractBoundProperty.eventFilter(self, obj, event)
class UserSettingsModel(QAbstractItemModel):
    """
    An item model for user settings, presenting a flat list of
    (key, status, type, value) entries.

    Only the 'Value' column (3) is editable; assignments are validated
    by the underlying settings object.
    """
    def __init__(self, parent=None, settings=None):
        QAbstractItemModel.__init__(self, parent)
        self.__settings = settings
        self.__headers = ["Name", "Status", "Type", "Value"]

    def setSettings(self, settings):
        """Set the settings instance displayed by this model."""
        if self.__settings != settings:
            # QAbstractItemModel.reset() was removed in Qt 5; use the
            # begin/end model-reset protocol so attached views update.
            self.beginResetModel()
            self.__settings = settings
            self.endResetModel()

    def settings(self):
        """Return the current settings instance."""
        return self.__settings

    def rowCount(self, parent=QModelIndex()):
        if parent.isValid():
            return 0
        elif self.__settings:
            return len(self.__settings)
        else:
            return 0

    def columnCount(self, parent=QModelIndex()):
        if parent.isValid():
            return 0
        else:
            return len(self.__headers)

    def parent(self, index):
        # Flat (non-hierarchical) model.
        return QModelIndex()

    def index(self, row, column=0, parent=QModelIndex()):
        if parent.isValid() or \
                column < 0 or column >= self.columnCount() or \
                row < 0 or row >= self.rowCount():
            return QModelIndex()
        return self.createIndex(row, column, row)

    def headerData(self, section, orientation, role=Qt.DisplayRole):
        if 0 <= section < len(self.__headers) and orientation == Qt.Horizontal:
            if role == Qt.DisplayRole:
                return self.__headers[section]
        return QAbstractItemModel.headerData(self, section, orientation, role)

    def data(self, index, role=Qt.DisplayRole):
        if self._valid(index):
            key = self._keyFromIndex(index)
            column = index.column()
            if role == Qt.DisplayRole:
                if column == 0:
                    return key
                elif column == 1:
                    default = self.__settings.isdefault(key)
                    return "Default" if default else "User"
                elif column == 2:
                    return type(self.__settings.get(key)).__name__
                elif column == 3:
                    return self.__settings.get(key)
            elif role == Qt.EditRole and column == 3:
                # Give editors the raw value (previously this path fell
                # through to an accidental `return self`).
                return self.__settings.get(key)
        return None

    def flags(self, index):
        if self._valid(index):
            flags = Qt.ItemIsEnabled | Qt.ItemIsSelectable
            if index.column() == 3:
                return Qt.ItemIsEditable | flags
            else:
                return flags
        return Qt.NoItemFlags

    def setData(self, index, value, role=Qt.EditRole):
        if self._valid(index) and index.column() == 3:
            key = self._keyFromIndex(index)
            value = qunwrap(value)
            try:
                self.__settings[key] = value
            except (TypeError, ValueError):
                # exc_info=True already records the full traceback.
                log.error("Failed to set value (%r) for key %r", value, key,
                          exc_info=True)
            else:
                self.dataChanged.emit(index, index)
                return True
        return False

    def _valid(self, index):
        # True when the index row is within this model's bounds.
        row = index.row()
        return 0 <= row < self.rowCount()

    def _keyFromIndex(self, index):
        # Map a row to its settings key (insertion order of the settings).
        row = index.row()
        return list(self.__settings.keys())[row]
def container_widget_helper(orientation=Qt.Vertical, spacing=None, margin=0):
    """Return a QWidget with a box layout of the given `orientation`.

    `spacing` and `margin` (a uniform contents margin) are applied when
    not None; pass ``margin=None`` to keep the style's default margins.
    """
    widget = QWidget()
    if orientation == Qt.Vertical:
        layout = QVBoxLayout()
        widget.setSizePolicy(QSizePolicy.Fixed,
                             QSizePolicy.MinimumExpanding)
    else:
        layout = QHBoxLayout()
    if spacing is not None:
        layout.setSpacing(spacing)
    if margin is not None:
        # Bug fix: the `margin` argument was previously ignored and the
        # margins were always hard-coded to 0.
        layout.setContentsMargins(margin, margin, margin, margin)
    widget.setLayout(layout)
    return widget
_State = namedtuple("_State", ["visible", "position"])
class UserSettingsDialog(QMainWindow):
"""
A User Settings/Defaults dialog.
"""
MAC_UNIFIED = True
    def __init__(self, parent=None, **kwargs):
        QMainWindow.__init__(self, parent, **kwargs)
        self.setWindowFlags(Qt.Dialog)
        self.setWindowModality(Qt.ApplicationModal)
        self.layout().setSizeConstraint(QVBoxLayout.SetFixedSize)
        # Use the unified title/toolbar look only on macOS.
        self.__macUnified = sys.platform == "darwin" and self.MAC_UNIFIED
        # Bindings commit immediately (AutoSubmit) — there is no
        # explicit OK/Apply button in this dialog.
        self._manager = BindingManager(self,
                                       submitPolicy=BindingManager.AutoSubmit)
        self.__loop = None
        self.__settings = config.settings()
        self.__setupUi()
    def __setupUi(self):
        """Set up the UI.

        Builds the General / Output / Applications / Categories pages and
        the (platform dependent) tab container, binding every widget to its
        persistent setting key via :meth:`bind`.
        """
        if self.__macUnified:
            self.tab = QToolBar()
            self.addToolBar(Qt.TopToolBarArea, self.tab)
            self.setUnifiedTitleAndToolBarOnMac(True)
            # This does not seem to work
            self.setWindowFlags(self.windowFlags() & \
                                ~Qt.MacWindowToolBarButtonHint)
            self.tab.actionTriggered[QAction].connect(
                self.__macOnToolBarAction
            )
            central = QStackedWidget()
            central.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
        else:
            self.tab = central = QTabWidget(self)
        # Add a close button to the bottom of the dialog
        # (to satisfy GNOME 3 which shows the dialog without a title bar).
        container = container_widget_helper()
        container.layout().addWidget(central)
        buttonbox = QDialogButtonBox(QDialogButtonBox.Close)
        buttonbox.rejected.connect(self.close)
        container.layout().addWidget(buttonbox)
        self.setCentralWidget(container)
        self.stack = central
        # General Tab
        tab = QWidget()
        self.addTab(tab, self.tr("General"),
                    toolTip=self.tr("General Options"))
        form = QFormLayout()
        tab.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
        nodes = QWidget(self, objectName="nodes")
        nodes.setLayout(QVBoxLayout())
        nodes.layout().setContentsMargins(0, 0, 0, 0)
        cb_anim = QCheckBox(
            self.tr("Enable node animations"),
            objectName="enable-node-animations",
            toolTip=self.tr("Enable shadow and ping animations for nodes "
                            "in the workflow.")
        )
        self.bind(cb_anim, "checked", "schemeedit/enable-node-animations")
        nodes.layout().addWidget(cb_anim)
        form.addRow(self.tr("Nodes"), nodes)
        links = QWidget(self, objectName="links")
        links.setLayout(QVBoxLayout())
        links.layout().setContentsMargins(0, 0, 0, 0)
        cb_show = QCheckBox(
            self.tr("Show channel names between widgets"),
            objectName="show-channel-names",
            toolTip=self.tr("Show source and sink channel names "
                            "over the links.")
        )
        self.bind(cb_show, "checked", "schemeedit/show-channel-names")
        links.layout().addWidget(cb_show)
        form.addRow(self.tr("Links"), links)
        quickmenu = QWidget(self, objectName="quickmenu-options")
        quickmenu.setLayout(QVBoxLayout())
        quickmenu.layout().setContentsMargins(0, 0, 0, 0)
        cb1 = QCheckBox(self.tr("On double click"),
                        toolTip=self.tr("Open quick menu on a double click "
                                        "on an empty spot in the canvas"))
        cb2 = QCheckBox(self.tr("On right click"),
                        toolTip=self.tr("Open quick menu on a right click "
                                        "on an empty spot in the canvas"))
        cb3 = QCheckBox(self.tr("On space key press"),
                        toolTip=self.tr("On Space key press while the mouse"
                                        "is hovering over the canvas."))
        cb4 = QCheckBox(self.tr("On any key press"),
                        toolTip=self.tr("On any key press while the mouse"
                                        "is hovering over the canvas."))
        self.bind(cb1, "checked", "quickmenu/trigger-on-double-click")
        self.bind(cb2, "checked", "quickmenu/trigger-on-right-click")
        self.bind(cb3, "checked", "quickmenu/trigger-on-space-key")
        self.bind(cb4, "checked", "quickmenu/trigger-on-any-key")
        quickmenu.layout().addWidget(cb1)
        quickmenu.layout().addWidget(cb2)
        quickmenu.layout().addWidget(cb3)
        quickmenu.layout().addWidget(cb4)
        form.addRow(self.tr("Open quick menu on"), quickmenu)
        startup = QWidget(self, objectName="startup-group")
        startup.setLayout(QVBoxLayout())
        startup.layout().setContentsMargins(0, 0, 0, 0)
        cb_splash = QCheckBox(self.tr("Show splash screen"), self,
                              objectName="show-splash-screen")
        cb_welcome = QCheckBox(self.tr("Show welcome screen"), self,
                               objectName="show-welcome-screen")
        self.bind(cb_splash, "checked", "startup/show-splash-screen")
        self.bind(cb_welcome, "checked", "startup/show-welcome-screen")
        startup.layout().addWidget(cb_splash)
        startup.layout().addWidget(cb_welcome)
        form.addRow(self.tr("On startup"), startup)
        toolbox = QWidget(self, objectName="toolbox-group")
        toolbox.setLayout(QVBoxLayout())
        toolbox.layout().setContentsMargins(0, 0, 0, 0)
        exclusive = QCheckBox(self.tr("Only one tab can be open at a time"))
        self.bind(exclusive, "checked", "mainwindow/toolbox-dock-exclusive")
        toolbox.layout().addWidget(exclusive)
        form.addRow(self.tr("Tool box"), toolbox)
        tab.setLayout(form)
        # Output Tab
        tab = QWidget()
        self.addTab(tab, self.tr("Output"),
                    toolTip="Output Redirection")
        form = QFormLayout()
        box = QWidget(self, objectName="streams")
        layout = QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        cb1 = QCheckBox(self.tr("Standard output"))
        cb2 = QCheckBox(self.tr("Standard error"))
        self.bind(cb1, "checked", "output/redirect-stdout")
        self.bind(cb2, "checked", "output/redirect-stderr")
        layout.addWidget(cb1)
        layout.addWidget(cb2)
        box.setLayout(layout)
        form.addRow(self.tr("Redirect output"), box)
        box = QWidget()
        layout = QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        combo = QComboBox()
        # NOTE: the item order must match the "logging/level" setting values.
        combo.addItems([self.tr("Critical"),
                        self.tr("Error"),
                        self.tr("Warn"),
                        self.tr("Info"),
                        self.tr("Debug")])
        cb = QCheckBox(self.tr("Show output on 'Error'"),
                       objectName="focus-on-error")
        self.bind(combo, "currentIndex", "logging/level")
        self.bind(cb, "checked", "output/show-on-error")
        layout.addWidget(combo)
        layout.addWidget(cb)
        box.setLayout(layout)
        form.addRow(self.tr("Logging"), box)
        box = QWidget()
        layout = QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        cb1 = QCheckBox(self.tr("Stay on top"),
                        objectName="stay-on-top")
        cb2 = QCheckBox(self.tr("Dockable"),
                        objectName="output-dockable")
        self.bind(cb1, "checked", "output/stay-on-top")
        self.bind(cb2, "checked", "output/dockable")
        layout.addWidget(cb1)
        layout.addWidget(cb2)
        box.setLayout(layout)
        form.addRow(self.tr("Output window"), box)
        box = QWidget()
        layout = QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        cb1 = QCheckBox(self.tr("Open in external browser"),
                        objectName="open-in-external-browser")
        cb2 = QCheckBox(self.tr("Stay on top"),
                        objectName="help-stay-on-top")
        cb3 = QCheckBox(self.tr("Dockable"),
                        objectName="help-dockable")
        self.bind(cb1, "checked", "help/open-in-external-browser")
        self.bind(cb2, "checked", "help/stay-on-top")
        self.bind(cb3, "checked", "help/dockable")
        layout.addWidget(cb1)
        layout.addWidget(cb2)
        layout.addWidget(cb3)
        box.setLayout(layout)
        form.addRow(self.tr("Help window"), box)
        tab.setLayout(form)
        # Applications Specific
        tab = QWidget()
        self.addTab(tab, self.tr("Applications"),
                    toolTip=self.tr("Applications Specific Options"))
        form = QFormLayout()
        tab.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
        tab.setLayout(form)
        # Categories Tab
        tab = QWidget()
        layout = QVBoxLayout()
        view = QListView()
        from .. import registry
        reg = registry.global_registry()
        model = QStandardItemModel()
        settings = QSettings()
        for cat in reg.categories():
            item = QStandardItem()
            item.setText(cat.name)
            item.setCheckable(True)
            visible, _ = category_state(cat, settings)
            item.setCheckState(Qt.Checked if visible else Qt.Unchecked)
            model.appendRow([item])
        view.setModel(model)
        layout.addWidget(view)
        tab.setLayout(layout)
        # Persist the checked state immediately on every toggle.
        model.itemChanged.connect(
            lambda item:
            save_category_state(
                reg.category(str(item.text())),
                _State(item.checkState() == Qt.Checked, -1),
                settings
            )
        )
        self.addTab(tab, "Categories")
        if self.__macUnified:
            # Need some sensible size otherwise mac unified toolbar 'takes'
            # the space that should be used for layout of the contents
            self.adjustSize()
def addTab(self, widget, text, toolTip=None, icon=None):
if self.__macUnified:
action = QAction(text, self)
if toolTip:
action.setToolTip(toolTip)
if icon:
action.setIcon(toolTip)
action.setData(len(self.tab.actions()))
self.tab.addAction(action)
self.stack.addWidget(widget)
else:
i = self.tab.addTab(widget, text)
if toolTip:
self.tab.setTabToolTip(i, toolTip)
if icon:
self.tab.setTabIcon(i, icon)
    def widget(self, index):
        # Page lookup is delegated to whichever container is in use
        # (QStackedWidget on macOS unified mode, QTabWidget otherwise).
        if self.__macUnified:
            return self.stack.widget(index)
        else:
            return self.tab.widget(index)
def keyPressEvent(self, event):
if event.key() == Qt.Key_Escape:
self.hide()
self.deleteLater()
    def bind(self, source, source_property, key, transformer=None):
        # Two-way bind `source.source_property` to the setting stored under
        # `key`; the widget is initialised from the stored value first.
        # NOTE(review): the `transformer` argument is currently ignored —
        # confirm whether value transformation was ever meant to be wired up.
        target = UserDefaultsPropertyBinding(self.__settings, key)
        source = PropertyBinding(source, source_property)
        source.set(target.get())
        self._manager.bind(target, source)
    def commit(self):
        # Push all pending binding values into the settings store.
        self._manager.commit()

    def revert(self):
        # Discard pending (uncommitted) binding values.
        self._manager.revert()
def reset(self):
for target, source in self._manager.bindings():
try:
source.reset()
except NotImplementedError:
# Cannot reset.
pass
except Exception:
log.error("Error reseting %r", source.propertyName,
exc_info=True)
    def exec_(self):
        # Show the (application modal) window and block in a local event
        # loop until it is hidden; return the loop's exit status.
        self.__loop = QEventLoop()
        self.show()
        status = self.__loop.exec_()
        self.__loop = None
        return status
    def hideEvent(self, event):
        QMainWindow.hideEvent(self, event)
        if self.__loop is not None:
            # Unblock a pending exec_() call.
            self.__loop.exit(0)
            self.__loop = None
    def __macOnToolBarAction(self, action):
        # The page index was stored in the action's data by addTab().
        index = qunwrap(action.data())
        self.stack.setCurrentIndex(index)
def category_state(cat, settings):
    """Return the persisted ``(visible, position)`` state for category `cat`.

    Falls back to ``not cat.hidden`` / ``-1`` when nothing is stored yet.
    """
    visible = settings.value(
        "mainwindow/categories/{0}/visible".format(cat.name),
        defaultValue=not cat.hidden,
        type=bool
    )
    position = settings.value(
        "mainwindow/categories/{0}/position".format(cat.name),
        defaultValue=-1,
        type=int
    )
    return (visible, position)
def save_category_state(cat, state, settings):
    """Persist a `_State` (visible, position) for category `cat`."""
    settings.setValue(
        "mainwindow/categories/{0}/visible".format(cat.name),
        state.visible
    )
    settings.setValue(
        "mainwindow/categories/{0}/position".format(cat.name),
        state.position
    )
/MegEngine-1.13.1-cp37-cp37m-macosx_10_14_x86_64.whl/megengine/core/autodiff/grad.py | import weakref
from .._imperative_rt import core2
# Monotonic counter used to generate default Grad names ("grad_<n>").
_grad_count = 0
# name -> Grad manager; weak values so finished Grads disappear automatically.
_grad_manager_dict = weakref.WeakValueDictionary()
def get_grad_managers():
    """Return the currently registered (still alive) grad managers."""
    return list(_grad_manager_dict.values())
class GradKey(core2.GradKey):
    # Thin Python wrapper over the native GradKey; adds an optional debug
    # name and a convenience backward() entry point.
    def __init__(self, name=None):
        if name:
            self.name = name

    def backward(self, ys, dys):
        # Propagate gradients `dys` through outputs `ys` for this key.
        return core2.backward(self, ys, dys)
class Grad:
    # Global stack of currently entered grad "groups"; each group is a list
    # of weakrefs to Grad instances that were entered together.
    stack = []
    # When True, newly entered Grads join the top-of-stack group instead of
    # starting a new one (see begin_group()/end_group()).
    grouping = False
    # Maps the native GradKey back to its owning Python Grad instance.
    key2grad = weakref.WeakValueDictionary()

    def __init__(self, name=None):
        global _grad_count
        if name is None:
            name = "grad_%d" % _grad_count
            _grad_count += 1
        self._refkeeper = []
        self._impl = GradKey(name)
        Grad.key2grad[self._impl] = self
        _grad_manager_dict[self._name] = self
        # A Grad starts in a group containing only itself.
        self._group = [weakref.ref(self)]

    @property
    def _name(self):
        return self._impl.name

    def _is_attached_to(self, tensor):
        return self._impl.is_attached_to(tensor)

    def wrt(self, *tensors, callback=None):
        # Mark `tensors` as differentiation targets ("with respect to").
        for x in tensors:
            self._impl.attach(x, callback)
        return self

    def __call__(self, ys, dys):
        # Run backward for this Grad only; sibling Grads in the same group
        # are suspended so their keys do not record this backward pass.
        from collections.abc import Sequence

        if not isinstance(ys, Sequence):
            ys = [ys]
        if not isinstance(dys, Sequence):
            dys = [dys]
        group = [ref() for ref in self._group]
        for grad in group:
            if grad is self:
                continue
            grad.suppress()
        self._impl.backward(ys, dys)
        for grad in group:
            if grad is self:
                continue
            grad.resume()
        self._refkeeper = None
        return None

    def __enter__(self):
        ref = weakref.ref(self)
        self._impl.enter()
        if Grad.grouping:
            # Join the group opened by begin_group().
            group = Grad.stack[-1]
            self._group = group
            group.append(ref)
        else:
            Grad.stack.append(self._group)
        return self

    def __exit__(self, _1, _2, _3):
        self._impl.exit()
        self._refkeeper = None
        del Grad.key2grad[self._impl]
        self._impl = None
        # Weakrefs to the same object compare equal, so remove() works.
        self._group.remove(weakref.ref(self))
        if len(self._group) == 0:
            Grad.stack.remove(self._group)

    @staticmethod
    def begin_group():
        # Subsequently entered Grads share one group until end_group().
        assert not Grad.grouping
        Grad.grouping = True

    @staticmethod
    def end_group():
        group = Grad.stack[-1]
        assert len(group) > 0
        assert Grad.grouping
        Grad.grouping = False

    def suppress(self):
        # Temporarily stop this Grad's key from recording operations.
        if self._impl is not None:
            self._impl.suppress()

    def resume(self):
        if self._impl is not None:
            self._impl.resume()
class Function:
    r"""Defines a block of operations with customizable differentiation.

    The computation should be defined in ``forward`` method, with gradient
    computation defined in ``backward`` method.

    Each instance of ``Function`` should be used only once during forwardding.

    Examples:

        .. code-block::

            class Sigmoid(Function):
                def forward(self, x):
                    y = 1 / (1 + F.exp(-x))
                    self.y = y
                    return y

                def backward(self, dy):
                    y = self.y
                    return dy * y * (1 - y)
    """

    def forward(self, *args, **kwargs):
        r"""Applies operations to ``inputs`` and returns results. It must be overriden by all subclasses.

        Args:
            input: input tensors.

        Returns:
            a tuple of Tensor or a single Tensor.

        Note:
            * This method should return a tuple of Tensor or a single Tensor representing the output
              of the function.
            * positional arguments should all be Tensor
        """
        raise NotImplementedError

    def backward(self, *output_grads):
        r"""Compute the gradient of the forward function. It must be overriden by all subclasses.

        Args:
            output_grads: gradients of outputs that are returned by :meth:`forward`.

        Note:
            * In case when some tensors of outputs are not related to loss function, the corresponding
              values in ``output_grads`` would be ``None``.
            * This method should return a tuple which containing the gradients of all inputs, in the same order
              as the ``inputs`` argument of :meth:`forward` . A ``Tensor`` could be returned
              instead if there is only one input. If users want to stop the propagation of some gradients,
              the corresponding returned values should be set ``None`` .
        """
        raise NotImplementedError

    def _default_rule(self, *args):
        # Run forward and remember whether it returned a single Tensor so
        # __call__ can mirror that shape on the way out.
        ret = self.forward(*args)
        self.__single_output = isinstance(ret, core2.Tensor)
        return ret

    def _grad_rule(self, *args):
        # NOTE(review): subclasses overriding _grad_rule must still call
        # _default_rule — it sets __single_output, which __call__ reads.
        return self._default_rule(*args), self.backward

    def __call__(self, *args):
        from ...tensor import Tensor

        for arg in args:
            if not isinstance(arg, Tensor):
                raise TypeError(
                    "op Function expect type Tensor as inputs, got {}".format(type(arg))
                )
        grad_key = core2.get_grad_key(args)
        if grad_key is None:
            # No grad manager attached to any input; plain forward pass.
            return self._default_rule(*args)
        grad = Grad.key2grad[grad_key]
        group = [ref() for ref in grad._group]
        # Keep unmodified copies of the inputs for the backward graph.
        origin_args = [Tensor(arg) for arg in args]
        # Suppress recording while the custom forward runs, then restore.
        for grad in group:
            grad.suppress()
        outputs, backward = self._grad_rule(*args)
        for grad in reversed(group):
            grad.resume()

        def normalized_backward(*output_grads):
            # Always hand the core a tuple of input gradients.
            input_grads = backward(*output_grads)
            if isinstance(input_grads, Tensor) or input_grads is None:
                input_grads = (input_grads,)
            return input_grads

        if self.__single_output:
            outputs = (outputs,)
        outputs = core2.set_grad(normalized_backward, origin_args, outputs)
        if self.__single_output:
            (outputs,) = outputs
        return outputs

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, state):
        self.__dict__.update(state)
/JyPlotter-0.9.4.tar.gz/JyPlotter-0.9.4/PyPlotter/systemTest.py | ########################################################################
#
# Test Suite
#
########################################################################
import random, math, copy
import Gfx, Graph, Simplex
from Compatibility import *
GR = None
def TestDriver(gfx):
    """Exercise the Gfx interface: polygons, fill patterns, line styles,
    fonts, rotated text, lines and points.

    `gfx` must be an object derived from a class that implements
    GfxDriver.GfxInterface.
    """
    w, h = gfx.getSize()
    if w < 400 or h < 300:
        # Bug fix: raising a plain string is illegal in Python 3 (it raises
        # TypeError instead of the intended message); raise a real exception.
        raise ValueError("Graphics area too small: %d, %d !" % (w, h))
    gfx.clear()
    poly = [(10,20),(200, 10), (250, 100), (100, 180), (30, 40), (10,20)]
    gfx.setColor((0.7, 0.7, 0.5))
    gfx.setFillPattern(Gfx.PATTERNED)
    gfx.fillPoly(poly)
    gfx.setColor((1.0, 0.3, 0.3))
    gfx.setLinePattern(Gfx.DASHED)
    gfx.setLineWidth(Gfx.THIN)
    gfx.drawPoly(poly)
    gfx.setLinePattern(Gfx.DOTTED)
    gfx.setLineWidth(Gfx.THIN)
    gfx.drawRect(200, 200, 100, 100)
    gfx.setLineWidth(Gfx.THICK)
    gfx.setLinePattern(Gfx.DASHED)
    gfx.drawRect(300, 300, 120, 120)
    gfx.setLineWidth(Gfx.THIN)
    gfx.setLinePattern(Gfx.CONTINUOUS)
    gfx.setLineWidth(Gfx.MEDIUM)
    gfx.setFillPattern(Gfx.SOLID)
    gfx.setColor((0.3, 0.3, 0.3))
    gfx.fillRect(100,150,200,100)
    gfx.setColor((0.8, 0.0, 0.0))
    gfx.drawRect(150,150,210,110)
    gfx.setColor((0.1, 1.0, 0.1))
    gfx.setFont(Gfx.SANS, Gfx.NORMAL, "")
    gfx.writeStr(160,160, "Grafik")
    gfx.setFont(Gfx.SERIF, Gfx.LARGE, "bi")
    gfx.setColor((0.1, 1.0, 0.5))
    gfx.writeStr(170, 180, "Test")
    gfx.setFont(Gfx.SANS, Gfx.NORMAL, "")
    gfx.writeStr(100, 200, "wxGraph")
    gfx.writeStr(0, 0, "0")
    gfx.writeStr(90, 190, "Rotated", 90.0)
    gfx.setColor((0.5, 0.5, 0.5))
    gfx.drawLine(10, 10, 200, 100)
    # Text rotation sweep in 15 degree increments.
    for x in range(0, 361, 15):
        gfx.writeStr(500, 300, "Rotate %i"%x, float(x))
    gfx.setColor(Gfx.BLACK)
    gfx.writeStr(500, 100, "Rotation", 0.0)
    gfx.setColor(Gfx.RED)
    gfx.writeStr(500, 100, "Rotation", 90.0)
    gfx.setColor(Gfx.GREEN)
    gfx.writeStr(500, 100, "Rotation", 45.0)
    gfx.setLineWidth(Gfx.THIN)
    gfx.setColor(Gfx.BLUE)
    gfx.drawRect(350, 50, 100, 50)
    gfx.setColor(Gfx.GREEN)
    gfx.drawRect(349, 49, 102, 52)
    gfx.setColor(Gfx.RED)
    gfx.fillRect(350, 50, 100, 50)
##    gfx.setColor(Gfx.GREEN)
##    gfx.drawRect(350, 50, 100, 50)
def Test():
    # Full driver test: run TestDriver, then draw degenerate polygons and
    # border/frame lines to verify the coordinate extremes, and finally
    # wait for the window to be closed by the user.
    gfx = GR.Window()
    TestDriver(gfx)
    # Degenerate polygons must be accepted without raising.
    gfx.drawPoly([])
    gfx.drawPoly([(5,5)])
    gfx.fillPoly([])
    gfx.fillPoly([(2,2)])
    gfx.setColor((1.,0.,0.))
    gfx.setLineWidth(Gfx.THIN)
    gfx.drawRect(0,0,640,480)
    gfx.setColor((0.,0.,1.))
    gfx.drawLine(0,0,639,0)
    gfx.drawLine(0,479,639,479)
    gfx.setColor((0.,1.,0.))
    gfx.drawLine(0,0,0,479)
    gfx.drawLine(639,0,639,479)
    gfx.setColor((1.,0.,0.))
    gfx.drawPoint(320, 240)
    #print ("ready")
    gfx.waitUntilClosed()
#def paintCallback(dc):
# gfx = Driver(dc)
# gfx.drawLine(-10, -10, 100, 100)
#
#def wxGfx_TestPostscript():
# gfx = Window()
# gfx.DumpPostscript(gfx.win, "test.ps", paintCallback)
# gfx.waitUntilClosed()
def Test_wxGfx():
    # Run the driver test with the wxPython backend.
    global GR
    import wxGfx as GR
    Test()

def Test_tkGfx():
    # Run the driver test with the Tkinter backend.
    global GR
    import tkGfx as GR
    Test()

def Test_gtkGfx():
    # Run the driver test with the GTK backend.
    global GR
    import gtkGfx as GR
    Test()

def Test_qtGfx():
    # Run the driver test with the Qt backend.
    global GR
    import qtGfx as GR
    Test()

def Test_awtGfx():
    # Run the driver test with the Java AWT backend (Jython only).
    global GR
    import awtGfx as GR
    Test()

def Test_nilDevice():
    # Run the driver test against the no-op driver (nothing is displayed).
    gfx = Gfx.nilDriver(800, 600)
    TestDriver(gfx)

def Test_psGfx():
    # Render the driver test into an encapsulated PostScript file.
    import psGfx
    gfx = psGfx.Driver()
    TestDriver(gfx)
    gfx.save("Test_Postscript.eps")
##~ def TestCoordinateTransformer(gfx):
##~ """Test CoordinateTransformer by plotting a function."""
##~
##~ w,h = gfx.getSize()
##~ if w < 200 or h < 200:
##~ raise "Graphics area too small: %d, %d !" % (w,h)
##~
##~ gfx.clear((1.0, 1.0, 1.0))
##~ gfx.setLineThickness(gfx.MEDIUM)
##~ tr = CoordinateTransformer(0, 0, w-1, h-1, -0.5, -1.0, 2.0, 2.5)
##~
##~ gfx.setColor((0.0, 0.0, 0.0))
##~ gfx.drawLine(tr.X(-0.5), tr.Y(0.0), tr.X(2.0), tr.Y(0.0)) # x-axis
##~ gfx.drawLine(tr.X(0.0), tr.Y(-1.0), tr.X(0.0), tr.Y(2.5)) # y-axis
##~
##~ x = -0.5
##~ dx = 0.1
##~ table = []
##~ while x <= 2.0:
##~ y = x**3
##~ table.append((x, y))
##~ x += dx
##~
##~ gfx.setColor((1.0, 0.1, 0.1))
##~ gfx.drawPoly(tr.transform(table))
def frange(start, stop, step):
    """frange(start, stop, step) -> list of floats

    Inclusive float range built by repeated addition of `step`
    (ascending for positive steps, descending for negative ones).
    """
    if start == stop and step == 0:
        return [start]
    ascending = start <= stop and step > 0
    descending = start >= stop and step < 0
    if not (ascending or descending):
        raise ValueError("conflicting values for start, stop and step:"
                         + " %f, %f, %f" % (start, stop, step))
    values = []
    current = start
    while (current <= stop) if ascending else (current >= stop):
        values.append(current)
        current += step
    return values
def Test_Graph():
    # Cartesian graph demo: several polynomial curves on a fixed viewport.
    getGR()
    gfx = GR.Window()
    gr = Graph.Cartesian(gfx, -8.0, -8.0, 8.0, 8.0,
        axisPen = Gfx.Pen(color=(0.0, 0.0, 1.0), lineWidth=Gfx.MEDIUM),
        labelPen = Gfx.Pen(color=(0.6, 0.1, 0.1)),
        styleFlags=Graph.TITLE|Graph.CAPTION|Graph.LABELS| \
            Graph.SHUFFLE_DRAW|Graph.FULL_GRID|Graph.AXISES|\
            Graph.KEEP_ASPECT)
    gr.addPen("X**3", Gfx.RED_PEN)
    gr.addPen("X**2", Gfx.BLUE_PEN)
    gr.addPen("X", Gfx.YELLOW_PEN)
    gr.addPen("X**4", Gfx.TURKEY_PEN)
    # gr.addPen("Z012345678901234567890123456789", Gfx.GREEN_PEN)
    # for i in range(14): gr.addPen(str(i), Gfx.BLACK_PEN)
    for x in frange(-8.0, 8.0, 0.02):
        gr.addValue("X**3", x, x**3)
        gr.addValue("X**2", x, x**2)
        gr.addValue("X", x, x)
        gr.addValue("X**4", x, x**4)
        # gr.addValue("Z012345678901234567890123456789", x, x*2)
    gr.setTitle("f(x) = x*x*x")
    gr.setLabels("X-Achse", "Y-Achse")
    #gr.setTypeFlags(Graph.AUTO_ADJUST)
    #gr.redrawGraph()
    gfx.waitUntilClosed()

def Test_Graph2():
    # Dashed/dotted pens with AUTO_PEN style: sin, cos and a straight line.
    getGR()
    gfx = GR.Window()
    gr = Graph.Cartesian(gfx, -8.0, -8.0, 8.0, 8.0,
        axisPen = Gfx.Pen(color=(0.0, 0.0, 1.0), lineWidth=Gfx.MEDIUM),
        labelPen = Gfx.Pen(color=(0.6, 0.1, 0.1)),
        styleFlags=Graph.TITLE|Graph.CAPTION|Graph.LABELS| \
            Graph.SHUFFLE_DRAW|Graph.FULL_GRID|Graph.AXISES|\
            Graph.KEEP_ASPECT|Graph.AUTO_PEN)
    pen = copy.copy(Gfx.RED_PEN)
    pen.linePattern = Gfx.DASHED
    pen.lineWidth = Gfx.MEDIUM
    pen2 = copy.copy(Gfx.BLUE_PEN)
    pen2.linePattern = Gfx.DASHED
    pen2.lineWidth = Gfx.MEDIUM
    pen3 = copy.copy(Gfx.GREEN_PEN)
    pen3.linePattern = Gfx.DOTTED
    pen3.lineWidth = Gfx.MEDIUM
    gr.addPen("sin(x)", pen)
    gr.addPen("cos(x)", pen3)
    gr.addPen("line", pen2)
    for x in frange(-8.0, 8.0, 0.02):
        gr.addValue("sin(x)", x, math.sin(x))
        gr.addValue("cos(x)", x, math.cos(x))
    gr.addValue("line", -8,-8)
    gr.addValue("line", 8, 8)
    gr.setTitle("f(x) = x*x*x")
    gr.setLabels("X-Achse", "Y-Achse")
    gr.redraw()
    gfx.waitUntilClosed()

def Test_GraphLg():
    # Logarithmic x-axis (LOG_X): log(x) and sin(log(x)) over [1, 1100].
    getGR()
    gfx = GR.Window()
    gr = Graph.Cartesian(gfx, 1.0, -8.0, 1100.0, 8.0,
        axisPen = Gfx.Pen(color=(0.0, 0.0, 1.0), lineWidth=Gfx.MEDIUM),
        labelPen = Gfx.Pen(color=(0.6, 0.1, 0.1)),
        styleFlags=Graph.TITLE|Graph.CAPTION|Graph.LABELS| \
            Graph.SHUFFLE_DRAW|Graph.FULL_GRID|Graph.AXISES| \
            Graph.LOG_X)
    gr.addPen("sin(log(x))")
    gr.addPen("log(x)")
    points = gr.xaxisSteps(1.0, 1100.0)
    # print len(points), points[:10], points[-10:]
    for x in points:
        gr.addValue("sin(log(x))", x, math.sin(math.log(x)))
        gr.addValue("log(x)", x, math.log(x))
    gr.setTitle("log(x), sin(log(x)")
    gr.setLabels("X-Achse", "Y-Achse")
    # gr.setTypeFlags(Graph.AUTO_ADJUST)
    gr.redrawGraph()
    gfx.waitUntilClosed()
########################################################################
#
# Simplex Diagram Tests
#
########################################################################
def TestNearest():
    """Test method getNearest of class PatchedTriangle"""
    gfx = GR.Window()
    plotter = Simplex.Plotter(gfx)
    diagram = Simplex.PatchDiagram(plotter, lambda p: p)
    # Test 1: every raster point (red) should map onto itself (green).
    for p in diagram.points:
        plotter.setColor((1.0, 0.0, 0.0))
        plotter.plot(p)
        q = diagram._getNearest(p)
        plotter.setColor((0.0, 1.0, 0.0))
        plotter.plot(q)
    gfx.waitUntilClosed()
    gfx = GR.Window()
    plotter = Simplex.Plotter(gfx)
    diagram = Simplex.PatchDiagram(plotter, lambda p: p)
    # Test 2: colour the whole simplex by its nearest raster point.
    diagram.plotter.clear()
    d = 500
    for y in range(d):
        w = d-y
        for x in range(w):
            # Barycentric coordinates (p0, p1, p2) summing to 1.
            p2 = y/float(d)
            p1 = (1.0-p2)*x/float(w)
            p0 = 1.0-p2-p1
            p = (p0, p1, p2)
            q = diagram._getNearest(p)
            c = diagram.colorTable[q]
            plotter.setColor(c)
            plotter.plot(p)
    plotter.setColor((0.0, 0.0, 0.0))
    for p in diagram.points:
        plotter.plot(p)
    gfx.waitUntilClosed()
# Payoff table for a 3-strategy "demand game": pay["ij"] is the payoff of
# strategy i when playing against strategy j.
DemandGame = { "11":1/3.0, "12":1/3.0, "13":1/3.0,
               "21":2/3.0, "22":0.0, "23":0.0,
               "31":1/2.0, "32":0.0, "33":1/2.0 }
def PopulationDynamics(pr, pay, e=0.0, noise=0.0):
    """population ratio, payofftable, correlation, noise ->
    new population ratio.
    """
    n1, n2, n3 = pr[0], pr[1], pr[2]
    # Fitness of each strategy under correlation factor e.
    p1=pay["11"]*(n1+e*(n2+n3))+pay["12"]*(n2-e*n2)+pay["13"]*(n3-e*n3)
    p2=pay["22"]*(n2+e*(n1+n3))+pay["21"]*(n1-e*n1)+pay["23"]*(n3-e*n3)
    p3=pay["33"]*(n3+e*(n1+n2))+pay["31"]*(n1-e*n1)+pay["32"]*(n2-e*n2)
    # NOTE(review): this looks like it was meant to read (p1+p2+p3)/3.0 —
    # p2 is missing and only p3 is divided.  P cancels out of the returned
    # ratios, so only the P > 0 gate is affected; confirm intended formula.
    P = p1+p3+p3/3.0
    if P > 0.0: n1 *= p1/P; n2 *= p2/P; n3 *= p3/P
    N = n1+n2+n3
    if N == 0.0: n1, n2, n3 = pr[0], pr[1], pr[2]; N = 1.0
    # Mix in a `noise` fraction of a random distribution over the three
    # strategies (two sorted uniform draws split the unit interval).
    m = N*noise
    a = random.random(); b = 1.0-random.random()
    if b < a: c = a; a = b; b = c
    n1 = n1 - n1*noise + m*a
    n2 = n2 - n2*noise + m*(b-a)
    n3 = n3 - n3*noise + m*(1.0-b)
    return (n1/N, n2/N, n3/N)
def TestSimplexDiagram():
getGR()
gfx = GR.Window(title="Simplex Diagram Test")
f = lambda p: PopulationDynamics(p,DemandGame, e=0.0, noise=0.0)
diag = Simplex.Diagram(gfx, f)
diag.show()
diag.setStyle(styleFlags = Simplex.PATCHES)
diag.show(5)
diag.setStyle(styleFlags = Simplex.TRAJECTORIES)
diag.changeColors((1.,1.,0.),(0.,0.,0.))
diag.show()
gfx.waitUntilClosed()
def TestTrajectoryDiagram():
    """Plot demand-game trajectories on a randomly rastered diagram."""
    getGR()
    window = GR.Window(title="Demand Game - Trajectory Diagram")
    plotter = Simplex.Plotter(window)
    dynamics = lambda p: PopulationDynamics(p, DemandGame, e=0.0, noise=0.0)
    diagram = Simplex.TrajectoryDiagram(plotter, dynamics,
                                        raster=Simplex.RASTER_RANDOM,
                                        redrawable=False)
    diagram.step(10)
    window.waitUntilClosed()
def TestVectorField():
    """Draw the demand-game dynamics as a vector field."""
    getGR()
    window = GR.Window(title="Demand Game - Vector Field")
    plotter = Simplex.Plotter(window)
    dynamics = lambda p: PopulationDynamics(p, DemandGame, e=0.0, noise=0.0)
    field = Simplex.VectorField(plotter, dynamics,
                                raster=Simplex.RASTER_DEFAULT)
    field.show()
    window.waitUntilClosed()
def TestPatchedTriangle():
    """Show the demand game as a PatchDiagram: first the plain set-up,
    then a progressive rendering built up in 25 single steps, with the
    fixed points marked at the end.
    """
    getGR()  # added for consistency with the other demos; no-op if GR is set
    dynamics = lambda p: PopulationDynamics(p, DemandGame, e=0.0, noise=0.0)

    # Window 1: the plain set-up.  (Title typo "progrssive" fixed.)
    gfx = GR.Window(title="Demand Game - setup for the progressive graph")
    tp = Simplex.Plotter(gfx)
    diag = Simplex.PatchDiagram(tp, dynamics, density=50)
    gfx.waitUntilClosed()

    # Window 2: progressive rendering, one step at a time.
    gfx = GR.Window(title="Demand Game - progressive graph")
    tp = Simplex.Plotter(gfx)
    diag = Simplex.PatchDiagram(tp, dynamics, density=50)
    for i in range(25):
        diag.step(1)
        gfx.refresh()
    diag.showFixedPoints((1.0, 0.5, 0.0))
    gfx.waitUntilClosed()
def TestPatchDensity():
    """Open one PatchDiagram window per density value from 53 to 74."""
    dynamics = lambda p: PopulationDynamics(p, DemandGame, e=0.0, noise=0.0)
    for density in range(53, 75):
        window = GR.Window(title="Test Patch Density")
        plotter = Simplex.Plotter(window)
        Simplex.PatchDiagram(plotter, dynamics, density)
        window.waitUntilClosed()
def TestSimplex():
    """Run the enabled Simplex demos in sequence.

    Currently disabled: TestNearest, TestPatchDensity.
    """
    getGR()
    for demo in (TestTrajectoryDiagram, TestVectorField, TestPatchedTriangle):
        demo()
def getGR():
    """Bind the global GR to a graphics backend and print its name.

    Uses the Java/AWT backend when running under Jython (detected by
    importing the ``java`` module), falling back to the Tkinter backend
    otherwise.  Does nothing if GR is already bound.
    """
    global GR
    if GR is None:  # was "GR == None"; identity test is the correct idiom
        try:
            # Importing ``java`` succeeds only under Jython.
            import java, pawt
            import awtGfx as GR
        except ImportError:
            # Narrowed from a bare ``except:`` so unrelated errors inside
            # the backend import are no longer silently swallowed.
            # Alternative backends (disabled): qtGfx, wxGfx, gtkGfx.
            import tkGfx as GR
        print(GR.__name__)
if __name__ == "__main__":
    # Manual test driver: enable the desired demo by (un)commenting the
    # calls below.  getGR() must run first to bind a graphics backend.
    getGR()
    # Test_gtkGfx()
    # Test_tkGfx()
    # Test_wxGfx()
    Test_Graph2()
    # TestSimplexDiagram()
    # Test_GraphLg()
/EdiHeadyTrack-0.1.6.tar.gz/EdiHeadyTrack-0.1.6/README.md |
[](https://www.gnu.org/licenses/gpl-3.0)
[](https://github.com/isDynamics/EdiHeadyTrack/actions/workflows/pytesting.yml)
[](https://codecov.io/gh/isDynamics/EdiHeadyTrack)
# EdiHeadyTrack
## Contributors
<a href="https://github.com/isDynamics/EdiHeadyTrack/graphs/contributors">
<img src="https://contrib.rocks/image?repo=isDynamics/EdiHeadyTrack" />
</a>
Made with [contrib.rocks](https://contrib.rocks).
<!-- ## Table of contents
- [EdiHeadyTrack](#ediheadytrack)
- [Table of contents](#table-of-contents)
- [About](#about)
- [Technologies](#technologies)
- [Setup](#setup)
- [Citation](#citation)
- [To do](#to-do)
- [Getting Involved](#getting-involved)
- [Citations](#citations) -->
## About
EdiHeadyTrack is a Python package for measuring head kinematics using markerless head pose detection methods. The current implementation primarily uses the FaceMesh module of MediaPipe's Python API for facial landmark detection alongside OpenCV for handling simple computer vision tasks.
## Technologies
Project is created with:
* Python 3.9.0
## Setup
EdiHeadyTrack is available on PyPI! Install using:
```bash
pip install EdiHeadyTrack
```
Or install the latest development version directly from GitHub!
```bash
pip install git+https:\\github.com/isDynamics/EdiHeadyTrack
```
## Usage
## Example
An example output from EdiHeadyTrack is shown below:

<!-- ## Change log
See [CHANGELOG.md](https://github.com/isDynamics/EdiHeadyTrack/blob/master/CHANGELOG.md).
## Contributing
See [CONTRIBUTING.md](https://github.com/isDynamics/EdiHeadyTrack/blob/master/CONTRIBUTING.md). -->
## Citation
If you use `EdiHeadyTrack` in you work, please cite the following publication:
<!-- > S. Heldens, A. Sclocco, H. Dreuning, B. van Werkhoven, P. Hijma, J. Maassen & R.V. van Nieuwpoort (2022), "litstudy: A Python package for literature reviews", SoftwareX 20 -->
As BibTeX:
<!-- ```
@article{litstudy,
title = {litstudy: A Python package for literature reviews},
journal = {SoftwareX},
volume = {20},
pages = {101207},
year = {2022},
issn = {2352-7110},
doi = {https://doi.org/10.1016/j.softx.2022.101207},
url = {https://www.sciencedirect.com/science/article/pii/S235271102200125X},
author = {S. Heldens and A. Sclocco and H. Dreuning and B. {van Werkhoven} and P. Hijma and J. Maassen and R. V. {van Nieuwpoort}},
}
``` -->
## To do
- [ ] Write unit tests.
- [ ] Upload to PyPI
- [ ] Update setup section of README
## Getting Involved
For any suggestions, please [create a new issue](https://github.com/isDynamics/EdiHeadyTrack/issues).
## Citations
<!-- 1. Teixeira-Dias, F. (1995). *Numerical simulation of tensile and shear tests in plane strain and plane stress* (Doctoral dissertation)
2. Teixeira-Dias, F. and Menezes, L.F. (2001), *Numerical aspects of finite element simulations of residual stresses in metal matrix composites*. Int. J. Numer. Meth. Engng., 50: 629-644.
[1]: https://www.researchgate.net/publication/237021517_Numerical_simulation_of_tensile_and_shear_tests_in_plane_strain_and_plane_stress
[2]: https://doi.org/10.1002/1097-0207(20010130)50:3<629::AID-NME41>3.0.CO;2-7 -->
| PypiClean |
/Flask_AdminLTE3-1.0.9-py3-none-any.whl/flask_adminlte3/static/plugins/codemirror/addon/fold/brace-fold.js |
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define(["../../lib/codemirror"], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
function bracketFolding(pairs) {
return function(cm, start) {
var line = start.line, lineText = cm.getLine(line);
function findOpening(pair) {
var tokenType;
for (var at = start.ch, pass = 0;;) {
var found = at <= 0 ? -1 : lineText.lastIndexOf(pair[0], at - 1);
if (found == -1) {
if (pass == 1) break;
pass = 1;
at = lineText.length;
continue;
}
if (pass == 1 && found < start.ch) break;
tokenType = cm.getTokenTypeAt(CodeMirror.Pos(line, found + 1));
if (!/^(comment|string)/.test(tokenType)) return {ch: found + 1, tokenType: tokenType, pair: pair};
at = found - 1;
}
}
function findRange(found) {
var count = 1, lastLine = cm.lastLine(), end, startCh = found.ch, endCh
outer: for (var i = line; i <= lastLine; ++i) {
var text = cm.getLine(i), pos = i == line ? startCh : 0;
for (;;) {
var nextOpen = text.indexOf(found.pair[0], pos), nextClose = text.indexOf(found.pair[1], pos);
if (nextOpen < 0) nextOpen = text.length;
if (nextClose < 0) nextClose = text.length;
pos = Math.min(nextOpen, nextClose);
if (pos == text.length) break;
if (cm.getTokenTypeAt(CodeMirror.Pos(i, pos + 1)) == found.tokenType) {
if (pos == nextOpen) ++count;
else if (!--count) { end = i; endCh = pos; break outer; }
}
++pos;
}
}
if (end == null || line == end) return null
return {from: CodeMirror.Pos(line, startCh),
to: CodeMirror.Pos(end, endCh)};
}
var found = []
for (var i = 0; i < pairs.length; i++) {
var open = findOpening(pairs[i])
if (open) found.push(open)
}
found.sort(function(a, b) { return a.ch - b.ch })
for (var i = 0; i < found.length; i++) {
var range = findRange(found[i])
if (range) return range
}
return null
}
}
CodeMirror.registerHelper("fold", "brace", bracketFolding([["{", "}"], ["[", "]"]]));
CodeMirror.registerHelper("fold", "brace-paren", bracketFolding([["{", "}"], ["[", "]"], ["(", ")"]]));
CodeMirror.registerHelper("fold", "import", function(cm, start) {
function hasImport(line) {
if (line < cm.firstLine() || line > cm.lastLine()) return null;
var start = cm.getTokenAt(CodeMirror.Pos(line, 1));
if (!/\S/.test(start.string)) start = cm.getTokenAt(CodeMirror.Pos(line, start.end + 1));
if (start.type != "keyword" || start.string != "import") return null;
// Now find closing semicolon, return its position
for (var i = line, e = Math.min(cm.lastLine(), line + 10); i <= e; ++i) {
var text = cm.getLine(i), semi = text.indexOf(";");
if (semi != -1) return {startCh: start.end, end: CodeMirror.Pos(i, semi)};
}
}
var startLine = start.line, has = hasImport(startLine), prev;
if (!has || hasImport(startLine - 1) || ((prev = hasImport(startLine - 2)) && prev.end.line == startLine - 1))
return null;
for (var end = has.end;;) {
var next = hasImport(end.line + 1);
if (next == null) break;
end = next.end;
}
return {from: cm.clipPos(CodeMirror.Pos(startLine, has.startCh + 1)), to: end};
});
CodeMirror.registerHelper("fold", "include", function(cm, start) {
function hasInclude(line) {
if (line < cm.firstLine() || line > cm.lastLine()) return null;
var start = cm.getTokenAt(CodeMirror.Pos(line, 1));
if (!/\S/.test(start.string)) start = cm.getTokenAt(CodeMirror.Pos(line, start.end + 1));
if (start.type == "meta" && start.string.slice(0, 8) == "#include") return start.start + 8;
}
var startLine = start.line, has = hasInclude(startLine);
if (has == null || hasInclude(startLine - 1) != null) return null;
for (var end = startLine;;) {
var next = hasInclude(end + 1);
if (next == null) break;
++end;
}
return {from: CodeMirror.Pos(startLine, has + 1),
to: cm.clipPos(CodeMirror.Pos(end))};
});
}); | PypiClean |
/Django-G11N-1.1.0.1.tar.gz/Django-G11N-1.1.0.1/django_g11n/migrations/0002_add_regions.py | from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('django_g11n', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Region',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dts_insert', models.DateTimeField(auto_now_add=True)),
('dts_update', models.DateTimeField(blank=True, null=True)),
('dts_delete', models.DateTimeField(blank=True, null=True)),
('numeric', models.CharField(max_length=3, unique=True)),
('english', models.CharField(max_length=64)),
('obsolete', models.BooleanField(default=False)),
('unsd_m49', models.BooleanField(default=False)),
('reference', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='django_g11n.Country')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='RegionChain',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dts_insert', models.DateTimeField(auto_now_add=True)),
('dts_update', models.DateTimeField(blank=True, null=True)),
('dts_delete', models.DateTimeField(blank=True, null=True)),
('lower', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='chains_region_is_lower', to='django_g11n.Region')),
('upper', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='chains_region_is_upper', to='django_g11n.Region')),
],
),
migrations.AlterUniqueTogether(
name='regionchain',
unique_together=set([('upper', 'lower')]),
),
] | PypiClean |
/Flask-Plugins-1.6.1.tar.gz/Flask-Plugins-1.6.1/flask_plugins/__init__.py | import os
import sys
import importlib
from collections import deque
from werkzeug.utils import cached_property, import_string
from jinja2 import Markup
from flask import json
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
from ._compat import itervalues, iteritems, intern_method
__version__ = "1.6.1"
__author__ = "Peter Justin"
class PluginError(Exception):
pass
def get_plugin(identifier):
"""Returns a plugin instance from the enabled plugins for the given
name.
"""
ctx = stack.top
return ctx.app.extensions.get('plugin_manager').plugins[identifier]
def get_plugin_from_all(identifier):
"""Returns a plugin instance from all plugins (includes also the disabled
ones) for the given name.
"""
ctx = stack.top
return ctx.app.extensions.get('plugin_manager').all_plugins[identifier]
def get_enabled_plugins():
"""Returns all enabled plugins as a list"""
ctx = stack.top
return ctx.app.extensions.get('plugin_manager').plugins.values()
def get_all_plugins():
"""Returns all plugins as a list including the disabled ones."""
ctx = stack.top
return ctx.app.extensions.get('plugin_manager').all_plugins.values()
class Plugin(object):
"""Every plugin should implement this class. It handles the registration
for the plugin hooks, creates or modifies additional relations or
registers plugin specific thinks
"""
#: If setup is called, this will be set to ``True``.
enabled = False
def __init__(self, path):
#: The plugin's root path. All the files in the plugin are under this
#: path.
self.path = os.path.abspath(path)
with open(os.path.join(path, 'info.json')) as fd:
self.info = i = json.load(fd)
#: The plugin's name, as given in info.json. This is the human
#: readable name.
self.name = i['name']
#: The plugin's identifier. This is an actual Python identifier,
#: and in most situations should match the name of the directory the
#: plugin is in.
self.identifier = i['identifier']
#: The human readable description. This is the default (English)
#: version.
self.description = i.get('description')
#: This is a dictionary of localized versions of the description.
#: The language codes are all lowercase, and the ``en`` key is
#: preloaded with the base description.
self.description_lc = dict(
(k.split('_', 1)[1].lower(), v) for k, v in i.items()
if k.startswith('description_')
)
self.description_lc.setdefault('en', self.description)
#: The author's name, as given in info.json. This may or may not
#: include their email, so it's best just to display it as-is.
self.author = i['author']
#: A short phrase describing the license, like "GPL", "BSD", "Public
#: Domain", or "Creative Commons BY-SA 3.0".
self.license = i.get('license')
#: A URL pointing to the license text online.
self.license_url = i.get('license_url')
#: The URL to the plugin's or author's Web site.
self.website = i.get('website')
#: The plugin's version string.
self.version = i.get('version')
#: Any additional options. These are entirely application-specific,
#: and may determine other aspects of the application's behavior.
self.options = i.get('options', {})
@cached_property
def license_text(self):
"""
The contents of the theme's license.txt file, if it exists. This is
used to display the full license text if necessary. (It is `None` if
there was not a license.txt.)
"""
lt_path = os.path.join(self.path, 'license.txt')
if os.path.exists(lt_path):
with open(lt_path) as fd:
return fd.read()
else:
return None
def setup(self): # pragma: no cover
"""This method is used to register all things that the plugin wants to
register.
"""
pass
def enable(self):
"""Enables the plugin by removing the 'DISABLED' file in the plugins
root directory, calls the ``setup()`` method and sets the plugin state
to true.
"""
disabled_file = os.path.join(self.path, "DISABLED")
try:
if os.path.exists(disabled_file):
os.remove(disabled_file)
if not self.enabled:
self.enabled = True
except:
raise
return self.enabled
def disable(self):
"""Disablesthe plugin.
The app usually has to be restarted after this action because
plugins _can_ register blueprints and in order to "unregister" them,
the application object has to be destroyed.
This is a limitation of Flask and if you want to know more about this
visit this link: http://flask.pocoo.org/docs/0.10/blueprints/
"""
disabled_file = os.path.join(self.path, "DISABLED")
try:
open(disabled_file, "a").close()
self.enabled = False
except:
raise
return self.enabled
def install(self): # pragma: no cover
"""Installs the things that must be installed in order to
have a fully and correctly working plugin. For example, something that
needs to be installed can be a relation and/or modify a existing
relation.
"""
pass
def uninstall(self): # pragma: no cover
"""Uninstalls all the things which were previously
installed by `install()`. A Plugin must override this method.
"""
pass
class PluginManager(object):
"""Collects all Plugins and maps the metadata to the plugin"""
def __init__(self, app=None, **kwargs):
"""Initializes the PluginManager. It is also possible to initialize the
PluginManager via a factory. For example::
plugin_manager = PluginManager()
plugin_manager.init_app(app)
:param app: The flask application.
:param plugin_folder: The plugin folder where the plugins resides.
:param base_app_folder: The base folder for the application. It is used
to build the plugins package name.
"""
# All enabled plugins
self._plugins = None
# All plugins - including the disabled ones
self._all_plugins = None
# All available plugins including the disabled ones
self._available_plugins = dict()
# All found plugins
self._found_plugins = dict()
if app is not None:
self.init_app(app, **kwargs)
def init_app(self, app, base_app_folder=None, plugin_folder="plugins"):
self._event_manager = EventManager(app)
app.jinja_env.globals["emit_event"] = self._event_manager.template_emit
app.plugin_manager = self
if not hasattr(app, 'extensions'):
app.extensions = {}
app.extensions['plugin_manager'] = self
self.app = app
if base_app_folder is None:
base_app_folder = self.app.root_path.split(os.sep)[-1]
self.plugin_folder = os.path.join(self.app.root_path, plugin_folder)
self.base_plugin_package = ".".join(
[base_app_folder, plugin_folder]
)
self.setup_plugins()
@property
def all_plugins(self):
"""Returns all plugins including disabled ones."""
if self._all_plugins is None:
self.load_plugins()
return self._all_plugins
@property
def plugins(self):
"""Returns all enabled plugins as a dictionary. You still need to
call the setup method to fully enable them."""
if self._plugins is None:
self.load_plugins()
return self._plugins
def load_plugins(self):
"""Loads all plugins. They are still disabled.
Returns a list with all loaded plugins. They should now be accessible
via self.plugins.
"""
self._plugins = {}
self._all_plugins = {}
for plugin_name, plugin_package in iteritems(self.find_plugins()):
try:
plugin_class = import_string(
"{}.{}".format(plugin_package, plugin_name)
)
except ImportError:
raise PluginError(
"Couldn't import {} Plugin. Please check if the "
"__plugin__ variable is set correctly.".format(plugin_name)
)
plugin_path = os.path.join(
self.plugin_folder,
os.path.basename(plugin_package.replace(".", "/"))
)
plugin_instance = plugin_class(plugin_path)
try:
if self._available_plugins[plugin_name]:
self._plugins[plugin_instance.identifier] = plugin_instance
except KeyError:
pass
self._all_plugins[plugin_instance.identifier] = plugin_instance
def find_plugins(self):
"""Find all possible plugins in the plugin folder."""
for item in os.listdir(self.plugin_folder):
if os.path.isdir(os.path.join(self.plugin_folder, item)) \
and os.path.exists(
os.path.join(self.plugin_folder, item, "__init__.py")):
plugin = ".".join([self.base_plugin_package, item])
# Same like from exammple.plugins.pluginname import __plugin__
tmp = importlib.import_module(plugin)
try:
# Add the plugin to the available plugins if the plugin
# isn't disabled
if not os.path.exists(
os.path.join(self.plugin_folder, item, "DISABLED")
):
self._available_plugins[tmp.__plugin__] = \
"{}".format(plugin)
self._found_plugins[tmp.__plugin__] = \
"{}".format(plugin)
except AttributeError:
pass
return self._found_plugins
def setup_plugins(self): # pragma: no cover
"""Runs the setup for all enabled plugins. Should be run after the
PluginManager has been initialized. Sets the state of the plugin to
enabled.
"""
for plugin in itervalues(self.plugins):
with self.app.app_context():
plugin.enabled = True
plugin.setup()
def install_plugins(self, plugins=None):
"""Installs one or more plugins.
:param plugins: An iterable with plugins. If no plugins are passed
it will try to install all plugins.
"""
for plugin in plugins or itervalues(self.plugins):
with self.app.app_context():
plugin.install()
def uninstall_plugins(self, plugins=None):
"""Uninstalls one or more plugins.
:param plugins: An iterable with plugins. If no plugins are passed
it will try to uninstall all plugins.
"""
for plugin in plugins or itervalues(self.plugins):
with self.app.app_context():
plugin.uninstall()
def enable_plugins(self, plugins=None):
"""Enables one or more plugins.
It either returns the amount of enabled plugins or
raises an exception caused by ``os.remove`` which says most likely
that you can't write on the filesystem.
:param plugins: An iterable with plugins.
"""
_enabled_count = 0
for plugin in plugins:
plugin.enable()
_enabled_count += 1
return _enabled_count
def disable_plugins(self, plugins=None):
"""Disables one or more plugins.
It either returns the amount of disabled plugins or
raises an exception caused by ``open`` which says most likely
that you can't write on the filesystem.
The app usually has to be restarted after this action because
plugins **can** register blueprints and in order to "unregister" them,
the application object has to be destroyed.
This is a limitation of Flask and if you want to know more about this
visit this link: http://flask.pocoo.org/docs/0.10/blueprints/
:param plugins: An iterable with plugins
"""
_disabled_count = 0
for plugin in plugins:
plugin.disable()
_disabled_count += 1
return _disabled_count
def connect_event(event, callback, position='after'):
"""Connect a callback to an event. Per default the callback is
appended to the end of the handlers but handlers can ask for a higher
privilege by setting `position` to ``'before'``.
Example usage::
def on_before_metadata_assembled(metadata):
metadata.append('<!-- IM IN UR METADATA -->')
# And in your setup() method do this:
connect_event('before-metadata-assembled',
on_before_metadata_assembled)
"""
ctx = stack.top
ctx.app.extensions.get('plugin_manager')._event_manager.connect(
event, callback, position
)
def emit_event(event, *args, **kwargs):
"""Emit a event and return a list of event results. Each called
function contributes one item to the returned list.
This is equivalent to the following call to :func:`iter_listeners`::
result = []
for listener in iter_listeners(event):
result.append(listener(*args, **kwargs))
"""
ctx = stack.top
return [
x(*args, **kwargs) for x in
ctx.app.extensions.get('plugin_manager')._event_manager.iter(event)
]
def iter_listeners(event):
"""Return an iterator for all the listeners for the event provided."""
ctx = stack.top
return ctx.app.extensions.get('plugin_manager')._event_manager.iter(event)
class EventManager(object):
"""Helper class that handles event listeners and event emitting.
This is *not* a public interface. Always use the `emit_event` or
`connect_event` or the `iter_listeners` functions to access it.
"""
def __init__(self, app):
self.app = app
self._listeners = {}
self._last_listener = 0
def connect(self, event, callback, position='after'):
"""Connect a callback to an event."""
assert position in ('before', 'after'), 'invalid position'
listener_id = self._last_listener
event = intern_method(event)
if event not in self._listeners:
self._listeners[event] = deque([callback])
elif position == 'after':
self._listeners[event].append(callback)
elif position == 'before':
self._listeners[event].appendleft(callback)
self._last_listener += 1
return listener_id
def remove(self, event, callback):
"""Remove a callback again."""
try:
self._listeners[event].remove(callback)
except (KeyError, ValueError):
pass
def iter(self, event):
"""Return an iterator for all listeners of a given name."""
if event not in self._listeners:
return iter(())
return iter(self._listeners[event])
def template_emit(self, event, *args, **kwargs):
"""Emits events for the template context."""
results = []
for f in self.iter(event):
rv = f(*args, **kwargs)
if rv is not None:
results.append(rv)
return Markup(TemplateEventResult(results))
class TemplateEventResult(list):
"""A list subclass for results returned by the event listener that
concatenates the results if converted to string, otherwise it works
exactly like any other list.
"""
def __init__(self, items):
list.__init__(self, items)
def __unicode__(self):
return u''.join(map(str, self))
def __str__(self):
if sys.version_info[0] >= 3:
return self.__unicode__()
else:
return self.__unicode__().encode('utf-8') | PypiClean |
/BIT_framework-0.0.2-py3-none-any.whl/BIT_DL/pytorch/utils/rnn.py | """RNN helpers for PyTorch models."""
from typing import List, Optional, Tuple, TypeVar, Union
import torch
from texar.torch.core.cell_wrappers import RNNCellBase
from texar.torch.utils.shapes import mask_sequences
from texar.torch.utils.utils import map_structure, map_structure_zip, no_map
__all__ = [
"reverse_sequence",
"dynamic_rnn",
"bidirectional_dynamic_rnn",
]
State = TypeVar('State')
def reverse_sequence(inputs: torch.Tensor,
seq_lengths: Union[torch.LongTensor, List[int]],
time_major: bool) -> torch.Tensor:
r"""Reverses variable length slices.
This op first slices input along the dimension batch_axis, and for each
slice i, reverses the first seq_lengths[i] elements along the dimension
seq_axis.
The elements of seq_lengths must obey seq_lengths[i] <=
input.dims[seq_dim], and seq_lengths must be a vector of length
input.dims[batch_dim].
The output slice i along dimension batch_axis is then given by input slice
i, with the first seq_lengths[i] slices along dimension seq_axis reversed.
Args:
inputs: A Tensor. The input to reverse.
seq_lengths: A Tensor. Must be one of the following types: int32,
int64. 1-D with length input.dims(batch_dim) and
max(seq_lengths) <= input.dims(seq_dim)
time_major: The shape format of the ``inputs`` and ``outputs`` Tensors.
If true, these ``Tensors`` must be shaped
``[max_time, batch_size, depth]``. If false, these ``Tensors`` must
be shaped ``[batch_size, max_time, depth]``.
Using ``time_major = True`` is a bit more efficient because it
avoids transposes at the beginning and end of the RNN calculation.
However, most TensorFlow data is batch-major, so by
default this functionb accepts input and emits output
in batch-major form.
Returns:
A ``Tensor``. Has the same type as input.
"""
if time_major:
inputs = inputs.permute(1, 0, 2)
batch_size = inputs.shape[0]
outputs = inputs.clone()
for i in range(batch_size):
outputs[i][0:seq_lengths[i]] = torch.flip(
inputs[i][0:seq_lengths[i]], dims=(0,))
if time_major:
outputs = outputs.permute(1, 0, 2)
return outputs
def bidirectional_dynamic_rnn(
cell_fw: RNNCellBase[State],
cell_bw: RNNCellBase[State],
inputs: torch.Tensor,
sequence_length: Optional[Union[torch.LongTensor, List[int]]] = None,
initial_state_fw: Optional[State] = None,
initial_state_bw: Optional[State] = None,
time_major: bool = False) -> Tuple[Tuple[torch.Tensor, torch.Tensor],
Tuple[State, State]]:
r"""Creates a dynamic version of bidirectional recurrent neural network.
Takes input and builds independent forward and backward RNNs. The
input_size of forward and backward cell must match. The initial state for
both directions is zero by default (but can be set optionally) and no
intermediate states are ever returned -- the network is fully unrolled
for the given (passed in) length(s) of the sequence(s) or completely
unrolled if length(s) is not given.
Args:
cell_fw: An instance of RNNCell, to be used for forward direction.
cell_bw: An instance of RNNCell, to be used for backward direction.
inputs: The RNN inputs.
If time_major == False (default), this must be a tensor of shape:
``[batch_size, max_time, ...]``, or a nested tuple of such elements.
If time_major == True, this must be a tensor of shape:
``[max_time, batch_size, ...]``, or a nested tuple of such elements.
sequence_length: (optional) An int32/int64 tensor, size
``[batch_size]``, containing the actual lengths for each of the
sequences in
the batch. If not provided, all batch entries are assumed
to be full sequences; and time reversal is applied from time
``0`` to ``max_time`` for each sequence.
initial_state_fw: (optional) An initial state for the forward RNN.
This must be a tensor of appropriate type and shape
``[batch_size, cell_fw.state_size]``.
If ``cell_fw.state_size`` is a tuple, this should be a tuple of
tensors having shapes ``[batch_size, s]``
for ``s`` in ``cell_fw.state_size``.
initial_state_bw: (optional) Same as for ``initial_state_fw``, but using
the corresponding properties of ``cell_bw``.
time_major: The shape format of the ``inputs`` and ``outputs`` Tensors.
If true, these ``Tensors`` must be shaped
``[max_time, batch_size, depth]``.
If false, these ``Tensors`` must be shaped
``[batch_size, max_time, depth]``.
Using ``time_major = True`` is a bit more efficient because it
avoids transposes at the beginning and end of the RNN calculation.
However, most TensorFlow data is batch-major, so by
default this function accepts input and emits output
in batch-major form.
Returns:
A tuple (outputs, output_states) where:
outputs: A tuple (output_fw, output_bw) containing the forward and
the backward rnn output ``Tensor``.
If time_major == False (default),
output_fw will be a ``Tensor`` shaped:
``[batch_size, max_time, cell_fw.output_size]``
and output_bw will be a ``Tensor`` shaped:
``[batch_size, max_time, cell_bw.output_size]``.
If time_major == True,
output_fw will be a ``Tensor`` shaped:
``[max_time, batch_size, cell_fw.output_size]``
and output_bw will be a ``Tensor`` shaped:
``[max_time, batch_size, cell_bw.output_size]``.
It returns a tuple instead of a single concatenated ``Tensor``,
unlike in the ``bidirectional_rnn``. If the concatenated
one is preferred, the forward and backward outputs can
be concatenated as ``tf.concat(outputs, 2)``.
output_states: A tuple (output_state_fw, output_state_bw) containing
the forward and the backward final states of bidirectional rnn.
"""
output_fw, output_state_fw = dynamic_rnn(cell=cell_fw,
inputs=inputs,
sequence_length=sequence_length,
initial_state=initial_state_fw,
time_major=time_major)
if time_major:
time_steps = inputs.shape[0]
batch_size = inputs.shape[1]
else:
time_steps = inputs.shape[1]
batch_size = inputs.shape[0]
if sequence_length is None:
sequence_length = torch.tensor([time_steps] * batch_size,
dtype=torch.int32,
device=inputs.device)
# Backward direction
inputs_reverse = reverse_sequence(inputs=inputs,
seq_lengths=sequence_length,
time_major=time_major)
tmp, output_state_bw = dynamic_rnn(cell=cell_bw,
inputs=inputs_reverse,
sequence_length=sequence_length,
initial_state=initial_state_bw,
time_major=time_major)
output_bw = reverse_sequence(inputs=tmp,
seq_lengths=sequence_length,
time_major=time_major)
outputs = (output_fw, output_bw)
output_states = (output_state_fw, output_state_bw)
return outputs, output_states
def dynamic_rnn(
        cell: RNNCellBase[State],
        inputs: torch.Tensor,
        sequence_length: Optional[Union[torch.LongTensor, List[int]]] = None,
        initial_state: Optional[State] = None,
        time_major: bool = False) -> Tuple[torch.Tensor, State]:
    r"""Creates a recurrent neural network specified by RNNCell ``cell``.

    Performs fully dynamic unrolling of ``inputs``.

    Args:
        cell: An instance of RNNCell.
        inputs: The RNN inputs.
            If ``time_major == False`` (default), this must be a ``Tensor``
            of shape ``[batch_size, max_time, depth]``.
            If ``time_major == True``, this must be a ``Tensor`` of shape
            ``[max_time, batch_size, depth]``.
            NOTE(review): the docs this was ported from also mention nested
            tuples of tensors, but this implementation only handles a single
            3-D tensor (see the ``permute`` below) — confirm before passing
            structured inputs.
        sequence_length: (optional) An int32/int64 tensor or Python list
            sized ``[batch_size]``. Used to copy-through state and zero-out
            outputs when past a batch element's sequence length.
        initial_state: (optional) An initial state for the RNN.
            If ``cell.state_size`` is an integer, this must be a ``Tensor``
            of appropriate type and shape ``[batch_size, cell.state_size]``.
            If ``cell.state_size`` is a tuple, this should be a tuple of
            tensors having shapes ``[batch_size, s]`` for ``s`` in
            ``cell.state_size``. Defaults to ``cell.zero_state``.
        time_major: The shape format of the ``inputs`` and ``outputs``
            tensors. If True, these must be shaped
            ``[max_time, batch_size, depth]``; if False,
            ``[batch_size, max_time, depth]``. Using ``time_major = True``
            is a bit more efficient because it avoids transposes at the
            beginning and end of the RNN calculation; batch-major is the
            default because most data is batch-major.

    Returns:
        A pair ``(outputs, state)`` where ``outputs`` is the RNN output
        tensor (batch-major or time-major, matching ``time_major``) and
        ``state`` is the final state, matching the structure of
        ``cell.state_size``.

    Raises:
        ValueError: If ``sequence_length`` is not a vector of length
            ``batch_size``.
    """
    # All internal computation is done in time-major layout
    # [time, batch, depth]; transpose on the way in and out if needed.
    if not time_major:
        # (B, T, D) => (T, B, D)
        inputs = inputs.permute(1, 0, 2)
    time_steps = inputs.shape[0]
    batch_size = inputs.shape[1]
    if sequence_length is not None:
        if not isinstance(sequence_length, torch.Tensor):
            sequence_length = torch.tensor(sequence_length,
                                           dtype=torch.int32,
                                           device=inputs.device)
        if sequence_length.dim() != 1:
            # Wrap `shape` in a 1-tuple: torch.Size is a tuple subclass, so
            # `"%s" % shape` would try to consume multiple format arguments
            # and itself raise a TypeError for non-1-D shapes.
            raise ValueError(
                "sequence_length must be a vector of length batch_size, "
                "but saw shape: %s" % (sequence_length.shape,))
        if sequence_length.shape != torch.Size([batch_size]):
            # Bug fix: this message was previously built as a *tuple* of two
            # separately-formatted strings due to a misplaced comma, so the
            # raised error was garbled. Build one message instead.
            raise ValueError(
                "Expected shape for Tensor sequence_length is %s "
                "but saw shape: %s" % (batch_size, sequence_length.shape))
    else:
        # No lengths supplied: treat every sequence as full length.
        sequence_length = torch.tensor([time_steps] * batch_size,
                                       dtype=torch.int32,
                                       device=inputs.device)
    if initial_state is not None:
        state = initial_state
    else:
        state = cell.zero_state(batch_size=batch_size)
    (outputs, final_state) = _dynamic_rnn_loop(
        cell, inputs, state, sequence_length=sequence_length)
    # Outputs of _dynamic_rnn_loop are always shaped [time, batch, depth].
    # If we are performing batch-major calculations, transpose output back
    # to shape [batch, time, depth].
    if not time_major:
        # (T, B, D) => (B, T, D)
        outputs = outputs.permute(1, 0, 2)
    return outputs, final_state
def _dynamic_rnn_loop(cell: RNNCellBase[State],
                      inputs: torch.Tensor,
                      initial_state: State,
                      sequence_length: torch.LongTensor) \
        -> Tuple[torch.Tensor, State]:
    r"""Internal implementation of Dynamic RNN.

    Args:
        cell: An instance of RNNCell.
        inputs: A ``Tensor`` of shape ``[time, batch_size, input_size]``,
            or a nested tuple of such elements.
        initial_state: A ``Tensor`` of shape ``[batch_size, state_size]``,
            or if ``cell.state_size`` is a tuple, then this should be a tuple
            of tensors having shapes ``[batch_size, s]`` for ``s`` in
            ``cell.state_size``.
        sequence_length: An ``int32`` ``Tensor`` of shape ``[batch_size]``.

    Returns:
        Tuple ``(final_outputs, final_state)``.
        final_outputs:
            A ``Tensor`` of shape ``[time, batch_size, cell.output_size]``,
            with outputs past each sequence's length zeroed out.
        final_state:
            A ``Tensor``, or possibly nested tuple of Tensors, matching
            in length and shapes to ``initial_state``; the state each batch
            element had after its last *valid* step.
    """
    state = initial_state
    time_steps = inputs.shape[0]
    all_outputs = []
    # Per-step state history: mirrors the (possibly nested) structure of
    # `state`, with each leaf replaced by a plain list that accumulates one
    # tensor per time step. `no_map` keeps map_structure from recursing into
    # the list itself.
    all_state = map_structure(lambda _: no_map(list), state)
    for i in range(time_steps):
        output, state = cell(inputs[i], state)
        all_outputs.append(output)
        map_structure_zip(lambda xs, x: xs.append(x), (all_state, state))
    # TODO: Do not compute everything regardless of sequence_length
    final_outputs = torch.stack(all_outputs, dim=0)
    # Zero-out outputs past each batch element's sequence length.
    final_outputs = mask_sequences(final_outputs,
                                   sequence_length=sequence_length,
                                   time_major=True)
    final_state = map_structure(lambda _: no_map(list), state)
    # pylint: disable=cell-var-from-loop
    # Our use case is fine because the function is called immediately and
    # exclusively in the current iteration of the loop.
    for batch_idx, time_idx in enumerate(sequence_length.tolist()):
        if time_idx > 0:
            # Final state is the state after the last valid step, not after
            # `time_steps` steps.
            map_structure_zip(
                lambda xs, x: xs.append(x[time_idx - 1][batch_idx]),
                (final_state, all_state))
        else:
            # Zero-length sequence: carry through the initial state.
            map_structure_zip(
                lambda xs, x: xs.append(x[batch_idx]),
                (final_state, initial_state))
    # pylint: enable=cell-var-from-loop
    final_state = map_structure(
        lambda x: torch.stack(x, dim=0), final_state)
    return final_outputs, final_state
# netket/models/equivariant.py
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Tuple, Any
import numpy as np
from jax import numpy as jnp
from flax import linen as nn
from jax.nn.initializers import zeros, lecun_normal
from jax.scipy.special import logsumexp
from netket.utils import HashableArray, warn_deprecation, deprecate_dtype
from netket.utils.types import NNInitFunc, Array
from netket.utils.group import PermutationGroup
from netket.graph import Graph, Lattice
from netket.jax import logsumexp_cplx, is_complex_dtype
from netket.nn.activation import reim_selu
from netket.nn.symmetric_linear import (
DenseSymmMatrix,
DenseSymmFFT,
DenseEquivariantFFT,
DenseEquivariantIrrep,
)
# Same as netket.nn.symmetric_linear.default_equivariant_initializer
# All GCNN layers have kernels of shape [out_features, in_features, n_symm];
# in_axis=1/out_axis=0 tell lecun_normal which axes are fan-in/fan-out.
default_gcnn_initializer = lecun_normal(in_axis=1, out_axis=0)
def identity(x):
    """Return ``x`` unchanged; default (no-op) ``output_activation``."""
    return x
@deprecate_dtype
class GCNN_FFT(nn.Module):
    r"""Implements a GCNN using a fast fourier transform over the translation group.
    The group convolution can be written in terms of translational convolutions with
    symmetry transformed filters as described in ` Cohen et. *al* <http://proceedings.mlr.press/v48/cohenc16.pdf>`_
    The translational convolutions are then implemented with Fast Fourier Transforms.
    """
    symmetries: HashableArray
    """A group of symmetry operations (or array of permutation indices) over which the network should be equivariant.
    Numpy/Jax arrays must be wrapped into an :class:`netket.utils.HashableArray`.
    """
    product_table: HashableArray
    """Product table describing the algebra of the symmetry group
    Numpy/Jax arrays must be wrapped into an :class:`netket.utils.HashableArray`.
    """
    shape: Tuple
    """Shape of the translation group"""
    layers: int
    """Number of layers (not including sum layer over output)."""
    features: Tuple
    """Number of features in each layer starting from the input. If a single number is given,
    all layers will have the same number of features."""
    characters: HashableArray
    """Array specifying the characters of the desired symmetry representation"""
    # NOTE(review): this class defaults to `float` while the sibling GCNN_*
    # classes default to `np.float64` — presumably equivalent after jax dtype
    # canonicalization, but worth confirming/unifying.
    param_dtype: Any = float
    """The dtype of the weights."""
    activation: Any = reim_selu
    """The nonlinear activation function between hidden layers."""
    output_activation: Any = identity
    """The nonlinear activation before the output. Defaults to the identity."""
    input_mask: Array = None
    """Optional array of shape `(n_sites,)` used to restrict the convolutional
    kernel. Only parameters with mask :math:'\ne 0' are used. For best performance a
    boolean mask should be used."""
    hidden_mask: Array = None
    """Optional array of shape `(n_symm,)` where `(n_symm,)` = `len(graph.automorphisms())`
    used to restrict the convolutional kernel. Only parameters with mask :math:'\ne 0' are used.
    For best performance a boolean mask should be used"""
    equal_amplitudes: bool = False
    """If true forces all basis states to have the same amplitude by setting `Re[logψ] = 0`"""
    use_bias: bool = True
    """if True uses a bias in all layers."""
    precision: Any = None
    """numerical precision of the computation see :class:`jax.lax.Precision` for details."""
    kernel_init: NNInitFunc = default_gcnn_initializer
    """Initializer for the kernels of all layers."""
    bias_init: NNInitFunc = zeros
    """Initializer for the biases of all layers."""
    complex_output: bool = True
    """Use complex-valued `logsumexp`. Necessary when parameters are real but some
    `characters` are negative."""

    def setup(self):
        # Number of group elements; fixes the symmetry axis of feature maps.
        self.n_symm = np.asarray(self.symmetries).shape[0]
        # First layer: symmetrized embedding from the site basis onto the
        # group, implemented with an FFT over the translation group.
        self.dense_symm = DenseSymmFFT(
            space_group=self.symmetries,
            shape=self.shape,
            features=self.features[0],
            param_dtype=self.param_dtype,
            use_bias=self.use_bias,
            kernel_init=self.kernel_init,
            bias_init=self.bias_init,
            precision=self.precision,
            mask=self.input_mask,
        )
        # Remaining layers: group convolutions. One fewer than `layers`
        # because the first layer is the embedding above.
        self.equivariant_layers = [
            DenseEquivariantFFT(
                product_table=self.product_table,
                shape=self.shape,
                features=self.features[layer + 1],
                use_bias=self.use_bias,
                param_dtype=self.param_dtype,
                precision=self.precision,
                kernel_init=self.kernel_init,
                bias_init=self.bias_init,
                mask=self.hidden_mask,
            )
            for layer in range(self.layers - 1)
        ]

    @nn.compact
    def __call__(self, x):
        if x.ndim < 3:
            x = jnp.expand_dims(x, -2)  # add a feature dimension
        x = self.dense_symm(x)
        for layer in range(self.layers - 1):
            x = self.activation(x)
            x = self.equivariant_layers[layer](x)
        x = self.output_activation(x)
        # Character-weighted log-sum-exp over the feature and symmetry axes
        # projects onto the chosen irrep. The complex-valued variant avoids
        # NaNs when characters are negative and parameters are real.
        if self.complex_output:
            x = logsumexp_cplx(x, axis=(-2, -1), b=jnp.asarray(self.characters))
        else:
            x = logsumexp(x, axis=(-2, -1), b=jnp.asarray(self.characters))
        # equal_amplitudes: keep only the phase, forcing |psi| = 1.
        if self.equal_amplitudes:
            return 1j * jnp.imag(x)
        else:
            return x
@deprecate_dtype
class GCNN_Irrep(nn.Module):
    r"""Implements a GCNN by projecting onto irreducible
    representations of the group. The projection onto
    the group is implemented with matrix multiplication
    Layers act on a feature maps of shape [batch_size, in_features, n_symm] and
    returns a feature map of shape [batch_size, features, n_symm].
    The input and the output are related by
    .. math ::
        y^{(i)}_g = \sum_{h,j} f^{(j)}_h W^{(ij)}_{h^{-1}g}.
    Note that this switches the convention of Cohen et al. to use an actual group
    convolution, but this doesn't affect equivariance.
    The convolution is implemented in terms of a group Fourier transform.
    Therefore, the group structure is represented internally as the set of its
    irrep matrices. After Fourier transforming, the convolution translates to
    .. math ::
        y^{(i)}_\rho = \sum_j f^{(j)}_\rho W^{(ij)}_\rho,
    where all terms are d x d matrices rather than numbers, and the juxtaposition
    stands for matrix multiplication.
    """
    symmetries: HashableArray
    """A group of symmetry operations (or array of permutation indices) over which the network should be equivariant.
    Numpy/Jax arrays must be wrapped into an :class:`netket.utils.HashableArray`.
    """
    irreps: Tuple[HashableArray]
    """List of irreducible representation matrices"""
    layers: int
    """Number of layers (not including sum layer over output)."""
    features: Tuple
    """Number of features in each layer starting from the input. If a single number is given,
    all layers will have the same number of features."""
    characters: HashableArray
    """Array specifying the characters of the desired symmetry representation"""
    param_dtype: Any = np.float64
    """The dtype of the weights."""
    activation: Any = reim_selu
    """The nonlinear activation function between hidden layers."""
    output_activation: Any = identity
    """The nonlinear activation before the output."""
    input_mask: Array = None
    """Optional array of shape `(n_sites,)` used to restrict the convolutional
    kernel. Only parameters with mask :math:'\ne 0' are used. For best performance a
    boolean mask should be used."""
    hidden_mask: Array = None
    """Optional array of shape `(n_symm,)` where `(n_symm,)` = `len(graph.automorphisms())`
    used to restrict the convolutional kernel. Only parameters with mask :math:'\ne 0' are used.
    For best performance a boolean mask should be used"""
    equal_amplitudes: bool = False
    """If true forces all basis states to have the same amplitude by setting `Re[logψ] = 0`"""
    use_bias: bool = True
    """if True uses a bias in all layers."""
    precision: Any = None
    """numerical precision of the computation see :class:`jax.lax.Precision` for details."""
    kernel_init: NNInitFunc = default_gcnn_initializer
    """Initializer for the kernels of all layers."""
    bias_init: NNInitFunc = zeros
    """Initializer for the biases of all layers."""
    complex_output: bool = True
    """Use complex-valued `logsumexp`. Necessary when parameters are real but some
    `characters` are negative."""

    def setup(self):
        # Number of group elements; fixes the symmetry axis of feature maps.
        self.n_symm = np.asarray(self.symmetries).shape[0]
        # First layer: symmetrized embedding, realized as an explicit
        # (dense) kernel matrix rather than an FFT.
        self.dense_symm = DenseSymmMatrix(
            symmetries=self.symmetries,
            features=self.features[0],
            param_dtype=self.param_dtype,
            use_bias=self.use_bias,
            kernel_init=self.kernel_init,
            bias_init=self.bias_init,
            precision=self.precision,
            mask=self.input_mask,
        )
        # Remaining layers: group convolutions via irrep (group Fourier)
        # projection; one fewer than `layers` because of the embedding above.
        self.equivariant_layers = [
            DenseEquivariantIrrep(
                irreps=self.irreps,
                features=self.features[layer + 1],
                use_bias=self.use_bias,
                param_dtype=self.param_dtype,
                precision=self.precision,
                kernel_init=self.kernel_init,
                bias_init=self.bias_init,
                mask=self.hidden_mask,
            )
            for layer in range(self.layers - 1)
        ]

    @nn.compact
    def __call__(self, x):
        if x.ndim < 3:
            x = jnp.expand_dims(x, -2)  # add a feature dimension
        x = self.dense_symm(x)
        for layer in range(self.layers - 1):
            x = self.activation(x)
            x = self.equivariant_layers[layer](x)
        x = self.output_activation(x)
        # Character-weighted log-sum-exp over the feature and symmetry axes
        # projects onto the chosen irrep; the complex variant avoids NaNs
        # when characters are negative and parameters are real.
        if self.complex_output:
            x = logsumexp_cplx(x, axis=(-2, -1), b=jnp.asarray(self.characters))
        else:
            x = logsumexp(x, axis=(-2, -1), b=jnp.asarray(self.characters))
        # equal_amplitudes: keep only the phase, forcing |psi| = 1.
        if self.equal_amplitudes:
            return 1j * jnp.imag(x)
        else:
            return x
@deprecate_dtype
class GCNN_Parity_FFT(nn.Module):
    r"""Implements a GCNN using a fast fourier transform over the translation group.
    The group convolution can be written in terms of translational convolutions with
    symmetry transformed filters as described in ` Cohen et. *al* <http://proceedings.mlr.press/v48/cohenc16.pdf>`_
    The translational convolutions are then implemented with Fast Fourier Transforms.
    This model adds parity symmetry under the transformation x->-x
    """
    symmetries: HashableArray
    """A group of symmetry operations (or array of permutation indices) over which the network should be equivariant.
    Numpy/Jax arrays must be wrapped into an :class:`netket.utils.HashableArray`.
    """
    product_table: HashableArray
    """Product table describing the algebra of the symmetry group
    Numpy/Jax arrays must be wrapped into an :class:`netket.utils.HashableArray`.
    """
    shape: Tuple
    """Shape of the translation group"""
    layers: int
    """Number of layers (not including sum layer over output)."""
    features: Tuple
    """Number of features in each layer starting from the input. If a single number is given,
    all layers will have the same number of features."""
    characters: HashableArray
    """Array specifying the characters of the desired symmetry representation"""
    parity: int
    """Integer specifying the eigenvalue with respect to parity"""
    param_dtype: Any = np.float64
    """The dtype of the weights."""
    activation: Any = reim_selu
    """The nonlinear activation function between hidden layers."""
    output_activation: Any = identity
    """The nonlinear activation before the output."""
    input_mask: Array = None
    """Optional array of shape `(n_sites,)` used to restrict the convolutional
    kernel. Only parameters with mask :math:'\ne 0' are used. For best performance a
    boolean mask should be used."""
    hidden_mask: Array = None
    """Optional array of shape `(n_symm,)` where `(n_symm,)` = `len(graph.automorphisms())`
    used to restrict the convolutional kernel. Only parameters with mask :math:'\ne 0' are used.
    For best performance a boolean mask should be used"""
    equal_amplitudes: bool = False
    """If true forces all basis states to have the same amplitude by setting Re[psi] = 0"""
    use_bias: bool = True
    """if True uses a bias in all layers."""
    extra_bias: bool = False
    """Deprecated. If True, uses bias in parity-flip layers too. Required for using
    parameters saved before PR#1030, but hinders performance.
    See also `nk.models.update_GCNN_parity`."""
    precision: Any = None
    """numerical precision of the computation see :class:`jax.lax.Precision` for details."""
    kernel_init: NNInitFunc = default_gcnn_initializer
    """Initializer for the kernels of all layers."""
    bias_init: NNInitFunc = zeros
    """Initializer for the biases of all layers."""
    complex_output: bool = True
    """Use complex-valued `logsumexp`. Necessary when parameters are real but some
    `characters` are negative."""

    def setup(self):
        # TODO: eventually remove this warning
        # supports a deprecated attribute
        if self.extra_bias:
            warn_deprecation(
                "`extra_bias` is detrimental for performance and is deprecated. "
                "Please switch to the default `extra_bias=False`. Previously saved "
                "parameters can be migrated using `nk.models.update_GCNN_parity`."
            )
        # Number of group elements; fixes the symmetry axis of feature maps.
        self.n_symm = np.asarray(self.symmetries).shape[0]
        # Shared first layer: symmetrized FFT embedding applied to both the
        # original and the parity-flipped input in __call__.
        self.dense_symm = DenseSymmFFT(
            space_group=self.symmetries,
            shape=self.shape,
            features=self.features[0],
            param_dtype=self.param_dtype,
            use_bias=self.use_bias,
            kernel_init=self.kernel_init,
            bias_init=self.bias_init,
            precision=self.precision,
            mask=self.input_mask,
        )
        # Group convolutions acting on the "same-parity" channel.
        self.equivariant_layers = [
            DenseEquivariantFFT(
                product_table=self.product_table,
                shape=self.shape,
                features=self.features[layer + 1],
                use_bias=self.use_bias,
                param_dtype=self.param_dtype,
                precision=self.precision,
                kernel_init=self.kernel_init,
                bias_init=self.bias_init,
                mask=self.hidden_mask,
            )
            for layer in range(self.layers - 1)
        ]
        # Group convolutions mixing in the parity-flipped channel.
        self.equivariant_layers_flip = [
            DenseEquivariantFFT(
                product_table=self.product_table,
                shape=self.shape,
                features=self.features[layer + 1],
                # this would bias the same outputs as self.equivariant
                use_bias=self.extra_bias and self.use_bias,
                param_dtype=self.param_dtype,
                precision=self.precision,
                kernel_init=self.kernel_init,
                bias_init=self.bias_init,
                mask=self.hidden_mask,
            )
            for layer in range(self.layers - 1)
        ]

    @nn.compact
    def __call__(self, x):
        if x.ndim < 3:
            x = jnp.expand_dims(x, -2)  # add a feature dimension
        # Two parallel streams: the input and its parity image (x -> -x).
        x_flip = self.dense_symm(-1 * x)
        x = self.dense_symm(x)
        for layer in range(self.layers - 1):
            x = self.activation(x)
            x_flip = self.activation(x_flip)
            # Cross-mix the two streams; 1/sqrt(2) presumably keeps the
            # variance of the sum comparable to its inputs.
            x_new = (
                self.equivariant_layers[layer](x)
                + self.equivariant_layers_flip[layer](x_flip)
            ) / np.sqrt(2)
            x_flip = (
                self.equivariant_layers[layer](x_flip)
                + self.equivariant_layers_flip[layer](x)
            ) / np.sqrt(2)
            # copy so the next iteration's x_flip update cannot alias x_new
            x = jnp.array(x_new, copy=True)
        # Stack both parity channels along the symmetry axis so a single
        # character-weighted logsumexp sums over group AND parity elements.
        x = jnp.concatenate((x, x_flip), -1)
        x = self.output_activation(x)
        # Duplicate the characters for the flipped channel, with sign given
        # by the requested parity eigenvalue (+1 or -1).
        if self.parity == 1:
            par_chars = jnp.expand_dims(
                jnp.concatenate(
                    (jnp.array(self.characters), jnp.array(self.characters)), 0
                ),
                (0, 1),
            )
        else:
            par_chars = jnp.expand_dims(
                jnp.concatenate(
                    (jnp.array(self.characters), -1 * jnp.array(self.characters)), 0
                ),
                (0, 1),
            )
        if self.complex_output:
            x = logsumexp_cplx(x, axis=(-2, -1), b=par_chars)
        else:
            x = logsumexp(x, axis=(-2, -1), b=par_chars)
        # equal_amplitudes: keep only the phase, forcing |psi| = 1.
        if self.equal_amplitudes:
            return 1j * jnp.imag(x)
        else:
            return x
@deprecate_dtype
class GCNN_Parity_Irrep(nn.Module):
    r"""Implements a GCNN by projecting onto irreducible
    representations of the group. The projection onto
    the group is implemented with matrix multiplication
    Layers act on a feature maps of shape [batch_size, in_features, n_symm] and
    returns a feature map of shape [batch_size, features, n_symm].
    The input and the output are related by
    .. math ::
        y^{(i)}_g = \sum_{h,j} f^{(j)}_h W^{(ij)}_{h^{-1}g}.
    Note that this switches the convention of Cohen et al. to use an actual group
    convolution, but this doesn't affect equivariance.
    The convolution is implemented in terms of a group Fourier transform.
    Therefore, the group structure is represented internally as the set of its
    irrep matrices. After Fourier transforming, the convolution translates to
    .. math ::
        y^{(i)}_\rho = \sum_j f^{(j)}_\rho W^{(ij)}_\rho,
    where all terms are d x d matrices rather than numbers, and the juxtaposition
    stands for matrix multiplication.
    This model adds parity symmetry under the transformation x->-x
    """
    symmetries: HashableArray
    """A group of symmetry operations (or array of permutation indices) over which the network should be equivariant.
    Numpy/Jax arrays must be wrapped into an :class:`netket.utils.HashableArray`.
    """
    irreps: Tuple[HashableArray]
    """List of irreducible representation matrices"""
    layers: int
    """Number of layers (not including sum layer over output)."""
    features: Tuple
    """Number of features in each layer starting from the input. If a single number is given,
    all layers will have the same number of features."""
    characters: HashableArray
    """Array specifying the characters of the desired symmetry representation"""
    parity: int
    """Integer specifying the eigenvalue with respect to parity"""
    param_dtype: Any = np.float64
    """The dtype of the weights."""
    activation: Any = reim_selu
    """The nonlinear activation function between hidden layers."""
    output_activation: Any = identity
    """The nonlinear activation before the output."""
    input_mask: Array = None
    """Optional array of shape `(n_sites,)` used to restrict the convolutional
    kernel. Only parameters with mask :math:'\ne 0' are used. For best performance a
    boolean mask should be used."""
    hidden_mask: Array = None
    """Optional array of shape `(n_symm,)` where `(n_symm,)` = `len(graph.automorphisms())`
    used to restrict the convolutional kernel. Only parameters with mask :math:'\ne 0' are used.
    For best performance a boolean mask should be used"""
    equal_amplitudes: bool = False
    """If true forces all basis states to have the same amplitude by setting Re[psi] = 0"""
    use_bias: bool = True
    """if True uses a bias in all layers."""
    extra_bias: bool = False
    """Deprecated. If True, uses bias in parity-flip layers too. Required for using
    parameters saved before PR#1030, but hinders performance.
    See also `nk.models.update_GCNN_parity`."""
    precision: Any = None
    """numerical precision of the computation see :class:`jax.lax.Precision` for details."""
    kernel_init: NNInitFunc = default_gcnn_initializer
    """Initializer for the kernels of all layers."""
    bias_init: NNInitFunc = zeros
    """Initializer for the biases of all layers."""
    complex_output: bool = True
    """Use complex-valued `logsumexp`. Necessary when parameters are real but some
    `characters` are negative."""

    def setup(self):
        # TODO: eventually remove this warning
        # supports a deprecated attribute
        if self.extra_bias:
            warn_deprecation(
                "`extra_bias` is detrimental for performance and is deprecated. "
                "Please switch to the default `extra_bias=False`. Previously saved "
                "parameters can be migrated using `nk.models.update_GCNN_parity`."
            )
        # Number of group elements; fixes the symmetry axis of feature maps.
        self.n_symm = np.asarray(self.symmetries).shape[0]
        # Shared first layer: symmetrized dense embedding applied to both
        # the original and the parity-flipped input in __call__.
        self.dense_symm = DenseSymmMatrix(
            symmetries=self.symmetries,
            features=self.features[0],
            param_dtype=self.param_dtype,
            use_bias=self.use_bias,
            kernel_init=self.kernel_init,
            bias_init=self.bias_init,
            precision=self.precision,
            mask=self.input_mask,
        )
        # Group convolutions acting on the "same-parity" channel.
        self.equivariant_layers = [
            DenseEquivariantIrrep(
                irreps=self.irreps,
                features=self.features[layer + 1],
                use_bias=self.use_bias,
                param_dtype=self.param_dtype,
                precision=self.precision,
                kernel_init=self.kernel_init,
                bias_init=self.bias_init,
                mask=self.hidden_mask,
            )
            for layer in range(self.layers - 1)
        ]
        # Group convolutions mixing in the parity-flipped channel.
        self.equivariant_layers_flip = [
            DenseEquivariantIrrep(
                irreps=self.irreps,
                features=self.features[layer + 1],
                # this would bias the same outputs as self.equivariant
                use_bias=self.extra_bias and self.use_bias,
                param_dtype=self.param_dtype,
                precision=self.precision,
                kernel_init=self.kernel_init,
                bias_init=self.bias_init,
                mask=self.hidden_mask,
            )
            for layer in range(self.layers - 1)
        ]

    @nn.compact
    def __call__(self, x):
        if x.ndim < 3:
            x = jnp.expand_dims(x, -2)  # add a feature dimension
        # Two parallel streams: the input and its parity image (x -> -x).
        x_flip = self.dense_symm(-1 * x)
        x = self.dense_symm(x)
        for layer in range(self.layers - 1):
            x = self.activation(x)
            x_flip = self.activation(x_flip)
            # Cross-mix the two streams; 1/sqrt(2) presumably keeps the
            # variance of the sum comparable to its inputs.
            x_new = (
                self.equivariant_layers[layer](x)
                + self.equivariant_layers_flip[layer](x_flip)
            ) / np.sqrt(2)
            x_flip = (
                self.equivariant_layers[layer](x_flip)
                + self.equivariant_layers_flip[layer](x)
            ) / np.sqrt(2)
            # copy so the next iteration's x_flip update cannot alias x_new
            x = jnp.array(x_new, copy=True)
        # Stack both parity channels along the symmetry axis so a single
        # character-weighted logsumexp sums over group AND parity elements.
        x = jnp.concatenate((x, x_flip), -1)
        x = self.output_activation(x)
        # Duplicate the characters for the flipped channel, with sign given
        # by the requested parity eigenvalue (+1 or -1).
        if self.parity == 1:
            par_chars = jnp.expand_dims(
                jnp.concatenate(
                    (jnp.array(self.characters), jnp.array(self.characters)), 0
                ),
                (0, 1),
            )
        else:
            par_chars = jnp.expand_dims(
                jnp.concatenate(
                    (jnp.array(self.characters), -1 * jnp.array(self.characters)), 0
                ),
                (0, 1),
            )
        if self.complex_output:
            x = logsumexp_cplx(x, axis=(-2, -1), b=par_chars)
        else:
            x = logsumexp(x, axis=(-2, -1), b=par_chars)
        # equal_amplitudes: keep only the phase, forcing |psi| = 1.
        if self.equal_amplitudes:
            return 1j * jnp.imag(x)
        else:
            return x
@deprecate_dtype
def GCNN(
    symmetries=None,
    product_table=None,
    irreps=None,
    point_group=None,
    mode="auto",
    shape=None,
    layers=None,
    features=None,
    characters=None,
    parity=None,
    param_dtype=np.float64,
    complex_output=True,
    input_mask=None,
    hidden_mask=None,
    **kwargs,
):
    r"""Implements a Group Convolutional Neural Network (G-CNN) that outputs a wavefunction
    that is invariant over a specified symmetry group.
    The G-CNN is described in `Cohen et al. <http://proceedings.mlr.press/v48/cohenc16.pdf>`_
    and applied to quantum many-body problems in `Roth et al. <https://arxiv.org/pdf/2104.05085.pdf>`_ .
    The G-CNN alternates convolution operations with pointwise non-linearities. The first
    layer is symmetrized linear transform given by DenseSymm, while the other layers are
    G-convolutions given by DenseEquivariant. The hidden layers of the G-CNN are related by
    the following equation:
    .. math ::
        {\bf f}^{i+1}_h = \Gamma( \sum_h W_{g^{-1} h} {\bf f}^i_h).
    Args:
        symmetries: A specification of the symmetry group. Can be given by a
            :class:`netket.graph.Graph`, a :class:`netket.utils.group.PermutationGroup`, or an
            array :code:`[n_symm, n_sites]` specifying the permutations
            corresponding to symmetry transformations of the lattice.
        product_table: Product table describing the algebra of the symmetry group.
            Only needs to be specified if mode='fft' and symmetries is specified as an array.
        irreps: List of 3D tensors that project onto irreducible representations of the symmetry group.
            Only needs to be specified if mode='irreps' and symmetries is specified as an array.
        point_group: The point group, from which the space group is built. If symmetries is a
            graph the default point group is overwritten.
        mode: string "fft", "irreps" or "auto" specifying whether to use a fast
            fourier transform over the translation group or a fourier transform using
            the irreducible representations.
        shape: A tuple specifying the dimensions of the translation group.
        layers: Number of layers (not including sum layer over output).
        features: Number of features in each layer starting from the input. If a single
            number is given, all layers will have the same number of features.
        characters: Array specifying the characters of the desired symmetry representation.
        parity: Optional argument with value +/-1 that specifies the eigenvalue
            with respect to parity (only use on two level systems).
        param_dtype: The dtype of the weights.
        activation: The nonlinear activation function between hidden layers. Defaults to
            :class:`netket.nn.activation.reim_selu` .
        output_activation: The nonlinear activation before the output.
        equal_amplitudes: If True forces all basis states to have equal amplitude
            by setting :math:`\Re(\psi) = 0` .
        use_bias: If True uses a bias in all layers.
        precision: Numerical precision of the computation see :class:`jax.lax.Precision` for details.
        kernel_init: Initializer for the kernels of all layers. Defaults to
            :code:`lecun_normal(in_axis=1, out_axis=0)` which guarantees the correct variance of the
            output. See the documentation of :func:`flax.linen.initializers.lecun_normal`
            for more information.
        bias_init: Initializer for the biases of all layers.
        complex_output: If True, ensures that the network output is always complex.
            Necessary when network parameters are real but some `characters` are negative.
        input_mask: Optional array of shape `(n_sites,)` used to restrict the convolutional
            kernel. Only parameters with mask :math:'\ne 0' are used. For best performance a
            boolean mask should be used.
        hidden_mask: Optional array of shape `(n_symm,)` where `(n_symm,)` = `len(graph.automorphisms())`
            used to restrict the convolutional kernel. Only parameters with mask :math:'\ne 0' are used.
            For best performance a boolean mask should be used.
    """
    # Masks are stored on the (dataclass) modules, so they must be hashable.
    if input_mask is not None:
        input_mask = HashableArray(input_mask)
    if hidden_mask is not None:
        hidden_mask = HashableArray(hidden_mask)
    # Resolve `symmetries` into a permutation group `sg` and a concrete mode.
    if isinstance(symmetries, Lattice) and (
        point_group is not None or symmetries._point_group is not None
    ):
        # With graph try to find point group, otherwise default to automorphisms
        shape = tuple(symmetries.extent)
        sg = symmetries.space_group(point_group)
        if mode == "auto":
            mode = "fft"
    elif isinstance(symmetries, Graph):
        sg = symmetries.automorphisms()
        if mode == "auto":
            mode = "irreps"
        if mode == "fft":
            # Bug fix: the two string fragments used to concatenate without a
            # separating space ("specifiedin order").
            raise ValueError(
                "When requesting 'mode=fft' a valid point group must be "
                "specified in order to construct the space group"
            )
    elif isinstance(symmetries, PermutationGroup):
        # If we get a group and default to irrep projection
        if mode == "auto":
            mode = "irreps"
        sg = symmetries
    else:
        # `symmetries` is a plain array of permutations; the mode is fixed by
        # whichever of `irreps` / `product_table` the caller supplied.
        if irreps is not None and (mode == "irreps" or mode == "auto"):
            mode = "irreps"
            sg = symmetries
            irreps = tuple(HashableArray(irrep) for irrep in irreps)
        elif product_table is not None and (mode == "fft" or mode == "auto"):
            mode = "fft"
            sg = symmetries
            product_table = HashableArray(product_table)
        else:
            raise ValueError(
                "Specification of symmetries is wrong or incompatible with selected mode"
            )
    if mode == "fft":
        if shape is None:
            raise TypeError(
                "When requesting `mode=fft`, the shape of the translation group must be specified. "
                "Either supply the `shape` keyword argument or pass a `netket.graph.Graph` object to "
                "the symmetries keyword argument."
            )
        else:
            shape = tuple(shape)
    if isinstance(features, int):
        features = (features,) * layers
    if characters is None:
        # Default to the trivial representation (all characters equal 1).
        characters = HashableArray(np.ones(len(np.asarray(sg))))
    else:
        # Real parameters + real output cannot represent the sign of
        # negative characters; force the user to opt into complex output.
        if (
            not jnp.iscomplexobj(characters)
            and not is_complex_dtype(param_dtype)
            and not complex_output
            and jnp.any(characters < 0)
        ):
            raise ValueError(
                "`complex_output` must be used with real parameters and negative "
                "characters to avoid NaN errors."
            )
        characters = HashableArray(characters)
    # Dispatch to the concrete module; the *_Parity variants additionally
    # symmetrize under x -> -x.
    if mode == "fft":
        sym = HashableArray(np.asarray(sg))
        if product_table is None:
            product_table = HashableArray(sg.product_table)
        if parity:
            return GCNN_Parity_FFT(
                symmetries=sym,
                product_table=product_table,
                layers=layers,
                features=features,
                characters=characters,
                shape=shape,
                parity=parity,
                param_dtype=param_dtype,
                complex_output=complex_output,
                hidden_mask=hidden_mask,
                input_mask=input_mask,
                **kwargs,
            )
        else:
            return GCNN_FFT(
                symmetries=sym,
                product_table=product_table,
                layers=layers,
                features=features,
                characters=characters,
                shape=shape,
                param_dtype=param_dtype,
                complex_output=complex_output,
                hidden_mask=hidden_mask,
                input_mask=input_mask,
                **kwargs,
            )
    elif mode in ["irreps", "auto"]:
        sym = HashableArray(np.asarray(sg))
        if irreps is None:
            irreps = tuple(HashableArray(irrep) for irrep in sg.irrep_matrices())
        if parity:
            return GCNN_Parity_Irrep(
                symmetries=sym,
                irreps=irreps,
                layers=layers,
                features=features,
                characters=characters,
                parity=parity,
                param_dtype=param_dtype,
                complex_output=complex_output,
                hidden_mask=hidden_mask,
                input_mask=input_mask,
                **kwargs,
            )
        else:
            return GCNN_Irrep(
                symmetries=sym,
                irreps=irreps,
                layers=layers,
                features=features,
                characters=characters,
                param_dtype=param_dtype,
                complex_output=complex_output,
                hidden_mask=hidden_mask,
                input_mask=input_mask,
                **kwargs,
            )
    else:
        # Bug fix: message previously read "'fft',irreps'" (missing quote)
        # and carried trailing dataset residue.
        raise ValueError(
            f"Unknown mode={mode}. Valid modes are 'fft', 'irreps' or 'auto'."
        )
import os
import numpy as np
import example_parallel_network_parameterspace as ps
from example_parallel_network_parameters import PSET
import matplotlib.pyplot as plt
from matplotlib.ticker import ScalarFormatter
# Matplotlib color cycle: each hue appears twice so that paired curves
# (the same timing category plotted with and without extracellular
# predictions, drawn as a line plus a separate errorbar call) share color.
color_cycle = [
    u'#1f77b4',
    u'#1f77b4',
    u'#ff7f0e',
    u'#ff7f0e',
    u'#2ca02c',
    u'#2ca02c',
    u'#d62728',
    u'#d62728',
]
plt.rcParams['axes.prop_cycle'] = plt.cycler(color=color_cycle)
# NOTE(review): these margin settings are repeated in the rcParams update
# below; this first call is redundant but harmless.
plt.rcParams.update({
    'axes.xmargin': 0.0,
    'axes.ymargin': 0.0,
})
# Font sizes used throughout both figures.
fontsize = 14
titlesize = 16
legendsize = 12
plt.rcParams.update({
    'axes.xmargin': 0.0,
    'axes.ymargin': 0.0,
    'axes.labelsize': fontsize,
    'axes.titlesize': titlesize,
    'figure.titlesize': fontsize,
    'font.size': fontsize,
    'legend.fontsize': legendsize,
})
def get_plot_array(x, shape=(3, -1)):
    """Reshape flat per-parameter-set values into a 2D array.

    Parameters
    ----------
    x : array_like
        Flat sequence of values gathered across parameter-space iterations.
    shape : tuple of int
        Target shape; one dimension may be -1 and is then inferred.

    Returns
    -------
    numpy.ndarray
        ``x`` as an array reshaped to ``shape``.
    """
    # asarray avoids an unnecessary copy when x is already an ndarray
    return np.asarray(x).reshape(shape)
plt.close('all')


def _append_times(ps_id, params, pops, conn, sim):
    """Read one simulation's ``log.txt`` and append its section timings.

    Appends the 'parameters', 'population', 'connections' and 'simulation'
    wall-clock times to the respective lists, substituting ``np.nan`` for
    missing entries or unreadable files (failed simulations).
    """
    try:
        logfile = os.path.join(ps.OUTPUTDIR, ps_id, 'log.txt')
        keys = np.loadtxt(logfile, usecols=[0], dtype=str)
        values = np.loadtxt(logfile, usecols=[1])
        data = dict(zip(keys, values))
        # fill in lists, per-key fallbacks for possibly failed simulations
        for key, container, tag in zip(
                ['parameters', 'population', 'connections', 'simulation'],
                [params, pops, conn, sim],
                ['param.', 'pop.', 'conn.', 'sim.']):
            try:
                container.append(data[key])
            except KeyError:
                container.append(np.nan)
                print('missing {} time id {}'.format(tag, ps_id))
    except IOError:
        print('fail to load param id {}'.format(ps_id))
        for container in [params, pops, conn, sim]:
            container.append(np.nan)


def _plot_timings(ax, x, shape, params, pops, conn, sim, ls, COMPUTE_LFP,
                  marker=None):
    """Plot mean +/- std of each timing category on log-log axes.

    Each category issues one loglog call followed by one errorbar call; the
    duplicated entries in the color cycle make both draw in the same hue,
    so this call order must be preserved.
    """
    suffix = ', {} E.P'.format('w.' if COMPUTE_LFP else 'w.o.')
    for data_, prefix in zip([params, pops, conn, sim],
                             ['par.', 'pop.', 'conn.', 'sim.']):
        y = get_plot_array(data_, shape)
        # NOTE: basex/basey are the pre-matplotlib-3.3 spellings of `base`
        if marker is None:
            ax.loglog(x, y.mean(axis=1), ls,
                      label=prefix + suffix,
                      lw=2, ms=8,
                      basex=2, basey=2,
                      )
        else:
            ax.loglog(x, y.mean(axis=1), ls, marker=marker,
                      label=prefix + suffix,
                      lw=2, ms=8,
                      basex=2, basey=2,
                      )
        ax.errorbar(x, y.mean(axis=1), yerr=y.std(axis=1),
                    fmt='none', capsize=4, label='_nolegend_', lw=2, ms=8)


# Two benchmark figures:
#   figure_7 - strong scaling (fixed network size, varying MPI pool size)
#   figure_8 - network-size scaling (fixed MPI pool size)
for (PS0, PS1), figname in zip(
        [['MPI', 'POP'], ['MPI5', 'POP5']], ['figure_7', 'figure_8']):
    fig, axes = plt.subplots(1, 3, sharey=True, figsize=(16, 6))
    fig.subplots_adjust(left=0.05, right=0.85, bottom=0.15, top=0.9)

    # panel A: scaling with MPI pool size and fixed network size
    ax = axes[0]
    for ls, COMPUTE_LFP in zip(['-o', ':o'], [True, False]):
        # containers
        MPISIZE = []
        params = []
        pops = []
        conn = []
        sim = []
        ps_ids = []  # kept for debugging/inspection
        for pset in ps.PSPACES[PS0].iter_inner():
            if pset.COMPUTE_LFP == COMPUTE_LFP:
                # get identifier
                ps_id = ps.get_unique_id(pset)
                # set reference network size
                PSET.populationParameters['POP_SIZE'] = np.array(
                    pset.POP_SIZE_REF)
                # create x-axis
                MPISIZE.append(pset.MPISIZE)
                _append_times(ps_id, params, pops, conn, sim)
                ps_ids.append(ps_id)
        # one column per global seed
        shape = (-1, len(ps.PSPACES[PS0]['GLOBALSEED']))
        x = get_plot_array(MPISIZE, shape)[:, 0]
        _plot_timings(ax, x, shape, params, pops, conn, sim, ls, COMPUTE_LFP)
    ax.minorticks_off()
    ax.axis(ax.axis('tight'))
    ax.grid('on', which='major')
    ax.set_xticks(np.unique(MPISIZE))
    ax.set_xticklabels(np.unique(MPISIZE), rotation=90)
    ax.set_yticks([2.**x for x in range(16)])
    ax.set_yticklabels([2**x for x in range(16)])
    for axis in [ax.xaxis, ax.yaxis]:
        axis.set_major_formatter(ScalarFormatter())
    ax.set_xlabel(r'$N_\mathrm{MPI}$', labelpad=0)
    ax.set_ylabel('time (s)', labelpad=0)
    ax.set_title(r'$N_\mathrm{%s}^{(1)}=%i, N_\mathrm{%s}^{(1)}=%i$' %
                 (PSET.populationParameters['m_type'][0].replace('_', '\\_'),
                  PSET.populationParameters['POP_SIZE'][0],
                  PSET.populationParameters['m_type'][1].replace('_', r'\_'),
                  PSET.populationParameters['POP_SIZE'][1]))
    ax.text(-0.05, 1.05, 'A',
            horizontalalignment='center',
            verticalalignment='center',
            fontsize=16, fontweight='demibold',
            transform=ax.transAxes)
    axis = ax.axis()

    # panels B & C: scaling with network size and fixed MPI pool size
    for ls, COMPUTE_LFP in zip(['-', ':'], [True, False]):
        for axindex, marker, PRESERVE in zip(
                [1, 2], ['o', 'o'], ['total', 'indegree']):
            ax = axes[axindex]
            # containers
            POPSCALING = []
            params = []
            pops = []
            conn = []
            sim = []
            ps_ids = []  # kept for debugging/inspection
            for pset in ps.PSPACES[PS1].iter_inner():
                if pset.COMPUTE_LFP == COMPUTE_LFP \
                        and pset.PRESERVE == PRESERVE:
                    # get identifier
                    ps_id = ps.get_unique_id(pset)
                    # create x-axis
                    POPSCALING.append(pset.POPSCALING)
                    _append_times(ps_id, params, pops, conn, sim)
                    ps_ids.append(ps_id)
            shape = (-1, len(ps.PSPACES[PS1]['GLOBALSEED']))
            x = get_plot_array(POPSCALING, shape)[:, 0]
            _plot_timings(ax, x, shape, params, pops, conn, sim, ls,
                          COMPUTE_LFP, marker=marker)
            ax.axis(ax.axis('tight'))
            ax.grid('on', which='major')
            ax.set_xticks(np.unique(POPSCALING))
            ax.set_xticklabels(np.unique(POPSCALING), rotation=90)
            for axis in [ax.xaxis, ax.yaxis]:
                axis.set_major_formatter(ScalarFormatter())
            ax.set_xlabel(r'relative network size $b$', labelpad=10)
            # BUG FIX: the conditional expression previously applied to the
            # entire concatenated title (the ternary binds looser than '+'),
            # so the title was blank whenever PRESERVE != 'indegree'.
            # Parenthesize so only the in-degree formula is conditional.
            ax.set_title(
                r'$N_\mathrm{%s}^{(1)}=%i, N_\mathrm{%s}^{(1)}=%i$,' %
                (PSET.populationParameters['m_type'][0].replace('_', '\\_'),
                 PSET.populationParameters['POP_SIZE'][0],
                 PSET.populationParameters['m_type'][1].replace('_', r'\_'),
                 PSET.populationParameters['POP_SIZE'][1],
                 ) +
                '\n' +
                r'$N_\mathrm{MPI}=%i$, ' % pset.MPISIZE
                + ('$k_{YX}^{(b)}=%sK_{YX}^{(1)}/N_Y^{(1)}$' % 'r'
                   if PRESERVE == 'indegree' else ''))
            ax.text(-0.05, 1.05, 'BC'[axindex - 1],
                    horizontalalignment='center',
                    verticalalignment='center',
                    fontsize=16, fontweight='demibold',
                    transform=ax.transAxes)
            axis = ax.axis()
    # legend anchored next to the last (rightmost) axes
    ax.legend(bbox_to_anchor=(1.01, 0), loc='lower left', borderaxespad=0.)
    fig.savefig(figname + '.pdf', bbox_inches='tight')
/DI_engine-0.4.9-py3-none-any.whl/dizoo/box2d/bipedalwalker/config/bipedalwalker_ppopg_config.py | from easydict import EasyDict
import torch
import torch.nn as nn
from ding.model.common import FCEncoder, ReparameterizationHead
# Main experiment configuration for PPO policy gradient on BipedalWalker-v3.
bipedalwalker_ppo_config = dict(
    exp_name='bipedalwalker_ppopg',
    env=dict(
        env_id='BipedalWalker-v3',
        collector_env_num=8,
        evaluator_env_num=5,
        # (bool) Scale output action into legal range.
        act_scale=True,
        n_evaluator_episode=5,
        # evaluation return at which training stops
        stop_value=500,
        rew_clip=True,
    ),
    policy=dict(
        cuda=True,
        action_space='continuous',
        model=dict(
            # BipedalWalker observation/action dimensionalities
            obs_shape=24,
            action_shape=4,
        ),
        learn=dict(
            epoch_per_collect=10,
            batch_size=64,
            learning_rate=3e-4,
            entropy_weight=0.0001,
            clip_ratio=0.2,
            adv_norm=True,
        ),
        collect=dict(
            n_episode=16,
            discount_factor=0.99,
            collector=dict(get_train_sample=True),
        ),
    ),
)
bipedalwalker_ppo_config = EasyDict(bipedalwalker_ppo_config)
main_config = bipedalwalker_ppo_config
# Component-registry configuration: which env/policy/collector classes to use.
bipedalwalker_ppo_create_config = dict(
    env=dict(
        type='bipedalwalker',
        import_names=['dizoo.box2d.bipedalwalker.envs.bipedalwalker_env'],
    ),
    env_manager=dict(type='subprocess'),
    policy=dict(type='ppo_pg'),
    collector=dict(type='episode'),
)
bipedalwalker_ppo_create_config = EasyDict(bipedalwalker_ppo_create_config)
create_config = bipedalwalker_ppo_create_config
class PPOPGContinuousModel(nn.Module):
    """Policy network for continuous-action PPO policy gradient.

    A one-layer tanh encoder followed by a reparameterization head that
    emits the parameters of a Gaussian policy distribution.
    """

    def __init__(self, obs_shape, action_shape):
        super(PPOPGContinuousModel, self).__init__()
        # observation encoder: single 64-unit tanh layer
        self.encoder = nn.Sequential(nn.Linear(obs_shape, 64), nn.Tanh())
        # head producing the Gaussian policy parameterization
        self.head = ReparameterizationHead(
            hidden_size=64,
            output_size=action_shape,
            layer_num=2,
            sigma_type='conditioned',
            activation=nn.Tanh(),
        )

    def forward(self, inputs):
        """Encode observations and return the policy output dict."""
        return {'logit': self.head(self.encoder(inputs))}
if __name__ == "__main__":
    # or you can enter `ding -m serial_onpolicy -c bipedalwalker_ppo_config.py -s 0`
    from ding.entry import serial_pipeline_onpolicy
    from copy import deepcopy
    # run three seeds for statistics; each run gets its own exp_name suffix
    for seed in [1, 2, 3]:
        new_main_config = deepcopy(main_config)
        new_main_config.exp_name += "_seed{}".format(seed)
        model = PPOPGContinuousModel(new_main_config.policy.model.obs_shape, new_main_config.policy.model.action_shape)
        serial_pipeline_onpolicy(
            [new_main_config, deepcopy(create_config)], seed=seed, max_env_step=int(5e6), model=model
        )
/ConferenceCorpus-0.1.1.tar.gz/ConferenceCorpus-0.1.1/corpus/datasources/tibkat.py | from corpus.eventcorpus import EventDataSource, EventDataSourceConfig
from corpus.event import EventSeriesManager,EventSeries, Event, EventManager
from lodstorage.storageconfig import StorageConfig
from corpus.datasources.tibkatftx import FTXParser
from corpus.utils.textparse import Textparse
import re
from corpus.utils.progress import Progress
from pathlib import Path
class Tibkat(EventDataSource):
    '''
    TIBKAT event meta data access
    https://www.tib.eu
    Technische Informationsbibliothek (TIB)
    Public datasets available via
    https://tib.eu/data/rdf
    '''
    # shared data-source configuration used by both managers below
    sourceConfig = EventDataSourceConfig(lookupId="tibkat", name="tib.eu", url="https://www.tib.eu", title="TIBKAT", tableSuffix="tibkat",locationAttribute="location")
    home = str(Path.home())
    # use a symbolic link if you want a different location
    ftxroot=f"{home}/.conferencecorpus/tibkat/ftx"
    # Basisklassifikation prefixes to keep when filtering documents
    wantedbks=["54"] # Informatik
    # maximum number of FTX XML dump files to parse
    limitFiles=10000

    def __init__(self):
        '''
        construct me
        '''
        super().__init__(TibkatEventManager(),TibkatEventSeriesManager(),Tibkat.sourceConfig)
class TibkatEventManager(EventManager):
    '''
    manage TIBKAT derived scientific events
    '''

    def __init__(self, config: StorageConfig = None):
        '''
        Constructor

        Args:
            config(StorageConfig): the storage configuration to use
        '''
        self.source = "tibkat"
        super(TibkatEventManager, self).__init__(name="TIBKATEvents", sourceConfig=Tibkat.sourceConfig, clazz=TibkatEvent, config=config)

    def configure(self):
        '''
        configure me
        '''

    def isWantedBk(self, bk) -> bool:
        '''
        check whether the given Basisklassifikation bk is in the list of wanted ones

        Args:
            bk(str): the Basisklassifikation to check

        Returns:
            bool: True if bk starts with one of Tibkat.wantedbks
        '''
        # idiomatic any() instead of a manual loop with early return
        return any(bk.startswith(wantedbk) for wantedbk in Tibkat.wantedbks)

    def isInWantedBkDocuments(self, document) -> bool:
        '''
        filter for wanted Basisklassifikation

        Args:
            document(XMLEntity): the document to check

        Returns:
            bool: True if no filter is configured, or the document carries
            at least one wanted Basisklassifikation
        '''
        if len(Tibkat.wantedbks) == 0:
            return True
        # documents without any bk attribute are not wanted
        bk = getattr(document, "bk", None)
        if bk is None:
            return False
        # normalize a scalar bk to a one-element list and test uniformly
        bks = bk if isinstance(bk, list) else [bk]
        return any(self.isWantedBk(bkvalue) for bkvalue in bks)

    def getListOfDicts(self) -> list:
        '''
        get my list of dicts

        Returns:
            list: raw event records parsed from the FTX XML dump files
        '''
        lod = []
        self.ftxParser = FTXParser(Tibkat.ftxroot)
        xmlFiles = self.ftxParser.ftxXmlFiles()
        # limit the number of dump files to parse
        xmlFiles = xmlFiles[:Tibkat.limitFiles]
        progress = Progress(progressSteps=1, expectedTotal=len(xmlFiles), msg=f"parsing {len(xmlFiles)} TIBKAT FTX files", showMemory=True)
        for xmlFile in xmlFiles:
            for document in self.ftxParser.parse(xmlFile, local=True):
                if self.isInWantedBkDocuments(document):
                    rawEvent = document.asDict()
                    TibkatEvent.postProcessLodRecord(rawEvent)
                    lod.append(rawEvent)
            progress.next()
        progress.done()
        return lod
class TibkatEvent(Event):
    '''
    event derived from TIBKAT
    '''

    def __init__(self):
        '''constructor '''
        super().__init__()

    @staticmethod
    def postProcessLodRecord(rawEvent: dict):
        '''
        fix the given raw Event

        Sets source and eventId and merges information parsed from the
        description field(s) into the record. If multiple descriptions are
        available, the parse result with the shortest acronym wins.

        Args:
            rawEvent(dict): the raw event record to fix (modified in place)
        '''
        rawEvent["source"] = "tibkat"
        rawEvent["eventId"] = rawEvent["ppn"]
        if "description" in rawEvent:
            description = rawEvent["description"]
            if isinstance(description, list):
                # collect parse results that have both acronym and ordinal
                parseResults = []
                for descEntry in description:
                    parseResult = TibkatEvent.parseDescription(descEntry)
                    if 'acronym' in parseResult and 'ordinal' in parseResult:
                        parseResults.append(parseResult)
                # prefer the result with the shortest acronym
                # (lambda parameter renamed: it previously shadowed rawEvent)
                parseResultsByAcronymLen = sorted(
                    parseResults, key=lambda result: len(result['acronym']))
                if len(parseResultsByAcronymLen) > 0:
                    TibkatEvent.mergeDict(rawEvent, parseResultsByAcronymLen[0])
            else:
                TibkatEvent.mergeDescription(description, rawEvent)

    @classmethod
    def mergeDict(cls, rawEvent: dict, more: dict):
        '''
        merge the given dict more into the given raw event record

        Args:
            rawEvent(dict): the target record (modified in place)
            more(dict): key/values to merge in, overriding existing keys
        '''
        # dict.update does exactly what the previous manual loop did
        rawEvent.update(more)

    @classmethod
    def mergeDescription(cls, description: str, rawEvent: dict):
        '''
        parse the given description and merge the result into rawEvent

        Args:
            description(str): the description to parse
            rawEvent(dict): the target record (modified in place)
        '''
        cls.mergeDict(rawEvent, cls.parseDescription(description))

    @classmethod
    def parseDescription(cls, description: str) -> dict:
        '''
        parse the given description

        Args:
            description(str): an event description, e.g.
                "Some Workshop (ACRO) ; 8 (Vietri) : 1996.05.23-25"

        Returns:
            dict: whichever of acronym, ordinal, location and date-range
            fields could be extracted; empty for "year-like" ordinals
        '''
        result = {}
        parts = description.split(";")
        if len(parts) > 1:
            title = parts[0]
            # prefer an acronym in parentheses, else use the whole title
            titlepattern = r"\((?P<acronym>[^)]*)\)"
            titlematch = re.search(titlepattern, title)
            if titlematch:
                result["acronym"] = titlematch.group("acronym")
            else:
                result["acronym"] = title.strip()
            loctime = parts[1]
            # e.g. "8 (Vietri) : 1996.05.23-25"
            loctimepattern = r"\s?(?P<ordinal>[1-9][0-9]*)\s?\((?P<location>[^)]*)\)\s?:\s?(?P<daterange>[12][0-9][0-9][0-9]\.[0-9][0-9]\.[0-9][0-9]\-([0-9][0-9]\.[0-9][0-9]|[0-9][0-9]))"
            loctimematch = re.search(loctimepattern, loctime)
            if loctimematch:
                ordinalStr = loctimematch.group("ordinal")
                if ordinalStr is not None:
                    ordinal = int(ordinalStr)
                    # check for "year" ordinals
                    if ordinal > 1000:
                        # completely ignore the description
                        return {}
                    result["ordinal"] = ordinal
                locationStr = loctimematch.group("location")
                if locationStr is not None:
                    result['location'] = locationStr
                dateRangeStr = loctimematch.group("daterange")
                dateResult = Textparse.getDateRange(dateRangeStr)
                # merge the parsed date fields with the result
                result = {**result, **dateResult}
        return result
class TibkatEventSeriesManager(EventSeriesManager):
    '''
    TIBKAT event series access
    '''

    def __init__(self, config: StorageConfig=None):
        '''
        Constructor
        '''
        super().__init__(name="TibkatEventSeries", sourceConfig=Tibkat.sourceConfig, clazz=TibkatEventSeries, config=config)

    def configure(self):
        '''
        configure me
        '''

    def getListOfDicts(self)->list:
        '''
        get my list of dicts
        '''
        # TIBKAT has no explicit event-series records (yet); return a
        # single stub record so downstream table creation has a schema
        lod=[{"source":"tibkat"}]
        return lod
class TibkatEventSeries(EventSeries):
    '''
    an event series derived from TIBKAT records
    '''

    def __init__(self):
        '''
        construct me via the EventSeries base class
        '''
        super().__init__()
/Chiplotle-0.4.1.tar.gz/Chiplotle-0.4.1/chiplotle/geometry/core/label.py | from chiplotle.hpgl.label import Label as HPGLLabel
from chiplotle.hpgl.commands import PA
from chiplotle.geometry.core.coordinatearray import CoordinateArray
from chiplotle.geometry.core.shape import _Shape
from chiplotle.tools import mathtools
import math
## TODO should a Label be a path? Probably not.
class Label(_Shape):
    '''
    A text label.

    - `text` is the text to be displayed.
    - `charwidth` is the width of characters in cms.
    - `charheight` is the height of characters in cms.
    - `charspace` is the spacing factor between characters.
    - `linespace` is a spacing factor between lines.
    - `origin` is the origin of the text, can be:
       'top-left'    'top-center'    'top-right'
       'middle-left' 'middle-center' 'middle-right'
       'bottom-left' 'bottom-center' 'bottom-right'
    '''

    # Maps the symbolic origin names to the numeric origin codes used by
    # the HPGL LB/LO instructions.
    HPGL_ORIGIN_MAP = {
        'bottom-left'  : 1,
        'middle-left'  : 2,
        'top-left'     : 3,
        'bottom-center': 4,
        'middle-center': 5,
        'top-center'   : 6,
        'bottom-right' : 7,
        'middle-right' : 8,
        'top-right'    : 9}

    def __init__(self,
        text,
        charwidth,
        charheight,
        charspace = None,
        linespace = None,
        origin = 'bottom-left'):
        _Shape.__init__(self)
        self.text = text
        self.charspace = charspace
        self.linespace = linespace
        self.origin = origin
        # Three control points encode position, character width and
        # character height; rotating these points rotates the label.
        self.points = [(0, 0), (charwidth, 0), (charwidth, charheight)]
        self.never_upside_down = False

    ## PUBLIC PROPERTIES ##

    @property
    def points(self):
        return self._points

    @points.setter
    def points(self, arg):
        self._points = CoordinateArray(arg)

    @property
    def angle(self):
        # orientation derived from the first control-point difference vector
        return self.points.difference[0].angle

    ## TODO make settable...
    @property
    def charwidth(self):
        return self.points.difference[0].magnitude

#   @charwidth.setter
#   def charwidth(self, arg):
#      self._points

    @property
    def charheight(self):
        return self.points.difference[1].magnitude

    ## PRIVATE PROPERTIES ##

#   @property
#   def _infix_commands(self):
#      angle = self.angle
#      if self.never_upside_down:
#         if math.pi * 3 / 2.0 > angle > math.pi / 2.0:
#            angle += math.pi
#
#      if _Shape.language == 'HPGL':
#         origin = self.HPGL_ORIGIN_MAP[self.origin]
#         label = HPGLLabel(
#            text = self.text,
#            charwidth = self.charwidth,
#            charheight = self.charheight,
#            charspace = self.charspace,
#            linespace = self.linespace,
#            origin = origin,
#            direction = mathtools.polar_to_xy((1, angle)),
#            )
#         return [PA(self.points[0]), label]
#
#      elif _Shape.language == 'gcode':
#         print 'Sorry, no g-code support!'
#         raise NotImplementedError

    def __str__(self):
        return '%s(%s)' % (self.__class__.__name__, self.text)
## DEMO CODE
if __name__ == '__main__':
    # demo: rotate a label and display it together with a circle
    from chiplotle import *
    lb = Label("Hello!", 1, 2, origin = 'bottom-center')
    PenDecorator(Pen(1))(lb) ## we need this for Label to display with hp2xx
    rotate(lb, 3.14 / 4 * 3)
    c = circle(100 / 2.5)
    g = group([c, lb])
    io.view(g)
/GSEIM-1.4.tar.gz/GSEIM-1.4/src/grc/core/ports/port.py |
from __future__ import absolute_import
from .. import Constants
from ..base import Element
from ..utils.descriptors import (
EvaluatedFlag, EvaluatedEnum, EvaluatedPInt,
setup_names, lazy_property
)
@setup_names
class Port(Element):
is_port = True
dtype = EvaluatedEnum(list(Constants.TYPE_TO_SIZEOF.keys()), default='')
def __init__(self, parent, direction, id, label='', domain=Constants.DEFAULT_DOMAIN, dtype='',
bus_struct=None, **_):
"""Make a new port from nested data."""
Element.__init__(self, parent)
self._dir = direction
self.port_subtype = direction if direction != 'source' else ''
if direction in ('source', 'sink'):
self.port_type = 'flowgraph'
elif direction in ('e_left', 'e_right', 'e_top', 'e_bottom'):
self.port_type = 'electrical'
elif direction in ('b_left', 'b_right', 'b_top', 'b_bottom'):
self.port_type = 'bus'
self.key = id
if not label:
label = id if not id.isdigit() else {
'sink': 'in',
'source': 'out',
'e_left': 'e',
'e_right': 'e',
'e_top': 'e',
'e_bottom': 'e',
'b_left': 'b',
'b_right': 'b',
'b_top': 'b',
'b_bottom': 'b',
}[direction]
self.name = self._base_name = label
self.domain = domain
self.dtype = dtype
self.stored_hidden_state = None
self.bus_structure = bus_struct
# end of args ########################################################
def __str__(self):
if self.is_source:
return 'Source - {}({})'.format(self.name, self.key)
if self.is_sink:
return 'Sink - {}({})'.format(self.name, self.key)
if self.is_e_left:
return 'e_left - {}({})'.format(self.name, self.key)
if self.is_e_right:
return 'e_right - {}({})'.format(self.name, self.key)
if self.is_e_top:
return 'e_top - {}({})'.format(self.name, self.key)
if self.is_e_bottom:
return 'e_bottom - {}({})'.format(self.name, self.key)
if self.is_b_left:
return 'b_left - {}({})'.format(self.name, self.key)
if self.is_b_right:
return 'b_right - {}({})'.format(self.name, self.key)
if self.is_b_top:
return 'b_top - {}({})'.format(self.name, self.key)
if self.is_b_bottom:
return 'b_bottom - {}({})'.format(self.name, self.key)
def __repr__(self):
if self.is_source:
s1 = 'sources'
elif self.is_sink:
s1 = 'sinks'
elif self.is_e_left:
s1 = 'e_lefts'
elif self.is_e_right:
s1 = 'e_rights'
elif self.is_e_top:
s1 = 'e_tops'
elif self.is_e_bottom:
s1 = 'e_bottoms'
elif self.is_b_left:
s1 = 'b_lefts'
elif self.is_b_right:
s1 = 'b_rights'
elif self.is_b_top:
s1 = 'b_tops'
elif self.is_b_bottom:
s1 = 'b_bottoms'
return '{!r}.{}[{}]'.format(self.parent, s1, self.key)
@lazy_property
def is_sink(self):
return self._dir == 'sink'
@lazy_property
def is_source(self):
return self._dir == 'source'
@lazy_property
def is_e_left(self):
return self._dir == 'e_left'
@lazy_property
def is_e_right(self):
return self._dir == 'e_right'
@lazy_property
def is_e_top(self):
return self._dir == 'e_top'
@lazy_property
def is_e_bottom(self):
return self._dir == 'e_bottom'
@lazy_property
def is_b_left(self):
return self._dir == 'b_left'
@lazy_property
def is_b_right(self):
return self._dir == 'b_right'
@lazy_property
def is_b_top(self):
return self._dir == 'b_top'
@lazy_property
def is_b_bottom(self):
return self._dir == 'b_bottom'
def validate(self):
Element.validate(self)
platform = self.parent_platform
num_connections = len(list(self.connections(enabled=True)))
if self.dtype not in Constants.TYPE_TO_SIZEOF.keys():
self.add_error_message('Type "{}" is not a possible type.'.format(self.dtype))
if self.is_sink and num_connections > 1:
self.add_error_message('sink can have only one connection.')
def rewrite(self):
del self.dtype
Element.rewrite(self)
def connections(self, enabled=None):
"""Iterator over all connections to/from this port
enabled: None for all, True for enabled only, False for disabled only
"""
for con in self.parent_flowgraph.connections:
#TODO clean this up - but how to get past this validation
# things don't compare simply with an x in y because
# bus ports are created differently.
port_in_con = False
if self in con and (enabled is None or enabled == con.enabled):
yield con | PypiClean |
/Bugs%20Everywhere%20(BEurtle%20fork)-1.5.0.1.-2012-07-16-.zip/Bugs Everywhere (BEurtle fork)-1.5.0.1.-2012-07-16-/libbe/ui/util/editor.py | import codecs
import locale
import os
import sys
import tempfile
import libbe
import libbe.util.encoding
if libbe.TESTING == True:
import doctest
# Marker line appended below user content; everything at and after this
# line is stripped from the editor output by trimmed_string().
comment_marker = u"== Anything below this line will be ignored\n"
class CantFindEditor (Exception):
    """Raised when neither VISUAL nor EDITOR names a usable editor."""
    def __init__(self):
        super(CantFindEditor, self).__init__(
            "Can't find editor to get string from")
def editor_string(comment=None, encoding=None):
    """Invokes the editor, and returns the user-produced text as a string

    The editor command comes from the VISUAL environment variable, falling
    back to EDITOR.  An optional *comment* is appended below the ignore
    marker and stripped from the returned text.

    >>> if "EDITOR" in os.environ:
    ...     del os.environ["EDITOR"]
    >>> if "VISUAL" in os.environ:
    ...     del os.environ["VISUAL"]
    >>> editor_string()
    Traceback (most recent call last):
    CantFindEditor: Can't find editor to get string from
    >>> os.environ["EDITOR"] = "echo bar > "
    >>> editor_string()
    u'bar\\n'
    >>> os.environ["VISUAL"] = "echo baz > "
    >>> editor_string()
    u'baz\\n'
    >>> os.environ["VISUAL"] = "echo 'baz\\n== Anything below this line will be ignored\\nHi' > "
    >>> editor_string()
    u'baz\\n'
    >>> del os.environ["EDITOR"]
    >>> del os.environ["VISUAL"]
    """
    if encoding is None:
        encoding = libbe.util.encoding.get_text_file_encoding()
    editor = None
    # VISUAL takes precedence over EDITOR
    for name in ('VISUAL', 'EDITOR'):
        if name in os.environ and os.environ[name] != '':
            editor = os.environ[name]
            break
    if editor is None:
        raise CantFindEditor()
    fhandle, fname = tempfile.mkstemp()
    try:
        if comment is not None:
            cstring = u'\n'+comment_string(comment)
            os.write(fhandle, cstring.encode(encoding))
        os.close(fhandle)
        # NOTE: the command is passed through the shell; the editor value
        # comes from the user's own environment, so this is accepted here.
        # (An unused oldmtime snapshot of the temp file was removed.)
        os.system("%s %s" % (editor, fname))
        output = libbe.util.encoding.get_file_contents(
            fname, encoding=encoding, decode=True)
        output = trimmed_string(output)
        if output.rstrip('\n') == "":
            output = None
    finally:
        os.unlink(fname)
    return output
def comment_string(comment):
    """Prefix *comment* with the ignore-marker line.

    >>> comment_string('hello') == comment_marker+"hello"
    True
    """
    return u'%s%s' % (comment_marker, comment)
def trimmed_string(instring):
    """Return *instring* up to (excluding) the ignore-marker line.

    >>> trimmed_string("hello\\n"+comment_marker)
    u'hello\\n'
    >>> trimmed_string("hi!\\n" + comment_string('Booga'))
    u'hi!\\n'
    """
    kept = []
    for current in instring.splitlines(True):
        if current.startswith(comment_marker):
            # everything from the marker onwards is discarded
            return ''.join(kept)
        kept.append(current)
    return ''.join(kept)
# Expose the doctests above as a unittest suite when running in test mode.
if libbe.TESTING == True:
    suite = doctest.DocTestSuite()
/Facebook-Bot-Library-0.1.tar.gz/Facebook-Bot-Library-0.1/FBBlib/order.py | class FBOrder(object):
    def __init__(self, recipient_name, order_number, currency, payment_method, purchases_array, price_summary, order_url="", address="", timestamp="", price_adjustments_array="", **kwargs):
        """Store the fields of a Messenger receipt ("order") template.

        recipient_name: buyer display name; order_number: unique order id;
        currency: e.g. "USD"; payment_method: free-text description;
        purchases_array: list of dicts as built by one_purchase();
        price_summary: dict as built by the price_summary() helper;
        remaining arguments are optional template fields.
        """
        self.recipient_name = recipient_name
        self.order_number = order_number
        self.currency = currency
        self.payment_method = payment_method
        self.order_url = order_url
        self.timestamp = timestamp
        self.purchases = purchases_array
        # NOTE(review): this attribute shadows the address() helper on
        # instances - call FBOrder.address(...) on the class instead.
        self.address = address
        self.price_summary = price_summary
        self.price_adjustments = price_adjustments_array
return {
"title":title,
"subtitle":subtitle,
"quantity":quantity,
"price":price,
"currency":currency,
"image_url":image_url
}
def address(street1, street2, city, postal_code, state, country):
return {
"street_1":street1,
"street_2":street2,
"city":city,
"postal_code":postal_code,
"state":state,
"country":country
}
def price_summary(subtotal, shipping_cost, total_tax, total_cost):
return {
"subtotal":subtotal,
"shipping_cost":shipping_cost,
"total_tax":total_tax,
"total_cost":total_cost
}
def price_one_adjustment(name, amount):
return {
"name":name,
"amount":amount
}
def get_payload(self):
return {
"template_type":"receipt",
"recipient_name":self.recipient_name,
"order_number":self.order_number,
"currency":self.currency,
"payment_method":self.payment_method,
"order_url":self.order_url,
"timestamp":self.timestamp,
"elements":self.purchases,
#{
# "title":"Classic White T-Shirt",
# "subtitle":"100% Soft and Luxurious Cotton",
# "quantity":2,
# "price":50,
# "currency":"USD",
# "image_url":"http://petersapparel.parseapp.com/img/whiteshirt.png"
#}
"address":self.address,
"summary":self.price_summary,
"adjustments":self.price_adjustments
} | PypiClean |
/Caroline-presentation-0.2.4.tar.gz/Caroline-presentation-0.2.4/caroline/html_dist/js/mathjax/sre/mathmaps/en/symbols/latin-mathfonts-sans-serif-italic.js | [{"locale":"en"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital a","short":"sans serif italic cap a"},"mathspeak":{"default":"sans serif italic upper A"},"clearspeak":{"default":"sans serif italic cap A"}},"key":"1D608"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital b","short":"sans serif italic cap b"},"mathspeak":{"default":"sans serif italic upper B"},"clearspeak":{"default":"sans serif italic cap B"}},"key":"1D609"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital c","short":"sans serif italic cap c"},"mathspeak":{"default":"sans serif italic upper C"},"clearspeak":{"default":"sans serif italic cap C"}},"key":"1D60A"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital d","short":"sans serif italic cap d"},"mathspeak":{"default":"sans serif italic upper D"},"clearspeak":{"default":"sans serif italic cap D"}},"key":"1D60B"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital e","short":"sans serif italic cap e"},"mathspeak":{"default":"sans serif italic upper E"},"clearspeak":{"default":"sans serif italic cap E"}},"key":"1D60C"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital f","short":"sans serif italic cap f"},"mathspeak":{"default":"sans serif italic upper F"},"clearspeak":{"default":"sans serif italic cap F"}},"key":"1D60D"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital g","short":"sans serif italic cap g"},"mathspeak":{"default":"sans serif italic upper G"},"clearspeak":{"default":"sans serif italic cap G"}},"key":"1D60E"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital h","short":"sans serif italic cap h"},"mathspeak":{"default":"sans serif italic upper 
H"},"clearspeak":{"default":"sans serif italic cap H"}},"key":"1D60F"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital i","short":"sans serif italic cap i"},"mathspeak":{"default":"sans serif italic upper I"},"clearspeak":{"default":"sans serif italic cap I"}},"key":"1D610"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital j","short":"sans serif italic cap j"},"mathspeak":{"default":"sans serif italic upper J"},"clearspeak":{"default":"sans serif italic cap J"}},"key":"1D611"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital k","short":"sans serif italic cap k"},"mathspeak":{"default":"sans serif italic upper K"},"clearspeak":{"default":"sans serif italic cap K"}},"key":"1D612"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital l","short":"sans serif italic cap l"},"mathspeak":{"default":"sans serif italic upper L"},"clearspeak":{"default":"sans serif italic cap L"}},"key":"1D613"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital m","short":"sans serif italic cap m"},"mathspeak":{"default":"sans serif italic upper M"},"clearspeak":{"default":"sans serif italic cap M"}},"key":"1D614"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital n","short":"sans serif italic cap n"},"mathspeak":{"default":"sans serif italic upper N"},"clearspeak":{"default":"sans serif italic cap N"}},"key":"1D615"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital o","short":"sans serif italic cap o"},"mathspeak":{"default":"sans serif italic upper O"},"clearspeak":{"default":"sans serif italic cap O"}},"key":"1D616"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital p","short":"sans serif italic cap p"},"mathspeak":{"default":"sans serif italic upper P"},"clearspeak":{"default":"sans serif italic cap P"}},"key":"1D617"},{"category":"Lu","mappings":{"default":{"default":"sans 
serif italic capital q","short":"sans serif italic cap q"},"mathspeak":{"default":"sans serif italic upper Q"},"clearspeak":{"default":"sans serif italic cap Q"}},"key":"1D618"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital r","short":"sans serif italic cap r"},"mathspeak":{"default":"sans serif italic upper R"},"clearspeak":{"default":"sans serif italic cap R"}},"key":"1D619"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital s","short":"sans serif italic cap s"},"mathspeak":{"default":"sans serif italic upper S"},"clearspeak":{"default":"sans serif italic cap S"}},"key":"1D61A"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital t","short":"sans serif italic cap t"},"mathspeak":{"default":"sans serif italic upper T"},"clearspeak":{"default":"sans serif italic cap T"}},"key":"1D61B"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital u","short":"sans serif italic cap u"},"mathspeak":{"default":"sans serif italic upper U"},"clearspeak":{"default":"sans serif italic cap U"}},"key":"1D61C"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital v","short":"sans serif italic cap v"},"mathspeak":{"default":"sans serif italic upper V"},"clearspeak":{"default":"sans serif italic cap V"}},"key":"1D61D"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital w","short":"sans serif italic cap w"},"mathspeak":{"default":"sans serif italic upper W"},"clearspeak":{"default":"sans serif italic cap W"}},"key":"1D61E"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital x","short":"sans serif italic cap x"},"mathspeak":{"default":"sans serif italic upper X"},"clearspeak":{"default":"sans serif italic cap X"}},"key":"1D61F"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital y","short":"sans serif italic cap y"},"mathspeak":{"default":"sans serif italic upper 
Y"},"clearspeak":{"default":"sans serif italic cap Y"}},"key":"1D620"},{"category":"Lu","mappings":{"default":{"default":"sans serif italic capital z","short":"sans serif italic cap z"},"mathspeak":{"default":"sans serif italic upper Z"},"clearspeak":{"default":"sans serif italic cap Z"}},"key":"1D621"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small a","short":"sans serif italic a"}},"key":"1D622"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small b","short":"sans serif italic b"}},"key":"1D623"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small c","short":"sans serif italic c"}},"key":"1D624"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small d","short":"sans serif italic d"}},"key":"1D625"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small e","short":"sans serif italic e"}},"key":"1D626"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small f","short":"sans serif italic f"}},"key":"1D627"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small g","short":"sans serif italic g"}},"key":"1D628"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small h","short":"sans serif italic h"}},"key":"1D629"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small i","short":"sans serif italic i"}},"key":"1D62A"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small j","short":"sans serif italic j"}},"key":"1D62B"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small k","short":"sans serif italic k"}},"key":"1D62C"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small l","short":"sans serif italic l"}},"key":"1D62D"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small m","short":"sans serif italic m"}},"key":"1D62E"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic 
small n","short":"sans serif italic n"}},"key":"1D62F"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small o","short":"sans serif italic o"}},"key":"1D630"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small p","short":"sans serif italic p"}},"key":"1D631"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small q","short":"sans serif italic q"}},"key":"1D632"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small r","short":"sans serif italic r"}},"key":"1D633"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small s","short":"sans serif italic s"}},"key":"1D634"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small t","short":"sans serif italic t"}},"key":"1D635"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small u","short":"sans serif italic u"}},"key":"1D636"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small v","short":"sans serif italic v"}},"key":"1D637"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small w","short":"sans serif italic w"}},"key":"1D638"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small x","short":"sans serif italic x"}},"key":"1D639"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small y","short":"sans serif italic y"}},"key":"1D63A"},{"category":"Ll","mappings":{"default":{"default":"sans serif italic small z","short":"sans serif italic z"}},"key":"1D63B"}] | PypiClean |
/Nitrous-0.9.3-py3-none-any.whl/turbogears/i18n/data/en_BW.py |
languages={'gv': 'Manx', 'gu': 'Gujarati', 'rom': 'Romany', 'alg': 'Algonquian Languages', 'ale': 'Aleut', 'sco': 'Scots', 'mni': 'Manipuri', 'gd': 'Scottish Gaelic', 'ga': 'Irish', 'mno': 'Manobo Languages', 'osa': 'Osage', 'gn': 'Guarani', 'gl': 'Gallegan', 'mwr': 'Marwari', 'ty': 'Tahitian', 'tw': 'Twi', 'tt': 'Tatar', 'tr': 'Turkish', 'ts': 'Tsonga', 'tn': 'Tswana', 'to': 'Tonga (Tonga Islands)', 'aus': 'Australian Languages', 'av': 'Avaric', 'tk': 'Turkmen', 'th': 'Thai', 'roa': 'Romance (Other)', 'tg': 'Tajik', 'te': 'Telugu', 'uga': 'Ugaritic', 'ta': 'Tamil', 'fat': 'Fanti', 'fan': 'Fang', 'wo': 'Wolof', 'rm': 'Rhaeto-Romance', 'din': 'Dinka', 'bla': 'Siksika', 'cmc': 'Chamic Languages', 'ml': 'Malayalam', 'zh': 'Chinese', 'tem': 'Timne', 'za': 'Zhuang', 'cau': 'Caucasian (Other)', 'zu': 'Zulu', 'ter': 'Tereno', 'tet': 'Tetum', 'mnc': 'Manchu', 'kut': 'Kutenai', 'suk': 'Sukuma', 'kum': 'Kumyk', 'sus': 'Susu', 'new': 'Newari', 'sux': 'Sumerian', 'den': 'Slave', 'men': 'Mende', 'mul': 'Multiple Languages', 'lez': 'Lezghian', 'root': 'Root', 'eka': 'Ekajuk', 'akk': 'Akkadian', 'dra': 'Dravidian (Other)', 'jrb': 'Judeo-Arabic', 'sgn': 'Sign Languages', 'sga': 'Irish, Old (to 900)', 'apa': 'Apache Languages', 'bra': 'Braj', 'chb': 'Chibcha', 'chg': 'Chagatai', 'chk': 'Chuukese', 'chm': 'Mari', 'chn': 'Chinook Jargon', 'cho': 'Choctaw', 'chp': 'Chipewyan', 'chr': 'Cherokee', 'chy': 'Cheyenne', 'ti': 'Tigrinya', 'vot': 'Votic', 'mg': 'Malagasy', 'iba': 'Iban', 'mo': 'Moldavian', 'mn': 'Mongolian', 'mi': 'Maori', 'mh': 'Marshallese', 'mk': 'Macedonian', 'mt': 'Maltese', 'cai': 'Central American Indian (Other)', 'del': 'Delaware', 'ms': 'Malay', 'mr': 'Marathi', 'my': 'Burmese', 'cad': 'Caddo', 'tai': 'Tai (Other)', 'afh': 'Afrihili', 'sit': 'Sino-Tibetan (Other)', 'enm': 'English, Middle (1100-1500)', 'csb': 'Kashubian', 'nyn': 'Nyankole', 'nyo': 'Nyoro', 'nym': 'Nyamwezi', 'sio': 'Siouan Languages', 'map': 'Austronesian', 'mas': 'Masai', 'lah': 'Lahnda', 'lad': 
'Ladino', 'fy': 'Frisian', 'snk': 'Soninke', 'fa': 'Persian', 'mad': 'Madurese', 'mag': 'Magahi', 'mai': 'Maithili', 'fi': 'Finnish', 'fj': 'Fijian', 'man': 'Mandingo', 'egy': 'Egyptian (Ancient)', 'znd': 'Zande', 'ss': 'Swati', 'sr': 'Serbian', 'sq': 'Albanian', 'sw': 'Swahili', 'sv': 'Swedish', 'su': 'Sundanese', 'st': 'Sotho, Southern', 'sk': 'Slovak', 'si': 'Sinhalese', 'sh': 'Serbo-Croatian', 'so': 'Somali', 'sn': 'Shona', 'sm': 'Samoan', 'sl': 'Slovenian', 'sc': 'Sardinian', 'sa': 'Sanskrit', 'sg': 'Sango', 'se': 'Northern Sami', 'sd': 'Sindhi', 'zen': 'Zenaga', 'kbd': 'Kabardian', 'afa': 'Afro-Asiatic (Other)', 'lg': 'Ganda', 'lb': 'Luxembourgish', 'fiu': 'Finno - Ugrian (Other)', 'ln': 'Lingala', 'lo': 'Lao', 'li': 'Limburgish', 'byn': 'Blin', 'lt': 'Lithuanian', 'lu': 'Luba-Katanga', 'yi': 'Yiddish', 'non': 'Norse, Old', 'ceb': 'Cebuano', 'yo': 'Yoruba', 'nog': 'Nogai', 'bat': 'Baltic (Other)', 'dak': 'Dakota', 'dar': 'Dargwa', 'day': 'Dayak', 'ssa': 'Nilo-Saharam (Other)', 'kpe': 'Kpelle', 'el': 'Greek', 'eo': 'Esperanto', 'en': 'English', 'lam': 'Lamba', 'ee': 'Ewe', 'mdf': 'Moksha', 'fr': 'French', 'mdr': 'Mandar', 'et': 'Estonian', 'es': 'Spanish', 'ru': 'Russian', 'gon': 'Gondi', 'goh': 'German, Old High (ca.750-1050)', 'sms': 'Skolt Sami', 'smn': 'Inari Sami', 'smj': 'Lule Sami', 'smi': 'Sami languages (Other)', 'got': 'Gothic', 'rn': 'Rundi', 'ro': 'Romanian', 'dsb': 'Lower Sorbian', 'sma': 'Southern Sami', 'gor': 'Gorontalo', 'ast': 'Asturian', 'wal': 'Walamo', 'crh': 'Crimean Turkish; Crimean Tatar', 'ath': 'Athapascan Languages', 'gez': 'Geez', 'xh': 'Xhosa', 'ff': 'Fulah', 'mak': 'Makasar', 'zap': 'Zapotec', 'kok': 'Konkani', 'kos': 'Kosraean', 'fo': 'Faroese', 'tog': 'Tonga (Nyasa)', 'hup': 'Hupa', 'udm': 'Udmurt', 'bej': 'Beja', 'bem': 'Bemba', 'tsi': 'Tsimshian', 'ber': 'Berber', 'nzi': 'Nzima', 'sai': 'South American Indian (Other)', 'ang': 'English, Old (ca.450-1100)', 'pra': 'Prakrit Languages', 'bho': 'Bhojpuri', 'sal': 'Salishan 
languages', 'pro': u'Proven\xe7al, Old (to 1500)', 'raj': 'Rajasthani', 'sad': 'Sandawe', 'rar': 'Rarotongan', 'rap': 'Rapanui', 'sas': 'Sasak', 'car': 'Carib', 'min': 'Minangkabau', 'mic': 'Micmac', 'efi': 'Efik', 'arn': 'Araucanian', 'ypk': 'Yupik Languages', 'mis': 'Miscellaneous Languages', 'kac': 'Kachin', 'kab': 'Kabyle', 'kaa': 'Kara-Kalpak', 'kam': 'Kamba', 'kar': 'Karen', 'kaw': 'Kawi', 'tyv': 'Tuvinian', 'awa': 'Awadhi', 'ka': 'Georgian', 'doi': 'Dogri', 'kg': 'Kongo', 'kk': 'Kazakh', 'kj': 'Kuanyama', 'ki': 'Kikuyu', 'ko': 'Korean', 'kn': 'Kannada', 'km': 'Khmer', 'kl': 'Kalaallisut', 'ks': 'Kashmiri', 'kr': 'Kanuri', 'kw': 'Cornish', 'kv': 'Komi', 'ku': 'Kurdish', 'ky': 'Kirghiz', 'tkl': 'Tokelau', 'bua': 'Buriat', 'mga': 'Irish, Middle (900-1200)', 'hit': 'Hittite', 'dyu': 'Dyula', 'de': 'German', 'da': 'Danish', 'dz': 'Dzongkha', 'ira': 'Iranian', 'dv': 'Divehi', 'hil': 'Hiligaynon', 'him': 'Himachali', 'gem': 'Germanic (Other)', 'crp': 'Creoles and Pidgins (Other)', 'qu': 'Quechua', 'bas': 'Basa', 'gba': 'Gbaya', 'bad': 'Banda', 'ban': 'Balinese', 'bal': 'Baluchi', 'bam': 'Bambara', 'shn': 'Shan', 'bai': 'Bamileke Languages', 'arp': 'Arapaho', 'art': 'Artificial (Other)', 'arw': 'Arawak', 'arc': 'Aramaic', 'sem': 'Semitic (Other)', 'sel': 'Selkup', 'nub': 'Nubian Languages', 'btk': 'Batak', 'lus': 'Lushai', 'mus': 'Creek', 'lua': 'Luba-Lulua', 'iro': 'Iroquoian languages', 'lui': 'Luiseno', 'mun': 'Munda Languages', 'lun': 'Lunda', 'luo': 'Luo', 'wa': 'Walloon', 'tup': 'Tupi languages', 'jv': 'Javanese', 'tut': 'Altaic (Other)', 'tum': 'Tumbuka', 'ja': 'Japanese', 'cop': 'Coptic', 'ilo': 'Iloko', 'la': 'Latin', 'gwi': "Gwich'in", 'und': 'Undetermined', 'tli': 'Tlingit', 'ch': 'Chamorro', 'co': 'Corsican', 'ca': 'Catalan', 'ce': 'Chechen', 'pon': 'Pohnpeian', 'cy': 'Welsh', 'sah': 'Yakut', 'cs': 'Czech', 'cr': 'Cree', 'bnt': 'Bantu', 'cv': 'Chuvash', 'cu': 'Church Slavic', 'lv': 'Latvian', 'dum': 'Dutch, Middle (ca. 
1050-1350)', 'pt': 'Portuguese', 'dua': 'Duala', 'fro': 'French, Old (842-ca.1400)', 'yap': 'Yapese', 'frm': 'French, Middle (ca.1400-1600)', 'tiv': 'Tiv', 'yao': 'Yao', 'pa': 'Punjabi', 'xal': 'Kalmyk', 'pi': 'Pali', 'pl': 'Polish', 'gay': 'Gayo', 'oto': 'Otomian Languages', 'ota': 'Turkish, Ottoman (1500-1928)', 'hmn': 'Hmong', 'an': 'Aragonese', 'gaa': 'Ga', 'fur': 'Friulian', 'khi': 'Khoisan (Other)', 'sla': 'Slavic (Other)', 've': 'Venda', 'vi': 'Vietnamese', 'is': 'Icelandic', 'kho': 'Khotanese', 'iu': 'Inuktitut', 'it': 'Italian', 'vo': u'Volap\xfck', 'ii': 'Sichuan Yi', 'ik': 'Inupiaq', 'io': 'Ido', 'ine': 'Indo-European (Other)', 'ia': 'Interlingua', 'jpr': 'Judeo-Persian', 'ie': 'Interlingue', 'id': 'Indonesian', 'ig': 'Igbo', 'pap': 'Papiamento', 'ewo': 'Ewondo', 'pau': 'Palauan', 'paa': 'Papuan (Other)', 'pag': 'Pangasinan', 'sat': 'Santali', 'pal': 'Pahlavi', 'pam': 'Pampanga', 'phi': 'Philippine (Other)', 'cel': 'Celtic (Other)', 'phn': 'Phoenician', 'nic': 'Niger - Kordofanian (Other)', 'nia': 'Nias', 'dgr': 'Dogrib', 'syr': 'Syriac', 'niu': 'Niuean', 'jbo': 'Lojban', 'nah': 'Nahuatl', 'sam': 'Samaritan Aramaic', 'hai': 'Haida', 'gmh': 'German, Middle High (ca.1050-1500)', 'cus': 'Cushitic (Other)', 'wen': 'Sorbian Languages', 'ady': 'Adyghe', 'elx': 'Elamite', 'ada': 'Adangme', 'haw': 'Hawaiian', 'bin': 'Bini', 'bik': 'Bikol', 'mos': 'Mossi', 'moh': 'Mohawk', 'tl': 'Tagalog', 'tvl': 'Tuvalu', 'ijo': 'Ijo', 'kmb': 'Kimbundu', 'peo': 'Persian Old (ca.600-400 B.C.)', 'umb': 'Umbundu', 'tmh': 'Tamashek', 'fon': 'Fon', 'hsb': 'Upper Sorbian', 'be': 'Belarusian', 'bg': 'Bulgarian', 'ba': 'Bashkir', 'ps': 'Pashto (Pushto)', 'bm': 'Bambara', 'bn': 'Bengali', 'bo': 'Tibetan', 'bh': 'Bihari', 'bi': 'Bislama', 'br': 'Breton', 'bs': 'Bosnian', 'om': 'Oromo', 'oj': 'Ojibwa', 'ace': 'Achinese', 'ach': 'Acoli', 'oc': u'Occitan (post 1500); Proven\xe7al', 'kru': 'Kurukh', 'srr': 'Serer', 'kro': 'Kru', 'krc': 'Karachay-Balkar', 'nds': 'Low German; Low Saxon', 'os': 
'Ossetic', 'or': 'Oriya', 'sog': 'Sogdien', 'nso': 'Sotho, Northern', 'son': 'Songhai', 'vai': 'Vai', 'wak': 'Wakashan Languages', 'lol': 'Mongo', 'mkh': 'Mon-Khmer (Other)', 'loz': 'Lozi', 'gil': 'Gilbertese', 'was': 'Washo', 'war': 'Waray', 'hz': 'Herero', 'hy': 'Armenian', 'sid': 'Sidamo', 'hr': 'Croatian', 'ht': 'Haitian', 'hu': 'Hungarian', 'hi': 'Hindi', 'ho': 'Hiri Motu', 'ha': 'Hausa', 'bug': 'Buginese', 'he': 'Hebrew', 'uz': 'Uzbek', 'ur': 'Urdu', 'uk': 'Ukrainian', 'ug': 'Uighur', 'aa': 'Afar', 'ab': 'Abkhazian', 'ae': 'Avestan', 'af': 'Afrikaans', 'ak': 'Akan', 'am': 'Amharic', 'myv': 'Erzya', 'eu': 'Basque', 'as': 'Assamese', 'ar': 'Arabic', 'inh': 'Ingush', 'tpi': 'Tok Pisin', 'myn': 'Mayan', 'ay': 'Aymara', 'kha': 'Khasi', 'az': 'Azerbaijani', 'inc': 'Indic (Other)', 'nl': 'Dutch', 'nn': 'Norwegian Nynorsk', 'no': 'Norwegian', 'na': 'Nauru', 'nb': u'Norwegian Bokm\xe5l', 'nai': 'North American Indian (Other)', 'nd': 'Ndebele, North', 'ne': 'Nepali', 'ng': 'Ndonga', 'ny': 'Nyanja; Chichewa; Chewa', 'nap': 'Neapolitan', 'grb': 'Gerbo', 'grc': 'Greek, Ancient (to 1453)', 'nr': 'Ndebele, South', 'tig': 'Tigre', 'nv': 'Navajo', 'zun': 'Zuni', 'rw': 'Kinyarwanda', 'cpe': 'Creoles and Pidgins, English-based (Other)', 'cpf': 'Creoles and Pidgins, French-based (Other)', 'cpp': 'Creoles and pidgins, Portuguese-based (Other)'}
countries={'BD': 'Bangladesh', 'BE': 'Belgium', 'BF': 'Burkina Faso', 'BG': 'Bulgaria', 'BA': 'Bosnia and Herzegovina', 'BB': 'Barbados', 'WF': 'Wallis and Futuna', 'BM': 'Bermuda', 'BN': 'Brunei', 'BO': 'Bolivia', 'BH': 'Bahrain', 'BI': 'Burundi', 'BJ': 'Benin', 'BT': 'Bhutan', 'JM': 'Jamaica', 'BV': 'Bouvet Island', 'BW': 'Botswana', 'WS': 'Samoa', 'BR': 'Brazil', 'BS': 'Bahamas', 'BY': 'Belarus', 'BZ': 'Belize', 'RU': 'Russia', 'RW': 'Rwanda', 'TL': 'Timor-Leste', 'RE': u'R\xe9union', 'TM': 'Turkmenistan', 'TJ': 'Tajikistan', 'RO': 'Romania', 'TK': 'Tokelau', 'GW': 'Guinea-Bissau', 'GU': 'Guam', 'GT': 'Guatemala', 'GS': 'South Georgia and South Sandwich Islands', 'GR': 'Greece', 'GQ': 'Equatorial Guinea', 'GP': 'Guadeloupe', 'JP': 'Japan', 'GY': 'Guyana', 'GF': 'French Guiana', 'GE': 'Georgia', 'GD': 'Grenada', 'GB': 'United Kingdom', 'GA': 'Gabon', 'SV': 'El Salvador', 'GN': 'Guinea', 'GM': 'Gambia', 'GL': 'Greenland', 'GI': 'Gibraltar', 'GH': 'Ghana', 'OM': 'Oman', 'TN': 'Tunisia', 'JO': 'Jordan', 'SP': 'Serbia', 'HR': 'Croatia', 'HT': 'Haiti', 'HU': 'Hungary', 'HK': 'Hong Kong S.A.R., China', 'HN': 'Honduras', 'HM': 'Heard Island and McDonald Islands', 'VE': 'Venezuela', 'PR': 'Puerto Rico', 'PS': 'Palestinian Territory', 'PW': 'Palau', 'PT': 'Portugal', 'SJ': 'Svalbard and Jan Mayen', 'PY': 'Paraguay', 'IQ': 'Iraq', 'PA': 'Panama', 'PF': 'French Polynesia', 'PG': 'Papua New Guinea', 'PE': 'Peru', 'PK': 'Pakistan', 'PH': 'Philippines', 'PN': 'Pitcairn', 'PL': 'Poland', 'PM': 'Saint Pierre and Miquelon', 'ZM': 'Zambia', 'EH': 'Western Sahara', 'EE': 'Estonia', 'EG': 'Egypt', 'ZA': 'South Africa', 'EC': 'Ecuador', 'IT': 'Italy', 'VN': 'Vietnam', 'SB': 'Solomon Islands', 'ET': 'Ethiopia', 'SO': 'Somalia', 'ZW': 'Zimbabwe', 'SA': 'Saudi Arabia', 'ES': 'Spain', 'ER': 'Eritrea', 'MD': 'Moldova', 'MG': 'Madagascar', 'MA': 'Morocco', 'MC': 'Monaco', 'UZ': 'Uzbekistan', 'MM': 'Myanmar', 'ML': 'Mali', 'MO': 'Macao S.A.R. 
China', 'MN': 'Mongolia', 'MH': 'Marshall Islands', 'MK': 'Macedonia', 'MU': 'Mauritius', 'MT': 'Malta', 'MW': 'Malawi', 'MV': 'Maldives', 'MQ': 'Martinique', 'MP': 'Northern Mariana Islands', 'MS': 'Montserrat', 'MR': 'Mauritania', 'UG': 'Uganda', 'MY': 'Malaysia', 'MX': 'Mexico', 'IL': 'Israel', 'FR': 'France', 'IO': 'British Indian Ocean Territory', 'SH': 'Saint Helena', 'FI': 'Finland', 'FJ': 'Fiji', 'FK': 'Falkland Islands', 'FM': 'Micronesia', 'FO': 'Faroe Islands', 'NI': 'Nicaragua', 'NL': 'Netherlands', 'NO': 'Norway', 'NA': 'Namibia', 'VU': 'Vanuatu', 'NC': 'New Caledonia', 'NE': 'Niger', 'NF': 'Norfolk Island', 'NG': 'Nigeria', 'NZ': 'New Zealand', 'NP': 'Nepal', 'NR': 'Nauru', 'NU': 'Niue', 'CK': 'Cook Islands', 'CI': u"C\xf4te d'Ivoire", 'CH': 'Switzerland', 'CO': 'Colombia', 'CN': 'China', 'CM': 'Cameroon', 'CL': 'Chile', 'CC': 'Cocos Islands', 'CA': 'Canada', 'CG': 'Congo', 'CF': 'Central African Republic', 'CD': 'Democratic Republic of the Congo', 'CZ': 'Czech Republic', 'CY': 'Cyprus', 'CX': 'Christmas Island', 'CR': 'Costa Rica', 'CV': 'Cape Verde', 'CU': 'Cuba', 'SZ': 'Swaziland', 'SY': 'Syria', 'KG': 'Kyrgyzstan', 'KE': 'Kenya', 'SR': 'Suriname', 'KI': 'Kiribati', 'KH': 'Cambodia', 'KN': 'Saint Kitts and Nevis', 'KM': 'Comoros', 'ST': 'Sao Tome and Principe', 'SK': 'Slovakia', 'KR': 'South Korea', 'SI': 'Slovenia', 'KP': 'North Korea', 'KW': 'Kuwait', 'SN': 'Senegal', 'SM': 'San Marino', 'SL': 'Sierra Leone', 'SC': 'Seychelles', 'KZ': 'Kazakhstan', 'KY': 'Cayman Islands', 'SG': 'Singapore', 'SE': 'Sweden', 'SD': 'Sudan', 'DO': 'Dominican Republic', 'DM': 'Dominica', 'DJ': 'Djibouti', 'DK': 'Denmark', 'VG': 'British Virgin Islands', 'DE': 'Germany', 'YE': 'Yemen', 'DZ': 'Algeria', 'US': 'United States', 'UY': 'Uruguay', 'YU': 'Yugoslavia', 'YT': 'Mayotte', 'UM': 'United States Minor Outlying Islands', 'LB': 'Lebanon', 'LC': 'Saint Lucia', 'LA': 'Laos', 'TV': 'Tuvalu', 'TW': 'Taiwan', 'TT': 'Trinidad and Tobago', 'TR': 'Turkey', 'LK': 'Sri Lanka', 
'LI': 'Liechtenstein', 'LV': 'Latvia', 'TO': 'Tonga', 'LT': 'Lithuania', 'LU': 'Luxembourg', 'LR': 'Liberia', 'LS': 'Lesotho', 'TH': 'Thailand', 'TF': 'French Southern Territories', 'TG': 'Togo', 'TD': 'Chad', 'TC': 'Turks and Caicos Islands', 'LY': 'Libya', 'VA': 'Vatican', 'VC': 'Saint Vincent and the Grenadines', 'AE': 'United Arab Emirates', 'AD': 'Andorra', 'AG': 'Antigua and Barbuda', 'AF': 'Afghanistan', 'AI': 'Anguilla', 'VI': 'U.S. Virgin Islands', 'IS': 'Iceland', 'IR': 'Iran', 'AM': 'Armenia', 'AL': 'Albania', 'AO': 'Angola', 'AN': 'Netherlands Antilles', 'AQ': 'Antarctica', 'AS': 'American Samoa', 'AR': 'Argentina', 'AU': 'Australia', 'AT': 'Austria', 'AW': 'Aruba', 'IN': 'India', 'TZ': 'Tanzania', 'AZ': 'Azerbaijan', 'IE': 'Ireland', 'ID': 'Indonesia', 'UA': 'Ukraine', 'QA': 'Qatar', 'MZ': 'Mozambique'}
# Calendar names and date/number formatting data for the en_BW
# (English / Botswana) locale.
months=['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
abbrMonths=['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
days=['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
abbrDays=['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
# `%%(...)s` placeholders are filled with the (abbreviated) month/day names
# above; the remaining `%` codes follow strftime conventions.
dateFormats={'medium': '%%(abbrmonthname)s %d,%y', 'full': '%%(dayname)s %d %%(monthname)s %Y', 'long': '%d %%(monthname)s %Y', 'short': '%d/%m/%y'}
numericSymbols={'group': ',', 'nativeZeroDigit': '0', 'exponential': 'E', 'perMille': u'\u2030', 'nan': u'\ufffd', 'decimal': '.', 'percentSign': '%', 'list': ';', 'patternDigit': '#', 'plusSign': '+', 'infinity': u'\u221e', 'minusSign': '-'}
# MMDetection config: Sparse R-CNN with a ResNet-50 + FPN backbone,
# 1x schedule (12 epochs) on COCO detection.
_base_ = [
    '../_base_/datasets/coco_detection.py',
    '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
]
# Number of iterative refinement stages in the sparse RoI head.
num_stages = 6
# Number of learned object proposals (queries).
num_proposals = 100
model = dict(
    type='SparseRCNN',
    backbone=dict(
        type='ResNet',
        depth=50,
        num_stages=4,
        out_indices=(0, 1, 2, 3),
        frozen_stages=1,  # freeze the stem and first stage of the pretrained backbone
        norm_cfg=dict(type='BN', requires_grad=True),
        norm_eval=True,  # keep BN running statistics fixed during training
        style='pytorch',
        init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        start_level=0,
        add_extra_convs='on_input',
        num_outs=4),
    rpn_head=dict(
        # Learned proposal boxes/features instead of a conventional RPN.
        type='EmbeddingRPNHead',
        num_proposals=num_proposals,
        proposal_feature_channel=256),
    roi_head=dict(
        type='SparseRoIHead',
        num_stages=num_stages,
        stage_loss_weights=[1] * num_stages,  # equal loss weight per stage
        proposal_feature_channel=256,
        bbox_roi_extractor=dict(
            type='SingleRoIExtractor',
            roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
            out_channels=256,
            featmap_strides=[4, 8, 16, 32]),
        bbox_head=[
            # One Dynamic Instance Interactive (DII) head per refinement stage.
            dict(
                type='DIIHead',
                num_classes=80,
                num_ffn_fcs=2,
                num_heads=8,
                num_cls_fcs=1,
                num_reg_fcs=3,
                feedforward_channels=2048,
                in_channels=256,
                dropout=0.0,
                ffn_act_cfg=dict(type='ReLU', inplace=True),
                dynamic_conv_cfg=dict(
                    type='DynamicConv',
                    in_channels=256,
                    feat_channels=64,
                    out_channels=256,
                    input_feat_shape=7,
                    act_cfg=dict(type='ReLU', inplace=True),
                    norm_cfg=dict(type='LN')),
                loss_bbox=dict(type='L1Loss', loss_weight=5.0),
                loss_iou=dict(type='GIoULoss', loss_weight=2.0),
                loss_cls=dict(
                    type='FocalLoss',
                    use_sigmoid=True,
                    gamma=2.0,
                    alpha=0.25,
                    loss_weight=2.0),
                bbox_coder=dict(
                    type='DeltaXYWHBBoxCoder',
                    clip_border=False,
                    target_means=[0., 0., 0., 0.],
                    target_stds=[0.5, 0.5, 1., 1.])) for _ in range(num_stages)
        ]),
    # training and testing settings
    train_cfg=dict(
        rpn=None,  # no RPN training: proposals are learned embeddings
        rcnn=[
            # One-to-one (Hungarian) matching per stage, DETR-style.
            dict(
                assigner=dict(
                    type='HungarianAssigner',
                    cls_cost=dict(type='FocalLossCost', weight=2.0),
                    reg_cost=dict(type='BBoxL1Cost', weight=5.0),
                    iou_cost=dict(type='IoUCost', iou_mode='giou',
                                  weight=2.0)),
                sampler=dict(type='PseudoSampler'),
                pos_weight=1) for _ in range(num_stages)
        ]),
    test_cfg=dict(rpn=None, rcnn=dict(max_per_img=num_proposals)))
# optimizer
optimizer = dict(_delete_=True, type='AdamW', lr=0.000025, weight_decay=0.0001)
optimizer_config = dict(_delete_=True, grad_clip=dict(max_norm=1, norm_type=2))
# learning policy
lr_config = dict(policy='step', step=[8, 11])
runner = dict(type='EpochBasedRunner', max_epochs=12)
/MOM-Tapyr-1.6.2.tar.gz/MOM-Tapyr-1.6.2/SQ.py |
from __future__ import division, print_function
from __future__ import absolute_import
from _MOM import MOM
from _TFL import TFL
from _TFL.pyk import pyk
from _TFL.I18N import _, _T
import _TFL._Meta.Object
import _TFL._Meta.Once_Property
import _TFL._Meta.Property
import _TFL.Decorator
import _TFL.Q_Exp
import _TFL.Q_Result
from _MOM._Attr.Filter import Q
@pyk.adapt__str__
class _SQ_ (TFL.Meta.Object) :
    """Symbolic query generator.

    Indexing an instance with an essential type --- either a ``Q``
    expression such as ``Q.PNS.Type`` or a plain type-name string ---
    returns the cached :class:`_E_Type_` symbolic query for that type::

        sq_property = SQ [Q.PAP.Property]
        sq_person   = SQ [Q.PAP.Person]

    ``sq_person`` is a symbolic query for a non-partial type;
    ``sq_property`` defines a polymorphic symbolic query over the set of
    types derived from the partial type ``PAP.Property``.
    """

    ### cache of `_E_Type_` instances, keyed by type name;
    ### shared by all instances of `_SQ_`
    _Table = {}

    def __getitem__ (self, key) :
        if isinstance (key, TFL.Q_Exp._Get_) :
            key = key._name
        table = self._Table
        if key not in table :
            table [key] = self._E_Type_ (key)
        return table [key]
    # end def __getitem__

    def __str__ (self) :
        return "SQ"
    # end def __str__

# end class _SQ_
@TFL.Add_New_Method (_SQ_)
@pyk.adapt__str__
class _E_Type_ (TFL.Meta.Object) :
"""Symbolic query for a specific E_Type.
Each instance of :class:`_E_Type_` is bound to a specific essential
type. One can refine a symbolic query by applying any of the various
methods, e.g., :meth:`filter`.
"""
_attr = None
_attrs = ()
_distinct = None
_filters = ()
_group_by = ()
_kw = {}
_limit = None
_offset = None
_order_by = ()
_strict = False
def _q_refiner (f) :
name = f.__name__
qr_f = getattr (TFL._Q_Result_, name, None)
if qr_f is not None and not f.__doc__ :
f.__doc__ = qr_f.__doc__
return f
# end def _q_refiner
def __init__ (self, t) :
self.type_name = getattr (t, "type_name", t)
# end def __init__
def __call__ (self, q) :
"""Apply symbolic query to `q`.
`q` can be one of:
* an instance of :class:`~_MOM.Scope.Scope`
* an instance of :class:`E_Type_Manager<_MOM.E_Type_Manager.Entity>`
* the result of a call to
:meth:`~_MOM.E_Type_Manager.Entity.query`
In this case, the query refinements of `self` are applied to
`q`.
In any case, the symbolically defined query operations of `self` are
applied to the query resulting from `q`.
"""
if isinstance (q, MOM.Scope) :
q = q [self.type_name]
if isinstance (q, MOM.E_Type_Manager.Entity) :
result = q.query (strict = self._strict)
else :
result = q
if self._filters or self._kw :
result = result.filter (* self._filters, ** self._kw)
if self._group_by :
result = result.group_by (* self._group_by)
if self._distinct is not None :
result = result.distinct (self._distinct)
for c in self._order_by :
result = result.order_by (c)
if self._limit is not None :
result = result.limit (self._limit)
if self._offset is not None :
result = result.offset (self._offset)
if self._attr is not None :
result = result.attr (self._attr)
if self._attrs :
result = result.attrs (self._attrs)
return result
# end def __call__
def apply (self, q) :
return self (q)
# end def apply
apply.__doc__ = __call__.__doc__
@_q_refiner
def attr (self, getter) :
return self._clone (_attr = getter)
# end def attr
@_q_refiner
def attrs (self, * getters) :
return self._clone (_attrs = getters)
# end def attrs
@_q_refiner
def distinct (self, value = True) :
return self._clone (_distinct = value)
# end def distinct
@_q_refiner
def filter (self, * criteria, ** kw) :
self._strict = kw.pop ("strict", False)
sk = kw.pop ("sort_key", ())
return self._clone \
( _filters = self._filters + criteria
, _kw = dict (self._kw, ** kw)
, _order_by = self._order_by + ((sk, ) if sk else ())
)
# end def filter
@_q_refiner
def group_by (self, * columns) :
return self._clone (_group_by = self._group_by + columns)
# end def group_by
@_q_refiner
def limit (self, limit) :
return self._clone (_limit = limit)
# end def limit
@_q_refiner
def offset (self, offset) :
return self._clone (_offset = offset)
# end def offset
@_q_refiner
def order_by (self, * criteria) :
return self._clone (_order_by = self._order_by + criteria)
# end def order_by
def _clone (self, ** kw) :
cls = self.__class__
result = cls.__new__ (cls)
result.__dict__.update (self.__dict__, ** kw)
return result
# end def _clone
def __str__ (self) :
    ### render the symbolic query as the chain of refinements applied,
    ### e.g. `SQ [PAP.Person].order_by (...).limit (5)`
    result = ["SQ [%s]" % (self.type_name)]
    if self._filters or self._kw :
        args = list (str (f) for f in self._filters)
        args.extend \
            ( "%s = %r" % (k, v)
            for k, v in sorted (pyk.iteritems (self._kw))
            )
        result.append ("filter (%s)" % ", ".join (args))
    if self._group_by :
        args = list (str (f) for f in self._group_by)
        result.append ("group_by (%s)" % ", ".join (args))
    if self._distinct is not None :
        result.append ("distinct (%s)" % self._distinct)
    if self._order_by :
        args = list (str (f) for f in self._order_by)
        result.append ("order_by (%s)" % ", ".join (args))
    if self._limit is not None :
        result.append ("limit (%s)" % (self._limit, ))
    if self._offset is not None :
        ### fix: was rendered as `limit (%s)` (copy/paste error); an
        ### offset refinement must be displayed as `offset`
        result.append ("offset (%s)" % (self._offset, ))
    if self._attr is not None :
        result.append ("attr (%s)" % (self._attr, ))
    if self._attrs :
        result.append ("attrs (%s)" % (self._attrs, ))
    sep = "\n ." if len (result) > 2 else "."
    return sep.join (result)
# end def __str__
# end class _E_Type_
SQ = _SQ_ ()
### «text» ### start of documentation
__doc__ = r"""
This module implements a symbolic query language. It exports the symbolic query
generator instance :obj:`SQ` which is used to define symbolic queries.
A symbolic query generated by :obj:`SQ` is a Python callable:
applying a SQ instance to a :class:`scope<_MOM.Scope.Scope>`,
:class:`E_Type_Manager<_MOM.E_Type_Manager.Entity>`, or
:meth:`query result<_MOM.E_Type_Manager.Entity.query>`
applies the symbolic query for that scope/manager/query
and returns the resulting query result.
.. data:: SQ
`SQ` is an instance of :class:`_SQ_`.
>>> sq = SQ ["PAP.Person"]
>>> print (sq)
SQ [PAP.Person]
>>> print (sq.order_by (- Q.last_name))
SQ [PAP.Person].order_by (- Q.last_name)
>>> print (sq.order_by (- Q.last_name).limit (5).offset (10))
SQ [PAP.Person]
.order_by (- Q.last_name)
.limit (5)
.offset (10)
>>> print (sq.filter (Q.last_name.STARTSWITH ("tanzer")))
SQ [PAP.Person].filter (Q.last_name.startswith ('tanzer',))
>>> print (sq)
SQ [PAP.Person]
>>> SQ [Q.PAP.Person] is sq
True
>>> sq.order_by (- Q.last_name) is sq
False
>>> SQ [Q.PAP.Person_has_Phone] is sq
False
"""
if __name__ != "__main__" :
MOM._Export ("SQ")
### __END__ MOM.SQ | PypiClean |
/MyoSuite-2.0.1-py3-none-any.whl/myosuite/agents/baseline_Reflex/reflexCtr.py | # - [x y z] -> [anterior lateral superior]
# (<-> [posterior medial inferior])
from __future__ import division # '/' always means non-truncating division
import numpy as np
class LocoCtrl(object):
DEBUG = 0
RIGHT = 0 # r_leg
LEFT = 1 # l_leg
# (todo) use these when handling angles
# THETA0 = 0*np.pi/180 # trunk angle when standing straight
# S_THETA = 1 # 1: leaning forward > 0; -1: leaning backward > 0
# HIP0 = 0*np.pi/180 # hip angle when standing straight
# S_HIP = 1 # 1: extension > 0; -1: flexion > 0
# KNEE0 = 0*np.pi/180 # knee angle when standing straight
# S_KNEE = 1 # 1: extension > 0; -1: flexion > 0
# ANKLE0 = 0*np.pi/180 # ankle angle when standing straight
# S_ANKLE = 1 # 1: plantar flexion > 0; -1: dorsiflexion > 0
# muscle names
m_keys = ['HAB', 'HAD', 'HFL', 'GLU', 'HAM', 'RF', 'VAS', 'BFSH', 'GAS', 'SOL', 'TA']
# body sensor data
s_b_keys = ['theta', 'd_pos', 'dtheta']
# theta[0]: around local x axis (pointing anterior)
# theta[1]: around local y axis (pointing leftward)
# theta[2]: around local z axis (pointing upward)
# pos[0]: local x
# pos[1]: local y
# pos[2]: local z
# leg sensor data
# angular values follow the Song2015 convention
# fix: a missing comma after 'dphi_knee' made Python concatenate the
# adjacent string literals into the bogus key 'dphi_kneeF_RF', silently
# dropping both 'dphi_knee' and 'F_RF' from the key list
s_l_keys = [
    'contact_ipsi', 'contact_contra', 'load_ipsi', 'load_contra',
    'alpha', 'alpha_f', 'dalpha',
    'phi_hip', 'phi_knee', 'phi_ankle', 'dphi_knee',
    'F_RF', 'F_VAS', 'F_GAS', 'F_SOL',
]
# control states
cs_keys = [
'ph_st', # leg in stance
'ph_st_csw', # leg in stance ^ contra-leg in swing
'ph_st_sw0', # leg in stance ^ initial swing
'ph_sw', # leg in swing
'ph_sw_flex_k', # leg in swing ^ flex knees
'ph_sw_hold_k', # leg in swing ^ hold knee
'ph_sw_stop_l', # leg in swing ^ stop leg
'ph_sw_hold_l' # leg in swing ^ hold leg
]
# control parameters
cp_keys = [
'theta_tgt', 'c0', 'cv', 'alpha_delta',
'knee_sw_tgt', 'knee_tgt', 'knee_off_st', 'ankle_tgt',
'HFL_3_PG', 'HFL_3_DG', 'HFL_6_PG', 'HFL_6_DG', 'HFL_10_PG',
'GLU_3_PG', 'GLU_3_DG', 'GLU_6_PG', 'GLU_6_DG', 'GLU_10_PG',
'HAM_3_GLU', 'HAM_9_PG',
'RF_1_FG', 'RF_8_DG_knee',
'VAS_1_FG', 'VAS_2_PG', 'VAS_10_PG',
'BFSH_2_PG', 'BFSH_7_DG_alpha', 'BFSH_7_PG', 'BFSH_8_DG', 'BFSH_8_PG',
'BFSH_9_G_HAM', 'BFSH_9_HAM0', 'BFSH_10_PG',
'GAS_2_FG',
'SOL_1_FG',
'TA_5_PG', 'TA_5_G_SOL',
'theta_tgt_f', 'c0_f', 'cv_f',
'HAB_3_PG', 'HAB_3_DG', 'HAB_6_PG',
'HAD_3_PG', 'HAD_3_DG', 'HAD_6_PG'
]
m_map = dict(zip(m_keys, range(len(m_keys))))
s_b_map = dict(zip(s_b_keys, range(len(s_b_keys))))
s_l_map = dict(zip(s_l_keys, range(len(s_l_keys))))
cs_map = dict(zip(cs_keys, range(len(cs_keys))))
cp_map = dict(zip(cp_keys, range(len(cp_keys))))
# -----------------------------------------------------------------------------------------------------------------
def __init__(self, TIMESTEP, control_mode=1, control_dimension=3, params=np.ones(len(cp_keys))):
if self.DEBUG:
print("===========================================")
print("locomotion controller created in DEBUG mode")
print("===========================================")
self.control_mode = control_mode
# 0: spinal control (no brain control)
# 1: full control
self.control_dimension = control_dimension # 2D or 3D
if self.control_mode == 0:
self.brain_control_on = 0
elif self.control_mode == 1:
self.brain_control_on = 1
self.spinal_control_phase = {}
self.in_contact = {}
self.brain_command = {}
self.stim = {}
self.n_par = len(LocoCtrl.cp_keys)
self.cp = {}
self.reset(params)
# -----------------------------------------------------------------------------------------------------------------
def reset(self, params=None):
self.in_contact['r_leg'] = 0 # 1
self.in_contact['l_leg'] = 1 # 0
spinal_control_phase_r = {}
spinal_control_phase_r['ph_st'] = 0
spinal_control_phase_r['ph_st_csw'] = 0
spinal_control_phase_r['ph_st_sw0'] = 0
spinal_control_phase_r['ph_st_st'] = 0
spinal_control_phase_r['ph_sw'] = 1
spinal_control_phase_r['ph_sw_flex_k'] = 1
spinal_control_phase_r['ph_sw_hold_k'] = 0
spinal_control_phase_r['ph_sw_stop_l'] = 0
spinal_control_phase_r['ph_sw_hold_l'] = 0
self.spinal_control_phase['r_leg'] = spinal_control_phase_r
spinal_control_phase_l = {}
spinal_control_phase_l['ph_st'] = 1
spinal_control_phase_l['ph_st_csw'] = 0
spinal_control_phase_l['ph_st_sw0'] = 0
spinal_control_phase_l['ph_st_st'] = 0
spinal_control_phase_l['ph_sw'] = 0
spinal_control_phase_l['ph_sw_flex_k'] = 0
spinal_control_phase_l['ph_sw_hold_k'] = 0
spinal_control_phase_l['ph_sw_stop_l'] = 0
spinal_control_phase_l['ph_sw_hold_l'] = 0
self.spinal_control_phase['l_leg'] = spinal_control_phase_l
self.stim['r_leg'] = dict(zip(self.m_keys, 0.01*np.ones(len(self.m_keys))))
self.stim['l_leg'] = dict(zip(self.m_keys, 0.01*np.ones(len(self.m_keys))))
if params is not None:
self.set_control_params(params)
# -----------------------------------------------------------------------------------------------------------------
def set_control_params(self, params):
if len(params) == self.n_par:
self.set_control_params_RL('r_leg', params)
self.set_control_params_RL('l_leg', params)
else:
raise Exception('error in the number of params!!')
# -----------------------------------------------------------------------------------------------------------------
def set_control_params_RL(self, s_leg, params):
cp = {}
cp_map = self.cp_map
cp['theta_tgt'] = params[cp_map['theta_tgt']] *10*np.pi/180 # *10*np.pi/180
cp['c0'] = params[cp_map['c0']] *20*np.pi/180 +55*np.pi/180 #*20*np.pi/180 +55*np.pi/180
cp['cv'] = params[cp_map['cv']] *2*np.pi/180 # *2*np.pi/180
cp['alpha_delta'] = params[cp_map['alpha_delta']] *5*np.pi/180
cp['knee_sw_tgt'] = params[cp_map['knee_sw_tgt']] *20*np.pi/180 +120*np.pi/180 # *20*np.pi/180 +120*np.pi/180
cp['knee_tgt'] = params[cp_map['knee_tgt']] *15*np.pi/180 +160*np.pi/180 # *15*np.pi/180 +160*np.pi/180
cp['knee_off_st'] = params[cp_map['knee_off_st']] *10*np.pi/180 +165*np.pi/180 # *10*np.pi/180 +165*np.pi/180
cp['ankle_tgt'] = params[cp_map['ankle_tgt']] *20*np.pi/180 +60*np.pi/180 # *20*np.pi/180 +60*np.pi/180
cp['HFL_3_PG'] = params[cp_map['HFL_3_PG']] *2.0
cp['HFL_3_DG'] = params[cp_map['HFL_3_DG']] *1.0
cp['HFL_6_PG'] = params[cp_map['HFL_6_PG']] *1.0
cp['HFL_6_DG'] = params[cp_map['HFL_6_DG']] *.1
cp['HFL_10_PG'] = params[cp_map['HFL_10_PG']] *1.0
cp['GLU_3_PG'] = params[cp_map['GLU_3_PG']] *2.0
cp['GLU_3_DG'] = params[cp_map['GLU_3_DG']] *0.5
cp['GLU_6_PG'] = params[cp_map['GLU_6_PG']] *1.0
cp['GLU_6_DG'] = params[cp_map['GLU_6_DG']] *0.1
cp['GLU_10_PG'] = params[cp_map['GLU_10_PG']] *.5
cp['HAM_3_GLU'] = params[cp_map['HAM_3_GLU']] *1.0
cp['HAM_9_PG'] = params[cp_map['HAM_9_PG']] *2.0
cp['RF_1_FG'] = params[cp_map['RF_1_FG']] *0.3
cp['RF_8_DG_knee'] = params[cp_map['RF_8_DG_knee']] *0.1
cp['VAS_1_FG'] = params[cp_map['VAS_1_FG']] *1.0
cp['VAS_2_PG'] = params[cp_map['VAS_2_PG']] *2.0
cp['VAS_10_PG'] = params[cp_map['VAS_10_PG']] *.3
cp['BFSH_2_PG'] = params[cp_map['BFSH_2_PG']] *2.0
cp['BFSH_7_DG_alpha'] = params[cp_map['BFSH_7_DG_alpha']] *0.2
cp['BFSH_7_PG'] = params[cp_map['BFSH_7_PG']] *2.0
cp['BFSH_8_DG'] = params[cp_map['BFSH_8_DG']] *1.0
cp['BFSH_8_PG'] = params[cp_map['BFSH_8_DG']] *1.0
cp['BFSH_9_G_HAM'] = params[cp_map['BFSH_9_G_HAM']] *2.0
cp['BFSH_9_HAM0'] = params[cp_map['BFSH_9_HAM0']] *0.3
cp['BFSH_10_PG'] = params[cp_map['BFSH_10_PG']] *2.0
cp['GAS_2_FG'] = params[cp_map['GAS_2_FG']] *1.2
cp['SOL_1_FG'] = params[cp_map['SOL_1_FG']] *1.2
cp['TA_5_PG'] = params[cp_map['TA_5_PG']] *2.0
cp['TA_5_G_SOL'] = params[cp_map['TA_5_G_SOL']] *0.5
if self.control_dimension == 3:
if len(params) != 46:
raise Exception('error in the number of params!!')
cp['theta_tgt_f'] = params[cp_map['theta_tgt_f']] *5.0*np.pi/180
cp['c0_f'] = params[cp_map['c0_f']] *20*np.pi/180 + 60*np.pi/180
cp['cv_f'] = params[cp_map['cv_f']] *10*np.pi/180
cp['HAB_3_PG'] = params[cp_map['HAB_3_PG']] *10.0
cp['HAB_3_DG'] = params[cp_map['HAB_3_DG']] *1
cp['HAB_6_PG'] = params[cp_map['HAB_6_PG']] *2.0
cp['HAD_3_PG'] = params[cp_map['HAD_3_PG']] *2.0
cp['HAD_3_DG'] = params[cp_map['HAD_3_DG']] *0.3
cp['HAD_6_PG'] = params[cp_map['HAD_6_PG']] *2.0
elif self.control_dimension == 2:
if len(params) != 37:
raise Exception('error in the number of params!!')
self.cp[s_leg] = cp
# -----------------------------------------------------------------------------------------------------------------
def update(self, sensor_data):
self.sensor_data = sensor_data
if self.brain_control_on:
# update self.brain_command
self._brain_control(sensor_data)
# updates self.stim
self._spinal_control(sensor_data)
#print('spinal right - ', self.spinal_control_phase['r_leg'])
stim = np.array([self.stim['r_leg']['HFL'], self.stim['r_leg']['GLU'],
self.stim['r_leg']['HAM'], self.stim['r_leg']['RF'],
self.stim['r_leg']['VAS'], self.stim['r_leg']['BFSH'],
self.stim['r_leg']['GAS'], self.stim['r_leg']['SOL'],
self.stim['r_leg']['TA'],
self.stim['l_leg']['HFL'], self.stim['l_leg']['GLU'],
self.stim['l_leg']['HAM'], self.stim['l_leg']['RF'],
self.stim['l_leg']['VAS'], self.stim['l_leg']['BFSH'],
self.stim['l_leg']['GAS'], self.stim['l_leg']['SOL'],
self.stim['l_leg']['TA']
])
# todo: self._flaten(self.stim)
return stim
# -----------------------------------------------------------------------------------------------------------------
def _brain_control(self, sensor_data=0):
s_b = sensor_data['body']
cp = self.cp
self.brain_command['r_leg'] = {}
self.brain_command['l_leg'] = {}
for s_leg in ['r_leg', 'l_leg']:
if self.control_dimension == 3:
self.brain_command[s_leg]['theta_tgt_f'] = cp[s_leg]['theta_tgt_f']
sign_frontral = 1 if s_leg is 'r_leg' else -1 # Right was 1 intially
alpha_tgt_global_frontal = cp[s_leg]['c0_f'] + sign_frontral*cp[s_leg]['cv_f']*s_b['d_pos'][1]
theta_f = sign_frontral*s_b['theta'][0]
self.brain_command[s_leg]['alpha_tgt_f'] = alpha_tgt_global_frontal - theta_f
self.brain_command[s_leg]['theta_tgt'] = cp[s_leg]['theta_tgt']
alpha_tgt_global = cp[s_leg]['c0'] - cp[s_leg]['cv']*s_b['d_pos'][0]
self.brain_command[s_leg]['alpha_tgt'] = alpha_tgt_global - s_b['theta'][1]
self.brain_command[s_leg]['alpha_delta'] = cp[s_leg]['alpha_delta']
self.brain_command[s_leg]['knee_sw_tgt'] = cp[s_leg]['knee_sw_tgt']
self.brain_command[s_leg]['knee_tgt'] = cp[s_leg]['knee_tgt']
self.brain_command[s_leg]['knee_off_st'] = cp[s_leg]['knee_off_st']
self.brain_command[s_leg]['ankle_tgt'] = cp[s_leg]['ankle_tgt']
# alpha = hip - 0.5*knee
self.brain_command[s_leg]['hip_tgt'] = \
self.brain_command[s_leg]['alpha_tgt'] + 0.5*self.brain_command[s_leg]['knee_tgt']
# select which leg to swing
self.brain_command['r_leg']['swing_init'] = 0
self.brain_command['l_leg']['swing_init'] = 0
if sensor_data['r_leg']['contact_ipsi'] and sensor_data['l_leg']['contact_ipsi']:
r_delta_alpha = sensor_data['r_leg']['alpha'] - self.brain_command['r_leg']['alpha_tgt']
l_delta_alpha = sensor_data['l_leg']['alpha'] - self.brain_command['l_leg']['alpha_tgt']
if r_delta_alpha > l_delta_alpha:
self.brain_command['r_leg']['swing_init'] = 1
else:
self.brain_command['l_leg']['swing_init'] = 1
# -----------------------------------------------------------------------------------------------------------------
def _spinal_control(self, sensor_data):
    """Run one spinal-reflex step for both legs: advance each leg's
    stance/swing phase machine, then compute its muscle stimulations."""
    for s_leg in ['r_leg', 'l_leg']:
        self._update_spinal_control_phase(s_leg, sensor_data)
        self.stim[s_leg] = self.spinal_control_leg(s_leg, sensor_data)
# -----------------------------------------------------------------------------------------------------------------
def _update_spinal_control_phase(self, s_leg, sensor_data):
    """Advance the stance/swing phase machine of one leg.

    The cascade of plain `if` statements (not `elif`) is deliberate:
    a touch-down detected at the top of the call enables the stance
    flags that the stance block directly below reads in the same step,
    so transitions can propagate within a single control tick.
    """
    s_l = sensor_data[s_leg]
    alpha_tgt = self.brain_command[s_leg]['alpha_tgt']
    alpha_delta = self.brain_command[s_leg]['alpha_delta']
    knee_sw_tgt = self.brain_command[s_leg]['knee_sw_tgt']
    # when foot touches ground (rising edge of ipsilateral contact)
    if not self.in_contact[s_leg] and s_l['contact_ipsi']:
        # initiate stance control
        self.spinal_control_phase[s_leg]['ph_st'] = 1
        # swing control off
        self.spinal_control_phase[s_leg]['ph_sw'] = 0
        self.spinal_control_phase[s_leg]['ph_sw_flex_k'] = 0
        self.spinal_control_phase[s_leg]['ph_sw_hold_k'] = 0
        self.spinal_control_phase[s_leg]['ph_sw_stop_l'] = 0
        self.spinal_control_phase[s_leg]['ph_sw_hold_l'] = 0
        #print(f"{s_leg} touches the ground")
    # during stance control
    if self.spinal_control_phase[s_leg]['ph_st']:
        # contra-leg in swing (single stance phase)
        self.spinal_control_phase[s_leg]['ph_st_csw'] = not s_l['contact_contra']
        # initiate swing
        self.spinal_control_phase[s_leg]['ph_st_sw0'] = self.brain_command[s_leg]['swing_init']
        # do not initiate swing
        self.spinal_control_phase[s_leg]['ph_st_st'] = not self.spinal_control_phase[s_leg]['ph_st_sw0']
        #print(f"{s_leg} Stance control active")
    # when foot loses contact (falling edge of ipsilateral contact)
    if self.in_contact[s_leg] and not s_l['contact_ipsi']:
        # stance control off
        self.spinal_control_phase[s_leg]['ph_st'] = 0
        self.spinal_control_phase[s_leg]['ph_st_csw'] = 0
        self.spinal_control_phase[s_leg]['ph_st_sw0'] = 0
        self.spinal_control_phase[s_leg]['ph_st_st'] = 0
        # initiate swing control
        self.spinal_control_phase[s_leg]['ph_sw'] = 1
        # flex knee
        self.spinal_control_phase[s_leg]['ph_sw_flex_k'] = 1
        #print(f"{s_leg} looses contact with ground")
    # during swing control
    if self.spinal_control_phase[s_leg]['ph_sw']:
        #print(f"{s_leg} Swing control active")
        if self.spinal_control_phase[s_leg]['ph_sw_flex_k']:
            if s_l['phi_knee'] < knee_sw_tgt: # knee flexed
                self.spinal_control_phase[s_leg]['ph_sw_flex_k'] = 0
                # hold knee
                self.spinal_control_phase[s_leg]['ph_sw_hold_k'] = 1
        else:
            if self.spinal_control_phase[s_leg]['ph_sw_hold_k']:
                if s_l['alpha'] < alpha_tgt: # leg swung enough
                    self.spinal_control_phase[s_leg]['ph_sw_hold_k'] = 0
            if s_l['alpha'] < alpha_tgt + alpha_delta: # leg swung enough
                # stop leg
                self.spinal_control_phase[s_leg]['ph_sw_stop_l'] = 1
            if self.spinal_control_phase[s_leg]['ph_sw_stop_l'] \
                    and s_l['dalpha'] > 0: # leg started to retract
                # hold leg
                self.spinal_control_phase[s_leg]['ph_sw_hold_l'] = 1
    # remember contact state for edge detection in the next step
    self.in_contact[s_leg] = s_l['contact_ipsi']
# -----------------------------------------------------------------------------------------------------------------
def spinal_control_leg(self, s_leg, sensor_data):
s_l = sensor_data[s_leg]
s_b = sensor_data['body']
cp = self.cp[s_leg]
ph_st = self.spinal_control_phase[s_leg]['ph_st']
ph_st_csw = self.spinal_control_phase[s_leg]['ph_st_csw']
ph_st_sw0 = self.spinal_control_phase[s_leg]['ph_st_sw0']
ph_st_st = self.spinal_control_phase[s_leg]['ph_st_st']
ph_sw = self.spinal_control_phase[s_leg]['ph_sw']
ph_sw_flex_k = self.spinal_control_phase[s_leg]['ph_sw_flex_k']
ph_sw_hold_k = self.spinal_control_phase[s_leg]['ph_sw_hold_k']
ph_sw_stop_l = self.spinal_control_phase[s_leg]['ph_sw_stop_l']
ph_sw_hold_l = self.spinal_control_phase[s_leg]['ph_sw_hold_l']
theta = s_b['theta'][1]
dtheta = s_b['dtheta'][1]
sign_frontral = 1 if s_leg is 'r_leg' else -1
theta_f = sign_frontral*s_b['theta'][0]
dtheta_f = sign_frontral*s_b['dtheta'][0]
theta_tgt = self.brain_command[s_leg]['theta_tgt']
alpha_tgt = self.brain_command[s_leg]['alpha_tgt']
alpha_delta = self.brain_command[s_leg]['alpha_delta']
hip_tgt = self.brain_command[s_leg]['hip_tgt']
knee_tgt = self.brain_command[s_leg]['knee_tgt']
knee_sw_tgt = self.brain_command[s_leg]['knee_sw_tgt']
knee_off_st = self.brain_command[s_leg]['knee_off_st']
ankle_tgt = self.brain_command[s_leg]['ankle_tgt']
stim = {}
pre_stim = 0.01
if self.control_dimension == 3:
theta_tgt_f = self.brain_command[s_leg]['theta_tgt_f']
alpha_tgt_f = self.brain_command[s_leg]['alpha_tgt_f']
S_HAB_3 = ph_st*s_l['load_ipsi']*np.maximum(
- cp['HAB_3_PG']*(theta_f-theta_tgt_f)
- cp['HAB_3_DG']*dtheta_f
, 0)
S_HAB_6 = (ph_st_sw0*s_l['load_contra'] + ph_sw)*np.maximum(
cp['HAB_6_PG']*(s_l['alpha_f'] - alpha_tgt_f)
, 0)
stim['HAB'] = S_HAB_3 + S_HAB_6
S_HAD_3 = ph_st*s_l['load_ipsi']*np.maximum(
cp['HAD_3_PG']*(theta_f-theta_tgt_f)
+ cp['HAD_3_DG']*dtheta_f
, 0)
S_HAD_6 = (ph_st_sw0*s_l['load_contra'] + ph_sw)*np.maximum(
- cp['HAD_6_PG']*(s_l['alpha_f'] - alpha_tgt_f)
, 0)
stim['HAD'] = S_HAD_3 + S_HAD_6
S_HFL_3 = ph_st*s_l['load_ipsi']*np.maximum(
- cp['HFL_3_PG']*(theta-theta_tgt)
- cp['HFL_3_DG']*dtheta
, 0)
S_HFL_6 = (ph_st_sw0*s_l['load_contra'] + ph_sw)*np.maximum(
cp['HFL_6_PG']*(s_l['alpha']-alpha_tgt)
+ cp['HFL_6_DG']*s_l['dalpha']
, 0)
S_HFL_10 = ph_sw_hold_l*np.maximum(
cp['HFL_10_PG']*(s_l['phi_hip'] - hip_tgt)
, 0)
stim['HFL'] = pre_stim + S_HFL_3 + S_HFL_6 + S_HFL_10
S_GLU_3 = ph_st*s_l['load_ipsi']*np.maximum(
cp['GLU_3_PG']*(theta-theta_tgt)
+ cp['GLU_3_DG']*dtheta
, 0)
S_GLU_6 = (ph_st_sw0*s_l['load_contra'] + ph_sw)*np.maximum(
- cp['GLU_6_PG']*(s_l['alpha']-alpha_tgt)
- cp['GLU_6_DG']*s_l['dalpha']
, 0)
S_GLU_10 = ph_sw_hold_l*np.maximum(
- cp['GLU_10_PG']*(s_l['phi_hip'] - hip_tgt)
, 0)
stim['GLU'] = pre_stim + S_GLU_3 + S_GLU_6 + S_GLU_10
S_HAM_3 = cp['HAM_3_GLU']*S_GLU_3
S_HAM_9 = ph_sw_stop_l*np.maximum(
- cp['HAM_9_PG']*(s_l['alpha'] - (alpha_tgt + alpha_delta))
, 0)
stim['HAM'] = pre_stim + S_HAM_3 + S_HAM_9
S_RF_1 = (ph_st_st + ph_st_sw0*(1-s_l['load_contra']))*np.maximum(
cp['RF_1_FG']*s_l['F_RF']
, 0)
S_RF_8 = ph_sw_hold_k*np.maximum(
- cp['RF_8_DG_knee']*s_l['dphi_knee']
, 0)
stim['RF'] = pre_stim + S_RF_1 + S_RF_8
S_VAS_1 = (ph_st_st + ph_st_sw0*(1-s_l['load_contra']))*np.maximum(
cp['VAS_1_FG']*s_l['F_VAS']
, 0)
S_VAS_2 = -(ph_st_st + ph_st_sw0*(1-s_l['load_contra']))*np.maximum(
cp['VAS_2_PG']*(s_l['phi_knee'] - knee_off_st)
, 0)
S_VAS_10 = ph_sw_hold_l*np.maximum(
- cp['VAS_10_PG']*(s_l['phi_knee'] - knee_tgt)
, 0)
stim['VAS'] = pre_stim + S_VAS_1 + S_VAS_2 + S_VAS_10
S_BFSH_2 = (ph_st_st + ph_st_sw0*(1-s_l['load_contra']))*np.maximum(
cp['BFSH_2_PG']*(s_l['phi_knee'] - knee_off_st)
, 0)
S_BFSH_7 = (ph_st_sw0*(s_l['load_contra']) + ph_sw_flex_k)*np.maximum(
- cp['BFSH_7_DG_alpha']*s_l['dalpha']
+ cp['BFSH_7_PG']*(s_l['phi_knee'] - knee_sw_tgt)
, 0)
S_BFSH_8 = ph_sw_hold_k*np.maximum(
cp['BFSH_8_DG']*(s_l['dphi_knee'])
*cp['BFSH_8_PG']*(s_l['alpha'] - alpha_tgt)
, 0)
S_BFSH_9 = np.maximum(
cp['BFSH_9_G_HAM']*(S_HAM_9 - cp['BFSH_9_HAM0'])
, 0)
S_BFSH_10 = ph_sw_hold_l*np.maximum(
cp['BFSH_10_PG']*(s_l['phi_knee'] - knee_tgt)
, 0)
stim['BFSH'] = pre_stim + S_BFSH_2 + S_BFSH_7 + S_BFSH_8 + S_BFSH_9 + S_BFSH_10
S_GAS_2 = ph_st*np.maximum(
cp['GAS_2_FG']*s_l['F_GAS']
, 0)
stim['GAS'] = pre_stim + S_GAS_2
S_SOL_1 = ph_st*np.maximum(
cp['SOL_1_FG']*s_l['F_SOL']
, 0)
stim['SOL'] = pre_stim + S_SOL_1
S_TA_5 = np.maximum(
cp['TA_5_PG']*(s_l['phi_ankle'] - ankle_tgt)
, 0)
S_TA_5_st = -ph_st*np.maximum(
cp['TA_5_G_SOL']*S_SOL_1
, 0)
stim['TA'] = pre_stim + S_TA_5 + S_TA_5_st
for muscle in stim:
stim[muscle] = np.clip(stim[muscle], 0.01, 1.0)
return stim | PypiClean |
/Flask-Dance-7.0.0.tar.gz/Flask-Dance-7.0.0/docs/proxies.rst | Proxies and HTTPS
=================
Running a secure HTTPS website is important, but encrypting and decrypting
HTTPS traffic is computationally expensive. Many operators of large-scale
websites (including platforms like `Heroku`_) use a `TLS termination proxy`_ to reduce load
on the HTTP server. This works great, but means that the webserver running
your Flask application is actually speaking HTTP, not HTTPS.
As a result, Flask-Dance can get confused, and generate callback URLs
that have an ``http://`` scheme, instead of an ``https://`` scheme.
This is bad, because OAuth requires that all connections use HTTPS for
security purposes, and OAuth providers will reject requests that suggest
a callback URL with a ``http://`` scheme.
When you proxy the request from a `TLS termination proxy`_, probably your
load balancer, you need to ensure a few headers are set/proxied correctly
for Flask to do the right thing out of the box:
* ``Host``: preserve the Host header of the original request
* ``X-Real-IP``: preserve the source IP of the original request
* ``X-Forwarded-For``: a list of IP addresses of the source IP and any
HTTP proxies we've been through
* ``X-Forwarded-Proto``: the protocol, http or https, that the request
came in with
In 99.9% of the cases the `TLS termination proxy`_ will be configured to
do the right thing by default and any well-behaved Flask application will
work out of the box. However, if you're accessing the WSGI environment
directly, you will run into trouble. Don't do this and instead use the
functions provided by Werkzeug's :mod:`~werkzeug.wsgi` module or Flask's
:attr:`~flask.request` to access things like a ``Host`` header.
If your Flask app is behind a TLS termination proxy, and you need to make
sure that Flask is aware of that, check Flask's documentation for
:external:doc:`how to deploy a proxy setup <deploying/proxy_fix>`.
Please read it and follow its instructions. This is not unique to
Flask-Dance and there's nothing to configure on Flask-Dance's side
to solve this. It's also worth noting you might wish to set Flask's
:data:`PREFERRED_URL_SCHEME`.
.. _TLS termination proxy: https://en.wikipedia.org/wiki/TLS_termination_proxy
.. _Heroku: https://www.heroku.com/
| PypiClean |
/NeodroidAgent-0.4.8-py36-none-any.whl/NeodroidAgent-0.4.8.dist-info/LICENSE.md | Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2018 Christian Heider Nielsen
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| PypiClean |
/Brainfeatures-0.0.4.tar.gz/Brainfeatures-0.0.4/brainfeatures/utils/file_util.py | from itertools import product
import pandas as pd
import numpy as np
import logging
import pickle
import json
import os
import re
from mne.io import read_raw_edf
def replace_extension(path, new_extension):
    """Return *path* with its final file extension replaced.

    :param path: file path whose extension should be swapped
    :param new_extension: new extension including the leading dot, e.g. ".json"
    :return: path with only the trailing extension replaced
    """
    assert new_extension.startswith(".")
    # str.replace() substituted *every* occurrence of the old extension
    # anywhere in the path (e.g. "a.txt.txt" -> "a.json.json") and, for a
    # path with no extension at all, replaced the empty string between every
    # character. Rebuild from the extension-free root instead.
    root, _old_extension = os.path.splitext(path)
    return root + new_extension
def json_store(to_store, path):
    """Serialize *to_store* as pretty-printed JSON at *path*.

    Missing parent directories are created on demand.

    :param to_store: JSON-serializable object
    :param path: target file path; must end in ".json"
    """
    assert path.endswith(".json"), "wrong file extension"
    directory = os.path.dirname(path)
    # dirname is "" for a bare file name, and os.makedirs("") raises;
    # exist_ok also removes the check-then-create race of the original.
    if directory:
        os.makedirs(directory, exist_ok=True)
    with open(path, "w") as json_file:
        json.dump(to_store, json_file, indent=4, sort_keys=True)
def pandas_store_as_h5(path, df, key_):
    """Store DataFrame *df* under HDF key *key_* in the file at *path*.

    Missing parent directories are created on demand.

    :param path: target HDF5 file path
    :param df: pandas DataFrame to store
    :param key_: identifier for the group in the HDF store
    """
    directory = os.path.dirname(path)
    # dirname is "" for a bare file name, and os.makedirs("") raises;
    # exist_ok also removes the check-then-create race of the original.
    if directory:
        os.makedirs(directory, exist_ok=True)
    df.to_hdf(path, key_)
def mne_load_signals_and_fs_from_edf(file_, wanted_chs, ch_name_pattern=None,
                                     factor=1e6):
    """Read an edf file with mne, pick and order channels, scale the signals
    with *factor* and return them together with the sampling frequency.

    :param file_: path of the edf recording
    :param wanted_chs: channel names to pick (must be in sorted order, see
        the assertion below)
    :param ch_name_pattern: optional format string applied to every entry of
        wanted_chs to build the channel names actually present in the file
    :param factor: multiplier applied to the raw signals; None disables
        scaling (1e6 is presumably a volt-to-microvolt conversion — confirm)
    :return: (signals, fs) where signals is a DataFrame indexed by
        wanted_chs and fs is the sampling frequency
    """
    assert os.path.exists(file_), "file not found {}".format(file_)
    raw = read_raw_edf(file_, verbose="error")
    fs = raw.info["sfreq"]
    raw = raw.load_data()
    if ch_name_pattern is not None:
        chs = [ch_name_pattern.format(ch) for ch in wanted_chs]
    else:
        chs = wanted_chs
    raw = raw.reorder_channels(chs)
    # achieves two things: asserts that channels are sorted and picked
    # channels are in same order
    assert raw.ch_names == sorted(chs), (
        "actual channel names: {}, wanted channels names: {}"
        .format(', '.join(raw.ch_names), ', '.join(chs)))
    signals = raw.get_data()
    if factor is not None:
        signals = signals * factor
    # rows are labelled with the caller's original names, not the
    # (possibly pattern-expanded) file channel names
    signals = pd.DataFrame(signals, index=wanted_chs)
    return signals, fs
def get_duration_with_raw_mne(file_path):
    """Return the duration of an edf recording in whole seconds.

    Only the header is inspected through mne; the signal data is never
    loaded into memory.

    :param file_path: path of the edf recording
    :return: duration in seconds, truncated to an int
    """
    assert os.path.exists(file_path), "file not found {}".format(file_path)
    header = read_raw_edf(file_path, verbose="error")
    n_samples = header._raw_lengths[0]
    return int(n_samples / header.info["sfreq"])
def parse_age_and_gender_from_edf_header(file_path, return_raw_header=False):
    """Parse age and gender of the patient from the patient id field in the
    fixed-layout header of an edf file.

    :param file_path: path of the recording
    :param return_raw_header: whether to return the raw header bytes instead
        of parsing age/gender
    :return: age (int) and gender (M, X or F) of the patient
    :raises ValueError: if the header does not contain exactly one age and
        one gender token
    """
    assert os.path.exists(file_path), "file not found {}".format(file_path)
    # use a context manager so the handle is closed even on decode errors
    with open(file_path, 'rb') as f:
        content = f.read(88)
    if return_raw_header:
        return content
    # bytes 8:88 of an edf header hold the local patient identification
    patient_id = content[8:88].decode('ascii')
    # raw strings avoid invalid-escape-sequence warnings on modern Python
    [age] = re.findall(r"Age:(\d+)", patient_id)
    [gender] = re.findall(r"\s(\w)\s", patient_id)
    return int(age), gender
def property_in_path(curr_path, property):
    """Return True if *property* occurs as a complete "/"-separated
    component of *curr_path*."""
    return property in curr_path.split("/")
def natural_key(string):
    """Provide a human-like ("natural") sorting key for *string*.

    Digit runs compare numerically and the text in between lexically, so
    e.g. "s2" sorts before "s10".

    :param string: string to build the key for
    :return: list of alternating text and int tokens
    """
    p = r'(\d+)'
    # Keep the text tokens instead of mapping them to None: re.split always
    # alternates text/number tokens, and the original's None entries were
    # both useless for sorting and unorderable against int on Python 3
    # (TypeError inside sorted()).
    return [int(t) if t.isdigit() else t for t in re.split(p, string)]
# is this the same as natural key?
# def session_key(string):
# """ sort the file name by session """
# p = r'(s\d*)_'
# return re.findall(p, string)
def save_exp(exp, save_raw=False, out_dir=None):
    """Persist all relevant information contained in an experiment.

    :param exp: experiment object; predictions, performances, info, timing
        and estimator state are read from its (partly private) attributes
    :param save_raw: if True, additionally pickle the whole experiment
        object to ``out_dir + "exp.pkl"``
    :param out_dir: output location prefix; when None nothing is written to
        disk and only the assembled config dict is returned.
        NOTE(review): out_dir is used as a plain string prefix, so it is
        expected to end with a path separator — confirm with callers.
    :return: flat dict summarizing the experiment configuration and timings
    """
    if save_raw:
        with open(out_dir + "exp.pkl", "wb") as pickle_file:
            pickle.dump(exp, pickle_file)
    config = {}
    for i in range(exp._n_runs):
        for subset in exp.predictions.keys():
            preds = exp.predictions[subset]
            if subset in exp.performances.keys() and out_dir is not None:
                performances = exp.performances[subset]
                # NOTE(review): the format strings below have one placeholder
                # but two arguments, so the run index i is silently dropped
                # and every run overwrites the same csv — looks like
                # "..._{}_{}.csv" was intended. Fixing it would change the
                # output file names, so only flagging here.
                preds.to_csv(out_dir + "predictions_{}.csv".format(subset, i))
                performances.to_csv(
                    out_dir + "performances_{}.csv".format(subset, i))
            if subset in exp.info.keys() and "feature_importances" in \
                    exp.info[subset].keys():
                feature_importances = exp.info[subset]["feature_importances"]
                if out_dir is not None:
                    feature_importances.to_csv(
                        out_dir + "feature_importances_{}.csv".format(subset, i))
    config.update({"shuffle": exp._shuffle_splits})
    config.update({"n_runs": exp._n_runs})
    config.update({"n_jobs": exp._n_jobs})
    if exp._preproc_params is not None:
        config.update(exp._preproc_params)
    if exp._feat_gen_params is not None:
        config.update(exp._feat_gen_params)
    if exp._pca_thresh is not None:
        config.update({"pca_thresh": exp._pca_thresh})
    # flatten the (possibly nested) timing dict into time_<key>[_<subkey>]
    for key, value in exp.times.items():
        if type(value) is dict:
            for key2, value2 in value.items():
                config.update({'_'.join(["time", key, key2]): value2})
        else:
            config.update({'_'.join(["time", key]): value})
    config.update({"n_features": len(exp._feature_names)})
    d = {}
    if "estimator_params" in exp._estimator.__dict__:
        params = exp._estimator.__dict__["estimator_params"]
    else:
        params = []
    for param in params:
        d.update(
            {'_'.join(["model", param]): exp._estimator.__dict__[param]})
    config.update(d)
    if "n_estimators" in exp._estimator.__dict__:
        config.update(
            {"n_estimators": exp._estimator.__dict__["n_estimators"]})
    config.update({"sfreq": exp.info["devel"]["sfreq"]})
    # class name without module path and without the trailing "'>"
    config.update(
        {"model": str(exp._estimator.__class__).split('.')[-1][:-2]})
    for param in ["C", "gamma", "kernel"]:
        if param in exp._estimator.__dict__:
            if param == "gamma":
                # presumably the estimator stores the effective value as
                # "_gamma" — TODO confirm against the estimator class
                param = '_' + param
            config.update({'_'.join(["model", param]):
                           exp._estimator.__dict__[param]})
    if out_dir is not None:
        with open(out_dir + "config.json", "w") as json_file:
            json.dump(config, json_file, indent=4, sort_keys=True)
    return config
def read_feature_results(directory, models, decoding_tasks, decoding_types):
    """Collect prediction csv files from the result directory structure and
    compute summary metrics per (model, type, task, subset) combination.

    Expects files at ``<directory>/<model>/<type>/<task>/<subset>/
    predictions_{train,eval}.csv`` with columns id, y_true, y_pred.

    :return: DataFrame with one summary row per existing combination
    """
    from sklearn.metrics import (roc_auc_score, accuracy_score, roc_curve,
                                 mean_squared_error)
    subsets = ["cv", "eval"]
    result_df = pd.DataFrame()
    for model, decoding_type, task, subset in product(models, decoding_types,
                                                      decoding_tasks, subsets):
        path = os.path.join(directory, model, decoding_type, task, subset)
        if not os.path.exists(path):
            logging.error("path does not exist: {}".format(path))
            continue
        # cv results were stored under the "train" file name
        if subset == "eval":
            train_or_eval = "eval"
        else:
            train_or_eval = "train"
        # NOTE(review): DataFrame.from_csv was removed in pandas 1.0 and
        # DataFrame.append in pandas 2.0 — this function needs an old
        # pandas; port with pd.read_csv(..., index_col=0) and pd.concat.
        df = pd.DataFrame.from_csv(os.path.join(
            path, "predictions_{}.csv".format(train_or_eval)))
        # compute some metrics, grouped per recording id
        roc_curves, aucs, accs, rmses = [], [], [], []
        for group, d in df.groupby("id"):
            if task in ["pathological", "gender"]:
                # classification tasks: AUC / ROC curve / accuracy
                auc = roc_auc_score(d.y_true, d.y_pred)
                aucs.append(auc)
                roc = roc_curve(d.y_true, d.y_pred)
                roc_curves.append(roc)
                acc = accuracy_score(d.y_true, d.y_pred >= .5)
                accs.append(acc)
            else:
                # regression task: root mean squared error
                rmse = np.sqrt(mean_squared_error(d.y_true, d.y_pred))
                rmses.append(rmse)
        n = len(df.groupby("id"))
        if task in ["pathological", "gender"]:
            accs = np.mean(accs) * 100
            aucs = np.mean(aucs) * 100
            rmses = None
        else:
            accs = None
            aucs = None
            rmses = np.mean(rmses)
        row = {
            "model": model,
            "task": task,
            "accuracy": accs,
            "auc": aucs,
            "subset": subset,
            "rmse": rmses,
            "n": n,
            "type": decoding_type,
        }
        result_df = result_df.append(row, ignore_index=True)
    return result_df
/Kivy-2.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/kivy/uix/layout.py | __all__ = ('Layout', )
from kivy.clock import Clock
from kivy.uix.widget import Widget
from kivy.compat import isclose
class Layout(Widget):
'''Layout interface class, used to implement every layout. See module
documentation for more information.
'''
_trigger_layout = None
def __init__(self, **kwargs):
    # Layout is an abstract base: only concrete subclasses that implement
    # do_layout may be instantiated.
    if self.__class__ == Layout:
        raise Exception('The Layout class is abstract and \
cannot be used directly.')
    # Create the per-instance relayout trigger once (the class attribute
    # default is None); per do_layout's docs the trigger fires before the
    # next frame, so many property changes collapse into one relayout.
    if self._trigger_layout is None:
        self._trigger_layout = Clock.create_trigger(self.do_layout, -1)
    super(Layout, self).__init__(**kwargs)
def do_layout(self, *largs):
    '''This function is called when a layout is called by a trigger.
    If you are writing a new Layout subclass, don't call this function
    directly but use :meth:`_trigger_layout` instead.

    The function is by default called *before* the next frame, therefore
    the layout isn't updated immediately. Anything depending on the
    positions of e.g. children should be scheduled for the next frame.

    :param \\*largs: extra arguments passed by the Clock trigger; ignored.

    .. versionadded:: 1.0.8
    '''
    # Abstract hook: concrete layouts position/size their children here.
    raise NotImplementedError('Must be implemented in subclasses.')
def add_widget(self, widget, *args, **kwargs):
    '''Add *widget* as a child and re-layout whenever one of its sizing
    properties changes.
    '''
    for prop in ('size', 'size_hint', 'size_hint_max', 'size_hint_min'):
        widget.fbind(prop, self._trigger_layout)
    super(Layout, self).add_widget(widget, *args, **kwargs)
def remove_widget(self, widget, *args, **kwargs):
    '''Unbind the relayout trigger from *widget*'s sizing properties and
    remove it from the children.
    '''
    for prop in ('size', 'size_hint', 'size_hint_max', 'size_hint_min'):
        widget.funbind(prop, self._trigger_layout)
    super(Layout, self).remove_widget(widget, *args, **kwargs)
def layout_hint_with_bounds(
        self, sh_sum, available_space, min_bounded_size, sh_min_vals,
        sh_max_vals, hint):
    '''(internal) Computes the appropriate (size) hint for all the
    widgets given (potential) min or max bounds on the widgets' size.
    The ``hint`` list is updated in place with appropriate sizes.

    It walks through the hints and for any widgets whose hint would
    violate its min or max constraint, it fixes the hint. Any remaining
    or missing space after all the widgets are fixed gets distributed
    to the widgets, making them smaller or larger according to their
    size hint.

    This algorithm knows nothing about the widgets other than what is
    passed through the input params, so it's fairly generic for laying
    things out according to constraints using size hints.

    :Parameters:
        `sh_sum`: float
            The sum of the size hints (basically ``sum(size_hint)``).
        `available_space`: float
            The amount of pixels available for all the widgets
            whose size hint is not None. Cannot be zero.
        `min_bounded_size`: float
            The minimum amount of space required according to the
            `size_hint_min` of the widgets (basically
            ``sum(size_hint_min)``).
        `sh_min_vals`: list or iterable
            Items in the iterable are the size_hint_min for each widget.
            Can be None. The length should be the same as ``hint``.
        `sh_max_vals`: list or iterable
            Items in the iterable are the size_hint_max for each widget.
            Can be None. The length should be the same as ``hint``.
        `hint`: list
            A list whose size is the same as the length of ``sh_min_vals``
            and ``sh_max_vals``, whose each element is the corresponding
            size hint value of that element. This list is updated in place
            with correct size hints that ensure the constraints are not
            violated.

    :returns:
        Nothing. ``hint`` is updated in place.
    '''
    if not sh_sum:
        return
    # TODO: test when children have size_hint, max/min of zero
    # all divs are float denominator ;)
    stretch_ratio = sh_sum / float(available_space)
    if available_space <= min_bounded_size or \
            isclose(available_space, min_bounded_size):
        # too small, just set to min
        for i, (sh, sh_min) in enumerate(zip(hint, sh_min_vals)):
            if sh is None:
                continue
            if sh_min is not None:
                hint[i] = sh_min * stretch_ratio  # set to min size
            else:
                hint[i] = 0.  # everything else is zero
        return
    # these dicts take i (widget child) as key
    not_mined_contrib = {}  # all who's sh > min_sh or had no min_sh
    not_maxed_contrib = {}  # all who's sh < max_sh or had no max_sh
    sh_mins_avail = {}  # the sh amt removable until we hit sh_min
    sh_maxs_avail = {}  # the sh amt addable until we hit sh_max
    oversize_amt = undersize_amt = 0
    hint_orig = hint[:]
    # first, for all the items, set them to be within their max/min
    # size_hint bound, also find how much their size_hint can be reduced
    # or increased
    for i, (sh, sh_min, sh_max) in enumerate(
            zip(hint, sh_min_vals, sh_max_vals)):
        if sh is None:
            continue
        diff = 0
        if sh_min is not None:
            # min/max bounds are expressed in pixels; scale into hint units
            sh_min *= stretch_ratio
            diff = sh_min - sh  # how much we are under the min
            if diff > 0:
                hint[i] = sh_min
                undersize_amt += diff
            else:
                not_mined_contrib[i] = None
                sh_mins_avail[i] = hint[i] - sh_min
        else:
            not_mined_contrib[i] = None
            sh_mins_avail[i] = hint[i]
        if sh_max is not None:
            sh_max *= stretch_ratio
            diff = sh - sh_max
            if diff > 0:
                hint[i] = sh_max  # how much we are over the max
                oversize_amt += diff
            else:
                not_maxed_contrib[i] = None
                sh_maxs_avail[i] = sh_max - hint[i]
        else:
            not_maxed_contrib[i] = None
            sh_maxs_avail[i] = sh_sum - hint[i]
        if i in not_mined_contrib:
            not_mined_contrib[i] = max(0., diff)  # how much got removed
        if i in not_maxed_contrib:
            not_maxed_contrib[i] = max(0., diff)  # how much got added
    # if margin is zero, the amount of the widgets that were made smaller
    # magically equals the amount of the widgets that were made larger
    # so we're all good
    margin = oversize_amt - undersize_amt
    if isclose(oversize_amt, undersize_amt, abs_tol=1e-15):
        return
    # we need to redistribute the margin among all widgets
    # if margin is positive, then we have extra space because the widgets
    # that were larger and were reduced contributed more, so increase
    # the size hint for those that are allowed to be larger by the
    # most allowed, proportionately to their size (or inverse size hint).
    # similarly for the opposite case
    if margin > 1e-15:
        contrib_amt = not_maxed_contrib
        sh_available = sh_maxs_avail
        mult = 1.
        contrib_proportion = hint_orig
    elif margin < -1e-15:
        margin *= -1.
        contrib_amt = not_mined_contrib
        sh_available = sh_mins_avail
        mult = -1.
        # when reducing the size of widgets proportionately, those with
        # larger sh get reduced less, and those with smaller, more.
        mn = min((h for h in hint_orig if h))
        mx = max((h for h in hint_orig if h is not None))
        hint_top = (2. * mn if mn else 1.) if mn == mx else mn + mx
        contrib_proportion = [None if h is None else hint_top - h for
                              h in hint_orig]
    # contrib_amt is all the widgets that are not at their max/min and
    # can afford to be made bigger/smaller
    # We only use the contrib_amt indices from now on
    contrib_prop_sum = float(
        sum((contrib_proportion[i] for i in contrib_amt)))
    if contrib_prop_sum < 1e-9:
        assert mult == 1.  # should only happen when all sh are zero
        return
    # "height" of a widget: its contributed amount normalized by its
    # share of the proportion sum; processed in increasing order below
    contrib_height = {
        i: val / (contrib_proportion[i] / contrib_prop_sum) for
        i, val in contrib_amt.items()}
    items = sorted(
        (i for i in contrib_amt),
        key=lambda x: contrib_height[x])
    j = items[0]
    sum_i_contributed = contrib_amt[j]
    last_height = contrib_height[j]
    sh_available_i = {j: sh_available[j]}
    contrib_prop_sum_i = contrib_proportion[j]
    n = len(items)  # check when n <= 1
    i = 1
    if 1 < n:
        j = items[1]
        curr_height = contrib_height[j]
        done = False
        while not done and i < n:
            # absorb every item at the current height level
            while i < n and last_height == curr_height:
                j = items[i]
                sum_i_contributed += contrib_amt[j]
                contrib_prop_sum_i += contrib_proportion[j]
                sh_available_i[j] = sh_available[j]
                curr_height = contrib_height[j]
                i += 1
            last_height = curr_height
            while not done:
                margin_height = ((margin + sum_i_contributed) /
                                 (contrib_prop_sum_i / contrib_prop_sum))
                if margin_height - curr_height > 1e-9 and i < n:
                    break
                done = True
                # saturate every item whose remaining slack is below the
                # margin level; its slack is paid out in full
                for k, available_sh in list(sh_available_i.items()):
                    if margin_height - available_sh / (
                            contrib_proportion[k] / contrib_prop_sum) > 1e-9:
                        del sh_available_i[k]
                        sum_i_contributed -= contrib_amt[k]
                        contrib_prop_sum_i -= contrib_proportion[k]
                        margin -= available_sh
                        hint[k] += mult * available_sh
                        done = False
                if not sh_available_i:  # all were under the margin
                    break
    if sh_available_i:
        assert contrib_prop_sum_i and margin
        # distribute the remaining margin among the unsaturated items,
        # proportionately to contrib_proportion
        margin_height = ((margin + sum_i_contributed) /
                         (contrib_prop_sum_i / contrib_prop_sum))
        for i in sh_available_i:
            hint[i] += mult * (
                margin_height * contrib_proportion[i] / contrib_prop_sum -
                contrib_amt[i])
/MaterialDjango-0.2.5.tar.gz/MaterialDjango-0.2.5/materialdjango/static/materialdjango/components/bower_components/paper-radio-button/.github/ISSUE_TEMPLATE.md | <!-- Instructions: https://github.com/PolymerElements/paper-radio-button/CONTRIBUTING.md#filing-issues -->
### Description
<!-- Example: The `paper-foo` element causes the page to turn pink when clicked. -->
### Expected outcome
<!-- Example: The page stays the same color. -->
### Actual outcome
<!-- Example: The page turns pink. -->
### Live Demo
<!-- Example: https://jsbin.com/cagaye/edit?html,output -->
### Steps to reproduce
<!-- Example
1. Put a `paper-foo` element in the page.
2. Open the page in a web browser.
3. Click the `paper-foo` element.
-->
### Browsers Affected
<!-- Check all that apply -->
- [ ] Chrome
- [ ] Firefox
- [ ] Safari 9
- [ ] Safari 8
- [ ] Safari 7
- [ ] Edge
- [ ] IE 11
- [ ] IE 10
| PypiClean |
/bareon-0.0.1a3.tar.gz/bareon-0.0.1a3/bareon/openstack/common/log.py | import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import re
import sys
import traceback
from oslo_config import cfg
from oslo_serialization import jsonutils
import six
from six import moves
from bareon.openstack.common.gettextutils import _
from bareon.openstack.common import importutils
from bareon.openstack.common import local
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
# NOTE(ldbragst): Let's build a list of regex objects using the list of
# _SANITIZE_KEYS we already have. This way, we only have to add the new key
# to the list of _SANITIZE_KEYS and we can generate regular expressions
# for XML and JSON automatically.
_SANITIZE_PATTERNS = []
_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
r'(<%(key)s>).*?(</%(key)s>)',
r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?[\'"])'
'.*?([\'"])',
r'(%(key)s\s*--?[A-z]+\s*).*?([\s])']
for key in _SANITIZE_KEYS:
for pattern in _FORMAT_PATTERNS:
reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
_SANITIZE_PATTERNS.append(reg_ex)
common_cli_opts = [
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output (set logging level to '
'DEBUG instead of default WARNING level).'),
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output (set logging level to '
'INFO instead of default WARNING level).'),
]
logging_cli_opts = [
cfg.StrOpt('log-config-append',
metavar='PATH',
deprecated_name='log-config',
help='The name of a logging configuration file. This file '
'is appended to any existing logging configuration '
'files. For details about logging configuration files, '
'see the Python logging module documentation.'),
cfg.StrOpt('log-format',
metavar='FORMAT',
help='DEPRECATED. '
'A logging.Formatter log message format string which may '
'use any of the available logging.LogRecord attributes. '
'This option is deprecated. Please use '
'logging_context_format_string and '
'logging_default_format_string instead.'),
cfg.StrOpt('log-date-format',
default=_DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %%(asctime)s in log records. '
'Default: %(default)s .'),
cfg.StrOpt('log-file',
metavar='PATH',
deprecated_name='logfile',
help='(Optional) Name of log file to output to. '
'If no default is set, logging will go to stdout.'),
cfg.StrOpt('log-dir',
deprecated_name='logdir',
help='(Optional) The base directory used for relative '
'--log-file paths.'),
cfg.BoolOpt('use-syslog',
default=False,
help='Use syslog for logging. '
'Existing syslog format is DEPRECATED during I, '
'and will change in J to honor RFC5424.'),
cfg.BoolOpt('use-syslog-rfc-format',
# TODO(bogdando) remove or use True after existing
# syslog format deprecation in J
default=False,
help='(Optional) Enables or disables syslog rfc5424 format '
'for logging. If enabled, prefixes the MSG part of the '
'syslog message with APP-NAME (RFC5424). The '
'format without the APP-NAME is deprecated in I, '
'and will be removed in J.'),
cfg.StrOpt('syslog-log-facility',
default='LOG_USER',
help='Syslog facility to receive log lines.')
]
generic_log_opts = [
cfg.BoolOpt('use_stderr',
default=True,
help='Log output to standard error.')
]
log_opts = [
cfg.StrOpt('logging_context_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [%(request_id)s %(user_identity)s] '
'%(instance)s%(message)s',
help='Format string to use for log messages with context.'),
cfg.StrOpt('logging_default_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [-] %(instance)s%(message)s',
help='Format string to use for log messages without context.'),
cfg.StrOpt('logging_debug_format_suffix',
default='%(funcName)s %(pathname)s:%(lineno)d',
help='Data to append to log format when level is DEBUG.'),
cfg.StrOpt('logging_exception_prefix',
default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
'%(instance)s',
help='Prefix each line of exception output with this format.'),
cfg.ListOpt('default_log_levels',
default=[
'amqp=WARN',
'amqplib=WARN',
'boto=WARN',
'qpid=WARN',
'sqlalchemy=WARN',
'suds=INFO',
'oslo.messaging=INFO',
'iso8601=WARN',
'requests.packages.urllib3.connectionpool=WARN'
],
help='List of logger=LEVEL pairs.'),
cfg.BoolOpt('publish_errors',
default=False,
help='Enables or disables publication of error events.'),
cfg.BoolOpt('fatal_deprecations',
default=False,
help='Enables or disables fatal status of deprecations.'),
# NOTE(mikal): there are two options here because sometimes we are handed
# a full instance (and could include more information), and other times we
# are just handed a UUID for the instance.
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
help='The format for an instance that is passed with the log '
'message. '),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
help='The format for an instance UUID that is passed with the '
'log message. '),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
try:
NullHandler = logging.NullHandler
except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7
class NullHandler(logging.Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
    """Resolve the target log file path from CONF.

    Returns CONF.log_file (joined onto CONF.log_dir when both are set), a
    "<binary>.log" path inside CONF.log_dir when only the directory is set,
    or None when file logging is not configured.
    """
    logfile = CONF.log_file
    logdir = CONF.log_dir
    if logfile:
        return os.path.join(logdir, logfile) if logdir else logfile
    if logdir:
        binary = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, binary),)
    return None
def mask_password(message, secret="***"):
    """Replace password values with *secret* in *message*.

    :param message: The string which includes security information.
    :param secret: value with which to replace passwords.
    :returns: The unicode value of message with the password fields masked.

    For example:

    >>> mask_password("'adminPass' : 'aaaaa'")
    "'adminPass' : '***'"
    >>> mask_password("'admin_pass' : 'aaaaa'")
    "'admin_pass' : '***'"
    >>> mask_password('"password" : "aaaaa"')
    '"password" : "***"'
    >>> mask_password("'original_password' : 'aaaaa'")
    "'original_password' : '***'"
    >>> mask_password("u'original_password' : u'aaaaa'")
    "u'original_password' : u'***'"
    """
    message = six.text_type(message)
    # NOTE(ldbragst): Check to see if anything in message contains any key
    # specified in _SANITIZE_KEYS, if not then just return the message since
    # we don't have to mask any passwords.
    if not any(key in message for key in _SANITIZE_KEYS):
        return message
    # Keep the surrounding capture groups so only the value between the
    # key/quote delimiters is replaced by the secret.
    secret = r'\g<1>' + secret + r'\g<2>'
    for pattern in _SANITIZE_PATTERNS:
        message = re.sub(pattern, secret, message)
    return message
class BaseLoggerAdapter(logging.LoggerAdapter):
def audit(self, msg, *args, **kwargs):
self.log(logging.AUDIT, msg, *args, **kwargs)
class LazyAdapter(BaseLoggerAdapter):
def __init__(self, name='unknown', version='unknown'):
self._logger = None
self.extra = {}
self.name = name
self.version = version
@property
def logger(self):
if not self._logger:
self._logger = getLogger(self.name, self.version)
return self._logger
class ContextAdapter(BaseLoggerAdapter):
warn = logging.LoggerAdapter.warning
def __init__(self, logger, project_name, version_string):
self.logger = logger
self.project = project_name
self.version = version_string
self._deprecated_messages_sent = dict()
@property
def handlers(self):
return self.logger.handlers
def deprecated(self, msg, *args, **kwargs):
"""Call this method when a deprecated feature is used.
If the system is configured for fatal deprecations then the message
is logged at the 'critical' level and :class:`DeprecatedConfig` will
be raised.
Otherwise, the message will be logged (once) at the 'warn' level.
:raises: :class:`DeprecatedConfig` if the system is configured for
fatal deprecations.
"""
stdmsg = _("Deprecated: %s") % msg
if CONF.fatal_deprecations:
self.critical(stdmsg, *args, **kwargs)
raise DeprecatedConfig(msg=stdmsg)
# Using a list because a tuple with dict can't be stored in a set.
sent_args = self._deprecated_messages_sent.setdefault(msg, list())
if args in sent_args:
# Already logged this message, so don't log it again.
return
sent_args.append(args)
self.warn(stdmsg, *args, **kwargs)
def process(self, msg, kwargs):
# NOTE(mrodden): catch any Message/other object and
# coerce to unicode before they can get
# to the python logging and possibly
# cause string encoding trouble
if not isinstance(msg, six.string_types):
msg = six.text_type(msg)
if 'extra' not in kwargs:
kwargs['extra'] = {}
extra = kwargs['extra']
context = kwargs.pop('context', None)
if not context:
context = getattr(local.store, 'context', None)
if context:
extra.update(_dictify_context(context))
instance = kwargs.pop('instance', None)
instance_uuid = (extra.get('instance_uuid') or
kwargs.pop('instance_uuid', None))
instance_extra = ''
if instance:
instance_extra = CONF.instance_format % instance
elif instance_uuid:
instance_extra = (CONF.instance_uuid_format
% {'uuid': instance_uuid})
extra['instance'] = instance_extra
extra.setdefault('user_identity', kwargs.pop('user_identity', None))
extra['project'] = self.project
extra['version'] = self.version
extra['extra'] = extra.copy()
return msg, kwargs
class JSONFormatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
# NOTE(jkoelker) we ignore the fmt argument, but its still there
# since logging.config.fileConfig passes it.
self.datefmt = datefmt
def formatException(self, ei, strip_newlines=True):
lines = traceback.format_exception(*ei)
if strip_newlines:
lines = [moves.filter(
lambda x: x,
line.rstrip().splitlines()) for line in lines]
lines = list(itertools.chain(*lines))
return lines
def format(self, record):
message = {'message': record.getMessage(),
'asctime': self.formatTime(record, self.datefmt),
'name': record.name,
'msg': record.msg,
'args': record.args,
'levelname': record.levelname,
'levelno': record.levelno,
'pathname': record.pathname,
'filename': record.filename,
'module': record.module,
'lineno': record.lineno,
'funcname': record.funcName,
'created': record.created,
'msecs': record.msecs,
'relative_created': record.relativeCreated,
'thread': record.thread,
'thread_name': record.threadName,
'process_name': record.processName,
'process': record.process,
'traceback': None}
if hasattr(record, 'extra'):
message['extra'] = record.extra
if record.exc_info:
message['traceback'] = self.formatException(record.exc_info)
return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    """Return a sys.excepthook that logs uncaught exceptions as critical
    under the *product_name* logger."""
    def logging_excepthook(exc_type, value, tb):
        summary = "".join(
            traceback.format_exception_only(exc_type, value))
        # resolve the logger lazily, at the time the hook fires
        getLogger(product_name).critical(
            summary, exc_info=(exc_type, value, tb))
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging configuration file cannot be loaded."""

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
def _load_log_config(log_config_append):
    """Append the fileConfig-format file *log_config_append* to the current
    logging configuration, keeping already-existing loggers enabled.

    :raises LogConfigError: if the configuration file cannot be parsed
    """
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except moves.configparser.Error as exc:
        raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
    """Setup logging for *product_name*.

    When CONF.log_config_append is set, that logging config file is applied;
    otherwise handlers and formatters are built from the individual CONF
    options. An excepthook that logs uncaught exceptions is installed either
    way.
    """
    if CONF.log_config_append:
        _load_log_config(CONF.log_config_append)
    else:
        _setup_logging_from_conf(product_name, version)
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string):
cfg.set_defaults(log_opts,
logging_context_format_string=
logging_context_format_string)
def _find_facility_from_conf():
    """Map CONF.syslog_log_facility to a SysLogHandler facility value.

    Accepts both the handler constant name (e.g. "LOG_USER") and the plain
    facility name (e.g. "user").

    :return: the numeric syslog facility
    :raises TypeError: if the configured facility name is unknown
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)
    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)
    if facility is None:
        # dict.keys() is a view on Python 3 and has no extend(); build a
        # real list so the error message can include both name styles.
        valid_facilities = list(facility_names)
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))
    return facility
class RFCSysLogHandler(logging.handlers.SysLogHandler):
def __init__(self, *args, **kwargs):
self.binary_name = _get_binary_name()
# Do not use super() unless type(logging.handlers.SysLogHandler)
# is 'type' (Python 2.7).
# Use old style calls, if the type is 'classobj' (Python 2.6)
logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)
def format(self, record):
# Do not use super() unless type(logging.handlers.SysLogHandler)
# is 'type' (Python 2.7).
# Use old style calls, if the type is 'classobj' (Python 2.6)
msg = logging.handlers.SysLogHandler.format(self, record)
msg = self.binary_name + ' ' + msg
return msg
def _setup_logging_from_conf(project, version):
log_root = getLogger(None).logger
for handler in log_root.handlers:
log_root.removeHandler(handler)
if CONF.use_syslog:
facility = _find_facility_from_conf()
# TODO(bogdando) use the format provided by RFCSysLogHandler
# after existing syslog format deprecation in J
if CONF.use_syslog_rfc_format:
syslog = RFCSysLogHandler(address='/dev/log',
facility=facility)
else:
syslog = logging.handlers.SysLogHandler(address='/dev/log',
facility=facility)
log_root.addHandler(syslog)
logpath = _get_log_file_path()
if logpath:
filelog = logging.handlers.WatchedFileHandler(logpath)
log_root.addHandler(filelog)
if CONF.use_stderr:
streamlog = ColorHandler()
log_root.addHandler(streamlog)
elif not logpath:
# pass sys.stdout as a positional argument
# python2.6 calls the argument strm, in 2.7 it's stream
streamlog = logging.StreamHandler(sys.stdout)
log_root.addHandler(streamlog)
if CONF.publish_errors:
handler = importutils.import_object(
"bareon.openstack.common.log_handler.PublishErrorsHandler",
logging.ERROR)
log_root.addHandler(handler)
datefmt = CONF.log_date_format
for handler in log_root.handlers:
# NOTE(alaski): CONF.log_format overrides everything currently. This
# should be deprecated in favor of context aware formatting.
if CONF.log_format:
handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
datefmt=datefmt))
log_root.info('Deprecated: log_format is now deprecated and will '
'be removed in the next release')
else:
handler.setFormatter(ContextFormatter(project=project,
version=version,
datefmt=datefmt))
if CONF.debug:
log_root.setLevel(logging.DEBUG)
elif CONF.verbose:
log_root.setLevel(logging.INFO)
else:
log_root.setLevel(logging.WARNING)
for pair in CONF.default_log_levels:
mod, _sep, level_name = pair.partition('=')
logger = logging.getLogger(mod)
# NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
# to integer code.
if sys.version_info < (2, 7):
level = logging.getLevelName(level_name)
logger.setLevel(level)
else:
logger.setLevel(level_name)
# Cache of ContextAdapter instances, keyed by logger name, so repeated
# getLogger() calls for the same name return the same adapter.
_loggers = {}


def getLogger(name='unknown', version='unknown'):
    """Return a cached ContextAdapter wrapping the named stdlib logger.

    :param name: logger name, also used as the cache key
    :param version: project version reported through the adapter
    """
    try:
        return _loggers[name]
    except KeyError:
        adapter = ContextAdapter(logging.getLogger(name), name, version)
        _loggers[name] = adapter
        return adapter
def getLazyLogger(name='unknown', version='unknown'):
    """Return a lazily-initialized logger adapter.

    Creates a pass-through ``LazyAdapter`` that does not create the real
    logger until it is really needed and delegates all calls to the real
    logger once it is created.

    :param name: logger name forwarded to the real logger when created
    :param version: project version forwarded to the real logger
    """
    return LazyAdapter(name, version)
class WritableLogger(object):
    """File-like adapter that forwards ``write`` calls to a logger.

    Useful for handing to APIs that expect a writable stream (e.g. WSGI
    servers) so their output lands in the logging system instead.
    """

    def __init__(self, logger, level=logging.INFO):
        # Target logger and the severity each written line is logged at.
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Strip trailing whitespace/newlines so log output has no blank lines.
        stripped = msg.rstrip()
        self.logger.log(self.level, stripped)
class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string. You can also specify
    logging_debug_format_suffix to append extra formatting if the log level is
    debug.

    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter

    If available, uses the context value stored in TLS - local.store.context
    """
    def __init__(self, *args, **kwargs):
        """Initialize ContextFormatter instance

        Takes additional keyword arguments which can be used in the message
        format string.

        :keyword project: project name
        :type project: string
        :keyword version: project version
        :type version: string
        """
        # Pop our custom kwargs before delegating so the base Formatter
        # does not choke on unexpected keywords.
        self.project = kwargs.pop('project', 'unknown')
        self.version = kwargs.pop('version', 'unknown')
        logging.Formatter.__init__(self, *args, **kwargs)
    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""
        # store project info
        record.project = self.project
        record.version = self.version
        # store request info from thread-local storage, if present
        context = getattr(local.store, 'context', None)
        if context:
            d = _dictify_context(context)
            for k, v in d.items():
                setattr(record, k, v)
        # NOTE(sdague): default the fancier formatting params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color', 'user_identity'):
            if key not in record.__dict__:
                record.__dict__[key] = ''
        # Select the per-record format string: the context-aware format when
        # a request id is present, the plain default otherwise.  Mutating
        # self._fmt is how legacy logging.Formatter picks its format string.
        if record.__dict__.get('request_id'):
            self._fmt = CONF.logging_context_format_string
        else:
            self._fmt = CONF.logging_default_format_string
        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            self._fmt += " " + CONF.logging_debug_format_suffix
        # Cache this on the record, Logger will respect our formatted copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)
    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        # Without a record there is nothing to interpolate the prefix
        # against, so fall back to the stock traceback formatting.
        if not record:
            return logging.Formatter.formatException(self, exc_info)
        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()
        # Only compute record.asctime when the prefix actually references it.
        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)
        formatted_lines = []
        for line in lines:
            # Prefix every traceback line so multi-line exceptions remain
            # attributable in aggregated log output.
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
    """Stream handler that injects an ANSI color code into each record.

    The code is exposed as ``record.color`` so format strings containing
    ``%(color)s`` can colorize terminal output.

    NOTE(review): relies on ``logging.AUDIT``, a non-standard level that
    must have been registered on the ``logging`` module elsewhere in this
    package before this class body executes — confirm.
    """
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m', # GREEN
        logging.INFO: '\033[00;36m', # CYAN
        logging.AUDIT: '\033[01;36m', # BOLD CYAN
        logging.WARN: '\033[01;33m', # BOLD YELLOW
        logging.ERROR: '\033[01;31m', # BOLD RED
        logging.CRITICAL: '\033[01;31m', # BOLD RED
    }
    def format(self, record):
        # Attach the level's color code, then defer to the base formatter.
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised on a fatal use of a deprecated config option.

    The offending detail is interpolated into ``message`` via ``%(msg)s``.
    """

    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        # Bug fix: the previous ``super(Exception, self)`` started the MRO
        # lookup *above* Exception (i.e. at BaseException), silently
        # skipping Exception.__init__ and any cooperative subclass
        # initializers.  Start the lookup at this class instead.
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))
/lektor-3.4.0b6-py3-none-any.whl/lektor/pluginsystem.py | from __future__ import annotations
import inspect
import os
import sys
import warnings
from pathlib import Path
from typing import Type
from weakref import ref as weakref
from inifile import IniFile
from lektor.compat import importlib_metadata as metadata
from lektor.context import get_ctx
from lektor.utils import process_extra_flags
def get_plugin(plugin_id_or_class, env=None):
    """Look up the plugin instance registered under an id or plugin class.

    :param plugin_id_or_class: a plugin id string, or a Plugin subclass
    :param env: the environment to search; defaults to the active context's
    :raises RuntimeError: if no env was given and no context is active
    :raises LookupError: if nothing is registered under the resolved id
    """
    if env is None:
        ctx = get_ctx()
        if ctx is None:
            raise RuntimeError(
                "Context is unavailable and no environment "
                "was passed to the function."
            )
        env = ctx.env
    # Map a plugin class to its registered id; anything else is assumed to
    # already be an id and falls through unchanged.
    plugin_id = env.plugin_ids_by_class.get(plugin_id_or_class, plugin_id_or_class)
    try:
        plugin = env.plugins[plugin_id]
    except KeyError as error:
        raise LookupError("Plugin %r not found" % plugin_id) from error
    return plugin
class Plugin:
    """This needs to be subclassed for custom plugins.

    Subclasses typically override ``name``/``description`` and implement
    ``on_<event>`` hook methods which PluginController.emit dispatches to.
    """
    name = "Your Plugin Name"
    description = "Description goes here"
    # Source distribution providing this plugin; injected externally by
    # PluginController.instanciate_plugin after construction.
    __dist: metadata.Distribution | None = None
    def __init__(self, env, id):
        # Weak reference avoids a cycle: the environment owns the plugin
        # registry, which would otherwise keep the environment alive.
        self._env = weakref(env)
        self.id = id
    @property
    def env(self):
        # Dereference the weakref; fails loudly if the environment is gone.
        rv = self._env()
        if rv is None:
            raise RuntimeError("Environment went away")
        return rv
    @property
    def version(self):
        # Version string from the installed distribution, if one was recorded.
        if self.__dist is not None:
            return self.__dist.version
        return None
    @property
    def path(self) -> str | None:
        """Filesystem path of the plugin package, or None when the package
        lives in the project's package cache (i.e. is not a local,
        editable install)."""
        mod = sys.modules[self.__class__.__module__.split(".", maxsplit=1)[0]]
        path = Path(mod.__file__).resolve().parent
        package_cache = self.env.project.get_package_cache_path()
        try:
            # We could use Path.is_relative_to(), except that's py39+ only
            path.relative_to(package_cache)
            # We're only interested in local, editable packages. This is not one.
            return None
        except ValueError:
            pass
        return os.fspath(path)
    @property
    def import_name(self):
        # "module:ClassName" import spec for this plugin class.
        return self.__class__.__module__ + ":" + self.__class__.__name__
    def get_lektor_config(self):
        """Returns the global config."""
        ctx = get_ctx()
        if ctx is not None:
            cfg = ctx.pad.db.config
        else:
            cfg = self.env.load_config()
        return cfg
    @property
    def config_filename(self):
        """The filename of the plugin specific config file."""
        return os.path.join(self.env.root_path, "configs", self.id + ".ini")
    def get_config(self, fresh=False):
        """Returns the config specific for this plugin. By default this
        will be cached for the current build context but this can be
        disabled by passing ``fresh=True``.
        """
        ctx = get_ctx()
        if ctx is not None and not fresh:
            # Per-build-context cache of parsed plugin config files.
            cache = ctx.cache.setdefault(__name__ + ":configs", {})
            cfg = cache.get(self.id)
            if cfg is None:
                cfg = IniFile(self.config_filename)
                cache[self.id] = cfg
        else:
            cfg = IniFile(self.config_filename)
        if ctx is not None:
            # Rebuilds should be triggered when the plugin config changes.
            ctx.record_dependency(self.config_filename)
        return cfg
    def emit(self, event, **kwargs):
        # Events are namespaced with the plugin id, e.g. "myplugin-setup".
        return self.env.plugin_controller.emit(self.id + "-" + event, **kwargs)
    def to_json(self):
        """Return a JSON-serializable summary of this plugin."""
        return {
            "id": self.id,
            "name": self.name,
            "version": self.version,
            "description": self.description,
            "path": self.path,
            "import_name": self.import_name,
        }
def _find_plugins():
    """Find all available plugins.

    Returns an iterator of (distribution, entry_point) pairs.
    """
    for dist in metadata.distributions():
        for ep in dist.entry_points:
            if ep.group == "lektor.plugins":
                # Enforce the lektor-<plugin_id> distribution naming
                # convention before exposing the plugin (raises
                # RuntimeError on mismatch).
                _check_dist_name(dist.metadata["Name"], ep.name)
                yield dist, ep
def _check_dist_name(dist_name, plugin_id):
    """Validate the distribution name that provides a plugin.

    The distribution for plugin *plugin_id* must be named
    ``lektor-<plugin_id>`` (compared case-insensitively); otherwise a
    RuntimeError is raised.
    """
    # XXX: do we really need to be so strict about distribution names?
    expected = "lektor-" + plugin_id.lower()
    if dist_name.lower() == expected:
        return
    raise RuntimeError(
        "Disallowed distribution name: distribution name for "
        f"plugin {plugin_id!r} must be {expected!r} (not {dist_name!r})."
    )
def initialize_plugins(env):
    """Discover, register and set up all plugins for *env*."""
    controller = env.plugin_controller
    for dist, entry_point in _find_plugins():
        # The entry point name doubles as the plugin id.
        controller.instanciate_plugin(entry_point.name, entry_point.load(), dist)
    # Give every registered plugin a chance to hook into the environment.
    controller.emit("setup-env")
class PluginController:
    """Helper management class that is used to control plugins through
    the environment.
    """
    def __init__(self, env, extra_flags=None):
        # Weak reference avoids a cycle: the environment owns this
        # controller, which would otherwise keep the environment alive.
        self._env = weakref(env)
        self.extra_flags = extra_flags
    @property
    def env(self):
        rv = self._env()
        if rv is None:
            raise RuntimeError("Environment went away")
        return rv
    def instanciate_plugin(
        self,
        plugin_id: str,
        plugin_cls: Type[Plugin],
        dist: metadata.Distribution | None = None,
    ) -> None:
        """Create a plugin instance and register it under *plugin_id*.

        :raises RuntimeError: if the id is already registered.
        """
        env = self.env
        if plugin_id in env.plugins:
            raise RuntimeError('Plugin "%s" is already registered' % plugin_id)
        plugin = plugin_cls(env, plugin_id)
        # Plugin.version needs the source distribution to be able to cleanly determine
        # the plugin version. For reasons of backward compatibility, we don't want to
        # change the signature of the constructor, so we stick it in a private attribute
        # here.
        plugin._Plugin__dist = dist
        env.plugins[plugin_id] = plugin
        env.plugin_ids_by_class[plugin_cls] = plugin_id
    def iter_plugins(self):
        # XXX: sort?
        return self.env.plugins.values()
    def emit(self, event, **kwargs):
        """Invoke event hook for all plugins that support it.

        Any ``kwargs`` are passed to the hook methods.

        Returns a dict mapping plugin ids to hook method return values.
        """
        rv = {}
        extra_flags = process_extra_flags(self.extra_flags)
        # "setup-env" -> on_setup_env, etc.
        funcname = "on_" + event.replace("-", "_")
        for plugin in self.iter_plugins():
            handler = getattr(plugin, funcname, None)
            if handler is not None:
                kw = {**kwargs, "extra_flags": extra_flags}
                try:
                    # Probe the handler's signature first: legacy handlers
                    # that do not accept extra_flags get called without it.
                    inspect.signature(handler).bind(**kw)
                except TypeError:
                    del kw["extra_flags"]
                rv[plugin.id] = handler(**kw)
                if "extra_flags" not in kw:
                    warnings.warn(
                        # deprecated since 3.2.0
                        f"The plugin {plugin.id!r} function {funcname!r} does not "
                        "accept extra_flags. "
                        "It should be updated to accept `**extra` so that it will "
                        "not break if new parameters are passed to it by newer "
                        "versions of Lektor.",
                        DeprecationWarning,
                    )
        return rv
/Dero-0.15.0-py3-none-any.whl/dero/modeler/model.py | from copy import deepcopy
from sympy import Eq, solve, Expr
from dero.modeler.typing import EqList, Any, List, Equation, EquationOrNone
from dero.ext_sympy.subs import substitute_equations, substitute_equations_ordered
from dero.ext_sympy.match import get_equation_where_lhs_matches
class Model:
    """A system of sympy equations with substitution and solving helpers.

    Two parallel lists are maintained: ``equations`` (the equations as
    passed in) and ``evaluated_equations`` (with every IndexedEquation
    expanded into its concrete per-index equations).
    """
    def __init__(self, equations: EqList):
        self.set_equations(equations)
    def set_equations(self, equations: EqList):
        """Store *equations*, expanding any indexed equations for evaluation."""
        self.equations = []
        self.evaluated_equations = []
        for equation in equations:
            self.equations.append(equation)
            if hasattr(equation, 'is_IndexedEquation') and equation.is_IndexedEquation:
                # An indexed equation contributes one equation per index.
                self.evaluated_equations.extend(equation.evaluated_index_eqs)
            else:
                self.evaluated_equations.append(equation)
    def sub_eq(self, eq: Equation, deep_substitute: bool = True) -> Eq:
        """Substitute the model's evaluated equations into *eq*.

        ``deep_substitute`` selects ``substitute_equations``; otherwise a
        single ordered pass (``substitute_equations_ordered``) is used.
        """
        if deep_substitute:
            return substitute_equations(eq, self.evaluated_equations)
        else:
            return substitute_equations_ordered(eq, self.evaluated_equations)
    def solve(self, *symbols, **flags):
        """Solve the evaluated equation system via :func:`sympy.solve`."""
        return solve(self.evaluated_equations, *symbols, **flags)
    def get_eq_for(self, lhs_expr: Expr) -> EquationOrNone:
        """Return the equation whose left-hand side matches *lhs_expr*, if any."""
        return get_equation_where_lhs_matches(lhs_expr, self.evaluated_equations)
    def subs(self, *args, **kwargs):
        """Return a new Model with the substitution applied to every equation.

        The original model is left untouched (a deep copy is modified).
        """
        new_model = deepcopy(self)
        new_model.equations = [eq.subs(*args, **kwargs) for eq in self.equations]
        new_model.evaluated_equations = [eq.subs(*args, **kwargs) for eq in self.evaluated_equations]
        # TODO: deep sub, run on loop, extract definitions so they don't evaluate as true. stop when equations stop changing.
        new_model._eliminate_useless_eqs() # need to do before reevaluating as expecting eq but have True
        new_model._reevaluate_eqs()
        new_model._eliminate_useless_eqs() # need to do again after reevaluating as some are newly True
        return new_model
    def _eliminate_useless_eqs(self):
        # Equations that reduced to the boolean True carry no information.
        self.equations = [eq for eq in self.equations if not eq == True]
        self.evaluated_equations = [eq for eq in self.evaluated_equations if not eq == True]
    def _reevaluate_eqs(self):
        # Re-substitute each equation through the others so chained
        # definitions propagate after a subs().
        self.equations = [
            substitute_equations_ordered(eq, self.equations) for eq in self.equations
        ]
        self.evaluated_equations = [
            substitute_equations_ordered(eq, self.evaluated_equations) for eq in self.evaluated_equations
        ]
/FROST2STAC-1.0.tar.gz/FROST2STAC-1.0/CONTRIBUTING.rst | .. highlight:: shell
============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every little bit
helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at https://codebase.helmholtz.cloud/CAT4KIT/frost2stac/issues.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Fix Bugs
~~~~~~~~
Look through the Helmholtz Gitlab issues for bugs. Anything tagged with "bug" and "help
wanted" is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the Helmholtz Gitlab issues for features. Anything tagged with "enhancement"
and "help wanted" is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
FROST2STAC could always use more documentation, whether as part of the
official FROST2STAC docs, in docstrings, or even on the web in blog posts,
articles, and such.
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an issue at https://codebase.helmholtz.cloud/CAT4KIT/frost2stac/issues.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `frost2stac` for local development.
1. Fork the `frost2stac` repo on Helmholtz Gitlab.
2. Clone your fork locally::
$ git clone git@codebase.helmholtz.cloud:frost2stac/frost2stac.git
3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
$ mkvirtualenv frost2stac
$ cd frost2stac/
$ python setup.py develop
4. Create a branch for local development::
$ git checkout -b name-of-your-bugfix-or-feature
Now you can make your changes locally.
5. When you're done making changes, check that your changes pass flake8 and the
tests, including testing other Python versions with tox::
$ flake8 frost2stac tests
$ python setup.py test or pytest
$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to Helmholtz Gitlab::
$ git add .
$ git commit -m "Your detailed description of your changes."
$ git push origin name-of-your-bugfix-or-feature
7. Submit a pull request through the Helmholtz Gitlab website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy. Check
https://travis-ci.com/CAT4KIT/frost2stac/pull_requests
and make sure that the tests pass for all supported Python versions.
Tips
----
To run a subset of tests::
    $ pytest tests/test_frost2stac.py
Deploying
---------
A reminder for the maintainers on how to deploy.
Make sure all your changes are committed (including an entry in HISTORY.rst).
Then run::
$ bump2version patch # possible: major / minor / patch
$ git push
$ git push --tags
Travis will then deploy to PyPI if tests pass.
| PypiClean |
/Gnotty-0.2.7.tar.gz/Gnotty-0.2.7/gnotty/static/js/gnotty.js | WEB_SOCKET_SWF_LOCATION = '/static/swf/WebSocketMain.swf';
/*
    Manages a connection to an IRC room - takes an options object
    that should include the following members:

    - httpHost:    HTTP host for the gnotty WebSocket server.
    - httpPort:    HTTP port for the gnotty WebSocket server.
    - ircHost:     IRC host to connect to.
    - ircPort:     IRC port to connect to.
    - ircChannel:  IRC channel to join.
    - ircNickname: IRC nickname.
    - ircPassword: IRC password (optional).

    The following methods are implemented:

    - message(message):      Sends a message string to the channel
    - leave():               Disconnect from the channel
    - onJoin():              Called when the client has joined the channel
    - onInvalid():           Called if the nickname used is invalid, eg:
                             too long, or contains invalid characters.
    - onNicknames(nicknames): Called each time someone joins or leaves the
                             channel, nicknames is an unsorted array of
                             strings.
    - onMessage(message):    Called when a message is received from the
                             channel, message is an object with nickname
                             and message string members.
    - onLeave():             Called when client.leave() has completed
*/
var IRCClient = function(options) {

    var self = this;

    // Copy every option (hosts, ports, channel, nickname, callbacks)
    // directly onto the client instance.
    for (var k in options) {
        self[k] = options[k];
    }

    // '0.0.0.0' is a bind-all address, not a connectable host, so fall
    // back to a same-host relative connection.
    var host = options.httpHost == '0.0.0.0' ? '' : options.httpHost;
    self.socket = io.connect(host + ':' + options.httpPort, {
        transports: ['websocket', 'htmlfile', 'xhr-multipart',
                     'xhr-polling', 'jsonp-polling']
    });

    self.message = function(message) {
        self.socket.emit('message', message);
    };

    self.leave = function() {
        self.socket.disconnect();
        if (self.onLeave) {
            // Poll until the socket reports disconnected, then fire the
            // onLeave callback exactly once.
            var interval = setInterval(function() {
                if (!self.socket.socket.connected) {
                    clearInterval(interval);
                    self.onLeave();
                }
            }, 100);
        }
    };

    // Once the transport is up, ask the server to join the IRC channel.
    self.socket.on('connect', function() {
        self.socket.emit('start', options.ircHost, options.ircPort,
                         options.ircChannel, options.ircNickname,
                         options.ircPassword);
    });

    self.socket.on('join', function() {
        if (self.onJoin) {
            self.onJoin();
        }
    });

    self.socket.on('invalid', function() {
        self.socket.disconnect();
        if (self.onInvalid) {
            self.onInvalid();
        } else {
            alert('Invalid nickname');
        }
    });

    self.socket.on('nicknames', function(nicknames) {
        if (self.onNicknames) {
            self.onNicknames(nicknames);
        }
    });

    self.socket.on('message', function(nickname, message, color) {
        if (self.onMessage) {
            self.onMessage({
                nickname: nickname,
                message: message,
                color: color
            });
        }
    });

};
// UI setup: wires the join form, message form and window focus handling
// to an IRCClient instance.
var gnotty = function(options) {

    var focused = true;
    var joining = false;
    var unread = 0;
    var title = $('title').text();
    // Bug fix: `client` was previously assigned inside start() without
    // `var`, leaking an implicit global (and breaking under strict mode).
    // Declaring it here keeps it shared between start() and the submit
    // handler below, as before.
    var client;

    // Main setup function called when nickname is entered.
    // Creates IRC client and sets up event handlers.
    var start = function(nickname, password) {

        // Start the IRC client.
        joining = true;
        options.ircNickname = nickname;
        options.ircPassword = password;
        client = new IRCClient(options);

        // Set up the loading animation.
        $('.loading').modal({backdrop: 'static'}).css({opacity: 0.7});
        var bar = $('.loading .bar');
        var width = $('.loading .progress').css({opacity: 0.5}).width();
        var connectTimeout = 30000;
        $('.loading .bar').animate({width: width}, connectTimeout / 2);

        // Fade the page out and reload it whenever we're finished,
        // such as an error occurring, or explicitly leaving.
        client.onLeave = function() {
            $('body').fadeOut('fast', function() {
                location = location.href.split('?')[0];
            });
        };

        // Error handler - shows an error message, and leaves.
        var error = function(message) {
            if (message) {
                alert(message);
            }
            client.leave();
        };

        // Took too long to connect.
        var timeout = setTimeout(function() {
            error('Took too long to connect, please try again');
        }, connectTimeout);

        // Name in use, too long, invalid chars.
        client.onInvalid = function() {
            error('Invalid nickname, please try again');
        };

        // Animations for setting up the main chat interface
        // once successfully joined.
        var joined = function() {
            $('.loading').modal('hide');
            $('#password').hide();
            $('#input').removeClass('nick').addClass('msg');
            $('#input').animate({width: '65%'}, function() {
                $('#input').attr('placeholder', 'message');
                $('#leave').fadeIn();
                $('.hidden').slideDown(function() {
                    $('#submit').addClass('submit-joined').val('Send');
                    $('#messages').fadeIn();
                }).removeClass('hidden');
            });
            $('#leave').click(function() {
                client.leave();
            });
        }

        // On join, finish the progress animation.
        client.onJoin = function() {
            joining = false;
            bar.stop().animate({width: width}, 500);
            clearTimeout(timeout);
            var interval = setInterval(function() {
                if (bar.width() == width) {
                    clearInterval(interval);
                    joined();
                }
            }, 100);
        };

        // Render the nicknames list each time we receive it,
        // which is each time someone joins or leaves.
        client.onNicknames = function(nicknames) {
            var data = {nicknames: nicknames};
            $('#nicknames').html($('#nicknames-template').tmpl(data));
        };

        // Message received handler.
        client.onMessage = function(data) {
            if ((data.message == 'joins' || data.message == 'leaves')
                && !showJoinsAndLeaves()) {
                return;
            }
            // Add a timestamp to each message as we receive it, and
            // add it to the messages display.
            var d = new Date();
            var parts = [d.getHours(), d.getMinutes(), d.getSeconds()];
            data.time = $.map(parts, function(s) {
                return (String(s).length == 1 ? '0' : '') + s;
            }).join(':')
            data.message = urlize($('<div>').text(data.message).html());
            // Auto-scroll the window if we're at the bottom of the
            // messages list. We need to calculate it before we add
            // actual message to the list.
            var win = $(window);
            var doc = $(window.document);
            var bottom = win.scrollTop() + win.height() >= doc.height();
            $('#messages-template').tmpl(data).appendTo('#messages');
            if (bottom) {
                window.scrollBy(0, 10000);
            }
            // Add the number of unread messages to the title if the
            // page isn't focused.
            if (!focused) {
                unread += 1;
                var s = (unread == 1 ? '' : 's');
                $('title').text('(' + unread + ' message' + s + ') ' + title);
            }
        };

    };

    // Main submit handler - if there are still hidden elements,
    // we haven't connected yet, so the value submitted is the
    // initial nickname. Otherwise we've started, and the value
    // submitted is a message.
    $('.chat').submit(function() {
        var value = $('#input').val();
        if (!joining && value) {
            if ($('.hidden').length > 0) {
                start(value, $('#password').val());
            } else {
                client.message(value);
            }
        }
        $('#input').val('').focus();
        return false;
    });

    // Wat. Enter key stops triggering the above form submit if the
    // submit button is not visible via media queries on small
    // devices, so we need to trigger it manually here.
    $('#input').keypress(function(e) {
        if (e.keyCode == 13) {
            $('.chat').submit();
            return false;
        }
    });

    // Join if there's a nickname in the querystring.
    var parts = location.href.split('?nickname=');
    if (parts.length == 2) {
        start(parts[1].split('&')[0]);
    }

    // When the window loses focus, reset the unread messages count.
    $(window).blur(function() {
        unread = 0;
        focused = false;
    });

    // When the window regains focus, remove the unread messages
    // count from the page title.
    $(window).focus(function() {
        focused = true;
        $('title').text(title);
    });

    // Focus the main input box on first load.
    $('#input').val('').focus();

    // Only add the password field when we're on the actual chat
    // interface, since having it in the archive interfaces
    // would result in the password being put into the
    // querystring when the join form is submitted regularly.
    $('#input').after('<input type="password" class="input-xlarge" ' +
                      'id="password" placeholder="password (optional)" ' +
                      'name="password" autocomplete="off">');

    // Remove the action of the form for the show joins/leaves checkbox.
    // This prevents the page from being reloaded, which is what happens
    // (triggered in show-joins-leaves.js) when the checkbox changes,
    // in any of the archive views, since we need to reload to
    // to show any changes triggered by this, but when we're chatting
    // in the channel, this happens on the fly.
    $('#joins-leaves').attr({action: ''});

};
/Flask_AdminLTE3-1.0.9-py3-none-any.whl/flask_adminlte3/static/plugins/codemirror/mode/commonlisp/commonlisp.js |
// CodeMirror mode for Common Lisp: a tokenizer handling symbols, numeric
// literals, strings, line/block comments and #-dispatch reader macros,
// plus simple indentation tracking via a context stack.
(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

CodeMirror.defineMode("commonlisp", function (config) {
  // NOTE(review): `let*` is unescaped here, so the regex actually matches
  // "le" followed by zero or more "t" (e.g. "le", "let", "lett") rather
  // than the literal symbol let* — confirm against upstream CodeMirror.
  var specialForm = /^(block|let*|return-from|catch|load-time-value|setq|eval-when|locally|symbol-macrolet|flet|macrolet|tagbody|function|multiple-value-call|the|go|multiple-value-prog1|throw|if|progn|unwind-protect|labels|progv|let|quote)$/;
  // Heuristic for operators whose following forms are an indented "body".
  var assumeBody = /^with|^def|^do|^prog|case$|^cond$|bind$|when$|unless$/;
  // Integers, ratios, floats and #b/#o/#x radix literals.
  var numLiteral = /^(?:[+\-]?(?:\d+|\d*\.\d+)(?:[efd][+\-]?\d+)?|[+\-]?\d+(?:\/[+\-]?\d+)?|#b[+\-]?[01]+|#o[+\-]?[0-7]+|#x[+\-]?[\da-f]+)/;
  // Any character that can appear in an unescaped symbol name.
  var symbol = /[^\s'`,@()\[\]";]/;
  // Side-channel: set by the tokenizer so token() can update the context.
  var type;

  // Consume the rest of a symbol, honoring backslash escapes.
  function readSym(stream) {
    var ch;
    while (ch = stream.next()) {
      if (ch == "\\") stream.next();
      else if (!symbol.test(ch)) { stream.backUp(1); break; }
    }
    return stream.current();
  }

  // Top-level tokenizer state.
  function base(stream, state) {
    if (stream.eatSpace()) {type = "ws"; return null;}
    if (stream.match(numLiteral)) return "number";
    var ch = stream.next();
    if (ch == "\\") ch = stream.next();

    if (ch == '"') return (state.tokenize = inString)(stream, state);
    else if (ch == "(") { type = "open"; return "bracket"; }
    else if (ch == ")" || ch == "]") { type = "close"; return "bracket"; }
    else if (ch == ";") { stream.skipToEnd(); type = "ws"; return "comment"; }
    else if (/['`,@]/.test(ch)) return null;
    else if (ch == "|") {
      // |...| delimited symbol.
      if (stream.skipTo("|")) { stream.next(); return "symbol"; }
      else { stream.skipToEnd(); return "error"; }
    } else if (ch == "#") {
      // Reader macros: #(, #+, #-, #=, #., #', #nn#, #|, #:, #\.
      var ch = stream.next();
      if (ch == "(") { type = "open"; return "bracket"; }
      else if (/[+\-=\.']/.test(ch)) return null;
      else if (/\d/.test(ch) && stream.match(/^\d*#/)) return null;
      else if (ch == "|") return (state.tokenize = inComment)(stream, state);
      else if (ch == ":") { readSym(stream); return "meta"; }
      else if (ch == "\\") { stream.next(); readSym(stream); return "string-2" }
      else return "error";
    } else {
      var name = readSym(stream);
      if (name == ".") return null;
      type = "symbol";
      if (name == "nil" || name == "t" || name.charAt(0) == ":") return "atom";
      if (state.lastType == "open" && (specialForm.test(name) || assumeBody.test(name))) return "keyword";
      if (name.charAt(0) == "&") return "variable-2";
      return "variable";
    }
  }

  // Inside a double-quoted string, honoring backslash escapes.
  function inString(stream, state) {
    var escaped = false, next;
    while (next = stream.next()) {
      if (next == '"' && !escaped) { state.tokenize = base; break; }
      escaped = !escaped && next == "\\";
    }
    return "string";
  }

  // Inside a #| ... |# block comment.
  function inComment(stream, state) {
    var next, last;
    while (next = stream.next()) {
      if (next == "#" && last == "|") { state.tokenize = base; break; }
      last = next;
    }
    type = "ws";
    return "comment";
  }

  return {
    startState: function () {
      return {ctx: {prev: null, start: 0, indentTo: 0}, lastType: null, tokenize: base};
    },

    token: function (stream, state) {
      if (stream.sol() && typeof state.ctx.indentTo != "number")
        state.ctx.indentTo = state.ctx.start + 1;

      type = null;
      var style = state.tokenize(stream, state);
      if (type != "ws") {
        if (state.ctx.indentTo == null) {
          // First symbol after an open paren decides the indent style:
          // body forms indent by one unit, others align with the next form.
          if (type == "symbol" && assumeBody.test(stream.current()))
            state.ctx.indentTo = state.ctx.start + config.indentUnit;
          else
            state.ctx.indentTo = "next";
        } else if (state.ctx.indentTo == "next") {
          state.ctx.indentTo = stream.column();
        }
        state.lastType = type;
      }
      // Push/pop the context stack on parens.
      if (type == "open") state.ctx = {prev: state.ctx, start: stream.column(), indentTo: null};
      else if (type == "close") state.ctx = state.ctx.prev || state.ctx;
      return style;
    },

    indent: function (state, _textAfter) {
      var i = state.ctx.indentTo;
      return typeof i == "number" ? i : state.ctx.start + 1;
    },

    closeBrackets: {pairs: "()[]{}\"\""},
    lineComment: ";;",
    fold: "brace-paren",
    blockCommentStart: "#|",
    blockCommentEnd: "|#"
  };
});

CodeMirror.defineMIME("text/x-common-lisp", "commonlisp");

});
/CMT-0.4.0-py3-none-any.whl/cmt/converter/cmap_v0.py | from cmt.a_converter import AConverter
from cmt.cmap.v0 import *
from cmt.cmap.v1 import *
from cmt.ecmap.v0 import *
class Converter(AConverter):
    """Converter for version-0 cmap files: wrap into ecmap or upgrade to v1."""
    @staticmethod
    def convert(source: CMap_0) -> ECMap_0:
        """Wrap a CMap_0 into an ECMap_0.

        Note: this mutates *source* — its ``medal_times`` are cleared,
        since the v0 ecmap container does not carry them.
        """
        ecmap = ECMap_0()
        ecmap.cmap = source
        ecmap.cmap.medal_times = None
        return ecmap
    @staticmethod
    def downgrade(source: CMap_0) -> None:
        """Always raise: version 0 is the oldest cmap format."""
        raise ValueError(
            f"Downgrading {source.identifier.name} {source.format_version} to"
            f" {source.identifier.name} {source.format_version - 1} is not possible."
        )
    @staticmethod
    def upgrade(source: CMap_0) -> CMap_1:
        """Build a new CMap_1 from a CMap_0; *source* is not modified."""
        res = CMap_1()
        res.name = source.name
        for time in source.medal_times:
            new_time = CheckpointTime_1()
            # NOTE(review): times are rescaled by 5/3 and truncated —
            # presumably a tick-rate change between format versions;
            # confirm against the CMT format specification.
            new_time.platin = int(time.platin * 5 / 3)
            new_time.gold = int(time.gold * 5 / 3)
            new_time.silver = int(time.silver * 5 / 3)
            new_time.bronze = int(time.bronze * 5 / 3)
            res.checkpoint_times.append(new_time)
        res.sun_rotation = source.sun_rotation
        res.sun_angle = source.sun_angle
        res.camera_pos = source.camera_pos
        res.camera_look = source.camera_look
        # Translate each v0 entity to its v1 counterpart; unknown entity
        # types are silently dropped (new_ent stays None).
        for ent in source.entities:
            new_ent = None
            if type(ent) == Block_0:
                new_ent = Block_1()
                new_ent.block_type = ent.block_type
                new_ent.position = ent.position
                new_ent.scale = ent.scale
                new_ent.rotation_z = ent.rotation_z
                new_ent.checkpoint_nr = ent.checkpoint_nr
                new_ent.byte_size = ent.byte_size
            elif type(ent) == Sphere_0:
                new_ent = Sphere_1()
                new_ent.position = ent.position
            elif type(ent) == PlayerStart_0:
                new_ent = PlayerStart_1()
                new_ent.position = ent.position
                new_ent.rotation_z = ent.rotation_z
            elif type(ent) == Dummy_0:
                new_ent = Dummy_1()
                new_ent.id = ent.id
                new_ent.position = ent.position
                new_ent.scale = ent.scale
                new_ent.rotation_z = ent.rotation_z
            if new_ent is not None:
                res.entities.append(new_ent)
        return res
/Mathics_Django-6.0.0-py3-none-any.whl/mathics_django/web/media/js/mathjax/localization/fa/fa.js | MathJax.Localization.addTranslation("fa",null,{menuTitle:"\u0641\u0627\u0631\u0633\u06CC",fontDirection:"rtl",version:"2.7.9",isLoaded:true,domains:{_:{version:"2.7.9",isLoaded:true,strings:{CookieConfig:"MathJax \u06CC\u06A9 \u06A9\u0644\u0648\u0686\u06A9 \u062A\u0646\u0638\u06CC\u0645\u0627\u062A \u06A9\u0627\u0631\u0628\u0631\u06CC \u06CC\u0627\u0641\u062A \u06A9\u0647 \u0634\u0627\u0645\u0644 \u06A9\u062F \u0627\u062C\u0631\u0627\u06CC\u06CC \u0627\u0633\u062A. \u0622\u06CC\u0627 \u0645\u06CC\u200C\u062E\u0648\u0627\u0647\u06CC\u062F \u0622\u0646 \u0631\u0627 \u0627\u062C\u0631\u0627 \u06A9\u0646\u06CC\u062F\u061F\n\n(\u0628\u0647\u062A\u0631 \u0627\u0633\u062A \u0628\u0631 \u0644\u063A\u0648 \u0628\u0641\u0634\u0627\u0631\u06CC\u062F \u0645\u06AF\u0631 \u0627\u06CC\u0646\u06A9\u0647 \u062E\u0648\u062F\u062A\u0627\u0646 \u06A9\u0644\u0648\u0686\u06A9 \u0631\u0627 \u062A\u0646\u0638\u06CC\u0645\u200C\u06A9\u0631\u062F\u0647\u200C\u0627\u06CC\u062F.)",MathProcessingError:"\u062E\u0637\u0627\u06CC \u067E\u0631\u062F\u0627\u0632\u0634 \u0631\u06CC\u0627\u0636\u06CC",MathError:"\u062D\u0637\u0627\u06CC \u0631\u06CC\u0627\u0636\u06CC",LoadFile:"\u0628\u0627\u0631\u06AF\u06CC\u0631\u06CC %1",Loading:"\u0628\u0627\u0631\u06AF\u06CC\u0631\u06CC",LoadFailed:"\u062E\u0637\u0627 \u062F\u0631 \u0628\u0627\u0631\u06AF\u06CC\u0631\u06CC \u067E\u0631\u0648\u0646\u062F\u0647: %1",ProcessMath:"\u067E\u0631\u062F\u0627\u0632\u0634 \u0631\u06CC\u0627\u0636\u06CC: %1\u066A",Processing:"\u067E\u0631\u062F\u0627\u0632\u0634",TypesetMath:"\u062D\u0631\u0648\u0641\u200C\u0686\u06CC\u0646\u06CC \u0631\u06CC\u0627\u0636\u06CC: %1\u066A",Typesetting:"\u062D\u0631\u0648\u0641\u200C\u0686\u06CC\u0646\u06CC",MathJaxNotSupported:"\u0645\u0631\u0648\u0631\u06AF\u0631 \u0634\u0645\u0627 \u0627\u0632 MathJax \u067E\u0634\u062A\u06CC\u0628\u0627\u0646\u06CC 
\u0646\u0645\u06CC\u200C\u06A9\u0646\u062F"}},FontWarnings:{},"HTML-CSS":{},HelpDialog:{},MathML:{},MathMenu:{},TeX:{}},plural:function(a){return 1},number:function(a){return a}});MathJax.Ajax.loadComplete("[MathJax]/localization/fa/fa.js"); | PypiClean |
/Dero-0.15.0-py3-none-any.whl/dero/reg/differenced.py | from .reg import reg
from ..ext_pandas.filldata import add_missing_group_rows, drop_missing_group_rows
from dero.reg.lag.create import _is_special_lag_keyword
def diff_reg(df, yvar, xvars, id_col, date_col, difference_lag=1, diff_cols=None,
             diff_fill_method: str='ffill', diff_fill_limit: int = None, **reg_kwargs):
    """Run a regression on first-differenced ("Change") versions of the variables.

    Differenced columns named ``<col> Change`` are added to ``df``, the regression
    is run on them, and the temporary columns are dropped again before returning.

    :param df: DataFrame containing the panel data
    :param yvar: name of the dependent variable column
    :param xvars: list of independent variable column names
    :param id_col: name of the entity identifier column (panel cross-section)
    :param date_col: name of the time column
    :param difference_lag: number of periods to difference over
    :param diff_cols: columns to difference; defaults to ``[yvar] + xvars``
    :param diff_fill_method: fill method for missing intermediate periods (e.g. 'ffill')
    :param diff_fill_limit: maximum number of consecutive periods to fill
    :param reg_kwargs: passed through to :func:`reg` (variable-name kwargs such as
        ``interaction_tuples`` and ``lag_variables`` are renamed to their
        differenced counterparts first)
    :return: whatever :func:`reg` returns for the differenced model
    """
    # BUG FIX: was `diff_cols == None`; identity comparison with None is the
    # correct (and safe) test — `==` can invoke arbitrary __eq__ overloads.
    if diff_cols is None:
        # All by default
        diff_cols = [yvar] + xvars

    df = create_differenced_variables(df, diff_cols, id_col=id_col, date_col=date_col, difference_lag=difference_lag,
                                      fill_method=diff_fill_method, fill_limit=diff_fill_limit)

    # Convert names in lists of variables being passed to reg
    reg_yvar, reg_xvars = _convert_variable_names(yvar, xvars, diff_cols)

    # Shallow copy so the caller's kwargs dict is not mutated
    this_reg_kwargs = reg_kwargs.copy()

    if 'interaction_tuples' in reg_kwargs:
        this_reg_kwargs['interaction_tuples'] = _convert_interaction_tuples(reg_kwargs['interaction_tuples'], diff_cols)

    if 'lag_variables' in reg_kwargs:
        this_reg_kwargs['lag_variables'] = _convert_list_of_variables_to_difference_names(reg_kwargs['lag_variables'], diff_cols)

    result = reg(df, reg_yvar, reg_xvars, **this_reg_kwargs)

    # Clean up the temporary differenced columns added to df
    differenced_names = [col + ' Change' for col in diff_cols]
    df.drop(differenced_names, axis=1, inplace=True)

    return result
def create_differenced_variables(df, diff_cols, id_col='TICKER', date_col='Date', difference_lag=1,
                                 fill_method='ffill', fill_limit: int = None):
    """Add ``<col> Change`` columns holding per-entity differences over ``difference_lag`` periods.

    When ``fill_method`` is given, missing (entity, date) rows are temporarily
    inserted and filled before differencing, then the output is restricted back
    to the original (entity, date) index, so the returned frame has the same
    rows as the input.

    Note: partially inplace — the input ``df`` is sorted in place, and when
    ``fill_method is None`` the new columns are added directly to it.
    """
    # Differencing via groupby().shift() requires rows ordered within each entity
    df.sort_values([id_col, date_col], inplace=True)

    if fill_method is not None:
        # Save original byvars, for outputting df of same shape
        orig_index_df = df[[id_col, date_col]]
        # Fill in missing data so lagged values line up across date gaps
        df = add_missing_group_rows(df, [id_col], [date_col], fill_method=fill_method, fill_limit=fill_limit)

    for col in diff_cols:
        _create_differenced_variable(df, col, id_col=id_col, difference_lag=difference_lag)

    if fill_method is not None:
        # Drop the temporary filler rows by joining back onto the original index
        df = orig_index_df.merge(df, how='left', on=[id_col, date_col])

    return df
def _create_differenced_variable(df, col, id_col='TICKER', difference_lag=1, keep_lag=False):
    """Add a ``<col> Change`` column of per-entity differences to ``df``.

    Note: inplace — columns are added to (and, unless ``keep_lag``, the
    temporary lag column removed from) the passed DataFrame.
    """
    lag_name = col + '_lag'
    # Lag within each entity so differences never cross entity boundaries
    df[lag_name] = df.groupby(id_col)[col].shift(difference_lag)
    df[col + ' Change'] = df[col] - df[lag_name]
    if not keep_lag:
        df.drop(lag_name, axis=1, inplace=True)
def _convert_variable_names(yvar, xvars, diff_cols):
    """Map the y and x variable names to their ' Change' counterparts where differenced."""
    out_yvar = yvar + ' Change' if yvar in diff_cols else yvar
    out_xvars = _convert_list_of_variables_to_difference_names(xvars, diff_cols)
    return out_yvar, out_xvars
def _convert_list_of_variables_to_difference_names(varlist, diff_cols):
    """Rename every differenced variable in ``varlist`` to its ' Change' name."""
    # if 'all' or 'xvars' is passed, no conversion needed
    if _is_special_lag_keyword(varlist):
        return varlist
    return [var + ' Change' if var in diff_cols else var for var in varlist]
def _convert_interaction_tuples(interaction_tuples, diff_cols):
    """Rename differenced variables inside each interaction tuple to their ' Change' names."""
    def _diff_name(var):
        return var + ' Change' if var in diff_cols else var

    return [tuple(_diff_name(var) for var in tup) for tup in interaction_tuples]
def _is_diff_reg_str(reg_str):
    """Return True if ``reg_str`` is one of the keywords selecting a differenced regression."""
    valid_keywords = {'diff', 'difference', 'diff_reg', 'diff reg',
                      'difference reg', 'difference regression'}
    return reg_str in valid_keywords
/MedPy-0.4.0.tar.gz/MedPy-0.4.0/bin/medpy_binary_resampling.py | # build-in modules
import os
import logging
import argparse
# third-party modules
import numpy
from scipy.ndimage.interpolation import zoom
from scipy.ndimage.morphology import distance_transform_edt, binary_erosion
from scipy.ndimage.measurements import label
# own modules
from medpy.core import Logger
from medpy.filter import resample, bounding_box
from medpy.utilities import argparseu
from medpy.io import load, save, header
# information
__author__ = "Oskar Maier"
__version__ = "r0.1.0, 2014-11-25"
__email__ = "oskar.maier@googlemail.com"
__status__ = "Release"
__description__ = """
Re-samples a binary image according to a supplied voxel spacing.
For an optimal results without outliers or holes in the case of up-sampling, the required
number of additional slices is added using shape based interpolation. All down-sampling
and the remaining small up-sampling operations are then executed with a nearest
neighbourhood interpolation of a chosen order.
BSpline is used for interpolation. A order between 0 and 5 can be selected. The default
is 0 (= nearest neighbour). In some rare case an order of 1 (= linear) might be
necessary.
Note that the pixel data type of the input image is treated as binary.
Copyright (C) 2013 Oskar Maier
This program comes with ABSOLUTELY NO WARRANTY; This is free software,
and you are welcome to redistribute it under certain conditions; see
the LICENSE file or <http://www.gnu.org/licenses/> for details.
"""
# code
def main():
    """Load a binary image, add intermediate slices via shape based
    interpolation where strong up-sampling is required, then re-sample to the
    requested voxel spacing and save the result."""
    parser = getParser()
    args = getArguments(parser)

    # prepare logger
    logger = Logger.getInstance()
    if args.debug: logger.setLevel(logging.DEBUG)
    elif args.verbose: logger.setLevel(logging.INFO)

    # loading input images
    img, hdr = load(args.input)
    # BUG FIX: numpy.bool was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin bool is the supported spelling and is equivalent here.
    img = img.astype(bool)

    # check spacing values
    if not len(args.spacing) == img.ndim:
        parser.error('The image has {} dimensions, but {} spacing parameters have been supplied.'.format(img.ndim, len(args.spacing)))

    # check if output image exists
    if not args.force:
        if os.path.exists(args.output):
            parser.error('The output image {} already exists.'.format(args.output))

    logger.debug('target voxel spacing: {}'.format(args.spacing))

    # determine number of required complete slices for up-sampling
    vs = header.get_pixel_spacing(hdr)
    rcss = [int(y // x - 1) for x, y in zip(args.spacing, vs)] # TODO: For option b, remove the - 1; better: no option b, since I am rounding later anyway

    # remove negatives and round up to next even number
    rcss = [x if x > 0 else 0 for x in rcss]
    rcss = [x if 0 == x % 2 else x + 1 for x in rcss]
    logger.debug('intermediate slices to add per dimension: {}'.format(rcss))

    # for each dimension requiring up-sampling, from the highest down, perform shape based slice interpolation
    logger.info('Adding required slices using shape based interpolation.')
    for dim, rcs in enumerate(rcss):
        if rcs > 0:
            logger.debug('adding {} intermediate slices to dimension {}'.format(rcs, dim))
            img = shape_based_slice_interpolation(img, dim, rcs)
            logger.debug('resulting new image shape: {}'.format(img.shape))

    # compute and set new voxel spacing
    nvs = [x / (y + 1.) for x, y in zip(vs, rcss)]
    header.set_pixel_spacing(hdr, nvs)
    logger.debug('intermediate voxel spacing: {}'.format(nvs))

    # interpolate with nearest neighbour
    logger.info('Re-sampling the image with a b-spline order of {}.'.format(args.order))
    img, hdr = resample(img, hdr, args.spacing, args.order, mode='nearest')

    # saving the resulting image
    save(img, args.output, hdr, args.force)
def shape_based_slice_interpolation(img, dim, nslices):
    """
    Adds `nslices` slices between all slices of the binary image `img` along dimension
    `dim` respecting the original slice values to be situated in the middle of each
    slice. Extrapolation situations are handled by simple repeating.

    Interpolation of new slices is performed using shape based interpolation.

    Parameters
    ----------
    img : array_like
        A n-dimensional image.
    dim : int
        The dimension along which to add slices.
    nslices : int
        The number of slices to add. Must be an even number.

    Returns
    -------
    out : ndarray
        The re-sampled image.
    """
    # check arguments
    if not 0 == nslices % 2:
        raise ValueError('nslices must be an even number')

    out = None
    slicer = [slice(None)] * img.ndim
    chunk_full_shape = list(img.shape)
    chunk_full_shape[dim] = nslices + 2

    # walk over consecutive slice pairs; each chunk covers one pair plus the
    # nslices interpolated slices between them
    for sl1, sl2 in zip(numpy.rollaxis(img, dim)[:-1], numpy.rollaxis(img, dim)[1:]):
        if 0 == numpy.count_nonzero(sl1) and 0 == numpy.count_nonzero(sl2):
            # both slices empty: nothing to interpolate
            # (BUG FIX: numpy.bool was removed in NumPy >= 1.24; use builtin bool)
            chunk = numpy.zeros(chunk_full_shape, dtype=bool)
        else:
            chunk = shape_based_slice_insertation_object_wise(sl1, sl2, dim, nslices)
        # drop the trailing original slice of each chunk to avoid duplicating it
        # with the leading slice of the next chunk
        if out is None:
            out = numpy.delete(chunk, -1, dim)
        else:
            out = numpy.concatenate((out, numpy.delete(chunk, -1, dim)), dim)

    # re-append the final original slice that the loop above removed
    # (BUG FIX: indexing with a list of slices/newaxis was deprecated and then
    # removed from NumPy; an index expression must be a tuple)
    slicer[dim] = numpy.newaxis
    out = numpy.concatenate((out, sl2[tuple(slicer)]), dim)

    # extrapolate at both ends by repeating the first/last original slice
    slicer[dim] = slice(0, 1)
    for _ in range(nslices // 2):
        out = numpy.concatenate((img[tuple(slicer)], out), dim)
    slicer[dim] = slice(-1, None)
    for _ in range(nslices // 2):
        out = numpy.concatenate((out, img[tuple(slicer)]), dim)

    return out
def shape_based_slice_insertation_object_wise(sl1, sl2, dim, nslices, order=3):
    """
    Wrapper to apply `shape_based_slice_insertation()` for each binary object
    separately to ensure correct extrapolation behaviour.

    Objects are identified by labelling the two slices as one stacked volume,
    so an object spanning both slices is interpolated as a single entity.

    Parameters mirror `shape_based_slice_insertation`.
    """
    out = None
    # stack the two slices so connected components crossing slices share a label
    sandwich = numpy.concatenate((sl1[numpy.newaxis], sl2[numpy.newaxis]), 0)
    label_image, n_labels = label(sandwich)
    for lid in range(1, n_labels + 1):
        # unpack the per-object masks for slice 1 and slice 2
        _sl1, _sl2 = label_image == lid
        # BUG FIX: the `order` argument was previously ignored (a literal 3 was
        # passed); forward the caller's choice instead.
        _out = shape_based_slice_insertation(_sl1, _sl2, dim, nslices, order=order)
        if out is None:
            out = _out
        else:
            out |= _out
    return out
def shape_based_slice_insertation(sl1, sl2, dim, nslices, order=3):
    """
    Insert `nslices` new slices between `sl1` and `sl2` along dimension `dim` using shape
    based binary interpolation.

    Extrapolation is handled adding `nslices`/2 step-wise eroded copies of the last slice
    in each direction.

    Parameters
    ----------
    sl1 : array_like
        First slice. Treated as binary data.
    sl2 : array_like
        Second slice. Treated as binary data.
    dim : int
        The new dimension along which to add the new slices.
    nslices : int
        The number of slices to add.
    order : int
        The b-spline interpolation order for re-sampling the distance maps.

    Returns
    -------
    out : ndarray
        A binary image of size `sl1`.shape() extend by `nslices`+2 along the new
        dimension `dim`. The border slices are the original slices `sl1` and `sl2`.
    """
    # BUG FIX: numpy.bool was removed in NumPy >= 1.24; use builtin bool
    sl1 = sl1.astype(bool)
    sl2 = sl2.astype(bool)

    # extrapolation through erosion
    # BUG FIX: the loop bounds used `/` (true division), which yields a float
    # and raises TypeError under Python 3; integer division `//` is required.
    if 0 == numpy.count_nonzero(sl1):
        slices = [sl1]
        for _ in range(nslices // 2):
            slices.append(numpy.zeros_like(sl1))
        for i in range(1, nslices // 2 + nslices % 2 + 1)[::-1]:
            slices.append(binary_erosion(sl2, iterations=i))
        slices.append(sl2)
        return numpy.rollaxis(numpy.asarray(slices), 0, dim + 1)
    elif 0 == numpy.count_nonzero(sl2):
        slices = [sl1]
        for i in range(1, nslices // 2 + 1):
            slices.append(binary_erosion(sl1, iterations=i))
        for _ in range(0, nslices // 2 + nslices % 2):
            slices.append(numpy.zeros_like(sl2))
        slices.append(sl2)
        return numpy.rollaxis(numpy.asarray(slices), 0, dim + 1)

    # interpolation shape based
    # note: distance_transform_edt shows strange behaviour for ones-arrays
    dt1 = distance_transform_edt(~sl1) - distance_transform_edt(sl1)
    dt2 = distance_transform_edt(~sl2) - distance_transform_edt(sl2)

    # BUG FIX: ndarray indexing with a list of slices/newaxis was removed from
    # NumPy; index expressions must be tuples.
    slicer = [slice(None)] * dt1.ndim
    slicer = slicer[:dim] + [numpy.newaxis] + slicer[dim:]
    out = numpy.concatenate((dt1[tuple(slicer)], dt2[tuple(slicer)]), axis=dim)

    # zoom the 2-slice signed distance volume to nslices + 2 slices, then
    # threshold at zero to recover the binary shape
    zoom_factors = [1] * dt1.ndim
    zoom_factors = zoom_factors[:dim] + [(nslices + 2)/2.] + zoom_factors[dim:]
    out = zoom(out, zoom_factors, order=order)
    return out <= 0
def getArguments(parser):
    "Provides additional validation of the arguments collected by argparse."
    args = parser.parse_args()
    # the b-spline order accepted by scipy is limited to 0..5
    if not 0 <= args.order <= 5:
        parser.error('The order has to be a number between 0 and 5.')
    return args
def getParser():
    "Creates and returns the argparse parser object."
    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__description__)
    # positional arguments (order defines the command line interface)
    arg_parser.add_argument('input', help='the input image')
    arg_parser.add_argument('output', help='the output image')
    arg_parser.add_argument('spacing', type=argparseu.sequenceOfFloatsGt, help='the desired voxel spacing in colon-separated values, e.g. 1.2,1.2,5.0')
    # optional flags
    arg_parser.add_argument('-o', '--order', type=int, default=0, dest='order', help='the bspline order, default is 0 (= nearest neighbour)')
    arg_parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='verbose output')
    arg_parser.add_argument('-d', dest='debug', action='store_true', help='Display debug information.')
    arg_parser.add_argument('-f', '--force', dest='force', action='store_true', help='overwrite existing files')
    return arg_parser
# Script entry point: run only when executed directly, not when imported.
if __name__ == "__main__":
    main()
/Mopidy-Touchscreen-1.0.0.tar.gz/Mopidy-Touchscreen-1.0.0/mopidy_touchscreen/input/gpio_inpput_manager.py | import logging
import RPi.GPIO as GPIO
import pygame
logger = logging.getLogger(__name__)
class GPIOManager():
    """Wire Raspberry Pi GPIO push buttons to pygame keyboard events.

    :param pins: mapping with keys 'left', 'right', 'up', 'down' and 'enter'
        giving the BCM pin number of each button. Each pin is configured as a
        pulled-up input and an edge-detect callback posts the matching
        pygame key event.
    """

    def __init__(self, pins):
        GPIO.setmode(GPIO.BCM)

        # Left Button
        GPIO.setup(pins['left'], GPIO.IN, pull_up_down=GPIO.PUD_UP)
        GPIO.add_event_detect(pins['left'], GPIO.BOTH, callback=left,
                              bouncetime=30)

        # Right Button
        GPIO.setup(pins['right'], GPIO.IN, pull_up_down=GPIO.PUD_UP)
        GPIO.add_event_detect(pins['right'], GPIO.BOTH,
                              callback=right,
                              bouncetime=30)

        # Up Button
        GPIO.setup(pins['up'], GPIO.IN, pull_up_down=GPIO.PUD_UP)
        GPIO.add_event_detect(pins['up'], GPIO.BOTH, callback=up,
                              bouncetime=30)

        # Down Button
        # BUG FIX: previously registered the `right` callback for this pin.
        GPIO.setup(pins['down'], GPIO.IN, pull_up_down=GPIO.PUD_UP)
        GPIO.add_event_detect(pins['down'], GPIO.BOTH, callback=down,
                              bouncetime=30)

        # Enter Button
        # BUG FIX: previously registered the `right` callback for this pin.
        GPIO.setup(pins['enter'], GPIO.IN, pull_up_down=GPIO.PUD_UP)
        GPIO.add_event_detect(pins['enter'], GPIO.BOTH,
                              callback=enter,
                              bouncetime=30)
def _post_key_event(channel, key):
    """Translate a GPIO edge on *channel* into a pygame keyboard event.

    With pull-up inputs, a high level (1) means the button was released
    (KEYUP) and a low level (0) means it was pressed (KEYDOWN).
    """
    if GPIO.input(channel) == 1:
        event_type = pygame.KEYUP
    else:
        event_type = pygame.KEYDOWN
    attributes = {'unicode': None, 'key': key}
    pygame.event.post(pygame.event.Event(event_type, attributes))


def right(channel):
    _post_key_event(channel, pygame.K_RIGHT)


def left(channel):
    # BUG FIX: previously posted K_RIGHT for the left button.
    _post_key_event(channel, pygame.K_LEFT)


def down(channel):
    _post_key_event(channel, pygame.K_DOWN)


def up(channel):
    _post_key_event(channel, pygame.K_UP)


def enter(channel):
    _post_key_event(channel, pygame.K_RETURN)
/IST411Team1Conway-1.0.0.tar.gz/IST411Team1Conway-1.0.0/IST411Team1/node3.py | import Pyro4, subprocess, zlib, time, pysftp, json, hashlib, p2p
from node5 import Node5
# NOTE(review): this shadows the builtin `bool` at module level and appears
# unused afterwards — likely dead code; confirm before removing.
bool = False
# Start-of-run timestamp, consumed later by p2p.end() to measure payload time.
# NOTE(review): time.clock() was removed in Python 3.8 — time.perf_counter()
# is the modern equivalent; confirm p2p.end() expectations before changing.
beginTime = time.clock()
@Pyro4.expose
class node3:
    """Pyro4-exposed node that fetches a payload and its checksum over SFTP,
    verifies the payload, and serves a zlib-compressed copy to remote peers.

    Attributes (populated by __init__ and the module entry point):
      name            -- display name of this node
      json            -- raw payload bytes fetched via SFTP
      daemon          -- the Pyro4 daemon serving this object
      crcChecksum     -- CRC32 checksum generated for the payload
      recSFTPChecksum -- checksum string fetched from the remote host
    """

    def __init__(self,name,daemon):
        self.name = name
        self.json = None
        self.daemon = daemon
        self.crcChecksum = None
        self.recSFTPChecksum = None

    def get_json(self):
        """Return the payload compressed with zlib (remote-callable)."""
        compJson = self.compressPayload(self.json)
        print("Compressed message: ",compJson)
        # NOTE(review): binds a *local* name that shadows the builtin `bool`
        # and is never read — presumably meant to set the module-level flag.
        bool = True
        return compJson

    def shutDown(self):
        """Shut down this node's Pyro4 daemon; return True on success."""
        shutDown = False
        try:
            self.daemon.shutdown()
            shutDown = True
        except Exception as e:
            print(e)
        return shutDown

    def compressPayload(self,data):
        """Compress *data* (bytes) with zlib, log the result, and return it."""
        try:
            print(self.name, " is compressing payload...")
            payloadComp = zlib.compress(data)
            log = {"Node":self.name,"Compressed payload":str(payloadComp)}
            Node5.log(log)
            return payloadComp
        # NOTE(review): errors are printed and swallowed; callers receive None.
        except Exception as e:
            print(e)

    def genCrcChecksum(self,data):
        """Generate, log and return the CRC32 checksum of *data*."""
        try:
            print(self.name," is generating checksum...")
            checksum = zlib.crc32(data)
            print(self.name," checksum: ",checksum)
            log = {"Node":self.name,"CRC Checksum":str(checksum)}
            Node5.log(log)
            return checksum
        except Exception as e:
            print(e)

    def getChecksum(self):
        """Return the CRC32 checksum stored on this node."""
        return self.crcChecksum

    def receiveSFTPPayload(self):
        """Download the JSON payload via SFTP and return it as UTF-8 bytes."""
        try:
            print(self.name," is retrieving payload from remote directory via SFTP...")
            payload = None
            # Host-key checking is disabled; acceptable only in a trusted lab setup.
            cnopts = pysftp.CnOpts()
            cnopts.hostkeys = None
            cinfo = {'cnopts':cnopts,'host':'oz-ist-linux-fa17-411','username':'ftpuser','password':'test1234','port':101}
            with pysftp.Connection(**cinfo) as sftp:
                sftp.get('/home/ftpuser/Team1SFTPpayload.json','Team1SFTPReceived.json')
                with open('Team1SFTPReceived.json','r') as inFile:
                    payload = json.load(inFile)
                    # encode so downstream zlib/hashlib calls get bytes
                    payload = payload.encode('utf-8')
                    log = {"Name":self.name,"Payload received via SFTP":str(payload)}
                    Node5.log(log)
                    return payload
        except Exception as e:
            print(e)

    def receiveSFTPChecksum(self):
        """Download and return the payload's checksum string via SFTP."""
        try:
            print(self.name," is retrieving payload's checksum from remote directory via SFTP...")
            checksum = None
            cnopts = pysftp.CnOpts()
            cnopts.hostkeys = None
            cinfo = {'cnopts':cnopts,'host':'oz-ist-linux-fa17-411','username':'ftpuser','password':'test1234','port':101}
            with pysftp.Connection(**cinfo) as sftp:
                sftp.get('/home/ftpuser/Team1SFTPchecksum.txt','Team1ChecksumReceived.txt')
                with open('Team1ChecksumReceived.txt','r') as inFile:
                    checksum = inFile.read()
                    log = {"Node":self.name,"Checksum received via SFTP":str(checksum)}
                    Node5.log(log)
                    return checksum
        except Exception as e:
            print(e)

    def verifySFTPChecksum(self,checksum,payload):
        """Verify *payload* by comparing its SHA-256 hex digest against *checksum*.

        Returns True if the comparison was performed (not whether it matched).
        NOTE(review): assumes the sender produced the checksum with SHA-256
        over the same UTF-8 bytes — confirm against the sending node.
        """
        verifyPerformed = False
        try:
            checksumOfPayload = hashlib.sha256(payload).hexdigest()
            print(checksumOfPayload)
            print(checksum)
            if checksumOfPayload == checksum:
                print("Checksum of payload received from Node 2 via SFTP verifed.")
                log = {"Node":self.name,"Checksum received via SFTP is verified":"True","Checksum Received":str(checksum),"Checksum Generated for matching":str(checksumOfPayload)}
                Node5.log(log)
            else:
                print("Payload received from Node 2 via SFTP has been compromised.")
            verifyPerformed = True
        except Exception as e:
            print(e)
        return verifyPerformed
# Entry point: fetch and verify the payload, expose this node via Pyro4,
# then hand the URI to Node 4 in a subprocess and serve requests.
if __name__ == '__main__':
    print("Starting Node 3...")
    daemon = Pyro4.Daemon()
    # NOTE(review): rebinds the class name `node3` to the instance; works
    # because the class is not referenced again, but is confusing.
    node3 = node3("Node 3",daemon)
    node3.json = node3.receiveSFTPPayload()
    node3.recSFTPChecksum = node3.receiveSFTPChecksum()
    node3.verifySFTPChecksum(node3.recSFTPChecksum,node3.json)
    node3.crcChecksum = node3.genCrcChecksum(node3.json)
    # register with the daemon so the object is remotely reachable
    uri = node3.daemon.register(node3)
    print(node3.name + "'s uri: ",uri)
    print(node3.name," is ready for remote access via Pyro4.")
    # measure and log the point-to-point payload handling time
    p2pTime = p2p.end(beginTime)
    log = {"Node":node3.name,"P2P payload time in seconds":p2pTime}
    Node5.log(log)
    print(node3.name," to Node 4 payload time: ",p2pTime," seconds")
    # launch Node 4, passing it this node's URI, then block serving requests
    subprocess.Popen(['python3','node4.py',str(uri)])
    node3.daemon.requestLoop()
/Adsys_PDFReaderTool-0.0.1.tar.gz/Adsys_PDFReaderTool-0.0.1/term_frequency/tfidf.py | from pprint import pprint
from collections import defaultdict
import PyPDF2
from os import listdir
from os.path import isfile, join
import pprint as pp
import nltk
from nltk.tokenize import sent_tokenize, word_tokenize
from nltk.corpus import stopwords
import extract
# Directory of PDF files to analyse (hard-coded local path).
filename = "/Users/dontesmall/Desktop/pdf_test_folder"
# NOTE(review): presumably returns one token list per PDF — confirm against
# the extract module.
CORPUS = extract.extractTexttoarray((filename))
documents = []
# Join each per-document token list into a single comma-separated string.
for indx in CORPUS:
    documents.append(", ".join(map(str, indx)))
# CORPUS = """
# In information retrieval, tf–idf or TFIDF, short for term frequency–inverse document frequency, is a numerical statistic that is intended to reflect how important a word is to a document in a collection or corpus.[1] It is often used as a weighting factor in searches of information retrieval, text mining, and user modeling. The tf–idf value increases proportionally to the number of times a word appears in the document and is offset by the number of documents in the corpus that contain the word, which helps to adjust for the fact that some words appear more frequently in general. Tf–idf is one of the most popular term-weighting schemes today; 83% of text-based recommender systems in digital libraries use tf–idf.
# LeBron Raymone James Sr. (/ləˈbrɒn/; born December 30, 1984), often referred to mononymously as LeBron, is an American professional basketball player for the Los Angeles Lakers of the National Basketball Association (NBA). He is often considered the best basketball player in the world and regarded by some as the greatest player of all time.[1][2][3][4] His accomplishments include four NBA Most Valuable Player Awards, three NBA Finals MVP Awards, and two Olympic gold medals. James has appeared in fifteen NBA All-Star Games and been named NBA All-Star MVP three times. He won the 2008 NBA scoring title, is the all-time NBA playoffs scoring leader, and is fourth in all-time career points scored. He has been voted onto the All-NBA First Team twelve times and the All-Defensive First Team five times.
# Marie Skłodowska Curie (/ˈkjʊəri/;[3] French: [kyʁi]; Polish: [kʲiˈri]; born Maria Salomea Skłodowska;[a] 7 November 1867 – 4 July 1934) was a Polish and naturalized-French physicist and chemist who conducted pioneering research on radioactivity. She was the first woman to win a Nobel Prize, the first person and only woman to win twice, and the only person to win a Nobel Prize in two different sciences. She was part of the Curie family legacy of five Nobel Prizes. She was also the first woman to become a professor at the University of Paris, and in 1995 became the first woman to be entombed on her own merits in the Panthéon in Paris.
# """.strip().lower()
DOC_ID_TO_TF = {} # doc-id -> {tf: term_freq_map where term_freq_map is word -> percentage of words in doc that is this one,
CORPUS_CONTINER = str(documents).strip('[]') #                                                          tfidf: ...}
# NOTE(review): str(documents) yields the list's repr; splitting that on "\n"
# only produces multiple documents if the texts themselves contain newlines —
# verify this matches the intended per-document segmentation.
DOCS = CORPUS_CONTINER.split("\n") # Documents where the index is the doc id
WORDS = CORPUS_CONTINER.split()

# NOTE(review): DF here counts total occurrences of each word across the whole
# corpus, not the number of documents containing it — so TFIDF below is
# tf / collection-frequency rather than the classic tf * idf. Confirm intent.
DF = defaultdict(lambda: 0)
for word in WORDS:
    DF[word] += 1

for doc_id, doc in enumerate(DOCS):
    #print("HERE IS THE DOCS :" + str(DOCS))
    # TF: number of times each word shows up in this doc (later normalised)
    TF = defaultdict(lambda: 0)
    TFIDF = {}
    doc_words = doc.split()
    word_count = len(doc_words)

    # percentage of words in doc that is this one = count of this word in this doc / total number of words in this doc
    for word in doc_words:
        # raw occurrence count
        TF[word] +=1
    for word in TF.keys():
        TF[word] /= word_count
        TFIDF[word] = TF[word] / DF[word]

    # materialise the score map as (word, score) pairs so it can be sorted
    pairs = [(word, tfidf) for word, tfidf in TFIDF.items()]
    # sort ascending by the score (element [1] of each pair)
    pairs.sort(key = lambda p: p[1])
    # highest-scoring entries are at the end after the ascending sort
    top_10 = pairs[-15:]

    print("TOP 10 TFIDF")
    pprint(top_10)

    print("BOTTOM 10 TFIDF")
    pprint(pairs[0:15])
    DOC_ID_TO_TF[doc_id] = {'tf': TF, 'tfidf': TFIDF}

# pprint(DOC_ID_TO_TF)
/ESMValTool-2.9.0-py3-none-any.whl/esmvaltool/diag_scripts/examples/my_little_diagnostic.py |
# operating system manipulations (e.g. path constructions)
import os
# to manipulate iris cubes
import iris
import matplotlib.pyplot as plt
# import internal esmvaltool modules here
from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
from esmvalcore.preprocessor import area_statistics
def _plot_time_series(cfg, cube, dataset):
    """
    Example of personal diagnostic plotting function.

    Plots the time series of *cube* and saves the figure as
    ``Time_series_<dataset>.png`` in the recipe's plot directory.

    Arguments:
        cfg - nested dictionary of metadata
        cube - the cube to plot
        dataset - name of the dataset to plot

    Returns:
        string; makes some time-series plots

    Note: this function is private; remove the '_'
    so you can make it public.
    """
    # custom local paths for e.g. plots are supported -
    # here is an example
    # root_dir = '/group_workspaces/jasmin2/cmip6_prep/'  # edit as per need
    # out_path = 'esmvaltool_users/valeriu/'  # edit as per need
    # local_path = os.path.join(root_dir, out_path)
    # but one can use the already defined esmvaltool output paths
    local_path = cfg['plot_dir']

    # do the plotting dance (order matters: draw, label, then save and close)
    plt.plot(cube.data, label=dataset)
    plt.xlabel('Time (months)')
    plt.ylabel('Area average')
    plt.title('Time series at (ground level - first level)')
    plt.tight_layout()
    plt.grid()
    plt.legend()
    png_name = 'Time_series_' + dataset + '.png'
    plt.savefig(os.path.join(local_path, png_name))
    # close the figure so successive datasets do not draw on the same axes
    plt.close()

    # no need to brag :)
    return 'I made some plots!'
def run_my_diagnostic(cfg):
    """
    Simple example of a diagnostic.

    This is a basic (and rather esotherical) diagnostic that firstly
    loads the needed model data as iris cubes, performs a difference between
    values at ground level and first vertical level, then squares the
    result.

    Before plotting, we grab the squared result (not all operations on cubes)
    and apply an area average on it. This is a useful example of how to use
    standard esmvalcore.preprocessor functionality within a diagnostic, and
    especially after a certain (custom) diagnostic has been run and the user
    needs to perform an operation that is already part of the preprocessor
    standard library of functions.

    The user will implement their own (custom) diagnostics, but this
    example shows that once the preprocessor has finished a whole lot of
    user-specific metrics can be computed as part of the diagnostic,
    and then plotted in various manners.

    Arguments:
        cfg - nested dictionary of metadata

    Returns:
        string; runs the user diagnostic

    """
    # assemble the data dictionary keyed by dataset name
    # this makes use of the handy group_metadata function that
    # orders the data by 'dataset'; the resulting dictionary is
    # keyed on datasets e.g. dict = {'MPI-ESM-LR': [var1, var2...]}
    # where var1, var2 are dicts holding all needed information per variable
    my_files_dict = group_metadata(cfg['input_data'].values(), 'dataset')

    # iterate over key(dataset) and values(list of vars)
    for key, value in my_files_dict.items():
        # load the cube from data files only
        # using a single variable here so just grab the first (and only)
        # list element
        cube = iris.load_cube(value[0]['filename'])

        # the first data analysis bit: simple cube difference:
        # perform a difference between ground and first levels
        # (index 1 on the second axis is the first vertical level)
        diff_cube = cube[:, 0, :, :] - cube[:, 1, :, :]

        # square the difference'd cube just for fun
        squared_cube = diff_cube ** 2.

        # the second data analysis bit (slightly more advanced):
        # compute an area average over the squared cube
        # to apply the area average use a preprocessor function
        # rather than writing your own function
        area_avg_cube = area_statistics(squared_cube, 'mean')

        # finalize your analysis by plotting a time series of the
        # diffed, squared and area averaged cube; call the plot function:
        _plot_time_series(cfg, area_avg_cube, key)

    # that's it, we're done!
    return 'I am done with my first ESMValTool diagnostic!'
# Script entry point: only executed when ESMValTool runs this diagnostic.
if __name__ == '__main__':

    # always use run_diagnostic() to get the config (the preprocessor
    # nested dictionary holding all the needed information)
    with run_diagnostic() as config:
        # list here the functions that need to run
        run_my_diagnostic(config)
/KE-py-0.9.tar.gz/KE-py-0.9/KE/v3/project.py | from __future__ import with_statement, print_function, absolute_import
from KE.base import Base
from KE.util import danger_action
from datetime import datetime
class Project(Base):

    def __init__(self, client=None, project_id=None, name=''):
        """Project object wrapping a Kyligence/KE project accessed via *client*."""
        super(Project, self).__init__(client=client)
        self.id = project_id
        self.name = name

    @classmethod
    def from_json(cls, client=None, json_obj=None):
        """Deserialize the project json object to a Project object

        :param client: the KE client
        :param json_obj: the project json object
        :return: Project object
        """
        project = Project(client=client, project_id=json_obj['uuid'], name=json_obj['name'])
        project.last_modified = json_obj['last_modified']
        project.status = json_obj['status']
        project.create_time_utc = json_obj['create_time_utc']
        if project.create_time_utc:
            # timestamps arrive as epoch milliseconds; convert to datetime
            project.create_time_utc_dt = datetime.utcfromtimestamp(json_obj.get('create_time_utc') / 1000)
        project.last_update_time = json_obj['last_update_time']
        if project.last_update_time:
            project.last_update_time_dt = datetime.utcfromtimestamp(json_obj.get('last_update_time') / 1000)
        project.description = json_obj['description']
        project.realizations = json_obj['realizations']
        project.owner = json_obj['owner']
        # NOTE(review): duplicate assignment — last_modified is already set above.
        project.last_modified = json_obj['last_modified']
        project.override_kylin_properties = json_obj['override_kylin_properties']
        project.version = json_obj['version']
        project.name = json_obj['name']
        project.tables = json_obj['tables']
        return project

    def cubes(self, name=None, offset=0, size=20,
              exact_match=True, model_name=None,
              sort_by='update_time', reverse=True):
        """Get Cubes of this project.

        :param name: optional string, cube name to search for
        :param offset: optional int, index of the first returned record, default 0
        :param size: optional int, page size, default 20
        :param exact_match: optional boolean, whether the cube name must match exactly, default True
        :param model_name: optional string, only return cubes whose model name equals this value
        :param sort_by: optional string, field to sort by, default 'update_time'
        :param reverse: optional boolean, sort descending, default True
        :return: List of Cube objects
        """
        return self._client.cubes(project=self.name, name=name, offset=offset,
                                  size=size, exact_match=exact_match, model_name=model_name,
                                  sort_by=sort_by, reverse=reverse)

    def models(self):
        # TODO: not implemented yet
        pass

    def jobs(self, time_filter=0, status=None, offset=0, size=20, sort_by=None):
        """Get jobs of the project.

        :param time_filter: int time window —
            0 last day, 1 last week, 2 last month, 3 last year, 4 all
        :param status: int job status —
            NEW 0, PENDING 1, RUNNING 2, FINISHED 4, ERROR 8,
            DISCARDED 16, STOPPED 32
        :param offset: index of the first returned record
        :param size: page size
        :param sort_by: field to sort by
        :return: Job object list
        :rtype: jobs
        """
        return self._client.jobs(project=self.name, time_filter=time_filter, status=status,
                                 offset=offset, size=size, sort_by=sort_by)

    @danger_action
    def delete(self):
        """Delete the project on the server (irreversible).

        :return: the 'data' payload of the server response
        """
        json_obj = self._client.fetch_json(uri='/projects/{project}'.format(project=self.name), method='DELETE')
        return json_obj['data']

    def __repr__(self):
        return '<Project %s>' % self.name
/BornAgain-21.0-cp310-cp310-macosx_13_0_arm64.whl/bornagain/ba_fitmonitor.py | # ************************************************************************** #
import bornagain as ba
from bornagain import ba_plot as bp
try: # workaround for build servers
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import gridspec
except Exception as e:
print("In ba_fitmonitor.py: {:s}".format(str(e)))
class Plotter:
    """
    Draws fit progress. Base class for simulation-specific classes (PlotterGISAS etc).

    Holds one matplotlib figure plus the display options (intensity limits,
    axis labels, units, aspect) shared by the subclasses.
    """

    def __init__(self,
                 zmin=None,
                 zmax=None,
                 xlabel=None,
                 ylabel=None,
                 units=ba.Coords_UNDEFINED,
                 aspect=None):
        # create the figure once; subclasses redraw into it on every update
        self._fig = plt.figure(figsize=(10.25, 7.69))
        self._fig.canvas.draw()
        self._zmin = zmin
        self._zmax = zmax
        self._xlabel = xlabel
        self._ylabel = ylabel
        self._units = units
        self._aspect = aspect

    def __call__(self, fit_objective):
        # allow instances to be passed directly as fit-observer callbacks
        # NOTE(review): delegates to plot(fit_objective); the base-class plot()
        # takes no such argument, so subclasses are expected to override it.
        self.plot(fit_objective)

    def reset(self):
        """Clear the figure before redrawing the next iteration."""
        self._fig.clf()

    def plot(self):
        """Finalize layout and briefly yield to the GUI event loop."""
        self._fig.tight_layout()
        plt.pause(0.03)
class PlotterGISAS(Plotter):
    """
    Draws fit progress, for GISAS simulation.

    Renders a 2x2 panel per iteration: experimental data, simulated data,
    their absolute difference, and the current fit parameter values.
    """

    def __init__(self,
                 zmin=None,
                 zmax=None,
                 xlabel=None,
                 ylabel=None,
                 units=ba.Coords_UNDEFINED,
                 aspect=None):
        Plotter.__init__(self, zmin, zmax, xlabel, ylabel, units, aspect)

    @staticmethod
    def make_subplot(nplot):
        """Activate panel *nplot* (1-4) of the 2x2 grid."""
        plt.subplot(2, 2, nplot)
        plt.subplots_adjust(wspace=0.2, hspace=0.2)

    def plot(self, fit_objective):
        """Redraw all four panels from the current state of *fit_objective*."""
        Plotter.reset(self)

        real_data = fit_objective.experimentalData()
        sim_data = fit_objective.simulationResult()
        diff = fit_objective.absoluteDifference()

        self.make_subplot(1)

        # same limits for both plots; default spans six decades below the max
        arr = real_data.array()
        zmax = np.amax(arr) if self._zmax is None else self._zmax
        zmin = zmax*1e-6 if self._zmin is None else self._zmin

        bp.plot_simres(real_data,
                       title="Experimental data",
                       intensity_min=zmin,
                       intensity_max=zmax,
                       units=self._units,
                       xlabel=self._xlabel,
                       ylabel=self._ylabel,
                       zlabel='',
                       aspect=self._aspect)

        self.make_subplot(2)
        bp.plot_simres(sim_data,
                       title="Simulated data",
                       intensity_min=zmin,
                       intensity_max=zmax,
                       units=self._units,
                       xlabel=self._xlabel,
                       ylabel=self._ylabel,
                       zlabel='',
                       aspect=self._aspect)

        self.make_subplot(3)
        bp.plot_simres(diff,
                       title="Difference",
                       intensity_min=zmin,
                       intensity_max=zmax,
                       units=self._units,
                       xlabel=self._xlabel,
                       ylabel=self._ylabel,
                       zlabel='',
                       aspect=self._aspect)

        # fourth panel: textual summary of the fit state
        self.make_subplot(4)
        plt.title('Parameters')
        plt.axis('off')

        iteration_info = fit_objective.iterationInfo()

        plt.text(
            0.01, 0.85, "Iterations " +
            '{:d}'.format(iteration_info.iterationCount()))
        plt.text(0.01, 0.75,
                 "Chi2 " + '{:8.4f}'.format(iteration_info.chi2()))
        index = 0
        params = iteration_info.parameterMap()
        for key in params:
            plt.text(0.01, 0.55 - index*0.1,
                     '{:30.30s}: {:6.3f}'.format(key, params[key]))
            index = index + 1

        Plotter.plot(self)
class PlotterSpecular:
    """
    Draws fit progress, for specular simulation.

    Shows experiment vs. simulation reflectivity curves on a log scale,
    with an optional uncertainty band around the experimental data.
    """

    def __init__(self, units=ba.Coords_UNDEFINED, pause=0.0):
        # pause: seconds handed to plt.pause() after each redraw
        self.units = units
        self.pause = pause
        self._fig = plt.figure(figsize=(10, 7))
        self._fig.canvas.draw()

    def __call__(self, fit_objective):
        # allow instances to be used directly as fit-observer callbacks
        self.plot(fit_objective)

    def plot(self, fit_objective):
        """Redraw the reflectivity comparison from *fit_objective*'s current state."""
        self._fig.clf()

        # retrieving data from fit suite
        exp_data = fit_objective.experimentalData()
        sim_data = fit_objective.simulationResult()
        unc_data = fit_objective.uncertaintyData()

        # data values
        sim_values = sim_data.array(self.units)
        exp_values = exp_data.array(self.units)
        # uncertainties are optional; None means no band is drawn
        unc_values = None if unc_data is None else unc_data.array(
            self.units)

        # default font properties dictionary to use
        font = { 'size': 16 }

        plt.yscale('log')
        plt.ylim((0.5*np.min(exp_values), 5*np.max(exp_values)))

        plt.plot(exp_data.convertedBinCenters(), exp_values, 'k--')
        if unc_values is not None:
            # grey envelope at +/- one sigma around the experimental curve
            plt.plot(exp_data.convertedBinCenters(),
                     exp_values - unc_values,
                     'xkcd:grey',
                     alpha=0.6)
            plt.plot(exp_data.convertedBinCenters(),
                     exp_values + unc_values,
                     'xkcd:grey',
                     alpha=0.6)
        plt.plot(sim_data.convertedBinCenters(), sim_values, 'b')

        xlabel = bp.get_axes_labels(exp_data, self.units)[0]
        legend = ['Experiment', 'BornAgain']
        if unc_values is not None:
            legend = ['Experiment', r'Exp $\pm \sigma$', 'BornAgain']
        plt.legend(legend, loc='upper right', prop=font)
        plt.xlabel(xlabel, fontdict=font)
        plt.ylabel("Intensity", fontdict=font)

        if bp.do_show:
            plt.pause(self.pause)

    def show(self):
        """Enter matplotlib's blocking show loop (when display is enabled)."""
        if bp.do_show:
            plt.show()
/Heralding-1.0.7.tar.gz/Heralding-1.0.7/heralding/reporting/reporting_relay.py |
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import zmq
import queue
import logging
import heralding.misc
from heralding.misc.socket_names import SocketNames
logger = logging.getLogger(__name__)
class ReportingRelay:
    """Relays honeypot log events from a process-wide queue to a ZMQ
    PUB socket.

    Only one instance may exist at a time (the _logQueue class
    attribute doubles as the "instance exists" flag).  Producers call
    the static log* methods from any thread; start() drains the queue
    and publishes each entry, and stop() initiates shutdown.
    """

    # shared event queue; None while no relay instance exists
    _logQueue = None

    def __init__(self):
        # we are singleton
        assert ReportingRelay._logQueue is None
        ReportingRelay._logQueue = queue.Queue(maxsize=10000)

        self.enabled = True

        context = heralding.misc.zmq_context
        self.internalReportingPublisher = context.socket(zmq.PUB)

    @staticmethod
    def logAuthAttempt(data):
        # guard against logging before construction / after shutdown,
        # consistent with the other log* methods below
        if ReportingRelay._logQueue is not None:
            ReportingRelay._logQueue.put({'message_type': 'auth', 'content': data})

    @staticmethod
    def logSessionInfo(data):
        if ReportingRelay._logQueue is not None:
            ReportingRelay._logQueue.put({
                'message_type': 'session_info',
                'content': data
            })

    @staticmethod
    def logListenPorts(data):
        if ReportingRelay._logQueue is not None:
            ReportingRelay._logQueue.put({
                'message_type': 'listen_ports',
                'content': data
            })

    @staticmethod
    def logAuxiliaryData(data):
        if ReportingRelay._logQueue is not None:
            ReportingRelay._logQueue.put({
                'message_type': 'aux_info',
                'content': data
            })

    def start(self):
        """Publishes queued events until stop() has been called AND
        the queue has been fully drained."""
        self.internalReportingPublisher.bind(SocketNames.INTERNAL_REPORTING.value)

        while self.enabled or ReportingRelay._logQueue.qsize() > 0:
            try:
                data = ReportingRelay._logQueue.get(timeout=0.5)
                self.internalReportingPublisher.send_pyobj(data)
            except queue.Empty:
                pass

        # None signals 'going down' to listeners
        self.internalReportingPublisher.send_pyobj(None)
        self.internalReportingPublisher.close()

        # None is also used to signal we are all done
        ReportingRelay._logQueue = None

    def stop(self):
        self.enabled = False
/LilypondToBandVideoConverter-1.1.1.tar.gz/LilypondToBandVideoConverter-1.1.1/lilypondtobvc/src/convertermodules/miditransformer.py |
#====================
# IMPORTS
#====================
from array import array
from datetime import datetime
import re
from basemodules.simpleassertion import Assertion
from basemodules.simplelogging import Logging
from basemodules.simpletypes import Boolean, Dictionary, Integer, \
List, Map, Natural, Object, Real, \
String, StringList, StringMap, \
StringSet, Tuple
from basemodules.stringutil import deserializeToMap
from basemodules.ttbase import adaptToRange, iif, iif2, isInRange, \
MyRandom
from basemodules.validitychecker import ValidityChecker
from .midifilehandler import MidiFileHandler
#====================
# sentinel time value used to sort track-end pseudo events last
_infinity = 999999999

# style name -> serialized humanization style text
# (presumably filled by the configuration reader elsewhere -- TODO confirm)
_humanizationStyleNameToTextMap = {}
# names of tracks to be humanized (not referenced in this chunk)
_humanizedTrackNameSet = set()
# voice name -> [velocity variation factor, timing variation factor]
_voiceNameToVariationFactorMap = {}
#====================
def sign (x) -> "Integer":
    """Returns the sign of x: 0 for 0, -1 for a negative number and +1
       otherwise (matches the original iif2 cascade, e.g. NaN maps
       to +1)"""
    if x == 0:
        return 0
    return -1 if x < 0 else 1
#--------------------
def _canonicalTrackName (trackName : String) -> String:
"""Returns track name without any suffixes appended for
multiple MIDI tracks of the same instrument."""
for suffix in ["Bottom", "Middle", "Top"]:
if trackName.endswith(suffix):
trackName = trackName.replace(suffix, "")
return trackName
#====================
class _LineBuffer:
"""This class is a utility for providing buffered output to a line
list. The line list is given upon construction of the buffer
and the buffer may be manipulated, cleared or appended to the
line list."""
#--------------------
def __init__ (self,
associatedLineList : StringList):
"""Prepares <self> as a buffer for <associatedLineList>"""
self._info = []
self._isActive = False
self._lineList = associatedLineList
#--------------------
def activate (self,
isActive : Boolean):
"""Activates or deactivates <self> depending on <isActive>."""
self._isActive = isActive
#--------------------
def clear (self):
"""Resets <self> to an empty buffer."""
self._info = []
#--------------------
def flush (self):
"""Appends all data from <self> to associated line list and
sets <self> inactive."""
self._lineList.extend(self._info)
self.clear()
self.activate(False)
#--------------------
def length (self) -> Natural:
"""Returns count of lines in <self>."""
return len(self._info)
#--------------------
def lineList (self) -> StringList:
"""Returns list of lines in <self>."""
return self._info
#--------------------
def pop (self) -> String:
"""Removes first element in buffer."""
self._info.pop(0)
#--------------------
def prepend (self, currentLine):
"""Prepends <currentLine> to buffer as first line."""
self._info.insert(0, currentLine)
#--------------------
def writeLine (self, currentLine):
"""Adds <currentLine> as last line in buffer (when active) or
as last line in associated line list (when buffer is
inactive)."""
if self._isActive:
self._info.append(currentLine)
else:
self._lineList.append(currentLine)
#====================
# MusicTime
#====================
class _MusicTime:
    """This class encapsulates the handling of positions within a
       measure as well as the conversion between MIDI time and musical
       time.  An instance represents either a point in time (part
       counting starts at 1) or a duration (part counting starts
       at 0)."""

    # the vector of scaling factors for converting from music time to
    # midi time (ticks per measure, quarter, sixteenth and tick)
    _factorVector = None

    _quartersPerMeasure = None
    _separator = ":"
    _ticksPerQuarterNote = None

    # prominent durations and positions, set up by initialize()
    semibreveDuration = None
    quarterDuration = None
    sixteenthDuration = None
    thirtysecondDuration = None
    firstPosition = None

    #--------------------

    @classmethod
    def initialize (cls,
                    ticksPerQuarterNote : Natural,
                    quartersPerMeasure : Real):
        """Sets values for <ticksPerQuarterNote> and
           <quartersPerMeasure> and precomputes the conversion factors
           and prominent durations"""
        Logging.trace(">>: tpq = %d, qpm = %r",
                      ticksPerQuarterNote, quartersPerMeasure)

        cls._quartersPerMeasure = quartersPerMeasure
        cls._ticksPerQuarterNote = ticksPerQuarterNote
        cls._factorVector = \
            array('i',
                  ( int(ticksPerQuarterNote * quartersPerMeasure),
                    int(ticksPerQuarterNote),
                    int(ticksPerQuarterNote / 4),
                    1 ))

        cls.measureDuration = _MusicTime(1, 0, 0, 0, True)
        cls.semibreveDuration = _MusicTime(0, 4, 0, 0, True)
        cls.quarterDuration = cls.semibreveDuration.multiply(0.25)
        cls.sixteenthDuration = cls.quarterDuration.multiply(0.25)
        cls.thirtysecondDuration = cls.quarterDuration.multiply(0.125)
        cls.firstPosition = _MusicTime(1, 1, 1, 1, False)

        Logging.trace("<<")

    #--------------------

    def __init__ (self,
                  measureCount : Integer,
                  quartersCount : Natural,
                  sixteenthsCount : Natural,
                  remainderCount : Natural,
                  isDuration : Boolean):
        """Creates a music time object, which is either a time or a
           duration"""
        self._isDuration = isDuration
        self._data = array('i',
                           (measureCount, quartersCount,
                            sixteenthsCount, remainderCount))

    #--------------------

    def __repr__ (self) -> String:
        """Returns the string representation of a music time object"""
        return ("MusicTime(%s/%s)"
                % (":".join([ "%d" % element for element in self._data ]),
                   iif(self._isDuration, "du", "tm")))

    #--------------------

    def absolute (self) -> Object:
        """Calculates absolute value of <self> (i.e. with a
           non-negative measure part)"""
        # BUG FIX: the original referenced <result> before creating
        # it, raising a NameError on every call; construct the result
        # object properly instead
        result = _MusicTime(abs(self._data[0]), self._data[1],
                            self._data[2], self._data[3],
                            self._isDuration)
        return result

    #--------------------

    def add (self,
             duration : Object) -> Object:
        """Returns sum of <self> and <duration>"""
        Logging.trace(">>: time = %r, duration = %r", self, duration)
        Assertion.pre(not self._isDuration and duration._isDuration,
                      "bad parameters for add")

        cls = self.__class__
        midiTime = self.toMidiTime()
        midiDuration = duration.toMidiTime()
        midiResultTime = midiTime + midiDuration
        result = cls.fromMidiTime(midiResultTime, False)

        Logging.trace("<<: %r", result)
        return result

    #--------------------

    @classmethod
    def fromMidiTime (cls,
                      midiTime : Integer,
                      isDuration : Boolean) -> Object:
        """Splits up absolute midi time in <midiTime> into measure
           number, quarter number within measure, sixteenth number
           within quarter and remainder ticks; counting starts with 1
           for a time and with 0 for a duration; result is returned as
           a music time; also assumes that <quartersPerMeasure>
           contains the right number of quarters within a measure"""
        Logging.trace(">>: midiTime = %d, isDuration = %r",
                      midiTime, isDuration)

        # NOTE(review): the sign of a negative <midiTime> is discarded
        # here -- presumably callers only pass non-negative times;
        # verify before relying on negative inputs
        remainingMidiTime = int(abs(midiTime))
        partList = []
        offset = iif(isDuration, 0, 1)

        # successively divide by ticks per measure, quarter, sixteenth
        # and tick to get the four parts
        for factor in cls._factorVector:
            part = int(remainingMidiTime / factor)
            remainingMidiTime -= int(factor * part)
            part += offset
            partList.append(part)

        measureCount, quarterCount, sixteenthCount, \
            remainderCount = partList
        result = _MusicTime(measureCount, quarterCount, sixteenthCount,
                            remainderCount, isDuration)

        Logging.trace("<<: %r", result)
        return result

    #--------------------

    def isAt (self,
              reference : Object,
              rasterSize : Object) -> Boolean:
        """Tells whether <self> is near <reference> for some measure
           where <reference> is given as float factor of a semibreve;
           must be within a symmetric interval of <rasterSize>; a
           wraparound at the measure boundary is accounted for"""
        Logging.trace(">>: %r, reference = %r, rasterSize = %r",
                      self, reference, rasterSize)
        Assertion.pre(not self._isDuration,
                      "first parameter must be a time")

        cls = self.__class__
        tpqn = cls._ticksPerQuarterNote

        # find relative position in midi ticks
        measure = self.measure()
        midiMeasureDuration = round(cls._quartersPerMeasure * tpqn)
        midiTime = (self.toMidiTime()
                    - (measure - 1) * midiMeasureDuration)

        # check whether relative position is near <referenceTime>
        midiReferenceTime = round(4 * tpqn * reference)
        midiHalfRasterSize = round(4 * tpqn * rasterSize / 2.0)

        Logging.trace("--: midiTime = %d, midiReferenceTime = %d,"
                      + " midiHalfRaster = %d, midiMeasure = %d,"
                      + " qpm = %r, tpqn = %d",
                      midiTime, midiReferenceTime,
                      midiHalfRasterSize, midiMeasureDuration,
                      cls._quartersPerMeasure, tpqn)

        # check positive range
        midiOtherTime = midiReferenceTime + midiHalfRasterSize
        isNear = isInRange(midiTime, midiReferenceTime, midiOtherTime)
        Logging.trace("--: midiOtherTimeA = %d, isNear = %r",
                      midiOtherTime, isNear)

        if not isNear and midiOtherTime > midiMeasureDuration:
            # wraparound
            midiOtherTime -= midiMeasureDuration
            isNear = isInRange(midiTime, 0, midiOtherTime)
            Logging.trace("--: midiOtherTimeB = %d, isNear = %r",
                          midiOtherTime, isNear)

        # check negative range
        if not isNear:
            midiOtherTime = midiReferenceTime - midiHalfRasterSize
            isNear = isInRange(midiTime, midiOtherTime, midiReferenceTime)
            Logging.trace("--: midiOtherTimeC = %d, isNear = %r",
                          midiOtherTime, isNear)

            if not isNear and midiOtherTime < 0:
                # wraparound
                midiOtherTime += midiMeasureDuration
                isNear = isInRange(midiTime, midiOtherTime,
                                   midiMeasureDuration)
                Logging.trace("--: midiOtherTimeD = %d, isNear = %r",
                              midiOtherTime, isNear)

        result = isNear
        Logging.trace("<<: %r", result)
        return result

    #--------------------

    def measure (self) -> Integer:
        """Tells the measure of <self>"""
        return self._data[0]

    #--------------------

    def normalize (self) -> Object:
        """Calculates a standard representation of <self> and returns
           it"""
        Assertion.pre(not self._isDuration, "parameter must be a time")
        cls = self.__class__
        midiTime = self.toMidiTime()
        return cls.fromMidiTime(midiTime, False)

    #--------------------

    def multiply (self,
                  factor : Real) -> Object:
        """Does a scalar multiplication of duration <self> by <factor>
           and returns scaled duration"""
        Logging.trace(">>: duration = %r, factor = %f", self, factor)
        Assertion.pre(self._isDuration, "parameter must be a duration")

        cls = self.__class__
        midiDuration = self.toMidiTime()
        result = cls.fromMidiTime(round(midiDuration * factor), True)

        Logging.trace("<<: %r", result)
        return result

    #--------------------

    def subtract (self,
                  other : Object) -> Object:
        """Calculates difference of <self> and <other> and returns a
           duration"""
        Logging.trace(">>: %r, %r", self, other)

        cls = self.__class__
        midiTimeA = self.toMidiTime()
        midiTimeB = other.toMidiTime()
        midiDuration = midiTimeA - midiTimeB
        result = cls.fromMidiTime(midiDuration, True)

        Logging.trace("<<: %r", result)
        return result

    #--------------------

    def toMidiTime (self) -> Integer:
        """Converts <self> into midi time and returns value; assumes
           that <quartersPerMeasure> contains the right number of
           quarters within a measure; if <self._isDuration> is true,
           given time is a duration (starting at '0:0:0:0')"""
        Logging.trace(">>: %r", self)

        cls = self.__class__
        isNegative = self.measure() < 0
        offset = iif(self._isDuration, 0, -1)
        result = sum(map(lambda x, y: (x + offset) * y,
                         self._data, cls._factorVector))
        # NOTE(review): for a negative measure the whole sum is
        # negated -- verify the intended semantics for times before
        # measure 1
        result = round(iif(isNegative, -result, result))

        Logging.trace("<<: %d", result)
        return result
#====================
class _HumanizationStyle:
    """This class encapsulates all services for midi track
       humanization.  The style describes how the timing and the
       velocity may be changed depending on the position of a hit
       within a measure (plus a raster for matching positions and a
       random slack for the velocity)."""

    # name of the style used when a requested style is unknown
    defaultStyleName = "humanizationStyleDefault"

    # built-in fallback definition used when even the default style is
    # not configured; maps measure positions to "velocity/timing"
    # entries plus the RASTER and SLACK settings
    _defaultStyleAsString = \
        ("{ 0.00: 1.15/0, 0.25: 1/0.2, 0.50: 1.1/0.2, 0.75: 1/0.2,"
         + " OTHER: 0.85/B0.25,"
         + " RASTER: 0.03125, SLACK:0.1 }")

    # character separating the velocity part from the timing part
    _velocityAndTimingSeparator = "/"

    #--------------------
    # EXPORTED FEATURES
    #--------------------

    @classmethod
    def initialize (cls):
        """Fills mapping from style name to associated style
           string from configuration file"""
        Logging.trace(">>")
        # NOTE(review): nothing is done here -- the style map
        # <_humanizationStyleNameToTextMap> is apparently populated
        # elsewhere; confirm against the configuration reader
        Logging.trace("<<")

    #--------------------

    def __init__ (self,
                  styleName : String):
        """Finds style for given <styleName> and parses it into the
           internal position maps; if the name is unknown, the default
           style (or ultimately the built-in fallback) is used"""
        Logging.trace(">>: %s", styleName)

        cls = self.__class__
        humanizationStyleNameList = _humanizationStyleNameToTextMap.keys()

        if styleName in humanizationStyleNameList:
            styleAsString = _humanizationStyleNameToTextMap[styleName]
        else:
            Logging.trace("--: could not find style name %s", styleName)

            if cls.defaultStyleName in humanizationStyleNameList:
                styleAsString = \
                    _humanizationStyleNameToTextMap[cls.defaultStyleName]
            else:
                styleAsString = cls._defaultStyleAsString

        style = deserializeToMap(styleAsString)
        Logging.trace("--: style = %s", style)

        rasterSize = style.get("RASTER", "0.03125")
        ValidityChecker.isNumberString(rasterSize,
                                       "raster invalid in %r" % styleName,
                                       True)
        rasterSize = float(rasterSize)

        slackValue = style.get("SLACK")
        ValidityChecker.isNumberString(slackValue,
                                       "slack invalid in %r" % styleName,
                                       True)
        slackValue = float(slackValue)

        self._name = styleName
        self._rasterSize = rasterSize
        self._slack = slackValue
        self._positionToDirectionMap = {}
        self._positionToTimeVariationMap = {}
        self._positionToVelocityVariationMap = {}
        # raster positions (floats) only; "OTHER" is deliberately not
        # a raster position, it serves as the fallback key
        self._validPositions = []

        keyList = style.keys()
        separator = cls._velocityAndTimingSeparator

        # velocity and timing definition
        for positionKey in keyList:
            velocityAndTiming = style[positionKey]

            if positionKey in ["RASTER", "SLACK"]:
                continue
            elif positionKey != "OTHER":
                positionKey = float(positionKey)
                self._validPositions.append(positionKey)

            Logging.trace("--: position = %r, value = %r",
                          positionKey, velocityAndTiming)
            Assertion.check(separator in velocityAndTiming,
                            "bad value for %r in %r"
                            % (positionKey, styleName))
            velocity, timing = velocityAndTiming.split(separator)
            # an optional leading "A" (ahead) or "B" (behind) on the
            # timing part forces the direction of the time shift
            direction = timing[0]

            if direction not in "AB":
                direction = "-"
            else:
                timing = timing[1:]

            velocity = float(velocity)
            timing = float(timing)
            self._positionToVelocityVariationMap[positionKey] = velocity
            self._positionToDirectionMap[positionKey] = direction
            self._positionToTimeVariationMap[positionKey] = timing
            Logging.trace("--: %r -> %4.2f/%s%4.2f",
                          positionKey, velocity, direction, timing)

        Logging.trace("<<: %r", self)

    #--------------------

    def __repr__ (self) -> String:
        """Returns the string representation of <self>"""
        st = ("_HumanizationStyle(%s,"
              + " RASTER = %r, SLACK = %r,"
              + " VELOCITY = %r, DIRECTIONS = %r, TIMING = %r)")
        result = st % (self._name,
                       self._rasterSize, self._slack,
                       self._positionToVelocityVariationMap,
                       self._positionToDirectionMap,
                       self._positionToTimeVariationMap)
        return result

    #--------------------

    def hasDirectionalShiftAt (self,
                               eventPositionInMeasure : Real) -> String:
        """Returns the directional timing shift marker at
           <eventPositionInMeasure>: "A" (ahead), "B" (behind) or
           "-" (no forced direction)"""
        Logging.trace(">>: %r", eventPositionInMeasure)
        result = self._positionToDirectionMap[eventPositionInMeasure]
        Logging.trace("<<: %r", result)
        return result

    #--------------------

    @classmethod
    def instrumentVariationFactors (cls,
                                    instrumentName : String) -> List:
        """Returns the instrument specific variation factors for
           <instrumentName> as a pair (velocity factor, timing
           factor); defaults to no variation adjustment"""
        Logging.trace(">>: %r", instrumentName)
        result = _voiceNameToVariationFactorMap.get(instrumentName,
                                                    [1.0,1.0])
        Logging.trace("<<: [%4.3f, %4.3f]", result[0], result[1])
        return result

    #--------------------

    def keys (self) -> List:
        """Returns all raster time positions of this style"""
        return self._validPositions

    #--------------------

    def timingVariationFactor (self,
                               eventPositionInMeasure : Real) -> Real:
        """Returns the associated timing variation factor (in percent)
           for the <eventPositionInMeasure>"""
        Logging.trace(">>: %r", eventPositionInMeasure)
        result = \
            self._positionToTimeVariationMap.get(eventPositionInMeasure, 0)
        Logging.trace("<<: %r", result)
        return result

    #--------------------

    def raster (self) -> Real:
        """Returns the raster of current style"""
        Logging.trace(">>")
        result = self._rasterSize
        Logging.trace("<<: %r", result)
        return result

    #--------------------

    def velocityEmphasisFactor (self,
                                eventPositionInMeasure : Real) -> Real:
        """Returns the associated velocity factor (in percent) for the
           <eventPositionInMeasure>"""
        Logging.trace(">>: %r", eventPositionInMeasure)
        result = self._positionToVelocityVariationMap \
                     .get(eventPositionInMeasure, 1.0)
        Logging.trace("<<: %r", result)
        return result

    #--------------------

    def velocitySlack (self) -> Real:
        """Returns the associated slack (in percent)"""
        Logging.trace(">>")
        result = self._slack
        Logging.trace("<<: %r", result)
        return result
#====================
class _Humanizer:
"""This class encapsulates the service for humanization of one or
more MIDI event lists based on a humanization style. It
uses an internal event list and processes each single
note-on/note-off event as well as the timing of other events."""
# number of quarter notes per measure (set via initialize())
_quartersPerMeasure = None
# number of count-in measures that are exempted from humanization
_countInMeasureCount = 0
#--------------------
class _HumanizerEvent:
    """This class defines a single event in the event list of the
       humanizer."""

    def __init__ (self):
        # absolute MIDI tick time of the event
        self.midiTime = None
        # event text without the leading time stamp
        self.text = None
        # "On", "Off", "special" or another MIDI event kind
        self.kind = None
        # MIDI channel (note events only)
        self.channel = None
        # MIDI pitch (note events only)
        self.note = None
        # MIDI velocity (note events only)
        self.velocity = None
        # index of the partner note event (on <-> off)
        self.partner = None

    #--------------------

    def __repr__ (self) -> String:
        st = ("_HumanizerEvent(midiTime = %r, text = %r, kind = %r,"
              + " channel = %r, note = %r, velocity = %r,"
              + " partner = %r)")
        return (st % (self.midiTime, self.text, self.kind,
                      self.channel, self.note, self.velocity,
                      self.partner))
#--------------------
# LOCAL FEATURES
#--------------------
def _adjustTiming (self,
                   eventIndex : Natural,
                   musicTime : _MusicTime,
                   eventPositionInMeasure : Real,
                   instrumentTimingVariationFactor : Real,
                   timeToAdjustedTimeMap : Map):
    """Adjusts timing of note event given at <eventIndex> at
       <musicTime>; <eventPositionInMeasure> selects the timing entry
       of the style, <instrumentTimingVariationFactor> gives an
       instrument specific factor and <timeToAdjustedTimeMap> the map
       of already processed timestamps with their adjusted values (so
       simultaneous notes keep a common shifted time)"""
    Logging.trace(">>: index = %d, time = %r, positionInMeasure = %r,"
                  + " instrumentTimingVariation = %4.3f",
                  eventIndex, musicTime, eventPositionInMeasure,
                  instrumentTimingVariationFactor)

    cls = self.__class__
    result = None
    timeAsString = str(musicTime)
    style = self._styleForTime(musicTime)
    effectiveMeasureIndex = (musicTime.measure()
                             - cls._countInMeasureCount)

    if effectiveMeasureIndex <= 0:
        # leave as is, because those measures are count-ins
        result = musicTime
    elif timeAsString in timeToAdjustedTimeMap:
        # we already have seen this event time => reuse cached value
        result = timeToAdjustedTimeMap[timeAsString]
    else:
        direction = \
            style.hasDirectionalShiftAt(eventPositionInMeasure)
        variationFactor = \
            style.timingVariationFactor(eventPositionInMeasure)
        # the timing variation is expressed in multiples of a
        # thirtysecond note
        variationDuration = \
            _MusicTime.thirtysecondDuration.multiply(variationFactor)

        # do a random variation with a square distribution
        randomFactor = cls._squaredrand() * 2 - 1
        # if ahead or behind, adapt the random factor
        randomFactor = iif2(direction == "A", -abs(randomFactor),
                            direction == "B", abs(randomFactor),
                            randomFactor)
        # adjust by instrument
        randomFactor *= instrumentTimingVariationFactor
        variationDuration = variationDuration.multiply(randomFactor)
        result = musicTime.add(variationDuration)
        timeToAdjustedTimeMap[timeAsString] = result

    Logging.trace("<<: %r", result)
    return result
#--------------------
def _adjustVelocity (self,
                     eventIndex : Natural,
                     musicTime : _MusicTime,
                     velocity : Natural,
                     eventPositionInMeasure : Real,
                     instrumentVelocityVariationFactor : Real):
    """Adjusts <velocity> of note event given at <eventIndex> at
       <musicTime>; <eventPositionInMeasure> selects the emphasis
       entry of the style, <instrumentVelocityVariationFactor> gives
       an instrument specific scaling of the random variation; the
       result is clipped to the MIDI velocity range 0..127"""
    Logging.trace(">>: index = %d, time = %r, velocity = %d,"
                  + " positionInMeasure = %r,"
                  + " instrumentVelocityVariation = %4.3f",
                  eventIndex, musicTime, velocity, eventPositionInMeasure,
                  instrumentVelocityVariationFactor)

    cls = self.__class__
    result = None
    style = self._styleForTime(musicTime)
    measure = musicTime.measure() - cls._countInMeasureCount

    if measure <= 0:
        # leave as is, because those measures are count-ins
        result = velocity
    else:
        # randomFactor shall be between -1 and 1
        randomFactor = cls._squaredrand() * 2 - 1
        # adjust by instrument
        randomFactor *= instrumentVelocityVariationFactor
        slack = style.velocitySlack()
        # whenever some velocity variation is in the measure, do
        # not apply the emphasis
        factor = style.velocityEmphasisFactor(eventPositionInMeasure)
        factor = iif(measure in self._varyingVelocityMeasureSet,
                     1.0, factor)
        factor += randomFactor * slack
        velocity = int(velocity * factor)
        velocity = adaptToRange(velocity, 0, 127)
        result = velocity

    Logging.trace("<<: %d", result)
    return result
#--------------------
def _asLineList (self) -> StringList:
    """Converts internal event list containing MIDI events into a
       text line list"""
    Logging.trace(">>")

    result = []

    for event in self._eventList:
        midiTime = event.midiTime
        # pseudo events with artificial times (track header/end)
        # are written without a time prefix
        currentLine = iif(isInRange(midiTime, 0, _infinity - 1),
                          str(midiTime) + " ", "")

        if event.kind not in ["On", "Off"]:
            currentLine += event.text
        else:
            # a note-off is rendered as a note-on with velocity 0,
            # which is equivalent in MIDI and matches the parsing
            # done in _convertToEventList
            currentLine += ("On ch=%d n=%d v=%d"
                            % (event.channel, event.note, event.velocity))

        result.append(currentLine)
        Logging.trace("--: %s", currentLine)

    Logging.trace("<<")
    return result
#--------------------
def _collectMeasuresWithVaryingVelocity (self):
    """Finds all measures where events have varying velocity that is not
       regular (like e.g. a crescendo); those measures will later
       only get a slight humanization in velocity, but will not
       have a velocity humanization pattern applied"""
    Logging.trace(">>: eventListCount = %d", len(self._eventList))

    cls = self.__class__
    midiTimeToVelocityMap = {}

    # collect maximum velocities for given midi times
    for event in self._eventList:
        if event.kind == "On":
            midiTime = event.midiTime
            velocity = \
                max(event.velocity, midiTimeToVelocityMap.get(midiTime, 0))
            midiTimeToVelocityMap[midiTime] = velocity

    measureToVelocityMap = {}
    measureToDeltaVelocityMap = {}
    self._varyingVelocityMeasureSet.clear()

    # for each measure check whether the velocity is monotonously
    # increasing or decreasing or has some jumps
    for midiTime in sorted(midiTimeToVelocityMap.keys(), key=int):
        musicTime = _MusicTime.fromMidiTime(midiTime, False)
        velocity = midiTimeToVelocityMap[midiTime]
        measure = musicTime.measure() - cls._countInMeasureCount
        Logging.trace("--: m2v[%s] = %d", musicTime, velocity)

        if measure in measureToVelocityMap:
            delta = velocity - measureToVelocityMap[measure]

            if measure in measureToDeltaVelocityMap:
                otherDelta = measureToDeltaVelocityMap[measure]
                Logging.trace("--: delta = %r, otherDelta = %r",
                              delta, otherDelta)

                # a sign change between consecutive velocity deltas
                # means the velocity is not monotonous in the measure
                if (delta != 0 and otherDelta != 0
                    and sign(delta) != sign(otherDelta)):
                    Logging.trace("--: varying measure %d", measure)
                    self._varyingVelocityMeasureSet.add(measure)

            measureToDeltaVelocityMap[measure] = delta

        measureToVelocityMap[measure] = velocity
        Logging.trace("--: velocity[%s] = %r", measure, velocity)

    Logging.trace("--: %r", self._varyingVelocityMeasureSet)
    Logging.trace("<<")
#--------------------
def _convertToEventList (self,
                         lineList : StringList):
    """Converts <lineList> containing MIDI events into the
       internal event list; note-on events with velocity 0 are
       normalized to note-off events and on/off partners are linked
       via their indices"""
    Logging.trace(">>")

    cls = self.__class__
    eventCount = len(lineList)
    noteToStartIndexMap = {}

    # split MIDI event lines into time and event text part
    # and process note on and note off events
    for i in range(eventCount):
        currentLine = lineList[i]

        if " " in currentLine:
            midiTime, st = currentLine.split(" ", 1)
            midiTime = int(midiTime)
            tokenList = st.split(" ")
            kind = tokenList[0]
        else:
            # track header/end pseudo events get artificial times
            # so they sort first resp. last
            st = currentLine
            midiTime = iif(currentLine == "MTrk", -1, _infinity)
            kind = "special"

        channel = None
        affectedNote = None
        velocity = None
        partner = None

        if kind in ["On", "Off"]:
            # e.g. "On ch=1 n=60 v=90" -> strip the "ch="/"n=" prefixes
            channel = int((tokenList[1])[3:])
            affectedNote = int((tokenList[2])[2:])

            if kind == "On":
                if tokenList[-1] == "v=0":
                    # a note-on with velocity 0 is a note-off
                    kind = "Off"
                else:
                    # note on event
                    noteToStartIndexMap[affectedNote] = i
                    velocity = int((tokenList[3])[2:])
                    partner = 0

            if kind == "Off":
                velocity = 0

                if affectedNote in noteToStartIndexMap:
                    # link note-on and note-off via partner indices
                    j = noteToStartIndexMap[affectedNote]
                    partner = j
                    self._eventList[j].partner = i
                    del noteToStartIndexMap[affectedNote]

        event = cls._HumanizerEvent()
        event.midiTime = midiTime
        event.text = st
        event.kind = kind
        event.channel = channel
        event.note = affectedNote
        event.velocity = velocity
        event.partner = partner
        Logging.trace("--: event = %r", str(event))
        self._eventList.append(event)

    Logging.trace("<<")
#--------------------
def _findEventPositionInMeasure (self,
                                 musicTime : _MusicTime) -> Real:
    """Finds position of event within measure and returns it as a
       float value; when no raster position of the active style
       matches, the fallback key "OTHER" is returned"""
    Logging.trace(">>: %r", musicTime)

    style = self._styleForTime(musicTime)
    rasterSize = style.raster()
    # pick the first style position whose raster interval contains
    # the event time
    result = next((position
                   for position in style.keys()
                   if musicTime.isAt(position, rasterSize)),
                  None)
    result = "OTHER" if result is None else result

    Logging.trace("<<: %r", result)
    return result
#--------------------
def _processEventList (self,
                       trackName : String,
                       timeToAdjustedTimeMap : Map):
    """Traverses all events in the internal event list for track with
       <trackName> and transforms note events;
       <timeToAdjustedTimeMap> gives the list of already processed
       timestamps with their adjusted values"""
    Logging.trace(">>: %r", trackName)

    instrumentVelocityVariationFactor, \
    instrumentTimingVariationFactor = \
        _HumanizationStyle.instrumentVariationFactors(trackName)

    eventCount = len(self._eventList)
    noteToStartIndexMap = {}

    # traverse from the end so that for each note-on the following
    # note-on of the same pitch has already been processed
    for i in reversed(range(eventCount)):
        event = self._eventList[i]

        if event.kind == "On":
            partnerEvent = self._eventList[event.partner]
            note = event.note
            startMidiTime = event.midiTime
            endMidiTime = partnerEvent.midiTime
            midiDuration = endMidiTime - startMidiTime
            self._processSingleEvent(i, startMidiTime, note,
                                     event.velocity, midiDuration,
                                     instrumentVelocityVariationFactor,
                                     instrumentTimingVariationFactor,
                                     timeToAdjustedTimeMap)

            if note in noteToStartIndexMap:
                # check whether there is an overlap to following
                # note of same pitch
                j = noteToStartIndexMap[note]
                nextNoteEvent = self._eventList[j]
                otherStartMidiTime = nextNoteEvent.midiTime

                if otherStartMidiTime <= endMidiTime:
                    # clip
                    partnerEvent.midiTime = otherStartMidiTime - 1
                    Logging.trace("--: corrected overlap of next"
                                  + " %d (%d) into %d (%d)",
                                  j, otherStartMidiTime, i, endMidiTime)

            noteToStartIndexMap[note] = i

    Logging.trace("<<")
#--------------------
def _processSingleEvent (self,
                         eventIndex : Natural,
                         midiTime : Natural,
                         note : Natural,
                         velocity : Natural,
                         midiDuration : Natural,
                         instrumentVelocityVariation : Real,
                         instrumentTimingVariation : Real,
                         timeToAdjustedTimeMap : Map):
    """Humanizes note event given at <eventIndex> with parameters
       <midiTime>, <note>, <velocity> and <midiDuration>;
       <instrumentVelocityVariation> and
       <instrumentTimingVariation> give the instrument specific
       factors for the variation and <timeToAdjustedTimeMap> the
       list of already processed timestamps with their adjusted
       values; the event is updated in place"""
    Logging.trace(">>: index = %d, midiTime = %r,"
                  + " note = %r, velocity = %r,"
                  + " midiDuration = %r, instrumentVelocityVariation = %r,"
                  + " instrumentTimingVariation = %r",
                  eventIndex, midiTime, note, velocity,
                  midiDuration, instrumentVelocityVariation,
                  instrumentTimingVariation)

    musicTime = _MusicTime.fromMidiTime(midiTime, False)
    eventPositionInMeasure = self._findEventPositionInMeasure(musicTime)
    # adjust the velocity first (it depends on the original
    # position), then shift the timing
    velocity = self._adjustVelocity(eventIndex, musicTime, velocity,
                                    eventPositionInMeasure,
                                    instrumentVelocityVariation)
    musicTime = self._adjustTiming(eventIndex, musicTime,
                                   eventPositionInMeasure,
                                   instrumentTimingVariation,
                                   timeToAdjustedTimeMap)

    event = self._eventList[eventIndex]
    event.midiTime = musicTime.toMidiTime()
    event.velocity = velocity

    Logging.trace("<<: %r", event)
#--------------------
def _sortEventList (self):
    """Sorts events in <eventList> by time; events at the same time
       are ordered by kind (meta/control events before note-off
       before note-on) and the track end meta event always sorts
       last"""
    Logging.trace(">>")

    # all kind ranks are below 10, hence (midiTime * 10 + rank)
    # orders primarily by time and secondarily by kind
    kindOrder = { "special":0, "Meta":1, "PrCh":2, "Par":3, "KeySig":4,
                  "TimeSig":5, "Tempo":6, "Off":7, "On":8 }
    trackEndMetaEventText = "Meta TrkEnd"
    keyExtractionProc = (lambda x:
                         iif(x.text == trackEndMetaEventText, _infinity,
                             x.midiTime * 10 + kindOrder[x.kind]))
    self._eventList.sort(key=keyExtractionProc)

    Logging.trace("<<")
#--------------------
@classmethod
def _squaredrand (cls) -> Real:
    """Returns a random value in [0, 1] whose distribution is biased
       towards 0.5: a uniform value is mapped to [-1, 1], squared
       while keeping its sign, and mapped back"""
    result = MyRandom.random()
    result = result * 2 - 1
    # squaring a value in [-1, 1] pulls it towards 0, hence the
    # final result is pulled towards 0.5
    result = sign(result) * result * result
    result = result / 2.0 + 0.5
    Logging.trace("--: %f", result)
    return result
#--------------------
def _styleForMeasure (self,
                      measureIndex : Integer):
    """Returns style that is valid at given <measureIndex>; when no
       style is registered there, the style of the closest preceding
       measure applies (ultimately the default style); results are
       cached in the measure-to-style map"""
    if measureIndex in self._measureToHumanizationStyleMap:
        result = self._measureToHumanizationStyleMap[measureIndex]
    else:
        if measureIndex > 1:
            # inherit the style from the preceding measure
            result = self._styleForMeasure(measureIndex - 1)
        else:
            styleName = _HumanizationStyle.defaultStyleName
            result = _HumanizationStyle(styleName)

        # cache the result for subsequent lookups
        self._measureToHumanizationStyleMap[measureIndex] = result

    return result
#--------------------
def _styleForTime (self,
                   musicTime : _MusicTime):
    """Returns style that is valid at given <musicTime>; the measure
       is taken relative to the first measure after the count-ins"""
    Logging.trace(">>: %r", musicTime)

    cls = self.__class__
    measure = musicTime.measure() - cls._countInMeasureCount
    result = self._styleForMeasure(measure)

    Logging.trace("<<: %r", result._name)
    return result
#--------------------
# EXPORTED FEATURES
#--------------------
@classmethod
def initialize (cls,
                quartersPerMeasure : Real,
                countInMeasureCount : Natural):
    """Sets value for <quartersPerMeasure> and <countInMeasureCount>
       and seeds the random generator used for humanization"""
    Logging.trace(">>: qpm = %r, countIn = %d",
                  quartersPerMeasure, countInMeasureCount)

    cls._quartersPerMeasure = quartersPerMeasure
    cls._countInMeasureCount = countInMeasureCount
    MyRandom.initialize()

    Logging.trace("<<")
#--------------------
    def __init__ (self):
        """Initializes event humanizer; provides a list of objects
           for storing midi events; each event contains
           - midiTime: the MIDI time
           - text: the event without time as a text string
           - kind: information whether event is a 'note on',
             'note off' or 'other'
           for a 'note on' or 'note off' event we additionally have
           - note: the MIDI pitch
           for a 'note on' event we additionally have
           - velocity: the MIDI velocity
           - partner: the index of the closing note off event"""
        Logging.trace(">>")
        # map from measure index to humanization style (reset per track)
        self._measureToHumanizationStyleMap = {}
        # list of midi events (structure described in the docstring)
        self._eventList = []
        # set of measure indices with varying velocity; filled later by
        # _collectMeasuresWithVaryingVelocity
        self._varyingVelocityMeasureSet = set()
        # the following map takes a simplified track name (like
        # keyboard) and associates a midi time to shifted time map to
        # keep different tracks for the same instrument synchronous in
        # timing (but not in velocity!)
        self._canonicalTrackNameToTimingMap = {}
        Logging.trace("<<")
#--------------------
def process (self,
trackName : String,
trackLineList : StringList,
measureToHumanizationStyleMap : Map):
"""Humanizes MIDI events in <trackLineList> based on map
<measureToHumanizationStyleMap> from measure to style
name and returns resulting event line list"""
Logging.trace(">>: trackName = %r, measureToStyleMap = %r",
trackName, measureToHumanizationStyleMap)
cls = self.__class__
canonicalTrackName = _canonicalTrackName(trackName)
timeToAdjustedTimeMap = \
self._canonicalTrackNameToTimingMap.get(canonicalTrackName, {})
self._eventList = []
self._varyingVelocityMeasureSet = set()
self._measureToHumanizationStyleMap = measureToHumanizationStyleMap
self._convertToEventList(trackLineList)
self._collectMeasuresWithVaryingVelocity()
self._processEventList(trackName, timeToAdjustedTimeMap)
self._sortEventList()
result = self._asLineList()
self._canonicalTrackNameToTimingMap[canonicalTrackName] = \
timeToAdjustedTimeMap
Logging.trace("<<")
return result
#====================
class _ParseState:
    """Enumeration type for midi text line parsers; values are plain
       strings used as symbolic parse states"""

    afterSettings = "_ParseState.afterSettings"          # instrument settings just emitted
    inBadTrack = "_ParseState.inBadTrack"                # within a track to be discarded
    inFirstTrack = "_ParseState.inFirstTrack"            # within the leading (control) track
    inInstrumentTrack = "_ParseState.inInstrumentTrack"  # within a track with known settings
    inLimbo = "_ParseState.inLimbo"                      # outside of any track
    inOtherTrack = "_ParseState.inOtherTrack"            # within a track without settings
    inTrack = "_ParseState.inTrack"                      # within a track with an accepted name
    inTrackPrefix = "_ParseState.inTrackPrefix"          # at track start before the name line
#====================
class MidiTransformer:
    """This class encapsulates the transformation of MIDI files by
       functions. All transformation is done on a text representation
       that is finally converted back to the MIDI format."""

    # regular expressions matching the textual midi event representation
    _channelReferenceRegExp = re.compile(r" ch=(\d+)")        # channel reference in an event
    _parameterChangeRegExp = re.compile(r" Par ")             # any controller (parameter) change
    _volumeChangeRegExp = re.compile(r" Par .* c=7 ")         # volume controller (7) change
    _panChangeRegExp = re.compile(r" Par .* c=10 ")           # pan position controller (10) change
    _programChangeRegExp = re.compile(r" PrCh ")              # program (instrument) change
    _reverbChangeRegExp = re.compile(r" Par .* c=91 ")        # reverb level controller (91) change
    _trackBeginRegExp = re.compile(r"MTrk")                   # start of a midi track
    _trackEndRegExp = re.compile(r"TrkEnd")                   # end of a midi track
    _trackNameRegExp = re.compile(r"Meta TrkName +\"(.*)\"")  # track name meta event
#--------------------
# LOCAL FEATURES
#--------------------
    def _addMissingTrackNamesHandleTrackEnd (self,
                                             inFirstTrack : Boolean,
                                             hasTrackName : Boolean,
                                             inBadTrack : Boolean,
                                             lineBuffer : StringList,
                                             trackCountMap : Map,
                                             trackName : String,
                                             instrumentName : String):
        """Processes a track end when adding missing track names:
           <inFirstTrack>, <hasTrackName> and <inBadTrack> describe the
           just ended track, <lineBuffer> holds its buffered lines,
           <trackCountMap> counts tracks per generated name and
           <trackName>/<instrumentName> are the values gathered while
           scanning the track; bad or very short tracks are replaced by
           an empty track"""
        Logging.trace(">>")
        cls = self.__class__
        if inBadTrack:
            trackIsSkipped = True
        else:
            Logging.trace("--: %s", iif(inFirstTrack,
                                        "first track", "other track"))
            Logging.trace("--: %s%s", iif(not hasTrackName, "no ", ""),
                          "trackname found")
            if not hasTrackName and not inFirstTrack:
                # derive a track name from the instrument name and patch
                # the buffered track header accordingly
                Logging.trace("--: mapping instrument %r", instrumentName)
                trackName = cls._instrumentNameToTrackName(instrumentName,
                                                           trackCountMap)
                # NOTE(review): pop() presumably removes the buffered
                # "MTrk" header before it is re-inserted below together
                # with a track name line - confirm _LineBuffer.pop semantics
                lineBuffer.pop()
                lineBuffer.prepend("0 Meta TrkName \"" + trackName + "\"")
                lineBuffer.prepend("MTrk")
            # a track with fewer than five buffered lines carries no
            # payload and is discarded
            if lineBuffer.length() >= 5:
                trackIsSkipped = False
            else:
                trackIsSkipped = True
                Logging.trace("--: very short track replaced by empty track")
        if not trackIsSkipped:
            Logging.trace("--: final track name = %r", trackName)
        else:
            Logging.trace("--: bad track => replaced by empty track")
            cls._makeEmptyTrack(lineBuffer)
        lineBuffer.flush()
        Logging.trace("<<")
#--------------------
@classmethod
def _humanizeTrack (cls,
humanizer : _Humanizer,
trackName : String,
measureToHumanizationStyleMap : Map,
trackLineList : StringList,
lineList : StringList):
"""Humanizes entries in <trackLineList> by
<measureToHumanizationStyleMap> and appends them to
<lineList>"""
Logging.trace(">>")
processedLineList = humanizer.process(trackName, trackLineList,
measureToHumanizationStyleMap)
lineList.extend(processedLineList)
Logging.trace("<<")
#--------------------
    @classmethod
    def _instrumentNameToTrackName (cls,
                                    instrumentName : String,
                                    trackCountMap : Map) -> String:
        """Calculates track name for an anonymous track with
           instrument with <instrumentName>; <trackCountMap> gives the
           count of tracks with some track name and is updated here"""
        Logging.trace(">>: %r", instrumentName)
        # well-known instrument names are mapped onto voice names;
        # anything else keeps the instrument name
        instrumentToTrackMap = { "power kit" : "drums",
                                 "overdriven guitar" : "guitar",
                                 "reed organ" : "bass",
                                 "rock organ" : "keyboard",
                                 "synth voice" : "vocals" }
        trackName = instrumentToTrackMap.get(instrumentName, instrumentName)
        if trackName in trackCountMap:
            relativeIndex = trackCountMap[trackName] + 1
        else:
            relativeIndex = 0
        trackCountMap[trackName] = relativeIndex
        # NOTE(review): relativeIndex 1..6 yields suffix "A".."F"; the
        # leading "-" at slice index 0 is never used and any index above
        # 6 yields an empty suffix (name collision) - confirm intended
        trackName = (trackName
                     + iif(relativeIndex == 0, "",
                           "-ABCDEF"[relativeIndex: relativeIndex + 1]))
        Logging.trace("<<: %r", trackName)
        return trackName
#--------------------
@classmethod
def _normalizedTrackName (cls,
trackName : String) -> String:
"""Returns standard form of <trackName> without any colons
etc."""
Logging.trace(">>: %r", trackName)
result = trackName.split(":", 1)[0]
Logging.trace("<<: %r", result)
return result
#--------------------
@classmethod
def _makeEmptyTrack (cls,
lineBuffer : StringList):
"""Replaces contents of <lineBuffer> by an empty track"""
Logging.trace(">>")
lineBuffer.clear()
lineBuffer.writeLine("MTrk")
lineBuffer.writeLine("0 Meta TrkEnd")
lineBuffer.writeLine("TrkEnd")
Logging.trace("<<")
#--------------------
    @classmethod
    def _processPositionInstrumentsLine (cls,
                                         currentLine : String,
                                         parseState : _ParseState,
                                         trackToSettingsMap : Map,
                                         activeSettings : Map,
                                         lineBuffer : StringList):
        """Process a single line <currentLine> in parser state
           <parseState> with given <trackToSettingsMap> while
           positioning instruments and updates <lineBuffer>
           accordingly; finally returns updated parse state and active
           settings"""
        Logging.trace(">>: [%s]#%s", parseState, currentLine)
        if cls._trackEndRegExp.match(currentLine):
            # track end: emit buffered lines and leave the track
            activeSettings = None
            lineBuffer.writeLine(currentLine)
            lineBuffer.flush()
            parseState = _ParseState.inLimbo
        elif cls._trackNameRegExp.search(currentLine):
            matchResult = cls._trackNameRegExp.search(currentLine)
            originalTrackName = matchResult.group(1)
            trackName = originalTrackName
            # strip a single capital suffix letter and any voice part
            # suffix so variants map onto the same settings entry
            trackName = iif(trackName > "" and trackName[-1] in "ABCDEFG",
                            trackName[:-1], trackName)
            for suffix in [ "Top", "Middle", "Bottom" ]:
                if trackName.endswith(suffix):
                    trackName = trackName[:-len(suffix)]
            Logging.trace("--: trackName = %r, normalized = %r",
                          originalTrackName, trackName)
            lineBuffer.writeLine(currentLine)
            parseState = iif(trackName not in trackToSettingsMap,
                             _ParseState.inOtherTrack,
                             _ParseState.inInstrumentTrack)
            activeSettings = trackToSettingsMap.get(trackName, {})
        elif cls._parameterChangeRegExp.search(currentLine):
            # controller changes are dropped; they are regenerated from
            # the active settings on the next program change
            # ignore this line
            Logging.trace("--: skipped")
        elif cls._programChangeRegExp.search(currentLine):
            midiTime = int(currentLine.split(" ", 1)[0])
            if parseState == _ParseState.inOtherTrack:
                # leave line as is
                lineBuffer.writeLine(currentLine)
            elif parseState != _ParseState.inInstrumentTrack:
                Logging.trace("--: skipped program change")
            else:
                # replace the program change by bank select, program
                # change, volume, pan and reverb from the settings
                Logging.trace("--: replace by new settings")
                prefix = "%d Par ch=%d " % (midiTime,
                                            activeSettings.midiChannel)
                def lineGeneratorProc (controllerIndex, value):
                    st = (prefix + "c=%d v=%d") % (controllerIndex, value)
                    lineBuffer.writeLine(st)
                st = "%d PrCh ch=%d p=%d" % (midiTime,
                                             activeSettings.midiChannel,
                                             activeSettings.midiInstrument)
                lineGeneratorProc(0, activeSettings.midiInstrumentBank)
                lineBuffer.writeLine(st)
                lineGeneratorProc(7, activeSettings.midiVolume)
                lineGeneratorProc(10, activeSettings.panPosition)
                lineGeneratorProc(91, activeSettings.reverbLevel)
                parseState = _ParseState.afterSettings
        else:
            # any other event: rewrite its channel reference to the
            # configured channel when within a known instrument track
            if (cls._channelReferenceRegExp.search(currentLine)
                and parseState != _ParseState.inOtherTrack):
                matchResult = cls._channelReferenceRegExp.search(currentLine)
                st = " ch=%d" % activeSettings.midiChannel
                currentLine = currentLine.replace(matchResult.group(0), st)
                Logging.trace("--: channel is updated - %r", currentLine)
            lineBuffer.writeLine(currentLine)
        Logging.trace("<<: %s", parseState)
        return parseState, activeSettings
#--------------------
# EXPORTED FEATURES
#--------------------
    @classmethod
    def initialize (cls,
                    voiceNameMap : StringMap,
                    styleNameToTextMap : Dictionary,
                    trackNameSet : StringSet):
        """Sets global variables for this module: the map from voice
           name to variation factors, the map from humanization style
           name to its textual definition and the set of track names
           subject to humanization"""
        global _humanizationStyleNameToTextMap, _humanizedTrackNameSet
        global _voiceNameToVariationFactorMap
        Logging.trace(">>: voiceNameMap = %r, styleNameToTextMap = %r,"
                      + " trackList = %r",
                      voiceNameMap, styleNameToTextMap, trackNameSet)
        _humanizationStyleNameToTextMap = styleNameToTextMap
        _humanizedTrackNameSet = trackNameSet
        _voiceNameToVariationFactorMap = voiceNameMap
        Logging.trace("<<")
#--------------------
def __init__ (self,
midiFileName : String,
intermediateFilesAreKept : Boolean = False):
"""Reads data from <midiFileName> and stores it internally in
a text representation."""
Logging.trace(">>: %r", midiFileName)
self._intermediateFilesAreKept = intermediateFilesAreKept
midiFile = MidiFileHandler()
self._lineList = midiFile.readFile(midiFileName)
Logging.trace("<<")
#--------------------
def save (self,
targetMidiFileName : String):
"""Writes internal data to MIDI file with <targetMidiFileName>."""
Logging.trace(">>: %r", targetMidiFileName)
midiFile = MidiFileHandler()
midiFile.writeFile(targetMidiFileName, self._lineList)
Logging.trace("<<")
#--------------------
    def addMissingTrackNames (self):
        """Adds track names to <self> when there are none based on
           instruments in a track; bad or very short tracks are replaced
           by empty tracks (see _addMissingTrackNamesHandleTrackEnd)."""
        Logging.trace(">>")
        cls = self.__class__
        trackInstrumentRegExp = re.compile(r"Meta InstrName +\"(.*)\"")
        # tracks whose name contains "new:" are considered bad
        badTrackNameRegExp = re.compile(r"new:")
        lineList = []
        lineBuffer = _LineBuffer(lineList)
        trackCountMap = {}
        parseState = _ParseState.inLimbo
        # NOTE(review): trackName/instrumentName are only bound at a
        # track begin line; a "TrkEnd" before the first "MTrk" would
        # raise - presumably well-formed input guarantees the order
        for currentLine in self._lineList:
            Logging.trace("--: [%s]#%s", parseState, currentLine)
            if trackInstrumentRegExp.search(currentLine):
                # remember instrument name as fallback for the track name
                matchResult = trackInstrumentRegExp.search(currentLine)
                instrumentName = matchResult.group(1)
                Logging.trace("--: instrumentName = %r", instrumentName)
            elif cls._trackEndRegExp.match(currentLine):
                lineBuffer.writeLine(currentLine)
                inFirstTrack = (parseState == _ParseState.inFirstTrack)
                hasTrackName = (parseState == _ParseState.inTrack)
                inBadTrack = (parseState == _ParseState.inBadTrack)
                self._addMissingTrackNamesHandleTrackEnd(
                    inFirstTrack, hasTrackName, inBadTrack, lineBuffer,
                    trackCountMap, trackName, instrumentName)
                continue
            elif cls._trackBeginRegExp.match(currentLine):
                Logging.trace("--: track start - %s", currentLine)
                lineBuffer.activate(True)
                instrumentName = ""
                trackName = ""
                # the very first track is the midi control track
                parseState = \
                    iif(parseState == _ParseState.inLimbo,
                        _ParseState.inFirstTrack, _ParseState.inTrackPrefix)
            elif cls._trackNameRegExp.search(currentLine):
                matchResult = cls._trackNameRegExp.search(currentLine)
                trackName = matchResult.group(1)
                if badTrackNameRegExp.search(trackName):
                    parseState = _ParseState.inBadTrack
                elif parseState == _ParseState.inTrackPrefix:
                    # accept and normalize the name, patching the line
                    parseState = _ParseState.inTrack
                    trackName = cls._normalizedTrackName(trackName)
                    currentLine = (currentLine.split('"', 1)[0]
                                   + "\"" + trackName + "\"")
                Logging.trace("--: trackName = %r, parseState = %s",
                              trackName, parseState)
            lineBuffer.writeLine(currentLine)
        self._lineList = lineList
        Logging.trace("<<")
#--------------------
    def addProcessingDateToTracks (self,
                                   trackNameList : StringList):
        """Tags all instrument tracks in <self> having a track name in
           <trackNameList> with a meta text event containing the
           processing date; humanized tracks are tagged as "humanized",
           others as "processed"."""
        Logging.trace(">>")
        cls = self.__class__
        tagLinePrefix = "0 Meta Text \""
        tagLineSuffix = " at %s\"" % datetime.now().strftime("%Y-%m-%dT%H%M")
        lineList = []
        lineBuffer = _LineBuffer(lineList)
        for currentLine in self._lineList:
            Logging.trace("--: #%s", currentLine)
            if cls._trackBeginRegExp.match(currentLine):
                # start buffering a new track
                lineBuffer.activate(True)
                lineBuffer.writeLine(currentLine)
            else:
                lineBuffer.writeLine(currentLine)
                if cls._trackEndRegExp.match(currentLine):
                    lineBuffer.flush()
                elif cls._trackNameRegExp.search(currentLine):
                    matchResult = cls._trackNameRegExp.search(currentLine)
                    trackName = matchResult.group(1)
                    canonicalTrackName = _canonicalTrackName(trackName)
                    Logging.trace("--: trackName = %r, canonical = %r",
                                  trackName, canonicalTrackName)
                    if canonicalTrackName in trackNameList:
                        # insert the tag line right after the name line
                        isHumanized = (canonicalTrackName
                                       in _humanizedTrackNameSet)
                        tagLine = (tagLinePrefix
                                   + iif(isHumanized,
                                         "humanized", "processed")
                                   + tagLineSuffix)
                        Logging.trace("--: tagLine = %r", tagLine)
                        lineBuffer.writeLine(tagLine)
        lineBuffer.flush()
        self._lineList = lineList
        Logging.trace("<<")
#--------------------
    def filterByTrackNamePrefix (self,
                                 trackNamePrefix : String):
        """Analyzes tracks and leaves only those ones with track name
           that start with <trackNamePrefix>; in any case the first
           track (the midi control track) is also kept; skipped tracks
           are replaced by empty tracks (keeping the track count)"""
        Logging.trace(">>: prefix = %r", trackNamePrefix)
        cls = self.__class__
        lineList = []
        lineBuffer = _LineBuffer(lineList)
        isFirstTrack = True
        for currentLine in self._lineList:
            Logging.trace("--: #%s", currentLine)
            if cls._trackBeginRegExp.match(currentLine):
                trackName = ""
                lineBuffer.activate(True)
                lineBuffer.writeLine(currentLine)
            else:
                lineBuffer.writeLine(currentLine)
                if cls._trackNameRegExp.search(currentLine):
                    matchResult = cls._trackNameRegExp.search(currentLine)
                    trackName = matchResult.group(1)
                    Logging.trace("--: trackName = %r", trackName)
                elif cls._trackEndRegExp.match(currentLine):
                    # decide at track end whether the buffered track is kept
                    trackIsMaintained = (trackName.startswith(trackNamePrefix)
                                         or isFirstTrack)
                    message = "track is " + iif(trackIsMaintained,
                                                "maintained", "skipped")
                    Logging.trace("--: " + message)
                    if not trackIsMaintained:
                        cls._makeEmptyTrack(lineBuffer)
                    lineBuffer.flush()
                    isFirstTrack = False
        lineBuffer.flush()
        self._lineList = lineList
        Logging.trace("<<")
#--------------------
    def humanizeTracks (self,
                        countInMeasureCount : Natural,
                        measureToHumanizationStyleNameMap : Map):
        """Adapts instrument tracks in <self> to emulate a human player based
           on style given by <measureToHumanizationStyleNameMap>; tracks
           whose canonical name is in the humanized track name set are
           processed by a _Humanizer, all other lines pass through
           unchanged; does nothing when the style name map is empty"""
        Logging.trace(">>: countIn = %r, styleMap = %r",
                      countInMeasureCount, measureToHumanizationStyleNameMap)
        cls = self.__class__
        if len(measureToHumanizationStyleNameMap) > 0:
            # resolve style names into style objects once up front
            measureToHumanizationStyleMap = {}
            for measure, styleName in measureToHumanizationStyleNameMap.items():
                style = _HumanizationStyle(styleName)
                measureToHumanizationStyleMap[measure] = style
            # enumeration for kind of some track
            TrackKind_unknown = 0
            TrackKind_instrument = 1
            TrackKind_other = 2
            # TODO: algorithm can only cope with a single time signature
            fileBeginRegExp = re.compile(r"MFile\W+(\w+)\W+(\w+)\W+(\w+)")
            timeSignatureRegExp = re.compile(r"TimeSig\W+(\w+)/(\w+)")
            lineList = []
            lineBuffer = _LineBuffer(lineList)
            humanizer = _Humanizer()
            # NOTE(review): ticksPerQuarterNote is only bound by the
            # "MFile" header line; this assumes the header precedes any
            # "TimeSig" event - confirm for all generated midi files
            for currentLine in self._lineList:
                ## Logging.trace("--: #%s", currentLine)
                if cls._trackBeginRegExp.match(currentLine):
                    trackName = ""
                    lineBuffer.activate(True)
                    lineBuffer.writeLine(currentLine)
                    trackKind = TrackKind_unknown
                else:
                    lineBuffer.writeLine(currentLine)
                    if fileBeginRegExp.match(currentLine):
                        # file header gives the midi time resolution
                        matchResult = fileBeginRegExp.match(currentLine)
                        ticksPerQuarterNote = int(matchResult.group(3))
                        Logging.trace("--: ticks per quarter = %d",
                                      ticksPerQuarterNote)
                        _MusicTime.initialize(ticksPerQuarterNote, 4)
                    elif timeSignatureRegExp.search(currentLine):
                        matchResult = timeSignatureRegExp.search(currentLine)
                        numerator = int(matchResult.group(1))
                        denominator = int(matchResult.group(2))
                        quartersPerMeasure = \
                            round(numerator / denominator * 4, 3)
                        Logging.trace("--: qpm = %r", quartersPerMeasure)
                        _MusicTime.initialize(ticksPerQuarterNote,
                                              quartersPerMeasure)
                        _Humanizer.initialize(quartersPerMeasure,
                                              countInMeasureCount)
                    elif cls._trackEndRegExp.match(currentLine):
                        if trackKind != TrackKind_instrument:
                            Logging.trace("--: other track end")
                            lineBuffer.flush()
                        else:
                            # humanize the buffered track and append the
                            # result directly to the output line list
                            Logging.trace("--: instrument track end: %r",
                                          trackName)
                            trackLineList = lineBuffer.lineList()
                            lineBuffer.clear()
                            cls._humanizeTrack(humanizer, trackName,
                                               measureToHumanizationStyleMap,
                                               trackLineList, lineList)
                    elif cls._trackNameRegExp.search(currentLine):
                        matchResult = cls._trackNameRegExp.search(currentLine)
                        trackName = matchResult.group(1)
                        canonicalTrackName = _canonicalTrackName(trackName)
                        isHumanized = (canonicalTrackName
                                       in _humanizedTrackNameSet)
                        trackKind = iif(isHumanized,
                                        TrackKind_instrument, TrackKind_other)
                        Logging.trace("--: trackName = %r, kind = %r",
                                      canonicalTrackName, trackKind)
            self._lineList = lineList
        Logging.trace("<<")
#--------------------
    def positionInstruments (self,
                             trackToSettingsMap : StringMap):
        """Scans instrument tracks in <self> and changes channel,
           program, volume, pan and reverb settings based on
           <trackToSettingsMap> (line-wise processing is delegated to
           _processPositionInstrumentsLine)"""
        Logging.trace(">>: %r", trackToSettingsMap)
        cls = self.__class__
        lineList = []
        lineBuffer = _LineBuffer(lineList)
        # parse state and active settings are threaded through the
        # per-line processing
        parseState = _ParseState.inLimbo
        activeSettings = {}
        for currentLine in self._lineList:
            parseState, activeSettings = \
                cls._processPositionInstrumentsLine(currentLine, parseState,
                                                    trackToSettingsMap,
                                                    activeSettings, lineBuffer)
        lineBuffer.flush()
        self._lineList = lineList
        Logging.trace("<<")
#--------------------
def removeUnwantedControlCodes (self):
"""Analyzes tracks and kicks out midi volume changes, reverb effect
and pan settings"""
Logging.trace(">>")
cls = self.__class__
lineList = []
lineBuffer = _LineBuffer(lineList)
for currentLine in self._lineList:
Logging.trace("--: #%s", currentLine)
if cls._volumeChangeRegExp.search(currentLine):
Logging.trace("--: skipped volume change")
elif cls._reverbChangeRegExp.search(currentLine):
Logging.trace("--: skipped reverb change")
elif cls._panChangeRegExp.search(currentLine):
Logging.trace("--: skipped pan change")
else:
lineBuffer.writeLine(currentLine)
lineBuffer.flush()
self._lineList = lineList
Logging.trace("<<") | PypiClean |
# === miwork/dt_enum.py (from MiWork 2021.2.20.20.8.11) ===
from __future__ import absolute_import, division, print_function, unicode_literals
from enum import Enum
class MessageType(Enum):
    """Message type.

    Supported: plain text, image, rich text (post), group share card,
    interactive card and forwarded messages.
    """
    text = 'text'  # plain text
    image = 'image'  # image
    post = 'post'  # rich text
    share_chat = 'share_chat'  # share a group card
    card = 'interactive'  # interactive card message
    forward = 'forward'  # forwarded message
class UrgentType(Enum):
    """Message urgency channel.

    Supported: in-app (Feishu) notification, SMS and phone call.
    """
    app = 'app'  # in-app (Feishu) notification
    sms = 'sms'  # SMS
    phone = 'phone'  # phone call
class I18NType(Enum):
    """Locale of an internationalized message.

    Supported: Chinese, Japanese and English.
    """
    zh_cn = 'zh_cn'  # Chinese (simplified)
    ja_jp = 'ja_jp'  # Japanese
    en_us = 'en_us'  # English (US)
class ImageColor(Enum):
    """Header color of a card message.
    """
    orange = 'orange'
    red = 'red'
    yellow = 'yellow'
    gray = 'gray'
    blue = 'blue'
    green = 'green'
class MethodType(Enum):
    """Request type triggered by a card-message button.
    """
    post = 'post'  # send a POST request
    get = 'get'  # send a GET request
    jump = 'jump'  # jump to the given url
class CalendarRole(Enum):
    """Access role on a calendar."""
    reader = 'reader'  # subscriber, may view event details
    free_busy_reader = 'free_busy_reader'  # guest, only sees "busy/free"
class CalendarEventVisibility(Enum):
    """Visibility of a calendar event.

    Supported: show only "busy" to others; public with full details;
    visible to the owner only.
    """
    default = 'default'  # default: others only see whether you are "busy"
    public = 'public'  # public: event details are shown
    private = 'private'  # visible to the owner only
class ApprovalUploadFileType(Enum):
    """File type of an approval attachment upload."""
    image = 'image'
    attachment = 'attachment'
class EventType(Enum):
    """Callback event type.

    https://open.feishu.cn/document/uYjL24iN/uUTNz4SN1MjL1UzM
    """
    url_verification = 'url_verification'  # endpoint verification request
    app_ticket = 'app_ticket'  # after a tenant admin enables an ISV app, app_ticket events are pushed periodically to the callback address
    app_open = 'app_open'  # pushed when an enterprise admin enables the app in the admin console
    message = 'message'  # message a user sent to the app, either directly to the bot or in a group chat with the bot
    user_add = 'user_add'  # contact directory changes
    user_update = 'user_update'
    user_leave = 'user_leave'
    dept_add = 'dept_add'
    dept_update = 'dept_update'
    dept_delete = 'dept_delete'
    contact_scope_change = 'contact_scope_change'
    approval = 'approval'  # approval passed
    leave_approval = 'leave_approval'  # leave (time-off) approval
    work_approval = 'work_approval'  # overtime approval
    shift_approval = 'shift_approval'  # shift-change approval
    remedy_approval = 'remedy_approval'  # attendance-correction approval
    trip_approval = 'trip_approval'  # business-trip approval
    remove_bot = 'remove_bot'  # bot removed from a chat
    add_bot = 'add_bot'  # bot added to a chat
    p2p_chat_create = 'p2p_chat_create'  # user opened the bot's chat window for the first time
    add_user_to_chat = 'add_user_to_chat'  # user joined a chat
    remove_user_from_chat = 'remove_user_from_chat'  # user left a chat
    revoke_add_user_from_chat = 'revoke_add_user_from_chat'  # adding a user was revoked
    unknown = 'unknown'
class ApprovalInstanceStatus(Enum):
    """Status of an approval instance."""
    pending = 'PENDING'  # awaiting review
    approved = 'APPROVED'  # approved
    rejected = 'REJECTED'  # rejected
    canceled = 'CANCELED'  # canceled
    deleted = 'DELETED'  # deleted
class ApprovalTaskStatus(Enum):
    """Status of a single approval task."""
    pending = 'PENDING'  # under review
    approved = 'APPROVED'  # approved
    rejected = 'REJECTED'  # rejected
    transfered = 'TRANSFERRED'  # transferred to someone else
    canceled = 'DONE'  # done; NOTE(review): member name "canceled" does not match value 'DONE' - verify against the API
class ApprovalTaskTypeStatus(Enum):
    """Sign-off mode of an approval node."""
    or_sign = 'OR'  # OR-sign: one approver passing passes the node
    and_sign = 'AND'  # AND-sign: all approvers must pass for the node to pass
    auto_pass = 'AUTO_PASS'  # passed automatically
    auto_reject = 'AUTO_REJECT'  # rejected automatically
    sequential = 'SEQUENTIAL'  # approvers act in sequence
class ApprovalTimelineType(Enum):
    """Type of an entry in the approval activity timeline."""
    start = 'START'  # approval started
    passed = 'PASS'  # passed
    reject = 'REJECT'  # rejected
    auto_pass = 'AUTO_PASS'  # passed automatically
    auto_reject = 'AUTO_REJECT'  # rejected automatically
    remove_repeat = 'REMOVE_REPEAT'  # duplicate removal
    transfer = 'TRANSFER'  # transferred
    add_approver_before = 'ADD_APPROVER_BEFORE'  # approver added before the current node
    add_approver = 'ADD_APPROVER'  # approver added in parallel
    add_approver_after = 'ADD_APPROVER_AFTER'  # approver added after the current node
    delete_approver = 'DELETE_APPROVER'  # approver removed
    rollback_selected = 'ROLLBACK_SELECTED'  # rolled back to a selected node
    rollback = 'ROLLBACK'  # rolled back completely
    cancel = 'CANCEL'  # withdrawn
    delete = 'DELETE'  # deleted
    cc = 'CC'  # carbon copy
class PayPricePlanType(Enum):
    """Price plan type of a paid app.
    """
    trial = 'trial'  # trial
    permanent = 'permanent'  # one-time payment
    per_year = 'per_year'  # per tenant, paid yearly
    per_month = 'per_month'  # per tenant, paid monthly
    per_seat_per_year = 'per_seat_per_year'  # per seat, paid yearly
    per_seat_per_month = 'per_seat_per_month'  # per seat, paid monthly
    permanent_count = 'permanent_count'  # pay per use
class PayBuyType(Enum):
    """Purchase type of an order.
    """
    buy = 'buy'  # regular purchase
    # upgrade purchase: only available when price_plan_type is per_year,
    # per_month, per_seat_per_year or per_seat_per_month
    upgrade = 'upgrade'
    renew = 'renew'  # renewal purchase
class PayStatus(Enum):
    """Current status of an order.
    """
    normal = 'normal'  # valid
    refund = 'refund'  # refunded
    all = 'all'  # any status; used when querying orders
class MeetingReplyStatus(Enum):
    """Meeting-room reply status: NOT_CHECK_IN means nobody checked in,
    ENDED_BEFORE_DUE means the meeting was ended ahead of schedule.
    """
    not_check_in = 'NOT_CHECK_IN'  # not checked in
    ended_before_due = 'ENDED_BEFORE_DUE'  # ended before the scheduled time
def import_fmu_to_modelica(fmu_path, model_path, interface_type):
    """Import an FMU into a Modelica library.

    Extracts the FMU into the library's ``Resources/FMUs`` directory and
    generates a Modelica wrapper model from a Jinja2 template.

    Parameters:
        fmu_path:        path of the FMU to import
        model_path:      path of the Modelica file (``<ModelName>.mo``) to
                         generate; must lie inside a Modelica package (a
                         directory containing a ``package.order`` file)
        interface_type:  'Model Exchange' or 'Co-Simulation'

    Raises:
        Exception: if ``model_path`` is not inside a Modelica package
    """
    from os import makedirs
    from pathlib import Path

    import jinja2

    from fmpy import extract, read_model_description

    fmu_path = Path(fmu_path)
    model_path = Path(model_path)
    model_name = model_path.stem
    package_dir = Path(model_path).parent

    if not (package_dir / 'package.order').is_file():
        raise Exception(f"{package_dir} is not a package of a Modelica library.")

    model_description = read_model_description(fmu_path)

    # use the FMU's default experiment step size when available
    if model_description.defaultExperiment is not None and model_description.defaultExperiment.stepSize is not None:
        communicationStepSize = model_description.defaultExperiment.stepSize
    else:
        communicationStepSize = '1e-2'

    if interface_type == 'Model Exchange':
        model_identifier = model_description.modelExchange.modelIdentifier
    else:
        model_identifier = model_description.coSimulation.modelIdentifier

    # walk up to the root of the Modelica package hierarchy
    package_root = package_dir

    while (package_root.parent / 'package.order').is_file():
        package_root = package_root.parent

    unzipdir = package_root / 'Resources' / 'FMUs' / model_identifier

    makedirs(unzipdir, exist_ok=True)

    extract(filename=fmu_path, unzipdir=unzipdir)

    # '@@'/'@=' delimiters avoid clashing with Modelica's curly braces
    loader = jinja2.FileSystemLoader(searchpath=Path(__file__).parent / 'templates')
    environment = jinja2.Environment(loader=loader, trim_blocks=True, block_start_string='@@',
                                     block_end_string='@@', variable_start_string='@=', variable_end_string='=@')

    if interface_type == 'Co-Simulation':
        template = environment.get_template('FMI2_CS.mo')
    else:
        template = environment.get_template('FMI2_ME.mo')

    # collect scalar variables by causality (other types are ignored)
    parameters = []
    inputs = []
    outputs = []

    # icon extent in the Modelica annotation coordinate system
    width = 400
    height = 200

    x0 = -int(width / 2)
    x1 = int(width / 2)
    y0 = -int(height / 2)
    y1 = int(height / 2)

    for variable in model_description.modelVariables:

        if variable.type not in {'Real', 'Integer', 'Boolean'}:
            continue

        if variable.causality == 'parameter':
            parameters.append(variable)
        elif variable.causality == 'input':
            inputs.append(variable)
        elif variable.causality == 'output':
            outputs.append(variable)

    # build placement annotations and icon labels for the connectors;
    # inputs are stacked on the left edge, outputs on the right edge
    labels = []
    annotations = dict()

    for i, variable in enumerate(inputs):
        y = y1 - (i + 1) * (height / (1 + len(inputs)))
        annotations[
            variable.name] = f'annotation (Placement(transformation(extent={{ {{ {x0 - 40}, {y - 20} }}, {{ {x0}, {y + 20} }} }}), iconTransformation(extent={{ {{ {x0 - 40}, {y - 20} }}, {{ {x0}, {y + 20} }} }})))'
        labels.append(
            f', Text(extent={{ {{ {x0 + 10}, {y - 10} }}, {{ -10, {y + 10} }} }}, textColor={{0,0,0}}, textString="{variable.name}", horizontalAlignment=TextAlignment.Left)')

    for i, variable in enumerate(outputs):
        y = y1 - (i + 1) * (height / (1 + len(outputs)))
        annotations[
            variable.name] = f'annotation (Placement(transformation(extent={{ {{ {x1}, {y - 10} }}, {{ {x1 + 20}, {y + 10} }} }}), iconTransformation(extent={{ {{ {x1}, {y - 10} }}, {{ {x1 + 20}, {y + 10} }} }})))'
        labels.append(
            f', Text(extent={{ {{ 10, {y - 10} }}, {{ {x1 - 10}, {y + 10} }} }}, textColor={{0,0,0}}, textString="{variable.name}", horizontalAlignment=TextAlignment.Right)')

    def as_array(values, default):
        """Render *values* as a Modelica array literal."""
        if len(values) > 0:
            return '{ ' + ', '.join(map(str, values)) + ' }'
        else:
            return f'fill({default}, 0)'

    def as_quoted_array(values, default):
        """Render *values* as a Modelica array of single-quoted strings."""
        if len(values) > 0:
            return '{ ' + ', '.join(map(lambda v: f"'{v}'", values)) + ' }'
        else:
            return f'fill({default}, 0)'

    def start_value(variable):
        """Render the start value of *variable* as a Modelica literal."""
        if variable.type == 'Boolean':
            return 'true' if variable.start in ['true', '1'] else 'false'
        else:
            return str(variable.start)

    def modelica_type(variable):
        """Return the declared Modelica type of *variable* or its base type."""
        if variable.declaredType is not None and variable.declaredType.name.startswith('Modelica.'):
            return variable.declaredType.name
        else:
            return variable.type

    # make the helpers callable from within the template
    template.globals.update({
        'as_array': as_array,
        'as_quoted_array': as_quoted_array,
        'start_value': start_value,
        'modelica_type': modelica_type
    })

    class_text = template.render(
        package=package_root.name,
        description=model_description.description,
        modelName=model_name,
        modelIdentifier=model_identifier,
        interfaceType=0 if interface_type == 'Model Exchange' else 1,
        instantiationToken=model_description.guid,
        nx=model_description.numberOfContinuousStates,
        nz=model_description.numberOfEventIndicators,
        parameters=parameters,
        communicationStepSize=communicationStepSize,
        x0=x0,
        x1=x1,
        y0=y0,
        y1=y1,
        labels=' '.join(labels),
        inputs=inputs,
        outputs=outputs,
        annotations=annotations,
        realInputVRs=[str(v.valueReference) for v in inputs if v.type == 'Real'],
        realInputs=[v.name for v in inputs if v.type == 'Real'],
        integerInputVRs=[str(v.valueReference) for v in inputs if v.type == 'Integer'],
        integerInputs=[v.name for v in inputs if v.type == 'Integer'],
        booleanInputVRs=[str(v.valueReference) for v in inputs if v.type == 'Boolean'],
        booleanInputs=[v.name for v in inputs if v.type == 'Boolean'],
    )

    with open(model_path, 'w') as f:
        f.write(class_text)

    # register the model in the package order if it is not listed yet
    with open(package_dir / 'package.order', 'r') as f:
        package_order = list(map(lambda l: l.strip(), f.readlines()))

    if model_name not in package_order:
        with open(package_dir / 'package.order', 'a') as f:
            f.write(model_name + '\n')
# === ding/worker/coordinator/base_parallel_commander.py (from DI-engine 0.4.9) ===
from abc import ABC, abstractmethod
from collections import defaultdict
from easydict import EasyDict
import copy
from ding.utils import import_module, COMMANDER_REGISTRY, LimitedSpaceContainer
class BaseCommander(ABC):
    r"""
    Overview:
        Abstract base class for parallel commanders.
    Interface:
        default_config, get_collector_task, judge_collector_finish, judge_learner_finish
    """

    @classmethod
    def default_config(cls: type) -> EasyDict:
        r"""
        Overview:
            Return a deep copy of the subclass's ``config`` dict as an
            ``EasyDict``, tagged with the generating class name.
        """
        result = EasyDict(copy.deepcopy(cls.config))
        result.cfg_type = cls.__name__ + 'Dict'
        return result

    @abstractmethod
    def get_collector_task(self) -> dict:
        r"""
        Overview:
            Return a new collector task description; must be overridden.
        """
        raise NotImplementedError

    def judge_collector_finish(self, task_id: str, info: dict) -> bool:
        r"""
        Overview:
            Whether ``info`` marks the collector task as done.
        """
        return bool(info.get('collector_done', False))

    def judge_learner_finish(self, task_id: str, info: dict) -> bool:
        r"""
        Overview:
            Whether ``info`` marks the learner task as done.
        """
        return bool(info.get('learner_done', False))
@COMMANDER_REGISTRY.register('naive')
class NaiveCommander(BaseCommander):
r"""
Overview:
A naive implementation of parallel commander.
Interface:
__init__, get_collector_task, get_learner_task, finsh_collector_task, finish_learner_task,
notify_fail_collector_task, notify_fail_learner_task, update_learner_info
"""
config = dict(
collector_task_space=1,
learner_task_space=1,
eval_interval=60,
)
def __init__(self, cfg: dict) -> None:
r"""
Overview:
Init the naive commander according to config
Arguments:
- cfg (:obj:`dict`): The config to init commander. Should include \
"collector_task_space" and "learner_task_space".
"""
self._cfg = cfg
self._exp_name = cfg.exp_name
commander_cfg = self._cfg.policy.other.commander
self._collector_task_space = LimitedSpaceContainer(0, commander_cfg.collector_task_space)
self._learner_task_space = LimitedSpaceContainer(0, commander_cfg.learner_task_space)
self._collector_env_cfg = copy.deepcopy(self._cfg.env)
self._collector_env_cfg.pop('collector_episode_num')
self._collector_env_cfg.pop('evaluator_episode_num')
self._collector_env_cfg.manager.episode_num = self._cfg.env.collector_episode_num
self._collector_task_count = 0
self._learner_task_count = 0
self._learner_info = defaultdict(list)
self._learner_task_finish_count = 0
self._collector_task_finish_count = 0
def get_collector_task(self) -> dict:
r"""
Overview:
Get a new collector task when ``collector_task_count`` is smaller than ``collector_task_space``.
Return:
- task (:obj:`dict`): New collector task.
"""
if self._collector_task_space.acquire_space():
self._collector_task_count += 1
collector_cfg = copy.deepcopy(self._cfg.policy.collect.collector)
collector_cfg.collect_setting = {'eps': 0.9}
collector_cfg.eval_flag = False
collector_cfg.policy = copy.deepcopy(self._cfg.policy)
collector_cfg.policy_update_path = 'test.pth'
collector_cfg.env = self._collector_env_cfg
collector_cfg.exp_name = self._exp_name
return {
'task_id': 'collector_task_id{}'.format(self._collector_task_count),
'buffer_id': 'test',
'collector_cfg': collector_cfg,
}
else:
return None
def get_learner_task(self) -> dict:
r"""
Overview:
Get the new learner task when task_count is less than task_space
Return:
- task (:obj:`dict`): the new learner task
"""
if self._learner_task_space.acquire_space():
self._learner_task_count += 1
learner_cfg = copy.deepcopy(self._cfg.policy.learn.learner)
learner_cfg.exp_name = self._exp_name
return {
'task_id': 'learner_task_id{}'.format(self._learner_task_count),
'policy_id': 'test.pth',
'buffer_id': 'test',
'learner_cfg': learner_cfg,
'replay_buffer_cfg': copy.deepcopy(self._cfg.policy.other.replay_buffer),
'policy': copy.deepcopy(self._cfg.policy),
}
else:
return None
def finish_collector_task(self, task_id: str, finished_task: dict) -> None:
r"""
Overview:
finish collector task will add the collector_task_finish_count
"""
self._collector_task_space.release_space()
self._collector_task_finish_count += 1
def finish_learner_task(self, task_id: str, finished_task: dict) -> str:
    """
    Overview:
        Count one finished learner task, release its task slot and report
        which replay buffer the task used so the caller can close it.
    Return:
        - buffer_id (:obj:`str`): the buffer id of the finished task
    """
    self._learner_task_finish_count += 1
    self._learner_task_space.release_space()
    return finished_task['buffer_id']
def notify_fail_collector_task(self, task: dict) -> None:
    """
    Overview:
        Give back the task slot of a failed collector task; the naive
        coordinator does no further bookkeeping for failures.
    """
    self._collector_task_space.release_space()
def notify_fail_learner_task(self, task: dict) -> None:
    """
    Overview:
        Give back the task slot of a failed learner task; the naive
        coordinator does no further bookkeeping for failures.
    """
    self._learner_task_space.release_space()
def update_learner_info(self, task_id: str, info: dict) -> None:
    """
    Overview:
        Record a piece of learner info under its task id.
    Arguments:
        - task_id (:obj:`str`): the learner task id
        - info (:obj:`dict`): the info dict to record for that task
    """
    self._learner_info[task_id].append(info)
def increase_collector_task_space(self):
    """
    Overview:
        Grow the collector task space when a collector is added dynamically.
    """
    self._collector_task_space.increase_space()
def decrease_collector_task_space(self):
    """
    Overview:
        Shrink the collector task space when a collector is removed dynamically.
    """
    self._collector_task_space.decrease_space()
def create_parallel_commander(cfg: EasyDict) -> BaseCommander:
    """
    Overview:
        Build a parallel commander instance from a whole config.
    Arguments:
        - cfg (:obj:`dict`): the whole config; ``cfg.policy.other.commander``
          must provide ``import_names`` and the registered commander ``type``.
    """
    cfg = EasyDict(cfg)
    commander_cfg = cfg.policy.other.commander
    # Importing the listed modules registers the commander class.
    import_module(commander_cfg.import_names)
    return COMMANDER_REGISTRY.build(commander_cfg.type, cfg=cfg)
def get_parallel_commander_cls(cfg: EasyDict) -> type:
    """
    Overview:
        Look up the registered commander class named in ``cfg`` without
        instantiating it.
    """
    cfg = EasyDict(cfg)
    names = cfg.get('import_names', [])
    import_module(names)
    return COMMANDER_REGISTRY.get(cfg.type)
/GenIce2-2.1.7.1.tar.gz/GenIce2-2.1.7.1/genice2/lattices/sI.py |
from genice2.valueparser import parse_cages
from genice2.cell import cellvectors
import genice2.lattices
# Plugin metadata consumed by GenIce2: literature references for the
# structure's aliases, a usage note and a short human-readable description.
desc = {"ref": {
    "A15": 'Frank 1959',
    "sI": 'Jeffrey 1984',
    "CS1": 'Kosyakov 1999',
    "MEP": 'IZA Database'},
    "usage": "No options available.",
    "brief": "Clathrate hydrates sI."
}
class Lattice(genice2.lattices.Lattice):
    """Clathrate hydrate structure I (aliases: A15, CS1, MEP) unit cell.

    Supplies absolute water-oxygen coordinates and cage centers/types for
    GenIce2 in a cubic cell of edge 12.238818320447 (units presumed to be
    Angstrom per GenIce convention -- TODO confirm).
    """

    def __init__(self):
        self.density = 0.795  # default density (presumably g/cm^3 -- TODO confirm)
        self.bondlen = 3  # hydrogen-bond distance threshold for graph building
        # NOTE: a dead assignment of a plain cell-size string to self.cell was
        # removed here; self.cell is set once via cellvectors() below.
        # Water oxygen positions, one "x y z" triplet per line (absolute
        # coordinates; see self.coord).
        self.waters = """
        10.8155661379622 3.79048442202564 0
        3.79048442202564 0 1.42325218248478
        0 3.05970458011175 6.1194091602235
        8.35850097194928 3.88031734849772 8.35850097194928
        2.23909181172578 2.23909181172578 2.23909181172578
        0 9.17911374033525 6.1194091602235
        2.23909181172578 9.99972650872122 2.23909181172578
        6.1194091602235 9.90989358224914 4.69615697773872
        7.54266134270828 6.1194091602235 9.90989358224914
        9.99972650872122 2.23909181172578 9.99972650872122
        3.88031734849772 3.88031734849772 3.88031734849772
        2.32892473819786 4.69615697773872 6.1194091602235
        3.88031734849772 8.35850097194928 3.88031734849772
        3.88031734849772 3.88031734849772 8.35850097194928
        8.44833389842136 0 1.42325218248478
        6.1194091602235 2.32892473819786 4.69615697773872
        9.99972650872122 9.99972650872122 2.23909181172578
        6.1194091602235 9.90989358224914 7.54266134270828
        9.90989358224914 4.69615697773872 6.1194091602235
        3.79048442202564 0 10.8155661379622
        6.1194091602235 2.32892473819786 7.54266134270828
        3.88031734849772 8.35850097194928 8.35850097194928
        10.8155661379622 8.44833389842136 0
        9.99972650872122 9.99972650872122 9.99972650872122
        0 1.42325218248478 3.79048442202564
        9.99972650872122 2.23909181172578 2.23909181172578
        9.90989358224914 7.54266134270828 6.1194091602235
        9.17911374033525 6.1194091602235 0
        0 10.8155661379622 8.44833389842136
        8.35850097194928 8.35850097194928 8.35850097194928
        6.1194091602235 0 3.05970458011175
        0 1.42325218248478 8.44833389842136
        2.32892473819786 7.54266134270828 6.1194091602235
        4.69615697773872 6.1194091602235 2.32892473819786
        4.69615697773872 6.1194091602235 9.90989358224914
        2.23909181172578 2.23909181172578 9.99972650872122
        0 10.8155661379622 3.79048442202564
        1.42325218248478 8.44833389842136 0
        1.42325218248478 3.79048442202564 0
        8.35850097194928 3.88031734849772 3.88031734849772
        8.35850097194928 8.35850097194928 3.88031734849772
        7.54266134270828 6.1194091602235 2.32892473819786
        2.23909181172578 9.99972650872122 9.99972650872122
        6.1194091602235 0 9.17911374033525
        3.05970458011175 6.1194091602235 0
        8.44833389842136 0 10.8155661379622
        """
        self.coord = "absolute"  # self.waters values are absolute, not fractional
        # Cage centers in fractional coordinates; the leading integer is the
        # cage size (12- and 14-hedra of the sI framework).
        self.cagepos, self.cagetype = parse_cages("""
        12 0.5000 0.5000 0.5000
        12 1.0000 1.0000 0.0000
        14 0.5000 0.7500 0.0000
        14 0.5000 0.2500 0.0000
        14 0.0000 0.5000 0.7500
        14 0.2500 1.0000 0.5000
        14 0.7500 1.0000 0.5000
        14 0.0000 0.5000 0.2500
        """)
        # Cubic cell built from the lattice constant.
        self.cell = cellvectors(a=12.238818320447,
                                b=12.238818320447,
                                c=12.238818320447)
/Flask-ServerInfo-0.1.2.zip/Flask-ServerInfo-0.1.2/flask_serverinfo.py |
__all__ = 'setup view dumps logging_info logger_info server_info JSONEncoder'.split()
__version__ = '0.1.2'
import flask
from flask import json, Flask, Request, Response
from logging import Logger, getLogger, root
from werkzeug.local import LocalProxy
from werkzeug.routing import Map
from werkzeug.datastructures import MultiDict, Headers
class JSONEncoder(json.JSONEncoder):
    """JSON encoder that can serialise Flask/Werkzeug internals for debugging.

    NOTE(review): Python 2 only -- relies on ``basestring`` and ``iteritems``.
    """

    # Scalar types emitted as-is.
    base_types = (basestring, int, float, bool, type(None))
    # Container types walked recursively by replace_circular_refs.
    iter_types = (dict, tuple, list, set)
    # Framework objects dumped as a dict of their public, non-callable attributes.
    inspect_types = (LocalProxy, Flask, Map, Request, Response)

    def default(self, o):
        """Convert non-JSON-native objects to something serialisable."""
        if isinstance(o, self.inspect_types):
            # Snapshot public data attributes; skips dunders and methods.
            return dict((k, getattr(o, k)) for k in dir(o)
                        if isinstance(k, basestring) and not k.startswith('__') and not callable(getattr(o, k)))
        elif isinstance(o, MultiDict):
            return o.lists()
        elif isinstance(o, Headers):
            return o.items()
        elif isinstance(o, Logger):
            return logger_info(o)
        try:
            return super(JSONEncoder, self).default(o)
        except TypeError:
            # Last resort: a repr string instead of raising.
            return '{} {!r}'.format(type(o), o)

    def iterencode(self, o, _one_shot=False):
        """Encode after rewriting circular references into marker strings."""
        o = self.replace_circular_refs(o)
        try:
            for chunk in super(JSONEncoder, self).iterencode(o, _one_shot):
                yield chunk
        except ValueError as error:
            # Emit the failure itself as a JSON string so output stays parseable.
            yield '"{}: {} {!r}"'.format(error, type(o), o)

    def replace_circular_refs(self, o, path='', cache=None):
        """Recursively rebuild *o*, replacing already-seen objects with
        '<$ref: path>' markers so the encoder never loops forever.

        ``cache`` maps id(obj) -> dotted path of its first occurrence.
        """
        if cache is None:
            cache = {}
        if not isinstance(o, self.base_types):
            if isinstance(o, dict):
                # Drop non-string keys; JSON objects require string keys.
                o = dict((k, o[k]) for k in o if isinstance(k, basestring))
            elif isinstance(o, self.iter_types):
                o = list(o)
            else:
                # Not a plain container: convert via default() first, then recurse.
                return self.replace_circular_refs(self.default(o), path, cache)
            for key, value in (o.iteritems() if isinstance(o, dict) else enumerate(o)):
                if not isinstance(value, self.base_types):
                    if id(value) in cache:
                        o[key] = '<$ref: {}>'.format(cache[id(value)])
                    else:
                        cache[id(value)] = '{}{}'.format(path, key)
                        o[key] = self.replace_circular_refs(value, '{}{}.'.format(path, key), cache)
        return o
# Default keyword arguments for dumps(); per-call options override these.
DUMP_OPTIONS = dict(
    indent = 2,
    sort_keys = True,
    cls = JSONEncoder,
)
def dumps(data, **options):
    """Serialise *data* to JSON using the module defaults, which individual
    keyword arguments may override."""
    merged = dict(DUMP_OPTIONS)
    merged.update(options)
    return json.dumps(data, **merged)
def logging_info(*additional_logger_names):
    """Return one formatted summary line per known logger: root first, then
    any explicitly requested names, then every registered logger sorted."""
    names = ['root'] + list(additional_logger_names) + sorted(root.manager.loggerDict.keys())
    lines = []
    for name in names:
        lines.append(logger_info(getLogger(name)))
    return lines
def logger_info(l):
    """Render a one-line summary of a logger: configured/effective level,
    handler count, propagation flag and (parent-qualified) name."""
    parent = l.parent or l
    if parent is l or l.name.startswith(parent.name + '.'):
        shown_name = l.name
    else:
        # Parent name is not a prefix; show both for orientation.
        shown_name = parent.name + ' :: ' + l.name
    flag = '+' if l.propagate else ''
    return '<Logger> [%02d/%02d] %01d%1s %s' % (
        l.level, l.getEffectiveLevel(), len(l.handlers), flag, shown_name,
    )
def server_info(app=None, *additional_logger_names):
    """Collect a debugging snapshot of the Flask app and the logging tree."""
    chosen_app = app or flask.current_app
    return dict(
        app=chosen_app,
        logging=logging_info(*additional_logger_names),
    )
def view():
    """Render the current request, a blank response template and the server
    state as a JSON response."""
    payload = dict(
        request=flask.request,
        response=Response(mimetype='application/json'),
        server=server_info(),
    )
    return Response(dumps(payload), mimetype='application/json')
def setup(app, uri, endpoint='serverinfo_view', **options):
    """Register the server-info view on *app* under *uri*."""
    register = app.route(uri, endpoint=endpoint, **options)
    register(view)
import os
import sys
import argparse
import glob
from AdaptivePELE.utilities import utilities
from AdaptivePELE.atomset import atomset
def parseArguments():
    """Parse the command line.

    Returns the selection as a tuple:
    (clusteringObject, trajectory, snapshot, epoch, output path, topology).
    """
    desc = "Write the information related to the conformation network to file\n"
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument("clusteringObject", type=str, help="Path to the clustering object")
    parser.add_argument("epoch", type=str, help="Path to the epoch to search the snapshot")
    parser.add_argument("trajectory", type=int, help="Trajectory number")
    parser.add_argument("snapshot", type=int, help="Snapshot to select (in accepted steps)")
    parser.add_argument("-o", type=str, default=None, help="Output path where to write the files")
    parser.add_argument("-top", type=str, default=None, help="Topology file for non-pdb trajectories")
    parsed = parser.parse_args()
    return parsed.clusteringObject, parsed.trajectory, parsed.snapshot, parsed.epoch, parsed.o, parsed.top
if __name__ == "__main__":
    # Extract the pathway leading to a chosen snapshot and write it as a
    # multi-model PDB, appending the trajectory frames up to that snapshot.
    clusteringObject, trajectory, snapshot, epoch, outputPath, topology = parseArguments()
    if outputPath is not None:
        outputPath = os.path.join(outputPath, "")  # ensure trailing separator
        if not os.path.exists(outputPath):
            os.makedirs(outputPath)
    else:
        outputPath = ""
    if topology is not None:
        topology_contents = utilities.getTopologyFile(topology)
    else:
        topology_contents = None
    sys.stderr.write("Reading clustering object...\n")
    cl = utilities.readClusteringObject(clusteringObject)
    if cl.conformationNetwork is None:
        sys.exit("Clustering object loaded has no conformation network!!")
    conf = cl.conformationNetwork
    filename = glob.glob(epoch+"/*traj*_%d*" % trajectory)
    snapshots = utilities.getSnapshots(filename[0], topology)
    # Keep only the frames up to and including the requested snapshot.
    snapshots = snapshots[:snapshot+1]
    # NOTE(review): ``basestring`` is Python 2 only -- this branch converts
    # non-string (object) frames to PDB strings; confirm Python 2 target.
    if not isinstance(snapshots[0], basestring):
        new_snapshots = []
        # NOTE(review): the loop variable shadows the ``snapshot`` index
        # parsed above; harmless here because the index is no longer used.
        for snapshot in snapshots:
            PDB = atomset.PDB()
            PDB.initialise(snapshot, topology=topology_contents)
            new_snapshots.append(PDB.get_pdb_string())
        snapshots = new_snapshots
    # processorMapping.txt maps trajectory numbers to network leaf nodes.
    procMapping = open(os.path.join(epoch, "processorMapping.txt")).read().rstrip().split(',')
    leaf = procMapping[trajectory-1]
    pathway = conf.createPathwayToCluster(int(leaf))
    cl.writePathwayTrajectory(pathway, outputPath+"pathway.pdb")
    with open(outputPath+"pathway.pdb", "a") as f:
        f.write("ENDMDL\n".join(snapshots))
import _jpype
from . import _jcustomizer
@_jcustomizer.JImplementationFor('java.lang.Thread')
class _JThread(object):
    """ Customizer for ``java.lang.Thread``

    This adds additional JPype methods to java.lang.Thread to support
    Python.  All methods are static and act on the *current* Python thread;
    they forward to the ``_jpype`` C extension.
    """

    @staticmethod
    def isAttached():
        """ Checks if a thread is attached to the JVM.

        Python automatically attaches as daemon threads when a Java method is
        called.  This creates a resource in Java for the Python thread.  This
        method can be used to check if a Python thread is currently attached so
        that it can be disconnected prior to thread termination to prevent
        leaks.

        Returns:
          True if the thread is attached to the JVM, False if the thread is
          not attached or the JVM is not running.
        """
        return _jpype.isThreadAttachedToJVM()

    @staticmethod
    def attach():
        """ Attaches the current thread to the JVM as a user thread.

        User threads that are attached to the JVM will prevent the JVM from
        shutting down until the thread is terminated or detached.  To convert
        a daemon thread to a main thread, the thread must first be detached.

        Raises:
          RuntimeError: If the JVM is not running.
        """
        return _jpype.attachThreadToJVM()

    @staticmethod
    def attachAsDaemon():
        """ Attaches the current thread to the JVM as a daemon.

        Daemon threads act as background tasks and do not prevent the JVM from
        shutdown normally.  JPype automatically attaches any threads that call
        Java resources as daemon threads.  To convert a daemon thread to a user
        thread, the thread must first be detached.

        Raises:
          RuntimeError: If the JVM is not running.
        """
        return _jpype.attachThreadAsDaemon()

    @staticmethod
    def detach():
        """ Detaches a thread from the JVM.

        This function detaches the thread and frees the associated resource in
        the JVM.  For codes making heavy use of threading this should be used
        to prevent resource leaks.  The thread can be reattached, so there
        is no harm in detaching early or more than once.  This method cannot fail
        and there is no harm in calling it when the JVM is not running.
        """
        return _jpype.detachThreadFromJVM()
/Flask_AdminLTE3-1.0.9-py3-none-any.whl/flask_adminlte3/static/plugins/codemirror/mode/xquery/xquery.js |
// CodeMirror mode for XQuery: a stack-based tokenizer that tracks nested
// XML, strings, comments and code blocks via state.stack.
(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

CodeMirror.defineMode("xquery", function() {

  // The keywords object is set to the result of this self executing
  // function. Each keyword is a property of the keywords object whose
  // value is {type: atype, style: astyle}
  var keywords = function(){
    // convenience functions used to build keywords object
    function kw(type) {return {type: type, style: "keyword"};}
    var operator = kw("operator")
      , atom = {type: "atom", style: "atom"}
      , punctuation = {type: "punctuation", style: null}
      , qualifier = {type: "axis_specifier", style: "qualifier"};

    // kwObj is what is return from this function at the end
    var kwObj = {
      ',': punctuation
    };

    // a list of 'basic' keywords. For each add a property to kwObj with the value of
    // {type: basic[i], style: "keyword"} e.g. 'after' --> {type: "after", style: "keyword"}
    var basic = ['after', 'all', 'allowing', 'ancestor', 'ancestor-or-self', 'any', 'array', 'as',
    'ascending', 'at', 'attribute', 'base-uri', 'before', 'boundary-space', 'by', 'case', 'cast',
    'castable', 'catch', 'child', 'collation', 'comment', 'construction', 'contains', 'content',
    'context', 'copy', 'copy-namespaces', 'count', 'decimal-format', 'declare', 'default', 'delete',
    'descendant', 'descendant-or-self', 'descending', 'diacritics', 'different', 'distance',
    'document', 'document-node', 'element', 'else', 'empty', 'empty-sequence', 'encoding', 'end',
    'entire', 'every', 'exactly', 'except', 'external', 'first', 'following', 'following-sibling',
    'for', 'from', 'ftand', 'ftnot', 'ft-option', 'ftor', 'function', 'fuzzy', 'greatest', 'group',
    'if', 'import', 'in', 'inherit', 'insensitive', 'insert', 'instance', 'intersect', 'into',
    'invoke', 'is', 'item', 'language', 'last', 'lax', 'least', 'let', 'levels', 'lowercase', 'map',
    'modify', 'module', 'most', 'namespace', 'next', 'no', 'node', 'nodes', 'no-inherit',
    'no-preserve', 'not', 'occurs', 'of', 'only', 'option', 'order', 'ordered', 'ordering',
    'paragraph', 'paragraphs', 'parent', 'phrase', 'preceding', 'preceding-sibling', 'preserve',
    'previous', 'processing-instruction', 'relationship', 'rename', 'replace', 'return',
    'revalidation', 'same', 'satisfies', 'schema', 'schema-attribute', 'schema-element', 'score',
    'self', 'sensitive', 'sentence', 'sentences', 'sequence', 'skip', 'sliding', 'some', 'stable',
    'start', 'stemming', 'stop', 'strict', 'strip', 'switch', 'text', 'then', 'thesaurus', 'times',
    'to', 'transform', 'treat', 'try', 'tumbling', 'type', 'typeswitch', 'union', 'unordered',
    'update', 'updating', 'uppercase', 'using', 'validate', 'value', 'variable', 'version',
    'weight', 'when', 'where', 'wildcards', 'window', 'with', 'without', 'word', 'words', 'xquery'];
    for(var i=0, l=basic.length; i < l; i++) { kwObj[basic[i]] = kw(basic[i]);};

    // a list of types. For each add a property to kwObj with the value of
    // {type: "atom", style: "atom"}
    var types = ['xs:anyAtomicType', 'xs:anySimpleType', 'xs:anyType', 'xs:anyURI',
    'xs:base64Binary', 'xs:boolean', 'xs:byte', 'xs:date', 'xs:dateTime', 'xs:dateTimeStamp',
    'xs:dayTimeDuration', 'xs:decimal', 'xs:double', 'xs:duration', 'xs:ENTITIES', 'xs:ENTITY',
    'xs:float', 'xs:gDay', 'xs:gMonth', 'xs:gMonthDay', 'xs:gYear', 'xs:gYearMonth', 'xs:hexBinary',
    'xs:ID', 'xs:IDREF', 'xs:IDREFS', 'xs:int', 'xs:integer', 'xs:item', 'xs:java', 'xs:language',
    'xs:long', 'xs:Name', 'xs:NCName', 'xs:negativeInteger', 'xs:NMTOKEN', 'xs:NMTOKENS',
    'xs:nonNegativeInteger', 'xs:nonPositiveInteger', 'xs:normalizedString', 'xs:NOTATION',
    'xs:numeric', 'xs:positiveInteger', 'xs:precisionDecimal', 'xs:QName', 'xs:short', 'xs:string',
    'xs:time', 'xs:token', 'xs:unsignedByte', 'xs:unsignedInt', 'xs:unsignedLong',
    'xs:unsignedShort', 'xs:untyped', 'xs:untypedAtomic', 'xs:yearMonthDuration'];
    for(var i=0, l=types.length; i < l; i++) { kwObj[types[i]] = atom;};

    // each operator will add a property to kwObj with value of {type: "operator", style: "keyword"}
    var operators = ['eq', 'ne', 'lt', 'le', 'gt', 'ge', ':=', '=', '>', '>=', '<', '<=', '.', '|', '?', 'and', 'or', 'div', 'idiv', 'mod', '*', '/', '+', '-'];
    for(var i=0, l=operators.length; i < l; i++) { kwObj[operators[i]] = operator;};

    // each axis_specifiers will add a property to kwObj with value of {type: "axis_specifier", style: "qualifier"}
    var axis_specifiers = ["self::", "attribute::", "child::", "descendant::", "descendant-or-self::", "parent::",
    "ancestor::", "ancestor-or-self::", "following::", "preceding::", "following-sibling::", "preceding-sibling::"];
    for(var i=0, l=axis_specifiers.length; i < l; i++) { kwObj[axis_specifiers[i]] = qualifier; };

    return kwObj;
  }();

  // Switch the active tokenizer to f and immediately run it.
  function chain(stream, state, f) {
    state.tokenize = f;
    return f(stream, state);
  }

  // the primary mode tokenizer
  function tokenBase(stream, state) {
    var ch = stream.next(),
        mightBeFunction = false,
        isEQName = isEQNameAhead(stream);

    // an XML tag (if not in some sub, chained tokenizer)
    if (ch == "<") {
      if(stream.match("!--", true))
        return chain(stream, state, tokenXMLComment);

      if(stream.match("![CDATA", false)) {
        state.tokenize = tokenCDATA;
        return "tag";
      }

      if(stream.match("?", false)) {
        return chain(stream, state, tokenPreProcessing);
      }

      var isclose = stream.eat("/");
      stream.eatSpace();
      var tagName = "", c;
      while ((c = stream.eat(/[^\s\u00a0=<>\"\'\/?]/))) tagName += c;

      return chain(stream, state, tokenTag(tagName, isclose));
    }
    // start code block
    else if(ch == "{") {
      pushStateStack(state, { type: "codeblock"});
      return null;
    }
    // end code block
    else if(ch == "}") {
      popStateStack(state);
      return null;
    }
    // if we're in an XML block
    else if(isInXmlBlock(state)) {
      if(ch == ">")
        return "tag";
      else if(ch == "/" && stream.eat(">")) {
        popStateStack(state);
        return "tag";
      }
      else
        return "variable";
    }
    // if a number
    else if (/\d/.test(ch)) {
      stream.match(/^\d*(?:\.\d*)?(?:E[+\-]?\d+)?/);
      return "atom";
    }
    // comment start
    else if (ch === "(" && stream.eat(":")) {
      pushStateStack(state, { type: "comment"});
      return chain(stream, state, tokenComment);
    }
    // quoted string
    else if (!isEQName && (ch === '"' || ch === "'"))
      return chain(stream, state, tokenString(ch));
    // variable
    else if(ch === "$") {
      return chain(stream, state, tokenVariable);
    }
    // assignment
    else if(ch ===":" && stream.eat("=")) {
      return "keyword";
    }
    // open paren
    else if(ch === "(") {
      pushStateStack(state, { type: "paren"});
      return null;
    }
    // close paren
    else if(ch === ")") {
      popStateStack(state);
      return null;
    }
    // open paren
    else if(ch === "[") {
      pushStateStack(state, { type: "bracket"});
      return null;
    }
    // close paren
    else if(ch === "]") {
      popStateStack(state);
      return null;
    }
    else {
      var known = keywords.propertyIsEnumerable(ch) && keywords[ch];

      // if there's a EQName ahead, consume the rest of the string portion, it's likely a function
      if(isEQName && ch === '\"') while(stream.next() !== '"'){}
      if(isEQName && ch === '\'') while(stream.next() !== '\''){}

      // gobble up a word if the character is not known
      if(!known) stream.eatWhile(/[\w\$_-]/);

      // gobble a colon in the case that is a lib func type call fn:doc
      var foundColon = stream.eat(":");

      // if there's not a second colon, gobble another word. Otherwise, it's probably an axis specifier
      // which should get matched as a keyword
      if(!stream.eat(":") && foundColon) {
        stream.eatWhile(/[\w\$_-]/);
      }
      // if the next non whitespace character is an open paren, this is probably a function (if not a keyword of other sort)
      if(stream.match(/^[ \t]*\(/, false)) {
        mightBeFunction = true;
      }
      // is the word a keyword?
      var word = stream.current();
      known = keywords.propertyIsEnumerable(word) && keywords[word];

      // if we think it's a function call but not yet known,
      // set style to variable for now for lack of something better
      if(mightBeFunction && !known) known = {type: "function_call", style: "variable def"};

      // if the previous word was element, attribute, axis specifier, this word should be the name of that
      if(isInXmlConstructor(state)) {
        popStateStack(state);
        return "variable";
      }

      // as previously checked, if the word is element,attribute, axis specifier, call it an "xmlconstructor" and
      // push the stack so we know to look for it on the next word
      if(word == "element" || word == "attribute" || known.type == "axis_specifier") pushStateStack(state, {type: "xmlconstructor"});

      // if the word is known, return the details of that else just call this a generic 'word'
      return known ? known.style : "variable";
    }
  }

  // handle comments, including nested
  function tokenComment(stream, state) {
    var maybeEnd = false, maybeNested = false, nestedCount = 0, ch;
    while (ch = stream.next()) {
      if (ch == ")" && maybeEnd) {
        if(nestedCount > 0)
          nestedCount--;
        else {
          popStateStack(state);
          break;
        }
      }
      else if(ch == ":" && maybeNested) {
        nestedCount++;
      }
      maybeEnd = (ch == ":");
      maybeNested = (ch == "(");
    }

    return "comment";
  }

  // tokenizer for string literals
  // optionally pass a tokenizer function to set state.tokenize back to when finished
  function tokenString(quote, f) {
    return function(stream, state) {
      var ch;

      if(isInString(state) && stream.current() == quote) {
        popStateStack(state);
        if(f) state.tokenize = f;
        return "string";
      }

      pushStateStack(state, { type: "string", name: quote, tokenize: tokenString(quote, f) });

      // if we're in a string and in an XML block, allow an embedded code block
      if(stream.match("{", false) && isInXmlAttributeBlock(state)) {
        state.tokenize = tokenBase;
        return "string";
      }

      while (ch = stream.next()) {
        if (ch ==  quote) {
          popStateStack(state);
          if(f) state.tokenize = f;
          break;
        }
        else {
          // if we're in a string and in an XML block, allow an embedded code block in an attribute
          if(stream.match("{", false) && isInXmlAttributeBlock(state)) {
            state.tokenize = tokenBase;
            return "string";
          }

        }
      }

      return "string";
    };
  }

  // tokenizer for variables
  function tokenVariable(stream, state) {
    var isVariableChar = /[\w\$_-]/;

    // a variable may start with a quoted EQName so if the next character is quote, consume to the next quote
    if(stream.eat("\"")) {
      while(stream.next() !== '\"'){};
      stream.eat(":");
    } else {
      stream.eatWhile(isVariableChar);
      if(!stream.match(":=", false)) stream.eat(":");
    }
    stream.eatWhile(isVariableChar);
    state.tokenize = tokenBase;
    return "variable";
  }

  // tokenizer for XML tags
  function tokenTag(name, isclose) {
    return function(stream, state) {
      stream.eatSpace();
      if(isclose && stream.eat(">")) {
        popStateStack(state);
        state.tokenize = tokenBase;
        return "tag";
      }
      // self closing tag without attributes?
      if(!stream.eat("/"))
        pushStateStack(state, { type: "tag", name: name, tokenize: tokenBase});
      if(!stream.eat(">")) {
        state.tokenize = tokenAttribute;
        return "tag";
      }
      else {
        state.tokenize = tokenBase;
      }

      return "tag";
    };
  }

  // tokenizer for XML attributes
  function tokenAttribute(stream, state) {
    var ch = stream.next();

    if(ch == "/" && stream.eat(">")) {
      if(isInXmlAttributeBlock(state)) popStateStack(state);
      if(isInXmlBlock(state)) popStateStack(state);
      return "tag";
    }
    if(ch == ">") {
      if(isInXmlAttributeBlock(state)) popStateStack(state);
      return "tag";
    }
    if(ch == "=")
      return null;
    // quoted string
    if (ch == '"' || ch == "'")
      return chain(stream, state, tokenString(ch, tokenAttribute));

    if(!isInXmlAttributeBlock(state))
      pushStateStack(state, { type: "attribute", tokenize: tokenAttribute});

    stream.eat(/[a-zA-Z_:]/);
    stream.eatWhile(/[-a-zA-Z0-9_:.]/);
    stream.eatSpace();

    // the case where the attribute has not value and the tag was closed
    if(stream.match(">", false) || stream.match("/", false)) {
      popStateStack(state);
      state.tokenize = tokenBase;
    }

    return "attribute";
  }

  // handle comments, including nested
  function tokenXMLComment(stream, state) {
    var ch;
    while (ch = stream.next()) {
      if (ch == "-" && stream.match("->", true)) {
        state.tokenize = tokenBase;
        return "comment";
      }
    }
  }


  // handle CDATA
  function tokenCDATA(stream, state) {
    var ch;
    while (ch = stream.next()) {
      if (ch == "]" && stream.match("]", true)) {
        state.tokenize = tokenBase;
        return "comment";
      }
    }
  }

  // handle preprocessing instructions
  function tokenPreProcessing(stream, state) {
    var ch;
    while (ch = stream.next()) {
      if (ch == "?" && stream.match(">", true)) {
        state.tokenize = tokenBase;
        return "comment meta";
      }
    }
  }


  // functions to test the current context of the state
  function isInXmlBlock(state) { return isIn(state, "tag"); }
  function isInXmlAttributeBlock(state) { return isIn(state, "attribute"); }
  function isInXmlConstructor(state) { return isIn(state, "xmlconstructor"); }
  function isInString(state) { return isIn(state, "string"); }

  // Looks ahead for a quoted EQName of the form "uri":local or 'uri':local.
  function isEQNameAhead(stream) {
    // assume we've already eaten a quote (")
    if(stream.current() === '"')
      return stream.match(/^[^\"]+\"\:/, false);
    else if(stream.current() === '\'')
      return stream.match(/^[^\"]+\'\:/, false);
    else
      return false;
  }

  function isIn(state, type) {
    return (state.stack.length && state.stack[state.stack.length - 1].type == type);
  }

  function pushStateStack(state, newState) {
    state.stack.push(newState);
  }

  // Pop the context stack and restore the tokenizer the enclosing context
  // remembered, falling back to the base tokenizer.
  function popStateStack(state) {
    state.stack.pop();
    var reinstateTokenize = state.stack.length && state.stack[state.stack.length-1].tokenize;
    state.tokenize = reinstateTokenize || tokenBase;
  }

  // the interface for the mode API
  return {
    startState: function() {
      return {
        tokenize: tokenBase,
        cc: [],
        stack: []
      };
    },

    token: function(stream, state) {
      if (stream.eatSpace()) return null;
      var style = state.tokenize(stream, state);
      return style;
    },

    blockCommentStart: "(:",
    blockCommentEnd: ":)"

  };

});

CodeMirror.defineMIME("application/xquery", "xquery");

});
/FEV_KEGG-1.1.4.tar.gz/FEV_KEGG-1.1.4/FEV_KEGG/Experiments/31.py | from FEV_KEGG.Graph.Elements import EcNumber
from FEV_KEGG.Evolution.Taxonomy import NCBI
from FEV_KEGG.KEGG import Organism
if __name__ == '__main__':
output = ['Maj. %\tothers\tboth\tours']
#- extract EC numbers from Poot-Hernandez et al. (2015) by hand, any which are marked as blue (preserved)
theirECnumberStrings = ['1.1.1.-', '1.1.1.100', '1.1.1.157', '1.1.1.158', '1.1.1.193', '1.1.1.205', '1.1.1.22', '1.1.1.267', '1.1.1.29', '1.1.1.30', '1.1.1.35', '1.1.1.36', '1.1.1.37', '1.1.1.41', '1.1.1.42', '1.1.1.43', '1.1.1.44', '1.1.1.49', '1.1.1.85', '1.1.1.86', '1.1.1.94', '1.17.1.2', '1.17.4.1', '1.17.4.2', '1.17.7.1', '1.2.1.-', '1.2.1.11', '1.2.1.12', '1.2.1.41', '1.2.1.70', '1.2.4.1', '1.2.4.2', '1.2.4.4', '1.3.1.12', '1.3.1.26', '1.3.3.1', '1.3.99.1', '1.4.3.16', '1.5.1.20', '1.5.1.3', '1.5.1.5', '1.8.1.4', '1.8.1.7', '1.8.1.9', '2.-.-.-', '2.1.1.45', '2.1.2.1', '2.1.2.2', '2.1.2.3', '2.1.2.9', '2.1.3.2', '2.2.1.6', '2.2.1.7', '2.3.1.-', '2.3.1.1', '2.3.1.109', '2.3.1.117', '2.3.1.12', '2.3.1.129', '2.3.1.15', '2.3.1.157', '2.3.1.16', '2.3.1.179', '2.3.1.180', '2.3.1.181', '2.3.1.39', '2.3.1.40', '2.3.1.41', '2.3.1.47', '2.3.1.51', '2.3.1.54', '2.3.1.61', '2.3.1.8', '2.3.1.89', '2.3.1.9', '2.3.3.1', '2.3.3.13', '2.4.1.182', '2.4.1.227', '2.4.2.1', '2.4.2.10', '2.4.2.11', '2.4.2.14', '2.4.2.17', '2.4.2.19', '2.4.2.2', '2.4.2.3', '2.4.2.4', '2.4.2.7', '2.5.1.-', '2.5.1.1', '2.5.1.10', '2.5.1.15', '2.5.1.3', '2.5.1.30', '2.5.1.31', '2.5.1.47', '2.5.1.48', '2.5.1.49', '2.5.1.55', '2.5.1.61', '2.5.1.7', '2.5.1.78', '2.5.1.9', '2.5.1.90', '2.6.1.-', '2.6.1.1', '2.6.1.11', '2.6.1.16', '2.6.1.17', '2.6.1.42', '2.6.1.62', '2.6.1.66', '2.6.1.85', '2.6.1.9', '2.7.1.130', '2.7.1.148', '2.7.1.23', '2.7.1.24', '2.7.1.26', '2.7.1.33', '2.7.1.40', '2.7.2.-', '2.7.2.11', '2.7.2.3', '2.7.2.4', '2.7.4.14', '2.7.4.16', '2.7.4.22', '2.7.4.3', '2.7.4.6', '2.7.4.7', '2.7.4.8', '2.7.4.9', '2.7.6.1', '2.7.6.3', '2.7.7.1', '2.7.7.18', '2.7.7.3', '2.7.7.38', '2.7.7.41', '2.7.7.6', '2.7.7.60', '2.7.7.7', '2.7.7.8', '2.7.8.-', '2.7.8.13', '2.7.8.24', '2.7.8.5', '2.7.8.7', '2.7.8.8', '2.8.1.6', '2.8.1.8', '3.1.3.27', '3.1.3.45', '3.1.3.5', '3.1.3.6', '3.5.1.-', '3.5.1.18', '3.5.1.47', '3.5.2.3', '3.5.4.19', '3.5.4.25', '3.6.1.-', '3.6.1.11', '3.6.1.13', '3.6.1.15', '3.6.1.17', 
'3.6.1.19', '3.6.1.27', '3.6.1.40', '3.6.1.41', '3.6.1.5', '4.1.1.11', '4.1.1.20', '4.1.1.23', '4.1.1.31', '4.1.1.36', '4.1.1.37', '4.1.1.49', '4.1.1.65', '4.1.3.38', '4.2.1.-', '4.2.1.11', '4.2.1.17', '4.2.1.18', '4.2.1.2', '4.2.1.20', '4.2.1.22', '4.2.1.24', '4.2.1.3', '4.2.1.35', '4.2.1.51', '4.2.1.52', '4.2.1.55', '4.2.1.60', '4.2.1.75', '4.2.1.9', '4.2.3.1', '4.3.1.17', '4.3.1.19', '4.3.2.1', '4.3.2.2', '4.6.1.12', '5.1.1.1', '5.1.1.3', '5.1.1.7', '5.3.1.1', '5.3.1.16', '5.3.1.9', '5.4.2.1', '5.4.2.10', '5.4.2.2', '5.4.2.7', '5.4.3.8', '5.4.99.18', '6.1.1.10', '6.1.1.11', '6.1.1.17', '6.1.1.4', '6.1.1.5', '6.1.1.9', '6.2.1.1', '6.2.1.5', '6.3.2.1', '6.3.2.10', '6.3.2.12', '6.3.2.13', '6.3.2.2', '6.3.2.3', '6.3.2.4', '6.3.2.6', '6.3.2.8', '6.3.2.9', '6.3.3.1', '6.3.3.3', '6.3.4.13', '6.3.4.14', '6.3.4.15', '6.3.4.18', '6.3.4.2', '6.3.4.4', '6.3.4.5', '6.3.5.2', '6.3.5.3', '6.3.5.4', '6.3.5.5', '6.4.1.2']
theirECnumbers = set()
for string in theirECnumberStrings:
theirECnumbers.add( EcNumber(string) )
#- remove outdated EC numbers
outdatedEcNumberStrings = ['1.1.1.158', '1.17.1.2', '1.17.4.2', '1.3.1.26', '1.3.3.1', '1.3.99.1', '2.3.1.89', '2.4.2.11', '2.7.4.14', '3.5.1.47', '3.6.1.15', '3.6.1.19', '4.2.1.52', '4.2.1.60', '5.4.2.1']
outdatedEcNumbers = set()
for string in outdatedEcNumberStrings:
outdatedEcNumbers.add( EcNumber(string) )
theirECnumbers.difference_update( outdatedEcNumbers )
#- reduce set of EC numbers to first three levels
theirECnumbers = EcNumber.insertWildcards(theirECnumbers, keepLevels = 3, allowHigherWildcards = False)
taxonomy = NCBI.getTaxonomy()
#- REPEAT with different groups
for i in range(1, 3):
if i == 1:
#- 1. get group of organisms deemed representative by Poot-Hernandez et al.
representativeOrganisms = ['abo', 'aci', 'aeh', 'aha', 'bcc', 'bci', 'bfl', 'bpn', 'buc', 'cbu', 'cps', 'crp', 'csa', 'eco', 'ftu', 'hch', 'hdu', 'hha', 'ilo', 'lpn', 'maq', 'mca', 'msu', 'noc', 'pat', 'pcr', 'pfl', 'pha', 'pin', 'plu', 'ppr', 'rma', 'saz', 'sde', 'sdn', 'shm', 'tcx', 'vfi', 'vvu', 'xca']
organisms = representativeOrganisms
output.append( 'Representative:' )
elif i == 2:
#- 2. get group of organisms 'Gammaproteobacteria', excluding unclassified
organisms = taxonomy.getOrganismAbbreviationsByPath('Gammaproteobacteria', exceptPaths='unclassified', oneOrganismPerSpecies=False)
output.append( '\nGammaproteobacteria without unclassified:' )
group = Organism.Group( organisms )
#- REPEAT for varying majority-percentages:
for percentage in [100, 90, 80, 70, 60, 50, 40, 30, 20, 10 , 1]:
#- calculate EC numbers occuring in group's core metabolism
ourECnumbers = group.majorityEcGraph(majorityPercentage = percentage, noMultifunctional = False).getECs()
#- reduce set of EC numbers to first three levels
ourECnumbers = EcNumber.insertWildcards(ourECnumbers, keepLevels = 3, allowHigherWildcards = False)
#- overlap Poot-Hernandez' set with ours and print amount of EC numbers inside the intersection and falling off either side
onlyInTheirs = theirECnumbers.difference( ourECnumbers )
inBoth = theirECnumbers.intersection( ourECnumbers )
onlyInOurs = ourECnumbers.difference( theirECnumbers )
output.append(str(percentage) + '%:\t' + str(len(onlyInTheirs)) + '\t' + str(len(inBoth)) + '\t' + str(len(onlyInOurs)) )
for line in output:
print(line) | PypiClean |
/Munin-0.2.4.tar.gz/Munin-0.2.4/munin/utilities/html_embeddings.py |
__author__ = "Christian Heider Nielsen"
__doc__ = """
Created on 27/04/2019
@author: cnheider
"""
from collections import namedtuple
from io import BytesIO, StringIO
from typing import Any, Sequence, Tuple
from matplotlib import pyplot
from matplotlib.figure import Figure
from warg.data_structures.named_ordered_dictionary import NOD
# One row of the metrics table: human-readable description, rendered math
# formula (HTML), per-class values, and the value aggregated over all classes.
MetricEntry = namedtuple("MetricEntry", ("Description", "Math", "Values", "Aggregated"))
def generate_metric_table(
    truths: Sequence, predictions: Sequence, categories: Sequence, decimals: int = 1
) -> Sequence[MetricEntry]:
    """Build a table of per-class classification metrics from a confusion matrix.

    :param truths: ground-truth labels, one per sample
    :param predictions: predicted labels, one per sample
    :param categories: class names used to relabel the confusion matrix
    :param decimals: number of decimals values are rounded to
    :return: flat (name, MetricEntry) tuples suitable for templating
    """
    import numpy
    from pycm import ConfusionMatrix

    cm = ConfusionMatrix(actual_vector=truths, predict_vector=predictions)
    cm.relabel({k: v for k, v in zip(range(len(categories)), categories)})

    def _entry(description: str, formula: str, per_class: dict) -> MetricEntry:
        # Round every class value; aggregate as the plain mean over all classes.
        return MetricEntry(
            description,
            generate_math_html(formula),
            {k: numpy.round(v, decimals) for k, v in per_class.items()},
            numpy.round(sum(per_class.values()) / len(categories), decimals),
        )

    support = _entry("Occurrence of each class (P)", r"TP+FN", cm.P)
    sensitivity = _entry("True Positive Rate (TPR)", r"\dfrac{TP}{TP+FN}", cm.TPR)
    specificity = _entry("True Negative Rate (TNR)", r"\dfrac{TN}{TN+FP}", cm.TNR)
    precision = _entry("Positive Predictive Rate (PPV)", r"\dfrac{TP}{TP+FP}", cm.PPV)
    # BUG FIX: the NPV formula previously duplicated the PPV formula
    # (TP/(TP+FP)); negative predictive value is defined as TN/(TN+FN).
    npv = _entry("Negative Predictive Value (NPV)", r"\dfrac{TN}{TN+FN}", cm.NPV)
    accuracy = _entry("Trueness", r"\dfrac{TP+TN}{TP+TN+FP+FN}", cm.ACC)
    f1_score = _entry(
        "Harmonic mean of precision and sensitivity",
        r"2*\dfrac{PPV*TPR}{PPV+TPR}",
        cm.F1,
    )
    mcc = _entry(
        "Matthews correlation coefficient",
        r"\dfrac{TP*TN-FP*FN}{\sqrt{(TP+FP)(TP+FN)(TN+FP)(TN+FN)}}",
        cm.MCC,
    )
    roc_auc = _entry(
        "Receiver Operating Characteristics (ROC), "
        "Sensitivity vs (1 − Specificity), "
        "(True Positive Rate vs False Positive Rate), "
        "Area Under the Curve (AUC)",
        r"\dfrac{TNR+TPR}{2}",
        cm.AUC,
    )

    return NOD.nod_of(
        support, sensitivity, specificity, precision, npv, accuracy, f1_score, mcc, roc_auc,
    ).as_flat_tuples()
def generate_math_html(equation: str = "e^x", inline: bool = True, html_classes: str = "math_span") -> str:
    r"""Render a TeX *equation* to MathJax-ready HTML via python-markdown-math.

    For inline math, use \(...\).
    For standalone math, use $$...$$, \[...\] or \begin...\end.

    :param html_classes: CSS class(es) placed on the wrapping <span> (inline only)
    :param equation: TeX source of the equation
    :param inline: wrap as inline math in a span instead of display math
    :return: HTML fragment
    """
    import markdown

    md = markdown.Markdown(extensions=["mdx_math"], extension_configs={"mdx_math": {"add_preview": True}})
    if inline:
        converted = md.convert(f"\\({equation}\\)")
        # BUG FIX: str.lstrip/str.rstrip strip *character sets*, not literal
        # prefixes/suffixes, so lstrip("<p>")/rstrip("</p>") could also eat
        # leading/trailing '<', 'p' or '>' characters that belong to the
        # content.  Remove the wrapping <p>...</p> tags explicitly instead.
        if converted.startswith("<p>"):
            converted = converted[len("<p>"):]
        if converted.endswith("</p>"):
            converted = converted[: -len("</p>")]
        # BUG FIX: the stripped markup was previously interpolated as
        # "<{stripped}>", wrapping it in an extra, invalid pair of angle
        # brackets and producing malformed HTML.
        return f'<span class="{html_classes}">{converted}</span>'
    return md.convert(f"$${equation}$$")
def generate_qr(data: Any) -> str:
    """Encode *data* as a QR code and return the rendered PNG as base64 text.

    :param data: payload to encode in the QR code
    :return: ASCII base64 encoding of the PNG image
    """
    import base64
    import io

    import pyqrcode

    qr_code = pyqrcode.create(data)
    png_buffer = io.BytesIO()
    qr_code.png(png_buffer, scale=6)
    return base64.b64encode(png_buffer.getvalue()).decode("ascii")
def plt_html_svg(fig: Figure = None, *, size: Tuple[int, int] = (400, 400), dpi: int = 100) -> str:
    """Render a matplotlib figure to an inline HTML <svg> fragment.

    If no figure is supplied, pyplot's current (latest) figure is used.

    :param fig: figure to render, or None for pyplot's current figure
    :param size: (width, height) attributes written on the <svg> element
    :param dpi: render resolution
    :return: an <svg> HTML fragment
    """
    svg_buffer = StringIO()
    target = pyplot if fig is None else fig
    target.savefig(svg_buffer, format="svg", dpi=dpi)
    # Re-use everything after the opening "<svg" tag but substitute our own
    # width/height attributes.
    return f'<svg width="{size[0]}" height="{size[1]}" {svg_buffer.getvalue().split("<svg")[1]}'
def plt_html(
    fig: Figure = None,
    *,
    title: str = "image",
    format: str = "png",
    size: Tuple[int, int] = (400, 400),
    dpi: int = 100,
):
    """Render a matplotlib figure to an inline HTML <img> (or <svg>) fragment.

    If no figure is supplied, pyplot's current (latest) figure is used.

    :param fig: figure to render, or None for pyplot's current figure
    :param title: alt text of the emitted <img> element
    :param format: raster format understood by matplotlib ("png", "jpg", ...)
        or "svg" for vector output
    :param size: (width, height) attributes of the emitted element
    :param dpi: render resolution
    :return: an HTML fragment embedding the image as a data URI
    """
    if format == "svg":
        return plt_html_svg(fig, size=size, dpi=dpi)

    import base64

    image_buffer = BytesIO()
    target = pyplot if fig is None else fig
    target.savefig(image_buffer, format=format, dpi=dpi)
    image_buffer.seek(0)  # rewind to the beginning of the in-memory file
    encoded = base64.b64encode(image_buffer.getvalue()).decode("ascii")
    return (
        f'<img width="{size[0]}" height="{size[1]}" '
        f'src="data:image/{format};base64,{encoded}" '
        f'alt="{title}"/><br>'
    )
/Glashammer-0.2.1.tar.gz/Glashammer-0.2.1/glashammer/utils/events.py | # Events
from collections import deque
from glashammer.utils.local import local
from glashammer.utils.log import debug
def emit_event(event, *args, **kwargs):
    """Emit *event* on the current application and collect listener results.

    Returns the list of return values of every connected listener, in
    registration order.
    """
    app = local.application
    # The 'log' event is not debug-logged itself — presumably to avoid
    # recursive/noisy logging; confirm against the log module.
    if event != 'log':
        debug('Emit: %s (%s)' % (event, ', '.join(map(repr, args))))
    listeners = app.events.iter(event)
    return [listener(*args, **kwargs) for listener in listeners]
class EventManager(object):
    """Registry of event listeners plus dispatch helpers.

    This is *not* a public interface.  Always use the `emit_event` or
    `iter_listeners` functions to access it or the `connect_event` or
    `disconnect_event` methods on the application.
    """

    def __init__(self, app):
        self.app = app
        self._listeners = {}       # event name -> deque of callbacks
        self._last_listener = 0    # monotonically increasing listener id

    def connect(self, event, callback, position='after'):
        """Register *callback* as a listener for *event*.

        ``position`` decides whether the callback runs before or after the
        listeners already registered.  Returns an id for the listener.
        """
        assert position in ('before', 'after'), 'invalid position'
        listener_id = self._last_listener
        event = intern(event)
        queue = self._listeners.get(event)
        if queue is None:
            self._listeners[event] = deque([callback])
        elif position == 'before':
            queue.appendleft(callback)
        else:
            queue.append(callback)
        self._last_listener += 1
        return listener_id

    def iter(self, event):
        """Iterate over all listeners registered for *event*."""
        queue = self._listeners.get(event)
        if queue is None:
            return iter(())
        return iter(queue)

    def template_emit(self, event, *args, **kwargs):
        """Emit *event* and wrap all non-None results for template use."""
        raw_results = (listener(*args, **kwargs) for listener in self.iter(event))
        return TemplateEventResult([rv for rv in raw_results if rv is not None])
class TemplateEventResult(list):
    """A list subclass for results returned by the event listener that
    concatenates the results if converted to string, otherwise it works
    exactly like any other list.
    """

    def __init__(self, items):
        list.__init__(self, items)

    def __unicode__(self):
        # Python 2: string form is the concatenation of every item's
        # unicode form, in listener order.
        return u''.join(map(unicode, self))

    def __str__(self):
        # Python 2: byte-string form is the UTF-8 encoding of the unicode form.
        return unicode(self).encode('utf-8')
/FastFlask-1.2.32-py3-none-any.whl/click/_winconsole.py | import io
import sys
import time
import typing as t
from ctypes import byref
from ctypes import c_char
from ctypes import c_char_p
from ctypes import c_int
from ctypes import c_ssize_t
from ctypes import c_ulong
from ctypes import c_void_p
from ctypes import POINTER
from ctypes import py_object
from ctypes import Structure
from ctypes.wintypes import DWORD
from ctypes.wintypes import HANDLE
from ctypes.wintypes import LPCWSTR
from ctypes.wintypes import LPWSTR
from ._compat import _NonClosingTextIOWrapper
assert sys.platform == "win32"
import msvcrt # noqa: E402
from ctypes import windll # noqa: E402
from ctypes import WINFUNCTYPE # noqa: E402
c_ssize_p = POINTER(c_ssize_t)

# Win32 console API entry points, resolved from kernel32/shell32 via ctypes.
kernel32 = windll.kernel32
GetStdHandle = kernel32.GetStdHandle
ReadConsoleW = kernel32.ReadConsoleW
WriteConsoleW = kernel32.WriteConsoleW
GetConsoleMode = kernel32.GetConsoleMode
GetLastError = kernel32.GetLastError
GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
    ("CommandLineToArgvW", windll.shell32)
)
LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32))

# Standard console handles (Win32 STD_INPUT/OUTPUT/ERROR_HANDLE constants).
STDIN_HANDLE = GetStdHandle(-10)
STDOUT_HANDLE = GetStdHandle(-11)
STDERR_HANDLE = GetStdHandle(-12)

# CPython buffer-protocol flags (mirrors PyBUF_* from the C API).
PyBUF_SIMPLE = 0
PyBUF_WRITABLE = 1

# Win32 error codes handled explicitly below.
ERROR_SUCCESS = 0
ERROR_NOT_ENOUGH_MEMORY = 8
ERROR_OPERATION_ABORTED = 995

STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2

EOF = b"\x1a"  # Ctrl-Z: end-of-input marker on Windows consoles
MAX_BYTES_WRITTEN = 32767  # cap on bytes passed per WriteConsoleW call
try:
    from ctypes import pythonapi
except ImportError:
    # On PyPy we cannot get buffers so our ability to operate here is
    # severely limited.
    get_buffer = None
else:

    class Py_buffer(Structure):
        # ctypes mirror of CPython's Py_buffer struct (C-API buffer protocol).
        _fields_ = [
            ("buf", c_void_p),
            ("obj", py_object),
            ("len", c_ssize_t),
            ("itemsize", c_ssize_t),
            ("readonly", c_int),
            ("ndim", c_int),
            ("format", c_char_p),
            ("shape", c_ssize_p),
            ("strides", c_ssize_p),
            ("suboffsets", c_ssize_p),
            ("internal", c_void_p),
        ]

    PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
    PyBuffer_Release = pythonapi.PyBuffer_Release

    def get_buffer(obj, writable=False):
        # Expose *obj*'s underlying memory as a ctypes char array so it can
        # be handed directly to the Win32 console API.
        # NOTE(review): the Py_buffer view is released in the finally block
        # before the returned array is used by the caller — this appears to
        # rely on *obj* staying alive at the call sites; confirm.
        buf = Py_buffer()
        flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
        PyObject_GetBuffer(py_object(obj), byref(buf), flags)

        try:
            buffer_type = c_char * buf.len
            return buffer_type.from_address(buf.buf)
        finally:
            PyBuffer_Release(byref(buf))
class _WindowsConsoleRawIOBase(io.RawIOBase):
    """Base for raw streams that talk to a Win32 console handle directly."""

    def __init__(self, handle):
        # Raw Win32 console handle used by ReadConsoleW/WriteConsoleW.
        self.handle = handle

    def isatty(self):
        # Call the base implementation for its closed-stream check
        # (io.RawIOBase.isatty raises ValueError on a closed stream),
        # then report True: this stream is by definition a console.
        super().isatty()
        return True
class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
    """Raw reader that pulls UTF-16-LE data from the console via ReadConsoleW."""

    def readable(self):
        return True

    def readinto(self, b):
        # Fill writable buffer *b* with console input; returns bytes read.
        bytes_to_be_read = len(b)
        if not bytes_to_be_read:
            return 0
        elif bytes_to_be_read % 2:
            # The console delivers UTF-16-LE, i.e. 2-byte code units.
            raise ValueError(
                "cannot read odd number of bytes from UTF-16-LE encoded console"
            )

        buffer = get_buffer(b, writable=True)
        code_units_to_be_read = bytes_to_be_read // 2
        code_units_read = c_ulong()

        rv = ReadConsoleW(
            HANDLE(self.handle),
            buffer,
            code_units_to_be_read,
            byref(code_units_read),
            None,
        )
        if GetLastError() == ERROR_OPERATION_ABORTED:
            # The read was aborted (e.g. Ctrl-C); wait for KeyboardInterrupt
            time.sleep(0.1)

        if not rv:
            raise OSError(f"Windows error: {GetLastError()}")

        if buffer[0] == EOF:
            # Leading Ctrl-Z means end of input stream.
            return 0

        return 2 * code_units_read.value
class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
    """Raw writer that pushes UTF-16-LE data to the console via WriteConsoleW."""

    def writable(self):
        return True

    @staticmethod
    def _get_error_message(errno):
        # Human-readable name for the Win32 error codes handled here.
        if errno == ERROR_SUCCESS:
            return "ERROR_SUCCESS"
        elif errno == ERROR_NOT_ENOUGH_MEMORY:
            return "ERROR_NOT_ENOUGH_MEMORY"
        return f"Windows error {errno}"

    def write(self, b):
        # Write bytes *b* (UTF-16-LE) to the console; may write fewer bytes
        # than requested — the buffered layer above re-issues the remainder,
        # per the io.RawIOBase contract.
        bytes_to_be_written = len(b)
        buf = get_buffer(b)
        # Cap each call at MAX_BYTES_WRITTEN bytes (// 2 converts to
        # UTF-16 code units for WriteConsoleW).
        code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
        code_units_written = c_ulong()

        WriteConsoleW(
            HANDLE(self.handle),
            buf,
            code_units_to_be_written,
            byref(code_units_written),
            None,
        )
        bytes_written = 2 * code_units_written.value

        if bytes_written == 0 and bytes_to_be_written > 0:
            raise OSError(self._get_error_message(GetLastError()))

        return bytes_written
class ConsoleStream:
    """Text stream that transparently accepts both ``str`` and ``bytes``.

    Strings are written to the wrapped text stream; bytes bypass it and go
    straight to the underlying binary buffer (after flushing pending text so
    output ordering is preserved).  All other attribute access is delegated
    to the text stream.
    """

    def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None:
        self._text_stream = text_stream
        self.buffer = byte_stream

    @property
    def name(self) -> str:
        return self.buffer.name

    def write(self, x: t.AnyStr) -> int:
        if not isinstance(x, str):
            # Raw bytes: flush buffered text first (best effort), then write
            # directly to the binary buffer.
            try:
                self.flush()
            except Exception:
                pass
            return self.buffer.write(x)
        return self._text_stream.write(x)

    def writelines(self, lines: t.Iterable[t.AnyStr]) -> None:
        for chunk in lines:
            self.write(chunk)

    def __getattr__(self, name: str) -> t.Any:
        # Everything not defined here behaves like the text stream.
        return getattr(self._text_stream, name)

    def isatty(self) -> bool:
        return self.buffer.isatty()

    def __repr__(self):
        return f"<ConsoleStream name={self.name!r} encoding={self.encoding!r}>"
def _wrap_console(buffered: t.IO[bytes], buffer_stream: t.BinaryIO) -> t.TextIO:
    """Put a UTF-16-LE text layer over *buffered* and pair it with
    *buffer_stream* in a :class:`ConsoleStream`.

    All three standard streams share this construction; only the raw console
    reader/writer differs.  (Previously the code was triplicated verbatim.)
    """
    text_stream = _NonClosingTextIOWrapper(
        buffered,
        "utf-16-le",
        "strict",
        line_buffering=True,
    )
    return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))


def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO:
    """Console-backed replacement for sys.stdin."""
    return _wrap_console(
        io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), buffer_stream
    )


def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO:
    """Console-backed replacement for sys.stdout."""
    return _wrap_console(
        io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), buffer_stream
    )


def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO:
    """Console-backed replacement for sys.stderr."""
    return _wrap_console(
        io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), buffer_stream
    )


# Map OS file descriptor -> factory producing its console-backed text stream.
_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = {
    0: _get_text_stdin,
    1: _get_text_stdout,
    2: _get_text_stderr,
}
def _is_console(f: t.TextIO) -> bool:
    """Return True if *f* is backed by a real Windows console handle."""
    if not hasattr(f, "fileno"):
        return False

    try:
        fileno = f.fileno()
    except (OSError, io.UnsupportedOperation):
        return False

    handle = msvcrt.get_osfhandle(fileno)
    # GetConsoleMode succeeds only for genuine console handles, not for
    # pipes or files the stream may have been redirected to.
    return bool(GetConsoleMode(handle, byref(DWORD())))
def _get_windows_console_stream(
    f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str]
) -> t.Optional[t.TextIO]:
    """Return a console-backed replacement stream for *f*, or None.

    A replacement is only produced when buffers are obtainable (CPython),
    no custom encoding/errors were forced (the console speaks UTF-16-LE
    strict), and *f* is attached to one of the standard console handles.
    """
    if (
        get_buffer is not None
        and encoding in {"utf-16-le", None}
        and errors in {"strict", None}
        and _is_console(f)
    ):
        func = _stream_factories.get(f.fileno())
        if func is not None:
            b = getattr(f, "buffer", None)

            if b is None:
                return None

            return func(b)
/DLTA-AI-1.1.tar.gz/DLTA-AI-1.1/DLTA_AI_app/trackers/strongsort/deep/models/resnet_ibn_b.py | from __future__ import division, absolute_import
import math
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
__all__ = ['resnet50_ibn_b']
# ImageNet-pretrained weights of the plain torchvision ResNets; layers whose
# names and shapes match are reused to initialise the IBN variants below.
model_urls = {
    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def conv3x3(in_planes, out_planes, stride=1):
    """3x3 convolution with padding=1 (bias-free; a norm layer follows)."""
    return nn.Conv2d(
        in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False
    )


class BasicBlock(nn.Module):
    """Standard two-conv residual block (ResNet-18/34 style)."""

    expansion = 1  # output channels == planes * expansion

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Project the skip connection when channel count/stride changed.
        identity = x if self.downsample is None else self.downsample(x)
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += identity
        return self.relu(out)
class Bottleneck(nn.Module):
    """1x1 -> 3x3 -> 1x1 bottleneck residual block, optionally followed by
    instance normalisation after the skip-add (the "b" flavour of IBN-Net).
    """

    expansion = 4  # output channels == planes * expansion

    def __init__(self, inplanes, planes, stride=1, downsample=None, IN=False):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(
            planes, planes, kernel_size=3, stride=stride, padding=1, bias=False
        )
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * self.expansion)
        # IBN-b: instance norm applied to the block output, after the add.
        self.IN = nn.InstanceNorm2d(planes * 4, affine=True) if IN else None
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Project the skip connection when channel count/stride changed.
        identity = x if self.downsample is None else self.downsample(x)
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        out += identity
        if self.IN is not None:
            out = self.IN(out)
        return self.relu(out)
class ResNet(nn.Module):
    """Residual network + IBN layer (variant "b": instance norm on the stem
    and after the shallow stages).

    Reference:
        - He et al. Deep Residual Learning for Image Recognition. CVPR 2016.
        - Pan et al. Two at Once: Enhancing Learning and Generalization
          Capacities via IBN-Net. ECCV 2018.
    """

    def __init__(
        self,
        block,
        layers,
        num_classes=1000,
        loss='softmax',
        fc_dims=None,
        dropout_p=None,
        **kwargs
    ):
        # block: residual block class (Bottleneck/BasicBlock)
        # layers: number of blocks in each of the four stages
        # loss: 'softmax' or 'triplet'; controls what forward() returns
        # fc_dims: optional dims of extra fc layers before the classifier
        # dropout_p: dropout probability inside those fc layers
        scale = 64  # channel width of the stem / first stage
        self.inplanes = scale
        super(ResNet, self).__init__()
        self.loss = loss
        self.feature_dim = scale * 8 * block.expansion

        self.conv1 = nn.Conv2d(
            3, scale, kernel_size=7, stride=2, padding=3, bias=False
        )
        # IBN-b: InstanceNorm (not BatchNorm) on the stem output.
        self.bn1 = nn.InstanceNorm2d(scale, affine=True)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Shallow stages get IN=True: their last block ends in instance norm.
        self.layer1 = self._make_layer(
            block, scale, layers[0], stride=1, IN=True
        )
        self.layer2 = self._make_layer(
            block, scale * 2, layers[1], stride=2, IN=True
        )
        self.layer3 = self._make_layer(block, scale * 4, layers[2], stride=2)
        self.layer4 = self._make_layer(block, scale * 8, layers[3], stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = self._construct_fc_layer(
            fc_dims, scale * 8 * block.expansion, dropout_p
        )
        self.classifier = nn.Linear(self.feature_dim, num_classes)

        # He-style init for convs; affine norm layers start as identity.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.InstanceNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, blocks, stride=1, IN=False):
        # Build one stage: a (possibly strided/projected) first block,
        # blocks-2 middle blocks, and a final block that may carry the
        # instance norm (IN).  Total block count equals `blocks`.
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(
                    self.inplanes,
                    planes * block.expansion,
                    kernel_size=1,
                    stride=stride,
                    bias=False
                ),
                nn.BatchNorm2d(planes * block.expansion),
            )

        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks - 1):
            layers.append(block(self.inplanes, planes))
        # The final block alone gets the optional instance norm.
        layers.append(block(self.inplanes, planes, IN=IN))

        return nn.Sequential(*layers)

    def _construct_fc_layer(self, fc_dims, input_dim, dropout_p=None):
        """Constructs fully connected layer

        Args:
            fc_dims (list or tuple): dimensions of fc layers, if None, no fc layers are constructed
            input_dim (int): input dimension
            dropout_p (float): dropout probability, if None, dropout is unused
        """
        if fc_dims is None:
            self.feature_dim = input_dim
            return None

        assert isinstance(
            fc_dims, (list, tuple)
        ), 'fc_dims must be either list or tuple, but got {}'.format(
            type(fc_dims)
        )

        layers = []
        for dim in fc_dims:
            layers.append(nn.Linear(input_dim, dim))
            layers.append(nn.BatchNorm1d(dim))
            layers.append(nn.ReLU(inplace=True))
            if dropout_p is not None:
                layers.append(nn.Dropout(p=dropout_p))
            input_dim = dim

        # Feature dimension becomes the width of the last fc layer.
        self.feature_dim = fc_dims[-1]

        return nn.Sequential(*layers)

    def featuremaps(self, x):
        # Backbone only: stem + four residual stages, no pooling/classifier.
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        return x

    def forward(self, x):
        f = self.featuremaps(x)
        v = self.avgpool(f)
        v = v.view(v.size(0), -1)

        if self.fc is not None:
            v = self.fc(v)

        # Inference: return the embedding; training: return logits
        # (and the embedding too for triplet loss).
        if not self.training:
            return v

        y = self.classifier(v)

        if self.loss == 'softmax':
            return y
        elif self.loss == 'triplet':
            return y, v
        else:
            raise KeyError("Unsupported loss: {}".format(self.loss))
def init_pretrained_weights(model, model_url):
    """Initialise *model* with pretrained weights downloaded from *model_url*.

    Only checkpoint entries whose name and tensor shape match the model are
    copied; every other layer keeps its current (random) initialisation.
    """
    pretrain_dict = model_zoo.load_url(model_url)
    model_dict = model.state_dict()
    compatible = {
        name: tensor
        for name, tensor in pretrain_dict.items()
        if name in model_dict and model_dict[name].size() == tensor.size()
    }
    model_dict.update(compatible)
    model.load_state_dict(model_dict)
def resnet50_ibn_b(num_classes, loss='softmax', pretrained=False, **kwargs):
    """Factory for a ResNet-50-IBN-b model.

    :param num_classes: size of the classifier head
    :param loss: 'softmax' or 'triplet' (controls what forward() returns)
    :param pretrained: if True, load matching ImageNet ResNet-50 weights
    """
    stage_depths = [3, 4, 6, 3]  # standard ResNet-50 block counts
    model = ResNet(
        Bottleneck, stage_depths, num_classes=num_classes, loss=loss, **kwargs
    )
    if pretrained:
        init_pretrained_weights(model, model_urls['resnet50'])
    return model
/FEADRE_AI-1.0.7.tar.gz/FEADRE_AI-1.0.7/FEADRE_AI/fmath/calc_3d/all_3D.py |
from __future__ import print_function
import os
import cv2
import sys
import numpy as np
import math
class PoseEstimator:
    """Estimate head pose according to the facial landmarks.

    Three reference 3D face models are provided (6, 14 and 68 points); the
    pose (rotation and translation vectors) is recovered with OpenCV's
    solvePnP against a pinhole camera whose focal length is approximated by
    the image width.
    """

    def __init__(self, img_size=(480, 640)):
        # img_size is (height, width) of the images the landmarks come from.
        self.size = img_size

        # 3D model points (6-point model, scaled down by 4.5).
        self.model_points_6 = np.array([
            (0.0, 0.0, 0.0),  # Nose tip
            (0.0, -330.0, -65.0),  # Chin
            (-225.0, 170.0, -135.0),  # Left eye left corner
            (225.0, 170.0, -135.0),  # Right eye right corner
            (-150.0, -150.0, -125.0),  # Left Mouth corner
            (150.0, -150.0, -125.0)  # Right mouth corner
        ], dtype=float) / 4.5

        # 14-point reference model.
        self.model_points_14 = np.array([
            (6.825897, 6.760612, 4.402142),
            (1.330353, 7.122144, 6.903745),
            (-1.330353, 7.122144, 6.903745),
            (-6.825897, 6.760612, 4.402142),
            (5.311432, 5.485328, 3.987654),
            (1.789930, 5.393625, 4.413414),
            (-1.789930, 5.393625, 4.413414),
            (-5.311432, 5.485328, 3.987654),
            (2.005628, 1.409845, 6.165652),
            (-2.005628, 1.409845, 6.165652),
            (2.774015, -2.080775, 5.048531),
            (-2.774015, -2.080775, 5.048531),
            (0.000000, -3.116408, 6.097667),
            (0.000000, -7.415691, 4.070434)], dtype=float)

        # Full 68-point reference model (dlib landmark ordering).
        self.model_points_68 = np.array([
            [-73.393523, -29.801432, -47.667532],
            [-72.775014, -10.949766, -45.909403],
            [-70.533638, 7.929818, -44.84258],
            [-66.850058, 26.07428, -43.141114],
            [-59.790187, 42.56439, -38.635298],
            [-48.368973, 56.48108, -30.750622],
            [-34.121101, 67.246992, -18.456453],
            [-17.875411, 75.056892, -3.609035],
            [0.098749, 77.061286, 0.881698],
            [17.477031, 74.758448, -5.181201],
            [32.648966, 66.929021, -19.176563],
            [46.372358, 56.311389, -30.77057],
            [57.34348, 42.419126, -37.628629],
            [64.388482, 25.45588, -40.886309],
            [68.212038, 6.990805, -42.281449],
            [70.486405, -11.666193, -44.142567],
            [71.375822, -30.365191, -47.140426],
            [-61.119406, -49.361602, -14.254422],
            [-51.287588, -58.769795, -7.268147],
            [-37.8048, -61.996155, -0.442051],
            [-24.022754, -61.033399, 6.606501],
            [-11.635713, -56.686759, 11.967398],
            [12.056636, -57.391033, 12.051204],
            [25.106256, -61.902186, 7.315098],
            [38.338588, -62.777713, 1.022953],
            [51.191007, -59.302347, -5.349435],
            [60.053851, -50.190255, -11.615746],
            [0.65394, -42.19379, 13.380835],
            [0.804809, -30.993721, 21.150853],
            [0.992204, -19.944596, 29.284036],
            [1.226783, -8.414541, 36.94806],
            [-14.772472, 2.598255, 20.132003],
            [-7.180239, 4.751589, 23.536684],
            [0.55592, 6.5629, 25.944448],
            [8.272499, 4.661005, 23.695741],
            [15.214351, 2.643046, 20.858157],
            [-46.04729, -37.471411, -7.037989],
            [-37.674688, -42.73051, -3.021217],
            [-27.883856, -42.711517, -1.353629],
            [-19.648268, -36.754742, 0.111088],
            [-28.272965, -35.134493, 0.147273],
            [-38.082418, -34.919043, -1.476612],
            [19.265868, -37.032306, 0.665746],
            [27.894191, -43.342445, -0.24766],
            [37.437529, -43.110822, -1.696435],
            [45.170805, -38.086515, -4.894163],
            [38.196454, -35.532024, -0.282961],
            [28.764989, -35.484289, 1.172675],
            [-28.916267, 28.612716, 2.24031],
            [-17.533194, 22.172187, 15.934335],
            [-6.68459, 19.029051, 22.611355],
            [0.381001, 20.721118, 23.748437],
            [8.375443, 19.03546, 22.721995],
            [18.876618, 22.394109, 15.610679],
            [28.794412, 28.079924, 3.217393],
            [19.057574, 36.298248, 14.987997],
            [8.956375, 39.634575, 22.554245],
            [0.381549, 40.395647, 23.591626],
            [-7.428895, 39.836405, 22.406106],
            [-18.160634, 36.677899, 15.121907],
            [-24.37749, 28.677771, 4.785684],
            [-6.897633, 25.475976, 20.893742],
            [0.340663, 26.014269, 22.220479],
            [8.444722, 25.326198, 21.02552],
            [24.474473, 28.323008, 5.712776],
            [8.449166, 30.596216, 20.671489],
            [0.205322, 31.408738, 21.90367],
            [-7.198266, 30.844876, 20.328022]])

        # Pinhole camera intrinsics: focal length approximated by the image
        # width, principal point at the image centre.
        self.focal_length = self.size[1]
        self.camera_center = (self.size[1] / 2, self.size[0] / 2)
        self.camera_matrix = np.array(
            [[self.focal_length, 0, self.camera_center[0]],
             [0, self.focal_length, self.camera_center[1]],
             [0, 0, 1]], dtype="double")

        # Assuming no lens distortion
        self.dist_coeefs = np.zeros((4, 1))

        # Initial rotation/translation guesses for solvePnP
        # (used with useExtrinsicGuess=True).
        self.r_vec = np.array([[0.01891013], [0.08560084], [-3.14392813]])
        self.t_vec = np.array([[-14.97821226], [-10.62040383], [-2053.03596872]])

    def get_euler_angle(self, rotation_vector):
        """Convert a Rodrigues rotation vector to Euler angles.

        :param rotation_vector: (3, 1) rotation vector from solvePnP
        :return: (pitch, yaw, roll) in radians
        """
        # calc rotation angles
        theta = cv2.norm(rotation_vector, cv2.NORM_L2)

        # transform to quaternion (w, x, y, z)
        w = math.cos(theta / 2)
        x = math.sin(theta / 2) * rotation_vector[0][0] / theta
        y = math.sin(theta / 2) * rotation_vector[1][0] / theta
        z = math.sin(theta / 2) * rotation_vector[2][0] / theta

        # pitch (x-axis rotation)
        t0 = 2.0 * (w * x + y * z)
        t1 = 1.0 - 2.0 * (x ** 2 + y ** 2)
        pitch = math.atan2(t0, t1)

        # yaw (y-axis rotation); clamp to avoid domain errors in asin
        t2 = 2.0 * (w * y - z * x)
        if t2 > 1.0:
            t2 = 1.0
        if t2 < -1.0:
            t2 = -1.0
        yaw = math.asin(t2)

        # roll (z-axis rotation)
        t3 = 2.0 * (w * z + x * y)
        t4 = 1.0 - 2.0 * (y ** 2 + z ** 2)
        roll = math.atan2(t3, t4)

        return pitch, yaw, roll

    def solve_pose_by_6_points(self, image_points):
        """Solve pose from 6 of the 68 image landmarks.

        :param image_points: 68 (x, y) landmarks in dlib ordering
        :return: (rotation_vector, translation_vector) as pose
        """
        # Nose tip, eye corners, mouth corners and chin.
        points_6 = np.float32([
            image_points[30], image_points[36], image_points[45],
            image_points[48], image_points[54], image_points[8]])

        _, rotation_vector, translation_vector = cv2.solvePnP(
            self.model_points_6,
            points_6,
            self.camera_matrix,
            self.dist_coeefs,
            rvec=self.r_vec,
            tvec=self.t_vec,
            useExtrinsicGuess=True)

        return rotation_vector, translation_vector

    def solve_pose_by_14_points(self, image_points):
        """Solve pose from 14 of the 68 image landmarks.

        :param image_points: 68 (x, y) landmarks in dlib ordering
        :return: (rotation_vector, translation_vector) as pose
        """
        points_14 = np.float32([
            image_points[17], image_points[21], image_points[22], image_points[26], image_points[36],
            image_points[39], image_points[42], image_points[45], image_points[31], image_points[35],
            image_points[48], image_points[54], image_points[57], image_points[8]])

        _, rotation_vector, translation_vector = cv2.solvePnP(
            self.model_points_14,
            points_14,
            self.camera_matrix,
            self.dist_coeefs,
            rvec=self.r_vec,
            tvec=self.t_vec,
            useExtrinsicGuess=True)

        return rotation_vector, translation_vector

    def solve_pose_by_68_points(self, image_points):
        """Solve pose from all 68 image landmarks.

        :param image_points: 68 (x, y) landmarks in dlib ordering
        :return: (rotation_vector, translation_vector) as pose
        """
        image_points = np.float32(np.expand_dims(image_points, axis=1))

        _, rotation_vector, translation_vector = cv2.solvePnP(
            self.model_points_68,
            image_points,
            self.camera_matrix,
            self.dist_coeefs,
            rvec=self.r_vec,
            tvec=self.t_vec,
            useExtrinsicGuess=True)

        return rotation_vector, translation_vector

    def draw_annotation_box(self, image, rotation_vector, translation_vector, color=(255, 255, 255), line_width=2):
        """Draw a 3D box on *image* as annotation of the given pose."""
        point_3d = []
        rear_size = 75
        rear_depth = 0
        point_3d.append((-rear_size, -rear_size, rear_depth))
        point_3d.append((-rear_size, rear_size, rear_depth))
        point_3d.append((rear_size, rear_size, rear_depth))
        point_3d.append((rear_size, -rear_size, rear_depth))
        point_3d.append((-rear_size, -rear_size, rear_depth))

        front_size = 100
        front_depth = 100
        point_3d.append((-front_size, -front_size, front_depth))
        point_3d.append((-front_size, front_size, front_depth))
        point_3d.append((front_size, front_size, front_depth))
        point_3d.append((front_size, -front_size, front_depth))
        point_3d.append((-front_size, -front_size, front_depth))
        # BUG FIX: np.float was a deprecated alias of the builtin float and
        # was removed in NumPy 1.24 (AttributeError).  np.float64 is the
        # exact type the alias resolved to.
        point_3d = np.array(point_3d, dtype=np.float64).reshape(-1, 3)

        # Map to 2d image points
        (point_2d, _) = cv2.projectPoints(point_3d,
                                          rotation_vector,
                                          translation_vector,
                                          self.camera_matrix,
                                          self.dist_coeefs)
        point_2d = np.int32(point_2d.reshape(-1, 2))

        # Draw all the lines
        cv2.polylines(image, [point_2d], True, color, line_width, cv2.LINE_AA)
        cv2.line(image, tuple(point_2d[1]), tuple(
            point_2d[6]), color, line_width, cv2.LINE_AA)
        cv2.line(image, tuple(point_2d[2]), tuple(
            point_2d[7]), color, line_width, cv2.LINE_AA)
        cv2.line(image, tuple(point_2d[3]), tuple(
            point_2d[8]), color, line_width, cv2.LINE_AA)
def run(pic_path):
    """Detect the first face in *pic_path*, estimate its head pose and show
    the annotated image until a key is pressed.

    :param pic_path: path of the image file to analyse
    :return: 0 on completion
    """
    import dlib

    face_detector = dlib.get_frontal_face_detector()  # dlib's built-in frontal face detector
    img = cv2.imread(pic_path)
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)  # OpenCV reads BGR; convert to RGB

    faces = face_detector(img, 1)  # detect faces (may return several)
    first_face = faces[0]  # only the first face is analysed
    landmark_model = dlib.shape_predictor('./shape_predictor_68_face_landmarks.dat')
    landmarks = landmark_model(img, first_face)  # 68 facial landmarks
    points_68 = np.matrix([[p.x, p.y] for p in landmarks.parts()])

    pose_estimator = PoseEstimator(img_size=img.shape)
    # Alternatives: solve_pose_by_6_points / solve_pose_by_14_points.
    pose = pose_estimator.solve_pose_by_68_points(points_68)
    pitch, yaw, roll = pose_estimator.get_euler_angle(pose[0])

    def _radian2angle(r):
        return (r / math.pi) * 180

    Y, X, Z = map(_radian2angle, [pitch, yaw, roll])
    line = 'Y:{:.1f}\nX:{:.1f}\nZ:{:.1f}'.format(Y, X, Z)
    print('{},{}'.format(os.path.basename(pic_path), line.replace('\n', ',')))

    # Overlay the angles and the detected landmarks on the image.
    img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
    y = 20
    for txt in line.split('\n'):
        cv2.putText(img, txt, (20, y), cv2.FONT_HERSHEY_PLAIN, 1.3, (0, 0, 255), 1)
        y = y + 15
    for point in [[p.x, p.y] for p in landmarks.parts()]:
        cv2.circle(img, (int(point[0]), int(point[1])), 2, (0, 255, 0), -1, 0)

    cv2.imshow('img', img)
    if cv2.waitKey(-1) == 27:  # wait for any key; 27 == ESC
        pass
    return 0
if __name__ == "__main__":
    # Demo entry point: analyse the sample image and exit with run()'s status.
    sys.exit(run("./8_Election_Campain_Election_Campaign_8_326.jpg"))
/Mopidy-MPRIS-3.0.3.tar.gz/Mopidy-MPRIS-3.0.3/README.rst | ************
Mopidy-MPRIS
************
.. image:: https://img.shields.io/pypi/v/Mopidy-MPRIS
:target: https://pypi.org/project/Mopidy-MPRIS/
:alt: Latest PyPI version
.. image:: https://img.shields.io/circleci/build/gh/mopidy/mopidy-mpris
:target: https://circleci.com/gh/mopidy/mopidy-mpris
:alt: CircleCI build status
.. image:: https://img.shields.io/codecov/c/gh/mopidy/mopidy-mpris
:target: https://codecov.io/gh/mopidy/mopidy-mpris
:alt: Test coverage
`Mopidy`_ extension for controlling Mopidy through D-Bus using the `MPRIS
specification`_.
Mopidy-MPRIS supports the minimum requirements of the `MPRIS specification`_
as well as the optional `Playlists interface`_. The `TrackList interface`_
is currently not supported.
.. _Mopidy: https://www.mopidy.com/
.. _MPRIS specification: https://specifications.freedesktop.org/mpris-spec/latest/
.. _Playlists interface: https://specifications.freedesktop.org/mpris-spec/latest/Playlists_Interface.html
.. _TrackList interface: https://specifications.freedesktop.org/mpris-spec/latest/Track_List_Interface.html
Table of contents
=================
- Requirements_
- Installation_
- Configuration_
- Usage_
- Clients_
- `GNOME Shell builtin`_
- `gnome-shell-extensions-mediaplayer`_
- `gnome-shell-extensions-mpris-indicator-button`_
- `Ubuntu Sound Menu`_
- `Advanced setups`_
- `Running as a service`_
- `MPRIS on the system bus`_
- `UPnP/DLNA with Rygel`_
- `Development tips`_
- `Browsing the MPRIS API with D-Feet`_
- `Testing the MPRIS API with pydbus`_
- `Project resources`_
- Credits_
Requirements
============
- `pydbus`_ D-Bus Python bindings, which again depends on ``python-gi``. Thus
it is usually easiest to install with your distribution's package manager.
.. _pydbus: https://github.com/LEW21/pydbus
Installation
============
Install by running::
sudo python3 -m pip install Mopidy-MPRIS
See https://mopidy.com/ext/mpris/ for alternative installation methods.
Configuration
=============
No configuration is required for the MPRIS extension to work.
The following configuration values are available:
- ``mpris/enabled``: If the MPRIS extension should be enabled or not.
Defaults to ``true``.
- ``mpris/bus_type``: The type of D-Bus bus Mopidy-MPRIS should connect to.
Choices include ``session`` (the default) and ``system``.
Usage
=====
Once Mopidy-MPRIS has been installed and your Mopidy server has been
restarted, the Mopidy-MPRIS extension announces its presence on D-Bus so that
any MPRIS compatible clients on your system can interact with it. Exactly how
you control Mopidy through MPRIS depends on which MPRIS client you use.
Clients
=======
The following clients have been tested with Mopidy-MPRIS.
GNOME Shell builtin
-------------------
State:
Not working
Tested versions:
Ubuntu 18.10,
GNOME Shell 3.30.1-2ubuntu1,
Mopidy-MPRIS 2.0.0
GNOME Shell, which is the default desktop on Ubuntu 18.04 onwards, has a
builtin MPRIS client. This client seems to work well with Spotify's player,
but Mopidy-MPRIS does not show up here.
If you have any tips on what's missing to get this working, please open an
issue.
gnome-shell-extensions-mediaplayer
----------------------------------
State:
Working
Tested versions:
Ubuntu 18.10,
GNOME Shell 3.30.1-2ubuntu1,
gnome-shell-extension-mediaplayer 63,
Mopidy-MPRIS 2.0.0
Website:
https://github.com/JasonLG1979/gnome-shell-extensions-mediaplayer
gnome-shell-extensions-mediaplayer is a feature-rich MPRIS client
built as an extension to GNOME Shell. With the improvements to Mopidy-MPRIS
in v2.0, this extension works very well with Mopidy.
gnome-shell-extensions-mpris-indicator-button
---------------------------------------------
State:
Working
Tested versions:
Ubuntu 18.10,
GNOME Shell 3.30.1-2ubuntu1,
gnome-shell-extensions-mpris-indicator-button 5,
Mopidy-MPRIS 2.0.0
Website:
https://github.com/JasonLG1979/gnome-shell-extensions-mpris-indicator-button/
gnome-shell-extensions-mpris-indicator-button is a minimalistic version of
gnome-shell-extensions-mediaplayer. It works with Mopidy-MPRIS, with the
exception of the play/pause button not changing state when Mopidy starts
playing.
If you have any tips on what's missing to get the play/pause button display
correctly, please open an issue.
Ubuntu Sound Menu
-----------------
State:
Unknown
Historically, Ubuntu Sound Menu was the primary target for Mopidy-MPRIS'
development. Since Ubuntu 18.04 replaced Unity with GNOME Shell, this is no
longer the case. It is currently unknown to what degree Mopidy-MPRIS works
with old Ubuntu setups.
If you run an Ubuntu setup with Unity and have tested Mopidy-MPRIS, please
open an issue to share your results.
Advanced setups
===============
Running as a service
--------------------
If you have input on how to best configure Mopidy-MPRIS when Mopidy is
running as a service, please add a comment to `issue #15`_.
.. _issue #15: https://github.com/mopidy/mopidy-mpris/issues/15
MPRIS on the system bus
-----------------------
You can set the ``mpris/bus_type`` config value to ``system``. This will lead
to Mopidy-MPRIS making itself available on the system bus instead of the
logged in user's session bus.
.. note::
Few MPRIS clients will try to access MPRIS devices on the system bus, so
this will give you limited functionality. For example, media keys in
GNOME Shell does not work with media players that expose their MPRIS
interface on the system bus instead of the user's session bus.
The default setup will often not permit Mopidy to publish its service on the
D-Bus system bus, causing a warning similar to this in Mopidy's log::
MPRIS frontend setup failed (g-dbus-error-quark:
GDBus.Error:org.freedesktop.DBus.Error.AccessDenied: Connection ":1.3071"
is not allowed to own the service "org.mpris.MediaPlayer2.mopidy" due to
security policies in the configuration file (9))
To solve this, create the file
``/etc/dbus-1/system.d/org.mpris.MediaPlayer2.mopidy.conf`` with the
following contents:
.. code:: xml
<!DOCTYPE busconfig PUBLIC "-//freedesktop//DTD D-BUS Bus Configuration 1.0//EN"
"http://www.freedesktop.org/standards/dbus/1.0/busconfig.dtd">
<busconfig>
<!-- Allow mopidy user to publish the Mopidy-MPRIS service -->
<policy user="mopidy">
<allow own="org.mpris.MediaPlayer2.mopidy"/>
</policy>
<!-- Allow anyone to invoke methods on the Mopidy-MPRIS service -->
<policy context="default">
<allow send_destination="org.mpris.MediaPlayer2.mopidy"/>
<allow receive_sender="org.mpris.MediaPlayer2.mopidy"/>
</policy>
</busconfig>
If you run Mopidy as another user than ``mopidy``, you must
update ``user="mopidy"`` in the above file accordingly.
Once the file is in place, you must restart Mopidy for the change to take
effect.
To test the setup, you can run the following command as any user on the
system to play/pause the music::
dbus-send --system --print-reply \
--dest=org.mpris.MediaPlayer2.mopidy \
/org/mpris/MediaPlayer2 \
org.mpris.MediaPlayer2.Player.PlayPause
UPnP/DLNA with Rygel
--------------------
Rygel_ is an application that will translate between Mopidy's MPRIS interface
and UPnP. Rygel must be run on the same machine as Mopidy, but will make
Mopidy controllable by any device on the local network that can control a
UPnP/DLNA MediaRenderer.
.. _Rygel: https://wiki.gnome.org/Projects/Rygel
The setup process is approximately as follows:
1. Install Rygel.
On Debian/Ubuntu/Raspbian::
sudo apt install rygel
2. Enable Rygel's MPRIS plugin.
On Debian/Ubuntu/Raspbian, edit ``/etc/rygel.conf``, find the ``[MPRIS]``
section, and change ``enabled=false`` to ``enabled=true``.
3. Start Rygel.
To start it as the current user::
systemctl --user start rygel
To make Rygel start as the current user on boot::
systemctl --user enable rygel
4. Configure your system's firewall to allow the local network to reach
Rygel. Exactly how is out of scope for this document.
5. Start Mopidy with Mopidy-MPRIS enabled.
6. If you view Rygel's log output with::
journalctl --user -feu rygel
You should see a log statement similar to::
New plugin "org.mpris.MediaPlayer2.mopidy" available
7. If everything went well, you should now be able to control Mopidy from a
device on your local network that can control an UPnP/DLNA MediaRenderer,
for example the Android app BubbleUPnP.
Alternatively, `upmpdcli combined with Mopidy-MPD`_ serves the same purpose as
this setup.
.. _upmpdcli combined with Mopidy-MPD: https://docs.mopidy.com/en/latest/clients/upnp/
Development tips
================
Mopidy-MPRIS has an extensive test suite, so the first step for all changes
or additions is to add a test exercising the new code. However, making the
tests pass doesn't ensure that what comes out on the D-Bus bus is correct. To
introspect this through the bus, there's a couple of useful tools.
Browsing the MPRIS API with D-Feet
----------------------------------
D-Feet is a graphical D-Bus browser. On Debian/Ubuntu systems it can be
installed by running::
sudo apt install d-feet
Then run the ``d-feet`` command. In the D-Feet window, select the tab
corresponding to the bus you run Mopidy-MPRIS on, usually the session bus.
Then search for "MediaPlayer2" to find all available MPRIS interfaces.
To get the current value of a property, double-click it. To execute a method,
double-click it, provide any required arguments, and click "Execute".
For more information on D-Feet, see the `GNOME wiki
<https://wiki.gnome.org/Apps/DFeet>`_.
Testing the MPRIS API with pydbus
---------------------------------
To use the MPRIS API directly, start Mopidy, and then run the following in a
Python shell to use ``pydbus`` as an MPRIS client::
>>> import pydbus
>>> bus = pydbus.SessionBus()
>>> player = bus.get('org.mpris.MediaPlayer2.mopidy', '/org/mpris/MediaPlayer2')
Now you can control Mopidy through the player object. To get properties from
Mopidy, run for example::
>>> player.PlaybackStatus
'Playing'
>>> player.Metadata
{'mpris:artUrl': 'https://i.scdn.co/image/8eb49b41eeb45c1cf53e1ddfea7973d9ca257777',
'mpris:length': 342000000,
'mpris:trackid': '/com/mopidy/track/36',
'xesam:album': '65/Milo',
'xesam:albumArtist': ['Kiasmos'],
'xesam:artist': ['Rival Consoles'],
'xesam:discNumber': 1,
'xesam:title': 'Arp',
'xesam:trackNumber': 5,
'xesam:url': 'spotify:track:7CoxEEsqo3XdvUsScRV4WD'}
>>>
To pause Mopidy's playback through D-Bus, run::
>>> player.Pause()
>>>
For details on the API, please refer to the `MPRIS specification
<https://specifications.freedesktop.org/mpris-spec/latest/>`__.
Project resources
=================
- `Source code <https://github.com/mopidy/mopidy-mpris>`_
- `Issue tracker <https://github.com/mopidy/mopidy-mpris/issues>`_
- `Changelog <https://github.com/mopidy/mopidy-mpris/releases>`_
Credits
=======
- Original author: `Stein Magnus Jodal <https://github.com/jodal>`__
- Current maintainer: `Stein Magnus Jodal <https://github.com/jodal>`__
- `Contributors <https://github.com/mopidy/mopidy-mpris/graphs/contributors>`_
| PypiClean |
/GQCConstraints-1.1.1-py3-none-any.whl/GQCC/SpinUnresolvedConstraints/FCI.py | import gqcpy
import numpy as np
from GQCC.Optimization.SpinUnresolvedOptimizer import SpinUnresolvedOptimizer
class FCI(SpinUnresolvedOptimizer):
    """
    A constructor that sets up everything needed for spin unresolved constrained CI calculations.

    :param molecule: The molecule used for the calculations.
    :param basis_set: The basis set used for the calculations.
    :param operator: The operator that will be constrained.
    :param basis: The type of basis in which the FCI calculation will be performed. Default is restricted.

    :returns: An object which contains all required data (basis, Hamiltonian,... ) and possesses the necessary methods to perform calculations.

    To initialize a constrained FCI object, we need several objects from GQCP.
    First, we need a molecule.

    .. code-block:: python

        H2 = gqcpy.Molecule.HChain(2, 4.5, 0)

    Secondly, we need to create our spin-orbital basis. Since this is FCI, it should be orthonormal.

    .. code-block:: python

        basis = gqcpy.RSpinOrbitalBasis_d(H2, "6-31G")
        basis.lowdinOrthonormalize()

    Finally we can create a spin-unresolved operator to constrain. Let's use a Mulliken operator in this example.

    .. code-block:: python

        S = basis.quantize(gqcpy.OverlapOperator())
        mulliken_domain = basis.mullikenDomain(lambda shell: shell.nucleus().position()[1] == 0 and shell.nucleus().position()[2] == 0)
        P = mulliken_domain.projectionMatrix(basis.expansion())
        mulliken_operator = S.partitioned(P)

    Now we can use GQCC to construct our constrained ansatz.

    .. code-block:: python

        Constrained_object = GQCC.SpinUnresolvedConstraints.FCI(H2, basis, mulliken_operator, "population")

    .. note:
        The spin-unresolved FCI module also works with generalized `gqcpy.GSpinorBasis_(c)d` bases.
    """
    def __init__(self, molecule, SQ_basis, operator, constrained_observable):
        # Check compatibility of the operator type based on the used basis_type.
        if type(SQ_basis) is gqcpy.gqcpy.RSpinOrbitalBasis_d:
            compatible_operators = [gqcpy.gqcpy.ScalarRSQOneElectronOperator_d, gqcpy.gqcpy.ScalarRSQTwoElectronOperator_d]
        elif type(SQ_basis) is gqcpy.gqcpy.RSpinOrbitalBasis_cd:
            compatible_operators = [gqcpy.gqcpy.ScalarRSQOneElectronOperator_cd, gqcpy.gqcpy.ScalarRSQTwoElectronOperator_cd]
        elif type(SQ_basis) is gqcpy.gqcpy.GSpinorBasis_d:
            compatible_operators = [gqcpy.gqcpy.ScalarGSQOneElectronOperator_d, gqcpy.gqcpy.ScalarGSQTwoElectronOperator_d, gqcpy.gqcpy.ScalarGSQOneElectronOperatorProduct_d]
        elif type(SQ_basis) is gqcpy.gqcpy.GSpinorBasis_cd:
            compatible_operators = [gqcpy.gqcpy.ScalarGSQOneElectronOperator_cd, gqcpy.gqcpy.ScalarGSQTwoElectronOperator_cd, gqcpy.gqcpy.ScalarGSQOneElectronOperatorProduct_cd]
        else:
            raise ValueError("the chosen `SQ_basis` is not compatible with this type of calculation. Use `gqcpy.R/GSpinOrbitalBasis_(c)d`instead.")
        assert (type(operator) in compatible_operators), "Only `ScalarR/GSQOneElectronOperator_(c)d` or `ScalarR/GSQTwoElectronOperator_(c)d` can be constrained with this method."
        # We can now create a first quantized hamiltonian and use the spin orbital basis to "quantize" it to second quantization.
        # The SQHamiltonian is stored in the class object.
        fq_hamiltonian = gqcpy.FQMolecularHamiltonian(molecule)
        self._sq_hamiltonian = SQ_basis.quantize(fq_hamiltonian)
        # Since we are going to do full CI calculations, we need an ONV-basis.
        # From the total number of orbitals and total number of electrons we can set up an ONV basis.
        if type(SQ_basis) is gqcpy.gqcpy.RSpinOrbitalBasis_d or type(SQ_basis) is gqcpy.gqcpy.RSpinOrbitalBasis_cd:
            K = int(SQ_basis.numberOfSpatialOrbitals())
        else:
            K = int(SQ_basis.numberOfSpinors())
        N_total = molecule.numberOfElectrons()
        if N_total % 2 == 0:
            N_a = int(N_total / 2)
            N_b = int(N_total / 2)
        else:
            # For an odd electron count the extra electron goes into the alpha channel (ceil/floor split).
            N_a = int(np.ceil(N_total / 2))
            N_b = int(np.floor(N_total / 2))
        # The ONV basis gets stored within the class object.
        if type(SQ_basis) is gqcpy.gqcpy.RSpinOrbitalBasis_d or type(SQ_basis) is gqcpy.gqcpy.RSpinOrbitalBasis_cd:
            self._onv_basis = gqcpy.SpinResolvedONVBasis(K, N_a, N_b)
        else:
            # Generalized (spinor) bases use a selected ONV basis wrapping the full spin-unresolved one.
            full_onv_basis = gqcpy.SpinUnresolvedONVBasis(K, N_total)
            self._onv_basis = gqcpy.SpinUnresolvedSelectedONVBasis(full_onv_basis)
        # Calculate the nuclear repulsion term and store it in the class object.
        self._nuclear_repulsion = gqcpy.NuclearRepulsionOperator(molecule.nuclearFramework()).value()
        # Store the operator that is being constrained.
        self._operator = operator
        self._constrained_observable = constrained_observable

    def _solveCIEigenproblem(self, hamiltonian):
        """
        A method used to solve a CI eigenproblem.

        :param hamiltonian: The SQHamiltonian used for the calculation.

        :returns: The ground state energy.
        :returns: The ground state parameters
        """
        # Use GQCP to set up a full CI calculation.
        if type(hamiltonian) is gqcpy.gqcpy.RSQHamiltonian_d or type(hamiltonian) is gqcpy.gqcpy.GSQHamiltonian_d:
            CIsolver = gqcpy.EigenproblemSolver.Dense_d()
            CIenvironment = gqcpy.CIEnvironment.Dense(hamiltonian, self._onv_basis)
            qc_structure = gqcpy.CI(self._onv_basis).optimize(CIsolver, CIenvironment)
        else:
            # Complex-valued Hamiltonians require the complex dense solver/environment.
            CIsolver = gqcpy.EigenproblemSolver.Dense_cd()
            CIenvironment = gqcpy.CIEnvironment.Dense_cd(hamiltonian, self._onv_basis)
            qc_structure = gqcpy.CI_cd(self._onv_basis).optimize(CIsolver, CIenvironment)
        return qc_structure.groundStateEnergy(), qc_structure.groundStateParameters()

    def calculateEnergyAndExpectationValue(self, multiplier, return_parameters=False, verbose=0):
        """
        A method used to calculate the energy and the expectation value of the operator at a given multiplier `mu`.

        :param multiplier: The multiplier used to modify the Hamiltonian.
        :param return_parameters: A boolean flag to indicate whether the wavefunction parameters should also be returned.
        :param verbose: An integer representing the amount of output that will be printed.

        :returns: The energy at the given `mu` value.
        :returns: The expectation value of the operator at the given `mu` value.
        :returns: The wavefunction parameters (only if `return_parameters` is set to `True`).

        In order to calculate the energy and expectation value of your operator, associated with a certain Lagrange multiplier (let's say -1).

        .. code-block:: python

            energy, expval = Constrained_object.calculateEnergyAndExpectationValue(-1)
        """
        # Modify the Hamiltonian with the given multiplier.
        # For a one- or two- electron operator this happens in the same way.
        # Note that the one-electron operator modifies the one-electron integrals, and the two-electron operator modifies the two-electron integrals.
        if type(self._operator) in [gqcpy.gqcpy.ScalarRSQOneElectronOperator_d, gqcpy.gqcpy.ScalarRSQOneElectronOperator_cd, gqcpy.gqcpy.ScalarGSQOneElectronOperator_d, gqcpy.gqcpy.ScalarGSQOneElectronOperator_cd, gqcpy.gqcpy.ScalarRSQTwoElectronOperator_d, gqcpy.gqcpy.ScalarRSQTwoElectronOperator_cd, gqcpy.gqcpy.ScalarGSQTwoElectronOperator_d, gqcpy.gqcpy.ScalarGSQTwoElectronOperator_cd]:
            modified_hamiltonian = self._sq_hamiltonian - multiplier * self._operator
        # For an operator product.
        elif type(self._operator) in [gqcpy.gqcpy.ScalarGSQOneElectronOperatorProduct_d, gqcpy.gqcpy.ScalarGSQOneElectronOperatorProduct_cd]:
            modified_hamiltonian = self._sq_hamiltonian - (multiplier * self._operator.oneElectron()) - (multiplier * self._operator.twoElectron())
        else:
            raise ValueError("Something went wrong with the operator type.")
        # Use the private method to solve the full CI eigenproblem.
        gs_energy, gs_parameters = self._solveCIEigenproblem(modified_hamiltonian)
        # Calculate the density matrices.
        D = gs_parameters.calculate1DM()
        d = gs_parameters.calculate2DM()
        # Calculate the expectation value of the constrained operator.
        # This differs depending on which kind of operator is being used.
        # For a one electron operator.
        if type(self._operator) in [gqcpy.gqcpy.ScalarRSQOneElectronOperator_d, gqcpy.gqcpy.ScalarRSQOneElectronOperator_cd, gqcpy.gqcpy.ScalarGSQOneElectronOperator_d, gqcpy.gqcpy.ScalarGSQOneElectronOperator_cd]:
            expectation_value = self._operator.calculateExpectationValue(D)[0]
        # For a two electron operator.
        elif type(self._operator) in [gqcpy.gqcpy.ScalarRSQTwoElectronOperator_d, gqcpy.gqcpy.ScalarRSQTwoElectronOperator_cd, gqcpy.gqcpy.ScalarGSQTwoElectronOperator_d, gqcpy.gqcpy.ScalarGSQTwoElectronOperator_cd]:
            expectation_value = self._operator.calculateExpectationValue(d)[0]
        # For an operator product.
        elif type(self._operator) in [gqcpy.gqcpy.ScalarGSQOneElectronOperatorProduct_d, gqcpy.gqcpy.ScalarGSQOneElectronOperatorProduct_cd]:
            expectation_value = self._operator.calculateExpectationValue(D, d)
        # Calculate the energy by correcting the ground state energy of the modified Hamiltonian.
        energy = gs_energy + (multiplier * expectation_value) + self._nuclear_repulsion
        # Print the progress of which mu values have been completed if verbose >= 2.
        if verbose >= 2:
            print("--------------------------------------------------------")
            print("Mu = " + str(np.around(multiplier, 2)) + " done.")
        if return_parameters:
            return energy, expectation_value, gs_parameters
        else:
            return energy, expectation_value
/NitPycker-0.1.tar.gz/NitPycker-0.1/nitpycker/plugins/xunit.py | import os
from xml.etree import ElementTree
from nitpycker.plugins import TestReporter
from nitpycker.result import TestState, ResultAggregator
__author__ = "Benjamin Schubert, ben.c.schubert@gmail.com"
class XMLReporter(TestReporter):
    """
    An Xunit compliant reporter, useful mainly for CI integration
    """
    @staticmethod
    def add_error_list(report: ElementTree.Element, results: dict) -> None:
        """
        Adds every result to the XML document tree

        :param report: the XML document tree to use
        :param results: contains lists of tests to add
        """
        for tests in results:
            for test in results[tests]:
                # One <testcase> element per test; test[0] is the test instance.
                testcase = ElementTree.Element(
                    'testcase',
                    attrib={
                        "classname": ".".join(test[0].id().split(".")[:-1]),
                        "name": test[0].id().split(".")[-1],
                        "time": str(round(test[0].time_taken, 3))
                    }
                )
                # Map NitPycker result categories to xunit tags:
                # unexpected successes are reported as failures.
                if tests == TestState.unexpected_successes.name:
                    _type_ = TestState.failures.name
                elif tests == TestState.expected_failures.name:
                    _type_ = None
                else:
                    _type_ = tests
                if len(test) > 1:
                    # NOTE(review): for expected failures _type_ is None here, so
                    # ElementTree serializes an element without a tag (text/children
                    # only) — confirm this is intended rather than skipping the entry.
                    fail_type = ElementTree.Element(
                        _type_,
                        attrib={
                            "type": test[1][0].__name__,
                            "message": str(test[1][1])
                        }
                    )
                    # test[1] is (exception type, exception value, formatted traceback).
                    fail_type.text = test[1][2]
                    # TODO add stdout and stderr capture here
                    testcase.append(fail_type)
                report.append(testcase)

    def report(self, result_reporter: ResultAggregator) -> None:
        """
        creates an xml report for the test. This is xunit compliant

        :param result_reporter: results to parse
        """
        # Top-level <testsuite> element with aggregate counters.
        report = ElementTree.Element(
            "testsuite",
            attrib={
                "name": os.path.basename(os.getcwd()),
                "tests": str(sum(len(test_type) for test_type in result_reporter.results.values())),
                "errors": str(len(result_reporter.results[TestState.errors.name])),
                "failures": str(sum(
                    len(result_reporter.results[test_type])
                    for test_type in [TestState.failures.name, TestState.unexpected_successes.name])),
                "skip": str(len(result_reporter.results[TestState.skipped.name]))
            }
        )
        # Only emit individual testcases when at least one test ran.
        if sum(len(test_type) for test_type in result_reporter.results.values()):
            self.add_error_list(report, result_reporter.results)
        with open("nitpycker.xml", "wb") as f:
            ElementTree.ElementTree(report).write(f, encoding="utf-8", xml_declaration=True)
/Bayesian-Outlier-Model-1.0a13.tar.gz/Bayesian-Outlier-Model-1.0a13/src/outlier_model/toil_workflow.py | import argparse
import os
import shutil
from toil.common import Toil
from toil.job import Job
from toil.lib.docker import apiDockerCall, _fixPermissions
def workflow(job, samples, args):
    """Root Toil job: stage input files into the jobStore, then fan out per-sample work.

    :param job: Toil job (passed automatically by Toil)
    :param list samples: sample names to process
    :param argparse.Namespace args: parsed command-line options
    """
    store = job.fileStore
    sample_id = store.writeGlobalFile(args.sample)
    background_id = store.writeGlobalFile(args.background)
    if args.gene_list:
        gene_id = store.writeGlobalFile(args.gene_list)
    else:
        gene_id = None
    job.addChildJobFn(map_job, run_outlier_model, samples,
                      sample_id, background_id, gene_id, args)
def run_outlier_model(job, name, sample_id, background_id, gene_id, args, cores=4, memory='5G'):
    """Run the Bayesian outlier model Docker container for a single sample.

    :param job: Toil job (passed automatically by Toil)
    :param str name: sample name; also used as the output subdirectory name
    :param sample_id: jobStore file ID of the sample matrix
    :param background_id: jobStore file ID of the background matrix
    :param gene_id: jobStore file ID of the gene list, or None if no list was given
    :param argparse.Namespace args: parsed command-line options
    :param cores: Toil resource hint for this job — TODO confirm consumed by Toil
    :param memory: Toil resource hint for this job — TODO confirm consumed by Toil
    """
    # Process names with flexible extensions
    sample_ext = os.path.splitext(args.sample)[1]
    sample_name = 'sample_matrix{}'.format(sample_ext)
    bg_ext = os.path.splitext(args.background)[1]
    bg_name = 'bg_matrix{}'.format(bg_ext)
    # Read in input file from jobStore
    job.fileStore.readGlobalFile(sample_id, os.path.join(job.tempDir, sample_name))
    job.fileStore.readGlobalFile(background_id, os.path.join(job.tempDir, bg_name))
    if gene_id:
        job.fileStore.readGlobalFile(gene_id, os.path.join(job.tempDir, 'gene-list.txt'))
    # Define parameters and call Docker container.
    # Paths are container-side: job.tempDir is mounted at /data.
    parameters = ['--sample', '/data/{}'.format(sample_name),
                  '--background', '/data/{}'.format(bg_name),
                  '--name', name,
                  '--out-dir', '/data',
                  '--group', args.group,
                  '--col-skip', str(args.col_skip),
                  '--num-backgrounds', str(args.num_backgrounds),
                  '--max-genes', str(args.max_genes),
                  '--num-training-genes', str(args.num_training_genes)]
    if gene_id:
        parameters.extend(['--gene-list', '/data/gene-list.txt'])
    image = 'jvivian/bayesian-outlier-model:1.0a13'
    apiDockerCall(job=job,
                  image=image,
                  working_dir=job.tempDir,
                  parameters=parameters,
                  user='root')
    # The container runs as root, so reset ownership of its output files
    # in tempDir so they can be moved/cleaned up by the workflow user.
    _fixPermissions(tool=image, workDir=job.tempDir)
    out_dir = os.path.join(job.tempDir, name)
    shutil.move(out_dir, args.out_dir)
def cli():
    """Build the argument parser (workflow options plus Toil's) and parse argv."""
    parser = argparse.ArgumentParser(description=main.__doc__, formatter_class=argparse.RawTextHelpFormatter)
    add = parser.add_argument
    add('--sample', required=True, type=str, help='Sample(s) by Genes matrix (csv/tsv/hd5)')
    add('--background', required=True, type=str,
        help='Samples by Genes matrix with metadata columns first (including a group column that '
             'discriminates samples by some category) (csv/tsv/hd5)')
    add('--manifest', required=True, type=str,
        help='Single column file of sample names in sample matrix')
    add('--gene-list', type=str, help='Single column file of genes to use for training')
    add('--out-dir', default='.', type=str, help='Output directory')
    add('--group', default='tissue', type=str,
        help='Categorical column vector in the background matrix')
    add('--col-skip', default=1, type=int,
        help='Number of metadata columns to skip in background matrix so remainder are genes')
    add('--num-backgrounds', default=5, type=int,
        help='Number of background categorical groups to include in the model training')
    add('--max-genes', default=100, type=int,
        help='Maximum number of genes to run. I.e. if a gene list is input, how many additional'
             'genes to add via SelectKBest. Useful for improving beta coefficients'
             'if gene list does not contain enough tissue-specific genes.')
    add('--num-training-genes', default=50, type=int,
        help='If gene-list is empty, will use SelectKBest to choose gene set.')
    # Add Toil options
    Job.Runner.addToilOptions(parser)
    return parser.parse_args()
def map_job(job, func, inputs, *args, **kwargs):
    """
    Spawns a tree of jobs to avoid overloading the number of jobs spawned by a single parent.
    This function is appropriate to use when batching samples greater than 1,000.

    :param JobFunctionWrappingJob job: passed automatically by Toil
    :param function func: Function to spawn dynamically, passes one sample as first argument
    :param list inputs: Array of samples to be batched
    :param list args: any arguments to be passed to the function
    """
    # num_partitions isn't exposed as an argument in order to be transparent to the user.
    # The value for num_partitions is a tested value
    num_partitions = 100
    # Floor division: under Python 3, "/" would yield a float, which breaks
    # partitions() (range() rejects a float step). Python 2's "/" on ints
    # already floored, so this preserves the original behavior.
    partition_size = len(inputs) // num_partitions
    if partition_size > 1:
        # Recurse: each child handles one partition, fanning out the tree.
        for partition in partitions(inputs, partition_size):
            job.addChildJobFn(map_job, func, partition, *args, **kwargs)
    else:
        # Small enough batch: spawn one child job per sample directly.
        for sample in inputs:
            job.addChildJobFn(func, sample, *args, **kwargs)
def partitions(l, partition_size):
    """
    Yield consecutive chunks of ``partition_size`` elements from *l*;
    the final chunk may be shorter.

    >>> list(partitions([], 10))
    []
    >>> list(partitions([1,2,3,4,5], 1))
    [[1], [2], [3], [4], [5]]
    >>> list(partitions([1,2,3,4,5], 2))
    [[1, 2], [3, 4], [5]]
    >>> list(partitions([1,2,3,4,5], 5))
    [[1, 2, 3, 4, 5]]

    :param list l: List to be partitioned
    :param int partition_size: Size of partitions
    """
    # int() guards against callers that computed the size with true division,
    # which yields a float under Python 3.
    step = int(partition_size)
    # range() replaces Python 2's xrange(), which does not exist in Python 3.
    for i in range(0, len(l), step):
        yield l[i:i + step]
def main():
    """Entry point: parse options, read the sample manifest, and launch the Toil run."""
    args = cli()
    # Use a context manager so the manifest file handle is closed
    # (the original open(...).readlines() leaked the handle).
    with open(args.manifest, 'r') as manifest:
        samples = [x.strip() for x in manifest.readlines() if not x.isspace()]
    # Start Toil run
    with Toil(args) as toil:
        if not toil.options.restart:
            toil.start(Job.wrapJobFn(workflow, samples, args))
        else:
            toil.restart()


if __name__ == '__main__':
    main()
/Firefly%20III%20API%20Python%20Client-1.5.6.post2.tar.gz/Firefly III API Python Client-1.5.6.post2/firefly_iii_client/model/system_info.py | import re # noqa: F401
import sys # noqa: F401
from firefly_iii_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from firefly_iii_client.exceptions import ApiAttributeError
def lazy_import():
    # Deferred import published via globals() — generated OpenAPI pattern,
    # presumably to break import cycles between models; confirm against generator docs.
    from firefly_iii_client.model.system_info_data import SystemInfoData
    globals()['SystemInfoData'] = SystemInfoData
class SystemInfo(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-restricted or validated properties for this schema.
    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'data': (SystemInfoData,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'data': 'data',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, data, *args, **kwargs):  # noqa: E501
        """SystemInfo - a model defined in OpenAPI

        Args:
            data (SystemInfoData):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass __init__ so deserialization can also populate read-only attributes
        # (__init__ raises ApiAttributeError for those).
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.data = data
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, data, *args, **kwargs):  # noqa: E501
        """SystemInfo - a model defined in OpenAPI

        Args:
            data (SystemInfoData):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.data = data
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
/Hermes_lnestelroad-1.0.2-py3-none-any.whl/Hermes/Message.py |
# System modules
import json
import time
import struct
import logging
from typing import Dict, List, Any
# Third party modules
import zmq
from Hermes.DBP import command_checks, commands
from Hermes.zhelpers import dump
# NOTE: This is an example of including zeromq sockets within the wrapper as opposed to how Heartbeats deal with events.
# Refer to the Clone patter KVMsg for reference
class Message():
    """
    A class to handle all broker related messaging including asserting formats, sending, and receiving.
    Requires a socket to pull messages off of and is capable of handling JSON, buffers(arrays),
    strings, pickles, and custom serializations.
    Attributes
    ----------
    socket : zmq.Socket
        a socket from which to pull messages off of and send through
    incoming : List[bytes]
        raw frames of the last received multipart message (consumed during parsing)
    outgoing : List[bytes]
        The outgoing multi part message with the first frame containing the address or the requestor
        and a blank delimiter frame.
    valid : bool
        Used to determine if the incoming message adheres to the formating protocol
    command : bytes
        command frame of the last received message
    return_addr : bytes
        identity frame of the requesting peer (only set for ROUTER sockets)
    time : float
        sender-side timestamp unpacked from the last received message
    body : Any
        payload frames remaining after the header frames are consumed
    """
    def __init__(self, socket: zmq.Socket, logger=None):
        """Wrap *socket* and initialize empty message state.

        A module-level logger is created when *logger* is not supplied.
        """
        self.valid: bool = True
        self.socket: zmq.Socket = socket
        self.body: Any = None
        self.incoming: List[bytes] = None
        self.command: bytes = None
        self.return_addr: bytes = None
        self.outgoing: List[bytes] = []
        self.time: float = None
        if logger is not None:
            self.logger: logging.Logger = logger
        else:
            self.logger: logging.Logger = logging.getLogger(__name__)
    def recv(self, display=False):
        """
        Receives the first multipart message on behalf of the polled socket,
        validates its envelope (delimiter, command, timestamp) and caches the
        payload in ``self.body``.  An invalid message triggers an error reply.
        Parameters
        ----------
        display : bool
            When True, print the raw incoming frames to the console.
        """
        self.incoming = self.socket.recv_multipart()
        self.incoming_raw = self.incoming.copy() # For debugging
        # Req, Rep, and Dealer sockets already do this part. Routers must do it manually
        if self.socket.socket_type == zmq.ROUTER:
            # Caches the return address of the requestor
            self.return_addr = self.incoming.pop(0)
        # Checks for blank delimiter
        # NOTE(review): the delimiter is popped unconditionally, so non-ROUTER
        # sockets are assumed to deliver DEALER-style envelopes -- confirm.
        delimiter = self.incoming.pop(0)
        if delimiter != b'':
            self.valid = False
        # A valid message needs at least command, timestamp and one body frame.
        if len(self.incoming) >= 3:
            # Command validity is left up to the service.
            self.command = self.incoming.pop(0)
            self.time = struct.unpack('f', self.incoming.pop(0))[0]
            self.body = self.incoming
        else:
            self.valid = False
        if not self.valid:
            self.logger.info("Incoming message invalid. Disregarding...")
            self.send(invalid=True)
        if display:
            self.display_envelope(raw=True, message=self.incoming_raw)
    def send(self, command='', body='', display=False, invalid=False):
        """
        Sends the current multipart outgoing message attribute on behalf of the polled
        socket.
        Parameters
        ----------
        command: str
            The command with which to send the message with.
        body: Any
            The payload for which the message will hold.
        display : bool
            A flag for displaying outgoing message frames to the console as it sends
        invalid : bool
            When True, an error payload is sent instead of *body*.
        """
        # Outgoing message header formating. ORDER MATTERS
        # NOTE(review): self.outgoing is never cleared here, so calling send()
        # twice on the same instance resends earlier frames -- confirm the
        # intended usage is one Message object per exchange.
        if self.socket.socket_type == zmq.ROUTER:
            self.add_frame(self.return_addr)
        self.add_frame('')
        self.add_frame(command)
        self.add_frame(time.time())
        if invalid:
            self.add_frame("Error: Invalid Message Envelope.")
        else:
            self.add_frame(body)
        if display:
            self.display_envelope(raw=True, message=self.outgoing)
        self.logger.debug("Putting message on outgoing queue.")
        self.socket.send_multipart(self.outgoing)
    def add_frame(self, body):
        """
        Converts objects to zmq frame(s) and appends it/them to the multipart outgoing message attribute.
        Supported types: bytes (as-is), str (utf-8), dict (JSON utf-8),
        float/int (struct-packed).  Any other type is silently dropped.
        Parameters
        ----------
        body : Any
            This is the message to append to the outgoing message attribute.
        """
        # TODO: Allow for positional placement parameters
        # TODO: Implement with msgpack.
        # TODO: Standardize body as json
        if type(body) == bytes:
            self.outgoing.append(body)
        elif type(body) == str:
            self.outgoing.append(bytes(body, 'utf-8'))
        elif type(body) == dict:
            self.outgoing.append(bytes(json.dumps(body), 'utf-8'))
        elif type(body) == float:
            self.outgoing.append(struct.pack('f', body))
        elif type(body) == int:
            self.outgoing.append(struct.pack('i', body))
    def display_envelope(self, raw=True, message: List[bytes] = None):
        """
        Prints out the frames of a message to the console.
        Parameters
        ----------
        raw : bool, default=True
            When True (and *message* is given) print each raw frame;
            otherwise print the parsed header/body attributes.
        message : List[bytes], optional
            The raw frames to display (e.g. ``incoming_raw`` or ``outgoing``).
        """
        if raw and message is not None:
            for index, frame in enumerate(message):
                print(f"\tFrame {index}: {frame}")
        else:
            print(
                f"Message Frames: \n\tReturn Address:\t{self.return_addr}\n\tTime Sent:\t{self.time}\n\tCommand:\t{self.command}\n\tBody:\t\t{self.body}")
/BigJob2-0.54.post73.tar.gz/BigJob2-0.54.post73/ez_setup.py | import os
import shutil
import sys
import tempfile
import tarfile
import optparse
import subprocess
import platform
import textwrap
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
# Setuptools version bootstrapped when none is importable, and the PyPI
# base URL (must end with '/') the sdist tarball is fetched from.
DEFAULT_VERSION = "2.1"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
def _python_cmd(*args):
args = (sys.executable,) + args
return subprocess.call(args) == 0
def _install(tarball, install_args=()):
    """Unpack *tarball* into a temp dir and run ``setup.py install`` there.

    Returns 2 when the install step fails (used by main() as the process
    exit code); returns None on success.  The temporary directory and the
    caller's working directory are always restored/cleaned up.
    """
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()
        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        # installing
        log.warn('Installing Setuptools')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2
    finally:
        # Restore the caller's working directory and drop the temp tree.
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
def _build_egg(egg, tarball, to_dir):
    """Unpack *tarball* and build a setuptools egg into *to_dir*.

    *egg* is the path the caller expects the built egg to appear at;
    an IOError is raised when the build did not produce it.
    """
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()
        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        # building an egg
        log.warn('Building a Setuptools egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
    """Download (if needed), build and import setuptools *version*.

    Builds an egg under *to_dir*, prepends it to sys.path, imports
    setuptools from it, and flags the egg for later self-installation
    via ``setuptools.bootstrap_install_from``.
    """
    egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
                       % (version, sys.version_info[0], sys.version_info[1]))
    if not os.path.exists(egg):
        tarball = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, tarball, to_dir)
    sys.path.insert(0, egg)
    # Remove previously-imported pkg_resources if present (see
    # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
    if 'pkg_resources' in sys.modules:
        del sys.modules['pkg_resources']
    import setuptools
    setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
        to_dir=os.curdir, download_delay=15):
    """Ensure a setuptools version >= *version* is importable.

    Falls back to downloading and bootstrapping setuptools when it is
    missing or too old.  Exits the process (code 2) when an old version
    is already imported and therefore cannot be replaced in-process.
    """
    to_dir = os.path.abspath(to_dir)
    rep_modules = 'pkg_resources', 'setuptools'
    imported = set(sys.modules).intersection(rep_modules)
    try:
        import pkg_resources
    except ImportError:
        # No pkg_resources at all: bootstrap from scratch.
        return _do_download(version, download_base, to_dir, download_delay)
    try:
        pkg_resources.require("setuptools>=" + version)
        return
    except pkg_resources.DistributionNotFound:
        return _do_download(version, download_base, to_dir, download_delay)
    except pkg_resources.VersionConflict as VC_err:
        if imported:
            msg = textwrap.dedent("""
                The required version of setuptools (>={version}) is not available,
                and can't be installed while this script is running. Please
                install a more recent version first, using
                'easy_install -U setuptools'.
                (Currently using {VC_err.args[0]!r})
                """).format(VC_err=VC_err, version=version)
            sys.stderr.write(msg)
            sys.exit(2)
        # otherwise, reload ok
        del pkg_resources, sys.modules['pkg_resources']
        return _do_download(version, download_base, to_dir, download_delay)
def _clean_check(cmd, target):
"""
Run the command to download target. If the command fails, clean up before
re-raising the error.
"""
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
if os.access(target, os.F_OK):
os.unlink(target)
raise
def download_file_powershell(url, target):
    """
    Download the file at url to target using Powershell (which will validate
    trust). Raise an exception if the command cannot complete.
    """
    target = os.path.abspath(target)
    # %(url)r / %(target)r interpolate the local variables via vars();
    # relies on Python repr() quoting being acceptable to PowerShell --
    # confirm for paths containing quotes.
    cmd = [
        'powershell',
        '-Command',
        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(),
    ]
    _clean_check(cmd, target)
def has_powershell():
    """Return True if a working ``powershell`` executable is available.

    Only ever True on Windows; on other platforms the probe is skipped
    entirely.
    """
    if platform.system() != 'Windows':
        return False
    cmd = ['powershell', '-Command', 'echo test']
    # 'with' guarantees the devnull handle is closed; 'except Exception'
    # replaces the original bare 'except:', which also swallowed
    # KeyboardInterrupt and SystemExit.
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except Exception:
            # powershell missing (OSError) or failing (CalledProcessError).
            return False
    return True
download_file_powershell.viable = has_powershell
def download_file_curl(url, target):
    """Fetch *url* into *target* using the ``curl`` command-line tool."""
    command = ['curl', url, '--silent', '--output', target]
    _clean_check(command, target)
def has_curl():
    """Return True if a ``curl`` executable is available on PATH."""
    cmd = ['curl', '--version']
    # 'with' guarantees the devnull handle is closed; 'except Exception'
    # replaces the original bare 'except:', which also swallowed
    # KeyboardInterrupt and SystemExit.
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except Exception:
            # curl missing (OSError) or exiting non-zero (CalledProcessError).
            return False
    return True
download_file_curl.viable = has_curl
def download_file_wget(url, target):
    """Fetch *url* into *target* using the ``wget`` command-line tool."""
    command = ['wget', url, '--quiet', '--output-document', target]
    _clean_check(command, target)
def has_wget():
    """Return True if a ``wget`` executable is available on PATH."""
    cmd = ['wget', '--version']
    # 'with' guarantees the devnull handle is closed; 'except Exception'
    # replaces the original bare 'except:', which also swallowed
    # KeyboardInterrupt and SystemExit.
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except Exception:
            # wget missing (OSError) or exiting non-zero (CalledProcessError).
            return False
    return True
download_file_wget.viable = has_wget
def download_file_insecure(url, target):
    """Fetch *url* into *target* with plain urllib.

    No certificate validation is performed, hence "insecure".  The whole
    payload is buffered in memory and written in one block so an
    interrupted transfer cannot leave a file that looks complete.
    """
    try:
        from urllib.request import urlopen
    except ImportError:
        # Python 2 fallback.
        from urllib2 import urlopen
    response = None
    outfile = None
    try:
        response = urlopen(url)
        payload = response.read()
        outfile = open(target, "wb")
        outfile.write(payload)
    finally:
        # Close whichever handles were actually opened.
        for handle in (response, outfile):
            if handle:
                handle.close()
download_file_insecure.viable = lambda: True
def get_best_downloader():
    """Return the most trustworthy downloader available on this system.

    Candidates are probed in order of preference (powershell, curl,
    wget) and fall back to the always-available, non-validating
    urllib-based reader.
    """
    preference = (
        download_file_powershell,
        download_file_curl,
        download_file_wget,
        download_file_insecure,
    )
    for candidate in preference:
        if candidate.viable():
            return candidate
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
        to_dir=os.curdir, delay=15,
        downloader_factory=get_best_downloader):
    """Download setuptools from a specified location and return its filename
    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.
    ``downloader_factory`` should be a function taking no arguments and
    returning a function for downloading a URL to a target.

    NOTE(review): *delay* is accepted for interface compatibility but is
    never used in this body -- no pause actually happens.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    tgz_name = "setuptools-%s.tar.gz" % version
    url = download_base + tgz_name
    saveto = os.path.join(to_dir, tgz_name)
    if not os.path.exists(saveto):  # Avoid repeated downloads
        log.warn("Downloading %s", url)
        downloader = downloader_factory()
        downloader(url, saveto)
    return os.path.realpath(saveto)
def _extractall(self, path=".", members=None):
    """Extract all members from the archive to the current working
    directory and set owner, modification time and permissions on
    directories afterwards. `path' specifies a different directory
    to extract to. `members' is optional and must be a subset of the
    list returned by getmembers().

    Standalone replacement for ``TarFile.extractall`` -- called as
    ``_extractall(tar)``, so *self* is a ``tarfile.TarFile`` instance.
    """
    import copy
    import operator
    from tarfile import ExtractError
    directories = []
    if members is None:
        members = self
    for tarinfo in members:
        if tarinfo.isdir():
            # Extract directories with a safe mode.
            directories.append(tarinfo)
            tarinfo = copy.copy(tarinfo)
            tarinfo.mode = 448 # decimal for oct 0700
        self.extract(tarinfo, path)
    # Reverse sort directories.
    # Deepest paths first, so children get their metadata before parents.
    directories.sort(key=operator.attrgetter('name'), reverse=True)
    # Set correct owner, mtime and filemode on directories.
    for tarinfo in directories:
        dirpath = os.path.join(path, tarinfo.name)
        try:
            self.chown(tarinfo, dirpath)
            self.utime(tarinfo, dirpath)
            self.chmod(tarinfo, dirpath)
        except ExtractError as e:
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the setuptools package
"""
return ['--user'] if options.user_install else []
def _parse_args():
    """
    Parse the command line for options.

    Recognized flags: ``--user`` (install into the user site-packages),
    ``--download-base URL`` (alternative download location), and
    ``--insecure`` (force the non-validating urllib downloader).
    Returns the optparse options object; positional arguments are ignored.
    """
    parser = optparse.OptionParser()
    parser.add_option(
        '--user', dest='user_install', action='store_true', default=False,
        help='install in user site package (requires Python 2.6 or later)')
    parser.add_option(
        '--download-base', dest='download_base', metavar="URL",
        default=DEFAULT_URL,
        help='alternative URL from where to download the setuptools package')
    parser.add_option(
        '--insecure', dest='downloader_factory', action='store_const',
        const=lambda: download_file_insecure, default=get_best_downloader,
        help='Use internal, non-validating downloader'
    )
    options, args = parser.parse_args()
    # positional arguments are ignored
    return options
def main(version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall.

    Returns the status of _install() (2 on failure, None on success),
    which the __main__ guard passes to sys.exit().
    NOTE(review): the *version* parameter is accepted but never used --
    download_setuptools() is called with its own default version.
    """
    options = _parse_args()
    tarball = download_setuptools(download_base=options.download_base,
        downloader_factory=options.downloader_factory)
    return _install(tarball, _build_install_args(options))
if __name__ == '__main__':
    # Propagate the installer's status code to the shell.
    sys.exit(main())
/Boorunaut-0.4.3-py3-none-any.whl/booru/forms.py | from django import forms
from django.conf import settings
from django.contrib.admin.widgets import AdminTextareaWidget
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from django.utils import six
from taggit.forms import TagField, TagWidget
from taggit.utils import edit_string_for_tags
from booru import utils
from booru.account.forms import UsernameExistsField
from booru.account.models import Timeout
from booru.core.models import BannedHash
from booru.models import Category, Gallery, Implication, Post, PostTag
def validate_sources(source):
    """Normalize a newline-separated list of source URLs.

    Lines without a scheme get an ``http://`` prefix before validation.
    Returns the normalized, newline-joined string, or None as soon as
    any line fails Django's URL validation.
    """
    validator = URLValidator()
    normalized = []
    for line in source.splitlines():
        if '://' not in line:
            line = 'http://' + line
        try:
            validator(line)
        except ValidationError:
            return None
        normalized.append(line)
    return "\n".join(normalized)
class CreatePostForm(forms.ModelForm):
    '''Form for creating a post from an uploaded file or a remote media URL.'''
    media = forms.FileField(required=False)
    media_url = forms.URLField(required=False)
    sample = forms.ImageField(required=False)
    preview = forms.ImageField(required=False)
    tags = TagField(required=True, help_text="Required: Choose one or more tags.")
    source = forms.CharField(required=False)
    rating = forms.IntegerField()
    class Meta:
        model = Post
        fields = ["media", "media_url", "sample", "preview", "tags", "rating", "source", "description"]
    def __init__(self, *args, **kwargs):
        '''Attach Bootstrap CSS classes to every field widget.'''
        super(CreatePostForm, self).__init__(*args, **kwargs)
        self.fields['media'].widget = forms.FileInput(attrs={'class': 'custom-file-input'})
        self.fields['media_url'].widget = forms.URLInput(attrs={'class': 'form-control'})
        self.fields['media_url'].required = False
        self.fields['source'].widget = forms.Textarea(attrs={'class': 'form-control'})
        self.fields['rating'].widget = forms.Select(attrs={'class': 'form-control'},
                                                    choices=Post.RATING_CHOICES)
        self.fields['description'].widget = forms.Textarea(attrs={'class': 'form-control'})
        self.fields['tags'].widget = forms.TextInput(attrs={'class': 'form-control'})
    def clean( self ):
        '''Validate the upload: exactly one of media/media_url, a valid
        image or video, within the size limit, and not on the ban list.'''
        cleaned_data = self.cleaned_data
        media_file = cleaned_data.get('media')
        media_url = cleaned_data.get('media_url')
        detected_media = None
        if media_file is None and not media_url:
            raise forms.ValidationError("Please select an image or video.")
        elif media_file is not None and media_url:
            raise forms.ValidationError("Please only upload one image or video.")
        elif media_file is not None:
            detected_media = media_file
        elif media_url:
            # Fetch the remote file so it can be validated like a direct upload.
            detected_media = utils.get_remote_image_as_InMemoryUploadedFile(media_url)
        if not utils.get_pil_image_if_valid(detected_media):
            if not utils.check_video_is_valid(detected_media):
                raise forms.ValidationError("Please upload a valid image or video.")
        if detected_media.size >= settings.BOORUNAUT_MAX_SIZE_FILE:
            max_size_mb = settings.BOORUNAUT_MAX_SIZE_FILE / 1024 / 1024
            raise forms.ValidationError("Please upload a file with less than {} MB.".format(max_size_mb))
        # Reject files whose MD5 checksum is on the moderation ban list.
        md5_checksum = utils.get_file_md5(detected_media)
        if BannedHash.objects.filter(content=md5_checksum).exists():
            raise forms.ValidationError("This file is not allowed to be uploaded. Contact the staff.")
        self.cleaned_data['media'] = detected_media
        return cleaned_data
    def clean_source(self):
        '''Normalize and validate the newline-separated source URLs.'''
        source = self.cleaned_data['source']
        if source:
            source = validate_sources(self.cleaned_data['source'])
            if not source:
                raise forms.ValidationError("Please use valid URLs.")
        return source
class EditPostForm(forms.ModelForm):
    '''Form for editing an existing post.'''
    rating = forms.IntegerField()
    parent = forms.IntegerField(required=False)
    source = forms.CharField(required=False)
    tags = TagField(required=False)
    class Meta:
        model = Post
        fields = ["rating", "parent", "source", "tags", "description"]
    def clean_source(self):
        '''Normalize and validate the newline-separated source URLs.'''
        source = self.cleaned_data['source']
        if source:
            source = validate_sources(self.cleaned_data['source'])
            if not source:
                raise forms.ValidationError("Please use valid URLs.")
        return source
    def __init__(self, *args, **kwargs):
        '''Attach Bootstrap CSS classes to every field widget.'''
        super(EditPostForm, self).__init__(*args, **kwargs)
        self.fields['rating'].widget = forms.Select(attrs={'class': 'form-control'},
                                                    choices=Post.RATING_CHOICES)
        self.fields['parent'].widget = forms.NumberInput(attrs={'class': 'form-control'})
        self.fields['source'].widget = forms.Textarea(attrs={'class': 'form-control', 'rows':4, 'cols':15})
        self.fields['description'].widget = forms.Textarea(attrs={'class': 'form-control', 'rows':4, 'cols':15})
class TagListSearchForm(forms.Form):
    '''Form for filtering the tag list by name and category.'''
    tags = forms.CharField(required=False)
    category = forms.ModelChoiceField(queryset=Category.objects.all(),
                                      widget=forms.Select(attrs={'class': 'form-control form-control-sm'}),
                                      required=False, empty_label=None)
    class Meta:
        fields = "__all__"
    def __init__(self, *args, **kwargs):
        super(TagListSearchForm, self).__init__(*args, **kwargs)
        self.fields['tags'].widget = forms.TextInput(attrs={'class': 'form-control form-control-sm'})
class TagEditForm(forms.ModelForm):
    '''Form for editing a tag's category, description and associations.'''
    category = forms.ModelChoiceField(queryset=Category.objects.all(),
                                      widget=forms.Select(attrs={'class': 'form-control'}),
                                      required=False, empty_label=None)
    associated_user_name = forms.CharField(required=False)
    aliases = TagField(required=False, help_text="Separate the aliases with spaces. They are used to find tags easier on the search bar.")
    def __init__(self, *args, **kwargs):
        '''Attach Bootstrap CSS classes to the free-text field widgets.'''
        super(TagEditForm, self).__init__(*args, **kwargs)
        self.fields['description'].widget = forms.Textarea(attrs={'class': 'form-control'})
        self.fields['associated_link'].widget = forms.Textarea(attrs={'class': 'form-control'})
        self.fields['associated_user_name'].widget = forms.Textarea(attrs={'class': 'form-control'})
    class Meta:
        model = PostTag
        fields = ["category", "description", "associated_link", "associated_user_name", "aliases"]
class ImplicationCreateForm(forms.Form):
    '''Form for proposing a tag implication (from_tag -> to_tag).'''
    from_tag = forms.CharField(required=True)
    to_tag = forms.CharField(required=True)
    class Meta:
        fields = "__all__"
    def __init__(self, *args, **kwargs):
        super(ImplicationCreateForm, self).__init__(*args, **kwargs)
        self.fields['from_tag'].widget = forms.TextInput(attrs={'class': 'form-control tag-search'})
        self.fields['to_tag'].widget = forms.TextInput(attrs={'class': 'form-control tag-search'})
    def clean_from_tag(self):
        # Normalize to lower case.
        from_tag = self.cleaned_data['from_tag']
        return from_tag.lower()
    def clean_to_tag(self):
        # Normalize to lower case.
        to_tag = self.cleaned_data['to_tag']
        return to_tag.lower()
class ImplicationFilterForm(forms.Form):
    '''Form for filtering implications by name and status.'''
    name = forms.CharField(required=False)
    # The empty first choice means "any status".
    status = forms.ChoiceField(required=False, choices=(('', '-----'),) + Implication.STATUS_CHOICES)
    class Meta:
        fields = "__all__"
    def __init__(self, *args, **kwargs):
        super(ImplicationFilterForm, self).__init__(*args, **kwargs)
class MassRenameForm(forms.Form):
    '''Form collecting parameters for a mass tag rename.'''
    filter_by = forms.CharField(required=False)
    when = forms.CharField(required=True)
    replace_with = forms.CharField(required=True)
    class Meta:
        fields = "__all__"
    def __init__(self, *args, **kwargs):
        super(MassRenameForm, self).__init__(*args, **kwargs)
        self.fields['filter_by'].widget = forms.TextInput(attrs={'class': 'form-control'})
        self.fields['when'].widget = forms.TextInput(attrs={'class': 'form-control'})
        self.fields['replace_with'].widget = forms.TextInput(attrs={'class': 'form-control'})
class BanUserForm(forms.ModelForm):
    '''Form for creating a user timeout (ban) with expiration and reason.'''
    username = UsernameExistsField(
        max_length=254,
        widget=forms.TextInput(attrs={'autofocus': True, 'class': 'form-control'}),
    )
    # Expiration is entered as month/day/year (see the placeholder below).
    expiration = forms.DateTimeField(required=True, input_formats=['%m/%d/%Y'])
    reason = forms.CharField(required=True)
    class Meta:
        model = Timeout
        fields = ["username", "expiration", "reason"]
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields['username'].widget = forms.TextInput(attrs={'class': 'form-control', 'autofocus': True})
        self.fields['expiration'].widget = forms.TextInput(attrs={'class': 'form-control', 'placeholder' : 'month/day/year'})
        self.fields['reason'].widget = forms.TextInput(attrs={'class': 'form-control'})
class GalleryCreateForm(forms.ModelForm):
    '''Form for creating a gallery.'''
    name = forms.CharField(required=True)
    description = forms.CharField(required=False)
    # posts_ids is a form-only field (not in Meta.fields); presumably
    # processed by the view -- confirm.
    posts_ids = forms.CharField(required=False)
    class Meta:
        model = Gallery
        fields = ["name", "description"]
    def __init__(self, *args, **kwargs):
        super(GalleryCreateForm, self).__init__(*args, **kwargs)
        self.fields['name'].widget = forms.TextInput(attrs={'class': 'form-control'})
        self.fields['description'].widget = forms.Textarea(attrs={'class': 'form-control'})
        self.fields['posts_ids'].widget = forms.Textarea(attrs={'class': 'form-control'})
class GalleryEditForm(forms.ModelForm):
    '''Form for editing a gallery.'''
    name = forms.CharField(required=True)
    description = forms.CharField(required=True)
    # posts_ids is a form-only field (not in Meta.fields); presumably
    # processed by the view -- confirm.
    posts_ids = forms.CharField(required=True)
    class Meta:
        model = Gallery
        fields = ["name", "description"]
    def __init__(self, *args, **kwargs):
        super(GalleryEditForm, self).__init__(*args, **kwargs)
        self.fields['name'].widget = forms.TextInput(attrs={'class': 'form-control'})
        self.fields['description'].widget = forms.Textarea(attrs={'class': 'form-control'})
        self.fields['posts_ids'].widget = forms.Textarea(attrs={'class': 'form-control'})
class GalleryListSearchForm(forms.Form):
    '''Form for searching galleries by name in the gallery list.'''
    name = forms.CharField(required=False)
    class Meta:
        fields = "__all__"
    def __init__(self, *args, **kwargs):
        super(GalleryListSearchForm, self).__init__(*args, **kwargs)
        self.fields['name'].widget = forms.TextInput(attrs={'class': 'form-control form-control-sm'})
class SiteConfigurationForm(forms.Form):
    '''Form for editing site-wide settings (title, ToS, privacy policy, announcement).'''
    site_title = forms.CharField(required=True, help_text="The name of the website to be shown.")
    site_description = forms.CharField(required=False, help_text="The description of the website for search engines.")
    welcome_page = forms.BooleanField(required=False)
    terms_of_service = forms.CharField(required=False)
    privacy_policy = forms.CharField(required=False)
    announcement = forms.CharField(required=False, help_text="The contents here will be shown on the top of the website for all users. Markdown is enabled.")
    class Meta:
        fields = "__all__"
    def __init__(self, *args, **kwargs):
        super(SiteConfigurationForm, self).__init__(*args, **kwargs)
        self.fields['site_title'].widget = forms.TextInput(attrs={'class': 'form-control'})
        self.fields['site_description'].widget = forms.TextInput(attrs={'class': 'form-control'})
        self.fields['welcome_page'].widget = forms.CheckboxInput(attrs={'class': 'form-control', 'data-toggle': 'toggle'})
        self.fields['terms_of_service'].widget = forms.Textarea(attrs={'class': 'form-control'})
        self.fields['privacy_policy'].widget = forms.Textarea(attrs={'class': 'form-control'})
        self.fields['announcement'].widget = forms.Textarea(attrs={'class': 'form-control'})
/MariaDB_SQLBuilder-1.0.0a5-py3-none-any.whl/mariadb_sqlbuilder/builder/update_builder.py | from json import dumps
from typing import Union, Dict, List
from mariadb_sqlbuilder.helpful.arithmetic import Arithmetic
from .base_builder import ConditionsBuilder, _get_tcn, _transform_value_valid
from .join_builder import BaseJoinExtension
class UpdateBuilder(ConditionsBuilder, BaseJoinExtension):
    """
    Builder for SQL UPDATE statements against a single table, with
    optional JOINed tables, JSON-driven column assignment and the
    WHERE-condition machinery inherited from ConditionsBuilder.
    """
    def __init__(self, tb, **kwargs):
        ConditionsBuilder.__init__(self, tb, **kwargs)
        BaseJoinExtension.__init__(self, tb, **kwargs)
        # check if variable already exists, else init it
        self.__toSet = {}
        self.sure_not_use_conditions = False
        self.__subSets = []
        self.__jsonBuildings = []
    def set(self, column, value: Union[str, int, None, Arithmetic]):
        """
        Queue an assignment of *value* to *column* of the main table.
        :param column: column name in the builder's main table
        :param value: new value (passed through _transform_value_valid)
        :return: self, for chaining
        """
        self.__toSet[_get_tcn(self.tb.table, column)] = _transform_value_valid(value)
        return self
    def join_set(self, join_table: str, join_column: str, value: Union[str, int, None, Arithmetic]):
        """
        Queue an assignment of *value* to a column of a joined table.
        :param join_table: name of the joined table
        :param join_column: column name in that table
        :param value: new value (passed through _transform_value_valid)
        :return: self, for chaining
        """
        self.__toSet[_get_tcn(join_table, join_column)] = _transform_value_valid(value)
        return self
    def im_sure_im_not_use_conditions(self, im_sure: bool = True):
        """
        Explicitly allow executing the UPDATE without a WHERE clause.
        :param im_sure: confirmation flag
        :return: self, for chaining
        """
        self.sure_not_use_conditions = im_sure
        return self
    def execute(self):
        """
        Build and run the UPDATE (plus any queued sub-statements) on a
        cursor from the connection pool, committing at the end.
        :raises PermissionError: when no WHERE conditions are set and the
            caller has not confirmed via im_sure_im_not_use_conditions().
        """
        if not self.is_conditions() and not self.sure_not_use_conditions:
            raise PermissionError('Update Builder: You are not sure enough not to use where')
        cursor = self.tb.connect.get_available_cursor()
        cursor.execute(
            self.get_sql()
        )
        # NOTE(review): __subSets is never populated within this class
        # (name mangling prevents outside writes) -- confirm whether this
        # branch is dead code or reserved for future use.
        if self.__subSets:
            for subset in self.__subSets:
                cursor.execute(subset.get_sql())
        cursor.connection.commit()
        self.tb.connect.release_cursor(cursor)
    def get_sql(self) -> str:
        """
        Compose the final UPDATE statement, first expanding any queued
        JSON payloads into individual SET assignments.
        :return: the SQL string, terminated with ';'
        """
        for element in self.__jsonBuildings:
            self.__set_json(element[0], element[1])
        sql = f"UPDATE {self.tb.table} " \
              f"{' '.join(self._joins) if self._joins else ''} " \
              f"SET " \
              f"{', '.join([f'{key} = {value}' for (key, value) in self.__toSet.items()])} " \
              f"{self._get_where_sql()};"
        return sql
    def __set_json(self, json: Dict[str, any], pop: List[str] = None):
        """
        Expand a dict payload into set()/join_set() calls.
        Nested dicts whose key matches a joined table become join_set()
        assignments (unless the key is listed in *pop*); other nested
        dicts are serialized to a JSON string.
        :param json: mapping of column (or join-table) names to values
        :param pop: keys to treat as plain JSON payloads, not join tables
        :return: self, for chaining
        """
        if pop is None:
            pop = []
        key: str
        value: any
        join_keys = [x.table for x in self._join_builders]
        for key, value in json.items():
            if isinstance(value, dict):
                if key in join_keys and not key in pop:
                    for sub_key, sub_value in value.items():
                        self.join_set(key, sub_key, sub_value)
                else:
                    self.set(key, dumps(value))
            else:
                self.set(key, value)
        return self
    def set_json(self, json: Dict[str, any], pop: List[str] = None):
        """
        Queue a dict payload; it is expanded lazily in get_sql().
        :param json: dict with data example from select
        :param pop: pop keys from the json,
            if you have keys inside that are not a table but a dict/list
        :return: self, for chaining
        """
        self.__jsonBuildings.append([json, pop])
        return self
/FairMongo-5.2.0.tar.gz/FairMongo-5.2.0/FCM/FQ/__init__.py | import json
from F.LOG import Log
# MongoDB sort directions.
DESCENDING = -1
ASCENDING = 1
# Module-level logger instance (rebinds the imported Log class).
Log = Log("FQ")
"""
-> Master Base Query Class/Object/Helper
"""
class AO:
    """Aggregation-stage operator keywords (MongoDB ``$``-prefixed)."""
    MATCH = "$match"
    GROUP = "$group"
    TO_DATE = "$toDate"
    ADD_FIELDS = "$addFields"
    LIMIT = "$limit"
    SORT = "$sort"
class A(AO):
    """One-line builders for single aggregation-pipeline stages.

    NOTE(review): these are class-level lambdas; accessing them through
    an instance binds the instance as the first argument -- use via the
    class (``A.MATCH(...)``) only.
    """
    MATCH = lambda matchQuery: { AO.MATCH: matchQuery }
    LIMIT = lambda value: { AO.LIMIT: value }
    SORT = lambda sortQuery: { AO.SORT: sortQuery }
class AP(A):
    """Pipeline-level helpers built on the stage builders in ``A``."""
    SORT_by_SINGLE_FIELD = lambda fieldName: { AO.SORT: { fieldName: DESCENDING } }
    # Variadic: AP.BUILD_PIPELINE(stage1, stage2, ...) -> [stage1, stage2, ...]
    BUILD_PIPELINE = lambda *stages: [s for s in stages]
class R:
    """Regex pattern builders for Mongo ``$regex`` searches.

    Implemented as static methods rather than bare class-level lambdas:
    a plain function stored on a class becomes a bound method when
    accessed through an instance (``R`` is mixed into ``QBuilder``),
    which made instance access pass the instance as ``search_term``.
    Class-level access (``R.SEARCH(term)``) is unchanged.
    """

    @staticmethod
    def SEARCH(search_term):
        """Loose match: *search_term* anywhere in the field."""
        return fr'.*{search_term}.*'

    @staticmethod
    def SEARCH_STRICT(search_term):
        """Whole-word match on *search_term*."""
        return fr'\b{search_term}\b'
class O:
    """MongoDB query/update operator keywords."""
    REGEX = "$regex"
    SEARCH = "$search"
    SET = "$set"
    PULL = "$pull"
    PUll_ALL = "$pullAll"  # NOTE(review): odd capitalization kept for compatibility.
    OR = "$or"
    NOR = "$nor"
    AND = "$and"
    IN = "$in"
    WHERE = "$where"
    ADD_TO_SET = "$addToSet"
    EACH = "$each"
    TYPE = "$type"
    EQUALS = "$eq"
    NOT_EQUALS = "$ne"
    EXISTS = "$exists"
    NOT = "$not"
    SIZE = "$size"
    # Regex options; 'i' = case-insensitive.
    OPTIONS = '$options'
    i_OPTION = 'i'
    GREATER_THAN_OR_EQUAL = "$gte"
    LESS_THAN_OR_EQUAL = "$lte"
    # Short aliases.
    GTE = GREATER_THAN_OR_EQUAL
    LTE = LESS_THAN_OR_EQUAL
class Q:
    """Builders for MongoDB query documents.

    Implemented as static methods rather than bare class-level lambdas:
    a plain function stored on a class becomes a bound method when
    accessed through an instance (``Q`` is mixed into ``QBuilder``),
    which made instance access silently pass the instance as the first
    argument.  Class-level access (``Q.BASE(k, v)``) is unchanged.
    """

    @staticmethod
    def BASE(key, value):
        """Return ``{key: value}``."""
        return {key: value}

    @staticmethod
    def COMMENTS_AUTHOR(key, value):
        # NOTE(review): *key* was unused in the original lambda; the
        # signature is kept for backward compatibility.
        return {"this.comments.author": value}

    @staticmethod
    def BASE_TWO(key1, value1, key2, value2):
        """Return a two-entry document."""
        return {key1: value1, key2: value2}

    @staticmethod
    def OR(list_of_queries):
        return {O.OR: list_of_queries}

    @staticmethod
    def AND(list_of_queries):
        return {O.AND: list_of_queries}

    @staticmethod
    def REGEX(search_term):
        """Case-insensitive loose regex match."""
        return Q.BASE_TWO(O.REGEX, R.SEARCH(search_term), O.OPTIONS, 'i')

    @staticmethod
    def REGEX_STRICT(search_term):
        """Case-insensitive whole-word regex match."""
        return Q.BASE_TWO(O.REGEX, R.SEARCH_STRICT(search_term), O.OPTIONS, 'i')

    @staticmethod
    def SEARCH(field, search_term):
        return Q.BASE(field, Q.REGEX(search_term))

    @staticmethod
    def SEARCH_EMBEDDED(fieldOne, fieldTwo, search_term):
        """Loose search on a nested field (``fieldOne.fieldTwo``)."""
        return Q.BASE(f"{fieldOne}.{fieldTwo}", Q.REGEX(search_term))

    @staticmethod
    def SEARCH_STRICT(field, search_term):
        return Q.BASE(field, Q.REGEX_STRICT(search_term))

    @staticmethod
    def LTE(value):
        return Q.BASE(O.LESS_THAN_OR_EQUAL, value)

    @staticmethod
    def SIZE(value):
        # NOTE(review): uses O.SET (not O.SIZE), exactly as the original
        # lambda did -- looks like a slip, preserved for compatibility;
        # confirm intent.
        return Q.BASE(O.SET, value)

    @staticmethod
    def EQUALS(value):
        return Q.BASE(O.EQUALS, value)

    @staticmethod
    def NOT_EQUALS(value):
        return Q.BASE(O.NOT_EQUALS, value)

    @staticmethod
    def SET(field, list_value):
        return Q.BASE(O.SET, Q.BASE(field, list_value))

    @staticmethod
    def PULL(value):
        return Q.BASE(O.PULL, value)

    @staticmethod
    def ADD_TO_SET(field, list_value):
        return Q.BASE(O.ADD_TO_SET, Q.BASE(field, Q.BASE(O.EACH, list_value)))

    @staticmethod
    def LESS_THAN_OR_EQUAL(value):
        return Q.BASE(O.LESS_THAN_OR_EQUAL, value)

    @staticmethod
    def GREATER_THAN_OR_EQUAL(value):
        return Q.BASE(O.GREATER_THAN_OR_EQUAL, value)

    @staticmethod
    def FIELD_EXISTENCE(fieldName, doesExist):
        return Q.BASE(fieldName, Q.BASE(O.EXISTS, doesExist))

    @staticmethod
    def FIELD_EQUALS(field, value):
        return Q.BASE(field, Q.EQUALS(value))

    @staticmethod
    def FIELD_NOT_EQUALS(field, value):
        return Q.BASE(field, Q.NOT_EQUALS(value))
class QBuilder(Q, O, R):
    """Stateful helper that accumulates a query document key by key.

    Fixes a shared-state bug: ``query_builder`` used to be a single
    class-level dict mutated by every instance (until the first
    ``clear_query_builder`` rebound it per instance).  It is now created
    per instance in ``__init__``; the empty class-level default is kept
    for backward compatibility with code reading it off the class.
    """
    query_builder = {}

    def __init__(self):
        # Per-instance builder state; without this, all QBuilder
        # instances mutated the same class-level dict.
        self.query_builder = {}

    def add_to_query_builder(self, key, value):
        """Stage ``key: value`` into the query being built."""
        self.query_builder[key] = value

    def get_built_query(self):
        """Return the accumulated query document."""
        return self.query_builder

    def clear_query_builder(self):
        """Reset the builder to an empty query."""
        self.query_builder = {}

    def print_built_query(self):
        """Pretty-print the accumulated query as JSON."""
        obj = json.dumps(self.query_builder, sort_keys=True, indent=4, default=str)
        print(obj)
/COMPAS-1.17.5.tar.gz/COMPAS-1.17.5/src/compas/geometry/transformations/rotation.py | from compas.utilities import flatten
from compas.geometry import normalize_vector
from compas.geometry import cross_vectors
from compas.geometry import length_vector
from compas.geometry import allclose
from compas.geometry.transformations import decompose_matrix
from compas.geometry.transformations import matrix_from_euler_angles
from compas.geometry.transformations import euler_angles_from_matrix
from compas.geometry.transformations import matrix_from_axis_and_angle
from compas.geometry.transformations import axis_and_angle_from_matrix
from compas.geometry.transformations import matrix_from_quaternion
from compas.geometry.transformations import matrix_from_frame
from compas.geometry.transformations import basis_vectors_from_matrix
from compas.geometry.transformations import Transformation
class Rotation(Transformation):
"""Class representing a rotation transformation.
The class contains methods for converting rotation matrices to axis-angle
representations, Euler angles, quaternion and basis vectors.
Parameters
----------
matrix : list[list[float]], optional
A 4x4 matrix (or similar) representing a rotation.
Attributes
----------
quaternion : :class:`~compas.geometry.Quaternion`, read-only
The quaternion from the rotation.
axis_and_angle : tuple[:class:`~compas.geometry.Vector`, float], read-only
The axis and the angle of the rotation.
axis_angle_vector : :class:`~compas.geometry.Vector`, read-only
The axis-angle vector of the rotation.
basis_vectors : tuple[:class:`~compas.geometry.Vector`, :class:`~compas.geometry.Vector`], read-only
The basis vectors of the rotation.
Raises
------
ValueError
If the default constructor is used,
and the provided transformation matrix is not a rotation.
Examples
--------
>>> from compas.geometry import Frame
>>> f1 = Frame([0, 0, 0], [0.68, 0.68, 0.27], [-0.67, 0.73, -0.15])
>>> R = Rotation.from_frame(f1)
>>> args = False, 'xyz'
>>> alpha, beta, gamma = R.euler_angles(*args)
>>> xaxis, yaxis, zaxis = [1, 0, 0], [0, 1, 0], [0, 0, 1]
>>> Rx = Rotation.from_axis_and_angle(xaxis, alpha)
>>> Ry = Rotation.from_axis_and_angle(yaxis, beta)
>>> Rz = Rotation.from_axis_and_angle(zaxis, gamma)
>>> f2 = Frame.worldXY()
>>> f1 == f2.transformed(Rx * Ry * Rz)
True
"""
    def __init__(self, matrix=None, check=True):
        """Store *matrix* after (optionally) validating it is a pure rotation.

        When *check* is true, the matrix is decomposed into Euler angles
        and recomposed; a mismatch means scale/shear/translation crept in
        and a ValueError is raised.
        """
        if matrix:
            _, _, angles, _, _ = decompose_matrix(matrix)
            if check:
                if not allclose(flatten(matrix), flatten(matrix_from_euler_angles(angles))):
                    raise ValueError("This is not a proper rotation matrix.")
        super(Rotation, self).__init__(matrix=matrix)
    @property
    def quaternion(self):
        """The rotation as a :class:`~compas.geometry.Quaternion`."""
        # Local import; presumably avoids a circular import -- confirm.
        from compas.geometry import Quaternion
        return Quaternion.from_matrix(self.matrix)
@property
def axis_and_angle(self):
from compas.geometry import Vector
axis, angle = axis_and_angle_from_matrix(self.matrix)
return Vector(*axis), angle
@property
def axis_angle_vector(self):
axis, angle = self.axis_and_angle
return axis.scaled(angle)
@property
def basis_vectors(self):
from compas.geometry import Vector
xaxis, yaxis = basis_vectors_from_matrix(self.matrix)
return Vector(*xaxis), Vector(*yaxis)
def __repr__(self):
return "Rotation({0!r}, check=False)".format(self.matrix)
@classmethod
def from_axis_and_angle(cls, axis, angle, point=[0, 0, 0]):
"""Construct a rotation transformation from a rotation axis and an angle and an optional point of rotation.
The rotation is based on the right hand rule, i.e. anti-clockwise if the
axis of rotation points towards the observer.
Parameters
----------
axis : [float, float, float] | :class:`~compas.geometry.Vector`
Three numbers that represent the axis of rotation.
angle : float
The rotation angle in radians.
point : [float, float, float] | :class:`~compas.geometry.Point`
A point to perform a rotation around an origin other than [0, 0, 0].
Returns
-------
:class:`~compas.geometry.Rotation`
Notes
-----
The rotation is based on the right hand rule, i.e. anti-clockwise
if the axis of rotation points towards the observer.
Examples
--------
>>> axis1 = normalize_vector([-0.043, -0.254, 0.617])
>>> angle1 = 0.1
>>> R = Rotation.from_axis_and_angle(axis1, angle1)
>>> axis2, angle2 = R.axis_and_angle
>>> allclose(axis1, axis2)
True
>>> allclose([angle1], [angle2])
True
"""
R = cls()
R.matrix = matrix_from_axis_and_angle(axis, angle, point=point)
return R
@classmethod
def from_basis_vectors(cls, xaxis, yaxis):
"""Construct a rotation transformation from basis vectors (= orthonormal vectors).
Parameters
----------
xaxis : [float, float, float] | :class:`~compas.geometry.Vector`
The x-axis of the frame.
yaxis : [float, float, float] | :class:`~compas.geometry.Vector`
The y-axis of the frame.
Returns
-------
:class:`~compas.geometry.Rotation`
Examples
--------
>>> xaxis = [0.68, 0.68, 0.27]
>>> yaxis = [-0.67, 0.73, -0.15]
>>> R = Rotation.from_basis_vectors(xaxis, yaxis)
"""
xaxis = normalize_vector(list(xaxis))
yaxis = normalize_vector(list(yaxis))
zaxis = cross_vectors(xaxis, yaxis)
yaxis = cross_vectors(zaxis, xaxis)
matrix = [
[xaxis[0], yaxis[0], zaxis[0], 0],
[xaxis[1], yaxis[1], zaxis[1], 0],
[xaxis[2], yaxis[2], zaxis[2], 0],
[0, 0, 0, 1],
]
R = cls()
R.matrix = matrix
return R
@classmethod
def from_frame(cls, frame):
"""Construct a rotation transformationn from world XY to frame.
Parameters
----------
frame : [point, vector, vector] | :class:`~compas.geometry.Frame`
A frame describing the targeted Cartesian coordinate system.
Returns
-------
:class:`~compas.geometry.Rotation`
Notes
-----
Creating a rotation from a frame means that we omit all translational
components. If that is unwanted, use ``Transformation.from_frame(frame)``.
Examples
--------
>>> from compas.geometry import Frame
>>> f1 = Frame([1, 1, 1], [0.68, 0.68, 0.27], [-0.67, 0.73, -0.15])
>>> T = Transformation.from_frame(f1)
>>> f2 = Frame.from_transformation(T)
>>> f1 == f2
True
"""
R = cls()
matrix = matrix_from_frame(frame)
matrix[0][3] = 0.0
matrix[1][3] = 0.0
matrix[2][3] = 0.0
R.matrix = matrix
return R
@classmethod
def from_quaternion(cls, quaternion):
"""Construct a rotation transformation` from quaternion coefficients.
Parameters
----------
quaternion : [float, float, float, float] | :class:`~compas.geometry.Quaternion`
Four numbers that represents the four coefficient values of a quaternion.
Returns
-------
:class:`~compas.geometry.Rotation`
Examples
--------
>>> from compas.geometry import allclose
>>> q1 = [0.945, -0.021, -0.125, 0.303]
>>> R = Rotation.from_quaternion(q1)
>>> q2 = R.quaternion
>>> allclose(q1, q2, tol=1e-3)
True
"""
R = cls()
R.matrix = matrix_from_quaternion(quaternion)
return R
@classmethod
def from_axis_angle_vector(cls, axis_angle_vector, point=[0, 0, 0]):
"""Construct a rotation transformation from an axis-angle vector.
Parameters
----------
axis_angle_vector : [float, float, float] | :class:`~compas.geometry.Vector`
Three numbers that represent the axis of rotation and angle of rotation through the vector's magnitude.
point : [float, float, float] | :class:`~compas.geometry.Point`, optional
A point to perform a rotation around an origin other than [0, 0, 0].
Returns
-------
:class:`~compas.geometry.Rotation`
Examples
--------
>>> from compas.geometry import allclose
>>> aav1 = [-0.043, -0.254, 0.617]
>>> R = Rotation.from_axis_angle_vector(aav1)
>>> aav2 = R.axis_angle_vector
>>> allclose(aav1, aav2)
True
"""
angle = length_vector(axis_angle_vector)
return cls.from_axis_and_angle(axis_angle_vector, angle, point)
@classmethod
def from_euler_angles(cls, euler_angles, static=True, axes="xyz", **kwargs):
"""Construct a rotation transformation from Euler angles.
In 3D space any orientation can be achieved by composing three
elemental rotations, rotations about the axes (x,y,z) of a coordinate
system. A triple of Euler angles can be interpreted in 24 ways, which
depends on if the rotations are applied to a static (extrinsic) or
rotating (intrinsic) frame and the order of axes.
Parameters
----------
euler_angles: [float, float, float]
Three numbers that represent the angles of rotations about the
defined axes.
static: bool, optional
If True the rotations are applied to a static frame.
If False, to a rotational.
axes: str, optional
A 3 character string specifying order of the axes.
Returns
-------
:class:`~compas.geometry.Rotation`
Examples
--------
>>> from compas.geometry import allclose
>>> ea1 = 1.4, 0.5, 2.3
>>> args = False, 'xyz'
>>> R1 = Rotation.from_euler_angles(ea1, *args)
>>> ea2 = R1.euler_angles(*args)
>>> allclose(ea1, ea2)
True
>>> alpha, beta, gamma = ea1
>>> xaxis, yaxis, zaxis = [1, 0, 0], [0, 1, 0], [0, 0, 1]
>>> Rx = Rotation.from_axis_and_angle(xaxis, alpha)
>>> Ry = Rotation.from_axis_and_angle(yaxis, beta)
>>> Rz = Rotation.from_axis_and_angle(zaxis, gamma)
>>> R2 = Rx * Ry * Rz
>>> R1 == R2
True
"""
return super(Rotation, cls).from_euler_angles(euler_angles, static, axes)
# split up into two properties
# euler_angles
# rotating_euler_angles
# xyz seems irelevant
# could be added to base Transformation
# always relevant
def euler_angles(self, static=True, axes="xyz"):
"""Returns Euler angles from the rotation according to specified
axis sequence and rotation type.
Parameters
----------
static : bool, optional
If True the rotations are applied to a static frame.
If False, to a rotational.
axes : str, optional
A 3 character string specifying the order of the axes.
Returns
-------
[float, float, float]
The 3 Euler angles.
Examples
--------
>>> from compas.geometry import allclose
>>> ea1 = 1.4, 0.5, 2.3
>>> args = False, 'xyz'
>>> R1 = Rotation.from_euler_angles(ea1, *args)
>>> ea2 = R1.euler_angles(*args)
>>> allclose(ea1, ea2)
True
"""
return euler_angles_from_matrix(self.matrix, static, axes) | PypiClean |
# ranger_modules
This Ansible role hosts a set of modules aimed at manipulating policies on Apache Ranger.
* ranger\_hdfs\_policies: Allow creation/deletion/update of HDFS Ranger policies. Doc [at this location](docs/ranger_hdfs_policies.txt)
* ranger\_hbase\_policies: Allow creation/deletion/update of HBase Ranger policies. Doc [at this location](docs/ranger_hbase_policies.txt)
* ranger\_kafka\_policies: Allow creation/deletion/update of Kafka Ranger policies. Doc [at this location](docs/ranger_kafka_policies.txt)
* ranger\_yarn\_policies: Allow creation/deletion/update of Yarn Ranger policies. Doc [at this location](docs/ranger_yarn_policies.txt)
* ranger\_storm\_policies: Allow creation/deletion/update of Storm Ranger policies. Doc [at this location](docs/ranger_storm_policies.txt)
## Requirements
These modules need the python-requests package to be present on the remote node.
# Example Playbook
# Grant full rights for user 'coxi' on folders '/apps/coxi01' and '/user/coxi01', in a recursive way
- hosts: edge_node1
roles:
- ranger_modules
tasks:
- ranger_hdfs_policies:
state: present
admin_url: http://ranger.mycompany.com:6080
admin_username: admin
admin_password: admin
policies:
- name: "coxi01"
paths:
- "/apps/coxi01"
- "/user/coxi01"
permissions:
- users:
- coxi
accesses:
- Write
- read
- execute
# License
GNU GPL
Click on the [Link](COPYING) to see the full text.
from nuitka.ModuleRegistry import getOwnerFromCodeName
from .NodeBases import StatementBase
class StatementReleaseVariableBase(StatementBase):
    """Releasing a variable.

    Just release the value, which of course is not to be used afterwards.

    Typical code: Function exit user variables, try/finally release of temporary
    variables.
    """

    __slots__ = "variable", "variable_trace"

    def __init__(self, variable, source_ref):
        StatementBase.__init__(self, source_ref=source_ref)
        self.variable = variable
        # Populated later by computeStatement once trace collection has run.
        self.variable_trace = None

    @staticmethod
    def isStatementReleaseVariable():
        # Type predicate used by the node framework instead of isinstance checks.
        return True

    def finalize(self):
        """Drop all references when the node is removed from the tree."""
        del self.variable
        del self.variable_trace
        del self.parent

    def getDetails(self):
        return {"variable": self.variable}

    def getDetailsForDisplay(self):
        # Serializable form (e.g. for XML dumps): variable name plus owner code name.
        return {
            "variable_name": self.variable.getName(),
            "owner": self.variable.getOwner().getCodeName(),
        }

    @classmethod
    def fromXML(cls, provider, source_ref, **args):
        """Rebuild a release statement from its XML attribute dump."""
        # Deserialization goes through the factory, not a concrete class.
        assert cls is makeStatementReleaseVariable, cls
        owner = getOwnerFromCodeName(args["owner"])
        assert owner is not None, args["owner"]
        variable = owner.getProvidedVariable(args["variable_name"])
        return cls(variable=variable, source_ref=source_ref)

    def getVariable(self):
        return self.variable

    def getVariableTrace(self):
        return self.variable_trace

    def setVariable(self, variable):
        self.variable = variable

    def computeStatement(self, trace_collection):
        """Optimize this release against the current variable trace.

        Returns the usual (replacement, change_tag, change_desc) triple.
        """
        self.variable_trace = trace_collection.getVariableCurrentTrace(self.variable)
        if self.variable_trace.mustNotHaveValue():
            # Releasing a variable known to be uninitialized is a no-op;
            # drop the statement entirely.
            return (
                None,
                "new_statements",
                "Uninitialized %s is not released." % (self.variable.getDescription()),
            )
        escape_desc = self.variable_trace.getReleaseEscape()
        assert escape_desc is not None, self.variable_trace
        if escape_desc.isControlFlowEscape():
            # Any code could be run, note that.
            trace_collection.onControlFlowEscape(self)
        # TODO: We might be able to remove ourselves based on the trace
        # we belong to.
        return self, None, None

    @staticmethod
    def mayRaiseException(exception_type):
        # By default, __del__ is not allowed to raise an exception.
        return False
class StatementReleaseVariableTemp(StatementReleaseVariableBase):
    # Release node specialized for temporary variables.
    kind = "STATEMENT_RELEASE_VARIABLE_TEMP"
class StatementReleaseVariableLocal(StatementReleaseVariableBase):
    # Release node specialized for local (user) variables.
    kind = "STATEMENT_RELEASE_VARIABLE_LOCAL"
class StatementReleaseVariableParameter(StatementReleaseVariableLocal):
    """Release node specialized for parameter variables."""

    kind = "STATEMENT_RELEASE_VARIABLE_PARAMETER"

    def computeStatement(self, trace_collection):
        # Auto-release parameter variables never need an explicit release
        # statement, so the node can be dropped entirely in that case.
        if self.variable.getOwner().isAutoReleaseVariable(self.variable):
            change_desc = (
                "Original parameter variable value of '%s' is not released."
                % self.variable.getName()
            )
            return None, "new_statements", change_desc

        # Otherwise behave exactly like a normal local variable release.
        return StatementReleaseVariableLocal.computeStatement(self, trace_collection)
def makeStatementReleaseVariable(variable, source_ref):
    """Create the release-statement node matching the kind of *variable*."""
    if variable.isTempVariable():
        node_class = StatementReleaseVariableTemp
    elif variable.isParameterVariable():
        node_class = StatementReleaseVariableParameter
    else:
        node_class = StatementReleaseVariableLocal

    return node_class(variable=variable, source_ref=source_ref)
def makeStatementsReleaseVariables(variables, source_ref):
    """Create a release statement for every given variable, as a tuple."""
    statements = []
    for variable in variables:
        statements.append(
            makeStatementReleaseVariable(variable=variable, source_ref=source_ref)
        )
    return tuple(statements)
import datetime
import hashlib
import time
from functools import partial, wraps

from flask import session, abort, current_app, redirect, url_for
# Defaults used when an Auth() instance does not override them.
# NOTE(review): SHA-1 is a weak choice for password hashing by modern
# standards; consider a dedicated KDF (confirm before changing stored hashes).
DEFAULT_HASH_ALGORITHM = hashlib.sha1
DEFAULT_USER_TIMEOUT = 3600  # seconds; 0 disables expiry (see get_current_user_data)

# Keys under which the user payload and login timestamp are stored in the
# Flask session.
SESSION_USER_KEY = 'auth_user'
SESSION_LOGIN_KEY = 'auth_login'
def _default_not_authorized(*args, **kwargs):
    # Default handler: respond with HTTP 401 Unauthorized.
    return abort(401)
def _redirect_to_login(login_url_name):
    # Redirect to the URL registered under the configured login endpoint name.
    return redirect(url_for(login_url_name))
class Auth(object):
    """
    Extension initialization object containing settings for the extension.

    Supported settings:
    - login_url_name: Name of the URL that is used for login. It's used in
    the not_logged_in_callback if provided in the constructor.
    - not_logged_in_callback: Function to call when a user accesses a page
    without being logged in. Normally used to redirect to the login page.
    If a login_url_name is provided, it will by default redirect to that
    url. Otherwise, the default is abort(401).
    - not_permitted_callback: Function to call when a user tries to access
    a page for which he doesn't have the permission. Default: abort(401).
    - hash_algorithm: Algorithm from the hashlib library used for password
    encryption. Default: sha1.
    - user_timeout: Timeout (in seconds) after which the session of the user
    expires. Default: 3600. A timeout of 0 means it will never expire.
    - load_role: Function to load a role. Is called with user.role as only
    parameter.
    """

    def __init__(self, app=None, login_url_name=None):
        # With a known login endpoint, unauthorized access redirects there;
        # otherwise fall back to a plain 401 response.
        if login_url_name is None:
            self.not_logged_in_callback = _default_not_authorized
        else:
            self.not_logged_in_callback = partial(_redirect_to_login,
                                                  login_url_name)
        self.not_permitted_callback = _default_not_authorized
        self.hash_algorithm = DEFAULT_HASH_ALGORITHM
        self.user_timeout = DEFAULT_USER_TIMEOUT
        # Default loader ignores the stored role and returns None;
        # applications override this to resolve user.role into a role object.
        self.load_role = lambda _: None
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        # Expose this Auth instance on the Flask app so module-level helpers
        # can reach the configuration through current_app.auth.
        app.auth = self
class AuthUser(object):
    """
    Baseclass for a user model. Contains a few convenience methods.

    Attributes:
    - username: Username of the user.
    - password: Password of the user. By default not encrypted. The
    set_and_encrypt_password() method sets and encrypts the password.
    - salt: Salt used for the encryption of the password.
    - role: Role of this user.
    """

    role = None

    def __init__(self, username=None, password=None, salt=None, role=None):
        self.username = username
        # Storing password unmodified. Encryption of the password should
        # happen explicitly.
        self.password = password
        self.salt = salt
        self.role = role

    def set_and_encrypt_password(self, password, salt=None):
        """
        Encrypts and sets the password. If no salt is provided, a new
        one is generated.

        Bug fix: the salt default used to be ``str(int(time.time()))`` as a
        default argument, which Python evaluates only once at import time,
        so every user silently shared one identical salt per process. The
        salt is now generated per call.
        """
        if salt is None:
            salt = str(int(time.time()))
        self.salt = salt
        self.password = encrypt(password, self.salt)

    def authenticate(self, password):
        """
        Attempts to verify the password and log the user in. Returns true if
        succesful.
        """
        if self.password == encrypt(password, self.salt):
            login(self)
            return True
        return False

    def __eq__(self, other):
        # Users are identified by username only.
        return self.username == getattr(other, 'username', None)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __getstate__(self):
        # The full attribute dict is what gets stored in the session.
        return self.__dict__

    @classmethod
    def load_current_user(cls, apply_timeout=True):
        """
        Load current user based on the result of get_current_user_data().
        """
        data = get_current_user_data(apply_timeout)
        if not data:
            return None
        user = cls()
        user.__dict__ = data
        return user

    def is_logged_in(self):
        """Return True if this user matches the user stored in the session."""
        user_data = get_current_user_data()
        return user_data is not None and user_data.get('username') == self.username
def encrypt(password, salt=None, hash_algorithm=None):
    """Encrypts a password based on the hashing algorithm.

    If no hash_algorithm is given, the one configured on the current Flask
    app's Auth instance is used.

    Bug fix: hashlib digests require bytes on Python 3; text input is now
    encoded explicitly (UTF-8) instead of raising TypeError.
    """
    to_encrypt = password
    if salt is not None:
        to_encrypt += salt
    if isinstance(to_encrypt, str):
        to_encrypt = to_encrypt.encode('utf-8')
    if hash_algorithm is not None:
        return hash_algorithm(to_encrypt).hexdigest()
    return current_app.auth.hash_algorithm(to_encrypt).hexdigest()
def login(user):
    """
    Logs the user in. Note that NO AUTHENTICATION is done by this function. If
    you want to authenticate a user, use the AuthUser.authenticate() method.
    """
    # Store the serialized user plus a login timestamp; the timestamp is
    # compared against user_timeout in get_current_user_data().
    session[SESSION_USER_KEY] = user.__getstate__()
    session[SESSION_LOGIN_KEY] = datetime.datetime.utcnow()
def logout():
    """Logs the currently logged in user out and returns the user data."""
    # pop() with default so logging out twice is harmless.
    session.pop(SESSION_LOGIN_KEY, None)
    return session.pop(SESSION_USER_KEY, None)
def get_current_user_data(apply_timeout=True):
    """
    Returns the data of the current user (user.__dict__) if there is a
    current user and he didn't time out yet. If timeout should be ignored,
    provide apply_timeout=False.
    """
    user_data = session.get(SESSION_USER_KEY, None)
    if user_data is None:
        return None
    if not apply_timeout:
        return user_data
    login_datetime = session[SESSION_LOGIN_KEY]
    now = datetime.datetime.utcnow()
    user_timeout = current_app.auth.user_timeout
    # A timeout of 0 (or negative) disables expiry entirely.
    if user_timeout > 0 and now - login_datetime > \
            datetime.timedelta(seconds=user_timeout):
        # Session is stale: clear it so subsequent calls stay consistent.
        logout()
        return None
    return user_data
def not_logged_in(callback, *args, **kwargs):
    """
    Executes not logged in callback. Not for external use.
    """
    # Fall back to the app-wide handler only when no explicit callback
    # was given; the conditional expression keeps current_app untouched
    # in the explicit-callback case.
    handler = (
        current_app.auth.not_logged_in_callback if callback is None else callback
    )
    return handler(*args, **kwargs)
def login_required(callback=None):
    """
    Decorator for views that require login. Callback can be specified to
    override the default callback on the auth object.

    Fix: the inner wrapper now uses functools.wraps so the decorated view
    keeps its __name__/__doc__; without it, Flask derives identical endpoint
    names for every decorated view, causing registration conflicts.
    """
    def wrap(func):
        @wraps(func)
        def decorator(*args, **kwargs):
            if get_current_user_data() is None:
                # Not logged in (or session expired): run the configured
                # handler instead of the view.
                return not_logged_in(callback, *args, **kwargs)
            return func(*args, **kwargs)
        return decorator
    return wrap
/CSUMMDET-1.0.23.tar.gz/CSUMMDET-1.0.23/mmdet/models/detectors/two_stage.py | import torch
import torch.nn as nn
from mmdet.core import bbox2result, bbox2roi, build_assigner, build_sampler
from .. import builder
from ..registry import DETECTORS
from .base import BaseDetector
from .test_mixins import BBoxTestMixin, MaskTestMixin, RPNTestMixin
@DETECTORS.register_module
class TwoStageDetector(BaseDetector, RPNTestMixin, BBoxTestMixin,
                       MaskTestMixin):
    """Base class for two-stage detectors.

    Two-stage detectors typically consisting of a region proposal network and a
    task-specific regression head.
    """

    def __init__(self,
                 backbone,
                 neck=None,
                 shared_head=None,
                 rpn_head=None,
                 bbox_roi_extractor=None,
                 bbox_head=None,
                 mask_roi_extractor=None,
                 mask_head=None,
                 train_cfg=None,
                 test_cfg=None,
                 pretrained=None):
        """Build each configured sub-module from its config dict."""
        super(TwoStageDetector, self).__init__()
        self.backbone = builder.build_backbone(backbone)
        if neck is not None:
            self.neck = builder.build_neck(neck)
        if shared_head is not None:
            self.shared_head = builder.build_shared_head(shared_head)
        if rpn_head is not None:
            self.rpn_head = builder.build_head(rpn_head)
        if bbox_head is not None:
            self.bbox_roi_extractor = builder.build_roi_extractor(
                bbox_roi_extractor)
            self.bbox_head = builder.build_head(bbox_head)
        if mask_head is not None:
            # The mask branch reuses the bbox RoI extractor unless a
            # dedicated one is configured.
            if mask_roi_extractor is not None:
                self.mask_roi_extractor = builder.build_roi_extractor(
                    mask_roi_extractor)
                self.share_roi_extractor = False
            else:
                self.share_roi_extractor = True
                self.mask_roi_extractor = self.bbox_roi_extractor
            self.mask_head = builder.build_head(mask_head)
        self.train_cfg = train_cfg
        self.test_cfg = test_cfg
        self.init_weights(pretrained=pretrained)

    @property
    def with_rpn(self):
        """bool: whether this detector has an RPN head."""
        return hasattr(self, 'rpn_head') and self.rpn_head is not None

    def init_weights(self, pretrained=None):
        """Initialize the weights of every present sub-module."""
        super(TwoStageDetector, self).init_weights(pretrained)
        self.backbone.init_weights(pretrained=pretrained)
        if self.with_neck:
            if isinstance(self.neck, nn.Sequential):
                for m in self.neck:
                    m.init_weights()
            else:
                self.neck.init_weights()
        if self.with_shared_head:
            self.shared_head.init_weights(pretrained=pretrained)
        if self.with_rpn:
            self.rpn_head.init_weights()
        if self.with_bbox:
            self.bbox_roi_extractor.init_weights()
            self.bbox_head.init_weights()
        if self.with_mask:
            self.mask_head.init_weights()
            if not self.share_roi_extractor:
                self.mask_roi_extractor.init_weights()

    def extract_feat(self, img):
        """Directly extract features from the backbone+neck
        """
        x = self.backbone(img)
        if self.with_neck:
            x = self.neck(x)
        return x

    def forward_dummy(self, img):
        """Used for computing network flops.

        See `mmedetection/tools/get_flops.py`
        """
        outs = ()
        # backbone
        x = self.extract_feat(img)
        # rpn
        if self.with_rpn:
            rpn_outs = self.rpn_head(x)
            outs = outs + (rpn_outs, )
        proposals = torch.randn(1000, 4).cuda()
        # bbox head
        rois = bbox2roi([proposals])
        if self.with_bbox:
            bbox_feats = self.bbox_roi_extractor(
                x[:self.bbox_roi_extractor.num_inputs], rois)
            if self.with_shared_head:
                bbox_feats = self.shared_head(bbox_feats)
            cls_score, bbox_pred = self.bbox_head(bbox_feats)
            outs = outs + (cls_score, bbox_pred)
        # mask head
        if self.with_mask:
            mask_rois = rois[:100]
            mask_feats = self.mask_roi_extractor(
                x[:self.mask_roi_extractor.num_inputs], mask_rois)
            if self.with_shared_head:
                mask_feats = self.shared_head(mask_feats)
            mask_pred = self.mask_head(mask_feats)
            outs = outs + (mask_pred, )
        return outs

    def forward_train(self,
                      img,
                      img_meta,
                      gt_bboxes,
                      gt_labels,
                      gt_bboxes_ignore=None,
                      gt_masks=None,
                      proposals=None):
        """
        Args:
            img (Tensor): of shape (N, C, H, W) encoding input images.
                Typically these should be mean centered and std scaled.

            img_meta (list[dict]): list of image info dict where each dict has:
                'img_shape', 'scale_factor', 'flip', and my also contain
                'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
                For details on the values of these keys see
                `mmdet/datasets/pipelines/formatting.py:Collect`.

            gt_bboxes (list[Tensor]): each item are the truth boxes for each
                image in [tl_x, tl_y, br_x, br_y] format.

            gt_labels (list[Tensor]): class indices corresponding to each box

            gt_bboxes_ignore (None | list[Tensor]): specify which bounding
                boxes can be ignored when computing the loss.

            gt_masks (None | Tensor) : true segmentation masks for each box
                used if the architecture supports a segmentation task.

            proposals : override rpn proposals with custom proposals. Use when
                `with_rpn` is False.

        Returns:
            dict[str, Tensor]: a dictionary of loss components
        """
        x = self.extract_feat(img)

        losses = dict()

        # RPN forward and loss
        if self.with_rpn:
            rpn_outs = self.rpn_head(x)
            rpn_loss_inputs = rpn_outs + (gt_bboxes, img_meta,
                                          self.train_cfg.rpn)
            rpn_losses = self.rpn_head.loss(
                *rpn_loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore)
            losses.update(rpn_losses)

            proposal_cfg = self.train_cfg.get('rpn_proposal',
                                              self.test_cfg.rpn)
            proposal_inputs = rpn_outs + (img_meta, proposal_cfg)
            proposal_list = self.rpn_head.get_bboxes(*proposal_inputs)
        else:
            proposal_list = proposals

        # assign gts and sample proposals
        if self.with_bbox or self.with_mask:
            bbox_assigner = build_assigner(self.train_cfg.rcnn.assigner)
            bbox_sampler = build_sampler(
                self.train_cfg.rcnn.sampler, context=self)
            num_imgs = img.size(0)
            if gt_bboxes_ignore is None:
                gt_bboxes_ignore = [None for _ in range(num_imgs)]
            sampling_results = []
            for i in range(num_imgs):
                assign_result = bbox_assigner.assign(proposal_list[i],
                                                     gt_bboxes[i],
                                                     gt_bboxes_ignore[i],
                                                     gt_labels[i])
                sampling_result = bbox_sampler.sample(
                    assign_result,
                    proposal_list[i],
                    gt_bboxes[i],
                    gt_labels[i],
                    feats=[lvl_feat[i][None] for lvl_feat in x])
                sampling_results.append(sampling_result)

        # bbox head forward and loss
        if self.with_bbox:
            rois = bbox2roi([res.bboxes for res in sampling_results])
            # TODO: a more flexible way to decide which feature maps to use
            bbox_feats = self.bbox_roi_extractor(
                x[:self.bbox_roi_extractor.num_inputs], rois)
            if self.with_shared_head:
                bbox_feats = self.shared_head(bbox_feats)
            cls_score, bbox_pred = self.bbox_head(bbox_feats)

            bbox_targets = self.bbox_head.get_target(sampling_results,
                                                     gt_bboxes, gt_labels,
                                                     self.train_cfg.rcnn)
            loss_bbox = self.bbox_head.loss(cls_score, bbox_pred,
                                            *bbox_targets)
            losses.update(loss_bbox)

        # mask head forward and loss
        if self.with_mask:
            if not self.share_roi_extractor:
                pos_rois = bbox2roi(
                    [res.pos_bboxes for res in sampling_results])
                mask_feats = self.mask_roi_extractor(
                    x[:self.mask_roi_extractor.num_inputs], pos_rois)
                if self.with_shared_head:
                    mask_feats = self.shared_head(mask_feats)
            else:
                # Shared extractor: reuse bbox_feats and select positives.
                # NOTE(review): uint8 mask indexing is deprecated in newer
                # torch in favor of bool — confirm the targeted version.
                pos_inds = []
                device = bbox_feats.device
                for res in sampling_results:
                    pos_inds.append(
                        torch.ones(
                            res.pos_bboxes.shape[0],
                            device=device,
                            dtype=torch.uint8))
                    pos_inds.append(
                        torch.zeros(
                            res.neg_bboxes.shape[0],
                            device=device,
                            dtype=torch.uint8))
                pos_inds = torch.cat(pos_inds)
                mask_feats = bbox_feats[pos_inds]
            mask_pred = self.mask_head(mask_feats)

            mask_targets = self.mask_head.get_target(sampling_results,
                                                     gt_masks,
                                                     self.train_cfg.rcnn)
            pos_labels = torch.cat(
                [res.pos_gt_labels for res in sampling_results])
            loss_mask = self.mask_head.loss(mask_pred, mask_targets,
                                            pos_labels)
            losses.update(loss_mask)

        return losses

    def simple_test(self, img, img_meta, proposals=None, rescale=False):
        """Test without augmentation."""
        assert self.with_bbox, "Bbox head must be implemented."

        x = self.extract_feat(img)

        proposal_list = self.simple_test_rpn(
            x, img_meta, self.test_cfg.rpn) if proposals is None else proposals

        det_bboxes, det_labels = self.simple_test_bboxes(
            x, img_meta, proposal_list, self.test_cfg.rcnn, rescale=rescale)
        bbox_results = bbox2result(det_bboxes, det_labels,
                                   self.bbox_head.num_classes)

        if not self.with_mask:
            return bbox_results
        else:
            segm_results = self.simple_test_mask(
                x, img_meta, det_bboxes, det_labels, rescale=rescale)
            return bbox_results, segm_results

    def aug_test(self, imgs, img_metas, rescale=False):
        """Test with augmentations.

        If rescale is False, then returned bboxes and masks will fit the scale
        of imgs[0].
        """
        # recompute feats to save memory
        proposal_list = self.aug_test_rpn(
            self.extract_feats(imgs), img_metas, self.test_cfg.rpn)
        det_bboxes, det_labels = self.aug_test_bboxes(
            self.extract_feats(imgs), img_metas, proposal_list,
            self.test_cfg.rcnn)

        if rescale:
            _det_bboxes = det_bboxes
        else:
            _det_bboxes = det_bboxes.clone()
            _det_bboxes[:, :4] *= img_metas[0][0]['scale_factor']
        bbox_results = bbox2result(_det_bboxes, det_labels,
                                   self.bbox_head.num_classes)

        # det_bboxes always keep the original scale
        if self.with_mask:
            segm_results = self.aug_test_mask(
                self.extract_feats(imgs), img_metas, det_bboxes, det_labels)
            return bbox_results, segm_results
        else:
            return bbox_results
import torch
from ..builder import BBOX_ASSIGNERS
from ..iou_calculators import build_iou_calculator
from .assign_result import AssignResult
from .base_assigner import BaseAssigner
# Large sentinel used to mask out invalid candidates (as -INF) before the
# per-anchor argmax in TaskAlignedAssigner.assign.
INF = 100000000
@BBOX_ASSIGNERS.register_module()
class TaskAlignedAssigner(BaseAssigner):
    """Task aligned assigner used in the paper:

    `TOOD: Task-aligned One-stage Object Detection.
    <https://arxiv.org/abs/2108.07755>`_.

    Assign a corresponding gt bbox or background to each predicted bbox.
    Each bbox will be assigned with `0` or a positive integer
    indicating the ground truth index.

    - 0: negative sample, no assigned gt
    - positive integer: positive sample, index (1-based) of assigned gt

    Args:
        topk (int): number of bbox selected in each level
        iou_calculator (dict): Config dict for iou calculator.
            Default: dict(type='BboxOverlaps2D')
    """

    def __init__(self, topk, iou_calculator=dict(type='BboxOverlaps2D')):
        assert topk >= 1
        self.topk = topk
        self.iou_calculator = build_iou_calculator(iou_calculator)

    def assign(self,
               pred_scores,
               decode_bboxes,
               anchors,
               gt_bboxes,
               gt_bboxes_ignore=None,
               gt_labels=None,
               alpha=1,
               beta=6):
        """Assign gt to bboxes.

        The assignment is done in following steps

        1. compute alignment metric between all bbox (bbox of all pyramid
           levels) and gt
        2. select top-k bbox as candidates for each gt
        3. limit the positive sample's center in gt (because the anchor-free
           detector only can predict positive distance)

        Args:
            pred_scores (Tensor): predicted class probability,
                shape(n, num_classes)
            decode_bboxes (Tensor): predicted bounding boxes, shape(n, 4)
            anchors (Tensor): pre-defined anchors, shape(n, 4).
            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
                labelled as `ignored`, e.g., crowd boxes in COCO.
            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
            alpha (int): exponent on the classification score in the
                alignment metric.
            beta (int): exponent on the IoU in the alignment metric.

        Returns:
            :obj:`TaskAlignedAssignResult`: The assign result.
        """
        anchors = anchors[:, :4]
        num_gt, num_bboxes = gt_bboxes.size(0), anchors.size(0)
        # compute alignment metric between all bbox and gt
        overlaps = self.iou_calculator(decode_bboxes, gt_bboxes).detach()
        bbox_scores = pred_scores[:, gt_labels].detach()
        # assign 0 by default
        assigned_gt_inds = anchors.new_full((num_bboxes, ),
                                            0,
                                            dtype=torch.long)
        assign_metrics = anchors.new_zeros((num_bboxes, ))

        if num_gt == 0 or num_bboxes == 0:
            # No ground truth or boxes, return empty assignment
            max_overlaps = anchors.new_zeros((num_bboxes, ))
            if num_gt == 0:
                # No gt boxes, assign everything to background
                assigned_gt_inds[:] = 0
            if gt_labels is None:
                assigned_labels = None
            else:
                assigned_labels = anchors.new_full((num_bboxes, ),
                                                   -1,
                                                   dtype=torch.long)
            assign_result = AssignResult(
                num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels)
            assign_result.assign_metrics = assign_metrics
            return assign_result

        # select top-k bboxes as candidates for each gt
        # alignment metric = score^alpha * IoU^beta (Eq. 2 of the paper)
        alignment_metrics = bbox_scores**alpha * overlaps**beta
        topk = min(self.topk, alignment_metrics.size(0))
        _, candidate_idxs = alignment_metrics.topk(topk, dim=0, largest=True)
        candidate_metrics = alignment_metrics[candidate_idxs,
                                              torch.arange(num_gt)]
        is_pos = candidate_metrics > 0

        # limit the positive sample's center in gt
        anchors_cx = (anchors[:, 0] + anchors[:, 2]) / 2.0
        anchors_cy = (anchors[:, 1] + anchors[:, 3]) / 2.0
        for gt_idx in range(num_gt):
            # offset per-gt indices so a flattened (num_gt * num_bboxes)
            # lookup can be used below
            candidate_idxs[:, gt_idx] += gt_idx * num_bboxes
        ep_anchors_cx = anchors_cx.view(1, -1).expand(
            num_gt, num_bboxes).contiguous().view(-1)
        ep_anchors_cy = anchors_cy.view(1, -1).expand(
            num_gt, num_bboxes).contiguous().view(-1)
        candidate_idxs = candidate_idxs.view(-1)

        # calculate the left, top, right, bottom distance between positive
        # bbox center and gt side
        l_ = ep_anchors_cx[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 0]
        t_ = ep_anchors_cy[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 1]
        r_ = gt_bboxes[:, 2] - ep_anchors_cx[candidate_idxs].view(-1, num_gt)
        b_ = gt_bboxes[:, 3] - ep_anchors_cy[candidate_idxs].view(-1, num_gt)
        is_in_gts = torch.stack([l_, t_, r_, b_], dim=1).min(dim=1)[0] > 0.01
        is_pos = is_pos & is_in_gts

        # if an anchor box is assigned to multiple gts,
        # the one with the highest iou will be selected.
        overlaps_inf = torch.full_like(overlaps,
                                       -INF).t().contiguous().view(-1)
        index = candidate_idxs.view(-1)[is_pos.view(-1)]
        overlaps_inf[index] = overlaps.t().contiguous().view(-1)[index]
        overlaps_inf = overlaps_inf.view(num_gt, -1).t()

        max_overlaps, argmax_overlaps = overlaps_inf.max(dim=1)
        assigned_gt_inds[
            max_overlaps != -INF] = argmax_overlaps[max_overlaps != -INF] + 1
        assign_metrics[max_overlaps != -INF] = alignment_metrics[
            max_overlaps != -INF, argmax_overlaps[max_overlaps != -INF]]

        if gt_labels is not None:
            assigned_labels = assigned_gt_inds.new_full((num_bboxes, ), -1)
            pos_inds = torch.nonzero(
                assigned_gt_inds > 0, as_tuple=False).squeeze()
            if pos_inds.numel() > 0:
                assigned_labels[pos_inds] = gt_labels[
                    assigned_gt_inds[pos_inds] - 1]
        else:
            assigned_labels = None
        assign_result = AssignResult(
            num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels)
        assign_result.assign_metrics = assign_metrics
        return assign_result
from datetime import datetime, timedelta
from loguru import logger
from sqlalchemy import Boolean, Column, DateTime, Float, Integer, String, Table, Unicode
from sqlalchemy.orm import relationship
from sqlalchemy.schema import ForeignKey, Index
from flexget import db_schema
from flexget.components.imdb.utils import extract_id
from flexget.db_schema import UpgradeImpossible
# Module-scoped logger for the imdb lookup database layer.
logger = logger.bind(name='imdb.db')
# Current schema version; bump when the table layout changes (see upgrade()).
SCHEMA_VER = 10
Base = db_schema.versioned_base('imdb_lookup', SCHEMA_VER)
# association tables
# Each table below is a plain many-to-many link between imdb_movies and one
# of the metadata tables; the composite index speeds up joins in both
# directions.  Tables are registered with the versioned base so they are
# dropped/recreated on schema resets.
# movie <-> genre link
genres_table = Table(
    'imdb_movie_genres',
    Base.metadata,
    Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
    Column('genre_id', Integer, ForeignKey('imdb_genres.id')),
    Index('ix_imdb_movie_genres', 'movie_id', 'genre_id'),
)
Base.register_table(genres_table)
# movie <-> actor link
actors_table = Table(
    'imdb_movie_actors',
    Base.metadata,
    Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
    Column('actor_id', Integer, ForeignKey('imdb_actors.id')),
    Index('ix_imdb_movie_actors', 'movie_id', 'actor_id'),
)
Base.register_table(actors_table)
# movie <-> director link
directors_table = Table(
    'imdb_movie_directors',
    Base.metadata,
    Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
    Column('director_id', Integer, ForeignKey('imdb_directors.id')),
    Index('ix_imdb_movie_directors', 'movie_id', 'director_id'),
)
Base.register_table(directors_table)
# movie <-> writer link
writers_table = Table(
    'imdb_movie_writers',
    Base.metadata,
    Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
    Column('writer_id', Integer, ForeignKey('imdb_writers.id')),
    Index('ix_imdb_movie_writers', 'movie_id', 'writer_id'),
)
Base.register_table(writers_table)
# movie <-> plot keyword link
plot_keywords_table = Table(
    'imdb_movie_plot_keywords',
    Base.metadata,
    Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
    Column('keyword_id', Integer, ForeignKey('imdb_plot_keywords.id')),
    Index('ix_imdb_movie_plot_keywords', 'movie_id', 'keyword_id'),
)
Base.register_table(plot_keywords_table)
class Movie(Base):
    """Cached IMDb details for a single movie."""
    __tablename__ = 'imdb_movies'
    id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    original_title = Column(Unicode)
    # Canonical IMDb URL; also used to derive imdb_id (see property below).
    url = Column(String, index=True)
    # many-to-many relations
    genres = relationship('Genre', secondary=genres_table, backref='movies')
    actors = relationship('Actor', secondary=actors_table, backref='movies')
    directors = relationship('Director', secondary=directors_table, backref='movies')
    writers = relationship('Writer', secondary=writers_table, backref='movies')
    plot_keywords = relationship('PlotKeyword', secondary=plot_keywords_table, backref='movies')
    # Ordered by prominence so the primary language comes first.
    languages = relationship('MovieLanguage', order_by='MovieLanguage.prominence')
    score = Column(Float)
    votes = Column(Integer)
    meta_score = Column(Integer)
    year = Column(Integer)
    plot_outline = Column(Unicode)
    mpaa_rating = Column(String, default='')
    photo = Column(String)
    # updated time, so we can grab new rating counts after 48 hours
    # set a default, so existing data gets updated with a rating
    updated = Column(DateTime)
    @property
    def imdb_id(self):
        """IMDb identifier (e.g. ``tt0111161``) extracted from :attr:`url`."""
        return extract_id(self.url)
    @property
    def expired(self):
        """
        :return: True if movie details are considered to be expired, ie. need of update
        """
        if self.updated is None:
            # Never refreshed -> always considered stale.
            logger.debug('updated is None: {}', self)
            return True
        refresh_interval = 2
        if self.year:
            # Older movies change less often, so scale the refresh interval
            # by the movie's age (5 extra days per year of age).
            # Make sure age is not negative
            age = max((datetime.now().year - self.year), 0)
            refresh_interval += age * 5
            logger.debug('movie `{}` age {} expires in {} days', self.title, age, refresh_interval)
        return self.updated < datetime.now() - timedelta(days=refresh_interval)
    def __repr__(self):
        return f'<Movie(name={self.title},votes={self.votes},year={self.year})>'
class MovieLanguage(Base):
    """Association object linking a Movie to a Language with an ordering.
    ``prominence`` records the position of the language in IMDb's listing
    (lower = more prominent); Movie.languages is sorted by it.
    """
    __tablename__ = 'imdb_movie_languages'
    movie_id = Column(Integer, ForeignKey('imdb_movies.id'), primary_key=True)
    language_id = Column(Integer, ForeignKey('imdb_languages.id'), primary_key=True)
    prominence = Column(Integer)
    language = relationship('Language')
    def __init__(self, language, prominence=None):
        self.language = language
        self.prominence = prominence
class Language(Base):
    """A spoken language referenced by one or more movies."""
    __tablename__ = 'imdb_languages'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode)
    def __init__(self, name):
        self.name = name
class Genre(Base):
    """A movie genre (many-to-many with Movie via genres_table)."""
    __tablename__ = 'imdb_genres'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    def __init__(self, name):
        self.name = name
class Actor(Base):
    """An actor, identified by an IMDb person id (e.g. ``nm0000123``)."""
    __tablename__ = 'imdb_actors'
    id = Column(Integer, primary_key=True)
    imdb_id = Column(String)
    name = Column(Unicode)
    def __init__(self, imdb_id, name=None):
        self.imdb_id = imdb_id
        self.name = name
class Director(Base):
    """A director, identified by an IMDb person id."""
    __tablename__ = 'imdb_directors'
    id = Column(Integer, primary_key=True)
    imdb_id = Column(String)
    name = Column(Unicode)
    def __init__(self, imdb_id, name=None):
        self.imdb_id = imdb_id
        self.name = name
class Writer(Base):
    """A writer, identified by an IMDb person id (added in schema v8)."""
    __tablename__ = 'imdb_writers'
    id = Column(Integer, primary_key=True)
    imdb_id = Column(String)
    name = Column(Unicode)
    def __init__(self, imdb_id, name=None):
        self.imdb_id = imdb_id
        self.name = name
class PlotKeyword(Base):
    """A plot keyword tag (added in schema v10)."""
    __tablename__ = "imdb_plot_keywords"
    id = Column(Integer, primary_key=True)
    name = Column(String)
    def __init__(self, name):
        self.name = name
class SearchResult(Base):
    """Cached result of an IMDb title search.
    ``fails`` marks searches that returned nothing, so they are not retried
    on every run; ``queried`` records when the search was performed.
    """
    __tablename__ = 'imdb_search'
    id = Column(Integer, primary_key=True)
    title = Column(Unicode, index=True)
    url = Column(String)
    fails = Column(Boolean, default=False)
    queried = Column(DateTime)
    @property
    def imdb_id(self):
        """IMDb identifier extracted from the result URL."""
        return extract_id(self.url)
    def __init__(self, title, url=None):
        self.title = title
        self.url = url
        self.queried = datetime.now()
    def __repr__(self):
        return f'<SearchResult(title={self.title},url={self.url},fails={self.fails})>'
@db_schema.upgrade('imdb_lookup')
def upgrade(ver, session):
    """
    Schema upgrade hook for the ``imdb_lookup`` database.

    Version history:
      v5  We may have cached bad data due to imdb changes, just wipe everything. GitHub #697
      v6  The association tables were not cleared on the last upgrade, clear again. GitHub #714
      v7  Another layout change cached bad data. GitHub #729
      v8  Added writers to the DB Schema
      v9  Added Metacritic score extraction/filtering
      v10 Added plot keywords to the DB schema

    :param ver: schema version of the stored data, or None if unknown
    :param session: database session (unused here, required by the upgrade API)
    :return: the (unchanged) version when no upgrade is needed
    :raises UpgradeImpossible: when cached data predates v10; this tells
        FlexGet to drop and rebuild the lookup tables rather than migrate.
    """
    if ver is None or ver <= 9:
        raise UpgradeImpossible(
            'Resetting imdb_lookup caches because bad data may have been cached.'
        )
    return ver
/DateRangeParser-1.3.2.tar.gz/DateRangeParser-1.3.2/daterangeparser/parse_date_range.py |
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
import calendar
from pyparsing import ParseException, Optional, Word, oneOf, nums, stringEnd, Literal, Group
# Lookup table mapping lower-case month names (abbreviated and full forms)
# to 1-based month numbers.  Keys must stay lower-case: month_to_number()
# lower-cases its input before the lookup, and create_parser() builds its
# month grammar from these keys.
MONTHS = {
    'jan': 1,
    'january': 1,
    'feb': 2,
    'february': 2,
    'mar': 3,
    'march': 3,
    'apr': 4,
    'april': 4,
    'may': 5,
    'jun': 6,
    'june': 6,
    'jul': 7,
    'july': 7,
    'aug': 8,
    'august': 8,
    'sep': 9,
    'sept': 9,
    'september': 9,
    'oct': 10,
    'october': 10,
    'nov': 11,
    'november': 11,
    'dec': 12,
    'december': 12
}
def check_day(tokens=None):
    """Convert a parsed day token to int and validate it.

    Used as a pyparsing parse action: ``tokens[0]`` is the day-number string.
    Returns the day as an int when it lies in 1..31 (inclusive); otherwise
    raises ParseException.
    """
    if not tokens:
        raise ParseException("Couldn't parse resulting datetime")
    day_number = int(tokens[0])
    if day_number < 1 or day_number > 31:
        raise ParseException("Couldn't parse resulting datetime")
    return day_number
def month_to_number(tokens):
    """Map a month-name token to its 1-based month number.

    Used as a pyparsing parse action.  Accepts any letter case and both
    abbreviated (``Jan``) and full (``January``) names, via the module-level
    MONTHS lookup table.
    """
    return MONTHS[tokens[0].lower()]
def post_process(res, allow_implicit=True):
    """
    Perform post-processing on the results of the date range parsing.
    At the moment this consists mainly of ensuring that any missing information is filled in.
    For example, if no years are specified at all in the string then both years are set to the
    current year, and if one part of the string includes no month or year then these are
    filled in from the other part of the string.
    :param res: The results from the parsing operation, as returned by the parseString function
        (a pyparsing ParseResults with optional 'start' and 'end' groups)
    :param allow_implicit: If implicit dates are allowed
    :return: the results with populated date information; res.end is set to None
        for a single (non-range) date
    """
    # Get current date, used to fill in missing years
    today = datetime.date.today()
    if not allow_implicit:
        # In strict mode both endpoints must have an explicit day number
        if ('start' in res and 'day' not in res.start) or ('end' in res and 'day' not in res.end):
            raise ParseException("Couldn't parse resulting datetime")
    if 'start' not in res:
        # We have a single date, not a range
        res['start'] = {}
        if 'month' not in res.end and 'day' not in res.end:
            # We have only got a year, so go from start to end of the year
            res['start']['year'] = res.end.year
            res['start']['month'] = 1
            res['start']['day'] = 1
            res['end']['month'] = 12
            res['end']['day'] = 31
            return res
        elif 'month' in res.end and 'day' not in res.end:
            if not isinstance(res.end.month, int):
                raise ParseException("Couldn't parse resulting datetime")
            # special case - treat bare month as a range from start to end of month
            if 'year' not in res.end or res.end.year == "":
                res['start']['year'] = today.year
                res['end']['year'] = today.year
            else:
                res['start']['year'] = res.end.year
            res['start']['day'] = 1
            res['start']['month'] = res.end.month
            # monthrange()[1] gives the number of days in the month
            res['end']['day'] = calendar.monthrange(
                res['start']['year'], res.end.month)[1]
        else:
            # A fully-specified single date: copy it to start, clear end
            res['start']['day'] = res.end.day
            res['start']['month'] = res.end.month
            if 'year' not in res.end:
                res['start']['year'] = today.year
            else:
                res['start']['year'] = res.end.year
            res['end'] = None
        return res
    if 'month' not in res.end and 'month' not in res.start and \
            'day' not in res.end and 'day' not in res.start:
        # No months or days given, just years
        res['start']['month'] = 1
        res['start']['day'] = 1
        res['end']['month'] = 12
        res['end']['day'] = 31
        return res
    # Sort out years: fill missing years from the other endpoint or today
    if 'year' not in res.end:
        res.end['year'] = today.year
        res.start['year'] = today.year
    elif 'year' not in res.start:
        res.start['year'] = res.end.year
    # Sort out months: a missing start month inherits the end month
    if 'month' not in res.start:
        res.start['month'] = res.end.month
    # Missing day numbers default to the first/last day of the month
    if 'day' not in res.start or res.start['day'] == '':
        res.start['day'] = 1
    if res.end.month and ('day' not in res.end or res.end['day'] == ''):
        res.end['day'] = calendar.monthrange(res.end.year, res.end.month)[1]
    return res
def create_parser():
    """Creates and returns the date-range grammar using PyParsing functions.

    The returned parser matches an optional start date, a separator and an
    end date; times are parsed but suppressed, so only date fields survive
    into the results (named groups 'start' and 'end').
    """
    # Day details (day number, superscript and day name)
    daynum = Word(nums, max=2)
    superscript = oneOf("th rd st nd", caseless=True)
    day = oneOf("Mon Monday Tue Tues Tuesday Wed Weds Wednesday "
                "Thu Thur Thurs Thursday Fri Friday Sat Saturday Sun Sunday", caseless=True)
    # Day number with optional ordinal suffix; validated/converted by check_day
    full_day_string = daynum + Optional(superscript).suppress()
    full_day_string.setParseAction(check_day)
    full_day_string.leaveWhitespace()
    # Month names, with abbreviations, with action to convert to equivalent month number
    month = oneOf(list(MONTHS.keys()), caseless=True) + \
        Optional(Literal(".").suppress())
    month.setParseAction(month_to_number)
    # Year (exactly four digits, converted to int)
    year = Word(nums, exact=4)
    year.setParseAction(lambda tokens: int(tokens[0]))
    # Time-of-day components -- matched so they don't break parsing,
    # but suppressed everywhere they appear in the date pattern below
    time_sep = oneOf(": .")
    am_pm = oneOf("am pm", caseless=True)
    hours = Word(nums, max=2)
    mins = Word(nums, max=2)
    time = hours("hour") + time_sep.suppress() + \
        mins("mins") + Optional(am_pm)("meridian")
    # date pattern: components may appear in any order (the '&' operator)
    date = (
        Group(Optional(time).suppress() & Optional(full_day_string("day")) & Optional(day).suppress() &
              Optional(month("month")) & Optional(year("year")))
    )
    # Possible separators (includes unicode en/em dashes)
    separator = oneOf("- -- to until through till untill \u2013 \u2014 ->", caseless=True)
    # Strings to completely ignore (whitespace ignored by default)
    ignoreable_chars = oneOf(", from starting beginning of", caseless=True)
    # Final putting together of everything
    daterange = (
        Optional(date("start") + Optional(time).suppress() + separator.suppress()) +
        date("end") + Optional(time).suppress() + stringEnd()
    )
    daterange.ignore(ignoreable_chars)
    return daterange
def parse(text, allow_implicit=True):
    """
    Parses a date range string and returns the start and end as datetimes.
    **Accepted formats:**
    This parsing routine works with date ranges and single dates, and should
    work with a wide variety of human-style string formats, including:
    - 27th-29th June 2010
    - 30 May to 9th Aug
    - 3rd Jan 1980 - 2nd Jan 2013
    - Wed 23 Jan - Sat 16 February 2013
    - Tuesday 29 May -> Sat 2 June 2012
    - From 27th to 29th March 1999
    - 1--9 Jul
    - 14th July 1988
    - 23rd October 7:30pm
    - From 07:30 18th Nov to 17:00 24th Nov
    **Notes:**
    - If an error encountered while parsing the date range then a
      `pyparsing.ParseException` will be raised.
    - If no year is specified then the current year is used.
    - All day names are ignored, so there is no checking to see whether,
      for example, the 23rd Jan 2013 is actually a Wednesday.
    - All times are ignored, assuming they are placed either before or after
      each date, otherwise they will cause an error.
    - The separators that are allows as part of the date range are `to`,
      `until`, `-`, `--` and `->`, plus the unicode em and en dashes.
    - Other punctuation, such as commas, is ignored.
    :param text: The string to parse
    :param allow_implicit: If implicit dates are allowed. For example,
        string 'May' by default treated as range
        from May, 1st to May, 31th. Setting allow_implicit to False helps avoid it.
    :return: A tuple ``(start, end)`` where each element is a datetime object.
        If the string only defines a single date then the tuple is ``(date, None)``.
        All times in the datetime objects are set to 00:00 as this function only parses dates.
    """
    parser = create_parser()
    result = parser.parseString(text)
    # Fill in any missing fields (year/month/day) from context
    res = post_process(result, allow_implicit)
    # Create standard dd/mm/yyyy strings and then convert to Python datetime
    # objects
    if 'year' not in res.start:
        # in case only separator was given
        raise ParseException("Couldn't parse resulting datetime")
    try:
        start_str = "%(day)s/%(month)s/%(year)s" % res.start
        start_datetime = datetime.datetime.strptime(start_str, "%d/%m/%Y")
    except ValueError:
        # strptime rejects impossible dates (e.g. 31st Feb)
        raise ParseException("Couldn't parse resulting datetime")
    if res.end is None:
        # Single date, not a range
        return start_datetime, None
    elif not res.end:
        raise ParseException("Couldn't parse resulting datetime")
    else:
        try:
            if "month" not in res.end:
                res.end["month"] = res.start["month"]
            end_str = "%(day)s/%(month)s/%(year)s" % res.end
            end_datetime = datetime.datetime.strptime(end_str, "%d/%m/%Y")
        except ValueError:
            raise ParseException("Couldn't parse resulting datetime")
    if end_datetime < start_datetime:
        # end is before beginning!
        # This is probably caused by a date straddling the change of year
        # without the year being given
        # So, we assume that the start should be the previous year
        res.start['year'] = res.start['year'] - 1
        start_str = "%(day)s/%(month)s/%(year)s" % res.start
        start_datetime = datetime.datetime.strptime(start_str, "%d/%m/%Y")
    return start_datetime, end_datetime
def interactive_test():
    """Run a simple interactive loop for manually testing date-range strings.

    Prompts until the user types 'quit', printing the parsed start/end
    dates for each entered string.
    """
    while True:
        text = input("Enter a date range string (or 'quit'): ")
        if text.lower() == 'quit':
            break
        # Build the PyParsing grammar, parse, then fill in missing fields
        parser = create_parser()
        parsed = post_process(parser.parseString(text))
        start_str = "%(day)s/%(month)s/%(year)s" % parsed.start
        end_str = "%(day)s/%(month)s/%(year)s" % parsed.end
        start_dt = datetime.datetime.strptime(start_str, "%d/%m/%Y")
        end_dt = datetime.datetime.strptime(end_str, "%d/%m/%Y")
        print(text)
        print("From: %s" % start_str)
        print("To: %s" % end_str)
        print(start_dt)
        print(end_dt)
        print("----")
/MergePythonSDK.ticketing-2.2.2-py3-none-any.whl/MergePythonSDK/ats/model/phone_number_type_enum.py | import re # noqa: F401
import sys # noqa: F401
from typing import (
Optional,
Union,
List,
Dict,
)
from MergePythonSDK.shared.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
OpenApiModel,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from MergePythonSDK.shared.exceptions import ApiAttributeError
from MergePythonSDK.shared.model_utils import import_model_by_name
from MergePythonSDK.shared.model_utils import MergeEnumType
class PhoneNumberTypeEnum(ModelNormal, MergeEnumType):
    """Enum model for a phone number's type (HOME/WORK/MOBILE/SKYPE/OTHER).

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # The only values accepted for the 'value' attribute of this enum.
    allowed_values = {
        ('value',): {
            'HOME': "HOME",
            'WORK': "WORK",
            'MOBILE': "MOBILE",
            'SKYPE': "SKYPE",
            'OTHER': "OTHER",
        },
    }
    validations = {
    }
    # No additional (undeclared) properties are accepted on this model.
    additional_properties_type = None
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        defined_types = {
            'value': (str,),
        }
        return defined_types
    @cached_property
    def discriminator():
        # This model is not polymorphic; no discriminator field.
        return None
    attribute_map = {
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, value, *args, **kwargs):  # noqa: E501
        """PhoneNumberTypeEnum - a model defined in OpenAPI.

        Deserialization constructor: builds an instance from wire data.
        Unlike __init__, ``_spec_property_naming`` defaults to True here
        because incoming data uses the serialized (spec) attribute names.

        Keyword Args:
            _check_type (bool): type-check values against openapi_types.
                Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model within the received data (used in error messages).
            _spec_property_naming (bool): True if variable names in the input
                data are serialized names as specified in the OpenAPI document.
            _configuration (Configuration): instance used when deserializing
                file_type parameters; enables type conversion when passed.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, to avoid revisiting them (cycle guard).
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', True)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional args are only accepted if they are dicts of kwargs;
            # anything else is a usage error.
            for arg in args:
                if isinstance(arg, dict):
                    kwargs.update(arg)
                else:
                    raise ApiTypeError(
                        "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                            args,
                            self.__class__.__name__,
                        ),
                        path_to_item=_path_to_item,
                        valid_classes=(self.__class__,),
                    )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        return self
    # Internal attributes excluded from the serialized representation.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, value, *args, **kwargs):  # noqa: E501
        """PhoneNumberTypeEnum - a model defined in OpenAPI.

        User-facing constructor.  ``_spec_property_naming`` defaults to
        False here (callers use pythonic snake_case names), which is the
        only behavioral difference from _from_openapi_data.

        Keyword Args:
            _check_type (bool): type-check values against openapi_types.
                Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model within the received data (used in error messages).
            _spec_property_naming (bool): True if variable names in the input
                data are serialized names as specified in the OpenAPI document.
            _configuration (Configuration): instance used when deserializing
                file_type parameters; enables type conversion when passed.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, to avoid revisiting them (cycle guard).
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            # Same dict-only positional-args convention as _from_openapi_data.
            for arg in args:
                if isinstance(arg, dict):
                    kwargs.update(arg)
                else:
                    raise ApiTypeError(
                        "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                            args,
                            self.__class__.__name__,
                        ),
                        path_to_item=_path_to_item,
                        valid_classes=(self.__class__,),
                    )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
/Office365-REST-Python-Client-2.4.3.tar.gz/Office365-REST-Python-Client-2.4.3/office365/sharepoint/webs/web.py |
from office365.runtime.client_result import ClientResult
from office365.runtime.client_value_collection import ClientValueCollection
from office365.runtime.queries.function import FunctionQuery
from office365.runtime.queries.service_operation import ServiceOperationQuery
from office365.runtime.paths.resource_path import ResourcePath
from office365.runtime.paths.service_operation import ServiceOperationPath
from office365.runtime.types.collections import StringCollection
from office365.sharepoint.activities.entity import SPActivityEntity
from office365.sharepoint.activities.logger import ActivityLogger
from office365.sharepoint.alerts.collection import AlertCollection
from office365.sharepoint.authentication.acs_service_principal_info import SPACSServicePrincipalInfo
from office365.sharepoint.base_entity_collection import BaseEntityCollection
from office365.sharepoint.businessdata.app_bdc_catalog import AppBdcCatalog
from office365.sharepoint.changes.collection import ChangeCollection
from office365.sharepoint.clientsidecomponent.hostedapps.manager import HostedAppsManager
from office365.sharepoint.clientsidecomponent.storage_entity import StorageEntity
from office365.sharepoint.clientsidecomponent.query_result import SPClientSideComponentQueryResult
from office365.sharepoint.clientsidecomponent.identifier import SPClientSideComponentIdentifier
from office365.sharepoint.contenttypes.collection import ContentTypeCollection
from office365.sharepoint.eventreceivers.definition_collection import EventReceiverDefinitionCollection
from office365.sharepoint.fields.collection import FieldCollection
from office365.sharepoint.files.file import File
from office365.sharepoint.flows.synchronization_result import FlowSynchronizationResult
from office365.sharepoint.folders.folder import Folder
from office365.sharepoint.folders.collection import FolderCollection
from office365.sharepoint.listitems.listitem import ListItem
from office365.sharepoint.lists.creation_information import ListCreationInformation
from office365.sharepoint.lists.document_library_information import DocumentLibraryInformation
from office365.sharepoint.lists.get_parameters import GetListsParameters
from office365.sharepoint.lists.list import List
from office365.sharepoint.lists.collection import ListCollection
from office365.sharepoint.lists.template_collection import ListTemplateCollection
from office365.sharepoint.lists.template_type import ListTemplateType
from office365.sharepoint.marketplace.corporatecuratedgallery.available_addins_response import SPAvailableAddinsResponse
from office365.sharepoint.navigation.navigation import Navigation
from office365.sharepoint.permissions.base_permissions import BasePermissions
from office365.sharepoint.permissions.roles.definitions.collection import RoleDefinitionCollection
from office365.sharepoint.permissions.securable_object import SecurableObject
from office365.sharepoint.principal.groups.group import Group
from office365.sharepoint.principal.groups.collection import GroupCollection
from office365.sharepoint.principal.users.user import User
from office365.sharepoint.principal.users.collection import UserCollection
from office365.sharepoint.pushnotifications.collection import PushNotificationSubscriberCollection
from office365.sharepoint.pushnotifications.subscriber import PushNotificationSubscriber
from office365.sharepoint.recyclebin.item_collection import RecycleBinItemCollection
from office365.sharepoint.sharing.external_site_option import ExternalSharingSiteOption
from office365.sharepoint.sharing.links.access_request import SharingLinkAccessRequest
from office365.sharepoint.sharing.object_sharing_settings import ObjectSharingSettings
from office365.sharepoint.sharing.links.data import SharingLinkData
from office365.sharepoint.sharing.result import SharingResult
from office365.sharepoint.sharing.shared_document_info import SharedDocumentInfo
from office365.sharepoint.sitescripts.utility import SiteScriptUtility
from office365.sharepoint.marketplace.sitecollection.appcatalog.accessor import SiteCollectionCorporateCatalogAccessor
from office365.sharepoint.marketplace.tenant.appcatalog.accessor import TenantCorporateCatalogAccessor
from office365.sharepoint.translation.user_resource import UserResource
from office365.sharepoint.ui.applicationpages.peoplepicker.web_service_interface import (
ClientPeoplePickerWebServiceInterface
)
from office365.sharepoint.usercustomactions.collection import UserCustomActionCollection
from office365.sharepoint.views.view import View
from office365.sharepoint.webparts.client.collection import ClientWebPartCollection
from office365.sharepoint.webs.calendar_type import CalendarType
from office365.sharepoint.webs.context_web_information import ContextWebInformation
from office365.sharepoint.fields.datetime_field_format_type import DateTimeFieldFormatType
from office365.sharepoint.webs.modernize_homepage_result import ModernizeHomepageResult
from office365.sharepoint.webs.multilingual_settings import MultilingualSettings
from office365.sharepoint.webs.regional_settings import RegionalSettings
from office365.sharepoint.sitescripts.serialization_info import SiteScriptSerializationInfo
from office365.sharepoint.sitescripts.serialization_result import SiteScriptSerializationResult
from office365.sharepoint.webs.information_collection import WebInformationCollection
from office365.sharepoint.webs.template_collection import WebTemplateCollection
from office365.sharepoint.types.resource_path import ResourcePath as SPResPath
from office365.sharepoint.webs.theme_info import ThemeInfo
class Web(SecurableObject):
"""
Represents a SharePoint site. A site is a type of SecurableObject.
A group of related webpages that is hosted by a server on the World Wide Web or an intranet.
Each website has its own entry points, metadata, administration settings, and workflows.
Also referred to as web site.
"""
    def __init__(self, context, resource_path=None):
        """
        Initializes a client object for a SharePoint site (web).

        :type resource_path: ResourcePath or None
        :type context: office365.sharepoint.client_context.ClientContext
        """
        if resource_path is None:
            # Default to the current context's root web endpoint.
            resource_path = ResourcePath("Web")
        super(Web, self).__init__(context, resource_path)
        self._web_url = None
def add_list(self, title, template_type=ListTemplateType.GenericList):
"""
Creates a new list and adds it to the web.
:param str title: Specifies the display name of the new list.
:param int template_type: Specifies the list server template of the new list.
"""
info = ListCreationInformation(title, None, template_type)
return self.lists.add(info)
def available_addins(self, server_relative_urls=None):
"""
:param list[str] server_relative_urls:
"""
payload = {"serverRelativeUrls": server_relative_urls}
return_type = ClientResult(self.context, SPAvailableAddinsResponse())
qry = ServiceOperationQuery(self, "AvailableAddins", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def add_cross_farm_message(self, message):
"""
:param str message:
"""
payload = {"messagePayloadBase64": message}
return_type = ClientResult(self.context, bool())
qry = ServiceOperationQuery(self, "AddCrossFarmMessage", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
    def get_site_script(self, include_branding=True, included_lists=None, include_links_to_exported_items=True,
                        include_regional_settings=True):
        """
        Creates site script syntax from current SharePoint site.
        :param bool include_branding: Extracts the configuration of the site's branding.
        :param list[str] or None included_lists: A list of one or more lists. Each is identified by the list url.
        :param bool include_links_to_exported_items: Extracts navigation links. In order to export navigation links
             pointing to lists, the list needs to be included in the request as well.
        :param bool include_regional_settings: Extracts the site's regional settings.
        :return: ClientResult holding a SiteScriptSerializationResult once executed.
        """
        result = ClientResult(self.context, SiteScriptSerializationResult())
        info = SiteScriptSerializationInfo(include_branding, included_lists, include_links_to_exported_items,
                                           include_regional_settings)
        def _web_loaded():
            # Deferred: the site URL is needed to build the request, so the
            # query is only queued once the Url property has been loaded.
            SiteScriptUtility.get_site_script_from_web(self.context, self.url, info, return_type=result)
        self.ensure_property("Url", _web_loaded)
        return result
def consent_to_power_platform(self):
""""""
return_type = FlowSynchronizationResult(self.context)
qry = ServiceOperationQuery(self, "ConsentToPowerPlatform", None, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_push_notification_subscriber(self, device_app_instance_id):
"""
Specifies the push notification subscriber over the site for the specified device app instance identifier.
:param str device_app_instance_id: Device application instance identifier.
"""
return_type = PushNotificationSubscriber(self.context)
qry = ServiceOperationQuery(self, "GetPushNotificationSubscriber", [device_app_instance_id], None,
None, return_type)
self.context.add_query(qry)
return return_type
def get_client_side_components(self, components):
"""
Returns the client side components for the requested components.
Client components include data necessary to render Client Side Web Parts and Client Side Applications.
:param list components: array of requested components, defined by id and version.
"""
return_type = ClientResult(self.context, ClientValueCollection(SPClientSideComponentIdentifier))
payload = {
"components": components
}
qry = ServiceOperationQuery(self, "GetClientSideComponents", None, payload,
None, return_type)
self.context.add_query(qry)
return return_type
def get_client_side_components_by_component_type(self, component_types):
"""
:param str component_types:
"""
return_type = ClientResult(self.context, ClientValueCollection(SPClientSideComponentIdentifier))
payload = {
"componentTypesString": component_types
}
qry = ServiceOperationQuery(self, "GetClientSideComponentsByComponentType", None, payload,
None, return_type)
self.context.add_query(qry)
return return_type
    def get_push_notification_subscribers_by_user(self, user):
        """
        Queries for the push notification subscribers for the site for the specified user.
        :param str or User user: User object or login name
        """
        return_type = PushNotificationSubscriberCollection(self.context)
        def _create_and_add_query(login_name):
            """
            Builds and enqueues the service query once the login name is known.
            :type login_name: str
            """
            qry = ServiceOperationQuery(self, "GetPushNotificationSubscribersByUser",
                                        [login_name], None, None, return_type)
            self.context.add_query(qry)
        if isinstance(user, User):
            # A User object may not have LoginName loaded yet; defer building the
            # query until the property has been retrieved.
            def _user_loaded():
                _create_and_add_query(user.login_name)
            user.ensure_property("LoginName", _user_loaded)
        else:
            # Treat the argument as a plain login name string.
            _create_and_add_query(user)
        return return_type
@staticmethod
def create_organization_sharing_link(context, url, is_edit_link=False):
""" Creates and returns an organization-internal link that can be used to access a document and gain permissions
to it.
:param office365.sharepoint.client_context.ClientContext context:
:param str url: he URL of the site, with the path of the object in SharePoint that is represented as query
string parameters, forSharing set to 1 if sharing, and bypass set to 1 to bypass any mobile logic.
:param bool is_edit_link: If true, the link will allow the logged in user to edit privileges on the item.
"""
return_type = ClientResult(context, str())
params = {"url": url, "isEditLink": is_edit_link}
qry = ServiceOperationQuery(context.web, "CreateOrganizationSharingLink", None, params, None, return_type)
qry.static = True
context.add_query(qry)
return return_type
@staticmethod
def destroy_organization_sharing_link(context, url, is_edit_link, remove_associated_sharing_link_group):
""" Removes an existing organization link for an object.
:param office365.sharepoint.client_context.ClientContext context: SharePoint client context
:param str url: the URL of the site, with the path of the object in SharePoint that is represented as query
string parameters, forSharing set to 1 if sharing, and bypass set to 1 to bypass any mobile logic.
:param bool is_edit_link: If true, the link will allow the logged in user to edit privileges on the item.
:param bool remove_associated_sharing_link_group: Indicates whether to remove the groups that contain the users
who have been given access to the shared object via the sharing link
"""
payload = {
"url": url,
"isEditLink": is_edit_link,
"removeAssociatedSharingLinkGroup": remove_associated_sharing_link_group
}
qry = ServiceOperationQuery(context.web, "DestroyOrganizationSharingLink", None, payload, None, None)
qry.static = True
context.add_query(qry)
return context.web
@staticmethod
def get_context_web_information(context):
"""
Returns an object that specifies metadata about the site
:type context: office365.sharepoint.client_context.ClientContext
"""
return_type = ClientResult(context, ContextWebInformation())
qry = ServiceOperationQuery(context.web, "GetContextWebInformation", None, None, None, return_type, True)
context.add_query(qry)
return return_type
@staticmethod
def get_web_url_from_page_url(context, page_full_url):
"""Returns the URL of the root folder for the site containing the specified URL
:type context: office365.sharepoint.client_context.ClientContext
:param str page_full_url: Specifies the URL from which to return the site URL.
"""
return_type = ClientResult(context, str())
payload = {
"pageFullUrl": page_full_url
}
qry = ServiceOperationQuery(context.web, "GetWebUrlFromPageUrl", None, payload, None, return_type)
qry.static = True
context.add_query(qry)
return return_type
def create_default_associated_groups(self, user_login, user_login2, group_name_seed):
"""
Creates the default Owners, Members and Visitors SPGroups on the web.
:param str user_login: The user logon name of the group owner.
:param str user_login2: The secondary contact for the group.
:param str group_name_seed: The name seed to use when creating of the full names of the default groups.
For example, if the name seed is Contoso then the default groups will be created with the names:
Contoso Owners, Contoso Members and Contoso Visitors. If the value of this parameter is null then the
web title is used instead.
"""
payload = {
"userLogin": user_login,
"userLogin2": user_login2,
"groupNameSeed": group_name_seed
}
qry = ServiceOperationQuery(self, "CreateDefaultAssociatedGroups", None, payload)
qry.static = True
self.context.add_query(qry)
return self
def create_group_based_environment(self):
return_type = FlowSynchronizationResult(self.context)
qry = ServiceOperationQuery(self, "CreateGroupBasedEnvironment", None, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_group_based_environment(self):
return_type = FlowSynchronizationResult(self.context)
qry = ServiceOperationQuery(self, "GetGroupBasedEnvironment", None, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_acs_service_principals(self, app_ids=None):
"""
List service principals
:para, list[str] appIds:
"""
payload = {"appIds": app_ids}
return_type = ClientResult(self.context, ClientValueCollection(SPACSServicePrincipalInfo))
qry = ServiceOperationQuery(self, "GetACSServicePrincipals", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def sync_flow_instances(self, target_web_url):
"""
:param str target_web_url:
"""
return_type = FlowSynchronizationResult(self.context)
payload = {"targetWebUrl": target_web_url}
qry = ServiceOperationQuery(self, "SyncFlowInstances", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def sync_flow_templates(self, category):
"""
:param str category:
"""
return_type = FlowSynchronizationResult(self.context)
payload = {"category": category}
qry = ServiceOperationQuery(self, "SyncFlowTemplates", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_all_client_side_components(self):
""""""
return_type = ClientResult(self.context, str())
qry = ServiceOperationQuery(self, "GetAllClientSideComponents", None, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_app_bdc_catalog(self):
"""
Returns the Business Data Connectivity (BDC) MetadataCatalog for an application that gives access to the
external content types defined in the BDC metadata model packaged by the application.<151>
This method SHOULD be called on the Web (section 3.2.5.143) object that represents the site for the application
and it returns the BDC MetadataCatalog deployed on the site.
"""
return_type = AppBdcCatalog(self.context)
qry = ServiceOperationQuery(self, "GetAppBdcCatalog", None, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_client_side_web_parts(self, project=None, include_errors=False):
"""
It MUST return an array of 3rd party webpart components installed on this site
:param bool include_errors: If true, webparts with errors MUST be included in the results of the request.
If false, webparts with errors MUST be excluded in the results of the request.
:param str project:
"""
return_type = ClientResult(self.context, ClientValueCollection(SPClientSideComponentQueryResult))
params = {
"includeErrors": include_errors,
"project": project
}
qry = ServiceOperationQuery(self, "GetClientSideWebParts", None, params, None, return_type)
self.context.add_query(qry)
return return_type
def add_supported_ui_language(self, lcid):
"""
Adds a supported UI language by its language identifier.
:param int lcid: Specifies the language identifier to be added.
"""
qry = ServiceOperationQuery(self, "AddSupportedUILanguage", {"lcid": lcid})
self.context.add_query(qry)
return self
def get_lists(self, row_limit=100):
"""
:param int row_limit: Specifies a limit for the number of lists in the query that are returned per page
"""
return_type = ListCollection(self.context)
payload = {
"getListsParams": GetListsParameters(row_limit=row_limit)
}
qry = ServiceOperationQuery(self, "GetLists", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_sub_webs_filtered_for_current_user(self, query):
"""Returns a collection of objects that contain metadata about subsites of the current site (2) in which the
current user is a member.
:type query: office365.sharepoint.webs.subweb_query.SubwebQuery
"""
return_type = WebInformationCollection(self.context)
qry = ServiceOperationQuery(self, "getSubWebsFilteredForCurrentUser", {
"nWebTemplateFilter": query.WebTemplateFilter,
"nConfigurationFilter": query.ConfigurationFilter
}, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_recycle_bin_items(self, paging_info=None, row_limit=100, is_ascending=True, order_by=None, item_state=None):
"""
Gets the recycle bin items that are based on the specified query.
:param str paging_info: an Object that is used to obtain the next set of rows in a paged view of the Recycle Bin
:param int row_limit: a limit for the number of items returned in the query per page.
:param bool is_ascending: a Boolean value that specifies whether to sort in ascending order.
:param int order_by: the column by which to order the Recycle Bin query.
:param int item_state: Recycle Bin stage of items to return in the query.
"""
return_type = RecycleBinItemCollection(self.context)
payload = {
"rowLimit": row_limit,
"isAscending": is_ascending,
"pagingInfo": paging_info,
"orderBy": order_by,
"itemState": item_state
}
qry = ServiceOperationQuery(self, "GetRecycleBinItems", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
    def get_all_webs(self):
        """Returns a collection containing a flat list of all Web objects in the Web."""
        # Imported locally to avoid a circular import at module load time.
        from office365.sharepoint.webs.collection import WebCollection
        return_type = WebCollection(self.context, self.webs.resource_path)
        def _webs_loaded():
            # Recursively walks the sub-web tree, accumulating every web into return_type.
            self._load_sub_webs_inner(self.webs, return_type)
        self.ensure_property("Webs", _webs_loaded)
        return return_type
    def _load_sub_webs_inner(self, webs, all_webs):
        """
        Recursively flattens the web hierarchy into the ``all_webs`` collection.
        :type webs: office365.sharepoint.webs.collection.WebCollection
        :type all_webs: office365.sharepoint.webs.collection.WebCollection
        """
        for cur_web in webs:  # type: Web
            all_webs.add_child(cur_web)
            # The current web is passed explicitly into the callback (last argument
            # of ensure_property) so each closure sees its own web rather than the
            # loop variable's final value (late-binding closure pitfall).
            def _webs_loaded(web):
                if len(web.webs) > 0:
                    self._load_sub_webs_inner(web.webs, all_webs)
            cur_web.ensure_property("Webs", _webs_loaded, cur_web)
def get_list_using_path(self, decoded_url):
"""
Returns the list that is associated with the specified server-relative path.
:param str decoded_url: Contains the site-relative path for a list, for example, /Lists/Announcements.
"""
path = SPResPath.create_relative(self.context.base_url, decoded_url)
return_type = List(self.context)
self.lists.add_child(return_type)
qry = ServiceOperationQuery(self, "GetListUsingPath", path, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_news_list(self, allow_create=False):
"""
Returns the News List on this web, if it exists. If the list does not exist, the list will be created and
then returned if allowCreate is set to true. The News List is a hidden SP.List in which News Posts are stored.
:param bool allow_create: Indicates whether to create the list if it does not exist on this web.
"true" means yes.
"""
return_type = List(self.context)
self.lists.add_child(return_type)
payload = {"allowCreate": allow_create}
qry = ServiceOperationQuery(self, "GetNewsList", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_view_from_url(self, list_url):
"""Returns a view of a list within the site based on the specified URL.
:param str list_url: Contains either an absolute URL or a site-relative URL of a view.
"""
return View(self.context, ServiceOperationPath("GetViewFromUrl", [list_url], self.resource_path))
def get_view_from_path(self, decoded_url):
"""Returns a view of a list within the site based on the specified path.
:param str decoded_url: Contains either an absolute path or a site-relative path of a view.
"""
return View(self.context, ServiceOperationPath("GetViewFromPath", [decoded_url], self.resource_path))
def get_regional_datetime_schema(self):
"""Get DateTime Schema based on regional settings"""
return_type = ClientResult(self.context, str())
qry = ServiceOperationQuery(self, "GetRegionalDateTimeSchema", None, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_sharing_link_data(self, link_url):
"""
This method determines basic information about the supplied link URL, including limited data about the object
the link URL refers to and any additional sharing link data if the link URL is a tokenized sharing link
:param str link_url: A URL that is either a tokenized sharing link or a canonical URL for a document
"""
return_type = ClientResult(self.context, SharingLinkData())
payload = {"linkUrl": link_url}
qry = ServiceOperationQuery(self, "GetSharingLinkData", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
@staticmethod
def get_context_web_theme_data(context):
"""
Get ThemeData for the context web.
:type context: office365.sharepoint.client_context.ClientContext
"""
return_type = ClientResult(context, str())
qry = ServiceOperationQuery(context.web, "GetContextWebThemeData", None, None, None, return_type)
qry.static = True
context.add_query(qry)
return return_type
def create_site_page(self, page_metadata):
"""Create a site page
:param str page_metadata:
"""
payload = {"pageMetaData": page_metadata}
return_type = ClientResult(self.context)
qry = ServiceOperationQuery(self, "CreateSitePage", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
@staticmethod
def create_anonymous_link(context, url, is_edit_link, return_type=None):
"""Create an anonymous link which can be used to access a document without needing to authenticate.
:param bool is_edit_link: If true, the link will allow the guest user edit privileges on the item.
:param str url: The URL of the site, with the path of the object in SharePoint represented as query
string parameters
:param office365.sharepoint.client_context.ClientContext context: client context
:param ClientResult return_type: Return type
"""
if return_type is None:
return_type = ClientResult(context, str())
payload = {
"url": str(SPResPath.create_absolute(context.base_url, url)),
"isEditLink": is_edit_link
}
qry = ServiceOperationQuery(context.web, "CreateAnonymousLink", None, payload, None, return_type, True)
context.add_query(qry)
return return_type
@staticmethod
def create_anonymous_link_with_expiration(context, url, is_edit_link, expiration_string, return_type=None):
"""
Creates and returns an anonymous link that can be used to access a document without needing to authenticate.
:param bool is_edit_link: If true, the link will allow the guest user edit privileges on the item.
:param str url: The URL of the site, with the path of the object in SharePoint represented as query
string parameters
:param str expiration_string: A date/time string for which the format conforms to the ISO 8601:2004(E) complete
representation for calendar date and time of day, and which represents the time and date of expiry for the
anonymous link. Both the minutes and hour value MUST be specified for the difference between the local and
UTC time. Midnight is represented as 00:00:00.
:param office365.sharepoint.client_context.ClientContext context: client context
:param ClientResult return_type: Return type
"""
if return_type is None:
return_type = ClientResult(context, str())
payload = {
"url": str(SPResPath.create_absolute(context.base_url, url)),
"isEditLink": is_edit_link,
"expirationString": expiration_string
}
qry = ServiceOperationQuery(context.web, "CreateAnonymousLinkWithExpiration", None, payload, None, return_type)
qry.static = True
context.add_query(qry)
return return_type
@staticmethod
def get_object_sharing_settings(context, object_url, group_id, use_simplified_roles):
"""Given a path to an object in SharePoint, this will generate a sharing settings object which contains
necessary information for rendering sharing information
:param office365.sharepoint.client_context.ClientContext context: SharePoint client
:param str object_url: A URL with one of two possible formats.
The two possible URL formats are:
1) The URL of the site, with the path of the object in SharePoint represented as query string parameters,
forSharing set to 1 if sharing, and mbypass set to 1 to bypass any mobile logic
e.g. https://contoso.com/?forSharing=1&mbypass=1&List=%7BCF908473%2D72D4%2D449D%2D8A53%2D4BD01EC54B84%7D&
obj={CF908473-72D4-449D-8A53-4BD01EC54B84},1,DOCUMENT
2) The URL of the SharePoint object (web, list, item) intended for sharing
e.g. https://contoso.com/Documents/SampleFile.docx
:param int group_id: The id value of the permissions group if adding to a group, 0 otherwise.
:param bool use_simplified_roles: A Boolean value indicating whether to use the SharePoint
simplified roles (Edit, View) or not.
"""
return_type = ObjectSharingSettings(context)
payload = {
"objectUrl": object_url,
"groupId": group_id,
"useSimplifiedRoles": use_simplified_roles
}
qry = ServiceOperationQuery(context.web, "GetObjectSharingSettings", None, payload, None, return_type)
qry.static = True
context.add_query(qry)
return return_type
def get_client_side_components_by_id(self, component_ids=None):
"""
Returns the client side components for the requested component identifiers.
Client components include data necessary to render Client Side Web Parts and Client Side Applications.
:param list[str] component_ids: List of requested component identifiers.
"""
return_type = ClientResult(self.context, ClientValueCollection(SPClientSideComponentQueryResult))
payload = {
"componentIds": StringCollection(component_ids)
}
qry = ServiceOperationQuery(self, "GetClientSideComponentsById", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_file_by_server_relative_url(self, server_relative_url):
"""
Returns the file object located at the specified server-relative URL, for example:
- "/sites/MySite/Shared Documents/MyDocument.docx"
- "Shared Documents/MyDocument.docx"
:param str server_relative_url: Specifies the server-relative URL for the file.
"""
path = SPResPath.create_relative(self.context.base_url, server_relative_url)
return File(self.context, ServiceOperationPath("getFileByServerRelativeUrl", [str(path)], self.resource_path))
def get_file_by_server_relative_path(self, path):
"""Returns the file object located at the specified server-relative path, for example:
- "/sites/MySite/Shared Documents/MyDocument.docx"
- "Shared Documents/MyDocument.docx"
Note: prefer this method over get_folder_by_server_relative_url since it supports % and # symbols in names
:param str path: Contains the server-relative path of the file.
"""
path = SPResPath.create_relative(self.context.base_url, path)
return File(self.context,
ServiceOperationPath("getFileByServerRelativePath", path.to_json(), self.resource_path))
def get_folder_by_server_relative_url(self, url):
"""Returns the folder object located at the specified server-relative URL.
:param str url: Specifies the server-relative URL for the folder.
"""
return Folder(self.context, ServiceOperationPath("getFolderByServerRelativeUrl", [url], self.resource_path))
def get_folder_by_server_relative_path(self, decoded_url):
"""Returns the folder object located at the specified server-relative URL, for example:
- "/sites/MySite/Shared Documents"
- "Shared Documents"
Prefer this method over get_folder_by_server_relative_url since it supports % and # symbols
:param str decoded_url: Contains the server-relative URL for the folder
"""
path = SPResPath(decoded_url)
return Folder(self.context,
ServiceOperationPath("getFolderByServerRelativePath", path.to_json(), self.resource_path))
def ensure_folder_path(self, path):
"""
Ensures a nested folder hierarchy exist
:param str path: relative server URL (path) to a folder
"""
return self.root_folder.folders.ensure_path(path)
def ensure_user(self, login_name):
"""Checks whether the specified logon name belongs to a valid user of the website, and if the logon name does
not already exist, adds it to the website.
:param str login_name: Specifies a string that contains the login name.
"""
return_type = User(self.context)
self.site_users.add_child(return_type)
qry = ServiceOperationQuery(self, "EnsureUser", [login_name], None, None, return_type)
self.context.add_query(qry)
return return_type
def ensure_tenant_app_catalog(self, caller_id):
"""
:param str caller_id:
"""
return_type = ClientResult(self.context, bool())
payload = {"callerId": caller_id}
qry = ServiceOperationQuery(self, "EnsureTenantAppCatalog", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_user_effective_permissions(self, user_name):
"""Gets the effective permissions that the specified user has within the current application scope.
:param str user_name: Specifies the user login name.
"""
return_type = ClientResult(self.context, BasePermissions())
qry = ServiceOperationQuery(self, "GetUserEffectivePermissions", [user_name], None, None, return_type)
self.context.add_query(qry)
return return_type
def get_list_by_title(self, title):
"""
Returns the list with the specified display name.
:param str title: Specifies the display name
"""
return List(self.context, ServiceOperationPath("GetListByTitle", [title], self.resource_path))
def does_user_have_permissions(self, permission_mask):
"""Returns whether the current user has the given set of permissions.
:param BasePermissions permission_mask: Specifies the set of permissions to verify.
"""
return_type = ClientResult(self.context, bool())
qry = ServiceOperationQuery(self, "DoesUserHavePermissions", permission_mask, None, None, return_type)
self.context.add_query(qry)
return return_type
def does_push_notification_subscriber_exist(self, device_app_instance_id):
"""
Specifies whether the push notification subscriber exists for the current user
with the given device app instance identifier.
:param str device_app_instance_id: Device application instance identifier.
"""
return_type = ClientResult(self.context, bool())
params = {"deviceAppInstanceId": device_app_instance_id}
qry = ServiceOperationQuery(self, "DoesPushNotificationSubscriberExist", params, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_folder_by_id(self, unique_id):
"""
Returns the folder object with the specified GUID.
:param str unique_id: A GUID that identifies the folder.
"""
return Folder(self.context, ServiceOperationPath("GetFolderById", [unique_id], self.resource_path))
def get_user_by_id(self, user_id):
"""Returns the user corresponding to the specified member identifier for the current site.
:param int user_id: Specifies the member identifier.
"""
return User(self.context, ServiceOperationPath("getUserById", [user_id], self.resource_path))
def default_document_library(self):
"""Retrieves the default document library."""
return List(self.context, ServiceOperationPath("defaultDocumentLibrary", None, self.resource_path))
def get_list(self, path):
"""Get list by path
:param str path: A string that contains the site-relative URL for a list, for example, /Lists/Announcements.
"""
safe_path = SPResPath.create_relative(self.context.base_url, path)
return List(self.context, ServiceOperationPath("getList", [str(safe_path)], self.resource_path))
def get_changes(self, query):
"""Returns the collection of all changes from the change log that have occurred within the scope of the site,
based on the specified query.
:param office365.sharepoint.changes.query.ChangeQuery query: Specifies which changes to return
"""
return_type = ChangeCollection(self.context)
payload = {"query": query}
qry = ServiceOperationQuery(self, "getChanges", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_available_web_templates(self, lcid=1033, do_include_cross_language=False):
"""
Returns a collection of site templates available for the site.
:param int lcid: Specifies the LCID of the site templates to be retrieved.
:param bool do_include_cross_language: Specifies whether to include language-neutral site templates.
"""
params = {
"lcid": lcid,
"doIncludeCrossLanguage": do_include_cross_language
}
return_type = WebTemplateCollection(self.context,
ServiceOperationPath("GetAvailableWebTemplates", params,
self.resource_path))
qry = ServiceOperationQuery(self, "GetAvailableWebTemplates", params, None, None, return_type)
self.context.add_query(qry)
return return_type
def hub_site_data(self, force_refresh=False):
"""Retrieves data describing a SharePoint hub site.
:param bool force_refresh: Default value is false. When false, the data is returned from the server's cache.
When true, the cache is refreshed with the latest updates and then returned. Use this if you just made
changes and need to see those changes right away.
"""
return_type = ClientResult(self.context)
payload = {"forceRefresh": force_refresh}
qry = ServiceOperationQuery(self, "HubSiteData", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def increment_site_client_tag(self):
"""
Increments the client cache control number for this site collection.
"""
qry = ServiceOperationQuery(self, "IncrementSiteClientTag")
self.context.add_query(qry)
return self
def apply_web_template(self, web_template):
"""
Applies the specified site definition or site template to the website that has no template applied to it.
:param str web_template: The name of the site definition or the file name of the site template to be applied.
:return:
"""
qry = ServiceOperationQuery(self, "ApplyWebTemplate", {"webTemplate": web_template})
self.context.add_query(qry)
return self
def get_custom_list_templates(self):
"""
Specifies the collection of custom list templates for a given site.
"""
return_type = ListTemplateCollection(self.context)
qry = ServiceOperationQuery(self, "GetCustomListTemplates", None, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_file_by_guest_url(self, guest_url):
"""
Returns the file object from the guest access URL.
:param str guest_url: The guest access URL to get the file with.
"""
return_type = File(self.context)
payload = {"guestUrl": guest_url}
qry = ServiceOperationQuery(self, "GetFileByGuestUrl", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_file_by_guest_url_extended(self, guest_url, ensure_access=None, password=None):
"""
Returns the file object from the tokenized sharing link URL.
:param str guest_url: The tokenized sharing link URL for the folder.
:param str password: This value contains the password to be supplied to a tokenized sharing link for validation.
This value is only needed if the link requires a password before granting access and the calling user
does not currently have perpetual access through the tokenized sharing link.
This value MUST be set to the correct password for the tokenized sharing link for the access granting
operation to succeed. If the tokenized sharing link does not require a password or the calling user
already has perpetual access through the tokenized sharing link, this value will be ignored.
:param bool ensure_access: Indicates if the request to the tokenized sharing link grants perpetual access to
the calling user.
"""
return_type = File(self.context)
payload = {
"guestUrl": guest_url,
"requestSettings": SharingLinkAccessRequest(ensure_access, password)
}
qry = ServiceOperationQuery(self, "GetFileByGuestUrlExtended", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_file_by_guest_url_ensure_access(self, guest_url, ensure_access):
"""
Returns the file object from the tokenized sharing link URL.
:param str guest_url: The guest access URL to get the file with.
:param bool ensure_access: Indicates if the request to the tokenized sharing link grants perpetual access to
the calling user. If it is set to true then the user who is requesting the file will be granted perpetual
permissions through the tokenized sharing link.
"""
return_type = File(self.context)
payload = {"guestUrl": guest_url, "ensureAccess": ensure_access}
qry = ServiceOperationQuery(self, "GetFileByGuestUrlEnsureAccess", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_file_by_linking_url(self, linking_url):
"""
Returns the file object from the linking URL.
:param str linking_url: The linking URL to return the file object for.
A linking URL can be obtained from LinkingUrl.
"""
return_type = File(self.context)
payload = {"linkingUrl": linking_url}
qry = ServiceOperationQuery(self, "GetFileByLinkingUrl", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_file_by_url(self, file_url):
"""
Returns the file object from the given URL.
:param str file_url: The URL used to get the file object.
"""
return_type = File(self.context)
params = {"fileUrl": file_url}
qry = ServiceOperationQuery(self, "GetFileByUrl", params, None, None, return_type)
self.context.add_query(qry)
return return_type
def get_file_by_wopi_frame_url(self, wopi_frame_url):
"""
Returns the file object from the WOPI frame URL.
:param str wopi_frame_url: The WOPI frame URL used to get the file object.
"""
return_type = File(self.context)
qry = ServiceOperationQuery(self, "GetFileByWOPIFrameUrl", [wopi_frame_url], None, None, return_type)
self.context.add_query(qry)
return return_type
def get_folder_by_guest_url(self, guest_url):
"""
Returns the folder object from the tokenized sharing link URL.
:param str guest_url: The tokenized sharing link URL for the folder.
"""
return_type = Folder(self.context)
payload = {"guestUrl": guest_url}
qry = ServiceOperationQuery(self, "GetFolderByGuestUrl", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def get_folder_by_guest_url_extended(self, guest_url, ensure_access=None, password=None):
"""
Returns the folder object from the tokenized sharing link URL.
:param str guest_url: The tokenized sharing link URL for the folder.
:param str password: This value contains the password to be supplied to a tokenized sharing link for validation.
This value is only needed if the link requires a password before granting access and the calling user
does not currently have perpetual access through the tokenized sharing link.
This value MUST be set to the correct password for the tokenized sharing link for the access granting
operation to succeed. If the tokenized sharing link does not require a password or the calling user
already has perpetual access through the tokenized sharing link, this value will be ignored.
:param bool ensure_access: Indicates if the request to the tokenized sharing link grants perpetual access to
the calling user.
"""
return_type = Folder(self.context)
payload = {
"guestUrl": guest_url,
"requestSettings": SharingLinkAccessRequest(ensure_access, password)
}
qry = ServiceOperationQuery(self, "GetFolderByGuestUrlExtended", None, payload, None, return_type)
self.context.add_query(qry)
return return_type
def parse_datetime(self, value, display_format=DateTimeFieldFormatType.DateTime, calendar_type=CalendarType.None_):
    """
    Parses a datetime string expressed in the web's local time and calendar.
    For example, "09/08/1430" with the web calendar set to Hijri corresponds to
    07/31/2009 in the Gregorian calendar.
    :param str value: The datetime string to parse.
    :param int display_format: A SP.DateTimeFieldFormatType value.
    :param int calendar_type: A SP.CalendarType value.
    """
    result = ClientResult(self.context)
    parameters = {
        "value": value,
        "displayFormat": display_format,
        "calendarType": calendar_type,
    }
    query = ServiceOperationQuery(self, "ParseDateTime", None, parameters, None, result)
    self.context.add_query(query)
    return result
def share(self, user_principal_name,
          share_option=ExternalSharingSiteOption.View,
          send_email=True, email_subject=None, email_body=None):
    """
    Share a Web with user
    :param str user_principal_name: User identifier
    :param ExternalSharingSiteOption share_option: The sharing type of permission to grant on the object.
    :param bool send_email: A flag to determine if an email notification SHOULD be sent (if email is configured).
    :param str email_subject: The email subject.
    :param str email_body: The email body.
    :rtype: SharingResult
    """
    return_type = SharingResult(self.context)

    # Step 3: once the people picker has resolved the user, issue the actual
    # ShareObject call, granting membership in the role group selected below.
    def _picker_value_resolved(resp, picker_result, group):
        picker_input = "[{0}]".format(picker_result.value)
        role_value = "group:{groupId}".format(groupId=group.properties["Id"])
        Web.share_object(self.context, self.url, picker_input, role_value, 0,
                         False, send_email, False, email_subject, email_body, return_type=return_type)

    # Step 2: after the target group is loaded, resolve the principal name via
    # the people picker service; the sharing call runs after that resolves.
    def _grp_resolved(group):
        picker_result = ClientPeoplePickerWebServiceInterface.client_people_picker_resolve_user(self.context,
                                                                                                user_principal_name)
        self.context.after_execute(_picker_value_resolved, True, picker_result, group)

    # Step 1: map the requested sharing option to the web's associated
    # visitor/member/owner group, then load that group.
    def _web_resolved():
        groups = {
            ExternalSharingSiteOption.View: self.associated_visitor_group,
            ExternalSharingSiteOption.Edit: self.associated_member_group,
            ExternalSharingSiteOption.Owner: self.associated_owner_group,
        }
        selected_group = groups[share_option]
        self.context.load(selected_group, after_loaded=_grp_resolved)

    # The chain needs the web's absolute Url; ensure it is loaded first.
    self.ensure_property("Url", _web_resolved)
    return return_type
def unshare(self):
    """
    Removes sharing permissions from this Web.
    :rtype: SharingResult
    """
    result = SharingResult(self.context)

    def _after_url_loaded():
        # The static UnshareObject operation requires the web's absolute url.
        Web.unshare_object(self.context, self.url, return_type=result)

    self.ensure_property("Url", _after_url_loaded)
    return result
@staticmethod
def get_document_libraries(context, web_full_url):
    """
    Returns document library information for a SharePoint site. Private libraries
    (picture, catalog, asset, application list, form template) and libraries the
    user has no permission to view are excluded.
    :param office365.sharepoint.client_context.ClientContext context: SharePoint context
    :param str web_full_url: The URL of the web.
    """
    result = ClientResult(context, ClientValueCollection(DocumentLibraryInformation))
    query = ServiceOperationQuery(context.web, "GetDocumentLibraries", None,
                                  {"webFullUrl": web_full_url}, None, result)
    query.static = True
    context.add_query(query)
    return result
@staticmethod
def default_document_library_url(context, web_url):
    """
    Returns the default document library URL.
    :param office365.sharepoint.client_context.ClientContext context: SharePoint context
    :param str web_url: URL of the web.
    """
    return_type = ClientResult(context, DocumentLibraryInformation())
    payload = {
        "webUrl": web_url,
    }
    # NOTE(review): the trailing positional True presumably marks this operation
    # as static, whereas sibling methods set `qry.static = True` after
    # construction — confirm the two forms are equivalent for ServiceOperationQuery.
    qry = ServiceOperationQuery(context.web, "DefaultDocumentLibraryUrl", None, payload, None, return_type, True)
    context.add_query(qry)
    return return_type
@staticmethod
def delete_all_anonymous_links_for_object(context, url):
    """
    Removes every existing anonymous link for an object.
    :param office365.sharepoint.client_context.ClientContext context: SharePoint context
    :param str url: The URL of the object being shared, with the path of the object in SharePoint
        represented as query string parameters.
    """
    query = ServiceOperationQuery(context.web, "DeleteAllAnonymousLinksForObject", None, {"url": url})
    query.static = True
    context.add_query(query)
    return context.web
@staticmethod
def delete_anonymous_link_for_object(context, url, is_edit_link, remove_associated_sharing_link_group):
    """
    Removes an existing anonymous link for an object.
    :param office365.sharepoint.client_context.ClientContext context: SharePoint context
    :param str url: The URL of the object being shared, with the path of the object in SharePoint that is
        represented as query string parameters.
    :param bool is_edit_link: If true, the edit link for the object will be removed. If false, the view only link
        for the object will be removed.
    :param bool remove_associated_sharing_link_group: Indicates whether to remove the groups that contain the users
        who have been given access to the shared object via the sharing link.
    """
    payload = {
        "url": url,
        "isEditLink": is_edit_link,
        "removeAssociatedSharingLinkGroup": remove_associated_sharing_link_group
    }
    qry = ServiceOperationQuery(context.web, "DeleteAnonymousLinkForObject", None, payload)
    # Static operation: targets the site via context.web, not a bound instance.
    qry.static = True
    context.add_query(qry)
    return context.web
@staticmethod
def get_document_and_media_libraries(context, web_full_url, include_page_libraries):
    """
    Returns the document libraries of a SharePoint site, including picture, asset,
    and site assets libraries. Private libraries (catalog, application list, form
    template) and libraries the user has no permission to view are not included.
    :param office365.sharepoint.client_context.ClientContext context: SharePoint context
    :param str web_full_url: URL of the web.
    :param bool include_page_libraries: Indicates whether to include page libraries.
    """
    result = ClientResult(context, ClientValueCollection(DocumentLibraryInformation))
    parameters = {
        "webFullUrl": web_full_url,
        "includePageLibraries": include_page_libraries,
    }
    query = ServiceOperationQuery(context.web, "GetDocumentAndMediaLibraries", None, parameters, None, result)
    query.static = True
    context.add_query(query)
    return result
@staticmethod
def get_sharing_link_kind(context, file_url, return_type=None):
    """
    Determines the kind of tokenized sharing link represented by the supplied file URL.
    :param office365.sharepoint.client_context.ClientContext context:
    :param str file_url: A URL that is a tokenized sharing link for a document.
    :param ClientResult or None return_type: Optional pre-built result container.
    """
    result = return_type if return_type is not None else ClientResult(context)
    query = ServiceOperationQuery(context.web, "GetSharingLinkKind", None, {"fileUrl": file_url}, None, result)
    query.static = True
    context.add_query(query)
    return result
@staticmethod
def forward_object_link(context, url, people_picker_input, email_subject=None, email_body=None):
    """
    Shares an object in SharePoint, such as a list item or a site with no Acl changes, by sending the link.
    This is used when the user has no permission to share and cannot send access request.
    The user can use this method to send the link of the object to site members who already have permission
    to view the object.
    Returns a SharingResult object that contains completion script and page for redirection if desired.
    :param office365.sharepoint.client_context.ClientContext context: SharePoint context
    :param str url: The URL of the website with the path of an object in SharePoint query string parameters.
    :param str people_picker_input: A string of JSON representing users in people picker format.
    :param str email_subject: The email subject.
    :param str email_body: The email body.
    """
    return_type = SharingResult(context)
    payload = {
        "url": url,
        "peoplePickerInput": people_picker_input,
        "emailSubject": email_subject,
        "emailBody": email_body,
    }
    qry = ServiceOperationQuery(context.web, "ForwardObjectLink", None, payload, None, return_type)
    # Static operation: invoked against the site, not a bound client object.
    qry.static = True
    context.add_query(qry)
    return return_type
@staticmethod
def share_object(context, url, people_picker_input,
                 role_value=None,
                 group_id=0, propagate_acl=False,
                 send_email=True, include_anonymous_link_in_email=False, email_subject=None, email_body=None,
                 use_simplified_roles=True, return_type=None):
    """
    This method shares an object in SharePoint such as a list item or site. It returns a SharingResult object
    which contains the completion script and a page to redirect to if desired.
    :param office365.sharepoint.client_context.ClientContext context: SharePoint context
    :param str url: The URL of the website with the path of an object in SharePoint query string parameters.
    :param str role_value: The sharing role value for the type of permission to grant on the object.
    :param str people_picker_input: A string of JSON representing users in people picker format.
    :param int group_id: The ID of the group to be added. Zero if not adding to a permissions group.
    :param bool propagate_acl: A flag to determine if permissions SHOULD be pushed to items with unique permissions
    :param bool send_email: A flag to determine if an email notification SHOULD be sent (if email is configured).
    :param bool include_anonymous_link_in_email: If an email is being sent, this determines if an anonymous link
        SHOULD be added to the message.
    :param str email_subject: The email subject.
    :param str email_body: The email body.
    :param bool use_simplified_roles: A Boolean value indicating whether to use the SharePoint simplified roles
        (Edit, View) or not.
    :param SharingResult or None return_type: Return type
    """
    # Allow callers (e.g. Web.share) to supply a pre-built result container.
    if return_type is None:
        return_type = SharingResult(context)
    payload = {
        "url": url,
        "groupId": group_id,
        "peoplePickerInput": people_picker_input,
        "roleValue": role_value,
        "includeAnonymousLinkInEmail": include_anonymous_link_in_email,
        "propagateAcl": propagate_acl,
        "sendEmail": send_email,
        "emailSubject": email_subject,
        "emailBody": email_body,
        "useSimplifiedRoles": use_simplified_roles
    }
    qry = ServiceOperationQuery(context.web, "ShareObject", None, payload, None, return_type)
    # Static operation: invoked against the site, not a bound client object.
    qry.static = True
    context.add_query(qry)
    return return_type
@staticmethod
def unshare_object(context, url, return_type=None):
    """
    Removes sharing permissions on an object.
    :param office365.sharepoint.client_context.ClientContext context: SharePoint context
    :param str url: The URL of the object whose sharing permissions are removed.
    :param SharingResult return_type: Optional pre-built result container holding
        status codes pertaining to the completion of the operation.
    """
    result = return_type if return_type is not None else SharingResult(context)
    query = ServiceOperationQuery(context.web, "UnshareObject", None, {"url": url}, None, result)
    query.static = True
    context.add_query(query)
    return result
def get_file_by_id(self, unique_id):
    """Returns the file object identified by the specified GUID.
    :param str unique_id: A GUID that identifies the file object.
    """
    path = ServiceOperationPath("GetFileById", [unique_id], self.resource_path)
    return File(self.context, path)
def get_list_item(self, str_url):
    """
    Returns the list item associated with the specified server-relative URL.
    :param str str_url: A server-relative URL, e.g. "/sites/MySite/Shared Documents/MyDocument.docx".
    :return: ListItem
    """
    path = ServiceOperationPath("GetListItem", [str_url], self.resource_path)
    return ListItem(self.context, path)
def get_list_item_using_path(self, decoded_url):
    """
    Returns the list item associated with the specified server-relative path.
    :param str decoded_url: A server-relative path, e.g.
        "/sites/MySite/Shared Documents/MyDocument.docx" or "Shared Documents/MyDocument.docx".
    :return: ListItem
    """
    path_params = SPResPath.create_relative(self.context.base_url, decoded_url)
    return ListItem(self.context, ServiceOperationPath("GetListItemUsingPath", path_params, self.resource_path))
def get_catalog(self, type_catalog):
    """Gets the list template gallery, site template gallery, or Web Part gallery for the Web site.
    :param int type_catalog: The type of the gallery.
    """
    path = ServiceOperationPath("getCatalog", [type_catalog], self.resource_path)
    return List(self.context, path)
def page_context_info(self, include_odb_settings, emit_navigation_info):
    """
    Returns page context info for the current list being rendered.
    :param bool include_odb_settings: Whether to include ODB settings.
    :param bool emit_navigation_info: Whether to emit navigation info.
    """
    result = ClientResult(self.context)
    parameters = {
        "includeODBSettings": include_odb_settings,
        "emitNavigationInfo": emit_navigation_info,
    }
    query = ServiceOperationQuery(self, "PageContextInfo", None, parameters, None, result)
    self.context.add_query(query)
    return result
def get_storage_entity(self, key):
    """
    Returns the storage entity identified by the given key.
    :param str key: ID of the storage entity to be returned.
    """
    result = StorageEntity(self.context)
    query = ServiceOperationQuery(self, "GetStorageEntity", {"key": key}, None, None, result)
    self.context.add_query(query)
    return result
def set_storage_entity(self, key, value, description=None, comments=None):
    """
    Sets the storage entity identified by the given key.
    :param str key: Id of the storage entity to be set.
    :param str value: Value of the storage entity to be set.
    :param str description: Description of the storage entity to be set.
    :param str comments: Comments of the storage entity to be set.
    """
    parameters = {
        "key": key,
        "value": value,
        "description": description,
        "comments": comments,
    }
    query = ServiceOperationQuery(self, "SetStorageEntity", None, parameters, None)
    self.context.add_query(query)
    return self
def remove_storage_entity(self, key):
    """
    Removes the storage entity identified by the given key.
    :param str key: Id of the storage entity to be removed.
    """
    query = ServiceOperationQuery(self, "RemoveStorageEntity", {"key": key})
    self.context.add_query(query)
    return self
def remove_supported_ui_language(self, lcid):
    """
    Removes a supported UI language by its language identifier (LCID).
    :param lcid: Specifies the language identifier to be removed.
    """
    query = ServiceOperationQuery(self, "RemoveSupportedUILanguage", {"lcid": lcid})
    self.context.add_query(query)
    return self
def set_access_request_site_description_and_update(self, description=None):
    """
    Submits a SetAccessRequestSiteDescriptionAndUpdate operation for this web.
    :param str description: The access request site description to set.
    """
    query = ServiceOperationQuery(self, "SetAccessRequestSiteDescriptionAndUpdate", None,
                                  {"description": description})
    self.context.add_query(query)
    return self
def set_global_nav_settings(self, title, source):
    """
    Submits a SetGlobalNavSettings operation for this web.
    :param str title: Global navigation title.
    :param str source: Global navigation source.
    """
    query = ServiceOperationQuery(self, "SetGlobalNavSettings", None, {"title": title, "source": source})
    self.context.add_query(query)
    return self
def sync_hub_site_theme(self):
    """Submits a SyncHubSiteTheme server operation for this web
    (presumably re-applies the associated hub site's theme — confirm)."""
    qry = ServiceOperationQuery(self, "SyncHubSiteTheme")
    self.context.add_query(qry)
    return self
def assign_document_id(self, site_prefix, enabled=True):
    """
    Assign Document IDs.
    :param str site_prefix: Specify whether IDs will be automatically assigned to all documents in the
        Site Collection. Additionally, you can specify a set of 4-12 characters that will be used at the beginning
        of all IDs assigned for documents in this Site Collection, to help ensure that items in different
        Site Collections will never get the same ID. Note: A timer job will be scheduled to assign IDs to
        documents already in the Site Collection.
    :param bool enabled: Whether Document ID assignment is enabled for the Site Collection.
    """
    # Bug fix: the `enabled` flag was previously ignored ("1" was hard-coded);
    # it now controls the docid_x005f_enabled property-bag value.
    props = {
        'docid_x005f_msft_x005f_hier_x005f_siteprefix': site_prefix,
        'docid_x005f_enabled': "1" if enabled else "0",
    }
    return self.set_property("AllProperties", props).update()
@property
def activities(self):
    """The collection of SPActivityEntity objects exposed at this web's "Activities" endpoint."""
    return self.properties.get("Activities",
                               BaseEntityCollection(self.context, SPActivityEntity,
                                                    ResourcePath("Activities", self.resource_path)))

@property
def activity_logger(self):
    """The ActivityLogger entity associated with this web."""
    return self.properties.get("ActivityLogger",
                               ActivityLogger(self.context, ResourcePath("ActivityLogger", self.resource_path)))

@property
def allow_rss_feeds(self):
    """Gets a Boolean value that specifies whether the site collection allows RSS feeds.
    :rtype: bool or None
    """
    return self.properties.get("AllowRssFeeds", None)

@property
def alternate_css_url(self):
    """Gets the URL for an alternate cascading style sheet (CSS) to use in the website.
    :rtype: str or None
    """
    return self.properties.get("AlternateCssUrl", None)

@property
def app_instance_id(self):
    """
    Specifies the identifier of the app instance that this site (2) represents. If this site (2) does not
    represent an app instance, then this MUST specify an empty GUID.
    """
    return self.properties.get("AppInstanceId", None)

@property
def author(self):
    """
    Gets a user object that represents the user who created the Web site.
    """
    return self.properties.get("Author", User(self.context, ResourcePath("Author", self.resource_path)))

@property
def created(self):
    """Specifies when the site was created.
    :rtype: datetime or None
    """
    return self.properties.get("Created", None)

@property
def custom_master_url(self):
    """Gets the URL for a custom master page to apply to the Web site.
    :rtype: str or None
    """
    return self.properties.get("CustomMasterUrl", None)

@property
def custom_site_actions_disabled(self):
    """Whether custom site actions are disabled for this web.
    :rtype: bool or None
    """
    return self.properties.get("CustomSiteActionsDisabled", None)

@property
def id(self):
    """
    Specifies the site identifier for the site.
    :rtype: str
    """
    return self.properties.get("Id", None)

@property
def access_requests_list(self):
    """The "AccessRequestsList" list associated with this web."""
    return self.properties.get('AccessRequestsList',
                               List(self.context, ResourcePath("AccessRequestsList", self.resource_path)))

@property
def access_request_list_url(self):
    """The URL of the access request list, if any.
    :rtype: str or None
    """
    return self.properties.get("AccessRequestListUrl", None)

@property
def effective_base_permissions(self):
    """Specifies the effective permissions that are assigned to the current user."""
    # Imported locally, presumably to avoid a circular module import.
    from office365.sharepoint.permissions.base_permissions import BasePermissions
    return self.properties.get("EffectiveBasePermissions", BasePermissions())
@property
def enable_minimal_download(self):
    """
    Specifies whether the site will use the minimal download strategy by default.
    The minimal download strategy will use a single .aspx file (start.aspx) for your pages, with the actual URL
    encoded in the text following the hash mark ('#'). When navigating from page to page, only the changes
    between two compatible pages will be downloaded. Fewer bytes will be downloaded and the page will appear
    more quickly.
    """
    return self.properties.get("EnableMinimalDownload", None)

@property
def webs(self):
    """Specifies the collection of all child sites for the site."""
    # Imported locally, presumably to avoid a circular module import.
    from office365.sharepoint.webs.collection import WebCollection
    return self.properties.get("Webs",
                               WebCollection(self.context, ResourcePath("webs", self.resource_path), self))

@property
def folders(self):
    """Specifies the collection of all first-level folders in the site."""
    return self.properties.get('Folders',
                               FolderCollection(self.context, ResourcePath("folders", self.resource_path), self))

@property
def hosted_apps(self):
    """The HostedAppsManager exposed at this web's "HostedApps" endpoint."""
    return self.properties.get('HostedApps',
                               HostedAppsManager(self.context, ResourcePath("HostedApps", self.resource_path)))

@property
def lists(self):
    """Specifies the collection of lists that are contained in the site available to the current user based on the
    current user's permissions."""
    return self.properties.get('Lists',
                               ListCollection(self.context, ResourcePath("lists", self.resource_path)))

@property
def onedrive_shared_items(self):
    """The collection of SharedDocumentInfo objects exposed at this web's "OneDriveSharedItems" endpoint."""
    return self.properties.get('OneDriveSharedItems',
                               BaseEntityCollection(self.context, SharedDocumentInfo,
                                                    ResourcePath("OneDriveSharedItems", self.resource_path)))

@property
def site_users(self):
    """
    Specifies the collection of users in the site collection that contains the site.
    """
    return self.properties.get('SiteUsers',
                               UserCollection(self.context, ResourcePath("siteUsers", self.resource_path)))

@property
def site_groups(self):
    """Gets the collection of groups for the site collection."""
    return self.properties.get('SiteGroups',
                               GroupCollection(self.context, ResourcePath("siteGroups", self.resource_path)))

@property
def current_user(self):
    """Gets the current user."""
    return self.properties.get('CurrentUser',
                               User(self.context, ResourcePath("CurrentUser", self.resource_path)))

@property
def parent_web(self):
    """Gets the parent website of the specified website."""
    return self.properties.get('ParentWeb',
                               Web(self.context, ResourcePath("ParentWeb", self.resource_path)))

@property
def associated_visitor_group(self):
    """Gets or sets the associated visitor group of the Web site."""
    return self.properties.get('AssociatedVisitorGroup',
                               Group(self.context, ResourcePath("AssociatedVisitorGroup", self.resource_path)))

@property
def associated_owner_group(self):
    """Gets or sets the associated owner group of the Web site."""
    return self.properties.get('AssociatedOwnerGroup',
                               Group(self.context, ResourcePath("AssociatedOwnerGroup", self.resource_path)))

@property
def associated_member_group(self):
    """Gets or sets the group of users who have been given contribute permissions to the Web site."""
    return self.properties.get('AssociatedMemberGroup',
                               Group(self.context, ResourcePath("AssociatedMemberGroup", self.resource_path)))

@property
def can_modernize_homepage(self):
    """A ModernizeHomepageResult indicating whether this web's homepage can be modernized."""
    return self.properties.get('CanModernizeHomepage',
                               ModernizeHomepageResult(self.context,
                                                       ResourcePath("CanModernizeHomepage", self.resource_path)))

@property
def fields(self):
    """Specifies the collection of all the fields (2) in the site (2)."""
    return self.properties.get('Fields',
                               FieldCollection(self.context, ResourcePath("Fields", self.resource_path)))

@property
def content_types(self):
    """Gets the collection of content types for the Web site."""
    return self.properties.get('ContentTypes',
                               ContentTypeCollection(self.context,
                                                     ResourcePath("ContentTypes", self.resource_path), self))

@property
def configuration(self):
    """
    Specifies the identifier (ID) of the site definition that was used to create the site (2). If the site (2)
    was created with a custom site template this specifies the identifier (ID) of the site definition from which
    the custom site template is derived.
    """
    return self.properties.get("Configuration", None)

@property
def description_resource(self):
    """A UserResource object that represents the description of this web."""
    return self.properties.get('DescriptionResource',
                               UserResource(self.context, ResourcePath("DescriptionResource", self.resource_path)))

@property
def role_definitions(self):
    """Gets the collection of role definitions for the Web site."""
    return self.properties.get("RoleDefinitions",
                               RoleDefinitionCollection(self.context,
                                                        ResourcePath("RoleDefinitions", self.resource_path)))

@property
def event_receivers(self):
    """Specifies the collection of event receiver definitions that are currently available on the Web site."""
    return self.properties.get('EventReceivers',
                               EventReceiverDefinitionCollection(self.context,
                                                                 ResourcePath("EventReceivers", self.resource_path),
                                                                 self))

@property
def client_web_parts(self):
    """
    Gets a collection of the ClientWebParts installed in this SP.Web. It can be used to get metadata of the
    ClientWebParts or render them. It is a read-only collection as ClientWebParts need to be installed in
    an app package."""
    return self.properties.get('ClientWebParts',
                               ClientWebPartCollection(self.context,
                                                       ResourcePath("ClientWebParts", self.resource_path)))

@property
def tenant_app_catalog(self):
    """Returns the tenant app catalog for the given tenant if it exists."""
    return self.properties.get('TenantAppCatalog',
                               TenantCorporateCatalogAccessor(self.context,
                                                              ResourcePath("TenantAppCatalog", self.resource_path)))

@property
def site_collection_app_catalog(self):
    """Returns the site collection app catalog for the given web if it exists."""
    return self.properties.get('SiteCollectionAppCatalog',
                               SiteCollectionCorporateCatalogAccessor(self.context,
                                                                      ResourcePath("SiteCollectionAppCatalog",
                                                                                   self.resource_path)))

@property
def web_infos(self):
    """Specifies the collection of child site information (WebInformation) for the site."""
    return self.properties.get('WebInfos',
                               WebInformationCollection(self.context, ResourcePath("WebInfos", self.resource_path)))

@property
def theme_info(self):
    """Specifies the site theme associated with the site."""
    return self.properties.get('ThemeInfo',
                               ThemeInfo(self.context, ResourcePath("ThemeInfo", self.resource_path)))
@property
def url(self):
    """Gets the absolute URL for the website.
    :rtype: str or None
    """
    return self.properties.get('Url', None)

@property
def quick_launch_enabled(self):
    """Gets a value that specifies whether the Quick Launch area is enabled on the site.
    :rtype: bool or None
    """
    return self.properties.get('QuickLaunchEnabled', None)

@quick_launch_enabled.setter
def quick_launch_enabled(self, value):
    """Sets a value that specifies whether the Quick Launch area is enabled on the site.
    :type value: bool
    """
    self.set_property('QuickLaunchEnabled', value)

@property
def site_logo_url(self):
    """Gets a value that specifies Site logo url.
    :rtype: str or None
    """
    return self.properties.get('SiteLogoUrl', None)

@property
def list_templates(self):
    """Gets a value that specifies the collection of list definitions and list templates available for creating
    lists on the site."""
    return self.properties.get('ListTemplates',
                               ListTemplateCollection(self.context,
                                                      ResourcePath("ListTemplates", self.resource_path)))

@property
def is_multilingual(self):
    """Gets whether Multilingual UI is turned on for this web or not.
    :rtype: bool or None
    """
    return self.properties.get('IsMultilingual', None)

@is_multilingual.setter
def is_multilingual(self, val):
    """
    Sets whether Multilingual UI is turned on for this web or not.
    :type val: bool
    """
    self.set_property("IsMultilingual", val)

@property
def multilingual_settings(self):
    """Gets the multilingual settings for this web."""
    return self.properties.get('MultilingualSettings',
                               MultilingualSettings(self.context,
                                                    ResourcePath("MultilingualSettings", self.resource_path)))

@property
def web_template(self):
    """Gets the name of the site definition or site template that was used to create the site.
    :rtype: str or None
    """
    return self.properties.get('WebTemplate', None)

@property
def regional_settings(self):
    """Gets the regional settings that are currently implemented on the website."""
    return self.properties.get('RegionalSettings',
                               RegionalSettings(self.context, ResourcePath("RegionalSettings", self.resource_path)))

@property
def recycle_bin(self):
    """Specifies the collection of Recycle Bin items of the Recycle Bin of the site."""
    return self.properties.get('RecycleBin',
                               RecycleBinItemCollection(self.context,
                                                        ResourcePath("RecycleBin", self.resource_path)))

@property
def recycle_bin_enabled(self):
    """Specifies whether the Recycle Bin is enabled.
    :rtype: bool or None
    """
    return self.properties.get("RecycleBinEnabled", None)

@property
def navigation(self):
    """Specifies the navigation structure on the site (2), including the Quick Launch area and the link bar."""
    return self.properties.get('Navigation',
                               Navigation(self.context,
                                          ResourcePath("Navigation", self.resource_path)))

@property
def push_notification_subscribers(self):
    """Specifies the collection of push notification subscribers for the site."""
    return self.properties.get('PushNotificationSubscribers',
                               PushNotificationSubscriberCollection(self.context,
                                                                    ResourcePath("PushNotificationSubscribers",
                                                                                 self.resource_path)))

@property
def root_folder(self):
    """Get a root folder."""
    return self.properties.get("RootFolder", Folder(self.context, ResourcePath("RootFolder", self.resource_path)))

@property
def alerts(self):
    """Gets the collection of alerts for the site or subsite."""
    return self.properties.get('Alerts',
                               AlertCollection(self.context,
                                               ResourcePath("Alerts", self.resource_path)))

@property
def available_fields(self):
    """
    Specifies the collection of all fields available for the current scope, including those of the
    current site, as well as any parent sites.
    """
    return self.properties.get('AvailableFields',
                               FieldCollection(self.context, ResourcePath("AvailableFields", self.resource_path)))

@property
def available_content_types(self):
    """
    Specifies the collection of all site content types that apply to the current scope,
    including those of the current site (2), as well as any parent sites.
    """
    return self.properties.get('AvailableContentTypes',
                               ContentTypeCollection(self.context, ResourcePath("AvailableContentTypes",
                                                                                self.resource_path)))

@property
def site_user_info_list(self):
    """
    Specifies the user information list for the site collection that contains the site.
    """
    return self.properties.get('SiteUserInfoList',
                               List(self.context, ResourcePath("SiteUserInfoList", self.resource_path)))

@property
def title(self):
    """Gets the title of the web.
    :rtype: str or None
    """
    return self.properties.get("Title", None)

@property
def welcome_page(self):
    """
    Specifies the URL of the Welcome page for the site.
    :rtype: str or None
    """
    return self.properties.get('WelcomePage', None)

@property
def supported_ui_language_ids(self):
    """Specifies the language code identifiers (LCIDs) of the languages that are enabled for the site."""
    return self.properties.get('SupportedUILanguageIds', ClientValueCollection(int))

@property
def ui_version(self):
    """
    Gets or sets the user interface (UI) version of the Web site.
    :rtype: int or None
    """
    return self.properties.get('UIVersion', None)

@property
def user_custom_actions(self):
    """Specifies the collection of user custom actions for the site."""
    return self.properties.get('UserCustomActions',
                               UserCustomActionCollection(self.context,
                                                          ResourcePath("UserCustomActions", self.resource_path)))

@property
def server_relative_path(self):
    """
    Gets the server-relative Path of the Web.
    """
    return self.properties.get("ServerRelativePath", SPResPath())

@property
def syndication_enabled(self):
    """Specifies whether the [RSS2.0] feeds are enabled on the site."""
    return self.properties.get("SyndicationEnabled", None)

@property
def title_resource(self):
    """A UserResource object that represents the title of this web."""
    return self.properties.get('TitleResource',
                               UserResource(self.context, ResourcePath("TitleResource", self.resource_path)))

@property
def treeview_enabled(self):
    """Specifies whether the tree view is enabled on the site."""
    return self.properties.get("TreeViewEnabled", None)
def get_property(self, name, default_value=None):
    """
    Resolves a property value by name. When no explicit default is supplied,
    known navigation properties are substituted with their lazily-built typed
    defaults so server-side names map onto the matching Python accessors.
    :param str name: Server-side property name.
    :param default_value: Optional fallback value.
    """
    if default_value is None:
        # Server property name -> typed default (delegates to the property accessors above).
        property_mapping = {
            "AccessRequestsList": self.access_requests_list,
            "ActivityLogger": self.activity_logger,
            "AvailableFields": self.available_fields,
            "AvailableContentTypes": self.available_content_types,
            "AssociatedOwnerGroup": self.associated_owner_group,
            "AssociatedMemberGroup": self.associated_member_group,
            "AssociatedVisitorGroup": self.associated_visitor_group,
            "ContentTypes": self.content_types,
            "ClientWebParts": self.client_web_parts,
            "CurrentUser": self.current_user,
            "DescriptionResource": self.description_resource,
            "EffectiveBasePermissions": self.effective_base_permissions,
            "EventReceivers": self.event_receivers,
            "HostedApps": self.hosted_apps,
            "ListTemplates": self.list_templates,
            "MultilingualSettings": self.multilingual_settings,
            "OneDriveSharedItems": self.onedrive_shared_items,
            "ParentWeb": self.parent_web,
            "PushNotificationSubscribers": self.push_notification_subscribers,
            "RootFolder": self.root_folder,
            "RegionalSettings": self.regional_settings,
            "RoleDefinitions": self.role_definitions,
            "RecycleBin": self.recycle_bin,
            "SiteCollectionAppCatalog": self.site_collection_app_catalog,
            "SiteGroups": self.site_groups,
            "SiteUsers": self.site_users,
            "SiteUserInfoList": self.site_user_info_list,
            "TenantAppCatalog": self.tenant_app_catalog,
            "TitleResource": self.title_resource,
            "UserCustomActions": self.user_custom_actions,
            "WebInfos": self.web_infos,
            "ThemeInfo": self.theme_info
        }
        default_value = property_mapping.get(name, None)
    return super(Web, self).get_property(name, default_value)
def set_property(self, name, value, persist_changes=True):
    """Set a property value and keep the cached web URL in sync.

    When the "Url" property changes, remember it so that resource_url can
    rewrite request URLs against the new web address.
    """
    super(Web, self).set_property(name, value, persist_changes)
    if name == "Url":
        self._web_url = value
    return self
@property
def resource_url(self):
    """Returns Web url.

    If an explicit web url was captured (via set_property("Url", ...)),
    rebase the default resource url from the context's service root onto
    that web's own "/_api" endpoint.
    """
    orig_resource_url = super(Web, self).resource_url
    if self._web_url is not None:
        orig_resource_url = orig_resource_url.replace(self.context.service_root_url(), self._web_url + '/_api')
    return orig_resource_url
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/BootstrapValidator/js/language/ro_RO.js | (function($) {
/**
* Romanian language package
* Translated by @filipac
*/
$.fn.bootstrapValidator.i18n = $.extend(true, $.fn.bootstrapValidator.i18n, {
base64: {
'default': 'Te rog introdu un base64 valid'
},
between: {
'default': 'Te rog introdu o valoare intre %s si %s',
notInclusive: 'Te rog introdu o valoare doar intre %s si %s'
},
callback: {
'default': 'Te rog introdu o valoare valida'
},
choice: {
'default': 'Te rog introdu o valoare valida',
less: 'Te rog alege minim %s optiuni',
more: 'Te rog alege maxim %s optiuni',
between: 'Te rog alege %s - %s optiuni'
},
color: {
'default': 'Te rog sa introduci o culoare valida'
},
creditCard: {
'default': 'Te rog introdu un numar de card valid'
},
cusip: {
'default': 'Te rog introdu un numar CUSIP valid'
},
cvv: {
'default': 'Te rog introdu un numar CVV valid'
},
date: {
'default': 'Te rog introdu o data valida',
min: 'Te rog sa introduci o data dupa %s',
max: 'Te rog sa introduci o data inainte de %s',
range: 'Te rog sa introduci o data in intervalul %s - %s'
},
different: {
'default': 'Te rog sa introduci o valoare diferita'
},
digits: {
'default': 'Te rog sa introduci doar cifre'
},
ean: {
'default': 'Te rog sa introduci un numar EAN valid'
},
emailAddress: {
'default': 'Te rog sa introduci o adresa de email valide'
},
file: {
'default': 'Te rog sa introduci un fisier valid'
},
greaterThan: {
'default': 'Te rog sa introduci o valoare mai mare sau egala cu %s',
notInclusive: 'Te rog sa introduci o valoare mai mare ca %s'
},
grid: {
'default': 'Te rog sa introduci un numar GRId valid'
},
hex: {
'default': 'Te rog sa introduci un numar hexadecimal valid'
},
hexColor: {
'default': 'Te rog sa introduci o culoare hex valida'
},
iban: {
'default': 'Te rog sa introduci un IBAN valid',
countryNotSupported: 'Tara %s nu este acceptata',
country: 'Te rog sa introduci un IBAN valid din %s',
countries: {
AD: 'Andorra',
AE: 'Emiratele Arabe unite',
AL: 'Albania',
AO: 'Angola',
AT: 'Austria',
AZ: 'Azerbaijan',
BA: 'Bosnia si Herzegovina',
BE: 'Belgia',
BF: 'Burkina Faso',
BG: 'Bulgaria',
BH: 'Bahrain',
BI: 'Burundi',
BJ: 'Benin',
BR: 'Brazilia',
CH: 'Elvetia',
CI: 'Coasta de Fildes',
CM: 'Cameroon',
CR: 'Costa Rica',
CV: 'Cape Verde',
CY: 'Cipru',
CZ: 'Republica Cehia',
DE: 'Germania',
DK: 'Danemarca',
DO: 'Republica Dominicană',
DZ: 'Algeria',
EE: 'Estonia',
ES: 'Spania',
FI: 'Finlanda',
FO: 'Insulele Faroe',
FR: 'Franta',
GB: 'Regatul Unit',
GE: 'Georgia',
GI: 'Gibraltar',
GL: 'Groenlanda',
GR: 'Grecia',
GT: 'Guatemala',
HR: 'Croatia',
HU: 'Ungaria',
IE: 'Irlanda',
IL: 'Israel',
IR: 'Iran',
IS: 'Islanda',
IT: 'Italia',
JO: 'Iordania',
KW: 'Kuwait',
KZ: 'Kazakhstan',
LB: 'Lebanon',
LI: 'Liechtenstein',
LT: 'Lithuania',
LU: 'Luxembourg',
LV: 'Latvia',
MC: 'Monaco',
MD: 'Moldova',
ME: 'Muntenegru',
MG: 'Madagascar',
MK: 'Macedonia',
ML: 'Mali',
MR: 'Mauritania',
MT: 'Malta',
MU: 'Mauritius',
MZ: 'Mozambique',
NL: 'Olanda',
NO: 'Norvegia',
PK: 'Pakistan',
PL: 'Polanda',
PS: 'Palestina',
PT: 'Portugalia',
QA: 'Qatar',
RO: 'Romania',
RS: 'Serbia',
SA: 'Arabia Saudita',
SE: 'Suedia',
SI: 'Slovenia',
SK: 'Slovacia',
SM: 'San Marino',
SN: 'Senegal',
TN: 'Tunisia',
TR: 'Turkey',
VG: 'Insulele Virgin'
}
},
id: {
'default': 'Te rog sa introduci un numar de identificare valid',
countryNotSupported: 'Codul %s nu este suportat',
country: 'Te rog sa introduci un numar de identificare valid din %s',
countries: {
BA: 'Bosnia si Herzegovina',
BG: 'Bulgaria',
BR: 'Brazilia',
CH: 'Elvetia',
CL: 'Chile',
CN: 'China',
CZ: 'Republica Cehia',
DK: 'Danemarca',
EE: 'Estonia',
ES: 'Spania',
FI: 'Finlanda',
HR: 'Croatia',
IE: 'Irlanda',
IS: 'Islanda',
LT: 'Lithuania',
LV: 'Latvia',
ME: 'Muntenegru',
MK: 'Macedonia',
NL: 'Olanda',
RO: 'Romania',
RS: 'Serbia',
SE: 'Suedia',
SI: 'Slovenia',
SK: 'Slovacia',
SM: 'San Marino',
TH: 'Thailanda',
ZA: 'Africa de Sud'
}
},
identical: {
'default': 'Te rog sa introduci aceeasi valoare'
},
imei: {
'default': 'Te rog sa introduci un numar IMEI valid'
},
imo: {
'default': 'Te rog sa introduci un numar IMO valid'
},
integer: {
'default': 'Te rog sa introduci un numar valid'
},
ip: {
'default': 'Te rog sa introduci o adresa IP valida',
ipv4: 'Te rog sa introduci o adresa IPv4 valida',
ipv6: 'Te rog sa introduci o adresa IPv6 valida'
},
isbn: {
'default': 'Te rog sa introduci un numar ISBN valid'
},
isin: {
'default': 'Te rog sa introduci un numar ISIN valid'
},
ismn: {
'default': 'Te rog sa introduci un numar ISMN valid'
},
issn: {
'default': 'Te rog sa introduci un numar ISSN valid'
},
lessThan: {
'default': 'Te rog sa introduci o valoare mai mica sau egala cu %s',
notInclusive: 'Te rog sa introduci o valoare mai mica decat %s'
},
mac: {
'default': 'Te rog sa introduci o adresa MAC valida'
},
meid: {
'default': 'Te rog sa introduci un numar MEID valid'
},
notEmpty: {
'default': 'Te rog sa introduci o valoare'
},
numeric: {
'default': 'Te rog sa introduci un numar'
},
phone: {
'default': 'Te rog sa introduci un numar de telefon valid',
countryNotSupported: 'Prefixul %s nu este suportat',
country: 'Te rog sa introduci un numar de telefon valid din %s',
countries: {
BR: 'Brazilia',
CN: 'China',
CZ: 'Republica Cehia',
DE: 'Germania',
DK: 'Danemarca',
ES: 'Spania',
FR: 'Franta',
GB: 'Regatul Unit',
MA: 'Maroc',
PK: 'Pakistan',
RO: 'Romania',
RU: 'Rusia',
SK: 'Slovacia',
TH: 'Thailanda',
US: 'SUA',
VE: 'Venezuela'
}
},
regexp: {
'default': 'Te rog sa introduci o valoare in formatul'
},
remote: {
'default': 'Te rog sa introduci o valoare valida'
},
rtn: {
'default': 'Te rog sa introduci un numar RTN valid'
},
sedol: {
'default': 'Te rog sa introduci un numar SEDOL valid'
},
siren: {
'default': 'Te rog sa introduci un numar SIREN valid'
},
siret: {
'default': 'Te rog sa introduci un numar SIRET valid'
},
step: {
'default': 'Te rog introdu un pas de %s'
},
stringCase: {
'default': 'Te rog sa introduci doar litere mici',
upper: 'Te rog sa introduci doar litere mari'
},
stringLength: {
'default': 'Te rog sa introduci o valoare cu lungimea valida',
less: 'Te rog sa introduci mai putin de %s caractere',
more: 'Te rog sa introduci mai mult de %s caractere',
between: 'Te rog sa introduci o valoare cu lungimea intre %s si %s caractere'
},
uri: {
'default': 'Te rog sa introduci un URI valid'
},
uuid: {
'default': 'Te rog sa introduci un numar UUID valid',
version: 'Te rog sa introduci un numar UUID versiunea %s valid'
},
vat: {
'default': 'Te rog sa introduci un numar TVA valid',
countryNotSupported: 'Tara %s nu este acceptata',
country: 'Te rog sa introduci un numar TVA valid din %s',
countries: {
AT: 'Austria',
BE: 'Belgia',
BG: 'Bulgaria',
BR: 'Brazilia',
CH: 'Elvetia',
CY: 'Cipru',
CZ: 'Republica Cehia',
DE: 'Germania',
DK: 'Danemarca',
EE: 'Estonia',
ES: 'Spania',
FI: 'Finlanda',
FR: 'Franta',
GB: 'Regatul Unit',
GR: 'Grecia',
EL: 'Grecia',
HU: 'Ungaria',
HR: 'Croatia',
IE: 'Irlanda',
IS: 'Islanda',
IT: 'Italia',
LT: 'Lituania',
LU: 'Luxemburg',
LV: 'Latvia',
MT: 'Malta',
NL: 'Olanda',
NO: 'Norvegia',
PL: 'Polanda',
PT: 'Portugalia',
RO: 'Romania',
RU: 'Rusia',
RS: 'Serbia',
SE: 'Suedia',
SI: 'Slovenia',
SK: 'Slovacia',
VE: 'Venezuela',
ZA: 'Africa de Sud'
}
},
vin: {
'default': 'Te rog sa introduci un numar VIN valid'
},
zipCode: {
'default': 'Te rog sa introduci un cod postal valid',
countryNotSupported: 'Tara %s nu este acceptata',
country: 'Te rog sa introduci un cod postal valid din %s',
countries: {
AT: 'Austria',
BR: 'Brazilia',
CA: 'Canada',
CH: 'Elvetia',
CZ: 'Republica Cehia',
DE: 'Germania',
DK: 'Danemarca',
FR: 'Franta',
GB: 'Regatul Unit',
IE: 'Irlanda',
IT: 'Italia',
MA: 'Maroc',
NL: 'Olanda',
PT: 'Portugalia',
RO: 'Romania',
RU: 'Rusia',
SE: 'Suedia',
SG: 'Singapore',
SK: 'Slovacia',
US: 'SUA'
}
}
});
}(window.jQuery)); | PypiClean |
/Hikka_Pyro_New-2.0.103-py3-none-any.whl/hikkapyro/types/messages_and_media/document.py |
from datetime import datetime
from typing import List
import hikkapyro
from hikkapyro import raw, utils
from hikkapyro import types
from hikkapyro.file_id import FileId, FileType, FileUniqueId, FileUniqueType
from ..object import Object
class Document(Object):
    """A generic file (as opposed to photos, voice messages, audio files, ...).

    Parameters:
        file_id (``str``):
            Identifier for this file, which can be used to download or reuse the file.

        file_unique_id (``str``):
            Unique identifier for this file, which is supposed to be the same over time and for different accounts.
            Can't be used to download or reuse the file.

        file_name (``str``, *optional*):
            Original filename as defined by sender.

        mime_type (``str``, *optional*):
            MIME type of the file as defined by sender.

        file_size (``int``, *optional*):
            File size.

        date (:py:obj:`~datetime.datetime`, *optional*):
            Date the document was sent.

        thumbs (List of :obj:`~pyrogram.types.Thumbnail`, *optional*):
            Document thumbnails as defined by sender.
    """

    def __init__(
        self,
        *,
        client: "hikkapyro.Client" = None,
        file_id: str,
        file_unique_id: str,
        file_name: str = None,
        mime_type: str = None,
        file_size: int = None,
        date: datetime = None,
        thumbs: List["types.Thumbnail"] = None
    ):
        super().__init__(client)

        self.file_id = file_id
        self.file_unique_id = file_unique_id
        self.file_name = file_name
        self.mime_type = mime_type
        self.file_size = file_size
        self.date = date
        self.thumbs = thumbs

    @staticmethod
    def _parse(client, document: "raw.types.Document", file_name: str) -> "Document":
        """Build a Document from a raw Telegram document object.

        Encodes the raw media id/access hash into the portable file_id and
        file_unique_id strings and copies over the remaining metadata.
        """
        return Document(
            file_id=FileId(
                file_type=FileType.DOCUMENT,
                dc_id=document.dc_id,
                media_id=document.id,
                access_hash=document.access_hash,
                file_reference=document.file_reference
            ).encode(),
            file_unique_id=FileUniqueId(
                file_unique_type=FileUniqueType.DOCUMENT,
                media_id=document.id
            ).encode(),
            file_name=file_name,
            mime_type=document.mime_type,
            file_size=document.size,
            date=utils.timestamp_to_datetime(document.date),
            thumbs=types.Thumbnail._parse(client, document),
            client=client
        )
/GraphQL_core_next-1.1.1-py3-none-any.whl/graphql/utilities/value_from_ast_untyped.py | from typing import Any, Dict
from ..error import INVALID
from ..language import ValueNode
from ..pyutils import inspect, is_invalid
__all__ = ["value_from_ast_untyped"]
def value_from_ast_untyped(
    value_node: ValueNode, variables: Dict[str, Any] = None
) -> Any:
    """Produce a Python value given a GraphQL Value AST.

    Unlike `value_from_ast()`, no type is provided. The resulting Python value
    will reflect the provided GraphQL value AST.

    | GraphQL Value        | JSON Value | Python Value |
    | -------------------- | ---------- | ------------ |
    | Input Object         | Object     | dict         |
    | List                 | Array      | list         |
    | Boolean              | Boolean    | bool         |
    | String / Enum        | String     | str          |
    | Int / Float          | Number     | int / float  |
    | Null                 | null       | None         |
    """
    handler = _value_from_kind_functions.get(value_node.kind)
    if handler is not None:
        return handler(value_node, variables)

    # Not reachable. All possible value nodes have been considered.
    raise TypeError(  # pragma: no cover
        f"Unexpected value node: '{inspect(value_node)}'."
    )
def value_from_null(_value_node, _variables):
    """A GraphQL NullValue node always maps to Python None."""
    return None
def value_from_int(value_node, _variables):
    """Convert an IntValue node to int, or INVALID when unparsable."""
    try:
        result = int(value_node.value)
    except ValueError:
        return INVALID
    return result
def value_from_float(value_node, _variables):
    """Convert a FloatValue node to float, or INVALID when unparsable."""
    try:
        result = float(value_node.value)
    except ValueError:
        return INVALID
    return result
def value_from_string(value_node, _variables):
    """Return the node's value unchanged.

    Used for string, enum and boolean value nodes (see the dispatch table),
    whose AST nodes already carry the final Python value.
    """
    return value_node.value
def value_from_list(value_node, variables):
    """Convert a ListValue node by converting each item, preserving order."""
    return [value_from_ast_untyped(node, variables) for node in value_node.values]
def value_from_object(value_node, variables):
    """Convert an ObjectValue node to a dict keyed by field name."""
    return {
        field.name.value: value_from_ast_untyped(field.value, variables)
        for field in value_node.fields
    }
def value_from_variable(value_node, variables):
    """Resolve a Variable node against the given variable values.

    Returns INVALID when no variables were provided or when the variable
    is missing or itself invalid.
    """
    variable_name = value_node.name.value
    if not variables:
        return INVALID
    value = variables.get(variable_name, INVALID)
    return INVALID if is_invalid(value) else value
# Dispatch table mapping AST node kinds to their converter functions.
# Enum and boolean nodes reuse value_from_string because their nodes
# already carry the final Python value.
_value_from_kind_functions = {
    "null_value": value_from_null,
    "int_value": value_from_int,
    "float_value": value_from_float,
    "string_value": value_from_string,
    "enum_value": value_from_string,
    "boolean_value": value_from_string,
    "list_value": value_from_list,
    "object_value": value_from_object,
    "variable": value_from_variable,
}
/Mesa-2.1.1-py3-none-any.whl/mesa/visualization/templates/external/bootstrap-slider-11.0.2/test/specs/AriaValueTextFormatterSpec.js | describe("Aria-valuetext Tests", function() {
it("Sets the aria-valuetext to 'formatter' value", function() {
var textValArrayA = new Array('Monday','Wednesday','Friday');
var tooltipFormatterA = function(value) {
var arrActiveValueA = value;
return textValArrayA[arrActiveValueA-1];
};
//Formatter is used
var testSliderA = $("#accessibilitySliderA").slider({
formatter : tooltipFormatterA
});
testSliderA.slider('setValue', 2);
var tooltipMessageA = $("#accessibilitySliderA").prev(".slider").children(".min-slider-handle").attr("aria-valuetext");
var expectedMessageA = tooltipFormatterA(2);
expect(tooltipMessageA).toBe(expectedMessageA);
$("#accessibilitySliderA").slider('destroy');
});
it("Does not use aria-valuetext if 'formatter' is not used", function() {
//Formatter is not used
var testSliderB = $("#accessibilitySliderB").slider({});
testSliderB.slider('setValue', 1);
var ariaValueTextB = $("#accessibilitySliderB").prev(".slider").children(".min-slider-handle").attr("aria-valuetext");
expect(ariaValueTextB).not.toBeDefined();
$("#accessibilitySliderB").slider('destroy');
});
it("aria-valuetext if 'formatter' is used and has min & max value", function() {
var textValArrayC = new Array('Monday','Tuesday','Wednesday','Thursday','Friday','Saturday','Sunday');
var tooltipFormatterC = function(value) {
if(value[1]){
var arrActiveValueC0 = value[0];
var arrActiveValueC1 = value[1];
return [ textValArrayC[arrActiveValueC0-1], textValArrayC[arrActiveValueC1-1] ];
} else {
var arrActiveValueC = value;
return textValArrayC[arrActiveValueC-1];
}
};
//Formatter is used for ranges
var testSliderC = $("#accessibilitySliderC").slider({
range: true,
formatter : tooltipFormatterC
});
var valuesToSet = [2,4];
testSliderC.slider('setValue', valuesToSet);
var expectedMessageC = tooltipFormatterC([2,4]);
var ttminMessage = $("#accessibilitySliderC").prev(".slider").children(".min-slider-handle").attr("aria-valuetext");
var ttmaxMessage = $("#accessibilitySliderC").prev(".slider").children(".max-slider-handle").attr("aria-valuetext");
expect(ttminMessage).toBe(expectedMessageC[0]);
expect(ttmaxMessage).toBe(expectedMessageC[1]);
$('#accessibilitySliderC').slider('destroy');
});
describe("Unset 'aria-valuetext' attribute when value can be represented as a number", function() {
var $testSliderC;
var dayOfWeek;
var dayFormatter = function(value) {
if (value[1]) {
return [ dayOfWeek[value[0]-1], dayOfWeek[value[1]-1] ];
}
return dayOfWeek[value-1];
};
beforeEach(function() {
dayOfWeek = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'];
$testSliderC = $('#accessibilitySliderC').slider({
id: 'testAccessbilitySlider',
min: 1,
max: 7,
formatter : dayFormatter
});
});
afterEach(function() {
if ($testSliderC) {
$testSliderC.slider('destroy');
}
});
it("Should unset 'aria-valuetext' attribute", function() {
dayOfWeek[0] = '1';
var valueToSet = 1;
$testSliderC.slider('setValue', valueToSet);
var ariaValueText = $('#testAccessbilitySlider').find('.min-slider-handle')[0].getAttribute('aria-valuetext');
expect(ariaValueText).toBeNull();
});
it("Should unset 'aria-valuetext' attributes for range sliders", function() {
dayOfWeek[0] = '1';
dayOfWeek[6] = '7';
$testSliderC.slider('setAttribute', 'range', true);
$testSliderC.slider('refresh');
var valuesToSet = [1, 7];
$testSliderC.slider('setValue', valuesToSet);
var ariaValueText1 = $('#testAccessbilitySlider').find('.min-slider-handle')[0].getAttribute('aria-valuetext');
var ariaValueText2 = $('#testAccessbilitySlider').find('.max-slider-handle')[0].getAttribute('aria-valuetext');
expect(ariaValueText1).toBeNull();
expect(ariaValueText2).toBeNull();
});
});
}); | PypiClean |
/Flask-AppBuilder-red-2.1.13.tar.gz/Flask-AppBuilder-red-2.1.13/flask_appbuilder/fieldwidgets.py | from flask_babel import lazy_gettext as _
from wtforms import widgets
from wtforms.widgets import html_params, HTMLString
class DatePickerWidget(object):
    """
    Date Time picker from Eonasdan GitHub
    """

    # Bootstrap input-group markup; %(text)s is replaced by the rendered
    # HTML attributes of the field.
    # NOTE(review): the element id "datepicker" is hard-coded, so rendering
    # more than one of these widgets on a page yields duplicate ids.
    data_template = (
        '<div class="input-group date appbuilder_date" id="datepicker">'
        '<span class="input-group-addon"><i class="fa fa-calendar cursor-hand"></i>'
        "</span>"
        '<input class="form-control" data-format="yyyy-MM-dd" %(text)s />'
        "</div>"
    )

    def __call__(self, field, **kwargs):
        """Render the field as a Bootstrap date-picker input."""
        kwargs.setdefault("id", field.id)
        kwargs.setdefault("name", field.name)
        if not field.data:
            # Falsy data is rendered (and stored back on the field) as ""
            field.data = ""
        template = self.data_template

        return HTMLString(
            template % {"text": html_params(type="text", value=field.data, **kwargs)}
        )
class DateTimePickerWidget(object):
    """
    Date Time picker from Eonasdan GitHub

    NOTE(review): near-duplicate of DatePickerWidget, differing only in the
    CSS class, element id and data-format; could share a base template.
    """

    # NOTE(review): id "datetimepicker" is hard-coded -> duplicate ids when
    # the widget is rendered more than once on a page.
    data_template = (
        '<div class="input-group date appbuilder_datetime" id="datetimepicker">'
        '<span class="input-group-addon"><i class="fa fa-calendar cursor-hand"></i>'
        "</span>"
        '<input class="form-control" data-format="yyyy-MM-dd hh:mm:ss" %(text)s />'
        "</div>"
    )

    def __call__(self, field, **kwargs):
        """Render the field as a Bootstrap date-time-picker input."""
        kwargs.setdefault("id", field.id)
        kwargs.setdefault("name", field.name)
        if not field.data:
            field.data = ""
        template = self.data_template

        return HTMLString(
            template % {"text": html_params(type="text", value=field.data, **kwargs)}
        )
class BS3TextFieldWidget(widgets.TextInput):
    """Bootstrap 3 text input: adds the form-control class and a placeholder."""

    def __call__(self, field, **kwargs):
        kwargs["class"] = u"form-control"
        if field.label:
            # Use the field label text as the input placeholder.
            kwargs["placeholder"] = field.label.text
        if "name_" in kwargs:
            # Allow callers to override the rendered input name.
            field.name = kwargs["name_"]
        return super(BS3TextFieldWidget, self).__call__(field, **kwargs)
class BS3TextAreaFieldWidget(widgets.TextArea):
    """Bootstrap 3 textarea: form-control class, 3 rows and a placeholder."""

    def __call__(self, field, **kwargs):
        kwargs["class"] = u"form-control"
        kwargs["rows"] = 3
        if field.label:
            kwargs["placeholder"] = field.label.text
        return super(BS3TextAreaFieldWidget, self).__call__(field, **kwargs)
class BS3PasswordFieldWidget(widgets.PasswordInput):
    """Bootstrap 3 password input: form-control class and a placeholder."""

    def __call__(self, field, **kwargs):
        kwargs["class"] = u"form-control"
        if field.label:
            kwargs["placeholder"] = field.label.text
        return super(BS3PasswordFieldWidget, self).__call__(field, **kwargs)
class Select2AJAXWidget(object):
    """Renders an <input> bound to a select2 AJAX control.

    The endpoint is emitted as an HTML attribute so client-side JS can
    fetch the options lazily.
    """

    data_template = "<input %(text)s />"

    def __init__(self, endpoint, extra_classes=None, style=None):
        self.endpoint = endpoint
        self.extra_classes = extra_classes
        self.style = style or u"width:250px"

    def __call__(self, field, **kwargs):
        css = "input-group my_select2_ajax"
        if self.extra_classes:
            css = "{0} {1}".format(css, self.extra_classes)
        # Fill in defaults without clobbering caller-supplied attributes.
        defaults = (
            ("id", field.id),
            ("name", field.name),
            ("endpoint", self.endpoint),
            ("style", self.style),
            ("class", css),
        )
        for key, value in defaults:
            kwargs.setdefault(key, value)
        if not field.data:
            field.data = ""
        return HTMLString(
            self.data_template
            % {"text": html_params(type="text", value=field.data, **kwargs)}
        )
class Select2SlaveAJAXWidget(object):
    """Select2 AJAX input whose options depend on a master widget's value."""

    # NOTE(review): this template hard-codes a class attribute while
    # __call__ also setdefaults kwargs["class"], so the rendered <input>
    # ends up with two class attributes (unlike Select2AJAXWidget, whose
    # template carries no class). Confirm which class list is intended.
    data_template = '<input class="input-group my_select2_ajax_slave" %(text)s />'

    def __init__(self, master_id, endpoint, extra_classes=None, style=None):
        self.endpoint = endpoint
        # id of the controlling (master) widget, emitted as an attribute
        self.master_id = master_id
        self.extra_classes = extra_classes
        self.style = style or u"width:250px"

    def __call__(self, field, **kwargs):
        kwargs.setdefault("id", field.id)
        kwargs.setdefault("name", field.name)
        kwargs.setdefault("endpoint", self.endpoint)
        kwargs.setdefault("master_id", self.master_id)
        kwargs.setdefault("style", self.style)
        input_classes = "input-group my_select2_ajax"
        if self.extra_classes:
            input_classes = input_classes + " " + self.extra_classes
        kwargs.setdefault("class", input_classes)

        if not field.data:
            field.data = ""
        template = self.data_template

        return HTMLString(
            template % {"text": html_params(type="text", value=field.data, **kwargs)}
        )
class Select2Widget(widgets.Select):
    """Single-choice select rendered as a select2 dropdown."""

    extra_classes = None

    def __init__(self, extra_classes=None, style=None):
        self.extra_classes = extra_classes
        self.style = style or u"width:250px"
        return super(Select2Widget, self).__init__()

    def __call__(self, field, **kwargs):
        # Assemble the CSS class list once, then hand it to wtforms.
        classes = u"my_select2 form-control"
        if self.extra_classes:
            classes = classes + " " + self.extra_classes
        kwargs["class"] = classes
        kwargs["style"] = self.style
        kwargs["data-placeholder"] = _("Select Value")
        if "name_" in kwargs:
            field.name = kwargs["name_"]
        return super(Select2Widget, self).__call__(field, **kwargs)
class Select2ManyWidget(widgets.Select):
    """Multi-choice select rendered as a select2 dropdown.

    NOTE(review): identical to Select2Widget except for multiple="true";
    could subclass it instead of duplicating the body.
    """

    extra_classes = None

    def __init__(self, extra_classes=None, style=None):
        self.extra_classes = extra_classes
        self.style = style or u"width:250px"
        return super(Select2ManyWidget, self).__init__()

    def __call__(self, field, **kwargs):
        kwargs["class"] = u"my_select2 form-control"
        if self.extra_classes:
            kwargs["class"] = kwargs["class"] + " " + self.extra_classes
        kwargs["style"] = self.style
        kwargs["data-placeholder"] = _("Select Value")
        # Render as a multi-select element.
        kwargs["multiple"] = u"true"
        if "name_" in kwargs:
            field.name = kwargs["name_"]
        return super(Select2ManyWidget, self).__call__(field, **kwargs)
/LibrarianFileManager-0.0.1.tar.gz/LibrarianFileManager-0.0.1/src/librarian/actors/plotter.py | import warnings
from datetime import date
import matplotlib as mpl
import matplotlib.pyplot as plt
from cycler import cycler
from librarian.actors.reader import Reader
# =====================================
# Abstract Classes
# =====================================
class Plotter(Reader):
    """Plotter class for plotting data contained in Librarian
    files/catalogs.
    """
    # ---------------------------------------------------
    # Formatting:
    # ---------------------------------------------------
    # Font sizes (points)
    _small_size = 10
    _medium_size = 12
    _bigger_size = 14
    _large_size = 16

    # Plot styles: lines, markers, etc.
    _linewidth = 2
    _linestyle = '-'  # solid lines
    # (modstyle)
    # _linestyle = None
    _markersize = 2
    _capsizes = 2   # error-bar cap size
    _capthick = 1.5  # error-bar cap thickness

    # Default figure size (inches)
    _fig_width = 6.4
    _fig_height = 4.8
    _figsize = (_fig_width, _fig_height)
    def __init__(self, **kwargs):
        """Initializes the plotter, including the axis information
        and style of the plots.

        All parameters are passed via **kwargs; unknown keys are ignored.

        Possible Parameters
        ----------
        Figure Parameters
        ----------
        xlabel : str
            xlabel of the plot.
        ylabel : str
            ylabel of the plot.
        title : str
            title of the plot.
        showdate : bool
            If True, adds a date to the upper right of the plot.
        xlim : tuple
            The x limits of the plot.
        ylim : tuple
            The y limits of the plot.
        ylim_ratio : tuple
            The y limits of the ratio subplot.
        ratio_plot : bool
            Determines whether there is an additional subplot
            for ratio plotting.
        ylabel_ratio : str
            ylabel of the ratio subplot, if it exists.

        Style Parameters
        ----------
        font.size : int
            default text sizes
        figure.titlesize : int
            fontsize of the figure title
        axes.titlesize : int
            fontsize of the axes title
        axes.labelsize : int
            fontsize of the x and y labels
        xtick.labelsize : int
            fontsize of the x tick labels
        ytick.labelsize : int
            fontsize of the y tick labels
        legend.fontsize : int
            fontsize of the legend
        lines.linewidth : int
            default plot linewidth
        axes.prop_cycle : cycler
            default color cycle

        Returns
        -------
        None
        """
        # Get plot metadata from kwargs:
        # (labels, limits, title; consumed later by subplots())
        self.metadata = {
            'figsize': kwargs.get('figsize', self._figsize),
            'title': kwargs.get('title', 'Plotter'),
            'xlabel': kwargs.get('xlabel', 'x'),
            'ylabel': kwargs.get('ylabel', 'y'),
            'ylabel_ratio': kwargs.get('ylabel_ratio', 'Ratio'),
            'xlim': kwargs.get('xlim', None),
            'ylim': kwargs.get('ylim', None),
            'ylim_ratio': kwargs.get('ylim_ratio', None),
        }

        # Get plot style info for plotting with a local rc_context
        # (passed to mpl.rc_context by file_action/act_on_catalog*)
        self.mpl_rc = {
            'font.size': kwargs.get('font.size', self._medium_size),
            'figure.titlesize': kwargs.get('axes.titlesize',
                                           self._large_size),
            'axes.titlesize': kwargs.get('axes.titlesize',
                                         self._bigger_size),
            'axes.labelsize': kwargs.get('axes.labelsize',
                                         self._medium_size),
            'xtick.labelsize': kwargs.get('xtick.labelsize',
                                          self._small_size),
            'ytick.labelsize': kwargs.get('ytick.labelsize',
                                          self._small_size),
            'legend.fontsize': kwargs.get('legend.fontsize',
                                          self._medium_size),
            'lines.linewidth': kwargs.get('lines.linewidth',
                                          self._linewidth),
            'axes.prop_cycle': kwargs.get('axes.prop_cycle',
                                          cycler(
                                              'color',
                                              ['darkgreen', 'royalblue',
                                               'darkgoldenrod', 'darkred'])
                                          ),
        }
def subplots(self, ratio_plot=False,
showdate=False, labeltext=None,
**kwargs):
"""Creates a figure and associated axes using default or
given parameters during initialization.
Can be used to produce a figure with a ratio subplot.
New Parameters
----------
ratio_plot : bool
Determines whether there is an additional subplot
for ratio plotting.
showdate : bool
If True, adds a date to the upper right of the plot.
labeltext : str
Text to be added to the plot as an additional label.
Returns
-------
Figure, axes.Axes
The figure and axes/subplots specified by the
above parameters.
"""
# Get plt subplots
gridspec_kw = {'height_ratios': (3.5, 1) if ratio_plot else (1,),
'hspace': 0.0}
nsubplots = 2 if ratio_plot else 1
fig, axes = plt.subplots(nsubplots, gridspec_kw=gridspec_kw,
figsize=kwargs.get('figsize', self.metadata['figsize']))
if nsubplots == 1:
axes = [axes]
# axes limits
if kwargs.get('xlim', self.metadata['xlim']) is not None:
axes[0].set_xlim(*kwargs.get('xlim', self.metadata['xlim']))
if kwargs.get('ylim', self.metadata['ylim']) is not None:\
axes[0].set_ylim(*kwargs.get('ylim', self.metadata['ylim']))
if ratio_plot:
if kwargs.get('ylim_ratio', self.metadata['ylim_ratio']) is not None:
axes[1].set_ylim(*kwargs.get('ylim_ratio',
self.metadata['ylim_ratio'])
)
axes[1].set_yscale('log')
# axes labels
axes[-1].set_xlabel(kwargs.get('xlabel', self.metadata['xlabel']))
axes[0].set_ylabel(kwargs.get('ylabel', self.metadata['ylabel']), labelpad=5)
if ratio_plot:
axes[1].set_ylabel(kwargs.get('ylabel_ratio',
self.metadata['ylabel_ratio']),
labelpad=-10)
# tick settings
for ax_instance in axes:
ax_instance.minorticks_on()
ax_instance.tick_params(top=True, right=True, bottom=True,
left=True, direction='in', which='both')
if ratio_plot:
axes[0].tick_params(labelbottom=False)
axes[1].tick_params(axis='y')
# Extra plot information
pad = .01
if showdate:
# Including date
axes[0].text(
x=1,
y=1.005+pad,
s=date.today().strftime("%m/%d/%y"),
transform=axes[0].transAxes,
ha="right",
va="bottom",
fontsize=self._medium_size * 0.95,
fontweight="normal"
)
if labeltext is not None:
# Extra primary label
axes[0].text(
x=-0.1,
y=1.005+pad,
s=labeltext,
transform=axes[0].transAxes,
ha="left",
va="bottom",
fontsize=self._medium_size * 1.5,
fontweight="bold",
fontname="DIN Condensed"
)
if kwargs.get('title', self.metadata['title']) is not None:
# Main title
axes[0].text(
x=.12,
y=1.005+pad,
s=kwargs.get('title', self.metadata['title']),
transform=axes[0].transAxes,
ha="left",
va="bottom",
fontsize=self._medium_size * 1.5,
fontstyle="italic",
fontname="Arial"
)
plt.tight_layout()
return fig, axes
    def plot_data(self, data, **kwargs):
        """Plots data in a specified way; subclasses must override this."""
        raise NotImplementedError("Plotter.plot_data() not implemented.")
    def check_conditions(self, data_name, params):
        """Default filter used by act_on_catalog: accept every dataset.

        Subclasses may override this to skip catalog entries based on
        their name or parameters.
        """
        return True
    def file_action(self, file_path,
                    local_rc=True, conditions=None,
                    **kwargs):
        """Defining the file action of the Reader to
        load data from files and plot.

        If local_rc is True the plot is drawn inside this Plotter's
        matplotlib rc_context; otherwise the caller is expected to have
        set up any rc context already.

        NOTE(review): `conditions` is accepted but unused here; filtering
        happens in act_on_catalog.
        """
        # Load the data for this file
        data = self.load_data(file_path)
        # If we use a local rc_context for this file,
        # plot within that context
        if local_rc:
            with mpl.rc_context(self.mpl_rc):
                self.plot_data(data, **kwargs)
        # Otherwise, simply plot without an rc_context
        else:
            self.plot_data(data, **kwargs)
    def act_on_catalog(self, catalog,
                       local_rc=True, conditions=None,
                       fig_kwargs=None, **kwargs):
        """Perform the defined plotting action
        on all files within the catalog.

        local_rc=True wraps the whole catalog in one rc_context (files are
        then plotted with local_rc=False); local_rc=False lets each file
        open its own rc_context via file_action(local_rc=True).
        """
        file_paths = catalog.get_files()
        data_params = catalog.get_data_params()

        # Using default conditions if none are given
        if conditions is None:
            def conditions(data_name, params):
                return self.check_conditions(data_name, params)
        if fig_kwargs is None:
            # Per-dataset figure kwargs default to nothing extra.
            def fig_kwargs(data_name, params):
                return {}

        # If we use a single rc_context for this entire catalog
        if local_rc:
            with mpl.rc_context(self.mpl_rc):
                for file_path, (data_name, params) in zip(file_paths,
                                                          data_params):
                    if conditions(data_name, params):
                        tmp_kwargs = kwargs.copy()
                        tmp_kwargs.update(fig_kwargs(data_name, params))
                        self.file_action(file_path, local_rc=False,
                                         **tmp_kwargs)
        # Otherwise, each plot has its own rc_context
        else:
            for file_path, (data_name, params) in zip(file_paths,
                                                      data_params):
                if conditions(data_name, params):
                    tmp_kwargs = kwargs.copy()
                    tmp_kwargs.update(fig_kwargs(data_name, params))
                    self.file_action(file_path, local_rc=True,
                                     **tmp_kwargs)
    def act_on_catalogs(self, catalogs,
                        local_rc=True, conditions=None,
                        fig_kwargs=None, **kwargs):
        """Perform the defined plotting action
        on all files within a list of catalogs.

        NOTE(review): with local_rc=True, act_on_catalog(local_rc=False)
        ends up calling file_action(local_rc=True), which opens a second
        (identical) rc_context nested inside this one per file — harmless
        since the rc dict is the same, but redundant.
        """
        # Using default conditions if none are given
        if conditions is None:
            def conditions(data_name, params):
                return self.check_conditions(data_name, params)
        if fig_kwargs is None:
            def fig_kwargs(data_name, params):
                return {}

        # If we use a single rc_context for this entire set of catalogs
        if local_rc:
            with mpl.rc_context(self.mpl_rc):
                for catalog in catalogs:
                    self.act_on_catalog(catalog, local_rc=False,
                                        conditions=conditions,
                                        fig_kwargs=fig_kwargs,
                                        **kwargs)
        # Otherwise, each individual plot has its own rc_context
        else:
            for catalog in catalogs:
                self.act_on_catalog(catalog, local_rc=False,
                                    conditions=conditions,
                                    fig_kwargs=fig_kwargs,
                                    **kwargs)
/ContFrac-1.0.0.tar.gz/ContFrac-1.0.0/LICENSE.md | BSD 3-Clause License
===============================================================================
Copyright © 2019, Matjaž Guštin <dev@matjaz.it> <https://matjaz.it>.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
   may be used to endorse or promote products derived from this software
   without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS “AS IS” AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| PypiClean |
# Ekas-topsis-102017078
# Topsis in Python
Author: **Ekaspreet 102017078**
Maintainer: **Ekaspreet <ekaspreet0209@gmail.com>**.
TOPSIS (Technique for Order Preference by Similarity to Ideal Solution) is a method for Multiple Criteria Decision Making.
More details at [wikipedia](https://en.wikipedia.org/wiki/TOPSIS).
<br>
<br>
### In Command Prompt
```
>> topsis data.csv "1,1,1,1" "+,+,-,+" result.csv
```
## Input file (data.csv)
| Model | Correlation | R<sup>2</sup> | RMSE | Accuracy |
| ----- | ----------- | ------------- | ---- | -------- |
| M1 | 0.79 | 0.62 | 1.25 | 60.89 |
| M2 | 0.66 | 0.44 | 2.89 | 63.07 |
| M3 | 0.56 | 0.31 | 1.57 | 62.87 |
| M4 | 0.82 | 0.67 | 2.68 | 70.19 |
| M5 | 0.75 | 0.56 | 1.3 | 80.39 |
Weights (`weights`) need not be normalised beforehand; they will be normalised later in the code.
Information about positive (+) or negative (-) impact criteria should be provided in `impacts`.
<br>
## Output file (result.csv)
| Model | Correlation | R<sup>2</sup> | RMSE | Accuracy | Score | Rank |
| ----- | ----------- | ------------- | ---- | -------- | ------ | ---- |
| M1 | 0.79 | 0.62 | 1.25 | 60.89 | 0.7722 | 2 |
| M2 | 0.66 | 0.44 | 2.89 | 63.07 | 0.2255 | 5 |
| M3 | 0.56 | 0.31 | 1.57 | 62.87 | 0.4388 | 4 |
| M4 | 0.82 | 0.67 | 2.68 | 70.19 | 0.5238 | 3 |
| M5 | 0.75 | 0.56 | 1.3 | 80.39 | 0.8113 | 1 |
<br>
The output file contains the columns of the input file along with two additional columns: **Score** and **Rank**.
| PypiClean |
/BlueWhale3-ImageAnalytics-0.6.1.tar.gz/BlueWhale3-ImageAnalytics-0.6.1/doc/widgets/imagegrid.md | Image Grid
==========
Displays images in a similarity grid.
**Inputs**
- Embeddings: Image embeddings from Image Embedding widget.
- Data Subset: A subset of embeddings or images.
**Outputs**
- Images: Images from the dataset with an additional column specifying if the image is selected or the group, if there are several.
- Selected Images: Selected images with an additional column specifying the group.
The **Image Grid** widget can display images from a dataset in a similarity grid - images with similar content are placed closer to each other. It can be used for image comparison, while looking for similarities or discrepancies between selected data instances (e.g. bacterial growth or bitmap representations of handwriting).

1. *Image Filename Attribute*: Attribute containing paths to images.
2. *Image cell fit*: Resize scales the images to grid, while Crop crops them to squares.
3. *Grid size*: Set the size of the grid. Click *Set size automatically* to optimize the projection.
4. Tick the box to commit the changes automatically. Alternatively, click *Apply*.
5. Information on the input.
6. Access help, save image, and report (in that order).
Example
-------
**Image Grid** can be used to visualize similarity of images in a 2D projection. We have used 5 images of fruits and vegetables, namely orange, banana, strawberry, broccoli and cauliflower.
We loaded the images with [Import Images](importimages.md) and embedded them with Inception v3 embedder in [Image Embedding](imageembedding.md).
Finally, we visualized the images in **Image Grid**. It is obvious that broccoli and cauliflower and much more alike than strawberry and banana.

| PypiClean |
/HolmesIV-2021.9.8a1.tar.gz/HolmesIV-2021.9.8a1/mycroft/util/signal.py | import tempfile
import time
import os
import os.path
import mycroft
from mycroft.util.file_utils import ensure_directory_exists, create_file
def get_ipc_directory(domain=None):
    """Return the directory used for inter-process communication.

    Files placed in this folder can be accessed by different processes on
    the same machine, which makes it useful for signalling; it is often a
    small RAM disk.

    Args:
        domain (str): optional IPC sub-directory, used to keep signal
            filenames from overlapping.

    Returns:
        str: a path to the IPC directory
    """
    configured = mycroft.configuration.Configuration.get().get("ipc_path")
    # Fall back to <tmpdir>/mycroft/ipc when no ipc_path is configured.
    folder = configured if configured else os.path.join(
        tempfile.gettempdir(), "mycroft", "ipc")
    return ensure_directory_exists(folder, domain)
def create_signal(signal_name):
    """Create a named signal file in the IPC "signal" directory.

    Args:
        signal_name (str): the signal's name; must only contain characters
            that are valid in filenames.

    Returns:
        bool: True if the signal file exists after creation, False if an
        I/O error occurred while creating it.
    """
    try:
        signal_path = os.path.join(get_ipc_directory(), "signal", signal_name)
        create_file(signal_path)
        return os.path.isfile(signal_path)
    except IOError:
        # Creation failed (e.g. permissions); report the signal as not set.
        return False
def check_for_signal(signal_name, sec_lifetime=0):
    """Check whether a named signal exists and is still valid.

    Args:
        signal_name (str): the signal's name; must only contain characters
            that are valid in filenames.
        sec_lifetime (int, optional): how many seconds the signal should
            remain valid. If 0 or not specified, it is a single-use signal
            (consumed on read). If -1, it never expires.

    Returns:
        bool: True if the signal is defined, False otherwise
    """
    path = os.path.join(get_ipc_directory(), "signal", signal_name)
    if not os.path.isfile(path):
        # No such signal exists.
        return False
    if sec_lifetime == -1:
        # Permanent signal; never consumed or expired.
        return True
    if sec_lifetime == 0:
        # Single-use signal: consume it on read.
        _remove_signal(path)
        return True
    # Timed signal: expired once creation time + lifetime is in the past.
    if int(os.path.getctime(path) + sec_lifetime) < int(time.time()):
        # Remove once expired.
        _remove_signal(path)
        return False
    return True
def _remove_signal(signal_name):
# this method is private because nothing should import it, if something
# does that it wont work with regular mycroft-core, plus there is no
# good reason to call this from elsewhere
if os.path.isfile(signal_name):
path = signal_name
else:
path = os.path.join(get_ipc_directory(), "signal", signal_name)
# consume this signal
try:
os.remove(path)
except:
# some other process might have removed it meanwhile!
if os.path.isfile(path):
# what now? probably a file permission error,
# this signal will keep triggering if file is not removed
raise | PypiClean |
/Crwy-1.7.1.tar.gz/Crwy-1.7.1/crwy/utils/extend/dingding_robot.py | import json
from crwy.spider import BaseSpider
from crwy.exceptions import CrwyExtendException
class DingDingRobot(BaseSpider):
    """Client for the DingTalk (DingDing) group-robot webhook API.

    Supports the text, markdown, actionCard and feedCard message types by
    POSTing JSON payloads to the robot webhook URL.
    """

    def __init__(self, access_token=None,
                 api_url="https://oapi.dingtalk.com/robot/send?access_token="):
        """
        Args:
            access_token: the robot's webhook access token (required).
            api_url: base webhook URL; the token is appended to it.

        Raises:
            CrwyExtendException: if no access token was supplied.
        """
        super(DingDingRobot, self).__init__()
        # Bug fix: the original checked ``api_url`` (which has a default and
        # is effectively always truthy) while the message said
        # "access_token unset." — validate the token instead, as intended.
        if not access_token:
            raise CrwyExtendException('access_token unset.')
        self.api_url = api_url
        self.header = {'Content-Type': 'application/json'}
        self.access_token = access_token
        self.html_downloader.session.headers = self.header

    def _post(self, data):
        """POST ``data`` (a dict) as JSON to the webhook.

        Raises:
            CrwyExtendException: wrapping any error from the HTTP request.
        """
        try:
            return self.html_downloader.download(
                self.api_url + self.access_token,
                method='POST',
                data=json.dumps(data))
        except Exception as e:
            raise CrwyExtendException(e)

    def send_text(self, content, at_mobiles=None, is_at_all=False):
        """Send a plain-text message.

        Args:
            content: message body.
            at_mobiles: optional list of phone numbers to @-mention
                (``None`` sentinel instead of a shared mutable default).
            is_at_all: whether to @-mention everyone in the group.
        """
        data = {
            "text": {
                "content": content
            },
            "msgtype": "text",
            "at": {
                "isAtAll": is_at_all,
                "atMobiles": at_mobiles if at_mobiles is not None else []
            }
        }
        return self._post(data)

    def send_markdown(self, title, content, at_mobiles=None,
                      is_at_all=False):
        """Send a markdown message.

        Args:
            title: message title shown in the chat list.
            content: markdown body.
            at_mobiles: optional list of phone numbers to @-mention.
            is_at_all: whether to @-mention everyone in the group.
        """
        data = {
            "msgtype": "markdown",
            "markdown": {
                "title": title,
                "text": content
            },
            "at": {
                "atMobiles": at_mobiles if at_mobiles is not None else [],
                "isAtAll": is_at_all
            }
        }
        return self._post(data)

    def send_action_card(self, title, content, hide_avatar="0",
                         btn_oriengtation="0", single_title="阅读全文",
                         single_url="#"):
        """Send an actionCard message with a single jump button.

        Args:
            title: card title.
            content: markdown body of the card.
            hide_avatar: "0" shows the sender avatar, "1" hides it.
            btn_oriengtation: button orientation ("0" vertical,
                "1" horizontal). NOTE: parameter name keeps the original
                misspelling ("oriengtation") for backward compatibility
                with keyword-argument callers.
            single_title: label of the jump button.
            single_url: URL the button opens.
        """
        data = {
            "actionCard": {
                "title": title,
                "text": content,
                "hideAvatar": hide_avatar,
                "btnOrientation": btn_oriengtation,
                "singleTitle": single_title,
                "singleURL": single_url
            },
            "msgtype": "actionCard"
        }
        return self._post(data)

    def send_feed_card(self, links):
        """Send a feedCard message.

        Args:
            links: array[{'title':'', 'messageURL':'', 'picURL':''}]

        Returns:
            The HTTP response from the webhook.
        """
        data = {
            "feedCard": {
                "links": links
            },
            "msgtype": "feedCard"
        }
        return self._post(data)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.