hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7250248eaa636892462bb0e99e0d5df70467f27 | 22,991 | py | Python | flaski/apps/main/ihistogram.py | mpg-age-bioinformatics/flaski | f56e00dd80d8706ecb8593ba6585a97eed881896 | [
"MIT"
] | 9 | 2020-08-03T01:22:59.000Z | 2022-03-03T02:02:04.000Z | flaski/apps/main/ihistogram.py | mpg-age-bioinformatics/flaski | f56e00dd80d8706ecb8593ba6585a97eed881896 | [
"MIT"
] | 79 | 2020-06-03T06:34:46.000Z | 2021-09-22T13:31:43.000Z | flaski/apps/main/ihistogram.py | mpg-age-bioinformatics/flaski | f56e00dd80d8706ecb8593ba6585a97eed881896 | [
"MIT"
] | 5 | 2020-10-05T10:20:23.000Z | 2022-03-01T14:23:12.000Z | #from matplotlib.figure import Figure
import plotly.express as px
import plotly.graph_objects as go
import plotly.figure_factory as ff
from collections import OrderedDict
import numpy as np
import sys
def GET_COLOR(x):
    """Normalise a colour specification.

    An ``"rgb(r, g, b)"`` string is parsed into a list of three floats;
    any other value (named colour, None, ...) is returned coerced to ``str``.
    """
    if str(x)[:3].lower() == "rgb":
        # Take the text between "rgb(" and ")" and split it on commas.
        inner = x.split("rgb(")[-1].split(")")[0]
        return [float(component.strip(" ")) for component in inner.split(",")]
    return str(x)
def make_figure(df,pa):
    """Generates figure.

    Builds either a distribution plot (KDE / histogram / rug via
    ``ff.create_distplot``) when the "kde" checkbox is on, or one
    ``go.Histogram`` trace per entry of ``pa["groups_settings"]`` otherwise,
    then applies the shared layout options (title, axes, ticks, spikes,
    grid and legend) read from ``pa``.

    Args:
        df (pandas.core.frame.DataFrame): Pandas DataFrame containing the input data.
        pa (dict): A dictionary of the style { "argument":"value"} as outputted by `figure_defaults`.
            Checkbox values arrive as "on"/".on"/"off"/".off" strings and
            numeric values arrive as strings; both are parsed below.

    Returns:
        A Plotly figure
    """
    tmp=df.copy()
    # Keep only the user-selected value columns.
    tmp=tmp[pa["vals"]]
    fig = go.Figure( )
    # MAIN FIGURE
    #Load checkboxes: "off"/".off" -> False, anything else -> True, into the
    #parallel dict `pab` that holds the parsed/typed argument values.
    pab={}
    # print("Main", pa["kde"])
    for arg in ["show_legend","upper_axis","lower_axis","left_axis","right_axis","errorbar",\
            "errorbar_symmetric","tick_left_axis","tick_lower_axis","tick_upper_axis","tick_right_axis",\
            "kde","show_hist","show_curve","show_rug"]:
        if pa[arg] in ["off",".off"]:
            pab[arg]=False
        else:
            pab[arg]=True
        # if arg in ["upper_axis","lower_axis","left_axis","right_axis"]:
        #     print(arg, pa[arg], pab[arg])
    #Load floats; empty string / "None" / None all mean "not set" -> None.
    floats=["bin_size","errorbar_value","errorbar_thickness","errorbar_width","x","y","axis_line_width","ticks_line_width",\
        "ticks_length","x_lower_limit","x_upper_limit","y_lower_limit","y_upper_limit","spikes_thickness","xticks_rotation",\
        "yticks_rotation","xticks_fontsize","yticks_fontsize","grid_width","legend_borderwidth","legend_tracegroupgap","legend_x",\
        "legend_y","fig_width","fig_height"]
    for a in floats:
        if pa[a] == "" or pa[a]=="None" or pa[a]==None:
            pab[a]=None
        else:
            pab[a]=float(pa[a])
    #Load integers; same "not set" convention as the floats above.
    integers=["label_fontsize","legend_fontsize","legend_title_fontsize","title_fontsize","maxxticks","maxyticks"]
    for a in integers:
        if pa[a] == "" or pa[a]=="None" or pa[a]==None:
            pab[a]=None
        else:
            pab[a]=int(pa[a])
    #Load Nones: "None"/"Default" selections become None so Plotly applies its defaults.
    possible_nones=["errorbar_color","title_fontcolor","axis_line_color","ticks_color","spikes_color","label_fontcolor",\
        "paper_bgcolor","plot_bgcolor","grid_color","legend_bgcolor","legend_bordercolor","legend_fontcolor","legend_title_fontcolor",\
        "title_fontfamily","label_fontfamily","legend_fontfamily","legend_title_fontfamily"]
    for p in possible_nones:
        if pa[p] == "None" or pa[p]=="Default" :
            pab[p]=None
        else:
            pab[p]=pa[p]
    #KDE (KERNEL DENSITY ESTIMATION) plot
    if pab["kde"]==True:
        # Per-group colours: an explicit RGB string wins over the named colour.
        colors=list()
        if pa["rug_text"]!="":
            rug_text=pa["rug_text"].split(",")
        else:
            rug_text=[]
        for h in pa["groups_settings"].values():
            if h["color_rgb"] == "":
                if h["color_value"]=="None":
                    colors.append(None)
                else:
                    colors.append(h["color_value"])
            else:
                colors.append(GET_COLOR(h["color_rgb"]))
        # One NaN-free series per selected column.
        hist_data=[]
        for col in tmp.columns:
            hist_data.append(tmp[col].dropna())
        # create_distplot needs at least one of hist/curve visible; force the
        # curve on if the user disabled both. (`&` is bitwise but both
        # operands are bool here, so it behaves like `and`.)
        if (not pab["show_hist"]) & (not pab["show_curve"]):
            pa["show_curve"]="on"
            pab["show_curve"]=True
        fig=ff.create_distplot(hist_data=hist_data, group_labels=pa["vals"],curve_type=pa["curve_type"],show_hist=pab["show_hist"],\
            show_curve=pab["show_curve"],show_rug=pab["show_rug"],bin_size=pab["bin_size"],rug_text=rug_text,colors=colors, histnorm=pa["kde_histnorm"])
    else:
        # Plain histogram: one go.Histogram trace per configured group.
        for h in pa["groups_settings"].values():
            #Initialize dummie dict holding this group's parsed settings.
            h_=dict()
            #Load integers
            integers=["hover_fontsize","bins_number"]
            for a in integers:
                if h[a] == "" or h[a]=="None" or h[a] == None:
                    h_[a]=None
                else:
                    h_[a]=int(h[a])
            #Load Nones
            possible_nones=["hover_bgcolor","hover_bordercolor","hover_fontfamily","hover_fontcolor"]
            for p in possible_nones:
                if h[p] == "None" or h[p]=="Default" :
                    h_[p]=None
                else:
                    h_[p]=h[p]
            #Load floats
            floats=["opacity","linewidth"]
            for a in floats:
                if h[a] == "":
                    h_[a]=None
                else:
                    h_[a]=float(h[a])
            if h["label"]!="":
                name=h["label"]
            else:
                name=""
            if h["text"]!="":
                text=h["text"]
            else:
                text=""
            # Marker fill colour: explicit RGB string wins over named colour.
            if h["color_rgb"] == "":
                if h["color_value"]=="None":
                    marker_color=None
                else:
                    marker_color = h["color_value"]
            else:
                marker_color = GET_COLOR( h["color_rgb"] )
            # Marker outline colour, same precedence rule.
            if h["line_rgb"] == "":
                if h["line_color"]=="None":
                    line_color=None
                else:
                    line_color = h["line_color"]
            else:
                line_color = GET_COLOR( h["line_rgb"] )
            if h["histnorm"] == "None":
                histnorm = ""
            else:
                histnorm = h["histnorm"]
            if h["cumulative"]=="on":
                cumulative_enabled=True
            else:
                cumulative_enabled=False
            marker=dict(color=marker_color,line=dict(width=h_["linewidth"],color=line_color))
            cumulative=dict(enabled=cumulative_enabled,direction=h["cumulative_direction"])
            hoverlabel=dict(bgcolor=h_["hover_bgcolor"],bordercolor=h_["hover_bordercolor"],align=h["hover_align"],\
                font=dict(family=h_["hover_fontfamily"],size=h_["hover_fontsize"],color=h_["hover_fontcolor"]))
            if pab["errorbar"]==True:
                errorbar=True
            # Error bars attach to the value axis: error_y for vertical bars,
            # error_x for horizontal ones.
            if h["orientation_value"]=="vertical":
                if pab["errorbar"]==True:
                    error_y=dict(visible=errorbar,value=pab["errorbar_value"],type=pa["errorbar_type"],symmetric=pab["errorbar_symmetric"],color=pab["errorbar_color"],\
                        thickness=pab["errorbar_thickness"],width=pab["errorbar_width"])
                else:
                    error_y=dict(visible=False)
                fig.add_trace(go.Histogram(x=tmp[h["name"]].dropna(),text=text,hoverinfo=h["hoverinfo"],histfunc=h["histfunc"],cumulative=cumulative,\
                    opacity=h_["opacity"],nbinsx=h_["bins_number"],name=name,marker=marker,error_y=error_y,hoverlabel=hoverlabel,histnorm=histnorm))
            elif h["orientation_value"]=="horizontal":
                if pab["errorbar"]==True:
                    error_x=dict(visible=errorbar,value=pab["errorbar_value"],type=pa["errorbar_type"],symmetric=pab["errorbar_symmetric"],color=pab["errorbar_color"],\
                        thickness=pab["errorbar_thickness"],width=pab["errorbar_width"])
                else:
                    error_x=dict(visible=False)
                fig.add_trace(go.Histogram(y=tmp[h["name"]].dropna(),text=text,hoverinfo=h["hoverinfo"],histfunc=h["histfunc"],cumulative=cumulative,\
                    opacity=h_["opacity"],nbinsy=h_["bins_number"],name=name,marker=marker,error_x=error_x,hoverlabel=hoverlabel,histnorm=histnorm))
    #UPDATE LAYOUT OF HISTOGRAMS
    #Figure size
    fig.update_layout( width=pab["fig_width"], height=pab["fig_height"] ) # autosize=False,
    #Update title
    title=dict(text=pa["title"],font=dict(family=pab["title_fontfamily"],size=pab["title_fontsize"],color=pab["title_fontcolor"]),\
        xref=pa["xref"],yref=pa["yref"],x=pab["x"],y=pab["y"],xanchor=pa["title_xanchor"],yanchor=pa["title_yanchor"])
    fig.update_layout(title=title,barmode=pa["barmode"])
    #Update axes
    # NOTE(review): pa["log_scale"] is compared against the boolean True although
    # figure_defaults() stores it as the string ".off", and "orientation" is not
    # a key produced by figure_defaults() -- confirm the caller normalises both,
    # otherwise this branch never fires / raises KeyError.
    if pa["log_scale"]==True and pa["orientation"]=="vertical":
        fig.update_yaxes(type="log")
    elif pa["log_scale"]==True and pa["orientation"]=="horizontal":
        fig.update_xaxes(type="log")
    # print(pab["lower_axis"],pab["axis_line_width"],pab["axis_line_color"],pab["upper_axis"])
    fig.update_xaxes(zeroline=False, showline=pab["lower_axis"], linewidth=pab["axis_line_width"], linecolor=pab["axis_line_color"], mirror=pab["upper_axis"])
    fig.update_yaxes(zeroline=False, showline=pab["left_axis"], linewidth=pab["axis_line_width"], linecolor=pab["axis_line_color"],mirror=pab["right_axis"])
    #Update ticks: suppress them entirely when no tick checkbox is selected.
    if pab["tick_lower_axis"]==False and pab["tick_right_axis"]==False and pab["tick_left_axis"]==False and pab["tick_upper_axis"]==False:
        pa["ticks_direction_value"]=""
        ticks=""
    else:
        ticks=pa["ticks_direction_value"]
    fig.update_xaxes(ticks=ticks, tickwidth=pab["ticks_line_width"], tickcolor=pab["ticks_color"], ticklen=pab["ticks_length"])
    fig.update_yaxes(ticks=ticks, tickwidth=pab["ticks_line_width"], tickcolor=pab["ticks_color"], ticklen=pab["ticks_length"])
    #Update mirror property of axis based on ticks and axis selected by user
    #Determines if the axis lines or/and ticks are mirrored to the opposite side of the plotting area.
    # If "True", the axis lines are mirrored. If "ticks", the axis lines and ticks are mirrored. If "False", mirroring is disable.
    # If "all", axis lines are mirrored on all shared-axes subplots. If "allticks", axis lines and ticks are mirrored on all shared-axes subplots.
    if pab["tick_upper_axis"] :
        fig.update_xaxes(mirror="ticks")
    # elif pab["upper_axis"] :
    #     fig.update_xaxes(mirror=True)
    # else:
    #     fig.update_xaxes(mirror=False)
    if pab["tick_right_axis"]:
        fig.update_yaxes(mirror="ticks")
    # elif pab["right_axis"]:
    #     fig.update_yaxes(mirror=True)
    # else:
    #     fig.update_yaxes(mirror=False)
    # fig.update_yaxes(mirror=True)
    # Explicit axis ranges are applied only when BOTH limits were given.
    if (pa["x_lower_limit"]!="") and (pa["x_upper_limit"]!="") :
        xmin=pab["x_lower_limit"]
        xmax=pab["x_upper_limit"]
        fig.update_xaxes(range=[xmin, xmax])
    if (pa["y_lower_limit"]!="") and (pa["y_upper_limit"]!="") :
        ymin=pab["y_lower_limit"]
        ymax=pab["y_upper_limit"]
        fig.update_yaxes(range=[ymin, ymax])
    if pa["maxxticks"]!="":
        fig.update_xaxes(nticks=pab["maxxticks"])
    if pa["maxyticks"]!="":
        fig.update_yaxes(nticks=pab["maxyticks"])
    #Update spikes (hover guide lines) on the requested axes.
    if pa["spikes_value"]=="both":
        fig.update_xaxes(showspikes=True,spikecolor=pab["spikes_color"],spikethickness=pab["spikes_thickness"],spikedash=pa["spikes_dash"],spikemode=pa["spikes_mode"])
        fig.update_yaxes(showspikes=True,spikecolor=pab["spikes_color"],spikethickness=pab["spikes_thickness"],spikedash=pa["spikes_dash"],spikemode=pa["spikes_mode"])
    elif pa["spikes_value"]=="x":
        fig.update_xaxes(showspikes=True,spikecolor=pab["spikes_color"],spikethickness=pab["spikes_thickness"],spikedash=pa["spikes_dash"],spikemode=pa["spikes_mode"])
    elif pa["spikes_value"]=="y":
        fig.update_yaxes(showspikes=True,spikecolor=pab["spikes_color"],spikethickness=pab["spikes_thickness"],spikedash=pa["spikes_dash"],spikemode=pa["spikes_mode"])
    elif pa["spikes_value"]=="None":
        fig.update_xaxes(showspikes=None)
        fig.update_yaxes(showspikes=None)
    #UPDATE X AXIS AND Y AXIS LAYOUT
    xaxis=dict(visible=True, title=dict(text=pa["xlabel"],font=dict(family=pab["label_fontfamily"],size=pab["label_fontsize"],color=pab["label_fontcolor"])))
    yaxis=dict(visible=True, title=dict(text=pa["ylabel"],font=dict(family=pab["label_fontfamily"],size=pab["label_fontsize"],color=pab["label_fontcolor"])))
    fig.update_layout(paper_bgcolor=pab["paper_bgcolor"],plot_bgcolor=pab["plot_bgcolor"],xaxis = xaxis,yaxis = yaxis)
    fig.update_xaxes(tickangle=pab["xticks_rotation"], tickfont=dict(size=pab["xticks_fontsize"]))
    fig.update_yaxes(tickangle=pab["yticks_rotation"], tickfont=dict(size=pab["yticks_fontsize"]))
    #UPDATE GRID PROPERTIES
    if pa["grid_value"] == "None":
        fig.update_xaxes(showgrid=False)
        fig.update_yaxes(showgrid=False)
    elif pa["grid_value"]=="x":
        # NOTE(review): selecting "x" toggles the y-axis grid and vice versa;
        # this preserves the original behaviour -- confirm it matches the UI labels.
        fig.update_yaxes(showgrid=True,gridcolor=pab["grid_color"],gridwidth=pab["grid_width"])
    elif pa["grid_value"]=="y":
        fig.update_xaxes(showgrid=True,gridcolor=pab["grid_color"],gridwidth=pab["grid_width"])
    elif pa["grid_value"]=="both":
        fig.update_xaxes(showgrid=True,gridcolor=pab["grid_color"],gridwidth=pab["grid_width"])
        fig.update_yaxes(showgrid=True,gridcolor=pab["grid_color"],gridwidth=pab["grid_width"])
    fig.update_layout(template='plotly_white')
    #UPDATE LEGEND PROPERTIES
    if pab["show_legend"]==True:
        if pa["legend_orientation"]=="vertical":
            legend_orientation="v"
        elif pa["legend_orientation"]=="horizontal":
            legend_orientation="h"
        fig.update_layout(showlegend=True,legend=dict(x=pab["legend_x"],y=pab["legend_y"],bgcolor=pab["legend_bgcolor"],bordercolor=pab["legend_bordercolor"],\
            borderwidth=pab["legend_borderwidth"],valign=pa["legend_valign"],\
            font=dict(family=pab["legend_fontfamily"],size=pab["legend_fontsize"],color=pab["legend_fontcolor"]),orientation=legend_orientation,\
            traceorder=pa["legend_traceorder"],tracegroupgap=pab["legend_tracegroupgap"],\
            title=dict(text=pa["legend_title"],side=pa["legend_side"],font=dict(family=pab["legend_title_fontfamily"],size=pab["legend_title_fontsize"],\
            color=pab["legend_title_fontcolor"]))))
    else:
        fig.update_layout(showlegend=False)
    return fig
# ---------------------------------------------------------------------------
# Option catalogues exposed to the UI. Each plural list below enumerates the
# values a user may pick for the matching argument in `figure_defaults`.
# ---------------------------------------------------------------------------
# Font sizes offered as strings "1".."100".
STANDARD_SIZES=[str(i) for i in list(range(1,101))]
# CSS colour names accepted by Plotly, plus "None" for "let Plotly decide".
STANDARD_COLORS=["None","aliceblue","antiquewhite","aqua","aquamarine","azure","beige",\
    "bisque","black","blanchedalmond","blue","blueviolet","brown","burlywood",\
    "cadetblue","chartreuse","chocolate","coral","cornflowerblue","cornsilk",\
    "crimson","cyan","darkblue","darkcyan","darkgoldenrod","darkgray","darkgrey",\
    "darkgreen","darkkhaki","darkmagenta","darkolivegreen","darkorange","darkorchid",\
    "darkred","darksalmon","darkseagreen","darkslateblue","darkslategray","darkslategrey",\
    "darkturquoise","darkviolet","deeppink","deepskyblue","dimgray","dimgrey","dodgerblue",\
    "firebrick","floralwhite","forestgreen","fuchsia","gainsboro","ghostwhite","gold",\
    "goldenrod","gray","grey","green","greenyellow","honeydew","hotpink","indianred","indigo",\
    "ivory","khaki","lavender","lavenderblush","lawngreen","lemonchiffon","lightblue","lightcoral",\
    "lightcyan","lightgoldenrodyellow","lightgray","lightgrey","lightgreen","lightpink","lightsalmon",\
    "lightseagreen","lightskyblue","lightslategray","lightslategrey","lightsteelblue","lightyellow",\
    "lime","limegreen","linen","magenta","maroon","mediumaquamarine","mediumblue","mediumorchid",\
    "mediumpurple","mediumseagreen","mediumslateblue","mediumspringgreen","mediumturquoise",\
    "mediumvioletred","midnightblue","mintcream","mistyrose","moccasin","navajowhite","navy",\
    "oldlace","olive","olivedrab","orange","orangered","orchid","palegoldenrod","palegreen",\
    "paleturquoise","palevioletred","papayawhip","peachpuff","peru","pink","plum","powderblue",\
    "purple","red","rosybrown","royalblue","rebeccapurple","saddlebrown","salmon","sandybrown",\
    "seagreen","seashell","sienna","silver","skyblue","slateblue","slategray","slategrey","snow",\
    "springgreen","steelblue","tan","teal","thistle","tomato","turquoise","violet","wheat","white",\
    "whitesmoke","yellow","yellowgreen"]
# Histogram normalisation modes ("None" means raw counts).
STANDARD_HISTNORMS=['None', 'percent', 'probability', 'density', 'probability density']
LINE_STYLES=["solid", "dot", "dash", "longdash", "dashdot","longdashdot"]
STANDARD_BARMODES=["stack", "group","overlay","relative"]
STANDARD_ORIENTATIONS=['vertical','horizontal']
STANDARD_ALIGNMENTS=["left","right","auto"]
STANDARD_VERTICAL_ALIGNMENTS=["top", "middle","bottom"]
STANDARD_FONTS=["Arial", "Balto", "Courier New", "Default", "Droid Sans", "Droid Serif", "Droid Sans Mono",\
    "Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman"]
TICKS_DIRECTIONS=["inside","outside",'']
LEGEND_LOCATIONS=['best','upper right','upper left','lower left','lower right','right','center left','center right','lower center','upper center','center']
MODES=["expand",None]
# Plotly `hoverinfo` flag combinations.
STANDARD_HOVERINFO=["x", "y", "z", "text", "name","all","none","skip","x+y","x+text","x+name",\
    "y+text","y+name","text+name","x+y+name","x+y+text","x+text+name","y+text+name"]
STANDARD_HISTFUNC=["count","sum","avg","min","max"]
STANDARD_CUMULATIVE_DIRECTIONS=["increasing","decreasing"]
STANDARD_ERRORBAR_TYPES=["percent","constant","sqrt"]
STANDARD_REFERENCES=["container","paper"]
STANDARD_TITLE_XANCHORS=["auto","left","center","right"]
STANDARD_TITLE_YANCHORS=["top","middle","bottom"]
STANDARD_LEGEND_XANCHORS=["auto","left","center","right"]
STANDARD_LEGEND_YANCHORS=["auto","top","middle","bottom"]
STANDARD_TRACEORDERS=["reversed", "grouped", "reversed+grouped", "normal"]
STANDARD_SIDES=["top","left","top left"]
STANDARD_SPIKEMODES=["toaxis", "across", "marker","toaxis+across","toaxis+marker","across+marker","toaxis+across+marker"]
STANDARD_CURVETYPES=["kde","normal"]
def figure_defaults():
    """Return the default style arguments for an iHistogram figure.

    The mapping follows the app-wide { "argument": "value" } convention:
    plural keys (e.g. "colors", "fonts") list the choices offered to the
    user, while the matching singular key holds the current/default choice.
    Checkbox defaults are encoded as ".on"/".off" strings and numeric
    defaults as strings; `make_figure` parses them into typed values.

    Returns:
        dict: A dictionary of the style { "argument":"value"}
    """
    plot_arguments = {
        # Figure size and title
        "fig_width": "600",
        "fig_height": "600",
        "title": 'iHistogram',
        "title_fontsize": "20",
        "title_fontfamily": "Default",
        "title_fontcolor": "None",
        "titles": "20",
        # KDE / distplot options
        "kde": ".off",
        "curve_type": "kde",
        "curve_types": STANDARD_CURVETYPES,
        "kde_histnorm": "probability density",
        "kde_histnorms": ["probability density", "probability"],
        "show_hist": ".off",
        "show_curve": ".on",
        "show_rug": ".off",
        "rug_text": "",
        "bin_size": "1",
        "opacity": 0.8,
        # Canvas colours and hover/histogram catalogues
        "paper_bgcolor": "white",
        "plot_bgcolor": "white",
        "hoverinfos": STANDARD_HOVERINFO,
        "hover_alignments": STANDARD_ALIGNMENTS,
        "histfuncs": STANDARD_HISTFUNC,
        # Title placement
        "references": STANDARD_REFERENCES,
        "xref": "container",
        "yref": "container",
        "x": "0.5",
        "y": "0.9",
        "title_xanchors": STANDARD_TITLE_XANCHORS,
        "title_yanchors": STANDARD_TITLE_YANCHORS,
        "title_xanchor": "auto",
        "title_yanchor": "auto",
        "show_legend": "on",
        # Error bars
        "errorbar": ".off",
        "errorbar_value": "10",
        "errorbar_type": "percent",
        "errorbar_types": STANDARD_ERRORBAR_TYPES,
        "errorbar_symmetric": ".off",
        "errorbar_color": "darkgrey",
        "errorbar_width": "2",
        "errorbar_thickness": "2",
        # Axis lines and tick styling
        "axis_line_width": 1.0,
        "axis_line_color": "lightgrey",
        "ticks_line_width": 1.0,
        "ticks_color": "lightgrey",
        # Data selection
        "cols": [],
        "groups": [],
        "vals": [],
        "groups_settings": dict(),
        "log_scale": ".off",
        # Catalogues of user-selectable values
        "fonts": STANDARD_FONTS,
        "cumulative_directions": STANDARD_CUMULATIVE_DIRECTIONS,
        "colors": STANDARD_COLORS,
        "histnorms": STANDARD_HISTNORMS,
        "barmode": "overlay",
        "barmodes": STANDARD_BARMODES,
        "histtype_value": "bar",
        "linestyles": LINE_STYLES,
        "linestyle_value": "",
        "orientations": STANDARD_ORIENTATIONS,
        "fontsizes": STANDARD_SIZES,
        "xlabel_size": STANDARD_SIZES,
        "ylabel_size": STANDARD_SIZES,
        # Axis labels
        "xlabel": "",
        "ylabel": "",
        "label_fontfamily": "Default",
        "label_fontsize": "15",
        "label_fontcolor": "None",
        "xlabels": "14",
        "ylabels": "14",
        # Which axis lines / ticks to draw
        "left_axis": ".on",
        "right_axis": ".on",
        "upper_axis": ".on",
        "lower_axis": ".on",
        "tick_left_axis": ".on",
        "tick_right_axis": ".off",
        "tick_upper_axis": ".off",
        "tick_lower_axis": ".on",
        "ticks_direction": TICKS_DIRECTIONS,
        "ticks_direction_value": TICKS_DIRECTIONS[1],
        "ticks_length": "6.0",
        "xticks_fontsize": "14",
        "yticks_fontsize": "14",
        "xticks_rotation": "0",
        "yticks_rotation": "0",
        # Axis limits and tick counts (empty string = automatic)
        "x_lower_limit": "",
        "y_lower_limit": "",
        "x_upper_limit": "",
        "y_upper_limit": "",
        "maxxticks": "",
        "maxyticks": "",
        # Spikes (hover guide lines)
        "spikes": ["None", "both", "x", "y"],
        "spikes_value": "None",
        "spikes_color": "None",
        "spikes_thickness": "3.0",
        "dashes": LINE_STYLES,
        "spikes_dash": "dash",
        "spikes_mode": "toaxis",
        "spikes_modes": STANDARD_SPIKEMODES,
        # Grid
        "grid": ["None", "both", "x", "y"],
        "grid_value": "None",
        "grid_width": "1",
        "grid_color": "lightgrey",
        # Legend
        "legend_title": "",
        "legend_bgcolor": "None",
        "legend_borderwidth": "0",
        "legend_bordercolor": "None",
        "legend_fontfamily": "Default",
        "legend_fontsize": "12",
        "legend_fontcolor": "None",
        "legend_title_fontfamily": "Default",
        "legend_title_fontsize": "12",
        "legend_title_fontcolor": "None",
        "legend_orientation": "vertical",
        "traceorders": STANDARD_TRACEORDERS,
        "legend_traceorder": "normal",
        "legend_tracegroupgap": "10",
        "legend_y": "1",
        "legend_x": "1.02",
        "legend_xanchor": "left",
        "legend_yanchor": "auto",
        "legend_xanchors": STANDARD_LEGEND_XANCHORS,
        "legend_yanchors": STANDARD_LEGEND_YANCHORS,
        "legend_valign": "middle",
        "valignments": STANDARD_VERTICAL_ALIGNMENTS,
        "sides": STANDARD_SIDES,
        "legend_side": "left",
        # Download / session bookkeeping
        "download_format": ["png", "pdf", "svg"],
        "downloadf": "pdf",
        "downloadn": "ihistogram",
        "session_downloadn": "MySession.ihistogram.plot",
        "inputsessionfile": "Select file..",
        "session_argumentsn": "MyArguments.ihistogram.plot",
        "inputargumentsfile": "Select file..",
    }
    return plot_arguments
import plotly.express as px
import plotly.graph_objects as go
import plotly.figure_factory as ff
from collections import OrderedDict
import numpy as np
import sys
def GET_COLOR(x):
if str(x)[:3].lower() == "rgb":
vals=x.split("rgb(")[-1].split(")")[0].split(",")
vals=[ float(s.strip(" ")) for s in vals ]
return vals
else:
return str(x)
def make_figure(df,pa):
tmp=df.copy()
tmp=tmp[pa["vals"]]
fig = go.Figure( )
pab={}
for arg in ["show_legend","upper_axis","lower_axis","left_axis","right_axis","errorbar",\
"errorbar_symmetric","tick_left_axis","tick_lower_axis","tick_upper_axis","tick_right_axis",\
"kde","show_hist","show_curve","show_rug"]:
if pa[arg] in ["off",".off"]:
pab[arg]=False
else:
pab[arg]=True
floats=["bin_size","errorbar_value","errorbar_thickness","errorbar_width","x","y","axis_line_width","ticks_line_width",\
"ticks_length","x_lower_limit","x_upper_limit","y_lower_limit","y_upper_limit","spikes_thickness","xticks_rotation",\
"yticks_rotation","xticks_fontsize","yticks_fontsize","grid_width","legend_borderwidth","legend_tracegroupgap","legend_x",\
"legend_y","fig_width","fig_height"]
for a in floats:
if pa[a] == "" or pa[a]=="None" or pa[a]==None:
pab[a]=None
else:
pab[a]=float(pa[a])
integers=["label_fontsize","legend_fontsize","legend_title_fontsize","title_fontsize","maxxticks","maxyticks"]
for a in integers:
if pa[a] == "" or pa[a]=="None" or pa[a]==None:
pab[a]=None
else:
pab[a]=int(pa[a])
possible_nones=["errorbar_color","title_fontcolor","axis_line_color","ticks_color","spikes_color","label_fontcolor",\
"paper_bgcolor","plot_bgcolor","grid_color","legend_bgcolor","legend_bordercolor","legend_fontcolor","legend_title_fontcolor",\
"title_fontfamily","label_fontfamily","legend_fontfamily","legend_title_fontfamily"]
for p in possible_nones:
if pa[p] == "None" or pa[p]=="Default" :
pab[p]=None
else:
pab[p]=pa[p]
if pab["kde"]==True:
colors=list()
if pa["rug_text"]!="":
rug_text=pa["rug_text"].split(",")
else:
rug_text=[]
for h in pa["groups_settings"].values():
if h["color_rgb"] == "":
if h["color_value"]=="None":
colors.append(None)
else:
colors.append(h["color_value"])
else:
colors.append(GET_COLOR(h["color_rgb"]))
hist_data=[]
for col in tmp.columns:
hist_data.append(tmp[col].dropna())
if (not pab["show_hist"]) & (not pab["show_curve"]):
pa["show_curve"]="on"
pab["show_curve"]=True
fig=ff.create_distplot(hist_data=hist_data, group_labels=pa["vals"],curve_type=pa["curve_type"],show_hist=pab["show_hist"],\
show_curve=pab["show_curve"],show_rug=pab["show_rug"],bin_size=pab["bin_size"],rug_text=rug_text,colors=colors, histnorm=pa["kde_histnorm"])
else:
for h in pa["groups_settings"].values():
h_=dict()
integers=["hover_fontsize","bins_number"]
for a in integers:
if h[a] == "" or h[a]=="None" or h[a] == None:
h_[a]=None
else:
h_[a]=int(h[a])
possible_nones=["hover_bgcolor","hover_bordercolor","hover_fontfamily","hover_fontcolor"]
for p in possible_nones:
if h[p] == "None" or h[p]=="Default" :
h_[p]=None
else:
h_[p]=h[p]
floats=["opacity","linewidth"]
for a in floats:
if h[a] == "":
h_[a]=None
else:
h_[a]=float(h[a])
if h["label"]!="":
name=h["label"]
else:
name=""
if h["text"]!="":
text=h["text"]
else:
text=""
if h["color_rgb"] == "":
if h["color_value"]=="None":
marker_color=None
else:
marker_color = h["color_value"]
else:
marker_color = GET_COLOR( h["color_rgb"] )
if h["line_rgb"] == "":
if h["line_color"]=="None":
line_color=None
else:
line_color = h["line_color"]
else:
line_color = GET_COLOR( h["line_rgb"] )
if h["histnorm"] == "None":
histnorm = ""
else:
histnorm = h["histnorm"]
if h["cumulative"]=="on":
cumulative_enabled=True
else:
cumulative_enabled=False
marker=dict(color=marker_color,line=dict(width=h_["linewidth"],color=line_color))
cumulative=dict(enabled=cumulative_enabled,direction=h["cumulative_direction"])
hoverlabel=dict(bgcolor=h_["hover_bgcolor"],bordercolor=h_["hover_bordercolor"],align=h["hover_align"],\
font=dict(family=h_["hover_fontfamily"],size=h_["hover_fontsize"],color=h_["hover_fontcolor"]))
if pab["errorbar"]==True:
errorbar=True
if h["orientation_value"]=="vertical":
if pab["errorbar"]==True:
error_y=dict(visible=errorbar,value=pab["errorbar_value"],type=pa["errorbar_type"],symmetric=pab["errorbar_symmetric"],color=pab["errorbar_color"],\
thickness=pab["errorbar_thickness"],width=pab["errorbar_width"])
else:
error_y=dict(visible=False)
fig.add_trace(go.Histogram(x=tmp[h["name"]].dropna(),text=text,hoverinfo=h["hoverinfo"],histfunc=h["histfunc"],cumulative=cumulative,\
opacity=h_["opacity"],nbinsx=h_["bins_number"],name=name,marker=marker,error_y=error_y,hoverlabel=hoverlabel,histnorm=histnorm))
elif h["orientation_value"]=="horizontal":
if pab["errorbar"]==True:
error_x=dict(visible=errorbar,value=pab["errorbar_value"],type=pa["errorbar_type"],symmetric=pab["errorbar_symmetric"],color=pab["errorbar_color"],\
thickness=pab["errorbar_thickness"],width=pab["errorbar_width"])
else:
error_x=dict(visible=False)
fig.add_trace(go.Histogram(y=tmp[h["name"]].dropna(),text=text,hoverinfo=h["hoverinfo"],histfunc=h["histfunc"],cumulative=cumulative,\
opacity=h_["opacity"],nbinsy=h_["bins_number"],name=name,marker=marker,error_x=error_x,hoverlabel=hoverlabel,histnorm=histnorm))
fig.update_layout( width=pab["fig_width"], height=pab["fig_height"] )
title=dict(text=pa["title"],font=dict(family=pab["title_fontfamily"],size=pab["title_fontsize"],color=pab["title_fontcolor"]),\
xref=pa["xref"],yref=pa["yref"],x=pab["x"],y=pab["y"],xanchor=pa["title_xanchor"],yanchor=pa["title_yanchor"])
fig.update_layout(title=title,barmode=pa["barmode"])
if pa["log_scale"]==True and pa["orientation"]=="vertical":
fig.update_yaxes(type="log")
elif pa["log_scale"]==True and pa["orientation"]=="horizontal":
fig.update_xaxes(type="log")
fig.update_xaxes(zeroline=False, showline=pab["lower_axis"], linewidth=pab["axis_line_width"], linecolor=pab["axis_line_color"], mirror=pab["upper_axis"])
fig.update_yaxes(zeroline=False, showline=pab["left_axis"], linewidth=pab["axis_line_width"], linecolor=pab["axis_line_color"],mirror=pab["right_axis"])
if pab["tick_lower_axis"]==False and pab["tick_right_axis"]==False and pab["tick_left_axis"]==False and pab["tick_upper_axis"]==False:
pa["ticks_direction_value"]=""
ticks=""
else:
ticks=pa["ticks_direction_value"]
fig.update_xaxes(ticks=ticks, tickwidth=pab["ticks_line_width"], tickcolor=pab["ticks_color"], ticklen=pab["ticks_length"])
fig.update_yaxes(ticks=ticks, tickwidth=pab["ticks_line_width"], tickcolor=pab["ticks_color"], ticklen=pab["ticks_length"])
if pab["tick_upper_axis"] :
fig.update_xaxes(mirror="ticks")
if pab["tick_right_axis"]:
fig.update_yaxes(mirror="ticks")
if (pa["x_lower_limit"]!="") and (pa["x_upper_limit"]!="") :
xmin=pab["x_lower_limit"]
xmax=pab["x_upper_limit"]
fig.update_xaxes(range=[xmin, xmax])
if (pa["y_lower_limit"]!="") and (pa["y_upper_limit"]!="") :
ymin=pab["y_lower_limit"]
ymax=pab["y_upper_limit"]
fig.update_yaxes(range=[ymin, ymax])
if pa["maxxticks"]!="":
fig.update_xaxes(nticks=pab["maxxticks"])
if pa["maxyticks"]!="":
fig.update_yaxes(nticks=pab["maxyticks"])
if pa["spikes_value"]=="both":
fig.update_xaxes(showspikes=True,spikecolor=pab["spikes_color"],spikethickness=pab["spikes_thickness"],spikedash=pa["spikes_dash"],spikemode=pa["spikes_mode"])
fig.update_yaxes(showspikes=True,spikecolor=pab["spikes_color"],spikethickness=pab["spikes_thickness"],spikedash=pa["spikes_dash"],spikemode=pa["spikes_mode"])
elif pa["spikes_value"]=="x":
fig.update_xaxes(showspikes=True,spikecolor=pab["spikes_color"],spikethickness=pab["spikes_thickness"],spikedash=pa["spikes_dash"],spikemode=pa["spikes_mode"])
elif pa["spikes_value"]=="y":
fig.update_yaxes(showspikes=True,spikecolor=pab["spikes_color"],spikethickness=pab["spikes_thickness"],spikedash=pa["spikes_dash"],spikemode=pa["spikes_mode"])
elif pa["spikes_value"]=="None":
fig.update_xaxes(showspikes=None)
fig.update_yaxes(showspikes=None)
xaxis=dict(visible=True, title=dict(text=pa["xlabel"],font=dict(family=pab["label_fontfamily"],size=pab["label_fontsize"],color=pab["label_fontcolor"])))
yaxis=dict(visible=True, title=dict(text=pa["ylabel"],font=dict(family=pab["label_fontfamily"],size=pab["label_fontsize"],color=pab["label_fontcolor"])))
fig.update_layout(paper_bgcolor=pab["paper_bgcolor"],plot_bgcolor=pab["plot_bgcolor"],xaxis = xaxis,yaxis = yaxis)
fig.update_xaxes(tickangle=pab["xticks_rotation"], tickfont=dict(size=pab["xticks_fontsize"]))
fig.update_yaxes(tickangle=pab["yticks_rotation"], tickfont=dict(size=pab["yticks_fontsize"]))
if pa["grid_value"] == "None":
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
elif pa["grid_value"]=="x":
fig.update_yaxes(showgrid=True,gridcolor=pab["grid_color"],gridwidth=pab["grid_width"])
elif pa["grid_value"]=="y":
fig.update_xaxes(showgrid=True,gridcolor=pab["grid_color"],gridwidth=pab["grid_width"])
elif pa["grid_value"]=="both":
fig.update_xaxes(showgrid=True,gridcolor=pab["grid_color"],gridwidth=pab["grid_width"])
fig.update_yaxes(showgrid=True,gridcolor=pab["grid_color"],gridwidth=pab["grid_width"])
fig.update_layout(template='plotly_white')
if pab["show_legend"]==True:
if pa["legend_orientation"]=="vertical":
legend_orientation="v"
elif pa["legend_orientation"]=="horizontal":
legend_orientation="h"
fig.update_layout(showlegend=True,legend=dict(x=pab["legend_x"],y=pab["legend_y"],bgcolor=pab["legend_bgcolor"],bordercolor=pab["legend_bordercolor"],\
borderwidth=pab["legend_borderwidth"],valign=pa["legend_valign"],\
font=dict(family=pab["legend_fontfamily"],size=pab["legend_fontsize"],color=pab["legend_fontcolor"]),orientation=legend_orientation,\
traceorder=pa["legend_traceorder"],tracegroupgap=pab["legend_tracegroupgap"],\
title=dict(text=pa["legend_title"],side=pa["legend_side"],font=dict(family=pab["legend_title_fontfamily"],size=pab["legend_title_fontsize"],\
color=pab["legend_title_fontcolor"]))))
else:
fig.update_layout(showlegend=False)
return fig
STANDARD_SIZES=[str(i) for i in list(range(1,101))]
STANDARD_COLORS=["None","aliceblue","antiquewhite","aqua","aquamarine","azure","beige",\
"bisque","black","blanchedalmond","blue","blueviolet","brown","burlywood",\
"cadetblue","chartreuse","chocolate","coral","cornflowerblue","cornsilk",\
"crimson","cyan","darkblue","darkcyan","darkgoldenrod","darkgray","darkgrey",\
"darkgreen","darkkhaki","darkmagenta","darkolivegreen","darkorange","darkorchid",\
"darkred","darksalmon","darkseagreen","darkslateblue","darkslategray","darkslategrey",\
"darkturquoise","darkviolet","deeppink","deepskyblue","dimgray","dimgrey","dodgerblue",\
"firebrick","floralwhite","forestgreen","fuchsia","gainsboro","ghostwhite","gold",\
"goldenrod","gray","grey","green","greenyellow","honeydew","hotpink","indianred","indigo",\
"ivory","khaki","lavender","lavenderblush","lawngreen","lemonchiffon","lightblue","lightcoral",\
"lightcyan","lightgoldenrodyellow","lightgray","lightgrey","lightgreen","lightpink","lightsalmon",\
"lightseagreen","lightskyblue","lightslategray","lightslategrey","lightsteelblue","lightyellow",\
"lime","limegreen","linen","magenta","maroon","mediumaquamarine","mediumblue","mediumorchid",\
"mediumpurple","mediumseagreen","mediumslateblue","mediumspringgreen","mediumturquoise",\
"mediumvioletred","midnightblue","mintcream","mistyrose","moccasin","navajowhite","navy",\
"oldlace","olive","olivedrab","orange","orangered","orchid","palegoldenrod","palegreen",\
"paleturquoise","palevioletred","papayawhip","peachpuff","peru","pink","plum","powderblue",\
"purple","red","rosybrown","royalblue","rebeccapurple","saddlebrown","salmon","sandybrown",\
"seagreen","seashell","sienna","silver","skyblue","slateblue","slategray","slategrey","snow",\
"springgreen","steelblue","tan","teal","thistle","tomato","turquoise","violet","wheat","white",\
"whitesmoke","yellow","yellowgreen"]
STANDARD_HISTNORMS=['None', 'percent', 'probability', 'density', 'probability density']
LINE_STYLES=["solid", "dot", "dash", "longdash", "dashdot","longdashdot"]
STANDARD_BARMODES=["stack", "group","overlay","relative"]
STANDARD_ORIENTATIONS=['vertical','horizontal']
STANDARD_ALIGNMENTS=["left","right","auto"]
STANDARD_VERTICAL_ALIGNMENTS=["top", "middle","bottom"]
STANDARD_FONTS=["Arial", "Balto", "Courier New", "Default", "Droid Sans", "Droid Serif", "Droid Sans Mono",\
"Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman"]
TICKS_DIRECTIONS=["inside","outside",'']
LEGEND_LOCATIONS=['best','upper right','upper left','lower left','lower right','right','center left','center right','lower center','upper center','center']
MODES=["expand",None]
STANDARD_HOVERINFO=["x", "y", "z", "text", "name","all","none","skip","x+y","x+text","x+name",\
"y+text","y+name","text+name","x+y+name","x+y+text","x+text+name","y+text+name"]
STANDARD_HISTFUNC=["count","sum","avg","min","max"]
STANDARD_CUMULATIVE_DIRECTIONS=["increasing","decreasing"]
STANDARD_ERRORBAR_TYPES=["percent","constant","sqrt"]
STANDARD_REFERENCES=["container","paper"]
STANDARD_TITLE_XANCHORS=["auto","left","center","right"]
STANDARD_TITLE_YANCHORS=["top","middle","bottom"]
STANDARD_LEGEND_XANCHORS=["auto","left","center","right"]
STANDARD_LEGEND_YANCHORS=["auto","top","middle","bottom"]
STANDARD_TRACEORDERS=["reversed", "grouped", "reversed+grouped", "normal"]
STANDARD_SIDES=["top","left","top left"]
STANDARD_SPIKEMODES=["toaxis", "across", "marker","toaxis+across","toaxis+marker","across+marker","toaxis+across+marker"]
STANDARD_CURVETYPES=["kde","normal"]
def figure_defaults():
    """Return the default session arguments for the iHistogram app.

    The dictionary mixes three kinds of entries:

    * scalar defaults for single settings, mostly kept as strings because the
      values round-trip through HTML form fields (e.g. ``"fig_width"``),
    * catalogue entries (plural names such as ``"colors"`` or ``"histnorms"``)
      pointing at the module-level option lists that feed the UI drop-downs,
    * container defaults such as ``"groups_settings"``.

    Returns:
        dict: a fresh, mutable mapping of argument name to default value.
    """
    # The original wrote this literal with a backslash continuation on every
    # line; inside braces those are redundant and fragile (any character after
    # the backslash is a SyntaxError), so they have been removed. All keys and
    # values are unchanged.
    plot_arguments = {
        # --- figure and title -------------------------------------------
        "fig_width": "600",
        "fig_height": "600",
        "title": "iHistogram",
        "title_fontsize": "20",
        "title_fontfamily": "Default",
        "title_fontcolor": "None",
        "titles": "20",
        # --- histogram / distplot options --------------------------------
        "kde": ".off",
        "curve_type": "kde",
        "curve_types": STANDARD_CURVETYPES,
        "kde_histnorm": "probability density",
        "kde_histnorms": ["probability density", "probability"],
        "show_hist": ".off",
        "show_curve": ".on",
        "show_rug": ".off",
        "rug_text": "",
        "bin_size": "1",
        "opacity": 0.8,
        "paper_bgcolor": "white",
        "plot_bgcolor": "white",
        "hoverinfos": STANDARD_HOVERINFO,
        "hover_alignments": STANDARD_ALIGNMENTS,
        "histfuncs": STANDARD_HISTFUNC,
        # --- title placement ---------------------------------------------
        "references": STANDARD_REFERENCES,
        "xref": "container",
        "yref": "container",
        "x": "0.5",
        "y": "0.9",
        "title_xanchors": STANDARD_TITLE_XANCHORS,
        "title_yanchors": STANDARD_TITLE_YANCHORS,
        "title_xanchor": "auto",
        "title_yanchor": "auto",
        "show_legend": "on",
        # --- error bars ---------------------------------------------------
        "errorbar": ".off",
        "errorbar_value": "10",
        "errorbar_type": "percent",
        "errorbar_types": STANDARD_ERRORBAR_TYPES,
        "errorbar_symmetric": ".off",
        "errorbar_color": "darkgrey",
        "errorbar_width": "2",
        "errorbar_thickness": "2",
        # --- axes and ticks ----------------------------------------------
        "axis_line_width": 1.0,
        "axis_line_color": "lightgrey",
        "ticks_line_width": 1.0,
        "ticks_color": "lightgrey",
        # --- data selection ----------------------------------------------
        "cols": [],
        "groups": [],
        "vals": [],
        "groups_settings": dict(),
        "log_scale": ".off",
        "fonts": STANDARD_FONTS,
        "cumulative_directions": STANDARD_CUMULATIVE_DIRECTIONS,
        "colors": STANDARD_COLORS,
        "histnorms": STANDARD_HISTNORMS,
        "barmode": "overlay",
        "barmodes": STANDARD_BARMODES,
        "histtype_value": "bar",
        "linestyles": LINE_STYLES,
        "linestyle_value": "",
        "orientations": STANDARD_ORIENTATIONS,
        "fontsizes": STANDARD_SIZES,
        "xlabel_size": STANDARD_SIZES,
        "ylabel_size": STANDARD_SIZES,
        "xlabel": "",
        "ylabel": "",
        "label_fontfamily": "Default",
        "label_fontsize": "15",
        "label_fontcolor": "None",
        "xlabels": "14",
        "ylabels": "14",
        "left_axis": ".on",
        "right_axis": ".on",
        "upper_axis": ".on",
        "lower_axis": ".on",
        "tick_left_axis": ".on",
        "tick_right_axis": ".off",
        "tick_upper_axis": ".off",
        "tick_lower_axis": ".on",
        "ticks_direction": TICKS_DIRECTIONS,
        "ticks_direction_value": TICKS_DIRECTIONS[1],
        "ticks_length": "6.0",
        "xticks_fontsize": "14",
        "yticks_fontsize": "14",
        "xticks_rotation": "0",
        "yticks_rotation": "0",
        "x_lower_limit": "",
        "y_lower_limit": "",
        "x_upper_limit": "",
        "y_upper_limit": "",
        "maxxticks": "",
        "maxyticks": "",
        # --- spikes and grid ---------------------------------------------
        "spikes": ["None", "both", "x", "y"],
        "spikes_value": "None",
        "spikes_color": "None",
        "spikes_thickness": "3.0",
        "dashes": LINE_STYLES,
        "spikes_dash": "dash",
        "spikes_mode": "toaxis",
        "spikes_modes": STANDARD_SPIKEMODES,
        "grid": ["None", "both", "x", "y"],
        "grid_value": "None",
        "grid_width": "1",
        "grid_color": "lightgrey",
        # --- legend -------------------------------------------------------
        "legend_title": "",
        "legend_bgcolor": "None",
        "legend_borderwidth": "0",
        "legend_bordercolor": "None",
        "legend_fontfamily": "Default",
        "legend_fontsize": "12",
        "legend_fontcolor": "None",
        "legend_title_fontfamily": "Default",
        "legend_title_fontsize": "12",
        "legend_title_fontcolor": "None",
        "legend_orientation": "vertical",
        "traceorders": STANDARD_TRACEORDERS,
        "legend_traceorder": "normal",
        "legend_tracegroupgap": "10",
        "legend_y": "1",
        "legend_x": "1.02",
        "legend_xanchor": "left",
        "legend_yanchor": "auto",
        "legend_xanchors": STANDARD_LEGEND_XANCHORS,
        "legend_yanchors": STANDARD_LEGEND_YANCHORS,
        "legend_valign": "middle",
        "valignments": STANDARD_VERTICAL_ALIGNMENTS,
        "sides": STANDARD_SIDES,
        "legend_side": "left",
        # --- download / session ------------------------------------------
        "download_format": ["png", "pdf", "svg"],
        "downloadf": "pdf",
        "downloadn": "ihistogram",
        "session_downloadn": "MySession.ihistogram.plot",
        "inputsessionfile": "Select file..",
        "session_argumentsn": "MyArguments.ihistogram.plot",
        "inputargumentsfile": "Select file..",
    }
    return plot_arguments
f72502fa59d5dbbaf1359af738eaf27afc125199 | 2,374 | py | Python | .history/spider/pokemon_spider_20201213130808.py | KustomApe/ksie | d6f97d0298d04d06788563546c66ff50c6bb2d31 | [
"MIT"
] | 1 | 2021-12-11T04:50:25.000Z | 2021-12-11T04:50:25.000Z | .history/spider/pokemon_spider_20201213130808.py | KustomApe/ksie | d6f97d0298d04d06788563546c66ff50c6bb2d31 | [
"MIT"
] | null | null | null | .history/spider/pokemon_spider_20201213130808.py | KustomApe/ksie | d6f97d0298d04d06788563546c66ff50c6bb2d31 | [
"MIT"
] | null | null | null | from selenium import webdriver
import pandas as pd
import time
"""[注意事項]
robot.txtを必ず読んで、ルールに沿った形でクローリングするように気をつけてください。
あくまで自己責任でお願いできればと思います。
"""
"""[Initial Setting]
初期設定
"""
options = webdriver.ChromeOptions()
options.add_argument('--headeless')
options.add_argument('--disable-gpu')
options.add_argument('--lang-ja')
browser = webdriver.Chrome(chrome_options=options, executable_path='./chromedriver')
df = pd.DataFrame(columns=['ranking', 'name', 'image'])
url = 'https://swsh.pokedb.tokyo/pokemon/list/'
"""[CSS Selector Setting]
CSSセレクターの設定
"""
PAGER_NEXT = "li.select-page.arrow a[rel='next']"
POSTS = ".product-item-list__item"
RANKING = ".pokemon-ranking-rank"
NAME = ".product-item-list__item-name"
IMAGE = ".product-item-list__item-image img"
PRICE = ".product-item-list__item-price"
CATEGORY = ".product-item-list__item-category"
CAR = ".product-item-list__item-car-name"
"""[Activate Section]
実行部分
"""
browser.get(url)
while True: #Continue until getting the last page.
if len(browser.find_elements_by_css_selector(PAGER_NEXT)) > 0:
print('Starting to get posts...')
posts = browser.find_elements_by_css_selector(POSTS)
print(len(posts))
for post in posts:
try:
name = post.find_element_by_css_selector(PRODUCT_NAME).text
print(name)
thumbnailURL = post.find_element_by_css_selector(IMAGE).get_attribute('src')
print(thumbnailURL)
price = post.find_element_by_css_selector(PRICE).text
print(price)
category = post.find_element_by_css_selector(CATEGORY).text
print(category)
car = post.find_element_by_css_selector(CAR).text
print(car)
se = pd.Series([name, thumbnailURL, price, category, car], ['name', 'image', 'price', 'category', 'car'])
df = df.append(se, ignore_index=True)
except Exception as e:
print(e)
btn = browser.find_element_by_css_selector(PAGER_NEXT).get_attribute('href')
print('next url:{}'.format(btn))
time.sleep(3)
browser.get(btn)
print('Moving to next page.')
else:
print('No pager exist anymore...')
break
print('Finished Crawling. Writing out to CSV file...')
df.to_csv('car_parts.csv')
print('Done')
| 33.43662 | 121 | 0.655013 | from selenium import webdriver
import pandas as pd
import time
options = webdriver.ChromeOptions()
options.add_argument('--headeless')
options.add_argument('--disable-gpu')
options.add_argument('--lang-ja')
browser = webdriver.Chrome(chrome_options=options, executable_path='./chromedriver')
df = pd.DataFrame(columns=['ranking', 'name', 'image'])
url = 'https://swsh.pokedb.tokyo/pokemon/list/'
PAGER_NEXT = "li.select-page.arrow a[rel='next']"
POSTS = ".product-item-list__item"
RANKING = ".pokemon-ranking-rank"
NAME = ".product-item-list__item-name"
IMAGE = ".product-item-list__item-image img"
PRICE = ".product-item-list__item-price"
CATEGORY = ".product-item-list__item-category"
CAR = ".product-item-list__item-car-name"
browser.get(url)
while True:
if len(browser.find_elements_by_css_selector(PAGER_NEXT)) > 0:
print('Starting to get posts...')
posts = browser.find_elements_by_css_selector(POSTS)
print(len(posts))
for post in posts:
try:
name = post.find_element_by_css_selector(PRODUCT_NAME).text
print(name)
thumbnailURL = post.find_element_by_css_selector(IMAGE).get_attribute('src')
print(thumbnailURL)
price = post.find_element_by_css_selector(PRICE).text
print(price)
category = post.find_element_by_css_selector(CATEGORY).text
print(category)
car = post.find_element_by_css_selector(CAR).text
print(car)
se = pd.Series([name, thumbnailURL, price, category, car], ['name', 'image', 'price', 'category', 'car'])
df = df.append(se, ignore_index=True)
except Exception as e:
print(e)
btn = browser.find_element_by_css_selector(PAGER_NEXT).get_attribute('href')
print('next url:{}'.format(btn))
time.sleep(3)
browser.get(btn)
print('Moving to next page.')
else:
print('No pager exist anymore...')
break
print('Finished Crawling. Writing out to CSV file...')
df.to_csv('car_parts.csv')
print('Done')
| true | true |
f72503d39b41bc560c31dfc0d1965fa96e277d2c | 2,467 | py | Python | data/image_folder.py | OnizukaLab/pytorch-CycleGAN-and-pix2pix | 95b8dfe8bba43ae5ec9d7b299107fc155e7939c0 | [
"BSD-3-Clause"
] | null | null | null | data/image_folder.py | OnizukaLab/pytorch-CycleGAN-and-pix2pix | 95b8dfe8bba43ae5ec9d7b299107fc155e7939c0 | [
"BSD-3-Clause"
] | 2 | 2019-07-30T09:02:40.000Z | 2019-08-01T11:36:44.000Z | data/image_folder.py | OnizukaLab/pytorch-CycleGAN-and-pix2pix | 95b8dfe8bba43ae5ec9d7b299107fc155e7939c0 | [
"BSD-3-Clause"
] | null | null | null | """A modified image folder class
We modify the official PyTorch image folder (https://github.com/pytorch/vision/blob/master/torchvision/datasets/folder.py)
so that this class can load images from both current directory and its subdirectories.
"""
import torch.utils.data as data
from PIL import Image
import os
import os.path
import re
# File-name extensions (both case variants) that the loaders treat as images.
IMG_EXTENSIONS = [
    '.jpg', '.JPG', '.jpeg', '.JPEG',
    '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP',
]

# Matches "<anything><number>.<image extension>" and captures the number.
PATTERN = re.compile(r".*?([0-9]+)\.(jpg|JPG|jpeg|JPEG|png|PNG|ppm|PPM|bmp|BMP)$")


def is_image_file(filename):
    """Return True when *filename* carries one of the known image extensions."""
    return filename.endswith(tuple(IMG_EXTENSIONS))


def make_dataset(dir, max_dataset_size=float("inf")):
    """Recursively collect image paths under *dir*, capped at *max_dataset_size*."""
    assert os.path.isdir(dir), '%s is not a valid directory' % dir
    images = []
    for root, _, fnames in sorted(os.walk(dir)):
        images.extend(
            os.path.join(root, fname) for fname in fnames if is_image_file(fname)
        )
    return images[:min(max_dataset_size, len(images))]


def make_numbering_dataset(dir, max_dataset_size=float("inf")):
    """Recursively collect ``(index, path)`` pairs for images whose names end in a number."""
    assert os.path.isdir(dir), '%s is not a valid directory' % dir
    images = []
    for root, _, fnames in sorted(os.walk(dir)):
        for fname in fnames:
            match = PATTERN.match(fname)
            if match is None:
                continue
            images.append((int(match.group(1)), os.path.join(root, fname)))
    return images[:min(max_dataset_size, len(images))]
def default_loader(path):
    """Open the image at *path* with PIL and force-convert it to 3-channel RGB."""
    image = Image.open(path)
    return image.convert('RGB')
class ImageFolder(data.Dataset):
    """Dataset over every image found under ``root``, subdirectories included.

    Unlike the stock torchvision folder dataset, no class-subdirectory layout
    is required; items are loaded with *loader* and optionally transformed.
    """

    def __init__(self, root, transform=None, return_paths=False,
                 loader=default_loader):
        imgs = make_dataset(root)
        if not imgs:
            raise RuntimeError("Found 0 images in: " + root + "\n"
                               "Supported image extensions are: " +
                               ",".join(IMG_EXTENSIONS))
        self.root = root
        self.imgs = imgs
        self.transform = transform
        self.return_paths = return_paths
        self.loader = loader

    def __getitem__(self, index):
        """Return the transformed image at *index* (plus its path if requested)."""
        path = self.imgs[index]
        img = self.loader(path)
        if self.transform is not None:
            img = self.transform(img)
        if self.return_paths:
            return img, path
        return img

    def __len__(self):
        """Number of images discovered under the root directory."""
        return len(self.imgs)
| 29.369048 | 122 | 0.602756 |
import torch.utils.data as data
from PIL import Image
import os
import os.path
import re
IMG_EXTENSIONS = [
'.jpg', '.JPG', '.jpeg', '.JPEG',
'.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP',
]
PATTERN = re.compile(r".*?([0-9]+)\.(jpg|JPG|jpeg|JPEG|png|PNG|ppm|PPM|bmp|BMP)$")
def is_image_file(filename):
return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)
def make_dataset(dir, max_dataset_size=float("inf")):
images = []
assert os.path.isdir(dir), '%s is not a valid directory' % dir
for root, _, fnames in sorted(os.walk(dir)):
for fname in fnames:
if is_image_file(fname):
path = os.path.join(root, fname)
images.append(path)
return images[:min(max_dataset_size, len(images))]
def make_numbering_dataset(dir, max_dataset_size=float("inf")):
images = []
assert os.path.isdir(dir), '%s is not a valid directory' % dir
for root, _, fnames in sorted(os.walk(dir)):
for fname in fnames:
m = PATTERN.match(fname)
if m is not None:
idx = int(m.group(1))
path = os.path.join(root, fname)
images.append((idx, path))
return images[:min(max_dataset_size, len(images))]
def default_loader(path):
return Image.open(path).convert('RGB')
class ImageFolder(data.Dataset):
def __init__(self, root, transform=None, return_paths=False,
loader=default_loader):
imgs = make_dataset(root)
if len(imgs) == 0:
raise(RuntimeError("Found 0 images in: " + root + "\n"
"Supported image extensions are: " +
",".join(IMG_EXTENSIONS)))
self.root = root
self.imgs = imgs
self.transform = transform
self.return_paths = return_paths
self.loader = loader
def __getitem__(self, index):
path = self.imgs[index]
img = self.loader(path)
if self.transform is not None:
img = self.transform(img)
if self.return_paths:
return img, path
else:
return img
def __len__(self):
return len(self.imgs)
| true | true |
f72504fc16114679445e08709bb22521dfd7fe3a | 233 | py | Python | python/euler1_7.py | gregorymorrison/euler1 | 171d98a60e62041bc49882336cd65687df810b6c | [
"MIT"
] | 1 | 2019-03-13T11:11:50.000Z | 2019-03-13T11:11:50.000Z | python/euler1_7.py | gdm9000/euler1 | 171d98a60e62041bc49882336cd65687df810b6c | [
"MIT"
] | null | null | null | python/euler1_7.py | gdm9000/euler1 | 171d98a60e62041bc49882336cd65687df810b6c | [
"MIT"
] | 1 | 2018-02-10T00:27:17.000Z | 2018-02-10T00:27:17.000Z | #!/usr/bin/python
euler1 = dict()
euler1[range] = 1000
solver = lambda x: sum(i for i in xrange(x) if i%3==0 or i%5==0)
euler1[solver] = solver
result = euler1[solver] (euler1[range])
euler1[result] = result
print euler1[result] | 17.923077 | 64 | 0.682403 |
euler1 = dict()
euler1[range] = 1000
solver = lambda x: sum(i for i in xrange(x) if i%3==0 or i%5==0)
euler1[solver] = solver
result = euler1[solver] (euler1[range])
euler1[result] = result
print euler1[result] | false | true |
f725054c84988206eb2120605f89dfc44d68a15d | 8,431 | py | Python | recipes/libxslt/all/conanfile.py | aapng/conan-center-index | d68a8fbb938402a5a53fa6b0214c49ccf878f8a9 | [
"MIT"
] | null | null | null | recipes/libxslt/all/conanfile.py | aapng/conan-center-index | d68a8fbb938402a5a53fa6b0214c49ccf878f8a9 | [
"MIT"
] | 1 | 2021-05-12T10:46:25.000Z | 2021-05-13T06:12:41.000Z | recipes/libxslt/all/conanfile.py | aapng/conan-center-index | d68a8fbb938402a5a53fa6b0214c49ccf878f8a9 | [
"MIT"
] | 2 | 2020-10-24T00:42:55.000Z | 2021-01-26T09:01:14.000Z | import glob
import os
from conans import ConanFile, tools, AutoToolsBuildEnvironment, VisualStudioBuildEnvironment
class LibxsltConan(ConanFile):
    """Conan recipe building libxslt (and libexslt) on top of libxml2.

    MSVC builds use the upstream ``win32/configure.js`` + nmake flow; every
    other platform goes through the autotools ``configure`` script.
    """
    name = "libxslt"
    url = "https://github.com/conan-io/conan-center-index"
    description = "libxslt is a software library implementing XSLT processor, based on libxml2"
    topics = ("XSLT", "processor")
    homepage = "https://xmlsoft.org"
    license = "MIT"
    settings = "os", "arch", "compiler", "build_type"
    # The feature toggles (debugger/crypto/profiler/plugins) are forwarded to
    # upstream configure switches in _build_windows / _build_with_configure.
    default_options = {'shared': False,
                       'fPIC': True,
                       "debugger": False,
                       "crypto": False,
                       "profiler": False,
                       "plugins": False}
    options = {name: [True, False] for name in default_options.keys()}
    # Every option except shared/fPIC is forwarded to configure generically.
    _option_names = [name for name in default_options.keys() if name not in ["shared", "fPIC"]]
    _source_subfolder = "source_subfolder"
    exports_sources = "patches/**"

    def requirements(self):
        """Declare the hard dependency on libxml2."""
        self.requires("libxml2/2.9.10")

    @property
    def _is_msvc(self):
        # True when building with the Visual Studio toolchain (nmake path).
        return self.settings.compiler == 'Visual Studio'

    @property
    def _full_source_subfolder(self):
        # Absolute path of the unpacked libxslt sources.
        return os.path.join(self.source_folder, self._source_subfolder)

    def source(self):
        """Fetch the release tarball and normalise its directory name."""
        tools.get(**self.conan_data["sources"][self.version])
        os.rename("libxslt-{0}".format(self.version), self._source_subfolder)

    def config_options(self):
        if self.settings.os == "Windows":
            # fPIC has no meaning on Windows.
            del self.options.fPIC

    def configure(self):
        # C-only library: drop the C++ standard/runtime-library settings.
        del self.settings.compiler.libcxx
        del self.settings.compiler.cppstd

    def _patch_sources(self):
        # Apply the patches exported next to the recipe (see exports_sources).
        for patch in self.conan_data["patches"][self.version]:
            tools.patch(**patch)

    def build(self):
        """Patch the sources, then dispatch to the MSVC or autotools build."""
        self._patch_sources()
        if self._is_msvc:
            self._build_windows()
        else:
            self._build_with_configure()

    def _build_windows(self):
        """Configure via cscript/configure.js, then build+install with nmake."""
        with tools.chdir(os.path.join(self._full_source_subfolder, 'win32')):
            debug = "yes" if self.settings.build_type == "Debug" else "no"
            static = "no" if self.options.shared else "yes"
            with tools.vcvars(self.settings):
                args = ["cscript",
                        "configure.js",
                        "compiler=msvc",
                        "prefix=%s" % self.package_folder,
                        "cruntime=/%s" % self.settings.compiler.runtime,
                        "debug=%s" % debug,
                        "static=%s" % static,
                        'include="%s"' % ";".join(self.deps_cpp_info.include_paths),
                        'lib="%s"' % ";".join(self.deps_cpp_info.lib_paths),
                        'iconv=no',
                        'xslt_debug=no']
                for name in self._option_names:
                    # configure.js names the plugins option "modules".
                    cname = {"plugins": "modules"}.get(name, name)
                    value = getattr(self.options, name)
                    value = "yes" if value else "no"
                    args.append("%s=%s" % (cname, value))
                configure_command = ' '.join(args)
                self.output.info(configure_command)
                self.run(configure_command)

            # Fix library names because they can be not just zlib.lib
            def format_libs(package):
                # Join a dependency's libs and system_libs into an nmake LIBS
                # value, ensuring each entry carries the .lib suffix.
                libs = []
                for lib in self.deps_cpp_info[package].libs:
                    libname = lib
                    if not libname.endswith('.lib'):
                        libname += '.lib'
                    libs.append(libname)
                for lib in self.deps_cpp_info[package].system_libs:
                    libname = lib
                    if not libname.endswith('.lib'):
                        libname += '.lib'
                    libs.append(libname)
                return ' '.join(libs)

            def fix_library(option, package, old_libname):
                # Swap a hard-coded lib name in Makefile.msvc for the real one.
                if option:
                    tools.replace_in_file("Makefile.msvc",
                                          "LIBS = %s" % old_libname,
                                          "LIBS = %s" % format_libs(package))

            if "icu" in self.deps_cpp_info.deps:
                fix_library(True, 'icu', 'wsock32.lib')

            tools.replace_in_file("Makefile.msvc", "libxml2.lib", format_libs("libxml2"))
            tools.replace_in_file("Makefile.msvc", "libxml2_a.lib", format_libs("libxml2"))

            with tools.environment_append(VisualStudioBuildEnvironment(self).vars):
                self.run("nmake /f Makefile.msvc install")

    def _build_with_configure(self):
        """Run the autotools configure / make install flow."""
        env_build = AutoToolsBuildEnvironment(self, win_bash=tools.os_info.is_windows)
        full_install_subfolder = tools.unix_path(self.package_folder)
        # fix rpath
        if self.settings.os == "Macos":
            tools.replace_in_file(os.path.join(self._full_source_subfolder, "configure"), r"-install_name \$rpath/", "-install_name ")
        configure_args = ['--with-python=no', '--prefix=%s' % full_install_subfolder]
        if self.options.shared:
            configure_args.extend(['--enable-shared', '--disable-static'])
        else:
            configure_args.extend(['--enable-static', '--disable-shared'])
        # Point configure at the xml2-config script of the libxml2 dependency.
        xml_config = tools.unix_path(self.deps_cpp_info["libxml2"].rootpath) + "/bin/xml2-config"
        configure_args.append('XML_CONFIG=%s' % xml_config)
        for name in self._option_names:
            value = getattr(self.options, name)
            value = ("--with-%s" % name) if value else ("--without-%s" % name)
            configure_args.append(value)
        # Disable --build when building for iPhoneSimulator. The configure script halts on
        # not knowing if it should cross-compile.
        build = None
        if self.settings.os == "iOS" and self.settings.arch == "x86_64":
            build = False
        env_build.configure(args=configure_args, build=build, configure_dir=self._full_source_subfolder)
        env_build.make(args=["install", "V=1"])

    def package(self):
        """Copy the license and strip files that should not ship in the package."""
        self.copy("COPYING", src=self._full_source_subfolder, dst="licenses", ignore_case=True, keep_path=False)
        tools.rmdir(os.path.join(self.package_folder, "share"))
        tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
        if self.settings.os == "Windows":
            # There is no way to avoid building the tests, but at least we don't want them in the package
            for prefix in ["run", "test"]:
                for test in glob.glob("%s/bin/%s*" % (self.package_folder, prefix)):
                    os.remove(test)
        if self.settings.compiler == "Visual Studio":
            # Drop PDBs and whichever static/shared artefacts were not requested.
            if self.settings.build_type == "Debug":
                os.unlink(os.path.join(self.package_folder, "bin", "libexslt.pdb"))
                os.unlink(os.path.join(self.package_folder, "bin", "libxslt.pdb"))
                os.unlink(os.path.join(self.package_folder, "bin", "xsltproc.pdb"))
            if self.options.shared:
                os.unlink(os.path.join(self.package_folder, "lib", "libxslt_a.lib"))
                os.unlink(os.path.join(self.package_folder, "lib", "libexslt_a.lib"))
            else:
                os.unlink(os.path.join(self.package_folder, "lib", "libxslt.lib"))
                os.unlink(os.path.join(self.package_folder, "lib", "libexslt.lib"))
                os.unlink(os.path.join(self.package_folder, "bin", "libxslt.dll"))
                os.unlink(os.path.join(self.package_folder, "bin", "libexslt.dll"))
        # Remove stray libtool archives if the autotools build produced any.
        for f in "libxslt.la", "libexslt.la":
            la = os.path.join(self.package_folder, 'lib', f)
            if os.path.isfile(la):
                os.unlink(la)

    def package_info(self):
        """Expose link libraries (per-toolchain names) and include directories."""
        self.cpp_info.libs = ['exslt', 'xslt']
        if self._is_msvc:
            # MSVC artefacts are 'lib'-prefixed, with an '_a' suffix when static.
            if self.options.shared:
                self.cpp_info.libs = ['lib%s' % l for l in self.cpp_info.libs]
            else:
                self.cpp_info.libs = ['lib%s_a' % l for l in self.cpp_info.libs]
        self.cpp_info.includedirs.append(os.path.join("include", "libxslt"))
        if self.settings.os == "Linux" or self.settings.os == "Macos":
            self.cpp_info.system_libs.append('m')
        if self.settings.os == "Windows":
            self.cpp_info.system_libs.append('ws2_32')
| 45.572973 | 134 | 0.562448 | import glob
import os
from conans import ConanFile, tools, AutoToolsBuildEnvironment, VisualStudioBuildEnvironment
class LibxsltConan(ConanFile):
name = "libxslt"
url = "https://github.com/conan-io/conan-center-index"
description = "libxslt is a software library implementing XSLT processor, based on libxml2"
topics = ("XSLT", "processor")
homepage = "https://xmlsoft.org"
license = "MIT"
settings = "os", "arch", "compiler", "build_type"
default_options = {'shared': False,
'fPIC': True,
"debugger": False,
"crypto": False,
"profiler": False,
"plugins": False}
options = {name: [True, False] for name in default_options.keys()}
_option_names = [name for name in default_options.keys() if name not in ["shared", "fPIC"]]
_source_subfolder = "source_subfolder"
exports_sources = "patches/**"
def requirements(self):
self.requires("libxml2/2.9.10")
@property
def _is_msvc(self):
return self.settings.compiler == 'Visual Studio'
@property
def _full_source_subfolder(self):
return os.path.join(self.source_folder, self._source_subfolder)
def source(self):
tools.get(**self.conan_data["sources"][self.version])
os.rename("libxslt-{0}".format(self.version), self._source_subfolder)
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
def configure(self):
del self.settings.compiler.libcxx
del self.settings.compiler.cppstd
def _patch_sources(self):
for patch in self.conan_data["patches"][self.version]:
tools.patch(**patch)
def build(self):
self._patch_sources()
if self._is_msvc:
self._build_windows()
else:
self._build_with_configure()
def _build_windows(self):
with tools.chdir(os.path.join(self._full_source_subfolder, 'win32')):
debug = "yes" if self.settings.build_type == "Debug" else "no"
static = "no" if self.options.shared else "yes"
with tools.vcvars(self.settings):
args = ["cscript",
"configure.js",
"compiler=msvc",
"prefix=%s" % self.package_folder,
"cruntime=/%s" % self.settings.compiler.runtime,
"debug=%s" % debug,
"static=%s" % static,
'include="%s"' % ";".join(self.deps_cpp_info.include_paths),
'lib="%s"' % ";".join(self.deps_cpp_info.lib_paths),
'iconv=no',
'xslt_debug=no']
for name in self._option_names:
cname = {"plugins": "modules"}.get(name, name)
value = getattr(self.options, name)
value = "yes" if value else "no"
args.append("%s=%s" % (cname, value))
configure_command = ' '.join(args)
self.output.info(configure_command)
self.run(configure_command)
def format_libs(package):
libs = []
for lib in self.deps_cpp_info[package].libs:
libname = lib
if not libname.endswith('.lib'):
libname += '.lib'
libs.append(libname)
for lib in self.deps_cpp_info[package].system_libs:
libname = lib
if not libname.endswith('.lib'):
libname += '.lib'
libs.append(libname)
return ' '.join(libs)
def fix_library(option, package, old_libname):
if option:
tools.replace_in_file("Makefile.msvc",
"LIBS = %s" % old_libname,
"LIBS = %s" % format_libs(package))
if "icu" in self.deps_cpp_info.deps:
fix_library(True, 'icu', 'wsock32.lib')
tools.replace_in_file("Makefile.msvc", "libxml2.lib", format_libs("libxml2"))
tools.replace_in_file("Makefile.msvc", "libxml2_a.lib", format_libs("libxml2"))
with tools.environment_append(VisualStudioBuildEnvironment(self).vars):
self.run("nmake /f Makefile.msvc install")
def _build_with_configure(self):
env_build = AutoToolsBuildEnvironment(self, win_bash=tools.os_info.is_windows)
full_install_subfolder = tools.unix_path(self.package_folder)
if self.settings.os == "Macos":
tools.replace_in_file(os.path.join(self._full_source_subfolder, "configure"), r"-install_name \$rpath/", "-install_name ")
configure_args = ['--with-python=no', '--prefix=%s' % full_install_subfolder]
if self.options.shared:
configure_args.extend(['--enable-shared', '--disable-static'])
else:
configure_args.extend(['--enable-static', '--disable-shared'])
xml_config = tools.unix_path(self.deps_cpp_info["libxml2"].rootpath) + "/bin/xml2-config"
configure_args.append('XML_CONFIG=%s' % xml_config)
for name in self._option_names:
value = getattr(self.options, name)
value = ("--with-%s" % name) if value else ("--without-%s" % name)
configure_args.append(value)
build = None
if self.settings.os == "iOS" and self.settings.arch == "x86_64":
build = False
env_build.configure(args=configure_args, build=build, configure_dir=self._full_source_subfolder)
env_build.make(args=["install", "V=1"])
def package(self):
self.copy("COPYING", src=self._full_source_subfolder, dst="licenses", ignore_case=True, keep_path=False)
tools.rmdir(os.path.join(self.package_folder, "share"))
tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
if self.settings.os == "Windows":
for prefix in ["run", "test"]:
for test in glob.glob("%s/bin/%s*" % (self.package_folder, prefix)):
os.remove(test)
if self.settings.compiler == "Visual Studio":
if self.settings.build_type == "Debug":
os.unlink(os.path.join(self.package_folder, "bin", "libexslt.pdb"))
os.unlink(os.path.join(self.package_folder, "bin", "libxslt.pdb"))
os.unlink(os.path.join(self.package_folder, "bin", "xsltproc.pdb"))
if self.options.shared:
os.unlink(os.path.join(self.package_folder, "lib", "libxslt_a.lib"))
os.unlink(os.path.join(self.package_folder, "lib", "libexslt_a.lib"))
else:
os.unlink(os.path.join(self.package_folder, "lib", "libxslt.lib"))
os.unlink(os.path.join(self.package_folder, "lib", "libexslt.lib"))
os.unlink(os.path.join(self.package_folder, "bin", "libxslt.dll"))
os.unlink(os.path.join(self.package_folder, "bin", "libexslt.dll"))
for f in "libxslt.la", "libexslt.la":
la = os.path.join(self.package_folder, 'lib', f)
if os.path.isfile(la):
os.unlink(la)
def package_info(self):
self.cpp_info.libs = ['exslt', 'xslt']
if self._is_msvc:
if self.options.shared:
self.cpp_info.libs = ['lib%s' % l for l in self.cpp_info.libs]
else:
self.cpp_info.libs = ['lib%s_a' % l for l in self.cpp_info.libs]
self.cpp_info.includedirs.append(os.path.join("include", "libxslt"))
if self.settings.os == "Linux" or self.settings.os == "Macos":
self.cpp_info.system_libs.append('m')
if self.settings.os == "Windows":
self.cpp_info.system_libs.append('ws2_32')
| true | true |
f72505f9706d238ac6c8305129e9adec3227f5ac | 2,455 | py | Python | Software for Proof of Work/SourceCode.py | fkerem/Cryptocurrency-Blockchain | 965268a09a6f8b3e700e8bbc741e49a4d54805c6 | [
"MIT"
] | null | null | null | Software for Proof of Work/SourceCode.py | fkerem/Cryptocurrency-Blockchain | 965268a09a6f8b3e700e8bbc741e49a4d54805c6 | [
"MIT"
] | null | null | null | Software for Proof of Work/SourceCode.py | fkerem/Cryptocurrency-Blockchain | 965268a09a6f8b3e700e8bbc741e49a4d54805c6 | [
"MIT"
] | null | null | null | """
Readme.txt:
Required modules: random, sys, hashlib, sha3
Please read the comments below for further explanation.
"""
from random import *
import sys
import hashlib
# hashlib only ships the sha3_* constructors from Python 3.6 onward; on older
# interpreters this imports the `sha3` backport package — presumably so that
# `hashlib.sha3_256` below still resolves (TODO: confirm the backport
# registers itself with hashlib).
if sys.version_info < (3, 6):
    import sha3
def serialnoncegenerator():
    """Draw a uniform random 128-bit integer and return it as a decimal string."""
    upper_bound = 2 ** 128 - 1
    return str(randint(0, upper_bound))
def payee():
    """Return a random 10-character payee name of digits and capital letters.

    Uses rejection sampling over the ASCII range 48-90, skipping the
    punctuation run ':'..'@' that sits between '9' and 'A'.
    """
    name_chars = []
    while len(name_chars) < 10:
        code = randint(48, 90)
        if 57 < code < 65:  # ':' .. '@' — redraw
            continue
        name_chars.append(chr(code))
    return "".join(name_chars)
def satoshi():
    """Return a random transaction amount between 1 and 999 Satoshi, as a string."""
    amount = randint(1, 999)
    return str(amount)
def PoWGenerator(transaction, difficulty=6):
    """Mine a valid Proof of Work for *transaction*.

    Random nonces are appended to the transaction text until its SHA3-256
    hex digest starts with *difficulty* zero digits.

    Args:
        transaction: Transaction text without the Nonce / PoW lines.
        difficulty: Required number of leading zero hex digits (default 6,
            matching the original hard-coded "000000" target; kept as a
            parameter so low difficulties can be used in tests).

    Returns:
        tuple: ``(PoW, new_tr)`` — the winning hex digest and the transaction
        text with the ``Nonce:`` and ``Proof of Work:`` lines appended.
    """
    target_prefix = "0" * difficulty
    while True:
        nonce = serialnoncegenerator()
        new_tr = transaction + "Nonce: " + nonce + "\n"
        # BUG FIX: hashlib.sha3_256 requires bytes on Python 3; the original
        # passed a str, which raises TypeError there (it only worked where
        # str is bytes, i.e. Python 2 with the sha3 backport).
        PoW = hashlib.sha3_256(new_tr.encode("utf-8")).hexdigest()
        if PoW[:difficulty] == target_prefix:
            break  # found a digest with the required number of leading zeros
    new_tr = new_tr + "Proof of Work: " + PoW + "\n"
    return (PoW, new_tr)
def trWoutLastTwoLines(prevHash):
    """Build a transaction text that still lacks its Nonce and Proof of Work lines.

    Serial number, payee and amount are drawn at random; *prevHash* chains the
    transaction to its predecessor.
    """
    lines = [
        "*** Bitcoin transaction ***",
        "Serial number: " + serialnoncegenerator(),
        "Payer: User Name",
        "Payee: " + payee(),
        "Amount: " + satoshi() + " Satoshi",
        "Previous hash in the chain: " + prevHash,
    ]
    return "\n".join(lines) + "\n"
result = []
prevHash = ""  # hash of the previous transaction in the chain
for i in range(10):  # build a chain of 10 transactions
    if i == 0:
        prevHash = "First transaction"
    # Draft the transaction body, then mine its Proof of Work; the winning
    # digest becomes the "previous hash" of the next transaction.
    transaction = trWoutLastTwoLines(prevHash)
    prevHash, transaction = PoWGenerator(transaction)
    result.append(transaction)

# Write the full chain out. BUG FIX: use a context manager so the file is
# closed even if a write raises (the original leaked the handle on error).
with open("LongestChain.txt", "w") as myFile:
    for tra in result:
        myFile.write(tra)
| 33.175676 | 129 | 0.61833 |
from random import *
import sys
import hashlib
if sys.version_info < (3, 6):
import sha3
def serialnoncegenerator():
serial = str(randint(0, 2**128 - 1))
return serial
def payee():
payee = ""
for i in range(10):
num = randint(48, 90)
while(num > 57 and num < 65):
num = randint(48, 90)
payee += chr(num)
return payee
def satoshi():
return str(randint(1, 999))
def PoWGenerator(transaction):
new_tr = ""
PoW = ""
while True:
nonce = serialnoncegenerator()
noncestr = "Nonce: " + nonce + "\n"
new_tr = transaction + noncestr
PoW = hashlib.sha3_256(new_tr).hexdigest()
if PoW[:6] == "000000":
break
trPoW = "Proof of Work: " + PoW + "\n"
new_tr = new_tr + trPoW
return (PoW,new_tr)
def trWoutLastTwoLines(prevHash):
transaction = \
"*** Bitcoin transaction ***" + "\n" + \
"Serial number: " + serialnoncegenerator() + "\n" + \
"Payer: User Name" + "\n" + \
"Payee: " + payee() + "\n" + \
"Amount: " + satoshi() + " Satoshi" + "\n" + \
"Previous hash in the chain: " + prevHash + "\n"
return transaction
result = []
prevHash = ""
for i in range(10):
if i == 0:
prevHash = "First transaction"
transaction = trWoutLastTwoLines(prevHash)
prevHash, transaction = PoWGenerator(transaction)
result.append(transaction)
myFile = open("LongestChain.txt", "w")
for tra in result:
myFile.write(tra)
myFile.close()
| true | true |
f725069e16f136f40e31fccafac67c140404d6b4 | 59,971 | py | Python | flash/core/data/data_module.py | sumanmichael/lightning-flash | 4c69c1bf49fa74d0f2fdb9c4dbdcdfd5942352db | [
"Apache-2.0"
] | null | null | null | flash/core/data/data_module.py | sumanmichael/lightning-flash | 4c69c1bf49fa74d0f2fdb9c4dbdcdfd5942352db | [
"Apache-2.0"
] | null | null | null | flash/core/data/data_module.py | sumanmichael/lightning-flash | 4c69c1bf49fa74d0f2fdb9c4dbdcdfd5942352db | [
"Apache-2.0"
] | 1 | 2021-07-14T09:17:46.000Z | 2021-07-14T09:17:46.000Z | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import platform
from typing import Any, Callable, Collection, Dict, Iterable, List, Optional, Sequence, Tuple, TYPE_CHECKING, Union
import numpy as np
import pytorch_lightning as pl
import torch
from pytorch_lightning.trainer.states import RunningStage
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from torch.utils.data import DataLoader, Dataset
from torch.utils.data.dataset import IterableDataset, Subset
from torch.utils.data.sampler import Sampler
import flash
from flash.core.data.auto_dataset import BaseAutoDataset, IterableAutoDataset
from flash.core.data.base_viz import BaseVisualization
from flash.core.data.callback import BaseDataFetcher
from flash.core.data.data_pipeline import DataPipeline, DefaultPreprocess, Postprocess, Preprocess
from flash.core.data.data_source import DataSource, DefaultDataSources
from flash.core.data.splits import SplitDataset
from flash.core.data.utils import _STAGES_PREFIX
from flash.core.utilities.imports import _FIFTYONE_AVAILABLE, requires
if _FIFTYONE_AVAILABLE and TYPE_CHECKING:
from fiftyone.core.collections import SampleCollection
else:
SampleCollection = None
class DataModule(pl.LightningDataModule):
"""A basic DataModule class for all Flash tasks. This class includes references to a
:class:`~flash.core.data.data_source.DataSource`, :class:`~flash.core.data.process.Preprocess`,
:class:`~flash.core.data.process.Postprocess`, and a :class:`~flash.core.data.callback.BaseDataFetcher`.
Args:
train_dataset: Dataset for training. Defaults to None.
val_dataset: Dataset for validating model performance during training. Defaults to None.
test_dataset: Dataset to test model performance. Defaults to None.
predict_dataset: Dataset for predicting. Defaults to None.
data_source: The :class:`~flash.core.data.data_source.DataSource` that was used to create the datasets.
preprocess: The :class:`~flash.core.data.process.Preprocess` to use when constructing the
:class:`~flash.core.data.data_pipeline.DataPipeline`. If ``None``, a
:class:`~flash.core.data.process.DefaultPreprocess` will be used.
postprocess: The :class:`~flash.core.data.process.Postprocess` to use when constructing the
:class:`~flash.core.data.data_pipeline.DataPipeline`. If ``None``, a plain
:class:`~flash.core.data.process.Postprocess` will be used.
data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to attach to the
:class:`~flash.core.data.process.Preprocess`. If ``None``, the output from
:meth:`~flash.core.data.data_module.DataModule.configure_data_fetcher` will be used.
val_split: An optional float which gives the relative amount of the training dataset to use for the validation
dataset.
batch_size: The batch size to be used by the DataLoader. Defaults to 1.
num_workers: The number of workers to use for parallelized loading.
Defaults to None which equals the number of available CPU threads,
or 0 for Windows or Darwin platform.
sampler: A sampler following the :class:`~torch.utils.data.sampler.Sampler` type.
Will be passed to the DataLoader for the training dataset. Defaults to None.
"""
preprocess_cls = DefaultPreprocess
postprocess_cls = Postprocess
def __init__(
    self,
    train_dataset: Optional[Dataset] = None,
    val_dataset: Optional[Dataset] = None,
    test_dataset: Optional[Dataset] = None,
    predict_dataset: Optional[Dataset] = None,
    data_source: Optional[DataSource] = None,
    preprocess: Optional[Preprocess] = None,
    postprocess: Optional[Postprocess] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
) -> None:
    # See the class docstring for the meaning of every argument.
    super().__init__()

    # Use a larger batch size during internal testing when a GPU is present.
    if flash._IS_TESTING and torch.cuda.is_available():
        batch_size = 16

    self._data_source: DataSource = data_source
    self._preprocess: Optional[Preprocess] = preprocess
    self._postprocess: Optional[Postprocess] = postprocess
    self._viz: Optional[BaseVisualization] = None
    # Fall back to the class-configured fetcher when none is supplied.
    self._data_fetcher: Optional[BaseDataFetcher] = data_fetcher or self.configure_data_fetcher()

    # TODO: Preprocess can change
    # Attach the fetcher before any dataloader is built so its callbacks
    # observe every batch.
    self.data_fetcher.attach_to_preprocess(self.preprocess)

    self._train_ds = train_dataset
    self._val_ds = val_dataset
    self._test_ds = test_dataset
    self._predict_ds = predict_dataset

    # Carve a validation split out of the train set only when a split ratio
    # was requested and no explicit validation dataset was given.
    if self._train_ds is not None and (val_split is not None and self._val_ds is None):
        self._train_ds, self._val_ds = self._split_train_val(self._train_ds, val_split)

    # Only expose the dataloader hooks for stages whose datasets exist, so
    # the Lightning Trainer skips the missing stages.
    if self._train_ds:
        self.train_dataloader = self._train_dataloader

    if self._val_ds:
        self.val_dataloader = self._val_dataloader

    if self._test_ds:
        self.test_dataloader = self._test_dataloader

    if self._predict_ds:
        self.predict_dataloader = self._predict_dataloader

    self.batch_size = batch_size

    # TODO: figure out best solution for setting num_workers
    if num_workers is None:
        if platform.system() in ("Darwin", "Windows"):
            # Worker processes are avoided by default on macOS / Windows.
            num_workers = 0
        else:
            num_workers = os.cpu_count()
    self.num_workers = num_workers

    self.sampler = sampler

    # Tag every dataset with its RunningStage for downstream hooks.
    self.set_running_stages()
@property
def train_dataset(self) -> Optional[Dataset]:
    """The dataset used for training, or ``None`` if no train data was provided."""
    return self._train_ds
@property
def val_dataset(self) -> Optional[Dataset]:
    """The dataset used for validation, or ``None`` if no validation data was provided."""
    return self._val_ds
@property
def test_dataset(self) -> Optional[Dataset]:
    """The dataset used for testing, or ``None`` if no test data was provided."""
    return self._test_ds
@property
def predict_dataset(self) -> Optional[Dataset]:
    """The dataset used for predicting, or ``None`` if no predict data was provided."""
    return self._predict_ds
@property
def viz(self) -> BaseVisualization:
    """The visualization attached via the ``viz`` setter, or a freshly configured fetcher.

    NOTE(review): the fallback calls ``configure_data_fetcher()``, which returns a
    ``BaseDataFetcher`` — that is only a ``BaseVisualization`` when a subclass
    overrides ``configure_data_fetcher``; confirm the annotation against callers.
    """
    return self._viz or DataModule.configure_data_fetcher()
@viz.setter
def viz(self, viz: BaseVisualization) -> None:
    # Replace the visualization used by the ``show_*_batch`` helpers.
    self._viz = viz
@staticmethod
def configure_data_fetcher(*args, **kwargs) -> BaseDataFetcher:
    """This function is used to configure a :class:`~flash.core.data.callback.BaseDataFetcher`.

    Override with your custom one.
    """
    # Base implementation ignores all arguments and returns a plain fetcher.
    return BaseDataFetcher()
@property
def data_fetcher(self) -> BaseDataFetcher:
    """The attached :class:`~flash.core.data.callback.BaseDataFetcher`, or a freshly configured one."""
    return self._data_fetcher or DataModule.configure_data_fetcher()
@data_fetcher.setter
def data_fetcher(self, data_fetcher: BaseDataFetcher) -> None:
    # Swap the fetcher; note it is NOT re-attached to the preprocess here.
    self._data_fetcher = data_fetcher
def _reset_iterator(self, stage: str) -> Iterable[Any]:
    """Create a fresh iterator over ``<stage>_dataloader`` and cache it as ``_<stage>_iter``.

    Args:
        stage: the stage prefix (e.g. ``"train"``, ``"val"``).

    Returns:
        The newly created dataloader iterator.
    """
    iter_name = f"_{stage}_iter"
    # num_workers has to be set to 0 to work properly
    saved_num_workers = self.num_workers
    self.num_workers = 0
    try:
        dataloader_fn = getattr(self, f"{stage}_dataloader")
        iterator = iter(dataloader_fn())
    finally:
        # Restore even when dataloader construction raises, so a failed
        # visualization call cannot permanently zero out ``num_workers``
        # (the original left it at 0 on error).
        self.num_workers = saved_num_workers
    setattr(self, iter_name, iterator)
    return iterator
def _show_batch(self, stage: str, func_names: Union[str, List[str]], reset: bool = True) -> None:
    """This function is used to handle transforms profiling for batch visualization.

    Args:
        stage: the stage prefix whose dataloader should be sampled.
        func_names: preprocess hook name(s) whose captured batches get shown.
        reset: when ``True``, the fetcher's captured batches for ``stage`` are
            cleared both before fetching and after showing.
    """
    # don't show in CI
    if os.getenv("FLASH_TESTING", "0") == "1":
        return None
    iter_name = f"_{stage}_iter"

    # Lazily create (and cache) the iterator for this stage's dataloader.
    if not hasattr(self, iter_name):
        self._reset_iterator(stage)

    # list of functions to visualise
    if isinstance(func_names, str):
        func_names = [func_names]

    iter_dataloader = getattr(self, iter_name)
    # Enabling the fetcher makes it record the batches produced below.
    with self.data_fetcher.enable():
        if reset:
            self.data_fetcher.batches[stage] = {}
        try:
            _ = next(iter_dataloader)
        except StopIteration:
            # Dataloader exhausted: rebuild the iterator and fetch again.
            iter_dataloader = self._reset_iterator(stage)
            _ = next(iter_dataloader)
        data_fetcher: BaseVisualization = self.data_fetcher
        data_fetcher._show(stage, func_names)
        if reset:
            # Drop captured batches so repeated calls don't accumulate state.
            self.data_fetcher.batches[stage] = {}
def show_train_batch(self, hooks_names: Union[str, List[str]] = "load_sample", reset: bool = True) -> None:
    """Visualize a batch from the train dataloader."""
    self._show_batch(_STAGES_PREFIX[RunningStage.TRAINING], hooks_names, reset=reset)
def show_val_batch(self, hooks_names: Union[str, List[str]] = "load_sample", reset: bool = True) -> None:
    """Visualize a batch from the validation dataloader."""
    self._show_batch(_STAGES_PREFIX[RunningStage.VALIDATING], hooks_names, reset=reset)
def show_test_batch(self, hooks_names: Union[str, List[str]] = "load_sample", reset: bool = True) -> None:
    """Visualize a batch from the test dataloader."""
    self._show_batch(_STAGES_PREFIX[RunningStage.TESTING], hooks_names, reset=reset)
def show_predict_batch(self, hooks_names: Union[str, List[str]] = "load_sample", reset: bool = True) -> None:
    """Visualize a batch from the predict dataloader."""
    self._show_batch(_STAGES_PREFIX[RunningStage.PREDICTING], hooks_names, reset=reset)
@staticmethod
def get_dataset_attribute(dataset: torch.utils.data.Dataset, attr_name: str, default: Optional[Any] = None) -> Any:
if isinstance(dataset, Subset):
return getattr(dataset.dataset, attr_name, default)
return getattr(dataset, attr_name, default)
@staticmethod
def set_dataset_attribute(dataset: torch.utils.data.Dataset, attr_name: str, value: Any) -> None:
if isinstance(dataset, Subset):
dataset = dataset.dataset
if isinstance(dataset, (Dataset, IterableDataset)):
setattr(dataset, attr_name, value)
def set_running_stages(self):
    """Tag each available dataset with the ``RunningStage`` it belongs to."""
    stage_by_dataset = (
        (self._train_ds, RunningStage.TRAINING),
        (self._val_ds, RunningStage.VALIDATING),
        (self._test_ds, RunningStage.TESTING),
        (self._predict_ds, RunningStage.PREDICTING),
    )
    for dataset, stage in stage_by_dataset:
        if dataset:
            self.set_dataset_attribute(dataset, "running_stage", stage)
def _resolve_collate_fn(self, dataset: Dataset, running_stage: RunningStage) -> Optional[Callable]:
    """Return the worker preprocessor collate function for Flash datasets, else ``None``."""
    if not isinstance(dataset, (BaseAutoDataset, SplitDataset)):
        return None
    return self.data_pipeline.worker_preprocessor(running_stage)
def _train_dataloader(self) -> DataLoader:
    """Build the train dataloader (or delegate to the task when attached to a Trainer)."""
    # ``_train_ds`` may be a factory callable; resolve it to a dataset first.
    train_ds: Dataset = self._train_ds() if isinstance(self._train_ds, Callable) else self._train_ds
    shuffle: bool = False
    collate_fn = self._resolve_collate_fn(train_ds, RunningStage.TRAINING)
    if isinstance(train_ds, IterableAutoDataset):
        # Iterable datasets have no length, so the last batch is never dropped.
        drop_last = False
    else:
        # Drop the last (possibly incomplete) batch only when at least one
        # full batch remains.
        drop_last = len(train_ds) > self.batch_size
    pin_memory = True

    if self.sampler is None:
        # Shuffling only applies to map-style datasets; an explicit sampler
        # takes precedence and disables shuffling.
        shuffle = not isinstance(train_ds, (IterableDataset, IterableAutoDataset))

    if isinstance(getattr(self, "trainer", None), pl.Trainer):
        # When attached to a Trainer, let the LightningModule construct the
        # dataloader from the same settings.
        return self.trainer.lightning_module.process_train_dataset(
            train_ds,
            batch_size=self.batch_size,
            num_workers=self.num_workers,
            pin_memory=pin_memory,
            shuffle=shuffle,
            drop_last=drop_last,
            collate_fn=collate_fn,
            sampler=self.sampler,
        )

    return DataLoader(
        train_ds,
        batch_size=self.batch_size,
        shuffle=shuffle,
        sampler=self.sampler,
        num_workers=self.num_workers,
        pin_memory=pin_memory,
        drop_last=drop_last,
        collate_fn=collate_fn,
    )
def _val_dataloader(self) -> DataLoader:
    """Build the validation dataloader (or delegate to the task when attached to a Trainer)."""
    val_ds: Dataset = self._val_ds() if isinstance(self._val_ds, Callable) else self._val_ds
    loader_kwargs = dict(
        batch_size=self.batch_size,
        num_workers=self.num_workers,
        pin_memory=True,
        collate_fn=self._resolve_collate_fn(val_ds, RunningStage.VALIDATING),
    )
    if isinstance(getattr(self, "trainer", None), pl.Trainer):
        # Attached to a Trainer: the LightningModule builds the dataloader.
        return self.trainer.lightning_module.process_val_dataset(val_ds, **loader_kwargs)
    return DataLoader(val_ds, **loader_kwargs)
def _test_dataloader(self) -> DataLoader:
    """Build the test dataloader (or delegate to the task when attached to a Trainer)."""
    test_ds: Dataset = self._test_ds() if isinstance(self._test_ds, Callable) else self._test_ds
    loader_kwargs = dict(
        batch_size=self.batch_size,
        num_workers=self.num_workers,
        pin_memory=True,
        collate_fn=self._resolve_collate_fn(test_ds, RunningStage.TESTING),
    )
    if isinstance(getattr(self, "trainer", None), pl.Trainer):
        # Attached to a Trainer: the LightningModule builds the dataloader.
        return self.trainer.lightning_module.process_test_dataset(test_ds, **loader_kwargs)
    return DataLoader(test_ds, **loader_kwargs)
def _predict_dataloader(self) -> DataLoader:
    """Build the predict dataloader (or delegate to the task when attached to a Trainer)."""
    # ``_predict_ds`` may be a factory callable; resolve it to a dataset first.
    predict_ds: Dataset = self._predict_ds() if isinstance(self._predict_ds, Callable) else self._predict_ds

    if isinstance(predict_ds, IterableAutoDataset):
        # Iterable datasets have no length; keep the configured batch size.
        batch_size = self.batch_size
    else:
        # Clamp the batch size to the dataset size, never going below 1.
        batch_size = min(self.batch_size, len(predict_ds) if len(predict_ds) > 0 else 1)

    collate_fn = self._resolve_collate_fn(predict_ds, RunningStage.PREDICTING)
    pin_memory = True

    if isinstance(getattr(self, "trainer", None), pl.Trainer):
        # NOTE(review): this mirrors ``_test_dataloader`` by calling
        # ``process_test_dataset`` for the predict stage — confirm whether a
        # dedicated ``process_predict_dataset`` hook should be used instead.
        return self.trainer.lightning_module.process_test_dataset(
            predict_ds,
            batch_size=batch_size,
            num_workers=self.num_workers,
            pin_memory=pin_memory,
            collate_fn=collate_fn,
        )

    return DataLoader(
        predict_ds,
        batch_size=batch_size,
        num_workers=self.num_workers,
        # Consistency fix: use the shared flag instead of a hard-coded ``True``.
        pin_memory=pin_memory,
        collate_fn=collate_fn,
    )
@property
def num_classes(self) -> Optional[int]:
    """The number of classes declared by the first dataset that reports one."""
    counts = [
        getattr(dataset, "num_classes", None)
        for dataset in (self.train_dataset, self.val_dataset, self.test_dataset)
    ]
    # Preserve the original ``or``-chaining: the first truthy count wins and
    # the last value is returned when all are falsy.
    return counts[0] or counts[1] or counts[2]
@property
def multi_label(self) -> Optional[bool]:
    """The multi-label flag declared by the first dataset that reports one."""
    flags = [
        getattr(dataset, "multi_label", None)
        for dataset in (self.train_dataset, self.val_dataset, self.test_dataset)
    ]
    # Preserve the original ``or``-chaining: the first truthy flag wins and
    # the last value is returned when all are falsy.
    return flags[0] or flags[1] or flags[2]
@property
def data_source(self) -> Optional[DataSource]:
    """The :class:`~flash.core.data.data_source.DataSource` the datasets came from, if any."""
    return self._data_source
@property
def preprocess(self) -> Preprocess:
    """The attached :class:`~flash.core.data.process.Preprocess`, or a fresh ``preprocess_cls`` instance."""
    return self._preprocess or self.preprocess_cls()
@property
def postprocess(self) -> Postprocess:
    """The attached :class:`~flash.core.data.process.Postprocess`, or a fresh ``postprocess_cls`` instance."""
    return self._postprocess or self.postprocess_cls()
@property
def data_pipeline(self) -> DataPipeline:
    """A :class:`~flash.core.data.data_pipeline.DataPipeline` built from this module's components.

    Note: a new ``DataPipeline`` object is constructed on every access.
    """
    return DataPipeline(self.data_source, self.preprocess, self.postprocess)
def available_data_sources(self) -> Sequence[str]:
    """Get the list of available data source names for use with this
    :class:`~flash.core.data.data_module.DataModule`.

    Returns:
        The list of data source names.
    """
    # The preprocess (possibly the default one) owns the data source registry.
    return self.preprocess.available_data_sources()
@staticmethod
def _split_train_val(
    train_dataset: Dataset,
    val_split: float,
) -> Tuple[Any, Any]:
    """Randomly split ``train_dataset`` into train and validation views.

    Args:
        train_dataset: the map-style dataset to split.
        val_split: fraction of samples (a float in [0, 1]) to hold out for validation.

    Returns:
        A ``(train, val)`` tuple of :class:`~flash.core.data.splits.SplitDataset` views
        over the same underlying dataset.

    Raises:
        MisconfigurationException: if ``val_split`` is not a float in [0, 1], or the
            dataset is iterable (cannot be index-split).
    """
    # Simplified from the original redundant condition
    # ``not isinstance(...) or (isinstance(...) and val_split > 1 or val_split < 0)``:
    # reject non-floats and values outside [0, 1] — same behavior.
    if not isinstance(val_split, float) or val_split < 0 or val_split > 1:
        raise MisconfigurationException(f"`val_split` should be a float between 0 and 1. Found {val_split}.")

    if isinstance(train_dataset, IterableAutoDataset):
        raise MisconfigurationException(
            "`val_split` should be `None` when the dataset is built with an IterableDataset."
        )

    val_num_samples = int(len(train_dataset) * val_split)
    indices = list(range(len(train_dataset)))
    np.random.shuffle(indices)
    val_indices = indices[:val_num_samples]
    train_indices = indices[val_num_samples:]
    return (
        SplitDataset(train_dataset, train_indices, use_duplicated_indices=True),
        SplitDataset(train_dataset, val_indices, use_duplicated_indices=True),
    )
@classmethod
def from_data_source(
    cls,
    data_source: str,
    train_data: Any = None,
    val_data: Any = None,
    test_data: Any = None,
    predict_data: Any = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Creates a :class:`~flash.core.data.data_module.DataModule` object from the given inputs to
    :meth:`~flash.core.data.data_source.DataSource.load_data` (``train_data``, ``val_data``, ``test_data``,
    ``predict_data``). The data source will be resolved from the instantiated
    :class:`~flash.core.data.process.Preprocess`
    using :meth:`~flash.core.data.process.Preprocess.data_source_of_name`.

    Args:
        data_source: The name of the data source to use for the
            :meth:`~flash.core.data.data_source.DataSource.load_data`.
        train_data: The input to :meth:`~flash.core.data.data_source.DataSource.load_data` to use when creating
            the train dataset.
        val_data: The input to :meth:`~flash.core.data.data_source.DataSource.load_data` to use when creating
            the validation dataset.
        test_data: The input to :meth:`~flash.core.data.data_source.DataSource.load_data` to use when creating
            the test dataset.
        predict_data: The input to :meth:`~flash.core.data.data_source.DataSource.load_data` to use when creating
            the predict dataset.
        train_transform: The dictionary of transforms to use during training which maps
            :class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
        val_transform: The dictionary of transforms to use during validation which maps
            :class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
        test_transform: The dictionary of transforms to use during testing which maps
            :class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
        predict_transform: The dictionary of transforms to use during predicting which maps
            :class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
        data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to pass to the
            :class:`~flash.core.data.data_module.DataModule`.
        preprocess: The :class:`~flash.core.data.data.Preprocess` to pass to the
            :class:`~flash.core.data.data_module.DataModule`. If ``None``, ``cls.preprocess_cls`` will be
            constructed and used.
        val_split: The ``val_split`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        batch_size: The ``batch_size`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        num_workers: The ``num_workers`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        sampler: The ``sampler`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        preprocess_kwargs: Additional keyword arguments to use when constructing the preprocess. Will only be used
            if ``preprocess = None``.

    Returns:
        The constructed data module.

    Examples::

        data_module = DataModule.from_data_source(
            DefaultDataSources.FOLDERS,
            train_data="train_folder",
            train_transform={
                "to_tensor_transform": torch.as_tensor,
            },
        )
    """
    # Build a default preprocess from the transforms when none is supplied;
    # ``preprocess_kwargs`` are only consumed in that case.
    preprocess = preprocess or cls.preprocess_cls(
        train_transform,
        val_transform,
        test_transform,
        predict_transform,
        **preprocess_kwargs,
    )

    # Resolve the named data source from the preprocess's registry.
    data_source = preprocess.data_source_of_name(data_source)

    # One dataset per stage; stages with ``None`` data yield ``None`` datasets.
    train_dataset, val_dataset, test_dataset, predict_dataset = data_source.to_datasets(
        train_data,
        val_data,
        test_data,
        predict_data,
    )

    return cls(
        train_dataset,
        val_dataset,
        test_dataset,
        predict_dataset,
        data_source=data_source,
        preprocess=preprocess,
        data_fetcher=data_fetcher,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
@classmethod
def from_folders(
    cls,
    train_folder: Optional[str] = None,
    val_folder: Optional[str] = None,
    test_folder: Optional[str] = None,
    predict_folder: Optional[str] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Creates a :class:`~flash.core.data.data_module.DataModule` object from the given folders using the
    :class:`~flash.core.data.data_source.DataSource` of name
    :attr:`~flash.core.data.data_source.DefaultDataSources.FOLDERS`
    from the passed or constructed :class:`~flash.core.data.process.Preprocess`.

    Args:
        train_folder: The folder containing the train data.
        val_folder: The folder containing the validation data.
        test_folder: The folder containing the test data.
        predict_folder: The folder containing the predict data.
        train_transform: Mapping of :class:`~flash.core.data.process.Preprocess` hook names to callable
            transforms, applied during training.
        val_transform: Mapping of preprocess hook names to transforms, applied during validation.
        test_transform: Mapping of preprocess hook names to transforms, applied during testing.
        predict_transform: Mapping of preprocess hook names to transforms, applied during predicting.
        data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to pass to the
            :class:`~flash.core.data.data_module.DataModule`.
        preprocess: The :class:`~flash.core.data.data.Preprocess` to pass to the
            :class:`~flash.core.data.data_module.DataModule`. If ``None``, ``cls.preprocess_cls``
            will be constructed and used.
        val_split: The ``val_split`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        batch_size: The ``batch_size`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        num_workers: The ``num_workers`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        sampler: The ``sampler`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        preprocess_kwargs: Additional keyword arguments to use when constructing the preprocess. Will only be
            used if ``preprocess = None``.

    Returns:
        The constructed data module.

    Examples::

        data_module = DataModule.from_folders(
            train_folder="train_folder",
            train_transform={
                "to_tensor_transform": torch.as_tensor,
            },
        )
    """
    # Everything except the folder paths is forwarded verbatim.
    shared_kwargs = dict(
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
    return cls.from_data_source(
        DefaultDataSources.FOLDERS,
        train_folder,
        val_folder,
        test_folder,
        predict_folder,
        **shared_kwargs,
        **preprocess_kwargs,
    )
@classmethod
def from_files(
    cls,
    train_files: Optional[Sequence[str]] = None,
    train_targets: Optional[Sequence[Any]] = None,
    val_files: Optional[Sequence[str]] = None,
    val_targets: Optional[Sequence[Any]] = None,
    test_files: Optional[Sequence[str]] = None,
    test_targets: Optional[Sequence[Any]] = None,
    predict_files: Optional[Sequence[str]] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Creates a :class:`~flash.core.data.data_module.DataModule` object from the given sequences of files
    using the :class:`~flash.core.data.data_source.DataSource` of name
    :attr:`~flash.core.data.data_source.DefaultDataSources.FILES` from the passed or constructed
    :class:`~flash.core.data.process.Preprocess`.

    Args:
        train_files: A sequence of files to use as the train inputs.
        train_targets: A sequence of targets (one per train file) to use as the train targets.
        val_files: A sequence of files to use as the validation inputs.
        val_targets: A sequence of targets (one per validation file) to use as the validation targets.
        test_files: A sequence of files to use as the test inputs.
        test_targets: A sequence of targets (one per test file) to use as the test targets.
        predict_files: A sequence of files to use when predicting.
        train_transform: Mapping of :class:`~flash.core.data.process.Preprocess` hook names to callable
            transforms, applied during training.
        val_transform: Mapping of preprocess hook names to transforms, applied during validation.
        test_transform: Mapping of preprocess hook names to transforms, applied during testing.
        predict_transform: Mapping of preprocess hook names to transforms, applied during predicting.
        data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to pass to the
            :class:`~flash.core.data.data_module.DataModule`.
        preprocess: The :class:`~flash.core.data.data.Preprocess` to pass to the
            :class:`~flash.core.data.data_module.DataModule`. If ``None``, ``cls.preprocess_cls``
            will be constructed and used.
        val_split: The ``val_split`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        batch_size: The ``batch_size`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        num_workers: The ``num_workers`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        sampler: The ``sampler`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        preprocess_kwargs: Additional keyword arguments to use when constructing the preprocess. Will only be
            used if ``preprocess = None``.

    Returns:
        The constructed data module.

    Examples::

        data_module = DataModule.from_files(
            train_files=["image_1.png", "image_2.png", "image_3.png"],
            train_targets=[1, 0, 1],
            train_transform={
                "to_tensor_transform": torch.as_tensor,
            },
        )
    """
    # Each stage's (files, targets) pair is handed to the FILES data source as
    # a single tuple; predict has no targets.
    shared_kwargs = dict(
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
    return cls.from_data_source(
        DefaultDataSources.FILES,
        (train_files, train_targets),
        (val_files, val_targets),
        (test_files, test_targets),
        predict_files,
        **shared_kwargs,
        **preprocess_kwargs,
    )
@classmethod
def from_tensors(
    cls,
    train_data: Optional[Collection[torch.Tensor]] = None,
    train_targets: Optional[Collection[Any]] = None,
    val_data: Optional[Collection[torch.Tensor]] = None,
    val_targets: Optional[Sequence[Any]] = None,
    test_data: Optional[Collection[torch.Tensor]] = None,
    test_targets: Optional[Sequence[Any]] = None,
    predict_data: Optional[Collection[torch.Tensor]] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Creates a :class:`~flash.core.data.data_module.DataModule` object from the given tensors using the
    :class:`~flash.core.data.data_source.DataSource`
    of name :attr:`~flash.core.data.data_source.DefaultDataSources.TENSOR`
    from the passed or constructed :class:`~flash.core.data.process.Preprocess`.

    Args:
        train_data: A tensor or collection of tensors to use as the train inputs.
        train_targets: A sequence of targets (one per train input) to use as the train targets.
        val_data: A tensor or collection of tensors to use as the validation inputs.
        val_targets: A sequence of targets (one per validation input) to use as the validation targets.
        test_data: A tensor or collection of tensors to use as the test inputs.
        test_targets: A sequence of targets (one per test input) to use as the test targets.
        predict_data: A tensor or collection of tensors to use when predicting.
        train_transform: Mapping of :class:`~flash.core.data.process.Preprocess` hook names to callable
            transforms, applied during training.
        val_transform: Mapping of preprocess hook names to transforms, applied during validation.
        test_transform: Mapping of preprocess hook names to transforms, applied during testing.
        predict_transform: Mapping of preprocess hook names to transforms, applied during predicting.
        data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to pass to the
            :class:`~flash.core.data.data_module.DataModule`.
        preprocess: The :class:`~flash.core.data.data.Preprocess` to pass to the
            :class:`~flash.core.data.data_module.DataModule`. If ``None``, ``cls.preprocess_cls``
            will be constructed and used.
        val_split: The ``val_split`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        batch_size: The ``batch_size`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        num_workers: The ``num_workers`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        sampler: The ``sampler`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
        preprocess_kwargs: Additional keyword arguments to use when constructing the preprocess. Will only be
            used if ``preprocess = None``.

    Returns:
        The constructed data module.

    Examples::

        data_module = DataModule.from_tensors(
            train_files=torch.rand(3, 128),
            train_targets=[1, 0, 1],
            train_transform={
                "to_tensor_transform": torch.as_tensor,
            },
        )
    """
    # Each stage's (data, targets) pair is handed to the TENSORS data source
    # as a single tuple; predict has no targets.
    shared_kwargs = dict(
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
    return cls.from_data_source(
        DefaultDataSources.TENSORS,
        (train_data, train_targets),
        (val_data, val_targets),
        (test_data, test_targets),
        predict_data,
        **shared_kwargs,
        **preprocess_kwargs,
    )
@classmethod
def from_numpy(
cls,
train_data: Optional[Collection[np.ndarray]] = None,
train_targets: Optional[Collection[Any]] = None,
val_data: Optional[Collection[np.ndarray]] = None,
val_targets: Optional[Sequence[Any]] = None,
test_data: Optional[Collection[np.ndarray]] = None,
test_targets: Optional[Sequence[Any]] = None,
predict_data: Optional[Collection[np.ndarray]] = None,
train_transform: Optional[Dict[str, Callable]] = None,
val_transform: Optional[Dict[str, Callable]] = None,
test_transform: Optional[Dict[str, Callable]] = None,
predict_transform: Optional[Dict[str, Callable]] = None,
data_fetcher: Optional[BaseDataFetcher] = None,
preprocess: Optional[Preprocess] = None,
val_split: Optional[float] = None,
batch_size: int = 4,
num_workers: Optional[int] = None,
sampler: Optional[Sampler] = None,
**preprocess_kwargs: Any,
) -> "DataModule":
"""Creates a :class:`~flash.core.data.data_module.DataModule` object from the given numpy array using the
:class:`~flash.core.data.data_source.DataSource`
of name :attr:`~flash.core.data.data_source.DefaultDataSources.NUMPY`
from the passed or constructed :class:`~flash.core.data.process.Preprocess`.
Args:
train_data: A numpy array to use as the train inputs.
train_targets: A sequence of targets (one per train input) to use as the train targets.
val_data: A numpy array to use as the validation inputs.
val_targets: A sequence of targets (one per validation input) to use as the validation targets.
test_data: A numpy array to use as the test inputs.
test_targets: A sequence of targets (one per test input) to use as the test targets.
predict_data: A numpy array to use when predicting.
train_transform: The dictionary of transforms to use during training which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
val_transform: The dictionary of transforms to use during validation which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
test_transform: The dictionary of transforms to use during testing which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
predict_transform: The dictionary of transforms to use during predicting which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to pass to the
:class:`~flash.core.data.data_module.DataModule`.
preprocess: The :class:`~flash.core.data.data.Preprocess` to pass to the
:class:`~flash.core.data.data_module.DataModule`. If ``None``, ``cls.preprocess_cls``
will be constructed and used.
val_split: The ``val_split`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
batch_size: The ``batch_size`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
num_workers: The ``num_workers`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
sampler: The ``sampler`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
preprocess_kwargs: Additional keyword arguments to use when constructing the preprocess. Will only be used
if ``preprocess = None``.
Returns:
The constructed data module.
Examples::
data_module = DataModule.from_numpy(
train_files=np.random.rand(3, 128),
train_targets=[1, 0, 1],
train_transform={
"to_tensor_transform": torch.as_tensor,
},
)
"""
return cls.from_data_source(
DefaultDataSources.NUMPY,
(train_data, train_targets),
(val_data, val_targets),
(test_data, test_targets),
predict_data,
train_transform=train_transform,
val_transform=val_transform,
test_transform=test_transform,
predict_transform=predict_transform,
data_fetcher=data_fetcher,
preprocess=preprocess,
val_split=val_split,
batch_size=batch_size,
num_workers=num_workers,
sampler=sampler,
**preprocess_kwargs,
)
    @classmethod
    def from_json(
        cls,
        input_fields: Union[str, Sequence[str]],
        target_fields: Optional[Union[str, Sequence[str]]] = None,
        train_file: Optional[str] = None,
        val_file: Optional[str] = None,
        test_file: Optional[str] = None,
        predict_file: Optional[str] = None,
        train_transform: Optional[Dict[str, Callable]] = None,
        val_transform: Optional[Dict[str, Callable]] = None,
        test_transform: Optional[Dict[str, Callable]] = None,
        predict_transform: Optional[Dict[str, Callable]] = None,
        data_fetcher: Optional[BaseDataFetcher] = None,
        preprocess: Optional[Preprocess] = None,
        val_split: Optional[float] = None,
        batch_size: int = 4,
        num_workers: Optional[int] = None,
        sampler: Optional[Sampler] = None,
        field: Optional[str] = None,
        **preprocess_kwargs: Any,
    ) -> "DataModule":
        """Creates a :class:`~flash.core.data.data_module.DataModule` object from the given JSON files using the
        :class:`~flash.core.data.data_source.DataSource`
        of name :attr:`~flash.core.data.data_source.DefaultDataSources.JSON`
        from the passed or constructed :class:`~flash.core.data.process.Preprocess`.

        Args:
            input_fields: The field or fields in the JSON objects to use for the input.
            target_fields: The field or fields in the JSON objects to use for the target.
            train_file: The JSON file containing the training data.
            val_file: The JSON file containing the validation data.
            test_file: The JSON file containing the testing data.
            predict_file: The JSON file containing the data to use when predicting.
            train_transform: Mapping of :class:`~flash.core.data.process.Preprocess` hook names
                to transforms applied during training.
            val_transform: As above, applied during validation.
            test_transform: As above, applied during testing.
            predict_transform: As above, applied during predicting.
            data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to pass to the
                :class:`~flash.core.data.data_module.DataModule`.
            preprocess: The :class:`~flash.core.data.process.Preprocess` to use; when ``None``,
                ``cls.preprocess_cls`` is constructed from ``preprocess_kwargs``.
            val_split: The ``val_split`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
            batch_size: The ``batch_size`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
            num_workers: The ``num_workers`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
            sampler: The ``sampler`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
            field: The field that holds the list of records inside the JSON file, for nested layouts.
            preprocess_kwargs: Additional keyword arguments to use when constructing the preprocess. Will only be used
                if ``preprocess = None``.

        Returns:
            The constructed data module.

        Examples::

            data_module = DataModule.from_json(
                "input",
                "target",
                train_file="train_data.json",
                train_transform={
                    "to_tensor_transform": torch.as_tensor,
                },
            )

            # In the case where the data is of the form:
            # {
            #     "version": 0.0.x,
            #     "data": [
            #         {
            #             "input_field" : "input_data",
            #             "target_field" : "target_output"
            #         },
            #         ...
            #     ]
            # }

            data_module = DataModule.from_json(
                "input",
                "target",
                train_file="train_data.json",
                train_transform={
                    "to_tensor_transform": torch.as_tensor,
                },
                field="data"
            )
        """
        return cls.from_data_source(
            DefaultDataSources.JSON,
            (train_file, input_fields, target_fields, field),
            (val_file, input_fields, target_fields, field),
            (test_file, input_fields, target_fields, field),
            (predict_file, input_fields, target_fields, field),
            train_transform=train_transform,
            val_transform=val_transform,
            test_transform=test_transform,
            predict_transform=predict_transform,
            data_fetcher=data_fetcher,
            preprocess=preprocess,
            val_split=val_split,
            batch_size=batch_size,
            num_workers=num_workers,
            sampler=sampler,
            **preprocess_kwargs,
        )
@classmethod
def from_csv(
cls,
input_fields: Union[str, Sequence[str]],
target_fields: Optional[Union[str, Sequence[str]]] = None,
train_file: Optional[str] = None,
val_file: Optional[str] = None,
test_file: Optional[str] = None,
predict_file: Optional[str] = None,
train_transform: Optional[Dict[str, Callable]] = None,
val_transform: Optional[Dict[str, Callable]] = None,
test_transform: Optional[Dict[str, Callable]] = None,
predict_transform: Optional[Dict[str, Callable]] = None,
data_fetcher: Optional[BaseDataFetcher] = None,
preprocess: Optional[Preprocess] = None,
val_split: Optional[float] = None,
batch_size: int = 4,
num_workers: Optional[int] = None,
sampler: Optional[Sampler] = None,
**preprocess_kwargs: Any,
) -> "DataModule":
"""Creates a :class:`~flash.core.data.data_module.DataModule` object from the given CSV files using the
:class:`~flash.core.data.data_source.DataSource`
of name :attr:`~flash.core.data.data_source.DefaultDataSources.CSV`
from the passed or constructed :class:`~flash.core.data.process.Preprocess`.
Args:
input_fields: The field or fields (columns) in the CSV file to use for the input.
target_fields: The field or fields (columns) in the CSV file to use for the target.
train_file: The CSV file containing the training data.
val_file: The CSV file containing the validation data.
test_file: The CSV file containing the testing data.
predict_file: The CSV file containing the data to use when predicting.
train_transform: The dictionary of transforms to use during training which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
val_transform: The dictionary of transforms to use during validation which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
test_transform: The dictionary of transforms to use during testing which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
predict_transform: The dictionary of transforms to use during predicting which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to pass to the
:class:`~flash.core.data.data_module.DataModule`.
preprocess: The :class:`~flash.core.data.data.Preprocess` to pass to the
:class:`~flash.core.data.data_module.DataModule`. If ``None``, ``cls.preprocess_cls``
will be constructed and used.
val_split: The ``val_split`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
batch_size: The ``batch_size`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
num_workers: The ``num_workers`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
sampler: The ``sampler`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
preprocess_kwargs: Additional keyword arguments to use when constructing the preprocess. Will only be used
if ``preprocess = None``.
Returns:
The constructed data module.
Examples::
data_module = DataModule.from_csv(
"input",
"target",
train_file="train_data.csv",
train_transform={
"to_tensor_transform": torch.as_tensor,
},
)
"""
return cls.from_data_source(
DefaultDataSources.CSV,
(train_file, input_fields, target_fields),
(val_file, input_fields, target_fields),
(test_file, input_fields, target_fields),
(predict_file, input_fields, target_fields),
train_transform=train_transform,
val_transform=val_transform,
test_transform=test_transform,
predict_transform=predict_transform,
data_fetcher=data_fetcher,
preprocess=preprocess,
val_split=val_split,
batch_size=batch_size,
num_workers=num_workers,
sampler=sampler,
**preprocess_kwargs,
)
@classmethod
def from_datasets(
cls,
train_dataset: Optional[Dataset] = None,
val_dataset: Optional[Dataset] = None,
test_dataset: Optional[Dataset] = None,
predict_dataset: Optional[Dataset] = None,
train_transform: Optional[Dict[str, Callable]] = None,
val_transform: Optional[Dict[str, Callable]] = None,
test_transform: Optional[Dict[str, Callable]] = None,
predict_transform: Optional[Dict[str, Callable]] = None,
data_fetcher: Optional[BaseDataFetcher] = None,
preprocess: Optional[Preprocess] = None,
val_split: Optional[float] = None,
batch_size: int = 4,
num_workers: Optional[int] = None,
sampler: Optional[Sampler] = None,
**preprocess_kwargs: Any,
) -> "DataModule":
"""Creates a :class:`~flash.core.data.data_module.DataModule` object from the given datasets using the
:class:`~flash.core.data.data_source.DataSource`
of name :attr:`~flash.core.data.data_source.DefaultDataSources.DATASETS`
from the passed or constructed :class:`~flash.core.data.process.Preprocess`.
Args:
train_dataset: Dataset used during training.
val_dataset: Dataset used during validating.
test_dataset: Dataset used during testing.
predict_dataset: Dataset used during predicting.
train_transform: The dictionary of transforms to use during training which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
val_transform: The dictionary of transforms to use during validation which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
test_transform: The dictionary of transforms to use during testing which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
predict_transform: The dictionary of transforms to use during predicting which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to pass to the
:class:`~flash.core.data.data_module.DataModule`.
preprocess: The :class:`~flash.core.data.data.Preprocess` to pass to the
:class:`~flash.core.data.data_module.DataModule`. If ``None``, ``cls.preprocess_cls``
will be constructed and used.
val_split: The ``val_split`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
batch_size: The ``batch_size`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
num_workers: The ``num_workers`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
sampler: The ``sampler`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
preprocess_kwargs: Additional keyword arguments to use when constructing the preprocess. Will only be used
if ``preprocess = None``.
Returns:
The constructed data module.
Examples::
data_module = DataModule.from_datasets(
train_dataset=train_dataset,
train_transform={
"to_tensor_transform": torch.as_tensor,
},
)
"""
return cls.from_data_source(
DefaultDataSources.DATASETS,
train_dataset,
val_dataset,
test_dataset,
predict_dataset,
train_transform=train_transform,
val_transform=val_transform,
test_transform=test_transform,
predict_transform=predict_transform,
data_fetcher=data_fetcher,
preprocess=preprocess,
val_split=val_split,
batch_size=batch_size,
num_workers=num_workers,
sampler=sampler,
**preprocess_kwargs,
)
@classmethod
@requires("fiftyone")
def from_fiftyone(
cls,
train_dataset: Optional[SampleCollection] = None,
val_dataset: Optional[SampleCollection] = None,
test_dataset: Optional[SampleCollection] = None,
predict_dataset: Optional[SampleCollection] = None,
train_transform: Optional[Dict[str, Callable]] = None,
val_transform: Optional[Dict[str, Callable]] = None,
test_transform: Optional[Dict[str, Callable]] = None,
predict_transform: Optional[Dict[str, Callable]] = None,
data_fetcher: Optional[BaseDataFetcher] = None,
preprocess: Optional[Preprocess] = None,
val_split: Optional[float] = None,
batch_size: int = 4,
num_workers: Optional[int] = None,
**preprocess_kwargs: Any,
) -> "DataModule":
"""Creates a :class:`~flash.core.data.data_module.DataModule` object
from the given FiftyOne Datasets using the
:class:`~flash.core.data.data_source.DataSource` of name
:attr:`~flash.core.data.data_source.DefaultDataSources.FIFTYONE`
from the passed or constructed :class:`~flash.core.data.process.Preprocess`.
Args:
train_dataset: The ``fiftyone.core.collections.SampleCollection`` containing the train data.
val_dataset: The ``fiftyone.core.collections.SampleCollection`` containing the validation data.
test_dataset: The ``fiftyone.core.collections.SampleCollection`` containing the test data.
predict_dataset: The ``fiftyone.core.collections.SampleCollection`` containing the predict data.
train_transform: The dictionary of transforms to use during training which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
val_transform: The dictionary of transforms to use during validation which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
test_transform: The dictionary of transforms to use during testing which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
predict_transform: The dictionary of transforms to use during predicting which maps
:class:`~flash.core.data.process.Preprocess` hook names to callable transforms.
data_fetcher: The :class:`~flash.core.data.callback.BaseDataFetcher` to pass to the
:class:`~flash.core.data.data_module.DataModule`.
preprocess: The :class:`~flash.core.data.data.Preprocess` to pass to the
:class:`~flash.core.data.data_module.DataModule`. If ``None``, ``cls.preprocess_cls``
will be constructed and used.
val_split: The ``val_split`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
batch_size: The ``batch_size`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
num_workers: The ``num_workers`` argument to pass to the :class:`~flash.core.data.data_module.DataModule`.
preprocess_kwargs: Additional keyword arguments to use when constructing the preprocess. Will only be used
if ``preprocess = None``.
Returns:
The constructed data module.
Examples::
train_dataset = fo.Dataset.from_dir(
"/path/to/dataset",
dataset_type=fo.types.ImageClassificationDirectoryTree,
)
data_module = DataModule.from_fiftyone(
train_data = train_dataset,
train_transform={
"to_tensor_transform": torch.as_tensor,
},
)
"""
return cls.from_data_source(
DefaultDataSources.FIFTYONE,
train_dataset,
val_dataset,
test_dataset,
predict_dataset,
train_transform=train_transform,
val_transform=val_transform,
test_transform=test_transform,
predict_transform=predict_transform,
data_fetcher=data_fetcher,
preprocess=preprocess,
val_split=val_split,
batch_size=batch_size,
num_workers=num_workers,
**preprocess_kwargs,
)
| 48.402744 | 119 | 0.644712 |
import os
import platform
from typing import Any, Callable, Collection, Dict, Iterable, List, Optional, Sequence, Tuple, TYPE_CHECKING, Union
import numpy as np
import pytorch_lightning as pl
import torch
from pytorch_lightning.trainer.states import RunningStage
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from torch.utils.data import DataLoader, Dataset
from torch.utils.data.dataset import IterableDataset, Subset
from torch.utils.data.sampler import Sampler
import flash
from flash.core.data.auto_dataset import BaseAutoDataset, IterableAutoDataset
from flash.core.data.base_viz import BaseVisualization
from flash.core.data.callback import BaseDataFetcher
from flash.core.data.data_pipeline import DataPipeline, DefaultPreprocess, Postprocess, Preprocess
from flash.core.data.data_source import DataSource, DefaultDataSources
from flash.core.data.splits import SplitDataset
from flash.core.data.utils import _STAGES_PREFIX
from flash.core.utilities.imports import _FIFTYONE_AVAILABLE, requires
if _FIFTYONE_AVAILABLE and TYPE_CHECKING:
from fiftyone.core.collections import SampleCollection
else:
SampleCollection = None
class DataModule(pl.LightningDataModule):
    """Lightning data module holding the train/val/test/predict datasets, their transforms,
    the data pipeline components and the dataloader construction logic for a Flash task.
    """
    # Fall-back classes instantiated when no ``preprocess`` / ``postprocess`` is supplied
    # (see the ``preprocess`` and ``postprocess`` properties below).
    preprocess_cls = DefaultPreprocess
    postprocess_cls = Postprocess
    def __init__(
        self,
        train_dataset: Optional[Dataset] = None,
        val_dataset: Optional[Dataset] = None,
        test_dataset: Optional[Dataset] = None,
        predict_dataset: Optional[Dataset] = None,
        data_source: Optional[DataSource] = None,
        preprocess: Optional[Preprocess] = None,
        postprocess: Optional[Postprocess] = None,
        data_fetcher: Optional[BaseDataFetcher] = None,
        val_split: Optional[float] = None,
        batch_size: int = 4,
        num_workers: Optional[int] = None,
        sampler: Optional[Sampler] = None,
    ) -> None:
        """Store the datasets and loader settings, optionally splitting off a validation set.

        Args:
            train_dataset: Dataset for training.
            val_dataset: Dataset for validation.
            test_dataset: Dataset for testing.
            predict_dataset: Dataset for predicting.
            data_source: The :class:`~flash.core.data.data_source.DataSource` the datasets came from.
            preprocess: Optional preprocess; the ``preprocess`` property falls back to ``preprocess_cls()``.
            postprocess: Optional postprocess; the ``postprocess`` property falls back to ``postprocess_cls()``.
            data_fetcher: Optional data fetcher; attached to the preprocess for visualisation.
            val_split: Fraction of ``train_dataset`` to split off when no ``val_dataset`` is given.
            batch_size: Batch size used by all dataloaders.
            num_workers: Dataloader worker count; platform-dependent default when ``None``.
            sampler: Optional sampler for the train dataloader.
        """
        super().__init__()
        # When flash's own test-suite runs on a CUDA machine, a fixed batch size of 16 is forced.
        if flash._IS_TESTING and torch.cuda.is_available():
            batch_size = 16
        self._data_source: DataSource = data_source
        self._preprocess: Optional[Preprocess] = preprocess
        self._postprocess: Optional[Postprocess] = postprocess
        self._viz: Optional[BaseVisualization] = None
        self._data_fetcher: Optional[BaseDataFetcher] = data_fetcher or self.configure_data_fetcher()
        self.data_fetcher.attach_to_preprocess(self.preprocess)
        self._train_ds = train_dataset
        self._val_ds = val_dataset
        self._test_ds = test_dataset
        self._predict_ds = predict_dataset
        # Carve a validation split out of the training data when requested and none was provided.
        if self._train_ds is not None and (val_split is not None and self._val_ds is None):
            self._train_ds, self._val_ds = self._split_train_val(self._train_ds, val_split)
        # Only bind the dataloader hooks for splits that actually exist — presumably so
        # Lightning skips stages without data (TODO confirm).
        if self._train_ds:
            self.train_dataloader = self._train_dataloader
        if self._val_ds:
            self.val_dataloader = self._val_dataloader
        if self._test_ds:
            self.test_dataloader = self._test_dataloader
        if self._predict_ds:
            self.predict_dataloader = self._predict_dataloader
        self.batch_size = batch_size
        # Default worker count: 0 on macOS/Windows, otherwise one per CPU core.
        if num_workers is None:
            if platform.system() in ("Darwin", "Windows"):
                num_workers = 0
            else:
                num_workers = os.cpu_count()
        self.num_workers = num_workers
        self.sampler = sampler
        self.set_running_stages()
    @property
    def train_dataset(self) -> Optional[Dataset]:
        """This property returns the train dataset."""
        return self._train_ds
    @property
    def val_dataset(self) -> Optional[Dataset]:
        """This property returns the validation dataset."""
        return self._val_ds
    @property
    def test_dataset(self) -> Optional[Dataset]:
        """This property returns the test dataset."""
        return self._test_ds
    @property
    def predict_dataset(self) -> Optional[Dataset]:
        """This property returns the predict dataset."""
        return self._predict_ds
    @property
    def viz(self) -> BaseVisualization:
        """The active visualisation callback.

        NOTE(review): when ``_viz`` is unset this falls back to ``configure_data_fetcher()``,
        which is annotated as returning a :class:`BaseDataFetcher`, not a
        :class:`BaseVisualization` — confirm this fallback is intentional.
        """
        return self._viz or DataModule.configure_data_fetcher()
    @viz.setter
    def viz(self, viz: BaseVisualization) -> None:
        self._viz = viz
    @staticmethod
    def configure_data_fetcher(*args, **kwargs) -> BaseDataFetcher:
        """Construct the default :class:`~flash.core.data.callback.BaseDataFetcher`.

        Subclasses may override this to capture batches differently; the given arguments
        are ignored by this base implementation.
        """
        return BaseDataFetcher()
    @property
    def data_fetcher(self) -> BaseDataFetcher:
        """The data fetcher in use, or a freshly configured default when none is set."""
        return self._data_fetcher or DataModule.configure_data_fetcher()
    @data_fetcher.setter
    def data_fetcher(self, data_fetcher: BaseDataFetcher) -> None:
        self._data_fetcher = data_fetcher
def _reset_iterator(self, stage: str) -> Iterable[Any]:
iter_name = f"_{stage}_iter"
num_workers = self.num_workers
self.num_workers = 0
dataloader_fn = getattr(self, f"{stage}_dataloader")
iterator = iter(dataloader_fn())
self.num_workers = num_workers
setattr(self, iter_name, iterator)
return iterator
    def _show_batch(self, stage: str, func_names: Union[str, List[str]], reset: bool = True) -> None:
        """Pull one batch for ``stage`` and render it through the data fetcher's ``_show`` hooks.

        Args:
            stage: Stage prefix ("train", "val", "test" or "predict") used to locate the
                cached iterator and the matching dataloader.
            func_names: One or more preprocess hook names whose captured data should be shown.
            reset: Whether to clear the fetcher's captured batches before and after showing.
        """
        # Visualisation is a no-op while the flash test-suite is running.
        if os.getenv("FLASH_TESTING", "0") == "1":
            return None
        iter_name = f"_{stage}_iter"
        if not hasattr(self, iter_name):
            self._reset_iterator(stage)
        # Normalise to a list of functions to visualise.
        if isinstance(func_names, str):
            func_names = [func_names]
        iter_dataloader = getattr(self, iter_name)
        with self.data_fetcher.enable():
            if reset:
                self.data_fetcher.batches[stage] = {}
            try:
                _ = next(iter_dataloader)
            except StopIteration:
                # The cached iterator is exhausted: rebuild it and pull the first batch.
                iter_dataloader = self._reset_iterator(stage)
                _ = next(iter_dataloader)
            data_fetcher: BaseVisualization = self.data_fetcher
            data_fetcher._show(stage, func_names)
            if reset:
                self.data_fetcher.batches[stage] = {}
def show_train_batch(self, hooks_names: Union[str, List[str]] = "load_sample", reset: bool = True) -> None:
stage_name: str = _STAGES_PREFIX[RunningStage.TRAINING]
self._show_batch(stage_name, hooks_names, reset=reset)
def show_val_batch(self, hooks_names: Union[str, List[str]] = "load_sample", reset: bool = True) -> None:
stage_name: str = _STAGES_PREFIX[RunningStage.VALIDATING]
self._show_batch(stage_name, hooks_names, reset=reset)
def show_test_batch(self, hooks_names: Union[str, List[str]] = "load_sample", reset: bool = True) -> None:
stage_name: str = _STAGES_PREFIX[RunningStage.TESTING]
self._show_batch(stage_name, hooks_names, reset=reset)
def show_predict_batch(self, hooks_names: Union[str, List[str]] = "load_sample", reset: bool = True) -> None:
stage_name: str = _STAGES_PREFIX[RunningStage.PREDICTING]
self._show_batch(stage_name, hooks_names, reset=reset)
@staticmethod
def get_dataset_attribute(dataset: torch.utils.data.Dataset, attr_name: str, default: Optional[Any] = None) -> Any:
if isinstance(dataset, Subset):
return getattr(dataset.dataset, attr_name, default)
return getattr(dataset, attr_name, default)
@staticmethod
def set_dataset_attribute(dataset: torch.utils.data.Dataset, attr_name: str, value: Any) -> None:
if isinstance(dataset, Subset):
dataset = dataset.dataset
if isinstance(dataset, (Dataset, IterableDataset)):
setattr(dataset, attr_name, value)
def set_running_stages(self):
if self._train_ds:
self.set_dataset_attribute(self._train_ds, "running_stage", RunningStage.TRAINING)
if self._val_ds:
self.set_dataset_attribute(self._val_ds, "running_stage", RunningStage.VALIDATING)
if self._test_ds:
self.set_dataset_attribute(self._test_ds, "running_stage", RunningStage.TESTING)
if self._predict_ds:
self.set_dataset_attribute(self._predict_ds, "running_stage", RunningStage.PREDICTING)
    def _resolve_collate_fn(self, dataset: Dataset, running_stage: RunningStage) -> Optional[Callable]:
        # Only flash-managed datasets receive the pipeline's worker preprocessor as collate_fn;
        # any other dataset implicitly yields None (DataLoader then uses its default collation).
        if isinstance(dataset, (BaseAutoDataset, SplitDataset)):
            return self.data_pipeline.worker_preprocessor(running_stage)
    def _train_dataloader(self) -> DataLoader:
        """Build the train dataloader, delegating to the attached trainer's lightning module
        when one is present.
        """
        train_ds: Dataset = self._train_ds() if isinstance(self._train_ds, Callable) else self._train_ds
        shuffle: bool = False
        collate_fn = self._resolve_collate_fn(train_ds, RunningStage.TRAINING)
        # Iterable datasets have no usable length; for map-style datasets the trailing
        # incomplete batch is only dropped when more than one batch of data exists.
        if isinstance(train_ds, IterableAutoDataset):
            drop_last = False
        else:
            drop_last = len(train_ds) > self.batch_size
        pin_memory = True
        # Shuffling is only enabled when no custom sampler is set and the dataset is map-style
        # (DataLoader rejects shuffle for iterable datasets and for explicit samplers).
        if self.sampler is None:
            shuffle = not isinstance(train_ds, (IterableDataset, IterableAutoDataset))
        if isinstance(getattr(self, "trainer", None), pl.Trainer):
            return self.trainer.lightning_module.process_train_dataset(
                train_ds,
                batch_size=self.batch_size,
                num_workers=self.num_workers,
                pin_memory=pin_memory,
                shuffle=shuffle,
                drop_last=drop_last,
                collate_fn=collate_fn,
                sampler=self.sampler,
            )
        return DataLoader(
            train_ds,
            batch_size=self.batch_size,
            shuffle=shuffle,
            sampler=self.sampler,
            num_workers=self.num_workers,
            pin_memory=pin_memory,
            drop_last=drop_last,
            collate_fn=collate_fn,
        )
def _val_dataloader(self) -> DataLoader:
val_ds: Dataset = self._val_ds() if isinstance(self._val_ds, Callable) else self._val_ds
collate_fn = self._resolve_collate_fn(val_ds, RunningStage.VALIDATING)
pin_memory = True
if isinstance(getattr(self, "trainer", None), pl.Trainer):
return self.trainer.lightning_module.process_val_dataset(
val_ds,
batch_size=self.batch_size,
num_workers=self.num_workers,
pin_memory=pin_memory,
collate_fn=collate_fn,
)
return DataLoader(
val_ds,
batch_size=self.batch_size,
num_workers=self.num_workers,
pin_memory=pin_memory,
collate_fn=collate_fn,
)
def _test_dataloader(self) -> DataLoader:
test_ds: Dataset = self._test_ds() if isinstance(self._test_ds, Callable) else self._test_ds
collate_fn = self._resolve_collate_fn(test_ds, RunningStage.TESTING)
pin_memory = True
if isinstance(getattr(self, "trainer", None), pl.Trainer):
return self.trainer.lightning_module.process_test_dataset(
test_ds,
batch_size=self.batch_size,
num_workers=self.num_workers,
pin_memory=pin_memory,
collate_fn=collate_fn,
)
return DataLoader(
test_ds,
batch_size=self.batch_size,
num_workers=self.num_workers,
pin_memory=pin_memory,
collate_fn=collate_fn,
)
    def _predict_dataloader(self) -> DataLoader:
        """Build the predict dataloader.

        For map-style datasets the batch size is capped at the dataset length (with a floor of 1
        so the :class:`~torch.utils.data.DataLoader` stays valid for an empty dataset).
        """
        predict_ds: Dataset = self._predict_ds() if isinstance(self._predict_ds, Callable) else self._predict_ds
        if isinstance(predict_ds, IterableAutoDataset):
            batch_size = self.batch_size
        else:
            batch_size = min(self.batch_size, len(predict_ds) if len(predict_ds) > 0 else 1)
        collate_fn = self._resolve_collate_fn(predict_ds, RunningStage.PREDICTING)
        pin_memory = True
        # NOTE(review): this delegates to ``process_test_dataset`` even though it is building the
        # *predict* loader — looks copy-pasted from ``_test_dataloader``; confirm whether
        # ``process_predict_dataset`` was intended.
        if isinstance(getattr(self, "trainer", None), pl.Trainer):
            return self.trainer.lightning_module.process_test_dataset(
                predict_ds,
                batch_size=batch_size,
                num_workers=self.num_workers,
                pin_memory=pin_memory,
                collate_fn=collate_fn,
            )
        return DataLoader(
            predict_ds, batch_size=batch_size, num_workers=self.num_workers, pin_memory=True, collate_fn=collate_fn
        )
    @property
    def num_classes(self) -> Optional[int]:
        """``num_classes`` reported by the first split that exposes a truthy value.

        NOTE(review): ``or`` skips falsy values, so a split reporting ``0`` classes would be
        passed over in favour of a later split (or ``None``).
        """
        n_cls_train = getattr(self.train_dataset, "num_classes", None)
        n_cls_val = getattr(self.val_dataset, "num_classes", None)
        n_cls_test = getattr(self.test_dataset, "num_classes", None)
        return n_cls_train or n_cls_val or n_cls_test
    @property
    def multi_label(self) -> Optional[bool]:
        """``multi_label`` flag from the first split that reports a truthy value.

        A split reporting ``False`` is treated the same as one without the attribute.
        """
        multi_label_train = getattr(self.train_dataset, "multi_label", None)
        multi_label_val = getattr(self.val_dataset, "multi_label", None)
        multi_label_test = getattr(self.test_dataset, "multi_label", None)
        return multi_label_train or multi_label_val or multi_label_test
    @property
    def data_source(self) -> Optional[DataSource]:
        """The :class:`~flash.core.data.data_source.DataSource` the datasets were created from."""
        return self._data_source
    @property
    def preprocess(self) -> Preprocess:
        """The preprocess given at construction, or a default ``preprocess_cls()`` instance."""
        return self._preprocess or self.preprocess_cls()
    @property
    def postprocess(self) -> Postprocess:
        """The postprocess given at construction, or a default ``postprocess_cls()`` instance."""
        return self._postprocess or self.postprocess_cls()
    @property
    def data_pipeline(self) -> DataPipeline:
        """A new :class:`~flash.core.data.data_pipeline.DataPipeline` assembled from this
        module's data source, preprocess and postprocess.
        """
        return DataPipeline(self.data_source, self.preprocess, self.postprocess)
    def available_data_sources(self) -> Sequence[str]:
        """Names of the data sources registered on this module's preprocess."""
        return self.preprocess.available_data_sources()
@staticmethod
def _split_train_val(
train_dataset: Dataset,
val_split: float,
) -> Tuple[Any, Any]:
if not isinstance(val_split, float) or (isinstance(val_split, float) and val_split > 1 or val_split < 0):
raise MisconfigurationException(f"`val_split` should be a float between 0 and 1. Found {val_split}.")
if isinstance(train_dataset, IterableAutoDataset):
raise MisconfigurationException(
"`val_split` should be `None` when the dataset is built with an IterableDataset."
)
val_num_samples = int(len(train_dataset) * val_split)
indices = list(range(len(train_dataset)))
np.random.shuffle(indices)
val_indices = indices[:val_num_samples]
train_indices = indices[val_num_samples:]
return (
SplitDataset(train_dataset, train_indices, use_duplicated_indices=True),
SplitDataset(train_dataset, val_indices, use_duplicated_indices=True),
)
    @classmethod
    def from_data_source(
        cls,
        data_source: str,
        train_data: Any = None,
        val_data: Any = None,
        test_data: Any = None,
        predict_data: Any = None,
        train_transform: Optional[Dict[str, Callable]] = None,
        val_transform: Optional[Dict[str, Callable]] = None,
        test_transform: Optional[Dict[str, Callable]] = None,
        predict_transform: Optional[Dict[str, Callable]] = None,
        data_fetcher: Optional[BaseDataFetcher] = None,
        preprocess: Optional[Preprocess] = None,
        val_split: Optional[float] = None,
        batch_size: int = 4,
        num_workers: Optional[int] = None,
        sampler: Optional[Sampler] = None,
        **preprocess_kwargs: Any,
    ) -> "DataModule":
        """Create a :class:`DataModule` from a named data source and per-split raw data.

        This is the core factory that all ``from_*`` convenience constructors delegate to: it
        resolves ``data_source`` against the (given or constructed) preprocess, converts the raw
        per-split data into datasets and builds the data module around them.

        Args:
            data_source: Name of the data source registered on the preprocess.
            train_data: Raw input accepted by the data source for the train split.
            val_data: Raw input for the validation split.
            test_data: Raw input for the test split.
            predict_data: Raw input for the predict split.
            train_transform: Preprocess hook name -> transform mapping used during training.
            val_transform: As above, for validation.
            test_transform: As above, for testing.
            predict_transform: As above, for predicting.
            data_fetcher: Optional :class:`~flash.core.data.callback.BaseDataFetcher`.
            preprocess: Optional preprocess; when ``None``, ``cls.preprocess_cls`` is constructed
                from the transforms and ``preprocess_kwargs``.
            val_split: Fraction of training data to hold out for validation.
            batch_size: Batch size for all dataloaders.
            num_workers: Number of dataloader workers.
            sampler: Optional sampler for the train dataloader.
            preprocess_kwargs: Extra keyword arguments for the constructed preprocess.

        Returns:
            The constructed data module.
        """
        preprocess = preprocess or cls.preprocess_cls(
            train_transform,
            val_transform,
            test_transform,
            predict_transform,
            **preprocess_kwargs,
        )
        # Resolve the data-source name to the DataSource object registered on the preprocess.
        data_source = preprocess.data_source_of_name(data_source)
        train_dataset, val_dataset, test_dataset, predict_dataset = data_source.to_datasets(
            train_data,
            val_data,
            test_data,
            predict_data,
        )
        return cls(
            train_dataset,
            val_dataset,
            test_dataset,
            predict_dataset,
            data_source=data_source,
            preprocess=preprocess,
            data_fetcher=data_fetcher,
            val_split=val_split,
            batch_size=batch_size,
            num_workers=num_workers,
            sampler=sampler,
        )
@classmethod
def from_folders(
cls,
train_folder: Optional[str] = None,
val_folder: Optional[str] = None,
test_folder: Optional[str] = None,
predict_folder: Optional[str] = None,
train_transform: Optional[Dict[str, Callable]] = None,
val_transform: Optional[Dict[str, Callable]] = None,
test_transform: Optional[Dict[str, Callable]] = None,
predict_transform: Optional[Dict[str, Callable]] = None,
data_fetcher: Optional[BaseDataFetcher] = None,
preprocess: Optional[Preprocess] = None,
val_split: Optional[float] = None,
batch_size: int = 4,
num_workers: Optional[int] = None,
sampler: Optional[Sampler] = None,
**preprocess_kwargs: Any,
) -> "DataModule":
return cls.from_data_source(
DefaultDataSources.FOLDERS,
train_folder,
val_folder,
test_folder,
predict_folder,
train_transform=train_transform,
val_transform=val_transform,
test_transform=test_transform,
predict_transform=predict_transform,
data_fetcher=data_fetcher,
preprocess=preprocess,
val_split=val_split,
batch_size=batch_size,
num_workers=num_workers,
sampler=sampler,
**preprocess_kwargs,
)
@classmethod
def from_files(
    cls,
    train_files: Optional[Sequence[str]] = None,
    train_targets: Optional[Sequence[Any]] = None,
    val_files: Optional[Sequence[str]] = None,
    val_targets: Optional[Sequence[Any]] = None,
    test_files: Optional[Sequence[str]] = None,
    test_targets: Optional[Sequence[Any]] = None,
    predict_files: Optional[Sequence[str]] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Create a :class:`DataModule` from per-split lists of files and targets.

    Each train/val/test split is packed as a ``(files, targets)`` pair and
    routed through :meth:`from_data_source` under the
    ``DefaultDataSources.FILES`` key; the predict split carries files only.
    """
    # Options forwarded verbatim to ``from_data_source``.
    forwarded = dict(
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
    return cls.from_data_source(
        DefaultDataSources.FILES,
        (train_files, train_targets),
        (val_files, val_targets),
        (test_files, test_targets),
        predict_files,
        **forwarded,
        **preprocess_kwargs,
    )
@classmethod
def from_tensors(
    cls,
    train_data: Optional[Collection[torch.Tensor]] = None,
    train_targets: Optional[Collection[Any]] = None,
    val_data: Optional[Collection[torch.Tensor]] = None,
    val_targets: Optional[Sequence[Any]] = None,
    test_data: Optional[Collection[torch.Tensor]] = None,
    test_targets: Optional[Sequence[Any]] = None,
    predict_data: Optional[Collection[torch.Tensor]] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Create a :class:`DataModule` from in-memory ``torch.Tensor`` data.

    Train/val/test splits are packed as ``(data, targets)`` pairs and routed
    through :meth:`from_data_source` under ``DefaultDataSources.TENSORS``;
    the predict split carries data only.
    """
    # Options forwarded verbatim to ``from_data_source``.
    forwarded = dict(
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
    return cls.from_data_source(
        DefaultDataSources.TENSORS,
        (train_data, train_targets),
        (val_data, val_targets),
        (test_data, test_targets),
        predict_data,
        **forwarded,
        **preprocess_kwargs,
    )
@classmethod
def from_numpy(
    cls,
    train_data: Optional[Collection[np.ndarray]] = None,
    train_targets: Optional[Collection[Any]] = None,
    val_data: Optional[Collection[np.ndarray]] = None,
    val_targets: Optional[Sequence[Any]] = None,
    test_data: Optional[Collection[np.ndarray]] = None,
    test_targets: Optional[Sequence[Any]] = None,
    predict_data: Optional[Collection[np.ndarray]] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Create a :class:`DataModule` from in-memory ``numpy.ndarray`` data.

    Mirrors :meth:`from_tensors`: train/val/test splits are packed as
    ``(data, targets)`` pairs and routed through :meth:`from_data_source`
    under ``DefaultDataSources.NUMPY``; the predict split carries data only.
    """
    # Options forwarded verbatim to ``from_data_source``.
    forwarded = dict(
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
    return cls.from_data_source(
        DefaultDataSources.NUMPY,
        (train_data, train_targets),
        (val_data, val_targets),
        (test_data, test_targets),
        predict_data,
        **forwarded,
        **preprocess_kwargs,
    )
@classmethod
def from_json(
    cls,
    input_fields: Union[str, Sequence[str]],
    target_fields: Optional[Union[str, Sequence[str]]] = None,
    train_file: Optional[str] = None,
    val_file: Optional[str] = None,
    test_file: Optional[str] = None,
    predict_file: Optional[str] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    field: Optional[str] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Create a :class:`DataModule` from JSON files.

    Every split is described by a ``(file, input_fields, target_fields,
    field)`` tuple and routed through :meth:`from_data_source` under
    ``DefaultDataSources.JSON``. ``field`` is forwarded as given for all
    splits; its interpretation belongs to the JSON data source.
    """
    # The same field description applies to every split; only the file differs.
    def split_spec(path: Optional[str]):
        return (path, input_fields, target_fields, field)

    # Options forwarded verbatim to ``from_data_source``.
    forwarded = dict(
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
    return cls.from_data_source(
        DefaultDataSources.JSON,
        split_spec(train_file),
        split_spec(val_file),
        split_spec(test_file),
        split_spec(predict_file),
        **forwarded,
        **preprocess_kwargs,
    )
@classmethod
def from_csv(
    cls,
    input_fields: Union[str, Sequence[str]],
    target_fields: Optional[Union[str, Sequence[str]]] = None,
    train_file: Optional[str] = None,
    val_file: Optional[str] = None,
    test_file: Optional[str] = None,
    predict_file: Optional[str] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Create a :class:`DataModule` from CSV files.

    Every split is described by a ``(file, input_fields, target_fields)``
    tuple and routed through :meth:`from_data_source` under
    ``DefaultDataSources.CSV``.
    """
    # The same column description applies to every split; only the file differs.
    def split_spec(path: Optional[str]):
        return (path, input_fields, target_fields)

    # Options forwarded verbatim to ``from_data_source``.
    forwarded = dict(
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
    return cls.from_data_source(
        DefaultDataSources.CSV,
        split_spec(train_file),
        split_spec(val_file),
        split_spec(test_file),
        split_spec(predict_file),
        **forwarded,
        **preprocess_kwargs,
    )
@classmethod
def from_datasets(
    cls,
    train_dataset: Optional[Dataset] = None,
    val_dataset: Optional[Dataset] = None,
    test_dataset: Optional[Dataset] = None,
    predict_dataset: Optional[Dataset] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Create a :class:`DataModule` from existing ``torch.utils.data.Dataset``
    objects.

    The datasets are routed through :meth:`from_data_source` under
    ``DefaultDataSources.DATASETS``; every other option is forwarded
    unchanged.
    """
    # Options forwarded verbatim to ``from_data_source``.
    forwarded = dict(
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
    )
    return cls.from_data_source(
        DefaultDataSources.DATASETS,
        train_dataset,
        val_dataset,
        test_dataset,
        predict_dataset,
        **forwarded,
        **preprocess_kwargs,
    )
@classmethod
@requires("fiftyone")
def from_fiftyone(
    cls,
    train_dataset: Optional[SampleCollection] = None,
    val_dataset: Optional[SampleCollection] = None,
    test_dataset: Optional[SampleCollection] = None,
    predict_dataset: Optional[SampleCollection] = None,
    train_transform: Optional[Dict[str, Callable]] = None,
    val_transform: Optional[Dict[str, Callable]] = None,
    test_transform: Optional[Dict[str, Callable]] = None,
    predict_transform: Optional[Dict[str, Callable]] = None,
    data_fetcher: Optional[BaseDataFetcher] = None,
    preprocess: Optional[Preprocess] = None,
    val_split: Optional[float] = None,
    batch_size: int = 4,
    num_workers: Optional[int] = None,
    sampler: Optional[Sampler] = None,
    **preprocess_kwargs: Any,
) -> "DataModule":
    """Create a :class:`DataModule` from FiftyOne ``SampleCollection`` objects.

    Requires the ``fiftyone`` package (enforced by the ``@requires``
    decorator). The collections are routed through
    :meth:`from_data_source` under ``DefaultDataSources.FIFTYONE``.

    Fix: every sibling ``from_*`` constructor exposes and forwards a
    ``sampler`` argument to ``from_data_source``; this one previously did
    not. ``sampler`` is added with a default of ``None``, which is
    backward-compatible (the underlying call already accepts it).
    """
    return cls.from_data_source(
        DefaultDataSources.FIFTYONE,
        train_dataset,
        val_dataset,
        test_dataset,
        predict_dataset,
        train_transform=train_transform,
        val_transform=val_transform,
        test_transform=test_transform,
        predict_transform=predict_transform,
        data_fetcher=data_fetcher,
        preprocess=preprocess,
        val_split=val_split,
        batch_size=batch_size,
        num_workers=num_workers,
        sampler=sampler,
        **preprocess_kwargs,
    )
| true | true |
f72506abcd96241b0e568bab11db58147f3f22c6 | 13,446 | py | Python | source/rttov_test/profile-datasets-py/div83/028.py | bucricket/projectMAScorrection | 89489026c8e247ec7c364e537798e766331fe569 | [
"BSD-3-Clause"
] | null | null | null | source/rttov_test/profile-datasets-py/div83/028.py | bucricket/projectMAScorrection | 89489026c8e247ec7c364e537798e766331fe569 | [
"BSD-3-Clause"
] | 1 | 2022-03-12T12:19:59.000Z | 2022-03-12T12:19:59.000Z | source/rttov_test/profile-datasets-py/div83/028.py | bucricket/projectMAScorrection | 89489026c8e247ec7c364e537798e766331fe569 | [
"BSD-3-Clause"
] | null | null | null | """
Profile ../profile-datasets-py/div83/028.py
file automatically created by prof_gen.py script
"""
self["ID"] = "../profile-datasets-py/div83/028.py"
self["Q"] = numpy.array([ 2.70658300e+00, 2.88421200e+00, 3.36234900e+00,
4.31645100e+00, 5.09368400e+00, 5.28904200e+00,
5.19020300e+00, 5.37709100e+00, 5.81179600e+00,
6.08195300e+00, 6.10215300e+00, 6.10604300e+00,
6.12691200e+00, 6.14242200e+00, 6.13258200e+00,
6.07811300e+00, 5.93228500e+00, 5.70609700e+00,
5.40576100e+00, 5.05456400e+00, 4.69607800e+00,
4.41534100e+00, 4.18436200e+00, 3.99542400e+00,
3.83612500e+00, 3.68572600e+00, 3.53743700e+00,
3.42014800e+00, 3.34060900e+00, 3.29236900e+00,
3.26049900e+00, 3.23329000e+00, 3.19587000e+00,
3.14459000e+00, 3.07860100e+00, 3.00642100e+00,
2.93912100e+00, 2.88521200e+00, 2.84905200e+00,
2.83165200e+00, 2.82883200e+00, 2.82954200e+00,
2.82819200e+00, 2.82242200e+00, 2.80869200e+00,
2.78689200e+00, 2.75919200e+00, 2.73845300e+00,
2.73261300e+00, 2.73094300e+00, 2.76521200e+00,
2.88293200e+00, 3.08358000e+00, 3.25216900e+00,
3.36816900e+00, 3.57363700e+00, 4.08970300e+00,
4.79533700e+00, 5.36314100e+00, 6.07875300e+00,
6.96754100e+00, 7.93924700e+00, 8.66240500e+00,
9.61853700e+00, 1.07741800e+01, 1.21489500e+01,
1.39513100e+01, 1.62331400e+01, 1.91987300e+01,
2.30749700e+01, 3.25815400e+01, 4.45335200e+01,
5.84331900e+01, 6.90079400e+01, 9.48516000e+01,
1.35035800e+02, 2.00376800e+02, 2.45029900e+02,
2.73666100e+02, 2.87530300e+02, 3.16561800e+02,
3.58260600e+02, 4.11909300e+02, 4.63045500e+02,
5.01176700e+02, 5.27209900e+02, 5.36886600e+02,
8.34994200e+02, 1.80191700e+03, 2.49548700e+03,
2.75726600e+03, 2.84195000e+03, 3.28452600e+03,
3.45919200e+03, 3.54301200e+03, 3.61181700e+03,
3.70948800e+03, 4.03132300e+03, 3.92145200e+03,
3.81598200e+03, 3.71468000e+03])
self["P"] = numpy.array([ 5.00000000e-03, 1.61000000e-02, 3.84000000e-02,
7.69000000e-02, 1.37000000e-01, 2.24400000e-01,
3.45400000e-01, 5.06400000e-01, 7.14000000e-01,
9.75300000e-01, 1.29720000e+00, 1.68720000e+00,
2.15260000e+00, 2.70090000e+00, 3.33980000e+00,
4.07700000e+00, 4.92040000e+00, 5.87760000e+00,
6.95670000e+00, 8.16550000e+00, 9.51190000e+00,
1.10038000e+01, 1.26492000e+01, 1.44559000e+01,
1.64318000e+01, 1.85847000e+01, 2.09224000e+01,
2.34526000e+01, 2.61829000e+01, 2.91210000e+01,
3.22744000e+01, 3.56505000e+01, 3.92566000e+01,
4.31001000e+01, 4.71882000e+01, 5.15278000e+01,
5.61260000e+01, 6.09895000e+01, 6.61253000e+01,
7.15398000e+01, 7.72396000e+01, 8.32310000e+01,
8.95204000e+01, 9.61138000e+01, 1.03017000e+02,
1.10237000e+02, 1.17778000e+02, 1.25646000e+02,
1.33846000e+02, 1.42385000e+02, 1.51266000e+02,
1.60496000e+02, 1.70078000e+02, 1.80018000e+02,
1.90320000e+02, 2.00989000e+02, 2.12028000e+02,
2.23442000e+02, 2.35234000e+02, 2.47408000e+02,
2.59969000e+02, 2.72919000e+02, 2.86262000e+02,
3.00000000e+02, 3.14137000e+02, 3.28675000e+02,
3.43618000e+02, 3.58966000e+02, 3.74724000e+02,
3.90893000e+02, 4.07474000e+02, 4.24470000e+02,
4.41882000e+02, 4.59712000e+02, 4.77961000e+02,
4.96630000e+02, 5.15720000e+02, 5.35232000e+02,
5.55167000e+02, 5.75525000e+02, 5.96306000e+02,
6.17511000e+02, 6.39140000e+02, 6.61192000e+02,
6.83667000e+02, 7.06565000e+02, 7.29886000e+02,
7.53628000e+02, 7.77790000e+02, 8.02371000e+02,
8.27371000e+02, 8.52788000e+02, 8.78620000e+02,
9.04866000e+02, 9.31524000e+02, 9.58591000e+02,
9.86067000e+02, 1.01395000e+03, 1.04223000e+03,
1.07092000e+03, 1.10000000e+03])
self["CO2"] = numpy.array([ 375.157 , 375.1549, 375.1517, 375.1454, 375.1361, 375.123 ,
375.1071, 375.087 , 375.0668, 375.0397, 375.0107, 374.9887,
374.9787, 374.9797, 374.9977, 375.0777, 375.2648, 375.5219,
375.781 , 376.0111, 376.2142, 376.3473, 376.4444, 376.4995,
376.5366, 376.5616, 376.5587, 376.5497, 376.5067, 376.4628,
376.4328, 376.4008, 376.4098, 376.4218, 376.4898, 376.5879,
376.7209, 376.9269, 377.1439, 377.4289, 377.7349, 378.0429,
378.3499, 378.6699, 378.8849, 379.1099, 379.426 , 379.818 ,
380.22 , 380.6 , 380.9949, 381.1929, 381.3058, 381.3868,
381.3797, 381.3736, 381.4184, 381.4712, 381.603 , 381.8037,
381.9853, 382.093 , 382.2037, 382.2303, 382.2499, 382.2444,
382.2217, 382.1838, 382.1207, 382.0572, 381.9996, 381.94 ,
381.9237, 381.9096, 381.9038, 381.8944, 381.8765, 381.8654,
381.8535, 381.8452, 381.8181, 381.7832, 381.7287, 381.6712,
381.6146, 381.5577, 381.5041, 381.3403, 380.9254, 380.6148,
380.4651, 380.347 , 380.1074, 379.98 , 379.9132, 379.88 ,
379.8368, 379.7091, 379.748 , 379.7862, 379.8238])
self["CO"] = numpy.array([ 0.08586157, 0.08822425, 0.09316929, 0.1023676 , 0.1185754 ,
0.1468322 , 0.1650141 , 0.1423442 , 0.1988138 , 0.2464785 ,
0.2425355 , 0.1728059 , 0.09032735, 0.05148888, 0.04070355,
0.02625254, 0.01856279, 0.01646571, 0.01638821, 0.01663942,
0.01699772, 0.01728332, 0.01752593, 0.01768283, 0.01776983,
0.01781003, 0.01765084, 0.01745274, 0.01717194, 0.01689034,
0.01677895, 0.01666045, 0.01666065, 0.01666605, 0.01682295,
0.01706175, 0.01745855, 0.01820485, 0.01902765, 0.02024864,
0.02165474, 0.02315113, 0.02472473, 0.02648693, 0.02833782,
0.03041192, 0.03305521, 0.0363218 , 0.03988319, 0.04259638,
0.04561457, 0.04762276, 0.04920275, 0.05076773, 0.05211422,
0.05354631, 0.05613547, 0.05906392, 0.06294046, 0.06783429,
0.07255159, 0.0749807 , 0.07756863, 0.07782795, 0.07786446,
0.07780645, 0.07769262, 0.07765014, 0.07772161, 0.0777983 ,
0.07789106, 0.07798593, 0.07807734, 0.07817121, 0.07830787,
0.0784506 , 0.07860355, 0.0787634 , 0.07888221, 0.07899858,
0.07900668, 0.07900059, 0.07892907, 0.07883838, 0.07871643,
0.07857295, 0.07842407, 0.07827489, 0.07809013, 0.07792565,
0.07775791, 0.07732203, 0.07679743, 0.07609317, 0.07584283,
0.07573138, 0.07569875, 0.07569213, 0.07580885, 0.07592695,
0.07604646])
self["T"] = numpy.array([ 192.286, 199.539, 213.251, 231.442, 250.157, 264.95 ,
273.398, 275.988, 274.097, 268.487, 258.113, 251.109,
244.191, 236.22 , 228.14 , 222.084, 217.46 , 212.602,
207.757, 203.601, 201.12 , 200.706, 201.105, 201.977,
203.045, 204.056, 204.756, 205.56 , 206.453, 207.334,
208.062, 208.554, 208.87 , 209.253, 209.669, 210.106,
210.665, 211.542, 212.867, 214.547, 216.311, 217.838,
218.912, 219.576, 219.8 , 219.676, 219.51 , 219.534,
219.844, 220.107, 220.317, 220.448, 220.385, 220.111,
219.629, 218.951, 218.129, 217.334, 216.702, 216.3 ,
216.18 , 216.383, 217.044, 217.933, 219.029, 220.335,
221.923, 223.603, 225.325, 227.06 , 228.825, 230.687,
232.647, 234.715, 236.751, 238.774, 240.843, 242.923,
244.981, 246.857, 248.589, 250.26 , 251.903, 253.563,
255.223, 256.949, 258.721, 260.01 , 260.408, 261.77 ,
263.794, 265.708, 267.388, 269.546, 271.8 , 274.023,
276.301, 277.785, 277.785, 277.785, 277.785])
self["N2O"] = numpy.array([ 0.00843998, 0.00675998, 0.00550998, 0.00451998, 0.00367998,
0.00292998, 0.00182999, 0.00093999, 0.00086 , 0.00346998,
0.00574997, 0.00809995, 0.01049994, 0.01381992, 0.0167699 ,
0.01908988, 0.02167987, 0.02497986, 0.02886984, 0.03770981,
0.04611978, 0.06027973, 0.07687968, 0.09277963, 0.1074996 ,
0.1215696 , 0.1351095 , 0.1461595 , 0.1561495 , 0.1657995 ,
0.1742494 , 0.1789794 , 0.1835694 , 0.1880094 , 0.1941394 ,
0.2008494 , 0.2071794 , 0.2146194 , 0.2221894 , 0.2293794 ,
0.2371893 , 0.2450493 , 0.2529193 , 0.2607493 , 0.2684792 ,
0.2760492 , 0.2834092 , 0.2904892 , 0.2972092 , 0.3034892 ,
0.3092591 , 0.3144191 , 0.318879 , 0.320709 , 0.3223789 ,
0.3238688 , 0.3251487 , 0.3261884 , 0.3269782 , 0.327468 ,
0.3276377 , 0.3276374 , 0.3276372 , 0.3276368 , 0.3276365 ,
0.327636 , 0.3276354 , 0.3276347 , 0.3276337 , 0.3276324 ,
0.3276293 , 0.3276254 , 0.3276209 , 0.3276174 , 0.3276089 ,
0.3275958 , 0.3275743 , 0.3275597 , 0.3275503 , 0.3275458 ,
0.3275363 , 0.3275226 , 0.327505 , 0.3274883 , 0.3274758 ,
0.3274673 , 0.3274641 , 0.3273664 , 0.3270496 , 0.3268224 ,
0.3267366 , 0.3267089 , 0.3265639 , 0.3265066 , 0.3264792 ,
0.3264566 , 0.3264246 , 0.3263192 , 0.3263552 , 0.3263897 ,
0.3264229 ])
self["O3"] = numpy.array([ 0.1874915 , 0.2149024 , 0.285496 , 0.452577 , 0.6652036 ,
0.8636454 , 1.069974 , 1.339963 , 1.74506 , 2.367676 ,
3.20938 , 3.929546 , 4.632512 , 5.261088 , 5.711085 ,
5.883594 , 6.014724 , 6.133965 , 6.165117 , 6.02297 ,
5.613614 , 4.935388 , 4.273622 , 3.776325 , 3.563446 ,
3.711146 , 3.983026 , 3.953546 , 3.702878 , 3.374489 ,
3.12198 , 2.98387 , 2.886441 , 2.747311 , 2.547492 ,
2.304513 , 2.054054 , 1.818675 , 1.589585 , 1.352436 ,
1.123747 , 0.9392643 , 0.8266587 , 0.7792758 , 0.7783948 ,
0.8330277 , 0.9674253 , 0.9618644 , 0.8516127 , 0.7847689 ,
0.7466939 , 0.7239019 , 0.7068658 , 0.6795178 , 0.6289309 ,
0.552276 , 0.4571801 , 0.3607593 , 0.2771505 , 0.2096017 ,
0.1594669 , 0.125989 , 0.1039241 , 0.08256201, 0.06434251,
0.05068798, 0.04490537, 0.04165022, 0.03932574, 0.03722404,
0.03566284, 0.03436047, 0.03329735, 0.03248326, 0.03198487,
0.03175341, 0.03176373, 0.03195377, 0.03239443, 0.03349827,
0.03453746, 0.03505364, 0.03472099, 0.03362072, 0.03259216,
0.03219242, 0.03263077, 0.03360821, 0.03466293, 0.0363357 ,
0.03850264, 0.03980027, 0.03990021, 0.03985157, 0.03947275,
0.03814204, 0.03129189, 0.02650153, 0.02650445, 0.02650726,
0.02650996])
self["CH4"] = numpy.array([ 0.08335807, 0.1208587 , 0.1487335 , 0.1710033 , 0.204093 ,
0.2627246 , 0.2753886 , 0.2884114 , 0.3086782 , 0.3450409 ,
0.3803777 , 0.4445503 , 0.5298488 , 0.6725639 , 0.8022351 ,
0.9129405 , 1.009404 , 1.083974 , 1.152114 , 1.199524 ,
1.244704 , 1.297344 , 1.352464 , 1.405274 , 1.474424 ,
1.543284 , 1.609544 , 1.646784 , 1.673834 , 1.669345 ,
1.664525 , 1.659355 , 1.653835 , 1.638835 , 1.624395 ,
1.610745 , 1.598205 , 1.587065 , 1.588215 , 1.589415 ,
1.590696 , 1.592025 , 1.593435 , 1.623955 , 1.645655 ,
1.668345 , 1.686135 , 1.700245 , 1.713615 , 1.718985 ,
1.724565 , 1.730115 , 1.735775 , 1.740904 , 1.744604 ,
1.748444 , 1.750363 , 1.752232 , 1.753511 , 1.754389 ,
1.755058 , 1.755206 , 1.755345 , 1.755023 , 1.754691 ,
1.753979 , 1.753136 , 1.752172 , 1.751106 , 1.75008 ,
1.749123 , 1.748182 , 1.747308 , 1.746459 , 1.745774 ,
1.745114 , 1.74469 , 1.744322 , 1.744063 , 1.743828 ,
1.743578 , 1.743295 , 1.742992 , 1.742723 , 1.742476 ,
1.742291 , 1.742144 , 1.741505 , 1.73969 , 1.738371 ,
1.737825 , 1.737618 , 1.736797 , 1.736442 , 1.736296 ,
1.736177 , 1.736016 , 1.735456 , 1.735647 , 1.735831 ,
1.736007 ])
self["CTP"] = 500.0
self["CFRACTION"] = 0.0
self["IDG"] = 0
self["ISH"] = 0
self["ELEVATION"] = 0.0
self["S2M"]["T"] = 277.785
self["S2M"]["Q"] = 3714.67970528
self["S2M"]["O"] = 0.0265099568307
self["S2M"]["P"] = 1003.55103
self["S2M"]["U"] = 0.0
self["S2M"]["V"] = 0.0
self["S2M"]["WFETC"] = 100000.0
self["SKIN"]["SURFTYPE"] = 1
self["SKIN"]["WATERTYPE"] = 1
self["SKIN"]["T"] = 277.785
self["SKIN"]["SALINITY"] = 35.0
self["SKIN"]["FOAM_FRACTION"] = 0.0
self["SKIN"]["FASTEM"] = numpy.array([ 3. , 5. , 15. , 0.1, 0.3])
self["ZENANGLE"] = 0.0
self["AZANGLE"] = 0.0
self["SUNZENANGLE"] = 0.0
self["SUNAZANGLE"] = 0.0
self["LATITUDE"] = -47.333
self["GAS_UNITS"] = 2
self["BE"] = 0.0
self["COSBK"] = 0.0
self["DATE"] = numpy.array([2006, 8, 1])
self["TIME"] = numpy.array([0, 0, 0])
| 57.956897 | 92 | 0.566711 |
self["ID"] = "../profile-datasets-py/div83/028.py"
self["Q"] = numpy.array([ 2.70658300e+00, 2.88421200e+00, 3.36234900e+00,
4.31645100e+00, 5.09368400e+00, 5.28904200e+00,
5.19020300e+00, 5.37709100e+00, 5.81179600e+00,
6.08195300e+00, 6.10215300e+00, 6.10604300e+00,
6.12691200e+00, 6.14242200e+00, 6.13258200e+00,
6.07811300e+00, 5.93228500e+00, 5.70609700e+00,
5.40576100e+00, 5.05456400e+00, 4.69607800e+00,
4.41534100e+00, 4.18436200e+00, 3.99542400e+00,
3.83612500e+00, 3.68572600e+00, 3.53743700e+00,
3.42014800e+00, 3.34060900e+00, 3.29236900e+00,
3.26049900e+00, 3.23329000e+00, 3.19587000e+00,
3.14459000e+00, 3.07860100e+00, 3.00642100e+00,
2.93912100e+00, 2.88521200e+00, 2.84905200e+00,
2.83165200e+00, 2.82883200e+00, 2.82954200e+00,
2.82819200e+00, 2.82242200e+00, 2.80869200e+00,
2.78689200e+00, 2.75919200e+00, 2.73845300e+00,
2.73261300e+00, 2.73094300e+00, 2.76521200e+00,
2.88293200e+00, 3.08358000e+00, 3.25216900e+00,
3.36816900e+00, 3.57363700e+00, 4.08970300e+00,
4.79533700e+00, 5.36314100e+00, 6.07875300e+00,
6.96754100e+00, 7.93924700e+00, 8.66240500e+00,
9.61853700e+00, 1.07741800e+01, 1.21489500e+01,
1.39513100e+01, 1.62331400e+01, 1.91987300e+01,
2.30749700e+01, 3.25815400e+01, 4.45335200e+01,
5.84331900e+01, 6.90079400e+01, 9.48516000e+01,
1.35035800e+02, 2.00376800e+02, 2.45029900e+02,
2.73666100e+02, 2.87530300e+02, 3.16561800e+02,
3.58260600e+02, 4.11909300e+02, 4.63045500e+02,
5.01176700e+02, 5.27209900e+02, 5.36886600e+02,
8.34994200e+02, 1.80191700e+03, 2.49548700e+03,
2.75726600e+03, 2.84195000e+03, 3.28452600e+03,
3.45919200e+03, 3.54301200e+03, 3.61181700e+03,
3.70948800e+03, 4.03132300e+03, 3.92145200e+03,
3.81598200e+03, 3.71468000e+03])
self["P"] = numpy.array([ 5.00000000e-03, 1.61000000e-02, 3.84000000e-02,
7.69000000e-02, 1.37000000e-01, 2.24400000e-01,
3.45400000e-01, 5.06400000e-01, 7.14000000e-01,
9.75300000e-01, 1.29720000e+00, 1.68720000e+00,
2.15260000e+00, 2.70090000e+00, 3.33980000e+00,
4.07700000e+00, 4.92040000e+00, 5.87760000e+00,
6.95670000e+00, 8.16550000e+00, 9.51190000e+00,
1.10038000e+01, 1.26492000e+01, 1.44559000e+01,
1.64318000e+01, 1.85847000e+01, 2.09224000e+01,
2.34526000e+01, 2.61829000e+01, 2.91210000e+01,
3.22744000e+01, 3.56505000e+01, 3.92566000e+01,
4.31001000e+01, 4.71882000e+01, 5.15278000e+01,
5.61260000e+01, 6.09895000e+01, 6.61253000e+01,
7.15398000e+01, 7.72396000e+01, 8.32310000e+01,
8.95204000e+01, 9.61138000e+01, 1.03017000e+02,
1.10237000e+02, 1.17778000e+02, 1.25646000e+02,
1.33846000e+02, 1.42385000e+02, 1.51266000e+02,
1.60496000e+02, 1.70078000e+02, 1.80018000e+02,
1.90320000e+02, 2.00989000e+02, 2.12028000e+02,
2.23442000e+02, 2.35234000e+02, 2.47408000e+02,
2.59969000e+02, 2.72919000e+02, 2.86262000e+02,
3.00000000e+02, 3.14137000e+02, 3.28675000e+02,
3.43618000e+02, 3.58966000e+02, 3.74724000e+02,
3.90893000e+02, 4.07474000e+02, 4.24470000e+02,
4.41882000e+02, 4.59712000e+02, 4.77961000e+02,
4.96630000e+02, 5.15720000e+02, 5.35232000e+02,
5.55167000e+02, 5.75525000e+02, 5.96306000e+02,
6.17511000e+02, 6.39140000e+02, 6.61192000e+02,
6.83667000e+02, 7.06565000e+02, 7.29886000e+02,
7.53628000e+02, 7.77790000e+02, 8.02371000e+02,
8.27371000e+02, 8.52788000e+02, 8.78620000e+02,
9.04866000e+02, 9.31524000e+02, 9.58591000e+02,
9.86067000e+02, 1.01395000e+03, 1.04223000e+03,
1.07092000e+03, 1.10000000e+03])
self["CO2"] = numpy.array([ 375.157 , 375.1549, 375.1517, 375.1454, 375.1361, 375.123 ,
375.1071, 375.087 , 375.0668, 375.0397, 375.0107, 374.9887,
374.9787, 374.9797, 374.9977, 375.0777, 375.2648, 375.5219,
375.781 , 376.0111, 376.2142, 376.3473, 376.4444, 376.4995,
376.5366, 376.5616, 376.5587, 376.5497, 376.5067, 376.4628,
376.4328, 376.4008, 376.4098, 376.4218, 376.4898, 376.5879,
376.7209, 376.9269, 377.1439, 377.4289, 377.7349, 378.0429,
378.3499, 378.6699, 378.8849, 379.1099, 379.426 , 379.818 ,
380.22 , 380.6 , 380.9949, 381.1929, 381.3058, 381.3868,
381.3797, 381.3736, 381.4184, 381.4712, 381.603 , 381.8037,
381.9853, 382.093 , 382.2037, 382.2303, 382.2499, 382.2444,
382.2217, 382.1838, 382.1207, 382.0572, 381.9996, 381.94 ,
381.9237, 381.9096, 381.9038, 381.8944, 381.8765, 381.8654,
381.8535, 381.8452, 381.8181, 381.7832, 381.7287, 381.6712,
381.6146, 381.5577, 381.5041, 381.3403, 380.9254, 380.6148,
380.4651, 380.347 , 380.1074, 379.98 , 379.9132, 379.88 ,
379.8368, 379.7091, 379.748 , 379.7862, 379.8238])
self["CO"] = numpy.array([ 0.08586157, 0.08822425, 0.09316929, 0.1023676 , 0.1185754 ,
0.1468322 , 0.1650141 , 0.1423442 , 0.1988138 , 0.2464785 ,
0.2425355 , 0.1728059 , 0.09032735, 0.05148888, 0.04070355,
0.02625254, 0.01856279, 0.01646571, 0.01638821, 0.01663942,
0.01699772, 0.01728332, 0.01752593, 0.01768283, 0.01776983,
0.01781003, 0.01765084, 0.01745274, 0.01717194, 0.01689034,
0.01677895, 0.01666045, 0.01666065, 0.01666605, 0.01682295,
0.01706175, 0.01745855, 0.01820485, 0.01902765, 0.02024864,
0.02165474, 0.02315113, 0.02472473, 0.02648693, 0.02833782,
0.03041192, 0.03305521, 0.0363218 , 0.03988319, 0.04259638,
0.04561457, 0.04762276, 0.04920275, 0.05076773, 0.05211422,
0.05354631, 0.05613547, 0.05906392, 0.06294046, 0.06783429,
0.07255159, 0.0749807 , 0.07756863, 0.07782795, 0.07786446,
0.07780645, 0.07769262, 0.07765014, 0.07772161, 0.0777983 ,
0.07789106, 0.07798593, 0.07807734, 0.07817121, 0.07830787,
0.0784506 , 0.07860355, 0.0787634 , 0.07888221, 0.07899858,
0.07900668, 0.07900059, 0.07892907, 0.07883838, 0.07871643,
0.07857295, 0.07842407, 0.07827489, 0.07809013, 0.07792565,
0.07775791, 0.07732203, 0.07679743, 0.07609317, 0.07584283,
0.07573138, 0.07569875, 0.07569213, 0.07580885, 0.07592695,
0.07604646])
self["T"] = numpy.array([ 192.286, 199.539, 213.251, 231.442, 250.157, 264.95 ,
273.398, 275.988, 274.097, 268.487, 258.113, 251.109,
244.191, 236.22 , 228.14 , 222.084, 217.46 , 212.602,
207.757, 203.601, 201.12 , 200.706, 201.105, 201.977,
203.045, 204.056, 204.756, 205.56 , 206.453, 207.334,
208.062, 208.554, 208.87 , 209.253, 209.669, 210.106,
210.665, 211.542, 212.867, 214.547, 216.311, 217.838,
218.912, 219.576, 219.8 , 219.676, 219.51 , 219.534,
219.844, 220.107, 220.317, 220.448, 220.385, 220.111,
219.629, 218.951, 218.129, 217.334, 216.702, 216.3 ,
216.18 , 216.383, 217.044, 217.933, 219.029, 220.335,
221.923, 223.603, 225.325, 227.06 , 228.825, 230.687,
232.647, 234.715, 236.751, 238.774, 240.843, 242.923,
244.981, 246.857, 248.589, 250.26 , 251.903, 253.563,
255.223, 256.949, 258.721, 260.01 , 260.408, 261.77 ,
263.794, 265.708, 267.388, 269.546, 271.8 , 274.023,
276.301, 277.785, 277.785, 277.785, 277.785])
self["N2O"] = numpy.array([ 0.00843998, 0.00675998, 0.00550998, 0.00451998, 0.00367998,
0.00292998, 0.00182999, 0.00093999, 0.00086 , 0.00346998,
0.00574997, 0.00809995, 0.01049994, 0.01381992, 0.0167699 ,
0.01908988, 0.02167987, 0.02497986, 0.02886984, 0.03770981,
0.04611978, 0.06027973, 0.07687968, 0.09277963, 0.1074996 ,
0.1215696 , 0.1351095 , 0.1461595 , 0.1561495 , 0.1657995 ,
0.1742494 , 0.1789794 , 0.1835694 , 0.1880094 , 0.1941394 ,
0.2008494 , 0.2071794 , 0.2146194 , 0.2221894 , 0.2293794 ,
0.2371893 , 0.2450493 , 0.2529193 , 0.2607493 , 0.2684792 ,
0.2760492 , 0.2834092 , 0.2904892 , 0.2972092 , 0.3034892 ,
0.3092591 , 0.3144191 , 0.318879 , 0.320709 , 0.3223789 ,
0.3238688 , 0.3251487 , 0.3261884 , 0.3269782 , 0.327468 ,
0.3276377 , 0.3276374 , 0.3276372 , 0.3276368 , 0.3276365 ,
0.327636 , 0.3276354 , 0.3276347 , 0.3276337 , 0.3276324 ,
0.3276293 , 0.3276254 , 0.3276209 , 0.3276174 , 0.3276089 ,
0.3275958 , 0.3275743 , 0.3275597 , 0.3275503 , 0.3275458 ,
0.3275363 , 0.3275226 , 0.327505 , 0.3274883 , 0.3274758 ,
0.3274673 , 0.3274641 , 0.3273664 , 0.3270496 , 0.3268224 ,
0.3267366 , 0.3267089 , 0.3265639 , 0.3265066 , 0.3264792 ,
0.3264566 , 0.3264246 , 0.3263192 , 0.3263552 , 0.3263897 ,
0.3264229 ])
self["O3"] = numpy.array([ 0.1874915 , 0.2149024 , 0.285496 , 0.452577 , 0.6652036 ,
0.8636454 , 1.069974 , 1.339963 , 1.74506 , 2.367676 ,
3.20938 , 3.929546 , 4.632512 , 5.261088 , 5.711085 ,
5.883594 , 6.014724 , 6.133965 , 6.165117 , 6.02297 ,
5.613614 , 4.935388 , 4.273622 , 3.776325 , 3.563446 ,
3.711146 , 3.983026 , 3.953546 , 3.702878 , 3.374489 ,
3.12198 , 2.98387 , 2.886441 , 2.747311 , 2.547492 ,
2.304513 , 2.054054 , 1.818675 , 1.589585 , 1.352436 ,
1.123747 , 0.9392643 , 0.8266587 , 0.7792758 , 0.7783948 ,
0.8330277 , 0.9674253 , 0.9618644 , 0.8516127 , 0.7847689 ,
0.7466939 , 0.7239019 , 0.7068658 , 0.6795178 , 0.6289309 ,
0.552276 , 0.4571801 , 0.3607593 , 0.2771505 , 0.2096017 ,
0.1594669 , 0.125989 , 0.1039241 , 0.08256201, 0.06434251,
0.05068798, 0.04490537, 0.04165022, 0.03932574, 0.03722404,
0.03566284, 0.03436047, 0.03329735, 0.03248326, 0.03198487,
0.03175341, 0.03176373, 0.03195377, 0.03239443, 0.03349827,
0.03453746, 0.03505364, 0.03472099, 0.03362072, 0.03259216,
0.03219242, 0.03263077, 0.03360821, 0.03466293, 0.0363357 ,
0.03850264, 0.03980027, 0.03990021, 0.03985157, 0.03947275,
0.03814204, 0.03129189, 0.02650153, 0.02650445, 0.02650726,
0.02650996])
self["CH4"] = numpy.array([ 0.08335807, 0.1208587 , 0.1487335 , 0.1710033 , 0.204093 ,
0.2627246 , 0.2753886 , 0.2884114 , 0.3086782 , 0.3450409 ,
0.3803777 , 0.4445503 , 0.5298488 , 0.6725639 , 0.8022351 ,
0.9129405 , 1.009404 , 1.083974 , 1.152114 , 1.199524 ,
1.244704 , 1.297344 , 1.352464 , 1.405274 , 1.474424 ,
1.543284 , 1.609544 , 1.646784 , 1.673834 , 1.669345 ,
1.664525 , 1.659355 , 1.653835 , 1.638835 , 1.624395 ,
1.610745 , 1.598205 , 1.587065 , 1.588215 , 1.589415 ,
1.590696 , 1.592025 , 1.593435 , 1.623955 , 1.645655 ,
1.668345 , 1.686135 , 1.700245 , 1.713615 , 1.718985 ,
1.724565 , 1.730115 , 1.735775 , 1.740904 , 1.744604 ,
1.748444 , 1.750363 , 1.752232 , 1.753511 , 1.754389 ,
1.755058 , 1.755206 , 1.755345 , 1.755023 , 1.754691 ,
1.753979 , 1.753136 , 1.752172 , 1.751106 , 1.75008 ,
1.749123 , 1.748182 , 1.747308 , 1.746459 , 1.745774 ,
1.745114 , 1.74469 , 1.744322 , 1.744063 , 1.743828 ,
1.743578 , 1.743295 , 1.742992 , 1.742723 , 1.742476 ,
1.742291 , 1.742144 , 1.741505 , 1.73969 , 1.738371 ,
1.737825 , 1.737618 , 1.736797 , 1.736442 , 1.736296 ,
1.736177 , 1.736016 , 1.735456 , 1.735647 , 1.735831 ,
1.736007 ])
self["CTP"] = 500.0
self["CFRACTION"] = 0.0
self["IDG"] = 0
self["ISH"] = 0
self["ELEVATION"] = 0.0
self["S2M"]["T"] = 277.785
self["S2M"]["Q"] = 3714.67970528
self["S2M"]["O"] = 0.0265099568307
self["S2M"]["P"] = 1003.55103
self["S2M"]["U"] = 0.0
self["S2M"]["V"] = 0.0
self["S2M"]["WFETC"] = 100000.0
self["SKIN"]["SURFTYPE"] = 1
self["SKIN"]["WATERTYPE"] = 1
self["SKIN"]["T"] = 277.785
self["SKIN"]["SALINITY"] = 35.0
self["SKIN"]["FOAM_FRACTION"] = 0.0
self["SKIN"]["FASTEM"] = numpy.array([ 3. , 5. , 15. , 0.1, 0.3])
self["ZENANGLE"] = 0.0
self["AZANGLE"] = 0.0
self["SUNZENANGLE"] = 0.0
self["SUNAZANGLE"] = 0.0
self["LATITUDE"] = -47.333
self["GAS_UNITS"] = 2
self["BE"] = 0.0
self["COSBK"] = 0.0
self["DATE"] = numpy.array([2006, 8, 1])
self["TIME"] = numpy.array([0, 0, 0])
| true | true |
f725070abe59440c81ec609b73017feaae140853 | 4,547 | py | Python | mkt/home/tests/test_views.py | oremj/zamboni | a751dc6d22f7af947da327b0a091cbab0a999f49 | [
"BSD-3-Clause"
] | null | null | null | mkt/home/tests/test_views.py | oremj/zamboni | a751dc6d22f7af947da327b0a091cbab0a999f49 | [
"BSD-3-Clause"
] | null | null | null | mkt/home/tests/test_views.py | oremj/zamboni | a751dc6d22f7af947da327b0a091cbab0a999f49 | [
"BSD-3-Clause"
] | null | null | null | import datetime
from django.conf import settings
from nose.tools import eq_
from pyquery import PyQuery as pq
from amo.tests import app_factory, mock_es
from amo.urlresolvers import reverse
import mkt
from mkt.browse.tests.test_views import BrowseBase
from mkt.webapps.models import Webapp
from mkt.zadmin.models import FeaturedApp, FeaturedAppRegion
class TestHome(BrowseBase):
    # Functional tests for the Marketplace home page: featured-app selection
    # per region/device, PayPal JS inclusion and time-limited featuring.
    # NOTE(review): setup_featured()/get_pks()/make_featured() come from
    # BrowseBase (mkt.browse.tests.test_views); semantics assumed from usage.

    def setUp(self):
        super(TestHome, self).setUp()
        self.url = reverse('home')
        # TODO: Remove log-in bit when we remove `request.can_view_consumer`.
        assert self.client.login(username='steamcube@mozilla.com',
                                 password='password')

    @mock_es
    def test_no_paypal_js(self):
        # Waffle switch off -> the PayPal JS lib must not be embedded.
        self.create_switch('enabled-paypal', active=False)
        resp = self.client.get(self.url)
        assert not settings.PAYPAL_JS_URL in resp.content, (
            'When PayPal is disabled, its JS lib should not load')

    @mock_es
    def test_load_paypal_js(self):
        # Waffle switch on -> the PayPal JS lib must be embedded.
        self.create_switch('enabled-paypal')
        resp = self.client.get(self.url)
        assert settings.PAYPAL_JS_URL in resp.content, (
            'When PayPal is enabled, its JS lib should load')

    @mock_es
    def test_page(self):
        # Smoke test: the home page renders with the expected template.
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        self.assertTemplateUsed(r, 'home/home.html')

    @mock_es
    def test_featured_desktop(self):
        a, b, c, d = self.setup_featured(4)
        # Check that the Home featured app is shown only in US region.
        for region in mkt.regions.REGIONS_DICT:
            pks = self.get_pks('featured', self.url,
                               {'region': region})
            self.assertSetEqual(pks, [c.id, d.id] if region == 'us' else [])

    @mock_es
    def test_featured_mobile(self):
        a, b, c, d = self.setup_featured(4)
        # Check that the Home featured app is shown only in US region.
        # On mobile only the last featured app is expected.
        for region in mkt.regions.REGIONS_DICT:
            pks = self.get_pks('featured', self.url,
                               {'region': region, 'mobile': 'true'})
            self.assertSetEqual(pks, [d.id] if region == 'us' else [])

    def test_featured_src(self):
        # Featured tiles must carry the `src=mkt-home` tracking parameter.
        _, _, app = self.setup_featured()
        r = self.client.get(self.url)
        eq_(pq(r.content)('.mkt-tile').attr('href'),
            app.get_detail_url() + '?src=mkt-home')

    def test_tile_no_rating_link(self):
        # Home tiles must not link to the ratings page.
        r = self.client.get(self.url)
        assert not pq(r.content)('.mkt-tile .rating_link')

    @mock_es
    def test_featured_region_exclusions(self):
        self._test_featured_region_exclusions()

    @mock_es
    def test_featured_fallback_to_worldwide(self):
        a, b, c = self.setup_featured()
        # NOTE(review): `xrange` implies this file targets Python 2.
        worldwide_apps = [app_factory().id for x in xrange(5)]
        for app in worldwide_apps:
            fa = FeaturedApp.objects.create(app_id=app, category=None)
            FeaturedAppRegion.objects.create(featured_app=fa,
                region=mkt.regions.WORLDWIDE.id)
        # In US: 1 US-featured app + 5 Worldwide-featured app.
        # Elsewhere: 6 Worldwide-featured apps.
        for region in mkt.regions.REGIONS_DICT:
            if region == 'us':
                expected = [c.id] + worldwide_apps[:5]
            else:
                expected = worldwide_apps
            eq_(self.get_pks('featured', self.url, {'region': region}),
                expected)

    def test_popular(self):
        self._test_popular()

    def test_popular_region_exclusions(self):
        self._test_popular_region_exclusions()

    def make_time_limited_feature(self):
        # Helper: feature a fresh app only between 2012-01-01 and 2012-02-01.
        a = app_factory()
        fa = self.make_featured(app=a, category=None)
        fa.start_date = datetime.date(2012, 1, 1)
        fa.end_date = datetime.date(2012, 2, 1)
        fa.save()
        return a

    @mock_es
    def test_featured_time_excluded(self):
        # Dates inside the featuring window -> the app must be listed.
        # NOTE(review): the method names of this and the next test look
        # swapped relative to what they assert -- confirm intent.
        # NOTE(review): `Webapp.now` is monkeypatched and never restored;
        # the lambda closes over the loop variable late, but get_pks() runs
        # within the same iteration, so the current `d` is what is seen.
        a = self.make_time_limited_feature()
        for d in [datetime.date(2012, 1, 1),
                  datetime.date(2012, 1, 15),
                  datetime.date(2012, 2, 1)]:
            Webapp.now = staticmethod(lambda: d)
            eq_(self.get_pks('featured', self.url, {'region': 'us'}),
                [a.id])

    @mock_es
    def test_featured_time_included(self):
        # Dates outside the featuring window -> the app must not be listed.
        self.make_time_limited_feature()
        for d in [datetime.date(2011, 12, 15),
                  datetime.date(2012, 2, 2)]:
            Webapp.now = staticmethod(lambda: d)
            eq_(self.get_pks('featured', self.url, {'region': 'us'}), [])
| 35.248062 | 77 | 0.610073 | import datetime
from django.conf import settings
from nose.tools import eq_
from pyquery import PyQuery as pq
from amo.tests import app_factory, mock_es
from amo.urlresolvers import reverse
import mkt
from mkt.browse.tests.test_views import BrowseBase
from mkt.webapps.models import Webapp
from mkt.zadmin.models import FeaturedApp, FeaturedAppRegion
class TestHome(BrowseBase):
def setUp(self):
super(TestHome, self).setUp()
self.url = reverse('home')
assert self.client.login(username='steamcube@mozilla.com',
password='password')
@mock_es
def test_no_paypal_js(self):
self.create_switch('enabled-paypal', active=False)
resp = self.client.get(self.url)
assert not settings.PAYPAL_JS_URL in resp.content, (
'When PayPal is disabled, its JS lib should not load')
@mock_es
def test_load_paypal_js(self):
self.create_switch('enabled-paypal')
resp = self.client.get(self.url)
assert settings.PAYPAL_JS_URL in resp.content, (
'When PayPal is enabled, its JS lib should load')
@mock_es
def test_page(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
self.assertTemplateUsed(r, 'home/home.html')
@mock_es
def test_featured_desktop(self):
a, b, c, d = self.setup_featured(4)
for region in mkt.regions.REGIONS_DICT:
pks = self.get_pks('featured', self.url,
{'region': region})
self.assertSetEqual(pks, [c.id, d.id] if region == 'us' else [])
@mock_es
def test_featured_mobile(self):
a, b, c, d = self.setup_featured(4)
for region in mkt.regions.REGIONS_DICT:
pks = self.get_pks('featured', self.url,
{'region': region, 'mobile': 'true'})
self.assertSetEqual(pks, [d.id] if region == 'us' else [])
def test_featured_src(self):
_, _, app = self.setup_featured()
r = self.client.get(self.url)
eq_(pq(r.content)('.mkt-tile').attr('href'),
app.get_detail_url() + '?src=mkt-home')
def test_tile_no_rating_link(self):
r = self.client.get(self.url)
assert not pq(r.content)('.mkt-tile .rating_link')
@mock_es
def test_featured_region_exclusions(self):
self._test_featured_region_exclusions()
@mock_es
def test_featured_fallback_to_worldwide(self):
a, b, c = self.setup_featured()
worldwide_apps = [app_factory().id for x in xrange(5)]
for app in worldwide_apps:
fa = FeaturedApp.objects.create(app_id=app, category=None)
FeaturedAppRegion.objects.create(featured_app=fa,
region=mkt.regions.WORLDWIDE.id)
for region in mkt.regions.REGIONS_DICT:
if region == 'us':
expected = [c.id] + worldwide_apps[:5]
else:
expected = worldwide_apps
eq_(self.get_pks('featured', self.url, {'region': region}),
expected)
def test_popular(self):
self._test_popular()
def test_popular_region_exclusions(self):
self._test_popular_region_exclusions()
def make_time_limited_feature(self):
a = app_factory()
fa = self.make_featured(app=a, category=None)
fa.start_date = datetime.date(2012, 1, 1)
fa.end_date = datetime.date(2012, 2, 1)
fa.save()
return a
@mock_es
def test_featured_time_excluded(self):
a = self.make_time_limited_feature()
for d in [datetime.date(2012, 1, 1),
datetime.date(2012, 1, 15),
datetime.date(2012, 2, 1)]:
Webapp.now = staticmethod(lambda: d)
eq_(self.get_pks('featured', self.url, {'region': 'us'}),
[a.id])
@mock_es
def test_featured_time_included(self):
self.make_time_limited_feature()
for d in [datetime.date(2011, 12, 15),
datetime.date(2012, 2, 2)]:
Webapp.now = staticmethod(lambda: d)
eq_(self.get_pks('featured', self.url, {'region': 'us'}), [])
| true | true |
f725072ba5ab89efad25c3839e4eab5683dd5e8a | 9,159 | py | Python | epgbackup/src/plugin.py | builder08/enigma2-plugins_2 | f8f08b947e23c1c86b011492a7323125774c3482 | [
"OLDAP-2.3"
] | null | null | null | epgbackup/src/plugin.py | builder08/enigma2-plugins_2 | f8f08b947e23c1c86b011492a7323125774c3482 | [
"OLDAP-2.3"
] | null | null | null | epgbackup/src/plugin.py | builder08/enigma2-plugins_2 | f8f08b947e23c1c86b011492a7323125774c3482 | [
"OLDAP-2.3"
] | null | null | null | # for localized messages
from . import _
# Config
from Components.config import config, ConfigYesNo, ConfigNumber, ConfigSelection, \
ConfigSubsection, ConfigSelectionNumber, ConfigDirectory, NoSave
from Screens.MessageBox import MessageBox
from Screens.Standby import TryQuitMainloop
from Tools.BoundFunction import boundFunction
# Error-print
from EPGBackupTools import debugOut, PLUGIN_VERSION
from traceback import format_exc
# Localized prefix put in front of every menu entry of this plugin.
extPrefix = _("EXTENSIONMENU_PREFIX")

# Plugin configuration tree: config.plugins.epgbackup.*
config.plugins.epgbackup = ConfigSubsection()
# Do not change order of choices
config.plugins.epgbackup.show_setup_in = ConfigSelection(choices=[
    ("extension", _("extensions")),
    ("plugin", _("pluginmenue")),
    ("both", _("extensions") + "/" + _("pluginmenue")),
    ("system", _("systemmenue")),
    ], default="both")
# Extension-menu shortcuts for manually triggering backup/restore.
config.plugins.epgbackup.show_make_backup_in_extmenu = ConfigYesNo(default=False)
config.plugins.epgbackup.show_backuprestore_in_extmenu = ConfigYesNo(default=False)
config.plugins.epgbackup.backup_enabled = ConfigYesNo(default=True)
config.plugins.epgbackup.make_backup_after_unsuccess_restore = ConfigYesNo(default=True)
# Hook into the EPGRefresh plugin: make a backup after a refresh run.
config.plugins.epgbackup.callAfterEPGRefresh = ConfigYesNo(default=True)
# Interval of the periodic backup timer; value is in minutes, "-1" = off.
config.plugins.epgbackup.backupSaveInterval = ConfigSelection(choices=[
    ("-1", _("backup timer disabled")),
    ("30", _("30 minutes")),
    ("60", _("1 hour")),
    ("300", _("6 hours")),
    ("1200", _("1 day")),
    ], default="-1")
config.plugins.epgbackup.show_messages_background = ConfigYesNo(default=True)
# Validity limits for backup files: minimal size and maximal age.
# NOTE(review): units are not visible here -- confirm in EPGBackupTools.
config.plugins.epgbackup.filesize_valid = ConfigSelectionNumber(min=1,
    max=20, stepwidth=1, default=3, wraparound=True)
config.plugins.epgbackup.timespan_valid = ConfigNumber(default=7)
# NoSave: pure UI toggle, never persisted to the settings file.
config.plugins.epgbackup.showadvancedoptions = NoSave(ConfigYesNo(default=False))
config.plugins.epgbackup.epgwrite_wait = ConfigNumber(default=3)
config.plugins.epgbackup.showin_usr_scripts = ConfigYesNo(default=True)
# Strategy used to pick which backup file gets restored.
config.plugins.epgbackup.backup_strategy = ConfigSelection(choices=[
    ("youngest_before_biggest", _("Youngest before Biggest"), _("The youngest file from the saved backup-files will be restored.\nIf it is older than the current existing EPG-file and the EPG-file isn't valid then the biggest backup-file will be restored.")),
    ("biggest_before_youngest", _("Biggest before Youngest"), _("The biggest file from the saved backup-files will be restored.\nIf it is smaller than the current existing EPG-file and the EPG-file isn't valid then the youngest backup-file will be restored.")),
    ("youngest", _("Only younger"), _("The backup-file will only be restored if it is younger than the current existing EPG-file.")),
    ("biggest", _("Only bigger"), _("The backup-file will only be restored if it is greater than the current existing EPG-file.")),
    ], default="youngest_before_biggest"
)
# Debugging / logging options.
config.plugins.epgbackup.enable_debug = ConfigYesNo(default=False)
config.plugins.epgbackup.plugin_debug_in_file = ConfigYesNo(default=False)
config.plugins.epgbackup.backup_log_dir = ConfigDirectory(default="/tmp")
config.plugins.epgbackup.max_boot_count = ConfigNumber(default=3)
# Best-effort registration of the MPHelp pages: prefer the localized
# mphelp.xml, fall back to the English one shipped with the plugin.
try:
    from Components.Language import language
    from Plugins.SystemPlugins.MPHelp import registerHelp, XMLHelpReader
    from Tools.Directories import resolveFilename, SCOPE_PLUGINS, fileExists
    lang = language.getLanguage()[:2]
    HELPPATH = resolveFilename(SCOPE_PLUGINS, "Extensions/EPGBackup")
    if fileExists(HELPPATH + "/locale/" + str(lang) + "/mphelp.xml"):
        helpfile = HELPPATH + "/locale/" + str(lang) + "/mphelp.xml"
    else:
        helpfile = HELPPATH + "/mphelp.xml"
    reader = XMLHelpReader(helpfile)
    epgBackuphHelp = registerHelp(*reader)
# NOTE(review): bare `except:` left as-is -- it deliberately degrades to
# "no help available" (epgBackuphHelp = None) on any failure, e.g. when
# the MPHelp plugin is not installed.
except:
    debugOut("Help-Error:\n" + str(format_exc()), forced=True)
    epgBackuphHelp = None
# Plugin
# Session-bound EPGBackupSupport instance, created in autostart().
epgbackup = None

from Components.PluginComponent import plugins
from Plugins.Plugin import PluginDescriptor

# True if the UserScripts plugin is installed (detected in autostart()).
gUserScriptExists = False
# Autostart
def autostart(reason, **kwargs):
    """Session-start/autostart hook: create the EPGBackupSupport instance.

    :param reason: 0 on startup, 1 on shutdown (enigma2 convention)
    :param kwargs: contains "session" when called for WHERE_SESSIONSTART
    """
    global epgbackup
    global gUserScriptExists
    if reason == 0 and "session" in kwargs:
        session = kwargs["session"]
        from EPGBackupSupport import EPGBackupSupport
        try:
            epgbackup = EPGBackupSupport(session)
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any real error is still only logged.
        except Exception:
            debugOut("Error while initializing EPGBackupSupport:\n" + str(format_exc()), forced=True)

    # Probe for the optional UserScripts plugin; the imported name itself
    # is not needed, only the fact that the import succeeds.
    try:
        from Plugins.Extensions.UserScripts.plugin import UserScriptsConfiguration
        gUserScriptExists = True
        del UserScriptsConfiguration
    except Exception:
        # Plugin absent or broken -- either way, treat it as unavailable.
        pass
def openconfig(session, **kwargs):
    """Open the EPGBackup configuration screen for the given session."""
    try:
        from EPGBackupConfig import EPGBackupConfig
        session.openWithCallback(doneConfiguring, EPGBackupConfig)
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
    # propagate; import/screen errors are still only logged.
    except Exception:
        debugOut("Config-Import-Error:\n" + str(format_exc()), forced=True)
def showinSetup(menuid):
    """Menu hook: expose the setup entry inside the "system" menu only.

    Returns a list with one (title, callable, key, weight) tuple for the
    "system" menu id and an empty list for every other menu.
    """
    if menuid != "system":
        return []
    entry = (extPrefix + " " + _("EXTENSIONNAME_SETUP"), openconfig,
             "EPGBackupConfig", None)
    return [entry]
def makeBackup(session, **kwargs):
    # Extension-menu hook: trigger an interactive backup on the
    # session-bound EPGBackupSupport instance created in autostart().
    epgbackup.makeBackup(interactive=True)
def restoreBackup(session, **kwargs):
    # Extension-menu hook: restore the default backup via the
    # session-bound EPGBackupSupport instance created in autostart().
    epgbackup.forceDefaultRestore()
def doneConfiguring(session, needsRestart):
    """Config-screen callback: offer a GUI restart when one is required."""
    if not needsRestart:
        return
    # Ask the user; the MessageBox times out after 30 seconds.
    session.openWithCallback(boundFunction(restartGUICB, session), MessageBox,
        _("To apply your Changes the GUI has to be restarted.\nDo you want to restart the GUI now?"),
        MessageBox.TYPE_YESNO, title=_("EPGBackup Config V %s") % (PLUGIN_VERSION), timeout=30)
def restartGUICB(session, answer):
    """MessageBox callback: restart the GUI iff the user answered yes."""
    # Identity check on True is intentional: a MessageBox timeout may
    # deliver something other than a strict boolean.
    if answer is not True:
        return
    session.open(TryQuitMainloop, 3)
# Static PluginDescriptors, (de)registered at runtime by PluginHousekeeping()
# and returned from Plugins() depending on the current configuration.

# Setup entry shown in the extensions menu.
SetupPlugDescExt = PluginDescriptor(name=extPrefix + " " + _("EXTENSIONNAME_SETUP"),
    description=_("Backup and restore EPG Data, including integration of EPGRefresh-plugin"), where=PluginDescriptor.WHERE_EXTENSIONSMENU,
    fnc=openconfig,
    needsRestart=False)
# Same setup entry, shown in the plugin browser instead.
SetupPlugDescPlug = PluginDescriptor(name=extPrefix + " " + _("EXTENSIONNAME_SETUP"),
    description=_("Backup and restore EPG Data, including integration of EPGRefresh-plugin"), where=PluginDescriptor.WHERE_PLUGINMENU,
    fnc=openconfig,
    needsRestart=False)
# Shortcut in the extensions menu to start a backup immediately.
MakePlugDescExt = PluginDescriptor(name=extPrefix + " " + _("Make Backup"),
    description=_("Start making a Backup"), where=PluginDescriptor.WHERE_EXTENSIONSMENU,
    fnc=makeBackup,
    needsRestart=False)
# Shortcut in the extensions menu to start a restore immediately.
RestorePlugDescExt = PluginDescriptor(name=extPrefix + " " + _("Restore Backup"),
    description=_("Start a Restore of a Backup"), where=PluginDescriptor.WHERE_EXTENSIONSMENU,
    fnc=restoreBackup,
    needsRestart=False)
def AdjustPlugin(enable, PlugDescriptor):
    """Register or unregister a PluginDescriptor at runtime.

    :param enable: True to add, False to remove the descriptor
    :param PlugDescriptor: the PluginDescriptor to (de)register
    """
    try:
        if enable:
            plugins.addPlugin(PlugDescriptor)
        else:
            plugins.removePlugin(PlugDescriptor)
    except ValueError:
        # removePlugin raises ValueError for a descriptor that is not
        # currently registered -- harmless here.
        pass
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
    # propagate; unexpected errors are still only logged.
    except Exception:
        debugOut("AdjustPlugin-Error:\n" + str(format_exc()), forced=True)
def PluginHousekeeping(configentry):
    """Config notifier: add/remove menu entries when a setting changes.

    :param configentry: the config element whose value just changed
    """
    to_add = []
    to_remove = []
    if configentry == config.plugins.epgbackup.show_setup_in:
        # The system-menu case needs no runtime adjustment because a GUI
        # restart is required for it anyway.
        # value == extension: prior value was "both", so only the plugin
        # entry has to go; value == both: prior value was "plugin", so only
        # the extension entry has to be added.
        chosen = config.plugins.epgbackup.show_setup_in.value
        if chosen == "extension":
            to_remove.append(SetupPlugDescPlug)
        elif chosen == "plugin":
            to_add.append(SetupPlugDescPlug)
            to_remove.append(SetupPlugDescExt)
        elif chosen == "both":
            to_add.append(SetupPlugDescExt)
    elif configentry == config.plugins.epgbackup.show_make_backup_in_extmenu:
        (to_add if configentry.value else to_remove).append(MakePlugDescExt)
    elif configentry == config.plugins.epgbackup.show_backuprestore_in_extmenu:
        (to_add if configentry.value else to_remove).append(RestorePlugDescExt)
    # Removals first, then additions, mirroring the original ordering.
    for descriptor in to_remove:
        AdjustPlugin(False, descriptor)
    for descriptor in to_add:
        AdjustPlugin(True, descriptor)
# React immediately to menu-visibility changes (no GUI restart needed);
# initial_call=False avoids (de)registering during module import.
config.plugins.epgbackup.show_setup_in.addNotifier(PluginHousekeeping, initial_call=False, immediate_feedback=True)
config.plugins.epgbackup.show_make_backup_in_extmenu.addNotifier(PluginHousekeeping, initial_call=False, immediate_feedback=True)
config.plugins.epgbackup.show_backuprestore_in_extmenu.addNotifier(PluginHousekeeping, initial_call=False, immediate_feedback=True)
def Plugins(**kwargs):
    """enigma2 entry point: build the list of PluginDescriptors.

    The autostart descriptor is always present; the setup/backup/restore
    entries depend on the current configuration values.
    """
    descriptors = [
        PluginDescriptor(
            where=[PluginDescriptor.WHERE_SESSIONSTART, PluginDescriptor.WHERE_AUTOSTART],
            fnc=autostart)
    ]
    setup_location = config.plugins.epgbackup.show_setup_in.value
    if setup_location == "system":
        descriptors.append(PluginDescriptor(
            name=extPrefix + " " + _("EXTENSIONNAME_SETUP"),
            description=_("Keep EPG-Data over Crashes"),
            where=PluginDescriptor.WHERE_MENU,
            fnc=showinSetup,
            needsRestart=False)
        )
    else:
        if setup_location in ("plugin", "both"):
            descriptors.append(SetupPlugDescPlug)
        if setup_location in ("extension", "both"):
            descriptors.append(SetupPlugDescExt)
    if config.plugins.epgbackup.show_make_backup_in_extmenu.value:
        descriptors.append(MakePlugDescExt)
    if config.plugins.epgbackup.show_backuprestore_in_extmenu.value:
        descriptors.append(RestorePlugDescExt)
    return descriptors
| 39.821739 | 259 | 0.780653 |
from . import _
from Components.config import config, ConfigYesNo, ConfigNumber, ConfigSelection, \
ConfigSubsection, ConfigSelectionNumber, ConfigDirectory, NoSave
from Screens.MessageBox import MessageBox
from Screens.Standby import TryQuitMainloop
from Tools.BoundFunction import boundFunction
from EPGBackupTools import debugOut, PLUGIN_VERSION
from traceback import format_exc
extPrefix = _("EXTENSIONMENU_PREFIX")
config.plugins.epgbackup = ConfigSubsection()
config.plugins.epgbackup.show_setup_in = ConfigSelection(choices=[
("extension", _("extensions")),
("plugin", _("pluginmenue")),
("both", _("extensions") + "/" + _("pluginmenue")),
("system", _("systemmenue")),
], default="both")
config.plugins.epgbackup.show_make_backup_in_extmenu = ConfigYesNo(default=False)
config.plugins.epgbackup.show_backuprestore_in_extmenu = ConfigYesNo(default=False)
config.plugins.epgbackup.backup_enabled = ConfigYesNo(default=True)
config.plugins.epgbackup.make_backup_after_unsuccess_restore = ConfigYesNo(default=True)
config.plugins.epgbackup.callAfterEPGRefresh = ConfigYesNo(default=True)
config.plugins.epgbackup.backupSaveInterval = ConfigSelection(choices=[
("-1", _("backup timer disabled")),
("30", _("30 minutes")),
("60", _("1 hour")),
("300", _("6 hours")),
("1200", _("1 day")),
], default="-1")
config.plugins.epgbackup.show_messages_background = ConfigYesNo(default=True)
config.plugins.epgbackup.filesize_valid = ConfigSelectionNumber(min=1,
max=20, stepwidth=1, default=3, wraparound=True)
config.plugins.epgbackup.timespan_valid = ConfigNumber(default=7)
config.plugins.epgbackup.showadvancedoptions = NoSave(ConfigYesNo(default=False))
config.plugins.epgbackup.epgwrite_wait = ConfigNumber(default=3)
config.plugins.epgbackup.showin_usr_scripts = ConfigYesNo(default=True)
config.plugins.epgbackup.backup_strategy = ConfigSelection(choices=[
("youngest_before_biggest", _("Youngest before Biggest"), _("The youngest file from the saved backup-files will be restored.\nIf it is older than the current existing EPG-file and the EPG-file isn't valid then the biggest backup-file will be restored.")),
("biggest_before_youngest", _("Biggest before Youngest"), _("The biggest file from the saved backup-files will be restored.\nIf it is smaller than the current existing EPG-file and the EPG-file isn't valid then the youngest backup-file will be restored.")),
("youngest", _("Only younger"), _("The backup-file will only be restored if it is younger than the current existing EPG-file.")),
("biggest", _("Only bigger"), _("The backup-file will only be restored if it is greater than the current existing EPG-file.")),
], default="youngest_before_biggest"
)
config.plugins.epgbackup.enable_debug = ConfigYesNo(default=False)
config.plugins.epgbackup.plugin_debug_in_file = ConfigYesNo(default=False)
config.plugins.epgbackup.backup_log_dir = ConfigDirectory(default="/tmp")
config.plugins.epgbackup.max_boot_count = ConfigNumber(default=3)
try:
from Components.Language import language
from Plugins.SystemPlugins.MPHelp import registerHelp, XMLHelpReader
from Tools.Directories import resolveFilename, SCOPE_PLUGINS, fileExists
lang = language.getLanguage()[:2]
HELPPATH = resolveFilename(SCOPE_PLUGINS, "Extensions/EPGBackup")
if fileExists(HELPPATH + "/locale/" + str(lang) + "/mphelp.xml"):
helpfile = HELPPATH + "/locale/" + str(lang) + "/mphelp.xml"
else:
helpfile = HELPPATH + "/mphelp.xml"
reader = XMLHelpReader(helpfile)
epgBackuphHelp = registerHelp(*reader)
except:
debugOut("Help-Error:\n" + str(format_exc()), forced=True)
epgBackuphHelp = None
epgbackup = None
from Components.PluginComponent import plugins
from Plugins.Plugin import PluginDescriptor
gUserScriptExists = False
def autostart(reason, **kwargs):
global epgbackup
global gUserScriptExists
if reason == 0 and "session" in kwargs:
session = kwargs["session"]
from EPGBackupSupport import EPGBackupSupport
try:
epgbackup = EPGBackupSupport(session)
except:
debugOut("Error while initializing EPGBackupSupport:\n" + str(format_exc()), forced=True)
try:
from Plugins.Extensions.UserScripts.plugin import UserScriptsConfiguration
gUserScriptExists = True
del UserScriptsConfiguration
except:
pass
def openconfig(session, **kwargs):
try:
from EPGBackupConfig import EPGBackupConfig
session.openWithCallback(doneConfiguring, EPGBackupConfig)
except:
debugOut("Config-Import-Error:\n" + str(format_exc()), forced=True)
def showinSetup(menuid):
if menuid == "system":
return [(extPrefix + " " + _("EXTENSIONNAME_SETUP"), openconfig, "EPGBackupConfig", None)]
return []
def makeBackup(session, **kwargs):
epgbackup.makeBackup(interactive=True)
def restoreBackup(session, **kwargs):
epgbackup.forceDefaultRestore()
def doneConfiguring(session, needsRestart):
if needsRestart:
session.openWithCallback(boundFunction(restartGUICB, session), MessageBox,
_("To apply your Changes the GUI has to be restarted.\nDo you want to restart the GUI now?"),
MessageBox.TYPE_YESNO, title=_("EPGBackup Config V %s") % (PLUGIN_VERSION), timeout=30)
def restartGUICB(session, answer):
if answer is True:
session.open(TryQuitMainloop, 3)
SetupPlugDescExt = PluginDescriptor(name=extPrefix + " " + _("EXTENSIONNAME_SETUP"),
description=_("Backup and restore EPG Data, including integration of EPGRefresh-plugin"), where=PluginDescriptor.WHERE_EXTENSIONSMENU,
fnc=openconfig,
needsRestart=False)
SetupPlugDescPlug = PluginDescriptor(name=extPrefix + " " + _("EXTENSIONNAME_SETUP"),
description=_("Backup and restore EPG Data, including integration of EPGRefresh-plugin"), where=PluginDescriptor.WHERE_PLUGINMENU,
fnc=openconfig,
needsRestart=False)
MakePlugDescExt = PluginDescriptor(name=extPrefix + " " + _("Make Backup"),
description=_("Start making a Backup"), where=PluginDescriptor.WHERE_EXTENSIONSMENU,
fnc=makeBackup,
needsRestart=False)
RestorePlugDescExt = PluginDescriptor(name=extPrefix + " " + _("Restore Backup"),
description=_("Start a Restore of a Backup"), where=PluginDescriptor.WHERE_EXTENSIONSMENU,
fnc=restoreBackup,
needsRestart=False)
def AdjustPlugin(enable, PlugDescriptor):
try:
if enable:
plugins.addPlugin(PlugDescriptor)
else:
plugins.removePlugin(PlugDescriptor)
except ValueError:
pass
except:
debugOut("AdjustPlugin-Error:\n" + str(format_exc()), forced=True)
def PluginHousekeeping(configentry):
PlugDescInstall = []
PlugDescDeinstall = []
if configentry == config.plugins.epgbackup.show_setup_in:
if config.plugins.epgbackup.show_setup_in.value == "extension":
PlugDescDeinstall.append(SetupPlugDescPlug)
elif config.plugins.epgbackup.show_setup_in.value == "plugin":
PlugDescInstall.append(SetupPlugDescPlug)
PlugDescDeinstall.append(SetupPlugDescExt)
elif config.plugins.epgbackup.show_setup_in.value == "both":
PlugDescInstall.append(SetupPlugDescExt)
elif configentry == config.plugins.epgbackup.show_make_backup_in_extmenu:
if configentry.value:
PlugDescInstall.append(MakePlugDescExt)
else:
PlugDescDeinstall.append(MakePlugDescExt)
elif configentry == config.plugins.epgbackup.show_backuprestore_in_extmenu:
if configentry.value:
PlugDescInstall.append(RestorePlugDescExt)
else:
PlugDescDeinstall.append(RestorePlugDescExt)
for PlugDescriptor in PlugDescDeinstall:
AdjustPlugin(False, PlugDescriptor)
for PlugDescriptor in PlugDescInstall:
AdjustPlugin(True, PlugDescriptor)
config.plugins.epgbackup.show_setup_in.addNotifier(PluginHousekeeping, initial_call=False, immediate_feedback=True)
config.plugins.epgbackup.show_make_backup_in_extmenu.addNotifier(PluginHousekeeping, initial_call=False, immediate_feedback=True)
config.plugins.epgbackup.show_backuprestore_in_extmenu.addNotifier(PluginHousekeeping, initial_call=False, immediate_feedback=True)
def Plugins(**kwargs):
pluginList = [
PluginDescriptor(
where=[PluginDescriptor.WHERE_SESSIONSTART, PluginDescriptor.WHERE_AUTOSTART],
fnc=autostart)
]
if config.plugins.epgbackup.show_setup_in.value == "system":
pluginList.append(PluginDescriptor(
name=extPrefix + " " + _("EXTENSIONNAME_SETUP"),
description=_("Keep EPG-Data over Crashes"),
where=PluginDescriptor.WHERE_MENU,
fnc=showinSetup,
needsRestart=False)
)
else:
if config.plugins.epgbackup.show_setup_in.value in ("plugin", "both"):
pluginList.append(SetupPlugDescPlug)
if config.plugins.epgbackup.show_setup_in.value in ("extension", "both"):
pluginList.append(SetupPlugDescExt)
if config.plugins.epgbackup.show_make_backup_in_extmenu.value:
pluginList.append(MakePlugDescExt)
if config.plugins.epgbackup.show_backuprestore_in_extmenu.value:
pluginList.append(RestorePlugDescExt)
return pluginList
| true | true |
f725089c2e562403e45979d33cb8bab9a94933e2 | 6,399 | py | Python | OMG/env/random_load.py | Webbah/sec-for-reinforcement-learning | 19db622dce4963d25cb1b6e4ae12ddf98b6d27d2 | [
"MIT"
] | 2 | 2021-12-16T12:49:26.000Z | 2022-01-28T19:18:43.000Z | OMG/env/random_load.py | Webbah/sec-for-reinforcement-learning | 19db622dce4963d25cb1b6e4ae12ddf98b6d27d2 | [
"MIT"
] | null | null | null | OMG/env/random_load.py | Webbah/sec-for-reinforcement-learning | 19db622dce4963d25cb1b6e4ae12ddf98b6d27d2 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
from openmodelica_microgrid_gym.util import RandProcess
class RandomLoad:
    """Randomized load profiles for the microgrid environment.

    Samples load values either from a stochastic process
    (``self.rand_process``) -- with optional load steps and drifts -- or
    from a stored load curve (``self.load_curve`` dataframe).
    """

    def __init__(self, train_episode_length: int, ts: float, rand_process: 'RandProcess',
                 loadstep_time: int = None, load_curve: pd.DataFrame = None,
                 bounds=None, bounds_std=None):
        """
        :param train_episode_length: number of steps per training episode (can differ from env.max_episode_steps)
        :param ts: sampletime of env
        :param rand_process: Instance of random process; defines noise added to load
        :param loadstep_time: number of env step where load step should happen (drawn randomly if None)
        :param load_curve: Stored load data to sample from instead of sampling from a distribution
        :param bounds: Bounds to clip the sampled load data
        :param bounds_std: Chosen bounds are sampled from a distribution with std=bounds_std and mean=bounds
        """
        self.train_episode_length = train_episode_length
        self.ts = ts
        self.rand_process = rand_process
        if loadstep_time is None:
            self.loadstep_time = np.random.randint(0, self.train_episode_length)
        else:
            self.loadstep_time = loadstep_time
        self.load_curve = load_curve
        if bounds is None:
            self.bounds = (-np.inf, np.inf)
        else:
            self.bounds = bounds
        if bounds_std is None:
            self.bounds_std = (0, 0)
        else:
            self.bounds_std = bounds_std
        # Per-event random offsets added to the clipping bounds.
        self.lowerbound_std = 0
        self.upperbound_std = 0

    def reset(self, loadstep_time=None):
        """Draw a new (or set a fixed) load-step time for the next episode.

        :param loadstep_time: env step of the load step; random if None
        """
        if loadstep_time is None:
            self.loadstep_time = np.random.randint(0, self.train_episode_length)
        else:
            self.loadstep_time = loadstep_time

    def load_step(self, t, gain):
        """
        Changes the load parameters

        :param t: current simulation time
        :param gain: device parameter
        :return: Sample from SP
        """
        # Apply a fixed load step (to 55 % of gain) exactly once, in the env
        # step configured by self.loadstep_time.
        if self.loadstep_time * self.ts < t <= self.loadstep_time * self.ts + self.ts:
            self.rand_process.proc.mean = gain * 0.55
            self.rand_process.reserve = gain * 0.55
        elif t <= self.ts:
            # First env step: initialize the process mean.
            self.rand_process.proc.mean = gain
        return self.rand_process.sample(t)

    def clipped_step(self, t):
        """Sample from the random process, clipped to the (noisy) bounds."""
        return np.clip(self.rand_process.sample(t),
                       self.bounds[0] + self.lowerbound_std,
                       self.bounds[1] + self.upperbound_std
                       )

    def one_random_loadstep_per_episode(self, t):
        """Force exactly one random load event at self.loadstep_time.

        :param t: current simulation time
        :return: clipped sample from the random process
        """
        if self.loadstep_time * self.ts < t <= self.loadstep_time * self.ts + self.ts:
            # Probabilities > 100 % force the event (and a pure step)
            # with certainty in this env step.
            self.do_change(1002, 102)
        return np.clip(self.rand_process.sample(t),
                       self.bounds[0] + self.lowerbound_std,
                       self.bounds[1] + self.upperbound_std
                       )

    def give_dataframe_value(self, t, col):
        """
        Gives load values from a stored dataframe (self.load_curve)

        :param t: time - represents here the row of the dataframe
        :param col: column name of the dataframe (typically str)
        :raises ValueError: if no dataframe was stored in self.load_curve
        """
        # BUGFIX: validate the dataframe first; previously a negative t on a
        # missing dataframe raised a TypeError instead of this ValueError.
        if self.load_curve is None:
            raise ValueError('No dataframe given! Please feed load class (.load_curve) with data')
        if t < 0:
            return self.load_curve[col][0]
        return self.load_curve[col][int(t / self.ts)]

    def random_load_step(self, t, event_prob: int = 2, step_prob: int = 50):
        """
        Randomly applies a load event (abrupt step or drift) and returns a
        clipped sample from the random process. On every event the process
        variance is drawn randomly from [1, 150].

        :param t: time
        :param event_prob: probability (in per mill) that the step event is triggered in the current step
        :param step_prob: probability (in per cent) that the event is an abrupt step (drift otherwise; random
                          process speed not adjustable yet)
        :return: Sample from SP
        """
        # BUGFIX: previously the hard-coded values 2 (per mill) and 50 (per
        # cent) were used and the event_prob/step_prob parameters were
        # silently ignored.
        if np.random.randint(0, 1001) < event_prob:
            gain = np.random.randint(self.rand_process.bounds[0], self.rand_process.bounds[1])

            self.rand_process.proc.mean = gain
            self.rand_process.proc.vol = np.random.randint(1, 150)
            self.rand_process.proc.speed = np.random.randint(10, 1200)
            # Define std for clipping once every event;
            # np.maximum to not allow negative values.
            self.lowerbound_std = np.maximum(np.random.normal(scale=self.bounds_std[0]), 0.0001)
            self.upperbound_std = np.random.normal(scale=self.bounds_std[1])

            if np.random.randint(0, 101) < step_prob:
                # Abrupt step.
                self.rand_process.reserve = gain
            else:
                # Drift -> lower speed.
                self.rand_process.proc.speed = np.random.randint(10, 100)

        return np.clip(self.rand_process.sample(t),
                       self.bounds[0] + self.lowerbound_std,
                       self.bounds[1] + self.upperbound_std
                       )

    def do_change(self, event_prob_permill=2, step_prob_percent=50):
        """Randomly apply a load event without sampling.

        :param event_prob_permill: per-mill probability that the event fires
        :param step_prob_percent: per-cent probability of an abrupt step
                                  (drift with lower process speed otherwise)
        """
        if np.random.randint(0, 1001) < event_prob_permill:
            gain = np.random.randint(self.rand_process.bounds[0], self.rand_process.bounds[1])

            self.rand_process.proc.mean = gain
            self.rand_process.proc.vol = np.random.randint(1, 150)
            self.rand_process.proc.speed = np.random.randint(10, 1200)
            # Define std for clipping once every event.
            # NOTE(review): unlike random_load_step, lowerbound_std is not
            # clamped to be positive here -- confirm whether intentional.
            self.lowerbound_std = np.random.normal(scale=self.bounds_std[0])
            self.upperbound_std = np.random.normal(scale=self.bounds_std[1])

            if np.random.randint(0, 101) < step_prob_percent:
                # Abrupt step.
                self.rand_process.reserve = gain
            else:
                # Drift -> lower speed.
                self.rand_process.proc.speed = np.random.randint(10, 100)
| 41.823529 | 119 | 0.609314 | import numpy as np
import pandas as pd
from openmodelica_microgrid_gym.util import RandProcess
class RandomLoad:
    """Draws load values from a random process, optionally clipped to
    (randomized) bounds, with support for scripted or random load steps
    and pre-recorded load curves."""

    def __init__(self, train_episode_length: int, ts: float, rand_process: "RandProcess",
                 loadstep_time: int = None, load_curve: pd.DataFrame = None,
                 bounds=None, bounds_std=None):
        """
        :param train_episode_length: number of steps in a training episode
        :param ts: sample time of the environment
        :param rand_process: random process the load values are drawn from
               (string annotation: RandProcess is imported at module level)
        :param loadstep_time: step index at which the scripted load step is
               applied; drawn uniformly from the episode if None
        :param load_curve: optional pre-recorded load data indexed by step
        :param bounds: (lower, upper) hard clipping bounds of the load
        :param bounds_std: standard deviations used to randomize the
               clipping bounds in random_load_step/do_change
        """
        self.train_episode_length = train_episode_length
        self.ts = ts
        self.rand_process = rand_process
        if loadstep_time is None:
            self.loadstep_time = np.random.randint(0, self.train_episode_length)
        else:
            self.loadstep_time = loadstep_time
        self.load_curve = load_curve
        if bounds is None:
            self.bounds = (-np.inf, np.inf)
        else:
            self.bounds = bounds
        if bounds_std is None:
            self.bounds_std = (0, 0)
        else:
            self.bounds_std = bounds_std
        # current random offsets applied to the clipping bounds
        self.lowerbound_std = 0
        self.upperbound_std = 0

    def reset(self, loadstep_time=None):
        """Redraw (or set) the step index of the scripted load step."""
        if loadstep_time is None:
            self.loadstep_time = np.random.randint(0, self.train_episode_length)
        else:
            self.loadstep_time = loadstep_time

    def load_step(self, t, gain):
        """
        Apply a single scripted load step to 55 % of the given gain at
        self.loadstep_time and sample the process.

        :param t: current time
        :param gain: base gain the step is relative to
        :return: sampled load value at time t
        """
        if self.loadstep_time * self.ts < t <= self.loadstep_time * self.ts + self.ts:
            self.rand_process.proc.mean = gain * 0.55
            self.rand_process.reserve = gain * 0.55
        elif t <= self.ts:
            # first step after a reset -> (re)initialize with the base gain
            self.rand_process.proc.mean = gain
        return self.rand_process.sample(t)

    def clipped_step(self, t):
        """Sample the process and clip to the (randomized) bounds."""
        return np.clip(self.rand_process.sample(t),
                       self.bounds[0] + self.lowerbound_std,
                       self.bounds[1] + self.upperbound_std
                       )

    def one_random_loadstep_per_episode(self, t):
        """Force exactly one random load change at self.loadstep_time and
        return the clipped sample."""
        if self.loadstep_time * self.ts < t <= self.loadstep_time * self.ts + self.ts:
            # probabilities > 100 % -> the change happens with certainty
            self.do_change(1002, 102)
        return np.clip(self.rand_process.sample(t),
                       self.bounds[0] + self.lowerbound_std,
                       self.bounds[1] + self.upperbound_std
                       )

    def give_dataframe_value(self, t, col):
        """
        Return the value of column col of the stored load curve at time t.

        :param t: time; negative times return the first sample
        :param col: column name of the load curve
        :return: load value from the dataframe
        :raises ValueError: if no load curve has been supplied
        """
        # fix: validate the dataframe before indexing it, so a missing
        # load_curve raises the intended ValueError also for t < 0
        if self.load_curve is None:
            raise ValueError('No dataframe given! Please feed load class (.load_curve) with data')
        if t < 0:
            # return the first value - no load change before the episode
            return self.load_curve[col][0]
        return self.load_curve[col][int(t / self.ts)]

    def random_load_step(self, t, event_prob: int = 2, step_prob: int = 50):
        """
        Randomly change the load parameters and sample the process.

        :param t: current time
        :param event_prob: probability (permille) that a load event happens
        :param step_prob: probability (percent) that an event is a load step
               (otherwise only the process speed changes -> drift)
        :return: sampled load value, clipped to the randomized bounds
        """
        # fix: the probabilities were hard coded (2 / 50) before, silently
        # ignoring event_prob/step_prob; the defaults keep the old behavior
        if np.random.randint(0, 1001) < event_prob:
            gain = np.random.randint(self.rand_process.bounds[0], self.rand_process.bounds[1])

            self.rand_process.proc.mean = gain
            self.rand_process.proc.vol = np.random.randint(1, 150)
            self.rand_process.proc.speed = np.random.randint(10, 1200)
            # draw new bound offsets; the lower one is kept strictly positive
            self.lowerbound_std = np.maximum(np.random.normal(scale=self.bounds_std[0]), 0.0001)
            self.upperbound_std = np.random.normal(scale=self.bounds_std[1])

            if np.random.randint(0, 101) < step_prob:
                # load step -> jump the reserve to the new gain
                self.rand_process.reserve = gain
            else:
                # drift -> lower speed to allow a smooth transition
                self.rand_process.proc.speed = np.random.randint(10, 100)

        return np.clip(self.rand_process.sample(t),
                       self.bounds[0] + self.lowerbound_std,
                       self.bounds[1] + self.upperbound_std
                       )

    def do_change(self, event_prob_permill=2, step_prob_percent=50):
        """Like random_load_step, but only mutates the process parameters
        without sampling; here the bound offsets may also become negative."""
        if np.random.randint(0, 1001) < event_prob_permill:
            gain = np.random.randint(self.rand_process.bounds[0], self.rand_process.bounds[1])

            self.rand_process.proc.mean = gain
            self.rand_process.proc.vol = np.random.randint(1, 150)
            self.rand_process.proc.speed = np.random.randint(10, 1200)
            self.lowerbound_std = np.random.normal(scale=self.bounds_std[0])
            self.upperbound_std = np.random.normal(scale=self.bounds_std[1])

            if np.random.randint(0, 101) < step_prob_percent:
                self.rand_process.reserve = gain
            else:
                self.rand_process.proc.speed = np.random.randint(10, 100)
| true | true |
f72508f773fd8c5c239a480ae2c67e066c971dd2 | 1,265 | py | Python | api/migrations/0022_auto_20150222_0024.py | eiling/SchoolIdolAPI | a05980fdb33b143dbe2febfc1ad6cf723f025c8d | [
"Apache-2.0"
] | 65 | 2017-12-29T12:28:11.000Z | 2022-03-15T06:42:26.000Z | api/migrations/0022_auto_20150222_0024.py | eiling/SchoolIdolAPI | a05980fdb33b143dbe2febfc1ad6cf723f025c8d | [
"Apache-2.0"
] | 31 | 2017-12-18T02:03:09.000Z | 2022-01-13T00:43:35.000Z | api/migrations/0022_auto_20150222_0024.py | eiling/SchoolIdolAPI | a05980fdb33b143dbe2febfc1ad6cf723f025c8d | [
"Apache-2.0"
] | 7 | 2018-08-27T15:11:01.000Z | 2021-08-16T05:15:13.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Add the user 'following' relation and make the reverse accessor
    names of existing foreign keys explicit."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('api', '0021_card_video_story'),
    ]

    operations = [
        # New M2M: the users a user follows; reverse accessor is 'followers'.
        migrations.AddField(
            model_name='userpreferences',
            name='following',
            field=models.ManyToManyField(related_name='followers', to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
        # The remaining operations only set explicit related_name values.
        migrations.AlterField(
            model_name='account',
            name='owner',
            field=models.ForeignKey(related_name='accounts_set', to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='ownedcard',
            name='card',
            field=models.ForeignKey(related_name='ownedcards', to='api.Card'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='ownedcard',
            name='owner_account',
            field=models.ForeignKey(related_name='ownedcards', to='api.Account'),
            preserve_default=True,
        ),
    ]
| 30.853659 | 96 | 0.611858 |
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Schema migration: adds UserPreferences.following (M2M to the user
    model) and pins explicit related_name values on three foreign keys."""

    dependencies = [
        # Depends on the swappable user model and the previous api migration.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('api', '0021_card_video_story'),
    ]

    operations = [
        migrations.AddField(
            model_name='userpreferences',
            name='following',
            field=models.ManyToManyField(related_name='followers', to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='account',
            name='owner',
            field=models.ForeignKey(related_name='accounts_set', to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='ownedcard',
            name='card',
            field=models.ForeignKey(related_name='ownedcards', to='api.Card'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='ownedcard',
            name='owner_account',
            field=models.ForeignKey(related_name='ownedcards', to='api.Account'),
            preserve_default=True,
        ),
    ]
f725091c50677d690c2ec6cbbf02012349ecebe0 | 109,596 | py | Python | numpy/core/tests/test_datetime.py | kurtamohler/numpy | 73157efcd17da95ce984d1595ac4907233b9dbf5 | [
"BSD-3-Clause"
] | 1 | 2022-02-27T15:07:29.000Z | 2022-02-27T15:07:29.000Z | numpy/core/tests/test_datetime.py | kurtamohler/numpy | 73157efcd17da95ce984d1595ac4907233b9dbf5 | [
"BSD-3-Clause"
] | 41 | 2019-04-01T15:52:29.000Z | 2021-09-07T00:15:51.000Z | numpy/core/tests/test_datetime.py | kurtamohler/numpy | 73157efcd17da95ce984d1595ac4907233b9dbf5 | [
"BSD-3-Clause"
] | 4 | 2021-06-25T08:40:39.000Z | 2021-08-08T09:52:42.000Z |
import numpy
import numpy as np
import datetime
import pytest
from numpy.testing import (
assert_, assert_equal, assert_raises, assert_warns, suppress_warnings,
assert_raises_regex,
)
from numpy.compat import pickle
# Use pytz to test out various time zones if available
try:
    # pytz is optional; timezone-specific tests check _has_pytz and are
    # skipped when it is not installed
    from pytz import timezone as tz
    _has_pytz = True
except ImportError:
    _has_pytz = False

try:
    # RecursionError only exists on python >= 3.5; alias it otherwise
    RecursionError
except NameError:
    RecursionError = RuntimeError  # python < 3.5
class TestDateTime:
def test_datetime_dtype_creation(self):
    """Datetime/timedelta dtype parsing: units and unit multiples,
    endianness specifiers, and rejection of malformed type strings."""
    for unit in ['Y', 'M', 'W', 'D',
                 'h', 'm', 's', 'ms', 'us',
                 'μs',  # alias for us
                 'ns', 'ps', 'fs', 'as']:
        dt1 = np.dtype('M8[750%s]' % unit)
        assert_(dt1 == np.dtype('datetime64[750%s]' % unit))
        dt2 = np.dtype('m8[%s]' % unit)
        assert_(dt2 == np.dtype('timedelta64[%s]' % unit))

    # Generic units shouldn't add [] to the end
    assert_equal(str(np.dtype("M8")), "datetime64")

    # Should be possible to specify the endianness
    assert_equal(np.dtype("=M8"), np.dtype("M8"))
    assert_equal(np.dtype("=M8[s]"), np.dtype("M8[s]"))
    assert_(np.dtype(">M8") == np.dtype("M8") or
            np.dtype("<M8") == np.dtype("M8"))
    assert_(np.dtype(">M8[D]") == np.dtype("M8[D]") or
            np.dtype("<M8[D]") == np.dtype("M8[D]"))
    assert_(np.dtype(">M8") != np.dtype("<M8"))

    assert_equal(np.dtype("=m8"), np.dtype("m8"))
    assert_equal(np.dtype("=m8[s]"), np.dtype("m8[s]"))
    assert_(np.dtype(">m8") == np.dtype("m8") or
            np.dtype("<m8") == np.dtype("m8"))
    assert_(np.dtype(">m8[D]") == np.dtype("m8[D]") or
            np.dtype("<m8[D]") == np.dtype("m8[D]"))
    assert_(np.dtype(">m8") != np.dtype("<m8"))

    # Check that the parser rejects bad datetime types
    assert_raises(TypeError, np.dtype, 'M8[badunit]')
    assert_raises(TypeError, np.dtype, 'm8[badunit]')
    assert_raises(TypeError, np.dtype, 'M8[YY]')
    assert_raises(TypeError, np.dtype, 'm8[YY]')
    assert_raises(TypeError, np.dtype, 'm4')
    assert_raises(TypeError, np.dtype, 'M7')
    assert_raises(TypeError, np.dtype, 'm7')
    assert_raises(TypeError, np.dtype, 'M16')
    assert_raises(TypeError, np.dtype, 'm16')
def test_datetime_casting_rules(self):
    """np.can_cast rules between datetime64, timedelta64 and other
    types, including unit-multiple divisibility and generic units."""
    # Cannot cast safely/same_kind between timedelta and datetime
    assert_(not np.can_cast('m8', 'M8', casting='same_kind'))
    assert_(not np.can_cast('M8', 'm8', casting='same_kind'))
    assert_(not np.can_cast('m8', 'M8', casting='safe'))
    assert_(not np.can_cast('M8', 'm8', casting='safe'))

    # Can cast safely/same_kind from integer to timedelta
    assert_(np.can_cast('i8', 'm8', casting='same_kind'))
    assert_(np.can_cast('i8', 'm8', casting='safe'))
    assert_(np.can_cast('i4', 'm8', casting='same_kind'))
    assert_(np.can_cast('i4', 'm8', casting='safe'))
    assert_(np.can_cast('u4', 'm8', casting='same_kind'))
    assert_(np.can_cast('u4', 'm8', casting='safe'))

    # Cannot cast safely from unsigned integer of the same size, which
    # could overflow
    assert_(np.can_cast('u8', 'm8', casting='same_kind'))
    assert_(not np.can_cast('u8', 'm8', casting='safe'))

    # Cannot cast safely/same_kind from float to timedelta
    assert_(not np.can_cast('f4', 'm8', casting='same_kind'))
    assert_(not np.can_cast('f4', 'm8', casting='safe'))

    # Cannot cast safely/same_kind from integer to datetime
    assert_(not np.can_cast('i8', 'M8', casting='same_kind'))
    assert_(not np.can_cast('i8', 'M8', casting='safe'))

    # Cannot cast safely/same_kind from bool to datetime
    assert_(not np.can_cast('b1', 'M8', casting='same_kind'))
    assert_(not np.can_cast('b1', 'M8', casting='safe'))
    # Can cast safely/same_kind from bool to timedelta
    assert_(np.can_cast('b1', 'm8', casting='same_kind'))
    assert_(np.can_cast('b1', 'm8', casting='safe'))

    # Can cast datetime safely from months/years to days
    assert_(np.can_cast('M8[M]', 'M8[D]', casting='safe'))
    assert_(np.can_cast('M8[Y]', 'M8[D]', casting='safe'))
    # Cannot cast timedelta safely from months/years to days
    assert_(not np.can_cast('m8[M]', 'm8[D]', casting='safe'))
    assert_(not np.can_cast('m8[Y]', 'm8[D]', casting='safe'))
    # Can cast datetime same_kind from months/years to days
    assert_(np.can_cast('M8[M]', 'M8[D]', casting='same_kind'))
    assert_(np.can_cast('M8[Y]', 'M8[D]', casting='same_kind'))
    # Can't cast timedelta same_kind from months/years to days
    assert_(not np.can_cast('m8[M]', 'm8[D]', casting='same_kind'))
    assert_(not np.can_cast('m8[Y]', 'm8[D]', casting='same_kind'))
    # Can cast datetime same_kind across the date/time boundary
    assert_(np.can_cast('M8[D]', 'M8[h]', casting='same_kind'))
    # Can cast timedelta same_kind across the date/time boundary
    assert_(np.can_cast('m8[D]', 'm8[h]', casting='same_kind'))
    assert_(np.can_cast('m8[h]', 'm8[D]', casting='same_kind'))

    # Cannot cast safely if the integer multiplier doesn't divide
    assert_(not np.can_cast('M8[7h]', 'M8[3h]', casting='safe'))
    assert_(not np.can_cast('M8[3h]', 'M8[6h]', casting='safe'))
    # But can cast same_kind
    assert_(np.can_cast('M8[7h]', 'M8[3h]', casting='same_kind'))
    # Can cast safely if the integer multiplier does divide
    assert_(np.can_cast('M8[6h]', 'M8[3h]', casting='safe'))

    # We can always cast types with generic units (corresponding to NaT) to
    # more specific types
    assert_(np.can_cast('m8', 'm8[h]', casting='same_kind'))
    assert_(np.can_cast('m8', 'm8[h]', casting='safe'))
    assert_(np.can_cast('M8', 'M8[h]', casting='same_kind'))
    assert_(np.can_cast('M8', 'M8[h]', casting='safe'))
    # but not the other way around
    assert_(not np.can_cast('m8[h]', 'm8', casting='same_kind'))
    assert_(not np.can_cast('m8[h]', 'm8', casting='safe'))
    assert_(not np.can_cast('M8[h]', 'M8', casting='same_kind'))
    assert_(not np.can_cast('M8[h]', 'M8', casting='safe'))
def test_compare_generic_nat(self):
# regression tests for gh-6452
assert_(np.datetime64('NaT') !=
np.datetime64('2000') + np.timedelta64('NaT'))
assert_(np.datetime64('NaT') != np.datetime64('NaT', 'us'))
assert_(np.datetime64('NaT', 'us') != np.datetime64('NaT'))
@pytest.mark.parametrize("size", [
    3, 21, 217, 1000])
def test_datetime_nat_argsort_stability(self, size):
    """Mergesort argsort of an all-NaT datetime array must be stable."""
    # NaT < NaT should be False internally for
    # sort stability
    expected = np.arange(size)
    arr = np.tile(np.datetime64('NaT'), size)
    assert_equal(np.argsort(arr, kind='mergesort'), expected)
@pytest.mark.parametrize("size", [
    3, 21, 217, 1000])
def test_timedelta_nat_argsort_stability(self, size):
    """Mergesort argsort of an all-NaT timedelta array must be stable."""
    # NaT < NaT should be False internally for
    # sort stability
    expected = np.arange(size)
    arr = np.tile(np.timedelta64('NaT'), size)
    assert_equal(np.argsort(arr, kind='mergesort'), expected)
@pytest.mark.parametrize("arr, expected", [
    # the example provided in gh-12629
    (['NaT', 1, 2, 3],
     [1, 2, 3, 'NaT']),
    # multiple NaTs
    (['NaT', 9, 'NaT', -707],
     [-707, 9, 'NaT', 'NaT']),
    # this sort explores another code path for NaT
    ([1, -2, 3, 'NaT'],
     [-2, 1, 3, 'NaT']),
    # 2-D array
    ([[51, -220, 'NaT'],
      [-17, 'NaT', -90]],
     [[-220, 51, 'NaT'],
      [-90, -17, 'NaT']]),
    ])
@pytest.mark.parametrize("dtype", [
    'M8[ns]', 'M8[us]',
    'm8[ns]', 'm8[us]'])
def test_datetime_timedelta_sort_nat(self, arr, expected, dtype):
    """In-place sort moves NaT values to the end of the array."""
    # fix for gh-12629 and gh-15063; NaT sorting to end of array
    arr = np.array(arr, dtype=dtype)
    expected = np.array(expected, dtype=dtype)
    arr.sort()
    assert_equal(arr, expected)
def test_datetime_scalar_construction(self):
    """np.datetime64 scalar construction from strings, None, integers,
    other scalars/0-d arrays, and datetime.date/datetime objects."""
    # Construct with different units
    assert_equal(np.datetime64('1950-03-12', 'D'),
                 np.datetime64('1950-03-12'))
    assert_equal(np.datetime64('1950-03-12T13', 's'),
                 np.datetime64('1950-03-12T13', 'm'))

    # Default construction means NaT
    assert_equal(np.datetime64(), np.datetime64('NaT'))

    # Some basic strings and repr
    assert_equal(str(np.datetime64('NaT')), 'NaT')
    assert_equal(repr(np.datetime64('NaT')),
                 "numpy.datetime64('NaT')")
    assert_equal(str(np.datetime64('2011-02')), '2011-02')
    assert_equal(repr(np.datetime64('2011-02')),
                 "numpy.datetime64('2011-02')")

    # None gets constructed as NaT
    assert_equal(np.datetime64(None), np.datetime64('NaT'))

    # Default construction of NaT is in generic units
    assert_equal(np.datetime64().dtype, np.dtype('M8'))
    assert_equal(np.datetime64('NaT').dtype, np.dtype('M8'))

    # Construction from integers requires a specified unit
    assert_raises(ValueError, np.datetime64, 17)

    # When constructing from a scalar or zero-dimensional array,
    # it either keeps the units or you can override them.
    a = np.datetime64('2000-03-18T16', 'h')
    b = np.array('2000-03-18T16', dtype='M8[h]')

    assert_equal(a.dtype, np.dtype('M8[h]'))
    assert_equal(b.dtype, np.dtype('M8[h]'))

    assert_equal(np.datetime64(a), a)
    assert_equal(np.datetime64(a).dtype, np.dtype('M8[h]'))

    assert_equal(np.datetime64(b), a)
    assert_equal(np.datetime64(b).dtype, np.dtype('M8[h]'))

    assert_equal(np.datetime64(a, 's'), a)
    assert_equal(np.datetime64(a, 's').dtype, np.dtype('M8[s]'))

    assert_equal(np.datetime64(b, 's'), a)
    assert_equal(np.datetime64(b, 's').dtype, np.dtype('M8[s]'))

    # Construction from datetime.date
    assert_equal(np.datetime64('1945-03-25'),
                 np.datetime64(datetime.date(1945, 3, 25)))
    assert_equal(np.datetime64('2045-03-25', 'D'),
                 np.datetime64(datetime.date(2045, 3, 25), 'D'))
    # Construction from datetime.datetime
    assert_equal(np.datetime64('1980-01-25T14:36:22.5'),
                 np.datetime64(datetime.datetime(1980, 1, 25,
                                                 14, 36, 22, 500000)))

    # Construction with time units from a date is okay
    assert_equal(np.datetime64('1920-03-13', 'h'),
                 np.datetime64('1920-03-13T00'))
    assert_equal(np.datetime64('1920-03', 'm'),
                 np.datetime64('1920-03-01T00:00'))
    assert_equal(np.datetime64('1920', 's'),
                 np.datetime64('1920-01-01T00:00:00'))
    assert_equal(np.datetime64(datetime.date(2045, 3, 25), 'ms'),
                 np.datetime64('2045-03-25T00:00:00.000'))

    # Construction with date units from a datetime is also okay
    assert_equal(np.datetime64('1920-03-13T18', 'D'),
                 np.datetime64('1920-03-13'))
    assert_equal(np.datetime64('1920-03-13T18:33:12', 'M'),
                 np.datetime64('1920-03'))
    assert_equal(np.datetime64('1920-03-13T18:33:12.5', 'Y'),
                 np.datetime64('1920'))
def test_datetime_scalar_construction_timezone(self):
# verify that supplying an explicit timezone works, but is deprecated
with assert_warns(DeprecationWarning):
assert_equal(np.datetime64('2000-01-01T00Z'),
np.datetime64('2000-01-01T00'))
with assert_warns(DeprecationWarning):
assert_equal(np.datetime64('2000-01-01T00-08'),
np.datetime64('2000-01-01T08'))
def test_datetime_array_find_type(self):
dt = np.datetime64('1970-01-01', 'M')
arr = np.array([dt])
assert_equal(arr.dtype, np.dtype('M8[M]'))
# at the moment, we don't automatically convert these to datetime64
dt = datetime.date(1970, 1, 1)
arr = np.array([dt])
assert_equal(arr.dtype, np.dtype('O'))
dt = datetime.datetime(1970, 1, 1, 12, 30, 40)
arr = np.array([dt])
assert_equal(arr.dtype, np.dtype('O'))
# find "supertype" for non-dates and dates
b = np.bool_(True)
dm = np.datetime64('1970-01-01', 'M')
d = datetime.date(1970, 1, 1)
dt = datetime.datetime(1970, 1, 1, 12, 30, 40)
arr = np.array([b, dm])
assert_equal(arr.dtype, np.dtype('O'))
arr = np.array([b, d])
assert_equal(arr.dtype, np.dtype('O'))
arr = np.array([b, dt])
assert_equal(arr.dtype, np.dtype('O'))
arr = np.array([d, d]).astype('datetime64')
assert_equal(arr.dtype, np.dtype('M8[D]'))
arr = np.array([dt, dt]).astype('datetime64')
assert_equal(arr.dtype, np.dtype('M8[us]'))
@pytest.mark.parametrize("unit", [
    # test all date / time units and use
    # "generic" to select generic unit
    ("Y"), ("M"), ("W"), ("D"), ("h"), ("m"),
    ("s"), ("ms"), ("us"), ("ns"), ("ps"),
    ("fs"), ("as"), ("generic") ])
def test_timedelta_np_int_construction(self, unit):
    """np.timedelta64 accepts np.int64 counts like Python ints (gh-7617)."""
    # regression test for gh-7617
    if unit != "generic":
        assert_equal(np.timedelta64(np.int64(123), unit),
                     np.timedelta64(123, unit))
    else:
        assert_equal(np.timedelta64(np.int64(123)),
                     np.timedelta64(123))
def test_timedelta_scalar_construction(self):
    """np.timedelta64 scalar construction: units, NaT/None, str/repr,
    datetime.timedelta inputs, and rejection of casts across the
    nonlinear year/month <-> day/time boundary."""
    # Construct with different units
    assert_equal(np.timedelta64(7, 'D'),
                 np.timedelta64(1, 'W'))
    assert_equal(np.timedelta64(120, 's'),
                 np.timedelta64(2, 'm'))

    # Default construction means 0
    assert_equal(np.timedelta64(), np.timedelta64(0))

    # None gets constructed as NaT
    assert_equal(np.timedelta64(None), np.timedelta64('NaT'))

    # Some basic strings and repr
    assert_equal(str(np.timedelta64('NaT')), 'NaT')
    assert_equal(repr(np.timedelta64('NaT')),
                 "numpy.timedelta64('NaT')")
    assert_equal(str(np.timedelta64(3, 's')), '3 seconds')
    assert_equal(repr(np.timedelta64(-3, 's')),
                 "numpy.timedelta64(-3,'s')")
    assert_equal(repr(np.timedelta64(12)),
                 "numpy.timedelta64(12)")

    # Construction from an integer produces generic units
    assert_equal(np.timedelta64(12).dtype, np.dtype('m8'))

    # When constructing from a scalar or zero-dimensional array,
    # it either keeps the units or you can override them.
    a = np.timedelta64(2, 'h')
    b = np.array(2, dtype='m8[h]')

    assert_equal(a.dtype, np.dtype('m8[h]'))
    assert_equal(b.dtype, np.dtype('m8[h]'))

    assert_equal(np.timedelta64(a), a)
    assert_equal(np.timedelta64(a).dtype, np.dtype('m8[h]'))

    assert_equal(np.timedelta64(b), a)
    assert_equal(np.timedelta64(b).dtype, np.dtype('m8[h]'))

    assert_equal(np.timedelta64(a, 's'), a)
    assert_equal(np.timedelta64(a, 's').dtype, np.dtype('m8[s]'))

    assert_equal(np.timedelta64(b, 's'), a)
    assert_equal(np.timedelta64(b, 's').dtype, np.dtype('m8[s]'))

    # Construction from datetime.timedelta
    assert_equal(np.timedelta64(5, 'D'),
                 np.timedelta64(datetime.timedelta(days=5)))
    assert_equal(np.timedelta64(102347621, 's'),
                 np.timedelta64(datetime.timedelta(seconds=102347621)))
    assert_equal(np.timedelta64(-10234760000, 'us'),
                 np.timedelta64(datetime.timedelta(
                     microseconds=-10234760000)))
    assert_equal(np.timedelta64(10234760000, 'us'),
                 np.timedelta64(datetime.timedelta(
                     microseconds=10234760000)))
    assert_equal(np.timedelta64(1023476, 'ms'),
                 np.timedelta64(datetime.timedelta(milliseconds=1023476)))
    assert_equal(np.timedelta64(10, 'm'),
                 np.timedelta64(datetime.timedelta(minutes=10)))
    assert_equal(np.timedelta64(281, 'h'),
                 np.timedelta64(datetime.timedelta(hours=281)))
    assert_equal(np.timedelta64(28, 'W'),
                 np.timedelta64(datetime.timedelta(weeks=28)))

    # Cannot construct across nonlinear time unit boundaries
    a = np.timedelta64(3, 's')
    assert_raises(TypeError, np.timedelta64, a, 'M')
    assert_raises(TypeError, np.timedelta64, a, 'Y')
    a = np.timedelta64(6, 'M')
    assert_raises(TypeError, np.timedelta64, a, 'D')
    assert_raises(TypeError, np.timedelta64, a, 'h')
    a = np.timedelta64(1, 'Y')
    assert_raises(TypeError, np.timedelta64, a, 'D')
    assert_raises(TypeError, np.timedelta64, a, 'm')
    a = datetime.timedelta(seconds=3)
    assert_raises(TypeError, np.timedelta64, a, 'M')
    assert_raises(TypeError, np.timedelta64, a, 'Y')
    a = datetime.timedelta(weeks=3)
    assert_raises(TypeError, np.timedelta64, a, 'M')
    assert_raises(TypeError, np.timedelta64, a, 'Y')
    a = datetime.timedelta()
    assert_raises(TypeError, np.timedelta64, a, 'M')
    assert_raises(TypeError, np.timedelta64, a, 'Y')
def test_timedelta_object_array_conversion(self):
# Regression test for gh-11096
inputs = [datetime.timedelta(28),
datetime.timedelta(30),
datetime.timedelta(31)]
expected = np.array([28, 30, 31], dtype='timedelta64[D]')
actual = np.array(inputs, dtype='timedelta64[D]')
assert_equal(expected, actual)
def test_timedelta_0_dim_object_array_conversion(self):
# Regression test for gh-11151
test = np.array(datetime.timedelta(seconds=20))
actual = test.astype(np.timedelta64)
# expected value from the array constructor workaround
# described in above issue
expected = np.array(datetime.timedelta(seconds=20),
np.timedelta64)
assert_equal(actual, expected)
def test_timedelta_scalar_construction_units(self):
    """Unit detection when parsing datetime strings of increasing
    precision, plus Python date/datetime objects and 'today'/'now'."""
    # String construction detecting units
    assert_equal(np.datetime64('2010').dtype,
                 np.dtype('M8[Y]'))
    assert_equal(np.datetime64('2010-03').dtype,
                 np.dtype('M8[M]'))
    assert_equal(np.datetime64('2010-03-12').dtype,
                 np.dtype('M8[D]'))
    assert_equal(np.datetime64('2010-03-12T17').dtype,
                 np.dtype('M8[h]'))
    assert_equal(np.datetime64('2010-03-12T17:15').dtype,
                 np.dtype('M8[m]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08').dtype,
                 np.dtype('M8[s]'))

    # 1-3 fractional digits -> milliseconds
    assert_equal(np.datetime64('2010-03-12T17:15:08.1').dtype,
                 np.dtype('M8[ms]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08.12').dtype,
                 np.dtype('M8[ms]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08.123').dtype,
                 np.dtype('M8[ms]'))

    # 4-6 fractional digits -> microseconds
    assert_equal(np.datetime64('2010-03-12T17:15:08.1234').dtype,
                 np.dtype('M8[us]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08.12345').dtype,
                 np.dtype('M8[us]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08.123456').dtype,
                 np.dtype('M8[us]'))

    # 7-9 fractional digits -> nanoseconds
    assert_equal(np.datetime64('1970-01-01T00:00:02.1234567').dtype,
                 np.dtype('M8[ns]'))
    assert_equal(np.datetime64('1970-01-01T00:00:02.12345678').dtype,
                 np.dtype('M8[ns]'))
    assert_equal(np.datetime64('1970-01-01T00:00:02.123456789').dtype,
                 np.dtype('M8[ns]'))

    # 10-12 fractional digits -> picoseconds
    assert_equal(np.datetime64('1970-01-01T00:00:02.1234567890').dtype,
                 np.dtype('M8[ps]'))
    assert_equal(np.datetime64('1970-01-01T00:00:02.12345678901').dtype,
                 np.dtype('M8[ps]'))
    assert_equal(np.datetime64('1970-01-01T00:00:02.123456789012').dtype,
                 np.dtype('M8[ps]'))

    # 13-15 fractional digits -> femtoseconds
    assert_equal(np.datetime64(
        '1970-01-01T00:00:02.1234567890123').dtype,
        np.dtype('M8[fs]'))
    assert_equal(np.datetime64(
        '1970-01-01T00:00:02.12345678901234').dtype,
        np.dtype('M8[fs]'))
    assert_equal(np.datetime64(
        '1970-01-01T00:00:02.123456789012345').dtype,
        np.dtype('M8[fs]'))

    # 16-18 fractional digits -> attoseconds
    assert_equal(np.datetime64(
        '1970-01-01T00:00:02.1234567890123456').dtype,
        np.dtype('M8[as]'))
    assert_equal(np.datetime64(
        '1970-01-01T00:00:02.12345678901234567').dtype,
        np.dtype('M8[as]'))
    assert_equal(np.datetime64(
        '1970-01-01T00:00:02.123456789012345678').dtype,
        np.dtype('M8[as]'))

    # Python date object
    assert_equal(np.datetime64(datetime.date(2010, 4, 16)).dtype,
                 np.dtype('M8[D]'))
    # Python datetime object
    assert_equal(np.datetime64(
        datetime.datetime(2010, 4, 16, 13, 45, 18)).dtype,
        np.dtype('M8[us]'))
    # 'today' special value
    assert_equal(np.datetime64('today').dtype,
                 np.dtype('M8[D]'))
    # 'now' special value
    assert_equal(np.datetime64('now').dtype,
                 np.dtype('M8[s]'))
def test_datetime_nat_casting(self):
    """NaT round-trips through datetime unit casts (arrays and scalars)
    and NaN floats/complexes cast to NaT."""
    a = np.array('NaT', dtype='M8[D]')
    b = np.datetime64('NaT', '[D]')

    # Arrays
    assert_equal(a.astype('M8[s]'), np.array('NaT', dtype='M8[s]'))
    assert_equal(a.astype('M8[ms]'), np.array('NaT', dtype='M8[ms]'))
    assert_equal(a.astype('M8[M]'), np.array('NaT', dtype='M8[M]'))
    assert_equal(a.astype('M8[Y]'), np.array('NaT', dtype='M8[Y]'))
    assert_equal(a.astype('M8[W]'), np.array('NaT', dtype='M8[W]'))

    # Scalars -> Scalars
    assert_equal(np.datetime64(b, '[s]'), np.datetime64('NaT', '[s]'))
    assert_equal(np.datetime64(b, '[ms]'), np.datetime64('NaT', '[ms]'))
    assert_equal(np.datetime64(b, '[M]'), np.datetime64('NaT', '[M]'))
    assert_equal(np.datetime64(b, '[Y]'), np.datetime64('NaT', '[Y]'))
    assert_equal(np.datetime64(b, '[W]'), np.datetime64('NaT', '[W]'))

    # Arrays -> Scalars
    assert_equal(np.datetime64(a, '[s]'), np.datetime64('NaT', '[s]'))
    assert_equal(np.datetime64(a, '[ms]'), np.datetime64('NaT', '[ms]'))
    assert_equal(np.datetime64(a, '[M]'), np.datetime64('NaT', '[M]'))
    assert_equal(np.datetime64(a, '[Y]'), np.datetime64('NaT', '[Y]'))
    assert_equal(np.datetime64(a, '[W]'), np.datetime64('NaT', '[W]'))

    # NaN -> NaT, for every float and complex flavor
    nan = np.array([np.nan] * 8)
    fnan = nan.astype('f')
    lnan = nan.astype('g')
    cnan = nan.astype('D')
    cfnan = nan.astype('F')
    clnan = nan.astype('G')

    nat = np.array([np.datetime64('NaT')] * 8)
    assert_equal(nan.astype('M8[ns]'), nat)
    assert_equal(fnan.astype('M8[ns]'), nat)
    assert_equal(lnan.astype('M8[ns]'), nat)
    assert_equal(cnan.astype('M8[ns]'), nat)
    assert_equal(cfnan.astype('M8[ns]'), nat)
    assert_equal(clnan.astype('M8[ns]'), nat)

    nat = np.array([np.timedelta64('NaT')] * 8)
    assert_equal(nan.astype('timedelta64[ns]'), nat)
    assert_equal(fnan.astype('timedelta64[ns]'), nat)
    assert_equal(lnan.astype('timedelta64[ns]'), nat)
    assert_equal(cnan.astype('timedelta64[ns]'), nat)
    assert_equal(cfnan.astype('timedelta64[ns]'), nat)
    assert_equal(clnan.astype('timedelta64[ns]'), nat)
def test_days_creation(self):
    """M8[D] integer values are days since the 1970-01-01 epoch,
    including the Gregorian leap-year rules (100/400-year exceptions)."""
    assert_equal(np.array('1599', dtype='M8[D]').astype('i8'),
                 (1600-1970)*365 - (1972-1600)/4 + 3 - 365)
    assert_equal(np.array('1600', dtype='M8[D]').astype('i8'),
                 (1600-1970)*365 - (1972-1600)/4 + 3)
    assert_equal(np.array('1601', dtype='M8[D]').astype('i8'),
                 (1600-1970)*365 - (1972-1600)/4 + 3 + 366)
    assert_equal(np.array('1900', dtype='M8[D]').astype('i8'),
                 (1900-1970)*365 - (1970-1900)//4)
    assert_equal(np.array('1901', dtype='M8[D]').astype('i8'),
                 (1900-1970)*365 - (1970-1900)//4 + 365)
    assert_equal(np.array('1967', dtype='M8[D]').astype('i8'), -3*365 - 1)
    assert_equal(np.array('1968', dtype='M8[D]').astype('i8'), -2*365 - 1)
    assert_equal(np.array('1969', dtype='M8[D]').astype('i8'), -1*365)
    assert_equal(np.array('1970', dtype='M8[D]').astype('i8'), 0*365)
    assert_equal(np.array('1971', dtype='M8[D]').astype('i8'), 1*365)
    assert_equal(np.array('1972', dtype='M8[D]').astype('i8'), 2*365)
    assert_equal(np.array('1973', dtype='M8[D]').astype('i8'), 3*365 + 1)
    assert_equal(np.array('1974', dtype='M8[D]').astype('i8'), 4*365 + 1)
    assert_equal(np.array('2000', dtype='M8[D]').astype('i8'),
                 (2000 - 1970)*365 + (2000 - 1972)//4)
    assert_equal(np.array('2001', dtype='M8[D]').astype('i8'),
                 (2000 - 1970)*365 + (2000 - 1972)//4 + 366)
    assert_equal(np.array('2400', dtype='M8[D]').astype('i8'),
                 (2400 - 1970)*365 + (2400 - 1972)//4 - 3)
    assert_equal(np.array('2401', dtype='M8[D]').astype('i8'),
                 (2400 - 1970)*365 + (2400 - 1972)//4 - 3 + 366)

    # Leap-day boundaries in leap years (1600 and 2000 are leap years)
    assert_equal(np.array('1600-02-29', dtype='M8[D]').astype('i8'),
                 (1600-1970)*365 - (1972-1600)//4 + 3 + 31 + 28)
    assert_equal(np.array('1600-03-01', dtype='M8[D]').astype('i8'),
                 (1600-1970)*365 - (1972-1600)//4 + 3 + 31 + 29)
    assert_equal(np.array('2000-02-29', dtype='M8[D]').astype('i8'),
                 (2000 - 1970)*365 + (2000 - 1972)//4 + 31 + 28)
    assert_equal(np.array('2000-03-01', dtype='M8[D]').astype('i8'),
                 (2000 - 1970)*365 + (2000 - 1972)//4 + 31 + 29)
    assert_equal(np.array('2001-03-22', dtype='M8[D]').astype('i8'),
                 (2000 - 1970)*365 + (2000 - 1972)//4 + 366 + 31 + 28 + 21)
def test_days_to_pydate(self):
assert_equal(np.array('1599', dtype='M8[D]').astype('O'),
datetime.date(1599, 1, 1))
assert_equal(np.array('1600', dtype='M8[D]').astype('O'),
datetime.date(1600, 1, 1))
assert_equal(np.array('1601', dtype='M8[D]').astype('O'),
datetime.date(1601, 1, 1))
assert_equal(np.array('1900', dtype='M8[D]').astype('O'),
datetime.date(1900, 1, 1))
assert_equal(np.array('1901', dtype='M8[D]').astype('O'),
datetime.date(1901, 1, 1))
assert_equal(np.array('2000', dtype='M8[D]').astype('O'),
datetime.date(2000, 1, 1))
assert_equal(np.array('2001', dtype='M8[D]').astype('O'),
datetime.date(2001, 1, 1))
assert_equal(np.array('1600-02-29', dtype='M8[D]').astype('O'),
datetime.date(1600, 2, 29))
assert_equal(np.array('1600-03-01', dtype='M8[D]').astype('O'),
datetime.date(1600, 3, 1))
assert_equal(np.array('2001-03-22', dtype='M8[D]').astype('O'),
datetime.date(2001, 3, 22))
def test_dtype_comparison(self):
assert_(not (np.dtype('M8[us]') == np.dtype('M8[ms]')))
assert_(np.dtype('M8[us]') != np.dtype('M8[ms]'))
assert_(np.dtype('M8[2D]') != np.dtype('M8[D]'))
assert_(np.dtype('M8[D]') != np.dtype('M8[2D]'))
def test_pydatetime_creation(self):
    """Mixing ISO strings with datetime.date objects in one array gives
    equal elements; 'today' matches datetime.date.today()."""
    a = np.array(['1960-03-12', datetime.date(1960, 3, 12)], dtype='M8[D]')
    assert_equal(a[0], a[1])
    a = np.array(['1999-12-31', datetime.date(1999, 12, 31)], dtype='M8[D]')
    assert_equal(a[0], a[1])
    a = np.array(['2000-01-01', datetime.date(2000, 1, 1)], dtype='M8[D]')
    assert_equal(a[0], a[1])
    # Will fail if the date changes during the exact right moment
    a = np.array(['today', datetime.date.today()], dtype='M8[D]')
    assert_equal(a[0], a[1])
    # datetime.datetime.now() returns local time, not UTC
    #a = np.array(['now', datetime.datetime.now()], dtype='M8[s]')
    #assert_equal(a[0], a[1])

    # we can give a datetime.date time units
    assert_equal(np.array(datetime.date(1960, 3, 12), dtype='M8[s]'),
                 np.array(np.datetime64('1960-03-12T00:00:00')))
def test_datetime_string_conversion(self):
    """Round trips between bytes/unicode string arrays and datetime64,
    in both directions, via astype and via assignment."""
    a = ['2011-03-16', '1920-01-01', '2013-05-19']
    str_a = np.array(a, dtype='S')
    uni_a = np.array(a, dtype='U')
    dt_a = np.array(a, dtype='M')

    # String to datetime
    assert_equal(dt_a, str_a.astype('M'))
    assert_equal(dt_a.dtype, str_a.astype('M').dtype)
    dt_b = np.empty_like(dt_a)
    dt_b[...] = str_a
    assert_equal(dt_a, dt_b)

    # Datetime to string
    assert_equal(str_a, dt_a.astype('S0'))
    str_b = np.empty_like(str_a)
    str_b[...] = dt_a
    assert_equal(str_a, str_b)

    # Unicode to datetime
    assert_equal(dt_a, uni_a.astype('M'))
    assert_equal(dt_a.dtype, uni_a.astype('M').dtype)
    dt_b = np.empty_like(dt_a)
    dt_b[...] = uni_a
    assert_equal(dt_a, dt_b)

    # Datetime to unicode
    assert_equal(uni_a, dt_a.astype('U'))
    uni_b = np.empty_like(uni_a)
    uni_b[...] = dt_a
    assert_equal(uni_a, uni_b)

    # Datetime to long string - gh-9712
    assert_equal(str_a, dt_a.astype((np.string_, 128)))
    str_b = np.empty(str_a.shape, dtype=(np.string_, 128))
    str_b[...] = dt_a
    assert_equal(str_a, str_b)
def test_datetime_array_str(self):
    """str() formatting of datetime arrays, including a custom UTC
    formatter and NaT entries."""
    a = np.array(['2011-03-16', '1920-01-01', '2013-05-19'], dtype='M')
    assert_equal(str(a), "['2011-03-16' '1920-01-01' '2013-05-19']")

    a = np.array(['2011-03-16T13:55', '1920-01-01T03:12'], dtype='M')
    assert_equal(np.array2string(a, separator=', ',
                 formatter={'datetime': lambda x:
                            "'%s'" % np.datetime_as_string(x, timezone='UTC')}),
                 "['2011-03-16T13:55Z', '1920-01-01T03:12Z']")

    # Check that one NaT doesn't corrupt subsequent entries
    a = np.array(['2010', 'NaT', '2030']).astype('M')
    assert_equal(str(a), "['2010' 'NaT' '2030']")
def test_timedelta_array_str(self):
    """str() formatting of timedelta arrays with NaT right-alignment
    and both explicit byte orders."""
    # NOTE(review): the expected strings below look whitespace-collapsed
    # compared to numpy's actual right-aligned output - verify spacing
    a = np.array([-1, 0, 100], dtype='m')
    assert_equal(str(a), "[ -1 0 100]")
    a = np.array(['NaT', 'NaT'], dtype='m')
    assert_equal(str(a), "['NaT' 'NaT']")
    # Check right-alignment with NaTs
    a = np.array([-1, 'NaT', 0], dtype='m')
    assert_equal(str(a), "[ -1 'NaT' 0]")
    a = np.array([-1, 'NaT', 1234567], dtype='m')
    assert_equal(str(a), "[ -1 'NaT' 1234567]")

    # Test with other byteorder:
    a = np.array([-1, 'NaT', 1234567], dtype='>m')
    assert_equal(str(a), "[ -1 'NaT' 1234567]")
    a = np.array([-1, 'NaT', 1234567], dtype='<m')
    assert_equal(str(a), "[ -1 'NaT' 1234567]")
def test_pickle(self):
    """Pickle round trips for datetime dtypes/scalars/timedeltas, plus
    loading dtype pickles produced by numpy 1.6."""
    # Check that pickle roundtripping works
    for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
        dt = np.dtype('M8[7D]')
        assert_equal(pickle.loads(pickle.dumps(dt, protocol=proto)), dt)
        dt = np.dtype('M8[W]')
        assert_equal(pickle.loads(pickle.dumps(dt, protocol=proto)), dt)
        scalar = np.datetime64('2016-01-01T00:00:00.000000000')
        assert_equal(pickle.loads(pickle.dumps(scalar, protocol=proto)),
                     scalar)
        delta = scalar - np.datetime64('2015-01-01T00:00:00.000000000')
        assert_equal(pickle.loads(pickle.dumps(delta, protocol=proto)),
                     delta)

    # Check that loading pickles from 1.6 works
    # (raw protocol-0 payloads captured from that version)
    pkl = b"cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
          b"(I4\nS'<'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'D'\np6\n" + \
          b"I7\nI1\nI1\ntp7\ntp8\ntp9\nb."
    assert_equal(pickle.loads(pkl), np.dtype('<M8[7D]'))
    pkl = b"cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
          b"(I4\nS'<'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'W'\np6\n" + \
          b"I1\nI1\nI1\ntp7\ntp8\ntp9\nb."
    assert_equal(pickle.loads(pkl), np.dtype('<M8[W]'))
    pkl = b"cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
          b"(I4\nS'>'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'us'\np6\n" + \
          b"I1\nI1\nI1\ntp7\ntp8\ntp9\nb."
    assert_equal(pickle.loads(pkl), np.dtype('>M8[us]'))
def test_setstate(self):
    "Verify that datetime dtype __setstate__ can handle bad arguments"
    dt = np.dtype('>M8[us]')
    # bad metadata element -> ValueError; the dtype must stay intact
    assert_raises(ValueError, dt.__setstate__, (4, '>', None, None, None, -1, -1, 0, 1))
    assert_(dt.__reduce__()[2] == np.dtype('>M8[us]').__reduce__()[2])
    # wrong tuple contents -> TypeError; the dtype must stay intact
    assert_raises(TypeError, dt.__setstate__, (4, '>', None, None, None, -1, -1, 0, ({}, 'xxx')))
    assert_(dt.__reduce__()[2] == np.dtype('>M8[us]').__reduce__()[2])
    def test_dtype_promotion(self):
        """Promotion of matching datetime/timedelta dtypes takes the unit GCD;
        incompatible or overflowing unit combinations raise."""
        # datetime <op> datetime computes the metadata gcd
        # timedelta <op> timedelta computes the metadata gcd
        for mM in ['m', 'M']:
            assert_equal(
                np.promote_types(np.dtype(mM+'8[2Y]'), np.dtype(mM+'8[2Y]')),
                np.dtype(mM+'8[2Y]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[12Y]'), np.dtype(mM+'8[15Y]')),
                np.dtype(mM+'8[3Y]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[62M]'), np.dtype(mM+'8[24M]')),
                np.dtype(mM+'8[2M]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[1W]'), np.dtype(mM+'8[2D]')),
                np.dtype(mM+'8[1D]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[W]'), np.dtype(mM+'8[13s]')),
                np.dtype(mM+'8[s]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[13W]'), np.dtype(mM+'8[49s]')),
                np.dtype(mM+'8[7s]'))
        # timedelta <op> timedelta raises when there is no reasonable gcd
        # (years/months vs. day-based units are not commensurable)
        assert_raises(TypeError, np.promote_types,
                      np.dtype('m8[Y]'), np.dtype('m8[D]'))
        assert_raises(TypeError, np.promote_types,
                      np.dtype('m8[M]'), np.dtype('m8[W]'))
        # timedelta and float cannot be safely cast with each other
        assert_raises(TypeError, np.promote_types, "float32", "m8")
        assert_raises(TypeError, np.promote_types, "m8", "float32")
        assert_raises(TypeError, np.promote_types, "uint64", "m8")
        assert_raises(TypeError, np.promote_types, "m8", "uint64")
        # timedelta <op> timedelta may overflow with big unit ranges
        assert_raises(OverflowError, np.promote_types,
                      np.dtype('m8[W]'), np.dtype('m8[fs]'))
        assert_raises(OverflowError, np.promote_types,
                      np.dtype('m8[s]'), np.dtype('m8[as]'))
def test_cast_overflow(self):
# gh-4486
def cast():
numpy.datetime64("1971-01-01 00:00:00.000000000000000").astype("<M8[D]")
assert_raises(OverflowError, cast)
def cast2():
numpy.datetime64("2014").astype("<M8[fs]")
assert_raises(OverflowError, cast2)
    def test_pyobject_roundtrip(self):
        """Datetime arrays of every unit survive astype(object) and back."""
        # All datetime types should be able to roundtrip through object
        a = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0,
                      -1020040340, -2942398, -1, 0, 1, 234523453, 1199164176],
                     dtype=np.int64)
        # With date units; the first nine slots are overwritten with boundary
        # dates (including NaT), the rest keep the raw int64 values above.
        for unit in ['M8[D]', 'M8[W]', 'M8[M]', 'M8[Y]']:
            b = a.copy().view(dtype=unit)
            b[0] = '-0001-01-01'
            b[1] = '-0001-12-31'
            b[2] = '0000-01-01'
            b[3] = '0001-01-01'
            b[4] = '1969-12-31'
            b[5] = '1970-01-01'
            b[6] = '9999-12-31'
            b[7] = '10000-01-01'
            b[8] = 'NaT'
            assert_equal(b.astype(object).astype(unit), b,
                         "Error roundtripping unit %s" % unit)
        # With time units
        for unit in ['M8[as]', 'M8[16fs]', 'M8[ps]', 'M8[us]',
                     'M8[300as]', 'M8[20us]']:
            b = a.copy().view(dtype=unit)
            b[0] = '-0001-01-01T00'
            b[1] = '-0001-12-31T00'
            b[2] = '0000-01-01T00'
            b[3] = '0001-01-01T00'
            b[4] = '1969-12-31T23:59:59.999999'
            b[5] = '1970-01-01T00'
            b[6] = '9999-12-31T23:59:59.999999'
            b[7] = '10000-01-01T00'
            b[8] = 'NaT'
            assert_equal(b.astype(object).astype(unit), b,
                         "Error roundtripping unit %s" % unit)
def test_month_truncation(self):
# Make sure that months are truncating correctly
assert_equal(np.array('1945-03-01', dtype='M8[M]'),
np.array('1945-03-31', dtype='M8[M]'))
assert_equal(np.array('1969-11-01', dtype='M8[M]'),
np.array('1969-11-30T23:59:59.99999', dtype='M').astype('M8[M]'))
assert_equal(np.array('1969-12-01', dtype='M8[M]'),
np.array('1969-12-31T23:59:59.99999', dtype='M').astype('M8[M]'))
assert_equal(np.array('1970-01-01', dtype='M8[M]'),
np.array('1970-01-31T23:59:59.99999', dtype='M').astype('M8[M]'))
assert_equal(np.array('1980-02-01', dtype='M8[M]'),
np.array('1980-02-29T23:59:59.99999', dtype='M').astype('M8[M]'))
    def test_different_unit_comparison(self):
        """Equal instants compare equal regardless of the storage unit."""
        # Check some years with date units
        for unit1 in ['Y', 'M', 'D']:
            dt1 = np.dtype('M8[%s]' % unit1)
            for unit2 in ['Y', 'M', 'D']:
                dt2 = np.dtype('M8[%s]' % unit2)
                assert_equal(np.array('1945', dtype=dt1),
                             np.array('1945', dtype=dt2))
                assert_equal(np.array('1970', dtype=dt1),
                             np.array('1970', dtype=dt2))
                assert_equal(np.array('9999', dtype=dt1),
                             np.array('9999', dtype=dt2))
                assert_equal(np.array('10000', dtype=dt1),
                             np.array('10000-01-01', dtype=dt2))
                assert_equal(np.datetime64('1945', unit1),
                             np.datetime64('1945', unit2))
                assert_equal(np.datetime64('1970', unit1),
                             np.datetime64('1970', unit2))
                assert_equal(np.datetime64('9999', unit1),
                             np.datetime64('9999', unit2))
                assert_equal(np.datetime64('10000', unit1),
                             np.datetime64('10000-01-01', unit2))
        # Check some datetimes with time units
        for unit1 in ['6h', 'h', 'm', 's', '10ms', 'ms', 'us']:
            dt1 = np.dtype('M8[%s]' % unit1)
            for unit2 in ['h', 'm', 's', 'ms', 'us']:
                dt2 = np.dtype('M8[%s]' % unit2)
                assert_equal(np.array('1945-03-12T18', dtype=dt1),
                             np.array('1945-03-12T18', dtype=dt2))
                assert_equal(np.array('1970-03-12T18', dtype=dt1),
                             np.array('1970-03-12T18', dtype=dt2))
                assert_equal(np.array('9999-03-12T18', dtype=dt1),
                             np.array('9999-03-12T18', dtype=dt2))
                assert_equal(np.array('10000-01-01T00', dtype=dt1),
                             np.array('10000-01-01T00', dtype=dt2))
                assert_equal(np.datetime64('1945-03-12T18', unit1),
                             np.datetime64('1945-03-12T18', unit2))
                assert_equal(np.datetime64('1970-03-12T18', unit1),
                             np.datetime64('1970-03-12T18', unit2))
                assert_equal(np.datetime64('9999-03-12T18', unit1),
                             np.datetime64('9999-03-12T18', unit2))
                assert_equal(np.datetime64('10000-01-01T00', unit1),
                             np.datetime64('10000-01-01T00', unit2))
        # Check some days with units that won't overflow
        for unit1 in ['D', '12h', 'h', 'm', 's', '4s', 'ms', 'us']:
            dt1 = np.dtype('M8[%s]' % unit1)
            for unit2 in ['D', 'h', 'm', 's', 'ms', 'us']:
                dt2 = np.dtype('M8[%s]' % unit2)
                assert_(np.equal(np.array('1932-02-17', dtype='M').astype(dt1),
                     np.array('1932-02-17T00:00:00', dtype='M').astype(dt2),
                     casting='unsafe'))
                assert_(np.equal(np.array('10000-04-27', dtype='M').astype(dt1),
                     np.array('10000-04-27T00:00:00', dtype='M').astype(dt2),
                     casting='unsafe'))
        # Shouldn't be able to compare datetime and timedelta
        # TODO: Changing to 'same_kind' or 'safe' casting in the ufuncs by
        # default is needed to properly catch this kind of thing...
        a = np.array('2012-12-21', dtype='M8[D]')
        b = np.array(3, dtype='m8[D]')
        #assert_raises(TypeError, np.less, a, b)
        assert_raises(TypeError, np.less, a, b, casting='same_kind')
def test_datetime_like(self):
a = np.array([3], dtype='m8[4D]')
b = np.array(['2012-12-21'], dtype='M8[D]')
assert_equal(np.ones_like(a).dtype, a.dtype)
assert_equal(np.zeros_like(a).dtype, a.dtype)
assert_equal(np.empty_like(a).dtype, a.dtype)
assert_equal(np.ones_like(b).dtype, b.dtype)
assert_equal(np.zeros_like(b).dtype, b.dtype)
assert_equal(np.empty_like(b).dtype, b.dtype)
def test_datetime_unary(self):
for tda, tdb, tdzero, tdone, tdmone in \
[
# One-dimensional arrays
(np.array([3], dtype='m8[D]'),
np.array([-3], dtype='m8[D]'),
np.array([0], dtype='m8[D]'),
np.array([1], dtype='m8[D]'),
np.array([-1], dtype='m8[D]')),
# NumPy scalars
(np.timedelta64(3, '[D]'),
np.timedelta64(-3, '[D]'),
np.timedelta64(0, '[D]'),
np.timedelta64(1, '[D]'),
np.timedelta64(-1, '[D]'))]:
# negative ufunc
assert_equal(-tdb, tda)
assert_equal((-tdb).dtype, tda.dtype)
assert_equal(np.negative(tdb), tda)
assert_equal(np.negative(tdb).dtype, tda.dtype)
# positive ufunc
assert_equal(np.positive(tda), tda)
assert_equal(np.positive(tda).dtype, tda.dtype)
assert_equal(np.positive(tdb), tdb)
assert_equal(np.positive(tdb).dtype, tdb.dtype)
# absolute ufunc
assert_equal(np.absolute(tdb), tda)
assert_equal(np.absolute(tdb).dtype, tda.dtype)
# sign ufunc
assert_equal(np.sign(tda), tdone)
assert_equal(np.sign(tdb), tdmone)
assert_equal(np.sign(tdzero), tdzero)
assert_equal(np.sign(tda).dtype, tda.dtype)
# The ufuncs always produce native-endian results
assert_
    def test_datetime_add(self):
        """Addition semantics across M8/m8 arrays & scalars, ints and bools.

        Fixture tuple: dta/dtb/dtc are datetimes (dtb = dta + 3 days,
        dtc = dta + 11 hours), dtnat is NaT, and tda/tdb/tdc are timedeltas
        with tdc = tda + tdb.
        """
        for dta, dtb, dtc, dtnat, tda, tdb, tdc in \
                    [
                     # One-dimensional arrays
                     (np.array(['2012-12-21'], dtype='M8[D]'),
                      np.array(['2012-12-24'], dtype='M8[D]'),
                      np.array(['2012-12-21T11'], dtype='M8[h]'),
                      np.array(['NaT'], dtype='M8[D]'),
                      np.array([3], dtype='m8[D]'),
                      np.array([11], dtype='m8[h]'),
                      np.array([3*24 + 11], dtype='m8[h]')),
                     # NumPy scalars
                     (np.datetime64('2012-12-21', '[D]'),
                      np.datetime64('2012-12-24', '[D]'),
                      np.datetime64('2012-12-21T11', '[h]'),
                      np.datetime64('NaT', '[D]'),
                      np.timedelta64(3, '[D]'),
                      np.timedelta64(11, '[h]'),
                      np.timedelta64(3*24 + 11, '[h]'))]:
            # m8 + m8
            assert_equal(tda + tdb, tdc)
            assert_equal((tda + tdb).dtype, np.dtype('m8[h]'))
            # m8 + bool
            assert_equal(tdb + True, tdb + 1)
            assert_equal((tdb + True).dtype, np.dtype('m8[h]'))
            # m8 + int
            assert_equal(tdb + 3*24, tdc)
            assert_equal((tdb + 3*24).dtype, np.dtype('m8[h]'))
            # bool + m8
            assert_equal(False + tdb, tdb)
            assert_equal((False + tdb).dtype, np.dtype('m8[h]'))
            # int + m8
            assert_equal(3*24 + tdb, tdc)
            assert_equal((3*24 + tdb).dtype, np.dtype('m8[h]'))
            # M8 + bool
            assert_equal(dta + True, dta + 1)
            assert_equal(dtnat + True, dtnat)
            assert_equal((dta + True).dtype, np.dtype('M8[D]'))
            # M8 + int
            assert_equal(dta + 3, dtb)
            assert_equal(dtnat + 3, dtnat)
            assert_equal((dta + 3).dtype, np.dtype('M8[D]'))
            # bool + M8
            assert_equal(False + dta, dta)
            assert_equal(False + dtnat, dtnat)
            assert_equal((False + dta).dtype, np.dtype('M8[D]'))
            # int + M8
            assert_equal(3 + dta, dtb)
            assert_equal(3 + dtnat, dtnat)
            assert_equal((3 + dta).dtype, np.dtype('M8[D]'))
            # M8 + m8
            assert_equal(dta + tda, dtb)
            assert_equal(dtnat + tda, dtnat)
            assert_equal((dta + tda).dtype, np.dtype('M8[D]'))
            # m8 + M8
            assert_equal(tda + dta, dtb)
            assert_equal(tda + dtnat, dtnat)
            assert_equal((tda + dta).dtype, np.dtype('M8[D]'))
            # In M8 + m8, the result goes to higher precision
            assert_equal(np.add(dta, tdb, casting='unsafe'), dtc)
            assert_equal(np.add(dta, tdb, casting='unsafe').dtype,
                         np.dtype('M8[h]'))
            assert_equal(np.add(tdb, dta, casting='unsafe'), dtc)
            assert_equal(np.add(tdb, dta, casting='unsafe').dtype,
                         np.dtype('M8[h]'))
            # M8 + M8 is undefined and must raise
            assert_raises(TypeError, np.add, dta, dtb)
    def test_datetime_subtract(self):
        """Subtraction semantics across M8/m8 arrays & scalars, ints, bools.

        Fixture tuple: dta/dtb differ by 3 days; dtc/dtd are the same 1940
        instant in D and h units; dte = dtd - 11 hours; dtnat is NaT; and
        tda/tdb/tdc are timedeltas with tdc = tda - tdb.
        """
        for dta, dtb, dtc, dtd, dte, dtnat, tda, tdb, tdc in \
                    [
                     # One-dimensional arrays
                     (np.array(['2012-12-21'], dtype='M8[D]'),
                      np.array(['2012-12-24'], dtype='M8[D]'),
                      np.array(['1940-12-24'], dtype='M8[D]'),
                      np.array(['1940-12-24T00'], dtype='M8[h]'),
                      np.array(['1940-12-23T13'], dtype='M8[h]'),
                      np.array(['NaT'], dtype='M8[D]'),
                      np.array([3], dtype='m8[D]'),
                      np.array([11], dtype='m8[h]'),
                      np.array([3*24 - 11], dtype='m8[h]')),
                     # NumPy scalars
                     (np.datetime64('2012-12-21', '[D]'),
                      np.datetime64('2012-12-24', '[D]'),
                      np.datetime64('1940-12-24', '[D]'),
                      np.datetime64('1940-12-24T00', '[h]'),
                      np.datetime64('1940-12-23T13', '[h]'),
                      np.datetime64('NaT', '[D]'),
                      np.timedelta64(3, '[D]'),
                      np.timedelta64(11, '[h]'),
                      np.timedelta64(3*24 - 11, '[h]'))]:
            # m8 - m8
            assert_equal(tda - tdb, tdc)
            assert_equal((tda - tdb).dtype, np.dtype('m8[h]'))
            assert_equal(tdb - tda, -tdc)
            assert_equal((tdb - tda).dtype, np.dtype('m8[h]'))
            # m8 - bool
            assert_equal(tdc - True, tdc - 1)
            assert_equal((tdc - True).dtype, np.dtype('m8[h]'))
            # m8 - int
            assert_equal(tdc - 3*24, -tdb)
            assert_equal((tdc - 3*24).dtype, np.dtype('m8[h]'))
            # bool - m8
            assert_equal(False - tdb, -tdb)
            assert_equal((False - tdb).dtype, np.dtype('m8[h]'))
            # int - m8
            assert_equal(3*24 - tdb, tdc)
            assert_equal((3*24 - tdb).dtype, np.dtype('m8[h]'))
            # M8 - bool
            assert_equal(dtb - True, dtb - 1)
            assert_equal(dtnat - True, dtnat)
            assert_equal((dtb - True).dtype, np.dtype('M8[D]'))
            # M8 - int
            assert_equal(dtb - 3, dta)
            assert_equal(dtnat - 3, dtnat)
            assert_equal((dtb - 3).dtype, np.dtype('M8[D]'))
            # M8 - m8
            assert_equal(dtb - tda, dta)
            assert_equal(dtnat - tda, dtnat)
            assert_equal((dtb - tda).dtype, np.dtype('M8[D]'))
            # In M8 - m8, the result goes to higher precision
            assert_equal(np.subtract(dtc, tdb, casting='unsafe'), dte)
            assert_equal(np.subtract(dtc, tdb, casting='unsafe').dtype,
                         np.dtype('M8[h]'))
            # M8 - M8 with different units goes to higher precision
            assert_equal(np.subtract(dtc, dtd, casting='unsafe'),
                         np.timedelta64(0, 'h'))
            assert_equal(np.subtract(dtc, dtd, casting='unsafe').dtype,
                         np.dtype('m8[h]'))
            assert_equal(np.subtract(dtd, dtc, casting='unsafe'),
                         np.timedelta64(0, 'h'))
            assert_equal(np.subtract(dtd, dtc, casting='unsafe').dtype,
                         np.dtype('m8[h]'))
            # m8 - M8
            assert_raises(TypeError, np.subtract, tda, dta)
            # bool - M8
            assert_raises(TypeError, np.subtract, False, dta)
            # int - M8
            assert_raises(TypeError, np.subtract, 3, dta)
    def test_datetime_multiply(self):
        """Multiplication: m8 * scalar scales; anything involving M8 raises.

        Also checks NaT propagation and that inf/nan factors produce NaT.
        """
        for dta, tda, tdb, tdc in \
                    [
                     # One-dimensional arrays
                     (np.array(['2012-12-21'], dtype='M8[D]'),
                      np.array([6], dtype='m8[h]'),
                      np.array([9], dtype='m8[h]'),
                      np.array([12], dtype='m8[h]')),
                     # NumPy scalars
                     (np.datetime64('2012-12-21', '[D]'),
                      np.timedelta64(6, '[h]'),
                      np.timedelta64(9, '[h]'),
                      np.timedelta64(12, '[h]'))]:
            # m8 * int
            assert_equal(tda * 2, tdc)
            assert_equal((tda * 2).dtype, np.dtype('m8[h]'))
            # int * m8
            assert_equal(2 * tda, tdc)
            assert_equal((2 * tda).dtype, np.dtype('m8[h]'))
            # m8 * float
            assert_equal(tda * 1.5, tdb)
            assert_equal((tda * 1.5).dtype, np.dtype('m8[h]'))
            # float * m8
            assert_equal(1.5 * tda, tdb)
            assert_equal((1.5 * tda).dtype, np.dtype('m8[h]'))
            # m8 * m8
            assert_raises(TypeError, np.multiply, tda, tdb)
            # m8 * M8
            assert_raises(TypeError, np.multiply, dta, tda)
            # M8 * m8
            assert_raises(TypeError, np.multiply, tda, dta)
            # M8 * int
            assert_raises(TypeError, np.multiply, dta, 2)
            # int * M8
            assert_raises(TypeError, np.multiply, 2, dta)
            # M8 * float
            assert_raises(TypeError, np.multiply, dta, 1.5)
            # float * M8
            assert_raises(TypeError, np.multiply, 1.5, dta)
        # NaTs: commutative products with NaT (or inf/nan factors) yield NaT
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning, "invalid value encountered in multiply")
            nat = np.timedelta64('NaT')
            def check(a, b, res):
                assert_equal(a * b, res)
                assert_equal(b * a, res)
            for tp in (int, float):
                check(nat, tp(2), nat)
                check(nat, tp(0), nat)
            for f in (float('inf'), float('nan')):
                check(np.timedelta64(1), f, nat)
                check(np.timedelta64(0), f, nat)
                check(nat, f, nat)
    @pytest.mark.parametrize("op1, op2, exp", [
        # m8 same units round down
        (np.timedelta64(7, 's'),
         np.timedelta64(4, 's'),
         1),
        # m8 same units round down with negative
        (np.timedelta64(7, 's'),
         np.timedelta64(-4, 's'),
         -2),
        # m8 same units negative no round down
        (np.timedelta64(8, 's'),
         np.timedelta64(-4, 's'),
         -2),
        # m8 different units
        (np.timedelta64(1, 'm'),
         np.timedelta64(31, 's'),
         1),
        # m8 generic units
        (np.timedelta64(1890),
         np.timedelta64(31),
         60),
        # Y // M works
        (np.timedelta64(2, 'Y'),
         np.timedelta64('13', 'M'),
         1),
        # handle 1D arrays
        (np.array([1, 2, 3], dtype='m8'),
         np.array([2], dtype='m8'),
         np.array([0, 1, 1], dtype=np.int64)),
        ])
    def test_timedelta_floor_divide(self, op1, op2, exp):
        """m8 // m8 floor-divides, rounding toward negative infinity."""
        assert_equal(op1 // op2, exp)
    @pytest.mark.parametrize("op1, op2", [
        # div by 0
        (np.timedelta64(10, 'us'),
         np.timedelta64(0, 'us')),
        # div with NaT
        (np.timedelta64('NaT'),
         np.timedelta64(50, 'us')),
        # special case for int64 min
        # in integer floor division
        (np.timedelta64(np.iinfo(np.int64).min),
         np.timedelta64(-1)),
        ])
    def test_timedelta_floor_div_warnings(self, op1, op2):
        """Degenerate floor divisions warn and yield int64 zero."""
        with assert_warns(RuntimeWarning):
            actual = op1 // op2
            assert_equal(actual, 0)
            assert_equal(actual.dtype, np.int64)
    @pytest.mark.parametrize("val1, val2", [
        # the smallest integer that can't be represented
        # exactly in a double should be preserved if we avoid
        # casting to double in floordiv operation
        (9007199254740993, 1),
        # stress the alternate floordiv code path where
        # operand signs don't match and remainder isn't 0
        (9007199254740999, -2),
        ])
    def test_timedelta_floor_div_precision(self, val1, val2):
        """Timedelta floor division keeps full int64 precision (no float)."""
        op1 = np.timedelta64(val1)
        op2 = np.timedelta64(val2)
        actual = op1 // op2
        # Python reference integer floor
        expected = val1 // val2
        assert_equal(actual, expected)
    @pytest.mark.parametrize("val1, val2", [
        # years and months sometimes can't be unambiguously
        # divided for floor division operation
        (np.timedelta64(7, 'Y'),
         np.timedelta64(3, 's')),
        (np.timedelta64(7, 'M'),
         np.timedelta64(1, 'D')),
        ])
    def test_timedelta_floor_div_error(self, val1, val2):
        """Floor division of incommensurable units raises TypeError."""
        with assert_raises_regex(TypeError, "common metadata divisor"):
            val1 // val2
    @pytest.mark.parametrize("op1, op2", [
        # reuse the test cases from floordiv
        (np.timedelta64(7, 's'),
         np.timedelta64(4, 's')),
        # m8 same units round down with negative
        (np.timedelta64(7, 's'),
         np.timedelta64(-4, 's')),
        # m8 same units negative no round down
        (np.timedelta64(8, 's'),
         np.timedelta64(-4, 's')),
        # m8 different units
        (np.timedelta64(1, 'm'),
         np.timedelta64(31, 's')),
        # m8 generic units
        (np.timedelta64(1890),
         np.timedelta64(31)),
        # Y // M works
        (np.timedelta64(2, 'Y'),
         np.timedelta64('13', 'M')),
        # handle 1D arrays
        (np.array([1, 2, 3], dtype='m8'),
         np.array([2], dtype='m8')),
        ])
    def test_timedelta_divmod(self, op1, op2):
        """divmod(m8, m8) agrees with the separate (//, %) pair."""
        expected = (op1 // op2, op1 % op2)
        assert_equal(divmod(op1, op2), expected)
    @pytest.mark.parametrize("op1, op2", [
        # reuse cases from floordiv
        # div by 0
        (np.timedelta64(10, 'us'),
         np.timedelta64(0, 'us')),
        # div with NaT
        (np.timedelta64('NaT'),
         np.timedelta64(50, 'us')),
        # special case for int64 min
        # in integer floor division
        (np.timedelta64(np.iinfo(np.int64).min),
         np.timedelta64(-1)),
        ])
    def test_timedelta_divmod_warnings(self, op1, op2):
        """Degenerate divmod warns but still matches the (//, %) pair."""
        with assert_warns(RuntimeWarning):
            expected = (op1 // op2, op1 % op2)
        with assert_warns(RuntimeWarning):
            actual = divmod(op1, op2)
        assert_equal(actual, expected)
    def test_datetime_divide(self):
        """True division: m8 / scalar scales, m8 / m8 -> float64, anything
        involving M8 raises.  Also checks NaT and inf/nan divisor behavior.

        Fixture tuple: tda=6h, tdb=9h, tdc=12h, tdd=6m.
        """
        for dta, tda, tdb, tdc, tdd in \
                    [
                     # One-dimensional arrays
                     (np.array(['2012-12-21'], dtype='M8[D]'),
                      np.array([6], dtype='m8[h]'),
                      np.array([9], dtype='m8[h]'),
                      np.array([12], dtype='m8[h]'),
                      np.array([6], dtype='m8[m]')),
                     # NumPy scalars
                     (np.datetime64('2012-12-21', '[D]'),
                      np.timedelta64(6, '[h]'),
                      np.timedelta64(9, '[h]'),
                      np.timedelta64(12, '[h]'),
                      np.timedelta64(6, '[m]'))]:
            # m8 / int
            assert_equal(tdc / 2, tda)
            assert_equal((tdc / 2).dtype, np.dtype('m8[h]'))
            # m8 / float
            assert_equal(tda / 0.5, tdc)
            assert_equal((tda / 0.5).dtype, np.dtype('m8[h]'))
            # m8 / m8
            assert_equal(tda / tdb, 6.0 / 9.0)
            assert_equal(np.divide(tda, tdb), 6.0 / 9.0)
            assert_equal(np.true_divide(tda, tdb), 6.0 / 9.0)
            assert_equal(tdb / tda, 9.0 / 6.0)
            assert_equal((tda / tdb).dtype, np.dtype('f8'))
            # different units are converted before dividing (6h / 6m = 60)
            assert_equal(tda / tdd, 60.0)
            assert_equal(tdd / tda, 1.0 / 60.0)
            # int / m8
            assert_raises(TypeError, np.divide, 2, tdb)
            # float / m8
            assert_raises(TypeError, np.divide, 0.5, tdb)
            # m8 / M8
            assert_raises(TypeError, np.divide, dta, tda)
            # M8 / m8
            assert_raises(TypeError, np.divide, tda, dta)
            # M8 / int
            assert_raises(TypeError, np.divide, dta, 2)
            # int / M8
            assert_raises(TypeError, np.divide, 2, dta)
            # M8 / float
            assert_raises(TypeError, np.divide, dta, 1.5)
            # float / M8
            assert_raises(TypeError, np.divide, 1.5, dta)
        # NaTs
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning, r".*encountered in true\_divide")
            nat = np.timedelta64('NaT')
            for tp in (int, float):
                assert_equal(np.timedelta64(1) / tp(0), nat)
                assert_equal(np.timedelta64(0) / tp(0), nat)
                assert_equal(nat / tp(0), nat)
                assert_equal(nat / tp(2), nat)
            # Division by inf
            assert_equal(np.timedelta64(1) / float('inf'), np.timedelta64(0))
            assert_equal(np.timedelta64(0) / float('inf'), np.timedelta64(0))
            assert_equal(nat / float('inf'), nat)
            # Division by nan
            assert_equal(np.timedelta64(1) / float('nan'), nat)
            assert_equal(np.timedelta64(0) / float('nan'), nat)
            assert_equal(nat / float('nan'), nat)
def test_datetime_compare(self):
# Test all the comparison operators
a = np.datetime64('2000-03-12T18:00:00.000000')
b = np.array(['2000-03-12T18:00:00.000000',
'2000-03-12T17:59:59.999999',
'2000-03-12T18:00:00.000001',
'1970-01-11T12:00:00.909090',
'2016-01-11T12:00:00.909090'],
dtype='datetime64[us]')
assert_equal(np.equal(a, b), [1, 0, 0, 0, 0])
assert_equal(np.not_equal(a, b), [0, 1, 1, 1, 1])
assert_equal(np.less(a, b), [0, 0, 1, 0, 1])
assert_equal(np.less_equal(a, b), [1, 0, 1, 0, 1])
assert_equal(np.greater(a, b), [0, 1, 0, 1, 0])
assert_equal(np.greater_equal(a, b), [1, 1, 0, 1, 0])
def test_datetime_compare_nat(self):
dt_nat = np.datetime64('NaT', 'D')
dt_other = np.datetime64('2000-01-01')
td_nat = np.timedelta64('NaT', 'h')
td_other = np.timedelta64(1, 'h')
for op in [np.equal, np.less, np.less_equal,
np.greater, np.greater_equal]:
assert_(not op(dt_nat, dt_nat))
assert_(not op(dt_nat, dt_other))
assert_(not op(dt_other, dt_nat))
assert_(not op(td_nat, td_nat))
assert_(not op(td_nat, td_other))
assert_(not op(td_other, td_nat))
assert_(np.not_equal(dt_nat, dt_nat))
assert_(np.not_equal(dt_nat, dt_other))
assert_(np.not_equal(dt_other, dt_nat))
assert_(np.not_equal(td_nat, td_nat))
assert_(np.not_equal(td_nat, td_other))
assert_(np.not_equal(td_other, td_nat))
    def test_datetime_minmax(self):
        """minimum/maximum/fmin/fmax on M8 and m8: result unit is the GCD,
        min/max propagate NaT while fmin/fmax ignore it."""
        # The metadata of the result should become the GCD
        # of the operand metadata
        a = np.array('1999-03-12T13', dtype='M8[2m]')
        b = np.array('1999-03-12T12', dtype='M8[s]')
        assert_equal(np.minimum(a, b), b)
        assert_equal(np.minimum(a, b).dtype, np.dtype('M8[s]'))
        assert_equal(np.fmin(a, b), b)
        assert_equal(np.fmin(a, b).dtype, np.dtype('M8[s]'))
        assert_equal(np.maximum(a, b), a)
        assert_equal(np.maximum(a, b).dtype, np.dtype('M8[s]'))
        assert_equal(np.fmax(a, b), a)
        assert_equal(np.fmax(a, b).dtype, np.dtype('M8[s]'))
        # Viewed as integers, the comparison is opposite because
        # of the units chosen
        assert_equal(np.minimum(a.view('i8'), b.view('i8')), a.view('i8'))
        # Interaction with NaT
        a = np.array('1999-03-12T13', dtype='M8[2m]')
        dtnat = np.array('NaT', dtype='M8[h]')
        assert_equal(np.minimum(a, dtnat), dtnat)
        assert_equal(np.minimum(dtnat, a), dtnat)
        assert_equal(np.maximum(a, dtnat), dtnat)
        assert_equal(np.maximum(dtnat, a), dtnat)
        assert_equal(np.fmin(dtnat, a), a)
        assert_equal(np.fmin(a, dtnat), a)
        assert_equal(np.fmax(dtnat, a), a)
        assert_equal(np.fmax(a, dtnat), a)
        # Also do timedelta
        a = np.array(3, dtype='m8[h]')
        b = np.array(3*3600 - 3, dtype='m8[s]')
        assert_equal(np.minimum(a, b), b)
        assert_equal(np.minimum(a, b).dtype, np.dtype('m8[s]'))
        assert_equal(np.fmin(a, b), b)
        assert_equal(np.fmin(a, b).dtype, np.dtype('m8[s]'))
        assert_equal(np.maximum(a, b), a)
        assert_equal(np.maximum(a, b).dtype, np.dtype('m8[s]'))
        assert_equal(np.fmax(a, b), a)
        assert_equal(np.fmax(a, b).dtype, np.dtype('m8[s]'))
        # Viewed as integers, the comparison is opposite because
        # of the units chosen
        assert_equal(np.minimum(a.view('i8'), b.view('i8')), a.view('i8'))
        # should raise between datetime and timedelta
        #
        # TODO: Allowing unsafe casting by
        #       default in ufuncs strikes again... :(
        a = np.array(3, dtype='m8[h]')
        b = np.array('1999-03-12T12', dtype='M8[s]')
        #assert_raises(TypeError, np.minimum, a, b)
        #assert_raises(TypeError, np.maximum, a, b)
        #assert_raises(TypeError, np.fmin, a, b)
        #assert_raises(TypeError, np.fmax, a, b)
        assert_raises(TypeError, np.minimum, a, b, casting='same_kind')
        assert_raises(TypeError, np.maximum, a, b, casting='same_kind')
        assert_raises(TypeError, np.fmin, a, b, casting='same_kind')
        assert_raises(TypeError, np.fmax, a, b, casting='same_kind')
def test_hours(self):
t = np.ones(3, dtype='M8[s]')
t[0] = 60*60*24 + 60*60*10
assert_(t[0].item().hour == 10)
def test_divisor_conversion_year(self):
assert_(np.dtype('M8[Y/4]') == np.dtype('M8[3M]'))
assert_(np.dtype('M8[Y/13]') == np.dtype('M8[4W]'))
assert_(np.dtype('M8[3Y/73]') == np.dtype('M8[15D]'))
def test_divisor_conversion_month(self):
assert_(np.dtype('M8[M/2]') == np.dtype('M8[2W]'))
assert_(np.dtype('M8[M/15]') == np.dtype('M8[2D]'))
assert_(np.dtype('M8[3M/40]') == np.dtype('M8[54h]'))
def test_divisor_conversion_week(self):
assert_(np.dtype('m8[W/7]') == np.dtype('m8[D]'))
assert_(np.dtype('m8[3W/14]') == np.dtype('m8[36h]'))
assert_(np.dtype('m8[5W/140]') == np.dtype('m8[360m]'))
def test_divisor_conversion_day(self):
assert_(np.dtype('M8[D/12]') == np.dtype('M8[2h]'))
assert_(np.dtype('M8[D/120]') == np.dtype('M8[12m]'))
assert_(np.dtype('M8[3D/960]') == np.dtype('M8[270s]'))
def test_divisor_conversion_hour(self):
assert_(np.dtype('m8[h/30]') == np.dtype('m8[2m]'))
assert_(np.dtype('m8[3h/300]') == np.dtype('m8[36s]'))
def test_divisor_conversion_minute(self):
assert_(np.dtype('m8[m/30]') == np.dtype('m8[2s]'))
assert_(np.dtype('m8[3m/300]') == np.dtype('m8[600ms]'))
def test_divisor_conversion_second(self):
assert_(np.dtype('m8[s/100]') == np.dtype('m8[10ms]'))
assert_(np.dtype('m8[3s/10000]') == np.dtype('m8[300us]'))
def test_divisor_conversion_fs(self):
assert_(np.dtype('M8[fs/100]') == np.dtype('M8[10as]'))
assert_raises(ValueError, lambda: np.dtype('M8[3fs/10000]'))
def test_divisor_conversion_as(self):
assert_raises(ValueError, lambda: np.dtype('M8[as/10]'))
    def test_string_parser_variants(self):
        """Accepted ISO-8601 variants: space separator, signed years, and
        (deprecated) 'Z' / numeric timezone offsets."""
        # Allow space instead of 'T' between date and time
        assert_equal(np.array(['1980-02-29T01:02:03'], np.dtype('M8[s]')),
                     np.array(['1980-02-29 01:02:03'], np.dtype('M8[s]')))
        # Allow positive years
        assert_equal(np.array(['+1980-02-29T01:02:03'], np.dtype('M8[s]')),
                     np.array(['+1980-02-29 01:02:03'], np.dtype('M8[s]')))
        # Allow negative years
        assert_equal(np.array(['-1980-02-29T01:02:03'], np.dtype('M8[s]')),
                     np.array(['-1980-02-29 01:02:03'], np.dtype('M8[s]')))
        # UTC specifier
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['+1980-02-29T01:02:03'], np.dtype('M8[s]')),
                np.array(['+1980-02-29 01:02:03Z'], np.dtype('M8[s]')))
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['-1980-02-29T01:02:03'], np.dtype('M8[s]')),
                np.array(['-1980-02-29 01:02:03Z'], np.dtype('M8[s]')))
        # Time zone offset
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['1980-02-29T02:02:03'], np.dtype('M8[s]')),
                np.array(['1980-02-29 00:32:03-0130'], np.dtype('M8[s]')))
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['1980-02-28T22:32:03'], np.dtype('M8[s]')),
                np.array(['1980-02-29 00:02:03+01:30'], np.dtype('M8[s]')))
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['1980-02-29T02:32:03.506'], np.dtype('M8[s]')),
                np.array(['1980-02-29 00:32:03.506-02'], np.dtype('M8[s]')))
        with assert_warns(DeprecationWarning):
            assert_equal(np.datetime64('1977-03-02T12:30-0230'),
                         np.datetime64('1977-03-02T15:00'))
    def test_string_parser_error_check(self):
        """Malformed or out-of-range datetime strings raise ValueError."""
        # Arbitrary bad string
        assert_raises(ValueError, np.array, ['badvalue'], np.dtype('M8[us]'))
        # Character after year must be '-'
        assert_raises(ValueError, np.array, ['1980X'], np.dtype('M8[us]'))
        # Cannot have trailing '-'
        assert_raises(ValueError, np.array, ['1980-'], np.dtype('M8[us]'))
        # Month must be in range [1,12]
        assert_raises(ValueError, np.array, ['1980-00'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-13'], np.dtype('M8[us]'))
        # Month must have two digits
        assert_raises(ValueError, np.array, ['1980-1'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-1-02'], np.dtype('M8[us]'))
        # 'Mor' is not a valid month
        assert_raises(ValueError, np.array, ['1980-Mor'], np.dtype('M8[us]'))
        # Cannot have trailing '-'
        assert_raises(ValueError, np.array, ['1980-01-'], np.dtype('M8[us]'))
        # Day must be in range [1,len(month)]
        assert_raises(ValueError, np.array, ['1980-01-0'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-01-00'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-01-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1979-02-29'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-30'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-03-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-04-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-05-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-06-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-07-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-08-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-09-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-10-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-11-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-12-32'], np.dtype('M8[us]'))
        # Cannot have trailing characters
        assert_raises(ValueError, np.array, ['1980-02-03%'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 q'],
                                            np.dtype('M8[us]'))
        # Hours must be in range [0, 23]
        assert_raises(ValueError, np.array, ['1980-02-03 25'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03T25'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 24:01'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03T24:01'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 -1'],
                                            np.dtype('M8[us]'))
        # No trailing ':'
        assert_raises(ValueError, np.array, ['1980-02-03 01:'],
                                            np.dtype('M8[us]'))
        # Minutes must be in range [0, 59]
        assert_raises(ValueError, np.array, ['1980-02-03 01:-1'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:60'],
                                            np.dtype('M8[us]'))
        # No trailing ':'
        assert_raises(ValueError, np.array, ['1980-02-03 01:60:'],
                                            np.dtype('M8[us]'))
        # Seconds must be in range [0, 59]
        assert_raises(ValueError, np.array, ['1980-02-03 01:10:-1'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:01:60'],
                                            np.dtype('M8[us]'))
        # Timezone offset must within a reasonable range
        # (timezone strings themselves are deprecated, hence the warning)
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00+0661'],
                                                np.dtype('M8[us]'))
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00+2500'],
                                                np.dtype('M8[us]'))
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-0070'],
                                                np.dtype('M8[us]'))
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-3000'],
                                                np.dtype('M8[us]'))
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-25:00'],
                                                np.dtype('M8[us]'))
    def test_creation_overflow(self):
        """Conversion of the same instant to ms/us/ns agrees with int64
        scaling; values beyond int64 raise OverflowError (gh-13062)."""
        date = '1980-03-23 20:00:00'
        # Raw epoch seconds for the date, scaled by 1000 per finer unit.
        timesteps = np.array([date], dtype='datetime64[s]')[0].astype(np.int64)
        for unit in ['ms', 'us', 'ns']:
            timesteps *= 1000
            x = np.array([date], dtype='datetime64[%s]' % unit)
            assert_equal(timesteps, x[0].astype(np.int64),
                         err_msg='Datetime conversion error for unit %s'
                                 % unit)
        assert_equal(x[0].astype(np.int64), 322689600000000000)
        # gh-13062
        with pytest.raises(OverflowError):
            np.datetime64(2**64, 'D')
        with pytest.raises(OverflowError):
            np.timedelta64(2**64, 'D')
    def test_datetime_as_string(self):
        """String conversion for every unit, explicit ``unit=``, and 'auto'.

        NOTE: the local variable ``datetime`` shadows the stdlib module
        name; here it is just an ISO timestamp string.
        """
        # Check all the units with default string conversion
        date = '1959-10-13'
        datetime = '1959-10-13T12:34:56.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(date, 'Y')),
                    '1959')
        assert_equal(np.datetime_as_string(np.datetime64(date, 'M')),
                    '1959-10')
        assert_equal(np.datetime_as_string(np.datetime64(date, 'D')),
                    '1959-10-13')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'h')),
                    '1959-10-13T12')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'm')),
                    '1959-10-13T12:34')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 's')),
                    '1959-10-13T12:34:56')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ms')),
                    '1959-10-13T12:34:56.789')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'us')),
                    '1959-10-13T12:34:56.789012')
        # Sub-microsecond units: a pre-epoch timestamp truncates toward -inf.
        datetime = '1969-12-31T23:34:56.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ns')),
                    '1969-12-31T23:34:56.789012345')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ps')),
                    '1969-12-31T23:34:56.789012345678')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'fs')),
                    '1969-12-31T23:34:56.789012345678901')
        datetime = '1969-12-31T23:59:57.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'as')),
                    datetime)
        datetime = '1970-01-01T00:34:56.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ns')),
                    '1970-01-01T00:34:56.789012345')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ps')),
                    '1970-01-01T00:34:56.789012345678')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'fs')),
                    '1970-01-01T00:34:56.789012345678901')
        datetime = '1970-01-01T00:00:05.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'as')),
                    datetime)
        # String conversion with the unit= parameter
        a = np.datetime64('2032-07-18T12:23:34.123456', 'us')
        assert_equal(np.datetime_as_string(a, unit='Y', casting='unsafe'),
                    '2032')
        assert_equal(np.datetime_as_string(a, unit='M', casting='unsafe'),
                    '2032-07')
        assert_equal(np.datetime_as_string(a, unit='W', casting='unsafe'),
                    '2032-07-18')
        assert_equal(np.datetime_as_string(a, unit='D', casting='unsafe'),
                    '2032-07-18')
        assert_equal(np.datetime_as_string(a, unit='h'), '2032-07-18T12')
        assert_equal(np.datetime_as_string(a, unit='m'),
                    '2032-07-18T12:23')
        assert_equal(np.datetime_as_string(a, unit='s'),
                    '2032-07-18T12:23:34')
        assert_equal(np.datetime_as_string(a, unit='ms'),
                    '2032-07-18T12:23:34.123')
        assert_equal(np.datetime_as_string(a, unit='us'),
                    '2032-07-18T12:23:34.123456')
        assert_equal(np.datetime_as_string(a, unit='ns'),
                    '2032-07-18T12:23:34.123456000')
        assert_equal(np.datetime_as_string(a, unit='ps'),
                    '2032-07-18T12:23:34.123456000000')
        assert_equal(np.datetime_as_string(a, unit='fs'),
                    '2032-07-18T12:23:34.123456000000000')
        assert_equal(np.datetime_as_string(a, unit='as'),
                    '2032-07-18T12:23:34.123456000000000000')
        # unit='auto' parameter: trims trailing zero fields
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:23:34.123456', 'us'), unit='auto'),
                '2032-07-18T12:23:34.123456')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:23:34.12', 'us'), unit='auto'),
                '2032-07-18T12:23:34.120')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:23:34', 'us'), unit='auto'),
                '2032-07-18T12:23:34')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:23:00', 'us'), unit='auto'),
                '2032-07-18T12:23')
        # 'auto' doesn't split up hour and minute
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:00:00', 'us'), unit='auto'),
                '2032-07-18T12:00')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T00:00:00', 'us'), unit='auto'),
                '2032-07-18')
        # 'auto' doesn't split up the date
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-01T00:00:00', 'us'), unit='auto'),
                '2032-07-01')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-01-01T00:00:00', 'us'), unit='auto'),
                '2032-01-01')
    @pytest.mark.skipif(not _has_pytz, reason="The pytz module is not available.")
    def test_datetime_as_string_timezone(self):
        """``timezone=`` naive/'UTC'/'local' strings and pytz tzinfo objects.

        The -0500/-0600 offsets below show that 2010-03-15 falls inside US
        daylight saving time while 2010-02-15 does not.
        """
        # timezone='local' vs 'UTC'
        a = np.datetime64('2010-03-15T06:30', 'm')
        assert_equal(np.datetime_as_string(a),
                    '2010-03-15T06:30')
        assert_equal(np.datetime_as_string(a, timezone='naive'),
                    '2010-03-15T06:30')
        assert_equal(np.datetime_as_string(a, timezone='UTC'),
                    '2010-03-15T06:30Z')
        # 'local' depends on the machine's TZ, so only check it differs.
        assert_(np.datetime_as_string(a, timezone='local') !=
                '2010-03-15T06:30')
        b = np.datetime64('2010-02-15T06:30', 'm')
        assert_equal(np.datetime_as_string(a, timezone=tz('US/Central')),
                    '2010-03-15T01:30-0500')
        assert_equal(np.datetime_as_string(a, timezone=tz('US/Eastern')),
                    '2010-03-15T02:30-0400')
        assert_equal(np.datetime_as_string(a, timezone=tz('US/Pacific')),
                    '2010-03-14T23:30-0700')
        assert_equal(np.datetime_as_string(b, timezone=tz('US/Central')),
                    '2010-02-15T00:30-0600')
        assert_equal(np.datetime_as_string(b, timezone=tz('US/Eastern')),
                    '2010-02-15T01:30-0500')
        assert_equal(np.datetime_as_string(b, timezone=tz('US/Pacific')),
                    '2010-02-14T22:30-0800')
        # Dates to strings with a timezone attached is disabled by default
        assert_raises(TypeError, np.datetime_as_string, a, unit='D',
                      timezone=tz('US/Pacific'))
        # Check that we can print out the date in the specified time zone
        assert_equal(np.datetime_as_string(a, unit='D',
                    timezone=tz('US/Pacific'), casting='unsafe'),
                    '2010-03-14')
        assert_equal(np.datetime_as_string(b, unit='D',
                    timezone=tz('US/Central'), casting='unsafe'),
                    '2010-02-15')
def test_datetime_arange(self):
# With two datetimes provided as strings
a = np.arange('2010-01-05', '2010-01-10', dtype='M8[D]')
assert_equal(a.dtype, np.dtype('M8[D]'))
assert_equal(a,
np.array(['2010-01-05', '2010-01-06', '2010-01-07',
'2010-01-08', '2010-01-09'], dtype='M8[D]'))
a = np.arange('1950-02-10', '1950-02-06', -1, dtype='M8[D]')
assert_equal(a.dtype, np.dtype('M8[D]'))
assert_equal(a,
np.array(['1950-02-10', '1950-02-09', '1950-02-08',
'1950-02-07'], dtype='M8[D]'))
# Unit should be detected as months here
a = np.arange('1969-05', '1970-05', 2, dtype='M8')
assert_equal(a.dtype, np.dtype('M8[M]'))
assert_equal(a,
np.datetime64('1969-05') + np.arange(12, step=2))
# datetime, integer|timedelta works as well
# produces arange (start, start + stop) in this case
a = np.arange('1969', 18, 3, dtype='M8')
assert_equal(a.dtype, np.dtype('M8[Y]'))
assert_equal(a,
np.datetime64('1969') + np.arange(18, step=3))
a = np.arange('1969-12-19', 22, np.timedelta64(2), dtype='M8')
assert_equal(a.dtype, np.dtype('M8[D]'))
assert_equal(a,
np.datetime64('1969-12-19') + np.arange(22, step=2))
# Step of 0 is disallowed
assert_raises(ValueError, np.arange, np.datetime64('today'),
np.datetime64('today') + 3, 0)
# Promotion across nonlinear unit boundaries is disallowed
assert_raises(TypeError, np.arange, np.datetime64('2011-03-01', 'D'),
np.timedelta64(5, 'M'))
assert_raises(TypeError, np.arange,
np.datetime64('2012-02-03T14', 's'),
np.timedelta64(5, 'Y'))
def test_datetime_arange_no_dtype(self):
d = np.array('2010-01-04', dtype="M8[D]")
assert_equal(np.arange(d, d + 1), d)
assert_raises(ValueError, np.arange, d)
def test_timedelta_arange(self):
a = np.arange(3, 10, dtype='m8')
assert_equal(a.dtype, np.dtype('m8'))
assert_equal(a, np.timedelta64(0) + np.arange(3, 10))
a = np.arange(np.timedelta64(3, 's'), 10, 2, dtype='m8')
assert_equal(a.dtype, np.dtype('m8[s]'))
assert_equal(a, np.timedelta64(0, 's') + np.arange(3, 10, 2))
# Step of 0 is disallowed
assert_raises(ValueError, np.arange, np.timedelta64(0),
np.timedelta64(5), 0)
# Promotion across nonlinear unit boundaries is disallowed
assert_raises(TypeError, np.arange, np.timedelta64(0, 'D'),
np.timedelta64(5, 'M'))
assert_raises(TypeError, np.arange, np.timedelta64(0, 'Y'),
np.timedelta64(5, 'D'))
    @pytest.mark.parametrize("val1, val2, expected", [
        # case from gh-12092
        (np.timedelta64(7, 's'),
         np.timedelta64(3, 's'),
         np.timedelta64(1, 's')),
        # negative value cases: the result takes the sign of the divisor,
        # matching Python's % convention
        (np.timedelta64(3, 's'),
         np.timedelta64(-2, 's'),
         np.timedelta64(-1, 's')),
        (np.timedelta64(-3, 's'),
         np.timedelta64(2, 's'),
         np.timedelta64(1, 's')),
        # larger value cases
        (np.timedelta64(17, 's'),
         np.timedelta64(22, 's'),
         np.timedelta64(17, 's')),
        (np.timedelta64(22, 's'),
         np.timedelta64(17, 's'),
         np.timedelta64(5, 's')),
        # different units: operands promote to the finer common unit
        (np.timedelta64(1, 'm'),
         np.timedelta64(57, 's'),
         np.timedelta64(3, 's')),
        (np.timedelta64(1, 'us'),
         np.timedelta64(727, 'ns'),
         np.timedelta64(273, 'ns')),
        # NaT is propagated
        (np.timedelta64('NaT'),
         np.timedelta64(50, 'ns'),
         np.timedelta64('NaT')),
        # Y % M works
        (np.timedelta64(2, 'Y'),
         np.timedelta64(22, 'M'),
         np.timedelta64(2, 'M')),
        ])
    def test_timedelta_modulus(self, val1, val2, expected):
        """``val1 % val2`` must equal ``expected`` for every table row."""
        assert_equal(val1 % val2, expected)
@pytest.mark.parametrize("val1, val2", [
# years and months sometimes can't be unambiguously
# divided for modulus operation
(np.timedelta64(7, 'Y'),
np.timedelta64(3, 's')),
(np.timedelta64(7, 'M'),
np.timedelta64(1, 'D')),
])
def test_timedelta_modulus_error(self, val1, val2):
with assert_raises_regex(TypeError, "common metadata divisor"):
val1 % val2
def test_timedelta_modulus_div_by_zero(self):
with assert_warns(RuntimeWarning):
actual = np.timedelta64(10, 's') % np.timedelta64(0, 's')
assert_equal(actual, np.timedelta64('NaT'))
@pytest.mark.parametrize("val1, val2", [
# cases where one operand is not
# timedelta64
(np.timedelta64(7, 'Y'),
15,),
(7.5,
np.timedelta64(1, 'D')),
])
def test_timedelta_modulus_type_resolution(self, val1, val2):
# NOTE: some of the operations may be supported
# in the future
with assert_raises_regex(TypeError,
"'remainder' cannot use operands with types"):
val1 % val2
def test_timedelta_arange_no_dtype(self):
d = np.array(5, dtype="m8[D]")
assert_equal(np.arange(d, d + 1), d)
assert_equal(np.arange(d), np.arange(0, d))
def test_datetime_maximum_reduce(self):
a = np.array(['2010-01-02', '1999-03-14', '1833-03'], dtype='M8[D]')
assert_equal(np.maximum.reduce(a).dtype, np.dtype('M8[D]'))
assert_equal(np.maximum.reduce(a),
np.datetime64('2010-01-02'))
a = np.array([1, 4, 0, 7, 2], dtype='m8[s]')
assert_equal(np.maximum.reduce(a).dtype, np.dtype('m8[s]'))
assert_equal(np.maximum.reduce(a),
np.timedelta64(7, 's'))
    def test_datetime_busday_offset(self):
        """busday_offset with custom weekmasks, every roll mode, and NaT."""
        # First Monday in June
        assert_equal(
            np.busday_offset('2011-06', 0, roll='forward', weekmask='Mon'),
            np.datetime64('2011-06-06'))
        # Last Monday in June
        assert_equal(
            np.busday_offset('2011-07', -1, roll='forward', weekmask='Mon'),
            np.datetime64('2011-06-27'))
        # NOTE(review): the next assertion duplicates the previous one
        # verbatim; it was probably meant to check a different case.
        assert_equal(
            np.busday_offset('2011-07', -1, roll='forward', weekmask='Mon'),
            np.datetime64('2011-06-27'))
        # Default M-F business days, different roll modes
        assert_equal(np.busday_offset('2010-08', 0, roll='backward'),
                     np.datetime64('2010-07-30'))
        assert_equal(np.busday_offset('2010-08', 0, roll='preceding'),
                     np.datetime64('2010-07-30'))
        assert_equal(np.busday_offset('2010-08', 0, roll='modifiedpreceding'),
                     np.datetime64('2010-08-02'))
        assert_equal(np.busday_offset('2010-08', 0, roll='modifiedfollowing'),
                     np.datetime64('2010-08-02'))
        assert_equal(np.busday_offset('2010-08', 0, roll='forward'),
                     np.datetime64('2010-08-02'))
        assert_equal(np.busday_offset('2010-08', 0, roll='following'),
                     np.datetime64('2010-08-02'))
        assert_equal(np.busday_offset('2010-10-30', 0, roll='following'),
                     np.datetime64('2010-11-01'))
        # 'modified*' rolls flip direction rather than cross a month boundary.
        assert_equal(
            np.busday_offset('2010-10-30', 0, roll='modifiedfollowing'),
            np.datetime64('2010-10-29'))
        assert_equal(
            np.busday_offset('2010-10-30', 0, roll='modifiedpreceding'),
            np.datetime64('2010-10-29'))
        assert_equal(
            np.busday_offset('2010-10-16', 0, roll='modifiedfollowing'),
            np.datetime64('2010-10-18'))
        assert_equal(
            np.busday_offset('2010-10-16', 0, roll='modifiedpreceding'),
            np.datetime64('2010-10-15'))
        # roll='raise' by default
        assert_raises(ValueError, np.busday_offset, '2011-06-04', 0)
        # Bigger offset values
        assert_equal(np.busday_offset('2006-02-01', 25),
                     np.datetime64('2006-03-08'))
        assert_equal(np.busday_offset('2006-03-08', -25),
                     np.datetime64('2006-02-01'))
        assert_equal(np.busday_offset('2007-02-25', 11, weekmask='SatSun'),
                     np.datetime64('2007-04-07'))
        assert_equal(np.busday_offset('2007-04-07', -11, weekmask='SatSun'),
                     np.datetime64('2007-02-25'))
        # NaT values when roll is not raise
        assert_equal(np.busday_offset(np.datetime64('NaT'), 1, roll='nat'),
                     np.datetime64('NaT'))
        assert_equal(np.busday_offset(np.datetime64('NaT'), 1, roll='following'),
                     np.datetime64('NaT'))
        assert_equal(np.busday_offset(np.datetime64('NaT'), 1, roll='preceding'),
                     np.datetime64('NaT'))
    def test_datetime_busdaycalendar(self):
        """busdaycalendar normalizes holidays and parses weekmask formats."""
        # Check that it removes NaT, duplicates, and weekends
        # and sorts the result.
        bdd = np.busdaycalendar(
            holidays=['NaT', '2011-01-17', '2011-03-06', 'NaT',
                      '2011-12-26', '2011-05-30', '2011-01-17'])
        # '2011-03-06' is a Sunday, so it is dropped along with NaT/dups.
        assert_equal(bdd.holidays,
            np.array(['2011-01-17', '2011-05-30', '2011-12-26'], dtype='M8'))
        # Default M-F weekmask
        assert_equal(bdd.weekmask, np.array([1, 1, 1, 1, 1, 0, 0], dtype='?'))
        # Check string weekmask with varying whitespace.
        bdd = np.busdaycalendar(weekmask="Sun TueWed Thu\tFri")
        assert_equal(bdd.weekmask, np.array([0, 1, 1, 1, 1, 0, 1], dtype='?'))
        # Check length 7 0/1 string
        bdd = np.busdaycalendar(weekmask="0011001")
        assert_equal(bdd.weekmask, np.array([0, 0, 1, 1, 0, 0, 1], dtype='?'))
        # Check abbreviated weekday-name weekmask (the original comment said
        # "length 7 string", but "Mon Tue" is the name-based form).
        bdd = np.busdaycalendar(weekmask="Mon Tue")
        assert_equal(bdd.weekmask, np.array([1, 1, 0, 0, 0, 0, 0], dtype='?'))
        # All-zeros weekmask should raise
        assert_raises(ValueError, np.busdaycalendar, weekmask=[0, 0, 0, 0, 0, 0, 0])
        # weekday names must be correct case
        assert_raises(ValueError, np.busdaycalendar, weekmask="satsun")
        # Empty weekmask string should raise
        assert_raises(ValueError, np.busdaycalendar, weekmask="")
        # Invalid weekday name codes should raise
        assert_raises(ValueError, np.busdaycalendar, weekmask="Mon Tue We")
        assert_raises(ValueError, np.busdaycalendar, weekmask="Max")
        assert_raises(ValueError, np.busdaycalendar, weekmask="Monday Tue")
    def test_datetime_busday_holidays_offset(self):
        """busday_offset with holiday lists, a busdaycal, and roll modes.

        The "bigger jump" sections validate holiday-aware offsets against
        plain offsets with the holiday count added/subtracted by hand.
        """
        # With exactly one holiday
        assert_equal(
            np.busday_offset('2011-11-10', 1, holidays=['2011-11-11']),
            np.datetime64('2011-11-14'))
        assert_equal(
            np.busday_offset('2011-11-04', 5, holidays=['2011-11-11']),
            np.datetime64('2011-11-14'))
        assert_equal(
            np.busday_offset('2011-11-10', 5, holidays=['2011-11-11']),
            np.datetime64('2011-11-18'))
        assert_equal(
            np.busday_offset('2011-11-14', -1, holidays=['2011-11-11']),
            np.datetime64('2011-11-10'))
        assert_equal(
            np.busday_offset('2011-11-18', -5, holidays=['2011-11-11']),
            np.datetime64('2011-11-10'))
        assert_equal(
            np.busday_offset('2011-11-14', -5, holidays=['2011-11-11']),
            np.datetime64('2011-11-04'))
        # With the holiday appearing twice
        assert_equal(
            np.busday_offset('2011-11-10', 1,
                holidays=['2011-11-11', '2011-11-11']),
            np.datetime64('2011-11-14'))
        assert_equal(
            np.busday_offset('2011-11-14', -1,
                holidays=['2011-11-11', '2011-11-11']),
            np.datetime64('2011-11-10'))
        # With a NaT holiday
        assert_equal(
            np.busday_offset('2011-11-10', 1,
                holidays=['2011-11-11', 'NaT']),
            np.datetime64('2011-11-14'))
        assert_equal(
            np.busday_offset('2011-11-14', -1,
                holidays=['NaT', '2011-11-11']),
            np.datetime64('2011-11-10'))
        # With another holiday after
        assert_equal(
            np.busday_offset('2011-11-10', 1,
                holidays=['2011-11-11', '2011-11-24']),
            np.datetime64('2011-11-14'))
        assert_equal(
            np.busday_offset('2011-11-14', -1,
                holidays=['2011-11-11', '2011-11-24']),
            np.datetime64('2011-11-10'))
        # With another holiday before
        assert_equal(
            np.busday_offset('2011-11-10', 1,
                holidays=['2011-10-10', '2011-11-11']),
            np.datetime64('2011-11-14'))
        assert_equal(
            np.busday_offset('2011-11-14', -1,
                holidays=['2011-10-10', '2011-11-11']),
            np.datetime64('2011-11-10'))
        # With another holiday before and after
        assert_equal(
            np.busday_offset('2011-11-10', 1,
                holidays=['2011-10-10', '2011-11-11', '2011-11-24']),
            np.datetime64('2011-11-14'))
        assert_equal(
            np.busday_offset('2011-11-14', -1,
                holidays=['2011-10-10', '2011-11-11', '2011-11-24']),
            np.datetime64('2011-11-10'))
        # A bigger forward jump across more than one week/holiday
        holidays = ['2011-10-10', '2011-11-11', '2011-11-24',
                    '2011-12-25', '2011-05-30', '2011-02-21',
                    '2011-12-26', '2012-01-02']
        bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
        assert_equal(
            np.busday_offset('2011-10-03', 4, holidays=holidays),
            np.busday_offset('2011-10-03', 4))
        assert_equal(
            np.busday_offset('2011-10-03', 5, holidays=holidays),
            np.busday_offset('2011-10-03', 5 + 1))
        assert_equal(
            np.busday_offset('2011-10-03', 27, holidays=holidays),
            np.busday_offset('2011-10-03', 27 + 1))
        assert_equal(
            np.busday_offset('2011-10-03', 28, holidays=holidays),
            np.busday_offset('2011-10-03', 28 + 2))
        assert_equal(
            np.busday_offset('2011-10-03', 35, holidays=holidays),
            np.busday_offset('2011-10-03', 35 + 2))
        assert_equal(
            np.busday_offset('2011-10-03', 36, holidays=holidays),
            np.busday_offset('2011-10-03', 36 + 3))
        assert_equal(
            np.busday_offset('2011-10-03', 56, holidays=holidays),
            np.busday_offset('2011-10-03', 56 + 3))
        assert_equal(
            np.busday_offset('2011-10-03', 57, holidays=holidays),
            np.busday_offset('2011-10-03', 57 + 4))
        assert_equal(
            np.busday_offset('2011-10-03', 60, holidays=holidays),
            np.busday_offset('2011-10-03', 60 + 4))
        assert_equal(
            np.busday_offset('2011-10-03', 61, holidays=holidays),
            np.busday_offset('2011-10-03', 61 + 5))
        assert_equal(
            np.busday_offset('2011-10-03', 61, busdaycal=bdd),
            np.busday_offset('2011-10-03', 61 + 5))
        # A bigger backward jump across more than one week/holiday
        assert_equal(
            np.busday_offset('2012-01-03', -1, holidays=holidays),
            np.busday_offset('2012-01-03', -1 - 1))
        assert_equal(
            np.busday_offset('2012-01-03', -4, holidays=holidays),
            np.busday_offset('2012-01-03', -4 - 1))
        assert_equal(
            np.busday_offset('2012-01-03', -5, holidays=holidays),
            np.busday_offset('2012-01-03', -5 - 2))
        assert_equal(
            np.busday_offset('2012-01-03', -25, holidays=holidays),
            np.busday_offset('2012-01-03', -25 - 2))
        assert_equal(
            np.busday_offset('2012-01-03', -26, holidays=holidays),
            np.busday_offset('2012-01-03', -26 - 3))
        assert_equal(
            np.busday_offset('2012-01-03', -33, holidays=holidays),
            np.busday_offset('2012-01-03', -33 - 3))
        assert_equal(
            np.busday_offset('2012-01-03', -34, holidays=holidays),
            np.busday_offset('2012-01-03', -34 - 4))
        assert_equal(
            np.busday_offset('2012-01-03', -56, holidays=holidays),
            np.busday_offset('2012-01-03', -56 - 4))
        assert_equal(
            np.busday_offset('2012-01-03', -57, holidays=holidays),
            np.busday_offset('2012-01-03', -57 - 5))
        assert_equal(
            np.busday_offset('2012-01-03', -57, busdaycal=bdd),
            np.busday_offset('2012-01-03', -57 - 5))
        # Can't supply both a weekmask/holidays and busdaycal
        assert_raises(ValueError, np.busday_offset, '2012-01-03', -15,
                      weekmask='1111100', busdaycal=bdd)
        assert_raises(ValueError, np.busday_offset, '2012-01-03', -15,
                      holidays=holidays, busdaycal=bdd)
        # Roll with the holidays
        assert_equal(
            np.busday_offset('2011-12-25', 0,
                roll='forward', holidays=holidays),
            np.datetime64('2011-12-27'))
        assert_equal(
            np.busday_offset('2011-12-26', 0,
                roll='forward', holidays=holidays),
            np.datetime64('2011-12-27'))
        assert_equal(
            np.busday_offset('2011-12-26', 0,
                roll='backward', holidays=holidays),
            np.datetime64('2011-12-23'))
        assert_equal(
            np.busday_offset('2012-02-27', 0,
                roll='modifiedfollowing',
                holidays=['2012-02-27', '2012-02-26', '2012-02-28',
                          '2012-03-01', '2012-02-29']),
            np.datetime64('2012-02-24'))
        assert_equal(
            np.busday_offset('2012-03-06', 0,
                roll='modifiedpreceding',
                holidays=['2012-03-02', '2012-03-03', '2012-03-01',
                          '2012-03-05', '2012-03-07', '2012-03-06']),
            np.datetime64('2012-03-08'))
def test_datetime_busday_holidays_count(self):
holidays = ['2011-01-01', '2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21', '2011-01-17',
'2011-12-26', '2012-01-02', '2011-02-21', '2011-05-30',
'2011-07-01', '2011-07-04', '2011-09-05', '2011-10-10']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
# Validate against busday_offset broadcast against
# a range of offsets
dates = np.busday_offset('2011-01-01', np.arange(366),
roll='forward', busdaycal=bdd)
assert_equal(np.busday_count('2011-01-01', dates, busdaycal=bdd),
np.arange(366))
# Returns negative value when reversed
assert_equal(np.busday_count(dates, '2011-01-01', busdaycal=bdd),
-np.arange(366))
dates = np.busday_offset('2011-12-31', -np.arange(366),
roll='forward', busdaycal=bdd)
assert_equal(np.busday_count(dates, '2011-12-31', busdaycal=bdd),
np.arange(366))
# Returns negative value when reversed
assert_equal(np.busday_count('2011-12-31', dates, busdaycal=bdd),
-np.arange(366))
# Can't supply both a weekmask/holidays and busdaycal
assert_raises(ValueError, np.busday_offset, '2012-01-03', '2012-02-03',
weekmask='1111100', busdaycal=bdd)
assert_raises(ValueError, np.busday_offset, '2012-01-03', '2012-02-03',
holidays=holidays, busdaycal=bdd)
# Number of Mondays in March 2011
assert_equal(np.busday_count('2011-03', '2011-04', weekmask='Mon'), 4)
# Returns negative value when reversed
assert_equal(np.busday_count('2011-04', '2011-03', weekmask='Mon'), -4)
def test_datetime_is_busday(self):
holidays = ['2011-01-01', '2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21', '2011-01-17',
'2011-12-26', '2012-01-02', '2011-02-21', '2011-05-30',
'2011-07-01', '2011-07-04', '2011-09-05', '2011-10-10',
'NaT']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
# Weekend/weekday tests
assert_equal(np.is_busday('2011-01-01'), False)
assert_equal(np.is_busday('2011-01-02'), False)
assert_equal(np.is_busday('2011-01-03'), True)
# All the holidays are not business days
assert_equal(np.is_busday(holidays, busdaycal=bdd),
np.zeros(len(holidays), dtype='?'))
def test_datetime_y2038(self):
# Test parsing on either side of the Y2038 boundary
a = np.datetime64('2038-01-19T03:14:07')
assert_equal(a.view(np.int64), 2**31 - 1)
a = np.datetime64('2038-01-19T03:14:08')
assert_equal(a.view(np.int64), 2**31)
# Test parsing on either side of the Y2038 boundary with
# a manually specified timezone offset
with assert_warns(DeprecationWarning):
a = np.datetime64('2038-01-19T04:14:07+0100')
assert_equal(a.view(np.int64), 2**31 - 1)
with assert_warns(DeprecationWarning):
a = np.datetime64('2038-01-19T04:14:08+0100')
assert_equal(a.view(np.int64), 2**31)
# Test parsing a date after Y2038
a = np.datetime64('2038-01-20T13:21:14')
assert_equal(str(a), '2038-01-20T13:21:14')
def test_isnat(self):
assert_(np.isnat(np.datetime64('NaT', 'ms')))
assert_(np.isnat(np.datetime64('NaT', 'ns')))
assert_(not np.isnat(np.datetime64('2038-01-19T03:14:07')))
assert_(np.isnat(np.timedelta64('NaT', "ms")))
assert_(not np.isnat(np.timedelta64(34, "ms")))
res = np.array([False, False, True])
for unit in ['Y', 'M', 'W', 'D',
'h', 'm', 's', 'ms', 'us',
'ns', 'ps', 'fs', 'as']:
arr = np.array([123, -321, "NaT"], dtype='<datetime64[%s]' % unit)
assert_equal(np.isnat(arr), res)
arr = np.array([123, -321, "NaT"], dtype='>datetime64[%s]' % unit)
assert_equal(np.isnat(arr), res)
arr = np.array([123, -321, "NaT"], dtype='<timedelta64[%s]' % unit)
assert_equal(np.isnat(arr), res)
arr = np.array([123, -321, "NaT"], dtype='>timedelta64[%s]' % unit)
assert_equal(np.isnat(arr), res)
def test_isnat_error(self):
# Test that only datetime dtype arrays are accepted
for t in np.typecodes["All"]:
if t in np.typecodes["Datetime"]:
continue
assert_raises(TypeError, np.isnat, np.zeros(10, t))
def test_isfinite_scalar(self):
assert_(not np.isfinite(np.datetime64('NaT', 'ms')))
assert_(not np.isfinite(np.datetime64('NaT', 'ns')))
assert_(np.isfinite(np.datetime64('2038-01-19T03:14:07')))
assert_(not np.isfinite(np.timedelta64('NaT', "ms")))
assert_(np.isfinite(np.timedelta64(34, "ms")))
@pytest.mark.parametrize('unit', ['Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms',
'us', 'ns', 'ps', 'fs', 'as'])
@pytest.mark.parametrize('dstr', ['<datetime64[%s]', '>datetime64[%s]',
'<timedelta64[%s]', '>timedelta64[%s]'])
def test_isfinite_isinf_isnan_units(self, unit, dstr):
'''check isfinite, isinf, isnan for all units of <M, >M, <m, >m dtypes
'''
arr_val = [123, -321, "NaT"]
arr = np.array(arr_val, dtype= dstr % unit)
pos = np.array([True, True, False])
neg = np.array([False, False, True])
false = np.array([False, False, False])
assert_equal(np.isfinite(arr), pos)
assert_equal(np.isinf(arr), false)
assert_equal(np.isnan(arr), neg)
def test_assert_equal(self):
assert_raises(AssertionError, assert_equal,
np.datetime64('nat'), np.timedelta64('nat'))
def test_corecursive_input(self):
# construct a co-recursive list
a, b = [], []
a.append(b)
b.append(a)
obj_arr = np.array([None])
obj_arr[0] = a
# At some point this caused a stack overflow (gh-11154). Now raises
# ValueError since the nested list cannot be converted to a datetime.
assert_raises(ValueError, obj_arr.astype, 'M8')
assert_raises(ValueError, obj_arr.astype, 'm8')
@pytest.mark.parametrize("shape", [(), (1,)])
def test_discovery_from_object_array(self, shape):
arr = np.array("2020-10-10", dtype=object).reshape(shape)
res = np.array("2020-10-10", dtype="M8").reshape(shape)
assert res.dtype == np.dtype("M8[D]")
assert_equal(arr.astype("M8"), res)
arr[...] = np.bytes_("2020-10-10") # try a numpy string type
assert_equal(arr.astype("M8"), res)
arr = arr.astype("S")
assert_equal(arr.astype("S").astype("M8"), res)
@pytest.mark.parametrize("time_unit", [
"Y", "M", "W", "D", "h", "m", "s", "ms", "us", "ns", "ps", "fs", "as",
# compound units
"10D", "2M",
])
def test_limit_symmetry(self, time_unit):
"""
Dates should have symmetric limits around the unix epoch at +/-np.int64
"""
epoch = np.datetime64(0, time_unit)
latest = np.datetime64(np.iinfo(np.int64).max, time_unit)
earliest = np.datetime64(-np.iinfo(np.int64).max, time_unit)
# above should not have overflowed
assert earliest < epoch < latest
    @pytest.mark.parametrize("time_unit", [
        "Y", "M",
        pytest.param("W", marks=pytest.mark.xfail(reason="gh-13197")),
        "D", "h", "m",
        "s", "ms", "us", "ns", "ps", "fs", "as",
        pytest.param("10D", marks=pytest.mark.xfail(reason="similar to gh-13197")),
    ])
    # Exercise both the most-positive and most-negative representable value.
    @pytest.mark.parametrize("sign", [-1, 1])
    def test_limit_str_roundtrip(self, time_unit, sign):
        """
        Limits should roundtrip when converted to strings.
        This tests the conversion to and from npy_datetimestruct.
        """
        # TODO: add absolute (gold standard) time span limit strings
        limit = np.datetime64(np.iinfo(np.int64).max * sign, time_unit)
        # Convert to string and back. Explicit unit needed since the day and
        # week reprs are not distinguishable.
        limit_via_str = np.datetime64(str(limit), time_unit)
        assert limit_via_str == limit
class TestDateTimeData:
    """Tests for np.datetime_data's (unit, count) extraction."""

    def test_basic(self):
        dates = np.array(['1980-03-23'], dtype=np.datetime64)
        assert_equal(np.datetime_data(dates.dtype), ('D', 1))

    def test_bytes(self):
        # Byte-string units are accepted and reported back as unicode.
        stamp = np.datetime64('2000', (b'ms', 5))
        assert np.datetime_data(stamp.dtype) == ('ms', 5)
        stamp = np.datetime64('2000', b'5ms')
        assert np.datetime_data(stamp.dtype) == ('ms', 5)

    def test_non_ascii(self):
        # The micro sign 'μ' is accepted and normalized to the ASCII 'us'.
        stamp = np.datetime64('2000', ('μs', 5))
        assert np.datetime_data(stamp.dtype) == ('us', 5)
        stamp = np.datetime64('2000', '5μs')
        assert np.datetime_data(stamp.dtype) == ('us', 5)
| 45.494396 | 101 | 0.53845 |
import numpy
import numpy as np
import datetime
import pytest
from numpy.testing import (
assert_, assert_equal, assert_raises, assert_warns, suppress_warnings,
assert_raises_regex,
)
from numpy.compat import pickle
# Optional dependency: real tzinfo objects for the timezone-aware
# datetime_as_string tests; tests needing them are skipped via the
# ``_has_pytz`` flag when pytz is not installed.
try:
    from pytz import timezone as tz
    _has_pytz = True
except ImportError:
    _has_pytz = False
# Older Pythons had no RecursionError builtin; alias it so the tests can
# reference RecursionError uniformly on all supported versions.
try:
    RecursionError
except NameError:
    RecursionError = RuntimeError  # fallback for pre-RecursionError Pythons
class TestDateTime:
    def test_datetime_dtype_creation(self):
        """dtype parsing for M8/m8 units, endianness, and rejected specs."""
        for unit in ['Y', 'M', 'W', 'D',
                     'h', 'm', 's', 'ms', 'us',
                     'μs',
                     'ns', 'ps', 'fs', 'as']:
            # A unit multiple (e.g. 750us) parses to the same dtype via the
            # short 'M8'/'m8' and long 'datetime64'/'timedelta64' spellings.
            dt1 = np.dtype('M8[750%s]' % unit)
            assert_(dt1 == np.dtype('datetime64[750%s]' % unit))
            dt2 = np.dtype('m8[%s]' % unit)
            assert_(dt2 == np.dtype('timedelta64[%s]' % unit))
        assert_equal(str(np.dtype("M8")), "datetime64")
        # Should be possible to specify the endianness
        assert_equal(np.dtype("=M8"), np.dtype("M8"))
        assert_equal(np.dtype("=M8[s]"), np.dtype("M8[s]"))
        assert_(np.dtype(">M8") == np.dtype("M8") or
                np.dtype("<M8") == np.dtype("M8"))
        assert_(np.dtype(">M8[D]") == np.dtype("M8[D]") or
                np.dtype("<M8[D]") == np.dtype("M8[D]"))
        assert_(np.dtype(">M8") != np.dtype("<M8"))
        assert_equal(np.dtype("=m8"), np.dtype("m8"))
        assert_equal(np.dtype("=m8[s]"), np.dtype("m8[s]"))
        assert_(np.dtype(">m8") == np.dtype("m8") or
                np.dtype("<m8") == np.dtype("m8"))
        assert_(np.dtype(">m8[D]") == np.dtype("m8[D]") or
                np.dtype("<m8[D]") == np.dtype("m8[D]"))
        assert_(np.dtype(">m8") != np.dtype("<m8"))
        # Check that the parser rejects bad datetime types
        assert_raises(TypeError, np.dtype, 'M8[badunit]')
        assert_raises(TypeError, np.dtype, 'm8[badunit]')
        assert_raises(TypeError, np.dtype, 'M8[YY]')
        assert_raises(TypeError, np.dtype, 'm8[YY]')
        assert_raises(TypeError, np.dtype, 'm4')
        assert_raises(TypeError, np.dtype, 'M7')
        assert_raises(TypeError, np.dtype, 'm7')
        assert_raises(TypeError, np.dtype, 'M16')
        assert_raises(TypeError, np.dtype, 'm16')
    def test_datetime_casting_rules(self):
        """np.can_cast rules between M8/m8, integers, floats, and units."""
        # Cannot cast safely/same_kind between timedelta and datetime
        assert_(not np.can_cast('m8', 'M8', casting='same_kind'))
        assert_(not np.can_cast('M8', 'm8', casting='same_kind'))
        assert_(not np.can_cast('m8', 'M8', casting='safe'))
        assert_(not np.can_cast('M8', 'm8', casting='safe'))
        # Can cast safely/same_kind from integer to timedelta
        assert_(np.can_cast('i8', 'm8', casting='same_kind'))
        assert_(np.can_cast('i8', 'm8', casting='safe'))
        assert_(np.can_cast('i4', 'm8', casting='same_kind'))
        assert_(np.can_cast('i4', 'm8', casting='safe'))
        assert_(np.can_cast('u4', 'm8', casting='same_kind'))
        assert_(np.can_cast('u4', 'm8', casting='safe'))
        # Cannot cast safely from unsigned integer of the same size, which
        # could overflow
        assert_(np.can_cast('u8', 'm8', casting='same_kind'))
        assert_(not np.can_cast('u8', 'm8', casting='safe'))
        # Cannot cast safely/same_kind from float to timedelta
        assert_(not np.can_cast('f4', 'm8', casting='same_kind'))
        assert_(not np.can_cast('f4', 'm8', casting='safe'))
        # Cannot cast safely/same_kind from integer to datetime
        assert_(not np.can_cast('i8', 'M8', casting='same_kind'))
        assert_(not np.can_cast('i8', 'M8', casting='safe'))
        # Cannot cast safely/same_kind from bool to datetime
        assert_(not np.can_cast('b1', 'M8', casting='same_kind'))
        assert_(not np.can_cast('b1', 'M8', casting='safe'))
        # Can cast safely/same_kind from bool to timedelta
        assert_(np.can_cast('b1', 'm8', casting='same_kind'))
        assert_(np.can_cast('b1', 'm8', casting='safe'))
        # Can cast datetime safely from months/years to days
        assert_(np.can_cast('M8[M]', 'M8[D]', casting='safe'))
        assert_(np.can_cast('M8[Y]', 'M8[D]', casting='safe'))
        # Cannot cast timedelta safely from months/years to days
        assert_(not np.can_cast('m8[M]', 'm8[D]', casting='safe'))
        assert_(not np.can_cast('m8[Y]', 'm8[D]', casting='safe'))
        # Can cast datetime same_kind from months/years to days
        assert_(np.can_cast('M8[M]', 'M8[D]', casting='same_kind'))
        assert_(np.can_cast('M8[Y]', 'M8[D]', casting='same_kind'))
        # Can't cast timedelta same_kind from months/years to days
        assert_(not np.can_cast('m8[M]', 'm8[D]', casting='same_kind'))
        assert_(not np.can_cast('m8[Y]', 'm8[D]', casting='same_kind'))
        # Within linear units, same_kind casting works in both directions.
        assert_(np.can_cast('M8[D]', 'M8[h]', casting='same_kind'))
        assert_(np.can_cast('m8[D]', 'm8[h]', casting='same_kind'))
        assert_(np.can_cast('m8[h]', 'm8[D]', casting='same_kind'))
        # Unit multiples that do not divide evenly cannot be cast safely.
        assert_(not np.can_cast('M8[7h]', 'M8[3h]', casting='safe'))
        assert_(not np.can_cast('M8[3h]', 'M8[6h]', casting='safe'))
        # But can cast same_kind
        assert_(np.can_cast('M8[7h]', 'M8[3h]', casting='same_kind'))
        # Can cast safely if the integer multiplier does divide
        assert_(np.can_cast('M8[6h]', 'M8[3h]', casting='safe'))
        # We can always cast types with generic units (corresponding to NaT) to
        # more specific types
        assert_(np.can_cast('m8', 'm8[h]', casting='same_kind'))
        assert_(np.can_cast('m8', 'm8[h]', casting='safe'))
        assert_(np.can_cast('M8', 'M8[h]', casting='same_kind'))
        assert_(np.can_cast('M8', 'M8[h]', casting='safe'))
        # but not the other way around
        assert_(not np.can_cast('m8[h]', 'm8', casting='same_kind'))
        assert_(not np.can_cast('m8[h]', 'm8', casting='safe'))
        assert_(not np.can_cast('M8[h]', 'M8', casting='same_kind'))
        assert_(not np.can_cast('M8[h]', 'M8', casting='safe'))
def test_compare_generic_nat(self):
# regression tests for gh-6452
assert_(np.datetime64('NaT') !=
np.datetime64('2000') + np.timedelta64('NaT'))
assert_(np.datetime64('NaT') != np.datetime64('NaT', 'us'))
assert_(np.datetime64('NaT', 'us') != np.datetime64('NaT'))
    @pytest.mark.parametrize("size", [
        3, 21, 217, 1000])
    def test_datetime_nat_argsort_stability(self, size):
        """Argsort of an all-NaT datetime array must be the identity.

        Stability requires that NaT < NaT evaluates False internally, so
        equal (all-NaT) elements keep their original order.  The sizes
        exercise different internal sort code paths.
        """
        # NaT < NaT should be False internally for
        # sort stability
        expected = np.arange(size)
        arr = np.tile(np.datetime64('NaT'), size)
        assert_equal(np.argsort(arr, kind='mergesort'), expected)
    @pytest.mark.parametrize("size", [
        3, 21, 217, 1000])
    def test_timedelta_nat_argsort_stability(self, size):
        """Argsort of an all-NaT timedelta array must be the identity.

        Same stability requirement as the datetime variant above, applied
        to timedelta64 NaT values.
        """
        # NaT < NaT should be False internally for
        # sort stability
        expected = np.arange(size)
        arr = np.tile(np.timedelta64('NaT'), size)
        assert_equal(np.argsort(arr, kind='mergesort'), expected)
    @pytest.mark.parametrize("arr, expected", [
        # the example provided in gh-12629
        (['NaT', 1, 2, 3],
         [1, 2, 3, 'NaT']),
        # multiple NaTs
        (['NaT', 9, 'NaT', -707],
         [-707, 9, 'NaT', 'NaT']),
        # this sort explores another code path for NaT
        ([1, -2, 3, 'NaT'],
         [-2, 1, 3, 'NaT']),
        # 2-D array
        ([[51, -220, 'NaT'],
          [-17, 'NaT', -90]],
         [[-220, 51, 'NaT'],
          [-90, -17, 'NaT']]),
        ])
    @pytest.mark.parametrize("dtype", [
        'M8[ns]', 'M8[us]',
        'm8[ns]', 'm8[us]'])
    def test_datetime_timedelta_sort_nat(self, arr, expected, dtype):
        """In-place sort must place NaT values at the end of the array."""
        # fix for gh-12629 and gh-15063; NaT sorting to end of array
        arr = np.array(arr, dtype=dtype)
        expected = np.array(expected, dtype=dtype)
        arr.sort()
        assert_equal(arr, expected)
    def test_datetime_scalar_construction(self):
        """Scalar datetime64 construction: units, NaT, reprs, and pydates."""
        # Construct with different units
        assert_equal(np.datetime64('1950-03-12', 'D'),
                     np.datetime64('1950-03-12'))
        assert_equal(np.datetime64('1950-03-12T13', 's'),
                     np.datetime64('1950-03-12T13', 'm'))
        # Default construction means NaT
        assert_equal(np.datetime64(), np.datetime64('NaT'))
        # Some basic strings and repr
        assert_equal(str(np.datetime64('NaT')), 'NaT')
        assert_equal(repr(np.datetime64('NaT')),
                     "numpy.datetime64('NaT')")
        assert_equal(str(np.datetime64('2011-02')), '2011-02')
        assert_equal(repr(np.datetime64('2011-02')),
                     "numpy.datetime64('2011-02')")
        # None gets constructed as NaT
        assert_equal(np.datetime64(None), np.datetime64('NaT'))
        # Default construction of NaT is in generic units
        assert_equal(np.datetime64().dtype, np.dtype('M8'))
        assert_equal(np.datetime64('NaT').dtype, np.dtype('M8'))
        # Construction from integers requires a specified unit
        assert_raises(ValueError, np.datetime64, 17)
        # When constructing from a scalar or zero-dimensional array,
        # it either keeps the units or you can override them.
        a = np.datetime64('2000-03-18T16', 'h')
        b = np.array('2000-03-18T16', dtype='M8[h]')
        assert_equal(a.dtype, np.dtype('M8[h]'))
        assert_equal(b.dtype, np.dtype('M8[h]'))
        assert_equal(np.datetime64(a), a)
        assert_equal(np.datetime64(a).dtype, np.dtype('M8[h]'))
        assert_equal(np.datetime64(b), a)
        assert_equal(np.datetime64(b).dtype, np.dtype('M8[h]'))
        # Overriding the unit converts the value.
        assert_equal(np.datetime64(a, 's'), a)
        assert_equal(np.datetime64(a, 's').dtype, np.dtype('M8[s]'))
        assert_equal(np.datetime64(b, 's'), a)
        assert_equal(np.datetime64(b, 's').dtype, np.dtype('M8[s]'))
        # Construction from datetime.date
        assert_equal(np.datetime64('1945-03-25'),
                     np.datetime64(datetime.date(1945, 3, 25)))
        assert_equal(np.datetime64('2045-03-25', 'D'),
                     np.datetime64(datetime.date(2045, 3, 25), 'D'))
        # Construction from datetime.datetime
        assert_equal(np.datetime64('1980-01-25T14:36:22.5'),
                     np.datetime64(datetime.datetime(1980, 1, 25,
                                                     14, 36, 22, 500000)))
        # Construction with time units from a date is okay
        assert_equal(np.datetime64('1920-03-13', 'h'),
                     np.datetime64('1920-03-13T00'))
        assert_equal(np.datetime64('1920-03', 'm'),
                     np.datetime64('1920-03-01T00:00'))
        assert_equal(np.datetime64('1920', 's'),
                     np.datetime64('1920-01-01T00:00:00'))
        assert_equal(np.datetime64(datetime.date(2045, 3, 25), 'ms'),
                     np.datetime64('2045-03-25T00:00:00.000'))
        # Construction with date units from a datetime is also okay
        # (the sub-unit part is truncated)
        assert_equal(np.datetime64('1920-03-13T18', 'D'),
                     np.datetime64('1920-03-13'))
        assert_equal(np.datetime64('1920-03-13T18:33:12', 'M'),
                     np.datetime64('1920-03'))
        assert_equal(np.datetime64('1920-03-13T18:33:12.5', 'Y'),
                     np.datetime64('1920'))
    def test_datetime_scalar_construction_timezone(self):
        """Explicit timezone offsets parse correctly but are deprecated."""
        # verify that supplying an explicit timezone works, but is deprecated
        with assert_warns(DeprecationWarning):
            # 'Z' suffix is the UTC designator and changes nothing.
            assert_equal(np.datetime64('2000-01-01T00Z'),
                         np.datetime64('2000-01-01T00'))
        with assert_warns(DeprecationWarning):
            # A numeric offset shifts the stored (UTC) value.
            assert_equal(np.datetime64('2000-01-01T00-08'),
                         np.datetime64('2000-01-01T08'))
    def test_datetime_array_find_type(self):
        """dtype deduction when building arrays from datetime-like objects."""
        # A datetime64 scalar keeps its unit in the deduced array dtype.
        dt = np.datetime64('1970-01-01', 'M')
        arr = np.array([dt])
        assert_equal(arr.dtype, np.dtype('M8[M]'))
        # at the moment, we don't automatically convert these to datetime64
        dt = datetime.date(1970, 1, 1)
        arr = np.array([dt])
        assert_equal(arr.dtype, np.dtype('O'))
        dt = datetime.datetime(1970, 1, 1, 12, 30, 40)
        arr = np.array([dt])
        assert_equal(arr.dtype, np.dtype('O'))
        # Mixing a bool with any datetime-like falls back to object dtype.
        b = np.bool_(True)
        dm = np.datetime64('1970-01-01', 'M')
        d = datetime.date(1970, 1, 1)
        dt = datetime.datetime(1970, 1, 1, 12, 30, 40)
        arr = np.array([b, dm])
        assert_equal(arr.dtype, np.dtype('O'))
        arr = np.array([b, d])
        assert_equal(arr.dtype, np.dtype('O'))
        arr = np.array([b, dt])
        assert_equal(arr.dtype, np.dtype('O'))
        # An explicit astype('datetime64') picks a unit from the objects:
        # date -> days, datetime -> microseconds.
        arr = np.array([d, d]).astype('datetime64')
        assert_equal(arr.dtype, np.dtype('M8[D]'))
        arr = np.array([dt, dt]).astype('datetime64')
        assert_equal(arr.dtype, np.dtype('M8[us]'))
    @pytest.mark.parametrize("unit", [
        ("Y"), ("M"), ("W"), ("D"), ("h"), ("m"),
        ("s"), ("ms"), ("us"), ("ns"), ("ps"),
        ("fs"), ("as"), ("generic") ])
    def test_timedelta_np_int_construction(self, unit):
        """timedelta64 built from np.int64 equals one built from a Python int."""
        if unit != "generic":
            assert_equal(np.timedelta64(np.int64(123), unit),
                         np.timedelta64(123, unit))
        else:
            # "generic" means no unit argument at all.
            assert_equal(np.timedelta64(np.int64(123)),
                         np.timedelta64(123))
    def test_timedelta_scalar_construction(self):
        """Scalar timedelta64 construction from ints, None, strings,
        scalars/0-d arrays and datetime.timedelta objects."""
        # Equivalent quantities in different units compare equal.
        assert_equal(np.timedelta64(7, 'D'),
                     np.timedelta64(1, 'W'))
        assert_equal(np.timedelta64(120, 's'),
                     np.timedelta64(2, 'm'))
        # Default construction is zero; None means NaT.
        assert_equal(np.timedelta64(), np.timedelta64(0))
        assert_equal(np.timedelta64(None), np.timedelta64('NaT'))
        # str/repr formatting.
        assert_equal(str(np.timedelta64('NaT')), 'NaT')
        assert_equal(repr(np.timedelta64('NaT')),
                     "numpy.timedelta64('NaT')")
        assert_equal(str(np.timedelta64(3, 's')), '3 seconds')
        assert_equal(repr(np.timedelta64(-3, 's')),
                     "numpy.timedelta64(-3,'s')")
        # No unit argument means a generic-unit timedelta.
        assert_equal(repr(np.timedelta64(12)),
                     "numpy.timedelta64(12)")
        assert_equal(np.timedelta64(12).dtype, np.dtype('m8'))
        # When constructing from a scalar or zero-dimensional array,
        # the units are kept unless explicitly overridden.
        a = np.timedelta64(2, 'h')
        b = np.array(2, dtype='m8[h]')
        assert_equal(a.dtype, np.dtype('m8[h]'))
        assert_equal(b.dtype, np.dtype('m8[h]'))
        assert_equal(np.timedelta64(a), a)
        assert_equal(np.timedelta64(a).dtype, np.dtype('m8[h]'))
        assert_equal(np.timedelta64(b), a)
        assert_equal(np.timedelta64(b).dtype, np.dtype('m8[h]'))
        assert_equal(np.timedelta64(a, 's'), a)
        assert_equal(np.timedelta64(a, 's').dtype, np.dtype('m8[s]'))
        assert_equal(np.timedelta64(b, 's'), a)
        assert_equal(np.timedelta64(b, 's').dtype, np.dtype('m8[s]'))
        # Construction from datetime.timedelta.
        assert_equal(np.timedelta64(5, 'D'),
                     np.timedelta64(datetime.timedelta(days=5)))
        assert_equal(np.timedelta64(102347621, 's'),
                     np.timedelta64(datetime.timedelta(seconds=102347621)))
        assert_equal(np.timedelta64(-10234760000, 'us'),
                     np.timedelta64(datetime.timedelta(
                                            microseconds=-10234760000)))
        assert_equal(np.timedelta64(10234760000, 'us'),
                     np.timedelta64(datetime.timedelta(
                                            microseconds=10234760000)))
        assert_equal(np.timedelta64(1023476, 'ms'),
                     np.timedelta64(datetime.timedelta(milliseconds=1023476)))
        assert_equal(np.timedelta64(10, 'm'),
                     np.timedelta64(datetime.timedelta(minutes=10)))
        assert_equal(np.timedelta64(281, 'h'),
                     np.timedelta64(datetime.timedelta(hours=281)))
        assert_equal(np.timedelta64(28, 'W'),
                     np.timedelta64(datetime.timedelta(weeks=28)))
        # Time units (s, h, ...) cannot be converted to the calendar units
        # months/years, nor vice versa, since the conversion is ill-defined.
        a = np.timedelta64(3, 's')
        assert_raises(TypeError, np.timedelta64, a, 'M')
        assert_raises(TypeError, np.timedelta64, a, 'Y')
        a = np.timedelta64(6, 'M')
        assert_raises(TypeError, np.timedelta64, a, 'D')
        assert_raises(TypeError, np.timedelta64, a, 'h')
        a = np.timedelta64(1, 'Y')
        assert_raises(TypeError, np.timedelta64, a, 'D')
        assert_raises(TypeError, np.timedelta64, a, 'm')
        # The same restriction applies to datetime.timedelta inputs,
        # including the zero timedelta.
        a = datetime.timedelta(seconds=3)
        assert_raises(TypeError, np.timedelta64, a, 'M')
        assert_raises(TypeError, np.timedelta64, a, 'Y')
        a = datetime.timedelta(weeks=3)
        assert_raises(TypeError, np.timedelta64, a, 'M')
        assert_raises(TypeError, np.timedelta64, a, 'Y')
        a = datetime.timedelta()
        assert_raises(TypeError, np.timedelta64, a, 'M')
        assert_raises(TypeError, np.timedelta64, a, 'Y')
def test_timedelta_object_array_conversion(self):
inputs = [datetime.timedelta(28),
datetime.timedelta(30),
datetime.timedelta(31)]
expected = np.array([28, 30, 31], dtype='timedelta64[D]')
actual = np.array(inputs, dtype='timedelta64[D]')
assert_equal(expected, actual)
def test_timedelta_0_dim_object_array_conversion(self):
test = np.array(datetime.timedelta(seconds=20))
actual = test.astype(np.timedelta64)
expected = np.array(datetime.timedelta(seconds=20),
np.timedelta64)
assert_equal(actual, expected)
    def test_timedelta_scalar_construction_units(self):
        """Datetime string parsing picks a unit matching the string precision.

        NOTE(review): despite the name, this method exercises datetime64
        unit detection, not timedelta64 — confirm whether a rename is wanted.
        """
        # The unit is the finest field present in the string:
        # year, month, day, hour, minute, second ...
        assert_equal(np.datetime64('2010').dtype,
                     np.dtype('M8[Y]'))
        assert_equal(np.datetime64('2010-03').dtype,
                     np.dtype('M8[M]'))
        assert_equal(np.datetime64('2010-03-12').dtype,
                     np.dtype('M8[D]'))
        assert_equal(np.datetime64('2010-03-12T17').dtype,
                     np.dtype('M8[h]'))
        assert_equal(np.datetime64('2010-03-12T17:15').dtype,
                     np.dtype('M8[m]'))
        assert_equal(np.datetime64('2010-03-12T17:15:08').dtype,
                     np.dtype('M8[s]'))
        # ... and fractional seconds map to ms/us/ns/ps/fs/as in groups
        # of three decimal digits.
        assert_equal(np.datetime64('2010-03-12T17:15:08.1').dtype,
                     np.dtype('M8[ms]'))
        assert_equal(np.datetime64('2010-03-12T17:15:08.12').dtype,
                     np.dtype('M8[ms]'))
        assert_equal(np.datetime64('2010-03-12T17:15:08.123').dtype,
                     np.dtype('M8[ms]'))
        assert_equal(np.datetime64('2010-03-12T17:15:08.1234').dtype,
                     np.dtype('M8[us]'))
        assert_equal(np.datetime64('2010-03-12T17:15:08.12345').dtype,
                     np.dtype('M8[us]'))
        assert_equal(np.datetime64('2010-03-12T17:15:08.123456').dtype,
                     np.dtype('M8[us]'))
        assert_equal(np.datetime64('1970-01-01T00:00:02.1234567').dtype,
                     np.dtype('M8[ns]'))
        assert_equal(np.datetime64('1970-01-01T00:00:02.12345678').dtype,
                     np.dtype('M8[ns]'))
        assert_equal(np.datetime64('1970-01-01T00:00:02.123456789').dtype,
                     np.dtype('M8[ns]'))
        assert_equal(np.datetime64('1970-01-01T00:00:02.1234567890').dtype,
                     np.dtype('M8[ps]'))
        assert_equal(np.datetime64('1970-01-01T00:00:02.12345678901').dtype,
                     np.dtype('M8[ps]'))
        assert_equal(np.datetime64('1970-01-01T00:00:02.123456789012').dtype,
                     np.dtype('M8[ps]'))
        assert_equal(np.datetime64(
                     '1970-01-01T00:00:02.1234567890123').dtype,
                     np.dtype('M8[fs]'))
        assert_equal(np.datetime64(
                     '1970-01-01T00:00:02.12345678901234').dtype,
                     np.dtype('M8[fs]'))
        assert_equal(np.datetime64(
                     '1970-01-01T00:00:02.123456789012345').dtype,
                     np.dtype('M8[fs]'))
        assert_equal(np.datetime64(
                    '1970-01-01T00:00:02.1234567890123456').dtype,
                     np.dtype('M8[as]'))
        assert_equal(np.datetime64(
                    '1970-01-01T00:00:02.12345678901234567').dtype,
                     np.dtype('M8[as]'))
        assert_equal(np.datetime64(
                    '1970-01-01T00:00:02.123456789012345678').dtype,
                     np.dtype('M8[as]'))
        # Python date objects default to day units, datetimes to
        # microseconds.
        assert_equal(np.datetime64(datetime.date(2010, 4, 16)).dtype,
                     np.dtype('M8[D]'))
        assert_equal(np.datetime64(
                        datetime.datetime(2010, 4, 16, 13, 45, 18)).dtype,
                     np.dtype('M8[us]'))
        # 'today' resolves to days, 'now' to seconds.
        assert_equal(np.datetime64('today').dtype,
                     np.dtype('M8[D]'))
        assert_equal(np.datetime64('now').dtype,
                     np.dtype('M8[s]'))
    def test_datetime_nat_casting(self):
        """NaT survives casts between units and converts from float NaN."""
        a = np.array('NaT', dtype='M8[D]')
        b = np.datetime64('NaT', '[D]')
        # Arrays: NaT stays NaT across any unit change.
        assert_equal(a.astype('M8[s]'), np.array('NaT', dtype='M8[s]'))
        assert_equal(a.astype('M8[ms]'), np.array('NaT', dtype='M8[ms]'))
        assert_equal(a.astype('M8[M]'), np.array('NaT', dtype='M8[M]'))
        assert_equal(a.astype('M8[Y]'), np.array('NaT', dtype='M8[Y]'))
        assert_equal(a.astype('M8[W]'), np.array('NaT', dtype='M8[W]'))
        # Scalar -> scalar construction with a new unit.
        assert_equal(np.datetime64(b, '[s]'), np.datetime64('NaT', '[s]'))
        assert_equal(np.datetime64(b, '[ms]'), np.datetime64('NaT', '[ms]'))
        assert_equal(np.datetime64(b, '[M]'), np.datetime64('NaT', '[M]'))
        assert_equal(np.datetime64(b, '[Y]'), np.datetime64('NaT', '[Y]'))
        assert_equal(np.datetime64(b, '[W]'), np.datetime64('NaT', '[W]'))
        # Arrays into scalar constructor with a new unit.
        assert_equal(np.datetime64(a, '[s]'), np.datetime64('NaT', '[s]'))
        assert_equal(np.datetime64(a, '[ms]'), np.datetime64('NaT', '[ms]'))
        assert_equal(np.datetime64(a, '[M]'), np.datetime64('NaT', '[M]'))
        assert_equal(np.datetime64(a, '[Y]'), np.datetime64('NaT', '[Y]'))
        assert_equal(np.datetime64(a, '[W]'), np.datetime64('NaT', '[W]'))
        # NaN of every float/complex flavour casts to NaT
        # for both datetime64 and timedelta64.
        nan = np.array([np.nan] * 8)
        fnan = nan.astype('f')
        lnan = nan.astype('g')
        cnan = nan.astype('D')
        cfnan = nan.astype('F')
        clnan = nan.astype('G')
        nat = np.array([np.datetime64('NaT')] * 8)
        assert_equal(nan.astype('M8[ns]'), nat)
        assert_equal(fnan.astype('M8[ns]'), nat)
        assert_equal(lnan.astype('M8[ns]'), nat)
        assert_equal(cnan.astype('M8[ns]'), nat)
        assert_equal(cfnan.astype('M8[ns]'), nat)
        assert_equal(clnan.astype('M8[ns]'), nat)
        nat = np.array([np.timedelta64('NaT')] * 8)
        assert_equal(nan.astype('timedelta64[ns]'), nat)
        assert_equal(fnan.astype('timedelta64[ns]'), nat)
        assert_equal(lnan.astype('timedelta64[ns]'), nat)
        assert_equal(cnan.astype('timedelta64[ns]'), nat)
        assert_equal(cfnan.astype('timedelta64[ns]'), nat)
        assert_equal(clnan.astype('timedelta64[ns]'), nat)
def test_days_creation(self):
assert_equal(np.array('1599', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)/4 + 3 - 365)
assert_equal(np.array('1600', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)/4 + 3)
assert_equal(np.array('1601', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)/4 + 3 + 366)
assert_equal(np.array('1900', dtype='M8[D]').astype('i8'),
(1900-1970)*365 - (1970-1900)//4)
assert_equal(np.array('1901', dtype='M8[D]').astype('i8'),
(1900-1970)*365 - (1970-1900)//4 + 365)
assert_equal(np.array('1967', dtype='M8[D]').astype('i8'), -3*365 - 1)
assert_equal(np.array('1968', dtype='M8[D]').astype('i8'), -2*365 - 1)
assert_equal(np.array('1969', dtype='M8[D]').astype('i8'), -1*365)
assert_equal(np.array('1970', dtype='M8[D]').astype('i8'), 0*365)
assert_equal(np.array('1971', dtype='M8[D]').astype('i8'), 1*365)
assert_equal(np.array('1972', dtype='M8[D]').astype('i8'), 2*365)
assert_equal(np.array('1973', dtype='M8[D]').astype('i8'), 3*365 + 1)
assert_equal(np.array('1974', dtype='M8[D]').astype('i8'), 4*365 + 1)
assert_equal(np.array('2000', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4)
assert_equal(np.array('2001', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 366)
assert_equal(np.array('2400', dtype='M8[D]').astype('i8'),
(2400 - 1970)*365 + (2400 - 1972)//4 - 3)
assert_equal(np.array('2401', dtype='M8[D]').astype('i8'),
(2400 - 1970)*365 + (2400 - 1972)//4 - 3 + 366)
assert_equal(np.array('1600-02-29', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)//4 + 3 + 31 + 28)
assert_equal(np.array('1600-03-01', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)//4 + 3 + 31 + 29)
assert_equal(np.array('2000-02-29', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 31 + 28)
assert_equal(np.array('2000-03-01', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 31 + 29)
assert_equal(np.array('2001-03-22', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 366 + 31 + 28 + 21)
def test_days_to_pydate(self):
assert_equal(np.array('1599', dtype='M8[D]').astype('O'),
datetime.date(1599, 1, 1))
assert_equal(np.array('1600', dtype='M8[D]').astype('O'),
datetime.date(1600, 1, 1))
assert_equal(np.array('1601', dtype='M8[D]').astype('O'),
datetime.date(1601, 1, 1))
assert_equal(np.array('1900', dtype='M8[D]').astype('O'),
datetime.date(1900, 1, 1))
assert_equal(np.array('1901', dtype='M8[D]').astype('O'),
datetime.date(1901, 1, 1))
assert_equal(np.array('2000', dtype='M8[D]').astype('O'),
datetime.date(2000, 1, 1))
assert_equal(np.array('2001', dtype='M8[D]').astype('O'),
datetime.date(2001, 1, 1))
assert_equal(np.array('1600-02-29', dtype='M8[D]').astype('O'),
datetime.date(1600, 2, 29))
assert_equal(np.array('1600-03-01', dtype='M8[D]').astype('O'),
datetime.date(1600, 3, 1))
assert_equal(np.array('2001-03-22', dtype='M8[D]').astype('O'),
datetime.date(2001, 3, 22))
def test_dtype_comparison(self):
assert_(not (np.dtype('M8[us]') == np.dtype('M8[ms]')))
assert_(np.dtype('M8[us]') != np.dtype('M8[ms]'))
assert_(np.dtype('M8[2D]') != np.dtype('M8[D]'))
assert_(np.dtype('M8[D]') != np.dtype('M8[2D]'))
    def test_pydatetime_creation(self):
        """ISO strings and datetime.date objects build identical M8 values."""
        a = np.array(['1960-03-12', datetime.date(1960, 3, 12)], dtype='M8[D]')
        assert_equal(a[0], a[1])
        a = np.array(['1999-12-31', datetime.date(1999, 12, 31)], dtype='M8[D]')
        assert_equal(a[0], a[1])
        a = np.array(['2000-01-01', datetime.date(2000, 1, 1)], dtype='M8[D]')
        assert_equal(a[0], a[1])
        # NOTE(review): could race if the date rolls over between the two
        # evaluations — astronomically unlikely, but worth knowing.
        a = np.array(['today', datetime.date.today()], dtype='M8[D]')
        assert_equal(a[0], a[1])
        # A date converts to second resolution as midnight of that day.
        assert_equal(np.array(datetime.date(1960, 3, 12), dtype='M8[s]'),
                     np.array(np.datetime64('1960-03-12T00:00:00')))
    def test_datetime_string_conversion(self):
        """Round trips between datetime64 and byte/unicode string arrays."""
        a = ['2011-03-16', '1920-01-01', '2013-05-19']
        str_a = np.array(a, dtype='S')
        uni_a = np.array(a, dtype='U')
        dt_a = np.array(a, dtype='M')
        # Convert bytes to datetime, via astype and via assignment.
        assert_equal(dt_a, str_a.astype('M'))
        assert_equal(dt_a.dtype, str_a.astype('M').dtype)
        dt_b = np.empty_like(dt_a)
        dt_b[...] = str_a
        assert_equal(dt_a, dt_b)
        # Convert datetime to bytes.
        assert_equal(str_a, dt_a.astype('S0'))
        str_b = np.empty_like(str_a)
        str_b[...] = dt_a
        assert_equal(str_a, str_b)
        # Convert unicode to datetime.
        assert_equal(dt_a, uni_a.astype('M'))
        assert_equal(dt_a.dtype, uni_a.astype('M').dtype)
        dt_b = np.empty_like(dt_a)
        dt_b[...] = uni_a
        assert_equal(dt_a, dt_b)
        # Convert datetime to unicode.
        assert_equal(uni_a, dt_a.astype('U'))
        uni_b = np.empty_like(uni_a)
        uni_b[...] = dt_a
        assert_equal(uni_a, uni_b)
        # Datetime to a string dtype much longer than needed.
        assert_equal(str_a, dt_a.astype((np.string_, 128)))
        str_b = np.empty(str_a.shape, dtype=(np.string_, 128))
        str_b[...] = dt_a
        assert_equal(str_a, str_b)
    def test_datetime_array_str(self):
        """str() and array2string formatting of datetime64 arrays."""
        a = np.array(['2011-03-16', '1920-01-01', '2013-05-19'], dtype='M')
        assert_equal(str(a), "['2011-03-16' '1920-01-01' '2013-05-19']")
        # A custom 'datetime' formatter is honored by array2string.
        a = np.array(['2011-03-16T13:55', '1920-01-01T03:12'], dtype='M')
        assert_equal(np.array2string(a, separator=', ',
                    formatter={'datetime': lambda x:
                        "'%s'" % np.datetime_as_string(x, timezone='UTC')}),
                     "['2011-03-16T13:55Z', '1920-01-01T03:12Z']")
        # NaT is printed as the bare string 'NaT'.
        a = np.array(['2010', 'NaT', '2030']).astype('M')
        assert_equal(str(a), "['2010' 'NaT' '2030']")
    def test_timedelta_array_str(self):
        """str() formatting and NaT alignment in timedelta64 arrays."""
        a = np.array([-1, 0, 100], dtype='m')
        assert_equal(str(a), "[ -1   0 100]")
        a = np.array(['NaT', 'NaT'], dtype='m')
        assert_equal(str(a), "['NaT' 'NaT']")
        # Check right-alignment with NaTs
        a = np.array([-1, 'NaT', 0], dtype='m')
        assert_equal(str(a), "[   -1 'NaT'     0]")
        a = np.array([-1, 'NaT', 1234567], dtype='m')
        assert_equal(str(a), "[     -1   'NaT' 1234567]")
        # Test with other byteorder:
        a = np.array([-1, 'NaT', 1234567], dtype='>m')
        assert_equal(str(a), "[     -1   'NaT' 1234567]")
        a = np.array([-1, 'NaT', 1234567], dtype='<m')
        assert_equal(str(a), "[     -1   'NaT' 1234567]")
    def test_pickle(self):
        """Pickle round trips for datetime dtypes/scalars, and 1.6 pickles.

        The hard-coded byte strings below are pickles produced by
        NumPy 1.6 and must keep loading into the expected dtypes.
        """
        # Check that pickle roundtripping works
        for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
            dt = np.dtype('M8[7D]')
            assert_equal(pickle.loads(pickle.dumps(dt, protocol=proto)), dt)
            dt = np.dtype('M8[W]')
            assert_equal(pickle.loads(pickle.dumps(dt, protocol=proto)), dt)
            scalar = np.datetime64('2016-01-01T00:00:00.000000000')
            assert_equal(pickle.loads(pickle.dumps(scalar, protocol=proto)),
                         scalar)
            delta = scalar - np.datetime64('2015-01-01T00:00:00.000000000')
            assert_equal(pickle.loads(pickle.dumps(delta, protocol=proto)),
                         delta)
        # Check that loading pickles from 1.6 works
        pkl = b"cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
              b"(I4\nS'<'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'D'\np6\n" + \
              b"I7\nI1\nI1\ntp7\ntp8\ntp9\nb."
        assert_equal(pickle.loads(pkl), np.dtype('<M8[7D]'))
        pkl = b"cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
              b"(I4\nS'<'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'W'\np6\n" + \
              b"I1\nI1\nI1\ntp7\ntp8\ntp9\nb."
        assert_equal(pickle.loads(pkl), np.dtype('<M8[W]'))
        pkl = b"cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
              b"(I4\nS'>'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'us'\np6\n" + \
              b"I1\nI1\nI1\ntp7\ntp8\ntp9\nb."
        assert_equal(pickle.loads(pkl), np.dtype('>M8[us]'))
    def test_setstate(self):
        """Invalid __setstate__ args must raise and leave the dtype intact."""
        dt = np.dtype('>M8[us]')
        # Wrong metadata element type -> ValueError; dtype state unchanged.
        assert_raises(ValueError, dt.__setstate__, (4, '>', None, None, None, -1, -1, 0, 1))
        assert_(dt.__reduce__()[2] == np.dtype('>M8[us]').__reduce__()[2])
        # Malformed metadata tuple -> TypeError; dtype state unchanged.
        assert_raises(TypeError, dt.__setstate__, (4, '>', None, None, None, -1, -1, 0, ({}, 'xxx')))
        assert_(dt.__reduce__()[2] == np.dtype('>M8[us]').__reduce__()[2])
    def test_dtype_promotion(self):
        """Unit promotion rules for datetime/timedelta dtype pairs."""
        # datetime <op> datetime computes the metadata gcd
        # timedelta <op> timedelta computes the metadata gcd
        for mM in ['m', 'M']:
            assert_equal(
                np.promote_types(np.dtype(mM+'8[2Y]'), np.dtype(mM+'8[2Y]')),
                np.dtype(mM+'8[2Y]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[12Y]'), np.dtype(mM+'8[15Y]')),
                np.dtype(mM+'8[3Y]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[62M]'), np.dtype(mM+'8[24M]')),
                np.dtype(mM+'8[2M]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[1W]'), np.dtype(mM+'8[2D]')),
                np.dtype(mM+'8[1D]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[W]'), np.dtype(mM+'8[13s]')),
                np.dtype(mM+'8[s]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[13W]'), np.dtype(mM+'8[49s]')),
                np.dtype(mM+'8[7s]'))
        # timedelta <op> timedelta raises when there is no reasonable gcd
        # (calendar units vs fixed-length units have no common divisor).
        assert_raises(TypeError, np.promote_types,
                      np.dtype('m8[Y]'), np.dtype('m8[D]'))
        assert_raises(TypeError, np.promote_types,
                      np.dtype('m8[M]'), np.dtype('m8[W]'))
        # timedelta and float cannot be safely cast with each other
        assert_raises(TypeError, np.promote_types, "float32", "m8")
        assert_raises(TypeError, np.promote_types, "m8", "float32")
        assert_raises(TypeError, np.promote_types, "uint64", "m8")
        assert_raises(TypeError, np.promote_types, "m8", "uint64")
        # timedelta <op> timedelta may overflow with big unit ranges
        assert_raises(OverflowError, np.promote_types,
                      np.dtype('m8[W]'), np.dtype('m8[fs]'))
        assert_raises(OverflowError, np.promote_types,
                      np.dtype('m8[s]'), np.dtype('m8[as]'))
def test_cast_overflow(self):
# gh-4486
def cast():
numpy.datetime64("1971-01-01 00:00:00.000000000000000").astype("<M8[D]")
assert_raises(OverflowError, cast)
def cast2():
numpy.datetime64("2014").astype("<M8[fs]")
assert_raises(OverflowError, cast2)
    def test_pyobject_roundtrip(self):
        """Every datetime64 unit must round-trip through object dtype."""
        # All datetime types should be able to roundtrip through object
        a = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0,
                      -1020040340, -2942398, -1, 0, 1, 234523453, 1199164176],
                     dtype=np.int64)
        # With date units
        for unit in ['M8[D]', 'M8[W]', 'M8[M]', 'M8[Y]']:
            b = a.copy().view(dtype=unit)
            # Overwrite the leading slots with boundary dates
            # (pre-epoch, year zero, far future) plus NaT.
            b[0] = '-0001-01-01'
            b[1] = '-0001-12-31'
            b[2] = '0000-01-01'
            b[3] = '0001-01-01'
            b[4] = '1969-12-31'
            b[5] = '1970-01-01'
            b[6] = '9999-12-31'
            b[7] = '10000-01-01'
            b[8] = 'NaT'
            assert_equal(b.astype(object).astype(unit), b,
                         "Error roundtripping unit %s" % unit)
        # With time units
        for unit in ['M8[as]', 'M8[16fs]', 'M8[ps]', 'M8[us]',
                     'M8[300as]', 'M8[20us]']:
            b = a.copy().view(dtype=unit)
            b[0] = '-0001-01-01T00'
            b[1] = '-0001-12-31T00'
            b[2] = '0000-01-01T00'
            b[3] = '0001-01-01T00'
            b[4] = '1969-12-31T23:59:59.999999'
            b[5] = '1970-01-01T00'
            b[6] = '9999-12-31T23:59:59.999999'
            b[7] = '10000-01-01T00'
            b[8] = 'NaT'
            assert_equal(b.astype(object).astype(unit), b,
                         "Error roundtripping unit %s" % unit)
    def test_month_truncation(self):
        """Casting to month units truncates towards the start of the month."""
        # Make sure that months are truncating correctly
        assert_equal(np.array('1945-03-01', dtype='M8[M]'),
                     np.array('1945-03-31', dtype='M8[M]'))
        assert_equal(np.array('1969-11-01', dtype='M8[M]'),
             np.array('1969-11-30T23:59:59.99999', dtype='M').astype('M8[M]'))
        assert_equal(np.array('1969-12-01', dtype='M8[M]'),
             np.array('1969-12-31T23:59:59.99999', dtype='M').astype('M8[M]'))
        assert_equal(np.array('1970-01-01', dtype='M8[M]'),
             np.array('1970-01-31T23:59:59.99999', dtype='M').astype('M8[M]'))
        assert_equal(np.array('1980-02-01', dtype='M8[M]'),
             np.array('1980-02-29T23:59:59.99999', dtype='M').astype('M8[M]'))
    def test_different_unit_comparison(self):
        """Equality of equivalent datetimes across every pair of units."""
        # Check some years with date units
        for unit1 in ['Y', 'M', 'D']:
            dt1 = np.dtype('M8[%s]' % unit1)
            for unit2 in ['Y', 'M', 'D']:
                dt2 = np.dtype('M8[%s]' % unit2)
                assert_equal(np.array('1945', dtype=dt1),
                             np.array('1945', dtype=dt2))
                assert_equal(np.array('1970', dtype=dt1),
                             np.array('1970', dtype=dt2))
                assert_equal(np.array('9999', dtype=dt1),
                             np.array('9999', dtype=dt2))
                assert_equal(np.array('10000', dtype=dt1),
                             np.array('10000-01-01', dtype=dt2))
                assert_equal(np.datetime64('1945', unit1),
                             np.datetime64('1945', unit2))
                assert_equal(np.datetime64('1970', unit1),
                             np.datetime64('1970', unit2))
                assert_equal(np.datetime64('9999', unit1),
                             np.datetime64('9999', unit2))
                assert_equal(np.datetime64('10000', unit1),
                             np.datetime64('10000-01-01', unit2))
        # Check some datetimes with time units
        for unit1 in ['6h', 'h', 'm', 's', '10ms', 'ms', 'us']:
            dt1 = np.dtype('M8[%s]' % unit1)
            for unit2 in ['h', 'm', 's', 'ms', 'us']:
                dt2 = np.dtype('M8[%s]' % unit2)
                assert_equal(np.array('1945-03-12T18', dtype=dt1),
                             np.array('1945-03-12T18', dtype=dt2))
                assert_equal(np.array('1970-03-12T18', dtype=dt1),
                             np.array('1970-03-12T18', dtype=dt2))
                assert_equal(np.array('9999-03-12T18', dtype=dt1),
                             np.array('9999-03-12T18', dtype=dt2))
                assert_equal(np.array('10000-01-01T00', dtype=dt1),
                             np.array('10000-01-01T00', dtype=dt2))
                assert_equal(np.datetime64('1945-03-12T18', unit1),
                             np.datetime64('1945-03-12T18', unit2))
                assert_equal(np.datetime64('1970-03-12T18', unit1),
                             np.datetime64('1970-03-12T18', unit2))
                assert_equal(np.datetime64('9999-03-12T18', unit1),
                             np.datetime64('9999-03-12T18', unit2))
                assert_equal(np.datetime64('10000-01-01T00', unit1),
                             np.datetime64('10000-01-01T00', unit2))
        # Check some days with units that won't overflow
        for unit1 in ['D', '12h', 'h', 'm', 's', '4s', 'ms', 'us']:
            dt1 = np.dtype('M8[%s]' % unit1)
            for unit2 in ['D', 'h', 'm', 's', 'ms', 'us']:
                dt2 = np.dtype('M8[%s]' % unit2)
                assert_(np.equal(np.array('1932-02-17', dtype='M').astype(dt1),
                     np.array('1932-02-17T00:00:00', dtype='M').astype(dt2),
                     casting='unsafe'))
                assert_(np.equal(np.array('10000-04-27', dtype='M').astype(dt1),
                     np.array('10000-04-27T00:00:00', dtype='M').astype(dt2),
                     casting='unsafe'))
        # TODO: Changing to 'same_kind' or 'safe' casting in the ufuncs by
        #       default is needed to properly catch this kind of thing...
        a = np.array('2012-12-21', dtype='M8[D]')
        b = np.array(3, dtype='m8[D]')
        #assert_raises(TypeError, np.less, a, b)
        assert_raises(TypeError, np.less, a, b, casting='same_kind')
def test_datetime_like(self):
a = np.array([3], dtype='m8[4D]')
b = np.array(['2012-12-21'], dtype='M8[D]')
assert_equal(np.ones_like(a).dtype, a.dtype)
assert_equal(np.zeros_like(a).dtype, a.dtype)
assert_equal(np.empty_like(a).dtype, a.dtype)
assert_equal(np.ones_like(b).dtype, b.dtype)
assert_equal(np.zeros_like(b).dtype, b.dtype)
assert_equal(np.empty_like(b).dtype, b.dtype)
def test_datetime_unary(self):
for tda, tdb, tdzero, tdone, tdmone in \
[
# One-dimensional arrays
(np.array([3], dtype='m8[D]'),
np.array([-3], dtype='m8[D]'),
np.array([0], dtype='m8[D]'),
np.array([1], dtype='m8[D]'),
np.array([-1], dtype='m8[D]')),
# NumPy scalars
(np.timedelta64(3, '[D]'),
np.timedelta64(-3, '[D]'),
np.timedelta64(0, '[D]'),
np.timedelta64(1, '[D]'),
np.timedelta64(-1, '[D]'))]:
# negative ufunc
assert_equal(-tdb, tda)
assert_equal((-tdb).dtype, tda.dtype)
assert_equal(np.negative(tdb), tda)
assert_equal(np.negative(tdb).dtype, tda.dtype)
# positive ufunc
assert_equal(np.positive(tda), tda)
assert_equal(np.positive(tda).dtype, tda.dtype)
assert_equal(np.positive(tdb), tdb)
assert_equal(np.positive(tdb).dtype, tdb.dtype)
# absolute ufunc
assert_equal(np.absolute(tdb), tda)
assert_equal(np.absolute(tdb).dtype, tda.dtype)
# sign ufunc
assert_equal(np.sign(tda), tdone)
assert_equal(np.sign(tdb), tdmone)
assert_equal(np.sign(tdzero), tdzero)
assert_equal(np.sign(tda).dtype, tda.dtype)
# The ufuncs always produce native-endian results
assert_
    def test_datetime_add(self):
        """Addition rules for datetime64 (M8) and timedelta64 (m8).

        Runs once on one-dimensional arrays and once on NumPy scalars;
        dtnat checks NaT propagation through every addition.
        """
        for dta, dtb, dtc, dtnat, tda, tdb, tdc in \
                [
                 # One-dimensional arrays
                 (np.array(['2012-12-21'], dtype='M8[D]'),
                  np.array(['2012-12-24'], dtype='M8[D]'),
                  np.array(['2012-12-21T11'], dtype='M8[h]'),
                  np.array(['NaT'], dtype='M8[D]'),
                  np.array([3], dtype='m8[D]'),
                  np.array([11], dtype='m8[h]'),
                  np.array([3*24 + 11], dtype='m8[h]')),
                 # NumPy scalars
                 (np.datetime64('2012-12-21', '[D]'),
                  np.datetime64('2012-12-24', '[D]'),
                  np.datetime64('2012-12-21T11', '[h]'),
                  np.datetime64('NaT', '[D]'),
                  np.timedelta64(3, '[D]'),
                  np.timedelta64(11, '[h]'),
                  np.timedelta64(3*24 + 11, '[h]'))]:
            # m8 + m8
            assert_equal(tda + tdb, tdc)
            assert_equal((tda + tdb).dtype, np.dtype('m8[h]'))
            # m8 + bool
            assert_equal(tdb + True, tdb + 1)
            assert_equal((tdb + True).dtype, np.dtype('m8[h]'))
            # m8 + int
            assert_equal(tdb + 3*24, tdc)
            assert_equal((tdb + 3*24).dtype, np.dtype('m8[h]'))
            # bool + m8
            assert_equal(False + tdb, tdb)
            assert_equal((False + tdb).dtype, np.dtype('m8[h]'))
            # int + m8
            assert_equal(3*24 + tdb, tdc)
            assert_equal((3*24 + tdb).dtype, np.dtype('m8[h]'))
            # M8 + bool
            assert_equal(dta + True, dta + 1)
            assert_equal(dtnat + True, dtnat)
            assert_equal((dta + True).dtype, np.dtype('M8[D]'))
            # M8 + int
            assert_equal(dta + 3, dtb)
            assert_equal(dtnat + 3, dtnat)
            assert_equal((dta + 3).dtype, np.dtype('M8[D]'))
            # bool + M8
            assert_equal(False + dta, dta)
            assert_equal(False + dtnat, dtnat)
            assert_equal((False + dta).dtype, np.dtype('M8[D]'))
            # int + M8
            assert_equal(3 + dta, dtb)
            assert_equal(3 + dtnat, dtnat)
            assert_equal((3 + dta).dtype, np.dtype('M8[D]'))
            # M8 + m8
            assert_equal(dta + tda, dtb)
            assert_equal(dtnat + tda, dtnat)
            assert_equal((dta + tda).dtype, np.dtype('M8[D]'))
            # m8 + M8
            assert_equal(tda + dta, dtb)
            assert_equal(tda + dtnat, dtnat)
            assert_equal((tda + dta).dtype, np.dtype('M8[D]'))
            # In M8 + m8, the result goes to higher precision
            assert_equal(np.add(dta, tdb, casting='unsafe'), dtc)
            assert_equal(np.add(dta, tdb, casting='unsafe').dtype,
                         np.dtype('M8[h]'))
            assert_equal(np.add(tdb, dta, casting='unsafe'), dtc)
            assert_equal(np.add(tdb, dta, casting='unsafe').dtype,
                         np.dtype('M8[h]'))
            # M8 + M8
            assert_raises(TypeError, np.add, dta, dtb)
    def test_datetime_subtract(self):
        """Subtraction rules for datetime64 (M8) and timedelta64 (m8).

        Runs once on one-dimensional arrays and once on NumPy scalars;
        dtnat checks NaT propagation, and the m8-from-M8 / bool-from-M8 /
        int-from-M8 cases must raise TypeError.
        """
        for dta, dtb, dtc, dtd, dte, dtnat, tda, tdb, tdc in \
                [
                 # One-dimensional arrays
                 (np.array(['2012-12-21'], dtype='M8[D]'),
                  np.array(['2012-12-24'], dtype='M8[D]'),
                  np.array(['1940-12-24'], dtype='M8[D]'),
                  np.array(['1940-12-24T00'], dtype='M8[h]'),
                  np.array(['1940-12-23T13'], dtype='M8[h]'),
                  np.array(['NaT'], dtype='M8[D]'),
                  np.array([3], dtype='m8[D]'),
                  np.array([11], dtype='m8[h]'),
                  np.array([3*24 - 11], dtype='m8[h]')),
                 # NumPy scalars
                 (np.datetime64('2012-12-21', '[D]'),
                  np.datetime64('2012-12-24', '[D]'),
                  np.datetime64('1940-12-24', '[D]'),
                  np.datetime64('1940-12-24T00', '[h]'),
                  np.datetime64('1940-12-23T13', '[h]'),
                  np.datetime64('NaT', '[D]'),
                  np.timedelta64(3, '[D]'),
                  np.timedelta64(11, '[h]'),
                  np.timedelta64(3*24 - 11, '[h]'))]:
            # m8 - m8
            assert_equal(tda - tdb, tdc)
            assert_equal((tda - tdb).dtype, np.dtype('m8[h]'))
            assert_equal(tdb - tda, -tdc)
            assert_equal((tdb - tda).dtype, np.dtype('m8[h]'))
            # m8 - bool
            assert_equal(tdc - True, tdc - 1)
            assert_equal((tdc - True).dtype, np.dtype('m8[h]'))
            # m8 - int
            assert_equal(tdc - 3*24, -tdb)
            assert_equal((tdc - 3*24).dtype, np.dtype('m8[h]'))
            # bool - m8
            assert_equal(False - tdb, -tdb)
            assert_equal((False - tdb).dtype, np.dtype('m8[h]'))
            # int - m8
            assert_equal(3*24 - tdb, tdc)
            assert_equal((3*24 - tdb).dtype, np.dtype('m8[h]'))
            # M8 - bool
            assert_equal(dtb - True, dtb - 1)
            assert_equal(dtnat - True, dtnat)
            assert_equal((dtb - True).dtype, np.dtype('M8[D]'))
            # M8 - int
            assert_equal(dtb - 3, dta)
            assert_equal(dtnat - 3, dtnat)
            assert_equal((dtb - 3).dtype, np.dtype('M8[D]'))
            # M8 - m8
            assert_equal(dtb - tda, dta)
            assert_equal(dtnat - tda, dtnat)
            assert_equal((dtb - tda).dtype, np.dtype('M8[D]'))
            # In M8 - m8, the result goes to higher precision
            assert_equal(np.subtract(dtc, tdb, casting='unsafe'), dte)
            assert_equal(np.subtract(dtc, tdb, casting='unsafe').dtype,
                         np.dtype('M8[h]'))
            # M8 - M8 with different goes to higher precision
            assert_equal(np.subtract(dtc, dtd, casting='unsafe'),
                         np.timedelta64(0, 'h'))
            assert_equal(np.subtract(dtc, dtd, casting='unsafe').dtype,
                         np.dtype('m8[h]'))
            assert_equal(np.subtract(dtd, dtc, casting='unsafe'),
                         np.timedelta64(0, 'h'))
            assert_equal(np.subtract(dtd, dtc, casting='unsafe').dtype,
                         np.dtype('m8[h]'))
            # m8 - M8
            assert_raises(TypeError, np.subtract, tda, dta)
            # bool - M8
            assert_raises(TypeError, np.subtract, False, dta)
            # int - M8
            assert_raises(TypeError, np.subtract, 3, dta)
def test_datetime_multiply(self):
for dta, tda, tdb, tdc in \
[
# One-dimensional arrays
(np.array(['2012-12-21'], dtype='M8[D]'),
np.array([6], dtype='m8[h]'),
np.array([9], dtype='m8[h]'),
np.array([12], dtype='m8[h]')),
# NumPy scalars
(np.datetime64('2012-12-21', '[D]'),
np.timedelta64(6, '[h]'),
np.timedelta64(9, '[h]'),
np.timedelta64(12, '[h]'))]:
# m8 * int
assert_equal(tda * 2, tdc)
assert_equal((tda * 2).dtype, np.dtype('m8[h]'))
# int * m8
assert_equal(2 * tda, tdc)
assert_equal((2 * tda).dtype, np.dtype('m8[h]'))
# m8 * float
assert_equal(tda * 1.5, tdb)
assert_equal((tda * 1.5).dtype, np.dtype('m8[h]'))
# float * m8
assert_equal(1.5 * tda, tdb)
assert_equal((1.5 * tda).dtype, np.dtype('m8[h]'))
# m8 * m8
assert_raises(TypeError, np.multiply, tda, tdb)
# m8 * M8
assert_raises(TypeError, np.multiply, dta, tda)
# M8 * m8
assert_raises(TypeError, np.multiply, tda, dta)
# M8 * int
assert_raises(TypeError, np.multiply, dta, 2)
# int * M8
assert_raises(TypeError, np.multiply, 2, dta)
# M8 * float
assert_raises(TypeError, np.multiply, dta, 1.5)
# float * M8
assert_raises(TypeError, np.multiply, 1.5, dta)
# NaTs
with suppress_warnings() as sup:
sup.filter(RuntimeWarning, "invalid value encountered in multiply")
nat = np.timedelta64('NaT')
def check(a, b, res):
assert_equal(a * b, res)
assert_equal(b * a, res)
for tp in (int, float):
check(nat, tp(2), nat)
check(nat, tp(0), nat)
for f in (float('inf'), float('nan')):
check(np.timedelta64(1), f, nat)
check(np.timedelta64(0), f, nat)
check(nat, f, nat)
@pytest.mark.parametrize("op1, op2, exp", [
# m8 same units round down
(np.timedelta64(7, 's'),
np.timedelta64(4, 's'),
1),
# m8 same units round down with negative
(np.timedelta64(7, 's'),
np.timedelta64(-4, 's'),
-2),
# m8 same units negative no round down
(np.timedelta64(8, 's'),
np.timedelta64(-4, 's'),
-2),
# m8 different units
(np.timedelta64(1, 'm'),
np.timedelta64(31, 's'),
1),
# m8 generic units
(np.timedelta64(1890),
np.timedelta64(31),
60),
# Y // M works
(np.timedelta64(2, 'Y'),
np.timedelta64('13', 'M'),
1),
# handle 1D arrays
(np.array([1, 2, 3], dtype='m8'),
np.array([2], dtype='m8'),
np.array([0, 1, 1], dtype=np.int64)),
])
def test_timedelta_floor_divide(self, op1, op2, exp):
assert_equal(op1 // op2, exp)
@pytest.mark.parametrize("op1, op2", [
# div by 0
(np.timedelta64(10, 'us'),
np.timedelta64(0, 'us')),
# div with NaT
(np.timedelta64('NaT'),
np.timedelta64(50, 'us')),
# special case for int64 min
# in integer floor division
(np.timedelta64(np.iinfo(np.int64).min),
np.timedelta64(-1)),
])
def test_timedelta_floor_div_warnings(self, op1, op2):
with assert_warns(RuntimeWarning):
actual = op1 // op2
assert_equal(actual, 0)
assert_equal(actual.dtype, np.int64)
@pytest.mark.parametrize("val1, val2", [
# the smallest integer that can't be represented
(9007199254740993, 1),
(9007199254740999, -2),
])
def test_timedelta_floor_div_precision(self, val1, val2):
op1 = np.timedelta64(val1)
op2 = np.timedelta64(val2)
actual = op1 // op2
expected = val1 // val2
assert_equal(actual, expected)
@pytest.mark.parametrize("val1, val2", [
# divided for floor division operation
(np.timedelta64(7, 'Y'),
np.timedelta64(3, 's')),
(np.timedelta64(7, 'M'),
np.timedelta64(1, 'D')),
])
def test_timedelta_floor_div_error(self, val1, val2):
with assert_raises_regex(TypeError, "common metadata divisor"):
val1 // val2
@pytest.mark.parametrize("op1, op2", [
# reuse the test cases from floordiv
(np.timedelta64(7, 's'),
np.timedelta64(4, 's')),
# m8 same units round down with negative
(np.timedelta64(7, 's'),
np.timedelta64(-4, 's')),
# m8 same units negative no round down
(np.timedelta64(8, 's'),
np.timedelta64(-4, 's')),
# m8 different units
(np.timedelta64(1, 'm'),
np.timedelta64(31, 's')),
# m8 generic units
(np.timedelta64(1890),
np.timedelta64(31)),
# Y // M works
(np.timedelta64(2, 'Y'),
np.timedelta64('13', 'M')),
# handle 1D arrays
(np.array([1, 2, 3], dtype='m8'),
np.array([2], dtype='m8')),
])
def test_timedelta_divmod(self, op1, op2):
expected = (op1 // op2, op1 % op2)
assert_equal(divmod(op1, op2), expected)
@pytest.mark.parametrize("op1, op2", [
# reuse cases from floordiv
# div by 0
(np.timedelta64(10, 'us'),
np.timedelta64(0, 'us')),
# div with NaT
(np.timedelta64('NaT'),
np.timedelta64(50, 'us')),
# special case for int64 min
# in integer floor division
(np.timedelta64(np.iinfo(np.int64).min),
np.timedelta64(-1)),
])
def test_timedelta_divmod_warnings(self, op1, op2):
with assert_warns(RuntimeWarning):
expected = (op1 // op2, op1 % op2)
with assert_warns(RuntimeWarning):
actual = divmod(op1, op2)
assert_equal(actual, expected)
    def test_datetime_divide(self):
        """Division rules for datetime64/timedelta64.

        m8 / scalar scales the timedelta, m8 / m8 yields a float ratio,
        and M8 (datetime) operands are rejected on either side.  NaT,
        zero, inf and nan divisors all propagate NaT.
        """
        for dta, tda, tdb, tdc, tdd in \
                    [
                     # One-dimensional arrays
                     (np.array(['2012-12-21'], dtype='M8[D]'),
                      np.array([6], dtype='m8[h]'),
                      np.array([9], dtype='m8[h]'),
                      np.array([12], dtype='m8[h]'),
                      np.array([6], dtype='m8[m]')),
                     # NumPy scalars
                     (np.datetime64('2012-12-21', '[D]'),
                      np.timedelta64(6, '[h]'),
                      np.timedelta64(9, '[h]'),
                      np.timedelta64(12, '[h]'),
                      np.timedelta64(6, '[m]'))]:
            # m8 / int
            assert_equal(tdc / 2, tda)
            assert_equal((tdc / 2).dtype, np.dtype('m8[h]'))
            # m8 / float
            assert_equal(tda / 0.5, tdc)
            assert_equal((tda / 0.5).dtype, np.dtype('m8[h]'))
            # m8 / m8 yields a dimensionless float ratio
            assert_equal(tda / tdb, 6.0 / 9.0)
            assert_equal(np.divide(tda, tdb), 6.0 / 9.0)
            assert_equal(np.true_divide(tda, tdb), 6.0 / 9.0)
            assert_equal(tdb / tda, 9.0 / 6.0)
            assert_equal((tda / tdb).dtype, np.dtype('f8'))
            # cross-unit m8 / m8: 6h / 6m == 60
            assert_equal(tda / tdd, 60.0)
            assert_equal(tdd / tda, 1.0 / 60.0)
            # int / m8
            assert_raises(TypeError, np.divide, 2, tdb)
            # float / m8
            assert_raises(TypeError, np.divide, 0.5, tdb)
            # m8 / M8
            assert_raises(TypeError, np.divide, dta, tda)
            # M8 / m8
            assert_raises(TypeError, np.divide, tda, dta)
            # M8 / int
            assert_raises(TypeError, np.divide, dta, 2)
            # int / M8
            assert_raises(TypeError, np.divide, 2, dta)
            # M8 / float
            assert_raises(TypeError, np.divide, dta, 1.5)
            # float / M8
            assert_raises(TypeError, np.divide, 1.5, dta)
        # NaTs
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning, r".*encountered in true\_divide")
            nat = np.timedelta64('NaT')
            for tp in (int, float):
                # Division by zero propagates NaT instead of raising
                assert_equal(np.timedelta64(1) / tp(0), nat)
                assert_equal(np.timedelta64(0) / tp(0), nat)
                assert_equal(nat / tp(0), nat)
                assert_equal(nat / tp(2), nat)
            # Division by inf
            assert_equal(np.timedelta64(1) / float('inf'), np.timedelta64(0))
            assert_equal(np.timedelta64(0) / float('inf'), np.timedelta64(0))
            assert_equal(nat / float('inf'), nat)
            # Division by nan
            assert_equal(np.timedelta64(1) / float('nan'), nat)
            assert_equal(np.timedelta64(0) / float('nan'), nat)
            assert_equal(nat / float('nan'), nat)
def test_datetime_compare(self):
# Test all the comparison operators
a = np.datetime64('2000-03-12T18:00:00.000000')
b = np.array(['2000-03-12T18:00:00.000000',
'2000-03-12T17:59:59.999999',
'2000-03-12T18:00:00.000001',
'1970-01-11T12:00:00.909090',
'2016-01-11T12:00:00.909090'],
dtype='datetime64[us]')
assert_equal(np.equal(a, b), [1, 0, 0, 0, 0])
assert_equal(np.not_equal(a, b), [0, 1, 1, 1, 1])
assert_equal(np.less(a, b), [0, 0, 1, 0, 1])
assert_equal(np.less_equal(a, b), [1, 0, 1, 0, 1])
assert_equal(np.greater(a, b), [0, 1, 0, 1, 0])
assert_equal(np.greater_equal(a, b), [1, 1, 0, 1, 0])
def test_datetime_compare_nat(self):
dt_nat = np.datetime64('NaT', 'D')
dt_other = np.datetime64('2000-01-01')
td_nat = np.timedelta64('NaT', 'h')
td_other = np.timedelta64(1, 'h')
for op in [np.equal, np.less, np.less_equal,
np.greater, np.greater_equal]:
assert_(not op(dt_nat, dt_nat))
assert_(not op(dt_nat, dt_other))
assert_(not op(dt_other, dt_nat))
assert_(not op(td_nat, td_nat))
assert_(not op(td_nat, td_other))
assert_(not op(td_other, td_nat))
assert_(np.not_equal(dt_nat, dt_nat))
assert_(np.not_equal(dt_nat, dt_other))
assert_(np.not_equal(dt_other, dt_nat))
assert_(np.not_equal(td_nat, td_nat))
assert_(np.not_equal(td_nat, td_other))
assert_(np.not_equal(td_other, td_nat))
    def test_datetime_minmax(self):
        """minimum/maximum/fmin/fmax on M8 and m8: the result unit is the
        GCD of the operand units; NaT propagates through minimum/maximum
        but is skipped by fmin/fmax."""
        # The metadata of the result should become the GCD
        # of the operand metadata
        a = np.array('1999-03-12T13', dtype='M8[2m]')
        b = np.array('1999-03-12T12', dtype='M8[s]')
        assert_equal(np.minimum(a, b), b)
        assert_equal(np.minimum(a, b).dtype, np.dtype('M8[s]'))
        assert_equal(np.fmin(a, b), b)
        assert_equal(np.fmin(a, b).dtype, np.dtype('M8[s]'))
        assert_equal(np.maximum(a, b), a)
        assert_equal(np.maximum(a, b).dtype, np.dtype('M8[s]'))
        assert_equal(np.fmax(a, b), a)
        assert_equal(np.fmax(a, b).dtype, np.dtype('M8[s]'))
        # Viewed as integers, the comparison is opposite because
        # of the units chosen
        assert_equal(np.minimum(a.view('i8'), b.view('i8')), a.view('i8'))
        # Interaction with NaT
        a = np.array('1999-03-12T13', dtype='M8[2m]')
        dtnat = np.array('NaT', dtype='M8[h]')
        # minimum/maximum propagate NaT ...
        assert_equal(np.minimum(a, dtnat), dtnat)
        assert_equal(np.minimum(dtnat, a), dtnat)
        assert_equal(np.maximum(a, dtnat), dtnat)
        assert_equal(np.maximum(dtnat, a), dtnat)
        # ... while fmin/fmax ignore the NaT operand
        assert_equal(np.fmin(dtnat, a), a)
        assert_equal(np.fmin(a, dtnat), a)
        assert_equal(np.fmax(dtnat, a), a)
        assert_equal(np.fmax(a, dtnat), a)
        # Also do timedelta
        a = np.array(3, dtype='m8[h]')
        b = np.array(3*3600 - 3, dtype='m8[s]')
        assert_equal(np.minimum(a, b), b)
        assert_equal(np.minimum(a, b).dtype, np.dtype('m8[s]'))
        assert_equal(np.fmin(a, b), b)
        assert_equal(np.fmin(a, b).dtype, np.dtype('m8[s]'))
        assert_equal(np.maximum(a, b), a)
        assert_equal(np.maximum(a, b).dtype, np.dtype('m8[s]'))
        assert_equal(np.fmax(a, b), a)
        assert_equal(np.fmax(a, b).dtype, np.dtype('m8[s]'))
        # Viewed as integers, the comparison is opposite because
        # of the units chosen
        assert_equal(np.minimum(a.view('i8'), b.view('i8')), a.view('i8'))
        # should raise between datetime and timedelta
        #
        # TODO: Allowing unsafe casting by
        # default in ufuncs strikes again... :(
        a = np.array(3, dtype='m8[h]')
        b = np.array('1999-03-12T12', dtype='M8[s]')
        #assert_raises(TypeError, np.minimum, a, b)
        #assert_raises(TypeError, np.maximum, a, b)
        #assert_raises(TypeError, np.fmin, a, b)
        #assert_raises(TypeError, np.fmax, a, b)
        assert_raises(TypeError, np.minimum, a, b, casting='same_kind')
        assert_raises(TypeError, np.maximum, a, b, casting='same_kind')
        assert_raises(TypeError, np.fmin, a, b, casting='same_kind')
        assert_raises(TypeError, np.fmax, a, b, casting='same_kind')
def test_hours(self):
t = np.ones(3, dtype='M8[s]')
t[0] = 60*60*24 + 60*60*10
assert_(t[0].item().hour == 10)
def test_divisor_conversion_year(self):
assert_(np.dtype('M8[Y/4]') == np.dtype('M8[3M]'))
assert_(np.dtype('M8[Y/13]') == np.dtype('M8[4W]'))
assert_(np.dtype('M8[3Y/73]') == np.dtype('M8[15D]'))
def test_divisor_conversion_month(self):
assert_(np.dtype('M8[M/2]') == np.dtype('M8[2W]'))
assert_(np.dtype('M8[M/15]') == np.dtype('M8[2D]'))
assert_(np.dtype('M8[3M/40]') == np.dtype('M8[54h]'))
def test_divisor_conversion_week(self):
assert_(np.dtype('m8[W/7]') == np.dtype('m8[D]'))
assert_(np.dtype('m8[3W/14]') == np.dtype('m8[36h]'))
assert_(np.dtype('m8[5W/140]') == np.dtype('m8[360m]'))
def test_divisor_conversion_day(self):
assert_(np.dtype('M8[D/12]') == np.dtype('M8[2h]'))
assert_(np.dtype('M8[D/120]') == np.dtype('M8[12m]'))
assert_(np.dtype('M8[3D/960]') == np.dtype('M8[270s]'))
def test_divisor_conversion_hour(self):
assert_(np.dtype('m8[h/30]') == np.dtype('m8[2m]'))
assert_(np.dtype('m8[3h/300]') == np.dtype('m8[36s]'))
def test_divisor_conversion_minute(self):
assert_(np.dtype('m8[m/30]') == np.dtype('m8[2s]'))
assert_(np.dtype('m8[3m/300]') == np.dtype('m8[600ms]'))
def test_divisor_conversion_second(self):
assert_(np.dtype('m8[s/100]') == np.dtype('m8[10ms]'))
assert_(np.dtype('m8[3s/10000]') == np.dtype('m8[300us]'))
def test_divisor_conversion_fs(self):
assert_(np.dtype('M8[fs/100]') == np.dtype('M8[10as]'))
assert_raises(ValueError, lambda: np.dtype('M8[3fs/10000]'))
def test_divisor_conversion_as(self):
assert_raises(ValueError, lambda: np.dtype('M8[as/10]'))
    def test_string_parser_variants(self):
        """Accepted spellings of ISO-8601-ish datetime strings: space vs
        'T' separators, signed years, and (deprecated) 'Z'/offset
        timezone suffixes."""
        # Allow space instead of 'T' between date and time
        assert_equal(np.array(['1980-02-29T01:02:03'], np.dtype('M8[s]')),
                     np.array(['1980-02-29 01:02:03'], np.dtype('M8[s]')))
        # Allow positive years
        assert_equal(np.array(['+1980-02-29T01:02:03'], np.dtype('M8[s]')),
                     np.array(['+1980-02-29 01:02:03'], np.dtype('M8[s]')))
        # Allow negative years
        assert_equal(np.array(['-1980-02-29T01:02:03'], np.dtype('M8[s]')),
                     np.array(['-1980-02-29 01:02:03'], np.dtype('M8[s]')))
        # UTC specifier (parsing timezones is deprecated, hence the warns)
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['+1980-02-29T01:02:03'], np.dtype('M8[s]')),
                np.array(['+1980-02-29 01:02:03Z'], np.dtype('M8[s]')))
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['-1980-02-29T01:02:03'], np.dtype('M8[s]')),
                np.array(['-1980-02-29 01:02:03Z'], np.dtype('M8[s]')))
        # Time zone offset
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['1980-02-29T02:02:03'], np.dtype('M8[s]')),
                np.array(['1980-02-29 00:32:03-0130'], np.dtype('M8[s]')))
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['1980-02-28T22:32:03'], np.dtype('M8[s]')),
                np.array(['1980-02-29 00:02:03+01:30'], np.dtype('M8[s]')))
        with assert_warns(DeprecationWarning):
            assert_equal(
                np.array(['1980-02-29T02:32:03.506'], np.dtype('M8[s]')),
                np.array(['1980-02-29 00:32:03.506-02'], np.dtype('M8[s]')))
        with assert_warns(DeprecationWarning):
            assert_equal(np.datetime64('1977-03-02T12:30-0230'),
                         np.datetime64('1977-03-02T15:00'))
    def test_string_parser_error_check(self):
        """Malformed datetime strings must raise ValueError: bad field
        widths, out-of-range month/day/hour/minute/second values,
        trailing separators/characters, and out-of-range timezone
        offsets (the latter still emit the deprecation warning first)."""
        # Arbitrary bad string
        assert_raises(ValueError, np.array, ['badvalue'], np.dtype('M8[us]'))
        # Character after year must be '-'
        assert_raises(ValueError, np.array, ['1980X'], np.dtype('M8[us]'))
        # Cannot have trailing '-'
        assert_raises(ValueError, np.array, ['1980-'], np.dtype('M8[us]'))
        # Month must be in range [1,12]
        assert_raises(ValueError, np.array, ['1980-00'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-13'], np.dtype('M8[us]'))
        # Month must have two digits
        assert_raises(ValueError, np.array, ['1980-1'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-1-02'], np.dtype('M8[us]'))
        # 'Mor' is not a valid month
        assert_raises(ValueError, np.array, ['1980-Mor'], np.dtype('M8[us]'))
        # Cannot have trailing '-'
        assert_raises(ValueError, np.array, ['1980-01-'], np.dtype('M8[us]'))
        # Day must be in range [1,len(month)]
        assert_raises(ValueError, np.array, ['1980-01-0'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-01-00'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-01-32'], np.dtype('M8[us]'))
        # 1979 is not a leap year, 1980 is
        assert_raises(ValueError, np.array, ['1979-02-29'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-30'], np.dtype('M8[us]'))
        # One past the last day of each remaining month
        assert_raises(ValueError, np.array, ['1980-03-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-04-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-05-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-06-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-07-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-08-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-09-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-10-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-11-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-12-32'], np.dtype('M8[us]'))
        # Cannot have trailing characters
        assert_raises(ValueError, np.array, ['1980-02-03%'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 q'],
                                            np.dtype('M8[us]'))
        # Hours must be in range [0, 23]
        assert_raises(ValueError, np.array, ['1980-02-03 25'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03T25'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 24:01'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03T24:01'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 -1'],
                                            np.dtype('M8[us]'))
        # No trailing ':'
        assert_raises(ValueError, np.array, ['1980-02-03 01:'],
                                            np.dtype('M8[us]'))
        # Minutes must be in range [0, 59]
        assert_raises(ValueError, np.array, ['1980-02-03 01:-1'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:60'],
                                            np.dtype('M8[us]'))
        # No trailing ':'
        assert_raises(ValueError, np.array, ['1980-02-03 01:60:'],
                                            np.dtype('M8[us]'))
        # Seconds must be in range [0, 59]
        assert_raises(ValueError, np.array, ['1980-02-03 01:10:-1'],
                                            np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:01:60'],
                                            np.dtype('M8[us]'))
        # Timezone offset must within a reasonable range
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00+0661'],
                                                np.dtype('M8[us]'))
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00+2500'],
                                                np.dtype('M8[us]'))
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-0070'],
                                                np.dtype('M8[us]'))
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-3000'],
                                                np.dtype('M8[us]'))
        with assert_warns(DeprecationWarning):
            assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-25:00'],
                                                np.dtype('M8[us]'))
def test_creation_overflow(self):
date = '1980-03-23 20:00:00'
timesteps = np.array([date], dtype='datetime64[s]')[0].astype(np.int64)
for unit in ['ms', 'us', 'ns']:
timesteps *= 1000
x = np.array([date], dtype='datetime64[%s]' % unit)
assert_equal(timesteps, x[0].astype(np.int64),
err_msg='Datetime conversion error for unit %s' % unit)
assert_equal(x[0].astype(np.int64), 322689600000000000)
# gh-13062
with pytest.raises(OverflowError):
np.datetime64(2**64, 'D')
with pytest.raises(OverflowError):
np.timedelta64(2**64, 'D')
    def test_datetime_as_string(self):
        """np.datetime_as_string: default formatting for every unit,
        explicit unit= conversion (with unsafe casting for coarser
        units), and unit='auto' trimming of trailing zero fields."""
        # Check all the units with default string conversion
        date = '1959-10-13'
        datetime = '1959-10-13T12:34:56.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(date, 'Y')),
                     '1959')
        assert_equal(np.datetime_as_string(np.datetime64(date, 'M')),
                     '1959-10')
        assert_equal(np.datetime_as_string(np.datetime64(date, 'D')),
                     '1959-10-13')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'h')),
                     '1959-10-13T12')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'm')),
                     '1959-10-13T12:34')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 's')),
                     '1959-10-13T12:34:56')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ms')),
                     '1959-10-13T12:34:56.789')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'us')),
                     '1959-10-13T12:34:56.789012')
        # Sub-microsecond units: check values just before the epoch ...
        datetime = '1969-12-31T23:34:56.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ns')),
                     '1969-12-31T23:34:56.789012345')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ps')),
                     '1969-12-31T23:34:56.789012345678')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'fs')),
                     '1969-12-31T23:34:56.789012345678901')
        datetime = '1969-12-31T23:59:57.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'as')),
                     datetime)
        # ... and just after the epoch
        datetime = '1970-01-01T00:34:56.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ns')),
                     '1970-01-01T00:34:56.789012345')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ps')),
                     '1970-01-01T00:34:56.789012345678')
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'fs')),
                     '1970-01-01T00:34:56.789012345678901')
        datetime = '1970-01-01T00:00:05.789012345678901234'
        assert_equal(np.datetime_as_string(np.datetime64(datetime, 'as')),
                     datetime)
        # String conversion with the unit= parameter
        # (coarser than the value's unit requires casting='unsafe')
        a = np.datetime64('2032-07-18T12:23:34.123456', 'us')
        assert_equal(np.datetime_as_string(a, unit='Y', casting='unsafe'),
                     '2032')
        assert_equal(np.datetime_as_string(a, unit='M', casting='unsafe'),
                     '2032-07')
        assert_equal(np.datetime_as_string(a, unit='W', casting='unsafe'),
                     '2032-07-18')
        assert_equal(np.datetime_as_string(a, unit='D', casting='unsafe'),
                     '2032-07-18')
        assert_equal(np.datetime_as_string(a, unit='h'), '2032-07-18T12')
        assert_equal(np.datetime_as_string(a, unit='m'),
                     '2032-07-18T12:23')
        assert_equal(np.datetime_as_string(a, unit='s'),
                     '2032-07-18T12:23:34')
        assert_equal(np.datetime_as_string(a, unit='ms'),
                     '2032-07-18T12:23:34.123')
        assert_equal(np.datetime_as_string(a, unit='us'),
                     '2032-07-18T12:23:34.123456')
        assert_equal(np.datetime_as_string(a, unit='ns'),
                     '2032-07-18T12:23:34.123456000')
        assert_equal(np.datetime_as_string(a, unit='ps'),
                     '2032-07-18T12:23:34.123456000000')
        assert_equal(np.datetime_as_string(a, unit='fs'),
                     '2032-07-18T12:23:34.123456000000000')
        assert_equal(np.datetime_as_string(a, unit='as'),
                     '2032-07-18T12:23:34.123456000000000000')
        # unit='auto' parameter trims trailing zero fields
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:23:34.123456', 'us'), unit='auto'),
                '2032-07-18T12:23:34.123456')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:23:34.12', 'us'), unit='auto'),
                '2032-07-18T12:23:34.120')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:23:34', 'us'), unit='auto'),
                '2032-07-18T12:23:34')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:23:00', 'us'), unit='auto'),
                '2032-07-18T12:23')
        # 'auto' doesn't split up hour and minute
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T12:00:00', 'us'), unit='auto'),
                '2032-07-18T12:00')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-18T00:00:00', 'us'), unit='auto'),
                '2032-07-18')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-07-01T00:00:00', 'us'), unit='auto'),
                '2032-07-01')
        assert_equal(np.datetime_as_string(
                np.datetime64('2032-01-01T00:00:00', 'us'), unit='auto'),
                '2032-01-01')
    @pytest.mark.skipif(not _has_pytz, reason="The pytz module is not available.")
    def test_datetime_as_string_timezone(self):
        """np.datetime_as_string with timezone='naive'/'UTC'/'local' and
        with explicit pytz timezone objects; coarser-than-hour output
        with a timezone requires casting='unsafe'."""
        # timezone='local' vs 'UTC'
        a = np.datetime64('2010-03-15T06:30', 'm')
        assert_equal(np.datetime_as_string(a),
                     '2010-03-15T06:30')
        assert_equal(np.datetime_as_string(a, timezone='naive'),
                     '2010-03-15T06:30')
        assert_equal(np.datetime_as_string(a, timezone='UTC'),
                     '2010-03-15T06:30Z')
        assert_(np.datetime_as_string(a, timezone='local') !=
                '2010-03-15T06:30')
        # `a` is during US DST, `b` is not — offsets differ by an hour
        b = np.datetime64('2010-02-15T06:30', 'm')
        assert_equal(np.datetime_as_string(a, timezone=tz('US/Central')),
                     '2010-03-15T01:30-0500')
        assert_equal(np.datetime_as_string(a, timezone=tz('US/Eastern')),
                     '2010-03-15T02:30-0400')
        assert_equal(np.datetime_as_string(a, timezone=tz('US/Pacific')),
                     '2010-03-14T23:30-0700')
        assert_equal(np.datetime_as_string(b, timezone=tz('US/Central')),
                     '2010-02-15T00:30-0600')
        assert_equal(np.datetime_as_string(b, timezone=tz('US/Eastern')),
                     '2010-02-15T01:30-0500')
        assert_equal(np.datetime_as_string(b, timezone=tz('US/Pacific')),
                     '2010-02-14T22:30-0800')
        # Dates to strings with a timezone attached is disabled by default
        assert_raises(TypeError, np.datetime_as_string, a, unit='D',
                      timezone=tz('US/Pacific'))
        # Check that we can print out the date in the specified time zone
        assert_equal(np.datetime_as_string(a, unit='D',
                     timezone=tz('US/Pacific'), casting='unsafe'),
                     '2010-03-14')
        assert_equal(np.datetime_as_string(b, unit='D',
                     timezone=tz('US/Central'), casting='unsafe'),
                     '2010-02-15')
def test_datetime_arange(self):
# With two datetimes provided as strings
a = np.arange('2010-01-05', '2010-01-10', dtype='M8[D]')
assert_equal(a.dtype, np.dtype('M8[D]'))
assert_equal(a,
np.array(['2010-01-05', '2010-01-06', '2010-01-07',
'2010-01-08', '2010-01-09'], dtype='M8[D]'))
a = np.arange('1950-02-10', '1950-02-06', -1, dtype='M8[D]')
assert_equal(a.dtype, np.dtype('M8[D]'))
assert_equal(a,
np.array(['1950-02-10', '1950-02-09', '1950-02-08',
'1950-02-07'], dtype='M8[D]'))
# Unit should be detected as months here
a = np.arange('1969-05', '1970-05', 2, dtype='M8')
assert_equal(a.dtype, np.dtype('M8[M]'))
assert_equal(a,
np.datetime64('1969-05') + np.arange(12, step=2))
# datetime, integer|timedelta works as well
# produces arange (start, start + stop) in this case
a = np.arange('1969', 18, 3, dtype='M8')
assert_equal(a.dtype, np.dtype('M8[Y]'))
assert_equal(a,
np.datetime64('1969') + np.arange(18, step=3))
a = np.arange('1969-12-19', 22, np.timedelta64(2), dtype='M8')
assert_equal(a.dtype, np.dtype('M8[D]'))
assert_equal(a,
np.datetime64('1969-12-19') + np.arange(22, step=2))
# Step of 0 is disallowed
assert_raises(ValueError, np.arange, np.datetime64('today'),
np.datetime64('today') + 3, 0)
# Promotion across nonlinear unit boundaries is disallowed
assert_raises(TypeError, np.arange, np.datetime64('2011-03-01', 'D'),
np.timedelta64(5, 'M'))
assert_raises(TypeError, np.arange,
np.datetime64('2012-02-03T14', 's'),
np.timedelta64(5, 'Y'))
def test_datetime_arange_no_dtype(self):
d = np.array('2010-01-04', dtype="M8[D]")
assert_equal(np.arange(d, d + 1), d)
assert_raises(ValueError, np.arange, d)
def test_timedelta_arange(self):
a = np.arange(3, 10, dtype='m8')
assert_equal(a.dtype, np.dtype('m8'))
assert_equal(a, np.timedelta64(0) + np.arange(3, 10))
a = np.arange(np.timedelta64(3, 's'), 10, 2, dtype='m8')
assert_equal(a.dtype, np.dtype('m8[s]'))
assert_equal(a, np.timedelta64(0, 's') + np.arange(3, 10, 2))
# Step of 0 is disallowed
assert_raises(ValueError, np.arange, np.timedelta64(0),
np.timedelta64(5), 0)
# Promotion across nonlinear unit boundaries is disallowed
assert_raises(TypeError, np.arange, np.timedelta64(0, 'D'),
np.timedelta64(5, 'M'))
assert_raises(TypeError, np.arange, np.timedelta64(0, 'Y'),
np.timedelta64(5, 'D'))
@pytest.mark.parametrize("val1, val2, expected", [
# case from gh-12092
(np.timedelta64(7, 's'),
np.timedelta64(3, 's'),
np.timedelta64(1, 's')),
# negative value cases
(np.timedelta64(3, 's'),
np.timedelta64(-2, 's'),
np.timedelta64(-1, 's')),
(np.timedelta64(-3, 's'),
np.timedelta64(2, 's'),
np.timedelta64(1, 's')),
# larger value cases
(np.timedelta64(17, 's'),
np.timedelta64(22, 's'),
np.timedelta64(17, 's')),
(np.timedelta64(22, 's'),
np.timedelta64(17, 's'),
np.timedelta64(5, 's')),
# different units
(np.timedelta64(1, 'm'),
np.timedelta64(57, 's'),
np.timedelta64(3, 's')),
(np.timedelta64(1, 'us'),
np.timedelta64(727, 'ns'),
np.timedelta64(273, 'ns')),
# NaT is propagated
(np.timedelta64('NaT'),
np.timedelta64(50, 'ns'),
np.timedelta64('NaT')),
# Y % M works
(np.timedelta64(2, 'Y'),
np.timedelta64(22, 'M'),
np.timedelta64(2, 'M')),
])
def test_timedelta_modulus(self, val1, val2, expected):
assert_equal(val1 % val2, expected)
@pytest.mark.parametrize("val1, val2", [
# years and months sometimes can't be unambiguously
(np.timedelta64(7, 'Y'),
np.timedelta64(3, 's')),
(np.timedelta64(7, 'M'),
np.timedelta64(1, 'D')),
])
def test_timedelta_modulus_error(self, val1, val2):
with assert_raises_regex(TypeError, "common metadata divisor"):
val1 % val2
def test_timedelta_modulus_div_by_zero(self):
with assert_warns(RuntimeWarning):
actual = np.timedelta64(10, 's') % np.timedelta64(0, 's')
assert_equal(actual, np.timedelta64('NaT'))
@pytest.mark.parametrize("val1, val2", [
(np.timedelta64(7, 'Y'),
15,),
(7.5,
np.timedelta64(1, 'D')),
])
def test_timedelta_modulus_type_resolution(self, val1, val2):
with assert_raises_regex(TypeError,
"'remainder' cannot use operands with types"):
val1 % val2
def test_timedelta_arange_no_dtype(self):
d = np.array(5, dtype="m8[D]")
assert_equal(np.arange(d, d + 1), d)
assert_equal(np.arange(d), np.arange(0, d))
def test_datetime_maximum_reduce(self):
a = np.array(['2010-01-02', '1999-03-14', '1833-03'], dtype='M8[D]')
assert_equal(np.maximum.reduce(a).dtype, np.dtype('M8[D]'))
assert_equal(np.maximum.reduce(a),
np.datetime64('2010-01-02'))
a = np.array([1, 4, 0, 7, 2], dtype='m8[s]')
assert_equal(np.maximum.reduce(a).dtype, np.dtype('m8[s]'))
assert_equal(np.maximum.reduce(a),
np.timedelta64(7, 's'))
    def test_datetime_busday_offset(self):
        """np.busday_offset: custom weekmasks, every roll mode, 'raise'
        on a non-business day, large forward/backward offsets, and NaT
        pass-through."""
        # Monday-only weekmask rolls onto the first/last Monday
        assert_equal(
            np.busday_offset('2011-06', 0, roll='forward', weekmask='Mon'),
            np.datetime64('2011-06-06'))
        assert_equal(
            np.busday_offset('2011-07', -1, roll='forward', weekmask='Mon'),
            np.datetime64('2011-06-27'))
        assert_equal(
            np.busday_offset('2011-07', -1, roll='forward', weekmask='Mon'),
            np.datetime64('2011-06-27'))
        # Roll modes on a month boundary ('2010-08' parses as 2010-08-01,
        # a Sunday; the prior Friday is 2010-07-30, the next Monday 08-02)
        assert_equal(np.busday_offset('2010-08', 0, roll='backward'),
                     np.datetime64('2010-07-30'))
        assert_equal(np.busday_offset('2010-08', 0, roll='preceding'),
                     np.datetime64('2010-07-30'))
        assert_equal(np.busday_offset('2010-08', 0, roll='modifiedpreceding'),
                     np.datetime64('2010-08-02'))
        assert_equal(np.busday_offset('2010-08', 0, roll='modifiedfollowing'),
                     np.datetime64('2010-08-02'))
        assert_equal(np.busday_offset('2010-08', 0, roll='forward'),
                     np.datetime64('2010-08-02'))
        assert_equal(np.busday_offset('2010-08', 0, roll='following'),
                     np.datetime64('2010-08-02'))
        # 'modified*' rolls reverse direction at a month boundary
        assert_equal(np.busday_offset('2010-10-30', 0, roll='following'),
                     np.datetime64('2010-11-01'))
        assert_equal(
            np.busday_offset('2010-10-30', 0, roll='modifiedfollowing'),
            np.datetime64('2010-10-29'))
        assert_equal(
            np.busday_offset('2010-10-30', 0, roll='modifiedpreceding'),
            np.datetime64('2010-10-29'))
        assert_equal(
            np.busday_offset('2010-10-16', 0, roll='modifiedfollowing'),
            np.datetime64('2010-10-18'))
        assert_equal(
            np.busday_offset('2010-10-16', 0, roll='modifiedpreceding'),
            np.datetime64('2010-10-15'))
        # Default roll='raise' rejects a non-business-day start
        assert_raises(ValueError, np.busday_offset, '2011-06-04', 0)
        # Larger offsets, forwards and backwards
        assert_equal(np.busday_offset('2006-02-01', 25),
                     np.datetime64('2006-03-08'))
        assert_equal(np.busday_offset('2006-03-08', -25),
                     np.datetime64('2006-02-01'))
        # Weekend-only weekmask
        assert_equal(np.busday_offset('2007-02-25', 11, weekmask='SatSun'),
                     np.datetime64('2007-04-07'))
        assert_equal(np.busday_offset('2007-04-07', -11, weekmask='SatSun'),
                     np.datetime64('2007-02-25'))
        # NaT stays NaT regardless of the roll mode
        assert_equal(np.busday_offset(np.datetime64('NaT'), 1, roll='nat'),
                     np.datetime64('NaT'))
        assert_equal(np.busday_offset(np.datetime64('NaT'), 1, roll='following'),
                     np.datetime64('NaT'))
        assert_equal(np.busday_offset(np.datetime64('NaT'), 1, roll='preceding'),
                     np.datetime64('NaT'))
def test_datetime_busdaycalendar(self):
bdd = np.busdaycalendar(
holidays=['NaT', '2011-01-17', '2011-03-06', 'NaT',
'2011-12-26', '2011-05-30', '2011-01-17'])
assert_equal(bdd.holidays,
np.array(['2011-01-17', '2011-05-30', '2011-12-26'], dtype='M8'))
assert_equal(bdd.weekmask, np.array([1, 1, 1, 1, 1, 0, 0], dtype='?'))
bdd = np.busdaycalendar(weekmask="Sun TueWed Thu\tFri")
assert_equal(bdd.weekmask, np.array([0, 1, 1, 1, 1, 0, 1], dtype='?'))
bdd = np.busdaycalendar(weekmask="0011001")
assert_equal(bdd.weekmask, np.array([0, 0, 1, 1, 0, 0, 1], dtype='?'))
bdd = np.busdaycalendar(weekmask="Mon Tue")
assert_equal(bdd.weekmask, np.array([1, 1, 0, 0, 0, 0, 0], dtype='?'))
assert_raises(ValueError, np.busdaycalendar, weekmask=[0, 0, 0, 0, 0, 0, 0])
assert_raises(ValueError, np.busdaycalendar, weekmask="satsun")
assert_raises(ValueError, np.busdaycalendar, weekmask="")
assert_raises(ValueError, np.busdaycalendar, weekmask="Mon Tue We")
assert_raises(ValueError, np.busdaycalendar, weekmask="Max")
assert_raises(ValueError, np.busdaycalendar, weekmask="Monday Tue")
def test_datetime_busday_holidays_offset(self):
assert_equal(
np.busday_offset('2011-11-10', 1, holidays=['2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-04', 5, holidays=['2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-10', 5, holidays=['2011-11-11']),
np.datetime64('2011-11-18'))
assert_equal(
np.busday_offset('2011-11-14', -1, holidays=['2011-11-11']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-18', -5, holidays=['2011-11-11']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-14', -5, holidays=['2011-11-11']),
np.datetime64('2011-11-04'))
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-11-11', '2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-11-11', '2011-11-11']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-11-11', 'NaT']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['NaT', '2011-11-11']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-11-11', '2011-11-24']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-11-11', '2011-11-24']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-10-10', '2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-10-10', '2011-11-11']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-10-10', '2011-11-11', '2011-11-24']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-10-10', '2011-11-11', '2011-11-24']),
np.datetime64('2011-11-10'))
holidays = ['2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21',
'2011-12-26', '2012-01-02']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
assert_equal(
np.busday_offset('2011-10-03', 4, holidays=holidays),
np.busday_offset('2011-10-03', 4))
assert_equal(
np.busday_offset('2011-10-03', 5, holidays=holidays),
np.busday_offset('2011-10-03', 5 + 1))
assert_equal(
np.busday_offset('2011-10-03', 27, holidays=holidays),
np.busday_offset('2011-10-03', 27 + 1))
assert_equal(
np.busday_offset('2011-10-03', 28, holidays=holidays),
np.busday_offset('2011-10-03', 28 + 2))
assert_equal(
np.busday_offset('2011-10-03', 35, holidays=holidays),
np.busday_offset('2011-10-03', 35 + 2))
assert_equal(
np.busday_offset('2011-10-03', 36, holidays=holidays),
np.busday_offset('2011-10-03', 36 + 3))
assert_equal(
np.busday_offset('2011-10-03', 56, holidays=holidays),
np.busday_offset('2011-10-03', 56 + 3))
assert_equal(
np.busday_offset('2011-10-03', 57, holidays=holidays),
np.busday_offset('2011-10-03', 57 + 4))
assert_equal(
np.busday_offset('2011-10-03', 60, holidays=holidays),
np.busday_offset('2011-10-03', 60 + 4))
assert_equal(
np.busday_offset('2011-10-03', 61, holidays=holidays),
np.busday_offset('2011-10-03', 61 + 5))
assert_equal(
np.busday_offset('2011-10-03', 61, busdaycal=bdd),
np.busday_offset('2011-10-03', 61 + 5))
assert_equal(
np.busday_offset('2012-01-03', -1, holidays=holidays),
np.busday_offset('2012-01-03', -1 - 1))
assert_equal(
np.busday_offset('2012-01-03', -4, holidays=holidays),
np.busday_offset('2012-01-03', -4 - 1))
assert_equal(
np.busday_offset('2012-01-03', -5, holidays=holidays),
np.busday_offset('2012-01-03', -5 - 2))
assert_equal(
np.busday_offset('2012-01-03', -25, holidays=holidays),
np.busday_offset('2012-01-03', -25 - 2))
assert_equal(
np.busday_offset('2012-01-03', -26, holidays=holidays),
np.busday_offset('2012-01-03', -26 - 3))
assert_equal(
np.busday_offset('2012-01-03', -33, holidays=holidays),
np.busday_offset('2012-01-03', -33 - 3))
assert_equal(
np.busday_offset('2012-01-03', -34, holidays=holidays),
np.busday_offset('2012-01-03', -34 - 4))
assert_equal(
np.busday_offset('2012-01-03', -56, holidays=holidays),
np.busday_offset('2012-01-03', -56 - 4))
assert_equal(
np.busday_offset('2012-01-03', -57, holidays=holidays),
np.busday_offset('2012-01-03', -57 - 5))
assert_equal(
np.busday_offset('2012-01-03', -57, busdaycal=bdd),
np.busday_offset('2012-01-03', -57 - 5))
assert_raises(ValueError, np.busday_offset, '2012-01-03', -15,
weekmask='1111100', busdaycal=bdd)
assert_raises(ValueError, np.busday_offset, '2012-01-03', -15,
holidays=holidays, busdaycal=bdd)
# Roll with the holidays
assert_equal(
np.busday_offset('2011-12-25', 0,
roll='forward', holidays=holidays),
np.datetime64('2011-12-27'))
assert_equal(
np.busday_offset('2011-12-26', 0,
roll='forward', holidays=holidays),
np.datetime64('2011-12-27'))
assert_equal(
np.busday_offset('2011-12-26', 0,
roll='backward', holidays=holidays),
np.datetime64('2011-12-23'))
assert_equal(
np.busday_offset('2012-02-27', 0,
roll='modifiedfollowing',
holidays=['2012-02-27', '2012-02-26', '2012-02-28',
'2012-03-01', '2012-02-29']),
np.datetime64('2012-02-24'))
assert_equal(
np.busday_offset('2012-03-06', 0,
roll='modifiedpreceding',
holidays=['2012-03-02', '2012-03-03', '2012-03-01',
'2012-03-05', '2012-03-07', '2012-03-06']),
np.datetime64('2012-03-08'))
def test_datetime_busday_holidays_count(self):
holidays = ['2011-01-01', '2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21', '2011-01-17',
'2011-12-26', '2012-01-02', '2011-02-21', '2011-05-30',
'2011-07-01', '2011-07-04', '2011-09-05', '2011-10-10']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
# Validate against busday_offset broadcast against
# a range of offsets
dates = np.busday_offset('2011-01-01', np.arange(366),
roll='forward', busdaycal=bdd)
assert_equal(np.busday_count('2011-01-01', dates, busdaycal=bdd),
np.arange(366))
# Returns negative value when reversed
assert_equal(np.busday_count(dates, '2011-01-01', busdaycal=bdd),
-np.arange(366))
dates = np.busday_offset('2011-12-31', -np.arange(366),
roll='forward', busdaycal=bdd)
assert_equal(np.busday_count(dates, '2011-12-31', busdaycal=bdd),
np.arange(366))
# Returns negative value when reversed
assert_equal(np.busday_count('2011-12-31', dates, busdaycal=bdd),
-np.arange(366))
# Can't supply both a weekmask/holidays and busdaycal
assert_raises(ValueError, np.busday_offset, '2012-01-03', '2012-02-03',
weekmask='1111100', busdaycal=bdd)
assert_raises(ValueError, np.busday_offset, '2012-01-03', '2012-02-03',
holidays=holidays, busdaycal=bdd)
assert_equal(np.busday_count('2011-03', '2011-04', weekmask='Mon'), 4)
assert_equal(np.busday_count('2011-04', '2011-03', weekmask='Mon'), -4)
def test_datetime_is_busday(self):
holidays = ['2011-01-01', '2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21', '2011-01-17',
'2011-12-26', '2012-01-02', '2011-02-21', '2011-05-30',
'2011-07-01', '2011-07-04', '2011-09-05', '2011-10-10',
'NaT']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
assert_equal(np.is_busday('2011-01-01'), False)
assert_equal(np.is_busday('2011-01-02'), False)
assert_equal(np.is_busday('2011-01-03'), True)
assert_equal(np.is_busday(holidays, busdaycal=bdd),
np.zeros(len(holidays), dtype='?'))
def test_datetime_y2038(self):
a = np.datetime64('2038-01-19T03:14:07')
assert_equal(a.view(np.int64), 2**31 - 1)
a = np.datetime64('2038-01-19T03:14:08')
assert_equal(a.view(np.int64), 2**31)
with assert_warns(DeprecationWarning):
a = np.datetime64('2038-01-19T04:14:07+0100')
assert_equal(a.view(np.int64), 2**31 - 1)
with assert_warns(DeprecationWarning):
a = np.datetime64('2038-01-19T04:14:08+0100')
assert_equal(a.view(np.int64), 2**31)
a = np.datetime64('2038-01-20T13:21:14')
assert_equal(str(a), '2038-01-20T13:21:14')
def test_isnat(self):
assert_(np.isnat(np.datetime64('NaT', 'ms')))
assert_(np.isnat(np.datetime64('NaT', 'ns')))
assert_(not np.isnat(np.datetime64('2038-01-19T03:14:07')))
assert_(np.isnat(np.timedelta64('NaT', "ms")))
assert_(not np.isnat(np.timedelta64(34, "ms")))
res = np.array([False, False, True])
for unit in ['Y', 'M', 'W', 'D',
'h', 'm', 's', 'ms', 'us',
'ns', 'ps', 'fs', 'as']:
arr = np.array([123, -321, "NaT"], dtype='<datetime64[%s]' % unit)
assert_equal(np.isnat(arr), res)
arr = np.array([123, -321, "NaT"], dtype='>datetime64[%s]' % unit)
assert_equal(np.isnat(arr), res)
arr = np.array([123, -321, "NaT"], dtype='<timedelta64[%s]' % unit)
assert_equal(np.isnat(arr), res)
arr = np.array([123, -321, "NaT"], dtype='>timedelta64[%s]' % unit)
assert_equal(np.isnat(arr), res)
def test_isnat_error(self):
for t in np.typecodes["All"]:
if t in np.typecodes["Datetime"]:
continue
assert_raises(TypeError, np.isnat, np.zeros(10, t))
def test_isfinite_scalar(self):
assert_(not np.isfinite(np.datetime64('NaT', 'ms')))
assert_(not np.isfinite(np.datetime64('NaT', 'ns')))
assert_(np.isfinite(np.datetime64('2038-01-19T03:14:07')))
assert_(not np.isfinite(np.timedelta64('NaT', "ms")))
assert_(np.isfinite(np.timedelta64(34, "ms")))
    @pytest.mark.parametrize('unit', ['Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms',
                                      'us', 'ns', 'ps', 'fs', 'as'])
    @pytest.mark.parametrize('dstr', ['<datetime64[%s]', '>datetime64[%s]',
                                      '<timedelta64[%s]', '>timedelta64[%s]'])
    def test_isfinite_isinf_isnan_units(self, unit, dstr):
        """isfinite/isinf/isnan across every unit and both byte orders,
        for both datetime64 and timedelta64: NaT is non-finite and
        NaN-like, and nothing is ever infinite."""
        arr_val = [123, -321, "NaT"]
        arr = np.array(arr_val, dtype= dstr % unit)
        # Expected masks: finite for the numeric entries only, NaN-like
        # for the NaT entry, and never infinite.
        pos = np.array([True, True, False])
        neg = np.array([False, False, True])
        false = np.array([False, False, False])
        assert_equal(np.isfinite(arr), pos)
        assert_equal(np.isinf(arr), false)
        assert_equal(np.isnan(arr), neg)
def test_assert_equal(self):
assert_raises(AssertionError, assert_equal,
np.datetime64('nat'), np.timedelta64('nat'))
def test_corecursive_input(self):
a, b = [], []
a.append(b)
b.append(a)
obj_arr = np.array([None])
obj_arr[0] = a
assert_raises(ValueError, obj_arr.astype, 'M8')
assert_raises(ValueError, obj_arr.astype, 'm8')
    @pytest.mark.parametrize("shape", [(), (1,)])
    def test_discovery_from_object_array(self, shape):
        """Datetime unit discovery when casting object- and bytes-dtype
        arrays of date strings to an unsized "M8" dtype."""
        arr = np.array("2020-10-10", dtype=object).reshape(shape)
        res = np.array("2020-10-10", dtype="M8").reshape(shape)
        # Discovery from the date string picks day resolution.
        assert res.dtype == np.dtype("M8[D]")
        assert_equal(arr.astype("M8"), res)
        # Byte strings behave the same as str objects.
        arr[...] = np.bytes_("2020-10-10")
        assert_equal(arr.astype("M8"), res)
        # And so does an actual "S" (bytes) dtype array.
        arr = arr.astype("S")
        assert_equal(arr.astype("S").astype("M8"), res)
    @pytest.mark.parametrize("time_unit", [
        "Y", "M", "W", "D", "h", "m", "s", "ms", "us", "ns", "ps", "fs", "as",
        # multiplied units
        "10D", "2M",
    ])
    def test_limit_symmetry(self, time_unit):
        """The extreme representable datetimes must still compare
        correctly against the epoch for every unit."""
        epoch = np.datetime64(0, time_unit)
        latest = np.datetime64(np.iinfo(np.int64).max, time_unit)
        earliest = np.datetime64(-np.iinfo(np.int64).max, time_unit)
        # Neither limit may have overflowed/wrapped.
        assert earliest < epoch < latest
    @pytest.mark.parametrize("time_unit", [
        "Y", "M",
        pytest.param("W", marks=pytest.mark.xfail(reason="gh-13197")),
        "D", "h", "m",
        "s", "ms", "us", "ns", "ps", "fs", "as",
        pytest.param("10D", marks=pytest.mark.xfail(reason="similar to gh-13197")),
    ])
    @pytest.mark.parametrize("sign", [-1, 1])
    def test_limit_str_roundtrip(self, time_unit, sign):
        """The extreme representable datetimes must survive a
        str() -> parse round trip at the same unit."""
        # Parse back at the same unit so no precision is lost.
        limit = np.datetime64(np.iinfo(np.int64).max * sign, time_unit)
        limit_via_str = np.datetime64(str(limit), time_unit)
        assert limit_via_str == limit
class TestDateTimeData:
    """Tests for np.datetime_data and the unit metadata it reports."""

    def test_basic(self):
        """A date-string array is discovered as day units with count 1."""
        arr = np.array(['1980-03-23'], dtype=np.datetime64)
        assert_equal(np.datetime_data(arr.dtype), ('D', 1))

    def test_bytes(self):
        """Units may be given as bytes, either as a (unit, count) tuple
        or a single '5ms'-style token."""
        for unit_spec in ((b'ms', 5), b'5ms'):
            stamp = np.datetime64('2000', unit_spec)
            assert np.datetime_data(stamp.dtype) == ('ms', 5)

    def test_non_ascii(self):
        """The micro sign 'μ' is accepted as a synonym for 'u'."""
        for unit_spec in (('μs', 5), '5μs'):
            stamp = np.datetime64('2000', unit_spec)
            assert np.datetime_data(stamp.dtype) == ('us', 5)
| true | true |
f7250920aa8cce465657186e7f5d41dd1494786a | 2,066 | py | Python | azure-mgmt-datamigration/azure/mgmt/datamigration/models/project_file_properties_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2019-05-10T19:58:09.000Z | 2019-05-10T19:58:09.000Z | azure-mgmt-datamigration/azure/mgmt/datamigration/models/project_file_properties_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2018-11-29T14:46:42.000Z | 2018-11-29T14:46:42.000Z | azure-mgmt-datamigration/azure/mgmt/datamigration/models/project_file_properties_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-07-28T14:50:54.000Z | 2021-07-28T14:50:54.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ProjectFileProperties(Model):
    """Base class for file properties.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param extension: Optional File extension. If submitted it should not have
     a leading period and must match the extension from filePath.
    :type extension: str
    :param file_path: Relative path of this file resource. This property can
     be set when creating or updating the file resource.
    :type file_path: str
    :ivar last_modified: Modification DateTime.
    :vartype last_modified: datetime
    :param media_type: File content type. This property can be modified to
     reflect the file content type.
    :type media_type: str
    :ivar size: File size.
    :vartype size: long
    """

    # Server-populated fields that the client may not set.
    _validation = {
        'last_modified': {'readonly': True},
        'size': {'readonly': True},
    }

    # Maps Python attribute names to wire-format keys and msrest types.
    _attribute_map = {
        'extension': {'key': 'extension', 'type': 'str'},
        'file_path': {'key': 'filePath', 'type': 'str'},
        'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
        'media_type': {'key': 'mediaType', 'type': 'str'},
        'size': {'key': 'size', 'type': 'long'},
    }

    def __init__(self, *, extension: str=None, file_path: str=None, media_type: str=None, **kwargs) -> None:
        super(ProjectFileProperties, self).__init__(**kwargs)
        self.extension = extension
        self.file_path = file_path
        # Read-only fields start as None and are filled in by the server.
        self.last_modified = None
        self.media_type = media_type
        self.size = None
| 36.892857 | 108 | 0.616167 |
from msrest.serialization import Model
class ProjectFileProperties(Model):
    """Base class for file properties.

    ``last_modified`` and ``size`` are populated by the server only and
    are ignored in requests.
    """

    # Server-populated fields that the client may not set.
    _validation = {
        'last_modified': {'readonly': True},
        'size': {'readonly': True},
    }

    # Maps Python attribute names to wire-format keys and msrest types.
    _attribute_map = {
        'extension': {'key': 'extension', 'type': 'str'},
        'file_path': {'key': 'filePath', 'type': 'str'},
        'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
        'media_type': {'key': 'mediaType', 'type': 'str'},
        'size': {'key': 'size', 'type': 'long'},
    }

    def __init__(self, *, extension: str=None, file_path: str=None, media_type: str=None, **kwargs) -> None:
        super(ProjectFileProperties, self).__init__(**kwargs)
        self.extension = extension
        self.file_path = file_path
        # Read-only fields start as None and are filled in by the server.
        self.last_modified = None
        self.media_type = media_type
        self.size = None
| true | true |
f72509bc9ac00b2ac21743261ec417182f6782d1 | 6,683 | py | Python | code/gtp/gtp.py | Go-Trojans/trojan-go | bf1160120e79fbb1cdd37fa08f17160fb133aa40 | [
"Artistic-1.0-cl8"
] | null | null | null | code/gtp/gtp.py | Go-Trojans/trojan-go | bf1160120e79fbb1cdd37fa08f17160fb133aa40 | [
"Artistic-1.0-cl8"
] | null | null | null | code/gtp/gtp.py | Go-Trojans/trojan-go | bf1160120e79fbb1cdd37fa08f17160fb133aa40 | [
"Artistic-1.0-cl8"
] | 1 | 2021-08-28T20:31:01.000Z | 2021-08-28T20:31:01.000Z | # GTP for Trojan-go
# Helper file
import re
def pre_engine(s):
    """Sanitize a command line bound for the engine: strip characters
    outside printable ASCII/tab/newline, drop any trailing '#' comment,
    and expand tabs to single spaces."""
    cleaned = re.sub("[^\t\n -~]", "", s)
    cleaned = cleaned.split("#")[0]
    return cleaned.replace("\t", " ")
def pre_controller(s):
    """Sanitize a line bound for the controller: strip characters outside
    printable ASCII/tab/newline and expand tabs to spaces.  Unlike
    pre_engine, '#' comments are preserved."""
    return re.sub("[^\t\n -~]", "", s).replace("\t", " ")
def gtp_boolean(b):
    """Format a Python truth value as the GTP boolean word."""
    if b:
        return "true"
    return "false"
def gtp_list(l):
    """Render a sequence of response items as one newline-joined GTP
    multi-line response body."""
    separator = "\n"
    return separator.join(l)
def gtp_color(color):
    """Map an internal color constant (BLACK/WHITE) to its single-letter
    GTP spelling; raises KeyError for anything else."""
    letter_of = {BLACK: "B", WHITE: "W"}
    return letter_of[color]
def gtp_vertex(vertex):
    """Format a vertex for GTP output.

    ``vertex`` is PASS, RESIGN, or a 1-based ``(column, row)`` tuple.
    Column letters follow the Go board convention of skipping ``I``.

    Bug fix: the uppercase letter table was garbled past ``T``
    ("...TYVWYZ"); it now mirrors the lowercase table used by
    ``parse_vertex`` so round-tripping is consistent for columns > 19.
    """
    if vertex == PASS:
        return "pass"
    elif vertex == RESIGN:
        return "resign"
    else:
        x, y = vertex
        # Skip 'I' per standard Go coordinate labeling.
        return "{}{}".format("ABCDEFGHJKLMNOPQRSTUVWXYZ"[x - 1], y)
def gtp_move(color, vertex):
    """Format a GTP move string, e.g. "B D4"."""
    return "{} {}".format(gtp_color(color), gtp_vertex(vertex))
def parse_message(message):
    """Split a raw GTP command line into (message_id, command, arguments).

    The optional numeric id, the command word, and the argument string
    are each None when absent.  The line is first sanitized with
    pre_engine.
    """
    message = pre_engine(message).strip()
    first, rest = (message.split(" ", 1) + [None])[:2]
    if not first.isdigit():
        # No numeric id: the first token is the command itself.
        return None, first, rest
    message_id = int(first)
    if rest is None:
        return message_id, None, None
    command, arguments = (rest.split(" ", 1) + [None])[:2]
    return message_id, command, arguments
# Point/color constants used throughout the module.
WHITE = -1
BLACK = +1
EMPTY = 0
# Sentinel vertex values: PASS reuses (0, 0) since real board
# coordinates are 1-based; RESIGN is a distinct marker string.
PASS = (0, 0)
RESIGN = "resign"
def parse_color(color):
    """Parse a GTP color word ("b"/"black"/"w"/"white", any case) into
    BLACK or WHITE; returns False when unrecognized."""
    lowered = color.lower()
    if lowered in ("b", "black"):
        return BLACK
    if lowered in ("w", "white"):
        return WHITE
    return False
def parse_vertex(vertex_string):
    """Parse a GTP vertex ("pass" or e.g. "d4") into PASS or a 1-based
    (x, y) tuple; returns False on any malformed input."""
    if vertex_string is None:
        return False
    if vertex_string.lower() == "pass":
        return PASS
    if len(vertex_string) <= 1:
        return False
    # Column letters skip 'i'; a miss makes find() return -1, i.e. x == 0.
    column = "abcdefghjklmnopqrstuvwxyz".find(vertex_string[0].lower()) + 1
    if column == 0 or not vertex_string[1:].isdigit():
        return False
    return (column, int(vertex_string[1:]))
def parse_move(move_string):
    """Parse "color vertex" (e.g. "B D4") into a (color, vertex) tuple,
    or False when either part fails to parse."""
    color_string, vertex_string = (move_string.split(" ") + [None])[:2]
    color = parse_color(color_string)
    if color is False:
        return False
    vertex = parse_vertex(vertex_string)
    if vertex is False:
        return False
    return color, vertex
# Board sizes accepted by cmd_boardsize.
MIN_BOARD_SIZE = 7
MAX_BOARD_SIZE = 19
def format_success(message_id, response=None):
    """Build a GTP success reply: "=[id][ response]" followed by the
    blank line that terminates every GTP response."""
    body = "" if response is None else " {}".format(response)
    if message_id:
        return "={}{}\n\n".format(message_id, body)
    return "={}\n\n".format(body)
def format_error(message_id, response):
    """Build a GTP failure reply: "?[id][ message]" plus the blank-line
    terminator; an empty message yields just the '?' marker."""
    body = " {}".format(response) if response else response
    prefix = "?{}".format(message_id) if message_id else "?"
    return "{}{}\n\n".format(prefix, body)
# Not used
class Engine(object):
    """A GTP engine shell.

    Parses incoming GTP command lines and dispatches them by reflection
    to ``cmd_<name>`` methods, delegating all game logic to the supplied
    game object (anything with the MinimalGame interface).
    """
    def __init__(self, game_obj, name="gtp (python library)", version="0.2"):
        self.size = 19
        self.komi = 6.5
        self._game = game_obj
        self._game.clear()
        self._name = name
        self._version = version
        # Set to True by cmd_quit; the surrounding I/O loop should poll it.
        self.disconnect = False
        # Supported commands: every cmd_* method, with the prefix stripped.
        self.known_commands = [
            field[4:] for field in dir(self) if field.startswith("cmd_")]
    def send(self, message):
        """Handle one GTP command line and return the formatted GTP
        response string (success or error)."""
        message_id, command, arguments = parse_message(message)
        if command in self.known_commands:
            try:
                return format_success(
                    message_id, getattr(self, "cmd_" + command)(arguments))
            except ValueError as exception:
                # Handlers signal protocol-level failures via ValueError.
                return format_error(message_id, exception.args[0])
        else:
            return format_error(message_id, "unknown command")
    def vertex_in_range(self, vertex):
        """Return True if ``vertex`` is PASS or lies on the current board."""
        if vertex == PASS:
            return True
        if 1 <= vertex[0] <= self.size and 1 <= vertex[1] <= self.size:
            return True
        else:
            return False
    # commands
    def cmd_protocol_version(self, arguments):
        # This engine speaks GTP protocol version 2.
        return 2
    def cmd_name(self, arguments):
        return self._name
    def cmd_version(self, arguments):
        return self._version
    def cmd_known_command(self, arguments):
        return gtp_boolean(arguments in self.known_commands)
    def cmd_list_commands(self, arguments):
        return gtp_list(self.known_commands)
    def cmd_quit(self, arguments):
        self.disconnect = True
    def cmd_boardsize(self, arguments):
        """Resize the board; raises ValueError for non-numeric or
        out-of-range sizes."""
        if arguments.isdigit():
            size = int(arguments)
            if MIN_BOARD_SIZE <= size <= MAX_BOARD_SIZE:
                self.size = size
                self._game.set_size(size)
            else:
                raise ValueError("unacceptable size")
        else:
            raise ValueError("non digit size")
    def cmd_clear_board(self, arguments):
        self._game.clear()
    def cmd_komi(self, arguments):
        try:
            komi = float(arguments)
            self.komi = komi
            self._game.set_komi(komi)
        except ValueError:
            # Re-raise with the GTP-conventional error message.
            raise ValueError("syntax error")
    def cmd_play(self, arguments):
        """Play a move given as "color vertex"; raises ValueError when
        the move cannot be parsed or is rejected by the game."""
        move = parse_move(arguments)
        if move:
            color, vertex = move
            if self.vertex_in_range(vertex):
                if self._game.make_move(color, vertex):
                    return
        raise ValueError("illegal move")
    def cmd_genmove(self, arguments):
        """Ask the game object for a move for the given color, play it,
        and return its GTP vertex string."""
        c = parse_color(arguments)
        if c:
            move = self._game.get_move(c)
            self._game.make_move(c, move)
            return gtp_vertex(move)
        else:
            raise ValueError("unknown player: {}".format(arguments))
# Not used
class MinimalGame(object):
    """A bare-bones game state for exercising the GTP Engine.

    Only stone placement is tracked: there is no capture, ko, or
    suicide checking, and ``get_move`` always passes.
    """
    def __init__(self, size=19, komi=6.5):
        self.size = size
        # Bug fix: the komi argument was previously ignored and the
        # attribute hard-coded to 6.5.
        self.komi = komi
        self.board = [EMPTY] * (self.size * self.size)
    def _flatten(self, vertex):
        """Convert a 1-based (x, y) vertex into a flat board index."""
        (x, y) = vertex
        return (x - 1) * self.size + (y - 1)
    def clear(self):
        """Reset every point on the board to EMPTY."""
        self.board = [EMPTY] * (self.size * self.size)
    def make_move(self, color, vertex):
        """Place a stone and return True on success.

        The only legality check is that the point is empty; a pass
        always succeeds (and is a no-op).
        """
        if vertex == PASS:
            return True  # noop
        idx = self._flatten(vertex)
        if self.board[idx] != EMPTY:
            return False
        self.board[idx] = color
        return True
    def set_size(self, n):
        """Resize the board and clear it."""
        self.size = n
        self.clear()
    def set_komi(self, k):
        self.komi = k
    def get_move(self, color):
        # pass every time. At least it's legal
        return (0, 0)
| 25.029963 | 77 | 0.572497 |
import re
def pre_engine(s):
    """Sanitize a line bound for the engine: strip non-printable
    characters, drop any trailing '#' comment, expand tabs."""
    s = re.sub("[^\t\n -~]", "", s)
    s = s.split("#")[0]
    s = s.replace("\t", " ")
    return s
def pre_controller(s):
    """Sanitize a line bound for the controller: strip non-printable
    characters and expand tabs; '#' comments are preserved."""
    s = re.sub("[^\t\n -~]", "", s)
    s = s.replace("\t", " ")
    return s
def gtp_boolean(b):
    """Format a Python truth value as the GTP boolean word."""
    return "true" if b else "false"
def gtp_list(l):
    """Join response items with newlines for GTP multi-line output."""
    return "\n".join(l)
def gtp_color(color):
    """Map BLACK/WHITE to the single-letter GTP color spelling."""
    return {BLACK: "B", WHITE: "W"}[color]
def gtp_vertex(vertex):
    """Format a vertex (PASS, RESIGN, or 1-based (column, row)) for GTP.

    Bug fix: the uppercase letter table was garbled past ``T``
    ("...TYVWYZ"); it now mirrors the lowercase table in
    ``parse_vertex`` (Go coordinates skip only ``I``).
    """
    if vertex == PASS:
        return "pass"
    elif vertex == RESIGN:
        return "resign"
    else:
        x, y = vertex
        return "{}{}".format("ABCDEFGHJKLMNOPQRSTUVWXYZ"[x - 1], y)
def gtp_move(color, vertex):
    """Format a GTP move string, e.g. "B D4"."""
    return " ".join([gtp_color(color), gtp_vertex(vertex)])
def parse_message(message):
    """Split a raw GTP line into (message_id, command, arguments); each
    part is None when absent."""
    message = pre_engine(message).strip()
    first, rest = (message.split(" ", 1) + [None])[:2]
    if first.isdigit():
        # Leading token is a numeric command id; the command follows it.
        message_id = int(first)
        if rest is not None:
            command, arguments = (rest.split(" ", 1) + [None])[:2]
        else:
            command, arguments = None, None
    else:
        message_id = None
        command, arguments = first, rest
    return message_id, command, arguments
# Point/color constants used throughout the module.
WHITE = -1
BLACK = +1
EMPTY = 0
# Sentinel vertex values (real board coordinates are 1-based).
PASS = (0, 0)
RESIGN = "resign"
def parse_color(color):
    """Parse a GTP color word (any case) into BLACK or WHITE; returns
    False when unrecognized."""
    if color.lower() in ["b", "black"]:
        return BLACK
    elif color.lower() in ["w", "white"]:
        return WHITE
    else:
        return False
def parse_vertex(vertex_string):
    """Parse a GTP vertex ("pass" or e.g. "d4") into PASS or a 1-based
    (x, y) tuple; returns False on malformed input."""
    if vertex_string is None:
        return False
    elif vertex_string.lower() == "pass":
        return PASS
    elif len(vertex_string) > 1:
        # Column letters skip 'i'; find() yields -1 -> x == 0 on a miss.
        x = "abcdefghjklmnopqrstuvwxyz".find(vertex_string[0].lower()) + 1
        if x == 0:
            return False
        if vertex_string[1:].isdigit():
            y = int(vertex_string[1:])
        else:
            return False
    else:
        return False
    return (x, y)
def parse_move(move_string):
    """Parse "color vertex" into a (color, vertex) tuple, or False when
    either part fails to parse."""
    color_string, vertex_string = (move_string.split(" ") + [None])[:2]
    color = parse_color(color_string)
    if color is False:
        return False
    vertex = parse_vertex(vertex_string)
    if vertex is False:
        return False
    return color, vertex
# Board sizes accepted by cmd_boardsize.
MIN_BOARD_SIZE = 7
MAX_BOARD_SIZE = 19
def format_success(message_id, response=None):
    """Build a GTP success reply ("=[id][ response]" plus the blank-line
    terminator)."""
    if response is None:
        response = ""
    else:
        response = " {}".format(response)
    if message_id:
        return "={}{}\n\n".format(message_id, response)
    else:
        return "={}\n\n".format(response)
def format_error(message_id, response):
    """Build a GTP failure reply ("?[id][ message]" plus the blank-line
    terminator)."""
    if response:
        response = " {}".format(response)
    if message_id:
        return "?{}{}\n\n".format(message_id, response)
    else:
        return "?{}\n\n".format(response)
class Engine(object):
    """A GTP engine shell: parses incoming GTP commands and dispatches
    them by reflection to ``cmd_<name>`` methods, delegating game logic
    to the supplied game object."""
    def __init__(self, game_obj, name="gtp (python library)", version="0.2"):
        self.size = 19
        self.komi = 6.5
        self._game = game_obj
        self._game.clear()
        self._name = name
        self._version = version
        # Set by cmd_quit; the surrounding I/O loop should poll it.
        self.disconnect = False
        # Supported commands: every cmd_* method, prefix stripped.
        self.known_commands = [
            field[4:] for field in dir(self) if field.startswith("cmd_")]
    def send(self, message):
        """Handle one GTP command line and return the formatted reply."""
        message_id, command, arguments = parse_message(message)
        if command in self.known_commands:
            try:
                return format_success(
                    message_id, getattr(self, "cmd_" + command)(arguments))
            except ValueError as exception:
                # Handlers signal protocol failures via ValueError.
                return format_error(message_id, exception.args[0])
        else:
            return format_error(message_id, "unknown command")
    def vertex_in_range(self, vertex):
        """Return True if ``vertex`` is PASS or lies on the board."""
        if vertex == PASS:
            return True
        if 1 <= vertex[0] <= self.size and 1 <= vertex[1] <= self.size:
            return True
        else:
            return False
    def cmd_protocol_version(self, arguments):
        # This engine speaks GTP protocol version 2.
        return 2
    def cmd_name(self, arguments):
        return self._name
    def cmd_version(self, arguments):
        return self._version
    def cmd_known_command(self, arguments):
        return gtp_boolean(arguments in self.known_commands)
    def cmd_list_commands(self, arguments):
        return gtp_list(self.known_commands)
    def cmd_quit(self, arguments):
        self.disconnect = True
    def cmd_boardsize(self, arguments):
        """Resize the board; raises ValueError for bad sizes."""
        if arguments.isdigit():
            size = int(arguments)
            if MIN_BOARD_SIZE <= size <= MAX_BOARD_SIZE:
                self.size = size
                self._game.set_size(size)
            else:
                raise ValueError("unacceptable size")
        else:
            raise ValueError("non digit size")
    def cmd_clear_board(self, arguments):
        self._game.clear()
    def cmd_komi(self, arguments):
        try:
            komi = float(arguments)
            self.komi = komi
            self._game.set_komi(komi)
        except ValueError:
            # Re-raise with the GTP-conventional error message.
            raise ValueError("syntax error")
    def cmd_play(self, arguments):
        """Play a "color vertex" move; raises ValueError when the move
        cannot be parsed or is rejected by the game."""
        move = parse_move(arguments)
        if move:
            color, vertex = move
            if self.vertex_in_range(vertex):
                if self._game.make_move(color, vertex):
                    return
        raise ValueError("illegal move")
    def cmd_genmove(self, arguments):
        """Ask the game for a move, play it, return its GTP vertex."""
        c = parse_color(arguments)
        if c:
            move = self._game.get_move(c)
            self._game.make_move(c, move)
            return gtp_vertex(move)
        else:
            raise ValueError("unknown player: {}".format(arguments))
class MinimalGame(object):
    """A bare-bones game state for exercising the GTP Engine: only stone
    placement is tracked, with no capture or legality logic, and
    ``get_move`` always passes."""
    def __init__(self, size=19, komi=6.5):
        self.size = size
        # Bug fix: the komi argument was previously ignored and the
        # attribute hard-coded to 6.5.
        self.komi = komi
        self.board = [EMPTY] * (self.size * self.size)
    def _flatten(self, vertex):
        """Convert a 1-based (x, y) vertex into a flat board index."""
        (x, y) = vertex
        return (x - 1) * self.size + (y - 1)
    def clear(self):
        """Reset every point on the board to EMPTY."""
        self.board = [EMPTY] * (self.size * self.size)
    def make_move(self, color, vertex):
        """Place a stone; the only legality check is that the point is
        empty.  A pass always succeeds.  Returns True on success."""
        if vertex == PASS:
            return True
        idx = self._flatten(vertex)
        if self.board[idx] != EMPTY:
            return False
        self.board[idx] = color
        return True
    def set_size(self, n):
        """Resize the board and clear it."""
        self.size = n
        self.clear()
    def set_komi(self, k):
        self.komi = k
    def get_move(self, color):
        # Always pass -- at least it's legal.
        return (0, 0)
| true | true |
f7250ab88692dbc04fa8a5fc7d974f0ae2eb1e02 | 2,352 | py | Python | model-optimizer/mo/front/kaldi/extractors/normalize_component_ext.py | Andruxin52rus/openvino | d824e371fe7dffb90e6d3d58e4e34adecfce4606 | [
"Apache-2.0"
] | 2 | 2020-11-18T14:14:06.000Z | 2020-11-28T04:55:57.000Z | model-optimizer/mo/front/kaldi/extractors/normalize_component_ext.py | Andruxin52rus/openvino | d824e371fe7dffb90e6d3d58e4e34adecfce4606 | [
"Apache-2.0"
] | 30 | 2020-11-13T11:44:07.000Z | 2022-02-21T13:03:16.000Z | model-optimizer/mo/front/kaldi/extractors/normalize_component_ext.py | mmakridi/openvino | 769bb7709597c14debdaa356dd60c5a78bdfa97e | [
"Apache-2.0"
] | 3 | 2021-03-09T08:27:29.000Z | 2021-04-07T04:58:54.000Z | """
Copyright (C) 2018-2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
from extensions.ops.normalize import NormalizeOp
from mo.front.caffe.extractors.utils import embed_input
from mo.front.extractor import FrontExtractorOp
from mo.front.kaldi.loader.utils import collect_until_token, read_binary_bool_token, read_binary_integer32_token, \
read_binary_float_token
from mo.utils.error import Error
class NormalizeComponentFrontExtractor(FrontExtractorOp):
    """Extractor for Kaldi's NormalizeComponent.

    Reads the component's binary parameter stream and maps it onto a
    NormalizeOp node whose normalization is expressed as a single scalar
    scale weight.
    """
    op = 'normalizecomponent'
    enabled = True

    @classmethod
    def extract(cls, node):
        stream = node.parameters
        # Depending on the Kaldi version the dimension token is spelled
        # <Dim> or <InputDim>; try both before giving up.
        try:
            collect_until_token(stream, b'<Dim>')
        except Error:
            try:
                stream.seek(0)
                collect_until_token(stream, b'<InputDim>')
            except Error:
                raise Error("Neither <Dim> nor <InputDim> were found")
        input_dim = read_binary_integer32_token(stream)
        # <TargetRms> is optional; fall back to 1.0 when absent.
        try:
            collect_until_token(stream, b'<TargetRms>')
            target_rms = read_binary_float_token(stream)
        except Error:
            target_rms = 1.0
        # <AddLogStddev> is optional; only the False value is supported.
        try:
            collect_until_token(stream, b'<AddLogStddev>')
            add_log = read_binary_bool_token(stream)
        except Error:
            add_log = False
        if add_log is not False:
            raise Error("AddLogStddev True in Normalize component is not supported")
        attrs = {
            'eps': 0.00000001,
            'across_spatial': 0,
            'channel_shared': 1,
            'in_dim': input_dim,
        }
        # Fold target_rms and the dimension into one scalar weight.
        embed_input(attrs, 1, 'weights', [target_rms * np.sqrt(input_dim)])
        NormalizeOp.update_node_stat(node, attrs)
        return cls.enabled
| 32.219178 | 115 | 0.65051 |
import numpy as np
from extensions.ops.normalize import NormalizeOp
from mo.front.caffe.extractors.utils import embed_input
from mo.front.extractor import FrontExtractorOp
from mo.front.kaldi.loader.utils import collect_until_token, read_binary_bool_token, read_binary_integer32_token, \
read_binary_float_token
from mo.utils.error import Error
class NormalizeComponentFrontExtractor(FrontExtractorOp):
    """Extractor for Kaldi's NormalizeComponent: reads the binary
    parameter stream and maps it onto a NormalizeOp node with a single
    scalar scale weight."""
    op = 'normalizecomponent'
    enabled = True
    @classmethod
    def extract(cls, node):
        pb = node.parameters
        # The dimension token is spelled <Dim> or <InputDim> depending
        # on the Kaldi version; try both before giving up.
        try:
            collect_until_token(pb, b'<Dim>')
        except Error:
            try:
                pb.seek(0)
                collect_until_token(pb, b'<InputDim>')
            except Error:
                raise Error("Neither <Dim> nor <InputDim> were found")
        in_dim = read_binary_integer32_token(pb)
        # <TargetRms> is optional; fall back to 1.0 when absent.
        try:
            collect_until_token(pb, b'<TargetRms>')
            target_rms = read_binary_float_token(pb)
        except Error:
            target_rms = 1.0
        # <AddLogStddev> is optional; only the False value is supported.
        try:
            collect_until_token(pb, b'<AddLogStddev>')
            add_log = read_binary_bool_token(pb)
        except Error:
            add_log = False
        if add_log is not False:
            raise Error("AddLogStddev True in Normalize component is not supported")
        # Fold target_rms and the dimension into one scalar weight.
        scale = target_rms * np.sqrt(in_dim)
        attrs = {
            'eps': 0.00000001,
            'across_spatial': 0,
            'channel_shared': 1,
            'in_dim': in_dim,
        }
        embed_input(attrs, 1, 'weights', [scale])
        NormalizeOp.update_node_stat(node, attrs)
        return cls.enabled
| true | true |
f7250ae4fa8e806718a9b01881b2da9dab3d34a5 | 6,708 | py | Python | aclhound/targets/arista.py | gdelaney/aclhound | 417a4ad788e886ce78a9527222e2ab4609c20d23 | [
"BSD-2-Clause"
] | 13 | 2015-01-10T16:42:07.000Z | 2018-07-12T01:53:21.000Z | aclhound/targets/arista.py | gdelaney/aclhound | 417a4ad788e886ce78a9527222e2ab4609c20d23 | [
"BSD-2-Clause"
] | 31 | 2015-01-02T21:42:00.000Z | 2016-04-13T21:31:52.000Z | aclhound/targets/arista.py | gdelaney/aclhound | 417a4ad788e886ce78a9527222e2ab4609c20d23 | [
"BSD-2-Clause"
] | 15 | 2015-01-17T20:09:01.000Z | 2020-09-23T09:06:07.000Z | #!/usr/bin/env python2.7
# Copyright (C) 2014-2015 Job Snijders <job@instituut.net>
#
# This file is part of ACLHound
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from ipaddr import IPNetwork
from grako.contexts import Closure
def _render_acl_host(host, afi):
    # Format one source/destination token in Arista ACL syntax (with a
    # leading space). /32 and /128 render as 'host x'; other IPv4 networks
    # use network + wildcard-mask notation; IPv6 networks stay in CIDR form.
    if host == "any":
        return " any"
    network = IPNetwork(host)
    if network.prefixlen in [32, 128]:
        return " host %s" % host.split('/')[0]
    if afi == 4:
        return " %s %s" % (network.network, network.hostmask)
    return " " + host


def _render_acl_port(port):
    # Format one L4 port token (with a leading space); 'any' adds nothing.
    # Port ranges arrive from the parser as (low, high) tuples.
    if isinstance(port, tuple):
        return " range %s %s" % (port[0], port[1])
    if not port == "any":
        return " eq %s" % str(port)
    return ""


def render(self, **kwargs):
    """Render self.data (the parsed ACLHound policy AST) into a list of
    Arista EOS access-list lines for the address family in kwargs['afi']
    (4 or 6). An explicit deny-all line is always appended."""
    policy = self.data
    afi = kwargs['afi']
    config_blob = []

    def afi_match(host):
        # "any" belongs to both address families; otherwise the literal decides.
        return host == "any" or IPNetwork(host).version == afi

    for rule in policy:
        rule = rule[0]
        # pre-filter hosts that do not belong to the requested address family
        s_hosts = [host for host in rule['source']['l3']['ip'] if afi_match(host)]
        d_hosts = [host for host in rule['destination']['l3']['ip'] if afi_match(host)]
        logging = rule['keywords']['log']
        stateful = rule['keywords']['state']
        # invariant per rule, so computed once instead of per host pair
        action = "permit" if rule['action'] == "allow" else "deny"

        # deal with ICMP: these rules carry their own type/code entries and
        # have no L4 ports
        if "icmp" in rule['protocol']:
            # use a dedicated name so the outer 'policy' list is not shadowed
            icmp_entries = rule['protocol']['icmp']
            # FIXME this should happen in render or aclsemantics
            if not isinstance(icmp_entries, Closure):
                icmp_entries = [icmp_entries]
            # cycle through all ICMP related elements in the AST
            for entry in icmp_entries:
                for s_host in s_hosts:
                    for d_host in d_hosts:
                        line = "%s icmp" % action
                        line += _render_acl_host(s_host, afi)
                        line += _render_acl_host(d_host, afi)
                        if not entry == "any":
                            # append explicit icmp-type / icmp-code when given
                            for el in ['icmp_type', 'icmp_code']:
                                if not str(entry[el]) == "any":
                                    line += " " + str(entry[el])
                        if logging:
                            line += " log"
                        if line not in config_blob:
                            config_blob.append(line)
            # jump out of the loop because we have nothing to do with
            # L4 when doing ICMP
            continue

        # layer 3 and 4
        s_ports = rule['source']['l4']['ports']
        d_ports = rule['destination']['l4']['ports']
        for s_port in s_ports:
            for d_port in d_ports:
                for s_host in s_hosts:
                    for d_host in d_hosts:
                        line = action
                        if rule['protocol'] == "any":
                            line += " ip" if afi == 4 else " ipv6"
                        else:
                            line += " " + rule['protocol']
                        line += _render_acl_host(s_host, afi)
                        line += _render_acl_port(s_port)
                        line += _render_acl_host(d_host, afi)
                        line += _render_acl_port(d_port)
                        if stateful and rule['protocol'] == "tcp":
                            # only TCP supports the 'established' keyword
                            line += " established"
                        if logging:
                            line += " log"
                        if line not in config_blob:
                            config_blob.append(line)

    # implicit default: drop everything not explicitly permitted
    if afi == 4:
        config_blob.append('deny ip any any')
    if afi == 6:
        config_blob.append('deny any any')
    return config_blob
| 41.153374 | 78 | 0.45319 |
from ipaddr import IPNetwork
from grako.contexts import Closure
def render(self, **kwargs):
    """Render self.data (the parsed ACLHound policy AST) into a list of
    Arista EOS access-list lines for the address family in kwargs['afi']
    (4 or 6). An explicit deny-all line is always appended.

    NOTE(review): this appears to be a comment-stripped duplicate of the
    render() defined earlier in this file.
    """
    policy = self.data
    afi = kwargs['afi']
    config_blob = []
    def afi_match(host):
        # "any" belongs to both address families; otherwise the literal decides.
        if host == "any":
            return True
        elif IPNetwork(host).version == afi:
            return True
        else:
            return False
    for rule in policy:
        rule = rule[0]
        s_hosts = rule['source']['l3']['ip']
        d_hosts = rule['destination']['l3']['ip']
        logging = rule['keywords']['log']
        stateful = rule['keywords']['state']
        # ICMP rules carry their own type/code entries and have no L4 ports.
        if "icmp" in rule['protocol']:
            # NOTE(review): rebinding 'policy' shadows the outer policy list;
            # the outer loop's iterator is unaffected, but it reads badly.
            policy = rule['protocol']['icmp']
            if not isinstance(policy, Closure):
                policy = [policy]
            for entry in policy:
                for s_host in s_hosts:
                    if not afi_match(s_host):
                        continue
                    for d_host in d_hosts:
                        if not afi_match(d_host):
                            continue
                        if rule['action'] == "allow":
                            action = "permit"
                        else:
                            action = "deny"
                        line = "%s icmp" % action
                        for host in [s_host, d_host]:
                            if host == "any":
                                line += " any"
                            elif IPNetwork(host).prefixlen in [32, 128]:
                                # /32 and /128 render as single hosts
                                line += " host %s" % host.split('/')[0]
                            elif afi == 4:
                                # IPv4 ACLs use network + wildcard (hostmask) notation
                                line += " %s %s" % (IPNetwork(host).network,
                                                    IPNetwork(host).hostmask)
                            else:
                                line += " " + host
                        if not entry == "any":
                            # append explicit icmp-type / icmp-code when given
                            for el in ['icmp_type', 'icmp_code']:
                                if not str(entry[el]) == "any":
                                    line += " " + str(entry[el])
                        if logging:
                            line += " log"
                        if line not in config_blob:
                            config_blob.append(line)
            # nothing L4-related to do for ICMP rules
            continue
        # layer 3 and 4
        s_ports = rule['source']['l4']['ports']
        d_ports = rule['destination']['l4']['ports']
        for s_port in s_ports:
            for d_port in d_ports:
                for s_host in s_hosts:
                    if not afi_match(s_host):
                        continue
                    for d_host in d_hosts:
                        if not afi_match(d_host):
                            continue
                        if rule['action'] == "allow":
                            action = "permit"
                        else:
                            action = "deny"
                        line = action
                        if rule['protocol'] == "any":
                            line += " ip" if afi == 4 else " ipv6"
                        else:
                            line += " " + rule['protocol']
                        if s_host == "any":
                            line += " any"
                        elif IPNetwork(s_host).prefixlen in [32, 128]:
                            line += " host %s" % s_host.split('/')[0]
                        elif afi == 4:
                            line += " %s %s" % (IPNetwork(s_host).network,
                                                IPNetwork(s_host).hostmask)
                        else:
                            line += " " + s_host
                        if type(s_port) == tuple:
                            # port ranges arrive as (low, high) tuples
                            line += " range %s %s" % (s_port[0], s_port[1])
                        elif not s_port == "any":
                            line += " eq %s" % str(s_port)
                        if d_host == "any":
                            line += " any"
                        elif IPNetwork(d_host).prefixlen in [32, 128]:
                            line += " host %s" % d_host.split('/')[0]
                        elif afi == 4:
                            line += " %s %s" % (IPNetwork(d_host).network,
                                                IPNetwork(d_host).hostmask)
                        else:
                            line += " " + d_host
                        if type(d_port) == tuple:
                            line += " range %s %s" % (d_port[0], d_port[1])
                        elif not d_port == "any":
                            line += " eq %s" % str(d_port)
                        if stateful and rule['protocol'] == "tcp":
                            # only TCP supports the 'established' keyword
                            line += " established"
                        if logging:
                            line += " log"
                        if line not in config_blob:
                            config_blob.append(line)
    # implicit default: drop everything not explicitly permitted
    if afi == 4:
        config_blob.append('deny ip any any')
    if afi == 6:
        config_blob.append('deny any any')
    return config_blob
| true | true |
f7250b0e5c4a53b9bd86487a7b9aa16553365458 | 3,463 | py | Python | deepgram/_utils.py | jeremycline/python-sdk | 5847241be8585982673b6f21080c3f5b921123e4 | [
"MIT"
] | null | null | null | deepgram/_utils.py | jeremycline/python-sdk | 5847241be8585982673b6f21080c3f5b921123e4 | [
"MIT"
] | null | null | null | deepgram/_utils.py | jeremycline/python-sdk | 5847241be8585982673b6f21080c3f5b921123e4 | [
"MIT"
] | null | null | null | from ._constants import DEFAULT_ENDPOINT
from ._types import Options
from ._version import __version__
from typing import Any, Union, Optional, IO, Mapping, Tuple, List
import aiohttp, urllib.parse, json, re, platform
import websockets, websockets.client
Payload = Optional[Union[dict, str, bytes, IO]]
def _prepare_headers(options: Options, headers: Mapping[str, str] = {}) -> dict:
    """Return *headers* merged with Deepgram auth and user-agent headers.

    When *options* carries an 'api_key', Authorization is built as
    '<auth_method> <api_key>' (auth_method defaults to 'Token'); otherwise
    Authorization is set to None. Any caller-supplied Authorization or
    User-Agent entries are overwritten.
    """
    if 'api_key' in options:
        authorization = options.get('auth_method', 'Token') + ' ' + options['api_key']
    else:
        authorization = None
    prepared = dict(headers)
    prepared['Authorization'] = authorization
    prepared['User-Agent'] = f'deepgram/{__version__} python/{platform.python_version()}'
    return prepared
def _normalize_payload(payload: Payload) -> Optional[Union[bytes, IO]]:
if payload is None:
return None
if isinstance(payload, dict):
return json.dumps(payload).encode('utf-8')
if isinstance(payload, str):
return payload.encode('utf-8')
return payload
def _make_query_string(params: Mapping[str, Any] = {}) -> str:
def elem_decomposer(key: str, value: Any) -> List[Tuple[str, str]]:
if value in [None, ""]:
return []
if isinstance(value, list):
return [elem_decomposer(key, item)[0] for item in value] # break into multiple parameters
# just take the first element in the sublist, rather than trying to flatten recursively
# passing nested lists as query parameters isn't really well-defined,
# nor does anything in our API currently take things like that as of 2021-08-10
# so everything coming through this second pass should be a 1-item list
if isinstance(value, bool):
return [(key, str(value).lower())] # make sure False and True stay lowercased in accordance with DG convention
return [(key, str(value))]
unflattened = [elem_decomposer(k, v) for k, v in params.items()] # sublist for each original parameter
flattened = sum(unflattened, []) # flatten
return ('?' if flattened else '') + urllib.parse.urlencode(flattened)
async def _request(path: str, options: Options, method: str = 'GET', payload: Payload = None, headers: Optional[Mapping[str, str]] = {}) -> Optional[dict]:
    """Issue an HTTP request against the Deepgram API.

    Returns the decoded JSON body, or None when the response body is empty.
    Raises Exception('DG: ...') for HTTP error statuses and for responses
    whose JSON carries an 'error' field; other aiohttp client errors
    propagate unchanged.
    """
    destination = options.get('api_url', DEFAULT_ENDPOINT) + path
    updated_headers = _prepare_headers(options, headers)
    data = _normalize_payload(payload)
    try:
        # raise_for_status turns HTTP error codes into ClientResponseError
        async with aiohttp.request(method, destination, data=data, headers=updated_headers, raise_for_status=True) as resp:
            content = (await resp.text()).strip()
    except aiohttp.ClientResponseError as e:
        raise Exception(f'DG: {e}')
    except aiohttp.ClientError as e:
        raise e
    if not content:
        return None
    body = json.loads(content)
    if body.get('error'):
        raise Exception(f'DG: {content}')
    return body
async def _socket_connect(path: str, options: Options, headers: Optional[Mapping[str, str]] = {}) -> websockets.client.WebSocketClientProtocol:
    """Open a websocket connection to the Deepgram API endpoint at *path*.

    The http(s) api_url scheme is rewritten to ws(s). Handshake failures are
    surfaced as Exception('DG: ...').
    """
    destination = re.sub(r'^http', 'ws', options.get('api_url', DEFAULT_ENDPOINT)) + path
    updated_headers = _prepare_headers(options, headers)
    try:
        # If we're streaming too much faster than realtime, connection might close without an aggressive ping interval
        return await websockets.connect(destination, extra_headers=updated_headers, ping_interval=5)
    except websockets.exceptions.InvalidHandshake as e:
        # restored: this statement's source line was corrupted by concatenated metadata
        raise Exception(f'DG: {e}')

from ._constants import DEFAULT_ENDPOINT
from ._types import Options
from ._version import __version__
from typing import Any, Union, Optional, IO, Mapping, Tuple, List
import aiohttp, urllib.parse, json, re, platform
import websockets, websockets.client
Payload = Optional[Union[dict, str, bytes, IO]]
def _prepare_headers(options: Options, headers: Mapping[str, str] = {}) -> dict:
    """Return *headers* merged with Deepgram auth and user-agent headers.

    Authorization is '<auth_method> <api_key>' (auth_method defaults to
    'Token') when *options* carries an 'api_key', otherwise None. Any
    caller-supplied Authorization or User-Agent entries are overwritten.
    """
    return {**headers,
        'Authorization': None if 'api_key' not in options else options.get('auth_method', 'Token') + ' ' + options['api_key'],
        'User-Agent': f'deepgram/{__version__} python/{platform.python_version()}'
    }
def _normalize_payload(payload: Payload) -> Optional[Union[bytes, IO]]:
    """Coerce a request body: dicts become UTF-8 encoded JSON, strings are
    UTF-8 encoded, and None / bytes / file-like objects pass through."""
    if payload is None:
        return None
    if isinstance(payload, dict):
        return json.dumps(payload).encode('utf-8')
    if isinstance(payload, str):
        return payload.encode('utf-8')
    # already bytes or a file-like object
    return payload
def _make_query_string(params: Mapping[str, Any] = {}) -> str:
    """Build a URL query string from *params*: None/'' values are dropped,
    list values repeat the key, booleans are lowercased. Returns '' when
    nothing survives filtering."""
    def elem_decomposer(key: str, value: Any) -> List[Tuple[str, str]]:
        if value in [None, ""]:
            return []
        if isinstance(value, list):
            # break the list into one (key, item) pair per element
            # NOTE(review): an empty/None item in the list makes the inner
            # call return [], so the [0] below would raise IndexError.
            return [elem_decomposer(key, item)[0] for item in value]
        # nor does anything in our API currently take things like that as of 2021-08-10
        # so everything coming through this second pass should be a 1-item list
        if isinstance(value, bool):
            return [(key, str(value).lower())] # make sure False and True stay lowercased in accordance with DG convention
        return [(key, str(value))]
    unflattened = [elem_decomposer(k, v) for k, v in params.items()]  # sublist for each original parameter
    flattened = sum(unflattened, [])  # flatten
    return ('?' if flattened else '') + urllib.parse.urlencode(flattened)
async def _request(path: str, options: Options, method: str = 'GET', payload: Payload = None, headers: Optional[Mapping[str, str]] = {}) -> Optional[dict]:
    """Perform an HTTP request against the Deepgram API.

    Returns the decoded JSON body, or None for an empty response body.
    Raises Exception('DG: ...') for HTTP error statuses and for responses
    whose JSON carries an 'error' field; other aiohttp client errors
    re-raise unchanged.
    """
    destination = options.get('api_url', DEFAULT_ENDPOINT) + path
    updated_headers = _prepare_headers(options, headers)
    try:
        # raise_for_status turns HTTP error codes into ClientResponseError
        async with aiohttp.request(method, destination, data=_normalize_payload(payload), headers=updated_headers, raise_for_status=True) as resp:
            content = (await resp.text()).strip()
            if not content:
                return None
            body = json.loads(content)
            if body.get('error'):
                raise Exception(f'DG: {content}')
            return body
    except aiohttp.ClientResponseError as e:
        raise Exception(f'DG: {e}')
    except aiohttp.ClientError as e:
        raise e
async def _socket_connect(path: str, options: Options, headers: Optional[Mapping[str, str]] = {}) -> websockets.client.WebSocketClientProtocol:
    """Open a websocket to the Deepgram API endpoint at *path*; the http(s)
    api_url scheme is rewritten to ws(s). Handshake failures are surfaced
    as Exception('DG: ...')."""
    destination = re.sub(r'^http', 'ws', options.get('api_url', DEFAULT_ENDPOINT)) + path
    updated_headers = _prepare_headers(options, headers)
    try:
        return await websockets.connect(destination, extra_headers=updated_headers, ping_interval=5)
        # If we're streaming too much faster than realtime, connection might close without an aggressive ping interval
    except websockets.exceptions.InvalidHandshake as e:
        raise Exception(f'DG: {e}')
f7250ba98072e446898a6e4e1a69f331c437a919 | 215,396 | py | Python | hydrus/client/gui/ClientGUITags.py | bbappserver/hydrus-build-test | de7868c2f549faaf4a189b120cddcb39d16a64ba | [
"WTFPL"
] | null | null | null | hydrus/client/gui/ClientGUITags.py | bbappserver/hydrus-build-test | de7868c2f549faaf4a189b120cddcb39d16a64ba | [
"WTFPL"
] | null | null | null | hydrus/client/gui/ClientGUITags.py | bbappserver/hydrus-build-test | de7868c2f549faaf4a189b120cddcb39d16a64ba | [
"WTFPL"
] | null | null | null | import collections
import itertools
import os
import random
import time
import typing
from qtpy import QtCore as QC
from qtpy import QtWidgets as QW
from qtpy import QtGui as QG
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusExceptions
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusSerialisable
from hydrus.core import HydrusTags
from hydrus.core import HydrusText
from hydrus.core.networking import HydrusNetwork
from hydrus.client import ClientApplicationCommand as CAC
from hydrus.client import ClientConstants as CC
from hydrus.client import ClientManagers
from hydrus.client.gui import ClientGUIAsync
from hydrus.client.gui import ClientGUICore as CGC
from hydrus.client.gui import ClientGUIDialogs
from hydrus.client.gui import ClientGUIDialogsQuick
from hydrus.client.gui import ClientGUIFunctions
from hydrus.client.gui import ClientGUIMenus
from hydrus.client.gui import ClientGUIScrolledPanels
from hydrus.client.gui import ClientGUIScrolledPanelsReview
from hydrus.client.gui import ClientGUIShortcuts
from hydrus.client.gui import ClientGUITagSuggestions
from hydrus.client.gui import ClientGUITopLevelWindowsPanels
from hydrus.client.gui import QtPorting as QP
from hydrus.client.gui.lists import ClientGUIListBoxes
from hydrus.client.gui.lists import ClientGUIListConstants as CGLC
from hydrus.client.gui.lists import ClientGUIListCtrl
from hydrus.client.gui.networking import ClientGUIHydrusNetwork
from hydrus.client.gui.search import ClientGUIACDropdown
from hydrus.client.gui.widgets import ClientGUICommon
from hydrus.client.gui.widgets import ClientGUIControls
from hydrus.client.gui.widgets import ClientGUIMenuButton
from hydrus.client.media import ClientMedia
from hydrus.client.metadata import ClientTags
from hydrus.client.metadata import ClientTagsHandling
class EditTagAutocompleteOptionsPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for one service's TagAutocompleteOptions: the default
    write-autocomplete tag/file domains and which expensive wildcard or
    namespace searches are permitted."""
    
    def __init__( self, parent: QW.QWidget, tag_autocomplete_options: ClientTagsHandling.TagAutocompleteOptions ):
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        # kept so GetValue() can build a fresh options object with the same service key
        self._original_tag_autocomplete_options = tag_autocomplete_options
        
        services_manager = HG.client_controller.services_manager
        
        all_real_tag_service_keys = services_manager.GetServiceKeys( HC.REAL_TAG_SERVICES )
        all_real_file_service_keys = services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, HC.FILE_REPOSITORY ) )
        
        # -- default write-autocomplete domains --
        
        self._write_autocomplete_tag_domain = ClientGUICommon.BetterChoice( self )
        self._write_autocomplete_tag_domain.setToolTip( 'A manage tags autocomplete will start with this domain. Typically only useful with this service or "all known tags".' )
        
        # "all known tags" first, then every real tag service
        self._write_autocomplete_tag_domain.addItem( services_manager.GetName( CC.COMBINED_TAG_SERVICE_KEY ), CC.COMBINED_TAG_SERVICE_KEY )
        
        for service_key in all_real_tag_service_keys:
            
            self._write_autocomplete_tag_domain.addItem( services_manager.GetName( service_key ), service_key )
            
        
        self._override_write_autocomplete_file_domain = QW.QCheckBox( self )
        self._override_write_autocomplete_file_domain.setToolTip( 'If set, a manage tags dialog autocomplete will start with a different file domain than the one that launched the dialog.' )
        
        self._write_autocomplete_file_domain = ClientGUICommon.BetterChoice( self )
        self._write_autocomplete_file_domain.setToolTip( 'A manage tags autocomplete will start with this domain. Normally only useful for "all known files" or "my files".' )
        
        # "all known files" first, then every local/repository file service
        self._write_autocomplete_file_domain.addItem( services_manager.GetName( CC.COMBINED_FILE_SERVICE_KEY ), CC.COMBINED_FILE_SERVICE_KEY )
        
        for service_key in all_real_file_service_keys:
            
            self._write_autocomplete_file_domain.addItem( services_manager.GetName( service_key ), service_key )
            
        
        # -- expensive search permissions --
        
        self._search_namespaces_into_full_tags = QW.QCheckBox( self )
        self._search_namespaces_into_full_tags.setToolTip( 'If on, a search for "ser" will return all "series:" results such as "series:metrod". On large tag services, these searches are extremely slow.' )
        
        self._namespace_bare_fetch_all_allowed = QW.QCheckBox( self )
        self._namespace_bare_fetch_all_allowed.setToolTip( 'If on, a search for "series:" will return all "series:" results. On large tag services, these searches are extremely slow.' )
        
        self._namespace_fetch_all_allowed = QW.QCheckBox( self )
        self._namespace_fetch_all_allowed.setToolTip( 'If on, a search for "series:*" will return all "series:" results. On large tag services, these searches are extremely slow.' )
        
        self._fetch_all_allowed = QW.QCheckBox( self )
        self._fetch_all_allowed.setToolTip( 'If on, a search for "*" will return all tags. On large tag services, these searches are extremely slow.' )
        
        self._fetch_results_automatically = QW.QCheckBox( self )
        self._fetch_results_automatically.setToolTip( 'If on, results will load as you type. If off, you will have to hit a shortcut (default Ctrl+Space) to load results.' )
        
        self._exact_match_character_threshold = ClientGUICommon.NoneableSpinCtrl( self, none_phrase = 'always autocomplete (only appropriate for small tag services)', min = 1, max = 256, unit = 'characters' )
        self._exact_match_character_threshold.setToolTip( 'When the search text has <= this many characters, autocomplete will not occur and you will only get results that exactly match the input. Increasing this value makes autocomplete snappier but reduces the number of results.' )
        
        # -- populate controls from the incoming options object --
        
        self._write_autocomplete_tag_domain.SetValue( tag_autocomplete_options.GetWriteAutocompleteTagDomain() )
        self._override_write_autocomplete_file_domain.setChecked( tag_autocomplete_options.OverridesWriteAutocompleteFileDomain() )
        self._write_autocomplete_file_domain.SetValue( tag_autocomplete_options.GetWriteAutocompleteFileDomain() )
        self._search_namespaces_into_full_tags.setChecked( tag_autocomplete_options.SearchNamespacesIntoFullTags() )
        self._namespace_bare_fetch_all_allowed.setChecked( tag_autocomplete_options.NamespaceBareFetchAllAllowed() )
        self._namespace_fetch_all_allowed.setChecked( tag_autocomplete_options.NamespaceFetchAllAllowed() )
        self._fetch_all_allowed.setChecked( tag_autocomplete_options.FetchAllAllowed() )
        self._fetch_results_automatically.setChecked( tag_autocomplete_options.FetchResultsAutomatically() )
        self._exact_match_character_threshold.SetValue( tag_autocomplete_options.GetExactMatchCharacterThreshold() )
        
        # -- layout --
        
        rows = []
        
        rows.append( ( 'Fetch results as you type: ', self._fetch_results_automatically ) )
        rows.append( ( 'Do-not-autocomplete character threshold: ', self._exact_match_character_threshold ) )
        
        if tag_autocomplete_options.GetServiceKey() == CC.COMBINED_TAG_SERVICE_KEY:
            
            # the write-domain controls make no sense for "all known tags"
            self._write_autocomplete_tag_domain.setVisible( False )
            self._override_write_autocomplete_file_domain.setVisible( False )
            self._write_autocomplete_file_domain.setVisible( False )
            
        else:
            
            rows.append( ( 'Override default autocomplete file domain in _manage tags_: ', self._override_write_autocomplete_file_domain ) )
            rows.append( ( 'Default autocomplete file domain in _manage tags_: ', self._write_autocomplete_file_domain ) )
            rows.append( ( 'Default autocomplete tag domain in _manage tags_: ', self._write_autocomplete_tag_domain ) )
            
        
        rows.append( ( 'Search namespaces with normal input: ', self._search_namespaces_into_full_tags ) )
        rows.append( ( 'Allow "namespace:": ', self._namespace_bare_fetch_all_allowed ) )
        rows.append( ( 'Allow "namespace:*": ', self._namespace_fetch_all_allowed ) )
        rows.append( ( 'Allow "*": ', self._fetch_all_allowed ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        vbox = QP.VBoxLayout()
        
        label = 'The settings that permit searching namespaces and expansive "*" queries can be very expensive on a large client and may cause problems!'
        
        st = ClientGUICommon.BetterStaticText( self, label = label )
        
        QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.widget().setLayout( vbox )
        
        # establish initial enabled/disabled state, then keep it in sync
        self._UpdateControls()
        
        self._override_write_autocomplete_file_domain.stateChanged.connect( self._UpdateControls )
        self._search_namespaces_into_full_tags.stateChanged.connect( self._UpdateControls )
        self._namespace_bare_fetch_all_allowed.stateChanged.connect( self._UpdateControls )
        
    
    def _UpdateControls( self ):
        """Enable/disable dependent checkboxes. A broader permission implies
        the narrower ones, so implied boxes are disabled and forced on."""
        
        self._write_autocomplete_file_domain.setEnabled( self._override_write_autocomplete_file_domain.isChecked() )
        
        if self._search_namespaces_into_full_tags.isChecked():
            
            self._namespace_bare_fetch_all_allowed.setEnabled( False )
            self._namespace_fetch_all_allowed.setEnabled( False )
            
        else:
            
            self._namespace_bare_fetch_all_allowed.setEnabled( True )
            
            if self._namespace_bare_fetch_all_allowed.isChecked():
                
                self._namespace_fetch_all_allowed.setEnabled( False )
                
            else:
                
                self._namespace_fetch_all_allowed.setEnabled( True )
                
            
        
        for c in ( self._namespace_bare_fetch_all_allowed, self._namespace_fetch_all_allowed ):
            
            if not c.isEnabled():
                
                # force the implied permission on; block signals so this does
                # not re-enter _UpdateControls
                c.blockSignals( True )
                
                c.setChecked( True )
                
                c.blockSignals( False )
                
            
        
    
    def GetValue( self ):
        """Build and return a fresh TagAutocompleteOptions from the current control state."""
        
        tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( self._original_tag_autocomplete_options.GetServiceKey() )
        
        write_autocomplete_tag_domain = self._write_autocomplete_tag_domain.GetValue()
        override_write_autocomplete_file_domain = self._override_write_autocomplete_file_domain.isChecked()
        write_autocomplete_file_domain = self._write_autocomplete_file_domain.GetValue()
        search_namespaces_into_full_tags = self._search_namespaces_into_full_tags.isChecked()
        namespace_bare_fetch_all_allowed = self._namespace_bare_fetch_all_allowed.isChecked()
        namespace_fetch_all_allowed = self._namespace_fetch_all_allowed.isChecked()
        fetch_all_allowed = self._fetch_all_allowed.isChecked()
        
        tag_autocomplete_options.SetTuple(
            write_autocomplete_tag_domain,
            override_write_autocomplete_file_domain,
            write_autocomplete_file_domain,
            search_namespaces_into_full_tags,
            namespace_bare_fetch_all_allowed,
            namespace_fetch_all_allowed,
            fetch_all_allowed
        )
        
        tag_autocomplete_options.SetFetchResultsAutomatically( self._fetch_results_automatically.isChecked() )
        tag_autocomplete_options.SetExactMatchCharacterThreshold( self._exact_match_character_threshold.GetValue() )
        
        return tag_autocomplete_options
class EditTagDisplayApplication( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel choosing, per tag service, which services' sibling and
    parent rules are applied to it (one notebook page per real tag service)."""
    
    def __init__( self, parent, master_service_keys_to_sibling_applicable_service_keys, master_service_keys_to_parent_applicable_service_keys ):
        
        # defaultdicts so services with no explicit entry fall back to an empty list
        master_service_keys_to_sibling_applicable_service_keys = collections.defaultdict( list, master_service_keys_to_sibling_applicable_service_keys )
        master_service_keys_to_parent_applicable_service_keys = collections.defaultdict( list, master_service_keys_to_parent_applicable_service_keys )
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        self._tag_services_notebook = ClientGUICommon.BetterNotebook( self )
        
        min_width = ClientGUIFunctions.ConvertTextToPixelWidth( self._tag_services_notebook, 100 )
        
        self._tag_services_notebook.setMinimumWidth( min_width )
        
        # one page per real tag service; the first service's page is selected
        
        services = list( HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES ) )
        
        select_service_key = services[0].GetServiceKey()
        
        for service in services:
            
            master_service_key = service.GetServiceKey()
            name = service.GetName()
            
            sibling_applicable_service_keys = master_service_keys_to_sibling_applicable_service_keys[ master_service_key ]
            parent_applicable_service_keys = master_service_keys_to_parent_applicable_service_keys[ master_service_key ]
            
            page = self._Panel( self._tag_services_notebook, master_service_key, sibling_applicable_service_keys, parent_applicable_service_keys )
            
            select = master_service_key == select_service_key
            
            self._tag_services_notebook.addTab( page, name )
            
            if select:
                
                self._tag_services_notebook.setCurrentWidget( page )
                
            
        
        # -- explanatory text and sync status --
        
        vbox = QP.VBoxLayout()
        
        message = 'While a tag service normally applies its own siblings and parents to itself, it does not have to. If you want a different service\'s siblings (e.g. putting the PTR\'s siblings on your "my tags"), or multiple services\', then set it here. You can also apply no siblings or parents at all.'
        message += os.linesep * 2
        message += 'If there are conflicts, the services at the top of the list have precedence. Parents are collapsed by sibling rules before they are applied.'
        
        self._message = ClientGUICommon.BetterStaticText( self, label = message )
        self._message.setWordWrap( True )
        
        self._sync_status = ClientGUICommon.BetterStaticText( self )
        
        self._sync_status.setWordWrap( True )
        
        # colour/word the status line according to the current background-sync settings
        if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
            
            self._sync_status.setText( 'Siblings and parents are set to sync all the time. Changes will start applying as soon as you ok this dialog.' )
            
            self._sync_status.setObjectName( 'HydrusValid' )
            
        else:
            
            if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_idle' ):
                
                self._sync_status.setText( 'Siblings and parents are only set to sync during idle time. Changes here will only start to apply when you are not using the client.' )
                
            else:
                
                self._sync_status.setText( 'Siblings and parents are not set to sync in the background at any time. If there is sync work to do, you will have to force it to run using the \'review\' window under _tags->siblings and parents sync_.' )
                
            
            self._sync_status.setObjectName( 'HydrusWarning' )
            
        
        self._sync_status.style().polish( self._sync_status )
        
        QP.AddToLayout( vbox, self._message, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._sync_status, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._tag_services_notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.widget().setLayout( vbox )
        
    
    def GetValue( self ):
        """Collect every page's (master key, sibling keys, parent keys) into
        the two mappings the caller expects."""
        
        master_service_keys_to_sibling_applicable_service_keys = collections.defaultdict( list )
        master_service_keys_to_parent_applicable_service_keys = collections.defaultdict( list )
        
        for page in self._tag_services_notebook.GetPages():
            
            ( master_service_key, sibling_applicable_service_keys, parent_applicable_service_keys ) = page.GetValue()
            
            master_service_keys_to_sibling_applicable_service_keys[ master_service_key ] = sibling_applicable_service_keys
            master_service_keys_to_parent_applicable_service_keys[ master_service_key ] = parent_applicable_service_keys
            
        
        return ( master_service_keys_to_sibling_applicable_service_keys, master_service_keys_to_parent_applicable_service_keys )
        
    
    class _Panel( QW.QWidget ):
        """One notebook page: ordered lists of the services whose siblings and
        parents apply to this master service."""
        
        def __init__( self, parent: QW.QWidget, master_service_key: bytes, sibling_applicable_service_keys: typing.Sequence[ bytes ], parent_applicable_service_keys: typing.Sequence[ bytes ] ):
            
            QW.QWidget.__init__( self, parent )
            
            self._master_service_key = master_service_key
            
            self._sibling_box = ClientGUICommon.StaticBox( self, 'sibling application' )
            
            self._sibling_service_keys_listbox = ClientGUIListBoxes.QueueListBox( self._sibling_box, 4, HG.client_controller.services_manager.GetName, add_callable = self._AddSibling )
            
            self._sibling_service_keys_listbox.AddDatas( sibling_applicable_service_keys )
            
            self._parent_box = ClientGUICommon.StaticBox( self, 'parent application' )
            
            # NOTE(review): the parent listbox is parented to self._sibling_box,
            # not self._parent_box - looks like a copy/paste slip; the later
            # self._parent_box.Add() call re-homes it, so it displays correctly.
            self._parent_service_keys_listbox = ClientGUIListBoxes.QueueListBox( self._sibling_box, 4, HG.client_controller.services_manager.GetName, add_callable = self._AddParent )
            
            self._parent_service_keys_listbox.AddDatas( parent_applicable_service_keys )
            
            self._sibling_box.Add( self._sibling_service_keys_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
            self._parent_box.Add( self._parent_service_keys_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
            
            vbox = QP.VBoxLayout()
            
            QP.AddToLayout( vbox, self._sibling_box, CC.FLAGS_EXPAND_BOTH_WAYS )
            QP.AddToLayout( vbox, self._parent_box, CC.FLAGS_EXPAND_BOTH_WAYS )
            
            self.setLayout( vbox )
            
        
        def _AddParent( self ):
            """add_callable for the parent list: pick a not-yet-applied service."""
            
            current_service_keys = self._parent_service_keys_listbox.GetData()
            
            return self._AddService( current_service_keys )
            
        
        def _AddService( self, current_service_keys ):
            """Prompt for a tag service not already in *current_service_keys*.
            Raises VetoException when none remain or the user cancels."""
            
            allowed_services = HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES )
            
            allowed_services = [ service for service in allowed_services if service.GetServiceKey() not in current_service_keys ]
            
            if len( allowed_services ) == 0:
                
                QW.QMessageBox.information( self, 'Information', 'You have all the current tag services applied to this service.' )
                
                raise HydrusExceptions.VetoException()
                
            
            choice_tuples = [ ( service.GetName(), service.GetServiceKey(), service.GetName() ) for service in allowed_services ]
            
            try:
                
                service_key = ClientGUIDialogsQuick.SelectFromListButtons( self, 'Which service?', choice_tuples )
                
                return service_key
                
            except HydrusExceptions.CancelledException:
                
                # the QueueListBox understands a veto as "add nothing"
                raise HydrusExceptions.VetoException()
                
            
        
        def _AddSibling( self ):
            """add_callable for the sibling list: pick a not-yet-applied service."""
            
            current_service_keys = self._sibling_service_keys_listbox.GetData()
            
            return self._AddService( current_service_keys )
            
        
        def GetValue( self ):
            """Return ( master service key, sibling service keys, parent service keys )."""
            
            return ( self._master_service_key, self._sibling_service_keys_listbox.GetData(), self._parent_service_keys_listbox.GetData() )
class EditTagDisplayManagerPanel( ClientGUIScrolledPanels.EditPanel ):
    def __init__( self, parent, tag_display_manager: ClientTagsHandling.TagDisplayManager ):
        """Build one notebook page per tag service (including the combined
        'all known tags' service) for editing its tag display options."""
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        # kept so GetValue() can start from a duplicate of the incoming manager
        self._original_tag_display_manager = tag_display_manager
        
        self._tag_services = ClientGUICommon.BetterNotebook( self )
        
        min_width = ClientGUIFunctions.ConvertTextToPixelWidth( self._tag_services, 100 )
        
        self._tag_services.setMinimumWidth( min_width )
        
        services = list( HG.client_controller.services_manager.GetServices( ( HC.COMBINED_TAG, HC.LOCAL_TAG, HC.TAG_REPOSITORY ) ) )
        
        for service in services:
            
            service_key = service.GetServiceKey()
            name = service.GetName()
            
            page = self._Panel( self._tag_services, self._original_tag_display_manager, service_key )
            
            # the "all known tags" page is shown first by default
            select = service_key == CC.COMBINED_TAG_SERVICE_KEY
            
            self._tag_services.addTab( page, name )
            
            if select: self._tag_services.setCurrentWidget( page )
            
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, self._tag_services, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.widget().setLayout( vbox )
def GetValue( self ):
tag_display_manager = self._original_tag_display_manager.Duplicate()
tag_display_manager.ClearTagDisplayOptions()
for page in self._tag_services.GetPages():
( service_key, tag_display_types_to_tag_filters, tag_autocomplete_options ) = page.GetValue()
for ( tag_display_type, tag_filter ) in tag_display_types_to_tag_filters.items():
tag_display_manager.SetTagFilter( tag_display_type, service_key, tag_filter )
tag_display_manager.SetTagAutocompleteOptions( tag_autocomplete_options )
return tag_display_manager
class _Panel( QW.QWidget ):
def __init__( self, parent: QW.QWidget, tag_display_manager: ClientTagsHandling.TagDisplayManager, service_key: bytes ):
QW.QWidget.__init__( self, parent )
single_tag_filter = tag_display_manager.GetTagFilter( ClientTags.TAG_DISPLAY_SINGLE_MEDIA, service_key )
selection_tag_filter = tag_display_manager.GetTagFilter( ClientTags.TAG_DISPLAY_SELECTION_LIST, service_key )
tag_autocomplete_options = tag_display_manager.GetTagAutocompleteOptions( service_key )
self._service_key = service_key
#
self._display_box = ClientGUICommon.StaticBox( self, 'display' )
message = 'This filters which tags will show on \'single\' file views such as the media viewer and thumbnail banners.'
self._single_tag_filter_button = TagFilterButton( self._display_box, message, single_tag_filter, label_prefix = 'tags shown: ' )
message = 'This filters which tags will show on \'selection\' file views such as the \'selection tags\' list on regular search pages.'
self._selection_tag_filter_button = TagFilterButton( self._display_box, message, selection_tag_filter, label_prefix = 'tags shown: ' )
#
self._tao_box = ClientGUICommon.StaticBox( self, 'autocomplete' )
self._tag_autocomplete_options_panel = EditTagAutocompleteOptionsPanel( self._tao_box, tag_autocomplete_options )
#
rows = []
rows.append( ( 'Tag filter for single file views: ', self._single_tag_filter_button ) )
rows.append( ( 'Tag filter for multiple file views: ', self._selection_tag_filter_button ) )
gridbox = ClientGUICommon.WrapInGrid( self._display_box, rows )
self._display_box.Add( gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
#
self._tao_box.Add( self._tag_autocomplete_options_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
#
vbox = QP.VBoxLayout()
if self._service_key == CC.COMBINED_TAG_SERVICE_KEY:
message = 'These options apply to all tag services, or to where the tag domain is "all known tags".'
message += os.linesep * 2
message += 'This tag domain is the union of all other services, so it can be more computationally expensive. You most often see it on new search pages.'
else:
message = 'This is just one tag service. You most often search a specific tag service in the manage tags dialog.'
st = ClientGUICommon.BetterStaticText( self, message )
st.setWordWrap( True )
QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
QP.AddToLayout( vbox, self._display_box, CC.FLAGS_EXPAND_PERPENDICULAR )
QP.AddToLayout( vbox, self._tao_box, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.addStretch( 1 )
self.setLayout( vbox )
def GetValue( self ):
tag_display_types_to_tag_filters = {}
tag_display_types_to_tag_filters[ ClientTags.TAG_DISPLAY_SINGLE_MEDIA ] = self._single_tag_filter_button.GetValue()
tag_display_types_to_tag_filters[ ClientTags.TAG_DISPLAY_SELECTION_LIST ] = self._selection_tag_filter_button.GetValue()
tag_autocomplete_options = self._tag_autocomplete_options_panel.GetValue()
return ( self._service_key, tag_display_types_to_tag_filters, tag_autocomplete_options )
class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a HydrusTags.TagFilter.
    
    Offers 'whitelist' and 'blacklist' simple tabs plus an 'advanced' tab. The
    advanced blacklist/whitelist listboxes are the single source of truth; the
    simple tabs are projections of them (see _UpdateStatus).
    """
    # prompts for the live tag-test box when it is empty
    TEST_RESULT_DEFAULT = 'Enter a tag here to test if it passes the current filter:'
    TEST_RESULT_BLACKLIST_DEFAULT = 'Enter a tag here to test if it passes the blacklist (siblings tested, unnamespaced rules match namespaced tags):'
    def __init__( self, parent, tag_filter, only_show_blacklist = False, namespaces = None, message = None ):
        # only_show_blacklist: start with only the blacklist tab visible
        # namespaces: namespaces offered as checkboxes on the simple tabs
        #   NOTE(review): stored as-is and iterated in _InitBlacklistPanel /
        #   _InitWhitelistPanel -- presumably callers always pass an iterable
        #   when this panel is built; a None would raise there. Confirm.
        # message: optional explanatory text shown at the top of the panel
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        self._only_show_blacklist = only_show_blacklist
        self._namespaces = namespaces
        # common wildcard spellings users type, mapped to the internal slice forms
        # ( '' = all unnamespaced, ':' = all namespaced )
        self._wildcard_replacements = {}
        self._wildcard_replacements[ '*' ] = ''
        self._wildcard_replacements[ '*:' ] = ':'
        self._wildcard_replacements[ '*:*' ] = ':'
        #
        help_button = ClientGUICommon.BetterBitmapButton( self, CC.global_pixmaps().help, self._ShowHelp )
        help_hbox = ClientGUICommon.WrapInText( help_button, self, 'help for this panel -->', QG.QColor( 0, 0, 255 ) )
        #
        # favourite-filter management buttons
        self._import_favourite = ClientGUICommon.BetterButton( self, 'import', self._ImportFavourite )
        self._export_favourite = ClientGUICommon.BetterButton( self, 'export', self._ExportFavourite )
        self._load_favourite = ClientGUICommon.BetterButton( self, 'load', self._LoadFavourite )
        self._save_favourite = ClientGUICommon.BetterButton( self, 'save', self._SaveFavourite )
        self._delete_favourite = ClientGUICommon.BetterButton( self, 'delete', self._DeleteFavourite )
        #
        self._show_all_panels_button = ClientGUICommon.BetterButton( self, 'show other panels', self._ShowAllPanels )
        self._show_all_panels_button.setToolTip( 'This shows the whitelist and advanced panels, in case you want to craft a clever blacklist with \'except\' rules.' )
        # the escape hatch is only offered to advanced users in blacklist-only mode
        show_the_button = self._only_show_blacklist and HG.client_controller.new_options.GetBoolean( 'advanced_mode' )
        self._show_all_panels_button.setVisible( show_the_button )
        #
        self._notebook = ClientGUICommon.BetterNotebook( self )
        #
        # advanced panel first: it creates the listboxes the simple panels project
        self._advanced_panel = self._InitAdvancedPanel()
        self._whitelist_panel = self._InitWhitelistPanel()
        self._blacklist_panel = self._InitBlacklistPanel()
        #
        if self._only_show_blacklist:
            self._whitelist_panel.setVisible( False )
            self._notebook.addTab( self._blacklist_panel, 'blacklist' )
            self._advanced_panel.setVisible( False )
        else:
            self._notebook.addTab( self._whitelist_panel, 'whitelist' )
            self._notebook.addTab( self._blacklist_panel, 'blacklist' )
            self._notebook.addTab( self._advanced_panel, 'advanced' )
        #
        # transient redundancy warning, current-filter summary, and live test widgets
        self._redundant_st = ClientGUICommon.BetterStaticText( self, '', ellipsize_end = True )
        self._current_filter_st = ClientGUICommon.BetterStaticText( self, 'currently keeping: ', ellipsize_end = True )
        self._test_result_st = ClientGUICommon.BetterStaticText( self, self.TEST_RESULT_DEFAULT )
        self._test_result_st.setAlignment( QC.Qt.AlignVCenter | QC.Qt.AlignRight )
        self._test_result_st.setWordWrap( True )
        self._test_input = QW.QPlainTextEdit( self )
        #
        vbox = QP.VBoxLayout()
        QP.AddToLayout( vbox, help_hbox, CC.FLAGS_ON_RIGHT )
        if message is not None:
            st = ClientGUICommon.BetterStaticText( self, message )
            st.setWordWrap( True )
            QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
        hbox = QP.HBoxLayout()
        QP.AddToLayout( hbox, self._import_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( hbox, self._export_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( hbox, self._load_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( hbox, self._save_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( hbox, self._delete_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( vbox, hbox, CC.FLAGS_ON_RIGHT )
        QP.AddToLayout( vbox, self._show_all_panels_button, CC.FLAGS_ON_RIGHT )
        QP.AddToLayout( vbox, self._notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( vbox, self._redundant_st, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._current_filter_st, CC.FLAGS_EXPAND_PERPENDICULAR )
        test_text_vbox = QP.VBoxLayout()
        QP.AddToLayout( test_text_vbox, self._test_result_st, CC.FLAGS_EXPAND_PERPENDICULAR )
        hbox = QP.HBoxLayout()
        QP.AddToLayout( hbox, test_text_vbox, CC.FLAGS_CENTER_PERPENDICULAR_EXPAND_DEPTH )
        QP.AddToLayout( hbox, self._test_input, CC.FLAGS_CENTER_PERPENDICULAR_EXPAND_DEPTH )
        QP.AddToLayout( vbox, hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        self.widget().setLayout( vbox )
        #
        # all edits funnel into _UpdateStatus, which refreshes every view
        self._advanced_blacklist.listBoxChanged.connect( self._UpdateStatus )
        self._advanced_whitelist.listBoxChanged.connect( self._UpdateStatus )
        self._simple_whitelist_global_checkboxes.clicked.connect( self.EventSimpleWhitelistGlobalCheck )
        self._simple_whitelist_namespace_checkboxes.clicked.connect( self.EventSimpleWhitelistNamespaceCheck )
        self._simple_blacklist_global_checkboxes.clicked.connect( self.EventSimpleBlacklistGlobalCheck )
        self._simple_blacklist_namespace_checkboxes.clicked.connect( self.EventSimpleBlacklistNamespaceCheck )
        self._test_input.textChanged.connect( self._UpdateTest )
        self.SetValue( tag_filter )
def _AdvancedAddBlacklist( self, tag_slice ):
tag_slice = self._CleanTagSliceInput( tag_slice )
if tag_slice in self._advanced_blacklist.GetTagSlices():
self._advanced_blacklist.RemoveTagSlices( ( tag_slice, ) )
else:
self._advanced_whitelist.RemoveTagSlices( ( tag_slice, ) )
if self._CurrentlyBlocked( tag_slice ):
self._ShowRedundantError( HydrusTags.ConvertTagSliceToString( tag_slice ) + ' is already blocked by a broader rule!' )
self._advanced_blacklist.AddTagSlices( ( tag_slice, ) )
self._UpdateStatus()
def _AdvancedAddBlacklistButton( self ):
tag_slice = self._advanced_blacklist_input.GetValue()
self._AdvancedAddBlacklist( tag_slice )
def _AdvancedAddBlacklistMultiple( self, tag_slices ):
for tag_slice in tag_slices:
self._AdvancedAddBlacklist( tag_slice )
def _AdvancedAddWhitelist( self, tag_slice ):
tag_slice = self._CleanTagSliceInput( tag_slice )
if tag_slice in self._advanced_whitelist.GetTagSlices():
self._advanced_whitelist.RemoveTagSlices( ( tag_slice, ) )
else:
self._advanced_blacklist.RemoveTagSlices( ( tag_slice, ) )
# if it is still blocked after that, it needs whitelisting explicitly
if not self._CurrentlyBlocked( tag_slice ) and tag_slice not in ( '', ':' ):
self._ShowRedundantError( HydrusTags.ConvertTagSliceToString( tag_slice ) + ' is already permitted by a broader rule!' )
self._advanced_whitelist.AddTagSlices( ( tag_slice, ) )
self._UpdateStatus()
def _AdvancedAddWhitelistButton( self ):
tag_slice = self._advanced_whitelist_input.GetValue()
self._AdvancedAddWhitelist( tag_slice )
def _AdvancedAddWhitelistMultiple( self, tag_slices ):
for tag_slice in tag_slices:
self._AdvancedAddWhitelist( tag_slice )
def _AdvancedBlacklistEverything( self ):
self._advanced_blacklist.SetTagSlices( [] )
self._advanced_whitelist.RemoveTagSlices( ( '', ':' ) )
self._advanced_blacklist.AddTagSlices( ( '', ':' ) )
self._UpdateStatus()
def _AdvancedDeleteBlacklist( self ):
selected_tag_slices = self._advanced_blacklist.GetSelectedTagSlices()
if len( selected_tag_slices ) > 0:
result = ClientGUIDialogsQuick.GetYesNo( self, 'Remove all selected?' )
if result == QW.QDialog.Accepted:
self._advanced_blacklist.RemoveTagSlices( selected_tag_slices )
self._UpdateStatus()
def _AdvancedDeleteWhitelist( self ):
selected_tag_slices = self._advanced_whitelist.GetSelectedTagSlices()
if len( selected_tag_slices ) > 0:
result = ClientGUIDialogsQuick.GetYesNo( self, 'Remove all selected?' )
if result == QW.QDialog.Accepted:
self._advanced_whitelist.RemoveTagSlices( selected_tag_slices )
self._UpdateStatus()
def _CleanTagSliceInput( self, tag_slice ):
tag_slice = tag_slice.lower().strip()
while '**' in tag_slice:
tag_slice = tag_slice.replace( '**', '*' )
if tag_slice in self._wildcard_replacements:
tag_slice = self._wildcard_replacements[ tag_slice ]
if ':' in tag_slice:
( namespace, subtag ) = HydrusTags.SplitTag( tag_slice )
if subtag == '*':
tag_slice = '{}:'.format( namespace )
return tag_slice
def _CurrentlyBlocked( self, tag_slice ):
if tag_slice in ( '', ':' ):
test_slices = { tag_slice }
elif tag_slice.count( ':' ) == 1 and tag_slice.endswith( ':' ):
test_slices = { ':', tag_slice }
elif ':' in tag_slice:
( ns, st ) = HydrusTags.SplitTag( tag_slice )
test_slices = { ':', ns + ':', tag_slice }
else:
test_slices = { '', tag_slice }
blacklist = set( self._advanced_blacklist.GetTagSlices() )
return not blacklist.isdisjoint( test_slices )
    def _DeleteFavourite( self ):
        # Pop a menu of saved favourite tag filters; choosing one deletes it
        # (after a confirmation dialog).
        def do_it( name ):
            # re-fetch on activation so we act on the current saved state
            names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
            if name in names_to_tag_filters:
                message = 'Delete "{}"?'.format( name )
                result = ClientGUIDialogsQuick.GetYesNo( self, message )
                if result != QW.QDialog.Accepted:
                    return
                del names_to_tag_filters[ name ]
                HG.client_controller.new_options.SetFavouriteTagFilters( names_to_tag_filters )
        names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
        menu = QW.QMenu()
        if len( names_to_tag_filters ) == 0:
            ClientGUIMenus.AppendMenuLabel( menu, 'no favourites set!' )
        else:
            for ( name, tag_filter ) in names_to_tag_filters.items():
                ClientGUIMenus.AppendMenuItem( menu, name, 'delete {}'.format( name ), do_it, name )
        CGC.core().PopupMenu( self, menu )
def _ExportFavourite( self ):
names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
menu = QW.QMenu()
if len( names_to_tag_filters ) == 0:
ClientGUIMenus.AppendMenuLabel( menu, 'no favourites set!' )
else:
for ( name, tag_filter ) in names_to_tag_filters.items():
ClientGUIMenus.AppendMenuItem( menu, name, 'load {}'.format( name ), HG.client_controller.pub, 'clipboard', 'text', tag_filter.DumpToString() )
CGC.core().PopupMenu( self, menu )
def _GetWhiteBlacklistsPossible( self ):
blacklist_tag_slices = self._advanced_blacklist.GetTagSlices()
whitelist_tag_slices = self._advanced_whitelist.GetTagSlices()
blacklist_is_only_simples = set( blacklist_tag_slices ).issubset( { '', ':' } )
nothing_is_whitelisted = len( whitelist_tag_slices ) == 0
whitelist_possible = blacklist_is_only_simples
blacklist_possible = nothing_is_whitelisted
return ( whitelist_possible, blacklist_possible )
    def _ImportFavourite( self ):
        # Import a serialised TagFilter from the clipboard, save it as a named
        # favourite, and load it into the panel.
        try:
            raw_text = HG.client_controller.GetClipboardText()
        except HydrusExceptions.DataMissing as e:
            QW.QMessageBox.critical( self, 'Error', str(e) )
            return
        try:
            obj = HydrusSerialisable.CreateFromString( raw_text )
        except Exception as e:
            QW.QMessageBox.critical( self, 'Error', 'I could not understand what was in the clipboard' )
            return
        if not isinstance( obj, HydrusTags.TagFilter ):
            QW.QMessageBox.critical( self, 'Error', 'That object was not a Tag Filter! It seemed to be a "{}".'.format(type(obj)) )
            return
        tag_filter = obj
        with ClientGUIDialogs.DialogTextEntry( self, 'Enter a name for the favourite.' ) as dlg:
            if dlg.exec() == QW.QDialog.Accepted:
                names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
                name = dlg.GetValue()
                if name in names_to_tag_filters:
                    message = '"{}" already exists! Overwrite?'.format( name )
                    result = ClientGUIDialogsQuick.GetYesNo( self, message )
                    # a declined overwrite abandons the whole import
                    if result != QW.QDialog.Accepted:
                        return
                names_to_tag_filters[ name ] = tag_filter
                HG.client_controller.new_options.SetFavouriteTagFilters( names_to_tag_filters )
                # also apply the imported filter to this panel
                self.SetValue( tag_filter )
    def _InitAdvancedPanel( self ):
        # Build the advanced tab: an 'exclude these' blacklist beside an
        # 'except for these' whitelist, each with its own input and buttons.
        # The two listboxes created here are the panel's source of truth.
        advanced_panel = QW.QWidget( self._notebook )
        #
        blacklist_panel = ClientGUICommon.StaticBox( advanced_panel, 'exclude these' )
        self._advanced_blacklist = ClientGUIListBoxes.ListBoxTagsFilter( blacklist_panel )
        self._advanced_blacklist_input = ClientGUIControls.TextAndPasteCtrl( blacklist_panel, self._AdvancedAddBlacklistMultiple, allow_empty_input = True )
        add_blacklist_button = ClientGUICommon.BetterButton( blacklist_panel, 'add', self._AdvancedAddBlacklistButton )
        delete_blacklist_button = ClientGUICommon.BetterButton( blacklist_panel, 'delete', self._AdvancedDeleteBlacklist )
        blacklist_everything_button = ClientGUICommon.BetterButton( blacklist_panel, 'block everything', self._AdvancedBlacklistEverything )
        #
        whitelist_panel = ClientGUICommon.StaticBox( advanced_panel, 'except for these' )
        self._advanced_whitelist = ClientGUIListBoxes.ListBoxTagsFilter( whitelist_panel )
        self._advanced_whitelist_input = ClientGUIControls.TextAndPasteCtrl( whitelist_panel, self._AdvancedAddWhitelistMultiple, allow_empty_input = True )
        # kept as an attribute so _UpdateStatus can enable/disable it
        self._advanced_add_whitelist_button = ClientGUICommon.BetterButton( whitelist_panel, 'add', self._AdvancedAddWhitelistButton )
        delete_whitelist_button = ClientGUICommon.BetterButton( whitelist_panel, 'delete', self._AdvancedDeleteWhitelist )
        #
        button_hbox = QP.HBoxLayout()
        QP.AddToLayout( button_hbox, self._advanced_blacklist_input, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( button_hbox, add_blacklist_button, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( button_hbox, delete_blacklist_button, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( button_hbox, blacklist_everything_button, CC.FLAGS_CENTER_PERPENDICULAR )
        blacklist_panel.Add( self._advanced_blacklist, CC.FLAGS_EXPAND_BOTH_WAYS )
        blacklist_panel.Add( button_hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        #
        button_hbox = QP.HBoxLayout()
        QP.AddToLayout( button_hbox, self._advanced_whitelist_input, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( button_hbox, self._advanced_add_whitelist_button, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( button_hbox, delete_whitelist_button, CC.FLAGS_CENTER_PERPENDICULAR )
        whitelist_panel.Add( self._advanced_whitelist, CC.FLAGS_EXPAND_BOTH_WAYS )
        whitelist_panel.Add( button_hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        #
        hbox = QP.HBoxLayout()
        QP.AddToLayout( hbox, blacklist_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( hbox, whitelist_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        advanced_panel.setLayout( hbox )
        return advanced_panel
    def _InitBlacklistPanel( self ):
        # Build the simple blacklist tab: global and per-namespace checkboxes on
        # the left, the projected blacklist listbox and an input on the right.
        blacklist_panel = QW.QWidget( self._notebook )
        #
        self._simple_blacklist_error_st = ClientGUICommon.BetterStaticText( blacklist_panel )
        self._simple_blacklist_global_checkboxes = QP.CheckListBox( blacklist_panel )
        # client data is the internal slice: '' = unnamespaced, ':' = namespaced
        self._simple_blacklist_global_checkboxes.Append( 'unnamespaced tags', '' )
        self._simple_blacklist_global_checkboxes.Append( 'namespaced tags', ':' )
        self._simple_blacklist_namespace_checkboxes = QP.CheckListBox( blacklist_panel )
        # NOTE(review): assumes self._namespaces is iterable -- a None ctor arg
        # would raise here; confirm callers
        for namespace in self._namespaces:
            if namespace == '':
                continue
            self._simple_blacklist_namespace_checkboxes.Append( namespace, namespace + ':' )
        self._simple_blacklist = ClientGUIListBoxes.ListBoxTagsFilter( blacklist_panel )
        self._simple_blacklist_input = ClientGUIControls.TextAndPasteCtrl( blacklist_panel, self._SimpleAddBlacklistMultiple, allow_empty_input = True )
        #
        left_vbox = QP.VBoxLayout()
        QP.AddToLayout( left_vbox, self._simple_blacklist_global_checkboxes, CC.FLAGS_EXPAND_PERPENDICULAR )
        left_vbox.addStretch( 1 )
        QP.AddToLayout( left_vbox, self._simple_blacklist_namespace_checkboxes, CC.FLAGS_EXPAND_PERPENDICULAR )
        right_vbox = QP.VBoxLayout()
        QP.AddToLayout( right_vbox, self._simple_blacklist, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( right_vbox, self._simple_blacklist_input, CC.FLAGS_EXPAND_PERPENDICULAR )
        main_hbox = QP.HBoxLayout()
        QP.AddToLayout( main_hbox, left_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( main_hbox, right_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox = QP.VBoxLayout()
        QP.AddToLayout( vbox, self._simple_blacklist_error_st, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, main_hbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        blacklist_panel.setLayout( vbox )
        # double-click removals route back through the advanced toggle logic
        self._simple_blacklist.tagsRemoved.connect( self._SimpleBlacklistRemoved )
        return blacklist_panel
    def _InitWhitelistPanel( self ):
        # Build the simple whitelist tab: mirror of _InitBlacklistPanel but for
        # the whitelist projection.
        whitelist_panel = QW.QWidget( self._notebook )
        #
        self._simple_whitelist_error_st = ClientGUICommon.BetterStaticText( whitelist_panel )
        self._simple_whitelist_global_checkboxes = QP.CheckListBox( whitelist_panel )
        # client data is the internal slice: '' = unnamespaced, ':' = namespaced
        self._simple_whitelist_global_checkboxes.Append( 'unnamespaced tags', '' )
        self._simple_whitelist_global_checkboxes.Append( 'namespaced tags', ':' )
        self._simple_whitelist_namespace_checkboxes = QP.CheckListBox( whitelist_panel )
        # NOTE(review): assumes self._namespaces is iterable -- a None ctor arg
        # would raise here; confirm callers
        for namespace in self._namespaces:
            if namespace == '':
                continue
            self._simple_whitelist_namespace_checkboxes.Append( namespace, namespace + ':' )
        self._simple_whitelist = ClientGUIListBoxes.ListBoxTagsFilter( whitelist_panel )
        self._simple_whitelist_input = ClientGUIControls.TextAndPasteCtrl( whitelist_panel, self._SimpleAddWhitelistMultiple, allow_empty_input = True )
        #
        left_vbox = QP.VBoxLayout()
        QP.AddToLayout( left_vbox, self._simple_whitelist_global_checkboxes, CC.FLAGS_EXPAND_PERPENDICULAR )
        left_vbox.addStretch( 1 )
        QP.AddToLayout( left_vbox, self._simple_whitelist_namespace_checkboxes, CC.FLAGS_EXPAND_PERPENDICULAR )
        right_vbox = QP.VBoxLayout()
        QP.AddToLayout( right_vbox, self._simple_whitelist, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( right_vbox, self._simple_whitelist_input, CC.FLAGS_EXPAND_PERPENDICULAR )
        main_hbox = QP.HBoxLayout()
        QP.AddToLayout( main_hbox, left_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( main_hbox, right_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox = QP.VBoxLayout()
        QP.AddToLayout( vbox, self._simple_whitelist_error_st, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, main_hbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        whitelist_panel.setLayout( vbox )
        # double-click removals route back through the advanced toggle logic
        self._simple_whitelist.tagsRemoved.connect( self._SimpleWhitelistRemoved )
        return whitelist_panel
def _LoadFavourite( self ):
names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
menu = QW.QMenu()
if len( names_to_tag_filters ) == 0:
ClientGUIMenus.AppendMenuLabel( menu, 'no favourites set!' )
else:
for ( name, tag_filter ) in names_to_tag_filters.items():
ClientGUIMenus.AppendMenuItem( menu, name, 'load {}'.format( name ), self.SetValue, tag_filter )
CGC.core().PopupMenu( self, menu )
    def _SaveFavourite( self ):
        # Save the panel's current filter as a named favourite, asking before
        # overwriting an existing name.
        with ClientGUIDialogs.DialogTextEntry( self, 'Enter a name for the favourite.' ) as dlg:
            if dlg.exec() == QW.QDialog.Accepted:
                names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
                name = dlg.GetValue()
                tag_filter = self.GetValue()
                if name in names_to_tag_filters:
                    message = '"{}" already exists! Overwrite?'.format( name )
                    result = ClientGUIDialogsQuick.GetYesNo( self, message )
                    if result != QW.QDialog.Accepted:
                        return
                names_to_tag_filters[ name ] = tag_filter
                HG.client_controller.new_options.SetFavouriteTagFilters( names_to_tag_filters )
def _ShowAllPanels( self ):
self._whitelist_panel.setVisible( True )
self._advanced_panel.setVisible( True )
self._notebook.addTab( self._whitelist_panel, 'whitelist' )
self._notebook.addTab( self._advanced_panel, 'advanced' )
self._show_all_panels_button.setVisible( False )
def _ShowHelp( self ):
help = 'Here you can set rules to filter tags for one purpose or another. The default is typically to permit all tags. Check the current filter summary text at the bottom-left of the panel to ensure you have your logic correct.'
help += os.linesep * 2
help += 'The different tabs are multiple ways of looking at the filter--sometimes it is more useful to think about a filter as a whitelist (where only the listed contents are kept) or a blacklist (where everything _except_ the listed contents are kept), and there is also an advanced tab that lets you do a more complicated combination of the two.'
help += os.linesep * 2
help += 'As well as selecting broader categories of tags with the checkboxes, you can type or paste the individual tags directly--just hit enter to add each one--and double-click an existing entry in a list to remove it.'
help += os.linesep * 2
help += 'If you wish to manually type a special tag, use these shorthands:'
help += os.linesep * 2
help += '"namespace:" - all instances of that namespace'
help += os.linesep
help += '":" - all namespaced tags'
help += os.linesep
help += '"" (i.e. an empty string) - all unnamespaced tags'
QW.QMessageBox.information( self, 'Information', help )
def _ShowRedundantError( self, text ):
self._redundant_st.setText( text )
HG.client_controller.CallLaterQtSafe( self._redundant_st, 2, self._redundant_st.setText, '' )
def _SimpleAddBlacklistMultiple( self, tag_slices ):
for tag_slice in tag_slices:
self._AdvancedAddBlacklist( tag_slice )
def _SimpleAddWhitelistMultiple( self, tag_slices ):
for tag_slice in tag_slices:
if tag_slice in ( '', ':' ) and tag_slice in self._simple_whitelist.GetTagSlices():
self._AdvancedAddBlacklist( tag_slice )
else:
self._AdvancedAddWhitelist( tag_slice )
def _SimpleBlacklistRemoved( self, tag_slices ):
for tag_slice in tag_slices:
self._AdvancedAddBlacklist( tag_slice )
    def _SimpleBlacklistReset( self ):
        # Intentional no-op: kept as a callback target for the simple blacklist tab.
        pass
def _SimpleWhitelistRemoved( self, tag_slices ):
tag_slices = set( tag_slices )
for simple in ( '', ':' ):
if simple in tag_slices:
tag_slices.discard( simple )
self._AdvancedAddBlacklist( simple )
for tag_slice in tag_slices:
self._AdvancedAddWhitelist( tag_slice )
    def _SimpleWhitelistReset( self ):
        # Intentional no-op: kept as a callback target for the simple whitelist tab.
        pass
    def _UpdateStatus( self ):
        # Re-project the advanced lists onto the simple tabs, refresh widget
        # enabled-state, and update the summary text and live test result.
        ( whitelist_possible, blacklist_possible ) = self._GetWhiteBlacklistsPossible()
        whitelist_tag_slices = self._advanced_whitelist.GetTagSlices()
        blacklist_tag_slices = self._advanced_blacklist.GetTagSlices()
        if whitelist_possible:
            self._simple_whitelist_error_st.clear()
            self._simple_whitelist.setEnabled( True )
            self._simple_whitelist_global_checkboxes.setEnabled( True )
            self._simple_whitelist_input.setEnabled( True )
            whitelist_tag_slices = set( whitelist_tag_slices )
            # the global slices count as whitelisted whenever they are not blocked
            if not self._CurrentlyBlocked( '' ):
                whitelist_tag_slices.add( '' )
            if not self._CurrentlyBlocked( ':' ):
                whitelist_tag_slices.add( ':' )
                # every namespace is already permitted, so per-namespace boxes are moot
                self._simple_whitelist_namespace_checkboxes.setEnabled( False )
            else:
                self._simple_whitelist_namespace_checkboxes.setEnabled( True )
            self._simple_whitelist.SetTagSlices( whitelist_tag_slices )
            # sync checkbox state with the projected whitelist
            for index in range( self._simple_whitelist_global_checkboxes.count() ):
                check = QP.GetClientData( self._simple_whitelist_global_checkboxes, index ) in whitelist_tag_slices
                self._simple_whitelist_global_checkboxes.Check( index, check )
            for index in range( self._simple_whitelist_namespace_checkboxes.count() ):
                check = QP.GetClientData( self._simple_whitelist_namespace_checkboxes, index ) in whitelist_tag_slices
                self._simple_whitelist_namespace_checkboxes.Check( index, check )
        else:
            # the filter cannot be expressed as a whitelist: grey the tab out
            self._simple_whitelist_error_st.setText( 'The filter is currently more complicated than a simple whitelist, so cannot be shown here.' )
            self._simple_whitelist.setEnabled( False )
            self._simple_whitelist_global_checkboxes.setEnabled( False )
            self._simple_whitelist_namespace_checkboxes.setEnabled( False )
            self._simple_whitelist_input.setEnabled( False )
            # '' iterates to nothing, so this clears the listbox
            self._simple_whitelist.SetTagSlices( '' )
            for index in range( self._simple_whitelist_global_checkboxes.count() ):
                self._simple_whitelist_global_checkboxes.Check( index, False )
            for index in range( self._simple_whitelist_namespace_checkboxes.count() ):
                self._simple_whitelist_namespace_checkboxes.Check( index, False )
        #
        whitelist_tag_slices = self._advanced_whitelist.GetTagSlices()
        blacklist_tag_slices = self._advanced_blacklist.GetTagSlices()
        if blacklist_possible:
            self._simple_blacklist_error_st.clear()
            self._simple_blacklist.setEnabled( True )
            self._simple_blacklist_global_checkboxes.setEnabled( True )
            self._simple_blacklist_input.setEnabled( True )
            if self._CurrentlyBlocked( ':' ):
                # everything namespaced is already blocked, so per-namespace boxes are moot
                self._simple_blacklist_namespace_checkboxes.setEnabled( False )
            else:
                self._simple_blacklist_namespace_checkboxes.setEnabled( True )
            self._simple_blacklist.SetTagSlices( blacklist_tag_slices )
            # sync checkbox state with the projected blacklist
            for index in range( self._simple_blacklist_global_checkboxes.count() ):
                check = QP.GetClientData( self._simple_blacklist_global_checkboxes, index ) in blacklist_tag_slices
                self._simple_blacklist_global_checkboxes.Check( index, check )
            for index in range( self._simple_blacklist_namespace_checkboxes.count() ):
                check = QP.GetClientData( self._simple_blacklist_namespace_checkboxes, index ) in blacklist_tag_slices
                self._simple_blacklist_namespace_checkboxes.Check( index, check )
        else:
            # the filter cannot be expressed as a blacklist: grey the tab out
            self._simple_blacklist_error_st.setText( 'The filter is currently more complicated than a simple blacklist, so cannot be shown here.' )
            self._simple_blacklist.setEnabled( False )
            self._simple_blacklist_global_checkboxes.setEnabled( False )
            self._simple_blacklist_namespace_checkboxes.setEnabled( False )
            self._simple_blacklist_input.setEnabled( False )
            self._simple_blacklist.SetTagSlices( '' )
            for index in range( self._simple_blacklist_global_checkboxes.count() ):
                self._simple_blacklist_global_checkboxes.Check( index, False )
            for index in range( self._simple_blacklist_namespace_checkboxes.count() ):
                self._simple_blacklist_namespace_checkboxes.Check( index, False )
        #
        whitelist_tag_slices = self._advanced_whitelist.GetTagSlices()
        blacklist_tag_slices = self._advanced_blacklist.GetTagSlices()
        # 'except' rules only make sense once something is blacklisted
        if len( blacklist_tag_slices ) == 0:
            self._advanced_whitelist_input.setEnabled( False )
            self._advanced_add_whitelist_button.setEnabled( False )
        else:
            self._advanced_whitelist_input.setEnabled( True )
            self._advanced_add_whitelist_button.setEnabled( True )
        #
        tag_filter = self.GetValue()
        if self._only_show_blacklist:
            pretty_tag_filter = tag_filter.ToBlacklistString()
        else:
            pretty_tag_filter = 'currently keeping: {}'.format( tag_filter.ToPermittedString() )
        self._current_filter_st.setText( pretty_tag_filter )
        self._UpdateTest()
    def _UpdateTest( self ):
        # Refresh the live test readout for whatever tags are typed into the test box.
        test_input = self._test_input.toPlainText()
        if test_input == '':
            # no input: show the explanatory prompt
            if self._only_show_blacklist:
                test_result_text = self.TEST_RESULT_BLACKLIST_DEFAULT
            else:
                test_result_text = self.TEST_RESULT_DEFAULT
            # the object name drives the stylesheet colour; re-polish to apply it
            self._test_result_st.setObjectName( '' )
            self._test_result_st.setText( test_result_text )
            self._test_result_st.style().polish( self._test_result_st )
        else:
            test_tags = HydrusText.DeserialiseNewlinedTexts( test_input )
            test_tags = HydrusTags.CleanTags( test_tags )
            tag_filter = self.GetValue()
            self._test_result_st.setObjectName( '' )
            self._test_result_st.clear()
            self._test_result_st.style().polish( self._test_result_st )
            if self._only_show_blacklist:
                # blacklist mode tests sibling-mapped tags, with unnamespaced rules
                # applied to namespaced tags, matching the import pipeline's checks
                def work_callable():
                    results = []
                    tags_to_siblings = HG.client_controller.Read( 'tag_siblings_lookup', CC.COMBINED_TAG_SERVICE_KEY, test_tags )
                    for ( test_tag, siblings ) in tags_to_siblings.items():
                        results.append( False not in ( tag_filter.TagOK( sibling_tag, apply_unnamespaced_rules_to_namespaced_tags = True ) for sibling_tag in siblings ) )
                    return results
            else:
                def work_callable():
                    results = [ tag_filter.TagOK( test_tag ) for test_tag in test_tags ]
                    return results
            def publish_callable( results ):
                # summarise pass/block results and colour the label accordingly
                all_good = False not in results
                all_bad = True not in results
                if len( results ) == 1:
                    if all_good:
                        test_result_text = 'tag passes!'
                        self._test_result_st.setObjectName( 'HydrusValid' )
                    else:
                        test_result_text = 'tag blocked!'
                        self._test_result_st.setObjectName( 'HydrusInvalid' )
                else:
                    if all_good:
                        test_result_text = 'all pass!'
                        self._test_result_st.setObjectName( 'HydrusValid' )
                    elif all_bad:
                        test_result_text = 'all blocked!'
                        self._test_result_st.setObjectName( 'HydrusInvalid' )
                    else:
                        # mixed result: report counts of each
                        c = collections.Counter()
                        c.update( results )
                        test_result_text = '{} pass, {} blocked!'.format( HydrusData.ToHumanInt( c[ True ] ), HydrusData.ToHumanInt( c[ False ] ) )
                        self._test_result_st.setObjectName( 'HydrusInvalid' )
                self._test_result_st.setText( test_result_text )
                self._test_result_st.style().polish( self._test_result_st )
            # the sibling lookup hits the db, so run it off the Qt thread
            async_job = ClientGUIAsync.AsyncQtJob( self, work_callable, publish_callable )
            async_job.start()
def EventSimpleBlacklistNamespaceCheck( self, index ):
index = index.row()
if index != -1:
tag_slice = QP.GetClientData( self._simple_blacklist_namespace_checkboxes, index )
self._AdvancedAddBlacklist( tag_slice )
def EventSimpleBlacklistGlobalCheck( self, index ):
index = index.row()
if index != -1:
tag_slice = QP.GetClientData( self._simple_blacklist_global_checkboxes, index )
self._AdvancedAddBlacklist( tag_slice )
def EventSimpleWhitelistNamespaceCheck( self, index ):
index = index.row()
if index != -1:
tag_slice = QP.GetClientData( self._simple_whitelist_namespace_checkboxes, index )
self._AdvancedAddWhitelist( tag_slice )
def EventSimpleWhitelistGlobalCheck( self, index ):
index = index.row()
if index != -1:
tag_slice = QP.GetClientData( self._simple_whitelist_global_checkboxes, index )
if tag_slice in ( '', ':' ) and tag_slice in self._simple_whitelist.GetTagSlices():
self._AdvancedAddBlacklist( tag_slice )
else:
self._AdvancedAddWhitelist( tag_slice )
def GetValue( self ):
tag_filter = HydrusTags.TagFilter()
for tag_slice in self._advanced_blacklist.GetTagSlices():
tag_filter.SetRule( tag_slice, HC.FILTER_BLACKLIST )
for tag_slice in self._advanced_whitelist.GetTagSlices():
tag_filter.SetRule( tag_slice, HC.FILTER_WHITELIST )
return tag_filter
    def SetValue( self, tag_filter: HydrusTags.TagFilter ):
        # Load a tag filter into the advanced lists and select the simplest tab
        # that can represent it.
        blacklist_tag_slices = [ tag_slice for ( tag_slice, rule ) in tag_filter.GetTagSlicesToRules().items() if rule == HC.FILTER_BLACKLIST ]
        whitelist_tag_slices = [ tag_slice for ( tag_slice, rule ) in tag_filter.GetTagSlicesToRules().items() if rule == HC.FILTER_WHITELIST ]
        self._advanced_blacklist.SetTagSlices( blacklist_tag_slices )
        self._advanced_whitelist.SetTagSlices( whitelist_tag_slices )
        ( whitelist_possible, blacklist_possible ) = self._GetWhiteBlacklistsPossible()
        # ordered ( eligible, page ) pairs; the first eligible page wins
        selection_tests = []
        if self._only_show_blacklist:
            selection_tests.append( ( blacklist_possible, self._blacklist_panel ) )
        else:
            # prefer whitelist, then blacklist, then fall back to advanced
            selection_tests.append( ( whitelist_possible, self._whitelist_panel ) )
            selection_tests.append( ( blacklist_possible, self._blacklist_panel ) )
            selection_tests.append( ( True, self._advanced_panel ) )
        for ( test, page ) in selection_tests:
            if test:
                self._notebook.SelectPage( page )
                break
        self._UpdateStatus()
class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ):
def __init__( self, parent, file_service_key, media, immediate_commit = False, canvas_key = None ):
    # Dialog panel that manages tags for some media, with one notebook page per
    # real tag service. immediate_commit writes changes as they are made rather
    # than on dialog OK; canvas_key (optional) ties the panel to a media viewer.
    ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
    self._file_service_key = file_service_key
    self._immediate_commit = immediate_commit
    self._canvas_key = canvas_key
    # work on duplicates so edits here cannot mutate the caller's media objects
    media = ClientMedia.FlattenMedia( media )
    self._current_media = [ m.Duplicate() for m in media ]
    self._hashes = set()
    for m in self._current_media:
        self._hashes.update( m.GetHashes() )
    self._tag_repositories = ClientGUICommon.BetterNotebook( self )
    #
    services = HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES )
    default_tag_repository_key = HC.options[ 'default_tag_repository' ]
    for service in services:
        service_key = service.GetServiceKey()
        name = service.GetName()
        page = self._Panel( self._tag_repositories, self._file_service_key, service.GetServiceKey(), self._current_media, self._immediate_commit, canvas_key = self._canvas_key )
        # wire the page's tag-input navigation signals back up to this panel
        page._add_tag_box.selectUp.connect( self.EventSelectUp )
        page._add_tag_box.selectDown.connect( self.EventSelectDown )
        page._add_tag_box.showPrevious.connect( self.EventShowPrevious )
        page._add_tag_box.showNext.connect( self.EventShowNext )
        page.okSignal.connect( self.okSignal )
        select = service_key == default_tag_repository_key
        self._tag_repositories.addTab( page, name )
        if select: self._tag_repositories.setCurrentIndex( self._tag_repositories.count() - 1 )
    #
    vbox = QP.VBoxLayout()
    QP.AddToLayout( vbox, self._tag_repositories, CC.FLAGS_EXPAND_BOTH_WAYS )
    self.widget().setLayout( vbox )
    if self._canvas_key is not None:
        # follow the attached media viewer as it changes what it displays
        HG.client_controller.sub( self, 'CanvasHasNewMedia', 'canvas_new_display_media' )
    self._my_shortcut_handler = ClientGUIShortcuts.ShortcutsHandler( self, [ 'global', 'media', 'main_gui' ] )
    self._tag_repositories.currentChanged.connect( self.EventServiceChanged )
    self._SetSearchFocus()
def _GetGroupsOfServiceKeysToContentUpdates( self ):
    
    # Collect every non-empty group of queued content updates from all service
    # pages, each wrapped as its own { service_key : content_updates } dict.
    groups = []
    
    for page in self._tag_repositories.GetPages():
        
        ( service_key, groups_of_content_updates ) = page.GetGroupsOfContentUpdates()
        
        groups.extend(
            { service_key : content_updates }
            for content_updates in groups_of_content_updates
            if len( content_updates ) > 0
        )
        
    
    return groups
    
def _SetSearchFocus( self ):
    
    # push keyboard focus to the tag input of whichever service page is showing
    current_page = self._tag_repositories.currentWidget()
    
    if current_page is None:
        
        return
        
    
    current_page.SetTagBoxFocus()
    
def CanvasHasNewMedia( self, canvas_key, new_media_singleton ):
    
    # pubsub hook: the media viewer we are attached to now shows different media
    if canvas_key != self._canvas_key:
        
        return
        
    
    if new_media_singleton is None:
        
        return
        
    
    self._current_media = ( new_media_singleton.Duplicate(), )
    
    for page in self._tag_repositories.GetPages():
        
        page.SetMedia( self._current_media )
        
    
def CleanBeforeDestroy( self ):
    
    # let the base class clean up first, then every service page
    ClientGUIScrolledPanels.ManagePanel.CleanBeforeDestroy( self )
    
    for service_page in self._tag_repositories.GetPages():
        
        service_page.CleanBeforeDestroy()
        
    
def CommitChanges( self ):
    
    # write each queued group of updates synchronously, preserving group order
    for service_keys_to_content_updates in self._GetGroupsOfServiceKeysToContentUpdates():
        
        HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
        
    
def EventSelectDown( self ):
    # tag input signal: move to the next service page and refocus the input
    self._tag_repositories.SelectRight()
    self._SetSearchFocus()
def EventSelectUp( self ):
    # tag input signal: move to the previous service page and refocus the input
    self._tag_repositories.SelectLeft()
    self._SetSearchFocus()
def EventShowNext( self ):
    # tag input signal: ask the attached media viewer (if any) to show the next media
    if self._canvas_key is not None:
        HG.client_controller.pub( 'canvas_show_next', self._canvas_key )
def EventShowPrevious( self ):
    # tag input signal: ask the attached media viewer (if any) to show the previous media
    if self._canvas_key is not None:
        HG.client_controller.pub( 'canvas_show_previous', self._canvas_key )
def EventServiceChanged( self, index ):
    # notebook tab changed; refocus the new page's tag input
    if not self or not QP.isValid( self ): # actually did get a runtime error here, on some Linux WM dialog shutdown
        return
    if self.sender() != self._tag_repositories:
        return
    page = self._tag_repositories.currentWidget()
    if page is not None:
        # defer the focus call so it lands after the tab switch completes
        HG.client_controller.CallAfterQtSafe( page, page.SetTagBoxFocus )
def ProcessApplicationCommand( self, command: CAC.ApplicationCommand ):
    
    # Handle shortcut commands aimed at this dialog.
    # Returns True if the command was consumed.
    if not command.IsSimpleCommand():
        
        return False
        
    
    action = command.GetData()
    
    if action == CAC.SIMPLE_MANAGE_FILE_TAGS:
        
        self._OKParent()
        
        return True
        
    
    if action == CAC.SIMPLE_SET_SEARCH_FOCUS:
        
        self._SetSearchFocus()
        
        return True
        
    
    if action == CAC.SIMPLE_FOCUS_MEDIA_VIEWER:
        
        from hydrus.client.gui import ClientGUICanvasFrame
        
        # hand focus to the first canvas frame among our top-level parents, if any
        for tlw in ClientGUIFunctions.GetTLWParents( self ):
            
            if isinstance( tlw, ClientGUICanvasFrame.CanvasFrame ):
                
                tlw.TakeFocusForUser()
                
                return True
                
            
        
        return False
        
    
    return False
    
def UserIsOKToCancel( self ):
    
    # If the user has any uncommitted tag changes, confirm before allowing cancel.
    uncommitted = self._GetGroupsOfServiceKeysToContentUpdates()
    
    if len( uncommitted ) == 0:
        
        return True
        
    
    message = 'Are you sure you want to cancel? You have uncommitted changes that will be lost.'
    
    return ClientGUIDialogsQuick.GetYesNo( self, message ) == QW.QDialog.Accepted
    
class _Panel( QW.QWidget ):
okSignal = QC.Signal()
def __init__( self, parent, file_service_key, tag_service_key, media, immediate_commit, canvas_key = None ):
    # One notebook page: manages tags for the given media on a single tag service.
    QW.QWidget.__init__( self, parent )
    self._file_service_key = file_service_key
    self._tag_service_key = tag_service_key
    self._immediate_commit = immediate_commit
    self._canvas_key = canvas_key
    # groups of content updates queued for dialog commit (unused in immediate mode)
    self._groups_of_content_updates = []
    self._service = HG.client_controller.services_manager.GetService( self._tag_service_key )
    # local services add/delete directly; repositories pend/petition instead
    self._i_am_local_tag_service = self._service.GetServiceType() == HC.LOCAL_TAG
    self._tags_box_sorter = ClientGUIListBoxes.StaticBoxSorterForListBoxTags( self, 'tags', show_siblings_sort = True )
    self._tags_box = ClientGUIListBoxes.ListBoxTagsMediaTagsDialog( self._tags_box_sorter, self.EnterTags, self.RemoveTags )
    self._tags_box_sorter.SetTagsBox( self._tags_box )
    #
    self._new_options = HG.client_controller.new_options
    if self._i_am_local_tag_service:
        text = 'remove all/selected tags'
    else:
        text = 'petition to remove all/selected tags'
    self._remove_tags = ClientGUICommon.BetterButton( self._tags_box_sorter, text, self._RemoveTagsButton )
    self._copy_button = ClientGUICommon.BetterBitmapButton( self._tags_box_sorter, CC.global_pixmaps().copy, self._Copy )
    self._copy_button.setToolTip( 'Copy selected tags to the clipboard. If none are selected, copies all.' )
    self._paste_button = ClientGUICommon.BetterBitmapButton( self._tags_box_sorter, CC.global_pixmaps().paste, self._Paste )
    self._paste_button.setToolTip( 'Paste newline-separated tags from the clipboard into here.' )
    self._show_deleted = False
    # build the cog-button menu
    menu_items = []
    check_manager = ClientGUICommon.CheckboxManagerOptions( 'allow_remove_on_manage_tags_input' )
    menu_items.append( ( 'check', 'allow remove/petition result on tag input for already existing tag', 'If checked, inputting a tag that already exists will try to remove it.', check_manager ) )
    check_manager = ClientGUICommon.CheckboxManagerOptions( 'yes_no_on_remove_on_manage_tags' )
    menu_items.append( ( 'check', 'confirm remove/petition tags on explicit delete actions', 'If checked, clicking the remove/petition tags button (or hitting the deleted key on the list) will first confirm the action with a yes/no dialog.', check_manager ) )
    check_manager = ClientGUICommon.CheckboxManagerCalls( self._FlipShowDeleted, lambda: self._show_deleted )
    menu_items.append( ( 'check', 'show deleted', 'Show deleted tags, if any.', check_manager ) )
    menu_items.append( ( 'separator', 0, 0, 0 ) )
    menu_items.append( ( 'normal', 'migrate tags for these files', 'Migrate the tags for the files used to launch this manage tags panel.', self._MigrateTags ) )
    # admin-only entry for repositories we can moderate
    if not self._i_am_local_tag_service and self._service.HasPermission( HC.CONTENT_TYPE_ACCOUNTS, HC.PERMISSION_ACTION_MODERATE ):
        menu_items.append( ( 'separator', 0, 0, 0 ) )
        menu_items.append( ( 'normal', 'modify users who added the selected tags', 'Modify the users who added the selected tags.', self._ModifyMappers ) )
    self._cog_button = ClientGUIMenuButton.MenuBitmapButton( self._tags_box_sorter, CC.global_pixmaps().cog, menu_items )
    #
    self._add_tag_box = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.AddTags, self._file_service_key, self._tag_service_key, null_entry_callable = self.OK )
    self._tags_box.SetTagServiceKey( self._tag_service_key )
    self._suggested_tags = ClientGUITagSuggestions.SuggestedTagsPanel( self, self._tag_service_key, media, self.AddTags )
    self.SetMedia( media )
    # lay everything out
    button_hbox = QP.HBoxLayout()
    QP.AddToLayout( button_hbox, self._remove_tags, CC.FLAGS_CENTER_PERPENDICULAR )
    QP.AddToLayout( button_hbox, self._copy_button, CC.FLAGS_CENTER_PERPENDICULAR )
    QP.AddToLayout( button_hbox, self._paste_button, CC.FLAGS_CENTER_PERPENDICULAR )
    QP.AddToLayout( button_hbox, self._cog_button, CC.FLAGS_CENTER )
    self._tags_box_sorter.Add( button_hbox, CC.FLAGS_ON_RIGHT )
    vbox = QP.VBoxLayout()
    QP.AddToLayout( vbox, self._tags_box_sorter, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( vbox, self._add_tag_box )
    #
    hbox = QP.HBoxLayout()
    QP.AddToLayout( hbox, self._suggested_tags, CC.FLAGS_EXPAND_BOTH_WAYS_POLITE )
    QP.AddToLayout( hbox, vbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
    #
    self._my_shortcut_handler = ClientGUIShortcuts.ShortcutsHandler( self, [ 'global', 'main_gui' ] )
    self.setLayout( hbox )
    if self._immediate_commit:
        # in immediate mode, mirror external content updates into our media copies
        HG.client_controller.sub( self, 'ProcessContentUpdates', 'content_updates_gui' )
    self._suggested_tags.mouseActivationOccurred.connect( self.SetTagBoxFocus )
def _EnterTags( self, tags, only_add = False, only_remove = False, forced_reason = None ):
    # Work out what the given tags mean for the current media on this service
    # (add/delete for a local service, pend/petition/rescind for a repository),
    # resolve any ambiguity with the user, then apply the resulting content
    # updates (immediately, or queued for the dialog's commit).
    #
    # only_add/only_remove restrict which actions are offered; forced_reason
    # supplies a petition reason without prompting the user.
    tags = HydrusTags.CleanTags( tags )
    if not self._i_am_local_tag_service and self._service.HasPermission( HC.CONTENT_TYPE_MAPPINGS, HC.PERMISSION_ACTION_MODERATE ):
        forced_reason = 'admin'
    tags_managers = [ m.GetTagsManager() for m in self._media ]
    currents = [ tags_manager.GetCurrent( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) for tags_manager in tags_managers ]
    pendings = [ tags_manager.GetPending( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) for tags_manager in tags_managers ]
    petitioneds = [ tags_manager.GetPetitioned( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) for tags_manager in tags_managers ]
    num_files = len( self._media )
    # let's figure out what these tags can mean for the media--add, remove, or what?
    choices = collections.defaultdict( list )
    for tag in tags:
        num_current = sum( ( 1 for current in currents if tag in current ) )
        if self._i_am_local_tag_service:
            if not only_remove:
                if num_current < num_files:
                    num_non_current = num_files - num_current
                    choices[ HC.CONTENT_UPDATE_ADD ].append( ( tag, num_non_current ) )
            if not only_add:
                if num_current > 0:
                    choices[ HC.CONTENT_UPDATE_DELETE ].append( ( tag, num_current ) )
        else:
            num_pending = sum( ( 1 for pending in pendings if tag in pending ) )
            num_petitioned = sum( ( 1 for petitioned in petitioneds if tag in petitioned ) )
            if not only_remove:
                if num_current + num_pending < num_files:
                    num_pendable = num_files - ( num_current + num_pending )
                    choices[ HC.CONTENT_UPDATE_PEND ].append( ( tag, num_pendable ) )
            if not only_add:
                # note: previous 'and not only_add' clauses here were redundant--we are
                # already inside 'if not only_add'
                if num_current > num_petitioned:
                    num_petitionable = num_current - num_petitioned
                    choices[ HC.CONTENT_UPDATE_PETITION ].append( ( tag, num_petitionable ) )
                if num_pending > 0:
                    choices[ HC.CONTENT_UPDATE_RESCIND_PEND ].append( ( tag, num_pending ) )
            if not only_remove:
                if num_petitioned > 0:
                    choices[ HC.CONTENT_UPDATE_RESCIND_PETITION ].append( ( tag, num_petitioned ) )
    if len( choices ) == 0:
        return
    # now we have options, let's ask the user what they want to do
    if len( choices ) == 1:
        # unambiguous--no dialog needed
        [ ( choice_action, tag_counts ) ] = list( choices.items() )
        tags = { tag for ( tag, count ) in tag_counts }
    else:
        bdc_choices = []
        preferred_order = [ HC.CONTENT_UPDATE_ADD, HC.CONTENT_UPDATE_DELETE, HC.CONTENT_UPDATE_PEND, HC.CONTENT_UPDATE_RESCIND_PEND, HC.CONTENT_UPDATE_PETITION, HC.CONTENT_UPDATE_RESCIND_PETITION ]
        choice_text_lookup = {}
        choice_text_lookup[ HC.CONTENT_UPDATE_ADD ] = 'add'
        choice_text_lookup[ HC.CONTENT_UPDATE_DELETE ] = 'delete'
        choice_text_lookup[ HC.CONTENT_UPDATE_PEND ] = 'pend (add)'
        choice_text_lookup[ HC.CONTENT_UPDATE_PETITION ] = 'petition to remove'
        choice_text_lookup[ HC.CONTENT_UPDATE_RESCIND_PEND ] = 'undo pend'
        choice_text_lookup[ HC.CONTENT_UPDATE_RESCIND_PETITION ] = 'undo petition to remove'
        choice_tooltip_lookup = {}
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_ADD ] = 'this adds the tags to this local tag service'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_DELETE ] = 'this deletes the tags from this local tag service'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_PEND ] = 'this pends the tags to be added to this tag repository when you upload'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_PETITION ] = 'this petitions the tags for deletion from this tag repository when you upload'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_RESCIND_PEND ] = 'this rescinds the currently pending tags, so they will not be added'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_RESCIND_PETITION ] = 'this rescinds the current tag petitions, so they will not be deleted'
        for choice_action in preferred_order:
            if choice_action not in choices:
                continue
            choice_text_prefix = choice_text_lookup[ choice_action ]
            tag_counts = choices[ choice_action ]
            choice_tags = { tag for ( tag, count ) in tag_counts }
            if len( choice_tags ) == 1:
                [ ( tag, count ) ] = tag_counts
                text = '{} "{}" for {} files'.format( choice_text_prefix, HydrusText.ElideText( tag, 64 ), HydrusData.ToHumanInt( count ) )
            else:
                text = '{} {} tags'.format( choice_text_prefix, HydrusData.ToHumanInt( len( choice_tags ) ) )
            data = ( choice_action, choice_tags )
            # tooltip: the action description plus up to 25 per-tag file counts
            t_c_lines = [ choice_tooltip_lookup[ choice_action ] ]
            if len( tag_counts ) > 25:
                t_c = tag_counts[:25]
            else:
                t_c = tag_counts
            t_c_lines.extend( ( '{} - {} files'.format( tag, HydrusData.ToHumanInt( count ) ) for ( tag, count ) in t_c ) )
            if len( tag_counts ) > 25:
                t_c_lines.append( 'and {} others'.format( HydrusData.ToHumanInt( len( tag_counts ) - 25 ) ) )
            tooltip = os.linesep.join( t_c_lines )
            bdc_choices.append( ( text, data, tooltip ) )
        try:
            if len( tags ) > 1:
                message = 'The file{} some of those tags, but not all, so there are different things you can do.'.format( 's have' if len( self._media ) > 1 else ' has' )
            else:
                message = 'Of the {} files being managed, some have that tag, but not all of them do, so there are different things you can do.'.format( HydrusData.ToHumanInt( len( self._media ) ) )
            ( choice_action, tags ) = ClientGUIDialogsQuick.SelectFromListButtons( self, 'What would you like to do?', bdc_choices, message = message )
        except HydrusExceptions.CancelledException:
            return
    reason = None
    if choice_action == HC.CONTENT_UPDATE_PETITION:
        if forced_reason is None:
            # add the easy reason buttons here
            if len( tags ) == 1:
                ( tag, ) = tags
                tag_text = '"' + tag + '"'
            else:
                tag_text = 'the ' + HydrusData.ToHumanInt( len( tags ) ) + ' tags'
            message = 'Enter a reason for ' + tag_text + ' to be removed. A janitor will review your petition.'
            suggestions = []
            suggestions.append( 'mangled parse/typo' )
            suggestions.append( 'not applicable' )
            suggestions.append( 'should be namespaced' )
            suggestions.append( 'splitting filename/title/etc... into individual tags' )
            with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
                if dlg.exec() == QW.QDialog.Accepted:
                    reason = dlg.GetValue()
                else:
                    return
        else:
            reason = forced_reason
    # we have an action and tags, so let's effect the content updates
    content_updates_group = []
    recent_tags = set()
    medias_and_tags_managers = [ ( m, m.GetTagsManager() ) for m in self._media ]
    medias_and_sets_of_tags = [ ( m, tm.GetCurrent( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ), tm.GetPending( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ), tm.GetPetitioned( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) ) for ( m, tm ) in medias_and_tags_managers ]
    # there is a big CPU hit here as every time you processcontentupdates, the tagsmanagers need to regen caches lmao
    # so if I refetch current tags etc... for every tag loop, we end up getting 16 million tagok calls etc...
    # however, as tags is a set, thus with unique members, let's say for now this is ok, don't need to regen just to consult current
    for tag in tags:
        if choice_action == HC.CONTENT_UPDATE_ADD: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag not in mc ]
        elif choice_action == HC.CONTENT_UPDATE_DELETE: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag in mc ]
        elif choice_action == HC.CONTENT_UPDATE_PEND: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag not in mc and tag not in mp ]
        elif choice_action == HC.CONTENT_UPDATE_PETITION: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag in mc and tag not in mpt ]
        elif choice_action == HC.CONTENT_UPDATE_RESCIND_PEND: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag in mp ]
        elif choice_action == HC.CONTENT_UPDATE_RESCIND_PETITION: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag in mpt ]
        hashes = set( itertools.chain.from_iterable( ( m.GetHashes() for m in media_to_affect ) ) )
        if len( hashes ) > 0:
            content_updates = []
            if choice_action in ( HC.CONTENT_UPDATE_ADD, HC.CONTENT_UPDATE_PEND ):
                recent_tags.add( tag )
            content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, choice_action, ( tag, hashes ), reason = reason ) )
            if len( content_updates ) > 0:
                if not self._immediate_commit:
                    # keep our local media copies in sync so the listbox reflects the change
                    for m in media_to_affect:
                        mt = m.GetTagsManager()
                        for content_update in content_updates:
                            mt.ProcessContentUpdate( self._tag_service_key, content_update )
                content_updates_group.extend( content_updates )
    num_recent_tags = HG.client_controller.new_options.GetNoneableInteger( 'num_recent_tags' )
    if len( recent_tags ) > 0 and num_recent_tags is not None:
        if len( recent_tags ) > num_recent_tags:
            # fix: random.sample requires a sequence--passing a set is deprecated since
            # Python 3.9 and raises TypeError on 3.11+
            recent_tags = random.sample( list( recent_tags ), num_recent_tags )
        HG.client_controller.Write( 'push_recent_tags', self._tag_service_key, recent_tags )
    if len( content_updates_group ) > 0:
        if self._immediate_commit:
            service_keys_to_content_updates = { self._tag_service_key : content_updates_group }
            HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
        else:
            self._groups_of_content_updates.append( content_updates_group )
    self._suggested_tags.MediaUpdated()
    self._tags_box.SetTagsByMedia( self._media )
def _MigrateTags( self ):
    # Open the 'migrate tags' frame for the files this panel is managing, then
    # OK this dialog (migration continues in its own top-level window).
    hashes = set()
    for m in self._media:
        hashes.update( m.GetHashes() )
    def do_it( tag_service_key, hashes ):
        # deferred until after the current event; build the migration frame
        tlw = HG.client_controller.GetMainTLW()
        frame = ClientGUITopLevelWindowsPanels.FrameThatTakesScrollablePanel( tlw, 'migrate tags' )
        # fix: use the parameter captured at call time rather than reading
        # self._tag_service_key--self.OK() below closes this panel, so the deferred
        # call should not depend on a possibly-destroyed widget
        panel = ClientGUIScrolledPanelsReview.MigrateTagsPanel( frame, tag_service_key, hashes )
        frame.SetPanel( panel )
    QP.CallAfter( do_it, self._tag_service_key, hashes )
    self.OK()
def _Copy( self ):
    
    # Copy the selected tags to the clipboard; with no selection, copy every
    # current or pending tag on the managed media.
    tags = list( self._tags_box.GetSelectedTags() )
    
    if len( tags ) == 0:
        
        ( current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) = ClientMedia.GetMediasTagCount( self._media, self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE )
        
        tags = set( current_tags_to_count.keys() ).union( pending_tags_to_count.keys() )
        
    
    if len( tags ) == 0:
        
        return
        
    
    sorted_tags = HydrusTags.SortNumericTags( tags )
    
    HG.client_controller.pub( 'clipboard', 'text', os.linesep.join( sorted_tags ) )
    
def _FlipShowDeleted( self ):
    
    # toggle whether the tag box displays deleted tags
    show_deleted = not self._show_deleted
    
    self._show_deleted = show_deleted
    
    self._tags_box.SetShow( 'deleted', show_deleted )
    
def _ModifyMappers( self ):
    # Admin action: open a 'manage accounts' frame for the accounts that added
    # the selected tags to the managed media on this repository.
    contents = []
    tags = self._tags_box.GetSelectedTags()
    if len( tags ) == 0:
        QW.QMessageBox.information( self, 'No tags selected!', 'Please select some tags first!' )
        return
    hashes_and_current_tags = [ ( m.GetHashes(), m.GetTagsManager().GetCurrent( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) ) for m in self._media ]
    for tag in tags:
        # every currently-mapped ( tag, hash ) pair identifies an uploading account
        hashes_iter = itertools.chain.from_iterable( ( hashes for ( hashes, current_tags ) in hashes_and_current_tags if tag in current_tags ) )
        contents.extend( [ HydrusNetwork.Content( HC.CONTENT_TYPE_MAPPING, ( tag, hash ) ) for hash in hashes_iter ] )
    if len( contents ) > 0:
        subject_account_identifiers = [ HydrusNetwork.AccountIdentifier( content = content ) for content in contents ]
        frame = ClientGUITopLevelWindowsPanels.FrameThatTakesScrollablePanel( self.window().parentWidget(), 'manage accounts' )
        panel = ClientGUIHydrusNetwork.ModifyAccountsPanel( frame, self._tag_service_key, subject_account_identifiers )
        frame.SetPanel( panel )
def _Paste( self ):
    
    # read newline-separated tags from the clipboard and add them (add-only)
    try:
        
        raw_text = HG.client_controller.GetClipboardText()
        
    except HydrusExceptions.DataMissing as e:
        
        QW.QMessageBox.warning( self, 'Warning', str(e) )
        
        return
        
    
    try:
        
        pasted_tags = HydrusText.DeserialiseNewlinedTexts( raw_text )
        
        pasted_tags = HydrusTags.CleanTags( pasted_tags )
        
        self.AddTags( pasted_tags, only_add = True )
        
    except Exception:
        
        QW.QMessageBox.warning( self, 'Warning', 'I could not understand what was in the clipboard' )
        
    
def _RemoveTagsButton( self ):
    
    # Remove the selected tags, or every removable tag when nothing is selected.
    # 'Removable' means currently mapped or pending on any of the managed media.
    removable_tags = set()
    
    for tags_manager in ( m.GetTagsManager() for m in self._media ):
        
        removable_tags.update( tags_manager.GetCurrent( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) )
        removable_tags.update( tags_manager.GetPending( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) )
        
    
    selected_tags = list( self._tags_box.GetSelectedTags() )
    
    if len( selected_tags ) == 0:
        
        tags_to_remove = list( removable_tags )
        
    else:
        
        tags_to_remove = [ tag for tag in selected_tags if tag in removable_tags ]
        
    
    self.RemoveTags( HydrusTags.SortNumericTags( tags_to_remove ) )
    
def AddTags( self, tags, only_add = False ):
    
    # entry point used by the autocomplete input and the suggestion panels
    if not self._new_options.GetBoolean( 'allow_remove_on_manage_tags_input' ):
        
        # user prefers that typing an existing tag never removes it
        only_add = True
        
    
    if len( tags ) == 0:
        
        return
        
    
    self.EnterTags( tags, only_add = only_add )
    
def CleanBeforeDestroy( self ):
    # cancel any in-flight autocomplete fetch so it cannot call back into a dead widget
    self._add_tag_box.CancelCurrentResultsFetchJob()
def ClearMedia( self ):
    # forget all media; equivalent to SetMedia with an empty set
    self.SetMedia( set() )
def EnterTags( self, tags, only_add = False ):
    
    # public wrapper around _EnterTags; ignores empty input
    if len( tags ) == 0:
        
        return
        
    
    self._EnterTags( tags, only_add = only_add )
    
def GetGroupsOfContentUpdates( self ):
    # returns ( tag_service_key, list of content-update groups queued on this page )
    return ( self._tag_service_key, self._groups_of_content_updates )
def HasChanges( self ):
    
    # True if any content-update groups are queued for commit
    return bool( self._groups_of_content_updates )
    
def OK( self ):
    # ask the parent dialog to OK (used e.g. when the user enters an empty tag)
    self.okSignal.emit()
def ProcessApplicationCommand( self, command: CAC.ApplicationCommand ):
    
    # Handle shortcut commands aimed at this page.
    # Returns True if the command was consumed.
    if not command.IsSimpleCommand():
        
        return False
        
    
    action = command.GetData()
    
    if action == CAC.SIMPLE_SET_SEARCH_FOCUS:
        
        self.SetTagBoxFocus()
        
    elif action in ( CAC.SIMPLE_SHOW_AND_FOCUS_MANAGE_TAGS_FAVOURITE_TAGS, CAC.SIMPLE_SHOW_AND_FOCUS_MANAGE_TAGS_RELATED_TAGS, CAC.SIMPLE_SHOW_AND_FOCUS_MANAGE_TAGS_FILE_LOOKUP_SCRIPT_TAGS, CAC.SIMPLE_SHOW_AND_FOCUS_MANAGE_TAGS_RECENT_TAGS ):
        
        self._suggested_tags.TakeFocusForUser( action )
        
    elif action == CAC.SIMPLE_REFRESH_RELATED_TAGS:
        
        self._suggested_tags.RefreshRelatedThorough()
        
    else:
        
        return False
        
    
    return True
    
def ProcessContentUpdates( self, service_keys_to_content_updates ):
    
    # pubsub hook (immediate-commit mode): mirror external content updates into
    # our local media copies, then refresh the tag box and suggestions
    for ( service_key, content_updates ) in list( service_keys_to_content_updates.items() ):
        
        for content_update in content_updates:
            
            affected_media = [ m for m in self._media if HydrusData.SetsIntersect( m.GetHashes(), content_update.GetHashes() ) ]
            
            for m in affected_media:
                
                m.GetMediaResult().ProcessContentUpdate( service_key, content_update )
                
            
        
    
    self._tags_box.SetTagsByMedia( self._media )
    
    self._suggested_tags.MediaUpdated()
    
def RemoveTags( self, tags ):
    
    # Remove/petition the given tags, optionally confirming with the user first.
    if len( tags ) == 0:
        
        return
        
    
    if self._new_options.GetBoolean( 'yes_no_on_remove_on_manage_tags' ):
        
        if len( tags ) < 10:
            
            message = 'Are you sure you want to remove these tags:'
            message += os.linesep * 2
            message += os.linesep.join( ( HydrusText.ElideText( tag, 64 ) for tag in tags ) )
            
        else:
            
            message = 'Are you sure you want to remove these ' + HydrusData.ToHumanInt( len( tags ) ) + ' tags?'
            
        
        if ClientGUIDialogsQuick.GetYesNo( self, message ) != QW.QDialog.Accepted:
            
            return
            
        
    
    self._EnterTags( tags, only_remove = True )
    
def SetMedia( self, media ):
    
    # replace the media this page is editing; None is treated as no media
    media = set() if media is None else media
    
    self._media = media
    
    self._tags_box.SetTagsByMedia( self._media )
    
    self._suggested_tags.SetMedia( media )
    
def SetTagBoxFocus( self ):
    # give keyboard focus to the tag autocomplete input
    self._add_tag_box.setFocus( QC.Qt.OtherFocusReason )
class ManageTagParents( ClientGUIScrolledPanels.ManagePanel ):
def __init__( self, parent, tags = None ):
    # Dialog panel for editing tag parent pairs, one notebook page per service
    # we can write parents to. tags optionally seeds the pages' child lists.
    ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
    self._tag_repositories = ClientGUICommon.BetterNotebook( self )
    #
    default_tag_repository_key = HC.options[ 'default_tag_repository' ]
    # local tag services always qualify; repositories only when we may petition parents
    services = list( HG.client_controller.services_manager.GetServices( ( HC.LOCAL_TAG, ) ) )
    services.extend( [ service for service in HG.client_controller.services_manager.GetServices( ( HC.TAG_REPOSITORY, ) ) if service.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_PETITION ) ] )
    for service in services:
        name = service.GetName()
        service_key = service.GetServiceKey()
        page = self._Panel( self._tag_repositories, service_key, tags )
        select = service_key == default_tag_repository_key
        self._tag_repositories.addTab( page, name )
        if select: self._tag_repositories.setCurrentWidget( page )
    #
    vbox = QP.VBoxLayout()
    QP.AddToLayout( vbox, self._tag_repositories, CC.FLAGS_EXPAND_BOTH_WAYS )
    self.widget().setLayout( vbox )
def _SetSearchFocus( self ):
    
    # push keyboard focus to the tag input of whichever service page is showing
    current_page = self._tag_repositories.currentWidget()
    
    if current_page is None:
        
        return
        
    
    current_page.SetTagBoxFocus()
    
def CommitChanges( self ):
    
    # gather each page's pending parent content updates and write them in one call
    service_keys_to_content_updates = {
        service_key : content_updates
        for ( service_key, content_updates ) in ( page.GetContentUpdates() for page in self._tag_repositories.GetPages() )
        if len( content_updates ) > 0
    }
    
    if len( service_keys_to_content_updates ) > 0:
        
        HG.client_controller.Write( 'content_updates', service_keys_to_content_updates )
        
    
def UserIsOKToOK( self ):
    
    # warn if the current page has a child/parent pair typed but not yet added
    if not self._tag_repositories.currentWidget().HasUncommittedPair():
        
        return True
        
    
    message = 'Are you sure you want to OK? You have an uncommitted pair.'
    
    return ClientGUIDialogsQuick.GetYesNo( self, message ) == QW.QDialog.Accepted
    
class _Panel( QW.QWidget ):
def __init__( self, parent, service_key, tags = None ):
    # One notebook page: edits tag parent pairs on a single service.
    QW.QWidget.__init__( self, parent )
    self._service_key = service_key
    self._service = HG.client_controller.services_manager.GetService( self._service_key )
    self._i_am_local_tag_service = self._service.GetServiceType() == HC.LOCAL_TAG
    # petition reasons keyed by ( child, parent ) pair
    self._pairs_to_reasons = {}
    # pair sets keyed by HC.CONTENT_STATUS_*; original vs. as-edited-here
    self._original_statuses_to_pairs = collections.defaultdict( set )
    self._current_statuses_to_pairs = collections.defaultdict( set )
    self._show_all = QW.QCheckBox( self )
    listctrl_panel = ClientGUIListCtrl.BetterListCtrlPanel( self )
    self._tag_parents = ClientGUIListCtrl.BetterListCtrl( listctrl_panel, CGLC.COLUMN_LIST_TAG_PARENTS.ID, 8, self._ConvertPairToListCtrlTuples, delete_key_callback = self._ListCtrlActivated, activation_callback = self._ListCtrlActivated )
    listctrl_panel.SetListCtrl( self._tag_parents )
    self._tag_parents.Sort()
    # import menu
    menu_items = []
    menu_items.append( ( 'normal', 'from clipboard', 'Load parents from text in your clipboard.', HydrusData.Call( self._ImportFromClipboard, False ) ) )
    menu_items.append( ( 'normal', 'from clipboard (only add pairs--no deletions)', 'Load parents from text in your clipboard.', HydrusData.Call( self._ImportFromClipboard, True ) ) )
    menu_items.append( ( 'normal', 'from .txt file', 'Load parents from a .txt file.', HydrusData.Call( self._ImportFromTXT, False ) ) )
    menu_items.append( ( 'normal', 'from .txt file (only add pairs--no deletions)', 'Load parents from a .txt file.', HydrusData.Call( self._ImportFromTXT, True ) ) )
    listctrl_panel.AddMenuButton( 'import', menu_items )
    # export menu
    menu_items = []
    menu_items.append( ( 'normal', 'to clipboard', 'Save selected parents to your clipboard.', self._ExportToClipboard ) )
    menu_items.append( ( 'normal', 'to .txt file', 'Save selected parents to a .txt file.', self._ExportToTXT ) )
    listctrl_panel.AddMenuButton( 'export', menu_items, enabled_only_on_selection = True )
    self._children = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( self, self._service_key, ClientTags.TAG_DISPLAY_ACTUAL )
    self._parents = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( self, self._service_key, ClientTags.TAG_DISPLAY_ACTUAL )
    ( gumpf, preview_height ) = ClientGUIFunctions.ConvertTextToPixels( self._children, ( 12, 6 ) )
    self._children.setMinimumHeight( preview_height )
    self._parents.setMinimumHeight( preview_height )
    # the inputs stay disabled until the initial data load finishes (see THREADInitialise)
    self._child_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.EnterChildren, CC.LOCAL_FILE_SERVICE_KEY, service_key, show_paste_button = True )
    self._child_input.setEnabled( False )
    self._parent_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.EnterParents, CC.LOCAL_FILE_SERVICE_KEY, service_key, show_paste_button = True )
    self._parent_input.setEnabled( False )
    self._add = QW.QPushButton( 'add', self )
    self._add.clicked.connect( self.EventAddButton )
    self._add.setEnabled( False )
    #
    self._status_st = ClientGUICommon.BetterStaticText( self, 'initialising\u2026' + os.linesep + '.' )
    self._sync_status_st = ClientGUICommon.BetterStaticText( self, '' )
    self._sync_status_st.setWordWrap( True )
    self._count_st = ClientGUICommon.BetterStaticText( self, '' )
    # layout
    children_vbox = QP.VBoxLayout()
    QP.AddToLayout( children_vbox, ClientGUICommon.BetterStaticText( self, label = 'set children' ), CC.FLAGS_CENTER )
    QP.AddToLayout( children_vbox, self._children, CC.FLAGS_EXPAND_BOTH_WAYS )
    parents_vbox = QP.VBoxLayout()
    QP.AddToLayout( parents_vbox, ClientGUICommon.BetterStaticText( self, label = 'set parents' ), CC.FLAGS_CENTER )
    QP.AddToLayout( parents_vbox, self._parents, CC.FLAGS_EXPAND_BOTH_WAYS )
    tags_box = QP.HBoxLayout()
    QP.AddToLayout( tags_box, children_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( tags_box, parents_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
    input_box = QP.HBoxLayout()
    QP.AddToLayout( input_box, self._child_input, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( input_box, self._parent_input, CC.FLAGS_EXPAND_BOTH_WAYS )
    vbox = QP.VBoxLayout()
    QP.AddToLayout( vbox, self._status_st, CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, self._sync_status_st, CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, self._count_st, CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, ClientGUICommon.WrapInText(self._show_all,self,'show all pairs'), CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, listctrl_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( vbox, self._add, CC.FLAGS_ON_RIGHT )
    QP.AddToLayout( vbox, tags_box, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
    QP.AddToLayout( vbox, input_box, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
    self.setLayout( vbox )
    #
    self._tag_parents.itemSelectionChanged.connect( self._SetButtonStatus )
    self._children.listBoxChanged.connect( self._UpdateListCtrlData )
    self._parents.listBoxChanged.connect( self._UpdateListCtrlData )
    self._show_all.clicked.connect( self._UpdateListCtrlData )
    # load existing pairs off the main thread; enables the inputs when done
    HG.client_controller.CallToThread( self.THREADInitialise, tags, self._service_key )
def _AddPairs( self, pairs, add_only = False ):
    """Apply the entered (child, parent) pairs against the current editing state.

    Each pair is routed by its existing status: new pairs are pended (with a
    reason dialog on repositories), existing pairs are petitioned for removal,
    and pending/petitioned pairs have that state rescinded. If add_only is
    True, pairs that would be removed or rescinded are skipped. Finally the
    list ctrl rows for all affected pairs are added/updated/deleted in place.
    """
    pairs = list( pairs )
    # sort by the parent tag so dialogs list pairs in a stable, readable order
    pairs.sort( key = lambda c_p: HydrusTags.ConvertTagToSortable( c_p[1] ) )
    new_pairs = []
    current_pairs = []
    petitioned_pairs = []
    pending_pairs = []
    # bucket each pair by its current status; priority: pending, petitioned, current, new
    for pair in pairs:
        if pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]:
            if not add_only:
                pending_pairs.append( pair )
        elif pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]:
            petitioned_pairs.append( pair )
        elif pair in self._original_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ]:
            if not add_only:
                current_pairs.append( pair )
        elif self._CanAdd( pair ):
            new_pairs.append( pair )
    affected_pairs = []
    if len( new_pairs ) > 0:
        do_it = True
        # on a repository, a reason is needed unless the user is a moderator
        if not self._i_am_local_tag_service:
            if self._service.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_MODERATE ):
                reason = 'admin'
            else:
                if len( new_pairs ) > 10:
                    pair_strings = 'The many pairs you entered.'
                else:
                    pair_strings = os.linesep.join( ( child + '->' + parent for ( child, parent ) in new_pairs ) )
                message = 'Enter a reason for:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'To be added. A janitor will review your request.'
                suggestions = []
                suggestions.append( 'obvious by definition (a sword is a weapon)' )
                suggestions.append( 'character/series/studio/etc... belonging (character x belongs to series y)' )
                with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
                    if dlg.exec() == QW.QDialog.Accepted:
                        reason = dlg.GetValue()
                    else:
                        # user cancelled the reason dialog, so abandon the add
                        do_it = False
            if do_it:
                for pair in new_pairs: self._pairs_to_reasons[ pair ] = reason
        if do_it:
            self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ].update( new_pairs )
            affected_pairs.extend( new_pairs )
    else:
        # no brand-new pairs; offer to petition already-existing ones instead
        if len( current_pairs ) > 0:
            do_it = True
            if not self._i_am_local_tag_service:
                if len( current_pairs ) > 10:
                    pair_strings = 'The many pairs you entered.'
                else:
                    pair_strings = os.linesep.join( ( child + '->' + parent for ( child, parent ) in current_pairs ) )
                if len( current_pairs ) > 1:
                    message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Already exist.'
                else:
                    message = 'The pair ' + pair_strings + ' already exists.'
                result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'petition to remove', no_label = 'do nothing' )
                if result == QW.QDialog.Accepted:
                    if self._service.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_MODERATE ):
                        reason = 'admin'
                    else:
                        message = 'Enter a reason for:'
                        message += os.linesep * 2
                        message += pair_strings
                        message += os.linesep * 2
                        message += 'to be removed. A janitor will review your petition.'
                        suggestions = []
                        suggestions.append( 'obvious typo/mistake' )
                        with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
                            if dlg.exec() == QW.QDialog.Accepted:
                                reason = dlg.GetValue()
                            else:
                                do_it = False
                    if do_it:
                        for pair in current_pairs: self._pairs_to_reasons[ pair ] = reason
                else:
                    do_it = False
            if do_it:
                self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ].update( current_pairs )
                affected_pairs.extend( current_pairs )
    # pairs that are already pending: offer to rescind the pend
    if len( pending_pairs ) > 0:
        if len( pending_pairs ) > 10:
            pair_strings = 'The many pairs you entered.'
        else:
            pair_strings = os.linesep.join( ( child + '->' + parent for ( child, parent ) in pending_pairs ) )
        if len( pending_pairs ) > 1:
            message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Are pending.'
        else:
            message = 'The pair ' + pair_strings + ' is pending.'
        result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'rescind the pend', no_label = 'do nothing' )
        if result == QW.QDialog.Accepted:
            self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ].difference_update( pending_pairs )
            affected_pairs.extend( pending_pairs )
    # pairs that are already petitioned: offer to rescind the petition
    if len( petitioned_pairs ) > 0:
        if len( petitioned_pairs ) > 10:
            pair_strings = 'The many pairs you entered.'
        else:
            pair_strings = os.linesep.join( ( child + '->' + parent for ( child, parent ) in petitioned_pairs ) )
        if len( petitioned_pairs ) > 1:
            message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Are petitioned.'
        else:
            message = 'The pair ' + pair_strings + ' is petitioned.'
        result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'rescind the petition', no_label = 'do nothing' )
        if result == QW.QDialog.Accepted:
            self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ].difference_update( petitioned_pairs )
            affected_pairs.extend( petitioned_pairs )
    # refresh only the list ctrl rows we actually touched
    if len( affected_pairs ) > 0:
        def in_current( pair ):
            # a pair should still be displayed if it is current, pending, or petitioned
            for status in ( HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING, HC.CONTENT_STATUS_PETITIONED ):
                if pair in self._current_statuses_to_pairs[ status ]:
                    return True
            return False
        affected_pairs = [ ( self._tag_parents.HasData( pair ), in_current( pair ), pair ) for pair in affected_pairs ]
        to_add = [ pair for ( exists, current, pair ) in affected_pairs if not exists ]
        to_update = [ pair for ( exists, current, pair ) in affected_pairs if exists and current ]
        to_delete = [ pair for ( exists, current, pair ) in affected_pairs if exists and not current ]
        self._tag_parents.AddDatas( to_add )
        self._tag_parents.UpdateDatas( to_update )
        self._tag_parents.DeleteDatas( to_delete )
        self._tag_parents.Sort()
def _CanAdd( self, potential_pair ):
    """Return True if the (child, parent) pair may be added without creating a parent loop."""
    ( potential_child, potential_parent ) = potential_pair
    # a tag cannot be its own parent
    if potential_child == potential_parent:
        return False
    current = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ]
    pending = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]
    petitioned = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]
    effective_pairs = ( current | pending ) - petitioned
    children_in_play = { c for ( c, p ) in effective_pairs }
    # only a parent that is itself a child somewhere can close a loop
    if potential_parent in children_in_play:
        mapping = ClientManagers.BuildSimpleChildrenToParents( effective_pairs )
        if ClientManagers.LoopInSimpleChildrenToParents( mapping, potential_child, potential_parent ):
            QW.QMessageBox.critical( self, 'Error', 'Adding '+potential_child+'->'+potential_parent+' would create a loop!' )
            return False
    return True
def _ConvertPairToListCtrlTuples( self, pair ):
    """Convert one (child, parent) pair into (display_tuple, sort_tuple) for the list ctrl."""
    ( child, parent ) = pair
    # resolve the pair's status, checking in priority order: pending, petitioned, current
    lookups = (
        ( HC.CONTENT_STATUS_PENDING, self._current_statuses_to_pairs ),
        ( HC.CONTENT_STATUS_PETITIONED, self._current_statuses_to_pairs ),
        ( HC.CONTENT_STATUS_CURRENT, self._original_statuses_to_pairs )
    )
    for ( candidate_status, statuses_to_pairs ) in lookups:
        if pair in statuses_to_pairs[ candidate_status ]:
            status = candidate_status
            break
    pretty_status = HydrusData.ConvertStatusToPrefix( status )
    display_tuple = ( pretty_status, child, parent )
    sort_tuple = ( status, child, parent )
    return ( display_tuple, sort_tuple )
def _DeserialiseImportString( self, import_string ):
    """Parse newline-separated tags into (child, parent) pairs; raises on an odd tag count."""
    tags = HydrusText.DeserialiseNewlinedTexts( import_string )
    if len( tags ) % 2 == 1:
        raise Exception( 'Uneven number of tags found!' )
    # consecutive tags pair up: zip over one shared iterator takes them two at a time
    tag_iterator = iter( tags )
    return list( zip( tag_iterator, tag_iterator ) )
def _ExportToClipboard( self ):
    """Copy the selected pairs, serialised as newlined tags, to the clipboard."""
    HG.client_controller.pub( 'clipboard', 'text', self._GetExportString() )
def _ExportToTXT( self ):
    """Save the selected pairs, serialised as newlined tags, to a user-chosen .txt file."""
    export_string = self._GetExportString()
    with QP.FileDialog( self, 'Set the export path.', default_filename = 'parents.txt', acceptMode = QW.QFileDialog.AcceptSave, fileMode = QW.QFileDialog.AnyFile ) as dlg:
        if dlg.exec() != QW.QDialog.Accepted:
            return
        path = dlg.GetPath()
        with open( path, 'w', encoding = 'utf-8' ) as f:
            f.write( export_string )
def _GetExportString( self ):
    """Serialise the selected pairs as alternating child/parent tags, one per line."""
    selected_pairs = self._tag_parents.GetData( only_selected = True )
    # flatten [(child, parent), ...] into child, parent, child, parent, ...
    return os.linesep.join( itertools.chain.from_iterable( selected_pairs ) )
def _ImportFromClipboard( self, add_only = False ):
    """Load pairs from clipboard text and apply them; shows an error if the clipboard is empty."""
    try:
        import_string = HG.client_controller.GetClipboardText()
    except HydrusExceptions.DataMissing as e:
        QW.QMessageBox.critical( self, 'Error', str(e) )
        return
    self._AddPairs( self._DeserialiseImportString( import_string ), add_only = add_only )
    self._UpdateListCtrlData()
def _ImportFromTXT( self, add_only = False ):
    """Load pairs from a user-chosen .txt file and apply them."""
    with QP.FileDialog( self, 'Select the file to import.', acceptMode = QW.QFileDialog.AcceptOpen ) as dlg:
        if dlg.exec() != QW.QDialog.Accepted:
            return
        path = dlg.GetPath()
        with open( path, 'r', encoding = 'utf-8' ) as f:
            import_string = f.read()
        self._AddPairs( self._DeserialiseImportString( import_string ), add_only = add_only )
        self._UpdateListCtrlData()
def _ListCtrlActivated( self ):
    """Re-enter the selected pairs, which toggles their pend/petition status via _AddPairs.

    Fix: removed the unused 'parents_to_children' defaultdict local, which was
    built and never read.
    """
    pairs = self._tag_parents.GetData( only_selected = True )
    if len( pairs ) > 0:
        self._AddPairs( pairs )
def _SetButtonStatus( self ):
    """Enable the add button only when both children and parents have entries."""
    can_add = len( self._children.GetTags() ) > 0 and len( self._parents.GetTags() ) > 0
    self._add.setEnabled( can_add )
def _UpdateListCtrlData( self ):
    """Repopulate the pairs list from the entered tags and the 'show all' checkbox."""
    pertinent_tags = self._children.GetTags().union( self._parents.GetTags() )
    # clear and rebuild from scratch
    self._tag_parents.DeleteDatas( self._tag_parents.GetData() )
    show_all = self._show_all.isChecked()
    all_pairs = set()
    for ( status, pairs ) in self._current_statuses_to_pairs.items():
        # deleted pairs are never displayed
        if status == HC.CONTENT_STATUS_DELETED:
            continue
        if len( pertinent_tags ) == 0:
            # no tags entered: always show pending/petitioned, show current only on 'show all'
            if status != HC.CONTENT_STATUS_CURRENT or show_all:
                all_pairs.update( pairs )
        else:
            # tags entered: show pairs touching them (or everything on 'show all')
            all_pairs.update( pair for pair in pairs if show_all or pair[0] in pertinent_tags or pair[1] in pertinent_tags )
    self._tag_parents.AddDatas( all_pairs )
    self._tag_parents.Sort()
def EnterChildren( self, tags ):
    """Add *tags* to the children box and refresh the pair list and add button."""
    if len( tags ) > 0:
        # a tag cannot be on both sides at once, so pull it out of parents first
        self._parents.RemoveTags( tags )
        self._children.EnterTags( tags )
    self._UpdateListCtrlData()
    self._SetButtonStatus()
def EnterParents( self, tags ):
    """Add *tags* to the parents box and refresh the pair list and add button."""
    if len( tags ) > 0:
        # a tag cannot be on both sides at once, so pull it out of children first
        self._children.RemoveTags( tags )
        self._parents.EnterTags( tags )
    self._UpdateListCtrlData()
    self._SetButtonStatus()
def EventAddButton( self ):
    """Apply every child x parent combination currently entered, then clear both boxes."""
    children = self._children.GetTags()
    parents = self._parents.GetTags()
    # full cross product: every entered child gains every entered parent
    self._AddPairs( list( itertools.product( children, parents ) ) )
    self._children.SetTags( [] )
    self._parents.SetTags( [] )
    self._UpdateListCtrlData()
    self._SetButtonStatus()
def GetContentUpdates( self ):
    """Return ( service_key, content_updates ) reflecting the edits made in this panel.

    For the local tag service, pends become plain ADDs and petitions become
    DELETEs. For repositories, only the *difference* against the original
    pend/petition sets is sent, each with its recorded reason.
    """
    # we make it manually here because of the mass pending tags done (but not undone on a rescind) on a pending pair!
    # we don't want to send a pend and then rescind it, cause that will spam a thousand bad tags and not undo it
    content_updates = []
    if self._i_am_local_tag_service:
        for pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]: content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_ADD, pair ) )
        for pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]: content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_DELETE, pair ) )
    else:
        current_pending = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]
        original_pending = self._original_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]
        current_petitioned = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]
        original_petitioned = self._original_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]
        # only upload what actually changed during this dialog session
        new_pends = current_pending.difference( original_pending )
        rescinded_pends = original_pending.difference( current_pending )
        new_petitions = current_petitioned.difference( original_petitioned )
        rescinded_petitions = original_petitioned.difference( current_petitioned )
        content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_PEND, pair, reason = self._pairs_to_reasons[ pair ] ) for pair in new_pends ) )
        content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_RESCIND_PEND, pair ) for pair in rescinded_pends ) )
        content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_PETITION, pair, reason = self._pairs_to_reasons[ pair ] ) for pair in new_petitions ) )
        content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_RESCIND_PETITION, pair ) for pair in rescinded_petitions ) )
    return ( self._service_key, content_updates )
def HasUncommittedPair( self ):
    """Return True if both tag boxes hold entries that have not been added yet."""
    children = self._children.GetTags()
    parents = self._parents.GetTags()
    return len( children ) > 0 and len( parents ) > 0
def SetTagBoxFocus( self ):
    """Focus the child input until some children exist, then the parent input."""
    if len( self._children.GetTags() ) == 0:
        target = self._child_input
    else:
        target = self._parent_input
    target.setFocus( QC.Qt.OtherFocusReason )
def THREADInitialise( self, tags, service_key ):
    """Worker-thread initialiser: read parent pairs and sync status from the db,
    then hand the results back to the Qt thread via qt_code.

    Runs off the Qt thread (scheduled with CallToThread in __init__); only the
    nested qt_code closure touches widgets, and only after re-checking the
    panel is still alive.
    """
    def qt_code( original_statuses_to_pairs, current_statuses_to_pairs, service_keys_to_work_to_do ):
        # the dialog may have been closed while we were reading from the db
        if not self or not QP.isValid( self ):
            return
        self._original_statuses_to_pairs = original_statuses_to_pairs
        self._current_statuses_to_pairs = current_statuses_to_pairs
        self._status_st.setText( 'Files with a tag on the left will also be given the tag on the right.' + os.linesep + 'As an experiment, this panel will only display the \'current\' pairs for those tags entered below.' )
        # looking_good drives the valid/warning styling of the sync status text
        looking_good = True
        if len( service_keys_to_work_to_do ) == 0:
            looking_good = False
            status_text = 'No services currently apply these parents. Changes here will have no effect unless parent application is changed later.'
        else:
            # split the applying services by whether they still have parent sync work outstanding
            synced_names = sorted( ( HG.client_controller.services_manager.GetName( s_k ) for ( s_k, work_to_do ) in service_keys_to_work_to_do.items() if not work_to_do ) )
            unsynced_names = sorted( ( HG.client_controller.services_manager.GetName( s_k ) for ( s_k, work_to_do ) in service_keys_to_work_to_do.items() if work_to_do ) )
            synced_string = ', '.join( ( '"{}"'.format( name ) for name in synced_names ) )
            unsynced_string = ', '.join( ( '"{}"'.format( name ) for name in unsynced_names ) )
            if len( unsynced_names ) == 0:
                service_part = '{} apply these parents and are fully synced.'.format( synced_string )
            else:
                looking_good = False
                if len( synced_names ) > 0:
                    service_part = '{} apply these parents and are fully synced, but {} still have work to do.'.format( synced_string, unsynced_string )
                else:
                    service_part = '{} apply these parents and still have sync work to do.'.format( unsynced_string )
            # describe when the client will actually perform the sync work
            if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
                maintenance_part = 'Parents are set to sync all the time in the background.'
                if looking_good:
                    changes_part = 'Changes from this dialog should be reflected soon after closing the dialog.'
                else:
                    changes_part = 'It may take some time for changes here to apply everywhere, though.'
            else:
                looking_good = False
                if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_idle' ):
                    maintenance_part = 'Parents are set to sync only when you are not using the client.'
                    changes_part = 'It may take some time for changes here to apply.'
                else:
                    maintenance_part = 'Parents are not set to sync.'
                    changes_part = 'Changes here will not apply unless sync is manually forced to run.'
            s = os.linesep * 2
            status_text = s.join( ( service_part, maintenance_part, changes_part ) )
        self._sync_status_st.setText( status_text )
        if looking_good:
            self._sync_status_st.setObjectName( 'HydrusValid' )
        else:
            self._sync_status_st.setObjectName( 'HydrusWarning' )
        # re-polish so the new object name's stylesheet colour takes effect
        self._sync_status_st.style().polish( self._sync_status_st )
        self._count_st.setText( 'Starting with '+HydrusData.ToHumanInt(len(original_statuses_to_pairs[HC.CONTENT_STATUS_CURRENT]))+' pairs.' )
        # inputs were disabled during init; the data is here now
        self._child_input.setEnabled( True )
        self._parent_input.setEnabled( True )
        if tags is None:
            self._UpdateListCtrlData()
        else:
            self.EnterChildren( tags )
    original_statuses_to_pairs = HG.client_controller.Read( 'tag_parents', service_key )
    ( master_service_keys_to_sibling_applicable_service_keys, master_service_keys_to_parent_applicable_service_keys ) = HG.client_controller.Read( 'tag_display_application' )
    # which services actually apply this service's parent pairs
    service_keys_we_care_about = { s_k for ( s_k, s_ks ) in master_service_keys_to_parent_applicable_service_keys.items() if service_key in s_ks }
    service_keys_to_work_to_do = {}
    for s_k in service_keys_we_care_about:
        status = HG.client_controller.Read( 'tag_display_maintenance_status', s_k )
        work_to_do = status[ 'num_parents_to_sync' ] > 0
        service_keys_to_work_to_do[ s_k ] = work_to_do
    # deep-ish copy so edits in this dialog do not mutate the originals
    current_statuses_to_pairs = collections.defaultdict( set )
    current_statuses_to_pairs.update( { key : set( value ) for ( key, value ) in list(original_statuses_to_pairs.items()) } )
    QP.CallAfter( qt_code, original_statuses_to_pairs, current_statuses_to_pairs, service_keys_to_work_to_do )
class ManageTagSiblings( ClientGUIScrolledPanels.ManagePanel ):
def __init__( self, parent, tags = None ):
    """Build the manage-siblings dialog: one _Panel tab per writable tag service.

    tags, if given, pre-populates each panel's 'old' tags box.
    """
    ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
    self._tag_repositories = ClientGUICommon.BetterNotebook( self )
    #
    default_tag_repository_key = HC.options[ 'default_tag_repository' ]
    # local tag services first, then repositories where the user may at least petition
    services = list( HG.client_controller.services_manager.GetServices( ( HC.LOCAL_TAG, ) ) )
    services.extend( [ service for service in HG.client_controller.services_manager.GetServices( ( HC.TAG_REPOSITORY, ) ) if service.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_PETITION ) ] )
    for service in services:
        name = service.GetName()
        service_key = service.GetServiceKey()
        page = self._Panel( self._tag_repositories, service_key, tags )
        # open on the user's default tag repository
        select = service_key == default_tag_repository_key
        self._tag_repositories.addTab( page, name )
        if select: self._tag_repositories.setCurrentIndex( self._tag_repositories.indexOf( page ) )
    #
    vbox = QP.VBoxLayout()
    QP.AddToLayout( vbox, self._tag_repositories, CC.FLAGS_EXPAND_BOTH_WAYS )
    self.widget().setLayout( vbox )
def _SetSearchFocus( self ):
    """Pass keyboard focus to the current page's tag input, if a page exists."""
    page = self._tag_repositories.currentWidget()
    if page is None:
        return
    page.SetTagBoxFocus()
def CommitChanges( self ):
    """Gather content updates from every service page and write them in one db call."""
    pages_updates = ( page.GetContentUpdates() for page in self._tag_repositories.GetPages() )
    service_keys_to_content_updates = { service_key : content_updates for ( service_key, content_updates ) in pages_updates if len( content_updates ) > 0 }
    if len( service_keys_to_content_updates ) > 0:
        HG.client_controller.Write( 'content_updates', service_keys_to_content_updates )
def UserIsOKToOK( self ):
    """Confirm with the user before OKing while a pair is entered but not yet added."""
    if not self._tag_repositories.currentWidget().HasUncommittedPair():
        return True
    message = 'Are you sure you want to OK? You have an uncommitted pair.'
    result = ClientGUIDialogsQuick.GetYesNo( self, message )
    return result == QW.QDialog.Accepted
def EventServiceChanged( self, event ):
    """When the selected service tab changes, refocus that page's tag input."""
    page = self._tag_repositories.currentWidget()
    if page is None:
        return
    HG.client_controller.CallAfterQtSafe( page, page.SetTagBoxFocus )
class _Panel( QW.QWidget ):
def __init__( self, parent, service_key, tags = None ):
    """Build one service's sibling-editing panel.

    Sets up the pairs list ctrl with import/export menus, the old/new tag
    inputs, and kicks off THREADInitialise to load existing pairs from the db.
    """
    QW.QWidget.__init__( self, parent )
    self._service_key = service_key
    self._service = HG.client_controller.services_manager.GetService( self._service_key )
    # local services apply edits immediately; repositories go through pend/petition
    self._i_am_local_tag_service = self._service.GetServiceType() == HC.LOCAL_TAG
    # status -> set of (old, new) pairs; 'original' is the db snapshot, 'current' is the edited state
    self._original_statuses_to_pairs = collections.defaultdict( set )
    self._current_statuses_to_pairs = collections.defaultdict( set )
    # pair -> reason string to attach to repository uploads
    self._pairs_to_reasons = {}
    # the currently entered ideal (new) tag, or None
    self._current_new = None
    self._show_all = QW.QCheckBox( self )
    listctrl_panel = ClientGUIListCtrl.BetterListCtrlPanel( self )
    self._tag_siblings = ClientGUIListCtrl.BetterListCtrl( listctrl_panel, CGLC.COLUMN_LIST_TAG_SIBLINGS.ID, 8, self._ConvertPairToListCtrlTuples, delete_key_callback = self._ListCtrlActivated, activation_callback = self._ListCtrlActivated )
    listctrl_panel.SetListCtrl( self._tag_siblings )
    self._tag_siblings.Sort()
    # import menu
    menu_items = []
    menu_items.append( ( 'normal', 'from clipboard', 'Load siblings from text in your clipboard.', HydrusData.Call( self._ImportFromClipboard, False ) ) )
    menu_items.append( ( 'normal', 'from clipboard (only add pairs--no deletions)', 'Load siblings from text in your clipboard.', HydrusData.Call( self._ImportFromClipboard, True ) ) )
    menu_items.append( ( 'normal', 'from .txt file', 'Load siblings from a .txt file.', HydrusData.Call( self._ImportFromTXT, False ) ) )
    menu_items.append( ( 'normal', 'from .txt file (only add pairs--no deletions)', 'Load siblings from a .txt file.', HydrusData.Call( self._ImportFromTXT, True ) ) )
    listctrl_panel.AddMenuButton( 'import', menu_items )
    # export menu (enabled only when rows are selected)
    menu_items = []
    menu_items.append( ( 'normal', 'to clipboard', 'Save selected siblings to your clipboard.', self._ExportToClipboard ) )
    menu_items.append( ( 'normal', 'to .txt file', 'Save selected siblings to a .txt file.', self._ExportToTXT ) )
    listctrl_panel.AddMenuButton( 'export', menu_items, enabled_only_on_selection = True )
    self._old_siblings = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( self, self._service_key, ClientTags.TAG_DISPLAY_ACTUAL )
    self._new_sibling = ClientGUICommon.BetterStaticText( self )
    ( gumpf, preview_height ) = ClientGUIFunctions.ConvertTextToPixels( self._old_siblings, ( 12, 6 ) )
    self._old_siblings.setMinimumHeight( preview_height )
    # inputs stay disabled until THREADInitialise has loaded the existing pairs
    self._old_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.EnterOlds, CC.LOCAL_FILE_SERVICE_KEY, service_key, show_paste_button = True )
    self._old_input.setEnabled( False )
    self._new_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.SetNew, CC.LOCAL_FILE_SERVICE_KEY, service_key )
    self._new_input.setEnabled( False )
    self._add = QW.QPushButton( 'add', self )
    self._add.clicked.connect( self.EventAddButton )
    self._add.setEnabled( False )
    #
    self._status_st = ClientGUICommon.BetterStaticText( self, 'initialising\u2026' )
    self._sync_status_st = ClientGUICommon.BetterStaticText( self, '' )
    self._sync_status_st.setWordWrap( True )
    self._count_st = ClientGUICommon.BetterStaticText( self, '' )
    # layout
    old_sibling_box = QP.VBoxLayout()
    QP.AddToLayout( old_sibling_box, ClientGUICommon.BetterStaticText( self, label = 'set tags to be replaced' ), CC.FLAGS_CENTER )
    QP.AddToLayout( old_sibling_box, self._old_siblings, CC.FLAGS_EXPAND_BOTH_WAYS )
    new_sibling_box = QP.VBoxLayout()
    QP.AddToLayout( new_sibling_box, ClientGUICommon.BetterStaticText( self, label = 'set new ideal tag' ), CC.FLAGS_CENTER )
    new_sibling_box.addStretch( 1 )
    QP.AddToLayout( new_sibling_box, self._new_sibling, CC.FLAGS_EXPAND_PERPENDICULAR )
    new_sibling_box.addStretch( 1 )
    text_box = QP.HBoxLayout()
    QP.AddToLayout( text_box, old_sibling_box, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
    QP.AddToLayout( text_box, new_sibling_box, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
    input_box = QP.HBoxLayout()
    QP.AddToLayout( input_box, self._old_input )
    QP.AddToLayout( input_box, self._new_input )
    vbox = QP.VBoxLayout()
    QP.AddToLayout( vbox, self._status_st, CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, self._sync_status_st, CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, self._count_st, CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, ClientGUICommon.WrapInText(self._show_all,self,'show all pairs'), CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, listctrl_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( vbox, self._add, CC.FLAGS_ON_RIGHT )
    QP.AddToLayout( vbox, text_box, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
    QP.AddToLayout( vbox, input_box, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
    self.setLayout( vbox )
    #
    self._tag_siblings.itemSelectionChanged.connect( self._SetButtonStatus )
    self._show_all.clicked.connect( self._UpdateListCtrlData )
    self._old_siblings.listBoxChanged.connect( self._UpdateListCtrlData )
    # load existing pairs off the Qt thread
    HG.client_controller.CallToThread( self.THREADInitialise, tags, self._service_key )
def _AddPairs( self, pairs, add_only = False, remove_only = False, default_reason = None ):
    """Apply the entered (old, new) sibling pairs against the current editing state.

    New pairs are pended, existing pairs are petitioned, and pending/petitioned
    pairs have that state rescinded (after a confirmation dialog). add_only
    skips anything that would remove; remove_only skips anything that would
    add. default_reason, when given, is used instead of prompting (used by
    _AutoPetitionConflicts). Unlike the parents panel, the caller is expected
    to refresh the list ctrl afterwards.
    """
    pairs = list( pairs )
    # sort by the ideal (new) tag so dialogs list pairs in a stable, readable order
    pairs.sort( key = lambda c_p1: HydrusTags.ConvertTagToSortable( c_p1[1] ) )
    new_pairs = []
    current_pairs = []
    petitioned_pairs = []
    pending_pairs = []
    # bucket each pair by its current status; priority: pending, petitioned, current, new
    for pair in pairs:
        if pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]:
            if not add_only:
                pending_pairs.append( pair )
        elif pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]:
            if not remove_only:
                petitioned_pairs.append( pair )
        elif pair in self._original_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ]:
            if not add_only:
                current_pairs.append( pair )
        elif not remove_only and self._CanAdd( pair ):
            new_pairs.append( pair )
    if len( new_pairs ) > 0:
        do_it = True
        # on a repository, a reason is needed unless one was supplied or the user moderates
        if not self._i_am_local_tag_service:
            if default_reason is not None:
                reason = default_reason
            elif self._service.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_MODERATE ):
                reason = 'admin'
            else:
                if len( new_pairs ) > 10:
                    pair_strings = 'The many pairs you entered.'
                else:
                    pair_strings = os.linesep.join( ( old + '->' + new for ( old, new ) in new_pairs ) )
                suggestions = []
                suggestions.append( 'merging underscores/typos/phrasing/unnamespaced to a single uncontroversial good tag' )
                suggestions.append( 'rewording/namespacing based on preference' )
                message = 'Enter a reason for:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'To be added. A janitor will review your petition.'
                with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
                    if dlg.exec() == QW.QDialog.Accepted:
                        reason = dlg.GetValue()
                    else:
                        # user cancelled the reason dialog, so abandon the add
                        do_it = False
            if do_it:
                for pair in new_pairs: self._pairs_to_reasons[ pair ] = reason
        if do_it:
            self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ].update( new_pairs )
    else:
        # no brand-new pairs; petition already-existing ones instead
        if len( current_pairs ) > 0:
            do_it = True
            if not self._i_am_local_tag_service:
                if default_reason is not None:
                    reason = default_reason
                elif self._service.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_MODERATE ):
                    reason = 'admin'
                else:
                    if len( current_pairs ) > 10:
                        pair_strings = 'The many pairs you entered.'
                    else:
                        pair_strings = os.linesep.join( ( old + '->' + new for ( old, new ) in current_pairs ) )
                    message = 'Enter a reason for:'
                    message += os.linesep * 2
                    message += pair_strings
                    message += os.linesep * 2
                    message += 'to be removed. You will see the delete as soon as you upload, but a janitor will review your petition to decide if all users should receive it as well.'
                    suggestions = []
                    suggestions.append( 'obvious typo/mistake' )
                    suggestions.append( 'disambiguation' )
                    suggestions.append( 'correcting to repository standard' )
                    with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
                        if dlg.exec() == QW.QDialog.Accepted:
                            reason = dlg.GetValue()
                        else:
                            do_it = False
                if do_it:
                    for pair in current_pairs:
                        self._pairs_to_reasons[ pair ] = reason
            if do_it:
                self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ].update( current_pairs )
    # pairs that are already pending: offer to rescind the pend
    if len( pending_pairs ) > 0:
        if len( pending_pairs ) > 10:
            pair_strings = 'The many pairs you entered.'
        else:
            pair_strings = os.linesep.join( ( old + '->' + new for ( old, new ) in pending_pairs ) )
        if len( pending_pairs ) > 1:
            message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Are pending.'
        else:
            message = 'The pair ' + pair_strings + ' is pending.'
        result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'rescind the pend', no_label = 'do nothing' )
        if result == QW.QDialog.Accepted:
            self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ].difference_update( pending_pairs )
    # pairs that are already petitioned: offer to rescind the petition
    if len( petitioned_pairs ) > 0:
        if len( petitioned_pairs ) > 10:
            pair_strings = 'The many pairs you entered.'
        else:
            pair_strings = ', '.join( ( old + '->' + new for ( old, new ) in petitioned_pairs ) )
        if len( petitioned_pairs ) > 1:
            message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Are petitioned.'
        else:
            message = 'The pair ' + pair_strings + ' is petitioned.'
        result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'rescind the petition', no_label = 'do nothing' )
        if result == QW.QDialog.Accepted:
            self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ].difference_update( petitioned_pairs )
def _AutoPetitionConflicts( self, pairs ):
    """Before adding *pairs*, auto-petition any existing pair whose old tag would
    otherwise point at two different ideals (a sibling conflict)."""
    # the effective pairs: current plus pending, minus anything already petitioned
    current_pairs = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ].union( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] ).difference( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ] )
    current_olds_to_news = dict( current_pairs )
    current_olds = { current_old for ( current_old, current_new ) in current_pairs }
    pairs_to_auto_petition = set()
    for ( old, new ) in pairs:
        if old in current_olds:
            conflicting_new = current_olds_to_news[ old ]
            if conflicting_new != new:
                conflicting_pair = ( old, conflicting_new )
                pairs_to_auto_petition.add( conflicting_pair )
    if len( pairs_to_auto_petition ) > 0:
        pairs_to_auto_petition = list( pairs_to_auto_petition )
        # NOTE(review): 'new' here is the last value left over from the loop above; if
        # several conflicts reassign to different ideals, the reason only names one of
        # them — confirm this is intended.
        self._AddPairs( pairs_to_auto_petition, remove_only = True, default_reason = 'AUTO-PETITION TO REASSIGN TO: ' + new )
def _CanAdd( self, potential_pair ):
    """Return True if the (old, new) sibling pair may be added.

    Rejects (with an error dialog) an old tag that already has a sibling
    (ambiguity) and any pair that would close a sibling loop; also detects a
    pre-existing loop reachable from the new tag.
    """
    ( potential_old, potential_new ) = potential_pair
    # the effective pairs: current plus pending, minus anything already petitioned
    current_pairs = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ].union( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] ).difference( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ] )
    current_olds = { old for ( old, new ) in current_pairs }
    # test for ambiguity
    if potential_old in current_olds:
        QW.QMessageBox.critical( self, 'Error', 'There already is a relationship set for the tag '+potential_old+'.' )
        return False
    # test for loops
    if potential_new in current_olds:
        seen_tags = set()
        # old -> new mapping; each old is unique, so this walk is deterministic
        d = dict( current_pairs )
        next_new = potential_new
        # follow the sibling chain from the proposed new tag
        while next_new in d:
            next_new = d[ next_new ]
            if next_new == potential_old:
                # the chain comes back to the proposed old tag: adding would close a loop
                QW.QMessageBox.critical( self, 'Error', 'Adding '+potential_old+'->'+potential_new+' would create a loop!' )
                return False
            if next_new in seen_tags:
                # revisiting a tag means the existing data already contains a loop
                message = 'The pair you mean to add seems to connect to a sibling loop already in your database! Please undo this loop first. The tags involved in the loop are:'
                message += os.linesep * 2
                message += ', '.join( seen_tags )
                QW.QMessageBox.critical( self, 'Error', message )
                return False
            seen_tags.add( next_new )
    return True
def _ConvertPairToListCtrlTuples( self, pair ):
    """Build (display, sort) tuples for one sibling pair, flagging conflicts with entered olds."""
    ( old, new ) = pair
    # resolve the pair's status, checking in priority order: pending, petitioned, current
    if pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]:
        status = HC.CONTENT_STATUS_PENDING
    elif pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]:
        status = HC.CONTENT_STATUS_PETITIONED
    elif pair in self._original_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ]:
        status = HC.CONTENT_STATUS_CURRENT
    pretty_status = HydrusData.ConvertStatusToPrefix( status )
    # warn when this pair's old tag is also in the entered 'to be replaced' box
    note = ''
    if old in self._old_siblings.GetTags():
        if status == HC.CONTENT_STATUS_PENDING:
            note = 'CONFLICT: Will be rescinded on add.'
        elif status == HC.CONTENT_STATUS_CURRENT:
            note = 'CONFLICT: Will be petitioned/deleted on add.'
    return ( ( pretty_status, old, new, note ), ( status, old, new, note ) )
def _DeserialiseImportString( self, import_string ):
    """Parse newline-separated tags into (old, new) pairs; raises on an odd tag count."""
    tags = HydrusText.DeserialiseNewlinedTexts( import_string )
    if len( tags ) % 2 == 1:
        raise Exception( 'Uneven number of tags found!' )
    # consecutive tags pair up: zip over one shared iterator takes them two at a time
    tag_iterator = iter( tags )
    return list( zip( tag_iterator, tag_iterator ) )
def _ExportToClipboard( self ):
    """Copy the selected pairs, serialised as newlined tags, to the clipboard."""
    HG.client_controller.pub( 'clipboard', 'text', self._GetExportString() )
def _ExportToTXT( self ):
    """Save the selected pairs, serialised as newlined tags, to a user-chosen .txt file."""
    export_string = self._GetExportString()
    with QP.FileDialog( self, 'Set the export path.', default_filename = 'siblings.txt', acceptMode = QW.QFileDialog.AcceptSave, fileMode = QW.QFileDialog.AnyFile ) as dlg:
        if dlg.exec() != QW.QDialog.Accepted:
            return
        path = dlg.GetPath()
        with open( path, 'w', encoding = 'utf-8' ) as f:
            f.write( export_string )
def _GetExportString( self ):
    """Serialise the selected pairs as alternating old/new tags, one per line."""
    selected_pairs = self._tag_siblings.GetData( only_selected = True )
    # flatten [(old, new), ...] into old, new, old, new, ...
    return os.linesep.join( itertools.chain.from_iterable( selected_pairs ) )
def _ImportFromClipboard( self, add_only = False ):
    """Load pairs from clipboard text, auto-petition conflicts, then apply them."""
    try:
        import_string = HG.client_controller.GetClipboardText()
    except HydrusExceptions.DataMissing as e:
        QW.QMessageBox.critical( self, 'Error', str(e) )
        return
    pairs = self._DeserialiseImportString( import_string )
    self._AutoPetitionConflicts( pairs )
    self._AddPairs( pairs, add_only = add_only )
    self._UpdateListCtrlData()
def _ImportFromTXT( self, add_only = False ):
    """Import sibling pairs from a user-chosen newline-separated text file.
    
    If add_only is True, existing conflicting pairs are left alone rather than
    petitioned/rescinded.
    """
    
    with QP.FileDialog( self, 'Select the file to import.', acceptMode = QW.QFileDialog.AcceptOpen ) as dlg:
        
        if dlg.exec() != QW.QDialog.Accepted:
            
            return
            
        else:
            
            path = dlg.GetPath()
            
        
    
    # mirror of _ExportToTXT, which writes utf-8
    with open( path, 'r', encoding = 'utf-8' ) as f:
        
        import_string = f.read()
        
    
    pairs = self._DeserialiseImportString( import_string )
    
    # petition conflicting existing pairs first so the imported ones can apply cleanly
    self._AutoPetitionConflicts( pairs )
    
    self._AddPairs( pairs, add_only = add_only )
    
    self._UpdateListCtrlData()
def _ListCtrlActivated( self ):
    """On list activation (double-click/enter), flip the status of the selected pairs."""
    
    selected = self._tag_siblings.GetData( only_selected = True )
    
    if selected:
        
        self._AddPairs( selected )
        
    
    self._UpdateListCtrlData()
def _SetButtonStatus( self ):
    """Enable the add button only when a new tag is set and at least one old tag exists."""
    
    # idiom fix: collapse the if/else that passed literal True/False into a
    # single boolean expression--same enabled state in every case
    can_add = self._current_new is not None and len( self._old_siblings.GetTags() ) > 0
    
    self._add.setEnabled( can_add )
def _UpdateListCtrlData( self ):
    """Rebuild the pair list to show pairs relevant to the current inputs.
    
    With no tags entered, shows pending/petitioned pairs (plus current if
    'show all' is checked). With tags entered, shows pairs touching those tags.
    """
    
    olds = self._old_siblings.GetTags()
    
    # the tags whose pairs we want on display: the entered olds plus the new, if set
    pertinent_tags = set( olds )
    
    if self._current_new is not None:
        
        pertinent_tags.add( self._current_new )
        
    
    # full rebuild: clear the list ctrl and repopulate from scratch
    self._tag_siblings.DeleteDatas( self._tag_siblings.GetData() )
    
    all_pairs = set()
    
    show_all = self._show_all.isChecked()
    
    for ( status, pairs ) in self._current_statuses_to_pairs.items():
        
        # deleted pairs are never shown
        if status == HC.CONTENT_STATUS_DELETED:
            
            continue
            
        
        if len( pertinent_tags ) == 0:
            
            # nothing entered: current pairs only appear when 'show all' is on
            if status == HC.CONTENT_STATUS_CURRENT and not show_all:
                
                continue
                
            
            # show all pending/petitioned
            
            all_pairs.update( pairs )
            
        else:
            
            # show all appropriate
            
            for pair in pairs:
                
                ( a, b ) = pair
                
                if a in pertinent_tags or b in pertinent_tags or show_all:
                    
                    all_pairs.add( pair )
                    
    
    self._tag_siblings.AddDatas( all_pairs )
    
    self._tag_siblings.Sort()
def EnterOlds( self, olds ):
    """Add/toggle the given tags in the 'old' side and refresh the UI."""
    
    # a tag cannot be both the new and an old--drop the current new if it collides
    if self._current_new in olds:
        
        self.SetNew( set() )
        
    
    self._old_siblings.EnterTags( olds )
    
    self._UpdateListCtrlData()
    
    self._SetButtonStatus()
def EventAddButton( self ):
    """Commit the entered olds -> new as sibling pairs, then reset both inputs."""
    
    # guard: need both a new tag and at least one old to form pairs
    if self._current_new is not None and len( self._old_siblings.GetTags() ) > 0:
        
        olds = self._old_siblings.GetTags()
        
        pairs = [ ( old, self._current_new ) for old in olds ]
        
        # petition any existing pairs that would conflict with the new ones
        self._AutoPetitionConflicts( pairs )
        
        self._AddPairs( pairs )
        
        # clear the inputs ready for the next pair entry
        self._old_siblings.SetTags( set() )
        self.SetNew( set() )
        
        self._UpdateListCtrlData()
        
        self._SetButtonStatus()
def GetContentUpdates( self ):
    """Build the ( service_key, content_updates ) the dialog should commit.
    
    Local tag services take plain ADD/DELETE for pending/petitioned pairs;
    repositories get PEND/PETITION diffs (plus rescinds) against the original state.
    """
    
    # we make it manually here because of the mass pending tags done (but not undone on a rescind) on a pending pair!
    # we don't want to send a pend and then rescind it, cause that will spam a thousand bad tags and not undo it
    
    # actually, we don't do this for siblings, but we do for parents, and let's have them be the same
    
    content_updates = []
    
    if self._i_am_local_tag_service:
        
        # local service: pending becomes a direct add, petitioned a direct delete
        for pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]:
            
            content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_ADD, pair ) )
            
        
        for pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]:
            
            content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_DELETE, pair ) )
            
        
    else:
        
        # repository: diff current vs original so we only send what actually changed
        current_pending = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]
        original_pending = self._original_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]
        
        current_petitioned = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]
        original_petitioned = self._original_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]
        
        new_pends = current_pending.difference( original_pending )
        rescinded_pends = original_pending.difference( current_pending )
        
        new_petitions = current_petitioned.difference( original_petitioned )
        rescinded_petitions = original_petitioned.difference( current_petitioned )
        
        # new pends/petitions carry the reason captured when the user made them
        content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_PEND, pair, reason = self._pairs_to_reasons[ pair ] ) for pair in new_pends ) )
        content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_RESCIND_PEND, pair ) for pair in rescinded_pends ) )
        content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_PETITION, pair, reason = self._pairs_to_reasons[ pair ] ) for pair in new_petitions ) )
        content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_RESCIND_PETITION, pair ) for pair in rescinded_petitions ) )
        
    
    return ( self._service_key, content_updates )
def HasUncommittedPair( self ):
    """True when olds and a new tag are entered but not yet committed with the add button."""
    
    if self._current_new is None:
        
        return False
        
    
    return len( self._old_siblings.GetTags() ) > 0
def SetNew( self, new_tags ):
    """Set (or, with an empty set, clear) the 'new' side of the pair being built."""
    
    if len( new_tags ) == 0:
        
        self._new_sibling.clear()
        
        self._current_new = None
        
    else:
        
        # only a single new tag is supported--take an arbitrary member of the set
        new = list( new_tags )[0]
        
        # the new tag cannot simultaneously be an old
        self._old_siblings.RemoveTags( { new } )
        
        self._new_sibling.setText( new )
        
        self._current_new = new
        
    
    self._UpdateListCtrlData()
    
    self._SetButtonStatus()
def SetTagBoxFocus( self ):
    """Focus the old-tags input until at least one old tag exists, then the new-tag input."""
    
    target = self._old_input if len( self._old_siblings.GetTags() ) == 0 else self._new_input
    
    target.setFocus( QC.Qt.OtherFocusReason )
def THREADInitialise( self, tags, service_key ):
    """Worker-thread initialiser: read sibling state from the db, then populate the panel on the Qt thread.
    
    tags: optional initial 'old' tags to enter, or None.
    service_key: the tag service whose siblings are being edited.
    """
    
    def qt_code( original_statuses_to_pairs, current_statuses_to_pairs, service_keys_to_work_to_do ):
        
        # the panel may have been destroyed while the db read ran
        if not self or not QP.isValid( self ):
            
            return
            
        
        self._original_statuses_to_pairs = original_statuses_to_pairs
        self._current_statuses_to_pairs = current_statuses_to_pairs
        
        self._status_st.setText( 'Tags on the left will be appear as those on the right.' )
        
        # looking_good tracks whether sync status deserves the 'valid' or 'warning' style
        looking_good = True
        
        if len( service_keys_to_work_to_do ) == 0:
            
            looking_good = False
            
            status_text = 'No services currently apply these siblings. Changes here will have no effect unless sibling application is changed later.'
            
        else:
            
            # split applying services into synced vs still-working
            synced_names = sorted( ( HG.client_controller.services_manager.GetName( s_k ) for ( s_k, work_to_do ) in service_keys_to_work_to_do.items() if not work_to_do ) )
            unsynced_names = sorted( ( HG.client_controller.services_manager.GetName( s_k ) for ( s_k, work_to_do ) in service_keys_to_work_to_do.items() if work_to_do ) )
            
            synced_string = ', '.join( ( '"{}"'.format( name ) for name in synced_names ) )
            unsynced_string = ', '.join( ( '"{}"'.format( name ) for name in unsynced_names ) )
            
            if len( unsynced_names ) == 0:
                
                service_part = '{} apply these siblings and are fully synced.'.format( synced_string )
                
            else:
                
                looking_good = False
                
                if len( synced_names ) > 0:
                    
                    service_part = '{} apply these siblings and are fully synced, but {} still have work to do.'.format( synced_string, unsynced_string )
                    
                else:
                    
                    service_part = '{} apply these siblings but still have sync work to do.'.format( unsynced_string )
                    
            
            # describe when the maintenance manager will catch up with changes
            if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
                
                maintenance_part = 'Siblings are set to sync all the time in the background.'
                
                if looking_good:
                    
                    changes_part = 'Changes from this dialog should be reflected soon after closing the dialog.'
                    
                else:
                    
                    changes_part = 'It may take some time for changes here to apply everywhere, though.'
                    
            else:
                
                looking_good = False
                
                if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_idle' ):
                    
                    maintenance_part = 'Siblings are set to sync only when you are not using the client.'
                    
                    changes_part = 'It may take some time for changes here to apply.'
                    
                else:
                    
                    maintenance_part = 'Siblings are not set to sync.'
                    
                    changes_part = 'Changes here will not apply unless sync is manually forced to run.'
                    
            
            s = os.linesep * 2
            
            status_text = s.join( ( service_part, maintenance_part, changes_part ) )
            
        
        self._sync_status_st.setText( status_text )
        
        # green style when synced, warning style otherwise; re-polish to apply the qss
        if looking_good:
            
            self._sync_status_st.setObjectName( 'HydrusValid' )
            
        else:
            
            self._sync_status_st.setObjectName( 'HydrusWarning' )
            
        
        self._sync_status_st.style().polish( self._sync_status_st )
        
        self._count_st.setText( 'Starting with '+HydrusData.ToHumanInt(len(original_statuses_to_pairs[HC.CONTENT_STATUS_CURRENT]))+' pairs.' )
        
        # inputs were presumably disabled until the data arrived--enable them now
        self._old_input.setEnabled( True )
        self._new_input.setEnabled( True )
        
        if tags is None:
            
            self._UpdateListCtrlData()
            
        else:
            
            self.EnterOlds( tags )
            
        
    
    # --- worker-thread section: db reads only, no ui access ---
    
    original_statuses_to_pairs = HG.client_controller.Read( 'tag_siblings', service_key )
    
    ( master_service_keys_to_sibling_applicable_service_keys, master_service_keys_to_parent_applicable_service_keys ) = HG.client_controller.Read( 'tag_display_application' )
    
    # services that apply this service's siblings to their display
    service_keys_we_care_about = { s_k for ( s_k, s_ks ) in master_service_keys_to_sibling_applicable_service_keys.items() if service_key in s_ks }
    
    service_keys_to_work_to_do = {}
    
    for s_k in service_keys_we_care_about:
        
        status = HG.client_controller.Read( 'tag_display_maintenance_status', s_k )
        
        work_to_do = status[ 'num_siblings_to_sync' ] > 0
        
        service_keys_to_work_to_do[ s_k ] = work_to_do
        
    
    # current state starts as a mutable copy of the original
    current_statuses_to_pairs = collections.defaultdict( set )
    
    current_statuses_to_pairs.update( { key : set( value ) for ( key, value ) in original_statuses_to_pairs.items() } )
    
    # hand the results over to the Qt thread
    QP.CallAfter( qt_code, original_statuses_to_pairs, current_statuses_to_pairs, service_keys_to_work_to_do )
class ReviewTagDisplayMaintenancePanel( ClientGUIScrolledPanels.ReviewPanel ):
    """Review panel showing, per tag service, how much sibling/parent sync work remains,
    with controls to refresh the numbers and to force sync to run faster."""
    
    def __init__( self, parent ):
        
        ClientGUIScrolledPanels.ReviewPanel.__init__( self, parent )
        
        # one notebook tab per real tag service
        self._tag_services_notebook = ClientGUICommon.BetterNotebook( self )
        
        min_width = ClientGUIFunctions.ConvertTextToPixelWidth( self._tag_services_notebook, 100 )
        
        self._tag_services_notebook.setMinimumWidth( min_width )
        
        #
        
        services = list( HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES ) )
        
        # default selection is simply the first service
        select_service_key = services[0].GetServiceKey()
        
        for service in services:
            
            service_key = service.GetServiceKey()
            name = service.GetName()
            
            page = self._Panel( self._tag_services_notebook, service_key )
            
            self._tag_services_notebook.addTab( page, name )
            
            if service_key == select_service_key:
                
                self._tag_services_notebook.setCurrentWidget( page )
                
            
        
        #
        
        vbox = QP.VBoxLayout()
        
        message = 'Figuring out how tags should appear according to sibling and parent application rules takes time. When you set new rules, the changes do not happen immediately--the client catches up in the background. You can review current progress and force faster sync here.'
        
        self._message = ClientGUICommon.BetterStaticText( self, label = message )
        self._message.setWordWrap( True )
        
        self._sync_status = ClientGUICommon.BetterStaticText( self )
        
        self._sync_status.setWordWrap( True )
        
        self._UpdateStatusText()
        
        QP.AddToLayout( vbox, self._message, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._sync_status, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._tag_services_notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.widget().setLayout( vbox )
        
        # keep the status blurb current if the user changes the sync-timing options
        HG.client_controller.sub( self, '_UpdateStatusText', 'notify_new_menu_option' )
        
    
    def _UpdateStatusText( self ):
        """Refresh the summary line describing when background sync will run, styled valid/warning."""
        
        if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
            
            self._sync_status.setText( 'Siblings and parents are set to sync all the time. If there is work to do here, it should be cleared out in real time as you watch.' )
            
            self._sync_status.setObjectName( 'HydrusValid' )
            
        else:
            
            if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_idle' ):
                
                self._sync_status.setText( 'Siblings and parents are only set to sync during idle time. If there is work to do here, it should be cleared out when you are not using the client.' )
                
            else:
                
                self._sync_status.setText( 'Siblings and parents are not set to sync in the background at any time. If there is work to do here, you can force it now by clicking \'work now!\' button.' )
                
            
            self._sync_status.setObjectName( 'HydrusWarning' )
            
        
        # re-polish so the new object name's qss styling takes effect
        self._sync_status.style().polish( self._sync_status )
        
    
    class _Panel( QW.QWidget ):
        """Per-service page: shows sibling/parent counts, a progress gauge, and refresh/go-faster buttons."""
        
        def __init__( self, parent, service_key ):
            
            QW.QWidget.__init__( self, parent )
            
            self._service_key = service_key
            
            self._siblings_and_parents_st = ClientGUICommon.BetterStaticText( self )
            
            self._progress = ClientGUICommon.TextAndGauge( self )
            
            self._refresh_button = ClientGUICommon.BetterBitmapButton( self, CC.global_pixmaps().refresh, self._StartRefresh )
            
            self._go_faster_button = ClientGUICommon.BetterButton( self, 'work hard now!', self._SyncFaster )
            
            button_hbox = QP.HBoxLayout()
            
            QP.AddToLayout( button_hbox, self._refresh_button, CC.FLAGS_CENTER )
            QP.AddToLayout( button_hbox, self._go_faster_button, CC.FLAGS_CENTER )
            
            vbox = QP.VBoxLayout()
            
            QP.AddToLayout( vbox, self._siblings_and_parents_st, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, self._progress, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, button_hbox, CC.FLAGS_ON_RIGHT )
            
            vbox.addStretch( 1 )
            
            self.setLayout( vbox )
            
            # async updater that reads status off-thread and publishes back to the ui
            self._refresh_values_updater = self._InitialiseRefreshValuesUpdater()
            
            HG.client_controller.sub( self, 'NotifyRefresh', 'notify_new_tag_display_sync_status' )
            HG.client_controller.sub( self, '_StartRefresh', 'notify_new_tag_display_application' )
            
            self._StartRefresh()
            
        
        def _InitialiseRefreshValuesUpdater( self ):
            """Build the AsyncQtUpdater: disable buttons, read status off-thread, then redraw."""
            
            service_key = self._service_key
            
            def loading_callable():
                
                self._progress.SetText( 'refreshing\u2026' )
                
                self._refresh_button.setEnabled( False )
                
                # keep button available to slow down
                
                running_fast_and_button_is_slow = HG.client_controller.tag_display_maintenance_manager.CurrentlyGoingFaster( self._service_key ) and 'slow' in self._go_faster_button.text()
                
                if not running_fast_and_button_is_slow:
                    
                    self._go_faster_button.setEnabled( False )
                    
                
            
            def work_callable():
                
                status = HG.client_controller.Read( 'tag_display_maintenance_status', service_key )
                
                time.sleep( 0.1 ) # for user feedback more than anything
                
                return status
                
            
            def publish_callable( result ):
                
                status = result
                
                num_siblings_to_sync = status[ 'num_siblings_to_sync' ]
                num_parents_to_sync = status[ 'num_parents_to_sync' ]
                
                num_items_to_regen = num_siblings_to_sync + num_parents_to_sync
                
                # first line: human summary of remaining sibling/parent work
                if num_items_to_regen == 0:
                    
                    message = 'All synced!'
                    
                elif num_parents_to_sync == 0:
                    
                    message = '{} siblings to sync.'.format( HydrusData.ToHumanInt( num_siblings_to_sync ) )
                    
                elif num_siblings_to_sync == 0:
                    
                    message = '{} parents to sync.'.format( HydrusData.ToHumanInt( num_parents_to_sync ) )
                    
                else:
                    
                    message = '{} siblings and {} parents to sync.'.format( HydrusData.ToHumanInt( num_siblings_to_sync ), HydrusData.ToHumanInt( num_parents_to_sync ) )
                    
                
                self._siblings_and_parents_st.setText( message )
                
                #
                
                num_actual_rows = status[ 'num_actual_rows' ]
                num_ideal_rows = status[ 'num_ideal_rows' ]
                
                # second part: gauge text and value; value/range None means indeterminate
                if num_items_to_regen == 0:
                    
                    if num_ideal_rows == 0:
                        
                        message = 'No siblings/parents applying to this service.'
                        
                    else:
                        
                        message = '{} rules, all synced!'.format( HydrusData.ToHumanInt( num_ideal_rows ) )
                        
                    
                    value = 1
                    range = 1
                    
                    sync_possible = False
                    
                else:
                    
                    value = None
                    range = None
                    
                    if num_ideal_rows == 0:
                        
                        message = 'Removing all siblings/parents, {} rules remaining.'.format( HydrusData.ToHumanInt( num_actual_rows ) )
                        
                    else:
                        
                        message = '{} rules applied now, moving to {}.'.format( HydrusData.ToHumanInt( num_actual_rows ), HydrusData.ToHumanInt( num_ideal_rows ) )
                        
                        if num_actual_rows <= num_ideal_rows:
                            
                            value = num_actual_rows
                            range = num_ideal_rows
                            
                        
                    
                    sync_possible = True
                    
                
                self._progress.SetValue( message, value, range )
                
                self._refresh_button.setEnabled( True )
                
                self._go_faster_button.setVisible( sync_possible )
                self._go_faster_button.setEnabled( sync_possible )
                
                # go-faster button doubles as a 'slow down' toggle while running fast
                if HG.client_controller.tag_display_maintenance_manager.CurrentlyGoingFaster( self._service_key ):
                    
                    self._go_faster_button.setText( 'slow down!' )
                    
                else:
                    
                    if not HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
                        
                        self._go_faster_button.setText( 'work now!' )
                        
                    else:
                        
                        self._go_faster_button.setText( 'work hard now!' )
                        
                    
                
            
            return ClientGUIAsync.AsyncQtUpdater( self, loading_callable, work_callable, publish_callable )
            
        
        def _StartRefresh( self ):
            """Kick off an async status read/redraw."""
            
            self._refresh_values_updater.update()
            
        
        def _SyncFaster( self ):
            """Toggle fast sync for this service, then refresh the display."""
            
            HG.client_controller.tag_display_maintenance_manager.FlipSyncFaster( self._service_key )
            
            self._StartRefresh()
            
        
        def NotifyRefresh( self, service_key ):
            """Pubsub hook: refresh when this panel's service reports new sync status."""
            
            if service_key == self._service_key:
                
                self._StartRefresh()
class TagFilterButton( ClientGUICommon.BetterButton ):
    """A button that summarises a tag filter on its label and opens an editor when clicked."""
    
    def __init__( self, parent, message, tag_filter, only_show_blacklist = False, label_prefix = None ):
        
        ClientGUICommon.BetterButton.__init__( self, parent, 'tag filter', self._EditTagFilter )
        
        # message: explanatory text forwarded to the edit panel
        self._message = message
        self._tag_filter = tag_filter
        # only_show_blacklist: present/edit the filter purely as a blacklist
        self._only_show_blacklist = only_show_blacklist
        # label_prefix: optional text prepended to the button label
        self._label_prefix = label_prefix
        
        self._UpdateLabel()
        
    
    def _EditTagFilter( self ):
        """Open the edit dialog; on OK, adopt the new filter and relabel the button."""
        
        if self._only_show_blacklist:
            
            title = 'edit blacklist'
            
        else:
            
            title = 'edit tag filter'
            
        
        with ClientGUITopLevelWindowsPanels.DialogEdit( self, title ) as dlg:
            
            namespaces = HG.client_controller.network_engine.domain_manager.GetParserNamespaces()
            
            panel = EditTagFilterPanel( dlg, self._tag_filter, only_show_blacklist = self._only_show_blacklist, namespaces = namespaces, message = self._message )
            
            dlg.SetPanel( panel )
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                self._tag_filter = panel.GetValue()
                
                self._UpdateLabel()
                
            
        
    
    def _UpdateLabel( self ):
        """Rebuild the button label (elided) and tooltip (full) from the current filter."""
        
        if self._only_show_blacklist:
            
            tt = self._tag_filter.ToBlacklistString()
            
        else:
            
            tt = self._tag_filter.ToPermittedString()
            
        
        if self._label_prefix is not None:
            
            tt = self._label_prefix + tt
            
        
        # elide so long filters do not blow out the layout; the tooltip keeps the full text
        button_text = HydrusText.ElideText( tt, 45 )
        
        self.setText( button_text )
        
        self.setToolTip( tt )
        
    
    def GetValue( self ):
        """Return the current tag filter."""
        
        return self._tag_filter
        
    
    def SetValue( self, tag_filter ):
        """Replace the tag filter and refresh the label."""
        
        self._tag_filter = tag_filter
        
        self._UpdateLabel()
class TagSummaryGenerator( HydrusSerialisable.SerialisableBase ):
    """Serialisable object that renders a set of tags into a short one-line summary
    (e.g. for thumbnail banners), per user-configured namespace rules."""
    
    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_TAG_SUMMARY_GENERATOR
    SERIALISABLE_NAME = 'Tag Summary Generator'
    SERIALISABLE_VERSION = 2
    
    def __init__( self, background_colour = None, text_colour = None, namespace_info = None, separator = None, example_tags = None, show = True ):
        
        if background_colour is None:
            
            background_colour = QG.QColor( 223, 227, 230, 255 )
            
        
        if text_colour is None:
            
            text_colour = QG.QColor( 1, 17, 26, 255 )
            
        
        if namespace_info is None:
            
            # default rows: ( namespace, prefix, separator-between-subtags )
            namespace_info = []
            
            namespace_info.append( ( 'creator', '', ', ' ) )
            namespace_info.append( ( 'series', '', ', ' ) )
            namespace_info.append( ( 'title', '', ', ' ) )
            
        
        if separator is None:
            
            separator = ' - '
            
        
        if example_tags is None:
            
            example_tags = []
            
        
        self._background_colour = background_colour
        self._text_colour = text_colour
        self._namespace_info = namespace_info
        # separator between the per-namespace text chunks
        self._separator = separator
        self._example_tags = list( example_tags )
        # show: master on/off toggle for the whole summary
        self._show = show
        
        self._UpdateNamespaceLookup()
        
    
    def _GetSerialisableInfo( self ):
        
        # colours are stored as plain rgba lists so the dump is json-friendly
        bc = self._background_colour
        
        background_colour_rgba = [ bc.red(), bc.green(), bc.blue(), bc.alpha() ]
        
        tc = self._text_colour
        
        text_colour_rgba = [ tc.red(), tc.green(), tc.blue(), tc.alpha() ]
        
        return ( background_colour_rgba, text_colour_rgba, self._namespace_info, self._separator, self._example_tags, self._show )
        
    
    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        
        ( background_rgba, text_rgba, self._namespace_info, self._separator, self._example_tags, self._show ) = serialisable_info
        
        ( r, g, b, a ) = background_rgba
        
        self._background_colour = QG.QColor( r, g, b, a )
        
        ( r, g, b, a ) = text_rgba
        
        self._text_colour = QG.QColor( r, g, b, a )
        
        # json round-trip turns tuples into lists, so normalise back to tuples
        self._namespace_info = [ tuple( row ) for row in self._namespace_info ]
        
        self._UpdateNamespaceLookup()
        
    
    def _UpdateNamespaceLookup( self ):
        
        # cached set of namespaces we care about, for fast membership tests in GenerateSummary
        self._interesting_namespaces = { namespace for ( namespace, prefix, separator ) in self._namespace_info }
        
    
    def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
        
        if version == 1:
            
            # v1 -> v2: colours and the show flag were added, with these defaults
            ( namespace_info, separator, example_tags ) = old_serialisable_info
            
            background_rgba = ( 223, 227, 230, 255 )
            text_rgba = ( 1, 17, 26, 255 )
            show = True
            
            new_serialisable_info = ( background_rgba, text_rgba, namespace_info, separator, example_tags, show )
            
            return ( 2, new_serialisable_info )
            
        
    
    def GenerateExampleSummary( self ):
        """Render the stored example tags, or a placeholder when showing is off."""
        
        if not self._show:
            
            return 'not showing'
            
        else:
            
            return self.GenerateSummary( self._example_tags )
            
        
    
    def GenerateSummary( self, tags, max_length = None ):
        """Render the given tags into the summary string, optionally truncated to max_length."""
        
        if not self._show:
            
            return ''
            
        
        # bucket subtags under the namespaces we are configured to display
        namespaces_to_subtags = collections.defaultdict( list )
        
        for tag in tags:
            
            ( namespace, subtag ) = HydrusTags.SplitTag( tag )
            
            if namespace in self._interesting_namespaces:
                
                namespaces_to_subtags[ namespace ].append( subtag )
                
            
        
        # sort numerically and collapse runs like 1, 2, 3 into 1-3
        for ( namespace, unsorted_l ) in list( namespaces_to_subtags.items() ):
            
            sorted_l = HydrusTags.SortNumericTags( unsorted_l )
            
            sorted_l = HydrusTags.CollapseMultipleSortedNumericTagsToMinMax( sorted_l )
            
            namespaces_to_subtags[ namespace ] = sorted_l
            
        
        namespace_texts = []
        
        # emit chunks in the user-configured namespace order
        for ( namespace, prefix, separator ) in self._namespace_info:
            
            subtags = namespaces_to_subtags[ namespace ]
            
            if len( subtags ) > 0:
                
                namespace_text = prefix + separator.join( namespaces_to_subtags[ namespace ] )
                
                namespace_texts.append( namespace_text )
                
            
        
        summary = self._separator.join( namespace_texts )
        
        if max_length is not None:
            
            summary = summary[:max_length]
            
        
        return summary
        
    
    def GetBackgroundColour( self ):
        """Return the summary banner's background QColor."""
        
        return self._background_colour
        
    
    def GetTextColour( self ):
        """Return the summary text QColor."""
        
        return self._text_colour
        
    
    def ToTuple( self ):
        """Return all configurable state as a tuple, for the edit panel."""
        
        return ( self._background_colour, self._text_colour, self._namespace_info, self._separator, self._example_tags, self._show )
# register the type id so HydrusSerialisable can rebuild dumped TagSummaryGenerator objects
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_TAG_SUMMARY_GENERATOR ] = TagSummaryGenerator
class EditTagSummaryGeneratorPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a TagSummaryGenerator, with a live example preview."""
    
    def __init__( self, parent: QW.QWidget, tag_summary_generator: TagSummaryGenerator ):
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        show_panel = ClientGUICommon.StaticBox( self, 'shows' )
        
        self._show = QW.QCheckBox( show_panel )
        
        edit_panel = ClientGUICommon.StaticBox( self, 'edit' )
        
        self._background_colour = ClientGUICommon.AlphaColourControl( edit_panel )
        self._text_colour = ClientGUICommon.AlphaColourControl( edit_panel )
        
        # reorderable list of ( namespace, prefix, separator ) rows
        self._namespaces_listbox = ClientGUIListBoxes.QueueListBox( edit_panel, 8, self._ConvertNamespaceToListBoxString, self._AddNamespaceInfo, self._EditNamespaceInfo )
        
        self._separator = QW.QLineEdit( edit_panel )
        
        example_panel = ClientGUICommon.StaticBox( self, 'example' )
        
        self._example_tags = QW.QPlainTextEdit( example_panel )
        
        # read-only preview of what the current settings would render
        self._test_result = QW.QLineEdit( example_panel )
        self._test_result.setReadOnly( True )
        
        #
        
        ( background_colour, text_colour, namespace_info, separator, example_tags, show ) = tag_summary_generator.ToTuple()
        
        self._show.setChecked( show )
        
        self._background_colour.SetValue( background_colour )
        self._text_colour.SetValue( text_colour )
        self._namespaces_listbox.AddDatas( namespace_info )
        self._separator.setText( separator )
        self._example_tags.setPlainText( os.linesep.join( example_tags ) )
        
        self._UpdateTest()
        
        #
        
        rows = []
        
        rows.append( ( 'currently shows (turn off to hide): ', self._show ) )
        
        gridbox = ClientGUICommon.WrapInGrid( show_panel, rows )
        
        show_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        rows = []
        
        rows.append( ( 'background colour: ', self._background_colour ) )
        rows.append( ( 'text colour: ', self._text_colour ) )
        
        gridbox = ClientGUICommon.WrapInGrid( edit_panel, rows )
        
        edit_panel.Add( ClientGUICommon.BetterStaticText( edit_panel, 'The colours only work for the thumbnails right now!' ), CC.FLAGS_EXPAND_PERPENDICULAR )
        edit_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        edit_panel.Add( self._namespaces_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        edit_panel.Add( ClientGUICommon.WrapInText( self._separator, edit_panel, 'separator' ), CC.FLAGS_EXPAND_PERPENDICULAR )
        
        example_panel.Add( ClientGUICommon.BetterStaticText( example_panel, 'Enter some newline-separated tags here to see what your current object would generate.' ), CC.FLAGS_EXPAND_PERPENDICULAR )
        example_panel.Add( self._example_tags, CC.FLAGS_EXPAND_BOTH_WAYS )
        example_panel.Add( self._test_result, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, show_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, edit_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( vbox, example_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.widget().setLayout( vbox )
        
        #
        
        # any edit re-renders the example preview
        self._show.clicked.connect( self._UpdateTest )
        self._separator.textChanged.connect( self._UpdateTest )
        self._example_tags.textChanged.connect( self._UpdateTest )
        self._namespaces_listbox.listBoxChanged.connect( self._UpdateTest )
        
    
    def _AddNamespaceInfo( self ):
        """Callback for the listbox 'add' action: edit a fresh default row."""
        
        namespace = ''
        prefix = ''
        separator = ', '
        
        namespace_info = ( namespace, prefix, separator )
        
        return self._EditNamespaceInfo( namespace_info )
        
    
    def _ConvertNamespaceToListBoxString( self, namespace_info ):
        """Render a ( namespace, prefix, separator ) row for display in the listbox."""
        
        ( namespace, prefix, separator ) = namespace_info
        
        if namespace == '':
            
            pretty_namespace = 'unnamespaced'
            
        else:
            
            pretty_namespace = namespace
            
        
        pretty_prefix = prefix
        pretty_separator = separator
        
        return pretty_namespace + ' | prefix: "' + pretty_prefix + '" | separator: "' + pretty_separator + '"'
        
    
    def _EditNamespaceInfo( self, namespace_info ):
        """Edit a row via three sequential text dialogs; raises VetoException on any cancel."""
        
        ( namespace, prefix, separator ) = namespace_info
        
        message = 'Edit namespace.'
        
        with ClientGUIDialogs.DialogTextEntry( self, message, namespace, allow_blank = True ) as dlg:
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                namespace = dlg.GetValue()
                
            else:
                
                raise HydrusExceptions.VetoException()
                
            
        
        message = 'Edit prefix.'
        
        with ClientGUIDialogs.DialogTextEntry( self, message, prefix, allow_blank = True ) as dlg:
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                prefix = dlg.GetValue()
                
            else:
                
                raise HydrusExceptions.VetoException()
                
            
        
        message = 'Edit separator.'
        
        with ClientGUIDialogs.DialogTextEntry( self, message, separator, allow_blank = True ) as dlg:
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                separator = dlg.GetValue()
                
                namespace_info = ( namespace, prefix, separator )
                
                return namespace_info
                
            else:
                
                raise HydrusExceptions.VetoException()
                
            
        
    
    def _UpdateTest( self ):
        """Re-render the example preview from the current control values."""
        
        tag_summary_generator = self.GetValue()
        
        self._test_result.setText( tag_summary_generator.GenerateExampleSummary() )
        
    
    def GetValue( self ) -> TagSummaryGenerator:
        """Build a TagSummaryGenerator from the current control values."""
        
        show = self._show.isChecked()
        
        background_colour = self._background_colour.GetValue()
        text_colour = self._text_colour.GetValue()
        namespace_info = self._namespaces_listbox.GetData()
        separator = self._separator.text()
        # clean the example tags the same way real tags are cleaned
        example_tags = HydrusTags.CleanTags( HydrusText.DeserialiseNewlinedTexts( self._example_tags.toPlainText() ) )
        
        return TagSummaryGenerator( background_colour, text_colour, namespace_info, separator, example_tags, show )
class TagSummaryGeneratorButton( ClientGUICommon.BetterButton ):
    """A button labelled with its TagSummaryGenerator's example summary; clicking opens an edit dialog."""
    
    def __init__( self, parent: QW.QWidget, tag_summary_generator: TagSummaryGenerator ):
        
        ClientGUICommon.BetterButton.__init__( self, parent, tag_summary_generator.GenerateExampleSummary(), self._Edit )
        
        self._tag_summary_generator = tag_summary_generator
        
    
    def _Edit( self ):
        
        # edit in a dialog; only commit and relabel on OK
        with ClientGUITopLevelWindowsPanels.DialogEdit( self, 'edit tag summary' ) as dlg:
            
            edit_panel = EditTagSummaryGeneratorPanel( dlg, self._tag_summary_generator )
            
            dlg.SetPanel( edit_panel )
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                self._tag_summary_generator = edit_panel.GetValue()
                
                self.setText( self._tag_summary_generator.GenerateExampleSummary() )
                
            
        
    
    def GetValue( self ) -> TagSummaryGenerator:
        
        return self._tag_summary_generator
import collections
import itertools
import os
import random
import time
import typing
from qtpy import QtCore as QC
from qtpy import QtWidgets as QW
from qtpy import QtGui as QG
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusExceptions
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusSerialisable
from hydrus.core import HydrusTags
from hydrus.core import HydrusText
from hydrus.core.networking import HydrusNetwork
from hydrus.client import ClientApplicationCommand as CAC
from hydrus.client import ClientConstants as CC
from hydrus.client import ClientManagers
from hydrus.client.gui import ClientGUIAsync
from hydrus.client.gui import ClientGUICore as CGC
from hydrus.client.gui import ClientGUIDialogs
from hydrus.client.gui import ClientGUIDialogsQuick
from hydrus.client.gui import ClientGUIFunctions
from hydrus.client.gui import ClientGUIMenus
from hydrus.client.gui import ClientGUIScrolledPanels
from hydrus.client.gui import ClientGUIScrolledPanelsReview
from hydrus.client.gui import ClientGUIShortcuts
from hydrus.client.gui import ClientGUITagSuggestions
from hydrus.client.gui import ClientGUITopLevelWindowsPanels
from hydrus.client.gui import QtPorting as QP
from hydrus.client.gui.lists import ClientGUIListBoxes
from hydrus.client.gui.lists import ClientGUIListConstants as CGLC
from hydrus.client.gui.lists import ClientGUIListCtrl
from hydrus.client.gui.networking import ClientGUIHydrusNetwork
from hydrus.client.gui.search import ClientGUIACDropdown
from hydrus.client.gui.widgets import ClientGUICommon
from hydrus.client.gui.widgets import ClientGUIControls
from hydrus.client.gui.widgets import ClientGUIMenuButton
from hydrus.client.media import ClientMedia
from hydrus.client.metadata import ClientTags
from hydrus.client.metadata import ClientTagsHandling
class EditTagAutocompleteOptionsPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for one service's TagAutocompleteOptions.

    Exposes the expensive wildcard-search permissions ("namespace:", "namespace:*",
    "*", namespace-into-full-tag searching), the automatic-fetch/exact-match
    settings, and the default write-autocomplete tag/file domains used by the
    manage tags dialog. GetValue() builds a fresh TagAutocompleteOptions on the
    original options' service key.
    """
    
    def __init__( self, parent: QW.QWidget, tag_autocomplete_options: ClientTagsHandling.TagAutocompleteOptions ):
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        # kept so GetValue can rebuild an options object on the same service key
        self._original_tag_autocomplete_options = tag_autocomplete_options
        
        services_manager = HG.client_controller.services_manager
        
        all_real_tag_service_keys = services_manager.GetServiceKeys( HC.REAL_TAG_SERVICES )
        all_real_file_service_keys = services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, HC.FILE_REPOSITORY ) )
        
        # -- widget construction --
        
        self._write_autocomplete_tag_domain = ClientGUICommon.BetterChoice( self )
        self._write_autocomplete_tag_domain.setToolTip( 'A manage tags autocomplete will start with this domain. Typically only useful with this service or "all known tags".' )
        
        # 'all known tags' first, then every real tag service
        self._write_autocomplete_tag_domain.addItem( services_manager.GetName( CC.COMBINED_TAG_SERVICE_KEY ), CC.COMBINED_TAG_SERVICE_KEY )
        
        for service_key in all_real_tag_service_keys:
            
            self._write_autocomplete_tag_domain.addItem( services_manager.GetName( service_key ), service_key )
            
        
        self._override_write_autocomplete_file_domain = QW.QCheckBox( self )
        self._override_write_autocomplete_file_domain.setToolTip( 'If set, a manage tags dialog autocomplete will start with a different file domain than the one that launched the dialog.' )
        
        self._write_autocomplete_file_domain = ClientGUICommon.BetterChoice( self )
        self._write_autocomplete_file_domain.setToolTip( 'A manage tags autocomplete will start with this domain. Normally only useful for "all known files" or "my files".' )
        
        # 'all known files' first, then every real file service
        self._write_autocomplete_file_domain.addItem( services_manager.GetName( CC.COMBINED_FILE_SERVICE_KEY ), CC.COMBINED_FILE_SERVICE_KEY )
        
        for service_key in all_real_file_service_keys:
            
            self._write_autocomplete_file_domain.addItem( services_manager.GetName( service_key ), service_key )
            
        
        self._search_namespaces_into_full_tags = QW.QCheckBox( self )
        self._search_namespaces_into_full_tags.setToolTip( 'If on, a search for "ser" will return all "series:" results such as "series:metrod". On large tag services, these searches are extremely slow.' )
        
        self._namespace_bare_fetch_all_allowed = QW.QCheckBox( self )
        self._namespace_bare_fetch_all_allowed.setToolTip( 'If on, a search for "series:" will return all "series:" results. On large tag services, these searches are extremely slow.' )
        
        self._namespace_fetch_all_allowed = QW.QCheckBox( self )
        self._namespace_fetch_all_allowed.setToolTip( 'If on, a search for "series:*" will return all "series:" results. On large tag services, these searches are extremely slow.' )
        
        self._fetch_all_allowed = QW.QCheckBox( self )
        self._fetch_all_allowed.setToolTip( 'If on, a search for "*" will return all tags. On large tag services, these searches are extremely slow.' )
        
        self._fetch_results_automatically = QW.QCheckBox( self )
        self._fetch_results_automatically.setToolTip( 'If on, results will load as you type. If off, you will have to hit a shortcut (default Ctrl+Space) to load results.' )
        
        self._exact_match_character_threshold = ClientGUICommon.NoneableSpinCtrl( self, none_phrase = 'always autocomplete (only appropriate for small tag services)', min = 1, max = 256, unit = 'characters' )
        self._exact_match_character_threshold.setToolTip( 'When the search text has <= this many characters, autocomplete will not occur and you will only get results that exactly match the input. Increasing this value makes autocomplete snappier but reduces the number of results.' )
        
        # -- populate widgets from the incoming options --
        
        self._write_autocomplete_tag_domain.SetValue( tag_autocomplete_options.GetWriteAutocompleteTagDomain() )
        self._override_write_autocomplete_file_domain.setChecked( tag_autocomplete_options.OverridesWriteAutocompleteFileDomain() )
        self._write_autocomplete_file_domain.SetValue( tag_autocomplete_options.GetWriteAutocompleteFileDomain() )
        self._search_namespaces_into_full_tags.setChecked( tag_autocomplete_options.SearchNamespacesIntoFullTags() )
        self._namespace_bare_fetch_all_allowed.setChecked( tag_autocomplete_options.NamespaceBareFetchAllAllowed() )
        self._namespace_fetch_all_allowed.setChecked( tag_autocomplete_options.NamespaceFetchAllAllowed() )
        self._fetch_all_allowed.setChecked( tag_autocomplete_options.FetchAllAllowed() )
        self._fetch_results_automatically.setChecked( tag_autocomplete_options.FetchResultsAutomatically() )
        self._exact_match_character_threshold.SetValue( tag_autocomplete_options.GetExactMatchCharacterThreshold() )
        
        # -- layout --
        
        rows = []
        
        rows.append( ( 'Fetch results as you type: ', self._fetch_results_automatically ) )
        rows.append( ( 'Do-not-autocomplete character threshold: ', self._exact_match_character_threshold ) )
        
        if tag_autocomplete_options.GetServiceKey() == CC.COMBINED_TAG_SERVICE_KEY:
            
            # 'all known tags' cannot be a write domain, so hide those controls
            self._write_autocomplete_tag_domain.setVisible( False )
            self._override_write_autocomplete_file_domain.setVisible( False )
            self._write_autocomplete_file_domain.setVisible( False )
            
        else:
            
            rows.append( ( 'Override default autocomplete file domain in _manage tags_: ', self._override_write_autocomplete_file_domain ) )
            rows.append( ( 'Default autocomplete file domain in _manage tags_: ', self._write_autocomplete_file_domain ) )
            rows.append( ( 'Default autocomplete tag domain in _manage tags_: ', self._write_autocomplete_tag_domain ) )
            
        
        rows.append( ( 'Search namespaces with normal input: ', self._search_namespaces_into_full_tags ) )
        rows.append( ( 'Allow "namespace:": ', self._namespace_bare_fetch_all_allowed ) )
        rows.append( ( 'Allow "namespace:*": ', self._namespace_fetch_all_allowed ) )
        rows.append( ( 'Allow "*": ', self._fetch_all_allowed ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        vbox = QP.VBoxLayout()
        
        label = 'The settings that permit searching namespaces and expansive "*" queries can be very expensive on a large client and may cause problems!'
        
        st = ClientGUICommon.BetterStaticText( self, label = label )
        
        QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.widget().setLayout( vbox )
        
        # set the initial enabled/disabled state before wiring signals
        self._UpdateControls()
        
        self._override_write_autocomplete_file_domain.stateChanged.connect( self._UpdateControls )
        self._search_namespaces_into_full_tags.stateChanged.connect( self._UpdateControls )
        self._namespace_bare_fetch_all_allowed.stateChanged.connect( self._UpdateControls )
        
    
    def _UpdateControls( self ):
        """Enable/disable dependent checkboxes and force implied permissions on.
        
        A broader permission implies the narrower ones, so the narrower checkboxes
        are disabled and checked when a broader one is ticked.
        """
        
        self._write_autocomplete_file_domain.setEnabled( self._override_write_autocomplete_file_domain.isChecked() )
        
        if self._search_namespaces_into_full_tags.isChecked():
            
            self._namespace_bare_fetch_all_allowed.setEnabled( False )
            self._namespace_fetch_all_allowed.setEnabled( False )
            
        else:
            
            self._namespace_bare_fetch_all_allowed.setEnabled( True )
            
            if self._namespace_bare_fetch_all_allowed.isChecked():
                
                self._namespace_fetch_all_allowed.setEnabled( False )
                
            else:
                
                self._namespace_fetch_all_allowed.setEnabled( True )
                
            
        
        for c in ( self._namespace_bare_fetch_all_allowed, self._namespace_fetch_all_allowed ):
            
            # a disabled checkbox is implied-on; check it without re-triggering this handler
            if not c.isEnabled():
                
                c.blockSignals( True )
                
                c.setChecked( True )
                
                c.blockSignals( False )
                
            
        
    
    def GetValue( self ):
        """Return a new TagAutocompleteOptions reflecting the current widget state."""
        
        tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( self._original_tag_autocomplete_options.GetServiceKey() )
        
        write_autocomplete_tag_domain = self._write_autocomplete_tag_domain.GetValue()
        override_write_autocomplete_file_domain = self._override_write_autocomplete_file_domain.isChecked()
        write_autocomplete_file_domain = self._write_autocomplete_file_domain.GetValue()
        search_namespaces_into_full_tags = self._search_namespaces_into_full_tags.isChecked()
        namespace_bare_fetch_all_allowed = self._namespace_bare_fetch_all_allowed.isChecked()
        namespace_fetch_all_allowed = self._namespace_fetch_all_allowed.isChecked()
        fetch_all_allowed = self._fetch_all_allowed.isChecked()
        
        tag_autocomplete_options.SetTuple(
            write_autocomplete_tag_domain,
            override_write_autocomplete_file_domain,
            write_autocomplete_file_domain,
            search_namespaces_into_full_tags,
            namespace_bare_fetch_all_allowed,
            namespace_fetch_all_allowed,
            fetch_all_allowed
        )
        
        tag_autocomplete_options.SetFetchResultsAutomatically( self._fetch_results_automatically.isChecked() )
        tag_autocomplete_options.SetExactMatchCharacterThreshold( self._exact_match_character_threshold.GetValue() )
        
        return tag_autocomplete_options
class EditTagDisplayApplication( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for which services' siblings/parents apply to each tag service.
    
    One notebook tab per real tag service; each tab is a _Panel holding two
    ordered lists (sibling application, parent application). GetValue() returns
    the two master->applicable-service-keys mappings.
    """
    
    def __init__( self, parent, master_service_keys_to_sibling_applicable_service_keys, master_service_keys_to_parent_applicable_service_keys ):
        
        # defaultdicts so services with no explicit entry fall back to an empty list
        master_service_keys_to_sibling_applicable_service_keys = collections.defaultdict( list, master_service_keys_to_sibling_applicable_service_keys )
        master_service_keys_to_parent_applicable_service_keys = collections.defaultdict( list, master_service_keys_to_parent_applicable_service_keys )
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        self._tag_services_notebook = ClientGUICommon.BetterNotebook( self )
        
        min_width = ClientGUIFunctions.ConvertTextToPixelWidth( self._tag_services_notebook, 100 )
        
        self._tag_services_notebook.setMinimumWidth( min_width )
        
        services = list( HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES ) )
        
        # the first listed service's tab is selected by default
        select_service_key = services[0].GetServiceKey()
        
        for service in services:
            
            master_service_key = service.GetServiceKey()
            name = service.GetName()
            
            sibling_applicable_service_keys = master_service_keys_to_sibling_applicable_service_keys[ master_service_key ]
            parent_applicable_service_keys = master_service_keys_to_parent_applicable_service_keys[ master_service_key ]
            
            page = self._Panel( self._tag_services_notebook, master_service_key, sibling_applicable_service_keys, parent_applicable_service_keys )
            
            select = master_service_key == select_service_key
            
            self._tag_services_notebook.addTab( page, name )
            
            if select:
                
                self._tag_services_notebook.setCurrentWidget( page )
                
            
        
        vbox = QP.VBoxLayout()
        
        message = 'While a tag service normally applies its own siblings and parents to itself, it does not have to. If you want a different service\'s siblings (e.g. putting the PTR\'s siblings on your "my tags"), or multiple services\', then set it here. You can also apply no siblings or parents at all.'
        message += os.linesep * 2
        message += 'If there are conflicts, the services at the top of the list have precedence. Parents are collapsed by sibling rules before they are applied.'
        
        self._message = ClientGUICommon.BetterStaticText( self, label = message )
        self._message.setWordWrap( True )
        
        self._sync_status = ClientGUICommon.BetterStaticText( self )
        
        self._sync_status.setWordWrap( True )
        
        # explain when pending sibling/parent work will actually run;
        # styled green ('HydrusValid') or yellow ('HydrusWarning') accordingly
        if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
            
            self._sync_status.setText( 'Siblings and parents are set to sync all the time. Changes will start applying as soon as you ok this dialog.' )
            
            self._sync_status.setObjectName( 'HydrusValid' )
            
        else:
            
            if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_idle' ):
                
                self._sync_status.setText( 'Siblings and parents are only set to sync during idle time. Changes here will only start to apply when you are not using the client.' )
                
            else:
                
                self._sync_status.setText( 'Siblings and parents are not set to sync in the background at any time. If there is sync work to do, you will have to force it to run using the \'review\' window under _tags->siblings and parents sync_.' )
                
            
            self._sync_status.setObjectName( 'HydrusWarning' )
            
        
        # re-polish so the objectName-based stylesheet colour takes effect
        self._sync_status.style().polish( self._sync_status )
        
        QP.AddToLayout( vbox, self._message, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._sync_status, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._tag_services_notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.widget().setLayout( vbox )
        
    
    def GetValue( self ):
        """Collect every tab's (master key, sibling keys, parent keys) into two mappings."""
        
        master_service_keys_to_sibling_applicable_service_keys = collections.defaultdict( list )
        master_service_keys_to_parent_applicable_service_keys = collections.defaultdict( list )
        
        for page in self._tag_services_notebook.GetPages():
            
            ( master_service_key, sibling_applicable_service_keys, parent_applicable_service_keys ) = page.GetValue()
            
            master_service_keys_to_sibling_applicable_service_keys[ master_service_key ] = sibling_applicable_service_keys
            master_service_keys_to_parent_applicable_service_keys[ master_service_key ] = parent_applicable_service_keys
            
        
        return ( master_service_keys_to_sibling_applicable_service_keys, master_service_keys_to_parent_applicable_service_keys )
        
    
    class _Panel( QW.QWidget ):
        """One tag service's tab: ordered sibling/parent application lists."""
        
        def __init__( self, parent: QW.QWidget, master_service_key: bytes, sibling_applicable_service_keys: typing.Sequence[ bytes ], parent_applicable_service_keys: typing.Sequence[ bytes ] ):
            
            QW.QWidget.__init__( self, parent )
            
            self._master_service_key = master_service_key
            
            self._sibling_box = ClientGUICommon.StaticBox( self, 'sibling application' )
            
            # ordered list; position matters (top of list has precedence)
            self._sibling_service_keys_listbox = ClientGUIListBoxes.QueueListBox( self._sibling_box, 4, HG.client_controller.services_manager.GetName, add_callable = self._AddSibling )
            
            self._sibling_service_keys_listbox.AddDatas( sibling_applicable_service_keys )
            
            self._parent_box = ClientGUICommon.StaticBox( self, 'parent application' )
            
            # NOTE(review): parented to self._sibling_box in the original — possibly
            # intentional reparenting-by-layout, but looks like it was meant to be
            # self._parent_box; confirm before changing
            self._parent_service_keys_listbox = ClientGUIListBoxes.QueueListBox( self._sibling_box, 4, HG.client_controller.services_manager.GetName, add_callable = self._AddParent )
            
            self._parent_service_keys_listbox.AddDatas( parent_applicable_service_keys )
            
            self._sibling_box.Add( self._sibling_service_keys_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
            self._parent_box.Add( self._parent_service_keys_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
            
            vbox = QP.VBoxLayout()
            
            QP.AddToLayout( vbox, self._sibling_box, CC.FLAGS_EXPAND_BOTH_WAYS )
            QP.AddToLayout( vbox, self._parent_box, CC.FLAGS_EXPAND_BOTH_WAYS )
            
            self.setLayout( vbox )
            
        
        def _AddParent( self ):
            """Add-callable for the parent list: pick a service not already listed."""
            
            current_service_keys = self._parent_service_keys_listbox.GetData()
            
            return self._AddService( current_service_keys )
            
        
        def _AddService( self, current_service_keys ):
            """Prompt for a tag service not in current_service_keys; VetoException cancels the add."""
            
            allowed_services = HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES )
            
            allowed_services = [ service for service in allowed_services if service.GetServiceKey() not in current_service_keys ]
            
            if len( allowed_services ) == 0:
                
                QW.QMessageBox.information( self, 'Information', 'You have all the current tag services applied to this service.' )
                
                raise HydrusExceptions.VetoException()
                
            
            choice_tuples = [ ( service.GetName(), service.GetServiceKey(), service.GetName() ) for service in allowed_services ]
            
            try:
                
                service_key = ClientGUIDialogsQuick.SelectFromListButtons( self, 'Which service?', choice_tuples )
                
                return service_key
                
            except HydrusExceptions.CancelledException:
                
                # translate dialog cancel into a veto so the listbox adds nothing
                raise HydrusExceptions.VetoException()
                
            
        
        def _AddSibling( self ):
            """Add-callable for the sibling list: pick a service not already listed."""
            
            current_service_keys = self._sibling_service_keys_listbox.GetData()
            
            return self._AddService( current_service_keys )
            
        
        def GetValue( self ):
            """Return ( master service key, sibling keys in order, parent keys in order )."""
            
            return ( self._master_service_key, self._sibling_service_keys_listbox.GetData(), self._parent_service_keys_listbox.GetData() )
class EditTagDisplayManagerPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for the TagDisplayManager: per-service display tag filters and
    tag autocomplete options, one notebook tab per tag service (including
    'all known tags'). GetValue() returns an updated duplicate of the manager.
    """
    
    def __init__( self, parent, tag_display_manager: ClientTagsHandling.TagDisplayManager ):
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        # kept so GetValue can duplicate and then overwrite the display options
        self._original_tag_display_manager = tag_display_manager
        
        self._tag_services = ClientGUICommon.BetterNotebook( self )
        
        min_width = ClientGUIFunctions.ConvertTextToPixelWidth( self._tag_services, 100 )
        
        self._tag_services.setMinimumWidth( min_width )
        
        services = list( HG.client_controller.services_manager.GetServices( ( HC.COMBINED_TAG, HC.LOCAL_TAG, HC.TAG_REPOSITORY ) ) )
        
        for service in services:
            
            service_key = service.GetServiceKey()
            name = service.GetName()
            
            page = self._Panel( self._tag_services, self._original_tag_display_manager, service_key )
            
            # 'all known tags' is the default-selected tab
            select = service_key == CC.COMBINED_TAG_SERVICE_KEY
            
            self._tag_services.addTab( page, name )
            
            if select:
                
                self._tag_services.setCurrentWidget( page )
                
            
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, self._tag_services, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.widget().setLayout( vbox )
        
    
    def GetValue( self ):
        """Return a duplicate of the original manager with all tabs' values applied."""
        
        tag_display_manager = self._original_tag_display_manager.Duplicate()
        
        # wipe and rebuild so removed filters do not linger
        tag_display_manager.ClearTagDisplayOptions()
        
        for page in self._tag_services.GetPages():
            
            ( service_key, tag_display_types_to_tag_filters, tag_autocomplete_options ) = page.GetValue()
            
            for ( tag_display_type, tag_filter ) in tag_display_types_to_tag_filters.items():
                
                tag_display_manager.SetTagFilter( tag_display_type, service_key, tag_filter )
                
            
            tag_display_manager.SetTagAutocompleteOptions( tag_autocomplete_options )
            
        
        return tag_display_manager
        
    
    class _Panel( QW.QWidget ):
        """One service's tab: single/selection display tag filters + autocomplete options."""
        
        def __init__( self, parent: QW.QWidget, tag_display_manager: ClientTagsHandling.TagDisplayManager, service_key: bytes ):
            
            QW.QWidget.__init__( self, parent )
            
            single_tag_filter = tag_display_manager.GetTagFilter( ClientTags.TAG_DISPLAY_SINGLE_MEDIA, service_key )
            selection_tag_filter = tag_display_manager.GetTagFilter( ClientTags.TAG_DISPLAY_SELECTION_LIST, service_key )
            
            tag_autocomplete_options = tag_display_manager.GetTagAutocompleteOptions( service_key )
            
            self._service_key = service_key
            
            self._display_box = ClientGUICommon.StaticBox( self, 'display' )
            
            message = 'This filters which tags will show on \'single\' file views such as the media viewer and thumbnail banners.'
            
            self._single_tag_filter_button = TagFilterButton( self._display_box, message, single_tag_filter, label_prefix = 'tags shown: ' )
            
            message = 'This filters which tags will show on \'selection\' file views such as the \'selection tags\' list on regular search pages.'
            
            self._selection_tag_filter_button = TagFilterButton( self._display_box, message, selection_tag_filter, label_prefix = 'tags shown: ' )
            
            self._tao_box = ClientGUICommon.StaticBox( self, 'autocomplete' )
            
            self._tag_autocomplete_options_panel = EditTagAutocompleteOptionsPanel( self._tao_box, tag_autocomplete_options )
            
            rows = []
            
            rows.append( ( 'Tag filter for single file views: ', self._single_tag_filter_button ) )
            rows.append( ( 'Tag filter for multiple file views: ', self._selection_tag_filter_button ) )
            
            gridbox = ClientGUICommon.WrapInGrid( self._display_box, rows )
            
            self._display_box.Add( gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
            
            self._tao_box.Add( self._tag_autocomplete_options_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
            
            vbox = QP.VBoxLayout()
            
            if self._service_key == CC.COMBINED_TAG_SERVICE_KEY:
                
                message = 'These options apply to all tag services, or to where the tag domain is "all known tags".'
                message += os.linesep * 2
                message += 'This tag domain is the union of all other services, so it can be more computationally expensive. You most often see it on new search pages.'
                
            else:
                
                message = 'This is just one tag service. You most often search a specific tag service in the manage tags dialog.'
                
            
            st = ClientGUICommon.BetterStaticText( self, message )
            
            st.setWordWrap( True )
            
            QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, self._display_box, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, self._tao_box, CC.FLAGS_EXPAND_PERPENDICULAR )
            
            vbox.addStretch( 1 )
            
            self.setLayout( vbox )
            
        
        def GetValue( self ):
            """Return ( service key, {display type: tag filter}, autocomplete options )."""
            
            tag_display_types_to_tag_filters = {}
            
            tag_display_types_to_tag_filters[ ClientTags.TAG_DISPLAY_SINGLE_MEDIA ] = self._single_tag_filter_button.GetValue()
            tag_display_types_to_tag_filters[ ClientTags.TAG_DISPLAY_SELECTION_LIST ] = self._selection_tag_filter_button.GetValue()
            
            tag_autocomplete_options = self._tag_autocomplete_options_panel.GetValue()
            
            return ( self._service_key, tag_display_types_to_tag_filters, tag_autocomplete_options )
class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
TEST_RESULT_DEFAULT = 'Enter a tag here to test if it passes the current filter:'
TEST_RESULT_BLACKLIST_DEFAULT = 'Enter a tag here to test if it passes the blacklist (siblings tested, unnamespaced rules match namespaced tags):'
def __init__( self, parent, tag_filter, only_show_blacklist = False, namespaces = None, message = None ):
ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
self._only_show_blacklist = only_show_blacklist
self._namespaces = namespaces
self._wildcard_replacements = {}
self._wildcard_replacements[ '*' ] = ''
self._wildcard_replacements[ '*:' ] = ':'
self._wildcard_replacements[ '*:*' ] = ':'
#
help_button = ClientGUICommon.BetterBitmapButton( self, CC.global_pixmaps().help, self._ShowHelp )
help_hbox = ClientGUICommon.WrapInText( help_button, self, 'help for this panel -->', QG.QColor( 0, 0, 255 ) )
#
self._import_favourite = ClientGUICommon.BetterButton( self, 'import', self._ImportFavourite )
self._export_favourite = ClientGUICommon.BetterButton( self, 'export', self._ExportFavourite )
self._load_favourite = ClientGUICommon.BetterButton( self, 'load', self._LoadFavourite )
self._save_favourite = ClientGUICommon.BetterButton( self, 'save', self._SaveFavourite )
self._delete_favourite = ClientGUICommon.BetterButton( self, 'delete', self._DeleteFavourite )
#
self._show_all_panels_button = ClientGUICommon.BetterButton( self, 'show other panels', self._ShowAllPanels )
self._show_all_panels_button.setToolTip( 'This shows the whitelist and advanced panels, in case you want to craft a clever blacklist with \'except\' rules.' )
show_the_button = self._only_show_blacklist and HG.client_controller.new_options.GetBoolean( 'advanced_mode' )
self._show_all_panels_button.setVisible( show_the_button )
#
self._notebook = ClientGUICommon.BetterNotebook( self )
#
self._advanced_panel = self._InitAdvancedPanel()
self._whitelist_panel = self._InitWhitelistPanel()
self._blacklist_panel = self._InitBlacklistPanel()
#
if self._only_show_blacklist:
self._whitelist_panel.setVisible( False )
self._notebook.addTab( self._blacklist_panel, 'blacklist' )
self._advanced_panel.setVisible( False )
else:
self._notebook.addTab( self._whitelist_panel, 'whitelist' )
self._notebook.addTab( self._blacklist_panel, 'blacklist' )
self._notebook.addTab( self._advanced_panel, 'advanced' )
#
self._redundant_st = ClientGUICommon.BetterStaticText( self, '', ellipsize_end = True )
self._current_filter_st = ClientGUICommon.BetterStaticText( self, 'currently keeping: ', ellipsize_end = True )
self._test_result_st = ClientGUICommon.BetterStaticText( self, self.TEST_RESULT_DEFAULT )
self._test_result_st.setAlignment( QC.Qt.AlignVCenter | QC.Qt.AlignRight )
self._test_result_st.setWordWrap( True )
self._test_input = QW.QPlainTextEdit( self )
#
vbox = QP.VBoxLayout()
QP.AddToLayout( vbox, help_hbox, CC.FLAGS_ON_RIGHT )
if message is not None:
st = ClientGUICommon.BetterStaticText( self, message )
st.setWordWrap( True )
QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
hbox = QP.HBoxLayout()
QP.AddToLayout( hbox, self._import_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._export_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._load_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._save_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._delete_favourite, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( vbox, hbox, CC.FLAGS_ON_RIGHT )
QP.AddToLayout( vbox, self._show_all_panels_button, CC.FLAGS_ON_RIGHT )
QP.AddToLayout( vbox, self._notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
QP.AddToLayout( vbox, self._redundant_st, CC.FLAGS_EXPAND_PERPENDICULAR )
QP.AddToLayout( vbox, self._current_filter_st, CC.FLAGS_EXPAND_PERPENDICULAR )
test_text_vbox = QP.VBoxLayout()
QP.AddToLayout( test_text_vbox, self._test_result_st, CC.FLAGS_EXPAND_PERPENDICULAR )
hbox = QP.HBoxLayout()
QP.AddToLayout( hbox, test_text_vbox, CC.FLAGS_CENTER_PERPENDICULAR_EXPAND_DEPTH )
QP.AddToLayout( hbox, self._test_input, CC.FLAGS_CENTER_PERPENDICULAR_EXPAND_DEPTH )
QP.AddToLayout( vbox, hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
self.widget().setLayout( vbox )
#
self._advanced_blacklist.listBoxChanged.connect( self._UpdateStatus )
self._advanced_whitelist.listBoxChanged.connect( self._UpdateStatus )
self._simple_whitelist_global_checkboxes.clicked.connect( self.EventSimpleWhitelistGlobalCheck )
self._simple_whitelist_namespace_checkboxes.clicked.connect( self.EventSimpleWhitelistNamespaceCheck )
self._simple_blacklist_global_checkboxes.clicked.connect( self.EventSimpleBlacklistGlobalCheck )
self._simple_blacklist_namespace_checkboxes.clicked.connect( self.EventSimpleBlacklistNamespaceCheck )
self._test_input.textChanged.connect( self._UpdateTest )
self.SetValue( tag_filter )
def _AdvancedAddBlacklist( self, tag_slice ):
    """Toggle a tag slice on the advanced blacklist, warning when a broader rule already covers it."""
    
    tag_slice = self._CleanTagSliceInput( tag_slice )
    
    already_listed = tag_slice in self._advanced_blacklist.GetTagSlices()
    
    if already_listed:
        
        # adding an existing rule acts as a toggle-off
        self._advanced_blacklist.RemoveTagSlices( ( tag_slice, ) )
        
    else:
        
        # a slice cannot sit on both lists at once
        self._advanced_whitelist.RemoveTagSlices( ( tag_slice, ) )
        
        if self._CurrentlyBlocked( tag_slice ):
            
            self._ShowRedundantError( HydrusTags.ConvertTagSliceToString( tag_slice ) + ' is already blocked by a broader rule!' )
            
        
        self._advanced_blacklist.AddTagSlices( ( tag_slice, ) )
        
    
    self._UpdateStatus()
def _AdvancedAddBlacklistButton( self ):
    """Send the current blacklist input-box text to the blacklist."""
    
    self._AdvancedAddBlacklist( self._advanced_blacklist_input.GetValue() )
def _AdvancedAddBlacklistMultiple( self, tag_slices ):
    """Add every given tag slice to the blacklist, one at a time."""
    
    for ts in tag_slices:
        
        self._AdvancedAddBlacklist( ts )
def _AdvancedAddWhitelist( self, tag_slice ):
    """Toggle a tag slice on the advanced whitelist, warning when it is already permitted anyway."""
    
    tag_slice = self._CleanTagSliceInput( tag_slice )
    
    already_listed = tag_slice in self._advanced_whitelist.GetTagSlices()
    
    if already_listed:
        
        # adding an existing rule acts as a toggle-off
        self._advanced_whitelist.RemoveTagSlices( ( tag_slice, ) )
        
    else:
        
        # a slice cannot sit on both lists at once
        self._advanced_blacklist.RemoveTagSlices( ( tag_slice, ) )
        
        # if it is still blocked after that, it needs whitelisting explicitly
        if not self._CurrentlyBlocked( tag_slice ) and tag_slice not in ( '', ':' ):
            
            self._ShowRedundantError( HydrusTags.ConvertTagSliceToString( tag_slice ) + ' is already permitted by a broader rule!' )
            
        
        self._advanced_whitelist.AddTagSlices( ( tag_slice, ) )
        
    
    self._UpdateStatus()
def _AdvancedAddWhitelistButton( self ):
    """Send the current whitelist input-box text to the whitelist."""
    
    self._AdvancedAddWhitelist( self._advanced_whitelist_input.GetValue() )
def _AdvancedAddWhitelistMultiple( self, tag_slices ):
    """Add every given tag slice to the whitelist, one at a time."""
    
    for ts in tag_slices:
        
        self._AdvancedAddWhitelist( ts )
def _AdvancedBlacklistEverything( self ):
    """Reset the filter to block everything: blacklist '' and ':' with no exceptions."""
    
    everything = ( '', ':' )
    
    self._advanced_blacklist.SetTagSlices( [] )
    
    self._advanced_whitelist.RemoveTagSlices( everything )
    
    self._advanced_blacklist.AddTagSlices( everything )
    
    self._UpdateStatus()
def _AdvancedDeleteBlacklist( self ):
    """Delete the currently selected blacklist rules after a yes/no confirmation."""
    
    selected = self._advanced_blacklist.GetSelectedTagSlices()
    
    if len( selected ) == 0:
        
        return
        
    
    if ClientGUIDialogsQuick.GetYesNo( self, 'Remove all selected?' ) == QW.QDialog.Accepted:
        
        self._advanced_blacklist.RemoveTagSlices( selected )
        
        self._UpdateStatus()
def _AdvancedDeleteWhitelist( self ):
    """Delete the currently selected whitelist rules after a yes/no confirmation."""
    
    selected = self._advanced_whitelist.GetSelectedTagSlices()
    
    if len( selected ) == 0:
        
        return
        
    
    if ClientGUIDialogsQuick.GetYesNo( self, 'Remove all selected?' ) == QW.QDialog.Accepted:
        
        self._advanced_whitelist.RemoveTagSlices( selected )
        
        self._UpdateStatus()
def _CleanTagSliceInput( self, tag_slice ):
    """Normalise a user-entered tag slice: lowercase, trimmed, wildcards canonicalised."""
    
    cleaned = tag_slice.lower().strip()
    
    # collapse any run of asterisks down to a single wildcard
    while '**' in cleaned:
        
        cleaned = cleaned.replace( '**', '*' )
        
    
    # map the pure-wildcard forms ('*', '*:', '*:*') to their canonical slices
    cleaned = self._wildcard_replacements.get( cleaned, cleaned )
    
    # 'namespace:*' means the whole namespace, whose canonical form is 'namespace:'
    if ':' in cleaned:
        
        ( namespace, subtag ) = HydrusTags.SplitTag( cleaned )
        
        if subtag == '*':
            
            cleaned = namespace + ':'
            
        
    
    return cleaned
def _CurrentlyBlocked( self, tag_slice ):
    """Return True if the given tag slice is covered by any current blacklist rule."""
    
    if tag_slice in ( '', ':' ):
        
        # the global rules are only covered by themselves
        test_slices = { tag_slice }
        
    elif tag_slice.count( ':' ) == 1 and tag_slice.endswith( ':' ):
        
        # a bare namespace rule is covered by itself or the all-namespaces rule
        test_slices = { ':', tag_slice }
        
    elif ':' in tag_slice:
        
        # a namespaced tag is covered by itself, its namespace, or all namespaces
        ( namespace, subtag ) = HydrusTags.SplitTag( tag_slice )
        
        test_slices = { ':', namespace + ':', tag_slice }
        
    else:
        
        # an unnamespaced tag is covered by itself or the all-unnamespaced rule
        test_slices = { '', tag_slice }
        
    
    current_rules = set( self._advanced_blacklist.GetTagSlices() )
    
    return len( current_rules.intersection( test_slices ) ) > 0
def _DeleteFavourite( self ):
    """Pop up a menu of favourite tag filters; picking one deletes it (after confirmation)."""
    
    def do_it( name ):
        # re-fetch favourites at click time; the menu may be stale by then
        
        names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
        
        if name in names_to_tag_filters:
            
            message = 'Delete "{}"?'.format( name )
            
            result = ClientGUIDialogsQuick.GetYesNo( self, message )
            
            if result != QW.QDialog.Accepted:
                
                return
                
            
            del names_to_tag_filters[ name ]
            
            HG.client_controller.new_options.SetFavouriteTagFilters( names_to_tag_filters )
            
        
    
    names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
    
    menu = QW.QMenu()
    
    if len( names_to_tag_filters ) == 0:
        
        ClientGUIMenus.AppendMenuLabel( menu, 'no favourites set!' )
        
    else:
        
        for ( name, tag_filter ) in names_to_tag_filters.items():
            
            ClientGUIMenus.AppendMenuItem( menu, name, 'delete {}'.format( name ), do_it, name )
            
        
    
    CGC.core().PopupMenu( self, menu )
def _ExportFavourite( self ):
    """Pop up a menu of favourite tag filters; picking one copies its serialised
    form to the clipboard.
    
    Fix: the menu item description previously read 'load {name}' — a copy/paste
    leftover from the load action — even though selecting it exports (copies) the
    filter. It now reads 'export {name}'.
    """
    
    names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
    
    menu = QW.QMenu()
    
    if len( names_to_tag_filters ) == 0:
        
        ClientGUIMenus.AppendMenuLabel( menu, 'no favourites set!' )
        
    else:
        
        for ( name, tag_filter ) in names_to_tag_filters.items():
            
            # publishing to 'clipboard' copies the serialised filter text out
            ClientGUIMenus.AppendMenuItem( menu, name, 'export {}'.format( name ), HG.client_controller.pub, 'clipboard', 'text', tag_filter.DumpToString() )
            
        
    
    CGC.core().PopupMenu( self, menu )
def _GetWhiteBlacklistsPossible( self ):
    """Report whether the simple whitelist/blacklist tabs can represent the advanced rules.
    
    Returns ( whitelist_possible, blacklist_possible ).
    """
    
    blacklist_slices = set( self._advanced_blacklist.GetTagSlices() )
    whitelist_slices = self._advanced_whitelist.GetTagSlices()
    
    # the simple whitelist only works when the blacklist is at most the two global rules
    whitelist_possible = blacklist_slices.issubset( { '', ':' } )
    
    # the simple blacklist only works when there are no 'except' rules at all
    blacklist_possible = len( whitelist_slices ) == 0
    
    return ( whitelist_possible, blacklist_possible )
def _ImportFavourite( self ):
    """Import a serialised tag filter from the clipboard, name it as a favourite, and load it."""
    
    try:
        
        raw_text = HG.client_controller.GetClipboardText()
        
    except HydrusExceptions.DataMissing as e:
        
        QW.QMessageBox.critical( self, 'Error', str(e) )
        
        return
        
    
    try:
        
        obj = HydrusSerialisable.CreateFromString( raw_text )
        
    except Exception as e:
        
        QW.QMessageBox.critical( self, 'Error', 'I could not understand what was in the clipboard' )
        
        return
        
    
    if not isinstance( obj, HydrusTags.TagFilter ):
        
        QW.QMessageBox.critical( self, 'Error', 'That object was not a Tag Filter! It seemed to be a "{}".'.format(type(obj)) )
        
        return
        
    
    tag_filter = obj
    
    with ClientGUIDialogs.DialogTextEntry( self, 'Enter a name for the favourite.' ) as dlg:
        
        if dlg.exec() == QW.QDialog.Accepted:
            
            names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
            
            name = dlg.GetValue()
            
            if name in names_to_tag_filters:
                
                message = '"{}" already exists! Overwrite?'.format( name )
                
                result = ClientGUIDialogsQuick.GetYesNo( self, message )
                
                if result != QW.QDialog.Accepted:
                    
                    return
                    
                
            
            names_to_tag_filters[ name ] = tag_filter
            
            HG.client_controller.new_options.SetFavouriteTagFilters( names_to_tag_filters )
            
            # also load the imported filter into this panel straight away
            self.SetValue( tag_filter )
def _InitAdvancedPanel( self ):
    """Build and return the advanced tab: raw blacklist ('exclude these') and
    whitelist ('except for these') listboxes with add/delete controls.
    
    Also creates self._advanced_blacklist / self._advanced_whitelist, which the
    simple tabs mirror.
    """
    
    advanced_panel = QW.QWidget( self._notebook )
    
    # -- blacklist side --
    
    blacklist_panel = ClientGUICommon.StaticBox( advanced_panel, 'exclude these' )
    
    self._advanced_blacklist = ClientGUIListBoxes.ListBoxTagsFilter( blacklist_panel )
    
    self._advanced_blacklist_input = ClientGUIControls.TextAndPasteCtrl( blacklist_panel, self._AdvancedAddBlacklistMultiple, allow_empty_input = True )
    
    add_blacklist_button = ClientGUICommon.BetterButton( blacklist_panel, 'add', self._AdvancedAddBlacklistButton )
    delete_blacklist_button = ClientGUICommon.BetterButton( blacklist_panel, 'delete', self._AdvancedDeleteBlacklist )
    blacklist_everything_button = ClientGUICommon.BetterButton( blacklist_panel, 'block everything', self._AdvancedBlacklistEverything )
    
    # -- whitelist side --
    
    whitelist_panel = ClientGUICommon.StaticBox( advanced_panel, 'except for these' )
    
    self._advanced_whitelist = ClientGUIListBoxes.ListBoxTagsFilter( whitelist_panel )
    
    self._advanced_whitelist_input = ClientGUIControls.TextAndPasteCtrl( whitelist_panel, self._AdvancedAddWhitelistMultiple, allow_empty_input = True )
    
    self._advanced_add_whitelist_button = ClientGUICommon.BetterButton( whitelist_panel, 'add', self._AdvancedAddWhitelistButton )
    delete_whitelist_button = ClientGUICommon.BetterButton( whitelist_panel, 'delete', self._AdvancedDeleteWhitelist )
    
    # -- layout --
    
    button_hbox = QP.HBoxLayout()
    
    QP.AddToLayout( button_hbox, self._advanced_blacklist_input, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( button_hbox, add_blacklist_button, CC.FLAGS_CENTER_PERPENDICULAR )
    QP.AddToLayout( button_hbox, delete_blacklist_button, CC.FLAGS_CENTER_PERPENDICULAR )
    QP.AddToLayout( button_hbox, blacklist_everything_button, CC.FLAGS_CENTER_PERPENDICULAR )
    
    blacklist_panel.Add( self._advanced_blacklist, CC.FLAGS_EXPAND_BOTH_WAYS )
    blacklist_panel.Add( button_hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
    
    button_hbox = QP.HBoxLayout()
    
    QP.AddToLayout( button_hbox, self._advanced_whitelist_input, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( button_hbox, self._advanced_add_whitelist_button, CC.FLAGS_CENTER_PERPENDICULAR )
    QP.AddToLayout( button_hbox, delete_whitelist_button, CC.FLAGS_CENTER_PERPENDICULAR )
    
    whitelist_panel.Add( self._advanced_whitelist, CC.FLAGS_EXPAND_BOTH_WAYS )
    whitelist_panel.Add( button_hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
    
    hbox = QP.HBoxLayout()
    
    QP.AddToLayout( hbox, blacklist_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( hbox, whitelist_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
    
    advanced_panel.setLayout( hbox )
    
    return advanced_panel
def _InitBlacklistPanel( self ):
    """Build and return the 'simple blacklist' page for the notebook.
    
    Layout: an error/status line on top, then global and per-namespace
    checkboxes on the left and the blacklist listbox + type/paste input on
    the right. The widgets are populated/refreshed later by _UpdateStatus.
    """
    
    blacklist_panel = QW.QWidget( self._notebook )
    
    #
    
    # shows 'filter is too complicated to display here' messaging when needed
    self._simple_blacklist_error_st = ClientGUICommon.BetterStaticText( blacklist_panel )
    
    self._simple_blacklist_global_checkboxes = QP.CheckListBox( blacklist_panel )
    
    # client data is the tag slice: '' = all unnamespaced tags, ':' = all namespaced tags
    self._simple_blacklist_global_checkboxes.Append( 'unnamespaced tags', '' )
    self._simple_blacklist_global_checkboxes.Append( 'namespaced tags', ':' )
    
    self._simple_blacklist_namespace_checkboxes = QP.CheckListBox( blacklist_panel )
    
    for namespace in self._namespaces:
        
        # the unnamespaced pseudo-namespace is covered by the global checkboxes above
        if namespace == '':
            
            continue
            
        
        # client data is the 'namespace:' tag slice
        self._simple_blacklist_namespace_checkboxes.Append( namespace, namespace + ':' )
        
    
    self._simple_blacklist = ClientGUIListBoxes.ListBoxTagsFilter( blacklist_panel )
    
    # typed/pasted entries are routed through the advanced blacklist logic
    self._simple_blacklist_input = ClientGUIControls.TextAndPasteCtrl( blacklist_panel, self._SimpleAddBlacklistMultiple, allow_empty_input = True )
    
    #
    
    left_vbox = QP.VBoxLayout()
    
    QP.AddToLayout( left_vbox, self._simple_blacklist_global_checkboxes, CC.FLAGS_EXPAND_PERPENDICULAR )
    left_vbox.addStretch( 1 )
    QP.AddToLayout( left_vbox, self._simple_blacklist_namespace_checkboxes, CC.FLAGS_EXPAND_PERPENDICULAR )
    
    right_vbox = QP.VBoxLayout()
    
    QP.AddToLayout( right_vbox, self._simple_blacklist, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( right_vbox, self._simple_blacklist_input, CC.FLAGS_EXPAND_PERPENDICULAR )
    
    main_hbox = QP.HBoxLayout()
    
    QP.AddToLayout( main_hbox, left_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( main_hbox, right_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
    
    vbox = QP.VBoxLayout()
    
    QP.AddToLayout( vbox, self._simple_blacklist_error_st, CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, main_hbox, CC.FLAGS_EXPAND_BOTH_WAYS )
    
    blacklist_panel.setLayout( vbox )
    
    # listbox removals must flow back into the canonical (advanced) filter state
    self._simple_blacklist.tagsRemoved.connect( self._SimpleBlacklistRemoved )
    
    return blacklist_panel
def _InitWhitelistPanel( self ):
    """Build and return the 'simple whitelist' page for the notebook.
    
    Mirror of _InitBlacklistPanel: status line on top, global and
    per-namespace checkboxes on the left, whitelist listbox + type/paste
    input on the right. Populated/refreshed later by _UpdateStatus.
    """
    
    whitelist_panel = QW.QWidget( self._notebook )
    
    #
    
    # shows 'filter is too complicated to display here' messaging when needed
    self._simple_whitelist_error_st = ClientGUICommon.BetterStaticText( whitelist_panel )
    
    self._simple_whitelist_global_checkboxes = QP.CheckListBox( whitelist_panel )
    
    # client data is the tag slice: '' = all unnamespaced tags, ':' = all namespaced tags
    self._simple_whitelist_global_checkboxes.Append( 'unnamespaced tags', '' )
    self._simple_whitelist_global_checkboxes.Append( 'namespaced tags', ':' )
    
    self._simple_whitelist_namespace_checkboxes = QP.CheckListBox( whitelist_panel )
    
    for namespace in self._namespaces:
        
        # the unnamespaced pseudo-namespace is covered by the global checkboxes above
        if namespace == '':
            
            continue
            
        
        # client data is the 'namespace:' tag slice
        self._simple_whitelist_namespace_checkboxes.Append( namespace, namespace + ':' )
        
    
    self._simple_whitelist = ClientGUIListBoxes.ListBoxTagsFilter( whitelist_panel )
    
    # typed/pasted entries are routed through the advanced whitelist logic
    self._simple_whitelist_input = ClientGUIControls.TextAndPasteCtrl( whitelist_panel, self._SimpleAddWhitelistMultiple, allow_empty_input = True )
    
    #
    
    left_vbox = QP.VBoxLayout()
    
    QP.AddToLayout( left_vbox, self._simple_whitelist_global_checkboxes, CC.FLAGS_EXPAND_PERPENDICULAR )
    left_vbox.addStretch( 1 )
    QP.AddToLayout( left_vbox, self._simple_whitelist_namespace_checkboxes, CC.FLAGS_EXPAND_PERPENDICULAR )
    
    right_vbox = QP.VBoxLayout()
    
    QP.AddToLayout( right_vbox, self._simple_whitelist, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( right_vbox, self._simple_whitelist_input, CC.FLAGS_EXPAND_PERPENDICULAR )
    
    main_hbox = QP.HBoxLayout()
    
    QP.AddToLayout( main_hbox, left_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( main_hbox, right_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
    
    vbox = QP.VBoxLayout()
    
    QP.AddToLayout( vbox, self._simple_whitelist_error_st, CC.FLAGS_EXPAND_PERPENDICULAR )
    QP.AddToLayout( vbox, main_hbox, CC.FLAGS_EXPAND_BOTH_WAYS )
    
    whitelist_panel.setLayout( vbox )
    
    # listbox removals must flow back into the canonical (advanced) filter state
    self._simple_whitelist.tagsRemoved.connect( self._SimpleWhitelistRemoved )
    
    return whitelist_panel
def _LoadFavourite( self ):
    """Pop up a menu of saved favourite tag filters; picking one loads it via SetValue."""
    
    favourites = HG.client_controller.new_options.GetFavouriteTagFilters()
    
    menu = QW.QMenu()
    
    if not favourites:
        
        ClientGUIMenus.AppendMenuLabel( menu, 'no favourites set!' )
        
    else:
        
        for ( favourite_name, favourite_filter ) in favourites.items():
            
            ClientGUIMenus.AppendMenuItem( menu, favourite_name, 'load {}'.format( favourite_name ), self.SetValue, favourite_filter )
            
        
    
    CGC.core().PopupMenu( self, menu )
def _SaveFavourite( self ):
    """Save the current filter under a user-chosen name, confirming before overwriting an existing favourite."""
    
    with ClientGUIDialogs.DialogTextEntry( self, 'Enter a name for the favourite.' ) as dlg:
        
        if dlg.exec() != QW.QDialog.Accepted:
            
            return
            
        
        name = dlg.GetValue()
        
        names_to_tag_filters = HG.client_controller.new_options.GetFavouriteTagFilters()
        
        if name in names_to_tag_filters:
            
            message = '"{}" already exists! Overwrite?'.format( name )
            
            result = ClientGUIDialogsQuick.GetYesNo( self, message )
            
            if result != QW.QDialog.Accepted:
                
                return
                
            
        
        names_to_tag_filters[ name ] = self.GetValue()
        
        HG.client_controller.new_options.SetFavouriteTagFilters( names_to_tag_filters )
def _ShowAllPanels( self ):
    """Reveal the whitelist and advanced tabs and hide the one-shot 'show all' button."""
    
    for ( hidden_panel, tab_label ) in ( ( self._whitelist_panel, 'whitelist' ), ( self._advanced_panel, 'advanced' ) ):
        
        hidden_panel.setVisible( True )
        
        self._notebook.addTab( hidden_panel, tab_label )
        
    
    self._show_all_panels_button.setVisible( False )
def _ShowHelp( self ):
    """Show an information dialog explaining the tag filter panel and its tag-slice shorthands."""
    
    paragraphs = [
        'Here you can set rules to filter tags for one purpose or another. The default is typically to permit all tags. Check the current filter summary text at the bottom-left of the panel to ensure you have your logic correct.',
        'The different tabs are multiple ways of looking at the filter--sometimes it is more useful to think about a filter as a whitelist (where only the listed contents are kept) or a blacklist (where everything _except_ the listed contents are kept), and there is also an advanced tab that lets you do a more complicated combination of the two.',
        'As well as selecting broader categories of tags with the checkboxes, you can type or paste the individual tags directly--just hit enter to add each one--and double-click an existing entry in a list to remove it.',
        'If you wish to manually type a special tag, use these shorthands:'
    ]
    
    shorthands = [
        '"namespace:" - all instances of that namespace',
        '":" - all namespaced tags',
        '"" (i.e. an empty string) - all unnamespaced tags'
    ]
    
    # paragraphs are separated by a blank line, the shorthand list is single-spaced
    help = ( os.linesep * 2 ).join( paragraphs ) + os.linesep * 2 + os.linesep.join( shorthands )
    
    QW.QMessageBox.information( self, 'Information', help )
def _ShowRedundantError( self, text ):
    """Flash a transient message in the 'redundant' status line, clearing it after two seconds."""
    
    status_st = self._redundant_st
    
    status_st.setText( text )
    
    # the QtSafe variant presumably guards against the widget being destroyed before the timer fires
    HG.client_controller.CallLaterQtSafe( status_st, 2, status_st.setText, '' )
def _SimpleAddBlacklistMultiple( self, tag_slices ):
    """Route every entered tag slice through the advanced blacklist-toggle logic."""
    
    for entered_slice in tag_slices:
        
        self._AdvancedAddBlacklist( entered_slice )
def _SimpleAddWhitelistMultiple( self, tag_slices ):
    """Add tag slices to the whitelist; re-entering a global slice ( '' or ':' ) that is already whitelisted toggles it into the blacklist instead."""
    
    for entered_slice in tag_slices:
        
        is_global_slice = entered_slice in ( '', ':' )
        
        # GetTagSlices is re-read per iteration because the Add calls below mutate the lists
        if is_global_slice and entered_slice in self._simple_whitelist.GetTagSlices():
            
            self._AdvancedAddBlacklist( entered_slice )
            
        else:
            
            self._AdvancedAddWhitelist( entered_slice )
def _SimpleBlacklistRemoved( self, tag_slices ):
    """When slices are removed from the simple blacklist view, toggle them off through the advanced logic."""
    
    for removed_slice in tag_slices:
        
        self._AdvancedAddBlacklist( removed_slice )
def _SimpleBlacklistReset( self ):
    # intentionally a no-op: the simple blacklist view is rebuilt from the
    # advanced lists every time _UpdateStatus runs, so there is nothing to reset here
    
    pass
def _SimpleWhitelistRemoved( self, tag_slices ):
    """Handle removals from the simple whitelist: the global slices ( '' and ':' ) become blacklist entries; everything else is toggled via the whitelist."""
    
    remaining_slices = set( tag_slices )
    
    for global_slice in ( '', ':' ):
        
        if global_slice not in remaining_slices:
            
            continue
            
        
        remaining_slices.discard( global_slice )
        
        self._AdvancedAddBlacklist( global_slice )
        
    
    for removed_slice in remaining_slices:
        
        self._AdvancedAddWhitelist( removed_slice )
def _SimpleWhitelistReset( self ):
    # intentionally a no-op: the simple whitelist view is rebuilt from the
    # advanced lists every time _UpdateStatus runs, so there is nothing to reset here
    
    pass
def _UpdateStatus( self ):
    """Synchronise the simple whitelist/blacklist pages and summary text with the advanced lists.
    
    The advanced black/white lists are the single source of truth. Each
    simple view is either rebuilt from them, or disabled with an
    explanatory message when the current filter is too complicated to be
    expressed in that view. Finishes by refreshing the summary string and
    re-running the tag test.
    """
    
    ( whitelist_possible, blacklist_possible ) = self._GetWhiteBlacklistsPossible()
    
    whitelist_tag_slices = self._advanced_whitelist.GetTagSlices()
    blacklist_tag_slices = self._advanced_blacklist.GetTagSlices()
    
    if whitelist_possible:
        
        self._simple_whitelist_error_st.clear()
        
        self._simple_whitelist.setEnabled( True )
        self._simple_whitelist_global_checkboxes.setEnabled( True )
        self._simple_whitelist_input.setEnabled( True )
        
        whitelist_tag_slices = set( whitelist_tag_slices )
        
        # global slices that are not blocked are implicitly whitelisted
        if not self._CurrentlyBlocked( '' ):
            
            whitelist_tag_slices.add( '' )
            
        
        if not self._CurrentlyBlocked( ':' ):
            
            whitelist_tag_slices.add( ':' )
            
            # all namespaced tags are allowed, so the per-namespace checkboxes are moot
            self._simple_whitelist_namespace_checkboxes.setEnabled( False )
            
        else:
            
            self._simple_whitelist_namespace_checkboxes.setEnabled( True )
            
        
        self._simple_whitelist.SetTagSlices( whitelist_tag_slices )
        
        # sync checkbox state with the effective whitelist membership
        for index in range( self._simple_whitelist_global_checkboxes.count() ):
            
            check = QP.GetClientData( self._simple_whitelist_global_checkboxes, index ) in whitelist_tag_slices
            
            self._simple_whitelist_global_checkboxes.Check( index, check )
            
        
        for index in range( self._simple_whitelist_namespace_checkboxes.count() ):
            
            check = QP.GetClientData( self._simple_whitelist_namespace_checkboxes, index ) in whitelist_tag_slices
            
            self._simple_whitelist_namespace_checkboxes.Check( index, check )
            
        
    else:
        
        self._simple_whitelist_error_st.setText( 'The filter is currently more complicated than a simple whitelist, so cannot be shown here.' )
        
        self._simple_whitelist.setEnabled( False )
        self._simple_whitelist_global_checkboxes.setEnabled( False )
        self._simple_whitelist_namespace_checkboxes.setEnabled( False )
        self._simple_whitelist_input.setEnabled( False )
        
        # NOTE(review): '' iterates to nothing, so this clears the listbox;
        # presumably an empty collection was intended -- same effect
        self._simple_whitelist.SetTagSlices( '' )
        
        for index in range( self._simple_whitelist_global_checkboxes.count() ):
            
            self._simple_whitelist_global_checkboxes.Check( index, False )
            
        
        for index in range( self._simple_whitelist_namespace_checkboxes.count() ):
            
            self._simple_whitelist_namespace_checkboxes.Check( index, False )
            
        
    
    #
    
    whitelist_tag_slices = self._advanced_whitelist.GetTagSlices()
    blacklist_tag_slices = self._advanced_blacklist.GetTagSlices()
    
    if blacklist_possible:
        
        self._simple_blacklist_error_st.clear()
        
        self._simple_blacklist.setEnabled( True )
        self._simple_blacklist_global_checkboxes.setEnabled( True )
        self._simple_blacklist_input.setEnabled( True )
        
        if self._CurrentlyBlocked( ':' ):
            
            # all namespaced tags are already blocked, so per-namespace checkboxes are moot
            self._simple_blacklist_namespace_checkboxes.setEnabled( False )
            
        else:
            
            self._simple_blacklist_namespace_checkboxes.setEnabled( True )
            
        
        self._simple_blacklist.SetTagSlices( blacklist_tag_slices )
        
        # sync checkbox state with the blacklist membership
        for index in range( self._simple_blacklist_global_checkboxes.count() ):
            
            check = QP.GetClientData( self._simple_blacklist_global_checkboxes, index ) in blacklist_tag_slices
            
            self._simple_blacklist_global_checkboxes.Check( index, check )
            
        
        for index in range( self._simple_blacklist_namespace_checkboxes.count() ):
            
            check = QP.GetClientData( self._simple_blacklist_namespace_checkboxes, index ) in blacklist_tag_slices
            
            self._simple_blacklist_namespace_checkboxes.Check( index, check )
            
        
    else:
        
        self._simple_blacklist_error_st.setText( 'The filter is currently more complicated than a simple blacklist, so cannot be shown here.' )
        
        self._simple_blacklist.setEnabled( False )
        self._simple_blacklist_global_checkboxes.setEnabled( False )
        self._simple_blacklist_namespace_checkboxes.setEnabled( False )
        self._simple_blacklist_input.setEnabled( False )
        
        # NOTE(review): '' iterates to nothing, so this clears the listbox;
        # presumably an empty collection was intended -- same effect
        self._simple_blacklist.SetTagSlices( '' )
        
        for index in range( self._simple_blacklist_global_checkboxes.count() ):
            
            self._simple_blacklist_global_checkboxes.Check( index, False )
            
        
        for index in range( self._simple_blacklist_namespace_checkboxes.count() ):
            
            self._simple_blacklist_namespace_checkboxes.Check( index, False )
            
        
    
    #
    
    whitelist_tag_slices = self._advanced_whitelist.GetTagSlices()
    blacklist_tag_slices = self._advanced_blacklist.GetTagSlices()
    
    # whitelist exceptions only make sense once something is blacklisted
    if len( blacklist_tag_slices ) == 0:
        
        self._advanced_whitelist_input.setEnabled( False )
        self._advanced_add_whitelist_button.setEnabled( False )
        
    else:
        
        self._advanced_whitelist_input.setEnabled( True )
        self._advanced_add_whitelist_button.setEnabled( True )
        
    
    #
    
    tag_filter = self.GetValue()
    
    if self._only_show_blacklist:
        
        pretty_tag_filter = tag_filter.ToBlacklistString()
        
    else:
        
        pretty_tag_filter = 'currently keeping: {}'.format( tag_filter.ToPermittedString() )
        
    
    self._current_filter_st.setText( pretty_tag_filter )
    
    self._UpdateTest()
def _UpdateTest( self ):
    """Re-run the 'test tags against this filter' box and update the result label.
    
    With empty input, shows a default prompt. Otherwise parses the
    newline-separated tags, evaluates them against the current filter on a
    background job, and publishes a pass/blocked summary back to the Qt
    thread. The 'HydrusValid'/'HydrusInvalid' object names drive styling.
    """
    
    test_input = self._test_input.toPlainText()
    
    if test_input == '':
        
        if self._only_show_blacklist:
            
            test_result_text = self.TEST_RESULT_BLACKLIST_DEFAULT
            
        else:
            
            test_result_text = self.TEST_RESULT_DEFAULT
            
        
        self._test_result_st.setObjectName( '' )
        
        self._test_result_st.setText( test_result_text )
        # re-polish so the stylesheet picks up the objectName change
        self._test_result_st.style().polish( self._test_result_st )
        
    else:
        
        test_tags = HydrusText.DeserialiseNewlinedTexts( test_input )
        
        test_tags = HydrusTags.CleanTags( test_tags )
        
        tag_filter = self.GetValue()
        
        # clear the result while the background job runs
        self._test_result_st.setObjectName( '' )
        
        self._test_result_st.clear()
        self._test_result_st.style().polish( self._test_result_st )
        
        if self._only_show_blacklist:
            
            def work_callable():
                
                # blacklist mode also checks siblings: a tag only passes if
                # every sibling passes (with unnamespaced rules applied to
                # namespaced tags)
                results = []
                
                tags_to_siblings = HG.client_controller.Read( 'tag_siblings_lookup', CC.COMBINED_TAG_SERVICE_KEY, test_tags )
                
                for ( test_tag, siblings ) in tags_to_siblings.items():
                    
                    results.append( False not in ( tag_filter.TagOK( sibling_tag, apply_unnamespaced_rules_to_namespaced_tags = True ) for sibling_tag in siblings ) )
                    
                
                return results
                
            
        else:
            
            def work_callable():
                
                results = [ tag_filter.TagOK( test_tag ) for test_tag in test_tags ]
                
                return results
                
            
        
        def publish_callable( results ):
            
            all_good = False not in results
            all_bad = True not in results
            
            if len( results ) == 1:
                
                if all_good:
                    
                    test_result_text = 'tag passes!'
                    
                    self._test_result_st.setObjectName( 'HydrusValid' )
                    
                else:
                    
                    test_result_text = 'tag blocked!'
                    
                    self._test_result_st.setObjectName( 'HydrusInvalid' )
                    
                
            else:
                
                if all_good:
                    
                    test_result_text = 'all pass!'
                    
                    self._test_result_st.setObjectName( 'HydrusValid' )
                    
                elif all_bad:
                    
                    test_result_text = 'all blocked!'
                    
                    self._test_result_st.setObjectName( 'HydrusInvalid' )
                    
                else:
                    
                    # mixed results: count passes vs blocks
                    c = collections.Counter()
                    
                    c.update( results )
                    
                    test_result_text = '{} pass, {} blocked!'.format( HydrusData.ToHumanInt( c[ True ] ), HydrusData.ToHumanInt( c[ False ] ) )
                    
                    self._test_result_st.setObjectName( 'HydrusInvalid' )
                    
                
            
            self._test_result_st.setText( test_result_text )
            self._test_result_st.style().polish( self._test_result_st )
            
        
        async_job = ClientGUIAsync.AsyncQtJob( self, work_callable, publish_callable )
        
        async_job.start()
def EventSimpleBlacklistNamespaceCheck( self, index ):
    """A namespace checkbox was toggled on the blacklist page--toggle that namespace slice in the blacklist."""
    
    row = index.row()
    
    if row == -1:
        
        return
        
    
    tag_slice = QP.GetClientData( self._simple_blacklist_namespace_checkboxes, row )
    
    self._AdvancedAddBlacklist( tag_slice )
def EventSimpleBlacklistGlobalCheck( self, index ):
    """A global checkbox ( '' or ':' ) was toggled on the blacklist page--toggle that slice in the blacklist."""
    
    row = index.row()
    
    if row == -1:
        
        return
        
    
    tag_slice = QP.GetClientData( self._simple_blacklist_global_checkboxes, row )
    
    self._AdvancedAddBlacklist( tag_slice )
def EventSimpleWhitelistNamespaceCheck( self, index ):
    """A namespace checkbox was toggled on the whitelist page--toggle that namespace slice in the whitelist."""
    
    row = index.row()
    
    if row == -1:
        
        return
        
    
    tag_slice = QP.GetClientData( self._simple_whitelist_namespace_checkboxes, row )
    
    self._AdvancedAddWhitelist( tag_slice )
def EventSimpleWhitelistGlobalCheck( self, index ):
    """A global checkbox ( '' or ':' ) was toggled on the whitelist page.
    
    Unchecking a currently-whitelisted global slice pushes it into the
    blacklist; otherwise the slice is toggled via the whitelist.
    """
    
    row = index.row()
    
    if row == -1:
        
        return
        
    
    tag_slice = QP.GetClientData( self._simple_whitelist_global_checkboxes, row )
    
    is_global_slice = tag_slice in ( '', ':' )
    
    if is_global_slice and tag_slice in self._simple_whitelist.GetTagSlices():
        
        self._AdvancedAddBlacklist( tag_slice )
        
    else:
        
        self._AdvancedAddWhitelist( tag_slice )
def GetValue( self ):
    """Assemble and return a HydrusTags.TagFilter from the current advanced black/white lists."""
    
    tag_filter = HydrusTags.TagFilter()
    
    rules = [ ( tag_slice, HC.FILTER_BLACKLIST ) for tag_slice in self._advanced_blacklist.GetTagSlices() ]
    rules.extend( ( tag_slice, HC.FILTER_WHITELIST ) for tag_slice in self._advanced_whitelist.GetTagSlices() )
    
    for ( tag_slice, rule ) in rules:
        
        tag_filter.SetRule( tag_slice, rule )
        
    
    return tag_filter
def SetValue( self, tag_filter: HydrusTags.TagFilter ):
    """Load the given tag filter into the advanced lists and select the simplest tab that can represent it."""
    
    tag_slices_to_rules = tag_filter.GetTagSlicesToRules()
    
    blacklist_tag_slices = [ tag_slice for ( tag_slice, rule ) in tag_slices_to_rules.items() if rule == HC.FILTER_BLACKLIST ]
    whitelist_tag_slices = [ tag_slice for ( tag_slice, rule ) in tag_slices_to_rules.items() if rule == HC.FILTER_WHITELIST ]
    
    self._advanced_blacklist.SetTagSlices( blacklist_tag_slices )
    self._advanced_whitelist.SetTagSlices( whitelist_tag_slices )
    
    ( whitelist_possible, blacklist_possible ) = self._GetWhiteBlacklistsPossible()
    
    # prefer the simplest page that can show the filter; the advanced page always can
    if self._only_show_blacklist:
        
        selection_tests = [ ( blacklist_possible, self._blacklist_panel ) ]
        
    else:
        
        selection_tests = [
            ( whitelist_possible, self._whitelist_panel ),
            ( blacklist_possible, self._blacklist_panel ),
            ( True, self._advanced_panel )
        ]
        
    
    for ( test, page ) in selection_tests:
        
        if test:
            
            self._notebook.SelectPage( page )
            
            break
            
        
    
    self._UpdateStatus()
class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ):
def __init__( self, parent, file_service_key, media, immediate_commit = False, canvas_key = None ):
    """Build the manage-tags dialog panel: one _Panel tab per real tag service.
    
    :param file_service_key: file domain the media belong to
    :param media: media whose tags are being managed (flattened and duplicated locally)
    :param immediate_commit: if True, each page writes its changes to the db as they are made
    :param canvas_key: when launched from a media viewer canvas, used to follow media changes
    """
    
    ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
    
    self._file_service_key = file_service_key
    
    self._immediate_commit = immediate_commit
    self._canvas_key = canvas_key
    
    media = ClientMedia.FlattenMedia( media )
    
    # work on duplicates so our edits do not touch the caller's media objects
    self._current_media = [ m.Duplicate() for m in media ]
    
    self._hashes = set()
    
    for m in self._current_media:
        
        self._hashes.update( m.GetHashes() )
        
    
    self._tag_repositories = ClientGUICommon.BetterNotebook( self )
    
    #
    
    services = HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES )
    
    default_tag_repository_key = HC.options[ 'default_tag_repository' ]
    
    for service in services:
        
        service_key = service.GetServiceKey()
        name = service.GetName()
        
        page = self._Panel( self._tag_repositories, self._file_service_key, service.GetServiceKey(), self._current_media, self._immediate_commit, canvas_key = self._canvas_key )
        
        # wire the page's tag input navigation signals up to tab/media navigation
        page._add_tag_box.selectUp.connect( self.EventSelectUp )
        page._add_tag_box.selectDown.connect( self.EventSelectDown )
        page._add_tag_box.showPrevious.connect( self.EventShowPrevious )
        page._add_tag_box.showNext.connect( self.EventShowNext )
        
        page.okSignal.connect( self.okSignal )
        
        select = service_key == default_tag_repository_key
        
        self._tag_repositories.addTab( page, name )
        # start on the user's default tag repository tab
        if select: self._tag_repositories.setCurrentIndex( self._tag_repositories.count() - 1 )
        
    
    #
    
    vbox = QP.VBoxLayout()
    
    QP.AddToLayout( vbox, self._tag_repositories, CC.FLAGS_EXPAND_BOTH_WAYS )
    
    self.widget().setLayout( vbox )
    
    # only follow canvas media changes when launched from a canvas
    if self._canvas_key is not None:
        
        HG.client_controller.sub( self, 'CanvasHasNewMedia', 'canvas_new_display_media' )
        
    
    self._my_shortcut_handler = ClientGUIShortcuts.ShortcutsHandler( self, [ 'global', 'media', 'main_gui' ] )
    
    self._tag_repositories.currentChanged.connect( self.EventServiceChanged )
    
    self._SetSearchFocus()
def _GetGroupsOfServiceKeysToContentUpdates( self ):
    """Collect pending updates from every service page as a list of { service_key : content_updates } dicts, skipping empty groups."""
    
    collected_groups = []
    
    for page in self._tag_repositories.GetPages():
        
        ( service_key, groups_of_content_updates ) = page.GetGroupsOfContentUpdates()
        
        collected_groups.extend( { service_key : content_updates } for content_updates in groups_of_content_updates if len( content_updates ) > 0 )
        
    
    return collected_groups
def _SetSearchFocus( self ):
    """Put keyboard focus on the tag input of the currently selected service page, if any."""
    
    current_page = self._tag_repositories.currentWidget()
    
    if current_page is None:
        
        return
        
    
    current_page.SetTagBoxFocus()
def CanvasHasNewMedia( self, canvas_key, new_media_singleton ):
    """Pubsub hook: when our parent canvas moves to new media, mirror it into every service page."""
    
    # ignore events from other canvases or empty media
    if canvas_key != self._canvas_key or new_media_singleton is None:
        
        return
        
    
    self._current_media = ( new_media_singleton.Duplicate(), )
    
    for page in self._tag_repositories.GetPages():
        
        page.SetMedia( self._current_media )
def CleanBeforeDestroy( self ):
    """Run the base-class cleanup, then let every service page clean up too."""
    
    ClientGUIScrolledPanels.ManagePanel.CleanBeforeDestroy( self )
    
    for child_page in self._tag_repositories.GetPages():
        
        child_page.CleanBeforeDestroy()
def CommitChanges( self ):
    """Write every pending group of content updates to the database, one synchronous write per group."""
    
    for service_keys_to_content_updates in self._GetGroupsOfServiceKeysToContentUpdates():
        
        HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
def EventSelectDown( self ):
    # tag input requested 'next service': move one tab right and refocus the input
    
    self._tag_repositories.SelectRight()
    
    self._SetSearchFocus()
def EventSelectUp( self ):
    # tag input requested 'previous service': move one tab left and refocus the input
    
    self._tag_repositories.SelectLeft()
    
    self._SetSearchFocus()
def EventShowNext( self ):
    """Ask the owning canvas (if any) to advance to the next media."""
    
    if self._canvas_key is None:
        
        return
        
    
    HG.client_controller.pub( 'canvas_show_next', self._canvas_key )
def EventShowPrevious( self ):
    """Ask the owning canvas (if any) to step back to the previous media."""
    
    if self._canvas_key is None:
        
        return
        
    
    HG.client_controller.pub( 'canvas_show_previous', self._canvas_key )
def EventServiceChanged( self, index ):
    """The user switched service tabs; refocus that page's tag input once Qt settles."""
    
    if not self or not QP.isValid( self ): # actually did get a runtime error here, on some Linux WM dialog shutdown
        
        return
        
    
    # ignore currentChanged signals that did not come from our own notebook
    if self.sender() != self._tag_repositories:
        
        return
        
    
    current_page = self._tag_repositories.currentWidget()
    
    if current_page is not None:
        
        HG.client_controller.CallAfterQtSafe( current_page, current_page.SetTagBoxFocus )
def ProcessApplicationCommand( self, command: CAC.ApplicationCommand ):
    """Handle shortcut commands aimed at this panel.
    
    :returns: True if the command was consumed, False if it should propagate.
    """
    
    if not command.IsSimpleCommand():
        
        return False
        
    
    action = command.GetData()
    
    if action == CAC.SIMPLE_MANAGE_FILE_TAGS:
        
        # the manage-tags shortcut while already open means 'ok, close'
        self._OKParent()
        
        return True
        
    elif action == CAC.SIMPLE_SET_SEARCH_FOCUS:
        
        self._SetSearchFocus()
        
        return True
        
    elif action == CAC.SIMPLE_FOCUS_MEDIA_VIEWER:
        
        # local import, presumably to avoid a circular module dependency
        from hydrus.client.gui import ClientGUICanvasFrame
        
        for tlw in ClientGUIFunctions.GetTLWParents( self ):
            
            if isinstance( tlw, ClientGUICanvasFrame.CanvasFrame ):
                
                tlw.TakeFocusForUser()
                
                return True
                
            
        
        return False
        
    
    return False
def UserIsOKToCancel( self ):
    """If there are uncommitted tag changes, ask the user before allowing the dialog to cancel."""
    
    pending_groups = self._GetGroupsOfServiceKeysToContentUpdates()
    
    if len( pending_groups ) == 0:
        
        return True
        
    
    message = 'Are you sure you want to cancel? You have uncommitted changes that will be lost.'
    
    return ClientGUIDialogsQuick.GetYesNo( self, message ) == QW.QDialog.Accepted
class _Panel( QW.QWidget ):
okSignal = QC.Signal()
def __init__( self, parent, file_service_key, tag_service_key, media, immediate_commit, canvas_key = None ):
    """Build one per-service page of the manage-tags dialog.
    
    Holds the tags listbox, the autocomplete tag input, the suggested-tags
    column, and the remove/copy/paste/cog buttons for a single tag service.
    """
    
    QW.QWidget.__init__( self, parent )
    
    self._file_service_key = file_service_key
    self._tag_service_key = tag_service_key
    
    self._immediate_commit = immediate_commit
    self._canvas_key = canvas_key
    
    # pending updates, grouped; consumed by GetGroupsOfContentUpdates when not committing immediately
    self._groups_of_content_updates = []
    
    self._service = HG.client_controller.services_manager.GetService( self._tag_service_key )
    
    # local services add/delete directly; repositories pend/petition instead
    self._i_am_local_tag_service = self._service.GetServiceType() == HC.LOCAL_TAG
    
    self._tags_box_sorter = ClientGUIListBoxes.StaticBoxSorterForListBoxTags( self, 'tags', show_siblings_sort = True )
    
    self._tags_box = ClientGUIListBoxes.ListBoxTagsMediaTagsDialog( self._tags_box_sorter, self.EnterTags, self.RemoveTags )
    
    self._tags_box_sorter.SetTagsBox( self._tags_box )
    
    #
    
    self._new_options = HG.client_controller.new_options
    
    if self._i_am_local_tag_service:
        
        text = 'remove all/selected tags'
        
    else:
        
        text = 'petition to remove all/selected tags'
        
    
    self._remove_tags = ClientGUICommon.BetterButton( self._tags_box_sorter, text, self._RemoveTagsButton )
    
    self._copy_button = ClientGUICommon.BetterBitmapButton( self._tags_box_sorter, CC.global_pixmaps().copy, self._Copy )
    self._copy_button.setToolTip( 'Copy selected tags to the clipboard. If none are selected, copies all.' )
    
    self._paste_button = ClientGUICommon.BetterBitmapButton( self._tags_box_sorter, CC.global_pixmaps().paste, self._Paste )
    self._paste_button.setToolTip( 'Paste newline-separated tags from the clipboard into here.' )
    
    self._show_deleted = False
    
    # build the cog-button menu
    menu_items = []
    
    check_manager = ClientGUICommon.CheckboxManagerOptions( 'allow_remove_on_manage_tags_input' )
    
    menu_items.append( ( 'check', 'allow remove/petition result on tag input for already existing tag', 'If checked, inputting a tag that already exists will try to remove it.', check_manager ) )
    
    check_manager = ClientGUICommon.CheckboxManagerOptions( 'yes_no_on_remove_on_manage_tags' )
    
    menu_items.append( ( 'check', 'confirm remove/petition tags on explicit delete actions', 'If checked, clicking the remove/petition tags button (or hitting the deleted key on the list) will first confirm the action with a yes/no dialog.', check_manager ) )
    
    check_manager = ClientGUICommon.CheckboxManagerCalls( self._FlipShowDeleted, lambda: self._show_deleted )
    
    menu_items.append( ( 'check', 'show deleted', 'Show deleted tags, if any.', check_manager ) )
    
    menu_items.append( ( 'separator', 0, 0, 0 ) )
    
    menu_items.append( ( 'normal', 'migrate tags for these files', 'Migrate the tags for the files used to launch this manage tags panel.', self._MigrateTags ) )
    
    # repository moderators also get an account-modification entry
    if not self._i_am_local_tag_service and self._service.HasPermission( HC.CONTENT_TYPE_ACCOUNTS, HC.PERMISSION_ACTION_MODERATE ):
        
        menu_items.append( ( 'separator', 0, 0, 0 ) )
        
        menu_items.append( ( 'normal', 'modify users who added the selected tags', 'Modify the users who added the selected tags.', self._ModifyMappers ) )
        
    
    self._cog_button = ClientGUIMenuButton.MenuBitmapButton( self._tags_box_sorter, CC.global_pixmaps().cog, menu_items )
    
    #
    
    # entering nothing in the tag input fires OK, i.e. 'accept dialog'
    self._add_tag_box = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.AddTags, self._file_service_key, self._tag_service_key, null_entry_callable = self.OK )
    
    self._tags_box.SetTagServiceKey( self._tag_service_key )
    
    self._suggested_tags = ClientGUITagSuggestions.SuggestedTagsPanel( self, self._tag_service_key, media, self.AddTags )
    
    self.SetMedia( media )
    
    button_hbox = QP.HBoxLayout()
    
    QP.AddToLayout( button_hbox, self._remove_tags, CC.FLAGS_CENTER_PERPENDICULAR )
    QP.AddToLayout( button_hbox, self._copy_button, CC.FLAGS_CENTER_PERPENDICULAR )
    QP.AddToLayout( button_hbox, self._paste_button, CC.FLAGS_CENTER_PERPENDICULAR )
    QP.AddToLayout( button_hbox, self._cog_button, CC.FLAGS_CENTER )
    
    self._tags_box_sorter.Add( button_hbox, CC.FLAGS_ON_RIGHT )
    
    vbox = QP.VBoxLayout()
    
    QP.AddToLayout( vbox, self._tags_box_sorter, CC.FLAGS_EXPAND_BOTH_WAYS )
    QP.AddToLayout( vbox, self._add_tag_box )
    
    #
    
    hbox = QP.HBoxLayout()
    
    QP.AddToLayout( hbox, self._suggested_tags, CC.FLAGS_EXPAND_BOTH_WAYS_POLITE )
    QP.AddToLayout( hbox, vbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
    
    #
    
    self._my_shortcut_handler = ClientGUIShortcuts.ShortcutsHandler( self, [ 'global', 'main_gui' ] )
    
    self.setLayout( hbox )
    
    # in immediate-commit mode, keep our view in sync with db-side content updates
    if self._immediate_commit:
        
        HG.client_controller.sub( self, 'ProcessContentUpdates', 'content_updates_gui' )
        
    
    self._suggested_tags.mouseActivationOccurred.connect( self.SetTagBoxFocus )
def _EnterTags( self, tags, only_add = False, only_remove = False, forced_reason = None ):
    """Apply entered tags to the current media, working out per-tag what that means.
    
    For a local tag service, entering a tag adds it where missing or
    deletes it where present; for a repository, it pends/petitions or
    rescinds those. If more than one interpretation is possible across the
    selection, the user is asked which to perform. Petitions also prompt
    for a reason (unless forced_reason is set or the user is a moderator).
    Resulting content updates are either written immediately or queued in
    self._groups_of_content_updates.
    
    :param only_add: restrict to add/pend-style actions
    :param only_remove: restrict to delete/petition-style actions
    :param forced_reason: pre-supplied petition reason, skips the reason dialog
    """
    
    tags = HydrusTags.CleanTags( tags )
    
    # moderators skip the petition-reason dialog
    if not self._i_am_local_tag_service and self._service.HasPermission( HC.CONTENT_TYPE_MAPPINGS, HC.PERMISSION_ACTION_MODERATE ):
        
        forced_reason = 'admin'
        
    
    tags_managers = [ m.GetTagsManager() for m in self._media ]
    
    currents = [ tags_manager.GetCurrent( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) for tags_manager in tags_managers ]
    pendings = [ tags_manager.GetPending( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) for tags_manager in tags_managers ]
    petitioneds = [ tags_manager.GetPetitioned( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) for tags_manager in tags_managers ]
    
    num_files = len( self._media )
    
    # let's figure out what these tags can mean for the media--add, remove, or what?
    
    # maps content update action -> [ ( tag, num affected files ) ]
    choices = collections.defaultdict( list )
    
    for tag in tags:
        
        num_current = sum( ( 1 for current in currents if tag in current ) )
        
        if self._i_am_local_tag_service:
            
            if not only_remove:
                
                if num_current < num_files:
                    
                    num_non_current = num_files - num_current
                    
                    choices[ HC.CONTENT_UPDATE_ADD ].append( ( tag, num_non_current ) )
                    
                
            
            if not only_add:
                
                if num_current > 0:
                    
                    choices[ HC.CONTENT_UPDATE_DELETE ].append( ( tag, num_current ) )
                    
                
            
        else:
            
            num_pending = sum( ( 1 for pending in pendings if tag in pending ) )
            num_petitioned = sum( ( 1 for petitioned in petitioneds if tag in petitioned ) )
            
            if not only_remove:
                
                if num_current + num_pending < num_files:
                    
                    num_pendable = num_files - ( num_current + num_pending )
                    
                    choices[ HC.CONTENT_UPDATE_PEND ].append( ( tag, num_pendable ) )
                    
                
            
            if not only_add:
                
                # NOTE(review): the 'and not only_add' on the next two conditions is
                # redundant inside this 'if not only_add:' branch
                if num_current > num_petitioned and not only_add:
                    
                    num_petitionable = num_current - num_petitioned
                    
                    choices[ HC.CONTENT_UPDATE_PETITION ].append( ( tag, num_petitionable ) )
                    
                
                if num_pending > 0 and not only_add:
                    
                    choices[ HC.CONTENT_UPDATE_RESCIND_PEND ].append( ( tag, num_pending ) )
                    
                
            
            if not only_remove:
                
                if num_petitioned > 0:
                    
                    choices[ HC.CONTENT_UPDATE_RESCIND_PETITION ].append( ( tag, num_petitioned ) )
                    
                
            
        
    
    # nothing to do for any tag
    if len( choices ) == 0:
        
        return
        
    
    if len( choices ) == 1:
        
        # unambiguous: do the one possible action for its tags
        [ ( choice_action, tag_counts ) ] = list( choices.items() )
        
        tags = { tag for ( tag, count ) in tag_counts }
        
    else:
        
        # ambiguous: build a button-choice dialog describing each possible action
        bdc_choices = []
        
        preferred_order = [ HC.CONTENT_UPDATE_ADD, HC.CONTENT_UPDATE_DELETE, HC.CONTENT_UPDATE_PEND, HC.CONTENT_UPDATE_RESCIND_PEND, HC.CONTENT_UPDATE_PETITION, HC.CONTENT_UPDATE_RESCIND_PETITION ]
        
        choice_text_lookup = {}
        
        choice_text_lookup[ HC.CONTENT_UPDATE_ADD ] = 'add'
        choice_text_lookup[ HC.CONTENT_UPDATE_DELETE ] = 'delete'
        choice_text_lookup[ HC.CONTENT_UPDATE_PEND ] = 'pend (add)'
        choice_text_lookup[ HC.CONTENT_UPDATE_PETITION ] = 'petition to remove'
        choice_text_lookup[ HC.CONTENT_UPDATE_RESCIND_PEND ] = 'undo pend'
        choice_text_lookup[ HC.CONTENT_UPDATE_RESCIND_PETITION ] = 'undo petition to remove'
        
        choice_tooltip_lookup = {}
        
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_ADD ] = 'this adds the tags to this local tag service'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_DELETE ] = 'this deletes the tags from this local tag service'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_PEND ] = 'this pends the tags to be added to this tag repository when you upload'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_PETITION ] = 'this petitions the tags for deletion from this tag repository when you upload'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_RESCIND_PEND ] = 'this rescinds the currently pending tags, so they will not be added'
        choice_tooltip_lookup[ HC.CONTENT_UPDATE_RESCIND_PETITION ] = 'this rescinds the current tag petitions, so they will not be deleted'
        
        for choice_action in preferred_order:
            
            if choice_action not in choices:
                
                continue
                
            
            choice_text_prefix = choice_text_lookup[ choice_action ]
            
            tag_counts = choices[ choice_action ]
            
            choice_tags = { tag for ( tag, count ) in tag_counts }
            
            if len( choice_tags ) == 1:
                
                [ ( tag, count ) ] = tag_counts
                
                text = '{} "{}" for {} files'.format( choice_text_prefix, HydrusText.ElideText( tag, 64 ), HydrusData.ToHumanInt( count ) )
                
            else:
                
                text = '{} {} tags'.format( choice_text_prefix, HydrusData.ToHumanInt( len( choice_tags ) ) )
                
            
            data = ( choice_action, choice_tags )
            
            # tooltip: action description, then up to 25 per-tag file counts
            t_c_lines = [ choice_tooltip_lookup[ choice_action ] ]
            
            if len( tag_counts ) > 25:
                
                t_c = tag_counts[:25]
                
            else:
                
                t_c = tag_counts
                
            
            t_c_lines.extend( ( '{} - {} files'.format( tag, HydrusData.ToHumanInt( count ) ) for ( tag, count ) in t_c ) )
            
            if len( tag_counts ) > 25:
                
                t_c_lines.append( 'and {} others'.format( HydrusData.ToHumanInt( len( tag_counts ) - 25 ) ) )
                
            
            tooltip = os.linesep.join( t_c_lines )
            
            bdc_choices.append( ( text, data, tooltip ) )
            
        
        try:
            
            if len( tags ) > 1:
                
                message = 'The file{} some of those tags, but not all, so there are different things you can do.'.format( 's have' if len( self._media ) > 1 else ' has' )
                
            else:
                
                message = 'Of the {} files being managed, some have that tag, but not all of them do, so there are different things you can do.'.format( HydrusData.ToHumanInt( len( self._media ) ) )
                
            
            ( choice_action, tags ) = ClientGUIDialogsQuick.SelectFromListButtons( self, 'What would you like to do?', bdc_choices, message = message )
            
        except HydrusExceptions.CancelledException:
            
            # user backed out of the choice dialog
            return
            
        
    
    reason = None
    
    if choice_action == HC.CONTENT_UPDATE_PETITION:
        
        if forced_reason is None:
            
            # add the easy reason buttons here
            
            if len( tags ) == 1:
                
                ( tag, ) = tags
                
                tag_text = '"' + tag + '"'
                
            else:
                
                tag_text = 'the ' + HydrusData.ToHumanInt( len( tags ) ) + ' tags'
                
            
            message = 'Enter a reason for ' + tag_text + ' to be removed. A janitor will review your petition.'
            
            suggestions = []
            
            suggestions.append( 'mangled parse/typo' )
            suggestions.append( 'not applicable' )
            suggestions.append( 'should be namespaced' )
            suggestions.append( 'splitting filename/title/etc... into individual tags' )
            
            with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
                
                if dlg.exec() == QW.QDialog.Accepted:
                    
                    reason = dlg.GetValue()
                    
                else:
                    
                    return
                    
                
            
        else:
            
            reason = forced_reason
            
        
    
    # we have an action and tags, so let's effect the content updates
    
    content_updates_group = []
    
    recent_tags = set()
    
    medias_and_tags_managers = [ ( m, m.GetTagsManager() ) for m in self._media ]
    # per media: ( media, current tags, pending tags, petitioned tags )
    medias_and_sets_of_tags = [ ( m, tm.GetCurrent( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ), tm.GetPending( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ), tm.GetPetitioned( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) ) for ( m, tm ) in medias_and_tags_managers ]
    
    for tag in tags:
        
        # select only the media for which the chosen action is meaningful for this tag
        if choice_action == HC.CONTENT_UPDATE_ADD: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag not in mc ]
        elif choice_action == HC.CONTENT_UPDATE_DELETE: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag in mc ]
        elif choice_action == HC.CONTENT_UPDATE_PEND: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag not in mc and tag not in mp ]
        elif choice_action == HC.CONTENT_UPDATE_PETITION: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag in mc and tag not in mpt ]
        elif choice_action == HC.CONTENT_UPDATE_RESCIND_PEND: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag in mp ]
        elif choice_action == HC.CONTENT_UPDATE_RESCIND_PETITION: media_to_affect = [ m for ( m, mc, mp, mpt ) in medias_and_sets_of_tags if tag in mpt ]
        
        hashes = set( itertools.chain.from_iterable( ( m.GetHashes() for m in media_to_affect ) ) )
        
        if len( hashes ) > 0:
            
            content_updates = []
            
            # only additive actions count towards the recent-tags list
            if choice_action in ( HC.CONTENT_UPDATE_ADD, HC.CONTENT_UPDATE_PEND ):
                
                recent_tags.add( tag )
                
            
            content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, choice_action, ( tag, hashes ), reason = reason ) )
            
            if len( content_updates ) > 0:
                
                if not self._immediate_commit:
                    
                    # apply locally so the UI reflects the change before any db write
                    for m in media_to_affect:
                        
                        mt = m.GetTagsManager()
                        
                        for content_update in content_updates:
                            
                            mt.ProcessContentUpdate( self._tag_service_key, content_update )
                            
                        
                    
                
                content_updates_group.extend( content_updates )
                
            
        
    
    num_recent_tags = HG.client_controller.new_options.GetNoneableInteger( 'num_recent_tags' )
    
    if len( recent_tags ) > 0 and num_recent_tags is not None:
        
        # cap the recorded recent tags at the user's configured limit
        if len( recent_tags ) > num_recent_tags:
            
            recent_tags = random.sample( recent_tags, num_recent_tags )
            
        
        HG.client_controller.Write( 'push_recent_tags', self._tag_service_key, recent_tags )
        
    
    if len( content_updates_group ) > 0:
        
        if self._immediate_commit:
            
            service_keys_to_content_updates = { self._tag_service_key : content_updates_group }
            
            HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
        else:
            
            self._groups_of_content_updates.append( content_updates_group )
            
        
    
    self._suggested_tags.MediaUpdated()
    
    self._tags_box.SetTagsByMedia( self._media )
def _MigrateTags( self ):
    """Open a 'migrate tags' frame for all hashes in the current media, then OK this dialog."""
    
    all_hashes = set( itertools.chain.from_iterable( m.GetHashes() for m in self._media ) )
    
    def open_frame( tag_service_key, hashes ):
        
        # deferred via CallAfter so the frame opens after this dialog's OK has been processed
        main_tlw = HG.client_controller.GetMainTLW()
        
        frame = ClientGUITopLevelWindowsPanels.FrameThatTakesScrollablePanel( main_tlw, 'migrate tags' )
        
        frame.SetPanel( ClientGUIScrolledPanelsReview.MigrateTagsPanel( frame, self._tag_service_key, hashes ) )
        
    
    QP.CallAfter( open_frame, self._tag_service_key, all_hashes )
    
    self.OK()
def _Copy( self ):
    """Copy the selected tags to the clipboard; with no selection, copy all current and pending tags."""
    
    tags_to_copy = list( self._tags_box.GetSelectedTags() )
    
    if not tags_to_copy:
        
        # fall back to every current/pending tag on the media for this service
        ( current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) = ClientMedia.GetMediasTagCount( self._media, self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE )
        
        tags_to_copy = set( current_tags_to_count.keys() ).union( pending_tags_to_count.keys() )
        
    
    if len( tags_to_copy ) > 0:
        
        sorted_tags = HydrusTags.SortNumericTags( tags_to_copy )
        
        HG.client_controller.pub( 'clipboard', 'text', os.linesep.join( sorted_tags ) )
def _FlipShowDeleted( self ):
    """Toggle display of deleted tags in the tags box."""
    
    show_it = not self._show_deleted
    
    self._show_deleted = show_it
    
    self._tags_box.SetShow( 'deleted', show_it )
def _ModifyMappers( self ):
    """Open a 'manage accounts' panel targeting the accounts that mapped the selected tags."""
    
    tags = self._tags_box.GetSelectedTags()
    
    if len( tags ) == 0:
        
        QW.QMessageBox.information( self, 'No tags selected!', 'Please select some tags first!' )
        
        return
        
    
    hashes_and_current_tags = [ ( m.GetHashes(), m.GetTagsManager().GetCurrent( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) ) for m in self._media ]
    
    contents = []
    
    # one mapping content per ( tag, hash ) where the media currently has the tag
    for tag in tags:
        
        for ( hashes, current_tags ) in hashes_and_current_tags:
            
            if tag not in current_tags:
                
                continue
                
            
            contents.extend( HydrusNetwork.Content( HC.CONTENT_TYPE_MAPPING, ( tag, hash ) ) for hash in hashes )
            
        
    
    if len( contents ) > 0:
        
        subject_account_identifiers = [ HydrusNetwork.AccountIdentifier( content = content ) for content in contents ]
        
        frame = ClientGUITopLevelWindowsPanels.FrameThatTakesScrollablePanel( self.window().parentWidget(), 'manage accounts' )
        
        frame.SetPanel( ClientGUIHydrusNetwork.ModifyAccountsPanel( frame, self._tag_service_key, subject_account_identifiers ) )
def _Paste( self ):
    """Parse newline-separated tags from the clipboard and add them (add-only)."""
    
    try:
        
        raw_text = HG.client_controller.GetClipboardText()
        
    except HydrusExceptions.DataMissing as e:
        
        QW.QMessageBox.warning( self, 'Warning', str(e) )
        
        return
        
    
    try:
        
        clean_tags = HydrusTags.CleanTags( HydrusText.DeserialiseNewlinedTexts( raw_text ) )
        
        self.AddTags( clean_tags, only_add = True )
        
    except Exception:
        
        # anything unparseable becomes a simple user-facing warning
        QW.QMessageBox.warning( self, 'Warning', 'I could not understand what was in the clipboard' )
def _RemoveTagsButton( self ):
    """Remove the selected tags; with no selection, remove every current/pending tag."""
    
    removable_tags = set()
    
    for tags_manager in ( m.GetTagsManager() for m in self._media ):
        
        removable_tags.update( tags_manager.GetCurrent( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) )
        removable_tags.update( tags_manager.GetPending( self._tag_service_key, ClientTags.TAG_DISPLAY_STORAGE ) )
        
    
    selected_tags = list( self._tags_box.GetSelectedTags() )
    
    if selected_tags:
        
        tags_to_remove = [ tag for tag in selected_tags if tag in removable_tags ]
        
    else:
        
        tags_to_remove = list( removable_tags )
        
    
    self.RemoveTags( HydrusTags.SortNumericTags( tags_to_remove ) )
def AddTags( self, tags, only_add = False ):
    """Enter the given tags; forced to add-only mode when remove-on-input is disallowed."""
    
    if not self._new_options.GetBoolean( 'allow_remove_on_manage_tags_input' ):
        
        only_add = True
        
    
    if len( tags ) == 0:
        
        return
        
    
    self.EnterTags( tags, only_add = only_add )
def CleanBeforeDestroy( self ):
    """Cancel any in-flight autocomplete results fetch before this panel is destroyed."""
    self._add_tag_box.CancelCurrentResultsFetchJob()
def ClearMedia( self ):
    """Reset this panel to show no media."""
    self.SetMedia( set() )
def EnterTags( self, tags, only_add = False ):
    """Apply the given tags to the current media, if any were supplied."""
    
    if not tags:
        
        return
        
    
    self._EnterTags( tags, only_add = only_add )
def GetGroupsOfContentUpdates( self ):
    """Return ( tag_service_key, groups_of_content_updates ) accumulated so far."""
    return ( self._tag_service_key, self._groups_of_content_updates )
def HasChanges( self ):
    """True if any content update groups are waiting to be committed."""
    
    return bool( self._groups_of_content_updates )
def OK( self ):
    """Emit okSignal; the containing dialog listens for this to commit and close."""
    self.okSignal.emit()
def ProcessApplicationCommand( self, command: CAC.ApplicationCommand ):
    """Handle a shortcut command aimed at this panel; return True if it was processed."""
    
    if not command.IsSimpleCommand():
        
        return False
        
    
    action = command.GetData()
    
    if action == CAC.SIMPLE_SET_SEARCH_FOCUS:
        
        self.SetTagBoxFocus()
        
        return True
        
    
    if action in ( CAC.SIMPLE_SHOW_AND_FOCUS_MANAGE_TAGS_FAVOURITE_TAGS, CAC.SIMPLE_SHOW_AND_FOCUS_MANAGE_TAGS_RELATED_TAGS, CAC.SIMPLE_SHOW_AND_FOCUS_MANAGE_TAGS_FILE_LOOKUP_SCRIPT_TAGS, CAC.SIMPLE_SHOW_AND_FOCUS_MANAGE_TAGS_RECENT_TAGS ):
        
        self._suggested_tags.TakeFocusForUser( action )
        
        return True
        
    
    if action == CAC.SIMPLE_REFRESH_RELATED_TAGS:
        
        self._suggested_tags.RefreshRelatedThorough()
        
        return True
        
    
    return False
def ProcessContentUpdates( self, service_keys_to_content_updates ):
    """Apply externally-published content updates to any of our media they touch, then refresh the display."""
    
    for ( service_key, content_updates ) in list( service_keys_to_content_updates.items() ):
        
        for content_update in content_updates:
            
            affected_media = ( m for m in self._media if HydrusData.SetsIntersect( m.GetHashes(), content_update.GetHashes() ) )
            
            for m in affected_media:
                
                m.GetMediaResult().ProcessContentUpdate( service_key, content_update )
                
            
        
    
    self._tags_box.SetTagsByMedia( self._media )
    
    self._suggested_tags.MediaUpdated()
def RemoveTags( self, tags ):
    """Remove the given tags from the current media, optionally confirming with the user first."""
    
    if len( tags ) == 0:
        
        return
        
    
    if self._new_options.GetBoolean( 'yes_no_on_remove_on_manage_tags' ):
        
        if len( tags ) < 10:
            
            # short lists get spelled out, elided to keep the dialog readable
            message = 'Are you sure you want to remove these tags:' + os.linesep * 2 + os.linesep.join( HydrusText.ElideText( tag, 64 ) for tag in tags )
            
        else:
            
            message = 'Are you sure you want to remove these ' + HydrusData.ToHumanInt( len( tags ) ) + ' tags?'
            
        
        if ClientGUIDialogsQuick.GetYesNo( self, message ) != QW.QDialog.Accepted:
            
            return
            
        
    
    self._EnterTags( tags, only_remove = True )
def SetMedia( self, media ):
    """Set the media this panel manages (None counts as empty) and refresh dependent widgets."""
    
    self._media = set() if media is None else media
    
    self._tags_box.SetTagsByMedia( self._media )
    
    self._suggested_tags.SetMedia( self._media )
def SetTagBoxFocus( self ):
    """Move keyboard focus to the tag autocomplete input."""
    self._add_tag_box.setFocus( QC.Qt.OtherFocusReason )
class ManageTagParents( ClientGUIScrolledPanels.ManagePanel ):
    """Dialog panel for reviewing and editing tag parent pairs, one page per writable tag service."""
    
    def __init__( self, parent, tags = None ):
        """Build one parent-editing page per local tag service and petitionable repository.
        
        tags: optional iterable of tags to pre-load as children on each page.
        """
        ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
        self._tag_repositories = ClientGUICommon.BetterNotebook( self )
        default_tag_repository_key = HC.options[ 'default_tag_repository' ]
        # all local tag services, plus remote repositories where we can at least petition parents
        services = list( HG.client_controller.services_manager.GetServices( ( HC.LOCAL_TAG, ) ) )
        services.extend( [ service for service in HG.client_controller.services_manager.GetServices( ( HC.TAG_REPOSITORY, ) ) if service.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_PETITION ) ] )
        for service in services:
            name = service.GetName()
            service_key = service.GetServiceKey()
            page = self._Panel( self._tag_repositories, service_key, tags )
            select = service_key == default_tag_repository_key
            self._tag_repositories.addTab( page, name )
            if select: self._tag_repositories.setCurrentWidget( page )
        vbox = QP.VBoxLayout()
        QP.AddToLayout( vbox, self._tag_repositories, CC.FLAGS_EXPAND_BOTH_WAYS )
        self.widget().setLayout( vbox )
    
    def _SetSearchFocus( self ):
        """Forward keyboard focus to the active page's tag input, if there is one."""
        page = self._tag_repositories.currentWidget()
        if page is not None:
            page.SetTagBoxFocus()
    
    def CommitChanges( self ):
        """Gather pending content updates from every page and write them to the db in one call."""
        service_keys_to_content_updates = {}
        for page in self._tag_repositories.GetPages():
            ( service_key, content_updates ) = page.GetContentUpdates()
            if len( content_updates ) > 0:
                service_keys_to_content_updates[ service_key ] = content_updates
        if len( service_keys_to_content_updates ) > 0:
            HG.client_controller.Write( 'content_updates', service_keys_to_content_updates )
    
    def UserIsOKToOK( self ):
        """Confirm with the user before closing when the visible page has an uncommitted pair."""
        if self._tag_repositories.currentWidget().HasUncommittedPair():
            message = 'Are you sure you want to OK? You have an uncommitted pair.'
            result = ClientGUIDialogsQuick.GetYesNo( self, message )
            if result != QW.QDialog.Accepted:
                return False
        return True
    
    class _Panel( QW.QWidget ):
        """One notebook page: edits parent pairs for a single tag service."""
        
        def __init__( self, parent, service_key, tags = None ):
            """Set up the pair list, child/parent tag boxes and inputs; pairs load asynchronously."""
            QW.QWidget.__init__( self, parent )
            self._service_key = service_key
            self._service = HG.client_controller.services_manager.GetService( self._service_key )
            self._i_am_local_tag_service = self._service.GetServiceType() == HC.LOCAL_TAG
            # pair -> reason text supplied when pending/petitioning on a repository
            self._pairs_to_reasons = {}
            # status constant -> set of ( child, parent ) pairs; 'original' mirrors the db, 'current' includes local edits
            self._original_statuses_to_pairs = collections.defaultdict( set )
            self._current_statuses_to_pairs = collections.defaultdict( set )
            self._show_all = QW.QCheckBox( self )
            listctrl_panel = ClientGUIListCtrl.BetterListCtrlPanel( self )
            self._tag_parents = ClientGUIListCtrl.BetterListCtrl( listctrl_panel, CGLC.COLUMN_LIST_TAG_PARENTS.ID, 8, self._ConvertPairToListCtrlTuples, delete_key_callback = self._ListCtrlActivated, activation_callback = self._ListCtrlActivated )
            listctrl_panel.SetListCtrl( self._tag_parents )
            self._tag_parents.Sort()
            menu_items = []
            menu_items.append( ( 'normal', 'from clipboard', 'Load parents from text in your clipboard.', HydrusData.Call( self._ImportFromClipboard, False ) ) )
            menu_items.append( ( 'normal', 'from clipboard (only add pairs--no deletions)', 'Load parents from text in your clipboard.', HydrusData.Call( self._ImportFromClipboard, True ) ) )
            menu_items.append( ( 'normal', 'from .txt file', 'Load parents from a .txt file.', HydrusData.Call( self._ImportFromTXT, False ) ) )
            menu_items.append( ( 'normal', 'from .txt file (only add pairs--no deletions)', 'Load parents from a .txt file.', HydrusData.Call( self._ImportFromTXT, True ) ) )
            listctrl_panel.AddMenuButton( 'import', menu_items )
            menu_items = []
            menu_items.append( ( 'normal', 'to clipboard', 'Save selected parents to your clipboard.', self._ExportToClipboard ) )
            menu_items.append( ( 'normal', 'to .txt file', 'Save selected parents to a .txt file.', self._ExportToTXT ) )
            listctrl_panel.AddMenuButton( 'export', menu_items, enabled_only_on_selection = True )
            self._children = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( self, self._service_key, ClientTags.TAG_DISPLAY_ACTUAL )
            self._parents = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( self, self._service_key, ClientTags.TAG_DISPLAY_ACTUAL )
            ( gumpf, preview_height ) = ClientGUIFunctions.ConvertTextToPixels( self._children, ( 12, 6 ) )
            self._children.setMinimumHeight( preview_height )
            self._parents.setMinimumHeight( preview_height )
            # inputs and the add button stay disabled until THREADInitialise has fetched the current pairs
            self._child_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.EnterChildren, CC.LOCAL_FILE_SERVICE_KEY, service_key, show_paste_button = True )
            self._child_input.setEnabled( False )
            self._parent_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.EnterParents, CC.LOCAL_FILE_SERVICE_KEY, service_key, show_paste_button = True )
            self._parent_input.setEnabled( False )
            self._add = QW.QPushButton( 'add', self )
            self._add.clicked.connect( self.EventAddButton )
            self._add.setEnabled( False )
            self._status_st = ClientGUICommon.BetterStaticText( self, 'initialising\u2026' + os.linesep + '.' )
            self._sync_status_st = ClientGUICommon.BetterStaticText( self, '' )
            self._sync_status_st.setWordWrap( True )
            self._count_st = ClientGUICommon.BetterStaticText( self, '' )
            children_vbox = QP.VBoxLayout()
            QP.AddToLayout( children_vbox, ClientGUICommon.BetterStaticText( self, label = 'set children' ), CC.FLAGS_CENTER )
            QP.AddToLayout( children_vbox, self._children, CC.FLAGS_EXPAND_BOTH_WAYS )
            parents_vbox = QP.VBoxLayout()
            QP.AddToLayout( parents_vbox, ClientGUICommon.BetterStaticText( self, label = 'set parents' ), CC.FLAGS_CENTER )
            QP.AddToLayout( parents_vbox, self._parents, CC.FLAGS_EXPAND_BOTH_WAYS )
            tags_box = QP.HBoxLayout()
            QP.AddToLayout( tags_box, children_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
            QP.AddToLayout( tags_box, parents_vbox, CC.FLAGS_EXPAND_BOTH_WAYS )
            input_box = QP.HBoxLayout()
            QP.AddToLayout( input_box, self._child_input, CC.FLAGS_EXPAND_BOTH_WAYS )
            QP.AddToLayout( input_box, self._parent_input, CC.FLAGS_EXPAND_BOTH_WAYS )
            vbox = QP.VBoxLayout()
            QP.AddToLayout( vbox, self._status_st, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, self._sync_status_st, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, self._count_st, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, ClientGUICommon.WrapInText(self._show_all,self,'show all pairs'), CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, listctrl_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
            QP.AddToLayout( vbox, self._add, CC.FLAGS_ON_RIGHT )
            QP.AddToLayout( vbox, tags_box, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
            QP.AddToLayout( vbox, input_box, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            self.setLayout( vbox )
            self._tag_parents.itemSelectionChanged.connect( self._SetButtonStatus )
            self._children.listBoxChanged.connect( self._UpdateListCtrlData )
            self._parents.listBoxChanged.connect( self._UpdateListCtrlData )
            self._show_all.clicked.connect( self._UpdateListCtrlData )
            # fetch existing pairs off the Qt thread; widgets are enabled when it calls back
            HG.client_controller.CallToThread( self.THREADInitialise, tags, self._service_key )
        
        def _AddPairs( self, pairs, add_only = False ):
            """Apply the given ( child, parent ) pairs, routing each by its current status.
            
            New pairs become pends, existing ones become petitions, and pairs already
            pending/petitioned have their pend/petition rescinded. When add_only is True,
            only additions happen. On repositories a reason is collected via dialog unless
            the account can moderate.
            """
            pairs = list( pairs )
            pairs.sort( key = lambda c_p: HydrusTags.ConvertTagToSortable( c_p[1] ) )
            new_pairs = []
            current_pairs = []
            petitioned_pairs = []
            pending_pairs = []
            # bucket each pair by its current status
            for pair in pairs:
                if pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]:
                    if not add_only:
                        pending_pairs.append( pair )
                elif pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]:
                    petitioned_pairs.append( pair )
                elif pair in self._original_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ]:
                    if not add_only:
                        current_pairs.append( pair )
                elif self._CanAdd( pair ):
                    new_pairs.append( pair )
            affected_pairs = []
            if len( new_pairs ) > 0:
                do_it = True
                if not self._i_am_local_tag_service:
                    # repositories need a reason; moderators get a stock one
                    if self._service.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_MODERATE ):
                        reason = 'admin'
                    else:
                        if len( new_pairs ) > 10:
                            pair_strings = 'The many pairs you entered.'
                        else:
                            pair_strings = os.linesep.join( ( child + '->' + parent for ( child, parent ) in new_pairs ) )
                        message = 'Enter a reason for:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'To be added. A janitor will review your request.'
                        suggestions = []
                        suggestions.append( 'obvious by definition (a sword is a weapon)' )
                        suggestions.append( 'character/series/studio/etc... belonging (character x belongs to series y)' )
                        with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
                            if dlg.exec() == QW.QDialog.Accepted:
                                reason = dlg.GetValue()
                            else:
                                do_it = False
                    if do_it:
                        for pair in new_pairs: self._pairs_to_reasons[ pair ] = reason
                if do_it:
                    self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ].update( new_pairs )
                    affected_pairs.extend( new_pairs )
            else:
                # nothing brand new entered: handle petitions and rescinds instead
                if len( current_pairs ) > 0:
                    do_it = True
                    if not self._i_am_local_tag_service:
                        if len( current_pairs ) > 10:
                            pair_strings = 'The many pairs you entered.'
                        else:
                            pair_strings = os.linesep.join( ( child + '->' + parent for ( child, parent ) in current_pairs ) )
                        if len( current_pairs ) > 1:
                            message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Already exist.'
                        else:
                            message = 'The pair ' + pair_strings + ' already exists.'
                        result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'petition to remove', no_label = 'do nothing' )
                        if result == QW.QDialog.Accepted:
                            if self._service.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_MODERATE ):
                                reason = 'admin'
                            else:
                                message = 'Enter a reason for:'
                                message += os.linesep * 2
                                message += pair_strings
                                message += os.linesep * 2
                                message += 'to be removed. A janitor will review your petition.'
                                suggestions = []
                                suggestions.append( 'obvious typo/mistake' )
                                with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
                                    if dlg.exec() == QW.QDialog.Accepted:
                                        reason = dlg.GetValue()
                                    else:
                                        do_it = False
                            if do_it:
                                for pair in current_pairs: self._pairs_to_reasons[ pair ] = reason
                        else:
                            do_it = False
                    if do_it:
                        self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ].update( current_pairs )
                        affected_pairs.extend( current_pairs )
                if len( pending_pairs ) > 0:
                    if len( pending_pairs ) > 10:
                        pair_strings = 'The many pairs you entered.'
                    else:
                        pair_strings = os.linesep.join( ( child + '->' + parent for ( child, parent ) in pending_pairs ) )
                    if len( pending_pairs ) > 1:
                        message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Are pending.'
                    else:
                        message = 'The pair ' + pair_strings + ' is pending.'
                    result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'rescind the pend', no_label = 'do nothing' )
                    if result == QW.QDialog.Accepted:
                        self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ].difference_update( pending_pairs )
                        affected_pairs.extend( pending_pairs )
                if len( petitioned_pairs ) > 0:
                    if len( petitioned_pairs ) > 10:
                        pair_strings = 'The many pairs you entered.'
                    else:
                        pair_strings = os.linesep.join( ( child + '->' + parent for ( child, parent ) in petitioned_pairs ) )
                    if len( petitioned_pairs ) > 1:
                        message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Are petitioned.'
                    else:
                        message = 'The pair ' + pair_strings + ' is petitioned.'
                    result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'rescind the petition', no_label = 'do nothing' )
                    if result == QW.QDialog.Accepted:
                        self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ].difference_update( petitioned_pairs )
                        affected_pairs.extend( petitioned_pairs )
            if len( affected_pairs ) > 0:
                # refresh only the affected rows in the listctrl
                def in_current( pair ):
                    for status in ( HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING, HC.CONTENT_STATUS_PETITIONED ):
                        if pair in self._current_statuses_to_pairs[ status ]:
                            return True
                    return False
                affected_pairs = [ ( self._tag_parents.HasData( pair ), in_current( pair ), pair ) for pair in affected_pairs ]
                to_add = [ pair for ( exists, current, pair ) in affected_pairs if not exists ]
                to_update = [ pair for ( exists, current, pair ) in affected_pairs if exists and current ]
                to_delete = [ pair for ( exists, current, pair ) in affected_pairs if exists and not current ]
                self._tag_parents.AddDatas( to_add )
                self._tag_parents.UpdateDatas( to_update )
                self._tag_parents.DeleteDatas( to_delete )
                self._tag_parents.Sort()
        
        def _CanAdd( self, potential_pair ):
            """Return True if the pair may be added: no self-parenting and no parent loop."""
            ( potential_child, potential_parent ) = potential_pair
            if potential_child == potential_parent: return False
            current_pairs = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ].union( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] ).difference( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ] )
            current_children = { child for ( child, parent ) in current_pairs }
            # only possible to create a loop if the new parent is itself a child somewhere
            if potential_parent in current_children:
                simple_children_to_parents = ClientManagers.BuildSimpleChildrenToParents( current_pairs )
                if ClientManagers.LoopInSimpleChildrenToParents( simple_children_to_parents, potential_child, potential_parent ):
                    QW.QMessageBox.critical( self, 'Error', 'Adding '+potential_child+'->'+potential_parent+' would create a loop!' )
                    return False
            return True
        
        def _ConvertPairToListCtrlTuples( self, pair ):
            """Build the ( display, sort ) tuples for one listctrl row."""
            ( child, parent ) = pair
            # NOTE(review): if a pair is in none of pending/petitioned/current, 'status' is
            # unbound here and this raises NameError — presumably callers only pass known
            # pairs; confirm before relying on this with arbitrary input.
            if pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]:
                status = HC.CONTENT_STATUS_PENDING
            elif pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]:
                status = HC.CONTENT_STATUS_PETITIONED
            elif pair in self._original_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ]:
                status = HC.CONTENT_STATUS_CURRENT
            sign = HydrusData.ConvertStatusToPrefix( status )
            pretty_status = sign
            display_tuple = ( pretty_status, child, parent )
            sort_tuple = ( status, child, parent )
            return ( display_tuple, sort_tuple )
        
        def _DeserialiseImportString( self, import_string ):
            """Parse newline-separated text into ( child, parent ) pairs; raises on odd counts."""
            tags = HydrusText.DeserialiseNewlinedTexts( import_string )
            if len( tags ) % 2 == 1:
                raise Exception( 'Uneven number of tags found!' )
            pairs = []
            for i in range( len( tags ) // 2 ):
                pair = ( tags[ 2 * i ], tags[ ( 2 * i ) + 1 ] )
                pairs.append( pair )
            return pairs
        
        def _ExportToClipboard( self ):
            """Copy the selected pairs, newline-serialised, to the clipboard."""
            export_string = self._GetExportString()
            HG.client_controller.pub( 'clipboard', 'text', export_string )
        
        def _ExportToTXT( self ):
            """Save the selected pairs, newline-serialised, to a user-chosen .txt file."""
            export_string = self._GetExportString()
            with QP.FileDialog( self, 'Set the export path.', default_filename = 'parents.txt', acceptMode = QW.QFileDialog.AcceptSave, fileMode = QW.QFileDialog.AnyFile ) as dlg:
                if dlg.exec() == QW.QDialog.Accepted:
                    path = dlg.GetPath()
                    with open( path, 'w', encoding = 'utf-8' ) as f:
                        f.write( export_string )
        
        def _GetExportString( self ):
            """Serialise the selected pairs as alternating child/parent lines."""
            tags = []
            for ( a, b ) in self._tag_parents.GetData( only_selected = True ):
                tags.append( a )
                tags.append( b )
            export_string = os.linesep.join( tags )
            return export_string
        
        def _ImportFromClipboard( self, add_only = False ):
            """Parse pairs from clipboard text and apply them via _AddPairs."""
            try:
                import_string = HG.client_controller.GetClipboardText()
            except HydrusExceptions.DataMissing as e:
                QW.QMessageBox.critical( self, 'Error', str(e) )
                return
            pairs = self._DeserialiseImportString( import_string )
            self._AddPairs( pairs, add_only = add_only )
            self._UpdateListCtrlData()
        
        def _ImportFromTXT( self, add_only = False ):
            """Parse pairs from a user-chosen .txt file and apply them via _AddPairs."""
            with QP.FileDialog( self, 'Select the file to import.', acceptMode = QW.QFileDialog.AcceptOpen ) as dlg:
                if dlg.exec() != QW.QDialog.Accepted:
                    return
                else:
                    path = dlg.GetPath()
            with open( path, 'r', encoding = 'utf-8' ) as f:
                import_string = f.read()
            pairs = self._DeserialiseImportString( import_string )
            self._AddPairs( pairs, add_only = add_only )
            self._UpdateListCtrlData()
        
        def _ListCtrlActivated( self ):
            """On activate/delete-key in the listctrl, flip the selected pairs through _AddPairs."""
            # NOTE(review): parents_to_children is never used below — candidate for removal.
            parents_to_children = collections.defaultdict( set )
            pairs = self._tag_parents.GetData( only_selected = True )
            if len( pairs ) > 0:
                self._AddPairs( pairs )
        
        def _SetButtonStatus( self ):
            """Enable the add button only when both a child and a parent are entered."""
            if len( self._children.GetTags() ) == 0 or len( self._parents.GetTags() ) == 0:
                self._add.setEnabled( False )
            else:
                self._add.setEnabled( True )
        
        def _UpdateListCtrlData( self ):
            """Repopulate the listctrl with pairs relevant to the entered tags (or everything when 'show all')."""
            children = self._children.GetTags()
            parents = self._parents.GetTags()
            pertinent_tags = children.union( parents )
            self._tag_parents.DeleteDatas( self._tag_parents.GetData() )
            all_pairs = set()
            show_all = self._show_all.isChecked()
            for ( status, pairs ) in self._current_statuses_to_pairs.items():
                if status == HC.CONTENT_STATUS_DELETED:
                    continue
                if len( pertinent_tags ) == 0:
                    # with no tags entered, 'current' pairs are hidden unless 'show all' is on
                    if status == HC.CONTENT_STATUS_CURRENT and not show_all:
                        continue
                    all_pairs.update( pairs )
                else:
                    for pair in pairs:
                        ( a, b ) = pair
                        if a in pertinent_tags or b in pertinent_tags or show_all:
                            all_pairs.add( pair )
            self._tag_parents.AddDatas( all_pairs )
            self._tag_parents.Sort()
        
        def EnterChildren( self, tags ):
            """Add tags to the children box (removing them from parents) and refresh."""
            if len( tags ) > 0:
                self._parents.RemoveTags( tags )
                self._children.EnterTags( tags )
                self._UpdateListCtrlData()
            self._SetButtonStatus()
        
        def EnterParents( self, tags ):
            """Add tags to the parents box (removing them from children) and refresh."""
            if len( tags ) > 0:
                self._children.RemoveTags( tags )
                self._parents.EnterTags( tags )
                self._UpdateListCtrlData()
            self._SetButtonStatus()
        
        def EventAddButton( self ):
            """Add the cross product of entered children and parents, then clear the inputs."""
            children = self._children.GetTags()
            parents = self._parents.GetTags()
            pairs = list( itertools.product( children, parents ) )
            self._AddPairs( pairs )
            self._children.SetTags( [] )
            self._parents.SetTags( [] )
            self._UpdateListCtrlData()
            self._SetButtonStatus()
        
        def GetContentUpdates( self ):
            """Translate the local edits into ContentUpdates; returns ( service_key, updates )."""
            content_updates = []
            if self._i_am_local_tag_service:
                # local services add/delete directly
                for pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]: content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_ADD, pair ) )
                for pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]: content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_DELETE, pair ) )
            else:
                # repositories pend/petition; diff against the original state to find what changed
                current_pending = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]
                original_pending = self._original_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]
                current_petitioned = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]
                original_petitioned = self._original_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]
                new_pends = current_pending.difference( original_pending )
                rescinded_pends = original_pending.difference( current_pending )
                new_petitions = current_petitioned.difference( original_petitioned )
                rescinded_petitions = original_petitioned.difference( current_petitioned )
                content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_PEND, pair, reason = self._pairs_to_reasons[ pair ] ) for pair in new_pends ) )
                content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_RESCIND_PEND, pair ) for pair in rescinded_pends ) )
                content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_PETITION, pair, reason = self._pairs_to_reasons[ pair ] ) for pair in new_petitions ) )
                content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_RESCIND_PETITION, pair ) for pair in rescinded_petitions ) )
            return ( self._service_key, content_updates )
        
        def HasUncommittedPair( self ):
            """True if both tag boxes have entries that were never turned into a pair."""
            return len( self._children.GetTags() ) > 0 and len( self._parents.GetTags() ) > 0
        
        def SetTagBoxFocus( self ):
            """Focus the child input first; the parent input once children exist."""
            if len( self._children.GetTags() ) == 0: self._child_input.setFocus( QC.Qt.OtherFocusReason )
            else: self._parent_input.setFocus( QC.Qt.OtherFocusReason )
        
        def THREADInitialise( self, tags, service_key ):
            """Worker-thread init: read pairs and sync status from the db, then update the UI via qt_code."""
            def qt_code( original_statuses_to_pairs, current_statuses_to_pairs, service_keys_to_work_to_do ):
                # back on the Qt thread; bail if the widget died while we were reading
                if not self or not QP.isValid( self ):
                    return
                self._original_statuses_to_pairs = original_statuses_to_pairs
                self._current_statuses_to_pairs = current_statuses_to_pairs
                self._status_st.setText( 'Files with a tag on the left will also be given the tag on the right.' + os.linesep + 'As an experiment, this panel will only display the \'current\' pairs for those tags entered below.' )
                looking_good = True
                if len( service_keys_to_work_to_do ) == 0:
                    looking_good = False
                    status_text = 'No services currently apply these parents. Changes here will have no effect unless parent application is changed later.'
                else:
                    # summarise which applying services are synced vs still working
                    synced_names = sorted( ( HG.client_controller.services_manager.GetName( s_k ) for ( s_k, work_to_do ) in service_keys_to_work_to_do.items() if not work_to_do ) )
                    unsynced_names = sorted( ( HG.client_controller.services_manager.GetName( s_k ) for ( s_k, work_to_do ) in service_keys_to_work_to_do.items() if work_to_do ) )
                    synced_string = ', '.join( ( '"{}"'.format( name ) for name in synced_names ) )
                    unsynced_string = ', '.join( ( '"{}"'.format( name ) for name in unsynced_names ) )
                    if len( unsynced_names ) == 0:
                        service_part = '{} apply these parents and are fully synced.'.format( synced_string )
                    else:
                        looking_good = False
                        if len( synced_names ) > 0:
                            service_part = '{} apply these parents and are fully synced, but {} still have work to do.'.format( synced_string, unsynced_string )
                        else:
                            service_part = '{} apply these parents and still have sync work to do.'.format( unsynced_string )
                    if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
                        maintenance_part = 'Parents are set to sync all the time in the background.'
                        if looking_good:
                            changes_part = 'Changes from this dialog should be reflected soon after closing the dialog.'
                        else:
                            changes_part = 'It may take some time for changes here to apply everywhere, though.'
                    else:
                        looking_good = False
                        if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_idle' ):
                            maintenance_part = 'Parents are set to sync only when you are not using the client.'
                            changes_part = 'It may take some time for changes here to apply.'
                        else:
                            maintenance_part = 'Parents are not set to sync.'
                            changes_part = 'Changes here will not apply unless sync is manually forced to run.'
                    s = os.linesep * 2
                    status_text = s.join( ( service_part, maintenance_part, changes_part ) )
                self._sync_status_st.setText( status_text )
                # green when everything is synced and syncing, amber otherwise
                if looking_good:
                    self._sync_status_st.setObjectName( 'HydrusValid' )
                else:
                    self._sync_status_st.setObjectName( 'HydrusWarning' )
                self._sync_status_st.style().polish( self._sync_status_st )
                self._count_st.setText( 'Starting with '+HydrusData.ToHumanInt(len(original_statuses_to_pairs[HC.CONTENT_STATUS_CURRENT]))+' pairs.' )
                self._child_input.setEnabled( True )
                self._parent_input.setEnabled( True )
                if tags is None:
                    self._UpdateListCtrlData()
                else:
                    self.EnterChildren( tags )
            original_statuses_to_pairs = HG.client_controller.Read( 'tag_parents', service_key )
            ( master_service_keys_to_sibling_applicable_service_keys, master_service_keys_to_parent_applicable_service_keys ) = HG.client_controller.Read( 'tag_display_application' )
            # services whose display computation applies this service's parents
            service_keys_we_care_about = { s_k for ( s_k, s_ks ) in master_service_keys_to_parent_applicable_service_keys.items() if service_key in s_ks }
            service_keys_to_work_to_do = {}
            for s_k in service_keys_we_care_about:
                status = HG.client_controller.Read( 'tag_display_maintenance_status', s_k )
                work_to_do = status[ 'num_parents_to_sync' ] > 0
                service_keys_to_work_to_do[ s_k ] = work_to_do
            current_statuses_to_pairs = collections.defaultdict( set )
            current_statuses_to_pairs.update( { key : set( value ) for ( key, value ) in list(original_statuses_to_pairs.items()) } )
            QP.CallAfter( qt_code, original_statuses_to_pairs, current_statuses_to_pairs, service_keys_to_work_to_do )
class ManageTagSiblings( ClientGUIScrolledPanels.ManagePanel ):
def __init__( self, parent, tags = None ):
    """Build one sibling-editing page per local tag service and petitionable repository.
    
    tags: optional iterable of tags to pre-load on each page.
    """
    ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
    self._tag_repositories = ClientGUICommon.BetterNotebook( self )
    #
    default_tag_repository_key = HC.options[ 'default_tag_repository' ]
    # all local tag services, plus remote repositories where we can at least petition siblings
    services = list( HG.client_controller.services_manager.GetServices( ( HC.LOCAL_TAG, ) ) )
    services.extend( [ service for service in HG.client_controller.services_manager.GetServices( ( HC.TAG_REPOSITORY, ) ) if service.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_PETITION ) ] )
    for service in services:
        name = service.GetName()
        service_key = service.GetServiceKey()
        page = self._Panel( self._tag_repositories, service_key, tags )
        select = service_key == default_tag_repository_key
        self._tag_repositories.addTab( page, name )
        # start on the default tag repository's page
        if select: self._tag_repositories.setCurrentIndex( self._tag_repositories.indexOf( page ) )
    #
    vbox = QP.VBoxLayout()
    QP.AddToLayout( vbox, self._tag_repositories, CC.FLAGS_EXPAND_BOTH_WAYS )
    self.widget().setLayout( vbox )
def _SetSearchFocus( self ):
    """Forward keyboard focus to the active page's tag input, if there is one."""
    page = self._tag_repositories.currentWidget()
    if page is not None:
        page.SetTagBoxFocus()
def CommitChanges( self ):
    """Gather pending content updates from every page and write them to the db in one call."""
    
    pages_results = ( page.GetContentUpdates() for page in self._tag_repositories.GetPages() )
    
    service_keys_to_content_updates = { service_key : content_updates for ( service_key, content_updates ) in pages_results if len( content_updates ) > 0 }
    
    if len( service_keys_to_content_updates ) > 0:
        
        HG.client_controller.Write( 'content_updates', service_keys_to_content_updates )
def UserIsOKToOK( self ):
    """Confirm with the user before closing when the visible page has an uncommitted pair."""
    
    if not self._tag_repositories.currentWidget().HasUncommittedPair():
        
        return True
        
    
    message = 'Are you sure you want to OK? You have an uncommitted pair.'
    
    return ClientGUIDialogsQuick.GetYesNo( self, message ) == QW.QDialog.Accepted
def EventServiceChanged( self, event ):
    """On service/tab change, queue a focus move to the new page's tag input.
    
    The 'event' parameter is unused; it is kept for the event-handler signature.
    """
    page = self._tag_repositories.currentWidget()
    if page is not None:
        # defer so focus is set after the tab switch has fully completed
        HG.client_controller.CallAfterQtSafe( page, page.SetTagBoxFocus )
class _Panel( QW.QWidget ):
def __init__( self, parent, service_key, tags = None ):
QW.QWidget.__init__( self, parent )
self._service_key = service_key
self._service = HG.client_controller.services_manager.GetService( self._service_key )
self._i_am_local_tag_service = self._service.GetServiceType() == HC.LOCAL_TAG
self._original_statuses_to_pairs = collections.defaultdict( set )
self._current_statuses_to_pairs = collections.defaultdict( set )
self._pairs_to_reasons = {}
self._current_new = None
self._show_all = QW.QCheckBox( self )
listctrl_panel = ClientGUIListCtrl.BetterListCtrlPanel( self )
self._tag_siblings = ClientGUIListCtrl.BetterListCtrl( listctrl_panel, CGLC.COLUMN_LIST_TAG_SIBLINGS.ID, 8, self._ConvertPairToListCtrlTuples, delete_key_callback = self._ListCtrlActivated, activation_callback = self._ListCtrlActivated )
listctrl_panel.SetListCtrl( self._tag_siblings )
self._tag_siblings.Sort()
menu_items = []
menu_items.append( ( 'normal', 'from clipboard', 'Load siblings from text in your clipboard.', HydrusData.Call( self._ImportFromClipboard, False ) ) )
menu_items.append( ( 'normal', 'from clipboard (only add pairs--no deletions)', 'Load siblings from text in your clipboard.', HydrusData.Call( self._ImportFromClipboard, True ) ) )
menu_items.append( ( 'normal', 'from .txt file', 'Load siblings from a .txt file.', HydrusData.Call( self._ImportFromTXT, False ) ) )
menu_items.append( ( 'normal', 'from .txt file (only add pairs--no deletions)', 'Load siblings from a .txt file.', HydrusData.Call( self._ImportFromTXT, True ) ) )
listctrl_panel.AddMenuButton( 'import', menu_items )
menu_items = []
menu_items.append( ( 'normal', 'to clipboard', 'Save selected siblings to your clipboard.', self._ExportToClipboard ) )
menu_items.append( ( 'normal', 'to .txt file', 'Save selected siblings to a .txt file.', self._ExportToTXT ) )
listctrl_panel.AddMenuButton( 'export', menu_items, enabled_only_on_selection = True )
self._old_siblings = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( self, self._service_key, ClientTags.TAG_DISPLAY_ACTUAL )
self._new_sibling = ClientGUICommon.BetterStaticText( self )
( gumpf, preview_height ) = ClientGUIFunctions.ConvertTextToPixels( self._old_siblings, ( 12, 6 ) )
self._old_siblings.setMinimumHeight( preview_height )
self._old_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.EnterOlds, CC.LOCAL_FILE_SERVICE_KEY, service_key, show_paste_button = True )
self._old_input.setEnabled( False )
self._new_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( self, self.SetNew, CC.LOCAL_FILE_SERVICE_KEY, service_key )
self._new_input.setEnabled( False )
self._add = QW.QPushButton( 'add', self )
self._add.clicked.connect( self.EventAddButton )
self._add.setEnabled( False )
#
self._status_st = ClientGUICommon.BetterStaticText( self, 'initialising\u2026' )
self._sync_status_st = ClientGUICommon.BetterStaticText( self, '' )
self._sync_status_st.setWordWrap( True )
self._count_st = ClientGUICommon.BetterStaticText( self, '' )
old_sibling_box = QP.VBoxLayout()
QP.AddToLayout( old_sibling_box, ClientGUICommon.BetterStaticText( self, label = 'set tags to be replaced' ), CC.FLAGS_CENTER )
QP.AddToLayout( old_sibling_box, self._old_siblings, CC.FLAGS_EXPAND_BOTH_WAYS )
new_sibling_box = QP.VBoxLayout()
QP.AddToLayout( new_sibling_box, ClientGUICommon.BetterStaticText( self, label = 'set new ideal tag' ), CC.FLAGS_CENTER )
new_sibling_box.addStretch( 1 )
QP.AddToLayout( new_sibling_box, self._new_sibling, CC.FLAGS_EXPAND_PERPENDICULAR )
new_sibling_box.addStretch( 1 )
text_box = QP.HBoxLayout()
QP.AddToLayout( text_box, old_sibling_box, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
QP.AddToLayout( text_box, new_sibling_box, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
input_box = QP.HBoxLayout()
QP.AddToLayout( input_box, self._old_input )
QP.AddToLayout( input_box, self._new_input )
vbox = QP.VBoxLayout()
QP.AddToLayout( vbox, self._status_st, CC.FLAGS_EXPAND_PERPENDICULAR )
QP.AddToLayout( vbox, self._sync_status_st, CC.FLAGS_EXPAND_PERPENDICULAR )
QP.AddToLayout( vbox, self._count_st, CC.FLAGS_EXPAND_PERPENDICULAR )
QP.AddToLayout( vbox, ClientGUICommon.WrapInText(self._show_all,self,'show all pairs'), CC.FLAGS_EXPAND_PERPENDICULAR )
QP.AddToLayout( vbox, listctrl_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
QP.AddToLayout( vbox, self._add, CC.FLAGS_ON_RIGHT )
QP.AddToLayout( vbox, text_box, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
QP.AddToLayout( vbox, input_box, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
self.setLayout( vbox )
#
self._tag_siblings.itemSelectionChanged.connect( self._SetButtonStatus )
self._show_all.clicked.connect( self._UpdateListCtrlData )
self._old_siblings.listBoxChanged.connect( self._UpdateListCtrlData )
HG.client_controller.CallToThread( self.THREADInitialise, tags, self._service_key )
def _AddPairs( self, pairs, add_only = False, remove_only = False, default_reason = None ):
pairs = list( pairs )
pairs.sort( key = lambda c_p1: HydrusTags.ConvertTagToSortable( c_p1[1] ) )
new_pairs = []
current_pairs = []
petitioned_pairs = []
pending_pairs = []
for pair in pairs:
if pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]:
if not add_only:
pending_pairs.append( pair )
elif pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]:
if not remove_only:
petitioned_pairs.append( pair )
elif pair in self._original_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ]:
if not add_only:
current_pairs.append( pair )
elif not remove_only and self._CanAdd( pair ):
new_pairs.append( pair )
if len( new_pairs ) > 0:
do_it = True
if not self._i_am_local_tag_service:
if default_reason is not None:
reason = default_reason
elif self._service.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_MODERATE ):
reason = 'admin'
else:
if len( new_pairs ) > 10:
pair_strings = 'The many pairs you entered.'
else:
pair_strings = os.linesep.join( ( old + '->' + new for ( old, new ) in new_pairs ) )
suggestions = []
suggestions.append( 'merging underscores/typos/phrasing/unnamespaced to a single uncontroversial good tag' )
suggestions.append( 'rewording/namespacing based on preference' )
message = 'Enter a reason for:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'To be added. A janitor will review your petition.'
with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
if dlg.exec() == QW.QDialog.Accepted:
reason = dlg.GetValue()
else:
do_it = False
if do_it:
for pair in new_pairs: self._pairs_to_reasons[ pair ] = reason
if do_it:
self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ].update( new_pairs )
else:
if len( current_pairs ) > 0:
do_it = True
if not self._i_am_local_tag_service:
if default_reason is not None:
reason = default_reason
elif self._service.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_MODERATE ):
reason = 'admin'
else:
if len( current_pairs ) > 10:
pair_strings = 'The many pairs you entered.'
else:
pair_strings = os.linesep.join( ( old + '->' + new for ( old, new ) in current_pairs ) )
message = 'Enter a reason for:'
message += os.linesep * 2
message += pair_strings
message += os.linesep * 2
message += 'to be removed. You will see the delete as soon as you upload, but a janitor will review your petition to decide if all users should receive it as well.'
suggestions = []
suggestions.append( 'obvious typo/mistake' )
suggestions.append( 'disambiguation' )
suggestions.append( 'correcting to repository standard' )
with ClientGUIDialogs.DialogTextEntry( self, message, suggestions = suggestions ) as dlg:
if dlg.exec() == QW.QDialog.Accepted:
reason = dlg.GetValue()
else:
do_it = False
if do_it:
for pair in current_pairs:
self._pairs_to_reasons[ pair ] = reason
if do_it:
self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ].update( current_pairs )
if len( pending_pairs ) > 0:
if len( pending_pairs ) > 10:
pair_strings = 'The many pairs you entered.'
else:
pair_strings = os.linesep.join( ( old + '->' + new for ( old, new ) in pending_pairs ) )
if len( pending_pairs ) > 1:
message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Are pending.'
else:
message = 'The pair ' + pair_strings + ' is pending.'
result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'rescind the pend', no_label = 'do nothing' )
if result == QW.QDialog.Accepted:
self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ].difference_update( pending_pairs )
if len( petitioned_pairs ) > 0:
if len( petitioned_pairs ) > 10:
pair_strings = 'The many pairs you entered.'
else:
pair_strings = ', '.join( ( old + '->' + new for ( old, new ) in petitioned_pairs ) )
if len( petitioned_pairs ) > 1:
message = 'The pairs:' + os.linesep * 2 + pair_strings + os.linesep * 2 + 'Are petitioned.'
else:
message = 'The pair ' + pair_strings + ' is petitioned.'
result = ClientGUIDialogsQuick.GetYesNo( self, message, title = 'Choose what to do.', yes_label = 'rescind the petition', no_label = 'do nothing' )
if result == QW.QDialog.Accepted:
self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ].difference_update( petitioned_pairs )
def _AutoPetitionConflicts( self, pairs ):
current_pairs = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ].union( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] ).difference( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ] )
current_olds_to_news = dict( current_pairs )
current_olds = { current_old for ( current_old, current_new ) in current_pairs }
pairs_to_auto_petition = set()
for ( old, new ) in pairs:
if old in current_olds:
conflicting_new = current_olds_to_news[ old ]
if conflicting_new != new:
conflicting_pair = ( old, conflicting_new )
pairs_to_auto_petition.add( conflicting_pair )
if len( pairs_to_auto_petition ) > 0:
pairs_to_auto_petition = list( pairs_to_auto_petition )
self._AddPairs( pairs_to_auto_petition, remove_only = True, default_reason = 'AUTO-PETITION TO REASSIGN TO: ' + new )
def _CanAdd( self, potential_pair ):
( potential_old, potential_new ) = potential_pair
current_pairs = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ].union( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] ).difference( self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ] )
current_olds = { old for ( old, new ) in current_pairs }
# test for ambiguity
if potential_old in current_olds:
QW.QMessageBox.critical( self, 'Error', 'There already is a relationship set for the tag '+potential_old+'.' )
return False
# test for loops
if potential_new in current_olds:
seen_tags = set()
d = dict( current_pairs )
next_new = potential_new
while next_new in d:
next_new = d[ next_new ]
if next_new == potential_old:
QW.QMessageBox.critical( self, 'Error', 'Adding '+potential_old+'->'+potential_new+' would create a loop!' )
return False
if next_new in seen_tags:
message = 'The pair you mean to add seems to connect to a sibling loop already in your database! Please undo this loop first. The tags involved in the loop are:'
message += os.linesep * 2
message += ', '.join( seen_tags )
QW.QMessageBox.critical( self, 'Error', message )
return False
seen_tags.add( next_new )
return True
def _ConvertPairToListCtrlTuples( self, pair ):
( old, new ) = pair
if pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]:
status = HC.CONTENT_STATUS_PENDING
elif pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]:
status = HC.CONTENT_STATUS_PETITIONED
elif pair in self._original_statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ]:
status = HC.CONTENT_STATUS_CURRENT
sign = HydrusData.ConvertStatusToPrefix( status )
pretty_status = sign
existing_olds = self._old_siblings.GetTags()
note = ''
if old in existing_olds:
if status == HC.CONTENT_STATUS_PENDING:
note = 'CONFLICT: Will be rescinded on add.'
elif status == HC.CONTENT_STATUS_CURRENT:
note = 'CONFLICT: Will be petitioned/deleted on add.'
display_tuple = ( pretty_status, old, new, note )
sort_tuple = ( status, old, new, note )
return ( display_tuple, sort_tuple )
def _DeserialiseImportString( self, import_string ):
tags = HydrusText.DeserialiseNewlinedTexts( import_string )
if len( tags ) % 2 == 1:
raise Exception( 'Uneven number of tags found!' )
pairs = []
for i in range( len( tags ) // 2 ):
pair = ( tags[ 2 * i ], tags[ ( 2 * i ) + 1 ] )
pairs.append( pair )
return pairs
def _ExportToClipboard( self ):
export_string = self._GetExportString()
HG.client_controller.pub( 'clipboard', 'text', export_string )
def _ExportToTXT( self ):
export_string = self._GetExportString()
with QP.FileDialog( self, 'Set the export path.', default_filename = 'siblings.txt', acceptMode = QW.QFileDialog.AcceptSave, fileMode = QW.QFileDialog.AnyFile ) as dlg:
if dlg.exec() == QW.QDialog.Accepted:
path = dlg.GetPath()
with open( path, 'w', encoding = 'utf-8' ) as f:
f.write( export_string )
def _GetExportString( self ):
tags = []
for ( a, b ) in self._tag_siblings.GetData( only_selected = True ):
tags.append( a )
tags.append( b )
export_string = os.linesep.join( tags )
return export_string
def _ImportFromClipboard( self, add_only = False ):
try:
import_string = HG.client_controller.GetClipboardText()
except HydrusExceptions.DataMissing as e:
QW.QMessageBox.critical( self, 'Error', str(e) )
return
pairs = self._DeserialiseImportString( import_string )
self._AutoPetitionConflicts( pairs )
self._AddPairs( pairs, add_only = add_only )
self._UpdateListCtrlData()
def _ImportFromTXT( self, add_only = False ):
with QP.FileDialog( self, 'Select the file to import.', acceptMode = QW.QFileDialog.AcceptOpen ) as dlg:
if dlg.exec() != QW.QDialog.Accepted:
return
else:
path = dlg.GetPath()
with open( path, 'r', encoding = 'utf-8' ) as f:
import_string = f.read()
pairs = self._DeserialiseImportString( import_string )
self._AutoPetitionConflicts( pairs )
self._AddPairs( pairs, add_only = add_only )
self._UpdateListCtrlData()
def _ListCtrlActivated( self ):
pairs = self._tag_siblings.GetData( only_selected = True )
if len( pairs ) > 0:
self._AddPairs( pairs )
self._UpdateListCtrlData()
def _SetButtonStatus( self ):
if self._current_new is None or len( self._old_siblings.GetTags() ) == 0:
self._add.setEnabled( False )
else:
self._add.setEnabled( True )
def _UpdateListCtrlData( self ):
olds = self._old_siblings.GetTags()
pertinent_tags = set( olds )
if self._current_new is not None:
pertinent_tags.add( self._current_new )
self._tag_siblings.DeleteDatas( self._tag_siblings.GetData() )
all_pairs = set()
show_all = self._show_all.isChecked()
for ( status, pairs ) in self._current_statuses_to_pairs.items():
if status == HC.CONTENT_STATUS_DELETED:
continue
if len( pertinent_tags ) == 0:
if status == HC.CONTENT_STATUS_CURRENT and not show_all:
continue
# show all pending/petitioned
all_pairs.update( pairs )
else:
# show all appropriate
for pair in pairs:
( a, b ) = pair
if a in pertinent_tags or b in pertinent_tags or show_all:
all_pairs.add( pair )
self._tag_siblings.AddDatas( all_pairs )
self._tag_siblings.Sort()
def EnterOlds( self, olds ):
if self._current_new in olds:
self.SetNew( set() )
self._old_siblings.EnterTags( olds )
self._UpdateListCtrlData()
self._SetButtonStatus()
def EventAddButton( self ):
if self._current_new is not None and len( self._old_siblings.GetTags() ) > 0:
olds = self._old_siblings.GetTags()
pairs = [ ( old, self._current_new ) for old in olds ]
self._AutoPetitionConflicts( pairs )
self._AddPairs( pairs )
self._old_siblings.SetTags( set() )
self.SetNew( set() )
self._UpdateListCtrlData()
self._SetButtonStatus()
def GetContentUpdates( self ):
# we make it manually here because of the mass pending tags done (but not undone on a rescind) on a pending pair!
# we don't want to send a pend and then rescind it, cause that will spam a thousand bad tags and not undo it
content_updates = []
if self._i_am_local_tag_service:
for pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]:
content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_ADD, pair ) )
for pair in self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]:
content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_DELETE, pair ) )
else:
current_pending = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]
original_pending = self._original_statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ]
current_petitioned = self._current_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]
original_petitioned = self._original_statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ]
new_pends = current_pending.difference( original_pending )
rescinded_pends = original_pending.difference( current_pending )
new_petitions = current_petitioned.difference( original_petitioned )
rescinded_petitions = original_petitioned.difference( current_petitioned )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_PEND, pair, reason = self._pairs_to_reasons[ pair ] ) for pair in new_pends ) )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_RESCIND_PEND, pair ) for pair in rescinded_pends ) )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_PETITION, pair, reason = self._pairs_to_reasons[ pair ] ) for pair in new_petitions ) )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_RESCIND_PETITION, pair ) for pair in rescinded_petitions ) )
return ( self._service_key, content_updates )
def HasUncommittedPair( self ):
return len( self._old_siblings.GetTags() ) > 0 and self._current_new is not None
def SetNew( self, new_tags ):
if len( new_tags ) == 0:
self._new_sibling.clear()
self._current_new = None
else:
new = list( new_tags )[0]
self._old_siblings.RemoveTags( { new } )
self._new_sibling.setText( new )
self._current_new = new
self._UpdateListCtrlData()
self._SetButtonStatus()
def SetTagBoxFocus( self ):
if len( self._old_siblings.GetTags() ) == 0:
self._old_input.setFocus( QC.Qt.OtherFocusReason )
else:
self._new_input.setFocus( QC.Qt.OtherFocusReason )
def THREADInitialise( self, tags, service_key ):
def qt_code( original_statuses_to_pairs, current_statuses_to_pairs, service_keys_to_work_to_do ):
if not self or not QP.isValid( self ):
return
self._original_statuses_to_pairs = original_statuses_to_pairs
self._current_statuses_to_pairs = current_statuses_to_pairs
self._status_st.setText( 'Tags on the left will be appear as those on the right.' )
looking_good = True
if len( service_keys_to_work_to_do ) == 0:
looking_good = False
status_text = 'No services currently apply these siblings. Changes here will have no effect unless sibling application is changed later.'
else:
synced_names = sorted( ( HG.client_controller.services_manager.GetName( s_k ) for ( s_k, work_to_do ) in service_keys_to_work_to_do.items() if not work_to_do ) )
unsynced_names = sorted( ( HG.client_controller.services_manager.GetName( s_k ) for ( s_k, work_to_do ) in service_keys_to_work_to_do.items() if work_to_do ) )
synced_string = ', '.join( ( '"{}"'.format( name ) for name in synced_names ) )
unsynced_string = ', '.join( ( '"{}"'.format( name ) for name in unsynced_names ) )
if len( unsynced_names ) == 0:
service_part = '{} apply these siblings and are fully synced.'.format( synced_string )
else:
looking_good = False
if len( synced_names ) > 0:
service_part = '{} apply these siblings and are fully synced, but {} still have work to do.'.format( synced_string, unsynced_string )
else:
service_part = '{} apply these siblings but still have sync work to do.'.format( unsynced_string )
if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
maintenance_part = 'Siblings are set to sync all the time in the background.'
if looking_good:
changes_part = 'Changes from this dialog should be reflected soon after closing the dialog.'
else:
changes_part = 'It may take some time for changes here to apply everywhere, though.'
else:
looking_good = False
if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_idle' ):
maintenance_part = 'Siblings are set to sync only when you are not using the client.'
changes_part = 'It may take some time for changes here to apply.'
else:
maintenance_part = 'Siblings are not set to sync.'
changes_part = 'Changes here will not apply unless sync is manually forced to run.'
s = os.linesep * 2
status_text = s.join( ( service_part, maintenance_part, changes_part ) )
self._sync_status_st.setText( status_text )
if looking_good:
self._sync_status_st.setObjectName( 'HydrusValid' )
else:
self._sync_status_st.setObjectName( 'HydrusWarning' )
self._sync_status_st.style().polish( self._sync_status_st )
self._count_st.setText( 'Starting with '+HydrusData.ToHumanInt(len(original_statuses_to_pairs[HC.CONTENT_STATUS_CURRENT]))+' pairs.' )
self._old_input.setEnabled( True )
self._new_input.setEnabled( True )
if tags is None:
self._UpdateListCtrlData()
else:
self.EnterOlds( tags )
original_statuses_to_pairs = HG.client_controller.Read( 'tag_siblings', service_key )
( master_service_keys_to_sibling_applicable_service_keys, master_service_keys_to_parent_applicable_service_keys ) = HG.client_controller.Read( 'tag_display_application' )
service_keys_we_care_about = { s_k for ( s_k, s_ks ) in master_service_keys_to_sibling_applicable_service_keys.items() if service_key in s_ks }
service_keys_to_work_to_do = {}
for s_k in service_keys_we_care_about:
status = HG.client_controller.Read( 'tag_display_maintenance_status', s_k )
work_to_do = status[ 'num_siblings_to_sync' ] > 0
service_keys_to_work_to_do[ s_k ] = work_to_do
current_statuses_to_pairs = collections.defaultdict( set )
current_statuses_to_pairs.update( { key : set( value ) for ( key, value ) in original_statuses_to_pairs.items() } )
QP.CallAfter( qt_code, original_statuses_to_pairs, current_statuses_to_pairs, service_keys_to_work_to_do )
class ReviewTagDisplayMaintenancePanel( ClientGUIScrolledPanels.ReviewPanel ):
    """Review panel showing sibling/parent display-sync progress, one tab per real tag service."""
    
    def __init__( self, parent ):
        
        ClientGUIScrolledPanels.ReviewPanel.__init__( self, parent )
        
        self._tag_services_notebook = ClientGUICommon.BetterNotebook( self )
        
        min_width = ClientGUIFunctions.ConvertTextToPixelWidth( self._tag_services_notebook, 100 )
        
        self._tag_services_notebook.setMinimumWidth( min_width )
        
        services = list( HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES ) )
        
        # default selection is simply the first service returned
        select_service_key = services[0].GetServiceKey()
        
        for service in services:
            
            service_key = service.GetServiceKey()
            name = service.GetName()
            
            page = self._Panel( self._tag_services_notebook, service_key )
            
            self._tag_services_notebook.addTab( page, name )
            
            if service_key == select_service_key:
                
                self._tag_services_notebook.setCurrentWidget( page )
                
            
        
        vbox = QP.VBoxLayout()
        
        message = 'Figuring out how tags should appear according to sibling and parent application rules takes time. When you set new rules, the changes do not happen immediately--the client catches up in the background. You can review current progress and force faster sync here.'
        
        self._message = ClientGUICommon.BetterStaticText( self, label = message )
        self._message.setWordWrap( True )
        
        self._sync_status = ClientGUICommon.BetterStaticText( self )
        
        self._sync_status.setWordWrap( True )
        
        self._UpdateStatusText()
        
        QP.AddToLayout( vbox, self._message, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._sync_status, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._tag_services_notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.widget().setLayout( vbox )
        
        # refresh the status line when the relevant menu options change
        HG.client_controller.sub( self, '_UpdateStatusText', 'notify_new_menu_option' )
        
    
    def _UpdateStatusText( self ):
        """Describe the current background-sync policy and colour the line valid/warning accordingly."""
        
        if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
            
            self._sync_status.setText( 'Siblings and parents are set to sync all the time. If there is work to do here, it should be cleared out in real time as you watch.' )
            
            self._sync_status.setObjectName( 'HydrusValid' )
            
        else:
            
            if HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_idle' ):
                
                self._sync_status.setText( 'Siblings and parents are only set to sync during idle time. If there is work to do here, it should be cleared out when you are not using the client.' )
                
            else:
                
                self._sync_status.setText( 'Siblings and parents are not set to sync in the background at any time. If there is work to do here, you can force it now by clicking \'work now!\' button.' )
                
            
            self._sync_status.setObjectName( 'HydrusWarning' )
            
        
        self._sync_status.style().polish( self._sync_status )
        
    
    class _Panel( QW.QWidget ):
        """Per-service progress readout with refresh and 'work faster' controls."""
        
        def __init__( self, parent, service_key ):
            
            QW.QWidget.__init__( self, parent )
            
            self._service_key = service_key
            
            self._siblings_and_parents_st = ClientGUICommon.BetterStaticText( self )
            
            self._progress = ClientGUICommon.TextAndGauge( self )
            
            self._refresh_button = ClientGUICommon.BetterBitmapButton( self, CC.global_pixmaps().refresh, self._StartRefresh )
            
            self._go_faster_button = ClientGUICommon.BetterButton( self, 'work hard now!', self._SyncFaster )
            
            button_hbox = QP.HBoxLayout()
            
            QP.AddToLayout( button_hbox, self._refresh_button, CC.FLAGS_CENTER )
            QP.AddToLayout( button_hbox, self._go_faster_button, CC.FLAGS_CENTER )
            
            vbox = QP.VBoxLayout()
            
            QP.AddToLayout( vbox, self._siblings_and_parents_st, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, self._progress, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, button_hbox, CC.FLAGS_ON_RIGHT )
            vbox.addStretch( 1 )
            
            self.setLayout( vbox )
            
            self._refresh_values_updater = self._InitialiseRefreshValuesUpdater()
            
            HG.client_controller.sub( self, 'NotifyRefresh', 'notify_new_tag_display_sync_status' )
            HG.client_controller.sub( self, '_StartRefresh', 'notify_new_tag_display_application' )
            
            self._StartRefresh()
            
        
        def _InitialiseRefreshValuesUpdater( self ):
            """Build the async updater: disable buttons, read sync status off-thread, then publish it to the widgets."""
            
            service_key = self._service_key
            
            def loading_callable():
                
                self._progress.SetText( 'refreshing\u2026' )
                
                self._refresh_button.setEnabled( False )
                
                # keep 'slow down!' clickable while a faster sync is running
                running_fast_and_button_is_slow = HG.client_controller.tag_display_maintenance_manager.CurrentlyGoingFaster( self._service_key ) and 'slow' in self._go_faster_button.text()
                
                if not running_fast_and_button_is_slow:
                    
                    self._go_faster_button.setEnabled( False )
                    
                
            
            def work_callable():
                
                status = HG.client_controller.Read( 'tag_display_maintenance_status', service_key )
                
                # brief pause so rapid notifications don't hammer the db
                time.sleep( 0.1 )
                
                return status
                
            
            def publish_callable( result ):
                
                status = result
                
                num_siblings_to_sync = status[ 'num_siblings_to_sync' ]
                num_parents_to_sync = status[ 'num_parents_to_sync' ]
                
                num_items_to_regen = num_siblings_to_sync + num_parents_to_sync
                
                if num_items_to_regen == 0:
                    
                    message = 'All synced!'
                    
                elif num_parents_to_sync == 0:
                    
                    message = '{} siblings to sync.'.format( HydrusData.ToHumanInt( num_siblings_to_sync ) )
                    
                elif num_siblings_to_sync == 0:
                    
                    message = '{} parents to sync.'.format( HydrusData.ToHumanInt( num_parents_to_sync ) )
                    
                else:
                    
                    message = '{} siblings and {} parents to sync.'.format( HydrusData.ToHumanInt( num_siblings_to_sync ), HydrusData.ToHumanInt( num_parents_to_sync ) )
                    
                
                self._siblings_and_parents_st.setText( message )
                
                num_actual_rows = status[ 'num_actual_rows' ]
                num_ideal_rows = status[ 'num_ideal_rows' ]
                
                if num_items_to_regen == 0:
                    
                    if num_ideal_rows == 0:
                        
                        message = 'No siblings/parents applying to this service.'
                        
                    else:
                        
                        message = '{} rules, all synced!'.format( HydrusData.ToHumanInt( num_ideal_rows ) )
                        
                    
                    # full gauge
                    value = 1
                    range = 1
                    
                    sync_possible = False
                    
                else:
                    
                    # None/None puts the gauge into indeterminate mode unless we can compute progress
                    value = None
                    range = None
                    
                    if num_ideal_rows == 0:
                        
                        message = 'Removing all siblings/parents, {} rules remaining.'.format( HydrusData.ToHumanInt( num_actual_rows ) )
                        
                    else:
                        
                        message = '{} rules applied now, moving to {}.'.format( HydrusData.ToHumanInt( num_actual_rows ), HydrusData.ToHumanInt( num_ideal_rows ) )
                        
                        if num_actual_rows <= num_ideal_rows:
                            
                            value = num_actual_rows
                            range = num_ideal_rows
                            
                        
                    
                    sync_possible = True
                    
                
                self._progress.SetValue( message, value, range )
                
                self._refresh_button.setEnabled( True )
                
                self._go_faster_button.setVisible( sync_possible )
                self._go_faster_button.setEnabled( sync_possible )
                
                # button label reflects whether faster sync is currently running
                if HG.client_controller.tag_display_maintenance_manager.CurrentlyGoingFaster( self._service_key ):
                    
                    self._go_faster_button.setText( 'slow down!' )
                    
                else:
                    
                    if not HG.client_controller.new_options.GetBoolean( 'tag_display_maintenance_during_active' ):
                        
                        self._go_faster_button.setText( 'work now!' )
                        
                    else:
                        
                        self._go_faster_button.setText( 'work hard now!' )
                        
                    
                
            
            return ClientGUIAsync.AsyncQtUpdater( self, loading_callable, work_callable, publish_callable )
            
        
        def _StartRefresh( self ):
            """Kick off an async status refresh."""
            
            self._refresh_values_updater.update()
            
        
        def _SyncFaster( self ):
            """Toggle faster background sync for this service, then refresh the display."""
            
            HG.client_controller.tag_display_maintenance_manager.FlipSyncFaster( self._service_key )
            
            self._StartRefresh()
            
        
        def NotifyRefresh( self, service_key ):
            """Pubsub hook: refresh when this panel's service reports new sync status."""
            
            if service_key == self._service_key:
                
                self._StartRefresh()
                
            
        
    
class TagFilterButton( ClientGUICommon.BetterButton ):
    """A button that shows a short summary of a tag filter and opens an edit dialog when clicked."""
    
    def __init__( self, parent, message, tag_filter, only_show_blacklist = False, label_prefix = None ):
        
        ClientGUICommon.BetterButton.__init__( self, parent, 'tag filter', self._EditTagFilter )
        
        self._message = message
        self._label_prefix = label_prefix
        self._only_show_blacklist = only_show_blacklist
        self._tag_filter = tag_filter
        
        self._UpdateLabel()
        
    
    def _EditTagFilter( self ):
        """Open the edit dialog and, on OK, adopt the edited filter and refresh the label."""
        
        title = 'edit blacklist' if self._only_show_blacklist else 'edit tag filter'
        
        with ClientGUITopLevelWindowsPanels.DialogEdit( self, title ) as dlg:
            
            parser_namespaces = HG.client_controller.network_engine.domain_manager.GetParserNamespaces()
            
            panel = EditTagFilterPanel( dlg, self._tag_filter, only_show_blacklist = self._only_show_blacklist, namespaces = parser_namespaces, message = self._message )
            
            dlg.SetPanel( panel )
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                self._tag_filter = panel.GetValue()
                
                self._UpdateLabel()
                
            
        
    
    def _UpdateLabel( self ):
        """Refresh the button text (elided) and tooltip (full) from the current filter."""
        
        summary = self._tag_filter.ToBlacklistString() if self._only_show_blacklist else self._tag_filter.ToPermittedString()
        
        if self._label_prefix is not None:
            
            summary = self._label_prefix + summary
            
        
        self.setText( HydrusText.ElideText( summary, 45 ) )
        
        self.setToolTip( summary )
        
    
    def GetValue( self ):
        """Return the current tag filter."""
        
        return self._tag_filter
        
    
    def SetValue( self, tag_filter ):
        """Replace the current tag filter and refresh the label."""
        
        self._tag_filter = tag_filter
        
        self._UpdateLabel()
        
    
class TagSummaryGenerator( HydrusSerialisable.SerialisableBase ):
    """Serialisable object that renders a set of tags into a short one-line summary
    according to per-namespace prefix/separator rules, with display colours."""
    
    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_TAG_SUMMARY_GENERATOR
    SERIALISABLE_NAME = 'Tag Summary Generator'
    SERIALISABLE_VERSION = 2
    
    def __init__( self, background_colour = None, text_colour = None, namespace_info = None, separator = None, example_tags = None, show = True ):
        
        if background_colour is None:
            
            background_colour = QG.QColor( 223, 227, 230, 255 )
            
        
        if text_colour is None:
            
            text_colour = QG.QColor( 1, 17, 26, 255 )
            
        
        if namespace_info is None:
            
            # default rules: ( namespace, prefix, separator-between-subtags )
            namespace_info = []
            
            namespace_info.append( ( 'creator', '', ', ' ) )
            namespace_info.append( ( 'series', '', ', ' ) )
            namespace_info.append( ( 'title', '', ', ' ) )
            
        
        if separator is None:
            
            separator = ' - '
            
        
        if example_tags is None:
            
            example_tags = []
            
        
        self._background_colour = background_colour
        self._text_colour = text_colour
        self._namespace_info = namespace_info
        self._separator = separator
        self._example_tags = list( example_tags )
        self._show = show
        
        self._UpdateNamespaceLookup()
        
    
    def _GetSerialisableInfo( self ):
        # colours are flattened to rgba lists; tuple order here is the v2 wire format
        
        bc = self._background_colour
        
        background_colour_rgba = [ bc.red(), bc.green(), bc.blue(), bc.alpha() ]
        
        tc = self._text_colour
        
        text_colour_rgba = [ tc.red(), tc.green(), tc.blue(), tc.alpha() ]
        
        return ( background_colour_rgba, text_colour_rgba, self._namespace_info, self._separator, self._example_tags, self._show )
        
    
    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        
        ( background_rgba, text_rgba, self._namespace_info, self._separator, self._example_tags, self._show ) = serialisable_info
        
        ( r, g, b, a ) = background_rgba
        
        self._background_colour = QG.QColor( r, g, b, a )
        
        ( r, g, b, a ) = text_rgba
        
        self._text_colour = QG.QColor( r, g, b, a )
        
        # json round-trips tuples as lists, so re-tuple the namespace rows
        self._namespace_info = [ tuple( row ) for row in self._namespace_info ]
        
        self._UpdateNamespaceLookup()
        
    
    def _UpdateNamespaceLookup( self ):
        # cached set of namespaces that contribute to the summary
        
        self._interesting_namespaces = { namespace for ( namespace, prefix, separator ) in self._namespace_info }
        
    
    def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
        
        if version == 1:
            
            # v1 had no colours or show flag; inject the defaults
            ( namespace_info, separator, example_tags ) = old_serialisable_info
            
            background_rgba = ( 223, 227, 230, 255 )
            text_rgba = ( 1, 17, 26, 255 )
            show = True
            
            new_serialisable_info = ( background_rgba, text_rgba, namespace_info, separator, example_tags, show )
            
            return ( 2, new_serialisable_info )
            
        
    
    def GenerateExampleSummary( self ):
        """Render the stored example tags, or a placeholder when display is off."""
        
        if not self._show:
            
            return 'not showing'
            
        else:
            
            return self.GenerateSummary( self._example_tags )
            
        
    
    def GenerateSummary( self, tags, max_length = None ):
        """Build the summary string for the given tags, truncated to max_length characters if set."""
        
        if not self._show:
            
            return ''
            
        
        namespaces_to_subtags = collections.defaultdict( list )
        
        # keep only subtags whose namespace has a display rule
        for tag in tags:
            
            ( namespace, subtag ) = HydrusTags.SplitTag( tag )
            
            if namespace in self._interesting_namespaces:
                
                namespaces_to_subtags[ namespace ].append( subtag )
                
            
        
        # sort numerically and collapse numeric runs (per the HydrusTags helpers)
        for ( namespace, unsorted_l ) in list( namespaces_to_subtags.items() ):
            
            sorted_l = HydrusTags.SortNumericTags( unsorted_l )
            
            sorted_l = HydrusTags.CollapseMultipleSortedNumericTagsToMinMax( sorted_l )
            
            namespaces_to_subtags[ namespace ] = sorted_l
            
        
        namespace_texts = []
        
        # emit namespaces in rule order, skipping any with no matching subtags
        for ( namespace, prefix, separator ) in self._namespace_info:
            
            subtags = namespaces_to_subtags[ namespace ]
            
            if len( subtags ) > 0:
                
                namespace_text = prefix + separator.join( namespaces_to_subtags[ namespace ] )
                
                namespace_texts.append( namespace_text )
                
            
        
        summary = self._separator.join( namespace_texts )
        
        if max_length is not None:
            
            summary = summary[:max_length]
            
        
        return summary
        
    
    def GetBackgroundColour( self ):
        """Return the summary's background QColor."""
        
        return self._background_colour
        
    
    def GetTextColour( self ):
        """Return the summary's text QColor."""
        
        return self._text_colour
        
    
    def ToTuple( self ):
        """Return all settings as a tuple: ( background, text, namespace_info, separator, example_tags, show )."""
        
        return ( self._background_colour, self._text_colour, self._namespace_info, self._separator, self._example_tags, self._show )
        
    
# Register the generator with the serialisable framework so saved objects
# of this type id deserialise back into TagSummaryGenerator.
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_TAG_SUMMARY_GENERATOR ] = TagSummaryGenerator
class EditTagSummaryGeneratorPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a TagSummaryGenerator: a show toggle, colour pickers,
    an orderable namespace-row list, a separator field, and a live example
    preview regenerated on every input change."""
    def __init__( self, parent: QW.QWidget, tag_summary_generator: TagSummaryGenerator ):
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        # 'shows' box: master on/off toggle.
        show_panel = ClientGUICommon.StaticBox( self, 'shows' )
        self._show = QW.QCheckBox( show_panel )
        # 'edit' box: colours, namespace queue, separator.
        edit_panel = ClientGUICommon.StaticBox( self, 'edit' )
        self._background_colour = ClientGUICommon.AlphaColourControl( edit_panel )
        self._text_colour = ClientGUICommon.AlphaColourControl( edit_panel )
        self._namespaces_listbox = ClientGUIListBoxes.QueueListBox( edit_panel, 8, self._ConvertNamespaceToListBoxString, self._AddNamespaceInfo, self._EditNamespaceInfo )
        self._separator = QW.QLineEdit( edit_panel )
        # 'example' box: newline-separated example tags and a read-only result.
        example_panel = ClientGUICommon.StaticBox( self, 'example' )
        self._example_tags = QW.QPlainTextEdit( example_panel )
        self._test_result = QW.QLineEdit( example_panel )
        self._test_result.setReadOnly( True )
        # Populate the controls from the generator being edited.
        ( background_colour, text_colour, namespace_info, separator, example_tags, show ) = tag_summary_generator.ToTuple()
        self._show.setChecked( show )
        self._background_colour.SetValue( background_colour )
        self._text_colour.SetValue( text_colour )
        self._namespaces_listbox.AddDatas( namespace_info )
        self._separator.setText( separator )
        self._example_tags.setPlainText( os.linesep.join( example_tags ) )
        self._UpdateTest()
        # Layout.
        rows = []
        rows.append( ( 'currently shows (turn off to hide): ', self._show ) )
        gridbox = ClientGUICommon.WrapInGrid( show_panel, rows )
        show_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        rows = []
        rows.append( ( 'background colour: ', self._background_colour ) )
        rows.append( ( 'text colour: ', self._text_colour ) )
        gridbox = ClientGUICommon.WrapInGrid( edit_panel, rows )
        edit_panel.Add( ClientGUICommon.BetterStaticText( edit_panel, 'The colours only work for the thumbnails right now!' ), CC.FLAGS_EXPAND_PERPENDICULAR )
        edit_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        edit_panel.Add( self._namespaces_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        edit_panel.Add( ClientGUICommon.WrapInText( self._separator, edit_panel, 'separator' ), CC.FLAGS_EXPAND_PERPENDICULAR )
        example_panel.Add( ClientGUICommon.BetterStaticText( example_panel, 'Enter some newline-separated tags here to see what your current object would generate.' ), CC.FLAGS_EXPAND_PERPENDICULAR )
        example_panel.Add( self._example_tags, CC.FLAGS_EXPAND_BOTH_WAYS )
        example_panel.Add( self._test_result, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox = QP.VBoxLayout()
        QP.AddToLayout( vbox, show_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, edit_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( vbox, example_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        self.widget().setLayout( vbox )
        # Refresh the preview whenever any input changes.
        self._show.clicked.connect( self._UpdateTest )
        self._separator.textChanged.connect( self._UpdateTest )
        self._example_tags.textChanged.connect( self._UpdateTest )
        self._namespaces_listbox.listBoxChanged.connect( self._UpdateTest )
    def _AddNamespaceInfo( self ):
        # Seed a blank row and push it through the same edit-dialog flow.
        namespace = ''
        prefix = ''
        separator = ', '
        namespace_info = ( namespace, prefix, separator )
        return self._EditNamespaceInfo( namespace_info )
    def _ConvertNamespaceToListBoxString( self, namespace_info ):
        # Human-readable listbox label for one ( namespace, prefix, separator ) row.
        ( namespace, prefix, separator ) = namespace_info
        if namespace == '':
            pretty_namespace = 'unnamespaced'
        else:
            pretty_namespace = namespace
        pretty_prefix = prefix
        pretty_separator = separator
        return pretty_namespace + ' | prefix: "' + pretty_prefix + '" | separator: "' + pretty_separator + '"'
    def _EditNamespaceInfo( self, namespace_info ):
        # Walk the user through three sequential text-entry dialogs; a cancel
        # at any step raises VetoException so the listbox discards the edit.
        ( namespace, prefix, separator ) = namespace_info
        message = 'Edit namespace.'
        with ClientGUIDialogs.DialogTextEntry( self, message, namespace, allow_blank = True ) as dlg:
            if dlg.exec() == QW.QDialog.Accepted:
                namespace = dlg.GetValue()
            else:
                raise HydrusExceptions.VetoException()
        message = 'Edit prefix.'
        with ClientGUIDialogs.DialogTextEntry( self, message, prefix, allow_blank = True ) as dlg:
            if dlg.exec() == QW.QDialog.Accepted:
                prefix = dlg.GetValue()
            else:
                raise HydrusExceptions.VetoException()
        message = 'Edit separator.'
        with ClientGUIDialogs.DialogTextEntry( self, message, separator, allow_blank = True ) as dlg:
            if dlg.exec() == QW.QDialog.Accepted:
                separator = dlg.GetValue()
                namespace_info = ( namespace, prefix, separator )
                return namespace_info
            else:
                raise HydrusExceptions.VetoException()
    def _UpdateTest( self ):
        # Regenerate the example summary from the current control state.
        tag_summary_generator = self.GetValue()
        self._test_result.setText( tag_summary_generator.GenerateExampleSummary() )
    def GetValue( self ) -> TagSummaryGenerator:
        """Build a fresh TagSummaryGenerator from the current control values."""
        show = self._show.isChecked()
        background_colour = self._background_colour.GetValue()
        text_colour = self._text_colour.GetValue()
        namespace_info = self._namespaces_listbox.GetData()
        separator = self._separator.text()
        example_tags = HydrusTags.CleanTags( HydrusText.DeserialiseNewlinedTexts( self._example_tags.toPlainText() ) )
        return TagSummaryGenerator( background_colour, text_colour, namespace_info, separator, example_tags, show )
class TagSummaryGeneratorButton( ClientGUICommon.BetterButton ):
    """Button whose label previews a TagSummaryGenerator; clicking opens the
    edit dialog and, on accept, stores the new generator and relabels."""
    def __init__( self, parent: QW.QWidget, tag_summary_generator: TagSummaryGenerator ):
        label = tag_summary_generator.GenerateExampleSummary()
        ClientGUICommon.BetterButton.__init__( self, parent, label, self._Edit )
        self._tag_summary_generator = tag_summary_generator
    def _Edit( self ):
        with ClientGUITopLevelWindowsPanels.DialogEdit( self, 'edit tag summary' ) as dlg:
            panel = EditTagSummaryGeneratorPanel( dlg, self._tag_summary_generator )
            dlg.SetPanel( panel )
            if dlg.exec() == QW.QDialog.Accepted:
                # Accepting the dialog replaces the stored generator and
                # refreshes the button label with the new example summary.
                self._tag_summary_generator = panel.GetValue()
                self.setText( self._tag_summary_generator.GenerateExampleSummary() )
    def GetValue( self ) -> TagSummaryGenerator:
        return self._tag_summary_generator
| true | true |
f7250c8113f5c4b5fe8357a30be38ead88265b94 | 139 | py | Python | aos_sw_api/globel_models/network_host.py | KennethSoelberg/AOS-Switch | a5a2c54917bbb69fab044bf0b313bcf795642d30 | [
"MIT"
] | null | null | null | aos_sw_api/globel_models/network_host.py | KennethSoelberg/AOS-Switch | a5a2c54917bbb69fab044bf0b313bcf795642d30 | [
"MIT"
] | 1 | 2020-12-24T15:36:56.000Z | 2021-01-28T23:19:57.000Z | aos_sw_api/globel_models/network_host.py | KennethSoelberg/AOS-Switch | a5a2c54917bbb69fab044bf0b313bcf795642d30 | [
"MIT"
] | 1 | 2021-02-16T23:26:28.000Z | 2021-02-16T23:26:28.000Z | from pydantic import BaseModel
from .ip_address import IpAddressModel
class NetworkHostModel(BaseModel):
    # Pydantic model describing a network host; currently just the host's
    # IP address, validated by the nested IpAddressModel.
    ip_address: IpAddressModel
| 17.375 | 38 | 0.827338 | from pydantic import BaseModel
from .ip_address import IpAddressModel
class NetworkHostModel(BaseModel):
ip_address: IpAddressModel
| true | true |
f7250f700383b7cc2166cc898173234aba8a6194 | 301 | py | Python | photo/urls.py | firdausa7/MY-GALLERY | 5d2fe2727d760929800c14c11b0ff4c6d081584b | [
"MIT"
] | null | null | null | photo/urls.py | firdausa7/MY-GALLERY | 5d2fe2727d760929800c14c11b0ff4c6d081584b | [
"MIT"
] | 3 | 2020-06-05T23:24:25.000Z | 2021-06-10T22:02:41.000Z | photo/urls.py | firdausa7/MY-GALLERY | 5d2fe2727d760929800c14c11b0ff4c6d081584b | [
"MIT"
] | null | null | null | from django.conf.urls import url
from . import views
urlpatterns=[
#index path
url('^$', views.index,name='index'),
url('location/', views.category, name='location'),
url('category', views.category, name='category'),
url('search/', views.search_results, name='search_results'),
]
| 27.363636 | 64 | 0.671096 | from django.conf.urls import url
from . import views
urlpatterns=[
url('^$', views.index,name='index'),
url('location/', views.category, name='location'),
url('category', views.category, name='category'),
url('search/', views.search_results, name='search_results'),
]
| true | true |
f7250ff72bb64a4cd0a0a78f2a6db54775d4f74e | 3,253 | py | Python | Python/Police_Car_Game/Main.py | wilsonandusa/wilsonwu | 512214c187550f05497732e943f3323c15caeee0 | [
"Unlicense"
] | null | null | null | Python/Police_Car_Game/Main.py | wilsonandusa/wilsonwu | 512214c187550f05497732e943f3323c15caeee0 | [
"Unlicense"
] | null | null | null | Python/Police_Car_Game/Main.py | wilsonandusa/wilsonwu | 512214c187550f05497732e943f3323c15caeee0 | [
"Unlicense"
] | null | null | null | '''copyright Xiaosheng Wu Python game 12/31/2015'''
import pygame, sys
from classes import *
from process import *
pygame.init()
SCREENWIDTH,SCREENHEIGHT = 767,1257
screen = pygame.display.set_mode((SCREENWIDTH,SCREENHEIGHT)) #zero for the flag 32 for color
BackGround = pygame.image.load("images/bg.png")
Header = pygame.image.load("images/Header.png")
clock = pygame.time.Clock()
FPS = 24 #frames per sec
# Level selector: 0 = horizontal traffic, 1 = vertical traffic,
# 2 = mixed grid of ten cars and three buses.
flag = 2 #randint(0,2) # if 1
total_frames = 0#fivesecondinterval = FPS*5
if flag == 0:
    car1 = Car(500,750,64,32,"images/car1.png")#if flag == 0: # both car horizontal movement
    car2 = Car(300,1000,64,32,"images/car2.png")
    bus = Bus(300,300,100,34,"images/bus.png")
    copcar = Cop(SCREENWIDTH-90,SCREENHEIGHT-90,90,45,"images/cop.png")
elif flag==1:
    car1 = Car(0,700,64,32,"images/car1_down.png")#if flag = 1 # both cars vertical movement
    car2 = Car(200,350,64,32,"images/car2_down.png")
    copcar = Cop(SCREENWIDTH-90,SCREENHEIGHT-90,90,45,"images/cop.png")
    bus = Bus(300,300,100,34,"images/bus_down.png")
elif flag == 2:
    car1 = Car(200,100,64,32,"images/car1.png")#blue car vertical red car horizontal
    car2 = Car(400,300,64,32,"images/car2_down.png")
    car3 = Car(600,500,64,32,"images/car1.png")
    car4 = Car(100,700,64,32,"images/car2_down.png")
    car5 = Car(200,900,64,32,"images/car1.png")
    car6 = Car(300,1100,64,32,"images/car2_down.png")
    car7 = Car(200,900,64,32,"images/car1.png")
    car8 = Car(300,1100,64,32,"images/car2_down.png")
    car9 = Car(200,900,64,32,"images/car1.png")
    car10 = Car(300,1100,64,32,"images/car2_down.png")
    bus1 = Bus(300,300,100,34,"images/bus.png")
    bus2 = Bus(600,300,100,34,"images/bus_down.png")
    bus3 = Bus(100,450,100,34,"images/bus_down.png")
    copcar = Cop(SCREENWIDTH-90,SCREENHEIGHT-90,90,45,"images/cop.png")
#---------------Main Program Loop------------------
# NOTE(review): this loop never pumps pygame's event queue
# (pygame.event.get()), so on many platforms the window is reported as
# unresponsive and cannot be closed cleanly — confirm whether event
# handling lives in process_onecar().
while True:
    #PROCESS: player input / cop behaviour, then projectile movement.
    process_onecar(copcar,FPS,total_frames,flag)
    copProjectile.movement()
    #LOGIC: advance every sprite for the active level.
    if flag==0:
        copcar.motion(SCREENWIDTH,SCREENHEIGHT)
        Car.update_all(SCREENWIDTH,SCREENHEIGHT)
        Car.bothmovement_x(SCREENWIDTH,SCREENHEIGHT)
        Bus.bothmovement_x(SCREENWIDTH,SCREENHEIGHT)
    elif flag==1:
        copcar.motion(SCREENWIDTH,SCREENHEIGHT)
        Car.update_all(SCREENWIDTH,SCREENHEIGHT)
        Car.bothmovement_y(SCREENWIDTH,SCREENHEIGHT)
        Bus.bothmovement_y(SCREENWIDTH,SCREENHEIGHT)
    elif flag == 2:
        copcar.motion(SCREENWIDTH,SCREENHEIGHT)
        Car.update_all(SCREENWIDTH,SCREENHEIGHT)
        car1.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
        car2.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
        car3.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
        car4.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
        car5.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
        car6.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
        car7.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
        car8.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
        car9.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
        car10.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
        bus1.bus_motion_x(SCREENWIDTH,SCREENHEIGHT)
        bus2.bus_motion_y(SCREENWIDTH,SCREENHEIGHT)
        bus3.bus_motion_y(SCREENWIDTH,SCREENHEIGHT)
    #LOGIC
    total_frames+=1
    #DRAW: background, header, all sprites, projectiles, then flip.
    #screen.fill([255,255,255])aaaa
    screen.blit(BackGround,(0,0))
    screen.blit(Header,(0,0))
    BaseClass.allsprites.draw(screen)
    copProjectile.List.draw(screen)
    pygame.display.flip()
    #DRAW
    clock.tick(FPS)
| 37.390805 | 93 | 0.748232 | import pygame, sys
from classes import *
from process import *
pygame.init()
SCREENWIDTH,SCREENHEIGHT = 767,1257
screen = pygame.display.set_mode((SCREENWIDTH,SCREENHEIGHT))
BackGround = pygame.image.load("images/bg.png")
Header = pygame.image.load("images/Header.png")
clock = pygame.time.Clock()
FPS = 24
flag = 2 frames = 0
if flag == 0:
car1 = Car(500,750,64,32,"images/car1.png")mages/car2.png")
bus = Bus(300,300,100,34,"images/bus.png")
copcar = Cop(SCREENWIDTH-90,SCREENHEIGHT-90,90,45,"images/cop.png")
elif flag==1:
car1 = Car(0,700,64,32,"images/car1_down.png")mages/car2_down.png")
copcar = Cop(SCREENWIDTH-90,SCREENHEIGHT-90,90,45,"images/cop.png")
bus = Bus(300,300,100,34,"images/bus_down.png")
elif flag == 2:
car1 = Car(200,100,64,32,"images/car1.png")
car2 = Car(400,300,64,32,"images/car2_down.png")
car3 = Car(600,500,64,32,"images/car1.png")
car4 = Car(100,700,64,32,"images/car2_down.png")
car5 = Car(200,900,64,32,"images/car1.png")
car6 = Car(300,1100,64,32,"images/car2_down.png")
car7 = Car(200,900,64,32,"images/car1.png")
car8 = Car(300,1100,64,32,"images/car2_down.png")
car9 = Car(200,900,64,32,"images/car1.png")
car10 = Car(300,1100,64,32,"images/car2_down.png")
bus1 = Bus(300,300,100,34,"images/bus.png")
bus2 = Bus(600,300,100,34,"images/bus_down.png")
bus3 = Bus(100,450,100,34,"images/bus_down.png")
copcar = Cop(SCREENWIDTH-90,SCREENHEIGHT-90,90,45,"images/cop.png")
while True:
process_onecar(copcar,FPS,total_frames,flag)
copProjectile.movement()
if flag==0:
copcar.motion(SCREENWIDTH,SCREENHEIGHT)
Car.update_all(SCREENWIDTH,SCREENHEIGHT)
Car.bothmovement_x(SCREENWIDTH,SCREENHEIGHT)
Bus.bothmovement_x(SCREENWIDTH,SCREENHEIGHT)
elif flag==1:
copcar.motion(SCREENWIDTH,SCREENHEIGHT)
Car.update_all(SCREENWIDTH,SCREENHEIGHT)
Car.bothmovement_y(SCREENWIDTH,SCREENHEIGHT)
Bus.bothmovement_y(SCREENWIDTH,SCREENHEIGHT)
elif flag == 2:
copcar.motion(SCREENWIDTH,SCREENHEIGHT)
Car.update_all(SCREENWIDTH,SCREENHEIGHT)
car1.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
car2.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
car3.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
car4.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
car5.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
car6.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
car7.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
car8.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
car9.car_motion_x(SCREENWIDTH,SCREENHEIGHT)
car10.car_motion_y(SCREENWIDTH,SCREENHEIGHT)
bus1.bus_motion_x(SCREENWIDTH,SCREENHEIGHT)
bus2.bus_motion_y(SCREENWIDTH,SCREENHEIGHT)
bus3.bus_motion_y(SCREENWIDTH,SCREENHEIGHT)
total_frames+=1
screen.blit(BackGround,(0,0))
screen.blit(Header,(0,0))
BaseClass.allsprites.draw(screen)
copProjectile.List.draw(screen)
pygame.display.flip()
clock.tick(FPS)
| true | true |
f7251086cbee9232ee1a4c2ae76bb737b8cda266 | 1,378 | py | Python | backend-project/small_eod/letters/migrations/0004_auto_20200221_1956.py | WlodzimierzKorza/small_eod | 027022bd71122a949a2787d0fb86518df80e48cd | [
"MIT"
] | 64 | 2019-12-30T11:24:03.000Z | 2021-06-24T01:04:56.000Z | backend-project/small_eod/letters/migrations/0004_auto_20200221_1956.py | WlodzimierzKorza/small_eod | 027022bd71122a949a2787d0fb86518df80e48cd | [
"MIT"
] | 465 | 2018-06-13T21:43:43.000Z | 2022-01-04T23:33:56.000Z | backend-project/small_eod/letters/migrations/0004_auto_20200221_1956.py | WlodzimierzKorza/small_eod | 027022bd71122a949a2787d0fb86518df80e48cd | [
"MIT"
] | 72 | 2018-12-02T19:47:03.000Z | 2022-01-04T22:54:49.000Z | # Generated by Django 3.0.3 on 2020-02-21 19:56
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('letters', '0003_auto_20200110_0200'),
]
operations = [
migrations.AlterField(
model_name='letter',
name='created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='letter_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created by'),
),
migrations.AlterField(
model_name='letter',
name='created_on',
field=models.DateTimeField(auto_now_add=True, verbose_name='Date of creation'),
),
migrations.AlterField(
model_name='letter',
name='modified_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='letter_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified by'),
),
migrations.AlterField(
model_name='letter',
name='modified_on',
field=models.DateTimeField(auto_now=True, verbose_name='Date of the modification'),
),
]
| 37.243243 | 199 | 0.659652 |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('letters', '0003_auto_20200110_0200'),
]
operations = [
migrations.AlterField(
model_name='letter',
name='created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='letter_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created by'),
),
migrations.AlterField(
model_name='letter',
name='created_on',
field=models.DateTimeField(auto_now_add=True, verbose_name='Date of creation'),
),
migrations.AlterField(
model_name='letter',
name='modified_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='letter_modified_by', to=settings.AUTH_USER_MODEL, verbose_name='Modified by'),
),
migrations.AlterField(
model_name='letter',
name='modified_on',
field=models.DateTimeField(auto_now=True, verbose_name='Date of the modification'),
),
]
| true | true |
f725114cc0cb5e35486379975f0d3386787546b9 | 1,736 | py | Python | Labs/Lab 1 Midway RCC and mpi4py/mpi_rand_walk.py | cindychu/LargeScaleComputing_S20 | 913b0155f47914c258b503df677067a510dd23f5 | [
"MIT"
] | null | null | null | Labs/Lab 1 Midway RCC and mpi4py/mpi_rand_walk.py | cindychu/LargeScaleComputing_S20 | 913b0155f47914c258b503df677067a510dd23f5 | [
"MIT"
] | null | null | null | Labs/Lab 1 Midway RCC and mpi4py/mpi_rand_walk.py | cindychu/LargeScaleComputing_S20 | 913b0155f47914c258b503df677067a510dd23f5 | [
"MIT"
] | null | null | null |
from mpi4py import MPI
import matplotlib.pyplot as plt
import numpy as np
import time
def sim_rand_walks_parallel(n_runs):
# Get rank of process and overall size of communicator:
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
# Start time:
t0 = time.time()
# Evenly distribute number of simulation runs across processes
N = int(n_runs/size)
# Simulate N random walks and specify as a NumPy Array
r_walks = []
for i in range(N):
steps = np.random.normal(loc=0, scale=1, size=100)
steps[0] = 0
r_walks.append(100 + np.cumsum(steps))
r_walks_array = np.array(r_walks)
# Gather all simulation arrays to buffer of expected size/dtype on rank 0
r_walks_all = None
if rank == 0:
r_walks_all = np.empty([N*size, 100], dtype='float')
comm.Gather(sendbuf = r_walks_array, recvbuf = r_walks_all, root=0)
# Print/plot simulation results on rank 0
if rank == 0:
# Calculate time elapsed after computing mean and std
average_finish = np.mean(r_walks_all[:,-1])
std_finish = np.std(r_walks_all[:,-1])
time_elapsed = time.time() - t0
# Print time elapsed + simulation results
print("Simulated %d Random Walks in: %f seconds on %d MPI processes"
% (n_runs, time_elapsed, size))
print("Average final position: %f, Standard Deviation: %f"
% (average_finish, std_finish))
# Plot Simulations and save to file
plt.plot(r_walks_all.transpose())
plt.savefig("r_walk_nprocs%d_nruns%d.png" % (size, n_runs))
return
def main():
    # Entry point: run the parallel simulation with the default workload.
    sim_rand_walks_parallel(n_runs = 10000)

if __name__ == '__main__':
    main()
| 30.45614 | 77 | 0.645161 |
from mpi4py import MPI
import matplotlib.pyplot as plt
import numpy as np
import time
def sim_rand_walks_parallel(n_runs):
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
t0 = time.time()
N = int(n_runs/size)
r_walks = []
for i in range(N):
steps = np.random.normal(loc=0, scale=1, size=100)
steps[0] = 0
r_walks.append(100 + np.cumsum(steps))
r_walks_array = np.array(r_walks)
r_walks_all = None
if rank == 0:
r_walks_all = np.empty([N*size, 100], dtype='float')
comm.Gather(sendbuf = r_walks_array, recvbuf = r_walks_all, root=0)
if rank == 0:
average_finish = np.mean(r_walks_all[:,-1])
std_finish = np.std(r_walks_all[:,-1])
time_elapsed = time.time() - t0
print("Simulated %d Random Walks in: %f seconds on %d MPI processes"
% (n_runs, time_elapsed, size))
print("Average final position: %f, Standard Deviation: %f"
% (average_finish, std_finish))
plt.plot(r_walks_all.transpose())
plt.savefig("r_walk_nprocs%d_nruns%d.png" % (size, n_runs))
return
def main():
sim_rand_walks_parallel(n_runs = 10000)
if __name__ == '__main__':
main()
| true | true |
f72511a3099af2e0476081a70e6b3d479159a8c0 | 1,950 | py | Python | tests/testhelpers/override_testhelper_err2.py | dbarnett/pytypes | da056359a8d1dad174316195830a1cb0574893af | [
"Apache-2.0"
] | 189 | 2016-09-17T13:45:58.000Z | 2022-03-12T10:53:42.000Z | tests/testhelpers/override_testhelper_err2.py | dbarnett/pytypes | da056359a8d1dad174316195830a1cb0574893af | [
"Apache-2.0"
] | 104 | 2017-02-23T16:43:18.000Z | 2022-03-17T17:36:18.000Z | tests/testhelpers/override_testhelper_err2.py | dbarnett/pytypes | da056359a8d1dad174316195830a1cb0574893af | [
"Apache-2.0"
] | 21 | 2017-02-17T08:05:12.000Z | 2021-12-08T11:22:15.000Z | # Copyright 2017 Stefan Richthofer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Created on 01.12.2016
"""
Designed to cause a NameError on import.
(unless typechecker.check_override_at_runtime == False)
"""
from pytypes import override
class TestClass():
    # Base class for the pytypes @override fixtures below. The '# type:'
    # comments are read by pytypes at runtime — do not alter them.
    def test_meth0(self, a):
        # type: (int) -> str
        pass
    def test_meth1(self, a):
        # type: (TestArg2) -> str
        pass
    def test_meth2(self, a):
        # type: (int) -> TestResult1
        pass
class TestClass2(TestClass):
    # Overrides with contravariant arg / covariant return types; part of a
    # deliberately failing fixture (see module docstring) — do not "fix".
    @override
    def test_meth0(self, a):
        # type: (int) -> str
        pass
    @override
    def test_meth1(self, a):
        # type: (TestArg1) -> str
        pass
    @override
    def test_meth2(self, a):
        # type: (int) -> TestResult2
        pass
class TestClass3(TestClass):
    # NOTE: shadowed by the second 'class TestClass3' definition further
    # down; kept deliberately as part of the error-producing fixture.
    @override
    def test_meth1(self, a):
        # type: (TestArg1) -> str
        pass
    @override
    def test_meth2(self, a):
        # type: (int) -> TestResult2
        pass
class TestArg1():
    # Marker argument type; TestArg2 below subclasses it.
    pass
class TestResult1():
    # Marker return type; TestResult2 below subclasses it.
    pass
class TestClass3(TestClass):
    # Redefinition of TestClass3 using argument-level type comments. The
    # 'strr' return annotation is the deliberate unresolved name that makes
    # pytypes raise NameError on import when override checking is active
    # (see module docstring) — do not correct it.
    @override
    def test_meth1(self,
                   a # type: TestArg1
                   ):
        # type: (...) -> strr
        pass
    @override
    def test_meth2(self,
                   a # type: int
                   ):
        # type: (...) -> TestResult2
        pass
class TestArg2(TestArg1):
    # Subtype of TestArg1; referenced (before definition) in type comments above.
    pass
class TestResult2(TestResult1):
    # Subtype of TestResult1; referenced (before definition) in type comments above.
    pass
| 21.428571 | 74 | 0.606667 |
from pytypes import override
class TestClass():
def test_meth0(self, a):
pass
def test_meth1(self, a):
pass
def test_meth2(self, a):
pass
class TestClass2(TestClass):
@override
def test_meth0(self, a):
pass
@override
def test_meth1(self, a):
pass
@override
def test_meth2(self, a):
pass
class TestClass3(TestClass):
@override
def test_meth1(self, a):
pass
@override
def test_meth2(self, a):
pass
class TestArg1():
pass
class TestResult1():
pass
class TestClass3(TestClass):
@override
def test_meth1(self,
a
):
pass
@override
def test_meth2(self,
a
):
pass
class TestArg2(TestArg1):
pass
class TestResult2(TestResult1):
pass
| true | true |
f72512165bd2c1034b3a55e9374f6cdaed5ced1b | 2,873 | py | Python | release/stubs.min/System/Windows/Forms/__init___parts/LinkClickedEventHandler.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 182 | 2017-06-27T02:26:15.000Z | 2022-03-30T18:53:43.000Z | release/stubs.min/System/Windows/Forms/__init___parts/LinkClickedEventHandler.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 28 | 2017-06-27T13:38:23.000Z | 2022-03-15T11:19:44.000Z | release/stubs.min/System/Windows/Forms/__init___parts/LinkClickedEventHandler.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 67 | 2017-06-28T09:43:59.000Z | 2022-03-20T21:17:10.000Z | class LinkClickedEventHandler(MulticastDelegate,ICloneable,ISerializable):
"""
Represents the method that will handle the System.Windows.Forms.RichTextBox.LinkClicked event of a System.Windows.Forms.RichTextBox.
LinkClickedEventHandler(object: object,method: IntPtr)
"""
def BeginInvoke(self,sender,e,callback,object):
""" BeginInvoke(self: LinkClickedEventHandler,sender: object,e: LinkClickedEventArgs,callback: AsyncCallback,object: object) -> IAsyncResult """
pass
def CombineImpl(self,*args):
"""
CombineImpl(self: MulticastDelegate,follow: Delegate) -> Delegate
Combines this System.Delegate with the specified System.Delegate to form a new delegate.
follow: The delegate to combine with this delegate.
Returns: A delegate that is the new root of the System.MulticastDelegate invocation list.
"""
pass
def DynamicInvokeImpl(self,*args):
"""
DynamicInvokeImpl(self: Delegate,args: Array[object]) -> object
Dynamically invokes (late-bound) the method represented by the current delegate.
args: An array of objects that are the arguments to pass to the method represented by the current
delegate.-or- null,if the method represented by the current delegate does not require
arguments.
Returns: The object returned by the method represented by the delegate.
"""
pass
def EndInvoke(self,result):
""" EndInvoke(self: LinkClickedEventHandler,result: IAsyncResult) """
pass
def GetMethodImpl(self,*args):
"""
GetMethodImpl(self: MulticastDelegate) -> MethodInfo
Returns a static method represented by the current System.MulticastDelegate.
Returns: A static method represented by the current System.MulticastDelegate.
"""
pass
def Invoke(self,sender,e):
""" Invoke(self: LinkClickedEventHandler,sender: object,e: LinkClickedEventArgs) """
pass
def RemoveImpl(self,*args):
"""
RemoveImpl(self: MulticastDelegate,value: Delegate) -> Delegate
Removes an element from the invocation list of this System.MulticastDelegate that is equal to
the specified delegate.
value: The delegate to search for in the invocation list.
Returns: If value is found in the invocation list for this instance,then a new System.Delegate without
value in its invocation list; otherwise,this instance with its original invocation list.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,object,method):
""" __new__(cls: type,object: object,method: IntPtr) """
pass
def __reduce_ex__(self,*args):
pass
| 30.242105 | 215 | 0.719457 | class LinkClickedEventHandler(MulticastDelegate,ICloneable,ISerializable):
def BeginInvoke(self,sender,e,callback,object):
pass
def CombineImpl(self,*args):
pass
def DynamicInvokeImpl(self,*args):
pass
def EndInvoke(self,result):
pass
def GetMethodImpl(self,*args):
pass
def Invoke(self,sender,e):
pass
def RemoveImpl(self,*args):
pass
def __init__(self,*args):
pass
@staticmethod
def __new__(self,object,method):
pass
def __reduce_ex__(self,*args):
pass
| true | true |
f7251343722125f996eafb8fe399af9c3bd16ad4 | 694 | py | Python | cctbx/eltbx/tests/tst_wavelengths.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | cctbx/eltbx/tests/tst_wavelengths.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | cctbx/eltbx/tests/tst_wavelengths.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | from __future__ import division
from cctbx.eltbx import wavelengths
from libtbx.test_utils import approx_equal
def exercise():
from cctbx import factor_kev_angstrom
w = wavelengths.characteristic("CU")
assert w.label() == "Cu"
assert approx_equal(w.as_angstrom(), 1.5418)
assert approx_equal(w.as_kev(), factor_kev_angstrom / 1.5418)
assert approx_equal(w.as_ev() / 1000, factor_kev_angstrom / 1.5418)
n = 0
for w in wavelengths.characteristic_iterator():
n += 1
uu = wavelengths.characteristic(w.label())
assert uu.label() == w.label()
assert uu.as_ev() == w.as_ev()
assert n == 15
def run():
    exercise()
    print "OK"  # Python 2 print statement: this module predates Python 3

if (__name__ == "__main__"):
    run()
| 26.692308 | 69 | 0.700288 | from __future__ import division
from cctbx.eltbx import wavelengths
from libtbx.test_utils import approx_equal
def exercise():
from cctbx import factor_kev_angstrom
w = wavelengths.characteristic("CU")
assert w.label() == "Cu"
assert approx_equal(w.as_angstrom(), 1.5418)
assert approx_equal(w.as_kev(), factor_kev_angstrom / 1.5418)
assert approx_equal(w.as_ev() / 1000, factor_kev_angstrom / 1.5418)
n = 0
for w in wavelengths.characteristic_iterator():
n += 1
uu = wavelengths.characteristic(w.label())
assert uu.label() == w.label()
assert uu.as_ev() == w.as_ev()
assert n == 15
def run():
exercise()
print "OK"
if (__name__ == "__main__"):
run()
| false | true |
f72513565d42f73aae9ae75dc0d14b21b6416c46 | 318 | py | Python | hossein/university/595/595_v2.py | mhdehghan/quera-answers | 28dc0a9dbe3697593d7cbe05c9f05db2d3b01790 | [
"MIT"
] | null | null | null | hossein/university/595/595_v2.py | mhdehghan/quera-answers | 28dc0a9dbe3697593d7cbe05c9f05db2d3b01790 | [
"MIT"
] | null | null | null | hossein/university/595/595_v2.py | mhdehghan/quera-answers | 28dc0a9dbe3697593d7cbe05c9f05db2d3b01790 | [
"MIT"
] | null | null | null | plus = lambda x, y: x + y
current_list = [0, 1]
next_list = []
n = int(input())
if n > 0: print(1)
for i in range(n-1):
current_list.append(0)
next_list = list(map(plus, current_list[1:], current_list))
print(*next_list,sep=' ')
current_list = next_list
current_list.insert(0, 0)
next_list = [] | 26.5 | 63 | 0.628931 | plus = lambda x, y: x + y
current_list = [0, 1]
next_list = []
n = int(input())
if n > 0: print(1)
for i in range(n-1):
current_list.append(0)
next_list = list(map(plus, current_list[1:], current_list))
print(*next_list,sep=' ')
current_list = next_list
current_list.insert(0, 0)
next_list = [] | true | true |
f7251431a4069a8242c3b58bab2e52b693aa37b9 | 810 | py | Python | safe_transaction_service/tokens/tasks.py | cryptopossum/safe-transaction-service | 38069e5f4514be51c6f14e395a135d03f0c03887 | [
"MIT"
] | null | null | null | safe_transaction_service/tokens/tasks.py | cryptopossum/safe-transaction-service | 38069e5f4514be51c6f14e395a135d03f0c03887 | [
"MIT"
] | null | null | null | safe_transaction_service/tokens/tasks.py | cryptopossum/safe-transaction-service | 38069e5f4514be51c6f14e395a135d03f0c03887 | [
"MIT"
] | 1 | 2021-06-09T06:20:49.000Z | 2021-06-09T06:20:49.000Z | from typing import Optional
from celery import app
from celery.utils.log import get_task_logger
from gnosis.eth import EthereumClientProvider
from gnosis.eth.ethereum_client import EthereumNetwork
from safe_transaction_service.history.utils import close_gevent_db_connection
from .models import Token
logger = get_task_logger(__name__)
@app.shared_task()
def fix_pool_tokens_task() -> Optional[int]:
    """Celery task that fixes pool token names (mainnet only).

    Returns the number of tokens fixed when at least one was fixed;
    otherwise returns None (non-mainnet networks, or a zero count,
    since ``return number`` is only reached for a truthy count).
    """
    ethereum_client = EthereumClientProvider()
    ethereum_network = ethereum_client.get_network()
    if ethereum_network == EthereumNetwork.MAINNET:
        try:
            number = Token.pool_tokens.fix_all_pool_tokens()
            if number:
                logger.info('%d pool token names were fixed', number)
                return number
        finally:
            # Runs on every mainnet invocation, even when nothing was
            # fixed, so the gevent-held DB connection is always released.
            close_gevent_db_connection()
| 28.928571 | 77 | 0.738272 | from typing import Optional
from celery import app
from celery.utils.log import get_task_logger
from gnosis.eth import EthereumClientProvider
from gnosis.eth.ethereum_client import EthereumNetwork
from safe_transaction_service.history.utils import close_gevent_db_connection
from .models import Token
logger = get_task_logger(__name__)
@app.shared_task()
def fix_pool_tokens_task() -> Optional[int]:
ethereum_client = EthereumClientProvider()
ethereum_network = ethereum_client.get_network()
if ethereum_network == EthereumNetwork.MAINNET:
try:
number = Token.pool_tokens.fix_all_pool_tokens()
if number:
logger.info('%d pool token names were fixed', number)
return number
finally:
close_gevent_db_connection()
| true | true |
f7251634c09abfd3f03813bfef073fd95ca209ef | 9,885 | py | Python | amqpstorm/tests/functional/management/test_queue.py | ZygusPatryk/amqpstorm | 0f3ad84a529f12769d34638a88c38f3055cb05cd | [
"MIT"
] | 140 | 2016-06-07T18:53:57.000Z | 2022-03-23T01:50:15.000Z | amqpstorm/tests/functional/management/test_queue.py | ZygusPatryk/amqpstorm | 0f3ad84a529f12769d34638a88c38f3055cb05cd | [
"MIT"
] | 85 | 2016-04-11T23:32:32.000Z | 2022-03-19T07:21:21.000Z | amqpstorm/tests/functional/management/test_queue.py | ZygusPatryk/amqpstorm | 0f3ad84a529f12769d34638a88c38f3055cb05cd | [
"MIT"
] | 38 | 2016-04-20T20:21:13.000Z | 2022-03-23T05:31:58.000Z | from amqpstorm.management import ApiError
from amqpstorm.management import ManagementApi
from amqpstorm.tests import HTTP_URL
from amqpstorm.tests import PASSWORD
from amqpstorm.tests import USERNAME
from amqpstorm.tests.functional.utility import TestFunctionalFramework
from amqpstorm.tests.functional.utility import setup
class ApiQueueFunctionalTests(TestFunctionalFramework):
    """Functional tests for the management HTTP API queue operations.

    These run against a live RabbitMQ management endpoint reachable at
    HTTP_URL with USERNAME/PASSWORD.  The @setup decorator provides the
    per-test queue name as ``self.queue_name`` (and optionally
    pre-creates the queue / a fresh connection).
    """
    @setup(queue=False)
    def test_api_queue_get(self):
        """A declared queue is retrievable and has the expected keys."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)
        queue = api.queue.get(self.queue_name)
        self.assertIsInstance(queue, dict)
        self.assertIn('name', queue)
        self.assertIn('auto_delete', queue)
    @setup(queue=False)
    def test_api_queue_list(self):
        """Listing returns dict payloads with the standard queue fields."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)
        queues = api.queue.list()
        self.assertIsInstance(queues, list)
        self.assertGreater(len(queues), 0)
        for queue in queues:
            self.assertIsInstance(queue, dict)
            self.assertIn('name', queue)
            self.assertIn('vhost', queue)
            self.assertIn('node', queue)
            self.assertIn('durable', queue)
            self.assertIn('arguments', queue)
            self.assertIn('auto_delete', queue)
    @setup(queue=False)
    def test_api_queue_list_pagination(self):
        """list() transparently pages: 33 queues, page_size=3, all returned."""
        queues_to_create = 33
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        # use a throwaway vhost (named after the queue) to isolate the count
        api.virtual_host.create(self.queue_name)
        try:
            for index in range(queues_to_create):
                api.queue.declare(
                    'pagination-%d' % (index + 1), virtual_host=self.queue_name
                )
            queues = api.queue.list(
                name='pagination-',
                page_size=3,
                virtual_host=self.queue_name
            )
        finally:
            for index in range(queues_to_create):
                api.queue.delete(
                    'pagination-%d' % (index + 1), virtual_host=self.queue_name
                )
            self.api.virtual_host.delete(self.queue_name)
        self.assertIsInstance(queues, list)
        self.assertEqual(len(queues), queues_to_create)
    @setup(queue=False)
    def test_api_queue_list_no_pagination(self):
        """page_size=None disables pagination and still returns everything."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.virtual_host.create(self.queue_name)
        try:
            api.queue.declare('abc', virtual_host=self.queue_name)
            api.queue.declare('def', virtual_host=self.queue_name)
            api.queue.declare('ghi', virtual_host=self.queue_name)
            queues = api.queue.list(
                page_size=None, virtual_host=self.queue_name
            )
        finally:
            api.queue.delete('abc', virtual_host=self.queue_name)
            api.queue.delete('def', virtual_host=self.queue_name)
            api.queue.delete('ghi', virtual_host=self.queue_name)
            self.api.virtual_host.delete(self.queue_name)
        self.assertIsInstance(queues, list)
        self.assertEqual(len(queues), 3)
    @setup(queue=False)
    def test_api_queue_list_filter_with_regex(self):
        """Regex name filter (use_regex as the string 'true') matches one queue."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.virtual_host.create(self.queue_name)
        try:
            api.queue.declare('abc', virtual_host=self.queue_name)
            api.queue.declare('def', virtual_host=self.queue_name)
            api.queue.declare('ghi', virtual_host=self.queue_name)
            queues = api.queue.list(name='^ab', use_regex='true',
                                    virtual_host=self.queue_name)
        finally:
            api.queue.delete('abc', virtual_host=self.queue_name)
            api.queue.delete('def', virtual_host=self.queue_name)
            api.queue.delete('ghi', virtual_host=self.queue_name)
            self.api.virtual_host.delete(self.queue_name)
        self.assertIsInstance(queues, list)
        self.assertEqual(len(queues), 1)
    @setup(queue=False)
    def test_api_queue_list_filter_with_regex_boolean(self):
        """Regex name filter also accepts a Python bool for use_regex."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.virtual_host.create(self.queue_name)
        try:
            api.queue.declare('abc', virtual_host=self.queue_name)
            api.queue.declare('def', virtual_host=self.queue_name)
            api.queue.declare('ghi', virtual_host=self.queue_name)
            queues = api.queue.list(name='^ab', use_regex=True,
                                    virtual_host=self.queue_name)
        finally:
            api.queue.delete('abc', virtual_host=self.queue_name)
            api.queue.delete('def', virtual_host=self.queue_name)
            api.queue.delete('ghi', virtual_host=self.queue_name)
            self.api.virtual_host.delete(self.queue_name)
        self.assertIsInstance(queues, list)
        self.assertEqual(len(queues), 1)
    @setup(queue=False)
    def test_api_queue_list_filter_without_regex(self):
        """Plain substring name filter (use_regex=False) matches one queue."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.virtual_host.create(self.queue_name)
        try:
            api.queue.declare('abc', virtual_host=self.queue_name)
            api.queue.declare('def', virtual_host=self.queue_name)
            api.queue.declare('ghi', virtual_host=self.queue_name)
            queues = api.queue.list(name='ab', use_regex=False,
                                    virtual_host=self.queue_name)
        finally:
            api.queue.delete('abc', virtual_host=self.queue_name)
            api.queue.delete('def', virtual_host=self.queue_name)
            api.queue.delete('ghi', virtual_host=self.queue_name)
            self.api.virtual_host.delete(self.queue_name)
        self.assertIsInstance(queues, list)
        self.assertEqual(len(queues), 1)
    @setup(queue=False)
    def test_api_queue_list_all(self):
        """show_all=True lists queues across every virtual host."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)
        queues = api.queue.list(show_all=True)
        self.assertIsInstance(queues, list)
        self.assertGreater(len(queues), 0)
        for queue in queues:
            self.assertIsInstance(queue, dict)
            self.assertIn('name', queue)
            self.assertIn('vhost', queue)
            self.assertIn('node', queue)
            self.assertIn('durable', queue)
            self.assertIn('arguments', queue)
            self.assertIn('auto_delete', queue)
    @setup(queue=False)
    def test_api_queue_declare(self):
        """declare() returns None and the created queue has the set flags."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        self.assertIsNone(api.queue.declare(self.queue_name, durable=True))
        result = api.queue.get(self.queue_name)
        self.assertIsInstance(result, dict)
        self.assertEqual(result['name'], self.queue_name)
        self.assertEqual(result['auto_delete'], False)
        self.assertEqual(result['durable'], True)
    @setup(new_connection=False)
    def test_api_queue_declare_passive(self):
        """Passive declare of a missing queue raises ApiError 404 NOT-FOUND."""
        expected_error_message = (
            'NOT-FOUND - The client attempted to work '
            'with a server entity that does not exist.'
        )
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        try:
            api.queue.declare(self.queue_name, passive=True)
        except ApiError as why:
            self.assertEqual(str(why), expected_error_message)
            self.assertEqual(why.error_type, 'NOT-FOUND')
            self.assertEqual(why.error_code, 404)
    @setup(new_connection=False)
    def test_api_queue_declare_passive_exists(self):
        """Passive declare of an existing queue returns its payload."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)
        self.assertIsNotNone(api.queue.declare(self.queue_name, passive=True))
    @setup(new_connection=False)
    def test_api_queue_delete(self):
        """After delete(), a passive declare raises 404."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        try:
            api.queue.declare(self.queue_name, durable=True)
            self.assertIsInstance(api.queue.get(self.queue_name), dict)
        finally:
            api.queue.delete(self.queue_name)
        try:
            api.queue.declare(self.queue_name, passive=True)
        except ApiError as why:
            self.assertEqual(why.error_code, 404)
    @setup(queue=True)
    def test_api_queue_purge(self):
        """purge() on an existing queue succeeds (returns None)."""
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)
        self.assertIsNone(api.queue.purge(self.queue_name))
    @setup(queue=True)
    def test_api_queue_bind(self):
        """bind() adds exactly one binding to the queue."""
        exchange_name = 'amq.direct'
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)
        bindings = len(api.queue.bindings(self.queue_name))
        self.assertIsNone(api.queue.bind(queue=self.queue_name,
                                         exchange=exchange_name,
                                         routing_key=self.queue_name,
                                         arguments=None))
        self.assertEqual(len(api.queue.bindings(self.queue_name)),
                         bindings + 1)
    @setup(queue=True)
    def test_api_queue_unbind(self):
        """unbind() removes the binding added by bind(), restoring the count."""
        exchange_name = 'amq.direct'
        api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
        api.queue.declare(self.queue_name)
        bindings = len(api.queue.bindings(self.queue_name))
        api.queue.bind(queue=self.queue_name, exchange=exchange_name,
                       routing_key=self.queue_name, arguments=None)
        self.assertEqual(len(api.queue.bindings(self.queue_name)),
                         bindings + 1)
        self.assertIsNone(api.queue.unbind(queue=self.queue_name,
                                           exchange=exchange_name,
                                           routing_key=self.queue_name))
        self.assertEqual(len(api.queue.bindings(self.queue_name)), bindings)
| 36.884328 | 79 | 0.628123 | from amqpstorm.management import ApiError
from amqpstorm.management import ManagementApi
from amqpstorm.tests import HTTP_URL
from amqpstorm.tests import PASSWORD
from amqpstorm.tests import USERNAME
from amqpstorm.tests.functional.utility import TestFunctionalFramework
from amqpstorm.tests.functional.utility import setup
class ApiQueueFunctionalTests(TestFunctionalFramework):
@setup(queue=False)
def test_api_queue_get(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.queue.declare(self.queue_name)
queue = api.queue.get(self.queue_name)
self.assertIsInstance(queue, dict)
self.assertIn('name', queue)
self.assertIn('auto_delete', queue)
@setup(queue=False)
def test_api_queue_list(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.queue.declare(self.queue_name)
queues = api.queue.list()
self.assertIsInstance(queues, list)
self.assertGreater(len(queues), 0)
for queue in queues:
self.assertIsInstance(queue, dict)
self.assertIn('name', queue)
self.assertIn('vhost', queue)
self.assertIn('node', queue)
self.assertIn('durable', queue)
self.assertIn('arguments', queue)
self.assertIn('auto_delete', queue)
@setup(queue=False)
def test_api_queue_list_pagination(self):
queues_to_create = 33
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.virtual_host.create(self.queue_name)
try:
for index in range(queues_to_create):
api.queue.declare(
'pagination-%d' % (index + 1), virtual_host=self.queue_name
)
queues = api.queue.list(
name='pagination-',
page_size=3,
virtual_host=self.queue_name
)
finally:
for index in range(queues_to_create):
api.queue.delete(
'pagination-%d' % (index + 1), virtual_host=self.queue_name
)
self.api.virtual_host.delete(self.queue_name)
self.assertIsInstance(queues, list)
self.assertEqual(len(queues), queues_to_create)
@setup(queue=False)
def test_api_queue_list_no_pagination(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.virtual_host.create(self.queue_name)
try:
api.queue.declare('abc', virtual_host=self.queue_name)
api.queue.declare('def', virtual_host=self.queue_name)
api.queue.declare('ghi', virtual_host=self.queue_name)
queues = api.queue.list(
page_size=None, virtual_host=self.queue_name
)
finally:
api.queue.delete('abc', virtual_host=self.queue_name)
api.queue.delete('def', virtual_host=self.queue_name)
api.queue.delete('ghi', virtual_host=self.queue_name)
self.api.virtual_host.delete(self.queue_name)
self.assertIsInstance(queues, list)
self.assertEqual(len(queues), 3)
@setup(queue=False)
def test_api_queue_list_filter_with_regex(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.virtual_host.create(self.queue_name)
try:
api.queue.declare('abc', virtual_host=self.queue_name)
api.queue.declare('def', virtual_host=self.queue_name)
api.queue.declare('ghi', virtual_host=self.queue_name)
queues = api.queue.list(name='^ab', use_regex='true',
virtual_host=self.queue_name)
finally:
api.queue.delete('abc', virtual_host=self.queue_name)
api.queue.delete('def', virtual_host=self.queue_name)
api.queue.delete('ghi', virtual_host=self.queue_name)
self.api.virtual_host.delete(self.queue_name)
self.assertIsInstance(queues, list)
self.assertEqual(len(queues), 1)
@setup(queue=False)
def test_api_queue_list_filter_with_regex_boolean(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.virtual_host.create(self.queue_name)
try:
api.queue.declare('abc', virtual_host=self.queue_name)
api.queue.declare('def', virtual_host=self.queue_name)
api.queue.declare('ghi', virtual_host=self.queue_name)
queues = api.queue.list(name='^ab', use_regex=True,
virtual_host=self.queue_name)
finally:
api.queue.delete('abc', virtual_host=self.queue_name)
api.queue.delete('def', virtual_host=self.queue_name)
api.queue.delete('ghi', virtual_host=self.queue_name)
self.api.virtual_host.delete(self.queue_name)
self.assertIsInstance(queues, list)
self.assertEqual(len(queues), 1)
@setup(queue=False)
def test_api_queue_list_filter_without_regex(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.virtual_host.create(self.queue_name)
try:
api.queue.declare('abc', virtual_host=self.queue_name)
api.queue.declare('def', virtual_host=self.queue_name)
api.queue.declare('ghi', virtual_host=self.queue_name)
queues = api.queue.list(name='ab', use_regex=False,
virtual_host=self.queue_name)
finally:
api.queue.delete('abc', virtual_host=self.queue_name)
api.queue.delete('def', virtual_host=self.queue_name)
api.queue.delete('ghi', virtual_host=self.queue_name)
self.api.virtual_host.delete(self.queue_name)
self.assertIsInstance(queues, list)
self.assertEqual(len(queues), 1)
@setup(queue=False)
def test_api_queue_list_all(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.queue.declare(self.queue_name)
queues = api.queue.list(show_all=True)
self.assertIsInstance(queues, list)
self.assertGreater(len(queues), 0)
for queue in queues:
self.assertIsInstance(queue, dict)
self.assertIn('name', queue)
self.assertIn('vhost', queue)
self.assertIn('node', queue)
self.assertIn('durable', queue)
self.assertIn('arguments', queue)
self.assertIn('auto_delete', queue)
@setup(queue=False)
def test_api_queue_declare(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
self.assertIsNone(api.queue.declare(self.queue_name, durable=True))
result = api.queue.get(self.queue_name)
self.assertIsInstance(result, dict)
self.assertEqual(result['name'], self.queue_name)
self.assertEqual(result['auto_delete'], False)
self.assertEqual(result['durable'], True)
@setup(new_connection=False)
def test_api_queue_declare_passive(self):
expected_error_message = (
'NOT-FOUND - The client attempted to work '
'with a server entity that does not exist.'
)
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
try:
api.queue.declare(self.queue_name, passive=True)
except ApiError as why:
self.assertEqual(str(why), expected_error_message)
self.assertEqual(why.error_type, 'NOT-FOUND')
self.assertEqual(why.error_code, 404)
@setup(new_connection=False)
def test_api_queue_declare_passive_exists(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.queue.declare(self.queue_name)
self.assertIsNotNone(api.queue.declare(self.queue_name, passive=True))
@setup(new_connection=False)
def test_api_queue_delete(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
try:
api.queue.declare(self.queue_name, durable=True)
self.assertIsInstance(api.queue.get(self.queue_name), dict)
finally:
api.queue.delete(self.queue_name)
try:
api.queue.declare(self.queue_name, passive=True)
except ApiError as why:
self.assertEqual(why.error_code, 404)
@setup(queue=True)
def test_api_queue_purge(self):
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.queue.declare(self.queue_name)
self.assertIsNone(api.queue.purge(self.queue_name))
@setup(queue=True)
def test_api_queue_bind(self):
exchange_name = 'amq.direct'
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.queue.declare(self.queue_name)
bindings = len(api.queue.bindings(self.queue_name))
self.assertIsNone(api.queue.bind(queue=self.queue_name,
exchange=exchange_name,
routing_key=self.queue_name,
arguments=None))
self.assertEqual(len(api.queue.bindings(self.queue_name)),
bindings + 1)
@setup(queue=True)
def test_api_queue_unbind(self):
exchange_name = 'amq.direct'
api = ManagementApi(HTTP_URL, USERNAME, PASSWORD)
api.queue.declare(self.queue_name)
bindings = len(api.queue.bindings(self.queue_name))
api.queue.bind(queue=self.queue_name, exchange=exchange_name,
routing_key=self.queue_name, arguments=None)
self.assertEqual(len(api.queue.bindings(self.queue_name)),
bindings + 1)
self.assertIsNone(api.queue.unbind(queue=self.queue_name,
exchange=exchange_name,
routing_key=self.queue_name))
self.assertEqual(len(api.queue.bindings(self.queue_name)), bindings)
| true | true |
f72516a6c5f55d8207f7aef5e97d7acd0c0e1e7d | 350 | py | Python | BOOK/MAIN/05-file-handling/chapter-5-examples/07-count-vowels-consonants.py | kabirsrivastava3/python-practice | f56a4a0764031d3723b0ba4cd1418a1a83b1e4f5 | [
"MIT"
] | null | null | null | BOOK/MAIN/05-file-handling/chapter-5-examples/07-count-vowels-consonants.py | kabirsrivastava3/python-practice | f56a4a0764031d3723b0ba4cd1418a1a83b1e4f5 | [
"MIT"
] | null | null | null | BOOK/MAIN/05-file-handling/chapter-5-examples/07-count-vowels-consonants.py | kabirsrivastava3/python-practice | f56a4a0764031d3723b0ba4cd1418a1a83b1e4f5 | [
"MIT"
] | null | null | null |
fileObj = open('answer.txt',"r")
ch = ""
vCount = 0
cCount = 0
while ch:
ch = fileObj.read(1) #one character read from file
if ch in ['A','a','E','e','I','i','O','o','U','u']:
vCount+=1
else:
cCount+=1
print("Vowels in the file: ", vCount)
print("Consonants in the file: ",cCount)
#close the file
fileObj.close() | 21.875 | 55 | 0.56 |
fileObj = open('answer.txt',"r")
ch = ""
vCount = 0
cCount = 0
while ch:
ch = fileObj.read(1)
if ch in ['A','a','E','e','I','i','O','o','U','u']:
vCount+=1
else:
cCount+=1
print("Vowels in the file: ", vCount)
print("Consonants in the file: ",cCount)
fileObj.close() | true | true |
f72517727d88232198a9d0d468e299f69e2a632b | 4,416 | py | Python | venv/Lib/site-packages/ipyparallel/controller/mongodb.py | BoxicaLion/BasicMathFormulas | 4d9782f2c0c75ecccf4c0ea995f324f93e4fb6e2 | [
"MIT"
] | 69 | 2019-02-18T12:07:35.000Z | 2022-03-12T10:38:32.000Z | ipyparallel/controller/mongodb.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 12 | 2018-12-06T22:06:49.000Z | 2022-02-25T17:40:44.000Z | ipyparallel/controller/mongodb.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 28 | 2019-03-22T01:07:13.000Z | 2022-02-21T16:38:27.000Z | """A TaskRecord backend using mongodb
Authors:
* Min RK
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
try:
from pymongo import MongoClient
except ImportError:
from pymongo import Connection as MongoClient
# bson.Binary import moved
try:
from bson.binary import Binary
except ImportError:
from bson import Binary
from traitlets import Dict, List, Unicode, Instance
from .dictdb import BaseDB
#-----------------------------------------------------------------------------
# MongoDB class
#-----------------------------------------------------------------------------
class MongoDB(BaseDB):
"""MongoDB TaskRecord backend."""
connection_args = List(config=True,
help="""Positional arguments to be passed to pymongo.MongoClient. Only
necessary if the default mongodb configuration does not point to your
mongod instance.""")
connection_kwargs = Dict(config=True,
help="""Keyword arguments to be passed to pymongo.MongoClient. Only
necessary if the default mongodb configuration does not point to your
mongod instance."""
)
database = Unicode("ipython-tasks", config=True,
help="""The MongoDB database name to use for storing tasks for this session. If unspecified,
a new database will be created with the Hub's IDENT. Specifying the database will result
in tasks from previous sessions being available via Clients' db_query and
get_result methods.""")
_connection = Instance(MongoClient, allow_none=True) # pymongo connection
def __init__(self, **kwargs):
super(MongoDB, self).__init__(**kwargs)
if self._connection is None:
self._connection = MongoClient(*self.connection_args, **self.connection_kwargs)
if not self.database:
self.database = self.session
self._db = self._connection[self.database]
self._records = self._db['task_records']
self._records.ensure_index('msg_id', unique=True)
self._records.ensure_index('submitted') # for sorting history
# for rec in self._records.find
def _binary_buffers(self, rec):
for key in ('buffers', 'result_buffers'):
if rec.get(key, None):
rec[key] = list(map(Binary, rec[key]))
return rec
def add_record(self, msg_id, rec):
"""Add a new Task Record, by msg_id."""
# print rec
rec = self._binary_buffers(rec)
self._records.insert(rec)
def get_record(self, msg_id):
"""Get a specific Task Record, by msg_id."""
r = self._records.find_one({'msg_id': msg_id})
if not r:
# r will be '' if nothing is found
raise KeyError(msg_id)
return r
def update_record(self, msg_id, rec):
"""Update the data in an existing record."""
rec = self._binary_buffers(rec)
self._records.update({'msg_id':msg_id}, {'$set': rec})
def drop_matching_records(self, check):
"""Remove a record from the DB."""
self._records.remove(check)
def drop_record(self, msg_id):
"""Remove a record from the DB."""
self._records.remove({'msg_id':msg_id})
def find_records(self, check, keys=None):
"""Find records matching a query dict, optionally extracting subset of keys.
Returns list of matching records.
Parameters
----------
check: dict
mongodb-style query argument
keys: list of strs [optional]
if specified, the subset of keys to extract. msg_id will *always* be
included.
"""
if keys and 'msg_id' not in keys:
keys.append('msg_id')
matches = list(self._records.find(check,keys))
for rec in matches:
rec.pop('_id')
return matches
def get_history(self):
"""get all msg_ids, ordered by time submitted."""
cursor = self._records.find({},{'msg_id':1}).sort('submitted')
return [ rec['msg_id'] for rec in cursor ]
| 35.047619 | 100 | 0.585824 |
try:
from pymongo import MongoClient
except ImportError:
from pymongo import Connection as MongoClient
try:
from bson.binary import Binary
except ImportError:
from bson import Binary
from traitlets import Dict, List, Unicode, Instance
from .dictdb import BaseDB
class MongoDB(BaseDB):
connection_args = List(config=True,
help="""Positional arguments to be passed to pymongo.MongoClient. Only
necessary if the default mongodb configuration does not point to your
mongod instance.""")
connection_kwargs = Dict(config=True,
help="""Keyword arguments to be passed to pymongo.MongoClient. Only
necessary if the default mongodb configuration does not point to your
mongod instance."""
)
database = Unicode("ipython-tasks", config=True,
help="""The MongoDB database name to use for storing tasks for this session. If unspecified,
a new database will be created with the Hub's IDENT. Specifying the database will result
in tasks from previous sessions being available via Clients' db_query and
get_result methods.""")
_connection = Instance(MongoClient, allow_none=True)
def __init__(self, **kwargs):
super(MongoDB, self).__init__(**kwargs)
if self._connection is None:
self._connection = MongoClient(*self.connection_args, **self.connection_kwargs)
if not self.database:
self.database = self.session
self._db = self._connection[self.database]
self._records = self._db['task_records']
self._records.ensure_index('msg_id', unique=True)
self._records.ensure_index('submitted')
def _binary_buffers(self, rec):
for key in ('buffers', 'result_buffers'):
if rec.get(key, None):
rec[key] = list(map(Binary, rec[key]))
return rec
def add_record(self, msg_id, rec):
rec = self._binary_buffers(rec)
self._records.insert(rec)
def get_record(self, msg_id):
r = self._records.find_one({'msg_id': msg_id})
if not r:
raise KeyError(msg_id)
return r
def update_record(self, msg_id, rec):
rec = self._binary_buffers(rec)
self._records.update({'msg_id':msg_id}, {'$set': rec})
def drop_matching_records(self, check):
self._records.remove(check)
def drop_record(self, msg_id):
self._records.remove({'msg_id':msg_id})
def find_records(self, check, keys=None):
if keys and 'msg_id' not in keys:
keys.append('msg_id')
matches = list(self._records.find(check,keys))
for rec in matches:
rec.pop('_id')
return matches
def get_history(self):
cursor = self._records.find({},{'msg_id':1}).sort('submitted')
return [ rec['msg_id'] for rec in cursor ]
| true | true |
f7251940c8d1976a314e9a83de4640eaf7110298 | 1,134 | py | Python | tools/gen_shake_256_sum.py | dpensi/insights-data-schemas | a60d673ce4053b8554e09b7bd08e518f9546727c | [
"Apache-2.0"
] | 1 | 2020-12-07T09:19:32.000Z | 2020-12-07T09:19:32.000Z | tools/gen_shake_256_sum.py | dpensi/insights-data-schemas | a60d673ce4053b8554e09b7bd08e518f9546727c | [
"Apache-2.0"
] | 36 | 2020-12-31T10:02:44.000Z | 2022-02-21T12:09:56.000Z | tools/gen_shake_256_sum.py | dpensi/insights-data-schemas | a60d673ce4053b8554e09b7bd08e518f9546727c | [
"Apache-2.0"
] | 6 | 2020-12-07T09:19:35.000Z | 2022-02-01T14:39:22.000Z | #!/usr/bin/env python3
# Copyright © 2021 Pavel Tisnovsky
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generator of SHAKE-256 sum values."""
import hashlib

# For each line of input.txt, print its 32-byte SHAKE-256 hex digest
# followed by the (tab-escaped) original string as a trailing comment.
with open("input.txt", "r") as fin:
    for input_string in fin:
        # remove the trailing EOLN if present; the previous `[:-1]` slice
        # chopped a real character whenever the last line had no newline
        input_string = input_string.rstrip("\n")
        # compute hash
        shake_256_sum = hashlib.shake_256()
        shake_256_sum.update(input_string.encode("UTF-8"))
        # prepare special chars for output
        input_string = input_string.replace("\t", "<Tab>")
        # generate output
        print(' "{}", # "{}"'.format(shake_256_sum.hexdigest(32), input_string))
| 32.4 | 84 | 0.689594 |
import hashlib
with open("input.txt", "r") as fin:
for input_string in fin:
input_string = input_string[:-1]
shake_256_sum = hashlib.shake_256()
shake_256_sum.update(input_string.encode("UTF-8"))
input_string = input_string.replace("\t", "<Tab>")
print(' "{}", # "{}"'.format(shake_256_sum.hexdigest(32), input_string))
| true | true |
f725194997751cabcf7176a1909560de88b4ee0e | 8,176 | py | Python | src/train.py | ahernandez1801/donkey_rl_mqtt | 02bbfc3d036220a4061b95e50780984e657aff43 | [
"BSD-3-Clause"
] | null | null | null | src/train.py | ahernandez1801/donkey_rl_mqtt | 02bbfc3d036220a4061b95e50780984e657aff43 | [
"BSD-3-Clause"
] | null | null | null | src/train.py | ahernandez1801/donkey_rl_mqtt | 02bbfc3d036220a4061b95e50780984e657aff43 | [
"BSD-3-Clause"
] | null | null | null | '''
Train
Train your neural network
Author: Tawn Kramer
'''
from __future__ import print_function
import os
import sys
import glob
import time
import fnmatch
import argparse
import numpy as np
from PIL import Image
import keras
import conf
import random
import augment
import models
'''
matplotlib can be a pain to setup. So handle the case where it is absent. When present,
use it to generate a plot of training results.
'''
# Optional plotting support: matplotlib may be missing or misconfigured;
# training still works without it, we just skip the loss plot.
do_plot = False
try:
    import matplotlib
    matplotlib.use('Agg')  # headless backend, no X server required
    import matplotlib.pyplot as plt
    do_plot = True
except:
    pass
def shuffle(samples):
    '''
    Return a new list with the elements of *samples* in random order.

    Unlike the previous implementation, this does not mutate (empty) the
    input list and runs in O(n) instead of O(n^2) `del`-based removal.
    '''
    return random.sample(samples, len(samples))
def parse_img_filepath(filepath):
    '''
    Parse steering/throttle labels encoded in an image filename.

    Filenames are underscore-separated, e.g.
    ``frame_00000_ttl_<steering>_agl_<throttle>.jpg``: token 3 is the
    steering value and token 5 the throttle value.

    Returns a dict with float 'steering' and 'throttle' entries.
    '''
    basename = os.path.basename(filepath)
    # strip the real extension instead of blindly chopping 4 characters,
    # so .jpeg/.png files parse correctly too (dots inside the numeric
    # tokens are safe: splitext only splits at the last dot)
    stem = os.path.splitext(basename)[0]
    tokens = stem.split('_')
    steering = float(tokens[3])
    throttle = float(tokens[5])
    data = {'steering':steering, 'throttle':throttle }
    return data
def generator(samples, batch_size=32, perc_to_augment=0.5):
    '''
    Infinite batch generator of (images, controls) arrays for training.

    Rather than keep all data in memory, this keeps its state and yields
    just the latest batch required.  Each image may be augmented (via the
    augment module: e.g. a random shadow overlay) with probability
    *perc_to_augment*; augmentation does not change the label.

    samples         -- list of image file paths; labels are encoded in
                       the filename (see parse_img_filepath)
    batch_size      -- maximum number of images per yielded batch
    perc_to_augment -- chance in [0, 1] of augmenting each image
    '''
    num_samples = len(samples)
    shadows = augment.load_shadow_images('./shadows/*.png')
    while 1: # Loop forever so the generator never terminates
        samples = shuffle(samples)
        # walk the re-shuffled sample list in batch_size strides
        for offset in range(0, num_samples, batch_size):
            batch_samples = samples[offset:offset+batch_size]
            images = []
            controls = []
            for fullpath in batch_samples:
                try:
                    # steering/throttle labels come from the filename
                    data = parse_img_filepath(fullpath)
                    steering = data["steering"]
                    throttle = data["throttle"]
                    try:
                        image = Image.open(fullpath)
                    except:
                        image = None
                    if image is None:
                        print('failed to open', fullpath)
                        continue
                    #PIL Image as a numpy array
                    image = np.array(image)
                    # optionally overlay a random shadow image
                    if len(shadows) > 0 and random.uniform(0.0, 1.0) < perc_to_augment:
                        image = augment.augment_image(image, shadows)
                    center_angle = steering
                    images.append(image)
                    # conf.num_outputs selects (steering, throttle) vs steering-only
                    if conf.num_outputs == 2:
                        controls.append([center_angle, throttle])
                    elif conf.num_outputs == 1:
                        controls.append([center_angle])
                    else:
                        print("expected 1 or 2 ouputs")
                except:
                    # NOTE(review): this broad except yields an empty batch
                    # *per failed sample* and then keeps appending to the
                    # same images/controls lists — looks unintended; confirm
                    # before relying on this behaviour.
                    print("we threw an exception on:", fullpath)
                    yield [], []
            # final np array to submit to training
            X_train = np.array(images)
            y_train = np.array(controls)
            yield X_train, y_train
def get_files(filemask):
    '''
    Recursively collect every file under the directory part of *filemask*
    whose basename matches its wildcard part (e.g. "./log/*.jpg").
    '''
    directory, pattern = os.path.split(filemask)
    found = []
    for root, _dirs, names in os.walk(directory):
        found.extend(os.path.join(root, name)
                     for name in fnmatch.filter(names, pattern))
    return found
def train_test_split(lines, test_perc):
    '''
    Randomly partition *lines* into a (train, test) pair of lists; each
    element lands in the test list with probability *test_perc*.
    '''
    train = []
    test = []
    for item in lines:
        # one uniform draw per element decides its bucket
        bucket = test if random.uniform(0.0, 1.0) < test_perc else train
        bucket.append(item)
    return train, test
def make_generators(inputs, limit=None, batch_size=32, aug_perc=0.0):
    '''
    Collect image files matching *inputs*, split them ~80/20 into
    train/validation sets, and wrap both sides in batch generators.

    Returns (train_generator, validation_generator, n_train, n_val).
    '''
    filepaths = get_files(inputs)
    print("found %d files" % len(filepaths))
    if limit is not None:
        filepaths = filepaths[:limit]
        print("limiting to %d files" % len(filepaths))
    train_samples, validation_samples = train_test_split(filepaths, test_perc=0.2)
    print("num train/val", len(train_samples), len(validation_samples))
    # the train side gets augmentation; the validation side never does
    train_generator = generator(train_samples, batch_size=batch_size, perc_to_augment=aug_perc)
    validation_generator = generator(validation_samples, batch_size=batch_size, perc_to_augment=0.0)
    return (train_generator, validation_generator,
            len(train_samples), len(validation_samples))
def go(model_name, epochs=50, inputs='./log/*.jpg', limit=None, aug_mult=1, aug_perc=0.0):
    """Train the steering model on session images and save the best checkpoint.

    Parameters
    ----------
    model_name : str
        Path the best model checkpoint is written to.
    epochs : int
        Maximum number of training epochs (early stopping may end sooner).
    inputs : str
        File mask used to gather training images.
    limit : int or None
        Optional cap on the number of images used.
    aug_mult : int
        Accepted for CLI compatibility; not used by this routine.
    aug_perc : float
        Fraction (0-1) of training images to augment.
    """
    print('working on model', model_name)
    # The architecture is selected via conf / models (see config.json).
    model = models.get_nvidia_model(conf.num_outputs)
    # Display layer summary and weights info.
    models.show_model_summary(model)
    callbacks = [
        # Stop when validation loss stalls, and keep only the best weights.
        keras.callbacks.EarlyStopping(monitor='val_loss', patience=conf.training_patience, verbose=0),
        keras.callbacks.ModelCheckpoint(model_name, monitor='val_loss', save_best_only=True, verbose=0),
    ]
    batch_size = conf.training_batch_size
    # Train on session images.
    train_generator, validation_generator, n_train, n_val = make_generators(inputs, limit=limit, batch_size=batch_size, aug_perc=aug_perc)
    if n_train == 0:
        print('no training data found')
        return
    steps_per_epoch = n_train // batch_size
    validation_steps = n_val // batch_size
    print("steps_per_epoch", steps_per_epoch, "validation_steps", validation_steps)
    history = model.fit_generator(train_generator,
        steps_per_epoch = steps_per_epoch,
        validation_data = validation_generator,
        validation_steps = validation_steps,
        epochs=epochs,
        verbose=1,
        callbacks=callbacks)
    try:
        if do_plot:
            # Summarize history for loss; written to loss.png (headless Agg backend).
            plt.plot(history.history['loss'])
            plt.plot(history.history['val_loss'])
            plt.title('model loss')
            plt.ylabel('loss')
            plt.xlabel('epoch')
            plt.legend(['train', 'test'], loc='upper left')
            plt.savefig('loss.png')
    except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt
        print("problems with loss graph")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='train script')
parser.add_argument('model', type=str, help='model name')
parser.add_argument('--epochs', type=int, default=conf.training_default_epochs, help='number of epochs')
parser.add_argument('--inputs', default='../dataset/log/*.jpg', help='input mask to gather images')
parser.add_argument('--limit', type=int, default=None, help='max number of images to train with')
parser.add_argument('--aug_mult', type=int, default=conf.training_default_aug_mult, help='how many more images to augment')
parser.add_argument('--aug_perc', type=float, default=conf.training_default_aug_percent, help='what percentage of images to augment 0 - 1')
args = parser.parse_args()
go(args.model, epochs=args.epochs, limit=args.limit, inputs=args.inputs, aug_mult=args.aug_mult, aug_perc=args.aug_perc)
#python train.py mymodel_aug_90_x4_e200 --epochs=200 --aug_mult=4 --aug_perc=0.9
| 32.316206 | 143 | 0.632583 | from __future__ import print_function
import os
import sys
import glob
import time
import fnmatch
import argparse
import numpy as np
from PIL import Image
import keras
import conf
import random
import augment
import models
try:
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
do_plot = True
except:
do_plot = False
def shuffle(samples):
    """Return a new list with the items of *samples* in random order.

    Replaces an O(n^2) draw-and-delete loop with random.sample, which is
    O(n).  Unlike the previous implementation, the input list is no longer
    drained/emptied as a side effect (callers here reassign the result).
    """
    return random.sample(samples, len(samples))
def parse_img_filepath(filepath):
    """Decode steering/throttle from an image file name.

    Expects underscore-separated names with a 4-char extension, where
    token 3 is the steering value and token 5 the throttle value.
    """
    stem = os.path.basename(filepath)[:-4]  # drop the 4-char extension (e.g. '.jpg')
    tokens = stem.split('_')
    return {'steering': float(tokens[3]), 'throttle': float(tokens[5])}
def generator(samples, batch_size=32, perc_to_augment=0.5):
    """Infinite batch generator yielding (images, controls) numpy arrays.

    Samples are reshuffled each epoch; a fraction of images may get a
    shadow-overlay augmentation.  Unreadable images are skipped.
    """
    num_samples = len(samples)
    shadows = augment.load_shadow_images('./shadows/*.png')
    while 1:  # loop forever so the generator never terminates
        samples = shuffle(samples)
        for offset in range(0, num_samples, batch_size):
            batch_samples = samples[offset:offset+batch_size]
            images = []
            controls = []
            for fullpath in batch_samples:
                try:
                    # Steering/throttle labels are encoded in the filename.
                    data = parse_img_filepath(fullpath)
                    steering = data["steering"]
                    throttle = data["throttle"]
                    try:
                        image = Image.open(fullpath)
                    except:
                        image = None
                    if image is None:
                        print('failed to open', fullpath)
                        continue
                    # PIL Image as a numpy array.
                    image = np.array(image)
                    # Randomly augment a perc_to_augment fraction of images.
                    if len(shadows) > 0 and random.uniform(0.0, 1.0) < perc_to_augment:
                        image = augment.augment_image(image, shadows)
                    center_angle = steering
                    images.append(image)
                    # Output vector layout depends on the configured model head.
                    if conf.num_outputs == 2:
                        controls.append([center_angle, throttle])
                    elif conf.num_outputs == 1:
                        controls.append([center_angle])
                    else:
                        print("expected 1 or 2 ouputs")
                except:
                    print("we threw an exception on:", fullpath)
                    # NOTE(review): this yields an empty batch mid-loop on any
                    # per-sample error — presumably unintended; confirm.
                    yield [], []
            # Final np arrays to submit to training.
            X_train = np.array(images)
            y_train = np.array(controls)
            yield X_train, y_train
def get_files(filemask):
    # Recursively collect files under the directory part of filemask whose
    # basenames match its pattern part (fnmatch-style, e.g. '*.jpg').
    path, mask = os.path.split(filemask)
    matches = []
    for root, dirnames, filenames in os.walk(path):
        for filename in fnmatch.filter(filenames, mask):
            matches.append(os.path.join(root, filename))
    return matches
def train_test_split(lines, test_perc):
    # Randomly partition lines into (train, test); each item lands in the
    # test split with probability test_perc (one RNG draw per item).
    train = []
    test = []
    for line in lines:
        if random.uniform(0.0, 1.0) < test_perc:
            test.append(line)
        else:
            train.append(line)
    return train, test
def make_generators(inputs, limit=None, batch_size=32, aug_perc=0.0):
    """Gather image files matching *inputs*, split 80/20 into train/val,
    and wrap each split in a batch generator.

    Returns (train_generator, validation_generator, n_train, n_val).
    """
    lines = get_files(inputs)
    print("found %d files" % len(lines))
    if limit is not None:
        lines = lines[:limit]
        print("limiting to %d files" % len(lines))
    # 20% of the samples are held out for validation.
    train_samples, validation_samples = train_test_split(lines, test_perc=0.2)
    print("num train/val", len(train_samples), len(validation_samples))
    # Only the training stream is augmented; validation stays untouched.
    train_generator = generator(train_samples, batch_size=batch_size, perc_to_augment=aug_perc)
    validation_generator = generator(validation_samples, batch_size=batch_size, perc_to_augment=0.0)
    n_train = len(train_samples)
    n_val = len(validation_samples)
    return train_generator, validation_generator, n_train, n_val
def go(model_name, epochs=50, inputs='./log/*.jpg', limit=None, aug_mult=1, aug_perc=0.0):
    """Train the steering model and save the best checkpoint to model_name.

    NOTE(review): aug_mult is accepted but never used here — confirm.
    """
    print('working on model', model_name)
    # Architecture is selected via conf / models.
    model = models.get_nvidia_model(conf.num_outputs)
    models.show_model_summary(model)
    callbacks = [
        # Stop when validation loss stalls; keep only the best weights.
        keras.callbacks.EarlyStopping(monitor='val_loss', patience=conf.training_patience, verbose=0),
        keras.callbacks.ModelCheckpoint(model_name, monitor='val_loss', save_best_only=True, verbose=0),
    ]
    batch_size = conf.training_batch_size
    train_generator, validation_generator, n_train, n_val = make_generators(inputs, limit=limit, batch_size=batch_size, aug_perc=aug_perc)
    if n_train == 0:
        print('no training data found')
        return
    steps_per_epoch = n_train // batch_size
    validation_steps = n_val // batch_size
    print("steps_per_epoch", steps_per_epoch, "validation_steps", validation_steps)
    history = model.fit_generator(train_generator,
        steps_per_epoch = steps_per_epoch,
        validation_data = validation_generator,
        validation_steps = validation_steps,
        epochs=epochs,
        verbose=1,
        callbacks=callbacks)
    try:
        # Plot train/val loss when matplotlib imported successfully (do_plot).
        if do_plot:
            plt.plot(history.history['loss'])
            plt.plot(history.history['val_loss'])
            plt.title('model loss')
            plt.ylabel('loss')
            plt.xlabel('epoch')
            plt.legend(['train', 'test'], loc='upper left')
            plt.savefig('loss.png')
    except:
        print("problems with loss graph")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='train script')
parser.add_argument('model', type=str, help='model name')
parser.add_argument('--epochs', type=int, default=conf.training_default_epochs, help='number of epochs')
parser.add_argument('--inputs', default='../dataset/log/*.jpg', help='input mask to gather images')
parser.add_argument('--limit', type=int, default=None, help='max number of images to train with')
parser.add_argument('--aug_mult', type=int, default=conf.training_default_aug_mult, help='how many more images to augment')
parser.add_argument('--aug_perc', type=float, default=conf.training_default_aug_percent, help='what percentage of images to augment 0 - 1')
args = parser.parse_args()
go(args.model, epochs=args.epochs, limit=args.limit, inputs=args.inputs, aug_mult=args.aug_mult, aug_perc=args.aug_perc)
| true | true |
f7251a2ca8385d7a240cf8759dc50191209cbf05 | 2,647 | py | Python | frappe/website/context.py | gangadhar-kadam/lgnlvefrape | 6c72c134d358030d3737ff63e5a4b8187e802f17 | [
"MIT"
] | 1 | 2022-03-05T16:02:39.000Z | 2022-03-05T16:02:39.000Z | frappe/website/context.py | gangadhar-kadam/lgnlvefrape | 6c72c134d358030d3737ff63e5a4b8187e802f17 | [
"MIT"
] | null | null | null | frappe/website/context.py | gangadhar-kadam/lgnlvefrape | 6c72c134d358030d3737ff63e5a4b8187e802f17 | [
"MIT"
] | null | null | null | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.website.doctype.website_settings.website_settings import get_website_settings
from frappe.website.template import render_blocks
from frappe.website.router import get_route_info
from frappe.website.utils import can_cache
from frappe.website.permissions import get_access
def get_context(path):
	"""Build (or fetch from cache) the rendering context for a website path.

	Cached contexts get a permissive default access dict; freshly built
	ones compute access for Website Groups before rendering.
	"""
	context = None
	cache_key = "page_context:{}".format(path)
	def add_data_path(context):
		# Record the requested path inside context.data for templates.
		if not context.data:
			context.data = {}
		context.data["path"] = path
	# try from memcache
	if can_cache():
		context = frappe.cache().get_value(cache_key)
	if not context:
		context = get_route_info(path)
		# permission may be required for rendering
		if context.doc and context.doc.doctype=="Website Group":
			context["access"] = get_access(context.doc, context.pathname)
		else:
			context["access"] = frappe._dict({"public_read":1, "public_write":1})
		context = build_context(context)
		add_data_path(context)
		# Only cache pages that didn't opt out via no_cache.
		if can_cache(context.no_cache):
			frappe.cache().set_value(cache_key, context)
	else:
		# Cache hit: access is reset to public defaults (not cached).
		context["access"] = frappe._dict({"public_read":1, "public_write":1})
		add_data_path(context)
	context.update(context.data or {})
	return context
def build_context(sitemap_options):
	"""get_context method of doc or module is supposed to render content templates and push it into context"""
	context = frappe._dict(sitemap_options)
	context.update(get_website_settings())
	# provide doc
	if context.doc:
		context.update(context.doc.as_dict())
		if hasattr(context.doc, "get_context"):
			context.update(context.doc.get_context(context) or {})
	elif context.controller:
		# No doc: fall back to the controller module's hooks.
		module = frappe.get_module(context.controller)
		if module:
			if hasattr(module, "get_context"):
				context.update(module.get_context(context) or {})
			if hasattr(module, "get_children"):
				context.get_children = module.get_children
	add_metatags(context)
	# Render template blocks unless already rendered or using the base template.
	if context.get("base_template_path") != context.get("template") and not context.get("rendered"):
		context.data = render_blocks(context)
	return context
def add_metatags(context):
	"""Fill in Open Graph / Twitter Card metatags from context["metatags"].

	Mutates the metatags dict in place: ensures card/type defaults and
	mirrors name/description/image into the og:* / twitter:* keys.
	No-op when the page defines no metatags.
	"""
	tags = context.get("metatags")
	if not tags:
		return
	# setdefault preserves explicit values (even falsy ones), like the
	# previous `if not key in tags` checks did.
	tags.setdefault("twitter:card", "summary")
	tags.setdefault("og:type", "article")
	if tags.get("name"):
		tags["og:title"] = tags["twitter:title"] = tags["name"]
	if tags.get("description"):
		tags["og:description"] = tags["twitter:description"] = tags["description"]
	if tags.get("image"):
		tags["og:image"] = tags["twitter:image:src"] = tags["image"]
| 28.771739 | 107 | 0.735172 |
from __future__ import unicode_literals
import frappe
from frappe.website.doctype.website_settings.website_settings import get_website_settings
from frappe.website.template import render_blocks
from frappe.website.router import get_route_info
from frappe.website.utils import can_cache
from frappe.website.permissions import get_access
def get_context(path):
	"""Build (or fetch from cache) the rendering context for a website path."""
	context = None
	cache_key = "page_context:{}".format(path)
	def add_data_path(context):
		# Record the requested path inside context.data for templates.
		if not context.data:
			context.data = {}
		context.data["path"] = path
	# Try the cache first when caching is allowed for this request.
	if can_cache():
		context = frappe.cache().get_value(cache_key)
	if not context:
		context = get_route_info(path)
		# Permission may be required for rendering Website Groups.
		if context.doc and context.doc.doctype=="Website Group":
			context["access"] = get_access(context.doc, context.pathname)
		else:
			context["access"] = frappe._dict({"public_read":1, "public_write":1})
		context = build_context(context)
		add_data_path(context)
		if can_cache(context.no_cache):
			frappe.cache().set_value(cache_key, context)
	else:
		# Cache hit: access is reset to public defaults (not cached).
		context["access"] = frappe._dict({"public_read":1, "public_write":1})
		add_data_path(context)
	context.update(context.data or {})
	return context
def build_context(sitemap_options):
	"""Assemble the template context: website settings, then the doc's or
	controller module's get_context hook, metatags, and rendered blocks."""
	context = frappe._dict(sitemap_options)
	context.update(get_website_settings())
	if context.doc:
		context.update(context.doc.as_dict())
		if hasattr(context.doc, "get_context"):
			context.update(context.doc.get_context(context) or {})
	elif context.controller:
		# No doc: fall back to the controller module's hooks.
		module = frappe.get_module(context.controller)
		if module:
			if hasattr(module, "get_context"):
				context.update(module.get_context(context) or {})
			if hasattr(module, "get_children"):
				context.get_children = module.get_children
	add_metatags(context)
	# Render template blocks unless already rendered or using the base template.
	if context.get("base_template_path") != context.get("template") and not context.get("rendered"):
		context.data = render_blocks(context)
	return context
def add_metatags(context):
	"""Derive og:* / twitter:* metatags in place from context["metatags"]."""
	tags = context.get("metatags")
	if tags:
		# Defaults apply only when the key is absent entirely.
		for key, value in (("twitter:card", "summary"), ("og:type", "article")):
			if key not in tags:
				tags[key] = value
		# Mirror truthy source fields into their og/twitter counterparts.
		mirrors = (
			("name", ("og:title", "twitter:title")),
			("description", ("og:description", "twitter:description")),
			("image", ("og:image", "twitter:image:src")),
		)
		for source, targets in mirrors:
			value = tags.get(source)
			if value:
				for target in targets:
					tags[target] = value
| true | true |
f7251ad863b8884ed1b5f58106eecd8cd3a5a1ce | 2,503 | py | Python | tomodachi/protocol/json_base.py | 0x1EE7/tomodachi | 8147b16d8be19b80b3bd7c5d8ed21c9863eaaa83 | [
"MIT"
] | null | null | null | tomodachi/protocol/json_base.py | 0x1EE7/tomodachi | 8147b16d8be19b80b3bd7c5d8ed21c9863eaaa83 | [
"MIT"
] | null | null | null | tomodachi/protocol/json_base.py | 0x1EE7/tomodachi | 8147b16d8be19b80b3bd7c5d8ed21c9863eaaa83 | [
"MIT"
] | null | null | null | import ujson
import uuid
import time
import zlib
import base64
from typing import Any, Dict, Tuple, Union
PROTOCOL_VERSION = 'tomodachi-json-base--1.0.0'
class JsonBase(object):
    """Envelope codec for the tomodachi JSON base protocol.

    Payloads whose serialized form reaches 60000 characters are
    gzip-compressed and base64-encoded, flagged via metadata.data_encoding.
    """

    @classmethod
    async def build_message(cls, service: Any, topic: str, data: Any, **kwargs: Any) -> str:
        """Serialize *data* into a protocol envelope string for *topic*."""
        data_encoding = 'raw'
        # Serialize once; the same string feeds the size check and compression
        # (previously ujson.dumps(data) ran twice).
        serialized = ujson.dumps(data)
        if len(serialized) >= 60000:
            data = base64.b64encode(zlib.compress(serialized.encode('utf-8'))).decode('utf-8')
            data_encoding = 'base64_gzip_json'
        message = {
            'service': {
                'name': getattr(service, 'name', None),
                'uuid': getattr(service, 'uuid', None)
            },
            'metadata': {
                'message_uuid': '{}.{}'.format(getattr(service, 'uuid', ''), str(uuid.uuid4())),
                'protocol_version': PROTOCOL_VERSION,
                'compatible_protocol_versions': ['json_base-wip'],  # deprecated
                'timestamp': time.time(),
                'topic': topic,
                'data_encoding': data_encoding
            },
            'data': data
        }
        return ujson.dumps(message)

    @classmethod
    async def parse_message(cls, payload: str, **kwargs: Any) -> Union[Dict, Tuple]:
        """Decode an envelope produced by build_message.

        Returns (envelope_dict, message_uuid, timestamp).  Raises
        ValueError on an unknown data_encoding (this path previously
        crashed with an UnboundLocalError on `data`).
        """
        message = ujson.loads(payload)
        metadata = message.get('metadata', {})
        message_uuid = metadata.get('message_uuid')
        timestamp = metadata.get('timestamp')
        data_encoding = metadata.get('data_encoding')
        if data_encoding == 'raw':
            data = message.get('data')
        elif data_encoding == 'base64_gzip_json':
            data = ujson.loads(zlib.decompress(base64.b64decode(message.get('data').encode('utf-8'))).decode('utf-8'))
        else:
            raise ValueError("unsupported data_encoding: {}".format(data_encoding))
        return {
            'service': {
                'name': message.get('service', {}).get('name'),
                'uuid': message.get('service', {}).get('uuid')
            },
            'metadata': {
                'message_uuid': metadata.get('message_uuid'),
                'protocol_version': metadata.get('protocol_version'),
                'timestamp': metadata.get('timestamp'),
                'topic': metadata.get('topic'),
                'data_encoding': metadata.get('data_encoding')
            },
            'data': data
        }, message_uuid, timestamp
| 39.730159 | 118 | 0.548542 | import ujson
import uuid
import time
import zlib
import base64
from typing import Any, Dict, Tuple, Union
PROTOCOL_VERSION = 'tomodachi-json-base--1.0.0'
class JsonBase(object):
    """Envelope codec for the tomodachi JSON base protocol; large payloads
    are gzip-compressed and base64-encoded."""
    @classmethod
    async def build_message(cls, service: Any, topic: str, data: Any, **kwargs: Any) -> str:
        """Serialize *data* into a protocol envelope string for *topic*."""
        data_encoding = 'raw'
        # Payloads at or above 60000 serialized chars are compressed.
        if len(ujson.dumps(data)) >= 60000:
            data = base64.b64encode(zlib.compress(ujson.dumps(data).encode('utf-8'))).decode('utf-8')
            data_encoding = 'base64_gzip_json'
        message = {
            'service': {
                'name': getattr(service, 'name', None),
                'uuid': getattr(service, 'uuid', None)
            },
            'metadata': {
                'message_uuid': '{}.{}'.format(getattr(service, 'uuid', ''), str(uuid.uuid4())),
                'protocol_version': PROTOCOL_VERSION,
                'compatible_protocol_versions': ['json_base-wip'],  # deprecated
                'timestamp': time.time(),
                'topic': topic,
                'data_encoding': data_encoding
            },
            'data': data
        }
        return ujson.dumps(message)
    @classmethod
    async def parse_message(cls, payload: str, **kwargs: Any) -> Union[Dict, Tuple]:
        """Decode an envelope produced by build_message.

        Returns (envelope_dict, message_uuid, timestamp).
        NOTE(review): `protocol_version` is read but unused, and an unknown
        data_encoding leaves `data` unbound (UnboundLocalError) — confirm.
        """
        message = ujson.loads(payload)
        protocol_version = message.get('metadata', {}).get('protocol_version')
        message_uuid = message.get('metadata', {}).get('message_uuid')
        timestamp = message.get('metadata', {}).get('timestamp')
        if message.get('metadata', {}).get('data_encoding') == 'raw':
            data = message.get('data')
        elif message.get('metadata', {}).get('data_encoding') == 'base64_gzip_json':
            data = ujson.loads(zlib.decompress(base64.b64decode(message.get('data').encode('utf-8'))).decode('utf-8'))
        return {
            'service': {
                'name': message.get('service', {}).get('name'),
                'uuid': message.get('service', {}).get('uuid')
            },
            'metadata': {
                'message_uuid': message.get('metadata', {}).get('message_uuid'),
                'protocol_version': message.get('metadata', {}).get('protocol_version'),
                'timestamp': message.get('metadata', {}).get('timestamp'),
                'topic': message.get('metadata', {}).get('topic'),
                'data_encoding': message.get('metadata', {}).get('data_encoding')
            },
            'data': data
        }, message_uuid, timestamp
| true | true |
f7251c3cfff5728cee204b97993228189eefc64e | 2,801 | py | Python | planning/path_generator/search_path_generator.py | HybridRobotics/cbf | d8a1b376e7e910de71df60cdf3619f68c40ab3ed | [
"Apache-2.0"
] | 9 | 2022-03-07T09:12:29.000Z | 2022-03-25T01:41:49.000Z | planning/path_generator/search_path_generator.py | HybridRobotics/cbf | d8a1b376e7e910de71df60cdf3619f68c40ab3ed | [
"Apache-2.0"
] | null | null | null | planning/path_generator/search_path_generator.py | HybridRobotics/cbf | d8a1b376e7e910de71df60cdf3619f68c40ab3ed | [
"Apache-2.0"
] | null | null | null | import sys
import numpy as np
from planning.path_generator.astar import *
def plot_global_map(path, obstacles):
    # Debug helper: draw the planned waypoint path over the obstacle patches.
    # NOTE(review): `plt` presumably arrives via the star-import from
    # planning.path_generator.astar — confirm.
    fig, ax = plt.subplots()
    for o in obstacles:
        patch = o.get_plot_patch()
        ax.add_patch(patch)
    ax.plot(path[:, 0], path[:, 1])
    # Axis limits look like grid cells scaled by 0.15 (11 x 8 cells) — TODO confirm units.
    plt.xlim([-1 * 0.15, 11 * 0.15])
    plt.ylim([0 * 0.15, 8 * 0.15])
    plt.show()
class AstarPathGenerator:
    """Global planner: grid-based A* search returning a waypoint array."""

    def __init__(self, grid, quad, margin):
        # grid is (bounds, cell_size); margin inflates obstacles during search.
        self._global_path = None
        self._grid = GridMap(bounds=grid[0], cell_size=grid[1], quad=quad)
        self._margin = margin

    def generate_path(self, sys, obstacles, goal_pos):
        """Plan from the system's current (x, y) to goal_pos.

        Note: the parameter named `sys` is the robot system object and
        shadows the sys module, so `sys.exit(1)` would have crashed with
        an AttributeError; failure now raises SystemExit directly.
        """
        graph = GraphSearch(graph=self._grid, obstacles=obstacles, margin=self._margin)
        path = graph.a_star(sys.get_state()[:2], goal_pos)
        # Bug fix: `np.array(...) == []` never detects an empty result;
        # test the node list itself before converting.
        if len(path) == 0:
            print("Global Path not found.")
            raise SystemExit(1)
        self._global_path = np.array([p.pos for p in path])
        print(self._global_path)
        if True:  # debug visualization toggle
            plot_global_map(self._global_path, obstacles)
        return self._global_path

    def logging(self, logger):
        """Append the latest planned path to the logger's path history."""
        logger._paths.append(self._global_path)
class AstarLoSPathGenerator:
    """Global planner: A* search followed by line-of-sight path reduction."""

    def __init__(self, grid, quad, margin):
        # grid is (bounds, cell_size); margin inflates obstacles during search.
        self._global_path = None
        self._grid = GridMap(bounds=grid[0], cell_size=grid[1], quad=quad)
        self._margin = margin

    def generate_path(self, sys, obstacles, goal_pos):
        """Plan from the system's current (x, y) to goal_pos and prune
        collinear/visible waypoints via reduce_path.

        Note: the parameter named `sys` is the robot system object and
        shadows the sys module, so `sys.exit(1)` would have crashed with
        an AttributeError; failure now raises SystemExit directly.
        """
        graph = GraphSearch(graph=self._grid, obstacles=obstacles, margin=self._margin)
        path = graph.a_star(sys.get_state()[:2], goal_pos)
        path = graph.reduce_path(path)
        # Bug fix: `np.array(...) == []` never detects an empty result;
        # test the node list itself before converting.
        if len(path) == 0:
            print("Global Path not found.")
            raise SystemExit(1)
        self._global_path = np.array([p.pos for p in path])
        print(self._global_path)
        if False:  # debug visualization toggle (disabled)
            plot_global_map(self._global_path, obstacles)
        return self._global_path

    def logging(self, logger):
        """Append the latest planned path to the logger's path history."""
        logger._paths.append(self._global_path)
class ThetaStarPathGenerator:
    """Global planner: any-angle Theta* search returning a waypoint array."""

    def __init__(self, grid, quad, margin):
        # grid is (bounds, cell_size); Theta* always uses the non-quad grid.
        self._global_path = None
        self._grid = GridMap(bounds=grid[0], cell_size=grid[1], quad=False)
        self._margin = margin

    def generate_path(self, sys, obstacles, goal_pos):
        """Plan from the system's current (x, y) to goal_pos.

        Note: the parameter named `sys` is the robot system object and
        shadows the sys module, so `sys.exit(1)` would have crashed with
        an AttributeError; failure now raises SystemExit directly.
        """
        graph = GraphSearch(graph=self._grid, obstacles=obstacles, margin=self._margin)
        path = graph.theta_star(sys.get_state()[:2], goal_pos)
        # Bug fix: `np.array(...) == []` never detects an empty result;
        # test the node list itself before converting.
        if len(path) == 0:
            print("Global Path not found.")
            raise SystemExit(1)
        self._global_path = np.array([p.pos for p in path])
        print(self._global_path)
        if True:  # debug visualization toggle
            plot_global_map(self._global_path, obstacles)
        return self._global_path

    def logging(self, logger):
        """Append the latest planned path to the logger's path history."""
        logger._paths.append(self._global_path)
| 33.345238 | 87 | 0.633345 | import sys
import numpy as np
from planning.path_generator.astar import *
def plot_global_map(path, obstacles):
    # Debug helper: draw the planned waypoint path over the obstacle patches.
    fig, ax = plt.subplots()
    for o in obstacles:
        patch = o.get_plot_patch()
        ax.add_patch(patch)
    ax.plot(path[:, 0], path[:, 1])
    # Axis limits: grid cells scaled by 0.15 (11 x 8) — TODO confirm units.
    plt.xlim([-1 * 0.15, 11 * 0.15])
    plt.ylim([0 * 0.15, 8 * 0.15])
    plt.show()
class AstarPathGenerator:
    """Global planner: grid-based A* search returning a waypoint array."""
    def __init__(self, grid, quad, margin):
        # grid is (bounds, cell_size); margin inflates obstacles during search.
        self._global_path = None
        self._grid = GridMap(bounds=grid[0], cell_size=grid[1], quad=quad)
        self._margin = margin
    def generate_path(self, sys, obstacles, goal_pos):
        """Plan from the system's current (x, y) to goal_pos.

        NOTE(review): `self._global_path == []` can never be truthy for a
        numpy array, and the `sys` parameter shadows the sys module, so
        the failure branch would crash — confirm intended behavior.
        """
        graph = GraphSearch(graph=self._grid, obstacles=obstacles, margin=self._margin)
        path = graph.a_star(sys.get_state()[:2], goal_pos)
        self._global_path = np.array([p.pos for p in path])
        print(self._global_path)
        if self._global_path == []:
            print("Global Path not found.")
            sys.exit(1)
        if True:
            plot_global_map(self._global_path, obstacles)
        return self._global_path
    def logging(self, logger):
        # Append the latest planned path to the logger's path history.
        logger._paths.append(self._global_path)
class AstarLoSPathGenerator:
    """Global planner: A* search followed by line-of-sight path reduction."""
    def __init__(self, grid, quad, margin):
        # grid is (bounds, cell_size); margin inflates obstacles during search.
        self._global_path = None
        self._grid = GridMap(bounds=grid[0], cell_size=grid[1], quad=quad)
        self._margin = margin
    def generate_path(self, sys, obstacles, goal_pos):
        """Plan from the system's current (x, y) to goal_pos, pruning
        waypoints via reduce_path.

        NOTE(review): `self._global_path == []` can never be truthy for a
        numpy array, and the `sys` parameter shadows the sys module, so
        the failure branch would crash — confirm intended behavior.
        """
        graph = GraphSearch(graph=self._grid, obstacles=obstacles, margin=self._margin)
        path = graph.a_star(sys.get_state()[:2], goal_pos)
        path = graph.reduce_path(path)
        self._global_path = np.array([p.pos for p in path])
        print(self._global_path)
        if self._global_path == []:
            print("Global Path not found.")
            sys.exit(1)
        if False:
            plot_global_map(self._global_path, obstacles)
        return self._global_path
    def logging(self, logger):
        # Append the latest planned path to the logger's path history.
        logger._paths.append(self._global_path)
class ThetaStarPathGenerator:
    """Global planner: any-angle Theta* search returning a waypoint array."""
    def __init__(self, grid, quad, margin):
        # Theta* always uses the non-quad grid regardless of the quad arg.
        self._global_path = None
        self._grid = GridMap(bounds=grid[0], cell_size=grid[1], quad=False)
        self._margin = margin
    def generate_path(self, sys, obstacles, goal_pos):
        """Plan from the system's current (x, y) to goal_pos.

        NOTE(review): `self._global_path == []` can never be truthy for a
        numpy array, and the `sys` parameter shadows the sys module, so
        the failure branch would crash — confirm intended behavior.
        """
        graph = GraphSearch(graph=self._grid, obstacles=obstacles, margin=self._margin)
        path = graph.theta_star(sys.get_state()[:2], goal_pos)
        self._global_path = np.array([p.pos for p in path])
        print(self._global_path)
        if self._global_path == []:
            print("Global Path not found.")
            sys.exit(1)
        if True:
            plot_global_map(self._global_path, obstacles)
        return self._global_path
    def logging(self, logger):
        # Append the latest planned path to the logger's path history.
        logger._paths.append(self._global_path)
| true | true |
f7251d422b29b0275ce1c312bda2c4763835c059 | 33,303 | py | Python | pytorch/pytorchcv/models/common.py | HyperGAN/imgclsmob | 88b9776a5a927dc9a54e85e31978c4a9ec5ecbf3 | [
"MIT"
] | null | null | null | pytorch/pytorchcv/models/common.py | HyperGAN/imgclsmob | 88b9776a5a927dc9a54e85e31978c4a9ec5ecbf3 | [
"MIT"
] | null | null | null | pytorch/pytorchcv/models/common.py | HyperGAN/imgclsmob | 88b9776a5a927dc9a54e85e31978c4a9ec5ecbf3 | [
"MIT"
] | null | null | null | """
Common routines for models in PyTorch.
"""
__all__ = ['HSwish', 'get_activation_layer', 'conv1x1', 'conv3x3', 'depthwise_conv3x3', 'ConvBlock', 'conv1x1_block',
'conv3x3_block', 'conv7x7_block', 'dwconv3x3_block', 'dwconv5x5_block', 'PreConvBlock', 'pre_conv1x1_block',
'pre_conv3x3_block', 'ChannelShuffle', 'ChannelShuffle2', 'SEBlock', 'IBN', 'Identity', 'DualPathSequential',
'Concurrent', 'ParametricSequential', 'ParametricConcurrent', 'Hourglass', 'SesquialteralHourglass',
'MultiOutputSequential', 'Flatten']
import math
from inspect import isfunction
import torch
import torch.nn as nn
import torch.nn.functional as F
class Swish(nn.Module):
    """
    Swish activation, x * sigmoid(x), from 'Searching for Activation Functions,'
    https://arxiv.org/abs/1710.05941.
    """
    def forward(self, x):
        return torch.sigmoid(x).mul(x)
class HSigmoid(nn.Module):
    """
    Hard sigmoid, relu6(x + 3) / 6 — the piecewise-linear sigmoid approximation
    from 'Searching for MobileNetV3,' https://arxiv.org/abs/1905.02244.
    """
    def forward(self, x):
        shifted = x + 3.0  # fresh tensor, safe to mutate in place below
        return F.relu6(shifted, inplace=True) / 6.0
class HSwish(nn.Module):
    """
    H-Swish activation, x * relu6(x + 3) / 6, from 'Searching for MobileNetV3,'
    https://arxiv.org/abs/1905.02244.

    Parameters:
    ----------
    inplace : bool
        Whether relu6 may operate in place on its (temporary) input.
    """
    def __init__(self, inplace=False):
        super(HSwish, self).__init__()
        self.inplace = inplace

    def forward(self, x):
        gate = F.relu6(x + 3.0, inplace=self.inplace) / 6.0
        return x * gate
def get_activation_layer(activation):
    """
    Create activation layer from string/function.

    Parameters:
    ----------
    activation : function, or str, or nn.Module
        Activation function, name of activation function
        (one of 'relu', 'relu6', 'swish', 'hswish'), or a ready module.

    Returns
    -------
    nn.Module
        Activation layer.
    """
    assert (activation is not None)
    if isfunction(activation):
        # A factory callable: invoke it to build the module.
        return activation()
    elif isinstance(activation, str):
        if activation == "relu":
            return nn.ReLU(inplace=True)
        elif activation == "relu6":
            return nn.ReLU6(inplace=True)
        elif activation == "swish":
            return Swish()
        elif activation == "hswish":
            return HSwish(inplace=True)
        else:
            # Name the unsupported key instead of a bare NotImplementedError.
            raise NotImplementedError("Unsupported activation: {}".format(activation))
    else:
        assert (isinstance(activation, nn.Module))
        return activation
def conv1x1(in_channels,
            out_channels,
            stride=1,
            groups=1,
            bias=False):
    """
    Convolution 1x1 (pointwise) layer.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    groups : int, default 1
        Number of groups.
    bias : bool, default False
        Whether the layer uses a bias vector.
    """
    return nn.Conv2d(in_channels, out_channels, kernel_size=1,
                     stride=stride, groups=groups, bias=bias)
def conv3x3(in_channels,
            out_channels,
            stride=1,
            padding=1,
            dilation=1,
            groups=1,
            bias=False):
    """
    Convolution 3x3 layer.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    padding : int or tuple/list of 2 int, default 1
        Padding value for convolution layer.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for convolution layer.
    groups : int, default 1
        Number of groups.
    bias : bool, default False
        Whether the layer uses a bias vector.
    """
    return nn.Conv2d(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=3,
        stride=stride,
        padding=padding,
        dilation=dilation,
        groups=groups,
        bias=bias)
def depthwise_conv3x3(channels,
                      stride):
    """
    Depthwise convolution 3x3 layer (one 3x3 filter per channel).

    Parameters:
    ----------
    channels : int
        Number of input/output channels.
    strides : int or tuple/list of 2 int
        Strides of the convolution.
    """
    return nn.Conv2d(
        channels,
        channels,
        kernel_size=3,
        stride=stride,
        padding=1,
        groups=channels,  # depthwise: each channel is convolved independently
        bias=False)
class ConvBlock(nn.Module):
    """
    Standard convolution block: Conv2d, optional Batch normalization, and an
    optional activation.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    kernel_size : int or tuple/list of 2 int
        Convolution window size.
    stride : int or tuple/list of 2 int
        Strides of the convolution.
    padding : int or tuple/list of 2 int
        Padding value for convolution layer.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for convolution layer.
    groups : int, default 1
        Number of groups.
    bias : bool, default False
        Whether the layer uses a bias vector.
    use_bn : bool, default True
        Whether to use BatchNorm layer.
    bn_eps : float, default 1e-5
        Small float added to variance in Batch norm.
    activation : function or str or None, default nn.ReLU(inplace=True)
        Activation function or name of activation function; None disables it.
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride,
                 padding,
                 dilation=1,
                 groups=1,
                 bias=False,
                 use_bn=True,
                 bn_eps=1e-5,
                 activation=(lambda: nn.ReLU(inplace=True))):
        super(ConvBlock, self).__init__()
        self.use_bn = use_bn
        self.activate = activation is not None
        self.conv = nn.Conv2d(
            in_channels,
            out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            dilation=dilation,
            groups=groups,
            bias=bias)
        if self.use_bn:
            self.bn = nn.BatchNorm2d(num_features=out_channels, eps=bn_eps)
        if self.activate:
            self.activ = get_activation_layer(activation)

    def forward(self, x):
        out = self.conv(x)
        if self.use_bn:
            out = self.bn(out)
        if self.activate:
            out = self.activ(out)
        return out
def conv1x1_block(in_channels,
                  out_channels,
                  stride=1,
                  padding=0,
                  groups=1,
                  bias=False,
                  bn_eps=1e-5,
                  activation=(lambda: nn.ReLU(inplace=True))):
    """
    1x1 version of the standard convolution block (pointwise conv + BN +
    activation).

    Parameters mirror ConvBlock with kernel_size fixed to 1 and default
    padding 0: in_channels/out_channels (int), stride (int or 2-tuple,
    default 1), padding (int or 2-tuple, default 0), groups (int, default 1),
    bias (bool, default False), bn_eps (float, default 1e-5), and
    activation (factory/str/None, default inplace ReLU).
    """
    return ConvBlock(
        in_channels,
        out_channels,
        1,
        stride,
        padding,
        groups=groups,
        bias=bias,
        bn_eps=bn_eps,
        activation=activation)
def conv3x3_block(in_channels,
                  out_channels,
                  stride=1,
                  padding=1,
                  dilation=1,
                  groups=1,
                  bias=False,
                  use_bn=True,
                  bn_eps=1e-5,
                  activation=(lambda: nn.ReLU(inplace=True))):
    """
    3x3 version of the standard convolution block (conv + optional BN +
    activation).

    Parameters mirror ConvBlock with kernel_size fixed to 3 and default
    padding 1: in_channels/out_channels (int), stride (default 1),
    padding (default 1), dilation (default 1), groups (default 1),
    bias (default False), use_bn (default True), bn_eps (default 1e-5),
    and activation (factory/str/None, default inplace ReLU).
    """
    return ConvBlock(
        in_channels,
        out_channels,
        3,
        stride,
        padding,
        dilation=dilation,
        groups=groups,
        bias=bias,
        use_bn=use_bn,
        bn_eps=bn_eps,
        activation=activation)
def conv5x5_block(in_channels,
                  out_channels,
                  stride=1,
                  padding=2,
                  dilation=1,
                  groups=1,
                  bias=False,
                  bn_eps=1e-5,
                  activation=(lambda: nn.ReLU(inplace=True))):
    """
    5x5 version of the standard convolution block (conv + BN + activation).

    Parameters mirror ConvBlock with kernel_size fixed to 5 and default
    padding 2: in_channels/out_channels (int), stride (default 1),
    padding (default 2), dilation (default 1), groups (default 1),
    bias (default False), bn_eps (default 1e-5), and activation
    (factory/str/None, default inplace ReLU).
    """
    return ConvBlock(
        in_channels,
        out_channels,
        5,
        stride,
        padding,
        dilation=dilation,
        groups=groups,
        bias=bias,
        bn_eps=bn_eps,
        activation=activation)
def conv7x7_block(in_channels,
                  out_channels,
                  stride=1,
                  padding=3,
                  bias=False,
                  activation=(lambda: nn.ReLU(inplace=True)),
                  use_bn=True,
                  bn_eps=1e-5):
    """
    7x7 version of the standard convolution block.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    padding : int or tuple/list of 2 int, default 3
        Padding value for convolution layer.
    bias : bool, default False
        Whether the layer uses a bias vector.
    activation : function or str or None, default nn.ReLU(inplace=True)
        Activation function or name of activation function.
    use_bn : bool, default True
        Whether to use BatchNorm layer. Appended after `activation` so
        positional callers keep working (consistent with conv3x3_block).
    bn_eps : float, default 1e-5
        Small float added to variance in Batch norm.
    """
    return ConvBlock(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=7,
        stride=stride,
        padding=padding,
        bias=bias,
        use_bn=use_bn,
        bn_eps=bn_eps,
        activation=activation)
def dwconv3x3_block(in_channels,
                    out_channels,
                    stride=1,
                    padding=1,
                    dilation=1,
                    bias=False,
                    bn_eps=1e-5,
                    activation=(lambda: nn.ReLU(inplace=True))):
    """
    3x3 depthwise version of the standard convolution block.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    padding : int or tuple/list of 2 int, default 1
        Padding value for convolution layer.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for convolution layer.
    bias : bool, default False
        Whether the layer uses a bias vector.
    bn_eps : float, default 1e-5
        Small float added to variance in Batch norm.
    activation : function or str or None, default nn.ReLU(inplace=True)
        Activation function or name of activation function.
    """
    # A depthwise convolution is a grouped convolution with one group per
    # output channel, hence groups=out_channels below.
    opts = dict(
        in_channels=in_channels,
        out_channels=out_channels,
        stride=stride,
        padding=padding,
        dilation=dilation,
        groups=out_channels,
        bias=bias,
        bn_eps=bn_eps,
        activation=activation)
    return conv3x3_block(**opts)
def dwconv5x5_block(in_channels,
                    out_channels,
                    stride=1,
                    padding=2,
                    dilation=1,
                    bias=False,
                    bn_eps=1e-5,
                    activation=(lambda: nn.ReLU(inplace=True))):
    """
    5x5 depthwise version of the standard convolution block.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    padding : int or tuple/list of 2 int, default 2
        Padding value for convolution layer.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for convolution layer.
    bias : bool, default False
        Whether the layer uses a bias vector.
    bn_eps : float, default 1e-5
        Small float added to variance in Batch norm.
    activation : function or str or None, default nn.ReLU(inplace=True)
        Activation function or name of activation function.
    """
    # Depthwise = grouped convolution with groups equal to channel count.
    opts = dict(
        in_channels=in_channels,
        out_channels=out_channels,
        stride=stride,
        padding=padding,
        dilation=dilation,
        groups=out_channels,
        bias=bias,
        bn_eps=bn_eps,
        activation=activation)
    return conv5x5_block(**opts)
class PreConvBlock(nn.Module):
    """
    Convolution block with Batch normalization and ReLU pre-activation
    (BN -> ReLU -> Conv), as used by PreResNet-style architectures.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    kernel_size : int or tuple/list of 2 int
        Convolution window size.
    stride : int or tuple/list of 2 int
        Strides of the convolution.
    padding : int or tuple/list of 2 int
        Padding value for convolution layer.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for convolution layer.
    bias : bool, default False
        Whether the layer uses a bias vector.
    return_preact : bool, default False
        Whether to also return the pre-activation tensor (used by PreResNet).
    activate : bool, default True
        Whether to apply the ReLU between BN and the convolution.
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride,
                 padding,
                 dilation=1,
                 bias=False,
                 return_preact=False,
                 activate=True):
        super(PreConvBlock, self).__init__()
        self.return_preact = return_preact
        self.activate = activate
        self.bn = nn.BatchNorm2d(num_features=in_channels)
        if self.activate:
            self.activ = nn.ReLU(inplace=True)
        self.conv = nn.Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            dilation=dilation,
            bias=bias)
    def forward(self, x):
        x = self.bn(x)
        if self.activate:
            x = self.activ(x)
        # Capture the (normalized, activated) tensor before the convolution
        # when the caller wants it back.
        pre_activ = x if self.return_preact else None
        x = self.conv(x)
        if self.return_preact:
            return x, pre_activ
        return x
def pre_conv1x1_block(in_channels,
                      out_channels,
                      stride=1,
                      bias=False,
                      return_preact=False,
                      activate=True):
    """
    1x1 version of the pre-activated convolution block.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    bias : bool, default False
        Whether the layer uses a bias vector.
    return_preact : bool, default False
        Whether to also return the pre-activation tensor.
    activate : bool, default True
        Whether to apply the activation inside the block.
    """
    # 1x1 kernels need no padding.
    opts = dict(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=1,
        stride=stride,
        padding=0,
        bias=bias,
        return_preact=return_preact,
        activate=activate)
    return PreConvBlock(**opts)
def pre_conv3x3_block(in_channels,
                      out_channels,
                      stride=1,
                      padding=1,
                      dilation=1,
                      return_preact=False,
                      activate=True):
    """
    3x3 version of the pre-activated convolution block.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    padding : int or tuple/list of 2 int, default 1
        Padding value for convolution layer.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for convolution layer.
    return_preact : bool, default False
        Whether to also return the pre-activation tensor.
    activate : bool, default True
        Whether to apply the activation inside the block.
    """
    opts = dict(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=3,
        stride=stride,
        padding=padding,
        dilation=dilation,
        return_preact=return_preact,
        activate=activate)
    return PreConvBlock(**opts)
def channel_shuffle(x,
                    groups):
    """
    Channel shuffle operation from 'ShuffleNet: An Extremely Efficient
    Convolutional Neural Network for Mobile Devices,'
    https://arxiv.org/abs/1707.01083.
    Parameters:
    ----------
    x : Tensor
        Input tensor.
    groups : int
        Number of groups.
    Returns
    -------
    Tensor
        Resulted tensor.
    """
    batch, channels, height, width = x.size()
    per_group = channels // groups
    # Factor the channel axis as (groups, per_group), swap the two factors
    # and flatten back: channels are interleaved across groups.
    y = x.view(batch, groups, per_group, height, width)
    y = y.permute(0, 2, 1, 3, 4).contiguous()
    return y.view(batch, channels, height, width)
class ChannelShuffle(nn.Module):
    """
    Channel shuffle layer: a module wrapper around `channel_shuffle` that
    remembers the group count.
    Parameters:
    ----------
    channels : int
        Number of channels.
    groups : int
        Number of groups.
    """
    def __init__(self,
                 channels,
                 groups):
        super(ChannelShuffle, self).__init__()
        # Shuffling only makes sense when the channels split evenly.
        if channels % groups != 0:
            raise ValueError('channels must be divisible by groups')
        self.groups = groups
    def forward(self, x):
        return channel_shuffle(x, self.groups)
def channel_shuffle2(x,
                     groups):
    """
    Channel shuffle operation from 'ShuffleNet: An Extremely Efficient
    Convolutional Neural Network for Mobile Devices,'
    https://arxiv.org/abs/1707.01083. The alternative version: the channel
    axis is factored as (per_group, groups) before the swap, unlike
    `channel_shuffle`.
    Parameters:
    ----------
    x : Tensor
        Input tensor.
    groups : int
        Number of groups.
    Returns
    -------
    Tensor
        Resulted tensor.
    """
    batch, channels, height, width = x.size()
    per_group = channels // groups
    y = x.view(batch, per_group, groups, height, width)
    y = y.permute(0, 2, 1, 3, 4).contiguous()
    return y.view(batch, channels, height, width)
class ChannelShuffle2(nn.Module):
    """
    Channel shuffle layer wrapping the alternative `channel_shuffle2`
    operation; remembers the group count.
    Parameters:
    ----------
    channels : int
        Number of channels.
    groups : int
        Number of groups.
    """
    def __init__(self,
                 channels,
                 groups):
        super(ChannelShuffle2, self).__init__()
        # Shuffling only makes sense when the channels split evenly.
        if channels % groups != 0:
            raise ValueError('channels must be divisible by groups')
        self.groups = groups
    def forward(self, x):
        return channel_shuffle2(x, self.groups)
class SEBlock(nn.Module):
    """
    Squeeze-and-Excitation block from 'Squeeze-and-Excitation Networks,'
    https://arxiv.org/abs/1709.01507.
    Parameters:
    ----------
    channels : int
        Number of channels.
    reduction : int, default 16
        Squeeze reduction value.
    approx_sigmoid : bool, default False
        Whether to use approximated sigmoid function.
    activation : function, or str, or nn.Module
        Activation function or name of activation function.
    """
    def __init__(self,
                 channels,
                 reduction=16,
                 approx_sigmoid=False,
                 activation=(lambda: nn.ReLU(inplace=True))):
        super(SEBlock, self).__init__()
        squeezed_channels = channels // reduction
        self.pool = nn.AdaptiveAvgPool2d(output_size=1)
        self.conv1 = conv1x1(
            in_channels=channels,
            out_channels=squeezed_channels,
            bias=True)
        self.activ = get_activation_layer(activation)
        self.conv2 = conv1x1(
            in_channels=squeezed_channels,
            out_channels=channels,
            bias=True)
        self.sigmoid = HSigmoid() if approx_sigmoid else nn.Sigmoid()
    def forward(self, x):
        # Squeeze: pool to one value per channel; excite: two 1x1 convs
        # produce a per-channel gate in (0, 1) that rescales the input.
        w = self.sigmoid(self.conv2(self.activ(self.conv1(self.pool(x)))))
        return x * w
class IBN(nn.Module):
    """
    Instance-Batch Normalization block from 'Two at Once: Enhancing Learning and Generalization Capacities via IBN-Net,'
    https://arxiv.org/abs/1807.09441.
    Parameters:
    ----------
    channels : int
        Number of channels.
    first_fraction : float, default 0.5
        The first fraction of channels for normalization.
        (Docstring previously named this parameter `inst_fraction`, which
        does not exist in the signature.)
    inst_first : bool, default True
        Whether instance normalization be on the first part of channels.
    """
    def __init__(self,
                 channels,
                 first_fraction=0.5,
                 inst_first=True):
        super(IBN, self).__init__()
        self.inst_first = inst_first
        h1_channels = int(math.floor(channels * first_fraction))
        h2_channels = channels - h1_channels
        self.split_sections = [h1_channels, h2_channels]
        if self.inst_first:
            self.inst_norm = nn.InstanceNorm2d(
                num_features=h1_channels,
                affine=True)
            self.batch_norm = nn.BatchNorm2d(num_features=h2_channels)
        else:
            self.batch_norm = nn.BatchNorm2d(num_features=h1_channels)
            self.inst_norm = nn.InstanceNorm2d(
                num_features=h2_channels,
                affine=True)
    def forward(self, x):
        # Normalize the two channel halves independently, then re-join.
        x1, x2 = torch.split(x, split_size_or_sections=self.split_sections, dim=1)
        if self.inst_first:
            x1 = self.inst_norm(x1.contiguous())
            x2 = self.batch_norm(x2.contiguous())
        else:
            x1 = self.batch_norm(x1.contiguous())
            x2 = self.inst_norm(x2.contiguous())
        x = torch.cat((x1, x2), dim=1)
        return x
class Identity(nn.Module):
    """
    Identity block: returns its input unchanged.
    """
    def __init__(self):
        super(Identity, self).__init__()
    def forward(self, x):
        # Deliberate no-op; used as a placeholder in module graphs.
        return x
class DualPathSequential(nn.Sequential):
    """
    A sequential container for modules carrying two tensors through the
    pipeline. Modules run in insertion order; the first `first_ordinals`
    and last `last_ordinals` modules are treated as ordinary single-input
    modules via `dual_path_scheme_ordinal`.
    Parameters:
    ----------
    return_two : bool, default True
        Whether to return both outputs after execution.
    first_ordinals : int, default 0
        Number of the first modules with single input/output.
    last_ordinals : int, default 0
        Number of the final modules with single input/output.
    dual_path_scheme : function
        Scheme of dual path response for a module.
    dual_path_scheme_ordinal : function
        Scheme of dual path response for an ordinal module.
    """
    def __init__(self,
                 return_two=True,
                 first_ordinals=0,
                 last_ordinals=0,
                 dual_path_scheme=(lambda module, x1, x2: module(x1, x2)),
                 dual_path_scheme_ordinal=(lambda module, x1, x2: (module(x1), x2))):
        super(DualPathSequential, self).__init__()
        self.return_two = return_two
        self.first_ordinals = first_ordinals
        self.last_ordinals = last_ordinals
        self.dual_path_scheme = dual_path_scheme
        self.dual_path_scheme_ordinal = dual_path_scheme_ordinal
    def forward(self, x1, x2=None):
        children = list(self._modules.values())
        n = len(children)
        for idx, child in enumerate(children):
            # Head/tail modules only see x1; the middle ones see both paths.
            is_ordinal = (idx < self.first_ordinals) or (idx >= n - self.last_ordinals)
            scheme = self.dual_path_scheme_ordinal if is_ordinal else self.dual_path_scheme
            x1, x2 = scheme(child, x1, x2)
        return (x1, x2) if self.return_two else x1
class Concurrent(nn.Sequential):
    """
    Container that feeds the same input to every child module and joins
    the per-branch results along `axis`.
    Parameters:
    ----------
    axis : int, default 1
        The axis on which to concatenate the outputs.
    stack : bool, default False
        Whether to join the branch outputs along a new dimension
        (torch.stack) instead of an existing one (torch.cat).
    """
    def __init__(self,
                 axis=1,
                 stack=False):
        super(Concurrent, self).__init__()
        self.axis = axis
        self.stack = stack
    def forward(self, x):
        branches = tuple(child(x) for child in self._modules.values())
        combine = torch.stack if self.stack else torch.cat
        return combine(branches, dim=self.axis)
class ParametricSequential(nn.Sequential):
    """
    Sequential container whose children accept extra keyword arguments;
    the kwargs are forwarded unchanged to every module in order.
    """
    def __init__(self, *args):
        super(ParametricSequential, self).__init__(*args)
    def forward(self, x, **kwargs):
        for child in self._modules.values():
            x = child(x, **kwargs)
        return x
class ParametricConcurrent(nn.Sequential):
    """
    Concatenating container whose children accept extra keyword arguments,
    forwarded unchanged to every branch.
    Parameters:
    ----------
    axis : int, default 1
        The axis on which to concatenate the outputs.
    """
    def __init__(self, axis=1):
        super(ParametricConcurrent, self).__init__()
        self.axis = axis
    def forward(self, x, **kwargs):
        branches = [child(x, **kwargs) for child in self._modules.values()]
        return torch.cat(tuple(branches), dim=self.axis)
class Hourglass(nn.Module):
    """
    A hourglass block.
    Parameters:
    ----------
    down_seq : nn.Sequential
        Down modules as sequential.
    up_seq : nn.Sequential
        Up modules as sequential. Applied in reverse order relative to
        their depth (see forward).
    skip_seq : nn.Sequential
        Skip connection modules as sequential, one per level.
    merge_type : str, default 'add'
        Type of concatenation of up and skip outputs ('add' only).
    return_first_skip : bool, default False
        Whether return the first skip connection output. Used in ResAttNet.
    """
    def __init__(self,
                 down_seq,
                 up_seq,
                 skip_seq,
                 merge_type="add",
                 return_first_skip=False):
        super(Hourglass, self).__init__()
        # All three branches must have one module per level.
        assert (len(up_seq) == len(down_seq))
        assert (len(skip_seq) == len(down_seq))
        assert (merge_type in ["add"])
        self.merge_type = merge_type
        self.return_first_skip = return_first_skip
        self.depth = len(down_seq)
        self.down_seq = down_seq
        self.up_seq = up_seq
        self.skip_seq = skip_seq
    def forward(self, x, **kwargs):
        y = None
        # Downsampling pass: keep the input plus every intermediate output
        # so the upsampling pass can tap the matching resolution.
        down_outs = [x]
        for down_module in self.down_seq._modules.values():
            x = down_module(x)
            down_outs.append(x)
        # Upsampling pass over depth+1 levels: at each level (except the
        # deepest, i == 0) run the skip module on the saved feature map of
        # matching depth and merge, then (except at the last level) upsample.
        for i in range(len(down_outs)):
            if i != 0:
                y = down_outs[self.depth - i]
                skip_module = self.skip_seq[self.depth - i]
                y = skip_module(y)
            # NOTE: a skip module may return None to disable the merge.
            if (y is not None) and (self.merge_type == "add"):
                x = x + y
            if i != len(down_outs) - 1:
                # up_seq is indexed deepest-first here.
                up_module = self.up_seq[self.depth - 1 - i]
                x = up_module(x)
        if self.return_first_skip:
            # y holds the output of the last (shallowest) skip module.
            return x, y
        else:
            return x
class SesquialteralHourglass(nn.Module):
    """
    A sesquialteral hourglass block: a full down/up hourglass followed by a
    second downsampling pass, with skip connections taken around both halves.
    Parameters:
    ----------
    down1_seq : nn.Sequential
        The first down modules as sequential.
    skip1_seq : nn.Sequential
        The first skip connection modules as sequential (one more module
        than down1_seq: one per resolution level).
    up_seq : nn.Sequential
        Up modules as sequential.
    skip2_seq : nn.Sequential
        The second skip connection modules as sequential.
    down2_seq : nn.Sequential
        The second down modules as sequential.
    merge_type : str, default 'cat'
        Type of concatenation of up and skip outputs ('cat' or 'add').
    """
    def __init__(self,
                 down1_seq,
                 skip1_seq,
                 up_seq,
                 skip2_seq,
                 down2_seq,
                 merge_type="cat"):
        super(SesquialteralHourglass, self).__init__()
        # All passes share the same depth; skip sequences have depth+1
        # modules (one per level including the input level).
        assert (len(down1_seq) == len(up_seq))
        assert (len(down1_seq) == len(down2_seq))
        assert (len(skip1_seq) == len(skip2_seq))
        assert (len(down1_seq) == len(skip1_seq) - 1)
        assert (merge_type in ["cat", "add"])
        self.merge_type = merge_type
        self.depth = len(down1_seq)
        self.down1_seq = down1_seq
        self.skip1_seq = skip1_seq
        self.up_seq = up_seq
        self.skip2_seq = skip2_seq
        self.down2_seq = down2_seq
    def _merge(self, x, y):
        # Join a skip output into the main path; y may be None (no-op).
        if y is not None:
            if self.merge_type == "cat":
                x = torch.cat((x, y), dim=1)
            elif self.merge_type == "add":
                x = x + y
        return x
    def forward(self, x, **kwargs):
        # First downsampling pass: record skip1 outputs at every level.
        y = self.skip1_seq[0](x)
        skip1_outs = [y]
        for i in range(self.depth):
            x = self.down1_seq[i](x)
            y = self.skip1_seq[i + 1](x)
            skip1_outs.append(y)
        # Upsampling pass: merge the matching skip1 output at each level
        # and record skip2 outputs for the second downsampling pass.
        x = skip1_outs[self.depth]
        y = self.skip2_seq[0](x)
        skip2_outs = [y]
        for i in range(self.depth):
            x = self.up_seq[i](x)
            y = skip1_outs[self.depth - 1 - i]
            x = self._merge(x, y)
            y = self.skip2_seq[i + 1](x)
            skip2_outs.append(y)
        # Second downsampling pass: merge the matching skip2 outputs.
        x = self.skip2_seq[self.depth](x)
        for i in range(self.depth):
            x = self.down2_seq[i](x)
            y = skip2_outs[self.depth - 1 - i]
            x = self._merge(x, y)
        return x
class MultiOutputSequential(nn.Sequential):
    """
    Sequential container that returns, besides the final output, the output
    of every child module flagged with a truthy `do_output` attribute.
    The result is a list: [final_output, tap_1, tap_2, ...].
    """
    def __init__(self):
        super(MultiOutputSequential, self).__init__()
    def forward(self, x):
        side_outputs = []
        for child in self._modules.values():
            x = child(x)
            # Same truthiness semantics as hasattr(...) and module.do_output.
            if getattr(child, "do_output", False):
                side_outputs.append(x)
        return [x] + side_outputs
class Flatten(nn.Module):
    """
    Flattens every dimension after the batch dimension into one.
    Uses `reshape` instead of `view` so that non-contiguous inputs
    (e.g. the result of a transpose) are handled as well; for contiguous
    inputs `reshape` returns the same view, so behavior is unchanged.
    """
    def forward(self, x):
        return x.reshape(x.size(0), -1)
| 30.111212 | 120 | 0.58283 |
__all__ = ['HSwish', 'get_activation_layer', 'conv1x1', 'conv3x3', 'depthwise_conv3x3', 'ConvBlock', 'conv1x1_block',
'conv3x3_block', 'conv7x7_block', 'dwconv3x3_block', 'dwconv5x5_block', 'PreConvBlock', 'pre_conv1x1_block',
'pre_conv3x3_block', 'ChannelShuffle', 'ChannelShuffle2', 'SEBlock', 'IBN', 'Identity', 'DualPathSequential',
'Concurrent', 'ParametricSequential', 'ParametricConcurrent', 'Hourglass', 'SesquialteralHourglass',
'MultiOutputSequential', 'Flatten']
import math
from inspect import isfunction
import torch
import torch.nn as nn
import torch.nn.functional as F
class Swish(nn.Module):
    """
    Swish activation function from 'Searching for Activation Functions,'
    https://arxiv.org/abs/1710.05941: x * sigmoid(x).
    """
    def forward(self, x):
        return x * torch.sigmoid(x)
class HSigmoid(nn.Module):
    """
    Approximated (hard) sigmoid function: relu6(x + 3) / 6.
    """
    def forward(self, x):
        # NOTE: inplace=True mutates the intermediate (x + 3.0) tensor only,
        # not the caller's input.
        return F.relu6(x + 3.0, inplace=True) / 6.0
class HSwish(nn.Module):
    """
    H-Swish activation function: x * relu6(x + 3) / 6, a piecewise-linear
    approximation of Swish.
    Parameters:
    ----------
    inplace : bool, default False
        Whether the internal ReLU6 operates in-place on its input.
    """
    def __init__(self, inplace=False):
        super(HSwish, self).__init__()
        self.inplace = inplace
    def forward(self, x):
        return x * F.relu6(x + 3.0, inplace=self.inplace) / 6.0
def get_activation_layer(activation):
    """
    Create an activation layer from a factory function, a name string
    ('relu', 'relu6', 'swish', 'hswish'), or an existing nn.Module.
    Parameters:
    ----------
    activation : function, or str, or nn.Module
        Activation function, name of activation function, or module instance.
    Returns
    -------
    nn.Module
        Activation layer.
    """
    assert (activation is not None)
    if isfunction(activation):
        # A factory (usually a lambda) producing a fresh module instance.
        return activation()
    elif isinstance(activation, str):
        if activation == "relu":
            return nn.ReLU(inplace=True)
        elif activation == "relu6":
            return nn.ReLU6(inplace=True)
        elif activation == "swish":
            return Swish()
        elif activation == "hswish":
            return HSwish(inplace=True)
        else:
            raise NotImplementedError()
    else:
        # Already-instantiated module: pass through unchanged.
        assert (isinstance(activation, nn.Module))
        return activation
def conv1x1(in_channels,
            out_channels,
            stride=1,
            groups=1,
            bias=False):
    """
    Convolution 1x1 layer (pointwise convolution).
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    groups : int, default 1
        Number of groups.
    bias : bool, default False
        Whether the layer uses a bias vector.
    """
    return nn.Conv2d(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=1,
        stride=stride,
        groups=groups,
        bias=bias)
def conv3x3(in_channels,
            out_channels,
            stride=1,
            padding=1,
            dilation=1,
            groups=1,
            bias=False):
    """
    Convolution 3x3 layer.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    padding : int or tuple/list of 2 int, default 1
        Padding value for convolution layer.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for convolution layer.
    groups : int, default 1
        Number of groups.
    bias : bool, default False
        Whether the layer uses a bias vector.
    """
    return nn.Conv2d(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=3,
        stride=stride,
        padding=padding,
        dilation=dilation,
        groups=groups,
        bias=bias)
def depthwise_conv3x3(channels,
                      stride):
    """
    Depthwise convolution 3x3 layer (one group per channel).
    Parameters:
    ----------
    channels : int
        Number of input/output channels.
    stride : int or tuple/list of 2 int
        Strides of the convolution.
    """
    return nn.Conv2d(
        in_channels=channels,
        out_channels=channels,
        kernel_size=3,
        stride=stride,
        padding=1,
        groups=channels,
        bias=False)
class ConvBlock(nn.Module):
    """
    Standard convolution block: convolution, optional Batch normalization,
    and optional activation.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    kernel_size : int or tuple/list of 2 int
        Convolution window size.
    stride : int or tuple/list of 2 int
        Strides of the convolution.
    padding : int or tuple/list of 2 int
        Padding value for convolution layer.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for convolution layer.
    groups : int, default 1
        Number of groups.
    bias : bool, default False
        Whether the layer uses a bias vector.
    use_bn : bool, default True
        Whether to use BatchNorm layer.
    bn_eps : float, default 1e-5
        Small float added to variance in Batch norm.
    activation : function or str or None, default nn.ReLU(inplace=True)
        Activation function or name of activation function; None disables
        the activation entirely.
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride,
                 padding,
                 dilation=1,
                 groups=1,
                 bias=False,
                 use_bn=True,
                 bn_eps=1e-5,
                 activation=(lambda: nn.ReLU(inplace=True))):
        super(ConvBlock, self).__init__()
        self.activate = (activation is not None)
        self.use_bn = use_bn
        self.conv = nn.Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            dilation=dilation,
            groups=groups,
            bias=bias)
        if self.use_bn:
            self.bn = nn.BatchNorm2d(
                num_features=out_channels,
                eps=bn_eps)
        if self.activate:
            self.activ = get_activation_layer(activation)
    def forward(self, x):
        # Conv -> (BN) -> (activation).
        x = self.conv(x)
        if self.use_bn:
            x = self.bn(x)
        if self.activate:
            x = self.activ(x)
        return x
def conv1x1_block(in_channels,
                  out_channels,
                  stride=1,
                  padding=0,
                  groups=1,
                  bias=False,
                  bn_eps=1e-5,
                  activation=(lambda: nn.ReLU(inplace=True))):
    """
    1x1 version of the standard convolution block.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    padding : int or tuple/list of 2 int, default 0
        Padding value for convolution layer.
    groups : int, default 1
        Number of groups.
    bias : bool, default False
        Whether the layer uses a bias vector.
    bn_eps : float, default 1e-5
        Small float added to variance in Batch norm.
    activation : function or str or None, default nn.ReLU(inplace=True)
        Activation function or name of activation function.
    """
    return ConvBlock(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=1,
        stride=stride,
        padding=padding,
        groups=groups,
        bias=bias,
        bn_eps=bn_eps,
        activation=activation)
def conv3x3_block(in_channels,
                  out_channels,
                  stride=1,
                  padding=1,
                  dilation=1,
                  groups=1,
                  bias=False,
                  use_bn=True,
                  bn_eps=1e-5,
                  activation=(lambda: nn.ReLU(inplace=True))):
    """
    3x3 version of the standard convolution block.
    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    stride : int or tuple/list of 2 int, default 1
        Strides of the convolution.
    padding : int or tuple/list of 2 int, default 1
        Padding value for convolution layer.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for convolution layer.
    groups : int, default 1
        Number of groups.
    bias : bool, default False
        Whether the layer uses a bias vector.
    use_bn : bool, default True
        Whether to use BatchNorm layer.
    bn_eps : float, default 1e-5
        Small float added to variance in Batch norm.
    activation : function or str or None, default nn.ReLU(inplace=True)
        Activation function or name of activation function.
    """
    return ConvBlock(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=3,
        stride=stride,
        padding=padding,
        dilation=dilation,
        groups=groups,
        bias=bias,
        use_bn=use_bn,
        bn_eps=bn_eps,
        activation=activation)
def conv5x5_block(in_channels,
out_channels,
stride=1,
padding=2,
dilation=1,
groups=1,
bias=False,
bn_eps=1e-5,
activation=(lambda: nn.ReLU(inplace=True))):
return ConvBlock(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=5,
stride=stride,
padding=padding,
dilation=dilation,
groups=groups,
bias=bias,
bn_eps=bn_eps,
activation=activation)
def conv7x7_block(in_channels,
out_channels,
stride=1,
padding=3,
bias=False,
activation=(lambda: nn.ReLU(inplace=True))):
return ConvBlock(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=7,
stride=stride,
padding=padding,
bias=bias,
activation=activation)
def dwconv3x3_block(in_channels,
out_channels,
stride=1,
padding=1,
dilation=1,
bias=False,
bn_eps=1e-5,
activation=(lambda: nn.ReLU(inplace=True))):
return conv3x3_block(
in_channels=in_channels,
out_channels=out_channels,
stride=stride,
padding=padding,
dilation=dilation,
groups=out_channels,
bias=bias,
bn_eps=bn_eps,
activation=activation)
def dwconv5x5_block(in_channels,
out_channels,
stride=1,
padding=2,
dilation=1,
bias=False,
bn_eps=1e-5,
activation=(lambda: nn.ReLU(inplace=True))):
return conv5x5_block(
in_channels=in_channels,
out_channels=out_channels,
stride=stride,
padding=padding,
dilation=dilation,
groups=out_channels,
bias=bias,
bn_eps=bn_eps,
activation=activation)
class PreConvBlock(nn.Module):
def __init__(self,
in_channels,
out_channels,
kernel_size,
stride,
padding,
dilation=1,
bias=False,
return_preact=False,
activate=True):
super(PreConvBlock, self).__init__()
self.return_preact = return_preact
self.activate = activate
self.bn = nn.BatchNorm2d(num_features=in_channels)
if self.activate:
self.activ = nn.ReLU(inplace=True)
self.conv = nn.Conv2d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding,
dilation=dilation,
bias=bias)
def forward(self, x):
x = self.bn(x)
if self.activate:
x = self.activ(x)
if self.return_preact:
x_pre_activ = x
x = self.conv(x)
if self.return_preact:
return x, x_pre_activ
else:
return x
def pre_conv1x1_block(in_channels,
out_channels,
stride=1,
bias=False,
return_preact=False,
activate=True):
return PreConvBlock(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=1,
stride=stride,
padding=0,
bias=bias,
return_preact=return_preact,
activate=activate)
def pre_conv3x3_block(in_channels,
out_channels,
stride=1,
padding=1,
dilation=1,
return_preact=False,
activate=True):
return PreConvBlock(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=3,
stride=stride,
padding=padding,
dilation=dilation,
return_preact=return_preact,
activate=activate)
def channel_shuffle(x,
groups):
batch, channels, height, width = x.size()
channels_per_group = channels // groups
x = x.view(batch, groups, channels_per_group, height, width)
x = torch.transpose(x, 1, 2).contiguous()
x = x.view(batch, channels, height, width)
return x
class ChannelShuffle(nn.Module):
def __init__(self,
channels,
groups):
super(ChannelShuffle, self).__init__()
if channels % groups != 0:
raise ValueError('channels must be divisible by groups')
self.groups = groups
def forward(self, x):
return channel_shuffle(x, self.groups)
def channel_shuffle2(x,
groups):
batch, channels, height, width = x.size()
channels_per_group = channels // groups
x = x.view(batch, channels_per_group, groups, height, width)
x = torch.transpose(x, 1, 2).contiguous()
x = x.view(batch, channels, height, width)
return x
class ChannelShuffle2(nn.Module):
def __init__(self,
channels,
groups):
super(ChannelShuffle2, self).__init__()
if channels % groups != 0:
raise ValueError('channels must be divisible by groups')
self.groups = groups
def forward(self, x):
return channel_shuffle2(x, self.groups)
class SEBlock(nn.Module):
def __init__(self,
channels,
reduction=16,
approx_sigmoid=False,
activation=(lambda: nn.ReLU(inplace=True))):
super(SEBlock, self).__init__()
mid_cannels = channels // reduction
self.pool = nn.AdaptiveAvgPool2d(output_size=1)
self.conv1 = conv1x1(
in_channels=channels,
out_channels=mid_cannels,
bias=True)
self.activ = get_activation_layer(activation)
self.conv2 = conv1x1(
in_channels=mid_cannels,
out_channels=channels,
bias=True)
self.sigmoid = HSigmoid() if approx_sigmoid else nn.Sigmoid()
def forward(self, x):
w = self.pool(x)
w = self.conv1(w)
w = self.activ(w)
w = self.conv2(w)
w = self.sigmoid(w)
x = x * w
return x
class IBN(nn.Module):
def __init__(self,
channels,
first_fraction=0.5,
inst_first=True):
super(IBN, self).__init__()
self.inst_first = inst_first
h1_channels = int(math.floor(channels * first_fraction))
h2_channels = channels - h1_channels
self.split_sections = [h1_channels, h2_channels]
if self.inst_first:
self.inst_norm = nn.InstanceNorm2d(
num_features=h1_channels,
affine=True)
self.batch_norm = nn.BatchNorm2d(num_features=h2_channels)
else:
self.batch_norm = nn.BatchNorm2d(num_features=h1_channels)
self.inst_norm = nn.InstanceNorm2d(
num_features=h2_channels,
affine=True)
def forward(self, x):
x1, x2 = torch.split(x, split_size_or_sections=self.split_sections, dim=1)
if self.inst_first:
x1 = self.inst_norm(x1.contiguous())
x2 = self.batch_norm(x2.contiguous())
else:
x1 = self.batch_norm(x1.contiguous())
x2 = self.inst_norm(x2.contiguous())
x = torch.cat((x1, x2), dim=1)
return x
class Identity(nn.Module):
def __init__(self):
super(Identity, self).__init__()
def forward(self, x):
return x
class DualPathSequential(nn.Sequential):
def __init__(self,
return_two=True,
first_ordinals=0,
last_ordinals=0,
dual_path_scheme=(lambda module, x1, x2: module(x1, x2)),
dual_path_scheme_ordinal=(lambda module, x1, x2: (module(x1), x2))):
super(DualPathSequential, self).__init__()
self.return_two = return_two
self.first_ordinals = first_ordinals
self.last_ordinals = last_ordinals
self.dual_path_scheme = dual_path_scheme
self.dual_path_scheme_ordinal = dual_path_scheme_ordinal
def forward(self, x1, x2=None):
length = len(self._modules.values())
for i, module in enumerate(self._modules.values()):
if (i < self.first_ordinals) or (i >= length - self.last_ordinals):
x1, x2 = self.dual_path_scheme_ordinal(module, x1, x2)
else:
x1, x2 = self.dual_path_scheme(module, x1, x2)
if self.return_two:
return x1, x2
else:
return x1
class Concurrent(nn.Sequential):
def __init__(self,
axis=1,
stack=False):
super(Concurrent, self).__init__()
self.axis = axis
self.stack = stack
def forward(self, x):
out = []
for module in self._modules.values():
out.append(module(x))
if self.stack:
out = torch.stack(tuple(out), dim=self.axis)
else:
out = torch.cat(tuple(out), dim=self.axis)
return out
class ParametricSequential(nn.Sequential):
def __init__(self, *args):
super(ParametricSequential, self).__init__(*args)
def forward(self, x, **kwargs):
for module in self._modules.values():
x = module(x, **kwargs)
return x
class ParametricConcurrent(nn.Sequential):
def __init__(self, axis=1):
super(ParametricConcurrent, self).__init__()
self.axis = axis
def forward(self, x, **kwargs):
out = []
for module in self._modules.values():
out.append(module(x, **kwargs))
out = torch.cat(tuple(out), dim=self.axis)
return out
class Hourglass(nn.Module):
def __init__(self,
down_seq,
up_seq,
skip_seq,
merge_type="add",
return_first_skip=False):
super(Hourglass, self).__init__()
assert (len(up_seq) == len(down_seq))
assert (len(skip_seq) == len(down_seq))
assert (merge_type in ["add"])
self.merge_type = merge_type
self.return_first_skip = return_first_skip
self.depth = len(down_seq)
self.down_seq = down_seq
self.up_seq = up_seq
self.skip_seq = skip_seq
def forward(self, x, **kwargs):
y = None
down_outs = [x]
for down_module in self.down_seq._modules.values():
x = down_module(x)
down_outs.append(x)
for i in range(len(down_outs)):
if i != 0:
y = down_outs[self.depth - i]
skip_module = self.skip_seq[self.depth - i]
y = skip_module(y)
if (y is not None) and (self.merge_type == "add"):
x = x + y
if i != len(down_outs) - 1:
up_module = self.up_seq[self.depth - 1 - i]
x = up_module(x)
if self.return_first_skip:
return x, y
else:
return x
class SesquialteralHourglass(nn.Module):
def __init__(self,
down1_seq,
skip1_seq,
up_seq,
skip2_seq,
down2_seq,
merge_type="cat"):
super(SesquialteralHourglass, self).__init__()
assert (len(down1_seq) == len(up_seq))
assert (len(down1_seq) == len(down2_seq))
assert (len(skip1_seq) == len(skip2_seq))
assert (len(down1_seq) == len(skip1_seq) - 1)
assert (merge_type in ["cat", "add"])
self.merge_type = merge_type
self.depth = len(down1_seq)
self.down1_seq = down1_seq
self.skip1_seq = skip1_seq
self.up_seq = up_seq
self.skip2_seq = skip2_seq
self.down2_seq = down2_seq
def _merge(self, x, y):
if y is not None:
if self.merge_type == "cat":
x = torch.cat((x, y), dim=1)
elif self.merge_type == "add":
x = x + y
return x
def forward(self, x, **kwargs):
y = self.skip1_seq[0](x)
skip1_outs = [y]
for i in range(self.depth):
x = self.down1_seq[i](x)
y = self.skip1_seq[i + 1](x)
skip1_outs.append(y)
x = skip1_outs[self.depth]
y = self.skip2_seq[0](x)
skip2_outs = [y]
for i in range(self.depth):
x = self.up_seq[i](x)
y = skip1_outs[self.depth - 1 - i]
x = self._merge(x, y)
y = self.skip2_seq[i + 1](x)
skip2_outs.append(y)
x = self.skip2_seq[self.depth](x)
for i in range(self.depth):
x = self.down2_seq[i](x)
y = skip2_outs[self.depth - 1 - i]
x = self._merge(x, y)
return x
class MultiOutputSequential(nn.Sequential):
def __init__(self):
super(MultiOutputSequential, self).__init__()
def forward(self, x):
outs = []
for module in self._modules.values():
x = module(x)
if hasattr(module, "do_output") and module.do_output:
outs.append(x)
return [x] + outs
class Flatten(nn.Module):
def forward(self, x):
return x.view(x.size(0), -1)
| true | true |
f7251df99e90dfe166f327c077139176b5b8516b | 13,102 | py | Python | tests/parameters_t.py | pan-dora/loris | bf6295eeddd8ae5820c793f28aaa57b5816bca08 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | tests/parameters_t.py | pan-dora/loris | bf6295eeddd8ae5820c793f28aaa57b5816bca08 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 1 | 2019-08-04T10:50:35.000Z | 2019-08-04T16:37:36.000Z | tests/parameters_t.py | pan-dora/loris | bf6295eeddd8ae5820c793f28aaa57b5816bca08 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 1 | 2019-08-04T03:19:31.000Z | 2019-08-04T03:19:31.000Z | # parameters_t.py
#-*- coding: utf-8 -*-
from decimal import Decimal
from loris import img_info
from loris.loris_exception import RequestException
from loris.loris_exception import SyntaxException
from loris.parameters import DECIMAL_ONE
from loris.parameters import FULL_MODE
from loris.parameters import PCT_MODE
from loris.parameters import PIXEL_MODE
from loris.parameters import RegionParameter
from loris.parameters import RotationParameter
from loris.parameters import SizeParameter
import loris_t
"""
Parameter object tests. To run this test on its own, do:
$ python -m unittest -v tests.parameters_t
from the `/loris` (not `/loris/loris`) directory.
"""
class _ParameterTest(loris_t.LorisTest):
    """Shared fixture helpers for the parameter test cases."""
    def _get_info_long_y(self):
        """Return ImageInfo for the colour JP2, whose long dimension is y."""
        # (Removed unused local `ident`: it was assigned but never read.)
        fp = self.test_jp2_color_fp
        fmt = self.test_jp2_color_fmt
        uri = self.test_jp2_color_uri
        return img_info.ImageInfo.from_image_file(uri, fp, fmt)
    def _get_info_long_x(self):
        """Return ImageInfo for the JPEG, whose long dimension is x."""
        # (Removed unused local `ident`: it was assigned but never read.)
        fp = self.test_jpeg_fp
        fmt = self.test_jpeg_fmt
        uri = self.test_jpeg_uri
        return img_info.ImageInfo.from_image_file(uri, fp, fmt)
class TestRegionParameter(_ParameterTest):
    """Tests for ``loris.parameters.RegionParameter``.

    Uses ``assertEqual`` (the ``assertEquals`` alias is deprecated).
    """

    def test_populate_slots_from_pct(self):
        info = self._get_info_long_y()
        rp = RegionParameter('pct:25,25,50,50', info)
        self.assertEqual(rp.pixel_x, int(info.width * 0.25))
        self.assertEqual(rp.pixel_y, int(info.height * 0.25))
        self.assertEqual(rp.pixel_w, int(info.width * 0.50))
        self.assertEqual(rp.pixel_h, int(info.height * 0.50))
        self.assertEqual(rp.decimal_x, Decimal('0.25'))
        self.assertEqual(rp.decimal_y, Decimal('0.25'))
        self.assertEqual(rp.decimal_w, Decimal('0.50'))
        self.assertEqual(rp.decimal_h, Decimal('0.50'))

    def test_populate_slots_from_pixel(self):
        info = self._get_info_long_x()
        rp = RegionParameter('797,900,1594,1600', info)
        self.assertEqual(rp.pixel_x, 797)
        self.assertEqual(rp.pixel_y, 900)
        self.assertEqual(rp.pixel_w, 1594)
        self.assertEqual(rp.pixel_h, 1600)
        self.assertEqual(rp.decimal_x, rp.pixel_x / Decimal(str(info.width)))
        self.assertEqual(rp.decimal_y, rp.pixel_y / Decimal(str(info.height)))
        self.assertEqual(rp.decimal_w, rp.pixel_w / Decimal(str(info.width)))
        self.assertEqual(rp.decimal_h, rp.pixel_h / Decimal(str(info.height)))

    def test_square_mode_long_y(self):
        # Image is 5906 x 7200; square region is centered vertically.
        info = self._get_info_long_y()
        rp = RegionParameter('square', info)
        self.assertEqual(rp.pixel_x, 0)
        self.assertEqual(rp.pixel_y, 647)
        self.assertEqual(rp.pixel_w, 5906)
        self.assertEqual(rp.pixel_h, 5906)

    def test_square_mode_long_x(self):
        # Image is 3600 x 2987; square region is centered horizontally.
        info = self._get_info_long_x()
        rp = RegionParameter('square', info)
        self.assertEqual(rp.pixel_x, 306)
        self.assertEqual(rp.pixel_y, 0)
        self.assertEqual(rp.pixel_w, 2987)
        self.assertEqual(rp.pixel_h, 2987)

    def test_canonical_uri_value_oob_w_pixel(self):
        info = self._get_info_long_x()  # x is long dimension
        x = 200
        offset = 1  # request extends this many pixels out of bounds
        oob_w = info.width - x + offset
        rp = RegionParameter('%d,13,%d,17' % (x, oob_w), info)
        expected_canonical = '%d,13,%d,17' % (x, info.width - x)
        # Note that the below will need to be changed if decimal precision
        # is changed (currently 25 places).
        self.assertEqual(rp.decimal_w, Decimal('0.9444444444444444444444444'))
        self.assertEqual(rp.canonical_uri_value, expected_canonical)

    def test_canonical_uri_value_oob_w_pct(self):
        info = self._get_info_long_y()  # y is long dimension
        x = 20
        w = 81  # 20 + 81 > 100%, so width must be clamped to 80%
        rp = RegionParameter('pct:%d,13,%d,27' % (x, w), info)
        self.assertEqual(rp.decimal_w, Decimal('0.8'))
        expected_canonical = '1181,936,4725,1944'
        self.assertEqual(rp.canonical_uri_value, expected_canonical)

    def test_canonical_uri_value_oob_y_pixel(self):
        info = self._get_info_long_y()  # y is long dimension
        y = 300
        offset = 1  # request would be this many pixels OOB
        oob_h = info.height - y + offset
        rp = RegionParameter('29,%d,31,%d' % (y, oob_h), info)
        expected_canonical = '29,%d,31,%d' % (y, info.height - y)
        self.assertEqual(rp.canonical_uri_value, expected_canonical)

    def test_canonical_uri_value_oob_y_pct(self):
        info = self._get_info_long_x()  # x is long dimension
        y = 28.3
        h = 72.2  # 28.3 + 72.2 > 100%, so height is clamped
        rp = RegionParameter('pct:13,%f,17,%f' % (y, h), info)
        expected_canonical = '468,845,612,2142'
        self.assertEqual(rp.canonical_uri_value, expected_canonical)

    def test_syntax_exceptions(self):
        info = self._get_info_long_y()
        try:
            with self.assertRaises(SyntaxException):
                RegionParameter('n:1,2,3,4', info)
            with self.assertRaises(SyntaxException):
                RegionParameter('1,2,3,q', info)
            with self.assertRaises(SyntaxException):
                RegionParameter('1,2,3', info)
            with self.assertRaises(SyntaxException):
                RegionParameter('something', info)
        except TypeError:  # Python < 2.7: assertRaises not a context manager
            # BUGFIX: the fallback previously tested 'something' twice and
            # never tested 'n:1,2,3,4'; it now mirrors the block above.
            self.assertRaises(SyntaxException, RegionParameter, 'n:1,2,3,4', info)
            self.assertRaises(SyntaxException, RegionParameter, '1,2,3,q', info)
            self.assertRaises(SyntaxException, RegionParameter, '1,2,3', info)
            self.assertRaises(SyntaxException, RegionParameter, 'something', info)

    def test_request_exceptions(self):
        info = self._get_info_long_y()
        try:
            with self.assertRaises(RequestException):
                RegionParameter('1,2,0,3', info)
            with self.assertRaises(RequestException):
                RegionParameter('1,2,3,0', info)
            with self.assertRaises(RequestException):
                RegionParameter('pct:100,2,3,0', info)
        except TypeError:  # Python < 2.7
            self.assertRaises(RequestException, RegionParameter, '1,2,0,3', info)
            self.assertRaises(RequestException, RegionParameter, '1,2,3,0', info)
            self.assertRaises(RequestException, RegionParameter, 'pct:100,2,3,0', info)
class TestSizeParameter(_ParameterTest):
    """Tests for ``loris.parameters.SizeParameter``.

    Uses ``assertEqual`` (the ``assertEquals`` alias is deprecated).
    """

    def test_exceptions(self):
        info = self._get_info_long_y()
        rp = RegionParameter('pct:25,25,75,75', info)
        try:
            with self.assertRaises(SyntaxException):
                SizeParameter('!25,', rp)
            with self.assertRaises(SyntaxException):
                SizeParameter('!25', rp)
            with self.assertRaises(SyntaxException):
                SizeParameter('25', rp)
        except TypeError:  # Python < 2.7: assertRaises not a context manager
            self.assertRaises(SyntaxException, SizeParameter, '!25,', rp)
            self.assertRaises(SyntaxException, SizeParameter, '!25', rp)
            self.assertRaises(SyntaxException, SizeParameter, '25', rp)

    def test_populate_slots_from_full(self):
        # size == 'full'
        info = self._get_info_long_y()
        rp = RegionParameter('full', info)
        sp = SizeParameter('full', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, FULL_MODE)
        self.assertEqual(sp.canonical_uri_value, FULL_MODE)
        rp = RegionParameter('256,256,256,256', info)
        sp = SizeParameter('full', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, FULL_MODE)
        self.assertEqual(sp.canonical_uri_value, FULL_MODE)

    def test_populate_slots_from_pct(self):
        # size == 'pct:n'
        info = self._get_info_long_y()
        rp = RegionParameter('full', info)
        sp = SizeParameter('pct:25', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PCT_MODE)
        self.assertEqual(sp.canonical_uri_value, '1476,')
        rp = RegionParameter('256,256,256,256', info)
        sp = SizeParameter('pct:25', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PCT_MODE)
        self.assertEqual(sp.canonical_uri_value, '64,')
        rp = RegionParameter('pct:0,0,50,50', info)
        sp = SizeParameter('pct:25', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PCT_MODE)
        self.assertEqual(sp.canonical_uri_value, '738,')

    def test_populate_slots_from_w_only(self):
        # size == 'w,'
        info = self._get_info_long_y()
        rp = RegionParameter('full', info)
        sp = SizeParameter('180,', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '180,')
        rp = RegionParameter('200,300,500,600', info)
        sp = SizeParameter('125,', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '125,')
        self.assertEqual(type(sp.w), int)
        self.assertEqual(sp.w, 125)
        self.assertEqual(type(sp.h), int)
        self.assertEqual(sp.h, 150)

    def test_tiny_image(self):
        # A 1-pixel-wide request must not round dimensions down to zero.
        info = self._get_info_long_x()
        rp = RegionParameter('full', info)
        sp = SizeParameter('1,', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '1,')
        self.assertEqual(sp.w, 1)
        self.assertEqual(sp.h, 1)

    def test_populate_slots_from_h_only(self):
        # size == ',h'
        info = self._get_info_long_y()
        rp = RegionParameter('full', info)
        sp = SizeParameter(',90', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '73,')
        rp = RegionParameter('50,290,360,910', info)
        sp = SizeParameter(',275', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '108,')

    def test_populate_slots_from_wh(self):
        # size == 'w,h' (aspect ratio may be distorted)
        info = self._get_info_long_y()
        rp = RegionParameter('full', info)
        sp = SizeParameter('48,48', rp)
        self.assertEqual(sp.force_aspect, True)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '48,48')
        rp = RegionParameter('15,16,23,42', info)
        sp = SizeParameter('60,60', rp)  # upsample!
        self.assertEqual(sp.force_aspect, True)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '60,60')

    def test_populate_slots_from_bang_wh(self):
        # size == '!w,h' (best fit inside w x h, aspect preserved)
        info = self._get_info_long_y()
        rp = RegionParameter('full', info)
        sp = SizeParameter('!120,140', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '114,')
        rp = RegionParameter('0,0,125,160', info)
        sp = SizeParameter('!120,140', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '109,')
        rp = RegionParameter('0,0,125,160', info)
        sp = SizeParameter('!130,140', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '109,')
        rp = RegionParameter('50,80,140,160', info)
        sp = SizeParameter('!130,180', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '130,')
        rp = RegionParameter('50,80,140,160', info)
        sp = SizeParameter('!145,165', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '144,')
        rp = RegionParameter('50,80,140,180', info)
        sp = SizeParameter('!145,185', rp)
        self.assertEqual(sp.force_aspect, False)
        self.assertEqual(sp.mode, PIXEL_MODE)
        self.assertEqual(sp.canonical_uri_value, '143,')
class TestRotationParameter(_ParameterTest):
    """Tests for ``loris.parameters.RotationParameter``.

    Uses ``assertEqual`` (the ``assertEquals`` alias is deprecated).
    """

    def test_exceptions(self):
        # Non-numeric, out-of-range [0, 360], and negative values must raise.
        try:
            with self.assertRaises(SyntaxException):
                RotationParameter('a')
            with self.assertRaises(SyntaxException):
                RotationParameter('361')
            with self.assertRaises(SyntaxException):
                RotationParameter('-1')
            with self.assertRaises(SyntaxException):
                RotationParameter('!-1')
            with self.assertRaises(SyntaxException):
                RotationParameter('!361')
            with self.assertRaises(SyntaxException):
                RotationParameter('-0.1')
        except TypeError:  # Python < 2.7: assertRaises not a context manager
            self.assertRaises(SyntaxException, RotationParameter, 'a')
            self.assertRaises(SyntaxException, RotationParameter, '361')
            self.assertRaises(SyntaxException, RotationParameter, '-1')
            self.assertRaises(SyntaxException, RotationParameter, '!-1')
            self.assertRaises(SyntaxException, RotationParameter, '!361')
            self.assertRaises(SyntaxException, RotationParameter, '-0.1')

    def test_uri_value(self):
        rp = RotationParameter('0')
        self.assertEqual(rp.rotation, '0')
        rp = RotationParameter('46')
        self.assertEqual(rp.rotation, '46')
        rp = RotationParameter('180')
        self.assertEqual(rp.rotation, '180')

    def test_mirroring(self):
        # A leading '!' requests mirroring before rotation.
        rp = RotationParameter('180')
        self.assertFalse(rp.mirror)
        rp = RotationParameter('!180')
        self.assertTrue(rp.mirror)

    def test_c14n(self):
        # Canonical form strips trailing zeros / decimal points.
        rp = RotationParameter('42.10')
        self.assertEqual(rp.canonical_uri_value, '42.1')
        rp = RotationParameter('180.0')
        self.assertEqual(rp.canonical_uri_value, '180')
        rp = RotationParameter('!180.0')
        self.assertEqual(rp.canonical_uri_value, '!180')
        rp = RotationParameter('!180.10')
        self.assertEqual(rp.canonical_uri_value, '!180.1')
def suite():
    """Build the TestSuite covering all parameter test cases.

    Uses ``TestLoader.loadTestsFromTestCase`` instead of the deprecated
    ``unittest.makeSuite`` (removed in Python 3.13); both collect methods
    with the default ``test`` prefix, so the collected tests are identical.
    """
    import unittest
    loader = unittest.defaultTestLoader
    cases = (TestRegionParameter, TestSizeParameter, TestRotationParameter)
    return unittest.TestSuite(loader.loadTestsFromTestCase(c) for c in cases)
| 35.032086 | 78 | 0.735995 |
from decimal import Decimal
from loris import img_info
from loris.loris_exception import RequestException
from loris.loris_exception import SyntaxException
from loris.parameters import DECIMAL_ONE
from loris.parameters import FULL_MODE
from loris.parameters import PCT_MODE
from loris.parameters import PIXEL_MODE
from loris.parameters import RegionParameter
from loris.parameters import RotationParameter
from loris.parameters import SizeParameter
import loris_t
"""
Parameter object tests. To run this test on its own, do:
$ python -m unittest -v tests.parameters_t
from the `/loris` (not `/loris/loris`) directory.
"""
class _ParameterTest(loris_t.LorisTest):
def _get_info_long_y(self):
fp = self.test_jp2_color_fp
fmt = self.test_jp2_color_fmt
ident = self.test_jp2_color_id
uri = self.test_jp2_color_uri
return img_info.ImageInfo.from_image_file(uri, fp, fmt)
def _get_info_long_x(self):
fp = self.test_jpeg_fp
fmt = self.test_jpeg_fmt
ident = self.test_jpeg_id
uri = self.test_jpeg_uri
return img_info.ImageInfo.from_image_file(uri, fp, fmt)
class TestRegionParameter(_ParameterTest):
def test_populate_slots_from_pct(self):
info = self._get_info_long_y()
rp = RegionParameter('pct:25,25,50,50', info)
self.assertEquals(rp.pixel_x, int(info.width*0.25))
self.assertEquals(rp.pixel_y, int(info.height*0.25))
self.assertEquals(rp.pixel_w, int(info.width*0.50))
self.assertEquals(rp.pixel_h, int(info.height*0.50))
self.assertEquals(rp.decimal_x, Decimal('0.25'))
self.assertEquals(rp.decimal_y, Decimal('0.25'))
self.assertEquals(rp.decimal_w, Decimal('0.50'))
self.assertEquals(rp.decimal_h, Decimal('0.50'))
def test_populate_slots_from_pixel(self):
info = self._get_info_long_x()
rp = RegionParameter('797,900,1594,1600', info)
self.assertEquals(rp.pixel_x, 797)
self.assertEquals(rp.pixel_y, 900)
self.assertEquals(rp.pixel_w, 1594)
self.assertEquals(rp.pixel_h, 1600)
self.assertEquals(rp.decimal_x, rp.pixel_x / Decimal(str(info.width)))
self.assertEquals(rp.decimal_y, rp.pixel_y / Decimal(str(info.height)))
self.assertEquals(rp.decimal_w, rp.pixel_w / Decimal(str(info.width)))
self.assertEquals(rp.decimal_h, rp.pixel_h / Decimal(str(info.height)))
def test_square_mode_long_y(self):
info = self._get_info_long_y()
rp = RegionParameter('square', info)
self.assertEquals(rp.pixel_x, 0)
self.assertEquals(rp.pixel_y, 647)
self.assertEquals(rp.pixel_w, 5906)
self.assertEquals(rp.pixel_h, 5906)
def test_square_mode_long_x(self):
info = self._get_info_long_x()
rp = RegionParameter('square', info)
self.assertEquals(rp.pixel_x, 306)
self.assertEquals(rp.pixel_y, 0)
self.assertEquals(rp.pixel_w, 2987)
self.assertEquals(rp.pixel_h, 2987)
def test_canonical_uri_value_oob_w_pixel(self):
info = self._get_info_long_x()
x = 200
offset = 1
oob_w = info.width - x + offset
rp = RegionParameter('%d,13,%d,17' % (x,oob_w), info)
expected_canonical = '%d,13,%d,17' % (x, info.width - x)
self.assertEquals(rp.decimal_w, Decimal('0.9444444444444444444444444'))
self.assertEquals(rp.canonical_uri_value, expected_canonical)
def test_canonical_uri_value_oob_w_pct(self):
info = self._get_info_long_y()
x = 20
w = 81
rp = RegionParameter('pct:%d,13,%d,27' % (x,w), info)
self.assertEquals(rp.decimal_w, Decimal('0.8'))
expected_canonical = '1181,936,4725,1944'
self.assertEquals(rp.canonical_uri_value, expected_canonical)
def test_canonical_uri_value_oob_y_pixel(self):
info = self._get_info_long_y()
y = 300
offset = 1
oob_h = info.height - y + offset
rp = RegionParameter('29,%d,31,%d' % (y,oob_h), info)
expected_canonical = '29,%d,31,%d' % (y, info.height - y)
self.assertEquals(rp.canonical_uri_value, expected_canonical)
def test_canonical_uri_value_oob_y_pct(self):
info = self._get_info_long_x()
y = 28.3
h = 72.2
rp = RegionParameter('pct:13,%f,17,%f' % (y,h), info)
expected_canonical = '468,845,612,2142'
self.assertEquals(rp.canonical_uri_value, expected_canonical)
def test_syntax_exceptions(self):
info = self._get_info_long_y()
try:
with self.assertRaises(SyntaxException):
RegionParameter('n:1,2,3,4', info)
with self.assertRaises(SyntaxException):
RegionParameter('1,2,3,q', info)
with self.assertRaises(SyntaxException):
RegionParameter('1,2,3', info)
with self.assertRaises(SyntaxException):
RegionParameter('something', info)
except TypeError:
self.assertRaises(SyntaxException, RegionParameter, 'something', info)
self.assertRaises(SyntaxException, RegionParameter, '1,2,3,q', info)
self.assertRaises(SyntaxException, RegionParameter, '1,2,3', info)
self.assertRaises(SyntaxException, RegionParameter, 'something', info)
def test_request_exceptions(self):
info = self._get_info_long_y()
try:
with self.assertRaises(RequestException):
RegionParameter('1,2,0,3', info)
with self.assertRaises(RequestException):
RegionParameter('1,2,3,0', info)
with self.assertRaises(RequestException):
RegionParameter('pct:100,2,3,0', info)
except TypeError:
self.assertRaises(RequestException, RegionParameter, '1,2,0,3', info)
self.assertRaises(RequestException, RegionParameter, '1,2,3,0', info)
self.assertRaises(RequestException, RegionParameter, 'pct:100,2,3,0', info)
class TestSizeParameter(_ParameterTest):
def test_exceptions(self):
info = self._get_info_long_y()
rp = RegionParameter('pct:25,25,75,75', info)
try:
with self.assertRaises(SyntaxException):
SizeParameter('!25,',rp)
with self.assertRaises(SyntaxException):
SizeParameter('!25',rp)
with self.assertRaises(SyntaxException):
SizeParameter('25',rp)
except TypeError:
self.assertRaises(SyntaxException, SizeParameter, '!25,', rp)
self.assertRaises(SyntaxException, SizeParameter, '!25', rp)
self.assertRaises(SyntaxException, SizeParameter, '25', rp)
def test_populate_slots_from_full(self):
info = self._get_info_long_y()
rp = RegionParameter('full', info)
sp = SizeParameter('full',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, FULL_MODE)
self.assertEquals(sp.canonical_uri_value, FULL_MODE)
rp = RegionParameter('256,256,256,256', info)
sp = SizeParameter('full',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, FULL_MODE)
self.assertEquals(sp.canonical_uri_value, FULL_MODE)
def test_populate_slots_from_pct(self):
info = self._get_info_long_y()
rp = RegionParameter('full', info)
sp = SizeParameter('pct:25',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PCT_MODE)
self.assertEquals(sp.canonical_uri_value, '1476,')
rp = RegionParameter('256,256,256,256', info)
sp = SizeParameter('pct:25',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PCT_MODE)
self.assertEquals(sp.canonical_uri_value, '64,')
rp = RegionParameter('pct:0,0,50,50', info)
sp = SizeParameter('pct:25',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PCT_MODE)
self.assertEquals(sp.canonical_uri_value, '738,')
def test_populate_slots_from_w_only(self):
info = self._get_info_long_y()
rp = RegionParameter('full', info)
sp = SizeParameter('180,',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '180,')
rp = RegionParameter('200,300,500,600', info)
sp = SizeParameter('125,',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '125,')
self.assertEquals(type(sp.w), int)
self.assertEquals(sp.w, 125)
self.assertEquals(type(sp.h), int)
self.assertEquals(sp.h, 150)
def test_tiny_image(self):
info = self._get_info_long_x()
rp = RegionParameter('full', info)
sp = SizeParameter('1,', rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '1,')
self.assertEquals(sp.w, 1)
self.assertEquals(sp.h, 1)
def test_populate_slots_from_h_only(self):
info = self._get_info_long_y()
rp = RegionParameter('full', info)
sp = SizeParameter(',90',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '73,')
rp = RegionParameter('50,290,360,910', info)
sp = SizeParameter(',275',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '108,')
def test_populate_slots_from_wh(self):
info = self._get_info_long_y()
rp = RegionParameter('full', info)
sp = SizeParameter('48,48',rp)
self.assertEquals(sp.force_aspect, True)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '48,48')
rp = RegionParameter('15,16,23,42', info)
sp = SizeParameter('60,60',rp)
self.assertEquals(sp.force_aspect, True)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '60,60')
def test_populate_slots_from_bang_wh(self):
info = self._get_info_long_y()
rp = RegionParameter('full', info)
sp = SizeParameter('!120,140',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '114,')
rp = RegionParameter('0,0,125,160', info)
sp = SizeParameter('!120,140',rp,)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '109,')
rp = RegionParameter('0,0,125,160', info)
sp = SizeParameter('!130,140',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '109,')
rp = RegionParameter('50,80,140,160', info)
sp = SizeParameter('!130,180',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '130,')
rp = RegionParameter('50,80,140,160', info)
sp = SizeParameter('!145,165',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '144,')
rp = RegionParameter('50,80,140,180', info)
sp = SizeParameter('!145,185',rp)
self.assertEquals(sp.force_aspect, False)
self.assertEquals(sp.mode, PIXEL_MODE)
self.assertEquals(sp.canonical_uri_value, '143,')
class TestRotationParameter(_ParameterTest):
def test_exceptions(self):
try:
with self.assertRaises(SyntaxException):
rp = RotationParameter('a')
with self.assertRaises(SyntaxException):
rp = RotationParameter('361')
with self.assertRaises(SyntaxException):
rp = RotationParameter('-1')
with self.assertRaises(SyntaxException):
rp = RotationParameter('!-1')
with self.assertRaises(SyntaxException):
rp = RotationParameter('!361')
with self.assertRaises(SyntaxException):
rp = RotationParameter('-0.1')
except TypeError:
self.assertRaises(SyntaxException, RotationParameter, 'a')
self.assertRaises(SyntaxException, RotationParameter, '361')
self.assertRaises(SyntaxException, RotationParameter, '-1')
self.assertRaises(SyntaxException, RotationParameter, '!-1')
self.assertRaises(SyntaxException, RotationParameter, '!361')
self.assertRaises(SyntaxException, RotationParameter, '-0.1')
def test_uri_value(self):
rp = RotationParameter('0')
self.assertEquals(rp.rotation, '0')
rp = RotationParameter('46')
self.assertEquals(rp.rotation, '46')
rp = RotationParameter('180')
self.assertEquals(rp.rotation, '180')
def test_mirroring(self):
rp = RotationParameter('180')
self.assertFalse(rp.mirror)
rp = RotationParameter('!180')
self.assertTrue(rp.mirror)
def test_c14n(self):
rp = RotationParameter('42.10')
self.assertEquals(rp.canonical_uri_value, '42.1')
rp = RotationParameter('180.0')
self.assertEquals(rp.canonical_uri_value, '180')
rp = RotationParameter('!180.0')
self.assertEquals(rp.canonical_uri_value, '!180')
rp = RotationParameter('!180.10')
self.assertEquals(rp.canonical_uri_value, '!180.1')
def suite():
import unittest
test_suites = []
test_suites.append(unittest.makeSuite(TestRegionParameter, 'test'))
test_suites.append(unittest.makeSuite(TestSizeParameter, 'test'))
test_suites.append(unittest.makeSuite(TestRotationParameter, 'test'))
test_suite = unittest.TestSuite(test_suites)
return test_suite
| false | true |
f7251e850c38e0f28697e00d751ee3f8dca92056 | 7,888 | py | Python | dynamic_image_networks/hmdb51/training_scripts/train_resnext50_hmdb51.py | DoranLyong/dynamic-images-for-action-recognition | 06a68c2337b45c44a8c7ec50e94585a9b9615ad0 | [
"MIT"
] | 22 | 2018-09-14T00:32:41.000Z | 2020-10-23T11:19:12.000Z | dynamic_image_networks/hmdb51/training_scripts/train_resnext50_hmdb51.py | DoranLyong/dynamic-images-for-action-recognition | 06a68c2337b45c44a8c7ec50e94585a9b9615ad0 | [
"MIT"
] | 1 | 2021-04-30T04:09:40.000Z | 2021-04-30T04:09:40.000Z | dynamic_image_networks/hmdb51/training_scripts/train_resnext50_hmdb51.py | DoranLyong/dynamic-images-for-action-recognition | 06a68c2337b45c44a8c7ec50e94585a9b9615ad0 | [
"MIT"
] | 7 | 2018-11-01T02:32:09.000Z | 2020-10-03T12:19:02.000Z | # import apex - !!!! INCLUDE THIS IMPORT IF YOU WANT TO USE MIXED PRECISION TRAINING !!!!
import torch
import os
import sys
import torch.optim as optim
import torch.nn as nn
from datetime import datetime
from tqdm import tqdm
from pathlib import Path
# Make sure that the project root is in your PATH (i.e., the parent folder containing 'dynamic_image_networks').
sys.path.append(str(Path('../../..').resolve()))
# ---------------------------------------------------------------
# Model / dataset choice
# ---------------------------------------------------------------
from dynamic_image_networks.hmdb51.models.resnext50_temppool import get_model
from dynamic_image_networks.hmdb51.dataloaders.hmdb51_dataloader import get_train_loader
from dynamic_image_networks.hmdb51.utilities.calculate_training_metrics import calculate_accuracy
from dynamic_image_networks.hmdb51.utilities.logger import initialize_logger
from dynamic_image_networks.hmdb51.utilities.meters import AverageMeter
def main():
    """Train a ResNeXt-50 dynamic-image network on HMDB51 (fold 1).

    Side effects: writes logs to ./logs/, saves best-loss and best-acc
    model checkpoints to ./saved_models/. Requires a CUDA device
    ("cuda:0"); mixed precision additionally requires the (commented-out)
    ``apex`` import at the top of the file.
    """
    # ============================================================================================
    # Setup
    # ============================================================================================
    # ---------------------------------------------------------------
    # Random seeds
    # ---------------------------------------------------------------
    # Fixed seed for reproducibility; cudnn.benchmark trades determinism
    # for speed by auto-tuning convolution algorithms.
    torch.manual_seed(590238490)
    torch.backends.cudnn.benchmark = True

    # ---------------------------------------------------------------
    # GPU
    # ---------------------------------------------------------------
    device = torch.device("cuda:0")
    fp16 = False  # if True, apex.amp must be importable (see header comment)
    if fp16:
        print('!!! MIXED PRECISION TRAINING IS ENABLED -- ONLY USE FOR VOLTA AND TURING GPUs!!!')

    # ---------------------------------------------------------------
    # Training settings
    # ---------------------------------------------------------------
    batch_size = 32
    num_epochs = 60
    num_workers = 6
    max_segment_size = 10   # frames per dynamic-image segment
    save_best_models = True
    image_augmentation = False

    # ----------------------------------------------------------------------------
    # Get the model
    # ----------------------------------------------------------------------------
    net = get_model(num_classes=51)  # HMDB51 has 51 action classes
    net.to(device)

    # ----------------------------------------------------------------------------
    # Initialize optimizer and loss function
    # ----------------------------------------------------------------------------
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(net.parameters(), lr=3e-3)
    # Halve-on-plateau style schedule keyed off validation loss below.
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=5, verbose=True)

    if fp16:
        net, optimizer = apex.amp.initialize(net, optimizer, opt_level="O1")

    # ---------------------------------------------------------------
    # Logging set-up
    # ---------------------------------------------------------------
    # File-name (this script's basename without the .py extension),
    # reused below as the checkpoint-name prefix.
    file_name = ''.join(os.path.basename(__file__).split('.py')[:-1])
    logger = initialize_logger(file_name, log_dir='./logs/')

    # ============================================================================================
    # Train
    # ============================================================================================
    time_start = datetime.now()
    fold_i = 1  # only HMDB51 split/fold 1 is trained here

    # ---------------------------------------------------------------
    # Load dataloaders
    # ---------------------------------------------------------------
    # NOTE(review): 'image_augmenation' (sic) matches the dataloader's
    # misspelled keyword — do not "fix" it here without changing the
    # dataloader signature too.
    train_loader, validation_loader = get_train_loader(fold_id=fold_i,
                                                       batch_size=batch_size,
                                                       num_workers=num_workers,
                                                       image_augmenation=image_augmentation,
                                                       segment_size=max_segment_size)

    logger.info('Starting Training on Fold: {}\n'.format(fold_i))
    best_val_loss = float('inf')
    best_val_acc = 0
    for epoch_i in range(num_epochs):

        # ---------------------------------------------------------------
        # Training and validation loop
        # ---------------------------------------------------------------
        # Validation passes no optimizer (None) since no step is taken.
        avg_loss, avg_acc = training_loop('train', net, device, train_loader,
                                          optimizer, criterion, fp16)
        avg_val_loss, avg_val_acc = training_loop('val', net, device, validation_loader,
                                                  None, criterion, fp16)
        if scheduler:
            scheduler.step(avg_val_loss)

        # ---------------------------------------------------------------
        # Track the best model
        # ---------------------------------------------------------------
        # Two independent checkpoints: best validation loss and best
        # validation accuracy (they need not coincide).
        if avg_val_loss < best_val_loss:
            best_val_loss = avg_val_loss
            if save_best_models:
                logger.info('Saving model because of best loss...')
                os.makedirs('./saved_models/', exist_ok=True)
                torch.save(net.state_dict(),
                           './saved_models/{}_fold_{}_best_loss_state.pt'.format(file_name, fold_i))

        if avg_val_acc > best_val_acc:
            best_val_acc = avg_val_acc
            if save_best_models:
                logger.info('Saving model because of best acc...')
                os.makedirs('./saved_models/', exist_ok=True)
                torch.save(net.state_dict(),
                           './saved_models/{}_fold_{}_best_acc_state.pt'.format(file_name, fold_i))

        # ---------------------------------------------------------------
        # Log the training status
        # ---------------------------------------------------------------
        time_elapsed = datetime.now() - time_start
        output_msg = 'Fold {}, Epoch: {}/{}\n' \
                     '---------------------\n' \
                     'train loss: {:.6f}, val loss: {:.6f}\n' \
                     'train acc: {:.6f}, val acc: {:.6f}\n' \
                     'best val loss: {:.6f}, best val acc: {:.6f}\n' \
                     'time elapsed: {}\n'. \
            format(fold_i, epoch_i, num_epochs - 1,
                   avg_loss, avg_val_loss,
                   avg_acc, avg_val_acc,
                   best_val_loss, best_val_acc,
                   str(time_elapsed).split('.')[0])
        logger.info(output_msg)

    logger.info('Finished Training')
def training_loop(phase, net, device, dataloader, optimizer, criterion, fp16):
    """Run one epoch over ``dataloader`` and return (avg_loss, avg_acc).

    ``phase`` selects training ('train': gradients enabled, optimizer
    stepped) or validation ('val': eval mode, no gradients). ``fp16``
    routes the backward pass through apex.amp loss scaling.
    """
    running_loss = AverageMeter()
    running_acc = AverageMeter()

    # Put the network into the mode matching the requested phase.
    if phase == 'train':
        net.train()
    elif phase == 'val':
        net.eval()
    else:
        raise ValueError

    is_train = phase == 'train'
    # Gradients are tracked only while training.
    with torch.set_grad_enabled(is_train):
        for batch_idx, batch in tqdm(enumerate(dataloader), total=len(dataloader)):
            inputs, targets = batch
            inputs = inputs.to(device, non_blocking=True)
            targets = targets.to(device, non_blocking=True)

            # Forward pass.
            predictions = net(inputs).float()

            # Loss, then (in training) backward pass and parameter update.
            loss = criterion(predictions, targets)
            if is_train:
                optimizer.zero_grad()
                if fp16 is True:
                    with apex.amp.scale_loss(loss, optimizer) as scaled_loss:
                        scaled_loss.backward()
                else:
                    loss.backward()
                optimizer.step()

            # Accumulate batch-size-weighted metrics.
            n = len(targets)
            running_loss.add(loss.item(), n)
            running_acc.add(calculate_accuracy(predictions, targets), n)

    return running_loss.get_average(), running_acc.get_average()
if __name__ == '__main__':
main()
| 41.083333 | 112 | 0.439021 |
import torch
import os
import sys
import torch.optim as optim
import torch.nn as nn
from datetime import datetime
from tqdm import tqdm
from pathlib import Path
sys.path.append(str(Path('../../..').resolve()))
from dynamic_image_networks.hmdb51.models.resnext50_temppool import get_model
from dynamic_image_networks.hmdb51.dataloaders.hmdb51_dataloader import get_train_loader
from dynamic_image_networks.hmdb51.utilities.calculate_training_metrics import calculate_accuracy
from dynamic_image_networks.hmdb51.utilities.logger import initialize_logger
from dynamic_image_networks.hmdb51.utilities.meters import AverageMeter
def main():
    """Train the ResNeXt50 temporal-pooling model on one HMDB51 fold.

    Sets up the model, data loaders, optimizer and LR scheduler, runs
    ``num_epochs`` of train/val via ``training_loop``, checkpoints the model
    on best validation loss and best validation accuracy, and logs progress.
    """
    # ============================================================================
    # Fixed seed for reproducibility; cudnn.benchmark speeds up fixed-size inputs.
    torch.manual_seed(590238490)
    torch.backends.cudnn.benchmark = True
    device = torch.device("cuda:0")
    fp16 = False
    if fp16:
        print('!!! MIXED PRECISION TRAINING IS ENABLED -- ONLY USE FOR VOLTA AND TURING GPUs!!!')
    # ============================================================================
    # Hyper-parameters / run settings.
    batch_size = 32
    num_epochs = 60
    num_workers = 6
    max_segment_size = 10
    save_best_models = True
    image_augmentation = False
    # ============================================================================
    # Model, loss, optimizer, and plateau-based LR scheduler.
    net = get_model(num_classes=51)
    net.to(device)
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(net.parameters(), lr=3e-3)
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=5, verbose=True)
    if fp16:
        # NOTE(review): requires `import apex`; only reachable when fp16 is True.
        net, optimizer = apex.amp.initialize(net, optimizer, opt_level="O1")
    # Use this script's basename (without .py) to name logs and checkpoints.
    file_name = ''.join(os.path.basename(__file__).split('.py')[:-1])
    logger = initialize_logger(file_name, log_dir='./logs/')
    time_start = datetime.now()
    # Train on a single cross-validation fold.
    fold_i = 1
    # NOTE: `image_augmenation` (sic) matches the loader's parameter spelling.
    train_loader, validation_loader = get_train_loader(fold_id=fold_i,
                                                       batch_size=batch_size,
                                                       num_workers=num_workers,
                                                       image_augmenation=image_augmentation,
                                                       segment_size=max_segment_size)
    logger.info('Starting Training on Fold: {}\n'.format(fold_i))
    best_val_loss = float('inf')
    best_val_acc = 0
    for epoch_i in range(num_epochs):
        # One pass over the training set, then one over the validation set
        # (no optimizer needed in the 'val' phase).
        avg_loss, avg_acc = training_loop('train', net, device, train_loader,
                                          optimizer, criterion, fp16)
        avg_val_loss, avg_val_acc = training_loop('val', net, device, validation_loader,
                                                  None, criterion, fp16)
        # Reduce LR when validation loss plateaus.
        if scheduler:
            scheduler.step(avg_val_loss)
        # Checkpoint on best validation loss.
        if avg_val_loss < best_val_loss:
            best_val_loss = avg_val_loss
            if save_best_models:
                logger.info('Saving model because of best loss...')
                os.makedirs('./saved_models/', exist_ok=True)
                torch.save(net.state_dict(),
                           './saved_models/{}_fold_{}_best_loss_state.pt'.format(file_name, fold_i))
        # Checkpoint on best validation accuracy (tracked independently).
        if avg_val_acc > best_val_acc:
            best_val_acc = avg_val_acc
            if save_best_models:
                logger.info('Saving model because of best acc...')
                os.makedirs('./saved_models/', exist_ok=True)
                torch.save(net.state_dict(),
                           './saved_models/{}_fold_{}_best_acc_state.pt'.format(file_name, fold_i))
        # Per-epoch summary; elapsed time is printed without microseconds.
        time_elapsed = datetime.now() - time_start
        output_msg = 'Fold {}, Epoch: {}/{}\n' \
                     '---------------------\n' \
                     'train loss: {:.6f}, val loss: {:.6f}\n' \
                     'train acc: {:.6f}, val acc: {:.6f}\n' \
                     'best val loss: {:.6f}, best val acc: {:.6f}\n' \
                     'time elapsed: {}\n'. \
            format(fold_i, epoch_i, num_epochs - 1,
                   avg_loss, avg_val_loss,
                   avg_acc, avg_val_acc,
                   best_val_loss, best_val_acc,
                   str(time_elapsed).split('.')[0])
        logger.info(output_msg)
    logger.info('Finished Training')
def training_loop(phase, net, device, dataloader, optimizer, criterion, fp16):
    """Run one epoch of training ('train') or evaluation ('val').

    Returns (avg_loss, avg_acc), each a batch-size-weighted epoch average.
    Raises ValueError for any other phase string.
    """
    loss_meter = AverageMeter()
    acc_meter = AverageMeter()
    # Put the model in the mode matching the phase.
    if phase == 'train':
        net.train()
    elif phase == 'val':
        net.eval()
    else:
        raise ValueError
    # Gradients are tracked only during training.
    with torch.set_grad_enabled(phase == 'train'):
        for i, data in tqdm(enumerate(dataloader), total=len(dataloader)):
            x, y, = data
            x, y = x.to(device, non_blocking=True), y.to(device, non_blocking=True)
            # Forward pass.
            y_pred = net(x).float()
            loss = criterion(y_pred, y)
            if phase == 'train':
                optimizer.zero_grad()
                if fp16 is True:
                    # apex scales the loss for mixed-precision backprop.
                    with apex.amp.scale_loss(loss, optimizer) as scaled_loss:
                        scaled_loss.backward()
                else:
                    loss.backward()
                optimizer.step()
            # Accumulate metrics weighted by batch size.
            batch_size = len(y)
            loss_meter.add(loss.item(), batch_size)
            acc_meter.add(calculate_accuracy(y_pred, y), batch_size)
    avg_loss = loss_meter.get_average()
    avg_acc = acc_meter.get_average()
    return avg_loss, avg_acc
# Script entry point.
if __name__ == '__main__':
    main()
| true | true |
f7251f33ee91ecf48c4a7be3c6944bab432a6275 | 1,011 | py | Python | main_app/urls.py | m-code12/Rescue | 24ece6ac97aeb177435ec7cc3d822d17e75724c8 | [
"MIT"
] | null | null | null | main_app/urls.py | m-code12/Rescue | 24ece6ac97aeb177435ec7cc3d822d17e75724c8 | [
"MIT"
] | 1 | 2021-02-19T17:09:40.000Z | 2021-02-19T17:09:40.000Z | main_app/urls.py | m-code12/Rescue | 24ece6ac97aeb177435ec7cc3d822d17e75724c8 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
app_name = "main_app"
# URL routes for the main_app Django application. Note that '' and 'home/'
# both resolve to views.home and share the name "home".
urlpatterns = [
    path('', views.home, name="home"),
    path('home/', views.home, name="home"),
    # Authentication.
    path('register/', views.register, name="register"),
    path('logout/', views.logout_request, name="logout"),
    path('login/', views.login_request, name="login"),
    # Emergency-contact CRUD; <str:pk> is the contact's primary key.
    path('emergency_contact/', views.emergency_contact, name="emergency_contact"),
    path("create_contact/", views.create_contact , name="create_contact"),
    path("update_contact/<str:pk>/", views.update_contact, name="update_contact"),
    path("delete_contact/<str:pk>/", views.delete_contact, name="delete_contact"),
    # Informational pages.
    path("emergency/", views.emergency, name="emergency"),
    path("helpline_numbers/", views.helpline_numbers, name="helpline_numbers"),
    path("women_laws/", views.women_laws, name="women_laws"),
    path('women_rights/', views.women_rights, name='women_rights'),
    path("developers/", views.developers, name="developers")
]
] | 48.142857 | 82 | 0.691395 | from django.urls import path
from . import views
app_name = "main_app"
# Route table for main_app ('' and 'home/' are aliases for the same view).
urlpatterns = [
    path('', views.home, name="home"),
    path('home/', views.home, name="home"),
    path('register/', views.register, name="register"),
    path('logout/', views.logout_request, name="logout"),
    path('login/', views.login_request, name="login"),
    path('emergency_contact/', views.emergency_contact, name="emergency_contact"),
    path("create_contact/", views.create_contact , name="create_contact"),
    path("update_contact/<str:pk>/", views.update_contact, name="update_contact"),
    path("delete_contact/<str:pk>/", views.delete_contact, name="delete_contact"),
    path("emergency/", views.emergency, name="emergency"),
    path("helpline_numbers/", views.helpline_numbers, name="helpline_numbers"),
    path("women_laws/", views.women_laws, name="women_laws"),
    path('women_rights/', views.women_rights, name='women_rights'),
    path("developers/", views.developers, name="developers")
]
] | true | true |
f7251f49ee1a7989325fce02781fdfdd20216e2b | 40,049 | py | Python | chain/core/resources.py | ielm/chain-api | 8fba4b8ebdedbe1de65fe2bde0e0a6f330177c91 | [
"MIT"
] | 23 | 2015-08-14T02:23:51.000Z | 2021-04-16T14:59:59.000Z | chain/core/resources.py | ResEnv/chain-api | 8fba4b8ebdedbe1de65fe2bde0e0a6f330177c91 | [
"MIT"
] | 27 | 2015-05-26T22:29:57.000Z | 2020-06-05T16:40:51.000Z | chain/core/resources.py | ielm/chain-api | 8fba4b8ebdedbe1de65fe2bde0e0a6f330177c91 | [
"MIT"
] | 10 | 2015-07-05T07:15:46.000Z | 2020-06-30T18:28:08.000Z | from chain.core.api import Resource, ResourceField, CollectionField, \
MetadataCollectionField
from chain.core.api import full_reverse, render_error
from chain.core.api import CHAIN_CURIES
from chain.core.api import BadRequestException, HTTP_STATUS_BAD_REQUEST
from chain.core.api import register_resource
from chain.core.models import Site, Device, ScalarSensor, \
PresenceSensor, PresenceData, Person, Metadata
from django.conf.urls import include, patterns, url
from django.utils import timezone
from datetime import timedelta, datetime
import calendar
from chain.localsettings import INFLUX_HOST, INFLUX_PORT, INFLUX_DATABASE, INFLUX_MEASUREMENT
from chain.influx_client import InfluxClient
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import cache_control
from django.utils.dateparse import parse_datetime
import json
influx_client = InfluxClient(INFLUX_HOST, INFLUX_PORT, INFLUX_DATABASE, INFLUX_MEASUREMENT)
class MetadataResource(Resource):
    """REST resource exposing key/value Metadata records.

    Metadata are immutable: edits are rejected, and listing shows only the
    newest value per key (older rows remain as history).
    """
    model = Metadata
    display_field = 'timestamp'
    resource_name = 'metadata'
    resource_type = 'metadata'
    required_fields = ['key', 'value']
    model_fields = ['timestamp', 'key', 'value']
    queryset = Metadata.objects
    def get_queryset(self):
        # Latest row per key: order newest-first within each key, then take
        # one distinct row per key, paginated by the resource's offset/limit.
        queryset = self._queryset.filter(**self._filters).order_by('key', '-timestamp').distinct('key')
        return queryset[self._offset:self._offset + self._limit]
    def get_total_count(self):
        # Count of distinct keys, memoized on the instance after first call.
        try:
            return self._total_count
        except AttributeError:
            pass
        qs = self._queryset.filter(**self._filters).order_by('key').distinct('key')
        self._total_count = qs.count()
        return self._total_count
    def serialize_list(self, embed, cache):
        # Non-embedded lists fall back to the generic link-only rendering.
        if not embed:
            return super(MetadataResource, self).serialize_list(embed, cache)
        href = self.get_list_href()
        serialized_data = {
            '_links': {
                'self': {'href': href},
                'curies': CHAIN_CURIES,
                'createForm': {
                    'href': self.get_create_href(),
                    'title': 'Add Metadata'
                }
            },
            'totalCount': self.get_total_count()
        }
        objs = self.get_queryset()
        # Inline the key/value pairs rather than linking each record.
        serialized_data['data'] = [{
            'key': obj.key,
            'value': obj.value}
            for obj in objs]
        serialized_data = self.add_page_links(serialized_data, href)
        return serialized_data
    def serialize_single(self, embed=True, cache=None, rels=True, *args, **kwargs):
        # Force edit=False so no edit form/link is rendered for an
        # immutable record.
        return super(
            MetadataResource,
            self).serialize_single(
            embed,
            cache,
            rels,
            *args,
            **dict(kwargs, edit=False))
    @classmethod
    @csrf_exempt
    def edit_view(cls, request, id):
        # Editing is always rejected: metadata records are append-only.
        return render_error(HTTP_STATUS_BAD_REQUEST,
                            "Metadata are immutable",
                            request)
class SensorDataResource(Resource):
    """Shared behaviour for time-series sensor data resources: Unix-time
    formatting, prev/self/next pagination links, and the default window size
    for time-bounded queries."""
    def __init__(self, *args, **kwargs):
        super(SensorDataResource, self).__init__(*args, **kwargs)
    def format_time(self, timestamp):
        # Seconds since the Unix epoch (UTC) for the given datetime.
        return calendar.timegm(timestamp.timetuple())
    def add_page_links(self, data, href, page_start, page_end):
        """Attach previous/self/next links covering equal-width time windows
        around [page_start, page_end)."""
        span = page_end - page_start
        def window_href(start, end):
            # Same base href with the timestamp bounds swapped in.
            return self.update_href(
                href, timestamp__gte=self.format_time(start),
                timestamp__lt=self.format_time(end))
        links = data['_links']
        links['previous'] = {
            'href': window_href(page_start - span, page_start),
            'title': '%s to %s' % (page_start - span, page_start),
        }
        links['self'] = {
            'href': window_href(page_start, page_end),
        }
        links['next'] = {
            'href': window_href(page_end, page_end + span),
            'title': '%s to %s' % (page_end, page_end + span),
        }
        return data
    # shoot to return about 500 values per page
    def default_timespan(self):
        windows = {
            None: timedelta(hours=6),
            '1h': timedelta(hours=500),
            '1d': timedelta(days=500),
            '1w': timedelta(weeks=500),
        }
        aggtime = self._filters.get('aggtime', None)
        try:
            return windows[aggtime]
        except KeyError:
            raise BadRequestException('Invalid argument for aggtime. Must be 1h, 1d, or 1w')
class ScalarSensorDataResource(SensorDataResource):
    """Scalar (float) sensor readings, stored in and served from InfluxDB.

    Unlike most resources this one is not backed by a Django model: posted
    points are written to Influx via ``influx_client`` and lists are read
    back from it.
    """
    display_field = 'timestamp'
    resource_name = 'scalar_data'
    resource_type = 'scalar_data'
    model_fields = ['timestamp', 'value']
    # (json type, json format) for each field, used by get_field_schema_type.
    schema_type = {'timestamp': ('string', 'date-time'),
                   'value': ('number', None)}
    required_fields = ['value']
    def __init__(self, *args, **kwargs):
        super(ScalarSensorDataResource, self).__init__(*args, **kwargs)
        if self._state == 'data':
            # deserialize incoming data
            self.sensor_id = self._filters.get('sensor_id')
            self.value = self.sanitize_field_value('value', self._data.get('value'))
            self.timestamp = self.sanitize_field_value('timestamp', self._data.get('timestamp'))
            # resolve ids up the hierarchy (device/site) from the sensor
            sensor = ScalarSensor.objects.select_related('device', 'metric').get(id=self.sensor_id)
            self.device_id = sensor.device.id
            self.metric = sensor.metric
            self.site_id = sensor.device.site_id
            # treat sensor data like an object from here on
            self._state = 'object'
        if 'queryset' in kwargs:
            # we want to default to the last page, not the first page
            pass
    def serialize_single(self, embed=True, cache=None, rels=True):
        # Plain {timestamp, value} dict; no links are rendered for a point.
        data = {}
        for field_name in self.model_fields:
            data[field_name] = self.serialize_field(getattr(self, field_name))
        return data
    @classmethod
    def sanitize_field_value(cls, field_name, value):
        """Coerce an incoming field to its storage type.

        'value' becomes a float; 'timestamp' becomes an aware datetime
        (missing timestamps default to now). Other field names return None.
        """
        if field_name == 'value':
            return float(value)
        if field_name == 'timestamp':
            if value is None:
                return timezone.now()
            timestamp = parse_datetime(value)
            if timezone.is_aware(timestamp):
                return timestamp
            return timezone.make_aware(timestamp, timezone.get_current_timezone())
    # we store the metric as a tag in Influx for convenience of querying
    # for clients that are using influx directly. It's not a real field that's
    # handled by Chain
    def save(self):
        # Write the point to InfluxDB rather than the relational database.
        response = influx_client.post_data(self.site_id, self.device_id, self.sensor_id,
                                           self.metric, self.value, self.timestamp)
        return response
    def serialize_list(self, embed, cache):
        '''a "list" of SensorData resources is actually represented
        as a single resource with a list of data points'''
        if not embed:
            return super(
                ScalarSensorDataResource,
                self).serialize_list(
                embed,
                cache)
        href = self.get_list_href()
        serialized_data = {
            '_links': {
                'curies': CHAIN_CURIES,
                'createForm': {
                    'href': self.get_create_href(),
                    'title': 'Add Data'
                }
            },
            'dataType': 'float'
        }
        request_time = timezone.now()
        # if the time filters aren't given then use the most recent timespan,
        # if they are given, then we need to convert them from unix time to use
        # in the queryset filter
        if 'timestamp__gte' in self._filters:
            try:
                page_start = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__gte'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for lower bound of date range.")
        else:
            page_start = request_time - self.default_timespan()
        if 'timestamp__lt' in self._filters:
            try:
                page_end = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__lt'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for upper bound of date range.")
        else:
            page_end = request_time
        self._filters['timestamp__gte'] = page_start
        self._filters['timestamp__lt'] = page_end
        objs = influx_client.get_sensor_data(self._filters)
        serialized_data = self.add_page_links(serialized_data, href,
                                              page_start, page_end)
        serialized_data['data'] = [{
            'value': obj['value'],
            'timestamp': obj['time']}
            for obj in objs]
        return serialized_data
    def get_cache_key(self):
        # A data point is uniquely identified by its sensor and timestamp.
        return self.sensor_id, self.timestamp
    def serialize_stream(self):
        '''Serialize this resource for a stream'''
        data = self.serialize_single(rels=False)
        data['_links'] = {
            'ch:sensor': {'href': full_reverse(
                'scalar_sensors-single', self._request,
                args=(self._filters['sensor_id'],))}
        }
        return data
    def get_single_href(self):
        return full_reverse(self.resource_name + '-single',
                            self._request, args=(self.sensor_id,self.timestamp))
    def get_tags(self):
        """Pub/sub tags for this point: its sensor, device, and site."""
        if not self.sensor_id:
            raise ValueError(
                'Tried to called get_tags on a resource without an id')
        db_sensor = ScalarSensor.objects.select_related('device').get(
            id=self.sensor_id)
        return ['sensor-%d' % db_sensor.id,
                'device-%d' % db_sensor.device_id,
                'site-%d' % db_sensor.device.site_id]
    @classmethod
    def get_field_schema_type(cls, field_name):
        # Look up the (type, format) pair declared in schema_type.
        if field_name in cls.model_fields:
            return cls.schema_type[field_name]
        else:
            raise NotImplementedError(
                "tried to look up field %s but didn't know where" % field_name)
    @classmethod
    def model_has_field(cls, field_name):
        if field_name in cls.model_fields:
            return True
        return False
class AggregateScalarSensorDataResource(SensorDataResource):
    """Read-only max/min/mean/count aggregates of scalar sensor data,
    computed by InfluxDB. Requires an ``aggtime`` filter (1h, 1d, or 1w)."""
    resource_name = 'aggregate_data'
    resource_type = 'aggregate_data'
    model_fields = ['timestamp', 'max', 'min', 'mean', 'count']
    def __init__(self, *args, **kwargs):
        super(AggregateScalarSensorDataResource, self).__init__(*args, **kwargs)
    def get_list_href(self, embed=False):
        # Advertise the aggtime parameter in the URI template for links.
        href = super(AggregateScalarSensorDataResource, self).get_list_href()
        if not embed:
            href += '{&aggtime}'
        return href
    def serialize_list(self, embed, cache):
        # One resource containing the list of aggregate buckets.
        if not embed:
            return super(
                AggregateScalarSensorDataResource,
                self).serialize_list(
                embed,
                cache)
        if 'aggtime' not in self._filters:
            raise BadRequestException(
                "Missing aggtime arguement")
        href = self.get_list_href(True)
        serialized_data = {
            '_links': {
                'curies': CHAIN_CURIES
            },
            'dataType': 'float'
        }
        request_time = timezone.now()
        # Convert Unix-time bounds to aware datetimes; default to the most
        # recent window when a bound is missing.
        if 'timestamp__gte' in self._filters:
            try:
                page_start = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__gte'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for lower bound of date range.")
        else:
            page_start = request_time - self.default_timespan()
        if 'timestamp__lt' in self._filters:
            try:
                page_end = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__lt'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for upper bound of date range.")
        else:
            page_end = request_time
        self._filters['timestamp__gte'] = page_start
        self._filters['timestamp__lt'] = page_end
        objs = influx_client.get_sensor_data(self._filters)
        serialized_data = self.add_page_links(serialized_data, href,
                                              page_start, page_end)
        serialized_data['data'] = [{
            'max': obj['max'],
            'min': obj['min'],
            'mean': obj['mean'],
            'count': obj['count'],
            'timestamp': obj['time']}
            for obj in objs]
        return serialized_data
    @classmethod
    def urls(cls):
        # Only a list endpoint exists; there is no single-aggregate view.
        base_name = cls.resource_name
        return patterns('',
                        url(r'^$',
                            cls.list_view, name=base_name + '-list'))
class ScalarSensorResource(Resource):
    """A scalar (float-valued) sensor attached to a device, with links to
    its raw and aggregated data history."""
    model = ScalarSensor
    display_field = 'metric'
    resource_name = 'scalar_sensors'
    resource_type = 'scalar_sensor'
    required_fields = ['metric', 'unit']
    model_fields = ['active']
    # for now, name is hardcoded as the only attribute of metric and unit
    stub_fields = {'metric': 'name', 'unit': 'name'}
    queryset = ScalarSensor.objects
    related_fields = {
        'ch:dataHistory': CollectionField(ScalarSensorDataResource,
                                          reverse_name='sensor'),
        'ch:aggregateData': CollectionField(AggregateScalarSensorDataResource,
                                            reverse_name='sensor'),
        'ch:device': ResourceField('chain.core.resources.DeviceResource',
                                   'device'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    def serialize_single(self, embed, cache, *args, **kwargs):
        # Tag with the sensor type and, when embedded, inline the latest
        # reading fetched from InfluxDB (unless include_data=False).
        data = super(
            ScalarSensorResource,
            self).serialize_single(
            embed,
            cache,
            *args,
            **kwargs)
        data['sensor-type'] = "scalar"
        if embed:
            data['dataType'] = 'float'
            # this is hammering the influx server, we should switch it
            # over to doing a single bulk query. For now disabling the
            # data to get things up and running
            if not kwargs.get('include_data', True):
                return data
            else:
                last_data = influx_client.get_last_sensor_data(self._obj.id)
                if last_data:
                    # column name returned by last() selector is last
                    data['value'] = last_data[0]['last']
                    data['updated'] = last_data[0]['time']
        return data
    def get_tags(self):
        # Pub/sub tags: this sensor, its device, and its site.
        return ['sensor-%s' % self._obj.id,
                'scalar_sensor-%s' % self._obj.id,
                'device-%s' % self._obj.device_id,
                'site-%s' % self._obj.device.site_id]
class PresenceDataResource(SensorDataResource):
    """Presence (arrive/leave) events linking a person to a presence sensor.

    Unlike scalar data these records live in the relational database
    (``PresenceData``), not InfluxDB.
    """
    model = PresenceData
    display_field = 'timestamp'
    resource_name = 'presencedata'
    resource_type = 'presencedata'
    model_fields = ['timestamp', 'present', 'person', 'sensor']
    required_fields = ['person', 'sensor', 'present']
    queryset = PresenceData.objects
    def __init__(self, *args, **kwargs):
        super(PresenceDataResource, self).__init__(*args, **kwargs)
        if 'queryset' in kwargs:
            # we want to default to the last page, not the first page
            pass
    def serialize_single(self, embed, cache):
        # Replace the raw person/sensor fields with titled links.
        serialized_data = super(
            PresenceDataResource,
            self).serialize_single(
            embed,
            cache)
        if 'person' in serialized_data:
            del serialized_data['person']
        if 'sensor' in serialized_data:
            del serialized_data['sensor']
        if '_links' not in serialized_data:
            serialized_data['_links'] = {}
        serialized_data['_links'].update(self.get_additional_links())
        return serialized_data
    def get_additional_links(self):
        # Links to the person ("Last, First") and the sensor ("device->metric").
        return {
            'person': {
                'href': self.get_person_url(
                    self._obj.person), 'title': "%s, %s" %
                (self._obj.person.last_name, self._obj.person.first_name)}, 'sensor': {
                'href': self.get_sensor_url(
                    self._obj.sensor), 'title': "%s->%s" %
                (self._obj.sensor.device.name, self._obj.sensor.metric)}}
    def serialize_list(self, embed, cache):
        '''a "list" of SensorData resources is actually represented
        as a single resource with a list of data points'''
        if not embed:
            return super(
                PresenceDataResource,
                self).serialize_list(
                embed,
                cache)
        href = self.get_list_href()
        items = []
        serialized_data = {
            '_links': {
                'curies': CHAIN_CURIES,
                'createForm': {
                    'href': self.get_create_href(),
                    'title': 'Add Data'
                },
                'items': items
            },
            'dataType': 'presence'
        }
        request_time = timezone.now()
        # if the time filters aren't given then use the most recent timespan,
        # if they are given, then we need to convert them from unix time to use
        # in the queryset filter. Bounds are made UTC-aware to match the
        # aware datetimes used everywhere else (and ScalarSensorDataResource).
        if 'timestamp__gte' in self._filters:
            try:
                page_start = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__gte'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for lower bound of date range.")
        else:
            page_start = request_time - self.default_timespan()
        if 'timestamp__lt' in self._filters:
            try:
                page_end = datetime.utcfromtimestamp(
                    float(self._filters['timestamp__lt'])).replace(
                    tzinfo=timezone.utc)
            except ValueError:
                raise BadRequestException(
                    "Invalid timestamp format for upper bound of date range.")
        else:
            page_end = request_time
        self._filters['timestamp__gte'] = page_start
        self._filters['timestamp__lt'] = page_end
        objs = self._queryset.filter(**self._filters).order_by('timestamp')
        serialized_data = self.add_page_links(serialized_data, href,
                                              page_start, page_end)
        # Make links:
        for obj in objs:
            presence_data_resource = PresenceDataResource(
                obj=obj,
                request=self._request)
            items.append(
                {
                    'href': presence_data_resource.get_single_href(),
                    'title': "%s %s %s at time %s" %
                    (obj.person.last_name,
                     "at" if obj.present else "left",
                     obj.sensor.device,
                     obj.timestamp.isoformat())})
        return serialized_data
    def get_person_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the person's ID
            return obj.id
        person_resource = PersonResource(obj=obj, request=self._request)
        return person_resource.get_single_href()
    def get_sensor_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the person's ID
            return obj.id
        psensor_resource = PresenceSensorResource(
            obj=obj,
            request=self._request)
        return psensor_resource.get_single_href()
    def serialize_stream(self):
        '''Serialize this resource for a stream'''
        data = self.serialize_single(False, None)  # (rels=False)
        # TODO: Make useful
        data['_links'] = {
            'href': self.get_single_href(),
            #'person':
        }
        data['_links'].update(self.get_additional_links())
        return data
    def get_tags(self):
        """Pub/sub tags: the person, sensor, device, and site involved."""
        if not self._obj:
            raise ValueError(
                'Tried to called get_tags on a resource without an object')
        db_sensor = PresenceSensor.objects.select_related('device').get(
            id=self._obj.sensor_id)
        return ['person-%d' % self._obj.person_id,
                'sensor-%d' % db_sensor.id,
                'device-%d' % db_sensor.device_id,
                'site-%d' % db_sensor.device.site_id]
    def get_filled_schema(self):
        schema = super(PresenceDataResource, self).get_filled_schema()
        # we need to replace the sensor and/or person links with just
        # the URL instead of the full object
        props = schema['properties']
        if 'person' in props:
            person_default = props['person']['default']
            props['person']['default'] = self.get_person_url(person_default)
        if 'sensor' in props:
            sensor_default = props['sensor']['default']
            props['sensor']['default'] = self.get_sensor_url(sensor_default)
        return schema
class PresenceSensorResource(Resource):
    """A presence sensor attached to a device; embeds/links the most recent
    presence event as 'last-visit'."""
    model = PresenceSensor
    display_field = 'metric'
    resource_name = 'presence_sensors'
    resource_type = 'presence_sensor'
    required_fields = ['metric']
    # for now, name is hardcoded as the only attribute of metric and unit
    stub_fields = {'metric': 'name'}
    queryset = PresenceSensor.objects
    related_fields = {
        'ch:dataHistory': CollectionField(PresenceDataResource,
                                          reverse_name='sensor'),
        'ch:device': ResourceField('chain.core.resources.DeviceResource',
                                   'device'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    def serialize_single(self, embed, cache, *args, **kwargs):
        # Tag with the sensor type; when embedded, also attach the
        # 'last-visit' link and embedded record.
        data = super(
            PresenceSensorResource,
            self).serialize_single(
            embed,
            cache,
            *args,
            **kwargs)
        data['sensor-type'] = "presence"
        data['dataType'] = "presence"
        if embed:
            if '_embedded' not in data:
                data['_embedded'] = {}
            data['_embedded'].update(self.get_additional_embedded())
            if '_links' not in data:
                data['_links'] = {}
            data['_links'].update(self.get_additional_links())
        return data
    def get_additional_links(self):
        # Link to the newest presence record for this sensor, if any.
        links = {}
        last_data = self._obj.presence_data.order_by(
            'timestamp').reverse()[:1]
        if last_data:
            links['last-visit'] = {
                'href': self.get_presense_data_url(
                    last_data[0]), 'title': "%s at %s" %
                (last_data[0].person, last_data[0].timestamp.isoformat())}
        return links
    def get_additional_embedded(self):
        # Embed the newest presence record (non-embedded rendering) if any.
        embedded = {}
        last_data = self._obj.presence_data.order_by(
            'timestamp').reverse()[:1]
        if last_data:
            embedded['last-visit'] = PresenceDataResource(obj=last_data[0], request=self._request)\
                .serialize_single(False, {})
        return embedded
    def get_person_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the person's ID
            return obj.id
        person_resource = PersonResource(obj=obj, request=self._request)
        return person_resource.get_single_href()
    def get_presense_data_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the person's ID
            return obj.id
        pdata_resource = PresenceDataResource(obj=obj, request=self._request)
        return pdata_resource.get_single_href()
    def get_sensor_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the person's ID
            return obj.id
        psensor_resource = PresenceSensorResource(
            obj=obj,
            request=self._request)
        return psensor_resource.get_single_href()
    def get_tags(self):
        # Pub/sub tags (note 'presense_sensor' spelling is the wire format).
        return ['sensor-%s' % self._obj.id,
                'presense_sensor-%s' % self._obj.id,
                'device-%s' % self._obj.device_id,
                'site-%s' % self._obj.device.site_id]
class PersonResource(Resource):
    """A person at a site, with their presence history and most recent
    presence event exposed as 'last-visit'."""
    model = Person
    display_field = 'last_name'
    resource_name = 'people'
    resource_type = 'person'
    required_fields = ['first_name', 'last_name']
    model_fields = ['first_name', 'last_name', 'twitter_handle', 'rfid']
    related_fields = {
        'ch:presence-data': CollectionField(PresenceDataResource,
                                            reverse_name='person'),
        'ch:site': ResourceField('chain.core.resources.SiteResource', 'site'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    queryset = Person.objects
    def serialize_single(self, embed, cache, *args, **kwargs):
        # When embedded, attach the 'last-visit' link/record and picture URL.
        data = super(
            PersonResource,
            self).serialize_single(
            embed,
            cache,
            *args,
            **kwargs)
        if embed:
            if '_embedded' not in data:
                data['_embedded'] = {}
            data['_embedded'].update(self.get_additional_embedded())
            if '_links' in data:
                data['_links'].update(self.get_additional_links())
        return data
    def get_presence_data(self):
        # Most recent presence record for this person (newest first), to
        # match the 'last-visit' label used by the links below.
        filters = {
            'person': self._obj
        }
        return PresenceData.objects.filter(**filters).order_by('-timestamp')[:1]
    def get_additional_links(self):
        links = {}
        last_data = self.get_presence_data()
        if last_data:
            links['last-visit'] = {
                'href': self.get_presense_data_url(
                    last_data[0]),
                'title': "at %s->%s at time %s" %
                (last_data[0].sensor.device,
                 last_data[0].sensor.metric,
                 last_data[0].timestamp.isoformat())}
        if self._obj.picture_url:
            links['picture'] = {
                'href': self._obj.picture_url,
                'title': 'Picture URL (external)'
            }
        return links
    def get_additional_embedded(self):
        embedded = {}
        last_data = self.get_presence_data()
        if last_data:
            embedded['last-visit'] = PresenceDataResource(obj=last_data[0], request=self._request)\
                .serialize_single(False, {})
        return embedded
    def get_presense_data_url(self, obj):
        if self._request is None:
            # No way to form URL, just return the person's ID
            return obj.id
        pdata_resource = PresenceDataResource(obj=obj, request=self._request)
        return pdata_resource.get_single_href()
    def get_tags(self):
        # sometimes the site_id field is unicode? weird
        return ['person-%d' % self._obj.id,
                'site-%s' % self._obj.site_id]
'''
Merge two "JSON" style dictionary/list objects
recursively. Designed for merging schemas from
multiple sensor objects.
If two objects are not merge-able, the version from
obj1 is used.
'''
def json_merge(obj1, obj2):
    """Recursively merge two "JSON" style values.

    Lists are unioned (obj1's elements first, then elements of obj2 not
    already present, with duplicates skipped); dicts are merged key-wise
    with recursion on shared keys. When the two values cannot be merged,
    obj1 wins. Designed for merging schemas from multiple sensor objects.

    Unlike a plain ``set()`` union, this tolerates unhashable list elements
    (e.g. dicts inside an ``enum``/``anyOf`` list) by falling back to a
    linear membership scan for them.
    """
    if isinstance(obj1, list):
        # Merge array: seed with obj1, then append unseen elements of obj2.
        merged = list(obj1)
        seen = set()
        for el in obj1:
            try:
                seen.add(el)
            except TypeError:
                pass  # unhashable; covered by the `in merged` fallback below
        for el in obj2:
            try:
                if el in seen:
                    continue
                seen.add(el)
            except TypeError:
                # Unhashable element: linear scan instead of the set.
                if el in merged:
                    continue
            merged.append(el)
        return merged
    elif isinstance(obj1, dict):
        # Merge object: recurse on keys present in both.
        new_obj = {}
        for key in obj1:
            if key in obj2:
                new_obj[key] = json_merge(obj1[key], obj2[key])
            else:
                new_obj[key] = obj1[key]
        for key in obj2:
            if key not in new_obj:
                new_obj[key] = obj2[key]
        return new_obj
    else:
        # Could not merge. Select the version from the first object:
        return obj1
class MixedSensorResource(Resource):
    """A polymorphic 'sensors' collection that dispatches to the concrete
    per-type resources listed in ``available_sensor_types`` (currently only
    'scalar'; 'presence' is commented out)."""
    model = ScalarSensor
    display_field = 'metric'
    resource_name = 'sensors'
    resource_type = 'sensor'
    # for now, name is hardcoded as the only attribute of metric and unit
    stub_fields = {'metric': 'name'}
    queryset = ScalarSensor.objects
    # Maps a 'sensor-type' string to its Django model and Resource class.
    available_sensor_types = {
        'scalar': {
            'model': ScalarSensor,
            'resource': ScalarSensorResource
        },
        # 'presence': {
        #     'model': PresenceSensor,
        #     'resource': PresenceSensorResource
        # }
    }
    related_fields = {
        'ch:device': ResourceField('chain.core.resources.DeviceResource',
                                   'device')
    }
    @classmethod
    def get_schema(cls, filters=None):
        # Union of the per-type schemas plus a 'sensor-type' discriminator.
        schema = {
            'required': ['sensor-type'],
            'type': 'object',
            'properties': {
                'sensor-type': {
                    'type': 'string',
                    'title': 'sensor-type',
                    'enum': cls.available_sensor_types.keys()
                }
            },
            'title': 'Create Sensor'
        }
        for sensor_type in cls.available_sensor_types:
            sub_schema = cls.available_sensor_types[
                sensor_type]['resource'].get_schema(filters)
            schema = json_merge(schema, sub_schema)
        return schema
    @classmethod
    def create_list(cls, data, req):
        raise Exception("Not yet implemented.")
    @classmethod
    def create_single(cls, data, req):
        # Dispatch creation to the concrete resource for the declared type.
        if u'sensor-type' not in data:
            # raise Exception("'type' property not found")
            # For temporary back-compatability, assume it
            # is a ScalarSensor:
            return ScalarSensorResource.create_single(data, req)
        for sensor_type in cls.available_sensor_types:
            if data['sensor-type'] == sensor_type:
                del data['sensor-type']
                return cls.available_sensor_types[sensor_type][
                    'resource'].create_single(data, req)
        # TODO: Return 400 rather than raising an exception
        raise Exception("Unrecognized sensor type.")
    def serialize_single(self, embed, cache, *args, **kwargs):
        # Replace the generic item links with links for all sensor types.
        data = super(
            MixedSensorResource,
            self).serialize_single(
            embed,
            cache,
            *args,
            **kwargs)
        if embed:
            pass
        if '_links' in data:
            data['_links'].update(self.get_links())
            data['totalCount'] = len(data['_links']['items'])
        return data
    def serialize_list(self, embed, cache, *args, **kwargs):
        # Same link replacement as serialize_single, for collection views.
        data = super(
            MixedSensorResource,
            self).serialize_list(
            embed=embed,
            cache=cache,
            *args,
            **kwargs)
        if embed:
            pass
        if '_links' in data:
            data['_links'].update(self.get_links())
            data['totalCount'] = len(data['_links']['items'])
        return data
    def get_links(self):
        # Build item links across every sensor type, resolving each model
        # instance to its concrete resource class for URL construction.
        mapped_model_to_res = self.map_model_to_resource()
        sensors = self.query_models()
        items = []
        for sensor in sensors:
            items.append(
                {
                    'href': (
                        mapped_model_to_res[
                            type(sensor)](
                            obj=sensor,
                            request=self._request)).get_single_href(),
                    'title': "%s" %
                    sensor})
        return {'items': items}
    def map_model_to_resource(self):
        # {model class: resource class} derived from available_sensor_types.
        mapped = {}
        for sensor_type in self.available_sensor_types:
            sensor_details = self.available_sensor_types[sensor_type]
            mapped[sensor_details['model']] = sensor_details['resource']
        return mapped
    def query_models(self):
        # Apply the current filters to each sensor model and concatenate.
        results = []
        for sensor_type in self.available_sensor_types:
            modelResults = self.available_sensor_types[sensor_type][
                'model'].objects.filter(**self._filters)
            results.extend(modelResults)
        return results
    def get_tags(self):
        return ['sensor-%s' % self._obj.id,
                'device-%s' % self._obj.device_id,
                'site-%s' % self._obj.device.site_id]
class DeviceResource(Resource):
    """A physical device at a site, containing sensors of mixed types."""
    model = Device
    display_field = 'name'
    resource_name = 'devices'
    resource_type = 'device'
    required_fields = ['name']
    model_fields = ['name', 'description', 'building', 'floor', 'room', 'active']
    # NOTE(review): the string below is a leftover dead expression (old
    # per-type sensor fields); kept as-is.
    ''''ch:sensors': CollectionField(ScalarSensorResource,
                                     reverse_name='device'),
    'ch:sensors': CollectionField(PresenceSensorResource,
                                  reverse_name='device'),'''
    related_fields = {
        'ch:sensors': CollectionField(MixedSensorResource,
                                      reverse_name='device'),
        'ch:site': ResourceField('chain.core.resources.SiteResource', 'site'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    queryset = Device.objects
    def get_tags(self):
        # sometimes the site_id field is unicode? weird
        return ['device-%d' % self._obj.id,
                'site-%s' % self._obj.site_id]
class SiteResource(Resource):
    """A deployment site: the top-level container for devices and metadata,
    optionally exposing a raw ZMQ stream URL and a summary endpoint."""
    model = Site
    # TODO _href should be the external URL if present
    resource_name = 'sites'
    resource_type = 'site'
    display_field = 'name'
    model_fields = ['name']
    required_fields = ['name']
    related_fields = {
        'ch:devices': CollectionField(DeviceResource, reverse_name='site'),
        # 'ch:people': CollectionField(PersonResource, reverse_name='site'),
        'ch:metadata': MetadataCollectionField(MetadataResource)
    }
    queryset = Site.objects
def serialize_single(self, embed, cache):
data = super(SiteResource, self).serialize_single(embed, cache)
if embed:
stream = self._obj.raw_zmq_stream
if stream:
data['_links']['rawZMQStream'] = {
'href': stream,
'title': 'Raw ZMQ Stream'}
data['_links']['ch:siteSummary'] = {
'title': 'Summary',
'href': full_reverse('site-summary', self._request,
args=(self._obj.id,))
}
return data
def get_filled_schema(self):
schema = super(SiteResource, self).get_filled_schema()
schema['properties']['rawZMQStream']['default'] = \
self._obj.raw_zmq_stream
return schema
def deserialize(self):
super(SiteResource, self).deserialize()
if 'rawZMQStream' in self._data:
self._obj.raw_zmq_stream = self._data['rawZMQStream']
return self._obj
def update(self, data):
super(SiteResource, self).update(data)
if 'rawZMQStream' in data:
self._obj.raw_zmq_stream = data['rawZMQStream']
self._obj.save()
def get_tags(self):
return ['site-%d' % self._obj.id]
@classmethod
def get_schema(cls, filters=None):
schema = super(SiteResource, cls).get_schema(filters)
schema['properties']['rawZMQStream'] = {
'type': 'string',
'format': 'uri',
'title': 'rawZMQStream'
}
return schema
# cache for 1hr
@classmethod
@cache_control(max_age=3600)
def site_summary_view(cls, request, id):
#filters = request.GET.dict()
devices = Device.objects.filter(site_id=id).select_related(
'sensors',
'sensors__metric',
'sensors__unit'
)
response = {
'_links': {
'self': {'href': full_reverse('site-summary', request,
args=(id,))},
},
'devices': []
}
sensor_data_list = influx_client.get_last_data_from_all_sensors(id)
sensor_data_dict = {}
for data_point in sensor_data_list:
sensor_data_dict[int(data_point['sensor_id'])] = (data_point['last_value'], data_point['time'])
for device in devices:
dev_resource = DeviceResource(obj=device, request=request)
dev_data = dev_resource.serialize(rels=False)
dev_data['href'] = dev_resource.get_single_href()
response['devices'].append(dev_data)
dev_data['sensors'] = []
for sensor in device.sensors.all():
sensor_resource = ScalarSensorResource(
obj=sensor,
request=request)
sensor_data = sensor_resource.serialize(rels=False, include_data=False)
try:
sensor_data['value'] = sensor_data_dict[sensor.id][0]
sensor_data['updated'] = sensor_data_dict[sensor.id][1]
except KeyError:
# looks like we don't have any data for this sensor
pass
sensor_data['href'] = sensor_resource.get_single_href()
dev_data['sensors'].append(sensor_data)
sensor_data['data'] = []
return cls.render_response(response, request)
@classmethod
def urls(cls):
base_patterns = super(SiteResource, cls).urls()
base_patterns.append(
url(r'^(\d+)/summary$', cls.site_summary_view,
name='site-summary'))
return base_patterns
class ApiRootResource(Resource):
    """Entry point of the API: a HAL document linking to the site list."""

    def __init__(self, request):
        self._request = request

    def serialize(self):
        """Build the root HAL document with self, curies and sites links."""
        root_href = full_reverse('api-root', self._request)
        sites_href = full_reverse('sites-list', self._request)
        return {
            '_links': {
                'self': {'href': root_href},
                'curies': CHAIN_CURIES,
                'ch:sites': {
                    'title': 'Sites',
                    'href': sites_href
                }
            }
        }

    @classmethod
    def single_view(cls, request):
        """Django view wrapper: serialize the root and render the response."""
        return cls.render_response(cls(request=request).serialize(), request)
# URL Setup:
# The API root lives at the bare prefix (with or without trailing slash).
urls = patterns(
    '',
    url(r'^/?$', ApiRootResource.single_view, name='api-root')
)

# add additional URLS to account for the rename of sensor to scalarsensor.
# unfortunately we can't use redirects in case clients are POSTing to outdated
# URLs. If we WERE redirecting, we would use RedirectView.as_view()
#
# put these first so they are overridden by the later ones, particularly when
# doing URL reverse lookup.
urls += patterns('',
                 url("^sensordata/", include(ScalarSensorDataResource.urls())),
                 url("^sensor/", include(ScalarSensorResource.urls())),
                 )

# Resources mounted at ^<resource_name>/ and registered with the global
# resource registry below. Order matters for reverse lookup (see note above).
resources = [
    MetadataResource,
    ScalarSensorDataResource,
    AggregateScalarSensorDataResource,
    ScalarSensorResource,
    # Disable all the person/presence stuff, which isn't being used anymore
    # PresenceDataResource,
    # PresenceSensorResource,
    # PersonResource,
    MixedSensorResource,
    DeviceResource,
    SiteResource]

for resource in resources:
    new_url = url("^%s/" % resource.resource_name, include(resource.urls()))
    urls += patterns('', new_url)
    register_resource(resource)
| 35.254401 | 107 | 0.571375 | from chain.core.api import Resource, ResourceField, CollectionField, \
MetadataCollectionField
from chain.core.api import full_reverse, render_error
from chain.core.api import CHAIN_CURIES
from chain.core.api import BadRequestException, HTTP_STATUS_BAD_REQUEST
from chain.core.api import register_resource
from chain.core.models import Site, Device, ScalarSensor, \
PresenceSensor, PresenceData, Person, Metadata
from django.conf.urls import include, patterns, url
from django.utils import timezone
from datetime import timedelta, datetime
import calendar
from chain.localsettings import INFLUX_HOST, INFLUX_PORT, INFLUX_DATABASE, INFLUX_MEASUREMENT
from chain.influx_client import InfluxClient
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import cache_control
from django.utils.dateparse import parse_datetime
import json
influx_client = InfluxClient(INFLUX_HOST, INFLUX_PORT, INFLUX_DATABASE, INFLUX_MEASUREMENT)
class MetadataResource(Resource):
model = Metadata
display_field = 'timestamp'
resource_name = 'metadata'
resource_type = 'metadata'
required_fields = ['key', 'value']
model_fields = ['timestamp', 'key', 'value']
queryset = Metadata.objects
def get_queryset(self):
queryset = self._queryset.filter(**self._filters).order_by('key', '-timestamp').distinct('key')
return queryset[self._offset:self._offset + self._limit]
def get_total_count(self):
try:
return self._total_count
except AttributeError:
pass
qs = self._queryset.filter(**self._filters).order_by('key').distinct('key')
self._total_count = qs.count()
return self._total_count
def serialize_list(self, embed, cache):
if not embed:
return super(MetadataResource, self).serialize_list(embed, cache)
href = self.get_list_href()
serialized_data = {
'_links': {
'self': {'href': href},
'curies': CHAIN_CURIES,
'createForm': {
'href': self.get_create_href(),
'title': 'Add Metadata'
}
},
'totalCount': self.get_total_count()
}
objs = self.get_queryset()
serialized_data['data'] = [{
'key': obj.key,
'value': obj.value}
for obj in objs]
serialized_data = self.add_page_links(serialized_data, href)
return serialized_data
def serialize_single(self, embed=True, cache=None, rels=True, *args, **kwargs):
return super(
MetadataResource,
self).serialize_single(
embed,
cache,
rels,
*args,
**dict(kwargs, edit=False))
@classmethod
@csrf_exempt
def edit_view(cls, request, id):
return render_error(HTTP_STATUS_BAD_REQUEST,
"Metadata are immutable",
request)
class SensorDataResource(Resource):
def __init__(self, *args, **kwargs):
super(SensorDataResource, self).__init__(*args, **kwargs)
def format_time(self, timestamp):
return calendar.timegm(timestamp.timetuple())
def add_page_links(self, data, href, page_start, page_end):
timespan = page_end - page_start
data['_links']['previous'] = {
'href': self.update_href(
href, timestamp__gte=self.format_time(page_start - timespan),
timestamp__lt=self.format_time(page_start)),
'title': '%s to %s' % (page_start - timespan, page_start),
}
data['_links']['self'] = {
'href': self.update_href(
href, timestamp__gte=self.format_time(page_start),
timestamp__lt=self.format_time(page_end)),
}
data['_links']['next'] = {
'href': self.update_href(
href, timestamp__gte=self.format_time(page_end),
timestamp__lt=self.format_time(page_end + timespan)),
'title': '%s to %s' % (page_end, page_end + timespan),
}
return data
def default_timespan(self):
aggtime = self._filters.get('aggtime', None)
if aggtime is None:
return timedelta(hours=6)
elif aggtime == '1h':
return timedelta(hours=500)
elif aggtime == '1d':
return timedelta(days=500)
elif aggtime == '1w':
return timedelta(weeks=500)
else:
raise BadRequestException('Invalid argument for aggtime. Must be 1h, 1d, or 1w')
class ScalarSensorDataResource(SensorDataResource):
display_field = 'timestamp'
resource_name = 'scalar_data'
resource_type = 'scalar_data'
model_fields = ['timestamp', 'value']
schema_type = {'timestamp': ('string', 'date-time'),
'value': ('number', None)}
required_fields = ['value']
def __init__(self, *args, **kwargs):
super(ScalarSensorDataResource, self).__init__(*args, **kwargs)
if self._state == 'data':
self.sensor_id = self._filters.get('sensor_id')
self.value = self.sanitize_field_value('value', self._data.get('value'))
self.timestamp = self.sanitize_field_value('timestamp', self._data.get('timestamp'))
sensor = ScalarSensor.objects.select_related('device', 'metric').get(id=self.sensor_id)
self.device_id = sensor.device.id
self.metric = sensor.metric
self.site_id = sensor.device.site_id
self._state = 'object'
if 'queryset' in kwargs:
pass
def serialize_single(self, embed=True, cache=None, rels=True):
data = {}
for field_name in self.model_fields:
data[field_name] = self.serialize_field(getattr(self, field_name))
return data
@classmethod
def sanitize_field_value(cls, field_name, value):
if field_name == 'value':
return float(value)
if field_name == 'timestamp':
from django.db import models
if value == None:
return timezone.now()
timestamp = parse_datetime(value)
if timezone.is_aware(timestamp):
return timestamp
return timezone.make_aware(timestamp, timezone.get_current_timezone())
def save(self):
response = influx_client.post_data(self.site_id, self.device_id, self.sensor_id,
self.metric, self.value, self.timestamp)
return response
def serialize_list(self, embed, cache):
if not embed:
return super(
ScalarSensorDataResource,
self).serialize_list(
embed,
cache)
href = self.get_list_href()
serialized_data = {
'_links': {
'curies': CHAIN_CURIES,
'createForm': {
'href': self.get_create_href(),
'title': 'Add Data'
}
},
'dataType': 'float'
}
request_time = timezone.now()
# if they are given, then we need to convert them from unix time to use
# in the queryset filter
if 'timestamp__gte' in self._filters:
try:
page_start = datetime.utcfromtimestamp(
float(self._filters['timestamp__gte'])).replace(
tzinfo=timezone.utc)
except ValueError:
raise BadRequestException(
"Invalid timestamp format for lower bound of date range.")
else:
page_start = request_time - self.default_timespan()
if 'timestamp__lt' in self._filters:
try:
page_end = datetime.utcfromtimestamp(
float(self._filters['timestamp__lt'])).replace(
tzinfo=timezone.utc)
except ValueError:
raise BadRequestException(
"Invalid timestamp format for upper bound of date range.")
else:
page_end = request_time
self._filters['timestamp__gte'] = page_start
self._filters['timestamp__lt'] = page_end
objs = influx_client.get_sensor_data(self._filters)
serialized_data = self.add_page_links(serialized_data, href,
page_start, page_end)
serialized_data['data'] = [{
'value': obj['value'],
'timestamp': obj['time']}
for obj in objs]
return serialized_data
def get_cache_key(self):
return self.sensor_id, self.timestamp
def serialize_stream(self):
data = self.serialize_single(rels=False)
data['_links'] = {
'ch:sensor': {'href': full_reverse(
'scalar_sensors-single', self._request,
args=(self._filters['sensor_id'],))}
}
return data
def get_single_href(self):
return full_reverse(self.resource_name + '-single',
self._request, args=(self.sensor_id,self.timestamp))
def get_tags(self):
if not self.sensor_id:
raise ValueError(
'Tried to called get_tags on a resource without an id')
db_sensor = ScalarSensor.objects.select_related('device').get(
id=self.sensor_id)
return ['sensor-%d' % db_sensor.id,
'device-%d' % db_sensor.device_id,
'site-%d' % db_sensor.device.site_id]
@classmethod
def get_field_schema_type(cls, field_name):
if field_name in cls.model_fields:
return cls.schema_type[field_name]
else:
raise NotImplementedError(
"tried to look up field %s but didn't know where" % field_name)
@classmethod
def model_has_field(cls, field_name):
if field_name in cls.model_fields:
return True
return False
class AggregateScalarSensorDataResource(SensorDataResource):
resource_name = 'aggregate_data'
resource_type = 'aggregate_data'
model_fields = ['timestamp', 'max', 'min', 'mean', 'count']
def __init__(self, *args, **kwargs):
super(AggregateScalarSensorDataResource, self).__init__(*args, **kwargs)
def get_list_href(self, embed=False):
href = super(AggregateScalarSensorDataResource, self).get_list_href()
if not embed:
href += '{&aggtime}'
return href
def serialize_list(self, embed, cache):
if not embed:
return super(
AggregateScalarSensorDataResource,
self).serialize_list(
embed,
cache)
if 'aggtime' not in self._filters:
raise BadRequestException(
"Missing aggtime arguement")
href = self.get_list_href(True)
serialized_data = {
'_links': {
'curies': CHAIN_CURIES
},
'dataType': 'float'
}
request_time = timezone.now()
if 'timestamp__gte' in self._filters:
try:
page_start = datetime.utcfromtimestamp(
float(self._filters['timestamp__gte'])).replace(
tzinfo=timezone.utc)
except ValueError:
raise BadRequestException(
"Invalid timestamp format for lower bound of date range.")
else:
page_start = request_time - self.default_timespan()
if 'timestamp__lt' in self._filters:
try:
page_end = datetime.utcfromtimestamp(
float(self._filters['timestamp__lt'])).replace(
tzinfo=timezone.utc)
except ValueError:
raise BadRequestException(
"Invalid timestamp format for upper bound of date range.")
else:
page_end = request_time
self._filters['timestamp__gte'] = page_start
self._filters['timestamp__lt'] = page_end
objs = influx_client.get_sensor_data(self._filters)
serialized_data = self.add_page_links(serialized_data, href,
page_start, page_end)
serialized_data['data'] = [{
'max': obj['max'],
'min': obj['min'],
'mean': obj['mean'],
'count': obj['count'],
'timestamp': obj['time']}
for obj in objs]
return serialized_data
@classmethod
def urls(cls):
base_name = cls.resource_name
return patterns('',
url(r'^$',
cls.list_view, name=base_name + '-list'))
class ScalarSensorResource(Resource):
model = ScalarSensor
display_field = 'metric'
resource_name = 'scalar_sensors'
resource_type = 'scalar_sensor'
required_fields = ['metric', 'unit']
model_fields = ['active']
stub_fields = {'metric': 'name', 'unit': 'name'}
queryset = ScalarSensor.objects
related_fields = {
'ch:dataHistory': CollectionField(ScalarSensorDataResource,
reverse_name='sensor'),
'ch:aggregateData': CollectionField(AggregateScalarSensorDataResource,
reverse_name='sensor'),
'ch:device': ResourceField('chain.core.resources.DeviceResource',
'device'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
def serialize_single(self, embed, cache, *args, **kwargs):
data = super(
ScalarSensorResource,
self).serialize_single(
embed,
cache,
*args,
**kwargs)
data['sensor-type'] = "scalar"
if embed:
data['dataType'] = 'float'
if not kwargs.get('include_data', True):
return data
else:
last_data = influx_client.get_last_sensor_data(self._obj.id)
if last_data:
data['value'] = last_data[0]['last']
data['updated'] = last_data[0]['time']
return data
def get_tags(self):
return ['sensor-%s' % self._obj.id,
'scalar_sensor-%s' % self._obj.id,
'device-%s' % self._obj.device_id,
'site-%s' % self._obj.device.site_id]
class PresenceDataResource(SensorDataResource):
model = PresenceData
display_field = 'timestamp'
resource_name = 'presencedata'
resource_type = 'presencedata'
model_fields = ['timestamp', 'present', 'person', 'sensor']
required_fields = ['person', 'sensor', 'present']
queryset = PresenceData.objects
def __init__(self, *args, **kwargs):
super(PresenceDataResource, self).__init__(*args, **kwargs)
if 'queryset' in kwargs:
pass
def serialize_single(self, embed, cache):
serialized_data = super(
PresenceDataResource,
self).serialize_single(
embed,
cache)
if 'person' in serialized_data:
del serialized_data['person']
if 'sensor' in serialized_data:
del serialized_data['sensor']
if '_links' not in serialized_data:
serialized_data['_links'] = {}
serialized_data['_links'].update(self.get_additional_links())
return serialized_data
def get_additional_links(self):
return {
'person': {
'href': self.get_person_url(
self._obj.person), 'title': "%s, %s" %
(self._obj.person.last_name, self._obj.person.first_name)}, 'sensor': {
'href': self.get_sensor_url(
self._obj.sensor), 'title': "%s->%s" %
(self._obj.sensor.device.name, self._obj.sensor.metric)}}
def serialize_list(self, embed, cache):
if not embed:
return super(
PresenceDataResource,
self).serialize_list(
embed,
cache)
href = self.get_list_href()
items = []
serialized_data = {
'_links': {
'curies': CHAIN_CURIES,
'createForm': {
'href': self.get_create_href(),
'title': 'Add Data'
},
'items': items
},
'dataType': 'presence'
}
request_time = timezone.now()
# if they are given, then we need to convert them from unix time to use
# in the queryset filter
if 'timestamp__gte' in self._filters:
try:
page_start = datetime.utcfromtimestamp(
float(self._filters['timestamp__gte']))
except ValueError:
raise BadRequestException(
"Invalid timestamp format for lower bound of date range.")
else:
page_start = request_time - self.default_timespan()
if 'timestamp__lt' in self._filters:
try:
page_end = datetime.utcfromtimestamp(
float(self._filters['timestamp__lt']))
except ValueError:
raise BadRequestException(
"Invalid timestamp format for upper bound of date range.")
else:
page_end = request_time
self._filters['timestamp__gte'] = page_start
self._filters['timestamp__lt'] = page_end
objs = self._queryset.filter(**self._filters).order_by('timestamp')
serialized_data = self.add_page_links(serialized_data, href,
page_start, page_end)
# Make links:
for obj in objs:
presence_data_resource = PresenceDataResource(
obj=obj,
request=self._request)
items.append(
{
'href': presence_data_resource.get_single_href(),
'title': "%s %s %s at time %s" %
(obj.person.last_name,
"at" if obj.present else "left",
obj.sensor.device,
obj.timestamp.isoformat())})
return serialized_data
def get_person_url(self, obj):
if self._request is None:
# No way to form URL, just return the person's ID
return obj.id
person_resource = PersonResource(obj=obj, request=self._request)
return person_resource.get_single_href()
def get_sensor_url(self, obj):
if self._request is None:
return obj.id
psensor_resource = PresenceSensorResource(
obj=obj,
request=self._request)
return psensor_resource.get_single_href()
def serialize_stream(self):
data = self.serialize_single(False, None) # (rels=False)
# TODO: Make useful
data['_links'] = {
'href': self.get_single_href(),
#'person':
}
data['_links'].update(self.get_additional_links())
return data
def get_tags(self):
if not self._obj:
raise ValueError(
'Tried to called get_tags on a resource without an object')
db_sensor = PresenceSensor.objects.select_related('device').get(
id=self._obj.sensor_id)
return ['person-%d' % self._obj.person_id,
'sensor-%d' % db_sensor.id,
'device-%d' % db_sensor.device_id,
'site-%d' % db_sensor.device.site_id]
def get_filled_schema(self):
schema = super(PresenceDataResource, self).get_filled_schema()
# we need to replace the sensor and/or person links with just
# the URL instead of the full object
props = schema['properties']
if 'person' in props:
person_default = props['person']['default']
props['person']['default'] = self.get_person_url(person_default)
if 'sensor' in props:
sensor_default = props['sensor']['default']
props['sensor']['default'] = self.get_sensor_url(sensor_default)
return schema
class PresenceSensorResource(Resource):
model = PresenceSensor
display_field = 'metric'
resource_name = 'presence_sensors'
resource_type = 'presence_sensor'
required_fields = ['metric']
# for now, name is hardcoded as the only attribute of metric and unit
stub_fields = {'metric': 'name'}
queryset = PresenceSensor.objects
related_fields = {
'ch:dataHistory': CollectionField(PresenceDataResource,
reverse_name='sensor'),
'ch:device': ResourceField('chain.core.resources.DeviceResource',
'device'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
def serialize_single(self, embed, cache, *args, **kwargs):
data = super(
PresenceSensorResource,
self).serialize_single(
embed,
cache,
*args,
**kwargs)
data['sensor-type'] = "presence"
data['dataType'] = "presence"
if embed:
if '_embedded' not in data:
data['_embedded'] = {}
data['_embedded'].update(self.get_additional_embedded())
if '_links' not in data:
data['_links'] = {}
data['_links'].update(self.get_additional_links())
return data
def get_additional_links(self):
links = {}
last_data = self._obj.presence_data.order_by(
'timestamp').reverse()[:1]
if last_data:
links['last-visit'] = {
'href': self.get_presense_data_url(
last_data[0]), 'title': "%s at %s" %
(last_data[0].person, last_data[0].timestamp.isoformat())}
return links
def get_additional_embedded(self):
embedded = {}
last_data = self._obj.presence_data.order_by(
'timestamp').reverse()[:1]
if last_data:
embedded['last-visit'] = PresenceDataResource(obj=last_data[0], request=self._request)\
.serialize_single(False, {})
return embedded
def get_person_url(self, obj):
if self._request is None:
# No way to form URL, just return the person's ID
return obj.id
person_resource = PersonResource(obj=obj, request=self._request)
return person_resource.get_single_href()
def get_presense_data_url(self, obj):
if self._request is None:
return obj.id
pdata_resource = PresenceDataResource(obj=obj, request=self._request)
return pdata_resource.get_single_href()
def get_sensor_url(self, obj):
if self._request is None:
# No way to form URL, just return the person's ID
return obj.id
psensor_resource = PresenceSensorResource(
obj=obj,
request=self._request)
return psensor_resource.get_single_href()
def get_tags(self):
return ['sensor-%s' % self._obj.id,
'presense_sensor-%s' % self._obj.id,
'device-%s' % self._obj.device_id,
'site-%s' % self._obj.device.site_id]
class PersonResource(Resource):
model = Person
display_field = 'last_name'
resource_name = 'people'
resource_type = 'person'
required_fields = ['first_name', 'last_name']
model_fields = ['first_name', 'last_name', 'twitter_handle', 'rfid']
related_fields = {
'ch:presence-data': CollectionField(PresenceDataResource,
reverse_name='person'),
'ch:site': ResourceField('chain.core.resources.SiteResource', 'site'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
queryset = Person.objects
def serialize_single(self, embed, cache, *args, **kwargs):
data = super(
PersonResource,
self).serialize_single(
embed,
cache,
*args,
**kwargs)
if embed:
if '_embedded' not in data:
data['_embedded'] = {}
data['_embedded'].update(self.get_additional_embedded())
if '_links' in data:
data['_links'].update(self.get_additional_links())
return data
def get_presence_data(self):
filters = {
'person': self._obj
}
return PresenceData.objects.filter(**filters).order_by('timestamp')[:1]
def get_additional_links(self):
links = {}
last_data = self.get_presence_data()
if last_data:
links['last-visit'] = {
'href': self.get_presense_data_url(
last_data[0]),
'title': "at %s->%s at time %s" %
(last_data[0].sensor.device,
last_data[0].sensor.metric,
last_data[0].timestamp.isoformat())}
if self._obj.picture_url:
links['picture'] = {
'href': self._obj.picture_url,
'title': 'Picture URL (external)'
}
return links
def get_additional_embedded(self):
embedded = {}
last_data = self.get_presence_data()
if last_data:
embedded['last-visit'] = PresenceDataResource(obj=last_data[0], request=self._request)\
.serialize_single(False, {})
return embedded
def get_presense_data_url(self, obj):
if self._request is None:
return obj.id
pdata_resource = PresenceDataResource(obj=obj, request=self._request)
return pdata_resource.get_single_href()
def get_tags(self):
# sometimes the site_id field is unicode? weird
return ['person-%d' % self._obj.id,
'site-%s' % self._obj.site_id]
def json_merge(obj1, obj2):
    """Recursively merge two JSON-style values, preferring *obj1* on conflict.

    Lists: *obj1* followed by the elements of *obj2* not already in *obj1*
    (elements that repeat within *obj2* itself are kept, matching the
    original behavior). Dicts: merged key-by-key, recursing on shared keys.
    Anything else: *obj1* wins unchanged.
    """
    if isinstance(obj1, list):
        # Merge array: dedupe obj2 against obj1 only.
        seen = set(obj1)
        merged = list(obj1)
        merged.extend(el for el in obj2 if el not in seen)
        return merged
    if isinstance(obj1, dict):
        # Merge object: recurse where both sides define the key.
        merged = {
            key: (json_merge(val, obj2[key]) if key in obj2 else val)
            for key, val in obj1.items()
        }
        for key, val in obj2.items():
            merged.setdefault(key, val)
        return merged
    # Could not merge; keep the first object's version.
    return obj1
class MixedSensorResource(Resource):
model = ScalarSensor
display_field = 'metric'
resource_name = 'sensors'
resource_type = 'sensor'
# for now, name is hardcoded as the only attribute of metric and unit
stub_fields = {'metric': 'name'}
queryset = ScalarSensor.objects
available_sensor_types = {
'scalar': {
'model': ScalarSensor,
'resource': ScalarSensorResource
},
# 'presence': {
# 'model': PresenceSensor,
# 'resource': PresenceSensorResource
# }
}
related_fields = {
'ch:device': ResourceField('chain.core.resources.DeviceResource',
'device')
}
@classmethod
def get_schema(cls, filters=None):
schema = {
'required': ['sensor-type'],
'type': 'object',
'properties': {
'sensor-type': {
'type': 'string',
'title': 'sensor-type',
'enum': cls.available_sensor_types.keys()
}
},
'title': 'Create Sensor'
}
for sensor_type in cls.available_sensor_types:
sub_schema = cls.available_sensor_types[
sensor_type]['resource'].get_schema(filters)
schema = json_merge(schema, sub_schema)
return schema
@classmethod
def create_list(cls, data, req):
raise Exception("Not yet implemented.")
@classmethod
def create_single(cls, data, req):
if u'sensor-type' not in data:
# raise Exception("'type' property not found")
# For temporary back-compatability, assume it
# is a ScalarSensor:
return ScalarSensorResource.create_single(data, req)
for sensor_type in cls.available_sensor_types:
if data['sensor-type'] == sensor_type:
del data['sensor-type']
return cls.available_sensor_types[sensor_type][
'resource'].create_single(data, req)
# TODO: Return 400 rather than raising an exception
raise Exception("Unrecognized sensor type.")
def serialize_single(self, embed, cache, *args, **kwargs):
data = super(
MixedSensorResource,
self).serialize_single(
embed,
cache,
*args,
**kwargs)
if embed:
pass
if '_links' in data:
data['_links'].update(self.get_links())
data['totalCount'] = len(data['_links']['items'])
return data
def serialize_list(self, embed, cache, *args, **kwargs):
data = super(
MixedSensorResource,
self).serialize_list(
embed=embed,
cache=cache,
*args,
**kwargs)
if embed:
pass
if '_links' in data:
data['_links'].update(self.get_links())
data['totalCount'] = len(data['_links']['items'])
return data
def get_links(self):
mapped_model_to_res = self.map_model_to_resource()
sensors = self.query_models()
items = []
for sensor in sensors:
items.append(
{
'href': (
mapped_model_to_res[
type(sensor)](
obj=sensor,
request=self._request)).get_single_href(),
'title': "%s" %
sensor})
return {'items': items}
def map_model_to_resource(self):
mapped = {}
for sensor_type in self.available_sensor_types:
sensor_details = self.available_sensor_types[sensor_type]
mapped[sensor_details['model']] = sensor_details['resource']
return mapped
def query_models(self):
results = []
for sensor_type in self.available_sensor_types:
modelResults = self.available_sensor_types[sensor_type][
'model'].objects.filter(**self._filters)
results.extend(modelResults)
return results
def get_tags(self):
return ['sensor-%s' % self._obj.id,
'device-%s' % self._obj.device_id,
'site-%s' % self._obj.device.site_id]
class DeviceResource(Resource):
model = Device
display_field = 'name'
resource_name = 'devices'
resource_type = 'device'
required_fields = ['name']
model_fields = ['name', 'description', 'building', 'floor', 'room', 'active']
related_fields = {
'ch:sensors': CollectionField(MixedSensorResource,
reverse_name='device'),
'ch:site': ResourceField('chain.core.resources.SiteResource', 'site'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
queryset = Device.objects
def get_tags(self):
# sometimes the site_id field is unicode? weird
return ['device-%d' % self._obj.id,
'site-%s' % self._obj.site_id]
class SiteResource(Resource):
model = Site
# TODO _href should be the external URL if present
resource_name = 'sites'
resource_type = 'site'
display_field = 'name'
model_fields = ['name']
required_fields = ['name']
related_fields = {
'ch:devices': CollectionField(DeviceResource, reverse_name='site'),
# 'ch:people': CollectionField(PersonResource, reverse_name='site'),
'ch:metadata': MetadataCollectionField(MetadataResource)
}
queryset = Site.objects
def serialize_single(self, embed, cache):
data = super(SiteResource, self).serialize_single(embed, cache)
if embed:
stream = self._obj.raw_zmq_stream
if stream:
data['_links']['rawZMQStream'] = {
'href': stream,
'title': 'Raw ZMQ Stream'}
data['_links']['ch:siteSummary'] = {
'title': 'Summary',
'href': full_reverse('site-summary', self._request,
args=(self._obj.id,))
}
return data
def get_filled_schema(self):
schema = super(SiteResource, self).get_filled_schema()
schema['properties']['rawZMQStream']['default'] = \
self._obj.raw_zmq_stream
return schema
def deserialize(self):
super(SiteResource, self).deserialize()
if 'rawZMQStream' in self._data:
self._obj.raw_zmq_stream = self._data['rawZMQStream']
return self._obj
def update(self, data):
super(SiteResource, self).update(data)
if 'rawZMQStream' in data:
self._obj.raw_zmq_stream = data['rawZMQStream']
self._obj.save()
def get_tags(self):
return ['site-%d' % self._obj.id]
@classmethod
def get_schema(cls, filters=None):
schema = super(SiteResource, cls).get_schema(filters)
schema['properties']['rawZMQStream'] = {
'type': 'string',
'format': 'uri',
'title': 'rawZMQStream'
}
return schema
# cache for 1hr
@classmethod
@cache_control(max_age=3600)
def site_summary_view(cls, request, id):
#filters = request.GET.dict()
devices = Device.objects.filter(site_id=id).select_related(
'sensors',
'sensors__metric',
'sensors__unit'
)
response = {
'_links': {
'self': {'href': full_reverse('site-summary', request,
args=(id,))},
},
'devices': []
}
sensor_data_list = influx_client.get_last_data_from_all_sensors(id)
sensor_data_dict = {}
for data_point in sensor_data_list:
sensor_data_dict[int(data_point['sensor_id'])] = (data_point['last_value'], data_point['time'])
for device in devices:
dev_resource = DeviceResource(obj=device, request=request)
dev_data = dev_resource.serialize(rels=False)
dev_data['href'] = dev_resource.get_single_href()
response['devices'].append(dev_data)
dev_data['sensors'] = []
for sensor in device.sensors.all():
sensor_resource = ScalarSensorResource(
obj=sensor,
request=request)
sensor_data = sensor_resource.serialize(rels=False, include_data=False)
try:
sensor_data['value'] = sensor_data_dict[sensor.id][0]
sensor_data['updated'] = sensor_data_dict[sensor.id][1]
except KeyError:
# looks like we don't have any data for this sensor
pass
sensor_data['href'] = sensor_resource.get_single_href()
dev_data['sensors'].append(sensor_data)
sensor_data['data'] = []
return cls.render_response(response, request)
@classmethod
def urls(cls):
base_patterns = super(SiteResource, cls).urls()
base_patterns.append(
url(r'^(\d+)/summary$', cls.site_summary_view,
name='site-summary'))
return base_patterns
class ApiRootResource(Resource):
    """Entry point of the API: serves links to the top-level collections."""

    def __init__(self, request):
        self._request = request

    def serialize(self):
        """Build the HAL document describing the API root."""
        links = {
            'self': {'href': full_reverse('api-root', self._request)},
            'curies': CHAIN_CURIES,
            'ch:sites': {
                'title': 'Sites',
                'href': full_reverse('sites-list', self._request)
            }
        }
        return {'_links': links}

    @classmethod
    def single_view(cls, request):
        """Instantiate the resource for this request and render it."""
        return cls.render_response(cls(request=request).serialize(), request)
# Root URL: the API entry point resource.
urls = patterns(
    '',
    url(r'^/?$', ApiRootResource.single_view, name='api-root')
)
# URLs. If we WERE redirecting, we would use RedirectView.as_view()
#
# put these first so they are overridden by the later ones, particularly when
# doing URL reverse lookup.
urls += patterns('',
                 url("^sensordata/", include(ScalarSensorDataResource.urls())),
                 url("^sensor/", include(ScalarSensorResource.urls())),
                 )
# Mount every resource class under "<resource_name>/" and register it with
# the global resource registry.
resources = [
    MetadataResource,
    ScalarSensorDataResource,
    AggregateScalarSensorDataResource,
    ScalarSensorResource,
    # Disable all the person/presence stuff, which isn't being used anymore
    MixedSensorResource,
    DeviceResource,
    SiteResource]
for resource in resources:
    new_url = url("^%s/" % resource.resource_name, include(resource.urls()))
    urls += patterns('', new_url)
    register_resource(resource)
| true | true |
f7251f653992e182b50932f276e0b927de32b712 | 8,687 | py | Python | tests/losses/test_fastap_loss.py | cwkeam/pytorch-metric-learning | 63e4ecb781c5735ad714f61a3eecc55f72496905 | [
"MIT"
] | 4,357 | 2020-01-15T23:42:35.000Z | 2022-03-31T08:11:48.000Z | tests/losses/test_fastap_loss.py | cwkeam/pytorch-metric-learning | 63e4ecb781c5735ad714f61a3eecc55f72496905 | [
"MIT"
] | 386 | 2020-01-16T02:06:37.000Z | 2022-03-30T07:59:47.000Z | tests/losses/test_fastap_loss.py | cwkeam/pytorch-metric-learning | 63e4ecb781c5735ad714f61a3eecc55f72496905 | [
"MIT"
] | 568 | 2020-01-16T01:08:23.000Z | 2022-03-30T09:18:48.000Z | ######################################
#######ORIGINAL IMPLEMENTATION########
######################################
# FROM https://github.com/kunhe/FastAP-metric-learning/blob/master/pytorch/FastAP_loss.py
# This code is copied directly from the official implementation
# so that we can make sure our implementation returns the same result.
# It's copied under the MIT license.
import torch
from torch.autograd import Variable
def softBinning(D, mid, Delta):
    """Triangular (soft) histogram membership of distances ``D`` for the bin
    centered at ``mid`` with half-width ``Delta``.

    Returns ``max(0, 1 - |D - mid| / Delta)`` elementwise, i.e. 1 at the bin
    center, linearly decaying to 0 at ``mid +/- Delta``.

    Numerically identical to the official implementation's
    ``torch.max(torch.tensor([0], ...).to(device), y)`` but avoids allocating
    a fresh tensor and a host->device transfer on every call.
    """
    y = 1 - torch.abs(D - mid) / Delta
    return torch.clamp(y, min=0)
def dSoftBinning(D, mid, Delta):
    """Derivative of :func:`softBinning` w.r.t. ``D``: ``+1/Delta`` on the
    rising edge ``(mid - Delta, mid]``, ``-1/Delta`` on the falling edge
    ``(mid, mid + Delta]``, and 0 elsewhere."""
    on_rising_edge = ((D > (mid - Delta)) & (D <= mid)).type(D.dtype)
    on_falling_edge = ((D > mid) & (D <= (mid + Delta))).type(D.dtype)
    return (on_rising_edge - on_falling_edge) / Delta
######################################
#######ORIGINAL IMPLEMENTATION########
######################################
# FROM https://github.com/kunhe/FastAP-metric-learning/blob/master/pytorch/FastAP_loss.py
# This code is copied directly from the official implementation
# so that we can make sure our implementation returns the same result.
# It's copied under the MIT license.
class OriginalImplementationFastAP(torch.autograd.Function):
    """
    FastAP - autograd function definition
    This class implements the FastAP loss from the following paper:
    "Deep Metric Learning to Rank",
    F. Cakir, K. He, X. Xia, B. Kulis, S. Sclaroff. CVPR 2019
    NOTE:
    Given a input batch, FastAP does not sample triplets from it as it's not
    a triplet-based method. Therefore, FastAP does not take a Sampler as input.
    Rather, we specify how the input batch is selected.

    This is a verbatim reference copy of the official implementation (MIT
    licensed); it is kept byte-for-byte so library results can be checked
    against it. Do not "improve" the math here.
    """
    @staticmethod
    def forward(ctx, input, target, num_bins):
        """
        Args:
            input: torch.Tensor(N x embed_dim), embedding matrix
            target: torch.Tensor(N x 1), class labels
            num_bins: int, number of bins in distance histogram
        """
        N = target.size()[0]
        assert input.size()[0] == N, "Batch size donesn't match!"
        # 1. get affinity matrix: +1 for same-label pairs, -1 otherwise,
        #    with the diagonal (self-pairs) zeroed out.
        Y = target.unsqueeze(1)
        Aff = 2 * (Y == Y.t()).type(input.dtype) - 1
        Aff.masked_fill_(
            torch.eye(N, N).bool().to(input.device), 0
        )  # set diagonal to 0
        I_pos = (Aff > 0).type(input.dtype).to(input.device)
        I_neg = (Aff < 0).type(input.dtype).to(input.device)
        N_pos = torch.sum(I_pos, 1)
        # 2. compute distances from embeddings
        # squared Euclidean distance with range [0,4]
        # (the [0,4] range assumes rows of `input` are L2-normalized --
        #  TODO confirm at the call site)
        dist2 = 2 - 2 * torch.mm(input, input.t())
        # 3. estimate discrete histograms via soft (triangular) binning so
        #    the histogram is differentiable in the distances.
        Delta = torch.tensor(4.0 / num_bins).to(input.device)
        Z = torch.linspace(0.0, 4.0, steps=num_bins + 1).to(input.device)
        L = Z.size()[0]
        h_pos = torch.zeros((N, L), dtype=input.dtype).to(input.device)
        h_neg = torch.zeros((N, L), dtype=input.dtype).to(input.device)
        for l in range(L):
            pulse = softBinning(dist2, Z[l], Delta)
            h_pos[:, l] = torch.sum(pulse * I_pos, 1)
            h_neg[:, l] = torch.sum(pulse * I_neg, 1)
        H_pos = torch.cumsum(h_pos, 1)
        h = h_pos + h_neg
        H = torch.cumsum(h, 1)
        # 4. compute FastAP (soft average precision per anchor); NaN/Inf
        #    entries come from empty bins (H == 0) and are zeroed.
        FastAP = h_pos * H_pos / H
        FastAP[torch.isnan(FastAP) | torch.isinf(FastAP)] = 0
        FastAP = torch.sum(FastAP, 1) / N_pos
        FastAP = FastAP[~torch.isnan(FastAP)]
        loss = 1 - torch.mean(FastAP)
        # 6. save for backward
        ctx.save_for_backward(input, target)
        ctx.Z = Z
        ctx.Delta = Delta
        ctx.dist2 = dist2
        ctx.I_pos = I_pos
        ctx.I_neg = I_neg
        ctx.h_pos = h_pos
        ctx.h_neg = h_neg
        ctx.H_pos = H_pos
        ctx.N_pos = N_pos
        ctx.h = h
        ctx.H = H
        ctx.L = torch.tensor(L)
        return loss
    @staticmethod
    def backward(ctx, grad_output):
        # NOTE(review): grad_output is never used below -- the gradient is
        # computed as if the upstream gradient were 1.
        input, target = ctx.saved_tensors
        Z = Variable(ctx.Z, requires_grad=False)
        Delta = Variable(ctx.Delta, requires_grad=False)
        dist2 = Variable(ctx.dist2, requires_grad=False)
        I_pos = Variable(ctx.I_pos, requires_grad=False)
        I_neg = Variable(ctx.I_neg, requires_grad=False)
        h = Variable(ctx.h, requires_grad=False)
        H = Variable(ctx.H, requires_grad=False)
        h_pos = Variable(ctx.h_pos, requires_grad=False)
        h_neg = Variable(ctx.h_neg, requires_grad=False)
        H_pos = Variable(ctx.H_pos, requires_grad=False)
        N_pos = Variable(ctx.N_pos, requires_grad=False)
        L = Z.size()[0]
        H2 = torch.pow(H, 2)
        H_neg = H - H_pos
        # 1. d(FastAP)/d(h+)
        LTM1 = torch.tril(torch.ones(L, L), -1)  # lower triangular matrix
        tmp1 = h_pos * H_neg / H2
        tmp1[torch.isnan(tmp1)] = 0
        d_AP_h_pos = (H_pos * H + h_pos * H_neg) / H2
        # NOTE(review): .cuda() hard-codes GPU tensors; this backward would
        # fail on a CPU-only run.
        d_AP_h_pos = d_AP_h_pos + torch.mm(tmp1, LTM1.cuda())
        d_AP_h_pos = d_AP_h_pos / N_pos.repeat(L, 1).t()
        d_AP_h_pos[torch.isnan(d_AP_h_pos) | torch.isinf(d_AP_h_pos)] = 0
        # 2. d(FastAP)/d(h-)
        LTM0 = torch.tril(torch.ones(L, L), 0)  # lower triangular matrix
        tmp2 = -h_pos * H_pos / H2
        tmp2[torch.isnan(tmp2)] = 0
        d_AP_h_neg = torch.mm(tmp2, LTM0.cuda())
        d_AP_h_neg = d_AP_h_neg / N_pos.repeat(L, 1).t()
        d_AP_h_neg[torch.isnan(d_AP_h_neg) | torch.isinf(d_AP_h_neg)] = 0
        # 3. d(FastAP)/d(embedding), chaining through the soft-binning pulses
        d_AP_x = 0
        for l in range(L):
            dpulse = dSoftBinning(dist2, Z[l], Delta)
            dpulse[torch.isnan(dpulse) | torch.isinf(dpulse)] = 0
            ddp = dpulse * I_pos
            ddn = dpulse * I_neg
            alpha_p = torch.diag(d_AP_h_pos[:, l])  # N*N
            alpha_n = torch.diag(d_AP_h_neg[:, l])
            Ap = torch.mm(ddp, alpha_p) + torch.mm(alpha_p, ddp)
            An = torch.mm(ddn, alpha_n) + torch.mm(alpha_n, ddn)
            # accumulate gradient
            d_AP_x = d_AP_x - torch.mm(input.t(), (Ap + An))
        # Negated because the loss is 1 - mean(FastAP).
        grad_input = -d_AP_x
        return grad_input.t(), None, None
######################################
#######ORIGINAL IMPLEMENTATION########
######################################
# FROM https://github.com/kunhe/FastAP-metric-learning/blob/master/pytorch/FastAP_loss.py
# This code is copied directly from the official implementation
# so that we can make sure our implementation returns the same result.
# It's copied under the MIT license.
class OriginalImplementationFastAPLoss(torch.nn.Module):
    """
    FastAP - loss layer definition
    This class implements the FastAP loss from the following paper:
    "Deep Metric Learning to Rank",
    F. Cakir, K. He, X. Xia, B. Kulis, S. Sclaroff. CVPR 2019
    """
    def __init__(self, num_bins=10):
        # num_bins: number of histogram bins used to discretize the
        # squared-distance range [0, 4].
        super(OriginalImplementationFastAPLoss, self).__init__()
        self.num_bins = num_bins
    def forward(self, batch, labels):
        # Thin nn.Module wrapper around the custom autograd Function.
        return OriginalImplementationFastAP.apply(batch, labels, self.num_bins)
### Testing this library's implementation ###
import unittest
from pytorch_metric_learning.losses import FastAPLoss
from .. import TEST_DEVICE, TEST_DTYPES
from ..zzz_testing_utils.testing_utils import angle_to_coord
class TestFastAPLoss(unittest.TestCase):
    """Check that the library FastAPLoss matches the copied official
    reference implementation across all supported dtypes."""

    def test_fast_ap_loss(self):
        num_bins = 5
        loss_func = FastAPLoss(num_bins)
        original_loss_func = OriginalImplementationFastAPLoss(num_bins)
        # Only used at the end to assert that ref_emb is rejected.
        ref_emb = torch.randn(32, 32)
        ref_labels = torch.randint(0, 10, (32,))
        for dtype in TEST_DTYPES:
            embedding_angles = torch.arange(0, 180)
            embeddings = torch.tensor(
                [angle_to_coord(a) for a in embedding_angles],
                requires_grad=True,
                dtype=dtype,
            ).to(
                TEST_DEVICE
            )  # 2D embeddings
            labels = torch.randint(low=0, high=10, size=(180,)).to(TEST_DEVICE)
            loss = loss_func(embeddings, labels)
            # Also exercise the library backward pass (the reference
            # implementation's backward is never invoked here).
            loss.backward()
            # The reference implementation expects L2-normalized inputs.
            original_loss = original_loss_func(
                torch.nn.functional.normalize(embeddings), labels
            )
            # float16 accumulates more rounding error, so relax tolerance.
            rtol = 1e-2 if dtype == torch.float16 else 1e-5
            self.assertTrue(torch.isclose(loss, original_loss, rtol=rtol))
            # fastap doesn't support ref_emb
            self.assertRaises(
                ValueError,
                lambda: loss_func(
                    embeddings, labels, ref_emb=ref_emb, ref_labels=ref_labels
                ),
            )
| 36.045643 | 89 | 0.590998 | torch.mm(input, input.t())
# 3. estimate discrete histograms
Delta = torch.tensor(4.0 / num_bins).to(input.device)
Z = torch.linspace(0.0, 4.0, steps=num_bins + 1).to(input.device)
L = Z.size()[0]
h_pos = torch.zeros((N, L), dtype=input.dtype).to(input.device)
h_neg = torch.zeros((N, L), dtype=input.dtype).to(input.device)
for l in range(L):
pulse = softBinning(dist2, Z[l], Delta)
h_pos[:, l] = torch.sum(pulse * I_pos, 1)
h_neg[:, l] = torch.sum(pulse * I_neg, 1)
H_pos = torch.cumsum(h_pos, 1)
h = h_pos + h_neg
H = torch.cumsum(h, 1)
# 4. compate FastAP
FastAP = h_pos * H_pos / H
FastAP[torch.isnan(FastAP) | torch.isinf(FastAP)] = 0
FastAP = torch.sum(FastAP, 1) / N_pos
FastAP = FastAP[~torch.isnan(FastAP)]
loss = 1 - torch.mean(FastAP)
# 6. save for backward
ctx.save_for_backward(input, target)
ctx.Z = Z
ctx.Delta = Delta
ctx.dist2 = dist2
ctx.I_pos = I_pos
ctx.I_neg = I_neg
ctx.h_pos = h_pos
ctx.h_neg = h_neg
ctx.H_pos = H_pos
ctx.N_pos = N_pos
ctx.h = h
ctx.H = H
ctx.L = torch.tensor(L)
return loss
@staticmethod
def backward(ctx, grad_output):
input, target = ctx.saved_tensors
Z = Variable(ctx.Z, requires_grad=False)
Delta = Variable(ctx.Delta, requires_grad=False)
dist2 = Variable(ctx.dist2, requires_grad=False)
I_pos = Variable(ctx.I_pos, requires_grad=False)
I_neg = Variable(ctx.I_neg, requires_grad=False)
h = Variable(ctx.h, requires_grad=False)
H = Variable(ctx.H, requires_grad=False)
h_pos = Variable(ctx.h_pos, requires_grad=False)
h_neg = Variable(ctx.h_neg, requires_grad=False)
H_pos = Variable(ctx.H_pos, requires_grad=False)
N_pos = Variable(ctx.N_pos, requires_grad=False)
L = Z.size()[0]
H2 = torch.pow(H, 2)
H_neg = H - H_pos
# 1. d(FastAP)/d(h+)
LTM1 = torch.tril(torch.ones(L, L), -1) # lower traingular matrix
tmp1 = h_pos * H_neg / H2
tmp1[torch.isnan(tmp1)] = 0
d_AP_h_pos = (H_pos * H + h_pos * H_neg) / H2
d_AP_h_pos = d_AP_h_pos + torch.mm(tmp1, LTM1.cuda())
d_AP_h_pos = d_AP_h_pos / N_pos.repeat(L, 1).t()
d_AP_h_pos[torch.isnan(d_AP_h_pos) | torch.isinf(d_AP_h_pos)] = 0
# 2. d(FastAP)/d(h-)
LTM0 = torch.tril(torch.ones(L, L), 0) # lower triangular matrix
tmp2 = -h_pos * H_pos / H2
tmp2[torch.isnan(tmp2)] = 0
d_AP_h_neg = torch.mm(tmp2, LTM0.cuda())
d_AP_h_neg = d_AP_h_neg / N_pos.repeat(L, 1).t()
d_AP_h_neg[torch.isnan(d_AP_h_neg) | torch.isinf(d_AP_h_neg)] = 0
# 3. d(FastAP)/d(embedding)
d_AP_x = 0
for l in range(L):
dpulse = dSoftBinning(dist2, Z[l], Delta)
dpulse[torch.isnan(dpulse) | torch.isinf(dpulse)] = 0
ddp = dpulse * I_pos
ddn = dpulse * I_neg
alpha_p = torch.diag(d_AP_h_pos[:, l]) # N*N
alpha_n = torch.diag(d_AP_h_neg[:, l])
Ap = torch.mm(ddp, alpha_p) + torch.mm(alpha_p, ddp)
An = torch.mm(ddn, alpha_n) + torch.mm(alpha_n, ddn)
# accumulate gradient
d_AP_x = d_AP_x - torch.mm(input.t(), (Ap + An))
grad_input = -d_AP_x
return grad_input.t(), None, None
######################################
#######ORIGINAL IMPLEMENTATION########
######################################
# FROM https://github.com/kunhe/FastAP-metric-learning/blob/master/pytorch/FastAP_loss.py
# This code is copied directly from the official implementation
# so that we can make sure our implementation returns the same result.
# It's copied under the MIT license.
class OriginalImplementationFastAPLoss(torch.nn.Module):
def __init__(self, num_bins=10):
super(OriginalImplementationFastAPLoss, self).__init__()
self.num_bins = num_bins
def forward(self, batch, labels):
return OriginalImplementationFastAP.apply(batch, labels, self.num_bins)
TEST_DEVICE, TEST_DTYPES
from ..zzz_testing_utils.testing_utils import angle_to_coord
class TestFastAPLoss(unittest.TestCase):
def test_fast_ap_loss(self):
num_bins = 5
loss_func = FastAPLoss(num_bins)
original_loss_func = OriginalImplementationFastAPLoss(num_bins)
ref_emb = torch.randn(32, 32)
ref_labels = torch.randint(0, 10, (32,))
for dtype in TEST_DTYPES:
embedding_angles = torch.arange(0, 180)
embeddings = torch.tensor(
[angle_to_coord(a) for a in embedding_angles],
requires_grad=True,
dtype=dtype,
).to(
TEST_DEVICE
) # 2D embeddings
labels = torch.randint(low=0, high=10, size=(180,)).to(TEST_DEVICE)
loss = loss_func(embeddings, labels)
loss.backward()
original_loss = original_loss_func(
torch.nn.functional.normalize(embeddings), labels
)
rtol = 1e-2 if dtype == torch.float16 else 1e-5
self.assertTrue(torch.isclose(loss, original_loss, rtol=rtol))
# fastap doesn't support ref_emb
self.assertRaises(
ValueError,
lambda: loss_func(
embeddings, labels, ref_emb=ref_emb, ref_labels=ref_labels
),
)
| true | true |
f72520d5e3e11a9dcb64026d7e62f1b47d0d54fa | 1,554 | py | Python | setup.py | pgjones/flake8-flask | e7099ed941086c92b6f752d4cf2e95c027f7605d | [
"MIT"
] | null | null | null | setup.py | pgjones/flake8-flask | e7099ed941086c92b6f752d4cf2e95c027f7605d | [
"MIT"
] | null | null | null | setup.py | pgjones/flake8-flask | e7099ed941086c92b6f752d4cf2e95c027f7605d | [
"MIT"
] | null | null | null | import os
from setuptools import setup
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(PROJECT_ROOT, 'flake8_flask.py')) as file_:
version_line = [line for line in file_ if line.startswith('__version__')][0]
__version__ = version_line.split('=')[1].strip().strip("'").strip('"')
with open(os.path.join(PROJECT_ROOT, 'README.md')) as file_:
long_description = file_.read()
setup(
name='flake8_flask',
version=__version__,
description='Flake8 plugin that checks Flask code against opinionated style rules',
long_description=long_description,
url='https://github.com/pgjones/flake8-flask',
author='P G Jones',
author_email='philip.graham.jones@googlemail.com',
keywords=[
'flake8',
'plugin',
'flask',
],
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Framework :: Flake8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Quality Assurance',
],
py_modules=['flake8_flask'],
install_requires=[
'flake8',
'setuptools',
],
entry_points={
'flake8.extension': [
'F4 = flake8_flask:Linter',
],
},
zip_safe=False,
)
| 29.320755 | 87 | 0.622909 | import os
from setuptools import setup
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(PROJECT_ROOT, 'flake8_flask.py')) as file_:
version_line = [line for line in file_ if line.startswith('__version__')][0]
__version__ = version_line.split('=')[1].strip().strip("'").strip('"')
with open(os.path.join(PROJECT_ROOT, 'README.md')) as file_:
long_description = file_.read()
setup(
name='flake8_flask',
version=__version__,
description='Flake8 plugin that checks Flask code against opinionated style rules',
long_description=long_description,
url='https://github.com/pgjones/flake8-flask',
author='P G Jones',
author_email='philip.graham.jones@googlemail.com',
keywords=[
'flake8',
'plugin',
'flask',
],
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Framework :: Flake8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Quality Assurance',
],
py_modules=['flake8_flask'],
install_requires=[
'flake8',
'setuptools',
],
entry_points={
'flake8.extension': [
'F4 = flake8_flask:Linter',
],
},
zip_safe=False,
)
| true | true |
f72520ddf2e5e6b82e03983937f6030c0042fce3 | 4,902 | py | Python | testplan/exporters/testing/pdf/renderers/entries/base.py | ymn1k/testplan | b1bde8495c449d75a74a7fe4e7c6501b0476f833 | [
"Apache-2.0"
] | null | null | null | testplan/exporters/testing/pdf/renderers/entries/base.py | ymn1k/testplan | b1bde8495c449d75a74a7fe4e7c6501b0476f833 | [
"Apache-2.0"
] | null | null | null | testplan/exporters/testing/pdf/renderers/entries/base.py | ymn1k/testplan | b1bde8495c449d75a74a7fe4e7c6501b0476f833 | [
"Apache-2.0"
] | 1 | 2019-09-11T09:13:18.000Z | 2019-09-11T09:13:18.000Z | from reportlab.lib import colors
from reportlab.lib.units import inch
from reportlab.platypus import Image
from testplan.common.exporters.pdf import RowStyle, create_table
from testplan.common.exporters.pdf import format_table_style
from testplan.common.utils.registry import Registry
from testplan.testing.multitest.entries import base
from .. import constants
from ..base import BaseRowRenderer, RowData
class SerializedEntryRegistry(Registry):
    """
    Registry that is used for binding assertion classes to PDF renderers.
    Keep in mind that we pass around serialized version of assertion objects
    (generated via `multitest.entries.schemas`) meaning that lookup
    arguments will be dictionary representation instead of assertion object
    instances, hence the need to use class names instead of class objects
    for `data` keys.
    """
    def get_record_key(self, obj):
        # At bind time ``obj`` is an entry class: key on its name.
        return obj.__name__
    def get_lookup_key(self, obj):
        # At lookup time ``obj`` is a serialized entry (a dict).
        return obj['type']
    def get_category(self, obj):
        # e.g. assertion vs. plain entry, as recorded during serialization.
        return obj['meta_type']
# Module-level singleton used by the @registry.bind(...) decorators below.
registry = SerializedEntryRegistry()
@registry.bind_default()
class SerializedEntryRenderer(BaseRowRenderer):
    """Default fallback for all serialized entries."""
    def get_header(self, source, depth, row_idx):
        """Display the description or type as the header."""
        styles = [RowStyle(font=(constants.FONT, constants.FONT_SIZE_SMALL),
                           left_padding=constants.INDENT * depth)]
        header = source['description'] or source['type']
        # Padded to 4 columns to line up with the report's table layout.
        return RowData(content=[header, '', '', '' ],
                       style=styles,
                       start=row_idx)
    def get_row_content(self, source):
        """
        All entries will either have a description or type,
        we display whatever is available.
        """
        return [source['description'] or source['type'], '', '', '']
    def get_row_data(self, source, depth, row_idx):
        """
        Most entries will be rendered as single rows, so we use
        `get_row_content` and `get_row_style` for simplicity.
        """
        result = RowData(
            content=self.get_row_content(source),
            style=self.get_row_style(source, depth),
            start=row_idx
        )
        return result
    def get_row_style(self, source, depth, **kwargs):
        """Default styling for all serialized entries, with small font size.

        Extra keyword arguments are forwarded to RowStyle so subclasses can
        override individual style attributes.
        """
        return RowStyle(
            font=(constants.FONT, constants.FONT_SIZE_SMALL),
            left_padding=constants.INDENT * depth,
            **kwargs
        )
    def get_style(self, source):
        # Entries without a "passed" key (non-assertions) use passing style.
        if 'passed' in source and source['passed'] is False:
            return self.style.failing
        return self.style.passing
    def should_display(self, source):
        # Defer to the configured style's display flag for this entry.
        return self.get_style(source).display_assertion
@registry.bind(base.MatPlot)
class MatPlotRenderer(SerializedEntryRenderer):
    """Render a Matplotlib assertion from a serialized entry."""
    def get_row_data(self, source, depth, row_idx):
        """
        Load the Matplotlib graph from the saved image, set its height and width
        and add it to the row.
        """
        header = self.get_header(source, depth, row_idx)
        styles = [RowStyle(font=(constants.FONT, constants.FONT_SIZE_SMALL),
                           left_padding=constants.INDENT * (depth + 1),
                           text_color=colors.black)]
        # The plot was written to disk at assertion time; embed it from file.
        img = Image(source['image_file_path'])
        # Serialized width/height are in inches; convert to ReportLab points.
        img.drawWidth = source['width'] * inch
        img.drawHeight = source['height'] * inch
        return header + RowData(content=[img, '', '', ''],
                                start=header.end,
                                style=styles)
@registry.bind(base.TableLog)
class TableLogRenderer(SerializedEntryRenderer):
    """Render a Table from a serialized entry."""
    def get_row_data(self, source, depth, row_idx):
        """
        Reformat the rows from the serialized data into a format ReportLab
        accepts. Create a header and a ReportLab table and add it to the row.
        """
        header = self.get_header(source, depth, row_idx)
        row_style = [RowStyle(left_padding=constants.INDENT * (depth + 1))]
        table_style = format_table_style(constants.DISPLAYED_TABLE_STYLE)
        # Shrink the available width by the indentation already consumed.
        max_width = constants.PAGE_WIDTH - (depth * constants.INDENT)
        table = create_table(table=source['table'],
                             columns=source['columns'],
                             row_indices=source['indices'],
                             display_index=source['display_index'],
                             max_width=max_width,
                             style=table_style)
        return header + RowData(content=table,
                                start=header.end,
                                style=row_style)
| 36.311111 | 80 | 0.621991 | from reportlab.lib import colors
from reportlab.lib.units import inch
from reportlab.platypus import Image
from testplan.common.exporters.pdf import RowStyle, create_table
from testplan.common.exporters.pdf import format_table_style
from testplan.common.utils.registry import Registry
from testplan.testing.multitest.entries import base
from .. import constants
from ..base import BaseRowRenderer, RowData
class SerializedEntryRegistry(Registry):
def get_record_key(self, obj):
return obj.__name__
def get_lookup_key(self, obj):
return obj['type']
def get_category(self, obj):
return obj['meta_type']
registry = SerializedEntryRegistry()
@registry.bind_default()
class SerializedEntryRenderer(BaseRowRenderer):
def get_header(self, source, depth, row_idx):
styles = [RowStyle(font=(constants.FONT, constants.FONT_SIZE_SMALL),
left_padding=constants.INDENT * depth)]
header = source['description'] or source['type']
return RowData(content=[header, '', '', '' ],
style=styles,
start=row_idx)
def get_row_content(self, source):
return [source['description'] or source['type'], '', '', '']
def get_row_data(self, source, depth, row_idx):
result = RowData(
content=self.get_row_content(source),
style=self.get_row_style(source, depth),
start=row_idx
)
return result
def get_row_style(self, source, depth, **kwargs):
return RowStyle(
font=(constants.FONT, constants.FONT_SIZE_SMALL),
left_padding=constants.INDENT * depth,
**kwargs
)
def get_style(self, source):
if 'passed' in source and source['passed'] is False:
return self.style.failing
return self.style.passing
def should_display(self, source):
return self.get_style(source).display_assertion
@registry.bind(base.MatPlot)
class MatPlotRenderer(SerializedEntryRenderer):
def get_row_data(self, source, depth, row_idx):
header = self.get_header(source, depth, row_idx)
styles = [RowStyle(font=(constants.FONT, constants.FONT_SIZE_SMALL),
left_padding=constants.INDENT * (depth + 1),
text_color=colors.black)]
img = Image(source['image_file_path'])
img.drawWidth = source['width'] * inch
img.drawHeight = source['height'] * inch
return header + RowData(content=[img, '', '', ''],
start=header.end,
style=styles)
@registry.bind(base.TableLog)
class TableLogRenderer(SerializedEntryRenderer):
def get_row_data(self, source, depth, row_idx):
header = self.get_header(source, depth, row_idx)
row_style = [RowStyle(left_padding=constants.INDENT * (depth + 1))]
table_style = format_table_style(constants.DISPLAYED_TABLE_STYLE)
max_width = constants.PAGE_WIDTH - (depth * constants.INDENT)
table = create_table(table=source['table'],
columns=source['columns'],
row_indices=source['indices'],
display_index=source['display_index'],
max_width=max_width,
style=table_style)
return header + RowData(content=table,
start=header.end,
style=row_style)
| true | true |
f72521063380e9fb0c546147201e0347312a3209 | 560 | py | Python | python/0011_finding_the_percentage.py | basoares/hackerrank | 3d294ba6da38efff0da496b16085ca557ce35985 | [
"MIT"
] | null | null | null | python/0011_finding_the_percentage.py | basoares/hackerrank | 3d294ba6da38efff0da496b16085ca557ce35985 | [
"MIT"
] | null | null | null | python/0011_finding_the_percentage.py | basoares/hackerrank | 3d294ba6da38efff0da496b16085ca557ce35985 | [
"MIT"
] | null | null | null | '''
The provided code stub will read in a dictionary containing key/value pairs of
name:[marks] for a list of students. Print the average of the marks array for
the student name provided, showing 2 places after the decimal.
'''
if __name__ == '__main__':
    # Number of student records that follow.
    n = int(input())
    student_marks = {}
    for _ in range(n):
        # Each record: "<name> <mark> <mark> ...".
        tokens = input().split()
        student_marks[tokens[0]] = [float(t) for t in tokens[1:]]
    query_name = input()
    marks = student_marks[query_name]
    # Average of the queried student's marks, two decimal places.
    average = sum(marks) / len(marks)
    print(f'{average:.2f}')
| 29.473684 | 78 | 0.648214 | if __name__ == '__main__':
n = int(input())
student_marks = {}
for _ in range(n):
name, *line = input().split()
scores = list(map(float, line))
student_marks[name] = scores
query_name = input()
marks = student_marks[query_name]
print(f'{(sum(marks)/float(len(marks))):.2f}')
| true | true |
f72522049a195e8802f79fdd1006b47629498830 | 287 | py | Python | readH5.py | ChutianShen/pointnet_kitti | 6ebd2c7c203c4fcc8172f306c85e55ea06429ba5 | [
"MIT"
] | null | null | null | readH5.py | ChutianShen/pointnet_kitti | 6ebd2c7c203c4fcc8172f306c85e55ea06429ba5 | [
"MIT"
] | null | null | null | readH5.py | ChutianShen/pointnet_kitti | 6ebd2c7c203c4fcc8172f306c85e55ea06429ba5 | [
"MIT"
] | null | null | null | import h5py
filename = './sem_seg/indoor3d_sem_seg_hdf5_data/ply_data_all_0.h5'
#filename = './sem_seg/converted_KITTI/frame_10.h5'

# Use a context manager so the HDF5 handle is released even if a dataset
# read fails (the original left `f` open for the life of the process).
# `[:]` materialises each dataset into an in-memory array.
with h5py.File(filename, 'r') as f:
    data_file = f['data'][:]
    label_file = f['label'][:]

print(data_file.shape, label_file.shape)
print(type(label_file[0]))
filename = './sem_seg/indoor3d_sem_seg_hdf5_data/ply_data_all_0.h5'
f = h5py.File(filename, 'r')
data_file = f['data'][:]
label_file = f['label'][:]
print (data_file.shape, label_file.shape)
print (type(label_file[0])) | true | true |
f725220f95e7ed6a18489ee1563dd48ce5f224d6 | 2,985 | py | Python | solutions/day18.py | nitekat1124/advent-of-code-2021 | 74501b84f0a08b33f48b4e5a2d66b8293c854150 | [
"WTFPL"
] | 3 | 2021-12-22T17:44:39.000Z | 2022-01-14T17:18:15.000Z | solutions/day18.py | nitekat1124/advent-of-code-2021 | 74501b84f0a08b33f48b4e5a2d66b8293c854150 | [
"WTFPL"
] | null | null | null | solutions/day18.py | nitekat1124/advent-of-code-2021 | 74501b84f0a08b33f48b4e5a2d66b8293c854150 | [
"WTFPL"
] | null | null | null | import re
from itertools import combinations
from utils.solution_base import SolutionBase
class Solution(SolutionBase):
    """Advent of Code 2021, day 18: snailfish arithmetic performed directly
    on the string representation (no tree structure is ever built)."""
    def solve(self, part_num: int):
        """Run the bundled sample tests for ``part_num``, then solve the
        real puzzle input and return the answer."""
        self.test_runner(part_num)
        func = getattr(self, f"part{part_num}")
        result = func(self.data)
        return result
    def test_runner(self, part_num):
        """Compare ``part<N>`` against each sample input/expected pair,
        printing pass/fail per test."""
        test_inputs = self.get_test_input()
        test_results = self.get_test_result(part_num)
        test_counter = 1
        func = getattr(self, f"part{part_num}")
        for i, r in zip(test_inputs, test_results):
            if len(r):
                if func(i) == int(r[0]):
                    print(f"test {test_counter} passed")
                else:
                    print(func(i))
                    print(r[0])
                    print(f"test {test_counter} NOT passed")
            test_counter += 1
        print()
    def part1(self, data):
        """Left-fold all numbers with snailfish addition, reducing to a
        fixed point after each addition, and return the final magnitude."""
        addition = data[0]
        for i in data[1:]:
            addition = f"[{addition},{i}]"
            # One reduction step at a time until nothing changes.
            while (t := self.reduction(addition)) != addition:
                addition = t
        return self.calc_magnitude(addition)
    def reduction(self, s: str):
        # explode
        # A digit seen while depth > 4 is the left element of a pair nested
        # inside four pairs; that pair explodes.
        depth = 0
        for i, v in enumerate(s):
            if v.isnumeric() and depth > 4:
                pair_close_pos = s[i:].index("]")
                # s[: i - 1] also drops the exploding pair's opening "[".
                before_pair, pair, after_pair = s[: i - 1], s[i : i + pair_close_pos], s[i + pair_close_pos + 1 :]
                pair = [*map(int, pair.split(","))]
                before_pair = self.add_exploded_pair(before_pair, pair, 0)
                after_pair = self.add_exploded_pair(after_pair, pair, 1)
                # The exploded pair itself is replaced by the regular 0.
                return before_pair + "0" + after_pair
            else:
                depth += [1, -1]["[]".index(v)] if v in "[]" else 0
        # split
        # No explosion occurred: split the leftmost regular number > 9 into
        # a [floor(n/2), ceil(n/2)] pair.
        large_regulars = [i for i in re.findall(r"\d+", s) if int(i) > 9]
        if len(large_regulars):
            reg = large_regulars[0]
            reg_pos = s.index(reg)
            before_reg, after_reg = s[:reg_pos], s[reg_pos + len(reg) :]
            reg = int(reg)
            elem_left = reg // 2
            elem_right = reg - elem_left
            s = before_reg + f"[{elem_left},{elem_right}]" + after_reg
        return s
    def add_exploded_pair(self, line, pair, pair_index):
        """Add pair[pair_index] to the nearest regular number in ``line``:
        the rightmost regular for the left half (pair_index 0, via rindex),
        the leftmost regular for the right half (pair_index 1, via index)."""
        all_regulars = re.findall(r"\d+", line)
        if len(all_regulars):
            # pair_index 0 -> all_regulars[-1]; pair_index 1 -> all_regulars[0]
            reg = all_regulars[pair_index - 1]
            reg_pos = [line.rindex, line.index][pair_index](reg)
            line = line[:reg_pos] + str(int(reg) + pair[pair_index]) + line[reg_pos + len(reg) :]
        return line
    def calc_magnitude(self, s: str):
        """Repeatedly collapse innermost [a,b] pairs into 3*a + 2*b until a
        single number remains. str.replace hits every occurrence, which is
        safe because identical pairs have identical magnitudes."""
        while s.count("["):
            pairs = re.findall(r"\[(\d+),(\d+)\]", s)
            for a, b in pairs:
                s = s.replace(f"[{a},{b}]", str(int(a) * 3 + int(b) * 2))
        return int(s)
    def part2(self, data):
        """Largest magnitude over every ordered pair of inputs; snailfish
        addition is not commutative, so both orders are tried."""
        return max(max(self.part1(i), self.part1(i[::-1])) for i in combinations(data, 2))
from itertools import combinations
from utils.solution_base import SolutionBase
class Solution(SolutionBase):
def solve(self, part_num: int):
self.test_runner(part_num)
func = getattr(self, f"part{part_num}")
result = func(self.data)
return result
def test_runner(self, part_num):
test_inputs = self.get_test_input()
test_results = self.get_test_result(part_num)
test_counter = 1
func = getattr(self, f"part{part_num}")
for i, r in zip(test_inputs, test_results):
if len(r):
if func(i) == int(r[0]):
print(f"test {test_counter} passed")
else:
print(func(i))
print(r[0])
print(f"test {test_counter} NOT passed")
test_counter += 1
print()
def part1(self, data):
addition = data[0]
for i in data[1:]:
addition = f"[{addition},{i}]"
while (t := self.reduction(addition)) != addition:
addition = t
return self.calc_magnitude(addition)
def reduction(self, s: str):
depth = 0
for i, v in enumerate(s):
if v.isnumeric() and depth > 4:
pair_close_pos = s[i:].index("]")
before_pair, pair, after_pair = s[: i - 1], s[i : i + pair_close_pos], s[i + pair_close_pos + 1 :]
pair = [*map(int, pair.split(","))]
before_pair = self.add_exploded_pair(before_pair, pair, 0)
after_pair = self.add_exploded_pair(after_pair, pair, 1)
return before_pair + "0" + after_pair
else:
depth += [1, -1]["[]".index(v)] if v in "[]" else 0
large_regulars = [i for i in re.findall(r"\d+", s) if int(i) > 9]
if len(large_regulars):
reg = large_regulars[0]
reg_pos = s.index(reg)
before_reg, after_reg = s[:reg_pos], s[reg_pos + len(reg) :]
reg = int(reg)
elem_left = reg // 2
elem_right = reg - elem_left
s = before_reg + f"[{elem_left},{elem_right}]" + after_reg
return s
def add_exploded_pair(self, line, pair, pair_index):
all_regulars = re.findall(r"\d+", line)
if len(all_regulars):
reg = all_regulars[pair_index - 1]
reg_pos = [line.rindex, line.index][pair_index](reg)
line = line[:reg_pos] + str(int(reg) + pair[pair_index]) + line[reg_pos + len(reg) :]
return line
def calc_magnitude(self, s: str):
while s.count("["):
pairs = re.findall(r"\[(\d+),(\d+)\]", s)
for a, b in pairs:
s = s.replace(f"[{a},{b}]", str(int(a) * 3 + int(b) * 2))
return int(s)
def part2(self, data):
return max(max(self.part1(i), self.part1(i[::-1])) for i in combinations(data, 2))
| true | true |
f725222b5c43f17680f783bce12dae0b4c033b90 | 1,813 | py | Python | sdk/purview/azure-purview-administration/tests/testcase.py | praveenkuttappan/azure-sdk-for-python | 4b79413667b7539750a6c7dde15737013a3d4bd5 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/purview/azure-purview-administration/tests/testcase.py | v-xuto/azure-sdk-for-python | 9c6296d22094c5ede410bc83749e8df8694ccacc | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/purview/azure-purview-administration/tests/testcase.py | v-xuto/azure-sdk-for-python | 9c6296d22094c5ede410bc83749e8df8694ccacc | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import functools
from devtools_testutils import AzureTestCase, PowerShellPreparer
from azure.purview.administration.account import PurviewAccountClient
from azure.purview.administration.metadatapolicies import PurviewMetadataPoliciesClient
class PurviewAccountTest(AzureTestCase):
    """Base test case that builds a PurviewAccountClient for a given endpoint."""

    def __init__(self, method_name, **kwargs):
        super().__init__(method_name, **kwargs)

    def create_client(self, endpoint):
        # Resolve the test credential, then build the client via the shared factory.
        cred = self.get_credential(PurviewAccountClient)
        return self.create_client_from_credential(
            PurviewAccountClient, credential=cred, endpoint=endpoint
        )
# Preparer that pre-binds the "purviewaccount" env-var prefix and supplies a
# sanitized fallback endpoint for recorded-test playback.
PurviewAccountPowerShellPreparer = functools.partial(
    PowerShellPreparer,
    "purviewaccount",
    purviewaccount_endpoint="https://fake_account.account.purview.azure.com"
)
class PurviewMetaPolicyTest(AzureTestCase):
    """Base test case that builds a PurviewMetadataPoliciesClient for an endpoint."""

    def __init__(self, method_name, **kwargs):
        super().__init__(method_name, **kwargs)

    def create_client(self, endpoint):
        # Resolve the test credential, then build the client via the shared factory.
        cred = self.get_credential(PurviewMetadataPoliciesClient)
        return self.create_client_from_credential(
            PurviewMetadataPoliciesClient, credential=cred, endpoint=endpoint
        )
# Preparer that pre-binds the "purviewmetapolicy" env-var prefix and supplies a
# sanitized fallback endpoint for recorded-test playback.
PurviewMetaPolicyPowerShellPreparer = functools.partial(
    PowerShellPreparer,
    "purviewmetapolicy",
    purviewmetapolicy_endpoint="https://fake_account.account.purview.azure.com"
)
| 35.54902 | 87 | 0.689465 |
import functools
from devtools_testutils import AzureTestCase, PowerShellPreparer
from azure.purview.administration.account import PurviewAccountClient
from azure.purview.administration.metadatapolicies import PurviewMetadataPoliciesClient
class PurviewAccountTest(AzureTestCase):
def __init__(self, method_name, **kwargs):
super(PurviewAccountTest, self).__init__(method_name, **kwargs)
def create_client(self, endpoint):
credential = self.get_credential(PurviewAccountClient)
return self.create_client_from_credential(
PurviewAccountClient,
credential=credential,
endpoint=endpoint,
)
PurviewAccountPowerShellPreparer = functools.partial(
PowerShellPreparer,
"purviewaccount",
purviewaccount_endpoint="https://fake_account.account.purview.azure.com"
)
class PurviewMetaPolicyTest(AzureTestCase):
def __init__(self, method_name, **kwargs):
super(PurviewMetaPolicyTest, self).__init__(method_name, **kwargs)
def create_client(self, endpoint):
credential = self.get_credential(PurviewMetadataPoliciesClient)
return self.create_client_from_credential(
PurviewMetadataPoliciesClient,
credential=credential,
endpoint=endpoint,
)
PurviewMetaPolicyPowerShellPreparer = functools.partial(
PowerShellPreparer,
"purviewmetapolicy",
purviewmetapolicy_endpoint="https://fake_account.account.purview.azure.com"
)
| true | true |
f72523871d8c380b139a74c4120ab78813f97178 | 206 | py | Python | scripts/portal/NextStg.py | Snewmy/swordie | ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | [
"MIT"
] | 9 | 2021-04-26T11:59:29.000Z | 2021-12-20T13:15:27.000Z | scripts/portal/NextStg.py | Snewmy/swordie | ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | [
"MIT"
] | null | null | null | scripts/portal/NextStg.py | Snewmy/swordie | ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | [
"MIT"
] | 6 | 2021-07-14T06:32:05.000Z | 2022-02-06T02:32:56.000Z | fieldID = sm.getFieldID()
if fieldID == 811000500:
sm.warpInstanceOut(811000008)
elif not sm.hasMobsInField():
sm.warp(fieldID + 100)
else:
sm.chat("The portal is not opened yet.")
sm.dispose()
| 22.888889 | 44 | 0.699029 | fieldID = sm.getFieldID()
if fieldID == 811000500:
sm.warpInstanceOut(811000008)
elif not sm.hasMobsInField():
sm.warp(fieldID + 100)
else:
sm.chat("The portal is not opened yet.")
sm.dispose()
| true | true |
f725258ae5d9a6973d2e1d634a663f91c4a30abf | 2,281 | py | Python | userbot/plugins/antiflood.py | Solivagantt/userbot | 9c2bb02ad24998e2739209381fcb66dc27daff32 | [
"MIT"
] | null | null | null | userbot/plugins/antiflood.py | Solivagantt/userbot | 9c2bb02ad24998e2739209381fcb66dc27daff32 | [
"MIT"
] | null | null | null | userbot/plugins/antiflood.py | Solivagantt/userbot | 9c2bb02ad24998e2739209381fcb66dc27daff32 | [
"MIT"
] | 2 | 2020-04-19T13:24:12.000Z | 2021-02-14T14:01:31.000Z | import asyncio
from telethon import events
from telethon.tl.functions.channels import EditBannedRequest
from telethon.tl.types import ChatBannedRights
from userbot.utils import admin_cmd
import userbot.plugins.sql_helper.antiflood_sql as sql
import userbot.utils
from userbot.utils import humanbytes, progress, time_formatter
# Per-chat flood settings, keyed by str(chat_id), loaded from SQL at import time.
CHAT_FLOOD = sql.__load_flood_settings()
# Restriction applied to flooders: they can still view the chat but cannot send messages.
ANTI_FLOOD_WARN_MODE = ChatBannedRights(
    until_date=None,
    view_messages=None,
    send_messages=True
)
@borg.on(admin_cmd(incoming=True))
async def _(event):
    """Watch incoming messages and restrict users who exceed the per-chat
    flood limit recorded in CHAT_FLOOD.

    Chats without a configured limit are ignored.  When the bot cannot
    apply the restriction (e.g. it lacks admin rights), it mentions @admin
    with the error and edits that notice away after 10 seconds.
    """
    if not CHAT_FLOOD:
        return
    if not (str(event.chat_id) in CHAT_FLOOD):
        return
    # TODO: exempt admins from this
    should_ban = sql.update_flood(event.chat_id, event.message.from_id)
    if not should_ban:
        return
    try:
        await event.client(EditBannedRequest(
            event.chat_id,
            event.message.from_id,
            ANTI_FLOOD_WARN_MODE
        ))
    except Exception as e:  # pylint:disable=C0103,W0703
        # Restriction failed (presumably missing ban rights — confirm):
        # fall back to warning the chat admins instead.
        no_admin_privilege_message = await event.client.send_message(
            entity=event.chat_id,
            message="""**Automatic AntiFlooder**
@admin [User](tg://user?id={}) is flooding this chat.
`{}`""".format(event.message.from_id, str(e)),
            reply_to=event.message.id
        )
        await asyncio.sleep(10)
        await no_admin_privilege_message.edit(
            "https://t.me/keralagram/724970",
            link_preview=False
        )
    else:
        await event.client.send_message(
            entity=event.chat_id,
            message="""**Automatic AntiFlooder**
[User](tg://user?id={}) has been automatically restricted
because he reached the defined flood limit.""".format(event.message.from_id),
            reply_to=event.message.id
        )
@borg.on(admin_cmd(pattern="setflood (.*)"))
async def _(event):
    """Handle ``.setflood <limit>``: persist the flood limit for the current
    chat and refresh the in-memory settings cache.

    Forwarded messages are ignored.  On failure the exception text replaces
    the confirmation message.
    """
    # BUG FIX: without `global`, the assignment below created a function-local
    # CHAT_FLOOD, so the incoming-message handler kept using stale settings
    # until the bot restarted.
    global CHAT_FLOOD
    if event.fwd_from:
        return
    input_str = event.pattern_match.group(1)
    try:
        sql.set_flood(event.chat_id, input_str)
        CHAT_FLOOD = sql.__load_flood_settings()
        await event.edit("Antiflood updated to {} in the current chat".format(input_str))
    except Exception as e:  # pylint:disable=C0103,W0703
        await event.edit(str(e))
| 31.680556 | 89 | 0.676019 | import asyncio
from telethon import events
from telethon.tl.functions.channels import EditBannedRequest
from telethon.tl.types import ChatBannedRights
from userbot.utils import admin_cmd
import userbot.plugins.sql_helper.antiflood_sql as sql
import userbot.utils
from userbot.utils import humanbytes, progress, time_formatter
CHAT_FLOOD = sql.__load_flood_settings()
ANTI_FLOOD_WARN_MODE = ChatBannedRights(
until_date=None,
view_messages=None,
send_messages=True
)
@borg.on(admin_cmd(incoming=True))
async def _(event):
if not CHAT_FLOOD:
return
if not (str(event.chat_id) in CHAT_FLOOD):
return
should_ban = sql.update_flood(event.chat_id, event.message.from_id)
if not should_ban:
return
try:
await event.client(EditBannedRequest(
event.chat_id,
event.message.from_id,
ANTI_FLOOD_WARN_MODE
))
except Exception as e:
no_admin_privilege_message = await event.client.send_message(
entity=event.chat_id,
message="""**Automatic AntiFlooder**
@admin [User](tg://user?id={}) is flooding this chat.
`{}`""".format(event.message.from_id, str(e)),
reply_to=event.message.id
)
await asyncio.sleep(10)
await no_admin_privilege_message.edit(
"https://t.me/keralagram/724970",
link_preview=False
)
else:
await event.client.send_message(
entity=event.chat_id,
message="""**Automatic AntiFlooder**
[User](tg://user?id={}) has been automatically restricted
because he reached the defined flood limit.""".format(event.message.from_id),
reply_to=event.message.id
)
@borg.on(admin_cmd(pattern="setflood (.*)"))
async def _(event):
if event.fwd_from:
return
input_str = event.pattern_match.group(1)
try:
sql.set_flood(event.chat_id, input_str)
CHAT_FLOOD = sql.__load_flood_settings()
await event.edit("Antiflood updated to {} in the current chat".format(input_str))
except Exception as e:
await event.edit(str(e))
| true | true |
f725258bf01873662ce2f235301796c73aa1e811 | 72,727 | py | Python | sql/engines/tests.py | bosen365/Archery | 424479fcb77646de0c5ef2d68a6fae9d3a685f00 | [
"Apache-2.0"
] | 1 | 2019-06-03T09:39:40.000Z | 2019-06-03T09:39:40.000Z | sql/engines/tests.py | bosen365/Archery | 424479fcb77646de0c5ef2d68a6fae9d3a685f00 | [
"Apache-2.0"
] | 4 | 2021-06-05T00:00:45.000Z | 2022-02-10T12:17:14.000Z | sql/engines/tests.py | bosen365/Archery | 424479fcb77646de0c5ef2d68a6fae9d3a685f00 | [
"Apache-2.0"
] | 1 | 2021-12-01T05:36:59.000Z | 2021-12-01T05:36:59.000Z | import MySQLdb
import json
from datetime import timedelta, datetime
from unittest.mock import patch, Mock, ANY
import sqlparse
from django.contrib.auth import get_user_model
from django.test import TestCase
from common.config import SysConfig
from sql.engines import EngineBase
from sql.engines.goinception import GoInceptionEngine
from sql.engines.models import ResultSet, ReviewSet, ReviewResult
from sql.engines.mssql import MssqlEngine
from sql.engines.mysql import MysqlEngine
from sql.engines.redis import RedisEngine
from sql.engines.pgsql import PgSQLEngine
from sql.engines.oracle import OracleEngine
from sql.engines.mongo import MongoEngine
from sql.engines.inception import InceptionEngine, _repair_json_str
from sql.models import Instance, SqlWorkflow, SqlWorkflowContent
User = get_user_model()
class TestReviewSet(TestCase):
    """Tests for the ReviewSet result container."""

    def test_review_set(self):
        # json() must serialize whatever rows were attached to the set.
        review_set = ReviewSet()
        review_set.rows = [{'id': '1679123'}]
        self.assertIn('1679123', review_set.json())
class TestEngineBase(TestCase):
    """Tests for EngineBase: attribute pass-through from the Instance model."""

    @classmethod
    def setUpClass(cls):
        # Fixtures: a user, an instance, and a finished workflow with content.
        cls.u1 = User(username='some_user', display='用户1')
        cls.u1.save()
        cls.ins1 = Instance(instance_name='some_ins', type='master', db_type='mssql', host='some_host',
                            port=1366, user='ins_user', password='some_str')
        cls.ins1.save()
        cls.wf1 = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer=cls.u1.username,
            engineer_display=cls.u1.display,
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=cls.ins1,
            db_name='some_db',
            syntax_type=1
        )
        cls.wfc1 = SqlWorkflowContent.objects.create(
            workflow=cls.wf1,
            sql_content='some_sql',
            execute_result=json.dumps([{
                'id': 1,
                'sql': 'some_content'
            }]))

    @classmethod
    def tearDownClass(cls):
        # Delete in dependency order: content -> workflow -> instance -> user.
        cls.wfc1.delete()
        cls.wf1.delete()
        cls.ins1.delete()
        cls.u1.delete()

    def test_init_with_ins(self):
        # The engine must mirror connection attributes from its Instance.
        engine = EngineBase(instance=self.ins1)
        self.assertEqual(self.ins1.instance_name, engine.instance_name)
        self.assertEqual(self.ins1.user, engine.user)
class TestMssql(TestCase):
    """Unit tests for MssqlEngine with pyodbc fully mocked out."""

    @classmethod
    def setUpClass(cls):
        cls.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mssql', host='some_host',
                            port=1366, user='ins_user', password='some_str')
        cls.ins1.save()
        cls.engine = MssqlEngine(instance=cls.ins1)
        cls.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=cls.ins1,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=cls.wf, sql_content='insert into some_tb values (1)')

    @classmethod
    def tearDownClass(cls):
        cls.ins1.delete()
        cls.wf.delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('sql.engines.mssql.pyodbc.connect')
    def testGetConnection(self, connect):
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.get_connection()
        connect.assert_called_once()

    @patch('sql.engines.mssql.pyodbc.connect')
    def testQuery(self, connect):
        # query() should execute, fetch at most limit_num rows, then close.
        cur = Mock()
        connect.return_value.cursor = cur
        cur.return_value.execute = Mock()
        cur.return_value.fetchmany.return_value = (('v1', 'v2'),)
        cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des'))
        new_engine = MssqlEngine(instance=self.ins1)
        query_result = new_engine.query(sql='some_str', limit_num=100)
        cur.return_value.execute.assert_called()
        cur.return_value.fetchmany.assert_called_once_with(100)
        connect.return_value.close.assert_called_once()
        self.assertIsInstance(query_result, ResultSet)

    @patch.object(MssqlEngine, 'query')
    def testAllDb(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('db_1',), ('db_2',)]
        mock_query.return_value = db_result
        new_engine = MssqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_databases()
        self.assertEqual(dbs.rows, ['db_1', 'db_2'])

    @patch.object(MssqlEngine, 'query')
    def testAllTables(self, mock_query):
        table_result = ResultSet()
        table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')]
        mock_query.return_value = table_result
        new_engine = MssqlEngine(instance=self.ins1)
        tables = new_engine.get_all_tables('some_db')
        mock_query.assert_called_once_with(db_name='some_db', sql=ANY)
        self.assertEqual(tables.rows, ['tb_1', 'tb_2'])

    @patch.object(MssqlEngine, 'query')
    def testAllColumns(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('col_1', 'type'), ('col_2', 'type2')]
        mock_query.return_value = db_result
        new_engine = MssqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb')
        self.assertEqual(dbs.rows, ['col_1', 'col_2'])

    @patch.object(MssqlEngine, 'query')
    def testDescribe(self, mock_query):
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.describe_table('some_db', 'some_db')
        mock_query.assert_called_once()

    def testQueryCheck(self):
        new_engine = MssqlEngine(instance=self.ins1)
        # Spot-check a single banned function (concat) in both positions.
        banned_sql = 'select concat(phone,1) from user_table'
        check_result = new_engine.query_check(db_name='some_db', sql=banned_sql)
        self.assertTrue(check_result.get('bad_query'))
        banned_sql = 'select phone from user_table where phone=concat(phone,1)'
        check_result = new_engine.query_check(db_name='some_db', sql=banned_sql)
        self.assertTrue(check_result.get('bad_query'))
        # Stored-procedure helper calls must pass through unmodified.
        sp_sql = "sp_helptext '[SomeName].[SomeAction]'"
        check_result = new_engine.query_check(db_name='some_db', sql=sp_sql)
        self.assertFalse(check_result.get('bad_query'))
        self.assertEqual(check_result.get('filtered_sql'), sp_sql)

    def test_filter_sql(self):
        new_engine = MssqlEngine(instance=self.ins1)
        # MSSQL limits with TOP, not LIMIT.
        banned_sql = 'select user from user_table'
        check_result = new_engine.filter_sql(sql=banned_sql, limit_num=10)
        self.assertEqual(check_result, "select top 10 user from user_table")

    def test_execute_check(self):
        # Statements are split on GO batch separators (any letter case).
        new_engine = MssqlEngine(instance=self.ins1)
        test_sql = 'use database\ngo\nsome sql1\nGO\nsome sql2\n\r\nGo\nsome sql3\n\r\ngO\n'
        check_result = new_engine.execute_check(db_name=None, sql=test_sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[1].__dict__['sql'], "use database\n")
        self.assertEqual(check_result.rows[2].__dict__['sql'], "\nsome sql1\n")
        self.assertEqual(check_result.rows[4].__dict__['sql'], "\nsome sql3\n\r\n")

    @patch('sql.engines.mssql.MssqlEngine.execute')
    def test_execute_workflow(self, mock_execute):
        mock_execute.return_value.error = None
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.execute_workflow(self.wf)
        # One execute() call per backup table, plus one for the actual statement.
        mock_execute.assert_called()
        self.assertEqual(1, mock_execute.call_count)

    @patch('sql.engines.mssql.MssqlEngine.get_connection')
    def test_execute(self, mock_connect):
        mock_cursor = Mock()
        mock_connect.return_value.cursor = mock_cursor
        new_engine = MssqlEngine(instance=self.ins1)
        execute_result = new_engine.execute('some_db', 'some_sql')
        # Success path: no error, commit issued.
        self.assertIsNone(execute_result.error)
        self.assertEqual('some_sql', execute_result.full_sql)
        self.assertEqual(2, len(execute_result.rows))
        mock_cursor.return_value.execute.assert_called()
        mock_cursor.return_value.commit.assert_called()
        mock_cursor.reset_mock()
        # Failure path: error captured, rollback instead of commit.
        mock_cursor.return_value.execute.side_effect = Exception('Boom! some exception!')
        execute_result = new_engine.execute('some_db', 'some_sql')
        self.assertIn('Boom! some exception!', execute_result.error)
        self.assertEqual('some_sql', execute_result.full_sql)
        self.assertEqual(2, len(execute_result.rows))
        mock_cursor.return_value.commit.assert_not_called()
        mock_cursor.return_value.rollback.assert_called()
class TestMysql(TestCase):
    """Unit tests for MysqlEngine with MySQLdb and Inception mocked out."""

    def setUp(self):
        # Fresh instance/workflow fixtures per test; sys_config backs the
        # inception on/off switches used below.
        self.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mysql', host='some_host',
                             port=1366, user='ins_user', password='some_str')
        self.ins1.save()
        self.sys_config = SysConfig()
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins1,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)

    def tearDown(self):
        self.ins1.delete()
        self.sys_config.purge()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('MySQLdb.connect')
    def test_engine_base_info(self, _conn):
        new_engine = MysqlEngine(instance=self.ins1)
        self.assertEqual(new_engine.name, 'MySQL')
        self.assertEqual(new_engine.info, 'MySQL engine')

    @patch('MySQLdb.connect')
    def testGetConnection(self, connect):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_connection()
        connect.assert_called_once()

    @patch('MySQLdb.connect')
    def testQuery(self, connect):
        # query() should execute, fetch at most limit_num rows, then close.
        cur = Mock()
        connect.return_value.cursor = cur
        cur.return_value.execute = Mock()
        cur.return_value.fetchmany.return_value = (('v1', 'v2'),)
        cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des'))
        new_engine = MysqlEngine(instance=self.ins1)
        query_result = new_engine.query(sql='some_str', limit_num=100)
        cur.return_value.execute.assert_called()
        cur.return_value.fetchmany.assert_called_once_with(size=100)
        connect.return_value.close.assert_called_once()
        self.assertIsInstance(query_result, ResultSet)

    @patch.object(MysqlEngine, 'query')
    def testAllDb(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('db_1',), ('db_2',)]
        mock_query.return_value = db_result
        new_engine = MysqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_databases()
        self.assertEqual(dbs.rows, ['db_1', 'db_2'])

    @patch.object(MysqlEngine, 'query')
    def testAllTables(self, mock_query):
        table_result = ResultSet()
        table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')]
        mock_query.return_value = table_result
        new_engine = MysqlEngine(instance=self.ins1)
        tables = new_engine.get_all_tables('some_db')
        mock_query.assert_called_once_with(db_name='some_db', sql=ANY)
        self.assertEqual(tables.rows, ['tb_1', 'tb_2'])

    @patch.object(MysqlEngine, 'query')
    def testAllColumns(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('col_1', 'type'), ('col_2', 'type2')]
        mock_query.return_value = db_result
        new_engine = MysqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb')
        self.assertEqual(dbs.rows, ['col_1', 'col_2'])

    @patch.object(MysqlEngine, 'query')
    def testDescribe(self, mock_query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.describe_table('some_db', 'some_db')
        mock_query.assert_called_once()

    def testQueryCheck(self):
        # Leading comments are stripped from the query before checking.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = '-- 测试\n select user from usertable'
        check_result = new_engine.query_check(db_name='some_db', sql=sql_without_limit)
        self.assertEqual(check_result['filtered_sql'], 'select user from usertable')

    def test_query_check_wrong_sql(self):
        new_engine = MysqlEngine(instance=self.ins1)
        wrong_sql = '-- 测试'
        check_result = new_engine.query_check(db_name='some_db', sql=wrong_sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': '-- 测试', 'has_star': False})

    def test_query_check_update_sql(self):
        # DML is rejected by the query checker.
        new_engine = MysqlEngine(instance=self.ins1)
        update_sql = 'update user set id=0'
        check_result = new_engine.query_check(db_name='some_db', sql=update_sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': 'update user set id=0',
                              'has_star': False})

    def test_filter_sql_with_delimiter(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable;'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 100;')

    def test_filter_sql_without_delimiter(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 100;')

    def test_filter_sql_with_limit(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')

    def test_filter_sql_with_limit_min(self):
        # The smaller of the user limit and the configured limit wins.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 10;')

    def test_filter_sql_with_limit_offset(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10 offset 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')

    def test_filter_sql_with_limit_nn(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10, 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')

    def test_filter_sql_upper(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'SELECT USER FROM usertable LIMIT 10, 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'SELECT USER FROM usertable limit 1;')

    def test_filter_sql_not_select(self):
        # Non-SELECT statements are passed through untouched.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'show create table usertable;'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'show create table usertable;')

    @patch('sql.engines.mysql.data_masking', return_value=ResultSet())
    def test_query_masking(self, _data_masking):
        query_result = ResultSet()
        new_engine = MysqlEngine(instance=self.ins1)
        masking_result = new_engine.query_masking(db_name='archery', sql='select 1', resultset=query_result)
        self.assertIsInstance(masking_result, ResultSet)

    @patch('sql.engines.mysql.data_masking', return_value=ResultSet())
    def test_query_masking_not_select(self, _data_masking):
        # Masking is only applied to SELECTs; other statements pass through.
        query_result = ResultSet()
        new_engine = MysqlEngine(instance=self.ins1)
        masking_result = new_engine.query_masking(db_name='archery', sql='explain select 1', resultset=query_result)
        self.assertEqual(masking_result, query_result)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_select_sql(self, _inception_engine):
        # SELECT in a workflow must be rejected even if Inception accepts it.
        self.sys_config.set('inception', 'true')
        sql = 'select * from user'
        inc_row = ReviewResult(id=1,
                               errlevel=0,
                               stagestatus='Audit completed',
                               errormessage='None',
                               sql=sql,
                               affected_rows=0,
                               execute_time=0, )
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sql)
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_critical_sql(self, _inception_engine):
        # Statements matching critical_ddl_regex are rejected before Inception.
        self.sys_config.set('inception', 'true')
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        inc_row = ReviewResult(id=1,
                               errlevel=0,
                               stagestatus='Audit completed',
                               errormessage='None',
                               sql=sql,
                               affected_rows=0,
                               execute_time=0, )
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sql)
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_normal_sql(self, _inception_engine):
        self.sys_config.set('inception', 'true')
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_normal_sql_with_Exception(self, _inception_engine):
        # Inception failures must propagate, not be swallowed.
        sql = 'update user set id=1'
        _inception_engine.return_value.execute_check.side_effect = RuntimeError()
        new_engine = MysqlEngine(instance=self.ins1)
        with self.assertRaises(RuntimeError):
            new_engine.execute_check(db_name=0, sql=sql)

    @patch.object(MysqlEngine, 'query')
    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_workflow(self, _inception_engine, _query):
        self.sys_config.set('inception', 'true')
        sql = 'update user set id=1'
        _inception_engine.return_value.execute.return_value = ReviewSet(full_sql=sql)
        _query.return_value.rows = (('0',),)
        new_engine = MysqlEngine(instance=self.ins1)
        execute_result = new_engine.execute_workflow(self.wf)
        self.assertIsInstance(execute_result, ReviewSet)

    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_execute(self, _connect, _cursor, _execute):
        new_engine = MysqlEngine(instance=self.ins1)
        execute_result = new_engine.execute(self.wf)
        self.assertIsInstance(execute_result, ResultSet)

    @patch('MySQLdb.connect')
    def test_server_version(self, _connect):
        # Version string suffixes like "-16log" must be stripped.
        _connect.return_value.get_server_info.return_value = '5.7.20-16log'
        new_engine = MysqlEngine(instance=self.ins1)
        server_version = new_engine.server_version
        self.assertTupleEqual(server_version, (5, 7, 20))

    @patch.object(MysqlEngine, 'query')
    def test_get_variables_not_filter(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_variables()
        _query.assert_called_once()

    @patch('MySQLdb.connect')
    @patch.object(MysqlEngine, 'query')
    def test_get_variables_filter(self, _query, _connect):
        _connect.return_value.get_server_info.return_value = '5.7.20-16log'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_variables(variables=['binlog_format'])
        _query.assert_called()

    @patch.object(MysqlEngine, 'query')
    def test_set_variable(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.set_variable('binlog_format', 'ROW')
        _query.assert_called_once_with(sql="set global binlog_format=ROW;")

    @patch('sql.engines.mysql.GoInceptionEngine')
    def test_osc_go_inception(self, _inception_engine):
        # With inception disabled, OSC control goes through GoInception.
        self.sys_config.set('inception', 'false')
        _inception_engine.return_value.osc_control.return_value = ReviewSet()
        command = 'get'
        sqlsha1 = 'xxxxx'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_osc_inception(self, _inception_engine):
        # With inception enabled, OSC control goes through Inception.
        self.sys_config.set('inception', 'true')
        _inception_engine.return_value.osc_control.return_value = ReviewSet()
        command = 'get'
        sqlsha1 = 'xxxxx'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)

    @patch.object(MysqlEngine, 'query')
    def test_kill_connection(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.kill_connection(100)
        _query.assert_called_once_with(sql="kill 100")

    @patch.object(MysqlEngine, 'query')
    def test_seconds_behind_master(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.seconds_behind_master
        _query.assert_called_once_with(sql="show slave status", close_conn=False,
                                       cursorclass=MySQLdb.cursors.DictCursor)
class TestRedis(TestCase):
    """Unit tests for RedisEngine with the redis client mocked out."""

    @classmethod
    def setUpClass(cls):
        cls.ins = Instance(instance_name='some_ins', type='slave', db_type='redis', host='some_host',
                           port=1366, user='ins_user', password='some_str')
        cls.ins.save()

    @classmethod
    def tearDownClass(cls):
        cls.ins.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('redis.Redis')
    def test_engine_base_info(self, _conn):
        new_engine = RedisEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'Redis')
        self.assertEqual(new_engine.info, 'Redis engine')

    @patch('redis.Redis')
    def test_get_connection(self, _conn):
        new_engine = RedisEngine(instance=self.ins)
        new_engine.get_connection()
        _conn.assert_called_once()

    @patch('redis.Redis.execute_command', return_value=[1, 2, 3])
    def test_query_return_list(self, _execute_command):
        # List replies become one row per element.
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, ([1], [2], [3]))

    @patch('redis.Redis.execute_command', return_value='text')
    def test_query_return_str(self, _execute_command):
        # Scalar replies become a single one-column row.
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, (['text'],))

    @patch('redis.Redis.execute_command', return_value='text')
    def test_query_execute(self, _execute_command):
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, (['text'],))

    @patch('redis.Redis.config_get', return_value={"databases": 4})
    def test_get_all_databases(self, _config_get):
        # Databases are enumerated 0..n-1 from the "databases" config value.
        new_engine = RedisEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['0', '1', '2', '3'])

    def test_query_check_safe_cmd(self):
        # NOTE(review): despite the name, 'keys' is blacklisted entirely,
        # so even the narrowed 'keys 1*' is expected to be rejected.
        safe_cmd = "keys 1*"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name=0, sql=safe_cmd)
        self.assertDictEqual(check_result,
                             {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False})

    def test_query_check_danger_cmd(self):
        safe_cmd = "keys *"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name=0, sql=safe_cmd)
        self.assertDictEqual(check_result,
                             {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False})

    def test_filter_sql(self):
        # Redis commands are not rewritten by filter_sql.
        safe_cmd = "keys 1*"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=safe_cmd, limit_num=100)
        self.assertEqual(check_result, 'keys 1*')

    def test_query_masking(self):
        # Masking is a no-op for Redis.
        query_result = ResultSet()
        new_engine = RedisEngine(instance=self.ins)
        masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result)
        self.assertEqual(masking_result, query_result)

    def test_execute_check(self):
        sql = 'set 1 1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('redis.Redis.execute_command', return_value='text')
    def test_execute_workflow_success(self, _execute_command):
        sql = 'set 1 1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Execute Successfully',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        new_engine = RedisEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        # Only the row structure is compared; execute_time etc. vary per run.
        self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class TestPgSQL(TestCase):
    """PgSQL engine tests: connection, metadata lookups, query/execute checks and execution."""

    @classmethod
    def setUpClass(cls):
        # One shared instance record and SysConfig for the whole test class.
        cls.ins = Instance(instance_name='some_ins', type='slave', db_type='pgsql', host='some_host',
                           port=1366, user='ins_user', password='some_str')
        cls.ins.save()
        cls.sys_config = SysConfig()

    @classmethod
    def tearDownClass(cls):
        cls.ins.delete()
        cls.sys_config.purge()

    @patch('psycopg2.connect')
    def test_engine_base_info(self, _conn):
        """The engine exposes its human-readable name and info strings."""
        new_engine = PgSQLEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'PgSQL')
        self.assertEqual(new_engine.info, 'PgSQL engine')

    @patch('psycopg2.connect')
    def test_get_connection(self, _conn):
        """get_connection opens exactly one psycopg2 connection."""
        new_engine = PgSQLEngine(instance=self.ins)
        new_engine.get_connection("some_dbname")
        _conn.assert_called_once()

    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect')
    def test_query(self, _conn, _cursor, _execute):
        """A limited query uses fetchmany and returns its rows in a ResultSet."""
        _conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)]
        new_engine = PgSQLEngine(instance=self.ins)
        query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=100,
                                        schema_name="some_schema")
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])

    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        """limit_num=0 means unlimited: fetchall is used instead of fetchmany."""
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = PgSQLEngine(instance=self.ins)
        query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=0,
                                        schema_name="some_schema")
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])

    @patch('sql.engines.pgsql.PgSQLEngine.query',
           return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)]))
    def test_get_all_databases(self, query):
        """System databases (postgres, template*) are filtered from the listing."""
        new_engine = PgSQLEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['archery'])

    @patch('sql.engines.pgsql.PgSQLEngine.query',
           return_value=ResultSet(rows=[('information_schema',), ('archery',), ('pg_catalog',)]))
    def test_get_all_schemas(self, _query):
        """System schemas (information_schema, pg_catalog) are filtered out."""
        new_engine = PgSQLEngine(instance=self.ins)
        schemas = new_engine.get_all_schemas(db_name='archery')
        self.assertListEqual(schemas.rows, ['archery'])

    @patch('sql.engines.pgsql.PgSQLEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)]))
    def test_get_all_tables(self, _query):
        """The engine drops the 'test' table from listings; only 'test2' survives."""
        new_engine = PgSQLEngine(instance=self.ins)
        tables = new_engine.get_all_tables(db_name='archery', schema_name='archery')
        self.assertListEqual(tables.rows, ['test2'])

    @patch('sql.engines.pgsql.PgSQLEngine.query',
           return_value=ResultSet(rows=[('id',), ('name',)]))
    def test_get_all_columns_by_tb(self, _query):
        """Column names are flattened into a plain list."""
        new_engine = PgSQLEngine(instance=self.ins)
        columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2', schema_name='archery')
        self.assertListEqual(columns.rows, ['id', 'name'])

    @patch('sql.engines.pgsql.PgSQLEngine.query',
           return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)]))
    def test_describe_table(self, _query):
        """describe_table wraps the metadata query result in a ResultSet."""
        new_engine = PgSQLEngine(instance=self.ins)
        describe = new_engine.describe_table(db_name='archery', schema_name='archery', tb_name='text')
        self.assertIsInstance(describe, ResultSet)

    def test_query_check_disable_sql(self):
        """Non-select statements are rejected by query_check."""
        sql = "update xxx set a=1 "
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': sql.strip(),
                              'has_star': False})

    def test_query_check_star_sql(self):
        """'select *' is allowed but flagged via has_star."""
        sql = "select * from xx "
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': 'SQL语句中含有 * ', 'bad_query': False, 'filtered_sql': sql.strip(),
                              'has_star': True})

    def test_filter_sql_with_delimiter(self):
        """A trailing semicolon is preserved while the limit is injected."""
        sql = "select * from xx;"
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select * from xx limit 100;")

    def test_filter_sql_without_delimiter(self):
        """A missing semicolon is added together with the injected limit."""
        sql = "select * from xx"
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select * from xx limit 100;")

    def test_filter_sql_with_limit(self):
        """An existing LIMIT clause wins over the injected limit_num."""
        sql = "select * from xx limit 10"
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=1)
        self.assertEqual(check_result, "select * from xx limit 10;")

    def test_query_masking(self):
        """The PgSQL engine performs no masking: the result set passes through."""
        query_result = ResultSet()
        new_engine = PgSQLEngine(instance=self.ins)
        masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result)
        self.assertEqual(masking_result, query_result)

    def test_execute_check_select_sql(self):
        """Query statements are rejected by execute_check (DML/DDL only)."""
        sql = 'select * from user;'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sql)
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    def test_execute_check_critical_sql(self):
        """SQL matching the configured critical_ddl_regex is rejected."""
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sql)
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    def test_execute_check_normal_sql(self):
        """A plain DDL statement passes the audit with errlevel 0."""
        self.sys_config.purge()
        sql = 'alter table tb set id=1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect')
    def test_execute_workflow_success(self, _conn, _cursor, _execute):
        """Executing a workflow over a mocked connection reports success rows."""
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Execute Successfully',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        new_engine = PgSQLEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        # Values (e.g. execute_time) vary, so only the attribute names are compared.
        self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())

    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect', return_value=RuntimeError)
    def test_execute_workflow_exception(self, _conn, _cursor, _execute):
        """A broken connection surfaces as an Execute Failed row / AttributeError."""
        sql = 'update user set id=1'
        # FIX: the expected message previously said "Oracle命令执行报错" — a
        # copy-paste from the Oracle tests; this is the PgSQL engine.
        row = ReviewResult(id=1,
                           errlevel=2,
                           stagestatus='Execute Failed',
                           errormessage=f'异常信息:PgSQL命令执行报错,语句:{sql}',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        with self.assertRaises(AttributeError):
            new_engine = PgSQLEngine(instance=self.ins)
            execute_result = new_engine.execute_workflow(workflow=wf)
            self.assertIsInstance(execute_result, ReviewSet)
            self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class TestModel(TestCase):
    """Regression tests for the ResultSet/ReviewSet mutable-default pitfall."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_result_set_rows_shadow(self):
        """A mutable default for ``rows`` must not leak between instances.

        If the class declared ``rows=[]`` as a shared default and updated it
        in place, the accumulated items would bleed into every new instance.
        """
        polluted_results = ResultSet()
        for item in range(10):
            polluted_results.rows += [item]
        fresh_results = ResultSet()
        self.assertEqual(fresh_results.rows, [])

        polluted_review = ReviewSet()
        for item in range(10):
            polluted_review.rows += [item]
        fresh_review = ReviewSet()
        self.assertEqual(fresh_review.rows, [])
class TestInception(TestCase):
    """Tests for the legacy Inception audit engine (check, execute, osc, variables)."""

    def setUp(self):
        # A mysql target instance, an inception service instance, and one workflow.
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql', host='some_host',
                                           port=3306, user='ins_user', password='some_str')
        self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='inception',
                                               host='some_host', port=6669)
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)

    def tearDown(self):
        self.ins.delete()
        self.ins_inc.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('MySQLdb.connect')
    def test_get_connection(self, _connect):
        new_engine = InceptionEngine()
        new_engine.get_connection()
        _connect.assert_called_once()

    @patch('MySQLdb.connect')
    def test_get_backup_connection(self, _connect):
        new_engine = InceptionEngine()
        new_engine.get_backup_connection()
        _connect.assert_called_once()

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_check_normal_sql(self, _query):
        """A clean audit result (errlevel 0) is wrapped into a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        _query.return_value = ResultSet(full_sql=sql, rows=[row])
        new_engine = InceptionEngine()
        check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_exception(self, _query):
        """A failed execution row (errlevel 1) still yields a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence',
                       'backup_dbname', 'execute_time', 'sqlsha1']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_finish(self, _query):
        """A successful execution row is wrapped into a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence',
                       'backup_dbname', 'execute_time', 'sqlsha1']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)

    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = InceptionEngine()
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)

    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = InceptionEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_query_print(self, _query):
        """query_print returns the parsed query tree produced by inception."""
        sql = 'update user set id=100'
        row = [1,
               'select * from sql_instance limit 100',
               0,
               '{"command":"select","select_list":[{"type":"FIELD_ITEM","field":"*"}],"table_ref":[{"db":"archery","table":"sql_instance"}],"limit":{"limit":[{"type":"INT_ITEM","value":"100"}]}}',
               'None']
        column_list = ['ID', 'statement', 'errlevel', 'query_tree', 'errmsg']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        print_result = new_engine.query_print(self.ins, db_name=None, sql=sql)
        self.assertDictEqual(print_result, json.loads(_repair_json_str(row[3])))

    @patch('MySQLdb.connect')
    def test_get_rollback_list(self, _connect):
        """get_rollback parses the stored execute_result JSON without raising."""
        self.wf.sqlworkflowcontent.execute_result = """[{
            "id": 1,
            "stage": "RERUN",
            "errlevel": 0,
            "stagestatus": "Execute Successfully",
            "errormessage": "None",
            "sql": "use archer_test",
            "affected_rows": 0,
            "sequence": "'1554135032_13038_0'",
            "backup_dbname": "None",
            "execute_time": "0.000",
            "sqlsha1": "",
            "actual_affected_rows": 0
        }, {
            "id": 2,
            "stage": "EXECUTED",
            "errlevel": 0,
            "stagestatus": "Execute Successfully Backup successfully",
            "errormessage": "None",
            "sql": "insert into tt1 (user_name)values('A'),('B'),('C')",
            "affected_rows": 3,
            "sequence": "'1554135032_13038_1'",
            "backup_dbname": "mysql_3306_archer_test",
            "execute_time": "0.000",
            "sqlsha1": "",
            "actual_affected_rows": 3
        }]"""
        self.wf.sqlworkflowcontent.save()
        new_engine = InceptionEngine()
        new_engine.get_rollback(self.wf)

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_get(self, _query):
        new_engine = InceptionEngine()
        command = 'get'
        sqlsha1 = 'xxxxx'
        sql = f"inception get osc_percent '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_kill(self, _query):
        new_engine = InceptionEngine()
        command = 'kill'
        sqlsha1 = 'xxxxx'
        sql = f"inception stop alter '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_not_support(self, _query):
        """Inception cannot pause/resume pt-osc; such commands raise ValueError."""
        new_engine = InceptionEngine()
        command = 'stop'
        sqlsha1 = 'xxxxx'
        sql = f"inception stop alter '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        with self.assertRaisesMessage(ValueError, 'pt-osc不支持暂停和恢复,需要停止执行请使用终止按钮!'):
            new_engine.osc_control(sqlsha1=sqlsha1, command=command)

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_get_variables(self, _query):
        new_engine = InceptionEngine(instance=self.ins_inc)
        new_engine.get_variables()
        # Plain literal — nothing to interpolate (was a needless f-string).
        sql = "inception get variables;"
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_get_variables_filter(self, _query):
        new_engine = InceptionEngine(instance=self.ins_inc)
        new_engine.get_variables(variables=['inception_osc_on'])
        sql = "inception get variables 'inception_osc_on';"
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.inception.InceptionEngine.query')
    def test_set_variable(self, _query):
        new_engine = InceptionEngine(instance=self.ins)
        new_engine.set_variable('inception_osc_on', 'on')
        _query.assert_called_once_with(sql="inception set inception_osc_on=on;")
class TestGoInception(TestCase):
    """Tests for the goInception audit engine (check, execute, osc, variables)."""

    def setUp(self):
        # A mysql target instance, a goinception service instance, and one workflow.
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql',
                                           host='some_host',
                                           port=3306, user='ins_user', password='some_str')
        self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='goinception',
                                               host='some_host', port=4000)
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)

    def tearDown(self):
        self.ins.delete()
        self.ins_inc.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('MySQLdb.connect')
    def test_get_connection(self, _connect):
        new_engine = GoInceptionEngine()
        new_engine.get_connection()
        _connect.assert_called_once()

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_check_normal_sql(self, _query):
        """A clean audit result (error_level 0) is wrapped into a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        _query.return_value = ResultSet(full_sql=sql, rows=[row])
        new_engine = GoInceptionEngine()
        check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_exception(self, _query):
        """A failed execution row still yields a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql',
                       'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = GoInceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_finish(self, _query):
        """A successful execution row is wrapped into a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql',
                       'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = GoInceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)

    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = GoInceptionEngine()
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)

    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = GoInceptionEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_get(self, _query):
        new_engine = GoInceptionEngine()
        command = 'get'
        sqlsha1 = 'xxxxx'
        sql = f"inception get osc_percent '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_pause(self, _query):
        new_engine = GoInceptionEngine()
        command = 'pause'
        sqlsha1 = 'xxxxx'
        sql = f"inception {command} osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_resume(self, _query):
        new_engine = GoInceptionEngine()
        command = 'resume'
        sqlsha1 = 'xxxxx'
        sql = f"inception {command} osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_kill(self, _query):
        new_engine = GoInceptionEngine()
        command = 'kill'
        sqlsha1 = 'xxxxx'
        sql = f"inception kill osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_get_variables(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins_inc)
        new_engine.get_variables()
        # Plain literal — nothing to interpolate (was a needless f-string).
        sql = "inception get variables;"
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_get_variables_filter(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins_inc)
        new_engine.get_variables(variables=['inception_osc_on'])
        sql = "inception get variables like 'inception_osc_on';"
        _query.assert_called_once_with(sql=sql)

    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_set_variable(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins)
        new_engine.set_variable('inception_osc_on', 'on')
        _query.assert_called_once_with(sql="inception set inception_osc_on=on;")
class TestOracle(TestCase):
    """Oracle engine tests: connection building, metadata queries, query/execute checks."""

    def setUp(self):
        # Instance record carrying an Oracle SID, plus one finished workflow.
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='oracle',
                                           host='some_host', port=3306, user='ins_user', password='some_str',
                                           sid='some_id')
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
        self.sys_config = SysConfig()

    def tearDown(self):
        self.ins.delete()
        self.sys_config.purge()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('cx_Oracle.makedsn')
    @patch('cx_Oracle.connect')
    def test_get_connection(self, _connect, _makedsn):
        """The DSN is built from sid or service_name; with neither, ValueError."""
        # Case 1: sid is filled in.
        new_engine = OracleEngine(self.ins)
        new_engine.get_connection()
        _connect.assert_called_once()
        _makedsn.assert_called_once()
        # Case 2: service_name is filled in (sid cleared).
        _connect.reset_mock()
        _makedsn.reset_mock()
        self.ins.service_name = 'some_service'
        self.ins.sid = ''
        self.ins.save()
        new_engine = OracleEngine(self.ins)
        new_engine.get_connection()
        _connect.assert_called_once()
        _makedsn.assert_called_once()
        # Case 3: both empty — expect ValueError.
        _connect.reset_mock()
        _makedsn.reset_mock()
        self.ins.service_name = ''
        self.ins.sid = ''
        self.ins.save()
        new_engine = OracleEngine(self.ins)
        with self.assertRaises(ValueError):
            new_engine.get_connection()

    @patch('cx_Oracle.connect')
    def test_engine_base_info(self, _conn):
        """Name/info strings plus the parsed (major, minor, patch) server version."""
        new_engine = OracleEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'Oracle')
        self.assertEqual(new_engine.info, 'Oracle engine')
        _conn.return_value.version = '12.1.0.2.0'
        self.assertTupleEqual(new_engine.server_version, ('12', '1', '0'))

    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_query(self, _conn, _cursor, _execute):
        """A limited query uses fetchmany and returns rows in a ResultSet."""
        _conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)]
        new_engine = OracleEngine(instance=self.ins)
        query_result = new_engine.query(db_name='archery', sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])

    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        """limit_num=0 means unlimited: fetchall is used."""
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = OracleEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])

    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)]))
    def test_get_all_databases(self, _query):
        """System accounts are filtered from the public listing."""
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['archery'])

    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)]))
    def test__get_all_databases(self, _query):
        """The private variant returns everything unfiltered."""
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine._get_all_databases()
        self.assertListEqual(dbs.rows, ['AUD_SYS', 'archery', 'ANONYMOUS'])

    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('archery',)]))
    def test__get_all_instances(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine._get_all_instances()
        self.assertListEqual(dbs.rows, ['archery'])

    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('ANONYMOUS',), ('archery',), ('SYSTEM',)]))
    def test_get_all_schemas(self, _query):
        """System schemas are filtered; note this exercises _get_all_schemas."""
        new_engine = OracleEngine(instance=self.ins)
        schemas = new_engine._get_all_schemas()
        self.assertListEqual(schemas.rows, ['archery'])

    @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)]))
    def test_get_all_tables(self, _query):
        # 'test' is dropped from the listing; only 'test2' survives.
        new_engine = OracleEngine(instance=self.ins)
        tables = new_engine.get_all_tables(db_name='archery')
        self.assertListEqual(tables.rows, ['test2'])

    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('id',), ('name',)]))
    def test_get_all_columns_by_tb(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2')
        self.assertListEqual(columns.rows, ['id', 'name'])

    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('archery',), ('template1',), ('template0',)]))
    def test_describe_table(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        describe = new_engine.describe_table(db_name='archery', tb_name='text')
        self.assertIsInstance(describe, ResultSet)

    def test_query_check_disable_sql(self):
        """Non-select statements are rejected by query_check."""
        sql = "update xxx set a=1;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持语法!', 'bad_query': True, 'filtered_sql': sql.strip(';'),
                              'has_star': False})

    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    def test_query_check_star_sql(self, _explain_check):
        """'select *' is allowed but flagged with has_star."""
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '禁止使用 * 关键词\n', 'bad_query': False, 'filtered_sql': sql.strip(';'),
                              'has_star': True})

    def test_query_check_IndexError(self):
        """An empty string contains no statement and is a bad query."""
        sql = ""
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '没有有效的SQL语句', 'bad_query': True, 'filtered_sql': sql.strip(), 'has_star': False})

    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    def test_query_check_plus(self, _explain_check):
        """The '+' keyword (old-style outer join syntax) is rejected."""
        sql = "select 100+1 from tb;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '禁止使用 + 关键词\n', 'bad_query': True, 'filtered_sql': sql.strip(';'),
                              'has_star': False})

    def test_filter_sql_with_delimiter(self):
        """Limiting wraps the query in a rownum subselect."""
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100")

    def test_filter_sql_with_delimiter_and_where(self):
        sql = "select * from xx where id>1;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result,
                         "select sql_audit.* from (select * from xx where id>1) sql_audit where rownum <= 100")

    def test_filter_sql_without_delimiter(self):
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100")

    def test_filter_sql_with_limit(self):
        # A pre-existing 'limit' clause is kept verbatim inside the rownum wrapper.
        sql = "select * from xx limit 10;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=1)
        self.assertEqual(check_result,
                         "select sql_audit.* from (select * from xx limit 10) sql_audit where rownum <= 1")

    def test_query_masking(self):
        """The Oracle engine performs no masking: result set passes through."""
        query_result = ResultSet()
        new_engine = OracleEngine(instance=self.ins)
        masking_result = new_engine.query_masking(schema_name='', sql='select 1', resultset=query_result)
        self.assertEqual(masking_result, query_result)

    def test_execute_check_select_sql(self):
        """Query statements are rejected by execute_check (DML/DDL only)."""
        sql = 'select * from user;'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'))
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    def test_execute_check_critical_sql(self):
        """SQL matching the configured critical_ddl_regex is rejected."""
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'))
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    @patch('sql.engines.oracle.OracleEngine.get_sql_first_object_name', return_value='tb')
    @patch('sql.engines.oracle.OracleEngine.object_name_check', return_value=True)
    def test_execute_check_normal_sql(self, _explain_check, _get_sql_first_object_name, _object_name_check):
        """An unsupported-for-audit DDL passes through with a warning (errlevel 1)."""
        self.sys_config.purge()
        sql = 'alter table tb set id=1'
        row = ReviewResult(id=1,
                           errlevel=1,
                           stagestatus='当前平台,此语法不支持审核!',
                           errormessage='当前平台,此语法不支持审核!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'),
                           affected_rows=0,
                           execute_time=0,
                           stmt_type='SQL',
                           object_owner='',
                           object_type='',
                           object_name='',
                           )
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_execute_workflow_success(self, _conn, _cursor, _execute):
        """Executing a reviewed workflow over a mocked connection reports success."""
        sql = 'update user set id=1'
        # The stored review row carries the extra Oracle audit fields ...
        review_row = ReviewResult(id=1,
                                  errlevel=0,
                                  stagestatus='Execute Successfully',
                                  errormessage='None',
                                  sql=sql,
                                  affected_rows=0,
                                  execute_time=0,
                                  stmt_type='SQL',
                                  object_owner='',
                                  object_type='',
                                  object_name='', )
        # ... while the execution result row does not; only keys are compared.
        execute_row = ReviewResult(id=1,
                                   errlevel=0,
                                   stagestatus='Execute Successfully',
                                   errormessage='None',
                                   sql=sql,
                                   affected_rows=0,
                                   execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql,
                                          review_content=ReviewSet(rows=[review_row]).json())
        new_engine = OracleEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        self.assertEqual(execute_result.rows[0].__dict__.keys(), execute_row.__dict__.keys())

    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect', return_value=RuntimeError)
    def test_execute_workflow_exception(self, _conn, _cursor, _execute):
        """A broken connection surfaces as an Execute Failed row / AttributeError."""
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=2,
                           stagestatus='Execute Failed',
                           errormessage=f'异常信息:{f"Oracle命令执行报错,语句:{sql}"}',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0,
                           stmt_type='SQL',
                           object_owner='',
                           object_type='',
                           object_name='',
                           )
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql, review_content=ReviewSet(rows=[row]).json())
        with self.assertRaises(AttributeError):
            new_engine = OracleEngine(instance=self.ins)
            execute_result = new_engine.execute_workflow(workflow=wf)
            self.assertIsInstance(execute_result, ReviewSet)
            self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class MongoTest(TestCase):
    """Tests for MongoEngine, backed entirely by mocks (no real mongod)."""

    def setUp(self) -> None:
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mongo',
                                           host='some_host', port=3306, user='ins_user')
        self.engine = MongoEngine(instance=self.ins)

    def tearDown(self) -> None:
        self.ins.delete()

    @patch('sql.engines.mongo.pymongo')
    def test_get_connection(self, mock_pymongo):
        """get_connection must construct exactly one MongoClient."""
        _ = self.engine.get_connection()
        mock_pymongo.MongoClient.assert_called_once()

    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_query(self, mock_get_connection):
        # TODO: only the count-style command is covered here; the normal
        # (document-returning) query path still needs a test.
        test_sql = """{"collection": "job","count": true}"""
        self.assertIsInstance(self.engine.query('archery', test_sql), ResultSet)

    def test_query_check(self):
        """A well-formed JSON command must not be flagged as a bad query."""
        test_sql = """{"collection": "job","count": true}"""
        check_result = self.engine.query_check(sql=test_sql)
        self.assertEqual(False, check_result.get('bad_query'))

    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_get_all_databases(self, mock_get_connection):
        """get_all_databases returns a ResultSet (contents not pinned here)."""
        db_list = self.engine.get_all_databases()
        self.assertIsInstance(db_list, ResultSet)

    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_get_all_tables(self, mock_get_connection):
        """Collection names from the mocked db must pass through unchanged."""
        mock_db = Mock()
        # Sample return value for list_collection_names.
        mock_db.list_collection_names.return_value = ['u', 'v', 'w']
        mock_get_connection.return_value = {'some_db': mock_db}
        table_list = self.engine.get_all_tables('some_db')
        mock_db.list_collection_names.assert_called_once()
        self.assertEqual(table_list.rows, ['u', 'v', 'w'])
# NOTE: a dataset-export metadata row (avg_line_length | max_line_length |
# alphanum_fraction) was fused onto this line; the real source resumes below.
import MySQLdb
import json
from datetime import timedelta, datetime
from unittest.mock import patch, Mock, ANY
import sqlparse
from django.contrib.auth import get_user_model
from django.test import TestCase
from common.config import SysConfig
from sql.engines import EngineBase
from sql.engines.goinception import GoInceptionEngine
from sql.engines.models import ResultSet, ReviewSet, ReviewResult
from sql.engines.mssql import MssqlEngine
from sql.engines.mysql import MysqlEngine
from sql.engines.redis import RedisEngine
from sql.engines.pgsql import PgSQLEngine
from sql.engines.oracle import OracleEngine
from sql.engines.mongo import MongoEngine
from sql.engines.inception import InceptionEngine, _repair_json_str
from sql.models import Instance, SqlWorkflow, SqlWorkflowContent
# Project user model (Archery swaps Django's default), shared by all test cases.
User = get_user_model()
class TestReviewSet(TestCase):
    """Smoke test for ReviewSet JSON serialisation."""

    def test_review_set(self):
        """Rows assigned to a ReviewSet must appear in its json() output."""
        review_set = ReviewSet()
        review_set.rows = [{'id': '1679123'}]
        serialized = review_set.json()
        self.assertIn('1679123', serialized)
class TestEngineBase(TestCase):
    """Tests for the EngineBase constructor and its instance-derived fields."""

    @classmethod
    def setUpClass(cls):
        # One user + one instance + one finished workflow shared by the class.
        cls.u1 = User(username='some_user', display='用户1')
        cls.u1.save()
        cls.ins1 = Instance(instance_name='some_ins', type='master', db_type='mssql', host='some_host',
                            port=1366, user='ins_user', password='some_str')
        cls.ins1.save()
        cls.wf1 = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer=cls.u1.username,
            engineer_display=cls.u1.display,
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=cls.ins1,
            db_name='some_db',
            syntax_type=1
        )
        cls.wfc1 = SqlWorkflowContent.objects.create(
            workflow=cls.wf1,
            sql_content='some_sql',
            execute_result=json.dumps([{
                'id': 1,
                'sql': 'some_content'
            }]))

    @classmethod
    def tearDownClass(cls):
        # Delete in dependency order: content -> workflow -> instance -> user.
        cls.wfc1.delete()
        cls.wf1.delete()
        cls.ins1.delete()
        cls.u1.delete()

    def test_init_with_ins(self):
        """EngineBase must copy instance_name and user from the Instance."""
        engine = EngineBase(instance=self.ins1)
        self.assertEqual(self.ins1.instance_name, engine.instance_name)
        self.assertEqual(self.ins1.user, engine.user)
class TestMssql(TestCase):
    """Tests for MssqlEngine: connection handling, metadata listing, query
    checking/filtering and workflow execution, all against mocked pyodbc."""

    @classmethod
    def setUpClass(cls):
        cls.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mssql', host='some_host',
                            port=1366, user='ins_user', password='some_str')
        cls.ins1.save()
        cls.engine = MssqlEngine(instance=cls.ins1)
        cls.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=cls.ins1,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=cls.wf, sql_content='insert into some_tb values (1)')

    @classmethod
    def tearDownClass(cls):
        cls.ins1.delete()
        cls.wf.delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('sql.engines.mssql.pyodbc.connect')
    def testGetConnection(self, connect):
        """get_connection must open exactly one pyodbc connection."""
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.get_connection()
        connect.assert_called_once()

    @patch('sql.engines.mssql.pyodbc.connect')
    def testQuery(self, connect):
        """query must fetch limit_num rows and close the connection."""
        cur = Mock()
        connect.return_value.cursor = cur
        cur.return_value.execute = Mock()
        cur.return_value.fetchmany.return_value = (('v1', 'v2'),)
        cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des'))
        new_engine = MssqlEngine(instance=self.ins1)
        query_result = new_engine.query(sql='some_str', limit_num=100)
        cur.return_value.execute.assert_called()
        # mssql passes the limit positionally (contrast MySQL's size= kwarg).
        cur.return_value.fetchmany.assert_called_once_with(100)
        connect.return_value.close.assert_called_once()
        self.assertIsInstance(query_result, ResultSet)

    @patch.object(MssqlEngine, 'query')
    def testAllDb(self, mock_query):
        """Database names are flattened from one-tuples to plain strings."""
        db_result = ResultSet()
        db_result.rows = [('db_1',), ('db_2',)]
        mock_query.return_value = db_result
        new_engine = MssqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_databases()
        self.assertEqual(dbs.rows, ['db_1', 'db_2'])

    @patch.object(MssqlEngine, 'query')
    def testAllTables(self, mock_query):
        """Only the first column (table name) of each row is kept."""
        table_result = ResultSet()
        table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')]
        mock_query.return_value = table_result
        new_engine = MssqlEngine(instance=self.ins1)
        tables = new_engine.get_all_tables('some_db')
        mock_query.assert_called_once_with(db_name='some_db', sql=ANY)
        self.assertEqual(tables.rows, ['tb_1', 'tb_2'])

    @patch.object(MssqlEngine, 'query')
    def testAllColumns(self, mock_query):
        """Only the first column (column name) of each row is kept."""
        db_result = ResultSet()
        db_result.rows = [('col_1', 'type'), ('col_2', 'type2')]
        mock_query.return_value = db_result
        new_engine = MssqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb')
        self.assertEqual(dbs.rows, ['col_1', 'col_2'])

    @patch.object(MssqlEngine, 'query')
    def testDescribe(self, mock_query):
        """describe_table delegates to query exactly once."""
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.describe_table('some_db', 'some_db')
        mock_query.assert_called_once()

    def testQueryCheck(self):
        """concat() anywhere in a SELECT is banned; sp_helptext passes through."""
        new_engine = MssqlEngine(instance=self.ins1)
        banned_sql = 'select concat(phone,1) from user_table'
        check_result = new_engine.query_check(db_name='some_db', sql=banned_sql)
        self.assertTrue(check_result.get('bad_query'))
        banned_sql = 'select phone from user_table where phone=concat(phone,1)'
        check_result = new_engine.query_check(db_name='some_db', sql=banned_sql)
        self.assertTrue(check_result.get('bad_query'))
        sp_sql = "sp_helptext '[SomeName].[SomeAction]'"
        check_result = new_engine.query_check(db_name='some_db', sql=sp_sql)
        self.assertFalse(check_result.get('bad_query'))
        self.assertEqual(check_result.get('filtered_sql'), sp_sql)

    def test_filter_sql(self):
        """Row limiting uses T-SQL 'top N', not MySQL-style LIMIT."""
        new_engine = MssqlEngine(instance=self.ins1)
        banned_sql = 'select user from user_table'
        check_result = new_engine.filter_sql(sql=banned_sql, limit_num=10)
        self.assertEqual(check_result, "select top 10 user from user_table")

    def test_execute_check(self):
        """The batch is split on GO separators, case-insensitively."""
        new_engine = MssqlEngine(instance=self.ins1)
        test_sql = 'use database\ngo\nsome sql1\nGO\nsome sql2\n\r\nGo\nsome sql3\n\r\ngO\n'
        check_result = new_engine.execute_check(db_name=None, sql=test_sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[1].__dict__['sql'], "use database\n")
        self.assertEqual(check_result.rows[2].__dict__['sql'], "\nsome sql1\n")
        self.assertEqual(check_result.rows[4].__dict__['sql'], "\nsome sql3\n\r\n")

    @patch('sql.engines.mssql.MssqlEngine.execute')
    def test_execute_workflow(self, mock_execute):
        """A single-statement workflow triggers exactly one execute call."""
        mock_execute.return_value.error = None
        new_engine = MssqlEngine(instance=self.ins1)
        new_engine.execute_workflow(self.wf)
        mock_execute.assert_called()
        self.assertEqual(1, mock_execute.call_count)

    @patch('sql.engines.mssql.MssqlEngine.get_connection')
    def test_execute(self, mock_connect):
        """execute commits on success and rolls back (no commit) on failure."""
        mock_cursor = Mock()
        mock_connect.return_value.cursor = mock_cursor
        new_engine = MssqlEngine(instance=self.ins1)
        execute_result = new_engine.execute('some_db', 'some_sql')
        self.assertIsNone(execute_result.error)
        self.assertEqual('some_sql', execute_result.full_sql)
        self.assertEqual(2, len(execute_result.rows))
        mock_cursor.return_value.execute.assert_called()
        mock_cursor.return_value.commit.assert_called()
        mock_cursor.reset_mock()
        # Failure path: execute raises, so commit must NOT happen.
        mock_cursor.return_value.execute.side_effect = Exception('Boom! some exception!')
        execute_result = new_engine.execute('some_db', 'some_sql')
        self.assertIn('Boom! some exception!', execute_result.error)
        self.assertEqual('some_sql', execute_result.full_sql)
        self.assertEqual(2, len(execute_result.rows))
        mock_cursor.return_value.commit.assert_not_called()
        mock_cursor.return_value.rollback.assert_called()
class TestMysql(TestCase):
    """Tests for MysqlEngine: connection, metadata listing, query check and
    LIMIT rewriting, masking, inception-backed execute_check/execute_workflow,
    variables, OSC control and replication helpers — all against mocks."""

    def setUp(self):
        self.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mysql', host='some_host',
                             port=1366, user='ins_user', password='some_str')
        self.ins1.save()
        self.sys_config = SysConfig()
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins1,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)

    def tearDown(self):
        self.ins1.delete()
        self.sys_config.purge()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()

    # --- connection and raw query -------------------------------------
    @patch('MySQLdb.connect')
    def test_engine_base_info(self, _conn):
        new_engine = MysqlEngine(instance=self.ins1)
        self.assertEqual(new_engine.name, 'MySQL')
        self.assertEqual(new_engine.info, 'MySQL engine')

    @patch('MySQLdb.connect')
    def testGetConnection(self, connect):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_connection()
        connect.assert_called_once()

    @patch('MySQLdb.connect')
    def testQuery(self, connect):
        """query must fetch via fetchmany(size=limit) and close the connection."""
        cur = Mock()
        connect.return_value.cursor = cur
        cur.return_value.execute = Mock()
        cur.return_value.fetchmany.return_value = (('v1', 'v2'),)
        cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des'))
        new_engine = MysqlEngine(instance=self.ins1)
        query_result = new_engine.query(sql='some_str', limit_num=100)
        cur.return_value.execute.assert_called()
        cur.return_value.fetchmany.assert_called_once_with(size=100)
        connect.return_value.close.assert_called_once()
        self.assertIsInstance(query_result, ResultSet)

    # --- metadata listing ---------------------------------------------
    @patch.object(MysqlEngine, 'query')
    def testAllDb(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('db_1',), ('db_2',)]
        mock_query.return_value = db_result
        new_engine = MysqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_databases()
        self.assertEqual(dbs.rows, ['db_1', 'db_2'])

    @patch.object(MysqlEngine, 'query')
    def testAllTables(self, mock_query):
        table_result = ResultSet()
        table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')]
        mock_query.return_value = table_result
        new_engine = MysqlEngine(instance=self.ins1)
        tables = new_engine.get_all_tables('some_db')
        mock_query.assert_called_once_with(db_name='some_db', sql=ANY)
        self.assertEqual(tables.rows, ['tb_1', 'tb_2'])

    @patch.object(MysqlEngine, 'query')
    def testAllColumns(self, mock_query):
        db_result = ResultSet()
        db_result.rows = [('col_1', 'type'), ('col_2', 'type2')]
        mock_query.return_value = db_result
        new_engine = MysqlEngine(instance=self.ins1)
        dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb')
        self.assertEqual(dbs.rows, ['col_1', 'col_2'])

    @patch.object(MysqlEngine, 'query')
    def testDescribe(self, mock_query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.describe_table('some_db', 'some_db')
        mock_query.assert_called_once()

    # --- query_check: comment stripping and statement-type gating -----
    def testQueryCheck(self):
        new_engine = MysqlEngine(instance=self.ins1)
        # Leading '-- comment' lines must be stripped from the checked SQL.
        sql_without_limit = '-- 测试\n select user from usertable'
        check_result = new_engine.query_check(db_name='some_db', sql=sql_without_limit)
        self.assertEqual(check_result['filtered_sql'], 'select user from usertable')

    def test_query_check_wrong_sql(self):
        new_engine = MysqlEngine(instance=self.ins1)
        wrong_sql = '-- 测试'
        check_result = new_engine.query_check(db_name='some_db', sql=wrong_sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': '-- 测试', 'has_star': False})

    def test_query_check_update_sql(self):
        new_engine = MysqlEngine(instance=self.ins1)
        update_sql = 'update user set id=0'
        check_result = new_engine.query_check(db_name='some_db', sql=update_sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': 'update user set id=0',
                              'has_star': False})

    # --- filter_sql: LIMIT injection and clamping ---------------------
    def test_filter_sql_with_delimiter(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable;'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 100;')

    def test_filter_sql_without_delimiter(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 100;')

    def test_filter_sql_with_limit(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')

    def test_filter_sql_with_limit_min(self):
        # The smaller of the user's LIMIT and limit_num wins.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100)
        self.assertEqual(check_result, 'select user from usertable limit 10;')

    def test_filter_sql_with_limit_offset(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10 offset 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')

    def test_filter_sql_with_limit_nn(self):
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'select user from usertable limit 10, 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'select user from usertable limit 1;')

    def test_filter_sql_upper(self):
        # Keywords keep their original case; only the limit clause is rewritten.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'SELECT USER FROM usertable LIMIT 10, 100'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'SELECT USER FROM usertable limit 1;')

    def test_filter_sql_not_select(self):
        # Non-SELECT statements pass through untouched.
        new_engine = MysqlEngine(instance=self.ins1)
        sql_without_limit = 'show create table usertable;'
        check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1)
        self.assertEqual(check_result, 'show create table usertable;')

    # --- data masking --------------------------------------------------
    @patch('sql.engines.mysql.data_masking', return_value=ResultSet())
    def test_query_masking(self, _data_masking):
        query_result = ResultSet()
        new_engine = MysqlEngine(instance=self.ins1)
        masking_result = new_engine.query_masking(db_name='archery', sql='select 1', resultset=query_result)
        self.assertIsInstance(masking_result, ResultSet)

    @patch('sql.engines.mysql.data_masking', return_value=ResultSet())
    def test_query_masking_not_select(self, _data_masking):
        # Non-SELECT (e.g. EXPLAIN) results are returned unmasked.
        query_result = ResultSet()
        new_engine = MysqlEngine(instance=self.ins1)
        masking_result = new_engine.query_masking(db_name='archery', sql='explain select 1', resultset=query_result)
        self.assertEqual(masking_result, query_result)

    # --- execute_check via Inception ----------------------------------
    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_select_sql(self, _inception_engine):
        """SELECT submitted as a workflow is rejected before reaching Inception."""
        self.sys_config.set('inception', 'true')
        sql = 'select * from user'
        inc_row = ReviewResult(id=1,
                               errlevel=0,
                               stagestatus='Audit completed',
                               errormessage='None',
                               sql=sql,
                               affected_rows=0,
                               execute_time=0, )
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sql)
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_critical_sql(self, _inception_engine):
        """SQL matching critical_ddl_regex is rejected regardless of Inception."""
        self.sys_config.set('inception', 'true')
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        inc_row = ReviewResult(id=1,
                               errlevel=0,
                               stagestatus='Audit completed',
                               errormessage='None',
                               sql=sql,
                               affected_rows=0,
                               execute_time=0, )
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sql)
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_normal_sql(self, _inception_engine):
        """Plain DML passes straight through with Inception's review row."""
        self.sys_config.set('inception', 'true')
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[row])
        new_engine = MysqlEngine(instance=self.ins1)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_check_normal_sql_with_Exception(self, _inception_engine):
        # Inception failures must propagate, not be swallowed.
        sql = 'update user set id=1'
        _inception_engine.return_value.execute_check.side_effect = RuntimeError()
        new_engine = MysqlEngine(instance=self.ins1)
        with self.assertRaises(RuntimeError):
            new_engine.execute_check(db_name=0, sql=sql)

    @patch.object(MysqlEngine, 'query')
    @patch('sql.engines.mysql.InceptionEngine')
    def test_execute_workflow(self, _inception_engine, _query):
        self.sys_config.set('inception', 'true')
        sql = 'update user set id=1'
        _inception_engine.return_value.execute.return_value = ReviewSet(full_sql=sql)
        _query.return_value.rows = (('0',),)
        new_engine = MysqlEngine(instance=self.ins1)
        execute_result = new_engine.execute_workflow(self.wf)
        self.assertIsInstance(execute_result, ReviewSet)

    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_execute(self, _connect, _cursor, _execute):
        new_engine = MysqlEngine(instance=self.ins1)
        execute_result = new_engine.execute(self.wf)
        self.assertIsInstance(execute_result, ResultSet)

    # --- server variables and misc helpers ----------------------------
    @patch('MySQLdb.connect')
    def test_server_version(self, _connect):
        # Version suffix ('-16log') must be stripped when parsing.
        _connect.return_value.get_server_info.return_value = '5.7.20-16log'
        new_engine = MysqlEngine(instance=self.ins1)
        server_version = new_engine.server_version
        self.assertTupleEqual(server_version, (5, 7, 20))

    @patch.object(MysqlEngine, 'query')
    def test_get_variables_not_filter(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_variables()
        _query.assert_called_once()

    @patch('MySQLdb.connect')
    @patch.object(MysqlEngine, 'query')
    def test_get_variables_filter(self, _query, _connect):
        _connect.return_value.get_server_info.return_value = '5.7.20-16log'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.get_variables(variables=['binlog_format'])
        _query.assert_called()

    @patch.object(MysqlEngine, 'query')
    def test_set_variable(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.set_variable('binlog_format', 'ROW')
        _query.assert_called_once_with(sql="set global binlog_format=ROW;")

    # --- OSC control routing (Inception vs goInception) ---------------
    @patch('sql.engines.mysql.GoInceptionEngine')
    def test_osc_go_inception(self, _inception_engine):
        self.sys_config.set('inception', 'false')
        _inception_engine.return_value.osc_control.return_value = ReviewSet()
        command = 'get'
        sqlsha1 = 'xxxxx'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)

    @patch('sql.engines.mysql.InceptionEngine')
    def test_osc_inception(self, _inception_engine):
        self.sys_config.set('inception', 'true')
        _inception_engine.return_value.osc_control.return_value = ReviewSet()
        command = 'get'
        sqlsha1 = 'xxxxx'
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)

    @patch.object(MysqlEngine, 'query')
    def test_kill_connection(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.kill_connection(100)
        _query.assert_called_once_with(sql="kill 100")

    @patch.object(MysqlEngine, 'query')
    def test_seconds_behind_master(self, _query):
        new_engine = MysqlEngine(instance=self.ins1)
        new_engine.seconds_behind_master
        _query.assert_called_once_with(sql="show slave status", close_conn=False,
                                       cursorclass=MySQLdb.cursors.DictCursor)
class TestRedis(TestCase):
    """Tests for RedisEngine: command execution, db enumeration, the command
    blacklist in query_check, and workflow execution — all against mocks."""

    @classmethod
    def setUpClass(cls):
        cls.ins = Instance(instance_name='some_ins', type='slave', db_type='redis', host='some_host',
                           port=1366, user='ins_user', password='some_str')
        cls.ins.save()

    @classmethod
    def tearDownClass(cls):
        cls.ins.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()

    @patch('redis.Redis')
    def test_engine_base_info(self, _conn):
        new_engine = RedisEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'Redis')
        self.assertEqual(new_engine.info, 'Redis engine')

    @patch('redis.Redis')
    def test_get_connection(self, _conn):
        new_engine = RedisEngine(instance=self.ins)
        new_engine.get_connection()
        _conn.assert_called_once()

    @patch('redis.Redis.execute_command', return_value=[1, 2, 3])
    def test_query_return_list(self, _execute_command):
        """A list reply is wrapped one element per row."""
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, ([1], [2], [3]))

    @patch('redis.Redis.execute_command', return_value='text')
    def test_query_return_str(self, _execute_command):
        """A scalar reply becomes a single one-column row."""
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, (['text'],))

    @patch('redis.Redis.execute_command', return_value='text')
    def test_query_execute(self, _execute_command):
        new_engine = RedisEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertTupleEqual(query_result.rows, (['text'],))

    @patch('redis.Redis.config_get', return_value={"databases": 4})
    def test_get_all_databases(self, _config_get):
        """db list is derived from the server's 'databases' config value."""
        new_engine = RedisEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['0', '1', '2', '3'])

    def test_query_check_safe_cmd(self):
        # NOTE(review): despite the test name, 'keys' is expected to be
        # blacklisted even with a narrow pattern — the assertion says bad_query.
        safe_cmd = "keys 1*"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name=0, sql=safe_cmd)
        self.assertDictEqual(check_result,
                             {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False})

    def test_query_check_danger_cmd(self):
        # 'keys *' is likewise blocked (variable name 'safe_cmd' is a misnomer
        # carried over from the sibling test).
        safe_cmd = "keys *"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name=0, sql=safe_cmd)
        self.assertDictEqual(check_result,
                             {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False})

    def test_filter_sql(self):
        # Redis commands are not rewritten; limit_num is ignored.
        safe_cmd = "keys 1*"
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=safe_cmd, limit_num=100)
        self.assertEqual(check_result, 'keys 1*')

    def test_query_masking(self):
        # Masking is a no-op for Redis: the input resultset is returned as-is.
        query_result = ResultSet()
        new_engine = RedisEngine(instance=self.ins)
        masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result)
        self.assertEqual(masking_result, query_result)

    def test_execute_check(self):
        sql = 'set 1 1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        new_engine = RedisEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)

    @patch('redis.Redis.execute_command', return_value='text')
    def test_execute_workflow_success(self, _execute_command):
        sql = 'set 1 1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Execute Successfully',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        new_engine = RedisEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        # Execute times vary, so only the field set is compared.
        self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class TestPgSQL(TestCase):
@classmethod
def setUpClass(cls):
cls.ins = Instance(instance_name='some_ins', type='slave', db_type='pgsql', host='some_host',
port=1366, user='ins_user', password='some_str')
cls.ins.save()
cls.sys_config = SysConfig()
@classmethod
def tearDownClass(cls):
cls.ins.delete()
cls.sys_config.purge()
@patch('psycopg2.connect')
def test_engine_base_info(self, _conn):
new_engine = PgSQLEngine(instance=self.ins)
self.assertEqual(new_engine.name, 'PgSQL')
self.assertEqual(new_engine.info, 'PgSQL engine')
@patch('psycopg2.connect')
def test_get_connection(self, _conn):
new_engine = PgSQLEngine(instance=self.ins)
new_engine.get_connection("some_dbname")
_conn.assert_called_once()
@patch('psycopg2.connect.cursor.execute')
@patch('psycopg2.connect.cursor')
@patch('psycopg2.connect')
def test_query(self, _conn, _cursor, _execute):
_conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)]
new_engine = PgSQLEngine(instance=self.ins)
query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=100, schema_name="some_schema")
self.assertIsInstance(query_result, ResultSet)
self.assertListEqual(query_result.rows, [(1,)])
@patch('psycopg2.connect.cursor.execute')
@patch('psycopg2.connect.cursor')
@patch('psycopg2.connect')
def test_query_not_limit(self, _conn, _cursor, _execute):
_conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
new_engine = PgSQLEngine(instance=self.ins)
query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=0, schema_name="some_schema")
self.assertIsInstance(query_result, ResultSet)
self.assertListEqual(query_result.rows, [(1,)])
@patch('sql.engines.pgsql.PgSQLEngine.query',
return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)]))
def test_get_all_databases(self, query):
new_engine = PgSQLEngine(instance=self.ins)
dbs = new_engine.get_all_databases()
self.assertListEqual(dbs.rows, ['archery'])
@patch('sql.engines.pgsql.PgSQLEngine.query',
return_value=ResultSet(rows=[('information_schema',), ('archery',), ('pg_catalog',)]))
def test_get_all_schemas(self, _query):
new_engine = PgSQLEngine(instance=self.ins)
schemas = new_engine.get_all_schemas(db_name='archery')
self.assertListEqual(schemas.rows, ['archery'])
@patch('sql.engines.pgsql.PgSQLEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)]))
def test_get_all_tables(self, _query):
new_engine = PgSQLEngine(instance=self.ins)
tables = new_engine.get_all_tables(db_name='archery', schema_name='archery')
self.assertListEqual(tables.rows, ['test2'])
@patch('sql.engines.pgsql.PgSQLEngine.query',
return_value=ResultSet(rows=[('id',), ('name',)]))
def test_get_all_columns_by_tb(self, _query):
new_engine = PgSQLEngine(instance=self.ins)
columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2', schema_name='archery')
self.assertListEqual(columns.rows, ['id', 'name'])
@patch('sql.engines.pgsql.PgSQLEngine.query',
return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)]))
def test_describe_table(self, _query):
new_engine = PgSQLEngine(instance=self.ins)
describe = new_engine.describe_table(db_name='archery', schema_name='archery', tb_name='text')
self.assertIsInstance(describe, ResultSet)
def test_query_check_disable_sql(self):
sql = "update xxx set a=1 "
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.query_check(db_name='archery', sql=sql)
self.assertDictEqual(check_result,
{'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': sql.strip(), 'has_star': False})
def test_query_check_star_sql(self):
sql = "select * from xx "
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.query_check(db_name='archery', sql=sql)
self.assertDictEqual(check_result,
{'msg': 'SQL语句中含有 * ', 'bad_query': False, 'filtered_sql': sql.strip(), 'has_star': True})
def test_filter_sql_with_delimiter(self):
sql = "select * from xx;"
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.filter_sql(sql=sql, limit_num=100)
self.assertEqual(check_result, "select * from xx limit 100;")
def test_filter_sql_without_delimiter(self):
sql = "select * from xx"
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.filter_sql(sql=sql, limit_num=100)
self.assertEqual(check_result, "select * from xx limit 100;")
def test_filter_sql_with_limit(self):
sql = "select * from xx limit 10"
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.filter_sql(sql=sql, limit_num=1)
self.assertEqual(check_result, "select * from xx limit 10;")
def test_query_masking(self):
query_result = ResultSet()
new_engine = PgSQLEngine(instance=self.ins)
masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result)
self.assertEqual(masking_result, query_result)
def test_execute_check_select_sql(self):
sql = 'select * from user;'
row = ReviewResult(id=1, errlevel=2,
stagestatus='驳回不支持语句',
errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
sql=sql)
new_engine = PgSQLEngine(instance=self.ins)
check_result = new_engine.execute_check(db_name='archery', sql=sql)
self.assertIsInstance(check_result, ReviewSet)
self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    def test_execute_check_critical_sql(self):
        """A statement matching the configured critical-DDL regex is rejected as high-risk SQL."""
        self.sys_config.set('critical_ddl_regex', '^|update')
        # Reload the config cache so the engine sees the regex set above.
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sql)
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    def test_execute_check_normal_sql(self):
        """With no critical regex configured, a DDL statement passes the audit (errlevel 0)."""
        # purge() clears any sys_config left over from other tests.
        self.sys_config.purge()
        sql = 'alter table tb set id=1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Audit completed',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        new_engine = PgSQLEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect')
    def test_execute_workflow_success(self, _conn, _cursor, _execute):
        """Executing a workflow with a mocked psycopg2 connection yields a ReviewSet row
        carrying the expected set of fields (keys compared, not values).

        Note: @patch decorators are applied bottom-up, so _conn is 'psycopg2.connect'.
        """
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=0,
                           stagestatus='Execute Successfully',
                           errormessage='None',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        new_engine = PgSQLEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
    @patch('psycopg2.connect.cursor.execute')
    @patch('psycopg2.connect.cursor')
    @patch('psycopg2.connect', return_value=RuntimeError)
    def test_execute_workflow_exception(self, _conn, _cursor, _execute):
        """A broken connection (connect patched to a non-connection) surfaces as AttributeError.

        NOTE(review): the expected errormessage mentions Oracle inside a PgSQL test —
        looks like a copy-paste from the Oracle suite; string kept as-is.
        """
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=2,
                           stagestatus='Execute Failed',
                           errormessage=f'异常信息:{f"Oracle命令执行报错,语句:{sql}"}',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0, )
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql)
        # The assertions inside the context only run if execute_workflow does NOT raise.
        with self.assertRaises(AttributeError):
            new_engine = PgSQLEngine(instance=self.ins)
            execute_result = new_engine.execute_workflow(workflow=wf)
            self.assertIsInstance(execute_result, ReviewSet)
            self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class TestModel(TestCase):
    """Regression tests for the ResultSet/ReviewSet result models.

    Guards against the classic shared-mutable-class-attribute bug: mutating
    the ``rows`` of one instance must not leak into freshly created instances.
    """
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_result_set_rows_shadow(self):
        """Mutating one instance's rows leaves brand-new instances empty."""
        mutated_result_set = ResultSet()
        for value in range(10):
            mutated_result_set.rows += [value]
        self.assertEqual(ResultSet().rows, [])
        mutated_review_set = ReviewSet()
        for value in range(10):
            mutated_review_set.rows += [value]
        self.assertEqual(ReviewSet().rows, [])
class TestInception(TestCase):
    """Tests for the legacy Inception audit/execute engine wrapper.

    Target instances and a workflow are Django ORM fixtures; MySQLdb and the
    engine's own ``query`` are mocked, so no real Inception server is needed.
    """
    def setUp(self):
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql', host='some_host',
                                           port=3306, user='ins_user', password='some_str')
        self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='inception',
                                               host='some_host', port=6669)
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
    def tearDown(self):
        self.ins.delete()
        self.ins_inc.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('MySQLdb.connect')
    def test_get_connection(self, _connect):
        """get_connection opens exactly one MySQLdb connection."""
        new_engine = InceptionEngine()
        new_engine.get_connection()
        _connect.assert_called_once()
    @patch('MySQLdb.connect')
    def test_get_backup_connection(self, _connect):
        """get_backup_connection also opens exactly one MySQLdb connection."""
        new_engine = InceptionEngine()
        new_engine.get_backup_connection()
        _connect.assert_called_once()
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_check_normal_sql(self, _query):
        """execute_check wraps an Inception audit row into a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        _query.return_value = ResultSet(full_sql=sql, rows=[row])
        new_engine = InceptionEngine()
        check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_exception(self, _query):
        """An errlevel-1 (failed) Inception row still yields a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence',
                       'backup_dbname', 'execute_time', 'sqlsha1']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_execute_finish(self, _query):
        """An errlevel-0 (successful) Inception row yields a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '']
        column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence',
                       'backup_dbname', 'execute_time', 'sqlsha1']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query(self, _conn, _cursor, _execute):
        # Decorators apply bottom-up: _conn is 'MySQLdb.connect'.
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = InceptionEngine()
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        """limit_num=0 means no LIMIT is applied; query still returns a ResultSet."""
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = InceptionEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_query_print(self, _query):
        """query_print returns the parsed (repaired) query_tree JSON from Inception."""
        sql = 'update user set id=100'
        row = [1,
               'select * from sql_instance limit 100',
               0,
               '{"command":"select","select_list":[{"type":"FIELD_ITEM","field":"*"}],"table_ref":[{"db":"archery","table":"sql_instance"}],"limit":{"limit":[{"type":"INT_ITEM","value":"100"}]}}',
               'None']
        column_list = ['ID', 'statement', 'errlevel', 'query_tree', 'errmsg']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = InceptionEngine()
        print_result = new_engine.query_print(self.ins, db_name=None, sql=sql)
        self.assertDictEqual(print_result, json.loads(_repair_json_str(row[3])))
    @patch('MySQLdb.connect')
    def test_get_rollback_list(self, _connect):
        """get_rollback parses the workflow's stored execute_result JSON (smoke test, no asserts)."""
        self.wf.sqlworkflowcontent.execute_result = """[{
            "id": 1,
            "stage": "RERUN",
            "errlevel": 0,
            "stagestatus": "Execute Successfully",
            "errormessage": "None",
            "sql": "use archer_test",
            "affected_rows": 0,
            "sequence": "'1554135032_13038_0'",
            "backup_dbname": "None",
            "execute_time": "0.000",
            "sqlsha1": "",
            "actual_affected_rows": 0
        }, {
            "id": 2,
            "stage": "EXECUTED",
            "errlevel": 0,
            "stagestatus": "Execute Successfully Backup successfully",
            "errormessage": "None",
            "sql": "insert into tt1 (user_name)values('A'),('B'),('C')",
            "affected_rows": 3,
            "sequence": "'1554135032_13038_1'",
            "backup_dbname": "mysql_3306_archer_test",
            "execute_time": "0.000",
            "sqlsha1": "",
            "actual_affected_rows": 3
        }]"""
        self.wf.sqlworkflowcontent.save()
        new_engine = InceptionEngine()
        new_engine.get_rollback(self.wf)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_get(self, _query):
        """osc_control 'get' issues an 'inception get osc_percent' statement."""
        new_engine = InceptionEngine()
        command = 'get'
        sqlsha1 = 'xxxxx'
        sql = f"inception get osc_percent '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_kill(self, _query):
        """osc_control 'kill' issues an 'inception stop alter' statement."""
        new_engine = InceptionEngine()
        command = 'kill'
        sqlsha1 = 'xxxxx'
        sql = f"inception stop alter '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_osc_not_support(self, _query):
        """pt-osc cannot pause/resume; 'stop' must raise ValueError."""
        new_engine = InceptionEngine()
        command = 'stop'
        sqlsha1 = 'xxxxx'
        sql = f"inception stop alter '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        with self.assertRaisesMessage(ValueError, 'pt-osc不支持暂停和恢复,需要停止执行请使用终止按钮!'):
            new_engine.osc_control(sqlsha1=sqlsha1, command=command)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_get_variables(self, _query):
        new_engine = InceptionEngine(instance=self.ins_inc)
        new_engine.get_variables()
        sql = f"inception get variables;"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_get_variables_filter(self, _query):
        new_engine = InceptionEngine(instance=self.ins_inc)
        new_engine.get_variables(variables=['inception_osc_on'])
        sql = f"inception get variables 'inception_osc_on';"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.inception.InceptionEngine.query')
    def test_set_variable(self, _query):
        # NOTE(review): uses self.ins (mysql) rather than self.ins_inc — confirm intended.
        new_engine = InceptionEngine(instance=self.ins)
        new_engine.set_variable('inception_osc_on', 'on')
        _query.assert_called_once_with(sql="inception set inception_osc_on=on;")
class TestGoInception(TestCase):
    """Tests for the goInception engine wrapper.

    Mirrors TestInception but targets the goInception protocol (different
    result columns, 'inception {pause|resume|kill} osc' syntax, and
    "get variables like" filtering).
    """
    def setUp(self):
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql',
                                           host='some_host',
                                           port=3306, user='ins_user', password='some_str')
        self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='goinception',
                                               host='some_host', port=4000)
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
    def tearDown(self):
        self.ins.delete()
        self.ins_inc.delete()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('MySQLdb.connect')
    def test_get_connection(self, _connect):
        """get_connection opens exactly one MySQLdb connection."""
        new_engine = GoInceptionEngine()
        new_engine.get_connection()
        _connect.assert_called_once()
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_check_normal_sql(self, _query):
        """execute_check wraps a goInception audit row into a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        _query.return_value = ResultSet(full_sql=sql, rows=[row])
        new_engine = GoInceptionEngine()
        check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_exception(self, _query):
        """An error_level-1 (failed) row still yields a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql',
                       'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = GoInceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_execute_finish(self, _query):
        """An error_level-0 (successful) row yields a ReviewSet."""
        sql = 'update user set id=100'
        row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', '']
        column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql',
                       'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time']
        _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list)
        new_engine = GoInceptionEngine()
        execute_result = new_engine.execute(workflow=self.wf)
        self.assertIsInstance(execute_result, ReviewSet)
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query(self, _conn, _cursor, _execute):
        # Decorators apply bottom-up: _conn is 'MySQLdb.connect'.
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = GoInceptionEngine()
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
    @patch('MySQLdb.connect.cursor.execute')
    @patch('MySQLdb.connect.cursor')
    @patch('MySQLdb.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        """limit_num=0 means no LIMIT is applied; query still returns a ResultSet."""
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = GoInceptionEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_get(self, _query):
        """osc_control 'get' issues an 'inception get osc_percent' statement."""
        new_engine = GoInceptionEngine()
        command = 'get'
        sqlsha1 = 'xxxxx'
        sql = f"inception get osc_percent '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_pause(self, _query):
        """goInception supports pausing an online schema change."""
        new_engine = GoInceptionEngine()
        command = 'pause'
        sqlsha1 = 'xxxxx'
        sql = f"inception {command} osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_resume(self, _query):
        """goInception supports resuming a paused online schema change."""
        new_engine = GoInceptionEngine()
        command = 'resume'
        sqlsha1 = 'xxxxx'
        sql = f"inception {command} osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_osc_kill(self, _query):
        """osc_control 'kill' issues an 'inception kill osc' statement."""
        new_engine = GoInceptionEngine()
        command = 'kill'
        sqlsha1 = 'xxxxx'
        sql = f"inception kill osc '{sqlsha1}';"
        _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[])
        new_engine.osc_control(sqlsha1=sqlsha1, command=command)
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_get_variables(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins_inc)
        new_engine.get_variables()
        sql = f"inception get variables;"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_get_variables_filter(self, _query):
        # goInception filters with "like", unlike legacy Inception.
        new_engine = GoInceptionEngine(instance=self.ins_inc)
        new_engine.get_variables(variables=['inception_osc_on'])
        sql = f"inception get variables like 'inception_osc_on';"
        _query.assert_called_once_with(sql=sql)
    @patch('sql.engines.goinception.GoInceptionEngine.query')
    def test_set_variable(self, _query):
        new_engine = GoInceptionEngine(instance=self.ins)
        new_engine.set_variable('inception_osc_on', 'on')
        _query.assert_called_once_with(sql="inception set inception_osc_on=on;")
class TestOracle(TestCase):
    """Tests for the Oracle engine: connection (SID vs service_name), query
    filtering/masking, audit (execute_check) and workflow execution, with
    cx_Oracle and the engine's own helpers mocked throughout.
    """
    def setUp(self):
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='oracle',
                                           host='some_host', port=3306, user='ins_user', password='some_str',
                                           sid='some_id')
        self.wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=self.wf)
        self.sys_config = SysConfig()
    def tearDown(self):
        self.ins.delete()
        self.sys_config.purge()
        SqlWorkflow.objects.all().delete()
        SqlWorkflowContent.objects.all().delete()
    @patch('cx_Oracle.makedsn')
    @patch('cx_Oracle.connect')
    def test_get_connection(self, _connect, _makedsn):
        """Connects via SID, then via service_name; neither set -> ValueError."""
        # 1) SID configured in setUp.
        new_engine = OracleEngine(self.ins)
        new_engine.get_connection()
        _connect.assert_called_once()
        _makedsn.assert_called_once()
        _connect.reset_mock()
        _makedsn.reset_mock()
        # 2) service_name only.
        self.ins.service_name = 'some_service'
        self.ins.sid = ''
        self.ins.save()
        new_engine = OracleEngine(self.ins)
        new_engine.get_connection()
        _connect.assert_called_once()
        _makedsn.assert_called_once()
        _connect.reset_mock()
        _makedsn.reset_mock()
        # 3) neither SID nor service_name.
        self.ins.service_name = ''
        self.ins.sid = ''
        self.ins.save()
        new_engine = OracleEngine(self.ins)
        with self.assertRaises(ValueError):
            new_engine.get_connection()
    @patch('cx_Oracle.connect')
    def test_engine_base_info(self, _conn):
        """name/info are static; server_version is parsed from the connection's version."""
        new_engine = OracleEngine(instance=self.ins)
        self.assertEqual(new_engine.name, 'Oracle')
        self.assertEqual(new_engine.info, 'Oracle engine')
        _conn.return_value.version = '12.1.0.2.0'
        self.assertTupleEqual(new_engine.server_version, ('12', '1', '0'))
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_query(self, _conn, _cursor, _execute):
        # With a limit the engine uses fetchmany (bottom-up decorators: _conn is connect).
        _conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)]
        new_engine = OracleEngine(instance=self.ins)
        query_result = new_engine.query(db_name='archery', sql='select 1', limit_num=100)
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_query_not_limit(self, _conn, _cursor, _execute):
        # limit_num=0 -> fetchall path.
        _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)]
        new_engine = OracleEngine(instance=self.ins)
        query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0)
        self.assertIsInstance(query_result, ResultSet)
        self.assertListEqual(query_result.rows, [(1,)])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)]))
    def test_get_all_databases(self, _query):
        """Public variant filters out Oracle system accounts."""
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine.get_all_databases()
        self.assertListEqual(dbs.rows, ['archery'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)]))
    def test__get_all_databases(self, _query):
        """Private variant keeps system accounts unfiltered."""
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine._get_all_databases()
        self.assertListEqual(dbs.rows, ['AUD_SYS', 'archery', 'ANONYMOUS'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('archery',)]))
    def test__get_all_instances(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        dbs = new_engine._get_all_instances()
        self.assertListEqual(dbs.rows, ['archery'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('ANONYMOUS',), ('archery',), ('SYSTEM',)]))
    def test_get_all_schemas(self, _query):
        """Schema listing drops the built-in ANONYMOUS/SYSTEM schemas."""
        new_engine = OracleEngine(instance=self.ins)
        schemas = new_engine._get_all_schemas()
        self.assertListEqual(schemas.rows, ['archery'])
    @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)]))
    def test_get_all_tables(self, _query):
        # NOTE(review): only 'test2' is expected — 'test%'-named tables appear to be
        # filtered by the engine; confirm against OracleEngine.get_all_tables.
        new_engine = OracleEngine(instance=self.ins)
        tables = new_engine.get_all_tables(db_name='archery')
        self.assertListEqual(tables.rows, ['test2'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('id',), ('name',)]))
    def test_get_all_columns_by_tb(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2')
        self.assertListEqual(columns.rows, ['id', 'name'])
    @patch('sql.engines.oracle.OracleEngine.query',
           return_value=ResultSet(rows=[('archery',), ('template1',), ('template0',)]))
    def test_describe_table(self, _query):
        new_engine = OracleEngine(instance=self.ins)
        describe = new_engine.describe_table(db_name='archery', tb_name='text')
        self.assertIsInstance(describe, ResultSet)
    def test_query_check_disable_sql(self):
        """DML is rejected by query_check."""
        sql = "update xxx set a=1;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '不支持语法!', 'bad_query': True, 'filtered_sql': sql.strip(';'),
                              'has_star': False})
    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    def test_query_check_star_sql(self, _explain_check):
        """`select *` is flagged but not rejected."""
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '禁止使用 * 关键词\n', 'bad_query': False, 'filtered_sql': sql.strip(';'),
                              'has_star': True})
    def test_query_check_IndexError(self):
        """An empty SQL string is rejected as 'no valid SQL statement'."""
        sql = ""
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '没有有效的SQL语句', 'bad_query': True, 'filtered_sql': sql.strip(), 'has_star': False})
    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    def test_query_check_plus(self, _explain_check):
        """The '+' keyword (legacy Oracle outer-join syntax) is rejected."""
        sql = "select 100+1 from tb;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.query_check(db_name='archery', sql=sql)
        self.assertDictEqual(check_result,
                             {'msg': '禁止使用 + 关键词\n', 'bad_query': True, 'filtered_sql': sql.strip(';'),
                              'has_star': False})
    def test_filter_sql_with_delimiter(self):
        """Oracle limiting wraps the statement in a rownum subquery."""
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100")
    def test_filter_sql_with_delimiter_and_where(self):
        sql = "select * from xx where id>1;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result,
                         "select sql_audit.* from (select * from xx where id>1) sql_audit where rownum <= 100")
    def test_filter_sql_without_delimiter(self):
        sql = "select * from xx;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=100)
        self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100")
    def test_filter_sql_with_limit(self):
        # A (non-Oracle) LIMIT clause is kept verbatim inside the rownum wrapper.
        sql = "select * from xx limit 10;"
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.filter_sql(sql=sql, limit_num=1)
        self.assertEqual(check_result,
                         "select sql_audit.* from (select * from xx limit 10) sql_audit where rownum <= 1")
    def test_query_masking(self):
        """query_masking is a pass-through for Oracle."""
        query_result = ResultSet()
        new_engine = OracleEngine(instance=self.ins)
        masking_result = new_engine.query_masking(schema_name='', sql='select 1', resultset=query_result)
        self.assertEqual(masking_result, query_result)
    def test_execute_check_select_sql(self):
        """SELECT submitted for execution is rejected; the row carries sqlparse-formatted SQL."""
        sql = 'select * from user;'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回不支持语句',
                           errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'))
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    def test_execute_check_critical_sql(self):
        """A statement matching the critical-DDL regex is rejected as high-risk."""
        self.sys_config.set('critical_ddl_regex', '^|update')
        self.sys_config.get_all_config()
        sql = 'update user set id=1'
        row = ReviewResult(id=1, errlevel=2,
                           stagestatus='驳回高危SQL',
                           errormessage='禁止提交匹配' + '^|update' + '条件的语句!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'))
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0})
    @patch('sql.engines.oracle.OracleEngine.get_sql_first_object_name', return_value='tb')
    @patch('sql.engines.oracle.OracleEngine.object_name_check', return_value=True)
    def test_execute_check_normal_sql(self, _explain_check, _get_sql_first_object_name, _object_name_check):
        """Unsupported-but-allowed syntax is passed through with a warning (errlevel 1)."""
        self.sys_config.purge()
        sql = 'alter table tb set id=1'
        row = ReviewResult(id=1,
                           errlevel=1,
                           stagestatus='当前平台,此语法不支持审核!',
                           errormessage='当前平台,此语法不支持审核!',
                           sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'),
                           affected_rows=0,
                           execute_time=0,
                           stmt_type='SQL',
                           object_owner='',
                           object_type='',
                           object_name='',
                           )
        new_engine = OracleEngine(instance=self.ins)
        check_result = new_engine.execute_check(db_name='archery', sql=sql)
        self.assertIsInstance(check_result, ReviewSet)
        self.assertEqual(check_result.rows[0].__dict__, row.__dict__)
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect')
    def test_execute_workflow_success(self, _conn, _cursor, _execute):
        """execute_workflow produces rows with the expected field set (keys compared)."""
        sql = 'update user set id=1'
        review_row = ReviewResult(id=1,
                                  errlevel=0,
                                  stagestatus='Execute Successfully',
                                  errormessage='None',
                                  sql=sql,
                                  affected_rows=0,
                                  execute_time=0,
                                  stmt_type='SQL',
                                  object_owner='',
                                  object_type='',
                                  object_name='', )
        execute_row = ReviewResult(id=1,
                                   errlevel=0,
                                   stagestatus='Execute Successfully',
                                   errormessage='None',
                                   sql=sql,
                                   affected_rows=0,
                                   execute_time=0)
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql,
                                          review_content=ReviewSet(rows=[review_row]).json())
        new_engine = OracleEngine(instance=self.ins)
        execute_result = new_engine.execute_workflow(workflow=wf)
        self.assertIsInstance(execute_result, ReviewSet)
        self.assertEqual(execute_result.rows[0].__dict__.keys(), execute_row.__dict__.keys())
    @patch('cx_Oracle.connect.cursor.execute')
    @patch('cx_Oracle.connect.cursor')
    @patch('cx_Oracle.connect', return_value=RuntimeError)
    def test_execute_workflow_exception(self, _conn, _cursor, _execute):
        """A broken connection (connect patched to a non-connection) surfaces as AttributeError."""
        sql = 'update user set id=1'
        row = ReviewResult(id=1,
                           errlevel=2,
                           stagestatus='Execute Failed',
                           errormessage=f'异常信息:{f"Oracle命令执行报错,语句:{sql}"}',
                           sql=sql,
                           affected_rows=0,
                           execute_time=0,
                           stmt_type='SQL',
                           object_owner='',
                           object_type='',
                           object_name='',
                           )
        wf = SqlWorkflow.objects.create(
            workflow_name='some_name',
            group_id=1,
            group_name='g1',
            engineer_display='',
            audit_auth_groups='some_group',
            create_time=datetime.now() - timedelta(days=1),
            status='workflow_finish',
            is_backup=True,
            instance=self.ins,
            db_name='some_db',
            syntax_type=1
        )
        SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql, review_content=ReviewSet(rows=[row]).json())
        # Assertions inside the context only run if execute_workflow does NOT raise.
        with self.assertRaises(AttributeError):
            new_engine = OracleEngine(instance=self.ins)
            execute_result = new_engine.execute_workflow(workflow=wf)
            self.assertIsInstance(execute_result, ReviewSet)
            self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys())
class MongoTest(TestCase):
    """Tests for the MongoDB engine; pymongo / get_connection are mocked."""
    def setUp(self) -> None:
        self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mongo',
                                           host='some_host', port=3306, user='ins_user')
        self.engine = MongoEngine(instance=self.ins)
    def tearDown(self) -> None:
        self.ins.delete()
    @patch('sql.engines.mongo.pymongo')
    def test_get_connection(self, mock_pymongo):
        """get_connection builds exactly one MongoClient."""
        _ = self.engine.get_connection()
        mock_pymongo.MongoClient.assert_called_once()
    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_query(self, mock_get_connection):
        """A JSON-style query returns a ResultSet."""
        test_sql = """{"collection": "job","count": true}"""
        self.assertIsInstance(self.engine.query('archery', test_sql), ResultSet)
    def test_query_check(self):
        """A well-formed JSON query is not flagged as a bad query."""
        test_sql = """{"collection": "job","count": true}"""
        check_result = self.engine.query_check(sql=test_sql)
        self.assertEqual(False, check_result.get('bad_query'))
    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_get_all_databases(self, mock_get_connection):
        db_list = self.engine.get_all_databases()
        self.assertIsInstance(db_list, ResultSet)
    @patch('sql.engines.mongo.MongoEngine.get_connection')
    def test_get_all_tables(self, mock_get_connection):
        """Collections of the requested database are returned as the result rows."""
        mock_db = Mock()
        mock_db.list_collection_names.return_value = ['u', 'v', 'w']
        # The connection is indexed like a dict: conn['some_db'] -> database object.
        mock_get_connection.return_value = {'some_db': mock_db}
        table_list = self.engine.get_all_tables('some_db')
        mock_db.list_collection_names.assert_called_once()
        self.assertEqual(table_list.rows, ['u', 'v', 'w'])
| true | true |
f72526b6ad715d5466f1db2a8ab71d035fe309e3 | 567 | py | Python | Python/Xiaoxiang_Python/lec04/BMR_1.0.py | kevindeng123/Programming | a06e9f7773fc083bcb153af21e6e9942a4114b4a | [
"MIT"
] | null | null | null | Python/Xiaoxiang_Python/lec04/BMR_1.0.py | kevindeng123/Programming | a06e9f7773fc083bcb153af21e6e9942a4114b4a | [
"MIT"
] | null | null | null | Python/Xiaoxiang_Python/lec04/BMR_1.0.py | kevindeng123/Programming | a06e9f7773fc083bcb153af21e6e9942a4114b4a | [
"MIT"
] | null | null | null | """
作者:邓经纬
功能:BMR 计算器
版本:1.0
日期:26/10/2018
"""
def main():
"""
主函数
"""
# 性别
gender = '男'
# 体重
weight = 70
# 身高(cm)
height = 175
# 年龄
age = 25
if gender == '男':
# 男性
bmr = (13.7 * weight) + (5.0 * height) - (6.8 * age) + 66
elif gender == '女':
# 女性
bmr = (9.6 * weight) + (1.8 * height) - (4.7 * age) +655
else:
bmr = -1
if bmr != -1:
print('基础代谢率(大卡)', bmr)
else:
print('暂时不支持该性别。')
if __name__ == '__main__':
main() | 14.538462 | 65 | 0.391534 |
def main():
    """Entry point: compute and print the basal metabolic rate (kcal) for a fixed subject.

    Only '男' (male) and '女' (female) are supported; any other gender prints
    an "unsupported" message.
    """
    gender = '男'
    weight = 70   # kg
    height = 175  # cm
    age = 25      # years
    if gender == '男':
        bmr = (13.7 * weight) + (5.0 * height) - (6.8 * age) + 66
    elif gender == '女':
        bmr = (9.6 * weight) + (1.8 * height) - (4.7 * age) + 655
    else:
        # Sentinel meaning "unsupported gender".
        bmr = -1
    if bmr != -1:
        print('基础代谢率(大卡)', bmr)
    else:
        print('暂时不支持该性别。')


if __name__ == '__main__':
    main()
f725288fe158ce24e1007dc29528d22db5be4be5 | 1,248 | py | Python | tests/config/test_bucket_data_path_config.py | ranbb/justmltools | 06fa74d04eb915afbe32617eb24bcb70396289d7 | [
"MIT"
] | null | null | null | tests/config/test_bucket_data_path_config.py | ranbb/justmltools | 06fa74d04eb915afbe32617eb24bcb70396289d7 | [
"MIT"
] | 3 | 2021-09-17T06:42:03.000Z | 2021-09-17T06:42:04.000Z | tests/config/test_bucket_data_path_config.py | ranbb/justmltools | 06fa74d04eb915afbe32617eb24bcb70396289d7 | [
"MIT"
] | 1 | 2020-06-24T15:17:59.000Z | 2020-06-24T15:17:59.000Z | from unittest import TestCase
from justmltools.config.bucket_data_path_config import BucketDataPathConfig
PREFIX = "my_bucket_key_prefix"
class TestBucketDataPathConfig(TestCase):
    """Unit tests for BucketDataPathConfig path construction under a fixed prefix."""
    def setUp(self) -> None:
        # "sut" = system under test.
        self.sut: BucketDataPathConfig = BucketDataPathConfig(prefix=PREFIX)
    def test_get_prefix(self):
        self.assertEqual(PREFIX, self.sut.get_prefix())
    def test_get_input_config_path(self):
        self.assertEqual(f"{PREFIX}/input/config", self.sut.get_input_config_path())
    def test_get_input_data_path(self):
        self.assertEqual(f"{PREFIX}/input/data", self.sut.get_input_data_path())
    def test_get_model_path(self):
        self.assertEqual(f"{PREFIX}/model", self.sut.get_model_path())
    def test_get_output_path(self):
        self.assertEqual(f"{PREFIX}/output", self.sut.get_output_path())
    def test_join_one_more_level(self):
        """join accepts a single path segment as a plain string."""
        joined_path: str = self.sut.join(self.sut.get_output_path(), "my_file")
        self.assertEqual(f"{PREFIX}/output/my_file", joined_path)
    def test_join_two_more_levels(self):
        """join accepts multiple path segments as a list."""
        joined_path: str = self.sut.join(self.sut.get_output_path(), ["my_sub_dir", "my_file"])
        self.assertEqual(f"{PREFIX}/output/my_sub_dir/my_file", joined_path)
from justmltools.config.bucket_data_path_config import BucketDataPathConfig
PREFIX = "my_bucket_key_prefix"
class TestBucketDataPathConfig(TestCase):
def setUp(self) -> None:
self.sut: BucketDataPathConfig = BucketDataPathConfig(prefix=PREFIX)
def test_get_prefix(self):
self.assertEqual(PREFIX, self.sut.get_prefix())
def test_get_input_config_path(self):
self.assertEqual(f"{PREFIX}/input/config", self.sut.get_input_config_path())
def test_get_input_data_path(self):
self.assertEqual(f"{PREFIX}/input/data", self.sut.get_input_data_path())
def test_get_model_path(self):
self.assertEqual(f"{PREFIX}/model", self.sut.get_model_path())
def test_get_output_path(self):
self.assertEqual(f"{PREFIX}/output", self.sut.get_output_path())
def test_join_one_more_level(self):
joined_path: str = self.sut.join(self.sut.get_output_path(), "my_file")
self.assertEqual(f"{PREFIX}/output/my_file", joined_path)
def test_join_two_more_levels(self):
joined_path: str = self.sut.join(self.sut.get_output_path(), ["my_sub_dir", "my_file"])
self.assertEqual(f"{PREFIX}/output/my_sub_dir/my_file", joined_path)
| true | true |
f72528dd8a067e6d5e70862cf8082bf8332d66e0 | 572 | py | Python | revelation/app/hook/hooks/movieform.py | uyamazak/oceanus | 6158cdc313a381f8228562605d33713ad4e776f1 | [
"MIT"
] | 8 | 2017-02-10T07:24:43.000Z | 2019-06-03T07:45:29.000Z | revelation/app/hook/hooks/movieform.py | uyamazak/oceanus | 6158cdc313a381f8228562605d33713ad4e776f1 | [
"MIT"
] | 3 | 2017-02-20T10:24:20.000Z | 2017-08-15T04:54:36.000Z | revelation/app/hook/hooks/movieform.py | uyamazak/oceanus | 6158cdc313a381f8228562605d33713ad4e776f1 | [
"MIT"
] | null | null | null | from hook.base import BaseHook
from task.gspread.tasks import send2ws
class MovieformHook(BaseHook):
def main(self) -> int:
channel = self.item.get("channel")
if channel != "movieform":
return 0
data = self.item.get("data")
dt = self.item.get("dt")
count = 1
values = (dt,
data.get("cname"),
data.get("uid"),
data.get("url"),
)
send2ws.delay(data=values,
title_prefix="movie_")
return count
| 22 | 44 | 0.48951 | from hook.base import BaseHook
from task.gspread.tasks import send2ws
class MovieformHook(BaseHook):
def main(self) -> int:
channel = self.item.get("channel")
if channel != "movieform":
return 0
data = self.item.get("data")
dt = self.item.get("dt")
count = 1
values = (dt,
data.get("cname"),
data.get("uid"),
data.get("url"),
)
send2ws.delay(data=values,
title_prefix="movie_")
return count
| true | true |
f72528ea404b5476c1151c85d0a3295f31a0e405 | 6,337 | py | Python | main.py | kb1p/json-inspector | 388596bf2d2eb014ab070e9fbc8a5e5d90eb00df | [
"MIT"
] | null | null | null | main.py | kb1p/json-inspector | 388596bf2d2eb014ab070e9fbc8a5e5d90eb00df | [
"MIT"
] | null | null | null | main.py | kb1p/json-inspector | 388596bf2d2eb014ab070e9fbc8a5e5d90eb00df | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Jan 28 19:58:07 2020
@author: kb1p
"""
import sys
import PyQt5.QtCore as Core
import PyQt5.QtWidgets as Gui
import PyQt5.QtGui as GuiMisc
import data_models
import dialogs
import json
class MainWindow(Gui.QMainWindow):
__slots__ = "tvStructure", "tblProps", "mdlStructure", "mdlProps", "currentFile", "config", \
"splitter", "editorDlg"
def __init__(self, p = None):
Gui.QMainWindow.__init__(self, parent = p)
# Work area
self.tvStructure = Gui.QTreeView(self)
self.tvStructure.setHeaderHidden(True)
self.tvStructure.setSelectionMode(Gui.QAbstractItemView.SingleSelection)
self.tblProps = Gui.QTableView(self)
self.splitter = Gui.QSplitter(self)
self.splitter.addWidget(self.tvStructure)
self.splitter.addWidget(self.tblProps)
self.setCentralWidget(self.splitter)
# Menu
mnuBar = Gui.QMenuBar(self)
mnuFile = mnuBar.addMenu("File")
mnuFile.addAction("Open", self.openScene, GuiMisc.QKeySequence("Ctrl+O"))
mnuFile.addAction("Save as...", self.saveSceneAs, GuiMisc.QKeySequence("Ctrl+S"))
mnuFile.addSeparator()
mnuFile.addAction("Exit", self.close)
mnuElem = mnuBar.addMenu("Element")
# mnuElem.addAction("Add sub-element", self.addElement, GuiMisc.QKeySequence("Ctrl+A"))
mnuElem.addAction("Edit JSON code", self.editElement, GuiMisc.QKeySequence("Ctrl+E"))
mnuElem.addAction("Remove", self.removeElement, GuiMisc.QKeySequence("Ctrl+R"))
self.setMenuBar(mnuBar)
self.mdlStructure = data_models.JSONTreeModel(self)
self.tvStructure.setModel(self.mdlStructure)
self.mdlProps = data_models.JSONPropertiesModel(self)
self.tblProps.setModel(self.mdlProps)
self.tvStructure.selectionModel().currentChanged.connect(self.showElement)
self.setCurrentFile(None)
self.statusBar().showMessage("No selection")
self.resize(500, 450)
self.config = Core.QSettings("kb1p", "json-inspector")
k = self.config.value("main/geometry")
if k != None:
self.restoreGeometry(k)
k = self.config.value("main/state")
if k != None:
self.restoreState(k)
k = self.config.value("splitter/state")
if k != None:
self.splitter.restoreState(k)
self.editorDlg = dialogs.EditorDialog(self, self.config)
def showElement(self, index, prevIndex):
self.mdlProps.displayElement(index)
assert self.mdlProps.selection != None
self.statusBar().showMessage(self.mdlProps.selection.fullPath())
def editElement(self):
idx = self.tvStructure.selectionModel().currentIndex()
try:
if not idx.isValid():
raise RuntimeError("Element is not selected")
elm = idx.internalPointer()
jsIn = data_models.serializeTree(elm)
strIn = json.dumps(jsIn, indent = 4, separators = (",", ": "), sort_keys = True)
strOut = self.editorDlg.requestText(elm.fullPath(), strIn)
if strOut != strIn:
jsOut = json.loads(strOut)
self.mdlStructure.layoutAboutToBeChanged.emit()
data_models.rebuildTree(jsOut, elm)
self.mdlStructure.layoutChanged.emit()
self.mdlProps.displayElement(idx)
except json.JSONDecodeError as err:
line = err.doc.splitlines()[err.lineno - 1]
Gui.QMessageBox.critical(self, \
"JSON syntax error", \
"Illegal JSON syntax: %s.\nMalformed line:\n%s" % \
(err.msg, line))
except RuntimeError as err:
Gui.QMessageBox.critical(self, "Error", str(err))
def removeElement(self):
idx = self.tvStructure.selectionModel().currentIndex()
try:
if not idx.isValid():
raise RuntimeError("Illegal element selected")
if not idx.parent().isValid():
raise RuntimeError("Cannot remove root element")
name = str(idx.data())
if Gui.QMessageBox.question(self, \
"Confirmation required", \
"Are you sure want to remove element %s?" % name) == Gui.QMessageBox.Yes:
parIdx = idx.parent()
self.mdlStructure.removeRow(idx.row(), parIdx)
self.tvStructure.selectionModel().setCurrentIndex(parIdx, Core.QItemSelectionModel.Current)
except RuntimeError as err:
Gui.QMessageBox.critical(self, "Error", str(err))
def setCurrentFile(self, fn):
self.currentFile = fn
t = self.currentFile if self.currentFile != None else "<no data>"
self.window().setWindowTitle("JSON inspector: %s" % t)
def openScene(self):
fn, _ = Gui.QFileDialog.getOpenFileName(self, "Select input file", filter = "JSON files (*.json *.gltf)")
if len(fn) > 0:
with open(fn, "r") as fin:
d = json.load(fin)
self.mdlStructure.loadData(d)
self.mdlProps.displayElement(None)
self.setCurrentFile(fn)
def saveSceneAs(self):
if self.currentFile == None:
Gui.QMessageBox.warning(self, "Warning", "No data was loaded - nothing to save")
return
fn, _ = Gui.QFileDialog.getSaveFileName(self, "Select output file", filter = "JSON files (*.json *.gltf)")
if len(fn) > 0:
with open(fn, "w") as fout:
d = self.mdlStructure.getData()
json.dump(d, fout, indent = 4, separators = (",", ": "), sort_keys = True)
self.setCurrentFile(fn)
def closeEvent(self, evt):
self.config.setValue("main/geometry", self.saveGeometry())
self.config.setValue("main/state", self.saveState())
self.config.setValue("splitter/state", self.splitter.saveState())
Gui.QMainWindow.closeEvent(self, evt)
def main(args):
app = Gui.QApplication(sys.argv)
win = MainWindow()
win.show()
return app.exec_()
if __name__ == "__main__":
sys.exit(main(sys.argv))
| 40.107595 | 114 | 0.603125 |
import sys
import PyQt5.QtCore as Core
import PyQt5.QtWidgets as Gui
import PyQt5.QtGui as GuiMisc
import data_models
import dialogs
import json
class MainWindow(Gui.QMainWindow):
__slots__ = "tvStructure", "tblProps", "mdlStructure", "mdlProps", "currentFile", "config", \
"splitter", "editorDlg"
def __init__(self, p = None):
Gui.QMainWindow.__init__(self, parent = p)
self.tvStructure = Gui.QTreeView(self)
self.tvStructure.setHeaderHidden(True)
self.tvStructure.setSelectionMode(Gui.QAbstractItemView.SingleSelection)
self.tblProps = Gui.QTableView(self)
self.splitter = Gui.QSplitter(self)
self.splitter.addWidget(self.tvStructure)
self.splitter.addWidget(self.tblProps)
self.setCentralWidget(self.splitter)
mnuBar = Gui.QMenuBar(self)
mnuFile = mnuBar.addMenu("File")
mnuFile.addAction("Open", self.openScene, GuiMisc.QKeySequence("Ctrl+O"))
mnuFile.addAction("Save as...", self.saveSceneAs, GuiMisc.QKeySequence("Ctrl+S"))
mnuFile.addSeparator()
mnuFile.addAction("Exit", self.close)
mnuElem = mnuBar.addMenu("Element")
mnuElem.addAction("Edit JSON code", self.editElement, GuiMisc.QKeySequence("Ctrl+E"))
mnuElem.addAction("Remove", self.removeElement, GuiMisc.QKeySequence("Ctrl+R"))
self.setMenuBar(mnuBar)
self.mdlStructure = data_models.JSONTreeModel(self)
self.tvStructure.setModel(self.mdlStructure)
self.mdlProps = data_models.JSONPropertiesModel(self)
self.tblProps.setModel(self.mdlProps)
self.tvStructure.selectionModel().currentChanged.connect(self.showElement)
self.setCurrentFile(None)
self.statusBar().showMessage("No selection")
self.resize(500, 450)
self.config = Core.QSettings("kb1p", "json-inspector")
k = self.config.value("main/geometry")
if k != None:
self.restoreGeometry(k)
k = self.config.value("main/state")
if k != None:
self.restoreState(k)
k = self.config.value("splitter/state")
if k != None:
self.splitter.restoreState(k)
self.editorDlg = dialogs.EditorDialog(self, self.config)
def showElement(self, index, prevIndex):
self.mdlProps.displayElement(index)
assert self.mdlProps.selection != None
self.statusBar().showMessage(self.mdlProps.selection.fullPath())
def editElement(self):
idx = self.tvStructure.selectionModel().currentIndex()
try:
if not idx.isValid():
raise RuntimeError("Element is not selected")
elm = idx.internalPointer()
jsIn = data_models.serializeTree(elm)
strIn = json.dumps(jsIn, indent = 4, separators = (",", ": "), sort_keys = True)
strOut = self.editorDlg.requestText(elm.fullPath(), strIn)
if strOut != strIn:
jsOut = json.loads(strOut)
self.mdlStructure.layoutAboutToBeChanged.emit()
data_models.rebuildTree(jsOut, elm)
self.mdlStructure.layoutChanged.emit()
self.mdlProps.displayElement(idx)
except json.JSONDecodeError as err:
line = err.doc.splitlines()[err.lineno - 1]
Gui.QMessageBox.critical(self, \
"JSON syntax error", \
"Illegal JSON syntax: %s.\nMalformed line:\n%s" % \
(err.msg, line))
except RuntimeError as err:
Gui.QMessageBox.critical(self, "Error", str(err))
def removeElement(self):
idx = self.tvStructure.selectionModel().currentIndex()
try:
if not idx.isValid():
raise RuntimeError("Illegal element selected")
if not idx.parent().isValid():
raise RuntimeError("Cannot remove root element")
name = str(idx.data())
if Gui.QMessageBox.question(self, \
"Confirmation required", \
"Are you sure want to remove element %s?" % name) == Gui.QMessageBox.Yes:
parIdx = idx.parent()
self.mdlStructure.removeRow(idx.row(), parIdx)
self.tvStructure.selectionModel().setCurrentIndex(parIdx, Core.QItemSelectionModel.Current)
except RuntimeError as err:
Gui.QMessageBox.critical(self, "Error", str(err))
def setCurrentFile(self, fn):
self.currentFile = fn
t = self.currentFile if self.currentFile != None else "<no data>"
self.window().setWindowTitle("JSON inspector: %s" % t)
def openScene(self):
fn, _ = Gui.QFileDialog.getOpenFileName(self, "Select input file", filter = "JSON files (*.json *.gltf)")
if len(fn) > 0:
with open(fn, "r") as fin:
d = json.load(fin)
self.mdlStructure.loadData(d)
self.mdlProps.displayElement(None)
self.setCurrentFile(fn)
def saveSceneAs(self):
if self.currentFile == None:
Gui.QMessageBox.warning(self, "Warning", "No data was loaded - nothing to save")
return
fn, _ = Gui.QFileDialog.getSaveFileName(self, "Select output file", filter = "JSON files (*.json *.gltf)")
if len(fn) > 0:
with open(fn, "w") as fout:
d = self.mdlStructure.getData()
json.dump(d, fout, indent = 4, separators = (",", ": "), sort_keys = True)
self.setCurrentFile(fn)
def closeEvent(self, evt):
self.config.setValue("main/geometry", self.saveGeometry())
self.config.setValue("main/state", self.saveState())
self.config.setValue("splitter/state", self.splitter.saveState())
Gui.QMainWindow.closeEvent(self, evt)
def main(args):
app = Gui.QApplication(sys.argv)
win = MainWindow()
win.show()
return app.exec_()
if __name__ == "__main__":
sys.exit(main(sys.argv))
| true | true |
f7252950efe183c9396feb149b40fe1dc37cdd64 | 10,665 | py | Python | Thesis@3.9.1/Lib/site-packages/django/db/models/fields/reverse_related.py | nverbois/TFE21-232 | 7113837b5263b5c508bfc6903cb6982b48aa7ee4 | [
"MIT"
] | null | null | null | Thesis@3.9.1/Lib/site-packages/django/db/models/fields/reverse_related.py | nverbois/TFE21-232 | 7113837b5263b5c508bfc6903cb6982b48aa7ee4 | [
"MIT"
] | null | null | null | Thesis@3.9.1/Lib/site-packages/django/db/models/fields/reverse_related.py | nverbois/TFE21-232 | 7113837b5263b5c508bfc6903cb6982b48aa7ee4 | [
"MIT"
] | null | null | null | """
"Rel objects" for related fields.
"Rel objects" (for lack of a better name) carry information about the relation
modeled by a related field and provide some utility functions. They're stored
in the ``remote_field`` attribute of the field.
They also act as reverse fields for the purposes of the Meta API because
they're the closest concept currently available.
"""
from django.core import exceptions
from django.utils.functional import cached_property
from . import BLANK_CHOICE_DASH
from .mixins import FieldCacheMixin
class ForeignObjectRel(FieldCacheMixin):
"""
Used by ForeignObject to store information about the relation.
``_meta.get_fields()`` returns this class to provide access to the field
flags for the reverse relation.
"""
# Field flags
auto_created = True
concrete = False
editable = False
is_relation = True
# Reverse relations are always nullable (Django can't enforce that a
# foreign key on the related model points to this model).
null = True
empty_strings_allowed = False
def __init__(
self,
field,
to,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
on_delete=None,
):
self.field = field
self.model = to
self.related_name = related_name
self.related_query_name = related_query_name
self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
self.parent_link = parent_link
self.on_delete = on_delete
self.symmetrical = False
self.multiple = True
# Some of the following cached_properties can't be initialized in
# __init__ as the field doesn't have its model yet. Calling these methods
# before field.contribute_to_class() has been called will result in
# AttributeError
@cached_property
def hidden(self):
return self.is_hidden()
@cached_property
def name(self):
return self.field.related_query_name()
@property
def remote_field(self):
return self.field
@property
def target_field(self):
"""
When filtering against this relation, return the field on the remote
model against which the filtering should happen.
"""
target_fields = self.get_path_info()[-1].target_fields
if len(target_fields) > 1:
raise exceptions.FieldError(
"Can't use target_field for multicolumn relations."
)
return target_fields[0]
@cached_property
def related_model(self):
if not self.field.model:
raise AttributeError(
"This property can't be accessed before self.field.contribute_to_class has been called."
)
return self.field.model
@cached_property
def many_to_many(self):
return self.field.many_to_many
@cached_property
def many_to_one(self):
return self.field.one_to_many
@cached_property
def one_to_many(self):
return self.field.many_to_one
@cached_property
def one_to_one(self):
return self.field.one_to_one
def get_lookup(self, lookup_name):
return self.field.get_lookup(lookup_name)
def get_internal_type(self):
return self.field.get_internal_type()
@property
def db_type(self):
return self.field.db_type
def __repr__(self):
return "<%s: %s.%s>" % (
type(self).__name__,
self.related_model._meta.app_label,
self.related_model._meta.model_name,
)
def get_choices(
self,
include_blank=True,
blank_choice=BLANK_CHOICE_DASH,
limit_choices_to=None,
ordering=(),
):
"""
Return choices with a default blank choices included, for use
as <select> choices for this field.
Analog of django.db.models.fields.Field.get_choices(), provided
initially for utilization by RelatedFieldListFilter.
"""
limit_choices_to = limit_choices_to or self.limit_choices_to
qs = self.related_model._default_manager.complex_filter(limit_choices_to)
if ordering:
qs = qs.order_by(*ordering)
return (blank_choice if include_blank else []) + [(x.pk, str(x)) for x in qs]
def is_hidden(self):
"""Should the related object be hidden?"""
return bool(self.related_name) and self.related_name[-1] == "+"
def get_joining_columns(self):
return self.field.get_reverse_joining_columns()
def get_extra_restriction(self, where_class, alias, related_alias):
return self.field.get_extra_restriction(where_class, related_alias, alias)
def set_field_name(self):
"""
Set the related field's name, this is not available until later stages
of app loading, so set_field_name is called from
set_attributes_from_rel()
"""
# By default foreign object doesn't relate to any remote field (for
# example custom multicolumn joins currently have no remote field).
self.field_name = None
def get_accessor_name(self, model=None):
# This method encapsulates the logic that decides what name to give an
# accessor descriptor that retrieves related many-to-one or
# many-to-many objects. It uses the lowercased object_name + "_set",
# but this can be overridden with the "related_name" option. Due to
# backwards compatibility ModelForms need to be able to provide an
# alternate model. See BaseInlineFormSet.get_default_prefix().
opts = model._meta if model else self.related_model._meta
model = model or self.related_model
if self.multiple:
# If this is a symmetrical m2m relation on self, there is no reverse accessor.
if self.symmetrical and model == self.model:
return None
if self.related_name:
return self.related_name
return opts.model_name + ("_set" if self.multiple else "")
def get_path_info(self, filtered_relation=None):
return self.field.get_reverse_path_info(filtered_relation)
def get_cache_name(self):
"""
Return the name of the cache key to use for storing an instance of the
forward model on the reverse model.
"""
return self.get_accessor_name()
class ManyToOneRel(ForeignObjectRel):
"""
Used by the ForeignKey field to store information about the relation.
``_meta.get_fields()`` returns this class to provide access to the field
flags for the reverse relation.
Note: Because we somewhat abuse the Rel objects by using them as reverse
fields we get the funny situation where
``ManyToOneRel.many_to_one == False`` and
``ManyToOneRel.one_to_many == True``. This is unfortunate but the actual
ManyToOneRel class is a private API and there is work underway to turn
reverse relations into actual fields.
"""
def __init__(
self,
field,
to,
field_name,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
on_delete=None,
):
super().__init__(
field,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
self.field_name = field_name
def __getstate__(self):
state = self.__dict__.copy()
state.pop("related_model", None)
return state
def get_related_field(self):
"""
Return the Field in the 'to' object to which this relationship is tied.
"""
field = self.model._meta.get_field(self.field_name)
if not field.concrete:
raise exceptions.FieldDoesNotExist(
"No related field named '%s'" % self.field_name
)
return field
def set_field_name(self):
self.field_name = self.field_name or self.model._meta.pk.name
class OneToOneRel(ManyToOneRel):
"""
Used by OneToOneField to store information about the relation.
``_meta.get_fields()`` returns this class to provide access to the field
flags for the reverse relation.
"""
def __init__(
self,
field,
to,
field_name,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
on_delete=None,
):
super().__init__(
field,
to,
field_name,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
self.multiple = False
class ManyToManyRel(ForeignObjectRel):
"""
Used by ManyToManyField to store information about the relation.
``_meta.get_fields()`` returns this class to provide access to the field
flags for the reverse relation.
"""
def __init__(
self,
field,
to,
related_name=None,
related_query_name=None,
limit_choices_to=None,
symmetrical=True,
through=None,
through_fields=None,
db_constraint=True,
):
super().__init__(
field,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
)
if through and not db_constraint:
raise ValueError("Can't supply a through model and db_constraint=False")
self.through = through
if through_fields and not through:
raise ValueError("Cannot specify through_fields without a through model")
self.through_fields = through_fields
self.symmetrical = symmetrical
self.db_constraint = db_constraint
def get_related_field(self):
"""
Return the field in the 'to' object to which this relationship is tied.
Provided for symmetry with ManyToOneRel.
"""
opts = self.through._meta
if self.through_fields:
field = opts.get_field(self.through_fields[0])
else:
for field in opts.fields:
rel = getattr(field, "remote_field", None)
if rel and rel.model == self.model:
break
return field.foreign_related_fields[0]
| 31.27566 | 104 | 0.644069 |
from django.core import exceptions
from django.utils.functional import cached_property
from . import BLANK_CHOICE_DASH
from .mixins import FieldCacheMixin
class ForeignObjectRel(FieldCacheMixin):
auto_created = True
concrete = False
editable = False
is_relation = True
# foreign key on the related model points to this model).
null = True
empty_strings_allowed = False
def __init__(
self,
field,
to,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
on_delete=None,
):
self.field = field
self.model = to
self.related_name = related_name
self.related_query_name = related_query_name
self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
self.parent_link = parent_link
self.on_delete = on_delete
self.symmetrical = False
self.multiple = True
# Some of the following cached_properties can't be initialized in
# before field.contribute_to_class() has been called will result in
# AttributeError
@cached_property
def hidden(self):
return self.is_hidden()
@cached_property
def name(self):
return self.field.related_query_name()
@property
def remote_field(self):
return self.field
@property
def target_field(self):
target_fields = self.get_path_info()[-1].target_fields
if len(target_fields) > 1:
raise exceptions.FieldError(
"Can't use target_field for multicolumn relations."
)
return target_fields[0]
@cached_property
def related_model(self):
if not self.field.model:
raise AttributeError(
"This property can't be accessed before self.field.contribute_to_class has been called."
)
return self.field.model
@cached_property
def many_to_many(self):
return self.field.many_to_many
@cached_property
def many_to_one(self):
return self.field.one_to_many
@cached_property
def one_to_many(self):
return self.field.many_to_one
@cached_property
def one_to_one(self):
return self.field.one_to_one
def get_lookup(self, lookup_name):
return self.field.get_lookup(lookup_name)
def get_internal_type(self):
return self.field.get_internal_type()
@property
def db_type(self):
return self.field.db_type
def __repr__(self):
return "<%s: %s.%s>" % (
type(self).__name__,
self.related_model._meta.app_label,
self.related_model._meta.model_name,
)
def get_choices(
self,
include_blank=True,
blank_choice=BLANK_CHOICE_DASH,
limit_choices_to=None,
ordering=(),
):
limit_choices_to = limit_choices_to or self.limit_choices_to
qs = self.related_model._default_manager.complex_filter(limit_choices_to)
if ordering:
qs = qs.order_by(*ordering)
return (blank_choice if include_blank else []) + [(x.pk, str(x)) for x in qs]
def is_hidden(self):
return bool(self.related_name) and self.related_name[-1] == "+"
def get_joining_columns(self):
return self.field.get_reverse_joining_columns()
def get_extra_restriction(self, where_class, alias, related_alias):
return self.field.get_extra_restriction(where_class, related_alias, alias)
def set_field_name(self):
# By default foreign object doesn't relate to any remote field (for
self.field_name = None
def get_accessor_name(self, model=None):
opts = model._meta if model else self.related_model._meta
model = model or self.related_model
if self.multiple:
if self.symmetrical and model == self.model:
return None
if self.related_name:
return self.related_name
return opts.model_name + ("_set" if self.multiple else "")
def get_path_info(self, filtered_relation=None):
return self.field.get_reverse_path_info(filtered_relation)
def get_cache_name(self):
return self.get_accessor_name()
class ManyToOneRel(ForeignObjectRel):
def __init__(
self,
field,
to,
field_name,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
on_delete=None,
):
super().__init__(
field,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
self.field_name = field_name
def __getstate__(self):
state = self.__dict__.copy()
state.pop("related_model", None)
return state
def get_related_field(self):
field = self.model._meta.get_field(self.field_name)
if not field.concrete:
raise exceptions.FieldDoesNotExist(
"No related field named '%s'" % self.field_name
)
return field
def set_field_name(self):
self.field_name = self.field_name or self.model._meta.pk.name
class OneToOneRel(ManyToOneRel):
def __init__(
self,
field,
to,
field_name,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
on_delete=None,
):
super().__init__(
field,
to,
field_name,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
self.multiple = False
class ManyToManyRel(ForeignObjectRel):
def __init__(
self,
field,
to,
related_name=None,
related_query_name=None,
limit_choices_to=None,
symmetrical=True,
through=None,
through_fields=None,
db_constraint=True,
):
super().__init__(
field,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
)
if through and not db_constraint:
raise ValueError("Can't supply a through model and db_constraint=False")
self.through = through
if through_fields and not through:
raise ValueError("Cannot specify through_fields without a through model")
self.through_fields = through_fields
self.symmetrical = symmetrical
self.db_constraint = db_constraint
def get_related_field(self):
opts = self.through._meta
if self.through_fields:
field = opts.get_field(self.through_fields[0])
else:
for field in opts.fields:
rel = getattr(field, "remote_field", None)
if rel and rel.model == self.model:
break
return field.foreign_related_fields[0]
| true | true |
f7252ab67d2d7b09f44add6e1853b7582a5b415f | 1,646 | py | Python | networks/resample2d_package/resample2d.py | ryannggy/fn-colab | 2aefe81bc50ec0e52c0eb7c5bc94178fa66d6ec8 | [
"Apache-2.0"
] | null | null | null | networks/resample2d_package/resample2d.py | ryannggy/fn-colab | 2aefe81bc50ec0e52c0eb7c5bc94178fa66d6ec8 | [
"Apache-2.0"
] | null | null | null | networks/resample2d_package/resample2d.py | ryannggy/fn-colab | 2aefe81bc50ec0e52c0eb7c5bc94178fa66d6ec8 | [
"Apache-2.0"
] | null | null | null | from torch.nn.modules.module import Module
from torch.autograd import Function, Variable
import resample2d_cuda
class Resample2dFunction(Function):
@staticmethod
def forward(ctx, input1, input2, kernel_size=1, bilinear= True):
assert input1.is_contiguous()
assert input2.is_contiguous()
ctx.save_for_backward(input1, input2)
ctx.kernel_size = kernel_size
ctx.bilinear = bilinear
_, d, _, _ = input1.size()
b, _, h, w = input2.size()
output = input1.new(b, d, h, w).zero_()
resample2d_cuda.forward(input1, input2, output, kernel_size, bilinear)
return output
@staticmethod
def backward(ctx, grad_output):
grad_output = grad_output.contiguous()
assert grad_output.is_contiguous()
input1, input2 = ctx.saved_tensors
grad_input1 = Variable(input1.new(input1.size()).zero_())
grad_input2 = Variable(input1.new(input2.size()).zero_())
resample2d_cuda.backward(input1, input2, grad_output.data,
grad_input1.data, grad_input2.data,
ctx.kernel_size, ctx.bilinear)
return grad_input1, grad_input2, None, None
class Resample2d(Module):
def __init__(self, kernel_size=1, bilinear = True):
super(Resample2d, self).__init__()
self.kernel_size = kernel_size
self.bilinear = bilinear
def forward(self, input1, input2):
input1_c = input1.contiguous()
return Resample2dFunction.apply(input1_c, input2, self.kernel_size, self.bilinear)
| 32.92 | 91 | 0.634265 | from torch.nn.modules.module import Module
from torch.autograd import Function, Variable
import resample2d_cuda
class Resample2dFunction(Function):
@staticmethod
def forward(ctx, input1, input2, kernel_size=1, bilinear= True):
assert input1.is_contiguous()
assert input2.is_contiguous()
ctx.save_for_backward(input1, input2)
ctx.kernel_size = kernel_size
ctx.bilinear = bilinear
_, d, _, _ = input1.size()
b, _, h, w = input2.size()
output = input1.new(b, d, h, w).zero_()
resample2d_cuda.forward(input1, input2, output, kernel_size, bilinear)
return output
@staticmethod
def backward(ctx, grad_output):
grad_output = grad_output.contiguous()
assert grad_output.is_contiguous()
input1, input2 = ctx.saved_tensors
grad_input1 = Variable(input1.new(input1.size()).zero_())
grad_input2 = Variable(input1.new(input2.size()).zero_())
resample2d_cuda.backward(input1, input2, grad_output.data,
grad_input1.data, grad_input2.data,
ctx.kernel_size, ctx.bilinear)
return grad_input1, grad_input2, None, None
class Resample2d(Module):
def __init__(self, kernel_size=1, bilinear = True):
super(Resample2d, self).__init__()
self.kernel_size = kernel_size
self.bilinear = bilinear
def forward(self, input1, input2):
input1_c = input1.contiguous()
return Resample2dFunction.apply(input1_c, input2, self.kernel_size, self.bilinear)
| true | true |
f7252c82c5f17709b13320a16f6f349e51df4253 | 6,134 | py | Python | cogs/permissions.py | cephox/bettermod | 13750ed01095976d594a96cf12b92bed340a4a8e | [
"MIT"
] | 1 | 2021-01-24T17:42:42.000Z | 2021-01-24T17:42:42.000Z | cogs/permissions.py | cephox/bettermod | 13750ed01095976d594a96cf12b92bed340a4a8e | [
"MIT"
] | null | null | null | cogs/permissions.py | cephox/bettermod | 13750ed01095976d594a96cf12b92bed340a4a8e | [
"MIT"
] | null | null | null | from datetime import datetime
from typing import Optional, Union
from discord.embeds import Embed
from discord.ext.commands import Cog, Context, group, has_permissions
from discord.member import Member
from discord.role import Role
from colors import Colors
from log import log
from permission import update_user_permission, list_user_permissions, get_user_permissions, has_own_permission, \
get_role_permissions, update_role_permission, list_role_permissions
from translation import get_user_language
class Permissions(Cog):
    """Administrator-only commands to inspect and edit the bot's own
    per-member and per-role permission store."""

    def __init__(self, bot):
        self.bot = bot

    @staticmethod
    def _format_permission_list(perms, lang):
        # Render each permission as a backticked "Title Case" line, or the
        # language's "none" string for an empty list.  Hoisted here so the
        # permission store is queried once instead of twice per embed.
        if not perms:
            return lang.none
        return "\n".join(f"`{p.title().replace('_', ' ')}`" for p in perms)

    @group(aliases=["permission"])
    @has_permissions(administrator=True)
    async def permissions(self, ctx: Context, mention: Union[Member, Role], permission: Optional[str] = "",
                          enabled: Optional[int] = -1):
        """Command entry point: dispatch to the member or role handler."""
        if isinstance(mention, Member):
            await self.member(ctx, mention, permission, enabled)
        elif isinstance(mention, Role):
            await self.role(ctx, mention, permission, enabled)

    async def member(self, ctx: Context, member: Member, permission: Optional[str] = "", enabled: Optional[int] = -1):
        """List, query or update a member's permissions.

        - no ``permission``: list everything the member has
        - ``permission`` only (``enabled == -1``): report whether it is set
        - ``permission`` and ``enabled``: update it and log the change
        """
        lang = get_user_language(ctx.author.id)
        if not permission:
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permissions_for(str(member)),
                            value=self._format_permission_list(list_user_permissions(member), lang))
            embed.set_thumbnail(url=member.avatar_url)
            await ctx.send(embed=embed)
            return
        if enabled == -1:
            # Query a single permission without changing it.
            perm = lang.yes if has_own_permission(permission, get_user_permissions(member)) else lang.no
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permission_for(permission, str(member)),
                            value=lang.enabled + f": `{perm}`")
            embed.set_thumbnail(url=member.avatar_url)
            await ctx.send(embed=embed)
            return
        # Capture the previous state first so the audit embed shows before/after.
        before = lang.yes if has_own_permission(permission, get_user_permissions(member)) else lang.no
        update_user_permission(member, permission, enabled > 0)
        embed = Embed(color=Colors.permission, timestamp=datetime.now())
        embed.add_field(name=lang.f_permissions_permission_set_for(str(member)),
                        value="`" + permission.title().replace("_", " ") + "` » `"
                              + (lang.yes if enabled > 0 else lang.no) + "`",
                        inline=False)
        embed.add_field(name=lang.permissions_permission_before, value=f"`{before}`", inline=False)
        embed.add_field(name=lang.permissions_permission_set_by, value=ctx.author.mention, inline=False)
        # Totals are read *after* the update so the embed reflects the change.
        embed.add_field(name=lang.permissions_permission_total,
                        value=self._format_permission_list(list_user_permissions(member), lang))
        embed.set_thumbnail(url=member.avatar_url)
        embed.set_footer(text=lang.member_id + ": " + str(member.id))
        await ctx.send(embed=embed)
        await log(ctx, embed=embed)

    async def role(self, ctx: Context, role: Role, permission: Optional[str] = "", enabled: Optional[int] = -1):
        """Role counterpart of :meth:`member` (no avatar thumbnail;
        list/query names are prefixed with ``@``)."""
        lang = get_user_language(ctx.author.id)
        if not permission:
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permissions_for("@" + str(role)),
                            value=self._format_permission_list(list_role_permissions(role), lang))
            await ctx.send(embed=embed)
            return
        if enabled == -1:
            perm = lang.yes if has_own_permission(permission, get_role_permissions(role)) else lang.no
            embed = Embed(color=Colors.permission, timestamp=datetime.now())
            embed.add_field(name=lang.f_permissions_permission_for(permission, "@" + str(role)),
                            value=lang.enabled + f": `{perm}`")
            await ctx.send(embed=embed)
            return
        before = lang.yes if has_own_permission(permission, get_role_permissions(role)) else lang.no
        update_role_permission(role, permission, enabled > 0)
        embed = Embed(color=Colors.permission, timestamp=datetime.now())
        embed.add_field(name=lang.f_permissions_permission_set_for(str(role)),
                        value="`" + permission.title().replace("_", " ") + "` » `"
                              + (lang.yes if enabled > 0 else lang.no) + "`",
                        inline=False)
        embed.add_field(name=lang.permissions_permission_before, value=f"`{before}`", inline=False)
        embed.add_field(name=lang.permissions_permission_set_by, value=ctx.author.mention, inline=False)
        embed.add_field(name=lang.permissions_permission_total,
                        value=self._format_permission_list(list_role_permissions(role), lang))
        embed.set_footer(text=lang.role_id + ": " + str(role.id))
        await ctx.send(embed=embed)
        await log(ctx, embed=embed)
def setup(bot):
    # discord.py extension hook, invoked by bot.load_extension() to
    # register this cog on the bot.
    bot.add_cog(Permissions(bot))
| 51.546218 | 118 | 0.584773 | from datetime import datetime
from typing import Optional, Union
from discord.embeds import Embed
from discord.ext.commands import Cog, Context, group, has_permissions
from discord.member import Member
from discord.role import Role
from colors import Colors
from log import log
from permission import update_user_permission, list_user_permissions, get_user_permissions, has_own_permission, \
get_role_permissions, update_role_permission, list_role_permissions
from translation import get_user_language
class Permissions(Cog):
def __init__(self, bot):
self.bot = bot
@group(aliases=["permission"])
@has_permissions(administrator=True)
async def permissions(self, ctx: Context, mention: Union[Member, Role], permission: Optional[str] = "",
enabled: Optional[int] = -1):
if isinstance(mention, Member):
await self.member(ctx, mention, permission, enabled)
elif isinstance(mention, Role):
await self.role(ctx, mention, permission, enabled)
async def member(self, ctx: Context, member: Member, permission: Optional[str] = "", enabled: Optional[int] = -1):
lang = get_user_language(ctx.author.id)
if not permission:
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permissions_for(str(member)),
value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
list_user_permissions(member)]) if list_user_permissions(
member) else lang.none)
embed.set_thumbnail(url=member.avatar_url)
await ctx.send(embed=embed)
return
if permission and enabled == -1:
perm = lang.yes if has_own_permission(permission, get_user_permissions(member)) else lang.no
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permission_for(permission, str(member)),
value=lang.enabled + f": `{perm}`")
embed.set_thumbnail(url=member.avatar_url)
await ctx.send(embed=embed)
return
if permission and enabled != -1:
before = lang.yes if has_own_permission(permission, get_user_permissions(member)) else lang.no
update_user_permission(member, permission, enabled > 0)
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permission_set_for(str(member)),
value="`" + permission.title().replace("_",
" ") + "` » `" + (
lang.yes if enabled > 0 else lang.no) + "`",
inline=False)
embed.add_field(name=lang.permissions_permission_before, value=f"`{before}`", inline=False)
embed.add_field(name=lang.permissions_permission_set_by, value=ctx.author.mention, inline=False)
embed.add_field(name=lang.permissions_permission_total,
value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
list_user_permissions(member)]) if list_user_permissions(
member) else lang.none)
embed.set_thumbnail(url=member.avatar_url)
embed.set_footer(text=lang.member_id + ": " + str(member.id))
await ctx.send(embed=embed)
await log(ctx, embed=embed)
async def role(self, ctx: Context, role: Role, permission: Optional[str] = "", enabled: Optional[int] = -1):
lang = get_user_language(ctx.author.id)
if not permission:
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permissions_for("@" + str(role)),
value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
list_role_permissions(role)]) if list_role_permissions(
role) else lang.none)
await ctx.send(embed=embed)
return
if permission and enabled == -1:
perm = lang.yes if has_own_permission(permission, get_role_permissions(role)) else lang.no
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permission_for(permission, "@" + str(role)),
value=lang.enabled + f": `{perm}`")
await ctx.send(embed=embed)
return
if permission and enabled != -1:
before = lang.yes if has_own_permission(permission, get_role_permissions(role)) else lang.no
update_role_permission(role, permission, enabled > 0)
embed = Embed(color=Colors.permission, timestamp=datetime.now())
embed.add_field(name=lang.f_permissions_permission_set_for(str(role)),
value="`" + permission.title().replace("_",
" ") + "` » `" + (
lang.yes if enabled > 0 else lang.no) + "`",
inline=False)
embed.add_field(name=lang.permissions_permission_before, value=f"`{before}`", inline=False)
embed.add_field(name=lang.permissions_permission_set_by, value=ctx.author.mention, inline=False)
embed.add_field(name=lang.permissions_permission_total,
value="\n".join([f"`{i.title().replace('_', ' ')}`" for i in
list_role_permissions(role)]) if list_role_permissions(
role) else lang.none)
embed.set_footer(text=lang.role_id + ": " + str(role.id))
await ctx.send(embed=embed)
await log(ctx, embed=embed)
def setup(bot):
bot.add_cog(Permissions(bot))
| true | true |
f7252f66bf5fa79b977b4129cd0bfdf672edf213 | 4,156 | py | Python | dkist/net/globus/tests/test_auth.py | DKISTDC/dkist | 3b97d7c0db144a717cfbe648b7402b8b8f9f2da2 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 21 | 2018-05-18T13:43:59.000Z | 2022-03-16T21:17:39.000Z | dkist/net/globus/tests/test_auth.py | Cadair/dkist | 2f4d930ea0e002db40e8ef17a79b0b4fb2e6d3f3 | [
"BSD-3-Clause"
] | 134 | 2017-12-07T16:09:24.000Z | 2022-03-17T16:13:55.000Z | dkist/net/globus/tests/test_auth.py | Cadair/dkist | 2f4d930ea0e002db40e8ef17a79b0b4fb2e6d3f3 | [
"BSD-3-Clause"
] | 4 | 2017-12-04T10:49:49.000Z | 2022-01-10T12:20:46.000Z | import json
import stat
import pathlib
import platform
import globus_sdk
import requests
from dkist.net.globus.auth import (ensure_globus_authorized, get_cache_contents,
get_cache_file_path, get_refresh_token_authorizer,
save_auth_cache, start_local_server)
def test_http_server():
    """Hitting the local redirect server with ``?code=...`` must hand that
    code back through ``wait_for_code``."""
    server = start_local_server()
    callback_url = "http://{a[0]}:{a[1]}".format(a=server.server_address)
    expected_code = "wibble"
    # Simulate the OAuth provider redirecting the browser back to us.
    requests.get(callback_url + f"?code={expected_code}")
    assert server.wait_for_code() == expected_code
def test_get_cache_file_path(mocker):
    """The cache file lives under the appdirs cache dir with a fixed name."""
    # Pin the appdirs cache root so the result is predictable.
    mocker.patch("appdirs.user_cache_dir", return_value="/tmp/test/")
    cache_path = get_cache_file_path()
    assert isinstance(cache_path, pathlib.Path)
    assert cache_path.parent == pathlib.Path("/tmp/test")
    assert cache_path.name == "globus_auth_cache.json"
def test_get_no_cache(mocker, tmpdir):
    """A missing cache file yields an empty dict, not an error."""
    mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
    contents = get_cache_contents()
    assert isinstance(contents, dict)
    assert not contents
def test_get_cache(mocker, tmpdir):
    """A valid JSON cache file round-trips through get_cache_contents."""
    mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
    with open(tmpdir / "globus_auth_cache.json", "w") as cache_file:
        json.dump({"hello": "world"}, cache_file)
    contents = get_cache_contents()
    assert isinstance(contents, dict)
    assert len(contents) == 1
    assert contents == {"hello": "world"}
def test_get_cache_not_json(mocker, tmpdir):
    """A corrupt (non-JSON) cache file is treated as an empty cache."""
    mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
    with open(tmpdir / "globus_auth_cache.json", "w") as cache_file:
        cache_file.write("aslkjdasdjjdlsajdjklasjdj, akldjaskldjasd, lkjasdkljasldkjas")
    contents = get_cache_contents()
    assert isinstance(contents, dict)
    assert not contents
def test_save_auth_cache(mocker, tmpdir):
    """Saving the cache creates the file with owner-only read permissions."""
    cache_file = tmpdir / "globus_auth_cache.json"
    assert not cache_file.exists()  # Sanity check: nothing saved yet.
    mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
    save_auth_cache({"hello": "world"})
    assert cache_file.exists()
    statinfo = cache_file.stat()
    # The owner must be able to read and write the token cache.
    assert bool(statinfo.mode & stat.S_IRUSR)
    assert bool(statinfo.mode & stat.S_IWUSR)
    if platform.system() != 'Windows':
        # On POSIX, neither group nor other may read the stored tokens.
        assert not bool(statinfo.mode & stat.S_IRGRP)
        assert not bool(statinfo.mode & stat.S_IROTH)
def test_get_refresh_token_authorizer(mocker):
    """A cached token set yields a RefreshTokenAuthorizer, both when read
    from the on-disk cache and when forcing re-authentication."""
    # An example cache without real tokens
    cache = {
        "transfer.api.globus.org": {
            "scope": "urn:globus:auth:scope:transfer.api.globus.org:all",
            "access_token": "buscVeATmhfB0v1tzu8VmTfFRB1nwlF8bn1R9rQTI3Q",
            "refresh_token": "YSbLZowAHfmhxehUqeOF3lFvoC0FlTT11QGupfWAOX4",
            "token_type": "Bearer",
            "expires_at_seconds": 1553362861,
            "resource_server": "transfer.api.globus.org"
        }
    }
    # Cached path: tokens come straight from get_cache_contents.
    mocker.patch("dkist.net.globus.auth.get_cache_contents", return_value=cache)
    auth = get_refresh_token_authorizer()['transfer.api.globus.org']
    assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
    assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]
    # force_reauth path: the native-app flow is consulted instead of the cache.
    mocker.patch("dkist.net.globus.auth.do_native_app_authentication", return_value=cache)
    auth = get_refresh_token_authorizer(force_reauth=True)['transfer.api.globus.org']
    assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
    assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]
def test_ensure_auth_decorator(mocker):
    """After an ``invalid_grant`` error the decorator must force a
    re-authentication and then retry the wrapped function successfully."""
    error = globus_sdk.AuthAPIError(mocker.MagicMock())
    mocker.patch.object(error, "http_status", 400)
    mocker.patch.object(error, "message", "invalid_grant")
    reauth = mocker.patch("dkist.net.globus.auth.get_refresh_token_authorizer")
    called = [False]

    @ensure_globus_authorized
    def test_func():
        # Fail once with the auth error, then succeed on the retry.
        if not called[0]:
            called[0] = True
            raise error
        return True

    assert test_func()
    # Bug fix: ``Mock.called_once_with`` is not an assertion method -- it
    # auto-creates a child Mock (always truthy), so the old
    # ``assert reauth.called_once_with(...)`` could never fail.
    reauth.assert_called_once_with(force_reauth=True)
| 33.788618 | 90 | 0.698027 | import json
import stat
import pathlib
import platform
import globus_sdk
import requests
from dkist.net.globus.auth import (ensure_globus_authorized, get_cache_contents,
get_cache_file_path, get_refresh_token_authorizer,
save_auth_cache, start_local_server)
def test_http_server():
server = start_local_server()
redirect_uri = "http://{a[0]}:{a[1]}".format(a=server.server_address)
inp_code = "wibble"
requests.get(redirect_uri + f"?code={inp_code}")
code = server.wait_for_code()
assert code == inp_code
def test_get_cache_file_path(mocker):
mocker.patch("appdirs.user_cache_dir", return_value="/tmp/test/")
path = get_cache_file_path()
assert isinstance(path, pathlib.Path)
assert path.parent == pathlib.Path("/tmp/test")
assert path.name == "globus_auth_cache.json"
def test_get_no_cache(mocker, tmpdir):
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
cache = get_cache_contents()
assert isinstance(cache, dict)
assert not cache
def test_get_cache(mocker, tmpdir):
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
with open(tmpdir / "globus_auth_cache.json", "w") as fd:
json.dump({"hello": "world"}, fd)
cache = get_cache_contents()
assert isinstance(cache, dict)
assert len(cache) == 1
assert cache == {"hello": "world"}
def test_get_cache_not_json(mocker, tmpdir):
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
with open(tmpdir / "globus_auth_cache.json", "w") as fd:
fd.write("aslkjdasdjjdlsajdjklasjdj, akldjaskldjasd, lkjasdkljasldkjas")
cache = get_cache_contents()
assert isinstance(cache, dict)
assert not cache
def test_save_auth_cache(mocker, tmpdir):
filename = tmpdir / "globus_auth_cache.json"
assert not filename.exists()
mocker.patch("appdirs.user_cache_dir", return_value=str(tmpdir))
save_auth_cache({"hello": "world"})
assert filename.exists()
statinfo = filename.stat()
assert bool(statinfo.mode & stat.S_IRUSR)
assert bool(statinfo.mode & stat.S_IWUSR)
if platform.system() != 'Windows':
assert not bool(statinfo.mode & stat.S_IRGRP)
assert not bool(statinfo.mode & stat.S_IROTH)
def test_get_refresh_token_authorizer(mocker):
cache = {
"transfer.api.globus.org": {
"scope": "urn:globus:auth:scope:transfer.api.globus.org:all",
"access_token": "buscVeATmhfB0v1tzu8VmTfFRB1nwlF8bn1R9rQTI3Q",
"refresh_token": "YSbLZowAHfmhxehUqeOF3lFvoC0FlTT11QGupfWAOX4",
"token_type": "Bearer",
"expires_at_seconds": 1553362861,
"resource_server": "transfer.api.globus.org"
}
}
mocker.patch("dkist.net.globus.auth.get_cache_contents", return_value=cache)
auth = get_refresh_token_authorizer()['transfer.api.globus.org']
assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]
mocker.patch("dkist.net.globus.auth.do_native_app_authentication", return_value=cache)
auth = get_refresh_token_authorizer(force_reauth=True)['transfer.api.globus.org']
assert isinstance(auth, globus_sdk.RefreshTokenAuthorizer)
assert auth.access_token == cache["transfer.api.globus.org"]["access_token"]
def test_ensure_auth_decorator(mocker):
error = globus_sdk.AuthAPIError(mocker.MagicMock())
mocker.patch.object(error, "http_status", 400)
mocker.patch.object(error, "message", "invalid_grant")
reauth = mocker.patch("dkist.net.globus.auth.get_refresh_token_authorizer")
called = [False]
@ensure_globus_authorized
def test_func():
if not called[0]:
called[0] = True
raise error
return True
assert test_func()
assert reauth.called_once_with(force_reauth=True)
| true | true |
f7252fa094b50021b73b758dbb88c52ca1b4bf3a | 739 | py | Python | src/pytuya/devices/heater.py | python-tuya/python-tuya | c1938491a04bd6285d05defef0a9918f50d8bbc9 | [
"MIT"
] | 10 | 2017-07-11T16:58:33.000Z | 2021-03-15T23:19:28.000Z | src/pytuya/devices/heater.py | python-tuya/python-tuya | c1938491a04bd6285d05defef0a9918f50d8bbc9 | [
"MIT"
] | 3 | 2017-07-21T03:22:19.000Z | 2018-01-04T14:02:51.000Z | src/pytuya/devices/heater.py | python-tuya/python-tuya | c1938491a04bd6285d05defef0a9918f50d8bbc9 | [
"MIT"
] | null | null | null | from pytuya.devices.base import TuyaDevice
class TuyaHeater(TuyaDevice):
    """
    Represents a Tuya Heater.

    Accessors read the most recent datapoint snapshot cached in
    ``self._last_reading`` (presumably populated by the TuyaDevice base
    class and keyed by Tuya DPS index strings -- confirm against the base
    class).
    """
    def __init__(self, id, password, local_key, region):
        # Credentials and connection handling live in the TuyaDevice base.
        super(TuyaHeater, self).__init__(id, password, local_key, region)
    def state(self):
        # Key '1': power state; False until a reading has been received.
        return self._last_reading.get('1', False)
    def is_on(self):
        # Alias for state().
        return self.state()
    def setting_temperature(self):
        # Key '3': target temperature, or None when not yet reported.
        return self._last_reading.get('3', None)
    def room_temperature(self):
        # Key '4': measured room temperature, or None when not yet reported.
        return self._last_reading.get('4', None)
    def key_lock(self):
        # Key '2': key/child-lock flag.
        return self._last_reading.get('2', False)
    def timer(self):
        # Key '5': timer value, 0 when unset.
        return self._last_reading.get('5', 0)
def object_type(self):
return "Heater" | 24.633333 | 73 | 0.641407 | from pytuya.devices.base import TuyaDevice
class TuyaHeater(TuyaDevice):
def __init__(self, id, password, local_key, region):
super(TuyaHeater, self).__init__(id, password, local_key, region)
def state(self):
return self._last_reading.get('1', False)
def is_on(self):
return self.state()
def setting_temperature(self):
return self._last_reading.get('3', None)
def room_temperature(self):
return self._last_reading.get('4', None)
def key_lock(self):
return self._last_reading.get('2', False)
def timer(self):
return self._last_reading.get('5', 0)
def object_type(self):
return "Heater" | true | true |
f7252fb0603ecf1d70f00062b7fba0b9355d9f42 | 718 | py | Python | scripts/adapter.py | Skalwalker/BreastCancerRecognition | d934948103f6cc45eae65d6e11a351f8f69b3ae0 | [
"MIT"
] | null | null | null | scripts/adapter.py | Skalwalker/BreastCancerRecognition | d934948103f6cc45eae65d6e11a351f8f69b3ae0 | [
"MIT"
] | null | null | null | scripts/adapter.py | Skalwalker/BreastCancerRecognition | d934948103f6cc45eae65d6e11a351f8f69b3ae0 | [
"MIT"
] | 1 | 2020-11-10T15:09:34.000Z | 2020-11-10T15:09:34.000Z | import readfiles
import learnAlgorithms as learn
from plot import Plot as Plot
class Adapter(object):
    """Glue between file loading, the SVM learners and plotting.

    Loads the dataset and labels once at construction time and exposes
    :meth:`run` to train/evaluate the SVM with the configured kernel.
    """

    def __init__(self, kernel, turnPlot, interactions):
        self.log("Lendo Dados")
        reader = readfiles.ReadFiles()
        self.data = reader.getData()
        self.labels = reader.getLabels()
        self.la = learn.LearnAlgorithms(self.data, self.labels)
        self.kernel = kernel
        # Whether to draw the accuracy-vs-C plot after running.
        self.turnPlot = turnPlot
        # Number of SVM runs/iterations forwarded to runSVM.
        self.interactions = interactions

    def run(self):
        """Train the SVM and, when plotting is enabled, chart accuracy
        against the C values explored."""
        acs_vector, log_values = self.la.runSVM(self.kernel, self.turnPlot, self.interactions)
        if self.turnPlot:
            Plot.plot_c(acs_vector, log_values)

    def log(self, msg):
        """Print a console message tagged with the class name."""
        print(f'[Adapter] {msg}')
| 28.72 | 92 | 0.650418 | import readfiles
import learnAlgorithms as learn
from plot import Plot as Plot
class Adapter(object):
    """Coordinates dataset loading, SVM training and optional plotting.

    NOTE(review): this is a duplicate definition of Adapter within the
    file; Python rebinds the name to this class object at import time.
    """
    def __init__(self, kernel, turnPlot, interactions):
        # "Lendo Dados" is Portuguese for "Reading Data".
        self.log("Lendo Dados")
        rf = readfiles.ReadFiles()
        self.data = rf.getData()
        self.labels = rf.getLabels()
        # Learner is built once from the freshly loaded data and labels.
        self.la = learn.LearnAlgorithms(self.data, self.labels)
        self.kernel = kernel
        self.turnPlot = turnPlot
        self.interactions = interactions
    def run(self):
        # runSVM yields an accuracy vector plus 'log_values' — presumably the
        # tried C values on a log scale; confirm against LearnAlgorithms.runSVM.
        acs_vector, log_values = self.la.runSVM(self.kernel, self.turnPlot, self.interactions)
        if(self.turnPlot):
            Plot.plot_c(acs_vector, log_values)
    def log(self, msg):
        # Tagged console logging used by this class.
        print('[Adapter] {}'.format(msg))
| true | true |
f7252fb765341c7239f0b406db114d6920488e05 | 216,381 | py | Python | pysnmp-with-texts/Wellfleet-FRSW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/Wellfleet-FRSW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/Wellfleet-FRSW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module Wellfleet-FRSW-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Wellfleet-FRSW-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:40:14 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, Counter32, IpAddress, Counter64, Bits, ModuleIdentity, MibIdentifier, TimeTicks, Unsigned32, iso, NotificationType, ObjectIdentity, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "Counter32", "IpAddress", "Counter64", "Bits", "ModuleIdentity", "MibIdentifier", "TimeTicks", "Unsigned32", "iso", "NotificationType", "ObjectIdentity", "Gauge32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
wfFrswGroup, = mibBuilder.importSymbols("Wellfleet-COMMON-MIB", "wfFrswGroup")
# ---------------------------------------------------------------------------
# wfFrSwDlcmiTable: auto-generated pysnmp objects (pysmi output) modelling the
# Wellfleet Frame Relay switch DLCMI (Data Link Connection Management
# Interface) table.  Each MibTableColumn is followed by two statements that
# attach its SNMP STATUS and DESCRIPTION strings; the loadTexts guard keeps
# the text out of memory unless textual conventions were requested.
# Do not hand-edit individual OIDs — this section is generator output.
# ---------------------------------------------------------------------------
wfFrSwDlcmiTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1), )
if mibBuilder.loadTexts: wfFrSwDlcmiTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiTable.setDescription('The Parameters for the Data Link Connection Management Interface corresponding to any interface. Incorporates the Error table.')
wfFrSwDlcmiEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwDlcmiCircuit"))
if mibBuilder.loadTexts: wfFrSwDlcmiEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEntry.setDescription('The parameters for a particular Data Link Connection Management Interface.')
# Columnar objects: configuration, state and statistics for one DLCMI entry.
wfFrSwDlcmiDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDelete.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3))).clone('init')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiState.setDescription('Indicates which state of DLCMI the interface is in')
wfFrSwDlcmiNniEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiNniEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiNniEnable.setDescription('Indicates whether a NNI is enabled for this entry.')
wfFrSwDlcmiCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCircuit.setDescription('Instance identifier; the circuit number of this entry.')
wfFrSwDlcmiManagementType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("none", 1), ("lmi", 2), ("t1617d", 3), ("t1617b", 4), ("annexa", 5), ("lmiswitch", 6), ("annexdswitch", 7), ("annexaswitch", 8), ("iwfoamenabled", 9), ("iwfoamdisabled", 10))).clone('t1617d')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiManagementType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiManagementType.setDescription('Indicates the Data Link Connection Management scheme that is active.')
wfFrSwL3NetAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwL3NetAddress.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwL3NetAddress.setDescription('Indicates level 3 (IP) address of this frame relay interface')
wfFrSwDlcmiAddressLen = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4))).clone(namedValues=NamedValues(("twobyte", 2), ("threebyte", 3), ("fourbyte", 4))).clone('twobyte')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiAddressLen.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAddressLen.setDescription('Indicates the address length, including the control portion.')
wfFrSwDlcmiControlByteDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteDisable.setDescription('Indicates inclusion of control byte in q922 format.')
wfFrSwDlcmiPollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(15)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiPollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiPollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiFullEnquiryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiFullEnquiryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFullEnquiryInterval.setDescription('Indicates the number of status enquiries before a full status enquiry. (For bidirectional procedures.)')
wfFrSwDlcmiErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 11), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiErrorThreshold.setDescription('Indicates the number errors monitored before declaring the interface down.')
wfFrSwDlcmiMonitoredEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 12), Integer32().clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiMonitoredEvents.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMonitoredEvents.setDescription('Indicates the events over which error threshold is kept.')
wfFrSwDlcmiRecoveryCounts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiRecoveryCounts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiRecoveryCounts.setDescription('Indicates the number of correct polling cycles during recovery.')
wfFrSwDlcmiMaxSupportedVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1024)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiMaxSupportedVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMaxSupportedVCs.setDescription('Indicates the maximum number of VCs allowed.')
wfFrSwDlcmiVCsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiVCsInUse.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiVCsInUse.setDescription('Indicates the number of VCs that are currently configured on this interface.')
wfFrSwSwitchHdrErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSwitchHdrErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSwitchHdrErrors.setDescription('Indicates the number of frames dropped because they were received on the remote side with an invalid switch header.')
wfFrSwDlcmiSequenceCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceCount.setDescription("Indicates this switch's sequence counter; value of next to send.")
wfFrSwDlcmiLastReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiLastReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiLastReceived.setDescription('Indicates the sequence number just received from the end station.')
wfFrSwDlcmiActiveSeqCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiActiveSeqCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiActiveSeqCount.setDescription("Indicates the switch's sequence counter for sending status enquiry. (For bidirectional procedures.)")
wfFrSwDlcmiActiveReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiActiveReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiActiveReceived.setDescription('Indicates the sequence number just received from the enquiring station. (For bidirectional procedures.)')
wfFrSwDlcmiPolls = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiPolls.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiPolls.setDescription('This is the counter of where we are in the polling cycle.')
wfFrSwDlcmiAlarmTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiAlarmTimer.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAlarmTimer.setDescription('Counter of 1/2 second timeouts. Indicates when to expect poll.')
# Last-error record: type, raw data and timestamp of the most recent error.
wfFrSwErrType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("reset", 1), ("other", 2), ("short", 3), ("long", 4), ("illegaldlci", 5), ("unknowndlci", 6), ("protoerr", 7), ("unknownie", 8), ("sequenceerr", 9), ("unknownrpt", 10), ("byteerr", 11), ("hdrerr", 12), ("formaterr", 13))).clone('reset')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrType.setDescription('Indicates the type of the last specific monitored error.')
wfFrSwErrData = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 24), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrData.setDescription('Contains as much of the error packet as possible.')
wfFrSwErrTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 25), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrTime.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrTime.setDescription('Indicates the time the last error occurred.')
wfFrSwBcMeasurementInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 2000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBcMeasurementInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBcMeasurementInterval.setDescription('Indicates the Committed Burst sample window interval in msec')
# Per-interface counters of frames dropped, broken down by cause.
wfFrSwDlcmiMcastNoBufferErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiMcastNoBufferErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMcastNoBufferErrors.setDescription('Indicates the number of times a multicast failed partially or wholly because there are insufficient buffers available to create multiple copies of a multicast frame')
wfFrSwDlcmiFrameTooShortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooShortErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooShortErrors.setDescription('Indicates the number of frames dropped that are too short to be accepted.')
wfFrSwDlcmiFrameTooLongErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooLongErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooLongErrors.setDescription('Indicates the number of frames dropped that are too long to be accepted.')
wfFrSwDlcmiIllegalDlciErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiIllegalDlciErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiIllegalDlciErrors.setDescription('Indicates the number of frames dropped that had an invalid DLCI value.')
wfFrSwDlcmiUnknownDlciErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownDlciErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownDlciErrors.setDescription('Indicates the number of frames dropped which had an unknown DLCI value.')
wfFrSwDlcmiProtocolErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiProtocolErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiProtocolErrors.setDescription('Indicates the number of frames dropped because of a DLCMI protocol violation.')
wfFrSwDlcmiUnknownIEErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownIEErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownIEErrors.setDescription('Indicates the number of frames dropped that had an unknown information element.')
wfFrSwDlcmiSequenceErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceErrors.setDescription('Indicates the number of frames dropped because of a DLCMI sequence error.')
wfFrSwDlcmiUnknownRPTErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownRPTErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownRPTErrors.setDescription('Indicates the number of frames dropped which had an unknown report type.')
wfFrSwDlcmiControlByteErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteErrors.setDescription('Indicates the number of frames dropped that had an unsupported control byte.')
wfFrSwDlcmiFormatErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFormatErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFormatErrors.setDescription('Indicates the number of frames dropped due to a frame format error.')
wfFrSwDlcmiOtherErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiOtherErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiOtherErrors.setDescription('Indicates the number of frames dropped due to unknown or other errors not counted by any error counter.')
wfFrSwDlcmiStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 39), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("running", 1), ("recovered", 2), ("fault", 3), ("start", 4))).clone('start')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiStatus.setDescription('Indicates which state of execution the DLCMI gate is in')
wfFrSwDlcmiNewVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiNewVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiNewVCs.setDescription('Indicates the number of newly added PVCs that we have not yet told the CPE about, by means of a full-status message.')
wfFrSwDlcmiDeletedVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDeletedVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDeletedVCs.setDescription('Indicates the number of deletedly added PVCs that we have not yet told the CPE about, by means of a full-status message.')
wfFrSwDlcmiFullStatusSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFullStatusSeq.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFullStatusSeq.setDescription('Indicates the expected sequence number for the next Status Enquiry message that will prove that the CPE received our last Full Status Message and knows about the deleted PVCs.')
# Bidirectional (DTE-side) procedure objects.
wfFrSwDlcmiBidirect = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 43), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiBidirect.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiBidirect.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiDteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 44), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("running", 1), ("recovered", 2), ("fault", 3), ("start", 4))).clone('start')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteStatus.setDescription('Indicates which state of execution the DLCMI gate is in for bidirectional procedures.')
wfFrSwDlcmiDteSeqCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 45), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteSeqCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteSeqCount.setDescription("Indicates the switch's sequence counter for sending status enquiry. (For bidirectional procedures.)")
wfFrSwDlcmiDteReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 46), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteReceived.setDescription('Indicates the sequence number just received from the enquiring station. (For bidirectional procedures.)')
wfFrSwDlcmiDteLastReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 47), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteLastReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteLastReceived.setDescription('Indicates the sequence number just received from the end station.')
wfFrSwDlcmiDtePolls = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDtePolls.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDtePolls.setDescription('This is the counter of where we are in the polling cycle.')
wfFrSwDlcmiDtePollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 49), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDtePollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDtePollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiDteFullEnquiryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 50), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDteFullEnquiryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteFullEnquiryInterval.setDescription('Indicates the number of status enquiries before a full status enquiry. (For bidirectional procedures.)')
wfFrSwDlcmiDteErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 51), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDteErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteErrorThreshold.setDescription('Indicates the number errors monitored before declaring the interface down.')
# Cross-network polling / status-update objects.
wfFrSwDlcmiCrossNetEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 52), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetEnable.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiCrossNetPollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 53), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 86400)).clone(120)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetPollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetPollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiCrossNetErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 54), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetErrorThreshold.setDescription('Indicates the number missed heartbeat polls before declaring the cross-net PVC inactive.')
wfFrSwDlcmiCrossNetAsyncUpdateEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetAsyncUpdateEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetAsyncUpdateEnable.setDescription('Indicates whether we are to send to the other end of the network, status updates for dlcis as soon as there is a change of status for the dlci.')
wfFrSwDlcmiBcMeasurementEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 56), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiBcMeasurementEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiBcMeasurementEnable.setDescription('Indicates whether Committed Burst Measurement is enabled for this interface. If this attribute is set to DISABLE then DE bit setting in Frame Relay frames at this interface is disabled.')
wfFrSwDlcmiAsyncUpdateEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 57), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiAsyncUpdateEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAsyncUpdateEnable.setDescription('Indicates whether the link management entity should send an asynchronous single PVC update message when the state of a PVC is changed by a technician or by cross-net polling procedures. ')
wfFrSwDlcmiCrossNetListenEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 58), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetListenEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetListenEnable.setDescription("Indicates whether the link management entity should make a judgement of the PVC's status based on Cross Net updates.")
# SVC, escape-PVC, interworking and SPVC related objects.
wfFrSwDlcmiSvcDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 59), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSvcDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSvcDisable.setDescription(' Indicates whether SVC is enabled or disabled for this access channel. ')
wfFrSwDlcmiL2AddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("e164", 1), ("x121", 2))).clone('e164')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiL2AddrType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiL2AddrType.setDescription(' Indicates the address type supported on this access channel. This information is needed when wFrSwDlcmiSVCDisable is enabled. ')
wfFrSwDlcmiEscapeMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 61), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disabled", 1), ("ingress", 2), ("egress", 3))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeMode.setDescription(' Identifies the Escape mode (none, ingress or egress) to be used for PVCs with wfFrSwVcEscapeMode set to enabled.')
wfFrSwDlcmiEscapeCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 62), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeCircuit.setDescription('Identifies the FR-DTE circuit number corresponding to an Escape PVC. Applies only to PVCs with wfFrSwVcEscapeEnable set to enabled.')
wfFrSwDlcmiEscapeVcCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 63), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeVcCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeVcCount.setDescription(' The number of PVCs on this DLCMI that are configured as Escape VCs')
wfFrSwDlcmiIwfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 64), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("sdlc2frsw", 2))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiIwfMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiIwfMode.setDescription(' Identifies the interworking mode (none, SDLC-to-FRSW) to be used for PVCs with wfFrSwVcEscapeMode set to enabled.')
wfFrSwDlcmiSvcBillingEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSvcBillingEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSvcBillingEnable.setDescription('Indicates whether the SVC Billing on this access channel set to enable.')
wfFrSwDlcmiSpvcAgent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("cra", 2), ("caa", 3), ("craandcaa", 4))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSpvcAgent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSpvcAgent.setDescription(' Indicates if an SPVC Call Request Agent, Call Accept Agent, or both are enabled on this FRSW circuit.')
wfFrSwDlcmiCallAccDlciSelectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("any", 1), ("specific", 2))).clone('any')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCallAccDlciSelectionType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCallAccDlciSelectionType.setDescription('Indicates to the Call Accept Agent to accept SPVC Call Setup requests for any available DLCI or for a specific DLCI. Call Setup requests with the wrong selection type will be rejected.')
# wfFrSwCctTable (1.3.6.1.4.1.18.3.5.9.6.2): per-virtual-circuit (DLCI)
# configuration and traffic statistics, indexed by (circuit number, DLCI).
# Every object in this table carries SMI status 'obsolete' but is still
# registered here so older agents/managers can resolve the OIDs.
wfFrSwCctTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2), )
if mibBuilder.loadTexts: wfFrSwCctTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctTable.setDescription('Frame Relay Circuit table gives information about a virtual circuit.')
wfFrSwCctEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwCctNumber"), (0, "Wellfleet-FRSW-MIB", "wfFrSwCctDlci"))
if mibBuilder.loadTexts: wfFrSwCctEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctEntry.setDescription('An entry in the Frame Relay (Virtual) Circuit table.')
# -- circuit identity, lifecycle and addressing mode --
wfFrSwCctDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2), ("system", 3))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctDelete.setDescription('Indication to delete this frame relay interface.')
wfFrSwCctNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctNumber.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctNumber.setDescription('Instance identifier; the circuit number of this interface.')
# The named values below mark the valid DLCI ranges for 2-, 3- and 4-byte
# Frame Relay address formats, not an enumeration of legal values.
wfFrSwCctDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctDlci.setDescription('Instance identifier; this indicates the virtual circuit identifier')
wfFrSwCctState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 1), ("active", 2), ("inactive", 3), ("control", 4), ("user", 5))).clone('invalid')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctState.setDescription('Indicates whether the particular virtual circuit is operational.')
wfFrSwCctMulticast = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("multicast", 1), ("unicast", 2))).clone('unicast')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctMulticast.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctMulticast.setDescription('Indicates whether this dlci is used for multicast or single destination.')
# -- committed/excess burst (Bc/Be) and throughput parameters --
wfFrSwCctInBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInBc.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBc.setDescription('Indicates the Incoming Committed Burst bits for this virtual circuit.')
wfFrSwCctOutBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctOutBc.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutBc.setDescription('Indicates the Outgoing Committed Burst bits for this virtual circuit.')
wfFrSwCctInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctInBe.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBe.setDescription('Indicates the Incoming Excess Burst bits for this virtual circuit.')
wfFrSwCctOutBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctOutBe.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutBe.setDescription('Indicates the Outgoing Excess Burst bits for this virtual circuit.')
wfFrSwCctInThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctInThroughput.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInThroughput.setDescription('Indicates the incoming throughput in bits/sec for this virtual circuit.')
wfFrSwCctOutThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctOutThroughput.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutThroughput.setDescription('Indicates the outgoing throughput in bits/sec for this virtual circuit.')
# -- creation / last-change timestamps --
wfFrSwCctCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 12), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctCreationTime.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctCreationTime.setDescription('Indicates the value of sysUpTime when the VC was created.')
wfFrSwCctLastTimeChange = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLastTimeChange.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLastTimeChange.setDescription('Indicates the value of sysUpTime when last there was a change in VC state.')
# -- local-interface counters: frames/octets sent, and frames/octets on
# -- which this switch set the FECN/BECN/DE congestion bits --
wfFrSwCctLocalSentNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEFrames.setDescription('Indicates the number of frames without the DE bit sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEOctets.setDescription('Indicates the number of octets without DE bit sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEFrames.setDescription('Indicates the number of frames with DE bit set sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEOctets.setDescription('Indicates the number of octets with DE bit set sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSetFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the FECN bit .')
wfFrSwCctLocalSetFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the FECN bit.')
wfFrSwCctLocalSetBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the BECN bit.')
wfFrSwCctLocalSetBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the BECN bit.')
wfFrSwCctLocalSetDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the DE bit.')
wfFrSwCctLocalSetDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the DE bit.')
# -- local-interface discard counters (plus drops on inactive VCs) --
wfFrSwCctLocalDropNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEFrames.setDescription('Indicates the number of frames received over the local interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctLocalDropNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEOctets.setDescription('Indicates the number of octets in frames received over the local interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctLocalDropDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEFrames.setDescription('Indicates the number of frames received over the local interface, having the DE bit set, which were discarded.')
wfFrSwCctLocalDropDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEOctets.setDescription('Indicates the number of octets in frames received over the local interface, having the DE bit set, which were discarded.')
wfFrSwCctInactiveVCDropFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropFrames.setDescription('Indicates how many frames were discarded because the virtual circuit was inactive.')
wfFrSwCctInactiveVCDropOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropOctets.setDescription('Indicates how many Octets were discarded because the virtual circuit was inactive.')
# -- local-interface receive counters (by DE/FECN/BECN marking) --
wfFrSwCctLocalRecvNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEFrames.setDescription('Indicates the number of frames received on this virtual circuit over the local interface.')
wfFrSwCctLocalRecvNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEOctets.setDescription('Indicates the number of octets received on this virtual circuit over the local interface.')
wfFrSwCctLocalRecvDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEFrames.setDescription('Indicates the number of frames received over the local interface with the DE bit set.')
wfFrSwCctLocalRecvDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEOctets.setDescription('Indicates the number of octets in frames received over the local interface with the DE bit set.')
wfFrSwCctLocalRecvFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNFrames.setDescription('Indicates the number of frames received over the local interface with the FECN bit set.')
wfFrSwCctLocalRecvFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNOctets.setDescription('Indicates the number of octets in frames received over the local interface with the FECN bit set.')
wfFrSwCctLocalRecvBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNFrames.setDescription('Indicates the number of frames received over the local interface with the BECN bit set.')
wfFrSwCctLocalRecvBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNOctets.setDescription('Indicates the number of octets in frames received over the local interface with the BECN bit set.')
wfFrSwCctLocalRecentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecentNonDEOctets.setDescription('Indicates the number of octets received over the local interface during the most recent sampling period.')
# -- remote-interface counters: same send/set/drop/receive breakdown as the
# -- local-interface columns above --
wfFrSwCctRemoteSentNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEFrames.setDescription('Indicates the number of Non DE set frames sent over the remote interface.')
wfFrSwCctRemoteSentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEOctets.setDescription('Indicates the number of Non DE set octets sent over the remote interface.')
wfFrSwCctRemoteSentDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEFrames.setDescription('Indicates the number of DE set frames sent over the remote interface.')
wfFrSwCctRemoteSentDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEOctets.setDescription('Indicates the number of DE set octets sent over the remote interface.')
wfFrSwCctRemoteSetFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNFrames.setDescription('Indicates the number of frames sent to the remote interface on which this switch set the FECN bit.')
wfFrSwCctRemoteSetFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNOctets.setDescription('Indicates the number of octets in frames sent to the remote interface that on which this switch set the FECN bit.')
wfFrSwCctRemoteSetBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNFrames.setDescription('Indicates the number of frames sent to the remote interface on which this switch set the BECN bit.')
wfFrSwCctRemoteSetBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNOctets.setDescription('Indicates the number of octets in frames sent to the remote interface on which this switch set the BECN bit.')
wfFrSwCctRemoteDropNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEFrames.setDescription('Indicates the number of frames received over the remote interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctRemoteDropNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctRemoteDropDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEFrames.setDescription('Indicates the number of frames received over the remote interface, having the DE bit set, which were discarded.')
wfFrSwCctRemoteDropDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface, having the DE bit set, which were discarded.')
wfFrSwCctRemoteRecvNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEFrames.setDescription('Indicates the number of frames received on this virtual circuit over the remote interface.')
wfFrSwCctRemoteRecvNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEOctets.setDescription('Indicates the number of octets received on this virtual circuit over the remote interface.')
wfFrSwCctRemoteRecvDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 53), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEFrames.setDescription('Indicates the number of frames received over the remote interface with the DE bit set.')
wfFrSwCctRemoteRecvDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 54), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the DE bit set.')
wfFrSwCctRemoteRecvFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 55), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNFrames.setDescription('Indicates the number of frames received over the remote interface with the FECN bit set.')
wfFrSwCctRemoteRecvFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 56), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the FECN bit set.')
wfFrSwCctRemoteRecvBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 57), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNFrames.setDescription('Indicates the number of frames received over the remote interface with the BECN bit set.')
wfFrSwCctRemoteRecvBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 58), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the BECN bit set.')
# -- BECN state, connection topology and administrative/NNI status --
wfFrSwCctLocalBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 59), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalBecnState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalBecnState.setDescription('Indicates the local BECN state')
wfFrSwCctRemoteBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 60), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteBecnState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteBecnState.setDescription('Indicates the remote BECN state')
wfFrSwCctLocalOrRemoteConnection = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 61), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("remote", 2))).clone('remote')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalOrRemoteConnection.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalOrRemoteConnection.setDescription('Indicates whether this connection is Local to Local Connection or Local to Remote connection.')
wfFrSwCctInBcOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 62), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInBcOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBcOctets.setDescription('Indicates the Incoming Committed Burst in octets for this virtual circuit.')
wfFrSwCctStateSet = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 63), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctStateSet.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctStateSet.setDescription('User access for setting the state of a virtual circuit')
wfFrSwCctReportedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 64), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acked", 1), ("unacked", 2), ("unreported", 3))).clone('unreported')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctReportedStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctReportedStatus.setDescription('Record keeping for circuit status')
wfFrSwCctReceivedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctReceivedStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctReceivedStatus.setDescription('State of a virtual circuit as reported by the network at an NNI')
wfFrSwCctCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctCrossNetStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctCrossNetStatus.setDescription('State of a virtual circuit as reported by the other end of the network under bidirectional signalling.')
wfFrSwCctXNetSent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("unsent", 1), ("sent", 2))).clone('unsent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetSent.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetSent.setDescription('Whether we have sent a cross net status message for this VC yet.')
wfFrSwCctXNetReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 68), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("recv", 1), ("unrecv", 2))).clone('unrecv')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetReceived.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetReceived.setDescription('Whether we have received a cross net status message for this VC during the current polling interval. ')
wfFrSwCctXNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 69), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetErrors.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetErrors.setDescription('This is the count of the consecutive errors (usually timeouts) against this VC in cross-network heartbeat polling ')
# wfFrSwTupleTable (1.3.6.1.4.1.18.3.5.9.6.3, obsolete): identifies the two
# endpoints of a virtual circuit as (IP address, DLCI) pairs; all four
# address/DLCI columns together form the row index.
wfFrSwTupleTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3), )
if mibBuilder.loadTexts: wfFrSwTupleTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleTable.setDescription('The Parameters for the Tuple table, identifying the endpoints of virtual circuits as pairs of IP addresses and DLCI.')
wfFrSwTupleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwTupleIpAddrA"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleDlciA"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleIpAddrB"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleDlciB"))
if mibBuilder.loadTexts: wfFrSwTupleEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleEntry.setDescription('The parameters for a particular Tuple.')
wfFrSwTupleDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwTupleDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleDelete.setDescription('Indication to delete this tuple.')
wfFrSwTupleIpAddrA = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleIpAddrA.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleIpAddrA.setDescription("Instance indentifier; indicates the IP address associated with endpoint 'A' of a virtual circuit.")
wfFrSwTupleDlciA = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleDlciA.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleDlciA.setDescription("Instance identfier; indicates the DLCI associated with endpoint 'A' of a virtual circuit.")
wfFrSwTupleIpAddrB = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleIpAddrB.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleIpAddrB.setDescription("Instance identfier; indicates the IP address associated with endpoint 'B' of a virtual circuit.")
wfFrSwTupleDlciB = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwTupleDlciB.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwTupleDlciB.setDescription("Instance identifier; Indicates the DLCI associated with endpoint 'B' of a virtual circuit.")
# wfFrSwMcastTable (1.3.6.1.4.1.18.3.5.9.6.4): multicast DLCI membership —
# each row associates an individual DLCI with a multicast DLCI on a given
# interface (identified by its IP address), indexed by wfFrSwMcastIndex.
wfFrSwMcastTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4), )
if mibBuilder.loadTexts: wfFrSwMcastTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastTable.setDescription('The list of multicast addresses')
wfFrSwMcastEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwMcastIndex"))
if mibBuilder.loadTexts: wfFrSwMcastEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastEntry.setDescription('The parameters for a particular Multicast address.')
wfFrSwMcastDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastDelete.setDescription('Indication to delete this multicast instance.')
wfFrSwMcastIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwMcastIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastIndex.setDescription('Index of this multicast DLCI instance')
wfFrSwMcastIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastIpAddr.setDescription('IP address of the interface in which this multicast DLCI is defined.')
wfFrSwMcastDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastDlci.setDescription('Identifies the multicast DLCI with which the IndividualDlci is associated.')
wfFrSwMcastIndividualDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwMcastIndividualDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwMcastIndividualDlci.setDescription('Indicates the DLCI associated with the above multicast DLCI.')
# --- wfFrSwUsage group: Frame Relay switch billing (usage accounting) ------
# Subtree 1.3.6.1.4.1.18.3.5.9.6.5. Scalars fall into repeating families:
#   * read-write configuration (enable flags, volume/directory/file names,
#     timer intervals in minutes driven by a base tick of
#     wfFrSwUsageTimerInterval seconds),
#   * read-only timestamps of the last interval expirations,
#   * read-write "trigger" scalars (set non-zero to force an immediate
#     update/store/flush/cleanup, then reset to zero),
#   * read-only "Cur*" mirrors that echo the configured value unless the
#     operator set an invalid one.
# The PVC scalars (wfFrSwUsage*) are followed by a parallel SVC set
# (wfFrSwSvcUsage*).
wfFrSwUsage = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5))
# -- PVC billing configuration (read-write) --
wfFrSwUsageEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageEnable.setDescription('Enable/Disable FRSW billing.')
# Volume 0 disables Store/Flush to disk (see description below).
wfFrSwUsageVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageVolume.setDescription("Indicates the file system volume number to which the billing usage data files will be written. The volume number corresponds to the slot number on which the volume resides. Note: Value 0 has the special meaning that no 'Store' and 'Flush' operations will take place. This translates to no Billing data will be written to the local file system. 'Update' operations will still be performed on each local slot. Full Billing statistics will still be available in the wfFrSwUsageTable MIB.")
wfFrSwUsageVolumeBackup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageVolumeBackup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageVolumeBackup.setDescription('Indicates the backup volume if wfFrSwUsageVolume becomes inoperative. Note: This feature is not implemented in this release.')
wfFrSwUsageDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageDirectory.setDescription('The name of the directory where the billing usage data files are stored. ')
wfFrSwUsageFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFilePrefix.setDescription('The base name of billing usage data files.')
# Base tick in seconds; the minute-valued intervals below must convert to a
# multiple of this tick (per their DESCRIPTION clauses).
wfFrSwUsageTimerInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageTimerInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageTimerInterval.setDescription('This number determines the timer interval (number of seconds) unit for the Billing process to perform its various timer driven tasks. i.e. updating billing usage data, writing billing usage data to file system and file system management activities.')
wfFrSwUsageUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to collect and update billing usage data in the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB follow by zeroing the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCleanupInterval.setDescription('This is the interval (number of minutes) for the Billing process to check and delete old billing usage data files. Note: When converted to seconds, this must be a multilple of wfFrSwUsageTimerInterval.')
wfFrSwUsageLocalTimeZone = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLocalTimeZone.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageLocalTimeZone.setDescription('Indicates local time zone of the switch')
# -- PVC billing interval timestamps (read-only, seconds since 1976-01-01 GMT) --
wfFrSwUsageUpdateTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 12), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageUpdateTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateTimeStamp.setDescription('Time stamp of last wfFrSwUsageUpdateInterval timer expiration or the starting time of the current wfFrSwUsageUpdateInterval. This value is number of seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageStoreTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStoreTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreTimeStamp.setDescription('Time stamp of last wfFrSwUsageStoreInterval timer expiration or the starting time of the current wfFrSwUsageStoreInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT). ')
wfFrSwUsageFlushTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 14), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageFlushTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushTimeStamp.setDescription('Time stamp of last wfFrSwUsageFlushInterval timer expiration or the starting time of the current wfFrSwUsageFlushInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwUsageCleanupTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 15), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCleanupTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCleanupTimeStamp.setDescription('Time stamp of last wfFrSwUsageCleanupInterval timer expiration or the starting time of the current wfFrSwUsageCleanupInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
# -- PVC billing manual triggers (write non-zero to fire, then reset to 0) --
wfFrSwUsageUpdateData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageUpdateData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating of the billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageStoreData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageStoreData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageFlushData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFlushData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the billing usage data and followed by zeroing the wfFrSwBillingUsage MIB. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageFileCleanup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFileCleanup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFileCleanup.setDescription('Setting this attribute to a non-zero value will cause an immediate checking and deleting old billing usage data files. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
# -- PVC billing state and current-value mirrors (read-only) --
wfFrSwUsageState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageState.setDescription('current state FRSW billing.')
wfFrSwUsageCurVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurVolume.setDescription('current file system volume number used. This number is the same as wfFrSwUsageVolume except when the user sets wfFrSwUsageVolume to an invalid number.')
wfFrSwUsageCurVolumeBackup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurVolumeBackup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurVolumeBackup.setDescription('curent backup file system volume number used. This number is the same as wfFrSwUsageVolumeBackUp except when the user sets wfFrSwUsageVolume to an invalid number. Note: This feature is not implemented in this release.')
wfFrSwUsageCurDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 23), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurDirectory.setDescription('current directory name used. This number is the same as wfFrSwUsageDirectory except when the user sets wfFrSwUsageDirectory to an invalid name.')
wfFrSwUsageCurFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 24), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurFilePrefix.setDescription('current base file name used. This number is the same as wfFrSwUsageFilePrefix except when the user sets wfFrSwUsageFilePrefix to an invalid name.')
wfFrSwUsageCurTimerInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(20)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurTimerInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurTimerInterval.setDescription('current timer interval number used. This number is the same as wfFrSwUsageTimerInterval except when the user sets wfFrSwUsageTimerInterval to an invalid value.')
wfFrSwUsageCurUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurUpdateInterval.setDescription('current update interval number used. This number is the same as wfFrSwUsageUpdateInterval except when the user sets wfFrSwUsageUpdateInterval to an invalid value.')
wfFrSwUsageCurStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurStoreInterval.setDescription('current store timer interval number used. This number is the same as wfFrSwUsageStoreInterval except when the user sets wfFrSwUsageStoreInterval to an invalid value.')
wfFrSwUsageCurFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 28), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurFlushInterval.setDescription('current flush timer interval number used. This number is the same as wfFrSwUsageFlushInterval except when the user sets wfFrSwUsageFlushInterval to an invalid value.')
wfFrSwUsageCurCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 29), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurCleanupInterval.setDescription('current file cleanup timer interval number used. This number is the same as wfFrSwUsageCleanupInterval except when the user sets wfFrSwUsageCleanupInterval to an invalid value.')
# Debug flag plus its read-only mirror; lab use only per the description.
wfFrSwUsageDebug = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageDebug.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageDebug.setDescription('Enable/Disable printing of debug edl (trap) messages. NOTE: Do not enable this attribute in operational enviornment as it will likely flood the logging facility. This attribute is reserved for specialized debugging in a controlled lab enviornment.')
wfFrSwUsageCurDebug = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurDebug.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurDebug.setDescription('current debug value used. This value is the same as wfFrSwUsageDebug except when the user sets wfFrSwUsageDeubg to an invalid value.')
wfFrSwUsageSwitchId = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 32), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSwitchId.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageSwitchId.setDescription('switch id used in the billing usage data file.')
wfFrSwUsageNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 33), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageNumEntries.setDescription('number of entries in wfFrSwUsageTable')
# -- SVC billing scalars: same structure as the PVC set above --
wfFrSwSvcUsageEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageEnable.setDescription('Enable/Disable FRSW SVC billing.')
wfFrSwSvcUsageInterimRecordEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 35), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageInterimRecordEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageInterimRecordEnable.setDescription('Enable/Disable Writing FRSW SVC billing record while SVC connection is still up.')
wfFrSwSvcUsageVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 36), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageVolume.setDescription("Indicates the file system volume number to which the SVC billing usage data files will be written. The volume number corresponds to the slot number on which the volume resides. Note: Value 0 has the special meaning that no 'Store' and 'Flush' operations will take place. This translates to no Billing data will be written to the local file system. 'Update' operations will still be performed on each local slot. Full Billing statistics will still be available in the wfFrSwUsageTable MIB.")
wfFrSwSvcUsageDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 37), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageDirectory.setDescription('The name of the directory where the SVC billing usage data files are stored. ')
wfFrSwSvcUsageFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 38), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFilePrefix.setDescription('The base name of SVC billing usage data files.')
wfFrSwSvcUsageUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 39), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to collect and update billing usage data in the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 40), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 41), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB follow by zeroing the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 42), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupInterval.setDescription('This is the interval (number of minutes) for the SVC Billing process to check and delete old billing usage data files. Note: When converted to seconds, this must be a multilple of wfFrSwUsageTimerInterval.')
# -- SVC billing interval timestamps (read-only) --
wfFrSwSvcUsageUpdateTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 43), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageUpdateInterval timer expiration or the starting time of the current wfFrSwSvcUsageUpdateInterval. This value is number of seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwSvcUsageStoreTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 44), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageStoreInterval timer expiration or the starting time of the current wfFrSwSvcUsageStoreInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT). ')
wfFrSwSvcUsageFlushTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 45), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageFlushInterval timer expiration or the starting time of the current wfFrSwSvcUsageFlushInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwSvcUsageCleanupTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 46), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageCleanupInterval timer expiration or the starting time of the current wfFrSwSvcUsageCleanupInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
# -- SVC billing manual triggers (write non-zero to fire, then reset to 0) --
wfFrSwSvcUsageUpdateData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 47), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating of the SVC billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageStoreData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 48), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the SVC billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageFlushData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 49), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the SVC billing usage data and followed by zeroing the wfFrSwBillingUsage MIB. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageFileCleanup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 50), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFileCleanup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFileCleanup.setDescription('Setting this attribute to a non-zero value will cause an immediate checking and deleting old SVC billing usage data files. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
# -- SVC billing state and current-value mirrors (read-only) --
wfFrSwSvcUsageState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 51), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageState.setDescription('current state FRSW SVC billing.')
wfFrSwSvcUsageCurVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 52), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurVolume.setDescription('current file system volume number used for SVC Billing. This number is the same as wfFrSwSvcUsageVolume except when the user sets wfFrSwSvcUsageVolume to an invalid number.')
wfFrSwSvcUsageCurDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 53), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurDirectory.setDescription('current directory name used for SVC Billing. This number is the same as wfFrSwSvcUsageDirectory except when the user sets wfFrSwSvcUsageDirectory to an invalid name.')
wfFrSwSvcUsageCurFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 54), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFilePrefix.setDescription('current base file name used for SVC Billing. This name is the same as wfFrSwSvcUsageFilePrefix except when the user sets wfFrSwSvcUsageFilePrefix to an invalid name.')
wfFrSwSvcUsageCurUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 55), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurUpdateInterval.setDescription('current update interval number used. This number is the same as wfFrSwSvcUsageUpdateInterval except when the user sets wfFrSwSvcUsageUpdateInterval to an invalid value.')
wfFrSwSvcUsageCurStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 56), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurStoreInterval.setDescription('current store timer interval number used. This number is the same as wfFrSwSvcUsageStoreInterval except when the user sets wfFrSwSvcUsageStoreInterval to an invalid value.')
wfFrSwSvcUsageCurFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 57), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFlushInterval.setDescription('current flush timer interval number used. This number is the same as wfFrSwSvcUsageFlushInterval except when the user sets wfFrSwSvcUsageFlushInterval to an invalid value.')
wfFrSwSvcUsageCurCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 58), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurCleanupInterval.setDescription('current file cleanup timer interval number used. This number is the same as wfFrSwSvcUsageCleanupInterval except when the user sets wfFrSwSvcUsageCleanupInterval to an invalid value.')
wfFrSwSvcUsageNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 59), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageNumEntries.setDescription('number of entries in wfFrSwSvcUsageTable')
# -- Miscellaneous identification / file-layout version scalars --
wfFrSwSvcUsageVersionId = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 60), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageVersionId.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageVersionId.setDescription('The Software Version ID field is a two byte, right justified, binary formated value that identifies the particular version number of the software release. High nibble of byte 1 represents the major version number. Low nibble of byte 1 represents the release number. Byte 2 represents the integration number.')
wfFrSwUsageSwitchName = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 61), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageSwitchName.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageSwitchName.setDescription('The Switch name is a 6-bytes, right justified with leading blanks as necessary. It can be combination of letters, numbers and blanks. This ID identifies the particular networks equipment for SVC billing usage data process.')
wfFrSwPvcUsageFileLayout = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 62), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwPvcUsageFileLayout.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwPvcUsageFileLayout.setDescription('PVC usage file layout version')
wfFrSwSvcUsageFileLayout = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 63), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageFileLayout.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFileLayout.setDescription('SVC usage file layout version')
wfFrSwUsageTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6), )
if mibBuilder.loadTexts: wfFrSwUsageTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageTable.setDescription('The Billing usage table.')
wfFrSwUsageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwUsageCircuitNumber"), (0, "Wellfleet-FRSW-MIB", "wfFrSwUsageDlci"))
if mibBuilder.loadTexts: wfFrSwUsageEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEntry.setDescription('The parameters for Billing Usage.')
wfFrSwUsageDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageDelete.setDescription('Indicates status of this entry. FRSW_USAGE_CREATED is the normal case. FRSW_USAGE_DELETED means the corresponding tuple and vc instances were deleted some time during this collection interval. This billing instance will be deleted at the end of the next wfFrSwUsageFlush period after this billing record is written out to the file system.')
wfFrSwUsageCircuitNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCircuitNumber.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageCircuitNumber.setDescription('Instance identifier; the circuit number of this interface. ')
# --- wfFrSwUsageTable columns (all marked 'obsolete'): per-VC billing-usage
# snapshot for one measurement interval. 64-bit quantities (timestamps and
# frame/octet totals) are represented as High/Low pairs of Integer32 columns.
# DLCI column: named values mark the valid boundary points for 2-, 3- and
# 4-byte DLCI address formats.
wfFrSwUsageDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageDlci.setDescription('Instance identifier; this indicates which virtual circuit. ')
wfFrSwUsageIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageIPAddress.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageIPAddress.setDescription('(Local) IP address corresponding to wfFrSwUsageCircuitNumber of this virtual circuit. ')
# Interval start/end timestamps: 1/100th-second ticks since midnight
# Jan 1, 1976 (GMT), split into High and Low 32-bit halves.
wfFrSwUsageStartTimeStampHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampHigh.setDescription('Time stamp of the starting time (the high 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageStartTimeStampLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampLow.setDescription('Time stamp of the starting time (the low 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageEndTimeStampHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampHigh.setDescription('Time stamp of the ending time (the high 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageEndTimeStampLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampLow.setDescription('Time stamp of the ending time (the low 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
# Totals sent during the interval, split by DE (Discard Eligible) bit,
# each as a High/Low 32-bit pair of frame and octet counts.
wfFrSwUsageSentNonDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesHigh.setDescription('Number (the high 32 bits) of local frames sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesLow.setDescription('Number (the low 32 bits) of local frames sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsHigh.setDescription('Number (the high 32 bits) of local octets sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsLow.setDescription('Number (the low 32 bits) of local octets sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesHigh.setDescription('Number (the high 32 bits) of local frames with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesLow.setDescription('Number (the low 32 bits) of local frames with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsHigh.setDescription('Number (the high 32 bits) of local octets with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsLow.setDescription('Number (the low 32 bits) of local octets with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
# Snapshots of the underlying 32-bit wfFrSwCctLocalSent* counters at interval
# end; per the descriptions, each High column counts how many times the
# corresponding 32-bit counter wrapped.
wfFrSwUsageLastNonDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentNonDEFrames value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEFrames is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentNonDEFrames has wrapped around.')
wfFrSwUsageLastNonDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentNonDEFrames value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastNonDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentNonDEOctets value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEOctets is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentNonDEOctets has wrapped around.')
wfFrSwUsageLastNonDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentNonDEOctets value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentDEFrames value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEFrames is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentDEFrames has wrapped around.')
wfFrSwUsageLastDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentDEFrames value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentDEOctets value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentDEOctets is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentDEOctets has wrapped around.')
wfFrSwUsageLastDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentDEOctets value at wfFrSwUsageEndTimeStamp. ')
# Identification of the remote PVC endpoint (IP address + DLCI).
wfFrSwUsageRemoteIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 25), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageRemoteIPAddress.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageRemoteIPAddress.setDescription('IP address of the other side (remote) of this PVC endpoint.')
wfFrSwUsageRemoteDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageRemoteDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageRemoteDlci.setDescription('DLCI number of the other side (remote) of this PVC endpoint.')
# --- wfFrSwVcTable: Frame Relay virtual-circuit table, one row per VC,
# indexed by (wfFrSwVcCircuit, wfFrSwVcDlci). Column objects follow below.
wfFrSwVcTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7), )
if mibBuilder.loadTexts: wfFrSwVcTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTable.setDescription('Frame Relay Virtual Circuit table gives information about a virtual circuit.')
wfFrSwVcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwVcCircuit"), (0, "Wellfleet-FRSW-MIB", "wfFrSwVcDlci"))
if mibBuilder.loadTexts: wfFrSwVcEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcEntry.setDescription('An entry in the Frame Relay (Virtual) Circuit table.')
# --- wfFrSwVcEntry columns 1-24: row lifecycle, index objects, VC state,
# traffic parameters (Be/Bc/throughput), signalled status, destination
# addressing, priority, and timestamps.
wfFrSwVcDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2), ("system", 3), ("svc", 4), ("spvccra", 5), ("spvccaa", 6))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDelete.setDescription('Indication to delete this virtual circuit.')
# Index columns: circuit number of the logical port plus the DLCI.
wfFrSwVcCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCircuit.setDescription('Instance identifier; the circuit number of this interface (logical port).')
wfFrSwVcDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDlci.setDescription('Instance identifier; this indicates the virtual circuit identifier')
# Operational state (read-only) vs. administratively requested state
# (read-write wfFrSwVcStateSet).
wfFrSwVcState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 1), ("active", 2), ("inactive", 3), ("control", 4), ("user", 5))).clone('invalid')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcState.setDescription('Indicates whether the particular virtual circuit is operational.')
wfFrSwVcStateSet = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcStateSet.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcStateSet.setDescription('User access for setting the state of a virtual circuit')
wfFrSwVcMulticast = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("multicast", 1), ("unicast", 2))).clone('unicast')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcMulticast.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcMulticast.setDescription('Indicates whether this dlci is used for multicast or a single destination.')
# Traffic-shaping parameters: excess burst (Be), committed burst (Bc)
# and throughput, for each direction.
wfFrSwVcInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 7), Integer32().clone(2147483647)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBe.setDescription('Indicates the maximum number Incoming Excess Burst bits that are allowed in a configured time interval (T).')
wfFrSwVcOutBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcOutBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutBe.setDescription('Indicates the Outgoing Excess Burst bits for this virtual circuit.')
wfFrSwVcInThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcInThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInThroughput.setDescription('Indicates the incoming throughput in bits/sec for this virtual circuit.')
wfFrSwVcOutThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcOutThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutThroughput.setDescription('Indicates the outgoing throughput in bits/sec for this virtual circuit.')
wfFrSwVcOutBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcOutBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutBc.setDescription('Indicates the Outgoing Committed Burst bits for this virtual circuit.')
wfFrSwVcInBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBc.setDescription('Indicates the Incoming Committed Burst bits for this virtual circuit.')
wfFrSwVcInBcOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBcOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBcOctets.setDescription('Indicates the Incoming Committed Burst in octets for this virtual circuit.')
wfFrSwVcBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBecnState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBecnState.setDescription('Indicates the BECN state')
# Signalled status objects: local bookkeeping, NNI-reported state, and
# bidirectional cross-network status messages (sent/received flags).
wfFrSwVcReportedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acked", 1), ("unacked", 2), ("unreported", 3))).clone('unreported')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcReportedStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcReportedStatus.setDescription('Record keeping for circuit status')
wfFrSwVcReceivedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcReceivedStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcReceivedStatus.setDescription('State of a virtual circuit as reported by the network at an NNI')
wfFrSwVcCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCrossNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCrossNetStatus.setDescription('State of a virtual circuit as reported by the other end of the network under bidirectional signalling.')
wfFrSwVcXNetSent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("unsent", 1), ("sent", 2))).clone('unsent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetSent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetSent.setDescription('Whether we have sent a cross net status message for this VC yet.')
wfFrSwVcXNetReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("recv", 1), ("unrecv", 2))).clone('unrecv')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetReceived.setDescription('Whether we have received a cross net status message for this VC during the current polling interval.')
# Destination addressing, traffic priority, and creation/change timestamps.
wfFrSwVcCalledIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 20), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCalledIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCalledIpAddr.setDescription('Indicates the IP address associated with destination of a virtual circuit.')
wfFrSwVcCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCalledDlci.setDescription('Indicates the DLCI associated with destination of a virtual circuit.')
wfFrSwVcTrfPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 999))).clone(namedValues=NamedValues(("one", 1), ("two", 2), ("three", 3), ("default", 999))).clone('default')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcTrfPriority.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTrfPriority.setDescription('Defines the traffic priority level of all the incoming packets on this VC. FRSW_VCPRIORITY_DEFAULT - Set all incoming user traffic packets to the default priority used by the port. FRSW_VCPRIORITY_ONE - Set all incoming packets to priority 1. FRSW_VCPRIORITY_TWO - Set all incoming packets to priority 2. FRSW_VCPRIORITY_THREE - Set all incoming packets to priority 3. Priority 0 is reserved for network critical packets like OSPF, FR LMI and SMDS heartbeat and is not available for user traffic.')
wfFrSwVcCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 23), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCreationTime.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCreationTime.setDescription('Indicates the value of sysUpTime when the VC was created.')
wfFrSwVcLastTimeChange = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 24), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcLastTimeChange.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcLastTimeChange.setDescription('Indicates the value of sysUpTime when last there was a change in VC state.')
# --- wfFrSwVcEntry columns 25-53: per-VC traffic counters (Counter32), all
# read-only, grouped as transmitted, congestion-bit-set, dropped, and
# received frame/octet pairs, plus the incoming excess-burst octet limit.
wfFrSwVcTxNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxNonDeFrames.setDescription('Indicates the number of frames without the DE bit sent on this virtual circuit over the interface.')
wfFrSwVcTxNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxNonDeOctets.setDescription('Indicates the number of octets without DE bit sent on this virtual circuit over the interface.')
wfFrSwVcTxDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxDeFrames.setDescription('Indicates the number of frames with DE bit set sent on this virtual circuit over the interface.')
wfFrSwVcTxDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxDeOctets.setDescription('Indicates the number of octets with DE bit set sent on this virtual circuit over the interface.')
# Frames/octets on which this switch itself set the FECN/BECN/DE bits.
wfFrSwVcSetFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetFecnFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the FECN bit.')
wfFrSwVcSetFecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetFecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetFecnOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the FECN bit.')
wfFrSwVcSetBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetBecnFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the BECN bit.')
wfFrSwVcSetBecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetBecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetBecnOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the BECN bit.')
wfFrSwVcSetDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetDeFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the DE bit.')
wfFrSwVcSetDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetDeOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the DE bit.')
# Discard counters, split by DE bit and by reason (inactive VC).
wfFrSwVcDropNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropNonDeFrames.setDescription('Indicates the number of frames received over the interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwVcDropNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropNonDeOctets.setDescription('Indicates the number of octets in frames received over the interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwVcDropDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropDeFrames.setDescription('Indicates the number of frames received over the interface, having the DE bit set, which were discarded.')
wfFrSwVcDropDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropDeOctets.setDescription('Indicates the number of octets in frames received over the interface, having the DE bit set, which were discarded.')
wfFrSwVcInactiveVcDropFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropFrames.setDescription('Indicates how many frames were discarded because the virtual circuit was inactive.')
wfFrSwVcInactiveVcDropOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropOctets.setDescription('Indicates how many Octets were discarded because the virtual circuit was inactive.')
# Receive counters, split by DE bit and by observed FECN/BECN bits.
wfFrSwVcRecvNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeFrames.setDescription('Indicates the number of frames received on this virtual circuit over the interface.')
wfFrSwVcRecvNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeOctets.setDescription('Indicates the number of octets received on this virtual circuit over the interface.')
wfFrSwVcRecvDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvDeFrames.setDescription('Indicates the number of frames received over the interface with the DE bit set.')
wfFrSwVcRecvDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvDeOctets.setDescription('Indicates the number of octets in frames received over the interface with the DE bit set.')
wfFrSwVcRecvFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvFecnFrames.setDescription('Indicates the number of frames received over the interface with the FECN bit set.')
wfFrSwVcRecvFecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvFecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvFecnOctets.setDescription('Indicates the number of octets in frames received over the interface with the FECN bit set.')
wfFrSwVcRecvBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvBecnFrames.setDescription('Indicates the number of frames received over the interface with the BECN bit set.')
wfFrSwVcRecvBecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvBecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvBecnOctets.setDescription('Indicates the number of octets in frames received over the interface with the BECN bit set.')
# Miscellaneous: recent-interval octets, cross-net error streak, and
# excess-burst discard counters.
wfFrSwVcRecentNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecentNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecentNonDeOctets.setDescription('Indicates the number of octets received over the interface during the most recent sampling period.')
wfFrSwVcXNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetErrors.setDescription('This is the count of the consecutive errors (usually timeouts) against this VC in cross-network heartbeat polling.')
wfFrSwVcDropExcessBurstFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstFrames.setDescription('Indicates the number of Excess Burst Frames dropped on this virtual circuit.')
wfFrSwVcDropExcessBurstOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstOctets.setDescription('Indicates the number of Excess Burst Octets dropped on this virtual circuit.')
wfFrSwVcInBeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 53), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBeOctets.setDescription('Indicates the maximum number Incoming Excess Burst bytes that are allowed in a configured time interval (T).')
# --- wfFrSwVcEntry columns 54-61: configurable incoming excess burst, PVC
# source-redirect control/state machinery (primary vs. backup endpoint),
# and the backup endpoint's address, DLCI and cross-net status.
wfFrSwVcCfgInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 54), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCfgInBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCfgInBe.setDescription('The number of Excess Burst in bits')
# Redirect control: manual vs. on-demand switching between primary/backup.
wfFrSwVcRedirectAction = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("redirecttoprimary", 2), ("redirecttobackup", 3), ("switchondemand", 4), ("swondemandtoprimary", 5), ("swondemandtobackup", 6))).clone('redirecttoprimary')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcRedirectAction.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectAction.setDescription("Perform pvc source redirect manually or based on cross-net updates: 'redirecttoprimary(2)' will force to switch to primary; 'redirecttobackup(3)' will force to switch to backup; 'switchondemand(4)' will switch based on cross-net status of the primary to and from primary; 'swondemandtoprimary(5)' will switch to primary from backup iff cross-net of primary became active; 'swondemandtobackup(6)' will switch to backup from primary iff cross-net of primary became inactive.")
wfFrSwVcRedirectType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 56), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("transparent", 1), ("intrusiven", 2), ("intrusivea", 3))).clone('intrusivea')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcRedirectType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectType.setDescription("Type of dte notification at switching time: 'transparent(1)' will not send notification to dte; 'intrusiven(2)' will send async update with NEW bit; 'intrusivea(3)' will send async update with A bit not set.")
wfFrSwVcRedirectState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 57), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 12, 13, 21))).clone(namedValues=NamedValues(("backupinactive", 1), ("primaryactive", 2), ("switchtobackup", 3), ("backupactive", 12), ("switchtoprimary", 13), ("holddown", 21))).clone('backupinactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRedirectState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectState.setDescription("PVC Source Redirect State: 'backupinactive(1)' - backup is not configured and/or cross-net status is inactive; will allow traffic only through primary. 'primaryactive(2)' - both primary and backup rx'ed 'active' cross-net status, currently primary is active and traffic only through primary. 'switchtobackup(3)' - primary cross-net status is inactive, but can not switch to backup due to manual (or semi-manual) operation of the redirect; will allow traffic only through primary. 'backupactive(12)' - cross-net status is 'inactive' for primary; will allow traffic only through backup. 'switchtoprimary(13)' - cross-net status is 'active' for primary, should by can not switch to primary due to manual (or semi-manual) operation of the redirect; will allow traffic only through backup. 'holddown(21)' - down state used as intermediate state at switching time (for not more then a second); all traffic is dropped.")
# Backup endpoint identification and health.
wfFrSwVcBackupCalledIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 58), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcBackupCalledIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCalledIpAddr.setDescription(' Backup Called Ip Address of the remote end of the PVC.')
wfFrSwVcBackupCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 59), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcBackupCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCalledDlci.setDescription(' Backup Called Dlci of the remote end of the PVC.')
wfFrSwVcBackupCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('inactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetStatus.setDescription(' Cross net status of the backup remote end of the PVC.')
wfFrSwVcBackupCrossNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 61), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetErrors.setDescription(' Support counter of missed cross net update from backup remote end of the PVC, range: [0, wfFrSwDlcmiCrossNetErrorThreshold].')
wfFrSwVcAtmIwfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 62), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("atmDisableIwfMode", 1), ("atmServiceIwfTransparentMode", 2), ("atmServiceIwfTranslationMode", 3), ("atmNetworkIwfMode", 4))).clone('atmDisableIwfMode')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfMode.setDescription('This attribute indicates the mode of FR-ATM interworking over this FR PVC or that FR-ATM interworking is not enabled on it.')
wfFrSwVcAtmIwfVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 63), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVPI.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVPI.setDescription('This is relevant only when the ATM/FR interworking is enabled for this PVC. This indicates the ATM virtual path identifier associated with the Frame Relay PVC described by this record virtual circuit identifier.')
wfFrSwVcAtmIwfVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 64), Integer32().subtype(subtypeSpec=ValueRangeConstraint(32, 65535)).clone(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVCI.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVCI.setDescription('This is relevant only when FR/ATM interworking is enabled for this PVC. This indicates the ATM virtual circuit identifier associated with the Frame Relay PVC described by this record.')
wfFrSwVcAtmIwfLossPriorityPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapDe", 1), ("atmiwfsetDe1", 2), ("atmiwfsetDe0", 3))).clone('atmiwfmapDe')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfLossPriorityPolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfLossPriorityPolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy for translating ATM CLP to FR DE on this PVC or simply setting FR DE to a constant value for all frames.')
wfFrSwVcAtmIwfDePolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapClp", 1), ("atmiwfsetClp1", 2), ("atmiwfsetClp0", 3))).clone('atmiwfmapClp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfDePolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfDePolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy on this PVC for translating FR DE to ATM CLP or simply setting CLP to a constant value for all frames.')
wfFrSwVcAtmIwfEfciPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapFecn", 1), ("atmiwfsetFecn1", 2), ("atmiwfsetFecn0", 3))).clone('atmiwfmapFecn')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfEfciPolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfEfciPolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy on this PVC for translating FR FECN to ATM EFCI or simply setting ATM EFCI to a constant value for all frames.')
wfFrSwVcEscapeEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 68), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcEscapeEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcEscapeEnable.setDescription(' Identifies this PVC as either a standard FRSW PVC (escape disabled) or an Escape PVC (escape enabled). The type of Escape PVC (ingress node or egress node) is specified in the wfFrSwDlcmiEntry Object.')
wfFrSwVcSpvcCallState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 69), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inactive", 1), ("inprogress", 2), ("active", 3))).clone('inactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSpvcCallState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSpvcCallState.setDescription('Indicates to the state of the SPVC call for this DLCI.')
wfFrSwVcCallReqCalledAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 70), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledAddr.setDescription('Called E.164/X.121 Address for an SPVC Call Request Agent. The address type is determined by the wfFrSwDlcmiL2AddrType attribute in wfFrSwDlcmiEntry.')
wfFrSwVcCallReqDlciSelectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 71), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("any", 1), ("specific", 2))).clone('any')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqDlciSelectionType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqDlciSelectionType.setDescription("Indicates to the Calling End of an SPVC Call Request whether to use any available DLCI, or a specific DLCI. If 'specific' is chosen, the called DLCI value is specified in wfFrSwVcCallReqCalledDlci.")
wfFrSwVcCallReqCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 72), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledDlci.setDescription("Indicates to the Calling End of an SPVC Call Request the DLCI to be used at the destination of a virtual circuit. This value should be specified when 'specific' wfFrSwVcCallReqDlciSelectionType is chosen.")
wfFrSwVcCallReqRetryTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 73), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqRetryTimer.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqRetryTimer.setDescription('Indicates the number of minutes the Call Request Agent should wait for an SPVC CONNECT message before declaring a Call Setup request REJECTED.')
wfFrSwVcCallReqMaxRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 74), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqMaxRetries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqMaxRetries.setDescription('Indicates the number of times the Call Request Agent should retry failed Call Setup requests before declaring the SPVC invalid.')
# --- wfFrSwIsdnBaseTable (1.3.6.1.4.1.18.3.5.9.6.8) ---
# Per-slot FRSW-over-ISDN configuration: selects whether the Calling Party
# (ANI) or Called Party (DNIS) number maps an ISDN call to a FRSW UNI.
# Indexed by wfFrSwIsdnBaseSlotNum. Generated pysnmp code — do not hand-edit.
wfFrSwIsdnBaseTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8), )
if mibBuilder.loadTexts: wfFrSwIsdnBaseTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseTable.setDescription('This is a FRSW over ISDN configuration table. This table specifies whether the Calling Party (ANI) or Called Party (DNIS) ISDN Phone Number should be used to map the ISDN call to a particular FRSW UNI. The table is indexed by the Slot Number where the PRI(s) exist.')
wfFrSwIsdnBaseEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnBaseSlotNum"))
if mibBuilder.loadTexts: wfFrSwIsdnBaseEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseEntry.setDescription('Instance Id for this table.')
# Column 1: row create/delete toggle (Wellfleet-style row deletion).
wfFrSwIsdnBaseDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnBaseDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseDelete.setDescription('Indication to delete this FRSW ISDN interface. ')
wfFrSwIsdnBaseSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnBaseSlotNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseSlotNum.setDescription('This number is the Slot Number for the PRI interface(s) that are being configured for FRSW ISDN. There will be one of these tables for every slot where an FRSW ISDN PRI Interface exists.')
wfFrSwIsdnBaseAssocType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dnis", 1), ("ani", 2))).clone('dnis')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnBaseAssocType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseAssocType.setDescription('Indicates which ISDN Phone Number (ANI or DNIS) to use to do the ISDN call to FRSW UNI mapping.')
# --- wfFrSwIsdnAssocTable (1.3.6.1.4.1.18.3.5.9.6.9) ---
# Maps (slot, ISDN phone number) -> FRSW UNI hunt-group index, with an
# optional screening-enable flag. Indexed by wfFrSwIsdnAssocSlotNum and
# wfFrSwIsdnAssocNum. Generated pysnmp code — do not hand-edit.
wfFrSwIsdnAssocTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9), )
if mibBuilder.loadTexts: wfFrSwIsdnAssocTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocTable.setDescription('This table defines the Association Table to be used for the FRSW over ISDN application. The table contains a list of ISDN Phone Numbers and the associated FRSW UNI Index Number. The table is indexed by the Slot Number and the ISDN Phone Number.')
wfFrSwIsdnAssocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnAssocSlotNum"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnAssocNum"))
if mibBuilder.loadTexts: wfFrSwIsdnAssocEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocEntry.setDescription('Instance Id for this table.')
wfFrSwIsdnAssocDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocDelete.setDescription('Indication to delete this Association Instance.')
wfFrSwIsdnAssocSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnAssocSlotNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocSlotNum.setDescription('Slot with which this ISDN Phone Number is associated.')
wfFrSwIsdnAssocNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnAssocNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocNum.setDescription('ISDN Phone Number that is used to look up the appropriate FRSW UNI Index. This number is compared with either the Calling Party Number (ANI) Information Element or the Called Party Number (DNIS) Information Element contained in the ISDN Call Setup Message.')
wfFrSwIsdnAssocScrnEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocScrnEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocScrnEnable.setDescription('Indicate whether allowed screening should be enabled or disabled for all of the UNIs contained in the FRSW UNI Index.')
# Default 2147483647 (0x7FFFFFFF) marks an unconfigured index (per DESCRIPTION).
wfFrSwIsdnAssocIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 5), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocIndex.setDescription('A number that indicates the FRSW UNI Index that is is associated with the ISDN Phone Number. This FRSW UNI Index is used as a key to obtain the UNIs and the Screening information from the wfFrSwIsdnScrnEntry and wfFrSwIsdnUniEntry mibs. The default for the index is 2**31 - 1 = 2147483647 = 0x7FFFFFFF, which represents an unconfigured index number.')
# --- wfFrSwIsdnUniTable (1.3.6.1.4.1.18.3.5.9.6.10) ---
# Collects FRSW UNIs into hunt groups identified by an index number.
# Indexed by wfFrSwIsdnUniIndex and wfFrSwIsdnUniNum. Generated pysnmp code.
wfFrSwIsdnUniTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10), )
if mibBuilder.loadTexts: wfFrSwIsdnUniTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniTable.setDescription('This table is used by the FRSW over ISDN application. The table defines a list of FRSW UNIs that are to be collected into a hunt group identifiable by an Index Number.')
wfFrSwIsdnUniEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnUniIndex"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnUniNum"))
if mibBuilder.loadTexts: wfFrSwIsdnUniEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniEntry.setDescription('Instance Id for this table.')
wfFrSwIsdnUniDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnUniDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniDelete.setDescription('Indication to delete this FRSW UNI Index Instance.')
wfFrSwIsdnUniIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniIndex.setDescription('FRSW UNI Index -- a number that identifies a group of related FRSW UNIs that are collected together as a hunt group. This number ties this entry to an entry in wfFrSwIsdnAssocEntry.')
wfFrSwIsdnUniNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniNum.setDescription('A FRSW UNI/Circuit.')
wfFrSwIsdnUniState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("available", 1), ("inuse", 2))).clone('available')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniState.setDescription('State of this UNI (available or in-use).')
# --- wfFrSwIsdnScrnTable (1.3.6.1.4.1.18.3.5.9.6.11) ---
# Incoming-call screening: allowed ISDN phone numbers per FRSW UNI index.
# Consulted only when wfFrSwIsdnAssocScrnEnable is 'enabled' for the index.
# Indexed by wfFrSwIsdnScrnIndex and wfFrSwIsdnScrnNum. Generated pysnmp code.
wfFrSwIsdnScrnTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11), )
if mibBuilder.loadTexts: wfFrSwIsdnScrnTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnTable.setDescription('This is the incoming call screening table for the FRSW over ISDN application. The table consists of a FRSW UNI Index and a list of allowable ISDN Phone numbers for that FRSW UNI Index. The table is indexed by both the FRSW UNI Index and the ISDN Phone Number. This table is referenced only when the wfFrSwIsdnAssocScrnEnable is set to Enabled for this FRSW UNI Index.')
wfFrSwIsdnScrnEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnScrnIndex"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnScrnNum"))
if mibBuilder.loadTexts: wfFrSwIsdnScrnEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnEntry.setDescription(' Instance Id for this table. ')
wfFrSwIsdnScrnDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnScrnDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnDelete.setDescription(' Indication to delete this Scrn Instance. ')
wfFrSwIsdnScrnIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnScrnIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnIndex.setDescription('FRSW UNI Index - A number that ties this entry to an entry in wfFrSwIsdnAssocEntry.')
wfFrSwIsdnScrnNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnScrnNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnNum.setDescription('ISDN Phone Number of a user authorized to access the UNIs contained in the FRSW UNI Index. ')
wfFrSwSigTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12), )
if mibBuilder.loadTexts: wfFrSwSigTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTable.setDescription(" The Frame relay signalling table contains frame relay signalling entries indexed by the frame relay access channel circuit number. An instance of wfFrSwSigEntry is required for each frame relay access channel with frame relay signalling enabled. The absence of wfFrSwSigEntry for a given frame relay access channel implies that frame relay signalling is disabled for the circuit. Note that the terms 'incoming' and 'outgoing' refer to the frame mode call with respect to the network side of the interface. The terminology used by CCITT Q.933/Q.931 is different. ")
wfFrSwSigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwSigCircuit"))
if mibBuilder.loadTexts: wfFrSwSigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigEntry.setDescription(' An entry in the Frame Relay signalling port information table. ')
wfFrSwSigDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDelete.setDescription(' Indication to delete this instance ')
wfFrSwSigCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigCircuit.setDescription(' The circuit number for this frame relay access channel ')
wfFrSwSigSvcDlciLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigSvcDlciLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigSvcDlciLow.setDescription(' Lowest DLCI to be used for SVC, the default value is for 2 octet frame header ')
wfFrSwSigSvcDlciHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(991)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigSvcDlciHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigSvcDlciHigh.setDescription(' Highest DLCI to be used for SVC, the default value is for 2 octet frame header. ')
wfFrSwSigDlciAssign = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("increment", 1), ("decrement", 2))).clone('decrement')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDlciAssign.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDlciAssign.setDescription(" Determines if DLCI's are assigned starting at wfFrSwSigSvcDlciHigh and working towards wfFrSwSigSvcDlciLow or vice versa. ")
wfFrSwSigMaxNumOfSvcs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxNumOfSvcs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxNumOfSvcs.setDescription(' Indicates the maximum number of simultaneous switched virtual circuits allowed on the logical line. ')
wfFrSwSigNumOfSvcsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigNumOfSvcsInUse.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigNumOfSvcsInUse.setDescription(' Indicates the number of switched virtual circuits in use on the logical line. ')
wfFrSwSigDefaultThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultThroughput.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing throughput fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultMinAcceptThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultMinAcceptThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultMinAcceptThroughput.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing minimum acceptable throughput fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigDefaultBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultBc.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing Bc fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultBe.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing Be fields when they are not included in the setup message by the user. ')
wfFrSwSigMaxInThroughputPerSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxInThroughputPerSvc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxInThroughputPerSvc.setDescription(' This is the maximum incoming throughput that any single SVC may negotiate for a call. Calls requesting in excess of this attribute are rejected. ')
wfFrSwSigMaxOutThroughputPerSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxOutThroughputPerSvc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxOutThroughputPerSvc.setDescription(' This is the maximum outgoing throughput that any single SVC may negotiate for a call. Calls requesting in excess of this attribute are rejected. ')
wfFrSwSigTotalInNegotiableThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigTotalInNegotiableThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalInNegotiableThroughput.setDescription(' This is the total maximum incoming throughput that is available for all frame mode calls on the port. If the sum of the incoming throughput requested by a call and wfFrSwSigTotalInCurrentThroughput is in excess of this value, the call is rejected. ')
wfFrSwSigTotalInCurrentThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigTotalInCurrentThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalInCurrentThroughput.setDescription(" This is the total incoming throughput that has been negotiated for use by all SVC's on the port. ")
wfFrSwSigTotalOutNegotiableThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigTotalOutNegotiableThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalOutNegotiableThroughput.setDescription(' This is the total maximum outgoing throughput that is available for all frame mode calls on the port. If the sum of the outgoing throughput requested by a call and wfFrSwSigTotalOutCurrentThroughput is in excess of this value, the call is rejected. ')
wfFrSwSigTotalOutCurrentThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigTotalOutCurrentThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalOutCurrentThroughput.setDescription(" This is the total incoming throughput that has been negotiated for use by all SVC's on the port. ")
wfFrSwSigXNetClearingDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigXNetClearingDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigXNetClearingDisable.setDescription(' If cross-net polling (wfFrSwDlcmiCrossNetEnable) is enabled on this interface, and the error threshold (wfFrSwDlcmiCrossNetErrorThreshold) is exceeded, the network can clear the call. ')
wfFrSwSigCallingPartyIEMandatory = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigCallingPartyIEMandatory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigCallingPartyIEMandatory.setDescription(' Reject the call if the Calling Party IE is absent in the setup message or if the provided Calling Party IE fails address authentication tests againt the configured address(es) on the ingress logical line.')
wfFrSwSigT301 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT301.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT301.setDescription(' Timer number: T301 default time-out: 3 min state of call: call initiated cause for start: incoming setup normal stop: outgoing connect at the first expiry: clear call at the second expiry: timer not restarted ')
wfFrSwSigT303 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT303.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT303.setDescription(' Timer number: T303 default time-out: 4 s state of call: call present cause for start: outgoing setup normal stop: incoming connect/call-proceeding/ release-complete at the first expiry: retransmit setup, restart T303 at the second expiry: clear call ')
wfFrSwSigT305 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 90)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT305.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT305.setDescription(' Timer number: T305 default time-out: 30 s state of call: disconnect ind cause for start: outgoing disconnect normal stop: incoming release/disconnect at the first expiry: outgoing release at the second expiry: timer not restarted ')
wfFrSwSigT308 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT308.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT308.setDescription(' Timer number: T308 default time-out: 4 s state of call: release req cause for start: outgoing release normal stop: incoming release/release-complete at the first expiry: retransmit release, restart T308 at the second expiry: place access channel in maintenance ')
wfFrSwSigT310 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT310.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT310.setDescription(' Timer number: T310 default time-out: 10 s state of call: incoming call proceeding cause for start: incoming call proceeding normal stop: incoming connect/disconnect at the first expiry: clear call at the second expiry: timer not restarted ')
wfFrSwSigT322 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT322.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT322.setDescription(' Timer number: T322 default time-out: 4 s state of call: any call state cause for start: outgoing status enquiry normal stop: incoming status/disconnect/ release/release-complete at the first expiry: retransmit status-enq, restart T322 at the second expiry: resend status enq and restart T322 ')
# -- Read-only Counter32 columns: per-message-type counts of INCOMING
# -- signaling packets (setup, call proceeding, connect, disconnect,
# -- release, release complete, status enquiry, status, unknown).
wfFrSwSigInSetupPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInSetupPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInSetupPkts.setDescription(' number of incoming setup packets ')
wfFrSwSigInCallProceedingPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInCallProceedingPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInCallProceedingPkts.setDescription(' number of incoming call proceeding packets ')
wfFrSwSigInConnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInConnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInConnectPkts.setDescription(' number of incoming connect packets ')
wfFrSwSigInDisconnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInDisconnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInDisconnectPkts.setDescription(' number of incoming disconnect packets ')
wfFrSwSigInReleasePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInReleasePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInReleasePkts.setDescription(' number of incoming release packets ')
wfFrSwSigInReleaseCompletePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInReleaseCompletePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInReleaseCompletePkts.setDescription(' number of incoming release complete packets ')
wfFrSwSigInStatusEnquiryPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInStatusEnquiryPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInStatusEnquiryPkts.setDescription(' number of incoming status enquiry packets ')
wfFrSwSigInStatusPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInStatusPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInStatusPkts.setDescription(' number of incoming status packets ')
wfFrSwSigInUnknownPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInUnknownPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInUnknownPkts.setDescription(' number of incoming unknown packets ')
# -- Read-only Counter32 columns: per-message-type counts of OUTGOING
# -- signaling packets (mirrors the incoming counters above).
wfFrSwSigOutSetupPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutSetupPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutSetupPkts.setDescription(' number of outgoing setup packets ')
wfFrSwSigOutCallProceedingPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutCallProceedingPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutCallProceedingPkts.setDescription(' number of outgoing call proceeding packets ')
wfFrSwSigOutConnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutConnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutConnectPkts.setDescription(' number of outgoing connect packets ')
wfFrSwSigOutDisconnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutDisconnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutDisconnectPkts.setDescription(' number of outgoing disconnect packets ')
wfFrSwSigOutReleasePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutReleasePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutReleasePkts.setDescription(' number of outgoing release packets ')
# Outgoing RELEASE COMPLETE counter. The original generated description read
# ' number of outgoing release packest ' — a typo ("packest") that also
# described the wrong message type (release vs. release complete); corrected
# to match the naming of the sibling counters (cf. wfFrSwSigInReleaseCompletePkts).
wfFrSwSigOutReleaseCompletePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutReleaseCompletePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutReleaseCompletePkts.setDescription(' number of outgoing release complete packets ')
# -- Remaining signaling columns: two more outgoing counters, call-failure
# -- counters, an L2-reset counter, two enable/disable switches controlling
# -- whether setup messages may carry the DLCI / X.213 Priority IEs, and the
# -- maximum excess burst (Be) allowed for an SVC connection.
wfFrSwSigOutStatusEnquiryPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutStatusEnquiryPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutStatusEnquiryPkts.setDescription(' number of outgoing status enquiry packets ')
wfFrSwSigOutStatusPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutStatusPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutStatusPkts.setDescription(' number of outgoing status packets ')
wfFrSwSigRejectedConnRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigRejectedConnRequests.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigRejectedConnRequests.setDescription(' number of connections rejected ')
wfFrSwSigNwrkAbortedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigNwrkAbortedConnections.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigNwrkAbortedConnections.setDescription(' number of connections aborted by network ')
wfFrSwSigL2Resets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigL2Resets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigL2Resets.setDescription(' number of L2 resets ')
wfFrSwSigDlciIEAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 46), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDlciIEAllowed.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDlciIEAllowed.setDescription(' Reject the call if the Dlci IE is present in the setup message and wfFrSwSigDlciIEAllowed is set to disabled.')
wfFrSwSigX213PriorityIEAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 47), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigX213PriorityIEAllowed.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigX213PriorityIEAllowed.setDescription(' Reject the call if the X213 Priority IE is present in setup message and wfFrSwSigX213PriorityIEAllowed is set to disabled.')
wfFrSwSigMaximumBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 48), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaximumBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaximumBe.setDescription('This value is the maximum allowed Be for a SVC connection')
# -- wfFrSwGlobalE164AddrTable: Directory Services mapping of non-overlapping
# -- E.164 address ranges to an internal IP network address. Indexed by the
# -- (low, high) ends of the range; rows are created/deleted via the Delete column.
wfFrSwGlobalE164AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13), )
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrTable.setDescription(' wfFrSwGlobalE164AddrTable is used by Directory Services to translate a range of E.164 addresses into an internal IP network address. E.164 ranges must not ever overlap. ')
wfFrSwGlobalE164AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalE164AddrLow"), (0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalE164AddrHigh"))
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrEntry.setDescription(' An entry in the Frame Relay Global E.164 Address Table. ')
wfFrSwGlobalE164AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrDelete.setDescription(' Indication to delete/create this entry. ')
wfFrSwGlobalE164AddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrLow.setDescription(' Instance identifier; the low end of the E.164 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalE164AddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrHigh.setDescription(' Instance identifier; the high end of the E.164 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalE164AddrIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrIPAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrIPAddr.setDescription(' This is the internal IP network address associated with this range of E.164 addresses. ')
# -- wfFrSwGlobalX121AddrTable: X.121 analogue of the global E.164 table above —
# -- non-overlapping X.121 address ranges mapped to an internal IP network address.
wfFrSwGlobalX121AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14), )
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrTable.setDescription(' wfFrSwGlobalX121AddrTable is used by Directory Services to translate a range of X.121 addresses into an internal IP network address. X.121 ranges must not ever overlap. ')
wfFrSwGlobalX121AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalX121AddrLow"), (0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalX121AddrHigh"))
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrEntry.setDescription(' An entry in the Frame Relay Global X.121 Address Table. ')
wfFrSwGlobalX121AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrDelete.setDescription(' Indication to delete/create this entry. ')
wfFrSwGlobalX121AddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrLow.setDescription(' Instance identifier; the low end of the X.121 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalX121AddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrHigh.setDescription(' Instance identifier; the high end of the X.121 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalX121AddrIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrIPAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrIPAddr.setDescription(' This is the internal IP network address associated with this range of X.121 addresses. ')
# -- wfFrSwLocalE164AddrTable: E.164 addresses local to this BNX plus their
# -- Closed User Group (CUG) membership encoding and a local/non-local flag
# -- used for SPVCs. Indexed by (circuit number, E.164 address).
wfFrSwLocalE164AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15), )
if mibBuilder.loadTexts: wfFrSwLocalE164AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrTable.setDescription(' wfFrSwLocalE164AddrTable contains E.164 addresses on the local BNX and CUG (Closed User Group) related information. ')
wfFrSwLocalE164AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwLocalE164AddrCct"), (0, "Wellfleet-FRSW-MIB", "wfFrSwLocalE164Address"))
if mibBuilder.loadTexts: wfFrSwLocalE164AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrEntry.setDescription(' An entry in the Frame Relay Local E.164 Address Table. ')
wfFrSwLocalE164AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrDelete.setDescription(' Indication to delete/create this entry. ')
wfFrSwLocalE164AddrCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCct.setDescription(' Instance identifier; internal CCT number associated with this E.164 address. ')
wfFrSwLocalE164Address = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalE164Address.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164Address.setDescription(' Instance identifier; an E.164 address. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# CUG membership: zero or more (COI group number, COI bitmask list) structures
# packed into one OCTET STRING — full wire layout is given in the description.
wfFrSwLocalE164AddrCUG = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCUG.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCUG.setDescription(' CUG (Closed User Group) information. The structure of the CUG information consists zero or more groups (number of groups can be derived from the OCTET STRING data type of this MIB attribute) of COI structure information. Each COI structure consists of a COI group number (4-byte integer) and a COI list. Each COI list consists of a length field (4-byte integer) which specifies the number of bytes of COI bit-encoded information belonging to this group and the COI information structure. COI information structure is a bit mask field where each bit from left to right represents whether this E.164 address belongs to a particular COI number within this COI group number. Please note that COI group numbers can not be repeated and that the COI group numbers must be in increasing order in the CUG configuration MIB wfFrSwLocalE164AddrCUG defaults to zero COI groups which means no CUG related information and hence this local wfFrSwLocalE164Address is allowed to communicate with all users. ')
wfFrSwLocalE164AddrLocalFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("nonlocal", 2))).clone('local')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrLocalFlag.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrLocalFlag.setDescription(' Local/Non-Local Identifier Flag. Used for SPVCs.')
# -- wfFrSwLocalX121AddrTable: X.121 analogue of the local E.164 table above —
# -- local X.121 addresses with CUG encoding and a local/non-local SPVC flag.
wfFrSwLocalX121AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16), )
if mibBuilder.loadTexts: wfFrSwLocalX121AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrTable.setDescription(' wfFrSwLocalX121AddrTable contains X.121 addresses on the local BNX and CUG (Closed User Group) related information. ')
wfFrSwLocalX121AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwLocalX121AddrCct"), (0, "Wellfleet-FRSW-MIB", "wfFrSwLocalX121Address"))
if mibBuilder.loadTexts: wfFrSwLocalX121AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrEntry.setDescription(' An entry in the Frame Relay Local X.121 Address Table. ')
wfFrSwLocalX121AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrDelete.setDescription(' Indication to delete/create this entry. ')
wfFrSwLocalX121AddrCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCct.setDescription(' Instance identifier; internal CCT number associated with this X.121 address. ')
wfFrSwLocalX121Address = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalX121Address.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121Address.setDescription(' Instance identifier; a X.121 address. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# CUG membership: same packed (COI group number, COI bitmask list) OCTET STRING
# layout as wfFrSwLocalE164AddrCUG; see the description for the exact encoding.
wfFrSwLocalX121AddrCUG = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCUG.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCUG.setDescription(' CUG (Closed User Group) information. The structure of the CUG information consists zero or more groups (number of groups can be derived from the OCTET STRING data type of this MIB attribute) of COI structure information. Each COI structure consists of a COI group number (4-byte integer) and a COI list. Each COI list consists of a length field (4-byte integer) which specifies the number of bytes of COI bit-encoded information belonging to this group and the COI information structure. COI information structure is a bit mask field where each bit from left to right represents whether this X.121 address belongs to a particular COI number within this COI group number. wfFrSwLocalX121AddrCUG defaults to zero COI groups which means no CUG related information and hence this local wfFrSwLocalX121Address is allowed to communicate with all users. ')
wfFrSwLocalX121AddrLocalFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("nonlocal", 2))).clone('local')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrLocalFlag.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrLocalFlag.setDescription(' Local/Non-Local Identifier Flag. Used for SPVCs.')
# -- wfFrSwBase group: box-wide scalars — create/delete of the base group,
# -- the BNX's circuit-less IP address, and a per-slot shutdown bit mask.
wfFrSwBase = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17))
wfFrSwBaseDelete = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseDelete.setDescription(' Indication to delete/create this base group ')
wfFrSwBaseIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseIpAddr.setDescription(" Indicates this BNX's (circuit-less) IP address ")
wfFrSwBaseShutDown = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 3), Counter32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseShutDown.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseShutDown.setDescription('Bit mask for slots to shutdown, slots 1-14. The MSBit represents slot 1, the next most significant bit represents slot 2, and so forth.')
# -- wfFrSwCngcMonTable: congestion monitoring per circuit. Indexed by the
# -- circuit number; a reset column plus a 4x4 matrix of read-only gauges
# -- (traffic priorities 0-3, congestion levels 1-4), each reporting the
# -- percentage of time the circuit spent at that congestion level.
wfFrSwCngcMonTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18), )
if mibBuilder.loadTexts: wfFrSwCngcMonTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonTable.setDescription('This table is used by FRSW Congestion Control application. The table is used to Monitor the congestion level of a particular circuit.')
wfFrSwCngcMonEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwCngcMonCct"))
if mibBuilder.loadTexts: wfFrSwCngcMonEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonEntry.setDescription('Instance Id for this table.')
wfFrSwCngcMonReset = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCngcMonReset.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonReset.setDescription('Indication to reset Cngc Monitor Counters.')
wfFrSwCngcMonCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonCct.setDescription('Circuit to be monitored. ')
# Priority 0 traffic, congestion levels 1-4 (column sub-ids 3-6).
wfFrSwCngcMonP0Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 0 Traffic.')
# Priority 1 traffic, congestion levels 1-4 (column sub-ids 7-10).
wfFrSwCngcMonP1Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 1 Traffic.')
# Priority 2 traffic, congestion levels 1-4 (column sub-ids 11-14).
wfFrSwCngcMonP2Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 2 Traffic.')
# Priority 3 traffic, congestion levels 1-4 (column sub-ids 15-18).
wfFrSwCngcMonP3Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 18), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 3 Traffic.')
# -- wfFrSwVirtualIntfTable: creation of 'virtual' FRSW access lines.
# -- Indexed by (slot, circuit); each row carries the line number assigned
# -- to the virtual interface.
wfFrSwVirtualIntfTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19), )
if mibBuilder.loadTexts: wfFrSwVirtualIntfTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfTable.setDescription("The table is used to create 'virtual' FRSW access lines.")
wfFrSwVirtualIntfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwVirtualIntfSlot"), (0, "Wellfleet-FRSW-MIB", "wfFrSwVirtualIntfCct"))
if mibBuilder.loadTexts: wfFrSwVirtualIntfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfEntry.setDescription('Instance Id for this table.')
wfFrSwVirtualIntfDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVirtualIntfDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfDelete.setDescription('Indication to delete this virtual interface.')
wfFrSwVirtualIntfSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfSlot.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfSlot.setDescription('Instance identifier; the slot number of this interface.')
wfFrSwVirtualIntfCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfCct.setDescription('Instance identifier; the circuit number of this interface.')
wfFrSwVirtualIntfLineNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfLineNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfLineNum.setDescription('Line number for this virtual interface.')
# -- wfFrSwExtFileSysTable: per-slot (1-14) extension of the file system into
# -- DRAM. Rows hold the requested size in bytes (0 = disabled; suggested to
# -- be a multiple of 128K), the size actually allocated, and the operational
# -- state (up/fault/init/notpresent).
wfFrSwExtFileSysTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20), )
if mibBuilder.loadTexts: wfFrSwExtFileSysTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysTable.setDescription('This table is used by FRSW to extend the file system to DRAM device.')
wfFrSwExtFileSysEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwExtFileSysSlot"))
if mibBuilder.loadTexts: wfFrSwExtFileSysEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysEntry.setDescription('Instance Id for this table.')
wfFrSwExtFileSysDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwExtFileSysDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysDelete.setDescription(' Indication to delete/create this entry. ')
wfFrSwExtFileSysSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysSlot.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysSlot.setDescription('A unique value for each slot. Its value ranges between 1 and 14.')
wfFrSwExtFileSysSize = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwExtFileSysSize.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysSize.setDescription('The memory size of the extended file system in byte unit. The value zero also means that extended file system is disabled. Non-zero value means enabled. Its suggested that the size is in multiple of 128k bytes. Some of the well-known memory sizes and their correspond decimal values are as followed: Mem size Decimal Value ^^^^^^^^ ^^^^^^^^^^^^^ 128K 131072 256K 262144 512K 524288 1M 1048576 2M 2097152 4M 4194304 8M 8388608 ')
wfFrSwExtFileSysActualSize = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysActualSize.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysActualSize.setDescription('The actual memory size the system allocated.')
wfFrSwExtFileSysState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("fault", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysState.setDescription('The status of the extended file system. State up indicates that the requested memory size for the extended file system has been allocated successfully and the extended file system is in operational state. State fault indicates that the requested memory size for the extended file system has NOT been allocated successfully and the extended file system is NOT in operational state. One reason for entering the fault state is insufficient available memory. State init indicates that the system is in the initialization cycle. The extended file system is not operational. State notpresent reflects the size of zero.')
# Export every MIB object defined in this module so other compiled MIB
# modules can resolve them by name via the shared MibBuilder.  The list is
# auto-generated; keyword names must match the object names above.  (Split
# into two calls by the generator; both target "Wellfleet-FRSW-MIB".)
mibBuilder.exportSymbols("Wellfleet-FRSW-MIB", wfFrSwCctLocalSetFECNFrames=wfFrSwCctLocalSetFECNFrames, wfFrSwDlcmiEscapeMode=wfFrSwDlcmiEscapeMode, wfFrSwVcRedirectAction=wfFrSwVcRedirectAction, wfFrSwSigOutDisconnectPkts=wfFrSwSigOutDisconnectPkts, wfFrSwCctLocalSetDEFrames=wfFrSwCctLocalSetDEFrames, wfFrSwSigOutStatusPkts=wfFrSwSigOutStatusPkts, wfFrSwSigTotalInCurrentThroughput=wfFrSwSigTotalInCurrentThroughput, wfFrSwIsdnScrnTable=wfFrSwIsdnScrnTable, wfFrSwVcAtmIwfLossPriorityPolicy=wfFrSwVcAtmIwfLossPriorityPolicy, wfFrSwSigT303=wfFrSwSigT303, wfFrSwUsageCurVolumeBackup=wfFrSwUsageCurVolumeBackup, wfFrSwVcInactiveVcDropFrames=wfFrSwVcInactiveVcDropFrames, wfFrSwL3NetAddress=wfFrSwL3NetAddress, wfFrSwSigInConnectPkts=wfFrSwSigInConnectPkts, wfFrSwLocalE164AddrDelete=wfFrSwLocalE164AddrDelete, wfFrSwUsageUpdateData=wfFrSwUsageUpdateData, wfFrSwExtFileSysDelete=wfFrSwExtFileSysDelete, wfFrSwDlcmiActiveSeqCount=wfFrSwDlcmiActiveSeqCount, wfFrSwUsageCircuitNumber=wfFrSwUsageCircuitNumber, wfFrSwUsageStartTimeStampHigh=wfFrSwUsageStartTimeStampHigh, wfFrSwVcRecvDeOctets=wfFrSwVcRecvDeOctets, wfFrSwIsdnUniDelete=wfFrSwIsdnUniDelete, wfFrSwCngcMonP0Level1Percent=wfFrSwCngcMonP0Level1Percent, wfFrSwCctLocalRecvDEOctets=wfFrSwCctLocalRecvDEOctets, wfFrSwUsageVolumeBackup=wfFrSwUsageVolumeBackup, wfFrSwSigTotalInNegotiableThroughput=wfFrSwSigTotalInNegotiableThroughput, wfFrSwSigOutReleaseCompletePkts=wfFrSwSigOutReleaseCompletePkts, wfFrSwUsageSentDEOctetsHigh=wfFrSwUsageSentDEOctetsHigh, wfFrSwCctOutThroughput=wfFrSwCctOutThroughput, wfFrSwDlcmiDteLastReceived=wfFrSwDlcmiDteLastReceived, wfFrSwCctRemoteSentDEOctets=wfFrSwCctRemoteSentDEOctets, wfFrSwSvcUsageVersionId=wfFrSwSvcUsageVersionId, wfFrSwCctRemoteRecvNonDEOctets=wfFrSwCctRemoteRecvNonDEOctets, wfFrSwCctRemoteRecvDEOctets=wfFrSwCctRemoteRecvDEOctets, wfFrSwSvcUsageUpdateInterval=wfFrSwSvcUsageUpdateInterval, wfFrSwCctRemoteSentNonDEFrames=wfFrSwCctRemoteSentNonDEFrames, 
wfFrSwVcCallReqRetryTimer=wfFrSwVcCallReqRetryTimer, wfFrSwMcastDlci=wfFrSwMcastDlci, wfFrSwCctLocalBecnState=wfFrSwCctLocalBecnState, wfFrSwVcRecvBecnOctets=wfFrSwVcRecvBecnOctets, wfFrSwGlobalX121AddrDelete=wfFrSwGlobalX121AddrDelete, wfFrSwUsageVolume=wfFrSwUsageVolume, wfFrSwDlcmiCrossNetListenEnable=wfFrSwDlcmiCrossNetListenEnable, wfFrSwSvcUsageNumEntries=wfFrSwSvcUsageNumEntries, wfFrSwVcInBc=wfFrSwVcInBc, wfFrSwDlcmiDteErrorThreshold=wfFrSwDlcmiDteErrorThreshold, wfFrSwUsageState=wfFrSwUsageState, wfFrSwIsdnScrnNum=wfFrSwIsdnScrnNum, wfFrSwVcOutThroughput=wfFrSwVcOutThroughput, wfFrSwUsageStartTimeStampLow=wfFrSwUsageStartTimeStampLow, wfFrSwUsageCurDebug=wfFrSwUsageCurDebug, wfFrSwMcastIndividualDlci=wfFrSwMcastIndividualDlci, wfFrSwVcXNetReceived=wfFrSwVcXNetReceived, wfFrSwSvcUsageFlushData=wfFrSwSvcUsageFlushData, wfFrSwVcSetBecnOctets=wfFrSwVcSetBecnOctets, wfFrSwIsdnUniTable=wfFrSwIsdnUniTable, wfFrSwDlcmiCircuit=wfFrSwDlcmiCircuit, wfFrSwIsdnAssocNum=wfFrSwIsdnAssocNum, wfFrSwVcEscapeEnable=wfFrSwVcEscapeEnable, wfFrSwDlcmiDeletedVCs=wfFrSwDlcmiDeletedVCs, wfFrSwVcOutBe=wfFrSwVcOutBe, wfFrSwCctReceivedStatus=wfFrSwCctReceivedStatus, wfFrSwCctLocalRecvBECNFrames=wfFrSwCctLocalRecvBECNFrames, wfFrSwDlcmiAsyncUpdateEnable=wfFrSwDlcmiAsyncUpdateEnable, wfFrSwIsdnBaseSlotNum=wfFrSwIsdnBaseSlotNum, wfFrSwUsageFilePrefix=wfFrSwUsageFilePrefix, wfFrSwLocalE164AddrTable=wfFrSwLocalE164AddrTable, wfFrSwGlobalX121AddrTable=wfFrSwGlobalX121AddrTable, wfFrSwDlcmiCrossNetErrorThreshold=wfFrSwDlcmiCrossNetErrorThreshold, wfFrSwCctCreationTime=wfFrSwCctCreationTime, wfFrSwCctRemoteBecnState=wfFrSwCctRemoteBecnState, wfFrSwCctOutBe=wfFrSwCctOutBe, wfFrSwGlobalE164AddrLow=wfFrSwGlobalE164AddrLow, wfFrSwLocalX121AddrTable=wfFrSwLocalX121AddrTable, wfFrSwExtFileSysState=wfFrSwExtFileSysState, wfFrSwCctRemoteSetFECNFrames=wfFrSwCctRemoteSetFECNFrames, wfFrSwIsdnUniEntry=wfFrSwIsdnUniEntry, wfFrSwCctRemoteRecvFECNOctets=wfFrSwCctRemoteRecvFECNOctets, 
wfFrSwExtFileSysActualSize=wfFrSwExtFileSysActualSize, wfFrSwDlcmiDteFullEnquiryInterval=wfFrSwDlcmiDteFullEnquiryInterval, wfFrSwGlobalX121AddrLow=wfFrSwGlobalX121AddrLow, wfFrSwCctOutBc=wfFrSwCctOutBc, wfFrSwDlcmiDteReceived=wfFrSwDlcmiDteReceived, wfFrSwDlcmiSequenceCount=wfFrSwDlcmiSequenceCount, wfFrSwSigDlciIEAllowed=wfFrSwSigDlciIEAllowed, wfFrSwCctTable=wfFrSwCctTable, wfFrSwDlcmiVCsInUse=wfFrSwDlcmiVCsInUse, wfFrSwVcInactiveVcDropOctets=wfFrSwVcInactiveVcDropOctets, wfFrSwUsageEndTimeStampLow=wfFrSwUsageEndTimeStampLow, wfFrSwVcEntry=wfFrSwVcEntry, wfFrSwUsageCurCleanupInterval=wfFrSwUsageCurCleanupInterval, wfFrSwUsageEnable=wfFrSwUsageEnable, wfFrSwSvcUsageCurVolume=wfFrSwSvcUsageCurVolume, wfFrSwDlcmiTable=wfFrSwDlcmiTable, wfFrSwCctRemoteSentDEFrames=wfFrSwCctRemoteSentDEFrames, wfFrSwCctInThroughput=wfFrSwCctInThroughput, wfFrSwVcState=wfFrSwVcState, wfFrSwIsdnAssocIndex=wfFrSwIsdnAssocIndex, wfFrSwUsageSwitchName=wfFrSwUsageSwitchName, wfFrSwIsdnAssocEntry=wfFrSwIsdnAssocEntry, wfFrSwDlcmiState=wfFrSwDlcmiState, wfFrSwUsageTimerInterval=wfFrSwUsageTimerInterval, wfFrSwVcRecvNonDeFrames=wfFrSwVcRecvNonDeFrames, wfFrSwVcRecvFecnOctets=wfFrSwVcRecvFecnOctets, wfFrSwDlcmiDteStatus=wfFrSwDlcmiDteStatus, wfFrSwSvcUsageCurStoreInterval=wfFrSwSvcUsageCurStoreInterval, wfFrSwLocalX121AddrDelete=wfFrSwLocalX121AddrDelete, wfFrSwUsageStoreTimeStamp=wfFrSwUsageStoreTimeStamp, wfFrSwDlcmiManagementType=wfFrSwDlcmiManagementType, wfFrSwSigInStatusPkts=wfFrSwSigInStatusPkts, wfFrSwUsageLastNonDEFramesLow=wfFrSwUsageLastNonDEFramesLow, wfFrSwVcReceivedStatus=wfFrSwVcReceivedStatus, wfFrSwDlcmiControlByteDisable=wfFrSwDlcmiControlByteDisable, wfFrSwVcXNetSent=wfFrSwVcXNetSent, wfFrSwCngcMonP1Level1Percent=wfFrSwCngcMonP1Level1Percent, wfFrSwCngcMonP2Level2Percent=wfFrSwCngcMonP2Level2Percent, wfFrSwUsageUpdateTimeStamp=wfFrSwUsageUpdateTimeStamp, wfFrSwSigMaxNumOfSvcs=wfFrSwSigMaxNumOfSvcs, wfFrSwDlcmiAddressLen=wfFrSwDlcmiAddressLen, 
wfFrSwSigNwrkAbortedConnections=wfFrSwSigNwrkAbortedConnections, wfFrSwVcReportedStatus=wfFrSwVcReportedStatus, wfFrSwVirtualIntfLineNum=wfFrSwVirtualIntfLineNum, wfFrSwCngcMonTable=wfFrSwCngcMonTable, wfFrSwCctRemoteRecvBECNOctets=wfFrSwCctRemoteRecvBECNOctets, wfFrSwUsageSwitchId=wfFrSwUsageSwitchId, wfFrSwVcBecnState=wfFrSwVcBecnState, wfFrSwIsdnUniNum=wfFrSwIsdnUniNum, wfFrSwSvcUsageState=wfFrSwSvcUsageState, wfFrSwVcTxDeFrames=wfFrSwVcTxDeFrames, wfFrSwCctLocalSentDEOctets=wfFrSwCctLocalSentDEOctets, wfFrSwCctRemoteRecvFECNFrames=wfFrSwCctRemoteRecvFECNFrames, wfFrSwVcBackupCalledDlci=wfFrSwVcBackupCalledDlci, wfFrSwVcCallReqCalledDlci=wfFrSwVcCallReqCalledDlci, wfFrSwCctLocalRecvBECNOctets=wfFrSwCctLocalRecvBECNOctets, wfFrSwIsdnUniState=wfFrSwIsdnUniState, wfFrSwBcMeasurementInterval=wfFrSwBcMeasurementInterval, wfFrSwUsageStoreData=wfFrSwUsageStoreData, wfFrSwCctLocalRecvFECNFrames=wfFrSwCctLocalRecvFECNFrames, wfFrSwCctRemoteRecvBECNFrames=wfFrSwCctRemoteRecvBECNFrames, wfFrSwPvcUsageFileLayout=wfFrSwPvcUsageFileLayout, wfFrSwGlobalX121AddrHigh=wfFrSwGlobalX121AddrHigh, wfFrSwCngcMonP2Level4Percent=wfFrSwCngcMonP2Level4Percent, wfFrSwDlcmiBidirect=wfFrSwDlcmiBidirect, wfFrSwVcSetDeOctets=wfFrSwVcSetDeOctets, wfFrSwUsageSentDEOctetsLow=wfFrSwUsageSentDEOctetsLow, wfFrSwDlcmiUnknownIEErrors=wfFrSwDlcmiUnknownIEErrors, wfFrSwSigSvcDlciLow=wfFrSwSigSvcDlciLow, wfFrSwDlcmiSequenceErrors=wfFrSwDlcmiSequenceErrors, wfFrSwIsdnAssocSlotNum=wfFrSwIsdnAssocSlotNum, wfFrSwExtFileSysTable=wfFrSwExtFileSysTable, wfFrSwDlcmiControlByteErrors=wfFrSwDlcmiControlByteErrors, wfFrSwVirtualIntfSlot=wfFrSwVirtualIntfSlot, wfFrSwDlcmiStatus=wfFrSwDlcmiStatus, wfFrSwVcBackupCrossNetErrors=wfFrSwVcBackupCrossNetErrors, wfFrSwVirtualIntfEntry=wfFrSwVirtualIntfEntry, wfFrSwDlcmiPolls=wfFrSwDlcmiPolls, wfFrSwUsageDirectory=wfFrSwUsageDirectory, wfFrSwSvcUsageStoreTimeStamp=wfFrSwSvcUsageStoreTimeStamp, wfFrSwErrType=wfFrSwErrType, 
wfFrSwUsageLastNonDEOctetsHigh=wfFrSwUsageLastNonDEOctetsHigh, wfFrSwUsageCurFlushInterval=wfFrSwUsageCurFlushInterval, wfFrSwLocalX121Address=wfFrSwLocalX121Address, wfFrSwCctLocalSentNonDEFrames=wfFrSwCctLocalSentNonDEFrames, wfFrSwSigInDisconnectPkts=wfFrSwSigInDisconnectPkts, wfFrSwVcDropNonDeFrames=wfFrSwVcDropNonDeFrames, wfFrSwIsdnBaseDelete=wfFrSwIsdnBaseDelete, wfFrSwSigOutConnectPkts=wfFrSwSigOutConnectPkts, wfFrSwCngcMonP1Level2Percent=wfFrSwCngcMonP1Level2Percent, wfFrSwUsageUpdateInterval=wfFrSwUsageUpdateInterval, wfFrSwDlcmiCrossNetAsyncUpdateEnable=wfFrSwDlcmiCrossNetAsyncUpdateEnable, wfFrSwVcSetDeFrames=wfFrSwVcSetDeFrames, wfFrSwGlobalE164AddrDelete=wfFrSwGlobalE164AddrDelete, wfFrSwSigNumOfSvcsInUse=wfFrSwSigNumOfSvcsInUse, wfFrSwSigX213PriorityIEAllowed=wfFrSwSigX213PriorityIEAllowed, wfFrSwSvcUsageUpdateData=wfFrSwSvcUsageUpdateData, wfFrSwGlobalX121AddrIPAddr=wfFrSwGlobalX121AddrIPAddr, wfFrSwUsageSentDEFramesHigh=wfFrSwUsageSentDEFramesHigh, wfFrSwDlcmiBcMeasurementEnable=wfFrSwDlcmiBcMeasurementEnable, wfFrSwVcRecvDeFrames=wfFrSwVcRecvDeFrames, wfFrSwVcInBeOctets=wfFrSwVcInBeOctets, wfFrSwSigRejectedConnRequests=wfFrSwSigRejectedConnRequests, wfFrSwSvcUsageFilePrefix=wfFrSwSvcUsageFilePrefix, wfFrSwMcastIpAddr=wfFrSwMcastIpAddr, wfFrSwCngcMonP0Level2Percent=wfFrSwCngcMonP0Level2Percent, wfFrSwSvcUsageStoreInterval=wfFrSwSvcUsageStoreInterval, wfFrSwDlcmiCrossNetEnable=wfFrSwDlcmiCrossNetEnable, wfFrSwVcCalledDlci=wfFrSwVcCalledDlci, wfFrSwSigMaxInThroughputPerSvc=wfFrSwSigMaxInThroughputPerSvc, wfFrSwCctInBcOctets=wfFrSwCctInBcOctets, wfFrSwSigOutReleasePkts=wfFrSwSigOutReleasePkts, wfFrSwCctEntry=wfFrSwCctEntry, wfFrSwCngcMonP1Level3Percent=wfFrSwCngcMonP1Level3Percent, wfFrSwCctXNetSent=wfFrSwCctXNetSent, wfFrSwCctRemoteDropNonDEOctets=wfFrSwCctRemoteDropNonDEOctets, wfFrSwUsageCleanupTimeStamp=wfFrSwUsageCleanupTimeStamp, wfFrSwLocalE164AddrLocalFlag=wfFrSwLocalE164AddrLocalFlag, wfFrSwVirtualIntfCct=wfFrSwVirtualIntfCct, 
wfFrSwVcDropDeFrames=wfFrSwVcDropDeFrames, wfFrSwCctXNetReceived=wfFrSwCctXNetReceived, wfFrSwLocalE164AddrCUG=wfFrSwLocalE164AddrCUG, wfFrSwCctState=wfFrSwCctState, wfFrSwSvcUsageCurCleanupInterval=wfFrSwSvcUsageCurCleanupInterval, wfFrSwVcTable=wfFrSwVcTable, wfFrSwCctInactiveVCDropFrames=wfFrSwCctInactiveVCDropFrames, wfFrSwGlobalX121AddrEntry=wfFrSwGlobalX121AddrEntry, wfFrSwSvcUsageCurFilePrefix=wfFrSwSvcUsageCurFilePrefix, wfFrSwCngcMonEntry=wfFrSwCngcMonEntry, wfFrSwCctLocalDropNonDEFrames=wfFrSwCctLocalDropNonDEFrames, wfFrSwUsageFlushData=wfFrSwUsageFlushData, wfFrSwVirtualIntfDelete=wfFrSwVirtualIntfDelete, wfFrSwIsdnAssocScrnEnable=wfFrSwIsdnAssocScrnEnable, wfFrSwCngcMonP0Level4Percent=wfFrSwCngcMonP0Level4Percent, wfFrSwIsdnBaseTable=wfFrSwIsdnBaseTable, wfFrSwUsageDlci=wfFrSwUsageDlci, wfFrSwLocalX121AddrCct=wfFrSwLocalX121AddrCct, wfFrSwCctLocalSetDEOctets=wfFrSwCctLocalSetDEOctets, wfFrSwLocalE164AddrCct=wfFrSwLocalE164AddrCct, wfFrSwVcAtmIwfDePolicy=wfFrSwVcAtmIwfDePolicy, wfFrSwCctRemoteDropDEFrames=wfFrSwCctRemoteDropDEFrames, wfFrSwSvcUsageStoreData=wfFrSwSvcUsageStoreData, wfFrSwTupleDlciA=wfFrSwTupleDlciA, wfFrSwBaseShutDown=wfFrSwBaseShutDown, wfFrSwCctLastTimeChange=wfFrSwCctLastTimeChange, wfFrSwUsageTable=wfFrSwUsageTable, wfFrSwVcCreationTime=wfFrSwVcCreationTime, wfFrSwVcLastTimeChange=wfFrSwVcLastTimeChange, wfFrSwCctInBc=wfFrSwCctInBc, wfFrSwUsageCurDirectory=wfFrSwUsageCurDirectory, wfFrSwCctMulticast=wfFrSwCctMulticast, wfFrSwVcInBe=wfFrSwVcInBe, wfFrSwSigT301=wfFrSwSigT301, wfFrSwCctRemoteSentNonDEOctets=wfFrSwCctRemoteSentNonDEOctets, wfFrSwUsageFlushTimeStamp=wfFrSwUsageFlushTimeStamp, wfFrSwCctRemoteSetBECNOctets=wfFrSwCctRemoteSetBECNOctets, wfFrSwVcBackupCalledIpAddr=wfFrSwVcBackupCalledIpAddr, wfFrSwVcAtmIwfVPI=wfFrSwVcAtmIwfVPI, wfFrSwSigInReleaseCompletePkts=wfFrSwSigInReleaseCompletePkts, wfFrSwLocalX121AddrEntry=wfFrSwLocalX121AddrEntry, wfFrSwCctCrossNetStatus=wfFrSwCctCrossNetStatus, 
wfFrSwSvcUsageFileLayout=wfFrSwSvcUsageFileLayout, wfFrSwDlcmiFullStatusSeq=wfFrSwDlcmiFullStatusSeq, wfFrSwDlcmiSvcDisable=wfFrSwDlcmiSvcDisable, wfFrSwVcCallReqDlciSelectionType=wfFrSwVcCallReqDlciSelectionType, wfFrSwSigOutStatusEnquiryPkts=wfFrSwSigOutStatusEnquiryPkts, wfFrSwUsageSentNonDEFramesLow=wfFrSwUsageSentNonDEFramesLow, wfFrSwLocalE164AddrEntry=wfFrSwLocalE164AddrEntry, wfFrSwDlcmiDteSeqCount=wfFrSwDlcmiDteSeqCount, wfFrSwUsageFileCleanup=wfFrSwUsageFileCleanup, wfFrSwBaseDelete=wfFrSwBaseDelete, wfFrSwSvcUsageFlushInterval=wfFrSwSvcUsageFlushInterval, wfFrSwUsageLastDEFramesHigh=wfFrSwUsageLastDEFramesHigh, wfFrSwVcRedirectState=wfFrSwVcRedirectState, wfFrSwDlcmiAlarmTimer=wfFrSwDlcmiAlarmTimer, wfFrSwCctLocalOrRemoteConnection=wfFrSwCctLocalOrRemoteConnection)
mibBuilder.exportSymbols("Wellfleet-FRSW-MIB", wfFrSwSigOutCallProceedingPkts=wfFrSwSigOutCallProceedingPkts, wfFrSwDlcmiMaxSupportedVCs=wfFrSwDlcmiMaxSupportedVCs, wfFrSwDlcmiSpvcAgent=wfFrSwDlcmiSpvcAgent, wfFrSwCctRemoteDropNonDEFrames=wfFrSwCctRemoteDropNonDEFrames, wfFrSwVcCallReqMaxRetries=wfFrSwVcCallReqMaxRetries, wfFrSwSwitchHdrErrors=wfFrSwSwitchHdrErrors, wfFrSwDlcmiEscapeVcCount=wfFrSwDlcmiEscapeVcCount, wfFrSwVcCalledIpAddr=wfFrSwVcCalledIpAddr, wfFrSwUsageSentNonDEOctetsHigh=wfFrSwUsageSentNonDEOctetsHigh, wfFrSwBase=wfFrSwBase, wfFrSwDlcmiDtePolls=wfFrSwDlcmiDtePolls, wfFrSwCctRemoteSetBECNFrames=wfFrSwCctRemoteSetBECNFrames, wfFrSwVcTxNonDeOctets=wfFrSwVcTxNonDeOctets, wfFrSwDlcmiMcastNoBufferErrors=wfFrSwDlcmiMcastNoBufferErrors, wfFrSwSigCallingPartyIEMandatory=wfFrSwSigCallingPartyIEMandatory, wfFrSwUsageCurStoreInterval=wfFrSwUsageCurStoreInterval, wfFrSwDlcmiFrameTooLongErrors=wfFrSwDlcmiFrameTooLongErrors, wfFrSwSvcUsageFlushTimeStamp=wfFrSwSvcUsageFlushTimeStamp, wfFrSwCngcMonP1Level4Percent=wfFrSwCngcMonP1Level4Percent, wfFrSwVcBackupCrossNetStatus=wfFrSwVcBackupCrossNetStatus, wfFrSwSigXNetClearingDisable=wfFrSwSigXNetClearingDisable, wfFrSwSigTable=wfFrSwSigTable, wfFrSwCngcMonP3Level4Percent=wfFrSwCngcMonP3Level4Percent, wfFrSwCctLocalDropNonDEOctets=wfFrSwCctLocalDropNonDEOctets, wfFrSwCngcMonCct=wfFrSwCngcMonCct, wfFrSwVcDropExcessBurstFrames=wfFrSwVcDropExcessBurstFrames, wfFrSwUsageNumEntries=wfFrSwUsageNumEntries, wfFrSwTupleIpAddrB=wfFrSwTupleIpAddrB, wfFrSwSvcUsageUpdateTimeStamp=wfFrSwSvcUsageUpdateTimeStamp, wfFrSwSvcUsageCurUpdateInterval=wfFrSwSvcUsageCurUpdateInterval, wfFrSwDlcmiNniEnable=wfFrSwDlcmiNniEnable, wfFrSwSigDefaultMinAcceptThroughput=wfFrSwSigDefaultMinAcceptThroughput, wfFrSwUsageEntry=wfFrSwUsageEntry, wfFrSwCngcMonP2Level3Percent=wfFrSwCngcMonP2Level3Percent, wfFrSwCctDlci=wfFrSwCctDlci, wfFrSwUsageLastDEFramesLow=wfFrSwUsageLastDEFramesLow, wfFrSwSigDelete=wfFrSwSigDelete, 
wfFrSwUsageCurVolume=wfFrSwUsageCurVolume, wfFrSwCngcMonP3Level3Percent=wfFrSwCngcMonP3Level3Percent, wfFrSwSigInReleasePkts=wfFrSwSigInReleasePkts, wfFrSwCctReportedStatus=wfFrSwCctReportedStatus, wfFrSwDlcmiSvcBillingEnable=wfFrSwDlcmiSvcBillingEnable, wfFrSwDlcmiMonitoredEvents=wfFrSwDlcmiMonitoredEvents, wfFrSwVcCallReqCalledAddr=wfFrSwVcCallReqCalledAddr, wfFrSwSigT308=wfFrSwSigT308, wfFrSwVcCircuit=wfFrSwVcCircuit, wfFrSwBaseIpAddr=wfFrSwBaseIpAddr, wfFrSwVcDlci=wfFrSwVcDlci, wfFrSwDlcmiPollingInterval=wfFrSwDlcmiPollingInterval, wfFrSwGlobalE164AddrTable=wfFrSwGlobalE164AddrTable, wfFrSwCngcMonP3Level1Percent=wfFrSwCngcMonP3Level1Percent, wfFrSwUsageCurFilePrefix=wfFrSwUsageCurFilePrefix, wfFrSwCctLocalDropDEOctets=wfFrSwCctLocalDropDEOctets, wfFrSwUsageLocalTimeZone=wfFrSwUsageLocalTimeZone, wfFrSwVcOutBc=wfFrSwVcOutBc, wfFrSwVcAtmIwfVCI=wfFrSwVcAtmIwfVCI, wfFrSwVcCfgInBe=wfFrSwVcCfgInBe, wfFrSwVcDropNonDeOctets=wfFrSwVcDropNonDeOctets, wfFrSwVcInBcOctets=wfFrSwVcInBcOctets, wfFrSwSigCircuit=wfFrSwSigCircuit, wfFrSwVcRecentNonDeOctets=wfFrSwVcRecentNonDeOctets, wfFrSwVcCrossNetStatus=wfFrSwVcCrossNetStatus, wfFrSwTupleEntry=wfFrSwTupleEntry, wfFrSwExtFileSysSlot=wfFrSwExtFileSysSlot, wfFrSwSvcUsageCurDirectory=wfFrSwSvcUsageCurDirectory, wfFrSwUsage=wfFrSwUsage, wfFrSwTupleDlciB=wfFrSwTupleDlciB, wfFrSwUsageDebug=wfFrSwUsageDebug, wfFrSwLocalX121AddrCUG=wfFrSwLocalX121AddrCUG, wfFrSwIsdnUniIndex=wfFrSwIsdnUniIndex, wfFrSwCctLocalSentDEFrames=wfFrSwCctLocalSentDEFrames, wfFrSwSvcUsageDirectory=wfFrSwSvcUsageDirectory, wfFrSwDlcmiErrorThreshold=wfFrSwDlcmiErrorThreshold, wfFrSwDlcmiFormatErrors=wfFrSwDlcmiFormatErrors, wfFrSwDlcmiDtePollingInterval=wfFrSwDlcmiDtePollingInterval, wfFrSwCctLocalRecvNonDEOctets=wfFrSwCctLocalRecvNonDEOctets, wfFrSwSigMaxOutThroughputPerSvc=wfFrSwSigMaxOutThroughputPerSvc, wfFrSwVcInThroughput=wfFrSwVcInThroughput, wfFrSwCctXNetErrors=wfFrSwCctXNetErrors, wfFrSwMcastEntry=wfFrSwMcastEntry, wfFrSwCctStateSet=wfFrSwCctStateSet, 
wfFrSwCctLocalSetBECNOctets=wfFrSwCctLocalSetBECNOctets, wfFrSwCctLocalRecvNonDEFrames=wfFrSwCctLocalRecvNonDEFrames, wfFrSwVcTxDeOctets=wfFrSwVcTxDeOctets, wfFrSwSvcUsageCleanupInterval=wfFrSwSvcUsageCleanupInterval, wfFrSwUsageEndTimeStampHigh=wfFrSwUsageEndTimeStampHigh, wfFrSwSigDefaultBe=wfFrSwSigDefaultBe, wfFrSwVcSpvcCallState=wfFrSwVcSpvcCallState, wfFrSwVcDropExcessBurstOctets=wfFrSwVcDropExcessBurstOctets, wfFrSwGlobalE164AddrHigh=wfFrSwGlobalE164AddrHigh, wfFrSwTupleDelete=wfFrSwTupleDelete, wfFrSwCctRemoteDropDEOctets=wfFrSwCctRemoteDropDEOctets, wfFrSwSigOutSetupPkts=wfFrSwSigOutSetupPkts, wfFrSwIsdnScrnIndex=wfFrSwIsdnScrnIndex, wfFrSwCctRemoteRecvDEFrames=wfFrSwCctRemoteRecvDEFrames, wfFrSwTupleTable=wfFrSwTupleTable, wfFrSwUsageLastDEOctetsLow=wfFrSwUsageLastDEOctetsLow, wfFrSwIsdnBaseAssocType=wfFrSwIsdnBaseAssocType, wfFrSwLocalX121AddrLocalFlag=wfFrSwLocalX121AddrLocalFlag, wfFrSwVcDropDeOctets=wfFrSwVcDropDeOctets, wfFrSwCctDelete=wfFrSwCctDelete, wfFrSwDlcmiFullEnquiryInterval=wfFrSwDlcmiFullEnquiryInterval, wfFrSwIsdnScrnDelete=wfFrSwIsdnScrnDelete, wfFrSwIsdnBaseEntry=wfFrSwIsdnBaseEntry, wfFrSwVcAtmIwfEfciPolicy=wfFrSwVcAtmIwfEfciPolicy, wfFrSwVcStateSet=wfFrSwVcStateSet, wfFrSwDlcmiEntry=wfFrSwDlcmiEntry, wfFrSwVcTrfPriority=wfFrSwVcTrfPriority, wfFrSwDlcmiActiveReceived=wfFrSwDlcmiActiveReceived, wfFrSwDlcmiProtocolErrors=wfFrSwDlcmiProtocolErrors, wfFrSwSigDlciAssign=wfFrSwSigDlciAssign, wfFrSwExtFileSysSize=wfFrSwExtFileSysSize, wfFrSwSvcUsageInterimRecordEnable=wfFrSwSvcUsageInterimRecordEnable, wfFrSwDlcmiNewVCs=wfFrSwDlcmiNewVCs, wfFrSwUsageLastNonDEOctetsLow=wfFrSwUsageLastNonDEOctetsLow, wfFrSwDlcmiDelete=wfFrSwDlcmiDelete, wfFrSwUsageCurUpdateInterval=wfFrSwUsageCurUpdateInterval, wfFrSwCngcMonP0Level3Percent=wfFrSwCngcMonP0Level3Percent, wfFrSwVcSetBecnFrames=wfFrSwVcSetBecnFrames, wfFrSwUsageRemoteDlci=wfFrSwUsageRemoteDlci, wfFrSwUsageCurTimerInterval=wfFrSwUsageCurTimerInterval, wfFrSwIsdnAssocDelete=wfFrSwIsdnAssocDelete, 
wfFrSwSigTotalOutCurrentThroughput=wfFrSwSigTotalOutCurrentThroughput, wfFrSwDlcmiIwfMode=wfFrSwDlcmiIwfMode, wfFrSwSigDefaultBc=wfFrSwSigDefaultBc, wfFrSwDlcmiRecoveryCounts=wfFrSwDlcmiRecoveryCounts, wfFrSwUsageLastDEOctetsHigh=wfFrSwUsageLastDEOctetsHigh, wfFrSwVcSetFecnOctets=wfFrSwVcSetFecnOctets, wfFrSwVcDelete=wfFrSwVcDelete, wfFrSwVcRecvBecnFrames=wfFrSwVcRecvBecnFrames, wfFrSwExtFileSysEntry=wfFrSwExtFileSysEntry, wfFrSwCngcMonReset=wfFrSwCngcMonReset, wfFrSwSigMaximumBe=wfFrSwSigMaximumBe, wfFrSwSigT305=wfFrSwSigT305, wfFrSwSvcUsageEnable=wfFrSwSvcUsageEnable, wfFrSwSigT322=wfFrSwSigT322, wfFrSwSvcUsageVolume=wfFrSwSvcUsageVolume, wfFrSwDlcmiIllegalDlciErrors=wfFrSwDlcmiIllegalDlciErrors, wfFrSwIsdnAssocTable=wfFrSwIsdnAssocTable, wfFrSwCctRemoteRecvNonDEFrames=wfFrSwCctRemoteRecvNonDEFrames, wfFrSwDlcmiCrossNetPollingInterval=wfFrSwDlcmiCrossNetPollingInterval, wfFrSwLocalE164Address=wfFrSwLocalE164Address, wfFrSwUsageStoreInterval=wfFrSwUsageStoreInterval, wfFrSwSigInSetupPkts=wfFrSwSigInSetupPkts, wfFrSwUsageSentNonDEOctetsLow=wfFrSwUsageSentNonDEOctetsLow, wfFrSwSigSvcDlciHigh=wfFrSwSigSvcDlciHigh, wfFrSwDlcmiL2AddrType=wfFrSwDlcmiL2AddrType, wfFrSwMcastIndex=wfFrSwMcastIndex, wfFrSwDlcmiUnknownRPTErrors=wfFrSwDlcmiUnknownRPTErrors, wfFrSwUsageSentNonDEFramesHigh=wfFrSwUsageSentNonDEFramesHigh, wfFrSwDlcmiLastReceived=wfFrSwDlcmiLastReceived, wfFrSwCctLocalSentNonDEOctets=wfFrSwCctLocalSentNonDEOctets, wfFrSwSigInCallProceedingPkts=wfFrSwSigInCallProceedingPkts, wfFrSwSvcUsageFileCleanup=wfFrSwSvcUsageFileCleanup, wfFrSwSigEntry=wfFrSwSigEntry, wfFrSwMcastTable=wfFrSwMcastTable, wfFrSwSigL2Resets=wfFrSwSigL2Resets, wfFrSwDlcmiOtherErrors=wfFrSwDlcmiOtherErrors, wfFrSwErrTime=wfFrSwErrTime, wfFrSwUsageFlushInterval=wfFrSwUsageFlushInterval, wfFrSwVcTxNonDeFrames=wfFrSwVcTxNonDeFrames, wfFrSwUsageCleanupInterval=wfFrSwUsageCleanupInterval, wfFrSwIsdnScrnEntry=wfFrSwIsdnScrnEntry, wfFrSwUsageRemoteIPAddress=wfFrSwUsageRemoteIPAddress, 
wfFrSwSigInStatusEnquiryPkts=wfFrSwSigInStatusEnquiryPkts, wfFrSwVirtualIntfTable=wfFrSwVirtualIntfTable, wfFrSwCngcMonP3Level2Percent=wfFrSwCngcMonP3Level2Percent, wfFrSwUsageLastNonDEFramesHigh=wfFrSwUsageLastNonDEFramesHigh, wfFrSwCctLocalSetFECNOctets=wfFrSwCctLocalSetFECNOctets, wfFrSwVcAtmIwfMode=wfFrSwVcAtmIwfMode, wfFrSwVcRedirectType=wfFrSwVcRedirectType, wfFrSwSigT310=wfFrSwSigT310, wfFrSwCctLocalRecvFECNOctets=wfFrSwCctLocalRecvFECNOctets, wfFrSwGlobalE164AddrIPAddr=wfFrSwGlobalE164AddrIPAddr, wfFrSwDlcmiFrameTooShortErrors=wfFrSwDlcmiFrameTooShortErrors, wfFrSwVcMulticast=wfFrSwVcMulticast, wfFrSwUsageIPAddress=wfFrSwUsageIPAddress, wfFrSwSigDefaultThroughput=wfFrSwSigDefaultThroughput, wfFrSwCctLocalRecentNonDEOctets=wfFrSwCctLocalRecentNonDEOctets, wfFrSwUsageSentDEFramesLow=wfFrSwUsageSentDEFramesLow, wfFrSwSvcUsageCleanupTimeStamp=wfFrSwSvcUsageCleanupTimeStamp, wfFrSwSigInUnknownPkts=wfFrSwSigInUnknownPkts, wfFrSwCctInactiveVCDropOctets=wfFrSwCctInactiveVCDropOctets, wfFrSwDlcmiEscapeCircuit=wfFrSwDlcmiEscapeCircuit, wfFrSwUsageDelete=wfFrSwUsageDelete, wfFrSwCctNumber=wfFrSwCctNumber, wfFrSwMcastDelete=wfFrSwMcastDelete, wfFrSwSigTotalOutNegotiableThroughput=wfFrSwSigTotalOutNegotiableThroughput, wfFrSwVcRecvNonDeOctets=wfFrSwVcRecvNonDeOctets, wfFrSwCngcMonP2Level1Percent=wfFrSwCngcMonP2Level1Percent, wfFrSwVcRecvFecnFrames=wfFrSwVcRecvFecnFrames, wfFrSwCctInBe=wfFrSwCctInBe, wfFrSwCctLocalDropDEFrames=wfFrSwCctLocalDropDEFrames, wfFrSwCctLocalSetBECNFrames=wfFrSwCctLocalSetBECNFrames, wfFrSwDlcmiUnknownDlciErrors=wfFrSwDlcmiUnknownDlciErrors, wfFrSwCctLocalRecvDEFrames=wfFrSwCctLocalRecvDEFrames, wfFrSwSvcUsageCurFlushInterval=wfFrSwSvcUsageCurFlushInterval, wfFrSwGlobalE164AddrEntry=wfFrSwGlobalE164AddrEntry, wfFrSwErrData=wfFrSwErrData, wfFrSwVcSetFecnFrames=wfFrSwVcSetFecnFrames, wfFrSwDlcmiCallAccDlciSelectionType=wfFrSwDlcmiCallAccDlciSelectionType, wfFrSwTupleIpAddrA=wfFrSwTupleIpAddrA, 
wfFrSwCctRemoteSetFECNOctets=wfFrSwCctRemoteSetFECNOctets, wfFrSwVcXNetErrors=wfFrSwVcXNetErrors)
| 156.006489 | 12,584 | 0.795407 |
# --- MIB-module symbol imports (auto-generated pysnmp boilerplate) ---
# These pull ASN.1 base types, constraint classes, SMIv2 node classes, and
# textual conventions from other compiled MIB modules via the MibBuilder.
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, Counter32, IpAddress, Counter64, Bits, ModuleIdentity, MibIdentifier, TimeTicks, Unsigned32, iso, NotificationType, ObjectIdentity, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "Counter32", "IpAddress", "Counter64", "Bits", "ModuleIdentity", "MibIdentifier", "TimeTicks", "Unsigned32", "iso", "NotificationType", "ObjectIdentity", "Gauge32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Vendor-specific anchor node for this MIB's object identifiers.
wfFrswGroup, = mibBuilder.importSymbols("Wellfleet-COMMON-MIB", "wfFrswGroup")
wfFrSwDlcmiTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1), )
if mibBuilder.loadTexts: wfFrSwDlcmiTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiTable.setDescription('The Parameters for the Data Link Connection Management Interface corresponding to any interface. Incorporates the Error table.')
wfFrSwDlcmiEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwDlcmiCircuit"))
if mibBuilder.loadTexts: wfFrSwDlcmiEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEntry.setDescription('The parameters for a particular Data Link Connection Management Interface.')
wfFrSwDlcmiDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDelete.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3))).clone('init')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiState.setDescription('Indicates which state of DLCMI the interface is in')
wfFrSwDlcmiNniEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiNniEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiNniEnable.setDescription('Indicates whether a NNI is enabled for this entry.')
wfFrSwDlcmiCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCircuit.setDescription('Instance identifier; the circuit number of this entry.')
wfFrSwDlcmiManagementType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("none", 1), ("lmi", 2), ("t1617d", 3), ("t1617b", 4), ("annexa", 5), ("lmiswitch", 6), ("annexdswitch", 7), ("annexaswitch", 8), ("iwfoamenabled", 9), ("iwfoamdisabled", 10))).clone('t1617d')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiManagementType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiManagementType.setDescription('Indicates the Data Link Connection Management scheme that is active.')
wfFrSwL3NetAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwL3NetAddress.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwL3NetAddress.setDescription('Indicates level 3 (IP) address of this frame relay interface')
wfFrSwDlcmiAddressLen = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4))).clone(namedValues=NamedValues(("twobyte", 2), ("threebyte", 3), ("fourbyte", 4))).clone('twobyte')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiAddressLen.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAddressLen.setDescription('Indicates the address length, including the control portion.')
wfFrSwDlcmiControlByteDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteDisable.setDescription('Indicates inclusion of control byte in q922 format.')
wfFrSwDlcmiPollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(15)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiPollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiPollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiFullEnquiryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiFullEnquiryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFullEnquiryInterval.setDescription('Indicates the number of status enquiries before a full status enquiry. (For bidirectional procedures.)')
# --- Wellfleet-FRSW-MIB: wfFrSwDlcmi table, columns 11-42 ---
# Auto-generated pysnmp MIB objects (MibTableColumn under OID
# 1.3.6.1.4.1.18.3.5.9.6.1.1.N). Each column is a module-level binding;
# the `if mibBuilder.loadTexts:` guards attach STATUS/DESCRIPTION text
# only when the MIB builder was configured to load texts.
# NOTE(review): description wording (including typos) appears to be copied
# verbatim from the vendor MIB — presumably fixes belong in the ASN.1
# source, not in this generated file; confirm before editing strings here.
# -- Error-monitoring / recovery configuration (columns 11-13) --
wfFrSwDlcmiErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 11), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiErrorThreshold.setDescription('Indicates the number errors monitored before declaring the interface down.')
wfFrSwDlcmiMonitoredEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 12), Integer32().clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiMonitoredEvents.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMonitoredEvents.setDescription('Indicates the events over which error threshold is kept.')
wfFrSwDlcmiRecoveryCounts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiRecoveryCounts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiRecoveryCounts.setDescription('Indicates the number of correct polling cycles during recovery.')
# -- VC capacity and usage (columns 14-15) --
wfFrSwDlcmiMaxSupportedVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1024)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiMaxSupportedVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMaxSupportedVCs.setDescription('Indicates the maximum number of VCs allowed.')
wfFrSwDlcmiVCsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiVCsInUse.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiVCsInUse.setDescription('Indicates the number of VCs that are currently configured on this interface.')
# -- Link-management protocol state & counters (columns 16-22) --
wfFrSwSwitchHdrErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSwitchHdrErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSwitchHdrErrors.setDescription('Indicates the number of frames dropped because they were received on the remote side with an invalid switch header.')
wfFrSwDlcmiSequenceCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceCount.setDescription("Indicates this switch's sequence counter; value of next to send.")
wfFrSwDlcmiLastReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiLastReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiLastReceived.setDescription('Indicates the sequence number just received from the end station.')
wfFrSwDlcmiActiveSeqCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiActiveSeqCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiActiveSeqCount.setDescription("Indicates the switch's sequence counter for sending status enquiry. (For bidirectional procedures.)")
wfFrSwDlcmiActiveReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiActiveReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiActiveReceived.setDescription('Indicates the sequence number just received from the enquiring station. (For bidirectional procedures.)')
wfFrSwDlcmiPolls = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiPolls.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiPolls.setDescription('This is the counter of where we are in the polling cycle.')
wfFrSwDlcmiAlarmTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiAlarmTimer.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAlarmTimer.setDescription('Counter of 1/2 second timeouts. Indicates when to expect poll.')
# -- Last-error record (columns 23-25) --
wfFrSwErrType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("reset", 1), ("other", 2), ("short", 3), ("long", 4), ("illegaldlci", 5), ("unknowndlci", 6), ("protoerr", 7), ("unknownie", 8), ("sequenceerr", 9), ("unknownrpt", 10), ("byteerr", 11), ("hdrerr", 12), ("formaterr", 13))).clone('reset')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrType.setDescription('Indicates the type of the last specific monitored error.')
wfFrSwErrData = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 24), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrData.setDescription('Contains as much of the error packet as possible.')
wfFrSwErrTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 25), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwErrTime.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwErrTime.setDescription('Indicates the time the last error occurred.')
# -- Committed-burst measurement window (column 26) --
wfFrSwBcMeasurementInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 2000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBcMeasurementInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBcMeasurementInterval.setDescription('Indicates the Committed Burst sample window interval in msec')
# -- Per-cause frame-drop counters (columns 27-38) --
wfFrSwDlcmiMcastNoBufferErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiMcastNoBufferErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiMcastNoBufferErrors.setDescription('Indicates the number of times a multicast failed partially or wholly because there are insufficient buffers available to create multiple copies of a multicast frame')
wfFrSwDlcmiFrameTooShortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooShortErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooShortErrors.setDescription('Indicates the number of frames dropped that are too short to be accepted.')
wfFrSwDlcmiFrameTooLongErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooLongErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFrameTooLongErrors.setDescription('Indicates the number of frames dropped that are too long to be accepted.')
wfFrSwDlcmiIllegalDlciErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiIllegalDlciErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiIllegalDlciErrors.setDescription('Indicates the number of frames dropped that had an invalid DLCI value.')
wfFrSwDlcmiUnknownDlciErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownDlciErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownDlciErrors.setDescription('Indicates the number of frames dropped which had an unknown DLCI value.')
wfFrSwDlcmiProtocolErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiProtocolErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiProtocolErrors.setDescription('Indicates the number of frames dropped because of a DLCMI protocol violation.')
wfFrSwDlcmiUnknownIEErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownIEErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownIEErrors.setDescription('Indicates the number of frames dropped that had an unknown information element.')
wfFrSwDlcmiSequenceErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSequenceErrors.setDescription('Indicates the number of frames dropped because of a DLCMI sequence error.')
wfFrSwDlcmiUnknownRPTErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownRPTErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiUnknownRPTErrors.setDescription('Indicates the number of frames dropped which had an unknown report type.')
wfFrSwDlcmiControlByteErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiControlByteErrors.setDescription('Indicates the number of frames dropped that had an unsupported control byte.')
wfFrSwDlcmiFormatErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFormatErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFormatErrors.setDescription('Indicates the number of frames dropped due to a frame format error.')
wfFrSwDlcmiOtherErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiOtherErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiOtherErrors.setDescription('Indicates the number of frames dropped due to unknown or other errors not counted by any error counter.')
# -- DLCMI run state & PVC status bookkeeping (columns 39-42) --
wfFrSwDlcmiStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 39), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("running", 1), ("recovered", 2), ("fault", 3), ("start", 4))).clone('start')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiStatus.setDescription('Indicates which state of execution the DLCMI gate is in')
wfFrSwDlcmiNewVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiNewVCs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiNewVCs.setDescription('Indicates the number of newly added PVCs that we have not yet told the CPE about, by means of a full-status message.')
wfFrSwDlcmiDeletedVCs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDeletedVCs.setStatus('mandatory')
# NOTE(review): "deletedly added" below is a typo in the vendor MIB text
# (presumably meant "deleted"); left as-is since this file is generated.
if mibBuilder.loadTexts: wfFrSwDlcmiDeletedVCs.setDescription('Indicates the number of deletedly added PVCs that we have not yet told the CPE about, by means of a full-status message.')
wfFrSwDlcmiFullStatusSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiFullStatusSeq.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiFullStatusSeq.setDescription('Indicates the expected sequence number for the next Status Enquiry message that will prove that the CPE received our last Full Status Message and knows about the deleted PVCs.')
# --- Wellfleet-FRSW-MIB: wfFrSwDlcmi table, columns 43-67 ---
# Auto-generated pysnmp MIB objects: bidirectional-procedure, cross-net,
# SVC, escape-mode, interworking and SPVC configuration columns.
# -- Bidirectional-procedure configuration & state (columns 43-51) --
# NOTE(review): the DESCRIPTION below ('Indication to delete this frame
# relay interface.') looks like a copy/paste error in the vendor MIB for
# an enable/disable column — confirm against the ASN.1 source; do not
# hand-edit the string here, this file is generated output.
wfFrSwDlcmiBidirect = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 43), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiBidirect.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiBidirect.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiDteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 44), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("running", 1), ("recovered", 2), ("fault", 3), ("start", 4))).clone('start')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteStatus.setDescription('Indicates which state of execution the DLCMI gate is in for bidirectional procedures.')
wfFrSwDlcmiDteSeqCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 45), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteSeqCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteSeqCount.setDescription("Indicates the switch's sequence counter for sending status enquiry. (For bidirectional procedures.)")
wfFrSwDlcmiDteReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 46), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteReceived.setDescription('Indicates the sequence number just received from the enquiring station. (For bidirectional procedures.)')
wfFrSwDlcmiDteLastReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 47), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDteLastReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteLastReceived.setDescription('Indicates the sequence number just received from the end station.')
wfFrSwDlcmiDtePolls = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwDlcmiDtePolls.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDtePolls.setDescription('This is the counter of where we are in the polling cycle.')
wfFrSwDlcmiDtePollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 49), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDtePollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDtePollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiDteFullEnquiryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 50), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDteFullEnquiryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteFullEnquiryInterval.setDescription('Indicates the number of status enquiries before a full status enquiry. (For bidirectional procedures.)')
wfFrSwDlcmiDteErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 51), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiDteErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiDteErrorThreshold.setDescription('Indicates the number errors monitored before declaring the interface down.')
# -- Cross-network polling / status-update options (columns 52-55) --
# NOTE(review): same suspect 'delete this frame relay interface'
# description on this enable/disable column — see note above.
wfFrSwDlcmiCrossNetEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 52), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetEnable.setDescription('Indication to delete this frame relay interface.')
wfFrSwDlcmiCrossNetPollingInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 53), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 86400)).clone(120)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetPollingInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetPollingInterval.setDescription('The number of seconds between successive status enquiry messages.')
wfFrSwDlcmiCrossNetErrorThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 54), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetErrorThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetErrorThreshold.setDescription('Indicates the number missed heartbeat polls before declaring the cross-net PVC inactive.')
wfFrSwDlcmiCrossNetAsyncUpdateEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetAsyncUpdateEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetAsyncUpdateEnable.setDescription('Indicates whether we are to send to the other end of the network, status updates for dlcis as soon as there is a change of status for the dlci.')
# -- Burst measurement, async updates, cross-net listen (columns 56-58) --
wfFrSwDlcmiBcMeasurementEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 56), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiBcMeasurementEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiBcMeasurementEnable.setDescription('Indicates whether Committed Burst Measurement is enabled for this interface. If this attribute is set to DISABLE then DE bit setting in Frame Relay frames at this interface is disabled.')
wfFrSwDlcmiAsyncUpdateEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 57), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiAsyncUpdateEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiAsyncUpdateEnable.setDescription('Indicates whether the link management entity should send an asynchronous single PVC update message when the state of a PVC is changed by a technician or by cross-net polling procedures. ')
wfFrSwDlcmiCrossNetListenEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 58), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetListenEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCrossNetListenEnable.setDescription("Indicates whether the link management entity should make a judgement of the PVC's status based on Cross Net updates.")
# -- SVC, addressing, escape-mode, interworking and SPVC (columns 59-67) --
wfFrSwDlcmiSvcDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 59), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSvcDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSvcDisable.setDescription(' Indicates whether SVC is enabled or disabled for this access channel. ')
wfFrSwDlcmiL2AddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("e164", 1), ("x121", 2))).clone('e164')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiL2AddrType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiL2AddrType.setDescription(' Indicates the address type supported on this access channel. This information is needed when wFrSwDlcmiSVCDisable is enabled. ')
wfFrSwDlcmiEscapeMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 61), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disabled", 1), ("ingress", 2), ("egress", 3))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeMode.setDescription(' Identifies the Escape mode (none, ingress or egress) to be used for PVCs with wfFrSwVcEscapeMode set to enabled.')
wfFrSwDlcmiEscapeCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 62), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeCircuit.setDescription('Identifies the FR-DTE circuit number corresponding to an Escape PVC. Applies only to PVCs with wfFrSwVcEscapeEnable set to enabled.')
wfFrSwDlcmiEscapeVcCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 63), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeVcCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiEscapeVcCount.setDescription(' The number of PVCs on this DLCMI that are configured as Escape VCs')
wfFrSwDlcmiIwfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 64), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("sdlc2frsw", 2))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiIwfMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiIwfMode.setDescription(' Identifies the interworking mode (none, SDLC-to-FRSW) to be used for PVCs with wfFrSwVcEscapeMode set to enabled.')
wfFrSwDlcmiSvcBillingEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSvcBillingEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSvcBillingEnable.setDescription('Indicates whether the SVC Billing on this access channel set to enable.')
wfFrSwDlcmiSpvcAgent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("cra", 2), ("caa", 3), ("craandcaa", 4))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiSpvcAgent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiSpvcAgent.setDescription(' Indicates if an SPVC Call Request Agent, Call Accept Agent, or both are enabled on this FRSW circuit.')
wfFrSwDlcmiCallAccDlciSelectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 1, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("any", 1), ("specific", 2))).clone('any')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwDlcmiCallAccDlciSelectionType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwDlcmiCallAccDlciSelectionType.setDescription('Indicates to the Call Accept Agent to accept SPVC Call Setup requests for any available DLCI or for a specific DLCI. Call Setup requests with the wrong selection type will be rejected.')
# --- Wellfleet-FRSW-MIB: wfFrSwCct (virtual circuit) table ---
# Auto-generated pysnmp objects under OID 1.3.6.1.4.1.18.3.5.9.6.2.
# Every object in this table carries setStatus('obsolete'): the table is
# retained for MIB completeness but is no longer the current definition.
# Rows are indexed by (wfFrSwCctNumber, wfFrSwCctDlci) per setIndexNames.
wfFrSwCctTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2), )
if mibBuilder.loadTexts: wfFrSwCctTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctTable.setDescription('Frame Relay Circuit table gives information about a virtual circuit.')
wfFrSwCctEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwCctNumber"), (0, "Wellfleet-FRSW-MIB", "wfFrSwCctDlci"))
if mibBuilder.loadTexts: wfFrSwCctEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctEntry.setDescription('An entry in the Frame Relay (Virtual) Circuit table.')
# -- Identity / state columns (1-5) --
wfFrSwCctDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2), ("system", 3))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctDelete.setDescription('Indication to delete this frame relay interface.')
wfFrSwCctNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctNumber.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctNumber.setDescription('Instance identifier; the circuit number of this interface.')
wfFrSwCctDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctDlci.setDescription('Instance identifier; this indicates the virtual circuit identifier')
wfFrSwCctState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 1), ("active", 2), ("inactive", 3), ("control", 4), ("user", 5))).clone('invalid')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctState.setDescription('Indicates whether the particular virtual circuit is operational.')
wfFrSwCctMulticast = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("multicast", 1), ("unicast", 2))).clone('unicast')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctMulticast.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctMulticast.setDescription('Indicates whether this dlci is used for multicast or single destination.')
# -- Traffic-contract parameters (columns 6-11) --
wfFrSwCctInBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInBc.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBc.setDescription('Indicates the Incoming Committed Burst bits for this virtual circuit.')
wfFrSwCctOutBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctOutBc.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutBc.setDescription('Indicates the Outgoing Committed Burst bits for this virtual circuit.')
wfFrSwCctInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctInBe.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBe.setDescription('Indicates the Incoming Excess Burst bits for this virtual circuit.')
wfFrSwCctOutBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctOutBe.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutBe.setDescription('Indicates the Outgoing Excess Burst bits for this virtual circuit.')
wfFrSwCctInThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctInThroughput.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInThroughput.setDescription('Indicates the incoming throughput in bits/sec for this virtual circuit.')
wfFrSwCctOutThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctOutThroughput.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctOutThroughput.setDescription('Indicates the outgoing throughput in bits/sec for this virtual circuit.')
# -- Timestamps (columns 12-13) --
wfFrSwCctCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 12), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctCreationTime.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctCreationTime.setDescription('Indicates the value of sysUpTime when the VC was created.')
wfFrSwCctLastTimeChange = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLastTimeChange.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLastTimeChange.setDescription('Indicates the value of sysUpTime when last there was a change in VC state.')
# -- Local-interface send/set/drop/receive counters (columns 14-31) --
wfFrSwCctLocalSentNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEFrames.setDescription('Indicates the number of frames without the DE bit sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentNonDEOctets.setDescription('Indicates the number of octets without DE bit sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEFrames.setDescription('Indicates the number of frames with DE bit set sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSentDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSentDEOctets.setDescription('Indicates the number of octets with DE bit set sent on this virtual circuit over the local interface.')
wfFrSwCctLocalSetFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the FECN bit .')
wfFrSwCctLocalSetFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetFECNOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the FECN bit.')
wfFrSwCctLocalSetBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the BECN bit.')
wfFrSwCctLocalSetBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetBECNOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the BECN bit.')
wfFrSwCctLocalSetDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEFrames.setDescription('Indicates the number of frames sent to the local interface on which this switch set the DE bit.')
wfFrSwCctLocalSetDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalSetDEOctets.setDescription('Indicates the number of octets in frames sent to the local interface on which this switch set the DE bit.')
wfFrSwCctLocalDropNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEFrames.setDescription('Indicates the number of frames received over the local interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctLocalDropNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropNonDEOctets.setDescription('Indicates the number of octets in frames received over the local interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctLocalDropDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEFrames.setDescription('Indicates the number of frames received over the local interface, having the DE bit set, which were discarded.')
wfFrSwCctLocalDropDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalDropDEOctets.setDescription('Indicates the number of octets in frames received over the local interface, having the DE bit set, which were discarded.')
wfFrSwCctInactiveVCDropFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropFrames.setDescription('Indicates how many frames were discarded because the virtual circuit was inactive.')
wfFrSwCctInactiveVCDropOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInactiveVCDropOctets.setDescription('Indicates how many Octets were discarded because the virtual circuit was inactive.')
wfFrSwCctLocalRecvNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEFrames.setDescription('Indicates the number of frames received on this virtual circuit over the local interface.')
wfFrSwCctLocalRecvNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvNonDEOctets.setDescription('Indicates the number of octets received on this virtual circuit over the local interface.')
wfFrSwCctLocalRecvDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEFrames.setDescription('Indicates the number of frames received over the local interface with the DE bit set.')
wfFrSwCctLocalRecvDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvDEOctets.setDescription('Indicates the number of octets in frames received over the local interface with the DE bit set.')
wfFrSwCctLocalRecvFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNFrames.setDescription('Indicates the number of frames received over the local interface with the FECN bit set.')
wfFrSwCctLocalRecvFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvFECNOctets.setDescription('Indicates the number of octets in frames received over the local interface with the FECN bit set.')
wfFrSwCctLocalRecvBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNFrames.setDescription('Indicates the number of frames received over the local interface with the BECN bit set.')
wfFrSwCctLocalRecvBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecvBECNOctets.setDescription('Indicates the number of octets in frames received over the local interface with the BECN bit set.')
wfFrSwCctLocalRecentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalRecentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalRecentNonDEOctets.setDescription('Indicates the number of octets received over the local interface during the most recent sampling period.')
wfFrSwCctRemoteSentNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEFrames.setDescription('Indicates the number of Non DE set frames sent over the remote interface.')
wfFrSwCctRemoteSentNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentNonDEOctets.setDescription('Indicates the number of Non DE set octets sent over the remote interface.')
wfFrSwCctRemoteSentDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEFrames.setDescription('Indicates the number of DE set frames sent over the remote interface.')
wfFrSwCctRemoteSentDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSentDEOctets.setDescription('Indicates the number of DE set octets sent over the remote interface.')
wfFrSwCctRemoteSetFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNFrames.setDescription('Indicates the number of frames sent to the remote interface on which this switch set the FECN bit.')
wfFrSwCctRemoteSetFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetFECNOctets.setDescription('Indicates the number of octets in frames sent to the remote interface that on which this switch set the FECN bit.')
wfFrSwCctRemoteSetBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNFrames.setDescription('Indicates the number of frames sent to the remote interface on which this switch set the BECN bit.')
wfFrSwCctRemoteSetBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteSetBECNOctets.setDescription('Indicates the number of octets in frames sent to the remote interface on which this switch set the BECN bit.')
wfFrSwCctRemoteDropNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEFrames.setDescription('Indicates the number of frames received over the remote interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctRemoteDropNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropNonDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwCctRemoteDropDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEFrames.setDescription('Indicates the number of frames received over the remote interface, having the DE bit set, which were discarded.')
wfFrSwCctRemoteDropDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteDropDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface, having the DE bit set, which were discarded.')
wfFrSwCctRemoteRecvNonDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEFrames.setDescription('Indicates the number of frames received on this virtual circuit over the remote interface.')
wfFrSwCctRemoteRecvNonDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvNonDEOctets.setDescription('Indicates the number of octets received on this virtual circuit over the remote interface.')
wfFrSwCctRemoteRecvDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 53), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEFrames.setDescription('Indicates the number of frames received over the remote interface with the DE bit set.')
wfFrSwCctRemoteRecvDEOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 54), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvDEOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the DE bit set.')
wfFrSwCctRemoteRecvFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 55), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNFrames.setDescription('Indicates the number of frames received over the remote interface with the FECN bit set.')
wfFrSwCctRemoteRecvFECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 56), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvFECNOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the FECN bit set.')
wfFrSwCctRemoteRecvBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 57), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNFrames.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNFrames.setDescription('Indicates the number of frames received over the remote interface with the BECN bit set.')
wfFrSwCctRemoteRecvBECNOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 58), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteRecvBECNOctets.setDescription('Indicates the number of octets in frames received over the remote interface with the BECN bit set.')
wfFrSwCctLocalBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 59), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalBecnState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalBecnState.setDescription('Indicates the local BECN state')
wfFrSwCctRemoteBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 60), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctRemoteBecnState.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctRemoteBecnState.setDescription('Indicates the remote BECN state')
wfFrSwCctLocalOrRemoteConnection = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 61), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("remote", 2))).clone('remote')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctLocalOrRemoteConnection.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctLocalOrRemoteConnection.setDescription('Indicates whether this connection is Local to Local Connection or Local to Remote connection.')
wfFrSwCctInBcOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 62), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctInBcOctets.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctInBcOctets.setDescription('Indicates the Incoming Committed Burst in octets for this virtual circuit.')
wfFrSwCctStateSet = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 63), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCctStateSet.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctStateSet.setDescription('User access for setting the state of a virtual circuit')
wfFrSwCctReportedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 64), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acked", 1), ("unacked", 2), ("unreported", 3))).clone('unreported')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctReportedStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctReportedStatus.setDescription('Record keeping for circuit status')
wfFrSwCctReceivedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctReceivedStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctReceivedStatus.setDescription('State of a virtual circuit as reported by the network at an NNI')
wfFrSwCctCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctCrossNetStatus.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctCrossNetStatus.setDescription('State of a virtual circuit as reported by the other end of the network under bidirectional signalling.')
wfFrSwCctXNetSent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("unsent", 1), ("sent", 2))).clone('unsent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetSent.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetSent.setDescription('Whether we have sent a cross net status message for this VC yet.')
wfFrSwCctXNetReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 68), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("recv", 1), ("unrecv", 2))).clone('unrecv')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetReceived.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetReceived.setDescription('Whether we have received a cross net status message for this VC during the current polling interval. ')
wfFrSwCctXNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 2, 1, 69), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCctXNetErrors.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwCctXNetErrors.setDescription('This is the count of the consecutive errors (usually timeouts) against this VC in cross-network heartbeat polling ')
# wfFrSwTupleTable (obsolete): identifies the two endpoints of a switched
# virtual circuit as a pair of (IP address, DLCI) tuples, labelled A and B.
# The four address/DLCI columns together form the conceptual row index.
wfFrSwTupleTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3), )
wfFrSwTupleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwTupleIpAddrA"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleDlciA"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleIpAddrB"), (0, "Wellfleet-FRSW-MIB", "wfFrSwTupleDlciB"))
wfFrSwTupleDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
wfFrSwTupleIpAddrA = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 2), IpAddress()).setMaxAccess("readonly")
wfFrSwTupleDlciA = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 3), Integer32()).setMaxAccess("readonly")
wfFrSwTupleIpAddrB = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 4), IpAddress()).setMaxAccess("readonly")
wfFrSwTupleDlciB = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts:
    # Attach status and description texts in one pass; the description strings
    # (typos included) are kept byte-identical to the compiled MIB source.
    for _tuple_node, _tuple_desc in (
        (wfFrSwTupleTable, 'The Parameters for the Tuple table, identifying the endpoints of virtual circuits as pairs of IP addresses and DLCI.'),
        (wfFrSwTupleEntry, 'The parameters for a particular Tuple.'),
        (wfFrSwTupleDelete, 'Indication to delete this tuple.'),
        (wfFrSwTupleIpAddrA, "Instance indentifier; indicates the IP address associated with endpoint 'A' of a virtual circuit."),
        (wfFrSwTupleDlciA, "Instance identfier; indicates the DLCI associated with endpoint 'A' of a virtual circuit."),
        (wfFrSwTupleIpAddrB, "Instance identfier; indicates the IP address associated with endpoint 'B' of a virtual circuit."),
        (wfFrSwTupleDlciB, "Instance identifier; Indicates the DLCI associated with endpoint 'B' of a virtual circuit."),
    ):
        _tuple_node.setStatus('obsolete')
        _tuple_node.setDescription(_tuple_desc)
# wfFrSwMcastTable (mandatory): associates a multicast DLCI defined on an
# interface (identified by its IP address) with each individual member DLCI.
wfFrSwMcastTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4), )
wfFrSwMcastEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwMcastIndex"))
wfFrSwMcastDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
wfFrSwMcastIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 2), Integer32()).setMaxAccess("readonly")
wfFrSwMcastIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 3), IpAddress()).setMaxAccess("readwrite")
wfFrSwMcastDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 4), Integer32()).setMaxAccess("readwrite")
wfFrSwMcastIndividualDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 4, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts:
    # Attach status and description texts in one pass; description strings are
    # byte-identical to the compiled MIB source.
    for _mcast_node, _mcast_desc in (
        (wfFrSwMcastTable, 'The list of multicast addresses'),
        (wfFrSwMcastEntry, 'The parameters for a particular Multicast address.'),
        (wfFrSwMcastDelete, 'Indication to delete this multicast instance.'),
        (wfFrSwMcastIndex, 'Index of this multicast DLCI instance'),
        (wfFrSwMcastIpAddr, 'IP address of the interface in which this multicast DLCI is defined.'),
        (wfFrSwMcastDlci, 'Identifies the multicast DLCI with which the IndividualDlci is associated.'),
        (wfFrSwMcastIndividualDlci, 'Indicates the DLCI associated with the above multicast DLCI.'),
    ):
        _mcast_node.setStatus('mandatory')
        _mcast_node.setDescription(_mcast_desc)
# --- wfFrSwUsage (mandatory): scalar group for Frame Relay billing/usage
# collection -- configuration, timer intervals, timestamps, manual triggers,
# and read-only mirrors of the configuration values actually in effect.
wfFrSwUsage = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5))
wfFrSwUsageEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageEnable.setDescription('Enable/Disable FRSW billing.')
wfFrSwUsageVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageVolume.setDescription("Indicates the file system volume number to which the billing usage data files will be written. The volume number corresponds to the slot number on which the volume resides. Note: Value 0 has the special meaning that no 'Store' and 'Flush' operations will take place. This translates to no Billing data will be written to the local file system. 'Update' operations will still be performed on each local slot. Full Billing statistics will still be available in the wfFrSwUsageTable MIB.")
wfFrSwUsageVolumeBackup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageVolumeBackup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageVolumeBackup.setDescription('Indicates the backup volume if wfFrSwUsageVolume becomes inoperative. Note: This feature is not implemented in this release.')
wfFrSwUsageDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageDirectory.setDescription('The name of the directory where the billing usage data files are stored. ')
wfFrSwUsageFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFilePrefix.setDescription('The base name of billing usage data files.')
# Timer/interval configuration (per the descriptions, the minute-based
# intervals must convert to a multiple of wfFrSwUsageTimerInterval seconds).
wfFrSwUsageTimerInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageTimerInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageTimerInterval.setDescription('This number determines the timer interval (number of seconds) unit for the Billing process to perform its various timer driven tasks. i.e. updating billing usage data, writing billing usage data to file system and file system management activities.')
wfFrSwUsageUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to collect and update billing usage data in the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushInterval.setDescription('This number specifies the interval (number of minutes) for the Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB follow by zeroing the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwUsageCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCleanupInterval.setDescription('This is the interval (number of minutes) for the Billing process to check and delete old billing usage data files. Note: When converted to seconds, this must be a multilple of wfFrSwUsageTimerInterval.')
wfFrSwUsageLocalTimeZone = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLocalTimeZone.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageLocalTimeZone.setDescription('Indicates local time zone of the switch')
# Read-only timestamps of the last expiry of each timer (seconds since
# midnight Jan 1, 1976 GMT, per the descriptions).
wfFrSwUsageUpdateTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 12), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageUpdateTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateTimeStamp.setDescription('Time stamp of last wfFrSwUsageUpdateInterval timer expiration or the starting time of the current wfFrSwUsageUpdateInterval. This value is number of seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageStoreTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStoreTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreTimeStamp.setDescription('Time stamp of last wfFrSwUsageStoreInterval timer expiration or the starting time of the current wfFrSwUsageStoreInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT). ')
wfFrSwUsageFlushTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 14), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageFlushTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushTimeStamp.setDescription('Time stamp of last wfFrSwUsageFlushInterval timer expiration or the starting time of the current wfFrSwUsageFlushInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwUsageCleanupTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 15), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCleanupTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCleanupTimeStamp.setDescription('Time stamp of last wfFrSwUsageCleanupInterval timer expiration or the starting time of the current wfFrSwUsageCleanupInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
# Write-only style trigger scalars: setting non-zero forces an immediate
# update/store/flush/cleanup; the manager resets them to zero afterwards.
wfFrSwUsageUpdateData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageUpdateData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageUpdateData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating of the billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageStoreData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageStoreData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageStoreData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageFlushData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFlushData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFlushData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the billing usage data and followed by zeroing the wfFrSwBillingUsage MIB. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwUsageFileCleanup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageFileCleanup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageFileCleanup.setDescription('Setting this attribute to a non-zero value will cause an immediate checking and deleting old billing usage data files. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
# Read-only operational state plus "Cur*" mirrors of the configured values
# actually in effect (they differ only when the configured value is invalid).
wfFrSwUsageState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageState.setDescription('current state FRSW billing.')
wfFrSwUsageCurVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurVolume.setDescription('current file system volume number used. This number is the same as wfFrSwUsageVolume except when the user sets wfFrSwUsageVolume to an invalid number.')
wfFrSwUsageCurVolumeBackup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurVolumeBackup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurVolumeBackup.setDescription('curent backup file system volume number used. This number is the same as wfFrSwUsageVolumeBackUp except when the user sets wfFrSwUsageVolume to an invalid number. Note: This feature is not implemented in this release.')
wfFrSwUsageCurDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 23), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurDirectory.setDescription('current directory name used. This number is the same as wfFrSwUsageDirectory except when the user sets wfFrSwUsageDirectory to an invalid name.')
wfFrSwUsageCurFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 24), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurFilePrefix.setDescription('current base file name used. This number is the same as wfFrSwUsageFilePrefix except when the user sets wfFrSwUsageFilePrefix to an invalid name.')
wfFrSwUsageCurTimerInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(20)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurTimerInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurTimerInterval.setDescription('current timer interval number used. This number is the same as wfFrSwUsageTimerInterval except when the user sets wfFrSwUsageTimerInterval to an invalid value.')
wfFrSwUsageCurUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurUpdateInterval.setDescription('current update interval number used. This number is the same as wfFrSwUsageUpdateInterval except when the user sets wfFrSwUsageUpdateInterval to an invalid value.')
wfFrSwUsageCurStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurStoreInterval.setDescription('current store timer interval number used. This number is the same as wfFrSwUsageStoreInterval except when the user sets wfFrSwUsageStoreInterval to an invalid value.')
wfFrSwUsageCurFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 28), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurFlushInterval.setDescription('current flush timer interval number used. This number is the same as wfFrSwUsageFlushInterval except when the user sets wfFrSwUsageFlushInterval to an invalid value.')
wfFrSwUsageCurCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 29), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurCleanupInterval.setDescription('current file cleanup timer interval number used. This number is the same as wfFrSwUsageCleanupInterval except when the user sets wfFrSwUsageCleanupInterval to an invalid value.')
wfFrSwUsageDebug = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageDebug.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageDebug.setDescription('Enable/Disable printing of debug edl (trap) messages. NOTE: Do not enable this attribute in operational enviornment as it will likely flood the logging facility. This attribute is reserved for specialized debugging in a controlled lab enviornment.')
wfFrSwUsageCurDebug = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCurDebug.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageCurDebug.setDescription('current debug value used. This value is the same as wfFrSwUsageDebug except when the user sets wfFrSwUsageDeubg to an invalid value.')
wfFrSwUsageSwitchId = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 32), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSwitchId.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageSwitchId.setDescription('switch id used in the billing usage data file.')
wfFrSwUsageNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 33), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageNumEntries.setDescription('number of entries in wfFrSwUsageTable')
# --- SVC billing usage group: configuration, trigger, and status scalars ---
# Auto-generated PySNMP objects, still under OID prefix
# 1.3.6.1.4.1.18.3.5.9.6.5 (sub-identifiers 34..63).  Structure parallels the
# PVC wfFrSwUsage* scalars above: read-write configuration values, one-shot
# action triggers, read-only timestamps, and read-only "Cur*" mirrors of the
# values actually in effect.  Generated code — do not hand-edit declarations.
wfFrSwSvcUsageEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageEnable.setDescription('Enable/Disable FRSW SVC billing.')
wfFrSwSvcUsageInterimRecordEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 35), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageInterimRecordEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageInterimRecordEnable.setDescription('Enable/Disable Writing FRSW SVC billing record while SVC connection is still up.')
wfFrSwSvcUsageVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 36), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageVolume.setDescription("Indicates the file system volume number to which the SVC billing usage data files will be written. The volume number corresponds to the slot number on which the volume resides. Note: Value 0 has the special meaning that no 'Store' and 'Flush' operations will take place. This translates to no Billing data will be written to the local file system. 'Update' operations will still be performed on each local slot. Full Billing statistics will still be available in the wfFrSwUsageTable MIB.")
wfFrSwSvcUsageDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 37), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageDirectory.setDescription('The name of the directory where the SVC billing usage data files are stored. ')
wfFrSwSvcUsageFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 38), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFilePrefix.setDescription('The base name of SVC billing usage data files.')
# Timer intervals in minutes; per the texts each must convert to a multiple
# of wfFrSwUsageTimerInterval seconds.
wfFrSwSvcUsageUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 39), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to collect and update billing usage data in the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 40), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 41), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushInterval.setDescription('This number specifies the interval (number of minutes) for the SVC Billing process to write billing usage data on to the file system from the wfFrSwUsage MIB follow by zeroing the wfFrSwUsage MIB. Note: When converted to seconds, this must be a multiple of wfFrSwUsageTimerInterval.')
wfFrSwSvcUsageCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 42), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupInterval.setDescription('This is the interval (number of minutes) for the SVC Billing process to check and delete old billing usage data files. Note: When converted to seconds, this must be a multilple of wfFrSwUsageTimerInterval.')
# Read-only timestamps of the last timer expirations; the texts define them
# as seconds since midnight Jan 1, 1976 (GMT).
wfFrSwSvcUsageUpdateTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 43), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageUpdateInterval timer expiration or the starting time of the current wfFrSwSvcUsageUpdateInterval. This value is number of seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwSvcUsageStoreTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 44), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageStoreInterval timer expiration or the starting time of the current wfFrSwSvcUsageStoreInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT). ')
wfFrSwSvcUsageFlushTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 45), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageFlushInterval timer expiration or the starting time of the current wfFrSwSvcUsageFlushInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
wfFrSwSvcUsageCleanupTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 46), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCleanupTimeStamp.setDescription('Time stamp of last wfFrSwSvcUsageCleanupInterval timer expiration or the starting time of the current wfFrSwSvcUsageCleanupInterval. This value is number of seconds since midnight Jan. 1, 1976 (GMT).')
# One-shot action triggers: writing a non-zero value fires the action; the
# texts say the attribute should then be reset to zero.
wfFrSwSvcUsageUpdateData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 47), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageUpdateData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating of the SVC billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageStoreData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 48), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageStoreData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the SVC billing usage data. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageFlushData = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 49), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushData.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFlushData.setDescription('Setting this attribute to a non-zero value will cause an immediate updating and writing of the SVC billing usage data and followed by zeroing the wfFrSwBillingUsage MIB. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
wfFrSwSvcUsageFileCleanup = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 50), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSvcUsageFileCleanup.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFileCleanup.setDescription('Setting this attribute to a non-zero value will cause an immediate checking and deleting old SVC billing usage data files. Once activated, this attribute should be reset to zero to allow subsequent activations. ')
# Read-only state and "Cur*" mirrors of the effective SVC billing settings.
wfFrSwSvcUsageState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 51), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageState.setDescription('current state FRSW SVC billing.')
wfFrSwSvcUsageCurVolume = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 52), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurVolume.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurVolume.setDescription('current file system volume number used for SVC Billing. This number is the same as wfFrSwSvcUsageVolume except when the user sets wfFrSwSvcUsageVolume to an invalid number.')
wfFrSwSvcUsageCurDirectory = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 53), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurDirectory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurDirectory.setDescription('current directory name used for SVC Billing. This number is the same as wfFrSwSvcUsageDirectory except when the user sets wfFrSwSvcUsageDirectory to an invalid name.')
wfFrSwSvcUsageCurFilePrefix = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 54), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFilePrefix.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFilePrefix.setDescription('current base file name used for SVC Billing. This name is the same as wfFrSwSvcUsageFilePrefix except when the user sets wfFrSwSvcUsageFilePrefix to an invalid name.')
wfFrSwSvcUsageCurUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 55), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurUpdateInterval.setDescription('current update interval number used. This number is the same as wfFrSwSvcUsageUpdateInterval except when the user sets wfFrSwSvcUsageUpdateInterval to an invalid value.')
wfFrSwSvcUsageCurStoreInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 56), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurStoreInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurStoreInterval.setDescription('current store timer interval number used. This number is the same as wfFrSwSvcUsageStoreInterval except when the user sets wfFrSwSvcUsageStoreInterval to an invalid value.')
wfFrSwSvcUsageCurFlushInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 57), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFlushInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurFlushInterval.setDescription('current flush timer interval number used. This number is the same as wfFrSwSvcUsageFlushInterval except when the user sets wfFrSwSvcUsageFlushInterval to an invalid value.')
wfFrSwSvcUsageCurCleanupInterval = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 58), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageCurCleanupInterval.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageCurCleanupInterval.setDescription('current file cleanup timer interval number used. This number is the same as wfFrSwSvcUsageCleanupInterval except when the user sets wfFrSwSvcUsageCleanupInterval to an invalid value.')
wfFrSwSvcUsageNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 59), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageNumEntries.setDescription('number of entries in wfFrSwSvcUsageTable')
# Identification and file-layout version scalars.
wfFrSwSvcUsageVersionId = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 60), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageVersionId.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageVersionId.setDescription('The Software Version ID field is a two byte, right justified, binary formated value that identifies the particular version number of the software release. High nibble of byte 1 represents the major version number. Low nibble of byte 1 represents the release number. Byte 2 represents the integration number.')
wfFrSwUsageSwitchName = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 61), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwUsageSwitchName.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwUsageSwitchName.setDescription('The Switch name is a 6-bytes, right justified with leading blanks as necessary. It can be combination of letters, numbers and blanks. This ID identifies the particular networks equipment for SVC billing usage data process.')
wfFrSwPvcUsageFileLayout = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 62), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwPvcUsageFileLayout.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwPvcUsageFileLayout.setDescription('PVC usage file layout version')
wfFrSwSvcUsageFileLayout = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 5, 63), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSvcUsageFileLayout.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSvcUsageFileLayout.setDescription('SVC usage file layout version')
# --- wfFrSwUsageTable (OBSOLETE): per-PVC billing usage table ---
# Auto-generated PySNMP table at OID 1.3.6.1.4.1.18.3.5.9.6.6, indexed by
# (wfFrSwUsageCircuitNumber, wfFrSwUsageDlci).  Every object in this table
# carries status 'obsolete' in the MIB texts; it is declared here so the OID
# subtree still resolves.  All columns are read-only.
wfFrSwUsageTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6), )
if mibBuilder.loadTexts: wfFrSwUsageTable.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageTable.setDescription('The Billing usage table.')
wfFrSwUsageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwUsageCircuitNumber"), (0, "Wellfleet-FRSW-MIB", "wfFrSwUsageDlci"))
if mibBuilder.loadTexts: wfFrSwUsageEntry.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEntry.setDescription('The parameters for Billing Usage.')
wfFrSwUsageDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageDelete.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageDelete.setDescription('Indicates status of this entry. FRSW_USAGE_CREATED is the normal case. FRSW_USAGE_DELETED means the corresponding tuple and vc instances were deleted some time during this collection interval. This billing instance will be deleted at the end of the next wfFrSwUsageFlush period after this billing record is written out to the file system.')
wfFrSwUsageCircuitNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageCircuitNumber.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageCircuitNumber.setDescription('Instance identifier; the circuit number of this interface. ')
wfFrSwUsageDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageDlci.setDescription('Instance identifier; this indicates which virtual circuit. ')
wfFrSwUsageIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageIPAddress.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageIPAddress.setDescription('(Local) IP address corresponding to wfFrSwUsageCircuitNumber of this virtual circuit. ')
# 64-bit quantities (timestamps and counters) are represented as paired
# High/Low 32-bit Integer32 columns, per the DESCRIPTION texts.
wfFrSwUsageStartTimeStampHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampHigh.setDescription('Time stamp of the starting time (the high 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageStartTimeStampLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageStartTimeStampLow.setDescription('Time stamp of the starting time (the low 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageEndTimeStampHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampHigh.setDescription('Time stamp of the ending time (the high 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageEndTimeStampLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageEndTimeStampLow.setDescription('Time stamp of the ending time (the low 32 bits) of last billing usage interval. This value is the number of 1/100th seconds since midnight Jan 1, 1976 (GMT).')
wfFrSwUsageSentNonDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesHigh.setDescription('Number (the high 32 bits) of local frames sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEFramesLow.setDescription('Number (the low 32 bits) of local frames sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsHigh.setDescription('Number (the high 32 bits) of local octets sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentNonDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentNonDEOctetsLow.setDescription('Number (the low 32 bits) of local octets sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesHigh.setDescription('Number (the high 32 bits) of local frames with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEFramesLow.setDescription('Number (the low 32 bits) of local frames with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsHigh.setDescription('Number (the high 32 bits) of local octets with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
wfFrSwUsageSentDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageSentDEOctetsLow.setDescription('Number (the low 32 bits) of local octets with DE bit sent from this virtual circuit between wfFrSwUsageStartTimeStamp and wfFrSwUsageEndTimeStamp.')
# "Last*" columns snapshot the underlying 32-bit counters at the end of the
# interval; per the texts, the High halves count counter wrap-arounds.
wfFrSwUsageLastNonDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentNonDEFrames value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEFrames is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentNonDEFrames has wrapped around.')
wfFrSwUsageLastNonDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEFramesLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentNonDEFrames value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastNonDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentNonDEOctets value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEOctets is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentNonDEOctets has wrapped around.')
wfFrSwUsageLastNonDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastNonDEOctetsLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentNonDEOctets value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastDEFramesHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentDEFrames value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentNonDEFrames is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentDEFrames has wrapped around.')
wfFrSwUsageLastDEFramesLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEFramesLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentDEFrames value at wfFrSwUsageEndTimeStamp. ')
wfFrSwUsageLastDEOctetsHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsHigh.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsHigh.setDescription('The (high 32 bits) value of wfFrSwCctLocalSentDEOctets value at wfFrSwUsageEndTimeStamp. Note: Since wfFrSwCctLocalSentDEOctets is a 32-bit COUNTER, this is really a counter keeping track of number of times wfFrSwCctLocalSentDEOctets has wrapped around.')
wfFrSwUsageLastDEOctetsLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsLow.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageLastDEOctetsLow.setDescription('The (low 32 bits) value of wfFrSwCctLocalSentDEOctets value at wfFrSwUsageEndTimeStamp. ')
# Remote endpoint identification for this PVC.
wfFrSwUsageRemoteIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 25), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageRemoteIPAddress.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageRemoteIPAddress.setDescription('IP address of the other side (remote) of this PVC endpoint.')
wfFrSwUsageRemoteDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 6, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 1007, 1024, 64511, 131072, 8257535))).clone(namedValues=NamedValues(("twobyteminimum", 16), ("twobytemaximum", 1007), ("threebyteminimum", 1024), ("threebytemaximum", 64511), ("fourbyteminimum", 131072), ("fourbytemaximum", 8257535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwUsageRemoteDlci.setStatus('obsolete')
if mibBuilder.loadTexts: wfFrSwUsageRemoteDlci.setDescription('DLCI number of the other side (remote) of this PVC endpoint.')
# --- wfFrSwVcTable: Frame Relay Virtual Circuit table ---
# One conceptual row per VC, indexed by (circuit number, DLCI); the column
# objects that follow hang off OID ...9.6.7.1.
wfFrSwVcTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7), )
if mibBuilder.loadTexts: wfFrSwVcTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTable.setDescription('Frame Relay Virtual Circuit table gives information about a virtual circuit.')
# Row template; index columns are wfFrSwVcCircuit and wfFrSwVcDlci (both
# non-implied, per the (0, module, name) tuples).
wfFrSwVcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwVcCircuit"), (0, "Wellfleet-FRSW-MIB", "wfFrSwVcDlci"))
if mibBuilder.loadTexts: wfFrSwVcEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcEntry.setDescription('An entry in the Frame Relay (Virtual) Circuit table.')
# --- wfFrSwVcEntry columns 1-24: row control, indexes, CIR/Bc/Be traffic
# --- parameters, signalled status, priority and timestamps ---
# Row create/delete control (also records whether the VC came from the
# system, an SVC, or an SPVC calling/called agent).
wfFrSwVcDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2), ("system", 3), ("svc", 4), ("spvccra", 5), ("spvccaa", 6))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDelete.setDescription('Indication to delete this virtual circuit.')
# Index column 1: circuit (logical-port) number.
wfFrSwVcCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCircuit.setDescription('Instance identifier; the circuit number of this interface (logical port).')
# Index column 2: the DLCI identifying this VC on the circuit.
wfFrSwVcDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDlci.setDescription('Instance identifier; this indicates the virtual circuit identifier')
# Operational state (read-only) vs. administratively requested state
# (wfFrSwVcStateSet, read-write) below.
wfFrSwVcState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 1), ("active", 2), ("inactive", 3), ("control", 4), ("user", 5))).clone('invalid')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcState.setDescription('Indicates whether the particular virtual circuit is operational.')
wfFrSwVcStateSet = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcStateSet.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcStateSet.setDescription('User access for setting the state of a virtual circuit')
wfFrSwVcMulticast = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("multicast", 1), ("unicast", 2))).clone('unicast')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcMulticast.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcMulticast.setDescription('Indicates whether this dlci is used for multicast or a single destination.')
# Traffic-contract parameters: excess burst (Be), throughput (CIR) and
# committed burst (Bc) in both directions, expressed in bits (octet
# variants appear further down the table).
wfFrSwVcInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 7), Integer32().clone(2147483647)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBe.setDescription('Indicates the maximum number Incoming Excess Burst bits that are allowed in a configured time interval (T).')
wfFrSwVcOutBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcOutBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutBe.setDescription('Indicates the Outgoing Excess Burst bits for this virtual circuit.')
wfFrSwVcInThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcInThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInThroughput.setDescription('Indicates the incoming throughput in bits/sec for this virtual circuit.')
wfFrSwVcOutThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcOutThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutThroughput.setDescription('Indicates the outgoing throughput in bits/sec for this virtual circuit.')
wfFrSwVcOutBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcOutBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcOutBc.setDescription('Indicates the Outgoing Committed Burst bits for this virtual circuit.')
wfFrSwVcInBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBc.setDescription('Indicates the Incoming Committed Burst bits for this virtual circuit.')
wfFrSwVcInBcOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBcOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBcOctets.setDescription('Indicates the Incoming Committed Burst in octets for this virtual circuit.')
# Congestion / signalled-status bookkeeping (BECN state, LMI reporting,
# cross-network status exchange).
wfFrSwVcBecnState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBecnState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBecnState.setDescription('Indicates the BECN state')
wfFrSwVcReportedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acked", 1), ("unacked", 2), ("unreported", 3))).clone('unreported')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcReportedStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcReportedStatus.setDescription('Record keeping for circuit status')
wfFrSwVcReceivedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcReceivedStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcReceivedStatus.setDescription('State of a virtual circuit as reported by the network at an NNI')
wfFrSwVcCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('active')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCrossNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCrossNetStatus.setDescription('State of a virtual circuit as reported by the other end of the network under bidirectional signalling.')
wfFrSwVcXNetSent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("unsent", 1), ("sent", 2))).clone('unsent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetSent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetSent.setDescription('Whether we have sent a cross net status message for this VC yet.')
wfFrSwVcXNetReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("recv", 1), ("unrecv", 2))).clone('unrecv')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetReceived.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetReceived.setDescription('Whether we have received a cross net status message for this VC during the current polling interval.')
# Destination endpoint (called IP address + DLCI in the full 2/3/4-byte
# DLCI range 16..8257535).
wfFrSwVcCalledIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 20), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCalledIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCalledIpAddr.setDescription('Indicates the IP address associated with destination of a virtual circuit.')
wfFrSwVcCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCalledDlci.setDescription('Indicates the DLCI associated with destination of a virtual circuit.')
# Per-VC ingress traffic priority; 999 means "use the port default".
wfFrSwVcTrfPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 999))).clone(namedValues=NamedValues(("one", 1), ("two", 2), ("three", 3), ("default", 999))).clone('default')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcTrfPriority.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTrfPriority.setDescription('Defines the traffic priority level of all the incoming packets on this VC. FRSW_VCPRIORITY_DEFAULT - Set all incoming user traffic packets to the default priority used by the port. FRSW_VCPRIORITY_ONE - Set all incoming packets to priority 1. FRSW_VCPRIORITY_TWO - Set all incoming packets to priority 2. FRSW_VCPRIORITY_THREE - Set all incoming packets to priority 3. Priority 0 is reserved for network critical packets like OSPF, FR LMI and SMDS heartbeat and is not available for user traffic.')
# sysUpTime snapshots for VC creation and last state change.
wfFrSwVcCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 23), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcCreationTime.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCreationTime.setDescription('Indicates the value of sysUpTime when the VC was created.')
wfFrSwVcLastTimeChange = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 24), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcLastTimeChange.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcLastTimeChange.setDescription('Indicates the value of sysUpTime when last there was a change in VC state.')
# --- wfFrSwVcEntry columns 25-53: per-VC traffic statistics ---
# Counter32 pairs (frames/octets) grouped as: transmitted with/without DE,
# congestion bits set by this switch (FECN/BECN/DE), discards, received
# with/without DE and with FECN/BECN, plus excess-burst drops.
wfFrSwVcTxNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxNonDeFrames.setDescription('Indicates the number of frames without the DE bit sent on this virtual circuit over the interface.')
wfFrSwVcTxNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxNonDeOctets.setDescription('Indicates the number of octets without DE bit sent on this virtual circuit over the interface.')
wfFrSwVcTxDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxDeFrames.setDescription('Indicates the number of frames with DE bit set sent on this virtual circuit over the interface.')
wfFrSwVcTxDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcTxDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcTxDeOctets.setDescription('Indicates the number of octets with DE bit set sent on this virtual circuit over the interface.')
# Congestion bits set by this switch on forwarded frames.
wfFrSwVcSetFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetFecnFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the FECN bit.')
wfFrSwVcSetFecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetFecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetFecnOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the FECN bit.')
wfFrSwVcSetBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetBecnFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the BECN bit.')
wfFrSwVcSetBecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetBecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetBecnOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the BECN bit.')
wfFrSwVcSetDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetDeFrames.setDescription('Indicates the number of frames sent to the interface on which this switch set the DE bit.')
wfFrSwVcSetDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSetDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSetDeOctets.setDescription('Indicates the number of octets in frames sent to the interface on which this switch set the DE bit.')
# Discard counters (split by DE bit, plus inactive-VC drops).
wfFrSwVcDropNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropNonDeFrames.setDescription('Indicates the number of frames received over the interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwVcDropNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropNonDeOctets.setDescription('Indicates the number of octets in frames received over the interface which were discarded, excluding any frames with the DE bit set.')
wfFrSwVcDropDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropDeFrames.setDescription('Indicates the number of frames received over the interface, having the DE bit set, which were discarded.')
wfFrSwVcDropDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropDeOctets.setDescription('Indicates the number of octets in frames received over the interface, having the DE bit set, which were discarded.')
wfFrSwVcInactiveVcDropFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropFrames.setDescription('Indicates how many frames were discarded because the virtual circuit was inactive.')
wfFrSwVcInactiveVcDropOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInactiveVcDropOctets.setDescription('Indicates how many Octets were discarded because the virtual circuit was inactive.')
# Receive-side counters (split by DE, FECN, BECN bits).
wfFrSwVcRecvNonDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeFrames.setDescription('Indicates the number of frames received on this virtual circuit over the interface.')
wfFrSwVcRecvNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvNonDeOctets.setDescription('Indicates the number of octets received on this virtual circuit over the interface.')
wfFrSwVcRecvDeFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvDeFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvDeFrames.setDescription('Indicates the number of frames received over the interface with the DE bit set.')
wfFrSwVcRecvDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvDeOctets.setDescription('Indicates the number of octets in frames received over the interface with the DE bit set.')
wfFrSwVcRecvFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvFecnFrames.setDescription('Indicates the number of frames received over the interface with the FECN bit set.')
wfFrSwVcRecvFecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvFecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvFecnOctets.setDescription('Indicates the number of octets in frames received over the interface with the FECN bit set.')
wfFrSwVcRecvBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvBecnFrames.setDescription('Indicates the number of frames received over the interface with the BECN bit set.')
wfFrSwVcRecvBecnOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecvBecnOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecvBecnOctets.setDescription('Indicates the number of octets in frames received over the interface with the BECN bit set.')
# Sampling-window and error/excess-burst counters.
wfFrSwVcRecentNonDeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRecentNonDeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRecentNonDeOctets.setDescription('Indicates the number of octets received over the interface during the most recent sampling period.')
wfFrSwVcXNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcXNetErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcXNetErrors.setDescription('This is the count of the consecutive errors (usually timeouts) against this VC in cross-network heartbeat polling.')
wfFrSwVcDropExcessBurstFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstFrames.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstFrames.setDescription('Indicates the number of Excess Burst Frames dropped on this virtual circuit.')
wfFrSwVcDropExcessBurstOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcDropExcessBurstOctets.setDescription('Indicates the number of Excess Burst Octets dropped on this virtual circuit.')
wfFrSwVcInBeOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 53), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcInBeOctets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcInBeOctets.setDescription('Indicates the maximum number Incoming Excess Burst bytes that are allowed in a configured time interval (T).')
# --- wfFrSwVcEntry columns 54-61: configurable Be and PVC source-redirect
# --- (primary/backup endpoint) support ---
wfFrSwVcCfgInBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 54), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCfgInBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCfgInBe.setDescription('The number of Excess Burst in bits')
# Manual or cross-net-driven switching between primary and backup endpoints;
# see the named values and the description for each mode's exact meaning.
wfFrSwVcRedirectAction = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("redirecttoprimary", 2), ("redirecttobackup", 3), ("switchondemand", 4), ("swondemandtoprimary", 5), ("swondemandtobackup", 6))).clone('redirecttoprimary')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcRedirectAction.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectAction.setDescription("Perform pvc source redirect manually or based on cross-net updates: 'redirecttoprimary(2)' will force to switch to primary; 'redirecttobackup(3)' will force to switch to backup; 'switchondemand(4)' will switch based on cross-net status of the primary to and from primary; 'swondemandtoprimary(5)' will switch to primary from backup iff cross-net of primary became active; 'swondemandtobackup(6)' will switch to backup from primary iff cross-net of primary became inactive.")
# How the DTE is notified when switching: transparently, or via an async
# status update (with the NEW bit, or with the A bit cleared).
wfFrSwVcRedirectType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 56), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("transparent", 1), ("intrusiven", 2), ("intrusivea", 3))).clone('intrusivea')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcRedirectType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectType.setDescription("Type of dte notification at switching time: 'transparent(1)' will not send notification to dte; 'intrusiven(2)' will send async update with NEW bit; 'intrusivea(3)' will send async update with A bit not set.")
# Redirect state machine; 'holddown(21)' is a transient all-drop state used
# while switching endpoints.
wfFrSwVcRedirectState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 57), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 12, 13, 21))).clone(namedValues=NamedValues(("backupinactive", 1), ("primaryactive", 2), ("switchtobackup", 3), ("backupactive", 12), ("switchtoprimary", 13), ("holddown", 21))).clone('backupinactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcRedirectState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcRedirectState.setDescription("PVC Source Redirect State: 'backupinactive(1)' - backup is not configured and/or cross-net status is inactive; will allow traffic only through primary. 'primaryactive(2)' - both primary and backup rx'ed 'active' cross-net status, currently primary is active and traffic only through primary. 'switchtobackup(3)' - primary cross-net status is inactive, but can not switch to backup due to manual (or semi-manual) operation of the redirect; will allow traffic only through primary. 'backupactive(12)' - cross-net status is 'inactive' for primary; will allow traffic only through backup. 'switchtoprimary(13)' - cross-net status is 'active' for primary, should by can not switch to primary due to manual (or semi-manual) operation of the redirect; will allow traffic only through backup. 'holddown(21)' - down state used as intermediate state at switching time (for not more then a second); all traffic is dropped.")
# Backup endpoint address/DLCI and its cross-net health tracking.
wfFrSwVcBackupCalledIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 58), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcBackupCalledIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCalledIpAddr.setDescription(' Backup Called Ip Address of the remote end of the PVC.')
wfFrSwVcBackupCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 59), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcBackupCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCalledDlci.setDescription(' Backup Called Dlci of the remote end of the PVC.')
wfFrSwVcBackupCrossNetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2))).clone('inactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetStatus.setDescription(' Cross net status of the backup remote end of the PVC.')
wfFrSwVcBackupCrossNetErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 61), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcBackupCrossNetErrors.setDescription(' Support counter of missed cross net update from backup remote end of the PVC, range: [0, wfFrSwDlcmiCrossNetErrorThreshold].')
# --- wfFrSwVcEntry columns 62-67: FR/ATM interworking (IWF) settings ---
# Interworking mode for this FR PVC (disabled, service-IWF transparent or
# translation, or network-IWF).
wfFrSwVcAtmIwfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 62), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("atmDisableIwfMode", 1), ("atmServiceIwfTransparentMode", 2), ("atmServiceIwfTranslationMode", 3), ("atmNetworkIwfMode", 4))).clone('atmDisableIwfMode')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfMode.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfMode.setDescription('This attribute indicates the mode of FR-ATM interworking over this FR PVC or that FR-ATM interworking is not enabled on it.')
# ATM VPI (0..255) / VCI (32..65535) paired with this FR PVC; only relevant
# when interworking is enabled.
wfFrSwVcAtmIwfVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 63), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVPI.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVPI.setDescription('This is relevant only when the ATM/FR interworking is enabled for this PVC. This indicates the ATM virtual path identifier associated with the Frame Relay PVC described by this record virtual circuit identifier.')
wfFrSwVcAtmIwfVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 64), Integer32().subtype(subtypeSpec=ValueRangeConstraint(32, 65535)).clone(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVCI.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfVCI.setDescription('This is relevant only when FR/ATM interworking is enabled for this PVC. This indicates the ATM virtual circuit identifier associated with the Frame Relay PVC described by this record.')
# Bit-mapping policies between FR and ATM congestion/loss bits:
# ATM CLP -> FR DE, FR DE -> ATM CLP, FR FECN -> ATM EFCI; each can either
# map the bit through or force it to a constant 0/1.
wfFrSwVcAtmIwfLossPriorityPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 65), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapDe", 1), ("atmiwfsetDe1", 2), ("atmiwfsetDe0", 3))).clone('atmiwfmapDe')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfLossPriorityPolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfLossPriorityPolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy for translating ATM CLP to FR DE on this PVC or simply setting FR DE to a constant value for all frames.')
wfFrSwVcAtmIwfDePolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 66), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapClp", 1), ("atmiwfsetClp1", 2), ("atmiwfsetClp0", 3))).clone('atmiwfmapClp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfDePolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfDePolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy on this PVC for translating FR DE to ATM CLP or simply setting CLP to a constant value for all frames.')
wfFrSwVcAtmIwfEfciPolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 67), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("atmiwfmapFecn", 1), ("atmiwfsetFecn1", 2), ("atmiwfsetFecn0", 3))).clone('atmiwfmapFecn')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcAtmIwfEfciPolicy.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcAtmIwfEfciPolicy.setDescription('This is relevant only when FR/ATM interworking is enabled for this FR PVC. This indicates the policy on this PVC for translating FR FECN to ATM EFCI or simply setting ATM EFCI to a constant value for all frames.')
# --- wfFrSwVcEntry columns 68-74: Escape PVC flag and SPVC call-request
# --- agent parameters ---
# Marks this PVC as an Escape PVC; the ingress/egress role lives in the
# wfFrSwDlcmiEntry object.
wfFrSwVcEscapeEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 68), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcEscapeEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcEscapeEnable.setDescription(' Identifies this PVC as either a standard FRSW PVC (escape disabled) or an Escape PVC (escape enabled). The type of Escape PVC (ingress node or egress node) is specified in the wfFrSwDlcmiEntry Object.')
# SPVC call progress for this DLCI.
wfFrSwVcSpvcCallState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 69), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inactive", 1), ("inprogress", 2), ("active", 3))).clone('inactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVcSpvcCallState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcSpvcCallState.setDescription('Indicates to the state of the SPVC call for this DLCI.')
# Called address for the SPVC call-request agent; E.164 vs X.121 is decided
# by wfFrSwDlcmiL2AddrType in wfFrSwDlcmiEntry.
wfFrSwVcCallReqCalledAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 70), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledAddr.setDescription('Called E.164/X.121 Address for an SPVC Call Request Agent. The address type is determined by the wfFrSwDlcmiL2AddrType attribute in wfFrSwDlcmiEntry.')
# 'any' lets the network pick the destination DLCI; 'specific' uses
# wfFrSwVcCallReqCalledDlci below.
wfFrSwVcCallReqDlciSelectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 71), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("any", 1), ("specific", 2))).clone('any')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqDlciSelectionType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqDlciSelectionType.setDescription("Indicates to the Calling End of an SPVC Call Request whether to use any available DLCI, or a specific DLCI. If 'specific' is chosen, the called DLCI value is specified in wfFrSwVcCallReqCalledDlci.")
wfFrSwVcCallReqCalledDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 72), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 8257535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledDlci.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqCalledDlci.setDescription("Indicates to the Calling End of an SPVC Call Request the DLCI to be used at the destination of a virtual circuit. This value should be specified when 'specific' wfFrSwVcCallReqDlciSelectionType is chosen.")
# Call-setup retry behaviour: per-attempt timeout in minutes (1-60,
# default 3) and maximum retry count before the SPVC is declared invalid.
wfFrSwVcCallReqRetryTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 73), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqRetryTimer.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqRetryTimer.setDescription('Indicates the number of minutes the Call Request Agent should wait for an SPVC CONNECT message before declaring a Call Setup request REJECTED.')
wfFrSwVcCallReqMaxRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 7, 1, 74), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVcCallReqMaxRetries.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVcCallReqMaxRetries.setDescription('Indicates the number of times the Call Request Agent should retry failed Call Setup requests before declaring the SPVC invalid.')
# --- wfFrSwIsdnBase table (1.3.6.1.4.1.18.3.5.9.6.8) ---
# Per-slot FRSW-over-ISDN base configuration, indexed by slot number.
# Columns: row delete flag, read-only slot number, and the association type
# selecting whether the Calling Party (ANI) or Called Party (DNIS) number
# maps an incoming ISDN call to an FRSW UNI.
wfFrSwIsdnBaseTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8), )
if mibBuilder.loadTexts: wfFrSwIsdnBaseTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseTable.setDescription('This is a FRSW over ISDN configuration table. This table specifies whether the Calling Party (ANI) or Called Party (DNIS) ISDN Phone Number should be used to map the ISDN call to a particular FRSW UNI. The table is indexed by the Slot Number where the PRI(s) exist.')
wfFrSwIsdnBaseEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnBaseSlotNum"))
if mibBuilder.loadTexts: wfFrSwIsdnBaseEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseEntry.setDescription('Instance Id for this table.')
wfFrSwIsdnBaseDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnBaseDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseDelete.setDescription('Indication to delete this FRSW ISDN interface. ')
wfFrSwIsdnBaseSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnBaseSlotNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseSlotNum.setDescription('This number is the Slot Number for the PRI interface(s) that are being configured for FRSW ISDN. There will be one of these tables for every slot where an FRSW ISDN PRI Interface exists.')
wfFrSwIsdnBaseAssocType = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 8, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dnis", 1), ("ani", 2))).clone('dnis')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnBaseAssocType.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnBaseAssocType.setDescription('Indicates which ISDN Phone Number (ANI or DNIS) to use to do the ISDN call to FRSW UNI mapping.')
# --- wfFrSwIsdnAssoc table (1.3.6.1.4.1.18.3.5.9.6.9) ---
# Maps (slot number, ISDN phone number) to an FRSW UNI index, with an
# optional per-entry screening-enable flag.  The UNI index links entries
# here to rows in the wfFrSwIsdnUni and wfFrSwIsdnScrn tables.
wfFrSwIsdnAssocTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9), )
if mibBuilder.loadTexts: wfFrSwIsdnAssocTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocTable.setDescription('This table defines the Association Table to be used for the FRSW over ISDN application. The table contains a list of ISDN Phone Numbers and the associated FRSW UNI Index Number. The table is indexed by the Slot Number and the ISDN Phone Number.')
wfFrSwIsdnAssocEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnAssocSlotNum"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnAssocNum"))
if mibBuilder.loadTexts: wfFrSwIsdnAssocEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocEntry.setDescription('Instance Id for this table.')
wfFrSwIsdnAssocDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocDelete.setDescription('Indication to delete this Association Instance.')
wfFrSwIsdnAssocSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnAssocSlotNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocSlotNum.setDescription('Slot with which this ISDN Phone Number is associated.')
wfFrSwIsdnAssocNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnAssocNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocNum.setDescription('ISDN Phone Number that is used to look up the appropriate FRSW UNI Index. This number is compared with either the Calling Party Number (ANI) Information Element or the Called Party Number (DNIS) Information Element contained in the ISDN Call Setup Message.')
wfFrSwIsdnAssocScrnEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocScrnEnable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocScrnEnable.setDescription('Indicate whether allowed screening should be enabled or disabled for all of the UNIs contained in the FRSW UNI Index.')
wfFrSwIsdnAssocIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 9, 1, 5), Integer32().clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnAssocIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnAssocIndex.setDescription('A number that indicates the FRSW UNI Index that is is associated with the ISDN Phone Number. This FRSW UNI Index is used as a key to obtain the UNIs and the Screening information from the wfFrSwIsdnScrnEntry and wfFrSwIsdnUniEntry mibs. The default for the index is 2**31 - 1 = 2147483647 = 0x7FFFFFFF, which represents an unconfigured index number.')
# --- wfFrSwIsdnUni table (1.3.6.1.4.1.18.3.5.9.6.10) ---
# Hunt-group membership: collects FRSW UNIs under a shared UNI index,
# indexed by (UNI index, UNI number); each row carries an available/in-use
# state column.
wfFrSwIsdnUniTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10), )
if mibBuilder.loadTexts: wfFrSwIsdnUniTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniTable.setDescription('This table is used by the FRSW over ISDN application. The table defines a list of FRSW UNIs that are to be collected into a hunt group identifiable by an Index Number.')
wfFrSwIsdnUniEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnUniIndex"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnUniNum"))
if mibBuilder.loadTexts: wfFrSwIsdnUniEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniEntry.setDescription('Instance Id for this table.')
wfFrSwIsdnUniDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnUniDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniDelete.setDescription('Indication to delete this FRSW UNI Index Instance.')
wfFrSwIsdnUniIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniIndex.setDescription('FRSW UNI Index -- a number that identifies a group of related FRSW UNIs that are collected together as a hunt group. This number ties this entry to an entry in wfFrSwIsdnAssocEntry.')
wfFrSwIsdnUniNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniNum.setDescription('A FRSW UNI/Circuit.')
wfFrSwIsdnUniState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("available", 1), ("inuse", 2))).clone('available')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnUniState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnUniState.setDescription('State of this UNI (available or in-use).')
# --- wfFrSwIsdnScrn table (1.3.6.1.4.1.18.3.5.9.6.11) ---
# Incoming-call screening list: authorized ISDN phone numbers per FRSW UNI
# index, indexed by (UNI index, phone number).  Consulted only when
# wfFrSwIsdnAssocScrnEnable is enabled for the same UNI index.
wfFrSwIsdnScrnTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11), )
if mibBuilder.loadTexts: wfFrSwIsdnScrnTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnTable.setDescription('This is the incoming call screening table for the FRSW over ISDN application. The table consists of a FRSW UNI Index and a list of allowable ISDN Phone numbers for that FRSW UNI Index. The table is indexed by both the FRSW UNI Index and the ISDN Phone Number. This table is referenced only when the wfFrSwIsdnAssocScrnEnable is set to Enabled for this FRSW UNI Index.')
wfFrSwIsdnScrnEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnScrnIndex"), (0, "Wellfleet-FRSW-MIB", "wfFrSwIsdnScrnNum"))
if mibBuilder.loadTexts: wfFrSwIsdnScrnEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnEntry.setDescription(' Instance Id for this table. ')
wfFrSwIsdnScrnDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwIsdnScrnDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnDelete.setDescription(' Indication to delete this Scrn Instance. ')
wfFrSwIsdnScrnIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnScrnIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnIndex.setDescription('FRSW UNI Index - A number that ties this entry to an entry in wfFrSwIsdnAssocEntry.')
wfFrSwIsdnScrnNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 11, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwIsdnScrnNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwIsdnScrnNum.setDescription('ISDN Phone Number of a user authorized to access the UNIs contained in the FRSW UNI Index. ')
# --- wfFrSwSig table (1.3.6.1.4.1.18.3.5.9.6.12) ---
# Frame Relay SVC signalling, one row per access-channel circuit.  The row
# groups: SVC DLCI range/assignment policy, SVC counts and limits,
# default/maximum Link Layer Core Parameters (throughput, Bc, Be),
# negotiable-throughput budgets, call-handling options, Q.933/Q.931 protocol
# timers, and per-message-type packet counters.
wfFrSwSigTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12), )
if mibBuilder.loadTexts: wfFrSwSigTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTable.setDescription(" The Frame relay signalling table contains frame relay signalling entries indexed by the frame relay access channel circuit number. An instance of wfFrSwSigEntry is required for each frame relay access channel with frame relay signalling enabled. The absence of wfFrSwSigEntry for a given frame relay access channel implies that frame relay signalling is disabled for the circuit. Note that the terms 'incoming' and 'outgoing' refer to the frame mode call with respect to the network side of the interface. The terminology used by CCITT Q.933/Q.931 is different. ")
wfFrSwSigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwSigCircuit"))
if mibBuilder.loadTexts: wfFrSwSigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigEntry.setDescription(' An entry in the Frame Relay signalling port information table. ')
wfFrSwSigDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDelete.setDescription(' Indication to delete this instance ')
wfFrSwSigCircuit = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigCircuit.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigCircuit.setDescription(' The circuit number for this frame relay access channel ')
# SVC DLCI range and assignment direction (columns 3-5).
wfFrSwSigSvcDlciLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigSvcDlciLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigSvcDlciLow.setDescription(' Lowest DLCI to be used for SVC, the default value is for 2 octet frame header ')
wfFrSwSigSvcDlciHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(991)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigSvcDlciHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigSvcDlciHigh.setDescription(' Highest DLCI to be used for SVC, the default value is for 2 octet frame header. ')
wfFrSwSigDlciAssign = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("increment", 1), ("decrement", 2))).clone('decrement')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDlciAssign.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDlciAssign.setDescription(" Determines if DLCI's are assigned starting at wfFrSwSigSvcDlciHigh and working towards wfFrSwSigSvcDlciLow or vice versa. ")
# SVC count limits and traffic-parameter defaults/budgets (columns 6-17).
wfFrSwSigMaxNumOfSvcs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxNumOfSvcs.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxNumOfSvcs.setDescription(' Indicates the maximum number of simultaneous switched virtual circuits allowed on the logical line. ')
wfFrSwSigNumOfSvcsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigNumOfSvcsInUse.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigNumOfSvcsInUse.setDescription(' Indicates the number of switched virtual circuits in use on the logical line. ')
wfFrSwSigDefaultThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultThroughput.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing throughput fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultMinAcceptThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultMinAcceptThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultMinAcceptThroughput.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing minimum acceptable throughput fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultBc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigDefaultBc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultBc.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing Bc fields when they are not included in the setup message by the user. ')
wfFrSwSigDefaultBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDefaultBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDefaultBe.setDescription(' This value is used by the network in the Link Layer Core Parameters IE incoming & outgoing Be fields when they are not included in the setup message by the user. ')
wfFrSwSigMaxInThroughputPerSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxInThroughputPerSvc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxInThroughputPerSvc.setDescription(' This is the maximum incoming throughput that any single SVC may negotiate for a call. Calls requesting in excess of this attribute are rejected. ')
wfFrSwSigMaxOutThroughputPerSvc = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaxOutThroughputPerSvc.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaxOutThroughputPerSvc.setDescription(' This is the maximum outgoing throughput that any single SVC may negotiate for a call. Calls requesting in excess of this attribute are rejected. ')
wfFrSwSigTotalInNegotiableThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigTotalInNegotiableThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalInNegotiableThroughput.setDescription(' This is the total maximum incoming throughput that is available for all frame mode calls on the port. If the sum of the incoming throughput requested by a call and wfFrSwSigTotalInCurrentThroughput is in excess of this value, the call is rejected. ')
wfFrSwSigTotalInCurrentThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigTotalInCurrentThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalInCurrentThroughput.setDescription(" This is the total incoming throughput that has been negotiated for use by all SVC's on the port. ")
wfFrSwSigTotalOutNegotiableThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigTotalOutNegotiableThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalOutNegotiableThroughput.setDescription(' This is the total maximum outgoing throughput that is available for all frame mode calls on the port. If the sum of the outgoing throughput requested by a call and wfFrSwSigTotalOutCurrentThroughput is in excess of this value, the call is rejected. ')
wfFrSwSigTotalOutCurrentThroughput = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigTotalOutCurrentThroughput.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigTotalOutCurrentThroughput.setDescription(" This is the total incoming throughput that has been negotiated for use by all SVC's on the port. ")
# Call-handling options (columns 18-19).
wfFrSwSigXNetClearingDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigXNetClearingDisable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigXNetClearingDisable.setDescription(' If cross-net polling (wfFrSwDlcmiCrossNetEnable) is enabled on this interface, and the error threshold (wfFrSwDlcmiCrossNetErrorThreshold) is exceeded, the network can clear the call. ')
wfFrSwSigCallingPartyIEMandatory = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigCallingPartyIEMandatory.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigCallingPartyIEMandatory.setDescription(' Reject the call if the Calling Party IE is absent in the setup message or if the provided Calling Party IE fails address authentication tests againt the configured address(es) on the ingress logical line.')
# Q.933/Q.931 protocol timers in seconds except T301 in minutes (columns 20-25).
wfFrSwSigT301 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT301.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT301.setDescription(' Timer number: T301 default time-out: 3 min state of call: call initiated cause for start: incoming setup normal stop: outgoing connect at the first expiry: clear call at the second expiry: timer not restarted ')
wfFrSwSigT303 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT303.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT303.setDescription(' Timer number: T303 default time-out: 4 s state of call: call present cause for start: outgoing setup normal stop: incoming connect/call-proceeding/ release-complete at the first expiry: retransmit setup, restart T303 at the second expiry: clear call ')
wfFrSwSigT305 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 90)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT305.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT305.setDescription(' Timer number: T305 default time-out: 30 s state of call: disconnect ind cause for start: outgoing disconnect normal stop: incoming release/disconnect at the first expiry: outgoing release at the second expiry: timer not restarted ')
wfFrSwSigT308 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT308.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT308.setDescription(' Timer number: T308 default time-out: 4 s state of call: release req cause for start: outgoing release normal stop: incoming release/release-complete at the first expiry: retransmit release, restart T308 at the second expiry: place access channel in maintenance ')
wfFrSwSigT310 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT310.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT310.setDescription(' Timer number: T310 default time-out: 10 s state of call: incoming call proceeding cause for start: incoming call proceeding normal stop: incoming connect/disconnect at the first expiry: clear call at the second expiry: timer not restarted ')
wfFrSwSigT322 = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 90)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigT322.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigT322.setDescription(' Timer number: T322 default time-out: 4 s state of call: any call state cause for start: outgoing status enquiry normal stop: incoming status/disconnect/ release/release-complete at the first expiry: retransmit status-enq, restart T322 at the second expiry: resend status enq and restart T322 ')
# Read-only per-message-type counters: incoming (26-34), outgoing (35-42),
# and error/abort statistics (43-45).
wfFrSwSigInSetupPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInSetupPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInSetupPkts.setDescription(' number of incoming setup packets ')
wfFrSwSigInCallProceedingPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInCallProceedingPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInCallProceedingPkts.setDescription(' number of incoming call proceeding packets ')
wfFrSwSigInConnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInConnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInConnectPkts.setDescription(' number of incoming connect packets ')
wfFrSwSigInDisconnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInDisconnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInDisconnectPkts.setDescription(' number of incoming disconnect packets ')
wfFrSwSigInReleasePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInReleasePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInReleasePkts.setDescription(' number of incoming release packets ')
wfFrSwSigInReleaseCompletePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInReleaseCompletePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInReleaseCompletePkts.setDescription(' number of incoming release complete packets ')
wfFrSwSigInStatusEnquiryPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInStatusEnquiryPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInStatusEnquiryPkts.setDescription(' number of incoming status enquiry packets ')
wfFrSwSigInStatusPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInStatusPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInStatusPkts.setDescription(' number of incoming status packets ')
wfFrSwSigInUnknownPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigInUnknownPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigInUnknownPkts.setDescription(' number of incoming unknown packets ')
wfFrSwSigOutSetupPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutSetupPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutSetupPkts.setDescription(' number of outgoing setup packets ')
wfFrSwSigOutCallProceedingPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutCallProceedingPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutCallProceedingPkts.setDescription(' number of outgoing call proceeding packets ')
wfFrSwSigOutConnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutConnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutConnectPkts.setDescription(' number of outgoing connect packets ')
wfFrSwSigOutDisconnectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutDisconnectPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutDisconnectPkts.setDescription(' number of outgoing disconnect packets ')
wfFrSwSigOutReleasePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutReleasePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutReleasePkts.setDescription(' number of outgoing release packets ')
wfFrSwSigOutReleaseCompletePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutReleaseCompletePkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutReleaseCompletePkts.setDescription(' number of outgoing release packest ')
wfFrSwSigOutStatusEnquiryPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutStatusEnquiryPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutStatusEnquiryPkts.setDescription(' number of outgoing status enquiry packets ')
wfFrSwSigOutStatusPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigOutStatusPkts.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigOutStatusPkts.setDescription(' number of outgoing status packets ')
wfFrSwSigRejectedConnRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigRejectedConnRequests.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigRejectedConnRequests.setDescription(' number of connections rejected ')
wfFrSwSigNwrkAbortedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigNwrkAbortedConnections.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigNwrkAbortedConnections.setDescription(' number of connections aborted by network ')
wfFrSwSigL2Resets = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwSigL2Resets.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigL2Resets.setDescription(' number of L2 resets ')
# IE-acceptance switches and the maximum allowed Be (columns 46-48).
wfFrSwSigDlciIEAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 46), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigDlciIEAllowed.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigDlciIEAllowed.setDescription(' Reject the call if the Dlci IE is present in the setup message and wfFrSwSigDlciIEAllowed is set to disabled.')
wfFrSwSigX213PriorityIEAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 47), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigX213PriorityIEAllowed.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigX213PriorityIEAllowed.setDescription(' Reject the call if the X213 Priority IE is present in setup message and wfFrSwSigX213PriorityIEAllowed is set to disabled.')
wfFrSwSigMaximumBe = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 12, 1, 48), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(2147483647)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwSigMaximumBe.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwSigMaximumBe.setDescription('This value is the maximum allowed Be for a SVC connection')
# --- wfFrSwGlobalE164Addr table (1.3.6.1.4.1.18.3.5.9.6.13) ---
# Directory Services mapping from a non-overlapping E.164 address range
# (low/high, 8-byte right-justified zero-padded octet strings) to an
# internal IP network address.
wfFrSwGlobalE164AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13), )
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrTable.setDescription(' wfFrSwGlobalE164AddrTable is used by Directory Services to translate a range of E.164 addresses into an internal IP network address. E.164 ranges must not ever overlap. ')
wfFrSwGlobalE164AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalE164AddrLow"), (0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalE164AddrHigh"))
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrEntry.setDescription(' An entry in the Frame Relay Global E.164 Address Table. ')
wfFrSwGlobalE164AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrDelete.setDescription(' Indication to delete/create this entry. ')
wfFrSwGlobalE164AddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrLow.setDescription(' Instance identifier; the low end of the E.164 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalE164AddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrHigh.setDescription(' Instance identifier; the high end of the E.164 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalE164AddrIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 13, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrIPAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalE164AddrIPAddr.setDescription(' This is the internal IP network address associated with this range of E.164 addresses. ')
# --- wfFrSwGlobalX121AddrTable (OID ...9.6.14) ---
# X.121 counterpart of the Global E.164 table above: maps a non-overlapping
# X.121 address range to an internal IP network address.
wfFrSwGlobalX121AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14), )
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrTable.setDescription(' wfFrSwGlobalX121AddrTable is used by Directory Services to translate a range of X.121 addresses into an internal IP network address. X.121 ranges must not ever overlap. ')
wfFrSwGlobalX121AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalX121AddrLow"), (0, "Wellfleet-FRSW-MIB", "wfFrSwGlobalX121AddrHigh"))
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrEntry.setDescription(' An entry in the Frame Relay Global X.121 Address Table. ')
# Column 1: row create/delete control.
wfFrSwGlobalX121AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Columns 2-3: index columns — low/high ends of the X.121 range, 8-byte
# fixed-length octet strings, right justified with leading zero padding.
wfFrSwGlobalX121AddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrLow.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrLow.setDescription(' Instance identifier; the low end of the X.121 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
wfFrSwGlobalX121AddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrHigh.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrHigh.setDescription(' Instance identifier; the high end of the X.121 address range. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Column 4: internal IP network address this X.121 range maps to.
wfFrSwGlobalX121AddrIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 14, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrIPAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwGlobalX121AddrIPAddr.setDescription(' This is the internal IP network address associated with this range of X.121 addresses. ')
# --- wfFrSwLocalE164AddrTable (OID ...9.6.15) ---
# E.164 addresses configured on the local BNX switch, with Closed User Group
# (CUG) membership data. Indexed by (circuit number, E.164 address).
wfFrSwLocalE164AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15), )
if mibBuilder.loadTexts: wfFrSwLocalE164AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrTable.setDescription(' wfFrSwLocalE164AddrTable contains E.164 addresses on the local BNX and CUG (Closed User Group) related information. ')
wfFrSwLocalE164AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwLocalE164AddrCct"), (0, "Wellfleet-FRSW-MIB", "wfFrSwLocalE164Address"))
if mibBuilder.loadTexts: wfFrSwLocalE164AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrEntry.setDescription(' An entry in the Frame Relay Local E.164 Address Table. ')
# Column 1: row create/delete control.
wfFrSwLocalE164AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Column 2: index — internal circuit (CCT) number for this address.
wfFrSwLocalE164AddrCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCct.setDescription(' Instance identifier; internal CCT number associated with this E.164 address. ')
# Column 3: index — the E.164 address itself (8-byte fixed-length octet
# string, right justified, zero padded).
wfFrSwLocalE164Address = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalE164Address.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164Address.setDescription(' Instance identifier; an E.164 address. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Column 4: packed CUG/COI membership structure; the full wire layout
# (group number + length-prefixed COI bitmask, groups in increasing order)
# is spelled out in the DESCRIPTION below. Empty value = no CUG restriction.
wfFrSwLocalE164AddrCUG = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCUG.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrCUG.setDescription(' CUG (Closed User Group) information. The structure of the CUG information consists zero or more groups (number of groups can be derived from the OCTET STRING data type of this MIB attribute) of COI structure information. Each COI structure consists of a COI group number (4-byte integer) and a COI list. Each COI list consists of a length field (4-byte integer) which specifies the number of bytes of COI bit-encoded information belonging to this group and the COI information structure. COI information structure is a bit mask field where each bit from left to right represents whether this E.164 address belongs to a particular COI number within this COI group number. Please note that COI group numbers can not be repeated and that the COI group numbers must be in increasing order in the CUG configuration MIB wfFrSwLocalE164AddrCUG defaults to zero COI groups which means no CUG related information and hence this local wfFrSwLocalE164Address is allowed to communicate with all users. ')
# Column 5: local/non-local flag, used for SPVCs per its DESCRIPTION.
wfFrSwLocalE164AddrLocalFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 15, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("nonlocal", 2))).clone('local')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalE164AddrLocalFlag.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalE164AddrLocalFlag.setDescription(' Local/Non-Local Identifier Flag. Used for SPVCs.')
# --- wfFrSwLocalX121AddrTable (OID ...9.6.16) ---
# X.121 counterpart of the Local E.164 table above: local BNX X.121 addresses
# plus CUG membership. Indexed by (circuit number, X.121 address).
wfFrSwLocalX121AddrTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16), )
if mibBuilder.loadTexts: wfFrSwLocalX121AddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrTable.setDescription(' wfFrSwLocalX121AddrTable contains X.121 addresses on the local BNX and CUG (Closed User Group) related information. ')
wfFrSwLocalX121AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwLocalX121AddrCct"), (0, "Wellfleet-FRSW-MIB", "wfFrSwLocalX121Address"))
if mibBuilder.loadTexts: wfFrSwLocalX121AddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrEntry.setDescription(' An entry in the Frame Relay Local X.121 Address Table. ')
# Column 1: row create/delete control.
wfFrSwLocalX121AddrDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrDelete.setDescription(' Indication to delete/create this entry. ')
# Column 2: index — internal circuit (CCT) number for this address.
wfFrSwLocalX121AddrCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCct.setDescription(' Instance identifier; internal CCT number associated with this X.121 address. ')
# Column 3: index — the X.121 address itself (8-byte fixed-length octet
# string, right justified, zero padded).
wfFrSwLocalX121Address = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwLocalX121Address.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121Address.setDescription(' Instance identifier; a X.121 address. This is an 8-byte fixed length octet string format, right justified with padded leading zeros as necessary. ')
# Column 4: packed CUG/COI membership structure; layout detailed in the
# DESCRIPTION below. Empty value = no CUG restriction.
wfFrSwLocalX121AddrCUG = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCUG.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrCUG.setDescription(' CUG (Closed User Group) information. The structure of the CUG information consists zero or more groups (number of groups can be derived from the OCTET STRING data type of this MIB attribute) of COI structure information. Each COI structure consists of a COI group number (4-byte integer) and a COI list. Each COI list consists of a length field (4-byte integer) which specifies the number of bytes of COI bit-encoded information belonging to this group and the COI information structure. COI information structure is a bit mask field where each bit from left to right represents whether this X.121 address belongs to a particular COI number within this COI group number. wfFrSwLocalX121AddrCUG defaults to zero COI groups which means no CUG related information and hence this local wfFrSwLocalX121Address is allowed to communicate with all users. ')
# Column 5: local/non-local flag, used for SPVCs per its DESCRIPTION.
wfFrSwLocalX121AddrLocalFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("nonlocal", 2))).clone('local')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwLocalX121AddrLocalFlag.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwLocalX121AddrLocalFlag.setDescription(' Local/Non-Local Identifier Flag. Used for SPVCs.')
# --- wfFrSwBase scalar group (OID ...9.6.17) ---
# Switch-wide base settings: group create/delete, the switch's circuit-less
# IP address, and a per-slot shutdown bitmask.
wfFrSwBase = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17))
# Scalar 1: create/delete control for the whole base group.
wfFrSwBaseDelete = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseDelete.setDescription(' Indication to delete/create this base group ')
# Scalar 2: the BNX's circuit-less IP address.
wfFrSwBaseIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseIpAddr.setDescription(" Indicates this BNX's (circuit-less) IP address ")
# Scalar 3: slot-shutdown bitmask (MSBit = slot 1, next bit = slot 2, ...,
# covering slots 1-14). Typed Counter32 in the generated code even though it
# is semantically a bitmask — kept as generated.
wfFrSwBaseShutDown = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 17, 3), Counter32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwBaseShutDown.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwBaseShutDown.setDescription('Bit mask for slots to shutdown, slots 1-14. The MSBit represents slot 1, the next most significant bit represents slot 2, and so forth.')
# --- wfFrSwCngcMonTable (OID ...9.6.18) ---
# Congestion-control monitor: per-circuit percentages of time spent at each
# of four congestion levels, broken out by traffic priority 0-3.
# Columns 3-18 form a regular 4x4 grid: P{priority}Level{level}Percent.
wfFrSwCngcMonTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18), )
if mibBuilder.loadTexts: wfFrSwCngcMonTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonTable.setDescription('This table is used by FRSW Congestion Control application. The table is used to Monitor the congestion level of a particular circuit.')
wfFrSwCngcMonEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwCngcMonCct"))
if mibBuilder.loadTexts: wfFrSwCngcMonEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonEntry.setDescription('Instance Id for this table.')
# Column 1: write to reset the monitor counters.
wfFrSwCngcMonReset = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwCngcMonReset.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonReset.setDescription('Indication to reset Cngc Monitor Counters.')
# Column 2: index — the circuit being monitored.
wfFrSwCngcMonCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonCct.setDescription('Circuit to be monitored. ')
# Columns 3-6: priority 0, congestion levels 1-4.
wfFrSwCngcMonP0Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 0 Traffic.')
wfFrSwCngcMonP0Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP0Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 0 Traffic.')
# Columns 7-10: priority 1, congestion levels 1-4.
wfFrSwCngcMonP1Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 1 Traffic.')
wfFrSwCngcMonP1Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP1Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 1 Traffic.')
# Columns 11-14: priority 2, congestion levels 1-4.
wfFrSwCngcMonP2Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 2 Traffic.')
wfFrSwCngcMonP2Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP2Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 2 Traffic.')
# Columns 15-18: priority 3, congestion levels 1-4.
wfFrSwCngcMonP3Level1Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level1Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level1Percent.setDescription('Percentage of time congestion is at level 1. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level2Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level2Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level2Percent.setDescription('Percentage of time congestion is at level 2. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level3Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level3Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level3Percent.setDescription('Percentage of time congestion is at level 3. for Priority 3 Traffic.')
wfFrSwCngcMonP3Level4Percent = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 18, 1, 18), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level4Percent.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwCngcMonP3Level4Percent.setDescription('Percentage of time congestion is at level 4. for Priority 3 Traffic.')
# --- wfFrSwVirtualIntfTable (OID ...9.6.19) ---
# Defines 'virtual' FRSW access lines. Indexed by (slot, circuit); exposes
# the line number assigned to each virtual interface.
wfFrSwVirtualIntfTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19), )
if mibBuilder.loadTexts: wfFrSwVirtualIntfTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfTable.setDescription("The table is used to create 'virtual' FRSW access lines.")
wfFrSwVirtualIntfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwVirtualIntfSlot"), (0, "Wellfleet-FRSW-MIB", "wfFrSwVirtualIntfCct"))
if mibBuilder.loadTexts: wfFrSwVirtualIntfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfEntry.setDescription('Instance Id for this table.')
# Column 1: row create/delete control.
wfFrSwVirtualIntfDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwVirtualIntfDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfDelete.setDescription('Indication to delete this virtual interface.')
# Columns 2-3: index columns — slot number and circuit number.
wfFrSwVirtualIntfSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfSlot.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfSlot.setDescription('Instance identifier; the slot number of this interface.')
wfFrSwVirtualIntfCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfCct.setDescription('Instance identifier; the circuit number of this interface.')
# Column 4: the line number of this virtual interface.
wfFrSwVirtualIntfLineNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 19, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwVirtualIntfLineNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwVirtualIntfLineNum.setDescription('Line number for this virtual interface.')
# --- wfFrSwExtFileSysTable (OID ...9.6.20) ---
# Per-slot configuration/status for extending the FRSW file system into DRAM:
# requested size, actually allocated size, and operational state.
wfFrSwExtFileSysTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20), )
if mibBuilder.loadTexts: wfFrSwExtFileSysTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysTable.setDescription('This table is used by FRSW to extend the file system to DRAM device.')
wfFrSwExtFileSysEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1), ).setIndexNames((0, "Wellfleet-FRSW-MIB", "wfFrSwExtFileSysSlot"))
if mibBuilder.loadTexts: wfFrSwExtFileSysEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysEntry.setDescription('Instance Id for this table.')
# Column 1: row create/delete control.
wfFrSwExtFileSysDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwExtFileSysDelete.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysDelete.setDescription(' Indication to delete/create this entry. ')
# Column 2: index — slot number (1..14 per the DESCRIPTION).
wfFrSwExtFileSysSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysSlot.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysSlot.setDescription('A unique value for each slot. Its value ranges between 1 and 14.')
# Column 3: requested extended-file-system size in bytes; 0 disables it.
# Suggested to be a multiple of 128 KiB (table of common values in the
# DESCRIPTION string below).
wfFrSwExtFileSysSize = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfFrSwExtFileSysSize.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysSize.setDescription('The memory size of the extended file system in byte unit. The value zero also means that extended file system is disabled. Non-zero value means enabled. Its suggested that the size is in multiple of 128k bytes. Some of the well-known memory sizes and their correspond decimal values are as followed: Mem size Decimal Value ^^^^^^^^ ^^^^^^^^^^^^^ 128K 131072 256K 262144 512K 524288 1M 1048576 2M 2097152 4M 4194304 8M 8388608 ')
# Column 4: size the system actually allocated (may differ from requested).
wfFrSwExtFileSysActualSize = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysActualSize.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysActualSize.setDescription('The actual memory size the system allocated.')
# Column 5: operational state — up/fault/init/notpresent; 'notpresent'
# corresponds to a configured size of zero (see DESCRIPTION).
wfFrSwExtFileSysState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 9, 6, 20, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("fault", 2), ("init", 3), ("notpresent", 4))).clone('notpresent')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfFrSwExtFileSysState.setStatus('mandatory')
if mibBuilder.loadTexts: wfFrSwExtFileSysState.setDescription('The status of the extended file system. State up indicates that the requested memory size for the extended file system has been allocated successfully and the extended file system is in operational state. State fault indicates that the requested memory size for the extended file system has NOT been allocated successfully and the extended file system is NOT in operational state. One reason for entering the fault state is insufficient available memory. State init indicates that the system is in the initialization cycle. The extended file system is not operational. State notpresent reflects the size of zero.')
mibBuilder.exportSymbols("Wellfleet-FRSW-MIB", wfFrSwCctLocalSetFECNFrames=wfFrSwCctLocalSetFECNFrames, wfFrSwDlcmiEscapeMode=wfFrSwDlcmiEscapeMode, wfFrSwVcRedirectAction=wfFrSwVcRedirectAction, wfFrSwSigOutDisconnectPkts=wfFrSwSigOutDisconnectPkts, wfFrSwCctLocalSetDEFrames=wfFrSwCctLocalSetDEFrames, wfFrSwSigOutStatusPkts=wfFrSwSigOutStatusPkts, wfFrSwSigTotalInCurrentThroughput=wfFrSwSigTotalInCurrentThroughput, wfFrSwIsdnScrnTable=wfFrSwIsdnScrnTable, wfFrSwVcAtmIwfLossPriorityPolicy=wfFrSwVcAtmIwfLossPriorityPolicy, wfFrSwSigT303=wfFrSwSigT303, wfFrSwUsageCurVolumeBackup=wfFrSwUsageCurVolumeBackup, wfFrSwVcInactiveVcDropFrames=wfFrSwVcInactiveVcDropFrames, wfFrSwL3NetAddress=wfFrSwL3NetAddress, wfFrSwSigInConnectPkts=wfFrSwSigInConnectPkts, wfFrSwLocalE164AddrDelete=wfFrSwLocalE164AddrDelete, wfFrSwUsageUpdateData=wfFrSwUsageUpdateData, wfFrSwExtFileSysDelete=wfFrSwExtFileSysDelete, wfFrSwDlcmiActiveSeqCount=wfFrSwDlcmiActiveSeqCount, wfFrSwUsageCircuitNumber=wfFrSwUsageCircuitNumber, wfFrSwUsageStartTimeStampHigh=wfFrSwUsageStartTimeStampHigh, wfFrSwVcRecvDeOctets=wfFrSwVcRecvDeOctets, wfFrSwIsdnUniDelete=wfFrSwIsdnUniDelete, wfFrSwCngcMonP0Level1Percent=wfFrSwCngcMonP0Level1Percent, wfFrSwCctLocalRecvDEOctets=wfFrSwCctLocalRecvDEOctets, wfFrSwUsageVolumeBackup=wfFrSwUsageVolumeBackup, wfFrSwSigTotalInNegotiableThroughput=wfFrSwSigTotalInNegotiableThroughput, wfFrSwSigOutReleaseCompletePkts=wfFrSwSigOutReleaseCompletePkts, wfFrSwUsageSentDEOctetsHigh=wfFrSwUsageSentDEOctetsHigh, wfFrSwCctOutThroughput=wfFrSwCctOutThroughput, wfFrSwDlcmiDteLastReceived=wfFrSwDlcmiDteLastReceived, wfFrSwCctRemoteSentDEOctets=wfFrSwCctRemoteSentDEOctets, wfFrSwSvcUsageVersionId=wfFrSwSvcUsageVersionId, wfFrSwCctRemoteRecvNonDEOctets=wfFrSwCctRemoteRecvNonDEOctets, wfFrSwCctRemoteRecvDEOctets=wfFrSwCctRemoteRecvDEOctets, wfFrSwSvcUsageUpdateInterval=wfFrSwSvcUsageUpdateInterval, wfFrSwCctRemoteSentNonDEFrames=wfFrSwCctRemoteSentNonDEFrames, 
wfFrSwVcCallReqRetryTimer=wfFrSwVcCallReqRetryTimer, wfFrSwMcastDlci=wfFrSwMcastDlci, wfFrSwCctLocalBecnState=wfFrSwCctLocalBecnState, wfFrSwVcRecvBecnOctets=wfFrSwVcRecvBecnOctets, wfFrSwGlobalX121AddrDelete=wfFrSwGlobalX121AddrDelete, wfFrSwUsageVolume=wfFrSwUsageVolume, wfFrSwDlcmiCrossNetListenEnable=wfFrSwDlcmiCrossNetListenEnable, wfFrSwSvcUsageNumEntries=wfFrSwSvcUsageNumEntries, wfFrSwVcInBc=wfFrSwVcInBc, wfFrSwDlcmiDteErrorThreshold=wfFrSwDlcmiDteErrorThreshold, wfFrSwUsageState=wfFrSwUsageState, wfFrSwIsdnScrnNum=wfFrSwIsdnScrnNum, wfFrSwVcOutThroughput=wfFrSwVcOutThroughput, wfFrSwUsageStartTimeStampLow=wfFrSwUsageStartTimeStampLow, wfFrSwUsageCurDebug=wfFrSwUsageCurDebug, wfFrSwMcastIndividualDlci=wfFrSwMcastIndividualDlci, wfFrSwVcXNetReceived=wfFrSwVcXNetReceived, wfFrSwSvcUsageFlushData=wfFrSwSvcUsageFlushData, wfFrSwVcSetBecnOctets=wfFrSwVcSetBecnOctets, wfFrSwIsdnUniTable=wfFrSwIsdnUniTable, wfFrSwDlcmiCircuit=wfFrSwDlcmiCircuit, wfFrSwIsdnAssocNum=wfFrSwIsdnAssocNum, wfFrSwVcEscapeEnable=wfFrSwVcEscapeEnable, wfFrSwDlcmiDeletedVCs=wfFrSwDlcmiDeletedVCs, wfFrSwVcOutBe=wfFrSwVcOutBe, wfFrSwCctReceivedStatus=wfFrSwCctReceivedStatus, wfFrSwCctLocalRecvBECNFrames=wfFrSwCctLocalRecvBECNFrames, wfFrSwDlcmiAsyncUpdateEnable=wfFrSwDlcmiAsyncUpdateEnable, wfFrSwIsdnBaseSlotNum=wfFrSwIsdnBaseSlotNum, wfFrSwUsageFilePrefix=wfFrSwUsageFilePrefix, wfFrSwLocalE164AddrTable=wfFrSwLocalE164AddrTable, wfFrSwGlobalX121AddrTable=wfFrSwGlobalX121AddrTable, wfFrSwDlcmiCrossNetErrorThreshold=wfFrSwDlcmiCrossNetErrorThreshold, wfFrSwCctCreationTime=wfFrSwCctCreationTime, wfFrSwCctRemoteBecnState=wfFrSwCctRemoteBecnState, wfFrSwCctOutBe=wfFrSwCctOutBe, wfFrSwGlobalE164AddrLow=wfFrSwGlobalE164AddrLow, wfFrSwLocalX121AddrTable=wfFrSwLocalX121AddrTable, wfFrSwExtFileSysState=wfFrSwExtFileSysState, wfFrSwCctRemoteSetFECNFrames=wfFrSwCctRemoteSetFECNFrames, wfFrSwIsdnUniEntry=wfFrSwIsdnUniEntry, wfFrSwCctRemoteRecvFECNOctets=wfFrSwCctRemoteRecvFECNOctets, 
wfFrSwExtFileSysActualSize=wfFrSwExtFileSysActualSize, wfFrSwDlcmiDteFullEnquiryInterval=wfFrSwDlcmiDteFullEnquiryInterval, wfFrSwGlobalX121AddrLow=wfFrSwGlobalX121AddrLow, wfFrSwCctOutBc=wfFrSwCctOutBc, wfFrSwDlcmiDteReceived=wfFrSwDlcmiDteReceived, wfFrSwDlcmiSequenceCount=wfFrSwDlcmiSequenceCount, wfFrSwSigDlciIEAllowed=wfFrSwSigDlciIEAllowed, wfFrSwCctTable=wfFrSwCctTable, wfFrSwDlcmiVCsInUse=wfFrSwDlcmiVCsInUse, wfFrSwVcInactiveVcDropOctets=wfFrSwVcInactiveVcDropOctets, wfFrSwUsageEndTimeStampLow=wfFrSwUsageEndTimeStampLow, wfFrSwVcEntry=wfFrSwVcEntry, wfFrSwUsageCurCleanupInterval=wfFrSwUsageCurCleanupInterval, wfFrSwUsageEnable=wfFrSwUsageEnable, wfFrSwSvcUsageCurVolume=wfFrSwSvcUsageCurVolume, wfFrSwDlcmiTable=wfFrSwDlcmiTable, wfFrSwCctRemoteSentDEFrames=wfFrSwCctRemoteSentDEFrames, wfFrSwCctInThroughput=wfFrSwCctInThroughput, wfFrSwVcState=wfFrSwVcState, wfFrSwIsdnAssocIndex=wfFrSwIsdnAssocIndex, wfFrSwUsageSwitchName=wfFrSwUsageSwitchName, wfFrSwIsdnAssocEntry=wfFrSwIsdnAssocEntry, wfFrSwDlcmiState=wfFrSwDlcmiState, wfFrSwUsageTimerInterval=wfFrSwUsageTimerInterval, wfFrSwVcRecvNonDeFrames=wfFrSwVcRecvNonDeFrames, wfFrSwVcRecvFecnOctets=wfFrSwVcRecvFecnOctets, wfFrSwDlcmiDteStatus=wfFrSwDlcmiDteStatus, wfFrSwSvcUsageCurStoreInterval=wfFrSwSvcUsageCurStoreInterval, wfFrSwLocalX121AddrDelete=wfFrSwLocalX121AddrDelete, wfFrSwUsageStoreTimeStamp=wfFrSwUsageStoreTimeStamp, wfFrSwDlcmiManagementType=wfFrSwDlcmiManagementType, wfFrSwSigInStatusPkts=wfFrSwSigInStatusPkts, wfFrSwUsageLastNonDEFramesLow=wfFrSwUsageLastNonDEFramesLow, wfFrSwVcReceivedStatus=wfFrSwVcReceivedStatus, wfFrSwDlcmiControlByteDisable=wfFrSwDlcmiControlByteDisable, wfFrSwVcXNetSent=wfFrSwVcXNetSent, wfFrSwCngcMonP1Level1Percent=wfFrSwCngcMonP1Level1Percent, wfFrSwCngcMonP2Level2Percent=wfFrSwCngcMonP2Level2Percent, wfFrSwUsageUpdateTimeStamp=wfFrSwUsageUpdateTimeStamp, wfFrSwSigMaxNumOfSvcs=wfFrSwSigMaxNumOfSvcs, wfFrSwDlcmiAddressLen=wfFrSwDlcmiAddressLen, 
wfFrSwSigNwrkAbortedConnections=wfFrSwSigNwrkAbortedConnections, wfFrSwVcReportedStatus=wfFrSwVcReportedStatus, wfFrSwVirtualIntfLineNum=wfFrSwVirtualIntfLineNum, wfFrSwCngcMonTable=wfFrSwCngcMonTable, wfFrSwCctRemoteRecvBECNOctets=wfFrSwCctRemoteRecvBECNOctets, wfFrSwUsageSwitchId=wfFrSwUsageSwitchId, wfFrSwVcBecnState=wfFrSwVcBecnState, wfFrSwIsdnUniNum=wfFrSwIsdnUniNum, wfFrSwSvcUsageState=wfFrSwSvcUsageState, wfFrSwVcTxDeFrames=wfFrSwVcTxDeFrames, wfFrSwCctLocalSentDEOctets=wfFrSwCctLocalSentDEOctets, wfFrSwCctRemoteRecvFECNFrames=wfFrSwCctRemoteRecvFECNFrames, wfFrSwVcBackupCalledDlci=wfFrSwVcBackupCalledDlci, wfFrSwVcCallReqCalledDlci=wfFrSwVcCallReqCalledDlci, wfFrSwCctLocalRecvBECNOctets=wfFrSwCctLocalRecvBECNOctets, wfFrSwIsdnUniState=wfFrSwIsdnUniState, wfFrSwBcMeasurementInterval=wfFrSwBcMeasurementInterval, wfFrSwUsageStoreData=wfFrSwUsageStoreData, wfFrSwCctLocalRecvFECNFrames=wfFrSwCctLocalRecvFECNFrames, wfFrSwCctRemoteRecvBECNFrames=wfFrSwCctRemoteRecvBECNFrames, wfFrSwPvcUsageFileLayout=wfFrSwPvcUsageFileLayout, wfFrSwGlobalX121AddrHigh=wfFrSwGlobalX121AddrHigh, wfFrSwCngcMonP2Level4Percent=wfFrSwCngcMonP2Level4Percent, wfFrSwDlcmiBidirect=wfFrSwDlcmiBidirect, wfFrSwVcSetDeOctets=wfFrSwVcSetDeOctets, wfFrSwUsageSentDEOctetsLow=wfFrSwUsageSentDEOctetsLow, wfFrSwDlcmiUnknownIEErrors=wfFrSwDlcmiUnknownIEErrors, wfFrSwSigSvcDlciLow=wfFrSwSigSvcDlciLow, wfFrSwDlcmiSequenceErrors=wfFrSwDlcmiSequenceErrors, wfFrSwIsdnAssocSlotNum=wfFrSwIsdnAssocSlotNum, wfFrSwExtFileSysTable=wfFrSwExtFileSysTable, wfFrSwDlcmiControlByteErrors=wfFrSwDlcmiControlByteErrors, wfFrSwVirtualIntfSlot=wfFrSwVirtualIntfSlot, wfFrSwDlcmiStatus=wfFrSwDlcmiStatus, wfFrSwVcBackupCrossNetErrors=wfFrSwVcBackupCrossNetErrors, wfFrSwVirtualIntfEntry=wfFrSwVirtualIntfEntry, wfFrSwDlcmiPolls=wfFrSwDlcmiPolls, wfFrSwUsageDirectory=wfFrSwUsageDirectory, wfFrSwSvcUsageStoreTimeStamp=wfFrSwSvcUsageStoreTimeStamp, wfFrSwErrType=wfFrSwErrType, 
wfFrSwUsageLastNonDEOctetsHigh=wfFrSwUsageLastNonDEOctetsHigh, wfFrSwUsageCurFlushInterval=wfFrSwUsageCurFlushInterval, wfFrSwLocalX121Address=wfFrSwLocalX121Address, wfFrSwCctLocalSentNonDEFrames=wfFrSwCctLocalSentNonDEFrames, wfFrSwSigInDisconnectPkts=wfFrSwSigInDisconnectPkts, wfFrSwVcDropNonDeFrames=wfFrSwVcDropNonDeFrames, wfFrSwIsdnBaseDelete=wfFrSwIsdnBaseDelete, wfFrSwSigOutConnectPkts=wfFrSwSigOutConnectPkts, wfFrSwCngcMonP1Level2Percent=wfFrSwCngcMonP1Level2Percent, wfFrSwUsageUpdateInterval=wfFrSwUsageUpdateInterval, wfFrSwDlcmiCrossNetAsyncUpdateEnable=wfFrSwDlcmiCrossNetAsyncUpdateEnable, wfFrSwVcSetDeFrames=wfFrSwVcSetDeFrames, wfFrSwGlobalE164AddrDelete=wfFrSwGlobalE164AddrDelete, wfFrSwSigNumOfSvcsInUse=wfFrSwSigNumOfSvcsInUse, wfFrSwSigX213PriorityIEAllowed=wfFrSwSigX213PriorityIEAllowed, wfFrSwSvcUsageUpdateData=wfFrSwSvcUsageUpdateData, wfFrSwGlobalX121AddrIPAddr=wfFrSwGlobalX121AddrIPAddr, wfFrSwUsageSentDEFramesHigh=wfFrSwUsageSentDEFramesHigh, wfFrSwDlcmiBcMeasurementEnable=wfFrSwDlcmiBcMeasurementEnable, wfFrSwVcRecvDeFrames=wfFrSwVcRecvDeFrames, wfFrSwVcInBeOctets=wfFrSwVcInBeOctets, wfFrSwSigRejectedConnRequests=wfFrSwSigRejectedConnRequests, wfFrSwSvcUsageFilePrefix=wfFrSwSvcUsageFilePrefix, wfFrSwMcastIpAddr=wfFrSwMcastIpAddr, wfFrSwCngcMonP0Level2Percent=wfFrSwCngcMonP0Level2Percent, wfFrSwSvcUsageStoreInterval=wfFrSwSvcUsageStoreInterval, wfFrSwDlcmiCrossNetEnable=wfFrSwDlcmiCrossNetEnable, wfFrSwVcCalledDlci=wfFrSwVcCalledDlci, wfFrSwSigMaxInThroughputPerSvc=wfFrSwSigMaxInThroughputPerSvc, wfFrSwCctInBcOctets=wfFrSwCctInBcOctets, wfFrSwSigOutReleasePkts=wfFrSwSigOutReleasePkts, wfFrSwCctEntry=wfFrSwCctEntry, wfFrSwCngcMonP1Level3Percent=wfFrSwCngcMonP1Level3Percent, wfFrSwCctXNetSent=wfFrSwCctXNetSent, wfFrSwCctRemoteDropNonDEOctets=wfFrSwCctRemoteDropNonDEOctets, wfFrSwUsageCleanupTimeStamp=wfFrSwUsageCleanupTimeStamp, wfFrSwLocalE164AddrLocalFlag=wfFrSwLocalE164AddrLocalFlag, wfFrSwVirtualIntfCct=wfFrSwVirtualIntfCct, 
wfFrSwVcDropDeFrames=wfFrSwVcDropDeFrames, wfFrSwCctXNetReceived=wfFrSwCctXNetReceived, wfFrSwLocalE164AddrCUG=wfFrSwLocalE164AddrCUG, wfFrSwCctState=wfFrSwCctState, wfFrSwSvcUsageCurCleanupInterval=wfFrSwSvcUsageCurCleanupInterval, wfFrSwVcTable=wfFrSwVcTable, wfFrSwCctInactiveVCDropFrames=wfFrSwCctInactiveVCDropFrames, wfFrSwGlobalX121AddrEntry=wfFrSwGlobalX121AddrEntry, wfFrSwSvcUsageCurFilePrefix=wfFrSwSvcUsageCurFilePrefix, wfFrSwCngcMonEntry=wfFrSwCngcMonEntry, wfFrSwCctLocalDropNonDEFrames=wfFrSwCctLocalDropNonDEFrames, wfFrSwUsageFlushData=wfFrSwUsageFlushData, wfFrSwVirtualIntfDelete=wfFrSwVirtualIntfDelete, wfFrSwIsdnAssocScrnEnable=wfFrSwIsdnAssocScrnEnable, wfFrSwCngcMonP0Level4Percent=wfFrSwCngcMonP0Level4Percent, wfFrSwIsdnBaseTable=wfFrSwIsdnBaseTable, wfFrSwUsageDlci=wfFrSwUsageDlci, wfFrSwLocalX121AddrCct=wfFrSwLocalX121AddrCct, wfFrSwCctLocalSetDEOctets=wfFrSwCctLocalSetDEOctets, wfFrSwLocalE164AddrCct=wfFrSwLocalE164AddrCct, wfFrSwVcAtmIwfDePolicy=wfFrSwVcAtmIwfDePolicy, wfFrSwCctRemoteDropDEFrames=wfFrSwCctRemoteDropDEFrames, wfFrSwSvcUsageStoreData=wfFrSwSvcUsageStoreData, wfFrSwTupleDlciA=wfFrSwTupleDlciA, wfFrSwBaseShutDown=wfFrSwBaseShutDown, wfFrSwCctLastTimeChange=wfFrSwCctLastTimeChange, wfFrSwUsageTable=wfFrSwUsageTable, wfFrSwVcCreationTime=wfFrSwVcCreationTime, wfFrSwVcLastTimeChange=wfFrSwVcLastTimeChange, wfFrSwCctInBc=wfFrSwCctInBc, wfFrSwUsageCurDirectory=wfFrSwUsageCurDirectory, wfFrSwCctMulticast=wfFrSwCctMulticast, wfFrSwVcInBe=wfFrSwVcInBe, wfFrSwSigT301=wfFrSwSigT301, wfFrSwCctRemoteSentNonDEOctets=wfFrSwCctRemoteSentNonDEOctets, wfFrSwUsageFlushTimeStamp=wfFrSwUsageFlushTimeStamp, wfFrSwCctRemoteSetBECNOctets=wfFrSwCctRemoteSetBECNOctets, wfFrSwVcBackupCalledIpAddr=wfFrSwVcBackupCalledIpAddr, wfFrSwVcAtmIwfVPI=wfFrSwVcAtmIwfVPI, wfFrSwSigInReleaseCompletePkts=wfFrSwSigInReleaseCompletePkts, wfFrSwLocalX121AddrEntry=wfFrSwLocalX121AddrEntry, wfFrSwCctCrossNetStatus=wfFrSwCctCrossNetStatus, 
wfFrSwSvcUsageFileLayout=wfFrSwSvcUsageFileLayout, wfFrSwDlcmiFullStatusSeq=wfFrSwDlcmiFullStatusSeq, wfFrSwDlcmiSvcDisable=wfFrSwDlcmiSvcDisable, wfFrSwVcCallReqDlciSelectionType=wfFrSwVcCallReqDlciSelectionType, wfFrSwSigOutStatusEnquiryPkts=wfFrSwSigOutStatusEnquiryPkts, wfFrSwUsageSentNonDEFramesLow=wfFrSwUsageSentNonDEFramesLow, wfFrSwLocalE164AddrEntry=wfFrSwLocalE164AddrEntry, wfFrSwDlcmiDteSeqCount=wfFrSwDlcmiDteSeqCount, wfFrSwUsageFileCleanup=wfFrSwUsageFileCleanup, wfFrSwBaseDelete=wfFrSwBaseDelete, wfFrSwSvcUsageFlushInterval=wfFrSwSvcUsageFlushInterval, wfFrSwUsageLastDEFramesHigh=wfFrSwUsageLastDEFramesHigh, wfFrSwVcRedirectState=wfFrSwVcRedirectState, wfFrSwDlcmiAlarmTimer=wfFrSwDlcmiAlarmTimer, wfFrSwCctLocalOrRemoteConnection=wfFrSwCctLocalOrRemoteConnection)
mibBuilder.exportSymbols("Wellfleet-FRSW-MIB", wfFrSwSigOutCallProceedingPkts=wfFrSwSigOutCallProceedingPkts, wfFrSwDlcmiMaxSupportedVCs=wfFrSwDlcmiMaxSupportedVCs, wfFrSwDlcmiSpvcAgent=wfFrSwDlcmiSpvcAgent, wfFrSwCctRemoteDropNonDEFrames=wfFrSwCctRemoteDropNonDEFrames, wfFrSwVcCallReqMaxRetries=wfFrSwVcCallReqMaxRetries, wfFrSwSwitchHdrErrors=wfFrSwSwitchHdrErrors, wfFrSwDlcmiEscapeVcCount=wfFrSwDlcmiEscapeVcCount, wfFrSwVcCalledIpAddr=wfFrSwVcCalledIpAddr, wfFrSwUsageSentNonDEOctetsHigh=wfFrSwUsageSentNonDEOctetsHigh, wfFrSwBase=wfFrSwBase, wfFrSwDlcmiDtePolls=wfFrSwDlcmiDtePolls, wfFrSwCctRemoteSetBECNFrames=wfFrSwCctRemoteSetBECNFrames, wfFrSwVcTxNonDeOctets=wfFrSwVcTxNonDeOctets, wfFrSwDlcmiMcastNoBufferErrors=wfFrSwDlcmiMcastNoBufferErrors, wfFrSwSigCallingPartyIEMandatory=wfFrSwSigCallingPartyIEMandatory, wfFrSwUsageCurStoreInterval=wfFrSwUsageCurStoreInterval, wfFrSwDlcmiFrameTooLongErrors=wfFrSwDlcmiFrameTooLongErrors, wfFrSwSvcUsageFlushTimeStamp=wfFrSwSvcUsageFlushTimeStamp, wfFrSwCngcMonP1Level4Percent=wfFrSwCngcMonP1Level4Percent, wfFrSwVcBackupCrossNetStatus=wfFrSwVcBackupCrossNetStatus, wfFrSwSigXNetClearingDisable=wfFrSwSigXNetClearingDisable, wfFrSwSigTable=wfFrSwSigTable, wfFrSwCngcMonP3Level4Percent=wfFrSwCngcMonP3Level4Percent, wfFrSwCctLocalDropNonDEOctets=wfFrSwCctLocalDropNonDEOctets, wfFrSwCngcMonCct=wfFrSwCngcMonCct, wfFrSwVcDropExcessBurstFrames=wfFrSwVcDropExcessBurstFrames, wfFrSwUsageNumEntries=wfFrSwUsageNumEntries, wfFrSwTupleIpAddrB=wfFrSwTupleIpAddrB, wfFrSwSvcUsageUpdateTimeStamp=wfFrSwSvcUsageUpdateTimeStamp, wfFrSwSvcUsageCurUpdateInterval=wfFrSwSvcUsageCurUpdateInterval, wfFrSwDlcmiNniEnable=wfFrSwDlcmiNniEnable, wfFrSwSigDefaultMinAcceptThroughput=wfFrSwSigDefaultMinAcceptThroughput, wfFrSwUsageEntry=wfFrSwUsageEntry, wfFrSwCngcMonP2Level3Percent=wfFrSwCngcMonP2Level3Percent, wfFrSwCctDlci=wfFrSwCctDlci, wfFrSwUsageLastDEFramesLow=wfFrSwUsageLastDEFramesLow, wfFrSwSigDelete=wfFrSwSigDelete, 
wfFrSwUsageCurVolume=wfFrSwUsageCurVolume, wfFrSwCngcMonP3Level3Percent=wfFrSwCngcMonP3Level3Percent, wfFrSwSigInReleasePkts=wfFrSwSigInReleasePkts, wfFrSwCctReportedStatus=wfFrSwCctReportedStatus, wfFrSwDlcmiSvcBillingEnable=wfFrSwDlcmiSvcBillingEnable, wfFrSwDlcmiMonitoredEvents=wfFrSwDlcmiMonitoredEvents, wfFrSwVcCallReqCalledAddr=wfFrSwVcCallReqCalledAddr, wfFrSwSigT308=wfFrSwSigT308, wfFrSwVcCircuit=wfFrSwVcCircuit, wfFrSwBaseIpAddr=wfFrSwBaseIpAddr, wfFrSwVcDlci=wfFrSwVcDlci, wfFrSwDlcmiPollingInterval=wfFrSwDlcmiPollingInterval, wfFrSwGlobalE164AddrTable=wfFrSwGlobalE164AddrTable, wfFrSwCngcMonP3Level1Percent=wfFrSwCngcMonP3Level1Percent, wfFrSwUsageCurFilePrefix=wfFrSwUsageCurFilePrefix, wfFrSwCctLocalDropDEOctets=wfFrSwCctLocalDropDEOctets, wfFrSwUsageLocalTimeZone=wfFrSwUsageLocalTimeZone, wfFrSwVcOutBc=wfFrSwVcOutBc, wfFrSwVcAtmIwfVCI=wfFrSwVcAtmIwfVCI, wfFrSwVcCfgInBe=wfFrSwVcCfgInBe, wfFrSwVcDropNonDeOctets=wfFrSwVcDropNonDeOctets, wfFrSwVcInBcOctets=wfFrSwVcInBcOctets, wfFrSwSigCircuit=wfFrSwSigCircuit, wfFrSwVcRecentNonDeOctets=wfFrSwVcRecentNonDeOctets, wfFrSwVcCrossNetStatus=wfFrSwVcCrossNetStatus, wfFrSwTupleEntry=wfFrSwTupleEntry, wfFrSwExtFileSysSlot=wfFrSwExtFileSysSlot, wfFrSwSvcUsageCurDirectory=wfFrSwSvcUsageCurDirectory, wfFrSwUsage=wfFrSwUsage, wfFrSwTupleDlciB=wfFrSwTupleDlciB, wfFrSwUsageDebug=wfFrSwUsageDebug, wfFrSwLocalX121AddrCUG=wfFrSwLocalX121AddrCUG, wfFrSwIsdnUniIndex=wfFrSwIsdnUniIndex, wfFrSwCctLocalSentDEFrames=wfFrSwCctLocalSentDEFrames, wfFrSwSvcUsageDirectory=wfFrSwSvcUsageDirectory, wfFrSwDlcmiErrorThreshold=wfFrSwDlcmiErrorThreshold, wfFrSwDlcmiFormatErrors=wfFrSwDlcmiFormatErrors, wfFrSwDlcmiDtePollingInterval=wfFrSwDlcmiDtePollingInterval, wfFrSwCctLocalRecvNonDEOctets=wfFrSwCctLocalRecvNonDEOctets, wfFrSwSigMaxOutThroughputPerSvc=wfFrSwSigMaxOutThroughputPerSvc, wfFrSwVcInThroughput=wfFrSwVcInThroughput, wfFrSwCctXNetErrors=wfFrSwCctXNetErrors, wfFrSwMcastEntry=wfFrSwMcastEntry, wfFrSwCctStateSet=wfFrSwCctStateSet, 
wfFrSwCctLocalSetBECNOctets=wfFrSwCctLocalSetBECNOctets, wfFrSwCctLocalRecvNonDEFrames=wfFrSwCctLocalRecvNonDEFrames, wfFrSwVcTxDeOctets=wfFrSwVcTxDeOctets, wfFrSwSvcUsageCleanupInterval=wfFrSwSvcUsageCleanupInterval, wfFrSwUsageEndTimeStampHigh=wfFrSwUsageEndTimeStampHigh, wfFrSwSigDefaultBe=wfFrSwSigDefaultBe, wfFrSwVcSpvcCallState=wfFrSwVcSpvcCallState, wfFrSwVcDropExcessBurstOctets=wfFrSwVcDropExcessBurstOctets, wfFrSwGlobalE164AddrHigh=wfFrSwGlobalE164AddrHigh, wfFrSwTupleDelete=wfFrSwTupleDelete, wfFrSwCctRemoteDropDEOctets=wfFrSwCctRemoteDropDEOctets, wfFrSwSigOutSetupPkts=wfFrSwSigOutSetupPkts, wfFrSwIsdnScrnIndex=wfFrSwIsdnScrnIndex, wfFrSwCctRemoteRecvDEFrames=wfFrSwCctRemoteRecvDEFrames, wfFrSwTupleTable=wfFrSwTupleTable, wfFrSwUsageLastDEOctetsLow=wfFrSwUsageLastDEOctetsLow, wfFrSwIsdnBaseAssocType=wfFrSwIsdnBaseAssocType, wfFrSwLocalX121AddrLocalFlag=wfFrSwLocalX121AddrLocalFlag, wfFrSwVcDropDeOctets=wfFrSwVcDropDeOctets, wfFrSwCctDelete=wfFrSwCctDelete, wfFrSwDlcmiFullEnquiryInterval=wfFrSwDlcmiFullEnquiryInterval, wfFrSwIsdnScrnDelete=wfFrSwIsdnScrnDelete, wfFrSwIsdnBaseEntry=wfFrSwIsdnBaseEntry, wfFrSwVcAtmIwfEfciPolicy=wfFrSwVcAtmIwfEfciPolicy, wfFrSwVcStateSet=wfFrSwVcStateSet, wfFrSwDlcmiEntry=wfFrSwDlcmiEntry, wfFrSwVcTrfPriority=wfFrSwVcTrfPriority, wfFrSwDlcmiActiveReceived=wfFrSwDlcmiActiveReceived, wfFrSwDlcmiProtocolErrors=wfFrSwDlcmiProtocolErrors, wfFrSwSigDlciAssign=wfFrSwSigDlciAssign, wfFrSwExtFileSysSize=wfFrSwExtFileSysSize, wfFrSwSvcUsageInterimRecordEnable=wfFrSwSvcUsageInterimRecordEnable, wfFrSwDlcmiNewVCs=wfFrSwDlcmiNewVCs, wfFrSwUsageLastNonDEOctetsLow=wfFrSwUsageLastNonDEOctetsLow, wfFrSwDlcmiDelete=wfFrSwDlcmiDelete, wfFrSwUsageCurUpdateInterval=wfFrSwUsageCurUpdateInterval, wfFrSwCngcMonP0Level3Percent=wfFrSwCngcMonP0Level3Percent, wfFrSwVcSetBecnFrames=wfFrSwVcSetBecnFrames, wfFrSwUsageRemoteDlci=wfFrSwUsageRemoteDlci, wfFrSwUsageCurTimerInterval=wfFrSwUsageCurTimerInterval, wfFrSwIsdnAssocDelete=wfFrSwIsdnAssocDelete, 
wfFrSwSigTotalOutCurrentThroughput=wfFrSwSigTotalOutCurrentThroughput, wfFrSwDlcmiIwfMode=wfFrSwDlcmiIwfMode, wfFrSwSigDefaultBc=wfFrSwSigDefaultBc, wfFrSwDlcmiRecoveryCounts=wfFrSwDlcmiRecoveryCounts, wfFrSwUsageLastDEOctetsHigh=wfFrSwUsageLastDEOctetsHigh, wfFrSwVcSetFecnOctets=wfFrSwVcSetFecnOctets, wfFrSwVcDelete=wfFrSwVcDelete, wfFrSwVcRecvBecnFrames=wfFrSwVcRecvBecnFrames, wfFrSwExtFileSysEntry=wfFrSwExtFileSysEntry, wfFrSwCngcMonReset=wfFrSwCngcMonReset, wfFrSwSigMaximumBe=wfFrSwSigMaximumBe, wfFrSwSigT305=wfFrSwSigT305, wfFrSwSvcUsageEnable=wfFrSwSvcUsageEnable, wfFrSwSigT322=wfFrSwSigT322, wfFrSwSvcUsageVolume=wfFrSwSvcUsageVolume, wfFrSwDlcmiIllegalDlciErrors=wfFrSwDlcmiIllegalDlciErrors, wfFrSwIsdnAssocTable=wfFrSwIsdnAssocTable, wfFrSwCctRemoteRecvNonDEFrames=wfFrSwCctRemoteRecvNonDEFrames, wfFrSwDlcmiCrossNetPollingInterval=wfFrSwDlcmiCrossNetPollingInterval, wfFrSwLocalE164Address=wfFrSwLocalE164Address, wfFrSwUsageStoreInterval=wfFrSwUsageStoreInterval, wfFrSwSigInSetupPkts=wfFrSwSigInSetupPkts, wfFrSwUsageSentNonDEOctetsLow=wfFrSwUsageSentNonDEOctetsLow, wfFrSwSigSvcDlciHigh=wfFrSwSigSvcDlciHigh, wfFrSwDlcmiL2AddrType=wfFrSwDlcmiL2AddrType, wfFrSwMcastIndex=wfFrSwMcastIndex, wfFrSwDlcmiUnknownRPTErrors=wfFrSwDlcmiUnknownRPTErrors, wfFrSwUsageSentNonDEFramesHigh=wfFrSwUsageSentNonDEFramesHigh, wfFrSwDlcmiLastReceived=wfFrSwDlcmiLastReceived, wfFrSwCctLocalSentNonDEOctets=wfFrSwCctLocalSentNonDEOctets, wfFrSwSigInCallProceedingPkts=wfFrSwSigInCallProceedingPkts, wfFrSwSvcUsageFileCleanup=wfFrSwSvcUsageFileCleanup, wfFrSwSigEntry=wfFrSwSigEntry, wfFrSwMcastTable=wfFrSwMcastTable, wfFrSwSigL2Resets=wfFrSwSigL2Resets, wfFrSwDlcmiOtherErrors=wfFrSwDlcmiOtherErrors, wfFrSwErrTime=wfFrSwErrTime, wfFrSwUsageFlushInterval=wfFrSwUsageFlushInterval, wfFrSwVcTxNonDeFrames=wfFrSwVcTxNonDeFrames, wfFrSwUsageCleanupInterval=wfFrSwUsageCleanupInterval, wfFrSwIsdnScrnEntry=wfFrSwIsdnScrnEntry, wfFrSwUsageRemoteIPAddress=wfFrSwUsageRemoteIPAddress, 
wfFrSwSigInStatusEnquiryPkts=wfFrSwSigInStatusEnquiryPkts, wfFrSwVirtualIntfTable=wfFrSwVirtualIntfTable, wfFrSwCngcMonP3Level2Percent=wfFrSwCngcMonP3Level2Percent, wfFrSwUsageLastNonDEFramesHigh=wfFrSwUsageLastNonDEFramesHigh, wfFrSwCctLocalSetFECNOctets=wfFrSwCctLocalSetFECNOctets, wfFrSwVcAtmIwfMode=wfFrSwVcAtmIwfMode, wfFrSwVcRedirectType=wfFrSwVcRedirectType, wfFrSwSigT310=wfFrSwSigT310, wfFrSwCctLocalRecvFECNOctets=wfFrSwCctLocalRecvFECNOctets, wfFrSwGlobalE164AddrIPAddr=wfFrSwGlobalE164AddrIPAddr, wfFrSwDlcmiFrameTooShortErrors=wfFrSwDlcmiFrameTooShortErrors, wfFrSwVcMulticast=wfFrSwVcMulticast, wfFrSwUsageIPAddress=wfFrSwUsageIPAddress, wfFrSwSigDefaultThroughput=wfFrSwSigDefaultThroughput, wfFrSwCctLocalRecentNonDEOctets=wfFrSwCctLocalRecentNonDEOctets, wfFrSwUsageSentDEFramesLow=wfFrSwUsageSentDEFramesLow, wfFrSwSvcUsageCleanupTimeStamp=wfFrSwSvcUsageCleanupTimeStamp, wfFrSwSigInUnknownPkts=wfFrSwSigInUnknownPkts, wfFrSwCctInactiveVCDropOctets=wfFrSwCctInactiveVCDropOctets, wfFrSwDlcmiEscapeCircuit=wfFrSwDlcmiEscapeCircuit, wfFrSwUsageDelete=wfFrSwUsageDelete, wfFrSwCctNumber=wfFrSwCctNumber, wfFrSwMcastDelete=wfFrSwMcastDelete, wfFrSwSigTotalOutNegotiableThroughput=wfFrSwSigTotalOutNegotiableThroughput, wfFrSwVcRecvNonDeOctets=wfFrSwVcRecvNonDeOctets, wfFrSwCngcMonP2Level1Percent=wfFrSwCngcMonP2Level1Percent, wfFrSwVcRecvFecnFrames=wfFrSwVcRecvFecnFrames, wfFrSwCctInBe=wfFrSwCctInBe, wfFrSwCctLocalDropDEFrames=wfFrSwCctLocalDropDEFrames, wfFrSwCctLocalSetBECNFrames=wfFrSwCctLocalSetBECNFrames, wfFrSwDlcmiUnknownDlciErrors=wfFrSwDlcmiUnknownDlciErrors, wfFrSwCctLocalRecvDEFrames=wfFrSwCctLocalRecvDEFrames, wfFrSwSvcUsageCurFlushInterval=wfFrSwSvcUsageCurFlushInterval, wfFrSwGlobalE164AddrEntry=wfFrSwGlobalE164AddrEntry, wfFrSwErrData=wfFrSwErrData, wfFrSwVcSetFecnFrames=wfFrSwVcSetFecnFrames, wfFrSwDlcmiCallAccDlciSelectionType=wfFrSwDlcmiCallAccDlciSelectionType, wfFrSwTupleIpAddrA=wfFrSwTupleIpAddrA, 
wfFrSwCctRemoteSetFECNOctets=wfFrSwCctRemoteSetFECNOctets, wfFrSwVcXNetErrors=wfFrSwVcXNetErrors)
| true | true |
f7253028cdd82bf123f765e9eee0f96a6ac55fad | 6,819 | py | Python | src/walax/metadata.py | hazelmollusk/django-walax | 60cd05483e155bdd817df60a0c9fc7922f80c500 | [
"MIT"
] | null | null | null | src/walax/metadata.py | hazelmollusk/django-walax | 60cd05483e155bdd817df60a0c9fc7922f80c500 | [
"MIT"
] | null | null | null | src/walax/metadata.py | hazelmollusk/django-walax | 60cd05483e155bdd817df60a0c9fc7922f80c500 | [
"MIT"
] | null | null | null | from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.utils.encoding import force_str
from rest_framework import exceptions, serializers
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.request import clone_request
from collections import OrderedDict
from rest_framework.metadata import BaseMetadata
from rest_framework.utils.field_mapping import ClassLookupDict
class WalaxModelMetadata(BaseMetadata):
    """
    OPTIONS metadata implementation for Walax model viewsets.

    Returns an ad-hoc set of information about the view: its name and
    description, the render/parse media types, the underlying model name,
    extra callable actions declared on the model, and per-field metadata
    for the serializer used by write methods.  There are not any formalized
    standards for `OPTIONS` responses for us to base this on.
    """

    # Maps serializer field classes to the human-readable "type" label
    # reported for each field; ClassLookupDict walks the field's MRO, so
    # the most specific entry wins.
    label_lookup = ClassLookupDict(
        {
            serializers.Field: "field",
            serializers.PrimaryKeyRelatedField: "related",
            serializers.RelatedField: "related",
            serializers.BooleanField: "boolean",
            serializers.NullBooleanField: "boolean",
            serializers.CharField: "string",
            serializers.UUIDField: "string",
            serializers.URLField: "url",
            serializers.EmailField: "email",
            serializers.RegexField: "regex",
            serializers.SlugField: "slug",
            serializers.IntegerField: "integer",
            serializers.FloatField: "float",
            serializers.DecimalField: "decimal",
            serializers.DateField: "date",
            serializers.DateTimeField: "datetime",
            serializers.TimeField: "time",
            serializers.ChoiceField: "choice",
            serializers.MultipleChoiceField: "multiple choice",
            serializers.FileField: "file upload",
            serializers.ImageField: "image upload",
            serializers.ListField: "list",
            serializers.DictField: "nested object",
            serializers.Serializer: "nested object",
        }
    )

    def determine_metadata(self, request, view):
        """
        Build the OPTIONS metadata dict for *view*.

        Side effect: stores ``view.queryset.model`` on ``self.model``,
        because ``determine_extra_actions()`` and ``get_field_info()``
        consult it later in the same request.
        """
        metadata = OrderedDict()
        metadata["name"] = view.get_view_name()
        metadata["description"] = view.get_view_description()
        metadata["renders"] = [
            renderer.media_type for renderer in view.renderer_classes
        ]
        metadata["parses"] = [
            parser.media_type for parser in view.parser_classes]
        metadata["model"] = view.queryset.model.__name__
        self.model = view.queryset.model
        metadata['extra_actions'] = self.determine_extra_actions(request, view)
        if hasattr(view, "get_serializer"):
            actions = self.determine_actions(request, view)
            if actions:
                metadata["actions"] = actions
        return metadata

    def determine_extra_actions(self, request, view):
        """
        Return a list of extra callable actions: one entry per function on
        the model that carries a truthy ``walax_action`` attribute.
        """
        import inspect

        return [
            {"method": "post", "type": "instance", "name": name}
            for name, member in inspect.getmembers(self.model)
            if inspect.isfunction(member) and getattr(member, "walax_action", False)
        ]

    def determine_actions(self, request, view):
        """
        For generic class based views we return information about
        the fields that are accepted for 'PUT' and 'POST' methods.
        """
        actions = {}
        for method in {"PUT", "POST"} & set(view.allowed_methods):
            view.request = clone_request(request, method)
            try:
                # Test global permissions
                if hasattr(view, "check_permissions"):
                    view.check_permissions(view.request)
                # Test object permissions
                if method == "PUT" and hasattr(view, "get_object"):
                    view.get_object()
            except (exceptions.APIException, PermissionDenied, Http404):
                pass
            else:
                # If user has appropriate permissions for the view, include
                # appropriate metadata about the fields that should be supplied.
                serializer = view.get_serializer()
                actions[method] = self.get_serializer_info(serializer)
            finally:
                # Always restore the original request on the view.
                view.request = request
        return actions

    def get_serializer_info(self, serializer):
        """
        Given an instance of a serializer, return a dictionary of metadata
        about its fields (hidden fields are omitted).
        """
        if hasattr(serializer, "child"):
            # If this is a `ListSerializer` then we want to examine the
            # underlying child serializer instance instead.
            serializer = serializer.child
        return OrderedDict(
            [
                (field_name, self.get_field_info(field, field_name))
                for field_name, field in serializer.fields.items()
                if not isinstance(field, serializers.HiddenField)
            ]
        )

    def get_field_info(self, field, field_name):
        """
        Given an instance of a serializer field and its name, return a
        dictionary of metadata about it.
        """
        from django.core.exceptions import FieldDoesNotExist

        field_info = OrderedDict()
        field_info["type"] = self.label_lookup[field]
        field_info["required"] = getattr(field, "required", False)
        if field_info["type"].startswith("related"):
            field_info["model"] = field.queryset.model.__name__
            field_info["related_name"] = getattr(field, "related_name", None)
        attrs = [
            "read_only",
            "label",
            "help_text",
            "min_length",
            "max_length",
            "min_value",
            "max_value",
            "related_name",
        ]
        # Robustness fix: serializer-only fields (no matching model column)
        # used to crash _meta.get_field(); treat them as non-primary-key.
        try:
            model_field = self.model._meta.get_field(field_name)
        except FieldDoesNotExist:
            model_field = None
        if getattr(model_field, "primary_key", False):
            field_info["primary_key"] = "true"
        for attr in attrs:
            value = getattr(field, attr, None)
            if value is not None and value != "":
                field_info[attr] = force_str(value, strings_only=True)
        if getattr(field, "child", None):
            # Bug fix: the recursive call previously omitted the required
            # ``field_name`` argument and raised TypeError whenever a list
            # field's child had to be described.
            field_info["child"] = self.get_field_info(field.child, field_name)
        elif getattr(field, "fields", None):
            field_info["children"] = self.get_serializer_info(field)
        if (
            not field_info.get("read_only")
            and not isinstance(
                field, (serializers.RelatedField, serializers.ManyRelatedField)
            )
            and hasattr(field, "choices")
        ):
            field_info["choices"] = [
                {
                    "value": choice_value,
                    "display_name": force_str(choice_name, strings_only=True),
                }
                for choice_value, choice_name in field.choices.items()
            ]
        return field_info
| 38.308989 | 140 | 0.592169 | from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.utils.encoding import force_str
from rest_framework import exceptions, serializers
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.request import clone_request
from collections import OrderedDict
from rest_framework.metadata import BaseMetadata
from rest_framework.utils.field_mapping import ClassLookupDict
class WalaxModelMetadata(BaseMetadata):
    """
    OPTIONS metadata implementation for Walax model viewsets: reports the
    view's name/description, render/parse media types, model name, extra
    model-declared actions, and per-field serializer metadata.
    """
    # Maps serializer field classes to the human-readable "type" label
    # reported for each field; ClassLookupDict resolves via the field's MRO.
    label_lookup = ClassLookupDict(
        {
            serializers.Field: "field",
            serializers.PrimaryKeyRelatedField: "related",
            serializers.RelatedField: "related",
            serializers.BooleanField: "boolean",
            serializers.NullBooleanField: "boolean",
            serializers.CharField: "string",
            serializers.UUIDField: "string",
            serializers.URLField: "url",
            serializers.EmailField: "email",
            serializers.RegexField: "regex",
            serializers.SlugField: "slug",
            serializers.IntegerField: "integer",
            serializers.FloatField: "float",
            serializers.DecimalField: "decimal",
            serializers.DateField: "date",
            serializers.DateTimeField: "datetime",
            serializers.TimeField: "time",
            serializers.ChoiceField: "choice",
            serializers.MultipleChoiceField: "multiple choice",
            serializers.FileField: "file upload",
            serializers.ImageField: "image upload",
            serializers.ListField: "list",
            serializers.DictField: "nested object",
            serializers.Serializer: "nested object",
        }
    )
    def determine_metadata(self, request, view):
        """Build the OPTIONS metadata dict for *view*.

        Side effect: stores ``view.queryset.model`` on ``self.model`` for
        use by ``determine_extra_actions()`` and ``get_field_info()``.
        """
        metadata = OrderedDict()
        metadata["name"] = view.get_view_name()
        metadata["description"] = view.get_view_description()
        metadata["renders"] = [
            renderer.media_type for renderer in view.renderer_classes
        ]
        metadata["parses"] = [
            parser.media_type for parser in view.parser_classes]
        metadata["model"] = view.queryset.model.__name__
        self.model = view.queryset.model
        metadata['extra_actions'] = self.determine_extra_actions(request, view)
        if hasattr(view, "get_serializer"):
            actions = self.determine_actions(request, view)
            if actions:
                metadata["actions"] = actions
        return metadata
    def determine_extra_actions(self, request, view):
        """Return one action entry per model function flagged ``walax_action``."""
        import inspect
        actions = []
        for fn, f in [(fn, f) for (fn, f) in inspect.getmembers(self.model) if inspect.isfunction(f) and getattr(f, 'walax_action', False)]:
            actions.append({
                'method': 'post',
                'type': 'instance',
                'name': fn
            })
        return actions
    def determine_actions(self, request, view):
        """Describe the serializer fields accepted for 'PUT' and 'POST'."""
        actions = {}
        for method in {"PUT", "POST"} & set(view.allowed_methods):
            view.request = clone_request(request, method)
            try:
                # Test global permissions.
                if hasattr(view, "check_permissions"):
                    view.check_permissions(view.request)
                # Test object permissions (only meaningful for PUT).
                if method == "PUT" and hasattr(view, "get_object"):
                    view.get_object()
            except (exceptions.APIException, PermissionDenied, Http404):
                pass
            else:
                # Permission granted: include field metadata for this method.
                serializer = view.get_serializer()
                actions[method] = self.get_serializer_info(serializer)
            finally:
                # Always restore the original request on the view.
                view.request = request
        return actions
    def get_serializer_info(self, serializer):
        """Return per-field metadata for *serializer* (hidden fields omitted)."""
        if hasattr(serializer, "child"):
            # A ListSerializer wraps the real serializer in `.child`.
            serializer = serializer.child
        return OrderedDict(
            [
                (field_name, self.get_field_info(field, field_name))
                for field_name, field in serializer.fields.items()
                if not isinstance(field, serializers.HiddenField)
            ]
        )
    def get_field_info(self, field, field_name):
        """Return a metadata dict for a single serializer field."""
        field_info = OrderedDict()
        field_info["type"] = self.label_lookup[field]
        field_info["required"] = getattr(field, "required", False)
        if field_info["type"].startswith("related"):
            field_info["model"] = field.queryset.model.__name__
            field_info["related_name"] = getattr(field, "related_name", None)
        attrs = [
            "read_only",
            "label",
            "help_text",
            "min_length",
            "max_length",
            "min_value",
            "max_value",
            "related_name",
        ]
        # NOTE(review): _meta.get_field() raises FieldDoesNotExist for
        # serializer-only fields with no model column -- confirm callers
        # never pass such fields.
        if getattr(self.model._meta.get_field(field_name), "primary_key", False):
            field_info["primary_key"] = "true"
        for attr in attrs:
            value = getattr(field, attr, None)
            if value is not None and value != "":
                field_info[attr] = force_str(value, strings_only=True)
        if getattr(field, "child", None):
            # NOTE(review): get_field_info() requires a field_name argument;
            # this one-argument call raises TypeError when a list field's
            # child is reached -- confirm intended signature.
            field_info["child"] = self.get_field_info(field.child)
        elif getattr(field, "fields", None):
            field_info["children"] = self.get_serializer_info(field)
        if (
            not field_info.get("read_only")
            and not isinstance(
                field, (serializers.RelatedField, serializers.ManyRelatedField)
            )
            and hasattr(field, "choices")
        ):
            field_info["choices"] = [
                {
                    "value": choice_value,
                    "display_name": force_str(choice_name, strings_only=True),
                }
                for choice_value, choice_name in field.choices.items()
            ]
        return field_info
| true | true |
f725309690ad014e3b8fcbe2e6561e01b841f7ec | 574 | py | Python | subframe/datatables.py | joshbode/subframe | af035ee75f2c9a0d11f538dd88b9491c92389b65 | [
"MIT"
] | 2 | 2015-08-24T02:19:01.000Z | 2015-08-24T03:31:06.000Z | subframe/datatables.py | joshbode/subframe | af035ee75f2c9a0d11f538dd88b9491c92389b65 | [
"MIT"
] | null | null | null | subframe/datatables.py | joshbode/subframe | af035ee75f2c9a0d11f538dd88b9491c92389b65 | [
"MIT"
] | null | null | null | """
DataTable display.
"""
from .subframe import SubFrame
from .plugin import plugins
class DataTable(SubFrame):
    """Display a DataFrame as a DataTable widget."""

    # Client-side plugin bundle required to render the table.
    _plugins = [plugins.datatables]

    def _js(self, data):
        """Return the Javascript callback body that builds the table."""
        records = data.to_records()
        # One column descriptor per record-array field, after name mapping.
        columns = [
            {'title': title}
            for title in self._map_columns(records.dtype.names)
        ]
        payload = self._json({'data': records.tolist(), 'columns': columns})
        template = "element.append('<table />').find('table').DataTable({});"
        return template.format(payload)
| 22.076923 | 86 | 0.562718 |
from .subframe import SubFrame
from .plugin import plugins
class DataTable(SubFrame):
    """Display a DataFrame as a DataTable widget."""
    # Client-side plugin bundle required to render the table.
    _plugins = [plugins.datatables]
    def _js(self, data):
        """Return the Javascript callback body that builds the table."""
        data = data.to_records()
        # The dict argument is evaluated before `data` is rebound, so
        # .tolist()/.dtype.names below still refer to the records array.
        data = self._json({
            'data': data.tolist(),
            'columns': [
                {'title': x} for x in self._map_columns(data.dtype.names)
            ]
        })
        return "element.append('<table />').find('table').DataTable({});".format(data)
| true | true |
f72530ff5351dea5d4081b0e5aac7da571510f0f | 346 | py | Python | setup.py | Qman11010101/blogen_neo | a56bd5e7a0622488e4f3a4dd87c6e4cf126ed2cb | [
"MIT"
] | null | null | null | setup.py | Qman11010101/blogen_neo | a56bd5e7a0622488e4f3a4dd87c6e4cf126ed2cb | [
"MIT"
] | null | null | null | setup.py | Qman11010101/blogen_neo | a56bd5e7a0622488e4f3a4dd87c6e4cf126ed2cb | [
"MIT"
] | null | null | null | from setuptools import setup
# Packaging configuration for blogen_neo, a simple static blog generator.
setup(
    name="blogen_neo",
    version="0.0.1",
    description="Simple static site generator for blog",
    author="Kjuman Enobikto",
    author_email="qmanenobikto@gmail.com",
    # Third-party runtime dependencies.
    install_requires=["jinja2", "fire"],
    entry_points={
        "console_scripts": [
            # NOTE(review): console_scripts entries normally take the form
            # "name = package.module:callable"; "blogen = main" names a bare
            # module with no callable -- confirm the intended entry point.
            "blogen = main"
        ]
    }
)
| 21.625 | 56 | 0.612717 | from setuptools import setup
# Packaging configuration for blogen_neo, a simple static blog generator.
setup(
    name="blogen_neo",
    version="0.0.1",
    description="Simple static site generator for blog",
    author="Kjuman Enobikto",
    author_email="qmanenobikto@gmail.com",
    # Third-party runtime dependencies.
    install_requires=["jinja2", "fire"],
    entry_points={
        "console_scripts": [
            # NOTE(review): console_scripts entries normally take the form
            # "name = package.module:callable"; "blogen = main" names a bare
            # module with no callable -- confirm the intended entry point.
            "blogen = main"
        ]
    }
)
| true | true |
f7253166d60df35b6e5baa2e8773dc05e8fdf3db | 4,335 | py | Python | src/runners/episode_runner.py | dennismalmgren/marl | baa846dc4144cf6f53e51d8cf1e2fcf5800c9f95 | [
"Apache-2.0"
] | null | null | null | src/runners/episode_runner.py | dennismalmgren/marl | baa846dc4144cf6f53e51d8cf1e2fcf5800c9f95 | [
"Apache-2.0"
] | null | null | null | src/runners/episode_runner.py | dennismalmgren/marl | baa846dc4144cf6f53e51d8cf1e2fcf5800c9f95 | [
"Apache-2.0"
] | null | null | null | from envs import REGISTRY as env_REGISTRY
from functools import partial
from components.episode_buffer import EpisodeBatch
import numpy as np
class EpisodeRunner:
    """Collects whole episodes from a single environment instance.

    Rolls out one episode at a time into an EpisodeBatch and periodically
    logs train/test returns and aggregated episode stats.
    """
    def __init__(self, args, logger):
        self.args = args
        self.logger = logger
        self.batch_size = self.args.batch_size_run
        # This runner only supports a single concurrent environment.
        assert self.batch_size == 1
        self.env = env_REGISTRY[self.args.env](**self.args.env_args)
        self.episode_limit = self.env.episode_limit
        # t: timestep within the current episode.
        self.t = 0
        # t_env: cumulative environment steps (test episodes excluded, see run()).
        self.t_env = 0
        self.train_returns = []
        self.test_returns = []
        self.train_stats = {}
        self.test_stats = {}
        # Log the first run
        self.log_train_stats_t = -1000000
    def setup(self, scheme, groups, preprocess, mac):
        """Bind the EpisodeBatch factory and the multi-agent controller.

        The batch holds episode_limit + 1 slots: one extra for the final
        state/actions written after the rollout loop in run().
        """
        self.new_batch = partial(EpisodeBatch, scheme, groups, self.batch_size, self.episode_limit + 1,
                                 preprocess=preprocess, device=self.args.device)
        self.mac = mac
    def get_env_info(self):
        """Return the environment's info dict."""
        return self.env.get_env_info()
    def save_replay(self):
        """Ask the environment to save a replay."""
        self.env.save_replay()
    def close_env(self):
        """Close the underlying environment."""
        self.env.close()
    def reset(self):
        """Start a fresh episode: new batch, env reset, timestep back to 0."""
        self.batch = self.new_batch()
        self.env.reset()
        self.t = 0
    def run(self, test_mode=False):
        """Roll out one full episode and return the filled EpisodeBatch.

        When test_mode is True the episode does not advance t_env and its
        return/stats accumulate in the test_* containers instead of train_*.
        """
        self.reset()
        terminated = False
        episode_return = 0
        self.mac.init_hidden(batch_size=self.batch_size)
        while not terminated:
            pre_transition_data = {
                "state": [self.env.get_state()],
                "avail_actions": [self.env.get_avail_actions()],
                "obs": [self.env.get_obs()]
            }
            self.batch.update(pre_transition_data, ts=self.t)
            # Pass the entire batch of experiences up till now to the agents
            # Receive the actions for each agent at this timestep in a batch of size 1
            actions = self.mac.select_actions(self.batch, t_ep=self.t, t_env=self.t_env, test_mode=test_mode)
            reward, terminated, env_info = self.env.step(actions[0])
            episode_return += reward
            post_transition_data = {
                "actions": actions,
                "reward": [(reward,)],
                # `!=` acts as XOR here: terminations caused purely by the
                # episode limit are recorded as non-terminal.
                "terminated": [(terminated != env_info.get("episode_limit", False),)],
            }
            self.batch.update(post_transition_data, ts=self.t)
            self.t += 1
        last_data = {
            "state": [self.env.get_state()],
            "avail_actions": [self.env.get_avail_actions()],
            "obs": [self.env.get_obs()]
        }
        self.batch.update(last_data, ts=self.t)
        # Select actions in the last stored state
        actions = self.mac.select_actions(self.batch, t_ep=self.t, t_env=self.t_env, test_mode=test_mode)
        self.batch.update({"actions": actions}, ts=self.t)
        cur_stats = self.test_stats if test_mode else self.train_stats
        cur_returns = self.test_returns if test_mode else self.train_returns
        log_prefix = "test_" if test_mode else ""
        # env_info comes from the final env.step(); the loop always runs at
        # least once because `terminated` starts False.
        cur_stats.update({k: cur_stats.get(k, 0) + env_info.get(k, 0) for k in set(cur_stats) | set(env_info)})
        cur_stats["n_episodes"] = 1 + cur_stats.get("n_episodes", 0)
        cur_stats["ep_length"] = self.t + cur_stats.get("ep_length", 0)
        if not test_mode:
            self.t_env += self.t
        cur_returns.append(episode_return)
        if test_mode and (len(self.test_returns) == self.args.test_nepisode):
            self._log(cur_returns, cur_stats, log_prefix)
        elif self.t_env - self.log_train_stats_t >= self.args.runner_log_interval:
            self._log(cur_returns, cur_stats, log_prefix)
            if hasattr(self.mac.action_selector, "epsilon"):
                self.logger.log_stat("epsilon", self.mac.action_selector.epsilon, self.t_env)
            self.log_train_stats_t = self.t_env
        return self.batch
    def _log(self, returns, stats, prefix):
        """Emit return mean/std and per-key stat means, then clear both accumulators."""
        self.logger.log_stat(prefix + "return_mean", np.mean(returns), self.t_env)
        self.logger.log_stat(prefix + "return_std", np.std(returns), self.t_env)
        returns.clear()
        for k, v in stats.items():
            if k != "n_episodes":
                self.logger.log_stat(prefix + k + "_mean" , v/stats["n_episodes"], self.t_env)
        stats.clear()
| 34.959677 | 111 | 0.608074 | from envs import REGISTRY as env_REGISTRY
from functools import partial
from components.episode_buffer import EpisodeBatch
import numpy as np
class EpisodeRunner:
    """Runs a single environment instance one episode at a time and collects
    the transitions into an EpisodeBatch for the learner.

    Only supports batch_size_run == 1 (asserted in __init__).
    """
    def __init__(self, args, logger):
        self.args = args
        self.logger = logger
        self.batch_size = self.args.batch_size_run
        assert self.batch_size == 1
        # Instantiate the environment named in the config.
        self.env = env_REGISTRY[self.args.env](**self.args.env_args)
        self.episode_limit = self.env.episode_limit
        self.t = 0        # step counter within the current episode
        self.t_env = 0    # total training environment steps taken so far
        self.train_returns = []
        self.test_returns = []
        self.train_stats = {}
        self.test_stats = {}
        # Large negative sentinel so the first training log always fires.
        self.log_train_stats_t = -1000000
    def setup(self, scheme, groups, preprocess, mac):
        """Prepare the batch factory and store the multi-agent controller."""
        # episode_limit + 1 leaves room for the final post-terminal entry.
        self.new_batch = partial(EpisodeBatch, scheme, groups, self.batch_size, self.episode_limit + 1,
                                 preprocess=preprocess, device=self.args.device)
        self.mac = mac
    def get_env_info(self):
        """Return the environment's info dict."""
        return self.env.get_env_info()
    def save_replay(self):
        """Ask the environment to save a replay."""
        self.env.save_replay()
    def close_env(self):
        """Shut down the environment."""
        self.env.close()
    def reset(self):
        """Start a fresh episode: new batch, env reset, step counter to 0."""
        self.batch = self.new_batch()
        self.env.reset()
        self.t = 0
    def run(self, test_mode=False):
        """Roll out one full episode and return the filled EpisodeBatch.

        Args:
            test_mode: if True, stats/returns go to the test buffers and
                t_env is not advanced.
        """
        self.reset()
        terminated = False
        episode_return = 0
        self.mac.init_hidden(batch_size=self.batch_size)
        while not terminated:
            pre_transition_data = {
                "state": [self.env.get_state()],
                "avail_actions": [self.env.get_avail_actions()],
                "obs": [self.env.get_obs()]
            }
            self.batch.update(pre_transition_data, ts=self.t)
            # Batch size is 1, so actions[0] is the joint action for the env.
            actions = self.mac.select_actions(self.batch, t_ep=self.t, t_env=self.t_env, test_mode=test_mode)
            reward, terminated, env_info = self.env.step(actions[0])
            episode_return += reward
            post_transition_data = {
                "actions": actions,
                "reward": [(reward,)],
                # Record "terminated" only for real terminations, not
                # episode-limit cutoffs (terminated XOR episode_limit).
                "terminated": [(terminated != env_info.get("episode_limit", False),)],
            }
            self.batch.update(post_transition_data, ts=self.t)
            self.t += 1
        # Store the final state/obs so the learner has a last entry to
        # bootstrap from, plus one last action selection.
        last_data = {
            "state": [self.env.get_state()],
            "avail_actions": [self.env.get_avail_actions()],
            "obs": [self.env.get_obs()]
        }
        self.batch.update(last_data, ts=self.t)
        actions = self.mac.select_actions(self.batch, t_ep=self.t, t_env=self.t_env, test_mode=test_mode)
        self.batch.update({"actions": actions}, ts=self.t)
        cur_stats = self.test_stats if test_mode else self.train_stats
        cur_returns = self.test_returns if test_mode else self.train_returns
        log_prefix = "test_" if test_mode else ""
        # Accumulate every stat reported by the env into the running totals.
        cur_stats.update({k: cur_stats.get(k, 0) + env_info.get(k, 0) for k in set(cur_stats) | set(env_info)})
        cur_stats["n_episodes"] = 1 + cur_stats.get("n_episodes", 0)
        cur_stats["ep_length"] = self.t + cur_stats.get("ep_length", 0)
        if not test_mode:
            self.t_env += self.t
        cur_returns.append(episode_return)
        if test_mode and (len(self.test_returns) == self.args.test_nepisode):
            self._log(cur_returns, cur_stats, log_prefix)
        elif self.t_env - self.log_train_stats_t >= self.args.runner_log_interval:
            self._log(cur_returns, cur_stats, log_prefix)
            if hasattr(self.mac.action_selector, "epsilon"):
                self.logger.log_stat("epsilon", self.mac.action_selector.epsilon, self.t_env)
            self.log_train_stats_t = self.t_env
        return self.batch
    def _log(self, returns, stats, prefix):
        """Log return mean/std and per-key stat means, then clear buffers."""
        self.logger.log_stat(prefix + "return_mean", np.mean(returns), self.t_env)
        self.logger.log_stat(prefix + "return_std", np.std(returns), self.t_env)
        returns.clear()
        for k, v in stats.items():
            if k != "n_episodes":
                self.logger.log_stat(prefix + k + "_mean" , v/stats["n_episodes"], self.t_env)
        stats.clear()
| true | true |
f72532605be0861ccc1b4e26456972b2f7cf7351 | 997 | py | Python | pynsot/serializers.py | dropbox/pynsot | 3a0ff2f6994860beaea147486d914fc0e7e37080 | [
"Apache-2.0"
] | 41 | 2015-01-17T02:59:44.000Z | 2021-12-01T16:16:25.000Z | pynsot/serializers.py | dropbox/pynsot | 3a0ff2f6994860beaea147486d914fc0e7e37080 | [
"Apache-2.0"
] | 88 | 2015-03-09T21:30:53.000Z | 2021-05-21T14:58:23.000Z | pynsot/serializers.py | dropbox/pynsot | 3a0ff2f6994860beaea147486d914fc0e7e37080 | [
"Apache-2.0"
] | 30 | 2015-01-17T02:59:15.000Z | 2021-04-19T22:32:57.000Z | # -*- coding: utf-8 -*-
"""
Specialized serializers for NSoT API client.
This is an example of how you would use this with the Client object, to make it
return objects instead of dicts::
>>> serializer = ModelSerializer()
>>> api = Client(url, serializer=serializer)
>>> obj = api.sites(1).get()
>>> obj
<Site(id=1, description=u'Foo site', name=u'Foo')>
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from slumber.serialize import JsonSerializer
from .import models
__author__ = 'Jathan McCollum'
__maintainer__ = 'Jathan McCollum'
__email__ = 'jathan@dropbox.com'
__copyright__ = 'Copyright (c) 2015-2016 Dropbox, Inc.'
class ModelSerializer(JsonSerializer):
    """Slumber JSON serializer that deserializes API payloads into
    ``ApiModel`` objects rather than plain dicts.
    """

    key = 'model'

    def get_serializer(self, *args, **kwargs):
        """Always hand back this serializer instance, ignoring arguments."""
        return self

    def loads(self, data):
        """Decode ``data`` as JSON and wrap the result in an ``ApiModel``."""
        return models.ApiModel(super(ModelSerializer, self).loads(data))
| 25.564103 | 79 | 0.697091 |
from __future__ import unicode_literals
from __future__ import absolute_import
from slumber.serialize import JsonSerializer
from .import models
__author__ = 'Jathan McCollum'
__maintainer__ = 'Jathan McCollum'
__email__ = 'jathan@dropbox.com'
__copyright__ = 'Copyright (c) 2015-2016 Dropbox, Inc.'
class ModelSerializer(JsonSerializer):
    """Slumber JSON serializer that returns ApiModel objects instead of dicts."""
    key = 'model'
    def get_serializer(self, *args, **kwargs):
        """Return this serializer instance regardless of arguments."""
        return self
    def loads(self, data):
        """Deserialize JSON ``data`` and wrap the result in an ApiModel."""
        obj_data = super(ModelSerializer, self).loads(data)
        return models.ApiModel(obj_data)
| true | true |
f72532c82404f85f49d2567399b1b9784361b2ba | 779 | py | Python | app/Model/Model_facedetection.py | Renanrbsc/System_Face_Recognition | 45a6778e18325245bea27abe41d3a646fa09863c | [
"MIT"
] | null | null | null | app/Model/Model_facedetection.py | Renanrbsc/System_Face_Recognition | 45a6778e18325245bea27abe41d3a646fa09863c | [
"MIT"
] | null | null | null | app/Model/Model_facedetection.py | Renanrbsc/System_Face_Recognition | 45a6778e18325245bea27abe41d3a646fa09863c | [
"MIT"
] | null | null | null | import cv2
from app.Model.Model_cascades import Cascades
class FaceDetection:
    """OpenCV face-detection helpers: cascade selection, detectMultiScale
    parameters, rectangle drawing/cropping and grayscale conversion."""

    def __init__(self):
        # Haar cascade used for face detection.
        self.type_cascade = Cascades.FACECASCADE

    def get_type_cascade(self):
        """Return the configured cascade classifier."""
        return self.type_cascade

    def detection_rectangle_dimensions(self):
        """Return detectMultiScale parameters as [scaleFactor, minNeighbors, minSize]."""
        return [1.3, 5, (30, 30)]

    def format_rectangle(self, image, x, y, w, h):
        """Draw a 2px blue (BGR 255,0,0) rectangle on ``image`` in place."""
        cv2.rectangle(image, (x, y), (x + w, y + h), (255, 0, 0), 2)

    def detection_rectangle(self, rectangle: list, x, y, w, h):
        """Return the cropped region rows y..y+h, cols x..x+w of ``rectangle``."""
        return rectangle[y:y + h, x:x + w]

    def detection_color(self, image):
        """Return ``image`` converted from BGR to grayscale."""
        return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
| 27.821429 | 68 | 0.640565 | import cv2
from app.Model.Model_cascades import Cascades
class FaceDetection:
def __init__(self):
self.type_cascade = Cascades.FACECASCADE
def get_type_cascade(self):
return self.type_cascade
def detection_rectangle_dimensions(self):
scaleFactor = 1.3
minNeighbors = 5
minSize = (30, 30)
return [scaleFactor, minNeighbors, minSize]
def format_rectangle(self, image, x, y, w, h):
cv2.rectangle(image, (x, y), (x + w, y + h), (255, 0, 0), 2)
def detection_rectangle(self, rectangle: list, x, y, w, h):
new_rectangle = rectangle[y:y + h, x:x + w]
return new_rectangle
def detection_color(self, image):
color = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
return color
| true | true |
f72534dd903118747d36c4ba1c73abaee618366c | 825 | py | Python | setup.py | elliotnunn/macresources | cc7c6aacec7d241c945d925c3a2473c3917ef4e0 | [
"MIT"
] | 5 | 2019-09-25T01:09:07.000Z | 2021-11-03T02:39:42.000Z | setup.py | elliotnunn/macresources | cc7c6aacec7d241c945d925c3a2473c3917ef4e0 | [
"MIT"
] | null | null | null | setup.py | elliotnunn/macresources | cc7c6aacec7d241c945d925c3a2473c3917ef4e0 | [
"MIT"
] | null | null | null | from setuptools import setup
setup(
name='macresources',
version='1.2',
author='Elliot Nunn',
author_email='elliotnunn@me.com',
description='Library for working with legacy Macintosh resource forks',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license='MIT',
url='https://github.com/elliotnunn/macresources',
classifiers=[
'Programming Language :: Python :: 3 :: Only',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Topic :: System :: Filesystems',
'Development Status :: 3 - Alpha',
],
packages=['macresources'],
scripts=['bin/SimpleRez', 'bin/SimpleDeRez', 'bin/hexrez', 'bin/rezhex', 'bin/sortrez', 'bin/rfx', 'bin/greggybits', 'bin/instacomp'],
)
| 35.869565 | 138 | 0.647273 | from setuptools import setup
setup(
name='macresources',
version='1.2',
author='Elliot Nunn',
author_email='elliotnunn@me.com',
description='Library for working with legacy Macintosh resource forks',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license='MIT',
url='https://github.com/elliotnunn/macresources',
classifiers=[
'Programming Language :: Python :: 3 :: Only',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Topic :: System :: Filesystems',
'Development Status :: 3 - Alpha',
],
packages=['macresources'],
scripts=['bin/SimpleRez', 'bin/SimpleDeRez', 'bin/hexrez', 'bin/rezhex', 'bin/sortrez', 'bin/rfx', 'bin/greggybits', 'bin/instacomp'],
)
| true | true |
f72535000dbea0756b1c0b1ca77caaa8aa396926 | 3,406 | py | Python | main.py | chanhee0222/feed2resp | 16dc7071f17af56cbf019eeabcd12a5dbd0693e7 | [
"MIT"
] | null | null | null | main.py | chanhee0222/feed2resp | 16dc7071f17af56cbf019eeabcd12a5dbd0693e7 | [
"MIT"
] | null | null | null | main.py | chanhee0222/feed2resp | 16dc7071f17af56cbf019eeabcd12a5dbd0693e7 | [
"MIT"
] | null | null | null | import argparse
import datetime
import glob
import logging
import os
import time
import torch
from logging_helper import init_logger
from models import Discriminator, BartSystem
from train import train
from transformer_base import add_generic_args, generic_train
class Config():
    """Static hyper-parameters and paths for the feedback-to-response run.

    Values are class attributes read directly (no instantiation required by
    readers); main() also copies some of them onto an instance.
    """
    # data_path = './data/chatbot/'
    # log_dir = 'runs/exp'
    save_path = './save'
    # pretrained_embed_path = './embedding/'
    device = torch.device('cuda' if True and torch.cuda.is_available() else 'cpu')
    # device = torch.device('cpu')
    discriminator_method = 'Multi' # 'Multi' or 'Cond'
    load_pretrained_embed = False
    min_freq = 3
    max_length = 1024 # max_source_length
    # embed_size = 256
    d_model = 256
    h = 4
    num_styles = 2
    # 'Multi' adds one extra class on top of the style classes.
    num_classes = num_styles + 1 if discriminator_method == 'Multi' else 2
    num_layers = 4
    # batch_size = 64
    # Learning rates for the generator (F) and discriminator (D).
    lr_F = 5e-6
    lr_D = 1e-4
    L2 = 0
    iter_D = 10
    iter_F = 5
    F_pretrain_iter = 1
    log_steps = 5
    eval_steps = 25
    learned_pos_embed = True
    dropout = 0
    drop_rate_config = [(1, 0)]
    temperature_config = [(1, 0)]
    # Loss weights: self-reconstruction, cycle, adversarial.
    slf_factor = 0.25
    cyc_factor = 0.5
    adv_factor = 1
    inp_shuffle_len = 0
    inp_unk_drop_fac = 0
    inp_rand_drop_fac = 0
    inp_drop_prob = 0
    ### Bart system
    output_dir='feedback_sum'
    do_predict=True
    max_source_length=1024
    max_target_length=56
    data_dir="feedback"
def get_n_params(model):
    """Return the total number of scalar parameters in ``model``.

    Args:
        model: a ``torch.nn.Module`` (anything exposing ``parameters()``).

    Returns:
        int: sum of element counts over all parameter tensors.
    """
    # Tensor.numel() gives the element count directly; summing it is the
    # idiomatic replacement for the manual nested size() product loop.
    return sum(p.numel() for p in model.parameters())
def main():
    """Parse CLI args, build the BART generator and discriminator, optionally
    restore the latest checkpoint, and start adversarial training."""
    config = Config()
    parser = argparse.ArgumentParser()
    add_generic_args(parser, os.getcwd())
    parser = BartSystem.add_model_specific_args(parser, os.getcwd())
    args = parser.parse_args()
    # Some values from Config class needs to be copied to args to work.
    setattr(config, "num_train_epochs", args.num_train_epochs)
    setattr(config, "save_path", args.output_dir)
    setattr(args, "learning_rate", config.lr_F)
    # Create output directory.
    timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
    setattr(config, "save_folder", os.path.join(config.save_path, timestamp))
    os.makedirs(os.path.join(config.save_folder, 'ckpts'))
    init_logger(config.save_folder)
    logger = logging.getLogger(__name__)
    model_F = BartSystem(args).to(config.device)
    # Don't use the trainer to fit the model
    args.do_train = False
    # trainer = generic_train(model_F, args)
    if args.output_dir:
        try:
            checkpoints = list(sorted(glob.glob(os.path.join(args.output_dir, "checkpointepoch=*.ckpt"), recursive=True)))
            if checkpoints[-1]:
                BartSystem.load_from_checkpoint(checkpoints[-1])
                logger.info("Load checkpoint sucessfully!")
        except:
            # NOTE(review): bare except also masks the IndexError raised when
            # no checkpoints exist — consider `except (IndexError, OSError)`.
            logger.info("Failed to load checkpoint!")
    # train_iters, dev_iters, test_iters, vocab = load_dataset(config)
    train_iters, dev_iters, test_iters = model_F.train_dataloader(), model_F.val_dataloader(), model_F.test_dataloader()
    model_D = Discriminator(config, model_F.tokenizer).to(config.device)
    logger.info(config.discriminator_method)
    # import pdb
    # pdb.set_trace()
    logger.info(model_D)
    train(config, model_F, model_D, train_iters, dev_iters, test_iters)
if __name__ == '__main__':
main()
| 28.383333 | 122 | 0.668526 | import argparse
import datetime
import glob
import logging
import os
import time
import torch
from logging_helper import init_logger
from models import Discriminator, BartSystem
from train import train
from transformer_base import add_generic_args, generic_train
class Config():
    """Static hyper-parameters and paths for the feedback-to-response run.

    Values are class attributes read directly; main() also copies some of
    them onto an instance.
    """
    save_path = './save'
    device = torch.device('cuda' if True and torch.cuda.is_available() else 'cpu')
    discriminator_method = 'Multi'  # 'Multi' or 'Cond'
    load_pretrained_embed = False
    min_freq = 3
    max_length = 1024  # max source length
    d_model = 256
    h = 4
    num_styles = 2
    # 'Multi' adds one extra class on top of the style classes.
    num_classes = num_styles + 1 if discriminator_method == 'Multi' else 2
    num_layers = 4
    # Learning rates for the generator (F) and discriminator (D).
    lr_F = 5e-6
    lr_D = 1e-4
    L2 = 0
    iter_D = 10
    iter_F = 5
    F_pretrain_iter = 1
    log_steps = 5
    eval_steps = 25
    learned_pos_embed = True
    dropout = 0
    drop_rate_config = [(1, 0)]
    temperature_config = [(1, 0)]
    # Loss weights: self-reconstruction, cycle, adversarial.
    slf_factor = 0.25
    cyc_factor = 0.5
    adv_factor = 1
    inp_shuffle_len = 0
    inp_unk_drop_fac = 0
    inp_rand_drop_fac = 0
    inp_drop_prob = 0
    ### Bart system
    # Fix: this line was corrupted to the bare token "um'" (a syntax error);
    # restored from the intact copy of this file.
    output_dir = 'feedback_sum'
    do_predict = True
    max_source_length = 1024
    max_target_length = 56
    data_dir = "feedback"
def get_n_params(model):
    """Return the total number of scalar parameters in ``model``.

    Args:
        model: a ``torch.nn.Module`` (anything exposing ``parameters()``).

    Returns:
        int: sum of element counts over all parameter tensors.
    """
    # Tensor.numel() replaces the manual nested size() product loop.
    return sum(p.numel() for p in model.parameters())
def main():
    """Parse CLI args, build the BART generator and discriminator, optionally
    restore the latest checkpoint, and start adversarial training."""
    config = Config()
    parser = argparse.ArgumentParser()
    add_generic_args(parser, os.getcwd())
    parser = BartSystem.add_model_specific_args(parser, os.getcwd())
    args = parser.parse_args()
    # Copy values the training loop expects between config and args.
    setattr(config, "num_train_epochs", args.num_train_epochs)
    setattr(config, "save_path", args.output_dir)
    setattr(args, "learning_rate", config.lr_F)
    # Create a timestamped output directory with a ckpts subfolder.
    timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
    setattr(config, "save_folder", os.path.join(config.save_path, timestamp))
    os.makedirs(os.path.join(config.save_folder, 'ckpts'))
    init_logger(config.save_folder)
    logger = logging.getLogger(__name__)
    model_F = BartSystem(args).to(config.device)
    # Don't let the Lightning trainer fit the model here.
    args.do_train = False
    # trainer = generic_train(model_F, args)
    if args.output_dir:
        try:
            checkpoints = list(sorted(glob.glob(os.path.join(args.output_dir, "checkpointepoch=*.ckpt"), recursive=True)))
            if checkpoints[-1]:
                BartSystem.load_from_checkpoint(checkpoints[-1])
                logger.info("Load checkpoint sucessfully!")
        except:
            # NOTE(review): bare except also masks the IndexError raised when
            # no checkpoints exist — consider `except (IndexError, OSError)`.
            logger.info("Failed to load checkpoint!")
    # train_iters, dev_iters, test_iters, vocab = load_dataset(config)
    train_iters, dev_iters, test_iters = model_F.train_dataloader(), model_F.val_dataloader(), model_F.test_dataloader()
    model_D = Discriminator(config, model_F.tokenizer).to(config.device)
    logger.info(config.discriminator_method)
    # import pdb
    # pdb.set_trace()
    logger.info(model_D)
    train(config, model_F, model_D, train_iters, dev_iters, test_iters)
if __name__ == '__main__':
main()
| true | true |
f7253541b69cfbfcfbec4e8411f5009f4337f8ed | 6,853 | py | Python | homeassistant/components/mqtt/number.py | Kiskae/core | f538e07902b5370fdf448627798444df43a32085 | [
"Apache-2.0"
] | 3 | 2021-03-31T12:56:27.000Z | 2021-05-25T15:26:01.000Z | homeassistant/components/mqtt/number.py | Kiskae/core | f538e07902b5370fdf448627798444df43a32085 | [
"Apache-2.0"
] | 65 | 2018-10-14T08:59:06.000Z | 2022-03-31T06:04:07.000Z | homeassistant/components/mqtt/number.py | Kiskae/core | f538e07902b5370fdf448627798444df43a32085 | [
"Apache-2.0"
] | 1 | 2021-03-29T18:56:52.000Z | 2021-03-29T18:56:52.000Z | """Configure number in a device through MQTT topic."""
import functools
import logging
import voluptuous as vol
from homeassistant.components import number
from homeassistant.components.number import (
DEFAULT_MAX_VALUE,
DEFAULT_MIN_VALUE,
DEFAULT_STEP,
NumberEntity,
)
from homeassistant.const import CONF_NAME, CONF_OPTIMISTIC, CONF_VALUE_TEMPLATE
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
from . import (
CONF_COMMAND_TOPIC,
CONF_QOS,
CONF_STATE_TOPIC,
DOMAIN,
PLATFORMS,
subscription,
)
from .. import mqtt
from .const import CONF_RETAIN
from .debug_info import log_messages
from .mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, async_setup_entry_helper
_LOGGER = logging.getLogger(__name__)
CONF_MIN = "min"
CONF_MAX = "max"
CONF_STEP = "step"
DEFAULT_NAME = "MQTT Number"
DEFAULT_OPTIMISTIC = False
def validate_config(config):
    """Validate the MQTT number config: the minimum must be strictly below
    the maximum, otherwise raise ``vol.Invalid``."""
    minimum = config.get(CONF_MIN)
    maximum = config.get(CONF_MAX)
    if minimum >= maximum:
        raise vol.Invalid(f"'{CONF_MAX}' must be > '{CONF_MIN}'")
    return config
PLATFORM_SCHEMA = vol.All(
mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): vol.Coerce(float),
vol.Optional(CONF_MIN, default=DEFAULT_MIN_VALUE): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_STEP, default=DEFAULT_STEP): vol.All(
vol.Coerce(float), vol.Range(min=1e-3)
),
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
},
).extend(MQTT_ENTITY_COMMON_SCHEMA.schema),
validate_config,
)
async def async_setup_platform(
    hass: HomeAssistant, config: ConfigType, async_add_entities, discovery_info=None
):
    """Set up MQTT number through configuration.yaml."""
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    await _async_setup_entity(hass, async_add_entities, config)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up MQTT number dynamically through MQTT discovery."""
    # Bind hass/async_add_entities/config_entry now; discovery supplies the
    # per-entity config later.
    setup = functools.partial(
        _async_setup_entity, hass, async_add_entities, config_entry=config_entry
    )
    await async_setup_entry_helper(hass, number.DOMAIN, setup, PLATFORM_SCHEMA)
async def _async_setup_entity(
    hass, async_add_entities, config, config_entry=None, discovery_data=None
):
    """Set up the MQTT number."""
    async_add_entities([MqttNumber(hass, config, config_entry, discovery_data)])
class MqttNumber(MqttEntity, NumberEntity, RestoreEntity):
    """Representation of an MQTT number."""
    def __init__(self, hass, config, config_entry, discovery_data):
        """Initialize the MQTT Number."""
        self._config = config
        self._optimistic = False
        self._sub_state = None
        self._current_number = None
        NumberEntity.__init__(self)
        MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
    @staticmethod
    def config_schema():
        """Return the config schema."""
        return PLATFORM_SCHEMA
    def _setup_from_config(self, config):
        """(Re)Setup the entity."""
        self._optimistic = config[CONF_OPTIMISTIC]
        value_template = self._config.get(CONF_VALUE_TEMPLATE)
        if value_template is not None:
            value_template.hass = self.hass
    async def _subscribe_topics(self):
        """(Re)Subscribe to topics."""
        @callback
        @log_messages(self.hass, self.entity_id)
        def message_received(msg):
            """Handle new MQTT messages."""
            payload = msg.payload
            value_template = self._config.get(CONF_VALUE_TEMPLATE)
            if value_template is not None:
                payload = value_template.async_render_with_possible_json_value(payload)
            try:
                # NOTE(review): str.isnumeric() is False for "-1" or "1.5",
                # so negatives and decimals take the float() branch.
                if payload.isnumeric():
                    num_value = int(payload)
                else:
                    num_value = float(payload)
            except ValueError:
                _LOGGER.warning("Payload '%s' is not a Number", msg.payload)
                return
            # Reject values outside the configured min/max range.
            if num_value < self.min_value or num_value > self.max_value:
                _LOGGER.error(
                    "Invalid value for %s: %s (range %s - %s)",
                    self.entity_id,
                    num_value,
                    self.min_value,
                    self.max_value,
                )
                return
            self._current_number = num_value
            self.async_write_ha_state()
        if self._config.get(CONF_STATE_TOPIC) is None:
            # Force into optimistic mode.
            self._optimistic = True
        else:
            self._sub_state = await subscription.async_subscribe_topics(
                self.hass,
                self._sub_state,
                {
                    "state_topic": {
                        "topic": self._config.get(CONF_STATE_TOPIC),
                        "msg_callback": message_received,
                        "qos": self._config[CONF_QOS],
                    }
                },
            )
        if self._optimistic:
            last_state = await self.async_get_last_state()
            if last_state:
                # NOTE(review): last_state.state is a string here, not a
                # number — confirm consumers of `value` tolerate that.
                self._current_number = last_state.state
    @property
    def min_value(self) -> float:
        """Return the minimum value."""
        return self._config[CONF_MIN]
    @property
    def max_value(self) -> float:
        """Return the maximum value."""
        return self._config[CONF_MAX]
    @property
    def step(self) -> float:
        """Return the increment/decrement step."""
        return self._config[CONF_STEP]
    @property
    def value(self):
        """Return the current value."""
        return self._current_number
    async def async_set_value(self, value: float) -> None:
        """Update the current value."""
        current_number = value
        if value.is_integer():
            # Publish whole numbers without a trailing ".0".
            current_number = int(value)
        if self._optimistic:
            self._current_number = current_number
            self.async_write_ha_state()
        mqtt.async_publish(
            self.hass,
            self._config[CONF_COMMAND_TOPIC],
            current_number,
            self._config[CONF_QOS],
            self._config[CONF_RETAIN],
        )
    @property
    def assumed_state(self):
        """Return true if we do optimistic updates."""
        return self._optimistic
| 31.726852 | 87 | 0.637531 | import functools
import logging
import voluptuous as vol
from homeassistant.components import number
from homeassistant.components.number import (
DEFAULT_MAX_VALUE,
DEFAULT_MIN_VALUE,
DEFAULT_STEP,
NumberEntity,
)
from homeassistant.const import CONF_NAME, CONF_OPTIMISTIC, CONF_VALUE_TEMPLATE
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
from . import (
CONF_COMMAND_TOPIC,
CONF_QOS,
CONF_STATE_TOPIC,
DOMAIN,
PLATFORMS,
subscription,
)
from .. import mqtt
from .const import CONF_RETAIN
from .debug_info import log_messages
from .mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, async_setup_entry_helper
_LOGGER = logging.getLogger(__name__)
CONF_MIN = "min"
CONF_MAX = "max"
CONF_STEP = "step"
DEFAULT_NAME = "MQTT Number"
DEFAULT_OPTIMISTIC = False
def validate_config(config):
    """Validate that min < max in the MQTT number config; raise otherwise."""
    if config.get(CONF_MIN) >= config.get(CONF_MAX):
        raise vol.Invalid(f"'{CONF_MAX}' must be > '{CONF_MIN}'")
    return config
PLATFORM_SCHEMA = vol.All(
mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): vol.Coerce(float),
vol.Optional(CONF_MIN, default=DEFAULT_MIN_VALUE): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_STEP, default=DEFAULT_STEP): vol.All(
vol.Coerce(float), vol.Range(min=1e-3)
),
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
},
).extend(MQTT_ENTITY_COMMON_SCHEMA.schema),
validate_config,
)
async def async_setup_platform(
    hass: HomeAssistant, config: ConfigType, async_add_entities, discovery_info=None
):
    """Set up MQTT number through configuration.yaml."""
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    await _async_setup_entity(hass, async_add_entities, config)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up MQTT number dynamically through MQTT discovery."""
    setup = functools.partial(
        _async_setup_entity, hass, async_add_entities, config_entry=config_entry
    )
    await async_setup_entry_helper(hass, number.DOMAIN, setup, PLATFORM_SCHEMA)
async def _async_setup_entity(
    hass, async_add_entities, config, config_entry=None, discovery_data=None
):
    """Set up the MQTT number entity and register it."""
    async_add_entities([MqttNumber(hass, config, config_entry, discovery_data)])
class MqttNumber(MqttEntity, NumberEntity, RestoreEntity):
    """Representation of an MQTT number."""
    def __init__(self, hass, config, config_entry, discovery_data):
        """Initialize the MQTT Number."""
        self._config = config
        self._optimistic = False
        self._sub_state = None
        self._current_number = None
        NumberEntity.__init__(self)
        MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
    @staticmethod
    def config_schema():
        """Return the config schema."""
        return PLATFORM_SCHEMA
    def _setup_from_config(self, config):
        """(Re)Setup the entity from its configuration."""
        self._optimistic = config[CONF_OPTIMISTIC]
        value_template = self._config.get(CONF_VALUE_TEMPLATE)
        if value_template is not None:
            value_template.hass = self.hass
    async def _subscribe_topics(self):
        """(Re)Subscribe to the state topic, if one is configured."""
        @callback
        @log_messages(self.hass, self.entity_id)
        def message_received(msg):
            """Handle new MQTT messages."""
            payload = msg.payload
            value_template = self._config.get(CONF_VALUE_TEMPLATE)
            if value_template is not None:
                payload = value_template.async_render_with_possible_json_value(payload)
            try:
                # NOTE(review): str.isnumeric() is False for "-1" or "1.5",
                # so negatives and decimals take the float() branch.
                if payload.isnumeric():
                    num_value = int(payload)
                else:
                    num_value = float(payload)
            except ValueError:
                _LOGGER.warning("Payload '%s' is not a Number", msg.payload)
                return
            # Reject values outside the configured min/max range.
            if num_value < self.min_value or num_value > self.max_value:
                _LOGGER.error(
                    "Invalid value for %s: %s (range %s - %s)",
                    self.entity_id,
                    num_value,
                    self.min_value,
                    self.max_value,
                )
                return
            self._current_number = num_value
            self.async_write_ha_state()
        if self._config.get(CONF_STATE_TOPIC) is None:
            # No state topic: force into optimistic mode.
            self._optimistic = True
        else:
            self._sub_state = await subscription.async_subscribe_topics(
                self.hass,
                self._sub_state,
                {
                    "state_topic": {
                        "topic": self._config.get(CONF_STATE_TOPIC),
                        "msg_callback": message_received,
                        "qos": self._config[CONF_QOS],
                    }
                },
            )
        if self._optimistic:
            last_state = await self.async_get_last_state()
            if last_state:
                # NOTE(review): last_state.state is a string here, not a
                # number — confirm consumers of `value` tolerate that.
                self._current_number = last_state.state
    @property
    def min_value(self) -> float:
        """Return the minimum value."""
        return self._config[CONF_MIN]
    @property
    def max_value(self) -> float:
        """Return the maximum value."""
        return self._config[CONF_MAX]
    @property
    def step(self) -> float:
        """Return the increment/decrement step."""
        return self._config[CONF_STEP]
    @property
    def value(self):
        """Return the current value."""
        return self._current_number
    async def async_set_value(self, value: float) -> None:
        """Publish a new value, optimistically updating local state first."""
        current_number = value
        if value.is_integer():
            # Publish whole numbers without a trailing ".0".
            current_number = int(value)
        if self._optimistic:
            self._current_number = current_number
            self.async_write_ha_state()
        mqtt.async_publish(
            self.hass,
            self._config[CONF_COMMAND_TOPIC],
            current_number,
            self._config[CONF_QOS],
            self._config[CONF_RETAIN],
        )
    @property
    def assumed_state(self):
        """Return true if we do optimistic updates."""
        return self._optimistic
| true | true |
f72536cc54c5ab45ec1ef476582c6f686e353776 | 9,038 | py | Python | netapp/santricity/models/v2/discovery_start_request.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 5 | 2016-08-23T17:52:22.000Z | 2019-05-16T08:45:30.000Z | netapp/santricity/models/v2/discovery_start_request.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 2 | 2016-11-10T05:30:21.000Z | 2019-04-05T15:03:37.000Z | netapp/santricity/models/v2/discovery_start_request.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 7 | 2016-08-25T16:11:44.000Z | 2021-02-22T05:31:25.000Z | # coding: utf-8
"""
DiscoveryStartRequest.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from pprint import pformat
from six import iteritems
class DiscoveryStartRequest(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
DiscoveryStartRequest - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'start_ip': 'str', # (required parameter)
'end_ip': 'str', # (required parameter)
'use_agents': 'bool',
'connection_timeout': 'int',
'max_ports_to_use': 'int'
}
self.attribute_map = {
'start_ip': 'startIP', # (required parameter)
'end_ip': 'endIP', # (required parameter)
'use_agents': 'useAgents',
'connection_timeout': 'connectionTimeout',
'max_ports_to_use': 'maxPortsToUse'
}
self._start_ip = None
self._end_ip = None
self._use_agents = None
self._connection_timeout = None
self._max_ports_to_use = None
@property
def start_ip(self):
"""
Gets the start_ip of this DiscoveryStartRequest.
Starting IP address
:return: The start_ip of this DiscoveryStartRequest.
:rtype: str
:required/optional: required
"""
return self._start_ip
@start_ip.setter
def start_ip(self, start_ip):
"""
Sets the start_ip of this DiscoveryStartRequest.
Starting IP address
:param start_ip: The start_ip of this DiscoveryStartRequest.
:type: str
"""
self._start_ip = start_ip
@property
def end_ip(self):
"""
Gets the end_ip of this DiscoveryStartRequest.
Ending IP Address
:return: The end_ip of this DiscoveryStartRequest.
:rtype: str
:required/optional: required
"""
return self._end_ip
@end_ip.setter
def end_ip(self, end_ip):
"""
Sets the end_ip of this DiscoveryStartRequest.
Ending IP Address
:param end_ip: The end_ip of this DiscoveryStartRequest.
:type: str
"""
self._end_ip = end_ip
@property
def use_agents(self):
"""
Gets the use_agents of this DiscoveryStartRequest.
In-band management agents will be queried if this value is true. If you are not sure, its best to leave this as default
:return: The use_agents of this DiscoveryStartRequest.
:rtype: bool
:required/optional: optional
"""
return self._use_agents
@use_agents.setter
def use_agents(self, use_agents):
"""
Sets the use_agents of this DiscoveryStartRequest.
In-band management agents will be queried if this value is true. If you are not sure, its best to leave this as default
:param use_agents: The use_agents of this DiscoveryStartRequest.
:type: bool
"""
self._use_agents = use_agents
@property
def connection_timeout(self):
"""
Gets the connection_timeout of this DiscoveryStartRequest.
Optional parameter. This defaults to 30 seconds. Longer times may be needed for WAN discoveries, but will slow down the whole process. Shorter times speed up the process. This is only the timeout value for the initial TCP connection
:return: The connection_timeout of this DiscoveryStartRequest.
:rtype: int
:required/optional: optional
"""
return self._connection_timeout
@connection_timeout.setter
def connection_timeout(self, connection_timeout):
"""
Sets the connection_timeout of this DiscoveryStartRequest.
Optional parameter. This defaults to 30 seconds. Longer times may be needed for WAN discoveries, but will slow down the whole process. Shorter times speed up the process. This is only the timeout value for the initial TCP connection
:param connection_timeout: The connection_timeout of this DiscoveryStartRequest.
:type: int
"""
self._connection_timeout = connection_timeout
    @property
    def max_ports_to_use(self):
        """
        Gets the max_ports_to_use of this DiscoveryStartRequest.
        Number of ports to use for discovery. A port is a file handler in most operating systems. Watch your ULimits. More ports the faster discovery goes. But, for example, windows only has a few thousand available ports. If file handlers are exceeded, then this process will adjust down but other processes on the server may be affected as well as client connections
        :return: The max_ports_to_use of this DiscoveryStartRequest.
        :rtype: int
        :required/optional: optional
        """
        # May be None until explicitly set.
        return self._max_ports_to_use
    @max_ports_to_use.setter
    def max_ports_to_use(self, max_ports_to_use):
        """
        Sets the max_ports_to_use of this DiscoveryStartRequest.
        Number of ports to use for discovery. A port is a file handler in most operating systems. Watch your ULimits. More ports the faster discovery goes. But, for example, windows only has a few thousand available ports. If file handlers are exceeded, then this process will adjust down but other processes on the server may be affected as well as client connections
        :param max_ports_to_use: The max_ports_to_use of this DiscoveryStartRequest.
        :type: int
        """
        self._max_ports_to_use = max_ports_to_use
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
    def to_str(self):
        """
        Returns the string representation of the model
        (the pretty-printed dict produced by to_dict()).
        """
        return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
if self is None:
return None
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if self is None or other is None:
return None
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        (defined as the negation of __eq__).
        """
        return not self == other
| 39.295652 | 844 | 0.645829 |
from pprint import pformat
from six import iteritems
class DiscoveryStartRequest(object):
    """Request model for starting a storage-system discovery run.

    Swagger-style generated model: attributes are exposed through
    properties backed by private ``_``-prefixed fields. ``swagger_types``
    maps attribute names to their declared types and ``attribute_map``
    maps them to the JSON field names used on the wire.
    """
    def __init__(self):
        # Python attribute name -> declared type; drives to_dict().
        self.swagger_types = {
            'start_ip': 'str',
            'end_ip': 'str',
            'use_agents': 'bool',
            'connection_timeout': 'int',
            'max_ports_to_use': 'int'
        }
        # Python attribute name -> wire (JSON) field name.
        self.attribute_map = {
            'start_ip': 'startIP',
            'end_ip': 'endIP',
            'use_agents': 'useAgents',
            'connection_timeout': 'connectionTimeout',
            'max_ports_to_use': 'maxPortsToUse'
        }
        # All fields default to None (i.e. "not provided").
        self._start_ip = None
        self._end_ip = None
        self._use_agents = None
        self._connection_timeout = None
        self._max_ports_to_use = None
    @property
    def start_ip(self):
        """First IP address of the discovery range."""
        return self._start_ip
    @start_ip.setter
    def start_ip(self, start_ip):
        self._start_ip = start_ip
    @property
    def end_ip(self):
        """Last IP address of the discovery range."""
        return self._end_ip
    @end_ip.setter
    def end_ip(self, end_ip):
        self._end_ip = end_ip
    @property
    def use_agents(self):
        """Whether in-band management agents should be queried (bool)."""
        return self._use_agents
    @use_agents.setter
    def use_agents(self, use_agents):
        self._use_agents = use_agents
    @property
    def connection_timeout(self):
        """Initial TCP connection timeout in seconds (may be None)."""
        return self._connection_timeout
    @connection_timeout.setter
    def connection_timeout(self, connection_timeout):
        self._connection_timeout = connection_timeout
    @property
    def max_ports_to_use(self):
        """Maximum number of ports (file handles) discovery may use."""
        return self._max_ports_to_use
    @max_ports_to_use.setter
    def max_ports_to_use(self, max_ports_to_use):
        self._max_ports_to_use = max_ports_to_use
    def to_dict(self):
        """Return the model properties as a dict (one level deep)."""
        result = {}
        # dict iteration works identically on Python 2 and 3 here, so the
        # six.iteritems() indirection is no longer needed.
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, "to_dict") else x
                                for x in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v
                                for k, v in value.items()}
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`."""
        # The generated `if self is None` guard was dead code and is removed.
        return self.to_str()
    def __eq__(self, other):
        """Attribute-wise equality; unrelated types are never equal.

        The generated code returned None for None operands and raised
        AttributeError when `other` had no __dict__; both now return False.
        """
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Negation of __eq__."""
        return not self == other
| true | true |
f72536e7634a6d0146e21e210682f4ef9a21937b | 1,108 | py | Python | tests/test_async_manager.py | yehonadav/qaviton_io | 936c444db06d5c0a9fa75a56742c70a0deebee65 | [
"Apache-2.0"
] | null | null | null | tests/test_async_manager.py | yehonadav/qaviton_io | 936c444db06d5c0a9fa75a56742c70a0deebee65 | [
"Apache-2.0"
] | null | null | null | tests/test_async_manager.py | yehonadav/qaviton_io | 936c444db06d5c0a9fa75a56742c70a0deebee65 | [
"Apache-2.0"
] | null | null | null | from time import time
from requests import get
from qaviton_io.async_manager import AsyncManager
from tests.utils import server
def test_simple_requests():
    """Smoke-test AsyncManager: run one HTTP task, then 20, and assert the
    20-task batch finishes in under twice the single-task time (i.e. the
    requests actually overlap)."""
    def run_batch(task_count):
        failures = {}
        responses = []
        def task():
            try:
                with server() as (host, port):
                    resp = get(f'http://{host}:{port}')
                    resp.raise_for_status()
                    responses.append(resp)
            except Exception as exc:
                # Key failures by traceback+message so identical errors
                # are tallied together.
                key = f'{exc.__traceback__}{exc}'
                failures[key] = failures.get(key, 0) + 1
        manager = AsyncManager()
        manager.log.clear()
        start = time()
        manager.run([task] * task_count)
        elapsed = time() - start
        print(f'took {round(elapsed, 3)}s')
        for message, count in failures.items():
            print(f'{message} this error occurred {count} times')
        assert not failures
        return elapsed
    print("")
    single = run_batch(1)
    many = run_batch(20)
    assert many < single * 2
| 26.380952 | 55 | 0.50361 | from time import time
from requests import get
from qaviton_io.async_manager import AsyncManager
from tests.utils import server
def test_simple_requests():
    """Smoke test: run the same HTTP task once and then 20 times through
    AsyncManager and assert that 20 concurrent tasks take less than twice
    the single-task wall time (i.e. the requests actually overlap)."""
    def execute_tasks(number_of_tasks: int):
        # error signature -> occurrence count
        errors = {}
        rs = []
        def task():
            try:
                with server() as (host, port):
                    r = get(f'http://{host}:{port}')
                    r.raise_for_status()
                    rs.append(r)
            except Exception as e:
                # Key errors by traceback+message so identical failures
                # are counted together.
                name = f'{e.__traceback__}{e}'
                if name in errors:
                    errors[name] += 1
                else:
                    errors[name] = 1
        tasks = [task for _ in range(number_of_tasks)]
        manager = AsyncManager()
        manager.log.clear()
        t = time()
        manager.run(tasks)
        t = time() - t
        print(f'took {round(t, 3)}s')
        for e, n in errors.items():
            print(f'{e} this error occurred {n} times')
        assert not errors
        return t
    print("")
    t1 = execute_tasks(1)
    t2 = execute_tasks(20)
    # Concurrency check: 20 tasks must cost well under 20x one task.
    assert t2 < t1 * 2
| true | true |
f72537a956b381dcd8b8b8636b46fb369b4e987b | 4,514 | py | Python | pymare/stats.py | tsalo/PyMARE | 7eb950fb137b6221f2ea5d381ca91d16eb4b8a35 | [
"MIT"
] | null | null | null | pymare/stats.py | tsalo/PyMARE | 7eb950fb137b6221f2ea5d381ca91d16eb4b8a35 | [
"MIT"
] | null | null | null | pymare/stats.py | tsalo/PyMARE | 7eb950fb137b6221f2ea5d381ca91d16eb4b8a35 | [
"MIT"
] | null | null | null | """Miscellaneous statistical functions."""
import numpy as np
import scipy.stats as ss
from scipy.optimize import Bounds, minimize
def weighted_least_squares(y, v, X, tau2=0.0, return_cov=False):
    """2-D weighted least squares.

    Args:
        y (NDArray): 2-d array of estimates (studies x parallel datasets)
        v (NDArray): 2-d array of sampling variances
        X (NDArray): Fixed effect design matrix
        tau2 (float): tau^2 estimate to use for weights
        return_cov (bool): Whether or not to return the inverse cov matrix

    Returns:
        If return_cov is True, returns both fixed parameter estimates and the
        inverse covariance matrix; if False, only the parameter estimates.
    """
    weights = 1.0 / (v + tau2)
    # Einsum subscripts: k = studies, p = predictors, i = parallel datasets.
    weighted_design = np.einsum("kp,ki->ipk", X, weights)
    fixed_cov = weighted_design.dot(X)
    # numpy >= 1.8 inverts stacked matrices along the leading dimensions,
    # which vectorizes the computation across the parallel datasets.
    inv_cov = np.linalg.pinv(fixed_cov).T
    projection = np.einsum("ipk,qpi->iqk", weighted_design, inv_cov)
    estimates = np.einsum("ipk,ik->ip", projection, y.T).T
    if return_cov:
        return estimates, inv_cov
    return estimates
def ensure_2d(arr):
    """Ensure the passed array has 2 dimensions.

    Args:
        arr: Array-like input, or None.

    Returns:
        A 2-d ndarray (1-d input becomes a column vector). None, or input
        that cannot be coerced to an ndarray, is returned unchanged.
    """
    if arr is None:
        return arr
    try:
        arr = np.array(arr)
    except Exception:
        # Not coercible to an ndarray; return it untouched. The previous
        # bare `except:` also swallowed KeyboardInterrupt/SystemExit.
        return arr
    if arr.ndim == 1:
        arr = arr[:, None]
    return arr
def q_profile(y, v, X, alpha=0.05):
    """Get the CI for tau^2 via the Q-Profile method (Viechtbauer, 2007).

    Args:
        y (ndarray): 1d array of study-level estimates
        v (ndarray): 1d array of study-level variances
        X (ndarray): 1d or 2d array containing study-level predictors
            (including intercept); has dimensions K x P, where K is the number
            of studies and P is the number of predictor variables.
        alpha (float, optional): alpha value defining the coverage of the CIs,
            where width(CI) = 1 - alpha. Defaults to 0.05.

    Returns:
        A dictionary with keys 'ci_l' and 'ci_u', corresponding to the lower
        and upper bounds of the tau^2 confidence interval, respectively.

    Notes:
        Following the Viechtbauer implementation, this method returns the
        interval that gives an equal probability mass at both tails (i.e.,
        P(tau^2 <= lower_bound) == P(tau^2 >= upper_bound) == alpha/2), and
        *not* the smallest possible range of tau^2 values that provides the
        desired coverage.

    References:
        Viechtbauer, W. (2007). Confidence intervals for the amount of
        heterogeneity in meta-analysis. Statistics in Medicine, 26(1), 37-52.
    """
    k, p = X.shape
    # Residual degrees of freedom for the generalized Q statistic.
    df = k - p
    # Critical chi^2 values leaving alpha/2 probability in each tail.
    l_crit = ss.chi2.ppf(1 - alpha / 2, df)
    u_crit = ss.chi2.ppf(alpha / 2, df)
    args = (ensure_2d(y), ensure_2d(v), X)
    bds = Bounds([0], [np.inf], keep_feasible=True)
    # Use the D-L estimate of tau^2 as a starting point; when using a fixed
    # value, minimize() sometimes fails to stay in bounds.
    # NOTE: imported locally, presumably to avoid a circular import between
    # this module and pymare.estimators -- confirm before moving to top level.
    from .estimators import DerSimonianLaird
    ub_start = 2 * DerSimonianLaird().fit(y, v, X).params_["tau2"]
    # Each bound is found by minimizing the squared distance between the
    # generalized Q statistic and the corresponding critical value.
    lb = minimize(lambda x: (q_gen(*args, x) - l_crit) ** 2, [0], bounds=bds).x[0]
    ub = minimize(lambda x: (q_gen(*args, x) - u_crit) ** 2, [ub_start], bounds=bds).x[0]
    return {"ci_l": lb, "ci_u": ub}
def q_gen(y, v, X, tau2):
    """Generalized form of Cochran's Q-statistic.

    Args:
        y (ndarray): 1d array of study-level estimates
        v (ndarray): 1d array of study-level variances
        X (ndarray): 1d or 2d array containing study-level predictors
            (including intercept); K x P, where K is the number of studies
            and P the number of predictor variables.
        tau2 (float): Between-study variance. Must be >= 0.

    Returns:
        A float giving the value of Cochran's Q-statistic.

    References:
        Veroniki, A. A., Jackson, D., Viechtbauer, W., Bender, R., Bowden, J.,
        Knapp, G., Kuss, O., Higgins, J. P., Langan, D., & Salanti, G. (2016).
        Methods to estimate the between-study variance and its uncertainty in
        meta-analysis. Research synthesis methods, 7(1), 55–79.
        https://doi.org/10.1002/jrsm.1164
    """
    if np.any(tau2 < 0):
        raise ValueError("Value of tau^2 must be >= 0.")
    # Weighted sum of squared residuals about the WLS fixed-effect fit.
    fixed_effects = weighted_least_squares(y, v, X, tau2)
    residuals = y - X.dot(fixed_effects)
    weights = 1.0 / (v + tau2)
    return (weights * residuals ** 2).sum(0)
| 36.699187 | 89 | 0.634692 |
import numpy as np
import scipy.stats as ss
from scipy.optimize import Bounds, minimize
def weighted_least_squares(y, v, X, tau2=0.0, return_cov=False):
    """2-D weighted least squares over parallel datasets.

    Einsum subscripts: k = studies, p = predictors, i = parallel datasets.
    Returns beta, plus the stacked precision matrices if return_cov is True.
    """
    w = 1.0 / (v + tau2)
    wX = np.einsum("kp,ki->ipk", X, w)
    cov = wX.dot(X)
    # pinv inverts the stacked (i, p, p) matrices in one vectorized call.
    precision = np.linalg.pinv(cov).T
    pwX = np.einsum("ipk,qpi->iqk", wX, precision)
    beta = np.einsum("ipk,ik->ip", pwX, y.T).T
    return (beta, precision) if return_cov else beta
def ensure_2d(arr):
    """Coerce input to a 2-d ndarray (1-d becomes a column vector).

    None, or input that cannot be converted to an ndarray, is returned
    unchanged.
    """
    if arr is None:
        return arr
    try:
        arr = np.array(arr)
    except Exception:
        # Not coercible; the previous bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit.
        return arr
    if arr.ndim == 1:
        arr = arr[:, None]
    return arr
def q_profile(y, v, X, alpha=0.05):
    """Confidence interval for tau^2 via the Q-profile method
    (Viechtbauer, 2007). Returns {'ci_l': ..., 'ci_u': ...} with equal
    probability mass (alpha/2) in each tail."""
    k, p = X.shape
    # Residual degrees of freedom for the generalized Q statistic.
    df = k - p
    # Critical chi^2 values for the two tails.
    l_crit = ss.chi2.ppf(1 - alpha / 2, df)
    u_crit = ss.chi2.ppf(alpha / 2, df)
    args = (ensure_2d(y), ensure_2d(v), X)
    bds = Bounds([0], [np.inf], keep_feasible=True)
    # Local import, presumably to avoid a circular import -- confirm.
    from .estimators import DerSimonianLaird
    # Start the upper-bound search from 2x the DerSimonian-Laird tau^2.
    ub_start = 2 * DerSimonianLaird().fit(y, v, X).params_["tau2"]
    lb = minimize(lambda x: (q_gen(*args, x) - l_crit) ** 2, [0], bounds=bds).x[0]
    ub = minimize(lambda x: (q_gen(*args, x) - u_crit) ** 2, [ub_start], bounds=bds).x[0]
    return {"ci_l": lb, "ci_u": ub}
def q_gen(y, v, X, tau2):
    """Generalized Cochran's Q-statistic; tau2 must be >= 0."""
    if np.any(tau2 < 0):
        raise ValueError("Value of tau^2 must be >= 0.")
    # Weighted sum of squared residuals about the WLS fixed-effect fit.
    beta = weighted_least_squares(y, v, X, tau2)
    w = 1.0 / (v + tau2)
    return (w * (y - X.dot(beta)) ** 2).sum(0)
| true | true |
f72538c76f49ccac9bcd5d18c35fad8c0e5bdbe6 | 691 | py | Python | backtracking/0216_combination_sum_3.py | MartinMa28/Algorithms_review | 3f2297038c00f5a560941360ca702e6868530f34 | [
"MIT"
] | null | null | null | backtracking/0216_combination_sum_3.py | MartinMa28/Algorithms_review | 3f2297038c00f5a560941360ca702e6868530f34 | [
"MIT"
] | null | null | null | backtracking/0216_combination_sum_3.py | MartinMa28/Algorithms_review | 3f2297038c00f5a560941360ca702e6868530f34 | [
"MIT"
] | null | null | null | class Solution:
def __init__(self):
self.combs = []
def _backtrack(self, candidates, cur, target, k):
if len(cur) == k and sum(cur) == target:
self.combs.append(cur[:])
return
if sum(cur) > target:
return
elif len(cur) < k:
for idx, candi in enumerate(candidates):
cur.append(candi)
self._backtrack(candidates[idx + 1:], cur, target, k)
# backtracking
cur.pop()
def combinationSum3(self, k: int, n: int) -> list:
self._backtrack(range(1, 10), [], n, k)
return self.combs | 30.043478 | 69 | 0.474674 | class Solution:
def __init__(self):
self.combs = []
def _backtrack(self, candidates, cur, target, k):
if len(cur) == k and sum(cur) == target:
self.combs.append(cur[:])
return
if sum(cur) > target:
return
elif len(cur) < k:
for idx, candi in enumerate(candidates):
cur.append(candi)
self._backtrack(candidates[idx + 1:], cur, target, k)
cur.pop()
def combinationSum3(self, k: int, n: int) -> list:
self._backtrack(range(1, 10), [], n, k)
return self.combs | true | true |
f72538f645e6c0711034952f80384b2e12169de0 | 1,211 | py | Python | src/Aula19ex94UneDicLista.py | maberf/python | 0d36f1586c5f52081c2b27d42a1d37cee13116b0 | [
"MIT"
] | null | null | null | src/Aula19ex94UneDicLista.py | maberf/python | 0d36f1586c5f52081c2b27d42a1d37cee13116b0 | [
"MIT"
] | null | null | null | src/Aula19ex94UneDicLista.py | maberf/python | 0d36f1586c5f52081c2b27d42a1d37cee13116b0 | [
"MIT"
] | null | null | null | #CADASTRO DE PESSOAS em dicionário - AULA 19 EXERCÍCIO 94
#dados das pessos: nome, sexo e idade
#todos os dicionários numa lista
#Informar quantos cadastrados, média de idade, lista de mulheres e nomes de pessoas de idade acima da média
#
pessoa = dict()
grupo = list()
somaidades = media = 0
while True:
pessoa.clear() #limnpeza do dicionário senão dá erro nos laços
pessoa["nome"] = str(input('Nome: ')).strip()
pessoa["sexo"] = str(input('Sexo: [M/F] ')).strip().upper()
pessoa["idade"] = int(input('Idade: '))
grupo.append(pessoa.copy()) #cópia do dicionário para lista
cont = str(input('Continuar? [S/N] ')).strip().lower()
somaidades += pessoa["idade"]
if cont == 'n':
break
media = somaidades/len(grupo)
print('-'*50)
print(f'A) Pessoas cadastradas: {len(grupo)}')
print(f'B) Média de idade: {media:.2f} anos')
print(f'C) Mulheres cadastradas: ', end='')
for i in range(len(grupo)):
if grupo[i]["sexo"] == 'F':
print(f'{grupo[i]["nome"]} ', end='')
print()
print(f'D) Acima da média: ', end='')
for i in range(len(grupo)):
if grupo[i]["idade"] > media:
print(f'{grupo[i]["nome"]} {grupo[i]["idade"]} anos ', end='')
print()
print('-'*50)
| 35.617647 | 107 | 0.630058 |
# People-registry script: read name/sex/age records into a list of dicts,
# then report the count, mean age, registered women and above-average ages.
pessoa = dict()
grupo = list()
somaidades = media = 0
while True:
    pessoa.clear()  # reset between iterations; grupo keeps copies
    pessoa["nome"] = str(input('Nome: ')).strip()
    pessoa["sexo"] = str(input('Sexo: [M/F] ')).strip().upper()
    pessoa["idade"] = int(input('Idade: '))
    grupo.append(pessoa.copy())  # store a copy so entries don't alias each other
    cont = str(input('Continuar? [S/N] ')).strip().lower()
    somaidades += pessoa["idade"]
    if cont == 'n':
        break
media = somaidades/len(grupo)
print('-'*50)
print(f'A) Pessoas cadastradas: {len(grupo)}')
print(f'B) Média de idade: {media:.2f} anos')
print(f'C) Mulheres cadastradas: ', end='')
for i in range(len(grupo)):
    if grupo[i]["sexo"] == 'F':
        print(f'{grupo[i]["nome"]} ', end='')
print()
print(f'D) Acima da média: ', end='')
for i in range(len(grupo)):
    if grupo[i]["idade"] > media:
        print(f'{grupo[i]["nome"]} {grupo[i]["idade"]} anos ', end='')
print()
print('-'*50)
| true | true |
f72539689a05d85f216a3c81b99fb4e90b4b862f | 2,147 | py | Python | doc/ramonifyARA.py | kkutten1/ndregOld | 7a3f5426c5a592e46cb3ed14765ed1f4f302f4a6 | [
"Apache-2.0"
] | null | null | null | doc/ramonifyARA.py | kkutten1/ndregOld | 7a3f5426c5a592e46cb3ed14765ed1f4f302f4a6 | [
"Apache-2.0"
] | null | null | null | doc/ramonifyARA.py | kkutten1/ndregOld | 7a3f5426c5a592e46cb3ed14765ed1f4f302f4a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import json
from ndreg import *
import ndio.ramon as ndramon
import ndio.remote.neurodata as neurodata
"""
Here we show how to RAMONify Allen Reference Atlas data.
First we download annotation ontology from Allen Brain Atlas API.
It returns a JSON tree in which larger parent structures are divided into smaller children regions.
For example the "corpus callosum" parent has children "corpus callosum, anterior forceps", "genu of corpus callosum", "corpus callosum, body", etc
"""
url = "http://api.brain-map.org/api/v2/structure_graph_download/1.json"
jsonRaw = requests.get(url).content
jsonDict = json.loads(jsonRaw)
"""
Next we collect the names and ids of all of the regions.
Since our json data is a tree we can walk through it in arecursive manner.
Thus starting from the root...
"""
root = jsonDict['msg'][0]
"""
...we define a recursive function ...
"""
#leafList = []
def getChildrenNames(parent, childrenNames=None):
    """Recursively collect {id: name} for every descendant of *parent*.

    Walks the Allen Brain Atlas ontology tree, where each node is a dict
    with 'id', 'name' and a 'children' list of nodes of the same shape.
    The *parent* node itself is not included in the result.

    The accumulator previously defaulted to a shared mutable dict ({}),
    so repeated calls leaked entries from earlier walks; it now defaults
    to a fresh dict per call.
    """
    if childrenNames is None:
        childrenNames = {}
    for childIndex in range(len(parent['children'])):
        child = parent['children'][childIndex]
        childrenNames[child['id']] = child['name']
        childrenNames = getChildrenNames(child, childrenNames)
    return childrenNames
"""
... and collect all of the region names in a dictionary with the "id" field as keys.
"""
# Flatten the ontology tree into {region_id: region_name}.
regionDict = getChildrenNames(root)
# Leftover debug output, kept for reference:
#print(leafList)
#for key in regionDict.keys():
#    print('{0}, "{1}"'.format(key, regionDict[key]))
#print(regionDict)
#sys.exit()
"""
Next we RAMONify the data
"""
token = "ara3_to_AutA"
channel = "annotation_draft"
# Connect to the NeuroData remote that stores the RAMON annotations.
nd = neurodata(hostname='synaptomes.neurodata.io/nd/')
# Post one RAMONGeneric object per ontology region, keyed by Allen region id.
for regionId in regionDict.keys():
    regionName = regionDict[regionId]
    kvpairs = {'name': regionName}
    ramonObj = ndramon.RAMONGeneric(id=regionId, resolution=0, kvpairs=kvpairs)
    try:
        nd.post_ramon(token, channel, ramonObj)
        print "Successfully posted ramon obj {0} for {1}".format(regionId, regionName)
    except:
        # NOTE(review): the bare except keeps the upload loop going on any
        # failure, but it also hides KeyboardInterrupt; consider
        # `except Exception` instead. (File is Python 2 -- print statements.)
        print "Failed to post ramon obj {0} for {1}".format(regionId, regionName)
| 31.115942 | 149 | 0.708896 |
import requests
import json
from ndreg import *
import ndio.ramon as ndramon
import ndio.remote.neurodata as neurodata
"""
Here we show how to RAMONify Allen Reference Atlas data.
First we download annotation ontology from Allen Brain Atlas API.
It returns a JSON tree in which larger parent structures are divided into smaller children regions.
For example the "corpus callosum" parent has children "corpus callosum, anterior forceps", "genu of corpus callosum", "corpus callosum, body", etc
"""
url = "http://api.brain-map.org/api/v2/structure_graph_download/1.json"
jsonRaw = requests.get(url).content
jsonDict = json.loads(jsonRaw)
"""
Next we collect the names and ids of all of the regions.
Since our json data is a tree we can walk through it in arecursive manner.
Thus starting from the root...
"""
root = jsonDict['msg'][0]
"""
...we define a recursive function ...
"""
def getChildrenNames(parent, childrenNames=None):
    """Recursively collect {id: name} for every descendant of *parent*
    (a nested dict tree with 'id', 'name' and 'children' keys).

    The accumulator previously defaulted to a shared mutable dict ({}),
    leaking entries across calls; it now defaults to a fresh dict.
    """
    if childrenNames is None:
        childrenNames = {}
    for childIndex in range(len(parent['children'])):
        child = parent['children'][childIndex]
        childrenNames[child['id']] = child['name']
        childrenNames = getChildrenNames(child, childrenNames)
    return childrenNames
"""
... and collect all of the region names in a dictionary with the "id" field as keys.
"""
regionDict = getChildrenNames(root)
"""
Next we RAMONify the data
"""
token = "ara3_to_AutA"
channel = "annotation_draft"
nd = neurodata(hostname='synaptomes.neurodata.io/nd/')
for regionId in regionDict.keys():
regionName = regionDict[regionId]
kvpairs = {'name': regionName}
ramonObj = ndramon.RAMONGeneric(id=regionId, resolution=0, kvpairs=kvpairs)
try:
nd.post_ramon(token, channel, ramonObj)
print "Successfully posted ramon obj {0} for {1}".format(regionId, regionName)
except:
print "Failed to post ramon obj {0} for {1}".format(regionId, regionName)
| false | true |
f7253987a63d4e5b5b25935dff1c1a6614e9432f | 804 | py | Python | app.py | computercavemen/web-scraping-challenge | 3a68fd3bd6ddfcfc3fbd033d6f1d472ab6a76a10 | [
"ADSL"
] | null | null | null | app.py | computercavemen/web-scraping-challenge | 3a68fd3bd6ddfcfc3fbd033d6f1d472ab6a76a10 | [
"ADSL"
] | null | null | null | app.py | computercavemen/web-scraping-challenge | 3a68fd3bd6ddfcfc3fbd033d6f1d472ab6a76a10 | [
"ADSL"
] | null | null | null | from flask import Flask, render_template, redirect
from jinja2 import Template
from splinter import browser
from flask_pymongo import PyMongo
import scrape_mars
# Create an instance of our Flask app.
app = Flask(__name__)
# Use flask_pymongo to set up mongo connection
app.config["MONGO_URI"] = "mongodb://localhost:27017/mars_app"
mongo = PyMongo(app)
mongo.db.mars_page.drop()
# Set route
@app.route("/")
def home():
mars_page = mongo.db.mars_page.find_one()
return render_template("index.html", mars_page = mars_page)
# Set route
@app.route("/scrape")
def scraper():
mars_page = mongo.db.mars_page
mars_page_data = scrape_mars.scrape()
mars_page.update({}, mars_page_data, upsert=True)
return redirect("/", code=302)
if __name__ == "__main__":
app.run(debug=True)
| 23.647059 | 63 | 0.732587 | from flask import Flask, render_template, redirect
from jinja2 import Template
from splinter import browser
from flask_pymongo import PyMongo
import scrape_mars
app = Flask(__name__)
app.config["MONGO_URI"] = "mongodb://localhost:27017/mars_app"
mongo = PyMongo(app)
mongo.db.mars_page.drop()
@app.route("/")
def home():
mars_page = mongo.db.mars_page.find_one()
return render_template("index.html", mars_page = mars_page)
@app.route("/scrape")
def scraper():
mars_page = mongo.db.mars_page
mars_page_data = scrape_mars.scrape()
mars_page.update({}, mars_page_data, upsert=True)
return redirect("/", code=302)
if __name__ == "__main__":
app.run(debug=True)
| true | true |
f72539f2e8f0879fdd0b4a7738c5479966731504 | 14,082 | py | Python | python/ccxt/async_support/base/exchange.py | Beaxy/ccxt | 4ade917da0f202dfbe614240223ab74832a3fc0d | [
"MIT"
] | null | null | null | python/ccxt/async_support/base/exchange.py | Beaxy/ccxt | 4ade917da0f202dfbe614240223ab74832a3fc0d | [
"MIT"
] | null | null | null | python/ccxt/async_support/base/exchange.py | Beaxy/ccxt | 4ade917da0f202dfbe614240223ab74832a3fc0d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
__version__ = '1.40.88'
# -----------------------------------------------------------------------------
import asyncio
import concurrent.futures
import socket
import certifi
import aiohttp
import ssl
import sys
import yarl
# -----------------------------------------------------------------------------
from ccxt.async_support.base.throttle import throttle
# -----------------------------------------------------------------------------
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import NotSupported
# -----------------------------------------------------------------------------
from ccxt.base.exchange import Exchange as BaseExchange
# -----------------------------------------------------------------------------
__all__ = [
'BaseExchange',
'Exchange',
]
# -----------------------------------------------------------------------------
class Exchange(BaseExchange):
    def __init__(self, config={}):
        # The asyncio loop can be injected via config; otherwise reuse the
        # current event loop.
        if 'asyncio_loop' in config:
            self.asyncio_loop = config['asyncio_loop']
        self.asyncio_loop = self.asyncio_loop or asyncio.get_event_loop()
        self.aiohttp_trust_env = config.get('aiohttp_trust_env', self.aiohttp_trust_env)
        self.verify = config.get('verify', self.verify)
        # If the caller supplied a session we must not close it ourselves.
        self.own_session = 'session' not in config
        self.cafile = config.get('cafile', certifi.where())
        super(Exchange, self).__init__(config)
        self.init_rest_rate_limiter()
        # State used by load_markets() to coalesce concurrent loads.
        self.markets_loading = None
        self.reloading_markets = False
    def init_rest_rate_limiter(self):
        # Token-bucket throttler bound to our event loop; awaited by fetch2().
        self.throttle = throttle(self.extend({
            'loop': self.asyncio_loop,
        }, self.tokenBucket))
    def __del__(self):
        # A surviving session at GC time means close() was never awaited;
        # warn instead of leaking the aiohttp connector silently.
        if self.session is not None:
            self.logger.warning(self.id + " requires to release all resources with an explicit call to the .close() coroutine. If you are using the exchange instance with async coroutines, add exchange.close() to your code into a place when you're done with the exchange and don't need the exchange instance anymore (at the end of your async coroutine).")
    if sys.version_info >= (3, 5):
        # Async context-manager support: `async with exchange:` opens the
        # HTTP session on entry and closes it on exit.
        async def __aenter__(self):
            self.open()
            return self
        async def __aexit__(self, exc_type, exc, tb):
            await self.close()
    def open(self):
        """Lazily create the aiohttp session; no-op if one already exists
        or was supplied by the caller in the config."""
        if self.own_session and self.session is None:
            # Create our SSL context object with our CA cert file
            # (self.verify may also be False/an ssl.SSLContext passed through).
            context = ssl.create_default_context(cafile=self.cafile) if self.verify else self.verify
            # Pass this SSL context to aiohttp and create a TCPConnector
            connector = aiohttp.TCPConnector(ssl=context, loop=self.asyncio_loop, enable_cleanup_closed=True)
            self.session = aiohttp.ClientSession(loop=self.asyncio_loop, connector=connector, trust_env=self.aiohttp_trust_env)
async def close(self):
if self.session is not None:
if self.own_session:
await self.session.close()
self.session = None
    async def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """A better wrapper over request for deferred signing"""
        # Throttle first so the request signature's timestamp is generated
        # as close to the actual request as possible.
        if self.enableRateLimit:
            await self.throttle(self.rateLimit)
        self.lastRestRequestTimestamp = self.milliseconds()
        request = self.sign(path, api, method, params, headers, body)
        return await self.fetch(request['url'], request['method'], request['headers'], request['body'])
    async def fetch(self, url, method='GET', headers=None, body=None):
        """Perform a HTTP request and return decoded JSON data"""
        request_headers = self.prepare_request_headers(headers)
        url = self.proxy + url
        if self.verbose:
            self.print("\nRequest:", method, url, headers, body)
        self.logger.debug("%s %s, Request: %s %s", method, url, headers, body)
        request_body = body
        encoded_body = body.encode() if body else None
        self.open()
        # Resolve e.g. session.get / session.post from the HTTP verb.
        session_method = getattr(self.session, method.lower())
        http_response = None
        http_status_code = None
        http_status_text = None
        json_response = None
        try:
            # yarl.URL(..., encoded=True) prevents aiohttp from re-encoding
            # an already-signed URL (which would break request signatures).
            async with session_method(yarl.URL(url, encoded=True),
                                      data=encoded_body,
                                      headers=request_headers,
                                      timeout=(self.timeout / 1000),
                                      proxy=self.aiohttp_proxy) as response:
                http_response = await response.text()
                http_response = http_response.strip()
                http_status_code = response.status
                http_status_text = response.reason
                json_response = self.parse_json(http_response)
                headers = response.headers
                # Optionally retain the last raw/parsed response for debugging.
                if self.enableLastHttpResponse:
                    self.last_http_response = http_response
                if self.enableLastResponseHeaders:
                    self.last_response_headers = headers
                if self.enableLastJsonResponse:
                    self.last_json_response = json_response
                if self.verbose:
                    self.print("\nResponse:", method, url, http_status_code, headers, http_response)
                self.logger.debug("%s %s, Response: %s %s %s", method, url, http_status_code, headers, http_response)
        # Map transport-level failures onto ccxt's exception hierarchy.
        except socket.gaierror as e:
            details = ' '.join([self.id, method, url])
            raise ExchangeNotAvailable(details) from e
        except (concurrent.futures.TimeoutError, asyncio.TimeoutError) as e:
            details = ' '.join([self.id, method, url])
            raise RequestTimeout(details) from e
        except aiohttp.ClientConnectionError as e:
            details = ' '.join([self.id, method, url])
            raise ExchangeNotAvailable(details) from e
        except aiohttp.ClientError as e:  # base exception class
            details = ' '.join([self.id, method, url])
            raise ExchangeError(details) from e
        self.handle_errors(http_status_code, http_status_text, url, method, headers, http_response, json_response, request_headers, request_body)
        self.handle_http_status_code(http_status_code, http_status_text, url, method, http_response)
        if json_response is not None:
            return json_response
        if self.is_text_response(headers):
            return http_response
        # NOTE(review): `response` is referenced after the `async with` block
        # has released it; `response.content` is the (now closed) stream
        # reader -- confirm this fallback is ever reached/usable.
        return response.content
async def load_markets_helper(self, reload=False, params={}):
if not reload:
if self.markets:
if not self.markets_by_id:
return self.set_markets(self.markets)
return self.markets
currencies = None
if self.has['fetchCurrencies']:
currencies = await self.fetch_currencies()
markets = await self.fetch_markets(params)
return self.set_markets(markets, currencies)
    async def load_markets(self, reload=False, params={}):
        """Return cached markets or (re)load them, coalescing concurrent
        callers onto a single in-flight load task."""
        if (reload and not self.reloading_markets) or not self.markets_loading:
            self.reloading_markets = True
            coroutine = self.load_markets_helper(reload, params)
            # coroutines can only be awaited once so we wrap it in a task
            self.markets_loading = asyncio.ensure_future(coroutine)
        try:
            result = await self.markets_loading
        except Exception as e:
            # Reset the shared state so the next caller retries the load.
            self.reloading_markets = False
            self.markets_loading = None
            raise e
        self.reloading_markets = False
        return result
async def fetch_fees(self):
trading = {}
funding = {}
if self.has['fetchTradingFees']:
trading = await self.fetch_trading_fees()
if self.has['fetchFundingFees']:
funding = await self.fetch_funding_fees()
return {
'trading': trading,
'funding': funding,
}
    async def load_fees(self, reload=False):
        """Return the cached fee structure, fetching it on first use or when
        reload is True; fetched values are deep-merged over the defaults."""
        if not reload:
            if self.loaded_fees != Exchange.loaded_fees:
                # Already diverged from the class-level default: cached.
                return self.loaded_fees
        self.loaded_fees = self.deep_extend(self.loaded_fees, await self.fetch_fees())
        return self.loaded_fees
    async def fetch_markets(self, params={}):
        """Base implementation: return the statically-configured markets
        (subclasses override this with a real API call)."""
        # markets are returned as a list
        # currencies are returned as a dict
        # this is for historical reasons
        # and may be changed for consistency later
        return self.to_array(self.markets)
    async def fetch_currencies(self, params={}):
        """Base implementation: return the statically-configured currencies
        (subclasses override this with a real API call)."""
        # markets are returned as a list
        # currencies are returned as a dict
        # this is for historical reasons
        # and may be changed for consistency later
        return self.currencies
    async def fetch_status(self, params={}):
        """Return the exchange status structure, refreshing its 'updated'
        timestamp from fetch_time() when the exchange supports it."""
        if self.has['fetchTime']:
            updated = await self.fetch_time(params)
            self.status['updated'] = updated
        return self.status
    async def fetch_order_status(self, id, symbol=None, params={}):
        """Fetch an order and return only its 'status' field."""
        order = await self.fetch_order(id, symbol, params)
        return order['status']
    async def fetch_partial_balance(self, part, params={}):
        """Return one sub-section of the balance keyed by *part*
        (presumably 'free'/'used'/'total' -- confirm against callers)."""
        balance = await self.fetch_balance(params)
        return balance[part]
    async def fetch_l2_order_book(self, symbol, limit=None, params={}):
        """Fetch the order book and aggregate it to level 2 (one row per
        price level); bids sorted descending, asks ascending by price."""
        orderbook = await self.fetch_order_book(symbol, limit, params)
        return self.extend(orderbook, {
            'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
            'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
        })
    async def perform_order_book_request(self, market, limit=None, params={}):
        """Abstract hook: issue the raw order-book request (subclasses)."""
        raise NotSupported(self.id + ' performOrderBookRequest not supported yet')
    async def fetch_order_book(self, symbol, limit=None, params={}):
        """Fetch and parse the order book for *symbol* via the exchange's
        perform_order_book_request() implementation."""
        await self.load_markets()
        market = self.market(symbol)
        orderbook = await self.perform_order_book_request(market, limit, params)
        return self.parse_order_book(orderbook, market, limit, params)
    async def fetch_ohlcvc(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Emulate OHLCV-with-trade-count candles by aggregating public
        trades (requires the exchange to support fetchTrades)."""
        if not self.has['fetchTrades']:
            raise NotSupported('fetch_ohlcv() not implemented yet')
        await self.load_markets()
        trades = await self.fetch_trades(symbol, since, limit, params)
        return self.build_ohlcvc(trades, timeframe, since, limit)
    async def fetchOHLCVC(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """CamelCase alias of fetch_ohlcvc."""
        return await self.fetch_ohlcvc(symbol, timeframe, since, limit, params)
    async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """OHLCV candles: the OHLCVC result with the trailing count dropped."""
        ohlcvs = await self.fetch_ohlcvc(symbol, timeframe, since, limit, params)
        return [ohlcv[0:-1] for ohlcv in ohlcvs]
    async def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """CamelCase alias of fetch_ohlcv."""
        return await self.fetch_ohlcv(symbol, timeframe, since, limit, params)
    async def fetch_full_tickers(self, symbols=None, params={}):
        """Alias of fetch_tickers kept for backward compatibility."""
        return await self.fetch_tickers(symbols, params)
    async def edit_order(self, id, symbol, *args):
        """Emulate order editing as cancel + create.

        NOTE(review): not atomic -- the cancel can succeed while the
        re-create fails, leaving no open order."""
        if not self.enableRateLimit:
            raise ExchangeError('updateOrder() requires enableRateLimit = true')
        await self.cancel_order(id, symbol)
        return await self.create_order(symbol, *args)
    async def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Abstract: must be implemented by the exchange subclass."""
        raise NotSupported('create_order() not supported yet')
    async def cancel_order(self, id, symbol=None, params={}):
        """Abstract: must be implemented by the exchange subclass."""
        raise NotSupported('cancel_order() not supported yet')
    async def fetch_trading_fees(self, params={}):
        """Stub: concrete exchange subclasses implement fee retrieval."""
        raise NotSupported('fetch_trading_fees() not supported yet')
    async def fetch_trading_fee(self, symbol, params={}):
        """Fetch the trading fee for one *symbol*.

        Falls back to fetchTradingFees (all symbols) when the exchange has it.
        """
        if not self.has['fetchTradingFees']:
            raise NotSupported('fetch_trading_fee() not supported yet')
        return await self.fetch_trading_fees(params)
async def load_trading_limits(self, symbols=None, reload=False, params={}):
if self.has['fetchTradingLimits']:
if reload or not('limitsLoaded' in list(self.options.keys())):
response = await self.fetch_trading_limits(symbols)
for i in range(0, len(symbols)):
symbol = symbols[i]
self.markets[symbol] = self.deep_extend(self.markets[symbol], response[symbol])
self.options['limitsLoaded'] = self.milliseconds()
return self.markets
async def load_accounts(self, reload=False, params={}):
if reload:
self.accounts = await self.fetch_accounts(params)
else:
if self.accounts:
return self.accounts
else:
self.accounts = await self.fetch_accounts(params)
self.accountsById = self.index_by(self.accounts, 'id')
return self.accounts
    async def fetch_ticker(self, symbol, params={}):
        """Stub: concrete exchange subclasses implement ticker retrieval."""
        raise NotSupported('fetch_ticker() not supported yet')
    async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
        """Stub: concrete exchange subclasses implement transaction history."""
        raise NotSupported('fetch_transactions() is not supported yet')
    async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
        """Stub: concrete exchange subclasses implement deposit history."""
        raise NotSupported('fetch_deposits() is not supported yet')
    async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
        """Stub: concrete exchange subclasses implement withdrawal history."""
        raise NotSupported('fetch_withdrawals() is not supported yet')
    async def fetch_deposit_address(self, code=None, since=None, limit=None, params={}):
        """Stub: concrete exchange subclasses implement deposit addresses."""
        raise NotSupported('fetch_deposit_address() is not supported yet')
async def sleep(self, milliseconds):
return await asyncio.sleep(milliseconds / 1000)
| 42.802432 | 355 | 0.619656 |
__version__ = '1.40.88'
import asyncio
import concurrent.futures
import socket
import certifi
import aiohttp
import ssl
import sys
import yarl
from ccxt.async_support.base.throttle import throttle
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import NotSupported
from ccxt.base.exchange import Exchange as BaseExchange
__all__ = [
'BaseExchange',
'Exchange',
]
class Exchange(BaseExchange):
    """Asynchronous (asyncio + aiohttp) flavour of the ccxt base Exchange.

    Mirrors the synchronous base class, but all HTTP I/O goes through a
    shared aiohttp.ClientSession driven by a single asyncio event loop.
    """
    def __init__(self, config={}):
        if 'asyncio_loop' in config:
            self.asyncio_loop = config['asyncio_loop']
        self.asyncio_loop = self.asyncio_loop or asyncio.get_event_loop()
        self.aiohttp_trust_env = config.get('aiohttp_trust_env', self.aiohttp_trust_env)
        self.verify = config.get('verify', self.verify)
        # Only close the session ourselves if we created it (none injected).
        self.own_session = 'session' not in config
        self.cafile = config.get('cafile', certifi.where())
        super(Exchange, self).__init__(config)
        self.init_rest_rate_limiter()
        self.markets_loading = None
        self.reloading_markets = False
    def init_rest_rate_limiter(self):
        # Token-bucket throttler shared by all REST calls on this instance.
        self.throttle = throttle(self.extend({
            'loop': self.asyncio_loop,
        }, self.tokenBucket))
    def __del__(self):
        # The aiohttp session cannot be closed from a destructor (it needs
        # the event loop), so the best we can do here is warn about the leak.
        if self.session is not None:
            self.logger.warning(self.id + " requires to release all resources with an explicit call to the .close() coroutine. If you are using the exchange instance with async coroutines, add exchange.close() to your code into a place when you're done with the exchange and don't need the exchange instance anymore (at the end of your async coroutine).")
    if sys.version_info >= (3, 5):
        # 'async with exchange:' support on Python 3.5+.
        async def __aenter__(self):
            self.open()
            return self
        async def __aexit__(self, exc_type, exc, tb):
            await self.close()
    def open(self):
        # Lazily create the aiohttp session (and SSL context) on first use.
        if self.own_session and self.session is None:
            context = ssl.create_default_context(cafile=self.cafile) if self.verify else self.verify
            connector = aiohttp.TCPConnector(ssl=context, loop=self.asyncio_loop, enable_cleanup_closed=True)
            self.session = aiohttp.ClientSession(loop=self.asyncio_loop, connector=connector, trust_env=self.aiohttp_trust_env)
    async def close(self):
        if self.session is not None:
            if self.own_session:
                await self.session.close()
            self.session = None
    async def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Throttled, signed request helper: sign() then fetch()."""
        if self.enableRateLimit:
            await self.throttle(self.rateLimit)
        self.lastRestRequestTimestamp = self.milliseconds()
        request = self.sign(path, api, method, params, headers, body)
        return await self.fetch(request['url'], request['method'], request['headers'], request['body'])
    async def fetch(self, url, method='GET', headers=None, body=None):
        """Perform one HTTP request and return the decoded response.

        Maps aiohttp/socket errors onto ccxt exception classes and feeds
        the response through handle_errors()/handle_http_status_code().
        """
        request_headers = self.prepare_request_headers(headers)
        url = self.proxy + url
        if self.verbose:
            self.print("\nRequest:", method, url, headers, body)
            self.logger.debug("%s %s, Request: %s %s", method, url, headers, body)
        request_body = body
        encoded_body = body.encode() if body else None
        self.open()
        session_method = getattr(self.session, method.lower())
        http_response = None
        http_status_code = None
        http_status_text = None
        json_response = None
        try:
            async with session_method(yarl.URL(url, encoded=True),
                                      data=encoded_body,
                                      headers=request_headers,
                                      timeout=(self.timeout / 1000),
                                      proxy=self.aiohttp_proxy) as response:
                http_response = await response.text()
                http_response = http_response.strip()
                http_status_code = response.status
                http_status_text = response.reason
                json_response = self.parse_json(http_response)
                headers = response.headers
                if self.enableLastHttpResponse:
                    self.last_http_response = http_response
                if self.enableLastResponseHeaders:
                    self.last_response_headers = headers
                if self.enableLastJsonResponse:
                    self.last_json_response = json_response
                if self.verbose:
                    self.print("\nResponse:", method, url, http_status_code, headers, http_response)
                self.logger.debug("%s %s, Response: %s %s %s", method, url, http_status_code, headers, http_response)
        except socket.gaierror as e:
            details = ' '.join([self.id, method, url])
            raise ExchangeNotAvailable(details) from e
        except (concurrent.futures.TimeoutError, asyncio.TimeoutError) as e:
            details = ' '.join([self.id, method, url])
            raise RequestTimeout(details) from e
        except aiohttp.ClientConnectionError as e:
            details = ' '.join([self.id, method, url])
            raise ExchangeNotAvailable(details) from e
        except aiohttp.ClientError as e:
            details = ' '.join([self.id, method, url])
            raise ExchangeError(details) from e
        self.handle_errors(http_status_code, http_status_text, url, method, headers, http_response, json_response, request_headers, request_body)
        self.handle_http_status_code(http_status_code, http_status_text, url, method, http_response)
        if json_response is not None:
            return json_response
        if self.is_text_response(headers):
            return http_response
        # NOTE(review): reached only for non-JSON, non-text bodies; 'response'
        # is the already-exited aiohttp response object here — confirm intent.
        return response.content
    async def load_markets_helper(self, reload=False, params={}):
        if not reload:
            if self.markets:
                if not self.markets_by_id:
                    return self.set_markets(self.markets)
                return self.markets
        currencies = None
        if self.has['fetchCurrencies']:
            currencies = await self.fetch_currencies()
        markets = await self.fetch_markets(params)
        return self.set_markets(markets, currencies)
    async def load_markets(self, reload=False, params={}):
        """Load (or reload) markets; concurrent callers share one task."""
        if (reload and not self.reloading_markets) or not self.markets_loading:
            self.reloading_markets = True
            coroutine = self.load_markets_helper(reload, params)
            # Single-flight: all awaiters share the same in-progress future.
            self.markets_loading = asyncio.ensure_future(coroutine)
        try:
            result = await self.markets_loading
        except Exception as e:
            self.reloading_markets = False
            self.markets_loading = None
            raise e
        self.reloading_markets = False
        return result
    async def fetch_fees(self):
        trading = {}
        funding = {}
        if self.has['fetchTradingFees']:
            trading = await self.fetch_trading_fees()
        if self.has['fetchFundingFees']:
            funding = await self.fetch_funding_fees()
        return {
            'trading': trading,
            'funding': funding,
        }
    async def load_fees(self, reload=False):
        if not reload:
            if self.loaded_fees != Exchange.loaded_fees:
                return self.loaded_fees
        self.loaded_fees = self.deep_extend(self.loaded_fees, await self.fetch_fees())
        return self.loaded_fees
    async def fetch_markets(self, params={}):
        return self.to_array(self.markets)
    async def fetch_currencies(self, params={}):
        return self.currencies
    async def fetch_status(self, params={}):
        if self.has['fetchTime']:
            updated = await self.fetch_time(params)
            self.status['updated'] = updated
        return self.status
    async def fetch_order_status(self, id, symbol=None, params={}):
        order = await self.fetch_order(id, symbol, params)
        return order['status']
    async def fetch_partial_balance(self, part, params={}):
        balance = await self.fetch_balance(params)
        return balance[part]
    async def fetch_l2_order_book(self, symbol, limit=None, params={}):
        """Level-2 book: bids/asks aggregated by price and sorted."""
        orderbook = await self.fetch_order_book(symbol, limit, params)
        return self.extend(orderbook, {
            'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
            'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
        })
    async def perform_order_book_request(self, market, limit=None, params={}):
        raise NotSupported(self.id + ' performOrderBookRequest not supported yet')
    async def fetch_order_book(self, symbol, limit=None, params={}):
        await self.load_markets()
        market = self.market(symbol)
        orderbook = await self.perform_order_book_request(market, limit, params)
        return self.parse_order_book(orderbook, market, limit, params)
    async def fetch_ohlcvc(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Build OHLCVC candles by aggregating public trades."""
        if not self.has['fetchTrades']:
            raise NotSupported('fetch_ohlcv() not implemented yet')
        await self.load_markets()
        trades = await self.fetch_trades(symbol, since, limit, params)
        return self.build_ohlcvc(trades, timeframe, since, limit)
    async def fetchOHLCVC(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        return await self.fetch_ohlcvc(symbol, timeframe, since, limit, params)
    async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        # Drop the trailing trade-count column of each OHLCVC candle.
        ohlcvs = await self.fetch_ohlcvc(symbol, timeframe, since, limit, params)
        return [ohlcv[0:-1] for ohlcv in ohlcvs]
    async def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        return await self.fetch_ohlcv(symbol, timeframe, since, limit, params)
    async def fetch_full_tickers(self, symbols=None, params={}):
        return await self.fetch_tickers(symbols, params)
    async def edit_order(self, id, symbol, *args):
        # Not atomic: cancel may succeed while the subsequent create fails.
        if not self.enableRateLimit:
            raise ExchangeError('updateOrder() requires enableRateLimit = true')
        await self.cancel_order(id, symbol)
        return await self.create_order(symbol, *args)
    async def create_order(self, symbol, type, side, amount, price=None, params={}):
        raise NotSupported('create_order() not supported yet')
    async def cancel_order(self, id, symbol=None, params={}):
        raise NotSupported('cancel_order() not supported yet')
    async def fetch_trading_fees(self, params={}):
        raise NotSupported('fetch_trading_fees() not supported yet')
    async def fetch_trading_fee(self, symbol, params={}):
        if not self.has['fetchTradingFees']:
            raise NotSupported('fetch_trading_fee() not supported yet')
        return await self.fetch_trading_fees(params)
    async def load_trading_limits(self, symbols=None, reload=False, params={}):
        if self.has['fetchTradingLimits']:
            if reload or not('limitsLoaded' in list(self.options.keys())):
                response = await self.fetch_trading_limits(symbols)
                for i in range(0, len(symbols)):
                    symbol = symbols[i]
                    self.markets[symbol] = self.deep_extend(self.markets[symbol], response[symbol])
                self.options['limitsLoaded'] = self.milliseconds()
        return self.markets
    async def load_accounts(self, reload=False, params={}):
        if reload:
            self.accounts = await self.fetch_accounts(params)
        else:
            if self.accounts:
                return self.accounts
            else:
                self.accounts = await self.fetch_accounts(params)
        self.accountsById = self.index_by(self.accounts, 'id')
        return self.accounts
    async def fetch_ticker(self, symbol, params={}):
        raise NotSupported('fetch_ticker() not supported yet')
    async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
        raise NotSupported('fetch_transactions() is not supported yet')
    async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
        raise NotSupported('fetch_deposits() is not supported yet')
    async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
        raise NotSupported('fetch_withdrawals() is not supported yet')
    async def fetch_deposit_address(self, code=None, since=None, limit=None, params={}):
        raise NotSupported('fetch_deposit_address() is not supported yet')
    async def sleep(self, milliseconds):
        return await asyncio.sleep(milliseconds / 1000)
| true | true |
f7253a302858e90ec0abb748061c7a21bef6c41b | 844 | py | Python | casbin/model/assertion.py | goodrain/pycasbin | 1a481ba1af7619e1cc7e83896581d14976927d80 | [
"Apache-2.0"
] | null | null | null | casbin/model/assertion.py | goodrain/pycasbin | 1a481ba1af7619e1cc7e83896581d14976927d80 | [
"Apache-2.0"
] | null | null | null | casbin/model/assertion.py | goodrain/pycasbin | 1a481ba1af7619e1cc7e83896581d14976927d80 | [
"Apache-2.0"
] | null | null | null | from casbin import log
class Assertion:
    """One section assertion of a casbin model (request, policy, role, ...)."""
    key = ""
    value = ""
    tokens = []
    policy = []
    rm = None
    def build_role_links(self, rm):
        """Feed every grouping-policy rule into the role manager *rm*."""
        self.rm = rm
        underscores = self.value.count("_")
        for rule in self.policy:
            if underscores < 2:
                raise RuntimeError('the number of "_" in role definition should be at least 2')
            if len(rule) < underscores:
                raise RuntimeError("grouping policy elements do not meet role definition")
            # Only 2-, 3- and 4-ary role definitions create links.
            if underscores <= 4:
                self.rm.add_link(*rule[:underscores])
        log.log_print("Role links for: " + self.key)
        self.rm.print_roles()
| 27.225806 | 95 | 0.520142 | from casbin import log
class Assertion:
    """One section assertion of a casbin model (request, policy, role, ...).

    Fix: key/value/tokens/policy/rm used to be mutable *class* attributes,
    so every Assertion instance shared the same tokens/policy lists; they
    are now initialized per instance in __init__.
    """
    def __init__(self):
        self.key = ""
        self.value = ""
        self.tokens = []
        self.policy = []
        self.rm = None
    def build_role_links(self, rm):
        """Feed every grouping-policy rule into the role manager *rm*.

        Raises RuntimeError when the role definition has fewer than two
        '_' placeholders or a rule has fewer elements than placeholders.
        """
        self.rm = rm
        count = self.value.count("_")
        for rule in self.policy:
            if count < 2:
                raise RuntimeError('the number of "_" in role definition should be at least 2')
            if len(rule) < count:
                raise RuntimeError("grouping policy elements do not meet role definition")
            if count == 2:
                self.rm.add_link(rule[0], rule[1])
            elif count == 3:
                self.rm.add_link(rule[0], rule[1], rule[2])
            elif count == 4:
                self.rm.add_link(rule[0], rule[1], rule[2], rule[3])
        log.log_print("Role links for: " + self.key)
        self.rm.print_roles()
| true | true |
f7253a8356e22c76da232f80805a7d9f35a574cc | 292 | py | Python | src/cli.py | nlitz88/ipmifan | 0c479298d8e7e8c9cd2a439e96dc182eca4866af | [
"MIT"
] | null | null | null | src/cli.py | nlitz88/ipmifan | 0c479298d8e7e8c9cd2a439e96dc182eca4866af | [
"MIT"
] | null | null | null | src/cli.py | nlitz88/ipmifan | 0c479298d8e7e8c9cd2a439e96dc182eca4866af | [
"MIT"
] | null | null | null |
# This file collects the functions that can be invoked from the command line to
# interact with the service; together they effectively form the service's CLI.
# NOTE: these may eventually move into controller.py. Created for planning.
f7253ad485c45ab64e6260d07fb70431869f4c85 | 4,179 | py | Python | third_party/rust_crates/vendor/ct-logs/build.py | zhangpf/fuchsia-rs | 903568f28ddf45f09157ead36d61b50322c9cf49 | [
"BSD-3-Clause"
] | 14 | 2020-10-25T05:48:36.000Z | 2021-09-20T02:46:20.000Z | third_party/rust_crates/vendor/ct-logs/build.py | zhangpf/fuchsia-rs | 903568f28ddf45f09157ead36d61b50322c9cf49 | [
"BSD-3-Clause"
] | 16 | 2020-09-04T19:01:11.000Z | 2021-05-28T03:23:09.000Z | third_party/rust_crates/vendor/ct-logs/build.py | ZVNexus/fuchsia | c5610ad15208208c98693618a79c705af935270c | [
"BSD-3-Clause"
] | 4 | 2020-12-28T17:04:45.000Z | 2022-03-12T03:20:44.000Z | # -*- coding: utf-8 -*-
import subprocess
import sys
import json
import hashlib
import time
import base64
from binascii import hexlify
from collections import namedtuple
HEADER = """//!
//! This library is automatically generated from Google's list of known CT
//! logs. Don't edit it.
//!
//! The generation is done deterministically so you can verify it
//! yourself by inspecting and re-running the generation process.
//!
#![forbid(unsafe_code,
unstable_features)]
#![deny(trivial_casts,
trivial_numeric_casts,
unused_import_braces,
unused_extern_crates,
unused_qualifications)]
pub static LOGS: [&sct::Log; %d] = ["""
FOOTER = """];"""
Log = namedtuple('Log', 'name url mmd operator key keyid json'.split())
LOG_LIST = 'https://www.gstatic.com/ct/log_list/log_list.json'
LOG_LIST_SIG = 'https://www.gstatic.com/ct/log_list/log_list.sig'
def fetch_and_check_sig():
    """Download Google's CT log list, verify its signature, and parse it.

    Shells out to curl and openssl; subprocess.check_call raises
    CalledProcessError if any of the steps (including verification) fails.
    Returns the parsed JSON document.
    """
    for cmd in (['curl', '-o', 'log_list.sig', LOG_LIST_SIG],
                ['curl', '-o', 'log_list.json', LOG_LIST],
                ['openssl', 'dgst', '-sha256', '-verify',
                 'log_list_pubkey.pem', '-signature', 'log_list.sig', 'log_list.json'],
                ):
        subprocess.check_call(cmd, stdout=subprocess.PIPE)
    # Close the file deterministically instead of leaking the handle.
    with open('log_list.json') as fh:
        return json.load(fh)
def convert_json(json):
    """Yield one Log tuple per usable entry in the downloaded log list.

    Entries whose 'disqualified_at' timestamp lies in the past are skipped.
    """
    operator_names = {}
    for op in json['operators']:
        operator_names[op['id']] = op['name']
    for entry in json['logs']:
        disqualified = entry.get('disqualified_at', None)
        if disqualified and time.time() > disqualified:
            continue
        key = base64.b64decode(entry['key'])
        yield Log(entry['description'],
                  entry['url'],
                  entry['maximum_merge_delay'],
                  ', '.join(operator_names[op] for op in entry['operated_by']),
                  key,
                  hashlib.sha256(key).digest(),
                  entry)
def commentify(cert):
    """Wrap *cert* in a C-style block comment, stripping leading '# ' markers."""
    body = '\n * '.join(
        line[2:] if line.startswith('# ') else line
        for line in cert.splitlines()
    )
    return '/*\n * ' + body + '\n */'
def convert_bytes(bb):
    """Render *bb* as a byte-string body of \\xNN escapes."""
    escaped = []
    for byte in bb:
        escaped.append('\\x{:02x}'.format(byte))
    return ''.join(escaped)
def raw_public_key(spki):
    """Extract the raw public-key bit string from a DER-encoded
    SubjectPublicKeyInfo blob (SEQUENCE { algorithm, BIT STRING }).

    Asserts on malformed input (wrong tags, trailing data, unused bits).
    """
    def take_byte(b):
        return b[0], b[1:]
    def take_len(b):
        # DER length: short form, or long form when the high bit is set.
        v, b = take_byte(b)
        if v & 0x80:
            r = 0
            for _ in range(v & 3):
                x, b = take_byte(b)
                r <<= 8
                r |= x
            return r, b
        return v, b
    def take_seq(b):
        tag, b = take_byte(b)
        ll, b = take_len(b)
        assert tag == 0x30
        return b[:ll], b[ll:]
    def take_bitstring(b):
        tag, b = take_byte(b)
        ll, b = take_len(b)
        bits, b = take_byte(b)
        assert tag == 0x03
        assert bits == 0
        return b[:ll - 1], b[ll - 1:]
    # Debug dump of the raw SPKI; close the handle deterministically
    # (was an unclosed open().write()).
    with open('key.bin', 'wb') as dump:
        dump.write(spki)
    spki, rest = take_seq(spki)
    assert len(rest) == 0
    algorithm, data = take_seq(spki)  # was 'id', which shadows the builtin
    keydata, rest = take_bitstring(data)
    assert len(rest) == 0
    return keydata
def print_log(log):
    """Print one CT log as a Rust `&sct::Log` struct literal, preceded by
    its raw JSON record rendered as a block comment."""
    comment = commentify(
        json.dumps(log.json,
                   indent=2,
                   separators=(',', ': '),
                   sort_keys=True)
    )
    # (removed unused 'id_up' local — leftover debug code)
    description = log.name
    url = log.url
    operator = log.operator
    key = convert_bytes(raw_public_key(log.key))
    keyid_hex = ', '.join('0x{:02x}'.format(x) for x in log.keyid)
    mmd = log.mmd
    # The %-template below pulls its fields from locals() by name.
    print(""" %(comment)s
    &sct::Log {
        description: "%(description)s",
        url: "%(url)s",
        operated_by: "%(operator)s",
        key: b"%(key)s",
        id: [ %(keyid_hex)s ],
        max_merge_delay: %(mmd)d,
    },
""" % locals())
if __name__ == '__main__':
    # On Windows, put stdout into binary mode so the emitted Rust source
    # keeps Unix line endings.
    if sys.platform == "win32":
        import os, msvcrt
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
    data = fetch_and_check_sig()
    # Deduplicate logs by key id, then emit them in a deterministic order.
    logs = {}
    for log in convert_json(data):
        logs[hexlify(log.keyid)] = log
    print(HEADER % len(list(logs.keys())))
    for id in sorted(logs.keys()):
        print_log(logs[id])
    print(FOOTER)
| 26.96129 | 87 | 0.554439 |
import subprocess
import sys
import json
import hashlib
import time
import base64
from binascii import hexlify
from collections import namedtuple
HEADER = """//!
//! This library is automatically generated from Google's list of known CT
//! logs. Don't edit it.
//!
//! The generation is done deterministically so you can verify it
//! yourself by inspecting and re-running the generation process.
//!
#![forbid(unsafe_code,
unstable_features)]
#![deny(trivial_casts,
trivial_numeric_casts,
unused_import_braces,
unused_extern_crates,
unused_qualifications)]
pub static LOGS: [&sct::Log; %d] = ["""
FOOTER = """];"""
Log = namedtuple('Log', 'name url mmd operator key keyid json'.split())
LOG_LIST = 'https://www.gstatic.com/ct/log_list/log_list.json'
LOG_LIST_SIG = 'https://www.gstatic.com/ct/log_list/log_list.sig'
def fetch_and_check_sig():
    """Download Google's CT log list, verify its signature, and parse it.

    Shells out to curl and openssl; subprocess.check_call raises
    CalledProcessError if any step (including verification) fails.
    NOTE(review): json.load(open(...)) leaks the file handle.
    """
    for cmd in (['curl', '-o', 'log_list.sig', LOG_LIST_SIG],
                ['curl', '-o', 'log_list.json', LOG_LIST],
                ['openssl', 'dgst', '-sha256', '-verify',
                 'log_list_pubkey.pem', '-signature', 'log_list.sig', 'log_list.json'],
                ):
        subprocess.check_call(cmd, stdout = subprocess.PIPE)
    return json.load(open('log_list.json'))
def convert_json(json):
    """Yield one Log tuple per usable entry in the downloaded log list.

    Entries whose 'disqualified_at' timestamp lies in the past are skipped.
    NOTE(review): the parameter shadows the stdlib 'json' module name.
    """
    operators = { v['id']: v['name'] for v in json['operators'] }
    for lj in json['logs']:
        operator = ', '.join(operators[op] for op in lj['operated_by'])
        key = base64.b64decode(lj['key'])
        keyid = hashlib.sha256(key).digest()
        disqualification = lj.get('disqualified_at', None)
        if disqualification and time.time() > disqualification:
            continue
        log = Log(lj['description'],
                  lj['url'],
                  lj['maximum_merge_delay'],
                  operator,
                  key,
                  keyid,
                  lj)
        yield log
def commentify(cert):
    """Turn *cert* into a C-style block comment, dropping '# ' prefixes."""
    cleaned = [line[2:] if line.startswith('# ') else line
               for line in cert.splitlines()]
    return '/*\n * ' + '\n * '.join(cleaned) + '\n */'
def convert_bytes(bb):
    """Render *bb* as a byte-string body of \\xNN escapes."""
    return ''.join(map('\\x{:02x}'.format, bb))
def raw_public_key(spki):
    """Extract the raw public-key bit string from a DER-encoded
    SubjectPublicKeyInfo blob (SEQUENCE { algorithm, BIT STRING }).

    Asserts on malformed input (wrong tags, trailing data, unused bits).
    """
    def take_byte(b):
        return b[0], b[1:]
    def take_len(b):
        # DER length: short form, or long form when the high bit is set.
        v, b = take_byte(b)
        if v & 0x80:
            r = 0
            for _ in range(v & 3):
                x, b = take_byte(b)
                r <<= 8
                r |= x
            return r, b
        return v, b
    def take_seq(b):
        tag, b = take_byte(b)
        ll, b = take_len(b)
        assert tag == 0x30
        return b[:ll], b[ll:]
    def take_bitstring(b):
        tag, b = take_byte(b)
        ll, b = take_len(b)
        bits, b = take_byte(b)
        assert tag == 0x03
        assert bits == 0
        return b[:ll-1], b[ll-1:]
    # NOTE(review): debug dump that leaves the file handle unclosed.
    open('key.bin', 'wb').write(spki)
    spki, rest = take_seq(spki)
    assert len(rest) == 0
    id, data = take_seq(spki)
    keydata, rest = take_bitstring(data)
    assert len(rest) == 0
    return keydata
def print_log(log):
    """Print one CT log as a Rust `&sct::Log` struct literal, preceded by
    its raw JSON record rendered as a block comment."""
    comment = commentify(
        json.dumps(log.json,
                   indent = 2,
                   separators = (',', ': '),
                   sort_keys = True)
    )
    # NOTE(review): id_up is never used below; looks like leftover debug code.
    id_up = hexlify(log.key).upper()[:16]
    description = log.name
    url = log.url
    operator = log.operator
    key = convert_bytes(raw_public_key(log.key))
    keyid_hex = ', '.join('0x{:02x}'.format(x) for x in log.keyid)
    mmd = log.mmd
    # The %-template below pulls its fields from locals() by name.
    print(""" %(comment)s
    &sct::Log {
        description: "%(description)s",
        url: "%(url)s",
        operated_by: "%(operator)s",
        key: b"%(key)s",
        id: [ %(keyid_hex)s ],
        max_merge_delay: %(mmd)d,
    },
""" % locals())
if __name__ == '__main__':
    # On Windows, put stdout into binary mode so the emitted Rust source
    # keeps Unix line endings.
    if sys.platform == "win32":
        import os, msvcrt
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
    data = fetch_and_check_sig()
    # Deduplicate logs by key id, then emit them in a deterministic order.
    logs = {}
    for log in convert_json(data):
        logs[hexlify(log.keyid)] = log
    print(HEADER % len(list(logs.keys())))
    for id in sorted(logs.keys()):
        print_log(logs[id])
    print(FOOTER)
| true | true |
f7253effa72e1fa6a01a0b839772bc4261a78df2 | 5,904 | py | Python | fixture/contact.py | talareq/jenkins | 6371b1faedf1990b8d1de57392dff5f57d239246 | [
"Apache-2.0"
] | null | null | null | fixture/contact.py | talareq/jenkins | 6371b1faedf1990b8d1de57392dff5f57d239246 | [
"Apache-2.0"
] | null | null | null | fixture/contact.py | talareq/jenkins | 6371b1faedf1990b8d1de57392dff5f57d239246 | [
"Apache-2.0"
] | null | null | null | from model.formfiller import Contact
import re
class ContactHelper:
def __init__(self, app):
self.app = app
def add_new_contact(self, contact):
wd = self.app.wd
if not len(wd.find_elements_by_name("searchstring")) > 0:
self.app.open_home_page()
# add mew contact
wd.find_element_by_xpath("//div[@id='nav']//a[.='nowy wpis']").click()
self.fill_contact_form(contact)
# accept
wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
self.contact_cache = None
def delete_first_contact(self):
wd = self.app.wd
self.delete_contact_by_index(0)
def select_contact_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_name("selected[]")[index].click()
def delete_contact_by_index(self, index):
wd = self.app.wd
if not len(wd.find_elements_by_name("searchstring")) > 0:
self.app.open_home_page()
self.select_contact_by_index(index)
#submit deletion
wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
wd.switch_to_alert().accept()
self.contact_cache = None
def modify_first_contact(self):
wd = self.app.wd
self.modify_contact_by_index(0)
def modify_contact_by_index(self, index, contact):
wd = self.app.wd
if not len(wd.find_elements_by_name("searchstring")) > 0:
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cells = row.find_elements_by_tag_name("td")
cells[7].click()
self.fill_contact_form(contact)
#accept changes
wd.find_element_by_xpath("//div[@id='content']/form[1]/input[22]").click()
self.contact_cache = None
def count(self):
wd = self.app.wd
self.app.open_home_page()
return len(wd.find_elements_by_name("selected[]"))
def fill_contact_form(self, contact):
wd = self.app.wd
self.change_field_value("firstname", contact.firstname)
self.change_field_value("lastname", contact.lastname)
self.change_field_value("homephone", contact.homephone)
self.change_field_value("mobilephone", contact.mobilephone)
self.change_field_value("workphone", contact.workphone)
self.change_field_value("secondaryphone", contact.secondaryphone)
def change_field_value(self, field_name, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_name).click()
wd.find_element_by_name(field_name).clear()
wd.find_element_by_name(field_name).send_keys(text)
contact_cache = None
def get_contact_list(self):
if self.contact_cache is None:
wd = self.app.wd
self.app.open_home_page()
self.contact_cache = []
for row in wd.find_elements_by_name("entry"):
cells = row.find_elements_by_tag_name("td")
firstname = cells[2].text
lastname = cells[1].text
id = cells[0].find_element_by_tag_name("input").get_attribute("value")
all_phones = cells[5].text
adress = cells[3].text
all_emails = cells[4].text
self.contact_cache.append(Contact(firstname=firstname, lastname=lastname,
id=id, all_phones_from_home_page=all_phones, all_emails_from_home_page=all_emails, adress=adress))
return list(self.contact_cache)
def open_contact_to_edit_by_index(self, index):
wd = self.app.wd
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cells = row.find_elements_by_tag_name("td")[7]
cells.find_element_by_tag_name("a").click()
def open_contact_to_view_by_index(self, index):
wd = self.app.wd
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cells = row.find_elements_by_tag_name("td")[6]
cells.find_element_by_tag_name("a").click()
def get_contact_info_from_edit_page(self, index):
wd = self.app.wd
self.open_contact_to_edit_by_index(index)
firstname = wd.find_element_by_name("firstname").get_attribute("value")
lastname = wd.find_element_by_name("lastname").get_attribute("value")
id = wd.find_element_by_name("id").get_attribute("value")
homephone = wd.find_element_by_name("home").get_attribute("value")
workphone = wd.find_element_by_name("work").get_attribute("value")
mobilephone = wd.find_element_by_name("mobile").get_attribute("value")
secondaryphone = wd.find_element_by_name("phone2").get_attribute("value")
email = wd.find_element_by_name("email").get_attribute("value")
email2 = wd.find_element_by_name("email2").get_attribute("value")
email3 = wd.find_element_by_name("email3").get_attribute("value")
adress=wd.find_element_by_name("address").get_attribute("value")
return Contact(firstname=firstname, lastname=lastname, id=id,
homephone=homephone, mobilephone=mobilephone, workphone=workphone, secondaryphone=secondaryphone,
email=email, email2=email2, email3=email3, adress=adress)
def get_contact_view_page(self, index):
wd = self.app.wd
self.open_contact_to_view_by_index(index)
text = wd.find_element_by_id("content").text
homephone = re.search("H: (.*)", text).group(1)
workphone = re.search("W: (.*)", text).group(1)
mobilephone = re.search("M: (.*)", text).group(1)
secondaryphone = re.search("P: (.*)", text).group(1)
return Contact(homephone=homephone, mobilephone=mobilephone, workphone=workphone, secondaryphone=secondaryphone)
| 39.624161 | 148 | 0.646172 | from model.formfiller import Contact
import re
class ContactHelper:
def __init__(self, app):
self.app = app
def add_new_contact(self, contact):
wd = self.app.wd
if not len(wd.find_elements_by_name("searchstring")) > 0:
self.app.open_home_page()
wd.find_element_by_xpath("//div[@id='nav']//a[.='nowy wpis']").click()
self.fill_contact_form(contact)
wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
self.contact_cache = None
def delete_first_contact(self):
wd = self.app.wd
self.delete_contact_by_index(0)
def select_contact_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_name("selected[]")[index].click()
def delete_contact_by_index(self, index):
wd = self.app.wd
if not len(wd.find_elements_by_name("searchstring")) > 0:
self.app.open_home_page()
self.select_contact_by_index(index)
wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
wd.switch_to_alert().accept()
self.contact_cache = None
def modify_first_contact(self):
wd = self.app.wd
self.modify_contact_by_index(0)
def modify_contact_by_index(self, index, contact):
wd = self.app.wd
if not len(wd.find_elements_by_name("searchstring")) > 0:
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cells = row.find_elements_by_tag_name("td")
cells[7].click()
self.fill_contact_form(contact)
wd.find_element_by_xpath("//div[@id='content']/form[1]/input[22]").click()
self.contact_cache = None
def count(self):
wd = self.app.wd
self.app.open_home_page()
return len(wd.find_elements_by_name("selected[]"))
def fill_contact_form(self, contact):
wd = self.app.wd
self.change_field_value("firstname", contact.firstname)
self.change_field_value("lastname", contact.lastname)
self.change_field_value("homephone", contact.homephone)
self.change_field_value("mobilephone", contact.mobilephone)
self.change_field_value("workphone", contact.workphone)
self.change_field_value("secondaryphone", contact.secondaryphone)
def change_field_value(self, field_name, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_name).click()
wd.find_element_by_name(field_name).clear()
wd.find_element_by_name(field_name).send_keys(text)
contact_cache = None
def get_contact_list(self):
if self.contact_cache is None:
wd = self.app.wd
self.app.open_home_page()
self.contact_cache = []
for row in wd.find_elements_by_name("entry"):
cells = row.find_elements_by_tag_name("td")
firstname = cells[2].text
lastname = cells[1].text
id = cells[0].find_element_by_tag_name("input").get_attribute("value")
all_phones = cells[5].text
adress = cells[3].text
all_emails = cells[4].text
self.contact_cache.append(Contact(firstname=firstname, lastname=lastname,
id=id, all_phones_from_home_page=all_phones, all_emails_from_home_page=all_emails, adress=adress))
return list(self.contact_cache)
def open_contact_to_edit_by_index(self, index):
wd = self.app.wd
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cells = row.find_elements_by_tag_name("td")[7]
cells.find_element_by_tag_name("a").click()
def open_contact_to_view_by_index(self, index):
wd = self.app.wd
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cells = row.find_elements_by_tag_name("td")[6]
cells.find_element_by_tag_name("a").click()
def get_contact_info_from_edit_page(self, index):
wd = self.app.wd
self.open_contact_to_edit_by_index(index)
firstname = wd.find_element_by_name("firstname").get_attribute("value")
lastname = wd.find_element_by_name("lastname").get_attribute("value")
id = wd.find_element_by_name("id").get_attribute("value")
homephone = wd.find_element_by_name("home").get_attribute("value")
workphone = wd.find_element_by_name("work").get_attribute("value")
mobilephone = wd.find_element_by_name("mobile").get_attribute("value")
secondaryphone = wd.find_element_by_name("phone2").get_attribute("value")
email = wd.find_element_by_name("email").get_attribute("value")
email2 = wd.find_element_by_name("email2").get_attribute("value")
email3 = wd.find_element_by_name("email3").get_attribute("value")
adress=wd.find_element_by_name("address").get_attribute("value")
return Contact(firstname=firstname, lastname=lastname, id=id,
homephone=homephone, mobilephone=mobilephone, workphone=workphone, secondaryphone=secondaryphone,
email=email, email2=email2, email3=email3, adress=adress)
def get_contact_view_page(self, index):
wd = self.app.wd
self.open_contact_to_view_by_index(index)
text = wd.find_element_by_id("content").text
homephone = re.search("H: (.*)", text).group(1)
workphone = re.search("W: (.*)", text).group(1)
mobilephone = re.search("M: (.*)", text).group(1)
secondaryphone = re.search("P: (.*)", text).group(1)
return Contact(homephone=homephone, mobilephone=mobilephone, workphone=workphone, secondaryphone=secondaryphone)
| true | true |
f7253f2da62ae21d3ff22ab8085cb80e2d6cdb84 | 2,260 | py | Python | ryu/app/openstate/playground/start_many_to_1_ctrl.py | Tesi-Luca-Davide/ryu | f4b74d55d594dab0938bae0656d5143e284e0846 | [
"Apache-2.0"
] | null | null | null | ryu/app/openstate/playground/start_many_to_1_ctrl.py | Tesi-Luca-Davide/ryu | f4b74d55d594dab0938bae0656d5143e284e0846 | [
"Apache-2.0"
] | null | null | null | ryu/app/openstate/playground/start_many_to_1_ctrl.py | Tesi-Luca-Davide/ryu | f4b74d55d594dab0938bae0656d5143e284e0846 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
from mininet.net import Mininet
from mininet.topo import Topo
from mininet.cli import CLI
from mininet.node import UserSwitch,RemoteController
from mininet.term import makeTerm
import os, time
class MyTopo( Topo ):
"Simple topology example."
def __init__( self):
"Create custom topo."
# Add default members to class.
Topo.__init__(self)
# Add nodes
Host1=self.addHost('h1', ip='10.0.0.1/24')
Host2=self.addHost('h2', ip='10.0.0.2/24')
switch1=self.addSwitch('s1')
switch2=self.addSwitch('s2')
switch3=self.addSwitch('s3')
switch4=self.addSwitch('s4')
switch5=self.addSwitch('s5')
# Add edges
self.addLink( Host1, switch1, 1, 1)
self.addLink( switch1, switch2, 2, 1)
self.addLink( switch1, switch3, 3, 1)
self.addLink( switch1, switch4, 4, 1)
self.addLink( switch2, switch5, 2, 1)
self.addLink( switch3, switch5, 2, 2)
self.addLink( switch4, switch5, 2, 3)
self.addLink( switch5, Host2, 4, 1)
######Starting controller
os.system("xterm -e 'ryu-manager ~/ryu/ryu/app/openstate/playground/forwarding_consistency_many_to_1_ctrl.py'&")
######Starting mininet
topos = { 'mytopo': ( lambda: MyTopo() ) }
mytopo=MyTopo()
time.sleep(1)
print("\n********************************** HELP *********************************************")
print("Type \"python ~/ryu/ryu/app/openstate/echo_server.py 200\" in h2's xterm")
print("Type \"nc 10.0.0.2 200\" in h1's xterm")
print("Watching the tcpdump results, it is possible to see that forwarding consistency is guaranteed\n"
"In order to test new path selection, close and reopen netcat")
print("\nTo exit type \"ctrl+D\" or exit")
print("*************************************************************************************")
net = Mininet(topo=mytopo,switch=UserSwitch,controller=RemoteController,cleanup=True,autoSetMacs=True,autoStaticArp=True,listenPort=6634)
net.start()
os.system("xterm -e 'tcpdump -i s2-eth1'&")
os.system("xterm -e 'tcpdump -i s3-eth1'&")
os.system("xterm -e 'tcpdump -i s4-eth1'&")
h1,h2 = net.hosts[0], net.hosts[1]
makeTerm(h1)
makeTerm(h2)
CLI(net)
net.stop()
os.system("sudo mn -c")
os.system("kill -9 $(pidof -x ryu-manager)")
| 32.753623 | 137 | 0.626991 |
from mininet.net import Mininet
from mininet.topo import Topo
from mininet.cli import CLI
from mininet.node import UserSwitch,RemoteController
from mininet.term import makeTerm
import os, time
class MyTopo( Topo ):
def __init__( self):
Topo.__init__(self)
Host1=self.addHost('h1', ip='10.0.0.1/24')
Host2=self.addHost('h2', ip='10.0.0.2/24')
switch1=self.addSwitch('s1')
switch2=self.addSwitch('s2')
switch3=self.addSwitch('s3')
switch4=self.addSwitch('s4')
switch5=self.addSwitch('s5')
self.addLink( Host1, switch1, 1, 1)
self.addLink( switch1, switch2, 2, 1)
self.addLink( switch1, switch3, 3, 1)
self.addLink( switch1, switch4, 4, 1)
self.addLink( switch2, switch5, 2, 1)
self.addLink( switch3, switch5, 2, 2)
self.addLink( switch4, switch5, 2, 3)
self.addLink( switch5, Host2, 4, 1)
y'&")
********************** HELP *********************************************")
print("Type \"python ~/ryu/ryu/app/openstate/echo_server.py 200\" in h2's xterm")
print("Type \"nc 10.0.0.2 200\" in h1's xterm")
print("Watching the tcpdump results, it is possible to see that forwarding consistency is guaranteed\n"
"In order to test new path selection, close and reopen netcat")
print("\nTo exit type \"ctrl+D\" or exit")
print("*************************************************************************************")
net = Mininet(topo=mytopo,switch=UserSwitch,controller=RemoteController,cleanup=True,autoSetMacs=True,autoStaticArp=True,listenPort=6634)
net.start()
os.system("xterm -e 'tcpdump -i s2-eth1'&")
os.system("xterm -e 'tcpdump -i s3-eth1'&")
os.system("xterm -e 'tcpdump -i s4-eth1'&")
h1,h2 = net.hosts[0], net.hosts[1]
makeTerm(h1)
makeTerm(h2)
CLI(net)
net.stop()
os.system("sudo mn -c")
os.system("kill -9 $(pidof -x ryu-manager)")
| true | true |
f7254068f9b6b41261cccdb04ae15cf40d62dba9 | 8,954 | py | Python | tseries_crossval.py | Yashgh7076/CU-Thesis | 59a7c6e8009395b5773b1ee47c38ca287ed6c189 | [
"MIT"
] | 1 | 2021-03-03T22:11:29.000Z | 2021-03-03T22:11:29.000Z | tseries_crossval.py | Yashgh7076/CU-Thesis | 59a7c6e8009395b5773b1ee47c38ca287ed6c189 | [
"MIT"
] | null | null | null | tseries_crossval.py | Yashgh7076/CU-Thesis | 59a7c6e8009395b5773b1ee47c38ca287ed6c189 | [
"MIT"
] | null | null | null | import numpy as np
import sys
import os
import math
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # Suppress TF info
import tensorflow as tf
#import matplotlib.pyplot as plt
# Define constants
stride = 15 #1 second @ 15 Hz sampling
window = 30*15 #30 seconds window considered
folder = sys.argv[1]
if not os.path.exists(folder):
print("Unable to open folder containing data, check that folder exists \n")
exit(0)
total_files = 488
total_sum = 0
for i in range(1,total_files + 1):
file_no = 'output' + str(i) + '.txt'
full_path = os.path.join(folder, file_no)
#print(full_path)
f = open(full_path,'r')
d=[[float(x) for x in line.split()] for line in f]
f.close()
N = len(d)
total_sum = total_sum + N
M = len(d[0])
measurements = int((M-window)/6)
dataset = np.zeros((total_sum,measurements,6))
vectors = np.zeros((total_sum,window),dtype=np.uint8)
windows_in_recording = np.zeros((total_files), dtype=np.uint32)
total_windows = 0
for i in range(1,total_files + 1):
file_no = 'output' + str(i) + '.txt'
full_path = os.path.join(folder, file_no)
f = open(full_path,'r')
d=[[float(x) for x in line.split()] for line in f]
f.close()
# Need to recalculate the number of windows each time
N = len(d)
labels = np.zeros(shape = (N,window), dtype=np.uint8) # np.uint8 -> each sample is labeled from 0 to 5
data = np.zeros(shape = (N,measurements,6))
data_max = np.zeros((6)) # Create placeholders
data_min = np.zeros((6))
temp_3 = np.zeros((6))
temp_4 = np.zeros((6))
for j in range(N):
temp = d[j]
temp_1 = temp[0:window]
temp_2 = temp[window:M]
labels[j,:] = temp_1
for k in range(measurements): # Read data
for l in range(6):
data[j,k,l] = temp_2[(6*k) + l]
for j in range(N):
if(j == 1):
data_max = np.amax(data[j,:,:], axis=0)
data_min = np.amin(data[j,:,:], axis=0)
else:
temp_3 = np.amax(data[j,:,:], axis=0)
temp_4 = np.amin(data[j,:,:], axis=0)
for k in range(6):
if(temp_3[k] >= data_max[k]):
data_max[k] = temp_3[k]
if(temp_4[k] <= data_min[k]):
data_min[k] = temp_4[k]
# Normalize each recording (meal)
for j in range(N):
for k in range(measurements):
data[j,k,:] = data[j,k,:] - data_min # Vector subtraction
data[j,k,:] = data[j,k,:]/(data_max - data_min) # Element-wise division
dataset[total_windows:total_windows + N, :, :] = data
vectors[total_windows:total_windows + N,:] = labels
total_windows = total_windows + N
windows_in_recording[i-1] = total_windows #Calculates all windows till this meal -> That is what we want!
# Clear variables from memory
del data, labels, d, temp_1, temp_2, temp_3, temp_4
# Print out to verify
#f = open('segments_data.txt','w')
#for j in range(measurements):
# for k in range(6):
# f.write("%f " % (dataset[0,j,k]))
# f.write("\n") # --> correct way of newline in Python!
#f.close()
#f = open('segments_labels.txt','w')
#for j in range(total_windows):
# for k in range(window):
# f.write("%u " % (vectors[j,k]))
# f.write("\n")
#f.close()
# Cross-validation starts here, split data into five parts, use validation_split (keras) for simplicity
part_1 = windows_in_recording[math.floor((0.2*total_files)) -1]
part_2 = windows_in_recording[math.floor((0.4*total_files)) -1]
part_3 = windows_in_recording[math.floor((0.6*total_files)) -1]
part_4 = windows_in_recording[math.floor((0.8*total_files)) -1]
for iter in range(5):
if(iter == 0):
tst_data = dataset[0:part_1,:,:]
trn_data = dataset[part_1:total_windows,:,:]
tst_vcts = vectors[0:part_1,:]
trn_vcts = vectors[part_1:total_windows,:]
elif(iter == 1):
tst_data = dataset[part_1:part_2,:,:]
temp_1 = dataset[0:part_1,:,:]
temp_2 = dataset[part_2:total_windows,:,:]
trn_data = np.concatenate((temp_1, temp_2), axis=0)
tst_vcts = vectors[part_1:part_2,:]
temp_3 = vectors[0:part_1,:]
temp_4 = vectors[part_2:total_windows,:]
trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
elif(iter == 2):
tst_data = dataset[part_2:part_3,:,:]
temp_1 = dataset[0:part_2,:,:]
temp_2 = dataset[part_3:total_windows,:,:]
trn_data = np.concatenate((temp_1, temp_2), axis=0)
tst_vcts = vectors[part_2:part_3,:]
temp_3 = vectors[0:part_2,:]
temp_4 = vectors[part_3:total_windows,:]
trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
elif(iter == 3):
tst_data = dataset[part_3:part_4,:,:]
temp_1 = dataset[0:part_3,:,:]
temp_2 = dataset[part_4:total_windows,:,:]
trn_data = np.concatenate((temp_1, temp_2), axis=0)
tst_vcts = vectors[part_3:part_4,:]
temp_3 = vectors[0:part_3,:]
temp_4 = vectors[part_4:total_windows,:]
trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
elif(iter == 4):
tst_data = dataset[part_4:total_windows,:,:]
trn_data = dataset[0:part_4,:,:]
tst_vcts = vectors[part_4:total_windows,:]
trn_vcts = vectors[0:part_4,:]
# Reshape labels -> needed for keras compatibility
trn_size = trn_data.shape[0]
trn_vcts = np.reshape(trn_vcts, newshape=(trn_size, 1, window)) # Each vector is of size 1 x training_window => 1 x N image of labels
# Neural network training starts here
print("Creating model", iter, "here")
inputs = tf.keras.layers.Input(shape=(measurements, 6))
reshape = tf.keras.layers.Reshape((1, measurements, 6))(inputs) # Data is a 1 x 450 'image' of 6 channels
# Downstream --> Encoder
conv_1 = tf.keras.layers.Conv2D(filters=8, kernel_size=(1,15), strides=1, padding='same', activation='linear')(reshape)
bn_1 = tf.keras.layers.BatchNormalization(axis=3)(conv_1)
act_1 = tf.keras.layers.ReLU()(bn_1)
pool_1 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_1)
conv_2 = tf.keras.layers.Conv2D(filters=16, kernel_size=(1,7), strides=1, padding='same', activation='linear')(pool_1)
bn_2 = tf.keras.layers.BatchNormalization(axis=3)(conv_2)
act_2 = tf.keras.layers.ReLU()(bn_2)
pool_2 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_2)
conv_3 = tf.keras.layers.Conv2D(filters=32, kernel_size=(1,5), strides=1, padding='same', activation='linear')(pool_2)
bn_3 = tf.keras.layers.BatchNormalization(axis=3)(conv_3)
act_3 = tf.keras.layers.ReLU()(bn_3)
pool_3 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_3)
# Upstream --> Decoder
up_conv1 = tf.keras.layers.Conv2DTranspose(filters=32, kernel_size=(1,5),padding='same',strides=(1,2),activation='linear')(pool_3)
bn_4 = tf.keras.layers.BatchNormalization(axis=3)(up_conv1)
act_4 = tf.keras.layers.ReLU()(bn_4)
concat = tf.keras.layers.Concatenate()
cc_1 = concat([act_4, pool_2])
up_conv2 = tf.keras.layers.Conv2DTranspose(filters=16, kernel_size=(1,7),padding='same',strides=(1,2),activation='linear')(cc_1)
bn_5 = tf.keras.layers.BatchNormalization(axis=3)(up_conv2)
act_5 = tf.keras.layers.ReLU()(bn_5)
pad_1 = tf.keras.layers.ZeroPadding2D(padding=((0,0),(0,1)))(act_5)
cc_2 = concat([pad_1, pool_1])
# Final Layer
pen_ult = tf.keras.layers.Conv2DTranspose(filters=6,kernel_size=(1,3),strides=(1,2),activation='softmax')(cc_2)
outputs = tf.keras.layers.Cropping2D(cropping=((0,0),(0,1)))(pen_ult)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
model.compile(optimizer = 'adam', loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits='True'), metrics=[tf.keras.losses.SparseCategoricalCrossentropy(from_logits='True')])
if(iter == 0):
model.summary()
# Store training sequence to .txt file
training_log = 'crossval_fold_' + str(iter) + '.txt'
csv_logger = tf.keras.callbacks.CSVLogger(training_log, append = True, separator=' ')
print("Training for fold", iter)
metrics = model.fit(trn_data, trn_vcts, epochs=200, validation_split= 0.2, verbose=2, callbacks=[csv_logger])
print("Saving model for fold", iter)
model_ID = 'crossval_modelID_' + str(iter) + '.h5'
tf.keras.models.save_model(model,model_ID)
#del model -> Most likely not needed....
##print("Predict")
##op = model.predict(dataset[0:10,:,:])
##print(op.shape)
##temp = op[0,:,:,:]
##temp = np.reshape(temp,(window, 6))
##for i in range(window):
## print(temp[i,:], np.argmax(temp[i,:]))
| 39.619469 | 189 | 0.615814 | import numpy as np
import sys
import os
import math
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
stride = 15
window = 30*15
folder = sys.argv[1]
if not os.path.exists(folder):
print("Unable to open folder containing data, check that folder exists \n")
exit(0)
total_files = 488
total_sum = 0
for i in range(1,total_files + 1):
file_no = 'output' + str(i) + '.txt'
full_path = os.path.join(folder, file_no)
f = open(full_path,'r')
d=[[float(x) for x in line.split()] for line in f]
f.close()
N = len(d)
total_sum = total_sum + N
M = len(d[0])
measurements = int((M-window)/6)
dataset = np.zeros((total_sum,measurements,6))
vectors = np.zeros((total_sum,window),dtype=np.uint8)
windows_in_recording = np.zeros((total_files), dtype=np.uint32)
total_windows = 0
for i in range(1,total_files + 1):
file_no = 'output' + str(i) + '.txt'
full_path = os.path.join(folder, file_no)
f = open(full_path,'r')
d=[[float(x) for x in line.split()] for line in f]
f.close()
N = len(d)
labels = np.zeros(shape = (N,window), dtype=np.uint8)
data = np.zeros(shape = (N,measurements,6))
data_max = np.zeros((6))
data_min = np.zeros((6))
temp_3 = np.zeros((6))
temp_4 = np.zeros((6))
for j in range(N):
temp = d[j]
temp_1 = temp[0:window]
temp_2 = temp[window:M]
labels[j,:] = temp_1
for k in range(measurements):
for l in range(6):
data[j,k,l] = temp_2[(6*k) + l]
for j in range(N):
if(j == 1):
data_max = np.amax(data[j,:,:], axis=0)
data_min = np.amin(data[j,:,:], axis=0)
else:
temp_3 = np.amax(data[j,:,:], axis=0)
temp_4 = np.amin(data[j,:,:], axis=0)
for k in range(6):
if(temp_3[k] >= data_max[k]):
data_max[k] = temp_3[k]
if(temp_4[k] <= data_min[k]):
data_min[k] = temp_4[k]
for j in range(N):
for k in range(measurements):
data[j,k,:] = data[j,k,:] - data_min
data[j,k,:] = data[j,k,:]/(data_max - data_min)
dataset[total_windows:total_windows + N, :, :] = data
vectors[total_windows:total_windows + N,:] = labels
total_windows = total_windows + N
windows_in_recording[i-1] = total_windows
del data, labels, d, temp_1, temp_2, temp_3, temp_4
ng[math.floor((0.2*total_files)) -1]
part_2 = windows_in_recording[math.floor((0.4*total_files)) -1]
part_3 = windows_in_recording[math.floor((0.6*total_files)) -1]
part_4 = windows_in_recording[math.floor((0.8*total_files)) -1]
for iter in range(5):
if(iter == 0):
tst_data = dataset[0:part_1,:,:]
trn_data = dataset[part_1:total_windows,:,:]
tst_vcts = vectors[0:part_1,:]
trn_vcts = vectors[part_1:total_windows,:]
elif(iter == 1):
tst_data = dataset[part_1:part_2,:,:]
temp_1 = dataset[0:part_1,:,:]
temp_2 = dataset[part_2:total_windows,:,:]
trn_data = np.concatenate((temp_1, temp_2), axis=0)
tst_vcts = vectors[part_1:part_2,:]
temp_3 = vectors[0:part_1,:]
temp_4 = vectors[part_2:total_windows,:]
trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
elif(iter == 2):
tst_data = dataset[part_2:part_3,:,:]
temp_1 = dataset[0:part_2,:,:]
temp_2 = dataset[part_3:total_windows,:,:]
trn_data = np.concatenate((temp_1, temp_2), axis=0)
tst_vcts = vectors[part_2:part_3,:]
temp_3 = vectors[0:part_2,:]
temp_4 = vectors[part_3:total_windows,:]
trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
elif(iter == 3):
tst_data = dataset[part_3:part_4,:,:]
temp_1 = dataset[0:part_3,:,:]
temp_2 = dataset[part_4:total_windows,:,:]
trn_data = np.concatenate((temp_1, temp_2), axis=0)
tst_vcts = vectors[part_3:part_4,:]
temp_3 = vectors[0:part_3,:]
temp_4 = vectors[part_4:total_windows,:]
trn_vcts = np.concatenate((temp_3, temp_4), axis=0)
elif(iter == 4):
tst_data = dataset[part_4:total_windows,:,:]
trn_data = dataset[0:part_4,:,:]
tst_vcts = vectors[part_4:total_windows,:]
trn_vcts = vectors[0:part_4,:]
trn_size = trn_data.shape[0]
trn_vcts = np.reshape(trn_vcts, newshape=(trn_size, 1, window))
print("Creating model", iter, "here")
inputs = tf.keras.layers.Input(shape=(measurements, 6))
reshape = tf.keras.layers.Reshape((1, measurements, 6))(inputs)
conv_1 = tf.keras.layers.Conv2D(filters=8, kernel_size=(1,15), strides=1, padding='same', activation='linear')(reshape)
bn_1 = tf.keras.layers.BatchNormalization(axis=3)(conv_1)
act_1 = tf.keras.layers.ReLU()(bn_1)
pool_1 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_1)
conv_2 = tf.keras.layers.Conv2D(filters=16, kernel_size=(1,7), strides=1, padding='same', activation='linear')(pool_1)
bn_2 = tf.keras.layers.BatchNormalization(axis=3)(conv_2)
act_2 = tf.keras.layers.ReLU()(bn_2)
pool_2 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_2)
conv_3 = tf.keras.layers.Conv2D(filters=32, kernel_size=(1,5), strides=1, padding='same', activation='linear')(pool_2)
bn_3 = tf.keras.layers.BatchNormalization(axis=3)(conv_3)
act_3 = tf.keras.layers.ReLU()(bn_3)
pool_3 = tf.keras.layers.MaxPool2D(pool_size=(1,2))(act_3)
up_conv1 = tf.keras.layers.Conv2DTranspose(filters=32, kernel_size=(1,5),padding='same',strides=(1,2),activation='linear')(pool_3)
bn_4 = tf.keras.layers.BatchNormalization(axis=3)(up_conv1)
act_4 = tf.keras.layers.ReLU()(bn_4)
concat = tf.keras.layers.Concatenate()
cc_1 = concat([act_4, pool_2])
up_conv2 = tf.keras.layers.Conv2DTranspose(filters=16, kernel_size=(1,7),padding='same',strides=(1,2),activation='linear')(cc_1)
bn_5 = tf.keras.layers.BatchNormalization(axis=3)(up_conv2)
act_5 = tf.keras.layers.ReLU()(bn_5)
pad_1 = tf.keras.layers.ZeroPadding2D(padding=((0,0),(0,1)))(act_5)
cc_2 = concat([pad_1, pool_1])
pen_ult = tf.keras.layers.Conv2DTranspose(filters=6,kernel_size=(1,3),strides=(1,2),activation='softmax')(cc_2)
outputs = tf.keras.layers.Cropping2D(cropping=((0,0),(0,1)))(pen_ult)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
model.compile(optimizer = 'adam', loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits='True'), metrics=[tf.keras.losses.SparseCategoricalCrossentropy(from_logits='True')])
if(iter == 0):
model.summary()
training_log = 'crossval_fold_' + str(iter) + '.txt'
csv_logger = tf.keras.callbacks.CSVLogger(training_log, append = True, separator=' ')
print("Training for fold", iter)
metrics = model.fit(trn_data, trn_vcts, epochs=200, validation_split= 0.2, verbose=2, callbacks=[csv_logger])
print("Saving model for fold", iter)
model_ID = 'crossval_modelID_' + str(iter) + '.h5'
tf.keras.models.save_model(model,model_ID)
| true | true |
f725440da7378c0c7c83b48d5b2433930ec3d062 | 5,063 | py | Python | catalog/bindings/csw/animate_color_type.py | NIVANorge/s-enda-playground | 56ae0a8978f0ba8a5546330786c882c31e17757a | [
"Apache-2.0"
] | null | null | null | catalog/bindings/csw/animate_color_type.py | NIVANorge/s-enda-playground | 56ae0a8978f0ba8a5546330786c882c31e17757a | [
"Apache-2.0"
] | null | null | null | catalog/bindings/csw/animate_color_type.py | NIVANorge/s-enda-playground | 56ae0a8978f0ba8a5546330786c882c31e17757a | [
"Apache-2.0"
] | null | null | null | from dataclasses import dataclass, field
from decimal import Decimal
from typing import Dict, List, Optional, Union
from bindings.csw.anim_mode_attrs_calc_mode import AnimModeAttrsCalcMode
from bindings.csw.animate_color_prototype import AnimateColorPrototype
from bindings.csw.fill_default_type import FillDefaultType
from bindings.csw.fill_timing_attrs_type import FillTimingAttrsType
from bindings.csw.lang_value import LangValue
from bindings.csw.restart_default_type import RestartDefaultType
from bindings.csw.restart_timing_type import RestartTimingType
from bindings.csw.sync_behavior_default_type import SyncBehaviorDefaultType
from bindings.csw.sync_behavior_type import SyncBehaviorType
__NAMESPACE__ = "http://www.w3.org/2001/SMIL20/Language"
@dataclass
class AnimateColorType(AnimateColorPrototype):
class Meta:
name = "animateColorType"
other_element: List[object] = field(
default_factory=list,
metadata={
"type": "Wildcard",
"namespace": "##other",
},
)
id: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
class_value: Optional[str] = field(
default=None,
metadata={
"name": "class",
"type": "Attribute",
},
)
lang: Optional[Union[str, LangValue]] = field(
default=None,
metadata={
"type": "Attribute",
"namespace": "http://www.w3.org/XML/1998/namespace",
},
)
alt: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
longdesc: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
begin: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
end: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
dur: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
repeat_dur: Optional[str] = field(
default=None,
metadata={
"name": "repeatDur",
"type": "Attribute",
},
)
repeat_count: Optional[Decimal] = field(
default=None,
metadata={
"name": "repeatCount",
"type": "Attribute",
"min_inclusive": Decimal("0.0"),
},
)
repeat: Optional[int] = field(
default=None,
metadata={
"type": "Attribute",
},
)
min: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
max: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
sync_behavior: SyncBehaviorType = field(
default=SyncBehaviorType.DEFAULT,
metadata={
"name": "syncBehavior",
"type": "Attribute",
},
)
sync_tolerance: Optional[str] = field(
default=None,
metadata={
"name": "syncTolerance",
"type": "Attribute",
},
)
sync_behavior_default: SyncBehaviorDefaultType = field(
default=SyncBehaviorDefaultType.INHERIT,
metadata={
"name": "syncBehaviorDefault",
"type": "Attribute",
},
)
sync_tolerance_default: str = field(
default="inherit",
metadata={
"name": "syncToleranceDefault",
"type": "Attribute",
},
)
restart: RestartTimingType = field(
default=RestartTimingType.DEFAULT,
metadata={
"type": "Attribute",
},
)
restart_default: RestartDefaultType = field(
default=RestartDefaultType.INHERIT,
metadata={
"name": "restartDefault",
"type": "Attribute",
},
)
fill: FillTimingAttrsType = field(
default=FillTimingAttrsType.DEFAULT,
metadata={
"type": "Attribute",
},
)
fill_default: FillDefaultType = field(
default=FillDefaultType.INHERIT,
metadata={
"name": "fillDefault",
"type": "Attribute",
},
)
target_element: Optional[str] = field(
default=None,
metadata={
"name": "targetElement",
"type": "Attribute",
},
)
calc_mode: AnimModeAttrsCalcMode = field(
default=AnimModeAttrsCalcMode.LINEAR,
metadata={
"name": "calcMode",
"type": "Attribute",
},
)
skip_content: bool = field(
default=True,
metadata={
"name": "skip-content",
"type": "Attribute",
},
)
any_attributes: Dict[str, str] = field(
default_factory=dict,
metadata={
"type": "Attributes",
"namespace": "##any",
},
)
| 26.097938 | 75 | 0.540786 | from dataclasses import dataclass, field
from decimal import Decimal
from typing import Dict, List, Optional, Union
from bindings.csw.anim_mode_attrs_calc_mode import AnimModeAttrsCalcMode
from bindings.csw.animate_color_prototype import AnimateColorPrototype
from bindings.csw.fill_default_type import FillDefaultType
from bindings.csw.fill_timing_attrs_type import FillTimingAttrsType
from bindings.csw.lang_value import LangValue
from bindings.csw.restart_default_type import RestartDefaultType
from bindings.csw.restart_timing_type import RestartTimingType
from bindings.csw.sync_behavior_default_type import SyncBehaviorDefaultType
from bindings.csw.sync_behavior_type import SyncBehaviorType
__NAMESPACE__ = "http://www.w3.org/2001/SMIL20/Language"
@dataclass
class AnimateColorType(AnimateColorPrototype):
class Meta:
name = "animateColorType"
other_element: List[object] = field(
default_factory=list,
metadata={
"type": "Wildcard",
"namespace": "##other",
},
)
id: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
class_value: Optional[str] = field(
default=None,
metadata={
"name": "class",
"type": "Attribute",
},
)
lang: Optional[Union[str, LangValue]] = field(
default=None,
metadata={
"type": "Attribute",
"namespace": "http://www.w3.org/XML/1998/namespace",
},
)
alt: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
longdesc: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
begin: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
end: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
dur: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
repeat_dur: Optional[str] = field(
default=None,
metadata={
"name": "repeatDur",
"type": "Attribute",
},
)
repeat_count: Optional[Decimal] = field(
default=None,
metadata={
"name": "repeatCount",
"type": "Attribute",
"min_inclusive": Decimal("0.0"),
},
)
repeat: Optional[int] = field(
default=None,
metadata={
"type": "Attribute",
},
)
min: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
max: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
},
)
sync_behavior: SyncBehaviorType = field(
default=SyncBehaviorType.DEFAULT,
metadata={
"name": "syncBehavior",
"type": "Attribute",
},
)
sync_tolerance: Optional[str] = field(
default=None,
metadata={
"name": "syncTolerance",
"type": "Attribute",
},
)
sync_behavior_default: SyncBehaviorDefaultType = field(
default=SyncBehaviorDefaultType.INHERIT,
metadata={
"name": "syncBehaviorDefault",
"type": "Attribute",
},
)
sync_tolerance_default: str = field(
default="inherit",
metadata={
"name": "syncToleranceDefault",
"type": "Attribute",
},
)
restart: RestartTimingType = field(
default=RestartTimingType.DEFAULT,
metadata={
"type": "Attribute",
},
)
restart_default: RestartDefaultType = field(
default=RestartDefaultType.INHERIT,
metadata={
"name": "restartDefault",
"type": "Attribute",
},
)
fill: FillTimingAttrsType = field(
default=FillTimingAttrsType.DEFAULT,
metadata={
"type": "Attribute",
},
)
fill_default: FillDefaultType = field(
default=FillDefaultType.INHERIT,
metadata={
"name": "fillDefault",
"type": "Attribute",
},
)
target_element: Optional[str] = field(
default=None,
metadata={
"name": "targetElement",
"type": "Attribute",
},
)
calc_mode: AnimModeAttrsCalcMode = field(
default=AnimModeAttrsCalcMode.LINEAR,
metadata={
"name": "calcMode",
"type": "Attribute",
},
)
skip_content: bool = field(
default=True,
metadata={
"name": "skip-content",
"type": "Attribute",
},
)
any_attributes: Dict[str, str] = field(
default_factory=dict,
metadata={
"type": "Attributes",
"namespace": "##any",
},
)
| true | true |
f7254419547fb3e85242fc0a78ab9478810397a4 | 4,518 | py | Python | contrib/testgen/gen_base58_test_vectors.py | artiqox/artiqox | 782d58837ec8a8a84a41f0508a71b060af7ed9fc | [
"MIT"
] | 9 | 2018-04-01T23:21:15.000Z | 2018-08-10T20:59:16.000Z | contrib/testgen/gen_base58_test_vectors.py | artiqox/artiqox | 782d58837ec8a8a84a41f0508a71b060af7ed9fc | [
"MIT"
] | 1 | 2019-06-16T00:58:29.000Z | 2019-06-23T23:46:33.000Z | contrib/testgen/gen_base58_test_vectors.py | artiqox/artiqox | 782d58837ec8a8a84a41f0508a71b060af7ed9fc | [
"MIT"
] | 5 | 2018-03-27T09:26:45.000Z | 2019-10-23T00:15:00.000Z | #!/usr/bin/env python
# Copyright (c) 2012-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json
gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
# key types
PUBKEY_ADDRESS = 23
SCRIPT_ADDRESS = 82
PUBKEY_ADDRESS_TEST = 113
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 158
PRIVKEY_TEST = 241
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata
# None = N/A
((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
((PRIVKEY,), 32, (), (True, False, None, False)),
((PRIVKEY,), 32, (1,), (True, False, None, True)),
((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
'''Check vector v for validity'''
result = b58decode_chk(v)
if result is None:
return False
for template in templates:
prefix = str(bytearray(template[0]))
suffix = str(bytearray(template[2]))
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return False
def gen_valid_vectors():
'''Generate valid test vectors'''
while True:
for template in templates:
prefix = str(bytearray(template[0]))
payload = os.urandom(template[1])
suffix = str(bytearray(template[2]))
rv = b58encode_chk(prefix + payload + suffix)
assert is_valid(rv)
metadata = dict([(x,y) for (x,y) in zip(metadata_keys,template[3]) if y is not None])
yield (rv, b2a_hex(payload), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
'''Generate possibly invalid vector'''
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = str(bytearray(template[0]))
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = str(bytearray(template[2]))
return b58encode_chk(prefix + payload + suffix)
def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
yield "",
yield "x",
while True:
# kinds of invalid vectors:
# invalid prefix
# invalid payload length
# invalid (randomized) suffix (add random data)
# corrupt checksum
for template in templates:
val = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
if random.randint(0,10)<1: # line corruption
if randbool(): # add random character to end
val += random.choice(b58chars)
else: # replace random character in the middle
n = random.randint(0, len(val))
val = val[0:n] + random.choice(b58chars) + val[n+1:]
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys, json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
| 35.023256 | 97 | 0.615095 |
import os
from itertools import islice
from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
# Version-byte prefixes for the supported serialisations (values are
# network-specific; presumably this project's chain params -- TODO confirm).
PUBKEY_ADDRESS = 23
SCRIPT_ADDRESS = 82
PUBKEY_ADDRESS_TEST = 113
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 158
PRIVKEY_TEST = 241
# Keys of the per-vector metadata dict; None entries in a template are omitted.
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
# templates: (prefix bytes, payload size, suffix bytes, metadata values)
templates = [
  ((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
  ((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
  ((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
  ((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
  ((PRIVKEY,), 32, (), (True, False, None, False)),
  ((PRIVKEY,), 32, (1,), (True, False, None, True)),
  ((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
  ((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
    '''Return True when *v* decodes (checksum OK) and the decoded bytes
    match the prefix/payload-size/suffix of one of the known templates.'''
    decoded = b58decode_chk(v)
    if decoded is None:  # undecodable or bad checksum
        return False
    for prefix_bytes, payload_size, suffix_bytes, _meta in templates:
        prefix = str(bytearray(prefix_bytes))
        suffix = str(bytearray(suffix_bytes))
        body_len = len(decoded) - len(prefix) - len(suffix)
        if (decoded.startswith(prefix) and decoded.endswith(suffix)
                and body_len == payload_size):
            return True
    return False
def gen_valid_vectors():
    '''Yield an endless stream of (base58, payload-hex, metadata) vectors.'''
    while True:
        for prefix_bytes, payload_size, suffix_bytes, meta_values in templates:
            prefix = str(bytearray(prefix_bytes))
            payload = os.urandom(payload_size)
            suffix = str(bytearray(suffix_bytes))
            rv = b58encode_chk(prefix + payload + suffix)
            assert is_valid(rv)
            metadata = {}
            for key, value in zip(metadata_keys, meta_values):
                if value is not None:
                    metadata[key] = value
            yield (rv, b2a_hex(payload), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
    '''Generate a possibly invalid vector from *template*.

    Each corrupt_* flag replaces the corresponding section with random
    data; "possibly" because the corruption can cancel out by chance.
    '''
    if corrupt_prefix:
        prefix = os.urandom(1)
    else:
        prefix = str(bytearray(template[0]))
    if randomize_payload_size:
        # random payload length (always >= 50 bytes due to max())
        payload = os.urandom(max(int(random.expovariate(0.5)), 50))
    else:
        payload = os.urandom(template[1])
    if corrupt_suffix:
        suffix = os.urandom(len(template[2]))
    else:
        suffix = str(bytearray(template[2]))
    return b58encode_chk(prefix + payload + suffix)
def randbool(p = 0.5):
    '''Return True with probability *p*.'''
    return random.random() < p
def gen_invalid_vectors():
    '''Generate invalid test vectors.

    Yields 1-tuples of base58 strings that is_valid() must reject: two
    manual edge cases first, then an endless stream of randomly corrupted
    encodings (bad prefix, bad payload length, bad suffix, or a
    single-character line corruption).
    '''
    yield "",
    yield "x",
    while True:
        for template in templates:
            val = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
            if random.randint(0, 10) < 1:  # occasional extra line corruption
                if randbool():  # append a random base58 character
                    val += random.choice(b58chars)
                elif val:  # replace a random character in the middle
                    # randint() bounds are inclusive: use len(val) - 1 so the
                    # highest index replaces a character instead of silently
                    # appending one (the original upper bound of len(val)
                    # duplicated the "append" branch above).
                    n = random.randint(0, len(val) - 1)
                    val = val[0:n] + random.choice(b58chars) + val[n + 1:]
            # corruption can cancel out by chance, so re-check before yielding
            if not is_valid(val):
                yield val,
if __name__ == '__main__':
    # CLI: script.py [valid|invalid] [count] -- dumps JSON vectors to stdout.
    import sys, json
    iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
    try:
        uiter = iters[sys.argv[1]]
    except IndexError:
        # no kind given: default to valid vectors
        uiter = gen_valid_vectors
    try:
        count = int(sys.argv[2])
    except IndexError:
        # no count given: emit an empty list
        count = 0
    data = list(islice(uiter(), count))
    json.dump(data, sys.stdout, sort_keys=True, indent=4)
    sys.stdout.write('\n')
| true | true |
f7254560c04c87549cd65488408ce3ddfcd4bf5f | 104,573 | py | Python | youtube_dl/YoutubeDL.py | 404NotFoundJ/ytubr | 7c4aa6fd6fd6fadf1cf1942c279cd5c0ff5ae498 | [
"Unlicense"
] | null | null | null | youtube_dl/YoutubeDL.py | 404NotFoundJ/ytubr | 7c4aa6fd6fd6fadf1cf1942c279cd5c0ff5ae498 | [
"Unlicense"
] | null | null | null | youtube_dl/YoutubeDL.py | 404NotFoundJ/ytubr | 7c4aa6fd6fd6fadf1cf1942c279cd5c0ff5ae498 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
from __future__ import absolute_import, unicode_literals
import collections
import contextlib
import copy
import datetime
import errno
import fileinput
import io
import itertools
import json
import locale
import operator
import os
import platform
import re
import shutil
import subprocess
import socket
import sys
import time
import tokenize
import traceback
import random
from .compat import (
compat_basestring,
compat_cookiejar,
compat_expanduser,
compat_get_terminal_size,
compat_http_client,
compat_kwargs,
compat_numeric_types,
compat_os_name,
compat_str,
compat_tokenize_tokenize,
compat_urllib_error,
compat_urllib_request,
compat_urllib_request_DataHandler,
)
from .utils import (
age_restricted,
args_to_str,
ContentTooShortError,
date_from_str,
DateRange,
DEFAULT_OUTTMPL,
determine_ext,
determine_protocol,
DownloadError,
encode_compat_str,
encodeFilename,
error_to_compat_str,
ExtractorError,
format_bytes,
formatSeconds,
GeoRestrictedError,
ISO3166Utils,
locked_file,
make_HTTPS_handler,
MaxDownloadsReached,
PagedList,
parse_filesize,
PerRequestProxyHandler,
platform_name,
PostProcessingError,
preferredencoding,
prepend_extension,
register_socks_protocols,
render_table,
replace_extension,
SameFileError,
sanitize_filename,
sanitize_path,
sanitize_url,
sanitized_Request,
std_headers,
subtitles_filename,
UnavailableVideoError,
url_basename,
version_tuple,
write_json_file,
write_string,
YoutubeDLCookieProcessor,
YoutubeDLHandler,
)
from .cache import Cache
from .extractor import get_info_extractor, gen_extractor_classes, _LAZY_LOADER
from .downloader import get_suitable_downloader
from .downloader.rtmp import rtmpdump_version
from .postprocessor import (
FFmpegFixupM3u8PP,
FFmpegFixupM4aPP,
FFmpegFixupStretchedPP,
FFmpegMergerPP,
FFmpegPostProcessor,
get_postprocessor,
)
from .version import __version__
if compat_os_name == 'nt':
import ctypes
class YoutubeDL(object):
"""YoutubeDL class.
YoutubeDL objects are the ones responsible of downloading the
actual video file and writing it to disk if the user has requested
it, among some other tasks. In most cases there should be one per
program. As, given a video URL, the downloader doesn't know how to
extract all the needed information, task that InfoExtractors do, it
has to pass the URL to one of them.
For this, YoutubeDL objects have a method that allows
InfoExtractors to be registered in a given order. When it is passed
a URL, the YoutubeDL object handles it to the first InfoExtractor it
finds that reports being able to handle it. The InfoExtractor extracts
all the information about the video or videos the URL refers to, and
YoutubeDL process the extracted information, possibly using a File
Downloader to download the video.
YoutubeDL objects accept a lot of parameters. In order not to saturate
the object constructor with arguments, it receives a dictionary of
options instead. These options are available through the params
attribute for the InfoExtractors to use. The YoutubeDL also
registers itself as the downloader in charge for the InfoExtractors
that are added to it, so this is a "mutual registration".
Available options:
username: Username for authentication purposes.
password: Password for authentication purposes.
videopassword: Password for accessing a video.
ap_mso: Adobe Pass multiple-system operator identifier.
ap_username: Multiple-system operator account username.
ap_password: Multiple-system operator account password.
usenetrc: Use netrc for authentication instead.
verbose: Print additional info to stdout.
quiet: Do not print messages to stdout.
no_warnings: Do not print out anything for warnings.
forceurl: Force printing final URL.
forcetitle: Force printing title.
forceid: Force printing ID.
forcethumbnail: Force printing thumbnail URL.
forcedescription: Force printing description.
forcefilename: Force printing final filename.
forceduration: Force printing duration.
forcejson: Force printing info_dict as JSON.
dump_single_json: Force printing the info_dict of the whole playlist
(or video) as a single JSON line.
simulate: Do not download the video files.
format: Video format code. See options.py for more information.
outtmpl: Template for output names.
restrictfilenames: Do not allow "&" and spaces in file names
ignoreerrors: Do not stop on download errors.
force_generic_extractor: Force downloader to use the generic extractor
nooverwrites: Prevent overwriting files.
playliststart: Playlist item to start at.
playlistend: Playlist item to end at.
playlist_items: Specific indices of playlist to download.
playlistreverse: Download playlist items in reverse order.
playlistrandom: Download playlist items in random order.
matchtitle: Download only matching titles.
rejecttitle: Reject downloads for matching titles.
logger: Log messages to a logging.Logger instance.
logtostderr: Log messages to stderr instead of stdout.
writedescription: Write the video description to a .description file
writeinfojson: Write the video description to a .info.json file
writeannotations: Write the video annotations to a .annotations.xml file
writethumbnail: Write the thumbnail image to a file
write_all_thumbnails: Write all thumbnail formats to files
writesubtitles: Write the video subtitles to a file
writeautomaticsub: Write the automatically generated subtitles to a file
allsubtitles: Downloads all the subtitles of the video
(requires writesubtitles or writeautomaticsub)
listsubtitles: Lists all available subtitles for the video
subtitlesformat: The format code for subtitles
subtitleslangs: List of languages of the subtitles to download
keepvideo: Keep the video file after post-processing
daterange: A DateRange object, download only if the upload_date is in the range.
skip_download: Skip the actual download of the video file
cachedir: Location of the cache files in the filesystem.
False to disable filesystem cache.
noplaylist: Download single video instead of a playlist if in doubt.
age_limit: An integer representing the user's age in years.
Unsuitable videos for the given age are skipped.
min_views: An integer representing the minimum view count the video
must have in order to not be skipped.
Videos without view count information are always
downloaded. None for no limit.
max_views: An integer representing the maximum view count.
Videos that are more popular than that are not
downloaded.
Videos without view count information are always
downloaded. None for no limit.
download_archive: File name of a file where all downloads are recorded.
Videos already present in the file are not downloaded
again.
cookiefile: File name where cookies should be read from and dumped to.
nocheckcertificate:Do not verify SSL certificates
prefer_insecure: Use HTTP instead of HTTPS to retrieve information.
At the moment, this is only supported by YouTube.
proxy: URL of the proxy server to use
geo_verification_proxy: URL of the proxy to use for IP address verification
on geo-restricted sites. (Experimental)
socket_timeout: Time to wait for unresponsive hosts, in seconds
bidi_workaround: Work around buggy terminals without bidirectional text
                       support, using fribidi
debug_printtraffic:Print out sent and received HTTP traffic
include_ads: Download ads as well
default_search: Prepend this string if an input url is not valid.
'auto' for elaborate guessing
encoding: Use this encoding instead of the system-specified.
extract_flat: Do not resolve URLs, return the immediate result.
Pass in 'in_playlist' to only show this behavior for
playlist items.
postprocessors: A list of dictionaries, each with an entry
* key: The name of the postprocessor. See
youtube_dl/postprocessor/__init__.py for a list.
as well as any further keyword arguments for the
postprocessor.
progress_hooks: A list of functions that get called on download
progress, with a dictionary with the entries
* status: One of "downloading", "error", or "finished".
Check this first and ignore unknown values.
If status is one of "downloading", or "finished", the
following properties may also be present:
* filename: The final filename (always present)
* tmpfilename: The filename we're currently writing to
* downloaded_bytes: Bytes on disk
* total_bytes: Size of the whole file, None if unknown
* total_bytes_estimate: Guess of the eventual file size,
None if unavailable.
* elapsed: The number of seconds since download started.
* eta: The estimated time in seconds, None if unknown
* speed: The download speed in bytes/second, None if
unknown
* fragment_index: The counter of the currently
downloaded video fragment.
* fragment_count: The number of fragments (= individual
files that will be merged)
Progress hooks are guaranteed to be called at least once
(with status "finished") if the download is successful.
merge_output_format: Extension to use when merging formats.
fixup: Automatically correct known faults of the file.
One of:
- "never": do nothing
- "warn": only emit a warning
- "detect_or_warn": check whether we can do anything
about it, warn otherwise (default)
source_address: (Experimental) Client-side IP address to bind to.
call_home: Boolean, true iff we are allowed to contact the
youtube-dl servers for debugging.
sleep_interval: Number of seconds to sleep before each download when
used alone or a lower bound of a range for randomized
sleep before each download (minimum possible number
of seconds to sleep) when used along with
max_sleep_interval.
max_sleep_interval:Upper bound of a range for randomized sleep before each
download (maximum possible number of seconds to sleep).
Must only be used along with sleep_interval.
Actual sleep time will be a random float from range
[sleep_interval; max_sleep_interval].
listformats: Print an overview of available video formats and exit.
list_thumbnails: Print a table of all thumbnails and exit.
match_filter: A function that gets called with the info_dict of
every video.
If it returns a message, the video is ignored.
If it returns None, the video is downloaded.
match_filter_func in utils.py is one example for this.
no_color: Do not emit color codes in output.
geo_bypass: Bypass geographic restriction via faking X-Forwarded-For
HTTP header (experimental)
geo_bypass_country:
Two-letter ISO 3166-2 country code that will be used for
explicit geographic restriction bypassing via faking
X-Forwarded-For HTTP header (experimental)
The following options determine which downloader is picked:
external_downloader: Executable of the external downloader to call.
None or unset for standard (built-in) downloader.
hls_prefer_native: Use the native HLS downloader instead of ffmpeg/avconv
if True, otherwise use ffmpeg/avconv if False, otherwise
use downloader suggested by extractor if None.
The following parameters are not used by YoutubeDL itself, they are used by
the downloader (see youtube_dl/downloader/common.py):
nopart, updatetime, buffersize, ratelimit, min_filesize, max_filesize, test,
noresizebuffer, retries, continuedl, noprogress, consoletitle,
xattr_set_filesize, external_downloader_args, hls_use_mpegts.
The following options are used by the post processors:
prefer_ffmpeg: If True, use ffmpeg instead of avconv if both are available,
otherwise prefer avconv.
postprocessor_args: A list of additional command-line arguments for the
postprocessor.
"""
params = None
_ies = []
_pps = []
_download_retcode = None
_num_downloads = None
_screen_file = None
    def __init__(self, params=None, auto_init=True):
        """Create a FileDownloader object with the given options.

        params:    option dictionary (see the class docstring for keys).
        auto_init: when True, print the debug header and register all
                   default info extractors.
        """
        if params is None:
            params = {}
        self._ies = []
        self._ies_instances = {}
        self._pps = []
        self._progress_hooks = []
        self._download_retcode = 0
        self._num_downloads = 0
        # logtostderr (False/True) indexes stdout/stderr respectively
        self._screen_file = [sys.stdout, sys.stderr][params.get('logtostderr', False)]
        self._err_file = sys.stderr
        self.params = {
            # Default parameters
            'nocheckcertificate': False,
        }
        self.params.update(params)
        self.cache = Cache(self)

        def check_deprecated(param, option, suggestion):
            # Warn when a deprecated option is present; True lets the caller
            # migrate its value to the replacement option.
            if self.params.get(param) is not None:
                self.report_warning(
                    '%s is deprecated. Use %s instead.' % (option, suggestion))
                return True
            return False

        if check_deprecated('cn_verification_proxy', '--cn-verification-proxy', '--geo-verification-proxy'):
            if self.params.get('geo_verification_proxy') is None:
                self.params['geo_verification_proxy'] = self.params['cn_verification_proxy']
        check_deprecated('autonumber_size', '--autonumber-size', 'output template with %(autonumber)0Nd, where N in the number of digits')
        check_deprecated('autonumber', '--auto-number', '-o "%(autonumber)s-%(title)s.%(ext)s"')
        check_deprecated('usetitle', '--title', '-o "%(title)s-%(id)s.%(ext)s"')

        if params.get('bidi_workaround', False):
            # Pipe all screen output through an external bidi filter
            # (bidiv, falling back to fribidi) over a pty.
            try:
                import pty
                master, slave = pty.openpty()
                width = compat_get_terminal_size().columns
                if width is None:
                    width_args = []
                else:
                    width_args = ['-w', str(width)]
                sp_kwargs = dict(
                    stdin=subprocess.PIPE,
                    stdout=slave,
                    stderr=self._err_file)
                try:
                    self._output_process = subprocess.Popen(
                        ['bidiv'] + width_args, **sp_kwargs
                    )
                except OSError:
                    self._output_process = subprocess.Popen(
                        ['fribidi', '-c', 'UTF-8'] + width_args, **sp_kwargs)
                self._output_channel = os.fdopen(master, 'rb')
            except OSError as ose:
                if ose.errno == errno.ENOENT:
                    self.report_warning('Could not find fribidi executable, ignoring --bidi-workaround . Make sure that fribidi is an executable file in one of the directories in your $PATH.')
                else:
                    raise

        if (sys.version_info >= (3,) and sys.platform != 'win32' and
                sys.getfilesystemencoding() in ['ascii', 'ANSI_X3.4-1968'] and
                not params.get('restrictfilenames', False)):
            # On Python 3, the Unicode filesystem API will throw errors (#1474)
            self.report_warning(
                'Assuming --restrict-filenames since file system encoding '
                'cannot encode all characters. '
                'Set the LC_ALL environment variable to fix this.')
            self.params['restrictfilenames'] = True

        if isinstance(params.get('outtmpl'), bytes):
            self.report_warning(
                'Parameter outtmpl is bytes, but should be a unicode string. '
                'Put from __future__ import unicode_literals at the top of your code file or consider switching to Python 3.x.')

        self._setup_opener()

        if auto_init:
            self.print_debug_header()
            self.add_default_info_extractors()

        # Instantiate and register configured postprocessors; 'key' selects
        # the class, the remaining entries become constructor kwargs.
        for pp_def_raw in self.params.get('postprocessors', []):
            pp_class = get_postprocessor(pp_def_raw['key'])
            pp_def = dict(pp_def_raw)
            del pp_def['key']
            pp = pp_class(self, **compat_kwargs(pp_def))
            self.add_post_processor(pp)

        for ph in self.params.get('progress_hooks', []):
            self.add_progress_hook(ph)

        register_socks_protocols()
def warn_if_short_id(self, argv):
# short YouTube ID starting with dash?
idxs = [
i for i, a in enumerate(argv)
if re.match(r'^-[0-9A-Za-z_-]{10}$', a)]
if idxs:
correct_argv = (
['youtube-dl'] +
[a for i, a in enumerate(argv) if i not in idxs] +
['--'] + [argv[i] for i in idxs]
)
self.report_warning(
'Long argument string detected. '
'Use -- to separate parameters and URLs, like this:\n%s\n' %
args_to_str(correct_argv))
def add_info_extractor(self, ie):
"""Add an InfoExtractor object to the end of the list."""
self._ies.append(ie)
if not isinstance(ie, type):
self._ies_instances[ie.ie_key()] = ie
ie.set_downloader(self)
def get_info_extractor(self, ie_key):
"""
Get an instance of an IE with name ie_key, it will try to get one from
the _ies list, if there's no instance it will create a new one and add
it to the extractor list.
"""
ie = self._ies_instances.get(ie_key)
if ie is None:
ie = get_info_extractor(ie_key)()
self.add_info_extractor(ie)
return ie
def add_default_info_extractors(self):
"""
Add the InfoExtractors returned by gen_extractors to the end of the list
"""
for ie in gen_extractor_classes():
self.add_info_extractor(ie)
def add_post_processor(self, pp):
"""Add a PostProcessor object to the end of the chain."""
self._pps.append(pp)
pp.set_downloader(self)
    def add_progress_hook(self, ph):
        """Add the progress hook (currently only for the file downloader).

        See the class docstring ('progress_hooks') for the dict each hook
        receives.
        """
        self._progress_hooks.append(ph)
    def _bidi_workaround(self, message):
        # Pass *message* through the external bidi filter process set up in
        # __init__ (bidiv/fribidi); a no-op when the workaround is disabled.
        if not hasattr(self, '_output_channel'):
            return message

        assert hasattr(self, '_output_process')
        assert isinstance(message, compat_str)
        line_count = message.count('\n') + 1
        self._output_process.stdin.write((message + '\n').encode('utf-8'))
        self._output_process.stdin.flush()
        # Read back exactly as many lines as were sent, then strip the
        # trailing newline that was appended above.
        res = ''.join(self._output_channel.readline().decode('utf-8')
                      for _ in range(line_count))
        return res[:-len('\n')]
    def to_screen(self, message, skip_eol=False):
        """Print message to stdout if not in quiet mode."""
        # Thin wrapper: quiet handling and logger routing live in to_stdout().
        return self.to_stdout(message, skip_eol, check_quiet=True)
    def _write_string(self, s, out=None):
        # Write *s* to *out* using the configured output encoding.
        write_string(s, out=out, encoding=self.params.get('encoding'))
def to_stdout(self, message, skip_eol=False, check_quiet=False):
"""Print message to stdout if not in quiet mode."""
if self.params.get('logger'):
self.params['logger'].debug(message)
elif not check_quiet or not self.params.get('quiet', False):
message = self._bidi_workaround(message)
terminator = ['\n', ''][skip_eol]
output = message + terminator
self._write_string(output, self._screen_file)
def to_stderr(self, message):
"""Print message to stderr."""
assert isinstance(message, compat_str)
if self.params.get('logger'):
self.params['logger'].error(message)
else:
message = self._bidi_workaround(message)
output = message + '\n'
self._write_string(output, self._err_file)
    def to_console_title(self, message):
        # Set the terminal/console window title to *message* when the
        # 'consoletitle' option is enabled.
        if not self.params.get('consoletitle', False):
            return
        if compat_os_name == 'nt' and ctypes.windll.kernel32.GetConsoleWindow():
            # c_wchar_p() might not be necessary if `message` is
            # already of type unicode()
            ctypes.windll.kernel32.SetConsoleTitleW(ctypes.c_wchar_p(message))
        elif 'TERM' in os.environ:
            # OSC 0 escape: set icon name and window title
            self._write_string('\033]0;%s\007' % message, self._screen_file)
def save_console_title(self):
if not self.params.get('consoletitle', False):
return
if 'TERM' in os.environ:
# Save the title on stack
self._write_string('\033[22;0t', self._screen_file)
def restore_console_title(self):
if not self.params.get('consoletitle', False):
return
if 'TERM' in os.environ:
# Restore the title from stack
self._write_string('\033[23;0t', self._screen_file)
    def __enter__(self):
        """Context-manager entry: save the console title, return self."""
        self.save_console_title()
        return self
    def __exit__(self, *args):
        """Context-manager exit: restore the console title and, when a
        cookie file is configured, persist the cookie jar."""
        self.restore_console_title()

        if self.params.get('cookiefile') is not None:
            self.cookiejar.save()
    def trouble(self, message=None, tb=None):
        """Determine action to take when a download problem appears.

        Depending on if the downloader has been configured to ignore
        download errors or not, this method may throw an exception or
        not when errors are found, after printing the message.

        tb, if given, is additional traceback information.
        """
        if message is not None:
            self.to_stderr(message)
        if self.params.get('verbose'):
            if tb is None:
                if sys.exc_info()[0]:  # if .trouble has been called from an except block
                    tb = ''
                    # Prefer the original exception wrapped inside a
                    # DownloadError-style .exc_info, if present.
                    if hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
                        tb += ''.join(traceback.format_exception(*sys.exc_info()[1].exc_info))
                    tb += encode_compat_str(traceback.format_exc())
                else:
                    # Not inside an exception handler: show the call stack.
                    tb_data = traceback.format_list(traceback.extract_stack())
                    tb = ''.join(tb_data)
            self.to_stderr(tb)
        if not self.params.get('ignoreerrors', False):
            # Re-raise as DownloadError, unwrapping a nested exc_info when
            # the current exception carries one.
            if sys.exc_info()[0] and hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
                exc_info = sys.exc_info()[1].exc_info
            else:
                exc_info = sys.exc_info()
            raise DownloadError(message, exc_info)
        self._download_retcode = 1
def report_warning(self, message):
'''
Print the message to stderr, it will be prefixed with 'WARNING:'
If stderr is a tty file the 'WARNING:' will be colored
'''
if self.params.get('logger') is not None:
self.params['logger'].warning(message)
else:
if self.params.get('no_warnings'):
return
if not self.params.get('no_color') and self._err_file.isatty() and compat_os_name != 'nt':
_msg_header = '\033[0;33mWARNING:\033[0m'
else:
_msg_header = 'WARNING:'
warning_message = '%s %s' % (_msg_header, message)
self.to_stderr(warning_message)
def report_error(self, message, tb=None):
'''
Do the same as trouble, but prefixes the message with 'ERROR:', colored
in red if stderr is a tty file.
'''
if not self.params.get('no_color') and self._err_file.isatty() and compat_os_name != 'nt':
_msg_header = '\033[0;31mERROR:\033[0m'
else:
_msg_header = 'ERROR:'
error_message = '%s %s' % (_msg_header, message)
self.trouble(error_message, tb)
    def report_file_already_downloaded(self, file_name):
        """Report file has already been fully downloaded."""
        try:
            self.to_screen('[download] %s has already been downloaded' % file_name)
        except UnicodeEncodeError:
            # The filename may not be representable in the console encoding;
            # fall back to a generic message.
            self.to_screen('[download] The file has already been downloaded')
    def prepare_filename(self, info_dict):
        """Generate the output filename.

        Expands the 'outtmpl' option with the (sanitized) fields of
        info_dict; returns None (after reporting) on a template error.
        """
        try:
            template_dict = dict(info_dict)

            template_dict['epoch'] = int(time.time())
            autonumber_size = self.params.get('autonumber_size')
            if autonumber_size is None:
                autonumber_size = 5
            template_dict['autonumber'] = self.params.get('autonumber_start', 1) - 1 + self._num_downloads
            # Derive a 'resolution' string from width/height when missing.
            if template_dict.get('resolution') is None:
                if template_dict.get('width') and template_dict.get('height'):
                    template_dict['resolution'] = '%dx%d' % (template_dict['width'], template_dict['height'])
                elif template_dict.get('height'):
                    template_dict['resolution'] = '%sp' % template_dict['height']
                elif template_dict.get('width'):
                    template_dict['resolution'] = '%dx?' % template_dict['width']

            # Sanitize every non-numeric scalar value; drop None and
            # container values entirely. Missing keys render as 'NA'.
            sanitize = lambda k, v: sanitize_filename(
                compat_str(v),
                restricted=self.params.get('restrictfilenames'),
                is_id=(k == 'id'))
            template_dict = dict((k, v if isinstance(v, compat_numeric_types) else sanitize(k, v))
                                 for k, v in template_dict.items()
                                 if v is not None and not isinstance(v, (list, tuple, dict)))
            template_dict = collections.defaultdict(lambda: 'NA', template_dict)

            outtmpl = self.params.get('outtmpl', DEFAULT_OUTTMPL)

            # For fields playlist_index and autonumber convert all occurrences
            # of %(field)s to %(field)0Nd for backward compatibility
            field_size_compat_map = {
                'playlist_index': len(str(template_dict['n_entries'])),
                'autonumber': autonumber_size,
            }
            FIELD_SIZE_COMPAT_RE = r'(?<!%)%\((?P<field>autonumber|playlist_index)\)s'
            mobj = re.search(FIELD_SIZE_COMPAT_RE, outtmpl)
            if mobj:
                outtmpl = re.sub(
                    FIELD_SIZE_COMPAT_RE,
                    r'%%(\1)0%dd' % field_size_compat_map[mobj.group('field')],
                    outtmpl)

            NUMERIC_FIELDS = set((
                'width', 'height', 'tbr', 'abr', 'asr', 'vbr', 'fps', 'filesize', 'filesize_approx',
                'upload_year', 'upload_month', 'upload_day',
                'duration', 'view_count', 'like_count', 'dislike_count', 'repost_count',
                'average_rating', 'comment_count', 'age_limit',
                'start_time', 'end_time',
                'chapter_number', 'season_number', 'episode_number',
                'track_number', 'disc_number', 'release_year',
                'playlist_index',
            ))

            # Missing numeric fields used together with integer presentation types
            # in format specification will break the argument substitution since
            # string 'NA' is returned for missing fields. We will patch output
            # template for missing fields to meet string presentation type.
            for numeric_field in NUMERIC_FIELDS:
                if numeric_field not in template_dict:
                    # As of [1] format syntax is:
                    # %[mapping_key][conversion_flags][minimum_width][.precision][length_modifier]type
                    # 1. https://docs.python.org/2/library/stdtypes.html#string-formatting
                    FORMAT_RE = r'''(?x)
                        (?<!%)
                        %
                        \({0}\)  # mapping key
                        (?:[#0\-+ ]+)?  # conversion flags (optional)
                        (?:\d+)?  # minimum field width (optional)
                        (?:\.\d+)?  # precision (optional)
                        [hlL]?  # length modifier (optional)
                        [diouxXeEfFgGcrs%]  # conversion type
                    '''
                    outtmpl = re.sub(
                        FORMAT_RE.format(numeric_field),
                        r'%({0})s'.format(numeric_field), outtmpl)

            tmpl = compat_expanduser(outtmpl)
            filename = tmpl % template_dict
            # Temporary fix for #4787
            # 'Treat' all problem characters by passing filename through preferredencoding
            # to workaround encoding issues with subprocess on python2 @ Windows
            if sys.version_info < (3, 0) and sys.platform == 'win32':
                filename = encodeFilename(filename, True).decode(preferredencoding())
            return sanitize_path(filename)
        except ValueError as err:
            self.report_error('Error in output template: ' + str(err) + ' (encoding: ' + repr(preferredencoding()) + ')')
            return None
    def _match_entry(self, info_dict, incomplete):
        """ Returns None iff the file should be downloaded.

        Otherwise returns a human-readable string explaining why the entry
        was skipped. *incomplete* marks partially-extracted entries for
        which the user match_filter is not consulted.
        """

        video_title = info_dict.get('title', info_dict.get('id', 'video'))
        # 'title' can be missing when we're just evaluating the playlist
        if 'title' in info_dict:
            title = info_dict['title']
            matchtitle = self.params.get('matchtitle', False)
            if matchtitle:
                if not re.search(matchtitle, title, re.IGNORECASE):
                    return '"' + title + '" title did not match pattern "' + matchtitle + '"'
            rejecttitle = self.params.get('rejecttitle', False)
            if rejecttitle:
                if re.search(rejecttitle, title, re.IGNORECASE):
                    return '"' + title + '" title matched reject pattern "' + rejecttitle + '"'
        date = info_dict.get('upload_date')
        if date is not None:
            dateRange = self.params.get('daterange', DateRange())
            if date not in dateRange:
                return '%s upload date is not in range %s' % (date_from_str(date).isoformat(), dateRange)
        # Entries without a view count always pass the min/max view filters.
        view_count = info_dict.get('view_count')
        if view_count is not None:
            min_views = self.params.get('min_views')
            if min_views is not None and view_count < min_views:
                return 'Skipping %s, because it has not reached minimum view count (%d/%d)' % (video_title, view_count, min_views)
            max_views = self.params.get('max_views')
            if max_views is not None and view_count > max_views:
                return 'Skipping %s, because it has exceeded the maximum view count (%d/%d)' % (video_title, view_count, max_views)
        if age_restricted(info_dict.get('age_limit'), self.params.get('age_limit')):
            return 'Skipping "%s" because it is age restricted' % video_title
        if self.in_download_archive(info_dict):
            return '%s has already been recorded in archive' % video_title

        if not incomplete:
            match_filter = self.params.get('match_filter')
            if match_filter is not None:
                ret = match_filter(info_dict)
                if ret is not None:
                    return ret

        return None
@staticmethod
def add_extra_info(info_dict, extra_info):
'''Set the keys from extra_info in info dict if they are missing'''
for key, value in extra_info.items():
info_dict.setdefault(key, value)
    def extract_info(self, url, download=True, ie_key=None, extra_info={},
                     process=True, force_generic_extractor=False):
        '''
        Returns a list with a dictionary for each video we find.
        If 'download', also downloads the videos.
        extra_info is a dict containing the extra values to add to each result

        NOTE(review): extra_info has a mutable default; it is only read
        here (passed on to process_ie_result / the extractors), never
        mutated, so the shared default appears harmless -- confirm before
        changing.
        '''
        if not ie_key and force_generic_extractor:
            ie_key = 'Generic'

        if ie_key:
            ies = [self.get_info_extractor(ie_key)]
        else:
            ies = self._ies

        # Hand the URL to the first suitable extractor.
        for ie in ies:
            if not ie.suitable(url):
                continue

            ie = self.get_info_extractor(ie.ie_key())
            if not ie.working():
                self.report_warning('The program functionality for this site has been marked as broken, '
                                    'and will probably not work.')

            try:
                ie_result = ie.extract(url)
                if ie_result is None:  # Finished already (backwards compatibility; listformats and friends should be moved here)
                    break
                if isinstance(ie_result, list):
                    # Backwards compatibility: old IE result format
                    ie_result = {
                        '_type': 'compat_list',
                        'entries': ie_result,
                    }
                self.add_default_extra_info(ie_result, ie, url)
                if process:
                    return self.process_ie_result(ie_result, download, extra_info)
                else:
                    return ie_result
            except GeoRestrictedError as e:
                # Augment the message with the list of allowed countries.
                msg = e.msg
                if e.countries:
                    msg += '\nThis video is available in %s.' % ', '.join(
                        map(ISO3166Utils.short2full, e.countries))
                msg += '\nYou might want to use a VPN or a proxy server (with --proxy) to workaround.'
                self.report_error(msg)
                break
            except ExtractorError as e:  # An error we somewhat expected
                self.report_error(compat_str(e), e.format_traceback())
                break
            except MaxDownloadsReached:
                raise
            except Exception as e:
                if self.params.get('ignoreerrors', False):
                    self.report_error(error_to_compat_str(e), tb=encode_compat_str(traceback.format_exc()))
                    break
                else:
                    raise
        else:
            # No extractor claimed the URL.
            self.report_error('no suitable InfoExtractor for URL %s' % url)
def add_default_extra_info(self, ie_result, ie, url):
self.add_extra_info(ie_result, {
'extractor': ie.IE_NAME,
'webpage_url': url,
'webpage_url_basename': url_basename(url),
'extractor_key': ie.ie_key(),
})
def process_ie_result(self, ie_result, download=True, extra_info=None):
    """
    Take the result of the ie(may be modified) and resolve all unresolved
    references (URLs, playlist items).

    It will also download the videos if 'download'.
    Returns the resolved ie_result.
    """
    # Fix: avoid a shared mutable default argument (`extra_info={}`).
    # Behavior for all existing callers is unchanged.
    if extra_info is None:
        extra_info = {}

    result_type = ie_result.get('_type', 'video')

    if result_type in ('url', 'url_transparent'):
        ie_result['url'] = sanitize_url(ie_result['url'])
        extract_flat = self.params.get('extract_flat', False)
        # With --flat-playlist, playlist entries are returned unresolved.
        if ((extract_flat == 'in_playlist' and 'playlist' in extra_info) or
                extract_flat is True):
            if self.params.get('forcejson', False):
                self.to_stdout(json.dumps(ie_result))
            return ie_result

    if result_type == 'video':
        self.add_extra_info(ie_result, extra_info)
        return self.process_video_result(ie_result, download=download)
    elif result_type == 'url':
        # We have to add extra_info to the results because it may be
        # contained in a playlist
        return self.extract_info(ie_result['url'],
                                 download,
                                 ie_key=ie_result.get('ie_key'),
                                 extra_info=extra_info)
    elif result_type == 'url_transparent':
        # Use the information from the embedding page
        info = self.extract_info(
            ie_result['url'], ie_key=ie_result.get('ie_key'),
            extra_info=extra_info, download=False, process=False)

        # Fields from the embedding page override the target's, except the
        # routing fields (_type/url/ie_key) which must come from the target.
        force_properties = dict(
            (k, v) for k, v in ie_result.items() if v is not None)
        for f in ('_type', 'url', 'ie_key'):
            if f in force_properties:
                del force_properties[f]
        new_result = info.copy()
        new_result.update(force_properties)

        assert new_result.get('_type') != 'url_transparent'

        return self.process_ie_result(
            new_result, download=download, extra_info=extra_info)
    elif result_type == 'playlist' or result_type == 'multi_video':
        # We process each entry in the playlist
        playlist = ie_result.get('title') or ie_result.get('id')
        self.to_screen('[download] Downloading playlist: %s' % playlist)

        playlist_results = []

        playliststart = self.params.get('playliststart', 1) - 1
        playlistend = self.params.get('playlistend')
        # For backwards compatibility, interpret -1 as whole list
        if playlistend == -1:
            playlistend = None

        playlistitems_str = self.params.get('playlist_items')
        playlistitems = None
        if playlistitems_str is not None:
            def iter_playlistitems(format):
                # Expand "1-3,7" style specs into individual 1-based indices.
                for string_segment in format.split(','):
                    if '-' in string_segment:
                        start, end = string_segment.split('-')
                        for item in range(int(start), int(end) + 1):
                            yield int(item)
                    else:
                        yield int(string_segment)
            playlistitems = iter_playlistitems(playlistitems_str)

        ie_entries = ie_result['entries']
        if isinstance(ie_entries, list):
            n_all_entries = len(ie_entries)
            if playlistitems:
                entries = [
                    ie_entries[i - 1] for i in playlistitems
                    if -n_all_entries <= i - 1 < n_all_entries]
            else:
                entries = ie_entries[playliststart:playlistend]
            n_entries = len(entries)
            self.to_screen(
                '[%s] playlist %s: Collected %d video ids (downloading %d of them)' %
                (ie_result['extractor'], playlist, n_all_entries, n_entries))
        elif isinstance(ie_entries, PagedList):
            if playlistitems:
                entries = []
                for item in playlistitems:
                    entries.extend(ie_entries.getslice(
                        item - 1, item
                    ))
            else:
                entries = ie_entries.getslice(
                    playliststart, playlistend)
            n_entries = len(entries)
            self.to_screen(
                '[%s] playlist %s: Downloading %d videos' %
                (ie_result['extractor'], playlist, n_entries))
        else:  # iterable
            if playlistitems:
                entry_list = list(ie_entries)
                entries = [entry_list[i - 1] for i in playlistitems]
            else:
                entries = list(itertools.islice(
                    ie_entries, playliststart, playlistend))
            n_entries = len(entries)
            self.to_screen(
                '[%s] playlist %s: Downloading %d videos' %
                (ie_result['extractor'], playlist, n_entries))

        if self.params.get('playlistreverse', False):
            entries = entries[::-1]

        if self.params.get('playlistrandom', False):
            random.shuffle(entries)

        x_forwarded_for = ie_result.get('__x_forwarded_for_ip')

        for i, entry in enumerate(entries, 1):
            self.to_screen('[download] Downloading video %s of %s' % (i, n_entries))
            # This __x_forwarded_for_ip thing is a bit ugly but requires
            # minimal changes
            if x_forwarded_for:
                entry['__x_forwarded_for_ip'] = x_forwarded_for
            extra = {
                'n_entries': n_entries,
                'playlist': playlist,
                'playlist_id': ie_result.get('id'),
                'playlist_title': ie_result.get('title'),
                'playlist_index': i + playliststart,
                'extractor': ie_result['extractor'],
                'webpage_url': ie_result['webpage_url'],
                'webpage_url_basename': url_basename(ie_result['webpage_url']),
                'extractor_key': ie_result['extractor_key'],
            }

            # Skip entries filtered out by --match-title & friends.
            reason = self._match_entry(entry, incomplete=True)
            if reason is not None:
                self.to_screen('[download] ' + reason)
                continue

            entry_result = self.process_ie_result(entry,
                                                  download=download,
                                                  extra_info=extra)
            playlist_results.append(entry_result)
        ie_result['entries'] = playlist_results
        self.to_screen('[download] Finished downloading playlist: %s' % playlist)
        return ie_result
    elif result_type == 'compat_list':
        self.report_warning(
            'Extractor %s returned a compat_list result. '
            'It needs to be updated.' % ie_result.get('extractor'))

        def _fixup(r):
            # Annotate each legacy entry with the playlist's extractor info.
            self.add_extra_info(
                r,
                {
                    'extractor': ie_result['extractor'],
                    'webpage_url': ie_result['webpage_url'],
                    'webpage_url_basename': url_basename(ie_result['webpage_url']),
                    'extractor_key': ie_result['extractor_key'],
                }
            )
            return r

        ie_result['entries'] = [
            self.process_ie_result(_fixup(r), download, extra_info)
            for r in ie_result['entries']
        ]
        return ie_result
    else:
        raise Exception('Invalid result type: %s' % result_type)
def _build_format_filter(self, filter_spec):
" Returns a function to filter the formats according to the filter_spec "
OPERATORS = {
'<': operator.lt,
'<=': operator.le,
'>': operator.gt,
'>=': operator.ge,
'=': operator.eq,
'!=': operator.ne,
}
operator_rex = re.compile(r'''(?x)\s*
(?P<key>width|height|tbr|abr|vbr|asr|filesize|fps)
\s*(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
(?P<value>[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)
$
''' % '|'.join(map(re.escape, OPERATORS.keys())))
m = operator_rex.search(filter_spec)
if m:
try:
comparison_value = int(m.group('value'))
except ValueError:
comparison_value = parse_filesize(m.group('value'))
if comparison_value is None:
comparison_value = parse_filesize(m.group('value') + 'B')
if comparison_value is None:
raise ValueError(
'Invalid value %r in format specification %r' % (
m.group('value'), filter_spec))
op = OPERATORS[m.group('op')]
if not m:
STR_OPERATORS = {
'=': operator.eq,
'!=': operator.ne,
'^=': lambda attr, value: attr.startswith(value),
'$=': lambda attr, value: attr.endswith(value),
'*=': lambda attr, value: value in attr,
}
str_operator_rex = re.compile(r'''(?x)
\s*(?P<key>ext|acodec|vcodec|container|protocol|format_id)
\s*(?P<op>%s)(?P<none_inclusive>\s*\?)?
\s*(?P<value>[a-zA-Z0-9._-]+)
\s*$
''' % '|'.join(map(re.escape, STR_OPERATORS.keys())))
m = str_operator_rex.search(filter_spec)
if m:
comparison_value = m.group('value')
op = STR_OPERATORS[m.group('op')]
if not m:
raise ValueError('Invalid filter specification %r' % filter_spec)
def _filter(f):
actual_value = f.get(m.group('key'))
if actual_value is None:
return m.group('none_inclusive')
return op(actual_value, comparison_value)
return _filter
def build_format_selector(self, format_spec):
    """Compile the -f/--format specification *format_spec* into a selector.

    The returned callable takes a context dict with keys 'formats' (list
    of format dicts) and 'incomplete_formats' (bool) and yields the
    selected format dict(s).

    Pipeline: tokenize the spec with the stdlib tokenizer, parse the
    token stream into a small AST of FormatSelector nodes, then compile
    the AST into nested selector closures.
    """
    def syntax_error(note, start):
        # Build (not raise) a SyntaxError with a caret pointing at
        # column start[1] of the spec.
        message = (
            'Invalid format specification: '
            '{0}\n\t{1}\n\t{2}^'.format(note, format_spec, ' ' * start[1]))
        return SyntaxError(message)

    # AST node kinds for the parsed selector expression.
    PICKFIRST = 'PICKFIRST'
    MERGE = 'MERGE'
    SINGLE = 'SINGLE'
    GROUP = 'GROUP'
    FormatSelector = collections.namedtuple('FormatSelector', ['type', 'selector', 'filters'])

    def _parse_filter(tokens):
        # Consume tokens up to the closing ']' and return the raw filter
        # text; _build_format_filter compiles it later.
        filter_parts = []
        for type, string, start, _, _ in tokens:
            if type == tokenize.OP and string == ']':
                return ''.join(filter_parts)
            else:
                filter_parts.append(string)

    def _remove_unused_ops(tokens):
        # Remove operators that we don't use and join them with the surrounding strings
        # for example: 'mp4' '-' 'baseline' '-' '16x9' is converted to 'mp4-baseline-16x9'
        ALLOWED_OPS = ('/', '+', ',', '(', ')')
        last_string, last_start, last_end, last_line = None, None, None, None
        for type, string, start, end, line in tokens:
            if type == tokenize.OP and string == '[':
                if last_string:
                    yield tokenize.NAME, last_string, last_start, last_end, last_line
                    last_string = None
                yield type, string, start, end, line
                # everything inside brackets will be handled by _parse_filter
                for type, string, start, end, line in tokens:
                    yield type, string, start, end, line
                    if type == tokenize.OP and string == ']':
                        break
            elif type == tokenize.OP and string in ALLOWED_OPS:
                if last_string:
                    yield tokenize.NAME, last_string, last_start, last_end, last_line
                    last_string = None
                yield type, string, start, end, line
            elif type in [tokenize.NAME, tokenize.NUMBER, tokenize.OP]:
                # Glue adjacent NAME/NUMBER/other-OP tokens into one NAME.
                if not last_string:
                    last_string = string
                    last_start = start
                    last_end = end
                else:
                    last_string += string
        if last_string:
            yield tokenize.NAME, last_string, last_start, last_end, last_line

    def _parse_format_selection(tokens, inside_merge=False, inside_choice=False, inside_group=False):
        # Recursive-descent parser over the cleaned token stream; the
        # inside_* flags tell it which delimiters end the current level.
        selectors = []
        current_selector = None
        for type, string, start, _, _ in tokens:
            # ENCODING is only defined in python 3.x
            if type == getattr(tokenize, 'ENCODING', None):
                continue
            elif type in [tokenize.NAME, tokenize.NUMBER]:
                current_selector = FormatSelector(SINGLE, string, [])
            elif type == tokenize.OP:
                if string == ')':
                    if not inside_group:
                        # ')' will be handled by the parentheses group
                        tokens.restore_last_token()
                    break
                elif inside_merge and string in ['/', ',']:
                    tokens.restore_last_token()
                    break
                elif inside_choice and string == ',':
                    tokens.restore_last_token()
                    break
                elif string == ',':
                    if not current_selector:
                        raise syntax_error('"," must follow a format selector', start)
                    selectors.append(current_selector)
                    current_selector = None
                elif string == '/':
                    if not current_selector:
                        raise syntax_error('"/" must follow a format selector', start)
                    first_choice = current_selector
                    second_choice = _parse_format_selection(tokens, inside_choice=True)
                    current_selector = FormatSelector(PICKFIRST, (first_choice, second_choice), [])
                elif string == '[':
                    if not current_selector:
                        current_selector = FormatSelector(SINGLE, 'best', [])
                    format_filter = _parse_filter(tokens)
                    current_selector.filters.append(format_filter)
                elif string == '(':
                    if current_selector:
                        raise syntax_error('Unexpected "("', start)
                    group = _parse_format_selection(tokens, inside_group=True)
                    current_selector = FormatSelector(GROUP, group, [])
                elif string == '+':
                    video_selector = current_selector
                    audio_selector = _parse_format_selection(tokens, inside_merge=True)
                    if not video_selector or not audio_selector:
                        raise syntax_error('"+" must be between two format selectors', start)
                    current_selector = FormatSelector(MERGE, (video_selector, audio_selector), [])
                else:
                    raise syntax_error('Operator not recognized: "{0}"'.format(string), start)
            elif type == tokenize.ENDMARKER:
                break
        if current_selector:
            selectors.append(current_selector)
        return selectors

    def _build_selector_function(selector):
        # Compile an AST node (or a list of them) into a closure
        # ctx -> iterable of format dicts.
        if isinstance(selector, list):
            fs = [_build_selector_function(s) for s in selector]

            def selector_function(ctx):
                for f in fs:
                    for format in f(ctx):
                        yield format
            return selector_function
        elif selector.type == GROUP:
            selector_function = _build_selector_function(selector.selector)
        elif selector.type == PICKFIRST:
            fs = [_build_selector_function(s) for s in selector.selector]

            def selector_function(ctx):
                for f in fs:
                    picked_formats = list(f(ctx))
                    if picked_formats:
                        return picked_formats
                return []
        elif selector.type == SINGLE:
            format_spec = selector.selector

            def selector_function(ctx):
                formats = list(ctx['formats'])
                if not formats:
                    return
                if format_spec == 'all':
                    for f in formats:
                        yield f
                elif format_spec in ['best', 'worst', None]:
                    # Formats are assumed sorted worst-to-best, so 'best'
                    # is the last element and 'worst' the first.
                    format_idx = 0 if format_spec == 'worst' else -1
                    audiovideo_formats = [
                        f for f in formats
                        if f.get('vcodec') != 'none' and f.get('acodec') != 'none']
                    if audiovideo_formats:
                        yield audiovideo_formats[format_idx]
                    # for extractors with incomplete formats (audio only (soundcloud)
                    # or video only (imgur)) we will fallback to best/worst
                    # {video,audio}-only format
                    elif ctx['incomplete_formats']:
                        yield formats[format_idx]
                elif format_spec == 'bestaudio':
                    audio_formats = [
                        f for f in formats
                        if f.get('vcodec') == 'none']
                    if audio_formats:
                        yield audio_formats[-1]
                elif format_spec == 'worstaudio':
                    audio_formats = [
                        f for f in formats
                        if f.get('vcodec') == 'none']
                    if audio_formats:
                        yield audio_formats[0]
                elif format_spec == 'bestvideo':
                    video_formats = [
                        f for f in formats
                        if f.get('acodec') == 'none']
                    if video_formats:
                        yield video_formats[-1]
                elif format_spec == 'worstvideo':
                    video_formats = [
                        f for f in formats
                        if f.get('acodec') == 'none']
                    if video_formats:
                        yield video_formats[0]
                else:
                    # Bare name: treat known extensions as an ext filter,
                    # anything else as an exact format_id.
                    extensions = ['mp4', 'flv', 'webm', '3gp', 'm4a', 'mp3', 'ogg', 'aac', 'wav']
                    if format_spec in extensions:
                        filter_f = lambda f: f['ext'] == format_spec
                    else:
                        filter_f = lambda f: f['format_id'] == format_spec
                    matches = list(filter(filter_f, formats))
                    if matches:
                        yield matches[-1]
        elif selector.type == MERGE:
            def _merge(formats_info):
                format_1, format_2 = [f['format_id'] for f in formats_info]
                # The first format must contain the video and the
                # second the audio
                if formats_info[0].get('vcodec') == 'none':
                    self.report_error('The first format must '
                                      'contain the video, try using '
                                      '"-f %s+%s"' % (format_2, format_1))
                    return
                # Formats must be opposite (video+audio)
                if formats_info[0].get('acodec') == 'none' and formats_info[1].get('acodec') == 'none':
                    self.report_error(
                        'Both formats %s and %s are video-only, you must specify "-f video+audio"'
                        % (format_1, format_2))
                    return
                output_ext = (
                    formats_info[0]['ext']
                    if self.params.get('merge_output_format') is None
                    else self.params['merge_output_format'])

                return {
                    'requested_formats': formats_info,
                    'format': '%s+%s' % (formats_info[0].get('format'),
                                         formats_info[1].get('format')),
                    'format_id': '%s+%s' % (formats_info[0].get('format_id'),
                                            formats_info[1].get('format_id')),
                    'width': formats_info[0].get('width'),
                    'height': formats_info[0].get('height'),
                    'resolution': formats_info[0].get('resolution'),
                    'fps': formats_info[0].get('fps'),
                    'vcodec': formats_info[0].get('vcodec'),
                    'vbr': formats_info[0].get('vbr'),
                    'stretched_ratio': formats_info[0].get('stretched_ratio'),
                    'acodec': formats_info[1].get('acodec'),
                    'abr': formats_info[1].get('abr'),
                    'ext': output_ext,
                }
            video_selector, audio_selector = map(_build_selector_function, selector.selector)

            def selector_function(ctx):
                # deepcopy so the two sub-selectors cannot see each
                # other's in-place mutations of the context.
                for pair in itertools.product(
                        video_selector(copy.deepcopy(ctx)), audio_selector(copy.deepcopy(ctx))):
                    yield _merge(pair)

        # Wrap the node's selector with its [ ... ] filters, if any.
        filters = [self._build_format_filter(f) for f in selector.filters]

        def final_selector(ctx):
            ctx_copy = copy.deepcopy(ctx)
            for _filter in filters:
                ctx_copy['formats'] = list(filter(_filter, ctx_copy['formats']))
            return selector_function(ctx_copy)
        return final_selector

    stream = io.BytesIO(format_spec.encode('utf-8'))
    try:
        tokens = list(_remove_unused_ops(compat_tokenize_tokenize(stream.readline)))
    except tokenize.TokenError:
        raise syntax_error('Missing closing/opening brackets or parenthesis', (0, len(format_spec)))

    class TokenIterator(object):
        # Iterator over the token list with single-token pushback
        # (restore_last_token), as required by the parser above.
        def __init__(self, tokens):
            self.tokens = tokens
            self.counter = 0

        def __iter__(self):
            return self

        def __next__(self):
            if self.counter >= len(self.tokens):
                raise StopIteration()
            value = self.tokens[self.counter]
            self.counter += 1
            return value

        next = __next__

        def restore_last_token(self):
            self.counter -= 1

    parsed_selector = _parse_format_selection(iter(TokenIterator(tokens)))
    return _build_selector_function(parsed_selector)
def _calc_headers(self, info_dict):
    """Return the HTTP headers for downloading *info_dict*: the global
    defaults overlaid with per-format headers, matching cookies and, if
    not already present, the spoofed client IP."""
    headers = std_headers.copy()

    extra = info_dict.get('http_headers')
    if extra:
        headers.update(extra)

    cookie_header = self._calc_cookies(info_dict)
    if cookie_header:
        headers['Cookie'] = cookie_header

    forwarded_ip = info_dict.get('__x_forwarded_for_ip')
    if forwarded_ip and 'X-Forwarded-For' not in headers:
        headers['X-Forwarded-For'] = forwarded_ip

    return headers
def _calc_cookies(self, info_dict):
    """Return the Cookie header value the cookiejar would send for the
    URL of *info_dict* (None when no cookie matches)."""
    request = sanitized_Request(info_dict['url'])
    self.cookiejar.add_cookie_header(request)
    return request.get_header('Cookie')
def process_video_result(self, info_dict, download=True):
    """Sanitize a single extracted video result in place (thumbnails,
    dates, subtitles, formats), run format selection on it and, if
    'download', download the chosen format(s) via process_info().

    Returns info_dict, updated with the last selected format; returns
    None early for the pure listing modes (--list-thumbnails,
    --list-subs, --list-formats).
    """
    assert info_dict.get('_type', 'video') == 'video'

    if 'id' not in info_dict:
        raise ExtractorError('Missing "id" field in extractor result')
    if 'title' not in info_dict:
        raise ExtractorError('Missing "title" field in extractor result')

    if not isinstance(info_dict['id'], compat_str):
        self.report_warning('"id" field is not a string - forcing string conversion')
        info_dict['id'] = compat_str(info_dict['id'])

    if 'playlist' not in info_dict:
        # It isn't part of a playlist
        info_dict['playlist'] = None
        info_dict['playlist_index'] = None

    # Normalize a lone 'thumbnail' into the 'thumbnails' list form.
    thumbnails = info_dict.get('thumbnails')
    if thumbnails is None:
        thumbnail = info_dict.get('thumbnail')
        if thumbnail:
            info_dict['thumbnails'] = thumbnails = [{'url': thumbnail}]
    if thumbnails:
        # Sort worst-to-best by preference, then size; missing values
        # sort first via the -1/'' fallbacks.
        thumbnails.sort(key=lambda t: (
            t.get('preference') if t.get('preference') is not None else -1,
            t.get('width') if t.get('width') is not None else -1,
            t.get('height') if t.get('height') is not None else -1,
            t.get('id') if t.get('id') is not None else '', t.get('url')))
        for i, t in enumerate(thumbnails):
            t['url'] = sanitize_url(t['url'])
            if t.get('width') and t.get('height'):
                t['resolution'] = '%dx%d' % (t['width'], t['height'])
            if t.get('id') is None:
                t['id'] = '%d' % i

    if self.params.get('list_thumbnails'):
        self.list_thumbnails(info_dict)
        return

    thumbnail = info_dict.get('thumbnail')
    if thumbnail:
        info_dict['thumbnail'] = sanitize_url(thumbnail)
    elif thumbnails:
        # thumbnails are sorted worst-to-best, so [-1] is the best one.
        info_dict['thumbnail'] = thumbnails[-1]['url']

    if 'display_id' not in info_dict and 'id' in info_dict:
        info_dict['display_id'] = info_dict['id']

    if info_dict.get('upload_date') is None and info_dict.get('timestamp') is not None:
        # Working around out-of-range timestamp values (e.g. negative ones on Windows,
        # see http://bugs.python.org/issue1646728)
        try:
            upload_date = datetime.datetime.utcfromtimestamp(info_dict['timestamp'])
            info_dict['upload_date'] = upload_date.strftime('%Y%m%d')
        except (ValueError, OverflowError, OSError):
            pass

    # Auto generate title fields corresponding to the *_number fields when missing
    # in order to always have clean titles. This is very common for TV series.
    for field in ('chapter', 'season', 'episode'):
        if info_dict.get('%s_number' % field) is not None and not info_dict.get(field):
            info_dict[field] = '%s %d' % (field.capitalize(), info_dict['%s_number' % field])

    subtitles = info_dict.get('subtitles')
    if subtitles:
        for _, subtitle in subtitles.items():
            for subtitle_format in subtitle:
                if subtitle_format.get('url'):
                    subtitle_format['url'] = sanitize_url(subtitle_format['url'])
                if subtitle_format.get('ext') is None:
                    subtitle_format['ext'] = determine_ext(subtitle_format['url']).lower()

    if self.params.get('listsubtitles', False):
        if 'automatic_captions' in info_dict:
            self.list_subtitles(info_dict['id'], info_dict.get('automatic_captions'), 'automatic captions')
        self.list_subtitles(info_dict['id'], subtitles, 'subtitles')
        return
    info_dict['requested_subtitles'] = self.process_subtitles(
        info_dict['id'], subtitles,
        info_dict.get('automatic_captions'))

    # We now pick which formats have to be downloaded
    if info_dict.get('formats') is None:
        # There's only one format available
        formats = [info_dict]
    else:
        formats = info_dict['formats']

    if not formats:
        raise ExtractorError('No video formats found!')

    formats_dict = {}

    # We check that all the formats have the format and format_id fields
    for i, format in enumerate(formats):
        if 'url' not in format:
            raise ExtractorError('Missing "url" key in result (index %d)' % i)

        format['url'] = sanitize_url(format['url'])

        if format.get('format_id') is None:
            format['format_id'] = compat_str(i)
        else:
            # Sanitize format_id from characters used in format selector expression
            format['format_id'] = re.sub(r'[\s,/+\[\]()]', '_', format['format_id'])
        format_id = format['format_id']
        if format_id not in formats_dict:
            formats_dict[format_id] = []
        formats_dict[format_id].append(format)

    # Make sure all formats have unique format_id
    for format_id, ambiguous_formats in formats_dict.items():
        if len(ambiguous_formats) > 1:
            for i, format in enumerate(ambiguous_formats):
                format['format_id'] = '%s-%d' % (format_id, i)

    for i, format in enumerate(formats):
        if format.get('format') is None:
            format['format'] = '{id} - {res}{note}'.format(
                id=format['format_id'],
                res=self.format_resolution(format),
                note=' ({0})'.format(format['format_note']) if format.get('format_note') is not None else '',
            )
        # Automatically determine file extension if missing
        if format.get('ext') is None:
            format['ext'] = determine_ext(format['url']).lower()
        # Automatically determine protocol if missing (useful for format
        # selection purposes)
        if format.get('protocol') is None:
            format['protocol'] = determine_protocol(format)
        # Add HTTP headers, so that external programs can use them from the
        # json output
        full_format_info = info_dict.copy()
        full_format_info.update(format)
        format['http_headers'] = self._calc_headers(full_format_info)

    # Remove private housekeeping stuff
    if '__x_forwarded_for_ip' in info_dict:
        del info_dict['__x_forwarded_for_ip']

    # TODO Central sorting goes here

    if formats[0] is not info_dict:
        # only set the 'formats' fields if the original info_dict list them
        # otherwise we end up with a circular reference, the first (and unique)
        # element in the 'formats' field in info_dict is info_dict itself,
        # which can't be exported to json
        info_dict['formats'] = formats
    if self.params.get('listformats'):
        self.list_formats(info_dict)
        return

    req_format = self.params.get('format')
    if req_format is None:
        req_format_list = []
        # Default: try merged bestvideo+bestaudio, but only when writing to
        # a real file (not stdout '-') and not live, and only if a merger
        # (ffmpeg/avconv) is actually available; always fall back to 'best'.
        if (self.params.get('outtmpl', DEFAULT_OUTTMPL) != '-' and
                not info_dict.get('is_live')):
            merger = FFmpegMergerPP(self)
            if merger.available and merger.can_merge():
                req_format_list.append('bestvideo+bestaudio')
        req_format_list.append('best')
        req_format = '/'.join(req_format_list)
    format_selector = self.build_format_selector(req_format)

    # While in format selection we may need to have an access to the original
    # format set in order to calculate some metrics or do some processing.
    # For now we need to be able to guess whether original formats provided
    # by extractor are incomplete or not (i.e. whether extractor provides only
    # video-only or audio-only formats) for proper formats selection for
    # extractors with such incomplete formats (see
    # https://github.com/rg3/youtube-dl/pull/5556).
    # Since formats may be filtered during format selection and may not match
    # the original formats the results may be incorrect. Thus original formats
    # or pre-calculated metrics should be passed to format selection routines
    # as well.
    # We will pass a context object containing all necessary additional data
    # instead of just formats.
    # This fixes incorrect format selection issue (see
    # https://github.com/rg3/youtube-dl/issues/10083).
    incomplete_formats = (
        # All formats are video-only or
        all(f.get('vcodec') != 'none' and f.get('acodec') == 'none' for f in formats) or
        # all formats are audio-only
        all(f.get('vcodec') == 'none' and f.get('acodec') != 'none' for f in formats))

    ctx = {
        'formats': formats,
        'incomplete_formats': incomplete_formats,
    }

    formats_to_download = list(format_selector(ctx))
    if not formats_to_download:
        raise ExtractorError('requested format not available',
                             expected=True)

    if download:
        if len(formats_to_download) > 1:
            self.to_screen('[info] %s: downloading video in %s formats' % (info_dict['id'], len(formats_to_download)))
        for format in formats_to_download:
            new_info = dict(info_dict)
            new_info.update(format)
            self.process_info(new_info)
    # We update the info dict with the best quality format (backwards compatibility)
    info_dict.update(formats_to_download[-1])
    return info_dict
def process_subtitles(self, video_id, normal_subtitles, automatic_captions):
    """Select the requested subtitles and their format.

    Combines normal subtitles and automatic captions (normal ones take
    precedence), narrows down to the requested languages and, per
    language, picks the format preferred by 'subtitlesformat'.
    Returns a dict {lang: format dict}, or None when subtitles were not
    requested or none are available.
    """
    available_subs = {}
    if normal_subtitles and self.params.get('writesubtitles'):
        available_subs.update(normal_subtitles)
    if automatic_captions and self.params.get('writeautomaticsub'):
        for lang, cap_info in automatic_captions.items():
            # Normal subtitles win over automatic captions per language.
            available_subs.setdefault(lang, cap_info)

    wants_subs = (self.params.get('writesubtitles') or
                  self.params.get('writeautomaticsub'))
    if not wants_subs or not available_subs:
        return None

    if self.params.get('allsubtitles', False):
        requested_langs = available_subs.keys()
    elif self.params.get('subtitleslangs', False):
        requested_langs = self.params.get('subtitleslangs')
    elif 'en' in available_subs:
        requested_langs = ['en']
    else:
        requested_langs = [list(available_subs.keys())[0]]

    formats_query = self.params.get('subtitlesformat', 'best')
    formats_preference = formats_query.split('/') if formats_query else []

    subs = {}
    for lang in requested_langs:
        formats = available_subs.get(lang)
        if formats is None:
            self.report_warning('%s subtitles not available for %s' % (lang, video_id))
            continue
        chosen = None
        for ext in formats_preference:
            if ext == 'best':
                # Formats are ordered worst-to-best; take the last.
                chosen = formats[-1]
                break
            matches = [fmt for fmt in formats if fmt['ext'] == ext]
            if matches:
                chosen = matches[-1]
                break
        if chosen is None:
            # No preference matched: fall back to the best available.
            chosen = formats[-1]
            self.report_warning(
                'No subtitle format found matching "%s" for language %s, '
                'using %s' % (formats_query, lang, chosen['ext']))
        subs[lang] = chosen
    return subs
def process_info(self, info_dict):
"""Process a single resolved IE result."""
assert info_dict.get('_type', 'video') == 'video'
max_downloads = self.params.get('max_downloads')
if max_downloads is not None:
if self._num_downloads >= int(max_downloads):
raise MaxDownloadsReached()
info_dict['fulltitle'] = info_dict['title']
if len(info_dict['title']) > 200:
info_dict['title'] = info_dict['title'][:197] + '...'
if 'format' not in info_dict:
info_dict['format'] = info_dict['ext']
reason = self._match_entry(info_dict, incomplete=False)
if reason is not None:
self.to_screen('[download] ' + reason)
return
self._num_downloads += 1
info_dict['_filename'] = filename = self.prepare_filename(info_dict)
# Forced printings
if self.params.get('forcetitle', False):
self.to_stdout(info_dict['fulltitle'])
if self.params.get('forceid', False):
self.to_stdout(info_dict['id'])
if self.params.get('forceurl', False):
if info_dict.get('requested_formats') is not None:
for f in info_dict['requested_formats']:
self.to_stdout(f['url'] + f.get('play_path', ''))
else:
# For RTMP URLs, also include the playpath
self.to_stdout(info_dict['url'] + info_dict.get('play_path', ''))
if self.params.get('forcethumbnail', False) and info_dict.get('thumbnail') is not None:
self.to_stdout(info_dict['thumbnail'])
if self.params.get('forcedescription', False) and info_dict.get('description') is not None:
self.to_stdout(info_dict['description'])
if self.params.get('forcefilename', False) and filename is not None:
self.to_stdout(filename)
if self.params.get('forceduration', False) and info_dict.get('duration') is not None:
self.to_stdout(formatSeconds(info_dict['duration']))
if self.params.get('forceformat', False):
self.to_stdout(info_dict['format'])
if self.params.get('forcejson', False):
self.to_stdout(json.dumps(info_dict))
# Do nothing else if in simulate mode
if self.params.get('simulate', False):
return
if filename is None:
return
try:
dn = os.path.dirname(sanitize_path(encodeFilename(filename)))
if dn and not os.path.exists(dn):
os.makedirs(dn)
except (OSError, IOError) as err:
self.report_error('unable to create directory ' + error_to_compat_str(err))
return
if self.params.get('writedescription', False):
descfn = replace_extension(filename, 'description', info_dict.get('ext'))
if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(descfn)):
self.to_screen('[info] Video description is already present')
elif info_dict.get('description') is None:
self.report_warning('There\'s no description to write.')
else:
try:
self.to_screen('[info] Writing video description to: ' + descfn)
with io.open(encodeFilename(descfn), 'w', encoding='utf-8') as descfile:
descfile.write(info_dict['description'])
except (OSError, IOError):
self.report_error('Cannot write description file ' + descfn)
return
if self.params.get('writeannotations', False):
annofn = replace_extension(filename, 'annotations.xml', info_dict.get('ext'))
if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(annofn)):
self.to_screen('[info] Video annotations are already present')
else:
try:
self.to_screen('[info] Writing video annotations to: ' + annofn)
with io.open(encodeFilename(annofn), 'w', encoding='utf-8') as annofile:
annofile.write(info_dict['annotations'])
except (KeyError, TypeError):
self.report_warning('There are no annotations to write.')
except (OSError, IOError):
self.report_error('Cannot write annotations file: ' + annofn)
return
subtitles_are_requested = any([self.params.get('writesubtitles', False),
self.params.get('writeautomaticsub')])
if subtitles_are_requested and info_dict.get('requested_subtitles'):
# subtitles download errors are already managed as troubles in relevant IE
# that way it will silently go on when used with unsupporting IE
subtitles = info_dict['requested_subtitles']
ie = self.get_info_extractor(info_dict['extractor_key'])
for sub_lang, sub_info in subtitles.items():
sub_format = sub_info['ext']
if sub_info.get('data') is not None:
sub_data = sub_info['data']
else:
try:
sub_data = ie._download_webpage(
sub_info['url'], info_dict['id'], note=False)
except ExtractorError as err:
self.report_warning('Unable to download subtitle for "%s": %s' %
(sub_lang, error_to_compat_str(err.cause)))
continue
try:
sub_filename = subtitles_filename(filename, sub_lang, sub_format)
if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(sub_filename)):
self.to_screen('[info] Video subtitle %s.%s is already_present' % (sub_lang, sub_format))
else:
self.to_screen('[info] Writing video subtitles to: ' + sub_filename)
# Use newline='' to prevent conversion of newline characters
# See https://github.com/rg3/youtube-dl/issues/10268
with io.open(encodeFilename(sub_filename), 'w', encoding='utf-8', newline='') as subfile:
subfile.write(sub_data)
except (OSError, IOError):
self.report_error('Cannot write subtitles file ' + sub_filename)
return
if self.params.get('writeinfojson', False):
infofn = replace_extension(filename, 'info.json', info_dict.get('ext'))
if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(infofn)):
self.to_screen('[info] Video description metadata is already present')
else:
self.to_screen('[info] Writing video description metadata as JSON to: ' + infofn)
try:
write_json_file(self.filter_requested_info(info_dict), infofn)
except (OSError, IOError):
self.report_error('Cannot write metadata to JSON file ' + infofn)
return
self._write_thumbnails(info_dict, filename)
if not self.params.get('skip_download', False):
try:
def dl(name, info):
fd = get_suitable_downloader(info, self.params)(self, self.params)
for ph in self._progress_hooks:
fd.add_progress_hook(ph)
if self.params.get('verbose'):
self.to_stdout('[debug] Invoking downloader on %r' % info.get('url'))
return fd.download(name, info)
if info_dict.get('requested_formats') is not None:
downloaded = []
success = True
merger = FFmpegMergerPP(self)
if not merger.available:
postprocessors = []
self.report_warning('You have requested multiple '
'formats but ffmpeg or avconv are not installed.'
' The formats won\'t be merged.')
else:
postprocessors = [merger]
def compatible_formats(formats):
video, audio = formats
# Check extension
video_ext, audio_ext = audio.get('ext'), video.get('ext')
if video_ext and audio_ext:
COMPATIBLE_EXTS = (
('mp3', 'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'ismv', 'isma'),
('webm')
)
for exts in COMPATIBLE_EXTS:
if video_ext in exts and audio_ext in exts:
return True
# TODO: Check acodec/vcodec
return False
filename_real_ext = os.path.splitext(filename)[1][1:]
filename_wo_ext = (
os.path.splitext(filename)[0]
if filename_real_ext == info_dict['ext']
else filename)
requested_formats = info_dict['requested_formats']
if self.params.get('merge_output_format') is None and not compatible_formats(requested_formats):
info_dict['ext'] = 'mkv'
self.report_warning(
'Requested formats are incompatible for merge and will be merged into mkv.')
# Ensure filename always has a correct extension for successful merge
filename = '%s.%s' % (filename_wo_ext, info_dict['ext'])
if os.path.exists(encodeFilename(filename)):
self.to_screen(
'[download] %s has already been downloaded and '
'merged' % filename)
else:
for f in requested_formats:
new_info = dict(info_dict)
new_info.update(f)
fname = self.prepare_filename(new_info)
fname = prepend_extension(fname, 'f%s' % f['format_id'], new_info['ext'])
downloaded.append(fname)
partial_success = dl(fname, new_info)
success = success and partial_success
info_dict['__postprocessors'] = postprocessors
info_dict['__files_to_merge'] = downloaded
else:
# Just a single file
success = dl(filename, info_dict)
except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
self.report_error('unable to download video data: %s' % error_to_compat_str(err))
return
except (OSError, IOError) as err:
raise UnavailableVideoError(err)
except (ContentTooShortError, ) as err:
self.report_error('content too short (expected %s bytes and served %s)' % (err.expected, err.downloaded))
return
if success and filename != '-':
# Fixup content
fixup_policy = self.params.get('fixup')
if fixup_policy is None:
fixup_policy = 'detect_or_warn'
INSTALL_FFMPEG_MESSAGE = 'Install ffmpeg or avconv to fix this automatically.'
stretched_ratio = info_dict.get('stretched_ratio')
if stretched_ratio is not None and stretched_ratio != 1:
if fixup_policy == 'warn':
self.report_warning('%s: Non-uniform pixel ratio (%s)' % (
info_dict['id'], stretched_ratio))
elif fixup_policy == 'detect_or_warn':
stretched_pp = FFmpegFixupStretchedPP(self)
if stretched_pp.available:
info_dict.setdefault('__postprocessors', [])
info_dict['__postprocessors'].append(stretched_pp)
else:
self.report_warning(
'%s: Non-uniform pixel ratio (%s). %s'
% (info_dict['id'], stretched_ratio, INSTALL_FFMPEG_MESSAGE))
else:
assert fixup_policy in ('ignore', 'never')
if (info_dict.get('requested_formats') is None and
info_dict.get('container') == 'm4a_dash'):
if fixup_policy == 'warn':
self.report_warning(
'%s: writing DASH m4a. '
'Only some players support this container.'
% info_dict['id'])
elif fixup_policy == 'detect_or_warn':
fixup_pp = FFmpegFixupM4aPP(self)
if fixup_pp.available:
info_dict.setdefault('__postprocessors', [])
info_dict['__postprocessors'].append(fixup_pp)
else:
self.report_warning(
'%s: writing DASH m4a. '
'Only some players support this container. %s'
% (info_dict['id'], INSTALL_FFMPEG_MESSAGE))
else:
assert fixup_policy in ('ignore', 'never')
if (info_dict.get('protocol') == 'm3u8_native' or
info_dict.get('protocol') == 'm3u8' and
self.params.get('hls_prefer_native')):
if fixup_policy == 'warn':
self.report_warning('%s: malformated aac bitstream.' % (
info_dict['id']))
elif fixup_policy == 'detect_or_warn':
fixup_pp = FFmpegFixupM3u8PP(self)
if fixup_pp.available:
info_dict.setdefault('__postprocessors', [])
info_dict['__postprocessors'].append(fixup_pp)
else:
self.report_warning(
'%s: malformated aac bitstream. %s'
% (info_dict['id'], INSTALL_FFMPEG_MESSAGE))
else:
assert fixup_policy in ('ignore', 'never')
try:
self.post_process(filename, info_dict)
except (PostProcessingError) as err:
self.report_error('postprocessing: %s' % str(err))
return
self.record_download_archive(info_dict)
def download(self, url_list):
"""Download a given list of URLs."""
outtmpl = self.params.get('outtmpl', DEFAULT_OUTTMPL)
if (len(url_list) > 1 and
'%' not in outtmpl and
self.params.get('max_downloads') != 1):
raise SameFileError(outtmpl)
for url in url_list:
try:
# It also downloads the videos
res = self.extract_info(
url, force_generic_extractor=self.params.get('force_generic_extractor', False))
except UnavailableVideoError:
self.report_error('unable to download video')
except MaxDownloadsReached:
self.to_screen('[info] Maximum number of downloaded files reached.')
raise
else:
if self.params.get('dump_single_json', False):
self.to_stdout(json.dumps(res))
return self._download_retcode
    def download_with_info_file(self, info_filename):
        """Re-run a download from a previously dumped info JSON file.

        Falls back to a fresh extraction of the original webpage URL when
        processing the stored info fails with a DownloadError.
        """
        with contextlib.closing(fileinput.FileInput(
                [info_filename], mode='r',
                openhook=fileinput.hook_encoded('utf-8'))) as f:
            # FileInput doesn't have a read method, we can't call json.load
            info = self.filter_requested_info(json.loads('\n'.join(f)))
        try:
            self.process_ie_result(info, download=True)
        except DownloadError:
            webpage_url = info.get('webpage_url')
            if webpage_url is not None:
                # Retry by extracting from scratch instead of the stale info
                self.report_warning('The info failed to download, trying with "%s"' % webpage_url)
                return self.download([webpage_url])
            else:
                raise
        return self._download_retcode
@staticmethod
def filter_requested_info(info_dict):
return dict(
(k, v) for k, v in info_dict.items()
if k not in ['requested_formats', 'requested_subtitles'])
    def post_process(self, filename, ie_info):
        """Run all the postprocessors on the given file."""
        info = dict(ie_info)
        info['filepath'] = filename
        pps_chain = []
        # Per-download postprocessors (e.g. the merger set up during the
        # download) run before the globally registered ones.
        if ie_info.get('__postprocessors') is not None:
            pps_chain.extend(ie_info['__postprocessors'])
        pps_chain.extend(self._pps)
        for pp in pps_chain:
            files_to_delete = []
            try:
                # Each postprocessor may rewrite the info dict and report
                # intermediate files that are no longer needed.
                files_to_delete, info = pp.run(info)
            except PostProcessingError as e:
                self.report_error(e.msg)
            if files_to_delete and not self.params.get('keepvideo', False):
                for old_filename in files_to_delete:
                    self.to_screen('Deleting original file %s (pass -k to keep)' % old_filename)
                    try:
                        os.remove(encodeFilename(old_filename))
                    except (IOError, OSError):
                        self.report_warning('Unable to remove downloaded original file')
def _make_archive_id(self, info_dict):
# Future-proof against any change in case
# and backwards compatibility with prior versions
extractor = info_dict.get('extractor_key')
if extractor is None:
if 'id' in info_dict:
extractor = info_dict.get('ie_key') # key in a playlist
if extractor is None:
return None # Incomplete video information
return extractor.lower() + ' ' + info_dict['id']
    def in_download_archive(self, info_dict):
        """Return True if this video is already recorded in the download archive."""
        fn = self.params.get('download_archive')
        if fn is None:
            return False
        vid_id = self._make_archive_id(info_dict)
        if vid_id is None:
            return False  # Incomplete video information
        try:
            with locked_file(fn, 'r', encoding='utf-8') as archive_file:
                for line in archive_file:
                    if line.strip() == vid_id:
                        return True
        except IOError as ioe:
            # A missing archive file just means nothing was recorded yet
            if ioe.errno != errno.ENOENT:
                raise
        return False
    def record_download_archive(self, info_dict):
        """Append this video's archive id to the download archive file."""
        fn = self.params.get('download_archive')
        if fn is None:
            return
        vid_id = self._make_archive_id(info_dict)
        # Callers only record videos with complete information
        assert vid_id
        with locked_file(fn, 'a', encoding='utf-8') as archive_file:
            archive_file.write(vid_id + '\n')
@staticmethod
def format_resolution(format, default='unknown'):
if format.get('vcodec') == 'none':
return 'audio only'
if format.get('resolution') is not None:
return format['resolution']
if format.get('height') is not None:
if format.get('width') is not None:
res = '%sx%s' % (format['width'], format['height'])
else:
res = '%sp' % format['height']
elif format.get('width') is not None:
res = '%dx?' % format['width']
else:
res = default
return res
    def _format_note(self, fdict):
        """Build the human-readable "note" column for one format listing row.

        The string is assembled piecewise; most sections prepend a separator
        only when something was already emitted, so the order of the checks
        below is significant.
        """
        res = ''
        if fdict.get('ext') in ['f4f', 'f4m']:
            res += '(unsupported) '
        if fdict.get('language'):
            if res:
                res += ' '
            res += '[%s] ' % fdict['language']
        if fdict.get('format_note') is not None:
            res += fdict['format_note'] + ' '
        if fdict.get('tbr') is not None:
            res += '%4dk ' % fdict['tbr']
        if fdict.get('container') is not None:
            if res:
                res += ', '
            res += '%s container' % fdict['container']
        if (fdict.get('vcodec') is not None and
                fdict.get('vcodec') != 'none'):
            if res:
                res += ', '
            res += fdict['vcodec']
            # '@' glues the codec to the bitrate appended below
            if fdict.get('vbr') is not None:
                res += '@'
        elif fdict.get('vbr') is not None and fdict.get('abr') is not None:
            # Bitrates known but video codec unknown: label the number
            res += 'video@'
        if fdict.get('vbr') is not None:
            res += '%4dk' % fdict['vbr']
        if fdict.get('fps') is not None:
            if res:
                res += ', '
            res += '%sfps' % fdict['fps']
        if fdict.get('acodec') is not None:
            if res:
                res += ', '
            if fdict['acodec'] == 'none':
                res += 'video only'
            else:
                res += '%-5s' % fdict['acodec']
        elif fdict.get('abr') is not None:
            if res:
                res += ', '
            res += 'audio'
        if fdict.get('abr') is not None:
            res += '@%3dk' % fdict['abr']
        if fdict.get('asr') is not None:
            res += ' (%5dHz)' % fdict['asr']
        if fdict.get('filesize') is not None:
            if res:
                res += ', '
            res += format_bytes(fdict['filesize'])
        elif fdict.get('filesize_approx') is not None:
            if res:
                res += ', '
            res += '~' + format_bytes(fdict['filesize_approx'])
        return res
    def list_formats(self, info_dict):
        """Print the table of available formats for a video."""
        formats = info_dict.get('formats', [info_dict])
        table = [
            [f['format_id'], f['ext'], self.format_resolution(f), self._format_note(f)]
            for f in formats
            if f.get('preference') is None or f['preference'] >= -1000]
        if len(formats) > 1:
            # The last format in the list is labelled as the best one
            table[-1][-1] += (' ' if table[-1][-1] else '') + '(best)'
        header_line = ['format code', 'extension', 'resolution', 'note']
        self.to_screen(
            '[info] Available formats for %s:\n%s' %
            (info_dict['id'], render_table(header_line, table)))
def list_thumbnails(self, info_dict):
thumbnails = info_dict.get('thumbnails')
if not thumbnails:
self.to_screen('[info] No thumbnails present for %s' % info_dict['id'])
return
self.to_screen(
'[info] Thumbnails for %s:' % info_dict['id'])
self.to_screen(render_table(
['ID', 'width', 'height', 'URL'],
[[t['id'], t.get('width', 'unknown'), t.get('height', 'unknown'), t['url']] for t in thumbnails]))
def list_subtitles(self, video_id, subtitles, name='subtitles'):
if not subtitles:
self.to_screen('%s has no %s' % (video_id, name))
return
self.to_screen(
'Available %s for %s:' % (name, video_id))
self.to_screen(render_table(
['Language', 'formats'],
[[lang, ', '.join(f['ext'] for f in reversed(formats))]
for lang, formats in subtitles.items()]))
def urlopen(self, req):
""" Start an HTTP download """
if isinstance(req, compat_basestring):
req = sanitized_Request(req)
return self._opener.open(req, timeout=self._socket_timeout)
    def print_debug_header(self):
        """Write verbose-mode debug information (versions, encodings, proxies)."""
        if not self.params.get('verbose'):
            return
        if type('') is not compat_str:
            # Python 2.6 on SLES11 SP1 (https://github.com/rg3/youtube-dl/issues/3326)
            self.report_warning(
                'Your Python is broken! Update to a newer and supported version')
        stdout_encoding = getattr(
            sys.stdout, 'encoding', 'missing (%s)' % type(sys.stdout).__name__)
        encoding_str = (
            '[debug] Encodings: locale %s, fs %s, out %s, pref %s\n' % (
                locale.getpreferredencoding(),
                sys.getfilesystemencoding(),
                stdout_encoding,
                self.get_encoding()))
        write_string(encoding_str, encoding=None)
        self._write_string('[debug] youtube-dl version ' + __version__ + '\n')
        if _LAZY_LOADER:
            self._write_string('[debug] Lazy loading extractors enabled' + '\n')
        try:
            # Report the git revision when running from a source checkout
            sp = subprocess.Popen(
                ['git', 'rev-parse', '--short', 'HEAD'],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                cwd=os.path.dirname(os.path.abspath(__file__)))
            out, err = sp.communicate()
            out = out.decode().strip()
            if re.match('[0-9a-f]+', out):
                self._write_string('[debug] Git HEAD: ' + out + '\n')
        except Exception:
            try:
                # sys.exc_clear only exists on Python 2; best-effort cleanup
                sys.exc_clear()
            except Exception:
                pass
        self._write_string('[debug] Python version %s - %s\n' % (
            platform.python_version(), platform_name()))
        exe_versions = FFmpegPostProcessor.get_versions(self)
        exe_versions['rtmpdump'] = rtmpdump_version()
        exe_str = ', '.join(
            '%s %s' % (exe, v)
            for exe, v in sorted(exe_versions.items())
            if v
        )
        if not exe_str:
            exe_str = 'none'
        self._write_string('[debug] exe versions: %s\n' % exe_str)
        proxy_map = {}
        for handler in self._opener.handlers:
            if hasattr(handler, 'proxies'):
                proxy_map.update(handler.proxies)
        self._write_string('[debug] Proxy map: ' + compat_str(proxy_map) + '\n')
        if self.params.get('call_home', False):
            # Opt-in: contact yt-dl.org to report the public IP and compare
            # the running version against the latest release
            ipaddr = self.urlopen('https://yt-dl.org/ip').read().decode('utf-8')
            self._write_string('[debug] Public IP address: %s\n' % ipaddr)
            latest_version = self.urlopen(
                'https://yt-dl.org/latest/version').read().decode('utf-8')
            if version_tuple(latest_version) > version_tuple(__version__):
                self.report_warning(
                    'You are using an outdated version (newest version: %s)! '
                    'See https://yt-dl.org/update if you need help updating.' %
                    latest_version)
    def _setup_opener(self):
        """Build the urllib opener: cookies, proxies, TLS and custom handlers."""
        timeout_val = self.params.get('socket_timeout')
        self._socket_timeout = 600 if timeout_val is None else float(timeout_val)
        opts_cookiefile = self.params.get('cookiefile')
        opts_proxy = self.params.get('proxy')
        if opts_cookiefile is None:
            self.cookiejar = compat_cookiejar.CookieJar()
        else:
            opts_cookiefile = compat_expanduser(opts_cookiefile)
            self.cookiejar = compat_cookiejar.MozillaCookieJar(
                opts_cookiefile)
            # Load existing cookies only when the file is readable; a missing
            # file will simply be created on save
            if os.access(opts_cookiefile, os.R_OK):
                self.cookiejar.load()
        cookie_processor = YoutubeDLCookieProcessor(self.cookiejar)
        if opts_proxy is not None:
            # An empty --proxy disables proxying entirely
            if opts_proxy == '':
                proxies = {}
            else:
                proxies = {'http': opts_proxy, 'https': opts_proxy}
        else:
            proxies = compat_urllib_request.getproxies()
            # Set HTTPS proxy to HTTP one if given (https://github.com/rg3/youtube-dl/issues/805)
            if 'http' in proxies and 'https' not in proxies:
                proxies['https'] = proxies['http']
        proxy_handler = PerRequestProxyHandler(proxies)
        debuglevel = 1 if self.params.get('debug_printtraffic') else 0
        https_handler = make_HTTPS_handler(self.params, debuglevel=debuglevel)
        ydlh = YoutubeDLHandler(self.params, debuglevel=debuglevel)
        data_handler = compat_urllib_request_DataHandler()
        # When passing our own FileHandler instance, build_opener won't add the
        # default FileHandler and allows us to disable the file protocol, which
        # can be used for malicious purposes (see
        # https://github.com/rg3/youtube-dl/issues/8227)
        file_handler = compat_urllib_request.FileHandler()
        def file_open(*args, **kwargs):
            raise compat_urllib_error.URLError('file:// scheme is explicitly disabled in youtube-dl for security reasons')
        file_handler.file_open = file_open
        opener = compat_urllib_request.build_opener(
            proxy_handler, https_handler, cookie_processor, ydlh, data_handler, file_handler)
        # Delete the default user-agent header, which would otherwise apply in
        # cases where our custom HTTP handler doesn't come into play
        # (See https://github.com/rg3/youtube-dl/issues/1309 for details)
        opener.addheaders = []
        self._opener = opener
def encode(self, s):
if isinstance(s, bytes):
return s # Already encoded
try:
return s.encode(self.get_encoding())
except UnicodeEncodeError as err:
err.reason = err.reason + '. Check your system encoding configuration or use the --encoding option.'
raise
def get_encoding(self):
encoding = self.params.get('encoding')
if encoding is None:
encoding = preferredencoding()
return encoding
    def _write_thumbnails(self, info_dict, filename):
        """Download and save thumbnail image(s) next to the media file."""
        if self.params.get('writethumbnail', False):
            thumbnails = info_dict.get('thumbnails')
            if thumbnails:
                # Only the last thumbnail in the list is written
                thumbnails = [thumbnails[-1]]
        elif self.params.get('write_all_thumbnails', False):
            thumbnails = info_dict.get('thumbnails')
        else:
            return
        if not thumbnails:
            # No thumbnails present, so return immediately
            return
        for t in thumbnails:
            thumb_ext = determine_ext(t['url'], 'jpg')
            # Disambiguate file names only when several thumbnails are saved
            suffix = '_%s' % t['id'] if len(thumbnails) > 1 else ''
            thumb_display_id = '%s ' % t['id'] if len(thumbnails) > 1 else ''
            t['filename'] = thumb_filename = os.path.splitext(filename)[0] + suffix + '.' + thumb_ext
            if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(thumb_filename)):
                self.to_screen('[%s] %s: Thumbnail %sis already present' %
                               (info_dict['extractor'], info_dict['id'], thumb_display_id))
            else:
                self.to_screen('[%s] %s: Downloading thumbnail %s...' %
                               (info_dict['extractor'], info_dict['id'], thumb_display_id))
                try:
                    uf = self.urlopen(t['url'])
                    with open(encodeFilename(thumb_filename), 'wb') as thumbf:
                        shutil.copyfileobj(uf, thumbf)
                    self.to_screen('[%s] %s: Writing thumbnail %sto: %s' %
                                   (info_dict['extractor'], info_dict['id'], thumb_display_id, thumb_filename))
                except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
                    # A failed thumbnail download must not abort the video download
                    self.report_warning('Unable to download thumbnail "%s": %s' %
                                        (t['url'], error_to_compat_str(err)))
| 46.14872 | 194 | 0.551653 |
from __future__ import absolute_import, unicode_literals
import collections
import contextlib
import copy
import datetime
import errno
import fileinput
import io
import itertools
import json
import locale
import operator
import os
import platform
import re
import shutil
import subprocess
import socket
import sys
import time
import tokenize
import traceback
import random
from .compat import (
compat_basestring,
compat_cookiejar,
compat_expanduser,
compat_get_terminal_size,
compat_http_client,
compat_kwargs,
compat_numeric_types,
compat_os_name,
compat_str,
compat_tokenize_tokenize,
compat_urllib_error,
compat_urllib_request,
compat_urllib_request_DataHandler,
)
from .utils import (
age_restricted,
args_to_str,
ContentTooShortError,
date_from_str,
DateRange,
DEFAULT_OUTTMPL,
determine_ext,
determine_protocol,
DownloadError,
encode_compat_str,
encodeFilename,
error_to_compat_str,
ExtractorError,
format_bytes,
formatSeconds,
GeoRestrictedError,
ISO3166Utils,
locked_file,
make_HTTPS_handler,
MaxDownloadsReached,
PagedList,
parse_filesize,
PerRequestProxyHandler,
platform_name,
PostProcessingError,
preferredencoding,
prepend_extension,
register_socks_protocols,
render_table,
replace_extension,
SameFileError,
sanitize_filename,
sanitize_path,
sanitize_url,
sanitized_Request,
std_headers,
subtitles_filename,
UnavailableVideoError,
url_basename,
version_tuple,
write_json_file,
write_string,
YoutubeDLCookieProcessor,
YoutubeDLHandler,
)
from .cache import Cache
from .extractor import get_info_extractor, gen_extractor_classes, _LAZY_LOADER
from .downloader import get_suitable_downloader
from .downloader.rtmp import rtmpdump_version
from .postprocessor import (
FFmpegFixupM3u8PP,
FFmpegFixupM4aPP,
FFmpegFixupStretchedPP,
FFmpegMergerPP,
FFmpegPostProcessor,
get_postprocessor,
)
from .version import __version__
if compat_os_name == 'nt':
import ctypes
class YoutubeDL(object):
params = None
_ies = []
_pps = []
_download_retcode = None
_num_downloads = None
_screen_file = None
    def __init__(self, params=None, auto_init=True):
        """Create a YoutubeDL object with the given option dict *params*.

        When *auto_init* is true, also print the debug header, register the
        default info extractors and set up configured postprocessors/hooks.
        """
        if params is None:
            params = {}
        self._ies = []
        self._ies_instances = {}
        self._pps = []
        self._progress_hooks = []
        self._download_retcode = 0
        self._num_downloads = 0
        # Screen output goes to stderr instead of stdout when logtostderr is set
        self._screen_file = [sys.stdout, sys.stderr][params.get('logtostderr', False)]
        self._err_file = sys.stderr
        self.params = {
            'nocheckcertificate': False,
        }
        self.params.update(params)
        self.cache = Cache(self)
        def check_deprecated(param, option, suggestion):
            # Warn about a deprecated option; returns True when it was used
            if self.params.get(param) is not None:
                self.report_warning(
                    '%s is deprecated. Use %s instead.' % (option, suggestion))
                return True
            return False
        if check_deprecated('cn_verification_proxy', '--cn-verification-proxy', '--geo-verification-proxy'):
            if self.params.get('geo_verification_proxy') is None:
                self.params['geo_verification_proxy'] = self.params['cn_verification_proxy']
        check_deprecated('autonumber_size', '--autonumber-size', 'output template with %(autonumber)0Nd, where N in the number of digits')
        check_deprecated('autonumber', '--auto-number', '-o "%(autonumber)s-%(title)s.%(ext)s"')
        check_deprecated('usetitle', '--title', '-o "%(title)s-%(id)s.%(ext)s"')
        if params.get('bidi_workaround', False):
            try:
                import pty
                master, slave = pty.openpty()
                width = compat_get_terminal_size().columns
                if width is None:
                    width_args = []
                else:
                    width_args = ['-w', str(width)]
                sp_kwargs = dict(
                    stdin=subprocess.PIPE,
                    stdout=slave,
                    stderr=self._err_file)
                try:
                    # Prefer bidiv; fall back to fribidi when it is missing
                    self._output_process = subprocess.Popen(
                        ['bidiv'] + width_args, **sp_kwargs
                    )
                except OSError:
                    self._output_process = subprocess.Popen(
                        ['fribidi', '-c', 'UTF-8'] + width_args, **sp_kwargs)
                self._output_channel = os.fdopen(master, 'rb')
            except OSError as ose:
                if ose.errno == errno.ENOENT:
                    self.report_warning('Could not find fribidi executable, ignoring --bidi-workaround . Make sure that fribidi is an executable file in one of the directories in your $PATH.')
                else:
                    raise
        if (sys.version_info >= (3,) and sys.platform != 'win32' and
                sys.getfilesystemencoding() in ['ascii', 'ANSI_X3.4-1968'] and
                not params.get('restrictfilenames', False)):
            # An ASCII filesystem encoding cannot represent arbitrary titles
            self.report_warning(
                'Assuming --restrict-filenames since file system encoding '
                'cannot encode all characters. '
                'Set the LC_ALL environment variable to fix this.')
            self.params['restrictfilenames'] = True
        if isinstance(params.get('outtmpl'), bytes):
            self.report_warning(
                'Parameter outtmpl is bytes, but should be a unicode string. '
                'Put from __future__ import unicode_literals at the top of your code file or consider switching to Python 3.x.')
        self._setup_opener()
        if auto_init:
            self.print_debug_header()
            self.add_default_info_extractors()
        # Instantiate the postprocessors declared in params
        for pp_def_raw in self.params.get('postprocessors', []):
            pp_class = get_postprocessor(pp_def_raw['key'])
            pp_def = dict(pp_def_raw)
            del pp_def['key']
            pp = pp_class(self, **compat_kwargs(pp_def))
            self.add_post_processor(pp)
        for ph in self.params.get('progress_hooks', []):
            self.add_progress_hook(ph)
        register_socks_protocols()
def warn_if_short_id(self, argv):
idxs = [
i for i, a in enumerate(argv)
if re.match(r'^-[0-9A-Za-z_-]{10}$', a)]
if idxs:
correct_argv = (
['youtube-dl'] +
[a for i, a in enumerate(argv) if i not in idxs] +
['--'] + [argv[i] for i in idxs]
)
self.report_warning(
'Long argument string detected. '
'Use -- to separate parameters and URLs, like this:\n%s\n' %
args_to_str(correct_argv))
def add_info_extractor(self, ie):
self._ies.append(ie)
if not isinstance(ie, type):
self._ies_instances[ie.ie_key()] = ie
ie.set_downloader(self)
def get_info_extractor(self, ie_key):
ie = self._ies_instances.get(ie_key)
if ie is None:
ie = get_info_extractor(ie_key)()
self.add_info_extractor(ie)
return ie
def add_default_info_extractors(self):
for ie in gen_extractor_classes():
self.add_info_extractor(ie)
def add_post_processor(self, pp):
self._pps.append(pp)
pp.set_downloader(self)
def add_progress_hook(self, ph):
self._progress_hooks.append(ph)
    def _bidi_workaround(self, message):
        """Pipe *message* through the bidiv/fribidi subprocess so right-to-left
        text renders correctly; a no-op when the workaround is not active."""
        if not hasattr(self, '_output_channel'):
            return message
        assert hasattr(self, '_output_process')
        assert isinstance(message, compat_str)
        line_count = message.count('\n') + 1
        self._output_process.stdin.write((message + '\n').encode('utf-8'))
        self._output_process.stdin.flush()
        # Read back exactly as many lines as were sent
        res = ''.join(self._output_channel.readline().decode('utf-8')
                      for _ in range(line_count))
        # Drop the trailing newline added above
        return res[:-len('\n')]
def to_screen(self, message, skip_eol=False):
return self.to_stdout(message, skip_eol, check_quiet=True)
    def _write_string(self, s, out=None):
        # Low-level write honouring the configured output encoding.
        write_string(s, out=out, encoding=self.params.get('encoding'))
    def to_stdout(self, message, skip_eol=False, check_quiet=False):
        """Print *message* to stdout, or route it to the configured logger."""
        if self.params.get('logger'):
            self.params['logger'].debug(message)
        elif not check_quiet or not self.params.get('quiet', False):
            message = self._bidi_workaround(message)
            # Index by the boolean: newline unless skip_eol is set
            terminator = ['\n', ''][skip_eol]
            output = message + terminator
            self._write_string(output, self._screen_file)
    def to_stderr(self, message):
        """Print *message* to stderr, or route it to the configured logger."""
        assert isinstance(message, compat_str)
        if self.params.get('logger'):
            self.params['logger'].error(message)
        else:
            message = self._bidi_workaround(message)
            output = message + '\n'
            self._write_string(output, self._err_file)
    def to_console_title(self, message):
        """Set the terminal/console window title to *message* when enabled."""
        if not self.params.get('consoletitle', False):
            return
        if compat_os_name == 'nt' and ctypes.windll.kernel32.GetConsoleWindow():
            # Windows console: use the Win32 API directly
            ctypes.windll.kernel32.SetConsoleTitleW(ctypes.c_wchar_p(message))
        elif 'TERM' in os.environ:
            # xterm-compatible terminals: OSC 0 escape sequence sets the title
            self._write_string('\033]0;%s\007' % message, self._screen_file)
def save_console_title(self):
if not self.params.get('consoletitle', False):
return
if 'TERM' in os.environ:
self._write_string('\033[22;0t', self._screen_file)
def restore_console_title(self):
if not self.params.get('consoletitle', False):
return
if 'TERM' in os.environ:
self._write_string('\033[23;0t', self._screen_file)
def __enter__(self):
self.save_console_title()
return self
def __exit__(self, *args):
self.restore_console_title()
if self.params.get('cookiefile') is not None:
self.cookiejar.save()
    def trouble(self, message=None, tb=None):
        """Handle a download problem: print *message*, optionally a traceback,
        and either raise DownloadError or just record a non-zero retcode,
        depending on the ignoreerrors option. *tb* is extra traceback text."""
        if message is not None:
            self.to_stderr(message)
        if self.params.get('verbose'):
            if tb is None:
                if sys.exc_info()[0]:
                    # Currently handling an exception: include any chained
                    # extractor exc_info plus the current traceback
                    tb = ''
                    if hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
                        tb += ''.join(traceback.format_exception(*sys.exc_info()[1].exc_info))
                    tb += encode_compat_str(traceback.format_exc())
                else:
                    # Not inside an except block: dump the current call stack
                    tb_data = traceback.format_list(traceback.extract_stack())
                    tb = ''.join(tb_data)
            self.to_stderr(tb)
        if not self.params.get('ignoreerrors', False):
            # Prefer the original (chained) exc_info when available
            if sys.exc_info()[0] and hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
                exc_info = sys.exc_info()[1].exc_info
            else:
                exc_info = sys.exc_info()
            raise DownloadError(message, exc_info)
        self._download_retcode = 1
    def report_warning(self, message):
        """Print a warning to stderr (or the logger), colorized when supported."""
        if self.params.get('logger') is not None:
            self.params['logger'].warning(message)
        else:
            if self.params.get('no_warnings'):
                return
            if not self.params.get('no_color') and self._err_file.isatty() and compat_os_name != 'nt':
                # ANSI yellow prefix on capable terminals
                _msg_header = '\033[0;33mWARNING:\033[0m'
            else:
                _msg_header = 'WARNING:'
            warning_message = '%s %s' % (_msg_header, message)
            self.to_stderr(warning_message)
    def report_error(self, message, tb=None):
        """Prefix *message* as an error and delegate handling to trouble()."""
        if not self.params.get('no_color') and self._err_file.isatty() and compat_os_name != 'nt':
            # ANSI red prefix on capable terminals
            _msg_header = '\033[0;31mERROR:\033[0m'
        else:
            _msg_header = 'ERROR:'
        error_message = '%s %s' % (_msg_header, message)
        self.trouble(error_message, tb)
    def report_file_already_downloaded(self, file_name):
        """Report that the given file has already been downloaded."""
        try:
            self.to_screen('[download] %s has already been downloaded' % file_name)
        except UnicodeEncodeError:
            # The file name cannot be represented in the console encoding
            self.to_screen('[download] The file has already been downloaded')
    def prepare_filename(self, info_dict):
        """Generate the output filename from the outtmpl template.

        Returns None (after reporting an error) when the template is invalid.
        """
        try:
            template_dict = dict(info_dict)
            template_dict['epoch'] = int(time.time())
            autonumber_size = self.params.get('autonumber_size')
            if autonumber_size is None:
                autonumber_size = 5
            template_dict['autonumber'] = self.params.get('autonumber_start', 1) - 1 + self._num_downloads
            if template_dict.get('resolution') is None:
                # Synthesize a resolution string from width/height when missing
                if template_dict.get('width') and template_dict.get('height'):
                    template_dict['resolution'] = '%dx%d' % (template_dict['width'], template_dict['height'])
                elif template_dict.get('height'):
                    template_dict['resolution'] = '%sp' % template_dict['height']
                elif template_dict.get('width'):
                    template_dict['resolution'] = '%dx?' % template_dict['width']
            sanitize = lambda k, v: sanitize_filename(
                compat_str(v),
                restricted=self.params.get('restrictfilenames'),
                is_id=(k == 'id'))
            # Sanitize every non-numeric scalar value; drop None and containers
            template_dict = dict((k, v if isinstance(v, compat_numeric_types) else sanitize(k, v))
                                 for k, v in template_dict.items()
                                 if v is not None and not isinstance(v, (list, tuple, dict)))
            # Missing fields render as 'NA' instead of raising KeyError
            template_dict = collections.defaultdict(lambda: 'NA', template_dict)
            outtmpl = self.params.get('outtmpl', DEFAULT_OUTTMPL)
            # For compatibility, expand %(autonumber)s / %(playlist_index)s
            # into zero-padded numeric conversions of the appropriate width
            field_size_compat_map = {
                'playlist_index': len(str(template_dict['n_entries'])),
                'autonumber': autonumber_size,
            }
            FIELD_SIZE_COMPAT_RE = r'(?<!%)%\((?P<field>autonumber|playlist_index)\)s'
            mobj = re.search(FIELD_SIZE_COMPAT_RE, outtmpl)
            if mobj:
                outtmpl = re.sub(
                    FIELD_SIZE_COMPAT_RE,
                    r'%%(\1)0%dd' % field_size_compat_map[mobj.group('field')],
                    outtmpl)
            NUMERIC_FIELDS = set((
                'width', 'height', 'tbr', 'abr', 'asr', 'vbr', 'fps', 'filesize', 'filesize_approx',
                'upload_year', 'upload_month', 'upload_day',
                'duration', 'view_count', 'like_count', 'dislike_count', 'repost_count',
                'average_rating', 'comment_count', 'age_limit',
                'start_time', 'end_time',
                'chapter_number', 'season_number', 'episode_number',
                'track_number', 'disc_number', 'release_year',
                'playlist_index',
            ))
            # A numeric conversion (e.g. %(height)d) of a missing field would
            # raise on the 'NA' placeholder; rewrite such occurrences to %s
            for numeric_field in NUMERIC_FIELDS:
                if numeric_field not in template_dict:
                    FORMAT_RE = r'''(?x)
                        (?<!%)
                        %
                        \({0}\)  # mapping key
                        (?:[#0\-+ ]+)?  # conversion flags (optional)
                        (?:\d+)?  # minimum field width (optional)
                        (?:\.\d+)?  # precision (optional)
                        [hlL]?  # length modifier (optional)
                        [diouxXeEfFgGcrs%]  # conversion type
                    '''
                    outtmpl = re.sub(
                        FORMAT_RE.format(numeric_field),
                        r'%({0})s'.format(numeric_field), outtmpl)
            tmpl = compat_expanduser(outtmpl)
            filename = tmpl % template_dict
            if sys.version_info < (3, 0) and sys.platform == 'win32':
                # Work around filename encoding issues on Python 2 + Windows
                filename = encodeFilename(filename, True).decode(preferredencoding())
            return sanitize_path(filename)
        except ValueError as err:
            self.report_error('Error in output template: ' + str(err) + ' (encoding: ' + repr(preferredencoding()) + ')')
            return None
    def _match_entry(self, info_dict, incomplete):
        """Return None if the video should be downloaded, else a skip reason."""
        video_title = info_dict.get('title', info_dict.get('id', 'video'))
        if 'title' in info_dict:
            # Title-based include/exclude regex filters
            title = info_dict['title']
            matchtitle = self.params.get('matchtitle', False)
            if matchtitle:
                if not re.search(matchtitle, title, re.IGNORECASE):
                    return '"' + title + '" title did not match pattern "' + matchtitle + '"'
            rejecttitle = self.params.get('rejecttitle', False)
            if rejecttitle:
                if re.search(rejecttitle, title, re.IGNORECASE):
                    return '"' + title + '" title matched reject pattern "' + rejecttitle + '"'
        date = info_dict.get('upload_date')
        if date is not None:
            dateRange = self.params.get('daterange', DateRange())
            if date not in dateRange:
                return '%s upload date is not in range %s' % (date_from_str(date).isoformat(), dateRange)
        view_count = info_dict.get('view_count')
        if view_count is not None:
            min_views = self.params.get('min_views')
            if min_views is not None and view_count < min_views:
                return 'Skipping %s, because it has not reached minimum view count (%d/%d)' % (video_title, view_count, min_views)
            max_views = self.params.get('max_views')
            if max_views is not None and view_count > max_views:
                return 'Skipping %s, because it has exceeded the maximum view count (%d/%d)' % (video_title, view_count, max_views)
        if age_restricted(info_dict.get('age_limit'), self.params.get('age_limit')):
            return 'Skipping "%s" because it is age restricted' % video_title
        if self.in_download_archive(info_dict):
            return '%s has already been recorded in archive' % video_title
        if not incomplete:
            # The custom match_filter is skipped while the info is incomplete
            match_filter = self.params.get('match_filter')
            if match_filter is not None:
                ret = match_filter(info_dict)
                if ret is not None:
                    return ret
        return None
@staticmethod
def add_extra_info(info_dict, extra_info):
for key, value in extra_info.items():
info_dict.setdefault(key, value)
    def extract_info(self, url, download=True, ie_key=None, extra_info={},
                     process=True, force_generic_extractor=False):
        """Extract video information for *url*.

        Tries each registered InfoExtractor in turn; when *process* is true
        the IE result is also processed (and downloaded if *download* is
        true). Returns the (possibly processed) IE result, or None when no
        suitable extractor exists or extraction fails.
        """
        if not ie_key and force_generic_extractor:
            ie_key = 'Generic'
        if ie_key:
            # Restrict the search to one explicitly requested extractor
            ies = [self.get_info_extractor(ie_key)]
        else:
            ies = self._ies
        for ie in ies:
            if not ie.suitable(url):
                continue
            # Resolve a class entry into a registered instance
            ie = self.get_info_extractor(ie.ie_key())
            if not ie.working():
                self.report_warning('The program functionality for this site has been marked as broken, '
                                    'and will probably not work.')
            try:
                ie_result = ie.extract(url)
                if ie_result is None:  # Finished already (backwards compatibility; listformats and friends should be moved here)
                    break
                if isinstance(ie_result, list):
                    # Backwards compatibility: old IE result format
                    ie_result = {
                        '_type': 'compat_list',
                        'entries': ie_result,
                    }
                self.add_default_extra_info(ie_result, ie, url)
                if process:
                    return self.process_ie_result(ie_result, download, extra_info)
                else:
                    return ie_result
            except GeoRestrictedError as e:
                msg = e.msg
                if e.countries:
                    msg += '\nThis video is available in %s.' % ', '.join(
                        map(ISO3166Utils.short2full, e.countries))
                msg += '\nYou might want to use a VPN or a proxy server (with --proxy) to workaround.'
                self.report_error(msg)
                break
            except ExtractorError as e:  # An error we somewhat expected
                self.report_error(compat_str(e), e.format_traceback())
                break
            except MaxDownloadsReached:
                raise
            except Exception as e:
                if self.params.get('ignoreerrors', False):
                    self.report_error(error_to_compat_str(e), tb=encode_compat_str(traceback.format_exc()))
                    break
                else:
                    raise
        else:
            self.report_error('no suitable InfoExtractor for URL %s' % url)
def add_default_extra_info(self, ie_result, ie, url):
self.add_extra_info(ie_result, {
'extractor': ie.IE_NAME,
'webpage_url': url,
'webpage_url_basename': url_basename(url),
'extractor_key': ie.ie_key(),
})
    def process_ie_result(self, ie_result, download=True, extra_info={}):
        """
        Resolve an extractor result into the final info dict(s).

        Dispatches on ie_result['_type']: a 'video' is finalized via
        process_video_result(); 'url' re-extracts; 'url_transparent'
        re-extracts and overlays the embedding page's non-None fields;
        'playlist'/'multi_video' processes each entry (honouring
        playliststart/playlistend, playlist_items, playlistreverse and
        playlistrandom); 'compat_list' handles the legacy list format.
        Downloads as a side effect when *download* is true and returns the
        resolved result.
        """
        result_type = ie_result.get('_type', 'video')
        if result_type in ('url', 'url_transparent'):
            ie_result['url'] = sanitize_url(ie_result['url'])
            extract_flat = self.params.get('extract_flat', False)
            # With flat extraction, URL references inside a playlist are
            # returned unresolved instead of being extracted again.
            if ((extract_flat == 'in_playlist' and 'playlist' in extra_info) or
                    extract_flat is True):
                if self.params.get('forcejson', False):
                    self.to_stdout(json.dumps(ie_result))
                return ie_result
        if result_type == 'video':
            self.add_extra_info(ie_result, extra_info)
            return self.process_video_result(ie_result, download=download)
        elif result_type == 'url':
            # We have to add extra_info to the results because it may be
            # contained in a playlist
            return self.extract_info(ie_result['url'],
                                     download,
                                     ie_key=ie_result.get('ie_key'),
                                     extra_info=extra_info)
        elif result_type == 'url_transparent':
            # Use the information from the embedding page
            info = self.extract_info(
                ie_result['url'], ie_key=ie_result.get('ie_key'),
                extra_info=extra_info, download=False, process=False)
            # Non-None fields from the embedding page win over the target
            # page, except the reference bookkeeping fields below.
            force_properties = dict(
                (k, v) for k, v in ie_result.items() if v is not None)
            for f in ('_type', 'url', 'ie_key'):
                if f in force_properties:
                    del force_properties[f]
            new_result = info.copy()
            new_result.update(force_properties)
            assert new_result.get('_type') != 'url_transparent'
            return self.process_ie_result(
                new_result, download=download, extra_info=extra_info)
        elif result_type == 'playlist' or result_type == 'multi_video':
            # We process each entry in the playlist
            playlist = ie_result.get('title') or ie_result.get('id')
            self.to_screen('[download] Downloading playlist: %s' % playlist)
            playlist_results = []
            playliststart = self.params.get('playliststart', 1) - 1
            playlistend = self.params.get('playlistend')
            # For backwards compatibility, interpret -1 as whole list
            if playlistend == -1:
                playlistend = None
            playlistitems_str = self.params.get('playlist_items')
            playlistitems = None
            if playlistitems_str is not None:
                def iter_playlistitems(format):
                    # Expand "1-3,7"-style specs into individual 1-based indices.
                    for string_segment in format.split(','):
                        if '-' in string_segment:
                            start, end = string_segment.split('-')
                            for item in range(int(start), int(end) + 1):
                                yield int(item)
                        else:
                            yield int(string_segment)
                playlistitems = iter_playlistitems(playlistitems_str)
            ie_entries = ie_result['entries']
            # Three entry containers are supported: a plain list, a lazy
            # PagedList, and an arbitrary iterable.
            if isinstance(ie_entries, list):
                n_all_entries = len(ie_entries)
                if playlistitems:
                    entries = [
                        ie_entries[i - 1] for i in playlistitems
                        if -n_all_entries <= i - 1 < n_all_entries]
                else:
                    entries = ie_entries[playliststart:playlistend]
                n_entries = len(entries)
                self.to_screen(
                    '[%s] playlist %s: Collected %d video ids (downloading %d of them)' %
                    (ie_result['extractor'], playlist, n_all_entries, n_entries))
            elif isinstance(ie_entries, PagedList):
                if playlistitems:
                    entries = []
                    for item in playlistitems:
                        entries.extend(ie_entries.getslice(
                            item - 1, item
                        ))
                else:
                    entries = ie_entries.getslice(
                        playliststart, playlistend)
                n_entries = len(entries)
                self.to_screen(
                    '[%s] playlist %s: Downloading %d videos' %
                    (ie_result['extractor'], playlist, n_entries))
            else:  # iterable
                if playlistitems:
                    entry_list = list(ie_entries)
                    entries = [entry_list[i - 1] for i in playlistitems]
                else:
                    entries = list(itertools.islice(
                        ie_entries, playliststart, playlistend))
                n_entries = len(entries)
                self.to_screen(
                    '[%s] playlist %s: Downloading %d videos' %
                    (ie_result['extractor'], playlist, n_entries))
            if self.params.get('playlistreverse', False):
                entries = entries[::-1]
            if self.params.get('playlistrandom', False):
                random.shuffle(entries)
            x_forwarded_for = ie_result.get('__x_forwarded_for_ip')
            for i, entry in enumerate(entries, 1):
                self.to_screen('[download] Downloading video %s of %s' % (i, n_entries))
                # This __x_forwarded_for_ip thing is a bit ugly but requires
                # minimal changes
                if x_forwarded_for:
                    entry['__x_forwarded_for_ip'] = x_forwarded_for
                extra = {
                    'n_entries': n_entries,
                    'playlist': playlist,
                    'playlist_id': ie_result.get('id'),
                    'playlist_title': ie_result.get('title'),
                    'playlist_index': i + playliststart,
                    'extractor': ie_result['extractor'],
                    'webpage_url': ie_result['webpage_url'],
                    'webpage_url_basename': url_basename(ie_result['webpage_url']),
                    'extractor_key': ie_result['extractor_key'],
                }
                # incomplete=True: entry metadata may lack fields the match
                # filters would otherwise require.
                reason = self._match_entry(entry, incomplete=True)
                if reason is not None:
                    self.to_screen('[download] ' + reason)
                    continue
                entry_result = self.process_ie_result(entry,
                                                      download=download,
                                                      extra_info=extra)
                playlist_results.append(entry_result)
            ie_result['entries'] = playlist_results
            self.to_screen('[download] Finished downloading playlist: %s' % playlist)
            return ie_result
        elif result_type == 'compat_list':
            self.report_warning(
                'Extractor %s returned a compat_list result. '
                'It needs to be updated.' % ie_result.get('extractor'))
            def _fixup(r):
                # Propagate the playlist-level bookkeeping fields to each entry.
                self.add_extra_info(
                    r,
                    {
                        'extractor': ie_result['extractor'],
                        'webpage_url': ie_result['webpage_url'],
                        'webpage_url_basename': url_basename(ie_result['webpage_url']),
                        'extractor_key': ie_result['extractor_key'],
                    }
                )
                return r
            ie_result['entries'] = [
                self.process_ie_result(_fixup(r), download, extra_info)
                for r in ie_result['entries']
            ]
            return ie_result
        else:
            raise Exception('Invalid result type: %s' % result_type)
    def _build_format_filter(self, filter_spec):
        """Return a predicate deciding whether a format dict passes *filter_spec*.

        The spec is either a numeric comparison on width/height/tbr/abr/vbr/
        asr/filesize/fps (the value may carry a size suffix, e.g. "1M"), or a
        string comparison on ext/acodec/vcodec/container/protocol/format_id
        with =, !=, ^= (startswith), $= (endswith) or *= (contains).  A "?"
        after the operator makes the filter accept formats where the field is
        missing.  Raises ValueError on an unparsable spec.
        """
        OPERATORS = {
            '<': operator.lt,
            '<=': operator.le,
            '>': operator.gt,
            '>=': operator.ge,
            '=': operator.eq,
            '!=': operator.ne,
        }
        operator_rex = re.compile(r'''(?x)\s*
            (?P<key>width|height|tbr|abr|vbr|asr|filesize|fps)
            \s*(?P<op>%s)(?P<none_inclusive>\s*\?)?\s*
            (?P<value>[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)
            $
            ''' % '|'.join(map(re.escape, OPERATORS.keys())))
        m = operator_rex.search(filter_spec)
        if m:
            try:
                comparison_value = int(m.group('value'))
            except ValueError:
                # Not a bare integer: try parsing it as a filesize, first
                # as written, then with an implicit 'B' suffix.
                comparison_value = parse_filesize(m.group('value'))
                if comparison_value is None:
                    comparison_value = parse_filesize(m.group('value') + 'B')
                if comparison_value is None:
                    raise ValueError(
                        'Invalid value %r in format specification %r' % (
                            m.group('value'), filter_spec))
            op = OPERATORS[m.group('op')]
        if not m:
            # Not a numeric comparison: try the string operators.
            STR_OPERATORS = {
                '=': operator.eq,
                '!=': operator.ne,
                '^=': lambda attr, value: attr.startswith(value),
                '$=': lambda attr, value: attr.endswith(value),
                '*=': lambda attr, value: value in attr,
            }
            str_operator_rex = re.compile(r'''(?x)
                \s*(?P<key>ext|acodec|vcodec|container|protocol|format_id)
                \s*(?P<op>%s)(?P<none_inclusive>\s*\?)?
                \s*(?P<value>[a-zA-Z0-9._-]+)
                \s*$
                ''' % '|'.join(map(re.escape, STR_OPERATORS.keys())))
            m = str_operator_rex.search(filter_spec)
            if m:
                comparison_value = m.group('value')
                op = STR_OPERATORS[m.group('op')]
        if not m:
            raise ValueError('Invalid filter specification %r' % filter_spec)
        def _filter(f):
            # Closure over m/op/comparison_value resolved above; a missing
            # field passes only when the spec carried the '?' marker.
            actual_value = f.get(m.group('key'))
            if actual_value is None:
                return m.group('none_inclusive')
            return op(actual_value, comparison_value)
        return _filter
    def build_format_selector(self, format_spec):
        """Compile the format specification *format_spec* into a selector function.

        The spec is tokenised with the stdlib tokenizer and parsed into a tree
        of FormatSelector nodes: SINGLE formats (names like 'best',
        'bestaudio', an extension or a format_id), '/' fallback choices
        (PICKFIRST), '+' video+audio merges (MERGE), parenthesised GROUPs, and
        '[...]' filters handled by _build_format_filter().  The returned
        function takes a context dict with 'formats' and 'incomplete_formats'
        and yields the format dicts to download.  Raises SyntaxError on an
        invalid spec.
        """
        def syntax_error(note, start):
            # Build a SyntaxError pointing a caret at the offending column.
            message = (
                'Invalid format specification: '
                '{0}\n\t{1}\n\t{2}^'.format(note, format_spec, ' ' * start[1]))
            return SyntaxError(message)
        PICKFIRST = 'PICKFIRST'
        MERGE = 'MERGE'
        SINGLE = 'SINGLE'
        GROUP = 'GROUP'
        FormatSelector = collections.namedtuple('FormatSelector', ['type', 'selector', 'filters'])
        def _parse_filter(tokens):
            # Collect everything up to the closing ']' of a [filter] expression.
            filter_parts = []
            for type, string, start, _, _ in tokens:
                if type == tokenize.OP and string == ']':
                    return ''.join(filter_parts)
                else:
                    filter_parts.append(string)
        def _remove_unused_ops(tokens):
            # Remove operators that we don't use and join them with the surrounding strings
            ALLOWED_OPS = ('/', '+', ',', '(', ')')
            last_string, last_start, last_end, last_line = None, None, None, None
            for type, string, start, end, line in tokens:
                if type == tokenize.OP and string == '[':
                    if last_string:
                        yield tokenize.NAME, last_string, last_start, last_end, last_line
                        last_string = None
                    yield type, string, start, end, line
                    # Everything inside the brackets is passed through verbatim
                    # (it is later consumed by _parse_filter).
                    for type, string, start, end, line in tokens:
                        yield type, string, start, end, line
                        if type == tokenize.OP and string == ']':
                            break
                elif type == tokenize.OP and string in ALLOWED_OPS:
                    if last_string:
                        yield tokenize.NAME, last_string, last_start, last_end, last_line
                        last_string = None
                    yield type, string, start, end, line
                elif type in [tokenize.NAME, tokenize.NUMBER, tokenize.OP]:
                    # Fuse adjacent name/number/other-op tokens into one NAME
                    # (e.g. 'mp4' '-' 'baseline' becomes 'mp4-baseline').
                    if not last_string:
                        last_string = string
                        last_start = start
                        last_end = end
                    else:
                        last_string += string
            if last_string:
                yield tokenize.NAME, last_string, last_start, last_end, last_line
        def _parse_format_selection(tokens, inside_merge=False, inside_choice=False, inside_group=False):
            # Recursive-descent parser; the inside_* flags tell which closing
            # tokens terminate the current sub-expression.
            selectors = []
            current_selector = None
            for type, string, start, _, _ in tokens:
                # ENCODING is only emitted by the Python 3 tokenizer; skip it.
                if type == getattr(tokenize, 'ENCODING', None):
                    continue
                elif type in [tokenize.NAME, tokenize.NUMBER]:
                    current_selector = FormatSelector(SINGLE, string, [])
                elif type == tokenize.OP:
                    if string == ')':
                        if not inside_group:
                            # ')' belongs to an outer group: push it back.
                            tokens.restore_last_token()
                        break
                    elif inside_merge and string in ['/', ',']:
                        tokens.restore_last_token()
                        break
                    elif inside_choice and string == ',':
                        tokens.restore_last_token()
                        break
                    elif string == ',':
                        if not current_selector:
                            raise syntax_error('"," must follow a format selector', start)
                        selectors.append(current_selector)
                        current_selector = None
                    elif string == '/':
                        if not current_selector:
                            raise syntax_error('"/" must follow a format selector', start)
                        first_choice = current_selector
                        second_choice = _parse_format_selection(tokens, inside_choice=True)
                        current_selector = FormatSelector(PICKFIRST, (first_choice, second_choice), [])
                    elif string == '[':
                        if not current_selector:
                            # A bare '[filter]' implicitly filters 'best'.
                            current_selector = FormatSelector(SINGLE, 'best', [])
                        format_filter = _parse_filter(tokens)
                        current_selector.filters.append(format_filter)
                    elif string == '(':
                        if current_selector:
                            raise syntax_error('Unexpected "("', start)
                        group = _parse_format_selection(tokens, inside_group=True)
                        current_selector = FormatSelector(GROUP, group, [])
                    elif string == '+':
                        video_selector = current_selector
                        audio_selector = _parse_format_selection(tokens, inside_merge=True)
                        if not video_selector or not audio_selector:
                            raise syntax_error('"+" must be between two format selectors', start)
                        current_selector = FormatSelector(MERGE, (video_selector, audio_selector), [])
                    else:
                        raise syntax_error('Operator not recognized: "{0}"'.format(string), start)
                elif type == tokenize.ENDMARKER:
                    break
            if current_selector:
                selectors.append(current_selector)
            return selectors
        def _build_selector_function(selector):
            # Turn the parsed selector tree into a generator-producing function.
            if isinstance(selector, list):
                fs = [_build_selector_function(s) for s in selector]
                def selector_function(ctx):
                    for f in fs:
                        for format in f(ctx):
                            yield format
                return selector_function
            elif selector.type == GROUP:
                selector_function = _build_selector_function(selector.selector)
            elif selector.type == PICKFIRST:
                fs = [_build_selector_function(s) for s in selector.selector]
                def selector_function(ctx):
                    # Return the first alternative that yields any format.
                    for f in fs:
                        picked_formats = list(f(ctx))
                        if picked_formats:
                            return picked_formats
                    return []
            elif selector.type == SINGLE:
                format_spec = selector.selector
                def selector_function(ctx):
                    formats = list(ctx['formats'])
                    if not formats:
                        return
                    if format_spec == 'all':
                        for f in formats:
                            yield f
                    elif format_spec in ['best', 'worst', None]:
                        # Formats are assumed sorted worst-to-best; prefer
                        # combined audio+video entries when any exist.
                        format_idx = 0 if format_spec == 'worst' else -1
                        audiovideo_formats = [
                            f for f in formats
                            if f.get('vcodec') != 'none' and f.get('acodec') != 'none']
                        if audiovideo_formats:
                            yield audiovideo_formats[format_idx]
                        elif ctx['incomplete_formats']:
                            yield formats[format_idx]
                    elif format_spec == 'bestaudio':
                        audio_formats = [
                            f for f in formats
                            if f.get('vcodec') == 'none']
                        if audio_formats:
                            yield audio_formats[-1]
                    elif format_spec == 'worstaudio':
                        audio_formats = [
                            f for f in formats
                            if f.get('vcodec') == 'none']
                        if audio_formats:
                            yield audio_formats[0]
                    elif format_spec == 'bestvideo':
                        video_formats = [
                            f for f in formats
                            if f.get('acodec') == 'none']
                        if video_formats:
                            yield video_formats[-1]
                    elif format_spec == 'worstvideo':
                        video_formats = [
                            f for f in formats
                            if f.get('acodec') == 'none']
                        if video_formats:
                            yield video_formats[0]
                    else:
                        # Anything else is an extension or an exact format_id.
                        extensions = ['mp4', 'flv', 'webm', '3gp', 'm4a', 'mp3', 'ogg', 'aac', 'wav']
                        if format_spec in extensions:
                            filter_f = lambda f: f['ext'] == format_spec
                        else:
                            filter_f = lambda f: f['format_id'] == format_spec
                        matches = list(filter(filter_f, formats))
                        if matches:
                            yield matches[-1]
            elif selector.type == MERGE:
                def _merge(formats_info):
                    # Combine a (video, audio) pair into a synthetic merged
                    # format dict; reports an error and yields None on an
                    # invalid pairing.
                    format_1, format_2 = [f['format_id'] for f in formats_info]
                    if formats_info[0].get('vcodec') == 'none':
                        self.report_error('The first format must '
                                          'contain the video, try using '
                                          '"-f %s+%s"' % (format_2, format_1))
                        return
                    if formats_info[0].get('acodec') == 'none' and formats_info[1].get('acodec') == 'none':
                        self.report_error(
                            'Both formats %s and %s are video-only, you must specify "-f video+audio"'
                            % (format_1, format_2))
                        return
                    output_ext = (
                        formats_info[0]['ext']
                        if self.params.get('merge_output_format') is None
                        else self.params['merge_output_format'])
                    return {
                        'requested_formats': formats_info,
                        'format': '%s+%s' % (formats_info[0].get('format'),
                                             formats_info[1].get('format')),
                        'format_id': '%s+%s' % (formats_info[0].get('format_id'),
                                                formats_info[1].get('format_id')),
                        'width': formats_info[0].get('width'),
                        'height': formats_info[0].get('height'),
                        'resolution': formats_info[0].get('resolution'),
                        'fps': formats_info[0].get('fps'),
                        'vcodec': formats_info[0].get('vcodec'),
                        'vbr': formats_info[0].get('vbr'),
                        'stretched_ratio': formats_info[0].get('stretched_ratio'),
                        'acodec': formats_info[1].get('acodec'),
                        'abr': formats_info[1].get('abr'),
                        'ext': output_ext,
                    }
                video_selector, audio_selector = map(_build_selector_function, selector.selector)
                def selector_function(ctx):
                    # Deep-copy the context so each side of the merge filters
                    # an independent format list.
                    for pair in itertools.product(
                            video_selector(copy.deepcopy(ctx)), audio_selector(copy.deepcopy(ctx))):
                        yield _merge(pair)
            filters = [self._build_format_filter(f) for f in selector.filters]
            def final_selector(ctx):
                # Apply this node's [filters] before delegating to the selector.
                ctx_copy = copy.deepcopy(ctx)
                for _filter in filters:
                    ctx_copy['formats'] = list(filter(_filter, ctx_copy['formats']))
                return selector_function(ctx_copy)
            return final_selector
        stream = io.BytesIO(format_spec.encode('utf-8'))
        try:
            tokens = list(_remove_unused_ops(compat_tokenize_tokenize(stream.readline)))
        except tokenize.TokenError:
            raise syntax_error('Missing closing/opening brackets or parenthesis', (0, len(format_spec)))
        class TokenIterator(object):
            # Token stream with one-step pushback, needed by the parser when a
            # closing token belongs to an enclosing expression.
            def __init__(self, tokens):
                self.tokens = tokens
                self.counter = 0
            def __iter__(self):
                return self
            def __next__(self):
                if self.counter >= len(self.tokens):
                    raise StopIteration()
                value = self.tokens[self.counter]
                self.counter += 1
                return value
            next = __next__
            def restore_last_token(self):
                self.counter -= 1
        parsed_selector = _parse_format_selection(iter(TokenIterator(tokens)))
        return _build_selector_function(parsed_selector)
def _calc_headers(self, info_dict):
res = std_headers.copy()
add_headers = info_dict.get('http_headers')
if add_headers:
res.update(add_headers)
cookies = self._calc_cookies(info_dict)
if cookies:
res['Cookie'] = cookies
if 'X-Forwarded-For' not in res:
x_forwarded_for_ip = info_dict.get('__x_forwarded_for_ip')
if x_forwarded_for_ip:
res['X-Forwarded-For'] = x_forwarded_for_ip
return res
def _calc_cookies(self, info_dict):
pr = sanitized_Request(info_dict['url'])
self.cookiejar.add_cookie_header(pr)
return pr.get_header('Cookie')
    def process_video_result(self, info_dict, download=True):
        """Validate, normalise and (optionally) download a single video result.

        Checks the mandatory 'id'/'title' fields, normalises thumbnails,
        derives upload_date from timestamp, auto-generates chapter/season/
        episode titles, resolves subtitles, sanitises and de-duplicates
        per-format ids, computes per-format HTTP headers, applies the format
        selector built from the 'format' option, and downloads every selected
        format via process_info() when *download* is true.  Returns the
        updated info_dict (also handles the list_thumbnails/listsubtitles/
        listformats listing modes, which return early).
        """
        assert info_dict.get('_type', 'video') == 'video'
        if 'id' not in info_dict:
            raise ExtractorError('Missing "id" field in extractor result')
        if 'title' not in info_dict:
            raise ExtractorError('Missing "title" field in extractor result')
        if not isinstance(info_dict['id'], compat_str):
            self.report_warning('"id" field is not a string - forcing string conversion')
            info_dict['id'] = compat_str(info_dict['id'])
        if 'playlist' not in info_dict:
            # It is not part of a playlist.
            info_dict['playlist'] = None
            info_dict['playlist_index'] = None
        thumbnails = info_dict.get('thumbnails')
        if thumbnails is None:
            # Promote a single 'thumbnail' field to a one-element list.
            thumbnail = info_dict.get('thumbnail')
            if thumbnail:
                info_dict['thumbnails'] = thumbnails = [{'url': thumbnail}]
        if thumbnails:
            # Sort worst-to-best; missing keys sort first.
            thumbnails.sort(key=lambda t: (
                t.get('preference') if t.get('preference') is not None else -1,
                t.get('width') if t.get('width') is not None else -1,
                t.get('height') if t.get('height') is not None else -1,
                t.get('id') if t.get('id') is not None else '', t.get('url')))
            for i, t in enumerate(thumbnails):
                t['url'] = sanitize_url(t['url'])
                if t.get('width') and t.get('height'):
                    t['resolution'] = '%dx%d' % (t['width'], t['height'])
                if t.get('id') is None:
                    t['id'] = '%d' % i
        if self.params.get('list_thumbnails'):
            self.list_thumbnails(info_dict)
            return
        thumbnail = info_dict.get('thumbnail')
        if thumbnail:
            info_dict['thumbnail'] = sanitize_url(thumbnail)
        elif thumbnails:
            # Last entry is the best after the sort above.
            info_dict['thumbnail'] = thumbnails[-1]['url']
        if 'display_id' not in info_dict and 'id' in info_dict:
            info_dict['display_id'] = info_dict['id']
        if info_dict.get('upload_date') is None and info_dict.get('timestamp') is not None:
            # Working around out-of-range timestamp values (e.g. negative ones on Windows,
            # see http://bugs.python.org/issue1646728)
            try:
                upload_date = datetime.datetime.utcfromtimestamp(info_dict['timestamp'])
                info_dict['upload_date'] = upload_date.strftime('%Y%m%d')
            except (ValueError, OverflowError, OSError):
                pass
        # Auto generate title fields corresponding to the *_number fields when missing
        # in order to always have clean titles. This is very common for TV series.
        for field in ('chapter', 'season', 'episode'):
            if info_dict.get('%s_number' % field) is not None and not info_dict.get(field):
                info_dict[field] = '%s %d' % (field.capitalize(), info_dict['%s_number' % field])
        subtitles = info_dict.get('subtitles')
        if subtitles:
            for _, subtitle in subtitles.items():
                for subtitle_format in subtitle:
                    if subtitle_format.get('url'):
                        subtitle_format['url'] = sanitize_url(subtitle_format['url'])
                    if subtitle_format.get('ext') is None:
                        subtitle_format['ext'] = determine_ext(subtitle_format['url']).lower()
        if self.params.get('listsubtitles', False):
            if 'automatic_captions' in info_dict:
                self.list_subtitles(info_dict['id'], info_dict.get('automatic_captions'), 'automatic captions')
            self.list_subtitles(info_dict['id'], subtitles, 'subtitles')
            return
        info_dict['requested_subtitles'] = self.process_subtitles(
            info_dict['id'], subtitles,
            info_dict.get('automatic_captions'))
        # We now pick which formats have to be downloaded
        if info_dict.get('formats') is None:
            # There's only one format available
            formats = [info_dict]
        else:
            formats = info_dict['formats']
        if not formats:
            raise ExtractorError('No video formats found!')
        formats_dict = {}
        # Sanitise each format's URL and id; collect ids to detect duplicates.
        for i, format in enumerate(formats):
            if 'url' not in format:
                raise ExtractorError('Missing "url" key in result (index %d)' % i)
            format['url'] = sanitize_url(format['url'])
            if format.get('format_id') is None:
                format['format_id'] = compat_str(i)
            else:
                # Strip characters that have meaning in format-selector syntax.
                format['format_id'] = re.sub(r'[\s,/+\[\]()]', '_', format['format_id'])
            format_id = format['format_id']
            if format_id not in formats_dict:
                formats_dict[format_id] = []
            formats_dict[format_id].append(format)
        # Make duplicate format ids unique by appending a numeric suffix.
        for format_id, ambiguous_formats in formats_dict.items():
            if len(ambiguous_formats) > 1:
                for i, format in enumerate(ambiguous_formats):
                    format['format_id'] = '%s-%d' % (format_id, i)
        for i, format in enumerate(formats):
            if format.get('format') is None:
                format['format'] = '{id} - {res}{note}'.format(
                    id=format['format_id'],
                    res=self.format_resolution(format),
                    note=' ({0})'.format(format['format_note']) if format.get('format_note') is not None else '',
                )
            # Automatically determine file extension if missing
            if format.get('ext') is None:
                format['ext'] = determine_ext(format['url']).lower()
            if format.get('protocol') is None:
                format['protocol'] = determine_protocol(format)
            # Attach HTTP headers so downloaders and external tools can use them.
            full_format_info = info_dict.copy()
            full_format_info.update(format)
            format['http_headers'] = self._calc_headers(full_format_info)
        # Remove private housekeeping data before the dict becomes output.
        if '__x_forwarded_for_ip' in info_dict:
            del info_dict['__x_forwarded_for_ip']
        if formats[0] is not info_dict:
            # Avoid a circular reference when the single format IS info_dict.
            info_dict['formats'] = formats
        if self.params.get('listformats'):
            self.list_formats(info_dict)
            return
        req_format = self.params.get('format')
        if req_format is None:
            req_format_list = []
            # Default to bestvideo+bestaudio (when mergeable) falling back to
            # best, except when streaming to stdout or downloading a live stream.
            if (self.params.get('outtmpl', DEFAULT_OUTTMPL) != '-' and
                    not info_dict.get('is_live')):
                merger = FFmpegMergerPP(self)
                if merger.available and merger.can_merge():
                    req_format_list.append('bestvideo+bestaudio')
            req_format_list.append('best')
            req_format = '/'.join(req_format_list)
        format_selector = self.build_format_selector(req_format)
        # While in format selection we may need to have an access to the original
        # format set in order to calculate some metrics or do some processing.
        # For now we need to be able to guess whether original formats provided
        # by extractor are incomplete or not (i.e. whether extractor provides only
        # video-only or audio-only formats) for proper formats selection for
        # extractors with such incomplete formats (see
        # https://github.com/rg3/youtube-dl/pull/5556).
        # Since formats may be filtered during format selection and may not match
        # the original formats the results may be incorrect. Thus original formats
        # or pre-calculated metrics should be passed to format selection routines
        # as well.
        # We will pass a context object containing all necessary additional data
        # instead of just formats.
        # This fixes incorrect format selection issue (see
        # https://github.com/rg3/youtube-dl/issues/10083).
        incomplete_formats = (
            # All formats are video-only or
            all(f.get('vcodec') != 'none' and f.get('acodec') == 'none' for f in formats) or
            # all formats are audio-only
            all(f.get('vcodec') == 'none' and f.get('acodec') != 'none' for f in formats))
        ctx = {
            'formats': formats,
            'incomplete_formats': incomplete_formats,
        }
        formats_to_download = list(format_selector(ctx))
        if not formats_to_download:
            raise ExtractorError('requested format not available',
                                 expected=True)
        if download:
            if len(formats_to_download) > 1:
                self.to_screen('[info] %s: downloading video in %s formats' % (info_dict['id'], len(formats_to_download)))
            for format in formats_to_download:
                new_info = dict(info_dict)
                new_info.update(format)
                self.process_info(new_info)
        # We update the info dict with the best quality format (backwards compatibility)
        info_dict.update(formats_to_download[-1])
        return info_dict
    def process_subtitles(self, video_id, normal_subtitles, automatic_captions):
        """Select the requested subtitle languages and one format per language.

        Merges normal subtitles with automatic captions (normal ones win),
        resolves the requested language list ('allsubtitles',
        'subtitleslangs', default 'en', or the first available language), and
        picks a format per the 'subtitlesformat' preference list.  Returns a
        {lang: subtitle_format_dict} mapping, or None when subtitles were not
        requested or none are available.
        """
        available_subs = {}
        if normal_subtitles and self.params.get('writesubtitles'):
            available_subs.update(normal_subtitles)
        if automatic_captions and self.params.get('writeautomaticsub'):
            for lang, cap_info in automatic_captions.items():
                # Normal subtitles take precedence over automatic captions.
                if lang not in available_subs:
                    available_subs[lang] = cap_info
        if (not self.params.get('writesubtitles') and not
                self.params.get('writeautomaticsub') or not
                available_subs):
            return None
        if self.params.get('allsubtitles', False):
            requested_langs = available_subs.keys()
        else:
            if self.params.get('subtitleslangs', False):
                requested_langs = self.params.get('subtitleslangs')
            elif 'en' in available_subs:
                requested_langs = ['en']
            else:
                requested_langs = [list(available_subs.keys())[0]]
        formats_query = self.params.get('subtitlesformat', 'best')
        formats_preference = formats_query.split('/') if formats_query else []
        subs = {}
        for lang in requested_langs:
            formats = available_subs.get(lang)
            if formats is None:
                self.report_warning('%s subtitles not available for %s' % (lang, video_id))
                continue
            for ext in formats_preference:
                if ext == 'best':
                    f = formats[-1]
                    break
                matches = list(filter(lambda f: f['ext'] == ext, formats))
                if matches:
                    f = matches[-1]
                    break
            else:
                # for..else: no preferred extension matched; fall back to the
                # last listed format and warn.
                f = formats[-1]
                self.report_warning(
                    'No subtitle format found matching "%s" for language %s, '
                    'using %s' % (formats_query, lang, f['ext']))
            subs[lang] = f
        return subs
    def process_info(self, info_dict):
        """Process a single fully-resolved video result.

        Enforces --max-downloads, applies match filters, performs the forced
        stdout printings, writes the sidecar files (description, annotations,
        subtitles, info JSON, thumbnails), downloads the selected format(s) —
        merging multiple requested formats via ffmpeg/avconv when available —
        applies stretched-ratio / DASH-m4a / m3u8 fixups, runs post-processors
        and finally records the download in the archive.
        """
        assert info_dict.get('_type', 'video') == 'video'
        max_downloads = self.params.get('max_downloads')
        if max_downloads is not None:
            if self._num_downloads >= int(max_downloads):
                raise MaxDownloadsReached()
        info_dict['fulltitle'] = info_dict['title']
        # Keep the title used for filenames to a sane length.
        if len(info_dict['title']) > 200:
            info_dict['title'] = info_dict['title'][:197] + '...'
        if 'format' not in info_dict:
            info_dict['format'] = info_dict['ext']
        reason = self._match_entry(info_dict, incomplete=False)
        if reason is not None:
            self.to_screen('[download] ' + reason)
            return
        self._num_downloads += 1
        info_dict['_filename'] = filename = self.prepare_filename(info_dict)
        # Forced printings
        if self.params.get('forcetitle', False):
            self.to_stdout(info_dict['fulltitle'])
        if self.params.get('forceid', False):
            self.to_stdout(info_dict['id'])
        if self.params.get('forceurl', False):
            if info_dict.get('requested_formats') is not None:
                for f in info_dict['requested_formats']:
                    self.to_stdout(f['url'] + f.get('play_path', ''))
            else:
                # For RTMP URLs, also include the playpath
                self.to_stdout(info_dict['url'] + info_dict.get('play_path', ''))
        if self.params.get('forcethumbnail', False) and info_dict.get('thumbnail') is not None:
            self.to_stdout(info_dict['thumbnail'])
        if self.params.get('forcedescription', False) and info_dict.get('description') is not None:
            self.to_stdout(info_dict['description'])
        if self.params.get('forcefilename', False) and filename is not None:
            self.to_stdout(filename)
        if self.params.get('forceduration', False) and info_dict.get('duration') is not None:
            self.to_stdout(formatSeconds(info_dict['duration']))
        if self.params.get('forceformat', False):
            self.to_stdout(info_dict['format'])
        if self.params.get('forcejson', False):
            self.to_stdout(json.dumps(info_dict))
        # Do nothing else if in simulate mode
        if self.params.get('simulate', False):
            return
        if filename is None:
            return
        try:
            dn = os.path.dirname(sanitize_path(encodeFilename(filename)))
            if dn and not os.path.exists(dn):
                os.makedirs(dn)
        except (OSError, IOError) as err:
            self.report_error('unable to create directory ' + error_to_compat_str(err))
            return
        if self.params.get('writedescription', False):
            descfn = replace_extension(filename, 'description', info_dict.get('ext'))
            if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(descfn)):
                self.to_screen('[info] Video description is already present')
            elif info_dict.get('description') is None:
                self.report_warning('There\'s no description to write.')
            else:
                try:
                    self.to_screen('[info] Writing video description to: ' + descfn)
                    with io.open(encodeFilename(descfn), 'w', encoding='utf-8') as descfile:
                        descfile.write(info_dict['description'])
                except (OSError, IOError):
                    self.report_error('Cannot write description file ' + descfn)
                    return
        if self.params.get('writeannotations', False):
            annofn = replace_extension(filename, 'annotations.xml', info_dict.get('ext'))
            if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(annofn)):
                self.to_screen('[info] Video annotations are already present')
            else:
                try:
                    self.to_screen('[info] Writing video annotations to: ' + annofn)
                    with io.open(encodeFilename(annofn), 'w', encoding='utf-8') as annofile:
                        annofile.write(info_dict['annotations'])
                except (KeyError, TypeError):
                    self.report_warning('There are no annotations to write.')
                except (OSError, IOError):
                    self.report_error('Cannot write annotations file: ' + annofn)
                    return
        subtitles_are_requested = any([self.params.get('writesubtitles', False),
                                       self.params.get('writeautomaticsub')])
        if subtitles_are_requested and info_dict.get('requested_subtitles'):
            # subtitles download errors are already managed as troubles in relevant IE
            # that way it will silently go on when used with unsupporting IE
            subtitles = info_dict['requested_subtitles']
            ie = self.get_info_extractor(info_dict['extractor_key'])
            for sub_lang, sub_info in subtitles.items():
                sub_format = sub_info['ext']
                if sub_info.get('data') is not None:
                    sub_data = sub_info['data']
                else:
                    try:
                        sub_data = ie._download_webpage(
                            sub_info['url'], info_dict['id'], note=False)
                    except ExtractorError as err:
                        self.report_warning('Unable to download subtitle for "%s": %s' %
                                            (sub_lang, error_to_compat_str(err.cause)))
                        continue
                try:
                    sub_filename = subtitles_filename(filename, sub_lang, sub_format)
                    if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(sub_filename)):
                        self.to_screen('[info] Video subtitle %s.%s is already_present' % (sub_lang, sub_format))
                    else:
                        self.to_screen('[info] Writing video subtitles to: ' + sub_filename)
                        # newline='' prevents the writer from mangling the
                        # subtitle file's original line endings.
                        with io.open(encodeFilename(sub_filename), 'w', encoding='utf-8', newline='') as subfile:
                            subfile.write(sub_data)
                except (OSError, IOError):
                    self.report_error('Cannot write subtitles file ' + sub_filename)
                    return
        if self.params.get('writeinfojson', False):
            infofn = replace_extension(filename, 'info.json', info_dict.get('ext'))
            if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(infofn)):
                self.to_screen('[info] Video description metadata is already present')
            else:
                self.to_screen('[info] Writing video description metadata as JSON to: ' + infofn)
                try:
                    write_json_file(self.filter_requested_info(info_dict), infofn)
                except (OSError, IOError):
                    self.report_error('Cannot write metadata to JSON file ' + infofn)
                    return
        self._write_thumbnails(info_dict, filename)
        if not self.params.get('skip_download', False):
            try:
                def dl(name, info):
                    # Run the suitable FileDownloader with the registered
                    # progress hooks attached.
                    fd = get_suitable_downloader(info, self.params)(self, self.params)
                    for ph in self._progress_hooks:
                        fd.add_progress_hook(ph)
                    if self.params.get('verbose'):
                        self.to_stdout('[debug] Invoking downloader on %r' % info.get('url'))
                    return fd.download(name, info)
                if info_dict.get('requested_formats') is not None:
                    downloaded = []
                    success = True
                    merger = FFmpegMergerPP(self)
                    if not merger.available:
                        postprocessors = []
                        self.report_warning('You have requested multiple '
                                            'formats but ffmpeg or avconv are not installed.'
                                            ' The formats won\'t be merged.')
                    else:
                        postprocessors = [merger]
                    def compatible_formats(formats):
                        # Heuristic: the pair can share one container when both
                        # extensions belong to the same compatibility group.
                        video, audio = formats
                        # Check extension
                        video_ext, audio_ext = audio.get('ext'), video.get('ext')
                        if video_ext and audio_ext:
                            COMPATIBLE_EXTS = (
                                ('mp3', 'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'ismv', 'isma'),
                                ('webm')
                            )
                            for exts in COMPATIBLE_EXTS:
                                if video_ext in exts and audio_ext in exts:
                                    return True
                        # TODO: Check acodec/vcodec
                        return False
                    filename_real_ext = os.path.splitext(filename)[1][1:]
                    filename_wo_ext = (
                        os.path.splitext(filename)[0]
                        if filename_real_ext == info_dict['ext']
                        else filename)
                    requested_formats = info_dict['requested_formats']
                    if self.params.get('merge_output_format') is None and not compatible_formats(requested_formats):
                        info_dict['ext'] = 'mkv'
                        self.report_warning(
                            'Requested formats are incompatible for merge and will be merged into mkv.')
                    # Ensure filename always has a correct extension for successful merge
                    filename = '%s.%s' % (filename_wo_ext, info_dict['ext'])
                    if os.path.exists(encodeFilename(filename)):
                        self.to_screen(
                            '[download] %s has already been downloaded and '
                            'merged' % filename)
                    else:
                        # Download each part to its own 'f<format_id>' file;
                        # the merger post-processor combines them afterwards.
                        for f in requested_formats:
                            new_info = dict(info_dict)
                            new_info.update(f)
                            fname = self.prepare_filename(new_info)
                            fname = prepend_extension(fname, 'f%s' % f['format_id'], new_info['ext'])
                            downloaded.append(fname)
                            partial_success = dl(fname, new_info)
                            success = success and partial_success
                        info_dict['__postprocessors'] = postprocessors
                        info_dict['__files_to_merge'] = downloaded
                else:
                    # Just a single file
                    success = dl(filename, info_dict)
            except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
                self.report_error('unable to download video data: %s' % error_to_compat_str(err))
                return
            except (OSError, IOError) as err:
                raise UnavailableVideoError(err)
            except (ContentTooShortError, ) as err:
                self.report_error('content too short (expected %s bytes and served %s)' % (err.expected, err.downloaded))
                return
            if success and filename != '-':
                # Fixup content
                fixup_policy = self.params.get('fixup')
                if fixup_policy is None:
                    fixup_policy = 'detect_or_warn'
                INSTALL_FFMPEG_MESSAGE = 'Install ffmpeg or avconv to fix this automatically.'
                stretched_ratio = info_dict.get('stretched_ratio')
                if stretched_ratio is not None and stretched_ratio != 1:
                    if fixup_policy == 'warn':
                        self.report_warning('%s: Non-uniform pixel ratio (%s)' % (
                            info_dict['id'], stretched_ratio))
                    elif fixup_policy == 'detect_or_warn':
                        stretched_pp = FFmpegFixupStretchedPP(self)
                        if stretched_pp.available:
                            info_dict.setdefault('__postprocessors', [])
                            info_dict['__postprocessors'].append(stretched_pp)
                        else:
                            self.report_warning(
                                '%s: Non-uniform pixel ratio (%s). %s'
                                % (info_dict['id'], stretched_ratio, INSTALL_FFMPEG_MESSAGE))
                    else:
                        assert fixup_policy in ('ignore', 'never')
                if (info_dict.get('requested_formats') is None and
                        info_dict.get('container') == 'm4a_dash'):
                    if fixup_policy == 'warn':
                        self.report_warning(
                            '%s: writing DASH m4a. '
                            'Only some players support this container.'
                            % info_dict['id'])
                    elif fixup_policy == 'detect_or_warn':
                        fixup_pp = FFmpegFixupM4aPP(self)
                        if fixup_pp.available:
                            info_dict.setdefault('__postprocessors', [])
                            info_dict['__postprocessors'].append(fixup_pp)
                        else:
                            self.report_warning(
                                '%s: writing DASH m4a. '
                                'Only some players support this container. %s'
                                % (info_dict['id'], INSTALL_FFMPEG_MESSAGE))
                    else:
                        assert fixup_policy in ('ignore', 'never')
                if (info_dict.get('protocol') == 'm3u8_native' or
                        info_dict.get('protocol') == 'm3u8' and
                        self.params.get('hls_prefer_native')):
                    if fixup_policy == 'warn':
                        self.report_warning('%s: malformated aac bitstream.' % (
                            info_dict['id']))
                    elif fixup_policy == 'detect_or_warn':
                        fixup_pp = FFmpegFixupM3u8PP(self)
                        if fixup_pp.available:
                            info_dict.setdefault('__postprocessors', [])
                            info_dict['__postprocessors'].append(fixup_pp)
                        else:
                            self.report_warning(
                                '%s: malformated aac bitstream. %s'
                                % (info_dict['id'], INSTALL_FFMPEG_MESSAGE))
                    else:
                        assert fixup_policy in ('ignore', 'never')
                try:
                    self.post_process(filename, info_dict)
                except (PostProcessingError) as err:
                    self.report_error('postprocessing: %s' % str(err))
                    return
                self.record_download_archive(info_dict)
    def download(self, url_list):
        """Download every URL in *url_list* and return the accumulated retcode.

        Raises SameFileError when several URLs would all be written to one
        literal (non-templated) output file.
        """
        outtmpl = self.params.get('outtmpl', DEFAULT_OUTTMPL)
        if (len(url_list) > 1 and
                '%' not in outtmpl and
                self.params.get('max_downloads') != 1):
            # No template placeholder: every download would overwrite the same file.
            raise SameFileError(outtmpl)
        for url in url_list:
            try:
                # It also downloads the videos
                res = self.extract_info(
                    url, force_generic_extractor=self.params.get('force_generic_extractor', False))
            except UnavailableVideoError:
                self.report_error('unable to download video')
            except MaxDownloadsReached:
                # Re-raised so the caller stops processing the remaining URLs.
                self.to_screen('[info] Maximum number of downloaded files reached.')
                raise
            else:
                if self.params.get('dump_single_json', False):
                    self.to_stdout(json.dumps(res))
        return self._download_retcode
    def download_with_info_file(self, info_filename):
        """Re-run processing/downloading from a previously dumped info JSON file."""
        with contextlib.closing(fileinput.FileInput(
                [info_filename], mode='r',
                openhook=fileinput.hook_encoded('utf-8'))) as f:
            # FileInput doesn't have a read method, we can't call json.load
            info = self.filter_requested_info(json.loads('\n'.join(f)))
        try:
            self.process_ie_result(info, download=True)
        except DownloadError:
            webpage_url = info.get('webpage_url')
            if webpage_url is not None:
                # The dumped info may be stale; fall back to a fresh extraction
                # from the original page URL.
                self.report_warning('The info failed to download, trying with "%s"' % webpage_url)
                return self.download([webpage_url])
            else:
                raise
        return self._download_retcode
@staticmethod
def filter_requested_info(info_dict):
return dict(
(k, v) for k, v in info_dict.items()
if k not in ['requested_formats', 'requested_subtitles'])
    def post_process(self, filename, ie_info):
        """Run every postprocessor on the downloaded file *filename*."""
        info = dict(ie_info)
        info['filepath'] = filename
        pps_chain = []
        if ie_info.get('__postprocessors') is not None:
            # Per-video postprocessors (e.g. fixups attached during processing)
            # run before the globally registered ones.
            pps_chain.extend(ie_info['__postprocessors'])
        pps_chain.extend(self._pps)
        for pp in pps_chain:
            files_to_delete = []
            try:
                files_to_delete, info = pp.run(info)
            except PostProcessingError as e:
                self.report_error(e.msg)
            if files_to_delete and not self.params.get('keepvideo', False):
                for old_filename in files_to_delete:
                    self.to_screen('Deleting original file %s (pass -k to keep)' % old_filename)
                    try:
                        os.remove(encodeFilename(old_filename))
                    except (IOError, OSError):
                        self.report_warning('Unable to remove downloaded original file')
def _make_archive_id(self, info_dict):
# Future-proof against any change in case
# and backwards compatibility with prior versions
extractor = info_dict.get('extractor_key')
if extractor is None:
if 'id' in info_dict:
extractor = info_dict.get('ie_key') # key in a playlist
if extractor is None:
return None # Incomplete video information
return extractor.lower() + ' ' + info_dict['id']
    def in_download_archive(self, info_dict):
        """Return True if the video is already recorded in the download archive."""
        fn = self.params.get('download_archive')
        if fn is None:
            return False
        vid_id = self._make_archive_id(info_dict)
        if vid_id is None:
            return False  # Incomplete video information
        try:
            with locked_file(fn, 'r', encoding='utf-8') as archive_file:
                for line in archive_file:
                    if line.strip() == vid_id:
                        return True
        except IOError as ioe:
            # A missing archive file just means nothing was recorded yet.
            if ioe.errno != errno.ENOENT:
                raise
        return False
    def record_download_archive(self, info_dict):
        """Append the video's archive id to the download archive file, if configured."""
        fn = self.params.get('download_archive')
        if fn is None:
            return
        vid_id = self._make_archive_id(info_dict)
        assert vid_id
        with locked_file(fn, 'a', encoding='utf-8') as archive_file:
            archive_file.write(vid_id + '\n')
@staticmethod
def format_resolution(format, default='unknown'):
if format.get('vcodec') == 'none':
return 'audio only'
if format.get('resolution') is not None:
return format['resolution']
if format.get('height') is not None:
if format.get('width') is not None:
res = '%sx%s' % (format['width'], format['height'])
else:
res = '%sp' % format['height']
elif format.get('width') is not None:
res = '%dx?' % format['width']
else:
res = default
return res
    def _format_note(self, fdict):
        """Build the free-form 'note' column shown by list_formats for one format.

        Pieces are appended in a fixed order (language, note, bitrates, codecs,
        fps, sample rate, filesize) with ', ' separators once *res* is non-empty.
        """
        res = ''
        if fdict.get('ext') in ['f4f', 'f4m']:
            res += '(unsupported) '
        if fdict.get('language'):
            if res:
                res += ' '
            res += '[%s] ' % fdict['language']
        if fdict.get('format_note') is not None:
            res += fdict['format_note'] + ' '
        if fdict.get('tbr') is not None:
            res += '%4dk ' % fdict['tbr']
        if fdict.get('container') is not None:
            if res:
                res += ', '
            res += '%s container' % fdict['container']
        if (fdict.get('vcodec') is not None and
                fdict.get('vcodec') != 'none'):
            if res:
                res += ', '
            res += fdict['vcodec']
            if fdict.get('vbr') is not None:
                # '@' glues the codec name to the bitrate appended below.
                res += '@'
        elif fdict.get('vbr') is not None and fdict.get('abr') is not None:
            # No codec name known, but both bitrates are: label the video one.
            res += 'video@'
        if fdict.get('vbr') is not None:
            res += '%4dk' % fdict['vbr']
        if fdict.get('fps') is not None:
            if res:
                res += ', '
            res += '%sfps' % fdict['fps']
        if fdict.get('acodec') is not None:
            if res:
                res += ', '
            if fdict['acodec'] == 'none':
                res += 'video only'
            else:
                res += '%-5s' % fdict['acodec']
        elif fdict.get('abr') is not None:
            if res:
                res += ', '
            res += 'audio'
        if fdict.get('abr') is not None:
            res += '@%3dk' % fdict['abr']
        if fdict.get('asr') is not None:
            res += ' (%5dHz)' % fdict['asr']
        if fdict.get('filesize') is not None:
            if res:
                res += ', '
            res += format_bytes(fdict['filesize'])
        elif fdict.get('filesize_approx') is not None:
            if res:
                res += ', '
            res += '~' + format_bytes(fdict['filesize_approx'])
        return res
    def list_formats(self, info_dict):
        """Print the table of available formats for the given video."""
        formats = info_dict.get('formats', [info_dict])
        table = [
            [f['format_id'], f['ext'], self.format_resolution(f), self._format_note(f)]
            for f in formats
            if f.get('preference') is None or f['preference'] >= -1000]
        if len(formats) > 1:
            # The last listed format is the default pick; mark it.
            table[-1][-1] += (' ' if table[-1][-1] else '') + '(best)'
        header_line = ['format code', 'extension', 'resolution', 'note']
        self.to_screen(
            '[info] Available formats for %s:\n%s' %
            (info_dict['id'], render_table(header_line, table)))
    def list_thumbnails(self, info_dict):
        """Print the table of available thumbnails for the given video."""
        thumbnails = info_dict.get('thumbnails')
        if not thumbnails:
            self.to_screen('[info] No thumbnails present for %s' % info_dict['id'])
            return
        self.to_screen(
            '[info] Thumbnails for %s:' % info_dict['id'])
        self.to_screen(render_table(
            ['ID', 'width', 'height', 'URL'],
            [[t['id'], t.get('width', 'unknown'), t.get('height', 'unknown'), t['url']] for t in thumbnails]))
    def list_subtitles(self, video_id, subtitles, name='subtitles'):
        """Print available subtitle languages and their formats for *video_id*."""
        if not subtitles:
            self.to_screen('%s has no %s' % (video_id, name))
            return
        self.to_screen(
            'Available %s for %s:' % (name, video_id))
        self.to_screen(render_table(
            ['Language', 'formats'],
            # reversed(): show each language's last-listed format first
            [[lang, ', '.join(f['ext'] for f in reversed(formats))]
                for lang, formats in subtitles.items()]))
    def urlopen(self, req):
        """Open *req* (URL string or Request) through the configured opener."""
        if isinstance(req, compat_basestring):
            req = sanitized_Request(req)
        return self._opener.open(req, timeout=self._socket_timeout)
    def print_debug_header(self):
        """Write debug info (encodings, versions, git HEAD, proxies) when --verbose."""
        if not self.params.get('verbose'):
            return
        if type('') is not compat_str:
            # Python 2.6 on SLES11 SP1 (https://github.com/rg3/youtube-dl/issues/3326)
            self.report_warning(
                'Your Python is broken! Update to a newer and supported version')
        stdout_encoding = getattr(
            sys.stdout, 'encoding', 'missing (%s)' % type(sys.stdout).__name__)
        encoding_str = (
            '[debug] Encodings: locale %s, fs %s, out %s, pref %s\n' % (
                locale.getpreferredencoding(),
                sys.getfilesystemencoding(),
                stdout_encoding,
                self.get_encoding()))
        write_string(encoding_str, encoding=None)
        self._write_string('[debug] youtube-dl version ' + __version__ + '\n')
        if _LAZY_LOADER:
            self._write_string('[debug] Lazy loading extractors enabled' + '\n')
        try:
            # Report the git revision when running from a source checkout.
            sp = subprocess.Popen(
                ['git', 'rev-parse', '--short', 'HEAD'],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                cwd=os.path.dirname(os.path.abspath(__file__)))
            out, err = sp.communicate()
            out = out.decode().strip()
            if re.match('[0-9a-f]+', out):
                self._write_string('[debug] Git HEAD: ' + out + '\n')
        except Exception:
            try:
                # sys.exc_clear() only exists on Python 2.
                sys.exc_clear()
            except Exception:
                pass
        self._write_string('[debug] Python version %s - %s\n' % (
            platform.python_version(), platform_name()))
        exe_versions = FFmpegPostProcessor.get_versions(self)
        exe_versions['rtmpdump'] = rtmpdump_version()
        exe_str = ', '.join(
            '%s %s' % (exe, v)
            for exe, v in sorted(exe_versions.items())
            if v
        )
        if not exe_str:
            exe_str = 'none'
        self._write_string('[debug] exe versions: %s\n' % exe_str)
        proxy_map = {}
        for handler in self._opener.handlers:
            if hasattr(handler, 'proxies'):
                proxy_map.update(handler.proxies)
        self._write_string('[debug] Proxy map: ' + compat_str(proxy_map) + '\n')
        if self.params.get('call_home', False):
            # Opt-in: contact yt-dl.org for the public IP and latest version.
            ipaddr = self.urlopen('https://yt-dl.org/ip').read().decode('utf-8')
            self._write_string('[debug] Public IP address: %s\n' % ipaddr)
            latest_version = self.urlopen(
                'https://yt-dl.org/latest/version').read().decode('utf-8')
            if version_tuple(latest_version) > version_tuple(__version__):
                self.report_warning(
                    'You are using an outdated version (newest version: %s)! '
                    'See https://yt-dl.org/update if you need help updating.' %
                    latest_version)
    def _setup_opener(self):
        """Build the urllib opener: cookies, proxies, TLS, data: and file: handling."""
        timeout_val = self.params.get('socket_timeout')
        self._socket_timeout = 600 if timeout_val is None else float(timeout_val)
        opts_cookiefile = self.params.get('cookiefile')
        opts_proxy = self.params.get('proxy')
        if opts_cookiefile is None:
            self.cookiejar = compat_cookiejar.CookieJar()
        else:
            opts_cookiefile = compat_expanduser(opts_cookiefile)
            self.cookiejar = compat_cookiejar.MozillaCookieJar(
                opts_cookiefile)
            if os.access(opts_cookiefile, os.R_OK):
                self.cookiejar.load()
        cookie_processor = YoutubeDLCookieProcessor(self.cookiejar)
        if opts_proxy is not None:
            if opts_proxy == '':
                # An explicit empty --proxy disables all proxies.
                proxies = {}
            else:
                proxies = {'http': opts_proxy, 'https': opts_proxy}
        else:
            proxies = compat_urllib_request.getproxies()
            # Set HTTPS proxy to HTTP one if given (https://github.com/rg3/youtube-dl/issues/805)
            if 'http' in proxies and 'https' not in proxies:
                proxies['https'] = proxies['http']
        proxy_handler = PerRequestProxyHandler(proxies)
        debuglevel = 1 if self.params.get('debug_printtraffic') else 0
        https_handler = make_HTTPS_handler(self.params, debuglevel=debuglevel)
        ydlh = YoutubeDLHandler(self.params, debuglevel=debuglevel)
        data_handler = compat_urllib_request_DataHandler()
        # When passing our own FileHandler instance, build_opener won't add the
        # default FileHandler, which lets us override file_open below and
        # disable the file:// protocol for security reasons.
        file_handler = compat_urllib_request.FileHandler()
        def file_open(*args, **kwargs):
            raise compat_urllib_error.URLError('file:// scheme is explicitly disabled in youtube-dl for security reasons')
        file_handler.file_open = file_open
        opener = compat_urllib_request.build_opener(
            proxy_handler, https_handler, cookie_processor, ydlh, data_handler, file_handler)
        # Clear the default headers (e.g. the urllib User-agent).
        # (See https://github.com/rg3/youtube-dl/issues/1309 for details)
        opener.addheaders = []
        self._opener = opener
    def encode(self, s):
        """Encode *s* to bytes using the configured output encoding."""
        if isinstance(s, bytes):
            return s  # Already encoded
        try:
            return s.encode(self.get_encoding())
        except UnicodeEncodeError as err:
            # Enrich the error so users learn about the --encoding workaround.
            err.reason = err.reason + '. Check your system encoding configuration or use the --encoding option.'
            raise
def get_encoding(self):
encoding = self.params.get('encoding')
if encoding is None:
encoding = preferredencoding()
return encoding
    def _write_thumbnails(self, info_dict, filename):
        """Download thumbnail file(s) next to *filename* according to the options."""
        if self.params.get('writethumbnail', False):
            thumbnails = info_dict.get('thumbnails')
            if thumbnails:
                # Only the last entry is kept — presumably the preferred/best
                # thumbnail; confirm the extractor's ordering convention.
                thumbnails = [thumbnails[-1]]
        elif self.params.get('write_all_thumbnails', False):
            thumbnails = info_dict.get('thumbnails')
        else:
            return
        if not thumbnails:
            # No thumbnails present, so return immediately
            return
        for t in thumbnails:
            thumb_ext = determine_ext(t['url'], 'jpg')
            suffix = '_%s' % t['id'] if len(thumbnails) > 1 else ''
            thumb_display_id = '%s ' % t['id'] if len(thumbnails) > 1 else ''
            t['filename'] = thumb_filename = os.path.splitext(filename)[0] + suffix + '.' + thumb_ext
            if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(thumb_filename)):
                self.to_screen('[%s] %s: Thumbnail %sis already present' %
                               (info_dict['extractor'], info_dict['id'], thumb_display_id))
            else:
                self.to_screen('[%s] %s: Downloading thumbnail %s...' %
                               (info_dict['extractor'], info_dict['id'], thumb_display_id))
                try:
                    uf = self.urlopen(t['url'])
                    with open(encodeFilename(thumb_filename), 'wb') as thumbf:
                        shutil.copyfileobj(uf, thumbf)
                    self.to_screen('[%s] %s: Writing thumbnail %sto: %s' %
                                   (info_dict['extractor'], info_dict['id'], thumb_display_id, thumb_filename))
                except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
                    self.report_warning('Unable to download thumbnail "%s": %s' %
                                        (t['url'], error_to_compat_str(err)))
| true | true |
f725461411704fbc79dc7ae2a9d1d39784ab8ad4 | 678 | py | Python | var/spack/repos/builtin/packages/shapelib/package.py | player1537-forks/spack | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11 | 2015-10-04T02:17:46.000Z | 2018-02-07T18:23:00.000Z | var/spack/repos/builtin/packages/shapelib/package.py | player1537-forks/spack | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 22 | 2017-08-01T22:45:10.000Z | 2022-03-10T07:46:31.000Z | var/spack/repos/builtin/packages/shapelib/package.py | player1537-forks/spack | 822b7632222ec5a91dc7b7cda5fc0e08715bd47c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 4 | 2016-06-10T17:57:39.000Z | 2018-09-11T04:59:38.000Z | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Shapelib(CMakePackage):
    """The Shapefile C Library provides the ability to write simple C programs
    for reading, writing and updating (to a limited extent) ESRI Shapefiles,
    and the associated attribute file (.dbf).
    """
    homepage = "http://shapelib.maptools.org/"
    url = "https://github.com/OSGeo/shapelib/archive/v1.5.0.tar.gz"
    # Checksum pins the exact upstream release tarball.
    version('1.5.0', sha256='48de3a6a8691b0b111b909c0b908af4627635c75322b3a501c0c0885f3558cad')
| 35.684211 | 95 | 0.738938 |
from spack import *
class Shapelib(CMakePackage):
homepage = "http://shapelib.maptools.org/"
url = "https://github.com/OSGeo/shapelib/archive/v1.5.0.tar.gz"
version('1.5.0', sha256='48de3a6a8691b0b111b909c0b908af4627635c75322b3a501c0c0885f3558cad')
| true | true |
f725468d7e8fa6ad83dd70dda266a65d8900285a | 4,522 | py | Python | tests/extension/thread_/stream_reduce_source_join/thread_stream_reduce_source_join.py | Pyverilog/veriloggen | 381ac8920088d986925cf87cb838366eb48a4889 | [
"Apache-2.0"
] | 232 | 2015-09-01T16:07:48.000Z | 2022-03-28T14:53:28.000Z | tests/extension/thread_/stream_reduce_source_join/thread_stream_reduce_source_join.py | Pyverilog/veriloggen | 381ac8920088d986925cf87cb838366eb48a4889 | [
"Apache-2.0"
] | 34 | 2015-08-21T09:13:03.000Z | 2022-03-21T23:52:44.000Z | tests/extension/thread_/stream_reduce_source_join/thread_stream_reduce_source_join.py | shtaxxx/veriloggen | 381ac8920088d986925cf87cb838366eb48a4889 | [
"Apache-2.0"
] | 46 | 2015-09-24T14:39:57.000Z | 2022-02-23T21:59:56.000Z | from __future__ import absolute_import
from __future__ import print_function
import sys
import os
# the next line can be removed after installation
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))))
from veriloggen import *
import veriloggen.thread as vthread
import veriloggen.types.axi as axi
def mkLed():
    """Build the DUT module: AXI master + stream ReduceAddValid pipeline.

    An embedded thread computes three running sums with the stream engine,
    recomputes them sequentially on the RAMs, and compares both results.
    """
    m = Module('blinkled')
    clk = m.Input('CLK')
    rst = m.Input('RST')
    datawidth = 32
    addrwidth = 10
    myaxi = vthread.AXIM(m, 'myaxi', clk, rst, datawidth)
    ram_a = vthread.RAM(m, 'ram_a', clk, rst, datawidth, addrwidth)
    ram_b = vthread.RAM(m, 'ram_b', clk, rst, datawidth, addrwidth)
    strm = vthread.Stream(m, 'mystream', clk, rst)
    a = strm.source('a')
    size = strm.parameter('size')
    sum, sum_valid = strm.ReduceAddValid(a, size)
    strm.sink(sum, 'sum', when=sum_valid, when_name='sum_valid')
    def comp_stream(size, offset):
        # Three back-to-back stream runs; source_join() waits for the source
        # side to finish before the next set_* calls reprogram the stream.
        strm.set_source('a', ram_a, offset, size)
        strm.set_parameter('size', size)
        strm.set_sink('sum', ram_b, offset, 1)
        strm.run()
        strm.set_source('a', ram_a, offset + size, size + size)
        strm.set_parameter('size', size + size)
        strm.set_sink('sum', ram_b, offset + 1, 1)
        strm.source_join()
        strm.run()
        strm.set_source('a', ram_a, offset + size + size + size, size + size + size)
        strm.set_parameter('size', size + size + size)
        strm.set_sink('sum', ram_b, offset + 2, 1)
        strm.source_join()
        strm.run()
        strm.source_join()
        strm.join()
    def comp_sequential(size, offset):
        # Software reference: same three sums computed element by element.
        sum = 0
        for i in range(size):
            a = ram_a.read(i + offset)
            sum += a
        ram_b.write(offset, sum)
        sum = 0
        for i in range(size + size):
            a = ram_a.read(i + offset + size)
            sum += a
        ram_b.write(offset + 1, sum)
        sum = 0
        for i in range(size + size + size):
            a = ram_a.read(i + offset + size + size + size)
            sum += a
        ram_b.write(offset + 2, sum)
    def check(size, offset_stream, offset_seq):
        # Compare the stream results against the sequential reference.
        all_ok = True
        for i in range(size):
            st = ram_b.read(i + offset_stream)
            sq = ram_b.read(i + offset_seq)
            if vthread.verilog.NotEql(st, sq):
                all_ok = False
        if all_ok:
            print('# verify: PASSED')
        else:
            print('# verify: FAILED')
    def comp(size):
        # stream
        offset = 0
        myaxi.dma_read(ram_a, offset, 0, size * 6)
        comp_stream(size, offset)
        myaxi.dma_write(ram_b, offset, 1024, 3)
        # sequential
        offset = size
        myaxi.dma_read(ram_a, offset, 0, size * 6)
        comp_sequential(size, offset)
        myaxi.dma_write(ram_b, offset, 1024 * 2, 3)
        # verification
        myaxi.dma_read(ram_b, 0, 1024, 3)
        myaxi.dma_read(ram_b, offset, 1024 * 2, 3)
        check(3, 0, offset)
        vthread.finish()
    th = vthread.Thread(m, 'th_comp', clk, rst, comp)
    fsm = th.start(32)
    return m
def mkTest(memimg_name=None):
    """Wrap the DUT in a testbench with an AXI memory model, clock and reset."""
    m = Module('test')
    # target instance
    led = mkLed()
    # copy paras and ports
    params = m.copy_params(led)
    ports = m.copy_sim_ports(led)
    clk = ports['CLK']
    rst = ports['RST']
    memory = axi.AxiMemoryModel(m, 'memory', clk, rst, memimg_name=memimg_name)
    memory.connect(ports, 'myaxi')
    uut = m.Instance(led, 'uut',
                     params=m.connect_params(led),
                     ports=m.connect_ports(led))
    # simulation.setup_waveform(m, uut)
    simulation.setup_clock(m, clk, hperiod=5)
    init = simulation.setup_reset(m, rst, m.make_reset(), period=100)
    init.add(
        Delay(1000000),
        Systask('finish'),
    )
    return m
def run(filename='tmp.v', simtype='iverilog', outputfile=None):
    """Build the testbench, optionally emit Verilog, run the simulator.

    Returns the simulator's textual output.
    """
    if outputfile is None:
        base = os.path.splitext(os.path.basename(__file__))[0]
        outputfile = base + '.out'
    test = mkTest(memimg_name='memimg_' + outputfile)
    if filename is not None:
        test.to_verilog(filename)
    simulator = simulation.Simulator(test, sim=simtype)
    result = simulator.run(outputfile=outputfile)
    if simtype == 'verilator':
        # Verilator ends its output with a line starting with '-'; strip it.
        out_lines = result.splitlines()
        if out_lines[-1].startswith('-'):
            result = '\n'.join(out_lines[:-1])
    return result
if __name__ == '__main__':
    # Script entry point: generate the design, simulate, and print the result.
    rslt = run(filename='tmp.v')
    print(rslt)
| 27.406061 | 84 | 0.587351 | from __future__ import absolute_import
from __future__ import print_function
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))))
from veriloggen import *
import veriloggen.thread as vthread
import veriloggen.types.axi as axi
def mkLed():
m = Module('blinkled')
clk = m.Input('CLK')
rst = m.Input('RST')
datawidth = 32
addrwidth = 10
myaxi = vthread.AXIM(m, 'myaxi', clk, rst, datawidth)
ram_a = vthread.RAM(m, 'ram_a', clk, rst, datawidth, addrwidth)
ram_b = vthread.RAM(m, 'ram_b', clk, rst, datawidth, addrwidth)
strm = vthread.Stream(m, 'mystream', clk, rst)
a = strm.source('a')
size = strm.parameter('size')
sum, sum_valid = strm.ReduceAddValid(a, size)
strm.sink(sum, 'sum', when=sum_valid, when_name='sum_valid')
def comp_stream(size, offset):
strm.set_source('a', ram_a, offset, size)
strm.set_parameter('size', size)
strm.set_sink('sum', ram_b, offset, 1)
strm.run()
strm.set_source('a', ram_a, offset + size, size + size)
strm.set_parameter('size', size + size)
strm.set_sink('sum', ram_b, offset + 1, 1)
strm.source_join()
strm.run()
strm.set_source('a', ram_a, offset + size + size + size, size + size + size)
strm.set_parameter('size', size + size + size)
strm.set_sink('sum', ram_b, offset + 2, 1)
strm.source_join()
strm.run()
strm.source_join()
strm.join()
def comp_sequential(size, offset):
sum = 0
for i in range(size):
a = ram_a.read(i + offset)
sum += a
ram_b.write(offset, sum)
sum = 0
for i in range(size + size):
a = ram_a.read(i + offset + size)
sum += a
ram_b.write(offset + 1, sum)
sum = 0
for i in range(size + size + size):
a = ram_a.read(i + offset + size + size + size)
sum += a
ram_b.write(offset + 2, sum)
def check(size, offset_stream, offset_seq):
all_ok = True
for i in range(size):
st = ram_b.read(i + offset_stream)
sq = ram_b.read(i + offset_seq)
if vthread.verilog.NotEql(st, sq):
all_ok = False
if all_ok:
print('# verify: PASSED')
else:
print('# verify: FAILED')
def comp(size):
offset = 0
myaxi.dma_read(ram_a, offset, 0, size * 6)
comp_stream(size, offset)
myaxi.dma_write(ram_b, offset, 1024, 3)
offset = size
myaxi.dma_read(ram_a, offset, 0, size * 6)
comp_sequential(size, offset)
myaxi.dma_write(ram_b, offset, 1024 * 2, 3)
myaxi.dma_read(ram_b, 0, 1024, 3)
myaxi.dma_read(ram_b, offset, 1024 * 2, 3)
check(3, 0, offset)
vthread.finish()
th = vthread.Thread(m, 'th_comp', clk, rst, comp)
fsm = th.start(32)
return m
def mkTest(memimg_name=None):
m = Module('test')
led = mkLed()
params = m.copy_params(led)
ports = m.copy_sim_ports(led)
clk = ports['CLK']
rst = ports['RST']
memory = axi.AxiMemoryModel(m, 'memory', clk, rst, memimg_name=memimg_name)
memory.connect(ports, 'myaxi')
uut = m.Instance(led, 'uut',
params=m.connect_params(led),
ports=m.connect_ports(led))
simulation.setup_clock(m, clk, hperiod=5)
init = simulation.setup_reset(m, rst, m.make_reset(), period=100)
init.add(
Delay(1000000),
Systask('finish'),
)
return m
def run(filename='tmp.v', simtype='iverilog', outputfile=None):
if outputfile is None:
outputfile = os.path.splitext(os.path.basename(__file__))[0] + '.out'
memimg_name = 'memimg_' + outputfile
test = mkTest(memimg_name=memimg_name)
if filename is not None:
test.to_verilog(filename)
sim = simulation.Simulator(test, sim=simtype)
rslt = sim.run(outputfile=outputfile)
lines = rslt.splitlines()
if simtype == 'verilator' and lines[-1].startswith('-'):
rslt = '\n'.join(lines[:-1])
return rslt
if __name__ == '__main__':
rslt = run(filename='tmp.v')
print(rslt)
| true | true |
f72546b650f48e4a4338cf5ec715dc344a3257bd | 14,857 | py | Python | slack_sdk/socket_mode/aiohttp/__init__.py | jans-forks/python-slackclient | ff798cbe00ead477ce98efa8468cb2c1c99635f3 | [
"MIT"
] | null | null | null | slack_sdk/socket_mode/aiohttp/__init__.py | jans-forks/python-slackclient | ff798cbe00ead477ce98efa8468cb2c1c99635f3 | [
"MIT"
] | null | null | null | slack_sdk/socket_mode/aiohttp/__init__.py | jans-forks/python-slackclient | ff798cbe00ead477ce98efa8468cb2c1c99635f3 | [
"MIT"
] | null | null | null | """aiohttp based Socket Mode client
* https://api.slack.com/apis/connections/socket
* https://slack.dev/python-slack-sdk/socket-mode/
* https://pypi.org/project/aiohttp/
"""
import asyncio
import logging
import time
from asyncio import Future, Lock
from asyncio import Queue
from logging import Logger
from typing import Union, Optional, List, Callable, Awaitable
import aiohttp
from aiohttp import ClientWebSocketResponse, WSMessage, WSMsgType, ClientConnectionError
from slack_sdk.proxy_env_variable_loader import load_http_proxy_from_env
from slack_sdk.socket_mode.async_client import AsyncBaseSocketModeClient
from slack_sdk.socket_mode.async_listeners import (
AsyncWebSocketMessageListener,
AsyncSocketModeRequestListener,
)
from slack_sdk.socket_mode.request import SocketModeRequest
from slack_sdk.web.async_client import AsyncWebClient
class SocketModeClient(AsyncBaseSocketModeClient):
    """aiohttp-based Socket Mode client (see the module docstring)."""

    logger: Logger
    web_client: AsyncWebClient
    app_token: str
    wss_uri: Optional[str]
    auto_reconnect_enabled: bool
    message_queue: Queue
    message_listeners: List[
        Union[
            AsyncWebSocketMessageListener,
            Callable[
                ["AsyncBaseSocketModeClient", dict, Optional[str]], Awaitable[None]
            ],
        ]
    ]
    socket_mode_request_listeners: List[
        Union[
            AsyncSocketModeRequestListener,
            Callable[["AsyncBaseSocketModeClient", SocketModeRequest], Awaitable[None]],
        ]
    ]
    message_receiver: Optional[Future]
    message_processor: Future
    proxy: Optional[str]
    ping_interval: float
    trace_enabled: bool
    # Timestamp (time.time()) of the last successful sdk-level ping/pong.
    last_ping_pong_time: Optional[float]
    current_session: Optional[ClientWebSocketResponse]
    current_session_monitor: Optional[Future]
    # NOTE(review): duplicate annotation — auto_reconnect_enabled is already
    # declared above.
    auto_reconnect_enabled: bool
    default_auto_reconnect_enabled: bool
    closed: bool
    stale: bool
    connect_operation_lock: Lock
    on_message_listeners: List[Callable[[WSMessage], Awaitable[None]]]
    on_error_listeners: List[Callable[[WSMessage], Awaitable[None]]]
    on_close_listeners: List[Callable[[WSMessage], Awaitable[None]]]
    def __init__(
        self,
        app_token: str,
        logger: Optional[Logger] = None,
        web_client: Optional[AsyncWebClient] = None,
        proxy: Optional[str] = None,
        auto_reconnect_enabled: bool = True,
        ping_interval: float = 5,
        trace_enabled: bool = False,
        on_message_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
        on_error_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
        on_close_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
    ):
        """Socket Mode client
        Args:
            app_token: App-level token
            logger: Custom logger
            web_client: Web API client
            auto_reconnect_enabled: True if automatic reconnection is enabled (default: True)
            ping_interval: interval for ping-pong with Slack servers (seconds)
            trace_enabled: True if more verbose logs to see what's happening under the hood
            proxy: the HTTP proxy URL
            on_message_listeners: listener functions for on_message
            on_error_listeners: listener functions for on_error
            on_close_listeners: listener functions for on_close
        """
        self.app_token = app_token
        self.logger = logger or logging.getLogger(__name__)
        self.web_client = web_client or AsyncWebClient()
        self.closed = False
        self.stale = False
        self.connect_operation_lock = Lock()
        self.proxy = proxy
        if self.proxy is None or len(self.proxy.strip()) == 0:
            # Fall back to HTTP(S)_PROXY environment variables.
            env_variable = load_http_proxy_from_env(self.logger)
            if env_variable is not None:
                self.proxy = env_variable
        self.default_auto_reconnect_enabled = auto_reconnect_enabled
        self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
        self.ping_interval = ping_interval
        self.trace_enabled = trace_enabled
        self.last_ping_pong_time = None
        self.wss_uri = None
        self.message_queue = Queue()
        self.message_listeners = []
        self.socket_mode_request_listeners = []
        self.current_session = None
        self.current_session_monitor = None
        # https://docs.aiohttp.org/en/stable/client_reference.html
        # Unless you are connecting to a large, unknown number of different servers
        # over the lifetime of your application,
        # it is suggested you use a single session for the lifetime of your application
        # to benefit from connection pooling.
        self.aiohttp_client_session = aiohttp.ClientSession()
        self.on_message_listeners = on_message_listeners or []
        self.on_error_listeners = on_error_listeners or []
        self.on_close_listeners = on_close_listeners or []
        self.message_receiver = None
        self.message_processor = asyncio.ensure_future(self.process_messages())
    async def monitor_current_session(self) -> None:
        """Background task: ping the server periodically and reconnect when the
        connection looks closed or stale (no pong for 4x ping_interval)."""
        try:
            while not self.closed:
                try:
                    await asyncio.sleep(self.ping_interval)
                    if self.current_session is not None:
                        t = time.time()
                        if self.last_ping_pong_time is None:
                            self.last_ping_pong_time = float(t)
                        # Payload carries the send time; echoed back in the PONG.
                        await self.current_session.ping(f"sdk-ping-pong:{t}")
                    if self.auto_reconnect_enabled:
                        should_reconnect = False
                        if self.current_session is None or self.current_session.closed:
                            self.logger.info(
                                "The session seems to be already closed. Reconnecting..."
                            )
                            should_reconnect = True
                        if self.last_ping_pong_time is not None:
                            disconnected_seconds = int(
                                time.time() - self.last_ping_pong_time
                            )
                            if disconnected_seconds >= (self.ping_interval * 4):
                                self.logger.info(
                                    "The connection seems to be stale. Reconnecting..."
                                    f" reason: disconnected for {disconnected_seconds}+ seconds)"
                                )
                                self.stale = True
                                self.last_ping_pong_time = None
                                should_reconnect = True
                        if should_reconnect is True or not await self.is_connected():
                            await self.connect_to_new_endpoint()
                except Exception as e:
                    self.logger.error(
                        "Failed to check the current session or reconnect to the server "
                        f"(error: {type(e).__name__}, message: {e})"
                    )
        except asyncio.CancelledError:
            if self.trace_enabled:
                self.logger.debug(
                    "The running monitor_current_session task is now cancelled"
                )
            raise
    async def receive_messages(self) -> None:
        """Background task: receive WebSocket messages and dispatch them
        (TEXT -> queue + listeners, CLOSE -> reconnect, PING/PONG -> keepalive)
        until the client is closed."""
        try:
            consecutive_error_count = 0
            while not self.closed:
                try:
                    message: WSMessage = await self.current_session.receive()
                    if self.trace_enabled and self.logger.level <= logging.DEBUG:
                        # (shadows the builtin 'type' within this scope)
                        type = WSMsgType(message.type)
                        message_type = type.name if type is not None else message.type
                        message_data = message.data
                        if isinstance(message_data, bytes):
                            message_data = message_data.decode("utf-8")
                        if len(message_data) > 0:
                            # To skip the empty message that Slack server-side often sends
                            self.logger.debug(
                                f"Received message (type: {message_type}, data: {message_data}, extra: {message.extra})"
                            )
                    if message is not None:
                        if message.type == WSMsgType.TEXT:
                            message_data = message.data
                            await self.enqueue_message(message_data)
                            for listener in self.on_message_listeners:
                                await listener(message)
                        elif message.type == WSMsgType.CLOSE:
                            if self.auto_reconnect_enabled:
                                self.logger.info(
                                    "Received CLOSE event. Reconnecting..."
                                )
                                await self.connect_to_new_endpoint()
                            for listener in self.on_close_listeners:
                                await listener(message)
                        elif message.type == WSMsgType.ERROR:
                            for listener in self.on_error_listeners:
                                await listener(message)
                        elif message.type == WSMsgType.CLOSED:
                            await asyncio.sleep(self.ping_interval)
                            continue
                        elif message.type == WSMsgType.PING:
                            await self.current_session.pong(message.data)
                            continue
                        elif message.type == WSMsgType.PONG:
                            if message.data is not None:
                                str_message_data = message.data.decode("utf-8")
                                elements = str_message_data.split(":")
                                if (
                                    len(elements) == 2
                                    and elements[0] == "sdk-ping-pong"
                                ):
                                    # Our own ping payload came back: record
                                    # the time it was sent as the liveness mark.
                                    try:
                                        self.last_ping_pong_time = float(elements[1])
                                    except Exception as e:
                                        self.logger.warning(
                                            f"Failed to parse the last_ping_pong_time value from {str_message_data}"
                                            f" - error : {e}"
                                        )
                            continue
                    consecutive_error_count = 0
                except Exception as e:
                    # Linear backoff on repeated failures; fixed wait for
                    # connection-level errors.
                    consecutive_error_count += 1
                    self.logger.error(
                        f"Failed to receive or enqueue a message: {type(e).__name__}, {e}"
                    )
                    if isinstance(e, ClientConnectionError):
                        await asyncio.sleep(self.ping_interval)
                    else:
                        await asyncio.sleep(consecutive_error_count)
        except asyncio.CancelledError:
            if self.trace_enabled:
                self.logger.debug("The running receive_messages task is now cancelled")
            raise
    async def is_connected(self) -> bool:
        """Return True when an open, non-stale WebSocket session exists."""
        return (
            not self.closed
            and not self.stale
            and self.current_session is not None
            and not self.current_session.closed
        )
    async def connect(self):
        """Open a new WebSocket session (fetching a wss URL if needed) and
        replace any existing session along with its monitor/receiver tasks."""
        old_session = None if self.current_session is None else self.current_session
        if self.wss_uri is None:
            self.wss_uri = await self.issue_new_wss_url()
        self.current_session = await self.aiohttp_client_session.ws_connect(
            self.wss_uri,
            autoping=False,
            heartbeat=self.ping_interval,
            proxy=self.proxy,
        )
        self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
        self.stale = False
        self.logger.info("A new session has been established")
        if self.current_session_monitor is not None:
            self.current_session_monitor.cancel()
        self.current_session_monitor = asyncio.ensure_future(
            self.monitor_current_session()
        )
        if self.message_receiver is not None:
            self.message_receiver.cancel()
        self.message_receiver = asyncio.ensure_future(self.receive_messages())
        if old_session is not None:
            await old_session.close()
            self.logger.info("The old session has been abandoned")
    async def disconnect(self):
        """Close the current WebSocket session, if any."""
        if self.current_session is not None:
            await self.current_session.close()
            self.logger.info("The session has been abandoned")
    async def send_message(self, message: str):
        """Send a text frame over the active session, retrying once if the
        underlying connection was replaced mid-send."""
        if self.logger.level <= logging.DEBUG:
            self.logger.debug(f"Sending a message: {message}")
        try:
            await self.current_session.send_str(message)
        except ConnectionError as e:
            # We rarely get this exception while replacing the underlying WebSocket connections.
            # We can do one more try here as the self.current_session should be ready now.
            if self.logger.level <= logging.DEBUG:
                self.logger.debug(
                    f"Failed to send a message (error: {e}, message: {message})"
                    " as the underlying connection was replaced. Retrying the same request only one time..."
                )
            # Although acquiring self.connect_operation_lock also for the first method call is the safest way,
            # we avoid synchronizing a lot for better performance. That's why we are doing a retry here.
            try:
                await self.connect_operation_lock.acquire()
                if await self.is_connected():
                    await self.current_session.send_str(message)
                else:
                    self.logger.warning(
                        "The current session is no longer active. Failed to send a message"
                    )
                    raise e
            finally:
                if self.connect_operation_lock.locked() is True:
                    self.connect_operation_lock.release()
    async def close(self):
        """Shut the client down: disconnect, cancel all background tasks and
        dispose of the shared aiohttp session."""
        self.closed = True
        self.auto_reconnect_enabled = False
        await self.disconnect()
        if self.message_processor is not None:
            self.message_processor.cancel()
        if self.current_session_monitor is not None:
            self.current_session_monitor.cancel()
        if self.message_receiver is not None:
            self.message_receiver.cancel()
        if self.aiohttp_client_session is not None:
            await self.aiohttp_client_session.close()
| 43.825959 | 120 | 0.568755 | import asyncio
import logging
import time
from asyncio import Future, Lock
from asyncio import Queue
from logging import Logger
from typing import Union, Optional, List, Callable, Awaitable
import aiohttp
from aiohttp import ClientWebSocketResponse, WSMessage, WSMsgType, ClientConnectionError
from slack_sdk.proxy_env_variable_loader import load_http_proxy_from_env
from slack_sdk.socket_mode.async_client import AsyncBaseSocketModeClient
from slack_sdk.socket_mode.async_listeners import (
AsyncWebSocketMessageListener,
AsyncSocketModeRequestListener,
)
from slack_sdk.socket_mode.request import SocketModeRequest
from slack_sdk.web.async_client import AsyncWebClient
class SocketModeClient(AsyncBaseSocketModeClient):
logger: Logger
web_client: AsyncWebClient
app_token: str
wss_uri: Optional[str]
auto_reconnect_enabled: bool
message_queue: Queue
message_listeners: List[
Union[
AsyncWebSocketMessageListener,
Callable[
["AsyncBaseSocketModeClient", dict, Optional[str]], Awaitable[None]
],
]
]
socket_mode_request_listeners: List[
Union[
AsyncSocketModeRequestListener,
Callable[["AsyncBaseSocketModeClient", SocketModeRequest], Awaitable[None]],
]
]
message_receiver: Optional[Future]
message_processor: Future
proxy: Optional[str]
ping_interval: float
trace_enabled: bool
last_ping_pong_time: Optional[float]
current_session: Optional[ClientWebSocketResponse]
current_session_monitor: Optional[Future]
auto_reconnect_enabled: bool
default_auto_reconnect_enabled: bool
closed: bool
stale: bool
connect_operation_lock: Lock
on_message_listeners: List[Callable[[WSMessage], Awaitable[None]]]
on_error_listeners: List[Callable[[WSMessage], Awaitable[None]]]
on_close_listeners: List[Callable[[WSMessage], Awaitable[None]]]
def __init__(
self,
app_token: str,
logger: Optional[Logger] = None,
web_client: Optional[AsyncWebClient] = None,
proxy: Optional[str] = None,
auto_reconnect_enabled: bool = True,
ping_interval: float = 5,
trace_enabled: bool = False,
on_message_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
on_error_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
on_close_listeners: Optional[List[Callable[[WSMessage], None]]] = None,
):
self.app_token = app_token
self.logger = logger or logging.getLogger(__name__)
self.web_client = web_client or AsyncWebClient()
self.closed = False
self.stale = False
self.connect_operation_lock = Lock()
self.proxy = proxy
if self.proxy is None or len(self.proxy.strip()) == 0:
env_variable = load_http_proxy_from_env(self.logger)
if env_variable is not None:
self.proxy = env_variable
self.default_auto_reconnect_enabled = auto_reconnect_enabled
self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
self.ping_interval = ping_interval
self.trace_enabled = trace_enabled
self.last_ping_pong_time = None
self.wss_uri = None
self.message_queue = Queue()
self.message_listeners = []
self.socket_mode_request_listeners = []
self.current_session = None
self.current_session_monitor = None
self.aiohttp_client_session = aiohttp.ClientSession()
self.on_message_listeners = on_message_listeners or []
self.on_error_listeners = on_error_listeners or []
self.on_close_listeners = on_close_listeners or []
self.message_receiver = None
self.message_processor = asyncio.ensure_future(self.process_messages())
    async def monitor_current_session(self) -> None:
        """Background task: ping the server every `ping_interval` seconds and
        reconnect whenever the session is found closed or stale.

        CancelledError is re-raised so the owning task sees the cancellation.
        """
        try:
            while not self.closed:
                try:
                    await asyncio.sleep(self.ping_interval)
                    if self.current_session is not None:
                        t = time.time()
                        if self.last_ping_pong_time is None:
                            self.last_ping_pong_time = float(t)
                        # "sdk-ping-pong:<ts>" payload lets the PONG handler in
                        # receive_messages() parse the timestamp back out.
                        await self.current_session.ping(f"sdk-ping-pong:{t}")
                    if self.auto_reconnect_enabled:
                        should_reconnect = False
                        if self.current_session is None or self.current_session.closed:
                            self.logger.info(
                                "The session seems to be already closed. Reconnecting..."
                            )
                            should_reconnect = True
                        if self.last_ping_pong_time is not None:
                            disconnected_seconds = int(
                                time.time() - self.last_ping_pong_time
                            )
                            # No pong for 4 ping intervals => treat as stale.
                            if disconnected_seconds >= (self.ping_interval * 4):
                                self.logger.info(
                                    "The connection seems to be stale. Reconnecting..."
                                    f" reason: disconnected for {disconnected_seconds}+ seconds)"
                                )
                                self.stale = True
                                self.last_ping_pong_time = None
                                should_reconnect = True
                        if should_reconnect is True or not await self.is_connected():
                            await self.connect_to_new_endpoint()
                except Exception as e:
                    # Keep the monitor alive: one failed check or reconnect
                    # attempt must not terminate the loop.
                    self.logger.error(
                        "Failed to check the current session or reconnect to the server "
                        f"(error: {type(e).__name__}, message: {e})"
                    )
        except asyncio.CancelledError:
            if self.trace_enabled:
                self.logger.debug(
                    "The running monitor_current_session task is now cancelled"
                )
            raise
async def receive_messages(self) -> None:
try:
consecutive_error_count = 0
while not self.closed:
try:
message: WSMessage = await self.current_session.receive()
if self.trace_enabled and self.logger.level <= logging.DEBUG:
type = WSMsgType(message.type)
message_type = type.name if type is not None else message.type
message_data = message.data
if isinstance(message_data, bytes):
message_data = message_data.decode("utf-8")
if len(message_data) > 0:
self.logger.debug(
f"Received message (type: {message_type}, data: {message_data}, extra: {message.extra})"
)
if message is not None:
if message.type == WSMsgType.TEXT:
message_data = message.data
await self.enqueue_message(message_data)
for listener in self.on_message_listeners:
await listener(message)
elif message.type == WSMsgType.CLOSE:
if self.auto_reconnect_enabled:
self.logger.info(
"Received CLOSE event. Reconnecting..."
)
await self.connect_to_new_endpoint()
for listener in self.on_close_listeners:
await listener(message)
elif message.type == WSMsgType.ERROR:
for listener in self.on_error_listeners:
await listener(message)
elif message.type == WSMsgType.CLOSED:
await asyncio.sleep(self.ping_interval)
continue
elif message.type == WSMsgType.PING:
await self.current_session.pong(message.data)
continue
elif message.type == WSMsgType.PONG:
if message.data is not None:
str_message_data = message.data.decode("utf-8")
elements = str_message_data.split(":")
if (
len(elements) == 2
and elements[0] == "sdk-ping-pong"
):
try:
self.last_ping_pong_time = float(elements[1])
except Exception as e:
self.logger.warning(
f"Failed to parse the last_ping_pong_time value from {str_message_data}"
f" - error : {e}"
)
continue
consecutive_error_count = 0
except Exception as e:
consecutive_error_count += 1
self.logger.error(
f"Failed to receive or enqueue a message: {type(e).__name__}, {e}"
)
if isinstance(e, ClientConnectionError):
await asyncio.sleep(self.ping_interval)
else:
await asyncio.sleep(consecutive_error_count)
except asyncio.CancelledError:
if self.trace_enabled:
self.logger.debug("The running receive_messages task is now cancelled")
raise
async def is_connected(self) -> bool:
return (
not self.closed
and not self.stale
and self.current_session is not None
and not self.current_session.closed
)
    async def connect(self):
        """Open a new Socket Mode WebSocket session, (re)start the monitor and
        receiver tasks, then close any previously active session.
        """
        old_session = None if self.current_session is None else self.current_session
        if self.wss_uri is None:
            self.wss_uri = await self.issue_new_wss_url()
        # autoping is disabled because monitor_current_session() sends its own
        # "sdk-ping-pong" pings carrying a parseable timestamp payload.
        self.current_session = await self.aiohttp_client_session.ws_connect(
            self.wss_uri,
            autoping=False,
            heartbeat=self.ping_interval,
            proxy=self.proxy,
        )
        self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
        self.stale = False
        self.logger.info("A new session has been established")
        # Replace (not stack) the background tasks bound to the old session.
        if self.current_session_monitor is not None:
            self.current_session_monitor.cancel()
        self.current_session_monitor = asyncio.ensure_future(
            self.monitor_current_session()
        )
        if self.message_receiver is not None:
            self.message_receiver.cancel()
        self.message_receiver = asyncio.ensure_future(self.receive_messages())
        if old_session is not None:
            await old_session.close()
            self.logger.info("The old session has been abandoned")
async def disconnect(self):
if self.current_session is not None:
await self.current_session.close()
self.logger.info("The session has been abandoned")
async def send_message(self, message: str):
if self.logger.level <= logging.DEBUG:
self.logger.debug(f"Sending a message: {message}")
try:
await self.current_session.send_str(message)
except ConnectionError as e:
if self.logger.level <= logging.DEBUG:
self.logger.debug(
f"Failed to send a message (error: {e}, message: {message})"
" as the underlying connection was replaced. Retrying the same request only one time..."
)
try:
await self.connect_operation_lock.acquire()
if await self.is_connected():
await self.current_session.send_str(message)
else:
self.logger.warning(
"The current session is no longer active. Failed to send a message"
)
raise e
finally:
if self.connect_operation_lock.locked() is True:
self.connect_operation_lock.release()
async def close(self):
self.closed = True
self.auto_reconnect_enabled = False
await self.disconnect()
if self.message_processor is not None:
self.message_processor.cancel()
if self.current_session_monitor is not None:
self.current_session_monitor.cancel()
if self.message_receiver is not None:
self.message_receiver.cancel()
if self.aiohttp_client_session is not None:
await self.aiohttp_client_session.close()
| true | true |
f72547daa09175d778fa9ae8f7c4bef863150db5 | 840 | py | Python | Setup/PreRelease/setup.py | tushariyer/sit-rep | c7ff39182e78b6630922765289f6343a7f14e18a | [
"MIT"
] | null | null | null | Setup/PreRelease/setup.py | tushariyer/sit-rep | c7ff39182e78b6630922765289f6343a7f14e18a | [
"MIT"
] | null | null | null | Setup/PreRelease/setup.py | tushariyer/sit-rep | c7ff39182e78b6630922765289f6343a7f14e18a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from distutils.core import setup
long_desc = 'Licensed under the generic MIT License.\"sit-rep\" can either be downloaded from the ' \
'Releases page on GitHub and manually added to PATH or installed via \"pip\".'
version = ''
with open("Setup/version.txt", "r", encoding="utf-8") as fh:
version = fh.read()
fh.close()
setup(name='sit-rep-prerelease',
version=version,
py_modules=['sit-rep-prerelease'],
description='Sit Rep [Pre-Release]| The System Situation Report',
long_description=long_desc,
long_description_content_type='text/markdown',
author='Tushar Iyer',
author_email='',
url='https://github.com/tushariyer/sit-rep',
project_urls={
"Bug Tracker": "https://github.com/tushariyer/sit-rep/issues",
}
) | 33.6 | 101 | 0.65 |
from distutils.core import setup
long_desc = 'Licensed under the generic MIT License.\"sit-rep\" can either be downloaded from the ' \
'Releases page on GitHub and manually added to PATH or installed via \"pip\".'
version = ''
with open("Setup/version.txt", "r", encoding="utf-8") as fh:
version = fh.read()
fh.close()
setup(name='sit-rep-prerelease',
version=version,
py_modules=['sit-rep-prerelease'],
description='Sit Rep [Pre-Release]| The System Situation Report',
long_description=long_desc,
long_description_content_type='text/markdown',
author='Tushar Iyer',
author_email='',
url='https://github.com/tushariyer/sit-rep',
project_urls={
"Bug Tracker": "https://github.com/tushariyer/sit-rep/issues",
}
) | true | true |
f72547e17a96865f9a04a22b21403532cfa71d79 | 3,673 | py | Python | src/tools/vis_tracking_kittimots.py | gafaua/PolyTrack | 5a4b409732b9396be8271f5cba4ad426808d5af5 | [
"MIT"
] | 10 | 2021-11-07T04:25:08.000Z | 2022-03-25T03:33:21.000Z | src/tools/vis_tracking_kittimots.py | gafaua/PolyTrack | 5a4b409732b9396be8271f5cba4ad426808d5af5 | [
"MIT"
] | null | null | null | src/tools/vis_tracking_kittimots.py | gafaua/PolyTrack | 5a4b409732b9396be8271f5cba4ad426808d5af5 | [
"MIT"
] | 6 | 2021-11-03T21:27:06.000Z | 2022-03-27T17:27:40.000Z | import numpy as np
import cv2
import os
import glob
import sys
from collections import defaultdict
from pathlib import Path
import pycocotools.mask as rletools
from PIL import Image, ImageDraw
import matplotlib.pyplot as plt
DATA_PATH = '../../data/KITTIMOTS/'
IMG_PATH = DATA_PATH + 'train/'
SAVE_VIDEO = False
IS_GT = True
cats = ['Car', 'Pedestrian']
cat_ids = {cat: i for i, cat in enumerate(cats)}
COLORS = [(255, 0, 255), (122, 122, 255), (255, 0, 0)]
def draw_bbox(img, bboxes, c=(255, 0, 255)):
    """Draw tracking boxes and their track ids on `img` in place.

    Each bbox is [x1, y1, x2, y2, track_id, class_id]; the color is taken
    from COLORS indexed by class_id, and the track id is printed at the
    box center.

    NOTE(review): the `c` parameter is never used -- the per-class COLORS
    entry always wins. Presumably a leftover default; confirm before removing.
    """
    for bbox in bboxes:
        color = COLORS[int(bbox[5])]
        cv2.rectangle(img, (int(bbox[0]), int(bbox[1])),
                      (int(bbox[2]), int(bbox[3])),
                      color, 2, lineType=cv2.LINE_AA)
        # Box center, used as the anchor for the track-id label.
        ct = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2]
        txt = '{}'.format(int(bbox[4]))
        cv2.putText(img, txt, (int(ct[0]), int(ct[1])),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5,
                    color, thickness=1, lineType=cv2.LINE_AA)
if __name__ == '__main__':
    # Visualize ground-truth instance masks for selected KITTI MOTS sequences:
    # for each frame, merge all RLE masks of known classes and save an
    # alpha-blended overlay image.
    # seqs = os.listdir(IMG_PATH)  # uncomment to process every sequence
    seqs = ['0001']
    for seq in sorted(seqs):
        print('seq', seq)
        if '.DS_Store' in seq:
            continue
        gt_file = DATA_PATH + 'instances_txt/' + seq + '.txt'
        with open(gt_file, 'r') as f:
            lines = f.readlines()
        lines = [l.split() for l in lines]

        # Group the RLE-encoded masks by frame id, keeping only known classes.
        frame_count = -1
        im_to_inst = {}
        for l in lines:
            frame, oid, cid, h, w, rle = l
            if int(cid) - 1 not in cat_ids.values():
                continue
            frame = int(frame)
            if frame_count != frame:
                frame_count = frame
                im_to_inst[frame] = []
            im_to_inst[frame].append(rle)

        # h/w come from the last annotation line; assumes every frame in the
        # sequence shares the same image size -- TODO confirm for KITTI MOTS.
        size = [int(h), int(w)]
        for i in im_to_inst:
            img = Image.open(os.path.join(IMG_PATH, '{}/{:06d}.png'.format(seq, i))).convert('RGBA')
            # np.float was removed from NumPy (1.24+); a boolean mask is what
            # the logical_or accumulation produces anyway.
            merged = np.zeros(size, dtype=bool)
            print(f'Frame {i}: {len(im_to_inst[i])} masks')
            for mask in im_to_inst[i]:
                m = {'size': size, 'counts': mask.encode(encoding='UTF-8')}
                binary_mask = rletools.decode(m)
                merged = np.logical_or(merged, binary_mask)
            # Blend a flat overlay color through the merged mask (alpha 128).
            merged_mask = Image.fromarray(np.uint8(merged * 128), mode='L')
            color = Image.new('RGBA', (size[1], size[0]), (228, 150, 150, 255))
            image = Image.composite(color, img, merged_mask)
            image.save('../../data/KITTIMOTS/examples/{:06d}.png'.format(i))
| 30.865546 | 94 | 0.572829 | import numpy as np
import cv2
import os
import glob
import sys
from collections import defaultdict
from pathlib import Path
import pycocotools.mask as rletools
from PIL import Image, ImageDraw
import matplotlib.pyplot as plt
DATA_PATH = '../../data/KITTIMOTS/'
IMG_PATH = DATA_PATH + 'train/'
SAVE_VIDEO = False
IS_GT = True
cats = ['Car', 'Pedestrian']
cat_ids = {cat: i for i, cat in enumerate(cats)}
COLORS = [(255, 0, 255), (122, 122, 255), (255, 0, 0)]
def draw_bbox(img, bboxes, c=(255, 0, 255)):
    """Draw each tracking box and its track id onto `img` in place."""
    for box in bboxes:
        col = COLORS[int(box[5])]
        top_left = (int(box[0]), int(box[1]))
        bottom_right = (int(box[2]), int(box[3]))
        cv2.rectangle(img, top_left, bottom_right, col, 2, lineType=cv2.LINE_AA)
        # Label the track id at the box center.
        center_x = (box[0] + box[2]) / 2
        center_y = (box[1] + box[3]) / 2
        label = '{}'.format(int(box[4]))
        cv2.putText(img, label, (int(center_x), int(center_y)),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5,
                    col, thickness=1, lineType=cv2.LINE_AA)
if __name__ == '__main__':
    # Visualize ground-truth instance masks for selected KITTI MOTS sequences.
    seqs = ['0001']
    for seq in sorted(seqs):
        print('seq', seq)
        if '.DS_Store' in seq:
            continue
        gt_file = DATA_PATH + 'instances_txt/' + seq + '.txt'
        with open(gt_file, 'r') as f:
            lines = f.readlines()
        lines = [l.split() for l in lines]

        # Group the RLE-encoded masks by frame id, keeping only known classes.
        frame_count = -1
        im_to_inst = {}
        for l in lines:
            frame, oid, cid, h, w, rle = l
            if int(cid) - 1 not in cat_ids.values():
                continue
            frame = int(frame)
            if frame_count != frame:
                frame_count = frame
                im_to_inst[frame] = []
            im_to_inst[frame].append(rle)

        # h/w come from the last annotation line; assumes a constant image
        # size per sequence -- TODO confirm.
        size = [int(h), int(w)]
        for i in im_to_inst:
            img = Image.open(os.path.join(IMG_PATH, '{}/{:06d}.png'.format(seq, i))).convert('RGBA')
            # np.float was removed from NumPy (1.24+); use a boolean mask.
            merged = np.zeros(size, dtype=bool)
            print(f'Frame {i}: {len(im_to_inst[i])} masks')
            for mask in im_to_inst[i]:
                m = {'size': size, 'counts': mask.encode(encoding='UTF-8')}
                binary_mask = rletools.decode(m)
                merged = np.logical_or(merged, binary_mask)
            merged_mask = Image.fromarray(np.uint8(merged * 128), mode='L')
            color = Image.new('RGBA', (size[1], size[0]), (228, 150, 150, 255))
            image = Image.composite(color, img, merged_mask)
            image.save('../../data/KITTIMOTS/examples/{:06d}.png'.format(i))
| true | true |
f7254891c728997635a95c7943f2f2e7d783a797 | 14,518 | py | Python | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/syncrep/test_basic.py | rodel-talampas/gpdb | 9c955e350334abbd922102f289f782697eb52069 | [
"PostgreSQL",
"Apache-2.0"
] | 9 | 2018-04-20T03:31:01.000Z | 2020-05-13T14:10:53.000Z | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/syncrep/test_basic.py | rodel-talampas/gpdb | 9c955e350334abbd922102f289f782697eb52069 | [
"PostgreSQL",
"Apache-2.0"
] | 36 | 2017-09-21T09:12:27.000Z | 2020-06-17T16:40:48.000Z | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/syncrep/test_basic.py | rodel-talampas/gpdb | 9c955e350334abbd922102f289f782697eb52069 | [
"PostgreSQL",
"Apache-2.0"
] | 32 | 2017-08-31T12:50:52.000Z | 2022-03-01T07:34:53.000Z | #!/usr/bin/env python
"""
Copyright (c) 2004-Present Pivotal Software, Inc.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from tinctest import logger
from mpp.lib.PSQL import PSQL
from mpp.models import MPPTestCase
from gppylib.db import dbconn
from mpp.gpdb.tests.storage.walrepl.run import StandbyRunMixin
from mpp.gpdb.tests.storage import walrepl
from mpp.gpdb.tests.storage.walrepl.lib.walcomm import *
from mpp.gpdb.tests.storage.walrepl.lib import PgControlData
from gppylib.commands.base import Command
import os
import re
import select
import signal
import subprocess
import time
import sys
class syncrep(StandbyRunMixin, MPPTestCase):
    def generate_trigger_file(self, content):
        """Write `content` into the 'wal_rcv_test' trigger file in the
        standby's data directory; the WAL receiver acts on it when signaled
        with SIGUSR2 (see test_syncrep).

        NOTE(review): opening in 'wb' and writing a str only works on
        Python 2; this suite targets Python 2 (it raises StandardError
        elsewhere).
        """
        filename = 'wal_rcv_test'
        self.assertTrue(content is not None)
        filepath = os.path.join(self.standby.datadir, filename)
        with open(filepath, 'wb') as f:
            f.write(content)
def wait_stdout(self, proc, timeout):
rlist = [proc.stdout.fileno()]
(rout, _, _) = select.select(rlist, [], [], timeout)
return len(rout) > 0
    def set_guc(self, guc_name, guc_value):
        """Set a GPDB configuration parameter cluster-wide via gpconfig, then
        reload configuration files with `gpstop -u`.

        Fails the test (assertEqual) if either command returns non-zero.
        """
        logger.info('Configuring ' + guc_name +' ...')
        cmd = Command("gpconfig " + guc_name,
                      "gpconfig -c " + guc_name + " -v " + guc_value)
        cmd.run()
        self.assertEqual(cmd.get_results().rc, 0, str(cmd))
        # gpstop -u signals running segments to re-read config files without
        # restarting the cluster.
        logger.info('gpstop -u to reload config files...')
        cmd = Command("gpstop -u",
                      "gpstop -u")
        cmd.run()
        self.assertEqual(cmd.get_results().rc, 0, str(cmd))
def test_syncrep(self):
# 1. Initiate the Standby
# 2. Once the WAL receiver starts, signal it to suspend post xlog flush
# but before sending the ack.
# 3. Now execute a transaction and commit it. The backend is expected
# be blocked.
# 4. Resume the WALReceiver and see the transaction passed and its
# results are visible.
# cleanup
PSQL.run_sql_command('DROP table if exists foo')
# 1. create standby and start
res = self.standby.create()
self.assertEqual(res, 0)
res = self.standby.start()
self.assertTrue(res.wasSuccessful())
# wait for the walreceiver to start
num_walsender = self.wait_for_walsender()
self.assertEqual(num_walsender, 1)
# 2. Once the WAL receiver starts, signal it to suspend post xlog flush
# but before sending the ack.
proc = subprocess.Popen(['ps', '-ef'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
search = "wal receiver process"
for line in stdout.split('\n'):
if (line.find(search) > 0):
split_line = re.split(r'\s+', line.strip())
break
self.assertTrue(len(split_line) > 0)
wal_rcv_pid = int(split_line[1])
logger.info('Suspending WAL Receiver(' + str(wal_rcv_pid) +')...')
self.generate_trigger_file('wait_before_send_ack')
os.kill(wal_rcv_pid, signal.SIGUSR2)
# 3. Now execute a transaction and commit it. The backend is expected
# be blocked.
logger.info('Create table foo...')
# we use subprocess since we expect it'll be blocked.
proc = subprocess.Popen(['psql', '-c', 'create table foo (a int)'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
readable = self.wait_stdout(proc, 5.0)
self.assertFalse(readable, 'psql did not block')
# 4. Resume the WALReceiver and see the transaction passed and its
# results are visible.
logger.info('Resume the WAL Receiver...')
self.generate_trigger_file('resume')
os.kill(wal_rcv_pid, signal.SIGUSR2)
readable = self.wait_stdout(proc, 5.0)
self.assertTrue(readable, 'psql still blocks')
proc.communicate()
logger.info('No blocked backend found!')
logger.info('Verifying if table exists ? ...')
PSQL(sql_cmd='select * from foo').run(validateAfter=True)
logger.info('Pass')
def test_unblock_while_catchup_out_of_range(self):
"""
This test verifies if a backend gets blocked in case
the WAL sender is still in catchup mode.
"""
with WalClient("replication=true") as client:
(sysid, tli, xpos) = client.identify_system()
sql_startup = "SELECT count(*) FROM pg_stat_replication where state = 'startup'"
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
curs = dbconn.execSQL(conn, sql_startup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1, "No WAL sender in startup phase found")
logger.info('WAL sender is alive and now is in startup phase...')
# Generate enough xlog in WAL sender startup phase. None of the sql statements
# should get blocked. If blocked we have some issue.
# Checkpointing causes full page writes on updates/inserts. Hence helps
# xlog generation.
i = 0
logger.info('Running a bunch of SQLs to generate enough xlog to maintain catchup phase...')
while (i < 10):
PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int); CHECKPOINT;')
i = i + 1
logger.info('Pass - Database does not block if WAL sender is alive and in startup phase')
logger.info('Creating some xlog seg files to simulate catchup out-of-range..')
i = 0
while(i < 3):
PSQL.run_sql_command('select pg_switch_xlog();select pg_switch_xlog();checkpoint;')
i = i + 1
xpos_ptr = XLogRecPtr.from_string(xpos)
client.start_replication(xpos_ptr)
while True:
msg = client.receive(1000)
if isinstance(msg, WalMessageData):
header = msg.header
# walsender must be still in catchup phase as a lot xlog needs to be sent
sql_catchup = "SELECT count(*) FROM pg_stat_replication where state = 'catchup'"
sql_table_present = "SELECT count(*) from pg_class where relname = 'foo'"
sql_bkd_count = ("SELECT count(*) from pg_stat_activity where waiting ='t' and waiting_reason = 'replication'")
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1, "No Catchup WAL sender found")
logger.info('WAL sender is alive and now is in catchup phase...')
logger.info('In catchup phase, run some sql...')
PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int);'
,dbname='postgres')
while (i < 5):
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if the previous backend is blocked
curs = dbconn.execSQL(conn, sql_bkd_count)
results = curs.fetchall()
if (int(results[0][0]) > 0):
self.assertTrue(0, "Previous backend was blocked ...")
i = i + 1
logger.info('Create table is NOT blocked...')
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if WAL sender is still in catchup phase
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1,
"WAL sender catchup phase over before verification")
logger.info('WAL sender is alive and still in catchup phase...')
with dbconn.connect(dbconn.DbURL(dbname='postgres'), utility=True) as conn:
# verify if WAL sender is still in catchup phase
curs = dbconn.execSQL(conn, sql_table_present)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1, "Table foo not found")
# sync replication needs a reply otherwise backend blocks
client.reply(header.walEnd, header.walEnd, header.walEnd)
# success, should get some 'w' message
logger.info ("Pass - Database does not block if WAL sender is alive and "
"the catchup is out-of-range")
break
elif isinstance(msg, WalMessageNoData):
# could be timeout
client.reply(xpos_ptr, xpos_ptr, xpos_ptr)
else:
raise StandardError(msg.errmsg)
def test_block_while_catchup_within_range(self):
"""
This test verifies if a backend gets blocked in case
the WAL sender is still in catchup mode.
"""
with WalClient("replication=true") as client:
(sysid, tli, xpos) = client.identify_system()
# Set the guc to > 1 so that we can verify the test
# using less amount of xlog
self.set_guc('repl_catchup_within_range', '3')
# Generate enough xlog in WAL sender startup phase. None of the sql statements
# should get blocked. If blocked we have some issue.
# Checkpointing causes full page writes on updates/inserts. Hence helps
# xlog generation.
i = 0
logger.info('Running a bunch of SQLs to generate enough xlog to maintain catchup phase...')
while (i < 10):
PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int); CHECKPOINT;')
i = i + 1
xpos_ptr = XLogRecPtr.from_string(xpos)
client.start_replication(xpos_ptr)
while True:
msg = client.receive(1000)
if isinstance(msg, WalMessageData):
header = msg.header
# walsender must be still in catchup phase as a lot xlog needs to be sent
sql_catchup = "SELECT count(*) FROM pg_stat_replication where state = 'catchup'"
sql_table_present = "SELECT count(*) from pg_class where relname = 'foo'"
sql_bkd_count = ("SELECT count(*) from pg_stat_activity where waiting ='t' and waiting_reason = 'replication'")
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1, "No Catchup WAL sender found")
logger.info('WAL sender is alive and now is in catchup phase...')
logger.info('In catchup phase, create table...')
subprocess.Popen(['psql', '-c',
'DROP TABLE IF EXISTS raghav; create table raghav (a int);'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if WAL sender is still in catchup phase
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1,
"WAL sender catchup phase over before verification")
logger.info('WAL sender is alive, still in catchup phase ..')
while (i < 5):
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if the previous backend is blocked
curs = dbconn.execSQL(conn, sql_bkd_count)
results = curs.fetchall()
if (int(results[0][0]) == 1):
break;
if (i == 4):
self.assertTrue(0, "Previous backend not blocked ...")
i = i + 1
logger.info('But, create table is blocked...')
with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
# verify if WAL sender is still in catchup phase
curs = dbconn.execSQL(conn, sql_catchup)
results = curs.fetchall()
self.assertEqual(int(results[0][0]), 1,
"WAL sender catchup phase over before verification")
logger.info('WAL sender is alive, in catchup phase and backend is blocked...')
# sync replication needs a reply otherwise backend blocks
client.reply(header.walEnd, header.walEnd, header.walEnd)
# success, should get some 'w' message
logger.info ("Pass - Backends block if WAL sender is alive and the catchup is within-range")
break
elif isinstance(msg, WalMessageNoData):
# could be timeout
client.reply(xpos_ptr, xpos_ptr, xpos_ptr)
else:
raise StandardError(msg.errmsg)
logger.info ("Pass")
self.set_guc('repl_catchup_within_range', '1')
| 44.533742 | 131 | 0.564403 |
from tinctest import logger
from mpp.lib.PSQL import PSQL
from mpp.models import MPPTestCase
from gppylib.db import dbconn
from mpp.gpdb.tests.storage.walrepl.run import StandbyRunMixin
from mpp.gpdb.tests.storage import walrepl
from mpp.gpdb.tests.storage.walrepl.lib.walcomm import *
from mpp.gpdb.tests.storage.walrepl.lib import PgControlData
from gppylib.commands.base import Command
import os
import re
import select
import signal
import subprocess
import time
import sys
class syncrep(StandbyRunMixin, MPPTestCase):
def generate_trigger_file(self, content):
filename = 'wal_rcv_test'
self.assertTrue(content is not None)
filepath = os.path.join(self.standby.datadir, filename)
with open(filepath, 'wb') as f:
f.write(content)
def wait_stdout(self, proc, timeout):
rlist = [proc.stdout.fileno()]
(rout, _, _) = select.select(rlist, [], [], timeout)
return len(rout) > 0
def set_guc(self, guc_name, guc_value):
logger.info('Configuring ' + guc_name +' ...')
cmd = Command("gpconfig " + guc_name,
"gpconfig -c " + guc_name + " -v " + guc_value)
cmd.run()
self.assertEqual(cmd.get_results().rc, 0, str(cmd))
logger.info('gpstop -u to reload config files...')
cmd = Command("gpstop -u",
"gpstop -u")
cmd.run()
self.assertEqual(cmd.get_results().rc, 0, str(cmd))
def test_syncrep(self):
PSQL.run_sql_command('DROP table if exists foo')
res = self.standby.create()
self.assertEqual(res, 0)
res = self.standby.start()
self.assertTrue(res.wasSuccessful())
num_walsender = self.wait_for_walsender()
self.assertEqual(num_walsender, 1)
proc = subprocess.Popen(['ps', '-ef'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
search = "wal receiver process"
for line in stdout.split('\n'):
if (line.find(search) > 0):
split_line = re.split(r'\s+', line.strip())
break
self.assertTrue(len(split_line) > 0)
wal_rcv_pid = int(split_line[1])
logger.info('Suspending WAL Receiver(' + str(wal_rcv_pid) +')...')
self.generate_trigger_file('wait_before_send_ack')
os.kill(wal_rcv_pid, signal.SIGUSR2)
logger.info('Create table foo...')
proc = subprocess.Popen(['psql', '-c', 'create table foo (a int)'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
readable = self.wait_stdout(proc, 5.0)
self.assertFalse(readable, 'psql did not block')
# 4. Resume the WALReceiver and see the transaction passed and its
# results are visible.
logger.info('Resume the WAL Receiver...')
self.generate_trigger_file('resume')
os.kill(wal_rcv_pid, signal.SIGUSR2)
readable = self.wait_stdout(proc, 5.0)
self.assertTrue(readable, 'psql still blocks')
proc.communicate()
logger.info('No blocked backend found!')
logger.info('Verifying if table exists ? ...')
PSQL(sql_cmd='select * from foo').run(validateAfter=True)
logger.info('Pass')
    def test_unblock_while_catchup_out_of_range(self):
        """Backends must NOT block while the WAL sender's catchup is out of range.

        Generates enough xlog that the standby's catchup distance exceeds
        repl_catchup_within_range, then verifies SQL statements complete
        without ever waiting on replication while the sender is in the
        startup or catchup phase.
        """
        with WalClient("replication=true") as client:
            (sysid, tli, xpos) = client.identify_system()
            sql_startup = "SELECT count(*) FROM pg_stat_replication where state = 'startup'"
            with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                curs = dbconn.execSQL(conn, sql_startup)
                results = curs.fetchall()
                self.assertEqual(int(results[0][0]), 1, "No WAL sender in startup phase found")
            logger.info('WAL sender is alive and now is in startup phase...')
            # Generate enough xlog in WAL sender startup phase. None of the sql statements
            # should get blocked. If blocked we have some issue.
            # Checkpointing causes full page writes on updates/inserts. Hence helps
            # xlog generation.
            i = 0
            logger.info('Running a bunch of SQLs to generate enough xlog to maintain catchup phase...')
            while (i < 10):
                PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int); CHECKPOINT;')
                i = i + 1
            logger.info('Pass - Database does not block if WAL sender is alive and in startup phase')
            logger.info('Creating some xlog seg files to simulate catchup out-of-range..')
            i = 0
            while(i < 3):
                PSQL.run_sql_command('select pg_switch_xlog();select pg_switch_xlog();checkpoint;')
                i = i + 1
            xpos_ptr = XLogRecPtr.from_string(xpos)
            client.start_replication(xpos_ptr)
            while True:
                msg = client.receive(1000)
                # A data ('w') message means the sender is streaming to us.
                if isinstance(msg, WalMessageData):
                    header = msg.header
                    # walsender must be still in catchup phase as a lot xlog needs to be sent
                    sql_catchup = "SELECT count(*) FROM pg_stat_replication where state = 'catchup'"
                    sql_table_present = "SELECT count(*) from pg_class where relname = 'foo'"
                    sql_bkd_count = ("SELECT count(*) from pg_stat_activity where waiting ='t' and waiting_reason = 'replication'")
                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1, "No Catchup WAL sender found")
                    logger.info('WAL sender is alive and now is in catchup phase...')
                    logger.info('In catchup phase, run some sql...')
                    PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int);'
                                         ,dbname='postgres')
                    # i is 3 after the xlog-switch loop above, so this polls twice
                    # for any backend stuck waiting on replication.
                    while (i < 5):
                        with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                            # verify if the previous backend is blocked
                            curs = dbconn.execSQL(conn, sql_bkd_count)
                            results = curs.fetchall()
                            if (int(results[0][0]) > 0):
                                self.assertTrue(0, "Previous backend was blocked ...")
                        i = i + 1
                    logger.info('Create table is NOT blocked...')
                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        # verify if WAL sender is still in catchup phase
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1,
                                         "WAL sender catchup phase over before verification")
                    logger.info('WAL sender is alive and still in catchup phase...')
                    with dbconn.connect(dbconn.DbURL(dbname='postgres'), utility=True) as conn:
                        # verify the table created above is visible
                        curs = dbconn.execSQL(conn, sql_table_present)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1, "Table foo not found")
                    # sync replication needs a reply otherwise backend blocks
                    client.reply(header.walEnd, header.walEnd, header.walEnd)
                    # success, should get some 'w' message
                    logger.info ("Pass - Database does not block if WAL sender is alive and "
                                 "the catchup is out-of-range")
                    break
                elif isinstance(msg, WalMessageNoData):
                    # could be timeout
                    client.reply(xpos_ptr, xpos_ptr, xpos_ptr)
                else:
                    raise StandardError(msg.errmsg)
    def test_block_while_catchup_within_range(self):
        """Backends MUST block while the WAL sender's catchup is within range.

        Raises repl_catchup_within_range to 3 segments so the catchup window
        is easy to hit, then verifies a CREATE TABLE issued while the WAL
        sender is still catching up waits on replication
        (pg_stat_activity.waiting_reason = 'replication').  The GUC is
        restored to 1 at the end.
        """
        with WalClient("replication=true") as client:
            (sysid, tli, xpos) = client.identify_system()
            # Set the guc to > 1 so that we can verify the test
            # using less amount of xlog
            self.set_guc('repl_catchup_within_range', '3')
            # Generate enough xlog in WAL sender startup phase. None of the sql statements
            # should get blocked. If blocked we have some issue.
            # Checkpointing causes full page writes on updates/inserts. Hence helps
            # xlog generation.
            i = 0
            logger.info('Running a bunch of SQLs to generate enough xlog to maintain catchup phase...')
            while (i < 10):
                PSQL.run_sql_command('DROP TABLE IF EXISTS foo; CREATE TABLE foo(a int, b int); CHECKPOINT;')
                i = i + 1
            xpos_ptr = XLogRecPtr.from_string(xpos)
            client.start_replication(xpos_ptr)
            while True:
                msg = client.receive(1000)
                if isinstance(msg, WalMessageData):
                    header = msg.header
                    # walsender must be still in catchup phase as a lot xlog needs to be sent
                    sql_catchup = "SELECT count(*) FROM pg_stat_replication where state = 'catchup'"
                    sql_table_present = "SELECT count(*) from pg_class where relname = 'foo'"
                    sql_bkd_count = ("SELECT count(*) from pg_stat_activity where waiting ='t' and waiting_reason = 'replication'")
                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1, "No Catchup WAL sender found")
                    logger.info('WAL sender is alive and now is in catchup phase...')
                    logger.info('In catchup phase, create table...')
                    # Fire the CREATE TABLE asynchronously; it is expected to block.
                    subprocess.Popen(['psql', '-c',
                                      'DROP TABLE IF EXISTS raghav; create table raghav (a int);'],
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        # verify if WAL sender is still in catchup phase
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1,
                                         "WAL sender catchup phase over before verification")
                    logger.info('WAL sender is alive, still in catchup phase ..')
                    # NOTE(review): i is already 10 here (left over from the xlog
                    # generation loop above), so this polling loop never executes
                    # and the "Previous backend not blocked" assertion can never
                    # fire.  It likely needs an `i = 0` reset — confirm intent.
                    while (i < 5):
                        with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                            # verify if the previous backend is blocked
                            curs = dbconn.execSQL(conn, sql_bkd_count)
                            results = curs.fetchall()
                            if (int(results[0][0]) == 1):
                                break;
                            if (i == 4):
                                self.assertTrue(0, "Previous backend not blocked ...")
                        i = i + 1
                    logger.info('But, create table is blocked...')
                    with dbconn.connect(dbconn.DbURL(), utility=True) as conn:
                        # verify if WAL sender is still in catchup phase
                        curs = dbconn.execSQL(conn, sql_catchup)
                        results = curs.fetchall()
                        self.assertEqual(int(results[0][0]), 1,
                                         "WAL sender catchup phase over before verification")
                    logger.info('WAL sender is alive, in catchup phase and backend is blocked...')
                    # sync replication needs a reply otherwise backend blocks
                    client.reply(header.walEnd, header.walEnd, header.walEnd)
                    # success, should get some 'w' message
                    logger.info ("Pass - Backends block if WAL sender is alive and the catchup is within-range")
                    break
                elif isinstance(msg, WalMessageNoData):
                    # could be timeout
                    client.reply(xpos_ptr, xpos_ptr, xpos_ptr)
                else:
                    raise StandardError(msg.errmsg)
            logger.info ("Pass")
        # Restore the GUC so later tests see the default catchup window.
        self.set_guc('repl_catchup_within_range', '1')
| true | true |
f7254a9efae6931fc674550384bd79222176447d | 3,222 | py | Python | commands/FBAutoLayoutCommands.py | zddd/chisel | 7782bdde3062e15ccbdc5f617aa3a8f096b6751b | [
"MIT"
] | 1 | 2020-03-04T20:24:33.000Z | 2020-03-04T20:24:33.000Z | commands/FBAutoLayoutCommands.py | zddd/chisel | 7782bdde3062e15ccbdc5f617aa3a8f096b6751b | [
"MIT"
] | null | null | null | commands/FBAutoLayoutCommands.py | zddd/chisel | 7782bdde3062e15ccbdc5f617aa3a8f096b6751b | [
"MIT"
] | null | null | null | #!/usr/bin/python
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import lldb
import fblldbbase as fb
import fblldbviewhelpers as viewHelpers
def lldbcommands():
    """Return the command instances this module contributes to Chisel."""
    command_classes = (
        FBPrintAutolayoutTrace,
        FBAutolayoutBorderAmbiguous,
        FBAutolayoutUnborderAmbiguous,
    )
    return [command() for command in command_classes]
class FBPrintAutolayoutTrace(fb.FBCommand):
    """Chisel command `paltrace`: dump UIKit's Auto Layout trace for a view."""

    def name(self):
        return 'paltrace'

    def description(self):
        return "Print the Auto Layout trace for the given view. Defaults to the key window."

    def args(self):
        default_view = '(id)[[UIApplication sharedApplication] keyWindow]'
        return [
            fb.FBCommandArgument(
                arg='view',
                type='UIView *',
                help='The view to print the Auto Layout trace for.',
                default=default_view,
            )
        ]

    def run(self, arguments, options):
        view = fb.evaluateInputExpression(arguments[0])
        # Newer UIKit exposes the recursive variant; fall back otherwise.
        has_recursive = fb.evaluateBooleanExpression(
            '[UIView instancesRespondToSelector:@selector(_autolayoutTraceRecursively:)]')
        selector = '_autolayoutTraceRecursively:1' if has_recursive else '_autolayoutTrace'
        print(fb.describeObject('[{} {}]'.format(view, selector)))
def setBorderOnAmbiguousViewRecursive(view, width, color):
    """Recursively outline every descendant of `view` whose layout is ambiguous.

    Non-UIView nodes are skipped entirely (their subtrees included).
    """
    if not fb.evaluateBooleanExpression('[(id)%s isKindOfClass:(Class)[UIView class]]' % view):
        return
    if fb.evaluateBooleanExpression('(BOOL)[%s hasAmbiguousLayout]' % view):
        # Paint the border on the backing layer of the ambiguous view.
        layer = viewHelpers.convertToLayer(view)
        fb.evaluateEffect('[%s setBorderWidth:(CGFloat)%s]' % (layer, width))
        fb.evaluateEffect('[%s setBorderColor:(CGColorRef)[(id)[UIColor %sColor] CGColor]]' % (layer, color))
    subviews = fb.evaluateExpression('(id)[%s subviews]' % view)
    subview_count = int(fb.evaluateExpression('(int)[(id)%s count]' % subviews))
    for index in range(subview_count):
        child = fb.evaluateExpression('(id)[%s objectAtIndex:%i]' % (subviews, index))
        setBorderOnAmbiguousViewRecursive(child, width, color)
class FBAutolayoutBorderAmbiguous(fb.FBCommand):
    """Chisel command `alamborder`: outline ambiguous views in the key window."""

    def name(self):
        return 'alamborder'

    def description(self):
        return "Put a border around views with an ambiguous layout"

    def options(self):
        return [
            fb.FBCommandArgument(short='-c', long='--color', arg='color', type='string',
                                 default='red',
                                 help='A color name such as \'red\', \'green\', \'magenta\', etc.'),
            fb.FBCommandArgument(short='-w', long='--width', arg='width', type='CGFloat',
                                 default=2.0,
                                 help='Desired width of border.'),
        ]

    def run(self, arguments, options):
        root = fb.evaluateExpression('(id)[[UIApplication sharedApplication] keyWindow]')
        setBorderOnAmbiguousViewRecursive(root, options.width, options.color)
        # Flush Core Animation so the borders render immediately.
        lldb.debugger.HandleCommand('caflush')
class FBAutolayoutUnborderAmbiguous(fb.FBCommand):
    """Chisel command `alamunborder`: clear borders added by `alamborder`."""

    def name(self):
        return 'alamunborder'

    def description(self):
        return "Removes the border around views with an ambiguous layout"

    def run(self, arguments, options):
        root = fb.evaluateExpression('(id)[[UIApplication sharedApplication] keyWindow]')
        # A zero border width erases the outline; the color is irrelevant then.
        setBorderOnAmbiguousViewRecursive(root, 0, "red")
        lldb.debugger.HandleCommand('caflush')
| 37.905882 | 178 | 0.724705 |
import lldb
import fblldbbase as fb
import fblldbviewhelpers as viewHelpers
def lldbcommands():
return [
FBPrintAutolayoutTrace(),
FBAutolayoutBorderAmbiguous(),
FBAutolayoutUnborderAmbiguous(),
]
class FBPrintAutolayoutTrace(fb.FBCommand):
def name(self):
return 'paltrace'
def description(self):
return "Print the Auto Layout trace for the given view. Defaults to the key window."
def args(self):
return [ fb.FBCommandArgument(arg='view', type='UIView *', help='The view to print the Auto Layout trace for.', default='(id)[[UIApplication sharedApplication] keyWindow]') ]
def run(self, arguments, options):
view = fb.evaluateInputExpression(arguments[0])
opt = fb.evaluateBooleanExpression('[UIView instancesRespondToSelector:@selector(_autolayoutTraceRecursively:)]')
traceCall = '_autolayoutTraceRecursively:1' if opt else '_autolayoutTrace'
print(fb.describeObject('[{} {}]'.format(view, traceCall)))
def setBorderOnAmbiguousViewRecursive(view, width, color):
if not fb.evaluateBooleanExpression('[(id)%s isKindOfClass:(Class)[UIView class]]' % view):
return
isAmbiguous = fb.evaluateBooleanExpression('(BOOL)[%s hasAmbiguousLayout]' % view)
if isAmbiguous:
layer = viewHelpers.convertToLayer(view)
fb.evaluateEffect('[%s setBorderWidth:(CGFloat)%s]' % (layer, width))
fb.evaluateEffect('[%s setBorderColor:(CGColorRef)[(id)[UIColor %sColor] CGColor]]' % (layer, color))
subviews = fb.evaluateExpression('(id)[%s subviews]' % view)
subviewsCount = int(fb.evaluateExpression('(int)[(id)%s count]' % subviews))
if subviewsCount > 0:
for i in range(0, subviewsCount):
subview = fb.evaluateExpression('(id)[%s objectAtIndex:%i]' % (subviews, i))
setBorderOnAmbiguousViewRecursive(subview, width, color)
class FBAutolayoutBorderAmbiguous(fb.FBCommand):
def name(self):
return 'alamborder'
def description(self):
return "Put a border around views with an ambiguous layout"
def options(self):
return [
fb.FBCommandArgument(short='-c', long='--color', arg='color', type='string', default='red', help='A color name such as \'red\', \'green\', \'magenta\', etc.'),
fb.FBCommandArgument(short='-w', long='--width', arg='width', type='CGFloat', default=2.0, help='Desired width of border.')
]
def run(self, arguments, options):
keyWindow = fb.evaluateExpression('(id)[[UIApplication sharedApplication] keyWindow]')
setBorderOnAmbiguousViewRecursive(keyWindow, options.width, options.color)
lldb.debugger.HandleCommand('caflush')
class FBAutolayoutUnborderAmbiguous(fb.FBCommand):
def name(self):
return 'alamunborder'
def description(self):
return "Removes the border around views with an ambiguous layout"
def run(self, arguments, options):
keyWindow = fb.evaluateExpression('(id)[[UIApplication sharedApplication] keyWindow]')
setBorderOnAmbiguousViewRecursive(keyWindow, 0, "red")
lldb.debugger.HandleCommand('caflush')
| true | true |
f7254bd8d5c655e025a0e95e3c6aac92193bfd17 | 658 | py | Python | gans/datasets/abstract_dataset.py | tlatkowski/gans-2.0 | 974efc5bbcea39c0a7dec9405ba4514ada6dc39c | [
"MIT"
] | 78 | 2019-09-25T15:09:18.000Z | 2022-02-09T09:56:15.000Z | gans/datasets/abstract_dataset.py | tlatkowski/gans-2.0 | 974efc5bbcea39c0a7dec9405ba4514ada6dc39c | [
"MIT"
] | 23 | 2019-10-09T21:24:39.000Z | 2022-03-12T00:00:53.000Z | gans/datasets/abstract_dataset.py | tlatkowski/gans-2.0 | 974efc5bbcea39c0a7dec9405ba4514ada6dc39c | [
"MIT"
] | 18 | 2020-01-24T13:13:57.000Z | 2022-02-15T18:58:12.000Z | import abc
from abc import abstractmethod
class Dataset(abc.ABC):
    """Abstract dataset base: subclasses supply the loading strategy.

    The constructor records batch/buffer sizes from `input_params` and
    eagerly loads `train_dataset` via `load_data` (or
    `load_data_with_labels` when `with_labels` is True).
    """

    def __init__(self, input_params, with_labels=False):
        self.batch_size = input_params.batch_size
        self.buffer_size = input_params.buffer_size
        # Pick the loader hook once; subclasses must implement both.
        loader = self.load_data_with_labels if with_labels else self.load_data
        self.train_dataset = loader()

    @abstractmethod
    def load_data(self):
        """Return the unlabeled training dataset."""
        raise NotImplementedError

    @abstractmethod
    def load_data_with_labels(self):
        """Return the labeled training dataset."""
        raise NotImplementedError

    def __iter__(self):
        # Iterating the dataset iterates whatever the loader returned.
        return iter(self.train_dataset)
| 22.689655 | 61 | 0.641337 | import abc
from abc import abstractmethod
class Dataset(abc.ABC):
def __init__(
self,
input_params,
with_labels=False,
):
self.batch_size = input_params.batch_size
self.buffer_size = input_params.buffer_size
if with_labels:
self.train_dataset = self.load_data_with_labels()
else:
self.train_dataset = self.load_data()
@abstractmethod
def load_data(self):
raise NotImplementedError
@abstractmethod
def load_data_with_labels(self):
raise NotImplementedError
def __iter__(self):
return iter(self.train_dataset)
| true | true |
f7254bdd4bb068fb20e4ad809d0645054278cee9 | 5,620 | py | Python | SimpleLoggingServerToCsvFile.py | II43/SimpleLoggingServerToCsvFile | d3d50778041a5995e58b6a8f623519e3cb41a5ce | [
"MIT"
] | null | null | null | SimpleLoggingServerToCsvFile.py | II43/SimpleLoggingServerToCsvFile | d3d50778041a5995e58b6a8f623519e3cb41a5ce | [
"MIT"
] | null | null | null | SimpleLoggingServerToCsvFile.py | II43/SimpleLoggingServerToCsvFile | d3d50778041a5995e58b6a8f623519e3cb41a5ce | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
Simple HTTP server in Python for logging events to CSV file
Motivation: Use this CSV file later for data aggregation and plotting
Inspired by: Very simple HTTP server in Python for logging requests
https://gist.github.com/mdonkers/63e115cc0c79b4f6b8b3a6b797e485c7
Usage::
./SimpleLoggingServerToCsvFile.py [<port>]
"""
#----------------------------------------------------------------------#
# Import #
#----------------------------------------------------------------------#
from http.server import BaseHTTPRequestHandler, HTTPServer
import logging
from datetime import datetime
import csv
from os import curdir, sep, path
from shutil import copyfile
#----------------------------------------------------------------------#
# Configuration #
#----------------------------------------------------------------------#
# Log file
LOG_FILE = r'events.log'
# Master key
MASTER_KEY = "jQw5xZVq9Kp4fm7hiZko"
# All the allowed keys
KEYS = ["q67idhrJ56oQj7IElukH",
MASTER_KEY]
#----------------------------------------------------------------------#
# Classes #
#----------------------------------------------------------------------#
class S(BaseHTTPRequestHandler):
    """HTTP handler that logs key-based events to a CSV file.

    GET /<key>           -- if <key> is in KEYS, append a timestamped row to
                            LOG_FILE; the MASTER_KEY rewrites the file
                            instead (backing the old one up first).
    GET /<static file>   -- serve .html/.csv/.log/.js/.css files from the
                            server's working directory only.
    POST                 -- logged; echoes the request path back.
    """

    def prepare_for_html_response(self):
        """Send a 200 status line and a text/html content-type header."""
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()

    def do_GET(self):
        # datetime object containing current date and time
        now = datetime.now()
        print("now =", now)
        # dd/mm/YY H:M:S
        time_stamp = now.strftime("%d/%m/%Y %H:%M:%S")
        print("date and time =", time_stamp)
        logging.info("GET request,\nPath: %s\nHeaders:\n%s\n", str(self.path), str(self.headers))
        self.prepare_for_html_response()
        # Return HTML, CSV or LOG file if requested.
        if self.path.endswith((".html", ".csv", ".log", ".js", ".css")):
            base_dir = path.abspath(curdir)
            f_path = path.abspath(curdir + sep + self.path)
            # Security fix: reject paths that escape the working directory
            # (e.g. "/../secret.log") before touching the filesystem.
            if not f_path.startswith(base_dir + sep):
                self.wfile.write("Request file does not exist!".encode('utf-8'))
            elif not path.exists(f_path):
                # Requested file doesn't exist.
                self.wfile.write("Request file does not exist!".encode('utf-8'))
            else:
                # Open the static file requested and send it.
                with open(f_path, 'rb') as f:
                    self.wfile.write(f.read())
            # Nothing more to do.
            return
        # Otherwise try to log the event for the given key.
        received_key = str(self.path)[1:]
        if received_key not in KEYS:
            # No valid key has been received.
            self.wfile.write("Unknown key! Nothing to do!".encode('utf-8'))
            return
        self.wfile.write("Valid key! Logging event to a output file!".encode('utf-8'))
        if received_key == MASTER_KEY:
            # The master key starts a fresh log file; keep a backup of the
            # old one when it exists (first run has nothing to back up).
            method_to_log = 'w'
            if path.exists(LOG_FILE):
                copyfile(LOG_FILE, LOG_FILE + ".backup")
        else:
            method_to_log = 'a'
        # Append (or rewrite, for the master key) the event row.
        with open(LOG_FILE, method_to_log, newline='\n') as f:
            writer = csv.writer(f)
            if method_to_log == 'w':
                writer.writerow(["Timestamp", "Key"])
            writer.writerow([time_stamp, received_key])

    def do_POST(self):
        content_length = int(self.headers['Content-Length'])  # size of the body
        post_data = self.rfile.read(content_length)           # the body itself
        logging.info("POST request,\nPath: %s\nHeaders:\n%s\n\nBody:\n%s\n",
                     str(self.path), str(self.headers), post_data.decode('utf-8'))
        self.prepare_for_html_response()
        self.wfile.write("POST request for {}".format(self.path).encode('utf-8'))
#----------------------------------------------------------------------#
# Functions #
#----------------------------------------------------------------------#
def run(server_class=HTTPServer, handler_class=S, port=8080):
    """Serve HTTP on the given port until interrupted with Ctrl-C."""
    logging.basicConfig(level=logging.INFO)
    bind_address = ('', port)
    httpd = server_class(bind_address, handler_class)
    logging.info('Starting httpd...\n')
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        # Ctrl-C is the normal shutdown path; fall through to cleanup.
        pass
    httpd.server_close()
    logging.info('Stopping httpd...\n')
# Main #
#----------------------------------------------------------------------#
if __name__ == '__main__':
    # A single optional CLI argument overrides the default port (8080).
    from sys import argv
    if len(argv) == 2:
        port_kwargs = {'port': int(argv[1])}
    else:
        port_kwargs = {}
    run(**port_kwargs)
from http.server import BaseHTTPRequestHandler, HTTPServer
import logging
from datetime import datetime
import csv
from os import curdir, sep, path
from shutil import copyfile
LOG_FILE = r'events.log'
MASTER_KEY = "jQw5xZVq9Kp4fm7hiZko"
KEYS = ["q67idhrJ56oQj7IElukH",
MASTER_KEY]
class S(BaseHTTPRequestHandler):
def prepare_for_html_response(self):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
now = datetime.now()
print("now =", now)
time_stamp = now.strftime("%d/%m/%Y %H:%M:%S")
print("date and time =", time_stamp)
logging.info("GET request,\nPath: %s\nHeaders:\n%s\n", str(self.path), str(self.headers))
self.prepare_for_html_response()
if self.path.endswith(".html") or self.path.endswith(".csv") or self.path.endswith(".log") or \
self.path.endswith(".js") or self.path.endswith(".css"):
f_path = curdir + sep + self.path
if not path.exists(f_path):
self.wfile.write("Request file does not exist!".encode('utf-8'))
else:
#Open the static HTML file requested and send it
f = open(f_path,'rb')
self.wfile.write(f.read())
f.close()
# Nothing more to do
return;
# Otherwise try to log the event for given key
received_key = str(self.path)[1:]
isKeyValid = False
for key in KEYS:
if key == received_key:
self.wfile.write("Valid key! Logging event to a output file!".encode('utf-8'))
isKeyValid = True
# If master key is received, logger file is replaced with new one
if received_key == MASTER_KEY:
method_to_log = 'w'
# Back the logger file
copyfile(LOG_FILE, LOG_FILE + ".backup")
else:
method_to_log = 'a'
# Logging an event to CSV
with open(LOG_FILE, method_to_log, newline='\n') as f:
writer = csv.writer(f)
if method_to_log == 'w':
writer.writerow(["Timestamp", "Key"])
writer.writerow([time_stamp, received_key])
if not isKeyValid:
# No valid key had been received
self.wfile.write("Unknown key! Nothing to do!".encode('utf-8'))
def do_POST(self):
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
logging.info("POST request,\nPath: %s\nHeaders:\n%s\n\nBody:\n%s\n",
str(self.path), str(self.headers), post_data.decode('utf-8'))
self.prepare_for_html_response()
self.wfile.write("POST request for {}".format(self.path).encode('utf-8'))
#----------------------------------------------------------------------#
# Functions #
#----------------------------------------------------------------------#
def run(server_class=HTTPServer, handler_class=S, port=8080):
logging.basicConfig(level=logging.INFO)
server_address = ('', port)
httpd = server_class(server_address, handler_class)
logging.info('Starting httpd...\n')
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
logging.info('Stopping httpd...\n')
#----------------------------------------------------------------------#
# Main #
#----------------------------------------------------------------------#
if __name__ == '__main__':
from sys import argv
if len(argv) == 2:
run(port=int(argv[1]))
else:
run() | true | true |
f7254c126abf533a0ae20a41208a5dc83bf968ca | 712 | py | Python | src/schctest/pypacket_dissector/decoder.py | saguilarDevel/open_schc | ac7f2a84b6120964c8fdaabf9f5c8ca8ae39c289 | [
"MIT"
] | 21 | 2018-11-05T06:48:32.000Z | 2022-02-28T14:38:09.000Z | src/schctest/pypacket_dissector/decoder.py | saguilarDevel/open_schc | ac7f2a84b6120964c8fdaabf9f5c8ca8ae39c289 | [
"MIT"
] | 34 | 2019-01-28T01:32:41.000Z | 2021-05-06T09:40:14.000Z | src/schctest/pypacket_dissector/decoder.py | saguilarDevel/open_schc | ac7f2a84b6120964c8fdaabf9f5c8ca8ae39c289 | [
"MIT"
] | 28 | 2018-10-31T22:21:26.000Z | 2022-03-17T09:44:40.000Z | try:
from _json_keys import *
from _util import *
from defs_L3 import dissectors_L3
except:
from ._json_keys import *
from ._util import *
from .defs_L3 import dissectors_L3
def decoder(x):
    """Dissect a raw packet, dispatching on the IP version nibble.

    Returns the result of the matching L3 dissector for the packet, or a
    dict of the form ``{ JK_EMSG: <error message> }`` when the packet is
    empty or the L3 protocol is unsupported.
    """
    if len(x) < 1:
        return { JK_EMSG:"invalid packet length" }
    # The high nibble of the first byte is the IP version (6 for IPv6).
    proto = (x[0]&0xf0)>>4
    if proto in dissectors_L3:
        # Bug fix: removed the dead `this` branch — `this` was initialized to
        # None and never reassigned, so `if this != None:` could never run.
        return dissectors_L3[proto](x)
    return { JK_EMSG:"unsupported. L3 proto=%d" % proto }
| 22.967742 | 61 | 0.588483 | try:
from _json_keys import *
from _util import *
from defs_L3 import dissectors_L3
except:
from ._json_keys import *
from ._util import *
from .defs_L3 import dissectors_L3
def decoder(x):
this = None
if len(x) < 1:
return { JK_EMSG:"invalid packet length" }
proto = (x[0]&0xf0)>>4
if proto in dissectors_L3:
if this != None:
this[JK_PAYLOAD] = dissectors_L3[proto](x)
return this
else:
return dissectors_L3[proto](x)
else:
return { JK_EMSG:"unsupported. L3 proto=%d" % proto }
| true | true |
f7254c2e00bea89d99a5f93b0d09b97a572ca11a | 1,521 | py | Python | tests/test_stock.py | condereis/mean-variance-portfolio | 526b1e86d1e92f08ceca9a7c204b043089272744 | [
"MIT"
] | 7 | 2018-08-22T19:16:33.000Z | 2021-08-14T03:50:08.000Z | tests/test_stock.py | condereis/mean-variance-portfolio | 526b1e86d1e92f08ceca9a7c204b043089272744 | [
"MIT"
] | 280 | 2018-07-12T20:20:20.000Z | 2022-03-27T20:01:20.000Z | tests/test_stock.py | condereis/mean-variance-portfolio | 526b1e86d1e92f08ceca9a7c204b043089272744 | [
"MIT"
] | 2 | 2020-04-02T02:30:42.000Z | 2021-07-22T21:13:04.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `mvport` package."""
import unittest
import numpy as np
from mvport.stock import Stock
class TestStock(unittest.TestCase):
    """Unit tests for mvport.stock.Stock accessors and statistics."""

    def setUp(self):
        """Build a Stock whose returns are symmetric about zero (mean 0, variance 2)."""
        self.ticker = 'AAPL'
        self.returns = [-2, -1, 0, 1, 2]
        self.stock = Stock(self.ticker, self.returns)

    def test_get_ticker(self):
        """get_ticker returns the ticker passed at construction."""
        self.assertEqual(self.stock.get_ticker(), self.ticker)

    def test_set_ticker(self):
        """set_ticker replaces the ticker reported by get_ticker."""
        self.stock.set_ticker('new_ticker')
        self.assertEqual(self.stock.get_ticker(), 'new_ticker')

    def test_get_returns(self):
        """get_returns exposes the construction returns as a numpy array."""
        np.testing.assert_array_equal(self.stock.get_returns(), np.array(self.returns))

    def test_set_returns(self):
        """set_returns replaces the return series."""
        self.stock.set_returns([-1, 0, 1])
        np.testing.assert_array_equal(self.stock.get_returns(), np.array([-1, 0, 1]))

    def test_get_mean(self):
        """get_mean is the arithmetic mean of the current returns."""
        self.assertEqual(self.stock.get_mean(), 0)
        self.stock.set_returns([0, 1, 2])
        self.assertEqual(self.stock.get_mean(), 1)

    def test_get_variance(self):
        """get_variance is the population variance of the current returns."""
        self.assertEqual(self.stock.get_variance(), 2)
        self.stock.set_returns([-3,-1,0,1,3])
        self.assertEqual(self.stock.get_variance(), 4)
if __name__ == '__main__':
    # Bug fix: the original called sys.exit(unittest.main()) without ever
    # importing sys, which raised NameError.  unittest.main() already raises
    # SystemExit with the appropriate status code, so no wrapper is needed.
    unittest.main()
| 27.654545 | 87 | 0.618014 |
import unittest
import numpy as np
from mvport.stock import Stock
class TestStock(unittest.TestCase):
def setUp(self):
self.ticker = 'AAPL'
self.returns = [-2, -1, 0, 1, 2]
self.stock = Stock(self.ticker, self.returns)
def test_get_ticker(self):
self.assertEqual(self.stock.get_ticker(), self.ticker)
def test_set_ticker(self):
self.stock.set_ticker('new_ticker')
self.assertEqual(self.stock.get_ticker(), 'new_ticker')
def test_get_returns(self):
np.testing.assert_array_equal(self.stock.get_returns(), np.array(self.returns))
def test_set_returns(self):
self.stock.set_returns([-1, 0, 1])
np.testing.assert_array_equal(self.stock.get_returns(), np.array([-1, 0, 1]))
def test_get_mean(self):
self.assertEqual(self.stock.get_mean(), 0)
self.stock.set_returns([0, 1, 2])
self.assertEqual(self.stock.get_mean(), 1)
def test_get_variance(self):
self.assertEqual(self.stock.get_variance(), 2)
self.stock.set_returns([-3,-1,0,1,3])
self.assertEqual(self.stock.get_variance(), 4)
if __name__ == '__main__':
sys.exit(unittest.main())
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.