id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
245,400
amanusk/s-tui
s_tui/sources/source.py
Source.get_sensors_summary
def get_sensors_summary(self): """ This returns a dict of sensor of the source and their values """ sub_title_list = self.get_sensor_list() graph_vector_summary = OrderedDict() for graph_idx, graph_data in enumerate(self.last_measurement): val_str = str(round(graph_data, 1)) graph_vector_summary[sub_title_list[graph_idx]] = val_str return graph_vector_summary
python
def get_sensors_summary(self): sub_title_list = self.get_sensor_list() graph_vector_summary = OrderedDict() for graph_idx, graph_data in enumerate(self.last_measurement): val_str = str(round(graph_data, 1)) graph_vector_summary[sub_title_list[graph_idx]] = val_str return graph_vector_summary
[ "def", "get_sensors_summary", "(", "self", ")", ":", "sub_title_list", "=", "self", ".", "get_sensor_list", "(", ")", "graph_vector_summary", "=", "OrderedDict", "(", ")", "for", "graph_idx", ",", "graph_data", "in", "enumerate", "(", "self", ".", "last_measurem...
This returns a dict of sensor of the source and their values
[ "This", "returns", "a", "dict", "of", "sensor", "of", "the", "source", "and", "their", "values" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/sources/source.py#L53-L62
245,401
amanusk/s-tui
s_tui/sources/source.py
Source.get_summary
def get_summary(self): """ Returns a dict of source name and sensors with their values """ graph_vector_summary = OrderedDict() graph_vector_summary[self.get_source_name()] = ( '[' + self.measurement_unit + ']') graph_vector_summary.update(self.get_sensors_summary()) return graph_vector_summary
python
def get_summary(self): graph_vector_summary = OrderedDict() graph_vector_summary[self.get_source_name()] = ( '[' + self.measurement_unit + ']') graph_vector_summary.update(self.get_sensors_summary()) return graph_vector_summary
[ "def", "get_summary", "(", "self", ")", ":", "graph_vector_summary", "=", "OrderedDict", "(", ")", "graph_vector_summary", "[", "self", ".", "get_source_name", "(", ")", "]", "=", "(", "'['", "+", "self", ".", "measurement_unit", "+", "']'", ")", "graph_vect...
Returns a dict of source name and sensors with their values
[ "Returns", "a", "dict", "of", "source", "name", "and", "sensors", "with", "their", "values" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/sources/source.py#L64-L70
245,402
amanusk/s-tui
s_tui/sources/source.py
Source.eval_hooks
def eval_hooks(self): """ Evaluate the current state of this Source and invoke any attached hooks if they've been triggered """ logging.debug("Evaluating hooks") if self.get_edge_triggered(): logging.debug("Hook triggered") for hook in [h for h in self.edge_hooks if h.is_ready()]: logging.debug("Hook invoked") hook.invoke()
python
def eval_hooks(self): logging.debug("Evaluating hooks") if self.get_edge_triggered(): logging.debug("Hook triggered") for hook in [h for h in self.edge_hooks if h.is_ready()]: logging.debug("Hook invoked") hook.invoke()
[ "def", "eval_hooks", "(", "self", ")", ":", "logging", ".", "debug", "(", "\"Evaluating hooks\"", ")", "if", "self", ".", "get_edge_triggered", "(", ")", ":", "logging", ".", "debug", "(", "\"Hook triggered\"", ")", "for", "hook", "in", "[", "h", "for", ...
Evaluate the current state of this Source and invoke any attached hooks if they've been triggered
[ "Evaluate", "the", "current", "state", "of", "this", "Source", "and", "invoke", "any", "attached", "hooks", "if", "they", "ve", "been", "triggered" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/sources/source.py#L109-L119
245,403
amanusk/s-tui
s_tui/sources/hook.py
Hook.invoke
def invoke(self): """ Run callback, optionally passing a variable number of arguments `callback_args` """ # Don't sleep a hook if it has never run if self.timeout_milliseconds > 0: self.ready_time = ( datetime.now() + timedelta(milliseconds=self.timeout_milliseconds)) self.callback(self.callback_args)
python
def invoke(self): # Don't sleep a hook if it has never run if self.timeout_milliseconds > 0: self.ready_time = ( datetime.now() + timedelta(milliseconds=self.timeout_milliseconds)) self.callback(self.callback_args)
[ "def", "invoke", "(", "self", ")", ":", "# Don't sleep a hook if it has never run", "if", "self", ".", "timeout_milliseconds", ">", "0", ":", "self", ".", "ready_time", "=", "(", "datetime", ".", "now", "(", ")", "+", "timedelta", "(", "milliseconds", "=", "...
Run callback, optionally passing a variable number of arguments `callback_args`
[ "Run", "callback", "optionally", "passing", "a", "variable", "number", "of", "arguments", "callback_args" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/sources/hook.py#L42-L54
245,404
amanusk/s-tui
s_tui/sturwid/ui_elements.py
radio_button
def radio_button(g, l, fn): """ Inheriting radio button of urwid """ w = urwid.RadioButton(g, l, False, on_state_change=fn) w = urwid.AttrWrap(w, 'button normal', 'button select') return w
python
def radio_button(g, l, fn): w = urwid.RadioButton(g, l, False, on_state_change=fn) w = urwid.AttrWrap(w, 'button normal', 'button select') return w
[ "def", "radio_button", "(", "g", ",", "l", ",", "fn", ")", ":", "w", "=", "urwid", ".", "RadioButton", "(", "g", ",", "l", ",", "False", ",", "on_state_change", "=", "fn", ")", "w", "=", "urwid", ".", "AttrWrap", "(", "w", ",", "'button normal'", ...
Inheriting radio button of urwid
[ "Inheriting", "radio", "button", "of", "urwid" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/sturwid/ui_elements.py#L95-L99
245,405
amanusk/s-tui
s_tui/s_tui.py
StressController.start_stress
def start_stress(self, stress_cmd): """ Starts a new stress process with a given cmd """ with open(os.devnull, 'w') as dev_null: try: stress_proc = subprocess.Popen(stress_cmd, stdout=dev_null, stderr=dev_null) self.set_stress_process(psutil.Process(stress_proc.pid)) except OSError: logging.debug("Unable to start stress")
python
def start_stress(self, stress_cmd): with open(os.devnull, 'w') as dev_null: try: stress_proc = subprocess.Popen(stress_cmd, stdout=dev_null, stderr=dev_null) self.set_stress_process(psutil.Process(stress_proc.pid)) except OSError: logging.debug("Unable to start stress")
[ "def", "start_stress", "(", "self", ",", "stress_cmd", ")", ":", "with", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "as", "dev_null", ":", "try", ":", "stress_proc", "=", "subprocess", ".", "Popen", "(", "stress_cmd", ",", "stdout", "=", "dev_...
Starts a new stress process with a given cmd
[ "Starts", "a", "new", "stress", "process", "with", "a", "given", "cmd" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L164-L172
245,406
amanusk/s-tui
s_tui/s_tui.py
GraphView.update_displayed_information
def update_displayed_information(self): """ Update all the graphs that are being displayed """ for source in self.controller.sources: source_name = source.get_source_name() if (any(self.graphs_menu.active_sensors[source_name]) or any(self.summary_menu.active_sensors[source_name])): source.update() for graph in self.visible_graphs.values(): graph.update() # update graph summery for summary in self.visible_summaries.values(): summary.update() # Only update clock if not is stress mode if self.controller.stress_conroller.get_current_mode() != 'Monitor': self.clock_view.set_text(seconds_to_text( (timeit.default_timer() - self.controller.stress_start_time)))
python
def update_displayed_information(self): for source in self.controller.sources: source_name = source.get_source_name() if (any(self.graphs_menu.active_sensors[source_name]) or any(self.summary_menu.active_sensors[source_name])): source.update() for graph in self.visible_graphs.values(): graph.update() # update graph summery for summary in self.visible_summaries.values(): summary.update() # Only update clock if not is stress mode if self.controller.stress_conroller.get_current_mode() != 'Monitor': self.clock_view.set_text(seconds_to_text( (timeit.default_timer() - self.controller.stress_start_time)))
[ "def", "update_displayed_information", "(", "self", ")", ":", "for", "source", "in", "self", ".", "controller", ".", "sources", ":", "source_name", "=", "source", ".", "get_source_name", "(", ")", "if", "(", "any", "(", "self", ".", "graphs_menu", ".", "ac...
Update all the graphs that are being displayed
[ "Update", "all", "the", "graphs", "that", "are", "being", "displayed" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L235-L254
245,407
amanusk/s-tui
s_tui/s_tui.py
GraphView.on_reset_button
def on_reset_button(self, _): """Reset graph data and display empty graph""" for graph in self.visible_graphs.values(): graph.reset() for graph in self.graphs.values(): try: graph.source.reset() except NotImplementedError: pass # Reset clock self.clock_view.set_text(ZERO_TIME) self.update_displayed_information()
python
def on_reset_button(self, _): for graph in self.visible_graphs.values(): graph.reset() for graph in self.graphs.values(): try: graph.source.reset() except NotImplementedError: pass # Reset clock self.clock_view.set_text(ZERO_TIME) self.update_displayed_information()
[ "def", "on_reset_button", "(", "self", ",", "_", ")", ":", "for", "graph", "in", "self", ".", "visible_graphs", ".", "values", "(", ")", ":", "graph", ".", "reset", "(", ")", "for", "graph", "in", "self", ".", "graphs", ".", "values", "(", ")", ":"...
Reset graph data and display empty graph
[ "Reset", "graph", "data", "and", "display", "empty", "graph" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L256-L268
245,408
amanusk/s-tui
s_tui/s_tui.py
GraphView.on_stress_menu_open
def on_stress_menu_open(self, widget): """Open stress options""" self.original_widget = urwid.Overlay(self.stress_menu.main_window, self.original_widget, ('relative', self.left_margin), self.stress_menu.get_size()[1], ('relative', self.top_margin), self.stress_menu.get_size()[0])
python
def on_stress_menu_open(self, widget): self.original_widget = urwid.Overlay(self.stress_menu.main_window, self.original_widget, ('relative', self.left_margin), self.stress_menu.get_size()[1], ('relative', self.top_margin), self.stress_menu.get_size()[0])
[ "def", "on_stress_menu_open", "(", "self", ",", "widget", ")", ":", "self", ".", "original_widget", "=", "urwid", ".", "Overlay", "(", "self", ".", "stress_menu", ".", "main_window", ",", "self", ".", "original_widget", ",", "(", "'relative'", ",", "self", ...
Open stress options
[ "Open", "stress", "options" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L309-L316
245,409
amanusk/s-tui
s_tui/s_tui.py
GraphView.on_help_menu_open
def on_help_menu_open(self, widget): """Open Help menu""" self.original_widget = urwid.Overlay(self.help_menu.main_window, self.original_widget, ('relative', self.left_margin), self.help_menu.get_size()[1], ('relative', self.top_margin), self.help_menu.get_size()[0])
python
def on_help_menu_open(self, widget): self.original_widget = urwid.Overlay(self.help_menu.main_window, self.original_widget, ('relative', self.left_margin), self.help_menu.get_size()[1], ('relative', self.top_margin), self.help_menu.get_size()[0])
[ "def", "on_help_menu_open", "(", "self", ",", "widget", ")", ":", "self", ".", "original_widget", "=", "urwid", ".", "Overlay", "(", "self", ".", "help_menu", ".", "main_window", ",", "self", ".", "original_widget", ",", "(", "'relative'", ",", "self", "."...
Open Help menu
[ "Open", "Help", "menu" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L318-L325
245,410
amanusk/s-tui
s_tui/s_tui.py
GraphView.on_about_menu_open
def on_about_menu_open(self, widget): """Open About menu""" self.original_widget = urwid.Overlay(self.about_menu.main_window, self.original_widget, ('relative', self.left_margin), self.about_menu.get_size()[1], ('relative', self.top_margin), self.about_menu.get_size()[0])
python
def on_about_menu_open(self, widget): self.original_widget = urwid.Overlay(self.about_menu.main_window, self.original_widget, ('relative', self.left_margin), self.about_menu.get_size()[1], ('relative', self.top_margin), self.about_menu.get_size()[0])
[ "def", "on_about_menu_open", "(", "self", ",", "widget", ")", ":", "self", ".", "original_widget", "=", "urwid", ".", "Overlay", "(", "self", ".", "about_menu", ".", "main_window", ",", "self", ".", "original_widget", ",", "(", "'relative'", ",", "self", "...
Open About menu
[ "Open", "About", "menu" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L327-L334
245,411
amanusk/s-tui
s_tui/s_tui.py
GraphView.on_mode_button
def on_mode_button(self, my_button, state): """Notify the controller of a new mode setting.""" if state: # The new mode is the label of the button self.controller.set_mode(my_button.get_label())
python
def on_mode_button(self, my_button, state): if state: # The new mode is the label of the button self.controller.set_mode(my_button.get_label())
[ "def", "on_mode_button", "(", "self", ",", "my_button", ",", "state", ")", ":", "if", "state", ":", "# The new mode is the label of the button", "self", ".", "controller", ".", "set_mode", "(", "my_button", ".", "get_label", "(", ")", ")" ]
Notify the controller of a new mode setting.
[ "Notify", "the", "controller", "of", "a", "new", "mode", "setting", "." ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L356-L360
245,412
amanusk/s-tui
s_tui/s_tui.py
GraphView.on_unicode_checkbox
def on_unicode_checkbox(self, w=None, state=False): """Enable smooth edges if utf-8 is supported""" logging.debug("unicode State is %s", state) # Update the controller to the state of the checkbox self.controller.smooth_graph_mode = state if state: self.hline = urwid.AttrWrap( urwid.SolidFill(u'\N{LOWER ONE QUARTER BLOCK}'), 'line') else: self.hline = urwid.AttrWrap(urwid.SolidFill(u' '), 'line') for graph in self.graphs.values(): graph.set_smooth_colors(state) self.show_graphs()
python
def on_unicode_checkbox(self, w=None, state=False): logging.debug("unicode State is %s", state) # Update the controller to the state of the checkbox self.controller.smooth_graph_mode = state if state: self.hline = urwid.AttrWrap( urwid.SolidFill(u'\N{LOWER ONE QUARTER BLOCK}'), 'line') else: self.hline = urwid.AttrWrap(urwid.SolidFill(u' '), 'line') for graph in self.graphs.values(): graph.set_smooth_colors(state) self.show_graphs()
[ "def", "on_unicode_checkbox", "(", "self", ",", "w", "=", "None", ",", "state", "=", "False", ")", ":", "logging", ".", "debug", "(", "\"unicode State is %s\"", ",", "state", ")", "# Update the controller to the state of the checkbox", "self", ".", "controller", "...
Enable smooth edges if utf-8 is supported
[ "Enable", "smooth", "edges", "if", "utf", "-", "8", "is", "supported" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L362-L377
245,413
amanusk/s-tui
s_tui/s_tui.py
GraphView._generate_graph_controls
def _generate_graph_controls(self): """ Display sidebar controls. i.e. buttons, and controls""" # setup mode radio buttons stress_modes = self.controller.stress_conroller.get_modes() group = [] for mode in stress_modes: self.mode_buttons.append(radio_button(group, mode, self.on_mode_button)) # Set default radio button to "Monitor" mode self.mode_buttons[0].set_state(True, do_callback=False) # Create list of buttons control_options = list() control_options.append(button('Graphs', self.on_graphs_menu_open)) control_options.append(button('Summaries', self.on_summary_menu_open)) if self.controller.stress_exe: control_options.append(button('Stress Options', self.on_stress_menu_open)) control_options.append(button("Reset", self.on_reset_button)) control_options.append(button('Help', self.on_help_menu_open)) control_options.append(button('About', self.on_about_menu_open)) control_options.append(button("Save Settings", self.on_save_settings)) control_options.append(button("Quit", self.on_exit_program)) # Create the menu animate_controls = urwid.GridFlow(control_options, 18, 2, 0, 'center') # Create smooth graph selection button default_smooth = self.controller.smooth_graph_mode if urwid.get_encoding_mode() == "utf8": unicode_checkbox = urwid.CheckBox( "UTF-8", state=default_smooth, on_state_change=self.on_unicode_checkbox) # Init the state of the graph accoding to the selected mode self.on_unicode_checkbox(state=default_smooth) else: unicode_checkbox = urwid.Text( "[N/A] UTF-8") install_stress_message = urwid.Text("") if not self.controller.stress_exe: install_stress_message = urwid.Text( ('button normal', u"(N/A) install stress")) controls = [urwid.Text(('bold text', u"Modes"), align="center")] controls += self.mode_buttons controls += [ install_stress_message, urwid.Text(('bold text', u"Stress Timer"), align="center"), self.clock_view, urwid.Divider(), urwid.Text(('bold text', u"Control Options"), align="center"), animate_controls, urwid.Divider(), urwid.Text(('bold 
text', u"Visual Options"), align="center"), unicode_checkbox, self.refresh_rate_ctrl, urwid.Divider(), urwid.Text(('bold text', u"Summaries"), align="center"), ] return controls
python
def _generate_graph_controls(self): # setup mode radio buttons stress_modes = self.controller.stress_conroller.get_modes() group = [] for mode in stress_modes: self.mode_buttons.append(radio_button(group, mode, self.on_mode_button)) # Set default radio button to "Monitor" mode self.mode_buttons[0].set_state(True, do_callback=False) # Create list of buttons control_options = list() control_options.append(button('Graphs', self.on_graphs_menu_open)) control_options.append(button('Summaries', self.on_summary_menu_open)) if self.controller.stress_exe: control_options.append(button('Stress Options', self.on_stress_menu_open)) control_options.append(button("Reset", self.on_reset_button)) control_options.append(button('Help', self.on_help_menu_open)) control_options.append(button('About', self.on_about_menu_open)) control_options.append(button("Save Settings", self.on_save_settings)) control_options.append(button("Quit", self.on_exit_program)) # Create the menu animate_controls = urwid.GridFlow(control_options, 18, 2, 0, 'center') # Create smooth graph selection button default_smooth = self.controller.smooth_graph_mode if urwid.get_encoding_mode() == "utf8": unicode_checkbox = urwid.CheckBox( "UTF-8", state=default_smooth, on_state_change=self.on_unicode_checkbox) # Init the state of the graph accoding to the selected mode self.on_unicode_checkbox(state=default_smooth) else: unicode_checkbox = urwid.Text( "[N/A] UTF-8") install_stress_message = urwid.Text("") if not self.controller.stress_exe: install_stress_message = urwid.Text( ('button normal', u"(N/A) install stress")) controls = [urwid.Text(('bold text', u"Modes"), align="center")] controls += self.mode_buttons controls += [ install_stress_message, urwid.Text(('bold text', u"Stress Timer"), align="center"), self.clock_view, urwid.Divider(), urwid.Text(('bold text', u"Control Options"), align="center"), animate_controls, urwid.Divider(), urwid.Text(('bold text', u"Visual Options"), align="center"), unicode_checkbox, 
self.refresh_rate_ctrl, urwid.Divider(), urwid.Text(('bold text', u"Summaries"), align="center"), ] return controls
[ "def", "_generate_graph_controls", "(", "self", ")", ":", "# setup mode radio buttons", "stress_modes", "=", "self", ".", "controller", ".", "stress_conroller", ".", "get_modes", "(", ")", "group", "=", "[", "]", "for", "mode", "in", "stress_modes", ":", "self",...
Display sidebar controls. i.e. buttons, and controls
[ "Display", "sidebar", "controls", ".", "i", ".", "e", ".", "buttons", "and", "controls" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L387-L452
245,414
amanusk/s-tui
s_tui/s_tui.py
GraphView._generate_cpu_stats
def _generate_cpu_stats(): """Read and display processor name """ cpu_name = urwid.Text("CPU Name N/A", align="center") try: cpu_name = urwid.Text(get_processor_name().strip(), align="center") except OSError: logging.info("CPU name not available") return [urwid.Text(('bold text', "CPU Detected"), align="center"), cpu_name, urwid.Divider()]
python
def _generate_cpu_stats(): cpu_name = urwid.Text("CPU Name N/A", align="center") try: cpu_name = urwid.Text(get_processor_name().strip(), align="center") except OSError: logging.info("CPU name not available") return [urwid.Text(('bold text', "CPU Detected"), align="center"), cpu_name, urwid.Divider()]
[ "def", "_generate_cpu_stats", "(", ")", ":", "cpu_name", "=", "urwid", ".", "Text", "(", "\"CPU Name N/A\"", ",", "align", "=", "\"center\"", ")", "try", ":", "cpu_name", "=", "urwid", ".", "Text", "(", "get_processor_name", "(", ")", ".", "strip", "(", ...
Read and display processor name
[ "Read", "and", "display", "processor", "name" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L455-L463
245,415
amanusk/s-tui
s_tui/s_tui.py
GraphView.show_graphs
def show_graphs(self): """Show a pile of the graph selected for dislpay""" elements = itertools.chain.from_iterable( ([graph] for graph in self.visible_graphs.values())) self.graph_place_holder.original_widget = urwid.Pile(elements)
python
def show_graphs(self): elements = itertools.chain.from_iterable( ([graph] for graph in self.visible_graphs.values())) self.graph_place_holder.original_widget = urwid.Pile(elements)
[ "def", "show_graphs", "(", "self", ")", ":", "elements", "=", "itertools", ".", "chain", ".", "from_iterable", "(", "(", "[", "graph", "]", "for", "graph", "in", "self", ".", "visible_graphs", ".", "values", "(", ")", ")", ")", "self", ".", "graph_plac...
Show a pile of the graph selected for dislpay
[ "Show", "a", "pile", "of", "the", "graph", "selected", "for", "dislpay" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L475-L480
245,416
amanusk/s-tui
s_tui/s_tui.py
GraphController._load_config
def _load_config(self, t_thresh): """ Uses configurations defined by user to configure sources for display. This should be the only place where sources are initiated This returns a list of sources after configurations are applied """ # Load and configure user config dir when controller starts if not user_config_dir_exists(): user_config_dir = make_user_config_dir() else: user_config_dir = get_user_config_dir() if user_config_dir is None: logging.warning("Failed to find or create scripts directory,\ proceeding without scripting support") self.script_hooks_enabled = False else: self.script_loader = ScriptHookLoader(user_config_dir) # Use user config file if one was saved before self.conf = None if user_config_file_exists(): self.conf = configparser.ConfigParser() self.conf.read(get_user_config_file()) else: logging.debug("Config file not found") # Load refresh refresh rate from config try: self.refresh_rate = str(self.conf.getfloat( 'GraphControll', 'refresh')) logging.debug("User refresh rate: %s", self.refresh_rate) except (AttributeError, ValueError, configparser.NoOptionError, configparser.NoSectionError): logging.debug("No refresh rate configed") # Change UTF8 setting from config try: if self.conf.getboolean('GraphControll', 'UTF8'): self.smooth_graph_mode = True else: logging.debug("UTF8 selected as %s", self.conf.get('GraphControll', 'UTF8')) except (AttributeError, ValueError, configparser.NoOptionError, configparser.NoSectionError): logging.debug("No user config for utf8") # Try to load high temperature threshold if configured if t_thresh is None: try: self.temp_thresh = self.conf.get('GraphControll', 'TTHRESH') logging.debug("Temperature threshold set to %s", self.temp_thresh) except (AttributeError, ValueError, configparser.NoOptionError, configparser.NoSectionError): logging.debug("No user config for temp threshold") # This should be the only place where sources are configured possible_sources = [TempSource(self.temp_thresh), FreqSource(), UtilSource(), 
RaplPowerSource(), FanSource()] # Load sensors config if available sources = [x.get_source_name() for x in possible_sources if x.get_is_available()] for source in sources: try: options = list(self.conf.items(source + ",Graphs")) for option in options: # Returns tuples of values in order self.graphs_default_conf[source].append( str_to_bool(option[1])) options = list(self.conf.items(source + ",Summaries")) for option in options: # Returns tuples of values in order self.summary_default_conf[source].append( str_to_bool(option[1])) except (AttributeError, ValueError, configparser.NoOptionError, configparser.NoSectionError): logging.debug("Error reading sensors config") return possible_sources
python
def _load_config(self, t_thresh): # Load and configure user config dir when controller starts if not user_config_dir_exists(): user_config_dir = make_user_config_dir() else: user_config_dir = get_user_config_dir() if user_config_dir is None: logging.warning("Failed to find or create scripts directory,\ proceeding without scripting support") self.script_hooks_enabled = False else: self.script_loader = ScriptHookLoader(user_config_dir) # Use user config file if one was saved before self.conf = None if user_config_file_exists(): self.conf = configparser.ConfigParser() self.conf.read(get_user_config_file()) else: logging.debug("Config file not found") # Load refresh refresh rate from config try: self.refresh_rate = str(self.conf.getfloat( 'GraphControll', 'refresh')) logging.debug("User refresh rate: %s", self.refresh_rate) except (AttributeError, ValueError, configparser.NoOptionError, configparser.NoSectionError): logging.debug("No refresh rate configed") # Change UTF8 setting from config try: if self.conf.getboolean('GraphControll', 'UTF8'): self.smooth_graph_mode = True else: logging.debug("UTF8 selected as %s", self.conf.get('GraphControll', 'UTF8')) except (AttributeError, ValueError, configparser.NoOptionError, configparser.NoSectionError): logging.debug("No user config for utf8") # Try to load high temperature threshold if configured if t_thresh is None: try: self.temp_thresh = self.conf.get('GraphControll', 'TTHRESH') logging.debug("Temperature threshold set to %s", self.temp_thresh) except (AttributeError, ValueError, configparser.NoOptionError, configparser.NoSectionError): logging.debug("No user config for temp threshold") # This should be the only place where sources are configured possible_sources = [TempSource(self.temp_thresh), FreqSource(), UtilSource(), RaplPowerSource(), FanSource()] # Load sensors config if available sources = [x.get_source_name() for x in possible_sources if x.get_is_available()] for source in sources: try: options = 
list(self.conf.items(source + ",Graphs")) for option in options: # Returns tuples of values in order self.graphs_default_conf[source].append( str_to_bool(option[1])) options = list(self.conf.items(source + ",Summaries")) for option in options: # Returns tuples of values in order self.summary_default_conf[source].append( str_to_bool(option[1])) except (AttributeError, ValueError, configparser.NoOptionError, configparser.NoSectionError): logging.debug("Error reading sensors config") return possible_sources
[ "def", "_load_config", "(", "self", ",", "t_thresh", ")", ":", "# Load and configure user config dir when controller starts", "if", "not", "user_config_dir_exists", "(", ")", ":", "user_config_dir", "=", "make_user_config_dir", "(", ")", "else", ":", "user_config_dir", ...
Uses configurations defined by user to configure sources for display. This should be the only place where sources are initiated This returns a list of sources after configurations are applied
[ "Uses", "configurations", "defined", "by", "user", "to", "configure", "sources", "for", "display", ".", "This", "should", "be", "the", "only", "place", "where", "sources", "are", "initiated" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L564-L648
245,417
amanusk/s-tui
s_tui/s_tui.py
GraphController._config_stress
def _config_stress(self): """ Configures the possible stress processes and modes """ # Configure stress_process self.stress_exe = None stress_installed = False self.stress_exe = which('stress') if self.stress_exe: stress_installed = True else: self.stress_exe = which('stress-ng') if self.stress_exe: stress_installed = True self.firestarter = None firestarter_installed = False if os.path.isfile('./FIRESTARTER/FIRESTARTER'): self.firestarter = os.path.join(os.getcwd(), 'FIRESTARTER', 'FIRESTARTER') firestarter_installed = True else: firestarter_exe = which('FIRESTARTER') if firestarter_exe is not None: self.firestarter = firestarter_exe firestarter_installed = True return StressController(stress_installed, firestarter_installed)
python
def _config_stress(self): # Configure stress_process self.stress_exe = None stress_installed = False self.stress_exe = which('stress') if self.stress_exe: stress_installed = True else: self.stress_exe = which('stress-ng') if self.stress_exe: stress_installed = True self.firestarter = None firestarter_installed = False if os.path.isfile('./FIRESTARTER/FIRESTARTER'): self.firestarter = os.path.join(os.getcwd(), 'FIRESTARTER', 'FIRESTARTER') firestarter_installed = True else: firestarter_exe = which('FIRESTARTER') if firestarter_exe is not None: self.firestarter = firestarter_exe firestarter_installed = True return StressController(stress_installed, firestarter_installed)
[ "def", "_config_stress", "(", "self", ")", ":", "# Configure stress_process", "self", ".", "stress_exe", "=", "None", "stress_installed", "=", "False", "self", ".", "stress_exe", "=", "which", "(", "'stress'", ")", "if", "self", ".", "stress_exe", ":", "stress...
Configures the possible stress processes and modes
[ "Configures", "the", "possible", "stress", "processes", "and", "modes" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L650-L675
245,418
amanusk/s-tui
s_tui/s_tui.py
GraphController.main
def main(self): """ Starts the main loop and graph animation """ loop = MainLoop(self.view, DEFAULT_PALETTE, handle_mouse=self.handle_mouse) self.view.show_graphs() self.animate_graph(loop) try: loop.run() except (ZeroDivisionError) as err: # In case of Zero division, we want an error to return, and # get a clue where this happens logging.debug("Some stat caused divide by zero exception. Exiting") logging.error(err, exc_info=True) print(ERROR_MESSAGE) except (AttributeError) as err: # In this case we restart the loop, to address bug #50, where # urwid crashes on multiple presses on 'esc' logging.debug("Catch attribute Error in urwid and restart") logging.debug(err, exc_info=True) self.main() except (psutil.NoSuchProcess) as err: # This might happen if the stress process is not found, in this # case, we want to know why logging.error("No such process error") logging.error(err, exc_info=True) print(ERROR_MESSAGE)
python
def main(self): loop = MainLoop(self.view, DEFAULT_PALETTE, handle_mouse=self.handle_mouse) self.view.show_graphs() self.animate_graph(loop) try: loop.run() except (ZeroDivisionError) as err: # In case of Zero division, we want an error to return, and # get a clue where this happens logging.debug("Some stat caused divide by zero exception. Exiting") logging.error(err, exc_info=True) print(ERROR_MESSAGE) except (AttributeError) as err: # In this case we restart the loop, to address bug #50, where # urwid crashes on multiple presses on 'esc' logging.debug("Catch attribute Error in urwid and restart") logging.debug(err, exc_info=True) self.main() except (psutil.NoSuchProcess) as err: # This might happen if the stress process is not found, in this # case, we want to know why logging.error("No such process error") logging.error(err, exc_info=True) print(ERROR_MESSAGE)
[ "def", "main", "(", "self", ")", ":", "loop", "=", "MainLoop", "(", "self", ".", "view", ",", "DEFAULT_PALETTE", ",", "handle_mouse", "=", "self", ".", "handle_mouse", ")", "self", ".", "view", ".", "show_graphs", "(", ")", "self", ".", "animate_graph", ...
Starts the main loop and graph animation
[ "Starts", "the", "main", "loop", "and", "graph", "animation" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L724-L749
245,419
amanusk/s-tui
s_tui/s_tui.py
GraphController.update_stress_mode
def update_stress_mode(self): """ Updates stress mode according to radio buttons state """ self.stress_conroller.kill_stress_process() # Start a new clock upon starting a new stress test self.view.clock_view.set_text(ZERO_TIME) self.stress_start_time = timeit.default_timer() if self.stress_conroller.get_current_mode() == 'Stress': stress_cmd = self.view.stress_menu.get_stress_cmd() self.stress_conroller.start_stress(stress_cmd) elif self.stress_conroller.get_current_mode() == 'FIRESTARTER': stress_cmd = [self.firestarter] self.stress_conroller.start_stress(stress_cmd)
python
def update_stress_mode(self): self.stress_conroller.kill_stress_process() # Start a new clock upon starting a new stress test self.view.clock_view.set_text(ZERO_TIME) self.stress_start_time = timeit.default_timer() if self.stress_conroller.get_current_mode() == 'Stress': stress_cmd = self.view.stress_menu.get_stress_cmd() self.stress_conroller.start_stress(stress_cmd) elif self.stress_conroller.get_current_mode() == 'FIRESTARTER': stress_cmd = [self.firestarter] self.stress_conroller.start_stress(stress_cmd)
[ "def", "update_stress_mode", "(", "self", ")", ":", "self", ".", "stress_conroller", ".", "kill_stress_process", "(", ")", "# Start a new clock upon starting a new stress test", "self", ".", "view", ".", "clock_view", ".", "set_text", "(", "ZERO_TIME", ")", "self", ...
Updates stress mode according to radio buttons state
[ "Updates", "stress", "mode", "according", "to", "radio", "buttons", "state" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L751-L766
245,420
amanusk/s-tui
s_tui/s_tui.py
GraphController.save_settings
def save_settings(self): """ Save the current configuration to a user config file """ def _save_displayed_setting(conf, submenu): for source, visible_sensors in \ self.view.graphs_menu.active_sensors.items(): section = source + "," + submenu conf.add_section(section) sources = self.sources logging.debug("Saving settings for %s", source) logging.debug("Visible sensors %s", visible_sensors) # TODO: consider changing sensors_list to dict curr_sensor = [x for x in sources if x.get_source_name() == source][0] sensor_list = curr_sensor.get_sensor_list() for sensor_id, sensor in enumerate(sensor_list): try: conf.set(section, sensor, str( visible_sensors[sensor_id])) except IndexError: conf.set(section, sensor, str(True)) if not user_config_dir_exists(): make_user_config_dir() conf = configparser.ConfigParser() config_file = get_user_config_file() with open(config_file, 'w') as cfgfile: conf.add_section('GraphControll') # Save the configured refresh rete conf.set('GraphControll', 'refresh', str( self.refresh_rate)) # Save the configured UTF8 setting conf.set('GraphControll', 'UTF8', str( self.smooth_graph_mode)) # Save the configured t_thresh if self.temp_thresh: conf.set('GraphControll', 'TTHRESH', str( self.temp_thresh)) _save_displayed_setting(conf, "Graphs") _save_displayed_setting(conf, "Summaries") conf.write(cfgfile)
python
def save_settings(self): def _save_displayed_setting(conf, submenu): for source, visible_sensors in \ self.view.graphs_menu.active_sensors.items(): section = source + "," + submenu conf.add_section(section) sources = self.sources logging.debug("Saving settings for %s", source) logging.debug("Visible sensors %s", visible_sensors) # TODO: consider changing sensors_list to dict curr_sensor = [x for x in sources if x.get_source_name() == source][0] sensor_list = curr_sensor.get_sensor_list() for sensor_id, sensor in enumerate(sensor_list): try: conf.set(section, sensor, str( visible_sensors[sensor_id])) except IndexError: conf.set(section, sensor, str(True)) if not user_config_dir_exists(): make_user_config_dir() conf = configparser.ConfigParser() config_file = get_user_config_file() with open(config_file, 'w') as cfgfile: conf.add_section('GraphControll') # Save the configured refresh rete conf.set('GraphControll', 'refresh', str( self.refresh_rate)) # Save the configured UTF8 setting conf.set('GraphControll', 'UTF8', str( self.smooth_graph_mode)) # Save the configured t_thresh if self.temp_thresh: conf.set('GraphControll', 'TTHRESH', str( self.temp_thresh)) _save_displayed_setting(conf, "Graphs") _save_displayed_setting(conf, "Summaries") conf.write(cfgfile)
[ "def", "save_settings", "(", "self", ")", ":", "def", "_save_displayed_setting", "(", "conf", ",", "submenu", ")", ":", "for", "source", ",", "visible_sensors", "in", "self", ".", "view", ".", "graphs_menu", ".", "active_sensors", ".", "items", "(", ")", "...
Save the current configuration to a user config file
[ "Save", "the", "current", "configuration", "to", "a", "user", "config", "file" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L768-L810
245,421
amanusk/s-tui
s_tui/s_tui.py
GraphController.animate_graph
def animate_graph(self, loop, user_data=None): """ Update the graph and schedule the next update This is where the magic happens """ self.view.update_displayed_information() # Save to CSV if configured if self.save_csv or self.csv_file is not None: output_to_csv(self.view.summaries, self.csv_file) # Set next update self.animate_alarm = loop.set_alarm_in( float(self.refresh_rate), self.animate_graph) if self.args.debug_run: # refresh rate is a string in float format self.debug_run_counter += int(float(self.refresh_rate)) if self.debug_run_counter >= 8: self.exit_program()
python
def animate_graph(self, loop, user_data=None): self.view.update_displayed_information() # Save to CSV if configured if self.save_csv or self.csv_file is not None: output_to_csv(self.view.summaries, self.csv_file) # Set next update self.animate_alarm = loop.set_alarm_in( float(self.refresh_rate), self.animate_graph) if self.args.debug_run: # refresh rate is a string in float format self.debug_run_counter += int(float(self.refresh_rate)) if self.debug_run_counter >= 8: self.exit_program()
[ "def", "animate_graph", "(", "self", ",", "loop", ",", "user_data", "=", "None", ")", ":", "self", ".", "view", ".", "update_displayed_information", "(", ")", "# Save to CSV if configured", "if", "self", ".", "save_csv", "or", "self", ".", "csv_file", "is", ...
Update the graph and schedule the next update This is where the magic happens
[ "Update", "the", "graph", "and", "schedule", "the", "next", "update", "This", "is", "where", "the", "magic", "happens" ]
5e89d15081e716024db28ec03b1e3a7710330951
https://github.com/amanusk/s-tui/blob/5e89d15081e716024db28ec03b1e3a7710330951/s_tui/s_tui.py#L817-L836
245,422
liftoff/pyminifier
pyminifier/obfuscate.py
obfuscation_machine
def obfuscation_machine(use_unicode=False, identifier_length=1): """ A generator that returns short sequential combinations of lower and upper-case letters that will never repeat. If *use_unicode* is ``True``, use nonlatin cryllic, arabic, and syriac letters instead of the usual ABCs. The *identifier_length* represents the length of the string to return using the aforementioned characters. """ # This generates a list of the letters a-z: lowercase = list(map(chr, range(97, 123))) # Same thing but ALL CAPS: uppercase = list(map(chr, range(65, 90))) if use_unicode: # Python 3 lets us have some *real* fun: allowed_categories = ('LC', 'Ll', 'Lu', 'Lo', 'Lu') # All the fun characters start at 1580 (hehe): big_list = list(map(chr, range(1580, HIGHEST_UNICODE))) max_chars = 1000 # Ought to be enough for anybody :) combined = [] rtl_categories = ('AL', 'R') # AL == Arabic, R == Any right-to-left last_orientation = 'L' # L = Any left-to-right # Find a good mix of left-to-right and right-to-left characters while len(combined) < max_chars: char = choice(big_list) if unicodedata.category(char) in allowed_categories: orientation = unicodedata.bidirectional(char) if last_orientation in rtl_categories: if orientation not in rtl_categories: combined.append(char) else: if orientation in rtl_categories: combined.append(char) last_orientation = orientation else: combined = lowercase + uppercase shuffle(combined) # Randomize it all to keep things interesting while True: for perm in permutations(combined, identifier_length): perm = "".join(perm) if perm not in RESERVED_WORDS: # Can't replace reserved words yield perm identifier_length += 1
python
def obfuscation_machine(use_unicode=False, identifier_length=1): # This generates a list of the letters a-z: lowercase = list(map(chr, range(97, 123))) # Same thing but ALL CAPS: uppercase = list(map(chr, range(65, 90))) if use_unicode: # Python 3 lets us have some *real* fun: allowed_categories = ('LC', 'Ll', 'Lu', 'Lo', 'Lu') # All the fun characters start at 1580 (hehe): big_list = list(map(chr, range(1580, HIGHEST_UNICODE))) max_chars = 1000 # Ought to be enough for anybody :) combined = [] rtl_categories = ('AL', 'R') # AL == Arabic, R == Any right-to-left last_orientation = 'L' # L = Any left-to-right # Find a good mix of left-to-right and right-to-left characters while len(combined) < max_chars: char = choice(big_list) if unicodedata.category(char) in allowed_categories: orientation = unicodedata.bidirectional(char) if last_orientation in rtl_categories: if orientation not in rtl_categories: combined.append(char) else: if orientation in rtl_categories: combined.append(char) last_orientation = orientation else: combined = lowercase + uppercase shuffle(combined) # Randomize it all to keep things interesting while True: for perm in permutations(combined, identifier_length): perm = "".join(perm) if perm not in RESERVED_WORDS: # Can't replace reserved words yield perm identifier_length += 1
[ "def", "obfuscation_machine", "(", "use_unicode", "=", "False", ",", "identifier_length", "=", "1", ")", ":", "# This generates a list of the letters a-z:", "lowercase", "=", "list", "(", "map", "(", "chr", ",", "range", "(", "97", ",", "123", ")", ")", ")", ...
A generator that returns short sequential combinations of lower and upper-case letters that will never repeat. If *use_unicode* is ``True``, use nonlatin cryllic, arabic, and syriac letters instead of the usual ABCs. The *identifier_length* represents the length of the string to return using the aforementioned characters.
[ "A", "generator", "that", "returns", "short", "sequential", "combinations", "of", "lower", "and", "upper", "-", "case", "letters", "that", "will", "never", "repeat", "." ]
087ea7b0c8c964f1f907c3f350f5ce281798db86
https://github.com/liftoff/pyminifier/blob/087ea7b0c8c964f1f907c3f350f5ce281798db86/pyminifier/obfuscate.py#L33-L77
245,423
liftoff/pyminifier
pyminifier/obfuscate.py
apply_obfuscation
def apply_obfuscation(source): """ Returns 'source' all obfuscated. """ global keyword_args global imported_modules tokens = token_utils.listified_tokenizer(source) keyword_args = analyze.enumerate_keyword_args(tokens) imported_modules = analyze.enumerate_imports(tokens) variables = find_obfuscatables(tokens, obfuscatable_variable) classes = find_obfuscatables(tokens, obfuscatable_class) functions = find_obfuscatables(tokens, obfuscatable_function) for variable in variables: replace_obfuscatables( tokens, obfuscate_variable, variable, name_generator) for function in functions: replace_obfuscatables( tokens, obfuscate_function, function, name_generator) for _class in classes: replace_obfuscatables(tokens, obfuscate_class, _class, name_generator) return token_utils.untokenize(tokens)
python
def apply_obfuscation(source): global keyword_args global imported_modules tokens = token_utils.listified_tokenizer(source) keyword_args = analyze.enumerate_keyword_args(tokens) imported_modules = analyze.enumerate_imports(tokens) variables = find_obfuscatables(tokens, obfuscatable_variable) classes = find_obfuscatables(tokens, obfuscatable_class) functions = find_obfuscatables(tokens, obfuscatable_function) for variable in variables: replace_obfuscatables( tokens, obfuscate_variable, variable, name_generator) for function in functions: replace_obfuscatables( tokens, obfuscate_function, function, name_generator) for _class in classes: replace_obfuscatables(tokens, obfuscate_class, _class, name_generator) return token_utils.untokenize(tokens)
[ "def", "apply_obfuscation", "(", "source", ")", ":", "global", "keyword_args", "global", "imported_modules", "tokens", "=", "token_utils", ".", "listified_tokenizer", "(", "source", ")", "keyword_args", "=", "analyze", ".", "enumerate_keyword_args", "(", "tokens", "...
Returns 'source' all obfuscated.
[ "Returns", "source", "all", "obfuscated", "." ]
087ea7b0c8c964f1f907c3f350f5ce281798db86
https://github.com/liftoff/pyminifier/blob/087ea7b0c8c964f1f907c3f350f5ce281798db86/pyminifier/obfuscate.py#L79-L99
245,424
liftoff/pyminifier
pyminifier/compression.py
bz2_pack
def bz2_pack(source): """ Returns 'source' as a bzip2-compressed, self-extracting python script. .. note:: This method uses up more space than the zip_pack method but it has the advantage in that the resulting .py file can still be imported into a python program. """ import bz2, base64 out = "" # Preserve shebangs (don't care about encodings for this) first_line = source.split('\n')[0] if analyze.shebang.match(first_line): if py3: if first_line.rstrip().endswith('python'): # Make it python3 first_line = first_line.rstrip() first_line += '3' #!/usr/bin/env python3 out = first_line + '\n' compressed_source = bz2.compress(source.encode('utf-8')) out += 'import bz2, base64\n' out += "exec(bz2.decompress(base64.b64decode('" out += base64.b64encode(compressed_source).decode('utf-8') out += "')))\n" return out
python
def bz2_pack(source): import bz2, base64 out = "" # Preserve shebangs (don't care about encodings for this) first_line = source.split('\n')[0] if analyze.shebang.match(first_line): if py3: if first_line.rstrip().endswith('python'): # Make it python3 first_line = first_line.rstrip() first_line += '3' #!/usr/bin/env python3 out = first_line + '\n' compressed_source = bz2.compress(source.encode('utf-8')) out += 'import bz2, base64\n' out += "exec(bz2.decompress(base64.b64decode('" out += base64.b64encode(compressed_source).decode('utf-8') out += "')))\n" return out
[ "def", "bz2_pack", "(", "source", ")", ":", "import", "bz2", ",", "base64", "out", "=", "\"\"", "# Preserve shebangs (don't care about encodings for this)", "first_line", "=", "source", ".", "split", "(", "'\\n'", ")", "[", "0", "]", "if", "analyze", ".", "she...
Returns 'source' as a bzip2-compressed, self-extracting python script. .. note:: This method uses up more space than the zip_pack method but it has the advantage in that the resulting .py file can still be imported into a python program.
[ "Returns", "source", "as", "a", "bzip2", "-", "compressed", "self", "-", "extracting", "python", "script", "." ]
087ea7b0c8c964f1f907c3f350f5ce281798db86
https://github.com/liftoff/pyminifier/blob/087ea7b0c8c964f1f907c3f350f5ce281798db86/pyminifier/compression.py#L51-L76
245,425
peopledoc/workalendar
workalendar/registry.py
iso_register
def iso_register(iso_code): """ Registers Calendar class as country or region in IsoRegistry. Registered country must set class variables ``iso`` using this decorator. >>> from workalendar.core import Calendar >>> @iso_register('MC-MR') >>> class MyRegion(Calendar): >>> 'My Region' Region calendar is then retrievable from registry: >>> calendar = registry.get_calendar_class('MC-MR') """ def wrapper(cls): registry.register(iso_code, cls) return cls return wrapper
python
def iso_register(iso_code): def wrapper(cls): registry.register(iso_code, cls) return cls return wrapper
[ "def", "iso_register", "(", "iso_code", ")", ":", "def", "wrapper", "(", "cls", ")", ":", "registry", ".", "register", "(", "iso_code", ",", "cls", ")", "return", "cls", "return", "wrapper" ]
Registers Calendar class as country or region in IsoRegistry. Registered country must set class variables ``iso`` using this decorator. >>> from workalendar.core import Calendar >>> @iso_register('MC-MR') >>> class MyRegion(Calendar): >>> 'My Region' Region calendar is then retrievable from registry: >>> calendar = registry.get_calendar_class('MC-MR')
[ "Registers", "Calendar", "class", "as", "country", "or", "region", "in", "IsoRegistry", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/registry.py#L94-L112
245,426
peopledoc/workalendar
workalendar/registry.py
IsoRegistry.get_calendar_class
def get_calendar_class(self, iso_code): """ Retrieves calendar class associated with given ``iso_code``. If calendar of subdivision is not registered (for subdivision like ISO codes, e.g. GB-ENG) returns calendar of containing region (e.g. United Kingdom for ISO code GB) if it's available. :rtype: Calendar """ code_elements, is_subregion = self._code_elements(iso_code) if is_subregion and iso_code not in self.region_registry: # subregion code not in region_registry code = code_elements[0] else: # subregion code in region_registry or is not a subregion code = iso_code return self.region_registry.get(code)
python
def get_calendar_class(self, iso_code): code_elements, is_subregion = self._code_elements(iso_code) if is_subregion and iso_code not in self.region_registry: # subregion code not in region_registry code = code_elements[0] else: # subregion code in region_registry or is not a subregion code = iso_code return self.region_registry.get(code)
[ "def", "get_calendar_class", "(", "self", ",", "iso_code", ")", ":", "code_elements", ",", "is_subregion", "=", "self", ".", "_code_elements", "(", "iso_code", ")", "if", "is_subregion", "and", "iso_code", "not", "in", "self", ".", "region_registry", ":", "# s...
Retrieves calendar class associated with given ``iso_code``. If calendar of subdivision is not registered (for subdivision like ISO codes, e.g. GB-ENG) returns calendar of containing region (e.g. United Kingdom for ISO code GB) if it's available. :rtype: Calendar
[ "Retrieves", "calendar", "class", "associated", "with", "given", "iso_code", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/registry.py#L29-L47
245,427
peopledoc/workalendar
workalendar/registry.py
IsoRegistry.get_subregions
def get_subregions(self, iso_code): """ Returns subregion calendar classes for given region iso_code. >>> registry = IsoRegistry() >>> # assuming calendars registered are: DE, DE-HH, DE-BE >>> registry.get_subregions('DE') {'DE-HH': <class 'workalendar.europe.germany.Hamburg'>, 'DE-BE': <class 'workalendar.europe.germany.Berlin'>} :rtype dict :return dict where keys are ISO codes strings and values are calendar classes """ items = OrderedDict() for key, value in self.region_registry.items(): code_elements, is_subregion = self._code_elements(key) if is_subregion and code_elements[0] == iso_code: items[key] = value return items
python
def get_subregions(self, iso_code): items = OrderedDict() for key, value in self.region_registry.items(): code_elements, is_subregion = self._code_elements(key) if is_subregion and code_elements[0] == iso_code: items[key] = value return items
[ "def", "get_subregions", "(", "self", ",", "iso_code", ")", ":", "items", "=", "OrderedDict", "(", ")", "for", "key", ",", "value", "in", "self", ".", "region_registry", ".", "items", "(", ")", ":", "code_elements", ",", "is_subregion", "=", "self", ".",...
Returns subregion calendar classes for given region iso_code. >>> registry = IsoRegistry() >>> # assuming calendars registered are: DE, DE-HH, DE-BE >>> registry.get_subregions('DE') {'DE-HH': <class 'workalendar.europe.germany.Hamburg'>, 'DE-BE': <class 'workalendar.europe.germany.Berlin'>} :rtype dict :return dict where keys are ISO codes strings and values are calendar classes
[ "Returns", "subregion", "calendar", "classes", "for", "given", "region", "iso_code", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/registry.py#L49-L67
245,428
peopledoc/workalendar
workalendar/registry.py
IsoRegistry.items
def items(self, region_codes, include_subregions=False): """ Returns calendar classes for regions :param region_codes list of ISO codes for selected regions :param include_subregions boolean if subregions of selected regions should be included in result :rtype dict :return dict where keys are ISO codes strings and values are calendar classes """ items = OrderedDict() for code in region_codes: try: items[code] = self.region_registry[code] except KeyError: continue if include_subregions: items.update(self.get_subregions(code)) return items
python
def items(self, region_codes, include_subregions=False): items = OrderedDict() for code in region_codes: try: items[code] = self.region_registry[code] except KeyError: continue if include_subregions: items.update(self.get_subregions(code)) return items
[ "def", "items", "(", "self", ",", "region_codes", ",", "include_subregions", "=", "False", ")", ":", "items", "=", "OrderedDict", "(", ")", "for", "code", "in", "region_codes", ":", "try", ":", "items", "[", "code", "]", "=", "self", ".", "region_registr...
Returns calendar classes for regions :param region_codes list of ISO codes for selected regions :param include_subregions boolean if subregions of selected regions should be included in result :rtype dict :return dict where keys are ISO codes strings and values are calendar classes
[ "Returns", "calendar", "classes", "for", "regions" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/registry.py#L69-L88
245,429
peopledoc/workalendar
workalendar/core.py
cleaned_date
def cleaned_date(day, keep_datetime=False): """ Return a "clean" date type. * keep a `date` unchanged * convert a datetime into a date, * convert any "duck date" type into a date using its `date()` method. """ if not isinstance(day, (date, datetime)): raise UnsupportedDateType( "`{}` is of unsupported type ({})".format(day, type(day))) if not keep_datetime: if hasattr(day, 'date') and callable(day.date): day = day.date() return day
python
def cleaned_date(day, keep_datetime=False): if not isinstance(day, (date, datetime)): raise UnsupportedDateType( "`{}` is of unsupported type ({})".format(day, type(day))) if not keep_datetime: if hasattr(day, 'date') and callable(day.date): day = day.date() return day
[ "def", "cleaned_date", "(", "day", ",", "keep_datetime", "=", "False", ")", ":", "if", "not", "isinstance", "(", "day", ",", "(", "date", ",", "datetime", ")", ")", ":", "raise", "UnsupportedDateType", "(", "\"`{}` is of unsupported type ({})\"", ".", "format"...
Return a "clean" date type. * keep a `date` unchanged * convert a datetime into a date, * convert any "duck date" type into a date using its `date()` method.
[ "Return", "a", "clean", "date", "type", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L32-L46
245,430
peopledoc/workalendar
workalendar/core.py
Calendar.get_fixed_holidays
def get_fixed_holidays(self, year): """Return the fixed days according to the FIXED_HOLIDAYS class property """ days = [] for month, day, label in self.FIXED_HOLIDAYS: days.append((date(year, month, day), label)) return days
python
def get_fixed_holidays(self, year): days = [] for month, day, label in self.FIXED_HOLIDAYS: days.append((date(year, month, day), label)) return days
[ "def", "get_fixed_holidays", "(", "self", ",", "year", ")", ":", "days", "=", "[", "]", "for", "month", ",", "day", ",", "label", "in", "self", ".", "FIXED_HOLIDAYS", ":", "days", ".", "append", "(", "(", "date", "(", "year", ",", "month", ",", "da...
Return the fixed days according to the FIXED_HOLIDAYS class property
[ "Return", "the", "fixed", "days", "according", "to", "the", "FIXED_HOLIDAYS", "class", "property" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L66-L72
245,431
peopledoc/workalendar
workalendar/core.py
Calendar.get_holiday_label
def get_holiday_label(self, day): """Return the label of the holiday, if the date is a holiday""" day = cleaned_date(day) return {day: label for day, label in self.holidays(day.year) }.get(day)
python
def get_holiday_label(self, day): day = cleaned_date(day) return {day: label for day, label in self.holidays(day.year) }.get(day)
[ "def", "get_holiday_label", "(", "self", ",", "day", ")", ":", "day", "=", "cleaned_date", "(", "day", ")", "return", "{", "day", ":", "label", "for", "day", ",", "label", "in", "self", ".", "holidays", "(", "day", ".", "year", ")", "}", ".", "get"...
Return the label of the holiday, if the date is a holiday
[ "Return", "the", "label", "of", "the", "holiday", "if", "the", "date", "is", "a", "holiday" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L99-L103
245,432
peopledoc/workalendar
workalendar/core.py
Calendar.is_working_day
def is_working_day(self, day, extra_working_days=None, extra_holidays=None): """Return True if it's a working day. In addition to the regular holidays, you can add exceptions. By providing ``extra_working_days``, you'll state that these dates **are** working days. By providing ``extra_holidays``, you'll state that these dates **are** holidays, even if not in the regular calendar holidays (or weekends). Please note that the ``extra_working_days`` list has priority over the ``extra_holidays`` list. """ day = cleaned_date(day) if extra_working_days: extra_working_days = tuple(map(cleaned_date, extra_working_days)) if extra_holidays: extra_holidays = tuple(map(cleaned_date, extra_holidays)) # Extra lists exceptions if extra_working_days and day in extra_working_days: return True # Regular rules if day.weekday() in self.get_weekend_days(): return False return not self.is_holiday(day, extra_holidays=extra_holidays)
python
def is_working_day(self, day, extra_working_days=None, extra_holidays=None): day = cleaned_date(day) if extra_working_days: extra_working_days = tuple(map(cleaned_date, extra_working_days)) if extra_holidays: extra_holidays = tuple(map(cleaned_date, extra_holidays)) # Extra lists exceptions if extra_working_days and day in extra_working_days: return True # Regular rules if day.weekday() in self.get_weekend_days(): return False return not self.is_holiday(day, extra_holidays=extra_holidays)
[ "def", "is_working_day", "(", "self", ",", "day", ",", "extra_working_days", "=", "None", ",", "extra_holidays", "=", "None", ")", ":", "day", "=", "cleaned_date", "(", "day", ")", "if", "extra_working_days", ":", "extra_working_days", "=", "tuple", "(", "ma...
Return True if it's a working day. In addition to the regular holidays, you can add exceptions. By providing ``extra_working_days``, you'll state that these dates **are** working days. By providing ``extra_holidays``, you'll state that these dates **are** holidays, even if not in the regular calendar holidays (or weekends). Please note that the ``extra_working_days`` list has priority over the ``extra_holidays`` list.
[ "Return", "True", "if", "it", "s", "a", "working", "day", ".", "In", "addition", "to", "the", "regular", "holidays", "you", "can", "add", "exceptions", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L122-L151
245,433
peopledoc/workalendar
workalendar/core.py
Calendar.is_holiday
def is_holiday(self, day, extra_holidays=None): """Return True if it's an holiday. In addition to the regular holidays, you can add exceptions. By providing ``extra_holidays``, you'll state that these dates **are** holidays, even if not in the regular calendar holidays (or weekends). """ day = cleaned_date(day) if extra_holidays: extra_holidays = tuple(map(cleaned_date, extra_holidays)) if extra_holidays and day in extra_holidays: return True return day in self.holidays_set(day.year)
python
def is_holiday(self, day, extra_holidays=None): day = cleaned_date(day) if extra_holidays: extra_holidays = tuple(map(cleaned_date, extra_holidays)) if extra_holidays and day in extra_holidays: return True return day in self.holidays_set(day.year)
[ "def", "is_holiday", "(", "self", ",", "day", ",", "extra_holidays", "=", "None", ")", ":", "day", "=", "cleaned_date", "(", "day", ")", "if", "extra_holidays", ":", "extra_holidays", "=", "tuple", "(", "map", "(", "cleaned_date", ",", "extra_holidays", ")...
Return True if it's an holiday. In addition to the regular holidays, you can add exceptions. By providing ``extra_holidays``, you'll state that these dates **are** holidays, even if not in the regular calendar holidays (or weekends).
[ "Return", "True", "if", "it", "s", "an", "holiday", ".", "In", "addition", "to", "the", "regular", "holidays", "you", "can", "add", "exceptions", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L153-L169
245,434
peopledoc/workalendar
workalendar/core.py
Calendar.add_working_days
def add_working_days(self, day, delta, extra_working_days=None, extra_holidays=None, keep_datetime=False): """Add `delta` working days to the date. You can provide either a date or a datetime to this function that will output a ``date`` result. You can alter this behaviour using the ``keep_datetime`` option set to ``True``. the ``delta`` parameter might be positive or negative. If it's negative, you may want to use the ``sub_working_days()`` method with a positive ``delta`` argument. By providing ``extra_working_days``, you'll state that these dates **are** working days. By providing ``extra_holidays``, you'll state that these dates **are** holidays, even if not in the regular calendar holidays (or weekends). Please note that the ``extra_working_days`` list has priority over the ``extra_holidays`` list. """ day = cleaned_date(day, keep_datetime) if extra_working_days: extra_working_days = tuple(map(cleaned_date, extra_working_days)) if extra_holidays: extra_holidays = tuple(map(cleaned_date, extra_holidays)) days = 0 temp_day = day if type(temp_day) is datetime and not keep_datetime: temp_day = temp_day.date() day_added = 1 if delta >= 0 else -1 delta = abs(delta) while days < delta: temp_day = temp_day + timedelta(days=day_added) if self.is_working_day(temp_day, extra_working_days=extra_working_days, extra_holidays=extra_holidays): days += 1 return temp_day
python
def add_working_days(self, day, delta, extra_working_days=None, extra_holidays=None, keep_datetime=False): day = cleaned_date(day, keep_datetime) if extra_working_days: extra_working_days = tuple(map(cleaned_date, extra_working_days)) if extra_holidays: extra_holidays = tuple(map(cleaned_date, extra_holidays)) days = 0 temp_day = day if type(temp_day) is datetime and not keep_datetime: temp_day = temp_day.date() day_added = 1 if delta >= 0 else -1 delta = abs(delta) while days < delta: temp_day = temp_day + timedelta(days=day_added) if self.is_working_day(temp_day, extra_working_days=extra_working_days, extra_holidays=extra_holidays): days += 1 return temp_day
[ "def", "add_working_days", "(", "self", ",", "day", ",", "delta", ",", "extra_working_days", "=", "None", ",", "extra_holidays", "=", "None", ",", "keep_datetime", "=", "False", ")", ":", "day", "=", "cleaned_date", "(", "day", ",", "keep_datetime", ")", "...
Add `delta` working days to the date. You can provide either a date or a datetime to this function that will output a ``date`` result. You can alter this behaviour using the ``keep_datetime`` option set to ``True``. the ``delta`` parameter might be positive or negative. If it's negative, you may want to use the ``sub_working_days()`` method with a positive ``delta`` argument. By providing ``extra_working_days``, you'll state that these dates **are** working days. By providing ``extra_holidays``, you'll state that these dates **are** holidays, even if not in the regular calendar holidays (or weekends). Please note that the ``extra_working_days`` list has priority over the ``extra_holidays`` list.
[ "Add", "delta", "working", "days", "to", "the", "date", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L171-L213
245,435
peopledoc/workalendar
workalendar/core.py
Calendar.sub_working_days
def sub_working_days(self, day, delta, extra_working_days=None, extra_holidays=None, keep_datetime=False): """ Substract `delta` working days to the date. This method is a shortcut / helper. Users may want to use either:: cal.add_working_days(my_date, -7) cal.sub_working_days(my_date, 7) The other parameters are to be used exactly as in the ``add_working_days`` method. A negative ``delta`` argument will be converted into its absolute value. Hence, the two following calls are equivalent:: cal.sub_working_days(my_date, -7) cal.sub_working_days(my_date, 7) As in ``add_working_days()`` you can set the parameter ``keep_datetime`` to ``True`` to make sure that if your ``day`` argument is a ``datetime``, the returned date will also be a ``datetime`` object. """ delta = abs(delta) return self.add_working_days( day, -delta, extra_working_days, extra_holidays, keep_datetime=keep_datetime)
python
def sub_working_days(self, day, delta, extra_working_days=None, extra_holidays=None, keep_datetime=False): delta = abs(delta) return self.add_working_days( day, -delta, extra_working_days, extra_holidays, keep_datetime=keep_datetime)
[ "def", "sub_working_days", "(", "self", ",", "day", ",", "delta", ",", "extra_working_days", "=", "None", ",", "extra_holidays", "=", "None", ",", "keep_datetime", "=", "False", ")", ":", "delta", "=", "abs", "(", "delta", ")", "return", "self", ".", "ad...
Substract `delta` working days to the date. This method is a shortcut / helper. Users may want to use either:: cal.add_working_days(my_date, -7) cal.sub_working_days(my_date, 7) The other parameters are to be used exactly as in the ``add_working_days`` method. A negative ``delta`` argument will be converted into its absolute value. Hence, the two following calls are equivalent:: cal.sub_working_days(my_date, -7) cal.sub_working_days(my_date, 7) As in ``add_working_days()`` you can set the parameter ``keep_datetime`` to ``True`` to make sure that if your ``day`` argument is a ``datetime``, the returned date will also be a ``datetime`` object.
[ "Substract", "delta", "working", "days", "to", "the", "date", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L215-L244
245,436
peopledoc/workalendar
workalendar/core.py
Calendar.find_following_working_day
def find_following_working_day(self, day): """Looks for the following working day, if not already a working day. **WARNING**: this function doesn't take into account the calendar holidays, only the days of the week and the weekend days parameters. """ day = cleaned_date(day) while day.weekday() in self.get_weekend_days(): day = day + timedelta(days=1) return day
python
def find_following_working_day(self, day): day = cleaned_date(day) while day.weekday() in self.get_weekend_days(): day = day + timedelta(days=1) return day
[ "def", "find_following_working_day", "(", "self", ",", "day", ")", ":", "day", "=", "cleaned_date", "(", "day", ")", "while", "day", ".", "weekday", "(", ")", "in", "self", ".", "get_weekend_days", "(", ")", ":", "day", "=", "day", "+", "timedelta", "(...
Looks for the following working day, if not already a working day. **WARNING**: this function doesn't take into account the calendar holidays, only the days of the week and the weekend days parameters.
[ "Looks", "for", "the", "following", "working", "day", "if", "not", "already", "a", "working", "day", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L246-L256
245,437
peopledoc/workalendar
workalendar/core.py
Calendar.get_first_weekday_after
def get_first_weekday_after(day, weekday): """Get the first weekday after a given day. If the day is the same weekday, the same day will be returned. >>> # the first monday after Apr 1 2015 >>> Calendar.get_first_weekday_after(date(2015, 4, 1), MON) datetime.date(2015, 4, 6) >>> # the first tuesday after Apr 14 2015 >>> Calendar.get_first_weekday_after(date(2015, 4, 14), TUE) datetime.date(2015, 4, 14) """ day_delta = (weekday - day.weekday()) % 7 day = day + timedelta(days=day_delta) return day
python
def get_first_weekday_after(day, weekday): day_delta = (weekday - day.weekday()) % 7 day = day + timedelta(days=day_delta) return day
[ "def", "get_first_weekday_after", "(", "day", ",", "weekday", ")", ":", "day_delta", "=", "(", "weekday", "-", "day", ".", "weekday", "(", ")", ")", "%", "7", "day", "=", "day", "+", "timedelta", "(", "days", "=", "day_delta", ")", "return", "day" ]
Get the first weekday after a given day. If the day is the same weekday, the same day will be returned. >>> # the first monday after Apr 1 2015 >>> Calendar.get_first_weekday_after(date(2015, 4, 1), MON) datetime.date(2015, 4, 6) >>> # the first tuesday after Apr 14 2015 >>> Calendar.get_first_weekday_after(date(2015, 4, 14), TUE) datetime.date(2015, 4, 14)
[ "Get", "the", "first", "weekday", "after", "a", "given", "day", ".", "If", "the", "day", "is", "the", "same", "weekday", "the", "same", "day", "will", "be", "returned", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L304-L318
245,438
peopledoc/workalendar
workalendar/core.py
Calendar.get_working_days_delta
def get_working_days_delta(self, start, end): """ Return the number of working day between two given dates. The order of the dates provided doesn't matter. In the following example, there are 5 days, because of the week-end: >>> cal = WesternCalendar() # does not include easter monday >>> day1 = date(2018, 3, 29) >>> day2 = date(2018, 4, 5) >>> cal.get_working_days_delta(day1, day2) 5 In France, April 1st 2018 is a holiday because it's Easter monday: >>> cal = France() >>> cal.get_working_days_delta(day1, day2) 4 This method should even work if your ``start`` and ``end`` arguments are datetimes. """ start = cleaned_date(start) end = cleaned_date(end) if start == end: return 0 if start > end: start, end = end, start # Starting count here count = 0 while start < end: start += timedelta(days=1) if self.is_working_day(start): count += 1 return count
python
def get_working_days_delta(self, start, end): start = cleaned_date(start) end = cleaned_date(end) if start == end: return 0 if start > end: start, end = end, start # Starting count here count = 0 while start < end: start += timedelta(days=1) if self.is_working_day(start): count += 1 return count
[ "def", "get_working_days_delta", "(", "self", ",", "start", ",", "end", ")", ":", "start", "=", "cleaned_date", "(", "start", ")", "end", "=", "cleaned_date", "(", "end", ")", "if", "start", "==", "end", ":", "return", "0", "if", "start", ">", "end", ...
Return the number of working day between two given dates. The order of the dates provided doesn't matter. In the following example, there are 5 days, because of the week-end: >>> cal = WesternCalendar() # does not include easter monday >>> day1 = date(2018, 3, 29) >>> day2 = date(2018, 4, 5) >>> cal.get_working_days_delta(day1, day2) 5 In France, April 1st 2018 is a holiday because it's Easter monday: >>> cal = France() >>> cal.get_working_days_delta(day1, day2) 4 This method should even work if your ``start`` and ``end`` arguments are datetimes.
[ "Return", "the", "number", "of", "working", "day", "between", "two", "given", "dates", ".", "The", "order", "of", "the", "dates", "provided", "doesn", "t", "matter", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L320-L356
245,439
peopledoc/workalendar
workalendar/core.py
ChristianMixin.get_holy_thursday
def get_holy_thursday(self, year): "Return the date of the last thursday before easter" sunday = self.get_easter_sunday(year) return sunday - timedelta(days=3)
python
def get_holy_thursday(self, year): "Return the date of the last thursday before easter" sunday = self.get_easter_sunday(year) return sunday - timedelta(days=3)
[ "def", "get_holy_thursday", "(", "self", ",", "year", ")", ":", "sunday", "=", "self", ".", "get_easter_sunday", "(", "year", ")", "return", "sunday", "-", "timedelta", "(", "days", "=", "3", ")" ]
Return the date of the last thursday before easter
[ "Return", "the", "date", "of", "the", "last", "thursday", "before", "easter" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L397-L400
245,440
peopledoc/workalendar
workalendar/core.py
ChristianMixin.get_good_friday
def get_good_friday(self, year): "Return the date of the last friday before easter" sunday = self.get_easter_sunday(year) return sunday - timedelta(days=2)
python
def get_good_friday(self, year): "Return the date of the last friday before easter" sunday = self.get_easter_sunday(year) return sunday - timedelta(days=2)
[ "def", "get_good_friday", "(", "self", ",", "year", ")", ":", "sunday", "=", "self", ".", "get_easter_sunday", "(", "year", ")", "return", "sunday", "-", "timedelta", "(", "days", "=", "2", ")" ]
Return the date of the last friday before easter
[ "Return", "the", "date", "of", "the", "last", "friday", "before", "easter" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L402-L405
245,441
peopledoc/workalendar
workalendar/core.py
ChristianMixin.get_clean_monday
def get_clean_monday(self, year): "Return the clean monday date" sunday = self.get_easter_sunday(year) return sunday - timedelta(days=48)
python
def get_clean_monday(self, year): "Return the clean monday date" sunday = self.get_easter_sunday(year) return sunday - timedelta(days=48)
[ "def", "get_clean_monday", "(", "self", ",", "year", ")", ":", "sunday", "=", "self", ".", "get_easter_sunday", "(", "year", ")", "return", "sunday", "-", "timedelta", "(", "days", "=", "48", ")" ]
Return the clean monday date
[ "Return", "the", "clean", "monday", "date" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L407-L410
245,442
peopledoc/workalendar
workalendar/core.py
ChristianMixin.get_easter_saturday
def get_easter_saturday(self, year): "Return the Easter Saturday date" sunday = self.get_easter_sunday(year) return sunday - timedelta(days=1)
python
def get_easter_saturday(self, year): "Return the Easter Saturday date" sunday = self.get_easter_sunday(year) return sunday - timedelta(days=1)
[ "def", "get_easter_saturday", "(", "self", ",", "year", ")", ":", "sunday", "=", "self", ".", "get_easter_sunday", "(", "year", ")", "return", "sunday", "-", "timedelta", "(", "days", "=", "1", ")" ]
Return the Easter Saturday date
[ "Return", "the", "Easter", "Saturday", "date" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L412-L415
245,443
peopledoc/workalendar
workalendar/core.py
ChristianMixin.get_easter_monday
def get_easter_monday(self, year): "Return the date of the monday after easter" sunday = self.get_easter_sunday(year) return sunday + timedelta(days=1)
python
def get_easter_monday(self, year): "Return the date of the monday after easter" sunday = self.get_easter_sunday(year) return sunday + timedelta(days=1)
[ "def", "get_easter_monday", "(", "self", ",", "year", ")", ":", "sunday", "=", "self", ".", "get_easter_sunday", "(", "year", ")", "return", "sunday", "+", "timedelta", "(", "days", "=", "1", ")" ]
Return the date of the monday after easter
[ "Return", "the", "date", "of", "the", "monday", "after", "easter" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L421-L424
245,444
peopledoc/workalendar
workalendar/core.py
ChristianMixin.get_variable_days
def get_variable_days(self, year): # noqa "Return the christian holidays list according to the mixin" days = super(ChristianMixin, self).get_variable_days(year) if self.include_epiphany: days.append((date(year, 1, 6), "Epiphany")) if self.include_clean_monday: days.append((self.get_clean_monday(year), "Clean Monday")) if self.include_annunciation: days.append((date(year, 3, 25), "Annunciation")) if self.include_ash_wednesday: days.append( (self.get_ash_wednesday(year), self.ash_wednesday_label) ) if self.include_palm_sunday: days.append((self.get_palm_sunday(year), "Palm Sunday")) if self.include_holy_thursday: days.append((self.get_holy_thursday(year), "Holy Thursday")) if self.include_good_friday: days.append((self.get_good_friday(year), self.good_friday_label)) if self.include_easter_saturday: days.append((self.get_easter_saturday(year), "Easter Saturday")) if self.include_easter_sunday: days.append((self.get_easter_sunday(year), "Easter Sunday")) if self.include_easter_monday: days.append((self.get_easter_monday(year), "Easter Monday")) if self.include_assumption: days.append((date(year, 8, 15), "Assumption of Mary to Heaven")) if self.include_all_saints: days.append((date(year, 11, 1), "All Saints Day")) if self.include_all_souls: days.append((date(year, 11, 2), "All Souls Day")) if self.include_immaculate_conception: days.append((date(year, 12, 8), self.immaculate_conception_label)) if self.include_christmas: days.append((date(year, 12, 25), "Christmas Day")) if self.include_christmas_eve: days.append((date(year, 12, 24), "Christmas Eve")) if self.include_boxing_day: days.append((date(year, 12, 26), self.boxing_day_label)) if self.include_ascension: days.append(( self.get_ascension_thursday(year), "Ascension Thursday")) if self.include_whit_monday: days.append((self.get_whit_monday(year), self.whit_monday_label)) if self.include_whit_sunday: days.append((self.get_whit_sunday(year), self.whit_sunday_label)) if self.include_corpus_christi: 
days.append((self.get_corpus_christi(year), "Corpus Christi")) return days
python
def get_variable_days(self, year): # noqa "Return the christian holidays list according to the mixin" days = super(ChristianMixin, self).get_variable_days(year) if self.include_epiphany: days.append((date(year, 1, 6), "Epiphany")) if self.include_clean_monday: days.append((self.get_clean_monday(year), "Clean Monday")) if self.include_annunciation: days.append((date(year, 3, 25), "Annunciation")) if self.include_ash_wednesday: days.append( (self.get_ash_wednesday(year), self.ash_wednesday_label) ) if self.include_palm_sunday: days.append((self.get_palm_sunday(year), "Palm Sunday")) if self.include_holy_thursday: days.append((self.get_holy_thursday(year), "Holy Thursday")) if self.include_good_friday: days.append((self.get_good_friday(year), self.good_friday_label)) if self.include_easter_saturday: days.append((self.get_easter_saturday(year), "Easter Saturday")) if self.include_easter_sunday: days.append((self.get_easter_sunday(year), "Easter Sunday")) if self.include_easter_monday: days.append((self.get_easter_monday(year), "Easter Monday")) if self.include_assumption: days.append((date(year, 8, 15), "Assumption of Mary to Heaven")) if self.include_all_saints: days.append((date(year, 11, 1), "All Saints Day")) if self.include_all_souls: days.append((date(year, 11, 2), "All Souls Day")) if self.include_immaculate_conception: days.append((date(year, 12, 8), self.immaculate_conception_label)) if self.include_christmas: days.append((date(year, 12, 25), "Christmas Day")) if self.include_christmas_eve: days.append((date(year, 12, 24), "Christmas Eve")) if self.include_boxing_day: days.append((date(year, 12, 26), self.boxing_day_label)) if self.include_ascension: days.append(( self.get_ascension_thursday(year), "Ascension Thursday")) if self.include_whit_monday: days.append((self.get_whit_monday(year), self.whit_monday_label)) if self.include_whit_sunday: days.append((self.get_whit_sunday(year), self.whit_sunday_label)) if self.include_corpus_christi: 
days.append((self.get_corpus_christi(year), "Corpus Christi")) return days
[ "def", "get_variable_days", "(", "self", ",", "year", ")", ":", "# noqa", "days", "=", "super", "(", "ChristianMixin", ",", "self", ")", ".", "get_variable_days", "(", "year", ")", "if", "self", ".", "include_epiphany", ":", "days", ".", "append", "(", "...
Return the christian holidays list according to the mixin
[ "Return", "the", "christian", "holidays", "list", "according", "to", "the", "mixin" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L458-L506
245,445
peopledoc/workalendar
workalendar/core.py
ChineseNewYearCalendar.get_chinese_new_year
def get_chinese_new_year(self, year): """ Compute Chinese New Year days. To return a list of holidays. By default, it'll at least return the Chinese New Year holidays chosen using the following options: * ``include_chinese_new_year_eve`` * ``include_chinese_new_year`` (on by default) * ``include_chinese_second_day`` If the ``shift_sunday_holidays`` option is on, the rules are the following. * If the CNY1 falls on MON-FRI, there's not shift. * If the CNY1 falls on SAT, the CNY2 is shifted to the Monday after. * If the CNY1 falls on SUN, the CNY1 is shifted to the Monday after, and CNY2 is shifted to the Tuesday after. """ days = [] lunar_first_day = ChineseNewYearCalendar.lunar(year, 1, 1) # Chinese new year's eve if self.include_chinese_new_year_eve: days.append(( lunar_first_day - timedelta(days=1), self.chinese_new_year_eve_label )) # Chinese new year (is included by default) if self.include_chinese_new_year: days.append((lunar_first_day, self.chinese_new_year_label)) if self.include_chinese_second_day: lunar_second_day = lunar_first_day + timedelta(days=1) days.append(( lunar_second_day, self.chinese_second_day_label )) if self.include_chinese_third_day: lunar_third_day = lunar_first_day + timedelta(days=2) days.append(( lunar_third_day, self.chinese_third_day_label )) if self.shift_sunday_holidays: if lunar_first_day.weekday() == SUN: if self.shift_start_cny_sunday: days.append( (lunar_first_day - timedelta(days=1), "Chinese Lunar New Year shift"), ) else: if self.include_chinese_third_day: shift_day = lunar_third_day else: shift_day = lunar_second_day days.append( (shift_day + timedelta(days=1), "Chinese Lunar New Year shift"), ) if (lunar_second_day.weekday() == SUN and self.include_chinese_third_day): days.append( (lunar_third_day + timedelta(days=1), "Chinese Lunar New Year shift"), ) return days
python
def get_chinese_new_year(self, year): days = [] lunar_first_day = ChineseNewYearCalendar.lunar(year, 1, 1) # Chinese new year's eve if self.include_chinese_new_year_eve: days.append(( lunar_first_day - timedelta(days=1), self.chinese_new_year_eve_label )) # Chinese new year (is included by default) if self.include_chinese_new_year: days.append((lunar_first_day, self.chinese_new_year_label)) if self.include_chinese_second_day: lunar_second_day = lunar_first_day + timedelta(days=1) days.append(( lunar_second_day, self.chinese_second_day_label )) if self.include_chinese_third_day: lunar_third_day = lunar_first_day + timedelta(days=2) days.append(( lunar_third_day, self.chinese_third_day_label )) if self.shift_sunday_holidays: if lunar_first_day.weekday() == SUN: if self.shift_start_cny_sunday: days.append( (lunar_first_day - timedelta(days=1), "Chinese Lunar New Year shift"), ) else: if self.include_chinese_third_day: shift_day = lunar_third_day else: shift_day = lunar_second_day days.append( (shift_day + timedelta(days=1), "Chinese Lunar New Year shift"), ) if (lunar_second_day.weekday() == SUN and self.include_chinese_third_day): days.append( (lunar_third_day + timedelta(days=1), "Chinese Lunar New Year shift"), ) return days
[ "def", "get_chinese_new_year", "(", "self", ",", "year", ")", ":", "days", "=", "[", "]", "lunar_first_day", "=", "ChineseNewYearCalendar", ".", "lunar", "(", "year", ",", "1", ",", "1", ")", "# Chinese new year's eve", "if", "self", ".", "include_chinese_new_...
Compute Chinese New Year days. To return a list of holidays. By default, it'll at least return the Chinese New Year holidays chosen using the following options: * ``include_chinese_new_year_eve`` * ``include_chinese_new_year`` (on by default) * ``include_chinese_second_day`` If the ``shift_sunday_holidays`` option is on, the rules are the following. * If the CNY1 falls on MON-FRI, there's not shift. * If the CNY1 falls on SAT, the CNY2 is shifted to the Monday after. * If the CNY1 falls on SUN, the CNY1 is shifted to the Monday after, and CNY2 is shifted to the Tuesday after.
[ "Compute", "Chinese", "New", "Year", "days", ".", "To", "return", "a", "list", "of", "holidays", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L566-L633
245,446
peopledoc/workalendar
workalendar/core.py
ChineseNewYearCalendar.get_shifted_holidays
def get_shifted_holidays(self, dates): """ Taking a list of existing holidays, yield a list of 'shifted' days if the holiday falls on SUN. """ for holiday, label in dates: if holiday.weekday() == SUN: yield ( holiday + timedelta(days=1), label + ' shift' )
python
def get_shifted_holidays(self, dates): for holiday, label in dates: if holiday.weekday() == SUN: yield ( holiday + timedelta(days=1), label + ' shift' )
[ "def", "get_shifted_holidays", "(", "self", ",", "dates", ")", ":", "for", "holiday", ",", "label", "in", "dates", ":", "if", "holiday", ".", "weekday", "(", ")", "==", "SUN", ":", "yield", "(", "holiday", "+", "timedelta", "(", "days", "=", "1", ")"...
Taking a list of existing holidays, yield a list of 'shifted' days if the holiday falls on SUN.
[ "Taking", "a", "list", "of", "existing", "holidays", "yield", "a", "list", "of", "shifted", "days", "if", "the", "holiday", "falls", "on", "SUN", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L640-L650
245,447
peopledoc/workalendar
workalendar/core.py
ChineseNewYearCalendar.get_calendar_holidays
def get_calendar_holidays(self, year): """ Take into account the eventual shift to the next MON if any holiday falls on SUN. """ # Unshifted days are here: days = super(ChineseNewYearCalendar, self).get_calendar_holidays(year) if self.shift_sunday_holidays: days_to_inspect = copy(days) for day_shifted in self.get_shifted_holidays(days_to_inspect): days.append(day_shifted) return days
python
def get_calendar_holidays(self, year): # Unshifted days are here: days = super(ChineseNewYearCalendar, self).get_calendar_holidays(year) if self.shift_sunday_holidays: days_to_inspect = copy(days) for day_shifted in self.get_shifted_holidays(days_to_inspect): days.append(day_shifted) return days
[ "def", "get_calendar_holidays", "(", "self", ",", "year", ")", ":", "# Unshifted days are here:", "days", "=", "super", "(", "ChineseNewYearCalendar", ",", "self", ")", ".", "get_calendar_holidays", "(", "year", ")", "if", "self", ".", "shift_sunday_holidays", ":"...
Take into account the eventual shift to the next MON if any holiday falls on SUN.
[ "Take", "into", "account", "the", "eventual", "shift", "to", "the", "next", "MON", "if", "any", "holiday", "falls", "on", "SUN", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L652-L663
245,448
peopledoc/workalendar
workalendar/core.py
EphemMixin.calculate_equinoxes
def calculate_equinoxes(self, year, timezone='UTC'): """ calculate equinox with time zone """ tz = pytz.timezone(timezone) d1 = ephem.next_equinox(str(year)) d = ephem.Date(str(d1)) equinox1 = d.datetime() + tz.utcoffset(d.datetime()) d2 = ephem.next_equinox(d1) d = ephem.Date(str(d2)) equinox2 = d.datetime() + tz.utcoffset(d.datetime()) return (equinox1.date(), equinox2.date())
python
def calculate_equinoxes(self, year, timezone='UTC'): tz = pytz.timezone(timezone) d1 = ephem.next_equinox(str(year)) d = ephem.Date(str(d1)) equinox1 = d.datetime() + tz.utcoffset(d.datetime()) d2 = ephem.next_equinox(d1) d = ephem.Date(str(d2)) equinox2 = d.datetime() + tz.utcoffset(d.datetime()) return (equinox1.date(), equinox2.date())
[ "def", "calculate_equinoxes", "(", "self", ",", "year", ",", "timezone", "=", "'UTC'", ")", ":", "tz", "=", "pytz", ".", "timezone", "(", "timezone", ")", "d1", "=", "ephem", ".", "next_equinox", "(", "str", "(", "year", ")", ")", "d", "=", "ephem", ...
calculate equinox with time zone
[ "calculate", "equinox", "with", "time", "zone" ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L667-L680
245,449
peopledoc/workalendar
workalendar/core.py
EphemMixin.solar_term
def solar_term(self, year, degrees, timezone='UTC'): """ Returns the date of the solar term for the given longitude and the given year. Solar terms are used for Chinese and Taiwanese holidays (e.g. Qingming Festival in Taiwan). More information: - https://en.wikipedia.org/wiki/Solar_term - https://en.wikipedia.org/wiki/Qingming This function is adapted from the following topic: https://answers.launchpad.net/pyephem/+question/110832 """ twopi = 2 * pi tz = pytz.timezone(timezone) # Find out the sun's current longitude. sun = ephem.Sun(ephem.Date(str(year))) current_longitude = sun.hlong - pi # Find approximately the right time of year. target_longitude = degrees * ephem.degree difference = (target_longitude - current_longitude) % twopi t0 = ephem.Date(str(year)) + 365.25 * difference / twopi # Zero in on the exact moment. def f(t): sun.compute(t) longitude = sun.hlong - pi return ephem.degrees(target_longitude - longitude).znorm d = ephem.Date(ephem.newton(f, t0, t0 + ephem.minute)) solar_term = d.datetime() + tz.utcoffset(d.datetime()) return solar_term.date()
python
def solar_term(self, year, degrees, timezone='UTC'): twopi = 2 * pi tz = pytz.timezone(timezone) # Find out the sun's current longitude. sun = ephem.Sun(ephem.Date(str(year))) current_longitude = sun.hlong - pi # Find approximately the right time of year. target_longitude = degrees * ephem.degree difference = (target_longitude - current_longitude) % twopi t0 = ephem.Date(str(year)) + 365.25 * difference / twopi # Zero in on the exact moment. def f(t): sun.compute(t) longitude = sun.hlong - pi return ephem.degrees(target_longitude - longitude).znorm d = ephem.Date(ephem.newton(f, t0, t0 + ephem.minute)) solar_term = d.datetime() + tz.utcoffset(d.datetime()) return solar_term.date()
[ "def", "solar_term", "(", "self", ",", "year", ",", "degrees", ",", "timezone", "=", "'UTC'", ")", ":", "twopi", "=", "2", "*", "pi", "tz", "=", "pytz", ".", "timezone", "(", "timezone", ")", "# Find out the sun's current longitude.", "sun", "=", "ephem", ...
Returns the date of the solar term for the given longitude and the given year. Solar terms are used for Chinese and Taiwanese holidays (e.g. Qingming Festival in Taiwan). More information: - https://en.wikipedia.org/wiki/Solar_term - https://en.wikipedia.org/wiki/Qingming This function is adapted from the following topic: https://answers.launchpad.net/pyephem/+question/110832
[ "Returns", "the", "date", "of", "the", "solar", "term", "for", "the", "given", "longitude", "and", "the", "given", "year", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/core.py#L682-L721
245,450
peopledoc/workalendar
workalendar/europe/scotland/__init__.py
Edinburgh.get_spring_holiday
def get_spring_holiday(self, year): """ Return Spring Holiday for Edinburgh. Set to the 3rd Monday of April, unless it falls on Easter Monday, then it's shifted to previous week. """ easter = self.get_easter_monday(year) spring_holiday = self.get_nth_weekday_in_month(year, 4, MON, 3) if easter == spring_holiday: spring_holiday = self.get_nth_weekday_in_month( year, 4, MON, 2) return (spring_holiday, self.spring_holiday_label)
python
def get_spring_holiday(self, year): easter = self.get_easter_monday(year) spring_holiday = self.get_nth_weekday_in_month(year, 4, MON, 3) if easter == spring_holiday: spring_holiday = self.get_nth_weekday_in_month( year, 4, MON, 2) return (spring_holiday, self.spring_holiday_label)
[ "def", "get_spring_holiday", "(", "self", ",", "year", ")", ":", "easter", "=", "self", ".", "get_easter_monday", "(", "year", ")", "spring_holiday", "=", "self", ".", "get_nth_weekday_in_month", "(", "year", ",", "4", ",", "MON", ",", "3", ")", "if", "e...
Return Spring Holiday for Edinburgh. Set to the 3rd Monday of April, unless it falls on Easter Monday, then it's shifted to previous week.
[ "Return", "Spring", "Holiday", "for", "Edinburgh", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/europe/scotland/__init__.py#L230-L243
245,451
peopledoc/workalendar
workalendar/europe/scotland/__init__.py
Edinburgh.get_victoria_day
def get_victoria_day(self, year): """ Return Victoria Day for Edinburgh. Set to the Monday strictly before May 24th. It means that if May 24th is a Monday, it's shifted to the week before. """ may_24th = date(year, 5, 24) # Since "MON(day) == 0", it's either the difference between MON and the # current weekday (starting at 0), or 7 days before the May 24th shift = may_24th.weekday() or 7 victoria_day = may_24th - timedelta(days=shift) return (victoria_day, "Victoria Day")
python
def get_victoria_day(self, year): may_24th = date(year, 5, 24) # Since "MON(day) == 0", it's either the difference between MON and the # current weekday (starting at 0), or 7 days before the May 24th shift = may_24th.weekday() or 7 victoria_day = may_24th - timedelta(days=shift) return (victoria_day, "Victoria Day")
[ "def", "get_victoria_day", "(", "self", ",", "year", ")", ":", "may_24th", "=", "date", "(", "year", ",", "5", ",", "24", ")", "# Since \"MON(day) == 0\", it's either the difference between MON and the", "# current weekday (starting at 0), or 7 days before the May 24th", "shi...
Return Victoria Day for Edinburgh. Set to the Monday strictly before May 24th. It means that if May 24th is a Monday, it's shifted to the week before.
[ "Return", "Victoria", "Day", "for", "Edinburgh", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/workalendar/europe/scotland/__init__.py#L245-L257
245,452
peopledoc/workalendar
setup.py
read_relative_file
def read_relative_file(filename): """ Return the contents of the given file. Its path is supposed relative to this module. """ path = join(dirname(abspath(__file__)), filename) with io.open(path, encoding='utf-8') as f: return f.read()
python
def read_relative_file(filename): path = join(dirname(abspath(__file__)), filename) with io.open(path, encoding='utf-8') as f: return f.read()
[ "def", "read_relative_file", "(", "filename", ")", ":", "path", "=", "join", "(", "dirname", "(", "abspath", "(", "__file__", ")", ")", ",", "filename", ")", "with", "io", ".", "open", "(", "path", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":"...
Return the contents of the given file. Its path is supposed relative to this module.
[ "Return", "the", "contents", "of", "the", "given", "file", "." ]
d044d5dfc1709ec388db34dab583dd554cc66c4e
https://github.com/peopledoc/workalendar/blob/d044d5dfc1709ec388db34dab583dd554cc66c4e/setup.py#L8-L16
245,453
bmuller/kademlia
kademlia/routing.py
RoutingTable.lonely_buckets
def lonely_buckets(self): """ Get all of the buckets that haven't been updated in over an hour. """ hrago = time.monotonic() - 3600 return [b for b in self.buckets if b.last_updated < hrago]
python
def lonely_buckets(self): hrago = time.monotonic() - 3600 return [b for b in self.buckets if b.last_updated < hrago]
[ "def", "lonely_buckets", "(", "self", ")", ":", "hrago", "=", "time", ".", "monotonic", "(", ")", "-", "3600", "return", "[", "b", "for", "b", "in", "self", ".", "buckets", "if", "b", ".", "last_updated", "<", "hrago", "]" ]
Get all of the buckets that haven't been updated in over an hour.
[ "Get", "all", "of", "the", "buckets", "that", "haven", "t", "been", "updated", "in", "over", "an", "hour", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/routing.py#L140-L146
245,454
bmuller/kademlia
kademlia/routing.py
RoutingTable.get_bucket_for
def get_bucket_for(self, node): """ Get the index of the bucket that the given node would fall into. """ for index, bucket in enumerate(self.buckets): if node.long_id < bucket.range[1]: return index # we should never be here, but make linter happy return None
python
def get_bucket_for(self, node): for index, bucket in enumerate(self.buckets): if node.long_id < bucket.range[1]: return index # we should never be here, but make linter happy return None
[ "def", "get_bucket_for", "(", "self", ",", "node", ")", ":", "for", "index", ",", "bucket", "in", "enumerate", "(", "self", ".", "buckets", ")", ":", "if", "node", ".", "long_id", "<", "bucket", ".", "range", "[", "1", "]", ":", "return", "index", ...
Get the index of the bucket that the given node would fall into.
[ "Get", "the", "index", "of", "the", "bucket", "that", "the", "given", "node", "would", "fall", "into", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/routing.py#L172-L180
245,455
bmuller/kademlia
kademlia/crawling.py
SpiderCrawl._find
async def _find(self, rpcmethod): """ Get either a value or list of nodes. Args: rpcmethod: The protocol's callfindValue or call_find_node. The process: 1. calls find_* to current ALPHA nearest not already queried nodes, adding results to current nearest list of k nodes. 2. current nearest list needs to keep track of who has been queried already sort by nearest, keep KSIZE 3. if list is same as last time, next call should be to everyone not yet queried 4. repeat, unless nearest list has all been queried, then ur done """ log.info("crawling network with nearest: %s", str(tuple(self.nearest))) count = self.alpha if self.nearest.get_ids() == self.last_ids_crawled: count = len(self.nearest) self.last_ids_crawled = self.nearest.get_ids() dicts = {} for peer in self.nearest.get_uncontacted()[:count]: dicts[peer.id] = rpcmethod(peer, self.node) self.nearest.mark_contacted(peer) found = await gather_dict(dicts) return await self._nodes_found(found)
python
async def _find(self, rpcmethod): log.info("crawling network with nearest: %s", str(tuple(self.nearest))) count = self.alpha if self.nearest.get_ids() == self.last_ids_crawled: count = len(self.nearest) self.last_ids_crawled = self.nearest.get_ids() dicts = {} for peer in self.nearest.get_uncontacted()[:count]: dicts[peer.id] = rpcmethod(peer, self.node) self.nearest.mark_contacted(peer) found = await gather_dict(dicts) return await self._nodes_found(found)
[ "async", "def", "_find", "(", "self", ",", "rpcmethod", ")", ":", "log", ".", "info", "(", "\"crawling network with nearest: %s\"", ",", "str", "(", "tuple", "(", "self", ".", "nearest", ")", ")", ")", "count", "=", "self", ".", "alpha", "if", "self", ...
Get either a value or list of nodes. Args: rpcmethod: The protocol's callfindValue or call_find_node. The process: 1. calls find_* to current ALPHA nearest not already queried nodes, adding results to current nearest list of k nodes. 2. current nearest list needs to keep track of who has been queried already sort by nearest, keep KSIZE 3. if list is same as last time, next call should be to everyone not yet queried 4. repeat, unless nearest list has all been queried, then ur done
[ "Get", "either", "a", "value", "or", "list", "of", "nodes", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/crawling.py#L38-L65
245,456
bmuller/kademlia
kademlia/network.py
check_dht_value_type
def check_dht_value_type(value): """ Checks to see if the type of the value is a valid type for placing in the dht. """ typeset = [ int, float, bool, str, bytes ] return type(value) in typeset
python
def check_dht_value_type(value): typeset = [ int, float, bool, str, bytes ] return type(value) in typeset
[ "def", "check_dht_value_type", "(", "value", ")", ":", "typeset", "=", "[", "int", ",", "float", ",", "bool", ",", "str", ",", "bytes", "]", "return", "type", "(", "value", ")", "in", "typeset" ]
Checks to see if the type of the value is a valid type for placing in the dht.
[ "Checks", "to", "see", "if", "the", "type", "of", "the", "value", "is", "a", "valid", "type", "for", "placing", "in", "the", "dht", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/network.py#L245-L257
245,457
bmuller/kademlia
kademlia/network.py
Server.listen
async def listen(self, port, interface='0.0.0.0'): """ Start listening on the given port. Provide interface="::" to accept ipv6 address """ loop = asyncio.get_event_loop() listen = loop.create_datagram_endpoint(self._create_protocol, local_addr=(interface, port)) log.info("Node %i listening on %s:%i", self.node.long_id, interface, port) self.transport, self.protocol = await listen # finally, schedule refreshing table self.refresh_table()
python
async def listen(self, port, interface='0.0.0.0'): loop = asyncio.get_event_loop() listen = loop.create_datagram_endpoint(self._create_protocol, local_addr=(interface, port)) log.info("Node %i listening on %s:%i", self.node.long_id, interface, port) self.transport, self.protocol = await listen # finally, schedule refreshing table self.refresh_table()
[ "async", "def", "listen", "(", "self", ",", "port", ",", "interface", "=", "'0.0.0.0'", ")", ":", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "listen", "=", "loop", ".", "create_datagram_endpoint", "(", "self", ".", "_create_protocol", ",", "l...
Start listening on the given port. Provide interface="::" to accept ipv6 address
[ "Start", "listening", "on", "the", "given", "port", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/network.py#L61-L74
245,458
bmuller/kademlia
kademlia/network.py
Server.bootstrap
async def bootstrap(self, addrs): """ Bootstrap the server by connecting to other known nodes in the network. Args: addrs: A `list` of (ip, port) `tuple` pairs. Note that only IP addresses are acceptable - hostnames will cause an error. """ log.debug("Attempting to bootstrap node with %i initial contacts", len(addrs)) cos = list(map(self.bootstrap_node, addrs)) gathered = await asyncio.gather(*cos) nodes = [node for node in gathered if node is not None] spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha) return await spider.find()
python
async def bootstrap(self, addrs): log.debug("Attempting to bootstrap node with %i initial contacts", len(addrs)) cos = list(map(self.bootstrap_node, addrs)) gathered = await asyncio.gather(*cos) nodes = [node for node in gathered if node is not None] spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha) return await spider.find()
[ "async", "def", "bootstrap", "(", "self", ",", "addrs", ")", ":", "log", ".", "debug", "(", "\"Attempting to bootstrap node with %i initial contacts\"", ",", "len", "(", "addrs", ")", ")", "cos", "=", "list", "(", "map", "(", "self", ".", "bootstrap_node", "...
Bootstrap the server by connecting to other known nodes in the network. Args: addrs: A `list` of (ip, port) `tuple` pairs. Note that only IP addresses are acceptable - hostnames will cause an error.
[ "Bootstrap", "the", "server", "by", "connecting", "to", "other", "known", "nodes", "in", "the", "network", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/network.py#L115-L130
245,459
bmuller/kademlia
kademlia/network.py
Server.get
async def get(self, key): """ Get a key if the network has it. Returns: :class:`None` if not found, the value otherwise. """ log.info("Looking up key %s", key) dkey = digest(key) # if this node has it, return it if self.storage.get(dkey) is not None: return self.storage.get(dkey) node = Node(dkey) nearest = self.protocol.router.find_neighbors(node) if not nearest: log.warning("There are no known neighbors to get key %s", key) return None spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha) return await spider.find()
python
async def get(self, key): log.info("Looking up key %s", key) dkey = digest(key) # if this node has it, return it if self.storage.get(dkey) is not None: return self.storage.get(dkey) node = Node(dkey) nearest = self.protocol.router.find_neighbors(node) if not nearest: log.warning("There are no known neighbors to get key %s", key) return None spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha) return await spider.find()
[ "async", "def", "get", "(", "self", ",", "key", ")", ":", "log", ".", "info", "(", "\"Looking up key %s\"", ",", "key", ")", "dkey", "=", "digest", "(", "key", ")", "# if this node has it, return it", "if", "self", ".", "storage", ".", "get", "(", "dkey"...
Get a key if the network has it. Returns: :class:`None` if not found, the value otherwise.
[ "Get", "a", "key", "if", "the", "network", "has", "it", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/network.py#L136-L155
245,460
bmuller/kademlia
kademlia/network.py
Server.set
async def set(self, key, value): """ Set the given string key to the given value in the network. """ if not check_dht_value_type(value): raise TypeError( "Value must be of type int, float, bool, str, or bytes" ) log.info("setting '%s' = '%s' on network", key, value) dkey = digest(key) return await self.set_digest(dkey, value)
python
async def set(self, key, value): if not check_dht_value_type(value): raise TypeError( "Value must be of type int, float, bool, str, or bytes" ) log.info("setting '%s' = '%s' on network", key, value) dkey = digest(key) return await self.set_digest(dkey, value)
[ "async", "def", "set", "(", "self", ",", "key", ",", "value", ")", ":", "if", "not", "check_dht_value_type", "(", "value", ")", ":", "raise", "TypeError", "(", "\"Value must be of type int, float, bool, str, or bytes\"", ")", "log", ".", "info", "(", "\"setting ...
Set the given string key to the given value in the network.
[ "Set", "the", "given", "string", "key", "to", "the", "given", "value", "in", "the", "network", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/network.py#L157-L167
245,461
bmuller/kademlia
kademlia/network.py
Server.save_state_regularly
def save_state_regularly(self, fname, frequency=600): """ Save the state of node with a given regularity to the given filename. Args: fname: File name to save retularly to frequency: Frequency in seconds that the state should be saved. By default, 10 minutes. """ self.save_state(fname) loop = asyncio.get_event_loop() self.save_state_loop = loop.call_later(frequency, self.save_state_regularly, fname, frequency)
python
def save_state_regularly(self, fname, frequency=600): self.save_state(fname) loop = asyncio.get_event_loop() self.save_state_loop = loop.call_later(frequency, self.save_state_regularly, fname, frequency)
[ "def", "save_state_regularly", "(", "self", ",", "fname", ",", "frequency", "=", "600", ")", ":", "self", ".", "save_state", "(", "fname", ")", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "self", ".", "save_state_loop", "=", "loop", ".", "ca...
Save the state of node with a given regularity to the given filename. Args: fname: File name to save retularly to frequency: Frequency in seconds that the state should be saved. By default, 10 minutes.
[ "Save", "the", "state", "of", "node", "with", "a", "given", "regularity", "to", "the", "given", "filename", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/network.py#L227-L242
245,462
bmuller/kademlia
kademlia/node.py
NodeHeap.push
def push(self, nodes): """ Push nodes onto heap. @param nodes: This can be a single item or a C{list}. """ if not isinstance(nodes, list): nodes = [nodes] for node in nodes: if node not in self: distance = self.node.distance_to(node) heapq.heappush(self.heap, (distance, node))
python
def push(self, nodes): if not isinstance(nodes, list): nodes = [nodes] for node in nodes: if node not in self: distance = self.node.distance_to(node) heapq.heappush(self.heap, (distance, node))
[ "def", "push", "(", "self", ",", "nodes", ")", ":", "if", "not", "isinstance", "(", "nodes", ",", "list", ")", ":", "nodes", "=", "[", "nodes", "]", "for", "node", "in", "nodes", ":", "if", "node", "not", "in", "self", ":", "distance", "=", "self...
Push nodes onto heap. @param nodes: This can be a single item or a C{list}.
[ "Push", "nodes", "onto", "heap", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/node.py#L85-L97
245,463
bmuller/kademlia
kademlia/utils.py
shared_prefix
def shared_prefix(args): """ Find the shared prefix between the strings. For instance: sharedPrefix(['blahblah', 'blahwhat']) returns 'blah'. """ i = 0 while i < min(map(len, args)): if len(set(map(operator.itemgetter(i), args))) != 1: break i += 1 return args[0][:i]
python
def shared_prefix(args): i = 0 while i < min(map(len, args)): if len(set(map(operator.itemgetter(i), args))) != 1: break i += 1 return args[0][:i]
[ "def", "shared_prefix", "(", "args", ")", ":", "i", "=", "0", "while", "i", "<", "min", "(", "map", "(", "len", ",", "args", ")", ")", ":", "if", "len", "(", "set", "(", "map", "(", "operator", ".", "itemgetter", "(", "i", ")", ",", "args", "...
Find the shared prefix between the strings. For instance: sharedPrefix(['blahblah', 'blahwhat']) returns 'blah'.
[ "Find", "the", "shared", "prefix", "between", "the", "strings", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/utils.py#L21-L36
245,464
bmuller/kademlia
kademlia/protocol.py
KademliaProtocol.get_refresh_ids
def get_refresh_ids(self): """ Get ids to search for to keep old buckets up to date. """ ids = [] for bucket in self.router.lonely_buckets(): rid = random.randint(*bucket.range).to_bytes(20, byteorder='big') ids.append(rid) return ids
python
def get_refresh_ids(self): ids = [] for bucket in self.router.lonely_buckets(): rid = random.randint(*bucket.range).to_bytes(20, byteorder='big') ids.append(rid) return ids
[ "def", "get_refresh_ids", "(", "self", ")", ":", "ids", "=", "[", "]", "for", "bucket", "in", "self", ".", "router", ".", "lonely_buckets", "(", ")", ":", "rid", "=", "random", ".", "randint", "(", "*", "bucket", ".", "range", ")", ".", "to_bytes", ...
Get ids to search for to keep old buckets up to date.
[ "Get", "ids", "to", "search", "for", "to", "keep", "old", "buckets", "up", "to", "date", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/protocol.py#L21-L29
245,465
bmuller/kademlia
kademlia/protocol.py
KademliaProtocol.handle_call_response
def handle_call_response(self, result, node): """ If we get a response, add the node to the routing table. If we get no response, make sure it's removed from the routing table. """ if not result[0]: log.warning("no response from %s, removing from router", node) self.router.remove_contact(node) return result log.info("got successful response from %s", node) self.welcome_if_new(node) return result
python
def handle_call_response(self, result, node): if not result[0]: log.warning("no response from %s, removing from router", node) self.router.remove_contact(node) return result log.info("got successful response from %s", node) self.welcome_if_new(node) return result
[ "def", "handle_call_response", "(", "self", ",", "result", ",", "node", ")", ":", "if", "not", "result", "[", "0", "]", ":", "log", ".", "warning", "(", "\"no response from %s, removing from router\"", ",", "node", ")", "self", ".", "router", ".", "remove_co...
If we get a response, add the node to the routing table. If we get no response, make sure it's removed from the routing table.
[ "If", "we", "get", "a", "response", "add", "the", "node", "to", "the", "routing", "table", ".", "If", "we", "get", "no", "response", "make", "sure", "it", "s", "removed", "from", "the", "routing", "table", "." ]
4a8d445c9ee8f3ca10f56107e4445daed4933c8a
https://github.com/bmuller/kademlia/blob/4a8d445c9ee8f3ca10f56107e4445daed4933c8a/kademlia/protocol.py#L116-L128
245,466
adafruit/Adafruit_CircuitPython_NeoPixel
neopixel.py
NeoPixel.deinit
def deinit(self): """Blank out the NeoPixels and release the pin.""" for i in range(len(self.buf)): self.buf[i] = 0 neopixel_write(self.pin, self.buf) self.pin.deinit()
python
def deinit(self): for i in range(len(self.buf)): self.buf[i] = 0 neopixel_write(self.pin, self.buf) self.pin.deinit()
[ "def", "deinit", "(", "self", ")", ":", "for", "i", "in", "range", "(", "len", "(", "self", ".", "buf", ")", ")", ":", "self", ".", "buf", "[", "i", "]", "=", "0", "neopixel_write", "(", "self", ".", "pin", ",", "self", ".", "buf", ")", "self...
Blank out the NeoPixels and release the pin.
[ "Blank", "out", "the", "NeoPixels", "and", "release", "the", "pin", "." ]
c0ed34813a608b64ed044826553918ddbad12f0c
https://github.com/adafruit/Adafruit_CircuitPython_NeoPixel/blob/c0ed34813a608b64ed044826553918ddbad12f0c/neopixel.py#L107-L112
245,467
adafruit/Adafruit_CircuitPython_NeoPixel
neopixel.py
NeoPixel.show
def show(self): """Shows the new colors on the pixels themselves if they haven't already been autowritten. The colors may or may not be showing after this function returns because it may be done asynchronously.""" if self.brightness > 0.99: neopixel_write(self.pin, self.buf) else: neopixel_write(self.pin, bytearray([int(i * self.brightness) for i in self.buf]))
python
def show(self): if self.brightness > 0.99: neopixel_write(self.pin, self.buf) else: neopixel_write(self.pin, bytearray([int(i * self.brightness) for i in self.buf]))
[ "def", "show", "(", "self", ")", ":", "if", "self", ".", "brightness", ">", "0.99", ":", "neopixel_write", "(", "self", ".", "pin", ",", "self", ".", "buf", ")", "else", ":", "neopixel_write", "(", "self", ".", "pin", ",", "bytearray", "(", "[", "i...
Shows the new colors on the pixels themselves if they haven't already been autowritten. The colors may or may not be showing after this function returns because it may be done asynchronously.
[ "Shows", "the", "new", "colors", "on", "the", "pixels", "themselves", "if", "they", "haven", "t", "already", "been", "autowritten", "." ]
c0ed34813a608b64ed044826553918ddbad12f0c
https://github.com/adafruit/Adafruit_CircuitPython_NeoPixel/blob/c0ed34813a608b64ed044826553918ddbad12f0c/neopixel.py#L223-L232
245,468
dask/dask-kubernetes
dask_kubernetes/objects.py
_set_k8s_attribute
def _set_k8s_attribute(obj, attribute, value): """ Set a specific value on a kubernetes object's attribute obj an object from Kubernetes Python API client attribute Should be a Kubernetes API style attribute (with camelCase) value Can be anything (string, list, dict, k8s objects) that can be accepted by the k8s python client """ current_value = None attribute_name = None # All k8s python client objects have an 'attribute_map' property # which has as keys python style attribute names (api_client) # and as values the kubernetes JSON API style attribute names # (apiClient). We want to allow users to use the JSON API style attribute # names only. for python_attribute, json_attribute in obj.attribute_map.items(): if json_attribute == attribute: attribute_name = python_attribute break else: raise ValueError('Attribute must be one of {}'.format(obj.attribute_map.values())) if hasattr(obj, attribute_name): current_value = getattr(obj, attribute_name) if current_value is not None: # This will ensure that current_value is something JSONable, # so a dict, list, or scalar current_value = SERIALIZATION_API_CLIENT.sanitize_for_serialization( current_value ) if isinstance(current_value, dict): # Deep merge our dictionaries! setattr(obj, attribute_name, merge_dictionaries(current_value, value)) elif isinstance(current_value, list): # Just append lists setattr(obj, attribute_name, current_value + value) else: # Replace everything else setattr(obj, attribute_name, value)
python
def _set_k8s_attribute(obj, attribute, value): current_value = None attribute_name = None # All k8s python client objects have an 'attribute_map' property # which has as keys python style attribute names (api_client) # and as values the kubernetes JSON API style attribute names # (apiClient). We want to allow users to use the JSON API style attribute # names only. for python_attribute, json_attribute in obj.attribute_map.items(): if json_attribute == attribute: attribute_name = python_attribute break else: raise ValueError('Attribute must be one of {}'.format(obj.attribute_map.values())) if hasattr(obj, attribute_name): current_value = getattr(obj, attribute_name) if current_value is not None: # This will ensure that current_value is something JSONable, # so a dict, list, or scalar current_value = SERIALIZATION_API_CLIENT.sanitize_for_serialization( current_value ) if isinstance(current_value, dict): # Deep merge our dictionaries! setattr(obj, attribute_name, merge_dictionaries(current_value, value)) elif isinstance(current_value, list): # Just append lists setattr(obj, attribute_name, current_value + value) else: # Replace everything else setattr(obj, attribute_name, value)
[ "def", "_set_k8s_attribute", "(", "obj", ",", "attribute", ",", "value", ")", ":", "current_value", "=", "None", "attribute_name", "=", "None", "# All k8s python client objects have an 'attribute_map' property", "# which has as keys python style attribute names (api_client)", "# ...
Set a specific value on a kubernetes object's attribute obj an object from Kubernetes Python API client attribute Should be a Kubernetes API style attribute (with camelCase) value Can be anything (string, list, dict, k8s objects) that can be accepted by the k8s python client
[ "Set", "a", "specific", "value", "on", "a", "kubernetes", "object", "s", "attribute" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/objects.py#L20-L64
245,469
dask/dask-kubernetes
dask_kubernetes/objects.py
merge_dictionaries
def merge_dictionaries(a, b, path=None, update=True): """ Merge two dictionaries recursively. From https://stackoverflow.com/a/25270947 """ if path is None: path = [] for key in b: if key in a: if isinstance(a[key], dict) and isinstance(b[key], dict): merge_dictionaries(a[key], b[key], path + [str(key)]) elif a[key] == b[key]: pass # same leaf value elif isinstance(a[key], list) and isinstance(b[key], list): for idx, val in enumerate(b[key]): a[key][idx] = merge_dictionaries(a[key][idx], b[key][idx], path + [str(key), str(idx)], update=update) elif update: a[key] = b[key] else: raise Exception('Conflict at %s' % '.'.join(path + [str(key)])) else: a[key] = b[key] return a
python
def merge_dictionaries(a, b, path=None, update=True): if path is None: path = [] for key in b: if key in a: if isinstance(a[key], dict) and isinstance(b[key], dict): merge_dictionaries(a[key], b[key], path + [str(key)]) elif a[key] == b[key]: pass # same leaf value elif isinstance(a[key], list) and isinstance(b[key], list): for idx, val in enumerate(b[key]): a[key][idx] = merge_dictionaries(a[key][idx], b[key][idx], path + [str(key), str(idx)], update=update) elif update: a[key] = b[key] else: raise Exception('Conflict at %s' % '.'.join(path + [str(key)])) else: a[key] = b[key] return a
[ "def", "merge_dictionaries", "(", "a", ",", "b", ",", "path", "=", "None", ",", "update", "=", "True", ")", ":", "if", "path", "is", "None", ":", "path", "=", "[", "]", "for", "key", "in", "b", ":", "if", "key", "in", "a", ":", "if", "isinstanc...
Merge two dictionaries recursively. From https://stackoverflow.com/a/25270947
[ "Merge", "two", "dictionaries", "recursively", "." ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/objects.py#L67-L93
245,470
dask/dask-kubernetes
dask_kubernetes/objects.py
make_pod_spec
def make_pod_spec( image, labels={}, threads_per_worker=1, env={}, extra_container_config={}, extra_pod_config={}, memory_limit=None, memory_request=None, cpu_limit=None, cpu_request=None, ): """ Create generic pod template from input parameters Examples -------- >>> make_pod_spec(image='daskdev/dask:latest', memory_limit='4G', memory_request='4G') """ args = [ 'dask-worker', '$(DASK_SCHEDULER_ADDRESS)', '--nthreads', str(threads_per_worker), '--death-timeout', '60', ] if memory_limit: args.extend(['--memory-limit', str(memory_limit)]) pod = client.V1Pod( metadata=client.V1ObjectMeta( labels=labels ), spec=client.V1PodSpec( restart_policy='Never', containers=[ client.V1Container( name='dask-worker', image=image, args=args, env=[client.V1EnvVar(name=k, value=v) for k, v in env.items()], ) ], tolerations=[ client.V1Toleration( key='k8s.dask.org/dedicated', operator='Equal', value='worker', effect='NoSchedule', ), # GKE currently does not permit creating taints on a node pool # with a `/` in the key field client.V1Toleration( key='k8s.dask.org_dedicated', operator='Equal', value='worker', effect='NoSchedule', ), ] ) ) resources = client.V1ResourceRequirements(limits={}, requests={}) if cpu_request: resources.requests['cpu'] = cpu_request if memory_request: resources.requests['memory'] = memory_request if cpu_limit: resources.limits['cpu'] = cpu_limit if memory_limit: resources.limits['memory'] = memory_limit pod.spec.containers[0].resources = resources for key, value in extra_container_config.items(): _set_k8s_attribute( pod.spec.containers[0], key, value ) for key, value in extra_pod_config.items(): _set_k8s_attribute( pod.spec, key, value ) return pod
python
def make_pod_spec( image, labels={}, threads_per_worker=1, env={}, extra_container_config={}, extra_pod_config={}, memory_limit=None, memory_request=None, cpu_limit=None, cpu_request=None, ): args = [ 'dask-worker', '$(DASK_SCHEDULER_ADDRESS)', '--nthreads', str(threads_per_worker), '--death-timeout', '60', ] if memory_limit: args.extend(['--memory-limit', str(memory_limit)]) pod = client.V1Pod( metadata=client.V1ObjectMeta( labels=labels ), spec=client.V1PodSpec( restart_policy='Never', containers=[ client.V1Container( name='dask-worker', image=image, args=args, env=[client.V1EnvVar(name=k, value=v) for k, v in env.items()], ) ], tolerations=[ client.V1Toleration( key='k8s.dask.org/dedicated', operator='Equal', value='worker', effect='NoSchedule', ), # GKE currently does not permit creating taints on a node pool # with a `/` in the key field client.V1Toleration( key='k8s.dask.org_dedicated', operator='Equal', value='worker', effect='NoSchedule', ), ] ) ) resources = client.V1ResourceRequirements(limits={}, requests={}) if cpu_request: resources.requests['cpu'] = cpu_request if memory_request: resources.requests['memory'] = memory_request if cpu_limit: resources.limits['cpu'] = cpu_limit if memory_limit: resources.limits['memory'] = memory_limit pod.spec.containers[0].resources = resources for key, value in extra_container_config.items(): _set_k8s_attribute( pod.spec.containers[0], key, value ) for key, value in extra_pod_config.items(): _set_k8s_attribute( pod.spec, key, value ) return pod
[ "def", "make_pod_spec", "(", "image", ",", "labels", "=", "{", "}", ",", "threads_per_worker", "=", "1", ",", "env", "=", "{", "}", ",", "extra_container_config", "=", "{", "}", ",", "extra_pod_config", "=", "{", "}", ",", "memory_limit", "=", "None", ...
Create generic pod template from input parameters Examples -------- >>> make_pod_spec(image='daskdev/dask:latest', memory_limit='4G', memory_request='4G')
[ "Create", "generic", "pod", "template", "from", "input", "parameters" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/objects.py#L96-L184
245,471
dask/dask-kubernetes
dask_kubernetes/objects.py
clean_pod_template
def clean_pod_template(pod_template): """ Normalize pod template and check for type errors """ if isinstance(pod_template, str): msg = ('Expected a kubernetes.client.V1Pod object, got %s' 'If trying to pass a yaml filename then use ' 'KubeCluster.from_yaml') raise TypeError(msg % pod_template) if isinstance(pod_template, dict): msg = ('Expected a kubernetes.client.V1Pod object, got %s' 'If trying to pass a dictionary specification then use ' 'KubeCluster.from_dict') raise TypeError(msg % str(pod_template)) pod_template = copy.deepcopy(pod_template) # Make sure metadata / labels / env objects exist, so they can be modified # later without a lot of `is None` checks if pod_template.metadata is None: pod_template.metadata = client.V1ObjectMeta() if pod_template.metadata.labels is None: pod_template.metadata.labels = {} if pod_template.spec.containers[0].env is None: pod_template.spec.containers[0].env = [] return pod_template
python
def clean_pod_template(pod_template): if isinstance(pod_template, str): msg = ('Expected a kubernetes.client.V1Pod object, got %s' 'If trying to pass a yaml filename then use ' 'KubeCluster.from_yaml') raise TypeError(msg % pod_template) if isinstance(pod_template, dict): msg = ('Expected a kubernetes.client.V1Pod object, got %s' 'If trying to pass a dictionary specification then use ' 'KubeCluster.from_dict') raise TypeError(msg % str(pod_template)) pod_template = copy.deepcopy(pod_template) # Make sure metadata / labels / env objects exist, so they can be modified # later without a lot of `is None` checks if pod_template.metadata is None: pod_template.metadata = client.V1ObjectMeta() if pod_template.metadata.labels is None: pod_template.metadata.labels = {} if pod_template.spec.containers[0].env is None: pod_template.spec.containers[0].env = [] return pod_template
[ "def", "clean_pod_template", "(", "pod_template", ")", ":", "if", "isinstance", "(", "pod_template", ",", "str", ")", ":", "msg", "=", "(", "'Expected a kubernetes.client.V1Pod object, got %s'", "'If trying to pass a yaml filename then use '", "'KubeCluster.from_yaml'", ")", ...
Normalize pod template and check for type errors
[ "Normalize", "pod", "template", "and", "check", "for", "type", "errors" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/objects.py#L199-L225
245,472
dask/dask-kubernetes
dask_kubernetes/core.py
_cleanup_pods
def _cleanup_pods(namespace, labels): """ Remove all pods with these labels in this namespace """ api = kubernetes.client.CoreV1Api() pods = api.list_namespaced_pod(namespace, label_selector=format_labels(labels)) for pod in pods.items: try: api.delete_namespaced_pod(pod.metadata.name, namespace) logger.info('Deleted pod: %s', pod.metadata.name) except kubernetes.client.rest.ApiException as e: # ignore error if pod is already removed if e.status != 404: raise
python
def _cleanup_pods(namespace, labels): api = kubernetes.client.CoreV1Api() pods = api.list_namespaced_pod(namespace, label_selector=format_labels(labels)) for pod in pods.items: try: api.delete_namespaced_pod(pod.metadata.name, namespace) logger.info('Deleted pod: %s', pod.metadata.name) except kubernetes.client.rest.ApiException as e: # ignore error if pod is already removed if e.status != 404: raise
[ "def", "_cleanup_pods", "(", "namespace", ",", "labels", ")", ":", "api", "=", "kubernetes", ".", "client", ".", "CoreV1Api", "(", ")", "pods", "=", "api", ".", "list_namespaced_pod", "(", "namespace", ",", "label_selector", "=", "format_labels", "(", "label...
Remove all pods with these labels in this namespace
[ "Remove", "all", "pods", "with", "these", "labels", "in", "this", "namespace" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L500-L511
245,473
dask/dask-kubernetes
dask_kubernetes/core.py
format_labels
def format_labels(labels): """ Convert a dictionary of labels into a comma separated string """ if labels: return ','.join(['{}={}'.format(k, v) for k, v in labels.items()]) else: return ''
python
def format_labels(labels): if labels: return ','.join(['{}={}'.format(k, v) for k, v in labels.items()]) else: return ''
[ "def", "format_labels", "(", "labels", ")", ":", "if", "labels", ":", "return", "','", ".", "join", "(", "[", "'{}={}'", ".", "format", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "labels", ".", "items", "(", ")", "]", ")", "else", ":"...
Convert a dictionary of labels into a comma separated string
[ "Convert", "a", "dictionary", "of", "labels", "into", "a", "comma", "separated", "string" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L514-L519
245,474
dask/dask-kubernetes
dask_kubernetes/core.py
_namespace_default
def _namespace_default(): """ Get current namespace if running in a k8s cluster If not in a k8s cluster with service accounts enabled, default to 'default' Taken from https://github.com/jupyterhub/kubespawner/blob/master/kubespawner/spawner.py#L125 """ ns_path = '/var/run/secrets/kubernetes.io/serviceaccount/namespace' if os.path.exists(ns_path): with open(ns_path) as f: return f.read().strip() return 'default'
python
def _namespace_default(): ns_path = '/var/run/secrets/kubernetes.io/serviceaccount/namespace' if os.path.exists(ns_path): with open(ns_path) as f: return f.read().strip() return 'default'
[ "def", "_namespace_default", "(", ")", ":", "ns_path", "=", "'/var/run/secrets/kubernetes.io/serviceaccount/namespace'", "if", "os", ".", "path", ".", "exists", "(", "ns_path", ")", ":", "with", "open", "(", "ns_path", ")", "as", "f", ":", "return", "f", ".", ...
Get current namespace if running in a k8s cluster If not in a k8s cluster with service accounts enabled, default to 'default' Taken from https://github.com/jupyterhub/kubespawner/blob/master/kubespawner/spawner.py#L125
[ "Get", "current", "namespace", "if", "running", "in", "a", "k8s", "cluster" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L522-L535
245,475
dask/dask-kubernetes
dask_kubernetes/core.py
select_workers_to_close
def select_workers_to_close(scheduler, n_to_close): """ Select n workers to close from scheduler """ workers = list(scheduler.workers.values()) assert n_to_close <= len(workers) key = lambda ws: ws.metrics['memory'] to_close = set(sorted(scheduler.idle, key=key)[:n_to_close]) if len(to_close) < n_to_close: rest = sorted(workers, key=key, reverse=True) while len(to_close) < n_to_close: to_close.add(rest.pop()) return [ws.address for ws in to_close]
python
def select_workers_to_close(scheduler, n_to_close): workers = list(scheduler.workers.values()) assert n_to_close <= len(workers) key = lambda ws: ws.metrics['memory'] to_close = set(sorted(scheduler.idle, key=key)[:n_to_close]) if len(to_close) < n_to_close: rest = sorted(workers, key=key, reverse=True) while len(to_close) < n_to_close: to_close.add(rest.pop()) return [ws.address for ws in to_close]
[ "def", "select_workers_to_close", "(", "scheduler", ",", "n_to_close", ")", ":", "workers", "=", "list", "(", "scheduler", ".", "workers", ".", "values", "(", ")", ")", "assert", "n_to_close", "<=", "len", "(", "workers", ")", "key", "=", "lambda", "ws", ...
Select n workers to close from scheduler
[ "Select", "n", "workers", "to", "close", "from", "scheduler" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L538-L550
245,476
dask/dask-kubernetes
dask_kubernetes/core.py
KubeCluster.from_yaml
def from_yaml(cls, yaml_path, **kwargs): """ Create cluster with worker pod spec defined by a YAML file We can start a cluster with pods defined in an accompanying YAML file like the following: .. code-block:: yaml kind: Pod metadata: labels: foo: bar baz: quux spec: containers: - image: daskdev/dask:latest name: dask-worker args: [dask-worker, $(DASK_SCHEDULER_ADDRESS), --nthreads, '2', --memory-limit, 8GB] restartPolicy: Never Examples -------- >>> cluster = KubeCluster.from_yaml('pod.yaml', namespace='my-ns') # doctest: +SKIP See Also -------- KubeCluster.from_dict """ if not yaml: raise ImportError("PyYaml is required to use yaml functionality, please install it!") with open(yaml_path) as f: d = yaml.safe_load(f) d = dask.config.expand_environment_variables(d) return cls.from_dict(d, **kwargs)
python
def from_yaml(cls, yaml_path, **kwargs): if not yaml: raise ImportError("PyYaml is required to use yaml functionality, please install it!") with open(yaml_path) as f: d = yaml.safe_load(f) d = dask.config.expand_environment_variables(d) return cls.from_dict(d, **kwargs)
[ "def", "from_yaml", "(", "cls", ",", "yaml_path", ",", "*", "*", "kwargs", ")", ":", "if", "not", "yaml", ":", "raise", "ImportError", "(", "\"PyYaml is required to use yaml functionality, please install it!\"", ")", "with", "open", "(", "yaml_path", ")", "as", ...
Create cluster with worker pod spec defined by a YAML file We can start a cluster with pods defined in an accompanying YAML file like the following: .. code-block:: yaml kind: Pod metadata: labels: foo: bar baz: quux spec: containers: - image: daskdev/dask:latest name: dask-worker args: [dask-worker, $(DASK_SCHEDULER_ADDRESS), --nthreads, '2', --memory-limit, 8GB] restartPolicy: Never Examples -------- >>> cluster = KubeCluster.from_yaml('pod.yaml', namespace='my-ns') # doctest: +SKIP See Also -------- KubeCluster.from_dict
[ "Create", "cluster", "with", "worker", "pod", "spec", "defined", "by", "a", "YAML", "file" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L251-L284
245,477
dask/dask-kubernetes
dask_kubernetes/core.py
KubeCluster.pods
def pods(self): """ A list of kubernetes pods corresponding to current workers See Also -------- KubeCluster.logs """ return self.core_api.list_namespaced_pod( self.namespace, label_selector=format_labels(self.pod_template.metadata.labels) ).items
python
def pods(self): return self.core_api.list_namespaced_pod( self.namespace, label_selector=format_labels(self.pod_template.metadata.labels) ).items
[ "def", "pods", "(", "self", ")", ":", "return", "self", ".", "core_api", ".", "list_namespaced_pod", "(", "self", ".", "namespace", ",", "label_selector", "=", "format_labels", "(", "self", ".", "pod_template", ".", "metadata", ".", "labels", ")", ")", "."...
A list of kubernetes pods corresponding to current workers See Also -------- KubeCluster.logs
[ "A", "list", "of", "kubernetes", "pods", "corresponding", "to", "current", "workers" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L306-L316
245,478
dask/dask-kubernetes
dask_kubernetes/core.py
KubeCluster.logs
def logs(self, pod=None): """ Logs from a worker pod You can get this pod object from the ``pods`` method. If no pod is specified all pod logs will be returned. On large clusters this could end up being rather large. Parameters ---------- pod: kubernetes.client.V1Pod The pod from which we want to collect logs. See Also -------- KubeCluster.pods Client.get_worker_logs """ if pod is None: return {pod.status.pod_ip: self.logs(pod) for pod in self.pods()} return self.core_api.read_namespaced_pod_log(pod.metadata.name, pod.metadata.namespace)
python
def logs(self, pod=None): if pod is None: return {pod.status.pod_ip: self.logs(pod) for pod in self.pods()} return self.core_api.read_namespaced_pod_log(pod.metadata.name, pod.metadata.namespace)
[ "def", "logs", "(", "self", ",", "pod", "=", "None", ")", ":", "if", "pod", "is", "None", ":", "return", "{", "pod", ".", "status", ".", "pod_ip", ":", "self", ".", "logs", "(", "pod", ")", "for", "pod", "in", "self", ".", "pods", "(", ")", "...
Logs from a worker pod You can get this pod object from the ``pods`` method. If no pod is specified all pod logs will be returned. On large clusters this could end up being rather large. Parameters ---------- pod: kubernetes.client.V1Pod The pod from which we want to collect logs. See Also -------- KubeCluster.pods Client.get_worker_logs
[ "Logs", "from", "a", "worker", "pod" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L318-L340
245,479
dask/dask-kubernetes
dask_kubernetes/core.py
KubeCluster.scale
def scale(self, n): """ Scale cluster to n workers Parameters ---------- n: int Target number of workers Example ------- >>> cluster.scale(10) # scale cluster to ten workers See Also -------- KubeCluster.scale_up KubeCluster.scale_down """ pods = self._cleanup_terminated_pods(self.pods()) if n >= len(pods): return self.scale_up(n, pods=pods) else: n_to_delete = len(pods) - n # Before trying to close running workers, check if we can cancel # pending pods (in case the kubernetes cluster was too full to # provision those pods in the first place). running_workers = list(self.scheduler.workers.keys()) running_ips = set(urlparse(worker).hostname for worker in running_workers) pending_pods = [p for p in pods if p.status.pod_ip not in running_ips] if pending_pods: pending_to_delete = pending_pods[:n_to_delete] logger.debug("Deleting pending pods: %s", pending_to_delete) self._delete_pods(pending_to_delete) n_to_delete = n_to_delete - len(pending_to_delete) if n_to_delete <= 0: return to_close = select_workers_to_close(self.scheduler, n_to_delete) logger.debug("Closing workers: %s", to_close) if len(to_close) < len(self.scheduler.workers): # Close workers cleanly to migrate any temporary results to # remaining workers. @gen.coroutine def f(to_close): yield self.scheduler.retire_workers( workers=to_close, remove=True, close_workers=True) yield offload(self.scale_down, to_close) self.scheduler.loop.add_callback(f, to_close) return # Terminate all pods without waiting for clean worker shutdown self.scale_down(to_close)
python
def scale(self, n): pods = self._cleanup_terminated_pods(self.pods()) if n >= len(pods): return self.scale_up(n, pods=pods) else: n_to_delete = len(pods) - n # Before trying to close running workers, check if we can cancel # pending pods (in case the kubernetes cluster was too full to # provision those pods in the first place). running_workers = list(self.scheduler.workers.keys()) running_ips = set(urlparse(worker).hostname for worker in running_workers) pending_pods = [p for p in pods if p.status.pod_ip not in running_ips] if pending_pods: pending_to_delete = pending_pods[:n_to_delete] logger.debug("Deleting pending pods: %s", pending_to_delete) self._delete_pods(pending_to_delete) n_to_delete = n_to_delete - len(pending_to_delete) if n_to_delete <= 0: return to_close = select_workers_to_close(self.scheduler, n_to_delete) logger.debug("Closing workers: %s", to_close) if len(to_close) < len(self.scheduler.workers): # Close workers cleanly to migrate any temporary results to # remaining workers. @gen.coroutine def f(to_close): yield self.scheduler.retire_workers( workers=to_close, remove=True, close_workers=True) yield offload(self.scale_down, to_close) self.scheduler.loop.add_callback(f, to_close) return # Terminate all pods without waiting for clean worker shutdown self.scale_down(to_close)
[ "def", "scale", "(", "self", ",", "n", ")", ":", "pods", "=", "self", ".", "_cleanup_terminated_pods", "(", "self", ".", "pods", "(", ")", ")", "if", "n", ">=", "len", "(", "pods", ")", ":", "return", "self", ".", "scale_up", "(", "n", ",", "pods...
Scale cluster to n workers Parameters ---------- n: int Target number of workers Example ------- >>> cluster.scale(10) # scale cluster to ten workers See Also -------- KubeCluster.scale_up KubeCluster.scale_down
[ "Scale", "cluster", "to", "n", "workers" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L342-L395
245,480
dask/dask-kubernetes
dask_kubernetes/core.py
KubeCluster.scale_up
def scale_up(self, n, pods=None, **kwargs): """ Make sure we have n dask-workers available for this cluster Examples -------- >>> cluster.scale_up(20) # ask for twenty workers """ maximum = dask.config.get('kubernetes.count.max') if maximum is not None and maximum < n: logger.info("Tried to scale beyond maximum number of workers %d > %d", n, maximum) n = maximum pods = pods or self._cleanup_terminated_pods(self.pods()) to_create = n - len(pods) new_pods = [] for i in range(3): try: for _ in range(to_create): new_pods.append(self.core_api.create_namespaced_pod( self.namespace, self.pod_template)) to_create -= 1 break except kubernetes.client.rest.ApiException as e: if e.status == 500 and 'ServerTimeout' in e.body: logger.info("Server timeout, retry #%d", i + 1) time.sleep(1) last_exception = e continue else: raise else: raise last_exception return new_pods
python
def scale_up(self, n, pods=None, **kwargs): maximum = dask.config.get('kubernetes.count.max') if maximum is not None and maximum < n: logger.info("Tried to scale beyond maximum number of workers %d > %d", n, maximum) n = maximum pods = pods or self._cleanup_terminated_pods(self.pods()) to_create = n - len(pods) new_pods = [] for i in range(3): try: for _ in range(to_create): new_pods.append(self.core_api.create_namespaced_pod( self.namespace, self.pod_template)) to_create -= 1 break except kubernetes.client.rest.ApiException as e: if e.status == 500 and 'ServerTimeout' in e.body: logger.info("Server timeout, retry #%d", i + 1) time.sleep(1) last_exception = e continue else: raise else: raise last_exception return new_pods
[ "def", "scale_up", "(", "self", ",", "n", ",", "pods", "=", "None", ",", "*", "*", "kwargs", ")", ":", "maximum", "=", "dask", ".", "config", ".", "get", "(", "'kubernetes.count.max'", ")", "if", "maximum", "is", "not", "None", "and", "maximum", "<",...
Make sure we have n dask-workers available for this cluster Examples -------- >>> cluster.scale_up(20) # ask for twenty workers
[ "Make", "sure", "we", "have", "n", "dask", "-", "workers", "available", "for", "this", "cluster" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L421-L455
245,481
dask/dask-kubernetes
dask_kubernetes/core.py
KubeCluster.scale_down
def scale_down(self, workers, pods=None): """ Remove the pods for the requested list of workers When scale_down is called by the _adapt async loop, the workers are assumed to have been cleanly closed first and in-memory data has been migrated to the remaining workers. Note that when the worker process exits, Kubernetes leaves the pods in a 'Succeeded' state that we collect here. If some workers have not been closed, we just delete the pods with matching ip addresses. Parameters ---------- workers: List[str] List of addresses of workers to close """ # Get the existing worker pods pods = pods or self._cleanup_terminated_pods(self.pods()) # Work out the list of pods that we are going to delete # Each worker to delete is given in the form "tcp://<worker ip>:<port>" # Convert this to a set of IPs ips = set(urlparse(worker).hostname for worker in workers) to_delete = [p for p in pods if p.status.pod_ip in ips] if not to_delete: return self._delete_pods(to_delete)
python
def scale_down(self, workers, pods=None): # Get the existing worker pods pods = pods or self._cleanup_terminated_pods(self.pods()) # Work out the list of pods that we are going to delete # Each worker to delete is given in the form "tcp://<worker ip>:<port>" # Convert this to a set of IPs ips = set(urlparse(worker).hostname for worker in workers) to_delete = [p for p in pods if p.status.pod_ip in ips] if not to_delete: return self._delete_pods(to_delete)
[ "def", "scale_down", "(", "self", ",", "workers", ",", "pods", "=", "None", ")", ":", "# Get the existing worker pods", "pods", "=", "pods", "or", "self", ".", "_cleanup_terminated_pods", "(", "self", ".", "pods", "(", ")", ")", "# Work out the list of pods that...
Remove the pods for the requested list of workers When scale_down is called by the _adapt async loop, the workers are assumed to have been cleanly closed first and in-memory data has been migrated to the remaining workers. Note that when the worker process exits, Kubernetes leaves the pods in a 'Succeeded' state that we collect here. If some workers have not been closed, we just delete the pods with matching ip addresses. Parameters ---------- workers: List[str] List of addresses of workers to close
[ "Remove", "the", "pods", "for", "the", "requested", "list", "of", "workers" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L458-L485
245,482
dask/dask-kubernetes
dask_kubernetes/core.py
KubeCluster.close
def close(self, **kwargs): """ Close this cluster """ self.scale_down(self.cluster.scheduler.workers) return self.cluster.close(**kwargs)
python
def close(self, **kwargs): self.scale_down(self.cluster.scheduler.workers) return self.cluster.close(**kwargs)
[ "def", "close", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "scale_down", "(", "self", ".", "cluster", ".", "scheduler", ".", "workers", ")", "return", "self", ".", "cluster", ".", "close", "(", "*", "*", "kwargs", ")" ]
Close this cluster
[ "Close", "this", "cluster" ]
8a4883ecd902460b446bb1f43ed97efe398a135e
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/core.py#L490-L493
245,483
viniciuschiele/flask-apscheduler
flask_apscheduler/utils.py
job_to_dict
def job_to_dict(job): """Converts a job to an OrderedDict.""" data = OrderedDict() data['id'] = job.id data['name'] = job.name data['func'] = job.func_ref data['args'] = job.args data['kwargs'] = job.kwargs data.update(trigger_to_dict(job.trigger)) if not job.pending: data['misfire_grace_time'] = job.misfire_grace_time data['max_instances'] = job.max_instances data['next_run_time'] = None if job.next_run_time is None else job.next_run_time return data
python
def job_to_dict(job): data = OrderedDict() data['id'] = job.id data['name'] = job.name data['func'] = job.func_ref data['args'] = job.args data['kwargs'] = job.kwargs data.update(trigger_to_dict(job.trigger)) if not job.pending: data['misfire_grace_time'] = job.misfire_grace_time data['max_instances'] = job.max_instances data['next_run_time'] = None if job.next_run_time is None else job.next_run_time return data
[ "def", "job_to_dict", "(", "job", ")", ":", "data", "=", "OrderedDict", "(", ")", "data", "[", "'id'", "]", "=", "job", ".", "id", "data", "[", "'name'", "]", "=", "job", ".", "name", "data", "[", "'func'", "]", "=", "job", ".", "func_ref", "data...
Converts a job to an OrderedDict.
[ "Converts", "a", "job", "to", "an", "OrderedDict", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/utils.py#L26-L43
245,484
viniciuschiele/flask-apscheduler
flask_apscheduler/utils.py
pop_trigger
def pop_trigger(data): """Pops trigger and trigger args from a given dict.""" trigger_name = data.pop('trigger') trigger_args = {} if trigger_name == 'date': trigger_arg_names = ('run_date', 'timezone') elif trigger_name == 'interval': trigger_arg_names = ('weeks', 'days', 'hours', 'minutes', 'seconds', 'start_date', 'end_date', 'timezone') elif trigger_name == 'cron': trigger_arg_names = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', 'minute', 'second', 'start_date', 'end_date', 'timezone') else: raise Exception('Trigger %s is not supported.' % trigger_name) for arg_name in trigger_arg_names: if arg_name in data: trigger_args[arg_name] = data.pop(arg_name) return trigger_name, trigger_args
python
def pop_trigger(data): trigger_name = data.pop('trigger') trigger_args = {} if trigger_name == 'date': trigger_arg_names = ('run_date', 'timezone') elif trigger_name == 'interval': trigger_arg_names = ('weeks', 'days', 'hours', 'minutes', 'seconds', 'start_date', 'end_date', 'timezone') elif trigger_name == 'cron': trigger_arg_names = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', 'minute', 'second', 'start_date', 'end_date', 'timezone') else: raise Exception('Trigger %s is not supported.' % trigger_name) for arg_name in trigger_arg_names: if arg_name in data: trigger_args[arg_name] = data.pop(arg_name) return trigger_name, trigger_args
[ "def", "pop_trigger", "(", "data", ")", ":", "trigger_name", "=", "data", ".", "pop", "(", "'trigger'", ")", "trigger_args", "=", "{", "}", "if", "trigger_name", "==", "'date'", ":", "trigger_arg_names", "=", "(", "'run_date'", ",", "'timezone'", ")", "eli...
Pops trigger and trigger args from a given dict.
[ "Pops", "trigger", "and", "trigger", "args", "from", "a", "given", "dict", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/utils.py#L46-L65
245,485
viniciuschiele/flask-apscheduler
flask_apscheduler/utils.py
trigger_to_dict
def trigger_to_dict(trigger): """Converts a trigger to an OrderedDict.""" data = OrderedDict() if isinstance(trigger, DateTrigger): data['trigger'] = 'date' data['run_date'] = trigger.run_date elif isinstance(trigger, IntervalTrigger): data['trigger'] = 'interval' data['start_date'] = trigger.start_date if trigger.end_date: data['end_date'] = trigger.end_date w, d, hh, mm, ss = extract_timedelta(trigger.interval) if w > 0: data['weeks'] = w if d > 0: data['days'] = d if hh > 0: data['hours'] = hh if mm > 0: data['minutes'] = mm if ss > 0: data['seconds'] = ss elif isinstance(trigger, CronTrigger): data['trigger'] = 'cron' if trigger.start_date: data['start_date'] = trigger.start_date if trigger.end_date: data['end_date'] = trigger.end_date for field in trigger.fields: if not field.is_default: data[field.name] = str(field) else: data['trigger'] = str(trigger) return data
python
def trigger_to_dict(trigger): data = OrderedDict() if isinstance(trigger, DateTrigger): data['trigger'] = 'date' data['run_date'] = trigger.run_date elif isinstance(trigger, IntervalTrigger): data['trigger'] = 'interval' data['start_date'] = trigger.start_date if trigger.end_date: data['end_date'] = trigger.end_date w, d, hh, mm, ss = extract_timedelta(trigger.interval) if w > 0: data['weeks'] = w if d > 0: data['days'] = d if hh > 0: data['hours'] = hh if mm > 0: data['minutes'] = mm if ss > 0: data['seconds'] = ss elif isinstance(trigger, CronTrigger): data['trigger'] = 'cron' if trigger.start_date: data['start_date'] = trigger.start_date if trigger.end_date: data['end_date'] = trigger.end_date for field in trigger.fields: if not field.is_default: data[field.name] = str(field) else: data['trigger'] = str(trigger) return data
[ "def", "trigger_to_dict", "(", "trigger", ")", ":", "data", "=", "OrderedDict", "(", ")", "if", "isinstance", "(", "trigger", ",", "DateTrigger", ")", ":", "data", "[", "'trigger'", "]", "=", "'date'", "data", "[", "'run_date'", "]", "=", "trigger", ".",...
Converts a trigger to an OrderedDict.
[ "Converts", "a", "trigger", "to", "an", "OrderedDict", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/utils.py#L68-L110
245,486
viniciuschiele/flask-apscheduler
flask_apscheduler/utils.py
fix_job_def
def fix_job_def(job_def): """ Replaces the datetime in string by datetime object. """ if six.PY2 and isinstance(job_def.get('func'), six.text_type): # when a job comes from the endpoint, strings are unicode # because that's how json package deserialize the bytes. # we had a case where APScheduler failed to import the func based # on its name because Py2 expected a str and not unicode on __import__(). # it happened only for a user, I wasn't able to determine why that occurred for him, # a workaround is to convert the func to str. # full story: https://github.com/viniciuschiele/flask-apscheduler/issues/75 job_def['func'] = str(job_def.get('func')) if isinstance(job_def.get('start_date'), six.string_types): job_def['start_date'] = dateutil.parser.parse(job_def.get('start_date')) if isinstance(job_def.get('end_date'), six.string_types): job_def['end_date'] = dateutil.parser.parse(job_def.get('end_date')) if isinstance(job_def.get('run_date'), six.string_types): job_def['run_date'] = dateutil.parser.parse(job_def.get('run_date')) # it keeps compatibility backward if isinstance(job_def.get('trigger'), dict): trigger = job_def.pop('trigger') job_def['trigger'] = trigger.pop('type', 'date') job_def.update(trigger)
python
def fix_job_def(job_def): if six.PY2 and isinstance(job_def.get('func'), six.text_type): # when a job comes from the endpoint, strings are unicode # because that's how json package deserialize the bytes. # we had a case where APScheduler failed to import the func based # on its name because Py2 expected a str and not unicode on __import__(). # it happened only for a user, I wasn't able to determine why that occurred for him, # a workaround is to convert the func to str. # full story: https://github.com/viniciuschiele/flask-apscheduler/issues/75 job_def['func'] = str(job_def.get('func')) if isinstance(job_def.get('start_date'), six.string_types): job_def['start_date'] = dateutil.parser.parse(job_def.get('start_date')) if isinstance(job_def.get('end_date'), six.string_types): job_def['end_date'] = dateutil.parser.parse(job_def.get('end_date')) if isinstance(job_def.get('run_date'), six.string_types): job_def['run_date'] = dateutil.parser.parse(job_def.get('run_date')) # it keeps compatibility backward if isinstance(job_def.get('trigger'), dict): trigger = job_def.pop('trigger') job_def['trigger'] = trigger.pop('type', 'date') job_def.update(trigger)
[ "def", "fix_job_def", "(", "job_def", ")", ":", "if", "six", ".", "PY2", "and", "isinstance", "(", "job_def", ".", "get", "(", "'func'", ")", ",", "six", ".", "text_type", ")", ":", "# when a job comes from the endpoint, strings are unicode", "# because that's how...
Replaces the datetime in string by datetime object.
[ "Replaces", "the", "datetime", "in", "string", "by", "datetime", "object", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/utils.py#L113-L142
245,487
viniciuschiele/flask-apscheduler
flask_apscheduler/scheduler.py
APScheduler.init_app
def init_app(self, app):
    """Bind this scheduler to *app*, loading config, jobs and API routes."""
    self.app = app
    # Expose the scheduler on the application instance.
    app.apscheduler = self

    self._load_config()
    self._load_jobs()

    if self.api_enabled:
        self._load_api()
python
def init_app(self, app): self.app = app self.app.apscheduler = self self._load_config() self._load_jobs() if self.api_enabled: self._load_api()
[ "def", "init_app", "(", "self", ",", "app", ")", ":", "self", ".", "app", "=", "app", "self", ".", "app", ".", "apscheduler", "=", "self", "self", ".", "_load_config", "(", ")", "self", ".", "_load_jobs", "(", ")", "if", "self", ".", "api_enabled", ...
Initialize the APScheduler with a Flask application instance.
[ "Initialize", "the", "APScheduler", "with", "a", "Flask", "application", "instance", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/scheduler.py#L75-L85
245,488
viniciuschiele/flask-apscheduler
flask_apscheduler/scheduler.py
APScheduler.add_listener
def add_listener(self, callback, mask=EVENT_ALL):
    """Register *callback* for scheduler events matching *mask*.

    The callback is invoked with the event object as its only argument.
    With the default mask, events of every type are delivered.

    For further info:
    https://apscheduler.readthedocs.io/en/latest/userguide.html#scheduler-events

    :param callback: any callable that takes one argument
    :param int mask: bitmask selecting which event types to listen to
    """
    self._scheduler.add_listener(callback, mask)
python
def add_listener(self, callback, mask=EVENT_ALL): self._scheduler.add_listener(callback, mask)
[ "def", "add_listener", "(", "self", ",", "callback", ",", "mask", "=", "EVENT_ALL", ")", ":", "self", ".", "_scheduler", ".", "add_listener", "(", "callback", ",", "mask", ")" ]
Add a listener for scheduler events. When a matching event occurs, ``callback`` is executed with the event object as its sole argument. If the ``mask`` parameter is not provided, the callback will receive events of all types. For further info: https://apscheduler.readthedocs.io/en/latest/userguide.html#scheduler-events :param callback: any callable that takes one argument :param int mask: bitmask that indicates which events should be listened to
[ "Add", "a", "listener", "for", "scheduler", "events", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/scheduler.py#L124-L137
245,489
viniciuschiele/flask-apscheduler
flask_apscheduler/scheduler.py
APScheduler.add_job
def add_job(self, id, func, **kwargs):
    """Schedule *func* under the identifier *id*, waking the scheduler if running.

    :param str id: explicit identifier for the job (for modifying it later)
    :param func: callable (or a textual reference to one) to run at the given time
    """
    # Merge the explicit arguments into the job definition; fall back to the
    # id as the job name when none was supplied.
    job_def = dict(kwargs, id=id, func=func)
    job_def['name'] = job_def.get('name') or id

    fix_job_def(job_def)

    return self._scheduler.add_job(**job_def)
python
def add_job(self, id, func, **kwargs): job_def = dict(kwargs) job_def['id'] = id job_def['func'] = func job_def['name'] = job_def.get('name') or id fix_job_def(job_def) return self._scheduler.add_job(**job_def)
[ "def", "add_job", "(", "self", ",", "id", ",", "func", ",", "*", "*", "kwargs", ")", ":", "job_def", "=", "dict", "(", "kwargs", ")", "job_def", "[", "'id'", "]", "=", "id", "job_def", "[", "'func'", "]", "=", "func", "job_def", "[", "'name'", "]...
Add the given job to the job list and wakes up the scheduler if it's already running. :param str id: explicit identifier for the job (for modifying it later) :param func: callable (or a textual reference to one) to run at the given time
[ "Add", "the", "given", "job", "to", "the", "job", "list", "and", "wakes", "up", "the", "scheduler", "if", "it", "s", "already", "running", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/scheduler.py#L145-L160
245,490
viniciuschiele/flask-apscheduler
flask_apscheduler/scheduler.py
APScheduler.delete_job
def delete_job(self, id, jobstore=None):
    """Remove a job, preventing it from being run any more.

    .. deprecated:: use :meth:`remove_job` instead.

    :param str id: the identifier of the job
    :param str jobstore: alias of the job store that contains the job
    """
    warnings.warn('delete_job has been deprecated, use remove_job instead.', DeprecationWarning)

    self.remove_job(id, jobstore)
python
def delete_job(self, id, jobstore=None): warnings.warn('delete_job has been deprecated, use remove_job instead.', DeprecationWarning) self.remove_job(id, jobstore)
[ "def", "delete_job", "(", "self", ",", "id", ",", "jobstore", "=", "None", ")", ":", "warnings", ".", "warn", "(", "'delete_job has been deprecated, use remove_job instead.'", ",", "DeprecationWarning", ")", "self", ".", "remove_job", "(", "id", ",", "jobstore", ...
DEPRECATED, use remove_job instead. Remove a job, preventing it from being run any more. :param str id: the identifier of the job :param str jobstore: alias of the job store that contains the job
[ "DEPRECATED", "use", "remove_job", "instead", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/scheduler.py#L162-L173
245,491
viniciuschiele/flask-apscheduler
flask_apscheduler/scheduler.py
APScheduler.modify_job
def modify_job(self, id, jobstore=None, **changes):
    """Apply the keyword-argument *changes* to a single job.

    :param str id: the identifier of the job
    :param str jobstore: alias of the job store that contains the job
    """
    fix_job_def(changes)

    if 'trigger' in changes:
        # Changing the trigger requires a reschedule, not a plain modify.
        trigger, trigger_args = pop_trigger(changes)
        self._scheduler.reschedule_job(id, jobstore, trigger, **trigger_args)

    return self._scheduler.modify_job(id, jobstore, **changes)
python
def modify_job(self, id, jobstore=None, **changes): fix_job_def(changes) if 'trigger' in changes: trigger, trigger_args = pop_trigger(changes) self._scheduler.reschedule_job(id, jobstore, trigger, **trigger_args) return self._scheduler.modify_job(id, jobstore, **changes)
[ "def", "modify_job", "(", "self", ",", "id", ",", "jobstore", "=", "None", ",", "*", "*", "changes", ")", ":", "fix_job_def", "(", "changes", ")", "if", "'trigger'", "in", "changes", ":", "trigger", ",", "trigger_args", "=", "pop_trigger", "(", "changes"...
Modify the properties of a single job. Modifications are passed to this method as extra keyword arguments. :param str id: the identifier of the job :param str jobstore: alias of the job store that contains the job
[ "Modify", "the", "properties", "of", "a", "single", "job", ".", "Modifications", "are", "passed", "to", "this", "method", "as", "extra", "keyword", "arguments", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/scheduler.py#L230-L244
245,492
viniciuschiele/flask-apscheduler
flask_apscheduler/scheduler.py
APScheduler._load_config
def _load_config(self):
    """Load scheduler options and API settings from the Flask configuration."""
    config = self.app.config

    # Forward only the options that are actually set.
    options = {}
    for option, key in (('jobstores', 'SCHEDULER_JOBSTORES'),
                        ('executors', 'SCHEDULER_EXECUTORS'),
                        ('job_defaults', 'SCHEDULER_JOB_DEFAULTS'),
                        ('timezone', 'SCHEDULER_TIMEZONE')):
        value = config.get(key)
        if value:
            options[option] = value

    self._scheduler.configure(**options)

    self.auth = config.get('SCHEDULER_AUTH', self.auth)
    # SCHEDULER_VIEWS_ENABLED is kept for compatibility reason;
    # SCHEDULER_API_ENABLED takes precedence when both are set.
    self.api_enabled = config.get('SCHEDULER_VIEWS_ENABLED', self.api_enabled)
    self.api_enabled = config.get('SCHEDULER_API_ENABLED', self.api_enabled)
    self.api_prefix = config.get('SCHEDULER_API_PREFIX', self.api_prefix)
    self.endpoint_prefix = config.get('SCHEDULER_ENDPOINT_PREFIX', self.endpoint_prefix)
    self.allowed_hosts = config.get('SCHEDULER_ALLOWED_HOSTS', self.allowed_hosts)
python
def _load_config(self): options = dict() job_stores = self.app.config.get('SCHEDULER_JOBSTORES') if job_stores: options['jobstores'] = job_stores executors = self.app.config.get('SCHEDULER_EXECUTORS') if executors: options['executors'] = executors job_defaults = self.app.config.get('SCHEDULER_JOB_DEFAULTS') if job_defaults: options['job_defaults'] = job_defaults timezone = self.app.config.get('SCHEDULER_TIMEZONE') if timezone: options['timezone'] = timezone self._scheduler.configure(**options) self.auth = self.app.config.get('SCHEDULER_AUTH', self.auth) self.api_enabled = self.app.config.get('SCHEDULER_VIEWS_ENABLED', self.api_enabled) # for compatibility reason self.api_enabled = self.app.config.get('SCHEDULER_API_ENABLED', self.api_enabled) self.api_prefix = self.app.config.get('SCHEDULER_API_PREFIX', self.api_prefix) self.endpoint_prefix = self.app.config.get('SCHEDULER_ENDPOINT_PREFIX', self.endpoint_prefix) self.allowed_hosts = self.app.config.get('SCHEDULER_ALLOWED_HOSTS', self.allowed_hosts)
[ "def", "_load_config", "(", "self", ")", ":", "options", "=", "dict", "(", ")", "job_stores", "=", "self", ".", "app", ".", "config", ".", "get", "(", "'SCHEDULER_JOBSTORES'", ")", "if", "job_stores", ":", "options", "[", "'jobstores'", "]", "=", "job_st...
Load the configuration from the Flask configuration.
[ "Load", "the", "configuration", "from", "the", "Flask", "configuration", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/scheduler.py#L286-L315
245,493
viniciuschiele/flask-apscheduler
flask_apscheduler/scheduler.py
APScheduler._load_jobs
def _load_jobs(self):
    """Schedule the job definitions found in the Flask configuration."""
    # SCHEDULER_JOBS takes precedence; JOBS is the legacy key.
    jobs = self.app.config.get('SCHEDULER_JOBS') or self.app.config.get('JOBS')

    for job in jobs or ():
        self.add_job(**job)
python
def _load_jobs(self): jobs = self.app.config.get('SCHEDULER_JOBS') if not jobs: jobs = self.app.config.get('JOBS') if jobs: for job in jobs: self.add_job(**job)
[ "def", "_load_jobs", "(", "self", ")", ":", "jobs", "=", "self", ".", "app", ".", "config", ".", "get", "(", "'SCHEDULER_JOBS'", ")", "if", "not", "jobs", ":", "jobs", "=", "self", ".", "app", ".", "config", ".", "get", "(", "'JOBS'", ")", "if", ...
Load the job definitions from the Flask configuration.
[ "Load", "the", "job", "definitions", "from", "the", "Flask", "configuration", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/scheduler.py#L317-L328
245,494
viniciuschiele/flask-apscheduler
flask_apscheduler/scheduler.py
APScheduler._load_api
def _load_api(self):
    """Register the routes for the scheduler REST API."""
    # (endpoint, rule, view function, HTTP method)
    routes = (
        ('get_scheduler_info', '', api.get_scheduler_info, 'GET'),
        ('add_job', '/jobs', api.add_job, 'POST'),
        ('get_job', '/jobs/<job_id>', api.get_job, 'GET'),
        ('get_jobs', '/jobs', api.get_jobs, 'GET'),
        ('delete_job', '/jobs/<job_id>', api.delete_job, 'DELETE'),
        ('update_job', '/jobs/<job_id>', api.update_job, 'PATCH'),
        ('pause_job', '/jobs/<job_id>/pause', api.pause_job, 'POST'),
        ('resume_job', '/jobs/<job_id>/resume', api.resume_job, 'POST'),
        ('run_job', '/jobs/<job_id>/run', api.run_job, 'POST'),
    )

    for endpoint, rule, view_func, method in routes:
        self._add_url_route(endpoint, rule, view_func, method)
python
def _load_api(self): self._add_url_route('get_scheduler_info', '', api.get_scheduler_info, 'GET') self._add_url_route('add_job', '/jobs', api.add_job, 'POST') self._add_url_route('get_job', '/jobs/<job_id>', api.get_job, 'GET') self._add_url_route('get_jobs', '/jobs', api.get_jobs, 'GET') self._add_url_route('delete_job', '/jobs/<job_id>', api.delete_job, 'DELETE') self._add_url_route('update_job', '/jobs/<job_id>', api.update_job, 'PATCH') self._add_url_route('pause_job', '/jobs/<job_id>/pause', api.pause_job, 'POST') self._add_url_route('resume_job', '/jobs/<job_id>/resume', api.resume_job, 'POST') self._add_url_route('run_job', '/jobs/<job_id>/run', api.run_job, 'POST')
[ "def", "_load_api", "(", "self", ")", ":", "self", ".", "_add_url_route", "(", "'get_scheduler_info'", ",", "''", ",", "api", ".", "get_scheduler_info", ",", "'GET'", ")", "self", ".", "_add_url_route", "(", "'add_job'", ",", "'/jobs'", ",", "api", ".", "a...
Add the routes for the scheduler API.
[ "Add", "the", "routes", "for", "the", "scheduler", "API", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/scheduler.py#L330-L342
245,495
viniciuschiele/flask-apscheduler
flask_apscheduler/scheduler.py
APScheduler._handle_authentication_error
def _handle_authentication_error(self):
    """Build the 401 response returned when authentication fails."""
    response = make_response('Access Denied')
    response.status_code = 401
    # Advertise the expected authentication scheme to the client.
    response.headers['WWW-Authenticate'] = self.auth.get_authenticate_header()
    return response
python
def _handle_authentication_error(self): response = make_response('Access Denied') response.headers['WWW-Authenticate'] = self.auth.get_authenticate_header() response.status_code = 401 return response
[ "def", "_handle_authentication_error", "(", "self", ")", ":", "response", "=", "make_response", "(", "'Access Denied'", ")", "response", ".", "headers", "[", "'WWW-Authenticate'", "]", "=", "self", ".", "auth", ".", "get_authenticate_header", "(", ")", "response",...
Return an authentication error.
[ "Return", "an", "authentication", "error", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/scheduler.py#L387-L394
245,496
viniciuschiele/flask-apscheduler
flask_apscheduler/api.py
get_scheduler_info
def get_scheduler_info():
    """Get the scheduler info."""
    scheduler = current_app.apscheduler

    info = OrderedDict()
    info['current_host'] = scheduler.host_name
    info['allowed_hosts'] = scheduler.allowed_hosts
    info['running'] = scheduler.running

    return jsonify(info)
python
def get_scheduler_info(): scheduler = current_app.apscheduler d = OrderedDict([ ('current_host', scheduler.host_name), ('allowed_hosts', scheduler.allowed_hosts), ('running', scheduler.running) ]) return jsonify(d)
[ "def", "get_scheduler_info", "(", ")", ":", "scheduler", "=", "current_app", ".", "apscheduler", "d", "=", "OrderedDict", "(", "[", "(", "'current_host'", ",", "scheduler", ".", "host_name", ")", ",", "(", "'allowed_hosts'", ",", "scheduler", ".", "allowed_hos...
Gets the scheduler info.
[ "Gets", "the", "scheduler", "info", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/api.py#L21-L32
245,497
viniciuschiele/flask-apscheduler
flask_apscheduler/api.py
add_job
def add_job():
    """Add a new job built from the JSON request body."""
    data = request.get_json(force=True)

    try:
        return jsonify(current_app.apscheduler.add_job(**data))
    except ConflictingIdError:
        error = dict(error_message='Job %s already exists.' % data.get('id'))
        return jsonify(error, status=409)
    except Exception as e:
        return jsonify(dict(error_message=str(e)), status=500)
python
def add_job(): data = request.get_json(force=True) try: job = current_app.apscheduler.add_job(**data) return jsonify(job) except ConflictingIdError: return jsonify(dict(error_message='Job %s already exists.' % data.get('id')), status=409) except Exception as e: return jsonify(dict(error_message=str(e)), status=500)
[ "def", "add_job", "(", ")", ":", "data", "=", "request", ".", "get_json", "(", "force", "=", "True", ")", "try", ":", "job", "=", "current_app", ".", "apscheduler", ".", "add_job", "(", "*", "*", "data", ")", "return", "jsonify", "(", "job", ")", "...
Adds a new job.
[ "Adds", "a", "new", "job", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/api.py#L35-L46
245,498
viniciuschiele/flask-apscheduler
flask_apscheduler/api.py
get_job
def get_job(job_id):
    """Get a single job by its identifier."""
    job = current_app.apscheduler.get_job(job_id)

    if job:
        return jsonify(job)

    return jsonify(dict(error_message='Job %s not found' % job_id), status=404)
python
def get_job(job_id): job = current_app.apscheduler.get_job(job_id) if not job: return jsonify(dict(error_message='Job %s not found' % job_id), status=404) return jsonify(job)
[ "def", "get_job", "(", "job_id", ")", ":", "job", "=", "current_app", ".", "apscheduler", ".", "get_job", "(", "job_id", ")", "if", "not", "job", ":", "return", "jsonify", "(", "dict", "(", "error_message", "=", "'Job %s not found'", "%", "job_id", ")", ...
Gets a job.
[ "Gets", "a", "job", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/api.py#L61-L69
245,499
viniciuschiele/flask-apscheduler
flask_apscheduler/api.py
get_jobs
def get_jobs():
    """Get all scheduled jobs.

    Returns a JSON array with one entry per job.
    """
    # The original element-by-element append loop only copied the iterable;
    # list() does the same in one C-level pass.
    jobs = list(current_app.apscheduler.get_jobs())
    return jsonify(jobs)
python
def get_jobs(): jobs = current_app.apscheduler.get_jobs() job_states = [] for job in jobs: job_states.append(job) return jsonify(job_states)
[ "def", "get_jobs", "(", ")", ":", "jobs", "=", "current_app", ".", "apscheduler", ".", "get_jobs", "(", ")", "job_states", "=", "[", "]", "for", "job", "in", "jobs", ":", "job_states", ".", "append", "(", "job", ")", "return", "jsonify", "(", "job_stat...
Gets all scheduled jobs.
[ "Gets", "all", "scheduled", "jobs", "." ]
cc52c39e1948c4e8de5da0d01db45f1779f61997
https://github.com/viniciuschiele/flask-apscheduler/blob/cc52c39e1948c4e8de5da0d01db45f1779f61997/flask_apscheduler/api.py#L72-L82