repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
IdentityPython/oidcendpoint
src/oidcendpoint/userinfo.py
userinfo_in_id_token_claims
def userinfo_in_id_token_claims(endpoint_context, session, def_itc=None): """ Collect user info claims that are to be placed in the id token. :param endpoint_context: Endpoint context :param session: Session information :param def_itc: Default ID Token claims :return: User information or None """ if def_itc: itc = def_itc else: itc = {} itc.update(id_token_claims(session)) if not itc: return None _claims = by_schema(endpoint_context.id_token_schema, **itc) if _claims: return collect_user_info(endpoint_context, session, _claims) else: return None
python
def userinfo_in_id_token_claims(endpoint_context, session, def_itc=None): """ Collect user info claims that are to be placed in the id token. :param endpoint_context: Endpoint context :param session: Session information :param def_itc: Default ID Token claims :return: User information or None """ if def_itc: itc = def_itc else: itc = {} itc.update(id_token_claims(session)) if not itc: return None _claims = by_schema(endpoint_context.id_token_schema, **itc) if _claims: return collect_user_info(endpoint_context, session, _claims) else: return None
[ "def", "userinfo_in_id_token_claims", "(", "endpoint_context", ",", "session", ",", "def_itc", "=", "None", ")", ":", "if", "def_itc", ":", "itc", "=", "def_itc", "else", ":", "itc", "=", "{", "}", "itc", ".", "update", "(", "id_token_claims", "(", "sessio...
Collect user info claims that are to be placed in the id token. :param endpoint_context: Endpoint context :param session: Session information :param def_itc: Default ID Token claims :return: User information or None
[ "Collect", "user", "info", "claims", "that", "are", "to", "be", "placed", "in", "the", "id", "token", "." ]
6c1d729d51bfb6332816117fe476073df7a1d823
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/userinfo.py#L164-L188
train
49,500
thombashi/tabledata
tabledata/_core.py
TableData.value_matrix
def value_matrix(self): """Converted rows of tabular data. Returns: |list| or |tuple|: Table rows. """ if self.__value_matrix: return self.__value_matrix self.__value_matrix = [ [value_dp.data for value_dp in value_dp_list] for value_dp_list in self.value_dp_matrix ] return self.__value_matrix
python
def value_matrix(self): """Converted rows of tabular data. Returns: |list| or |tuple|: Table rows. """ if self.__value_matrix: return self.__value_matrix self.__value_matrix = [ [value_dp.data for value_dp in value_dp_list] for value_dp_list in self.value_dp_matrix ] return self.__value_matrix
[ "def", "value_matrix", "(", "self", ")", ":", "if", "self", ".", "__value_matrix", ":", "return", "self", ".", "__value_matrix", "self", ".", "__value_matrix", "=", "[", "[", "value_dp", ".", "data", "for", "value_dp", "in", "value_dp_list", "]", "for", "v...
Converted rows of tabular data. Returns: |list| or |tuple|: Table rows.
[ "Converted", "rows", "of", "tabular", "data", "." ]
03d623be30fc62381f1b7fb2aa0e17a0e26ad473
https://github.com/thombashi/tabledata/blob/03d623be30fc62381f1b7fb2aa0e17a0e26ad473/tabledata/_core.py#L79-L93
train
49,501
thombashi/tabledata
tabledata/_core.py
TableData.from_dataframe
def from_dataframe(dataframe, table_name=""): """ Initialize TableData instance from a pandas.DataFrame instance. :param pandas.DataFrame dataframe: :param str table_name: Table name to create. """ return TableData(table_name, list(dataframe.columns.values), dataframe.values.tolist())
python
def from_dataframe(dataframe, table_name=""): """ Initialize TableData instance from a pandas.DataFrame instance. :param pandas.DataFrame dataframe: :param str table_name: Table name to create. """ return TableData(table_name, list(dataframe.columns.values), dataframe.values.tolist())
[ "def", "from_dataframe", "(", "dataframe", ",", "table_name", "=", "\"\"", ")", ":", "return", "TableData", "(", "table_name", ",", "list", "(", "dataframe", ".", "columns", ".", "values", ")", ",", "dataframe", ".", "values", ".", "tolist", "(", ")", ")...
Initialize TableData instance from a pandas.DataFrame instance. :param pandas.DataFrame dataframe: :param str table_name: Table name to create.
[ "Initialize", "TableData", "instance", "from", "a", "pandas", ".", "DataFrame", "instance", "." ]
03d623be30fc62381f1b7fb2aa0e17a0e26ad473
https://github.com/thombashi/tabledata/blob/03d623be30fc62381f1b7fb2aa0e17a0e26ad473/tabledata/_core.py#L475-L483
train
49,502
volafiled/python-volapi
volapi/auxo.py
call_async
def call_async(func): """Decorates a function to be called async on the loop thread""" @wraps(func) def wrapper(self, *args, **kw): """Wraps instance method to be called on loop thread""" def call(): """Calls function on loop thread""" try: func(self, *args, **kw) except Exception: logger.exception( "failed to call async [%r] with [%r] [%r]", func, args, kw ) self.loop.call_soon_threadsafe(call) return wrapper
python
def call_async(func): """Decorates a function to be called async on the loop thread""" @wraps(func) def wrapper(self, *args, **kw): """Wraps instance method to be called on loop thread""" def call(): """Calls function on loop thread""" try: func(self, *args, **kw) except Exception: logger.exception( "failed to call async [%r] with [%r] [%r]", func, args, kw ) self.loop.call_soon_threadsafe(call) return wrapper
[ "def", "call_async", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "\"\"\"Wraps instance method to be called on loop thread\"\"\"", "def", "call", "(", ")", ":", "\"\"...
Decorates a function to be called async on the loop thread
[ "Decorates", "a", "function", "to", "be", "called", "async", "on", "the", "loop", "thread" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/auxo.py#L33-L51
train
49,503
volafiled/python-volapi
volapi/auxo.py
call_sync
def call_sync(func): """Decorates a function to be called sync on the loop thread""" @wraps(func) def wrapper(self, *args, **kw): """Wraps instance method to be called on loop thread""" # Just return when already on the event thread if self.thread.ident == get_ident(): return func(self, *args, **kw) barrier = Barrier(2) result = None ex = None def call(): """Calls function on loop thread""" nonlocal result, ex try: result = func(self, *args, **kw) except Exception as exc: ex = exc finally: barrier.wait() self.loop.call_soon_threadsafe(call) barrier.wait() if ex: raise ex or Exception("Unknown error") return result return wrapper
python
def call_sync(func): """Decorates a function to be called sync on the loop thread""" @wraps(func) def wrapper(self, *args, **kw): """Wraps instance method to be called on loop thread""" # Just return when already on the event thread if self.thread.ident == get_ident(): return func(self, *args, **kw) barrier = Barrier(2) result = None ex = None def call(): """Calls function on loop thread""" nonlocal result, ex try: result = func(self, *args, **kw) except Exception as exc: ex = exc finally: barrier.wait() self.loop.call_soon_threadsafe(call) barrier.wait() if ex: raise ex or Exception("Unknown error") return result return wrapper
[ "def", "call_sync", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "\"\"\"Wraps instance method to be called on loop thread\"\"\"", "# Just return when already on the event threa...
Decorates a function to be called sync on the loop thread
[ "Decorates", "a", "function", "to", "be", "called", "sync", "on", "the", "loop", "thread" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/auxo.py#L54-L85
train
49,504
volafiled/python-volapi
volapi/auxo.py
ListenerArbitrator.close
def close(self, proto): # pylint: disable=no-self-use """Closes a connection""" try: proto.sendClose() except Exception as ex: logger.exception("Failed to send close") proto.reraise(ex)
python
def close(self, proto): # pylint: disable=no-self-use """Closes a connection""" try: proto.sendClose() except Exception as ex: logger.exception("Failed to send close") proto.reraise(ex)
[ "def", "close", "(", "self", ",", "proto", ")", ":", "# pylint: disable=no-self-use", "try", ":", "proto", ".", "sendClose", "(", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "exception", "(", "\"Failed to send close\"", ")", "proto", ".", "re...
Closes a connection
[ "Closes", "a", "connection" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/auxo.py#L199-L207
train
49,505
volafiled/python-volapi
volapi/auxo.py
Listeners.process
def process(self): """Process queue for these listeners. Only the items with type that matches """ with self.lock, self.enlock: queue = copy(self.queue) self.queue.clear() callbacks = copy(self.callbacks) with self.lock: rm_cb = False for ki, vi in queue.items(): if ki in self.callbacks: for item in vi: for cb in self.callbacks[ki]: if cb(item) is False: callbacks[ki].remove(cb) if not callbacks[ki]: del callbacks[ki] rm_cb = True with self.lock: if rm_cb: self.callbacks.clear() for k, v in callbacks.items(): self.callbacks[k].extend(v) return len(self.callbacks)
python
def process(self): """Process queue for these listeners. Only the items with type that matches """ with self.lock, self.enlock: queue = copy(self.queue) self.queue.clear() callbacks = copy(self.callbacks) with self.lock: rm_cb = False for ki, vi in queue.items(): if ki in self.callbacks: for item in vi: for cb in self.callbacks[ki]: if cb(item) is False: callbacks[ki].remove(cb) if not callbacks[ki]: del callbacks[ki] rm_cb = True with self.lock: if rm_cb: self.callbacks.clear() for k, v in callbacks.items(): self.callbacks[k].extend(v) return len(self.callbacks)
[ "def", "process", "(", "self", ")", ":", "with", "self", ".", "lock", ",", "self", ".", "enlock", ":", "queue", "=", "copy", "(", "self", ".", "queue", ")", "self", ".", "queue", ".", "clear", "(", ")", "callbacks", "=", "copy", "(", "self", ".",...
Process queue for these listeners. Only the items with type that matches
[ "Process", "queue", "for", "these", "listeners", ".", "Only", "the", "items", "with", "type", "that", "matches" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/auxo.py#L228-L254
train
49,506
volafiled/python-volapi
volapi/auxo.py
Listeners.add
def add(self, callback_type, callback): """Add a new listener""" with self.lock: self.callbacks[callback_type].append(callback)
python
def add(self, callback_type, callback): """Add a new listener""" with self.lock: self.callbacks[callback_type].append(callback)
[ "def", "add", "(", "self", ",", "callback_type", ",", "callback", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "callbacks", "[", "callback_type", "]", ".", "append", "(", "callback", ")" ]
Add a new listener
[ "Add", "a", "new", "listener" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/auxo.py#L256-L260
train
49,507
volafiled/python-volapi
volapi/auxo.py
Listeners.enqueue
def enqueue(self, item_type, item): """Queue a new data item, make item iterable""" with self.enlock: self.queue[item_type].append(item)
python
def enqueue(self, item_type, item): """Queue a new data item, make item iterable""" with self.enlock: self.queue[item_type].append(item)
[ "def", "enqueue", "(", "self", ",", "item_type", ",", "item", ")", ":", "with", "self", ".", "enlock", ":", "self", ".", "queue", "[", "item_type", "]", ".", "append", "(", "item", ")" ]
Queue a new data item, make item iterable
[ "Queue", "a", "new", "data", "item", "make", "item", "iterable" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/auxo.py#L262-L266
train
49,508
guaix-ucm/pyemir
emirdrp/processing/wavecal/slitlet2darc.py
Slitlet2dArc.xy_spectrail_arc_intersections
def xy_spectrail_arc_intersections(self, slitlet2d=None): """Compute intersection points of spectrum trails with arc lines. The member list_arc_lines is updated with new keyword:keyval values for each arc line. Parameters ---------- slitlet2d : numpy array Slitlet image to be displayed with the computed boundaries and intersecting points overplotted. This argument is optional. """ # protections if self.list_arc_lines is None: raise ValueError("Arc lines not sought") number_spectrum_trails = len(self.list_spectrails) if number_spectrum_trails == 0: raise ValueError("Number of available spectrum trails is 0") number_arc_lines = len(self.list_arc_lines) if number_arc_lines == 0: raise ValueError("Number of available arc lines is 0") # intersection of the arc lines with the spectrum trails # (note: the coordinates are computed using pixel values, # ranging from 1 to EMIR_NAXIS1, as given in the original # image reference system ---not in the slitlet image reference # system---) self.x_inter_rect = np.array([]) # rectified image coordinates self.y_inter_rect = np.array([]) # rectified image coordinates for arcline in self.list_arc_lines: # middle spectrum trail spectrail = self.list_spectrails[self.i_middle_spectrail] xroot, yroot = intersection_spectrail_arcline( spectrail=spectrail, arcline=arcline ) arcline.x_rectified = xroot self.x_inter_rect = np.append( self.x_inter_rect, [xroot] * number_spectrum_trails ) for spectrail in self.list_spectrails: # compute expected ordinate y_expected in the rectified # image y_expected = self.corr_yrect_a + self.corr_yrect_b * \ spectrail.y_rectified self.y_inter_rect = np.append(self.y_inter_rect, y_expected) if abs(self.debugplot) >= 10: print('>>> y0_frontier_lower_expected........: ', self.y0_frontier_lower_expected) print('>>> y0_frontier_upper_expected........: ', self.y0_frontier_upper_expected) print('>>> shifted y0_frontier_upper_expected: ', self.corr_yrect_a + self.corr_yrect_b * self.y0_frontier_lower) print('>>> 
shifted y0_frontier_lower_expected: ', self.corr_yrect_a + self.corr_yrect_b * self.y0_frontier_upper) # self.x_inter_orig = np.array([]) # original image coordinates self.y_inter_orig = np.array([]) # original image coordinates for arcline in self.list_arc_lines: for spectrail in self.list_spectrails: xroot, yroot = intersection_spectrail_arcline( spectrail=spectrail, arcline=arcline ) self.x_inter_orig = np.append(self.x_inter_orig, xroot) self.y_inter_orig = np.append(self.y_inter_orig, yroot) # display intersection points if abs(self.debugplot % 10) != 0 and slitlet2d is not None: # display image with zscale cuts title = "Slitlet#" + str(self.islitlet) + \ " (xy_spectrail_arc_intersections)" ax = ximshow(slitlet2d, title=title, first_pixel=(self.bb_nc1_orig, self.bb_ns1_orig), show=False) # spectrum trails for spectrail in self.list_spectrails: xdum, ydum = spectrail.linspace_pix(start=self.bb_nc1_orig, stop=self.bb_nc2_orig) ax.plot(xdum, ydum, 'g') # arc lines for arcline in self.list_arc_lines: xdum, ydum = arcline.linspace_pix(start=self.bb_ns1_orig, stop=self.bb_ns2_orig) ax.plot(xdum, ydum, 'g') # intersection points ax.plot(self.x_inter_orig, self.y_inter_orig, 'co') ax.plot(self.x_inter_rect, self.y_inter_rect, 'bo') # show plot pause_debugplot(self.debugplot, pltshow=True)
python
def xy_spectrail_arc_intersections(self, slitlet2d=None): """Compute intersection points of spectrum trails with arc lines. The member list_arc_lines is updated with new keyword:keyval values for each arc line. Parameters ---------- slitlet2d : numpy array Slitlet image to be displayed with the computed boundaries and intersecting points overplotted. This argument is optional. """ # protections if self.list_arc_lines is None: raise ValueError("Arc lines not sought") number_spectrum_trails = len(self.list_spectrails) if number_spectrum_trails == 0: raise ValueError("Number of available spectrum trails is 0") number_arc_lines = len(self.list_arc_lines) if number_arc_lines == 0: raise ValueError("Number of available arc lines is 0") # intersection of the arc lines with the spectrum trails # (note: the coordinates are computed using pixel values, # ranging from 1 to EMIR_NAXIS1, as given in the original # image reference system ---not in the slitlet image reference # system---) self.x_inter_rect = np.array([]) # rectified image coordinates self.y_inter_rect = np.array([]) # rectified image coordinates for arcline in self.list_arc_lines: # middle spectrum trail spectrail = self.list_spectrails[self.i_middle_spectrail] xroot, yroot = intersection_spectrail_arcline( spectrail=spectrail, arcline=arcline ) arcline.x_rectified = xroot self.x_inter_rect = np.append( self.x_inter_rect, [xroot] * number_spectrum_trails ) for spectrail in self.list_spectrails: # compute expected ordinate y_expected in the rectified # image y_expected = self.corr_yrect_a + self.corr_yrect_b * \ spectrail.y_rectified self.y_inter_rect = np.append(self.y_inter_rect, y_expected) if abs(self.debugplot) >= 10: print('>>> y0_frontier_lower_expected........: ', self.y0_frontier_lower_expected) print('>>> y0_frontier_upper_expected........: ', self.y0_frontier_upper_expected) print('>>> shifted y0_frontier_upper_expected: ', self.corr_yrect_a + self.corr_yrect_b * self.y0_frontier_lower) print('>>> 
shifted y0_frontier_lower_expected: ', self.corr_yrect_a + self.corr_yrect_b * self.y0_frontier_upper) # self.x_inter_orig = np.array([]) # original image coordinates self.y_inter_orig = np.array([]) # original image coordinates for arcline in self.list_arc_lines: for spectrail in self.list_spectrails: xroot, yroot = intersection_spectrail_arcline( spectrail=spectrail, arcline=arcline ) self.x_inter_orig = np.append(self.x_inter_orig, xroot) self.y_inter_orig = np.append(self.y_inter_orig, yroot) # display intersection points if abs(self.debugplot % 10) != 0 and slitlet2d is not None: # display image with zscale cuts title = "Slitlet#" + str(self.islitlet) + \ " (xy_spectrail_arc_intersections)" ax = ximshow(slitlet2d, title=title, first_pixel=(self.bb_nc1_orig, self.bb_ns1_orig), show=False) # spectrum trails for spectrail in self.list_spectrails: xdum, ydum = spectrail.linspace_pix(start=self.bb_nc1_orig, stop=self.bb_nc2_orig) ax.plot(xdum, ydum, 'g') # arc lines for arcline in self.list_arc_lines: xdum, ydum = arcline.linspace_pix(start=self.bb_ns1_orig, stop=self.bb_ns2_orig) ax.plot(xdum, ydum, 'g') # intersection points ax.plot(self.x_inter_orig, self.y_inter_orig, 'co') ax.plot(self.x_inter_rect, self.y_inter_rect, 'bo') # show plot pause_debugplot(self.debugplot, pltshow=True)
[ "def", "xy_spectrail_arc_intersections", "(", "self", ",", "slitlet2d", "=", "None", ")", ":", "# protections", "if", "self", ".", "list_arc_lines", "is", "None", ":", "raise", "ValueError", "(", "\"Arc lines not sought\"", ")", "number_spectrum_trails", "=", "len",...
Compute intersection points of spectrum trails with arc lines. The member list_arc_lines is updated with new keyword:keyval values for each arc line. Parameters ---------- slitlet2d : numpy array Slitlet image to be displayed with the computed boundaries and intersecting points overplotted. This argument is optional.
[ "Compute", "intersection", "points", "of", "spectrum", "trails", "with", "arc", "lines", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/slitlet2darc.py#L856-L948
train
49,509
guaix-ucm/pyemir
emirdrp/recipes/image/shared.py
intersection
def intersection(a, b, scale=1): '''Intersection between two segments.''' try: a1, a2 = a except TypeError: a1 = a.start a2 = a.stop try: b1, b2 = b except TypeError: b1 = b.start b2 = b.stop if a2 <= b1: return None if a1 >= b2: return None # a2 > b1 and a1 < b2 if a2 <= b2: if a1 <= b1: return slice(b1 * scale, a2 * scale) else: return slice(a1 * scale, a2 * scale) else: if a1 <= b1: return slice(b1 * scale, b2 * scale) else: return slice(a1 * scale, b2 * scale)
python
def intersection(a, b, scale=1): '''Intersection between two segments.''' try: a1, a2 = a except TypeError: a1 = a.start a2 = a.stop try: b1, b2 = b except TypeError: b1 = b.start b2 = b.stop if a2 <= b1: return None if a1 >= b2: return None # a2 > b1 and a1 < b2 if a2 <= b2: if a1 <= b1: return slice(b1 * scale, a2 * scale) else: return slice(a1 * scale, a2 * scale) else: if a1 <= b1: return slice(b1 * scale, b2 * scale) else: return slice(a1 * scale, b2 * scale)
[ "def", "intersection", "(", "a", ",", "b", ",", "scale", "=", "1", ")", ":", "try", ":", "a1", ",", "a2", "=", "a", "except", "TypeError", ":", "a1", "=", "a", ".", "start", "a2", "=", "a", ".", "stop", "try", ":", "b1", ",", "b2", "=", "b"...
Intersection between two segments.
[ "Intersection", "between", "two", "segments", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/recipes/image/shared.py#L67-L97
train
49,510
guaix-ucm/pyemir
emirdrp/recipes/image/shared.py
clip_slices
def clip_slices(r, region, scale=1): '''Intersect slices with a region.''' t = [] for ch in r: a1 = intersection(ch[0], region[0], scale=scale) if a1 is None: continue a2 = intersection(ch[1], region[1], scale=scale) if a2 is None: continue t.append((a1, a2)) return t
python
def clip_slices(r, region, scale=1): '''Intersect slices with a region.''' t = [] for ch in r: a1 = intersection(ch[0], region[0], scale=scale) if a1 is None: continue a2 = intersection(ch[1], region[1], scale=scale) if a2 is None: continue t.append((a1, a2)) return t
[ "def", "clip_slices", "(", "r", ",", "region", ",", "scale", "=", "1", ")", ":", "t", "=", "[", "]", "for", "ch", "in", "r", ":", "a1", "=", "intersection", "(", "ch", "[", "0", "]", ",", "region", "[", "0", "]", ",", "scale", "=", "scale", ...
Intersect slices with a region.
[ "Intersect", "slices", "with", "a", "region", "." ]
fef6bbabcb13f80123cafd1800a0f508a3c21702
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/recipes/image/shared.py#L100-L113
train
49,511
BeyondTheClouds/enoslib
enoslib/api.py
_load_defaults
def _load_defaults(inventory_path=None, roles=None, extra_vars=None, tags=None, basedir=False): """Load common defaults data structures. For factorization purpose.""" extra_vars = extra_vars or {} tags = tags or [] loader = DataLoader() if basedir: loader.set_basedir(basedir) inventory = EnosInventory(loader=loader, sources=inventory_path, roles=roles) variable_manager = VariableManager(loader=loader, inventory=inventory) # seems mandatory to load group_vars variable if basedir: variable_manager.safe_basedir = True if extra_vars: variable_manager.extra_vars = extra_vars # NOTE(msimonin): The ansible api is "low level" in the # sense that we are redefining here all the default values # that are usually enforce by ansible called from the cli Options = namedtuple("Options", ["listtags", "listtasks", "listhosts", "syntax", "connection", "module_path", "forks", "private_key_file", "ssh_common_args", "ssh_extra_args", "sftp_extra_args", "scp_extra_args", "become", "become_method", "become_user", "remote_user", "verbosity", "check", "tags", "diff", "basedir"]) options = Options(listtags=False, listtasks=False, listhosts=False, syntax=False, connection="ssh", module_path=None, forks=100, private_key_file=None, ssh_common_args=None, ssh_extra_args=None, sftp_extra_args=None, scp_extra_args=None, become=None, become_method="sudo", become_user="root", remote_user=None, verbosity=2, check=False, tags=tags, diff=None, basedir=basedir) return inventory, variable_manager, loader, options
python
def _load_defaults(inventory_path=None, roles=None, extra_vars=None, tags=None, basedir=False): """Load common defaults data structures. For factorization purpose.""" extra_vars = extra_vars or {} tags = tags or [] loader = DataLoader() if basedir: loader.set_basedir(basedir) inventory = EnosInventory(loader=loader, sources=inventory_path, roles=roles) variable_manager = VariableManager(loader=loader, inventory=inventory) # seems mandatory to load group_vars variable if basedir: variable_manager.safe_basedir = True if extra_vars: variable_manager.extra_vars = extra_vars # NOTE(msimonin): The ansible api is "low level" in the # sense that we are redefining here all the default values # that are usually enforce by ansible called from the cli Options = namedtuple("Options", ["listtags", "listtasks", "listhosts", "syntax", "connection", "module_path", "forks", "private_key_file", "ssh_common_args", "ssh_extra_args", "sftp_extra_args", "scp_extra_args", "become", "become_method", "become_user", "remote_user", "verbosity", "check", "tags", "diff", "basedir"]) options = Options(listtags=False, listtasks=False, listhosts=False, syntax=False, connection="ssh", module_path=None, forks=100, private_key_file=None, ssh_common_args=None, ssh_extra_args=None, sftp_extra_args=None, scp_extra_args=None, become=None, become_method="sudo", become_user="root", remote_user=None, verbosity=2, check=False, tags=tags, diff=None, basedir=basedir) return inventory, variable_manager, loader, options
[ "def", "_load_defaults", "(", "inventory_path", "=", "None", ",", "roles", "=", "None", ",", "extra_vars", "=", "None", ",", "tags", "=", "None", ",", "basedir", "=", "False", ")", ":", "extra_vars", "=", "extra_vars", "or", "{", "}", "tags", "=", "tag...
Load common defaults data structures. For factorization purpose.
[ "Load", "common", "defaults", "data", "structures", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L45-L96
train
49,512
BeyondTheClouds/enoslib
enoslib/api.py
run_play
def run_play(play_source, inventory_path=None, roles=None, extra_vars=None, on_error_continue=False): """Run a play. Args: pattern_hosts (str): pattern to describe ansible hosts to target. see https://docs.ansible.com/ansible/latest/intro_patterns.html play_source (dict): ansible task inventory_path (str): inventory to use extra_vars (dict): extra_vars to use on_error_continue(bool): Don't throw any exception in case a host is unreachable or the playbooks run with errors Raises: :py:class:`enoslib.errors.EnosFailedHostsError`: if a task returns an error on a host and ``on_error_continue==False`` :py:class:`enoslib.errors.EnosUnreachableHostsError`: if a host is unreachable (through ssh) and ``on_error_continue==False`` Returns: List of all the results """ # NOTE(msimonin): inventory could be infered from a host list (maybe) results = [] inventory, variable_manager, loader, options = _load_defaults( inventory_path=inventory_path, roles=roles, extra_vars=extra_vars) callback = _MyCallback(results) passwords = {} tqm = task_queue_manager.TaskQueueManager( inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=passwords, stdout_callback=callback) # create play play_inst = play.Play().load(play_source, variable_manager=variable_manager, loader=loader) # actually run it try: tqm.run(play_inst) finally: tqm.cleanup() # Handling errors failed_hosts = [] unreachable_hosts = [] for r in results: if r.status == STATUS_UNREACHABLE: unreachable_hosts.append(r) if r.status == STATUS_FAILED: failed_hosts.append(r) if len(failed_hosts) > 0: logger.error("Failed hosts: %s" % failed_hosts) if not on_error_continue: raise EnosFailedHostsError(failed_hosts) if len(unreachable_hosts) > 0: logger.error("Unreachable hosts: %s" % unreachable_hosts) if not on_error_continue: raise EnosUnreachableHostsError(unreachable_hosts) return results
python
def run_play(play_source, inventory_path=None, roles=None, extra_vars=None, on_error_continue=False): """Run a play. Args: pattern_hosts (str): pattern to describe ansible hosts to target. see https://docs.ansible.com/ansible/latest/intro_patterns.html play_source (dict): ansible task inventory_path (str): inventory to use extra_vars (dict): extra_vars to use on_error_continue(bool): Don't throw any exception in case a host is unreachable or the playbooks run with errors Raises: :py:class:`enoslib.errors.EnosFailedHostsError`: if a task returns an error on a host and ``on_error_continue==False`` :py:class:`enoslib.errors.EnosUnreachableHostsError`: if a host is unreachable (through ssh) and ``on_error_continue==False`` Returns: List of all the results """ # NOTE(msimonin): inventory could be infered from a host list (maybe) results = [] inventory, variable_manager, loader, options = _load_defaults( inventory_path=inventory_path, roles=roles, extra_vars=extra_vars) callback = _MyCallback(results) passwords = {} tqm = task_queue_manager.TaskQueueManager( inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=passwords, stdout_callback=callback) # create play play_inst = play.Play().load(play_source, variable_manager=variable_manager, loader=loader) # actually run it try: tqm.run(play_inst) finally: tqm.cleanup() # Handling errors failed_hosts = [] unreachable_hosts = [] for r in results: if r.status == STATUS_UNREACHABLE: unreachable_hosts.append(r) if r.status == STATUS_FAILED: failed_hosts.append(r) if len(failed_hosts) > 0: logger.error("Failed hosts: %s" % failed_hosts) if not on_error_continue: raise EnosFailedHostsError(failed_hosts) if len(unreachable_hosts) > 0: logger.error("Unreachable hosts: %s" % unreachable_hosts) if not on_error_continue: raise EnosUnreachableHostsError(unreachable_hosts) return results
[ "def", "run_play", "(", "play_source", ",", "inventory_path", "=", "None", ",", "roles", "=", "None", ",", "extra_vars", "=", "None", ",", "on_error_continue", "=", "False", ")", ":", "# NOTE(msimonin): inventory could be infered from a host list (maybe)", "results", ...
Run a play. Args: pattern_hosts (str): pattern to describe ansible hosts to target. see https://docs.ansible.com/ansible/latest/intro_patterns.html play_source (dict): ansible task inventory_path (str): inventory to use extra_vars (dict): extra_vars to use on_error_continue(bool): Don't throw any exception in case a host is unreachable or the playbooks run with errors Raises: :py:class:`enoslib.errors.EnosFailedHostsError`: if a task returns an error on a host and ``on_error_continue==False`` :py:class:`enoslib.errors.EnosUnreachableHostsError`: if a host is unreachable (through ssh) and ``on_error_continue==False`` Returns: List of all the results
[ "Run", "a", "play", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L135-L202
train
49,513
BeyondTheClouds/enoslib
enoslib/api.py
run_command
def run_command(pattern_hosts, command, inventory_path=None, roles=None, extra_vars=None, on_error_continue=False): """Run a shell command on some remote hosts. Args: pattern_hosts (str): pattern to describe ansible hosts to target. see https://docs.ansible.com/ansible/latest/intro_patterns.html command (str): the command to run inventory_path (str): inventory to use extra_vars (dict): extra_vars to use on_error_continue(bool): Don't throw any exception in case a host is unreachable or the playbooks run with errors Raises: :py:class:`enoslib.errors.EnosFailedHostsError`: if a task returns an error on a host and ``on_error_continue==False`` :py:class:`enoslib.errors.EnosUnreachableHostsError`: if a host is unreachable (through ssh) and ``on_error_continue==False`` Returns: Dict combining the stdout and stderr of ok and failed hosts and every results of tasks executed (this may include the fact gathering tasks) Example: .. code-block:: python # Inventory [control1] enos-0 [control2] enos-1 # Python result = run_command("control*", "date", inventory) # Result { 'failed': {}, 'ok': { u'enos-0': { 'stderr': u'', 'stdout': u'Tue Oct 31 04:53:04 GMT 2017' }, u'enos-1': { 'stderr': u'', 'stdout': u'Tue Oct 31 04:53:05 GMT 2017'} }, 'results': [...] } If facts are gathers this is possible to use ansible templating .. 
code-block:: python result = run_command("control*", "ping -c 1 {{hostvars['enos-1']['ansible_' + n1].ipv4.address}}", inventory) """ def filter_results(results, status): _r = [r for r in results if r.status == status and r.task == COMMAND_NAME] s = dict([[r.host, {"stdout": r.payload.get("stdout"), "stderr": r.payload.get("stderr")}] for r in _r]) return s play_source = { "hosts": pattern_hosts, "tasks": [{ "name": COMMAND_NAME, "shell": command, }] } results = run_play(play_source, inventory_path=inventory_path, roles=roles, extra_vars=extra_vars) ok = filter_results(results, STATUS_OK) failed = filter_results(results, STATUS_FAILED) return {"ok": ok, "failed": failed, "results": results}
python
def run_command(pattern_hosts, command, inventory_path=None, roles=None, extra_vars=None, on_error_continue=False): """Run a shell command on some remote hosts. Args: pattern_hosts (str): pattern to describe ansible hosts to target. see https://docs.ansible.com/ansible/latest/intro_patterns.html command (str): the command to run inventory_path (str): inventory to use extra_vars (dict): extra_vars to use on_error_continue(bool): Don't throw any exception in case a host is unreachable or the playbooks run with errors Raises: :py:class:`enoslib.errors.EnosFailedHostsError`: if a task returns an error on a host and ``on_error_continue==False`` :py:class:`enoslib.errors.EnosUnreachableHostsError`: if a host is unreachable (through ssh) and ``on_error_continue==False`` Returns: Dict combining the stdout and stderr of ok and failed hosts and every results of tasks executed (this may include the fact gathering tasks) Example: .. code-block:: python # Inventory [control1] enos-0 [control2] enos-1 # Python result = run_command("control*", "date", inventory) # Result { 'failed': {}, 'ok': { u'enos-0': { 'stderr': u'', 'stdout': u'Tue Oct 31 04:53:04 GMT 2017' }, u'enos-1': { 'stderr': u'', 'stdout': u'Tue Oct 31 04:53:05 GMT 2017'} }, 'results': [...] } If facts are gathers this is possible to use ansible templating .. 
code-block:: python result = run_command("control*", "ping -c 1 {{hostvars['enos-1']['ansible_' + n1].ipv4.address}}", inventory) """ def filter_results(results, status): _r = [r for r in results if r.status == status and r.task == COMMAND_NAME] s = dict([[r.host, {"stdout": r.payload.get("stdout"), "stderr": r.payload.get("stderr")}] for r in _r]) return s play_source = { "hosts": pattern_hosts, "tasks": [{ "name": COMMAND_NAME, "shell": command, }] } results = run_play(play_source, inventory_path=inventory_path, roles=roles, extra_vars=extra_vars) ok = filter_results(results, STATUS_OK) failed = filter_results(results, STATUS_FAILED) return {"ok": ok, "failed": failed, "results": results}
[ "def", "run_command", "(", "pattern_hosts", ",", "command", ",", "inventory_path", "=", "None", ",", "roles", "=", "None", ",", "extra_vars", "=", "None", ",", "on_error_continue", "=", "False", ")", ":", "def", "filter_results", "(", "results", ",", "status...
Run a shell command on some remote hosts. Args: pattern_hosts (str): pattern to describe ansible hosts to target. see https://docs.ansible.com/ansible/latest/intro_patterns.html command (str): the command to run inventory_path (str): inventory to use extra_vars (dict): extra_vars to use on_error_continue(bool): Don't throw any exception in case a host is unreachable or the playbooks run with errors Raises: :py:class:`enoslib.errors.EnosFailedHostsError`: if a task returns an error on a host and ``on_error_continue==False`` :py:class:`enoslib.errors.EnosUnreachableHostsError`: if a host is unreachable (through ssh) and ``on_error_continue==False`` Returns: Dict combining the stdout and stderr of ok and failed hosts and every results of tasks executed (this may include the fact gathering tasks) Example: .. code-block:: python # Inventory [control1] enos-0 [control2] enos-1 # Python result = run_command("control*", "date", inventory) # Result { 'failed': {}, 'ok': { u'enos-0': { 'stderr': u'', 'stdout': u'Tue Oct 31 04:53:04 GMT 2017' }, u'enos-1': { 'stderr': u'', 'stdout': u'Tue Oct 31 04:53:05 GMT 2017'} }, 'results': [...] } If facts are gathers this is possible to use ansible templating .. code-block:: python result = run_command("control*", "ping -c 1 {{hostvars['enos-1']['ansible_' + n1].ipv4.address}}", inventory)
[ "Run", "a", "shell", "command", "on", "some", "remote", "hosts", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L306-L390
train
49,514
BeyondTheClouds/enoslib
enoslib/api.py
run_ansible
def run_ansible(playbooks, inventory_path=None, roles=None, extra_vars=None, tags=None, on_error_continue=False, basedir='.'): """Run Ansible. Args: playbooks (list): list of paths to the playbooks to run inventory_path (str): path to the hosts file (inventory) extra_var (dict): extra vars to pass tags (list): list of tags to run on_error_continue(bool): Don't throw any exception in case a host is unreachable or the playbooks run with errors Raises: :py:class:`enoslib.errors.EnosFailedHostsError`: if a task returns an error on a host and ``on_error_continue==False`` :py:class:`enoslib.errors.EnosUnreachableHostsError`: if a host is unreachable (through ssh) and ``on_error_continue==False`` """ inventory, variable_manager, loader, options = _load_defaults( inventory_path=inventory_path, roles=roles, extra_vars=extra_vars, tags=tags, basedir=basedir ) passwords = {} for path in playbooks: logger.info("Running playbook %s with vars:\n%s" % (path, extra_vars)) pbex = PlaybookExecutor( playbooks=[path], inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=passwords ) code = pbex.run() stats = pbex._tqm._stats hosts = stats.processed.keys() result = [{h: stats.summarize(h)} for h in hosts] results = {"code": code, "result": result, "playbook": path} print(results) failed_hosts = [] unreachable_hosts = [] for h in hosts: t = stats.summarize(h) if t["failures"] > 0: failed_hosts.append(h) if t["unreachable"] > 0: unreachable_hosts.append(h) if len(failed_hosts) > 0: logger.error("Failed hosts: %s" % failed_hosts) if not on_error_continue: raise EnosFailedHostsError(failed_hosts) if len(unreachable_hosts) > 0: logger.error("Unreachable hosts: %s" % unreachable_hosts) if not on_error_continue: raise EnosUnreachableHostsError(unreachable_hosts)
python
def run_ansible(playbooks, inventory_path=None, roles=None, extra_vars=None, tags=None, on_error_continue=False, basedir='.'): """Run Ansible. Args: playbooks (list): list of paths to the playbooks to run inventory_path (str): path to the hosts file (inventory) extra_var (dict): extra vars to pass tags (list): list of tags to run on_error_continue(bool): Don't throw any exception in case a host is unreachable or the playbooks run with errors Raises: :py:class:`enoslib.errors.EnosFailedHostsError`: if a task returns an error on a host and ``on_error_continue==False`` :py:class:`enoslib.errors.EnosUnreachableHostsError`: if a host is unreachable (through ssh) and ``on_error_continue==False`` """ inventory, variable_manager, loader, options = _load_defaults( inventory_path=inventory_path, roles=roles, extra_vars=extra_vars, tags=tags, basedir=basedir ) passwords = {} for path in playbooks: logger.info("Running playbook %s with vars:\n%s" % (path, extra_vars)) pbex = PlaybookExecutor( playbooks=[path], inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=passwords ) code = pbex.run() stats = pbex._tqm._stats hosts = stats.processed.keys() result = [{h: stats.summarize(h)} for h in hosts] results = {"code": code, "result": result, "playbook": path} print(results) failed_hosts = [] unreachable_hosts = [] for h in hosts: t = stats.summarize(h) if t["failures"] > 0: failed_hosts.append(h) if t["unreachable"] > 0: unreachable_hosts.append(h) if len(failed_hosts) > 0: logger.error("Failed hosts: %s" % failed_hosts) if not on_error_continue: raise EnosFailedHostsError(failed_hosts) if len(unreachable_hosts) > 0: logger.error("Unreachable hosts: %s" % unreachable_hosts) if not on_error_continue: raise EnosUnreachableHostsError(unreachable_hosts)
[ "def", "run_ansible", "(", "playbooks", ",", "inventory_path", "=", "None", ",", "roles", "=", "None", ",", "extra_vars", "=", "None", ",", "tags", "=", "None", ",", "on_error_continue", "=", "False", ",", "basedir", "=", "'.'", ")", ":", "inventory", ",...
Run Ansible. Args: playbooks (list): list of paths to the playbooks to run inventory_path (str): path to the hosts file (inventory) extra_var (dict): extra vars to pass tags (list): list of tags to run on_error_continue(bool): Don't throw any exception in case a host is unreachable or the playbooks run with errors Raises: :py:class:`enoslib.errors.EnosFailedHostsError`: if a task returns an error on a host and ``on_error_continue==False`` :py:class:`enoslib.errors.EnosUnreachableHostsError`: if a host is unreachable (through ssh) and ``on_error_continue==False``
[ "Run", "Ansible", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L393-L456
train
49,515
BeyondTheClouds/enoslib
enoslib/api.py
discover_networks
def discover_networks(roles, networks, fake_interfaces=None, fake_networks=None): """Checks the network interfaces on the nodes. This enables to auto-discover the mapping interface name <-> network role. Beware, this has a side effect on each Host in roles. Args: roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` networks (list): network list as returned by :py:meth:`enoslib.infra.provider.Provider.init` fake_interfaces (list): names of optionnal dummy interfaces to create fake_networks (list): names of the roles to associate with the fake interfaces. Like reguilar network interfaces, the mapping will be added to the host vars. Internally this will be zipped with the fake_interfaces to produce the mapping. If the command is successful each host will be added some variables. Assuming that one network whose role is `mynetwork` has been declared, the following variables will be available through the ansible hostvars: - ``mynetwork=eth1``, `eth1` has been discovered has the interface in the network `mynetwork`. - ``mynetwork_dev=eth1``, same as above with a different accessor names - ``mynetwork_ip=192.168.42.42``, this indicates the ip in the network `mynetwork` for this node All of this variable can then be accessed by the other nodes through the hostvars: ``hostvars[remote_node]["mynetwork_ip"]`` """ def get_devices(facts): """Extract the network devices information from the facts.""" devices = [] for interface in facts['ansible_interfaces']: ansible_interface = 'ansible_' + interface # filter here (active/ name...) 
if 'ansible_' + interface in facts: interface = facts[ansible_interface] devices.append(interface) return devices wait_ssh(roles) tmpdir = os.path.join(os.getcwd(), TMP_DIRNAME) _check_tmpdir(tmpdir) fake_interfaces = fake_interfaces or [] fake_networks = fake_networks or [] utils_playbook = os.path.join(ANSIBLE_DIR, 'utils.yml') facts_file = os.path.join(tmpdir, 'facts.json') options = { 'enos_action': 'check_network', 'facts_file': facts_file, 'fake_interfaces': fake_interfaces } run_ansible([utils_playbook], roles=roles, extra_vars=options, on_error_continue=False) # Read the file # Match provider networks to interface names for each host with open(facts_file) as f: facts = json.load(f) for _, host_facts in facts.items(): host_nets = _map_device_on_host_networks(networks, get_devices(host_facts)) # Add the mapping : networks <-> nic name host_facts['networks'] = host_nets # Finally update the env with this information # generate the extra_mapping for the fake interfaces extra_mapping = dict(zip(fake_networks, fake_interfaces)) _update_hosts(roles, facts, extra_mapping=extra_mapping)
python
def discover_networks(roles, networks, fake_interfaces=None, fake_networks=None): """Checks the network interfaces on the nodes. This enables to auto-discover the mapping interface name <-> network role. Beware, this has a side effect on each Host in roles. Args: roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` networks (list): network list as returned by :py:meth:`enoslib.infra.provider.Provider.init` fake_interfaces (list): names of optionnal dummy interfaces to create fake_networks (list): names of the roles to associate with the fake interfaces. Like reguilar network interfaces, the mapping will be added to the host vars. Internally this will be zipped with the fake_interfaces to produce the mapping. If the command is successful each host will be added some variables. Assuming that one network whose role is `mynetwork` has been declared, the following variables will be available through the ansible hostvars: - ``mynetwork=eth1``, `eth1` has been discovered has the interface in the network `mynetwork`. - ``mynetwork_dev=eth1``, same as above with a different accessor names - ``mynetwork_ip=192.168.42.42``, this indicates the ip in the network `mynetwork` for this node All of this variable can then be accessed by the other nodes through the hostvars: ``hostvars[remote_node]["mynetwork_ip"]`` """ def get_devices(facts): """Extract the network devices information from the facts.""" devices = [] for interface in facts['ansible_interfaces']: ansible_interface = 'ansible_' + interface # filter here (active/ name...) 
if 'ansible_' + interface in facts: interface = facts[ansible_interface] devices.append(interface) return devices wait_ssh(roles) tmpdir = os.path.join(os.getcwd(), TMP_DIRNAME) _check_tmpdir(tmpdir) fake_interfaces = fake_interfaces or [] fake_networks = fake_networks or [] utils_playbook = os.path.join(ANSIBLE_DIR, 'utils.yml') facts_file = os.path.join(tmpdir, 'facts.json') options = { 'enos_action': 'check_network', 'facts_file': facts_file, 'fake_interfaces': fake_interfaces } run_ansible([utils_playbook], roles=roles, extra_vars=options, on_error_continue=False) # Read the file # Match provider networks to interface names for each host with open(facts_file) as f: facts = json.load(f) for _, host_facts in facts.items(): host_nets = _map_device_on_host_networks(networks, get_devices(host_facts)) # Add the mapping : networks <-> nic name host_facts['networks'] = host_nets # Finally update the env with this information # generate the extra_mapping for the fake interfaces extra_mapping = dict(zip(fake_networks, fake_interfaces)) _update_hosts(roles, facts, extra_mapping=extra_mapping)
[ "def", "discover_networks", "(", "roles", ",", "networks", ",", "fake_interfaces", "=", "None", ",", "fake_networks", "=", "None", ")", ":", "def", "get_devices", "(", "facts", ")", ":", "\"\"\"Extract the network devices information from the facts.\"\"\"", "devices", ...
Checks the network interfaces on the nodes. This enables to auto-discover the mapping interface name <-> network role. Beware, this has a side effect on each Host in roles. Args: roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` networks (list): network list as returned by :py:meth:`enoslib.infra.provider.Provider.init` fake_interfaces (list): names of optionnal dummy interfaces to create fake_networks (list): names of the roles to associate with the fake interfaces. Like reguilar network interfaces, the mapping will be added to the host vars. Internally this will be zipped with the fake_interfaces to produce the mapping. If the command is successful each host will be added some variables. Assuming that one network whose role is `mynetwork` has been declared, the following variables will be available through the ansible hostvars: - ``mynetwork=eth1``, `eth1` has been discovered has the interface in the network `mynetwork`. - ``mynetwork_dev=eth1``, same as above with a different accessor names - ``mynetwork_ip=192.168.42.42``, this indicates the ip in the network `mynetwork` for this node All of this variable can then be accessed by the other nodes through the hostvars: ``hostvars[remote_node]["mynetwork_ip"]``
[ "Checks", "the", "network", "interfaces", "on", "the", "nodes", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L459-L532
train
49,516
BeyondTheClouds/enoslib
enoslib/api.py
generate_inventory
def generate_inventory(roles, networks, inventory_path, check_networks=False, fake_interfaces=None, fake_networks=None): """Generate an inventory file in the ini format. The inventory is generated using the ``roles`` in the ``ini`` format. If ``check_network == True``, the function will try to discover which networks interfaces are available and map them to one network of the ``networks`` parameters. Note that this auto-discovery feature requires the servers to have their IP set. Args: roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` networks (list): network list as returned by :py:meth:`enoslib.infra.provider.Provider.init` inventory_path (str): path to the inventory to generate check_networks (bool): True to enable the auto-discovery of the mapping interface name <-> network role fake_interfaces (list): names of optionnal dummy interfaces to create on the nodes fake_networks (list): names of the roles to associate with the fake interfaces. Like reguilar network interfaces, the mapping will be added to the host vars. Internally this will be zipped with the fake_interfaces to produce the mapping. """ with open(inventory_path, "w") as f: f.write(_generate_inventory(roles)) if check_networks: discover_networks( roles, networks, fake_interfaces=fake_interfaces, fake_networks=fake_networks ) with open(inventory_path, "w") as f: f.write(_generate_inventory(roles))
python
def generate_inventory(roles, networks, inventory_path, check_networks=False, fake_interfaces=None, fake_networks=None): """Generate an inventory file in the ini format. The inventory is generated using the ``roles`` in the ``ini`` format. If ``check_network == True``, the function will try to discover which networks interfaces are available and map them to one network of the ``networks`` parameters. Note that this auto-discovery feature requires the servers to have their IP set. Args: roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` networks (list): network list as returned by :py:meth:`enoslib.infra.provider.Provider.init` inventory_path (str): path to the inventory to generate check_networks (bool): True to enable the auto-discovery of the mapping interface name <-> network role fake_interfaces (list): names of optionnal dummy interfaces to create on the nodes fake_networks (list): names of the roles to associate with the fake interfaces. Like reguilar network interfaces, the mapping will be added to the host vars. Internally this will be zipped with the fake_interfaces to produce the mapping. """ with open(inventory_path, "w") as f: f.write(_generate_inventory(roles)) if check_networks: discover_networks( roles, networks, fake_interfaces=fake_interfaces, fake_networks=fake_networks ) with open(inventory_path, "w") as f: f.write(_generate_inventory(roles))
[ "def", "generate_inventory", "(", "roles", ",", "networks", ",", "inventory_path", ",", "check_networks", "=", "False", ",", "fake_interfaces", "=", "None", ",", "fake_networks", "=", "None", ")", ":", "with", "open", "(", "inventory_path", ",", "\"w\"", ")", ...
Generate an inventory file in the ini format. The inventory is generated using the ``roles`` in the ``ini`` format. If ``check_network == True``, the function will try to discover which networks interfaces are available and map them to one network of the ``networks`` parameters. Note that this auto-discovery feature requires the servers to have their IP set. Args: roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` networks (list): network list as returned by :py:meth:`enoslib.infra.provider.Provider.init` inventory_path (str): path to the inventory to generate check_networks (bool): True to enable the auto-discovery of the mapping interface name <-> network role fake_interfaces (list): names of optionnal dummy interfaces to create on the nodes fake_networks (list): names of the roles to associate with the fake interfaces. Like reguilar network interfaces, the mapping will be added to the host vars. Internally this will be zipped with the fake_interfaces to produce the mapping.
[ "Generate", "an", "inventory", "file", "in", "the", "ini", "format", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L535-L571
train
49,517
BeyondTheClouds/enoslib
enoslib/api.py
emulate_network
def emulate_network(network_constraints, roles=None, inventory_path=None, extra_vars=None): """Emulate network links. Read ``network_constraints`` and apply ``tc`` rules on all the nodes. Constraints are applied between groups of machines. Theses groups are described in the ``network_constraints`` variable and must be found in the inventory file. The newtwork constraints support ``delay``, ``rate`` and ``loss``. Args: network_constraints (dict): network constraints to apply roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` inventory_path(string): path to an inventory extra_vars (dict): extra_vars to pass to ansible Examples: * Using defaults The following will apply the network constraints between every groups. For instance the constraints will be applied for the communication between "n1" and "n3" but not between "n1" and "n2". Note that using default leads to symetric constraints. .. code-block:: python roles = { "grp1": ["n1", "n2"], "grp2": ["n3", "n4"], "grp3": ["n3", "n4"], } tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", } emulate_network(roles, tc) If you want to control more precisely which groups need to be taken into account, you can use ``except`` or ``groups`` key .. code-block:: python tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", "except": "grp3" } emulate_network(roles, tc) is equivalent to .. code-block:: python tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", "groups": ["grp1", "grp2"] } emulate_network(roles, inventory, tc) * Using ``src`` and ``dst`` The following will enforce a symetric constraint between ``grp1`` and ``grp2``. .. 
code-block:: python tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", "constraints": [{ "src": "grp1" "dst": "grp2" "delay": "10ms" "symetric": True }] } emulate_network(roles, inventory, tc) """ # 1) Retrieve the list of ips for all nodes (Ansible) # 2) Build all the constraints (Python) # {source:src, target: ip_dest, device: if, rate:x, delay:y} # 3) Enforce those constraints (Ansible) if not network_constraints: return if roles is None and inventory is None: raise ValueError("roles and inventory can't be None") if not extra_vars: extra_vars = {} # 1. getting ips/devices information logger.debug('Getting the ips of all nodes') tmpdir = os.path.join(os.getcwd(), TMP_DIRNAME) _check_tmpdir(tmpdir) utils_playbook = os.path.join(ANSIBLE_DIR, 'utils.yml') ips_file = os.path.join(tmpdir, 'ips.txt') options = {'enos_action': 'tc_ips', 'ips_file': ips_file} run_ansible([utils_playbook], roles=roles, extra_vars=options) # 2.a building the group constraints logger.debug('Building all the constraints') constraints = _build_grp_constraints(roles, network_constraints) # 2.b Building the ip/device level constaints with open(ips_file) as f: ips = yaml.safe_load(f) # will hold every single constraint ips_with_constraints = _build_ip_constraints(roles, ips, constraints) # dumping it for debugging purpose ips_with_constraints_file = os.path.join(tmpdir, 'ips_with_constraints.yml') with open(ips_with_constraints_file, 'w') as g: yaml.dump(ips_with_constraints, g) # 3. Enforcing those constraints logger.info('Enforcing the constraints') # enabling/disabling network constraints enable = network_constraints.setdefault('enable', True) utils_playbook = os.path.join(ANSIBLE_DIR, 'utils.yml') options = { 'enos_action': 'tc_apply', 'ips_with_constraints': ips_with_constraints, 'tc_enable': enable, } options.update(extra_vars) run_ansible([utils_playbook], roles=roles, inventory_path=inventory_path, extra_vars=options)
python
def emulate_network(network_constraints, roles=None, inventory_path=None, extra_vars=None): """Emulate network links. Read ``network_constraints`` and apply ``tc`` rules on all the nodes. Constraints are applied between groups of machines. Theses groups are described in the ``network_constraints`` variable and must be found in the inventory file. The newtwork constraints support ``delay``, ``rate`` and ``loss``. Args: network_constraints (dict): network constraints to apply roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` inventory_path(string): path to an inventory extra_vars (dict): extra_vars to pass to ansible Examples: * Using defaults The following will apply the network constraints between every groups. For instance the constraints will be applied for the communication between "n1" and "n3" but not between "n1" and "n2". Note that using default leads to symetric constraints. .. code-block:: python roles = { "grp1": ["n1", "n2"], "grp2": ["n3", "n4"], "grp3": ["n3", "n4"], } tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", } emulate_network(roles, tc) If you want to control more precisely which groups need to be taken into account, you can use ``except`` or ``groups`` key .. code-block:: python tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", "except": "grp3" } emulate_network(roles, tc) is equivalent to .. code-block:: python tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", "groups": ["grp1", "grp2"] } emulate_network(roles, inventory, tc) * Using ``src`` and ``dst`` The following will enforce a symetric constraint between ``grp1`` and ``grp2``. .. 
code-block:: python tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", "constraints": [{ "src": "grp1" "dst": "grp2" "delay": "10ms" "symetric": True }] } emulate_network(roles, inventory, tc) """ # 1) Retrieve the list of ips for all nodes (Ansible) # 2) Build all the constraints (Python) # {source:src, target: ip_dest, device: if, rate:x, delay:y} # 3) Enforce those constraints (Ansible) if not network_constraints: return if roles is None and inventory is None: raise ValueError("roles and inventory can't be None") if not extra_vars: extra_vars = {} # 1. getting ips/devices information logger.debug('Getting the ips of all nodes') tmpdir = os.path.join(os.getcwd(), TMP_DIRNAME) _check_tmpdir(tmpdir) utils_playbook = os.path.join(ANSIBLE_DIR, 'utils.yml') ips_file = os.path.join(tmpdir, 'ips.txt') options = {'enos_action': 'tc_ips', 'ips_file': ips_file} run_ansible([utils_playbook], roles=roles, extra_vars=options) # 2.a building the group constraints logger.debug('Building all the constraints') constraints = _build_grp_constraints(roles, network_constraints) # 2.b Building the ip/device level constaints with open(ips_file) as f: ips = yaml.safe_load(f) # will hold every single constraint ips_with_constraints = _build_ip_constraints(roles, ips, constraints) # dumping it for debugging purpose ips_with_constraints_file = os.path.join(tmpdir, 'ips_with_constraints.yml') with open(ips_with_constraints_file, 'w') as g: yaml.dump(ips_with_constraints, g) # 3. Enforcing those constraints logger.info('Enforcing the constraints') # enabling/disabling network constraints enable = network_constraints.setdefault('enable', True) utils_playbook = os.path.join(ANSIBLE_DIR, 'utils.yml') options = { 'enos_action': 'tc_apply', 'ips_with_constraints': ips_with_constraints, 'tc_enable': enable, } options.update(extra_vars) run_ansible([utils_playbook], roles=roles, inventory_path=inventory_path, extra_vars=options)
[ "def", "emulate_network", "(", "network_constraints", ",", "roles", "=", "None", ",", "inventory_path", "=", "None", ",", "extra_vars", "=", "None", ")", ":", "# 1) Retrieve the list of ips for all nodes (Ansible)", "# 2) Build all the constraints (Python)", "# {source:src...
Emulate network links. Read ``network_constraints`` and apply ``tc`` rules on all the nodes. Constraints are applied between groups of machines. Theses groups are described in the ``network_constraints`` variable and must be found in the inventory file. The newtwork constraints support ``delay``, ``rate`` and ``loss``. Args: network_constraints (dict): network constraints to apply roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` inventory_path(string): path to an inventory extra_vars (dict): extra_vars to pass to ansible Examples: * Using defaults The following will apply the network constraints between every groups. For instance the constraints will be applied for the communication between "n1" and "n3" but not between "n1" and "n2". Note that using default leads to symetric constraints. .. code-block:: python roles = { "grp1": ["n1", "n2"], "grp2": ["n3", "n4"], "grp3": ["n3", "n4"], } tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", } emulate_network(roles, tc) If you want to control more precisely which groups need to be taken into account, you can use ``except`` or ``groups`` key .. code-block:: python tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", "except": "grp3" } emulate_network(roles, tc) is equivalent to .. code-block:: python tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", "groups": ["grp1", "grp2"] } emulate_network(roles, inventory, tc) * Using ``src`` and ``dst`` The following will enforce a symetric constraint between ``grp1`` and ``grp2``. .. code-block:: python tc = { "enable": True, "default_delay": "20ms", "default_rate": "1gbit", "constraints": [{ "src": "grp1" "dst": "grp2" "delay": "10ms" "symetric": True }] } emulate_network(roles, inventory, tc)
[ "Emulate", "network", "links", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L574-L717
train
49,518
BeyondTheClouds/enoslib
enoslib/api.py
wait_ssh
def wait_ssh(roles, retries=100, interval=30): """Wait for all the machines to be ssh-reachable Let ansible initiates a communication and retries if needed. Args: inventory (string): path to the inventoy file to test retries (int): Number of time we'll be retrying an SSH connection interval (int): Interval to wait in seconds between two retries """ utils_playbook = os.path.join(ANSIBLE_DIR, 'utils.yml') options = {'enos_action': 'ping'} for i in range(0, retries): try: run_ansible([utils_playbook], roles=roles, extra_vars=options, on_error_continue=False) break except EnosUnreachableHostsError as e: logger.info("Hosts unreachable: %s " % e.hosts) logger.info("Retrying... %s/%s" % (i + 1, retries)) time.sleep(interval) else: raise EnosSSHNotReady('Maximum retries reached')
python
def wait_ssh(roles, retries=100, interval=30): """Wait for all the machines to be ssh-reachable Let ansible initiates a communication and retries if needed. Args: inventory (string): path to the inventoy file to test retries (int): Number of time we'll be retrying an SSH connection interval (int): Interval to wait in seconds between two retries """ utils_playbook = os.path.join(ANSIBLE_DIR, 'utils.yml') options = {'enos_action': 'ping'} for i in range(0, retries): try: run_ansible([utils_playbook], roles=roles, extra_vars=options, on_error_continue=False) break except EnosUnreachableHostsError as e: logger.info("Hosts unreachable: %s " % e.hosts) logger.info("Retrying... %s/%s" % (i + 1, retries)) time.sleep(interval) else: raise EnosSSHNotReady('Maximum retries reached')
[ "def", "wait_ssh", "(", "roles", ",", "retries", "=", "100", ",", "interval", "=", "30", ")", ":", "utils_playbook", "=", "os", ".", "path", ".", "join", "(", "ANSIBLE_DIR", ",", "'utils.yml'", ")", "options", "=", "{", "'enos_action'", ":", "'ping'", ...
Wait for all the machines to be ssh-reachable Let ansible initiates a communication and retries if needed. Args: inventory (string): path to the inventoy file to test retries (int): Number of time we'll be retrying an SSH connection interval (int): Interval to wait in seconds between two retries
[ "Wait", "for", "all", "the", "machines", "to", "be", "ssh", "-", "reachable" ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L782-L807
train
49,519
BeyondTheClouds/enoslib
enoslib/api.py
expand_groups
def expand_groups(grp): """Expand group names. Args: grp (string): group names to expand Returns: list of groups Examples: * grp[1-3] will be expanded to [grp1, grp2, grp3] * grp1 will be expanded to [grp1] """ p = re.compile(r"(?P<name>.+)\[(?P<start>\d+)-(?P<end>\d+)\]") m = p.match(grp) if m is not None: s = int(m.group('start')) e = int(m.group('end')) n = m.group('name') return list(map(lambda x: n + str(x), range(s, e + 1))) else: return [grp]
python
def expand_groups(grp): """Expand group names. Args: grp (string): group names to expand Returns: list of groups Examples: * grp[1-3] will be expanded to [grp1, grp2, grp3] * grp1 will be expanded to [grp1] """ p = re.compile(r"(?P<name>.+)\[(?P<start>\d+)-(?P<end>\d+)\]") m = p.match(grp) if m is not None: s = int(m.group('start')) e = int(m.group('end')) n = m.group('name') return list(map(lambda x: n + str(x), range(s, e + 1))) else: return [grp]
[ "def", "expand_groups", "(", "grp", ")", ":", "p", "=", "re", ".", "compile", "(", "r\"(?P<name>.+)\\[(?P<start>\\d+)-(?P<end>\\d+)\\]\"", ")", "m", "=", "p", ".", "match", "(", "grp", ")", "if", "m", "is", "not", "None", ":", "s", "=", "int", "(", "m"...
Expand group names. Args: grp (string): group names to expand Returns: list of groups Examples: * grp[1-3] will be expanded to [grp1, grp2, grp3] * grp1 will be expanded to [grp1]
[ "Expand", "group", "names", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L810-L832
train
49,520
BeyondTheClouds/enoslib
enoslib/api.py
_generate_default_grp_constraints
def _generate_default_grp_constraints(roles, network_constraints): """Generate default symetric grp constraints. """ default_delay = network_constraints.get('default_delay') default_rate = network_constraints.get('default_rate') default_loss = network_constraints.get('default_loss', 0) except_groups = network_constraints.get('except', []) grps = network_constraints.get('groups', roles.keys()) # expand each groups grps = [expand_groups(g) for g in grps] # flatten grps = [x for expanded_group in grps for x in expanded_group] # building the default group constraints return [{'src': grp1, 'dst': grp2, 'delay': default_delay, 'rate': default_rate, 'loss': default_loss} for grp1 in grps for grp2 in grps if ((grp1 != grp2 or _src_equals_dst_in_constraints(network_constraints, grp1)) and grp1 not in except_groups and grp2 not in except_groups)]
python
def _generate_default_grp_constraints(roles, network_constraints): """Generate default symetric grp constraints. """ default_delay = network_constraints.get('default_delay') default_rate = network_constraints.get('default_rate') default_loss = network_constraints.get('default_loss', 0) except_groups = network_constraints.get('except', []) grps = network_constraints.get('groups', roles.keys()) # expand each groups grps = [expand_groups(g) for g in grps] # flatten grps = [x for expanded_group in grps for x in expanded_group] # building the default group constraints return [{'src': grp1, 'dst': grp2, 'delay': default_delay, 'rate': default_rate, 'loss': default_loss} for grp1 in grps for grp2 in grps if ((grp1 != grp2 or _src_equals_dst_in_constraints(network_constraints, grp1)) and grp1 not in except_groups and grp2 not in except_groups)]
[ "def", "_generate_default_grp_constraints", "(", "roles", ",", "network_constraints", ")", ":", "default_delay", "=", "network_constraints", ".", "get", "(", "'default_delay'", ")", "default_rate", "=", "network_constraints", ".", "get", "(", "'default_rate'", ")", "d...
Generate default symetric grp constraints.
[ "Generate", "default", "symetric", "grp", "constraints", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L922-L943
train
49,521
BeyondTheClouds/enoslib
enoslib/api.py
_generate_actual_grp_constraints
def _generate_actual_grp_constraints(network_constraints): """Generate the user specified constraints """ if 'constraints' not in network_constraints: return [] constraints = network_constraints['constraints'] actual = [] for desc in constraints: descs = _expand_description(desc) for desc in descs: actual.append(desc) if 'symetric' in desc: sym = desc.copy() sym['src'] = desc['dst'] sym['dst'] = desc['src'] actual.append(sym) return actual
python
def _generate_actual_grp_constraints(network_constraints): """Generate the user specified constraints """ if 'constraints' not in network_constraints: return [] constraints = network_constraints['constraints'] actual = [] for desc in constraints: descs = _expand_description(desc) for desc in descs: actual.append(desc) if 'symetric' in desc: sym = desc.copy() sym['src'] = desc['dst'] sym['dst'] = desc['src'] actual.append(sym) return actual
[ "def", "_generate_actual_grp_constraints", "(", "network_constraints", ")", ":", "if", "'constraints'", "not", "in", "network_constraints", ":", "return", "[", "]", "constraints", "=", "network_constraints", "[", "'constraints'", "]", "actual", "=", "[", "]", "for",...
Generate the user specified constraints
[ "Generate", "the", "user", "specified", "constraints" ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L946-L963
train
49,522
BeyondTheClouds/enoslib
enoslib/api.py
_merge_constraints
def _merge_constraints(constraints, overrides): """Merge the constraints avoiding duplicates Change constraints in place. """ for o in overrides: i = 0 while i < len(constraints): c = constraints[i] if _same(o, c): constraints[i].update(o) break i = i + 1
python
def _merge_constraints(constraints, overrides): """Merge the constraints avoiding duplicates Change constraints in place. """ for o in overrides: i = 0 while i < len(constraints): c = constraints[i] if _same(o, c): constraints[i].update(o) break i = i + 1
[ "def", "_merge_constraints", "(", "constraints", ",", "overrides", ")", ":", "for", "o", "in", "overrides", ":", "i", "=", "0", "while", "i", "<", "len", "(", "constraints", ")", ":", "c", "=", "constraints", "[", "i", "]", "if", "_same", "(", "o", ...
Merge the constraints avoiding duplicates Change constraints in place.
[ "Merge", "the", "constraints", "avoiding", "duplicates", "Change", "constraints", "in", "place", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L966-L977
train
49,523
BeyondTheClouds/enoslib
enoslib/api.py
_build_grp_constraints
def _build_grp_constraints(roles, network_constraints): """Generate constraints at the group level, It expands the group names and deal with symetric constraints. """ # generate defaults constraints constraints = _generate_default_grp_constraints(roles, network_constraints) # Updating the constraints if necessary if 'constraints' in network_constraints: actual = _generate_actual_grp_constraints(network_constraints) _merge_constraints(constraints, actual) return constraints
python
def _build_grp_constraints(roles, network_constraints): """Generate constraints at the group level, It expands the group names and deal with symetric constraints. """ # generate defaults constraints constraints = _generate_default_grp_constraints(roles, network_constraints) # Updating the constraints if necessary if 'constraints' in network_constraints: actual = _generate_actual_grp_constraints(network_constraints) _merge_constraints(constraints, actual) return constraints
[ "def", "_build_grp_constraints", "(", "roles", ",", "network_constraints", ")", ":", "# generate defaults constraints", "constraints", "=", "_generate_default_grp_constraints", "(", "roles", ",", "network_constraints", ")", "# Updating the constraints if necessary", "if", "'con...
Generate constraints at the group level, It expands the group names and deal with symetric constraints.
[ "Generate", "constraints", "at", "the", "group", "level", "It", "expands", "the", "group", "names", "and", "deal", "with", "symetric", "constraints", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L980-L992
train
49,524
BeyondTheClouds/enoslib
enoslib/api.py
_map_device_on_host_networks
def _map_device_on_host_networks(provider_nets, devices): """Decorate each networks with the corresponding nic name.""" networks = copy.deepcopy(provider_nets) for network in networks: for device in devices: network.setdefault('device', None) ip_set = IPSet([network['cidr']]) if 'ipv4' not in device: continue ips = device['ipv4'] if not isinstance(ips, list): ips = [ips] if len(ips) < 1: continue ip = IPAddress(ips[0]['address']) if ip in ip_set: network['device'] = device['device'] continue return networks
python
def _map_device_on_host_networks(provider_nets, devices): """Decorate each networks with the corresponding nic name.""" networks = copy.deepcopy(provider_nets) for network in networks: for device in devices: network.setdefault('device', None) ip_set = IPSet([network['cidr']]) if 'ipv4' not in device: continue ips = device['ipv4'] if not isinstance(ips, list): ips = [ips] if len(ips) < 1: continue ip = IPAddress(ips[0]['address']) if ip in ip_set: network['device'] = device['device'] continue return networks
[ "def", "_map_device_on_host_networks", "(", "provider_nets", ",", "devices", ")", ":", "networks", "=", "copy", ".", "deepcopy", "(", "provider_nets", ")", "for", "network", "in", "networks", ":", "for", "device", "in", "devices", ":", "network", ".", "setdefa...
Decorate each networks with the corresponding nic name.
[ "Decorate", "each", "networks", "with", "the", "corresponding", "nic", "name", "." ]
fb00be58e56a7848cfe482187d659744919fe2f7
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/api.py#L1038-L1056
train
49,525
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/utils.py
f_energy
def f_energy(ac_power, times): """ Calculate the total energy accumulated from AC power at the end of each timestep between the given times. :param ac_power: AC Power [W] :param times: times :type times: np.datetime64[s] :return: energy [W*h] and energy times """ dt = np.diff(times) # calculate timesteps # convert timedeltas to quantities dt = dt.astype('timedelta64[s]').astype('float') / sc_const.hour # energy accumulate during timestep energy = dt * (ac_power[:-1] + ac_power[1:]) / 2 return energy, times[1:]
python
def f_energy(ac_power, times): """ Calculate the total energy accumulated from AC power at the end of each timestep between the given times. :param ac_power: AC Power [W] :param times: times :type times: np.datetime64[s] :return: energy [W*h] and energy times """ dt = np.diff(times) # calculate timesteps # convert timedeltas to quantities dt = dt.astype('timedelta64[s]').astype('float') / sc_const.hour # energy accumulate during timestep energy = dt * (ac_power[:-1] + ac_power[1:]) / 2 return energy, times[1:]
[ "def", "f_energy", "(", "ac_power", ",", "times", ")", ":", "dt", "=", "np", ".", "diff", "(", "times", ")", "# calculate timesteps", "# convert timedeltas to quantities", "dt", "=", "dt", ".", "astype", "(", "'timedelta64[s]'", ")", ".", "astype", "(", "'fl...
Calculate the total energy accumulated from AC power at the end of each timestep between the given times. :param ac_power: AC Power [W] :param times: times :type times: np.datetime64[s] :return: energy [W*h] and energy times
[ "Calculate", "the", "total", "energy", "accumulated", "from", "AC", "power", "at", "the", "end", "of", "each", "timestep", "between", "the", "given", "times", "." ]
205163d879d3880b6c9ef609f1b723a58773026b
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/utils.py#L39-L54
train
49,526
volafiled/python-volapi
volapi/file.py
File.fileupdate
def fileupdate(self, data): """Method to update extra metadata fields with dict obtained through `fileinfo`""" self.name = data["name"] add = self.__additional add["filetype"] = "other" for filetype in ("book", "image", "video", "audio", "archive"): if filetype in data: add["filetype"] = filetype break if add["filetype"] in ("image", "video", "audio"): add["thumb"] = data.get("thumb", dict()) # checksum is md5 add["checksum"] = data["checksum"] add["expire_time"] = data["expires"] / 1000 add["size"] = data["size"] add["info"] = data.get(add["filetype"], dict()) add["uploader"] = data["user"] if self.room.admin: add["info"].update({"room": data.get("room")}) add["info"].update({"uploader_ip": data.get("uploader_ip")}) self.updated = True
python
def fileupdate(self, data): """Method to update extra metadata fields with dict obtained through `fileinfo`""" self.name = data["name"] add = self.__additional add["filetype"] = "other" for filetype in ("book", "image", "video", "audio", "archive"): if filetype in data: add["filetype"] = filetype break if add["filetype"] in ("image", "video", "audio"): add["thumb"] = data.get("thumb", dict()) # checksum is md5 add["checksum"] = data["checksum"] add["expire_time"] = data["expires"] / 1000 add["size"] = data["size"] add["info"] = data.get(add["filetype"], dict()) add["uploader"] = data["user"] if self.room.admin: add["info"].update({"room": data.get("room")}) add["info"].update({"uploader_ip": data.get("uploader_ip")}) self.updated = True
[ "def", "fileupdate", "(", "self", ",", "data", ")", ":", "self", ".", "name", "=", "data", "[", "\"name\"", "]", "add", "=", "self", ".", "__additional", "add", "[", "\"filetype\"", "]", "=", "\"other\"", "for", "filetype", "in", "(", "\"book\"", ",", ...
Method to update extra metadata fields with dict obtained through `fileinfo`
[ "Method", "to", "update", "extra", "metadata", "fields", "with", "dict", "obtained", "through", "fileinfo" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/file.py#L38-L60
train
49,527
volafiled/python-volapi
volapi/file.py
File.thumbnail
def thumbnail(self): """Returns the thumbnail's url for this image, audio, or video file. Returns empty string if the file has no thumbnail""" if self.filetype not in ("video", "image", "audio"): raise RuntimeError("Only video, audio and image files can have thumbnails") thumb_srv = self.thumb.get("server") url = f"https://{thumb_srv}" if thumb_srv else None return f"{url}/asset/{self.fid}/thumb" if url else ""
python
def thumbnail(self): """Returns the thumbnail's url for this image, audio, or video file. Returns empty string if the file has no thumbnail""" if self.filetype not in ("video", "image", "audio"): raise RuntimeError("Only video, audio and image files can have thumbnails") thumb_srv = self.thumb.get("server") url = f"https://{thumb_srv}" if thumb_srv else None return f"{url}/asset/{self.fid}/thumb" if url else ""
[ "def", "thumbnail", "(", "self", ")", ":", "if", "self", ".", "filetype", "not", "in", "(", "\"video\"", ",", "\"image\"", ",", "\"audio\"", ")", ":", "raise", "RuntimeError", "(", "\"Only video, audio and image files can have thumbnails\"", ")", "thumb_srv", "=",...
Returns the thumbnail's url for this image, audio, or video file. Returns empty string if the file has no thumbnail
[ "Returns", "the", "thumbnail", "s", "url", "for", "this", "image", "audio", "or", "video", "file", ".", "Returns", "empty", "string", "if", "the", "file", "has", "no", "thumbnail" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/file.py#L81-L89
train
49,528
volafiled/python-volapi
volapi/file.py
File.duration
def duration(self): """Returns the duration in seconds of this audio or video file""" if self.filetype not in ("video", "audio"): raise RuntimeError("Only videos and audio have durations") return self.info.get("length") or self.info.get("duration")
python
def duration(self): """Returns the duration in seconds of this audio or video file""" if self.filetype not in ("video", "audio"): raise RuntimeError("Only videos and audio have durations") return self.info.get("length") or self.info.get("duration")
[ "def", "duration", "(", "self", ")", ":", "if", "self", ".", "filetype", "not", "in", "(", "\"video\"", ",", "\"audio\"", ")", ":", "raise", "RuntimeError", "(", "\"Only videos and audio have durations\"", ")", "return", "self", ".", "info", ".", "get", "(",...
Returns the duration in seconds of this audio or video file
[ "Returns", "the", "duration", "in", "seconds", "of", "this", "audio", "or", "video", "file" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/file.py#L100-L105
train
49,529
volafiled/python-volapi
volapi/file.py
File.delete
def delete(self): """ Remove this file """ self.room.check_owner() self.conn.make_call("deleteFiles", [self.fid])
python
def delete(self): """ Remove this file """ self.room.check_owner() self.conn.make_call("deleteFiles", [self.fid])
[ "def", "delete", "(", "self", ")", ":", "self", ".", "room", ".", "check_owner", "(", ")", "self", ".", "conn", ".", "make_call", "(", "\"deleteFiles\"", ",", "[", "self", ".", "fid", "]", ")" ]
Remove this file
[ "Remove", "this", "file" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/file.py#L151-L154
train
49,530
volafiled/python-volapi
volapi/file.py
File.timeout
def timeout(self, duration=3600): """ Timeout the uploader of this file """ self.room.check_owner() self.conn.make_call("timeoutFile", self.fid, duration)
python
def timeout(self, duration=3600): """ Timeout the uploader of this file """ self.room.check_owner() self.conn.make_call("timeoutFile", self.fid, duration)
[ "def", "timeout", "(", "self", ",", "duration", "=", "3600", ")", ":", "self", ".", "room", ".", "check_owner", "(", ")", "self", ".", "conn", ".", "make_call", "(", "\"timeoutFile\"", ",", "self", ".", "fid", ",", "duration", ")" ]
Timeout the uploader of this file
[ "Timeout", "the", "uploader", "of", "this", "file" ]
5f0bc03dbde703264ac6ed494e2050761f688a3e
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/file.py#L156-L159
train
49,531
Scout24/succubus
src/main/python/succubus/daemonize.py
Daemon.set_gid
def set_gid(self): """Change the group of the running process""" if self.group: gid = getgrnam(self.group).gr_gid try: os.setgid(gid) except Exception: message = ("Unable to switch ownership to {0}:{1}. " + "Did you start the daemon as root?") print(message.format(self.user, self.group)) sys.exit(1)
python
def set_gid(self): """Change the group of the running process""" if self.group: gid = getgrnam(self.group).gr_gid try: os.setgid(gid) except Exception: message = ("Unable to switch ownership to {0}:{1}. " + "Did you start the daemon as root?") print(message.format(self.user, self.group)) sys.exit(1)
[ "def", "set_gid", "(", "self", ")", ":", "if", "self", ".", "group", ":", "gid", "=", "getgrnam", "(", "self", ".", "group", ")", ".", "gr_gid", "try", ":", "os", ".", "setgid", "(", "gid", ")", "except", "Exception", ":", "message", "=", "(", "\...
Change the group of the running process
[ "Change", "the", "group", "of", "the", "running", "process" ]
465d6ef978be165b1b652736935b7cf0ee5ebe9e
https://github.com/Scout24/succubus/blob/465d6ef978be165b1b652736935b7cf0ee5ebe9e/src/main/python/succubus/daemonize.py#L46-L56
train
49,532
Scout24/succubus
src/main/python/succubus/daemonize.py
Daemon.set_uid
def set_uid(self): """Change the user of the running process""" if self.user: uid = getpwnam(self.user).pw_uid try: os.setuid(uid) except Exception: message = ('Unable to switch ownership to {0}:{1}. ' + 'Did you start the daemon as root?') print(message.format(self.user, self.group)) sys.exit(1)
python
def set_uid(self): """Change the user of the running process""" if self.user: uid = getpwnam(self.user).pw_uid try: os.setuid(uid) except Exception: message = ('Unable to switch ownership to {0}:{1}. ' + 'Did you start the daemon as root?') print(message.format(self.user, self.group)) sys.exit(1)
[ "def", "set_uid", "(", "self", ")", ":", "if", "self", ".", "user", ":", "uid", "=", "getpwnam", "(", "self", ".", "user", ")", ".", "pw_uid", "try", ":", "os", ".", "setuid", "(", "uid", ")", "except", "Exception", ":", "message", "=", "(", "'Un...
Change the user of the running process
[ "Change", "the", "user", "of", "the", "running", "process" ]
465d6ef978be165b1b652736935b7cf0ee5ebe9e
https://github.com/Scout24/succubus/blob/465d6ef978be165b1b652736935b7cf0ee5ebe9e/src/main/python/succubus/daemonize.py#L58-L68
train
49,533
Scout24/succubus
src/main/python/succubus/daemonize.py
Daemon.setup_logging
def setup_logging(self): """Set up self.logger This function is called after load_configuration() and after changing to new user/group IDs (if configured). Logging to syslog using the root logger is configured by default, you can override this method if you want something else. """ self.logger = logging.getLogger() if os.path.exists('/dev/log'): handler = SysLogHandler('/dev/log') else: handler = SysLogHandler() self.logger.addHandler(handler)
python
def setup_logging(self): """Set up self.logger This function is called after load_configuration() and after changing to new user/group IDs (if configured). Logging to syslog using the root logger is configured by default, you can override this method if you want something else. """ self.logger = logging.getLogger() if os.path.exists('/dev/log'): handler = SysLogHandler('/dev/log') else: handler = SysLogHandler() self.logger.addHandler(handler)
[ "def", "setup_logging", "(", "self", ")", ":", "self", ".", "logger", "=", "logging", ".", "getLogger", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "'/dev/log'", ")", ":", "handler", "=", "SysLogHandler", "(", "'/dev/log'", ")", "else", ":",...
Set up self.logger This function is called after load_configuration() and after changing to new user/group IDs (if configured). Logging to syslog using the root logger is configured by default, you can override this method if you want something else.
[ "Set", "up", "self", ".", "logger" ]
465d6ef978be165b1b652736935b7cf0ee5ebe9e
https://github.com/Scout24/succubus/blob/465d6ef978be165b1b652736935b7cf0ee5ebe9e/src/main/python/succubus/daemonize.py#L91-L105
train
49,534
Scout24/succubus
src/main/python/succubus/daemonize.py
Daemon.status
def status(self): """Determine the status of the daemon""" my_name = os.path.basename(sys.argv[0]) if self._already_running(): message = "{0} (pid {1}) is running...\n".format(my_name, self.pid) sys.stdout.write(message) return 0 sys.stdout.write("{0} is stopped\n".format(my_name)) return 3
python
def status(self): """Determine the status of the daemon""" my_name = os.path.basename(sys.argv[0]) if self._already_running(): message = "{0} (pid {1}) is running...\n".format(my_name, self.pid) sys.stdout.write(message) return 0 sys.stdout.write("{0} is stopped\n".format(my_name)) return 3
[ "def", "status", "(", "self", ")", ":", "my_name", "=", "os", ".", "path", ".", "basename", "(", "sys", ".", "argv", "[", "0", "]", ")", "if", "self", ".", "_already_running", "(", ")", ":", "message", "=", "\"{0} (pid {1}) is running...\\n\"", ".", "...
Determine the status of the daemon
[ "Determine", "the", "status", "of", "the", "daemon" ]
465d6ef978be165b1b652736935b7cf0ee5ebe9e
https://github.com/Scout24/succubus/blob/465d6ef978be165b1b652736935b7cf0ee5ebe9e/src/main/python/succubus/daemonize.py#L242-L250
train
49,535
nigma/django-easy-pdf
easy_pdf/rendering.py
fetch_resources
def fetch_resources(uri, rel): """ Retrieves embeddable resource from given ``uri``. For now only local resources (images, fonts) are supported. :param str uri: path or url to image or font resource :returns: path to local resource file. :rtype: str :raises: :exc:`~easy_pdf.exceptions.UnsupportedMediaPathException` """ if settings.STATIC_URL and uri.startswith(settings.STATIC_URL): path = os.path.join(settings.STATIC_ROOT, uri.replace(settings.STATIC_URL, "")) elif settings.MEDIA_URL and uri.startswith(settings.MEDIA_URL): path = os.path.join(settings.MEDIA_ROOT, uri.replace(settings.MEDIA_URL, "")) else: path = os.path.join(settings.STATIC_ROOT, uri) if not os.path.isfile(path): raise UnsupportedMediaPathException( "media urls must start with {} or {}".format( settings.MEDIA_ROOT, settings.STATIC_ROOT ) ) return path.replace("\\", "/")
python
def fetch_resources(uri, rel): """ Retrieves embeddable resource from given ``uri``. For now only local resources (images, fonts) are supported. :param str uri: path or url to image or font resource :returns: path to local resource file. :rtype: str :raises: :exc:`~easy_pdf.exceptions.UnsupportedMediaPathException` """ if settings.STATIC_URL and uri.startswith(settings.STATIC_URL): path = os.path.join(settings.STATIC_ROOT, uri.replace(settings.STATIC_URL, "")) elif settings.MEDIA_URL and uri.startswith(settings.MEDIA_URL): path = os.path.join(settings.MEDIA_ROOT, uri.replace(settings.MEDIA_URL, "")) else: path = os.path.join(settings.STATIC_ROOT, uri) if not os.path.isfile(path): raise UnsupportedMediaPathException( "media urls must start with {} or {}".format( settings.MEDIA_ROOT, settings.STATIC_ROOT ) ) return path.replace("\\", "/")
[ "def", "fetch_resources", "(", "uri", ",", "rel", ")", ":", "if", "settings", ".", "STATIC_URL", "and", "uri", ".", "startswith", "(", "settings", ".", "STATIC_URL", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "STATIC_R...
Retrieves embeddable resource from given ``uri``. For now only local resources (images, fonts) are supported. :param str uri: path or url to image or font resource :returns: path to local resource file. :rtype: str :raises: :exc:`~easy_pdf.exceptions.UnsupportedMediaPathException`
[ "Retrieves", "embeddable", "resource", "from", "given", "uri", "." ]
327605b91a445b453d8969b341ef74b12ab00a83
https://github.com/nigma/django-easy-pdf/blob/327605b91a445b453d8969b341ef74b12ab00a83/easy_pdf/rendering.py#L23-L48
train
49,536
nigma/django-easy-pdf
easy_pdf/rendering.py
html_to_pdf
def html_to_pdf(content, encoding="utf-8", link_callback=fetch_resources, **kwargs): """ Converts html ``content`` into PDF document. :param unicode content: html content :returns: PDF content :rtype: :class:`bytes` :raises: :exc:`~easy_pdf.exceptions.PDFRenderingError` """ src = BytesIO(content.encode(encoding)) dest = BytesIO() pdf = pisa.pisaDocument(src, dest, encoding=encoding, link_callback=link_callback, **kwargs) if pdf.err: logger.error("Error rendering PDF document") for entry in pdf.log: if entry[0] == xhtml2pdf.default.PML_ERROR: logger_x2p.error("line %s, msg: %s, fragment: %s", entry[1], entry[2], entry[3]) raise PDFRenderingError("Errors rendering PDF", content=content, log=pdf.log) if pdf.warn: for entry in pdf.log: if entry[0] == xhtml2pdf.default.PML_WARNING: logger_x2p.warning("line %s, msg: %s, fragment: %s", entry[1], entry[2], entry[3]) return dest.getvalue()
python
def html_to_pdf(content, encoding="utf-8", link_callback=fetch_resources, **kwargs): """ Converts html ``content`` into PDF document. :param unicode content: html content :returns: PDF content :rtype: :class:`bytes` :raises: :exc:`~easy_pdf.exceptions.PDFRenderingError` """ src = BytesIO(content.encode(encoding)) dest = BytesIO() pdf = pisa.pisaDocument(src, dest, encoding=encoding, link_callback=link_callback, **kwargs) if pdf.err: logger.error("Error rendering PDF document") for entry in pdf.log: if entry[0] == xhtml2pdf.default.PML_ERROR: logger_x2p.error("line %s, msg: %s, fragment: %s", entry[1], entry[2], entry[3]) raise PDFRenderingError("Errors rendering PDF", content=content, log=pdf.log) if pdf.warn: for entry in pdf.log: if entry[0] == xhtml2pdf.default.PML_WARNING: logger_x2p.warning("line %s, msg: %s, fragment: %s", entry[1], entry[2], entry[3]) return dest.getvalue()
[ "def", "html_to_pdf", "(", "content", ",", "encoding", "=", "\"utf-8\"", ",", "link_callback", "=", "fetch_resources", ",", "*", "*", "kwargs", ")", ":", "src", "=", "BytesIO", "(", "content", ".", "encode", "(", "encoding", ")", ")", "dest", "=", "Bytes...
Converts html ``content`` into PDF document. :param unicode content: html content :returns: PDF content :rtype: :class:`bytes` :raises: :exc:`~easy_pdf.exceptions.PDFRenderingError`
[ "Converts", "html", "content", "into", "PDF", "document", "." ]
327605b91a445b453d8969b341ef74b12ab00a83
https://github.com/nigma/django-easy-pdf/blob/327605b91a445b453d8969b341ef74b12ab00a83/easy_pdf/rendering.py#L51-L78
train
49,537
nigma/django-easy-pdf
easy_pdf/rendering.py
make_response
def make_response(content, filename=None, content_type="application/pdf"): """ Wraps content into HTTP response. If ``filename`` is specified then ``Content-Disposition: attachment`` header is added to the response. Default ``Content-Type`` is ``application/pdf``. :param bytes content: response content :param str filename: optional filename for file download :param str content_type: response content type :rtype: :class:`django.http.HttpResponse` """ response = HttpResponse(content, content_type=content_type) if filename is not None: response["Content-Disposition"] = "attachment; %s" % encode_filename(filename) return response
python
def make_response(content, filename=None, content_type="application/pdf"): """ Wraps content into HTTP response. If ``filename`` is specified then ``Content-Disposition: attachment`` header is added to the response. Default ``Content-Type`` is ``application/pdf``. :param bytes content: response content :param str filename: optional filename for file download :param str content_type: response content type :rtype: :class:`django.http.HttpResponse` """ response = HttpResponse(content, content_type=content_type) if filename is not None: response["Content-Disposition"] = "attachment; %s" % encode_filename(filename) return response
[ "def", "make_response", "(", "content", ",", "filename", "=", "None", ",", "content_type", "=", "\"application/pdf\"", ")", ":", "response", "=", "HttpResponse", "(", "content", ",", "content_type", "=", "content_type", ")", "if", "filename", "is", "not", "Non...
Wraps content into HTTP response. If ``filename`` is specified then ``Content-Disposition: attachment`` header is added to the response. Default ``Content-Type`` is ``application/pdf``. :param bytes content: response content :param str filename: optional filename for file download :param str content_type: response content type :rtype: :class:`django.http.HttpResponse`
[ "Wraps", "content", "into", "HTTP", "response", "." ]
327605b91a445b453d8969b341ef74b12ab00a83
https://github.com/nigma/django-easy-pdf/blob/327605b91a445b453d8969b341ef74b12ab00a83/easy_pdf/rendering.py#L102-L119
train
49,538
nigma/django-easy-pdf
easy_pdf/rendering.py
render_to_pdf
def render_to_pdf(template, context, using=None, request=None, encoding="utf-8", **kwargs): """ Create PDF document from Django html template. :param str template: Path to Django template :param dict context: Template context :param using: Optional Django template engine :param request: Django HTTP request :type request: :class:`django.http.HttpRequest` :returns: rendered PDF :rtype: :class:`bytes` :raises: :exc:`~easy_pdf.exceptions.PDFRenderingError`, :exc:`~easy_pdf.exceptions.UnsupportedMediaPathException` """ content = loader.render_to_string(template, context, request=request, using=using) return html_to_pdf(content, encoding, **kwargs)
python
def render_to_pdf(template, context, using=None, request=None, encoding="utf-8", **kwargs): """ Create PDF document from Django html template. :param str template: Path to Django template :param dict context: Template context :param using: Optional Django template engine :param request: Django HTTP request :type request: :class:`django.http.HttpRequest` :returns: rendered PDF :rtype: :class:`bytes` :raises: :exc:`~easy_pdf.exceptions.PDFRenderingError`, :exc:`~easy_pdf.exceptions.UnsupportedMediaPathException` """ content = loader.render_to_string(template, context, request=request, using=using) return html_to_pdf(content, encoding, **kwargs)
[ "def", "render_to_pdf", "(", "template", ",", "context", ",", "using", "=", "None", ",", "request", "=", "None", ",", "encoding", "=", "\"utf-8\"", ",", "*", "*", "kwargs", ")", ":", "content", "=", "loader", ".", "render_to_string", "(", "template", ","...
Create PDF document from Django html template. :param str template: Path to Django template :param dict context: Template context :param using: Optional Django template engine :param request: Django HTTP request :type request: :class:`django.http.HttpRequest` :returns: rendered PDF :rtype: :class:`bytes` :raises: :exc:`~easy_pdf.exceptions.PDFRenderingError`, :exc:`~easy_pdf.exceptions.UnsupportedMediaPathException`
[ "Create", "PDF", "document", "from", "Django", "html", "template", "." ]
327605b91a445b453d8969b341ef74b12ab00a83
https://github.com/nigma/django-easy-pdf/blob/327605b91a445b453d8969b341ef74b12ab00a83/easy_pdf/rendering.py#L122-L138
train
49,539
nigma/django-easy-pdf
easy_pdf/rendering.py
render_to_pdf_response
def render_to_pdf_response(request, template, context, using=None, filename=None, encoding="utf-8", **kwargs): """ Renders a PDF response using given ``request``, ``template`` and ``context``. If ``filename`` param is specified then the response ``Content-Disposition`` header will be set to ``attachment`` making the browser display a "Save as.." dialog. :param request: Django HTTP request :type request: :class:`django.http.HttpRequest` :param str template: Path to Django template :param dict context: Template context :param using: Optional Django template engine :rtype: :class:`django.http.HttpResponse` """ try: pdf = render_to_pdf(template, context, using=using, encoding=encoding, **kwargs) return make_response(pdf, filename) except PDFRenderingError as e: logger.exception(e.message) return HttpResponse(e.message)
python
def render_to_pdf_response(request, template, context, using=None, filename=None, encoding="utf-8", **kwargs): """ Renders a PDF response using given ``request``, ``template`` and ``context``. If ``filename`` param is specified then the response ``Content-Disposition`` header will be set to ``attachment`` making the browser display a "Save as.." dialog. :param request: Django HTTP request :type request: :class:`django.http.HttpRequest` :param str template: Path to Django template :param dict context: Template context :param using: Optional Django template engine :rtype: :class:`django.http.HttpResponse` """ try: pdf = render_to_pdf(template, context, using=using, encoding=encoding, **kwargs) return make_response(pdf, filename) except PDFRenderingError as e: logger.exception(e.message) return HttpResponse(e.message)
[ "def", "render_to_pdf_response", "(", "request", ",", "template", ",", "context", ",", "using", "=", "None", ",", "filename", "=", "None", ",", "encoding", "=", "\"utf-8\"", ",", "*", "*", "kwargs", ")", ":", "try", ":", "pdf", "=", "render_to_pdf", "(",...
Renders a PDF response using given ``request``, ``template`` and ``context``. If ``filename`` param is specified then the response ``Content-Disposition`` header will be set to ``attachment`` making the browser display a "Save as.." dialog. :param request: Django HTTP request :type request: :class:`django.http.HttpRequest` :param str template: Path to Django template :param dict context: Template context :param using: Optional Django template engine :rtype: :class:`django.http.HttpResponse`
[ "Renders", "a", "PDF", "response", "using", "given", "request", "template", "and", "context", "." ]
327605b91a445b453d8969b341ef74b12ab00a83
https://github.com/nigma/django-easy-pdf/blob/327605b91a445b453d8969b341ef74b12ab00a83/easy_pdf/rendering.py#L141-L162
train
49,540
nigma/django-easy-pdf
easy_pdf/views.py
PDFTemplateResponseMixin.get_pdf_response
def get_pdf_response(self, context, **response_kwargs): """ Renders PDF document and prepares response. :returns: Django HTTP response :rtype: :class:`django.http.HttpResponse` """ return render_to_pdf_response( request=self.request, template=self.get_template_names(), context=context, using=self.template_engine, filename=self.get_pdf_filename(), **self.get_pdf_kwargs() )
python
def get_pdf_response(self, context, **response_kwargs): """ Renders PDF document and prepares response. :returns: Django HTTP response :rtype: :class:`django.http.HttpResponse` """ return render_to_pdf_response( request=self.request, template=self.get_template_names(), context=context, using=self.template_engine, filename=self.get_pdf_filename(), **self.get_pdf_kwargs() )
[ "def", "get_pdf_response", "(", "self", ",", "context", ",", "*", "*", "response_kwargs", ")", ":", "return", "render_to_pdf_response", "(", "request", "=", "self", ".", "request", ",", "template", "=", "self", ".", "get_template_names", "(", ")", ",", "cont...
Renders PDF document and prepares response. :returns: Django HTTP response :rtype: :class:`django.http.HttpResponse`
[ "Renders", "PDF", "document", "and", "prepares", "response", "." ]
327605b91a445b453d8969b341ef74b12ab00a83
https://github.com/nigma/django-easy-pdf/blob/327605b91a445b453d8969b341ef74b12ab00a83/easy_pdf/views.py#L47-L61
train
49,541
lk-geimfari/expynent
expynent/compiled.py
compile_patterns_in_dictionary
def compile_patterns_in_dictionary(dictionary): """ Replace all strings in dictionary with compiled version of themselves and return dictionary. """ for key, value in dictionary.items(): if isinstance(value, str): dictionary[key] = re.compile(value) elif isinstance(value, dict): compile_patterns_in_dictionary(value) return dictionary
python
def compile_patterns_in_dictionary(dictionary): """ Replace all strings in dictionary with compiled version of themselves and return dictionary. """ for key, value in dictionary.items(): if isinstance(value, str): dictionary[key] = re.compile(value) elif isinstance(value, dict): compile_patterns_in_dictionary(value) return dictionary
[ "def", "compile_patterns_in_dictionary", "(", "dictionary", ")", ":", "for", "key", ",", "value", "in", "dictionary", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "str", ")", ":", "dictionary", "[", "key", "]", "=", "re", ".", "c...
Replace all strings in dictionary with compiled version of themselves and return dictionary.
[ "Replace", "all", "strings", "in", "dictionary", "with", "compiled", "version", "of", "themselves", "and", "return", "dictionary", "." ]
85a1f6e681f669238202becb934381dd9a2313f4
https://github.com/lk-geimfari/expynent/blob/85a1f6e681f669238202becb934381dd9a2313f4/expynent/compiled.py#L10-L20
train
49,542
facelessuser/pyspelling
pyspelling/filters/context.py
ContextFilter.filter
def filter(self, source_file, encoding): # noqa A001 """Parse file.""" with codecs.open(source_file, 'r', encoding=encoding) as f: text = f.read() return [filters.SourceText(self._filter(text), source_file, encoding, 'context')]
python
def filter(self, source_file, encoding): # noqa A001 """Parse file.""" with codecs.open(source_file, 'r', encoding=encoding) as f: text = f.read() return [filters.SourceText(self._filter(text), source_file, encoding, 'context')]
[ "def", "filter", "(", "self", ",", "source_file", ",", "encoding", ")", ":", "# noqa A001", "with", "codecs", ".", "open", "(", "source_file", ",", "'r'", ",", "encoding", "=", "encoding", ")", "as", "f", ":", "text", "=", "f", ".", "read", "(", ")",...
Parse file.
[ "Parse", "file", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/context.py#L78-L84
train
49,543
facelessuser/pyspelling
pyspelling/filters/context.py
ContextFilter._filter
def _filter(self, text): """Context delimiter filter.""" if self.line_endings: text = self.norm_nl(text) new_text = [] index = 0 last = 0 end = len(text) while index < end: m = self.escapes.match(text, pos=index) if self.escapes else None if m: index = m.end(0) continue handled = False for delimiter in self.delimiters: m = delimiter[0].match(text, pos=index) if m: if self.context_visible_first is True: new_text.append(text[last:m.start(0)]) else: new_text.append(m.group(delimiter[1])) index = m.end(0) last = index handled = True break if handled: continue index += 1 if last < end and self.context_visible_first is True: new_text.append(text[last:end]) return ' '.join(new_text)
python
def _filter(self, text): """Context delimiter filter.""" if self.line_endings: text = self.norm_nl(text) new_text = [] index = 0 last = 0 end = len(text) while index < end: m = self.escapes.match(text, pos=index) if self.escapes else None if m: index = m.end(0) continue handled = False for delimiter in self.delimiters: m = delimiter[0].match(text, pos=index) if m: if self.context_visible_first is True: new_text.append(text[last:m.start(0)]) else: new_text.append(m.group(delimiter[1])) index = m.end(0) last = index handled = True break if handled: continue index += 1 if last < end and self.context_visible_first is True: new_text.append(text[last:end]) return ' '.join(new_text)
[ "def", "_filter", "(", "self", ",", "text", ")", ":", "if", "self", ".", "line_endings", ":", "text", "=", "self", ".", "norm_nl", "(", "text", ")", "new_text", "=", "[", "]", "index", "=", "0", "last", "=", "0", "end", "=", "len", "(", "text", ...
Context delimiter filter.
[ "Context", "delimiter", "filter", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/context.py#L91-L124
train
49,544
horazont/aioopenssl
aioopenssl/__init__.py
create_starttls_connection
def create_starttls_connection( loop, protocol_factory, host=None, port=None, *, sock=None, ssl_context_factory=None, use_starttls=False, local_addr=None, **kwargs): """ Create a connection which can later be upgraded to use TLS. .. versionchanged:: 0.4 The `local_addr` argument was added. :param loop: The event loop to use. :type loop: :class:`asyncio.BaseEventLoop` :param protocol_factory: Factory for the protocol for the connection :param host: The host name or address to connect to :type host: :class:`str` or :data:`None` :param port: The port to connect to :type port: :class:`int` or :data:`None` :param sock: A socket to wrap (conflicts with `host` and `port`) :type sock: :class:`socket.socket` :param ssl_context_factory: Function which returns a :class:`OpenSSL.SSL.Context` to use for TLS operations :param use_starttls: Flag to control whether TLS is negotiated right away or deferredly. :type use_starttls: :class:`bool` :param local_addr: Address to bind to This is roughly a copy of the asyncio implementation of :meth:`asyncio.BaseEventLoop.create_connection`. It returns a pair ``(transport, protocol)``, where `transport` is a newly created :class:`STARTTLSTransport` instance. Further keyword arguments are forwarded to the constructor of :class:`STARTTLSTransport`. `loop` must be a :class:`asyncio.BaseEventLoop`, with support for :meth:`asyncio.BaseEventLoop.add_reader` and the corresponding writer and removal functions for sockets. This is typically a selector type event loop. `protocol_factory` must be a callable which (without any arguments) returns a :class:`asyncio.Protocol` which will be connected to the STARTTLS transport. `host` and `port` must be a hostname and a port number, or both :data:`None`. Both must be :data:`None`, if and only if `sock` is not :data:`None`. In that case, `sock` is used instead of a newly created socket. `sock` is put into non-blocking mode and must be a stream socket. 
If `use_starttls` is :data:`True`, no TLS handshake will be performed initially. Instead, the connection is established without any transport-layer security. It is expected that the :meth:`STARTTLSTransport.starttls` method is used when the application protocol requires TLS. If `use_starttls` is :data:`False`, the TLS handshake is initiated right away. `local_addr` may be an address to bind this side of the socket to. If omitted or :data:`None`, the local address is assigned by the operating system. This coroutine returns when the stream is established. If `use_starttls` is :data:`False`, this means that the full TLS handshake has to be finished for this coroutine to return. Otherwise, no TLS handshake takes place. It must be invoked using the :meth:`STARTTLSTransport.starttls` coroutine. """ if host is not None and port is not None: host_addrs = yield from loop.getaddrinfo( host, port, type=socket.SOCK_STREAM) exceptions = [] for family, type, proto, cname, address in host_addrs: sock = None try: sock = socket.socket(family=family, type=type, proto=proto) sock.setblocking(False) if local_addr is not None: sock.bind(local_addr) yield from loop.sock_connect(sock, address) except OSError as exc: if sock is not None: sock.close() exceptions.append(exc) else: break else: if len(exceptions) == 1: raise exceptions[0] model = str(exceptions[0]) if all(str(exc) == model for exc in exceptions): raise exceptions[0] try: from aioxmpp.errors import MultiOSError except ImportError: MultiOSError = OSError exc = MultiOSError( "could not connect to [{}]:{}".format(host, port), exceptions) raise exc elif sock is None: raise ValueError("sock must not be None if host and/or port are None") else: sock.setblocking(False) protocol = protocol_factory() waiter = asyncio.Future(loop=loop) transport = STARTTLSTransport(loop, sock, protocol, ssl_context_factory=ssl_context_factory, waiter=waiter, use_starttls=use_starttls, **kwargs) yield from waiter return transport, protocol
python
def create_starttls_connection( loop, protocol_factory, host=None, port=None, *, sock=None, ssl_context_factory=None, use_starttls=False, local_addr=None, **kwargs): """ Create a connection which can later be upgraded to use TLS. .. versionchanged:: 0.4 The `local_addr` argument was added. :param loop: The event loop to use. :type loop: :class:`asyncio.BaseEventLoop` :param protocol_factory: Factory for the protocol for the connection :param host: The host name or address to connect to :type host: :class:`str` or :data:`None` :param port: The port to connect to :type port: :class:`int` or :data:`None` :param sock: A socket to wrap (conflicts with `host` and `port`) :type sock: :class:`socket.socket` :param ssl_context_factory: Function which returns a :class:`OpenSSL.SSL.Context` to use for TLS operations :param use_starttls: Flag to control whether TLS is negotiated right away or deferredly. :type use_starttls: :class:`bool` :param local_addr: Address to bind to This is roughly a copy of the asyncio implementation of :meth:`asyncio.BaseEventLoop.create_connection`. It returns a pair ``(transport, protocol)``, where `transport` is a newly created :class:`STARTTLSTransport` instance. Further keyword arguments are forwarded to the constructor of :class:`STARTTLSTransport`. `loop` must be a :class:`asyncio.BaseEventLoop`, with support for :meth:`asyncio.BaseEventLoop.add_reader` and the corresponding writer and removal functions for sockets. This is typically a selector type event loop. `protocol_factory` must be a callable which (without any arguments) returns a :class:`asyncio.Protocol` which will be connected to the STARTTLS transport. `host` and `port` must be a hostname and a port number, or both :data:`None`. Both must be :data:`None`, if and only if `sock` is not :data:`None`. In that case, `sock` is used instead of a newly created socket. `sock` is put into non-blocking mode and must be a stream socket. 
If `use_starttls` is :data:`True`, no TLS handshake will be performed initially. Instead, the connection is established without any transport-layer security. It is expected that the :meth:`STARTTLSTransport.starttls` method is used when the application protocol requires TLS. If `use_starttls` is :data:`False`, the TLS handshake is initiated right away. `local_addr` may be an address to bind this side of the socket to. If omitted or :data:`None`, the local address is assigned by the operating system. This coroutine returns when the stream is established. If `use_starttls` is :data:`False`, this means that the full TLS handshake has to be finished for this coroutine to return. Otherwise, no TLS handshake takes place. It must be invoked using the :meth:`STARTTLSTransport.starttls` coroutine. """ if host is not None and port is not None: host_addrs = yield from loop.getaddrinfo( host, port, type=socket.SOCK_STREAM) exceptions = [] for family, type, proto, cname, address in host_addrs: sock = None try: sock = socket.socket(family=family, type=type, proto=proto) sock.setblocking(False) if local_addr is not None: sock.bind(local_addr) yield from loop.sock_connect(sock, address) except OSError as exc: if sock is not None: sock.close() exceptions.append(exc) else: break else: if len(exceptions) == 1: raise exceptions[0] model = str(exceptions[0]) if all(str(exc) == model for exc in exceptions): raise exceptions[0] try: from aioxmpp.errors import MultiOSError except ImportError: MultiOSError = OSError exc = MultiOSError( "could not connect to [{}]:{}".format(host, port), exceptions) raise exc elif sock is None: raise ValueError("sock must not be None if host and/or port are None") else: sock.setblocking(False) protocol = protocol_factory() waiter = asyncio.Future(loop=loop) transport = STARTTLSTransport(loop, sock, protocol, ssl_context_factory=ssl_context_factory, waiter=waiter, use_starttls=use_starttls, **kwargs) yield from waiter return transport, protocol
[ "def", "create_starttls_connection", "(", "loop", ",", "protocol_factory", ",", "host", "=", "None", ",", "port", "=", "None", ",", "*", ",", "sock", "=", "None", ",", "ssl_context_factory", "=", "None", ",", "use_starttls", "=", "False", ",", "local_addr", ...
Create a connection which can later be upgraded to use TLS. .. versionchanged:: 0.4 The `local_addr` argument was added. :param loop: The event loop to use. :type loop: :class:`asyncio.BaseEventLoop` :param protocol_factory: Factory for the protocol for the connection :param host: The host name or address to connect to :type host: :class:`str` or :data:`None` :param port: The port to connect to :type port: :class:`int` or :data:`None` :param sock: A socket to wrap (conflicts with `host` and `port`) :type sock: :class:`socket.socket` :param ssl_context_factory: Function which returns a :class:`OpenSSL.SSL.Context` to use for TLS operations :param use_starttls: Flag to control whether TLS is negotiated right away or deferredly. :type use_starttls: :class:`bool` :param local_addr: Address to bind to This is roughly a copy of the asyncio implementation of :meth:`asyncio.BaseEventLoop.create_connection`. It returns a pair ``(transport, protocol)``, where `transport` is a newly created :class:`STARTTLSTransport` instance. Further keyword arguments are forwarded to the constructor of :class:`STARTTLSTransport`. `loop` must be a :class:`asyncio.BaseEventLoop`, with support for :meth:`asyncio.BaseEventLoop.add_reader` and the corresponding writer and removal functions for sockets. This is typically a selector type event loop. `protocol_factory` must be a callable which (without any arguments) returns a :class:`asyncio.Protocol` which will be connected to the STARTTLS transport. `host` and `port` must be a hostname and a port number, or both :data:`None`. Both must be :data:`None`, if and only if `sock` is not :data:`None`. In that case, `sock` is used instead of a newly created socket. `sock` is put into non-blocking mode and must be a stream socket. If `use_starttls` is :data:`True`, no TLS handshake will be performed initially. Instead, the connection is established without any transport-layer security. 
It is expected that the :meth:`STARTTLSTransport.starttls` method is used when the application protocol requires TLS. If `use_starttls` is :data:`False`, the TLS handshake is initiated right away. `local_addr` may be an address to bind this side of the socket to. If omitted or :data:`None`, the local address is assigned by the operating system. This coroutine returns when the stream is established. If `use_starttls` is :data:`False`, this means that the full TLS handshake has to be finished for this coroutine to return. Otherwise, no TLS handshake takes place. It must be invoked using the :meth:`STARTTLSTransport.starttls` coroutine.
[ "Create", "a", "connection", "which", "can", "later", "be", "upgraded", "to", "use", "TLS", "." ]
95cb39b5904d6a9702afcef6704181c850371081
https://github.com/horazont/aioopenssl/blob/95cb39b5904d6a9702afcef6704181c850371081/aioopenssl/__init__.py#L726-L849
train
49,545
horazont/aioopenssl
aioopenssl/__init__.py
STARTTLSTransport._call_connection_lost_and_clean_up
def _call_connection_lost_and_clean_up(self, exc): """ Clean up all resources and call the protocols connection lost method. """ self._state = _State.CLOSED try: self._protocol.connection_lost(exc) finally: self._rawsock.close() if self._tls_conn is not None: self._tls_conn.set_app_data(None) self._tls_conn = None self._rawsock = None self._protocol = None self._loop = None
python
def _call_connection_lost_and_clean_up(self, exc): """ Clean up all resources and call the protocols connection lost method. """ self._state = _State.CLOSED try: self._protocol.connection_lost(exc) finally: self._rawsock.close() if self._tls_conn is not None: self._tls_conn.set_app_data(None) self._tls_conn = None self._rawsock = None self._protocol = None self._loop = None
[ "def", "_call_connection_lost_and_clean_up", "(", "self", ",", "exc", ")", ":", "self", ".", "_state", "=", "_State", ".", "CLOSED", "try", ":", "self", ".", "_protocol", ".", "connection_lost", "(", "exc", ")", "finally", ":", "self", ".", "_rawsock", "."...
Clean up all resources and call the protocols connection lost method.
[ "Clean", "up", "all", "resources", "and", "call", "the", "protocols", "connection", "lost", "method", "." ]
95cb39b5904d6a9702afcef6704181c850371081
https://github.com/horazont/aioopenssl/blob/95cb39b5904d6a9702afcef6704181c850371081/aioopenssl/__init__.py#L271-L286
train
49,546
horazont/aioopenssl
aioopenssl/__init__.py
STARTTLSTransport.abort
def abort(self): """ Immediately close the stream, without sending remaining buffers or performing a proper shutdown. """ if self._state == _State.CLOSED: self._invalid_state("abort() called") return self._force_close(None)
python
def abort(self): """ Immediately close the stream, without sending remaining buffers or performing a proper shutdown. """ if self._state == _State.CLOSED: self._invalid_state("abort() called") return self._force_close(None)
[ "def", "abort", "(", "self", ")", ":", "if", "self", ".", "_state", "==", "_State", ".", "CLOSED", ":", "self", ".", "_invalid_state", "(", "\"abort() called\"", ")", "return", "self", ".", "_force_close", "(", "None", ")" ]
Immediately close the stream, without sending remaining buffers or performing a proper shutdown.
[ "Immediately", "close", "the", "stream", "without", "sending", "remaining", "buffers", "or", "performing", "a", "proper", "shutdown", "." ]
95cb39b5904d6a9702afcef6704181c850371081
https://github.com/horazont/aioopenssl/blob/95cb39b5904d6a9702afcef6704181c850371081/aioopenssl/__init__.py#L576-L585
train
49,547
horazont/aioopenssl
aioopenssl/__init__.py
STARTTLSTransport.starttls
def starttls(self, ssl_context=None, post_handshake_callback=None): """ Start a TLS stream on top of the socket. This is an invalid operation if the stream is not in RAW_OPEN state. If `ssl_context` is set, it overrides the `ssl_context` passed to the constructor. If `post_handshake_callback` is set, it overrides the `post_handshake_callback` passed to the constructor. .. versionchanged:: 0.4 This method is now a barrier with respect to reads and writes: before the handshake is completed (including the post handshake callback, if any), no data is received or sent. """ if self._state != _State.RAW_OPEN or self._closing: raise self._invalid_state("starttls() called") if ssl_context is not None: self._ssl_context = ssl_context self._extra.update( sslcontext=ssl_context ) else: self._ssl_context = self._ssl_context_factory(self) if post_handshake_callback is not None: self._tls_post_handshake_callback = post_handshake_callback self._waiter = asyncio.Future() self._waiter.add_done_callback(self._waiter_done) self._initiate_tls() try: yield from self._waiter finally: self._waiter = None
python
def starttls(self, ssl_context=None, post_handshake_callback=None): """ Start a TLS stream on top of the socket. This is an invalid operation if the stream is not in RAW_OPEN state. If `ssl_context` is set, it overrides the `ssl_context` passed to the constructor. If `post_handshake_callback` is set, it overrides the `post_handshake_callback` passed to the constructor. .. versionchanged:: 0.4 This method is now a barrier with respect to reads and writes: before the handshake is completed (including the post handshake callback, if any), no data is received or sent. """ if self._state != _State.RAW_OPEN or self._closing: raise self._invalid_state("starttls() called") if ssl_context is not None: self._ssl_context = ssl_context self._extra.update( sslcontext=ssl_context ) else: self._ssl_context = self._ssl_context_factory(self) if post_handshake_callback is not None: self._tls_post_handshake_callback = post_handshake_callback self._waiter = asyncio.Future() self._waiter.add_done_callback(self._waiter_done) self._initiate_tls() try: yield from self._waiter finally: self._waiter = None
[ "def", "starttls", "(", "self", ",", "ssl_context", "=", "None", ",", "post_handshake_callback", "=", "None", ")", ":", "if", "self", ".", "_state", "!=", "_State", ".", "RAW_OPEN", "or", "self", ".", "_closing", ":", "raise", "self", ".", "_invalid_state"...
Start a TLS stream on top of the socket. This is an invalid operation if the stream is not in RAW_OPEN state. If `ssl_context` is set, it overrides the `ssl_context` passed to the constructor. If `post_handshake_callback` is set, it overrides the `post_handshake_callback` passed to the constructor. .. versionchanged:: 0.4 This method is now a barrier with respect to reads and writes: before the handshake is completed (including the post handshake callback, if any), no data is received or sent.
[ "Start", "a", "TLS", "stream", "on", "top", "of", "the", "socket", ".", "This", "is", "an", "invalid", "operation", "if", "the", "stream", "is", "not", "in", "RAW_OPEN", "state", "." ]
95cb39b5904d6a9702afcef6704181c850371081
https://github.com/horazont/aioopenssl/blob/95cb39b5904d6a9702afcef6704181c850371081/aioopenssl/__init__.py#L649-L685
train
49,548
horazont/aioopenssl
aioopenssl/__init__.py
STARTTLSTransport.write
def write(self, data): """ Write data to the transport. This is an invalid operation if the stream is not writable, that is, if it is closed. During TLS negotiation, the data is buffered. """ if not isinstance(data, (bytes, bytearray, memoryview)): raise TypeError('data argument must be byte-ish (%r)', type(data)) if not self._state.is_writable or self._closing: raise self._invalid_state("write() called") if not data: return if not self._buffer: self._loop.add_writer(self._raw_fd, self._write_ready) self._buffer.extend(data)
python
def write(self, data): """ Write data to the transport. This is an invalid operation if the stream is not writable, that is, if it is closed. During TLS negotiation, the data is buffered. """ if not isinstance(data, (bytes, bytearray, memoryview)): raise TypeError('data argument must be byte-ish (%r)', type(data)) if not self._state.is_writable or self._closing: raise self._invalid_state("write() called") if not data: return if not self._buffer: self._loop.add_writer(self._raw_fd, self._write_ready) self._buffer.extend(data)
[ "def", "write", "(", "self", ",", "data", ")", ":", "if", "not", "isinstance", "(", "data", ",", "(", "bytes", ",", "bytearray", ",", "memoryview", ")", ")", ":", "raise", "TypeError", "(", "'data argument must be byte-ish (%r)'", ",", "type", "(", "data",...
Write data to the transport. This is an invalid operation if the stream is not writable, that is, if it is closed. During TLS negotiation, the data is buffered.
[ "Write", "data", "to", "the", "transport", ".", "This", "is", "an", "invalid", "operation", "if", "the", "stream", "is", "not", "writable", "that", "is", "if", "it", "is", "closed", ".", "During", "TLS", "negotiation", "the", "data", "is", "buffered", "....
95cb39b5904d6a9702afcef6704181c850371081
https://github.com/horazont/aioopenssl/blob/95cb39b5904d6a9702afcef6704181c850371081/aioopenssl/__init__.py#L687-L706
train
49,549
BBVA/data-refinery
datarefinery/FieldOperations.py
explode
def explode(prefix: str): """ given an array of objects de-normalized into fields """ def _app(i, e=None): if i is not None: return {k: v for (k, v) in iter_fields(i)}, None return i, e def iter_fields(event_field: Union[dict, list]): if type(event_field) is dict: for key, val in event_field.items(): yield (key, val) elif type(event_field) is list: for i, value in enumerate(event_field): for key, val in value.items(): if not i == 0: yield ("{}_{}".format(key, i), val) else: yield (key, val) return compose(_app, add_column_prefix(prefix))
python
def explode(prefix: str): """ given an array of objects de-normalized into fields """ def _app(i, e=None): if i is not None: return {k: v for (k, v) in iter_fields(i)}, None return i, e def iter_fields(event_field: Union[dict, list]): if type(event_field) is dict: for key, val in event_field.items(): yield (key, val) elif type(event_field) is list: for i, value in enumerate(event_field): for key, val in value.items(): if not i == 0: yield ("{}_{}".format(key, i), val) else: yield (key, val) return compose(_app, add_column_prefix(prefix))
[ "def", "explode", "(", "prefix", ":", "str", ")", ":", "def", "_app", "(", "i", ",", "e", "=", "None", ")", ":", "if", "i", "is", "not", "None", ":", "return", "{", "k", ":", "v", "for", "(", "k", ",", "v", ")", "in", "iter_fields", "(", "i...
given an array of objects de-normalized into fields
[ "given", "an", "array", "of", "objects", "de", "-", "normalized", "into", "fields" ]
4ff19186ac570269f64a245ad6297cf882c70aa4
https://github.com/BBVA/data-refinery/blob/4ff19186ac570269f64a245ad6297cf882c70aa4/datarefinery/FieldOperations.py#L186-L207
train
49,550
facelessuser/pyspelling
pyspelling/filters/xml.py
XmlFilter._has_xml_encode
def _has_xml_encode(self, content): """Check XML encoding.""" encode = None m = RE_XML_START.match(content) if m: if m.group(1): m2 = RE_XML_ENCODE.match(m.group(1)) if m2: enc = m2.group(2).decode('ascii') try: codecs.getencoder(enc) encode = enc except LookupError: pass else: if m.group(2): enc = 'utf-32-be' text = m.group(2) elif m.group(3): enc = 'utf-32-le' text = m.group(3) elif m.group(4): enc = 'utf-16-be' text = m.group(4) elif m.group(5): enc = 'utf-16-le' text = m.group(5) try: m2 = RE_XML_ENCODE_U.match(text.decode(enc)) except Exception: # pragma: no cover m2 = None if m2: enc = m2.group(2) try: codecs.getencoder(enc) encode = enc except Exception: pass return encode
python
def _has_xml_encode(self, content): """Check XML encoding.""" encode = None m = RE_XML_START.match(content) if m: if m.group(1): m2 = RE_XML_ENCODE.match(m.group(1)) if m2: enc = m2.group(2).decode('ascii') try: codecs.getencoder(enc) encode = enc except LookupError: pass else: if m.group(2): enc = 'utf-32-be' text = m.group(2) elif m.group(3): enc = 'utf-32-le' text = m.group(3) elif m.group(4): enc = 'utf-16-be' text = m.group(4) elif m.group(5): enc = 'utf-16-le' text = m.group(5) try: m2 = RE_XML_ENCODE_U.match(text.decode(enc)) except Exception: # pragma: no cover m2 = None if m2: enc = m2.group(2) try: codecs.getencoder(enc) encode = enc except Exception: pass return encode
[ "def", "_has_xml_encode", "(", "self", ",", "content", ")", ":", "encode", "=", "None", "m", "=", "RE_XML_START", ".", "match", "(", "content", ")", "if", "m", ":", "if", "m", ".", "group", "(", "1", ")", ":", "m2", "=", "RE_XML_ENCODE", ".", "matc...
Check XML encoding.
[ "Check", "XML", "encoding", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/xml.py#L71-L116
train
49,551
facelessuser/pyspelling
pyspelling/filters/xml.py
XmlFilter.to_text
def to_text(self, tree, force_root=False): """ Extract text from tags. Skip any selectors specified and include attributes if specified. Ignored tags will not have their attributes scanned either. """ self.extract_tag_metadata(tree) text = [] attributes = [] comments = [] blocks = [] if not (self.ignores.match(tree) if self.ignores else None): # The root of the document is the BeautifulSoup object capture = self.captures.match(tree) if self.captures is not None else None # Check attributes for normal tags if capture: for attr in self.attributes: value = tree.attrs.get(attr, '').strip() if value: sel = self.construct_selector(tree, attr=attr) attributes.append((value, sel)) # Walk children for child in tree.children: string = str(child).strip() is_comment = isinstance(child, bs4.Comment) if isinstance(child, bs4.element.Tag): t, b, a, c = self.to_text(child) text.extend(t) attributes.extend(a) comments.extend(c) blocks.extend(b) # Get content if not the root and not a comment (unless we want comments). elif not isinstance(child, NON_CONTENT) and (not is_comment or self.comments): string = str(child).strip() if string: if is_comment: sel = self.construct_selector(tree) + '<!--comment-->' comments.append((string, sel)) elif capture: text.append(string) text.append(' ') elif self.comments: for child in tree.descendants: if isinstance(child, bs4.Comment): string = str(child).strip() if string: sel = self.construct_selector(tree) + '<!--comment-->' comments.append((string, sel)) text = self.store_blocks(tree, blocks, text, force_root) if tree.parent is None or force_root: return blocks, attributes, comments else: return text, blocks, attributes, comments
python
def to_text(self, tree, force_root=False): """ Extract text from tags. Skip any selectors specified and include attributes if specified. Ignored tags will not have their attributes scanned either. """ self.extract_tag_metadata(tree) text = [] attributes = [] comments = [] blocks = [] if not (self.ignores.match(tree) if self.ignores else None): # The root of the document is the BeautifulSoup object capture = self.captures.match(tree) if self.captures is not None else None # Check attributes for normal tags if capture: for attr in self.attributes: value = tree.attrs.get(attr, '').strip() if value: sel = self.construct_selector(tree, attr=attr) attributes.append((value, sel)) # Walk children for child in tree.children: string = str(child).strip() is_comment = isinstance(child, bs4.Comment) if isinstance(child, bs4.element.Tag): t, b, a, c = self.to_text(child) text.extend(t) attributes.extend(a) comments.extend(c) blocks.extend(b) # Get content if not the root and not a comment (unless we want comments). elif not isinstance(child, NON_CONTENT) and (not is_comment or self.comments): string = str(child).strip() if string: if is_comment: sel = self.construct_selector(tree) + '<!--comment-->' comments.append((string, sel)) elif capture: text.append(string) text.append(' ') elif self.comments: for child in tree.descendants: if isinstance(child, bs4.Comment): string = str(child).strip() if string: sel = self.construct_selector(tree) + '<!--comment-->' comments.append((string, sel)) text = self.store_blocks(tree, blocks, text, force_root) if tree.parent is None or force_root: return blocks, attributes, comments else: return text, blocks, attributes, comments
[ "def", "to_text", "(", "self", ",", "tree", ",", "force_root", "=", "False", ")", ":", "self", ".", "extract_tag_metadata", "(", "tree", ")", "text", "=", "[", "]", "attributes", "=", "[", "]", "comments", "=", "[", "]", "blocks", "=", "[", "]", "i...
Extract text from tags. Skip any selectors specified and include attributes if specified. Ignored tags will not have their attributes scanned either.
[ "Extract", "text", "from", "tags", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/xml.py#L167-L226
train
49,552
facelessuser/pyspelling
pyspelling/filters/xml.py
XmlFilter._filter
def _filter(self, text, context, encoding): """Filter the source text.""" content = [] blocks, attributes, comments = self.to_text(bs4.BeautifulSoup(text, self.parser)) if self.comments: for c, desc in comments: content.append(filters.SourceText(c, context + ': ' + desc, encoding, self.type + 'comment')) if self.attributes: for a, desc in attributes: content.append(filters.SourceText(a, context + ': ' + desc, encoding, self.type + 'attribute')) for b, desc in blocks: content.append(filters.SourceText(b, context + ': ' + desc, encoding, self.type + 'content')) return content
python
def _filter(self, text, context, encoding): """Filter the source text.""" content = [] blocks, attributes, comments = self.to_text(bs4.BeautifulSoup(text, self.parser)) if self.comments: for c, desc in comments: content.append(filters.SourceText(c, context + ': ' + desc, encoding, self.type + 'comment')) if self.attributes: for a, desc in attributes: content.append(filters.SourceText(a, context + ': ' + desc, encoding, self.type + 'attribute')) for b, desc in blocks: content.append(filters.SourceText(b, context + ': ' + desc, encoding, self.type + 'content')) return content
[ "def", "_filter", "(", "self", ",", "text", ",", "context", ",", "encoding", ")", ":", "content", "=", "[", "]", "blocks", ",", "attributes", ",", "comments", "=", "self", ".", "to_text", "(", "bs4", ".", "BeautifulSoup", "(", "text", ",", "self", "....
Filter the source text.
[ "Filter", "the", "source", "text", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/xml.py#L228-L241
train
49,553
JMSwag/dsdev-utils
dsdev_utils/helpers.py
lazy_import
def lazy_import(func): """Decorator for declaring a lazy import. This decorator turns a function into an object that will act as a lazy importer. Whenever the object's attributes are accessed, the function is called and its return value used in place of the object. So you can declare lazy imports like this: @lazy_import def socket(): import socket return socket The name "socket" will then be bound to a transparent object proxy which will import the socket module upon first use. The syntax here is slightly more verbose than other lazy import recipes, but it's designed not to hide the actual "import" statements from tools like pyinstaller or grep. """ try: f = sys._getframe(1) except Exception: # pragma: no cover namespace = None else: namespace = f.f_locals return _LazyImport(func.__name__, func, namespace)
python
def lazy_import(func): """Decorator for declaring a lazy import. This decorator turns a function into an object that will act as a lazy importer. Whenever the object's attributes are accessed, the function is called and its return value used in place of the object. So you can declare lazy imports like this: @lazy_import def socket(): import socket return socket The name "socket" will then be bound to a transparent object proxy which will import the socket module upon first use. The syntax here is slightly more verbose than other lazy import recipes, but it's designed not to hide the actual "import" statements from tools like pyinstaller or grep. """ try: f = sys._getframe(1) except Exception: # pragma: no cover namespace = None else: namespace = f.f_locals return _LazyImport(func.__name__, func, namespace)
[ "def", "lazy_import", "(", "func", ")", ":", "try", ":", "f", "=", "sys", ".", "_getframe", "(", "1", ")", "except", "Exception", ":", "# pragma: no cover", "namespace", "=", "None", "else", ":", "namespace", "=", "f", ".", "f_locals", "return", "_LazyIm...
Decorator for declaring a lazy import. This decorator turns a function into an object that will act as a lazy importer. Whenever the object's attributes are accessed, the function is called and its return value used in place of the object. So you can declare lazy imports like this: @lazy_import def socket(): import socket return socket The name "socket" will then be bound to a transparent object proxy which will import the socket module upon first use. The syntax here is slightly more verbose than other lazy import recipes, but it's designed not to hide the actual "import" statements from tools like pyinstaller or grep.
[ "Decorator", "for", "declaring", "a", "lazy", "import", "." ]
5adbf9b3fd9fff92d1dd714423b08e26a5038e14
https://github.com/JMSwag/dsdev-utils/blob/5adbf9b3fd9fff92d1dd714423b08e26a5038e14/dsdev_utils/helpers.py#L64-L90
train
49,554
BBVA/data-refinery
datarefinery/tuple/Formats.py
csv_to_map
def csv_to_map(fields, delimiter=','): """ Convert csv to dict :param delimiter: :param fields: :return: """ def _csv_to_list(csv_input): """ Util function to overcome the use of files by in-memory io buffer :param csv_input: :return: """ io_file = io.StringIO(csv_input) return next(csv.reader(io_file, delimiter=delimiter)) def _app(current_tuple, e=None): if current_tuple is None or len(current_tuple) == 0: return None, "no input" csv_list = _csv_to_list(current_tuple) if len(csv_list) != len(fields): e = {"input": "unexpected number of fields {} obtained {} expected".format(len(csv_list), len(fields))} return None, e return {k: v for (k, v) in zip(fields, csv_list)}, e if fields is None or len(fields) == 0: return fixed_input(None, "no fields provided, cannot proceed without order") return _app
python
def csv_to_map(fields, delimiter=','): """ Convert csv to dict :param delimiter: :param fields: :return: """ def _csv_to_list(csv_input): """ Util function to overcome the use of files by in-memory io buffer :param csv_input: :return: """ io_file = io.StringIO(csv_input) return next(csv.reader(io_file, delimiter=delimiter)) def _app(current_tuple, e=None): if current_tuple is None or len(current_tuple) == 0: return None, "no input" csv_list = _csv_to_list(current_tuple) if len(csv_list) != len(fields): e = {"input": "unexpected number of fields {} obtained {} expected".format(len(csv_list), len(fields))} return None, e return {k: v for (k, v) in zip(fields, csv_list)}, e if fields is None or len(fields) == 0: return fixed_input(None, "no fields provided, cannot proceed without order") return _app
[ "def", "csv_to_map", "(", "fields", ",", "delimiter", "=", "','", ")", ":", "def", "_csv_to_list", "(", "csv_input", ")", ":", "\"\"\"\n Util function to overcome the use of files by in-memory io buffer\n\n :param csv_input:\n :return:\n \"\"\"", "io_fil...
Convert csv to dict :param delimiter: :param fields: :return:
[ "Convert", "csv", "to", "dict" ]
4ff19186ac570269f64a245ad6297cf882c70aa4
https://github.com/BBVA/data-refinery/blob/4ff19186ac570269f64a245ad6297cf882c70aa4/datarefinery/tuple/Formats.py#L34-L63
train
49,555
BBVA/data-refinery
datarefinery/tuple/Formats.py
map_to_csv
def map_to_csv(fields, delimiter=","): """ Convert dict to csv :param fields: :return: """ def _list_to_csv(l): """ Util function to overcome the use of files by in-memory io buffer :param l: :return: """ io_file = io.StringIO() writer = csv.writer(io_file, quoting=csv.QUOTE_NONNUMERIC, lineterminator='', delimiter=delimiter) writer.writerow(l) return io_file.getvalue() def _app(current_tuple, e=None): if e is not None: return None, e csv_list = [] for f in fields: if f in current_tuple: csv_list.append(current_tuple[f]) else: e.update({"output": "expected field {} not found".format(f)}) return None, e return _list_to_csv(csv_list), e if fields is None or len(fields) == 0: return fixed_input(None, "no fields provided, cannot proceed without order") return _app
python
def map_to_csv(fields, delimiter=","): """ Convert dict to csv :param fields: :return: """ def _list_to_csv(l): """ Util function to overcome the use of files by in-memory io buffer :param l: :return: """ io_file = io.StringIO() writer = csv.writer(io_file, quoting=csv.QUOTE_NONNUMERIC, lineterminator='', delimiter=delimiter) writer.writerow(l) return io_file.getvalue() def _app(current_tuple, e=None): if e is not None: return None, e csv_list = [] for f in fields: if f in current_tuple: csv_list.append(current_tuple[f]) else: e.update({"output": "expected field {} not found".format(f)}) return None, e return _list_to_csv(csv_list), e if fields is None or len(fields) == 0: return fixed_input(None, "no fields provided, cannot proceed without order") return _app
[ "def", "map_to_csv", "(", "fields", ",", "delimiter", "=", "\",\"", ")", ":", "def", "_list_to_csv", "(", "l", ")", ":", "\"\"\"\n Util function to overcome the use of files by in-memory io buffer\n\n :param l:\n :return:\n \"\"\"", "io_file", "=", "...
Convert dict to csv :param fields: :return:
[ "Convert", "dict", "to", "csv" ]
4ff19186ac570269f64a245ad6297cf882c70aa4
https://github.com/BBVA/data-refinery/blob/4ff19186ac570269f64a245ad6297cf882c70aa4/datarefinery/tuple/Formats.py#L66-L99
train
49,556
deshima-dev/decode
decode/core/array/decorators.py
xarrayfunc
def xarrayfunc(func): """Make a function compatible with xarray.DataArray. This function is intended to be used as a decorator like:: >>> @dc.xarrayfunc >>> def func(array): ... # do something ... return newarray >>> >>> result = func(array) Args: func (function): Function to be wrapped. The first argument of the function must be an array to be processed. Returns: wrapper (function): Wrapped function. """ @wraps(func) def wrapper(*args, **kwargs): if any(isinstance(arg, xr.DataArray) for arg in args): newargs = [] for arg in args: if isinstance(arg, xr.DataArray): newargs.append(arg.values) else: newargs.append(arg) return dc.full_like(args[0], func(*newargs, **kwargs)) else: return func(*args, **kwargs) return wrapper
python
def xarrayfunc(func): """Make a function compatible with xarray.DataArray. This function is intended to be used as a decorator like:: >>> @dc.xarrayfunc >>> def func(array): ... # do something ... return newarray >>> >>> result = func(array) Args: func (function): Function to be wrapped. The first argument of the function must be an array to be processed. Returns: wrapper (function): Wrapped function. """ @wraps(func) def wrapper(*args, **kwargs): if any(isinstance(arg, xr.DataArray) for arg in args): newargs = [] for arg in args: if isinstance(arg, xr.DataArray): newargs.append(arg.values) else: newargs.append(arg) return dc.full_like(args[0], func(*newargs, **kwargs)) else: return func(*args, **kwargs) return wrapper
[ "def", "xarrayfunc", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "any", "(", "isinstance", "(", "arg", ",", "xr", ".", "DataArray", ")", "for", "arg", "in", ...
Make a function compatible with xarray.DataArray. This function is intended to be used as a decorator like:: >>> @dc.xarrayfunc >>> def func(array): ... # do something ... return newarray >>> >>> result = func(array) Args: func (function): Function to be wrapped. The first argument of the function must be an array to be processed. Returns: wrapper (function): Wrapped function.
[ "Make", "a", "function", "compatible", "with", "xarray", ".", "DataArray", "." ]
e789e174cd316e7ec8bc55be7009ad35baced3c0
https://github.com/deshima-dev/decode/blob/e789e174cd316e7ec8bc55be7009ad35baced3c0/decode/core/array/decorators.py#L30-L63
train
49,557
deshima-dev/decode
decode/core/array/decorators.py
chunk
def chunk(*argnames, concatfunc=None): """Make a function compatible with multicore chunk processing. This function is intended to be used as a decorator like:: >>> @dc.chunk('array') >>> def func(array): ... # do something ... return newarray >>> >>> result = func(array, timechunk=10) or you can set a global chunk parameter outside the function:: >>> timechunk = 10 >>> result = func(array) """ def _chunk(func): depth = [s.function for s in stack()].index('<module>') f_globals = getframe(depth).f_globals # original (unwrapped) function orgname = '_original_' + func.__name__ orgfunc = dc.utils.copy_function(func, orgname) f_globals[orgname] = orgfunc @wraps(func) def wrapper(*args, **kwargs): depth = [s.function for s in stack()].index('<module>') f_globals = getframe(depth).f_globals # parse args and kwargs params = signature(func).parameters for i, (key, val) in enumerate(params.items()): if not val.kind == Parameter.POSITIONAL_OR_KEYWORD: break try: kwargs.update({key: args[i]}) except IndexError: kwargs.setdefault(key, val.default) # n_chunks and n_processes n_chunks = DEFAULT_N_CHUNKS n_processes = MAX_WORKERS if argnames: length = len(kwargs[argnames[0]]) if 'numchunk' in kwargs: n_chunks = kwargs.pop('numchunk') elif 'timechunk' in kwargs: n_chunks = round(length / kwargs.pop('timechunk')) elif 'numchunk' in f_globals: n_chunks = f_globals['numchunk'] elif 'timechunk' in f_globals: n_chunks = round(length / f_globals['timechunk']) if 'n_processes' in kwargs: n_processes = kwargs.pop('n_processes') elif 'n_processes' in f_globals: n_processes = f_globals['n_processes'] # make chunked args chunks = {} for name in argnames: arg = kwargs.pop(name) try: chunks.update({name: np.array_split(arg, n_chunks)}) except TypeError: chunks.update({name: np.tile(arg, n_chunks)}) # run the function futures = [] results = [] with dc.utils.one_thread_per_process(), Pool(n_processes) as p: for i in range(n_chunks): chunk = {key: val[i] for key, val in chunks.items()} 
futures.append(p.submit(orgfunc, **{**chunk, **kwargs})) for future in futures: results.append(future.result()) # make an output if concatfunc is not None: return concatfunc(results) try: return xr.concat(results, 't') except TypeError: return np.concatenate(results, 0) return wrapper return _chunk
python
def chunk(*argnames, concatfunc=None): """Make a function compatible with multicore chunk processing. This function is intended to be used as a decorator like:: >>> @dc.chunk('array') >>> def func(array): ... # do something ... return newarray >>> >>> result = func(array, timechunk=10) or you can set a global chunk parameter outside the function:: >>> timechunk = 10 >>> result = func(array) """ def _chunk(func): depth = [s.function for s in stack()].index('<module>') f_globals = getframe(depth).f_globals # original (unwrapped) function orgname = '_original_' + func.__name__ orgfunc = dc.utils.copy_function(func, orgname) f_globals[orgname] = orgfunc @wraps(func) def wrapper(*args, **kwargs): depth = [s.function for s in stack()].index('<module>') f_globals = getframe(depth).f_globals # parse args and kwargs params = signature(func).parameters for i, (key, val) in enumerate(params.items()): if not val.kind == Parameter.POSITIONAL_OR_KEYWORD: break try: kwargs.update({key: args[i]}) except IndexError: kwargs.setdefault(key, val.default) # n_chunks and n_processes n_chunks = DEFAULT_N_CHUNKS n_processes = MAX_WORKERS if argnames: length = len(kwargs[argnames[0]]) if 'numchunk' in kwargs: n_chunks = kwargs.pop('numchunk') elif 'timechunk' in kwargs: n_chunks = round(length / kwargs.pop('timechunk')) elif 'numchunk' in f_globals: n_chunks = f_globals['numchunk'] elif 'timechunk' in f_globals: n_chunks = round(length / f_globals['timechunk']) if 'n_processes' in kwargs: n_processes = kwargs.pop('n_processes') elif 'n_processes' in f_globals: n_processes = f_globals['n_processes'] # make chunked args chunks = {} for name in argnames: arg = kwargs.pop(name) try: chunks.update({name: np.array_split(arg, n_chunks)}) except TypeError: chunks.update({name: np.tile(arg, n_chunks)}) # run the function futures = [] results = [] with dc.utils.one_thread_per_process(), Pool(n_processes) as p: for i in range(n_chunks): chunk = {key: val[i] for key, val in chunks.items()} 
futures.append(p.submit(orgfunc, **{**chunk, **kwargs})) for future in futures: results.append(future.result()) # make an output if concatfunc is not None: return concatfunc(results) try: return xr.concat(results, 't') except TypeError: return np.concatenate(results, 0) return wrapper return _chunk
[ "def", "chunk", "(", "*", "argnames", ",", "concatfunc", "=", "None", ")", ":", "def", "_chunk", "(", "func", ")", ":", "depth", "=", "[", "s", ".", "function", "for", "s", "in", "stack", "(", ")", "]", ".", "index", "(", "'<module>'", ")", "f_gl...
Make a function compatible with multicore chunk processing. This function is intended to be used as a decorator like:: >>> @dc.chunk('array') >>> def func(array): ... # do something ... return newarray >>> >>> result = func(array, timechunk=10) or you can set a global chunk parameter outside the function:: >>> timechunk = 10 >>> result = func(array)
[ "Make", "a", "function", "compatible", "with", "multicore", "chunk", "processing", "." ]
e789e174cd316e7ec8bc55be7009ad35baced3c0
https://github.com/deshima-dev/decode/blob/e789e174cd316e7ec8bc55be7009ad35baced3c0/decode/core/array/decorators.py#L66-L160
train
49,558
facelessuser/pyspelling
pyspelling/filters/markdown.py
MarkdownFilter._filter
def _filter(self, text): """Filter markdown.""" self.markdown.reset() return self.markdown.convert(text)
python
def _filter(self, text): """Filter markdown.""" self.markdown.reset() return self.markdown.convert(text)
[ "def", "_filter", "(", "self", ",", "text", ")", ":", "self", ".", "markdown", ".", "reset", "(", ")", "return", "self", ".", "markdown", ".", "convert", "(", "text", ")" ]
Filter markdown.
[ "Filter", "markdown", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/markdown.py#L45-L49
train
49,559
runfalk/spans
benchmark.py
format_sec
def format_sec(s): """ Format seconds in a more human readable way. It supports units down to nanoseconds. :param s: Float of seconds to format :return: String second representation, like 12.4 us """ prefixes = ["", "m", "u", "n"] unit = 0 while s < 1 and unit + 1 < len(prefixes): s *= 1000 unit += 1 return "{:.1f} {}s".format(s, prefixes[unit])
python
def format_sec(s): """ Format seconds in a more human readable way. It supports units down to nanoseconds. :param s: Float of seconds to format :return: String second representation, like 12.4 us """ prefixes = ["", "m", "u", "n"] unit = 0 while s < 1 and unit + 1 < len(prefixes): s *= 1000 unit += 1 return "{:.1f} {}s".format(s, prefixes[unit])
[ "def", "format_sec", "(", "s", ")", ":", "prefixes", "=", "[", "\"\"", ",", "\"m\"", ",", "\"u\"", ",", "\"n\"", "]", "unit", "=", "0", "while", "s", "<", "1", "and", "unit", "+", "1", "<", "len", "(", "prefixes", ")", ":", "s", "*=", "1000", ...
Format seconds in a more human readable way. It supports units down to nanoseconds. :param s: Float of seconds to format :return: String second representation, like 12.4 us
[ "Format", "seconds", "in", "a", "more", "human", "readable", "way", ".", "It", "supports", "units", "down", "to", "nanoseconds", "." ]
59ed73407a569c3be86cfdb4b8f438cb8c794540
https://github.com/runfalk/spans/blob/59ed73407a569c3be86cfdb4b8f438cb8c794540/benchmark.py#L6-L22
train
49,560
mrcagney/make_gtfs
make_gtfs/cli.py
make_gtfs
def make_gtfs(source_path, target_path, buffer, ndigits): """ Create a GTFS feed from the files in the directory SOURCE_PATH. See the project README for a description of the required source files. Save the feed to the file or directory TARGET_PATH. If the target path ends in '.zip', then write the feed as a zip archive. Otherwise assume the path is a directory, and write the feed as a collection of CSV files to that directory, creating the directory if it does not exist. If a stops file is present, then search within ``buffer`` meters on the traffic side of trip paths for stops. Round all decimals to ndigits decimal places. All distances in the resulting GTFS feed will be in kilometers. """ pfeed = pf.read_protofeed(source_path) feed = m.build_feed(pfeed, buffer=buffer) gt.write_gtfs(feed, target_path, ndigits=ndigits)
python
def make_gtfs(source_path, target_path, buffer, ndigits): """ Create a GTFS feed from the files in the directory SOURCE_PATH. See the project README for a description of the required source files. Save the feed to the file or directory TARGET_PATH. If the target path ends in '.zip', then write the feed as a zip archive. Otherwise assume the path is a directory, and write the feed as a collection of CSV files to that directory, creating the directory if it does not exist. If a stops file is present, then search within ``buffer`` meters on the traffic side of trip paths for stops. Round all decimals to ndigits decimal places. All distances in the resulting GTFS feed will be in kilometers. """ pfeed = pf.read_protofeed(source_path) feed = m.build_feed(pfeed, buffer=buffer) gt.write_gtfs(feed, target_path, ndigits=ndigits)
[ "def", "make_gtfs", "(", "source_path", ",", "target_path", ",", "buffer", ",", "ndigits", ")", ":", "pfeed", "=", "pf", ".", "read_protofeed", "(", "source_path", ")", "feed", "=", "m", ".", "build_feed", "(", "pfeed", ",", "buffer", "=", "buffer", ")",...
Create a GTFS feed from the files in the directory SOURCE_PATH. See the project README for a description of the required source files. Save the feed to the file or directory TARGET_PATH. If the target path ends in '.zip', then write the feed as a zip archive. Otherwise assume the path is a directory, and write the feed as a collection of CSV files to that directory, creating the directory if it does not exist. If a stops file is present, then search within ``buffer`` meters on the traffic side of trip paths for stops. Round all decimals to ndigits decimal places. All distances in the resulting GTFS feed will be in kilometers.
[ "Create", "a", "GTFS", "feed", "from", "the", "files", "in", "the", "directory", "SOURCE_PATH", ".", "See", "the", "project", "README", "for", "a", "description", "of", "the", "required", "source", "files", ".", "Save", "the", "feed", "to", "the", "file", ...
37b6f88e03bac708c2e85d6f4b6d48a0c92e4a59
https://github.com/mrcagney/make_gtfs/blob/37b6f88e03bac708c2e85d6f4b6d48a0c92e4a59/make_gtfs/cli.py#L18-L37
train
49,561
deshima-dev/decode
decode/utils/ndarray/functions.py
psd
def psd(data, dt, ndivide=1, window=hanning, overlap_half=False): """Calculate power spectrum density of data. Args: data (np.ndarray): Input data. dt (float): Time between each data. ndivide (int): Do averaging (split data into ndivide, get psd of each, and average them). ax (matplotlib.axes): Axis you want to plot on. doplot (bool): Plot how averaging works. overlap_half (bool): Split data to half-overlapped regions. Returns: vk (np.ndarray): Frequency. psd (np.ndarray): PSD """ logger = getLogger('decode.utils.ndarray.psd') if overlap_half: step = int(len(data) / (ndivide + 1)) size = step * 2 else: step = int(len(data) / ndivide) size = step if bin(len(data)).count('1') != 1: logger.warning('warning: length of data is not power of 2: {}'.format(len(data))) size = int(len(data) / ndivide) if bin(size).count('1') != 1.: if overlap_half: logger.warning('warning: ((length of data) / (ndivide+1)) * 2 is not power of 2: {}'.format(size)) else: logger.warning('warning: (length of data) / ndivide is not power of 2: {}'.format(size)) psd = np.zeros(size) T = (size - 1) * dt vs = 1 / dt vk_ = fftfreq(size, dt) vk = vk_[np.where(vk_ >= 0)] for i in range(ndivide): d = data[i * step:i * step + size] if window is None: w = np.ones(size) corr = 1.0 else: w = window(size) corr = np.mean(w**2) psd = psd + 2 * (np.abs(fft(d * w)))**2 / size * dt / corr return vk, psd[:len(vk)] / ndivide
python
def psd(data, dt, ndivide=1, window=hanning, overlap_half=False): """Calculate power spectrum density of data. Args: data (np.ndarray): Input data. dt (float): Time between each data. ndivide (int): Do averaging (split data into ndivide, get psd of each, and average them). ax (matplotlib.axes): Axis you want to plot on. doplot (bool): Plot how averaging works. overlap_half (bool): Split data to half-overlapped regions. Returns: vk (np.ndarray): Frequency. psd (np.ndarray): PSD """ logger = getLogger('decode.utils.ndarray.psd') if overlap_half: step = int(len(data) / (ndivide + 1)) size = step * 2 else: step = int(len(data) / ndivide) size = step if bin(len(data)).count('1') != 1: logger.warning('warning: length of data is not power of 2: {}'.format(len(data))) size = int(len(data) / ndivide) if bin(size).count('1') != 1.: if overlap_half: logger.warning('warning: ((length of data) / (ndivide+1)) * 2 is not power of 2: {}'.format(size)) else: logger.warning('warning: (length of data) / ndivide is not power of 2: {}'.format(size)) psd = np.zeros(size) T = (size - 1) * dt vs = 1 / dt vk_ = fftfreq(size, dt) vk = vk_[np.where(vk_ >= 0)] for i in range(ndivide): d = data[i * step:i * step + size] if window is None: w = np.ones(size) corr = 1.0 else: w = window(size) corr = np.mean(w**2) psd = psd + 2 * (np.abs(fft(d * w)))**2 / size * dt / corr return vk, psd[:len(vk)] / ndivide
[ "def", "psd", "(", "data", ",", "dt", ",", "ndivide", "=", "1", ",", "window", "=", "hanning", ",", "overlap_half", "=", "False", ")", ":", "logger", "=", "getLogger", "(", "'decode.utils.ndarray.psd'", ")", "if", "overlap_half", ":", "step", "=", "int",...
Calculate power spectrum density of data. Args: data (np.ndarray): Input data. dt (float): Time between each data. ndivide (int): Do averaging (split data into ndivide, get psd of each, and average them). ax (matplotlib.axes): Axis you want to plot on. doplot (bool): Plot how averaging works. overlap_half (bool): Split data to half-overlapped regions. Returns: vk (np.ndarray): Frequency. psd (np.ndarray): PSD
[ "Calculate", "power", "spectrum", "density", "of", "data", "." ]
e789e174cd316e7ec8bc55be7009ad35baced3c0
https://github.com/deshima-dev/decode/blob/e789e174cd316e7ec8bc55be7009ad35baced3c0/decode/utils/ndarray/functions.py#L20-L68
train
49,562
deshima-dev/decode
decode/utils/ndarray/functions.py
allan_variance
def allan_variance(data, dt, tmax=10): """Calculate Allan variance. Args: data (np.ndarray): Input data. dt (float): Time between each data. tmax (float): Maximum time. Returns: vk (np.ndarray): Frequency. allanvar (np.ndarray): Allan variance. """ allanvar = [] nmax = len(data) if len(data) < tmax / dt else int(tmax / dt) for i in range(1, nmax+1): databis = data[len(data) % i:] y = databis.reshape(len(data)//i, i).mean(axis=1) allanvar.append(((y[1:] - y[:-1])**2).mean() / 2) return dt * np.arange(1, nmax+1), np.array(allanvar)
python
def allan_variance(data, dt, tmax=10): """Calculate Allan variance. Args: data (np.ndarray): Input data. dt (float): Time between each data. tmax (float): Maximum time. Returns: vk (np.ndarray): Frequency. allanvar (np.ndarray): Allan variance. """ allanvar = [] nmax = len(data) if len(data) < tmax / dt else int(tmax / dt) for i in range(1, nmax+1): databis = data[len(data) % i:] y = databis.reshape(len(data)//i, i).mean(axis=1) allanvar.append(((y[1:] - y[:-1])**2).mean() / 2) return dt * np.arange(1, nmax+1), np.array(allanvar)
[ "def", "allan_variance", "(", "data", ",", "dt", ",", "tmax", "=", "10", ")", ":", "allanvar", "=", "[", "]", "nmax", "=", "len", "(", "data", ")", "if", "len", "(", "data", ")", "<", "tmax", "/", "dt", "else", "int", "(", "tmax", "/", "dt", ...
Calculate Allan variance. Args: data (np.ndarray): Input data. dt (float): Time between each data. tmax (float): Maximum time. Returns: vk (np.ndarray): Frequency. allanvar (np.ndarray): Allan variance.
[ "Calculate", "Allan", "variance", "." ]
e789e174cd316e7ec8bc55be7009ad35baced3c0
https://github.com/deshima-dev/decode/blob/e789e174cd316e7ec8bc55be7009ad35baced3c0/decode/utils/ndarray/functions.py#L71-L89
train
49,563
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.discover
def discover(cls, path, depth="0"): """Discover a list of collections under the given ``path``. If ``depth`` is "0", only the actual object under ``path`` is returned. If ``depth`` is anything but "0", it is considered as "1" and direct children are included in the result. The ``path`` is relative. The root collection "/" must always exist. """ # Path should already be sanitized attributes = _get_attributes_from_path(path) try: if len(attributes) == 3: # If an item, create a collection for the item. item = attributes.pop() path = "/".join(attributes) collection = cls(path, _is_principal(path)) yield collection.get(item) return collection = cls(path, _is_principal(path)) except api.exceptions.DoesNotExist: return yield collection if depth == "0": return if len(attributes) == 0: yield cls(posixpath.join(path, cls.user), principal=True) elif len(attributes) == 1: for journal in cls.etesync.list(): if journal.collection.TYPE in (api.AddressBook.TYPE, api.Calendar.TYPE, api.TaskList.TYPE): yield cls(posixpath.join(path, journal.uid), principal=False) elif len(attributes) == 2: for item in collection.list(): yield collection.get(item) elif len(attributes) > 2: raise RuntimeError("Found more than one attribute. Shouldn't happen")
python
def discover(cls, path, depth="0"): """Discover a list of collections under the given ``path``. If ``depth`` is "0", only the actual object under ``path`` is returned. If ``depth`` is anything but "0", it is considered as "1" and direct children are included in the result. The ``path`` is relative. The root collection "/" must always exist. """ # Path should already be sanitized attributes = _get_attributes_from_path(path) try: if len(attributes) == 3: # If an item, create a collection for the item. item = attributes.pop() path = "/".join(attributes) collection = cls(path, _is_principal(path)) yield collection.get(item) return collection = cls(path, _is_principal(path)) except api.exceptions.DoesNotExist: return yield collection if depth == "0": return if len(attributes) == 0: yield cls(posixpath.join(path, cls.user), principal=True) elif len(attributes) == 1: for journal in cls.etesync.list(): if journal.collection.TYPE in (api.AddressBook.TYPE, api.Calendar.TYPE, api.TaskList.TYPE): yield cls(posixpath.join(path, journal.uid), principal=False) elif len(attributes) == 2: for item in collection.list(): yield collection.get(item) elif len(attributes) > 2: raise RuntimeError("Found more than one attribute. Shouldn't happen")
[ "def", "discover", "(", "cls", ",", "path", ",", "depth", "=", "\"0\"", ")", ":", "# Path should already be sanitized", "attributes", "=", "_get_attributes_from_path", "(", "path", ")", "try", ":", "if", "len", "(", "attributes", ")", "==", "3", ":", "# If a...
Discover a list of collections under the given ``path``. If ``depth`` is "0", only the actual object under ``path`` is returned. If ``depth`` is anything but "0", it is considered as "1" and direct children are included in the result. The ``path`` is relative. The root collection "/" must always exist.
[ "Discover", "a", "list", "of", "collections", "under", "the", "given", "path", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L185-L232
train
49,564
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.create_collection
def create_collection(cls, href, collection=None, props=None): """Create a collection. If the collection already exists and neither ``collection`` nor ``props`` are set, this method shouldn't do anything. Otherwise the existing collection must be replaced. ``collection`` is a list of vobject components. ``props`` are metadata values for the collection. ``props["tag"]`` is the type of collection (VCALENDAR or VADDRESSBOOK). If the key ``tag`` is missing, it is guessed from the collection. """ # Path should already be sanitized attributes = _get_attributes_from_path(href) if len(attributes) <= 1: raise PrincipalNotAllowedError # Try to infer tag if not props: props = {} if not props.get("tag") and collection: props["tag"] = collection[0].name # Try first getting the collection if exists, or create a new one otherwise. try: self = cls(href, principal=False, tag=props.get("tag")) except api.exceptions.DoesNotExist: user_path = posixpath.join('/', cls.user) collection_name = hashlib.sha256(str(time.time()).encode()).hexdigest() sane_path = posixpath.join(user_path, collection_name) if props.get("tag") == "VCALENDAR": inst = api.Calendar.create(cls.etesync, collection_name, None) elif props.get("tag") == "VADDRESSBOOK": inst = api.AddressBook.create(cls.etesync, collection_name, None) else: raise RuntimeError("Bad tag.") inst.save() self = cls(sane_path, principal=False) self.set_meta(props) if collection: if props.get("tag") == "VCALENDAR": collection, = collection items = [] for content in ("vevent", "vtodo", "vjournal"): items.extend( getattr(collection, "%s_list" % content, [])) items_by_uid = groupby(sorted(items, key=get_uid), get_uid) vobject_items = {} for uid, items in items_by_uid: new_collection = vobject.iCalendar() for item in items: new_collection.add(item) href = self._find_available_file_name( vobject_items.get) vobject_items[href] = new_collection self.upload_all_nonatomic(vobject_items) elif props.get("tag") == "VADDRESSBOOK": vobject_items = {} for 
card in collection: href = self._find_available_file_name( vobject_items.get) vobject_items[href] = card self.upload_all_nonatomic(vobject_items) return self
python
def create_collection(cls, href, collection=None, props=None): """Create a collection. If the collection already exists and neither ``collection`` nor ``props`` are set, this method shouldn't do anything. Otherwise the existing collection must be replaced. ``collection`` is a list of vobject components. ``props`` are metadata values for the collection. ``props["tag"]`` is the type of collection (VCALENDAR or VADDRESSBOOK). If the key ``tag`` is missing, it is guessed from the collection. """ # Path should already be sanitized attributes = _get_attributes_from_path(href) if len(attributes) <= 1: raise PrincipalNotAllowedError # Try to infer tag if not props: props = {} if not props.get("tag") and collection: props["tag"] = collection[0].name # Try first getting the collection if exists, or create a new one otherwise. try: self = cls(href, principal=False, tag=props.get("tag")) except api.exceptions.DoesNotExist: user_path = posixpath.join('/', cls.user) collection_name = hashlib.sha256(str(time.time()).encode()).hexdigest() sane_path = posixpath.join(user_path, collection_name) if props.get("tag") == "VCALENDAR": inst = api.Calendar.create(cls.etesync, collection_name, None) elif props.get("tag") == "VADDRESSBOOK": inst = api.AddressBook.create(cls.etesync, collection_name, None) else: raise RuntimeError("Bad tag.") inst.save() self = cls(sane_path, principal=False) self.set_meta(props) if collection: if props.get("tag") == "VCALENDAR": collection, = collection items = [] for content in ("vevent", "vtodo", "vjournal"): items.extend( getattr(collection, "%s_list" % content, [])) items_by_uid = groupby(sorted(items, key=get_uid), get_uid) vobject_items = {} for uid, items in items_by_uid: new_collection = vobject.iCalendar() for item in items: new_collection.add(item) href = self._find_available_file_name( vobject_items.get) vobject_items[href] = new_collection self.upload_all_nonatomic(vobject_items) elif props.get("tag") == "VADDRESSBOOK": vobject_items = {} for 
card in collection: href = self._find_available_file_name( vobject_items.get) vobject_items[href] = card self.upload_all_nonatomic(vobject_items) return self
[ "def", "create_collection", "(", "cls", ",", "href", ",", "collection", "=", "None", ",", "props", "=", "None", ")", ":", "# Path should already be sanitized", "attributes", "=", "_get_attributes_from_path", "(", "href", ")", "if", "len", "(", "attributes", ")",...
Create a collection. If the collection already exists and neither ``collection`` nor ``props`` are set, this method shouldn't do anything. Otherwise the existing collection must be replaced. ``collection`` is a list of vobject components. ``props`` are metadata values for the collection. ``props["tag"]`` is the type of collection (VCALENDAR or VADDRESSBOOK). If the key ``tag`` is missing, it is guessed from the collection.
[ "Create", "a", "collection", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L247-L319
train
49,565
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.sync
def sync(self, old_token=None): """Get the current sync token and changed items for synchronization. ``old_token`` an old sync token which is used as the base of the delta update. If sync token is missing, all items are returned. ValueError is raised for invalid or old tokens. """ # FIXME: Actually implement token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"") if old_token: raise ValueError("Sync token are not supported (you can ignore this warning)") return token, self.list()
python
def sync(self, old_token=None): """Get the current sync token and changed items for synchronization. ``old_token`` an old sync token which is used as the base of the delta update. If sync token is missing, all items are returned. ValueError is raised for invalid or old tokens. """ # FIXME: Actually implement token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"") if old_token: raise ValueError("Sync token are not supported (you can ignore this warning)") return token, self.list()
[ "def", "sync", "(", "self", ",", "old_token", "=", "None", ")", ":", "# FIXME: Actually implement", "token", "=", "\"http://radicale.org/ns/sync/%s\"", "%", "self", ".", "etag", ".", "strip", "(", "\"\\\"\"", ")", "if", "old_token", ":", "raise", "ValueError", ...
Get the current sync token and changed items for synchronization. ``old_token`` an old sync token which is used as the base of the delta update. If sync token is missing, all items are returned. ValueError is raised for invalid or old tokens.
[ "Get", "the", "current", "sync", "token", "and", "changed", "items", "for", "synchronization", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L321-L332
train
49,566
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.list
def list(self): """List collection items.""" if self.is_fake: return for item in self.collection.list(): yield item.uid + self.content_suffix
python
def list(self): """List collection items.""" if self.is_fake: return for item in self.collection.list(): yield item.uid + self.content_suffix
[ "def", "list", "(", "self", ")", ":", "if", "self", ".", "is_fake", ":", "return", "for", "item", "in", "self", ".", "collection", ".", "list", "(", ")", ":", "yield", "item", ".", "uid", "+", "self", ".", "content_suffix" ]
List collection items.
[ "List", "collection", "items", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L334-L340
train
49,567
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.get
def get(self, href): """Fetch a single item.""" if self.is_fake: return uid = _trim_suffix(href, ('.ics', '.ical', '.vcf')) etesync_item = self.collection.get(uid) if etesync_item is None: return None try: item = vobject.readOne(etesync_item.content) except Exception as e: raise RuntimeError("Failed to parse item %r in %r" % (href, self.path)) from e # FIXME: Make this sensible last_modified = time.strftime( "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(time.time())) return EteSyncItem(self, item, href, last_modified=last_modified, etesync_item=etesync_item)
python
def get(self, href): """Fetch a single item.""" if self.is_fake: return uid = _trim_suffix(href, ('.ics', '.ical', '.vcf')) etesync_item = self.collection.get(uid) if etesync_item is None: return None try: item = vobject.readOne(etesync_item.content) except Exception as e: raise RuntimeError("Failed to parse item %r in %r" % (href, self.path)) from e # FIXME: Make this sensible last_modified = time.strftime( "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(time.time())) return EteSyncItem(self, item, href, last_modified=last_modified, etesync_item=etesync_item)
[ "def", "get", "(", "self", ",", "href", ")", ":", "if", "self", ".", "is_fake", ":", "return", "uid", "=", "_trim_suffix", "(", "href", ",", "(", "'.ics'", ",", "'.ical'", ",", "'.vcf'", ")", ")", "etesync_item", "=", "self", ".", "collection", ".", ...
Fetch a single item.
[ "Fetch", "a", "single", "item", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L342-L361
train
49,568
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.upload
def upload(self, href, vobject_item): """Upload a new or replace an existing item.""" if self.is_fake: return content = vobject_item.serialize() try: item = self.get(href) etesync_item = item.etesync_item etesync_item.content = content except api.exceptions.DoesNotExist: etesync_item = self.collection.get_content_class().create(self.collection, content) etesync_item.save() return self.get(href)
python
def upload(self, href, vobject_item): """Upload a new or replace an existing item.""" if self.is_fake: return content = vobject_item.serialize() try: item = self.get(href) etesync_item = item.etesync_item etesync_item.content = content except api.exceptions.DoesNotExist: etesync_item = self.collection.get_content_class().create(self.collection, content) etesync_item.save() return self.get(href)
[ "def", "upload", "(", "self", ",", "href", ",", "vobject_item", ")", ":", "if", "self", ".", "is_fake", ":", "return", "content", "=", "vobject_item", ".", "serialize", "(", ")", "try", ":", "item", "=", "self", ".", "get", "(", "href", ")", "etesync...
Upload a new or replace an existing item.
[ "Upload", "a", "new", "or", "replace", "an", "existing", "item", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L363-L378
train
49,569
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.get_meta
def get_meta(self, key=None): """Get metadata value for collection.""" if self.is_fake: return {} if key == "tag": return self.tag elif key is None: ret = {} for key in self.journal.info.keys(): ret[key] = self.meta_mappings.map_get(self.journal.info, key)[1] return ret else: key, value = self.meta_mappings.map_get(self.journal.info, key) return value
python
def get_meta(self, key=None): """Get metadata value for collection.""" if self.is_fake: return {} if key == "tag": return self.tag elif key is None: ret = {} for key in self.journal.info.keys(): ret[key] = self.meta_mappings.map_get(self.journal.info, key)[1] return ret else: key, value = self.meta_mappings.map_get(self.journal.info, key) return value
[ "def", "get_meta", "(", "self", ",", "key", "=", "None", ")", ":", "if", "self", ".", "is_fake", ":", "return", "{", "}", "if", "key", "==", "\"tag\"", ":", "return", "self", ".", "tag", "elif", "key", "is", "None", ":", "ret", "=", "{", "}", "...
Get metadata value for collection.
[ "Get", "metadata", "value", "for", "collection", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L399-L413
train
49,570
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.last_modified
def last_modified(self): """Get the HTTP-datetime of when the collection was modified.""" # FIXME: Make this sensible last_modified = time.strftime( "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(time.time())) return last_modified
python
def last_modified(self): """Get the HTTP-datetime of when the collection was modified.""" # FIXME: Make this sensible last_modified = time.strftime( "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(time.time())) return last_modified
[ "def", "last_modified", "(", "self", ")", ":", "# FIXME: Make this sensible", "last_modified", "=", "time", ".", "strftime", "(", "\"%a, %d %b %Y %H:%M:%S GMT\"", ",", "time", ".", "gmtime", "(", "time", ".", "time", "(", ")", ")", ")", "return", "last_modified"...
Get the HTTP-datetime of when the collection was modified.
[ "Get", "the", "HTTP", "-", "datetime", "of", "when", "the", "collection", "was", "modified", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L447-L453
train
49,571
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.serialize
def serialize(self): """Get the unicode string representing the whole collection.""" import datetime items = [] time_begin = datetime.datetime.now() for href in self.list(): items.append(self.get(href).item) time_end = datetime.datetime.now() self.logger.info( "Collection read %d items in %s sec from %s", len(items), (time_end - time_begin).total_seconds(), self.path) if self.get_meta("tag") == "VCALENDAR": collection = vobject.iCalendar() for item in items: for content in ("vevent", "vtodo", "vjournal"): if content in item.contents: for item_part in getattr(item, "%s_list" % content): collection.add(item_part) break return collection.serialize() elif self.get_meta("tag") == "VADDRESSBOOK": return "".join([item.serialize() for item in items]) return ""
python
def serialize(self): """Get the unicode string representing the whole collection.""" import datetime items = [] time_begin = datetime.datetime.now() for href in self.list(): items.append(self.get(href).item) time_end = datetime.datetime.now() self.logger.info( "Collection read %d items in %s sec from %s", len(items), (time_end - time_begin).total_seconds(), self.path) if self.get_meta("tag") == "VCALENDAR": collection = vobject.iCalendar() for item in items: for content in ("vevent", "vtodo", "vjournal"): if content in item.contents: for item_part in getattr(item, "%s_list" % content): collection.add(item_part) break return collection.serialize() elif self.get_meta("tag") == "VADDRESSBOOK": return "".join([item.serialize() for item in items]) return ""
[ "def", "serialize", "(", "self", ")", ":", "import", "datetime", "items", "=", "[", "]", "time_begin", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "for", "href", "in", "self", ".", "list", "(", ")", ":", "items", ".", "append", "(", "se...
Get the unicode string representing the whole collection.
[ "Get", "the", "unicode", "string", "representing", "the", "whole", "collection", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L455-L477
train
49,572
etesync/radicale_storage_etesync
radicale_storage_etesync/__init__.py
Collection.acquire_lock
def acquire_lock(cls, mode, user=None): """Set a context manager to lock the whole storage. ``mode`` must either be "r" for shared access or "w" for exclusive access. ``user`` is the name of the logged in user or empty. """ if not user: return with EteSyncCache.lock: cls.user = user cls.etesync = cls._get_etesync_for_user(cls.user) if cls._should_sync(): cls._mark_sync() cls.etesync.get_or_create_user_info(force_fetch=True) cls.etesync.sync_journal_list() for journal in cls.etesync.list(): cls.etesync.pull_journal(journal.uid) yield if cls.etesync.journal_list_is_dirty(): cls.etesync.sync_journal_list() for journal in cls.etesync.list(): if cls.etesync.journal_is_dirty(journal.uid): cls.etesync.sync_journal(journal.uid) cls.etesync = None cls.user = None
python
def acquire_lock(cls, mode, user=None): """Set a context manager to lock the whole storage. ``mode`` must either be "r" for shared access or "w" for exclusive access. ``user`` is the name of the logged in user or empty. """ if not user: return with EteSyncCache.lock: cls.user = user cls.etesync = cls._get_etesync_for_user(cls.user) if cls._should_sync(): cls._mark_sync() cls.etesync.get_or_create_user_info(force_fetch=True) cls.etesync.sync_journal_list() for journal in cls.etesync.list(): cls.etesync.pull_journal(journal.uid) yield if cls.etesync.journal_list_is_dirty(): cls.etesync.sync_journal_list() for journal in cls.etesync.list(): if cls.etesync.journal_is_dirty(journal.uid): cls.etesync.sync_journal(journal.uid) cls.etesync = None cls.user = None
[ "def", "acquire_lock", "(", "cls", ",", "mode", ",", "user", "=", "None", ")", ":", "if", "not", "user", ":", "return", "with", "EteSyncCache", ".", "lock", ":", "cls", ".", "user", "=", "user", "cls", ".", "etesync", "=", "cls", ".", "_get_etesync_f...
Set a context manager to lock the whole storage. ``mode`` must either be "r" for shared access or "w" for exclusive access. ``user`` is the name of the logged in user or empty.
[ "Set", "a", "context", "manager", "to", "lock", "the", "whole", "storage", "." ]
73d549bad7a37f060ece65c653c18a859a9962f2
https://github.com/etesync/radicale_storage_etesync/blob/73d549bad7a37f060ece65c653c18a859a9962f2/radicale_storage_etesync/__init__.py#L505-L536
train
49,573
facelessuser/pyspelling
pyspelling/filters/cpp.py
CppFilter.match_string
def match_string(self, stype): """Match string type.""" return not (stype - self.string_types) or bool(stype & self.wild_string_types)
python
def match_string(self, stype): """Match string type.""" return not (stype - self.string_types) or bool(stype & self.wild_string_types)
[ "def", "match_string", "(", "self", ",", "stype", ")", ":", "return", "not", "(", "stype", "-", "self", ".", "string_types", ")", "or", "bool", "(", "stype", "&", "self", ".", "wild_string_types", ")" ]
Match string type.
[ "Match", "string", "type", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/cpp.py#L169-L172
train
49,574
facelessuser/pyspelling
pyspelling/filters/cpp.py
CppFilter.get_encoding_name
def get_encoding_name(self, name): """Get encoding name.""" name = codecs.lookup( filters.PYTHON_ENCODING_NAMES.get(name, name).lower() ).name if name.startswith(('utf-32', 'utf-16')): name = name[:6] if CURRENT_ENDIAN == BIG_ENDIAN: name += '-be' else: name += '-le' if name == 'utf-8-sig': name = 'utf-8' return name
python
def get_encoding_name(self, name): """Get encoding name.""" name = codecs.lookup( filters.PYTHON_ENCODING_NAMES.get(name, name).lower() ).name if name.startswith(('utf-32', 'utf-16')): name = name[:6] if CURRENT_ENDIAN == BIG_ENDIAN: name += '-be' else: name += '-le' if name == 'utf-8-sig': name = 'utf-8' return name
[ "def", "get_encoding_name", "(", "self", ",", "name", ")", ":", "name", "=", "codecs", ".", "lookup", "(", "filters", ".", "PYTHON_ENCODING_NAMES", ".", "get", "(", "name", ",", "name", ")", ".", "lower", "(", ")", ")", ".", "name", "if", "name", "."...
Get encoding name.
[ "Get", "encoding", "name", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/cpp.py#L179-L195
train
49,575
facelessuser/pyspelling
pyspelling/filters/cpp.py
CppFilter.evaluate_inline_tail
def evaluate_inline_tail(self, groups): """Evaluate inline comments at the tail of source code.""" if self.lines: self.line_comments.append([groups['line'][2:].replace('\\\n', ''), self.line_num, self.current_encoding])
python
def evaluate_inline_tail(self, groups): """Evaluate inline comments at the tail of source code.""" if self.lines: self.line_comments.append([groups['line'][2:].replace('\\\n', ''), self.line_num, self.current_encoding])
[ "def", "evaluate_inline_tail", "(", "self", ",", "groups", ")", ":", "if", "self", ".", "lines", ":", "self", ".", "line_comments", ".", "append", "(", "[", "groups", "[", "'line'", "]", "[", "2", ":", "]", ".", "replace", "(", "'\\\\\\n'", ",", "''"...
Evaluate inline comments at the tail of source code.
[ "Evaluate", "inline", "comments", "at", "the", "tail", "of", "source", "code", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/cpp.py#L222-L226
train
49,576
facelessuser/pyspelling
pyspelling/filters/cpp.py
CppFilter.evaluate_inline
def evaluate_inline(self, groups): """Evaluate inline comments on their own lines.""" # Consecutive lines with only comments with same leading whitespace # will be captured as a single block. if self.lines: if ( self.group_comments and self.line_num == self.prev_line + 1 and groups['leading_space'] == self.leading ): self.line_comments[-1][0] += '\n' + groups['line'][2:].replace('\\\n', '') else: self.line_comments.append( [groups['line'][2:].replace('\\\n', ''), self.line_num, self.current_encoding] ) self.leading = groups['leading_space'] self.prev_line = self.line_num
python
def evaluate_inline(self, groups): """Evaluate inline comments on their own lines.""" # Consecutive lines with only comments with same leading whitespace # will be captured as a single block. if self.lines: if ( self.group_comments and self.line_num == self.prev_line + 1 and groups['leading_space'] == self.leading ): self.line_comments[-1][0] += '\n' + groups['line'][2:].replace('\\\n', '') else: self.line_comments.append( [groups['line'][2:].replace('\\\n', ''), self.line_num, self.current_encoding] ) self.leading = groups['leading_space'] self.prev_line = self.line_num
[ "def", "evaluate_inline", "(", "self", ",", "groups", ")", ":", "# Consecutive lines with only comments with same leading whitespace", "# will be captured as a single block.", "if", "self", ".", "lines", ":", "if", "(", "self", ".", "group_comments", "and", "self", ".", ...
Evaluate inline comments on their own lines.
[ "Evaluate", "inline", "comments", "on", "their", "own", "lines", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/cpp.py#L228-L245
train
49,577
facelessuser/pyspelling
pyspelling/filters/cpp.py
CppFilter.evaluate_unicode
def evaluate_unicode(self, value): """Evaluate Unicode.""" if value.startswith('u8'): length = 1 value = value[3:-1] encoding = 'utf-8' elif value.startswith('u'): length = 2 value = value[2:-1] encoding = 'utf-16' else: length = 4 value = value[2:-1] encoding = 'utf-32' def replace_unicode(m): """Replace Unicode.""" groups = m.groupdict() esc = m.group(0) value = esc if groups.get('special'): # Handle basic string escapes. value = BACK_SLASH_TRANSLATION[esc] elif groups.get('char') or groups.get('oct'): # Handle character escapes. integer = int(esc[2:], 16) if groups.get('char') else int(esc[1:], 8) if ( (length < 2 and integer <= 0xFF) or (length < 4 and integer <= 0xFFFF) or (length >= 4 and integer <= 0x10FFFF) ): try: value = chr(integer) except Exception: value = ' ' return value return self.norm_nl(RE_UESC.sub(replace_unicode, value).replace('\x00', '\n')), encoding
python
def evaluate_unicode(self, value): """Evaluate Unicode.""" if value.startswith('u8'): length = 1 value = value[3:-1] encoding = 'utf-8' elif value.startswith('u'): length = 2 value = value[2:-1] encoding = 'utf-16' else: length = 4 value = value[2:-1] encoding = 'utf-32' def replace_unicode(m): """Replace Unicode.""" groups = m.groupdict() esc = m.group(0) value = esc if groups.get('special'): # Handle basic string escapes. value = BACK_SLASH_TRANSLATION[esc] elif groups.get('char') or groups.get('oct'): # Handle character escapes. integer = int(esc[2:], 16) if groups.get('char') else int(esc[1:], 8) if ( (length < 2 and integer <= 0xFF) or (length < 4 and integer <= 0xFFFF) or (length >= 4 and integer <= 0x10FFFF) ): try: value = chr(integer) except Exception: value = ' ' return value return self.norm_nl(RE_UESC.sub(replace_unicode, value).replace('\x00', '\n')), encoding
[ "def", "evaluate_unicode", "(", "self", ",", "value", ")", ":", "if", "value", ".", "startswith", "(", "'u8'", ")", ":", "length", "=", "1", "value", "=", "value", "[", "3", ":", "-", "1", "]", "encoding", "=", "'utf-8'", "elif", "value", ".", "sta...
Evaluate Unicode.
[ "Evaluate", "Unicode", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/cpp.py#L247-L286
train
49,578
facelessuser/pyspelling
pyspelling/filters/cpp.py
CppFilter.evaluate_normal
def evaluate_normal(self, value):
    """Evaluate a normal (or wide `L`-prefixed) string literal.

    Decodes escape sequences according to the configured execution
    charset and returns a `(text, encoding)` tuple.
    """
    if value.startswith('L'):
        # Wide literal: use the wide execution charset and unit size.
        size = self.wide_charset_size
        encoding = self.wide_exec_charset
        value = value[2:-1]
        pack = BYTE_STORE[size | CURRENT_ENDIAN]
    else:
        size = self.charset_size
        encoding = self.exec_charset
        value = value[1:-1]
        pack = BYTE_STORE[size | CURRENT_ENDIAN]
    # Largest value representable in one code unit of `size` bytes.
    max_value = 2 ** (size * 8) - 1

    def replace(m):
        """Translate one escape-sequence match to its decoded text."""
        groups = m.groupdict()
        esc = m.group(0)
        value = esc
        if groups.get('special'):
            # Handle basic string escapes.
            value = BACK_SLASH_TRANSLATION[esc]
        elif groups.get('char'):
            # Handle hex/Unicode character escapes
            if value.startswith('\\x'):
                # One or more \xNN escapes: pack each unit, then decode
                # the byte string; out-of-range units become spaces.
                values = [int(x, 16) for x in value[2:].split('\\x')]
                for i, v in enumerate(values):
                    if v <= max_value:
                        values[i] = struct.pack(pack, v)
                    else:
                        values[i] = b' '
                value = b''.join(values).decode(encoding, errors='replace')
            else:
                # Presumably \uXXXX/\UXXXXXXXX: round-trip the code point
                # through the execution charset to drop unencodable chars.
                integer = int(value[2:], 16)
                value = chr(integer).encode(encoding, errors='replace').decode(encoding)
        elif groups.get('oct'):
            # Handle octal escapes.
            values = [int(x, 8) for x in value[1:].split('\\')]
            for i, v in enumerate(values):
                if v <= max_value:
                    values[i] = struct.pack(pack, v)
                else:
                    values[i] = b' '
            value = b''.join(values).decode(encoding, errors='replace')
        return value

    # NOTE(review): '\x00' appears to stand in for newlines in the
    # preprocessed text (same substitution in evaluate_unicode) — confirm.
    return self.norm_nl(RE_ESC.sub(replace, value)).replace('\x00', '\n'), encoding
python
def evaluate_normal(self, value): """Evaluate normal string.""" if value.startswith('L'): size = self.wide_charset_size encoding = self.wide_exec_charset value = value[2:-1] pack = BYTE_STORE[size | CURRENT_ENDIAN] else: size = self.charset_size encoding = self.exec_charset value = value[1:-1] pack = BYTE_STORE[size | CURRENT_ENDIAN] max_value = 2 ** (size * 8) - 1 def replace(m): """Replace.""" groups = m.groupdict() esc = m.group(0) value = esc if groups.get('special'): # Handle basic string escapes. value = BACK_SLASH_TRANSLATION[esc] elif groups.get('char'): # Handle hex/Unicode character escapes if value.startswith('\\x'): values = [int(x, 16) for x in value[2:].split('\\x')] for i, v in enumerate(values): if v <= max_value: values[i] = struct.pack(pack, v) else: values[i] = b' ' value = b''.join(values).decode(encoding, errors='replace') else: integer = int(value[2:], 16) value = chr(integer).encode(encoding, errors='replace').decode(encoding) elif groups.get('oct'): # Handle octal escapes. values = [int(x, 8) for x in value[1:].split('\\')] for i, v in enumerate(values): if v <= max_value: values[i] = struct.pack(pack, v) else: values[i] = b' ' value = b''.join(values).decode(encoding, errors='replace') return value return self.norm_nl(RE_ESC.sub(replace, value)).replace('\x00', '\n'), encoding
[ "def", "evaluate_normal", "(", "self", ",", "value", ")", ":", "if", "value", ".", "startswith", "(", "'L'", ")", ":", "size", "=", "self", ".", "wide_charset_size", "encoding", "=", "self", ".", "wide_exec_charset", "value", "=", "value", "[", "2", ":",...
Evaluate normal string.
[ "Evaluate", "normal", "string", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/cpp.py#L288-L336
train
49,579
facelessuser/pyspelling
pyspelling/filters/cpp.py
CppFilter.extend_src_text
def extend_src_text(self, content, context, text_list, category):
    """Append one SourceText entry to `content` per gathered text chunk.

    Each entry in `text_list` is a `(text, line_number, encoding)` triple;
    the context string is annotated with the line number.
    """
    # Category names are namespaced with the configured prefix when set.
    full_category = (self.prefix + '-' + category) if self.prefix else category
    for text, line_no, enc in text_list:
        entry = filters.SourceText(
            textwrap.dedent(text),
            "%s (%d)" % (context, line_no),
            enc,
            full_category
        )
        content.append(entry)
python
def extend_src_text(self, content, context, text_list, category): """Extend the source text list with the gathered text data.""" prefix = self.prefix + '-' if self.prefix else '' for comment, line, encoding in text_list: content.append( filters.SourceText( textwrap.dedent(comment), "%s (%d)" % (context, line), encoding, prefix + category ) )
[ "def", "extend_src_text", "(", "self", ",", "content", ",", "context", ",", "text_list", ",", "category", ")", ":", "prefix", "=", "self", ".", "prefix", "+", "'-'", "if", "self", ".", "prefix", "else", "''", "for", "comment", ",", "line", ",", "encodi...
Extend the source text list with the gathered text data.
[ "Extend", "the", "source", "text", "list", "with", "the", "gathered", "text", "data", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/cpp.py#L392-L405
train
49,580
deshima-dev/decode
decode/core/cube/functions.py
cube
def cube(data, xcoords=None, ycoords=None, chcoords=None, scalarcoords=None,
         datacoords=None, attrs=None, name=None):
    """Create a cube as an instance of xarray.DataArray with Decode accessor.

    Args:
        data (numpy.ndarray): 3D (x x y x channel) array.
        xcoords (dict, optional): Dictionary of arrays that label x axis.
        ycoords (dict, optional): Dictionary of arrays that label y axis.
        chcoords (dict, optional): Dictionary of arrays that label channel axis.
        scalarcoords (dict, optional): Dictionary of values that don't label
            any axes (point-like).
        datacoords (dict, optional): Dictionary of arrays that label x, y,
            and channel axes.
        attrs (dict, optional): Dictionary of attributes to add to the instance.
        name (str, optional): String that names the instance.

    Returns:
        decode cube (decode.cube): Decode cube.
    """
    # Initialize the DataArray and its Decode-accessor default coords.
    cube = xr.DataArray(data, dims=('x', 'y', 'ch'), attrs=attrs, name=name)
    cube.dcc._initcoords()

    # Attach each optional coordinate dict to the dimension(s) it labels.
    dimension_coords = (
        ('x', xcoords),
        ('y', ycoords),
        ('ch', chcoords),
        (('x', 'y', 'ch'), datacoords),
    )
    for dims, coords in dimension_coords:
        if coords is not None:
            cube.coords.update({key: (dims, coords[key]) for key in coords})

    # Scalar coords label no axis at all.
    if scalarcoords is not None:
        cube.coords.update(scalarcoords)

    return cube
python
def cube(data, xcoords=None, ycoords=None, chcoords=None, scalarcoords=None, datacoords=None, attrs=None, name=None): """Create a cube as an instance of xarray.DataArray with Decode accessor. Args: data (numpy.ndarray): 3D (x x y x channel) array. xcoords (dict, optional): Dictionary of arrays that label x axis. ycoords (dict, optional): Dictionary of arrays that label y axis. chcoords (dict, optional): Dictionary of arrays that label channel axis. scalarcoords (dict, optional): Dictionary of values that don't label any axes (point-like). datacoords (dict, optional): Dictionary of arrays that label x, y, and channel axes. attrs (dict, optional): Dictionary of attributes to add to the instance. name (str, optional): String that names the instance. Returns: decode cube (decode.cube): Decode cube. """ # initialize coords with default values cube = xr.DataArray(data, dims=('x', 'y', 'ch'), attrs=attrs, name=name) cube.dcc._initcoords() # update coords with input values (if any) if xcoords is not None: cube.coords.update({key: ('x', xcoords[key]) for key in xcoords}) if ycoords is not None: cube.coords.update({key: ('y', ycoords[key]) for key in ycoords}) if chcoords is not None: cube.coords.update({key: ('ch', chcoords[key]) for key in chcoords}) if datacoords is not None: cube.coords.update({key: (('x', 'y', 'ch'), datacoords[key]) for key in datacoords}) if scalarcoords is not None: cube.coords.update(scalarcoords) return cube
[ "def", "cube", "(", "data", ",", "xcoords", "=", "None", ",", "ycoords", "=", "None", ",", "chcoords", "=", "None", ",", "scalarcoords", "=", "None", ",", "datacoords", "=", "None", ",", "attrs", "=", "None", ",", "name", "=", "None", ")", ":", "# ...
Create a cube as an instance of xarray.DataArray with Decode accessor. Args: data (numpy.ndarray): 3D (x x y x channel) array. xcoords (dict, optional): Dictionary of arrays that label x axis. ycoords (dict, optional): Dictionary of arrays that label y axis. chcoords (dict, optional): Dictionary of arrays that label channel axis. scalarcoords (dict, optional): Dictionary of values that don't label any axes (point-like). datacoords (dict, optional): Dictionary of arrays that label x, y, and channel axes. attrs (dict, optional): Dictionary of attributes to add to the instance. name (str, optional): String that names the instance. Returns: decode cube (decode.cube): Decode cube.
[ "Create", "a", "cube", "as", "an", "instance", "of", "xarray", ".", "DataArray", "with", "Decode", "accessor", "." ]
e789e174cd316e7ec8bc55be7009ad35baced3c0
https://github.com/deshima-dev/decode/blob/e789e174cd316e7ec8bc55be7009ad35baced3c0/decode/core/cube/functions.py#L25-L61
train
49,581
deshima-dev/decode
decode/core/cube/functions.py
fromcube
def fromcube(cube, template):
    """Convert a decode cube to a decode array.

    Args:
        cube (decode.cube): Decode cube to be cast.
        template (decode.array): Decode array whose shape the cube is cast on.

    Returns:
        decode array (decode.array): Decode array.

    Notes:
        This functions is under development.
    """
    array = dc.zeros_like(template)

    # Time-stream sky coordinates of the template and the cube's grid axes.
    y, x = array.y.values, array.x.values
    gy, gx = cube.y.values, cube.x.values
    # Fractional pixel indices of each sample on the cube grid
    # (linear interpolation of coordinate -> grid index).
    iy = interp1d(gy, np.arange(len(gy)))(y)
    ix = interp1d(gx, np.arange(len(gx)))(x)

    for ch in range(len(cube.ch)):
        # Sample the (x, y) map of this channel at the fractional indices;
        # axis 0 of cube.values[:, :, ch] is x, matching ix first.
        array[:,ch] = map_coordinates(cube.values[:,:,ch], (ix, iy))

    return array
python
def fromcube(cube, template): """Covert a decode cube to a decode array. Args: cube (decode.cube): Decode cube to be cast. template (decode.array): Decode array whose shape the cube is cast on. Returns: decode array (decode.array): Decode array. Notes: This functions is under development. """ array = dc.zeros_like(template) y, x = array.y.values, array.x.values gy, gx = cube.y.values, cube.x.values iy = interp1d(gy, np.arange(len(gy)))(y) ix = interp1d(gx, np.arange(len(gx)))(x) for ch in range(len(cube.ch)): array[:,ch] = map_coordinates(cube.values[:,:,ch], (ix, iy)) return array
[ "def", "fromcube", "(", "cube", ",", "template", ")", ":", "array", "=", "dc", ".", "zeros_like", "(", "template", ")", "y", ",", "x", "=", "array", ".", "y", ".", "values", ",", "array", ".", "x", ".", "values", "gy", ",", "gx", "=", "cube", "...
Covert a decode cube to a decode array. Args: cube (decode.cube): Decode cube to be cast. template (decode.array): Decode array whose shape the cube is cast on. Returns: decode array (decode.array): Decode array. Notes: This functions is under development.
[ "Covert", "a", "decode", "cube", "to", "a", "decode", "array", "." ]
e789e174cd316e7ec8bc55be7009ad35baced3c0
https://github.com/deshima-dev/decode/blob/e789e174cd316e7ec8bc55be7009ad35baced3c0/decode/core/cube/functions.py#L64-L87
train
49,582
deshima-dev/decode
decode/core/cube/functions.py
makecontinuum
def makecontinuum(cube, **kwargs):
    """Make a continuum array.

    Args:
        cube (decode.cube): Decode cube which will be averaged over channels.
        kwargs (optional): Other arguments.
            weight (xarray.DataArray, optional): Per-channel weights used
                for an inverse-variance weighted average. When omitted,
                a plain mean over channels is taken.

    Returns:
        decode cube (decode.cube): Decode cube (2d).
    """
    # `inchs`/`exchs` were removed in favor of `weight`; fail loudly if used.
    inchs = kwargs.pop('inchs', None)
    exchs = kwargs.pop('exchs', None)
    if (inchs is not None) or (exchs is not None):
        raise KeyError('Inchs and exchs are no longer supported. Use weight instead.')

    # BUG FIX: `weight` was read without ever being assigned (neither a
    # parameter nor popped from kwargs), so this function always raised
    # NameError. Take it from kwargs and fall back to equal weighting.
    weight = kwargs.pop('weight', None)

    if weight is None:
        # Equal weights reduce the weighted average to a plain mean.
        cont = cube.mean(dim='ch')
    else:
        # Inverse-variance weighted average over the channel axis.
        cont = (cube * (1 / weight**2)).sum(dim='ch') / (1 / weight**2).sum(dim='ch')

    # BUG FIX: dc.cube expects 3D (x, y, ch) data; restore a length-1
    # channel axis (this matches the previously commented-out intent).
    cont = cont.expand_dims(dim='ch', axis=2)

    ### define coordinates
    xcoords = {'x': cube.x.values}
    ycoords = {'y': cube.y.values}
    chcoords = {'masterid': np.array([0]),
                'kidid': np.array([0]),
                'kidfq': np.array([0]),
                'kidtp': np.array([1])}
    scalarcoords = {'coordsys': cube.coordsys.values, 'datatype': cube.datatype.values,
                    'xref': cube.xref.values, 'yref': cube.yref.values}

    return dc.cube(cont.values, xcoords=xcoords, ycoords=ycoords,
                   chcoords=chcoords, scalarcoords=scalarcoords)
python
def makecontinuum(cube, **kwargs): """Make a continuum array. Args: cube (decode.cube): Decode cube which will be averaged over channels. kwargs (optional): Other arguments. inchs (list): Included channel kidids. exchs (list): Excluded channel kidids. Returns: decode cube (decode.cube): Decode cube (2d). """ ### pick up kwargs inchs = kwargs.pop('inchs', None) exchs = kwargs.pop('exchs', None) if (inchs is not None) or (exchs is not None): raise KeyError('Inchs and exchs are no longer supported. Use weight instead.') # if inchs is not None: # logger.info('inchs') # logger.info('{}'.format(inchs)) # subcube = cube[:, :, inchs] # else: # mask = np.full(len(cube.ch), True) # if exchs is not None: # logger.info('exchs') # logger.info('{}'.format(exchs)) # mask[exchs] = False # subcube = cube[:, :, mask] if weight is None: weight = 1. # else: # cont = (subcube * (1 / subcube.noise**2)).sum(dim='ch') / (1 / subcube.noise**2).sum(dim='ch') # cont = cont.expand_dims(dim='ch', axis=2) cont = (cube * (1 / weight**2)).sum(dim='ch') / (1 / weight**2).sum(dim='ch') ### define coordinates xcoords = {'x': cube.x.values} ycoords = {'y': cube.y.values} chcoords = {'masterid': np.array([0]), # np.array([int(subcube.masterid.mean(dim='ch'))]), 'kidid': np.array([0]), # np.array([int(subcube.kidid.mean(dim='ch'))]), 'kidfq': np.array([0]), # np.array([float(subcube.kidfq.mean(dim='ch'))]), 'kidtp': np.array([1])} # np.array([1])} scalarcoords = {'coordsys': cube.coordsys.values, 'datatype': cube.datatype.values, 'xref': cube.xref.values, 'yref': cube.yref.values} return dc.cube(cont.values, xcoords=xcoords, ycoords=ycoords, chcoords=chcoords, scalarcoords=scalarcoords)
[ "def", "makecontinuum", "(", "cube", ",", "*", "*", "kwargs", ")", ":", "### pick up kwargs", "inchs", "=", "kwargs", ".", "pop", "(", "'inchs'", ",", "None", ")", "exchs", "=", "kwargs", ".", "pop", "(", "'exchs'", ",", "None", ")", "if", "(", "inch...
Make a continuum array. Args: cube (decode.cube): Decode cube which will be averaged over channels. kwargs (optional): Other arguments. inchs (list): Included channel kidids. exchs (list): Excluded channel kidids. Returns: decode cube (decode.cube): Decode cube (2d).
[ "Make", "a", "continuum", "array", "." ]
e789e174cd316e7ec8bc55be7009ad35baced3c0
https://github.com/deshima-dev/decode/blob/e789e174cd316e7ec8bc55be7009ad35baced3c0/decode/core/cube/functions.py#L234-L283
train
49,583
facelessuser/pyspelling
pyspelling/filters/__init__.py
Filter._verify_encoding
def _verify_encoding(self, enc):
    """Return the encoding name if Python recognizes it, else None."""
    # Normalize aliases that Python spells differently.
    name = PYTHON_ENCODING_NAMES.get(enc, enc)
    try:
        codecs.getencoder(name)
    except LookupError:
        return None
    return name
python
def _verify_encoding(self, enc): """Verify encoding is okay.""" enc = PYTHON_ENCODING_NAMES.get(enc, enc) try: codecs.getencoder(enc) encoding = enc except LookupError: encoding = None return encoding
[ "def", "_verify_encoding", "(", "self", ",", "enc", ")", ":", "enc", "=", "PYTHON_ENCODING_NAMES", ".", "get", "(", "enc", ",", "enc", ")", "try", ":", "codecs", ".", "getencoder", "(", "enc", ")", "encoding", "=", "enc", "except", "LookupError", ":", ...
Verify encoding is okay.
[ "Verify", "encoding", "is", "okay", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/__init__.py#L95-L104
train
49,584
facelessuser/pyspelling
pyspelling/filters/__init__.py
Filter.has_bom
def has_bom(self, f):
    """Check the first bytes of `f` for UTF-8, UTF-16, and UTF-32 BOMs."""
    m = RE_UTF_BOM.match(f.read(4))
    if m is None:
        return None
    # Match groups: 1 = UTF-8, 2/3 = UTF-32 variants, 4/5 = UTF-16 variants.
    for group, name in ((1, 'utf-8-sig'), (2, 'utf-32'), (3, 'utf-32'),
                        (4, 'utf-16'), (5, 'utf-16')):
        if m.group(group):
            return name
    return None
python
def has_bom(self, f): """Check for UTF8, UTF16, and UTF32 BOMs.""" content = f.read(4) encoding = None m = RE_UTF_BOM.match(content) if m is not None: if m.group(1): encoding = 'utf-8-sig' elif m.group(2): encoding = 'utf-32' elif m.group(3): encoding = 'utf-32' elif m.group(4): encoding = 'utf-16' elif m.group(5): encoding = 'utf-16' return encoding
[ "def", "has_bom", "(", "self", ",", "f", ")", ":", "content", "=", "f", ".", "read", "(", "4", ")", "encoding", "=", "None", "m", "=", "RE_UTF_BOM", ".", "match", "(", "content", ")", "if", "m", "is", "not", "None", ":", "if", "m", ".", "group"...
Check for UTF8, UTF16, and UTF32 BOMs.
[ "Check", "for", "UTF8", "UTF16", "and", "UTF32", "BOMs", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/__init__.py#L106-L123
train
49,585
facelessuser/pyspelling
pyspelling/filters/__init__.py
Filter._utf_strip_bom
def _utf_strip_bom(self, encoding): """Return an encoding that will ignore the BOM.""" if encoding is None: pass elif encoding.lower() == 'utf-8': encoding = 'utf-8-sig' elif encoding.lower().startswith('utf-16'): encoding = 'utf-16' elif encoding.lower().startswith('utf-32'): encoding = 'utf-32' return encoding
python
def _utf_strip_bom(self, encoding): """Return an encoding that will ignore the BOM.""" if encoding is None: pass elif encoding.lower() == 'utf-8': encoding = 'utf-8-sig' elif encoding.lower().startswith('utf-16'): encoding = 'utf-16' elif encoding.lower().startswith('utf-32'): encoding = 'utf-32' return encoding
[ "def", "_utf_strip_bom", "(", "self", ",", "encoding", ")", ":", "if", "encoding", "is", "None", ":", "pass", "elif", "encoding", ".", "lower", "(", ")", "==", "'utf-8'", ":", "encoding", "=", "'utf-8-sig'", "elif", "encoding", ".", "lower", "(", ")", ...
Return an encoding that will ignore the BOM.
[ "Return", "an", "encoding", "that", "will", "ignore", "the", "BOM", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/__init__.py#L125-L136
train
49,586
facelessuser/pyspelling
pyspelling/filters/__init__.py
Filter._detect_buffer_encoding
def _detect_buffer_encoding(self, f): """Guess by checking BOM, and checking `_special_encode_check`, and using memory map.""" encoding = None with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as m: encoding = self._analyze_file(m) return encoding
python
def _detect_buffer_encoding(self, f): """Guess by checking BOM, and checking `_special_encode_check`, and using memory map.""" encoding = None with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as m: encoding = self._analyze_file(m) return encoding
[ "def", "_detect_buffer_encoding", "(", "self", ",", "f", ")", ":", "encoding", "=", "None", "with", "contextlib", ".", "closing", "(", "mmap", ".", "mmap", "(", "f", ".", "fileno", "(", ")", ",", "0", ",", "access", "=", "mmap", ".", "ACCESS_READ", "...
Guess by checking BOM, and checking `_special_encode_check`, and using memory map.
[ "Guess", "by", "checking", "BOM", "and", "checking", "_special_encode_check", "and", "using", "memory", "map", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/__init__.py#L138-L144
train
49,587
facelessuser/pyspelling
pyspelling/filters/__init__.py
Filter._analyze_file
def _analyze_file(self, f): """Analyze the file.""" f.seek(0) # Check for BOMs if self.CHECK_BOM: encoding = self.has_bom(f) f.seek(0) else: util.warn_deprecated( "'CHECK_BOM' attribute is deprecated. " "Please override 'has_bom` function to control or avoid BOM detection." ) # Check file extensions if encoding is None: encoding = self._utf_strip_bom(self.header_check(f.read(1024))) f.seek(0) if encoding is None: encoding = self._utf_strip_bom(self.content_check(f)) f.seek(0) return encoding
python
def _analyze_file(self, f): """Analyze the file.""" f.seek(0) # Check for BOMs if self.CHECK_BOM: encoding = self.has_bom(f) f.seek(0) else: util.warn_deprecated( "'CHECK_BOM' attribute is deprecated. " "Please override 'has_bom` function to control or avoid BOM detection." ) # Check file extensions if encoding is None: encoding = self._utf_strip_bom(self.header_check(f.read(1024))) f.seek(0) if encoding is None: encoding = self._utf_strip_bom(self.content_check(f)) f.seek(0) return encoding
[ "def", "_analyze_file", "(", "self", ",", "f", ")", ":", "f", ".", "seek", "(", "0", ")", "# Check for BOMs", "if", "self", ".", "CHECK_BOM", ":", "encoding", "=", "self", ".", "has_bom", "(", "f", ")", "f", ".", "seek", "(", "0", ")", "else", ":...
Analyze the file.
[ "Analyze", "the", "file", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/__init__.py#L146-L167
train
49,588
facelessuser/pyspelling
pyspelling/filters/__init__.py
Filter._detect_encoding
def _detect_encoding(self, source_file): """Detect encoding.""" encoding = self._guess(source_file) # If we didn't explicitly detect an encoding, assume default. if encoding is None: encoding = self.default_encoding return encoding
python
def _detect_encoding(self, source_file): """Detect encoding.""" encoding = self._guess(source_file) # If we didn't explicitly detect an encoding, assume default. if encoding is None: encoding = self.default_encoding return encoding
[ "def", "_detect_encoding", "(", "self", ",", "source_file", ")", ":", "encoding", "=", "self", ".", "_guess", "(", "source_file", ")", "# If we didn't explicitly detect an encoding, assume default.", "if", "encoding", "is", "None", ":", "encoding", "=", "self", ".",...
Detect encoding.
[ "Detect", "encoding", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/__init__.py#L169-L177
train
49,589
facelessuser/pyspelling
pyspelling/filters/__init__.py
Filter._run_first
def _run_first(self, source_file): """Run on as first in chain.""" self.reset() self.current_encoding = self.default_encoding encoding = None try: encoding = self._detect_encoding(source_file) content = self.filter(source_file, encoding) except UnicodeDecodeError: if not encoding or encoding != self.default_encoding: content = self.filter(source_file, self.default_encoding) else: raise return content
python
def _run_first(self, source_file): """Run on as first in chain.""" self.reset() self.current_encoding = self.default_encoding encoding = None try: encoding = self._detect_encoding(source_file) content = self.filter(source_file, encoding) except UnicodeDecodeError: if not encoding or encoding != self.default_encoding: content = self.filter(source_file, self.default_encoding) else: raise return content
[ "def", "_run_first", "(", "self", ",", "source_file", ")", ":", "self", ".", "reset", "(", ")", "self", ".", "current_encoding", "=", "self", ".", "default_encoding", "encoding", "=", "None", "try", ":", "encoding", "=", "self", ".", "_detect_encoding", "(...
Run on as first in chain.
[ "Run", "on", "as", "first", "in", "chain", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/__init__.py#L179-L193
train
49,590
facelessuser/pyspelling
pyspelling/filters/__init__.py
Filter._guess
def _guess(self, filename):
    """Guess the encoding of the file's content.

    Raises UnicodeDecodeError when the file is too large to scan or when
    no encoding can be detected.
    """
    file_size = os.path.getsize(filename)

    # Refuse huge files outright: scanning them for Unicode would take
    # far too long, so they are treated as undecodable.
    if self._is_very_large(file_size):  # pragma: no cover
        raise UnicodeDecodeError('None', b'', 0, 0, 'Unicode detection is not applied to very large files!')

    with open(filename, "rb") as f:
        # An empty file is trivially ASCII; otherwise analyze the buffer.
        encoding = 'ascii' if file_size == 0 else self._detect_buffer_encoding(f)

    if encoding is None:
        raise UnicodeDecodeError('None', b'', 0, 0, 'Unicode cannot be detected.')
    if encoding != BINARY_ENCODE:
        # Make sure the detected name is one Python can actually decode.
        encoding = self._verify_encoding(encoding)
    return encoding
python
def _guess(self, filename): """Guess the encoding and decode the content of the file.""" encoding = None file_size = os.path.getsize(filename) # If the file is really big, lets just call it binary. # We don't have time to let Python chug through a massive file. if not self._is_very_large(file_size): with open(filename, "rb") as f: if file_size == 0: encoding = 'ascii' else: encoding = self._detect_buffer_encoding(f) if encoding is None: raise UnicodeDecodeError('None', b'', 0, 0, 'Unicode cannot be detected.') if encoding != BINARY_ENCODE: encoding = self._verify_encoding(encoding) else: # pragma: no cover raise UnicodeDecodeError('None', b'', 0, 0, 'Unicode detection is not applied to very large files!') return encoding
[ "def", "_guess", "(", "self", ",", "filename", ")", ":", "encoding", "=", "None", "file_size", "=", "os", ".", "path", ".", "getsize", "(", "filename", ")", "# If the file is really big, lets just call it binary.", "# We don't have time to let Python chug through a massiv...
Guess the encoding and decode the content of the file.
[ "Guess", "the", "encoding", "and", "decode", "the", "content", "of", "the", "file", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/filters/__init__.py#L201-L222
train
49,591
JMSwag/dsdev-utils
dsdev_utils/terminal.py
ask_yes_no
def ask_yes_no(question, default='no', answer=None):
    u"""Will ask a question and keeps prompting until answered.

    Args:
        question (str): Question to ask end user
        default (str): Default answer if user just press enter at prompt
        answer (str): Used for testing

    Returns:
        (bool) Meaning:
            True - Answer is yes
            False - Answer is no
    """
    default = default.lower()
    yes = [u'yes', u'ye', u'y']
    no = [u'no', u'n']
    # Any default other than an explicit "no" variant is treated as yes.
    if default in no:
        help_ = u'[N/y]?'
        default = False
    else:
        default = True
        help_ = u'[Y/n]?'
    while 1:
        display = question + '\n' + help_
        if answer is None:
            log.debug(u'Under None')
            answer = six.moves.input(display)
            # NOTE(review): a caller-supplied `answer` is compared without
            # lowercasing here — confirm tests always pass lowercase.
            answer = answer.lower()
        # Bare Enter selects the default.
        if answer == u'':
            log.debug(u'Under blank')
            return default
        if answer in yes:
            log.debug(u'Must be true')
            return True
        elif answer in no:
            log.debug(u'Must be false')
            return False
        else:
            # Invalid input: explain, clear the answer, and prompt again.
            sys.stdout.write(u'Please answer yes or no only!\n\n')
            sys.stdout.flush()
            answer = None
            six.moves.input(u'Press enter to continue')
            sys.stdout.write('\n\n\n\n\n')
            sys.stdout.flush()
python
def ask_yes_no(question, default='no', answer=None): u"""Will ask a question and keeps prompting until answered. Args: question (str): Question to ask end user default (str): Default answer if user just press enter at prompt answer (str): Used for testing Returns: (bool) Meaning: True - Answer is yes False - Answer is no """ default = default.lower() yes = [u'yes', u'ye', u'y'] no = [u'no', u'n'] if default in no: help_ = u'[N/y]?' default = False else: default = True help_ = u'[Y/n]?' while 1: display = question + '\n' + help_ if answer is None: log.debug(u'Under None') answer = six.moves.input(display) answer = answer.lower() if answer == u'': log.debug(u'Under blank') return default if answer in yes: log.debug(u'Must be true') return True elif answer in no: log.debug(u'Must be false') return False else: sys.stdout.write(u'Please answer yes or no only!\n\n') sys.stdout.flush() answer = None six.moves.input(u'Press enter to continue') sys.stdout.write('\n\n\n\n\n') sys.stdout.flush()
[ "def", "ask_yes_no", "(", "question", ",", "default", "=", "'no'", ",", "answer", "=", "None", ")", ":", "default", "=", "default", ".", "lower", "(", ")", "yes", "=", "[", "u'yes'", ",", "u'ye'", ",", "u'y'", "]", "no", "=", "[", "u'no'", ",", "...
u"""Will ask a question and keeps prompting until answered. Args: question (str): Question to ask end user default (str): Default answer if user just press enter at prompt answer (str): Used for testing Returns: (bool) Meaning: True - Answer is yes False - Answer is no
[ "u", "Will", "ask", "a", "question", "and", "keeps", "prompting", "until", "answered", "." ]
5adbf9b3fd9fff92d1dd714423b08e26a5038e14
https://github.com/JMSwag/dsdev-utils/blob/5adbf9b3fd9fff92d1dd714423b08e26a5038e14/dsdev_utils/terminal.py#L192-L242
train
49,592
JMSwag/dsdev-utils
dsdev_utils/terminal.py
get_correct_answer
def get_correct_answer(question, default=None, required=False,
                       answer=None, is_answer_correct=None):
    u"""Ask user a question and confirm answer

    Args:
        question (str): Question to ask user
        default (str): Default answer if no input from user
        required (bool): Require user to input answer
        answer (str): Used for testing
        is_answer_correct (str): Used for testing

    Returns:
        (str) The confirmed answer.
    """
    while 1:
        if default is None:
            msg = u' - No Default Available'
        else:
            msg = (u'\n[DEFAULT] -> {}\nPress Enter To '
                   u'Use Default'.format(default))
        prompt = question + msg + u'\n--> '
        if answer is None:
            answer = six.moves.input(prompt)

        # BUG FIX: the original tested `default is not None`, which rejected
        # a blank answer exactly when a default was available (contradicting
        # the "Press Enter To Use Default" prompt) and silently accepted a
        # blank answer when no default existed. Re-prompt only when input is
        # required and there is no default to fall back on.
        if answer == '' and required and default is None:
            print(u'You have to enter a value\n\n')
            six.moves.input(u'Press enter to continue')
            print(u'\n\n')
            answer = None
            continue

        # Blank input with a default available means "use the default".
        if answer == u'' and default is not None:
            answer = default

        # Confirm before returning; on rejection, start over.
        _ans = ask_yes_no(u'You entered {}, is this '
                          u'correct?'.format(answer),
                          answer=is_answer_correct)
        if _ans:
            return answer
        else:
            answer = None
python
def get_correct_answer(question, default=None, required=False, answer=None, is_answer_correct=None): u"""Ask user a question and confirm answer Args: question (str): Question to ask user default (str): Default answer if no input from user required (str): Require user to input answer answer (str): Used for testing is_answer_correct (str): Used for testing """ while 1: if default is None: msg = u' - No Default Available' else: msg = (u'\n[DEFAULT] -> {}\nPress Enter To ' u'Use Default'.format(default)) prompt = question + msg + u'\n--> ' if answer is None: answer = six.moves.input(prompt) if answer == '' and required and default is not None: print(u'You have to enter a value\n\n') six.moves.input(u'Press enter to continue') print(u'\n\n') answer = None continue if answer == u'' and default is not None: answer = default _ans = ask_yes_no(u'You entered {}, is this ' u'correct?'.format(answer), answer=is_answer_correct) if _ans: return answer else: answer = None
[ "def", "get_correct_answer", "(", "question", ",", "default", "=", "None", ",", "required", "=", "False", ",", "answer", "=", "None", ",", "is_answer_correct", "=", "None", ")", ":", "while", "1", ":", "if", "default", "is", "None", ":", "msg", "=", "u...
u"""Ask user a question and confirm answer Args: question (str): Question to ask user default (str): Default answer if no input from user required (str): Require user to input answer answer (str): Used for testing is_answer_correct (str): Used for testing
[ "u", "Ask", "user", "a", "question", "and", "confirm", "answer" ]
5adbf9b3fd9fff92d1dd714423b08e26a5038e14
https://github.com/JMSwag/dsdev-utils/blob/5adbf9b3fd9fff92d1dd714423b08e26a5038e14/dsdev_utils/terminal.py#L245-L284
train
49,593
facelessuser/pyspelling
pyspelling/util/__init__.py
get_process
def get_process(cmd): """Get a command process.""" if sys.platform.startswith('win'): startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW process = subprocess.Popen( cmd, startupinfo=startupinfo, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, shell=False ) else: process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, shell=False ) return process
python
def get_process(cmd): """Get a command process.""" if sys.platform.startswith('win'): startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW process = subprocess.Popen( cmd, startupinfo=startupinfo, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, shell=False ) else: process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, shell=False ) return process
[ "def", "get_process", "(", "cmd", ")", ":", "if", "sys", ".", "platform", ".", "startswith", "(", "'win'", ")", ":", "startupinfo", "=", "subprocess", ".", "STARTUPINFO", "(", ")", "startupinfo", ".", "dwFlags", "|=", "subprocess", ".", "STARTF_USESHOWWINDOW...
Get a command process.
[ "Get", "a", "command", "process", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/util/__init__.py#L52-L74
train
49,594
facelessuser/pyspelling
pyspelling/util/__init__.py
get_process_output
def get_process_output(process, encoding=None): """Get the output from the process.""" output = process.communicate() returncode = process.returncode if not encoding: try: encoding = sys.stdout.encoding except Exception: encoding = locale.getpreferredencoding() if returncode != 0: raise RuntimeError("Runtime Error: %s" % (output[0].rstrip().decode(encoding, errors='replace'))) return output[0].decode(encoding, errors='replace')
python
def get_process_output(process, encoding=None): """Get the output from the process.""" output = process.communicate() returncode = process.returncode if not encoding: try: encoding = sys.stdout.encoding except Exception: encoding = locale.getpreferredencoding() if returncode != 0: raise RuntimeError("Runtime Error: %s" % (output[0].rstrip().decode(encoding, errors='replace'))) return output[0].decode(encoding, errors='replace')
[ "def", "get_process_output", "(", "process", ",", "encoding", "=", "None", ")", ":", "output", "=", "process", ".", "communicate", "(", ")", "returncode", "=", "process", ".", "returncode", "if", "not", "encoding", ":", "try", ":", "encoding", "=", "sys", ...
Get the output from the process.
[ "Get", "the", "output", "from", "the", "process", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/util/__init__.py#L77-L92
train
49,595
facelessuser/pyspelling
pyspelling/util/__init__.py
call
def call(cmd, input_file=None, input_text=None, encoding=None): """Call with arguments.""" process = get_process(cmd) if input_file is not None: with open(input_file, 'rb') as f: process.stdin.write(f.read()) if input_text is not None: process.stdin.write(input_text) return get_process_output(process, encoding)
python
def call(cmd, input_file=None, input_text=None, encoding=None): """Call with arguments.""" process = get_process(cmd) if input_file is not None: with open(input_file, 'rb') as f: process.stdin.write(f.read()) if input_text is not None: process.stdin.write(input_text) return get_process_output(process, encoding)
[ "def", "call", "(", "cmd", ",", "input_file", "=", "None", ",", "input_text", "=", "None", ",", "encoding", "=", "None", ")", ":", "process", "=", "get_process", "(", "cmd", ")", "if", "input_file", "is", "not", "None", ":", "with", "open", "(", "inp...
Call with arguments.
[ "Call", "with", "arguments", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/util/__init__.py#L95-L106
train
49,596
facelessuser/pyspelling
pyspelling/util/__init__.py
call_spellchecker
def call_spellchecker(cmd, input_text=None, encoding=None): """Call spell checker with arguments.""" process = get_process(cmd) # A buffer has been provided if input_text is not None: for line in input_text.splitlines(): # Hunspell truncates lines at `0x1fff` (at least on Windows this has been observed) # Avoid truncation by chunking the line on white space and inserting a new line to break it. offset = 0 end = len(line) while True: chunk_end = offset + 0x1fff m = None if chunk_end >= end else RE_LAST_SPACE_IN_CHUNK.search(line, offset, chunk_end) if m: chunk_end = m.start(1) chunk = line[offset:m.start(1)] offset = m.end(1) else: chunk = line[offset:chunk_end] offset = chunk_end # Avoid wasted calls to empty strings if chunk and not chunk.isspace(): process.stdin.write(chunk + b'\n') if offset >= end: break return get_process_output(process, encoding)
python
def call_spellchecker(cmd, input_text=None, encoding=None): """Call spell checker with arguments.""" process = get_process(cmd) # A buffer has been provided if input_text is not None: for line in input_text.splitlines(): # Hunspell truncates lines at `0x1fff` (at least on Windows this has been observed) # Avoid truncation by chunking the line on white space and inserting a new line to break it. offset = 0 end = len(line) while True: chunk_end = offset + 0x1fff m = None if chunk_end >= end else RE_LAST_SPACE_IN_CHUNK.search(line, offset, chunk_end) if m: chunk_end = m.start(1) chunk = line[offset:m.start(1)] offset = m.end(1) else: chunk = line[offset:chunk_end] offset = chunk_end # Avoid wasted calls to empty strings if chunk and not chunk.isspace(): process.stdin.write(chunk + b'\n') if offset >= end: break return get_process_output(process, encoding)
[ "def", "call_spellchecker", "(", "cmd", ",", "input_text", "=", "None", ",", "encoding", "=", "None", ")", ":", "process", "=", "get_process", "(", "cmd", ")", "# A buffer has been provided", "if", "input_text", "is", "not", "None", ":", "for", "line", "in",...
Call spell checker with arguments.
[ "Call", "spell", "checker", "with", "arguments", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/util/__init__.py#L109-L137
train
49,597
facelessuser/pyspelling
pyspelling/util/__init__.py
random_name_gen
def random_name_gen(size=6): """Generate a random python attribute name.""" return ''.join( [random.choice(string.ascii_uppercase)] + [random.choice(string.ascii_uppercase + string.digits) for i in range(size - 1)] ) if size > 0 else ''
python
def random_name_gen(size=6): """Generate a random python attribute name.""" return ''.join( [random.choice(string.ascii_uppercase)] + [random.choice(string.ascii_uppercase + string.digits) for i in range(size - 1)] ) if size > 0 else ''
[ "def", "random_name_gen", "(", "size", "=", "6", ")", ":", "return", "''", ".", "join", "(", "[", "random", ".", "choice", "(", "string", ".", "ascii_uppercase", ")", "]", "+", "[", "random", ".", "choice", "(", "string", ".", "ascii_uppercase", "+", ...
Generate a random python attribute name.
[ "Generate", "a", "random", "python", "attribute", "name", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/util/__init__.py#L140-L146
train
49,598
facelessuser/pyspelling
pyspelling/util/__init__.py
yaml_load
def yaml_load(source, loader=yaml.Loader): """ Wrap PyYaml's loader so we can extend it to suit our needs. Load all strings as Unicode: http://stackoverflow.com/a/2967461/3609487. """ def construct_yaml_str(self, node): """Override the default string handling function to always return Unicode objects.""" return self.construct_scalar(node) class Loader(loader): """Define a custom loader to leave the global loader unaltered.""" # Attach our Unicode constructor to our custom loader ensuring all strings # will be Unicode on translation. Loader.add_constructor('tag:yaml.org,2002:str', construct_yaml_str) return yaml.load(source, Loader)
python
def yaml_load(source, loader=yaml.Loader): """ Wrap PyYaml's loader so we can extend it to suit our needs. Load all strings as Unicode: http://stackoverflow.com/a/2967461/3609487. """ def construct_yaml_str(self, node): """Override the default string handling function to always return Unicode objects.""" return self.construct_scalar(node) class Loader(loader): """Define a custom loader to leave the global loader unaltered.""" # Attach our Unicode constructor to our custom loader ensuring all strings # will be Unicode on translation. Loader.add_constructor('tag:yaml.org,2002:str', construct_yaml_str) return yaml.load(source, Loader)
[ "def", "yaml_load", "(", "source", ",", "loader", "=", "yaml", ".", "Loader", ")", ":", "def", "construct_yaml_str", "(", "self", ",", "node", ")", ":", "\"\"\"Override the default string handling function to always return Unicode objects.\"\"\"", "return", "self", ".",...
Wrap PyYaml's loader so we can extend it to suit our needs. Load all strings as Unicode: http://stackoverflow.com/a/2967461/3609487.
[ "Wrap", "PyYaml", "s", "loader", "so", "we", "can", "extend", "it", "to", "suit", "our", "needs", "." ]
c25d5292cc2687ad65891a12ead43f7182ca8bb3
https://github.com/facelessuser/pyspelling/blob/c25d5292cc2687ad65891a12ead43f7182ca8bb3/pyspelling/util/__init__.py#L149-L167
train
49,599