Dataset columns (stringlengths, min to max):
  code: 75 to 104k
  code_sememe: 47 to 309k
  token_type: 215 to 214k
  code_dependency: 75 to 155k
def draw_weights(W=None, second=10, saveable=True, shape=None, name='mnist', fig_idx=2396512):
    """Visualize every column of the weight matrix as a group of greyscale images.

    Parameters
    ----------
    W : numpy.array
        The weight matrix.
    second : int
        The display second(s) for the image(s), if saveable is False.
    saveable : boolean
        Save or plot the figure.
    shape : a list with 2 int or None
        The shape of one feature image; [28, 28] for MNIST.
    name : a string
        A name to save the image, if saveable is True.
    fig_idx : int
        matplotlib figure index.

    Examples
    --------
    >>> tl.visualize.draw_weights(network.all_params[0].eval(), second=10, saveable=True, name='weight_of_1st_layer', fig_idx=2012)
    """
    if shape is None:
        shape = [28, 28]
    import numpy as np
    import matplotlib.pyplot as plt
    if saveable is False:
        plt.ion()
    fig = plt.figure(fig_idx)  # show all feature images
    n_units = W.shape[1]
    num_r = int(np.sqrt(n_units))  # units per row, e.g. 25 hidden units -> 5 per row
    num_c = int(np.ceil(n_units / num_r))
    count = int(1)
    for _row in range(1, num_r + 1):
        for _col in range(1, num_c + 1):
            if count > n_units:
                break
            fig.add_subplot(num_r, num_c, count)
            # normalize each column to unit L2 norm before displaying it
            feature = W[:, count - 1] / np.sqrt((W[:, count - 1]**2).sum())
            plt.imshow(
                np.reshape(feature, (shape[0], shape[1])), cmap='gray',
                interpolation="nearest"
            )  # optionally pass vmin=np.min(feature), vmax=np.max(feature)
            plt.gca().xaxis.set_major_locator(plt.NullLocator())  # disable ticks
            plt.gca().yaxis.set_major_locator(plt.NullLocator())
            count = count + 1
    if saveable:
        plt.savefig(name + '.pdf', format='pdf')
    else:
        plt.draw()
        plt.pause(second)
def function[draw_weights, parameter[W, second, saveable, shape, name, fig_idx]]: constant[Visualize every columns of the weight matrix to a group of Greyscale img. Parameters ---------- W : numpy.array The weight matrix second : int The display second(s) for the image(s), if saveable is False. saveable : boolean Save or plot the figure. shape : a list with 2 int or None The shape of feature image, MNIST is [28, 80]. name : a string A name to save the image, if saveable is True. fig_idx : int matplotlib figure index. Examples -------- >>> tl.visualize.draw_weights(network.all_params[0].eval(), second=10, saveable=True, name='weight_of_1st_layer', fig_idx=2012) ] if compare[name[shape] is constant[None]] begin[:] variable[shape] assign[=] list[[<ast.Constant object at 0x7da204564460>, <ast.Constant object at 0x7da204565bd0>]] import module[matplotlib.pyplot] as alias[plt] if compare[name[saveable] is constant[False]] begin[:] call[name[plt].ion, parameter[]] variable[fig] assign[=] call[name[plt].figure, parameter[name[fig_idx]]] variable[n_units] assign[=] call[name[W].shape][constant[1]] variable[num_r] assign[=] call[name[int], parameter[call[name[np].sqrt, parameter[name[n_units]]]]] variable[num_c] assign[=] call[name[int], parameter[call[name[np].ceil, parameter[binary_operation[name[n_units] / name[num_r]]]]]] variable[count] assign[=] call[name[int], parameter[constant[1]]] for taget[name[_row]] in starred[call[name[range], parameter[constant[1], binary_operation[name[num_r] + constant[1]]]]] begin[:] for taget[name[_col]] in starred[call[name[range], parameter[constant[1], binary_operation[name[num_c] + constant[1]]]]] begin[:] if compare[name[count] greater[>] name[n_units]] begin[:] break call[name[fig].add_subplot, parameter[name[num_r], name[num_c], name[count]]] variable[feature] assign[=] binary_operation[call[name[W]][tuple[[<ast.Slice object at 0x7da20c6c4160>, <ast.BinOp object at 0x7da20c6c74c0>]]] / call[name[np].sqrt, parameter[call[binary_operation[call[name[W]][tuple[[<ast.Slice object at 0x7da20c6c5b40>, <ast.BinOp object at 0x7da20c6c7340>]]] ** constant[2]].sum, parameter[]]]]] call[name[plt].imshow, parameter[call[name[np].reshape, parameter[name[feature], tuple[[<ast.Subscript object at 0x7da20c6c5ae0>, <ast.Subscript object at 0x7da20c6c4ac0>]]]]]] call[call[name[plt].gca, parameter[]].xaxis.set_major_locator, parameter[call[name[plt].NullLocator, parameter[]]]] call[call[name[plt].gca, parameter[]].yaxis.set_major_locator, parameter[call[name[plt].NullLocator, parameter[]]]] variable[count] assign[=] binary_operation[name[count] + constant[1]] if name[saveable] begin[:] call[name[plt].savefig, parameter[binary_operation[name[name] + constant[.pdf]]]]
keyword[def] identifier[draw_weights] ( identifier[W] = keyword[None] , identifier[second] = literal[int] , identifier[saveable] = keyword[True] , identifier[shape] = keyword[None] , identifier[name] = literal[string] , identifier[fig_idx] = literal[int] ): literal[string] keyword[if] identifier[shape] keyword[is] keyword[None] : identifier[shape] =[ literal[int] , literal[int] ] keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt] keyword[if] identifier[saveable] keyword[is] keyword[False] : identifier[plt] . identifier[ion] () identifier[fig] = identifier[plt] . identifier[figure] ( identifier[fig_idx] ) identifier[n_units] = identifier[W] . identifier[shape] [ literal[int] ] identifier[num_r] = identifier[int] ( identifier[np] . identifier[sqrt] ( identifier[n_units] )) identifier[num_c] = identifier[int] ( identifier[np] . identifier[ceil] ( identifier[n_units] / identifier[num_r] )) identifier[count] = identifier[int] ( literal[int] ) keyword[for] identifier[_row] keyword[in] identifier[range] ( literal[int] , identifier[num_r] + literal[int] ): keyword[for] identifier[_col] keyword[in] identifier[range] ( literal[int] , identifier[num_c] + literal[int] ): keyword[if] identifier[count] > identifier[n_units] : keyword[break] identifier[fig] . identifier[add_subplot] ( identifier[num_r] , identifier[num_c] , identifier[count] ) identifier[feature] = identifier[W] [:, identifier[count] - literal[int] ]/ identifier[np] . identifier[sqrt] (( identifier[W] [:, identifier[count] - literal[int] ]** literal[int] ). identifier[sum] ()) identifier[plt] . identifier[imshow] ( identifier[np] . identifier[reshape] ( identifier[feature] ,( identifier[shape] [ literal[int] ], identifier[shape] [ literal[int] ])), identifier[cmap] = literal[string] , identifier[interpolation] = literal[string] ) identifier[plt] . identifier[gca] (). identifier[xaxis] . identifier[set_major_locator] ( identifier[plt] . identifier[NullLocator] ()) identifier[plt] . identifier[gca] (). identifier[yaxis] . identifier[set_major_locator] ( identifier[plt] . identifier[NullLocator] ()) identifier[count] = identifier[count] + literal[int] keyword[if] identifier[saveable] : identifier[plt] . identifier[savefig] ( identifier[name] + literal[string] , identifier[format] = literal[string] ) keyword[else] : identifier[plt] . identifier[draw] () identifier[plt] . identifier[pause] ( identifier[second] )
def draw_weights(W=None, second=10, saveable=True, shape=None, name='mnist', fig_idx=2396512): """Visualize every columns of the weight matrix to a group of Greyscale img. Parameters ---------- W : numpy.array The weight matrix second : int The display second(s) for the image(s), if saveable is False. saveable : boolean Save or plot the figure. shape : a list with 2 int or None The shape of feature image, MNIST is [28, 80]. name : a string A name to save the image, if saveable is True. fig_idx : int matplotlib figure index. Examples -------- >>> tl.visualize.draw_weights(network.all_params[0].eval(), second=10, saveable=True, name='weight_of_1st_layer', fig_idx=2012) """ if shape is None: shape = [28, 28] # depends on [control=['if'], data=['shape']] import matplotlib.pyplot as plt if saveable is False: plt.ion() # depends on [control=['if'], data=[]] fig = plt.figure(fig_idx) # show all feature images n_units = W.shape[1] num_r = int(np.sqrt(n_units)) # 每行显示的个数 若25个hidden unit -> 每行显示5个 num_c = int(np.ceil(n_units / num_r)) count = int(1) for _row in range(1, num_r + 1): for _col in range(1, num_c + 1): if count > n_units: break # depends on [control=['if'], data=[]] fig.add_subplot(num_r, num_c, count) # ------------------------------------------------------------ # plt.imshow(np.reshape(W[:,count-1],(28,28)), cmap='gray') # ------------------------------------------------------------ feature = W[:, count - 1] / np.sqrt((W[:, count - 1] ** 2).sum()) # feature[feature<0.0001] = 0 # value threshold # if count == 1 or count == 2: # print(np.mean(feature)) # if np.std(feature) < 0.03: # condition threshold # feature = np.zeros_like(feature) # if np.mean(feature) < -0.015: # condition threshold # feature = np.zeros_like(feature) plt.imshow(np.reshape(feature, (shape[0], shape[1])), cmap='gray', interpolation='nearest') # , vmin=np.min(feature), vmax=np.max(feature)) # plt.title(name) # ------------------------------------------------------------ # plt.imshow(np.reshape(W[:,count-1] ,(np.sqrt(size),np.sqrt(size))), cmap='gray', interpolation="nearest") plt.gca().xaxis.set_major_locator(plt.NullLocator()) # distable tick plt.gca().yaxis.set_major_locator(plt.NullLocator()) count = count + 1 # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] if saveable: plt.savefig(name + '.pdf', format='pdf') # depends on [control=['if'], data=[]] else: plt.draw() plt.pause(second)
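A minimal usage sketch for draw_weights, assuming numpy and matplotlib are installed; the random matrix below stands in for a trained layer's weights and the output file name is arbitrary.

import numpy as np

# 784 inputs (28x28 pixels) by 25 hidden units, as in the MNIST example
W = np.random.randn(784, 25)
draw_weights(W, saveable=True, shape=[28, 28], name='random_weights')
# writes random_weights.pdf with a 5x5 grid of normalized weight images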
def get_option_value(elem): """ Get the value attribute, or if it doesn't exist the text content. <option value="foo">bar</option> => "foo" <option>bar</option> => "bar" :param elem: a soup element """ value = elem.get("value") if value is None: value = elem.text.strip() if value is None or value == "": msg = u"Error parsing value from {}.".format(elem) raise ValueError(msg) return value
def function[get_option_value, parameter[elem]]: constant[ Get the value attribute, or if it doesn't exist the text content. <option value="foo">bar</option> => "foo" <option>bar</option> => "bar" :param elem: a soup element ] variable[value] assign[=] call[name[elem].get, parameter[constant[value]]] if compare[name[value] is constant[None]] begin[:] variable[value] assign[=] call[name[elem].text.strip, parameter[]] if <ast.BoolOp object at 0x7da1b2886980> begin[:] variable[msg] assign[=] call[constant[Error parsing value from {}.].format, parameter[name[elem]]] <ast.Raise object at 0x7da1b28844f0> return[name[value]]
keyword[def] identifier[get_option_value] ( identifier[elem] ): literal[string] identifier[value] = identifier[elem] . identifier[get] ( literal[string] ) keyword[if] identifier[value] keyword[is] keyword[None] : identifier[value] = identifier[elem] . identifier[text] . identifier[strip] () keyword[if] identifier[value] keyword[is] keyword[None] keyword[or] identifier[value] == literal[string] : identifier[msg] = literal[string] . identifier[format] ( identifier[elem] ) keyword[raise] identifier[ValueError] ( identifier[msg] ) keyword[return] identifier[value]
def get_option_value(elem): """ Get the value attribute, or if it doesn't exist the text content. <option value="foo">bar</option> => "foo" <option>bar</option> => "bar" :param elem: a soup element """ value = elem.get('value') if value is None: value = elem.text.strip() # depends on [control=['if'], data=['value']] if value is None or value == '': msg = u'Error parsing value from {}.'.format(elem) raise ValueError(msg) # depends on [control=['if'], data=[]] return value
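get_option_value can be exercised without a full scraper; a short check assuming BeautifulSoup is installed to produce the soup elements the function expects.

from bs4 import BeautifulSoup

soup = BeautifulSoup(
    '<select><option value="foo">bar</option><option>baz</option></select>',
    'html.parser')
options = soup.find_all('option')
assert get_option_value(options[0]) == 'foo'  # value attribute wins
assert get_option_value(options[1]) == 'baz'  # falls back to text content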
def unquoted(cls, value, literal=False): """Helper to create a string with no quotes.""" return cls(value, quotes=None, literal=literal)
def function[unquoted, parameter[cls, value, literal]]: constant[Helper to create a string with no quotes.] return[call[name[cls], parameter[name[value]]]]
keyword[def] identifier[unquoted] ( identifier[cls] , identifier[value] , identifier[literal] = keyword[False] ): literal[string] keyword[return] identifier[cls] ( identifier[value] , identifier[quotes] = keyword[None] , identifier[literal] = identifier[literal] )
def unquoted(cls, value, literal=False): """Helper to create a string with no quotes.""" return cls(value, quotes=None, literal=literal)
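unquoted is an alternate constructor meant to be bound to a class (typically via @classmethod); the QuotedValue class below is a hypothetical host, not from the source, sketched only to show the call pattern.

class QuotedValue:
    """Hypothetical host class for the unquoted() alternate constructor."""
    def __init__(self, value, quotes='"', literal=False):
        self.value = value
        self.quotes = quotes
        self.literal = literal

    @classmethod
    def unquoted(cls, value, literal=False):
        """Helper to create a string with no quotes."""
        return cls(value, quotes=None, literal=literal)

v = QuotedValue.unquoted("bare_word")
assert v.quotes is None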
def background_model(self, ignore_black=True, use_hsv=False, scale=8):
    """Creates a background model for the given image. The background
    color is given by the modes of each channel's histogram.

    Parameters
    ----------
    ignore_black : bool
        If True, the zero pixels will be ignored
        when computing the background model.

    use_hsv : bool
        If True, image will be converted to HSV for background model
        generation.

    scale : int
        Size of background histogram bins -- there will be
        BINARY_IM_MAX_VAL/size bins in the color histogram for each channel.

    Returns
    -------
    A list containing the red, green, and blue channel modes of the
    background.
    """
    # optionally work in HSV color space
    data = self.data
    if use_hsv:
        pil_im = PImage.fromarray(self._data)
        pil_im = pil_im.convert('HSV')
        data = np.asarray(pil_im)

    # find the non-black pixels (black detected in the original color space)
    nonblack_pixels = np.where(np.sum(self.data, axis=2) > 0)
    r_data = data[:, :, 0]
    g_data = data[:, :, 1]
    b_data = data[:, :, 2]
    if ignore_black:
        r_data = r_data[nonblack_pixels[0], nonblack_pixels[1]]
        g_data = g_data[nonblack_pixels[0], nonblack_pixels[1]]
        b_data = b_data[nonblack_pixels[0], nonblack_pixels[1]]

    # generate histograms for each channel
    bounds = (0, np.iinfo(np.uint8).max + 1)
    num_bins = bounds[1] // scale  # integer bin count, required by np.histogram
    r_hist, _ = np.histogram(r_data, bins=num_bins, range=bounds)
    g_hist, _ = np.histogram(g_data, bins=num_bins, range=bounds)
    b_hist, _ = np.histogram(b_data, bins=num_bins, range=bounds)
    hists = (r_hist, g_hist, b_hist)

    # take the thresholds as the modes of the image
    modes = [0 for i in range(self.channels)]
    for i in range(self.channels):
        modes[i] = scale * np.argmax(hists[i])

    return modes
def function[background_model, parameter[self, ignore_black, use_hsv, scale]]: constant[Creates a background model for the given image. The background color is given by the modes of each channel's histogram. Parameters ---------- ignore_black : bool If True, the zero pixels will be ignored when computing the background model. use_hsv : bool If True, image will be converted to HSV for background model generation. scale : int Size of background histogram bins -- there will be BINARY_IM_MAX_VAL/size bins in the color histogram for each channel. Returns ------- A list containing the red, green, and blue channel modes of the background. ] variable[data] assign[=] name[self].data if name[use_hsv] begin[:] variable[pil_im] assign[=] call[name[PImage].fromarray, parameter[name[self]._data]] variable[pil_im] assign[=] call[name[pil_im].convert, parameter[constant[HSV]]] variable[data] assign[=] call[name[np].asarray, parameter[name[pil_im]]] variable[nonblack_pixels] assign[=] call[name[np].where, parameter[compare[call[name[np].sum, parameter[name[self].data]] greater[>] constant[0]]]] variable[r_data] assign[=] name[self].r_data variable[g_data] assign[=] name[self].g_data variable[b_data] assign[=] name[self].b_data if name[ignore_black] begin[:] variable[r_data] assign[=] call[name[r_data]][tuple[[<ast.Subscript object at 0x7da20c6e7f40>, <ast.Subscript object at 0x7da20c6e7d30>]]] variable[g_data] assign[=] call[name[g_data]][tuple[[<ast.Subscript object at 0x7da20c6e6200>, <ast.Subscript object at 0x7da20c6e54e0>]]] variable[b_data] assign[=] call[name[b_data]][tuple[[<ast.Subscript object at 0x7da20c6c6560>, <ast.Subscript object at 0x7da20c6c70a0>]]] variable[bounds] assign[=] tuple[[<ast.Constant object at 0x7da20c6c41f0>, <ast.BinOp object at 0x7da20c6c7340>]] variable[num_bins] assign[=] binary_operation[call[name[bounds]][constant[1]] / name[scale]] <ast.Tuple object at 0x7da20c6c4ca0> assign[=] call[name[np].histogram, parameter[name[r_data]]] <ast.Tuple object at 0x7da20c6c6710> assign[=] call[name[np].histogram, parameter[name[g_data]]] <ast.Tuple object at 0x7da20c6c5b10> assign[=] call[name[np].histogram, parameter[name[b_data]]] variable[hists] assign[=] tuple[[<ast.Name object at 0x7da20e954dc0>, <ast.Name object at 0x7da20e955750>, <ast.Name object at 0x7da20e9566b0>]] variable[modes] assign[=] <ast.ListComp object at 0x7da20e957580> for taget[name[i]] in starred[call[name[range], parameter[name[self].channels]]] begin[:] call[name[modes]][name[i]] assign[=] binary_operation[name[scale] * call[name[np].argmax, parameter[call[name[hists]][name[i]]]]] return[name[modes]]
keyword[def] identifier[background_model] ( identifier[self] , identifier[ignore_black] = keyword[True] , identifier[use_hsv] = keyword[False] , identifier[scale] = literal[int] ): literal[string] identifier[data] = identifier[self] . identifier[data] keyword[if] identifier[use_hsv] : identifier[pil_im] = identifier[PImage] . identifier[fromarray] ( identifier[self] . identifier[_data] ) identifier[pil_im] = identifier[pil_im] . identifier[convert] ( literal[string] ) identifier[data] = identifier[np] . identifier[asarray] ( identifier[pil_im] ) identifier[nonblack_pixels] = identifier[np] . identifier[where] ( identifier[np] . identifier[sum] ( identifier[self] . identifier[data] , identifier[axis] = literal[int] )> literal[int] ) identifier[r_data] = identifier[self] . identifier[r_data] identifier[g_data] = identifier[self] . identifier[g_data] identifier[b_data] = identifier[self] . identifier[b_data] keyword[if] identifier[ignore_black] : identifier[r_data] = identifier[r_data] [ identifier[nonblack_pixels] [ literal[int] ], identifier[nonblack_pixels] [ literal[int] ]] identifier[g_data] = identifier[g_data] [ identifier[nonblack_pixels] [ literal[int] ], identifier[nonblack_pixels] [ literal[int] ]] identifier[b_data] = identifier[b_data] [ identifier[nonblack_pixels] [ literal[int] ], identifier[nonblack_pixels] [ literal[int] ]] identifier[bounds] =( literal[int] , identifier[np] . identifier[iinfo] ( identifier[np] . identifier[uint8] ). identifier[max] + literal[int] ) identifier[num_bins] = identifier[bounds] [ literal[int] ]/ identifier[scale] identifier[r_hist] , identifier[_] = identifier[np] . identifier[histogram] ( identifier[r_data] , identifier[bins] = identifier[num_bins] , identifier[range] = identifier[bounds] ) identifier[g_hist] , identifier[_] = identifier[np] . identifier[histogram] ( identifier[g_data] , identifier[bins] = identifier[num_bins] , identifier[range] = identifier[bounds] ) identifier[b_hist] , identifier[_] = identifier[np] . identifier[histogram] ( identifier[b_data] , identifier[bins] = identifier[num_bins] , identifier[range] = identifier[bounds] ) identifier[hists] =( identifier[r_hist] , identifier[g_hist] , identifier[b_hist] ) identifier[modes] =[ literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[channels] )] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[channels] ): identifier[modes] [ identifier[i] ]= identifier[scale] * identifier[np] . identifier[argmax] ( identifier[hists] [ identifier[i] ]) keyword[return] identifier[modes]
def background_model(self, ignore_black=True, use_hsv=False, scale=8): """Creates a background model for the given image. The background color is given by the modes of each channel's histogram. Parameters ---------- ignore_black : bool If True, the zero pixels will be ignored when computing the background model. use_hsv : bool If True, image will be converted to HSV for background model generation. scale : int Size of background histogram bins -- there will be BINARY_IM_MAX_VAL/size bins in the color histogram for each channel. Returns ------- A list containing the red, green, and blue channel modes of the background. """ # hsv color data = self.data if use_hsv: pil_im = PImage.fromarray(self._data) pil_im = pil_im.convert('HSV') data = np.asarray(pil_im) # depends on [control=['if'], data=[]] # find the black pixels nonblack_pixels = np.where(np.sum(self.data, axis=2) > 0) r_data = self.r_data g_data = self.g_data b_data = self.b_data if ignore_black: r_data = r_data[nonblack_pixels[0], nonblack_pixels[1]] g_data = g_data[nonblack_pixels[0], nonblack_pixels[1]] b_data = b_data[nonblack_pixels[0], nonblack_pixels[1]] # depends on [control=['if'], data=[]] # generate histograms for each channel bounds = (0, np.iinfo(np.uint8).max + 1) num_bins = bounds[1] / scale (r_hist, _) = np.histogram(r_data, bins=num_bins, range=bounds) (g_hist, _) = np.histogram(g_data, bins=num_bins, range=bounds) (b_hist, _) = np.histogram(b_data, bins=num_bins, range=bounds) hists = (r_hist, g_hist, b_hist) # find the thesholds as the modes of the image modes = [0 for i in range(self.channels)] for i in range(self.channels): modes[i] = scale * np.argmax(hists[i]) # depends on [control=['for'], data=['i']] return modes
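The heart of background_model is taking each channel's histogram mode; here is a self-contained numpy sketch of that step on a synthetic RGB array, independent of the image class the method belongs to.

import numpy as np

scale = 8
img = np.zeros((100, 100, 3), dtype=np.uint8)
img[:, :] = (40, 80, 120)      # dominant background color
img[:10, :10] = (200, 10, 10)  # a small foreground patch

modes = []
for c in range(3):
    hist, _ = np.histogram(img[:, :, c], bins=256 // scale, range=(0, 256))
    modes.append(int(scale * np.argmax(hist)))
print(modes)  # [40, 80, 120]: the background color, rounded down to bin edges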
def print_stdout(self, always_print=False): """ Prints the stdout to console - if there is any stdout, otherwise does nothing. :param always_print: print the stdout, even if there is nothing in the buffer (default: false) """ if self.__stdout or always_print: self.__echo.info("--{ STDOUT }---" + "-" * 100) self.__format_lines_info(self.stdout) self.__echo.info("---------------" + "-" * 100)
def function[print_stdout, parameter[self, always_print]]: constant[ Prints the stdout to console - if there is any stdout, otherwise does nothing. :param always_print: print the stdout, even if there is nothing in the buffer (default: false) ] if <ast.BoolOp object at 0x7da1b0016c80> begin[:] call[name[self].__echo.info, parameter[binary_operation[constant[--{ STDOUT }---] + binary_operation[constant[-] * constant[100]]]]] call[name[self].__format_lines_info, parameter[name[self].stdout]] call[name[self].__echo.info, parameter[binary_operation[constant[---------------] + binary_operation[constant[-] * constant[100]]]]]
keyword[def] identifier[print_stdout] ( identifier[self] , identifier[always_print] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[__stdout] keyword[or] identifier[always_print] : identifier[self] . identifier[__echo] . identifier[info] ( literal[string] + literal[string] * literal[int] ) identifier[self] . identifier[__format_lines_info] ( identifier[self] . identifier[stdout] ) identifier[self] . identifier[__echo] . identifier[info] ( literal[string] + literal[string] * literal[int] )
def print_stdout(self, always_print=False): """ Prints the stdout to console - if there is any stdout, otherwise does nothing. :param always_print: print the stdout, even if there is nothing in the buffer (default: false) """ if self.__stdout or always_print: self.__echo.info('--{ STDOUT }---' + '-' * 100) self.__format_lines_info(self.stdout) self.__echo.info('---------------' + '-' * 100) # depends on [control=['if'], data=[]]
def load(self, typedef, value, **kwargs):
    """
    Return the result of the bound load method for a typedef

    Looks up the load function that was bound to the engine for a typedef,
    and returns the result of passing the given `value` and any `context`
    to that function.

    Parameters
    ----------
    typedef : :class:`~TypeDefinition`
        The typedef whose bound load method should be used
    value : object
        The value to be passed into the bound load method
    **kwargs : kwargs
        Context for the value being loaded

    Returns
    -------
    loaded_value : object
        The return value of the load function for the input value

    Raises
    ------
    exc : :class:`KeyError`
        If the input typedef is not bound to this engine

    Example
    -------

    .. code-block:: python

        class Account(TypeDefinition):
            prefix = "::account"
            def load(self, value, **context):
                return value + Account.prefix

            def dump(self, value, **context):
                return value[:-len(Account.prefix)]

        typedef = Account()
        engine = TypeEngine("accounts")
        engine.register(typedef)
        engine.bind()
        assert engine.load(typedef, "Jill") == "Jill::account"

    """
    try:
        bound_type = self.bound_types[typedef]
    except KeyError:
        raise DeclareException(
            "Can't load unknown type {}".format(typedef))
    else:
        # Don't need to try/catch since load/dump are bound together
        return bound_type["load"](value, **kwargs)
def function[load, parameter[self, typedef, value]]: constant[ Return the result of the bound load method for a typedef Looks up the load function that was bound to the engine for a typedef, and return the result of passing the given `value` and any `context` to that function. Parameters ---------- typedef : :class:`~TypeDefinition` The typedef whose bound load method should be used value : object The value to be passed into the bound load method **kwargs : kwargs Context for the value being loaded Returns ------- loaded_value : object The return value of the load function for the input value Raises ------ exc : :class:`KeyError` If the input typedef is not bound to this engine Example ------- .. code-block:: python class Account(TypeDefinition): prefix = "::account" def load(self, value, **context): return value + Account.prefix def dump(self, value, **context): return value[:-len(Account.prefix)] typedef = Account() engine = TypeEngine("accounts") engine.register(typedef) engine.bind() assert engine.dump(typedef, "Jill::account") == "Jill" ] <ast.Try object at 0x7da20c6e7850>
keyword[def] identifier[load] ( identifier[self] , identifier[typedef] , identifier[value] ,** identifier[kwargs] ): literal[string] keyword[try] : identifier[bound_type] = identifier[self] . identifier[bound_types] [ identifier[typedef] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[DeclareException] ( literal[string] . identifier[format] ( identifier[typedef] )) keyword[else] : keyword[return] identifier[bound_type] [ literal[string] ]( identifier[value] ,** identifier[kwargs] )
def load(self, typedef, value, **kwargs): """ Return the result of the bound load method for a typedef Looks up the load function that was bound to the engine for a typedef, and return the result of passing the given `value` and any `context` to that function. Parameters ---------- typedef : :class:`~TypeDefinition` The typedef whose bound load method should be used value : object The value to be passed into the bound load method **kwargs : kwargs Context for the value being loaded Returns ------- loaded_value : object The return value of the load function for the input value Raises ------ exc : :class:`KeyError` If the input typedef is not bound to this engine Example ------- .. code-block:: python class Account(TypeDefinition): prefix = "::account" def load(self, value, **context): return value + Account.prefix def dump(self, value, **context): return value[:-len(Account.prefix)] typedef = Account() engine = TypeEngine("accounts") engine.register(typedef) engine.bind() assert engine.dump(typedef, "Jill::account") == "Jill" """ try: bound_type = self.bound_types[typedef] # depends on [control=['try'], data=[]] except KeyError: raise DeclareException("Can't load unknown type {}".format(typedef)) # depends on [control=['except'], data=[]] else: # Don't need to try/catch since load/dump are bound together return bound_type['load'](value, **kwargs)
def get_paginated_catalog_courses(self, catalog_id, querystring=None): """ Return paginated response for all catalog courses. Returns: dict: API response with links to next and previous pages. """ return self._load_data( self.CATALOGS_COURSES_ENDPOINT.format(catalog_id), default=[], querystring=querystring, traverse_pagination=False, many=False, )
def function[get_paginated_catalog_courses, parameter[self, catalog_id, querystring]]: constant[ Return paginated response for all catalog courses. Returns: dict: API response with links to next and previous pages. ] return[call[name[self]._load_data, parameter[call[name[self].CATALOGS_COURSES_ENDPOINT.format, parameter[name[catalog_id]]]]]]
keyword[def] identifier[get_paginated_catalog_courses] ( identifier[self] , identifier[catalog_id] , identifier[querystring] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_load_data] ( identifier[self] . identifier[CATALOGS_COURSES_ENDPOINT] . identifier[format] ( identifier[catalog_id] ), identifier[default] =[], identifier[querystring] = identifier[querystring] , identifier[traverse_pagination] = keyword[False] , identifier[many] = keyword[False] , )
def get_paginated_catalog_courses(self, catalog_id, querystring=None): """ Return paginated response for all catalog courses. Returns: dict: API response with links to next and previous pages. """ return self._load_data(self.CATALOGS_COURSES_ENDPOINT.format(catalog_id), default=[], querystring=querystring, traverse_pagination=False, many=False)
def build_functional(self, *pattern, **kwargs):
    """
    Builds a new functional pattern

    :param pattern: pattern definition(s) for the new FunctionalPattern
    :type pattern: tuple
    :param kwargs: keyword arguments, merged with the functional and engine
        defaults before being forwarded to the pattern
    :type kwargs: dict
    :return: the new functional pattern
    :rtype: FunctionalPattern
    """
    set_defaults(self._functional_defaults, kwargs)
    set_defaults(self._defaults, kwargs)
    return FunctionalPattern(*pattern, **kwargs)
def function[build_functional, parameter[self]]: constant[ Builds a new functional pattern :param pattern: :type pattern: :param kwargs: :type kwargs: :return: :rtype: ] call[name[set_defaults], parameter[name[self]._functional_defaults, name[kwargs]]] call[name[set_defaults], parameter[name[self]._defaults, name[kwargs]]] return[call[name[FunctionalPattern], parameter[<ast.Starred object at 0x7da18eb57af0>]]]
keyword[def] identifier[build_functional] ( identifier[self] ,* identifier[pattern] ,** identifier[kwargs] ): literal[string] identifier[set_defaults] ( identifier[self] . identifier[_functional_defaults] , identifier[kwargs] ) identifier[set_defaults] ( identifier[self] . identifier[_defaults] , identifier[kwargs] ) keyword[return] identifier[FunctionalPattern] (* identifier[pattern] ,** identifier[kwargs] )
def build_functional(self, *pattern, **kwargs): """ Builds a new functional pattern :param pattern: :type pattern: :param kwargs: :type kwargs: :return: :rtype: """ set_defaults(self._functional_defaults, kwargs) set_defaults(self._defaults, kwargs) return FunctionalPattern(*pattern, **kwargs)
def unpack_binary(data_pointer, definitions, data):
    """Unpack binary data using ``struct.unpack``

    :param data_pointer: metadata for the ``data_pointer`` attribute for this data stream
    :type data_pointer: ``ahds.header.Block``
    :param definitions: definitions specified in the header
    :type definitions: ``ahds.header.Block``
    :param bytes data: raw binary data to be unpacked
    :return numpy.ndarray output: unpacked data of shape (data_length, data_dimension)
    """
    if data_pointer.data_dimension:
        data_dimension = data_pointer.data_dimension
    else:
        data_dimension = 1  # if data_dimension is None
    if data_pointer.data_type == "float":
        data_type = "f" * data_dimension
    elif data_pointer.data_type == "int":
        data_type = "i" * data_dimension  # assume signed int
    elif data_pointer.data_type == "byte":
        data_type = "b" * data_dimension  # assume signed char
    else:
        raise ValueError("unknown data type: {}".format(data_pointer.data_type))
    # get this stream's size from the definitions
    try:
        data_length = int(getattr(definitions, data_pointer.data_name))
    except AttributeError:
        # quickfix; TODO: nNodes definition fix
        try:
            data_length = int(getattr(definitions, 'Nodes'))
        except AttributeError:
            x, y, z = definitions.Lattice
            data_length = x * y * z
    output = numpy.array(struct.unpack('<' + data_type * data_length, data))  # assume little-endian
    output = output.reshape(data_length, data_dimension)
    return output
def function[unpack_binary, parameter[data_pointer, definitions, data]]: constant[Unpack binary data using ``struct.unpack`` :param data_pointer: metadata for the ``data_pointer`` attribute for this data stream :type data_pointer: ``ahds.header.Block`` :param definitions: definitions specified in the header :type definitions: ``ahds.header.Block`` :param bytes data: raw binary data to be unpacked :return tuple output: unpacked data ] if name[data_pointer].data_dimension begin[:] variable[data_dimension] assign[=] name[data_pointer].data_dimension if compare[name[data_pointer].data_type equal[==] constant[float]] begin[:] variable[data_type] assign[=] binary_operation[constant[f] * name[data_dimension]] <ast.Try object at 0x7da1b09b6cb0> variable[output] assign[=] call[name[numpy].array, parameter[call[name[struct].unpack, parameter[binary_operation[constant[<] + binary_operation[call[constant[{}].format, parameter[name[data_type]]] * name[data_length]]], name[data]]]]] variable[output] assign[=] call[name[output].reshape, parameter[name[data_length], name[data_dimension]]] return[name[output]]
keyword[def] identifier[unpack_binary] ( identifier[data_pointer] , identifier[definitions] , identifier[data] ): literal[string] keyword[if] identifier[data_pointer] . identifier[data_dimension] : identifier[data_dimension] = identifier[data_pointer] . identifier[data_dimension] keyword[else] : identifier[data_dimension] = literal[int] keyword[if] identifier[data_pointer] . identifier[data_type] == literal[string] : identifier[data_type] = literal[string] * identifier[data_dimension] keyword[elif] identifier[data_pointer] . identifier[data_type] == literal[string] : identifier[data_type] = literal[string] * identifier[data_dimension] keyword[elif] identifier[data_pointer] . identifier[data_type] == literal[string] : identifier[data_type] = literal[string] * identifier[data_dimension] keyword[try] : identifier[data_length] = identifier[int] ( identifier[getattr] ( identifier[definitions] , identifier[data_pointer] . identifier[data_name] )) keyword[except] identifier[AttributeError] : literal[string] keyword[try] : identifier[data_length] = identifier[int] ( identifier[getattr] ( identifier[definitions] , literal[string] )) keyword[except] identifier[AttributeError] : identifier[x] , identifier[y] , identifier[z] = identifier[definitions] . identifier[Lattice] identifier[data_length] = identifier[x] * identifier[y] * identifier[z] identifier[output] = identifier[numpy] . identifier[array] ( identifier[struct] . identifier[unpack] ( literal[string] + literal[string] . identifier[format] ( identifier[data_type] )* identifier[data_length] , identifier[data] )) identifier[output] = identifier[output] . identifier[reshape] ( identifier[data_length] , identifier[data_dimension] ) keyword[return] identifier[output]
def unpack_binary(data_pointer, definitions, data): """Unpack binary data using ``struct.unpack`` :param data_pointer: metadata for the ``data_pointer`` attribute for this data stream :type data_pointer: ``ahds.header.Block`` :param definitions: definitions specified in the header :type definitions: ``ahds.header.Block`` :param bytes data: raw binary data to be unpacked :return tuple output: unpacked data """ if data_pointer.data_dimension: data_dimension = data_pointer.data_dimension # depends on [control=['if'], data=[]] else: data_dimension = 1 # if data_dimension is None if data_pointer.data_type == 'float': data_type = 'f' * data_dimension # depends on [control=['if'], data=[]] elif data_pointer.data_type == 'int': data_type = 'i' * data_dimension # assume signed int # depends on [control=['if'], data=[]] elif data_pointer.data_type == 'byte': data_type = 'b' * data_dimension # assume signed char # depends on [control=['if'], data=[]] # get this streams size from the definitions try: data_length = int(getattr(definitions, data_pointer.data_name)) # depends on [control=['try'], data=[]] except AttributeError: # quickfix '\n :TODO: nNodes definition fix\n ' try: data_length = int(getattr(definitions, 'Nodes')) # depends on [control=['try'], data=[]] except AttributeError: (x, y, z) = definitions.Lattice data_length = x * y * z # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] output = numpy.array(struct.unpack('<' + '{}'.format(data_type) * data_length, data)) # assume little-endian output = output.reshape(data_length, data_dimension) return output
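To see unpack_binary's struct-format logic in isolation, a standalone round-trip: pack six little-endian floats, then unpack and reshape them into a (3, 2) array exactly as the function does for data_dimension=2.

import struct
import numpy as np

data_dimension, data_length = 2, 3
values = [1.0, 2.0, 3.5, 4.5, 5.0, 6.0]
data = struct.pack('<' + 'f' * data_dimension * data_length, *values)

data_type = 'f' * data_dimension      # per-item format, e.g. 'ff'
output = np.array(struct.unpack('<' + data_type * data_length, data))
output = output.reshape(data_length, data_dimension)
print(output)  # [[1.  2. ] [3.5 4.5] [5.  6. ]]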
def html_to_tags(code): """ Convert HTML code to tags. ``code`` is a string containing HTML code. The return value is a list of corresponding instances of ``TagBase``. """ code = ('<div>' + code + '</div>').encode('utf8') el = ET.fromstring(code) return [tag_from_element(c) for c in el]
def function[html_to_tags, parameter[code]]: constant[ Convert HTML code to tags. ``code`` is a string containing HTML code. The return value is a list of corresponding instances of ``TagBase``. ] variable[code] assign[=] call[binary_operation[binary_operation[constant[<div>] + name[code]] + constant[</div>]].encode, parameter[constant[utf8]]] variable[el] assign[=] call[name[ET].fromstring, parameter[name[code]]] return[<ast.ListComp object at 0x7da1b042cc10>]
keyword[def] identifier[html_to_tags] ( identifier[code] ): literal[string] identifier[code] =( literal[string] + identifier[code] + literal[string] ). identifier[encode] ( literal[string] ) identifier[el] = identifier[ET] . identifier[fromstring] ( identifier[code] ) keyword[return] [ identifier[tag_from_element] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[el] ]
def html_to_tags(code): """ Convert HTML code to tags. ``code`` is a string containing HTML code. The return value is a list of corresponding instances of ``TagBase``. """ code = ('<div>' + code + '</div>').encode('utf8') el = ET.fromstring(code) return [tag_from_element(c) for c in el]
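The <div> wrapper in html_to_tags exists because ElementTree requires a single root element; a standalone demonstration of that trick (tag_from_element is external to this snippet, so the sketch just reads tags and text).

import xml.etree.ElementTree as ET

code = '<b>bold</b><i>italic</i>'    # two roots: not valid XML on its own
el = ET.fromstring('<div>' + code + '</div>')
print([(c.tag, c.text) for c in el])  # [('b', 'bold'), ('i', 'italic')]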
def copy_logstore(self, from_project, from_logstore, to_logstore, to_project=None, to_client=None):
    """ copy logstore, index, logtail config to target logstore; machine groups are not included yet.
    the target logstore will be created if it does not exist

    :type from_project: string
    :param from_project: project name

    :type from_logstore: string
    :param from_logstore: logstore name

    :type to_logstore: string
    :param to_logstore: target logstore name

    :type to_project: string
    :param to_project: target project name; copies to the same project if not specified. the project will be created if it does not exist

    :type to_client: LogClient
    :param to_client: logclient instance; use it to operate on the "to_project" if specified, for cross region purpose

    :return:
    """
    return copy_logstore(self, from_project, from_logstore, to_logstore, to_project=to_project, to_client=to_client)
def function[copy_logstore, parameter[self, from_project, from_logstore, to_logstore, to_project, to_client]]: constant[ copy logstore, index, logtail config to target logstore, machine group are not included yet. the target logstore will be crated if not existing :type from_project: string :param from_project: project name :type from_logstore: string :param from_logstore: logstore name :type to_logstore: string :param to_logstore: target logstore name :type to_project: string :param to_project: target project name, copy to same project if not being specified, will try to create it if not being specified :type to_client: LogClient :param to_client: logclient instance, use it to operate on the "to_project" if being specified for cross region purpose :return: ] return[call[name[copy_logstore], parameter[name[self], name[from_project], name[from_logstore], name[to_logstore]]]]
keyword[def] identifier[copy_logstore] ( identifier[self] , identifier[from_project] , identifier[from_logstore] , identifier[to_logstore] , identifier[to_project] = keyword[None] , identifier[to_client] = keyword[None] ): literal[string] keyword[return] identifier[copy_logstore] ( identifier[self] , identifier[from_project] , identifier[from_logstore] , identifier[to_logstore] , identifier[to_project] = identifier[to_project] , identifier[to_client] = identifier[to_client] )
def copy_logstore(self, from_project, from_logstore, to_logstore, to_project=None, to_client=None): """ copy logstore, index, logtail config to target logstore, machine group are not included yet. the target logstore will be crated if not existing :type from_project: string :param from_project: project name :type from_logstore: string :param from_logstore: logstore name :type to_logstore: string :param to_logstore: target logstore name :type to_project: string :param to_project: target project name, copy to same project if not being specified, will try to create it if not being specified :type to_client: LogClient :param to_client: logclient instance, use it to operate on the "to_project" if being specified for cross region purpose :return: """ return copy_logstore(self, from_project, from_logstore, to_logstore, to_project=to_project, to_client=to_client)
def _parse(self, e): """Parses an exception, returns its message.""" # MySQL matches = re.search(r"^\(_mysql_exceptions\.OperationalError\) \(\d+, \"(.+)\"\)$", str(e)) if matches: return matches.group(1) # PostgreSQL matches = re.search(r"^\(psycopg2\.OperationalError\) (.+)$", str(e)) if matches: return matches.group(1) # SQLite matches = re.search(r"^\(sqlite3\.OperationalError\) (.+)$", str(e)) if matches: return matches.group(1) # Default return str(e)
def function[_parse, parameter[self, e]]: constant[Parses an exception, returns its message.] variable[matches] assign[=] call[name[re].search, parameter[constant[^\(_mysql_exceptions\.OperationalError\) \(\d+, \"(.+)\"\)$], call[name[str], parameter[name[e]]]]] if name[matches] begin[:] return[call[name[matches].group, parameter[constant[1]]]] variable[matches] assign[=] call[name[re].search, parameter[constant[^\(psycopg2\.OperationalError\) (.+)$], call[name[str], parameter[name[e]]]]] if name[matches] begin[:] return[call[name[matches].group, parameter[constant[1]]]] variable[matches] assign[=] call[name[re].search, parameter[constant[^\(sqlite3\.OperationalError\) (.+)$], call[name[str], parameter[name[e]]]]] if name[matches] begin[:] return[call[name[matches].group, parameter[constant[1]]]] return[call[name[str], parameter[name[e]]]]
keyword[def] identifier[_parse] ( identifier[self] , identifier[e] ): literal[string] identifier[matches] = identifier[re] . identifier[search] ( literal[string] , identifier[str] ( identifier[e] )) keyword[if] identifier[matches] : keyword[return] identifier[matches] . identifier[group] ( literal[int] ) identifier[matches] = identifier[re] . identifier[search] ( literal[string] , identifier[str] ( identifier[e] )) keyword[if] identifier[matches] : keyword[return] identifier[matches] . identifier[group] ( literal[int] ) identifier[matches] = identifier[re] . identifier[search] ( literal[string] , identifier[str] ( identifier[e] )) keyword[if] identifier[matches] : keyword[return] identifier[matches] . identifier[group] ( literal[int] ) keyword[return] identifier[str] ( identifier[e] )
def _parse(self, e): """Parses an exception, returns its message.""" # MySQL matches = re.search('^\\(_mysql_exceptions\\.OperationalError\\) \\(\\d+, \\"(.+)\\"\\)$', str(e)) if matches: return matches.group(1) # depends on [control=['if'], data=[]] # PostgreSQL matches = re.search('^\\(psycopg2\\.OperationalError\\) (.+)$', str(e)) if matches: return matches.group(1) # depends on [control=['if'], data=[]] # SQLite matches = re.search('^\\(sqlite3\\.OperationalError\\) (.+)$', str(e)) if matches: return matches.group(1) # depends on [control=['if'], data=[]] # Default return str(e)
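_parse only needs the stringified exception, so it can be tested without a database; here is a standalone variant (the class wrapper is dropped so it runs as a plain function).

import re

def parse_operational_error(message):
    """Standalone version of _parse above, taking the exception string directly."""
    for pattern in (
        r"^\(_mysql_exceptions\.OperationalError\) \(\d+, \"(.+)\"\)$",  # MySQL
        r"^\(psycopg2\.OperationalError\) (.+)$",                        # PostgreSQL
        r"^\(sqlite3\.OperationalError\) (.+)$",                         # SQLite
    ):
        matches = re.search(pattern, message)
        if matches:
            return matches.group(1)
    return message

print(parse_operational_error('(sqlite3.OperationalError) no such table: users'))
# no such table: users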
def check_dirty(self): ''' .. versionchanged:: 0.20 Do not log size change. ''' if self._dirty_size is None: if self._dirty_render: self.render() self._dirty_render = False if self._dirty_draw: self.draw() self._dirty_draw = False return True width, height = self._dirty_size self._dirty_size = None self.reset_canvas(width, height) self._dirty_render = True self._dirty_draw = True return True
def function[check_dirty, parameter[self]]: constant[ .. versionchanged:: 0.20 Do not log size change. ] if compare[name[self]._dirty_size is constant[None]] begin[:] if name[self]._dirty_render begin[:] call[name[self].render, parameter[]] name[self]._dirty_render assign[=] constant[False] if name[self]._dirty_draw begin[:] call[name[self].draw, parameter[]] name[self]._dirty_draw assign[=] constant[False] return[constant[True]] <ast.Tuple object at 0x7da20c6abc70> assign[=] name[self]._dirty_size name[self]._dirty_size assign[=] constant[None] call[name[self].reset_canvas, parameter[name[width], name[height]]] name[self]._dirty_render assign[=] constant[True] name[self]._dirty_draw assign[=] constant[True] return[constant[True]]
keyword[def] identifier[check_dirty] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_dirty_size] keyword[is] keyword[None] : keyword[if] identifier[self] . identifier[_dirty_render] : identifier[self] . identifier[render] () identifier[self] . identifier[_dirty_render] = keyword[False] keyword[if] identifier[self] . identifier[_dirty_draw] : identifier[self] . identifier[draw] () identifier[self] . identifier[_dirty_draw] = keyword[False] keyword[return] keyword[True] identifier[width] , identifier[height] = identifier[self] . identifier[_dirty_size] identifier[self] . identifier[_dirty_size] = keyword[None] identifier[self] . identifier[reset_canvas] ( identifier[width] , identifier[height] ) identifier[self] . identifier[_dirty_render] = keyword[True] identifier[self] . identifier[_dirty_draw] = keyword[True] keyword[return] keyword[True]
def check_dirty(self): """ .. versionchanged:: 0.20 Do not log size change. """ if self._dirty_size is None: if self._dirty_render: self.render() self._dirty_render = False # depends on [control=['if'], data=[]] if self._dirty_draw: self.draw() self._dirty_draw = False # depends on [control=['if'], data=[]] return True # depends on [control=['if'], data=[]] (width, height) = self._dirty_size self._dirty_size = None self.reset_canvas(width, height) self._dirty_render = True self._dirty_draw = True return True
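check_dirty implements a deferred resize-then-redraw protocol; the Canvas class below is a hypothetical minimal host, sketched to show the two-call sequence (a plain function taking self can be called directly).

class Canvas:
    """Hypothetical minimal host for the dirty-flag protocol in check_dirty."""
    def __init__(self):
        self._dirty_size = (640, 480)  # a pending resize
        self._dirty_render = False
        self._dirty_draw = False

    def reset_canvas(self, width, height):
        print('reset canvas to', width, 'x', height)

    def render(self):
        print('render')

    def draw(self):
        print('draw')

c = Canvas()
check_dirty(c)  # consumes the pending size, resets the canvas, marks render/draw dirty
check_dirty(c)  # performs the deferred render and draw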
def convert_squeeze(params, w_name, scope_name, inputs, layers, weights, names): """ Convert squeeze operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting squeeze ...') if len(params['axes']) > 1: raise AssertionError('Cannot convert squeeze by multiple dimensions') def target_layer(x, axis=int(params['axes'][0])): import tensorflow as tf return tf.squeeze(x, axis=axis) lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name] = lambda_layer(layers[inputs[0]])
def function[convert_squeeze, parameter[params, w_name, scope_name, inputs, layers, weights, names]]: constant[ Convert squeeze operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers ] call[name[print], parameter[constant[Converting squeeze ...]]] if compare[call[name[len], parameter[call[name[params]][constant[axes]]]] greater[>] constant[1]] begin[:] <ast.Raise object at 0x7da1b016e920> def function[target_layer, parameter[x, axis]]: import module[tensorflow] as alias[tf] return[call[name[tf].squeeze, parameter[name[x]]]] variable[lambda_layer] assign[=] call[name[keras].layers.Lambda, parameter[name[target_layer]]] call[name[layers]][name[scope_name]] assign[=] call[name[lambda_layer], parameter[call[name[layers]][call[name[inputs]][constant[0]]]]]
keyword[def] identifier[convert_squeeze] ( identifier[params] , identifier[w_name] , identifier[scope_name] , identifier[inputs] , identifier[layers] , identifier[weights] , identifier[names] ): literal[string] identifier[print] ( literal[string] ) keyword[if] identifier[len] ( identifier[params] [ literal[string] ])> literal[int] : keyword[raise] identifier[AssertionError] ( literal[string] ) keyword[def] identifier[target_layer] ( identifier[x] , identifier[axis] = identifier[int] ( identifier[params] [ literal[string] ][ literal[int] ])): keyword[import] identifier[tensorflow] keyword[as] identifier[tf] keyword[return] identifier[tf] . identifier[squeeze] ( identifier[x] , identifier[axis] = identifier[axis] ) identifier[lambda_layer] = identifier[keras] . identifier[layers] . identifier[Lambda] ( identifier[target_layer] ) identifier[layers] [ identifier[scope_name] ]= identifier[lambda_layer] ( identifier[layers] [ identifier[inputs] [ literal[int] ]])
def convert_squeeze(params, w_name, scope_name, inputs, layers, weights, names): """ Convert squeeze operation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers """ print('Converting squeeze ...') if len(params['axes']) > 1: raise AssertionError('Cannot convert squeeze by multiple dimensions') # depends on [control=['if'], data=[]] def target_layer(x, axis=int(params['axes'][0])): import tensorflow as tf return tf.squeeze(x, axis=axis) lambda_layer = keras.layers.Lambda(target_layer) layers[scope_name] = lambda_layer(layers[inputs[0]])
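What the generated Lambda layer computes at runtime, assuming TensorFlow is installed: tf.squeeze drops exactly the one axis the converter allows.

import tensorflow as tf

x = tf.zeros([1, 3, 4])
y = tf.squeeze(x, axis=0)      # the single axis from params['axes']
print(x.shape, '->', y.shape)  # (1, 3, 4) -> (3, 4)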
def proper_case_section(self, section): """Verify proper casing is retrieved, when available, for each dependency in the section. """ # Casing for section. changed_values = False unknown_names = [k for k in section.keys() if k not in set(self.proper_names)] # Replace each package with proper casing. for dep in unknown_names: try: # Get new casing for package name. new_casing = proper_case(dep) except IOError: # Unable to normalize package name. continue if new_casing != dep: changed_values = True self.register_proper_name(new_casing) # Replace old value with new value. old_value = section[dep] section[new_casing] = old_value del section[dep] # Return whether or not values have been changed. return changed_values
def function[proper_case_section, parameter[self, section]]: constant[Verify proper casing is retrieved, when available, for each dependency in the section. ] variable[changed_values] assign[=] constant[False] variable[unknown_names] assign[=] <ast.ListComp object at 0x7da1b1e8ee60> for taget[name[dep]] in starred[name[unknown_names]] begin[:] <ast.Try object at 0x7da1b1e8ceb0> if compare[name[new_casing] not_equal[!=] name[dep]] begin[:] variable[changed_values] assign[=] constant[True] call[name[self].register_proper_name, parameter[name[new_casing]]] variable[old_value] assign[=] call[name[section]][name[dep]] call[name[section]][name[new_casing]] assign[=] name[old_value] <ast.Delete object at 0x7da1b1e8e110> return[name[changed_values]]
keyword[def] identifier[proper_case_section] ( identifier[self] , identifier[section] ): literal[string] identifier[changed_values] = keyword[False] identifier[unknown_names] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[section] . identifier[keys] () keyword[if] identifier[k] keyword[not] keyword[in] identifier[set] ( identifier[self] . identifier[proper_names] )] keyword[for] identifier[dep] keyword[in] identifier[unknown_names] : keyword[try] : identifier[new_casing] = identifier[proper_case] ( identifier[dep] ) keyword[except] identifier[IOError] : keyword[continue] keyword[if] identifier[new_casing] != identifier[dep] : identifier[changed_values] = keyword[True] identifier[self] . identifier[register_proper_name] ( identifier[new_casing] ) identifier[old_value] = identifier[section] [ identifier[dep] ] identifier[section] [ identifier[new_casing] ]= identifier[old_value] keyword[del] identifier[section] [ identifier[dep] ] keyword[return] identifier[changed_values]
def proper_case_section(self, section): """Verify proper casing is retrieved, when available, for each dependency in the section. """ # Casing for section. changed_values = False unknown_names = [k for k in section.keys() if k not in set(self.proper_names)] # Replace each package with proper casing. for dep in unknown_names: try: # Get new casing for package name. new_casing = proper_case(dep) # depends on [control=['try'], data=[]] except IOError: # Unable to normalize package name. continue # depends on [control=['except'], data=[]] if new_casing != dep: changed_values = True self.register_proper_name(new_casing) # Replace old value with new value. old_value = section[dep] section[new_casing] = old_value del section[dep] # depends on [control=['if'], data=['new_casing', 'dep']] # depends on [control=['for'], data=['dep']] # Return whether or not values have been changed. return changed_values
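The rename-in-place pattern from proper_case_section, isolated with a stand-in casing function; proper_case here is a hypothetical local lookup, not the real implementation.

known_casings = {'django': 'Django', 'flask': 'Flask'}

def proper_case(name):
    """Hypothetical stand-in for the real proper_case lookup."""
    return known_casings.get(name.lower(), name)

section = {'django': '>=2.0', 'requests': '*'}
for dep in list(section):  # list() so we can mutate while iterating
    new_casing = proper_case(dep)
    if new_casing != dep:
        section[new_casing] = section.pop(dep)
print(section)  # {'requests': '*', 'Django': '>=2.0'}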
def get_label(self): """Get the label of the Dataset. Returns ------- label : numpy array or None The label information from the Dataset. """ if self.label is None: self.label = self.get_field('label') return self.label
def function[get_label, parameter[self]]: constant[Get the label of the Dataset. Returns ------- label : numpy array or None The label information from the Dataset. ] if compare[name[self].label is constant[None]] begin[:] name[self].label assign[=] call[name[self].get_field, parameter[constant[label]]] return[name[self].label]
keyword[def] identifier[get_label] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[label] keyword[is] keyword[None] : identifier[self] . identifier[label] = identifier[self] . identifier[get_field] ( literal[string] ) keyword[return] identifier[self] . identifier[label]
def get_label(self): """Get the label of the Dataset. Returns ------- label : numpy array or None The label information from the Dataset. """ if self.label is None: self.label = self.get_field('label') # depends on [control=['if'], data=[]] return self.label
def remove_child(self, child): """ Remove a child widget from this widget. :param child: Object inheriting :class:`BaseElement` """ self.children.remove(child) child.parent = None if self.view and self.view.is_loaded: self.view.dispatch({ 'name': 'remove', 'selector': '#' + child.id })
def function[remove_child, parameter[self, child]]: constant[ Remove a child widget from this widget. :param child: Object inheriting :class:`BaseElement` ] call[name[self].children.remove, parameter[name[child]]] name[child].parent assign[=] constant[None] if <ast.BoolOp object at 0x7da18fe91c60> begin[:] call[name[self].view.dispatch, parameter[dictionary[[<ast.Constant object at 0x7da18fe909d0>, <ast.Constant object at 0x7da18fe92e90>], [<ast.Constant object at 0x7da18fe91240>, <ast.BinOp object at 0x7da18fe915a0>]]]]
keyword[def] identifier[remove_child] ( identifier[self] , identifier[child] ): literal[string] identifier[self] . identifier[children] . identifier[remove] ( identifier[child] ) identifier[child] . identifier[parent] = keyword[None] keyword[if] identifier[self] . identifier[view] keyword[and] identifier[self] . identifier[view] . identifier[is_loaded] : identifier[self] . identifier[view] . identifier[dispatch] ({ literal[string] : literal[string] , literal[string] : literal[string] + identifier[child] . identifier[id] })
def remove_child(self, child): """ Remove a child widget from this widget. :param child: Object inheriting :class:`BaseElement` """ self.children.remove(child) child.parent = None if self.view and self.view.is_loaded: self.view.dispatch({'name': 'remove', 'selector': '#' + child.id}) # depends on [control=['if'], data=[]]
def save(self, obj):
    """ Subclass the save method, to hash ndarray subclass, rather
        than pickling them. Of course, this is a total abuse of
        the Pickler class.
    """
    if isinstance(obj, self.np.ndarray) and not obj.dtype.hasobject:
        # Compute a hash of the object
        # The update function of the hash requires a c_contiguous buffer.
        if obj.shape == ():
            # 0d arrays need to be flattened because viewing them as bytes
            # raises a ValueError exception.
            obj_c_contiguous = obj.flatten()
        elif obj.flags.c_contiguous:
            obj_c_contiguous = obj
        elif obj.flags.f_contiguous:
            obj_c_contiguous = obj.T
        else:
            # Cater for non-single-segment arrays: this creates a
            # copy, and thus alleviates this issue.
            # XXX: There might be a more efficient way of doing this
            obj_c_contiguous = obj.flatten()

        # memoryview is not supported for some dtypes, e.g. datetime64, see
        # https://github.com/numpy/numpy/issues/4983. The
        # workaround is to view the array as bytes before
        # taking the memoryview.
        self._hash.update(
            self._getbuffer(obj_c_contiguous.view(self.np.uint8)))

        # We store the class, to be able to distinguish between
        # Objects with the same binary content, but different
        # classes.
        if self.coerce_mmap and isinstance(obj, self.np.memmap):
            # We don't make the difference between memmap and
            # normal ndarrays, to be able to reload previously
            # computed results with memmap.
            klass = self.np.ndarray
        else:
            klass = obj.__class__
        # We also return the dtype and the shape, to distinguish
        # different views on the same data with different dtypes.

        # The object will be pickled by the pickler hashed at the end.
        obj = (klass, ('HASHED', obj.dtype, obj.shape, obj.strides))
    elif isinstance(obj, self.np.dtype):
        # Atomic dtype objects are interned by their default constructor:
        # np.dtype('f8') is np.dtype('f8')
        # This interning is not maintained by a
        # pickle.loads + pickle.dumps cycle, because __reduce__
        # uses copy=True in the dtype constructor. This
        # non-deterministic behavior causes the internal memoizer
        # of the hasher to generate different hash values
        # depending on the history of the dtype object.
        # To prevent the hash from being sensitive to this, we use
        # .descr which is a full (and never interned) description of
        # the array dtype according to the numpy doc.
        klass = obj.__class__
        obj = (klass, ('HASHED', obj.descr))
    Hasher.save(self, obj)
def function[save, parameter[self, obj]]: constant[ Subclass the save method, to hash ndarray subclasses, rather than pickling them. Of course, this is a total abuse of the Pickler class. ] if <ast.BoolOp object at 0x7da1b0e3ada0> begin[:] if compare[name[obj].shape equal[==] tuple[[]]] begin[:] variable[obj_c_contiguous] assign[=] call[name[obj].flatten, parameter[]] call[name[self]._hash.update, parameter[call[name[self]._getbuffer, parameter[call[name[obj_c_contiguous].view, parameter[name[self].np.uint8]]]]]] if <ast.BoolOp object at 0x7da1b0e39720> begin[:] variable[klass] assign[=] name[self].np.ndarray variable[obj] assign[=] tuple[[<ast.Name object at 0x7da1b0e38ca0>, <ast.Tuple object at 0x7da1b0e396c0>]] call[name[Hasher].save, parameter[name[self], name[obj]]]
keyword[def] identifier[save] ( identifier[self] , identifier[obj] ): literal[string] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[self] . identifier[np] . identifier[ndarray] ) keyword[and] keyword[not] identifier[obj] . identifier[dtype] . identifier[hasobject] : keyword[if] identifier[obj] . identifier[shape] ==(): identifier[obj_c_contiguous] = identifier[obj] . identifier[flatten] () keyword[elif] identifier[obj] . identifier[flags] . identifier[c_contiguous] : identifier[obj_c_contiguous] = identifier[obj] keyword[elif] identifier[obj] . identifier[flags] . identifier[f_contiguous] : identifier[obj_c_contiguous] = identifier[obj] . identifier[T] keyword[else] : identifier[obj_c_contiguous] = identifier[obj] . identifier[flatten] () identifier[self] . identifier[_hash] . identifier[update] ( identifier[self] . identifier[_getbuffer] ( identifier[obj_c_contiguous] . identifier[view] ( identifier[self] . identifier[np] . identifier[uint8] ))) keyword[if] identifier[self] . identifier[coerce_mmap] keyword[and] identifier[isinstance] ( identifier[obj] , identifier[self] . identifier[np] . identifier[memmap] ): identifier[klass] = identifier[self] . identifier[np] . identifier[ndarray] keyword[else] : identifier[klass] = identifier[obj] . identifier[__class__] identifier[obj] =( identifier[klass] ,( literal[string] , identifier[obj] . identifier[dtype] , identifier[obj] . identifier[shape] , identifier[obj] . identifier[strides] )) keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[self] . identifier[np] . identifier[dtype] ): identifier[klass] = identifier[obj] . identifier[__class__] identifier[obj] =( identifier[klass] ,( literal[string] , identifier[obj] . identifier[descr] )) identifier[Hasher] . identifier[save] ( identifier[self] , identifier[obj] )
def save(self, obj): """ Subclass the save method, to hash ndarray subclasses, rather than pickling them. Of course, this is a total abuse of the Pickler class. """ if isinstance(obj, self.np.ndarray) and (not obj.dtype.hasobject): # Compute a hash of the object # The update function of the hash requires a c_contiguous buffer. if obj.shape == (): # 0d arrays need to be flattened because viewing them as bytes # raises a ValueError exception. obj_c_contiguous = obj.flatten() # depends on [control=['if'], data=[]] elif obj.flags.c_contiguous: obj_c_contiguous = obj # depends on [control=['if'], data=[]] elif obj.flags.f_contiguous: obj_c_contiguous = obj.T # depends on [control=['if'], data=[]] else: # Cater for non-single-segment arrays: this creates a # copy, and thus alleviates this issue. # XXX: There might be a more efficient way of doing this obj_c_contiguous = obj.flatten() # memoryview is not supported for some dtypes, e.g. datetime64, see # https://github.com/numpy/numpy/issues/4983. The # workaround is to view the array as bytes before # taking the memoryview. self._hash.update(self._getbuffer(obj_c_contiguous.view(self.np.uint8))) # We store the class, to be able to distinguish between # Objects with the same binary content, but different # classes. if self.coerce_mmap and isinstance(obj, self.np.memmap): # We don't make the difference between memmap and # normal ndarrays, to be able to reload previously # computed results with memmap. klass = self.np.ndarray # depends on [control=['if'], data=[]] else: klass = obj.__class__ # We also return the dtype and the shape, to distinguish # different views on the same data with different dtypes. # The object will be pickled by the pickler hashed at the end. obj = (klass, ('HASHED', obj.dtype, obj.shape, obj.strides)) # depends on [control=['if'], data=[]] elif isinstance(obj, self.np.dtype): # Atomic dtype objects are interned by their default constructor: # np.dtype('f8') is np.dtype('f8') # This interning is not maintained by a # pickle.loads + pickle.dumps cycle, because __reduce__ # uses copy=True in the dtype constructor. This # non-deterministic behavior causes the internal memoizer # of the hasher to generate different hash values # depending on the history of the dtype object. # To prevent the hash from being sensitive to this, we use # .descr which is a full (and never interned) description of # the array dtype according to the numpy doc. klass = obj.__class__ obj = (klass, ('HASHED', obj.descr)) # depends on [control=['if'], data=[]] Hasher.save(self, obj)
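A minimal standalone sketch of the same idea (not joblib's actual Hasher; the helper name hash_array is invented): hash an ndarray through a C-contiguous byte view, mixing in dtype and shape so different views over the same data hash differently.

import hashlib

import numpy as np

def hash_array(arr):
    # Hypothetical helper for illustration only. Force a C-contiguous
    # buffer first; transposed or sliced arrays generally are not.
    contiguous = np.ascontiguousarray(arr)
    h = hashlib.md5()
    # View the buffer as raw bytes (uint8) -- the same workaround used
    # above for dtypes such as datetime64 that reject memoryview directly.
    h.update(contiguous.view(np.uint8).tobytes())
    # Mix in dtype and shape so distinct views over equal bytes differ.
    h.update(repr((arr.dtype, arr.shape)).encode())
    return h.hexdigest()

a = np.arange(6).reshape(2, 3)
assert hash_array(a) != hash_array(a.T)  # same data, different layout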
def _series(self, name, interval, config, buckets, **kws): ''' Fetch a series of buckets. ''' pipe = self._client.pipeline(transaction=False) step = config['step'] resolution = config.get('resolution',step) fetch = kws.get('fetch') or self._type_get process_row = kws.get('process_row') or self._process_row rval = OrderedDict() for interval_bucket in buckets: i_key = '%s%s:%s:%s'%(self._prefix, name, interval, interval_bucket) if config['coarse']: fetch(pipe, i_key) else: pipe.smembers(i_key) res = pipe.execute() # TODO: a memory efficient way to use a single pipeline for this. for idx,data in enumerate(res): # TODO: use closures on the config for generating this interval key interval_bucket = buckets[idx] #start_bucket + idx interval_key = '%s%s:%s:%s'%(self._prefix, name, interval, interval_bucket) if config['coarse']: data = process_row( data ) rval[ config['i_calc'].from_bucket(interval_bucket) ] = data else: rval[ config['i_calc'].from_bucket(interval_bucket) ] = OrderedDict() pipe = self._client.pipeline(transaction=False) resolution_buckets = sorted(map(int,data)) for bucket in resolution_buckets: # TODO: use closures on the config for generating this resolution key resolution_key = '%s:%s'%(interval_key, bucket) fetch(pipe, resolution_key) resolution_res = pipe.execute() for x,data in enumerate(resolution_res): i_t = config['i_calc'].from_bucket(interval_bucket) r_t = config['r_calc'].from_bucket(resolution_buckets[x]) rval[ i_t ][ r_t ] = process_row(data) return rval
def function[_series, parameter[self, name, interval, config, buckets]]: constant[ Fetch a series of buckets. ] variable[pipe] assign[=] call[name[self]._client.pipeline, parameter[]] variable[step] assign[=] call[name[config]][constant[step]] variable[resolution] assign[=] call[name[config].get, parameter[constant[resolution], name[step]]] variable[fetch] assign[=] <ast.BoolOp object at 0x7da1b0432dd0> variable[process_row] assign[=] <ast.BoolOp object at 0x7da1b04332b0> variable[rval] assign[=] call[name[OrderedDict], parameter[]] for taget[name[interval_bucket]] in starred[name[buckets]] begin[:] variable[i_key] assign[=] binary_operation[constant[%s%s:%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0431420>, <ast.Name object at 0x7da1b0431870>, <ast.Name object at 0x7da1b0431c30>, <ast.Name object at 0x7da1b0430a60>]]] if call[name[config]][constant[coarse]] begin[:] call[name[fetch], parameter[name[pipe], name[i_key]]] variable[res] assign[=] call[name[pipe].execute, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b04301c0>, <ast.Name object at 0x7da1b0430760>]]] in starred[call[name[enumerate], parameter[name[res]]]] begin[:] variable[interval_bucket] assign[=] call[name[buckets]][name[idx]] variable[interval_key] assign[=] binary_operation[constant[%s%s:%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0431330>, <ast.Name object at 0x7da1b0431390>, <ast.Name object at 0x7da1b04311e0>, <ast.Name object at 0x7da1b04333a0>]]] if call[name[config]][constant[coarse]] begin[:] variable[data] assign[=] call[name[process_row], parameter[name[data]]] call[name[rval]][call[call[name[config]][constant[i_calc]].from_bucket, parameter[name[interval_bucket]]]] assign[=] name[data] return[name[rval]]
keyword[def] identifier[_series] ( identifier[self] , identifier[name] , identifier[interval] , identifier[config] , identifier[buckets] ,** identifier[kws] ): literal[string] identifier[pipe] = identifier[self] . identifier[_client] . identifier[pipeline] ( identifier[transaction] = keyword[False] ) identifier[step] = identifier[config] [ literal[string] ] identifier[resolution] = identifier[config] . identifier[get] ( literal[string] , identifier[step] ) identifier[fetch] = identifier[kws] . identifier[get] ( literal[string] ) keyword[or] identifier[self] . identifier[_type_get] identifier[process_row] = identifier[kws] . identifier[get] ( literal[string] ) keyword[or] identifier[self] . identifier[_process_row] identifier[rval] = identifier[OrderedDict] () keyword[for] identifier[interval_bucket] keyword[in] identifier[buckets] : identifier[i_key] = literal[string] %( identifier[self] . identifier[_prefix] , identifier[name] , identifier[interval] , identifier[interval_bucket] ) keyword[if] identifier[config] [ literal[string] ]: identifier[fetch] ( identifier[pipe] , identifier[i_key] ) keyword[else] : identifier[pipe] . identifier[smembers] ( identifier[i_key] ) identifier[res] = identifier[pipe] . identifier[execute] () keyword[for] identifier[idx] , identifier[data] keyword[in] identifier[enumerate] ( identifier[res] ): identifier[interval_bucket] = identifier[buckets] [ identifier[idx] ] identifier[interval_key] = literal[string] %( identifier[self] . identifier[_prefix] , identifier[name] , identifier[interval] , identifier[interval_bucket] ) keyword[if] identifier[config] [ literal[string] ]: identifier[data] = identifier[process_row] ( identifier[data] ) identifier[rval] [ identifier[config] [ literal[string] ]. identifier[from_bucket] ( identifier[interval_bucket] )]= identifier[data] keyword[else] : identifier[rval] [ identifier[config] [ literal[string] ]. identifier[from_bucket] ( identifier[interval_bucket] )]= identifier[OrderedDict] () identifier[pipe] = identifier[self] . identifier[_client] . identifier[pipeline] ( identifier[transaction] = keyword[False] ) identifier[resolution_buckets] = identifier[sorted] ( identifier[map] ( identifier[int] , identifier[data] )) keyword[for] identifier[bucket] keyword[in] identifier[resolution_buckets] : identifier[resolution_key] = literal[string] %( identifier[interval_key] , identifier[bucket] ) identifier[fetch] ( identifier[pipe] , identifier[resolution_key] ) identifier[resolution_res] = identifier[pipe] . identifier[execute] () keyword[for] identifier[x] , identifier[data] keyword[in] identifier[enumerate] ( identifier[resolution_res] ): identifier[i_t] = identifier[config] [ literal[string] ]. identifier[from_bucket] ( identifier[interval_bucket] ) identifier[r_t] = identifier[config] [ literal[string] ]. identifier[from_bucket] ( identifier[resolution_buckets] [ identifier[x] ]) identifier[rval] [ identifier[i_t] ][ identifier[r_t] ]= identifier[process_row] ( identifier[data] ) keyword[return] identifier[rval]
def _series(self, name, interval, config, buckets, **kws): """ Fetch a series of buckets. """ pipe = self._client.pipeline(transaction=False) step = config['step'] resolution = config.get('resolution', step) fetch = kws.get('fetch') or self._type_get process_row = kws.get('process_row') or self._process_row rval = OrderedDict() for interval_bucket in buckets: i_key = '%s%s:%s:%s' % (self._prefix, name, interval, interval_bucket) if config['coarse']: fetch(pipe, i_key) # depends on [control=['if'], data=[]] else: pipe.smembers(i_key) # depends on [control=['for'], data=['interval_bucket']] res = pipe.execute() # TODO: a memory efficient way to use a single pipeline for this. for (idx, data) in enumerate(res): # TODO: use closures on the config for generating this interval key interval_bucket = buckets[idx] #start_bucket + idx interval_key = '%s%s:%s:%s' % (self._prefix, name, interval, interval_bucket) if config['coarse']: data = process_row(data) rval[config['i_calc'].from_bucket(interval_bucket)] = data # depends on [control=['if'], data=[]] else: rval[config['i_calc'].from_bucket(interval_bucket)] = OrderedDict() pipe = self._client.pipeline(transaction=False) resolution_buckets = sorted(map(int, data)) for bucket in resolution_buckets: # TODO: use closures on the config for generating this resolution key resolution_key = '%s:%s' % (interval_key, bucket) fetch(pipe, resolution_key) # depends on [control=['for'], data=['bucket']] resolution_res = pipe.execute() for (x, data) in enumerate(resolution_res): i_t = config['i_calc'].from_bucket(interval_bucket) r_t = config['r_calc'].from_bucket(resolution_buckets[x]) rval[i_t][r_t] = process_row(data) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return rval
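A small runnable sketch of the Redis key scheme this method appears to assume (prefix and names below are invented): one key per interval bucket, with finer resolution buckets nested under it as set members.

# Hypothetical values; only the key format mirrors _series above.
prefix, name, interval = 'stat:', 'hits', 'hour'
interval_bucket, resolution_bucket = 426, 15

i_key = '%s%s:%s:%s' % (prefix, name, interval, interval_bucket)
print(i_key)                                  # stat:hits:hour:426
print('%s:%s' % (i_key, resolution_bucket))   # stat:hits:hour:426:15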
def peakSpectrum(self, ref=None, segmentLengthMultiplier=1, mode=None, window='hann'): """ analyses the source to generate the max values per bin per segment :param ref: the reference value for dB purposes. :param segmentLengthMultiplier: allow for increased resolution. :param mode: cq or none. :return: f : ndarray Array of sample frequencies. Pxx : ndarray linear spectrum max values. """ def analysisFunc(x, nperseg): freqs, _, Pxy = signal.spectrogram(self.samples, self.fs, window=window, nperseg=int(nperseg), noverlap=int(nperseg // 2), detrend=False, scaling='spectrum') Pxy_max = np.sqrt(Pxy.max(axis=-1).real) if x > 0: Pxy_max = Pxy_max / (10 ** ((3 * x) / 20)) if ref is not None: Pxy_max = librosa.amplitude_to_db(Pxy_max, ref=ref) return freqs, Pxy_max if mode == 'cq': return self._cq(analysisFunc, segmentLengthMultiplier) else: return analysisFunc(0, self.getSegmentLength() * segmentLengthMultiplier)
def function[peakSpectrum, parameter[self, ref, segmentLengthMultiplier, mode, window]]: constant[ analyses the source to generate the max values per bin per segment :param ref: the reference value for dB purposes. :param segmentLengthMultiplier: allow for increased resolution. :param mode: cq or none. :return: f : ndarray Array of sample frequencies. Pxx : ndarray linear spectrum max values. ] def function[analysisFunc, parameter[x, nperseg]]: <ast.Tuple object at 0x7da1b0e63c40> assign[=] call[name[signal].spectrogram, parameter[name[self].samples, name[self].fs]] variable[Pxy_max] assign[=] call[name[np].sqrt, parameter[call[name[Pxy].max, parameter[]].real]] if compare[name[x] greater[>] constant[0]] begin[:] variable[Pxy_max] assign[=] binary_operation[name[Pxy_max] / binary_operation[constant[10] ** binary_operation[binary_operation[constant[3] * name[x]] / constant[20]]]] if compare[name[ref] is_not constant[None]] begin[:] variable[Pxy_max] assign[=] call[name[librosa].amplitude_to_db, parameter[name[Pxy_max]]] return[tuple[[<ast.Name object at 0x7da1b0e62d70>, <ast.Name object at 0x7da1b0e60b50>]]] if compare[name[mode] equal[==] constant[cq]] begin[:] return[call[name[self]._cq, parameter[name[analysisFunc], name[segmentLengthMultiplier]]]]
keyword[def] identifier[peakSpectrum] ( identifier[self] , identifier[ref] = keyword[None] , identifier[segmentLengthMultiplier] = literal[int] , identifier[mode] = keyword[None] , identifier[window] = literal[string] ): literal[string] keyword[def] identifier[analysisFunc] ( identifier[x] , identifier[nperseg] ): identifier[freqs] , identifier[_] , identifier[Pxy] = identifier[signal] . identifier[spectrogram] ( identifier[self] . identifier[samples] , identifier[self] . identifier[fs] , identifier[window] = identifier[window] , identifier[nperseg] = identifier[int] ( identifier[nperseg] ), identifier[noverlap] = identifier[int] ( identifier[nperseg] // literal[int] ), identifier[detrend] = keyword[False] , identifier[scaling] = literal[string] ) identifier[Pxy_max] = identifier[np] . identifier[sqrt] ( identifier[Pxy] . identifier[max] ( identifier[axis] =- literal[int] ). identifier[real] ) keyword[if] identifier[x] > literal[int] : identifier[Pxy_max] = identifier[Pxy_max] /( literal[int] **(( literal[int] * identifier[x] )/ literal[int] )) keyword[if] identifier[ref] keyword[is] keyword[not] keyword[None] : identifier[Pxy_max] = identifier[librosa] . identifier[amplitude_to_db] ( identifier[Pxy_max] , identifier[ref] = identifier[ref] ) keyword[return] identifier[freqs] , identifier[Pxy_max] keyword[if] identifier[mode] == literal[string] : keyword[return] identifier[self] . identifier[_cq] ( identifier[analysisFunc] , identifier[segmentLengthMultiplier] ) keyword[else] : keyword[return] identifier[analysisFunc] ( literal[int] , identifier[self] . identifier[getSegmentLength] ()* identifier[segmentLengthMultiplier] )
def peakSpectrum(self, ref=None, segmentLengthMultiplier=1, mode=None, window='hann'): """ analyses the source to generate the max values per bin per segment :param ref: the reference value for dB purposes. :param segmentLengthMultiplier: allow for increased resolution. :param mode: cq or none. :return: f : ndarray Array of sample frequencies. Pxx : ndarray linear spectrum max values. """ def analysisFunc(x, nperseg): (freqs, _, Pxy) = signal.spectrogram(self.samples, self.fs, window=window, nperseg=int(nperseg), noverlap=int(nperseg // 2), detrend=False, scaling='spectrum') Pxy_max = np.sqrt(Pxy.max(axis=-1).real) if x > 0: Pxy_max = Pxy_max / 10 ** (3 * x / 20) # depends on [control=['if'], data=['x']] if ref is not None: Pxy_max = librosa.amplitude_to_db(Pxy_max, ref=ref) # depends on [control=['if'], data=['ref']] return (freqs, Pxy_max) if mode == 'cq': return self._cq(analysisFunc, segmentLengthMultiplier) # depends on [control=['if'], data=[]] else: return analysisFunc(0, self.getSegmentLength() * segmentLengthMultiplier)
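A hedged, self-contained sketch of the core computation (outside the class, on a synthetic signal): scipy's spectrogram with 'spectrum' scaling, then a max over time per frequency bin.

import numpy as np
from scipy import signal

fs = 48000
t = np.arange(fs) / fs
samples = np.sin(2 * np.pi * 1000 * t)  # synthetic 1 kHz tone, 1 second

nperseg = 4096
freqs, _, Pxy = signal.spectrogram(samples, fs, window='hann',
                                   nperseg=nperseg, noverlap=nperseg // 2,
                                   detrend=False, scaling='spectrum')
peak = np.sqrt(Pxy.max(axis=-1).real)  # linear peak value per bin
print(freqs[np.argmax(peak)])          # close to 1000.0 Hz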
def main(): # pragma: no cover """Main entry point.""" try: # Exit on broken pipe. signal.signal(signal.SIGPIPE, signal.SIG_DFL) except AttributeError: # SIGPIPE is not available on Windows. pass try: return _main(sys.argv, standard_out=sys.stdout, standard_error=sys.stderr) except KeyboardInterrupt: return 2
def function[main, parameter[]]: constant[Main entry point.] <ast.Try object at 0x7da1b26ad390> <ast.Try object at 0x7da1b26acbe0>
keyword[def] identifier[main] (): literal[string] keyword[try] : identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGPIPE] , identifier[signal] . identifier[SIG_DFL] ) keyword[except] identifier[AttributeError] : keyword[pass] keyword[try] : keyword[return] identifier[_main] ( identifier[sys] . identifier[argv] , identifier[standard_out] = identifier[sys] . identifier[stdout] , identifier[standard_error] = identifier[sys] . identifier[stderr] ) keyword[except] identifier[KeyboardInterrupt] : keyword[return] literal[int]
def main(): # pragma: no cover 'Main entry point.' try: # Exit on broken pipe. signal.signal(signal.SIGPIPE, signal.SIG_DFL) # depends on [control=['try'], data=[]] except AttributeError: # SIGPIPE is not available on Windows. pass # depends on [control=['except'], data=[]] try: return _main(sys.argv, standard_out=sys.stdout, standard_error=sys.stderr) # depends on [control=['try'], data=[]] except KeyboardInterrupt: return 2 # depends on [control=['except'], data=[]]
def glir(self): """ The GLIR queue corresponding to the current canvas """ canvas = get_current_canvas() if canvas is None: msg = ("If you want to use gloo without vispy.app, " + "use a gloo.context.FakeCanvas.") raise RuntimeError('Gloo requires a Canvas to run.\n' + msg) return canvas.context.glir
def function[glir, parameter[self]]: constant[ The GLIR queue corresponding to the current canvas ] variable[canvas] assign[=] call[name[get_current_canvas], parameter[]] if compare[name[canvas] is constant[None]] begin[:] variable[msg] assign[=] binary_operation[constant[If you want to use gloo without vispy.app, ] + constant[use a gloo.context.FakeCanvas.]] <ast.Raise object at 0x7da1b0ea1ae0> return[name[canvas].context.glir]
keyword[def] identifier[glir] ( identifier[self] ): literal[string] identifier[canvas] = identifier[get_current_canvas] () keyword[if] identifier[canvas] keyword[is] keyword[None] : identifier[msg] =( literal[string] + literal[string] ) keyword[raise] identifier[RuntimeError] ( literal[string] + identifier[msg] ) keyword[return] identifier[canvas] . identifier[context] . identifier[glir]
def glir(self): """ The GLIR queue corresponding to the current canvas """ canvas = get_current_canvas() if canvas is None: msg = 'If you want to use gloo without vispy.app, ' + 'use a gloo.context.FakeCanvas.' raise RuntimeError('Gloo requires a Canvas to run.\n' + msg) # depends on [control=['if'], data=[]] return canvas.context.glir
def enable_backups(self): """ Enable Backups for this Instance. When enabled, we will automatically backup your Instance's data so that it can be restored at a later date. For more information on Instance's Backups service and pricing, see our `Backups Page`_ .. _Backups Page: https://www.linode.com/backups """ self._client.post("{}/backups/enable".format(Instance.api_endpoint), model=self) self.invalidate() return True
def function[enable_backups, parameter[self]]: constant[ Enable Backups for this Instance. When enabled, we will automatically backup your Instance's data so that it can be restored at a later date. For more information on Instance's Backups service and pricing, see our `Backups Page`_ .. _Backups Page: https://www.linode.com/backups ] call[name[self]._client.post, parameter[call[constant[{}/backups/enable].format, parameter[name[Instance].api_endpoint]]]] call[name[self].invalidate, parameter[]] return[constant[True]]
keyword[def] identifier[enable_backups] ( identifier[self] ): literal[string] identifier[self] . identifier[_client] . identifier[post] ( literal[string] . identifier[format] ( identifier[Instance] . identifier[api_endpoint] ), identifier[model] = identifier[self] ) identifier[self] . identifier[invalidate] () keyword[return] keyword[True]
def enable_backups(self): """ Enable Backups for this Instance. When enabled, we will automatically backup your Instance's data so that it can be restored at a later date. For more information on Instance's Backups service and pricing, see our `Backups Page`_ .. _Backups Page: https://www.linode.com/backups """ self._client.post('{}/backups/enable'.format(Instance.api_endpoint), model=self) self.invalidate() return True
async def dispatch(self, request, **kwargs): """Dispatch a request.""" # Authorize request self.auth = await self.authorize(request) # Load collection self.collection = await self.load_many(request) # Load resource self.resource = await self.load_one(request) if request.method == 'GET' and self.resource is None: # Filter collection self.collection = await self.filter(request) # Sort collection self.columns_sort = request.query.get('ap-sort', self.columns_sort) if self.columns_sort: reverse = self.columns_sort.startswith('-') self.columns_sort = self.columns_sort.lstrip('+-') self.collection = await self.sort(request, reverse=reverse) # Paginate collection try: self.offset = int(request.query.get('ap-offset', 0)) if self.limit: self.count = await self.count(request) self.collection = await self.paginate(request) except ValueError: pass return await super(AdminHandler, self).dispatch(request, **kwargs)
<ast.AsyncFunctionDef object at 0x7da18f00c5e0>
keyword[async] keyword[def] identifier[dispatch] ( identifier[self] , identifier[request] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[auth] = keyword[await] identifier[self] . identifier[authorize] ( identifier[request] ) identifier[self] . identifier[collection] = keyword[await] identifier[self] . identifier[load_many] ( identifier[request] ) identifier[self] . identifier[resource] = keyword[await] identifier[self] . identifier[load_one] ( identifier[request] ) keyword[if] identifier[request] . identifier[method] == literal[string] keyword[and] identifier[self] . identifier[resource] keyword[is] keyword[None] : identifier[self] . identifier[collection] = keyword[await] identifier[self] . identifier[filter] ( identifier[request] ) identifier[self] . identifier[columns_sort] = identifier[request] . identifier[query] . identifier[get] ( literal[string] , identifier[self] . identifier[columns_sort] ) keyword[if] identifier[self] . identifier[columns_sort] : identifier[reverse] = identifier[self] . identifier[columns_sort] . identifier[startswith] ( literal[string] ) identifier[self] . identifier[columns_sort] = identifier[self] . identifier[columns_sort] . identifier[lstrip] ( literal[string] ) identifier[self] . identifier[collection] = keyword[await] identifier[self] . identifier[sort] ( identifier[request] , identifier[reverse] = identifier[reverse] ) keyword[try] : identifier[self] . identifier[offset] = identifier[int] ( identifier[request] . identifier[query] . identifier[get] ( literal[string] , literal[int] )) keyword[if] identifier[self] . identifier[limit] : identifier[self] . identifier[count] = keyword[await] identifier[self] . identifier[count] ( identifier[request] ) identifier[self] . identifier[collection] = keyword[await] identifier[self] . identifier[paginate] ( identifier[request] ) keyword[except] identifier[ValueError] : keyword[pass] keyword[return] keyword[await] identifier[super] ( identifier[AdminHandler] , identifier[self] ). identifier[dispatch] ( identifier[request] ,** identifier[kwargs] )
async def dispatch(self, request, **kwargs): """Dispatch a request.""" # Authorize request self.auth = await self.authorize(request) # Load collection self.collection = await self.load_many(request) # Load resource self.resource = await self.load_one(request) if request.method == 'GET' and self.resource is None: # Filter collection self.collection = await self.filter(request) # Sort collection self.columns_sort = request.query.get('ap-sort', self.columns_sort) if self.columns_sort: reverse = self.columns_sort.startswith('-') self.columns_sort = self.columns_sort.lstrip('+-') self.collection = await self.sort(request, reverse=reverse) # depends on [control=['if'], data=[]] # Paginate collection try: self.offset = int(request.query.get('ap-offset', 0)) if self.limit: self.count = await self.count(request) self.collection = await self.paginate(request) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] return await super(AdminHandler, self).dispatch(request, **kwargs)
def convert_attrs_to_uppercase(obj: Any, attrs: Iterable[str]) -> None: """ Converts the specified attributes of an object to upper case, modifying the object in place. """ for a in attrs: value = getattr(obj, a) if value is None: continue setattr(obj, a, value.upper())
def function[convert_attrs_to_uppercase, parameter[obj, attrs]]: constant[ Converts the specified attributes of an object to upper case, modifying the object in place. ] for taget[name[a]] in starred[name[attrs]] begin[:] variable[value] assign[=] call[name[getattr], parameter[name[obj], name[a]]] if compare[name[value] is constant[None]] begin[:] continue call[name[setattr], parameter[name[obj], name[a], call[name[value].upper, parameter[]]]]
keyword[def] identifier[convert_attrs_to_uppercase] ( identifier[obj] : identifier[Any] , identifier[attrs] : identifier[Iterable] [ identifier[str] ])-> keyword[None] : literal[string] keyword[for] identifier[a] keyword[in] identifier[attrs] : identifier[value] = identifier[getattr] ( identifier[obj] , identifier[a] ) keyword[if] identifier[value] keyword[is] keyword[None] : keyword[continue] identifier[setattr] ( identifier[obj] , identifier[a] , identifier[value] . identifier[upper] ())
def convert_attrs_to_uppercase(obj: Any, attrs: Iterable[str]) -> None: """ Converts the specified attributes of an object to upper case, modifying the object in place. """ for a in attrs: value = getattr(obj, a) if value is None: continue # depends on [control=['if'], data=[]] setattr(obj, a, value.upper()) # depends on [control=['for'], data=['a']]
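A tiny usage sketch with an invented carrier object; None attributes are skipped, the rest are upper-cased in place.

class Record:
    # Hypothetical object for illustration.
    def __init__(self):
        self.forename = 'ada'
        self.surname = 'lovelace'
        self.middle = None

r = Record()
convert_attrs_to_uppercase(r, ['forename', 'surname', 'middle'])
print(r.forename, r.surname, r.middle)  # ADA LOVELACE None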
def flatten(self, df, column_name): """Flatten a column in the dataframe that contains lists""" _exp_list = [[md5, x] for md5, value_list in zip(df['md5'], df[column_name]) for x in value_list] return pd.DataFrame(_exp_list, columns=['md5',column_name])
def function[flatten, parameter[self, df, column_name]]: constant[Flatten a column in the dataframe that contains lists] variable[_exp_list] assign[=] <ast.ListComp object at 0x7da20c7cbb20> return[call[name[pd].DataFrame, parameter[name[_exp_list]]]]
keyword[def] identifier[flatten] ( identifier[self] , identifier[df] , identifier[column_name] ): literal[string] identifier[_exp_list] =[[ identifier[md5] , identifier[x] ] keyword[for] identifier[md5] , identifier[value_list] keyword[in] identifier[zip] ( identifier[df] [ literal[string] ], identifier[df] [ identifier[column_name] ]) keyword[for] identifier[x] keyword[in] identifier[value_list] ] keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[_exp_list] , identifier[columns] =[ literal[string] , identifier[column_name] ])
def flatten(self, df, column_name): """Flatten a column in the dataframe that contains lists""" _exp_list = [[md5, x] for (md5, value_list) in zip(df['md5'], df[column_name]) for x in value_list] return pd.DataFrame(_exp_list, columns=['md5', column_name])
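A usage sketch with synthetic data (column name invented): each list element becomes its own row, keyed by the md5 of the originating record. On the host class the call would be self.flatten(df, 'imports'); the comprehension below repeats the method body so the sketch runs standalone.

import pandas as pd

df = pd.DataFrame({'md5': ['a1', 'b2'],
                   'imports': [['os', 'sys'], ['re']]})
flat = pd.DataFrame([[md5, x]
                     for md5, values in zip(df['md5'], df['imports'])
                     for x in values],
                    columns=['md5', 'imports'])
print(flat)
#   md5 imports
# 0  a1      os
# 1  a1     sys
# 2  b2      re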
def check_messages(*messages: str) -> Callable: """decorator to store messages that are handled by a checker method""" def store_messages(func): func.checks_msgs = messages return func return store_messages
def function[check_messages, parameter[]]: constant[decorator to store messages that are handled by a checker method] def function[store_messages, parameter[func]]: name[func].checks_msgs assign[=] name[messages] return[name[func]] return[name[store_messages]]
keyword[def] identifier[check_messages] (* identifier[messages] : identifier[str] )-> identifier[Callable] : literal[string] keyword[def] identifier[store_messages] ( identifier[func] ): identifier[func] . identifier[checks_msgs] = identifier[messages] keyword[return] identifier[func] keyword[return] identifier[store_messages]
def check_messages(*messages: str) -> Callable: """decorator to store messages that are handled by a checker method""" def store_messages(func): func.checks_msgs = messages return func return store_messages
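A short usage sketch (message names invented): the decorator only annotates the function, and a later pass can read .checks_msgs to see which messages the checker method handles.

@check_messages('missing-docstring', 'empty-docstring')
def visit_functiondef(node):
    pass

print(visit_functiondef.checks_msgs)
# ('missing-docstring', 'empty-docstring')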
def xpath(source_xml, xpath_expr, req_format='string'): """ Filter xml based on an xpath expression. Purpose: This function applies an Xpath expression to the XML | supplied by source_xml. Returns a string subtree or | subtrees that match the Xpath expression. It can also return | an xml object if desired. @param source_xml: Plain text XML that will be filtered @type source_xml: str or lxml.etree.ElementTree.Element object @param xpath_expr: Xpath expression that we will filter the XML by. @type xpath_expr: str @param req_format: the desired format of the response, accepts string or | xml. @type req_format: str @returns: The filtered XML if filtering was successful. Otherwise, | an empty string. @rtype: str or ElementTree """ tree = source_xml if not isinstance(source_xml, ET.Element): tree = objectify.fromstring(source_xml) # clean up the namespace in the tags, as namespaces appear to confuse # xpath method for elem in tree.getiterator(): # beware of factory functions such as Comment if isinstance(elem.tag, basestring): i = elem.tag.find('}') if i >= 0: elem.tag = elem.tag[i+1:] # remove unused namespaces objectify.deannotate(tree, cleanup_namespaces=True) filtered_list = tree.xpath(xpath_expr) # Return string from the list of Elements or pure xml if req_format == 'xml': return filtered_list matches = ''.join(etree.tostring( element, pretty_print=True) for element in filtered_list) return matches if matches else ""
def function[xpath, parameter[source_xml, xpath_expr, req_format]]: constant[ Filter xml based on an xpath expression. Purpose: This function applies an Xpath expression to the XML | supplied by source_xml. Returns a string subtree or | subtrees that match the Xpath expression. It can also return | an xml object if desired. @param source_xml: Plain text XML that will be filtered @type source_xml: str or lxml.etree.ElementTree.Element object @param xpath_expr: Xpath expression that we will filter the XML by. @type xpath_expr: str @param req_format: the desired format of the response, accepts string or | xml. @type req_format: str @returns: The filtered XML if filtering was successful. Otherwise, | an empty string. @rtype: str or ElementTree ] variable[tree] assign[=] name[source_xml] if <ast.UnaryOp object at 0x7da20c6ab8e0> begin[:] variable[tree] assign[=] call[name[objectify].fromstring, parameter[name[source_xml]]] for taget[name[elem]] in starred[call[name[tree].getiterator, parameter[]]] begin[:] if call[name[isinstance], parameter[name[elem].tag, name[basestring]]] begin[:] variable[i] assign[=] call[name[elem].tag.find, parameter[constant[}]]] if compare[name[i] greater_or_equal[>=] constant[0]] begin[:] name[elem].tag assign[=] call[name[elem].tag][<ast.Slice object at 0x7da20c6a84f0>] call[name[objectify].deannotate, parameter[name[tree]]] variable[filtered_list] assign[=] call[name[tree].xpath, parameter[name[xpath_expr]]] if compare[name[req_format] equal[==] constant[xml]] begin[:] return[name[filtered_list]] variable[matches] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da20c6ab2e0>]] return[<ast.IfExp object at 0x7da20c6a91e0>]
keyword[def] identifier[xpath] ( identifier[source_xml] , identifier[xpath_expr] , identifier[req_format] = literal[string] ): literal[string] identifier[tree] = identifier[source_xml] keyword[if] keyword[not] identifier[isinstance] ( identifier[source_xml] , identifier[ET] . identifier[Element] ): identifier[tree] = identifier[objectify] . identifier[fromstring] ( identifier[source_xml] ) keyword[for] identifier[elem] keyword[in] identifier[tree] . identifier[getiterator] (): keyword[if] identifier[isinstance] ( identifier[elem] . identifier[tag] , identifier[basestring] ): identifier[i] = identifier[elem] . identifier[tag] . identifier[find] ( literal[string] ) keyword[if] identifier[i] >= literal[int] : identifier[elem] . identifier[tag] = identifier[elem] . identifier[tag] [ identifier[i] + literal[int] :] identifier[objectify] . identifier[deannotate] ( identifier[tree] , identifier[cleanup_namespaces] = keyword[True] ) identifier[filtered_list] = identifier[tree] . identifier[xpath] ( identifier[xpath_expr] ) keyword[if] identifier[req_format] == literal[string] : keyword[return] identifier[filtered_list] identifier[matches] = literal[string] . identifier[join] ( identifier[etree] . identifier[tostring] ( identifier[element] , identifier[pretty_print] = keyword[True] ) keyword[for] identifier[element] keyword[in] identifier[filtered_list] ) keyword[return] identifier[matches] keyword[if] identifier[matches] keyword[else] literal[string]
def xpath(source_xml, xpath_expr, req_format='string'): """ Filter xml based on an xpath expression. Purpose: This function applies an Xpath expression to the XML | supplied by source_xml. Returns a string subtree or | subtrees that match the Xpath expression. It can also return | an xml object if desired. @param source_xml: Plain text XML that will be filtered @type source_xml: str or lxml.etree.ElementTree.Element object @param xpath_expr: Xpath expression that we will filter the XML by. @type xpath_expr: str @param req_format: the desired format of the response, accepts string or | xml. @type req_format: str @returns: The filtered XML if filtering was successful. Otherwise, | an empty string. @rtype: str or ElementTree """ tree = source_xml if not isinstance(source_xml, ET.Element): tree = objectify.fromstring(source_xml) # depends on [control=['if'], data=[]] # clean up the namespace in the tags, as namespaces appear to confuse # xpath method for elem in tree.getiterator(): # beware of factory functions such as Comment if isinstance(elem.tag, basestring): i = elem.tag.find('}') if i >= 0: elem.tag = elem.tag[i + 1:] # depends on [control=['if'], data=['i']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['elem']] # remove unused namespaces objectify.deannotate(tree, cleanup_namespaces=True) filtered_list = tree.xpath(xpath_expr) # Return string from the list of Elements or pure xml if req_format == 'xml': return filtered_list # depends on [control=['if'], data=[]] matches = ''.join((etree.tostring(element, pretty_print=True) for element in filtered_list)) return matches if matches else ''
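A hedged usage sketch (namespace and tag names invented). Because the function strips namespaces from the tags before evaluating, the XPath expression needs no prefixes; note the surrounding module is Python 2 style (basestring), so joining the etree.tostring results assumes str output.

xml = ('<config xmlns="http://example.com/ns">'
       '<system><host-name>r1</host-name></system>'
       '</config>')

print(xpath(xml, '//host-name'))
# <host-name>r1</host-name>
print(len(xpath(xml, '//host-name', req_format='xml')))  # 1 Element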
def get_emails(self, mailinglist_dir, all, exclude_lists):
        """Generator function that gets the emails from each mailing list
        dump directory.

        If `all` is set to True, all the emails in the mbox will be
        imported; if not, it will just resume from the last message
        previously imported. The lists set in `exclude_lists` won't be
        imported.

        Yield:
            A tuple in the form: (mailing list name, email message,
            message index).

        """
        self.log("Getting emails dumps from: %s" % mailinglist_dir)

        # Get the list of directories ending with .mbox
        mailing_lists_mboxes = (mbox for mbox in os.listdir(mailinglist_dir)
                                if mbox.endswith('.mbox'))

        # Get messages from each mbox
        for mbox in mailing_lists_mboxes:
            mbox_path = os.path.join(mailinglist_dir, mbox, mbox)
            mailinglist_name = mbox.split('.')[0]

            # Check if the mailinglist is set not to be imported
            if exclude_lists and mailinglist_name in exclude_lists:
                continue

            # Find the index of the last imported message
            if all:
                n_msgs = 0
            else:
                try:
                    mailinglist = MailingList.objects.get(
                        name=mailinglist_name
                    )
                    n_msgs = mailinglist.last_imported_index
                except MailingList.DoesNotExist:
                    n_msgs = 0

            for index, msg in self.parse_emails(mbox_path, n_msgs):
                yield mailinglist_name, msg, index
def function[get_emails, parameter[self, mailinglist_dir, all, exclude_lists]]: constant[Generator function that gets the emails from each mailing list dump directory. If `all` is set to True, all the emails in the mbox will be imported; if not, it will just resume from the last message previously imported. The lists set in `exclude_lists` won't be imported. Yield: A tuple in the form: (mailing list name, email message, message index). ] call[name[self].log, parameter[binary_operation[constant[Getting emails dumps from: %s] <ast.Mod object at 0x7da2590d6920> name[mailinglist_dir]]]] variable[mailing_lists_mboxes] assign[=] <ast.GeneratorExp object at 0x7da18eb54400> for taget[name[mbox]] in starred[name[mailing_lists_mboxes]] begin[:] variable[mbox_path] assign[=] call[name[os].path.join, parameter[name[mailinglist_dir], name[mbox], name[mbox]]] variable[mailinglist_name] assign[=] call[call[name[mbox].split, parameter[constant[.]]]][constant[0]] if <ast.BoolOp object at 0x7da18eb54340> begin[:] continue if name[all] begin[:] variable[n_msgs] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da18eb548b0>, <ast.Name object at 0x7da18eb56a10>]]] in starred[call[name[self].parse_emails, parameter[name[mbox_path], name[n_msgs]]]] begin[:] <ast.Yield object at 0x7da18eb57d90>
keyword[def] identifier[get_emails] ( identifier[self] , identifier[mailinglist_dir] , identifier[all] , identifier[exclude_lists] ): literal[string] identifier[self] . identifier[log] ( literal[string] % identifier[mailinglist_dir] ) identifier[mailing_lists_mboxes] =( identifier[mbox] keyword[for] identifier[mbox] keyword[in] identifier[os] . identifier[listdir] ( identifier[mailinglist_dir] ) keyword[if] identifier[mbox] . identifier[endswith] ( literal[string] )) keyword[for] identifier[mbox] keyword[in] identifier[mailing_lists_mboxes] : identifier[mbox_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[mailinglist_dir] , identifier[mbox] , identifier[mbox] ) identifier[mailinglist_name] = identifier[mbox] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] identifier[exclude_lists] keyword[and] identifier[mailinglist_name] keyword[in] identifier[exclude_lists] : keyword[continue] keyword[if] identifier[all] : identifier[n_msgs] = literal[int] keyword[else] : keyword[try] : identifier[mailinglist] = identifier[MailingList] . identifier[objects] . identifier[get] ( identifier[name] = identifier[mailinglist_name] ) identifier[n_msgs] = identifier[mailinglist] . identifier[last_imported_index] keyword[except] identifier[MailingList] . identifier[DoesNotExist] : identifier[n_msgs] = literal[int] keyword[for] identifier[index] , identifier[msg] keyword[in] identifier[self] . identifier[parse_emails] ( identifier[mbox_path] , identifier[n_msgs] ): keyword[yield] identifier[mailinglist_name] , identifier[msg] , identifier[index]
def get_emails(self, mailinglist_dir, all, exclude_lists): """Generator function that gets the emails from each mailing list dump directory. If `all` is set to True, all the emails in the mbox will be imported; if not, it will just resume from the last message previously imported. The lists set in `exclude_lists` won't be imported. Yield: A tuple in the form: (mailing list name, email message, message index). """ self.log('Getting emails dumps from: %s' % mailinglist_dir) # Get the list of directories ending with .mbox mailing_lists_mboxes = (mbox for mbox in os.listdir(mailinglist_dir) if mbox.endswith('.mbox')) # Get messages from each mbox for mbox in mailing_lists_mboxes: mbox_path = os.path.join(mailinglist_dir, mbox, mbox) mailinglist_name = mbox.split('.')[0] # Check if the mailinglist is set not to be imported if exclude_lists and mailinglist_name in exclude_lists: continue # depends on [control=['if'], data=[]] # Find the index of the last imported message if all: n_msgs = 0 # depends on [control=['if'], data=[]] else: try: mailinglist = MailingList.objects.get(name=mailinglist_name) n_msgs = mailinglist.last_imported_index # depends on [control=['try'], data=[]] except MailingList.DoesNotExist: n_msgs = 0 # depends on [control=['except'], data=[]] for (index, msg) in self.parse_emails(mbox_path, n_msgs): yield (mailinglist_name, msg, index) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['mbox']]
def sym(self, nested_scope=None):
        """Return the corresponding symbolic number."""
        operation = self.children[0].operation()
        expr = self.children[1].sym(nested_scope)
        return operation(expr)
def function[sym, parameter[self, nested_scope]]: constant[Return the corresponding symbolic number.] variable[operation] assign[=] call[call[name[self].children][constant[0]].operation, parameter[]] variable[expr] assign[=] call[call[name[self].children][constant[1]].sym, parameter[name[nested_scope]]] return[call[name[operation], parameter[name[expr]]]]
keyword[def] identifier[sym] ( identifier[self] , identifier[nested_scope] = keyword[None] ): literal[string] identifier[operation] = identifier[self] . identifier[children] [ literal[int] ]. identifier[operation] () identifier[expr] = identifier[self] . identifier[children] [ literal[int] ]. identifier[sym] ( identifier[nested_scope] ) keyword[return] identifier[operation] ( identifier[expr] )
def sym(self, nested_scope=None): """Return the corresponding symbolic number.""" operation = self.children[0].operation() expr = self.children[1].sym(nested_scope) return operation(expr)
def options(parser, help_menu=False): """ Summary: parse cli parameter options Returns: TYPE: argparse object, parser argument set """ parser.add_argument("-p", "--profile", nargs='?', default="default", required=False, help="type (default: %(default)s)") parser.add_argument("-i", "--install", dest='install', default='NA', type=str, choices=VALID_INSTALL, required=False) parser.add_argument("-a", "--auto", dest='auto', action='store_true', required=False) parser.add_argument("-c", "--configure", dest='configure', action='store_true', required=False) parser.add_argument("-d", "--debug", dest='debug', action='store_true', required=False) parser.add_argument("-V", "--version", dest='version', action='store_true', required=False) parser.add_argument("-h", "--help", dest='help', action='store_true', required=False) return parser.parse_args()
def function[options, parameter[parser, help_menu]]: constant[ Summary: parse cli parameter options Returns: TYPE: argparse object, parser argument set ] call[name[parser].add_argument, parameter[constant[-p], constant[--profile]]] call[name[parser].add_argument, parameter[constant[-i], constant[--install]]] call[name[parser].add_argument, parameter[constant[-a], constant[--auto]]] call[name[parser].add_argument, parameter[constant[-c], constant[--configure]]] call[name[parser].add_argument, parameter[constant[-d], constant[--debug]]] call[name[parser].add_argument, parameter[constant[-V], constant[--version]]] call[name[parser].add_argument, parameter[constant[-h], constant[--help]]] return[call[name[parser].parse_args, parameter[]]]
keyword[def] identifier[options] ( identifier[parser] , identifier[help_menu] = keyword[False] ): literal[string] identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[nargs] = literal[string] , identifier[default] = literal[string] , identifier[required] = keyword[False] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[default] = literal[string] , identifier[type] = identifier[str] , identifier[choices] = identifier[VALID_INSTALL] , identifier[required] = keyword[False] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] , identifier[required] = keyword[False] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] , identifier[required] = keyword[False] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] , identifier[required] = keyword[False] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] , identifier[required] = keyword[False] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] , identifier[required] = keyword[False] ) keyword[return] identifier[parser] . identifier[parse_args] ()
def options(parser, help_menu=False): """ Summary: parse cli parameter options Returns: TYPE: argparse object, parser argument set """ parser.add_argument('-p', '--profile', nargs='?', default='default', required=False, help='type (default: %(default)s)') parser.add_argument('-i', '--install', dest='install', default='NA', type=str, choices=VALID_INSTALL, required=False) parser.add_argument('-a', '--auto', dest='auto', action='store_true', required=False) parser.add_argument('-c', '--configure', dest='configure', action='store_true', required=False) parser.add_argument('-d', '--debug', dest='debug', action='store_true', required=False) parser.add_argument('-V', '--version', dest='version', action='store_true', required=False) parser.add_argument('-h', '--help', dest='help', action='store_true', required=False) return parser.parse_args()
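A usage sketch; VALID_INSTALL is a module-level constant not shown here, so a stand-in is defined. add_help=False matters because the function registers its own -h/--help flag, which would clash with argparse's built-in one.

import argparse

VALID_INSTALL = ['pip', 'source']  # stand-in; the real values are not shown

parser = argparse.ArgumentParser(add_help=False)  # avoid clashing with -h
args = options(parser)
# With no CLI arguments this yields the declared defaults, e.g.:
# Namespace(profile='default', install='NA', auto=False, configure=False,
#           debug=False, version=False, help=False)
print(args.profile, args.install)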
def set_site(): """ This method is part of the prepare_data helper. Sets the site. Default implementation uses localhost. For production settings refine this helper. :return: """ from django.contrib.sites.models import Site from django.conf import settings # Initially set localhost as default domain # site = Site.objects.get(id=settings.SITE_ID) site.domain = 'http://localhost:8000' site.name = 'http://localhost:8000' site.save()
def function[set_site, parameter[]]: constant[ This method is part of the prepare_data helper. Sets the site. Default implementation uses localhost. For production settings refine this helper. :return: ] from relative_module[django.contrib.sites.models] import module[Site] from relative_module[django.conf] import module[settings] variable[site] assign[=] call[name[Site].objects.get, parameter[]] name[site].domain assign[=] constant[http://localhost:8000] name[site].name assign[=] constant[http://localhost:8000] call[name[site].save, parameter[]]
keyword[def] identifier[set_site] (): literal[string] keyword[from] identifier[django] . identifier[contrib] . identifier[sites] . identifier[models] keyword[import] identifier[Site] keyword[from] identifier[django] . identifier[conf] keyword[import] identifier[settings] identifier[site] = identifier[Site] . identifier[objects] . identifier[get] ( identifier[id] = identifier[settings] . identifier[SITE_ID] ) identifier[site] . identifier[domain] = literal[string] identifier[site] . identifier[name] = literal[string] identifier[site] . identifier[save] ()
def set_site(): """ This method is part of the prepare_data helper. Sets the site. Default implementation uses localhost. For production settings refine this helper. :return: """ from django.contrib.sites.models import Site from django.conf import settings # Initially set localhost as default domain # site = Site.objects.get(id=settings.SITE_ID) site.domain = 'http://localhost:8000' site.name = 'http://localhost:8000' site.save()
def getLinkInterfaces(self, node1, node2):
        '''
        Given two node names that identify a link, return the
        pair of interface names assigned at each endpoint (as
        a tuple in the same order as the nodes given).
        '''
        linkdata = self.getLink(node1,node2)
        return linkdata[node1],linkdata[node2]
def function[getLinkInterfaces, parameter[self, node1, node2]]: constant[ Given two node names that identify a link, return the pair of interface names assigned at each endpoint (as a tuple in the same order as the nodes given). ] variable[linkdata] assign[=] call[name[self].getLink, parameter[name[node1], name[node2]]] return[tuple[[<ast.Subscript object at 0x7da18eb57370>, <ast.Subscript object at 0x7da18eb56b60>]]]
keyword[def] identifier[getLinkInterfaces] ( identifier[self] , identifier[node1] , identifier[node2] ): literal[string] identifier[linkdata] = identifier[self] . identifier[getLink] ( identifier[node1] , identifier[node2] ) keyword[return] identifier[linkdata] [ identifier[node1] ], identifier[linkdata] [ identifier[node2] ]
def getLinkInterfaces(self, node1, node2): """ Given two node names that identify a link, return the pair of interface names assigned at each endpoint (as a tuple in the same order as the nodes given). """ linkdata = self.getLink(node1, node2) return (linkdata[node1], linkdata[node2])
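A minimal sketch of the expected contract, using an invented topology stub whose getLink() maps each node name to the interface at that end of the link.

class Topo:
    # Invented stub; a real topology would look the link record up.
    def getLink(self, a, b):
        return {a: 'eth0', b: 'eth1'}

Topo.getLinkInterfaces = getLinkInterfaces  # attach the method from above
print(Topo().getLinkInterfaces('h1', 's1'))  # ('eth0', 'eth1')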
def matches(self, new, old): ''' Whether two parameters match values. If either ``new`` or ``old`` is a NumPy array or Pandas Series or Index, then the result of ``np.array_equal`` will determine if the values match. Otherwise, the result of standard Python equality will be returned. Returns: True, if new and old match, False otherwise ''' if isinstance(new, np.ndarray) or isinstance(old, np.ndarray): return np.array_equal(new, old) if pd: if isinstance(new, pd.Series) or isinstance(old, pd.Series): return np.array_equal(new, old) if isinstance(new, pd.Index) or isinstance(old, pd.Index): return np.array_equal(new, old) try: # this handles the special but common case where there is a dict with array # or series as values (e.g. the .data property of a ColumnDataSource) if isinstance(new, dict) and isinstance(old, dict): if set(new.keys()) != set(old.keys()): return False return all(self.matches(new[k], old[k]) for k in new) return new == old # if the comparison fails for some reason, just punt and return no-match except ValueError: return False
def function[matches, parameter[self, new, old]]: constant[ Whether two parameters match values. If either ``new`` or ``old`` is a NumPy array or Pandas Series or Index, then the result of ``np.array_equal`` will determine if the values match. Otherwise, the result of standard Python equality will be returned. Returns: True, if new and old match, False otherwise ] if <ast.BoolOp object at 0x7da20c6aa140> begin[:] return[call[name[np].array_equal, parameter[name[new], name[old]]]] if name[pd] begin[:] if <ast.BoolOp object at 0x7da1b20ef940> begin[:] return[call[name[np].array_equal, parameter[name[new], name[old]]]] if <ast.BoolOp object at 0x7da1b20ed5d0> begin[:] return[call[name[np].array_equal, parameter[name[new], name[old]]]] <ast.Try object at 0x7da1b1f708b0>
keyword[def] identifier[matches] ( identifier[self] , identifier[new] , identifier[old] ): literal[string] keyword[if] identifier[isinstance] ( identifier[new] , identifier[np] . identifier[ndarray] ) keyword[or] identifier[isinstance] ( identifier[old] , identifier[np] . identifier[ndarray] ): keyword[return] identifier[np] . identifier[array_equal] ( identifier[new] , identifier[old] ) keyword[if] identifier[pd] : keyword[if] identifier[isinstance] ( identifier[new] , identifier[pd] . identifier[Series] ) keyword[or] identifier[isinstance] ( identifier[old] , identifier[pd] . identifier[Series] ): keyword[return] identifier[np] . identifier[array_equal] ( identifier[new] , identifier[old] ) keyword[if] identifier[isinstance] ( identifier[new] , identifier[pd] . identifier[Index] ) keyword[or] identifier[isinstance] ( identifier[old] , identifier[pd] . identifier[Index] ): keyword[return] identifier[np] . identifier[array_equal] ( identifier[new] , identifier[old] ) keyword[try] : keyword[if] identifier[isinstance] ( identifier[new] , identifier[dict] ) keyword[and] identifier[isinstance] ( identifier[old] , identifier[dict] ): keyword[if] identifier[set] ( identifier[new] . identifier[keys] ())!= identifier[set] ( identifier[old] . identifier[keys] ()): keyword[return] keyword[False] keyword[return] identifier[all] ( identifier[self] . identifier[matches] ( identifier[new] [ identifier[k] ], identifier[old] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[new] ) keyword[return] identifier[new] == identifier[old] keyword[except] identifier[ValueError] : keyword[return] keyword[False]
def matches(self, new, old): """ Whether two parameters match values. If either ``new`` or ``old`` is a NumPy array or Pandas Series or Index, then the result of ``np.array_equal`` will determine if the values match. Otherwise, the result of standard Python equality will be returned. Returns: True, if new and old match, False otherwise """ if isinstance(new, np.ndarray) or isinstance(old, np.ndarray): return np.array_equal(new, old) # depends on [control=['if'], data=[]] if pd: if isinstance(new, pd.Series) or isinstance(old, pd.Series): return np.array_equal(new, old) # depends on [control=['if'], data=[]] if isinstance(new, pd.Index) or isinstance(old, pd.Index): return np.array_equal(new, old) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] try: # this handles the special but common case where there is a dict with array # or series as values (e.g. the .data property of a ColumnDataSource) if isinstance(new, dict) and isinstance(old, dict): if set(new.keys()) != set(old.keys()): return False # depends on [control=['if'], data=[]] return all((self.matches(new[k], old[k]) for k in new)) # depends on [control=['if'], data=[]] return new == old # depends on [control=['try'], data=[]] # if the comparison fails for some reason, just punt and return no-match except ValueError: return False # depends on [control=['except'], data=[]]
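A hedged sketch of the comparison semantics. A throwaway class stands in for the real property descriptor, and np/pd are imported because the method refers to both at module scope.

import numpy as np
import pandas as pd  # the method guards on `pd`, so it must be in scope

class _Host:
    matches = matches  # reuse the method defined above for illustration

p = _Host()
print(p.matches(np.array([1, 2]), np.array([1, 2])))          # True
print(p.matches({'x': np.array([1])}, {'x': np.array([1])}))  # True
print(p.matches({'x': 1}, {'y': 1}))                          # False (keys)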
def __value_compare(self, target):
        """
        Compare the target against the expectation when arg_type is "VALUE"

        Args:
            Anything

        Return:
            Boolean
        """
        if self.expectation == "__ANY__":
            return True
        elif self.expectation == "__DEFINED__":
            return True if target is not None else False
        elif self.expectation == "__TYPE__":
            return True if type(target) == self.target_type else False #pylint:disable=unidiomatic-typecheck
        elif self.expectation == "__INSTANCE__":
            return True if isinstance(target, self.target_type.__class__) else False
        else:
            return True if target == self.expectation else False
def function[__value_compare, parameter[self, target]]: constant[ Compare the target against the expectation when arg_type is "VALUE" Args: Anything Return: Boolean ] if compare[name[self].expectation equal[==] constant[__ANY__]] begin[:] return[constant[True]]
keyword[def] identifier[__value_compare] ( identifier[self] , identifier[target] ): literal[string] keyword[if] identifier[self] . identifier[expectation] == literal[string] : keyword[return] keyword[True] keyword[elif] identifier[self] . identifier[expectation] == literal[string] : keyword[return] keyword[True] keyword[if] identifier[target] keyword[is] keyword[not] keyword[None] keyword[else] keyword[False] keyword[elif] identifier[self] . identifier[expectation] == literal[string] : keyword[return] keyword[True] keyword[if] identifier[type] ( identifier[target] )== identifier[self] . identifier[target_type] keyword[else] keyword[False] keyword[elif] identifier[self] . identifier[expectation] == literal[string] : keyword[return] keyword[True] keyword[if] identifier[isinstance] ( identifier[target] , identifier[self] . identifier[target_type] . identifier[__class__] ) keyword[else] keyword[False] keyword[else] : keyword[return] keyword[True] keyword[if] identifier[target] == identifier[self] . identifier[expectation] keyword[else] keyword[False]
def __value_compare(self, target): """ Comparing result based on expectation if arg_type is "VALUE" Args: Anything Return: Boolean """ if self.expectation == '__ANY__': return True # depends on [control=['if'], data=[]] elif self.expectation == '__DEFINED__': return True if target is not None else False # depends on [control=['if'], data=[]] elif self.expectation == '__TYPE__': return True if type(target) == self.target_type else False #pylint:disable=unidiomatic-typecheck # depends on [control=['if'], data=[]] elif self.expectation == '__INSTANCE__': return True if isinstance(target, self.target_type.__class__) else False # depends on [control=['if'], data=[]] else: return True if target == self.expectation else False
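The sentinel strings drive four different checks. Below is a context-free restatement with the instance attributes replaced by plain parameters; compare_value, expectation and target_type are illustrative stand-ins for self.expectation and self.target_type.

def compare_value(target, expectation, target_type=None):
    # mirrors the branch order of __value_compare above
    if expectation == "__ANY__":
        return True
    if expectation == "__DEFINED__":
        return target is not None
    if expectation == "__TYPE__":
        return type(target) == target_type
    if expectation == "__INSTANCE__":
        return isinstance(target, target_type.__class__)
    return target == expectation

print(compare_value(42, "__ANY__"))        # True
print(compare_value(None, "__DEFINED__"))  # False
print(compare_value(3, "__TYPE__", int))   # True
print(compare_value(3, 3))                 # True (plain equality fallback)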
def close(self): """Close the pooled shared connection.""" # Instead of actually closing the connection, # unshare it and/or return it to the pool. if self._con: self._pool.unshare(self._shared_con) self._shared_con = self._con = None
def function[close, parameter[self]]: constant[Close the pooled shared connection.] if name[self]._con begin[:] call[name[self]._pool.unshare, parameter[name[self]._shared_con]] name[self]._shared_con assign[=] constant[None]
keyword[def] identifier[close] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_con] : identifier[self] . identifier[_pool] . identifier[unshare] ( identifier[self] . identifier[_shared_con] ) identifier[self] . identifier[_shared_con] = identifier[self] . identifier[_con] = keyword[None]
def close(self): """Close the pooled shared connection.""" # Instead of actually closing the connection, # unshare it and/or return it to the pool. if self._con: self._pool.unshare(self._shared_con) self._shared_con = self._con = None # depends on [control=['if'], data=[]]
def set_final_window_rect(cls, settings, window): """Sets the final size and location of the main window of guake. The height is the window_height property, width is window_width and the horizontal alignment is given by window_alignment. """ # fetch settings height_percents = settings.general.get_int('window-height') width_percents = settings.general.get_int('window-width') halignment = settings.general.get_int('window-halignment') valignment = settings.general.get_int('window-valignment') vdisplacement = settings.general.get_int('window-vertical-displacement') hdisplacement = settings.general.get_int('window-horizontal-displacement') log.debug("set_final_window_rect") log.debug(" height_percents = %s", height_percents) log.debug(" width_percents = %s", width_percents) log.debug(" halignment = %s", halignment) log.debug(" valignment = %s", valignment) log.debug(" vdisplacement = %s", vdisplacement) log.debug(" hdisplacement = %s", hdisplacement) # get the rectangle just from the destination monitor screen = window.get_screen() monitor = cls.get_final_window_monitor(settings, window) window_rect = screen.get_monitor_geometry(monitor) log.debug("Current monitor geometry") log.debug(" window_rect.x: %s", window_rect.x) log.debug(" window_rect.y: %s", window_rect.y) log.debug(" window_rect.height: %s", window_rect.height) log.debug(" window_rect.width: %s", window_rect.width) log.debug("is unity: %s", cls.is_using_unity(settings, window)) # TODO PORT remove this UNITY is DEAD if cls.is_using_unity(settings, window): # For Ubuntu 12.10 and above, try to use dconf: # see if unity dock is hidden => unity_hide # and the width of unity dock => unity_dock # and the position of the unity dock. => unity_pos # found = False unity_hide = 0 unity_dock = 0 unity_pos = "Left" # float() conversion might mess things up. 
Add 0.01 so the comparison will always be # valid, even in case of float("10.10") = 10.099999999999999 if float(platform.linux_distribution()[1]) + 0.01 >= 12.10: try: unity_hide = int( subprocess.check_output([ '/usr/bin/dconf', 'read', '/org/compiz/profiles/unity/plugins/unityshell/launcher-hide-mode' ]) ) unity_dock = int( subprocess.check_output([ '/usr/bin/dconf', 'read', '/org/compiz/profiles/unity/plugins/unityshell/icon-size' ]) or "48" ) unity_pos = subprocess.check_output([ '/usr/bin/dconf', 'read', '/com/canonical/unity/launcher/launcher-position' ]) or "Left" # found = True except Exception as e: # in case of error, just ignore it, 'found' will not be set to True and so # we execute the fallback pass # FIXME: remove self.client dependency # if not found: # # Fallback: try to bet from gconf # unity_hide = self.client.get_int( # KEY('/apps/compiz-1/plugins/unityshell/screen0/options/launcher_hide_mode') # ) # unity_icon_size = self.client.get_int( # KEY('/apps/compiz-1/plugins/unityshell/screen0/options/icon_size') # ) # unity_dock = unity_icon_size + 17 # launcher_hide_mode = 1 => autohide # only adjust guake window width if Unity dock is positioned "Left" or "Right" if unity_hide != 1 and unity_pos not in ("Left", "Right"): log.debug( "correcting window width because of launcher position %s " "and width %s (from %s to %s)", unity_pos, unity_dock, window_rect.width, window_rect.width - unity_dock ) window_rect.width = window_rect.width - unity_dock total_width = window_rect.width total_height = window_rect.height log.debug("Correcteed monitor size:") log.debug(" total_width: %s", total_width) log.debug(" total_height: %s", total_height) window_rect.height = int(float(window_rect.height) * float(height_percents) / 100.0) window_rect.width = int(float(window_rect.width) * float(width_percents) / 100.0) if window_rect.width < total_width: if halignment == ALIGN_CENTER: # log.debug("aligning to center!") window_rect.x += (total_width - window_rect.width) / 2 elif halignment == ALIGN_LEFT: # log.debug("aligning to left!") window_rect.x += 0 + hdisplacement elif halignment == ALIGN_RIGHT: # log.debug("aligning to right!") window_rect.x += total_width - window_rect.width - hdisplacement if window_rect.height < total_height: if valignment == ALIGN_BOTTOM: window_rect.y += (total_height - window_rect.height) if valignment == ALIGN_TOP: window_rect.y += vdisplacement elif valignment == ALIGN_BOTTOM: window_rect.y -= vdisplacement if width_percents == 100 and height_percents == 100: log.debug("MAXIMIZING MAIN WINDOW") window.maximize() elif not FullscreenManager(settings, window).is_fullscreen(): log.debug("RESIZING MAIN WINDOW TO THE FOLLOWING VALUES:") window.unmaximize() log.debug(" window_rect.x: %s", window_rect.x) log.debug(" window_rect.y: %s", window_rect.y) log.debug(" window_rect.height: %s", window_rect.height) log.debug(" window_rect.width: %s", window_rect.width) # Note: move_resize is only on GTK3 window.resize(window_rect.width, window_rect.height) window.move(window_rect.x, window_rect.y) window.move(window_rect.x, window_rect.y) log.debug("Updated window position: %r", window.get_position()) return window_rect
def function[set_final_window_rect, parameter[cls, settings, window]]: constant[Sets the final size and location of the main window of guake. The height is the window_height property, width is window_width and the horizontal alignment is given by window_alignment. ] variable[height_percents] assign[=] call[name[settings].general.get_int, parameter[constant[window-height]]] variable[width_percents] assign[=] call[name[settings].general.get_int, parameter[constant[window-width]]] variable[halignment] assign[=] call[name[settings].general.get_int, parameter[constant[window-halignment]]] variable[valignment] assign[=] call[name[settings].general.get_int, parameter[constant[window-valignment]]] variable[vdisplacement] assign[=] call[name[settings].general.get_int, parameter[constant[window-vertical-displacement]]] variable[hdisplacement] assign[=] call[name[settings].general.get_int, parameter[constant[window-horizontal-displacement]]] call[name[log].debug, parameter[constant[set_final_window_rect]]] call[name[log].debug, parameter[constant[ height_percents = %s], name[height_percents]]] call[name[log].debug, parameter[constant[ width_percents = %s], name[width_percents]]] call[name[log].debug, parameter[constant[ halignment = %s], name[halignment]]] call[name[log].debug, parameter[constant[ valignment = %s], name[valignment]]] call[name[log].debug, parameter[constant[ vdisplacement = %s], name[vdisplacement]]] call[name[log].debug, parameter[constant[ hdisplacement = %s], name[hdisplacement]]] variable[screen] assign[=] call[name[window].get_screen, parameter[]] variable[monitor] assign[=] call[name[cls].get_final_window_monitor, parameter[name[settings], name[window]]] variable[window_rect] assign[=] call[name[screen].get_monitor_geometry, parameter[name[monitor]]] call[name[log].debug, parameter[constant[Current monitor geometry]]] call[name[log].debug, parameter[constant[ window_rect.x: %s], name[window_rect].x]] call[name[log].debug, parameter[constant[ window_rect.y: %s], name[window_rect].y]] call[name[log].debug, parameter[constant[ window_rect.height: %s], name[window_rect].height]] call[name[log].debug, parameter[constant[ window_rect.width: %s], name[window_rect].width]] call[name[log].debug, parameter[constant[is unity: %s], call[name[cls].is_using_unity, parameter[name[settings], name[window]]]]] if call[name[cls].is_using_unity, parameter[name[settings], name[window]]] begin[:] variable[unity_hide] assign[=] constant[0] variable[unity_dock] assign[=] constant[0] variable[unity_pos] assign[=] constant[Left] if compare[binary_operation[call[name[float], parameter[call[call[name[platform].linux_distribution, parameter[]]][constant[1]]]] + constant[0.01]] greater_or_equal[>=] constant[12.1]] begin[:] <ast.Try object at 0x7da1b2345270> if <ast.BoolOp object at 0x7da1b2347eb0> begin[:] call[name[log].debug, parameter[constant[correcting window width because of launcher position %s and width %s (from %s to %s)], name[unity_pos], name[unity_dock], name[window_rect].width, binary_operation[name[window_rect].width - name[unity_dock]]]] name[window_rect].width assign[=] binary_operation[name[window_rect].width - name[unity_dock]] variable[total_width] assign[=] name[window_rect].width variable[total_height] assign[=] name[window_rect].height call[name[log].debug, parameter[constant[Correcteed monitor size:]]] call[name[log].debug, parameter[constant[ total_width: %s], name[total_width]]] call[name[log].debug, parameter[constant[ total_height: %s], name[total_height]]] 
name[window_rect].height assign[=] call[name[int], parameter[binary_operation[binary_operation[call[name[float], parameter[name[window_rect].height]] * call[name[float], parameter[name[height_percents]]]] / constant[100.0]]]] name[window_rect].width assign[=] call[name[int], parameter[binary_operation[binary_operation[call[name[float], parameter[name[window_rect].width]] * call[name[float], parameter[name[width_percents]]]] / constant[100.0]]]] if compare[name[window_rect].width less[<] name[total_width]] begin[:] if compare[name[halignment] equal[==] name[ALIGN_CENTER]] begin[:] <ast.AugAssign object at 0x7da1b2346380> if compare[name[window_rect].height less[<] name[total_height]] begin[:] if compare[name[valignment] equal[==] name[ALIGN_BOTTOM]] begin[:] <ast.AugAssign object at 0x7da18f09e380> if compare[name[valignment] equal[==] name[ALIGN_TOP]] begin[:] <ast.AugAssign object at 0x7da18f09d600> if <ast.BoolOp object at 0x7da18f09ef50> begin[:] call[name[log].debug, parameter[constant[MAXIMIZING MAIN WINDOW]]] call[name[window].maximize, parameter[]] return[name[window_rect]]
keyword[def] identifier[set_final_window_rect] ( identifier[cls] , identifier[settings] , identifier[window] ): literal[string] identifier[height_percents] = identifier[settings] . identifier[general] . identifier[get_int] ( literal[string] ) identifier[width_percents] = identifier[settings] . identifier[general] . identifier[get_int] ( literal[string] ) identifier[halignment] = identifier[settings] . identifier[general] . identifier[get_int] ( literal[string] ) identifier[valignment] = identifier[settings] . identifier[general] . identifier[get_int] ( literal[string] ) identifier[vdisplacement] = identifier[settings] . identifier[general] . identifier[get_int] ( literal[string] ) identifier[hdisplacement] = identifier[settings] . identifier[general] . identifier[get_int] ( literal[string] ) identifier[log] . identifier[debug] ( literal[string] ) identifier[log] . identifier[debug] ( literal[string] , identifier[height_percents] ) identifier[log] . identifier[debug] ( literal[string] , identifier[width_percents] ) identifier[log] . identifier[debug] ( literal[string] , identifier[halignment] ) identifier[log] . identifier[debug] ( literal[string] , identifier[valignment] ) identifier[log] . identifier[debug] ( literal[string] , identifier[vdisplacement] ) identifier[log] . identifier[debug] ( literal[string] , identifier[hdisplacement] ) identifier[screen] = identifier[window] . identifier[get_screen] () identifier[monitor] = identifier[cls] . identifier[get_final_window_monitor] ( identifier[settings] , identifier[window] ) identifier[window_rect] = identifier[screen] . identifier[get_monitor_geometry] ( identifier[monitor] ) identifier[log] . identifier[debug] ( literal[string] ) identifier[log] . identifier[debug] ( literal[string] , identifier[window_rect] . identifier[x] ) identifier[log] . identifier[debug] ( literal[string] , identifier[window_rect] . identifier[y] ) identifier[log] . identifier[debug] ( literal[string] , identifier[window_rect] . identifier[height] ) identifier[log] . identifier[debug] ( literal[string] , identifier[window_rect] . identifier[width] ) identifier[log] . identifier[debug] ( literal[string] , identifier[cls] . identifier[is_using_unity] ( identifier[settings] , identifier[window] )) keyword[if] identifier[cls] . identifier[is_using_unity] ( identifier[settings] , identifier[window] ): identifier[unity_hide] = literal[int] identifier[unity_dock] = literal[int] identifier[unity_pos] = literal[string] keyword[if] identifier[float] ( identifier[platform] . identifier[linux_distribution] ()[ literal[int] ])+ literal[int] >= literal[int] : keyword[try] : identifier[unity_hide] = identifier[int] ( identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] , literal[string] ]) ) identifier[unity_dock] = identifier[int] ( identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] , literal[string] ]) keyword[or] literal[string] ) identifier[unity_pos] = identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] , literal[string] ]) keyword[or] literal[string] keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[pass] keyword[if] identifier[unity_hide] != literal[int] keyword[and] identifier[unity_pos] keyword[not] keyword[in] ( literal[string] , literal[string] ): identifier[log] . identifier[debug] ( literal[string] literal[string] , identifier[unity_pos] , identifier[unity_dock] , identifier[window_rect] . identifier[width] , identifier[window_rect] . 
identifier[width] - identifier[unity_dock] ) identifier[window_rect] . identifier[width] = identifier[window_rect] . identifier[width] - identifier[unity_dock] identifier[total_width] = identifier[window_rect] . identifier[width] identifier[total_height] = identifier[window_rect] . identifier[height] identifier[log] . identifier[debug] ( literal[string] ) identifier[log] . identifier[debug] ( literal[string] , identifier[total_width] ) identifier[log] . identifier[debug] ( literal[string] , identifier[total_height] ) identifier[window_rect] . identifier[height] = identifier[int] ( identifier[float] ( identifier[window_rect] . identifier[height] )* identifier[float] ( identifier[height_percents] )/ literal[int] ) identifier[window_rect] . identifier[width] = identifier[int] ( identifier[float] ( identifier[window_rect] . identifier[width] )* identifier[float] ( identifier[width_percents] )/ literal[int] ) keyword[if] identifier[window_rect] . identifier[width] < identifier[total_width] : keyword[if] identifier[halignment] == identifier[ALIGN_CENTER] : identifier[window_rect] . identifier[x] +=( identifier[total_width] - identifier[window_rect] . identifier[width] )/ literal[int] keyword[elif] identifier[halignment] == identifier[ALIGN_LEFT] : identifier[window_rect] . identifier[x] += literal[int] + identifier[hdisplacement] keyword[elif] identifier[halignment] == identifier[ALIGN_RIGHT] : identifier[window_rect] . identifier[x] += identifier[total_width] - identifier[window_rect] . identifier[width] - identifier[hdisplacement] keyword[if] identifier[window_rect] . identifier[height] < identifier[total_height] : keyword[if] identifier[valignment] == identifier[ALIGN_BOTTOM] : identifier[window_rect] . identifier[y] +=( identifier[total_height] - identifier[window_rect] . identifier[height] ) keyword[if] identifier[valignment] == identifier[ALIGN_TOP] : identifier[window_rect] . identifier[y] += identifier[vdisplacement] keyword[elif] identifier[valignment] == identifier[ALIGN_BOTTOM] : identifier[window_rect] . identifier[y] -= identifier[vdisplacement] keyword[if] identifier[width_percents] == literal[int] keyword[and] identifier[height_percents] == literal[int] : identifier[log] . identifier[debug] ( literal[string] ) identifier[window] . identifier[maximize] () keyword[elif] keyword[not] identifier[FullscreenManager] ( identifier[settings] , identifier[window] ). identifier[is_fullscreen] (): identifier[log] . identifier[debug] ( literal[string] ) identifier[window] . identifier[unmaximize] () identifier[log] . identifier[debug] ( literal[string] , identifier[window_rect] . identifier[x] ) identifier[log] . identifier[debug] ( literal[string] , identifier[window_rect] . identifier[y] ) identifier[log] . identifier[debug] ( literal[string] , identifier[window_rect] . identifier[height] ) identifier[log] . identifier[debug] ( literal[string] , identifier[window_rect] . identifier[width] ) identifier[window] . identifier[resize] ( identifier[window_rect] . identifier[width] , identifier[window_rect] . identifier[height] ) identifier[window] . identifier[move] ( identifier[window_rect] . identifier[x] , identifier[window_rect] . identifier[y] ) identifier[window] . identifier[move] ( identifier[window_rect] . identifier[x] , identifier[window_rect] . identifier[y] ) identifier[log] . identifier[debug] ( literal[string] , identifier[window] . identifier[get_position] ()) keyword[return] identifier[window_rect]
def set_final_window_rect(cls, settings, window): """Sets the final size and location of the main window of guake. The height is the window_height property, width is window_width and the horizontal alignment is given by window_alignment. """ # fetch settings height_percents = settings.general.get_int('window-height') width_percents = settings.general.get_int('window-width') halignment = settings.general.get_int('window-halignment') valignment = settings.general.get_int('window-valignment') vdisplacement = settings.general.get_int('window-vertical-displacement') hdisplacement = settings.general.get_int('window-horizontal-displacement') log.debug('set_final_window_rect') log.debug(' height_percents = %s', height_percents) log.debug(' width_percents = %s', width_percents) log.debug(' halignment = %s', halignment) log.debug(' valignment = %s', valignment) log.debug(' vdisplacement = %s', vdisplacement) log.debug(' hdisplacement = %s', hdisplacement) # get the rectangle just from the destination monitor screen = window.get_screen() monitor = cls.get_final_window_monitor(settings, window) window_rect = screen.get_monitor_geometry(monitor) log.debug('Current monitor geometry') log.debug(' window_rect.x: %s', window_rect.x) log.debug(' window_rect.y: %s', window_rect.y) log.debug(' window_rect.height: %s', window_rect.height) log.debug(' window_rect.width: %s', window_rect.width) log.debug('is unity: %s', cls.is_using_unity(settings, window)) # TODO PORT remove this UNITY is DEAD if cls.is_using_unity(settings, window): # For Ubuntu 12.10 and above, try to use dconf: # see if unity dock is hidden => unity_hide # and the width of unity dock => unity_dock # and the position of the unity dock. => unity_pos # found = False unity_hide = 0 unity_dock = 0 unity_pos = 'Left' # float() conversion might mess things up. 
Add 0.01 so the comparison will always be # valid, even in case of float("10.10") = 10.099999999999999 if float(platform.linux_distribution()[1]) + 0.01 >= 12.1: try: unity_hide = int(subprocess.check_output(['/usr/bin/dconf', 'read', '/org/compiz/profiles/unity/plugins/unityshell/launcher-hide-mode'])) unity_dock = int(subprocess.check_output(['/usr/bin/dconf', 'read', '/org/compiz/profiles/unity/plugins/unityshell/icon-size']) or '48') unity_pos = subprocess.check_output(['/usr/bin/dconf', 'read', '/com/canonical/unity/launcher/launcher-position']) or 'Left' # depends on [control=['try'], data=[]] # found = True except Exception as e: # in case of error, just ignore it, 'found' will not be set to True and so # we execute the fallback pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # FIXME: remove self.client dependency # if not found: # # Fallback: try to bet from gconf # unity_hide = self.client.get_int( # KEY('/apps/compiz-1/plugins/unityshell/screen0/options/launcher_hide_mode') # ) # unity_icon_size = self.client.get_int( # KEY('/apps/compiz-1/plugins/unityshell/screen0/options/icon_size') # ) # unity_dock = unity_icon_size + 17 # launcher_hide_mode = 1 => autohide # only adjust guake window width if Unity dock is positioned "Left" or "Right" if unity_hide != 1 and unity_pos not in ('Left', 'Right'): log.debug('correcting window width because of launcher position %s and width %s (from %s to %s)', unity_pos, unity_dock, window_rect.width, window_rect.width - unity_dock) window_rect.width = window_rect.width - unity_dock # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] total_width = window_rect.width total_height = window_rect.height log.debug('Correcteed monitor size:') log.debug(' total_width: %s', total_width) log.debug(' total_height: %s', total_height) window_rect.height = int(float(window_rect.height) * float(height_percents) / 100.0) window_rect.width = int(float(window_rect.width) * float(width_percents) / 100.0) if window_rect.width < total_width: if halignment == ALIGN_CENTER: # log.debug("aligning to center!") window_rect.x += (total_width - window_rect.width) / 2 # depends on [control=['if'], data=[]] elif halignment == ALIGN_LEFT: # log.debug("aligning to left!") window_rect.x += 0 + hdisplacement # depends on [control=['if'], data=[]] elif halignment == ALIGN_RIGHT: # log.debug("aligning to right!") window_rect.x += total_width - window_rect.width - hdisplacement # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['total_width']] if window_rect.height < total_height: if valignment == ALIGN_BOTTOM: window_rect.y += total_height - window_rect.height # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['total_height']] if valignment == ALIGN_TOP: window_rect.y += vdisplacement # depends on [control=['if'], data=[]] elif valignment == ALIGN_BOTTOM: window_rect.y -= vdisplacement # depends on [control=['if'], data=[]] if width_percents == 100 and height_percents == 100: log.debug('MAXIMIZING MAIN WINDOW') window.maximize() # depends on [control=['if'], data=[]] elif not FullscreenManager(settings, window).is_fullscreen(): log.debug('RESIZING MAIN WINDOW TO THE FOLLOWING VALUES:') window.unmaximize() log.debug(' window_rect.x: %s', window_rect.x) log.debug(' window_rect.y: %s', window_rect.y) log.debug(' window_rect.height: %s', window_rect.height) log.debug(' window_rect.width: %s', window_rect.width) # Note: move_resize is only on GTK3 
window.resize(window_rect.width, window_rect.height) window.move(window_rect.x, window_rect.y) window.move(window_rect.x, window_rect.y) log.debug('Updated window position: %r', window.get_position()) # depends on [control=['if'], data=[]] return window_rect
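Stripped of the GTK and Unity handling, the placement logic is percentage scaling plus alignment offsets. A simplified, GTK-free sketch of that arithmetic follows; the rect dict and the alignment constants are illustrative, not Guake's real types or values.

ALIGN_CENTER, ALIGN_LEFT, ALIGN_RIGHT = 0, 1, 2
ALIGN_TOP, ALIGN_BOTTOM = 0, 1

def place_window(monitor, width_pct, height_pct, halign, valign, hdisp=0, vdisp=0):
    # scale the monitor geometry by the requested percentages
    rect = dict(monitor)
    rect["width"] = int(monitor["width"] * width_pct / 100.0)
    rect["height"] = int(monitor["height"] * height_pct / 100.0)
    # horizontal alignment, with an optional displacement from the edge
    if halign == ALIGN_CENTER:
        rect["x"] += (monitor["width"] - rect["width"]) // 2
    elif halign == ALIGN_LEFT:
        rect["x"] += hdisp
    elif halign == ALIGN_RIGHT:
        rect["x"] += monitor["width"] - rect["width"] - hdisp
    # vertical alignment
    if valign == ALIGN_BOTTOM:
        rect["y"] += monitor["height"] - rect["height"] - vdisp
    else:
        rect["y"] += vdisp
    return rect

print(place_window({"x": 0, "y": 0, "width": 1920, "height": 1080}, 100, 40, ALIGN_CENTER, ALIGN_TOP))
# {'x': 0, 'y': 0, 'width': 1920, 'height': 432}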
def handle(self): """ Creates a child process that is fully controlled by this request handler, and serves data to and from it via the protocol handler. """ pid, fd = pty.fork() if pid: protocol = TelnetServerProtocolHandler(self.request, fd) protocol.handle() else: self.execute()
def function[handle, parameter[self]]: constant[ Creates a child process that is fully controlled by this request handler, and serves data to and from it via the protocol handler. ] <ast.Tuple object at 0x7da20c6c66b0> assign[=] call[name[pty].fork, parameter[]] if name[pid] begin[:] variable[protocol] assign[=] call[name[TelnetServerProtocolHandler], parameter[name[self].request, name[fd]]] call[name[protocol].handle, parameter[]]
keyword[def] identifier[handle] ( identifier[self] ): literal[string] identifier[pid] , identifier[fd] = identifier[pty] . identifier[fork] () keyword[if] identifier[pid] : identifier[protocol] = identifier[TelnetServerProtocolHandler] ( identifier[self] . identifier[request] , identifier[fd] ) identifier[protocol] . identifier[handle] () keyword[else] : identifier[self] . identifier[execute] ()
def handle(self): """ Creates a child process that is fully controlled by this request handler, and serves data to and from it via the protocol handler. """ (pid, fd) = pty.fork() if pid: protocol = TelnetServerProtocolHandler(self.request, fd) protocol.handle() # depends on [control=['if'], data=[]] else: self.execute()
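pty.fork gives the parent full control of the child through a single file descriptor; the request handler above feeds that descriptor into the Telnet protocol handler. A minimal, Unix-only sketch of the same pattern, with a plain echo child standing in for the protocol handler:

import os
import pty

pid, fd = pty.fork()
if pid == 0:
    # child: replace this process with the program to be controlled
    os.execvp("echo", ["echo", "hello from the pty child"])
else:
    # parent: read the child's terminal output through the pty master fd
    print(os.read(fd, 1024).decode(errors="replace"))
    os.waitpid(pid, 0)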
def intersects(self, other): '''Returns True iff this record's reference positions overlap the other record reference positions (and are on same chromosome)''' return self.CHROM == other.CHROM and self.POS <= other.ref_end_pos() and other.POS <= self.ref_end_pos()
def function[intersects, parameter[self, other]]: constant[Returns True iff this record's reference positions overlap the other record reference positions (and are on same chromosome)] return[<ast.BoolOp object at 0x7da1b1da4dc0>]
keyword[def] identifier[intersects] ( identifier[self] , identifier[other] ): literal[string] keyword[return] identifier[self] . identifier[CHROM] == identifier[other] . identifier[CHROM] keyword[and] identifier[self] . identifier[POS] <= identifier[other] . identifier[ref_end_pos] () keyword[and] identifier[other] . identifier[POS] <= identifier[self] . identifier[ref_end_pos] ()
def intersects(self, other): """Returns True iff this record's reference positions overlap the other record reference positions (and are on same chromosome)""" return self.CHROM == other.CHROM and self.POS <= other.ref_end_pos() and (other.POS <= self.ref_end_pos())
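Two records overlap when they share a chromosome and each one starts at or before the point where the other ends. A tiny stand-in record class to exercise the rule; ref_end_pos is assumed to return the record's last reference position.

class Rec:
    def __init__(self, chrom, pos, end):
        self.CHROM, self.POS, self._end = chrom, pos, end

    def ref_end_pos(self):
        return self._end

    def intersects(self, other):
        return (self.CHROM == other.CHROM
                and self.POS <= other.ref_end_pos()
                and other.POS <= self.ref_end_pos())

print(Rec("chr1", 100, 150).intersects(Rec("chr1", 140, 200)))  # True  (100-150 overlaps 140-200)
print(Rec("chr1", 100, 150).intersects(Rec("chr2", 140, 200)))  # False (different chromosome)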
def as_dict(self): """ :return: Table data as a |dict| instance. :rtype: dict :Sample Code: .. code:: python from tabledata import TableData TableData( "sample", ["a", "b"], [[1, 2], [3.3, 4.4]] ).as_dict() :Output: .. code:: json {'sample': [OrderedDict([('a', 1), ('b', 2)]), OrderedDict([('a', 3.3), ('b', 4.4)])]} """ dict_body = [] for row in self.rows: if not row: continue values = [ (header, value) for header, value in zip(self.headers, row) if value is not None ] if not values: continue dict_body.append(OrderedDict(values)) return {self.table_name: dict_body}
def function[as_dict, parameter[self]]: constant[ :return: Table data as a |dict| instance. :rtype: dict :Sample Code: .. code:: python from tabledata import TableData TableData( "sample", ["a", "b"], [[1, 2], [3.3, 4.4]] ).as_dict() :Output: .. code:: json {'sample': [OrderedDict([('a', 1), ('b', 2)]), OrderedDict([('a', 3.3), ('b', 4.4)])]} ] variable[dict_body] assign[=] list[[]] for taget[name[row]] in starred[name[self].rows] begin[:] if <ast.UnaryOp object at 0x7da2044c2e60> begin[:] continue variable[values] assign[=] <ast.ListComp object at 0x7da2044c0df0> if <ast.UnaryOp object at 0x7da2044c3ac0> begin[:] continue call[name[dict_body].append, parameter[call[name[OrderedDict], parameter[name[values]]]]] return[dictionary[[<ast.Attribute object at 0x7da2044c2ec0>], [<ast.Name object at 0x7da2044c1a20>]]]
keyword[def] identifier[as_dict] ( identifier[self] ): literal[string] identifier[dict_body] =[] keyword[for] identifier[row] keyword[in] identifier[self] . identifier[rows] : keyword[if] keyword[not] identifier[row] : keyword[continue] identifier[values] =[ ( identifier[header] , identifier[value] ) keyword[for] identifier[header] , identifier[value] keyword[in] identifier[zip] ( identifier[self] . identifier[headers] , identifier[row] ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] ] keyword[if] keyword[not] identifier[values] : keyword[continue] identifier[dict_body] . identifier[append] ( identifier[OrderedDict] ( identifier[values] )) keyword[return] { identifier[self] . identifier[table_name] : identifier[dict_body] }
def as_dict(self): """ :return: Table data as a |dict| instance. :rtype: dict :Sample Code: .. code:: python from tabledata import TableData TableData( "sample", ["a", "b"], [[1, 2], [3.3, 4.4]] ).as_dict() :Output: .. code:: json {'sample': [OrderedDict([('a', 1), ('b', 2)]), OrderedDict([('a', 3.3), ('b', 4.4)])]} """ dict_body = [] for row in self.rows: if not row: continue # depends on [control=['if'], data=[]] values = [(header, value) for (header, value) in zip(self.headers, row) if value is not None] if not values: continue # depends on [control=['if'], data=[]] dict_body.append(OrderedDict(values)) # depends on [control=['for'], data=['row']] return {self.table_name: dict_body}
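The conversion drops empty rows and None cells, then zips the remaining values against the headers. A standalone rerun of the same steps on made-up data, with no TableData instance involved:

from collections import OrderedDict

headers = ["a", "b"]
rows = [[1, 2], [3.3, None], []]

dict_body = []
for row in rows:
    if not row:
        continue                      # skip empty rows entirely
    values = [(h, v) for h, v in zip(headers, row) if v is not None]
    if values:                        # skip rows where every cell was None
        dict_body.append(OrderedDict(values))

print({"sample": dict_body})
# {'sample': [OrderedDict([('a', 1), ('b', 2)]), OrderedDict([('a', 3.3)])]}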
def save(hdf5_filename, array): """ Export a numpy array to a HDF5 file. Arguments: hdf5_filename (str): A filename to which to save the HDF5 data array (numpy.ndarray): The numpy array to save to HDF5 Returns: String. The expanded filename that now holds the HDF5 data """ # Expand filename to be absolute hdf5_filename = os.path.expanduser(hdf5_filename) try: h = h5py.File(hdf5_filename, "w") h.create_dataset('CUTOUT', data=array) h.close() except Exception as e: raise ValueError("Could not save HDF5 file {0}.".format(hdf5_filename)) return hdf5_filename
def function[save, parameter[hdf5_filename, array]]: constant[ Export a numpy array to a HDF5 file. Arguments: hdf5_filename (str): A filename to which to save the HDF5 data array (numpy.ndarray): The numpy array to save to HDF5 Returns: String. The expanded filename that now holds the HDF5 data ] variable[hdf5_filename] assign[=] call[name[os].path.expanduser, parameter[name[hdf5_filename]]] <ast.Try object at 0x7da1b0212e60> return[name[hdf5_filename]]
keyword[def] identifier[save] ( identifier[hdf5_filename] , identifier[array] ): literal[string] identifier[hdf5_filename] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[hdf5_filename] ) keyword[try] : identifier[h] = identifier[h5py] . identifier[File] ( identifier[hdf5_filename] , literal[string] ) identifier[h] . identifier[create_dataset] ( literal[string] , identifier[data] = identifier[array] ) identifier[h] . identifier[close] () keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[hdf5_filename] )) keyword[return] identifier[hdf5_filename]
def save(hdf5_filename, array): """ Export a numpy array to a HDF5 file. Arguments: hdf5_filename (str): A filename to which to save the HDF5 data array (numpy.ndarray): The numpy array to save to HDF5 Returns: String. The expanded filename that now holds the HDF5 data """ # Expand filename to be absolute hdf5_filename = os.path.expanduser(hdf5_filename) try: h = h5py.File(hdf5_filename, 'w') h.create_dataset('CUTOUT', data=array) h.close() # depends on [control=['try'], data=[]] except Exception as e: raise ValueError('Could not save HDF5 file {0}.'.format(hdf5_filename)) # depends on [control=['except'], data=[]] return hdf5_filename
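A quick round trip using the same h5py calls, assuming h5py is installed and a temporary directory is writable:

import os
import tempfile

import h5py
import numpy as np

arr = np.arange(12).reshape(3, 4)
path = os.path.join(tempfile.mkdtemp(), "cutout.h5")

with h5py.File(path, "w") as h:
    h.create_dataset("CUTOUT", data=arr)          # same dataset name as above

with h5py.File(path, "r") as h:
    print(np.array_equal(h["CUTOUT"][...], arr))  # True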
def get_row_data(self, row, name=None): """ Returns a dict with all available data for a row in the extension Parameters ---------- row : tuple, list, string A valid index for the extension DataFrames name : string, optional If given, adds a key 'name' with the given value to the dict. In that case the dict can be used directly to build a new extension. Returns ------- dict object with the data (pandas DataFrame)for the specific rows """ retdict = {} for rowname, data in zip(self.get_DataFrame(), self.get_DataFrame(data=True)): retdict[rowname] = pd.DataFrame(data.ix[row]) if name: retdict['name'] = name return retdict
def function[get_row_data, parameter[self, row, name]]: constant[ Returns a dict with all available data for a row in the extension Parameters ---------- row : tuple, list, string A valid index for the extension DataFrames name : string, optional If given, adds a key 'name' with the given value to the dict. In that case the dict can be used directly to build a new extension. Returns ------- dict object with the data (pandas DataFrame)for the specific rows ] variable[retdict] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b0479b40>, <ast.Name object at 0x7da1b047b460>]]] in starred[call[name[zip], parameter[call[name[self].get_DataFrame, parameter[]], call[name[self].get_DataFrame, parameter[]]]]] begin[:] call[name[retdict]][name[rowname]] assign[=] call[name[pd].DataFrame, parameter[call[name[data].ix][name[row]]]] if name[name] begin[:] call[name[retdict]][constant[name]] assign[=] name[name] return[name[retdict]]
keyword[def] identifier[get_row_data] ( identifier[self] , identifier[row] , identifier[name] = keyword[None] ): literal[string] identifier[retdict] ={} keyword[for] identifier[rowname] , identifier[data] keyword[in] identifier[zip] ( identifier[self] . identifier[get_DataFrame] (), identifier[self] . identifier[get_DataFrame] ( identifier[data] = keyword[True] )): identifier[retdict] [ identifier[rowname] ]= identifier[pd] . identifier[DataFrame] ( identifier[data] . identifier[ix] [ identifier[row] ]) keyword[if] identifier[name] : identifier[retdict] [ literal[string] ]= identifier[name] keyword[return] identifier[retdict]
def get_row_data(self, row, name=None): """ Returns a dict with all available data for a row in the extension Parameters ---------- row : tuple, list, string A valid index for the extension DataFrames name : string, optional If given, adds a key 'name' with the given value to the dict. In that case the dict can be used directly to build a new extension. Returns ------- dict object with the data (pandas DataFrame)for the specific rows """ retdict = {} for (rowname, data) in zip(self.get_DataFrame(), self.get_DataFrame(data=True)): retdict[rowname] = pd.DataFrame(data.ix[row]) # depends on [control=['for'], data=[]] if name: retdict['name'] = name # depends on [control=['if'], data=[]] return retdict
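Note that the data.ix indexer used above was removed in pandas 1.0; on current pandas the same row extraction goes through .loc (or .iloc). A small sketch with a made-up DataFrame and key, not the real extension object:

import pandas as pd

data = pd.DataFrame({"reg1": [1.0, 2.0], "reg2": [3.0, 4.0]},
                    index=["emissions", "water"])
row = "emissions"

retdict = {"F": pd.DataFrame(data.loc[row]), "name": "single_row"}
print(retdict["F"])
#       emissions
# reg1        1.0
# reg2        3.0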
def _reset(self): """ Rebuilds structure for AST and resets internal data. """ self._filename = None self._block_map = {} self._ast = [] self._ast.append(None) # header self._ast.append([]) # options list self._ast.append([])
def function[_reset, parameter[self]]: constant[ Rebuilds structure for AST and resets internal data. ] name[self]._filename assign[=] constant[None] name[self]._block_map assign[=] dictionary[[], []] name[self]._ast assign[=] list[[]] call[name[self]._ast.append, parameter[constant[None]]] call[name[self]._ast.append, parameter[list[[]]]] call[name[self]._ast.append, parameter[list[[]]]]
keyword[def] identifier[_reset] ( identifier[self] ): literal[string] identifier[self] . identifier[_filename] = keyword[None] identifier[self] . identifier[_block_map] ={} identifier[self] . identifier[_ast] =[] identifier[self] . identifier[_ast] . identifier[append] ( keyword[None] ) identifier[self] . identifier[_ast] . identifier[append] ([]) identifier[self] . identifier[_ast] . identifier[append] ([])
def _reset(self): """ Rebuilds structure for AST and resets internal data. """ self._filename = None self._block_map = {} self._ast = [] self._ast.append(None) # header self._ast.append([]) # options list self._ast.append([])
def computeGaussKernel(x): """Compute the gaussian kernel on a 1D vector.""" xnorm = np.power(euclidean_distances(x, x), 2) return np.exp(-xnorm / (2.0))
def function[computeGaussKernel, parameter[x]]: constant[Compute the gaussian kernel on a 1D vector.] variable[xnorm] assign[=] call[name[np].power, parameter[call[name[euclidean_distances], parameter[name[x], name[x]]], constant[2]]] return[call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b0158a60> / constant[2.0]]]]]
keyword[def] identifier[computeGaussKernel] ( identifier[x] ): literal[string] identifier[xnorm] = identifier[np] . identifier[power] ( identifier[euclidean_distances] ( identifier[x] , identifier[x] ), literal[int] ) keyword[return] identifier[np] . identifier[exp] (- identifier[xnorm] /( literal[int] ))
def computeGaussKernel(x): """Compute the gaussian kernel on a 1D vector.""" xnorm = np.power(euclidean_distances(x, x), 2) return np.exp(-xnorm / 2.0)
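Despite the 1D wording in the docstring, the function returns an n-by-n RBF kernel matrix (with sigma^2 = 1 implied by the /2.0 denominator). The check below reproduces it with plain NumPy:

import numpy as np
from sklearn.metrics.pairwise import euclidean_distances

x = np.array([[0.0], [1.0], [3.0]])
k_fast = np.exp(-np.power(euclidean_distances(x, x), 2) / 2.0)   # same expression as above
k_manual = np.exp(-((x - x.T) ** 2) / 2.0)                       # explicit 1D pairwise version
print(np.allclose(k_fast, k_manual))  # True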
def find_prime_polynomials(generator=2, c_exp=8, fast_primes=False, single=False): '''Compute the list of prime polynomials for the given generator and galois field characteristic exponent.''' # fast_primes will output less results but will be significantly faster. # single will output the first prime polynomial found, so if all you want is to just find one prime polynomial to generate the LUT for Reed-Solomon to work, then just use that. # A prime polynomial (necessarily irreducible) is necessary to reduce the multiplications in the Galois Field, so as to avoid overflows. # Why do we need a "prime polynomial"? Can't we just reduce modulo 255 (for GF(2^8) for example)? Because we need the values to be unique. # For example: if the generator (alpha) = 2 and c_exp = 8 (GF(2^8) == GF(256)), then the generated Galois Field (0, 1, α, α^1, α^2, ..., α^(p-1)) will be galois field it becomes 0, 1, 2, 4, 8, 16, etc. However, upon reaching 128, the next value will be doubled (ie, next power of 2), which will give 256. Then we must reduce, because we have overflowed above the maximum value of 255. But, if we modulo 255, this will generate 256 == 1. Then 2, 4, 8, 16, etc. giving us a repeating pattern of numbers. This is very bad, as it's then not anymore a bijection (ie, a non-zero value doesn't have a unique index). That's why we can't just modulo 255, but we need another number above 255, which is called the prime polynomial. # Why so much hassle? Because we are using precomputed look-up tables for multiplication: instead of multiplying a*b, we precompute alpha^a, alpha^b and alpha^(a+b), so that we can just use our lookup table at alpha^(a+b) and get our result. But just like in our original field we had 0,1,2,...,p-1 distinct unique values, in our "LUT" field using alpha we must have unique distinct values (we don't care that they are different from the original field as long as they are unique and distinct). That's why we need to avoid duplicated values, and to avoid duplicated values we need to use a prime irreducible polynomial. # Here is implemented a bruteforce approach to find all these prime polynomials, by generating every possible prime polynomials (ie, every integers between field_charac+1 and field_charac*2), and then we build the whole Galois Field, and we reject the candidate prime polynomial if it duplicates even one value or if it generates a value above field_charac (ie, cause an overflow). # Note that this algorithm is slow if the field is too big (above 12), because it's an exhaustive search algorithm. There are probabilistic approaches, and almost surely prime approaches, but there is no determistic polynomial time algorithm to find irreducible monic polynomials. More info can be found at: http://people.mpi-inf.mpg.de/~csaha/lectures/lec9.pdf # Another faster algorithm may be found at Adleman, Leonard M., and Hendrik W. Lenstra. "Finding irreducible polynomials over finite fields." Proceedings of the eighteenth annual ACM symposium on Theory of computing. ACM, 1986. 
# Prepare the finite field characteristic (2^p - 1), this also represent the maximum possible value in this field root_charac = 2 # we're in GF(2) field_charac = int(root_charac**c_exp - 1) field_charac_next = int(root_charac**(c_exp+1) - 1) prim_candidates = [] if fast_primes: prim_candidates = rwh_primes1(field_charac_next) # generate maybe prime polynomials and check later if they really are irreducible prim_candidates = [x for x in prim_candidates if x > field_charac] # filter out too small primes else: prim_candidates = _range(field_charac+2, field_charac_next, root_charac) # try each possible prime polynomial, but skip even numbers (because divisible by 2 so necessarily not irreducible) # Start of the main loop correct_primes = [] for prim in prim_candidates: # try potential candidates primitive irreducible polys seen = bytearray(field_charac+1) # memory variable to indicate if a value was already generated in the field (value at index x is set to 1) or not (set to 0 by default) conflict = False # flag to know if there was at least one conflict # Second loop, build the whole Galois Field x = GF2int(1) for i in _range(field_charac): # Compute the next value in the field (ie, the next power of alpha/generator) x = x.multiply(generator, prim, field_charac+1) # Rejection criterion: if the value overflowed (above field_charac) or is a duplicate of a previously generated power of alpha, then we reject this polynomial (not prime) if x > field_charac or seen[x] == 1: conflict = True break # Else we flag this value as seen (to maybe detect future duplicates), and we continue onto the next power of alpha else: seen[x] = 1 # End of the second loop: if there's no conflict (no overflow nor duplicated value), this is a prime polynomial! if not conflict: correct_primes.append(prim) if single: return prim # Return the list of all prime polynomials return correct_primes
def function[find_prime_polynomials, parameter[generator, c_exp, fast_primes, single]]: constant[Compute the list of prime polynomials for the given generator and galois field characteristic exponent.] variable[root_charac] assign[=] constant[2] variable[field_charac] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[root_charac] ** name[c_exp]] - constant[1]]]] variable[field_charac_next] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[root_charac] ** binary_operation[name[c_exp] + constant[1]]] - constant[1]]]] variable[prim_candidates] assign[=] list[[]] if name[fast_primes] begin[:] variable[prim_candidates] assign[=] call[name[rwh_primes1], parameter[name[field_charac_next]]] variable[prim_candidates] assign[=] <ast.ListComp object at 0x7da18bccbd90> variable[correct_primes] assign[=] list[[]] for taget[name[prim]] in starred[name[prim_candidates]] begin[:] variable[seen] assign[=] call[name[bytearray], parameter[binary_operation[name[field_charac] + constant[1]]]] variable[conflict] assign[=] constant[False] variable[x] assign[=] call[name[GF2int], parameter[constant[1]]] for taget[name[i]] in starred[call[name[_range], parameter[name[field_charac]]]] begin[:] variable[x] assign[=] call[name[x].multiply, parameter[name[generator], name[prim], binary_operation[name[field_charac] + constant[1]]]] if <ast.BoolOp object at 0x7da18bcc8370> begin[:] variable[conflict] assign[=] constant[True] break if <ast.UnaryOp object at 0x7da18f00d540> begin[:] call[name[correct_primes].append, parameter[name[prim]]] if name[single] begin[:] return[name[prim]] return[name[correct_primes]]
keyword[def] identifier[find_prime_polynomials] ( identifier[generator] = literal[int] , identifier[c_exp] = literal[int] , identifier[fast_primes] = keyword[False] , identifier[single] = keyword[False] ): literal[string] identifier[root_charac] = literal[int] identifier[field_charac] = identifier[int] ( identifier[root_charac] ** identifier[c_exp] - literal[int] ) identifier[field_charac_next] = identifier[int] ( identifier[root_charac] **( identifier[c_exp] + literal[int] )- literal[int] ) identifier[prim_candidates] =[] keyword[if] identifier[fast_primes] : identifier[prim_candidates] = identifier[rwh_primes1] ( identifier[field_charac_next] ) identifier[prim_candidates] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[prim_candidates] keyword[if] identifier[x] > identifier[field_charac] ] keyword[else] : identifier[prim_candidates] = identifier[_range] ( identifier[field_charac] + literal[int] , identifier[field_charac_next] , identifier[root_charac] ) identifier[correct_primes] =[] keyword[for] identifier[prim] keyword[in] identifier[prim_candidates] : identifier[seen] = identifier[bytearray] ( identifier[field_charac] + literal[int] ) identifier[conflict] = keyword[False] identifier[x] = identifier[GF2int] ( literal[int] ) keyword[for] identifier[i] keyword[in] identifier[_range] ( identifier[field_charac] ): identifier[x] = identifier[x] . identifier[multiply] ( identifier[generator] , identifier[prim] , identifier[field_charac] + literal[int] ) keyword[if] identifier[x] > identifier[field_charac] keyword[or] identifier[seen] [ identifier[x] ]== literal[int] : identifier[conflict] = keyword[True] keyword[break] keyword[else] : identifier[seen] [ identifier[x] ]= literal[int] keyword[if] keyword[not] identifier[conflict] : identifier[correct_primes] . identifier[append] ( identifier[prim] ) keyword[if] identifier[single] : keyword[return] identifier[prim] keyword[return] identifier[correct_primes]
def find_prime_polynomials(generator=2, c_exp=8, fast_primes=False, single=False): """Compute the list of prime polynomials for the given generator and galois field characteristic exponent.""" # fast_primes will output less results but will be significantly faster. # single will output the first prime polynomial found, so if all you want is to just find one prime polynomial to generate the LUT for Reed-Solomon to work, then just use that. # A prime polynomial (necessarily irreducible) is necessary to reduce the multiplications in the Galois Field, so as to avoid overflows. # Why do we need a "prime polynomial"? Can't we just reduce modulo 255 (for GF(2^8) for example)? Because we need the values to be unique. # For example: if the generator (alpha) = 2 and c_exp = 8 (GF(2^8) == GF(256)), then the generated Galois Field (0, 1, α, α^1, α^2, ..., α^(p-1)) will be galois field it becomes 0, 1, 2, 4, 8, 16, etc. However, upon reaching 128, the next value will be doubled (ie, next power of 2), which will give 256. Then we must reduce, because we have overflowed above the maximum value of 255. But, if we modulo 255, this will generate 256 == 1. Then 2, 4, 8, 16, etc. giving us a repeating pattern of numbers. This is very bad, as it's then not anymore a bijection (ie, a non-zero value doesn't have a unique index). That's why we can't just modulo 255, but we need another number above 255, which is called the prime polynomial. # Why so much hassle? Because we are using precomputed look-up tables for multiplication: instead of multiplying a*b, we precompute alpha^a, alpha^b and alpha^(a+b), so that we can just use our lookup table at alpha^(a+b) and get our result. But just like in our original field we had 0,1,2,...,p-1 distinct unique values, in our "LUT" field using alpha we must have unique distinct values (we don't care that they are different from the original field as long as they are unique and distinct). That's why we need to avoid duplicated values, and to avoid duplicated values we need to use a prime irreducible polynomial. # Here is implemented a bruteforce approach to find all these prime polynomials, by generating every possible prime polynomials (ie, every integers between field_charac+1 and field_charac*2), and then we build the whole Galois Field, and we reject the candidate prime polynomial if it duplicates even one value or if it generates a value above field_charac (ie, cause an overflow). # Note that this algorithm is slow if the field is too big (above 12), because it's an exhaustive search algorithm. There are probabilistic approaches, and almost surely prime approaches, but there is no determistic polynomial time algorithm to find irreducible monic polynomials. More info can be found at: http://people.mpi-inf.mpg.de/~csaha/lectures/lec9.pdf # Another faster algorithm may be found at Adleman, Leonard M., and Hendrik W. Lenstra. "Finding irreducible polynomials over finite fields." Proceedings of the eighteenth annual ACM symposium on Theory of computing. ACM, 1986. 
# Prepare the finite field characteristic (2^p - 1), this also represent the maximum possible value in this field root_charac = 2 # we're in GF(2) field_charac = int(root_charac ** c_exp - 1) field_charac_next = int(root_charac ** (c_exp + 1) - 1) prim_candidates = [] if fast_primes: prim_candidates = rwh_primes1(field_charac_next) # generate maybe prime polynomials and check later if they really are irreducible prim_candidates = [x for x in prim_candidates if x > field_charac] # filter out too small primes # depends on [control=['if'], data=[]] else: prim_candidates = _range(field_charac + 2, field_charac_next, root_charac) # try each possible prime polynomial, but skip even numbers (because divisible by 2 so necessarily not irreducible) # Start of the main loop correct_primes = [] for prim in prim_candidates: # try potential candidates primitive irreducible polys seen = bytearray(field_charac + 1) # memory variable to indicate if a value was already generated in the field (value at index x is set to 1) or not (set to 0 by default) conflict = False # flag to know if there was at least one conflict # Second loop, build the whole Galois Field x = GF2int(1) for i in _range(field_charac): # Compute the next value in the field (ie, the next power of alpha/generator) x = x.multiply(generator, prim, field_charac + 1) # Rejection criterion: if the value overflowed (above field_charac) or is a duplicate of a previously generated power of alpha, then we reject this polynomial (not prime) if x > field_charac or seen[x] == 1: conflict = True break # depends on [control=['if'], data=[]] else: # Else we flag this value as seen (to maybe detect future duplicates), and we continue onto the next power of alpha seen[x] = 1 # depends on [control=['for'], data=[]] # End of the second loop: if there's no conflict (no overflow nor duplicated value), this is a prime polynomial! if not conflict: correct_primes.append(prim) if single: return prim # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prim']] # Return the list of all prime polynomials return correct_primes
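The rejection criterion (discard a candidate as soon as a generated power of alpha overflows the field or repeats) can be checked without the GF2int class by using a LUT-free carry-less multiply. In the sketch below, gf_mult_nolut and is_prime_poly are standalone stand-ins for the class-based code; 0x11d (285) is the well-known prime polynomial for GF(2^8) with generator 2.

def gf_mult_nolut(x, y, prim, field_charac_full=256):
    # Russian-peasant multiplication in GF(2)[x], reduced modulo prim
    r = 0
    while y:
        if y & 1:
            r ^= x
        y >>= 1
        x <<= 1
        if x & field_charac_full:
            x ^= prim
    return r

def is_prime_poly(prim, generator=2, c_exp=8):
    field_charac = 2 ** c_exp - 1
    seen, x = set(), 1
    for _ in range(field_charac):
        x = gf_mult_nolut(x, generator, prim, field_charac + 1)
        if x > field_charac or x in seen:
            return False          # overflow or duplicate: reject this candidate
        seen.add(x)
    return True

print(is_prime_poly(0x11d))  # True  (generates all 255 non-zero elements exactly once)
print(is_prime_poly(0x11c))  # False (even candidate, hence reducible)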
def _identify_surface_sites(self, thickness): """Label surface sites and add ports above them. """ for atom in self.particles(): if len(self.bond_graph.neighbors(atom)) == 1: if atom.name == 'O' and atom.pos[2] > thickness: atom.name = 'OS' port = mb.Port(anchor=atom) port.spin(np.pi/2, [1, 0, 0]) port.translate(np.array([0.0, 0.0, 0.1])) self.add(port, "port_{}".format(len(self.referenced_ports())))
def function[_identify_surface_sites, parameter[self, thickness]]: constant[Label surface sites and add ports above them. ] for taget[name[atom]] in starred[call[name[self].particles, parameter[]]] begin[:] if compare[call[name[len], parameter[call[name[self].bond_graph.neighbors, parameter[name[atom]]]]] equal[==] constant[1]] begin[:] if <ast.BoolOp object at 0x7da1b1d51c30> begin[:] name[atom].name assign[=] constant[OS] variable[port] assign[=] call[name[mb].Port, parameter[]] call[name[port].spin, parameter[binary_operation[name[np].pi / constant[2]], list[[<ast.Constant object at 0x7da1b1d51120>, <ast.Constant object at 0x7da1b1d51f00>, <ast.Constant object at 0x7da1b1d50a90>]]]] call[name[port].translate, parameter[call[name[np].array, parameter[list[[<ast.Constant object at 0x7da1b1d50cd0>, <ast.Constant object at 0x7da1b1d531c0>, <ast.Constant object at 0x7da1b1d51840>]]]]]] call[name[self].add, parameter[name[port], call[constant[port_{}].format, parameter[call[name[len], parameter[call[name[self].referenced_ports, parameter[]]]]]]]]
keyword[def] identifier[_identify_surface_sites] ( identifier[self] , identifier[thickness] ): literal[string] keyword[for] identifier[atom] keyword[in] identifier[self] . identifier[particles] (): keyword[if] identifier[len] ( identifier[self] . identifier[bond_graph] . identifier[neighbors] ( identifier[atom] ))== literal[int] : keyword[if] identifier[atom] . identifier[name] == literal[string] keyword[and] identifier[atom] . identifier[pos] [ literal[int] ]> identifier[thickness] : identifier[atom] . identifier[name] = literal[string] identifier[port] = identifier[mb] . identifier[Port] ( identifier[anchor] = identifier[atom] ) identifier[port] . identifier[spin] ( identifier[np] . identifier[pi] / literal[int] ,[ literal[int] , literal[int] , literal[int] ]) identifier[port] . identifier[translate] ( identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ])) identifier[self] . identifier[add] ( identifier[port] , literal[string] . identifier[format] ( identifier[len] ( identifier[self] . identifier[referenced_ports] ())))
def _identify_surface_sites(self, thickness): """Label surface sites and add ports above them. """ for atom in self.particles(): if len(self.bond_graph.neighbors(atom)) == 1: if atom.name == 'O' and atom.pos[2] > thickness: atom.name = 'OS' port = mb.Port(anchor=atom) port.spin(np.pi / 2, [1, 0, 0]) port.translate(np.array([0.0, 0.0, 0.1])) self.add(port, 'port_{}'.format(len(self.referenced_ports()))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['atom']]
def _ivy_jvm_options(self, repo): """Get the JVM options for ivy authentication, if needed.""" # Get authentication for the publish repo if needed. if not repo.get('auth'): # No need to copy here, as this list isn't modified by the caller. return self._jvm_options # Create a copy of the options, so that the modification is appropriately transient. jvm_options = copy(self._jvm_options) user = repo.get('username') password = repo.get('password') if user and password: jvm_options.append('-Dlogin={}'.format(user)) jvm_options.append('-Dpassword={}'.format(password)) else: raise TaskError('Unable to publish to {}. {}' .format(repo.get('resolver'), repo.get('help', ''))) return jvm_options
def function[_ivy_jvm_options, parameter[self, repo]]: constant[Get the JVM options for ivy authentication, if needed.] if <ast.UnaryOp object at 0x7da1b1e8cdf0> begin[:] return[name[self]._jvm_options] variable[jvm_options] assign[=] call[name[copy], parameter[name[self]._jvm_options]] variable[user] assign[=] call[name[repo].get, parameter[constant[username]]] variable[password] assign[=] call[name[repo].get, parameter[constant[password]]] if <ast.BoolOp object at 0x7da1b1e8e6b0> begin[:] call[name[jvm_options].append, parameter[call[constant[-Dlogin={}].format, parameter[name[user]]]]] call[name[jvm_options].append, parameter[call[constant[-Dpassword={}].format, parameter[name[password]]]]] return[name[jvm_options]]
keyword[def] identifier[_ivy_jvm_options] ( identifier[self] , identifier[repo] ): literal[string] keyword[if] keyword[not] identifier[repo] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[_jvm_options] identifier[jvm_options] = identifier[copy] ( identifier[self] . identifier[_jvm_options] ) identifier[user] = identifier[repo] . identifier[get] ( literal[string] ) identifier[password] = identifier[repo] . identifier[get] ( literal[string] ) keyword[if] identifier[user] keyword[and] identifier[password] : identifier[jvm_options] . identifier[append] ( literal[string] . identifier[format] ( identifier[user] )) identifier[jvm_options] . identifier[append] ( literal[string] . identifier[format] ( identifier[password] )) keyword[else] : keyword[raise] identifier[TaskError] ( literal[string] . identifier[format] ( identifier[repo] . identifier[get] ( literal[string] ), identifier[repo] . identifier[get] ( literal[string] , literal[string] ))) keyword[return] identifier[jvm_options]
def _ivy_jvm_options(self, repo): """Get the JVM options for ivy authentication, if needed.""" # Get authentication for the publish repo if needed. if not repo.get('auth'): # No need to copy here, as this list isn't modified by the caller. return self._jvm_options # depends on [control=['if'], data=[]] # Create a copy of the options, so that the modification is appropriately transient. jvm_options = copy(self._jvm_options) user = repo.get('username') password = repo.get('password') if user and password: jvm_options.append('-Dlogin={}'.format(user)) jvm_options.append('-Dpassword={}'.format(password)) # depends on [control=['if'], data=[]] else: raise TaskError('Unable to publish to {}. {}'.format(repo.get('resolver'), repo.get('help', ''))) return jvm_options
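The same option building with the Pants task internals replaced by plain arguments; the function name and signature below are illustrative only.

def ivy_jvm_options(base_jvm_options, repo):
    if not repo.get('auth'):
        return base_jvm_options
    jvm_options = list(base_jvm_options)      # copy, so the modification stays transient
    user, password = repo.get('username'), repo.get('password')
    if not (user and password):
        raise ValueError('Unable to publish to {}. {}'.format(
            repo.get('resolver'), repo.get('help', '')))
    jvm_options.append('-Dlogin={}'.format(user))
    jvm_options.append('-Dpassword={}'.format(password))
    return jvm_options

print(ivy_jvm_options(['-Xmx1g'], {'auth': True, 'username': 'ci', 'password': 's3cret'}))
# ['-Xmx1g', '-Dlogin=ci', '-Dpassword=s3cret']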
def are_values_same_type(first_val, second_val):
    """
    Method to verify that both values belong to the same type. Float and integer are considered the same type.

    Args:
        first_val: Value to validate.
        second_val: Value to validate.

    Returns:
        Boolean: True if both values belong to the same type. Otherwise False.

    """
    first_val_type = type(first_val)
    second_val_type = type(second_val)

    # use isinstance to accommodate Python 2 unicode and str types.
    if isinstance(first_val, string_types) and isinstance(second_val, string_types):
        return True

    # Compare types if one of the values is bool because bool is a subclass of Integer.
    if isinstance(first_val, bool) or isinstance(second_val, bool):
        return first_val_type == second_val_type

    # Treat ints and floats as the same type.
    if isinstance(first_val, (numbers.Integral, float)) and isinstance(second_val, (numbers.Integral, float)):
        return True

    return False
def function[are_values_same_type, parameter[first_val, second_val]]: constant[ Method to verify that both values belong to same type. Float and integer are considered as same type. Args: first_val: Value to validate. second_Val: Value to validate. Returns: Boolean: True if both values belong to same type. Otherwise False. ] variable[first_val_type] assign[=] call[name[type], parameter[name[first_val]]] variable[second_val_type] assign[=] call[name[type], parameter[name[second_val]]] if <ast.BoolOp object at 0x7da18f09ddb0> begin[:] return[constant[True]] if <ast.BoolOp object at 0x7da18f09de40> begin[:] return[compare[name[first_val_type] equal[==] name[second_val_type]]] if <ast.BoolOp object at 0x7da18f09e4a0> begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[are_values_same_type] ( identifier[first_val] , identifier[second_val] ): literal[string] identifier[first_val_type] = identifier[type] ( identifier[first_val] ) identifier[second_val_type] = identifier[type] ( identifier[second_val] ) keyword[if] identifier[isinstance] ( identifier[first_val] , identifier[string_types] ) keyword[and] identifier[isinstance] ( identifier[second_val] , identifier[string_types] ): keyword[return] keyword[True] keyword[if] identifier[isinstance] ( identifier[first_val] , identifier[bool] ) keyword[or] identifier[isinstance] ( identifier[second_val] , identifier[bool] ): keyword[return] identifier[first_val_type] == identifier[second_val_type] keyword[if] identifier[isinstance] ( identifier[first_val] ,( identifier[numbers] . identifier[Integral] , identifier[float] )) keyword[and] identifier[isinstance] ( identifier[second_val] ,( identifier[numbers] . identifier[Integral] , identifier[float] )): keyword[return] keyword[True] keyword[return] keyword[False]
def are_values_same_type(first_val, second_val): """ Method to verify that both values belong to same type. Float and integer are considered as same type. Args: first_val: Value to validate. second_Val: Value to validate. Returns: Boolean: True if both values belong to same type. Otherwise False. """ first_val_type = type(first_val) second_val_type = type(second_val) # use isinstance to accomodate Python 2 unicode and str types. if isinstance(first_val, string_types) and isinstance(second_val, string_types): return True # depends on [control=['if'], data=[]] # Compare types if one of the values is bool because bool is a subclass on Integer. if isinstance(first_val, bool) or isinstance(second_val, bool): return first_val_type == second_val_type # depends on [control=['if'], data=[]] # Treat ints and floats as same type. if isinstance(first_val, (numbers.Integral, float)) and isinstance(second_val, (numbers.Integral, float)): return True # depends on [control=['if'], data=[]] return False
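A hedged usage sketch for are_values_same_type, assuming the function above is in scope and that string_types resolves to (str,) as it would under six on Python 3.

import numbers
string_types = (str,)  # stand-in for six.string_types on Python 3

print(are_values_same_type(1, 2.5))     # True  - ints and floats count as the same type
print(are_values_same_type(True, 1))    # False - bool is compared strictly against int
print(are_values_same_type('a', u'b'))  # True  - both are string types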
def _run_cmd(cmd): """Run command specified by :cmd: and return stdout, stderr and code.""" if not os.path.exists(cmd[0]): cmd[0] = shutil.which(cmd[0]) assert cmd[0] is not None shebang_parts = parseshebang.parse(cmd[0]) proc = subprocess.Popen(shebang_parts + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate() return { "stdout": stdout, "stderr": stderr, "code": proc.returncode }
def function[_run_cmd, parameter[cmd]]: constant[Run command specified by :cmd: and return stdout, stderr and code.] if <ast.UnaryOp object at 0x7da20e955120> begin[:] call[name[cmd]][constant[0]] assign[=] call[name[shutil].which, parameter[call[name[cmd]][constant[0]]]] assert[compare[call[name[cmd]][constant[0]] is_not constant[None]]] variable[shebang_parts] assign[=] call[name[parseshebang].parse, parameter[call[name[cmd]][constant[0]]]] variable[proc] assign[=] call[name[subprocess].Popen, parameter[binary_operation[name[shebang_parts] + name[cmd]]]] <ast.Tuple object at 0x7da20e74b2e0> assign[=] call[name[proc].communicate, parameter[]] return[dictionary[[<ast.Constant object at 0x7da20e74bd90>, <ast.Constant object at 0x7da20e749720>, <ast.Constant object at 0x7da20e74a1a0>], [<ast.Name object at 0x7da20e74af80>, <ast.Name object at 0x7da20e749ab0>, <ast.Attribute object at 0x7da20e74ba30>]]]
keyword[def] identifier[_run_cmd] ( identifier[cmd] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[cmd] [ literal[int] ]): identifier[cmd] [ literal[int] ]= identifier[shutil] . identifier[which] ( identifier[cmd] [ literal[int] ]) keyword[assert] identifier[cmd] [ literal[int] ] keyword[is] keyword[not] keyword[None] identifier[shebang_parts] = identifier[parseshebang] . identifier[parse] ( identifier[cmd] [ literal[int] ]) identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[shebang_parts] + identifier[cmd] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[PIPE] ) identifier[stdout] , identifier[stderr] = identifier[proc] . identifier[communicate] () keyword[return] { literal[string] : identifier[stdout] , literal[string] : identifier[stderr] , literal[string] : identifier[proc] . identifier[returncode] }
def _run_cmd(cmd): """Run command specified by :cmd: and return stdout, stderr and code.""" if not os.path.exists(cmd[0]): cmd[0] = shutil.which(cmd[0]) assert cmd[0] is not None # depends on [control=['if'], data=[]] shebang_parts = parseshebang.parse(cmd[0]) proc = subprocess.Popen(shebang_parts + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, stderr) = proc.communicate() return {'stdout': stdout, 'stderr': stderr, 'code': proc.returncode}
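A simplified, self-contained version of the same run-and-capture pattern; the third-party parseshebang step from the original is omitted, and the echo command is only an illustrative input.

import os
import shutil
import subprocess

def run_cmd_sketch(cmd):
    # Resolve the executable through PATH when the given name is not an existing file.
    if not os.path.exists(cmd[0]):
        cmd = [shutil.which(cmd[0])] + cmd[1:]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    return {"stdout": stdout, "stderr": stderr, "code": proc.returncode}

print(run_cmd_sketch(["echo", "hello"]))
# {'stdout': b'hello\n', 'stderr': b'', 'code': 0} on a typical Unix system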
def handle_block( mediator_state: MediatorTransferState, state_change: Block, channelidentifiers_to_channels: ChannelMap, pseudo_random_generator: random.Random, ) -> TransitionResult[MediatorTransferState]: """ After Raiden learns about a new block this function must be called to handle expiration of the hash time locks. Args: state: The current state. Return: TransitionResult: The resulting iteration """ expired_locks_events = events_to_remove_expired_locks( mediator_state, channelidentifiers_to_channels, state_change.block_number, pseudo_random_generator, ) secret_reveal_events = events_for_onchain_secretreveal_if_dangerzone( channelmap=channelidentifiers_to_channels, secrethash=mediator_state.secrethash, transfers_pair=mediator_state.transfers_pair, block_number=state_change.block_number, block_hash=state_change.block_hash, ) unlock_fail_events = events_for_expired_pairs( channelidentifiers_to_channels=channelidentifiers_to_channels, transfers_pair=mediator_state.transfers_pair, waiting_transfer=mediator_state.waiting_transfer, block_number=state_change.block_number, ) iteration = TransitionResult( mediator_state, unlock_fail_events + secret_reveal_events + expired_locks_events, ) return iteration
def function[handle_block, parameter[mediator_state, state_change, channelidentifiers_to_channels, pseudo_random_generator]]: constant[ After Raiden learns about a new block this function must be called to handle expiration of the hash time locks. Args: state: The current state. Return: TransitionResult: The resulting iteration ] variable[expired_locks_events] assign[=] call[name[events_to_remove_expired_locks], parameter[name[mediator_state], name[channelidentifiers_to_channels], name[state_change].block_number, name[pseudo_random_generator]]] variable[secret_reveal_events] assign[=] call[name[events_for_onchain_secretreveal_if_dangerzone], parameter[]] variable[unlock_fail_events] assign[=] call[name[events_for_expired_pairs], parameter[]] variable[iteration] assign[=] call[name[TransitionResult], parameter[name[mediator_state], binary_operation[binary_operation[name[unlock_fail_events] + name[secret_reveal_events]] + name[expired_locks_events]]]] return[name[iteration]]
keyword[def] identifier[handle_block] ( identifier[mediator_state] : identifier[MediatorTransferState] , identifier[state_change] : identifier[Block] , identifier[channelidentifiers_to_channels] : identifier[ChannelMap] , identifier[pseudo_random_generator] : identifier[random] . identifier[Random] , )-> identifier[TransitionResult] [ identifier[MediatorTransferState] ]: literal[string] identifier[expired_locks_events] = identifier[events_to_remove_expired_locks] ( identifier[mediator_state] , identifier[channelidentifiers_to_channels] , identifier[state_change] . identifier[block_number] , identifier[pseudo_random_generator] , ) identifier[secret_reveal_events] = identifier[events_for_onchain_secretreveal_if_dangerzone] ( identifier[channelmap] = identifier[channelidentifiers_to_channels] , identifier[secrethash] = identifier[mediator_state] . identifier[secrethash] , identifier[transfers_pair] = identifier[mediator_state] . identifier[transfers_pair] , identifier[block_number] = identifier[state_change] . identifier[block_number] , identifier[block_hash] = identifier[state_change] . identifier[block_hash] , ) identifier[unlock_fail_events] = identifier[events_for_expired_pairs] ( identifier[channelidentifiers_to_channels] = identifier[channelidentifiers_to_channels] , identifier[transfers_pair] = identifier[mediator_state] . identifier[transfers_pair] , identifier[waiting_transfer] = identifier[mediator_state] . identifier[waiting_transfer] , identifier[block_number] = identifier[state_change] . identifier[block_number] , ) identifier[iteration] = identifier[TransitionResult] ( identifier[mediator_state] , identifier[unlock_fail_events] + identifier[secret_reveal_events] + identifier[expired_locks_events] , ) keyword[return] identifier[iteration]
def handle_block(mediator_state: MediatorTransferState, state_change: Block, channelidentifiers_to_channels: ChannelMap, pseudo_random_generator: random.Random) -> TransitionResult[MediatorTransferState]: """ After Raiden learns about a new block this function must be called to handle expiration of the hash time locks. Args: state: The current state. Return: TransitionResult: The resulting iteration """ expired_locks_events = events_to_remove_expired_locks(mediator_state, channelidentifiers_to_channels, state_change.block_number, pseudo_random_generator) secret_reveal_events = events_for_onchain_secretreveal_if_dangerzone(channelmap=channelidentifiers_to_channels, secrethash=mediator_state.secrethash, transfers_pair=mediator_state.transfers_pair, block_number=state_change.block_number, block_hash=state_change.block_hash) unlock_fail_events = events_for_expired_pairs(channelidentifiers_to_channels=channelidentifiers_to_channels, transfers_pair=mediator_state.transfers_pair, waiting_transfer=mediator_state.waiting_transfer, block_number=state_change.block_number) iteration = TransitionResult(mediator_state, unlock_fail_events + secret_reveal_events + expired_locks_events) return iteration
def _map_reduce(self, map, reduce, out, session, read_pref, **kwargs): """Internal mapReduce helper.""" cmd = SON([("mapReduce", self.__name), ("map", map), ("reduce", reduce), ("out", out)]) collation = validate_collation_or_none(kwargs.pop('collation', None)) cmd.update(kwargs) inline = 'inline' in out if inline: user_fields = {'results': 1} else: user_fields = None read_pref = ((session and session._txn_read_preference()) or read_pref) with self.__database.client._socket_for_reads(read_pref, session) as ( sock_info, slave_ok): if (sock_info.max_wire_version >= 4 and ('readConcern' not in cmd) and inline): read_concern = self.read_concern else: read_concern = None if 'writeConcern' not in cmd and not inline: write_concern = self._write_concern_for(session) else: write_concern = None return self._command( sock_info, cmd, slave_ok, read_pref, read_concern=read_concern, write_concern=write_concern, collation=collation, session=session, user_fields=user_fields)
def function[_map_reduce, parameter[self, map, reduce, out, session, read_pref]]: constant[Internal mapReduce helper.] variable[cmd] assign[=] call[name[SON], parameter[list[[<ast.Tuple object at 0x7da20e9b1990>, <ast.Tuple object at 0x7da20e9b19c0>, <ast.Tuple object at 0x7da20e9b16c0>, <ast.Tuple object at 0x7da20e9b07c0>]]]] variable[collation] assign[=] call[name[validate_collation_or_none], parameter[call[name[kwargs].pop, parameter[constant[collation], constant[None]]]]] call[name[cmd].update, parameter[name[kwargs]]] variable[inline] assign[=] compare[constant[inline] in name[out]] if name[inline] begin[:] variable[user_fields] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b1420>], [<ast.Constant object at 0x7da20e9b23b0>]] variable[read_pref] assign[=] <ast.BoolOp object at 0x7da20e9b2800> with call[name[self].__database.client._socket_for_reads, parameter[name[read_pref], name[session]]] begin[:] if <ast.BoolOp object at 0x7da20e9b0f40> begin[:] variable[read_concern] assign[=] name[self].read_concern if <ast.BoolOp object at 0x7da18f812080> begin[:] variable[write_concern] assign[=] call[name[self]._write_concern_for, parameter[name[session]]] return[call[name[self]._command, parameter[name[sock_info], name[cmd], name[slave_ok], name[read_pref]]]]
keyword[def] identifier[_map_reduce] ( identifier[self] , identifier[map] , identifier[reduce] , identifier[out] , identifier[session] , identifier[read_pref] ,** identifier[kwargs] ): literal[string] identifier[cmd] = identifier[SON] ([( literal[string] , identifier[self] . identifier[__name] ), ( literal[string] , identifier[map] ), ( literal[string] , identifier[reduce] ), ( literal[string] , identifier[out] )]) identifier[collation] = identifier[validate_collation_or_none] ( identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )) identifier[cmd] . identifier[update] ( identifier[kwargs] ) identifier[inline] = literal[string] keyword[in] identifier[out] keyword[if] identifier[inline] : identifier[user_fields] ={ literal[string] : literal[int] } keyword[else] : identifier[user_fields] = keyword[None] identifier[read_pref] =(( identifier[session] keyword[and] identifier[session] . identifier[_txn_read_preference] ()) keyword[or] identifier[read_pref] ) keyword[with] identifier[self] . identifier[__database] . identifier[client] . identifier[_socket_for_reads] ( identifier[read_pref] , identifier[session] ) keyword[as] ( identifier[sock_info] , identifier[slave_ok] ): keyword[if] ( identifier[sock_info] . identifier[max_wire_version] >= literal[int] keyword[and] ( literal[string] keyword[not] keyword[in] identifier[cmd] ) keyword[and] identifier[inline] ): identifier[read_concern] = identifier[self] . identifier[read_concern] keyword[else] : identifier[read_concern] = keyword[None] keyword[if] literal[string] keyword[not] keyword[in] identifier[cmd] keyword[and] keyword[not] identifier[inline] : identifier[write_concern] = identifier[self] . identifier[_write_concern_for] ( identifier[session] ) keyword[else] : identifier[write_concern] = keyword[None] keyword[return] identifier[self] . identifier[_command] ( identifier[sock_info] , identifier[cmd] , identifier[slave_ok] , identifier[read_pref] , identifier[read_concern] = identifier[read_concern] , identifier[write_concern] = identifier[write_concern] , identifier[collation] = identifier[collation] , identifier[session] = identifier[session] , identifier[user_fields] = identifier[user_fields] )
def _map_reduce(self, map, reduce, out, session, read_pref, **kwargs): """Internal mapReduce helper.""" cmd = SON([('mapReduce', self.__name), ('map', map), ('reduce', reduce), ('out', out)]) collation = validate_collation_or_none(kwargs.pop('collation', None)) cmd.update(kwargs) inline = 'inline' in out if inline: user_fields = {'results': 1} # depends on [control=['if'], data=[]] else: user_fields = None read_pref = session and session._txn_read_preference() or read_pref with self.__database.client._socket_for_reads(read_pref, session) as (sock_info, slave_ok): if sock_info.max_wire_version >= 4 and 'readConcern' not in cmd and inline: read_concern = self.read_concern # depends on [control=['if'], data=[]] else: read_concern = None if 'writeConcern' not in cmd and (not inline): write_concern = self._write_concern_for(session) # depends on [control=['if'], data=[]] else: write_concern = None return self._command(sock_info, cmd, slave_ok, read_pref, read_concern=read_concern, write_concern=write_concern, collation=collation, session=session, user_fields=user_fields) # depends on [control=['with'], data=[]]
def find_commands_module(app_name):
    """
    Find the commands module in each app (if it exists) and return the path

    app_name : The name of an app in the INSTALLED_APPS setting

    return - path to the app's commands module
    """
    parts = app_name.split('.')
    parts.append('commands')
    parts.reverse()
    part = parts.pop()
    path = None

    #Load the module if needed
    try:
        f, path, descr = imp.find_module(part, path)
    except ImportError as e:
        if os.path.basename(os.getcwd()) != part:
            raise e
    else:
        try:
            if f:
                f.close()
        except UnboundLocalError:
            log.error("Could not import module {0} at path {1}. Sys.path is {2}".format(part, path, sys.path))

    #Go down level by level and try to load the module at each level
    while parts:
        part = parts.pop()
        f, path, descr = imp.find_module(part, [path] if path else None)
        if f:
            f.close()
    return path
def function[find_commands_module, parameter[app_name]]: constant[ Find the commands module in each app (if it exists) and return the path app_name : The name of an app in the INSTALLED_APPS setting return - path to the app ] variable[parts] assign[=] call[name[app_name].split, parameter[constant[.]]] call[name[parts].append, parameter[constant[commands]]] call[name[parts].reverse, parameter[]] variable[part] assign[=] call[name[parts].pop, parameter[]] variable[path] assign[=] constant[None] <ast.Try object at 0x7da20c992020> while name[parts] begin[:] variable[part] assign[=] call[name[parts].pop, parameter[]] <ast.Tuple object at 0x7da18f00e740> assign[=] call[name[imp].find_module, parameter[name[part], <ast.IfExp object at 0x7da18f00dae0>]] if name[f] begin[:] call[name[f].close, parameter[]] return[name[path]]
keyword[def] identifier[find_commands_module] ( identifier[app_name] ): literal[string] identifier[parts] = identifier[app_name] . identifier[split] ( literal[string] ) identifier[parts] . identifier[append] ( literal[string] ) identifier[parts] . identifier[reverse] () identifier[part] = identifier[parts] . identifier[pop] () identifier[path] = keyword[None] keyword[try] : identifier[f] , identifier[path] , identifier[descr] = identifier[imp] . identifier[find_module] ( identifier[part] , identifier[path] ) keyword[except] identifier[ImportError] keyword[as] identifier[e] : keyword[if] identifier[os] . identifier[path] . identifier[basename] ( identifier[os] . identifier[getcwd] ())!= identifier[part] : keyword[raise] identifier[e] keyword[else] : keyword[try] : keyword[if] identifier[f] : identifier[f] . identifier[close] () keyword[except] identifier[UnboundLocalError] : identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[part] , identifier[path] , identifier[sys] . identifier[path] )) keyword[while] identifier[parts] : identifier[part] = identifier[parts] . identifier[pop] () identifier[f] , identifier[path] , identifier[descr] = identifier[imp] . identifier[find_module] ( identifier[part] ,[ identifier[path] ] keyword[if] identifier[path] keyword[else] keyword[None] ) keyword[if] identifier[f] : identifier[f] . identifier[close] () keyword[return] identifier[path]
def find_commands_module(app_name): """ Find the commands module in each app (if it exists) and return the path app_name : The name of an app in the INSTALLED_APPS setting return - path to the app """ parts = app_name.split('.') parts.append('commands') parts.reverse() part = parts.pop() path = None #Load the module if needed try: (f, path, descr) = imp.find_module(part, path) # depends on [control=['try'], data=[]] except ImportError as e: if os.path.basename(os.getcwd()) != part: raise e # depends on [control=['if'], data=[]] else: try: if f: f.close() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except UnboundLocalError: log.error('Could not import module {0} at path {1}. Sys.path is {2}'.format(part, path, sys.path)) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=['e']] #Go down level by and level and try to load the module at each level while parts: part = parts.pop() (f, path, descr) = imp.find_module(part, [path] if path else None) if f: f.close() # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return path
def update_access_key_full(self, access_key_id, name, is_active, permitted, options): """ Replaces the 'name', 'is_active', 'permitted', and 'options' values of a given key. A master key must be set first. :param access_key_id: the 'key' value of the access key for which the values will be replaced :param name: the new name desired for this access key :param is_active: whether the key should become enabled (True) or revoked (False) :param permitted: the new list of permissions desired for this access key :param options: the new dictionary of options for this access key """ return self.api.update_access_key_full(access_key_id, name, is_active, permitted, options)
def function[update_access_key_full, parameter[self, access_key_id, name, is_active, permitted, options]]: constant[ Replaces the 'name', 'is_active', 'permitted', and 'options' values of a given key. A master key must be set first. :param access_key_id: the 'key' value of the access key for which the values will be replaced :param name: the new name desired for this access key :param is_active: whether the key should become enabled (True) or revoked (False) :param permitted: the new list of permissions desired for this access key :param options: the new dictionary of options for this access key ] return[call[name[self].api.update_access_key_full, parameter[name[access_key_id], name[name], name[is_active], name[permitted], name[options]]]]
keyword[def] identifier[update_access_key_full] ( identifier[self] , identifier[access_key_id] , identifier[name] , identifier[is_active] , identifier[permitted] , identifier[options] ): literal[string] keyword[return] identifier[self] . identifier[api] . identifier[update_access_key_full] ( identifier[access_key_id] , identifier[name] , identifier[is_active] , identifier[permitted] , identifier[options] )
def update_access_key_full(self, access_key_id, name, is_active, permitted, options): """ Replaces the 'name', 'is_active', 'permitted', and 'options' values of a given key. A master key must be set first. :param access_key_id: the 'key' value of the access key for which the values will be replaced :param name: the new name desired for this access key :param is_active: whether the key should become enabled (True) or revoked (False) :param permitted: the new list of permissions desired for this access key :param options: the new dictionary of options for this access key """ return self.api.update_access_key_full(access_key_id, name, is_active, permitted, options)
def _set_receive(self, v, load=False): """ Setter method for receive, mapped from YANG variable /routing_system/ipv6/receive (container) If this variable is read-only (config: false) in the source YANG file, then _set_receive is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_receive() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=receive.receive, is_container='container', presence=False, yang_name="receive", rest_name="receive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Receive ACL', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-access-list', defining_module='brocade-ipv6-access-list', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """receive must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=receive.receive, is_container='container', presence=False, yang_name="receive", rest_name="receive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Receive ACL', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-access-list', defining_module='brocade-ipv6-access-list', yang_type='container', is_config=True)""", }) self.__receive = t if hasattr(self, '_set'): self._set()
def function[_set_receive, parameter[self, v, load]]: constant[ Setter method for receive, mapped from YANG variable /routing_system/ipv6/receive (container) If this variable is read-only (config: false) in the source YANG file, then _set_receive is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_receive() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da20c76ee30> name[self].__receive assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_receive] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[receive] . identifier[receive] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__receive] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_receive(self, v, load=False): """ Setter method for receive, mapped from YANG variable /routing_system/ipv6/receive (container) If this variable is read-only (config: false) in the source YANG file, then _set_receive is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_receive() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=receive.receive, is_container='container', presence=False, yang_name='receive', rest_name='receive', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Receive ACL', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-access-list', defining_module='brocade-ipv6-access-list', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'receive must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=receive.receive, is_container=\'container\', presence=False, yang_name="receive", rest_name="receive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Receive ACL\', u\'cli-incomplete-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-ipv6-access-list\', defining_module=\'brocade-ipv6-access-list\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__receive = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def leading_whitespace(self, inputstring): """Count leading whitespace.""" count = 0 for i, c in enumerate(inputstring): if c == " ": count += 1 elif c == "\t": count += tabworth - (i % tabworth) else: break if self.indchar is None: self.indchar = c elif c != self.indchar: self.strict_err_or_warn("found mixing of tabs and spaces", inputstring, i) return count
def function[leading_whitespace, parameter[self, inputstring]]: constant[Count leading whitespace.] variable[count] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da20c990250>, <ast.Name object at 0x7da20c990df0>]]] in starred[call[name[enumerate], parameter[name[inputstring]]]] begin[:] if compare[name[c] equal[==] constant[ ]] begin[:] <ast.AugAssign object at 0x7da20c990ac0> if compare[name[self].indchar is constant[None]] begin[:] name[self].indchar assign[=] name[c] return[name[count]]
keyword[def] identifier[leading_whitespace] ( identifier[self] , identifier[inputstring] ): literal[string] identifier[count] = literal[int] keyword[for] identifier[i] , identifier[c] keyword[in] identifier[enumerate] ( identifier[inputstring] ): keyword[if] identifier[c] == literal[string] : identifier[count] += literal[int] keyword[elif] identifier[c] == literal[string] : identifier[count] += identifier[tabworth] -( identifier[i] % identifier[tabworth] ) keyword[else] : keyword[break] keyword[if] identifier[self] . identifier[indchar] keyword[is] keyword[None] : identifier[self] . identifier[indchar] = identifier[c] keyword[elif] identifier[c] != identifier[self] . identifier[indchar] : identifier[self] . identifier[strict_err_or_warn] ( literal[string] , identifier[inputstring] , identifier[i] ) keyword[return] identifier[count]
def leading_whitespace(self, inputstring): """Count leading whitespace.""" count = 0 for (i, c) in enumerate(inputstring): if c == ' ': count += 1 # depends on [control=['if'], data=[]] elif c == '\t': count += tabworth - i % tabworth # depends on [control=['if'], data=[]] else: break if self.indchar is None: self.indchar = c # depends on [control=['if'], data=[]] elif c != self.indchar: self.strict_err_or_warn('found mixing of tabs and spaces', inputstring, i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return count
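The same indentation arithmetic as a standalone sketch, assuming a fixed tabworth of 8 and replacing the compiler's strict_err_or_warn hook with a print; the names here are illustrative.

tabworth = 8  # tab stop width assumed here; the original takes it from a module constant

def leading_whitespace_sketch(line):
    count = 0
    indchar = None
    for i, c in enumerate(line):
        if c == " ":
            count += 1
        elif c == "\t":
            count += tabworth - (i % tabworth)  # jump to the next tab stop
        else:
            break
        if indchar is None:
            indchar = c            # remember the first indentation character seen
        elif c != indchar:
            print("mixing of tabs and spaces at column", i)  # the original warns or errors here
    return count

print(leading_whitespace_sketch("    return 1"))  # 4
print(leading_whitespace_sketch("\tif x:"))       # 8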
def make_img_widget(cls, img, layout=Layout(), format='jpg'):
    "Returns an image widget for the given image data `img`."
    return widgets.Image(value=img, format=format, layout=layout)
def function[make_img_widget, parameter[cls, img, layout, format]]: constant[Returns an image widget for specified file name `img`.] return[call[name[widgets].Image, parameter[]]]
keyword[def] identifier[make_img_widget] ( identifier[cls] , identifier[img] , identifier[layout] = identifier[Layout] (), identifier[format] = literal[string] ): literal[string] keyword[return] identifier[widgets] . identifier[Image] ( identifier[value] = identifier[img] , identifier[format] = identifier[format] , identifier[layout] = identifier[layout] )
def make_img_widget(cls, img, layout=Layout(), format='jpg'): """Returns an image widget for specified file name `img`.""" return widgets.Image(value=img, format=format, layout=layout)
def dataset_prepare(self): '''Subcommand of dataset for processing a corpus into a dataset''' # Initialize the prepare subcommand's argparser parser = argparse.ArgumentParser(description='Preprocess a raw dialogue corpus into a dsrt dataset') self.init_dataset_prepare_args(parser) # Parse the args we got args = parser.parse_args(sys.argv[3:]) args.config = ConfigurationLoader(args.config).load().data_config print(CLI_DIVIDER + '\n') Preprocessor(**vars(args)).run()
def function[dataset_prepare, parameter[self]]: constant[Subcommand of dataset for processing a corpus into a dataset] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[self].init_dataset_prepare_args, parameter[name[parser]]] variable[args] assign[=] call[name[parser].parse_args, parameter[call[name[sys].argv][<ast.Slice object at 0x7da1b1649e40>]]] name[args].config assign[=] call[call[name[ConfigurationLoader], parameter[name[args].config]].load, parameter[]].data_config call[name[print], parameter[binary_operation[name[CLI_DIVIDER] + constant[ ]]]] call[call[name[Preprocessor], parameter[]].run, parameter[]]
keyword[def] identifier[dataset_prepare] ( identifier[self] ): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = literal[string] ) identifier[self] . identifier[init_dataset_prepare_args] ( identifier[parser] ) identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[sys] . identifier[argv] [ literal[int] :]) identifier[args] . identifier[config] = identifier[ConfigurationLoader] ( identifier[args] . identifier[config] ). identifier[load] (). identifier[data_config] identifier[print] ( identifier[CLI_DIVIDER] + literal[string] ) identifier[Preprocessor] (** identifier[vars] ( identifier[args] )). identifier[run] ()
def dataset_prepare(self): """Subcommand of dataset for processing a corpus into a dataset""" # Initialize the prepare subcommand's argparser parser = argparse.ArgumentParser(description='Preprocess a raw dialogue corpus into a dsrt dataset') self.init_dataset_prepare_args(parser) # Parse the args we got args = parser.parse_args(sys.argv[3:]) args.config = ConfigurationLoader(args.config).load().data_config print(CLI_DIVIDER + '\n') Preprocessor(**vars(args)).run()
def header(fname, sep="\t"): """ just grab the header from a given file """ fh = iter(nopen(fname)) h = tokens(next(fh), sep) h[0] = h[0].lstrip("#") return h
def function[header, parameter[fname, sep]]: constant[ just grab the header from a given file ] variable[fh] assign[=] call[name[iter], parameter[call[name[nopen], parameter[name[fname]]]]] variable[h] assign[=] call[name[tokens], parameter[call[name[next], parameter[name[fh]]], name[sep]]] call[name[h]][constant[0]] assign[=] call[call[name[h]][constant[0]].lstrip, parameter[constant[#]]] return[name[h]]
keyword[def] identifier[header] ( identifier[fname] , identifier[sep] = literal[string] ): literal[string] identifier[fh] = identifier[iter] ( identifier[nopen] ( identifier[fname] )) identifier[h] = identifier[tokens] ( identifier[next] ( identifier[fh] ), identifier[sep] ) identifier[h] [ literal[int] ]= identifier[h] [ literal[int] ]. identifier[lstrip] ( literal[string] ) keyword[return] identifier[h]
def header(fname, sep='\t'): """ just grab the header from a given file """ fh = iter(nopen(fname)) h = tokens(next(fh), sep) h[0] = h[0].lstrip('#') return h
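An equivalent header read using plain file I/O, for illustration only; nopen and tokens in the original also cover gzipped or piped inputs, which this sketch skips, and the example.bed file name is made up.

def header_sketch(fname, sep="\t"):
    # Read only the first line, strip the newline, split on the separator.
    with open(fname) as fh:
        h = next(fh).rstrip("\r\n").split(sep)
    h[0] = h[0].lstrip("#")
    return h

with open("example.bed", "w") as out:
    out.write("#chrom\tstart\tend\n")
    out.write("chr1\t100\t200\n")

print(header_sketch("example.bed"))  # ['chrom', 'start', 'end']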
def _safe_get_element_date(self, path, root=None):
    """Safe get element date.

    Get element as datetime.date or None.

    :param root:
        Lxml element.
    :param path:
        String path (i.e. 'Items.Item.Offers.Offer').
    :return:
        datetime.date or None.
    """
    value = self._safe_get_element_text(path=path, root=root)
    if value is not None:
        try:
            value = dateutil.parser.parse(value)
            if value:
                value = value.date()
        except ValueError:
            value = None

    return value
def function[_safe_get_element_date, parameter[self, path, root]]: constant[Safe get elemnent date. Get element as datetime.date or None, :param root: Lxml element. :param path: String path (i.e. 'Items.Item.Offers.Offer'). :return: datetime.date or None. ] variable[value] assign[=] call[name[self]._safe_get_element_text, parameter[]] if compare[name[value] is_not constant[None]] begin[:] <ast.Try object at 0x7da20c6a8880> return[name[value]]
keyword[def] identifier[_safe_get_element_date] ( identifier[self] , identifier[path] , identifier[root] = keyword[None] ): literal[string] identifier[value] = identifier[self] . identifier[_safe_get_element_text] ( identifier[path] = identifier[path] , identifier[root] = identifier[root] ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[value] = identifier[dateutil] . identifier[parser] . identifier[parse] ( identifier[value] ) keyword[if] identifier[value] : identifier[value] = identifier[value] . identifier[date] () keyword[except] identifier[ValueError] : identifier[value] = keyword[None] keyword[return] identifier[value]
def _safe_get_element_date(self, path, root=None): """Safe get elemnent date. Get element as datetime.date or None, :param root: Lxml element. :param path: String path (i.e. 'Items.Item.Offers.Offer'). :return: datetime.date or None. """ value = self._safe_get_element_text(path=path, root=root) if value is not None: try: value = dateutil.parser.parse(value) if value: value = value.date() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ValueError: value = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']] return value
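How the dateutil coercion used above behaves on its own, assuming python-dateutil is installed; the element lookup and the None short-circuit are handled by _safe_get_element_text in the class.

import dateutil.parser

value = dateutil.parser.parse("2016-03-01T00:00:01Z")
print(value.date())   # 2016-03-01

try:
    dateutil.parser.parse("not a date")
except ValueError:
    print(None)       # the method falls back to None on unparseable text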
def download_article_from_ids(**id_dict): """Download an article in XML format from Elsevier matching the set of ids. Parameters ---------- <id_type> : str You can enter any combination of eid, doi, pmid, and/or pii. Ids will be checked in that order, until either content has been found or all ids have been checked. Returns ------- content : str or None If found, the content is returned as a string, otherwise None is returned. """ valid_id_types = ['eid', 'doi', 'pmid', 'pii'] assert all([k in valid_id_types for k in id_dict.keys()]),\ ("One of these id keys is invalid: %s Valid keys are: %s." % (list(id_dict.keys()), valid_id_types)) if 'doi' in id_dict.keys() and id_dict['doi'].lower().startswith('doi:'): id_dict['doi'] = id_dict['doi'][4:] content = None for id_type in valid_id_types: if id_type in id_dict.keys(): content = download_article(id_dict[id_type], id_type) if content is not None: break else: logger.error("Could not download article with any of the ids: %s." % str(id_dict)) return content
def function[download_article_from_ids, parameter[]]: constant[Download an article in XML format from Elsevier matching the set of ids. Parameters ---------- <id_type> : str You can enter any combination of eid, doi, pmid, and/or pii. Ids will be checked in that order, until either content has been found or all ids have been checked. Returns ------- content : str or None If found, the content is returned as a string, otherwise None is returned. ] variable[valid_id_types] assign[=] list[[<ast.Constant object at 0x7da20c7c9180>, <ast.Constant object at 0x7da20c7cbb80>, <ast.Constant object at 0x7da20c7ca020>, <ast.Constant object at 0x7da20c7c8160>]] assert[call[name[all], parameter[<ast.ListComp object at 0x7da20c7cbd90>]]] if <ast.BoolOp object at 0x7da20c7cb1c0> begin[:] call[name[id_dict]][constant[doi]] assign[=] call[call[name[id_dict]][constant[doi]]][<ast.Slice object at 0x7da18bc70580>] variable[content] assign[=] constant[None] for taget[name[id_type]] in starred[name[valid_id_types]] begin[:] if compare[name[id_type] in call[name[id_dict].keys, parameter[]]] begin[:] variable[content] assign[=] call[name[download_article], parameter[call[name[id_dict]][name[id_type]], name[id_type]]] if compare[name[content] is_not constant[None]] begin[:] break return[name[content]]
keyword[def] identifier[download_article_from_ids] (** identifier[id_dict] ): literal[string] identifier[valid_id_types] =[ literal[string] , literal[string] , literal[string] , literal[string] ] keyword[assert] identifier[all] ([ identifier[k] keyword[in] identifier[valid_id_types] keyword[for] identifier[k] keyword[in] identifier[id_dict] . identifier[keys] ()]),( literal[string] %( identifier[list] ( identifier[id_dict] . identifier[keys] ()), identifier[valid_id_types] )) keyword[if] literal[string] keyword[in] identifier[id_dict] . identifier[keys] () keyword[and] identifier[id_dict] [ literal[string] ]. identifier[lower] (). identifier[startswith] ( literal[string] ): identifier[id_dict] [ literal[string] ]= identifier[id_dict] [ literal[string] ][ literal[int] :] identifier[content] = keyword[None] keyword[for] identifier[id_type] keyword[in] identifier[valid_id_types] : keyword[if] identifier[id_type] keyword[in] identifier[id_dict] . identifier[keys] (): identifier[content] = identifier[download_article] ( identifier[id_dict] [ identifier[id_type] ], identifier[id_type] ) keyword[if] identifier[content] keyword[is] keyword[not] keyword[None] : keyword[break] keyword[else] : identifier[logger] . identifier[error] ( literal[string] % identifier[str] ( identifier[id_dict] )) keyword[return] identifier[content]
def download_article_from_ids(**id_dict): """Download an article in XML format from Elsevier matching the set of ids. Parameters ---------- <id_type> : str You can enter any combination of eid, doi, pmid, and/or pii. Ids will be checked in that order, until either content has been found or all ids have been checked. Returns ------- content : str or None If found, the content is returned as a string, otherwise None is returned. """ valid_id_types = ['eid', 'doi', 'pmid', 'pii'] assert all([k in valid_id_types for k in id_dict.keys()]), 'One of these id keys is invalid: %s Valid keys are: %s.' % (list(id_dict.keys()), valid_id_types) if 'doi' in id_dict.keys() and id_dict['doi'].lower().startswith('doi:'): id_dict['doi'] = id_dict['doi'][4:] # depends on [control=['if'], data=[]] content = None for id_type in valid_id_types: if id_type in id_dict.keys(): content = download_article(id_dict[id_type], id_type) if content is not None: break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['id_type']] # depends on [control=['for'], data=['id_type']] else: logger.error('Could not download article with any of the ids: %s.' % str(id_dict)) return content
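The id-priority fallback in isolation, with a made-up fetch_stub standing in for download_article; the DOI prefix stripping and the error logging from the original are left out.

valid_id_types = ['eid', 'doi', 'pmid', 'pii']

def fetch_stub(id_val, id_type):
    # Pretend only the PII lookup succeeds, to show the fallback order.
    return '<xml/>' if id_type == 'pii' else None

def first_hit(**id_dict):
    for id_type in valid_id_types:
        if id_type in id_dict:
            content = fetch_stub(id_dict[id_type], id_type)
            if content is not None:
                return content
    return None

print(first_hit(doi='10.1016/j.example', pii='S0000000000000000'))
# '<xml/>' - the doi lookup fails first, then the pii lookup succeeds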
def lex(self, text): """ Yield (token_type, str_data) tokens. The last token will be (EOF, None) where EOF is the singleton object defined in this module. """ for match in self.regex.finditer(text): for name, _ in self.lexicon: m = match.group(name) if m is not None: yield (name, m) break yield (EOF, None)
def function[lex, parameter[self, text]]: constant[ Yield (token_type, str_data) tokens. The last token will be (EOF, None) where EOF is the singleton object defined in this module. ] for taget[name[match]] in starred[call[name[self].regex.finditer, parameter[name[text]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18f00e4a0>, <ast.Name object at 0x7da18f00de10>]]] in starred[name[self].lexicon] begin[:] variable[m] assign[=] call[name[match].group, parameter[name[name]]] if compare[name[m] is_not constant[None]] begin[:] <ast.Yield object at 0x7da18f00ceb0> break <ast.Yield object at 0x7da18f00e110>
keyword[def] identifier[lex] ( identifier[self] , identifier[text] ): literal[string] keyword[for] identifier[match] keyword[in] identifier[self] . identifier[regex] . identifier[finditer] ( identifier[text] ): keyword[for] identifier[name] , identifier[_] keyword[in] identifier[self] . identifier[lexicon] : identifier[m] = identifier[match] . identifier[group] ( identifier[name] ) keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] : keyword[yield] ( identifier[name] , identifier[m] ) keyword[break] keyword[yield] ( identifier[EOF] , keyword[None] )
def lex(self, text): """ Yield (token_type, str_data) tokens. The last token will be (EOF, None) where EOF is the singleton object defined in this module. """ for match in self.regex.finditer(text): for (name, _) in self.lexicon: m = match.group(name) if m is not None: yield (name, m) break # depends on [control=['if'], data=['m']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['match']] yield (EOF, None)
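A self-contained miniature of the same named-group scanning loop; the lexicon, the combined regex, and the EOF sentinel are built elsewhere in the original class, so small stand-ins are recreated here.

import re

EOF = object()
lexicon = [("NUMBER", r"\d+"), ("NAME", r"[A-Za-z_]\w*"), ("OP", r"[+\-*/=]"), ("WS", r"\s+")]
regex = re.compile("|".join("(?P<%s>%s)" % (name, pat) for name, pat in lexicon))

def lex_sketch(text):
    for match in regex.finditer(text):
        # Find which named group actually matched and yield it as a token.
        for name, _ in lexicon:
            m = match.group(name)
            if m is not None:
                yield (name, m)
                break
    yield (EOF, None)

print([t for t in lex_sketch("x1 = 42") if t[0] not in ("WS", EOF)])
# [('NAME', 'x1'), ('OP', '='), ('NUMBER', '42')]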
def status(self, job_ids): ''' Get the status of a list of jobs identified by their ids. Args: - job_ids (List of ids) : List of identifiers for the jobs Returns: - List of status codes. ''' logging.debug("Checking status of : {0}".format(job_ids)) for job_id in self.resources: poll_code = self.resources[job_id]['proc'].poll() if self.resources[job_id]['status'] in ['COMPLETED', 'FAILED']: continue if poll_code is None: self.resources[job_id]['status'] = 'RUNNING' elif poll_code == 0 and self.resources[job_id]['status'] != 'RUNNING': self.resources[job_id]['status'] = 'COMPLETED' elif poll_code < 0 and self.resources[job_id]['status'] != 'RUNNING': self.resources[job_id]['status'] = 'FAILED' return [self.resources[jid]['status'] for jid in job_ids]
def function[status, parameter[self, job_ids]]: constant[ Get the status of a list of jobs identified by their ids. Args: - job_ids (List of ids) : List of identifiers for the jobs Returns: - List of status codes. ] call[name[logging].debug, parameter[call[constant[Checking status of : {0}].format, parameter[name[job_ids]]]]] for taget[name[job_id]] in starred[name[self].resources] begin[:] variable[poll_code] assign[=] call[call[call[name[self].resources][name[job_id]]][constant[proc]].poll, parameter[]] if compare[call[call[name[self].resources][name[job_id]]][constant[status]] in list[[<ast.Constant object at 0x7da1b0ac8b20>, <ast.Constant object at 0x7da1b0ac8c10>]]] begin[:] continue if compare[name[poll_code] is constant[None]] begin[:] call[call[name[self].resources][name[job_id]]][constant[status]] assign[=] constant[RUNNING] return[<ast.ListComp object at 0x7da1b0a04ac0>]
keyword[def] identifier[status] ( identifier[self] , identifier[job_ids] ): literal[string] identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[job_ids] )) keyword[for] identifier[job_id] keyword[in] identifier[self] . identifier[resources] : identifier[poll_code] = identifier[self] . identifier[resources] [ identifier[job_id] ][ literal[string] ]. identifier[poll] () keyword[if] identifier[self] . identifier[resources] [ identifier[job_id] ][ literal[string] ] keyword[in] [ literal[string] , literal[string] ]: keyword[continue] keyword[if] identifier[poll_code] keyword[is] keyword[None] : identifier[self] . identifier[resources] [ identifier[job_id] ][ literal[string] ]= literal[string] keyword[elif] identifier[poll_code] == literal[int] keyword[and] identifier[self] . identifier[resources] [ identifier[job_id] ][ literal[string] ]!= literal[string] : identifier[self] . identifier[resources] [ identifier[job_id] ][ literal[string] ]= literal[string] keyword[elif] identifier[poll_code] < literal[int] keyword[and] identifier[self] . identifier[resources] [ identifier[job_id] ][ literal[string] ]!= literal[string] : identifier[self] . identifier[resources] [ identifier[job_id] ][ literal[string] ]= literal[string] keyword[return] [ identifier[self] . identifier[resources] [ identifier[jid] ][ literal[string] ] keyword[for] identifier[jid] keyword[in] identifier[job_ids] ]
def status(self, job_ids): """ Get the status of a list of jobs identified by their ids. Args: - job_ids (List of ids) : List of identifiers for the jobs Returns: - List of status codes. """ logging.debug('Checking status of : {0}'.format(job_ids)) for job_id in self.resources: poll_code = self.resources[job_id]['proc'].poll() if self.resources[job_id]['status'] in ['COMPLETED', 'FAILED']: continue # depends on [control=['if'], data=[]] if poll_code is None: self.resources[job_id]['status'] = 'RUNNING' # depends on [control=['if'], data=[]] elif poll_code == 0 and self.resources[job_id]['status'] != 'RUNNING': self.resources[job_id]['status'] = 'COMPLETED' # depends on [control=['if'], data=[]] elif poll_code < 0 and self.resources[job_id]['status'] != 'RUNNING': self.resources[job_id]['status'] = 'FAILED' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['job_id']] return [self.resources[jid]['status'] for jid in job_ids]
def semantic_distance(go_id1, go_id2, godag, branch_dist=None): ''' Finds the semantic distance (minimum number of connecting branches) between two GO terms. ''' return min_branch_length(go_id1, go_id2, godag, branch_dist)
def function[semantic_distance, parameter[go_id1, go_id2, godag, branch_dist]]: constant[ Finds the semantic distance (minimum number of connecting branches) between two GO terms. ] return[call[name[min_branch_length], parameter[name[go_id1], name[go_id2], name[godag], name[branch_dist]]]]
keyword[def] identifier[semantic_distance] ( identifier[go_id1] , identifier[go_id2] , identifier[godag] , identifier[branch_dist] = keyword[None] ): literal[string] keyword[return] identifier[min_branch_length] ( identifier[go_id1] , identifier[go_id2] , identifier[godag] , identifier[branch_dist] )
def semantic_distance(go_id1, go_id2, godag, branch_dist=None): """ Finds the semantic distance (minimum number of connecting branches) between two GO terms. """ return min_branch_length(go_id1, go_id2, godag, branch_dist)
def get_src_or_dst_path(prompt, count):
    """
    Let the user choose a path, and store the value.
    :return str _path: Target directory
    :return int count: Counter for attempted prompts
    """
    _path = ""
    print(prompt)
    option = input("Option: ")
    print("\n")
    if option == '1':
        # Set the path to the system desktop folder.
        logger_directory.info("1: desktop")
        _path = os.path.expanduser('~/Desktop')
    elif option == '2':
        # Set the path to the system downloads folder.
        logger_directory.info("2: downloads")
        _path = os.path.expanduser('~/Downloads')
    elif option == '3':
        # Current directory
        logger_directory.info("3: current")
        _path = os.getcwd()
    elif option == '4':
        # Open up the GUI browse dialog
        logger_directory.info("4: browse ")
        _path = browse_dialog_dir()
    else:
        # Something went wrong. Prompt again. Give a couple of tries before defaulting to the current working directory
        if count == 2:
            logger_directory.warn("too many attempts")
            print("Too many failed attempts. Defaulting to current working directory.")
            _path = os.getcwd()
        else:
            count += 1
            logger_directory.warn("failed attempts: {}".format(count))
            print("Invalid option. Try again.")

    return _path, count
def function[get_src_or_dst_path, parameter[prompt, count]]: constant[ Let the user choose a path, and store the value. :return str _path: Target directory :return str count: Counter for attempted prompts ] variable[_path] assign[=] constant[] call[name[print], parameter[name[prompt]]] variable[option] assign[=] call[name[input], parameter[constant[Option: ]]] call[name[print], parameter[constant[ ]]] if compare[name[option] equal[==] constant[1]] begin[:] call[name[logger_directory].info, parameter[constant[1: desktop]]] variable[_path] assign[=] call[name[os].path.expanduser, parameter[constant[~/Desktop]]] return[tuple[[<ast.Name object at 0x7da18fe90460>, <ast.Name object at 0x7da2044c3250>]]]
keyword[def] identifier[get_src_or_dst_path] ( identifier[prompt] , identifier[count] ): literal[string] identifier[_path] = literal[string] identifier[print] ( identifier[prompt] ) identifier[option] = identifier[input] ( literal[string] ) identifier[print] ( literal[string] ) keyword[if] identifier[option] == literal[string] : identifier[logger_directory] . identifier[info] ( literal[string] ) identifier[_path] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] ) keyword[elif] identifier[option] == literal[string] : identifier[logger_directory] . identifier[info] ( literal[string] ) identifier[_path] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] ) keyword[elif] identifier[option] == literal[string] : identifier[logger_directory] . identifier[info] ( literal[string] ) identifier[_path] = identifier[os] . identifier[getcwd] () keyword[elif] identifier[option] == literal[string] : identifier[logger_directory] . identifier[info] ( literal[string] ) identifier[_path] = identifier[browse_dialog_dir] () keyword[else] : keyword[if] identifier[count] == literal[int] : identifier[logger_directory] . identifier[warn] ( literal[string] ) identifier[print] ( literal[string] ) identifier[_path] = identifier[os] . identifier[getcwd] () keyword[else] : identifier[count] += literal[int] identifier[logger_directory] . identifier[warn] ( literal[string] . identifier[format] ( identifier[count] )) identifier[print] ( literal[string] ) keyword[return] identifier[_path] , identifier[count]
def get_src_or_dst_path(prompt, count): """ Let the user choose a path, and store the value. :return str _path: Target directory :return str count: Counter for attempted prompts """ _path = '' print(prompt) option = input('Option: ') print('\n') if option == '1': # Set the path to the system desktop folder. logger_directory.info('1: desktop') _path = os.path.expanduser('~/Desktop') # depends on [control=['if'], data=[]] elif option == '2': # Set the path to the system downloads folder. logger_directory.info('2: downloads') _path = os.path.expanduser('~/Downloads') # depends on [control=['if'], data=[]] elif option == '3': # Current directory logger_directory.info('3: current') _path = os.getcwd() # depends on [control=['if'], data=[]] elif option == '4': # Open up the GUI browse dialog logger_directory.info('4: browse ') _path = browse_dialog_dir() # depends on [control=['if'], data=[]] # Something went wrong. Prompt again. Give a couple tries before defaulting to downloads folder elif count == 2: logger_directory.warn('too many attempts') print('Too many failed attempts. Defaulting to current working directory.') _path = os.getcwd() # depends on [control=['if'], data=[]] else: count += 1 logger_directory.warn('failed attempts: {}'.format(count)) print('Invalid option. Try again.') return (_path, count)
def input(self, str): """ Defines input string, removing current lexer. """ self.input_data = str self.lex = lex.lex(object=self) self.lex.input(self.input_data)
def function[input, parameter[self, str]]: constant[ Defines input string, removing current lexer. ] name[self].input_data assign[=] name[str] name[self].lex assign[=] call[name[lex].lex, parameter[]] call[name[self].lex.input, parameter[name[self].input_data]]
keyword[def] identifier[input] ( identifier[self] , identifier[str] ): literal[string] identifier[self] . identifier[input_data] = identifier[str] identifier[self] . identifier[lex] = identifier[lex] . identifier[lex] ( identifier[object] = identifier[self] ) identifier[self] . identifier[lex] . identifier[input] ( identifier[self] . identifier[input_data] )
def input(self, str): """ Defines input string, removing current lexer. """ self.input_data = str self.lex = lex.lex(object=self) self.lex.input(self.input_data)
def dump_raw_data(filename, data): """ Write the data into a raw format file. Big endian is always used. Parameters ---------- filename: str Path to the output file data: numpy.ndarray n-dimensional image data array. """ if data.ndim == 3: # Begin 3D fix data = data.reshape([data.shape[0], data.shape[1]*data.shape[2]]) # End 3D fix a = array.array('f') for o in data: a.fromlist(list(o.flatten())) # if is_little_endian(): # a.byteswap() with open(filename, 'wb') as rawf: a.tofile(rawf)
def function[dump_raw_data, parameter[filename, data]]: constant[ Write the data into a raw format file. Big endian is always used. Parameters ---------- filename: str Path to the output file data: numpy.ndarray n-dimensional image data array. ] if compare[name[data].ndim equal[==] constant[3]] begin[:] variable[data] assign[=] call[name[data].reshape, parameter[list[[<ast.Subscript object at 0x7da1b008c070>, <ast.BinOp object at 0x7da1b008dc60>]]]] variable[a] assign[=] call[name[array].array, parameter[constant[f]]] for taget[name[o]] in starred[name[data]] begin[:] call[name[a].fromlist, parameter[call[name[list], parameter[call[name[o].flatten, parameter[]]]]]] with call[name[open], parameter[name[filename], constant[wb]]] begin[:] call[name[a].tofile, parameter[name[rawf]]]
keyword[def] identifier[dump_raw_data] ( identifier[filename] , identifier[data] ): literal[string] keyword[if] identifier[data] . identifier[ndim] == literal[int] : identifier[data] = identifier[data] . identifier[reshape] ([ identifier[data] . identifier[shape] [ literal[int] ], identifier[data] . identifier[shape] [ literal[int] ]* identifier[data] . identifier[shape] [ literal[int] ]]) identifier[a] = identifier[array] . identifier[array] ( literal[string] ) keyword[for] identifier[o] keyword[in] identifier[data] : identifier[a] . identifier[fromlist] ( identifier[list] ( identifier[o] . identifier[flatten] ())) keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[rawf] : identifier[a] . identifier[tofile] ( identifier[rawf] )
def dump_raw_data(filename, data): """ Write the data into a raw format file. Big endian is always used. Parameters ---------- filename: str Path to the output file data: numpy.ndarray n-dimensional image data array. """ if data.ndim == 3: # Begin 3D fix data = data.reshape([data.shape[0], data.shape[1] * data.shape[2]]) # depends on [control=['if'], data=[]] # End 3D fix a = array.array('f') for o in data: a.fromlist(list(o.flatten())) # depends on [control=['for'], data=['o']] # if is_little_endian(): # a.byteswap() with open(filename, 'wb') as rawf: a.tofile(rawf) # depends on [control=['with'], data=['rawf']]
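A brief usage sketch for `dump_raw_data`, assuming the function above plus its implied `import array` and `import numpy as np` are in scope; the file name and array contents are hypothetical. One caveat worth noting: the docstring says big endian is always used, but the `byteswap()` call is commented out, so the file is actually written in native byte order.

import numpy as np

vol = np.arange(24, dtype=np.float32).reshape(2, 3, 4)  # hypothetical 2x3x4 volume
dump_raw_data('vol.raw', vol)  # 3D input is first reshaped to (2, 12), then written
# vol.raw now contains 24 float32 values in native byte order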
def _context_source_file_url(path_or_url): """ Returns a URL for a remote or local context CSV file """ if path_or_url.startswith('http'): # Remote CSV. Just return the URL return path_or_url if path_or_url.startswith('/'): # Absolute path return "file://" + path_or_url return "file://" + os.path.join(os.path.realpath(os.getcwd()), path_or_url)
def function[_context_source_file_url, parameter[path_or_url]]: constant[ Returns a URL for a remote or local context CSV file ] if call[name[path_or_url].startswith, parameter[constant[http]]] begin[:] return[name[path_or_url]] if call[name[path_or_url].startswith, parameter[constant[/]]] begin[:] return[binary_operation[constant[file://] + name[path_or_url]]] return[binary_operation[constant[file://] + call[name[os].path.join, parameter[call[name[os].path.realpath, parameter[call[name[os].getcwd, parameter[]]]], name[path_or_url]]]]]
keyword[def] identifier[_context_source_file_url] ( identifier[path_or_url] ): literal[string] keyword[if] identifier[path_or_url] . identifier[startswith] ( literal[string] ): keyword[return] identifier[path_or_url] keyword[if] identifier[path_or_url] . identifier[startswith] ( literal[string] ): keyword[return] literal[string] + identifier[path_or_url] keyword[return] literal[string] + identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[os] . identifier[getcwd] ()), identifier[path_or_url] )
def _context_source_file_url(path_or_url): """ Returns a URL for a remote or local context CSV file """ if path_or_url.startswith('http'): # Remote CSV. Just return the URL return path_or_url # depends on [control=['if'], data=[]] if path_or_url.startswith('/'): # Absolute path return 'file://' + path_or_url # depends on [control=['if'], data=[]] return 'file://' + os.path.join(os.path.realpath(os.getcwd()), path_or_url)
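A quick sketch of the three branches of `_context_source_file_url`, assuming `import os` and the function above; all paths are hypothetical:

_context_source_file_url('https://example.com/ctx.csv')  # remote: returned unchanged
_context_source_file_url('/data/ctx.csv')                # absolute: 'file:///data/ctx.csv'
_context_source_file_url('ctx.csv')                      # relative: 'file://' + <resolved cwd> + '/ctx.csv'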
def apply_changes(self): """Apply changes callback""" if self.is_modified: self.save_to_conf() if self.apply_callback is not None: self.apply_callback() # Since the language cannot be retrieved by CONF and the language # is needed before loading CONF, this is an extra method needed to # ensure that when changes are applied, they are copied to a # specific file storing the language value. This only applies to # the main section config. if self.CONF_SECTION == u'main': self._save_lang() for restart_option in self.restart_options: if restart_option in self.changed_options: self.prompt_restart_required() break # Ensure a single popup is displayed self.set_modified(False)
def function[apply_changes, parameter[self]]: constant[Apply changes callback] if name[self].is_modified begin[:] call[name[self].save_to_conf, parameter[]] if compare[name[self].apply_callback is_not constant[None]] begin[:] call[name[self].apply_callback, parameter[]] if compare[name[self].CONF_SECTION equal[==] constant[main]] begin[:] call[name[self]._save_lang, parameter[]] for taget[name[restart_option]] in starred[name[self].restart_options] begin[:] if compare[name[restart_option] in name[self].changed_options] begin[:] call[name[self].prompt_restart_required, parameter[]] break call[name[self].set_modified, parameter[constant[False]]]
keyword[def] identifier[apply_changes] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[is_modified] : identifier[self] . identifier[save_to_conf] () keyword[if] identifier[self] . identifier[apply_callback] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[apply_callback] () keyword[if] identifier[self] . identifier[CONF_SECTION] == literal[string] : identifier[self] . identifier[_save_lang] () keyword[for] identifier[restart_option] keyword[in] identifier[self] . identifier[restart_options] : keyword[if] identifier[restart_option] keyword[in] identifier[self] . identifier[changed_options] : identifier[self] . identifier[prompt_restart_required] () keyword[break] identifier[self] . identifier[set_modified] ( keyword[False] )
def apply_changes(self): """Apply changes callback""" if self.is_modified: self.save_to_conf() if self.apply_callback is not None: self.apply_callback() # depends on [control=['if'], data=[]] # Since the language cannot be retrieved by CONF and the language # is needed before loading CONF, this is an extra method needed to # ensure that when changes are applied, they are copied to a # specific file storing the language value. This only applies to # the main section config. if self.CONF_SECTION == u'main': self._save_lang() # depends on [control=['if'], data=[]] for restart_option in self.restart_options: if restart_option in self.changed_options: self.prompt_restart_required() break # Ensure a single popup is displayed # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['restart_option']] self.set_modified(False) # depends on [control=['if'], data=[]]
def remove_tag(tag_id):
    '''
    Delete the records of a certain tag.
    '''
    entry = TabPost2Tag.delete().where(
        TabPost2Tag.tag_id == tag_id
    )
    entry.execute()
def function[remove_tag, parameter[tag_id]]: constant[ Delete the records of a certain tag. ] variable[entry] assign[=] call[call[name[TabPost2Tag].delete, parameter[]].where, parameter[compare[name[TabPost2Tag].tag_id equal[==] name[tag_id]]]] call[name[entry].execute, parameter[]]
keyword[def] identifier[remove_tag] ( identifier[tag_id] ): literal[string] identifier[entry] = identifier[TabPost2Tag] . identifier[delete] (). identifier[where] ( identifier[TabPost2Tag] . identifier[tag_id] == identifier[tag_id] ) identifier[entry] . identifier[execute] ()
def remove_tag(tag_id):
    """
    Delete the records of a certain tag.
    """
    entry = TabPost2Tag.delete().where(TabPost2Tag.tag_id == tag_id)
    entry.execute()
def apns_send_bulk_message(
    registration_ids, alert, application_id=None, certfile=None, **kwargs
):
    """
    Sends an APNS notification to one or more registration_ids.
    The registration_ids argument needs to be a list.

    Note that if set, alert should always be a string. If it is not set,
    it won't be included in the notification. You will need to pass None
    to this for silent notifications.
    """

    results = _apns_send(
        registration_ids, alert, batch=True, application_id=application_id,
        certfile=certfile, **kwargs
    )
    inactive_tokens = [token for token, result in results.items() if result == "Unregistered"]
    models.APNSDevice.objects.filter(registration_id__in=inactive_tokens).update(active=False)

    return results
def function[apns_send_bulk_message, parameter[registration_ids, alert, application_id, certfile]]: constant[ Sends an APNS notification to one or more registration_ids. The registration_ids argument needs to be a list. Note that if set, alert should always be a string. If it is not set, it won't be included in the notification. You will need to pass None to this for silent notifications. ] variable[results] assign[=] call[name[_apns_send], parameter[name[registration_ids], name[alert]]] variable[inactive_tokens] assign[=] <ast.ListComp object at 0x7da18f00d8d0> call[call[name[models].APNSDevice.objects.filter, parameter[]].update, parameter[]] return[name[results]]
keyword[def] identifier[apns_send_bulk_message] ( identifier[registration_ids] , identifier[alert] , identifier[application_id] = keyword[None] , identifier[certfile] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[results] = identifier[_apns_send] ( identifier[registration_ids] , identifier[alert] , identifier[batch] = keyword[True] , identifier[application_id] = identifier[application_id] , identifier[certfile] = identifier[certfile] ,** identifier[kwargs] ) identifier[inactive_tokens] =[ identifier[token] keyword[for] identifier[token] , identifier[result] keyword[in] identifier[results] . identifier[items] () keyword[if] identifier[result] == literal[string] ] identifier[models] . identifier[APNSDevice] . identifier[objects] . identifier[filter] ( identifier[registration_id__in] = identifier[inactive_tokens] ). identifier[update] ( identifier[active] = keyword[False] ) keyword[return] identifier[results]
def apns_send_bulk_message(registration_ids, alert, application_id=None, certfile=None, **kwargs):
    """
    Sends an APNS notification to one or more registration_ids.
    The registration_ids argument needs to be a list.

    Note that if set, alert should always be a string. If it is not set,
    it won't be included in the notification. You will need to pass None
    to this for silent notifications.
    """
    results = _apns_send(registration_ids, alert, batch=True, application_id=application_id, certfile=certfile, **kwargs)
    inactive_tokens = [token for (token, result) in results.items() if result == 'Unregistered']
    models.APNSDevice.objects.filter(registration_id__in=inactive_tokens).update(active=False)
    return results
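The deactivation step in `apns_send_bulk_message` is plain dictionary filtering and can be exercised in isolation; a minimal sketch with hypothetical token names, independent of any APNS or Django setup:

results = {'tok-a': 'Success', 'tok-b': 'Unregistered', 'tok-c': 'Success'}
inactive_tokens = [token for token, result in results.items() if result == 'Unregistered']
assert inactive_tokens == ['tok-b']  # these registration IDs would be marked inactive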
def add_measurement(self, exp_name, meas_num, spec_name=None, er_data=None, pmag_data=None): """ Find actual data object for specimen. Then create a measurement belonging to that specimen and add it to the data object """ specimen = self.find_by_name(spec_name, self.specimens) measurement = Measurement(exp_name, meas_num, specimen, er_data) self.measurements.append(measurement) return measurement
def function[add_measurement, parameter[self, exp_name, meas_num, spec_name, er_data, pmag_data]]: constant[ Find actual data object for specimen. Then create a measurement belonging to that specimen and add it to the data object ] variable[specimen] assign[=] call[name[self].find_by_name, parameter[name[spec_name], name[self].specimens]] variable[measurement] assign[=] call[name[Measurement], parameter[name[exp_name], name[meas_num], name[specimen], name[er_data]]] call[name[self].measurements.append, parameter[name[measurement]]] return[name[measurement]]
keyword[def] identifier[add_measurement] ( identifier[self] , identifier[exp_name] , identifier[meas_num] , identifier[spec_name] = keyword[None] , identifier[er_data] = keyword[None] , identifier[pmag_data] = keyword[None] ): literal[string] identifier[specimen] = identifier[self] . identifier[find_by_name] ( identifier[spec_name] , identifier[self] . identifier[specimens] ) identifier[measurement] = identifier[Measurement] ( identifier[exp_name] , identifier[meas_num] , identifier[specimen] , identifier[er_data] ) identifier[self] . identifier[measurements] . identifier[append] ( identifier[measurement] ) keyword[return] identifier[measurement]
def add_measurement(self, exp_name, meas_num, spec_name=None, er_data=None, pmag_data=None): """ Find actual data object for specimen. Then create a measurement belonging to that specimen and add it to the data object """ specimen = self.find_by_name(spec_name, self.specimens) measurement = Measurement(exp_name, meas_num, specimen, er_data) self.measurements.append(measurement) return measurement
def swap_args(self, new_args, new_length=None): """ This returns the same AST, with the arguments swapped out for new_args. """ if len(self.args) == len(new_args) and all(a is b for a,b in zip(self.args, new_args)): return self #symbolic = any(a.symbolic for a in new_args if isinstance(a, Base)) #variables = frozenset.union(frozenset(), *(a.variables for a in new_args if isinstance(a, Base))) length = self.length if new_length is None else new_length a = self.__class__(self.op, new_args, length=length) #if a.op != self.op or a.symbolic != self.symbolic or a.variables != self.variables: # raise ClaripyOperationError("major bug in swap_args()") return a
def function[swap_args, parameter[self, new_args, new_length]]: constant[ This returns the same AST, with the arguments swapped out for new_args. ] if <ast.BoolOp object at 0x7da1b1d4e590> begin[:] return[name[self]] variable[length] assign[=] <ast.IfExp object at 0x7da1b1d4f640> variable[a] assign[=] call[name[self].__class__, parameter[name[self].op, name[new_args]]] return[name[a]]
keyword[def] identifier[swap_args] ( identifier[self] , identifier[new_args] , identifier[new_length] = keyword[None] ): literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[args] )== identifier[len] ( identifier[new_args] ) keyword[and] identifier[all] ( identifier[a] keyword[is] identifier[b] keyword[for] identifier[a] , identifier[b] keyword[in] identifier[zip] ( identifier[self] . identifier[args] , identifier[new_args] )): keyword[return] identifier[self] identifier[length] = identifier[self] . identifier[length] keyword[if] identifier[new_length] keyword[is] keyword[None] keyword[else] identifier[new_length] identifier[a] = identifier[self] . identifier[__class__] ( identifier[self] . identifier[op] , identifier[new_args] , identifier[length] = identifier[length] ) keyword[return] identifier[a]
def swap_args(self, new_args, new_length=None): """ This returns the same AST, with the arguments swapped out for new_args. """ if len(self.args) == len(new_args) and all((a is b for (a, b) in zip(self.args, new_args))): return self # depends on [control=['if'], data=[]] #symbolic = any(a.symbolic for a in new_args if isinstance(a, Base)) #variables = frozenset.union(frozenset(), *(a.variables for a in new_args if isinstance(a, Base))) length = self.length if new_length is None else new_length a = self.__class__(self.op, new_args, length=length) #if a.op != self.op or a.symbolic != self.symbolic or a.variables != self.variables: # raise ClaripyOperationError("major bug in swap_args()") return a
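The detail worth seeing in `swap_args` is the identity fast path: when every new argument `is` the corresponding old one, the original AST comes back instead of a copy. A minimal sketch assuming `swap_args` is available as the plain function shown above; `Node` is a hypothetical stand-in whose constructor mirrors the `self.__class__(self.op, new_args, length=length)` call:

class Node:
    def __init__(self, op, args, length=None):
        self.op, self.args, self.length = op, args, length

Node.swap_args = swap_args  # attach the function above as a method

x, y = object(), object()
a = Node('add', (x, y), length=32)
assert a.swap_args((x, y)) is a          # identical args: the same object is returned
b = a.swap_args((x, object()))
assert b is not a and b.length == 32     # changed args: a fresh node, length preserved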
def index(self, doc, index, doc_type, id=None, parent=None, force_insert=False, op_type=None, bulk=False, version=None, querystring_args=None, ttl=None): """ Index a typed JSON document into a specific index and make it searchable. """ if querystring_args is None: querystring_args = {} if bulk: if op_type is None: op_type = "index" if force_insert: op_type = "create" cmd = {op_type: {"_index": index, "_type": doc_type}} if parent: cmd[op_type]['_parent'] = parent if version: cmd[op_type]['_version'] = version if 'routing' in querystring_args: cmd[op_type]['_routing'] = querystring_args['routing'] if 'percolate' in querystring_args: cmd[op_type]['percolate'] = querystring_args['percolate'] if id is not None: #None to support 0 as id cmd[op_type]['_id'] = id if ttl is not None: cmd[op_type]['_ttl'] = ttl if isinstance(doc, dict): doc = json.dumps(doc, cls=self.encoder) command = "%s\n%s" % (json.dumps(cmd, cls=self.encoder), doc) self.bulker.add(command) return self.flush_bulk() if force_insert: querystring_args['op_type'] = 'create' if op_type: querystring_args['op_type'] = op_type if parent: if not isinstance(parent, str): parent = str(parent) querystring_args['parent'] = parent if version: if not isinstance(version, str): version = str(version) querystring_args['version'] = version if ttl is not None: if not isinstance(ttl, str): ttl = str(ttl) querystring_args['ttl'] = ttl if id is None: request_method = 'POST' else: request_method = 'PUT' path = make_path(index, doc_type, id) return self._send_request(request_method, path, doc, querystring_args)
def function[index, parameter[self, doc, index, doc_type, id, parent, force_insert, op_type, bulk, version, querystring_args, ttl]]: constant[ Index a typed JSON document into a specific index and make it searchable. ] if compare[name[querystring_args] is constant[None]] begin[:] variable[querystring_args] assign[=] dictionary[[], []] if name[bulk] begin[:] if compare[name[op_type] is constant[None]] begin[:] variable[op_type] assign[=] constant[index] if name[force_insert] begin[:] variable[op_type] assign[=] constant[create] variable[cmd] assign[=] dictionary[[<ast.Name object at 0x7da1b0e839a0>], [<ast.Dict object at 0x7da1b0e80430>]] if name[parent] begin[:] call[call[name[cmd]][name[op_type]]][constant[_parent]] assign[=] name[parent] if name[version] begin[:] call[call[name[cmd]][name[op_type]]][constant[_version]] assign[=] name[version] if compare[constant[routing] in name[querystring_args]] begin[:] call[call[name[cmd]][name[op_type]]][constant[_routing]] assign[=] call[name[querystring_args]][constant[routing]] if compare[constant[percolate] in name[querystring_args]] begin[:] call[call[name[cmd]][name[op_type]]][constant[percolate]] assign[=] call[name[querystring_args]][constant[percolate]] if compare[name[id] is_not constant[None]] begin[:] call[call[name[cmd]][name[op_type]]][constant[_id]] assign[=] name[id] if compare[name[ttl] is_not constant[None]] begin[:] call[call[name[cmd]][name[op_type]]][constant[_ttl]] assign[=] name[ttl] if call[name[isinstance], parameter[name[doc], name[dict]]] begin[:] variable[doc] assign[=] call[name[json].dumps, parameter[name[doc]]] variable[command] assign[=] binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b0e6d4b0>, <ast.Name object at 0x7da1b0e6e170>]]] call[name[self].bulker.add, parameter[name[command]]] return[call[name[self].flush_bulk, parameter[]]] if name[force_insert] begin[:] call[name[querystring_args]][constant[op_type]] assign[=] constant[create] if name[op_type] begin[:] call[name[querystring_args]][constant[op_type]] assign[=] name[op_type] if name[parent] begin[:] if <ast.UnaryOp object at 0x7da1b0e6c0a0> begin[:] variable[parent] assign[=] call[name[str], parameter[name[parent]]] call[name[querystring_args]][constant[parent]] assign[=] name[parent] if name[version] begin[:] if <ast.UnaryOp object at 0x7da1b0cb7f70> begin[:] variable[version] assign[=] call[name[str], parameter[name[version]]] call[name[querystring_args]][constant[version]] assign[=] name[version] if compare[name[ttl] is_not constant[None]] begin[:] if <ast.UnaryOp object at 0x7da1b0cb6260> begin[:] variable[ttl] assign[=] call[name[str], parameter[name[ttl]]] call[name[querystring_args]][constant[ttl]] assign[=] name[ttl] if compare[name[id] is constant[None]] begin[:] variable[request_method] assign[=] constant[POST] variable[path] assign[=] call[name[make_path], parameter[name[index], name[doc_type], name[id]]] return[call[name[self]._send_request, parameter[name[request_method], name[path], name[doc], name[querystring_args]]]]
keyword[def] identifier[index] ( identifier[self] , identifier[doc] , identifier[index] , identifier[doc_type] , identifier[id] = keyword[None] , identifier[parent] = keyword[None] , identifier[force_insert] = keyword[False] , identifier[op_type] = keyword[None] , identifier[bulk] = keyword[False] , identifier[version] = keyword[None] , identifier[querystring_args] = keyword[None] , identifier[ttl] = keyword[None] ): literal[string] keyword[if] identifier[querystring_args] keyword[is] keyword[None] : identifier[querystring_args] ={} keyword[if] identifier[bulk] : keyword[if] identifier[op_type] keyword[is] keyword[None] : identifier[op_type] = literal[string] keyword[if] identifier[force_insert] : identifier[op_type] = literal[string] identifier[cmd] ={ identifier[op_type] :{ literal[string] : identifier[index] , literal[string] : identifier[doc_type] }} keyword[if] identifier[parent] : identifier[cmd] [ identifier[op_type] ][ literal[string] ]= identifier[parent] keyword[if] identifier[version] : identifier[cmd] [ identifier[op_type] ][ literal[string] ]= identifier[version] keyword[if] literal[string] keyword[in] identifier[querystring_args] : identifier[cmd] [ identifier[op_type] ][ literal[string] ]= identifier[querystring_args] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[querystring_args] : identifier[cmd] [ identifier[op_type] ][ literal[string] ]= identifier[querystring_args] [ literal[string] ] keyword[if] identifier[id] keyword[is] keyword[not] keyword[None] : identifier[cmd] [ identifier[op_type] ][ literal[string] ]= identifier[id] keyword[if] identifier[ttl] keyword[is] keyword[not] keyword[None] : identifier[cmd] [ identifier[op_type] ][ literal[string] ]= identifier[ttl] keyword[if] identifier[isinstance] ( identifier[doc] , identifier[dict] ): identifier[doc] = identifier[json] . identifier[dumps] ( identifier[doc] , identifier[cls] = identifier[self] . identifier[encoder] ) identifier[command] = literal[string] %( identifier[json] . identifier[dumps] ( identifier[cmd] , identifier[cls] = identifier[self] . identifier[encoder] ), identifier[doc] ) identifier[self] . identifier[bulker] . identifier[add] ( identifier[command] ) keyword[return] identifier[self] . identifier[flush_bulk] () keyword[if] identifier[force_insert] : identifier[querystring_args] [ literal[string] ]= literal[string] keyword[if] identifier[op_type] : identifier[querystring_args] [ literal[string] ]= identifier[op_type] keyword[if] identifier[parent] : keyword[if] keyword[not] identifier[isinstance] ( identifier[parent] , identifier[str] ): identifier[parent] = identifier[str] ( identifier[parent] ) identifier[querystring_args] [ literal[string] ]= identifier[parent] keyword[if] identifier[version] : keyword[if] keyword[not] identifier[isinstance] ( identifier[version] , identifier[str] ): identifier[version] = identifier[str] ( identifier[version] ) identifier[querystring_args] [ literal[string] ]= identifier[version] keyword[if] identifier[ttl] keyword[is] keyword[not] keyword[None] : keyword[if] keyword[not] identifier[isinstance] ( identifier[ttl] , identifier[str] ): identifier[ttl] = identifier[str] ( identifier[ttl] ) identifier[querystring_args] [ literal[string] ]= identifier[ttl] keyword[if] identifier[id] keyword[is] keyword[None] : identifier[request_method] = literal[string] keyword[else] : identifier[request_method] = literal[string] identifier[path] = identifier[make_path] ( identifier[index] , identifier[doc_type] , identifier[id] ) keyword[return] identifier[self] . identifier[_send_request] ( identifier[request_method] , identifier[path] , identifier[doc] , identifier[querystring_args] )
def index(self, doc, index, doc_type, id=None, parent=None, force_insert=False, op_type=None, bulk=False, version=None, querystring_args=None, ttl=None): """ Index a typed JSON document into a specific index and make it searchable. """ if querystring_args is None: querystring_args = {} # depends on [control=['if'], data=['querystring_args']] if bulk: if op_type is None: op_type = 'index' # depends on [control=['if'], data=['op_type']] if force_insert: op_type = 'create' # depends on [control=['if'], data=[]] cmd = {op_type: {'_index': index, '_type': doc_type}} if parent: cmd[op_type]['_parent'] = parent # depends on [control=['if'], data=[]] if version: cmd[op_type]['_version'] = version # depends on [control=['if'], data=[]] if 'routing' in querystring_args: cmd[op_type]['_routing'] = querystring_args['routing'] # depends on [control=['if'], data=['querystring_args']] if 'percolate' in querystring_args: cmd[op_type]['percolate'] = querystring_args['percolate'] # depends on [control=['if'], data=['querystring_args']] if id is not None: #None to support 0 as id cmd[op_type]['_id'] = id # depends on [control=['if'], data=['id']] if ttl is not None: cmd[op_type]['_ttl'] = ttl # depends on [control=['if'], data=['ttl']] if isinstance(doc, dict): doc = json.dumps(doc, cls=self.encoder) # depends on [control=['if'], data=[]] command = '%s\n%s' % (json.dumps(cmd, cls=self.encoder), doc) self.bulker.add(command) return self.flush_bulk() # depends on [control=['if'], data=[]] if force_insert: querystring_args['op_type'] = 'create' # depends on [control=['if'], data=[]] if op_type: querystring_args['op_type'] = op_type # depends on [control=['if'], data=[]] if parent: if not isinstance(parent, str): parent = str(parent) # depends on [control=['if'], data=[]] querystring_args['parent'] = parent # depends on [control=['if'], data=[]] if version: if not isinstance(version, str): version = str(version) # depends on [control=['if'], data=[]] querystring_args['version'] = version # depends on [control=['if'], data=[]] if ttl is not None: if not isinstance(ttl, str): ttl = str(ttl) # depends on [control=['if'], data=[]] querystring_args['ttl'] = ttl # depends on [control=['if'], data=['ttl']] if id is None: request_method = 'POST' # depends on [control=['if'], data=[]] else: request_method = 'PUT' path = make_path(index, doc_type, id) return self._send_request(request_method, path, doc, querystring_args)
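In the bulk branch, `index` only serialises an action line plus a document line joined by a newline; that envelope can be reproduced with nothing but the standard `json` module, using hypothetical index and type names:

import json

cmd = {'index': {'_index': 'articles', '_type': 'post', '_id': 42}}
doc = {'title': 'hello'}
command = "%s\n%s" % (json.dumps(cmd), json.dumps(doc))
# '{"index": {"_index": "articles", "_type": "post", "_id": 42}}\n{"title": "hello"}'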
def get_manager_state(drop_defaults=False, widgets=None):
    """Returns the full state for a widget manager for embedding

    :param drop_defaults: when True, it will not include default values
    :param widgets: list with widgets to include in the state (or all widgets when None)
    :return:
    """
    state = {}
    if widgets is None:
        widgets = Widget.widgets.values()
    for widget in widgets:
        state[widget.model_id] = widget._get_embed_state(drop_defaults=drop_defaults)
    return {'version_major': 2, 'version_minor': 0, 'state': state}
def function[get_manager_state, parameter[drop_defaults, widgets]]: constant[Returns the full state for a widget manager for embedding :param drop_defaults: when True, it will not include default values :param widgets: list with widgets to include in the state (or all widgets when None) :return: ] variable[state] assign[=] dictionary[[], []] if compare[name[widgets] is constant[None]] begin[:] variable[widgets] assign[=] call[name[Widget].widgets.values, parameter[]] for taget[name[widget]] in starred[name[widgets]] begin[:] call[name[state]][name[widget].model_id] assign[=] call[name[widget]._get_embed_state, parameter[]] return[dictionary[[<ast.Constant object at 0x7da18ede5e70>, <ast.Constant object at 0x7da18ede4310>, <ast.Constant object at 0x7da18ede4be0>], [<ast.Constant object at 0x7da18ede4d00>, <ast.Constant object at 0x7da18ede7280>, <ast.Name object at 0x7da18ede7220>]]]
keyword[def] identifier[get_manager_state] ( identifier[drop_defaults] = keyword[False] , identifier[widgets] = keyword[None] ): literal[string] identifier[state] ={} keyword[if] identifier[widgets] keyword[is] keyword[None] : identifier[widgets] = identifier[Widget] . identifier[widgets] . identifier[values] () keyword[for] identifier[widget] keyword[in] identifier[widgets] : identifier[state] [ identifier[widget] . identifier[model_id] ]= identifier[widget] . identifier[_get_embed_state] ( identifier[drop_defaults] = identifier[drop_defaults] ) keyword[return] { literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : identifier[state] }
def get_manager_state(drop_defaults=False, widgets=None):
    """Returns the full state for a widget manager for embedding

    :param drop_defaults: when True, it will not include default values
    :param widgets: list with widgets to include in the state (or all widgets when None)
    :return:
    """
    state = {}
    if widgets is None:
        widgets = Widget.widgets.values() # depends on [control=['if'], data=['widgets']]
    for widget in widgets:
        state[widget.model_id] = widget._get_embed_state(drop_defaults=drop_defaults) # depends on [control=['for'], data=['widget']]
    return {'version_major': 2, 'version_minor': 0, 'state': state}
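Because `get_manager_state` accepts an explicit `widgets` list, its return envelope can be exercised without a live widget manager; `FakeWidget` below is a hypothetical stand-in exposing just the two attributes the function touches:

class FakeWidget:
    model_id = 'abc123'

    def _get_embed_state(self, drop_defaults=False):
        return {'model_name': 'FakeModel'}

state = get_manager_state(widgets=[FakeWidget()])
assert state == {'version_major': 2, 'version_minor': 0,
                 'state': {'abc123': {'model_name': 'FakeModel'}}}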
def edit_message_text(self, chat_id, message_id, text, **options): """ Edit a text message in a chat :param int chat_id: ID of the chat the message to edit is in :param int message_id: ID of the message to edit :param str text: Text to edit the message to :param options: Additional API options """ return self.api_call( "editMessageText", chat_id=chat_id, message_id=message_id, text=text, **options )
def function[edit_message_text, parameter[self, chat_id, message_id, text]]: constant[ Edit a text message in a chat :param int chat_id: ID of the chat the message to edit is in :param int message_id: ID of the message to edit :param str text: Text to edit the message to :param options: Additional API options ] return[call[name[self].api_call, parameter[constant[editMessageText]]]]
keyword[def] identifier[edit_message_text] ( identifier[self] , identifier[chat_id] , identifier[message_id] , identifier[text] ,** identifier[options] ): literal[string] keyword[return] identifier[self] . identifier[api_call] ( literal[string] , identifier[chat_id] = identifier[chat_id] , identifier[message_id] = identifier[message_id] , identifier[text] = identifier[text] , ** identifier[options] )
def edit_message_text(self, chat_id, message_id, text, **options): """ Edit a text message in a chat :param int chat_id: ID of the chat the message to edit is in :param int message_id: ID of the message to edit :param str text: Text to edit the message to :param options: Additional API options """ return self.api_call('editMessageText', chat_id=chat_id, message_id=message_id, text=text, **options)
def emailUser(video, error=None): """Emails the author of the video that it has finished processing""" html = render_to_string('frog/video_email.html', { 'user': video.author, 'error': error, 'video': video, 'SITE_URL': FROG_SITE_URL, }) subject, from_email, to = 'Video Processing Finished{}'.format(error or ''), 'noreply@frogmediaserver.com', video.author.email text_content = 'This is an important message.' html_content = html send_mail(subject, text_content, from_email, [to], html_message=html_content)
def function[emailUser, parameter[video, error]]: constant[Emails the author of the video that it has finished processing] variable[html] assign[=] call[name[render_to_string], parameter[constant[frog/video_email.html], dictionary[[<ast.Constant object at 0x7da20e960af0>, <ast.Constant object at 0x7da20e960430>, <ast.Constant object at 0x7da20e962860>, <ast.Constant object at 0x7da20e960700>], [<ast.Attribute object at 0x7da20e963250>, <ast.Name object at 0x7da20e963640>, <ast.Name object at 0x7da20e960d90>, <ast.Name object at 0x7da20e962e30>]]]] <ast.Tuple object at 0x7da20e963d30> assign[=] tuple[[<ast.Call object at 0x7da20e963b50>, <ast.Constant object at 0x7da20e9612d0>, <ast.Attribute object at 0x7da20e9630d0>]] variable[text_content] assign[=] constant[This is an important message.] variable[html_content] assign[=] name[html] call[name[send_mail], parameter[name[subject], name[text_content], name[from_email], list[[<ast.Name object at 0x7da20e961cc0>]]]]
keyword[def] identifier[emailUser] ( identifier[video] , identifier[error] = keyword[None] ): literal[string] identifier[html] = identifier[render_to_string] ( literal[string] ,{ literal[string] : identifier[video] . identifier[author] , literal[string] : identifier[error] , literal[string] : identifier[video] , literal[string] : identifier[FROG_SITE_URL] , }) identifier[subject] , identifier[from_email] , identifier[to] = literal[string] . identifier[format] ( identifier[error] keyword[or] literal[string] ), literal[string] , identifier[video] . identifier[author] . identifier[email] identifier[text_content] = literal[string] identifier[html_content] = identifier[html] identifier[send_mail] ( identifier[subject] , identifier[text_content] , identifier[from_email] ,[ identifier[to] ], identifier[html_message] = identifier[html_content] )
def emailUser(video, error=None): """Emails the author of the video that it has finished processing""" html = render_to_string('frog/video_email.html', {'user': video.author, 'error': error, 'video': video, 'SITE_URL': FROG_SITE_URL}) (subject, from_email, to) = ('Video Processing Finished{}'.format(error or ''), 'noreply@frogmediaserver.com', video.author.email) text_content = 'This is an important message.' html_content = html send_mail(subject, text_content, from_email, [to], html_message=html_content)
def propagate(self, date): """Compute state of orbit at a given date, past or future Args: date (Date) Return: Orbit: """ i0, Ω0, e0, ω0, M0, n0 = self.tle n0 *= 60 # conversion to min⁻¹ if isinstance(date, Date): t0 = self.tle.date.datetime tdiff = (date.datetime - t0).total_seconds() / 60. elif isinstance(date, timedelta): tdiff = date.total_seconds() / 60. date = self.tle.date + date else: raise TypeError("Unhandled type for 'date': %s" % type(date)) bstar = self.tle.complements['bstar'] µ = self.gravity.µ_e r_e = self.gravity.r_e k_e = self.gravity.k_e # retrieve initialized variables _i = self._init n0 = _i.n0 Mdf = M0 + _i.Mdot * n0 * tdiff ωdf = ω0 + _i.ωdot * n0 * tdiff Ωdf = Ω0 + _i.Ωdot * n0 * tdiff delta_ω = bstar * _i.C3 * cos(ω0) * tdiff delta_M = 0. if e0 > 1e-4: delta_M = - 2 / 3 * (_i.q0 - _i.s) ** 4 * bstar * _i.ξ ** 4 / (e0 * _i.η) * ((1 + _i.η * cos(Mdf)) ** 3 - (1 + _i.η * cos(M0)) ** 3) Mp = (Mdf + delta_ω + delta_M) % (2 * np.pi) ω = ωdf - delta_ω - delta_M Ω = Ωdf - 21 * n0 * _i.k2 * _i.θ / (2 * _i.a0 ** 2 * _i.β_0 ** 2) * _i.C1 * tdiff ** 2 e = e0 - bstar * _i.C4 * tdiff - bstar * _i.C5 * (sin(Mp) - sin(M0)) if e < 1e-6: e = 1e-6 a = _i.a0 * (1 - _i.C1 * tdiff - _i.D2 * tdiff ** 2 - _i.D3 * tdiff ** 3 - _i.D4 * tdiff ** 4) ** 2 L = Mp + ω + Ω + n0 * (3 / 2 * _i.C1 * tdiff ** 2 + (_i.D2 + 2 * _i.C1 ** 2) * tdiff ** 3 + 1 / 4 * (3 * _i.D3 + 12 * _i.C1 * _i.D2 + 10 * _i.C1 ** 3) * tdiff ** 4 + 1 / 5 * (3 * _i.D4 + 12 * _i.C1 * _i.D3 + 6 * _i.D2 ** 2 + 30 * _i.C1 ** 2 * _i.D2 + 15 * _i.C1 ** 4) * tdiff ** 5) β = sqrt(1 - e ** 2) n = µ / (a ** (3 / 2)) # Long-period terms axN = e * cos(ω) ayNL = _i.A30 * sin(i0) / (4 * _i.k2 * a * β ** 2) tmp = (1 + _i.θ) if (1 + _i.θ) > 1.5e-12 else 1.5e-12 L_L = ayNL / 2 * axN * ((3 + 5 * _i.θ) / tmp) L_T = L + L_L ayN = e * sin(ω) + ayNL # Resolving of kepler equation U = (L_T - Ω) % (2 * np.pi) Epω = U for xxx in range(10): delta_Epω = (U - ayN * cos(Epω) + axN * sin(Epω) - Epω) / (1 - ayN * sin(Epω) - axN * cos(Epω)) if abs(delta_Epω) < 1e-12: break Epω = Epω + delta_Epω # Short-period terms ecosE = axN * cos(Epω) + ayN * sin(Epω) esinE = axN * sin(Epω) - ayN * cos(Epω) e_L = sqrt(axN ** 2 + ayN ** 2) p_L = a * (1 - e_L ** 2) r = a * (1 - ecosE) rdot = sqrt(a) / r * esinE rfdot = sqrt(p_L) / r cosu = a / r * (cos(Epω) - axN + ayN * esinE / (1 + sqrt(1 - e_L ** 2))) sinu = a / r * (sin(Epω) - ayN - axN * esinE / (1 + sqrt(1 - e_L ** 2))) u = arctan2(sinu, cosu) Delta_r = _i.k2 / (2 * p_L) * (1 - _i.θ ** 2) * cos(2 * u) Delta_u = - _i.k2 / (4 * p_L ** 2) * (7 * _i.θ ** 2 - 1) * sin(2 * u) Delta_Ω = 3 * _i.k2 * _i.θ / (2 * p_L ** 2) * sin(2 * u) Delta_i = 3 * _i.k2 * _i.θ / (2 * p_L ** 2) * sin(i0) * cos(2 * u) Delta_rdot = - n * _i.k2 * (1 - _i.θ ** 2) * sin(2 * u) / (p_L * µ) Delta_rfdot = _i.k2 * n * ((1 - _i.θ ** 2) * cos(2 * u) - 3 / 2 * (1 - 3 * _i.θ ** 2)) / (p_L * µ) rk = r * (1 - 3 / 2 * _i.k2 * sqrt(1 - e_L ** 2) / (p_L ** 2) * (3 * _i.θ ** 2 - 1)) + Delta_r uk = u + Delta_u Ωk = Ω + Delta_Ω ik = i0 + Delta_i rdotk = rdot + Delta_rdot rfdotk = rfdot + Delta_rfdot # Vectors vM = np.array([- sin(Ωk) * cos(ik), cos(Ωk) * cos(ik), sin(ik)]) vN = np.array([cos(Ωk), sin(Ωk), 0]) vU = vM * sin(uk) + vN * cos(uk) vV = vM * cos(uk) - vN * sin(uk) vR = rk * vU * r_e vRdot = (rdotk * vU + rfdotk * vV) * (r_e * k_e / 60.) vector = np.concatenate((vR, vRdot)) * 1000 # conversion to meters return self.tle.__class__(date, vector, 'cartesian', 'TEME', self.__class__(), **self.tle.complements)
def function[propagate, parameter[self, date]]: constant[Compute state of orbit at a given date, past or future Args: date (Date) Return: Orbit: ] <ast.Tuple object at 0x7da1b0cebd30> assign[=] name[self].tle <ast.AugAssign object at 0x7da1b0cebb50> if call[name[isinstance], parameter[name[date], name[Date]]] begin[:] variable[t0] assign[=] name[self].tle.date.datetime variable[tdiff] assign[=] binary_operation[call[binary_operation[name[date].datetime - name[t0]].total_seconds, parameter[]] / constant[60.0]] variable[bstar] assign[=] call[name[self].tle.complements][constant[bstar]] variable[μ] assign[=] name[self].gravity.μ_e variable[r_e] assign[=] name[self].gravity.r_e variable[k_e] assign[=] name[self].gravity.k_e variable[_i] assign[=] name[self]._init variable[n0] assign[=] name[_i].n0 variable[Mdf] assign[=] binary_operation[name[M0] + binary_operation[binary_operation[name[_i].Mdot * name[n0]] * name[tdiff]]] variable[ωdf] assign[=] binary_operation[name[ω0] + binary_operation[binary_operation[name[_i].ωdot * name[n0]] * name[tdiff]]] variable[Ωdf] assign[=] binary_operation[name[Ω0] + binary_operation[binary_operation[name[_i].Ωdot * name[n0]] * name[tdiff]]] variable[delta_ω] assign[=] binary_operation[binary_operation[binary_operation[name[bstar] * name[_i].C3] * call[name[cos], parameter[name[ω0]]]] * name[tdiff]] variable[delta_M] assign[=] constant[0.0] if compare[name[e0] greater[>] constant[0.0001]] begin[:] variable[delta_M] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b0cea170> / constant[3]] * binary_operation[binary_operation[name[_i].q0 - name[_i].s] ** constant[4]]] * name[bstar]] * binary_operation[name[_i].ξ ** constant[4]]] / binary_operation[name[e0] * name[_i].η]] * binary_operation[binary_operation[binary_operation[constant[1] + binary_operation[name[_i].η * call[name[cos], parameter[name[Mdf]]]]] ** constant[3]] - binary_operation[binary_operation[constant[1] + binary_operation[name[_i].η * call[name[cos], parameter[name[M0]]]]] ** constant[3]]]] variable[Mp] assign[=] binary_operation[binary_operation[binary_operation[name[Mdf] + name[delta_ω]] + name[delta_M]] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[2] * name[np].pi]] variable[ω] assign[=] binary_operation[binary_operation[name[ωdf] - name[delta_ω]] - name[delta_M]] variable[Ω] assign[=] binary_operation[name[Ωdf] - binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[21] * name[n0]] * name[_i].k2] * name[_i].θ] / binary_operation[binary_operation[constant[2] * binary_operation[name[_i].a0 ** constant[2]]] * binary_operation[name[_i].β_0 ** constant[2]]]] * name[_i].C1] * binary_operation[name[tdiff] ** constant[2]]]] variable[e] assign[=] binary_operation[binary_operation[name[e0] - binary_operation[binary_operation[name[bstar] * name[_i].C4] * name[tdiff]]] - binary_operation[binary_operation[name[bstar] * name[_i].C5] * binary_operation[call[name[sin], parameter[name[Mp]]] - call[name[sin], parameter[name[M0]]]]]] if compare[name[e] less[<] constant[1e-06]] begin[:] variable[e] assign[=] constant[1e-06] variable[a] assign[=] binary_operation[name[_i].a0 * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[1] - binary_operation[name[_i].C1 * name[tdiff]]] - binary_operation[name[_i].D2 * binary_operation[name[tdiff] ** constant[2]]]] - binary_operation[name[_i].D3 * binary_operation[name[tdiff] ** constant[3]]]] - binary_operation[name[_i].D4 * binary_operation[name[tdiff] ** constant[4]]]] ** constant[2]]] variable[L] assign[=] binary_operation[binary_operation[binary_operation[name[Mp] + name[ω]] + name[Ω]] + binary_operation[name[n0] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[3] / constant[2]] * name[_i].C1] * binary_operation[name[tdiff] ** constant[2]]] + binary_operation[binary_operation[name[_i].D2 + binary_operation[constant[2] * binary_operation[name[_i].C1 ** constant[2]]]] * binary_operation[name[tdiff] ** constant[3]]]] + binary_operation[binary_operation[binary_operation[constant[1] / constant[4]] * binary_operation[binary_operation[binary_operation[constant[3] * name[_i].D3] + binary_operation[binary_operation[constant[12] * name[_i].C1] * name[_i].D2]] + binary_operation[constant[10] * binary_operation[name[_i].C1 ** constant[3]]]]] * binary_operation[name[tdiff] ** constant[4]]]] + binary_operation[binary_operation[binary_operation[constant[1] / constant[5]] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[3] * name[_i].D4] + binary_operation[binary_operation[constant[12] * name[_i].C1] * name[_i].D3]] + binary_operation[constant[6] * binary_operation[name[_i].D2 ** constant[2]]]] + binary_operation[binary_operation[constant[30] * binary_operation[name[_i].C1 ** constant[2]]] * name[_i].D2]] + binary_operation[constant[15] * binary_operation[name[_i].C1 ** constant[4]]]]] * binary_operation[name[tdiff] ** constant[5]]]]]] variable[β] assign[=] call[name[sqrt], parameter[binary_operation[constant[1] - binary_operation[name[e] ** constant[2]]]]] variable[n] assign[=] binary_operation[name[μ] / binary_operation[name[a] ** binary_operation[constant[3] / constant[2]]]] variable[axN] assign[=] binary_operation[name[e] * call[name[cos], parameter[name[ω]]]] variable[ayNL] assign[=] binary_operation[binary_operation[name[_i].A30 * call[name[sin], parameter[name[i0]]]] / binary_operation[binary_operation[binary_operation[constant[4] * name[_i].k2] * name[a]] * binary_operation[name[β] ** constant[2]]]] variable[tmp] assign[=] <ast.IfExp object at 0x7da1b0cf47c0> variable[L_L] assign[=] binary_operation[binary_operation[binary_operation[name[ayNL] / constant[2]] * name[axN]] * binary_operation[binary_operation[constant[3] + binary_operation[constant[5] * name[_i].θ]] / name[tmp]]] variable[L_T] assign[=] binary_operation[name[L] + name[L_L]] variable[ayN] assign[=] binary_operation[binary_operation[name[e] * call[name[sin], parameter[name[ω]]]] + name[ayNL]] variable[U] assign[=] binary_operation[binary_operation[name[L_T] - name[Ω]] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[2] * name[np].pi]] variable[Epω] assign[=] name[U] for taget[name[xxx]] in starred[call[name[range], parameter[constant[10]]]] begin[:] variable[delta_Epω] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[U] - binary_operation[name[ayN] * call[name[cos], parameter[name[Epω]]]]] + binary_operation[name[axN] * call[name[sin], parameter[name[Epω]]]]] - name[Epω]] / binary_operation[binary_operation[constant[1] - binary_operation[name[ayN] * call[name[sin], parameter[name[Epω]]]]] - binary_operation[name[axN] * call[name[cos], parameter[name[Epω]]]]]] if compare[call[name[abs], parameter[name[delta_Epω]]] less[<] constant[1e-12]] begin[:] break variable[Epω] assign[=] binary_operation[name[Epω] + name[delta_Epω]] variable[ecosE] assign[=] binary_operation[binary_operation[name[axN] * call[name[cos], parameter[name[Epω]]]] + binary_operation[name[ayN] * call[name[sin], parameter[name[Epω]]]]] variable[esinE] assign[=] binary_operation[binary_operation[name[axN] * call[name[sin], parameter[name[Epω]]]] - binary_operation[name[ayN] * call[name[cos], parameter[name[Epω]]]]] variable[e_L] assign[=] call[name[sqrt], parameter[binary_operation[binary_operation[name[axN] ** constant[2]] + binary_operation[name[ayN] ** constant[2]]]]] variable[p_L] assign[=] binary_operation[name[a] * binary_operation[constant[1] - binary_operation[name[e_L] ** constant[2]]]] variable[r] assign[=] binary_operation[name[a] * binary_operation[constant[1] - name[ecosE]]] variable[rdot] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[name[a]]] / name[r]] * name[esinE]] variable[rfdot] assign[=] binary_operation[call[name[sqrt], parameter[name[p_L]]] / name[r]] variable[cosu] assign[=] binary_operation[binary_operation[name[a] / name[r]] * binary_operation[binary_operation[call[name[cos], parameter[name[Epω]]] - name[axN]] + binary_operation[binary_operation[name[ayN] * name[esinE]] / binary_operation[constant[1] + call[name[sqrt], parameter[binary_operation[constant[1] - binary_operation[name[e_L] ** constant[2]]]]]]]]] variable[sinu] assign[=] binary_operation[binary_operation[name[a] / name[r]] * binary_operation[binary_operation[call[name[sin], parameter[name[Epω]]] - name[ayN]] - binary_operation[binary_operation[name[axN] * name[esinE]] / binary_operation[constant[1] + call[name[sqrt], parameter[binary_operation[constant[1] - binary_operation[name[e_L] ** constant[2]]]]]]]]] variable[u] assign[=] call[name[arctan2], parameter[name[sinu], name[cosu]]] variable[Delta_r] assign[=] binary_operation[binary_operation[binary_operation[name[_i].k2 / binary_operation[constant[2] * name[p_L]]] * binary_operation[constant[1] - binary_operation[name[_i].θ ** constant[2]]]] * call[name[cos], parameter[binary_operation[constant[2] * name[u]]]]] variable[Delta_u] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b0cffb20> / binary_operation[constant[4] * binary_operation[name[p_L] ** constant[2]]]] * binary_operation[binary_operation[constant[7] * binary_operation[name[_i].θ ** constant[2]]] - constant[1]]] * call[name[sin], parameter[binary_operation[constant[2] * name[u]]]]] variable[Delta_Ω] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[3] * name[_i].k2] * name[_i].θ] / binary_operation[constant[2] * binary_operation[name[p_L] ** constant[2]]]] * call[name[sin], parameter[binary_operation[constant[2] * name[u]]]]] variable[Delta_i] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[3] * name[_i].k2] * name[_i].θ] / binary_operation[constant[2] * binary_operation[name[p_L] ** constant[2]]]] * call[name[sin], parameter[name[i0]]]] * call[name[cos], parameter[binary_operation[constant[2] * name[u]]]]] variable[Delta_rdot] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b0c3d150> * name[_i].k2] * binary_operation[constant[1] - binary_operation[name[_i].θ ** constant[2]]]] * call[name[sin], parameter[binary_operation[constant[2] * name[u]]]]] / binary_operation[name[p_L] * name[μ]]] variable[Delta_rfdot] assign[=] binary_operation[binary_operation[binary_operation[name[_i].k2 * name[n]] * binary_operation[binary_operation[binary_operation[constant[1] - binary_operation[name[_i].θ ** constant[2]]] * call[name[cos], parameter[binary_operation[constant[2] * name[u]]]]] - binary_operation[binary_operation[constant[3] / constant[2]] * binary_operation[constant[1] - binary_operation[constant[3] * binary_operation[name[_i].θ ** constant[2]]]]]]] / binary_operation[name[p_L] * name[μ]]] variable[rk] assign[=] binary_operation[binary_operation[name[r] * binary_operation[constant[1] - binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[3] / constant[2]] * name[_i].k2] * call[name[sqrt], parameter[binary_operation[constant[1] - binary_operation[name[e_L] ** constant[2]]]]]] / binary_operation[name[p_L] ** constant[2]]] * binary_operation[binary_operation[constant[3] * binary_operation[name[_i].θ ** constant[2]]] - constant[1]]]]] + name[Delta_r]] variable[uk] assign[=] binary_operation[name[u] + name[Delta_u]] variable[Ωk] assign[=] binary_operation[name[Ω] + name[Delta_Ω]] variable[ik] assign[=] binary_operation[name[i0] + name[Delta_i]] variable[rdotk] assign[=] binary_operation[name[rdot] + name[Delta_rdot]] variable[rfdotk] assign[=] binary_operation[name[rfdot] + name[Delta_rfdot]] variable[vM] assign[=] call[name[np].array, parameter[list[[<ast.BinOp object at 0x7da1b0ebe380>, <ast.BinOp object at 0x7da1b0ebf340>, <ast.Call object at 0x7da1b0ebc280>]]]] variable[vN] assign[=] call[name[np].array, parameter[list[[<ast.Call object at 0x7da1b0ebd4b0>, <ast.Call object at 0x7da1b0ebf3a0>, <ast.Constant object at 0x7da1b0ebe950>]]]] variable[vU] assign[=] binary_operation[binary_operation[name[vM] * call[name[sin], parameter[name[uk]]]] + binary_operation[name[vN] * call[name[cos], parameter[name[uk]]]]] variable[vV] assign[=] binary_operation[binary_operation[name[vM] * call[name[cos], parameter[name[uk]]]] - binary_operation[name[vN] * call[name[sin], parameter[name[uk]]]]] variable[vR] assign[=] binary_operation[binary_operation[name[rk] * name[vU]] * name[r_e]] variable[vRdot] assign[=] binary_operation[binary_operation[binary_operation[name[rdotk] * name[vU]] + binary_operation[name[rfdotk] * name[vV]]] * binary_operation[binary_operation[name[r_e] * name[k_e]] / constant[60.0]]] variable[vector] assign[=] binary_operation[call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da204345ae0>, <ast.Name object at 0x7da2043474f0>]]]] * constant[1000]] return[call[name[self].tle.__class__, parameter[name[date], name[vector], constant[cartesian], constant[TEME], call[name[self].__class__, parameter[]]]]]
keyword[def] identifier[propagate] ( identifier[self] , identifier[date] ): literal[string] identifier[i0] , identifier[Ω0] , identifier[e0] , identifier[ω0] , identifier[M0] , identifier[n0] = identifier[self] . identifier[tle] identifier[n0] *= literal[int] keyword[if] identifier[isinstance] ( identifier[date] , identifier[Date] ): identifier[t0] = identifier[self] . identifier[tle] . identifier[date] . identifier[datetime] identifier[tdiff] =( identifier[date] . identifier[datetime] - identifier[t0] ). identifier[total_seconds] ()/ literal[int] keyword[elif] identifier[isinstance] ( identifier[date] , identifier[timedelta] ): identifier[tdiff] = identifier[date] . identifier[total_seconds] ()/ literal[int] identifier[date] = identifier[self] . identifier[tle] . identifier[date] + identifier[date] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] % identifier[type] ( identifier[date] )) identifier[bstar] = identifier[self] . identifier[tle] . identifier[complements] [ literal[string] ] identifier[µ] = identifier[self] . identifier[gravity] . identifier[µ_e] identifier[r_e] = identifier[self] . identifier[gravity] . identifier[r_e] identifier[k_e] = identifier[self] . identifier[gravity] . identifier[k_e] identifier[_i] = identifier[self] . identifier[_init] identifier[n0] = identifier[_i] . identifier[n0] identifier[Mdf] = identifier[M0] + identifier[_i] . identifier[Mdot] * identifier[n0] * identifier[tdiff] identifier[ωdf] = identifier[ω0] + identifier[_i] . identifier[ωdot] * identifier[n0] * identifier[tdiff] identifier[Ωdf] = identifier[Ω0] + identifier[_i] . identifier[Ωdot] * identifier[n0] * identifier[tdiff] identifier[delta_ω] = identifier[bstar] * identifier[_i] . identifier[C3] * identifier[cos] ( identifier[ω0] )* identifier[tdiff] identifier[delta_M] = literal[int] keyword[if] identifier[e0] > literal[int] : identifier[delta_M] =- literal[int] / literal[int] *( identifier[_i] . identifier[q0] - identifier[_i] . identifier[s] )** literal[int] * identifier[bstar] * identifier[_i] . identifier[ξ] ** literal[int] /( identifier[e0] * identifier[_i] . identifier[η] )*(( literal[int] + identifier[_i] . identifier[η] * identifier[cos] ( identifier[Mdf] ))** literal[int] -( literal[int] + identifier[_i] . identifier[η] * identifier[cos] ( identifier[M0] ))** literal[int] ) identifier[Mp] =( identifier[Mdf] + identifier[delta_ω] + identifier[delta_M] )%( literal[int] * identifier[np] . identifier[pi] ) identifier[ω] = identifier[ωdf] - identifier[delta_ω] - identifier[delta_M] identifier[Ω] = identifier[Ωdf] - literal[int] * identifier[n0] * identifier[_i] . identifier[k2] * identifier[_i] . identifier[θ] /( literal[int] * identifier[_i] . identifier[a0] ** literal[int] * identifier[_i] . identifier[β_0] ** literal[int] )* identifier[_i] . identifier[C1] * identifier[tdiff] ** literal[int] identifier[e] = identifier[e0] - identifier[bstar] * identifier[_i] . identifier[C4] * identifier[tdiff] - identifier[bstar] * identifier[_i] . identifier[C5] *( identifier[sin] ( identifier[Mp] )- identifier[sin] ( identifier[M0] )) keyword[if] identifier[e] < literal[int] : identifier[e] = literal[int] identifier[a] = identifier[_i] . identifier[a0] *( literal[int] - identifier[_i] . identifier[C1] * identifier[tdiff] - identifier[_i] . identifier[D2] * identifier[tdiff] ** literal[int] - identifier[_i] . identifier[D3] * identifier[tdiff] ** literal[int] - identifier[_i] . identifier[D4] * identifier[tdiff] ** literal[int] )** literal[int] identifier[L] = identifier[Mp] + identifier[ω] + identifier[Ω] + identifier[n0] *( literal[int] / literal[int] * identifier[_i] . identifier[C1] * identifier[tdiff] ** literal[int] +( identifier[_i] . identifier[D2] + literal[int] * identifier[_i] . identifier[C1] ** literal[int] )* identifier[tdiff] ** literal[int] + literal[int] / literal[int] *( literal[int] * identifier[_i] . identifier[D3] + literal[int] * identifier[_i] . identifier[C1] * identifier[_i] . identifier[D2] + literal[int] * identifier[_i] . identifier[C1] ** literal[int] )* identifier[tdiff] ** literal[int] + literal[int] / literal[int] *( literal[int] * identifier[_i] . identifier[D4] + literal[int] * identifier[_i] . identifier[C1] * identifier[_i] . identifier[D3] + literal[int] * identifier[_i] . identifier[D2] ** literal[int] + literal[int] * identifier[_i] . identifier[C1] ** literal[int] * identifier[_i] . identifier[D2] + literal[int] * identifier[_i] . identifier[C1] ** literal[int] )* identifier[tdiff] ** literal[int] ) identifier[β] = identifier[sqrt] ( literal[int] - identifier[e] ** literal[int] ) identifier[n] = identifier[µ] /( identifier[a] **( literal[int] / literal[int] )) identifier[axN] = identifier[e] * identifier[cos] ( identifier[ω] ) identifier[ayNL] = identifier[_i] . identifier[A30] * identifier[sin] ( identifier[i0] )/( literal[int] * identifier[_i] . identifier[k2] * identifier[a] * identifier[β] ** literal[int] ) identifier[tmp] =( literal[int] + identifier[_i] . identifier[θ] ) keyword[if] ( literal[int] + identifier[_i] . identifier[θ] )> literal[int] keyword[else] literal[int] identifier[L_L] = identifier[ayNL] / literal[int] * identifier[axN] *(( literal[int] + literal[int] * identifier[_i] . identifier[θ] )/ identifier[tmp] ) identifier[L_T] = identifier[L] + identifier[L_L] identifier[ayN] = identifier[e] * identifier[sin] ( identifier[ω] )+ identifier[ayNL] identifier[U] =( identifier[L_T] - identifier[Ω] )%( literal[int] * identifier[np] . identifier[pi] ) identifier[Epω] = identifier[U] keyword[for] identifier[xxx] keyword[in] identifier[range] ( literal[int] ): identifier[delta_Epω] =( identifier[U] - identifier[ayN] * identifier[cos] ( identifier[Epω] )+ identifier[axN] * identifier[sin] ( identifier[Epω] )- identifier[Epω] )/( literal[int] - identifier[ayN] * identifier[sin] ( identifier[Epω] )- identifier[axN] * identifier[cos] ( identifier[Epω] )) keyword[if] identifier[abs] ( identifier[delta_Epω] )< literal[int] : keyword[break] identifier[Epω] = identifier[Epω] + identifier[delta_Epω] identifier[ecosE] = identifier[axN] * identifier[cos] ( identifier[Epω] )+ identifier[ayN] * identifier[sin] ( identifier[Epω] ) identifier[esinE] = identifier[axN] * identifier[sin] ( identifier[Epω] )- identifier[ayN] * identifier[cos] ( identifier[Epω] ) identifier[e_L] = identifier[sqrt] ( identifier[axN] ** literal[int] + identifier[ayN] ** literal[int] ) identifier[p_L] = identifier[a] *( literal[int] - identifier[e_L] ** literal[int] ) identifier[r] = identifier[a] *( literal[int] - identifier[ecosE] ) identifier[rdot] = identifier[sqrt] ( identifier[a] )/ identifier[r] * identifier[esinE] identifier[rfdot] = identifier[sqrt] ( identifier[p_L] )/ identifier[r] identifier[cosu] = identifier[a] / identifier[r] *( identifier[cos] ( identifier[Epω] )- identifier[axN] + identifier[ayN] * identifier[esinE] /( literal[int] + identifier[sqrt] ( literal[int] - identifier[e_L] ** literal[int] ))) identifier[sinu] = identifier[a] / identifier[r] *( identifier[sin] ( identifier[Epω] )- identifier[ayN] - identifier[axN] * identifier[esinE] /( literal[int] + identifier[sqrt] ( literal[int] - identifier[e_L] ** literal[int] ))) identifier[u] = identifier[arctan2] ( identifier[sinu] , identifier[cosu] ) identifier[Delta_r] = identifier[_i] . identifier[k2] /( literal[int] * identifier[p_L] )*( literal[int] - identifier[_i] . identifier[θ] ** literal[int] )* identifier[cos] ( literal[int] * identifier[u] ) identifier[Delta_u] =- identifier[_i] . identifier[k2] /( literal[int] * identifier[p_L] ** literal[int] )*( literal[int] * identifier[_i] . identifier[θ] ** literal[int] - literal[int] )* identifier[sin] ( literal[int] * identifier[u] ) identifier[Delta_Ω] = literal[int] * identifier[_i] . identifier[k2] * identifier[_i] . identifier[θ] /( literal[int] * identifier[p_L] ** literal[int] )* identifier[sin] ( literal[int] * identifier[u] ) identifier[Delta_i] = literal[int] * identifier[_i] . identifier[k2] * identifier[_i] . identifier[θ] /( literal[int] * identifier[p_L] ** literal[int] )* identifier[sin] ( identifier[i0] )* identifier[cos] ( literal[int] * identifier[u] ) identifier[Delta_rdot] =- identifier[n] * identifier[_i] . identifier[k2] *( literal[int] - identifier[_i] . identifier[θ] ** literal[int] )* identifier[sin] ( literal[int] * identifier[u] )/( identifier[p_L] * identifier[µ] ) identifier[Delta_rfdot] = identifier[_i] . identifier[k2] * identifier[n] *(( literal[int] - identifier[_i] . identifier[θ] ** literal[int] )* identifier[cos] ( literal[int] * identifier[u] )- literal[int] / literal[int] *( literal[int] - literal[int] * identifier[_i] . identifier[θ] ** literal[int] ))/( identifier[p_L] * identifier[µ] ) identifier[rk] = identifier[r] *( literal[int] - literal[int] / literal[int] * identifier[_i] . identifier[k2] * identifier[sqrt] ( literal[int] - identifier[e_L] ** literal[int] )/( identifier[p_L] ** literal[int] )*( literal[int] * identifier[_i] . identifier[θ] ** literal[int] - literal[int] ))+ identifier[Delta_r] identifier[uk] = identifier[u] + identifier[Delta_u] identifier[Ωk] = identifier[Ω] + identifier[Delta_Ω] identifier[ik] = identifier[i0] + identifier[Delta_i] identifier[rdotk] = identifier[rdot] + identifier[Delta_rdot] identifier[rfdotk] = identifier[rfdot] + identifier[Delta_rfdot] identifier[vM] = identifier[np] . identifier[array] ([- identifier[sin] ( identifier[Ωk] )* identifier[cos] ( identifier[ik] ), identifier[cos] ( identifier[Ωk] )* identifier[cos] ( identifier[ik] ), identifier[sin] ( identifier[ik] )]) identifier[vN] = identifier[np] . identifier[array] ([ identifier[cos] ( identifier[Ωk] ), identifier[sin] ( identifier[Ωk] ), literal[int] ]) identifier[vU] = identifier[vM] * identifier[sin] ( identifier[uk] )+ identifier[vN] * identifier[cos] ( identifier[uk] ) identifier[vV] = identifier[vM] * identifier[cos] ( identifier[uk] )- identifier[vN] * identifier[sin] ( identifier[uk] ) identifier[vR] = identifier[rk] * identifier[vU] * identifier[r_e] identifier[vRdot] =( identifier[rdotk] * identifier[vU] + identifier[rfdotk] * identifier[vV] )*( identifier[r_e] * identifier[k_e] / literal[int] ) identifier[vector] = identifier[np] . identifier[concatenate] (( identifier[vR] , identifier[vRdot] ))* literal[int] keyword[return] identifier[self] . identifier[tle] . identifier[__class__] ( identifier[date] , identifier[vector] , literal[string] , literal[string] , identifier[self] . identifier[__class__] (),** identifier[self] . identifier[tle] . identifier[complements] )
def propagate(self, date): """Compute state of orbit at a given date, past or future Args: date (Date) Return: Orbit: """ (i0, Ω0, e0, ω0, M0, n0) = self.tle n0 *= 60 # conversion to min⁻¹ if isinstance(date, Date): t0 = self.tle.date.datetime tdiff = (date.datetime - t0).total_seconds() / 60.0 # depends on [control=['if'], data=[]] elif isinstance(date, timedelta): tdiff = date.total_seconds() / 60.0 date = self.tle.date + date # depends on [control=['if'], data=[]] else: raise TypeError("Unhandled type for 'date': %s" % type(date)) bstar = self.tle.complements['bstar'] μ = self.gravity.μ_e r_e = self.gravity.r_e k_e = self.gravity.k_e # retrieve initialized variables _i = self._init n0 = _i.n0 Mdf = M0 + _i.Mdot * n0 * tdiff ωdf = ω0 + _i.ωdot * n0 * tdiff Ωdf = Ω0 + _i.Ωdot * n0 * tdiff delta_ω = bstar * _i.C3 * cos(ω0) * tdiff delta_M = 0.0 if e0 > 0.0001: delta_M = -2 / 3 * (_i.q0 - _i.s) ** 4 * bstar * _i.ξ ** 4 / (e0 * _i.η) * ((1 + _i.η * cos(Mdf)) ** 3 - (1 + _i.η * cos(M0)) ** 3) # depends on [control=['if'], data=['e0']] Mp = (Mdf + delta_ω + delta_M) % (2 * np.pi) ω = ωdf - delta_ω - delta_M Ω = Ωdf - 21 * n0 * _i.k2 * _i.θ / (2 * _i.a0 ** 2 * _i.β_0 ** 2) * _i.C1 * tdiff ** 2 e = e0 - bstar * _i.C4 * tdiff - bstar * _i.C5 * (sin(Mp) - sin(M0)) if e < 1e-06: e = 1e-06 # depends on [control=['if'], data=['e']] a = _i.a0 * (1 - _i.C1 * tdiff - _i.D2 * tdiff ** 2 - _i.D3 * tdiff ** 3 - _i.D4 * tdiff ** 4) ** 2 L = Mp + ω + Ω + n0 * (3 / 2 * _i.C1 * tdiff ** 2 + (_i.D2 + 2 * _i.C1 ** 2) * tdiff ** 3 + 1 / 4 * (3 * _i.D3 + 12 * _i.C1 * _i.D2 + 10 * _i.C1 ** 3) * tdiff ** 4 + 1 / 5 * (3 * _i.D4 + 12 * _i.C1 * _i.D3 + 6 * _i.D2 ** 2 + 30 * _i.C1 ** 2 * _i.D2 + 15 * _i.C1 ** 4) * tdiff ** 5) β = sqrt(1 - e ** 2) n = μ / a ** (3 / 2) # Long-period terms axN = e * cos(ω) ayNL = _i.A30 * sin(i0) / (4 * _i.k2 * a * β ** 2) tmp = 1 + _i.θ if 1 + _i.θ > 1.5e-12 else 1.5e-12 L_L = ayNL / 2 * axN * ((3 + 5 * _i.θ) / tmp) L_T = L + L_L ayN = e * sin(ω) + ayNL # Resolving of kepler equation U = (L_T - Ω) % (2 * np.pi) Epω = U for xxx in range(10): delta_Epω = (U - ayN * cos(Epω) + axN * sin(Epω) - Epω) / (1 - ayN * sin(Epω) - axN * cos(Epω)) if abs(delta_Epω) < 1e-12: break # depends on [control=['if'], data=[]] Epω = Epω + delta_Epω # depends on [control=['for'], data=[]] # Short-period terms ecosE = axN * cos(Epω) + ayN * sin(Epω) esinE = axN * sin(Epω) - ayN * cos(Epω) e_L = sqrt(axN ** 2 + ayN ** 2) p_L = a * (1 - e_L ** 2) r = a * (1 - ecosE) rdot = sqrt(a) / r * esinE rfdot = sqrt(p_L) / r cosu = a / r * (cos(Epω) - axN + ayN * esinE / (1 + sqrt(1 - e_L ** 2))) sinu = a / r * (sin(Epω) - ayN - axN * esinE / (1 + sqrt(1 - e_L ** 2))) u = arctan2(sinu, cosu) Delta_r = _i.k2 / (2 * p_L) * (1 - _i.θ ** 2) * cos(2 * u) Delta_u = -_i.k2 / (4 * p_L ** 2) * (7 * _i.θ ** 2 - 1) * sin(2 * u) Delta_Ω = 3 * _i.k2 * _i.θ / (2 * p_L ** 2) * sin(2 * u) Delta_i = 3 * _i.k2 * _i.θ / (2 * p_L ** 2) * sin(i0) * cos(2 * u) Delta_rdot = -n * _i.k2 * (1 - _i.θ ** 2) * sin(2 * u) / (p_L * μ) Delta_rfdot = _i.k2 * n * ((1 - _i.θ ** 2) * cos(2 * u) - 3 / 2 * (1 - 3 * _i.θ ** 2)) / (p_L * μ) rk = r * (1 - 3 / 2 * _i.k2 * sqrt(1 - e_L ** 2) / p_L ** 2 * (3 * _i.θ ** 2 - 1)) + Delta_r uk = u + Delta_u Ωk = Ω + Delta_Ω ik = i0 + Delta_i rdotk = rdot + Delta_rdot rfdotk = rfdot + Delta_rfdot # Vectors vM = np.array([-sin(Ωk) * cos(ik), cos(Ωk) * cos(ik), sin(ik)]) vN = np.array([cos(Ωk), sin(Ωk), 0]) vU = vM * sin(uk) + vN * cos(uk) vV = vM * cos(uk) - vN * sin(uk) vR = rk * vU * r_e vRdot = (rdotk * vU + rfdotk * vV) 
* (r_e * k_e / 60.0) vector = np.concatenate((vR, vRdot)) * 1000 # conversion to meters return self.tle.__class__(date, vector, 'cartesian', 'TEME', self.__class__(), **self.tle.complements)
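The Newton loop in the propagate listing above solves the SGP4 form of Kepler's equation for the variable Epω. Transcribed from the code (E stands for Epω, and a_xN, a_yN are the axN, ayN computed above), the equation and the update it iterates, at most 10 times or until the correction drops below 1e-12, are:

\[ U = E + a_{yN}\cos E - a_{xN}\sin E \]
\[ E_{k+1} = E_k + \frac{U - a_{yN}\cos E_k + a_{xN}\sin E_k - E_k}{1 - a_{yN}\sin E_k - a_{xN}\cos E_k} \]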
def issue_reactions(self, issue_number):
    """Get reactions of an issue"""

    payload = {
        'per_page': PER_PAGE,
        'direction': 'asc',
        'sort': 'updated'
    }

    path = urijoin("issues", str(issue_number), "reactions")
    return self.fetch_items(path, payload)
def function[issue_reactions, parameter[self, issue_number]]: constant[Get reactions of an issue] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b0351900>, <ast.Constant object at 0x7da1b0351810>, <ast.Constant object at 0x7da1b0351360>], [<ast.Name object at 0x7da1b0352e60>, <ast.Constant object at 0x7da1b0350820>, <ast.Constant object at 0x7da1b0350df0>]] variable[path] assign[=] call[name[urijoin], parameter[constant[issues], call[name[str], parameter[name[issue_number]]], constant[reactions]]] return[call[name[self].fetch_items, parameter[name[path], name[payload]]]]
keyword[def] identifier[issue_reactions] ( identifier[self] , identifier[issue_number] ): literal[string] identifier[payload] ={ literal[string] : identifier[PER_PAGE] , literal[string] : literal[string] , literal[string] : literal[string] } identifier[path] = identifier[urijoin] ( literal[string] , identifier[str] ( identifier[issue_number] ), literal[string] ) keyword[return] identifier[self] . identifier[fetch_items] ( identifier[path] , identifier[payload] )
def issue_reactions(self, issue_number): """Get reactions of an issue""" payload = {'per_page': PER_PAGE, 'direction': 'asc', 'sort': 'updated'} path = urijoin('issues', str(issue_number), 'reactions') return self.fetch_items(path, payload)
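A minimal usage sketch for issue_reactions; everything here is an assumption for illustration: client stands for an instance of the API class defining the method above, 42 is an arbitrary issue number, and fetch_items is assumed to yield one decoded page of results per request, as the per_page payload suggests.

# Hypothetical usage: iterate the pages of reactions returned for issue #42.
for page in client.issue_reactions(42):
    print(page)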
def _kl_divergence(self, other_locs, other_weights, kernel=None, delta=1e-2):
    """
    Finds the KL divergence between this and another particle
    distribution by using a kernel density estimator to smooth over the
    other distribution's particles.
    """
    if kernel is None:
        kernel = st.norm(loc=0, scale=1).pdf

    dist = rescaled_distance_mtx(self, other_locs) / delta
    K = kernel(dist)

    return -self.est_entropy() - (1 / delta) * np.sum(
        self.particle_weights * np.log(
            np.sum(
                other_weights * K,
                axis=1  # Sum over the particles of ``other``.
            )
        ),
        axis=0  # Sum over the particles of ``self``.
    )
def function[_kl_divergence, parameter[self, other_locs, other_weights, kernel, delta]]: constant[ Finds the KL divergence between this and another particle distribution by using a kernel density estimator to smooth over the other distribution's particles. ] if compare[name[kernel] is constant[None]] begin[:] variable[kernel] assign[=] call[name[st].norm, parameter[]].pdf variable[dist] assign[=] binary_operation[call[name[rescaled_distance_mtx], parameter[name[self], name[other_locs]]] / name[delta]] variable[K] assign[=] call[name[kernel], parameter[name[dist]]] return[binary_operation[<ast.UnaryOp object at 0x7da1b0f0f7c0> - binary_operation[binary_operation[constant[1] / name[delta]] * call[name[np].sum, parameter[binary_operation[name[self].particle_weights * call[name[np].log, parameter[call[name[np].sum, parameter[binary_operation[name[other_weights] * name[K]]]]]]]]]]]]
keyword[def] identifier[_kl_divergence] ( identifier[self] , identifier[other_locs] , identifier[other_weights] , identifier[kernel] = keyword[None] , identifier[delta] = literal[int] ): literal[string] keyword[if] identifier[kernel] keyword[is] keyword[None] : identifier[kernel] = identifier[st] . identifier[norm] ( identifier[loc] = literal[int] , identifier[scale] = literal[int] ). identifier[pdf] identifier[dist] = identifier[rescaled_distance_mtx] ( identifier[self] , identifier[other_locs] )/ identifier[delta] identifier[K] = identifier[kernel] ( identifier[dist] ) keyword[return] - identifier[self] . identifier[est_entropy] ()-( literal[int] / identifier[delta] )* identifier[np] . identifier[sum] ( identifier[self] . identifier[particle_weights] * identifier[np] . identifier[log] ( identifier[np] . identifier[sum] ( identifier[other_weights] * identifier[K] , identifier[axis] = literal[int] ) ), identifier[axis] = literal[int] )
def _kl_divergence(self, other_locs, other_weights, kernel=None, delta=0.01): """ Finds the KL divergence between this and another particle distribution by using a kernel density estimator to smooth over the other distribution's particles. """ if kernel is None: kernel = st.norm(loc=0, scale=1).pdf # depends on [control=['if'], data=['kernel']] dist = rescaled_distance_mtx(self, other_locs) / delta K = kernel(dist) # Sum over the particles of ``other``. # Sum over the particles of ``self``. return -self.est_entropy() - 1 / delta * np.sum(self.particle_weights * np.log(np.sum(other_weights * K, axis=1)), axis=0)
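In symbols, the value returned by _kl_divergence above is a direct transcription of its return expression: with w_i = self.particle_weights, u_j = other_weights, d_ij the rescaled distance between particle i of this distribution and particle j of the other, K the kernel (the standard normal pdf unless one is supplied), and δ = delta,

\[ \widehat{D}_{\mathrm{KL}}(p\,\|\,q) \;\approx\; -\widehat{H}(p) \;-\; \frac{1}{\delta}\sum_i w_i \log\Big(\sum_j u_j\, K\big(d_{ij}/\delta\big)\Big) \]

where \( \widehat{H}(p) \) is the entropy estimate returned by est_entropy().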
def remove(self,
           package,
           shutit_pexpect_child=None,
           options=None,
           echo=None,
           timeout=shutit_global.shutit_global_object.default_timeout,
           note=None):
    """Distro-independent remove function.
    Takes a package name and runs relevant remove function.

    @param package:              Package to remove, which is run through package_map.
    @param shutit_pexpect_child: See send()
    @param options:              Dict of options to pass to the remove command, mapped by install_type.
    @param timeout:              See send(). Default: 3600
    @param note:                 See send()

    @return: True if all ok (i.e. the package was successfully removed), False otherwise.
    @rtype: boolean
    """
    shutit_global.shutit_global_object.yield_to_draw()
    # If separated by spaces, remove each package separately.
    if package.find(' ') != -1:
        for p in package.split(' '):
            self.remove(p, shutit_pexpect_child=shutit_pexpect_child, options=options, timeout=timeout, note=note)
    shutit_pexpect_child = shutit_pexpect_child or self.get_current_shutit_pexpect_session().pexpect_child
    shutit_pexpect_session = self.get_shutit_pexpect_session_from_child(shutit_pexpect_child)
    return shutit_pexpect_session.remove(package, echo=echo, options=options, timeout=timeout, note=note)
def function[remove, parameter[self, package, shutit_pexpect_child, options, echo, timeout, note]]: constant[Distro-independent remove function. Takes a package name and runs relevant remove function. @param package: Package to remove, which is run through package_map. @param shutit_pexpect_child: See send() @param options: Dict of options to pass to the remove command, mapped by install_type. @param timeout: See send(). Default: 3600 @param note: See send() @return: True if all ok (i.e. the package was successfully removed), False otherwise. @rtype: boolean ] call[name[shutit_global].shutit_global_object.yield_to_draw, parameter[]] if compare[call[name[package].find, parameter[constant[ ]]] not_equal[!=] <ast.UnaryOp object at 0x7da20cabf9a0>] begin[:] for taget[name[p]] in starred[call[name[package].split, parameter[constant[ ]]]] begin[:] call[name[self].install, parameter[name[p]]] variable[shutit_pexpect_child] assign[=] <ast.BoolOp object at 0x7da20cabc7f0> variable[shutit_pexpect_session] assign[=] call[name[self].get_shutit_pexpect_session_from_child, parameter[name[shutit_pexpect_child]]] return[call[name[shutit_pexpect_session].remove, parameter[name[package]]]]
keyword[def] identifier[remove] ( identifier[self] , identifier[package] , identifier[shutit_pexpect_child] = keyword[None] , identifier[options] = keyword[None] , identifier[echo] = keyword[None] , identifier[timeout] = identifier[shutit_global] . identifier[shutit_global_object] . identifier[default_timeout] , identifier[note] = keyword[None] ): literal[string] identifier[shutit_global] . identifier[shutit_global_object] . identifier[yield_to_draw] () keyword[if] identifier[package] . identifier[find] ( literal[string] )!=- literal[int] : keyword[for] identifier[p] keyword[in] identifier[package] . identifier[split] ( literal[string] ): identifier[self] . identifier[install] ( identifier[p] , identifier[shutit_pexpect_child] = identifier[shutit_pexpect_child] , identifier[options] = identifier[options] , identifier[timeout] = identifier[timeout] , identifier[note] = identifier[note] ) identifier[shutit_pexpect_child] = identifier[shutit_pexpect_child] keyword[or] identifier[self] . identifier[get_current_shutit_pexpect_session] (). identifier[pexpect_child] identifier[shutit_pexpect_session] = identifier[self] . identifier[get_shutit_pexpect_session_from_child] ( identifier[shutit_pexpect_child] ) keyword[return] identifier[shutit_pexpect_session] . identifier[remove] ( identifier[package] , identifier[echo] = identifier[echo] , identifier[options] = identifier[options] , identifier[timeout] = identifier[timeout] , identifier[note] = identifier[note] )
def remove(self, package, shutit_pexpect_child=None, options=None, echo=None, timeout=shutit_global.shutit_global_object.default_timeout, note=None): """Distro-independent remove function. Takes a package name and runs relevant remove function. @param package: Package to remove, which is run through package_map. @param shutit_pexpect_child: See send() @param options: Dict of options to pass to the remove command, mapped by install_type. @param timeout: See send(). Default: 3600 @param note: See send() @return: True if all ok (i.e. the package was successfully removed), False otherwise. @rtype: boolean """ shutit_global.shutit_global_object.yield_to_draw() # If separated by spaces, remove separately if package.find(' ') != -1: for p in package.split(' '): self.install(p, shutit_pexpect_child=shutit_pexpect_child, options=options, timeout=timeout, note=note) # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]] shutit_pexpect_child = shutit_pexpect_child or self.get_current_shutit_pexpect_session().pexpect_child shutit_pexpect_session = self.get_shutit_pexpect_session_from_child(shutit_pexpect_child) return shutit_pexpect_session.remove(package, echo=echo, options=options, timeout=timeout, note=note)
def simplify(self, e=None):
    """
    Simplifies `e`. If `e` is None, simplifies the constraints of this state.
    """
    if e is None:
        return self._solver.simplify()
    elif isinstance(e, (int, float, bool)):
        return e
    elif isinstance(e, claripy.ast.Base) and e.op in claripy.operations.leaf_operations_concrete:
        return e
    elif isinstance(e, SimActionObject) and e.op in claripy.operations.leaf_operations_concrete:
        return e.ast
    elif not isinstance(e, (SimActionObject, claripy.ast.Base)):
        return e
    else:
        return self._claripy_simplify(e)
def function[simplify, parameter[self, e]]: constant[ Simplifies `e`. If `e` is None, simplifies the constraints of this state. ] if compare[name[e] is constant[None]] begin[:] return[call[name[self]._solver.simplify, parameter[]]]
keyword[def] identifier[simplify] ( identifier[self] , identifier[e] = keyword[None] ): literal[string] keyword[if] identifier[e] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[_solver] . identifier[simplify] () keyword[elif] identifier[isinstance] ( identifier[e] ,( identifier[int] , identifier[float] , identifier[bool] )): keyword[return] identifier[e] keyword[elif] identifier[isinstance] ( identifier[e] , identifier[claripy] . identifier[ast] . identifier[Base] ) keyword[and] identifier[e] . identifier[op] keyword[in] identifier[claripy] . identifier[operations] . identifier[leaf_operations_concrete] : keyword[return] identifier[e] keyword[elif] identifier[isinstance] ( identifier[e] , identifier[SimActionObject] ) keyword[and] identifier[e] . identifier[op] keyword[in] identifier[claripy] . identifier[operations] . identifier[leaf_operations_concrete] : keyword[return] identifier[e] . identifier[ast] keyword[elif] keyword[not] identifier[isinstance] ( identifier[e] ,( identifier[SimActionObject] , identifier[claripy] . identifier[ast] . identifier[Base] )): keyword[return] identifier[e] keyword[else] : keyword[return] identifier[self] . identifier[_claripy_simplify] ( identifier[e] )
def simplify(self, e=None): """ Simplifies `e`. If `e` is None, simplifies the constraints of this state. """ if e is None: return self._solver.simplify() # depends on [control=['if'], data=[]] elif isinstance(e, (int, float, bool)): return e # depends on [control=['if'], data=[]] elif isinstance(e, claripy.ast.Base) and e.op in claripy.operations.leaf_operations_concrete: return e # depends on [control=['if'], data=[]] elif isinstance(e, SimActionObject) and e.op in claripy.operations.leaf_operations_concrete: return e.ast # depends on [control=['if'], data=[]] elif not isinstance(e, (SimActionObject, claripy.ast.Base)): return e # depends on [control=['if'], data=[]] else: return self._claripy_simplify(e)
def authorized_purchase_object(self, oid, price, huid):
    """Does delegated (pre-authorized) purchase of `oid` in the name of `huid`,
    at price `price` (vingd transferred from `huid` to consumer's acc).

    :raises GeneralException:

    :resource: ``objects/<oid>/purchases``
    :access: authorized users with ACL flag ``purchase.object.authorize`` +
        delegate permission required for the requester to charge the user:
        ``purchase.object``
    """
    return self.request(
        'post',
        safeformat('objects/{:int}/purchases', oid),
        json.dumps({
            'price': price,
            'huid': huid,
            'autocommit': True
        }))
def function[authorized_purchase_object, parameter[self, oid, price, huid]]: constant[Does delegated (pre-authorized) purchase of `oid` in the name of `huid`, at price `price` (vingd transferred from `huid` to consumer's acc). :raises GeneralException: :resource: ``objects/<oid>/purchases`` :access: authorized users with ACL flag ``purchase.object.authorize`` + delegate permission required for the requester to charge the user: ``purchase.object`` ] return[call[name[self].request, parameter[constant[post], call[name[safeformat], parameter[constant[objects/{:int}/purchases], name[oid]]], call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da18bcca2f0>, <ast.Constant object at 0x7da18bcca8f0>, <ast.Constant object at 0x7da18bcc9960>], [<ast.Name object at 0x7da18bcc9240>, <ast.Name object at 0x7da18bccb7f0>, <ast.Constant object at 0x7da18bccb5e0>]]]]]]]
keyword[def] identifier[authorized_purchase_object] ( identifier[self] , identifier[oid] , identifier[price] , identifier[huid] ): literal[string] keyword[return] identifier[self] . identifier[request] ( literal[string] , identifier[safeformat] ( literal[string] , identifier[oid] ), identifier[json] . identifier[dumps] ({ literal[string] : identifier[price] , literal[string] : identifier[huid] , literal[string] : keyword[True] }))
def authorized_purchase_object(self, oid, price, huid): """Does delegated (pre-authorized) purchase of `oid` in the name of `huid`, at price `price` (vingd transferred from `huid` to consumer's acc). :raises GeneralException: :resource: ``objects/<oid>/purchases`` :access: authorized users with ACL flag ``purchase.object.authorize`` + delegate permission required for the requester to charge the user: ``purchase.object`` """ return self.request('post', safeformat('objects/{:int}/purchases', oid), json.dumps({'price': price, 'huid': huid, 'autocommit': True}))
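A minimal usage sketch with illustrative values only; v stands for an authenticated client instance of the class defining authorized_purchase_object above, and the oid, price and huid values are made up.

# Hypothetical example: charge the user identified by huid for object 1234 at price 200.
resp = v.authorized_purchase_object(oid=1234, price=200, huid='user-001')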
def save(self, filename=None, ignore_discard=False, ignore_expires=False):
    '''Implement the FileCookieJar abstract method.'''
    if filename is None:
        if self.filename is not None:
            filename = self.filename
        else:
            raise ValueError(cookiejar.MISSING_FILENAME_TEXT)

    # TODO: obtain file lock, read contents of file, and merge with
    # current content.
    go_cookies = []
    now = time.time()
    for cookie in self:
        if not ignore_discard and cookie.discard:
            continue
        if not ignore_expires and cookie.is_expired(now):
            continue
        go_cookies.append(py_to_go_cookie(cookie))
    with open(filename, "w") as f:
        f.write(json.dumps(go_cookies))
def function[save, parameter[self, filename, ignore_discard, ignore_expires]]: constant[Implement the FileCookieJar abstract method.] if compare[name[filename] is constant[None]] begin[:] if compare[name[self].filename is_not constant[None]] begin[:] variable[filename] assign[=] name[self].filename variable[go_cookies] assign[=] list[[]] variable[now] assign[=] call[name[time].time, parameter[]] for taget[name[cookie]] in starred[name[self]] begin[:] if <ast.BoolOp object at 0x7da1b0c27f70> begin[:] continue if <ast.BoolOp object at 0x7da1b0c26ec0> begin[:] continue call[name[go_cookies].append, parameter[call[name[py_to_go_cookie], parameter[name[cookie]]]]] with call[name[open], parameter[name[filename], constant[w]]] begin[:] call[name[f].write, parameter[call[name[json].dumps, parameter[name[go_cookies]]]]]
keyword[def] identifier[save] ( identifier[self] , identifier[filename] = keyword[None] , identifier[ignore_discard] = keyword[False] , identifier[ignore_expires] = keyword[False] ): literal[string] keyword[if] identifier[filename] keyword[is] keyword[None] : keyword[if] identifier[self] . identifier[filename] keyword[is] keyword[not] keyword[None] : identifier[filename] = identifier[self] . identifier[filename] keyword[else] : keyword[raise] identifier[ValueError] ( identifier[cookiejar] . identifier[MISSING_FILENAME_TEXT] ) identifier[go_cookies] =[] identifier[now] = identifier[time] . identifier[time] () keyword[for] identifier[cookie] keyword[in] identifier[self] : keyword[if] keyword[not] identifier[ignore_discard] keyword[and] identifier[cookie] . identifier[discard] : keyword[continue] keyword[if] keyword[not] identifier[ignore_expires] keyword[and] identifier[cookie] . identifier[is_expired] ( identifier[now] ): keyword[continue] identifier[go_cookies] . identifier[append] ( identifier[py_to_go_cookie] ( identifier[cookie] )) keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[go_cookies] ))
def save(self, filename=None, ignore_discard=False, ignore_expires=False): """Implement the FileCookieJar abstract method.""" if filename is None: if self.filename is not None: filename = self.filename # depends on [control=['if'], data=[]] else: raise ValueError(cookiejar.MISSING_FILENAME_TEXT) # depends on [control=['if'], data=['filename']] # TODO: obtain file lock, read contents of file, and merge with # current content. go_cookies = [] now = time.time() for cookie in self: if not ignore_discard and cookie.discard: continue # depends on [control=['if'], data=[]] if not ignore_expires and cookie.is_expired(now): continue # depends on [control=['if'], data=[]] go_cookies.append(py_to_go_cookie(cookie)) # depends on [control=['for'], data=['cookie']] with open(filename, 'w') as f: f.write(json.dumps(go_cookies)) # depends on [control=['with'], data=['f']]
def update(self, user, name=None, password=None, host=None):
    """
    Allows you to change one or more of the user's username, password, or
    host.
    """
    if not any((name, password, host)):
        raise exc.MissingDBUserParameters("You must supply at least one of "
                "the following: new username, new password, or new host "
                "specification.")
    if not isinstance(user, CloudDatabaseUser):
        # Must be the ID/name
        user = self.get(user)
    dct = {}
    if name and (name != user.name):
        dct["name"] = name
    if host and (host != user.host):
        dct["host"] = host
    if password:
        dct["password"] = password
    if not dct:
        raise exc.DBUpdateUnchanged("You must supply at least one changed "
                "value when updating a user.")
    uri = "/%s/%s" % (self.uri_base, user.name)
    body = {"user": dct}
    resp, resp_body = self.api.method_put(uri, body=body)
    return None
def function[update, parameter[self, user, name, password, host]]: constant[ Allows you to change one or more of the user's username, password, or host. ] if <ast.UnaryOp object at 0x7da1b055aa40> begin[:] <ast.Raise object at 0x7da1b055aef0> if <ast.UnaryOp object at 0x7da1b055bc40> begin[:] variable[user] assign[=] call[name[self].get, parameter[name[user]]] variable[dct] assign[=] dictionary[[], []] if <ast.BoolOp object at 0x7da1b0559f90> begin[:] call[name[dct]][constant[name]] assign[=] name[name] if <ast.BoolOp object at 0x7da1b055a0e0> begin[:] call[name[dct]][constant[host]] assign[=] name[host] if name[password] begin[:] call[name[dct]][constant[password]] assign[=] name[password] if <ast.UnaryOp object at 0x7da1b055a7d0> begin[:] <ast.Raise object at 0x7da1b0559420> variable[uri] assign[=] binary_operation[constant[/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b05595d0>, <ast.Attribute object at 0x7da1b055bfd0>]]] variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b055a8f0>], [<ast.Name object at 0x7da1b055a320>]] <ast.Tuple object at 0x7da1b05590c0> assign[=] call[name[self].api.method_put, parameter[name[uri]]] return[constant[None]]
keyword[def] identifier[update] ( identifier[self] , identifier[user] , identifier[name] = keyword[None] , identifier[password] = keyword[None] , identifier[host] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[any] (( identifier[name] , identifier[password] , identifier[host] )): keyword[raise] identifier[exc] . identifier[MissingDBUserParameters] ( literal[string] literal[string] literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[user] , identifier[CloudDatabaseUser] ): identifier[user] = identifier[self] . identifier[get] ( identifier[user] ) identifier[dct] ={} keyword[if] identifier[name] keyword[and] ( identifier[name] != identifier[user] . identifier[name] ): identifier[dct] [ literal[string] ]= identifier[name] keyword[if] identifier[host] keyword[and] ( identifier[host] != identifier[user] . identifier[host] ): identifier[dct] [ literal[string] ]= identifier[host] keyword[if] identifier[password] : identifier[dct] [ literal[string] ]= identifier[password] keyword[if] keyword[not] identifier[dct] : keyword[raise] identifier[exc] . identifier[DBUpdateUnchanged] ( literal[string] literal[string] ) identifier[uri] = literal[string] %( identifier[self] . identifier[uri_base] , identifier[user] . identifier[name] ) identifier[body] ={ literal[string] : identifier[dct] } identifier[resp] , identifier[resp_body] = identifier[self] . identifier[api] . identifier[method_put] ( identifier[uri] , identifier[body] = identifier[body] ) keyword[return] keyword[None]
def update(self, user, name=None, password=None, host=None): """ Allows you to change one or more of the user's username, password, or host. """ if not any((name, password, host)): raise exc.MissingDBUserParameters('You must supply at least one of the following: new username, new password, or new host specification.') # depends on [control=['if'], data=[]] if not isinstance(user, CloudDatabaseUser): # Must be the ID/name user = self.get(user) # depends on [control=['if'], data=[]] dct = {} if name and name != user.name: dct['name'] = name # depends on [control=['if'], data=[]] if host and host != user.host: dct['host'] = host # depends on [control=['if'], data=[]] if password: dct['password'] = password # depends on [control=['if'], data=[]] if not dct: raise exc.DBUpdateUnchanged('You must supply at least one changed value when updating a user.') # depends on [control=['if'], data=[]] uri = '/%s/%s' % (self.uri_base, user.name) body = {'user': dct} (resp, resp_body) = self.api.method_put(uri, body=body) return None
def send(self, message_id, stm_id, args=[], kwargs={}):
    """
    Send a message to a state machine handled by this driver.

    If you have a reference to the state machine, you can also send it
    directly to it by using `stmpy.Machine.send`.

    `stm_id` must be the id of a state machine earlier added to the driver.
    """
    if stm_id not in Driver._stms_by_id:
        self._logger.warn('Machine with name {} cannot be found. '
                          'Ignoring message {}.'.format(stm_id, message_id))
    else:
        stm = Driver._stms_by_id[stm_id]
        self._add_event(message_id, args, kwargs, stm)
def function[send, parameter[self, message_id, stm_id, args, kwargs]]: constant[ Send a message to a state machine handled by this driver. If you have a reference to the state machine, you can also send it directly to it by using `stmpy.Machine.send`. `stm_id` must be the id of a state machine earlier added to the driver. ] if compare[name[stm_id] <ast.NotIn object at 0x7da2590d7190> name[Driver]._stms_by_id] begin[:] call[name[self]._logger.warn, parameter[call[constant[Machine with name {} cannot be found. Ignoring message {}.].format, parameter[name[stm_id], name[message_id]]]]]
keyword[def] identifier[send] ( identifier[self] , identifier[message_id] , identifier[stm_id] , identifier[args] =[], identifier[kwargs] ={}): literal[string] keyword[if] identifier[stm_id] keyword[not] keyword[in] identifier[Driver] . identifier[_stms_by_id] : identifier[self] . identifier[_logger] . identifier[warn] ( literal[string] literal[string] . identifier[format] ( identifier[stm_id] , identifier[message_id] )) keyword[else] : identifier[stm] = identifier[Driver] . identifier[_stms_by_id] [ identifier[stm_id] ] identifier[self] . identifier[_add_event] ( identifier[message_id] , identifier[args] , identifier[kwargs] , identifier[stm] )
def send(self, message_id, stm_id, args=[], kwargs={}): """ Send a message to a state machine handled by this driver. If you have a reference to the state machine, you can also send it directly to it by using `stmpy.Machine.send`. `stm_id` must be the id of a state machine earlier added to the driver. """ if stm_id not in Driver._stms_by_id: self._logger.warn('Machine with name {} cannot be found. Ignoring message {}.'.format(stm_id, message_id)) # depends on [control=['if'], data=['stm_id']] else: stm = Driver._stms_by_id[stm_id] self._add_event(message_id, args, kwargs, stm)
def truncate_schema(self):
    """ Will delete all data in schema. Only for test use!"""
    assert self.server == 'localhost'

    con = self.connection or self._connect()
    self._initialize(con)
    cur = con.cursor()
    cur.execute('DELETE FROM publication;')
    cur.execute('TRUNCATE systems CASCADE;')
    con.commit()
    con.close()
    return
def function[truncate_schema, parameter[self]]: constant[ Will delete all data in schema. Only for test use!] assert[compare[name[self].server equal[==] constant[localhost]]] variable[con] assign[=] <ast.BoolOp object at 0x7da20e74b0a0> call[name[self]._initialize, parameter[name[con]]] variable[cur] assign[=] call[name[con].cursor, parameter[]] call[name[cur].execute, parameter[constant[DELETE FROM publication;]]] call[name[cur].execute, parameter[constant[TRUNCATE systems CASCADE;]]] call[name[con].commit, parameter[]] call[name[con].close, parameter[]] return[None]
keyword[def] identifier[truncate_schema] ( identifier[self] ): literal[string] keyword[assert] identifier[self] . identifier[server] == literal[string] identifier[con] = identifier[self] . identifier[connection] keyword[or] identifier[self] . identifier[_connect] () identifier[self] . identifier[_initialize] ( identifier[con] ) identifier[cur] = identifier[con] . identifier[cursor] () identifier[cur] . identifier[execute] ( literal[string] ) identifier[cur] . identifier[execute] ( literal[string] ) identifier[con] . identifier[commit] () identifier[con] . identifier[close] () keyword[return]
def truncate_schema(self): """ Will delete all data in schema. Only for test use!""" assert self.server == 'localhost' con = self.connection or self._connect() self._initialize(con) cur = con.cursor() cur.execute('DELETE FROM publication;') cur.execute('TRUNCATE systems CASCADE;') con.commit() con.close() return
def _create_bvals_bvecs(grouped_dicoms, bval_file, bvec_file):
    """
    Write the bvals and bvecs from the sorted dicom files to the bval and bvec files
    """
    # get the bvals and bvecs
    bvals, bvecs = _get_bvals_bvecs(grouped_dicoms)

    # save the found bvals and bvecs to file
    common.write_bval_file(bvals, bval_file)
    common.write_bvec_file(bvecs, bvec_file)
    return bvals, bvecs
def function[_create_bvals_bvecs, parameter[grouped_dicoms, bval_file, bvec_file]]: constant[ Write the bvals from the sorted dicom files to a bval file ] <ast.Tuple object at 0x7da1b1307d60> assign[=] call[name[_get_bvals_bvecs], parameter[name[grouped_dicoms]]] call[name[common].write_bval_file, parameter[name[bvals], name[bval_file]]] call[name[common].write_bvec_file, parameter[name[bvecs], name[bvec_file]]] return[tuple[[<ast.Name object at 0x7da1b138ddb0>, <ast.Name object at 0x7da1b138dc90>]]]
keyword[def] identifier[_create_bvals_bvecs] ( identifier[grouped_dicoms] , identifier[bval_file] , identifier[bvec_file] ): literal[string] identifier[bvals] , identifier[bvecs] = identifier[_get_bvals_bvecs] ( identifier[grouped_dicoms] ) identifier[common] . identifier[write_bval_file] ( identifier[bvals] , identifier[bval_file] ) identifier[common] . identifier[write_bvec_file] ( identifier[bvecs] , identifier[bvec_file] ) keyword[return] identifier[bvals] , identifier[bvecs]
def _create_bvals_bvecs(grouped_dicoms, bval_file, bvec_file): """ Write the bvals from the sorted dicom files to a bval file """ # get the bvals and bvecs (bvals, bvecs) = _get_bvals_bvecs(grouped_dicoms) # save the found bvecs to the file common.write_bval_file(bvals, bval_file) common.write_bvec_file(bvecs, bvec_file) return (bvals, bvecs)
def _create(cls, repo, path, resolve, reference, force, logmsg=None):
    """internal method used to create a new symbolic reference.
    If resolve is False, the reference will be taken as is, creating
    a proper symbolic reference. Otherwise it will be resolved to the
    corresponding object and a detached symbolic reference will be created
    instead"""
    git_dir = _git_dir(repo, path)
    full_ref_path = cls.to_full_path(path)
    abs_ref_path = osp.join(git_dir, full_ref_path)

    # figure out target data
    target = reference
    if resolve:
        target = repo.rev_parse(str(reference))

    if not force and osp.isfile(abs_ref_path):
        target_data = str(target)
        if isinstance(target, SymbolicReference):
            target_data = target.path
        if not resolve:
            target_data = "ref: " + target_data
        with open(abs_ref_path, 'rb') as fd:
            existing_data = fd.read().decode(defenc).strip()
        if existing_data != target_data:
            raise OSError("Reference at %r does already exist, pointing to %r, requested was %r" %
                          (full_ref_path, existing_data, target_data))
    # END no force handling

    ref = cls(repo, full_ref_path)
    ref.set_reference(target, logmsg)
    return ref
def function[_create, parameter[cls, repo, path, resolve, reference, force, logmsg]]: constant[internal method used to create a new symbolic reference. If resolve is False, the reference will be taken as is, creating a proper symbolic reference. Otherwise it will be resolved to the corresponding object and a detached symbolic reference will be created instead] variable[git_dir] assign[=] call[name[_git_dir], parameter[name[repo], name[path]]] variable[full_ref_path] assign[=] call[name[cls].to_full_path, parameter[name[path]]] variable[abs_ref_path] assign[=] call[name[osp].join, parameter[name[git_dir], name[full_ref_path]]] variable[target] assign[=] name[reference] if name[resolve] begin[:] variable[target] assign[=] call[name[repo].rev_parse, parameter[call[name[str], parameter[name[reference]]]]] if <ast.BoolOp object at 0x7da1b220f5e0> begin[:] variable[target_data] assign[=] call[name[str], parameter[name[target]]] if call[name[isinstance], parameter[name[target], name[SymbolicReference]]] begin[:] variable[target_data] assign[=] name[target].path if <ast.UnaryOp object at 0x7da1b220f700> begin[:] variable[target_data] assign[=] binary_operation[constant[ref: ] + name[target_data]] with call[name[open], parameter[name[abs_ref_path], constant[rb]]] begin[:] variable[existing_data] assign[=] call[call[call[name[fd].read, parameter[]].decode, parameter[name[defenc]]].strip, parameter[]] if compare[name[existing_data] not_equal[!=] name[target_data]] begin[:] <ast.Raise object at 0x7da1b220f1c0> variable[ref] assign[=] call[name[cls], parameter[name[repo], name[full_ref_path]]] call[name[ref].set_reference, parameter[name[target], name[logmsg]]] return[name[ref]]
keyword[def] identifier[_create] ( identifier[cls] , identifier[repo] , identifier[path] , identifier[resolve] , identifier[reference] , identifier[force] , identifier[logmsg] = keyword[None] ): literal[string] identifier[git_dir] = identifier[_git_dir] ( identifier[repo] , identifier[path] ) identifier[full_ref_path] = identifier[cls] . identifier[to_full_path] ( identifier[path] ) identifier[abs_ref_path] = identifier[osp] . identifier[join] ( identifier[git_dir] , identifier[full_ref_path] ) identifier[target] = identifier[reference] keyword[if] identifier[resolve] : identifier[target] = identifier[repo] . identifier[rev_parse] ( identifier[str] ( identifier[reference] )) keyword[if] keyword[not] identifier[force] keyword[and] identifier[osp] . identifier[isfile] ( identifier[abs_ref_path] ): identifier[target_data] = identifier[str] ( identifier[target] ) keyword[if] identifier[isinstance] ( identifier[target] , identifier[SymbolicReference] ): identifier[target_data] = identifier[target] . identifier[path] keyword[if] keyword[not] identifier[resolve] : identifier[target_data] = literal[string] + identifier[target_data] keyword[with] identifier[open] ( identifier[abs_ref_path] , literal[string] ) keyword[as] identifier[fd] : identifier[existing_data] = identifier[fd] . identifier[read] (). identifier[decode] ( identifier[defenc] ). identifier[strip] () keyword[if] identifier[existing_data] != identifier[target_data] : keyword[raise] identifier[OSError] ( literal[string] % ( identifier[full_ref_path] , identifier[existing_data] , identifier[target_data] )) identifier[ref] = identifier[cls] ( identifier[repo] , identifier[full_ref_path] ) identifier[ref] . identifier[set_reference] ( identifier[target] , identifier[logmsg] ) keyword[return] identifier[ref]
def _create(cls, repo, path, resolve, reference, force, logmsg=None): """internal method used to create a new symbolic reference. If resolve is False, the reference will be taken as is, creating a proper symbolic reference. Otherwise it will be resolved to the corresponding object and a detached symbolic reference will be created instead""" git_dir = _git_dir(repo, path) full_ref_path = cls.to_full_path(path) abs_ref_path = osp.join(git_dir, full_ref_path) # figure out target data target = reference if resolve: target = repo.rev_parse(str(reference)) # depends on [control=['if'], data=[]] if not force and osp.isfile(abs_ref_path): target_data = str(target) if isinstance(target, SymbolicReference): target_data = target.path # depends on [control=['if'], data=[]] if not resolve: target_data = 'ref: ' + target_data # depends on [control=['if'], data=[]] with open(abs_ref_path, 'rb') as fd: existing_data = fd.read().decode(defenc).strip() # depends on [control=['with'], data=['fd']] if existing_data != target_data: raise OSError('Reference at %r does already exist, pointing to %r, requested was %r' % (full_ref_path, existing_data, target_data)) # depends on [control=['if'], data=['existing_data', 'target_data']] # depends on [control=['if'], data=[]] # END no force handling ref = cls(repo, full_ref_path) ref.set_reference(target, logmsg) return ref
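For illustration, these are the two forms of target_data that the force check in _create compares against the existing ref file; the values below are examples, not taken from the source.

symbolic_target = "ref: refs/heads/master"       # resolve is False and target is a SymbolicReference: "ref: " + target.path
detached_target = "<hexsha of resolved object>"  # resolve is True: str() of the object returned by repo.rev_parse(reference)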
def lstrip(self, chars=None):
    """ Like str.lstrip, except it returns the Colr instance. """
    return self.__class__(
        self._str_strip('lstrip', chars),
        no_closing=chars and (closing_code in chars),
    )
def function[lstrip, parameter[self, chars]]: constant[ Like str.lstrip, except it returns the Colr instance. ] return[call[name[self].__class__, parameter[call[name[self]._str_strip, parameter[constant[lstrip], name[chars]]]]]]
keyword[def] identifier[lstrip] ( identifier[self] , identifier[chars] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[__class__] ( identifier[self] . identifier[_str_strip] ( literal[string] , identifier[chars] ), identifier[no_closing] = identifier[chars] keyword[and] ( identifier[closing_code] keyword[in] identifier[chars] ), )
def lstrip(self, chars=None): """ Like str.lstrip, except it returns the Colr instance. """ return self.__class__(self._str_strip('lstrip', chars), no_closing=chars and closing_code in chars)
def headerData(self, section, orientation, role):
    """Return the header data

    Will call :meth:`TreeItem.data` of the root :class:`TreeItem` with the
    given section (column) and role for horizontal orientations.
    Vertical orientations are numbered.

    :param section: the section in the header view
    :type section: int
    :param orientation: vertical or horizontal orientation
    :type orientation: :data:`QtCore.Qt.Vertical` | :data:`QtCore.Qt.Horizontal`
    :param role: the data role.
    :type role: :data:`QtCore.Qt.ItemDataRole`
    :returns: data for the header
    :raises: None
    """
    if orientation == QtCore.Qt.Horizontal:
        d = self._root.data(section, role)
        if d is None and role == QtCore.Qt.DisplayRole:
            return str(section + 1)
        return d
    if orientation == QtCore.Qt.Vertical and role == QtCore.Qt.DisplayRole:
        return str(section + 1)
def function[headerData, parameter[self, section, orientation, role]]: constant[Return the header data Will call :meth:`TreeItem.data` of the root :class:`TreeItem` with the given section (column) and role for horizontal orientations. Vertical orientations are numbered. :param section: the section in the header view :type section: int :param orientation: vertical or horizontal orientation :type orientation: :data:`QtCore.Qt.Vertical` | :data:`QtCore.Qt.Horizontal` :param role: the data role. :type role: :data:`QtCore.Qt.ItemDataRole` :returns: data for the header :raises: None ] if compare[name[orientation] equal[==] name[QtCore].Qt.Horizontal] begin[:] variable[d] assign[=] call[name[self]._root.data, parameter[name[section], name[role]]] if <ast.BoolOp object at 0x7da1b1471690> begin[:] return[call[name[str], parameter[binary_operation[name[section] + constant[1]]]]] return[name[d]] if <ast.BoolOp object at 0x7da1b1471210> begin[:] return[call[name[str], parameter[binary_operation[name[section] + constant[1]]]]]
keyword[def] identifier[headerData] ( identifier[self] , identifier[section] , identifier[orientation] , identifier[role] ): literal[string] keyword[if] identifier[orientation] == identifier[QtCore] . identifier[Qt] . identifier[Horizontal] : identifier[d] = identifier[self] . identifier[_root] . identifier[data] ( identifier[section] , identifier[role] ) keyword[if] identifier[d] keyword[is] keyword[None] keyword[and] identifier[role] == identifier[QtCore] . identifier[Qt] . identifier[DisplayRole] : keyword[return] identifier[str] ( identifier[section] + literal[int] ) keyword[return] identifier[d] keyword[if] identifier[orientation] == identifier[QtCore] . identifier[Qt] . identifier[Vertical] keyword[and] identifier[role] == identifier[QtCore] . identifier[Qt] . identifier[DisplayRole] : keyword[return] identifier[str] ( identifier[section] + literal[int] )
def headerData(self, section, orientation, role): """Return the header data Will call :meth:`TreeItem.data` of the root :class:`TreeItem` with the given section (column) and role for horizontal orientations. Vertical orientations are numbered. :param section: the section in the header view :type section: int :param orientation: vertical or horizontal orientation :type orientation: :data:`QtCore.Qt.Vertical` | :data:`QtCore.Qt.Horizontal` :param role: the data role. :type role: :data:`QtCore.Qt.ItemDataRole` :returns: data for the header :raises: None """ if orientation == QtCore.Qt.Horizontal: d = self._root.data(section, role) if d is None and role == QtCore.Qt.DisplayRole: return str(section + 1) # depends on [control=['if'], data=[]] return d # depends on [control=['if'], data=[]] if orientation == QtCore.Qt.Vertical and role == QtCore.Qt.DisplayRole: return str(section + 1) # depends on [control=['if'], data=[]]
def _get_match_and_classification(self, urls):
    """Get classification for all matching URLs.

    :param urls: a sequence of URLs to test
    :return: a generator of tuples, each containing a matching URL and the
        classification string pertaining to it
    """
    for url_list, response in self._query(urls):
        classification_set = response.text.splitlines()
        for url, _class in zip(url_list, classification_set):
            if _class != 'ok':
                yield url, _class
def function[_get_match_and_classification, parameter[self, urls]]: constant[Get classification for all matching URLs. :param urls: a sequence of URLs to test :return: a tuple containing matching URL and classification string pertaining to it ] for taget[tuple[[<ast.Name object at 0x7da1b25978e0>, <ast.Name object at 0x7da1b25948e0>]]] in starred[call[name[self]._query, parameter[name[urls]]]] begin[:] variable[classification_set] assign[=] call[name[response].text.splitlines, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b25d2e60>, <ast.Name object at 0x7da1b25d35e0>]]] in starred[call[name[zip], parameter[name[url_list], name[classification_set]]]] begin[:] if compare[name[_class] not_equal[!=] constant[ok]] begin[:] <ast.Yield object at 0x7da1b25d0970>
keyword[def] identifier[_get_match_and_classification] ( identifier[self] , identifier[urls] ): literal[string] keyword[for] identifier[url_list] , identifier[response] keyword[in] identifier[self] . identifier[_query] ( identifier[urls] ): identifier[classification_set] = identifier[response] . identifier[text] . identifier[splitlines] () keyword[for] identifier[url] , identifier[_class] keyword[in] identifier[zip] ( identifier[url_list] , identifier[classification_set] ): keyword[if] identifier[_class] != literal[string] : keyword[yield] identifier[url] , identifier[_class]
def _get_match_and_classification(self, urls): """Get classification for all matching URLs. :param urls: a sequence of URLs to test :return: a tuple containing matching URL and classification string pertaining to it """ for (url_list, response) in self._query(urls): classification_set = response.text.splitlines() for (url, _class) in zip(url_list, classification_set): if _class != 'ok': yield (url, _class) # depends on [control=['if'], data=['_class']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
def mi_chain_rule(X, y):
    '''
    Decompose the information between all X and y according to the chain rule
    and return all the terms in the chain rule.

    Inputs:
    -------
        X:  iterable of iterables. You should be able to compute
            [mi(x, y) for x in X]
        y:  iterable of symbols

    output:
    -------
        ndarray: terms of the chain rule

    Implementation notes:
        I(X; y) = I(x0, x1, ..., xn; y)
                = I(x0; y) + I(x1; y | x0) + I(x2; y | x0, x1) + ... +
                  I(xn; y | x0, x1, ..., xn-1)
    '''
    # allocate ndarray output
    chain = np.zeros(len(X))

    # first term in the expansion is not a conditional information, but the
    # information between the first x and y
    chain[0] = mi(X[0], y)

    for i in range(1, len(X)):
        chain[i] = cond_mi(X[i], y, X[:i])

    return chain
def function[mi_chain_rule, parameter[X, y]]: constant[ Decompose the information between all X and y according to the chain rule and return all the terms in the chain rule. Inputs: ------- X: iterable of iterables. You should be able to compute [mi(x, y) for x in X] y: iterable of symbols output: ------- ndarray: terms of chaing rule Implemenation notes: I(X; y) = I(x0, x1, ..., xn; y) = I(x0; y) + I(x1;y | x0) + I(x2; y | x0, x1) + ... + I(xn; y | x0, x1, ..., xn-1) ] variable[chain] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[X]]]]] call[name[chain]][constant[0]] assign[=] call[name[mi], parameter[call[name[X]][constant[0]], name[y]]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[X]]]]]] begin[:] call[name[chain]][name[i]] assign[=] call[name[cond_mi], parameter[call[name[X]][name[i]], name[y], call[name[X]][<ast.Slice object at 0x7da1b1992c80>]]] return[name[chain]]
keyword[def] identifier[mi_chain_rule] ( identifier[X] , identifier[y] ): literal[string] identifier[chain] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[X] )) identifier[chain] [ literal[int] ]= identifier[mi] ( identifier[X] [ literal[int] ], identifier[y] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[X] )): identifier[chain] [ identifier[i] ]= identifier[cond_mi] ( identifier[X] [ identifier[i] ], identifier[y] , identifier[X] [: identifier[i] ]) keyword[return] identifier[chain]
def mi_chain_rule(X, y): """ Decompose the information between all X and y according to the chain rule and return all the terms in the chain rule. Inputs: ------- X: iterable of iterables. You should be able to compute [mi(x, y) for x in X] y: iterable of symbols output: ------- ndarray: terms of chaing rule Implemenation notes: I(X; y) = I(x0, x1, ..., xn; y) = I(x0; y) + I(x1;y | x0) + I(x2; y | x0, x1) + ... + I(xn; y | x0, x1, ..., xn-1) """ # allocate ndarray output chain = np.zeros(len(X)) # first term in the expansion is not a conditional information, but the information between the first x and y chain[0] = mi(X[0], y) for i in range(1, len(X)): chain[i] = cond_mi(X[i], y, X[:i]) # depends on [control=['for'], data=['i']] return chain
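The decomposition computed above, written as an equation (this restates the docstring's expansion in LaTeX):

\[ I(x_0, x_1, \ldots, x_n;\, y) \;=\; I(x_0;\, y) \;+\; \sum_{i=1}^{n} I(x_i;\, y \mid x_0, \ldots, x_{i-1}) \]

chain[0] holds the first, unconditional term and chain[i] holds the i-th conditional term.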
def profiles(self):
    """
    A list of all profiles on this web property. You may select a specific
    profile using its name, its id or an index.

    ```python
    property.profiles[0]
    property.profiles['9234823']
    property.profiles['marketing profile']
    ```
    """
    raw_profiles = self.account.service.management().profiles().list(
        accountId=self.account.id,
        webPropertyId=self.id).execute()['items']
    profiles = [Profile(raw, self) for raw in raw_profiles]
    return addressable.List(profiles, indices=['id', 'name'], insensitive=True)
def function[profiles, parameter[self]]: constant[ A list of all profiles on this web property. You may select a specific profile using its name, its id or an index. ```python property.profiles[0] property.profiles['9234823'] property.profiles['marketing profile'] ``` ] variable[raw_profiles] assign[=] call[call[call[call[call[name[self].account.service.management, parameter[]].profiles, parameter[]].list, parameter[]].execute, parameter[]]][constant[items]] variable[profiles] assign[=] <ast.ListComp object at 0x7da1b0f398d0> return[call[name[addressable].List, parameter[name[profiles]]]]
keyword[def] identifier[profiles] ( identifier[self] ): literal[string] identifier[raw_profiles] = identifier[self] . identifier[account] . identifier[service] . identifier[management] (). identifier[profiles] (). identifier[list] ( identifier[accountId] = identifier[self] . identifier[account] . identifier[id] , identifier[webPropertyId] = identifier[self] . identifier[id] ). identifier[execute] ()[ literal[string] ] identifier[profiles] =[ identifier[Profile] ( identifier[raw] , identifier[self] ) keyword[for] identifier[raw] keyword[in] identifier[raw_profiles] ] keyword[return] identifier[addressable] . identifier[List] ( identifier[profiles] , identifier[indices] =[ literal[string] , literal[string] ], identifier[insensitive] = keyword[True] )
def profiles(self): """ A list of all profiles on this web property. You may select a specific profile using its name, its id or an index. ```python property.profiles[0] property.profiles['9234823'] property.profiles['marketing profile'] ``` """ raw_profiles = self.account.service.management().profiles().list(accountId=self.account.id, webPropertyId=self.id).execute()['items'] profiles = [Profile(raw, self) for raw in raw_profiles] return addressable.List(profiles, indices=['id', 'name'], insensitive=True)
def tauchen(N, mu, rho, sigma, m=2):
    """
    Approximate an AR1 process by a finite markov chain using Tauchen's method.

    :param N: scalar, number of nodes for Z
    :param mu: scalar, unconditional mean of process
    :param rho: scalar
    :param sigma: scalar, std. dev. of epsilons
    :param m: max +- std. devs.
    :returns: Z, N*1 vector, nodes for Z.
              Zprob, N*N matrix, transition probabilities

    SJB: This is a port of Martin Floden's 1996 Matlab code to implement
    Tauchen 1986 Economic Letters method. The following comments are Floden's.

    Finds a Markov chain whose sample paths approximate those of the AR(1) process
        z(t+1) = (1-rho)*mu + rho * z(t) + eps(t+1)
    where eps are normal with stddev sigma.
    """
    Z = np.zeros((N, 1))
    Zprob = np.zeros((N, N))
    a = (1 - rho) * mu

    Z[-1] = m * math.sqrt(sigma**2 / (1 - rho**2))
    Z[0] = -1 * Z[-1]
    zstep = (Z[-1] - Z[0]) / (N - 1)

    for i in range(1, N):
        Z[i] = Z[0] + zstep * i

    Z = Z + a / (1 - rho)

    for j in range(0, N):
        for k in range(0, N):
            if k == 0:
                Zprob[j, k] = sp.stats.norm.cdf((Z[0] - a - rho * Z[j] + zstep / 2) / sigma)
            elif k == (N - 1):
                Zprob[j, k] = 1 - sp.stats.norm.cdf((Z[-1] - a - rho * Z[j] - zstep / 2) / sigma)
            else:
                up = sp.stats.norm.cdf((Z[k] - a - rho * Z[j] + zstep / 2) / sigma)
                down = sp.stats.norm.cdf((Z[k] - a - rho * Z[j] - zstep / 2) / sigma)
                Zprob[j, k] = up - down

    return Z, Zprob
def function[tauchen, parameter[N, mu, rho, sigma, m]]: constant[ Approximate an AR1 process by a finite markov chain using Tauchen's method. :param N: scalar, number of nodes for Z :param mu: scalar, unconditional mean of process :param rho: scalar :param sigma: scalar, std. dev. of epsilons :param m: max +- std. devs. :returns: Z, N*1 vector, nodes for Z. Zprob, N*N matrix, transition probabilities SJB: This is a port of Martin Floden's 1996 Matlab code to implement Tauchen 1986 Economic Letters method The following comments are Floden's. Finds a Markov chain whose sample paths approximate those of the AR(1) process z(t+1) = (1-rho)*mu + rho * z(t) + eps(t+1) where eps are normal with stddev sigma. ] variable[Z] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da207f02b90>, <ast.Constant object at 0x7da207f01060>]]]] variable[Zprob] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da207f03d30>, <ast.Name object at 0x7da207f03970>]]]] variable[a] assign[=] binary_operation[binary_operation[constant[1] - name[rho]] * name[mu]] call[name[Z]][<ast.UnaryOp object at 0x7da207f03640>] assign[=] binary_operation[name[m] * call[name[math].sqrt, parameter[binary_operation[binary_operation[name[sigma] ** constant[2]] / binary_operation[constant[1] - binary_operation[name[rho] ** constant[2]]]]]]] call[name[Z]][constant[0]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da207f03550> * call[name[Z]][<ast.UnaryOp object at 0x7da207f01f30>]] variable[zstep] assign[=] binary_operation[binary_operation[call[name[Z]][<ast.UnaryOp object at 0x7da207f02230>] - call[name[Z]][constant[0]]] / binary_operation[name[N] - constant[1]]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], name[N]]]] begin[:] call[name[Z]][name[i]] assign[=] binary_operation[call[name[Z]][constant[0]] + binary_operation[name[zstep] * name[i]]] variable[Z] assign[=] binary_operation[name[Z] + binary_operation[name[a] / binary_operation[constant[1] - name[rho]]]] for taget[name[j]] in starred[call[name[range], parameter[constant[0], name[N]]]] begin[:] for taget[name[k]] in starred[call[name[range], parameter[constant[0], name[N]]]] begin[:] if compare[name[k] equal[==] constant[0]] begin[:] call[name[Zprob]][tuple[[<ast.Name object at 0x7da207f01ff0>, <ast.Name object at 0x7da207f024d0>]]] assign[=] call[name[sp].stats.norm.cdf, parameter[binary_operation[binary_operation[binary_operation[binary_operation[call[name[Z]][constant[0]] - name[a]] - binary_operation[name[rho] * call[name[Z]][name[j]]]] + binary_operation[name[zstep] / constant[2]]] / name[sigma]]]] return[tuple[[<ast.Name object at 0x7da18fe91270>, <ast.Name object at 0x7da18fe91960>]]]
keyword[def] identifier[tauchen] ( identifier[N] , identifier[mu] , identifier[rho] , identifier[sigma] , identifier[m] = literal[int] ): literal[string] identifier[Z] = identifier[np] . identifier[zeros] (( identifier[N] , literal[int] )) identifier[Zprob] = identifier[np] . identifier[zeros] (( identifier[N] , identifier[N] )) identifier[a] =( literal[int] - identifier[rho] )* identifier[mu] identifier[Z] [- literal[int] ]= identifier[m] * identifier[math] . identifier[sqrt] ( identifier[sigma] ** literal[int] /( literal[int] -( identifier[rho] ** literal[int] ))) identifier[Z] [ literal[int] ]=- literal[int] * identifier[Z] [- literal[int] ] identifier[zstep] =( identifier[Z] [- literal[int] ]- identifier[Z] [ literal[int] ])/( identifier[N] - literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[N] ): identifier[Z] [ identifier[i] ]= identifier[Z] [ literal[int] ]+ identifier[zstep] *( identifier[i] ) identifier[Z] = identifier[Z] + identifier[a] /( literal[int] - identifier[rho] ) keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[N] ): keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[N] ): keyword[if] identifier[k] == literal[int] : identifier[Zprob] [ identifier[j] , identifier[k] ]= identifier[sp] . identifier[stats] . identifier[norm] . identifier[cdf] (( identifier[Z] [ literal[int] ]- identifier[a] - identifier[rho] * identifier[Z] [ identifier[j] ]+ identifier[zstep] / literal[int] )/ identifier[sigma] ) keyword[elif] identifier[k] ==( identifier[N] - literal[int] ): identifier[Zprob] [ identifier[j] , identifier[k] ]= literal[int] - identifier[sp] . identifier[stats] . identifier[norm] . identifier[cdf] (( identifier[Z] [- literal[int] ]- identifier[a] - identifier[rho] * identifier[Z] [ identifier[j] ]- identifier[zstep] / literal[int] )/ identifier[sigma] ) keyword[else] : identifier[up] = identifier[sp] . identifier[stats] . identifier[norm] . identifier[cdf] (( identifier[Z] [ identifier[k] ]- identifier[a] - identifier[rho] * identifier[Z] [ identifier[j] ]+ identifier[zstep] / literal[int] )/ identifier[sigma] ) identifier[down] = identifier[sp] . identifier[stats] . identifier[norm] . identifier[cdf] (( identifier[Z] [ identifier[k] ]- identifier[a] - identifier[rho] * identifier[Z] [ identifier[j] ]- identifier[zstep] / literal[int] )/ identifier[sigma] ) identifier[Zprob] [ identifier[j] , identifier[k] ]= identifier[up] - identifier[down] keyword[return] (( identifier[Z] , identifier[Zprob] ))
def tauchen(N, mu, rho, sigma, m=2): """ Approximate an AR1 process by a finite markov chain using Tauchen's method. :param N: scalar, number of nodes for Z :param mu: scalar, unconditional mean of process :param rho: scalar :param sigma: scalar, std. dev. of epsilons :param m: max +- std. devs. :returns: Z, N*1 vector, nodes for Z. Zprob, N*N matrix, transition probabilities SJB: This is a port of Martin Floden's 1996 Matlab code to implement Tauchen 1986 Economic Letters method The following comments are Floden's. Finds a Markov chain whose sample paths approximate those of the AR(1) process z(t+1) = (1-rho)*mu + rho * z(t) + eps(t+1) where eps are normal with stddev sigma. """ Z = np.zeros((N, 1)) Zprob = np.zeros((N, N)) a = (1 - rho) * mu Z[-1] = m * math.sqrt(sigma ** 2 / (1 - rho ** 2)) Z[0] = -1 * Z[-1] zstep = (Z[-1] - Z[0]) / (N - 1) for i in range(1, N): Z[i] = Z[0] + zstep * i # depends on [control=['for'], data=['i']] Z = Z + a / (1 - rho) for j in range(0, N): for k in range(0, N): if k == 0: Zprob[j, k] = sp.stats.norm.cdf((Z[0] - a - rho * Z[j] + zstep / 2) / sigma) # depends on [control=['if'], data=['k']] elif k == N - 1: Zprob[j, k] = 1 - sp.stats.norm.cdf((Z[-1] - a - rho * Z[j] - zstep / 2) / sigma) # depends on [control=['if'], data=['k']] else: up = sp.stats.norm.cdf((Z[k] - a - rho * Z[j] + zstep / 2) / sigma) down = sp.stats.norm.cdf((Z[k] - a - rho * Z[j] - zstep / 2) / sigma) Zprob[j, k] = up - down # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['j']] return (Z, Zprob)
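A minimal usage sketch for the tauchen routine above, under the same imports the source assumes (numpy as np, scipy as sp with scipy.stats loaded, and math); the parameter values are illustrative:

import math
import numpy as np
import scipy as sp
import scipy.stats  # the routine calls sp.stats.norm.cdf

# Discretize z(t+1) = (1 - rho)*mu + rho*z(t) + eps, eps ~ N(0, sigma^2)
Z, Zprob = tauchen(N=5, mu=0.0, rho=0.9, sigma=0.1, m=2)
print(Z.ravel())          # 5 grid nodes, centered on the unconditional mean mu
print(Zprob.sum(axis=1))  # each row sums to 1, i.e. a valid transition matrix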
def strip_extras_markers_from_requirement(req): # type: (TRequirement) -> TRequirement """ Given a :class:`~packaging.requirements.Requirement` instance with markers defining *extra == 'name'*, strip out the extras from the markers and return the cleaned requirement :param PackagingRequirement req: A packaging requirement to clean :return: A cleaned requirement :rtype: PackagingRequirement """ if req is None: raise TypeError("Must pass in a valid requirement, received {0!r}".format(req)) if getattr(req, "marker", None) is not None: marker = req.marker # type: TMarker marker._markers = _strip_extras_markers(marker._markers) if not marker._markers: req.marker = None else: req.marker = marker return req
def function[strip_extras_markers_from_requirement, parameter[req]]: constant[ Given a :class:`~packaging.requirements.Requirement` instance with markers defining *extra == 'name'*, strip out the extras from the markers and return the cleaned requirement :param PackagingRequirement req: A packaging requirement to clean :return: A cleaned requirement :rtype: PackagingRequirement ] if compare[name[req] is constant[None]] begin[:] <ast.Raise object at 0x7da18dc04760> if compare[call[name[getattr], parameter[name[req], constant[marker], constant[None]]] is_not constant[None]] begin[:] variable[marker] assign[=] name[req].marker name[marker]._markers assign[=] call[name[_strip_extras_markers], parameter[name[marker]._markers]] if <ast.UnaryOp object at 0x7da18dc074f0> begin[:] name[req].marker assign[=] constant[None] return[name[req]]
keyword[def] identifier[strip_extras_markers_from_requirement] ( identifier[req] ): literal[string] keyword[if] identifier[req] keyword[is] keyword[None] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[req] )) keyword[if] identifier[getattr] ( identifier[req] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] : identifier[marker] = identifier[req] . identifier[marker] identifier[marker] . identifier[_markers] = identifier[_strip_extras_markers] ( identifier[marker] . identifier[_markers] ) keyword[if] keyword[not] identifier[marker] . identifier[_markers] : identifier[req] . identifier[marker] = keyword[None] keyword[else] : identifier[req] . identifier[marker] = identifier[marker] keyword[return] identifier[req]
def strip_extras_markers_from_requirement(req): # type: (TRequirement) -> TRequirement "\n Given a :class:`~packaging.requirements.Requirement` instance with markers defining\n *extra == 'name'*, strip out the extras from the markers and return the cleaned\n requirement\n\n :param PackagingRequirement req: A packaging requirement to clean\n :return: A cleaned requirement\n :rtype: PackagingRequirement\n " if req is None: raise TypeError('Must pass in a valid requirement, received {0!r}'.format(req)) # depends on [control=['if'], data=['req']] if getattr(req, 'marker', None) is not None: marker = req.marker # type: TMarker marker._markers = _strip_extras_markers(marker._markers) if not marker._markers: req.marker = None # depends on [control=['if'], data=[]] else: req.marker = marker # depends on [control=['if'], data=[]] return req
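A hedged usage sketch for strip_extras_markers_from_requirement: it assumes the private helper _strip_extras_markers is importable from the same module, and the expected output is inferred from the docstring rather than verified against that helper:

from packaging.requirements import Requirement

# A dependency guarded by both an extra and a genuine environment marker.
req = Requirement('requests>=2.0; extra == "security" and python_version >= "3.6"')
cleaned = strip_extras_markers_from_requirement(req)
print(cleaned.marker)  # expected: python_version >= "3.6" (the extra clause stripped)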
def QA_SU_save_deal(dealist, client=DATABASE):
    """Save order_handler's deal_status to the database.

    Arguments:
        dealist {[dataframe]} -- [description]

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """

    if isinstance(dealist, pd.DataFrame):
        collection = client.deal
        collection.create_index(
            [('account_cookie', ASCENDING), ('trade_id', ASCENDING)],
            unique=True
        )
        try:
            dealist = QA_util_to_json_from_pandas(dealist.reset_index())
            collection.insert_many(dealist, ordered=False)
        except Exception as e:
            # Deals that collide with the unique (account_cookie, trade_id)
            # index are already stored, so insertion errors are ignored.
            pass
def function[QA_SU_save_deal, parameter[dealist, client]]:
    constant[Save order_handler's deal_status to the database.

    Arguments:
        dealist {[dataframe]} -- [description]

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    ]
    if call[name[isinstance], parameter[name[dealist], name[pd].DataFrame]] begin[:]
        variable[collection] assign[=] name[client].deal
        call[name[collection].create_index, parameter[list[[<ast.Tuple object at 0x7da1b1eb9030>, <ast.Tuple object at 0x7da1b1eb9e10>]]]]
        <ast.Try object at 0x7da1b1eba050>
keyword[def] identifier[QA_SU_save_deal] ( identifier[dealist] , identifier[client] = identifier[DATABASE] ): literal[string] keyword[if] identifier[isinstance] ( identifier[dealist] , identifier[pd] . identifier[DataFrame] ): identifier[collection] = identifier[client] . identifier[deal] identifier[collection] . identifier[create_index] ( [( literal[string] , identifier[ASCENDING] ), ( literal[string] , identifier[ASCENDING] )], identifier[unique] = keyword[True] ) keyword[try] : identifier[dealist] = identifier[QA_util_to_json_from_pandas] ( identifier[dealist] . identifier[reset_index] ()) identifier[collection] . identifier[insert_many] ( identifier[dealist] , identifier[ordered] = keyword[False] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[pass]
def QA_SU_save_deal(dealist, client=DATABASE):
    """Save order_handler's deal_status to the database.

    Arguments:
        dealist {[dataframe]} -- [description]

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    if isinstance(dealist, pd.DataFrame):
        collection = client.deal
        collection.create_index([('account_cookie', ASCENDING), ('trade_id', ASCENDING)], unique=True)
        try:
            dealist = QA_util_to_json_from_pandas(dealist.reset_index())
            collection.insert_many(dealist, ordered=False) # depends on [control=['try'], data=[]]
        except Exception as e:
            pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
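A sketch of saving a toy deal list with QA_SU_save_deal; it assumes a local MongoDB, the module helper QA_util_to_json_from_pandas in scope, and a hypothetical database name 'quantaxis' standing in for the configured DATABASE handle:

import pandas as pd
import pymongo

# (account_cookie, trade_id) is the unique index, so re-running is safe.
deals = pd.DataFrame({
    'account_cookie': ['acc_1', 'acc_1'],
    'trade_id': ['t0001', 't0002'],
    'trade_price': [10.5, 10.8],
})

db = pymongo.MongoClient('localhost', 27017)['quantaxis']  # hypothetical db name
QA_SU_save_deal(deals, client=db)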
def determine_api_port(public_port, singlenode_mode=False): ''' Determine correct API server listening port based on existence of HTTPS reverse proxy and/or haproxy. public_port: int: standard public port for given service singlenode_mode: boolean: Shuffle ports when only a single unit is present returns: int: the correct listening port for the API service ''' i = 0 if singlenode_mode: i += 1 elif len(peer_units()) > 0 or is_clustered(): i += 1 if https(): i += 1 return public_port - (i * 10)
def function[determine_api_port, parameter[public_port, singlenode_mode]]: constant[ Determine correct API server listening port based on existence of HTTPS reverse proxy and/or haproxy. public_port: int: standard public port for given service singlenode_mode: boolean: Shuffle ports when only a single unit is present returns: int: the correct listening port for the API service ] variable[i] assign[=] constant[0] if name[singlenode_mode] begin[:] <ast.AugAssign object at 0x7da18dc99c60> if call[name[https], parameter[]] begin[:] <ast.AugAssign object at 0x7da18dc9b430> return[binary_operation[name[public_port] - binary_operation[name[i] * constant[10]]]]
keyword[def] identifier[determine_api_port] ( identifier[public_port] , identifier[singlenode_mode] = keyword[False] ): literal[string] identifier[i] = literal[int] keyword[if] identifier[singlenode_mode] : identifier[i] += literal[int] keyword[elif] identifier[len] ( identifier[peer_units] ())> literal[int] keyword[or] identifier[is_clustered] (): identifier[i] += literal[int] keyword[if] identifier[https] (): identifier[i] += literal[int] keyword[return] identifier[public_port] -( identifier[i] * literal[int] )
def determine_api_port(public_port, singlenode_mode=False): """ Determine correct API server listening port based on existence of HTTPS reverse proxy and/or haproxy. public_port: int: standard public port for given service singlenode_mode: boolean: Shuffle ports when only a single unit is present returns: int: the correct listening port for the API service """ i = 0 if singlenode_mode: i += 1 # depends on [control=['if'], data=[]] elif len(peer_units()) > 0 or is_clustered(): i += 1 # depends on [control=['if'], data=[]] if https(): i += 1 # depends on [control=['if'], data=[]] return public_port - i * 10
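The port arithmetic in determine_api_port is easy to check in isolation by stubbing the charm-helper lookups it resolves at call time; the stand-ins below are assumptions replacing the real peer_units, is_clustered, and https helpers in the module namespace:

# Stand-ins for the charmhelpers functions the routine calls.
peer_units = lambda: []        # no peer units
is_clustered = lambda: False   # no hacluster relation
https = lambda: True           # TLS termination is configured

print(determine_api_port(9696))                        # 9686: shifted once for https
print(determine_api_port(9696, singlenode_mode=True))  # 9676: haproxy shift plus https shift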
def construct_request_uri(local_dir, base_path, **kwargs):
    """
    Constructs a special redirect_uri to be used when communicating with
    one OP. Each OP should get their own redirect_uris.

    :param local_dir: Local directory in which to place the file
    :param base_path: Base URL to start with
    :param kwargs:
    :return: 2-tuple with (filename, url)
    """
    _filedir = local_dir
    if not os.path.isdir(_filedir):
        os.makedirs(_filedir)

    _webpath = base_path
    _name = rndstr(10) + ".jwt"
    filename = os.path.join(_filedir, _name)
    while os.path.exists(filename):
        _name = rndstr(10) + ".jwt"  # keep the .jwt suffix when retrying on a name collision
        filename = os.path.join(_filedir, _name)
    _webname = "%s%s" % (_webpath, _name)
    return filename, _webname
def function[construct_request_uri, parameter[local_dir, base_path]]: constant[ Constructs a special redirect_uri to be used when communicating with one OP. Each OP should get their own redirect_uris. :param local_dir: Local directory in which to place the file :param base_path: Base URL to start with :param kwargs: :return: 2-tuple with (filename, url) ] variable[_filedir] assign[=] name[local_dir] if <ast.UnaryOp object at 0x7da1b1b9c7c0> begin[:] call[name[os].makedirs, parameter[name[_filedir]]] variable[_webpath] assign[=] name[base_path] variable[_name] assign[=] binary_operation[call[name[rndstr], parameter[constant[10]]] + constant[.jwt]] variable[filename] assign[=] call[name[os].path.join, parameter[name[_filedir], name[_name]]] while call[name[os].path.exists, parameter[name[filename]]] begin[:] variable[_name] assign[=] call[name[rndstr], parameter[constant[10]]] variable[filename] assign[=] call[name[os].path.join, parameter[name[_filedir], name[_name]]] variable[_webname] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1bdeef0>, <ast.Name object at 0x7da1b1bdef20>]]] return[tuple[[<ast.Name object at 0x7da1b1bdc220>, <ast.Name object at 0x7da1b1bdc1f0>]]]
keyword[def] identifier[construct_request_uri] ( identifier[local_dir] , identifier[base_path] ,** identifier[kwargs] ): literal[string] identifier[_filedir] = identifier[local_dir] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[_filedir] ): identifier[os] . identifier[makedirs] ( identifier[_filedir] ) identifier[_webpath] = identifier[base_path] identifier[_name] = identifier[rndstr] ( literal[int] )+ literal[string] identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[_filedir] , identifier[_name] ) keyword[while] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ): identifier[_name] = identifier[rndstr] ( literal[int] ) identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[_filedir] , identifier[_name] ) identifier[_webname] = literal[string] %( identifier[_webpath] , identifier[_name] ) keyword[return] identifier[filename] , identifier[_webname]
def construct_request_uri(local_dir, base_path, **kwargs): """ Constructs a special redirect_uri to be used when communicating with one OP. Each OP should get their own redirect_uris. :param local_dir: Local directory in which to place the file :param base_path: Base URL to start with :param kwargs: :return: 2-tuple with (filename, url) """ _filedir = local_dir if not os.path.isdir(_filedir): os.makedirs(_filedir) # depends on [control=['if'], data=[]] _webpath = base_path _name = rndstr(10) + '.jwt' filename = os.path.join(_filedir, _name) while os.path.exists(filename): _name = rndstr(10) filename = os.path.join(_filedir, _name) # depends on [control=['while'], data=[]] _webname = '%s%s' % (_webpath, _name) return (filename, _webname)
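A usage sketch for construct_request_uri; since the web name is built by plain string concatenation, base_path should end with '/'. The paths below are illustrative, and rndstr must be importable alongside the function:

# Produces a unique <random>.jwt file path plus the URL it will be served at.
filename, webname = construct_request_uri('/tmp/request_uris/', 'https://rp.example.org/req/')
print(filename)  # e.g. /tmp/request_uris/Ab3xYz9QwE.jwt
print(webname)   # e.g. https://rp.example.org/req/Ab3xYz9QwE.jwt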
def cli(env, identifier): """Cancel global IP.""" mgr = SoftLayer.NetworkManager(env.client) global_ip_id = helpers.resolve_id(mgr.resolve_global_ip_ids, identifier, name='global ip') if not (env.skip_confirmations or formatting.no_going_back(global_ip_id)): raise exceptions.CLIAbort('Aborted') mgr.cancel_global_ip(global_ip_id)
def function[cli, parameter[env, identifier]]: constant[Cancel global IP.] variable[mgr] assign[=] call[name[SoftLayer].NetworkManager, parameter[name[env].client]] variable[global_ip_id] assign[=] call[name[helpers].resolve_id, parameter[name[mgr].resolve_global_ip_ids, name[identifier]]] if <ast.UnaryOp object at 0x7da20e9b3820> begin[:] <ast.Raise object at 0x7da20e9b3fd0> call[name[mgr].cancel_global_ip, parameter[name[global_ip_id]]]
keyword[def] identifier[cli] ( identifier[env] , identifier[identifier] ): literal[string] identifier[mgr] = identifier[SoftLayer] . identifier[NetworkManager] ( identifier[env] . identifier[client] ) identifier[global_ip_id] = identifier[helpers] . identifier[resolve_id] ( identifier[mgr] . identifier[resolve_global_ip_ids] , identifier[identifier] , identifier[name] = literal[string] ) keyword[if] keyword[not] ( identifier[env] . identifier[skip_confirmations] keyword[or] identifier[formatting] . identifier[no_going_back] ( identifier[global_ip_id] )): keyword[raise] identifier[exceptions] . identifier[CLIAbort] ( literal[string] ) identifier[mgr] . identifier[cancel_global_ip] ( identifier[global_ip_id] )
def cli(env, identifier): """Cancel global IP.""" mgr = SoftLayer.NetworkManager(env.client) global_ip_id = helpers.resolve_id(mgr.resolve_global_ip_ids, identifier, name='global ip') if not (env.skip_confirmations or formatting.no_going_back(global_ip_id)): raise exceptions.CLIAbort('Aborted') # depends on [control=['if'], data=[]] mgr.cancel_global_ip(global_ip_id)
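Outside the click wrapper, the same cancellation can be sketched directly through the manager; the client factory is the SoftLayer library's standard entry point, and the IP id below is a placeholder:

import SoftLayer

client = SoftLayer.create_client_from_env()  # requires configured SoftLayer credentials
mgr = SoftLayer.NetworkManager(client)
mgr.cancel_global_ip(12345678)               # hypothetical global IP record id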
def all_sample_md5s(self, type_tag=None):
    """Return a list of all md5s matching the type_tag ('exe', 'pdf', etc.).

    Args:
        type_tag: the type of sample.

    Returns:
        a list of md5s for the matching samples.
    """
    if type_tag:
        cursor = self.database[self.sample_collection].find({'type_tag': type_tag}, {'md5': 1, '_id': 0})
    else:
        cursor = self.database[self.sample_collection].find({}, {'md5': 1, '_id': 0})
    # Index into the projected document directly; dict.values() is a view
    # (not subscriptable) in Python 3, so match.values()[0] would fail there.
    return [match['md5'] for match in cursor]
def function[all_sample_md5s, parameter[self, type_tag]]: constant[Return a list of all md5 matching the type_tag ('exe','pdf', etc). Args: type_tag: the type of sample. Returns: a list of matching samples. ] if name[type_tag] begin[:] variable[cursor] assign[=] call[call[name[self].database][name[self].sample_collection].find, parameter[dictionary[[<ast.Constant object at 0x7da20c6c68f0>], [<ast.Name object at 0x7da20c6c7670>]], dictionary[[<ast.Constant object at 0x7da20c6c64d0>, <ast.Constant object at 0x7da20c6c6680>], [<ast.Constant object at 0x7da20c6c6290>, <ast.Constant object at 0x7da20c6c4b50>]]]] return[<ast.ListComp object at 0x7da20c6c7700>]
keyword[def] identifier[all_sample_md5s] ( identifier[self] , identifier[type_tag] = keyword[None] ): literal[string] keyword[if] identifier[type_tag] : identifier[cursor] = identifier[self] . identifier[database] [ identifier[self] . identifier[sample_collection] ]. identifier[find] ({ literal[string] : identifier[type_tag] },{ literal[string] : literal[int] , literal[string] : literal[int] }) keyword[else] : identifier[cursor] = identifier[self] . identifier[database] [ identifier[self] . identifier[sample_collection] ]. identifier[find] ({},{ literal[string] : literal[int] , literal[string] : literal[int] }) keyword[return] [ identifier[match] . identifier[values] ()[ literal[int] ] keyword[for] identifier[match] keyword[in] identifier[cursor] ]
def all_sample_md5s(self, type_tag=None): """Return a list of all md5 matching the type_tag ('exe','pdf', etc). Args: type_tag: the type of sample. Returns: a list of matching samples. """ if type_tag: cursor = self.database[self.sample_collection].find({'type_tag': type_tag}, {'md5': 1, '_id': 0}) # depends on [control=['if'], data=[]] else: cursor = self.database[self.sample_collection].find({}, {'md5': 1, '_id': 0}) return [match.values()[0] for match in cursor]
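A sketch of hosting all_sample_md5s on a minimal stand-in class, since the real owner class is not shown in this snippet; the collection and database names are illustrative:

import pymongo

class SampleStore:
    """Minimal stand-in for the real owner class (not shown here)."""
    sample_collection = 'samples'  # hypothetical collection name

    def __init__(self):
        self.database = pymongo.MongoClient('localhost', 27017)['workbench']

SampleStore.all_sample_md5s = all_sample_md5s  # attach the function as a method

store = SampleStore()
print(store.all_sample_md5s('exe'))  # md5s of samples tagged 'exe'
print(store.all_sample_md5s())       # md5s of every stored sample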