code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def args_to_props(target: Target, builder: Builder, args: list, kwargs: dict): """Convert build file `args` and `kwargs` to `target` props. Use builder signature to validate builder usage in build-file, raising appropriate exceptions on signature-mismatches. Use builder signature default values to assign props values to args that were not passed in the build-file call. This function handles only the arg/kwargs-to-prop assignment, including default values when necessary. When it returns, if no exception was raised, it is guaranteed that `target.props` contains all args defined in the builder registered signature, with values taken either from the build-file call, or from default values provided in the signature. Specifically, this function DOES NOT do anything about the arg types defined in the builder signature. :raise TypeError: On signature-call mismatch. """ if len(args) > len(builder.sig): # too many positional arguments supplied - say how many we can take raise TypeError('{}() takes {}, but {} were given' .format(target.builder_name, format_num_positional_arguments(builder), len(args))) # read given args into the matching props according to the signature for arg_name, value in zip(builder.sig.keys(), args): target.props[arg_name] = value # read given kwargs into the named props, asserting matching sig arg names for arg_name, value in kwargs.items(): if arg_name not in builder.sig: raise TypeError("{}() got an unexpected keyword argument '{}'" .format(target.builder_name, arg_name)) if arg_name in target.props: raise TypeError("{}() got multiple values for argument '{}'" .format(target.builder_name, arg_name)) target.props[arg_name] = value # go over signature args, assigning default values to anything that wasn't # assigned from args / kwargs, making sure no positional args are missing missing_args = [] for arg_name, sig_spec in builder.sig.items(): if arg_name not in target.props: if sig_spec.default == Empty: missing_args.append(arg_name) else: 
target.props[arg_name] = sig_spec.default if missing_args: # not enough positional arguments supplied - say which # TODO(itamar): match Python's error more closely (last "and "): # foo() missing 3 required positional arguments: 'a', 'b', and 'c' # TODO(itamar): use inflect raise TypeError('{}() missing {} required positional argument{}: {}' .format(target.builder_name, len(missing_args), 's' if len(missing_args) > 1 else '', ', '.join("'{}'".format(arg) for arg in missing_args))) logger.debug('Got props for target: {}', target)
def function[args_to_props, parameter[target, builder, args, kwargs]]: constant[Convert build file `args` and `kwargs` to `target` props. Use builder signature to validate builder usage in build-file, raising appropriate exceptions on signature-mismatches. Use builder signature default values to assign props values to args that were not passed in the build-file call. This function handles only the arg/kwargs-to-prop assignment, including default values when necessary. When it returns, if no exception was raised, it is guaranteed that `target.props` contains all args defined in the builder registered signature, with values taken either from the build-file call, or from default values provided in the signature. Specifically, this function DOES NOT do anything about the arg types defined in the builder signature. :raise TypeError: On signature-call mismatch. ] if compare[call[name[len], parameter[name[args]]] greater[>] call[name[len], parameter[name[builder].sig]]] begin[:] <ast.Raise object at 0x7da1b1b6ad10> for taget[tuple[[<ast.Name object at 0x7da1b1b6ac50>, <ast.Name object at 0x7da1b1b6a1a0>]]] in starred[call[name[zip], parameter[call[name[builder].sig.keys, parameter[]], name[args]]]] begin[:] call[name[target].props][name[arg_name]] assign[=] name[value] for taget[tuple[[<ast.Name object at 0x7da1b1b6b9a0>, <ast.Name object at 0x7da1b1b69ed0>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:] if compare[name[arg_name] <ast.NotIn object at 0x7da2590d7190> name[builder].sig] begin[:] <ast.Raise object at 0x7da1b1b68ca0> if compare[name[arg_name] in name[target].props] begin[:] <ast.Raise object at 0x7da1b1bfbbb0> call[name[target].props][name[arg_name]] assign[=] name[value] variable[missing_args] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b1bfbcd0>, <ast.Name object at 0x7da1b1bfb7c0>]]] in starred[call[name[builder].sig.items, parameter[]]] begin[:] if compare[name[arg_name] <ast.NotIn object at 0x7da2590d7190> 
name[target].props] begin[:] if compare[name[sig_spec].default equal[==] name[Empty]] begin[:] call[name[missing_args].append, parameter[name[arg_name]]] if name[missing_args] begin[:] <ast.Raise object at 0x7da20e957d60> call[name[logger].debug, parameter[constant[Got props for target: {}], name[target]]]
keyword[def] identifier[args_to_props] ( identifier[target] : identifier[Target] , identifier[builder] : identifier[Builder] , identifier[args] : identifier[list] , identifier[kwargs] : identifier[dict] ): literal[string] keyword[if] identifier[len] ( identifier[args] )> identifier[len] ( identifier[builder] . identifier[sig] ): keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[target] . identifier[builder_name] , identifier[format_num_positional_arguments] ( identifier[builder] ), identifier[len] ( identifier[args] ))) keyword[for] identifier[arg_name] , identifier[value] keyword[in] identifier[zip] ( identifier[builder] . identifier[sig] . identifier[keys] (), identifier[args] ): identifier[target] . identifier[props] [ identifier[arg_name] ]= identifier[value] keyword[for] identifier[arg_name] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] (): keyword[if] identifier[arg_name] keyword[not] keyword[in] identifier[builder] . identifier[sig] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[target] . identifier[builder_name] , identifier[arg_name] )) keyword[if] identifier[arg_name] keyword[in] identifier[target] . identifier[props] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[target] . identifier[builder_name] , identifier[arg_name] )) identifier[target] . identifier[props] [ identifier[arg_name] ]= identifier[value] identifier[missing_args] =[] keyword[for] identifier[arg_name] , identifier[sig_spec] keyword[in] identifier[builder] . identifier[sig] . identifier[items] (): keyword[if] identifier[arg_name] keyword[not] keyword[in] identifier[target] . identifier[props] : keyword[if] identifier[sig_spec] . identifier[default] == identifier[Empty] : identifier[missing_args] . identifier[append] ( identifier[arg_name] ) keyword[else] : identifier[target] . 
identifier[props] [ identifier[arg_name] ]= identifier[sig_spec] . identifier[default] keyword[if] identifier[missing_args] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[target] . identifier[builder_name] , identifier[len] ( identifier[missing_args] ), literal[string] keyword[if] identifier[len] ( identifier[missing_args] )> literal[int] keyword[else] literal[string] , literal[string] . identifier[join] ( literal[string] . identifier[format] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[missing_args] ))) identifier[logger] . identifier[debug] ( literal[string] , identifier[target] )
def args_to_props(target: Target, builder: Builder, args: list, kwargs: dict): """Convert build file `args` and `kwargs` to `target` props. Use builder signature to validate builder usage in build-file, raising appropriate exceptions on signature-mismatches. Use builder signature default values to assign props values to args that were not passed in the build-file call. This function handles only the arg/kwargs-to-prop assignment, including default values when necessary. When it returns, if no exception was raised, it is guaranteed that `target.props` contains all args defined in the builder registered signature, with values taken either from the build-file call, or from default values provided in the signature. Specifically, this function DOES NOT do anything about the arg types defined in the builder signature. :raise TypeError: On signature-call mismatch. """ if len(args) > len(builder.sig): # too many positional arguments supplied - say how many we can take raise TypeError('{}() takes {}, but {} were given'.format(target.builder_name, format_num_positional_arguments(builder), len(args))) # depends on [control=['if'], data=[]] # read given args into the matching props according to the signature for (arg_name, value) in zip(builder.sig.keys(), args): target.props[arg_name] = value # depends on [control=['for'], data=[]] # read given kwargs into the named props, asserting matching sig arg names for (arg_name, value) in kwargs.items(): if arg_name not in builder.sig: raise TypeError("{}() got an unexpected keyword argument '{}'".format(target.builder_name, arg_name)) # depends on [control=['if'], data=['arg_name']] if arg_name in target.props: raise TypeError("{}() got multiple values for argument '{}'".format(target.builder_name, arg_name)) # depends on [control=['if'], data=['arg_name']] target.props[arg_name] = value # depends on [control=['for'], data=[]] # go over signature args, assigning default values to anything that wasn't # assigned from args / kwargs, 
making sure no positional args are missing missing_args = [] for (arg_name, sig_spec) in builder.sig.items(): if arg_name not in target.props: if sig_spec.default == Empty: missing_args.append(arg_name) # depends on [control=['if'], data=[]] else: target.props[arg_name] = sig_spec.default # depends on [control=['if'], data=['arg_name']] # depends on [control=['for'], data=[]] if missing_args: # not enough positional arguments supplied - say which # TODO(itamar): match Python's error more closely (last "and "): # foo() missing 3 required positional arguments: 'a', 'b', and 'c' # TODO(itamar): use inflect raise TypeError('{}() missing {} required positional argument{}: {}'.format(target.builder_name, len(missing_args), 's' if len(missing_args) > 1 else '', ', '.join(("'{}'".format(arg) for arg in missing_args)))) # depends on [control=['if'], data=[]] logger.debug('Got props for target: {}', target)
def scan_modules(self): """ Populates the snapshot with loaded modules. """ # The module filenames may be spoofed by malware, # since this information resides in usermode space. # See: http://www.ragestorm.net/blogs/?p=163 # Ignore special process IDs. # PID 0: System Idle Process. Also has a special meaning to the # toolhelp APIs (current process). # PID 4: System Integrity Group. See this forum post for more info: # http://tinyurl.com/ycza8jo # (points to social.technet.microsoft.com) # Only on XP and above # PID 8: System (?) only in Windows 2000 and below AFAIK. # It's probably the same as PID 4 in XP and above. dwProcessId = self.get_pid() if dwProcessId in (0, 4, 8): return # It would seem easier to clear the snapshot first. # But then all open handles would be closed. found_bases = set() with win32.CreateToolhelp32Snapshot(win32.TH32CS_SNAPMODULE, dwProcessId) as hSnapshot: me = win32.Module32First(hSnapshot) while me is not None: lpBaseAddress = me.modBaseAddr fileName = me.szExePath # full pathname if not fileName: fileName = me.szModule # filename only if not fileName: fileName = None else: fileName = PathOperations.native_to_win32_pathname(fileName) found_bases.add(lpBaseAddress) ## if not self.has_module(lpBaseAddress): # XXX triggers a scan if lpBaseAddress not in self.__moduleDict: aModule = Module(lpBaseAddress, fileName = fileName, SizeOfImage = me.modBaseSize, process = self) self._add_module(aModule) else: aModule = self.get_module(lpBaseAddress) if not aModule.fileName: aModule.fileName = fileName if not aModule.SizeOfImage: aModule.SizeOfImage = me.modBaseSize if not aModule.process: aModule.process = self me = win32.Module32Next(hSnapshot) ## for base in self.get_module_bases(): # XXX triggers a scan for base in compat.keys(self.__moduleDict): if base not in found_bases: self._del_module(base)
def function[scan_modules, parameter[self]]: constant[ Populates the snapshot with loaded modules. ] variable[dwProcessId] assign[=] call[name[self].get_pid, parameter[]] if compare[name[dwProcessId] in tuple[[<ast.Constant object at 0x7da20e955e40>, <ast.Constant object at 0x7da20e956a70>, <ast.Constant object at 0x7da20e954ee0>]]] begin[:] return[None] variable[found_bases] assign[=] call[name[set], parameter[]] with call[name[win32].CreateToolhelp32Snapshot, parameter[name[win32].TH32CS_SNAPMODULE, name[dwProcessId]]] begin[:] variable[me] assign[=] call[name[win32].Module32First, parameter[name[hSnapshot]]] while compare[name[me] is_not constant[None]] begin[:] variable[lpBaseAddress] assign[=] name[me].modBaseAddr variable[fileName] assign[=] name[me].szExePath if <ast.UnaryOp object at 0x7da20e9546a0> begin[:] variable[fileName] assign[=] name[me].szModule if <ast.UnaryOp object at 0x7da20e955e10> begin[:] variable[fileName] assign[=] constant[None] call[name[found_bases].add, parameter[name[lpBaseAddress]]] if compare[name[lpBaseAddress] <ast.NotIn object at 0x7da2590d7190> name[self].__moduleDict] begin[:] variable[aModule] assign[=] call[name[Module], parameter[name[lpBaseAddress]]] call[name[self]._add_module, parameter[name[aModule]]] variable[me] assign[=] call[name[win32].Module32Next, parameter[name[hSnapshot]]] for taget[name[base]] in starred[call[name[compat].keys, parameter[name[self].__moduleDict]]] begin[:] if compare[name[base] <ast.NotIn object at 0x7da2590d7190> name[found_bases]] begin[:] call[name[self]._del_module, parameter[name[base]]]
keyword[def] identifier[scan_modules] ( identifier[self] ): literal[string] identifier[dwProcessId] = identifier[self] . identifier[get_pid] () keyword[if] identifier[dwProcessId] keyword[in] ( literal[int] , literal[int] , literal[int] ): keyword[return] identifier[found_bases] = identifier[set] () keyword[with] identifier[win32] . identifier[CreateToolhelp32Snapshot] ( identifier[win32] . identifier[TH32CS_SNAPMODULE] , identifier[dwProcessId] ) keyword[as] identifier[hSnapshot] : identifier[me] = identifier[win32] . identifier[Module32First] ( identifier[hSnapshot] ) keyword[while] identifier[me] keyword[is] keyword[not] keyword[None] : identifier[lpBaseAddress] = identifier[me] . identifier[modBaseAddr] identifier[fileName] = identifier[me] . identifier[szExePath] keyword[if] keyword[not] identifier[fileName] : identifier[fileName] = identifier[me] . identifier[szModule] keyword[if] keyword[not] identifier[fileName] : identifier[fileName] = keyword[None] keyword[else] : identifier[fileName] = identifier[PathOperations] . identifier[native_to_win32_pathname] ( identifier[fileName] ) identifier[found_bases] . identifier[add] ( identifier[lpBaseAddress] ) keyword[if] identifier[lpBaseAddress] keyword[not] keyword[in] identifier[self] . identifier[__moduleDict] : identifier[aModule] = identifier[Module] ( identifier[lpBaseAddress] , identifier[fileName] = identifier[fileName] , identifier[SizeOfImage] = identifier[me] . identifier[modBaseSize] , identifier[process] = identifier[self] ) identifier[self] . identifier[_add_module] ( identifier[aModule] ) keyword[else] : identifier[aModule] = identifier[self] . identifier[get_module] ( identifier[lpBaseAddress] ) keyword[if] keyword[not] identifier[aModule] . identifier[fileName] : identifier[aModule] . identifier[fileName] = identifier[fileName] keyword[if] keyword[not] identifier[aModule] . identifier[SizeOfImage] : identifier[aModule] . identifier[SizeOfImage] = identifier[me] . 
identifier[modBaseSize] keyword[if] keyword[not] identifier[aModule] . identifier[process] : identifier[aModule] . identifier[process] = identifier[self] identifier[me] = identifier[win32] . identifier[Module32Next] ( identifier[hSnapshot] ) keyword[for] identifier[base] keyword[in] identifier[compat] . identifier[keys] ( identifier[self] . identifier[__moduleDict] ): keyword[if] identifier[base] keyword[not] keyword[in] identifier[found_bases] : identifier[self] . identifier[_del_module] ( identifier[base] )
def scan_modules(self): """ Populates the snapshot with loaded modules. """ # The module filenames may be spoofed by malware, # since this information resides in usermode space. # See: http://www.ragestorm.net/blogs/?p=163 # Ignore special process IDs. # PID 0: System Idle Process. Also has a special meaning to the # toolhelp APIs (current process). # PID 4: System Integrity Group. See this forum post for more info: # http://tinyurl.com/ycza8jo # (points to social.technet.microsoft.com) # Only on XP and above # PID 8: System (?) only in Windows 2000 and below AFAIK. # It's probably the same as PID 4 in XP and above. dwProcessId = self.get_pid() if dwProcessId in (0, 4, 8): return # depends on [control=['if'], data=[]] # It would seem easier to clear the snapshot first. # But then all open handles would be closed. found_bases = set() with win32.CreateToolhelp32Snapshot(win32.TH32CS_SNAPMODULE, dwProcessId) as hSnapshot: me = win32.Module32First(hSnapshot) while me is not None: lpBaseAddress = me.modBaseAddr fileName = me.szExePath # full pathname if not fileName: fileName = me.szModule # filename only if not fileName: fileName = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: fileName = PathOperations.native_to_win32_pathname(fileName) found_bases.add(lpBaseAddress) ## if not self.has_module(lpBaseAddress): # XXX triggers a scan if lpBaseAddress not in self.__moduleDict: aModule = Module(lpBaseAddress, fileName=fileName, SizeOfImage=me.modBaseSize, process=self) self._add_module(aModule) # depends on [control=['if'], data=['lpBaseAddress']] else: aModule = self.get_module(lpBaseAddress) if not aModule.fileName: aModule.fileName = fileName # depends on [control=['if'], data=[]] if not aModule.SizeOfImage: aModule.SizeOfImage = me.modBaseSize # depends on [control=['if'], data=[]] if not aModule.process: aModule.process = self # depends on [control=['if'], data=[]] me = win32.Module32Next(hSnapshot) # depends on 
[control=['while'], data=['me']] # depends on [control=['with'], data=['hSnapshot']] ## for base in self.get_module_bases(): # XXX triggers a scan for base in compat.keys(self.__moduleDict): if base not in found_bases: self._del_module(base) # depends on [control=['if'], data=['base']] # depends on [control=['for'], data=['base']]
def get_target_chemblid(target_upid): """Get ChEMBL ID from UniProt upid Parameters ---------- target_upid : str Returns ------- target_chembl_id : str """ url = 'https://www.ebi.ac.uk/chembl/api/data/target.json' params = {'target_components__accession': target_upid} r = requests.get(url, params=params) r.raise_for_status() js = r.json() target_chemblid = js['targets'][0]['target_chembl_id'] return target_chemblid
def function[get_target_chemblid, parameter[target_upid]]: constant[Get ChEMBL ID from UniProt upid Parameters ---------- target_upid : str Returns ------- target_chembl_id : str ] variable[url] assign[=] constant[https://www.ebi.ac.uk/chembl/api/data/target.json] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc98a0>], [<ast.Name object at 0x7da18bccbfa0>]] variable[r] assign[=] call[name[requests].get, parameter[name[url]]] call[name[r].raise_for_status, parameter[]] variable[js] assign[=] call[name[r].json, parameter[]] variable[target_chemblid] assign[=] call[call[call[name[js]][constant[targets]]][constant[0]]][constant[target_chembl_id]] return[name[target_chemblid]]
keyword[def] identifier[get_target_chemblid] ( identifier[target_upid] ): literal[string] identifier[url] = literal[string] identifier[params] ={ literal[string] : identifier[target_upid] } identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] ) identifier[r] . identifier[raise_for_status] () identifier[js] = identifier[r] . identifier[json] () identifier[target_chemblid] = identifier[js] [ literal[string] ][ literal[int] ][ literal[string] ] keyword[return] identifier[target_chemblid]
def get_target_chemblid(target_upid): """Get ChEMBL ID from UniProt upid Parameters ---------- target_upid : str Returns ------- target_chembl_id : str """ url = 'https://www.ebi.ac.uk/chembl/api/data/target.json' params = {'target_components__accession': target_upid} r = requests.get(url, params=params) r.raise_for_status() js = r.json() target_chemblid = js['targets'][0]['target_chembl_id'] return target_chemblid
def relaxNGValidatePushCData(self, data, len): """check the CData parsed for validation in the current stack """ ret = libxml2mod.xmlRelaxNGValidatePushCData(self._o, data, len) return ret
def function[relaxNGValidatePushCData, parameter[self, data, len]]: constant[check the CData parsed for validation in the current stack ] variable[ret] assign[=] call[name[libxml2mod].xmlRelaxNGValidatePushCData, parameter[name[self]._o, name[data], name[len]]] return[name[ret]]
keyword[def] identifier[relaxNGValidatePushCData] ( identifier[self] , identifier[data] , identifier[len] ): literal[string] identifier[ret] = identifier[libxml2mod] . identifier[xmlRelaxNGValidatePushCData] ( identifier[self] . identifier[_o] , identifier[data] , identifier[len] ) keyword[return] identifier[ret]
def relaxNGValidatePushCData(self, data, len): """check the CData parsed for validation in the current stack """ ret = libxml2mod.xmlRelaxNGValidatePushCData(self._o, data, len) return ret
def custom_to_radec(phi1,phi2,T=None,degree=False): """ NAME: custom_to_radec PURPOSE: rotate a custom set of sky coordinates (phi1, phi2) to (ra, dec) given the rotation matrix T for (ra, dec) -> (phi1, phi2) INPUT: phi1 - custom sky coord phi2 - custom sky coord T - matrix defining the transformation (ra, dec) -> (phi1, phi2) degree - default: False. If True, phi1 and phi2 in degrees OUTPUT: (ra, dec) for vector inputs [:, 2] HISTORY: 2018-10-23 - Written - Nathaniel (UofT) """ if T is None: raise ValueError("Must set T= for custom_to_radec") return radec_to_custom(phi1, phi2, T=nu.transpose(T), # T.T = inv(T) degree=degree)
def function[custom_to_radec, parameter[phi1, phi2, T, degree]]: constant[ NAME: custom_to_radec PURPOSE: rotate a custom set of sky coordinates (phi1, phi2) to (ra, dec) given the rotation matrix T for (ra, dec) -> (phi1, phi2) INPUT: phi1 - custom sky coord phi2 - custom sky coord T - matrix defining the transformation (ra, dec) -> (phi1, phi2) degree - default: False. If True, phi1 and phi2 in degrees OUTPUT: (ra, dec) for vector inputs [:, 2] HISTORY: 2018-10-23 - Written - Nathaniel (UofT) ] if compare[name[T] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0cb64a0> return[call[name[radec_to_custom], parameter[name[phi1], name[phi2]]]]
keyword[def] identifier[custom_to_radec] ( identifier[phi1] , identifier[phi2] , identifier[T] = keyword[None] , identifier[degree] = keyword[False] ): literal[string] keyword[if] identifier[T] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[radec_to_custom] ( identifier[phi1] , identifier[phi2] , identifier[T] = identifier[nu] . identifier[transpose] ( identifier[T] ), identifier[degree] = identifier[degree] )
def custom_to_radec(phi1, phi2, T=None, degree=False): """ NAME: custom_to_radec PURPOSE: rotate a custom set of sky coordinates (phi1, phi2) to (ra, dec) given the rotation matrix T for (ra, dec) -> (phi1, phi2) INPUT: phi1 - custom sky coord phi2 - custom sky coord T - matrix defining the transformation (ra, dec) -> (phi1, phi2) degree - default: False. If True, phi1 and phi2 in degrees OUTPUT: (ra, dec) for vector inputs [:, 2] HISTORY: 2018-10-23 - Written - Nathaniel (UofT) """ if T is None: raise ValueError('Must set T= for custom_to_radec') # depends on [control=['if'], data=[]] # T.T = inv(T) return radec_to_custom(phi1, phi2, T=nu.transpose(T), degree=degree)
def daterange(self, datecol, date_start, op, **args): """ Returns rows in a date range """ df = self._daterange(datecol, date_start, op, **args) if df is None: self.err("Can not select date range data") self.df = df
def function[daterange, parameter[self, datecol, date_start, op]]: constant[ Returns rows in a date range ] variable[df] assign[=] call[name[self]._daterange, parameter[name[datecol], name[date_start], name[op]]] if compare[name[df] is constant[None]] begin[:] call[name[self].err, parameter[constant[Can not select date range data]]] name[self].df assign[=] name[df]
keyword[def] identifier[daterange] ( identifier[self] , identifier[datecol] , identifier[date_start] , identifier[op] ,** identifier[args] ): literal[string] identifier[df] = identifier[self] . identifier[_daterange] ( identifier[datecol] , identifier[date_start] , identifier[op] ,** identifier[args] ) keyword[if] identifier[df] keyword[is] keyword[None] : identifier[self] . identifier[err] ( literal[string] ) identifier[self] . identifier[df] = identifier[df]
def daterange(self, datecol, date_start, op, **args): """ Returns rows in a date range """ df = self._daterange(datecol, date_start, op, **args) if df is None: self.err('Can not select date range data') # depends on [control=['if'], data=[]] self.df = df
def verify_sampler(sampler, image, image_shape, view_set, num_slices): """verifies the sampler requested is valid.""" if isinstance(sampler, str): sampler = sampler.lower() if sampler not in ['linear', ]: raise ValueError('Sampling strategy: {} not implemented.'.format(sampler)) out_sampler = sampler out_sampling_method = 'linear' elif isinstance(sampler, Iterable): if any([index < 0 or index > 100 for index in sampler]): raise ValueError('sampling percentages must be in [0-100]% range') if len(sampler) > min(num_slices): num_slices = np.maximum(num_slices, len(sampler)) out_sampler = np.array(sampler) out_sampling_method = 'percentage' elif callable(sampler): # checking if the callable returns a bool for view in view_set: middle_slice = int(image_shape[view] / 2) if not isinstance(sampler(get_axis(image, view, middle_slice)), bool): raise ValueError('sampler callable must return a boolean value (True/False)') out_sampler = sampler out_sampling_method = 'callable' else: raise NotImplementedError('Invalid choice for sampler! Choose one of: ' 'linear, percentage or callable') return out_sampler, out_sampling_method, num_slices
def function[verify_sampler, parameter[sampler, image, image_shape, view_set, num_slices]]: constant[verifies the sampler requested is valid.] if call[name[isinstance], parameter[name[sampler], name[str]]] begin[:] variable[sampler] assign[=] call[name[sampler].lower, parameter[]] if compare[name[sampler] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da20c795630>]]] begin[:] <ast.Raise object at 0x7da20c7951e0> variable[out_sampler] assign[=] name[sampler] variable[out_sampling_method] assign[=] constant[linear] return[tuple[[<ast.Name object at 0x7da18bcc8250>, <ast.Name object at 0x7da18bccb190>, <ast.Name object at 0x7da18bcc8d00>]]]
keyword[def] identifier[verify_sampler] ( identifier[sampler] , identifier[image] , identifier[image_shape] , identifier[view_set] , identifier[num_slices] ): literal[string] keyword[if] identifier[isinstance] ( identifier[sampler] , identifier[str] ): identifier[sampler] = identifier[sampler] . identifier[lower] () keyword[if] identifier[sampler] keyword[not] keyword[in] [ literal[string] ,]: keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[sampler] )) identifier[out_sampler] = identifier[sampler] identifier[out_sampling_method] = literal[string] keyword[elif] identifier[isinstance] ( identifier[sampler] , identifier[Iterable] ): keyword[if] identifier[any] ([ identifier[index] < literal[int] keyword[or] identifier[index] > literal[int] keyword[for] identifier[index] keyword[in] identifier[sampler] ]): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[sampler] )> identifier[min] ( identifier[num_slices] ): identifier[num_slices] = identifier[np] . identifier[maximum] ( identifier[num_slices] , identifier[len] ( identifier[sampler] )) identifier[out_sampler] = identifier[np] . 
identifier[array] ( identifier[sampler] ) identifier[out_sampling_method] = literal[string] keyword[elif] identifier[callable] ( identifier[sampler] ): keyword[for] identifier[view] keyword[in] identifier[view_set] : identifier[middle_slice] = identifier[int] ( identifier[image_shape] [ identifier[view] ]/ literal[int] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[sampler] ( identifier[get_axis] ( identifier[image] , identifier[view] , identifier[middle_slice] )), identifier[bool] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[out_sampler] = identifier[sampler] identifier[out_sampling_method] = literal[string] keyword[else] : keyword[raise] identifier[NotImplementedError] ( literal[string] literal[string] ) keyword[return] identifier[out_sampler] , identifier[out_sampling_method] , identifier[num_slices]
def verify_sampler(sampler, image, image_shape, view_set, num_slices): """verifies the sampler requested is valid.""" if isinstance(sampler, str): sampler = sampler.lower() if sampler not in ['linear']: raise ValueError('Sampling strategy: {} not implemented.'.format(sampler)) # depends on [control=['if'], data=['sampler']] out_sampler = sampler out_sampling_method = 'linear' # depends on [control=['if'], data=[]] elif isinstance(sampler, Iterable): if any([index < 0 or index > 100 for index in sampler]): raise ValueError('sampling percentages must be in [0-100]% range') # depends on [control=['if'], data=[]] if len(sampler) > min(num_slices): num_slices = np.maximum(num_slices, len(sampler)) # depends on [control=['if'], data=[]] out_sampler = np.array(sampler) out_sampling_method = 'percentage' # depends on [control=['if'], data=[]] elif callable(sampler): # checking if the callable returns a bool for view in view_set: middle_slice = int(image_shape[view] / 2) if not isinstance(sampler(get_axis(image, view, middle_slice)), bool): raise ValueError('sampler callable must return a boolean value (True/False)') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['view']] out_sampler = sampler out_sampling_method = 'callable' # depends on [control=['if'], data=[]] else: raise NotImplementedError('Invalid choice for sampler! Choose one of: linear, percentage or callable') return (out_sampler, out_sampling_method, num_slices)
def _handle_qos0_message_flow(self, app_message): """ Handle QOS_0 application message acknowledgment For incoming messages, this method stores the message For outgoing messages, this methods sends PUBLISH :param app_message: :return: """ assert app_message.qos == QOS_0 if app_message.direction == OUTGOING: packet = app_message.build_publish_packet() # Send PUBLISH packet yield from self._send_packet(packet) app_message.publish_packet = packet elif app_message.direction == INCOMING: if app_message.publish_packet.dup_flag: self.logger.warning("[MQTT-3.3.1-2] DUP flag must set to 0 for QOS 0 message. Message ignored: %s" % repr(app_message.publish_packet)) else: try: self.session.delivered_message_queue.put_nowait(app_message) except: self.logger.warning("delivered messages queue full. QOS_0 message discarded")
def function[_handle_qos0_message_flow, parameter[self, app_message]]: constant[ Handle QOS_0 application message acknowledgment For incoming messages, this method stores the message For outgoing messages, this methods sends PUBLISH :param app_message: :return: ] assert[compare[name[app_message].qos equal[==] name[QOS_0]]] if compare[name[app_message].direction equal[==] name[OUTGOING]] begin[:] variable[packet] assign[=] call[name[app_message].build_publish_packet, parameter[]] <ast.YieldFrom object at 0x7da18c4cd0f0> name[app_message].publish_packet assign[=] name[packet]
keyword[def] identifier[_handle_qos0_message_flow] ( identifier[self] , identifier[app_message] ): literal[string] keyword[assert] identifier[app_message] . identifier[qos] == identifier[QOS_0] keyword[if] identifier[app_message] . identifier[direction] == identifier[OUTGOING] : identifier[packet] = identifier[app_message] . identifier[build_publish_packet] () keyword[yield] keyword[from] identifier[self] . identifier[_send_packet] ( identifier[packet] ) identifier[app_message] . identifier[publish_packet] = identifier[packet] keyword[elif] identifier[app_message] . identifier[direction] == identifier[INCOMING] : keyword[if] identifier[app_message] . identifier[publish_packet] . identifier[dup_flag] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] % identifier[repr] ( identifier[app_message] . identifier[publish_packet] )) keyword[else] : keyword[try] : identifier[self] . identifier[session] . identifier[delivered_message_queue] . identifier[put_nowait] ( identifier[app_message] ) keyword[except] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] )
def _handle_qos0_message_flow(self, app_message): """ Handle QOS_0 application message acknowledgment For incoming messages, this method stores the message For outgoing messages, this methods sends PUBLISH :param app_message: :return: """ assert app_message.qos == QOS_0 if app_message.direction == OUTGOING: packet = app_message.build_publish_packet() # Send PUBLISH packet yield from self._send_packet(packet) app_message.publish_packet = packet # depends on [control=['if'], data=[]] elif app_message.direction == INCOMING: if app_message.publish_packet.dup_flag: self.logger.warning('[MQTT-3.3.1-2] DUP flag must set to 0 for QOS 0 message. Message ignored: %s' % repr(app_message.publish_packet)) # depends on [control=['if'], data=[]] else: try: self.session.delivered_message_queue.put_nowait(app_message) # depends on [control=['try'], data=[]] except: self.logger.warning('delivered messages queue full. QOS_0 message discarded') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
def weighted_choice(self, probabilities, key): """Makes a weighted choice between several options. Probabilities is a list of 2-tuples, (probability, option). The probabilties don't need to add up to anything, they are automatically scaled.""" total = sum(x[0] for x in probabilities) choice = total * self._random(key) for probability, option in probabilities: choice -= probability if choice <= 0: return option
def function[weighted_choice, parameter[self, probabilities, key]]: constant[Makes a weighted choice between several options. Probabilities is a list of 2-tuples, (probability, option). The probabilties don't need to add up to anything, they are automatically scaled.] variable[total] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18f7206d0>]] variable[choice] assign[=] binary_operation[name[total] * call[name[self]._random, parameter[name[key]]]] for taget[tuple[[<ast.Name object at 0x7da18f7204f0>, <ast.Name object at 0x7da18f7229b0>]]] in starred[name[probabilities]] begin[:] <ast.AugAssign object at 0x7da18f723970> if compare[name[choice] less_or_equal[<=] constant[0]] begin[:] return[name[option]]
keyword[def] identifier[weighted_choice] ( identifier[self] , identifier[probabilities] , identifier[key] ): literal[string] identifier[total] = identifier[sum] ( identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[probabilities] ) identifier[choice] = identifier[total] * identifier[self] . identifier[_random] ( identifier[key] ) keyword[for] identifier[probability] , identifier[option] keyword[in] identifier[probabilities] : identifier[choice] -= identifier[probability] keyword[if] identifier[choice] <= literal[int] : keyword[return] identifier[option]
def weighted_choice(self, probabilities, key): """Makes a weighted choice between several options. Probabilities is a list of 2-tuples, (probability, option). The probabilties don't need to add up to anything, they are automatically scaled.""" total = sum((x[0] for x in probabilities)) choice = total * self._random(key) for (probability, option) in probabilities: choice -= probability if choice <= 0: return option # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def evaluate_array(self, x): '''Simple way of evaluating a polynomial at value x, but here we return both the full array (evaluated at each polynomial position) and the sum''' x_gf = self.coefficients[0].__class__(x) arr = [self.coefficients[-i]*x_gf**(i-1) for i in _range(len(self), 0, -1)] # if x == 1: arr = sum(self.coefficients) return arr, sum(arr)
def function[evaluate_array, parameter[self, x]]: constant[Simple way of evaluating a polynomial at value x, but here we return both the full array (evaluated at each polynomial position) and the sum] variable[x_gf] assign[=] call[call[name[self].coefficients][constant[0]].__class__, parameter[name[x]]] variable[arr] assign[=] <ast.ListComp object at 0x7da18f00f7c0> return[tuple[[<ast.Name object at 0x7da18f00ccd0>, <ast.Call object at 0x7da18f00d7b0>]]]
keyword[def] identifier[evaluate_array] ( identifier[self] , identifier[x] ): literal[string] identifier[x_gf] = identifier[self] . identifier[coefficients] [ literal[int] ]. identifier[__class__] ( identifier[x] ) identifier[arr] =[ identifier[self] . identifier[coefficients] [- identifier[i] ]* identifier[x_gf] **( identifier[i] - literal[int] ) keyword[for] identifier[i] keyword[in] identifier[_range] ( identifier[len] ( identifier[self] ), literal[int] ,- literal[int] )] keyword[return] identifier[arr] , identifier[sum] ( identifier[arr] )
def evaluate_array(self, x): """Simple way of evaluating a polynomial at value x, but here we return both the full array (evaluated at each polynomial position) and the sum""" x_gf = self.coefficients[0].__class__(x) arr = [self.coefficients[-i] * x_gf ** (i - 1) for i in _range(len(self), 0, -1)] # if x == 1: arr = sum(self.coefficients) return (arr, sum(arr))
def run_with_configuration(self, configuration): """ Executes a BigQuery SQL query. See here: https://cloud.google.com/bigquery/docs/reference/v2/jobs For more details about the configuration parameter. :param configuration: The configuration parameter maps directly to BigQuery's configuration field in the job object. See https://cloud.google.com/bigquery/docs/reference/v2/jobs for details. """ jobs = self.service.jobs() job_data = {'configuration': configuration} # Send query and wait for reply. query_reply = jobs \ .insert(projectId=self.project_id, body=job_data) \ .execute(num_retries=self.num_retries) self.running_job_id = query_reply['jobReference']['jobId'] if 'location' in query_reply['jobReference']: location = query_reply['jobReference']['location'] else: location = self.location # Wait for query to finish. keep_polling_job = True while keep_polling_job: try: if location: job = jobs.get( projectId=self.project_id, jobId=self.running_job_id, location=location).execute(num_retries=self.num_retries) else: job = jobs.get( projectId=self.project_id, jobId=self.running_job_id).execute(num_retries=self.num_retries) if job['status']['state'] == 'DONE': keep_polling_job = False # Check if job had errors. if 'errorResult' in job['status']: raise Exception( 'BigQuery job failed. Final error was: {}. The job was: {}'. format(job['status']['errorResult'], job)) else: self.log.info('Waiting for job to complete : %s, %s', self.project_id, self.running_job_id) time.sleep(5) except HttpError as err: if err.resp.status in [500, 503]: self.log.info( '%s: Retryable error, waiting for job to complete: %s', err.resp.status, self.running_job_id) time.sleep(5) else: raise Exception( 'BigQuery job status check failed. Final error was: {}'. format(err.resp.status)) return self.running_job_id
def function[run_with_configuration, parameter[self, configuration]]: constant[ Executes a BigQuery SQL query. See here: https://cloud.google.com/bigquery/docs/reference/v2/jobs For more details about the configuration parameter. :param configuration: The configuration parameter maps directly to BigQuery's configuration field in the job object. See https://cloud.google.com/bigquery/docs/reference/v2/jobs for details. ] variable[jobs] assign[=] call[name[self].service.jobs, parameter[]] variable[job_data] assign[=] dictionary[[<ast.Constant object at 0x7da2054a7b20>], [<ast.Name object at 0x7da2054a5420>]] variable[query_reply] assign[=] call[call[name[jobs].insert, parameter[]].execute, parameter[]] name[self].running_job_id assign[=] call[call[name[query_reply]][constant[jobReference]]][constant[jobId]] if compare[constant[location] in call[name[query_reply]][constant[jobReference]]] begin[:] variable[location] assign[=] call[call[name[query_reply]][constant[jobReference]]][constant[location]] variable[keep_polling_job] assign[=] constant[True] while name[keep_polling_job] begin[:] <ast.Try object at 0x7da2054a7fd0> return[name[self].running_job_id]
keyword[def] identifier[run_with_configuration] ( identifier[self] , identifier[configuration] ): literal[string] identifier[jobs] = identifier[self] . identifier[service] . identifier[jobs] () identifier[job_data] ={ literal[string] : identifier[configuration] } identifier[query_reply] = identifier[jobs] . identifier[insert] ( identifier[projectId] = identifier[self] . identifier[project_id] , identifier[body] = identifier[job_data] ). identifier[execute] ( identifier[num_retries] = identifier[self] . identifier[num_retries] ) identifier[self] . identifier[running_job_id] = identifier[query_reply] [ literal[string] ][ literal[string] ] keyword[if] literal[string] keyword[in] identifier[query_reply] [ literal[string] ]: identifier[location] = identifier[query_reply] [ literal[string] ][ literal[string] ] keyword[else] : identifier[location] = identifier[self] . identifier[location] identifier[keep_polling_job] = keyword[True] keyword[while] identifier[keep_polling_job] : keyword[try] : keyword[if] identifier[location] : identifier[job] = identifier[jobs] . identifier[get] ( identifier[projectId] = identifier[self] . identifier[project_id] , identifier[jobId] = identifier[self] . identifier[running_job_id] , identifier[location] = identifier[location] ). identifier[execute] ( identifier[num_retries] = identifier[self] . identifier[num_retries] ) keyword[else] : identifier[job] = identifier[jobs] . identifier[get] ( identifier[projectId] = identifier[self] . identifier[project_id] , identifier[jobId] = identifier[self] . identifier[running_job_id] ). identifier[execute] ( identifier[num_retries] = identifier[self] . identifier[num_retries] ) keyword[if] identifier[job] [ literal[string] ][ literal[string] ]== literal[string] : identifier[keep_polling_job] = keyword[False] keyword[if] literal[string] keyword[in] identifier[job] [ literal[string] ]: keyword[raise] identifier[Exception] ( literal[string] . 
identifier[format] ( identifier[job] [ literal[string] ][ literal[string] ], identifier[job] )) keyword[else] : identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[self] . identifier[project_id] , identifier[self] . identifier[running_job_id] ) identifier[time] . identifier[sleep] ( literal[int] ) keyword[except] identifier[HttpError] keyword[as] identifier[err] : keyword[if] identifier[err] . identifier[resp] . identifier[status] keyword[in] [ literal[int] , literal[int] ]: identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[err] . identifier[resp] . identifier[status] , identifier[self] . identifier[running_job_id] ) identifier[time] . identifier[sleep] ( literal[int] ) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[err] . identifier[resp] . identifier[status] )) keyword[return] identifier[self] . identifier[running_job_id]
def run_with_configuration(self, configuration): """ Executes a BigQuery SQL query. See here: https://cloud.google.com/bigquery/docs/reference/v2/jobs For more details about the configuration parameter. :param configuration: The configuration parameter maps directly to BigQuery's configuration field in the job object. See https://cloud.google.com/bigquery/docs/reference/v2/jobs for details. """ jobs = self.service.jobs() job_data = {'configuration': configuration} # Send query and wait for reply. query_reply = jobs.insert(projectId=self.project_id, body=job_data).execute(num_retries=self.num_retries) self.running_job_id = query_reply['jobReference']['jobId'] if 'location' in query_reply['jobReference']: location = query_reply['jobReference']['location'] # depends on [control=['if'], data=[]] else: location = self.location # Wait for query to finish. keep_polling_job = True while keep_polling_job: try: if location: job = jobs.get(projectId=self.project_id, jobId=self.running_job_id, location=location).execute(num_retries=self.num_retries) # depends on [control=['if'], data=[]] else: job = jobs.get(projectId=self.project_id, jobId=self.running_job_id).execute(num_retries=self.num_retries) if job['status']['state'] == 'DONE': keep_polling_job = False # Check if job had errors. if 'errorResult' in job['status']: raise Exception('BigQuery job failed. Final error was: {}. The job was: {}'.format(job['status']['errorResult'], job)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: self.log.info('Waiting for job to complete : %s, %s', self.project_id, self.running_job_id) time.sleep(5) # depends on [control=['try'], data=[]] except HttpError as err: if err.resp.status in [500, 503]: self.log.info('%s: Retryable error, waiting for job to complete: %s', err.resp.status, self.running_job_id) time.sleep(5) # depends on [control=['if'], data=[]] else: raise Exception('BigQuery job status check failed. 
Final error was: {}'.format(err.resp.status)) # depends on [control=['except'], data=['err']] # depends on [control=['while'], data=[]] return self.running_job_id
def send(self, metrics): """Send the metrics to zabbix server. :type metrics: list :param metrics: List of :class:`zabbix.sender.ZabbixMetric` to send to Zabbix :rtype: :class:`pyzabbix.sender.ZabbixResponse` :return: Parsed response from Zabbix Server """ result = ZabbixResponse() for m in range(0, len(metrics), self.chunk_size): result.parse(self._chunk_send(metrics[m:m + self.chunk_size])) return result
def function[send, parameter[self, metrics]]: constant[Send the metrics to zabbix server. :type metrics: list :param metrics: List of :class:`zabbix.sender.ZabbixMetric` to send to Zabbix :rtype: :class:`pyzabbix.sender.ZabbixResponse` :return: Parsed response from Zabbix Server ] variable[result] assign[=] call[name[ZabbixResponse], parameter[]] for taget[name[m]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[metrics]]], name[self].chunk_size]]] begin[:] call[name[result].parse, parameter[call[name[self]._chunk_send, parameter[call[name[metrics]][<ast.Slice object at 0x7da1b2344bb0>]]]]] return[name[result]]
keyword[def] identifier[send] ( identifier[self] , identifier[metrics] ): literal[string] identifier[result] = identifier[ZabbixResponse] () keyword[for] identifier[m] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[metrics] ), identifier[self] . identifier[chunk_size] ): identifier[result] . identifier[parse] ( identifier[self] . identifier[_chunk_send] ( identifier[metrics] [ identifier[m] : identifier[m] + identifier[self] . identifier[chunk_size] ])) keyword[return] identifier[result]
def send(self, metrics): """Send the metrics to zabbix server. :type metrics: list :param metrics: List of :class:`zabbix.sender.ZabbixMetric` to send to Zabbix :rtype: :class:`pyzabbix.sender.ZabbixResponse` :return: Parsed response from Zabbix Server """ result = ZabbixResponse() for m in range(0, len(metrics), self.chunk_size): result.parse(self._chunk_send(metrics[m:m + self.chunk_size])) # depends on [control=['for'], data=['m']] return result
def _retry_on_connection_error(func: Callable) -> Callable: """Decorator to retry the function max_connection_attemps number of times. Herewith-decorated functions need an ``_attempt`` keyword argument. This is to decorate functions that do network requests that may fail. Note that :meth:`.get_json`, :meth:`.get_iphone_json`, :meth:`.graphql_query` and :meth:`.graphql_node_list` already have their own logic for retrying, hence functions that only use these for network access must not be decorated with this decorator.""" @wraps(func) def call(instaloader, *args, **kwargs): try: return func(instaloader, *args, **kwargs) except (urllib3.exceptions.HTTPError, requests.exceptions.RequestException, ConnectionException) as err: error_string = "{}({}): {}".format(func.__name__, ', '.join([repr(arg) for arg in args]), err) if (kwargs.get('_attempt') or 1) == instaloader.context.max_connection_attempts: raise ConnectionException(error_string) from None instaloader.context.error(error_string + " [retrying; skip with ^C]", repeat_at_end=False) try: if kwargs.get('_attempt'): kwargs['_attempt'] += 1 else: kwargs['_attempt'] = 2 instaloader.context.do_sleep() return call(instaloader, *args, **kwargs) except KeyboardInterrupt: instaloader.context.error("[skipped by user]", repeat_at_end=False) raise ConnectionException(error_string) from None return call
def function[_retry_on_connection_error, parameter[func]]: constant[Decorator to retry the function max_connection_attemps number of times. Herewith-decorated functions need an ``_attempt`` keyword argument. This is to decorate functions that do network requests that may fail. Note that :meth:`.get_json`, :meth:`.get_iphone_json`, :meth:`.graphql_query` and :meth:`.graphql_node_list` already have their own logic for retrying, hence functions that only use these for network access must not be decorated with this decorator.] def function[call, parameter[instaloader]]: <ast.Try object at 0x7da18f00f070> return[name[call]]
keyword[def] identifier[_retry_on_connection_error] ( identifier[func] : identifier[Callable] )-> identifier[Callable] : literal[string] @ identifier[wraps] ( identifier[func] ) keyword[def] identifier[call] ( identifier[instaloader] ,* identifier[args] ,** identifier[kwargs] ): keyword[try] : keyword[return] identifier[func] ( identifier[instaloader] ,* identifier[args] ,** identifier[kwargs] ) keyword[except] ( identifier[urllib3] . identifier[exceptions] . identifier[HTTPError] , identifier[requests] . identifier[exceptions] . identifier[RequestException] , identifier[ConnectionException] ) keyword[as] identifier[err] : identifier[error_string] = literal[string] . identifier[format] ( identifier[func] . identifier[__name__] , literal[string] . identifier[join] ([ identifier[repr] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[args] ]), identifier[err] ) keyword[if] ( identifier[kwargs] . identifier[get] ( literal[string] ) keyword[or] literal[int] )== identifier[instaloader] . identifier[context] . identifier[max_connection_attempts] : keyword[raise] identifier[ConnectionException] ( identifier[error_string] ) keyword[from] keyword[None] identifier[instaloader] . identifier[context] . identifier[error] ( identifier[error_string] + literal[string] , identifier[repeat_at_end] = keyword[False] ) keyword[try] : keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): identifier[kwargs] [ literal[string] ]+= literal[int] keyword[else] : identifier[kwargs] [ literal[string] ]= literal[int] identifier[instaloader] . identifier[context] . identifier[do_sleep] () keyword[return] identifier[call] ( identifier[instaloader] ,* identifier[args] ,** identifier[kwargs] ) keyword[except] identifier[KeyboardInterrupt] : identifier[instaloader] . identifier[context] . 
identifier[error] ( literal[string] , identifier[repeat_at_end] = keyword[False] ) keyword[raise] identifier[ConnectionException] ( identifier[error_string] ) keyword[from] keyword[None] keyword[return] identifier[call]
def _retry_on_connection_error(func: Callable) -> Callable: """Decorator to retry the function max_connection_attemps number of times. Herewith-decorated functions need an ``_attempt`` keyword argument. This is to decorate functions that do network requests that may fail. Note that :meth:`.get_json`, :meth:`.get_iphone_json`, :meth:`.graphql_query` and :meth:`.graphql_node_list` already have their own logic for retrying, hence functions that only use these for network access must not be decorated with this decorator.""" @wraps(func) def call(instaloader, *args, **kwargs): try: return func(instaloader, *args, **kwargs) # depends on [control=['try'], data=[]] except (urllib3.exceptions.HTTPError, requests.exceptions.RequestException, ConnectionException) as err: error_string = '{}({}): {}'.format(func.__name__, ', '.join([repr(arg) for arg in args]), err) if (kwargs.get('_attempt') or 1) == instaloader.context.max_connection_attempts: raise ConnectionException(error_string) from None # depends on [control=['if'], data=[]] instaloader.context.error(error_string + ' [retrying; skip with ^C]', repeat_at_end=False) try: if kwargs.get('_attempt'): kwargs['_attempt'] += 1 # depends on [control=['if'], data=[]] else: kwargs['_attempt'] = 2 instaloader.context.do_sleep() return call(instaloader, *args, **kwargs) # depends on [control=['try'], data=[]] except KeyboardInterrupt: instaloader.context.error('[skipped by user]', repeat_at_end=False) raise ConnectionException(error_string) from None # depends on [control=['except'], data=[]] # depends on [control=['except'], data=['err']] return call
def domain_from_url(url): """ Get root domain from url. Will prune away query strings, url paths, protocol prefix and sub-domains Exceptions will be raised on invalid urls """ ext = tldextract.extract(url) if not ext.suffix: raise InvalidURLException() new_url = ext.domain + "." + ext.suffix return new_url
def function[domain_from_url, parameter[url]]: constant[ Get root domain from url. Will prune away query strings, url paths, protocol prefix and sub-domains Exceptions will be raised on invalid urls ] variable[ext] assign[=] call[name[tldextract].extract, parameter[name[url]]] if <ast.UnaryOp object at 0x7da1b023e1d0> begin[:] <ast.Raise object at 0x7da1b023e050> variable[new_url] assign[=] binary_operation[binary_operation[name[ext].domain + constant[.]] + name[ext].suffix] return[name[new_url]]
keyword[def] identifier[domain_from_url] ( identifier[url] ): literal[string] identifier[ext] = identifier[tldextract] . identifier[extract] ( identifier[url] ) keyword[if] keyword[not] identifier[ext] . identifier[suffix] : keyword[raise] identifier[InvalidURLException] () identifier[new_url] = identifier[ext] . identifier[domain] + literal[string] + identifier[ext] . identifier[suffix] keyword[return] identifier[new_url]
def domain_from_url(url): """ Get root domain from url. Will prune away query strings, url paths, protocol prefix and sub-domains Exceptions will be raised on invalid urls """ ext = tldextract.extract(url) if not ext.suffix: raise InvalidURLException() # depends on [control=['if'], data=[]] new_url = ext.domain + '.' + ext.suffix return new_url
def convert_labels(Y, source, dest): """Convert a matrix from one label type to another Args: Y: A np.ndarray or torch.Tensor of labels (ints) source: The convention the labels are currently expressed in dest: The convention to convert the labels to Conventions: 'categorical': [0: abstain, 1: positive, 2: negative] 'plusminus': [0: abstain, 1: positive, -1: negative] 'onezero': [0: negative, 1: positive] Note that converting to 'onezero' will combine abstain and negative labels. """ if Y is None: return Y if isinstance(Y, np.ndarray): Y = Y.copy() assert isinstance(Y, int) elif isinstance(Y, torch.Tensor): Y = Y.clone() assert np.sum(Y.numpy() - Y.numpy().astype(int)) == 0.0 else: raise ValueError("Unrecognized label data type.") negative_map = {"categorical": 2, "plusminus": -1, "onezero": 0} Y[Y == negative_map[source]] = negative_map[dest] return Y
def function[convert_labels, parameter[Y, source, dest]]: constant[Convert a matrix from one label type to another Args: Y: A np.ndarray or torch.Tensor of labels (ints) source: The convention the labels are currently expressed in dest: The convention to convert the labels to Conventions: 'categorical': [0: abstain, 1: positive, 2: negative] 'plusminus': [0: abstain, 1: positive, -1: negative] 'onezero': [0: negative, 1: positive] Note that converting to 'onezero' will combine abstain and negative labels. ] if compare[name[Y] is constant[None]] begin[:] return[name[Y]] if call[name[isinstance], parameter[name[Y], name[np].ndarray]] begin[:] variable[Y] assign[=] call[name[Y].copy, parameter[]] assert[call[name[isinstance], parameter[name[Y], name[int]]]] variable[negative_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c58b80>, <ast.Constant object at 0x7da1b1c586a0>, <ast.Constant object at 0x7da1b1c58670>], [<ast.Constant object at 0x7da1b1c58b50>, <ast.UnaryOp object at 0x7da1b1c58be0>, <ast.Constant object at 0x7da1b1c58460>]] call[name[Y]][compare[name[Y] equal[==] call[name[negative_map]][name[source]]]] assign[=] call[name[negative_map]][name[dest]] return[name[Y]]
keyword[def] identifier[convert_labels] ( identifier[Y] , identifier[source] , identifier[dest] ): literal[string] keyword[if] identifier[Y] keyword[is] keyword[None] : keyword[return] identifier[Y] keyword[if] identifier[isinstance] ( identifier[Y] , identifier[np] . identifier[ndarray] ): identifier[Y] = identifier[Y] . identifier[copy] () keyword[assert] identifier[isinstance] ( identifier[Y] , identifier[int] ) keyword[elif] identifier[isinstance] ( identifier[Y] , identifier[torch] . identifier[Tensor] ): identifier[Y] = identifier[Y] . identifier[clone] () keyword[assert] identifier[np] . identifier[sum] ( identifier[Y] . identifier[numpy] ()- identifier[Y] . identifier[numpy] (). identifier[astype] ( identifier[int] ))== literal[int] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[negative_map] ={ literal[string] : literal[int] , literal[string] :- literal[int] , literal[string] : literal[int] } identifier[Y] [ identifier[Y] == identifier[negative_map] [ identifier[source] ]]= identifier[negative_map] [ identifier[dest] ] keyword[return] identifier[Y]
def convert_labels(Y, source, dest): """Convert a matrix from one label type to another Args: Y: A np.ndarray or torch.Tensor of labels (ints) source: The convention the labels are currently expressed in dest: The convention to convert the labels to Conventions: 'categorical': [0: abstain, 1: positive, 2: negative] 'plusminus': [0: abstain, 1: positive, -1: negative] 'onezero': [0: negative, 1: positive] Note that converting to 'onezero' will combine abstain and negative labels. """ if Y is None: return Y # depends on [control=['if'], data=['Y']] if isinstance(Y, np.ndarray): Y = Y.copy() assert isinstance(Y, int) # depends on [control=['if'], data=[]] elif isinstance(Y, torch.Tensor): Y = Y.clone() assert np.sum(Y.numpy() - Y.numpy().astype(int)) == 0.0 # depends on [control=['if'], data=[]] else: raise ValueError('Unrecognized label data type.') negative_map = {'categorical': 2, 'plusminus': -1, 'onezero': 0} Y[Y == negative_map[source]] = negative_map[dest] return Y
def get_managed_object(handle, class_id, params, inMo=None, in_heir=False, dump=False): """Get the specified MO from UCS Manager. :param managed_object: MO classid :in_filter: input filter value :returns: Managed Object :raises: UcsException in case of failure. """ return handle.GetManagedObject(inMo, class_id, params, inHierarchical=in_heir, dumpXml=dump)
def function[get_managed_object, parameter[handle, class_id, params, inMo, in_heir, dump]]: constant[Get the specified MO from UCS Manager. :param managed_object: MO classid :in_filter: input filter value :returns: Managed Object :raises: UcsException in case of failure. ] return[call[name[handle].GetManagedObject, parameter[name[inMo], name[class_id], name[params]]]]
keyword[def] identifier[get_managed_object] ( identifier[handle] , identifier[class_id] , identifier[params] , identifier[inMo] = keyword[None] , identifier[in_heir] = keyword[False] , identifier[dump] = keyword[False] ): literal[string] keyword[return] identifier[handle] . identifier[GetManagedObject] ( identifier[inMo] , identifier[class_id] , identifier[params] , identifier[inHierarchical] = identifier[in_heir] , identifier[dumpXml] = identifier[dump] )
def get_managed_object(handle, class_id, params, inMo=None, in_heir=False, dump=False): """Get the specified MO from UCS Manager. :param managed_object: MO classid :in_filter: input filter value :returns: Managed Object :raises: UcsException in case of failure. """ return handle.GetManagedObject(inMo, class_id, params, inHierarchical=in_heir, dumpXml=dump)
def getP1(self): """ Left, upper point """ return Point(self.center[0] - self.radius, self.center[1] - self.radius)
def function[getP1, parameter[self]]: constant[ Left, upper point ] return[call[name[Point], parameter[binary_operation[call[name[self].center][constant[0]] - name[self].radius], binary_operation[call[name[self].center][constant[1]] - name[self].radius]]]]
keyword[def] identifier[getP1] ( identifier[self] ): literal[string] keyword[return] identifier[Point] ( identifier[self] . identifier[center] [ literal[int] ]- identifier[self] . identifier[radius] , identifier[self] . identifier[center] [ literal[int] ]- identifier[self] . identifier[radius] )
def getP1(self): """ Left, upper point """ return Point(self.center[0] - self.radius, self.center[1] - self.radius)
def groups(self, group_type=None, filters=None, params=None): """ Gets all groups from a tag. Args: filters: params: group_type: """ group = self._tcex.ti.group(group_type) for g in self.tc_requests.groups_from_tag(group, self.name, filters=filters, params=params): yield g
def function[groups, parameter[self, group_type, filters, params]]: constant[ Gets all groups from a tag. Args: filters: params: group_type: ] variable[group] assign[=] call[name[self]._tcex.ti.group, parameter[name[group_type]]] for taget[name[g]] in starred[call[name[self].tc_requests.groups_from_tag, parameter[name[group], name[self].name]]] begin[:] <ast.Yield object at 0x7da18f810af0>
keyword[def] identifier[groups] ( identifier[self] , identifier[group_type] = keyword[None] , identifier[filters] = keyword[None] , identifier[params] = keyword[None] ): literal[string] identifier[group] = identifier[self] . identifier[_tcex] . identifier[ti] . identifier[group] ( identifier[group_type] ) keyword[for] identifier[g] keyword[in] identifier[self] . identifier[tc_requests] . identifier[groups_from_tag] ( identifier[group] , identifier[self] . identifier[name] , identifier[filters] = identifier[filters] , identifier[params] = identifier[params] ): keyword[yield] identifier[g]
def groups(self, group_type=None, filters=None, params=None): """ Gets all groups from a tag. Args: filters: params: group_type: """ group = self._tcex.ti.group(group_type) for g in self.tc_requests.groups_from_tag(group, self.name, filters=filters, params=params): yield g # depends on [control=['for'], data=['g']]
def mad(data, axis=None): """ Computes the median absolute deviation of *data* along a given *axis*. See `link <https://en.wikipedia.org/wiki/Median_absolute_deviation>`_ for details. **Parameters** data : array-like **Returns** mad : number or array-like """ return median(absolute(data - median(data, axis)), axis)
def function[mad, parameter[data, axis]]: constant[ Computes the median absolute deviation of *data* along a given *axis*. See `link <https://en.wikipedia.org/wiki/Median_absolute_deviation>`_ for details. **Parameters** data : array-like **Returns** mad : number or array-like ] return[call[name[median], parameter[call[name[absolute], parameter[binary_operation[name[data] - call[name[median], parameter[name[data], name[axis]]]]]], name[axis]]]]
keyword[def] identifier[mad] ( identifier[data] , identifier[axis] = keyword[None] ): literal[string] keyword[return] identifier[median] ( identifier[absolute] ( identifier[data] - identifier[median] ( identifier[data] , identifier[axis] )), identifier[axis] )
def mad(data, axis=None): """ Computes the median absolute deviation of *data* along a given *axis*. See `link <https://en.wikipedia.org/wiki/Median_absolute_deviation>`_ for details. **Parameters** data : array-like **Returns** mad : number or array-like """ return median(absolute(data - median(data, axis)), axis)
def next_bit_address(self): """Gets the next boolean address""" if self._current_bit_address == "": if self._is_16bit: return "{0}.{1}".format( self.next_address(), "00") return "{0}.{1}".format( self.next_address(), "0") if self._is_16bit: bool_half = int(self._current_bit_address.split(".")[1]) if bool_half < 4: register_half = self._current_bit_address.split(".")[0] return "{0}.{1}".format( register_half, pad_zeroes(str(bool_half + 1), 2)) self.move_to_next_address(self._size_of_current_register_address) return "{0}.{1}".format( self.next_address(), "00") bool_half = int(self._current_bit_address.split(".")[1]) if bool_half < 3: register_half = self._current_bit_address.split(".")[0] return "{0}.{1}".format( register_half, bool_half + 1) self.move_to_next_address(self._size_of_current_register_address) return "{0}.{1}".format( self.next_address(), "0")
def function[next_bit_address, parameter[self]]: constant[Gets the next boolean address] if compare[name[self]._current_bit_address equal[==] constant[]] begin[:] if name[self]._is_16bit begin[:] return[call[constant[{0}.{1}].format, parameter[call[name[self].next_address, parameter[]], constant[00]]]] return[call[constant[{0}.{1}].format, parameter[call[name[self].next_address, parameter[]], constant[0]]]] if name[self]._is_16bit begin[:] variable[bool_half] assign[=] call[name[int], parameter[call[call[name[self]._current_bit_address.split, parameter[constant[.]]]][constant[1]]]] if compare[name[bool_half] less[<] constant[4]] begin[:] variable[register_half] assign[=] call[call[name[self]._current_bit_address.split, parameter[constant[.]]]][constant[0]] return[call[constant[{0}.{1}].format, parameter[name[register_half], call[name[pad_zeroes], parameter[call[name[str], parameter[binary_operation[name[bool_half] + constant[1]]]], constant[2]]]]]] call[name[self].move_to_next_address, parameter[name[self]._size_of_current_register_address]] return[call[constant[{0}.{1}].format, parameter[call[name[self].next_address, parameter[]], constant[00]]]] variable[bool_half] assign[=] call[name[int], parameter[call[call[name[self]._current_bit_address.split, parameter[constant[.]]]][constant[1]]]] if compare[name[bool_half] less[<] constant[3]] begin[:] variable[register_half] assign[=] call[call[name[self]._current_bit_address.split, parameter[constant[.]]]][constant[0]] return[call[constant[{0}.{1}].format, parameter[name[register_half], binary_operation[name[bool_half] + constant[1]]]]] call[name[self].move_to_next_address, parameter[name[self]._size_of_current_register_address]] return[call[constant[{0}.{1}].format, parameter[call[name[self].next_address, parameter[]], constant[0]]]]
keyword[def] identifier[next_bit_address] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_current_bit_address] == literal[string] : keyword[if] identifier[self] . identifier[_is_16bit] : keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[next_address] (), literal[string] ) keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[next_address] (), literal[string] ) keyword[if] identifier[self] . identifier[_is_16bit] : identifier[bool_half] = identifier[int] ( identifier[self] . identifier[_current_bit_address] . identifier[split] ( literal[string] )[ literal[int] ]) keyword[if] identifier[bool_half] < literal[int] : identifier[register_half] = identifier[self] . identifier[_current_bit_address] . identifier[split] ( literal[string] )[ literal[int] ] keyword[return] literal[string] . identifier[format] ( identifier[register_half] , identifier[pad_zeroes] ( identifier[str] ( identifier[bool_half] + literal[int] ), literal[int] )) identifier[self] . identifier[move_to_next_address] ( identifier[self] . identifier[_size_of_current_register_address] ) keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[next_address] (), literal[string] ) identifier[bool_half] = identifier[int] ( identifier[self] . identifier[_current_bit_address] . identifier[split] ( literal[string] )[ literal[int] ]) keyword[if] identifier[bool_half] < literal[int] : identifier[register_half] = identifier[self] . identifier[_current_bit_address] . identifier[split] ( literal[string] )[ literal[int] ] keyword[return] literal[string] . identifier[format] ( identifier[register_half] , identifier[bool_half] + literal[int] ) identifier[self] . identifier[move_to_next_address] ( identifier[self] . identifier[_size_of_current_register_address] ) keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[next_address] (), literal[string] )
def next_bit_address(self): """Gets the next boolean address""" if self._current_bit_address == '': if self._is_16bit: return '{0}.{1}'.format(self.next_address(), '00') # depends on [control=['if'], data=[]] return '{0}.{1}'.format(self.next_address(), '0') # depends on [control=['if'], data=[]] if self._is_16bit: bool_half = int(self._current_bit_address.split('.')[1]) if bool_half < 4: register_half = self._current_bit_address.split('.')[0] return '{0}.{1}'.format(register_half, pad_zeroes(str(bool_half + 1), 2)) # depends on [control=['if'], data=['bool_half']] self.move_to_next_address(self._size_of_current_register_address) return '{0}.{1}'.format(self.next_address(), '00') # depends on [control=['if'], data=[]] bool_half = int(self._current_bit_address.split('.')[1]) if bool_half < 3: register_half = self._current_bit_address.split('.')[0] return '{0}.{1}'.format(register_half, bool_half + 1) # depends on [control=['if'], data=['bool_half']] self.move_to_next_address(self._size_of_current_register_address) return '{0}.{1}'.format(self.next_address(), '0')
def _adjust_probability_vec_best(population, fitnesses, probability_vec, adjust_rate): """Shift probabilities towards the best solution.""" best_solution = max(zip(fitnesses, population))[1] # Shift probabilities towards best solution return _adjust(probability_vec, best_solution, adjust_rate)
def function[_adjust_probability_vec_best, parameter[population, fitnesses, probability_vec, adjust_rate]]: constant[Shift probabilities towards the best solution.] variable[best_solution] assign[=] call[call[name[max], parameter[call[name[zip], parameter[name[fitnesses], name[population]]]]]][constant[1]] return[call[name[_adjust], parameter[name[probability_vec], name[best_solution], name[adjust_rate]]]]
keyword[def] identifier[_adjust_probability_vec_best] ( identifier[population] , identifier[fitnesses] , identifier[probability_vec] , identifier[adjust_rate] ): literal[string] identifier[best_solution] = identifier[max] ( identifier[zip] ( identifier[fitnesses] , identifier[population] ))[ literal[int] ] keyword[return] identifier[_adjust] ( identifier[probability_vec] , identifier[best_solution] , identifier[adjust_rate] )
def _adjust_probability_vec_best(population, fitnesses, probability_vec, adjust_rate): """Shift probabilities towards the best solution.""" best_solution = max(zip(fitnesses, population))[1] # Shift probabilities towards best solution return _adjust(probability_vec, best_solution, adjust_rate)
def handle_request(self, request, app, model, pk): """Render and return tab""" ModelClass = self.get_model_class() object = ModelClass.objects.get(id=pk) tab_code = request.GET.get('tab') model_alias = request.GET.get('model_alias') model_alias = model_alias if model_alias else '{}.{}'.format(app, model) # TODO permission check item = tabs.get_tab(model_alias, object, tab_code) return item.get_layout(object).render(request)
def function[handle_request, parameter[self, request, app, model, pk]]: constant[Render and return tab] variable[ModelClass] assign[=] call[name[self].get_model_class, parameter[]] variable[object] assign[=] call[name[ModelClass].objects.get, parameter[]] variable[tab_code] assign[=] call[name[request].GET.get, parameter[constant[tab]]] variable[model_alias] assign[=] call[name[request].GET.get, parameter[constant[model_alias]]] variable[model_alias] assign[=] <ast.IfExp object at 0x7da18bcc87c0> variable[item] assign[=] call[name[tabs].get_tab, parameter[name[model_alias], name[object], name[tab_code]]] return[call[call[name[item].get_layout, parameter[name[object]]].render, parameter[name[request]]]]
keyword[def] identifier[handle_request] ( identifier[self] , identifier[request] , identifier[app] , identifier[model] , identifier[pk] ): literal[string] identifier[ModelClass] = identifier[self] . identifier[get_model_class] () identifier[object] = identifier[ModelClass] . identifier[objects] . identifier[get] ( identifier[id] = identifier[pk] ) identifier[tab_code] = identifier[request] . identifier[GET] . identifier[get] ( literal[string] ) identifier[model_alias] = identifier[request] . identifier[GET] . identifier[get] ( literal[string] ) identifier[model_alias] = identifier[model_alias] keyword[if] identifier[model_alias] keyword[else] literal[string] . identifier[format] ( identifier[app] , identifier[model] ) identifier[item] = identifier[tabs] . identifier[get_tab] ( identifier[model_alias] , identifier[object] , identifier[tab_code] ) keyword[return] identifier[item] . identifier[get_layout] ( identifier[object] ). identifier[render] ( identifier[request] )
def handle_request(self, request, app, model, pk): """Render and return tab""" ModelClass = self.get_model_class() object = ModelClass.objects.get(id=pk) tab_code = request.GET.get('tab') model_alias = request.GET.get('model_alias') model_alias = model_alias if model_alias else '{}.{}'.format(app, model) # TODO permission check item = tabs.get_tab(model_alias, object, tab_code) return item.get_layout(object).render(request)
def prepare_static_data(self, data): """ If user defined static fields, then process them with visiable value """ d = data.copy() for f in self.get_fields(): if f['static'] and f['name'] in d: d[f['name']] = make_view_field(f, None, self.types_convert_map, self.fields_convert_map, d[f['name']])['display'] return d
def function[prepare_static_data, parameter[self, data]]: constant[ If user defined static fields, then process them with visiable value ] variable[d] assign[=] call[name[data].copy, parameter[]] for taget[name[f]] in starred[call[name[self].get_fields, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da18f723a90> begin[:] call[name[d]][call[name[f]][constant[name]]] assign[=] call[call[name[make_view_field], parameter[name[f], constant[None], name[self].types_convert_map, name[self].fields_convert_map, call[name[d]][call[name[f]][constant[name]]]]]][constant[display]] return[name[d]]
keyword[def] identifier[prepare_static_data] ( identifier[self] , identifier[data] ): literal[string] identifier[d] = identifier[data] . identifier[copy] () keyword[for] identifier[f] keyword[in] identifier[self] . identifier[get_fields] (): keyword[if] identifier[f] [ literal[string] ] keyword[and] identifier[f] [ literal[string] ] keyword[in] identifier[d] : identifier[d] [ identifier[f] [ literal[string] ]]= identifier[make_view_field] ( identifier[f] , keyword[None] , identifier[self] . identifier[types_convert_map] , identifier[self] . identifier[fields_convert_map] , identifier[d] [ identifier[f] [ literal[string] ]])[ literal[string] ] keyword[return] identifier[d]
def prepare_static_data(self, data): """ If user defined static fields, then process them with visiable value """ d = data.copy() for f in self.get_fields(): if f['static'] and f['name'] in d: d[f['name']] = make_view_field(f, None, self.types_convert_map, self.fields_convert_map, d[f['name']])['display'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] return d
def iteration(self, node_status=True): """ Execute a single model iteration :return: Iteration_id, Incremental node status (dictionary node->status) """ self.clean_initial_status(self.available_statuses.values()) actual_status = {node: nstatus for node, nstatus in future.utils.iteritems(self.status)} # streaming if self.stream_execution: u, v = list(self.graph.edges())[0] u_status = self.status[u] v_status = self.status[v] if u_status == 1 and v_status == 0: p = np.random.random_sample() if p < self.params['model']['beta']: actual_status[v] = 1 if v_status == 1 and u_status == 0: p = np.random.random_sample() if p < self.params['model']['beta']: actual_status[u] = 1 delta, node_count, status_delta = self.status_delta(actual_status) self.status = actual_status self.actual_iteration += 1 if node_status: return {"iteration": self.actual_iteration - 1, "status": delta.copy(), "node_count": node_count.copy(), "status_delta": status_delta.copy()} else: return {"iteration": self.actual_iteration - 1, "status": {}, "node_count": node_count.copy(), "status_delta": status_delta.copy()} # snapshot else: if self.actual_iteration == 0: self.actual_iteration += 1 delta, node_count, status_delta = self.status_delta(actual_status) if node_status: return {"iteration": 0, "status": actual_status.copy(), "node_count": node_count.copy(), "status_delta": status_delta.copy()} else: return {"iteration": 0, "status": {}, "node_count": node_count.copy(), "status_delta": status_delta.copy()} for u in self.graph.nodes(): u_status = self.status[u] eventp = np.random.random_sample() neighbors = self.graph.neighbors(u) if isinstance(self.graph, nx.DiGraph): neighbors = self.graph.predecessors(u) if u_status == 0: infected_neighbors = len([v for v in neighbors if self.status[v] == 1]) if eventp < self.params['model']['beta'] * infected_neighbors: actual_status[u] = 1 delta, node_count, status_delta = self.status_delta(actual_status) self.status = actual_status self.actual_iteration += 1 if 
node_status: return {"iteration": self.actual_iteration - 1, "status": delta.copy(), "node_count": node_count.copy(), "status_delta": status_delta.copy()} else: return {"iteration": self.actual_iteration - 1, "status": {}, "node_count": node_count.copy(), "status_delta": status_delta.copy()}
def function[iteration, parameter[self, node_status]]: constant[ Execute a single model iteration :return: Iteration_id, Incremental node status (dictionary node->status) ] call[name[self].clean_initial_status, parameter[call[name[self].available_statuses.values, parameter[]]]] variable[actual_status] assign[=] <ast.DictComp object at 0x7da18f7209d0> if name[self].stream_execution begin[:] <ast.Tuple object at 0x7da18f722aa0> assign[=] call[call[name[list], parameter[call[name[self].graph.edges, parameter[]]]]][constant[0]] variable[u_status] assign[=] call[name[self].status][name[u]] variable[v_status] assign[=] call[name[self].status][name[v]] if <ast.BoolOp object at 0x7da18f723130> begin[:] variable[p] assign[=] call[name[np].random.random_sample, parameter[]] if compare[name[p] less[<] call[call[name[self].params][constant[model]]][constant[beta]]] begin[:] call[name[actual_status]][name[v]] assign[=] constant[1] if <ast.BoolOp object at 0x7da18f722fe0> begin[:] variable[p] assign[=] call[name[np].random.random_sample, parameter[]] if compare[name[p] less[<] call[call[name[self].params][constant[model]]][constant[beta]]] begin[:] call[name[actual_status]][name[u]] assign[=] constant[1] <ast.Tuple object at 0x7da18f7207c0> assign[=] call[name[self].status_delta, parameter[name[actual_status]]] name[self].status assign[=] name[actual_status] <ast.AugAssign object at 0x7da18f7224d0> if name[node_status] begin[:] return[dictionary[[<ast.Constant object at 0x7da18ede77f0>, <ast.Constant object at 0x7da18ede51b0>, <ast.Constant object at 0x7da18ede70a0>, <ast.Constant object at 0x7da18ede5480>], [<ast.BinOp object at 0x7da18ede5810>, <ast.Call object at 0x7da18ede4520>, <ast.Call object at 0x7da18ede7970>, <ast.Call object at 0x7da18ede5150>]]] <ast.Tuple object at 0x7da18ede7820> assign[=] call[name[self].status_delta, parameter[name[actual_status]]] name[self].status assign[=] name[actual_status] <ast.AugAssign object at 0x7da1b1279c90> if name[node_status] 
begin[:] return[dictionary[[<ast.Constant object at 0x7da1b1279b40>, <ast.Constant object at 0x7da1b1278640>, <ast.Constant object at 0x7da1b12786d0>, <ast.Constant object at 0x7da1b1278790>], [<ast.BinOp object at 0x7da1b12784c0>, <ast.Call object at 0x7da1b127a4a0>, <ast.Call object at 0x7da1b1279ba0>, <ast.Call object at 0x7da1b127a2c0>]]]
keyword[def] identifier[iteration] ( identifier[self] , identifier[node_status] = keyword[True] ): literal[string] identifier[self] . identifier[clean_initial_status] ( identifier[self] . identifier[available_statuses] . identifier[values] ()) identifier[actual_status] ={ identifier[node] : identifier[nstatus] keyword[for] identifier[node] , identifier[nstatus] keyword[in] identifier[future] . identifier[utils] . identifier[iteritems] ( identifier[self] . identifier[status] )} keyword[if] identifier[self] . identifier[stream_execution] : identifier[u] , identifier[v] = identifier[list] ( identifier[self] . identifier[graph] . identifier[edges] ())[ literal[int] ] identifier[u_status] = identifier[self] . identifier[status] [ identifier[u] ] identifier[v_status] = identifier[self] . identifier[status] [ identifier[v] ] keyword[if] identifier[u_status] == literal[int] keyword[and] identifier[v_status] == literal[int] : identifier[p] = identifier[np] . identifier[random] . identifier[random_sample] () keyword[if] identifier[p] < identifier[self] . identifier[params] [ literal[string] ][ literal[string] ]: identifier[actual_status] [ identifier[v] ]= literal[int] keyword[if] identifier[v_status] == literal[int] keyword[and] identifier[u_status] == literal[int] : identifier[p] = identifier[np] . identifier[random] . identifier[random_sample] () keyword[if] identifier[p] < identifier[self] . identifier[params] [ literal[string] ][ literal[string] ]: identifier[actual_status] [ identifier[u] ]= literal[int] identifier[delta] , identifier[node_count] , identifier[status_delta] = identifier[self] . identifier[status_delta] ( identifier[actual_status] ) identifier[self] . identifier[status] = identifier[actual_status] identifier[self] . identifier[actual_iteration] += literal[int] keyword[if] identifier[node_status] : keyword[return] { literal[string] : identifier[self] . identifier[actual_iteration] - literal[int] , literal[string] : identifier[delta] . 
identifier[copy] (), literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()} keyword[else] : keyword[return] { literal[string] : identifier[self] . identifier[actual_iteration] - literal[int] , literal[string] :{}, literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()} keyword[else] : keyword[if] identifier[self] . identifier[actual_iteration] == literal[int] : identifier[self] . identifier[actual_iteration] += literal[int] identifier[delta] , identifier[node_count] , identifier[status_delta] = identifier[self] . identifier[status_delta] ( identifier[actual_status] ) keyword[if] identifier[node_status] : keyword[return] { literal[string] : literal[int] , literal[string] : identifier[actual_status] . identifier[copy] (), literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()} keyword[else] : keyword[return] { literal[string] : literal[int] , literal[string] :{}, literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()} keyword[for] identifier[u] keyword[in] identifier[self] . identifier[graph] . identifier[nodes] (): identifier[u_status] = identifier[self] . identifier[status] [ identifier[u] ] identifier[eventp] = identifier[np] . identifier[random] . identifier[random_sample] () identifier[neighbors] = identifier[self] . identifier[graph] . identifier[neighbors] ( identifier[u] ) keyword[if] identifier[isinstance] ( identifier[self] . identifier[graph] , identifier[nx] . identifier[DiGraph] ): identifier[neighbors] = identifier[self] . identifier[graph] . 
identifier[predecessors] ( identifier[u] ) keyword[if] identifier[u_status] == literal[int] : identifier[infected_neighbors] = identifier[len] ([ identifier[v] keyword[for] identifier[v] keyword[in] identifier[neighbors] keyword[if] identifier[self] . identifier[status] [ identifier[v] ]== literal[int] ]) keyword[if] identifier[eventp] < identifier[self] . identifier[params] [ literal[string] ][ literal[string] ]* identifier[infected_neighbors] : identifier[actual_status] [ identifier[u] ]= literal[int] identifier[delta] , identifier[node_count] , identifier[status_delta] = identifier[self] . identifier[status_delta] ( identifier[actual_status] ) identifier[self] . identifier[status] = identifier[actual_status] identifier[self] . identifier[actual_iteration] += literal[int] keyword[if] identifier[node_status] : keyword[return] { literal[string] : identifier[self] . identifier[actual_iteration] - literal[int] , literal[string] : identifier[delta] . identifier[copy] (), literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()} keyword[else] : keyword[return] { literal[string] : identifier[self] . identifier[actual_iteration] - literal[int] , literal[string] :{}, literal[string] : identifier[node_count] . identifier[copy] (), literal[string] : identifier[status_delta] . identifier[copy] ()}
def iteration(self, node_status=True): """ Execute a single model iteration :return: Iteration_id, Incremental node status (dictionary node->status) """ self.clean_initial_status(self.available_statuses.values()) actual_status = {node: nstatus for (node, nstatus) in future.utils.iteritems(self.status)} # streaming if self.stream_execution: (u, v) = list(self.graph.edges())[0] u_status = self.status[u] v_status = self.status[v] if u_status == 1 and v_status == 0: p = np.random.random_sample() if p < self.params['model']['beta']: actual_status[v] = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if v_status == 1 and u_status == 0: p = np.random.random_sample() if p < self.params['model']['beta']: actual_status[u] = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] (delta, node_count, status_delta) = self.status_delta(actual_status) self.status = actual_status self.actual_iteration += 1 if node_status: return {'iteration': self.actual_iteration - 1, 'status': delta.copy(), 'node_count': node_count.copy(), 'status_delta': status_delta.copy()} # depends on [control=['if'], data=[]] else: return {'iteration': self.actual_iteration - 1, 'status': {}, 'node_count': node_count.copy(), 'status_delta': status_delta.copy()} # depends on [control=['if'], data=[]] else: # snapshot if self.actual_iteration == 0: self.actual_iteration += 1 (delta, node_count, status_delta) = self.status_delta(actual_status) if node_status: return {'iteration': 0, 'status': actual_status.copy(), 'node_count': node_count.copy(), 'status_delta': status_delta.copy()} # depends on [control=['if'], data=[]] else: return {'iteration': 0, 'status': {}, 'node_count': node_count.copy(), 'status_delta': status_delta.copy()} # depends on [control=['if'], data=[]] for u in self.graph.nodes(): u_status = self.status[u] eventp = np.random.random_sample() neighbors = self.graph.neighbors(u) if isinstance(self.graph, nx.DiGraph): neighbors = 
self.graph.predecessors(u) # depends on [control=['if'], data=[]] if u_status == 0: infected_neighbors = len([v for v in neighbors if self.status[v] == 1]) if eventp < self.params['model']['beta'] * infected_neighbors: actual_status[u] = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['u']] (delta, node_count, status_delta) = self.status_delta(actual_status) self.status = actual_status self.actual_iteration += 1 if node_status: return {'iteration': self.actual_iteration - 1, 'status': delta.copy(), 'node_count': node_count.copy(), 'status_delta': status_delta.copy()} # depends on [control=['if'], data=[]] else: return {'iteration': self.actual_iteration - 1, 'status': {}, 'node_count': node_count.copy(), 'status_delta': status_delta.copy()}
def help_center_article_translations(self, article_id, **kwargs): "https://developer.zendesk.com/rest_api/docs/help_center/translations#list-translations" api_path = "/api/v2/help_center/articles/{article_id}/translations.json" api_path = api_path.format(article_id=article_id) return self.call(api_path, **kwargs)
def function[help_center_article_translations, parameter[self, article_id]]: constant[https://developer.zendesk.com/rest_api/docs/help_center/translations#list-translations] variable[api_path] assign[=] constant[/api/v2/help_center/articles/{article_id}/translations.json] variable[api_path] assign[=] call[name[api_path].format, parameter[]] return[call[name[self].call, parameter[name[api_path]]]]
keyword[def] identifier[help_center_article_translations] ( identifier[self] , identifier[article_id] ,** identifier[kwargs] ): literal[string] identifier[api_path] = literal[string] identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[article_id] = identifier[article_id] ) keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] )
def help_center_article_translations(self, article_id, **kwargs): """https://developer.zendesk.com/rest_api/docs/help_center/translations#list-translations""" api_path = '/api/v2/help_center/articles/{article_id}/translations.json' api_path = api_path.format(article_id=article_id) return self.call(api_path, **kwargs)
def set_definition(self, rule_definitions, attributes = [], ip_ranges = [], params = {}): """ Update every attribute of the rule by setting the argument values as necessary :param parameterized_input: :param arg_values: :param convert: :return: """ string_definition = rule_definitions[self.filename].string_definition # Load condition dependencies definition = json.loads(string_definition) definition['conditions'] += self.conditions loaded_conditions = [] for condition in definition['conditions']: if condition[0].startswith('_INCLUDE_('): include = re.findall(r'_INCLUDE_\((.*?)\)', condition[0])[0] #new_conditions = load_data(include, key_name = 'conditions') with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/%s' % include), 'rt') as f: new_conditions = f.read() for (i, value) in enumerate(condition[1]): new_conditions = re.sub(condition[1][i], condition[2][i], new_conditions) new_conditions = json.loads(new_conditions)['conditions'] loaded_conditions.append(new_conditions) else: loaded_conditions.append(condition) definition['conditions'] = loaded_conditions string_definition = json.dumps(definition) # Set parameters parameters = re.findall(r'(_ARG_([a-zA-Z0-9]+)_)', string_definition) for param in parameters: index = int(param[1]) if len(self.args) <= index: string_definition = string_definition.replace(param[0], '') elif type(self.args[index]) == list: value = '[ %s ]' % ', '.join('"%s"' % v for v in self.args[index]) string_definition = string_definition.replace('"%s"' % param[0], value) else: string_definition = string_definition.replace(param[0], self.args[index]) # Strip dots if necessary stripdots = re_strip_dots.findall(string_definition) for value in stripdots: string_definition = string_definition.replace(value[0], value[1].replace('.', '')) definition = json.loads(string_definition) # Set special values (IP ranges, AWS account ID, ...) 
for condition in definition['conditions']: if type(condition) != list or len(condition) == 1 or type(condition[2]) == list: continue for testcase in testcases: result = testcase['regex'].match(condition[2]) if result and (testcase['name'] == 'ip_ranges_from_file' or testcase['name'] == 'ip_ranges_from_local_file'): filename = result.groups()[0] conditions = result.groups()[1] if len(result.groups()) > 1 else [] # TODO :: handle comma here... if filename == ip_ranges_from_args: prefixes = [] for filename in ip_ranges: prefixes += read_ip_ranges(filename, local_file = True, ip_only = True, conditions = conditions) condition[2] = prefixes break else: local_file = True if testcase['name'] == 'ip_ranges_from_local_file' else False condition[2] = read_ip_ranges(filename, local_file = local_file, ip_only = True, conditions = conditions) break break elif result: condition[2] = params[testcase['name']] break if len(attributes) == 0: attributes = [attr for attr in definition] for attr in attributes: if attr in definition: setattr(self, attr, definition[attr]) if hasattr(self, 'path'): self.service = format_service_name(self.path.split('.')[0]) if not hasattr(self, 'key'): setattr(self, 'key', self.filename) setattr(self, 'key', self.key.replace('.json', '')) if self.key_suffix: setattr(self, 'key', '%s-%s' % (self.key, self.key_suffix))
def function[set_definition, parameter[self, rule_definitions, attributes, ip_ranges, params]]: constant[ Update every attribute of the rule by setting the argument values as necessary :param parameterized_input: :param arg_values: :param convert: :return: ] variable[string_definition] assign[=] call[name[rule_definitions]][name[self].filename].string_definition variable[definition] assign[=] call[name[json].loads, parameter[name[string_definition]]] <ast.AugAssign object at 0x7da20e961ab0> variable[loaded_conditions] assign[=] list[[]] for taget[name[condition]] in starred[call[name[definition]][constant[conditions]]] begin[:] if call[call[name[condition]][constant[0]].startswith, parameter[constant[_INCLUDE_(]]] begin[:] variable[include] assign[=] call[call[name[re].findall, parameter[constant[_INCLUDE_\((.*?)\)], call[name[condition]][constant[0]]]]][constant[0]] with call[name[open], parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.realpath, parameter[name[__file__]]]]], binary_operation[constant[data/%s] <ast.Mod object at 0x7da2590d6920> name[include]]]], constant[rt]]] begin[:] variable[new_conditions] assign[=] call[name[f].read, parameter[]] for taget[tuple[[<ast.Name object at 0x7da20e9600a0>, <ast.Name object at 0x7da20e960a90>]]] in starred[call[name[enumerate], parameter[call[name[condition]][constant[1]]]]] begin[:] variable[new_conditions] assign[=] call[name[re].sub, parameter[call[call[name[condition]][constant[1]]][name[i]], call[call[name[condition]][constant[2]]][name[i]], name[new_conditions]]] variable[new_conditions] assign[=] call[call[name[json].loads, parameter[name[new_conditions]]]][constant[conditions]] call[name[loaded_conditions].append, parameter[name[new_conditions]]] call[name[definition]][constant[conditions]] assign[=] name[loaded_conditions] variable[string_definition] assign[=] call[name[json].dumps, parameter[name[definition]]] variable[parameters] assign[=] 
call[name[re].findall, parameter[constant[(_ARG_([a-zA-Z0-9]+)_)], name[string_definition]]] for taget[name[param]] in starred[name[parameters]] begin[:] variable[index] assign[=] call[name[int], parameter[call[name[param]][constant[1]]]] if compare[call[name[len], parameter[name[self].args]] less_or_equal[<=] name[index]] begin[:] variable[string_definition] assign[=] call[name[string_definition].replace, parameter[call[name[param]][constant[0]], constant[]]] variable[stripdots] assign[=] call[name[re_strip_dots].findall, parameter[name[string_definition]]] for taget[name[value]] in starred[name[stripdots]] begin[:] variable[string_definition] assign[=] call[name[string_definition].replace, parameter[call[name[value]][constant[0]], call[call[name[value]][constant[1]].replace, parameter[constant[.], constant[]]]]] variable[definition] assign[=] call[name[json].loads, parameter[name[string_definition]]] for taget[name[condition]] in starred[call[name[definition]][constant[conditions]]] begin[:] if <ast.BoolOp object at 0x7da20c6a8e80> begin[:] continue for taget[name[testcase]] in starred[name[testcases]] begin[:] variable[result] assign[=] call[call[name[testcase]][constant[regex]].match, parameter[call[name[condition]][constant[2]]]] if <ast.BoolOp object at 0x7da20c6a9c90> begin[:] variable[filename] assign[=] call[call[name[result].groups, parameter[]]][constant[0]] variable[conditions] assign[=] <ast.IfExp object at 0x7da20c6ab5e0> if compare[name[filename] equal[==] name[ip_ranges_from_args]] begin[:] variable[prefixes] assign[=] list[[]] for taget[name[filename]] in starred[name[ip_ranges]] begin[:] <ast.AugAssign object at 0x7da20c6ab0a0> call[name[condition]][constant[2]] assign[=] name[prefixes] break break if compare[call[name[len], parameter[name[attributes]]] equal[==] constant[0]] begin[:] variable[attributes] assign[=] <ast.ListComp object at 0x7da20c6a8370> for taget[name[attr]] in starred[name[attributes]] begin[:] if compare[name[attr] in 
name[definition]] begin[:] call[name[setattr], parameter[name[self], name[attr], call[name[definition]][name[attr]]]] if call[name[hasattr], parameter[name[self], constant[path]]] begin[:] name[self].service assign[=] call[name[format_service_name], parameter[call[call[name[self].path.split, parameter[constant[.]]]][constant[0]]]] if <ast.UnaryOp object at 0x7da20c6a8bb0> begin[:] call[name[setattr], parameter[name[self], constant[key], name[self].filename]] call[name[setattr], parameter[name[self], constant[key], call[name[self].key.replace, parameter[constant[.json], constant[]]]]] if name[self].key_suffix begin[:] call[name[setattr], parameter[name[self], constant[key], binary_operation[constant[%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6aa440>, <ast.Attribute object at 0x7da20c6a8c10>]]]]]
keyword[def] identifier[set_definition] ( identifier[self] , identifier[rule_definitions] , identifier[attributes] =[], identifier[ip_ranges] =[], identifier[params] ={}): literal[string] identifier[string_definition] = identifier[rule_definitions] [ identifier[self] . identifier[filename] ]. identifier[string_definition] identifier[definition] = identifier[json] . identifier[loads] ( identifier[string_definition] ) identifier[definition] [ literal[string] ]+= identifier[self] . identifier[conditions] identifier[loaded_conditions] =[] keyword[for] identifier[condition] keyword[in] identifier[definition] [ literal[string] ]: keyword[if] identifier[condition] [ literal[int] ]. identifier[startswith] ( literal[string] ): identifier[include] = identifier[re] . identifier[findall] ( literal[string] , identifier[condition] [ literal[int] ])[ literal[int] ] keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[__file__] )), literal[string] % identifier[include] ), literal[string] ) keyword[as] identifier[f] : identifier[new_conditions] = identifier[f] . identifier[read] () keyword[for] ( identifier[i] , identifier[value] ) keyword[in] identifier[enumerate] ( identifier[condition] [ literal[int] ]): identifier[new_conditions] = identifier[re] . identifier[sub] ( identifier[condition] [ literal[int] ][ identifier[i] ], identifier[condition] [ literal[int] ][ identifier[i] ], identifier[new_conditions] ) identifier[new_conditions] = identifier[json] . identifier[loads] ( identifier[new_conditions] )[ literal[string] ] identifier[loaded_conditions] . identifier[append] ( identifier[new_conditions] ) keyword[else] : identifier[loaded_conditions] . identifier[append] ( identifier[condition] ) identifier[definition] [ literal[string] ]= identifier[loaded_conditions] identifier[string_definition] = identifier[json] . 
identifier[dumps] ( identifier[definition] ) identifier[parameters] = identifier[re] . identifier[findall] ( literal[string] , identifier[string_definition] ) keyword[for] identifier[param] keyword[in] identifier[parameters] : identifier[index] = identifier[int] ( identifier[param] [ literal[int] ]) keyword[if] identifier[len] ( identifier[self] . identifier[args] )<= identifier[index] : identifier[string_definition] = identifier[string_definition] . identifier[replace] ( identifier[param] [ literal[int] ], literal[string] ) keyword[elif] identifier[type] ( identifier[self] . identifier[args] [ identifier[index] ])== identifier[list] : identifier[value] = literal[string] % literal[string] . identifier[join] ( literal[string] % identifier[v] keyword[for] identifier[v] keyword[in] identifier[self] . identifier[args] [ identifier[index] ]) identifier[string_definition] = identifier[string_definition] . identifier[replace] ( literal[string] % identifier[param] [ literal[int] ], identifier[value] ) keyword[else] : identifier[string_definition] = identifier[string_definition] . identifier[replace] ( identifier[param] [ literal[int] ], identifier[self] . identifier[args] [ identifier[index] ]) identifier[stripdots] = identifier[re_strip_dots] . identifier[findall] ( identifier[string_definition] ) keyword[for] identifier[value] keyword[in] identifier[stripdots] : identifier[string_definition] = identifier[string_definition] . identifier[replace] ( identifier[value] [ literal[int] ], identifier[value] [ literal[int] ]. identifier[replace] ( literal[string] , literal[string] )) identifier[definition] = identifier[json] . 
identifier[loads] ( identifier[string_definition] ) keyword[for] identifier[condition] keyword[in] identifier[definition] [ literal[string] ]: keyword[if] identifier[type] ( identifier[condition] )!= identifier[list] keyword[or] identifier[len] ( identifier[condition] )== literal[int] keyword[or] identifier[type] ( identifier[condition] [ literal[int] ])== identifier[list] : keyword[continue] keyword[for] identifier[testcase] keyword[in] identifier[testcases] : identifier[result] = identifier[testcase] [ literal[string] ]. identifier[match] ( identifier[condition] [ literal[int] ]) keyword[if] identifier[result] keyword[and] ( identifier[testcase] [ literal[string] ]== literal[string] keyword[or] identifier[testcase] [ literal[string] ]== literal[string] ): identifier[filename] = identifier[result] . identifier[groups] ()[ literal[int] ] identifier[conditions] = identifier[result] . identifier[groups] ()[ literal[int] ] keyword[if] identifier[len] ( identifier[result] . identifier[groups] ())> literal[int] keyword[else] [] keyword[if] identifier[filename] == identifier[ip_ranges_from_args] : identifier[prefixes] =[] keyword[for] identifier[filename] keyword[in] identifier[ip_ranges] : identifier[prefixes] += identifier[read_ip_ranges] ( identifier[filename] , identifier[local_file] = keyword[True] , identifier[ip_only] = keyword[True] , identifier[conditions] = identifier[conditions] ) identifier[condition] [ literal[int] ]= identifier[prefixes] keyword[break] keyword[else] : identifier[local_file] = keyword[True] keyword[if] identifier[testcase] [ literal[string] ]== literal[string] keyword[else] keyword[False] identifier[condition] [ literal[int] ]= identifier[read_ip_ranges] ( identifier[filename] , identifier[local_file] = identifier[local_file] , identifier[ip_only] = keyword[True] , identifier[conditions] = identifier[conditions] ) keyword[break] keyword[break] keyword[elif] identifier[result] : identifier[condition] [ literal[int] ]= identifier[params] [ 
identifier[testcase] [ literal[string] ]] keyword[break] keyword[if] identifier[len] ( identifier[attributes] )== literal[int] : identifier[attributes] =[ identifier[attr] keyword[for] identifier[attr] keyword[in] identifier[definition] ] keyword[for] identifier[attr] keyword[in] identifier[attributes] : keyword[if] identifier[attr] keyword[in] identifier[definition] : identifier[setattr] ( identifier[self] , identifier[attr] , identifier[definition] [ identifier[attr] ]) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[service] = identifier[format_service_name] ( identifier[self] . identifier[path] . identifier[split] ( literal[string] )[ literal[int] ]) keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[setattr] ( identifier[self] , literal[string] , identifier[self] . identifier[filename] ) identifier[setattr] ( identifier[self] , literal[string] , identifier[self] . identifier[key] . identifier[replace] ( literal[string] , literal[string] )) keyword[if] identifier[self] . identifier[key_suffix] : identifier[setattr] ( identifier[self] , literal[string] , literal[string] %( identifier[self] . identifier[key] , identifier[self] . identifier[key_suffix] ))
def set_definition(self, rule_definitions, attributes=[], ip_ranges=[], params={}): """ Update every attribute of the rule by setting the argument values as necessary :param parameterized_input: :param arg_values: :param convert: :return: """ string_definition = rule_definitions[self.filename].string_definition # Load condition dependencies definition = json.loads(string_definition) definition['conditions'] += self.conditions loaded_conditions = [] for condition in definition['conditions']: if condition[0].startswith('_INCLUDE_('): include = re.findall('_INCLUDE_\\((.*?)\\)', condition[0])[0] #new_conditions = load_data(include, key_name = 'conditions') with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/%s' % include), 'rt') as f: new_conditions = f.read() for (i, value) in enumerate(condition[1]): new_conditions = re.sub(condition[1][i], condition[2][i], new_conditions) # depends on [control=['for'], data=[]] new_conditions = json.loads(new_conditions)['conditions'] # depends on [control=['with'], data=['f']] loaded_conditions.append(new_conditions) # depends on [control=['if'], data=[]] else: loaded_conditions.append(condition) # depends on [control=['for'], data=['condition']] definition['conditions'] = loaded_conditions string_definition = json.dumps(definition) # Set parameters parameters = re.findall('(_ARG_([a-zA-Z0-9]+)_)', string_definition) for param in parameters: index = int(param[1]) if len(self.args) <= index: string_definition = string_definition.replace(param[0], '') # depends on [control=['if'], data=[]] elif type(self.args[index]) == list: value = '[ %s ]' % ', '.join(('"%s"' % v for v in self.args[index])) string_definition = string_definition.replace('"%s"' % param[0], value) # depends on [control=['if'], data=[]] else: string_definition = string_definition.replace(param[0], self.args[index]) # depends on [control=['for'], data=['param']] # Strip dots if necessary stripdots = re_strip_dots.findall(string_definition) for 
value in stripdots: string_definition = string_definition.replace(value[0], value[1].replace('.', '')) # depends on [control=['for'], data=['value']] definition = json.loads(string_definition) # Set special values (IP ranges, AWS account ID, ...) for condition in definition['conditions']: if type(condition) != list or len(condition) == 1 or type(condition[2]) == list: continue # depends on [control=['if'], data=[]] for testcase in testcases: result = testcase['regex'].match(condition[2]) if result and (testcase['name'] == 'ip_ranges_from_file' or testcase['name'] == 'ip_ranges_from_local_file'): filename = result.groups()[0] conditions = result.groups()[1] if len(result.groups()) > 1 else [] # TODO :: handle comma here... if filename == ip_ranges_from_args: prefixes = [] for filename in ip_ranges: prefixes += read_ip_ranges(filename, local_file=True, ip_only=True, conditions=conditions) # depends on [control=['for'], data=['filename']] condition[2] = prefixes break # depends on [control=['if'], data=['filename']] else: local_file = True if testcase['name'] == 'ip_ranges_from_local_file' else False condition[2] = read_ip_ranges(filename, local_file=local_file, ip_only=True, conditions=conditions) break break # depends on [control=['if'], data=[]] elif result: condition[2] = params[testcase['name']] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['testcase']] # depends on [control=['for'], data=['condition']] if len(attributes) == 0: attributes = [attr for attr in definition] # depends on [control=['if'], data=[]] for attr in attributes: if attr in definition: setattr(self, attr, definition[attr]) # depends on [control=['if'], data=['attr', 'definition']] # depends on [control=['for'], data=['attr']] if hasattr(self, 'path'): self.service = format_service_name(self.path.split('.')[0]) # depends on [control=['if'], data=[]] if not hasattr(self, 'key'): setattr(self, 'key', self.filename) # depends on [control=['if'], data=[]] 
setattr(self, 'key', self.key.replace('.json', '')) if self.key_suffix: setattr(self, 'key', '%s-%s' % (self.key, self.key_suffix)) # depends on [control=['if'], data=[]]
def safe_concat(objs, *args, **kwargs): """Alternative to :func:`pandas.concat` that preserves categorical variables. Parameters ---------- objs : a sequence or mapping of Series, DataFrame, or Panel objects If a dict is passed, the sorted keys will be used as the `keys` argument, unless it is passed, in which case the values will be selected (see below). Any None objects will be dropped silently unless they are all None in which case a ValueError will be raised axis : {0, 1, ...}, default 0 The axis to concatenate along join : {'inner', 'outer'}, default 'outer' How to handle indexes on other axis(es) join_axes : list of Index objects Specific indexes to use for the other n - 1 axes instead of performing inner/outer set logic verify_integrity : boolean, default False Check whether the new concatenated axis contains duplicates. This can be very expensive relative to the actual data concatenation keys : sequence, default None If multiple levels passed, should contain tuples. Construct hierarchical index using the passed keys as the outermost level levels : list of sequences, default None Specific levels (unique values) to use for constructing a MultiIndex. Otherwise they will be inferred from the keys names : list, default None Names for the levels in the resulting hierarchical index ignore_index : boolean, default False If True, do not use the index values along the concatenation axis. The resulting axis will be labeled 0, ..., n - 1. This is useful if you are concatenating objects where the concatenation axis does not have meaningful indexing information. Note the the index values on the other axes are still respected in the join. 
copy : boolean, default True If False, do not copy data unnecessarily Notes ----- The keys, levels, and names arguments are all optional Returns ------- concatenated : type of objects """ axis = kwargs.pop("axis", 0) categories = {} for df in objs: if isinstance(df, pandas.Series): if is_categorical_dtype(df.dtype): categories[df.name] = {"categories": df.cat.categories, "ordered": df.cat.ordered} else: dfc = df.select_dtypes(include=["category"]) for name, s in dfc.iteritems(): if name in categories: if axis == 1: raise ValueError("duplicate columns %s" % name) if not categories[name]["categories"].equals(s.cat.categories): raise ValueError("categories for column %s do not match" % name) else: categories[name] = {"categories": s.cat.categories, "ordered": s.cat.ordered} df[name] = df[name].astype(object) concatenated = pandas.concat(objs, *args, axis=axis, **kwargs) for name, params in categories.items(): concatenated[name] = pandas.Categorical(concatenated[name], **params) return concatenated
def function[safe_concat, parameter[objs]]: constant[Alternative to :func:`pandas.concat` that preserves categorical variables. Parameters ---------- objs : a sequence or mapping of Series, DataFrame, or Panel objects If a dict is passed, the sorted keys will be used as the `keys` argument, unless it is passed, in which case the values will be selected (see below). Any None objects will be dropped silently unless they are all None in which case a ValueError will be raised axis : {0, 1, ...}, default 0 The axis to concatenate along join : {'inner', 'outer'}, default 'outer' How to handle indexes on other axis(es) join_axes : list of Index objects Specific indexes to use for the other n - 1 axes instead of performing inner/outer set logic verify_integrity : boolean, default False Check whether the new concatenated axis contains duplicates. This can be very expensive relative to the actual data concatenation keys : sequence, default None If multiple levels passed, should contain tuples. Construct hierarchical index using the passed keys as the outermost level levels : list of sequences, default None Specific levels (unique values) to use for constructing a MultiIndex. Otherwise they will be inferred from the keys names : list, default None Names for the levels in the resulting hierarchical index ignore_index : boolean, default False If True, do not use the index values along the concatenation axis. The resulting axis will be labeled 0, ..., n - 1. This is useful if you are concatenating objects where the concatenation axis does not have meaningful indexing information. Note the the index values on the other axes are still respected in the join. 
copy : boolean, default True If False, do not copy data unnecessarily Notes ----- The keys, levels, and names arguments are all optional Returns ------- concatenated : type of objects ] variable[axis] assign[=] call[name[kwargs].pop, parameter[constant[axis], constant[0]]] variable[categories] assign[=] dictionary[[], []] for taget[name[df]] in starred[name[objs]] begin[:] if call[name[isinstance], parameter[name[df], name[pandas].Series]] begin[:] if call[name[is_categorical_dtype], parameter[name[df].dtype]] begin[:] call[name[categories]][name[df].name] assign[=] dictionary[[<ast.Constant object at 0x7da1b17ef3a0>, <ast.Constant object at 0x7da1b17ed180>], [<ast.Attribute object at 0x7da1b17ec7c0>, <ast.Attribute object at 0x7da1b17ed330>]] variable[concatenated] assign[=] call[name[pandas].concat, parameter[name[objs], <ast.Starred object at 0x7da18bc738e0>]] for taget[tuple[[<ast.Name object at 0x7da18bc70fd0>, <ast.Name object at 0x7da18bc73520>]]] in starred[call[name[categories].items, parameter[]]] begin[:] call[name[concatenated]][name[name]] assign[=] call[name[pandas].Categorical, parameter[call[name[concatenated]][name[name]]]] return[name[concatenated]]
keyword[def] identifier[safe_concat] ( identifier[objs] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[axis] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] ) identifier[categories] ={} keyword[for] identifier[df] keyword[in] identifier[objs] : keyword[if] identifier[isinstance] ( identifier[df] , identifier[pandas] . identifier[Series] ): keyword[if] identifier[is_categorical_dtype] ( identifier[df] . identifier[dtype] ): identifier[categories] [ identifier[df] . identifier[name] ]={ literal[string] : identifier[df] . identifier[cat] . identifier[categories] , literal[string] : identifier[df] . identifier[cat] . identifier[ordered] } keyword[else] : identifier[dfc] = identifier[df] . identifier[select_dtypes] ( identifier[include] =[ literal[string] ]) keyword[for] identifier[name] , identifier[s] keyword[in] identifier[dfc] . identifier[iteritems] (): keyword[if] identifier[name] keyword[in] identifier[categories] : keyword[if] identifier[axis] == literal[int] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[name] ) keyword[if] keyword[not] identifier[categories] [ identifier[name] ][ literal[string] ]. identifier[equals] ( identifier[s] . identifier[cat] . identifier[categories] ): keyword[raise] identifier[ValueError] ( literal[string] % identifier[name] ) keyword[else] : identifier[categories] [ identifier[name] ]={ literal[string] : identifier[s] . identifier[cat] . identifier[categories] , literal[string] : identifier[s] . identifier[cat] . identifier[ordered] } identifier[df] [ identifier[name] ]= identifier[df] [ identifier[name] ]. identifier[astype] ( identifier[object] ) identifier[concatenated] = identifier[pandas] . identifier[concat] ( identifier[objs] ,* identifier[args] , identifier[axis] = identifier[axis] ,** identifier[kwargs] ) keyword[for] identifier[name] , identifier[params] keyword[in] identifier[categories] . 
identifier[items] (): identifier[concatenated] [ identifier[name] ]= identifier[pandas] . identifier[Categorical] ( identifier[concatenated] [ identifier[name] ],** identifier[params] ) keyword[return] identifier[concatenated]
def safe_concat(objs, *args, **kwargs): """Alternative to :func:`pandas.concat` that preserves categorical variables. Parameters ---------- objs : a sequence or mapping of Series, DataFrame, or Panel objects If a dict is passed, the sorted keys will be used as the `keys` argument, unless it is passed, in which case the values will be selected (see below). Any None objects will be dropped silently unless they are all None in which case a ValueError will be raised axis : {0, 1, ...}, default 0 The axis to concatenate along join : {'inner', 'outer'}, default 'outer' How to handle indexes on other axis(es) join_axes : list of Index objects Specific indexes to use for the other n - 1 axes instead of performing inner/outer set logic verify_integrity : boolean, default False Check whether the new concatenated axis contains duplicates. This can be very expensive relative to the actual data concatenation keys : sequence, default None If multiple levels passed, should contain tuples. Construct hierarchical index using the passed keys as the outermost level levels : list of sequences, default None Specific levels (unique values) to use for constructing a MultiIndex. Otherwise they will be inferred from the keys names : list, default None Names for the levels in the resulting hierarchical index ignore_index : boolean, default False If True, do not use the index values along the concatenation axis. The resulting axis will be labeled 0, ..., n - 1. This is useful if you are concatenating objects where the concatenation axis does not have meaningful indexing information. Note the the index values on the other axes are still respected in the join. 
copy : boolean, default True If False, do not copy data unnecessarily Notes ----- The keys, levels, and names arguments are all optional Returns ------- concatenated : type of objects """ axis = kwargs.pop('axis', 0) categories = {} for df in objs: if isinstance(df, pandas.Series): if is_categorical_dtype(df.dtype): categories[df.name] = {'categories': df.cat.categories, 'ordered': df.cat.ordered} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: dfc = df.select_dtypes(include=['category']) for (name, s) in dfc.iteritems(): if name in categories: if axis == 1: raise ValueError('duplicate columns %s' % name) # depends on [control=['if'], data=[]] if not categories[name]['categories'].equals(s.cat.categories): raise ValueError('categories for column %s do not match' % name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['name', 'categories']] else: categories[name] = {'categories': s.cat.categories, 'ordered': s.cat.ordered} df[name] = df[name].astype(object) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['df']] concatenated = pandas.concat(objs, *args, axis=axis, **kwargs) for (name, params) in categories.items(): concatenated[name] = pandas.Categorical(concatenated[name], **params) # depends on [control=['for'], data=[]] return concatenated
def RgbToYuv(r, g, b): '''Convert the color from RGB coordinates to YUV. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: The color as an (y, u, v) tuple in the range: y[0...1], u[-0.436...0.436], v[-0.615...0.615] >>> '(%g, %g, %g)' % Color.RgbToYuv(1, 0.5, 0) '(0.5925, -0.29156, 0.357505)' ''' y = (r * 0.29900) + (g * 0.58700) + (b * 0.11400) u = -(r * 0.14713) - (g * 0.28886) + (b * 0.43600) v = (r * 0.61500) - (g * 0.51499) - (b * 0.10001) return (y, u, v)
def function[RgbToYuv, parameter[r, g, b]]: constant[Convert the color from RGB coordinates to YUV. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: The color as an (y, u, v) tuple in the range: y[0...1], u[-0.436...0.436], v[-0.615...0.615] >>> '(%g, %g, %g)' % Color.RgbToYuv(1, 0.5, 0) '(0.5925, -0.29156, 0.357505)' ] variable[y] assign[=] binary_operation[binary_operation[binary_operation[name[r] * constant[0.299]] + binary_operation[name[g] * constant[0.587]]] + binary_operation[name[b] * constant[0.114]]] variable[u] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c7969b0> - binary_operation[name[g] * constant[0.28886]]] + binary_operation[name[b] * constant[0.436]]] variable[v] assign[=] binary_operation[binary_operation[binary_operation[name[r] * constant[0.615]] - binary_operation[name[g] * constant[0.51499]]] - binary_operation[name[b] * constant[0.10001]]] return[tuple[[<ast.Name object at 0x7da20c796710>, <ast.Name object at 0x7da20c795f90>, <ast.Name object at 0x7da20c795750>]]]
keyword[def] identifier[RgbToYuv] ( identifier[r] , identifier[g] , identifier[b] ): literal[string] identifier[y] =( identifier[r] * literal[int] )+( identifier[g] * literal[int] )+( identifier[b] * literal[int] ) identifier[u] =-( identifier[r] * literal[int] )-( identifier[g] * literal[int] )+( identifier[b] * literal[int] ) identifier[v] =( identifier[r] * literal[int] )-( identifier[g] * literal[int] )-( identifier[b] * literal[int] ) keyword[return] ( identifier[y] , identifier[u] , identifier[v] )
def RgbToYuv(r, g, b): """Convert the color from RGB coordinates to YUV. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: The color as an (y, u, v) tuple in the range: y[0...1], u[-0.436...0.436], v[-0.615...0.615] >>> '(%g, %g, %g)' % Color.RgbToYuv(1, 0.5, 0) '(0.5925, -0.29156, 0.357505)' """ y = r * 0.299 + g * 0.587 + b * 0.114 u = -(r * 0.14713) - g * 0.28886 + b * 0.436 v = r * 0.615 - g * 0.51499 - b * 0.10001 return (y, u, v)
def to_frame(self, data, state): """ Extract a single frame from the data buffer. The consumed data should be removed from the buffer. If no complete frame can be read, must raise a ``NoFrames`` exception. :param data: A ``bytearray`` instance containing the data so far read. :param state: An instance of ``FramerState``. If the buffer contains a partial frame, this object can be used to store state information to allow the remainder of the frame to be read. :returns: A frame. The frame may be any object. The stock framers always return bytes. """ # Convert the data to bytes frame = six.binary_type(data) # Clear the buffer del data[:] # Return the frame return frame
def function[to_frame, parameter[self, data, state]]: constant[ Extract a single frame from the data buffer. The consumed data should be removed from the buffer. If no complete frame can be read, must raise a ``NoFrames`` exception. :param data: A ``bytearray`` instance containing the data so far read. :param state: An instance of ``FramerState``. If the buffer contains a partial frame, this object can be used to store state information to allow the remainder of the frame to be read. :returns: A frame. The frame may be any object. The stock framers always return bytes. ] variable[frame] assign[=] call[name[six].binary_type, parameter[name[data]]] <ast.Delete object at 0x7da18f09cf70> return[name[frame]]
keyword[def] identifier[to_frame] ( identifier[self] , identifier[data] , identifier[state] ): literal[string] identifier[frame] = identifier[six] . identifier[binary_type] ( identifier[data] ) keyword[del] identifier[data] [:] keyword[return] identifier[frame]
def to_frame(self, data, state): """ Extract a single frame from the data buffer. The consumed data should be removed from the buffer. If no complete frame can be read, must raise a ``NoFrames`` exception. :param data: A ``bytearray`` instance containing the data so far read. :param state: An instance of ``FramerState``. If the buffer contains a partial frame, this object can be used to store state information to allow the remainder of the frame to be read. :returns: A frame. The frame may be any object. The stock framers always return bytes. """ # Convert the data to bytes frame = six.binary_type(data) # Clear the buffer del data[:] # Return the frame return frame
def get_domain_class_relationship_attribute_iterator(ent): """ Returns an iterator over all terminal attributes in the given registered resource. """ for attr in itervalues_(ent.__everest_attributes__): if attr.kind != RESOURCE_ATTRIBUTE_KINDS.TERMINAL: yield attr
def function[get_domain_class_relationship_attribute_iterator, parameter[ent]]: constant[ Returns an iterator over all terminal attributes in the given registered resource. ] for taget[name[attr]] in starred[call[name[itervalues_], parameter[name[ent].__everest_attributes__]]] begin[:] if compare[name[attr].kind not_equal[!=] name[RESOURCE_ATTRIBUTE_KINDS].TERMINAL] begin[:] <ast.Yield object at 0x7da20c76e770>
keyword[def] identifier[get_domain_class_relationship_attribute_iterator] ( identifier[ent] ): literal[string] keyword[for] identifier[attr] keyword[in] identifier[itervalues_] ( identifier[ent] . identifier[__everest_attributes__] ): keyword[if] identifier[attr] . identifier[kind] != identifier[RESOURCE_ATTRIBUTE_KINDS] . identifier[TERMINAL] : keyword[yield] identifier[attr]
def get_domain_class_relationship_attribute_iterator(ent): """ Returns an iterator over all terminal attributes in the given registered resource. """ for attr in itervalues_(ent.__everest_attributes__): if attr.kind != RESOURCE_ATTRIBUTE_KINDS.TERMINAL: yield attr # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']]
def call_doctree_resolved(cls, kb_app, sphinx_app: Sphinx,
                          doctree: doctree,
                          fromdocname: str):
    """ Run every callback registered for the doctree-resolved event """

    registered = EventAction.get_callbacks(kb_app, SphinxEvent.DRES)
    for handler in registered:
        handler(kb_app, sphinx_app, doctree, fromdocname)
def function[call_doctree_resolved, parameter[cls, kb_app, sphinx_app, doctree, fromdocname]]: constant[ On doctree-resolved, do callbacks] for taget[name[callback]] in starred[call[name[EventAction].get_callbacks, parameter[name[kb_app], name[SphinxEvent].DRES]]] begin[:] call[name[callback], parameter[name[kb_app], name[sphinx_app], name[doctree], name[fromdocname]]]
keyword[def] identifier[call_doctree_resolved] ( identifier[cls] , identifier[kb_app] , identifier[sphinx_app] : identifier[Sphinx] , identifier[doctree] : identifier[doctree] , identifier[fromdocname] : identifier[str] ): literal[string] keyword[for] identifier[callback] keyword[in] identifier[EventAction] . identifier[get_callbacks] ( identifier[kb_app] , identifier[SphinxEvent] . identifier[DRES] ): identifier[callback] ( identifier[kb_app] , identifier[sphinx_app] , identifier[doctree] , identifier[fromdocname] )
def call_doctree_resolved(cls, kb_app, sphinx_app: Sphinx, doctree: doctree, fromdocname: str): """ On doctree-resolved, do callbacks""" for callback in EventAction.get_callbacks(kb_app, SphinxEvent.DRES): callback(kb_app, sphinx_app, doctree, fromdocname) # depends on [control=['for'], data=['callback']]
def bulk(self, body, index=None, doc_type=None, **query_params):
    """
    Perform many index/delete operations in a single API call.

    See the :func:`~elasticsearch.helpers.bulk` helper function for a more
    friendly API.
    `<http://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html>`_

    :param body: The operation definition and data (action-data pairs),
        separated by newlines
    :param index: Default index for items which don't provide one
    :param doc_type: Default document type for items which don't provide one
    :arg consistency: Explicit write consistency setting for the operation,
        valid choices are: 'one', 'quorum', 'all'
    :arg fields: Default comma-separated list of fields to return in the
        response for updates
    :arg pipeline: The pipeline id to preprocess incoming documents with
    :arg refresh: Refresh the index after performing the operation
    :arg routing: Specific routing value
    :arg timeout: Explicit operation timeout
    """
    # An empty body is a usage error -- fail fast before issuing a request.
    self._es_parser.is_not_empty_params(body)

    url = self._es_parser.make_path(index, doc_type, EsMethods.BULK)

    # Twisted inlineCallbacks style: yield the deferred request, then hand
    # the result back to the caller via returnValue.
    response = yield self._perform_request(
        HttpMethod.POST,
        url,
        self._bulk_body(body),
        params=query_params)
    returnValue(response)
def function[bulk, parameter[self, body, index, doc_type]]: constant[ Perform many index/delete operations in a single API call. See the :func:`~elasticsearch.helpers.bulk` helper function for a more friendly API. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html>`_ :param body: The operation definition and data (action-data pairs), separated by newlines :param index: Default index for items which don't provide one :param doc_type: Default document type for items which don't provide one :arg consistency: Explicit write consistency setting for the operation, valid choices are: 'one', 'quorum', 'all' :arg fields: Default comma-separated list of fields to return in the response for updates :arg pipeline: The pipeline id to preprocess incoming documents with :arg refresh: Refresh the index after performing the operation :arg routing: Specific routing value :arg timeout: Explicit operation timeout ] call[name[self]._es_parser.is_not_empty_params, parameter[name[body]]] variable[path] assign[=] call[name[self]._es_parser.make_path, parameter[name[index], name[doc_type], name[EsMethods].BULK]] variable[result] assign[=] <ast.Yield object at 0x7da18c4cc9a0> call[name[returnValue], parameter[name[result]]]
keyword[def] identifier[bulk] ( identifier[self] , identifier[body] , identifier[index] = keyword[None] , identifier[doc_type] = keyword[None] ,** identifier[query_params] ): literal[string] identifier[self] . identifier[_es_parser] . identifier[is_not_empty_params] ( identifier[body] ) identifier[path] = identifier[self] . identifier[_es_parser] . identifier[make_path] ( identifier[index] , identifier[doc_type] , identifier[EsMethods] . identifier[BULK] ) identifier[result] = keyword[yield] identifier[self] . identifier[_perform_request] ( identifier[HttpMethod] . identifier[POST] , identifier[path] , identifier[self] . identifier[_bulk_body] ( identifier[body] ), identifier[params] = identifier[query_params] ) identifier[returnValue] ( identifier[result] )
def bulk(self, body, index=None, doc_type=None, **query_params): """ Perform many index/delete operations in a single API call. See the :func:`~elasticsearch.helpers.bulk` helper function for a more friendly API. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html>`_ :param body: The operation definition and data (action-data pairs), separated by newlines :param index: Default index for items which don't provide one :param doc_type: Default document type for items which don't provide one :arg consistency: Explicit write consistency setting for the operation, valid choices are: 'one', 'quorum', 'all' :arg fields: Default comma-separated list of fields to return in the response for updates :arg pipeline: The pipeline id to preprocess incoming documents with :arg refresh: Refresh the index after performing the operation :arg routing: Specific routing value :arg timeout: Explicit operation timeout """ self._es_parser.is_not_empty_params(body) path = self._es_parser.make_path(index, doc_type, EsMethods.BULK) result = (yield self._perform_request(HttpMethod.POST, path, self._bulk_body(body), params=query_params)) returnValue(result)
def unpack_rsp(cls, rsp_pb):
    """Convert from PLS response to user response"""
    if rsp_pb.retType != RET_OK:
        return RET_ERROR, rsp_pb.retMsg, None

    acc_list = []
    for record in rsp_pb.s2c.accList:
        # Map the protobuf trade environment to its string name,
        # falling back to an empty string for unknown values.
        if record.trdEnv in TRADE.REV_TRD_ENV_MAP:
            trd_env = TRADE.REV_TRD_ENV_MAP[record.trdEnv]
        else:
            trd_env = ""

        # Translate each authorized market, defaulting to NONE when
        # the market id is not recognized.
        markets = []
        for trd_mkt in record.trdMarketAuthList:
            if trd_mkt in TRADE.REV_TRD_MKT_MAP:
                markets.append(TRADE.REV_TRD_MKT_MAP[trd_mkt])
            else:
                markets.append(TrdMarket.NONE)

        acc_list.append({
            'acc_id': record.accID,
            'trd_env': trd_env,
            'trdMarket_list': markets,
        })

    return RET_OK, "", acc_list
def function[unpack_rsp, parameter[cls, rsp_pb]]: constant[Convert from PLS response to user response] if compare[name[rsp_pb].retType not_equal[!=] name[RET_OK]] begin[:] return[tuple[[<ast.Name object at 0x7da1b06fd330>, <ast.Attribute object at 0x7da1b06fe860>, <ast.Constant object at 0x7da1b06fe680>]]] variable[raw_acc_list] assign[=] name[rsp_pb].s2c.accList variable[acc_list] assign[=] <ast.ListComp object at 0x7da1b06fccd0> return[tuple[[<ast.Name object at 0x7da1b06fd690>, <ast.Constant object at 0x7da1b06ff730>, <ast.Name object at 0x7da1b06fef50>]]]
keyword[def] identifier[unpack_rsp] ( identifier[cls] , identifier[rsp_pb] ): literal[string] keyword[if] identifier[rsp_pb] . identifier[retType] != identifier[RET_OK] : keyword[return] identifier[RET_ERROR] , identifier[rsp_pb] . identifier[retMsg] , keyword[None] identifier[raw_acc_list] = identifier[rsp_pb] . identifier[s2c] . identifier[accList] identifier[acc_list] =[{ literal[string] : identifier[record] . identifier[accID] , literal[string] : identifier[TRADE] . identifier[REV_TRD_ENV_MAP] [ identifier[record] . identifier[trdEnv] ] keyword[if] identifier[record] . identifier[trdEnv] keyword[in] identifier[TRADE] . identifier[REV_TRD_ENV_MAP] keyword[else] literal[string] , literal[string] :[( identifier[TRADE] . identifier[REV_TRD_MKT_MAP] [ identifier[trdMkt] ] keyword[if] identifier[trdMkt] keyword[in] identifier[TRADE] . identifier[REV_TRD_MKT_MAP] keyword[else] identifier[TrdMarket] . identifier[NONE] ) keyword[for] identifier[trdMkt] keyword[in] identifier[record] . identifier[trdMarketAuthList] ] } keyword[for] identifier[record] keyword[in] identifier[raw_acc_list] ] keyword[return] identifier[RET_OK] , literal[string] , identifier[acc_list]
def unpack_rsp(cls, rsp_pb): """Convert from PLS response to user response""" if rsp_pb.retType != RET_OK: return (RET_ERROR, rsp_pb.retMsg, None) # depends on [control=['if'], data=[]] raw_acc_list = rsp_pb.s2c.accList acc_list = [{'acc_id': record.accID, 'trd_env': TRADE.REV_TRD_ENV_MAP[record.trdEnv] if record.trdEnv in TRADE.REV_TRD_ENV_MAP else '', 'trdMarket_list': [TRADE.REV_TRD_MKT_MAP[trdMkt] if trdMkt in TRADE.REV_TRD_MKT_MAP else TrdMarket.NONE for trdMkt in record.trdMarketAuthList]} for record in raw_acc_list] return (RET_OK, '', acc_list)
def virtual_networks_list(resource_group, **kwargs):
    '''
    .. versionadded:: 2019.2.0

    List all virtual networks within a resource group.

    :param resource_group: The resource group name to list virtual networks
        within.

    CLI Example:

    .. code-block:: bash

        salt-call azurearm_network.virtual_networks_list testgroup

    '''
    result = {}
    netconn = __utils__['azurearm.get_client']('network', **kwargs)
    try:
        # Flatten the paged Azure response and key each vnet by name.
        raw_list = netconn.virtual_networks.list(
            resource_group_name=resource_group
        )
        for vnet in __utils__['azurearm.paged_object_to_list'](raw_list):
            result[vnet['name']] = vnet
    except CloudError as exc:
        # Log the cloud error and surface it in the returned dict
        # instead of raising, per the salt module convention.
        __utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)
        result = {'error': str(exc)}

    return result
def function[virtual_networks_list, parameter[resource_group]]: constant[ .. versionadded:: 2019.2.0 List all virtual networks within a resource group. :param resource_group: The resource group name to list virtual networks within. CLI Example: .. code-block:: bash salt-call azurearm_network.virtual_networks_list testgroup ] variable[result] assign[=] dictionary[[], []] variable[netconn] assign[=] call[call[name[__utils__]][constant[azurearm.get_client]], parameter[constant[network]]] <ast.Try object at 0x7da2047eaa40> return[name[result]]
keyword[def] identifier[virtual_networks_list] ( identifier[resource_group] ,** identifier[kwargs] ): literal[string] identifier[result] ={} identifier[netconn] = identifier[__utils__] [ literal[string] ]( literal[string] ,** identifier[kwargs] ) keyword[try] : identifier[vnets] = identifier[__utils__] [ literal[string] ]( identifier[netconn] . identifier[virtual_networks] . identifier[list] ( identifier[resource_group_name] = identifier[resource_group] ) ) keyword[for] identifier[vnet] keyword[in] identifier[vnets] : identifier[result] [ identifier[vnet] [ literal[string] ]]= identifier[vnet] keyword[except] identifier[CloudError] keyword[as] identifier[exc] : identifier[__utils__] [ literal[string] ]( literal[string] , identifier[str] ( identifier[exc] ),** identifier[kwargs] ) identifier[result] ={ literal[string] : identifier[str] ( identifier[exc] )} keyword[return] identifier[result]
def virtual_networks_list(resource_group, **kwargs): """ .. versionadded:: 2019.2.0 List all virtual networks within a resource group. :param resource_group: The resource group name to list virtual networks within. CLI Example: .. code-block:: bash salt-call azurearm_network.virtual_networks_list testgroup """ result = {} netconn = __utils__['azurearm.get_client']('network', **kwargs) try: vnets = __utils__['azurearm.paged_object_to_list'](netconn.virtual_networks.list(resource_group_name=resource_group)) for vnet in vnets: result[vnet['name']] = vnet # depends on [control=['for'], data=['vnet']] # depends on [control=['try'], data=[]] except CloudError as exc: __utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs) result = {'error': str(exc)} # depends on [control=['except'], data=['exc']] return result
def get_packet_type(cls, type_):
    """Override method for the Length/Type field (self.ethertype).
    The Length/Type field means Length or Type
    interpretation, same as ethernet IEEE802.3.
    If the value of Length/Type field is less than or equal to
    1500 decimal(05DC hexadecimal), it means Length interpretation
    and be passed to the LLC sublayer."""
    # Values at or below 1500 (0x05DC) denote a length, not a type,
    # so clamp them up to the IEEE 802.3 marker before the lookup:
    # max() leaves genuine ethertypes (> 1500) unchanged.
    effective = max(type_, ether.ETH_TYPE_IEEE802_3)
    return cls._TYPES.get(effective)
def function[get_packet_type, parameter[cls, type_]]: constant[Override method for the Length/Type field (self.ethertype). The Length/Type field means Length or Type interpretation, same as ethernet IEEE802.3. If the value of Length/Type field is less than or equal to 1500 decimal(05DC hexadecimal), it means Length interpretation and be passed to the LLC sublayer.] if compare[name[type_] less_or_equal[<=] name[ether].ETH_TYPE_IEEE802_3] begin[:] variable[type_] assign[=] name[ether].ETH_TYPE_IEEE802_3 return[call[name[cls]._TYPES.get, parameter[name[type_]]]]
keyword[def] identifier[get_packet_type] ( identifier[cls] , identifier[type_] ): literal[string] keyword[if] identifier[type_] <= identifier[ether] . identifier[ETH_TYPE_IEEE802_3] : identifier[type_] = identifier[ether] . identifier[ETH_TYPE_IEEE802_3] keyword[return] identifier[cls] . identifier[_TYPES] . identifier[get] ( identifier[type_] )
def get_packet_type(cls, type_): """Override method for the Length/Type field (self.ethertype). The Length/Type field means Length or Type interpretation, same as ethernet IEEE802.3. If the value of Length/Type field is less than or equal to 1500 decimal(05DC hexadecimal), it means Length interpretation and be passed to the LLC sublayer.""" if type_ <= ether.ETH_TYPE_IEEE802_3: type_ = ether.ETH_TYPE_IEEE802_3 # depends on [control=['if'], data=['type_']] return cls._TYPES.get(type_)
def yaml_filter(element, doc, tag=None, function=None, tags=None, strict_yaml=False): ''' Convenience function for parsing code blocks with YAML options This function is useful to create a filter that applies to code blocks that have specific classes. It is used as an argument of ``run_filter``, with two additional options: ``tag`` and ``function``. Using this is equivalent to having filter functions that: 1. Check if the element is a code block 2. Check if the element belongs to a specific class 3. Split the YAML options (at the beginning of the block, by looking for ``...`` or ``---`` strings in a separate line 4. Parse the YAML 5. Use the YAML options and (optionally) the data that follows the YAML to return a new or modified element Instead, you just need to: 1. Call ``run_filter`` with ``yaml_filter`` as the action function, and with the additional arguments ``tag`` and ``function`` 2. Construct a ``fenced_action`` function that takes four arguments: (options, data, element, doc). Note that options is a dict and data is a raw string. Notice that this is similar to the ``action`` functions of standard filters, but with *options* and *data* as the new ones. Note: if you want to apply multiple functions to separate classes, you can use the ``tags`` argument, which receives a dict of ``tag: function`` pairs. Note: use the ``strict_yaml=True`` option in order to allow for more verbose but flexible YAML metadata: more than one YAML blocks are allowed, but they all must start with ``---`` (even at the beginning) and end with ``---`` or ``...``. Also, YAML is not the default content when no delimiters are set. 
Example:: """ Replace code blocks of class 'foo' with # horizontal rules """ import panflute as pf def fenced_action(options, data, element, doc): count = options.get('count', 1) div = pf.Div(attributes={'count': str(count)}) div.content.extend([pf.HorizontalRule] * count) return div if __name__ == '__main__': pf.run_filter(pf.yaml_filter, tag='foo', function=fenced_action) ''' # Allow for either tag+function or a dict {tag: function} assert (tag is None) + (tags is None) == 1 # XOR if tags is None: tags = {tag: function} if type(element) == CodeBlock: for tag in tags: if tag in element.classes: function = tags[tag] if not strict_yaml: # Split YAML and data parts (separated by ... or ---) raw = re.split("^([.]{3,}|[-]{3,})$", element.text, 1, re.MULTILINE) data = raw[2] if len(raw) > 2 else '' data = data.lstrip('\n') raw = raw[0] try: options = yaml.safe_load(raw) except yaml.scanner.ScannerError: debug("panflute: malformed YAML block") return if options is None: options = {} else: options = {} data = [] raw = re.split("^([.]{3,}|[-]{3,})$", element.text, 0, re.MULTILINE) rawmode = True for chunk in raw: chunk = chunk.strip('\n') if not chunk: continue if rawmode: if chunk.startswith('---'): rawmode = False else: data.append(chunk) else: if chunk.startswith('---') or chunk.startswith('...'): rawmode = True else: try: options.update(yaml.safe_load(chunk)) except yaml.scanner.ScannerError: debug("panflute: malformed YAML block") return data = '\n'.join(data) return function(options=options, data=data, element=element, doc=doc)
def function[yaml_filter, parameter[element, doc, tag, function, tags, strict_yaml]]: constant[ Convenience function for parsing code blocks with YAML options This function is useful to create a filter that applies to code blocks that have specific classes. It is used as an argument of ``run_filter``, with two additional options: ``tag`` and ``function``. Using this is equivalent to having filter functions that: 1. Check if the element is a code block 2. Check if the element belongs to a specific class 3. Split the YAML options (at the beginning of the block, by looking for ``...`` or ``---`` strings in a separate line 4. Parse the YAML 5. Use the YAML options and (optionally) the data that follows the YAML to return a new or modified element Instead, you just need to: 1. Call ``run_filter`` with ``yaml_filter`` as the action function, and with the additional arguments ``tag`` and ``function`` 2. Construct a ``fenced_action`` function that takes four arguments: (options, data, element, doc). Note that options is a dict and data is a raw string. Notice that this is similar to the ``action`` functions of standard filters, but with *options* and *data* as the new ones. Note: if you want to apply multiple functions to separate classes, you can use the ``tags`` argument, which receives a dict of ``tag: function`` pairs. Note: use the ``strict_yaml=True`` option in order to allow for more verbose but flexible YAML metadata: more than one YAML blocks are allowed, but they all must start with ``---`` (even at the beginning) and end with ``---`` or ``...``. Also, YAML is not the default content when no delimiters are set. 
Example:: """ Replace code blocks of class 'foo' with # horizontal rules """ import panflute as pf def fenced_action(options, data, element, doc): count = options.get('count', 1) div = pf.Div(attributes={'count': str(count)}) div.content.extend([pf.HorizontalRule] * count) return div if __name__ == '__main__': pf.run_filter(pf.yaml_filter, tag='foo', function=fenced_action) ] assert[compare[binary_operation[compare[name[tag] is constant[None]] + compare[name[tags] is constant[None]]] equal[==] constant[1]]] if compare[name[tags] is constant[None]] begin[:] variable[tags] assign[=] dictionary[[<ast.Name object at 0x7da1b11abb50>], [<ast.Name object at 0x7da1b11a8700>]] if compare[call[name[type], parameter[name[element]]] equal[==] name[CodeBlock]] begin[:] for taget[name[tag]] in starred[name[tags]] begin[:] if compare[name[tag] in name[element].classes] begin[:] variable[function] assign[=] call[name[tags]][name[tag]] if <ast.UnaryOp object at 0x7da1b11ab8b0> begin[:] variable[raw] assign[=] call[name[re].split, parameter[constant[^([.]{3,}|[-]{3,})$], name[element].text, constant[1], name[re].MULTILINE]] variable[data] assign[=] <ast.IfExp object at 0x7da18dc9b100> variable[data] assign[=] call[name[data].lstrip, parameter[constant[ ]]] variable[raw] assign[=] call[name[raw]][constant[0]] <ast.Try object at 0x7da18dc9bd60> if compare[name[options] is constant[None]] begin[:] variable[options] assign[=] dictionary[[], []] return[call[name[function], parameter[]]]
keyword[def] identifier[yaml_filter] ( identifier[element] , identifier[doc] , identifier[tag] = keyword[None] , identifier[function] = keyword[None] , identifier[tags] = keyword[None] , identifier[strict_yaml] = keyword[False] ): literal[string] keyword[assert] ( identifier[tag] keyword[is] keyword[None] )+( identifier[tags] keyword[is] keyword[None] )== literal[int] keyword[if] identifier[tags] keyword[is] keyword[None] : identifier[tags] ={ identifier[tag] : identifier[function] } keyword[if] identifier[type] ( identifier[element] )== identifier[CodeBlock] : keyword[for] identifier[tag] keyword[in] identifier[tags] : keyword[if] identifier[tag] keyword[in] identifier[element] . identifier[classes] : identifier[function] = identifier[tags] [ identifier[tag] ] keyword[if] keyword[not] identifier[strict_yaml] : identifier[raw] = identifier[re] . identifier[split] ( literal[string] , identifier[element] . identifier[text] , literal[int] , identifier[re] . identifier[MULTILINE] ) identifier[data] = identifier[raw] [ literal[int] ] keyword[if] identifier[len] ( identifier[raw] )> literal[int] keyword[else] literal[string] identifier[data] = identifier[data] . identifier[lstrip] ( literal[string] ) identifier[raw] = identifier[raw] [ literal[int] ] keyword[try] : identifier[options] = identifier[yaml] . identifier[safe_load] ( identifier[raw] ) keyword[except] identifier[yaml] . identifier[scanner] . identifier[ScannerError] : identifier[debug] ( literal[string] ) keyword[return] keyword[if] identifier[options] keyword[is] keyword[None] : identifier[options] ={} keyword[else] : identifier[options] ={} identifier[data] =[] identifier[raw] = identifier[re] . identifier[split] ( literal[string] , identifier[element] . identifier[text] , literal[int] , identifier[re] . identifier[MULTILINE] ) identifier[rawmode] = keyword[True] keyword[for] identifier[chunk] keyword[in] identifier[raw] : identifier[chunk] = identifier[chunk] . 
identifier[strip] ( literal[string] ) keyword[if] keyword[not] identifier[chunk] : keyword[continue] keyword[if] identifier[rawmode] : keyword[if] identifier[chunk] . identifier[startswith] ( literal[string] ): identifier[rawmode] = keyword[False] keyword[else] : identifier[data] . identifier[append] ( identifier[chunk] ) keyword[else] : keyword[if] identifier[chunk] . identifier[startswith] ( literal[string] ) keyword[or] identifier[chunk] . identifier[startswith] ( literal[string] ): identifier[rawmode] = keyword[True] keyword[else] : keyword[try] : identifier[options] . identifier[update] ( identifier[yaml] . identifier[safe_load] ( identifier[chunk] )) keyword[except] identifier[yaml] . identifier[scanner] . identifier[ScannerError] : identifier[debug] ( literal[string] ) keyword[return] identifier[data] = literal[string] . identifier[join] ( identifier[data] ) keyword[return] identifier[function] ( identifier[options] = identifier[options] , identifier[data] = identifier[data] , identifier[element] = identifier[element] , identifier[doc] = identifier[doc] )
def yaml_filter(element, doc, tag=None, function=None, tags=None, strict_yaml=False): ''' Convenience function for parsing code blocks with YAML options This function is useful to create a filter that applies to code blocks that have specific classes. It is used as an argument of ``run_filter``, with two additional options: ``tag`` and ``function``. Using this is equivalent to having filter functions that: 1. Check if the element is a code block 2. Check if the element belongs to a specific class 3. Split the YAML options (at the beginning of the block, by looking for ``...`` or ``---`` strings in a separate line 4. Parse the YAML 5. Use the YAML options and (optionally) the data that follows the YAML to return a new or modified element Instead, you just need to: 1. Call ``run_filter`` with ``yaml_filter`` as the action function, and with the additional arguments ``tag`` and ``function`` 2. Construct a ``fenced_action`` function that takes four arguments: (options, data, element, doc). Note that options is a dict and data is a raw string. Notice that this is similar to the ``action`` functions of standard filters, but with *options* and *data* as the new ones. Note: if you want to apply multiple functions to separate classes, you can use the ``tags`` argument, which receives a dict of ``tag: function`` pairs. Note: use the ``strict_yaml=True`` option in order to allow for more verbose but flexible YAML metadata: more than one YAML blocks are allowed, but they all must start with ``---`` (even at the beginning) and end with ``---`` or ``...``. Also, YAML is not the default content when no delimiters are set. 
Example:: """ Replace code blocks of class 'foo' with # horizontal rules """ import panflute as pf def fenced_action(options, data, element, doc): count = options.get('count', 1) div = pf.Div(attributes={'count': str(count)}) div.content.extend([pf.HorizontalRule] * count) return div if __name__ == '__main__': pf.run_filter(pf.yaml_filter, tag='foo', function=fenced_action) ''' # Allow for either tag+function or a dict {tag: function} assert (tag is None) + (tags is None) == 1 # XOR if tags is None: tags = {tag: function} # depends on [control=['if'], data=['tags']] if type(element) == CodeBlock: for tag in tags: if tag in element.classes: function = tags[tag] if not strict_yaml: # Split YAML and data parts (separated by ... or ---) raw = re.split('^([.]{3,}|[-]{3,})$', element.text, 1, re.MULTILINE) data = raw[2] if len(raw) > 2 else '' data = data.lstrip('\n') raw = raw[0] try: options = yaml.safe_load(raw) # depends on [control=['try'], data=[]] except yaml.scanner.ScannerError: debug('panflute: malformed YAML block') return # depends on [control=['except'], data=[]] if options is None: options = {} # depends on [control=['if'], data=['options']] # depends on [control=['if'], data=[]] else: options = {} data = [] raw = re.split('^([.]{3,}|[-]{3,})$', element.text, 0, re.MULTILINE) rawmode = True for chunk in raw: chunk = chunk.strip('\n') if not chunk: continue # depends on [control=['if'], data=[]] if rawmode: if chunk.startswith('---'): rawmode = False # depends on [control=['if'], data=[]] else: data.append(chunk) # depends on [control=['if'], data=[]] elif chunk.startswith('---') or chunk.startswith('...'): rawmode = True # depends on [control=['if'], data=[]] else: try: options.update(yaml.safe_load(chunk)) # depends on [control=['try'], data=[]] except yaml.scanner.ScannerError: debug('panflute: malformed YAML block') return # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['chunk']] data = '\n'.join(data) return 
function(options=options, data=data, element=element, doc=doc) # depends on [control=['if'], data=['tag']] # depends on [control=['for'], data=['tag']] # depends on [control=['if'], data=[]]
def editContactItem(self, contact, label, number):
    """
    Update C{contact} in place, setting its I{label} attribute to
    C{label} and its I{number} attribute to C{number}.

    @type contact: L{PhoneNumber}
    @type label: C{unicode}
    @type number: C{unicode}

    @return: C{None}
    """
    contact.label, contact.number = label, number
def function[editContactItem, parameter[self, contact, label, number]]: constant[ Change the I{number} attribute of C{contact} to C{number}, and the I{label} attribute to C{label}. @type contact: L{PhoneNumber} @type label: C{unicode} @type number: C{unicode} @return: C{None} ] name[contact].label assign[=] name[label] name[contact].number assign[=] name[number]
keyword[def] identifier[editContactItem] ( identifier[self] , identifier[contact] , identifier[label] , identifier[number] ): literal[string] identifier[contact] . identifier[label] = identifier[label] identifier[contact] . identifier[number] = identifier[number]
def editContactItem(self, contact, label, number): """ Change the I{number} attribute of C{contact} to C{number}, and the I{label} attribute to C{label}. @type contact: L{PhoneNumber} @type label: C{unicode} @type number: C{unicode} @return: C{None} """ contact.label = label contact.number = number
def input_has_value(self, field_name, value):
    """
    Assert the form input with label (recommended), name or id has given
    value.
    """
    field = find_any_field(world.browser,
                           DATE_FIELDS + TEXT_FIELDS,
                           field_name)

    # find_any_field signals a miss with the literal False sentinel.
    if field is False:
        raise AssertionError(
            "Can not find a field named {!r}.".format(field_name))

    current = field.get_attribute('value')
    if current != value:
        raise AssertionError(
            "Field value expected to be {!r}, got {!r}.".format(
                value, current))
def function[input_has_value, parameter[self, field_name, value]]: constant[ Assert the form input with label (recommended), name or id has given value. ] variable[text_field] assign[=] call[name[find_any_field], parameter[name[world].browser, binary_operation[name[DATE_FIELDS] + name[TEXT_FIELDS]], name[field_name]]] if compare[name[text_field] is constant[False]] begin[:] <ast.Raise object at 0x7da2047ea0b0> variable[actual] assign[=] call[name[text_field].get_attribute, parameter[constant[value]]] if compare[name[actual] not_equal[!=] name[value]] begin[:] <ast.Raise object at 0x7da2047e97b0>
keyword[def] identifier[input_has_value] ( identifier[self] , identifier[field_name] , identifier[value] ): literal[string] identifier[text_field] = identifier[find_any_field] ( identifier[world] . identifier[browser] , identifier[DATE_FIELDS] + identifier[TEXT_FIELDS] , identifier[field_name] ) keyword[if] identifier[text_field] keyword[is] keyword[False] : keyword[raise] identifier[AssertionError] ( literal[string] . identifier[format] ( identifier[field_name] )) identifier[actual] = identifier[text_field] . identifier[get_attribute] ( literal[string] ) keyword[if] identifier[actual] != identifier[value] : keyword[raise] identifier[AssertionError] ( literal[string] . identifier[format] ( identifier[value] , identifier[actual] ))
def input_has_value(self, field_name, value): """ Assert the form input with label (recommended), name or id has given value. """ text_field = find_any_field(world.browser, DATE_FIELDS + TEXT_FIELDS, field_name) if text_field is False: raise AssertionError('Can not find a field named {!r}.'.format(field_name)) # depends on [control=['if'], data=[]] actual = text_field.get_attribute('value') if actual != value: raise AssertionError('Field value expected to be {!r}, got {!r}.'.format(value, actual)) # depends on [control=['if'], data=['actual', 'value']]
def logsumexp(X, axis=0):
    """
    Log-sum-exp trick for matrix X for summation along a specified axis.

    Computes ``log(sum(exp(X)))`` along ``axis`` in a numerically stable
    fashion by shifting by the per-axis maximum before exponentiating,

    .. math::

        \\log \\sum^K_{k=1} \\exp\\{x_k\\}

    Parameters
    ----------
    X: ndarray
        2D array of shape (N, D) to apply the log-sum-exp trick.
    axis: int, optional
        Axis to apply the summation along (works the same as axis in
        numpy.sum).

    Returns
    -------
    lseX: ndarray
        results of applying the log-sum-exp trick, this will be shape
        (D,) if :code:`axis=0` or shape (N,) if :code:`axis=1`.
    """
    shift = X.max(axis=axis)

    if X.ndim > 1:
        # Orient the maxima so they broadcast against X along `axis`:
        # a row vector for axis=0, a column vector for axis=1.
        shift = np.atleast_2d(shift)
        if axis == 1:
            shift = shift.T

    stable_sum = np.exp(X - shift).sum(axis=axis)
    return np.log(stable_sum) + np.ravel(shift)
def function[logsumexp, parameter[X, axis]]: constant[ Log-sum-exp trick for matrix X for summation along a specified axis. This performs the following operation in a stable fashion, .. math:: \log \sum^K_{k=1} \exp\{x_k\} Parameters ---------- X: ndarray 2D array of shape (N, D) to apply the log-sum-exp trick. axis: int, optional Axis to apply the summation along (works the same as axis in numpy.sum). Returns ------- lseX: ndarray results of applying the log-sum-exp trick, this will be shape (D,) if :code:`axis=0` or shape (N,) if :code:`axis=1`. ] variable[mx] assign[=] call[name[X].max, parameter[]] if compare[name[X].ndim greater[>] constant[1]] begin[:] variable[mx] assign[=] <ast.IfExp object at 0x7da2044c1270> return[binary_operation[call[name[np].log, parameter[call[call[name[np].exp, parameter[binary_operation[name[X] - name[mx]]]].sum, parameter[]]]] + call[name[np].ravel, parameter[name[mx]]]]]
keyword[def] identifier[logsumexp] ( identifier[X] , identifier[axis] = literal[int] ): literal[string] identifier[mx] = identifier[X] . identifier[max] ( identifier[axis] = identifier[axis] ) keyword[if] ( identifier[X] . identifier[ndim] > literal[int] ): identifier[mx] = identifier[np] . identifier[atleast_2d] ( identifier[mx] ). identifier[T] keyword[if] identifier[axis] == literal[int] keyword[else] identifier[np] . identifier[atleast_2d] ( identifier[mx] ) keyword[return] identifier[np] . identifier[log] ( identifier[np] . identifier[exp] ( identifier[X] - identifier[mx] ). identifier[sum] ( identifier[axis] = identifier[axis] ))+ identifier[np] . identifier[ravel] ( identifier[mx] )
def logsumexp(X, axis=0): """ Log-sum-exp trick for matrix X for summation along a specified axis. This performs the following operation in a stable fashion, .. math:: \\log \\sum^K_{k=1} \\exp\\{x_k\\} Parameters ---------- X: ndarray 2D array of shape (N, D) to apply the log-sum-exp trick. axis: int, optional Axis to apply the summation along (works the same as axis in numpy.sum). Returns ------- lseX: ndarray results of applying the log-sum-exp trick, this will be shape (D,) if :code:`axis=0` or shape (N,) if :code:`axis=1`. """ mx = X.max(axis=axis) if X.ndim > 1: mx = np.atleast_2d(mx).T if axis == 1 else np.atleast_2d(mx) # depends on [control=['if'], data=[]] return np.log(np.exp(X - mx).sum(axis=axis)) + np.ravel(mx)
def level(self, level, time=0): """(Helper) Set light to specified level""" if level <= 0: self._elk.send(pf_encode(self._index)) elif level >= 98: self._elk.send(pn_encode(self._index)) else: self._elk.send(pc_encode(self._index, 9, level, time))
def function[level, parameter[self, level, time]]: constant[(Helper) Set light to specified level] if compare[name[level] less_or_equal[<=] constant[0]] begin[:] call[name[self]._elk.send, parameter[call[name[pf_encode], parameter[name[self]._index]]]]
keyword[def] identifier[level] ( identifier[self] , identifier[level] , identifier[time] = literal[int] ): literal[string] keyword[if] identifier[level] <= literal[int] : identifier[self] . identifier[_elk] . identifier[send] ( identifier[pf_encode] ( identifier[self] . identifier[_index] )) keyword[elif] identifier[level] >= literal[int] : identifier[self] . identifier[_elk] . identifier[send] ( identifier[pn_encode] ( identifier[self] . identifier[_index] )) keyword[else] : identifier[self] . identifier[_elk] . identifier[send] ( identifier[pc_encode] ( identifier[self] . identifier[_index] , literal[int] , identifier[level] , identifier[time] ))
def level(self, level, time=0): """(Helper) Set light to specified level""" if level <= 0: self._elk.send(pf_encode(self._index)) # depends on [control=['if'], data=[]] elif level >= 98: self._elk.send(pn_encode(self._index)) # depends on [control=['if'], data=[]] else: self._elk.send(pc_encode(self._index, 9, level, time))
def _std(self,x): """ Compute standard deviation with ddof degrees of freedom """ return np.nanstd(x.values,ddof=self._ddof)
def function[_std, parameter[self, x]]: constant[ Compute standard deviation with ddof degrees of freedom ] return[call[name[np].nanstd, parameter[name[x].values]]]
keyword[def] identifier[_std] ( identifier[self] , identifier[x] ): literal[string] keyword[return] identifier[np] . identifier[nanstd] ( identifier[x] . identifier[values] , identifier[ddof] = identifier[self] . identifier[_ddof] )
def _std(self, x): """ Compute standard deviation with ddof degrees of freedom """ return np.nanstd(x.values, ddof=self._ddof)
def rules(ctx): """ [bookie] List all rules """ rules = Rules(peerplays_instance=ctx.peerplays) click.echo(pretty_print(rules, ctx=ctx))
def function[rules, parameter[ctx]]: constant[ [bookie] List all rules ] variable[rules] assign[=] call[name[Rules], parameter[]] call[name[click].echo, parameter[call[name[pretty_print], parameter[name[rules]]]]]
keyword[def] identifier[rules] ( identifier[ctx] ): literal[string] identifier[rules] = identifier[Rules] ( identifier[peerplays_instance] = identifier[ctx] . identifier[peerplays] ) identifier[click] . identifier[echo] ( identifier[pretty_print] ( identifier[rules] , identifier[ctx] = identifier[ctx] ))
def rules(ctx): """ [bookie] List all rules """ rules = Rules(peerplays_instance=ctx.peerplays) click.echo(pretty_print(rules, ctx=ctx))
def get_files_from_textfile(textfile_handler): """Yield the file names and widths by parsing a text file handler.""" for line in textfile_handler: line = line.rstrip() try: (image_name, width) = line.rsplit(',', 1) width = int(width) except ValueError: image_name = line width = None yield (image_name, width)
def function[get_files_from_textfile, parameter[textfile_handler]]: constant[Yield the file names and widths by parsing a text file handler.] for taget[name[line]] in starred[name[textfile_handler]] begin[:] variable[line] assign[=] call[name[line].rstrip, parameter[]] <ast.Try object at 0x7da18c4cca30> <ast.Yield object at 0x7da18c4ccbb0>
keyword[def] identifier[get_files_from_textfile] ( identifier[textfile_handler] ): literal[string] keyword[for] identifier[line] keyword[in] identifier[textfile_handler] : identifier[line] = identifier[line] . identifier[rstrip] () keyword[try] : ( identifier[image_name] , identifier[width] )= identifier[line] . identifier[rsplit] ( literal[string] , literal[int] ) identifier[width] = identifier[int] ( identifier[width] ) keyword[except] identifier[ValueError] : identifier[image_name] = identifier[line] identifier[width] = keyword[None] keyword[yield] ( identifier[image_name] , identifier[width] )
def get_files_from_textfile(textfile_handler): """Yield the file names and widths by parsing a text file handler.""" for line in textfile_handler: line = line.rstrip() try: (image_name, width) = line.rsplit(',', 1) width = int(width) # depends on [control=['try'], data=[]] except ValueError: image_name = line width = None # depends on [control=['except'], data=[]] yield (image_name, width) # depends on [control=['for'], data=['line']]
def get_standard(self): """get list of allowed parameters""" try: res = urlopen(PARSELY_PAGE_SCHEMA) except: return [] text = res.read() if isinstance(text, bytes): text = text.decode('utf-8') tree = etree.parse(StringIO(text)) stdref = tree.xpath("//div/@about") return [a.split(':')[1] for a in stdref]
def function[get_standard, parameter[self]]: constant[get list of allowed parameters] <ast.Try object at 0x7da1b256fdc0> variable[text] assign[=] call[name[res].read, parameter[]] if call[name[isinstance], parameter[name[text], name[bytes]]] begin[:] variable[text] assign[=] call[name[text].decode, parameter[constant[utf-8]]] variable[tree] assign[=] call[name[etree].parse, parameter[call[name[StringIO], parameter[name[text]]]]] variable[stdref] assign[=] call[name[tree].xpath, parameter[constant[//div/@about]]] return[<ast.ListComp object at 0x7da1b25ec670>]
keyword[def] identifier[get_standard] ( identifier[self] ): literal[string] keyword[try] : identifier[res] = identifier[urlopen] ( identifier[PARSELY_PAGE_SCHEMA] ) keyword[except] : keyword[return] [] identifier[text] = identifier[res] . identifier[read] () keyword[if] identifier[isinstance] ( identifier[text] , identifier[bytes] ): identifier[text] = identifier[text] . identifier[decode] ( literal[string] ) identifier[tree] = identifier[etree] . identifier[parse] ( identifier[StringIO] ( identifier[text] )) identifier[stdref] = identifier[tree] . identifier[xpath] ( literal[string] ) keyword[return] [ identifier[a] . identifier[split] ( literal[string] )[ literal[int] ] keyword[for] identifier[a] keyword[in] identifier[stdref] ]
def get_standard(self): """get list of allowed parameters""" try: res = urlopen(PARSELY_PAGE_SCHEMA) # depends on [control=['try'], data=[]] except: return [] # depends on [control=['except'], data=[]] text = res.read() if isinstance(text, bytes): text = text.decode('utf-8') # depends on [control=['if'], data=[]] tree = etree.parse(StringIO(text)) stdref = tree.xpath('//div/@about') return [a.split(':')[1] for a in stdref]
def find_additional_rels(self, all_models): """Attempts to scan for additional relationship fields for this model based on all of the other models' structures and relationships. """ for model_name, model in iteritems(all_models): if model_name != self.name: for field_name in model.field_names: field = model.fields[field_name] # if this field type references the current model if field.field_type == self.name and field.back_populates is not None and \ (isinstance(field, StatikForeignKeyField) or isinstance(field, StatikManyToManyField)): self.additional_rels[field.back_populates] = { 'to_model': model_name, 'back_populates': field_name, 'secondary': (model_name, field.field_type) if isinstance(field, StatikManyToManyField) else None } logger.debug( 'Additional relationship %s.%s -> %s (%s)', self.name, field.back_populates, model_name, self.additional_rels[field.back_populates] )
def function[find_additional_rels, parameter[self, all_models]]: constant[Attempts to scan for additional relationship fields for this model based on all of the other models' structures and relationships. ] for taget[tuple[[<ast.Name object at 0x7da1b1236f50>, <ast.Name object at 0x7da1b12360b0>]]] in starred[call[name[iteritems], parameter[name[all_models]]]] begin[:] if compare[name[model_name] not_equal[!=] name[self].name] begin[:] for taget[name[field_name]] in starred[name[model].field_names] begin[:] variable[field] assign[=] call[name[model].fields][name[field_name]] if <ast.BoolOp object at 0x7da1b1235e10> begin[:] call[name[self].additional_rels][name[field].back_populates] assign[=] dictionary[[<ast.Constant object at 0x7da1b1234d30>, <ast.Constant object at 0x7da1b12346d0>, <ast.Constant object at 0x7da1b1236a70>], [<ast.Name object at 0x7da1b1234640>, <ast.Name object at 0x7da1b12370d0>, <ast.IfExp object at 0x7da1b12343d0>]] call[name[logger].debug, parameter[constant[Additional relationship %s.%s -> %s (%s)], name[self].name, name[field].back_populates, name[model_name], call[name[self].additional_rels][name[field].back_populates]]]
keyword[def] identifier[find_additional_rels] ( identifier[self] , identifier[all_models] ): literal[string] keyword[for] identifier[model_name] , identifier[model] keyword[in] identifier[iteritems] ( identifier[all_models] ): keyword[if] identifier[model_name] != identifier[self] . identifier[name] : keyword[for] identifier[field_name] keyword[in] identifier[model] . identifier[field_names] : identifier[field] = identifier[model] . identifier[fields] [ identifier[field_name] ] keyword[if] identifier[field] . identifier[field_type] == identifier[self] . identifier[name] keyword[and] identifier[field] . identifier[back_populates] keyword[is] keyword[not] keyword[None] keyword[and] ( identifier[isinstance] ( identifier[field] , identifier[StatikForeignKeyField] ) keyword[or] identifier[isinstance] ( identifier[field] , identifier[StatikManyToManyField] )): identifier[self] . identifier[additional_rels] [ identifier[field] . identifier[back_populates] ]={ literal[string] : identifier[model_name] , literal[string] : identifier[field_name] , literal[string] :( identifier[model_name] , identifier[field] . identifier[field_type] ) keyword[if] identifier[isinstance] ( identifier[field] , identifier[StatikManyToManyField] ) keyword[else] keyword[None] } identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[field] . identifier[back_populates] , identifier[model_name] , identifier[self] . identifier[additional_rels] [ identifier[field] . identifier[back_populates] ] )
def find_additional_rels(self, all_models): """Attempts to scan for additional relationship fields for this model based on all of the other models' structures and relationships. """ for (model_name, model) in iteritems(all_models): if model_name != self.name: for field_name in model.field_names: field = model.fields[field_name] # if this field type references the current model if field.field_type == self.name and field.back_populates is not None and (isinstance(field, StatikForeignKeyField) or isinstance(field, StatikManyToManyField)): self.additional_rels[field.back_populates] = {'to_model': model_name, 'back_populates': field_name, 'secondary': (model_name, field.field_type) if isinstance(field, StatikManyToManyField) else None} logger.debug('Additional relationship %s.%s -> %s (%s)', self.name, field.back_populates, model_name, self.additional_rels[field.back_populates]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field_name']] # depends on [control=['if'], data=['model_name']] # depends on [control=['for'], data=[]]
def getRoles(self, principal_id): """ give an Owner who is also a 'selfpublisher', the reviewer role """ context = self.context current_roles = list(DefaultLocalRoleAdapter.getRoles( self, principal_id, )) # check we are not on the workspace itself if IHasWorkspace.providedBy(context): return current_roles # otherwise we should acquire the workspace and check out roles workspace = getattr(context, 'acquire_workspace', lambda: None)() if workspace is None: return current_roles workspace_roles = api.user.get_roles(obj=workspace) if 'SelfPublisher' in workspace_roles and 'Owner' in current_roles: current_roles.append('Reviewer') return current_roles
def function[getRoles, parameter[self, principal_id]]: constant[ give an Owner who is also a 'selfpublisher', the reviewer role ] variable[context] assign[=] name[self].context variable[current_roles] assign[=] call[name[list], parameter[call[name[DefaultLocalRoleAdapter].getRoles, parameter[name[self], name[principal_id]]]]] if call[name[IHasWorkspace].providedBy, parameter[name[context]]] begin[:] return[name[current_roles]] variable[workspace] assign[=] call[call[name[getattr], parameter[name[context], constant[acquire_workspace], <ast.Lambda object at 0x7da2054a77c0>]], parameter[]] if compare[name[workspace] is constant[None]] begin[:] return[name[current_roles]] variable[workspace_roles] assign[=] call[name[api].user.get_roles, parameter[]] if <ast.BoolOp object at 0x7da2054a4c40> begin[:] call[name[current_roles].append, parameter[constant[Reviewer]]] return[name[current_roles]]
keyword[def] identifier[getRoles] ( identifier[self] , identifier[principal_id] ): literal[string] identifier[context] = identifier[self] . identifier[context] identifier[current_roles] = identifier[list] ( identifier[DefaultLocalRoleAdapter] . identifier[getRoles] ( identifier[self] , identifier[principal_id] , )) keyword[if] identifier[IHasWorkspace] . identifier[providedBy] ( identifier[context] ): keyword[return] identifier[current_roles] identifier[workspace] = identifier[getattr] ( identifier[context] , literal[string] , keyword[lambda] : keyword[None] )() keyword[if] identifier[workspace] keyword[is] keyword[None] : keyword[return] identifier[current_roles] identifier[workspace_roles] = identifier[api] . identifier[user] . identifier[get_roles] ( identifier[obj] = identifier[workspace] ) keyword[if] literal[string] keyword[in] identifier[workspace_roles] keyword[and] literal[string] keyword[in] identifier[current_roles] : identifier[current_roles] . identifier[append] ( literal[string] ) keyword[return] identifier[current_roles]
def getRoles(self, principal_id): """ give an Owner who is also a 'selfpublisher', the reviewer role """ context = self.context current_roles = list(DefaultLocalRoleAdapter.getRoles(self, principal_id)) # check we are not on the workspace itself if IHasWorkspace.providedBy(context): return current_roles # depends on [control=['if'], data=[]] # otherwise we should acquire the workspace and check out roles workspace = getattr(context, 'acquire_workspace', lambda : None)() if workspace is None: return current_roles # depends on [control=['if'], data=[]] workspace_roles = api.user.get_roles(obj=workspace) if 'SelfPublisher' in workspace_roles and 'Owner' in current_roles: current_roles.append('Reviewer') # depends on [control=['if'], data=[]] return current_roles
def suffix(transformers, default=None): """Returns a different transformer depending on the suffix at the end of the requested URL. If none match and no default is given no transformation takes place. should pass in a dict with the following format: {'[suffix]': transformation_action, ... } """ transformers = {suffix: auto_kwargs(transformer) if transformer else transformer for suffix, transformer in transformers.items()} default = default and auto_kwargs(default) def transform(data, request): path = request.path transformer = default for suffix_test, suffix_transformer in transformers.items(): if path.endswith(suffix_test): transformer = suffix_transformer break return transformer(data) if transformer else data return transform
def function[suffix, parameter[transformers, default]]: constant[Returns a different transformer depending on the suffix at the end of the requested URL. If none match and no default is given no transformation takes place. should pass in a dict with the following format: {'[suffix]': transformation_action, ... } ] variable[transformers] assign[=] <ast.DictComp object at 0x7da1b1b47340> variable[default] assign[=] <ast.BoolOp object at 0x7da1b1b45c90> def function[transform, parameter[data, request]]: variable[path] assign[=] name[request].path variable[transformer] assign[=] name[default] for taget[tuple[[<ast.Name object at 0x7da1b1b46890>, <ast.Name object at 0x7da1b1b45570>]]] in starred[call[name[transformers].items, parameter[]]] begin[:] if call[name[path].endswith, parameter[name[suffix_test]]] begin[:] variable[transformer] assign[=] name[suffix_transformer] break return[<ast.IfExp object at 0x7da1b1b46fb0>] return[name[transform]]
keyword[def] identifier[suffix] ( identifier[transformers] , identifier[default] = keyword[None] ): literal[string] identifier[transformers] ={ identifier[suffix] : identifier[auto_kwargs] ( identifier[transformer] ) keyword[if] identifier[transformer] keyword[else] identifier[transformer] keyword[for] identifier[suffix] , identifier[transformer] keyword[in] identifier[transformers] . identifier[items] ()} identifier[default] = identifier[default] keyword[and] identifier[auto_kwargs] ( identifier[default] ) keyword[def] identifier[transform] ( identifier[data] , identifier[request] ): identifier[path] = identifier[request] . identifier[path] identifier[transformer] = identifier[default] keyword[for] identifier[suffix_test] , identifier[suffix_transformer] keyword[in] identifier[transformers] . identifier[items] (): keyword[if] identifier[path] . identifier[endswith] ( identifier[suffix_test] ): identifier[transformer] = identifier[suffix_transformer] keyword[break] keyword[return] identifier[transformer] ( identifier[data] ) keyword[if] identifier[transformer] keyword[else] identifier[data] keyword[return] identifier[transform]
def suffix(transformers, default=None): """Returns a different transformer depending on the suffix at the end of the requested URL. If none match and no default is given no transformation takes place. should pass in a dict with the following format: {'[suffix]': transformation_action, ... } """ transformers = {suffix: auto_kwargs(transformer) if transformer else transformer for (suffix, transformer) in transformers.items()} default = default and auto_kwargs(default) def transform(data, request): path = request.path transformer = default for (suffix_test, suffix_transformer) in transformers.items(): if path.endswith(suffix_test): transformer = suffix_transformer break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return transformer(data) if transformer else data return transform
def np_to_datetime(intime): """Convert numpy/pandas datetime64 to list[datetime].""" nptime = np.atleast_1d(intime) np_corr = (nptime - np.datetime64('1970-01-01T00:00:00')) / \ np.timedelta64(1, 's') return [datetime.utcfromtimestamp(t) for t in np_corr]
def function[np_to_datetime, parameter[intime]]: constant[Convert numpy/pandas datetime64 to list[datetime].] variable[nptime] assign[=] call[name[np].atleast_1d, parameter[name[intime]]] variable[np_corr] assign[=] binary_operation[binary_operation[name[nptime] - call[name[np].datetime64, parameter[constant[1970-01-01T00:00:00]]]] / call[name[np].timedelta64, parameter[constant[1], constant[s]]]] return[<ast.ListComp object at 0x7da18f810a90>]
keyword[def] identifier[np_to_datetime] ( identifier[intime] ): literal[string] identifier[nptime] = identifier[np] . identifier[atleast_1d] ( identifier[intime] ) identifier[np_corr] =( identifier[nptime] - identifier[np] . identifier[datetime64] ( literal[string] ))/ identifier[np] . identifier[timedelta64] ( literal[int] , literal[string] ) keyword[return] [ identifier[datetime] . identifier[utcfromtimestamp] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[np_corr] ]
def np_to_datetime(intime): """Convert numpy/pandas datetime64 to list[datetime].""" nptime = np.atleast_1d(intime) np_corr = (nptime - np.datetime64('1970-01-01T00:00:00')) / np.timedelta64(1, 's') return [datetime.utcfromtimestamp(t) for t in np_corr]
def get_con_id(self): """ Return contribution id if available """ con_id = "" if "contribution" in self.tables: if "id" in self.tables["contribution"].df.columns: con_id = str(self.tables["contribution"].df["id"].values[0]) return con_id
def function[get_con_id, parameter[self]]: constant[ Return contribution id if available ] variable[con_id] assign[=] constant[] if compare[constant[contribution] in name[self].tables] begin[:] if compare[constant[id] in call[name[self].tables][constant[contribution]].df.columns] begin[:] variable[con_id] assign[=] call[name[str], parameter[call[call[call[name[self].tables][constant[contribution]].df][constant[id]].values][constant[0]]]] return[name[con_id]]
keyword[def] identifier[get_con_id] ( identifier[self] ): literal[string] identifier[con_id] = literal[string] keyword[if] literal[string] keyword[in] identifier[self] . identifier[tables] : keyword[if] literal[string] keyword[in] identifier[self] . identifier[tables] [ literal[string] ]. identifier[df] . identifier[columns] : identifier[con_id] = identifier[str] ( identifier[self] . identifier[tables] [ literal[string] ]. identifier[df] [ literal[string] ]. identifier[values] [ literal[int] ]) keyword[return] identifier[con_id]
def get_con_id(self): """ Return contribution id if available """ con_id = '' if 'contribution' in self.tables: if 'id' in self.tables['contribution'].df.columns: con_id = str(self.tables['contribution'].df['id'].values[0]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return con_id
def _commit(self, session, errorMessage): """ Custom commit function for file objects """ try: session.commit() except IntegrityError: # Raise special error if the commit fails due to empty files log.error('Commit to database failed. %s' % errorMessage) except: # Raise other errors as normal raise
def function[_commit, parameter[self, session, errorMessage]]: constant[ Custom commit function for file objects ] <ast.Try object at 0x7da2044c3b50>
keyword[def] identifier[_commit] ( identifier[self] , identifier[session] , identifier[errorMessage] ): literal[string] keyword[try] : identifier[session] . identifier[commit] () keyword[except] identifier[IntegrityError] : identifier[log] . identifier[error] ( literal[string] % identifier[errorMessage] ) keyword[except] : keyword[raise]
def _commit(self, session, errorMessage): """ Custom commit function for file objects """ try: session.commit() # depends on [control=['try'], data=[]] except IntegrityError: # Raise special error if the commit fails due to empty files log.error('Commit to database failed. %s' % errorMessage) # depends on [control=['except'], data=[]] except: # Raise other errors as normal raise # depends on [control=['except'], data=[]]
def add_n_delay(delay_input, delay_time, initial_value, order, subs, subscript_dict): """ Creates code to instantiate a stateful 'Delay' object, and provides reference to that object's output. The name of the stateful object is based upon the passed in parameters, so if there are multiple places where identical delay functions are referenced, the translated python file will only maintain one stateful object, and reference it multiple times. Parameters ---------- delay_input: <string> Reference to the model component that is the input to the delay delay_time: <string> Can be a number (in string format) or a reference to another model element which will calculate the delay. This is calculated throughout the simulation at runtime. initial_value: <string> This is used to initialize the stocks that are present in the delay. We initialize the stocks with equal values so that the outflow in the first timestep is equal to this value. order: string The number of stocks in the delay pipeline. As we construct the delays at build time, this must be an integer and cannot be calculated from other model components. Anything else will yield a ValueError. Returns ------- reference: basestring reference to the delay object `__call__` method, which will return the output of the delay process new_structure: list list of element construction dictionaries for the builder to assemble """ # the py name has to be unique to all the passed parameters, or if there are two things # that delay the output by different amounts, they'll overwrite the original function... 
stateful = { 'py_name': utils.make_python_identifier('_delay_%s_%s_%s_%s' % (delay_input, delay_time, initial_value, order))[0], 'real_name': 'Delay of %s' % delay_input, 'doc': 'Delay time: %s \n Delay initial value %s \n Delay order %s' % ( delay_time, initial_value, order), 'py_expr': 'functions.Delay(lambda: %s, lambda: %s, lambda: %s, lambda: %s)' % ( delay_input, delay_time, initial_value, order), 'unit': 'None', 'lims': 'None', 'eqn': 'None', 'subs': '', 'kind': 'stateful', 'arguments': '' } return "%s()" % stateful['py_name'], [stateful]
def function[add_n_delay, parameter[delay_input, delay_time, initial_value, order, subs, subscript_dict]]: constant[ Creates code to instantiate a stateful 'Delay' object, and provides reference to that object's output. The name of the stateful object is based upon the passed in parameters, so if there are multiple places where identical delay functions are referenced, the translated python file will only maintain one stateful object, and reference it multiple times. Parameters ---------- delay_input: <string> Reference to the model component that is the input to the delay delay_time: <string> Can be a number (in string format) or a reference to another model element which will calculate the delay. This is calculated throughout the simulation at runtime. initial_value: <string> This is used to initialize the stocks that are present in the delay. We initialize the stocks with equal values so that the outflow in the first timestep is equal to this value. order: string The number of stocks in the delay pipeline. As we construct the delays at build time, this must be an integer and cannot be calculated from other model components. Anything else will yield a ValueError. 
Returns ------- reference: basestring reference to the delay object `__call__` method, which will return the output of the delay process new_structure: list list of element construction dictionaries for the builder to assemble ] variable[stateful] assign[=] dictionary[[<ast.Constant object at 0x7da204344d00>, <ast.Constant object at 0x7da204347100>, <ast.Constant object at 0x7da204346350>, <ast.Constant object at 0x7da204344c10>, <ast.Constant object at 0x7da204345000>, <ast.Constant object at 0x7da204344460>, <ast.Constant object at 0x7da204344e20>, <ast.Constant object at 0x7da204345360>, <ast.Constant object at 0x7da2043457b0>, <ast.Constant object at 0x7da204345a50>], [<ast.Subscript object at 0x7da204344dc0>, <ast.BinOp object at 0x7da204346ef0>, <ast.BinOp object at 0x7da2043460b0>, <ast.BinOp object at 0x7da204347df0>, <ast.Constant object at 0x7da204346620>, <ast.Constant object at 0x7da204347400>, <ast.Constant object at 0x7da204346ad0>, <ast.Constant object at 0x7da204345660>, <ast.Constant object at 0x7da2043478b0>, <ast.Constant object at 0x7da2043462c0>]] return[tuple[[<ast.BinOp object at 0x7da204345b70>, <ast.List object at 0x7da204344fa0>]]]
keyword[def] identifier[add_n_delay] ( identifier[delay_input] , identifier[delay_time] , identifier[initial_value] , identifier[order] , identifier[subs] , identifier[subscript_dict] ): literal[string] identifier[stateful] ={ literal[string] : identifier[utils] . identifier[make_python_identifier] ( literal[string] %( identifier[delay_input] , identifier[delay_time] , identifier[initial_value] , identifier[order] ))[ literal[int] ], literal[string] : literal[string] % identifier[delay_input] , literal[string] : literal[string] %( identifier[delay_time] , identifier[initial_value] , identifier[order] ), literal[string] : literal[string] %( identifier[delay_input] , identifier[delay_time] , identifier[initial_value] , identifier[order] ), literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } keyword[return] literal[string] % identifier[stateful] [ literal[string] ],[ identifier[stateful] ]
def add_n_delay(delay_input, delay_time, initial_value, order, subs, subscript_dict): """ Creates code to instantiate a stateful 'Delay' object, and provides reference to that object's output. The name of the stateful object is based upon the passed in parameters, so if there are multiple places where identical delay functions are referenced, the translated python file will only maintain one stateful object, and reference it multiple times. Parameters ---------- delay_input: <string> Reference to the model component that is the input to the delay delay_time: <string> Can be a number (in string format) or a reference to another model element which will calculate the delay. This is calculated throughout the simulation at runtime. initial_value: <string> This is used to initialize the stocks that are present in the delay. We initialize the stocks with equal values so that the outflow in the first timestep is equal to this value. order: string The number of stocks in the delay pipeline. As we construct the delays at build time, this must be an integer and cannot be calculated from other model components. Anything else will yield a ValueError. Returns ------- reference: basestring reference to the delay object `__call__` method, which will return the output of the delay process new_structure: list list of element construction dictionaries for the builder to assemble """ # the py name has to be unique to all the passed parameters, or if there are two things # that delay the output by different amounts, they'll overwrite the original function... 
stateful = {'py_name': utils.make_python_identifier('_delay_%s_%s_%s_%s' % (delay_input, delay_time, initial_value, order))[0], 'real_name': 'Delay of %s' % delay_input, 'doc': 'Delay time: %s \n Delay initial value %s \n Delay order %s' % (delay_time, initial_value, order), 'py_expr': 'functions.Delay(lambda: %s, lambda: %s, lambda: %s, lambda: %s)' % (delay_input, delay_time, initial_value, order), 'unit': 'None', 'lims': 'None', 'eqn': 'None', 'subs': '', 'kind': 'stateful', 'arguments': ''} return ('%s()' % stateful['py_name'], [stateful])
def write_bus_data(self, file, padding=" "): """ Writes bus data to file. """ for bus in self.case.buses: attrs = ['%s="%s"' % (k, v) for k, v in self.bus_attr.iteritems()] # attrs.insert(0, 'label="%s"' % bus.name) attr_str = ", ".join(attrs) file.write("%s%s [%s];\n" % (padding, bus.name, attr_str))
def function[write_bus_data, parameter[self, file, padding]]: constant[ Writes bus data to file. ] for taget[name[bus]] in starred[name[self].case.buses] begin[:] variable[attrs] assign[=] <ast.ListComp object at 0x7da1b25d1ba0> variable[attr_str] assign[=] call[constant[, ].join, parameter[name[attrs]]] call[name[file].write, parameter[binary_operation[constant[%s%s [%s]; ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b25d0eb0>, <ast.Attribute object at 0x7da1b25d1840>, <ast.Name object at 0x7da1b25d1060>]]]]]
keyword[def] identifier[write_bus_data] ( identifier[self] , identifier[file] , identifier[padding] = literal[string] ): literal[string] keyword[for] identifier[bus] keyword[in] identifier[self] . identifier[case] . identifier[buses] : identifier[attrs] =[ literal[string] %( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[bus_attr] . identifier[iteritems] ()] identifier[attr_str] = literal[string] . identifier[join] ( identifier[attrs] ) identifier[file] . identifier[write] ( literal[string] %( identifier[padding] , identifier[bus] . identifier[name] , identifier[attr_str] ))
def write_bus_data(self, file, padding=' '): """ Writes bus data to file. """ for bus in self.case.buses: attrs = ['%s="%s"' % (k, v) for (k, v) in self.bus_attr.iteritems()] # attrs.insert(0, 'label="%s"' % bus.name) attr_str = ', '.join(attrs) file.write('%s%s [%s];\n' % (padding, bus.name, attr_str)) # depends on [control=['for'], data=['bus']]
def get_catalog(): """Returns a catalog of available transforms. These are used to build chains for rendering with different back ends. """ tforms = {} for name, value in list(globals().items()): if name.endswith('Transform'): tforms[name] = value return Bunch.Bunch(tforms, caseless=True)
def function[get_catalog, parameter[]]: constant[Returns a catalog of available transforms. These are used to build chains for rendering with different back ends. ] variable[tforms] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da2041d83d0>, <ast.Name object at 0x7da2041d9cc0>]]] in starred[call[name[list], parameter[call[call[name[globals], parameter[]].items, parameter[]]]]] begin[:] if call[name[name].endswith, parameter[constant[Transform]]] begin[:] call[name[tforms]][name[name]] assign[=] name[value] return[call[name[Bunch].Bunch, parameter[name[tforms]]]]
keyword[def] identifier[get_catalog] (): literal[string] identifier[tforms] ={} keyword[for] identifier[name] , identifier[value] keyword[in] identifier[list] ( identifier[globals] (). identifier[items] ()): keyword[if] identifier[name] . identifier[endswith] ( literal[string] ): identifier[tforms] [ identifier[name] ]= identifier[value] keyword[return] identifier[Bunch] . identifier[Bunch] ( identifier[tforms] , identifier[caseless] = keyword[True] )
def get_catalog(): """Returns a catalog of available transforms. These are used to build chains for rendering with different back ends. """ tforms = {} for (name, value) in list(globals().items()): if name.endswith('Transform'): tforms[name] = value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return Bunch.Bunch(tforms, caseless=True)
def add_workflow_command_line_group(parser): """ The standard way of initializing a ConfigParser object in workflow will be to do it from the command line. This is done by giving a --local-config-files filea.ini fileb.ini filec.ini command. You can also set config file override commands on the command line. This will be most useful when setting (for example) start and end times, or active ifos. This is done by --config-overrides section1:option1:value1 section2:option2:value2 ... This can also be given as --config-overrides section1:option1 where the value will be left as ''. To remove a configuration option, use the command line argument --config-delete section1:option1 which will delete option1 from [section1] or --config-delete section1 to delete all of the options in [section1] Deletes are implemented before overrides. This function returns an argparse OptionGroup to ensure these options are parsed correctly and can then be sent directly to initialize an WorkflowConfigParser. Parameters ----------- parser : argparse.ArgumentParser instance The initialized argparse instance to add the workflow option group to. """ workflowArgs = parser.add_argument_group('Configuration', 'Options needed for parsing ' 'config file(s).') workflowArgs.add_argument("--config-files", nargs="+", action='store', metavar="CONFIGFILE", help="List of config files to be used in " "analysis.") workflowArgs.add_argument("--config-overrides", nargs="*", action='store', metavar="SECTION:OPTION:VALUE", help="List of section,option,value combinations to " "add into the configuration file. Normally the gps " "start and end times might be provided this way, " "and user specific locations (ie. output directories). 
" "This can also be provided as SECTION:OPTION or " "SECTION:OPTION: both of which indicate that the " "corresponding value is left blank.") workflowArgs.add_argument("--config-delete", nargs="*", action='store', metavar="SECTION:OPTION", help="List of section,option combinations to delete " "from the configuration file. This can also be " "provided as SECTION which deletes the enture section" " from the configuration file or SECTION:OPTION " "which deletes a specific option from a given " "section.")
def function[add_workflow_command_line_group, parameter[parser]]: constant[ The standard way of initializing a ConfigParser object in workflow will be to do it from the command line. This is done by giving a --local-config-files filea.ini fileb.ini filec.ini command. You can also set config file override commands on the command line. This will be most useful when setting (for example) start and end times, or active ifos. This is done by --config-overrides section1:option1:value1 section2:option2:value2 ... This can also be given as --config-overrides section1:option1 where the value will be left as ''. To remove a configuration option, use the command line argument --config-delete section1:option1 which will delete option1 from [section1] or --config-delete section1 to delete all of the options in [section1] Deletes are implemented before overrides. This function returns an argparse OptionGroup to ensure these options are parsed correctly and can then be sent directly to initialize an WorkflowConfigParser. Parameters ----------- parser : argparse.ArgumentParser instance The initialized argparse instance to add the workflow option group to. ] variable[workflowArgs] assign[=] call[name[parser].add_argument_group, parameter[constant[Configuration], constant[Options needed for parsing config file(s).]]] call[name[workflowArgs].add_argument, parameter[constant[--config-files]]] call[name[workflowArgs].add_argument, parameter[constant[--config-overrides]]] call[name[workflowArgs].add_argument, parameter[constant[--config-delete]]]
keyword[def] identifier[add_workflow_command_line_group] ( identifier[parser] ): literal[string] identifier[workflowArgs] = identifier[parser] . identifier[add_argument_group] ( literal[string] , literal[string] literal[string] ) identifier[workflowArgs] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[action] = literal[string] , identifier[metavar] = literal[string] , identifier[help] = literal[string] literal[string] ) identifier[workflowArgs] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[action] = literal[string] , identifier[metavar] = literal[string] , identifier[help] = literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] ) identifier[workflowArgs] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[action] = literal[string] , identifier[metavar] = literal[string] , identifier[help] = literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] )
def add_workflow_command_line_group(parser): """ The standard way of initializing a ConfigParser object in workflow will be to do it from the command line. This is done by giving a --local-config-files filea.ini fileb.ini filec.ini command. You can also set config file override commands on the command line. This will be most useful when setting (for example) start and end times, or active ifos. This is done by --config-overrides section1:option1:value1 section2:option2:value2 ... This can also be given as --config-overrides section1:option1 where the value will be left as ''. To remove a configuration option, use the command line argument --config-delete section1:option1 which will delete option1 from [section1] or --config-delete section1 to delete all of the options in [section1] Deletes are implemented before overrides. This function returns an argparse OptionGroup to ensure these options are parsed correctly and can then be sent directly to initialize an WorkflowConfigParser. Parameters ----------- parser : argparse.ArgumentParser instance The initialized argparse instance to add the workflow option group to. """ workflowArgs = parser.add_argument_group('Configuration', 'Options needed for parsing config file(s).') workflowArgs.add_argument('--config-files', nargs='+', action='store', metavar='CONFIGFILE', help='List of config files to be used in analysis.') workflowArgs.add_argument('--config-overrides', nargs='*', action='store', metavar='SECTION:OPTION:VALUE', help='List of section,option,value combinations to add into the configuration file. Normally the gps start and end times might be provided this way, and user specific locations (ie. output directories). 
This can also be provided as SECTION:OPTION or SECTION:OPTION: both of which indicate that the corresponding value is left blank.') workflowArgs.add_argument('--config-delete', nargs='*', action='store', metavar='SECTION:OPTION', help='List of section,option combinations to delete from the configuration file. This can also be provided as SECTION which deletes the enture section from the configuration file or SECTION:OPTION which deletes a specific option from a given section.')
def _count_extra_actions(self, game_image): """Count the number of extra actions for player in this turn.""" proportional = self._bonus_tools['extra_action_region'] # Use ProportionalRegion to isolate the extra actions area t, l, b, r = proportional.region_in(game_image) token_region = game_image[t:b, l:r] # Use TemplateFinder (multiple) to check for extra actions game_h, game_w = game_image.shape[0:2] token_h = int(round(game_h * 27.0 / 960)) token_w = int(round(game_w * 22.0 / 1280)) sizes = (token_h, token_w), # sizes change every time so just remake it. # thresholds are tight since need to count conservatively finder = v.TemplateFinder(pq_data.extra_action_template, sizes=sizes, acceptable_threshold=0.1, immediate_threshold=0.1) found_tokens = finder.locate_multiple_in(token_region) return len(found_tokens)
def function[_count_extra_actions, parameter[self, game_image]]: constant[Count the number of extra actions for player in this turn.] variable[proportional] assign[=] call[name[self]._bonus_tools][constant[extra_action_region]] <ast.Tuple object at 0x7da2044c1660> assign[=] call[name[proportional].region_in, parameter[name[game_image]]] variable[token_region] assign[=] call[name[game_image]][tuple[[<ast.Slice object at 0x7da2044c3d00>, <ast.Slice object at 0x7da2044c3460>]]] <ast.Tuple object at 0x7da2044c11b0> assign[=] call[name[game_image].shape][<ast.Slice object at 0x7da2044c2aa0>] variable[token_h] assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[binary_operation[name[game_h] * constant[27.0]] / constant[960]]]]]] variable[token_w] assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[binary_operation[name[game_w] * constant[22.0]] / constant[1280]]]]]] variable[sizes] assign[=] tuple[[<ast.Tuple object at 0x7da2044c19c0>]] variable[finder] assign[=] call[name[v].TemplateFinder, parameter[name[pq_data].extra_action_template]] variable[found_tokens] assign[=] call[name[finder].locate_multiple_in, parameter[name[token_region]]] return[call[name[len], parameter[name[found_tokens]]]]
keyword[def] identifier[_count_extra_actions] ( identifier[self] , identifier[game_image] ): literal[string] identifier[proportional] = identifier[self] . identifier[_bonus_tools] [ literal[string] ] identifier[t] , identifier[l] , identifier[b] , identifier[r] = identifier[proportional] . identifier[region_in] ( identifier[game_image] ) identifier[token_region] = identifier[game_image] [ identifier[t] : identifier[b] , identifier[l] : identifier[r] ] identifier[game_h] , identifier[game_w] = identifier[game_image] . identifier[shape] [ literal[int] : literal[int] ] identifier[token_h] = identifier[int] ( identifier[round] ( identifier[game_h] * literal[int] / literal[int] )) identifier[token_w] = identifier[int] ( identifier[round] ( identifier[game_w] * literal[int] / literal[int] )) identifier[sizes] =( identifier[token_h] , identifier[token_w] ), identifier[finder] = identifier[v] . identifier[TemplateFinder] ( identifier[pq_data] . identifier[extra_action_template] , identifier[sizes] = identifier[sizes] , identifier[acceptable_threshold] = literal[int] , identifier[immediate_threshold] = literal[int] ) identifier[found_tokens] = identifier[finder] . identifier[locate_multiple_in] ( identifier[token_region] ) keyword[return] identifier[len] ( identifier[found_tokens] )
def _count_extra_actions(self, game_image): """Count the number of extra actions for player in this turn.""" proportional = self._bonus_tools['extra_action_region'] # Use ProportionalRegion to isolate the extra actions area (t, l, b, r) = proportional.region_in(game_image) token_region = game_image[t:b, l:r] # Use TemplateFinder (multiple) to check for extra actions (game_h, game_w) = game_image.shape[0:2] token_h = int(round(game_h * 27.0 / 960)) token_w = int(round(game_w * 22.0 / 1280)) sizes = ((token_h, token_w),) # sizes change every time so just remake it. # thresholds are tight since need to count conservatively finder = v.TemplateFinder(pq_data.extra_action_template, sizes=sizes, acceptable_threshold=0.1, immediate_threshold=0.1) found_tokens = finder.locate_multiple_in(token_region) return len(found_tokens)
def total_hits(self, filename=None): """ Return the total number of covered statements for the file `filename`. If `filename` is not given, return the total number of covered statements for all files. """ if filename is not None: return len(self.hit_statements(filename)) total = 0 for filename in self.files(): total += len(self.hit_statements(filename)) return total
def function[total_hits, parameter[self, filename]]: constant[ Return the total number of covered statements for the file `filename`. If `filename` is not given, return the total number of covered statements for all files. ] if compare[name[filename] is_not constant[None]] begin[:] return[call[name[len], parameter[call[name[self].hit_statements, parameter[name[filename]]]]]] variable[total] assign[=] constant[0] for taget[name[filename]] in starred[call[name[self].files, parameter[]]] begin[:] <ast.AugAssign object at 0x7da1b0fc4730> return[name[total]]
keyword[def] identifier[total_hits] ( identifier[self] , identifier[filename] = keyword[None] ): literal[string] keyword[if] identifier[filename] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[len] ( identifier[self] . identifier[hit_statements] ( identifier[filename] )) identifier[total] = literal[int] keyword[for] identifier[filename] keyword[in] identifier[self] . identifier[files] (): identifier[total] += identifier[len] ( identifier[self] . identifier[hit_statements] ( identifier[filename] )) keyword[return] identifier[total]
def total_hits(self, filename=None): """ Return the total number of covered statements for the file `filename`. If `filename` is not given, return the total number of covered statements for all files. """ if filename is not None: return len(self.hit_statements(filename)) # depends on [control=['if'], data=['filename']] total = 0 for filename in self.files(): total += len(self.hit_statements(filename)) # depends on [control=['for'], data=['filename']] return total
def from_dict(input_dict): """ Instantiate an object of a derived class using the information in input_dict (built by the to_dict method of the derived class). More specifically, after reading the derived class from input_dict, it calls the method _build_from_input_dict of the derived class. Note: This method should not be overrided in the derived class. In case it is needed, please override _build_from_input_dict instate. :param dict input_dict: Dictionary with all the information needed to instantiate the object. """ import copy input_dict = copy.deepcopy(input_dict) link_class = input_dict.pop('class') import GPy link_class = eval(link_class) return link_class._build_from_input_dict(link_class, input_dict)
def function[from_dict, parameter[input_dict]]: constant[ Instantiate an object of a derived class using the information in input_dict (built by the to_dict method of the derived class). More specifically, after reading the derived class from input_dict, it calls the method _build_from_input_dict of the derived class. Note: This method should not be overrided in the derived class. In case it is needed, please override _build_from_input_dict instate. :param dict input_dict: Dictionary with all the information needed to instantiate the object. ] import module[copy] variable[input_dict] assign[=] call[name[copy].deepcopy, parameter[name[input_dict]]] variable[link_class] assign[=] call[name[input_dict].pop, parameter[constant[class]]] import module[GPy] variable[link_class] assign[=] call[name[eval], parameter[name[link_class]]] return[call[name[link_class]._build_from_input_dict, parameter[name[link_class], name[input_dict]]]]
keyword[def] identifier[from_dict] ( identifier[input_dict] ): literal[string] keyword[import] identifier[copy] identifier[input_dict] = identifier[copy] . identifier[deepcopy] ( identifier[input_dict] ) identifier[link_class] = identifier[input_dict] . identifier[pop] ( literal[string] ) keyword[import] identifier[GPy] identifier[link_class] = identifier[eval] ( identifier[link_class] ) keyword[return] identifier[link_class] . identifier[_build_from_input_dict] ( identifier[link_class] , identifier[input_dict] )
def from_dict(input_dict): """ Instantiate an object of a derived class using the information in input_dict (built by the to_dict method of the derived class). More specifically, after reading the derived class from input_dict, it calls the method _build_from_input_dict of the derived class. Note: This method should not be overrided in the derived class. In case it is needed, please override _build_from_input_dict instate. :param dict input_dict: Dictionary with all the information needed to instantiate the object. """ import copy input_dict = copy.deepcopy(input_dict) link_class = input_dict.pop('class') import GPy link_class = eval(link_class) return link_class._build_from_input_dict(link_class, input_dict)
def cmd_fire(self, connection, sender, target, payload): """ Sends a message """ msg_target, topic, content = self.parse_payload(payload) def callback(sender, payload): logging.info("FIRE ACK from %s", sender) self.__herald.fire(msg_target, topic, content, callback)
def function[cmd_fire, parameter[self, connection, sender, target, payload]]: constant[ Sends a message ] <ast.Tuple object at 0x7da1b26ae710> assign[=] call[name[self].parse_payload, parameter[name[payload]]] def function[callback, parameter[sender, payload]]: call[name[logging].info, parameter[constant[FIRE ACK from %s], name[sender]]] call[name[self].__herald.fire, parameter[name[msg_target], name[topic], name[content], name[callback]]]
keyword[def] identifier[cmd_fire] ( identifier[self] , identifier[connection] , identifier[sender] , identifier[target] , identifier[payload] ): literal[string] identifier[msg_target] , identifier[topic] , identifier[content] = identifier[self] . identifier[parse_payload] ( identifier[payload] ) keyword[def] identifier[callback] ( identifier[sender] , identifier[payload] ): identifier[logging] . identifier[info] ( literal[string] , identifier[sender] ) identifier[self] . identifier[__herald] . identifier[fire] ( identifier[msg_target] , identifier[topic] , identifier[content] , identifier[callback] )
def cmd_fire(self, connection, sender, target, payload): """ Sends a message """ (msg_target, topic, content) = self.parse_payload(payload) def callback(sender, payload): logging.info('FIRE ACK from %s', sender) self.__herald.fire(msg_target, topic, content, callback)
async def read_portrait_landscape(self, callback=None): """ This function reads the portrait/landscape status register of the MMA8452Q. It will return either PORTRAIT_U, PORTRAIT_D, LANDSCAPE_R, LANDSCAPE_L, or LOCKOUT. LOCKOUT indicates that the sensor is in neither p or ls. :param callback: Callback function :returns: See above. """ register = self.MMA8452Q_Register['PL_STATUS'] await self.board.i2c_read_request(self.address, register, 1, Constants.I2C_READ | Constants.I2C_END_TX_MASK, self.data_val, Constants.CB_TYPE_ASYNCIO) pl_status = await self.wait_for_read_result() pl_status = pl_status[self.data_start] if pl_status & 0x40: # Z-tilt lockout pl_status = self.LOCKOUT else: # Otherwise return LAPO status pl_status = (pl_status & 0x6) >> 1 if callback: await callback(pl_status) await asyncio.sleep(.001) return pl_status
<ast.AsyncFunctionDef object at 0x7da1b23441f0>
keyword[async] keyword[def] identifier[read_portrait_landscape] ( identifier[self] , identifier[callback] = keyword[None] ): literal[string] identifier[register] = identifier[self] . identifier[MMA8452Q_Register] [ literal[string] ] keyword[await] identifier[self] . identifier[board] . identifier[i2c_read_request] ( identifier[self] . identifier[address] , identifier[register] , literal[int] , identifier[Constants] . identifier[I2C_READ] | identifier[Constants] . identifier[I2C_END_TX_MASK] , identifier[self] . identifier[data_val] , identifier[Constants] . identifier[CB_TYPE_ASYNCIO] ) identifier[pl_status] = keyword[await] identifier[self] . identifier[wait_for_read_result] () identifier[pl_status] = identifier[pl_status] [ identifier[self] . identifier[data_start] ] keyword[if] identifier[pl_status] & literal[int] : identifier[pl_status] = identifier[self] . identifier[LOCKOUT] keyword[else] : identifier[pl_status] =( identifier[pl_status] & literal[int] )>> literal[int] keyword[if] identifier[callback] : keyword[await] identifier[callback] ( identifier[pl_status] ) keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] ) keyword[return] identifier[pl_status]
async def read_portrait_landscape(self, callback=None): """ This function reads the portrait/landscape status register of the MMA8452Q. It will return either PORTRAIT_U, PORTRAIT_D, LANDSCAPE_R, LANDSCAPE_L, or LOCKOUT. LOCKOUT indicates that the sensor is in neither p or ls. :param callback: Callback function :returns: See above. """ register = self.MMA8452Q_Register['PL_STATUS'] await self.board.i2c_read_request(self.address, register, 1, Constants.I2C_READ | Constants.I2C_END_TX_MASK, self.data_val, Constants.CB_TYPE_ASYNCIO) pl_status = await self.wait_for_read_result() pl_status = pl_status[self.data_start] if pl_status & 64: # Z-tilt lockout pl_status = self.LOCKOUT # depends on [control=['if'], data=[]] else: # Otherwise return LAPO status pl_status = (pl_status & 6) >> 1 if callback: await callback(pl_status) # depends on [control=['if'], data=[]] await asyncio.sleep(0.001) return pl_status
def search_read_all(self, domain, order, fields, batch_size=500, context=None, offset=0, limit=None): """ An endless iterator that iterates over records. :param domain: A search domain :param order: The order clause for search read :param fields: The fields argument for search_read :param batch_size: The optimal batch size when sending paginated requests """ if context is None: context = {} if limit is None: # When no limit is specified, all the records # should be fetched. record_count = self.search_count(domain, context=context) end = record_count + offset else: end = limit + offset for page_offset in range(offset, end, batch_size): if page_offset + batch_size > end: batch_size = end - page_offset for record in self.search_read( domain, page_offset, batch_size, order, fields, context=context): yield record
def function[search_read_all, parameter[self, domain, order, fields, batch_size, context, offset, limit]]: constant[ An endless iterator that iterates over records. :param domain: A search domain :param order: The order clause for search read :param fields: The fields argument for search_read :param batch_size: The optimal batch size when sending paginated requests ] if compare[name[context] is constant[None]] begin[:] variable[context] assign[=] dictionary[[], []] if compare[name[limit] is constant[None]] begin[:] variable[record_count] assign[=] call[name[self].search_count, parameter[name[domain]]] variable[end] assign[=] binary_operation[name[record_count] + name[offset]] for taget[name[page_offset]] in starred[call[name[range], parameter[name[offset], name[end], name[batch_size]]]] begin[:] if compare[binary_operation[name[page_offset] + name[batch_size]] greater[>] name[end]] begin[:] variable[batch_size] assign[=] binary_operation[name[end] - name[page_offset]] for taget[name[record]] in starred[call[name[self].search_read, parameter[name[domain], name[page_offset], name[batch_size], name[order], name[fields]]]] begin[:] <ast.Yield object at 0x7da1b1a5ca30>
keyword[def] identifier[search_read_all] ( identifier[self] , identifier[domain] , identifier[order] , identifier[fields] , identifier[batch_size] = literal[int] , identifier[context] = keyword[None] , identifier[offset] = literal[int] , identifier[limit] = keyword[None] ): literal[string] keyword[if] identifier[context] keyword[is] keyword[None] : identifier[context] ={} keyword[if] identifier[limit] keyword[is] keyword[None] : identifier[record_count] = identifier[self] . identifier[search_count] ( identifier[domain] , identifier[context] = identifier[context] ) identifier[end] = identifier[record_count] + identifier[offset] keyword[else] : identifier[end] = identifier[limit] + identifier[offset] keyword[for] identifier[page_offset] keyword[in] identifier[range] ( identifier[offset] , identifier[end] , identifier[batch_size] ): keyword[if] identifier[page_offset] + identifier[batch_size] > identifier[end] : identifier[batch_size] = identifier[end] - identifier[page_offset] keyword[for] identifier[record] keyword[in] identifier[self] . identifier[search_read] ( identifier[domain] , identifier[page_offset] , identifier[batch_size] , identifier[order] , identifier[fields] , identifier[context] = identifier[context] ): keyword[yield] identifier[record]
def search_read_all(self, domain, order, fields, batch_size=500, context=None, offset=0, limit=None): """ An endless iterator that iterates over records. :param domain: A search domain :param order: The order clause for search read :param fields: The fields argument for search_read :param batch_size: The optimal batch size when sending paginated requests """ if context is None: context = {} # depends on [control=['if'], data=['context']] if limit is None: # When no limit is specified, all the records # should be fetched. record_count = self.search_count(domain, context=context) end = record_count + offset # depends on [control=['if'], data=[]] else: end = limit + offset for page_offset in range(offset, end, batch_size): if page_offset + batch_size > end: batch_size = end - page_offset # depends on [control=['if'], data=['end']] for record in self.search_read(domain, page_offset, batch_size, order, fields, context=context): yield record # depends on [control=['for'], data=['record']] # depends on [control=['for'], data=['page_offset']]
def add_token_object(self, token): ''' Add a token object into this sentence ''' token.sent = self # take ownership of given token self.__tokens.append(token) return token
def function[add_token_object, parameter[self, token]]: constant[ Add a token object into this sentence ] name[token].sent assign[=] name[self] call[name[self].__tokens.append, parameter[name[token]]] return[name[token]]
keyword[def] identifier[add_token_object] ( identifier[self] , identifier[token] ): literal[string] identifier[token] . identifier[sent] = identifier[self] identifier[self] . identifier[__tokens] . identifier[append] ( identifier[token] ) keyword[return] identifier[token]
def add_token_object(self, token): """ Add a token object into this sentence """ token.sent = self # take ownership of given token self.__tokens.append(token) return token
def convert(self, form): """ Convert a lazy user to a non-lazy one. The form passed in is expected to be a ModelForm instance, bound to the user to be converted. The converted ``User`` object is returned. Raises a TypeError if the user is not lazy. """ if not is_lazy_user(form.instance): raise NotLazyError('You cannot convert a non-lazy user') user = form.save() # We need to remove the LazyUser instance assocated with the # newly-converted user self.filter(user=user).delete() converted.send(self, user=user) return user
def function[convert, parameter[self, form]]: constant[ Convert a lazy user to a non-lazy one. The form passed in is expected to be a ModelForm instance, bound to the user to be converted. The converted ``User`` object is returned. Raises a TypeError if the user is not lazy. ] if <ast.UnaryOp object at 0x7da20c7ca920> begin[:] <ast.Raise object at 0x7da20c7cbdf0> variable[user] assign[=] call[name[form].save, parameter[]] call[call[name[self].filter, parameter[]].delete, parameter[]] call[name[converted].send, parameter[name[self]]] return[name[user]]
keyword[def] identifier[convert] ( identifier[self] , identifier[form] ): literal[string] keyword[if] keyword[not] identifier[is_lazy_user] ( identifier[form] . identifier[instance] ): keyword[raise] identifier[NotLazyError] ( literal[string] ) identifier[user] = identifier[form] . identifier[save] () identifier[self] . identifier[filter] ( identifier[user] = identifier[user] ). identifier[delete] () identifier[converted] . identifier[send] ( identifier[self] , identifier[user] = identifier[user] ) keyword[return] identifier[user]
def convert(self, form): """ Convert a lazy user to a non-lazy one. The form passed in is expected to be a ModelForm instance, bound to the user to be converted. The converted ``User`` object is returned. Raises a TypeError if the user is not lazy. """ if not is_lazy_user(form.instance): raise NotLazyError('You cannot convert a non-lazy user') # depends on [control=['if'], data=[]] user = form.save() # We need to remove the LazyUser instance assocated with the # newly-converted user self.filter(user=user).delete() converted.send(self, user=user) return user
def load_stream(handle, delimiter=None): """ Creates a string generator from a stream (file handle) containing data delimited by the delimiter strings. This is a stand-alone function and should be used to feed external data into a pipeline. Arguments: - hande(``file``) A file handle open for reading. - delimiter(``str``) [default: ``None``] The default means that items will be separated by two new-line characters i.e.: ``"\\n\\n"``. """ delimiter = (delimiter or "") + "\n" while True: item = [] while True: line = handle.readline() if line == "": raise StopIteration elif line == delimiter: if item: break elif line != '\n': item.append(line) yield "".join(item)
def function[load_stream, parameter[handle, delimiter]]: constant[ Creates a string generator from a stream (file handle) containing data delimited by the delimiter strings. This is a stand-alone function and should be used to feed external data into a pipeline. Arguments: - hande(``file``) A file handle open for reading. - delimiter(``str``) [default: ``None``] The default means that items will be separated by two new-line characters i.e.: ``"\n\n"``. ] variable[delimiter] assign[=] binary_operation[<ast.BoolOp object at 0x7da1b2566740> + constant[ ]] while constant[True] begin[:] variable[item] assign[=] list[[]] while constant[True] begin[:] variable[line] assign[=] call[name[handle].readline, parameter[]] if compare[name[line] equal[==] constant[]] begin[:] <ast.Raise object at 0x7da20c6a9960> <ast.Yield object at 0x7da1b25d26b0>
keyword[def] identifier[load_stream] ( identifier[handle] , identifier[delimiter] = keyword[None] ): literal[string] identifier[delimiter] =( identifier[delimiter] keyword[or] literal[string] )+ literal[string] keyword[while] keyword[True] : identifier[item] =[] keyword[while] keyword[True] : identifier[line] = identifier[handle] . identifier[readline] () keyword[if] identifier[line] == literal[string] : keyword[raise] identifier[StopIteration] keyword[elif] identifier[line] == identifier[delimiter] : keyword[if] identifier[item] : keyword[break] keyword[elif] identifier[line] != literal[string] : identifier[item] . identifier[append] ( identifier[line] ) keyword[yield] literal[string] . identifier[join] ( identifier[item] )
def load_stream(handle, delimiter=None): """ Creates a string generator from a stream (file handle) containing data delimited by the delimiter strings. This is a stand-alone function and should be used to feed external data into a pipeline. Arguments: - hande(``file``) A file handle open for reading. - delimiter(``str``) [default: ``None``] The default means that items will be separated by two new-line characters i.e.: ``"\\n\\n"``. """ delimiter = (delimiter or '') + '\n' while True: item = [] while True: line = handle.readline() if line == '': raise StopIteration # depends on [control=['if'], data=[]] elif line == delimiter: if item: break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif line != '\n': item.append(line) # depends on [control=['if'], data=['line']] # depends on [control=['while'], data=[]] yield ''.join(item) # depends on [control=['while'], data=[]]
def list_records(self, bucket_name, prefix='', delimiter='', max_results=1000, starting_key=''): ''' a method for retrieving a list of the versions of records in a bucket :param bucket_name: string with name of bucket :param prefix: [optional] string with value limiting results to key prefix :param delimiter: string with value which results must not contain (after prefix) :param max_results: [optional] integer with max results to return :param starting_key: [optional] string with key value to continue search with :return: list of results with key, size and date, string with ending key value ''' title = '%s.list_records' % self.__class__.__name__ from datetime import datetime from dateutil.tz import tzutc # validate inputs input_fields = { 'bucket_name': bucket_name, 'prefix': prefix, 'delimiter': delimiter, 'max_results': max_results, 'starting_key': starting_key } for key, value in input_fields.items(): if value: object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # verify existence of bucket if not bucket_name in self.bucket_list: if not bucket_name in self.list_buckets(): raise ValueError('S3 Bucket "%s" does not exist in aws region %s.' 
% (bucket_name, self.iam.region_name)) # create key word argument dictionary kw_args = { 'Bucket': bucket_name } if starting_key: kw_args['Marker'] = starting_key if prefix: kw_args['Prefix'] = prefix if delimiter: kw_args['Delimiter'] = delimiter if max_results: kw_args['MaxKeys'] = max_results # send request for objects record_list = [] next_key = '' try: response = self.connection.list_objects(**kw_args) except: raise AWSConnectionError(title) # add retrieved contents to object list if 'Contents' in response: for record in response['Contents']: details = { 'key': '' } details = self.iam.ingest(record, details) epoch_zero = datetime.fromtimestamp(0).replace(tzinfo=tzutc()) details['last_modified'] = (details['last_modified'] - epoch_zero).total_seconds() record_list.append(details) # define ending key value if response['IsTruncated']: next_key = response['NextMarker'] return record_list, next_key
def function[list_records, parameter[self, bucket_name, prefix, delimiter, max_results, starting_key]]: constant[ a method for retrieving a list of the versions of records in a bucket :param bucket_name: string with name of bucket :param prefix: [optional] string with value limiting results to key prefix :param delimiter: string with value which results must not contain (after prefix) :param max_results: [optional] integer with max results to return :param starting_key: [optional] string with key value to continue search with :return: list of results with key, size and date, string with ending key value ] variable[title] assign[=] binary_operation[constant[%s.list_records] <ast.Mod object at 0x7da2590d6920> name[self].__class__.__name__] from relative_module[datetime] import module[datetime] from relative_module[dateutil.tz] import module[tzutc] variable[input_fields] assign[=] dictionary[[<ast.Constant object at 0x7da20cabc7f0>, <ast.Constant object at 0x7da20cabd990>, <ast.Constant object at 0x7da20cabc6d0>, <ast.Constant object at 0x7da20cabf2b0>, <ast.Constant object at 0x7da20cabf580>], [<ast.Name object at 0x7da20cabe7d0>, <ast.Name object at 0x7da20cabe200>, <ast.Name object at 0x7da20cabea40>, <ast.Name object at 0x7da20cabe920>, <ast.Name object at 0x7da20cabd930>]] for taget[tuple[[<ast.Name object at 0x7da20cabd870>, <ast.Name object at 0x7da20cabe8f0>]]] in starred[call[name[input_fields].items, parameter[]]] begin[:] if name[value] begin[:] variable[object_title] assign[=] binary_operation[constant[%s(%s=%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20cabf640>, <ast.Name object at 0x7da20cabeb00>, <ast.Call object at 0x7da20cabfb80>]]] call[name[self].fields.validate, parameter[name[value], binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> name[key]], name[object_title]]] if <ast.UnaryOp object at 0x7da20cabd120> begin[:] if <ast.UnaryOp object at 0x7da20cabe410> begin[:] <ast.Raise object at 0x7da20cabd2a0> 
variable[kw_args] assign[=] dictionary[[<ast.Constant object at 0x7da20cabf700>], [<ast.Name object at 0x7da20cabece0>]] if name[starting_key] begin[:] call[name[kw_args]][constant[Marker]] assign[=] name[starting_key] if name[prefix] begin[:] call[name[kw_args]][constant[Prefix]] assign[=] name[prefix] if name[delimiter] begin[:] call[name[kw_args]][constant[Delimiter]] assign[=] name[delimiter] if name[max_results] begin[:] call[name[kw_args]][constant[MaxKeys]] assign[=] name[max_results] variable[record_list] assign[=] list[[]] variable[next_key] assign[=] constant[] <ast.Try object at 0x7da1b1529810> if compare[constant[Contents] in name[response]] begin[:] for taget[name[record]] in starred[call[name[response]][constant[Contents]]] begin[:] variable[details] assign[=] dictionary[[<ast.Constant object at 0x7da1b1529a20>], [<ast.Constant object at 0x7da1b1529240>]] variable[details] assign[=] call[name[self].iam.ingest, parameter[name[record], name[details]]] variable[epoch_zero] assign[=] call[call[name[datetime].fromtimestamp, parameter[constant[0]]].replace, parameter[]] call[name[details]][constant[last_modified]] assign[=] call[binary_operation[call[name[details]][constant[last_modified]] - name[epoch_zero]].total_seconds, parameter[]] call[name[record_list].append, parameter[name[details]]] if call[name[response]][constant[IsTruncated]] begin[:] variable[next_key] assign[=] call[name[response]][constant[NextMarker]] return[tuple[[<ast.Name object at 0x7da18f58f610>, <ast.Name object at 0x7da18f58d570>]]]
keyword[def] identifier[list_records] ( identifier[self] , identifier[bucket_name] , identifier[prefix] = literal[string] , identifier[delimiter] = literal[string] , identifier[max_results] = literal[int] , identifier[starting_key] = literal[string] ): literal[string] identifier[title] = literal[string] % identifier[self] . identifier[__class__] . identifier[__name__] keyword[from] identifier[datetime] keyword[import] identifier[datetime] keyword[from] identifier[dateutil] . identifier[tz] keyword[import] identifier[tzutc] identifier[input_fields] ={ literal[string] : identifier[bucket_name] , literal[string] : identifier[prefix] , literal[string] : identifier[delimiter] , literal[string] : identifier[max_results] , literal[string] : identifier[starting_key] } keyword[for] identifier[key] , identifier[value] keyword[in] identifier[input_fields] . identifier[items] (): keyword[if] identifier[value] : identifier[object_title] = literal[string] %( identifier[title] , identifier[key] , identifier[str] ( identifier[value] )) identifier[self] . identifier[fields] . identifier[validate] ( identifier[value] , literal[string] % identifier[key] , identifier[object_title] ) keyword[if] keyword[not] identifier[bucket_name] keyword[in] identifier[self] . identifier[bucket_list] : keyword[if] keyword[not] identifier[bucket_name] keyword[in] identifier[self] . identifier[list_buckets] (): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[bucket_name] , identifier[self] . identifier[iam] . 
identifier[region_name] )) identifier[kw_args] ={ literal[string] : identifier[bucket_name] } keyword[if] identifier[starting_key] : identifier[kw_args] [ literal[string] ]= identifier[starting_key] keyword[if] identifier[prefix] : identifier[kw_args] [ literal[string] ]= identifier[prefix] keyword[if] identifier[delimiter] : identifier[kw_args] [ literal[string] ]= identifier[delimiter] keyword[if] identifier[max_results] : identifier[kw_args] [ literal[string] ]= identifier[max_results] identifier[record_list] =[] identifier[next_key] = literal[string] keyword[try] : identifier[response] = identifier[self] . identifier[connection] . identifier[list_objects] (** identifier[kw_args] ) keyword[except] : keyword[raise] identifier[AWSConnectionError] ( identifier[title] ) keyword[if] literal[string] keyword[in] identifier[response] : keyword[for] identifier[record] keyword[in] identifier[response] [ literal[string] ]: identifier[details] ={ literal[string] : literal[string] } identifier[details] = identifier[self] . identifier[iam] . identifier[ingest] ( identifier[record] , identifier[details] ) identifier[epoch_zero] = identifier[datetime] . identifier[fromtimestamp] ( literal[int] ). identifier[replace] ( identifier[tzinfo] = identifier[tzutc] ()) identifier[details] [ literal[string] ]=( identifier[details] [ literal[string] ]- identifier[epoch_zero] ). identifier[total_seconds] () identifier[record_list] . identifier[append] ( identifier[details] ) keyword[if] identifier[response] [ literal[string] ]: identifier[next_key] = identifier[response] [ literal[string] ] keyword[return] identifier[record_list] , identifier[next_key]
def list_records(self, bucket_name, prefix='', delimiter='', max_results=1000, starting_key=''): """ a method for retrieving a list of the versions of records in a bucket :param bucket_name: string with name of bucket :param prefix: [optional] string with value limiting results to key prefix :param delimiter: string with value which results must not contain (after prefix) :param max_results: [optional] integer with max results to return :param starting_key: [optional] string with key value to continue search with :return: list of results with key, size and date, string with ending key value """ title = '%s.list_records' % self.__class__.__name__ from datetime import datetime from dateutil.tz import tzutc # validate inputs input_fields = {'bucket_name': bucket_name, 'prefix': prefix, 'delimiter': delimiter, 'max_results': max_results, 'starting_key': starting_key} for (key, value) in input_fields.items(): if value: object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # verify existence of bucket if not bucket_name in self.bucket_list: if not bucket_name in self.list_buckets(): raise ValueError('S3 Bucket "%s" does not exist in aws region %s.' 
% (bucket_name, self.iam.region_name)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # create key word argument dictionary kw_args = {'Bucket': bucket_name} if starting_key: kw_args['Marker'] = starting_key # depends on [control=['if'], data=[]] if prefix: kw_args['Prefix'] = prefix # depends on [control=['if'], data=[]] if delimiter: kw_args['Delimiter'] = delimiter # depends on [control=['if'], data=[]] if max_results: kw_args['MaxKeys'] = max_results # depends on [control=['if'], data=[]] # send request for objects record_list = [] next_key = '' try: response = self.connection.list_objects(**kw_args) # depends on [control=['try'], data=[]] except: raise AWSConnectionError(title) # depends on [control=['except'], data=[]] # add retrieved contents to object list if 'Contents' in response: for record in response['Contents']: details = {'key': ''} details = self.iam.ingest(record, details) epoch_zero = datetime.fromtimestamp(0).replace(tzinfo=tzutc()) details['last_modified'] = (details['last_modified'] - epoch_zero).total_seconds() record_list.append(details) # depends on [control=['for'], data=['record']] # depends on [control=['if'], data=['response']] # define ending key value if response['IsTruncated']: next_key = response['NextMarker'] # depends on [control=['if'], data=[]] return (record_list, next_key)
def refresh(self): """Refresh the current figure """ for cbar in self.colorbars: cbar.draw_all() self.canvas.draw()
def function[refresh, parameter[self]]: constant[Refresh the current figure ] for taget[name[cbar]] in starred[name[self].colorbars] begin[:] call[name[cbar].draw_all, parameter[]] call[name[self].canvas.draw, parameter[]]
keyword[def] identifier[refresh] ( identifier[self] ): literal[string] keyword[for] identifier[cbar] keyword[in] identifier[self] . identifier[colorbars] : identifier[cbar] . identifier[draw_all] () identifier[self] . identifier[canvas] . identifier[draw] ()
def refresh(self): """Refresh the current figure """ for cbar in self.colorbars: cbar.draw_all() # depends on [control=['for'], data=['cbar']] self.canvas.draw()
def quick_view(structure, bonds=True, conventional=False, transform=None, show_box=True, bond_tol=0.2, stick_radius=0.1): """ A function to visualize pymatgen Structure objects in jupyter notebook using chemview package. Args: structure: pymatgen Structure bonds: (bool) visualize bonds. Bonds are found by comparing distances to added covalent radii of pairs. Defaults to True. conventional: (bool) use conventional cell. Defaults to False. transform: (list) can be used to make supercells with pymatgen.Structure.make_supercell method show_box: (bool) unit cell is shown. Defaults to True. bond_tol: (float) used if bonds=True. Sets the extra distance tolerance when finding bonds. stick_radius: (float) radius of bonds. Returns: A chemview.MolecularViewer object """ s = structure.copy() if conventional: s = SpacegroupAnalyzer(s).get_conventional_standard_structure() if transform: s.make_supercell(transform) atom_types = [i.symbol for i in s.species] if bonds: bonds = [] for i in range(s.num_sites - 1): sym_i = s[i].specie.symbol for j in range(i + 1, s.num_sites): sym_j = s[j].specie.symbol max_d = CovalentRadius.radius[sym_i] + CovalentRadius.radius[sym_j] + bond_tol if s.get_distance(i, j, np.array([0,0,0])) < max_d: bonds.append((i, j)) bonds = bonds if bonds else None mv = MolecularViewer(s.cart_coords, topology={'atom_types': atom_types, 'bonds': bonds}) if bonds: mv.ball_and_sticks(stick_radius=stick_radius) for i in s.sites: el = i.specie.symbol coord = i.coords r = CovalentRadius.radius[el] mv.add_representation('spheres', {'coordinates': coord.astype('float32'), 'colors': [get_atom_color(el)], 'radii': [r * 0.5], 'opacity': 1.0}) if show_box: o = np.array([0, 0, 0]) a, b, c = s.lattice.matrix[0], s.lattice.matrix[1], s.lattice.matrix[2] starts = [o, o, o, a, a, b, b, c, c, a + b, a + c, b + c] ends = [a, b, c, a + b, a + c, b + a, b + c, c + a, c + b, a + b + c, a + b + c, a + b + c] colors = [0xffffff for i in range(12)] mv.add_representation('lines', 
{'startCoords': np.array(starts), 'endCoords': np.array(ends), 'startColors': colors, 'endColors': colors}) return mv
def function[quick_view, parameter[structure, bonds, conventional, transform, show_box, bond_tol, stick_radius]]: constant[ A function to visualize pymatgen Structure objects in jupyter notebook using chemview package. Args: structure: pymatgen Structure bonds: (bool) visualize bonds. Bonds are found by comparing distances to added covalent radii of pairs. Defaults to True. conventional: (bool) use conventional cell. Defaults to False. transform: (list) can be used to make supercells with pymatgen.Structure.make_supercell method show_box: (bool) unit cell is shown. Defaults to True. bond_tol: (float) used if bonds=True. Sets the extra distance tolerance when finding bonds. stick_radius: (float) radius of bonds. Returns: A chemview.MolecularViewer object ] variable[s] assign[=] call[name[structure].copy, parameter[]] if name[conventional] begin[:] variable[s] assign[=] call[call[name[SpacegroupAnalyzer], parameter[name[s]]].get_conventional_standard_structure, parameter[]] if name[transform] begin[:] call[name[s].make_supercell, parameter[name[transform]]] variable[atom_types] assign[=] <ast.ListComp object at 0x7da204566080> if name[bonds] begin[:] variable[bonds] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[s].num_sites - constant[1]]]]] begin[:] variable[sym_i] assign[=] call[name[s]][name[i]].specie.symbol for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] + constant[1]], name[s].num_sites]]] begin[:] variable[sym_j] assign[=] call[name[s]][name[j]].specie.symbol variable[max_d] assign[=] binary_operation[binary_operation[call[name[CovalentRadius].radius][name[sym_i]] + call[name[CovalentRadius].radius][name[sym_j]]] + name[bond_tol]] if compare[call[name[s].get_distance, parameter[name[i], name[j], call[name[np].array, parameter[list[[<ast.Constant object at 0x7da204566920>, <ast.Constant object at 0x7da204566b90>, <ast.Constant object at 0x7da204566620>]]]]]] less[<] 
name[max_d]] begin[:] call[name[bonds].append, parameter[tuple[[<ast.Name object at 0x7da204567790>, <ast.Name object at 0x7da204567070>]]]] variable[bonds] assign[=] <ast.IfExp object at 0x7da204566110> variable[mv] assign[=] call[name[MolecularViewer], parameter[name[s].cart_coords]] if name[bonds] begin[:] call[name[mv].ball_and_sticks, parameter[]] for taget[name[i]] in starred[name[s].sites] begin[:] variable[el] assign[=] name[i].specie.symbol variable[coord] assign[=] name[i].coords variable[r] assign[=] call[name[CovalentRadius].radius][name[el]] call[name[mv].add_representation, parameter[constant[spheres], dictionary[[<ast.Constant object at 0x7da2045648b0>, <ast.Constant object at 0x7da204564a00>, <ast.Constant object at 0x7da204564af0>, <ast.Constant object at 0x7da2045669b0>], [<ast.Call object at 0x7da204566b00>, <ast.List object at 0x7da2045657e0>, <ast.List object at 0x7da204564c10>, <ast.Constant object at 0x7da204565180>]]]] if name[show_box] begin[:] variable[o] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da204564280>, <ast.Constant object at 0x7da204564b20>, <ast.Constant object at 0x7da204567100>]]]] <ast.Tuple object at 0x7da204564640> assign[=] tuple[[<ast.Subscript object at 0x7da204564580>, <ast.Subscript object at 0x7da204567580>, <ast.Subscript object at 0x7da204566590>]] variable[starts] assign[=] list[[<ast.Name object at 0x7da204567610>, <ast.Name object at 0x7da204565060>, <ast.Name object at 0x7da204565330>, <ast.Name object at 0x7da204564070>, <ast.Name object at 0x7da2045648e0>, <ast.Name object at 0x7da204566680>, <ast.Name object at 0x7da204566350>, <ast.Name object at 0x7da204567b50>, <ast.Name object at 0x7da204567490>, <ast.BinOp object at 0x7da204566c80>, <ast.BinOp object at 0x7da2045641c0>, <ast.BinOp object at 0x7da204566260>]] variable[ends] assign[=] list[[<ast.Name object at 0x7da204567430>, <ast.Name object at 0x7da2045673a0>, <ast.Name object at 0x7da204564e50>, <ast.BinOp object at 
0x7da204565b40>, <ast.BinOp object at 0x7da204565780>, <ast.BinOp object at 0x7da204566f50>, <ast.BinOp object at 0x7da204565720>, <ast.BinOp object at 0x7da204567fd0>, <ast.BinOp object at 0x7da204564160>, <ast.BinOp object at 0x7da204567eb0>, <ast.BinOp object at 0x7da204564c40>, <ast.BinOp object at 0x7da204566380>]] variable[colors] assign[=] <ast.ListComp object at 0x7da204565ba0> call[name[mv].add_representation, parameter[constant[lines], dictionary[[<ast.Constant object at 0x7da204565570>, <ast.Constant object at 0x7da204564670>, <ast.Constant object at 0x7da204567040>, <ast.Constant object at 0x7da204565450>], [<ast.Call object at 0x7da2045640d0>, <ast.Call object at 0x7da204566f80>, <ast.Name object at 0x7da204565d80>, <ast.Name object at 0x7da204567700>]]]] return[name[mv]]
keyword[def] identifier[quick_view] ( identifier[structure] , identifier[bonds] = keyword[True] , identifier[conventional] = keyword[False] , identifier[transform] = keyword[None] , identifier[show_box] = keyword[True] , identifier[bond_tol] = literal[int] , identifier[stick_radius] = literal[int] ): literal[string] identifier[s] = identifier[structure] . identifier[copy] () keyword[if] identifier[conventional] : identifier[s] = identifier[SpacegroupAnalyzer] ( identifier[s] ). identifier[get_conventional_standard_structure] () keyword[if] identifier[transform] : identifier[s] . identifier[make_supercell] ( identifier[transform] ) identifier[atom_types] =[ identifier[i] . identifier[symbol] keyword[for] identifier[i] keyword[in] identifier[s] . identifier[species] ] keyword[if] identifier[bonds] : identifier[bonds] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[s] . identifier[num_sites] - literal[int] ): identifier[sym_i] = identifier[s] [ identifier[i] ]. identifier[specie] . identifier[symbol] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] + literal[int] , identifier[s] . identifier[num_sites] ): identifier[sym_j] = identifier[s] [ identifier[j] ]. identifier[specie] . identifier[symbol] identifier[max_d] = identifier[CovalentRadius] . identifier[radius] [ identifier[sym_i] ]+ identifier[CovalentRadius] . identifier[radius] [ identifier[sym_j] ]+ identifier[bond_tol] keyword[if] identifier[s] . identifier[get_distance] ( identifier[i] , identifier[j] , identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ]))< identifier[max_d] : identifier[bonds] . identifier[append] (( identifier[i] , identifier[j] )) identifier[bonds] = identifier[bonds] keyword[if] identifier[bonds] keyword[else] keyword[None] identifier[mv] = identifier[MolecularViewer] ( identifier[s] . 
identifier[cart_coords] , identifier[topology] ={ literal[string] : identifier[atom_types] , literal[string] : identifier[bonds] }) keyword[if] identifier[bonds] : identifier[mv] . identifier[ball_and_sticks] ( identifier[stick_radius] = identifier[stick_radius] ) keyword[for] identifier[i] keyword[in] identifier[s] . identifier[sites] : identifier[el] = identifier[i] . identifier[specie] . identifier[symbol] identifier[coord] = identifier[i] . identifier[coords] identifier[r] = identifier[CovalentRadius] . identifier[radius] [ identifier[el] ] identifier[mv] . identifier[add_representation] ( literal[string] ,{ literal[string] : identifier[coord] . identifier[astype] ( literal[string] ), literal[string] :[ identifier[get_atom_color] ( identifier[el] )], literal[string] :[ identifier[r] * literal[int] ], literal[string] : literal[int] }) keyword[if] identifier[show_box] : identifier[o] = identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ]) identifier[a] , identifier[b] , identifier[c] = identifier[s] . identifier[lattice] . identifier[matrix] [ literal[int] ], identifier[s] . identifier[lattice] . identifier[matrix] [ literal[int] ], identifier[s] . identifier[lattice] . 
identifier[matrix] [ literal[int] ] identifier[starts] =[ identifier[o] , identifier[o] , identifier[o] , identifier[a] , identifier[a] , identifier[b] , identifier[b] , identifier[c] , identifier[c] , identifier[a] + identifier[b] , identifier[a] + identifier[c] , identifier[b] + identifier[c] ] identifier[ends] =[ identifier[a] , identifier[b] , identifier[c] , identifier[a] + identifier[b] , identifier[a] + identifier[c] , identifier[b] + identifier[a] , identifier[b] + identifier[c] , identifier[c] + identifier[a] , identifier[c] + identifier[b] , identifier[a] + identifier[b] + identifier[c] , identifier[a] + identifier[b] + identifier[c] , identifier[a] + identifier[b] + identifier[c] ] identifier[colors] =[ literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )] identifier[mv] . identifier[add_representation] ( literal[string] ,{ literal[string] : identifier[np] . identifier[array] ( identifier[starts] ), literal[string] : identifier[np] . identifier[array] ( identifier[ends] ), literal[string] : identifier[colors] , literal[string] : identifier[colors] }) keyword[return] identifier[mv]
def quick_view(structure, bonds=True, conventional=False, transform=None, show_box=True, bond_tol=0.2, stick_radius=0.1): """ A function to visualize pymatgen Structure objects in jupyter notebook using chemview package. Args: structure: pymatgen Structure bonds: (bool) visualize bonds. Bonds are found by comparing distances to added covalent radii of pairs. Defaults to True. conventional: (bool) use conventional cell. Defaults to False. transform: (list) can be used to make supercells with pymatgen.Structure.make_supercell method show_box: (bool) unit cell is shown. Defaults to True. bond_tol: (float) used if bonds=True. Sets the extra distance tolerance when finding bonds. stick_radius: (float) radius of bonds. Returns: A chemview.MolecularViewer object """ s = structure.copy() if conventional: s = SpacegroupAnalyzer(s).get_conventional_standard_structure() # depends on [control=['if'], data=[]] if transform: s.make_supercell(transform) # depends on [control=['if'], data=[]] atom_types = [i.symbol for i in s.species] if bonds: bonds = [] for i in range(s.num_sites - 1): sym_i = s[i].specie.symbol for j in range(i + 1, s.num_sites): sym_j = s[j].specie.symbol max_d = CovalentRadius.radius[sym_i] + CovalentRadius.radius[sym_j] + bond_tol if s.get_distance(i, j, np.array([0, 0, 0])) < max_d: bonds.append((i, j)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] bonds = bonds if bonds else None mv = MolecularViewer(s.cart_coords, topology={'atom_types': atom_types, 'bonds': bonds}) if bonds: mv.ball_and_sticks(stick_radius=stick_radius) # depends on [control=['if'], data=[]] for i in s.sites: el = i.specie.symbol coord = i.coords r = CovalentRadius.radius[el] mv.add_representation('spheres', {'coordinates': coord.astype('float32'), 'colors': [get_atom_color(el)], 'radii': [r * 0.5], 'opacity': 1.0}) # depends on [control=['for'], data=['i']] if show_box: 
o = np.array([0, 0, 0]) (a, b, c) = (s.lattice.matrix[0], s.lattice.matrix[1], s.lattice.matrix[2]) starts = [o, o, o, a, a, b, b, c, c, a + b, a + c, b + c] ends = [a, b, c, a + b, a + c, b + a, b + c, c + a, c + b, a + b + c, a + b + c, a + b + c] colors = [16777215 for i in range(12)] mv.add_representation('lines', {'startCoords': np.array(starts), 'endCoords': np.array(ends), 'startColors': colors, 'endColors': colors}) # depends on [control=['if'], data=[]] return mv
def infix(tokens, operator_table): """Match an infix of an operator.""" operator, matched_tokens = operator_table.infix.match(tokens) if operator: return TokenMatch(operator, None, matched_tokens)
def function[infix, parameter[tokens, operator_table]]: constant[Match an infix of an operator.] <ast.Tuple object at 0x7da1b0fb26b0> assign[=] call[name[operator_table].infix.match, parameter[name[tokens]]] if name[operator] begin[:] return[call[name[TokenMatch], parameter[name[operator], constant[None], name[matched_tokens]]]]
keyword[def] identifier[infix] ( identifier[tokens] , identifier[operator_table] ): literal[string] identifier[operator] , identifier[matched_tokens] = identifier[operator_table] . identifier[infix] . identifier[match] ( identifier[tokens] ) keyword[if] identifier[operator] : keyword[return] identifier[TokenMatch] ( identifier[operator] , keyword[None] , identifier[matched_tokens] )
def infix(tokens, operator_table): """Match an infix of an operator.""" (operator, matched_tokens) = operator_table.infix.match(tokens) if operator: return TokenMatch(operator, None, matched_tokens) # depends on [control=['if'], data=[]]
async def send_script(self, conn_id, data): """Asynchronously send a a script to this IOTile device Args: conn_id (int): A unique identifier that will refer to this connection data (bytes or bytearray): the script to send to the device """ self._ensure_connection(conn_id, True) dev = self._get_property(conn_id, 'device') conn_string = self._get_property(conn_id, 'connection_string') # Simulate some progress callbacks (0, 50%, 100%) await self.notify_progress(conn_string, 'script', 0, len(data)) await self.notify_progress(conn_string, 'script', len(data) // 2, len(data)) await self.notify_progress(conn_string, 'script', len(data), len(data)) dev.script = data
<ast.AsyncFunctionDef object at 0x7da18f00f4c0>
keyword[async] keyword[def] identifier[send_script] ( identifier[self] , identifier[conn_id] , identifier[data] ): literal[string] identifier[self] . identifier[_ensure_connection] ( identifier[conn_id] , keyword[True] ) identifier[dev] = identifier[self] . identifier[_get_property] ( identifier[conn_id] , literal[string] ) identifier[conn_string] = identifier[self] . identifier[_get_property] ( identifier[conn_id] , literal[string] ) keyword[await] identifier[self] . identifier[notify_progress] ( identifier[conn_string] , literal[string] , literal[int] , identifier[len] ( identifier[data] )) keyword[await] identifier[self] . identifier[notify_progress] ( identifier[conn_string] , literal[string] , identifier[len] ( identifier[data] )// literal[int] , identifier[len] ( identifier[data] )) keyword[await] identifier[self] . identifier[notify_progress] ( identifier[conn_string] , literal[string] , identifier[len] ( identifier[data] ), identifier[len] ( identifier[data] )) identifier[dev] . identifier[script] = identifier[data]
async def send_script(self, conn_id, data): """Asynchronously send a a script to this IOTile device Args: conn_id (int): A unique identifier that will refer to this connection data (bytes or bytearray): the script to send to the device """ self._ensure_connection(conn_id, True) dev = self._get_property(conn_id, 'device') conn_string = self._get_property(conn_id, 'connection_string') # Simulate some progress callbacks (0, 50%, 100%) await self.notify_progress(conn_string, 'script', 0, len(data)) await self.notify_progress(conn_string, 'script', len(data) // 2, len(data)) await self.notify_progress(conn_string, 'script', len(data), len(data)) dev.script = data
def get_roots(self): """get the roots of a graph. must be a directed graph :returns: root list of nodes :rtype: Node[] """ if self.__directionless: sys.stderr.write("ERROR: can't get roots of an undirected graph\n") sys.exit() outputids = self.__nodes.keys() #print outputids rootset = set(outputids) - set(self.__child_to_parent.keys()) return [self.__nodes[x] for x in rootset]
def function[get_roots, parameter[self]]: constant[get the roots of a graph. must be a directed graph :returns: root list of nodes :rtype: Node[] ] if name[self].__directionless begin[:] call[name[sys].stderr.write, parameter[constant[ERROR: can't get roots of an undirected graph ]]] call[name[sys].exit, parameter[]] variable[outputids] assign[=] call[name[self].__nodes.keys, parameter[]] variable[rootset] assign[=] binary_operation[call[name[set], parameter[name[outputids]]] - call[name[set], parameter[call[name[self].__child_to_parent.keys, parameter[]]]]] return[<ast.ListComp object at 0x7da1b0a654b0>]
keyword[def] identifier[get_roots] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[__directionless] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] ) identifier[sys] . identifier[exit] () identifier[outputids] = identifier[self] . identifier[__nodes] . identifier[keys] () identifier[rootset] = identifier[set] ( identifier[outputids] )- identifier[set] ( identifier[self] . identifier[__child_to_parent] . identifier[keys] ()) keyword[return] [ identifier[self] . identifier[__nodes] [ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[rootset] ]
def get_roots(self): """get the roots of a graph. must be a directed graph :returns: root list of nodes :rtype: Node[] """ if self.__directionless: sys.stderr.write("ERROR: can't get roots of an undirected graph\n") sys.exit() # depends on [control=['if'], data=[]] outputids = self.__nodes.keys() #print outputids rootset = set(outputids) - set(self.__child_to_parent.keys()) return [self.__nodes[x] for x in rootset]
def agreement_weighted(ci, wts): ''' D = AGREEMENT_WEIGHTED(CI,WTS) is identical to AGREEMENT, with the exception that each partitions contribution is weighted according to the corresponding scalar value stored in the vector WTS. As an example, suppose CI contained partitions obtained using some heuristic for maximizing modularity. A possible choice for WTS might be the Q metric (Newman's modularity score). Such a choice would add more weight to higher modularity partitions. NOTE: Unlike AGREEMENT, this script does not have the input argument BUFFSZ. Parameters ---------- ci : MxN np.ndarray set of M (possibly degenerate) partitions of N nodes wts : Mx1 np.ndarray relative weight of each partition Returns ------- D : NxN np.ndarray weighted agreement matrix ''' ci = np.array(ci) m, n = ci.shape wts = np.array(wts) / np.sum(wts) D = np.zeros((n, n)) for i in range(m): d = dummyvar(ci[i, :].reshape(1, n)) D += np.dot(d, d.T) * wts[i] return D
def function[agreement_weighted, parameter[ci, wts]]: constant[ D = AGREEMENT_WEIGHTED(CI,WTS) is identical to AGREEMENT, with the exception that each partitions contribution is weighted according to the corresponding scalar value stored in the vector WTS. As an example, suppose CI contained partitions obtained using some heuristic for maximizing modularity. A possible choice for WTS might be the Q metric (Newman's modularity score). Such a choice would add more weight to higher modularity partitions. NOTE: Unlike AGREEMENT, this script does not have the input argument BUFFSZ. Parameters ---------- ci : MxN np.ndarray set of M (possibly degenerate) partitions of N nodes wts : Mx1 np.ndarray relative weight of each partition Returns ------- D : NxN np.ndarray weighted agreement matrix ] variable[ci] assign[=] call[name[np].array, parameter[name[ci]]] <ast.Tuple object at 0x7da1b0840130> assign[=] name[ci].shape variable[wts] assign[=] binary_operation[call[name[np].array, parameter[name[wts]]] / call[name[np].sum, parameter[name[wts]]]] variable[D] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0840430>, <ast.Name object at 0x7da1b0840790>]]]] for taget[name[i]] in starred[call[name[range], parameter[name[m]]]] begin[:] variable[d] assign[=] call[name[dummyvar], parameter[call[call[name[ci]][tuple[[<ast.Name object at 0x7da1b08407c0>, <ast.Slice object at 0x7da1b0840400>]]].reshape, parameter[constant[1], name[n]]]]] <ast.AugAssign object at 0x7da1b08416f0> return[name[D]]
keyword[def] identifier[agreement_weighted] ( identifier[ci] , identifier[wts] ): literal[string] identifier[ci] = identifier[np] . identifier[array] ( identifier[ci] ) identifier[m] , identifier[n] = identifier[ci] . identifier[shape] identifier[wts] = identifier[np] . identifier[array] ( identifier[wts] )/ identifier[np] . identifier[sum] ( identifier[wts] ) identifier[D] = identifier[np] . identifier[zeros] (( identifier[n] , identifier[n] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[m] ): identifier[d] = identifier[dummyvar] ( identifier[ci] [ identifier[i] ,:]. identifier[reshape] ( literal[int] , identifier[n] )) identifier[D] += identifier[np] . identifier[dot] ( identifier[d] , identifier[d] . identifier[T] )* identifier[wts] [ identifier[i] ] keyword[return] identifier[D]
def agreement_weighted(ci, wts): """ D = AGREEMENT_WEIGHTED(CI,WTS) is identical to AGREEMENT, with the exception that each partitions contribution is weighted according to the corresponding scalar value stored in the vector WTS. As an example, suppose CI contained partitions obtained using some heuristic for maximizing modularity. A possible choice for WTS might be the Q metric (Newman's modularity score). Such a choice would add more weight to higher modularity partitions. NOTE: Unlike AGREEMENT, this script does not have the input argument BUFFSZ. Parameters ---------- ci : MxN np.ndarray set of M (possibly degenerate) partitions of N nodes wts : Mx1 np.ndarray relative weight of each partition Returns ------- D : NxN np.ndarray weighted agreement matrix """ ci = np.array(ci) (m, n) = ci.shape wts = np.array(wts) / np.sum(wts) D = np.zeros((n, n)) for i in range(m): d = dummyvar(ci[i, :].reshape(1, n)) D += np.dot(d, d.T) * wts[i] # depends on [control=['for'], data=['i']] return D
def tree(self, path, max_depth, full_path=False, include_stat=False): """DFS generator which starts from a given path and goes up to a max depth. :param path: path from which the DFS will start :param max_depth: max depth of DFS (0 means no limit) :param full_path: should the full path of the child node be returned :param include_stat: return the child Znode's stat along with the name & level """ for child_level_stat in self.do_tree(path, max_depth, 0, full_path, include_stat): yield child_level_stat
def function[tree, parameter[self, path, max_depth, full_path, include_stat]]: constant[DFS generator which starts from a given path and goes up to a max depth. :param path: path from which the DFS will start :param max_depth: max depth of DFS (0 means no limit) :param full_path: should the full path of the child node be returned :param include_stat: return the child Znode's stat along with the name & level ] for taget[name[child_level_stat]] in starred[call[name[self].do_tree, parameter[name[path], name[max_depth], constant[0], name[full_path], name[include_stat]]]] begin[:] <ast.Yield object at 0x7da20c993940>
keyword[def] identifier[tree] ( identifier[self] , identifier[path] , identifier[max_depth] , identifier[full_path] = keyword[False] , identifier[include_stat] = keyword[False] ): literal[string] keyword[for] identifier[child_level_stat] keyword[in] identifier[self] . identifier[do_tree] ( identifier[path] , identifier[max_depth] , literal[int] , identifier[full_path] , identifier[include_stat] ): keyword[yield] identifier[child_level_stat]
def tree(self, path, max_depth, full_path=False, include_stat=False): """DFS generator which starts from a given path and goes up to a max depth. :param path: path from which the DFS will start :param max_depth: max depth of DFS (0 means no limit) :param full_path: should the full path of the child node be returned :param include_stat: return the child Znode's stat along with the name & level """ for child_level_stat in self.do_tree(path, max_depth, 0, full_path, include_stat): yield child_level_stat # depends on [control=['for'], data=['child_level_stat']]
def apply_vnc_actions(self, vnc_actions): """ Play a list of vnc_actions forward over the current keysyms state NOTE: Since we are squashing a set of diffs into a single keyboard state, some information may be lost. For example if the Z key is down, then we receive [(Z-up), (Z-down)], the output will not reflect any change in Z You can make each frame shorter to offset this effect. """ for event in vnc_actions: if isinstance(event, spaces.KeyEvent): if event.down: self._down_keysyms.add(event.key) else: self._down_keysyms.discard(event.key) logger.debug("AtariKeyState._down_keysyms: {}".format(self._down_keysyms))
def function[apply_vnc_actions, parameter[self, vnc_actions]]: constant[ Play a list of vnc_actions forward over the current keysyms state NOTE: Since we are squashing a set of diffs into a single keyboard state, some information may be lost. For example if the Z key is down, then we receive [(Z-up), (Z-down)], the output will not reflect any change in Z You can make each frame shorter to offset this effect. ] for taget[name[event]] in starred[name[vnc_actions]] begin[:] if call[name[isinstance], parameter[name[event], name[spaces].KeyEvent]] begin[:] if name[event].down begin[:] call[name[self]._down_keysyms.add, parameter[name[event].key]] call[name[logger].debug, parameter[call[constant[AtariKeyState._down_keysyms: {}].format, parameter[name[self]._down_keysyms]]]]
keyword[def] identifier[apply_vnc_actions] ( identifier[self] , identifier[vnc_actions] ): literal[string] keyword[for] identifier[event] keyword[in] identifier[vnc_actions] : keyword[if] identifier[isinstance] ( identifier[event] , identifier[spaces] . identifier[KeyEvent] ): keyword[if] identifier[event] . identifier[down] : identifier[self] . identifier[_down_keysyms] . identifier[add] ( identifier[event] . identifier[key] ) keyword[else] : identifier[self] . identifier[_down_keysyms] . identifier[discard] ( identifier[event] . identifier[key] ) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[_down_keysyms] ))
def apply_vnc_actions(self, vnc_actions): """ Play a list of vnc_actions forward over the current keysyms state NOTE: Since we are squashing a set of diffs into a single keyboard state, some information may be lost. For example if the Z key is down, then we receive [(Z-up), (Z-down)], the output will not reflect any change in Z You can make each frame shorter to offset this effect. """ for event in vnc_actions: if isinstance(event, spaces.KeyEvent): if event.down: self._down_keysyms.add(event.key) # depends on [control=['if'], data=[]] else: self._down_keysyms.discard(event.key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['event']] logger.debug('AtariKeyState._down_keysyms: {}'.format(self._down_keysyms))
def links(self): """Yields all links in the page""" for anchor in self.parsed.findall(".//a"): if anchor.get("href"): href = anchor.get("href") url = self.clean_link( urllib_parse.urljoin(self.base_url, href) ) # Determine if this link is internal. If that distinction # doesn't make sense in this context, then we don't make # any distinction. internal = None if self.api_version and self.api_version >= 2: # Only api_versions >= 2 have a distinction between # external and internal links internal = bool( anchor.get("rel") and "internal" in anchor.get("rel").split() ) yield Link(url, self, internal=internal)
def function[links, parameter[self]]: constant[Yields all links in the page] for taget[name[anchor]] in starred[call[name[self].parsed.findall, parameter[constant[.//a]]]] begin[:] if call[name[anchor].get, parameter[constant[href]]] begin[:] variable[href] assign[=] call[name[anchor].get, parameter[constant[href]]] variable[url] assign[=] call[name[self].clean_link, parameter[call[name[urllib_parse].urljoin, parameter[name[self].base_url, name[href]]]]] variable[internal] assign[=] constant[None] if <ast.BoolOp object at 0x7da1b26ae200> begin[:] variable[internal] assign[=] call[name[bool], parameter[<ast.BoolOp object at 0x7da204622f20>]] <ast.Yield object at 0x7da204623730>
keyword[def] identifier[links] ( identifier[self] ): literal[string] keyword[for] identifier[anchor] keyword[in] identifier[self] . identifier[parsed] . identifier[findall] ( literal[string] ): keyword[if] identifier[anchor] . identifier[get] ( literal[string] ): identifier[href] = identifier[anchor] . identifier[get] ( literal[string] ) identifier[url] = identifier[self] . identifier[clean_link] ( identifier[urllib_parse] . identifier[urljoin] ( identifier[self] . identifier[base_url] , identifier[href] ) ) identifier[internal] = keyword[None] keyword[if] identifier[self] . identifier[api_version] keyword[and] identifier[self] . identifier[api_version] >= literal[int] : identifier[internal] = identifier[bool] ( identifier[anchor] . identifier[get] ( literal[string] ) keyword[and] literal[string] keyword[in] identifier[anchor] . identifier[get] ( literal[string] ). identifier[split] () ) keyword[yield] identifier[Link] ( identifier[url] , identifier[self] , identifier[internal] = identifier[internal] )
def links(self): """Yields all links in the page""" for anchor in self.parsed.findall('.//a'): if anchor.get('href'): href = anchor.get('href') url = self.clean_link(urllib_parse.urljoin(self.base_url, href)) # Determine if this link is internal. If that distinction # doesn't make sense in this context, then we don't make # any distinction. internal = None if self.api_version and self.api_version >= 2: # Only api_versions >= 2 have a distinction between # external and internal links internal = bool(anchor.get('rel') and 'internal' in anchor.get('rel').split()) # depends on [control=['if'], data=[]] yield Link(url, self, internal=internal) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['anchor']]
def progressbar(total, pos, msg=""): """ Given a total and a progress position, output a progress bar to stderr. It is important to not output anything else while using this, as it relies soley on the behavior of carriage return (\\r). Can also take an optioal message to add after the progressbar. It must not contain newlines. The progress bar will look something like this: [099/500][=========...............................] ETA: 13:36:59 Of course, the ETA part should be supplied be the calling function. """ width = get_terminal_size()[0] - 40 rel_pos = int(float(pos) / total * width) bar = ''.join(["=" * rel_pos, "." * (width - rel_pos)]) # Determine how many digits in total (base 10) digits_total = len(str(total)) fmt_width = "%0" + str(digits_total) + "d" fmt = "\r[" + fmt_width + "/" + fmt_width + "][%s] %s" progress_stream.write(fmt % (pos, total, bar, msg))
def function[progressbar, parameter[total, pos, msg]]: constant[ Given a total and a progress position, output a progress bar to stderr. It is important to not output anything else while using this, as it relies soley on the behavior of carriage return (\r). Can also take an optioal message to add after the progressbar. It must not contain newlines. The progress bar will look something like this: [099/500][=========...............................] ETA: 13:36:59 Of course, the ETA part should be supplied be the calling function. ] variable[width] assign[=] binary_operation[call[call[name[get_terminal_size], parameter[]]][constant[0]] - constant[40]] variable[rel_pos] assign[=] call[name[int], parameter[binary_operation[binary_operation[call[name[float], parameter[name[pos]]] / name[total]] * name[width]]]] variable[bar] assign[=] call[constant[].join, parameter[list[[<ast.BinOp object at 0x7da18f58ccd0>, <ast.BinOp object at 0x7da18f58d2d0>]]]] variable[digits_total] assign[=] call[name[len], parameter[call[name[str], parameter[name[total]]]]] variable[fmt_width] assign[=] binary_operation[binary_operation[constant[%0] + call[name[str], parameter[name[digits_total]]]] + constant[d]] variable[fmt] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[ [] + name[fmt_width]] + constant[/]] + name[fmt_width]] + constant[][%s] %s]] call[name[progress_stream].write, parameter[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f58fc40>, <ast.Name object at 0x7da18f58c1f0>, <ast.Name object at 0x7da18f58e7a0>, <ast.Name object at 0x7da18f58f280>]]]]]
keyword[def] identifier[progressbar] ( identifier[total] , identifier[pos] , identifier[msg] = literal[string] ): literal[string] identifier[width] = identifier[get_terminal_size] ()[ literal[int] ]- literal[int] identifier[rel_pos] = identifier[int] ( identifier[float] ( identifier[pos] )/ identifier[total] * identifier[width] ) identifier[bar] = literal[string] . identifier[join] ([ literal[string] * identifier[rel_pos] , literal[string] *( identifier[width] - identifier[rel_pos] )]) identifier[digits_total] = identifier[len] ( identifier[str] ( identifier[total] )) identifier[fmt_width] = literal[string] + identifier[str] ( identifier[digits_total] )+ literal[string] identifier[fmt] = literal[string] + identifier[fmt_width] + literal[string] + identifier[fmt_width] + literal[string] identifier[progress_stream] . identifier[write] ( identifier[fmt] %( identifier[pos] , identifier[total] , identifier[bar] , identifier[msg] ))
def progressbar(total, pos, msg=''): """ Given a total and a progress position, output a progress bar to stderr. It is important to not output anything else while using this, as it relies soley on the behavior of carriage return (\\r). Can also take an optioal message to add after the progressbar. It must not contain newlines. The progress bar will look something like this: [099/500][=========...............................] ETA: 13:36:59 Of course, the ETA part should be supplied be the calling function. """ width = get_terminal_size()[0] - 40 rel_pos = int(float(pos) / total * width) bar = ''.join(['=' * rel_pos, '.' * (width - rel_pos)]) # Determine how many digits in total (base 10) digits_total = len(str(total)) fmt_width = '%0' + str(digits_total) + 'd' fmt = '\r[' + fmt_width + '/' + fmt_width + '][%s] %s' progress_stream.write(fmt % (pos, total, bar, msg))
def multihead_attention(query_antecedent, memory_antecedent, bias, total_key_depth, total_value_depth, output_depth, num_heads, dropout_rate, shared_rel=False, max_relative_position=None, image_shapes=None, attention_type="dot_product", block_length=128, block_width=128, q_filter_width=1, kv_filter_width=1, q_padding="VALID", kv_padding="VALID", cache=None, gap_size=0, num_memory_blocks=2, name="multihead_attention", save_weights_to=None, make_image_summary=True, dropout_broadcast_dims=None, max_length=None, vars_3d=False, scale_dotproduct=True, **kwargs): """Multihead scaled-dot-product attention with input/output transformations. Args: query_antecedent: a Tensor with shape [batch, length_q, channels] memory_antecedent: a Tensor with shape [batch, length_m, channels] or None bias: bias Tensor (see attention_bias()) total_key_depth: an integer total_value_depth: an integer output_depth: an integer num_heads: an integer dividing total_key_depth and total_value_depth dropout_rate: a floating point number shared_rel: boolean to share relative embeddings max_relative_position: Maximum distance between inputs to generate unique relation embeddings for. Only relevant when using "dot_product_relative" attention. image_shapes: optional tuple of integer scalars. see comments for attention_image_summary() attention_type: a string, either "dot_product", "dot_product_relative", "local_mask_right", "local_unmasked", "masked_dilated_1d", "unmasked_dilated_1d", graph, or any attention function with the signature (query, key, value, **kwargs) block_length: an integer - relevant for "local_mask_right" block_width: an integer - relevant for "local_unmasked" q_filter_width: An integer specifying how wide you want the query to be. kv_filter_width: An integer specifying how wide you want the keys and values to be. q_padding: One of "VALID", "SAME" or "LEFT". Default is VALID: No padding. kv_padding: One of "VALID", "SAME" or "LEFT". Default is "VALID": no padding. 
cache: dict containing Tensors which are the results of previous attentions, used for fast decoding. Expects the dict to contrain two keys ('k' and 'v'), for the initial call the values for these keys should be empty Tensors of the appropriate shape. 'k' [batch_size, 0, key_channels] 'v' [batch_size, 0, value_channels] gap_size: Integer option for dilated attention to indicate spacing between memory blocks. num_memory_blocks: Integer option to indicate how many memory blocks to look at. name: an optional string. save_weights_to: an optional dictionary to capture attention weights for vizualization; the weights tensor will be appended there under a string key created from the variable scope (including name). make_image_summary: Whether to make an attention image summary. dropout_broadcast_dims: an optional list of integers less than 4 specifying in which dimensions to broadcast the dropout decisions. saves memory. max_length: an integer - needed by relative attention vars_3d: use 3-dimensional variables for input/output transformations scale_dotproduct: whether to normalize the attention product. **kwargs (dict): Parameters for the attention function Caching: WARNING: For decoder self-attention, i.e. when memory_antecedent == None, the caching assumes that the bias contains future masking. The caching works by saving all the previous key and value values so that you are able to send just the last query location to this attention function. I.e. if the cache dict is provided it assumes the query is of the shape [batch_size, 1, hidden_dim] rather than the full memory. Returns: The result of the attention transformation. The output shape is [batch_size, length_q, hidden_dim] unless the cache dict is provided in which case only the last memory position is calculated and the output shape is [batch_size, 1, hidden_dim] Optionally returns an additional loss parameters (ex: load balance loss for the experts) returned by the attention_type function. 
Raises: ValueError: if the key depth or value depth are not divisible by the number of attention heads. """ if total_key_depth % num_heads != 0: raise ValueError("Key depth (%d) must be divisible by the number of " "attention heads (%d)." % (total_key_depth, num_heads)) if total_value_depth % num_heads != 0: raise ValueError("Value depth (%d) must be divisible by the number of " "attention heads (%d)." % (total_value_depth, num_heads)) vars_3d_num_heads = num_heads if vars_3d else 0 with tf.variable_scope(name, default_name="multihead_attention", values=[query_antecedent, memory_antecedent]): if cache is None or memory_antecedent is None: q, k, v = common_attention.compute_qkv( query_antecedent, memory_antecedent, total_key_depth, total_value_depth, q_filter_width, kv_filter_width, q_padding, kv_padding, vars_3d_num_heads=vars_3d_num_heads) if cache is not None: if attention_type != "dot_product": # TODO(petershaw): Support caching when using relative position # representations, i.e. "dot_product_relative" attention. raise NotImplementedError( "Caching is not guaranteed to work with attention types other than" " dot_product.") if bias is None: raise ValueError("Bias required for caching. See function docstring " "for details.") if memory_antecedent is not None: # Encoder-Decoder Attention Cache q = common_attention.compute_attention_component( query_antecedent, total_key_depth, q_filter_width, q_padding, "q", vars_3d_num_heads=vars_3d_num_heads) k = cache["k_encdec"] v = cache["v_encdec"] else: k = common_attention.split_heads(k, num_heads) v = common_attention.split_heads(v, num_heads) decode_loop_step = kwargs.get("decode_loop_step") if decode_loop_step is None: k = cache["k"] = tf.concat([cache["k"], k], axis=2) v = cache["v"] = tf.concat([cache["v"], v], axis=2) else: # Inplace update is required for inference on TPU. # Inplace_ops only supports inplace_update on the first dimension. 
# The performance of current implementation is better than updating # the tensor by adding the result of matmul(one_hot, # update_in_current_step) tmp_k = tf.transpose(cache["k"], perm=[2, 0, 1, 3]) tmp_k = inplace_ops.alias_inplace_update( tmp_k, decode_loop_step, tf.squeeze(k, axis=2)) k = cache["k"] = tf.transpose(tmp_k, perm=[1, 2, 0, 3]) tmp_v = tf.transpose(cache["v"], perm=[2, 0, 1, 3]) tmp_v = inplace_ops.alias_inplace_update( tmp_v, decode_loop_step, tf.squeeze(v, axis=2)) v = cache["v"] = tf.transpose(tmp_v, perm=[1, 2, 0, 3]) q = common_attention.split_heads(q, num_heads) if cache is None: k = common_attention.split_heads(k, num_heads) v = common_attention.split_heads(v, num_heads) key_depth_per_head = total_key_depth // num_heads if not vars_3d: if scale_dotproduct: q *= key_depth_per_head**-0.5 additional_returned_value = None if callable(attention_type): # Generic way to extend multihead_attention x = attention_type(q, k, v, **kwargs) if isinstance(x, tuple): x, additional_returned_value = x # Unpack elif attention_type == "dot_product": x = common_attention.dot_product_attention( q, k, v, bias, dropout_rate, image_shapes, save_weights_to=save_weights_to, make_image_summary=make_image_summary, dropout_broadcast_dims=dropout_broadcast_dims) elif attention_type == "dot_product_relative": x = common_attention.dot_product_attention_relative( q, k, v, bias, max_relative_position, dropout_rate, image_shapes, make_image_summary=make_image_summary) elif attention_type == "dot_product_relative_v2": x = common_attention.dot_product_self_attention_relative_v2( q, k, v, bias, max_length, dropout_rate, image_shapes, make_image_summary=make_image_summary, dropout_broadcast_dims=dropout_broadcast_dims) elif attention_type == "local_within_block_mask_right": x = common_attention.masked_within_block_local_attention_1d( q, k, v, block_length=block_length) elif attention_type == "rel_local_mask_right": x = common_attention.masked_rel_local_attention_1d( q, k, v, 
block_length=block_length, make_image_summary=make_image_summary, dropout_rate=dropout_rate, share_rel_embed=shared_rel) elif attention_type == "local_mask_right": x = common_attention.masked_local_attention_1d( q, k, v, block_length=block_length, make_image_summary=make_image_summary) elif attention_type == "local_unmasked": x = common_attention.local_attention_1d( q, k, v, block_length=block_length, filter_width=block_width) elif attention_type == "masked_dilated_1d": x = common_attention.masked_dilated_self_attention_1d( q, k, v, block_length, block_width, gap_size, num_memory_blocks) else: assert attention_type == "unmasked_dilated_1d" x = common_attention.dilated_self_attention_1d( q, k, v, block_length, block_width, gap_size, num_memory_blocks) x = common_attention.combine_heads(x) # Set last dim specifically. x.set_shape(x.shape.as_list()[:-1] + [total_value_depth]) if vars_3d: o_var = tf.get_variable( "o", [num_heads, total_value_depth // num_heads, output_depth]) o_var = tf.cast(o_var, x.dtype) o_var = tf.reshape(o_var, [total_value_depth, output_depth]) x = tf.tensordot(x, o_var, axes=1) else: x = common_layers.dense( x, output_depth, use_bias=False, name="output_transform") if additional_returned_value is not None: return x, additional_returned_value return x
def function[multihead_attention, parameter[query_antecedent, memory_antecedent, bias, total_key_depth, total_value_depth, output_depth, num_heads, dropout_rate, shared_rel, max_relative_position, image_shapes, attention_type, block_length, block_width, q_filter_width, kv_filter_width, q_padding, kv_padding, cache, gap_size, num_memory_blocks, name, save_weights_to, make_image_summary, dropout_broadcast_dims, max_length, vars_3d, scale_dotproduct]]: constant[Multihead scaled-dot-product attention with input/output transformations. Args: query_antecedent: a Tensor with shape [batch, length_q, channels] memory_antecedent: a Tensor with shape [batch, length_m, channels] or None bias: bias Tensor (see attention_bias()) total_key_depth: an integer total_value_depth: an integer output_depth: an integer num_heads: an integer dividing total_key_depth and total_value_depth dropout_rate: a floating point number shared_rel: boolean to share relative embeddings max_relative_position: Maximum distance between inputs to generate unique relation embeddings for. Only relevant when using "dot_product_relative" attention. image_shapes: optional tuple of integer scalars. see comments for attention_image_summary() attention_type: a string, either "dot_product", "dot_product_relative", "local_mask_right", "local_unmasked", "masked_dilated_1d", "unmasked_dilated_1d", graph, or any attention function with the signature (query, key, value, **kwargs) block_length: an integer - relevant for "local_mask_right" block_width: an integer - relevant for "local_unmasked" q_filter_width: An integer specifying how wide you want the query to be. kv_filter_width: An integer specifying how wide you want the keys and values to be. q_padding: One of "VALID", "SAME" or "LEFT". Default is VALID: No padding. kv_padding: One of "VALID", "SAME" or "LEFT". Default is "VALID": no padding. cache: dict containing Tensors which are the results of previous attentions, used for fast decoding. 
Expects the dict to contrain two keys ('k' and 'v'), for the initial call the values for these keys should be empty Tensors of the appropriate shape. 'k' [batch_size, 0, key_channels] 'v' [batch_size, 0, value_channels] gap_size: Integer option for dilated attention to indicate spacing between memory blocks. num_memory_blocks: Integer option to indicate how many memory blocks to look at. name: an optional string. save_weights_to: an optional dictionary to capture attention weights for vizualization; the weights tensor will be appended there under a string key created from the variable scope (including name). make_image_summary: Whether to make an attention image summary. dropout_broadcast_dims: an optional list of integers less than 4 specifying in which dimensions to broadcast the dropout decisions. saves memory. max_length: an integer - needed by relative attention vars_3d: use 3-dimensional variables for input/output transformations scale_dotproduct: whether to normalize the attention product. **kwargs (dict): Parameters for the attention function Caching: WARNING: For decoder self-attention, i.e. when memory_antecedent == None, the caching assumes that the bias contains future masking. The caching works by saving all the previous key and value values so that you are able to send just the last query location to this attention function. I.e. if the cache dict is provided it assumes the query is of the shape [batch_size, 1, hidden_dim] rather than the full memory. Returns: The result of the attention transformation. The output shape is [batch_size, length_q, hidden_dim] unless the cache dict is provided in which case only the last memory position is calculated and the output shape is [batch_size, 1, hidden_dim] Optionally returns an additional loss parameters (ex: load balance loss for the experts) returned by the attention_type function. Raises: ValueError: if the key depth or value depth are not divisible by the number of attention heads. 
] if compare[binary_operation[name[total_key_depth] <ast.Mod object at 0x7da2590d6920> name[num_heads]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da20e954160> if compare[binary_operation[name[total_value_depth] <ast.Mod object at 0x7da2590d6920> name[num_heads]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da20e9555d0> variable[vars_3d_num_heads] assign[=] <ast.IfExp object at 0x7da20e956020> with call[name[tf].variable_scope, parameter[name[name]]] begin[:] if <ast.BoolOp object at 0x7da20e957610> begin[:] <ast.Tuple object at 0x7da20e956f80> assign[=] call[name[common_attention].compute_qkv, parameter[name[query_antecedent], name[memory_antecedent], name[total_key_depth], name[total_value_depth], name[q_filter_width], name[kv_filter_width], name[q_padding], name[kv_padding]]] if compare[name[cache] is_not constant[None]] begin[:] if compare[name[attention_type] not_equal[!=] constant[dot_product]] begin[:] <ast.Raise object at 0x7da20e957700> if compare[name[bias] is constant[None]] begin[:] <ast.Raise object at 0x7da20e954f70> if compare[name[memory_antecedent] is_not constant[None]] begin[:] variable[q] assign[=] call[name[common_attention].compute_attention_component, parameter[name[query_antecedent], name[total_key_depth], name[q_filter_width], name[q_padding], constant[q]]] variable[k] assign[=] call[name[cache]][constant[k_encdec]] variable[v] assign[=] call[name[cache]][constant[v_encdec]] variable[q] assign[=] call[name[common_attention].split_heads, parameter[name[q], name[num_heads]]] if compare[name[cache] is constant[None]] begin[:] variable[k] assign[=] call[name[common_attention].split_heads, parameter[name[k], name[num_heads]]] variable[v] assign[=] call[name[common_attention].split_heads, parameter[name[v], name[num_heads]]] variable[key_depth_per_head] assign[=] binary_operation[name[total_key_depth] <ast.FloorDiv object at 0x7da2590d6bc0> name[num_heads]] if <ast.UnaryOp object at 0x7da20c76d150> begin[:] if 
name[scale_dotproduct] begin[:] <ast.AugAssign object at 0x7da20c76eb30> variable[additional_returned_value] assign[=] constant[None] if call[name[callable], parameter[name[attention_type]]] begin[:] variable[x] assign[=] call[name[attention_type], parameter[name[q], name[k], name[v]]] if call[name[isinstance], parameter[name[x], name[tuple]]] begin[:] <ast.Tuple object at 0x7da20c76d0f0> assign[=] name[x] variable[x] assign[=] call[name[common_attention].combine_heads, parameter[name[x]]] call[name[x].set_shape, parameter[binary_operation[call[call[name[x].shape.as_list, parameter[]]][<ast.Slice object at 0x7da20c76e500>] + list[[<ast.Name object at 0x7da20c76c760>]]]]] if name[vars_3d] begin[:] variable[o_var] assign[=] call[name[tf].get_variable, parameter[constant[o], list[[<ast.Name object at 0x7da20c76f5e0>, <ast.BinOp object at 0x7da20c76d8a0>, <ast.Name object at 0x7da20c76e7a0>]]]] variable[o_var] assign[=] call[name[tf].cast, parameter[name[o_var], name[x].dtype]] variable[o_var] assign[=] call[name[tf].reshape, parameter[name[o_var], list[[<ast.Name object at 0x7da20c76fcd0>, <ast.Name object at 0x7da20c76fa90>]]]] variable[x] assign[=] call[name[tf].tensordot, parameter[name[x], name[o_var]]] if compare[name[additional_returned_value] is_not constant[None]] begin[:] return[tuple[[<ast.Name object at 0x7da20c76d390>, <ast.Name object at 0x7da20c76ebf0>]]] return[name[x]]
keyword[def] identifier[multihead_attention] ( identifier[query_antecedent] , identifier[memory_antecedent] , identifier[bias] , identifier[total_key_depth] , identifier[total_value_depth] , identifier[output_depth] , identifier[num_heads] , identifier[dropout_rate] , identifier[shared_rel] = keyword[False] , identifier[max_relative_position] = keyword[None] , identifier[image_shapes] = keyword[None] , identifier[attention_type] = literal[string] , identifier[block_length] = literal[int] , identifier[block_width] = literal[int] , identifier[q_filter_width] = literal[int] , identifier[kv_filter_width] = literal[int] , identifier[q_padding] = literal[string] , identifier[kv_padding] = literal[string] , identifier[cache] = keyword[None] , identifier[gap_size] = literal[int] , identifier[num_memory_blocks] = literal[int] , identifier[name] = literal[string] , identifier[save_weights_to] = keyword[None] , identifier[make_image_summary] = keyword[True] , identifier[dropout_broadcast_dims] = keyword[None] , identifier[max_length] = keyword[None] , identifier[vars_3d] = keyword[False] , identifier[scale_dotproduct] = keyword[True] , ** identifier[kwargs] ): literal[string] keyword[if] identifier[total_key_depth] % identifier[num_heads] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] %( identifier[total_key_depth] , identifier[num_heads] )) keyword[if] identifier[total_value_depth] % identifier[num_heads] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] %( identifier[total_value_depth] , identifier[num_heads] )) identifier[vars_3d_num_heads] = identifier[num_heads] keyword[if] identifier[vars_3d] keyword[else] literal[int] keyword[with] identifier[tf] . 
identifier[variable_scope] ( identifier[name] , identifier[default_name] = literal[string] , identifier[values] =[ identifier[query_antecedent] , identifier[memory_antecedent] ]): keyword[if] identifier[cache] keyword[is] keyword[None] keyword[or] identifier[memory_antecedent] keyword[is] keyword[None] : identifier[q] , identifier[k] , identifier[v] = identifier[common_attention] . identifier[compute_qkv] ( identifier[query_antecedent] , identifier[memory_antecedent] , identifier[total_key_depth] , identifier[total_value_depth] , identifier[q_filter_width] , identifier[kv_filter_width] , identifier[q_padding] , identifier[kv_padding] , identifier[vars_3d_num_heads] = identifier[vars_3d_num_heads] ) keyword[if] identifier[cache] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[attention_type] != literal[string] : keyword[raise] identifier[NotImplementedError] ( literal[string] literal[string] ) keyword[if] identifier[bias] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) keyword[if] identifier[memory_antecedent] keyword[is] keyword[not] keyword[None] : identifier[q] = identifier[common_attention] . identifier[compute_attention_component] ( identifier[query_antecedent] , identifier[total_key_depth] , identifier[q_filter_width] , identifier[q_padding] , literal[string] , identifier[vars_3d_num_heads] = identifier[vars_3d_num_heads] ) identifier[k] = identifier[cache] [ literal[string] ] identifier[v] = identifier[cache] [ literal[string] ] keyword[else] : identifier[k] = identifier[common_attention] . identifier[split_heads] ( identifier[k] , identifier[num_heads] ) identifier[v] = identifier[common_attention] . identifier[split_heads] ( identifier[v] , identifier[num_heads] ) identifier[decode_loop_step] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[if] identifier[decode_loop_step] keyword[is] keyword[None] : identifier[k] = identifier[cache] [ literal[string] ]= identifier[tf] . 
identifier[concat] ([ identifier[cache] [ literal[string] ], identifier[k] ], identifier[axis] = literal[int] ) identifier[v] = identifier[cache] [ literal[string] ]= identifier[tf] . identifier[concat] ([ identifier[cache] [ literal[string] ], identifier[v] ], identifier[axis] = literal[int] ) keyword[else] : identifier[tmp_k] = identifier[tf] . identifier[transpose] ( identifier[cache] [ literal[string] ], identifier[perm] =[ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[tmp_k] = identifier[inplace_ops] . identifier[alias_inplace_update] ( identifier[tmp_k] , identifier[decode_loop_step] , identifier[tf] . identifier[squeeze] ( identifier[k] , identifier[axis] = literal[int] )) identifier[k] = identifier[cache] [ literal[string] ]= identifier[tf] . identifier[transpose] ( identifier[tmp_k] , identifier[perm] =[ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[tmp_v] = identifier[tf] . identifier[transpose] ( identifier[cache] [ literal[string] ], identifier[perm] =[ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[tmp_v] = identifier[inplace_ops] . identifier[alias_inplace_update] ( identifier[tmp_v] , identifier[decode_loop_step] , identifier[tf] . identifier[squeeze] ( identifier[v] , identifier[axis] = literal[int] )) identifier[v] = identifier[cache] [ literal[string] ]= identifier[tf] . identifier[transpose] ( identifier[tmp_v] , identifier[perm] =[ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[q] = identifier[common_attention] . identifier[split_heads] ( identifier[q] , identifier[num_heads] ) keyword[if] identifier[cache] keyword[is] keyword[None] : identifier[k] = identifier[common_attention] . identifier[split_heads] ( identifier[k] , identifier[num_heads] ) identifier[v] = identifier[common_attention] . 
identifier[split_heads] ( identifier[v] , identifier[num_heads] ) identifier[key_depth_per_head] = identifier[total_key_depth] // identifier[num_heads] keyword[if] keyword[not] identifier[vars_3d] : keyword[if] identifier[scale_dotproduct] : identifier[q] *= identifier[key_depth_per_head] **- literal[int] identifier[additional_returned_value] = keyword[None] keyword[if] identifier[callable] ( identifier[attention_type] ): identifier[x] = identifier[attention_type] ( identifier[q] , identifier[k] , identifier[v] ,** identifier[kwargs] ) keyword[if] identifier[isinstance] ( identifier[x] , identifier[tuple] ): identifier[x] , identifier[additional_returned_value] = identifier[x] keyword[elif] identifier[attention_type] == literal[string] : identifier[x] = identifier[common_attention] . identifier[dot_product_attention] ( identifier[q] , identifier[k] , identifier[v] , identifier[bias] , identifier[dropout_rate] , identifier[image_shapes] , identifier[save_weights_to] = identifier[save_weights_to] , identifier[make_image_summary] = identifier[make_image_summary] , identifier[dropout_broadcast_dims] = identifier[dropout_broadcast_dims] ) keyword[elif] identifier[attention_type] == literal[string] : identifier[x] = identifier[common_attention] . identifier[dot_product_attention_relative] ( identifier[q] , identifier[k] , identifier[v] , identifier[bias] , identifier[max_relative_position] , identifier[dropout_rate] , identifier[image_shapes] , identifier[make_image_summary] = identifier[make_image_summary] ) keyword[elif] identifier[attention_type] == literal[string] : identifier[x] = identifier[common_attention] . 
identifier[dot_product_self_attention_relative_v2] ( identifier[q] , identifier[k] , identifier[v] , identifier[bias] , identifier[max_length] , identifier[dropout_rate] , identifier[image_shapes] , identifier[make_image_summary] = identifier[make_image_summary] , identifier[dropout_broadcast_dims] = identifier[dropout_broadcast_dims] ) keyword[elif] identifier[attention_type] == literal[string] : identifier[x] = identifier[common_attention] . identifier[masked_within_block_local_attention_1d] ( identifier[q] , identifier[k] , identifier[v] , identifier[block_length] = identifier[block_length] ) keyword[elif] identifier[attention_type] == literal[string] : identifier[x] = identifier[common_attention] . identifier[masked_rel_local_attention_1d] ( identifier[q] , identifier[k] , identifier[v] , identifier[block_length] = identifier[block_length] , identifier[make_image_summary] = identifier[make_image_summary] , identifier[dropout_rate] = identifier[dropout_rate] , identifier[share_rel_embed] = identifier[shared_rel] ) keyword[elif] identifier[attention_type] == literal[string] : identifier[x] = identifier[common_attention] . identifier[masked_local_attention_1d] ( identifier[q] , identifier[k] , identifier[v] , identifier[block_length] = identifier[block_length] , identifier[make_image_summary] = identifier[make_image_summary] ) keyword[elif] identifier[attention_type] == literal[string] : identifier[x] = identifier[common_attention] . identifier[local_attention_1d] ( identifier[q] , identifier[k] , identifier[v] , identifier[block_length] = identifier[block_length] , identifier[filter_width] = identifier[block_width] ) keyword[elif] identifier[attention_type] == literal[string] : identifier[x] = identifier[common_attention] . 
identifier[masked_dilated_self_attention_1d] ( identifier[q] , identifier[k] , identifier[v] , identifier[block_length] , identifier[block_width] , identifier[gap_size] , identifier[num_memory_blocks] ) keyword[else] : keyword[assert] identifier[attention_type] == literal[string] identifier[x] = identifier[common_attention] . identifier[dilated_self_attention_1d] ( identifier[q] , identifier[k] , identifier[v] , identifier[block_length] , identifier[block_width] , identifier[gap_size] , identifier[num_memory_blocks] ) identifier[x] = identifier[common_attention] . identifier[combine_heads] ( identifier[x] ) identifier[x] . identifier[set_shape] ( identifier[x] . identifier[shape] . identifier[as_list] ()[:- literal[int] ]+[ identifier[total_value_depth] ]) keyword[if] identifier[vars_3d] : identifier[o_var] = identifier[tf] . identifier[get_variable] ( literal[string] ,[ identifier[num_heads] , identifier[total_value_depth] // identifier[num_heads] , identifier[output_depth] ]) identifier[o_var] = identifier[tf] . identifier[cast] ( identifier[o_var] , identifier[x] . identifier[dtype] ) identifier[o_var] = identifier[tf] . identifier[reshape] ( identifier[o_var] ,[ identifier[total_value_depth] , identifier[output_depth] ]) identifier[x] = identifier[tf] . identifier[tensordot] ( identifier[x] , identifier[o_var] , identifier[axes] = literal[int] ) keyword[else] : identifier[x] = identifier[common_layers] . identifier[dense] ( identifier[x] , identifier[output_depth] , identifier[use_bias] = keyword[False] , identifier[name] = literal[string] ) keyword[if] identifier[additional_returned_value] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[x] , identifier[additional_returned_value] keyword[return] identifier[x]
def multihead_attention(query_antecedent, memory_antecedent, bias, total_key_depth, total_value_depth, output_depth, num_heads, dropout_rate, shared_rel=False, max_relative_position=None, image_shapes=None, attention_type='dot_product', block_length=128, block_width=128, q_filter_width=1, kv_filter_width=1, q_padding='VALID', kv_padding='VALID', cache=None, gap_size=0, num_memory_blocks=2, name='multihead_attention', save_weights_to=None, make_image_summary=True, dropout_broadcast_dims=None, max_length=None, vars_3d=False, scale_dotproduct=True, **kwargs): """Multihead scaled-dot-product attention with input/output transformations. Args: query_antecedent: a Tensor with shape [batch, length_q, channels] memory_antecedent: a Tensor with shape [batch, length_m, channels] or None bias: bias Tensor (see attention_bias()) total_key_depth: an integer total_value_depth: an integer output_depth: an integer num_heads: an integer dividing total_key_depth and total_value_depth dropout_rate: a floating point number shared_rel: boolean to share relative embeddings max_relative_position: Maximum distance between inputs to generate unique relation embeddings for. Only relevant when using "dot_product_relative" attention. image_shapes: optional tuple of integer scalars. see comments for attention_image_summary() attention_type: a string, either "dot_product", "dot_product_relative", "local_mask_right", "local_unmasked", "masked_dilated_1d", "unmasked_dilated_1d", graph, or any attention function with the signature (query, key, value, **kwargs) block_length: an integer - relevant for "local_mask_right" block_width: an integer - relevant for "local_unmasked" q_filter_width: An integer specifying how wide you want the query to be. kv_filter_width: An integer specifying how wide you want the keys and values to be. q_padding: One of "VALID", "SAME" or "LEFT". Default is VALID: No padding. kv_padding: One of "VALID", "SAME" or "LEFT". Default is "VALID": no padding. 
cache: dict containing Tensors which are the results of previous attentions, used for fast decoding. Expects the dict to contrain two keys ('k' and 'v'), for the initial call the values for these keys should be empty Tensors of the appropriate shape. 'k' [batch_size, 0, key_channels] 'v' [batch_size, 0, value_channels] gap_size: Integer option for dilated attention to indicate spacing between memory blocks. num_memory_blocks: Integer option to indicate how many memory blocks to look at. name: an optional string. save_weights_to: an optional dictionary to capture attention weights for vizualization; the weights tensor will be appended there under a string key created from the variable scope (including name). make_image_summary: Whether to make an attention image summary. dropout_broadcast_dims: an optional list of integers less than 4 specifying in which dimensions to broadcast the dropout decisions. saves memory. max_length: an integer - needed by relative attention vars_3d: use 3-dimensional variables for input/output transformations scale_dotproduct: whether to normalize the attention product. **kwargs (dict): Parameters for the attention function Caching: WARNING: For decoder self-attention, i.e. when memory_antecedent == None, the caching assumes that the bias contains future masking. The caching works by saving all the previous key and value values so that you are able to send just the last query location to this attention function. I.e. if the cache dict is provided it assumes the query is of the shape [batch_size, 1, hidden_dim] rather than the full memory. Returns: The result of the attention transformation. The output shape is [batch_size, length_q, hidden_dim] unless the cache dict is provided in which case only the last memory position is calculated and the output shape is [batch_size, 1, hidden_dim] Optionally returns an additional loss parameters (ex: load balance loss for the experts) returned by the attention_type function. 
Raises: ValueError: if the key depth or value depth are not divisible by the number of attention heads. """ if total_key_depth % num_heads != 0: raise ValueError('Key depth (%d) must be divisible by the number of attention heads (%d).' % (total_key_depth, num_heads)) # depends on [control=['if'], data=[]] if total_value_depth % num_heads != 0: raise ValueError('Value depth (%d) must be divisible by the number of attention heads (%d).' % (total_value_depth, num_heads)) # depends on [control=['if'], data=[]] vars_3d_num_heads = num_heads if vars_3d else 0 with tf.variable_scope(name, default_name='multihead_attention', values=[query_antecedent, memory_antecedent]): if cache is None or memory_antecedent is None: (q, k, v) = common_attention.compute_qkv(query_antecedent, memory_antecedent, total_key_depth, total_value_depth, q_filter_width, kv_filter_width, q_padding, kv_padding, vars_3d_num_heads=vars_3d_num_heads) # depends on [control=['if'], data=[]] if cache is not None: if attention_type != 'dot_product': # TODO(petershaw): Support caching when using relative position # representations, i.e. "dot_product_relative" attention. raise NotImplementedError('Caching is not guaranteed to work with attention types other than dot_product.') # depends on [control=['if'], data=[]] if bias is None: raise ValueError('Bias required for caching. 
See function docstring for details.') # depends on [control=['if'], data=[]] if memory_antecedent is not None: # Encoder-Decoder Attention Cache q = common_attention.compute_attention_component(query_antecedent, total_key_depth, q_filter_width, q_padding, 'q', vars_3d_num_heads=vars_3d_num_heads) k = cache['k_encdec'] v = cache['v_encdec'] # depends on [control=['if'], data=[]] else: k = common_attention.split_heads(k, num_heads) v = common_attention.split_heads(v, num_heads) decode_loop_step = kwargs.get('decode_loop_step') if decode_loop_step is None: k = cache['k'] = tf.concat([cache['k'], k], axis=2) v = cache['v'] = tf.concat([cache['v'], v], axis=2) # depends on [control=['if'], data=[]] else: # Inplace update is required for inference on TPU. # Inplace_ops only supports inplace_update on the first dimension. # The performance of current implementation is better than updating # the tensor by adding the result of matmul(one_hot, # update_in_current_step) tmp_k = tf.transpose(cache['k'], perm=[2, 0, 1, 3]) tmp_k = inplace_ops.alias_inplace_update(tmp_k, decode_loop_step, tf.squeeze(k, axis=2)) k = cache['k'] = tf.transpose(tmp_k, perm=[1, 2, 0, 3]) tmp_v = tf.transpose(cache['v'], perm=[2, 0, 1, 3]) tmp_v = inplace_ops.alias_inplace_update(tmp_v, decode_loop_step, tf.squeeze(v, axis=2)) v = cache['v'] = tf.transpose(tmp_v, perm=[1, 2, 0, 3]) # depends on [control=['if'], data=['cache']] q = common_attention.split_heads(q, num_heads) if cache is None: k = common_attention.split_heads(k, num_heads) v = common_attention.split_heads(v, num_heads) # depends on [control=['if'], data=[]] key_depth_per_head = total_key_depth // num_heads if not vars_3d: if scale_dotproduct: q *= key_depth_per_head ** (-0.5) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] additional_returned_value = None if callable(attention_type): # Generic way to extend multihead_attention x = attention_type(q, k, v, **kwargs) if isinstance(x, tuple): (x, 
additional_returned_value) = x # Unpack # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif attention_type == 'dot_product': x = common_attention.dot_product_attention(q, k, v, bias, dropout_rate, image_shapes, save_weights_to=save_weights_to, make_image_summary=make_image_summary, dropout_broadcast_dims=dropout_broadcast_dims) # depends on [control=['if'], data=[]] elif attention_type == 'dot_product_relative': x = common_attention.dot_product_attention_relative(q, k, v, bias, max_relative_position, dropout_rate, image_shapes, make_image_summary=make_image_summary) # depends on [control=['if'], data=[]] elif attention_type == 'dot_product_relative_v2': x = common_attention.dot_product_self_attention_relative_v2(q, k, v, bias, max_length, dropout_rate, image_shapes, make_image_summary=make_image_summary, dropout_broadcast_dims=dropout_broadcast_dims) # depends on [control=['if'], data=[]] elif attention_type == 'local_within_block_mask_right': x = common_attention.masked_within_block_local_attention_1d(q, k, v, block_length=block_length) # depends on [control=['if'], data=[]] elif attention_type == 'rel_local_mask_right': x = common_attention.masked_rel_local_attention_1d(q, k, v, block_length=block_length, make_image_summary=make_image_summary, dropout_rate=dropout_rate, share_rel_embed=shared_rel) # depends on [control=['if'], data=[]] elif attention_type == 'local_mask_right': x = common_attention.masked_local_attention_1d(q, k, v, block_length=block_length, make_image_summary=make_image_summary) # depends on [control=['if'], data=[]] elif attention_type == 'local_unmasked': x = common_attention.local_attention_1d(q, k, v, block_length=block_length, filter_width=block_width) # depends on [control=['if'], data=[]] elif attention_type == 'masked_dilated_1d': x = common_attention.masked_dilated_self_attention_1d(q, k, v, block_length, block_width, gap_size, num_memory_blocks) # depends on [control=['if'], data=[]] else: assert 
attention_type == 'unmasked_dilated_1d' x = common_attention.dilated_self_attention_1d(q, k, v, block_length, block_width, gap_size, num_memory_blocks) x = common_attention.combine_heads(x) # Set last dim specifically. x.set_shape(x.shape.as_list()[:-1] + [total_value_depth]) if vars_3d: o_var = tf.get_variable('o', [num_heads, total_value_depth // num_heads, output_depth]) o_var = tf.cast(o_var, x.dtype) o_var = tf.reshape(o_var, [total_value_depth, output_depth]) x = tf.tensordot(x, o_var, axes=1) # depends on [control=['if'], data=[]] else: x = common_layers.dense(x, output_depth, use_bias=False, name='output_transform') if additional_returned_value is not None: return (x, additional_returned_value) # depends on [control=['if'], data=['additional_returned_value']] return x # depends on [control=['with'], data=[]]
def view_vrf(arg, opts, shell_opts): """ View a single VRF """ if arg is None: print("ERROR: Please specify the RT of the VRF to view.", file=sys.stderr) sys.exit(1) # interpret as default VRF (ie, RT = None) if arg.lower() in ('-', 'none'): arg = None try: v = VRF.search({ 'val1': 'rt', 'operator': 'equals', 'val2': arg } )['result'][0] except (KeyError, IndexError): print("VRF with [RT: %s] not found." % str(arg), file=sys.stderr) sys.exit(1) print("-- VRF") print(" %-26s : %d" % ("ID", v.id)) print(" %-26s : %s" % ("RT", v.rt)) print(" %-26s : %s" % ("Name", v.name)) print(" %-26s : %s" % ("Description", v.description)) print("-- Extra Attributes") if v.avps is not None: for key in sorted(v.avps, key=lambda s: s.lower()): print(" %-26s : %s" % (key, v.avps[key])) print("-- Tags") for tag_name in sorted(v.tags, key=lambda s: s.lower()): print(" %s" % tag_name) # statistics if v.total_addresses_v4 == 0: used_percent_v4 = 0 else: used_percent_v4 = (float(v.used_addresses_v4)/v.total_addresses_v4)*100 if v.total_addresses_v6 == 0: used_percent_v6 = 0 else: used_percent_v6 = (float(v.used_addresses_v6)/v.total_addresses_v6)*100 print("-- Statistics") print(" %-26s : %s" % ("IPv4 prefixes", v.num_prefixes_v4)) print(" %-26s : %.0f / %.0f (%.2f%% of %.0f)" % ("IPv4 addresses Used / Free", v.used_addresses_v4, v.free_addresses_v4, used_percent_v4, v.total_addresses_v4)) print(" %-26s : %s" % ("IPv6 prefixes", v.num_prefixes_v6)) print(" %-26s : %.4e / %.4e (%.2f%% of %.4e)" % ("IPv6 addresses Used / Free", v.used_addresses_v6, v.free_addresses_v6, used_percent_v6, v.total_addresses_v6))
def function[view_vrf, parameter[arg, opts, shell_opts]]: constant[ View a single VRF ] if compare[name[arg] is constant[None]] begin[:] call[name[print], parameter[constant[ERROR: Please specify the RT of the VRF to view.]]] call[name[sys].exit, parameter[constant[1]]] if compare[call[name[arg].lower, parameter[]] in tuple[[<ast.Constant object at 0x7da18eb57760>, <ast.Constant object at 0x7da18eb57cd0>]]] begin[:] variable[arg] assign[=] constant[None] <ast.Try object at 0x7da1b26ae770> call[name[print], parameter[constant[-- VRF]]] call[name[print], parameter[binary_operation[constant[ %-26s : %d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da1b26ac4c0>, <ast.Attribute object at 0x7da1b26aeda0>]]]]] call[name[print], parameter[binary_operation[constant[ %-26s : %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da1b26afbb0>, <ast.Attribute object at 0x7da1b26ae650>]]]]] call[name[print], parameter[binary_operation[constant[ %-26s : %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da1b26af0a0>, <ast.Attribute object at 0x7da1b26ac1c0>]]]]] call[name[print], parameter[binary_operation[constant[ %-26s : %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da1b26afa90>, <ast.Attribute object at 0x7da1b26ae1a0>]]]]] call[name[print], parameter[constant[-- Extra Attributes]]] if compare[name[v].avps is_not constant[None]] begin[:] for taget[name[key]] in starred[call[name[sorted], parameter[name[v].avps]]] begin[:] call[name[print], parameter[binary_operation[constant[ %-26s : %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b26aedd0>, <ast.Subscript object at 0x7da1b26ac490>]]]]] call[name[print], parameter[constant[-- Tags]]] for taget[name[tag_name]] in starred[call[name[sorted], parameter[name[v].tags]]] begin[:] call[name[print], parameter[binary_operation[constant[ %s] <ast.Mod object at 0x7da2590d6920> name[tag_name]]]] if 
compare[name[v].total_addresses_v4 equal[==] constant[0]] begin[:] variable[used_percent_v4] assign[=] constant[0] if compare[name[v].total_addresses_v6 equal[==] constant[0]] begin[:] variable[used_percent_v6] assign[=] constant[0] call[name[print], parameter[constant[-- Statistics]]] call[name[print], parameter[binary_operation[constant[ %-26s : %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da2054a5600>, <ast.Attribute object at 0x7da2054a61a0>]]]]] call[name[print], parameter[binary_operation[constant[ %-26s : %.0f / %.0f (%.2f%% of %.0f)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da2054a48b0>, <ast.Attribute object at 0x7da2054a6980>, <ast.Attribute object at 0x7da2054a4c70>, <ast.Name object at 0x7da2054a45e0>, <ast.Attribute object at 0x7da2054a5cf0>]]]]] call[name[print], parameter[binary_operation[constant[ %-26s : %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da2054a6020>, <ast.Attribute object at 0x7da2054a43a0>]]]]] call[name[print], parameter[binary_operation[constant[ %-26s : %.4e / %.4e (%.2f%% of %.4e)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da2054a6140>, <ast.Attribute object at 0x7da2054a4550>, <ast.Attribute object at 0x7da2054a5000>, <ast.Name object at 0x7da2054a7e20>, <ast.Attribute object at 0x7da2054a40d0>]]]]]
keyword[def] identifier[view_vrf] ( identifier[arg] , identifier[opts] , identifier[shell_opts] ): literal[string] keyword[if] identifier[arg] keyword[is] keyword[None] : identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] ) identifier[sys] . identifier[exit] ( literal[int] ) keyword[if] identifier[arg] . identifier[lower] () keyword[in] ( literal[string] , literal[string] ): identifier[arg] = keyword[None] keyword[try] : identifier[v] = identifier[VRF] . identifier[search] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[arg] } )[ literal[string] ][ literal[int] ] keyword[except] ( identifier[KeyError] , identifier[IndexError] ): identifier[print] ( literal[string] % identifier[str] ( identifier[arg] ), identifier[file] = identifier[sys] . identifier[stderr] ) identifier[sys] . identifier[exit] ( literal[int] ) identifier[print] ( literal[string] ) identifier[print] ( literal[string] %( literal[string] , identifier[v] . identifier[id] )) identifier[print] ( literal[string] %( literal[string] , identifier[v] . identifier[rt] )) identifier[print] ( literal[string] %( literal[string] , identifier[v] . identifier[name] )) identifier[print] ( literal[string] %( literal[string] , identifier[v] . identifier[description] )) identifier[print] ( literal[string] ) keyword[if] identifier[v] . identifier[avps] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[v] . identifier[avps] , identifier[key] = keyword[lambda] identifier[s] : identifier[s] . identifier[lower] ()): identifier[print] ( literal[string] %( identifier[key] , identifier[v] . identifier[avps] [ identifier[key] ])) identifier[print] ( literal[string] ) keyword[for] identifier[tag_name] keyword[in] identifier[sorted] ( identifier[v] . identifier[tags] , identifier[key] = keyword[lambda] identifier[s] : identifier[s] . 
identifier[lower] ()): identifier[print] ( literal[string] % identifier[tag_name] ) keyword[if] identifier[v] . identifier[total_addresses_v4] == literal[int] : identifier[used_percent_v4] = literal[int] keyword[else] : identifier[used_percent_v4] =( identifier[float] ( identifier[v] . identifier[used_addresses_v4] )/ identifier[v] . identifier[total_addresses_v4] )* literal[int] keyword[if] identifier[v] . identifier[total_addresses_v6] == literal[int] : identifier[used_percent_v6] = literal[int] keyword[else] : identifier[used_percent_v6] =( identifier[float] ( identifier[v] . identifier[used_addresses_v6] )/ identifier[v] . identifier[total_addresses_v6] )* literal[int] identifier[print] ( literal[string] ) identifier[print] ( literal[string] %( literal[string] , identifier[v] . identifier[num_prefixes_v4] )) identifier[print] ( literal[string] %( literal[string] , identifier[v] . identifier[used_addresses_v4] , identifier[v] . identifier[free_addresses_v4] , identifier[used_percent_v4] , identifier[v] . identifier[total_addresses_v4] )) identifier[print] ( literal[string] %( literal[string] , identifier[v] . identifier[num_prefixes_v6] )) identifier[print] ( literal[string] %( literal[string] , identifier[v] . identifier[used_addresses_v6] , identifier[v] . identifier[free_addresses_v6] , identifier[used_percent_v6] , identifier[v] . identifier[total_addresses_v6] ))
def view_vrf(arg, opts, shell_opts): """ View a single VRF """ if arg is None: print('ERROR: Please specify the RT of the VRF to view.', file=sys.stderr) sys.exit(1) # depends on [control=['if'], data=[]] # interpret as default VRF (ie, RT = None) if arg.lower() in ('-', 'none'): arg = None # depends on [control=['if'], data=[]] try: v = VRF.search({'val1': 'rt', 'operator': 'equals', 'val2': arg})['result'][0] # depends on [control=['try'], data=[]] except (KeyError, IndexError): print('VRF with [RT: %s] not found.' % str(arg), file=sys.stderr) sys.exit(1) # depends on [control=['except'], data=[]] print('-- VRF') print(' %-26s : %d' % ('ID', v.id)) print(' %-26s : %s' % ('RT', v.rt)) print(' %-26s : %s' % ('Name', v.name)) print(' %-26s : %s' % ('Description', v.description)) print('-- Extra Attributes') if v.avps is not None: for key in sorted(v.avps, key=lambda s: s.lower()): print(' %-26s : %s' % (key, v.avps[key])) # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]] print('-- Tags') for tag_name in sorted(v.tags, key=lambda s: s.lower()): print(' %s' % tag_name) # depends on [control=['for'], data=['tag_name']] # statistics if v.total_addresses_v4 == 0: used_percent_v4 = 0 # depends on [control=['if'], data=[]] else: used_percent_v4 = float(v.used_addresses_v4) / v.total_addresses_v4 * 100 if v.total_addresses_v6 == 0: used_percent_v6 = 0 # depends on [control=['if'], data=[]] else: used_percent_v6 = float(v.used_addresses_v6) / v.total_addresses_v6 * 100 print('-- Statistics') print(' %-26s : %s' % ('IPv4 prefixes', v.num_prefixes_v4)) print(' %-26s : %.0f / %.0f (%.2f%% of %.0f)' % ('IPv4 addresses Used / Free', v.used_addresses_v4, v.free_addresses_v4, used_percent_v4, v.total_addresses_v4)) print(' %-26s : %s' % ('IPv6 prefixes', v.num_prefixes_v6)) print(' %-26s : %.4e / %.4e (%.2f%% of %.4e)' % ('IPv6 addresses Used / Free', v.used_addresses_v6, v.free_addresses_v6, used_percent_v6, v.total_addresses_v6))
def get_token(opts, tok): ''' Fetch the token data from the store. :param opts: Salt master config options :param tok: Token value to get :returns: Token data if successful. Empty dict if failed. ''' t_path = os.path.join(opts['token_dir'], tok) if not os.path.isfile(t_path): return {} serial = salt.payload.Serial(opts) try: with salt.utils.files.fopen(t_path, 'rb') as fp_: tdata = serial.loads(fp_.read()) return tdata except (IOError, OSError): log.warning( 'Authentication failure: can not read token file "%s".', t_path) return {}
def function[get_token, parameter[opts, tok]]: constant[ Fetch the token data from the store. :param opts: Salt master config options :param tok: Token value to get :returns: Token data if successful. Empty dict if failed. ] variable[t_path] assign[=] call[name[os].path.join, parameter[call[name[opts]][constant[token_dir]], name[tok]]] if <ast.UnaryOp object at 0x7da2045666b0> begin[:] return[dictionary[[], []]] variable[serial] assign[=] call[name[salt].payload.Serial, parameter[name[opts]]] <ast.Try object at 0x7da2045678e0>
keyword[def] identifier[get_token] ( identifier[opts] , identifier[tok] ): literal[string] identifier[t_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[opts] [ literal[string] ], identifier[tok] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[t_path] ): keyword[return] {} identifier[serial] = identifier[salt] . identifier[payload] . identifier[Serial] ( identifier[opts] ) keyword[try] : keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[t_path] , literal[string] ) keyword[as] identifier[fp_] : identifier[tdata] = identifier[serial] . identifier[loads] ( identifier[fp_] . identifier[read] ()) keyword[return] identifier[tdata] keyword[except] ( identifier[IOError] , identifier[OSError] ): identifier[log] . identifier[warning] ( literal[string] , identifier[t_path] ) keyword[return] {}
def get_token(opts, tok): """ Fetch the token data from the store. :param opts: Salt master config options :param tok: Token value to get :returns: Token data if successful. Empty dict if failed. """ t_path = os.path.join(opts['token_dir'], tok) if not os.path.isfile(t_path): return {} # depends on [control=['if'], data=[]] serial = salt.payload.Serial(opts) try: with salt.utils.files.fopen(t_path, 'rb') as fp_: tdata = serial.loads(fp_.read()) return tdata # depends on [control=['with'], data=['fp_']] # depends on [control=['try'], data=[]] except (IOError, OSError): log.warning('Authentication failure: can not read token file "%s".', t_path) return {} # depends on [control=['except'], data=[]]
def get_spot_value(self, assets, field, dt, data_frequency): """ Public API method that returns a scalar value representing the value of the desired asset's field at either the given dt. Parameters ---------- assets : Asset, ContinuousFuture, or iterable of same. The asset or assets whose data is desired. field : {'open', 'high', 'low', 'close', 'volume', 'price', 'last_traded'} The desired field of the asset. dt : pd.Timestamp The timestamp for the desired value. data_frequency : str The frequency of the data to query; i.e. whether the data is 'daily' or 'minute' bars Returns ------- value : float, int, or pd.Timestamp The spot value of ``field`` for ``asset`` The return type is based on the ``field`` requested. If the field is one of 'open', 'high', 'low', 'close', or 'price', the value will be a float. If the ``field`` is 'volume' the value will be a int. If the ``field`` is 'last_traded' the value will be a Timestamp. """ assets_is_scalar = False if isinstance(assets, (AssetConvertible, PricingDataAssociable)): assets_is_scalar = True else: # If 'assets' was not one of the expected types then it should be # an iterable. try: iter(assets) except TypeError: raise TypeError( "Unexpected 'assets' value of type {}." .format(type(assets)) ) session_label = self.trading_calendar.minute_to_session_label(dt) if assets_is_scalar: return self._get_single_asset_value( session_label, assets, field, dt, data_frequency, ) else: get_single_asset_value = self._get_single_asset_value return [ get_single_asset_value( session_label, asset, field, dt, data_frequency, ) for asset in assets ]
def function[get_spot_value, parameter[self, assets, field, dt, data_frequency]]: constant[ Public API method that returns a scalar value representing the value of the desired asset's field at either the given dt. Parameters ---------- assets : Asset, ContinuousFuture, or iterable of same. The asset or assets whose data is desired. field : {'open', 'high', 'low', 'close', 'volume', 'price', 'last_traded'} The desired field of the asset. dt : pd.Timestamp The timestamp for the desired value. data_frequency : str The frequency of the data to query; i.e. whether the data is 'daily' or 'minute' bars Returns ------- value : float, int, or pd.Timestamp The spot value of ``field`` for ``asset`` The return type is based on the ``field`` requested. If the field is one of 'open', 'high', 'low', 'close', or 'price', the value will be a float. If the ``field`` is 'volume' the value will be a int. If the ``field`` is 'last_traded' the value will be a Timestamp. ] variable[assets_is_scalar] assign[=] constant[False] if call[name[isinstance], parameter[name[assets], tuple[[<ast.Name object at 0x7da1b20246a0>, <ast.Name object at 0x7da1b2024eb0>]]]] begin[:] variable[assets_is_scalar] assign[=] constant[True] variable[session_label] assign[=] call[name[self].trading_calendar.minute_to_session_label, parameter[name[dt]]] if name[assets_is_scalar] begin[:] return[call[name[self]._get_single_asset_value, parameter[name[session_label], name[assets], name[field], name[dt], name[data_frequency]]]]
keyword[def] identifier[get_spot_value] ( identifier[self] , identifier[assets] , identifier[field] , identifier[dt] , identifier[data_frequency] ): literal[string] identifier[assets_is_scalar] = keyword[False] keyword[if] identifier[isinstance] ( identifier[assets] ,( identifier[AssetConvertible] , identifier[PricingDataAssociable] )): identifier[assets_is_scalar] = keyword[True] keyword[else] : keyword[try] : identifier[iter] ( identifier[assets] ) keyword[except] identifier[TypeError] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[assets] )) ) identifier[session_label] = identifier[self] . identifier[trading_calendar] . identifier[minute_to_session_label] ( identifier[dt] ) keyword[if] identifier[assets_is_scalar] : keyword[return] identifier[self] . identifier[_get_single_asset_value] ( identifier[session_label] , identifier[assets] , identifier[field] , identifier[dt] , identifier[data_frequency] , ) keyword[else] : identifier[get_single_asset_value] = identifier[self] . identifier[_get_single_asset_value] keyword[return] [ identifier[get_single_asset_value] ( identifier[session_label] , identifier[asset] , identifier[field] , identifier[dt] , identifier[data_frequency] , ) keyword[for] identifier[asset] keyword[in] identifier[assets] ]
def get_spot_value(self, assets, field, dt, data_frequency): """ Public API method that returns a scalar value representing the value of the desired asset's field at either the given dt. Parameters ---------- assets : Asset, ContinuousFuture, or iterable of same. The asset or assets whose data is desired. field : {'open', 'high', 'low', 'close', 'volume', 'price', 'last_traded'} The desired field of the asset. dt : pd.Timestamp The timestamp for the desired value. data_frequency : str The frequency of the data to query; i.e. whether the data is 'daily' or 'minute' bars Returns ------- value : float, int, or pd.Timestamp The spot value of ``field`` for ``asset`` The return type is based on the ``field`` requested. If the field is one of 'open', 'high', 'low', 'close', or 'price', the value will be a float. If the ``field`` is 'volume' the value will be a int. If the ``field`` is 'last_traded' the value will be a Timestamp. """ assets_is_scalar = False if isinstance(assets, (AssetConvertible, PricingDataAssociable)): assets_is_scalar = True # depends on [control=['if'], data=[]] else: # If 'assets' was not one of the expected types then it should be # an iterable. try: iter(assets) # depends on [control=['try'], data=[]] except TypeError: raise TypeError("Unexpected 'assets' value of type {}.".format(type(assets))) # depends on [control=['except'], data=[]] session_label = self.trading_calendar.minute_to_session_label(dt) if assets_is_scalar: return self._get_single_asset_value(session_label, assets, field, dt, data_frequency) # depends on [control=['if'], data=[]] else: get_single_asset_value = self._get_single_asset_value return [get_single_asset_value(session_label, asset, field, dt, data_frequency) for asset in assets]
def score(self, X, y, compute=True): """Returns the score on the given data. Parameters ---------- X : array-like, shape = [n_samples, n_features] Input data, where n_samples is the number of samples and n_features is the number of features. y : array-like, shape = [n_samples] or [n_samples, n_output], optional Target relative to X for classification or regression; None for unsupervised learning. Returns ------- score : float return self.estimator.score(X, y) """ scoring = self.scoring X = self._check_array(X) y = self._check_array(y) if not scoring: if type(self._postfit_estimator).score == sklearn.base.RegressorMixin.score: scoring = "r2" elif ( type(self._postfit_estimator).score == sklearn.base.ClassifierMixin.score ): scoring = "accuracy" else: scoring = self.scoring if scoring: if not dask.is_dask_collection(X) and not dask.is_dask_collection(y): scorer = sklearn.metrics.get_scorer(scoring) else: scorer = get_scorer(scoring, compute=compute) return scorer(self, X, y) else: return self._postfit_estimator.score(X, y)
def function[score, parameter[self, X, y, compute]]: constant[Returns the score on the given data. Parameters ---------- X : array-like, shape = [n_samples, n_features] Input data, where n_samples is the number of samples and n_features is the number of features. y : array-like, shape = [n_samples] or [n_samples, n_output], optional Target relative to X for classification or regression; None for unsupervised learning. Returns ------- score : float return self.estimator.score(X, y) ] variable[scoring] assign[=] name[self].scoring variable[X] assign[=] call[name[self]._check_array, parameter[name[X]]] variable[y] assign[=] call[name[self]._check_array, parameter[name[y]]] if <ast.UnaryOp object at 0x7da1b19861d0> begin[:] if compare[call[name[type], parameter[name[self]._postfit_estimator]].score equal[==] name[sklearn].base.RegressorMixin.score] begin[:] variable[scoring] assign[=] constant[r2] if name[scoring] begin[:] if <ast.BoolOp object at 0x7da1b1987cd0> begin[:] variable[scorer] assign[=] call[name[sklearn].metrics.get_scorer, parameter[name[scoring]]] return[call[name[scorer], parameter[name[self], name[X], name[y]]]]
keyword[def] identifier[score] ( identifier[self] , identifier[X] , identifier[y] , identifier[compute] = keyword[True] ): literal[string] identifier[scoring] = identifier[self] . identifier[scoring] identifier[X] = identifier[self] . identifier[_check_array] ( identifier[X] ) identifier[y] = identifier[self] . identifier[_check_array] ( identifier[y] ) keyword[if] keyword[not] identifier[scoring] : keyword[if] identifier[type] ( identifier[self] . identifier[_postfit_estimator] ). identifier[score] == identifier[sklearn] . identifier[base] . identifier[RegressorMixin] . identifier[score] : identifier[scoring] = literal[string] keyword[elif] ( identifier[type] ( identifier[self] . identifier[_postfit_estimator] ). identifier[score] == identifier[sklearn] . identifier[base] . identifier[ClassifierMixin] . identifier[score] ): identifier[scoring] = literal[string] keyword[else] : identifier[scoring] = identifier[self] . identifier[scoring] keyword[if] identifier[scoring] : keyword[if] keyword[not] identifier[dask] . identifier[is_dask_collection] ( identifier[X] ) keyword[and] keyword[not] identifier[dask] . identifier[is_dask_collection] ( identifier[y] ): identifier[scorer] = identifier[sklearn] . identifier[metrics] . identifier[get_scorer] ( identifier[scoring] ) keyword[else] : identifier[scorer] = identifier[get_scorer] ( identifier[scoring] , identifier[compute] = identifier[compute] ) keyword[return] identifier[scorer] ( identifier[self] , identifier[X] , identifier[y] ) keyword[else] : keyword[return] identifier[self] . identifier[_postfit_estimator] . identifier[score] ( identifier[X] , identifier[y] )
def score(self, X, y, compute=True): """Returns the score on the given data. Parameters ---------- X : array-like, shape = [n_samples, n_features] Input data, where n_samples is the number of samples and n_features is the number of features. y : array-like, shape = [n_samples] or [n_samples, n_output], optional Target relative to X for classification or regression; None for unsupervised learning. Returns ------- score : float return self.estimator.score(X, y) """ scoring = self.scoring X = self._check_array(X) y = self._check_array(y) if not scoring: if type(self._postfit_estimator).score == sklearn.base.RegressorMixin.score: scoring = 'r2' # depends on [control=['if'], data=[]] elif type(self._postfit_estimator).score == sklearn.base.ClassifierMixin.score: scoring = 'accuracy' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: scoring = self.scoring if scoring: if not dask.is_dask_collection(X) and (not dask.is_dask_collection(y)): scorer = sklearn.metrics.get_scorer(scoring) # depends on [control=['if'], data=[]] else: scorer = get_scorer(scoring, compute=compute) return scorer(self, X, y) # depends on [control=['if'], data=[]] else: return self._postfit_estimator.score(X, y)
def _sanitize(self, value): """ Remove the control characters that are not allowed in XML: https://www.w3.org/TR/xml/#charsets Leave all other characters. """ if isinstance(value, six.binary_type): value = value.decode('utf-8') if isinstance(value, six.text_type): new_value = ''.join(ch for ch in value if self._valid_char(ch)) else: return value # The new string will be equivalent to the original string if no control characters are present. # If equivalent, return the original string - some tests check for object equality instead of string equality. return value if value == new_value else new_value
def function[_sanitize, parameter[self, value]]: constant[ Remove the control characters that are not allowed in XML: https://www.w3.org/TR/xml/#charsets Leave all other characters. ] if call[name[isinstance], parameter[name[value], name[six].binary_type]] begin[:] variable[value] assign[=] call[name[value].decode, parameter[constant[utf-8]]] if call[name[isinstance], parameter[name[value], name[six].text_type]] begin[:] variable[new_value] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da18fe91f90>]] return[<ast.IfExp object at 0x7da20c993af0>]
keyword[def] identifier[_sanitize] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[binary_type] ): identifier[value] = identifier[value] . identifier[decode] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[text_type] ): identifier[new_value] = literal[string] . identifier[join] ( identifier[ch] keyword[for] identifier[ch] keyword[in] identifier[value] keyword[if] identifier[self] . identifier[_valid_char] ( identifier[ch] )) keyword[else] : keyword[return] identifier[value] keyword[return] identifier[value] keyword[if] identifier[value] == identifier[new_value] keyword[else] identifier[new_value]
def _sanitize(self, value): """ Remove the control characters that are not allowed in XML: https://www.w3.org/TR/xml/#charsets Leave all other characters. """ if isinstance(value, six.binary_type): value = value.decode('utf-8') # depends on [control=['if'], data=[]] if isinstance(value, six.text_type): new_value = ''.join((ch for ch in value if self._valid_char(ch))) # depends on [control=['if'], data=[]] else: return value # The new string will be equivalent to the original string if no control characters are present. # If equivalent, return the original string - some tests check for object equality instead of string equality. return value if value == new_value else new_value
def _update_context_field_expression(present_locations, expression): """Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result.""" no_op_blocks = (ContextField, Literal, LocalField, UnaryTransformation, Variable) if isinstance(expression, BinaryComposition): if isinstance(expression.left, ContextField) or isinstance(expression.right, ContextField): return _update_context_field_binary_composition(present_locations, expression) else: return _simplify_non_context_field_binary_composition(expression) elif isinstance(expression, TernaryConditional): return _simplify_ternary_conditional(expression) elif isinstance(expression, BetweenClause): lower_bound = expression.lower_bound upper_bound = expression.upper_bound if isinstance(lower_bound, ContextField) or isinstance(upper_bound, ContextField): raise AssertionError(u'Found BetweenClause with ContextFields as lower/upper bounds. ' u'This should never happen: {}'.format(expression)) return expression elif isinstance(expression, (OutputContextField, FoldedContextField)): raise AssertionError(u'Found unexpected expression of type {}. This should never happen: ' u'{}'.format(type(expression).__name__, expression)) elif isinstance(expression, no_op_blocks): return expression raise AssertionError(u'Found unhandled expression of type {}. This should never happen: ' u'{}'.format(type(expression).__name__, expression))
def function[_update_context_field_expression, parameter[present_locations, expression]]: constant[Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result.] variable[no_op_blocks] assign[=] tuple[[<ast.Name object at 0x7da1b17ed7e0>, <ast.Name object at 0x7da1b17ec1f0>, <ast.Name object at 0x7da1b17ed330>, <ast.Name object at 0x7da1b17ed0c0>, <ast.Name object at 0x7da1b17ecfa0>]] if call[name[isinstance], parameter[name[expression], name[BinaryComposition]]] begin[:] if <ast.BoolOp object at 0x7da1b17ed180> begin[:] return[call[name[_update_context_field_binary_composition], parameter[name[present_locations], name[expression]]]] <ast.Raise object at 0x7da1b175e440>
keyword[def] identifier[_update_context_field_expression] ( identifier[present_locations] , identifier[expression] ): literal[string] identifier[no_op_blocks] =( identifier[ContextField] , identifier[Literal] , identifier[LocalField] , identifier[UnaryTransformation] , identifier[Variable] ) keyword[if] identifier[isinstance] ( identifier[expression] , identifier[BinaryComposition] ): keyword[if] identifier[isinstance] ( identifier[expression] . identifier[left] , identifier[ContextField] ) keyword[or] identifier[isinstance] ( identifier[expression] . identifier[right] , identifier[ContextField] ): keyword[return] identifier[_update_context_field_binary_composition] ( identifier[present_locations] , identifier[expression] ) keyword[else] : keyword[return] identifier[_simplify_non_context_field_binary_composition] ( identifier[expression] ) keyword[elif] identifier[isinstance] ( identifier[expression] , identifier[TernaryConditional] ): keyword[return] identifier[_simplify_ternary_conditional] ( identifier[expression] ) keyword[elif] identifier[isinstance] ( identifier[expression] , identifier[BetweenClause] ): identifier[lower_bound] = identifier[expression] . identifier[lower_bound] identifier[upper_bound] = identifier[expression] . identifier[upper_bound] keyword[if] identifier[isinstance] ( identifier[lower_bound] , identifier[ContextField] ) keyword[or] identifier[isinstance] ( identifier[upper_bound] , identifier[ContextField] ): keyword[raise] identifier[AssertionError] ( literal[string] literal[string] . identifier[format] ( identifier[expression] )) keyword[return] identifier[expression] keyword[elif] identifier[isinstance] ( identifier[expression] ,( identifier[OutputContextField] , identifier[FoldedContextField] )): keyword[raise] identifier[AssertionError] ( literal[string] literal[string] . identifier[format] ( identifier[type] ( identifier[expression] ). 
identifier[__name__] , identifier[expression] )) keyword[elif] identifier[isinstance] ( identifier[expression] , identifier[no_op_blocks] ): keyword[return] identifier[expression] keyword[raise] identifier[AssertionError] ( literal[string] literal[string] . identifier[format] ( identifier[type] ( identifier[expression] ). identifier[__name__] , identifier[expression] ))
def _update_context_field_expression(present_locations, expression): """Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result.""" no_op_blocks = (ContextField, Literal, LocalField, UnaryTransformation, Variable) if isinstance(expression, BinaryComposition): if isinstance(expression.left, ContextField) or isinstance(expression.right, ContextField): return _update_context_field_binary_composition(present_locations, expression) # depends on [control=['if'], data=[]] else: return _simplify_non_context_field_binary_composition(expression) # depends on [control=['if'], data=[]] elif isinstance(expression, TernaryConditional): return _simplify_ternary_conditional(expression) # depends on [control=['if'], data=[]] elif isinstance(expression, BetweenClause): lower_bound = expression.lower_bound upper_bound = expression.upper_bound if isinstance(lower_bound, ContextField) or isinstance(upper_bound, ContextField): raise AssertionError(u'Found BetweenClause with ContextFields as lower/upper bounds. This should never happen: {}'.format(expression)) # depends on [control=['if'], data=[]] return expression # depends on [control=['if'], data=[]] elif isinstance(expression, (OutputContextField, FoldedContextField)): raise AssertionError(u'Found unexpected expression of type {}. This should never happen: {}'.format(type(expression).__name__, expression)) # depends on [control=['if'], data=[]] elif isinstance(expression, no_op_blocks): return expression # depends on [control=['if'], data=[]] raise AssertionError(u'Found unhandled expression of type {}. This should never happen: {}'.format(type(expression).__name__, expression))
def create_packages_archive(packages, filename):
    """
    Create a tar archive which will contain the files for the packages
    listed in packages.

    :param packages: iterable of imported package/module objects to bundle.
    :param filename: path of the tar archive to create (opened in "w" mode).
    :raises RuntimeError: if a package lives inside a zipped .egg file,
        which cannot be walked as a directory tree.
    """
    import tarfile
    tar = tarfile.open(filename, "w")

    def add(src, dst):
        # Single choke point so every addition is logged the same way.
        logger.debug('adding to tar: %s -> %s', src, dst)
        tar.add(src, dst)

    def add_files_for_package(sub_package_path, root_package_path, root_package_name):
        # Walk the on-disk package tree, re-rooting paths so the archive
        # layout mirrors the import path rather than the filesystem path.
        for root, dirs, files in os.walk(sub_package_path):
            if '.svn' in dirs:
                dirs.remove('.svn')
            for f in files:
                # Skip bytecode and hidden files; only ship source.
                if not f.endswith(".pyc") and not f.startswith("."):
                    add(dereference(root + "/" + f),
                        root.replace(root_package_path, root_package_name) + "/" + f)

    for package in packages:
        # Put a submodule's entire package in the archive. This is the
        # magic that usually packages everything you need without
        # having to attach packages/modules explicitly
        if not getattr(package, "__path__", None) and '.' in package.__name__:
            package = __import__(package.__name__.rpartition('.')[0], None, None, 'non_empty')
        n = package.__name__.replace(".", "/")
        if getattr(package, "__path__", None):
            # TODO: (BUG) picking only the first path does not
            # properly deal with namespaced packages in different
            # directories
            p = package.__path__[0]

            if p.endswith('.egg') and os.path.isfile(p):
                # BUGFIX: raising a plain string is a TypeError on Python 3,
                # which masked the real error. Raise a proper exception.
                raise RuntimeError('egg files not supported!!!')
                # Add the entire egg file
                # p = p[:p.find('.egg') + 4]
                # add(dereference(p), os.path.basename(p))

            else:
                # include __init__ files from parent projects
                root = []
                for parent in package.__name__.split('.')[0:-1]:
                    root.append(parent)
                    module_name = '.'.join(root)
                    directory = '/'.join(root)
                    add(dereference(__import__(module_name, None, None, 'non_empty').__path__[0] + "/__init__.py"),
                        directory + "/__init__.py")

                add_files_for_package(p, p, n)

                # include egg-info directories that are parallel:
                for egg_info_path in glob.glob(p + '*.egg-info'):
                    logger.debug(
                        'Adding package metadata to archive for "%s" found at "%s"',
                        package.__name__,
                        egg_info_path
                    )
                    add_files_for_package(egg_info_path, p, n)
        else:
            # Plain module (no __path__): ship the single .py file, mapping
            # a compiled .pyc back to its source name.
            f = package.__file__
            if f.endswith("pyc"):
                f = f[:-3] + "py"
            if n.find(".") == -1:
                add(dereference(f), os.path.basename(f))
            else:
                add(dereference(f), n + ".py")
    tar.close()
def function[create_packages_archive, parameter[packages, filename]]: constant[ Create a tar archive which will contain the files for the packages listed in packages. ] import module[tarfile] variable[tar] assign[=] call[name[tarfile].open, parameter[name[filename], constant[w]]] def function[add, parameter[src, dst]]: call[name[logger].debug, parameter[constant[adding to tar: %s -> %s], name[src], name[dst]]] call[name[tar].add, parameter[name[src], name[dst]]] def function[add_files_for_package, parameter[sub_package_path, root_package_path, root_package_name]]: for taget[tuple[[<ast.Name object at 0x7da1b1f9b2b0>, <ast.Name object at 0x7da1b1f9a6e0>, <ast.Name object at 0x7da1b1f9ab60>]]] in starred[call[name[os].walk, parameter[name[sub_package_path]]]] begin[:] if compare[constant[.svn] in name[dirs]] begin[:] call[name[dirs].remove, parameter[constant[.svn]]] for taget[name[f]] in starred[name[files]] begin[:] if <ast.BoolOp object at 0x7da1b1f9bfa0> begin[:] call[name[add], parameter[call[name[dereference], parameter[binary_operation[binary_operation[name[root] + constant[/]] + name[f]]]], binary_operation[binary_operation[call[name[root].replace, parameter[name[root_package_path], name[root_package_name]]] + constant[/]] + name[f]]]] for taget[name[package]] in starred[name[packages]] begin[:] if <ast.BoolOp object at 0x7da1b1fa3eb0> begin[:] variable[package] assign[=] call[name[__import__], parameter[call[call[name[package].__name__.rpartition, parameter[constant[.]]]][constant[0]], constant[None], constant[None], constant[non_empty]]] variable[n] assign[=] call[name[package].__name__.replace, parameter[constant[.], constant[/]]] if call[name[getattr], parameter[name[package], constant[__path__], constant[None]]] begin[:] variable[p] assign[=] call[name[package].__path__][constant[0]] if <ast.BoolOp object at 0x7da1b1fa1f90> begin[:] <ast.Raise object at 0x7da1b1f5ae30> call[name[tar].close, parameter[]]
keyword[def] identifier[create_packages_archive] ( identifier[packages] , identifier[filename] ): literal[string] keyword[import] identifier[tarfile] identifier[tar] = identifier[tarfile] . identifier[open] ( identifier[filename] , literal[string] ) keyword[def] identifier[add] ( identifier[src] , identifier[dst] ): identifier[logger] . identifier[debug] ( literal[string] , identifier[src] , identifier[dst] ) identifier[tar] . identifier[add] ( identifier[src] , identifier[dst] ) keyword[def] identifier[add_files_for_package] ( identifier[sub_package_path] , identifier[root_package_path] , identifier[root_package_name] ): keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[sub_package_path] ): keyword[if] literal[string] keyword[in] identifier[dirs] : identifier[dirs] . identifier[remove] ( literal[string] ) keyword[for] identifier[f] keyword[in] identifier[files] : keyword[if] keyword[not] identifier[f] . identifier[endswith] ( literal[string] ) keyword[and] keyword[not] identifier[f] . identifier[startswith] ( literal[string] ): identifier[add] ( identifier[dereference] ( identifier[root] + literal[string] + identifier[f] ), identifier[root] . identifier[replace] ( identifier[root_package_path] , identifier[root_package_name] )+ literal[string] + identifier[f] ) keyword[for] identifier[package] keyword[in] identifier[packages] : keyword[if] keyword[not] identifier[getattr] ( identifier[package] , literal[string] , keyword[None] ) keyword[and] literal[string] keyword[in] identifier[package] . identifier[__name__] : identifier[package] = identifier[__import__] ( identifier[package] . identifier[__name__] . identifier[rpartition] ( literal[string] )[ literal[int] ], keyword[None] , keyword[None] , literal[string] ) identifier[n] = identifier[package] . identifier[__name__] . 
identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[getattr] ( identifier[package] , literal[string] , keyword[None] ): identifier[p] = identifier[package] . identifier[__path__] [ literal[int] ] keyword[if] identifier[p] . identifier[endswith] ( literal[string] ) keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[p] ): keyword[raise] literal[string] keyword[else] : identifier[root] =[] keyword[for] identifier[parent] keyword[in] identifier[package] . identifier[__name__] . identifier[split] ( literal[string] )[ literal[int] :- literal[int] ]: identifier[root] . identifier[append] ( identifier[parent] ) identifier[module_name] = literal[string] . identifier[join] ( identifier[root] ) identifier[directory] = literal[string] . identifier[join] ( identifier[root] ) identifier[add] ( identifier[dereference] ( identifier[__import__] ( identifier[module_name] , keyword[None] , keyword[None] , literal[string] ). identifier[__path__] [ literal[int] ]+ literal[string] ), identifier[directory] + literal[string] ) identifier[add_files_for_package] ( identifier[p] , identifier[p] , identifier[n] ) keyword[for] identifier[egg_info_path] keyword[in] identifier[glob] . identifier[glob] ( identifier[p] + literal[string] ): identifier[logger] . identifier[debug] ( literal[string] , identifier[package] . identifier[__name__] , identifier[egg_info_path] ) identifier[add_files_for_package] ( identifier[egg_info_path] , identifier[p] , identifier[n] ) keyword[else] : identifier[f] = identifier[package] . identifier[__file__] keyword[if] identifier[f] . identifier[endswith] ( literal[string] ): identifier[f] = identifier[f] [:- literal[int] ]+ literal[string] keyword[if] identifier[n] . identifier[find] ( literal[string] )==- literal[int] : identifier[add] ( identifier[dereference] ( identifier[f] ), identifier[os] . identifier[path] . 
identifier[basename] ( identifier[f] )) keyword[else] : identifier[add] ( identifier[dereference] ( identifier[f] ), identifier[n] + literal[string] ) identifier[tar] . identifier[close] ()
def create_packages_archive(packages, filename): """ Create a tar archive which will contain the files for the packages listed in packages. """ import tarfile tar = tarfile.open(filename, 'w') def add(src, dst): logger.debug('adding to tar: %s -> %s', src, dst) tar.add(src, dst) def add_files_for_package(sub_package_path, root_package_path, root_package_name): for (root, dirs, files) in os.walk(sub_package_path): if '.svn' in dirs: dirs.remove('.svn') # depends on [control=['if'], data=['dirs']] for f in files: if not f.endswith('.pyc') and (not f.startswith('.')): add(dereference(root + '/' + f), root.replace(root_package_path, root_package_name) + '/' + f) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=[]] for package in packages: # Put a submodule's entire package in the archive. This is the # magic that usually packages everything you need without # having to attach packages/modules explicitly if not getattr(package, '__path__', None) and '.' in package.__name__: package = __import__(package.__name__.rpartition('.')[0], None, None, 'non_empty') # depends on [control=['if'], data=[]] n = package.__name__.replace('.', '/') if getattr(package, '__path__', None): # TODO: (BUG) picking only the first path does not # properly deal with namespaced packages in different # directories p = package.__path__[0] if p.endswith('.egg') and os.path.isfile(p): raise 'egg files not supported!!!' 
# depends on [control=['if'], data=[]] else: # Add the entire egg file # p = p[:p.find('.egg') + 4] # add(dereference(p), os.path.basename(p)) # include __init__ files from parent projects root = [] for parent in package.__name__.split('.')[0:-1]: root.append(parent) module_name = '.'.join(root) directory = '/'.join(root) add(dereference(__import__(module_name, None, None, 'non_empty').__path__[0] + '/__init__.py'), directory + '/__init__.py') # depends on [control=['for'], data=['parent']] add_files_for_package(p, p, n) # include egg-info directories that are parallel: for egg_info_path in glob.glob(p + '*.egg-info'): logger.debug('Adding package metadata to archive for "%s" found at "%s"', package.__name__, egg_info_path) add_files_for_package(egg_info_path, p, n) # depends on [control=['for'], data=['egg_info_path']] # depends on [control=['if'], data=[]] else: f = package.__file__ if f.endswith('pyc'): f = f[:-3] + 'py' # depends on [control=['if'], data=[]] if n.find('.') == -1: add(dereference(f), os.path.basename(f)) # depends on [control=['if'], data=[]] else: add(dereference(f), n + '.py') # depends on [control=['for'], data=['package']] tar.close()
def marginalize(self, variables, inplace=True): """ Marginalize the factor with respect to the given variables. Parameters ---------- variables: list, array-like List of variables with respect to which factor is to be maximized. inplace: boolean If inplace=True it will modify the factor itself, else would return a new ContinuousFactor instance. Returns ------- DiscreteFactor or None: if inplace=True (default) returns None if inplace=False returns a new ContinuousFactor instance. Examples -------- >>> from pgmpy.factors.continuous import ContinuousFactor >>> from scipy.stats import multivariate_normal >>> std_normal_pdf = lambda *x: multivariate_normal.pdf(x, [0, 0], [[1, 0], [0, 1]]) >>> std_normal = ContinuousFactor(['x1', 'x2'], std_normal_pdf) >>> std_normal.scope() ['x1', 'x2'] >>> std_normal.assignment([1, 1]) 0.058549831524319168 >>> std_normal.marginalize(['x2']) >>> std_normal.scope() ['x1'] >>> std_normal.assignment(1) """ phi = self if inplace else self.copy() phi.distribution = phi.distribution.marginalize(variables, inplace=False) if not inplace: return phi
def function[marginalize, parameter[self, variables, inplace]]: constant[ Marginalize the factor with respect to the given variables. Parameters ---------- variables: list, array-like List of variables with respect to which factor is to be maximized. inplace: boolean If inplace=True it will modify the factor itself, else would return a new ContinuousFactor instance. Returns ------- DiscreteFactor or None: if inplace=True (default) returns None if inplace=False returns a new ContinuousFactor instance. Examples -------- >>> from pgmpy.factors.continuous import ContinuousFactor >>> from scipy.stats import multivariate_normal >>> std_normal_pdf = lambda *x: multivariate_normal.pdf(x, [0, 0], [[1, 0], [0, 1]]) >>> std_normal = ContinuousFactor(['x1', 'x2'], std_normal_pdf) >>> std_normal.scope() ['x1', 'x2'] >>> std_normal.assignment([1, 1]) 0.058549831524319168 >>> std_normal.marginalize(['x2']) >>> std_normal.scope() ['x1'] >>> std_normal.assignment(1) ] variable[phi] assign[=] <ast.IfExp object at 0x7da20c6aa980> name[phi].distribution assign[=] call[name[phi].distribution.marginalize, parameter[name[variables]]] if <ast.UnaryOp object at 0x7da20c6a8670> begin[:] return[name[phi]]
keyword[def] identifier[marginalize] ( identifier[self] , identifier[variables] , identifier[inplace] = keyword[True] ): literal[string] identifier[phi] = identifier[self] keyword[if] identifier[inplace] keyword[else] identifier[self] . identifier[copy] () identifier[phi] . identifier[distribution] = identifier[phi] . identifier[distribution] . identifier[marginalize] ( identifier[variables] , identifier[inplace] = keyword[False] ) keyword[if] keyword[not] identifier[inplace] : keyword[return] identifier[phi]
def marginalize(self, variables, inplace=True): """ Marginalize the factor with respect to the given variables. Parameters ---------- variables: list, array-like List of variables with respect to which factor is to be maximized. inplace: boolean If inplace=True it will modify the factor itself, else would return a new ContinuousFactor instance. Returns ------- DiscreteFactor or None: if inplace=True (default) returns None if inplace=False returns a new ContinuousFactor instance. Examples -------- >>> from pgmpy.factors.continuous import ContinuousFactor >>> from scipy.stats import multivariate_normal >>> std_normal_pdf = lambda *x: multivariate_normal.pdf(x, [0, 0], [[1, 0], [0, 1]]) >>> std_normal = ContinuousFactor(['x1', 'x2'], std_normal_pdf) >>> std_normal.scope() ['x1', 'x2'] >>> std_normal.assignment([1, 1]) 0.058549831524319168 >>> std_normal.marginalize(['x2']) >>> std_normal.scope() ['x1'] >>> std_normal.assignment(1) """ phi = self if inplace else self.copy() phi.distribution = phi.distribution.marginalize(variables, inplace=False) if not inplace: return phi # depends on [control=['if'], data=[]]
def provideCustomerReferralCode(sender,**kwargs): ''' If the vouchers app is installed and referrals are enabled, then the customer's profile page can show their voucher referral code. ''' customer = kwargs.pop('customer') if getConstant('vouchers__enableVouchers') and getConstant('referrals__enableReferralProgram'): vrd = ensureReferralVouchersExist(customer) return { 'referralVoucherId': vrd.referreeVoucher.voucherId }
def function[provideCustomerReferralCode, parameter[sender]]: constant[ If the vouchers app is installed and referrals are enabled, then the customer's profile page can show their voucher referral code. ] variable[customer] assign[=] call[name[kwargs].pop, parameter[constant[customer]]] if <ast.BoolOp object at 0x7da1b1346740> begin[:] variable[vrd] assign[=] call[name[ensureReferralVouchersExist], parameter[name[customer]]] return[dictionary[[<ast.Constant object at 0x7da1b13464a0>], [<ast.Attribute object at 0x7da1b1346470>]]]
keyword[def] identifier[provideCustomerReferralCode] ( identifier[sender] ,** identifier[kwargs] ): literal[string] identifier[customer] = identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[if] identifier[getConstant] ( literal[string] ) keyword[and] identifier[getConstant] ( literal[string] ): identifier[vrd] = identifier[ensureReferralVouchersExist] ( identifier[customer] ) keyword[return] { literal[string] : identifier[vrd] . identifier[referreeVoucher] . identifier[voucherId] }
def provideCustomerReferralCode(sender, **kwargs): """ If the vouchers app is installed and referrals are enabled, then the customer's profile page can show their voucher referral code. """ customer = kwargs.pop('customer') if getConstant('vouchers__enableVouchers') and getConstant('referrals__enableReferralProgram'): vrd = ensureReferralVouchersExist(customer) return {'referralVoucherId': vrd.referreeVoucher.voucherId} # depends on [control=['if'], data=[]]
def set_mode(path, mode): ''' Set the mode of a file This just calls get_mode, which returns None because we don't use mode on Windows Args: path: The path to the file or directory mode: The mode (not used) Returns: None CLI Example: .. code-block:: bash salt '*' file.set_mode /etc/passwd 0644 ''' func_name = '{0}.set_mode'.format(__virtualname__) if __opts__.get('fun', '') == func_name: log.info('The function %s should not be used on Windows systems; ' 'see function docs for details. The value returned is ' 'always None. Use set_perms instead.', func_name) return get_mode(path)
def function[set_mode, parameter[path, mode]]: constant[ Set the mode of a file This just calls get_mode, which returns None because we don't use mode on Windows Args: path: The path to the file or directory mode: The mode (not used) Returns: None CLI Example: .. code-block:: bash salt '*' file.set_mode /etc/passwd 0644 ] variable[func_name] assign[=] call[constant[{0}.set_mode].format, parameter[name[__virtualname__]]] if compare[call[name[__opts__].get, parameter[constant[fun], constant[]]] equal[==] name[func_name]] begin[:] call[name[log].info, parameter[constant[The function %s should not be used on Windows systems; see function docs for details. The value returned is always None. Use set_perms instead.], name[func_name]]] return[call[name[get_mode], parameter[name[path]]]]
keyword[def] identifier[set_mode] ( identifier[path] , identifier[mode] ): literal[string] identifier[func_name] = literal[string] . identifier[format] ( identifier[__virtualname__] ) keyword[if] identifier[__opts__] . identifier[get] ( literal[string] , literal[string] )== identifier[func_name] : identifier[log] . identifier[info] ( literal[string] literal[string] literal[string] , identifier[func_name] ) keyword[return] identifier[get_mode] ( identifier[path] )
def set_mode(path, mode): """ Set the mode of a file This just calls get_mode, which returns None because we don't use mode on Windows Args: path: The path to the file or directory mode: The mode (not used) Returns: None CLI Example: .. code-block:: bash salt '*' file.set_mode /etc/passwd 0644 """ func_name = '{0}.set_mode'.format(__virtualname__) if __opts__.get('fun', '') == func_name: log.info('The function %s should not be used on Windows systems; see function docs for details. The value returned is always None. Use set_perms instead.', func_name) # depends on [control=['if'], data=['func_name']] return get_mode(path)
def make_pl_si_lists(lst, plending, siendingsize, dojoinstem=True): """ given a list of singular words: lst an ending to append to make the plural: plending the number of characters to remove from the singular before appending plending: siendingsize a flag whether to create a joinstem: dojoinstem return: a list of pluralised words: si_list (called si because this is what you need to look for to make the singular) the pluralised words as a dict of sets sorted by word length: si_bysize the singular words as a dict of sets sorted by word length: pl_bysize if dojoinstem is True: a regular expression that matches any of the stems: stem """ if siendingsize is not None: siendingsize = -siendingsize si_list = [w[:siendingsize] + plending for w in lst] pl_bysize = bysize(lst) si_bysize = bysize(si_list) if dojoinstem: stem = joinstem(siendingsize, lst) return si_list, si_bysize, pl_bysize, stem else: return si_list, si_bysize, pl_bysize
def function[make_pl_si_lists, parameter[lst, plending, siendingsize, dojoinstem]]: constant[ given a list of singular words: lst an ending to append to make the plural: plending the number of characters to remove from the singular before appending plending: siendingsize a flag whether to create a joinstem: dojoinstem return: a list of pluralised words: si_list (called si because this is what you need to look for to make the singular) the pluralised words as a dict of sets sorted by word length: si_bysize the singular words as a dict of sets sorted by word length: pl_bysize if dojoinstem is True: a regular expression that matches any of the stems: stem ] if compare[name[siendingsize] is_not constant[None]] begin[:] variable[siendingsize] assign[=] <ast.UnaryOp object at 0x7da1b12cb8e0> variable[si_list] assign[=] <ast.ListComp object at 0x7da1b12c9de0> variable[pl_bysize] assign[=] call[name[bysize], parameter[name[lst]]] variable[si_bysize] assign[=] call[name[bysize], parameter[name[si_list]]] if name[dojoinstem] begin[:] variable[stem] assign[=] call[name[joinstem], parameter[name[siendingsize], name[lst]]] return[tuple[[<ast.Name object at 0x7da1b12c81f0>, <ast.Name object at 0x7da1b12c9a80>, <ast.Name object at 0x7da1b12c9960>, <ast.Name object at 0x7da1b12c96c0>]]]
keyword[def] identifier[make_pl_si_lists] ( identifier[lst] , identifier[plending] , identifier[siendingsize] , identifier[dojoinstem] = keyword[True] ): literal[string] keyword[if] identifier[siendingsize] keyword[is] keyword[not] keyword[None] : identifier[siendingsize] =- identifier[siendingsize] identifier[si_list] =[ identifier[w] [: identifier[siendingsize] ]+ identifier[plending] keyword[for] identifier[w] keyword[in] identifier[lst] ] identifier[pl_bysize] = identifier[bysize] ( identifier[lst] ) identifier[si_bysize] = identifier[bysize] ( identifier[si_list] ) keyword[if] identifier[dojoinstem] : identifier[stem] = identifier[joinstem] ( identifier[siendingsize] , identifier[lst] ) keyword[return] identifier[si_list] , identifier[si_bysize] , identifier[pl_bysize] , identifier[stem] keyword[else] : keyword[return] identifier[si_list] , identifier[si_bysize] , identifier[pl_bysize]
def make_pl_si_lists(lst, plending, siendingsize, dojoinstem=True): """ given a list of singular words: lst an ending to append to make the plural: plending the number of characters to remove from the singular before appending plending: siendingsize a flag whether to create a joinstem: dojoinstem return: a list of pluralised words: si_list (called si because this is what you need to look for to make the singular) the pluralised words as a dict of sets sorted by word length: si_bysize the singular words as a dict of sets sorted by word length: pl_bysize if dojoinstem is True: a regular expression that matches any of the stems: stem """ if siendingsize is not None: siendingsize = -siendingsize # depends on [control=['if'], data=['siendingsize']] si_list = [w[:siendingsize] + plending for w in lst] pl_bysize = bysize(lst) si_bysize = bysize(si_list) if dojoinstem: stem = joinstem(siendingsize, lst) return (si_list, si_bysize, pl_bysize, stem) # depends on [control=['if'], data=[]] else: return (si_list, si_bysize, pl_bysize)
def describe(table_name, region=None, key=None, keyid=None, profile=None): ''' Describe a DynamoDB table. CLI example:: salt myminion boto_dynamodb.describe table_name region=us-east-1 ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) table = Table(table_name, connection=conn) return table.describe()
def function[describe, parameter[table_name, region, key, keyid, profile]]: constant[ Describe a DynamoDB table. CLI example:: salt myminion boto_dynamodb.describe table_name region=us-east-1 ] variable[conn] assign[=] call[name[_get_conn], parameter[]] variable[table] assign[=] call[name[Table], parameter[name[table_name]]] return[call[name[table].describe, parameter[]]]
keyword[def] identifier[describe] ( identifier[table_name] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) identifier[table] = identifier[Table] ( identifier[table_name] , identifier[connection] = identifier[conn] ) keyword[return] identifier[table] . identifier[describe] ()
def describe(table_name, region=None, key=None, keyid=None, profile=None): """ Describe a DynamoDB table. CLI example:: salt myminion boto_dynamodb.describe table_name region=us-east-1 """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) table = Table(table_name, connection=conn) return table.describe()
def parse_blast(blast_string): '''Clean up HTML BLAST results This function requires BeautifulSoup and the re module It goes throught the complicated output returned by the BLAST search and provides a list of matches, as well as the raw text file showing the alignments for each of the matches. This function works best with HTML formatted Inputs ------ get_blast() uses this function internally Parameters ---------- blast_string : str A complete webpage of standard BLAST results Returns ------- out : 2-tuple A tuple consisting of a list of PDB matches, and a list of their alignment text files (unformatted) ''' soup = BeautifulSoup(str(blast_string), "html.parser") all_blasts = list() all_blast_ids = list() pattern = '></a>....:' prog = re.compile(pattern) for item in soup.find_all('pre'): if len(item.find_all('a'))==1: all_blasts.append(item) blast_id = re.findall(pattern, str(item) )[0][-5:-1] all_blast_ids.append(blast_id) out = (all_blast_ids, all_blasts) return out
def function[parse_blast, parameter[blast_string]]: constant[Clean up HTML BLAST results This function requires BeautifulSoup and the re module It goes throught the complicated output returned by the BLAST search and provides a list of matches, as well as the raw text file showing the alignments for each of the matches. This function works best with HTML formatted Inputs ------ get_blast() uses this function internally Parameters ---------- blast_string : str A complete webpage of standard BLAST results Returns ------- out : 2-tuple A tuple consisting of a list of PDB matches, and a list of their alignment text files (unformatted) ] variable[soup] assign[=] call[name[BeautifulSoup], parameter[call[name[str], parameter[name[blast_string]]], constant[html.parser]]] variable[all_blasts] assign[=] call[name[list], parameter[]] variable[all_blast_ids] assign[=] call[name[list], parameter[]] variable[pattern] assign[=] constant[></a>....:] variable[prog] assign[=] call[name[re].compile, parameter[name[pattern]]] for taget[name[item]] in starred[call[name[soup].find_all, parameter[constant[pre]]]] begin[:] if compare[call[name[len], parameter[call[name[item].find_all, parameter[constant[a]]]]] equal[==] constant[1]] begin[:] call[name[all_blasts].append, parameter[name[item]]] variable[blast_id] assign[=] call[call[call[name[re].findall, parameter[name[pattern], call[name[str], parameter[name[item]]]]]][constant[0]]][<ast.Slice object at 0x7da1b26ad0f0>] call[name[all_blast_ids].append, parameter[name[blast_id]]] variable[out] assign[=] tuple[[<ast.Name object at 0x7da1b26afdc0>, <ast.Name object at 0x7da1b26ad9c0>]] return[name[out]]
keyword[def] identifier[parse_blast] ( identifier[blast_string] ): literal[string] identifier[soup] = identifier[BeautifulSoup] ( identifier[str] ( identifier[blast_string] ), literal[string] ) identifier[all_blasts] = identifier[list] () identifier[all_blast_ids] = identifier[list] () identifier[pattern] = literal[string] identifier[prog] = identifier[re] . identifier[compile] ( identifier[pattern] ) keyword[for] identifier[item] keyword[in] identifier[soup] . identifier[find_all] ( literal[string] ): keyword[if] identifier[len] ( identifier[item] . identifier[find_all] ( literal[string] ))== literal[int] : identifier[all_blasts] . identifier[append] ( identifier[item] ) identifier[blast_id] = identifier[re] . identifier[findall] ( identifier[pattern] , identifier[str] ( identifier[item] ))[ literal[int] ][- literal[int] :- literal[int] ] identifier[all_blast_ids] . identifier[append] ( identifier[blast_id] ) identifier[out] =( identifier[all_blast_ids] , identifier[all_blasts] ) keyword[return] identifier[out]
def parse_blast(blast_string): """Clean up HTML BLAST results This function requires BeautifulSoup and the re module It goes throught the complicated output returned by the BLAST search and provides a list of matches, as well as the raw text file showing the alignments for each of the matches. This function works best with HTML formatted Inputs ------ get_blast() uses this function internally Parameters ---------- blast_string : str A complete webpage of standard BLAST results Returns ------- out : 2-tuple A tuple consisting of a list of PDB matches, and a list of their alignment text files (unformatted) """ soup = BeautifulSoup(str(blast_string), 'html.parser') all_blasts = list() all_blast_ids = list() pattern = '></a>....:' prog = re.compile(pattern) for item in soup.find_all('pre'): if len(item.find_all('a')) == 1: all_blasts.append(item) blast_id = re.findall(pattern, str(item))[0][-5:-1] all_blast_ids.append(blast_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] out = (all_blast_ids, all_blasts) return out
def prefixes_for_asn(self, asn): '''Gets the AS information for a given ASN. Return the CIDR and geolocation associated with the AS.''' uri = self._uris["prefixes_for_asn"].format(asn) resp_json = self.get_parse(uri) return resp_json
def function[prefixes_for_asn, parameter[self, asn]]: constant[Gets the AS information for a given ASN. Return the CIDR and geolocation associated with the AS.] variable[uri] assign[=] call[call[name[self]._uris][constant[prefixes_for_asn]].format, parameter[name[asn]]] variable[resp_json] assign[=] call[name[self].get_parse, parameter[name[uri]]] return[name[resp_json]]
keyword[def] identifier[prefixes_for_asn] ( identifier[self] , identifier[asn] ): literal[string] identifier[uri] = identifier[self] . identifier[_uris] [ literal[string] ]. identifier[format] ( identifier[asn] ) identifier[resp_json] = identifier[self] . identifier[get_parse] ( identifier[uri] ) keyword[return] identifier[resp_json]
def prefixes_for_asn(self, asn): """Gets the AS information for a given ASN. Return the CIDR and geolocation associated with the AS.""" uri = self._uris['prefixes_for_asn'].format(asn) resp_json = self.get_parse(uri) return resp_json
def get_portchannel_info_by_intf_output_lacp_actor_port(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_portchannel_info_by_intf = ET.Element("get_portchannel_info_by_intf") config = get_portchannel_info_by_intf output = ET.SubElement(get_portchannel_info_by_intf, "output") lacp = ET.SubElement(output, "lacp") actor_port = ET.SubElement(lacp, "actor-port") actor_port.text = kwargs.pop('actor_port') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[get_portchannel_info_by_intf_output_lacp_actor_port, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[get_portchannel_info_by_intf] assign[=] call[name[ET].Element, parameter[constant[get_portchannel_info_by_intf]]] variable[config] assign[=] name[get_portchannel_info_by_intf] variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_portchannel_info_by_intf], constant[output]]] variable[lacp] assign[=] call[name[ET].SubElement, parameter[name[output], constant[lacp]]] variable[actor_port] assign[=] call[name[ET].SubElement, parameter[name[lacp], constant[actor-port]]] name[actor_port].text assign[=] call[name[kwargs].pop, parameter[constant[actor_port]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[get_portchannel_info_by_intf_output_lacp_actor_port] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[get_portchannel_info_by_intf] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[get_portchannel_info_by_intf] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_portchannel_info_by_intf] , literal[string] ) identifier[lacp] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[actor_port] = identifier[ET] . identifier[SubElement] ( identifier[lacp] , literal[string] ) identifier[actor_port] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def get_portchannel_info_by_intf_output_lacp_actor_port(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') get_portchannel_info_by_intf = ET.Element('get_portchannel_info_by_intf') config = get_portchannel_info_by_intf output = ET.SubElement(get_portchannel_info_by_intf, 'output') lacp = ET.SubElement(output, 'lacp') actor_port = ET.SubElement(lacp, 'actor-port') actor_port.text = kwargs.pop('actor_port') callback = kwargs.pop('callback', self._callback) return callback(config)
def sort_nts(self, nt_list, codekey): """Sort list of namedtuples such so evidence codes in same order as code2nt.""" # Problem is that some members in the nt_list do NOT have # codekey=EvidenceCode, then it returns None, which breaks py34 and 35 # The fix here is that for these members, default to -1 (is this valid?) sortby = lambda nt: self.ev2idx.get(getattr(nt, codekey), -1) return sorted(nt_list, key=sortby)
def function[sort_nts, parameter[self, nt_list, codekey]]: constant[Sort list of namedtuples such so evidence codes in same order as code2nt.] variable[sortby] assign[=] <ast.Lambda object at 0x7da1b2345630> return[call[name[sorted], parameter[name[nt_list]]]]
keyword[def] identifier[sort_nts] ( identifier[self] , identifier[nt_list] , identifier[codekey] ): literal[string] identifier[sortby] = keyword[lambda] identifier[nt] : identifier[self] . identifier[ev2idx] . identifier[get] ( identifier[getattr] ( identifier[nt] , identifier[codekey] ),- literal[int] ) keyword[return] identifier[sorted] ( identifier[nt_list] , identifier[key] = identifier[sortby] )
def sort_nts(self, nt_list, codekey): """Sort list of namedtuples such so evidence codes in same order as code2nt.""" # Problem is that some members in the nt_list do NOT have # codekey=EvidenceCode, then it returns None, which breaks py34 and 35 # The fix here is that for these members, default to -1 (is this valid?) sortby = lambda nt: self.ev2idx.get(getattr(nt, codekey), -1) return sorted(nt_list, key=sortby)
def filter_ast(module_ast): """ Filters a given module ast, removing non-whitelisted nodes It allows only the following top level items: - imports - function definitions - class definitions - top level assignments where all the targets on the LHS are all caps """ def node_predicate(node): """ Return true if given node is whitelisted """ for an in ALLOWED_NODES: if isinstance(node, an): return True # Recurse through Assign node LHS targets when an id is not specified, # otherwise check that the id is uppercase if isinstance(node, ast.Assign): return all([node_predicate(t) for t in node.targets if not hasattr(t, 'id')]) \ and all([t.id.isupper() for t in node.targets if hasattr(t, 'id')]) return False module_ast.body = [n for n in module_ast.body if node_predicate(n)] return module_ast
def function[filter_ast, parameter[module_ast]]: constant[ Filters a given module ast, removing non-whitelisted nodes It allows only the following top level items: - imports - function definitions - class definitions - top level assignments where all the targets on the LHS are all caps ] def function[node_predicate, parameter[node]]: constant[ Return true if given node is whitelisted ] for taget[name[an]] in starred[name[ALLOWED_NODES]] begin[:] if call[name[isinstance], parameter[name[node], name[an]]] begin[:] return[constant[True]] if call[name[isinstance], parameter[name[node], name[ast].Assign]] begin[:] return[<ast.BoolOp object at 0x7da20c76f970>] return[constant[False]] name[module_ast].body assign[=] <ast.ListComp object at 0x7da20c76c550> return[name[module_ast]]
keyword[def] identifier[filter_ast] ( identifier[module_ast] ): literal[string] keyword[def] identifier[node_predicate] ( identifier[node] ): literal[string] keyword[for] identifier[an] keyword[in] identifier[ALLOWED_NODES] : keyword[if] identifier[isinstance] ( identifier[node] , identifier[an] ): keyword[return] keyword[True] keyword[if] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Assign] ): keyword[return] identifier[all] ([ identifier[node_predicate] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[node] . identifier[targets] keyword[if] keyword[not] identifier[hasattr] ( identifier[t] , literal[string] )]) keyword[and] identifier[all] ([ identifier[t] . identifier[id] . identifier[isupper] () keyword[for] identifier[t] keyword[in] identifier[node] . identifier[targets] keyword[if] identifier[hasattr] ( identifier[t] , literal[string] )]) keyword[return] keyword[False] identifier[module_ast] . identifier[body] =[ identifier[n] keyword[for] identifier[n] keyword[in] identifier[module_ast] . identifier[body] keyword[if] identifier[node_predicate] ( identifier[n] )] keyword[return] identifier[module_ast]
def filter_ast(module_ast): """ Filters a given module ast, removing non-whitelisted nodes It allows only the following top level items: - imports - function definitions - class definitions - top level assignments where all the targets on the LHS are all caps """ def node_predicate(node): """ Return true if given node is whitelisted """ for an in ALLOWED_NODES: if isinstance(node, an): return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['an']] # Recurse through Assign node LHS targets when an id is not specified, # otherwise check that the id is uppercase if isinstance(node, ast.Assign): return all([node_predicate(t) for t in node.targets if not hasattr(t, 'id')]) and all([t.id.isupper() for t in node.targets if hasattr(t, 'id')]) # depends on [control=['if'], data=[]] return False module_ast.body = [n for n in module_ast.body if node_predicate(n)] return module_ast
def tracemessage(self, maxlen=6): """ if maxlen > 0, the message is shortened to maxlen traces. """ result = "" for i, value in enumerate(self): result += "{0}: {1}\n".format(i, get_node_repr(value)) result = result.strip("\n") lines = result.split("\n") if maxlen and len(lines) > maxlen: i = int(maxlen / 2) lines = lines[:i] + ["..."] + lines[-(maxlen - i) :] result = "\n".join(lines) return result
def function[tracemessage, parameter[self, maxlen]]: constant[ if maxlen > 0, the message is shortened to maxlen traces. ] variable[result] assign[=] constant[] for taget[tuple[[<ast.Name object at 0x7da1afe515d0>, <ast.Name object at 0x7da1afe51fc0>]]] in starred[call[name[enumerate], parameter[name[self]]]] begin[:] <ast.AugAssign object at 0x7da1afe51480> variable[result] assign[=] call[name[result].strip, parameter[constant[ ]]] variable[lines] assign[=] call[name[result].split, parameter[constant[ ]]] if <ast.BoolOp object at 0x7da1afe53790> begin[:] variable[i] assign[=] call[name[int], parameter[binary_operation[name[maxlen] / constant[2]]]] variable[lines] assign[=] binary_operation[binary_operation[call[name[lines]][<ast.Slice object at 0x7da1afe52a10>] + list[[<ast.Constant object at 0x7da1afe53c40>]]] + call[name[lines]][<ast.Slice object at 0x7da1afe53eb0>]] variable[result] assign[=] call[constant[ ].join, parameter[name[lines]]] return[name[result]]
keyword[def] identifier[tracemessage] ( identifier[self] , identifier[maxlen] = literal[int] ): literal[string] identifier[result] = literal[string] keyword[for] identifier[i] , identifier[value] keyword[in] identifier[enumerate] ( identifier[self] ): identifier[result] += literal[string] . identifier[format] ( identifier[i] , identifier[get_node_repr] ( identifier[value] )) identifier[result] = identifier[result] . identifier[strip] ( literal[string] ) identifier[lines] = identifier[result] . identifier[split] ( literal[string] ) keyword[if] identifier[maxlen] keyword[and] identifier[len] ( identifier[lines] )> identifier[maxlen] : identifier[i] = identifier[int] ( identifier[maxlen] / literal[int] ) identifier[lines] = identifier[lines] [: identifier[i] ]+[ literal[string] ]+ identifier[lines] [-( identifier[maxlen] - identifier[i] ):] identifier[result] = literal[string] . identifier[join] ( identifier[lines] ) keyword[return] identifier[result]
def tracemessage(self, maxlen=6): """ if maxlen > 0, the message is shortened to maxlen traces. """ result = '' for (i, value) in enumerate(self): result += '{0}: {1}\n'.format(i, get_node_repr(value)) # depends on [control=['for'], data=[]] result = result.strip('\n') lines = result.split('\n') if maxlen and len(lines) > maxlen: i = int(maxlen / 2) lines = lines[:i] + ['...'] + lines[-(maxlen - i):] result = '\n'.join(lines) # depends on [control=['if'], data=[]] return result
def _call(self, x): """Return ``self(x)``.""" # The algorithm takes finite differences in one dimension at a time # reusing the dx vector to improve efficiency. dfdx = self.domain.zero() dx = self.domain.zero() if self.method == 'backward': fx = self.functional(x) for i in range(self.domain.size): dx[i - 1] = 0 # reset step from last iteration dx[i] = self.step dfdx[i] = fx - self.functional(x - dx) elif self.method == 'forward': fx = self.functional(x) for i in range(self.domain.size): dx[i - 1] = 0 # reset step from last iteration dx[i] = self.step dfdx[i] = self.functional(x + dx) - fx elif self.method == 'central': for i in range(self.domain.size): dx[i - 1] = 0 # reset step from last iteration dx[i] = self.step / 2 dfdx[i] = self.functional(x + dx) - self.functional(x - dx) else: raise RuntimeError('unknown method') dfdx /= self.step return dfdx
def function[_call, parameter[self, x]]: constant[Return ``self(x)``.] variable[dfdx] assign[=] call[name[self].domain.zero, parameter[]] variable[dx] assign[=] call[name[self].domain.zero, parameter[]] if compare[name[self].method equal[==] constant[backward]] begin[:] variable[fx] assign[=] call[name[self].functional, parameter[name[x]]] for taget[name[i]] in starred[call[name[range], parameter[name[self].domain.size]]] begin[:] call[name[dx]][binary_operation[name[i] - constant[1]]] assign[=] constant[0] call[name[dx]][name[i]] assign[=] name[self].step call[name[dfdx]][name[i]] assign[=] binary_operation[name[fx] - call[name[self].functional, parameter[binary_operation[name[x] - name[dx]]]]] <ast.AugAssign object at 0x7da1b1e58340> return[name[dfdx]]
keyword[def] identifier[_call] ( identifier[self] , identifier[x] ): literal[string] identifier[dfdx] = identifier[self] . identifier[domain] . identifier[zero] () identifier[dx] = identifier[self] . identifier[domain] . identifier[zero] () keyword[if] identifier[self] . identifier[method] == literal[string] : identifier[fx] = identifier[self] . identifier[functional] ( identifier[x] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[domain] . identifier[size] ): identifier[dx] [ identifier[i] - literal[int] ]= literal[int] identifier[dx] [ identifier[i] ]= identifier[self] . identifier[step] identifier[dfdx] [ identifier[i] ]= identifier[fx] - identifier[self] . identifier[functional] ( identifier[x] - identifier[dx] ) keyword[elif] identifier[self] . identifier[method] == literal[string] : identifier[fx] = identifier[self] . identifier[functional] ( identifier[x] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[domain] . identifier[size] ): identifier[dx] [ identifier[i] - literal[int] ]= literal[int] identifier[dx] [ identifier[i] ]= identifier[self] . identifier[step] identifier[dfdx] [ identifier[i] ]= identifier[self] . identifier[functional] ( identifier[x] + identifier[dx] )- identifier[fx] keyword[elif] identifier[self] . identifier[method] == literal[string] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[domain] . identifier[size] ): identifier[dx] [ identifier[i] - literal[int] ]= literal[int] identifier[dx] [ identifier[i] ]= identifier[self] . identifier[step] / literal[int] identifier[dfdx] [ identifier[i] ]= identifier[self] . identifier[functional] ( identifier[x] + identifier[dx] )- identifier[self] . identifier[functional] ( identifier[x] - identifier[dx] ) keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[dfdx] /= identifier[self] . identifier[step] keyword[return] identifier[dfdx]
def _call(self, x): """Return ``self(x)``.""" # The algorithm takes finite differences in one dimension at a time # reusing the dx vector to improve efficiency. dfdx = self.domain.zero() dx = self.domain.zero() if self.method == 'backward': fx = self.functional(x) for i in range(self.domain.size): dx[i - 1] = 0 # reset step from last iteration dx[i] = self.step dfdx[i] = fx - self.functional(x - dx) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] elif self.method == 'forward': fx = self.functional(x) for i in range(self.domain.size): dx[i - 1] = 0 # reset step from last iteration dx[i] = self.step dfdx[i] = self.functional(x + dx) - fx # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] elif self.method == 'central': for i in range(self.domain.size): dx[i - 1] = 0 # reset step from last iteration dx[i] = self.step / 2 dfdx[i] = self.functional(x + dx) - self.functional(x - dx) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] else: raise RuntimeError('unknown method') dfdx /= self.step return dfdx
def forward(self, x): """Compute forward-pass of this module on ``x``. Parameters ---------- x : `torch.autograd.variable.Variable` Input of this layer. The contained tensor must have shape ``extra_shape + operator.domain.shape``, and ``len(extra_shape)`` must be at least 1 (batch axis). Returns ------- out : `torch.autograd.variable.Variable` The computed output. Its tensor will have shape ``extra_shape + operator.range.shape``, where ``extra_shape`` are the extra axes of ``x``. Examples -------- Evaluating on a 2D tensor, where the operator expects a 1D input, i.e., with extra batch axis only: >>> matrix = np.array([[1, 0, 0], ... [0, 1, 1]], dtype='float32') >>> odl_op = odl.MatrixOperator(matrix) >>> odl_op.domain.shape (3,) >>> odl_op.range.shape (2,) >>> op_mod = OperatorAsModule(odl_op) >>> t = torch.ones(3) >>> x = autograd.Variable(t[None, :]) # "fake" batch axis >>> op_mod(x) Variable containing: 1 2 [torch.FloatTensor of size 1x2] >>> t = torch.ones(3) >>> x_tensor = torch.stack([0 * t, 1 * t]) >>> x = autograd.Variable(x_tensor) # batch of 2 inputs >>> op_mod(x) Variable containing: 0 0 1 2 [torch.FloatTensor of size 2x2] An arbitrary number of axes is supported: >>> x = autograd.Variable(t[None, None, :]) # "fake" batch and channel >>> op_mod(x) Variable containing: (0 ,.,.) = 1 2 [torch.FloatTensor of size 1x1x2] >>> x_tensor = torch.stack([torch.stack([0 * t, 1 * t]), ... torch.stack([2 * t, 3 * t]), ... torch.stack([4 * t, 5 * t])]) >>> x = autograd.Variable(x_tensor) # batch of 3x2 inputs >>> op_mod(x) Variable containing: (0 ,.,.) = 0 0 1 2 <BLANKLINE> (1 ,.,.) = 2 4 3 6 <BLANKLINE> (2 ,.,.) 
= 4 8 5 10 [torch.FloatTensor of size 3x2x2] """ in_shape = x.data.shape op_in_shape = self.op_func.operator.domain.shape op_out_shape = self.op_func.operator.range.shape extra_shape = in_shape[:-len(op_in_shape)] if in_shape[-len(op_in_shape):] != op_in_shape or not extra_shape: shp_str = str(op_in_shape).strip('()') raise ValueError('expected input of shape (N, *, {}), got input ' 'with shape {}'.format(shp_str, in_shape)) # Flatten extra axes, then do one entry at a time newshape = (int(np.prod(extra_shape)),) + op_in_shape x_flat_xtra = x.reshape(*newshape) results = [] for i in range(x_flat_xtra.data.shape[0]): results.append(self.op_func(x_flat_xtra[i])) # Reshape the resulting stack to the expected output shape stack_flat_xtra = torch.stack(results) return stack_flat_xtra.view(extra_shape + op_out_shape)
def function[forward, parameter[self, x]]: constant[Compute forward-pass of this module on ``x``. Parameters ---------- x : `torch.autograd.variable.Variable` Input of this layer. The contained tensor must have shape ``extra_shape + operator.domain.shape``, and ``len(extra_shape)`` must be at least 1 (batch axis). Returns ------- out : `torch.autograd.variable.Variable` The computed output. Its tensor will have shape ``extra_shape + operator.range.shape``, where ``extra_shape`` are the extra axes of ``x``. Examples -------- Evaluating on a 2D tensor, where the operator expects a 1D input, i.e., with extra batch axis only: >>> matrix = np.array([[1, 0, 0], ... [0, 1, 1]], dtype='float32') >>> odl_op = odl.MatrixOperator(matrix) >>> odl_op.domain.shape (3,) >>> odl_op.range.shape (2,) >>> op_mod = OperatorAsModule(odl_op) >>> t = torch.ones(3) >>> x = autograd.Variable(t[None, :]) # "fake" batch axis >>> op_mod(x) Variable containing: 1 2 [torch.FloatTensor of size 1x2] >>> t = torch.ones(3) >>> x_tensor = torch.stack([0 * t, 1 * t]) >>> x = autograd.Variable(x_tensor) # batch of 2 inputs >>> op_mod(x) Variable containing: 0 0 1 2 [torch.FloatTensor of size 2x2] An arbitrary number of axes is supported: >>> x = autograd.Variable(t[None, None, :]) # "fake" batch and channel >>> op_mod(x) Variable containing: (0 ,.,.) = 1 2 [torch.FloatTensor of size 1x1x2] >>> x_tensor = torch.stack([torch.stack([0 * t, 1 * t]), ... torch.stack([2 * t, 3 * t]), ... torch.stack([4 * t, 5 * t])]) >>> x = autograd.Variable(x_tensor) # batch of 3x2 inputs >>> op_mod(x) Variable containing: (0 ,.,.) = 0 0 1 2 <BLANKLINE> (1 ,.,.) = 2 4 3 6 <BLANKLINE> (2 ,.,.) 
= 4 8 5 10 [torch.FloatTensor of size 3x2x2] ] variable[in_shape] assign[=] name[x].data.shape variable[op_in_shape] assign[=] name[self].op_func.operator.domain.shape variable[op_out_shape] assign[=] name[self].op_func.operator.range.shape variable[extra_shape] assign[=] call[name[in_shape]][<ast.Slice object at 0x7da1b1e5e5f0>] if <ast.BoolOp object at 0x7da1b1e5da50> begin[:] variable[shp_str] assign[=] call[call[name[str], parameter[name[op_in_shape]]].strip, parameter[constant[()]]] <ast.Raise object at 0x7da1b1d0de70> variable[newshape] assign[=] binary_operation[tuple[[<ast.Call object at 0x7da1b1d0c940>]] + name[op_in_shape]] variable[x_flat_xtra] assign[=] call[name[x].reshape, parameter[<ast.Starred object at 0x7da1b1d0fd00>]] variable[results] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[call[name[x_flat_xtra].data.shape][constant[0]]]]] begin[:] call[name[results].append, parameter[call[name[self].op_func, parameter[call[name[x_flat_xtra]][name[i]]]]]] variable[stack_flat_xtra] assign[=] call[name[torch].stack, parameter[name[results]]] return[call[name[stack_flat_xtra].view, parameter[binary_operation[name[extra_shape] + name[op_out_shape]]]]]
keyword[def] identifier[forward] ( identifier[self] , identifier[x] ): literal[string] identifier[in_shape] = identifier[x] . identifier[data] . identifier[shape] identifier[op_in_shape] = identifier[self] . identifier[op_func] . identifier[operator] . identifier[domain] . identifier[shape] identifier[op_out_shape] = identifier[self] . identifier[op_func] . identifier[operator] . identifier[range] . identifier[shape] identifier[extra_shape] = identifier[in_shape] [:- identifier[len] ( identifier[op_in_shape] )] keyword[if] identifier[in_shape] [- identifier[len] ( identifier[op_in_shape] ):]!= identifier[op_in_shape] keyword[or] keyword[not] identifier[extra_shape] : identifier[shp_str] = identifier[str] ( identifier[op_in_shape] ). identifier[strip] ( literal[string] ) keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[shp_str] , identifier[in_shape] )) identifier[newshape] =( identifier[int] ( identifier[np] . identifier[prod] ( identifier[extra_shape] )),)+ identifier[op_in_shape] identifier[x_flat_xtra] = identifier[x] . identifier[reshape] (* identifier[newshape] ) identifier[results] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[x_flat_xtra] . identifier[data] . identifier[shape] [ literal[int] ]): identifier[results] . identifier[append] ( identifier[self] . identifier[op_func] ( identifier[x_flat_xtra] [ identifier[i] ])) identifier[stack_flat_xtra] = identifier[torch] . identifier[stack] ( identifier[results] ) keyword[return] identifier[stack_flat_xtra] . identifier[view] ( identifier[extra_shape] + identifier[op_out_shape] )
def forward(self, x): """Compute forward-pass of this module on ``x``. Parameters ---------- x : `torch.autograd.variable.Variable` Input of this layer. The contained tensor must have shape ``extra_shape + operator.domain.shape``, and ``len(extra_shape)`` must be at least 1 (batch axis). Returns ------- out : `torch.autograd.variable.Variable` The computed output. Its tensor will have shape ``extra_shape + operator.range.shape``, where ``extra_shape`` are the extra axes of ``x``. Examples -------- Evaluating on a 2D tensor, where the operator expects a 1D input, i.e., with extra batch axis only: >>> matrix = np.array([[1, 0, 0], ... [0, 1, 1]], dtype='float32') >>> odl_op = odl.MatrixOperator(matrix) >>> odl_op.domain.shape (3,) >>> odl_op.range.shape (2,) >>> op_mod = OperatorAsModule(odl_op) >>> t = torch.ones(3) >>> x = autograd.Variable(t[None, :]) # "fake" batch axis >>> op_mod(x) Variable containing: 1 2 [torch.FloatTensor of size 1x2] >>> t = torch.ones(3) >>> x_tensor = torch.stack([0 * t, 1 * t]) >>> x = autograd.Variable(x_tensor) # batch of 2 inputs >>> op_mod(x) Variable containing: 0 0 1 2 [torch.FloatTensor of size 2x2] An arbitrary number of axes is supported: >>> x = autograd.Variable(t[None, None, :]) # "fake" batch and channel >>> op_mod(x) Variable containing: (0 ,.,.) = 1 2 [torch.FloatTensor of size 1x1x2] >>> x_tensor = torch.stack([torch.stack([0 * t, 1 * t]), ... torch.stack([2 * t, 3 * t]), ... torch.stack([4 * t, 5 * t])]) >>> x = autograd.Variable(x_tensor) # batch of 3x2 inputs >>> op_mod(x) Variable containing: (0 ,.,.) = 0 0 1 2 <BLANKLINE> (1 ,.,.) = 2 4 3 6 <BLANKLINE> (2 ,.,.) 
= 4 8 5 10 [torch.FloatTensor of size 3x2x2] """ in_shape = x.data.shape op_in_shape = self.op_func.operator.domain.shape op_out_shape = self.op_func.operator.range.shape extra_shape = in_shape[:-len(op_in_shape)] if in_shape[-len(op_in_shape):] != op_in_shape or not extra_shape: shp_str = str(op_in_shape).strip('()') raise ValueError('expected input of shape (N, *, {}), got input with shape {}'.format(shp_str, in_shape)) # depends on [control=['if'], data=[]] # Flatten extra axes, then do one entry at a time newshape = (int(np.prod(extra_shape)),) + op_in_shape x_flat_xtra = x.reshape(*newshape) results = [] for i in range(x_flat_xtra.data.shape[0]): results.append(self.op_func(x_flat_xtra[i])) # depends on [control=['for'], data=['i']] # Reshape the resulting stack to the expected output shape stack_flat_xtra = torch.stack(results) return stack_flat_xtra.view(extra_shape + op_out_shape)
def cache_spec(name, spec): """ Cache the spec dict :param name: Version name :param spec: Spec dict :return: True if cached """ return cache.set(build_cache_name(name), spec, app_settings.ESI_SPEC_CACHE_DURATION)
def function[cache_spec, parameter[name, spec]]: constant[ Cache the spec dict :param name: Version name :param spec: Spec dict :return: True if cached ] return[call[name[cache].set, parameter[call[name[build_cache_name], parameter[name[name]]], name[spec], name[app_settings].ESI_SPEC_CACHE_DURATION]]]
keyword[def] identifier[cache_spec] ( identifier[name] , identifier[spec] ): literal[string] keyword[return] identifier[cache] . identifier[set] ( identifier[build_cache_name] ( identifier[name] ), identifier[spec] , identifier[app_settings] . identifier[ESI_SPEC_CACHE_DURATION] )
def cache_spec(name, spec): """ Cache the spec dict :param name: Version name :param spec: Spec dict :return: True if cached """ return cache.set(build_cache_name(name), spec, app_settings.ESI_SPEC_CACHE_DURATION)
def create_attributes(klass, attributes, previous_object=None): """ Attributes for content type creation. """ result = super(ContentType, klass).create_attributes(attributes, previous_object) if 'fields' not in result: result['fields'] = [] return result
def function[create_attributes, parameter[klass, attributes, previous_object]]: constant[ Attributes for content type creation. ] variable[result] assign[=] call[call[name[super], parameter[name[ContentType], name[klass]]].create_attributes, parameter[name[attributes], name[previous_object]]] if compare[constant[fields] <ast.NotIn object at 0x7da2590d7190> name[result]] begin[:] call[name[result]][constant[fields]] assign[=] list[[]] return[name[result]]
keyword[def] identifier[create_attributes] ( identifier[klass] , identifier[attributes] , identifier[previous_object] = keyword[None] ): literal[string] identifier[result] = identifier[super] ( identifier[ContentType] , identifier[klass] ). identifier[create_attributes] ( identifier[attributes] , identifier[previous_object] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[result] : identifier[result] [ literal[string] ]=[] keyword[return] identifier[result]
def create_attributes(klass, attributes, previous_object=None): """ Attributes for content type creation. """ result = super(ContentType, klass).create_attributes(attributes, previous_object) if 'fields' not in result: result['fields'] = [] # depends on [control=['if'], data=['result']] return result
def records(self): """ Access the records :returns: twilio.rest.api.v2010.account.usage.record.RecordList :rtype: twilio.rest.api.v2010.account.usage.record.RecordList """ if self._records is None: self._records = RecordList(self._version, account_sid=self._solution['account_sid'], ) return self._records
def function[records, parameter[self]]: constant[ Access the records :returns: twilio.rest.api.v2010.account.usage.record.RecordList :rtype: twilio.rest.api.v2010.account.usage.record.RecordList ] if compare[name[self]._records is constant[None]] begin[:] name[self]._records assign[=] call[name[RecordList], parameter[name[self]._version]] return[name[self]._records]
keyword[def] identifier[records] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_records] keyword[is] keyword[None] : identifier[self] . identifier[_records] = identifier[RecordList] ( identifier[self] . identifier[_version] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],) keyword[return] identifier[self] . identifier[_records]
def records(self): """ Access the records :returns: twilio.rest.api.v2010.account.usage.record.RecordList :rtype: twilio.rest.api.v2010.account.usage.record.RecordList """ if self._records is None: self._records = RecordList(self._version, account_sid=self._solution['account_sid']) # depends on [control=['if'], data=[]] return self._records
def update_batch(self, loss_per_instance): """ Computes priorities according to loss. Args: loss_per_instance: """ if self.batch_indices is None: raise TensorForceError("Need to call get_batch before each update_batch call.") # if len(loss_per_instance) != len(self.batch_indices): # raise TensorForceError("For all instances a loss value has to be provided.") for index, loss in zip(self.batch_indices, loss_per_instance): # Sampling priority is proportional to the largest absolute temporal difference error. new_priority = (np.abs(loss) + self.prioritization_constant) ** self.prioritization_weight self.observations._move(index, new_priority) self.none_priority_index += 1
def function[update_batch, parameter[self, loss_per_instance]]: constant[ Computes priorities according to loss. Args: loss_per_instance: ] if compare[name[self].batch_indices is constant[None]] begin[:] <ast.Raise object at 0x7da18dc99cf0> for taget[tuple[[<ast.Name object at 0x7da18dc9bd90>, <ast.Name object at 0x7da18dc9b340>]]] in starred[call[name[zip], parameter[name[self].batch_indices, name[loss_per_instance]]]] begin[:] variable[new_priority] assign[=] binary_operation[binary_operation[call[name[np].abs, parameter[name[loss]]] + name[self].prioritization_constant] ** name[self].prioritization_weight] call[name[self].observations._move, parameter[name[index], name[new_priority]]] <ast.AugAssign object at 0x7da18dc99f90>
keyword[def] identifier[update_batch] ( identifier[self] , identifier[loss_per_instance] ): literal[string] keyword[if] identifier[self] . identifier[batch_indices] keyword[is] keyword[None] : keyword[raise] identifier[TensorForceError] ( literal[string] ) keyword[for] identifier[index] , identifier[loss] keyword[in] identifier[zip] ( identifier[self] . identifier[batch_indices] , identifier[loss_per_instance] ): identifier[new_priority] =( identifier[np] . identifier[abs] ( identifier[loss] )+ identifier[self] . identifier[prioritization_constant] )** identifier[self] . identifier[prioritization_weight] identifier[self] . identifier[observations] . identifier[_move] ( identifier[index] , identifier[new_priority] ) identifier[self] . identifier[none_priority_index] += literal[int]
def update_batch(self, loss_per_instance): """ Computes priorities according to loss. Args: loss_per_instance: """ if self.batch_indices is None: raise TensorForceError('Need to call get_batch before each update_batch call.') # depends on [control=['if'], data=[]] # if len(loss_per_instance) != len(self.batch_indices): # raise TensorForceError("For all instances a loss value has to be provided.") for (index, loss) in zip(self.batch_indices, loss_per_instance): # Sampling priority is proportional to the largest absolute temporal difference error. new_priority = (np.abs(loss) + self.prioritization_constant) ** self.prioritization_weight self.observations._move(index, new_priority) self.none_priority_index += 1 # depends on [control=['for'], data=[]]
def _get_repo_filter(self, query): """ Apply repository wide side filter / mask query """ if self.filter is not None: return query.extra(where=[self.filter]) return query
def function[_get_repo_filter, parameter[self, query]]: constant[ Apply repository wide side filter / mask query ] if compare[name[self].filter is_not constant[None]] begin[:] return[call[name[query].extra, parameter[]]] return[name[query]]
keyword[def] identifier[_get_repo_filter] ( identifier[self] , identifier[query] ): literal[string] keyword[if] identifier[self] . identifier[filter] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[query] . identifier[extra] ( identifier[where] =[ identifier[self] . identifier[filter] ]) keyword[return] identifier[query]
def _get_repo_filter(self, query): """ Apply repository wide side filter / mask query """ if self.filter is not None: return query.extra(where=[self.filter]) # depends on [control=['if'], data=[]] return query