Dataset schema: each row holds four parallel string representations of one Python function (lengths are min/max characters per field).

    code             75 – 104k    original Python source
    code_sememe      47 – 309k    serialized AST ("sememe") form
    token_type       215 – 214k   token stream tagged as keyword / identifier / literal
    code_dependency  75 – 155k    source annotated with "# depends on [control=..., data=...]" comments
def get_or_create_environment(self, id=None, name=None, zone=None, default=False):
    """ Get environment by id or name.
    If not found: create with given or generated parameters
    """
    if id:
        return self.get_environment(id=id)
    elif name:
        try:
            env = self.get_environment(name=name)
            self._assert_env_and_zone(env, zone)
        except exceptions.NotFoundError:
            env = self.create_environment(name=name, zone=zone, default=default)
        return env
    else:
        name = 'auto-generated-env'
        return self.create_environment(name=name, zone=zone, default=default)
def function[get_or_create_environment, parameter[self, id, name, zone, default]]: constant[ Get environment by id or name. If not found: create with given or generated parameters ] if name[id] begin[:] return[call[name[self].get_environment, parameter[]]]
keyword[def] identifier[get_or_create_environment] ( identifier[self] , identifier[id] = keyword[None] , identifier[name] = keyword[None] , identifier[zone] = keyword[None] , identifier[default] = keyword[False] ): literal[string] keyword[if] identifier[id] : keyword[return] identifier[self] . identifier[get_environment] ( identifier[id] = identifier[id] ) keyword[elif] identifier[name] : keyword[try] : identifier[env] = identifier[self] . identifier[get_environment] ( identifier[name] = identifier[name] ) identifier[self] . identifier[_assert_env_and_zone] ( identifier[env] , identifier[zone] ) keyword[except] identifier[exceptions] . identifier[NotFoundError] : identifier[env] = identifier[self] . identifier[create_environment] ( identifier[name] = identifier[name] , identifier[zone] = identifier[zone] , identifier[default] = identifier[default] ) keyword[return] identifier[env] keyword[else] : identifier[name] = literal[string] keyword[return] identifier[self] . identifier[create_environment] ( identifier[name] = identifier[name] , identifier[zone] = identifier[zone] , identifier[default] = identifier[default] )
def get_or_create_environment(self, id=None, name=None, zone=None, default=False):
    """ Get environment by id or name.
    If not found: create with given or generated parameters
    """
    if id:
        return self.get_environment(id=id) # depends on [control=['if'], data=[]]
    elif name:
        try:
            env = self.get_environment(name=name)
            self._assert_env_and_zone(env, zone) # depends on [control=['try'], data=[]]
        except exceptions.NotFoundError:
            env = self.create_environment(name=name, zone=zone, default=default) # depends on [control=['except'], data=[]]
        return env # depends on [control=['if'], data=[]]
    else:
        name = 'auto-generated-env'
        return self.create_environment(name=name, zone=zone, default=default)
def refresh(self):
    """ Updates the cache with setting values from the database. """
    # `values_list('name', 'value')` doesn't work because `value` is not a
    # setting (base class) field, it's a setting value (subclass) field. So
    # we have to get real instances.
    args = [(obj.name, obj.value) for obj in self.queryset.all()]
    super(SettingDict, self).update(args)
    self.empty_cache = False
def function[refresh, parameter[self]]: constant[ Updates the cache with setting values from the database. ] variable[args] assign[=] <ast.ListComp object at 0x7da20e955180> call[call[name[super], parameter[name[SettingDict], name[self]]].update, parameter[name[args]]] name[self].empty_cache assign[=] constant[False]
keyword[def] identifier[refresh] ( identifier[self] ): literal[string] identifier[args] =[( identifier[obj] . identifier[name] , identifier[obj] . identifier[value] ) keyword[for] identifier[obj] keyword[in] identifier[self] . identifier[queryset] . identifier[all] ()] identifier[super] ( identifier[SettingDict] , identifier[self] ). identifier[update] ( identifier[args] ) identifier[self] . identifier[empty_cache] = keyword[False]
def refresh(self):
    """ Updates the cache with setting values from the database. """
    # `values_list('name', 'value')` doesn't work because `value` is not a
    # setting (base class) field, it's a setting value (subclass) field. So
    # we have to get real instances.
    args = [(obj.name, obj.value) for obj in self.queryset.all()]
    super(SettingDict, self).update(args)
    self.empty_cache = False
def get_contact(self, jid):
    """ Returns a contact

    Args:
        jid (aioxmpp.JID): jid of the contact

    Returns:
        dict: the roster of contacts
    """
    try:
        return self.get_contacts()[jid.bare()]
    except KeyError:
        raise ContactNotFound
    except AttributeError:
        raise AttributeError("jid must be an aioxmpp.JID object")
def function[get_contact, parameter[self, jid]]: constant[ Returns a contact Args: jid (aioxmpp.JID): jid of the contact Returns: dict: the roster of contacts ] <ast.Try object at 0x7da1b07930d0>
keyword[def] identifier[get_contact] ( identifier[self] , identifier[jid] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[get_contacts] ()[ identifier[jid] . identifier[bare] ()] keyword[except] identifier[KeyError] : keyword[raise] identifier[ContactNotFound] keyword[except] identifier[AttributeError] : keyword[raise] identifier[AttributeError] ( literal[string] )
def get_contact(self, jid):
    """ Returns a contact

    Args:
        jid (aioxmpp.JID): jid of the contact

    Returns:
        dict: the roster of contacts
    """
    try:
        return self.get_contacts()[jid.bare()] # depends on [control=['try'], data=[]]
    except KeyError:
        raise ContactNotFound # depends on [control=['except'], data=[]]
    except AttributeError:
        raise AttributeError('jid must be an aioxmpp.JID object') # depends on [control=['except'], data=[]]
def getAsGrassAsciiRaster(self, tableName, rasterId=1, rasterIdFieldName='id', rasterFieldName='raster', newSRID=None):
    """
    Returns a string representation of the raster in GRASS ASCII raster format.
    """
    # Get raster in ArcInfo Grid format
    arcInfoGrid = self.getAsGdalRaster(rasterFieldName, tableName, rasterIdFieldName,
                                       rasterId, 'AAIGrid', newSRID).splitlines()

    ## Convert arcInfoGrid to GRASS ASCII format ##
    # Get values from the header, which looks something like this:
    # ncols        67
    # nrows        55
    # xllcorner    425802.32143212341
    # yllcorner    44091450.41551345213
    # cellsize     90.0000000
    # ...
    nCols = int(arcInfoGrid[0].split()[1])
    nRows = int(arcInfoGrid[1].split()[1])
    xLLCorner = float(arcInfoGrid[2].split()[1])
    yLLCorner = float(arcInfoGrid[3].split()[1])
    cellSize = float(arcInfoGrid[4].split()[1])

    # Remove old headers
    for i in range(0, 5):
        arcInfoGrid.pop(0)

    # Check for NODATA_value row and remove if it is there
    if 'NODATA_value' in arcInfoGrid[0]:
        arcInfoGrid.pop(0)

    ## Calculate values for GRASS ASCII headers ##
    # These should look like this:
    # north: 4501028.972140
    # south: 4494548.972140
    # east:  460348.288604
    # west:  454318.288604
    # rows:  72
    # cols:  67
    # ...

    # xLLCorner and yLLCorner represent the coordinates for the Lower Left corner of the raster
    north = yLLCorner + (cellSize * nRows)
    south = yLLCorner
    east = xLLCorner + (cellSize * nCols)
    west = xLLCorner

    # Create header Lines (the first shall be last and the last shall be first)
    grassHeader = ['cols: %s' % nCols,
                   'rows: %s' % nRows,
                   'west: %s' % west,
                   'east: %s' % east,
                   'south: %s' % south,
                   'north: %s' % north]

    # Insert grass headers into the grid
    for header in grassHeader:
        arcInfoGrid.insert(0, header)

    # Create string
    arcInfoGridString = '\n'.join(arcInfoGrid)
    return arcInfoGridString
def function[getAsGrassAsciiRaster, parameter[self, tableName, rasterId, rasterIdFieldName, rasterFieldName, newSRID]]: constant[ Returns a string representation of the raster in GRASS ASCII raster format. ] variable[arcInfoGrid] assign[=] call[call[name[self].getAsGdalRaster, parameter[name[rasterFieldName], name[tableName], name[rasterIdFieldName], name[rasterId], constant[AAIGrid], name[newSRID]]].splitlines, parameter[]] variable[nCols] assign[=] call[name[int], parameter[call[call[call[name[arcInfoGrid]][constant[0]].split, parameter[]]][constant[1]]]] variable[nRows] assign[=] call[name[int], parameter[call[call[call[name[arcInfoGrid]][constant[1]].split, parameter[]]][constant[1]]]] variable[xLLCorner] assign[=] call[name[float], parameter[call[call[call[name[arcInfoGrid]][constant[2]].split, parameter[]]][constant[1]]]] variable[yLLCorner] assign[=] call[name[float], parameter[call[call[call[name[arcInfoGrid]][constant[3]].split, parameter[]]][constant[1]]]] variable[cellSize] assign[=] call[name[float], parameter[call[call[call[name[arcInfoGrid]][constant[4]].split, parameter[]]][constant[1]]]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[5]]]] begin[:] call[name[arcInfoGrid].pop, parameter[constant[0]]] if compare[constant[NODATA_value] in call[name[arcInfoGrid]][constant[0]]] begin[:] call[name[arcInfoGrid].pop, parameter[constant[0]]] variable[north] assign[=] binary_operation[name[yLLCorner] + binary_operation[name[cellSize] * name[nRows]]] variable[south] assign[=] name[yLLCorner] variable[east] assign[=] binary_operation[name[xLLCorner] + binary_operation[name[cellSize] * name[nCols]]] variable[west] assign[=] name[xLLCorner] variable[grassHeader] assign[=] list[[<ast.BinOp object at 0x7da18f09e140>, <ast.BinOp object at 0x7da18f09c310>, <ast.BinOp object at 0x7da18f09e770>, <ast.BinOp object at 0x7da18f09c5e0>, <ast.BinOp object at 0x7da18f09e7d0>, <ast.BinOp object at 0x7da18f09f010>]] for taget[name[header]] in starred[name[grassHeader]] begin[:] call[name[arcInfoGrid].insert, parameter[constant[0], name[header]]] variable[arcInfoGridString] assign[=] call[constant[ ].join, parameter[name[arcInfoGrid]]] return[name[arcInfoGridString]]
keyword[def] identifier[getAsGrassAsciiRaster] ( identifier[self] , identifier[tableName] , identifier[rasterId] = literal[int] , identifier[rasterIdFieldName] = literal[string] , identifier[rasterFieldName] = literal[string] , identifier[newSRID] = keyword[None] ): literal[string] identifier[arcInfoGrid] = identifier[self] . identifier[getAsGdalRaster] ( identifier[rasterFieldName] , identifier[tableName] , identifier[rasterIdFieldName] , identifier[rasterId] , literal[string] , identifier[newSRID] ). identifier[splitlines] () identifier[nCols] = identifier[int] ( identifier[arcInfoGrid] [ literal[int] ]. identifier[split] ()[ literal[int] ]) identifier[nRows] = identifier[int] ( identifier[arcInfoGrid] [ literal[int] ]. identifier[split] ()[ literal[int] ]) identifier[xLLCorner] = identifier[float] ( identifier[arcInfoGrid] [ literal[int] ]. identifier[split] ()[ literal[int] ]) identifier[yLLCorner] = identifier[float] ( identifier[arcInfoGrid] [ literal[int] ]. identifier[split] ()[ literal[int] ]) identifier[cellSize] = identifier[float] ( identifier[arcInfoGrid] [ literal[int] ]. identifier[split] ()[ literal[int] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[arcInfoGrid] . identifier[pop] ( literal[int] ) keyword[if] literal[string] keyword[in] identifier[arcInfoGrid] [ literal[int] ]: identifier[arcInfoGrid] . identifier[pop] ( literal[int] ) identifier[north] = identifier[yLLCorner] +( identifier[cellSize] * identifier[nRows] ) identifier[south] = identifier[yLLCorner] identifier[east] = identifier[xLLCorner] +( identifier[cellSize] * identifier[nCols] ) identifier[west] = identifier[xLLCorner] identifier[grassHeader] =[ literal[string] % identifier[nCols] , literal[string] % identifier[nRows] , literal[string] % identifier[west] , literal[string] % identifier[east] , literal[string] % identifier[south] , literal[string] % identifier[north] ] keyword[for] identifier[header] keyword[in] identifier[grassHeader] : identifier[arcInfoGrid] . identifier[insert] ( literal[int] , identifier[header] ) identifier[arcInfoGridString] = literal[string] . identifier[join] ( identifier[arcInfoGrid] ) keyword[return] identifier[arcInfoGridString]
def getAsGrassAsciiRaster(self, tableName, rasterId=1, rasterIdFieldName='id', rasterFieldName='raster', newSRID=None):
    """
    Returns a string representation of the raster in GRASS ASCII raster format.
    """
    # Get raster in ArcInfo Grid format
    arcInfoGrid = self.getAsGdalRaster(rasterFieldName, tableName, rasterIdFieldName, rasterId, 'AAIGrid', newSRID).splitlines()

    ## Convert arcInfoGrid to GRASS ASCII format ##
    # Get values from the header, which looks something like this:
    # ncols        67
    # nrows        55
    # xllcorner    425802.32143212341
    # yllcorner    44091450.41551345213
    # cellsize     90.0000000
    # ...
    nCols = int(arcInfoGrid[0].split()[1])
    nRows = int(arcInfoGrid[1].split()[1])
    xLLCorner = float(arcInfoGrid[2].split()[1])
    yLLCorner = float(arcInfoGrid[3].split()[1])
    cellSize = float(arcInfoGrid[4].split()[1])

    # Remove old headers
    for i in range(0, 5):
        arcInfoGrid.pop(0) # depends on [control=['for'], data=[]]

    # Check for NODATA_value row and remove if it is there
    if 'NODATA_value' in arcInfoGrid[0]:
        arcInfoGrid.pop(0) # depends on [control=['if'], data=[]]

    ## Calculate values for GRASS ASCII headers ##
    # These should look like this:
    # north: 4501028.972140
    # south: 4494548.972140
    # east:  460348.288604
    # west:  454318.288604
    # rows:  72
    # cols:  67
    # ...

    # xLLCorner and yLLCorner represent the coordinates for the Lower Left corner of the raster
    north = yLLCorner + cellSize * nRows
    south = yLLCorner
    east = xLLCorner + cellSize * nCols
    west = xLLCorner

    # Create header Lines (the first shall be last and the last shall be first)
    grassHeader = ['cols: %s' % nCols, 'rows: %s' % nRows, 'west: %s' % west, 'east: %s' % east, 'south: %s' % south, 'north: %s' % north]

    # Insert grass headers into the grid
    for header in grassHeader:
        arcInfoGrid.insert(0, header) # depends on [control=['for'], data=['header']]

    # Create string
    arcInfoGridString = '\n'.join(arcInfoGrid)
    return arcInfoGridString
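The header arithmetic above is easy to verify by hand: ArcInfo gives only the lower-left corner, and GRASS wants all four edges, so each edge is the corner plus cell size times the grid extent. A toy check with invented values (not taken from the dataset):

# Hypothetical 3x2 grid of 10-unit cells anchored at lower-left corner (100, 200).
nCols, nRows, cellSize = 3, 2, 10.0
xLLCorner, yLLCorner = 100.0, 200.0

north = yLLCorner + cellSize * nRows  # 220.0
east = xLLCorner + cellSize * nCols   # 130.0
print('north: %s south: %s east: %s west: %s' % (north, yLLCorner, east, xLLCorner))
# north: 220.0 south: 200.0 east: 130.0 west: 100.0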
def _dict_compare(d1, d2):
    """
    We care if one of two things happens:
      * d2 has added a new key
      * a (value for the same key) in d2 has a different value than d1

    We don't care if this stuff happens:
      * A key is deleted from the dict

    Should return a list of keys that either have been added or have a
    different value than they used to
    """
    keys_added = set(d2.keys()) - set(d1.keys())
    keys_changed = [k for k in d1.keys() if k in d2.keys() and d1[k] != d2[k]]
    return list(keys_added) + keys_changed
def function[_dict_compare, parameter[d1, d2]]: constant[ We care if one of two things happens: * d2 has added a new key * a (value for the same key) in d2 has a different value than d1 We don't care if this stuff happens: * A key is deleted from the dict Should return a list of keys that either have been added or have a different value than they used to ] variable[keys_added] assign[=] binary_operation[call[name[set], parameter[call[name[d2].keys, parameter[]]]] - call[name[set], parameter[call[name[d1].keys, parameter[]]]]] variable[keys_changed] assign[=] <ast.ListComp object at 0x7da1b1605a20> return[binary_operation[call[name[list], parameter[name[keys_added]]] + name[keys_changed]]]
keyword[def] identifier[_dict_compare] ( identifier[d1] , identifier[d2] ): literal[string] identifier[keys_added] = identifier[set] ( identifier[d2] . identifier[keys] ())- identifier[set] ( identifier[d1] . identifier[keys] ()) identifier[keys_changed] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[d1] . identifier[keys] () keyword[if] identifier[k] keyword[in] identifier[d2] . identifier[keys] () keyword[and] identifier[d1] [ identifier[k] ]!= identifier[d2] [ identifier[k] ]] keyword[return] identifier[list] ( identifier[keys_added] )+ identifier[keys_changed]
def _dict_compare(d1, d2):
    """
    We care if one of two things happens:
      * d2 has added a new key
      * a (value for the same key) in d2 has a different value than d1

    We don't care if this stuff happens:
      * A key is deleted from the dict

    Should return a list of keys that either have been added or have a
    different value than they used to
    """
    keys_added = set(d2.keys()) - set(d1.keys())
    keys_changed = [k for k in d1.keys() if k in d2.keys() and d1[k] != d2[k]]
    return list(keys_added) + keys_changed
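A quick standalone check of _dict_compare with made-up dicts: added keys and changed values are reported, deletions are ignored.

old = {'a': 1, 'b': 2, 'dropped': 3}
new = {'a': 1, 'b': 20, 'c': 4}

print(_dict_compare(old, new))
# ['c', 'b'] -- 'c' was added, 'b' changed; the deleted key 'dropped' is not reported.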
def _write_model(self, specification, specification_set):
    """ Write autogenerated specification file
    """
    filename = "%s%s.py" % (self._class_prefix.lower(), specification.entity_name.lower())
    override_content = self._extract_override_content(specification.entity_name)
    constants = self._extract_constants(specification)
    superclass_name = "NURESTRootObject" if specification.rest_name == self.api_root else "NURESTObject"

    self.write(destination=self.output_directory, filename=filename,
               template_name="model.py.tpl",
               specification=specification,
               specification_set=specification_set,
               version=self.api_version,
               class_prefix=self._class_prefix,
               product_accronym=self._product_accronym,
               override_content=override_content,
               superclass_name=superclass_name,
               constants=constants,
               header=self.header_content)

    self.model_filenames[filename] = specification.entity_name
def function[_write_model, parameter[self, specification, specification_set]]: constant[ Write autogenerate specification file ] variable[filename] assign[=] binary_operation[constant[%s%s.py] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b0693b80>, <ast.Call object at 0x7da1b0691e40>]]] variable[override_content] assign[=] call[name[self]._extract_override_content, parameter[name[specification].entity_name]] variable[constants] assign[=] call[name[self]._extract_constants, parameter[name[specification]]] variable[superclass_name] assign[=] <ast.IfExp object at 0x7da1b06908e0> call[name[self].write, parameter[]] call[name[self].model_filenames][name[filename]] assign[=] name[specification].entity_name
keyword[def] identifier[_write_model] ( identifier[self] , identifier[specification] , identifier[specification_set] ): literal[string] identifier[filename] = literal[string] %( identifier[self] . identifier[_class_prefix] . identifier[lower] (), identifier[specification] . identifier[entity_name] . identifier[lower] ()) identifier[override_content] = identifier[self] . identifier[_extract_override_content] ( identifier[specification] . identifier[entity_name] ) identifier[constants] = identifier[self] . identifier[_extract_constants] ( identifier[specification] ) identifier[superclass_name] = literal[string] keyword[if] identifier[specification] . identifier[rest_name] == identifier[self] . identifier[api_root] keyword[else] literal[string] identifier[self] . identifier[write] ( identifier[destination] = identifier[self] . identifier[output_directory] , identifier[filename] = identifier[filename] , identifier[template_name] = literal[string] , identifier[specification] = identifier[specification] , identifier[specification_set] = identifier[specification_set] , identifier[version] = identifier[self] . identifier[api_version] , identifier[class_prefix] = identifier[self] . identifier[_class_prefix] , identifier[product_accronym] = identifier[self] . identifier[_product_accronym] , identifier[override_content] = identifier[override_content] , identifier[superclass_name] = identifier[superclass_name] , identifier[constants] = identifier[constants] , identifier[header] = identifier[self] . identifier[header_content] ) identifier[self] . identifier[model_filenames] [ identifier[filename] ]= identifier[specification] . identifier[entity_name]
def _write_model(self, specification, specification_set):
    """ Write autogenerated specification file
    """
    filename = '%s%s.py' % (self._class_prefix.lower(), specification.entity_name.lower())
    override_content = self._extract_override_content(specification.entity_name)
    constants = self._extract_constants(specification)
    superclass_name = 'NURESTRootObject' if specification.rest_name == self.api_root else 'NURESTObject'

    self.write(destination=self.output_directory, filename=filename,
               template_name='model.py.tpl',
               specification=specification,
               specification_set=specification_set,
               version=self.api_version,
               class_prefix=self._class_prefix,
               product_accronym=self._product_accronym,
               override_content=override_content,
               superclass_name=superclass_name,
               constants=constants,
               header=self.header_content)

    self.model_filenames[filename] = specification.entity_name
def _query_helper(self, by=None):
    """
    Internal helper for preparing queries.
    """
    if by is None:
        primary_keys = self.table.primary_key.columns.keys()
        if len(primary_keys) > 1:
            warnings.warn("WARNING: MORE THAN 1 PRIMARY KEY FOR TABLE %s. "
                          "USING THE FIRST KEY %s." % (self.table.name, primary_keys[0]))
        if not primary_keys:
            raise NoPrimaryKeyException("Table %s needs a primary key for "
                                        "the .last() method to work properly. "
                                        "Alternatively, specify an ORDER BY "
                                        "column with the by= argument. " % self.table.name)
        id_col = primary_keys[0]
    else:
        id_col = by

    if self.column is None:
        col = "*"
    else:
        col = self.column.name

    return col, id_col
def function[_query_helper, parameter[self, by]]: constant[ Internal helper for preparing queries. ] if compare[name[by] is constant[None]] begin[:] variable[primary_keys] assign[=] call[name[self].table.primary_key.columns.keys, parameter[]] if compare[call[name[len], parameter[name[primary_keys]]] greater[>] constant[1]] begin[:] call[name[warnings].warn, parameter[binary_operation[constant[WARNING: MORE THAN 1 PRIMARY KEY FOR TABLE %s. USING THE FIRST KEY %s.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b11ee590>, <ast.Subscript object at 0x7da1b11ec280>]]]]] if <ast.UnaryOp object at 0x7da1b11eda50> begin[:] <ast.Raise object at 0x7da1b0f32140> variable[id_col] assign[=] call[name[primary_keys]][constant[0]] if compare[name[self].column is constant[None]] begin[:] variable[col] assign[=] constant[*] return[tuple[[<ast.Name object at 0x7da1b0f33490>, <ast.Name object at 0x7da1b0f33a90>]]]
keyword[def] identifier[_query_helper] ( identifier[self] , identifier[by] = keyword[None] ): literal[string] keyword[if] identifier[by] keyword[is] keyword[None] : identifier[primary_keys] = identifier[self] . identifier[table] . identifier[primary_key] . identifier[columns] . identifier[keys] () keyword[if] identifier[len] ( identifier[primary_keys] )> literal[int] : identifier[warnings] . identifier[warn] ( literal[string] literal[string] % ( identifier[self] . identifier[table] . identifier[name] , identifier[primary_keys] [ literal[int] ])) keyword[if] keyword[not] identifier[primary_keys] : keyword[raise] identifier[NoPrimaryKeyException] ( literal[string] literal[string] literal[string] literal[string] % identifier[self] . identifier[table] . identifier[name] ) identifier[id_col] = identifier[primary_keys] [ literal[int] ] keyword[else] : identifier[id_col] = identifier[by] keyword[if] identifier[self] . identifier[column] keyword[is] keyword[None] : identifier[col] = literal[string] keyword[else] : identifier[col] = identifier[self] . identifier[column] . identifier[name] keyword[return] identifier[col] , identifier[id_col]
def _query_helper(self, by=None):
    """
    Internal helper for preparing queries.
    """
    if by is None:
        primary_keys = self.table.primary_key.columns.keys()
        if len(primary_keys) > 1:
            warnings.warn('WARNING: MORE THAN 1 PRIMARY KEY FOR TABLE %s. USING THE FIRST KEY %s.' % (self.table.name, primary_keys[0])) # depends on [control=['if'], data=[]]
        if not primary_keys:
            raise NoPrimaryKeyException('Table %s needs a primary key for the .last() method to work properly. Alternatively, specify an ORDER BY column with the by= argument. ' % self.table.name) # depends on [control=['if'], data=[]]
        id_col = primary_keys[0] # depends on [control=['if'], data=[]]
    else:
        id_col = by
    if self.column is None:
        col = '*' # depends on [control=['if'], data=[]]
    else:
        col = self.column.name
    return (col, id_col)
def get_spectral_index(src, egy):
    """Compute the local spectral index of a source."""
    delta = 1E-5
    f0 = src.spectrum()(pyLike.dArg(egy * (1 - delta)))
    f1 = src.spectrum()(pyLike.dArg(egy * (1 + delta)))
    if f0 > 0 and f1 > 0:
        gamma = np.log10(f0 / f1) / np.log10((1 - delta) / (1 + delta))
    else:
        gamma = np.nan
    return gamma
def function[get_spectral_index, parameter[src, egy]]: constant[Compute the local spectral index of a source.] variable[delta] assign[=] constant[1e-05] variable[f0] assign[=] call[call[name[src].spectrum, parameter[]], parameter[call[name[pyLike].dArg, parameter[binary_operation[name[egy] * binary_operation[constant[1] - name[delta]]]]]]] variable[f1] assign[=] call[call[name[src].spectrum, parameter[]], parameter[call[name[pyLike].dArg, parameter[binary_operation[name[egy] * binary_operation[constant[1] + name[delta]]]]]]] if <ast.BoolOp object at 0x7da207f03a00> begin[:] variable[gamma] assign[=] binary_operation[call[name[np].log10, parameter[binary_operation[name[f0] / name[f1]]]] / call[name[np].log10, parameter[binary_operation[binary_operation[constant[1] - name[delta]] / binary_operation[constant[1] + name[delta]]]]]] return[name[gamma]]
keyword[def] identifier[get_spectral_index] ( identifier[src] , identifier[egy] ): literal[string] identifier[delta] = literal[int] identifier[f0] = identifier[src] . identifier[spectrum] ()( identifier[pyLike] . identifier[dArg] ( identifier[egy] *( literal[int] - identifier[delta] ))) identifier[f1] = identifier[src] . identifier[spectrum] ()( identifier[pyLike] . identifier[dArg] ( identifier[egy] *( literal[int] + identifier[delta] ))) keyword[if] identifier[f0] > literal[int] keyword[and] identifier[f1] > literal[int] : identifier[gamma] = identifier[np] . identifier[log10] ( identifier[f0] / identifier[f1] )/ identifier[np] . identifier[log10] (( literal[int] - identifier[delta] )/( literal[int] + identifier[delta] )) keyword[else] : identifier[gamma] = identifier[np] . identifier[nan] keyword[return] identifier[gamma]
def get_spectral_index(src, egy):
    """Compute the local spectral index of a source."""
    delta = 1e-05
    f0 = src.spectrum()(pyLike.dArg(egy * (1 - delta)))
    f1 = src.spectrum()(pyLike.dArg(egy * (1 + delta)))
    if f0 > 0 and f1 > 0:
        gamma = np.log10(f0 / f1) / np.log10((1 - delta) / (1 + delta)) # depends on [control=['if'], data=[]]
    else:
        gamma = np.nan
    return gamma
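The expression for gamma is a central difference of log flux against log energy, so a pure power law should return its own index exactly. A self-contained sketch with a plain callable standing in for the pyLike spectrum object (the wrapper name local_index is invented here):

import numpy as np

def local_index(spectrum, egy, delta=1e-5):
    # Same central log-slope as get_spectral_index above, minus the pyLike plumbing.
    f0 = spectrum(egy * (1 - delta))
    f1 = spectrum(egy * (1 + delta))
    return np.log10(f0 / f1) / np.log10((1 - delta) / (1 + delta))

# A pure power law E**-2.0 should give an index of -2.
print(local_index(lambda e: e ** -2.0, 1000.0))  # ~ -2.0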
def parse_opt(self):
    """ parses the command line options for different settings. """
    optparser = optparse.OptionParser()
    optparser.add_option('-c', '--config',
                         action='store', dest='config', type='string', default='experiments.cfg',
                         help="your experiments config file")
    optparser.add_option('-n', '--numcores',
                         action='store', dest='ncores', type='int', default=cpu_count(),
                         help="number of processes you want to use, default is %i" % cpu_count())
    optparser.add_option('-d', '--del',
                         action='store_true', dest='delete', default=False,
                         help="delete experiment folder if it exists")
    optparser.add_option('-e', '--experiment',
                         action='append', dest='experiments', type='string',
                         help="run only selected experiments, by default run all experiments in config file.")
    optparser.add_option('-b', '--browse',
                         action='store_true', dest='browse', default=False,
                         help="browse existing experiments.")
    optparser.add_option('-B', '--Browse',
                         action='store_true', dest='browse_big', default=False,
                         help="browse existing experiments, more verbose than -b")
    optparser.add_option('-p', '--progress',
                         action='store_true', dest='progress', default=False,
                         help="like browse, but only shows name and progress bar")

    options, args = optparser.parse_args()
    self.options = options
    return options, args
def function[parse_opt, parameter[self]]: constant[ parses the command line options for different settings. ] variable[optparser] assign[=] call[name[optparse].OptionParser, parameter[]] call[name[optparser].add_option, parameter[constant[-c], constant[--config]]] call[name[optparser].add_option, parameter[constant[-n], constant[--numcores]]] call[name[optparser].add_option, parameter[constant[-d], constant[--del]]] call[name[optparser].add_option, parameter[constant[-e], constant[--experiment]]] call[name[optparser].add_option, parameter[constant[-b], constant[--browse]]] call[name[optparser].add_option, parameter[constant[-B], constant[--Browse]]] call[name[optparser].add_option, parameter[constant[-p], constant[--progress]]] <ast.Tuple object at 0x7da1b0902410> assign[=] call[name[optparser].parse_args, parameter[]] name[self].options assign[=] name[options] return[tuple[[<ast.Name object at 0x7da1b0900d30>, <ast.Name object at 0x7da1b0901ed0>]]]
keyword[def] identifier[parse_opt] ( identifier[self] ): literal[string] identifier[optparser] = identifier[optparse] . identifier[OptionParser] () identifier[optparser] . identifier[add_option] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] , identifier[type] = literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] ) identifier[optparser] . identifier[add_option] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] , identifier[type] = literal[string] , identifier[default] = identifier[cpu_count] (), identifier[help] = literal[string] % identifier[cpu_count] ()) identifier[optparser] . identifier[add_option] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[optparser] . identifier[add_option] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] , identifier[type] = literal[string] , identifier[help] = literal[string] ) identifier[optparser] . identifier[add_option] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[optparser] . identifier[add_option] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[optparser] . identifier[add_option] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[options] , identifier[args] = identifier[optparser] . identifier[parse_args] () identifier[self] . identifier[options] = identifier[options] keyword[return] identifier[options] , identifier[args]
def parse_opt(self):
    """ parses the command line options for different settings. """
    optparser = optparse.OptionParser()
    optparser.add_option('-c', '--config',
                         action='store', dest='config', type='string', default='experiments.cfg',
                         help='your experiments config file')
    optparser.add_option('-n', '--numcores',
                         action='store', dest='ncores', type='int', default=cpu_count(),
                         help='number of processes you want to use, default is %i' % cpu_count())
    optparser.add_option('-d', '--del',
                         action='store_true', dest='delete', default=False,
                         help='delete experiment folder if it exists')
    optparser.add_option('-e', '--experiment',
                         action='append', dest='experiments', type='string',
                         help='run only selected experiments, by default run all experiments in config file.')
    optparser.add_option('-b', '--browse',
                         action='store_true', dest='browse', default=False,
                         help='browse existing experiments.')
    optparser.add_option('-B', '--Browse',
                         action='store_true', dest='browse_big', default=False,
                         help='browse existing experiments, more verbose than -b')
    optparser.add_option('-p', '--progress',
                         action='store_true', dest='progress', default=False,
                         help='like browse, but only shows name and progress bar')

    (options, args) = optparser.parse_args()
    self.options = options
    return (options, args)
def _structure_dict(self, obj, cl):
    """Convert a mapping into a potentially generic dict."""
    if is_bare(cl) or cl.__args__ == (Any, Any):
        return dict(obj)
    else:
        key_type, val_type = cl.__args__
        if key_type is Any:
            val_conv = self._structure_func.dispatch(val_type)
            return {k: val_conv(v, val_type) for k, v in obj.items()}
        elif val_type is Any:
            key_conv = self._structure_func.dispatch(key_type)
            return {key_conv(k, key_type): v for k, v in obj.items()}
        else:
            key_conv = self._structure_func.dispatch(key_type)
            val_conv = self._structure_func.dispatch(val_type)
            return {
                key_conv(k, key_type): val_conv(v, val_type)
                for k, v in obj.items()
            }
def function[_structure_dict, parameter[self, obj, cl]]: constant[Convert a mapping into a potentially generic dict.] if <ast.BoolOp object at 0x7da1b07bde70> begin[:] return[call[name[dict], parameter[name[obj]]]]
keyword[def] identifier[_structure_dict] ( identifier[self] , identifier[obj] , identifier[cl] ): literal[string] keyword[if] identifier[is_bare] ( identifier[cl] ) keyword[or] identifier[cl] . identifier[__args__] ==( identifier[Any] , identifier[Any] ): keyword[return] identifier[dict] ( identifier[obj] ) keyword[else] : identifier[key_type] , identifier[val_type] = identifier[cl] . identifier[__args__] keyword[if] identifier[key_type] keyword[is] identifier[Any] : identifier[val_conv] = identifier[self] . identifier[_structure_func] . identifier[dispatch] ( identifier[val_type] ) keyword[return] { identifier[k] : identifier[val_conv] ( identifier[v] , identifier[val_type] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[obj] . identifier[items] ()} keyword[elif] identifier[val_type] keyword[is] identifier[Any] : identifier[key_conv] = identifier[self] . identifier[_structure_func] . identifier[dispatch] ( identifier[key_type] ) keyword[return] { identifier[key_conv] ( identifier[k] , identifier[key_type] ): identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[obj] . identifier[items] ()} keyword[else] : identifier[key_conv] = identifier[self] . identifier[_structure_func] . identifier[dispatch] ( identifier[key_type] ) identifier[val_conv] = identifier[self] . identifier[_structure_func] . identifier[dispatch] ( identifier[val_type] ) keyword[return] { identifier[key_conv] ( identifier[k] , identifier[key_type] ): identifier[val_conv] ( identifier[v] , identifier[val_type] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[obj] . identifier[items] () }
def _structure_dict(self, obj, cl):
    """Convert a mapping into a potentially generic dict."""
    if is_bare(cl) or cl.__args__ == (Any, Any):
        return dict(obj) # depends on [control=['if'], data=[]]
    else:
        (key_type, val_type) = cl.__args__
        if key_type is Any:
            val_conv = self._structure_func.dispatch(val_type)
            return {k: val_conv(v, val_type) for (k, v) in obj.items()} # depends on [control=['if'], data=[]]
        elif val_type is Any:
            key_conv = self._structure_func.dispatch(key_type)
            return {key_conv(k, key_type): v for (k, v) in obj.items()} # depends on [control=['if'], data=[]]
        else:
            key_conv = self._structure_func.dispatch(key_type)
            val_conv = self._structure_func.dispatch(val_type)
            return {key_conv(k, key_type): val_conv(v, val_type) for (k, v) in obj.items()}
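The same dispatch pattern can be sketched without the cattrs internals (is_bare and _structure_func are not reproduced here; structure below is a stand-in converter that just calls the target type), using typing.get_args to read the dict's type arguments:

from typing import Any, Dict, get_args

def structure_dict(obj, cl, structure=lambda v, t: t(v)):
    # get_args returns () for a bare dict, or (key_type, val_type) for Dict[K, V].
    args = get_args(cl)
    if not args or args == (Any, Any):
        return dict(obj)
    key_type, val_type = args
    conv_k = (lambda k: k) if key_type is Any else (lambda k: structure(k, key_type))
    conv_v = (lambda v: v) if val_type is Any else (lambda v: structure(v, val_type))
    return {conv_k(k): conv_v(v) for k, v in obj.items()}

print(structure_dict({'1': '2'}, Dict[int, int]))  # {1: 2}
print(structure_dict({'1': '2'}, dict))            # {'1': '2'} unchanged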
def _contextMenu(self, pos):
    """Handle plot area customContextMenuRequested signal.

    :param QPoint pos: Mouse position relative to plot area
    """
    # Create the context menu.
    menu = QMenu(self)
    menu.addAction(self._zoomBackAction)

    # Displaying the context menu at the mouse position requires
    # a global position.
    # The position received as argument is relative to PlotWidget's
    # plot area, and thus needs to be converted.
    plotArea = self.getWidgetHandle()
    globalPosition = plotArea.mapToGlobal(pos)
    menu.exec_(globalPosition)
def function[_contextMenu, parameter[self, pos]]: constant[Handle plot area customContextMenuRequested signal. :param QPoint pos: Mouse position relative to plot area ] variable[menu] assign[=] call[name[QMenu], parameter[name[self]]] call[name[menu].addAction, parameter[name[self]._zoomBackAction]] variable[plotArea] assign[=] call[name[self].getWidgetHandle, parameter[]] variable[globalPosition] assign[=] call[name[plotArea].mapToGlobal, parameter[name[pos]]] call[name[menu].exec_, parameter[name[globalPosition]]]
keyword[def] identifier[_contextMenu] ( identifier[self] , identifier[pos] ): literal[string] identifier[menu] = identifier[QMenu] ( identifier[self] ) identifier[menu] . identifier[addAction] ( identifier[self] . identifier[_zoomBackAction] ) identifier[plotArea] = identifier[self] . identifier[getWidgetHandle] () identifier[globalPosition] = identifier[plotArea] . identifier[mapToGlobal] ( identifier[pos] ) identifier[menu] . identifier[exec_] ( identifier[globalPosition] )
def _contextMenu(self, pos):
    """Handle plot area customContextMenuRequested signal.

    :param QPoint pos: Mouse position relative to plot area
    """
    # Create the context menu.
    menu = QMenu(self)
    menu.addAction(self._zoomBackAction)

    # Displaying the context menu at the mouse position requires
    # a global position.
    # The position received as argument is relative to PlotWidget's
    # plot area, and thus needs to be converted.
    plotArea = self.getWidgetHandle()
    globalPosition = plotArea.mapToGlobal(pos)
    menu.exec_(globalPosition)
def _to_meta_data(pif_obj, dataset_hit, mdf_acl):
    """Convert the meta-data from the PIF into MDF"""
    pif = pif_obj.as_dictionary()
    dataset = dataset_hit.as_dictionary()
    mdf = {}
    try:
        if pif.get("names"):
            mdf["title"] = pif["names"][0]
        else:
            mdf["title"] = "Citrine PIF " + str(pif["uid"])
        if pif.get("chemicalFormula"):
            mdf["composition"] = pif["chemicalFormula"]
        elif pif.get("composition"):
            mdf["composition"] = ''.join([comp["element"] for comp in pif["composition"]
                                          if comp["element"]])
        if not mdf["composition"]:
            mdf.pop("composition")
        mdf["acl"] = mdf_acl
        mdf["source_name"] = _construct_new_key(dataset["name"])
        if pif.get("contacts"):
            mdf["data_contact"] = []
            for contact in pif["contacts"]:
                data_c = {
                    "given_name": contact["name"]["given"],    #REQ
                    "family_name": contact["name"]["family"]   #REQ
                }
                if contact.get("email"):
                    data_c["email"] = contact.get("email", "")
                if contact.get("orcid"):
                    data_c["orcid"] = contact.get("orcid", "")
                mdf["data_contact"].append(data_c)
            if not mdf["data_contact"]:
                mdf.pop("data_contact")
        mdf["data_contributor"] = [{}]
        if "owner" in dataset:
            name = dataset["owner"].split()
            contributor = {
                "given_name": name[0],
                "family_name": name[1],
                "email": dataset["email"]
            }
            mdf["data_contributor"] = [contributor]
        mdf["links"] = {
            "landing_page": "https://citrination.com/datasets/{}".format(dataset["id"]),
            "publication": []
        }
        if pif.get("references"):
            mdf["author"] = []
            mdf["citation"] = []
            for ref in pif["references"]:
                if ref.get("doi"):
                    mdf["citation"].append(ref["doi"])  #TODO: Make actual citation
                    mdf["links"]["publication"].append(ref["doi"])
                if ref.get("authors"):
                    for author in ref["authors"]:
                        if author.get("given") and author.get("family"):
                            mdf["author"].append({
                                "given_name": author["given"],
                                "family_name": author["family"]
                            })
            # Remove fields if blank
            if not mdf["author"]:
                mdf.pop("author")
            if not mdf["citation"]:
                mdf.pop("citation")
        if not mdf["links"]["publication"]:
            mdf["links"].pop("publication")
        if pif.get("licenses", [{}])[0].get("url"):
            mdf["license"] = pif["licenses"][0]["url"]
        if pif.get("tags"):
            mdf["tags"] = pif["tags"]
    # If required MDF metadata is missing from PIF, abort
    except KeyError as e:
        print("Error: Required MDF metadata", str(e), "not found in PIF", pif["uid"])
        return None
    return mdf
def function[_to_meta_data, parameter[pif_obj, dataset_hit, mdf_acl]]: constant[Convert the meta-data from the PIF into MDF] variable[pif] assign[=] call[name[pif_obj].as_dictionary, parameter[]] variable[dataset] assign[=] call[name[dataset_hit].as_dictionary, parameter[]] variable[mdf] assign[=] dictionary[[], []] <ast.Try object at 0x7da1b23452a0> return[name[mdf]]
keyword[def] identifier[_to_meta_data] ( identifier[pif_obj] , identifier[dataset_hit] , identifier[mdf_acl] ): literal[string] identifier[pif] = identifier[pif_obj] . identifier[as_dictionary] () identifier[dataset] = identifier[dataset_hit] . identifier[as_dictionary] () identifier[mdf] ={} keyword[try] : keyword[if] identifier[pif] . identifier[get] ( literal[string] ): identifier[mdf] [ literal[string] ]= identifier[pif] [ literal[string] ][ literal[int] ] keyword[else] : identifier[mdf] [ literal[string] ]= literal[string] + identifier[str] ( identifier[pif] [ literal[string] ]) keyword[if] identifier[pif] . identifier[get] ( literal[string] ): identifier[mdf] [ literal[string] ]= identifier[pif] [ literal[string] ] keyword[elif] identifier[pif] . identifier[get] ( literal[string] ): identifier[mdf] [ literal[string] ]= literal[string] . identifier[join] ([ identifier[comp] [ literal[string] ] keyword[for] identifier[comp] keyword[in] identifier[pif] [ literal[string] ] keyword[if] identifier[comp] [ literal[string] ]]) keyword[if] keyword[not] identifier[mdf] [ literal[string] ]: identifier[mdf] . identifier[pop] ( literal[string] ) identifier[mdf] [ literal[string] ]= identifier[mdf_acl] identifier[mdf] [ literal[string] ]= identifier[_construct_new_key] ( identifier[dataset] [ literal[string] ]) keyword[if] identifier[pif] . identifier[get] ( literal[string] ): identifier[mdf] [ literal[string] ]=[] keyword[for] identifier[contact] keyword[in] identifier[pif] [ literal[string] ]: identifier[data_c] ={ literal[string] : identifier[contact] [ literal[string] ][ literal[string] ], literal[string] : identifier[contact] [ literal[string] ][ literal[string] ] } keyword[if] identifier[contact] . identifier[get] ( literal[string] ): identifier[data_c] [ literal[string] ]= identifier[contact] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[contact] . identifier[get] ( literal[string] ): identifier[data_c] [ literal[string] ]= identifier[contact] . identifier[get] ( literal[string] , literal[string] ) identifier[mdf] [ literal[string] ]. identifier[append] ( identifier[data_c] ) keyword[if] keyword[not] identifier[mdf] [ literal[string] ]: identifier[mdf] . identifier[pop] ( literal[string] ) identifier[mdf] [ literal[string] ]=[{}] keyword[if] literal[string] keyword[in] identifier[dataset] : identifier[name] = identifier[dataset] [ literal[string] ]. identifier[split] () identifier[contributor] ={ literal[string] : identifier[name] [ literal[int] ], literal[string] : identifier[name] [ literal[int] ], literal[string] : identifier[dataset] [ literal[string] ] } identifier[mdf] [ literal[string] ]=[ identifier[contributor] ] identifier[mdf] [ literal[string] ]={ literal[string] : literal[string] . identifier[format] ( identifier[dataset] [ literal[string] ]), literal[string] :[] } keyword[if] identifier[pif] . identifier[get] ( literal[string] ): identifier[mdf] [ literal[string] ]=[] identifier[mdf] [ literal[string] ]=[] keyword[for] identifier[ref] keyword[in] identifier[pif] [ literal[string] ]: keyword[if] identifier[ref] . identifier[get] ( literal[string] ): identifier[mdf] [ literal[string] ]. identifier[append] ( identifier[ref] [ literal[string] ]) identifier[mdf] [ literal[string] ][ literal[string] ]. identifier[append] ( identifier[ref] [ literal[string] ]) keyword[if] identifier[ref] . identifier[get] ( literal[string] ): keyword[for] identifier[author] keyword[in] identifier[ref] [ literal[string] ]: keyword[if] identifier[author] . identifier[get] ( literal[string] ) keyword[and] identifier[author] . identifier[get] ( literal[string] ): identifier[mdf] [ literal[string] ]. identifier[append] ({ literal[string] : identifier[author] [ literal[string] ], literal[string] : identifier[author] [ literal[string] ] }) keyword[if] keyword[not] identifier[mdf] [ literal[string] ]: identifier[mdf] . identifier[pop] ( literal[string] ) keyword[if] keyword[not] identifier[mdf] [ literal[string] ]: identifier[mdf] . identifier[pop] ( literal[string] ) keyword[if] keyword[not] identifier[mdf] [ literal[string] ][ literal[string] ]: identifier[mdf] [ literal[string] ]. identifier[pop] ( literal[string] ) keyword[if] identifier[pif] . identifier[get] ( literal[string] ,[{}])[ literal[int] ]. identifier[get] ( literal[string] ): identifier[mdf] [ literal[string] ]= identifier[pif] [ literal[string] ][ literal[int] ][ literal[string] ] keyword[if] identifier[pif] . identifier[get] ( literal[string] ): identifier[mdf] [ literal[string] ]= identifier[pif] [ literal[string] ] keyword[except] identifier[KeyError] keyword[as] identifier[e] : identifier[print] ( literal[string] , identifier[str] ( identifier[e] ), literal[string] , identifier[pif] [ literal[string] ]) keyword[return] keyword[None] keyword[return] identifier[mdf]
def _to_meta_data(pif_obj, dataset_hit, mdf_acl):
    """Convert the meta-data from the PIF into MDF"""
    pif = pif_obj.as_dictionary()
    dataset = dataset_hit.as_dictionary()
    mdf = {}
    try:
        if pif.get('names'):
            mdf['title'] = pif['names'][0] # depends on [control=['if'], data=[]]
        else:
            mdf['title'] = 'Citrine PIF ' + str(pif['uid'])
        if pif.get('chemicalFormula'):
            mdf['composition'] = pif['chemicalFormula'] # depends on [control=['if'], data=[]]
        elif pif.get('composition'):
            mdf['composition'] = ''.join([comp['element'] for comp in pif['composition'] if comp['element']]) # depends on [control=['if'], data=[]]
        if not mdf['composition']:
            mdf.pop('composition') # depends on [control=['if'], data=[]]
        mdf['acl'] = mdf_acl
        mdf['source_name'] = _construct_new_key(dataset['name'])
        if pif.get('contacts'):
            mdf['data_contact'] = []
            for contact in pif['contacts']:
                #REQ
                #REQ
                data_c = {'given_name': contact['name']['given'], 'family_name': contact['name']['family']}
                if contact.get('email'):
                    data_c['email'] = contact.get('email', '') # depends on [control=['if'], data=[]]
                if contact.get('orcid'):
                    data_c['orcid'] = contact.get('orcid', '') # depends on [control=['if'], data=[]]
                mdf['data_contact'].append(data_c) # depends on [control=['for'], data=['contact']]
            if not mdf['data_contact']:
                mdf.pop('data_contact') # depends on [control=['if'], data=[]]
            # depends on [control=['if'], data=[]]
        mdf['data_contributor'] = [{}]
        if 'owner' in dataset:
            name = dataset['owner'].split()
            contributor = {'given_name': name[0], 'family_name': name[1], 'email': dataset['email']}
            mdf['data_contributor'] = [contributor] # depends on [control=['if'], data=['dataset']]
        mdf['links'] = {'landing_page': 'https://citrination.com/datasets/{}'.format(dataset['id']), 'publication': []}
        if pif.get('references'):
            mdf['author'] = []
            mdf['citation'] = []
            for ref in pif['references']:
                if ref.get('doi'):
                    mdf['citation'].append(ref['doi']) #TODO: Make actual citation
                    mdf['links']['publication'].append(ref['doi']) # depends on [control=['if'], data=[]]
                if ref.get('authors'):
                    for author in ref['authors']:
                        if author.get('given') and author.get('family'):
                            mdf['author'].append({'given_name': author['given'], 'family_name': author['family']}) # depends on [control=['if'], data=[]]
                        # depends on [control=['for'], data=['author']]
                    # depends on [control=['if'], data=[]]
                # depends on [control=['for'], data=['ref']]
            # Remove fields if blank
            if not mdf['author']:
                mdf.pop('author') # depends on [control=['if'], data=[]]
            if not mdf['citation']:
                mdf.pop('citation') # depends on [control=['if'], data=[]]
            # depends on [control=['if'], data=[]]
        if not mdf['links']['publication']:
            mdf['links'].pop('publication') # depends on [control=['if'], data=[]]
        if pif.get('licenses', [{}])[0].get('url'):
            mdf['license'] = pif['licenses'][0]['url'] # depends on [control=['if'], data=[]]
        if pif.get('tags'):
            mdf['tags'] = pif['tags'] # depends on [control=['if'], data=[]]
        # depends on [control=['try'], data=[]]
    # If required MDF metadata is missing from PIF, abort
    except KeyError as e:
        print('Error: Required MDF metadata', str(e), 'not found in PIF', pif['uid'])
        return None # depends on [control=['except'], data=['e']]
    return mdf
def activate(self, experiment_key, user_id, attributes=None):
    """ Buckets visitor and sends impression event to Optimizely.

    Args:
      experiment_key: Experiment which needs to be activated.
      user_id: ID for user.
      attributes: Dict representing user attributes and values which need to be recorded.

    Returns:
      Variation key representing the variation the user will be bucketed in.
      None if user is not in experiment or if experiment is not Running.
    """
    if not self.is_valid:
        self.logger.error(enums.Errors.INVALID_DATAFILE.format('activate'))
        return None

    if not validator.is_non_empty_string(experiment_key):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key'))
        return None

    if not isinstance(user_id, string_types):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id'))
        return None

    variation_key = self.get_variation(experiment_key, user_id, attributes)

    if not variation_key:
        self.logger.info('Not activating user "%s".' % user_id)
        return None

    experiment = self.config.get_experiment_from_key(experiment_key)
    variation = self.config.get_variation_from_key(experiment_key, variation_key)

    # Create and dispatch impression event
    self.logger.info('Activating user "%s" in experiment "%s".' % (user_id, experiment.key))
    self._send_impression_event(experiment, variation, user_id, attributes)

    return variation.key
def function[activate, parameter[self, experiment_key, user_id, attributes]]: constant[ Buckets visitor and sends impression event to Optimizely. Args: experiment_key: Experiment which needs to be activated. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running. ] if <ast.UnaryOp object at 0x7da1b11a9780> begin[:] call[name[self].logger.error, parameter[call[name[enums].Errors.INVALID_DATAFILE.format, parameter[constant[activate]]]]] return[constant[None]] if <ast.UnaryOp object at 0x7da1b11a92d0> begin[:] call[name[self].logger.error, parameter[call[name[enums].Errors.INVALID_INPUT_ERROR.format, parameter[constant[experiment_key]]]]] return[constant[None]] if <ast.UnaryOp object at 0x7da1b11a98a0> begin[:] call[name[self].logger.error, parameter[call[name[enums].Errors.INVALID_INPUT_ERROR.format, parameter[constant[user_id]]]]] return[constant[None]] variable[variation_key] assign[=] call[name[self].get_variation, parameter[name[experiment_key], name[user_id], name[attributes]]] if <ast.UnaryOp object at 0x7da1b11abd30> begin[:] call[name[self].logger.info, parameter[binary_operation[constant[Not activating user "%s".] <ast.Mod object at 0x7da2590d6920> name[user_id]]]] return[constant[None]] variable[experiment] assign[=] call[name[self].config.get_experiment_from_key, parameter[name[experiment_key]]] variable[variation] assign[=] call[name[self].config.get_variation_from_key, parameter[name[experiment_key], name[variation_key]]] call[name[self].logger.info, parameter[binary_operation[constant[Activating user "%s" in experiment "%s".] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6aa050>, <ast.Attribute object at 0x7da20c6a9540>]]]]] call[name[self]._send_impression_event, parameter[name[experiment], name[variation], name[user_id], name[attributes]]] return[name[variation].key]
keyword[def] identifier[activate] ( identifier[self] , identifier[experiment_key] , identifier[user_id] , identifier[attributes] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[is_valid] : identifier[self] . identifier[logger] . identifier[error] ( identifier[enums] . identifier[Errors] . identifier[INVALID_DATAFILE] . identifier[format] ( literal[string] )) keyword[return] keyword[None] keyword[if] keyword[not] identifier[validator] . identifier[is_non_empty_string] ( identifier[experiment_key] ): identifier[self] . identifier[logger] . identifier[error] ( identifier[enums] . identifier[Errors] . identifier[INVALID_INPUT_ERROR] . identifier[format] ( literal[string] )) keyword[return] keyword[None] keyword[if] keyword[not] identifier[isinstance] ( identifier[user_id] , identifier[string_types] ): identifier[self] . identifier[logger] . identifier[error] ( identifier[enums] . identifier[Errors] . identifier[INVALID_INPUT_ERROR] . identifier[format] ( literal[string] )) keyword[return] keyword[None] identifier[variation_key] = identifier[self] . identifier[get_variation] ( identifier[experiment_key] , identifier[user_id] , identifier[attributes] ) keyword[if] keyword[not] identifier[variation_key] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] % identifier[user_id] ) keyword[return] keyword[None] identifier[experiment] = identifier[self] . identifier[config] . identifier[get_experiment_from_key] ( identifier[experiment_key] ) identifier[variation] = identifier[self] . identifier[config] . identifier[get_variation_from_key] ( identifier[experiment_key] , identifier[variation_key] ) identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[user_id] , identifier[experiment] . identifier[key] )) identifier[self] . identifier[_send_impression_event] ( identifier[experiment] , identifier[variation] , identifier[user_id] , identifier[attributes] ) keyword[return] identifier[variation] . identifier[key]
def activate(self, experiment_key, user_id, attributes=None):
    """ Buckets visitor and sends impression event to Optimizely.

    Args:
      experiment_key: Experiment which needs to be activated.
      user_id: ID for user.
      attributes: Dict representing user attributes and values which need to be recorded.

    Returns:
      Variation key representing the variation the user will be bucketed in.
      None if user is not in experiment or if experiment is not Running.
    """
    if not self.is_valid:
        self.logger.error(enums.Errors.INVALID_DATAFILE.format('activate'))
        return None # depends on [control=['if'], data=[]]
    if not validator.is_non_empty_string(experiment_key):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key'))
        return None # depends on [control=['if'], data=[]]
    if not isinstance(user_id, string_types):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id'))
        return None # depends on [control=['if'], data=[]]
    variation_key = self.get_variation(experiment_key, user_id, attributes)
    if not variation_key:
        self.logger.info('Not activating user "%s".' % user_id)
        return None # depends on [control=['if'], data=[]]
    experiment = self.config.get_experiment_from_key(experiment_key)
    variation = self.config.get_variation_from_key(experiment_key, variation_key)
    # Create and dispatch impression event
    self.logger.info('Activating user "%s" in experiment "%s".' % (user_id, experiment.key))
    self._send_impression_event(experiment, variation, user_id, attributes)
    return variation.key
def average_returns(ts, **kwargs):
    ''' Compute geometric average returns from a returns time series'''
    average_type = kwargs.get('type', 'net')
    if average_type == 'net':
        relative = 0
    else:
        relative = -1  # gross
    #start = kwargs.get('start', ts.index[0])
    #end = kwargs.get('end', ts.index[len(ts.index) - 1])
    #delta = kwargs.get('delta', ts.index[1] - ts.index[0])
    period = kwargs.get('period', None)
    if isinstance(period, int):
        pass
    #else:
        #ts = reIndexDF(ts, start=start, end=end, delta=delta)
        #period = 1
    avg_ret = 1
    for idx in range(len(ts.index)):
        if idx % period == 0:
            avg_ret *= (1 + ts[idx] + relative)
    return avg_ret - 1
def function[average_returns, parameter[ts]]: constant[ Compute geometric average returns from a returns time serie] variable[average_type] assign[=] call[name[kwargs].get, parameter[constant[type], constant[net]]] if compare[name[average_type] equal[==] constant[net]] begin[:] variable[relative] assign[=] constant[0] variable[period] assign[=] call[name[kwargs].get, parameter[constant[period], constant[None]]] if call[name[isinstance], parameter[name[period], name[int]]] begin[:] pass variable[avg_ret] assign[=] constant[1] for taget[name[idx]] in starred[call[name[range], parameter[call[name[len], parameter[name[ts].index]]]]] begin[:] if compare[binary_operation[name[idx] <ast.Mod object at 0x7da2590d6920> name[period]] equal[==] constant[0]] begin[:] <ast.AugAssign object at 0x7da204622710> return[binary_operation[name[avg_ret] - constant[1]]]
keyword[def] identifier[average_returns] ( identifier[ts] ,** identifier[kwargs] ): literal[string] identifier[average_type] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[average_type] == literal[string] : identifier[relative] = literal[int] keyword[else] : identifier[relative] =- literal[int] identifier[period] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[isinstance] ( identifier[period] , identifier[int] ): keyword[pass] identifier[avg_ret] = literal[int] keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[len] ( identifier[ts] . identifier[index] )): keyword[if] identifier[idx] % identifier[period] == literal[int] : identifier[avg_ret] *=( literal[int] + identifier[ts] [ identifier[idx] ]+ identifier[relative] ) keyword[return] identifier[avg_ret] - literal[int]
def average_returns(ts, **kwargs):
    """ Compute geometric average returns from a returns time series"""
    average_type = kwargs.get('type', 'net')
    if average_type == 'net':
        relative = 0 # depends on [control=['if'], data=[]]
    else:
        relative = -1  # gross
    #start = kwargs.get('start', ts.index[0])
    #end = kwargs.get('end', ts.index[len(ts.index) - 1])
    #delta = kwargs.get('delta', ts.index[1] - ts.index[0])
    period = kwargs.get('period', None)
    if isinstance(period, int):
        pass # depends on [control=['if'], data=[]]
    #else:
        #ts = reIndexDF(ts, start=start, end=end, delta=delta)
        #period = 1
    avg_ret = 1
    for idx in range(len(ts.index)):
        if idx % period == 0:
            avg_ret *= 1 + ts[idx] + relative # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=['idx']]
    return avg_ret - 1
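Note that the docstring above promises a geometric average while the body returns the compounded return of the sampled points, and a missing period kwarg would make idx % period raise. A hypothetical sketch of a literal per-period geometric average, for comparison (the name geometric_average_return is invented here):

import numpy as np

def geometric_average_return(returns, period=1):
    # Keep every period-th return, compound them, then take the n-th root.
    sampled = np.asarray(returns)[::period]
    growth = np.prod(1.0 + sampled)
    return growth ** (1.0 / len(sampled)) - 1.0

print(geometric_average_return([0.10, -0.05, 0.02]))  # ~ 0.0215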
def meta_changed(self, model, prop_name, info): """This method notifies the parent state about changes made to the meta data """ if self.parent is not None: msg = info.arg # Add information about notification to the signal message notification = Notification(model, prop_name, info) msg = msg._replace(notification=notification) info.arg = msg self.parent.meta_changed(model, prop_name, info)
def function[meta_changed, parameter[self, model, prop_name, info]]: constant[This method notifies the parent state about changes made to the meta data ] if compare[name[self].parent is_not constant[None]] begin[:] variable[msg] assign[=] name[info].arg variable[notification] assign[=] call[name[Notification], parameter[name[model], name[prop_name], name[info]]] variable[msg] assign[=] call[name[msg]._replace, parameter[]] name[info].arg assign[=] name[msg] call[name[self].parent.meta_changed, parameter[name[model], name[prop_name], name[info]]]
keyword[def] identifier[meta_changed] ( identifier[self] , identifier[model] , identifier[prop_name] , identifier[info] ): literal[string] keyword[if] identifier[self] . identifier[parent] keyword[is] keyword[not] keyword[None] : identifier[msg] = identifier[info] . identifier[arg] identifier[notification] = identifier[Notification] ( identifier[model] , identifier[prop_name] , identifier[info] ) identifier[msg] = identifier[msg] . identifier[_replace] ( identifier[notification] = identifier[notification] ) identifier[info] . identifier[arg] = identifier[msg] identifier[self] . identifier[parent] . identifier[meta_changed] ( identifier[model] , identifier[prop_name] , identifier[info] )
def meta_changed(self, model, prop_name, info): """This method notifies the parent state about changes made to the meta data """ if self.parent is not None: msg = info.arg # Add information about notification to the signal message notification = Notification(model, prop_name, info) msg = msg._replace(notification=notification) info.arg = msg self.parent.meta_changed(model, prop_name, info) # depends on [control=['if'], data=[]]
def _get_session(self, session): """Creates a new session with basic auth, unless one was provided, and sets headers. :param session: (optional) Session to re-use :return: - :class:`requests.Session` object """ if not session: logger.debug('(SESSION_CREATE) User: %s' % self._user) s = requests.Session() s.auth = HTTPBasicAuth(self._user, self._password) else: logger.debug('(SESSION_CREATE) Object: %s' % session) s = session s.headers.update( { 'content-type': 'application/json', 'accept': 'application/json', 'User-Agent': 'pysnow/%s' % pysnow.__version__ } ) return s
def function[_get_session, parameter[self, session]]: constant[Creates a new session with basic auth, unless one was provided, and sets headers. :param session: (optional) Session to re-use :return: - :class:`requests.Session` object ] if <ast.UnaryOp object at 0x7da1b0655390> begin[:] call[name[logger].debug, parameter[binary_operation[constant[(SESSION_CREATE) User: %s] <ast.Mod object at 0x7da2590d6920> name[self]._user]]] variable[s] assign[=] call[name[requests].Session, parameter[]] name[s].auth assign[=] call[name[HTTPBasicAuth], parameter[name[self]._user, name[self]._password]] call[name[s].headers.update, parameter[dictionary[[<ast.Constant object at 0x7da1b0655570>, <ast.Constant object at 0x7da1b0655c00>, <ast.Constant object at 0x7da1b0655c60>], [<ast.Constant object at 0x7da1b06558a0>, <ast.Constant object at 0x7da1b0656aa0>, <ast.BinOp object at 0x7da1b06544f0>]]]] return[name[s]]
keyword[def] identifier[_get_session] ( identifier[self] , identifier[session] ): literal[string] keyword[if] keyword[not] identifier[session] : identifier[logger] . identifier[debug] ( literal[string] % identifier[self] . identifier[_user] ) identifier[s] = identifier[requests] . identifier[Session] () identifier[s] . identifier[auth] = identifier[HTTPBasicAuth] ( identifier[self] . identifier[_user] , identifier[self] . identifier[_password] ) keyword[else] : identifier[logger] . identifier[debug] ( literal[string] % identifier[session] ) identifier[s] = identifier[session] identifier[s] . identifier[headers] . identifier[update] ( { literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] % identifier[pysnow] . identifier[__version__] } ) keyword[return] identifier[s]
def _get_session(self, session): """Creates a new session with basic auth, unless one was provided, and sets headers. :param session: (optional) Session to re-use :return: - :class:`requests.Session` object """ if not session: logger.debug('(SESSION_CREATE) User: %s' % self._user) s = requests.Session() s.auth = HTTPBasicAuth(self._user, self._password) # depends on [control=['if'], data=[]] else: logger.debug('(SESSION_CREATE) Object: %s' % session) s = session s.headers.update({'content-type': 'application/json', 'accept': 'application/json', 'User-Agent': 'pysnow/%s' % pysnow.__version__}) return s
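Editor's note: the session this helper builds can be reproduced in a few lines with plain requests; the credentials and version string below are placeholders (the real code injects pysnow.__version__).

import requests
from requests.auth import HTTPBasicAuth

s = requests.Session()
s.auth = HTTPBasicAuth('admin', 'secret')  # placeholder credentials
s.headers.update({
    'content-type': 'application/json',
    'accept': 'application/json',
    'User-Agent': 'pysnow/0.0.0',  # stand-in for pysnow.__version__
})
# Every request issued through `s` now carries the auth header and JSON
# content negotiation, e.g.:
# s.get('https://instance.service-now.com/api/now/table/incident')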
def GetNeighbors(ID, model = None, neighbors = None, mag_range = None, cdpp_range = None, aperture_name = None, cadence = 'lc', **kwargs): ''' Return `neighbors` random bright stars on the same module as `EPIC`. :param int ID: The target ID number :param str model: The :py:obj:`everest` model name. Only used when imposing CDPP bounds. Default :py:obj:`None` :param int neighbors: Number of neighbors to return. Default None :param str aperture_name: The name of the aperture to use. Select `custom` to call \ :py:func:`GetCustomAperture`. Default :py:obj:`None` :param str cadence: The light curve cadence. Default `lc` :param tuple mag_range: (`low`, `high`) values for the Kepler magnitude. Default :py:obj:`None` :param tuple cdpp_range: (`low`, `high`) values for the de-trended CDPP. Default :py:obj:`None` ''' raise NotImplementedError('This mission is not yet supported.')
def function[GetNeighbors, parameter[ID, model, neighbors, mag_range, cdpp_range, aperture_name, cadence]]: constant[ Return `neighbors` random bright stars on the same module as `EPIC`. :param int ID: The target ID number :param str model: The :py:obj:`everest` model name. Only used when imposing CDPP bounds. Default :py:obj:`None` :param int neighbors: Number of neighbors to return. Default None :param str aperture_name: The name of the aperture to use. Select `custom` to call :py:func:`GetCustomAperture`. Default :py:obj:`None` :param str cadence: The light curve cadence. Default `lc` :param tuple mag_range: (`low`, `high`) values for the Kepler magnitude. Default :py:obj:`None` :param tuple cdpp_range: (`low`, `high`) values for the de-trended CDPP. Default :py:obj:`None` ] <ast.Raise object at 0x7da1b0fe7df0>
keyword[def] identifier[GetNeighbors] ( identifier[ID] , identifier[model] = keyword[None] , identifier[neighbors] = keyword[None] , identifier[mag_range] = keyword[None] , identifier[cdpp_range] = keyword[None] , identifier[aperture_name] = keyword[None] , identifier[cadence] = literal[string] ,** identifier[kwargs] ): literal[string] keyword[raise] identifier[NotImplementedError] ( literal[string] )
def GetNeighbors(ID, model=None, neighbors=None, mag_range=None, cdpp_range=None, aperture_name=None, cadence='lc', **kwargs): """ Return `neighbors` random bright stars on the same module as `EPIC`. :param int ID: The target ID number :param str model: The :py:obj:`everest` model name. Only used when imposing CDPP bounds. Default :py:obj:`None` :param int neighbors: Number of neighbors to return. Default None :param str aperture_name: The name of the aperture to use. Select `custom` to call :py:func:`GetCustomAperture`. Default :py:obj:`None` :param str cadence: The light curve cadence. Default `lc` :param tuple mag_range: (`low`, `high`) values for the Kepler magnitude. Default :py:obj:`None` :param tuple cdpp_range: (`low`, `high`) values for the de-trended CDPP. Default :py:obj:`None` """ raise NotImplementedError('This mission is not yet supported.')
def load(self, filename, offset): """Will eventually load information for Apple_Boot volume. \ Not yet implemented""" try: self.offset = offset # self.fd = open(filename, 'rb') # self.fd.close() except IOError: self.logger.error('Unable to load EfiSystem volume')
def function[load, parameter[self, filename, offset]]: constant[Will eventually load information for Apple_Boot volume. Not yet implemented] <ast.Try object at 0x7da1b27eb730>
keyword[def] identifier[load] ( identifier[self] , identifier[filename] , identifier[offset] ): literal[string] keyword[try] : identifier[self] . identifier[offset] = identifier[offset] keyword[except] identifier[IOError] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] )
def load(self, filename, offset): """Will eventually load information for Apple_Boot volume. Not yet implemented""" try: self.offset = offset # depends on [control=['try'], data=[]] # self.fd = open(filename, 'rb') # self.fd.close() except IOError: self.logger.error('Unable to load EfiSystem volume') # depends on [control=['except'], data=[]]
def max_delta_volume(self): """ Maximum volume change along insertion """ vols = [v.vol_charge for v in self.voltage_pairs] vols.extend([v.vol_discharge for v in self.voltage_pairs]) return max(vols) / min(vols) - 1
def function[max_delta_volume, parameter[self]]: constant[ Maximum volume change along insertion ] variable[vols] assign[=] <ast.ListComp object at 0x7da1b1c35b40> call[name[vols].extend, parameter[<ast.ListComp object at 0x7da1b1c36d40>]] return[binary_operation[binary_operation[call[name[max], parameter[name[vols]]] / call[name[min], parameter[name[vols]]]] - constant[1]]]
keyword[def] identifier[max_delta_volume] ( identifier[self] ): literal[string] identifier[vols] =[ identifier[v] . identifier[vol_charge] keyword[for] identifier[v] keyword[in] identifier[self] . identifier[voltage_pairs] ] identifier[vols] . identifier[extend] ([ identifier[v] . identifier[vol_discharge] keyword[for] identifier[v] keyword[in] identifier[self] . identifier[voltage_pairs] ]) keyword[return] identifier[max] ( identifier[vols] )/ identifier[min] ( identifier[vols] )- literal[int]
def max_delta_volume(self): """ Maximum volume change along insertion """ vols = [v.vol_charge for v in self.voltage_pairs] vols.extend([v.vol_discharge for v in self.voltage_pairs]) return max(vols) / min(vols) - 1
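Editor's note: a quick worked number for the max/min ratio above, with made-up volumes.

# Suppose the charge/discharge volumes along the insertion path are
# 10.0, 10.4 and 10.9 (arbitrary units): the measure is max/min - 1.
vols = [10.0, 10.4, 10.9]
print(max(vols) / min(vols) - 1)  # ~0.09, i.e. a 9% maximum volume swing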
def _remove_by_number(self, number: int): """ Removes the data object from this collection with the given number. A `ValueError` will be raised if a data object with the given number does not exist. :param number: the number of the data object to remove """ if number not in self._data: raise ValueError("Data object replica number %d is not in this collection" % number) del self._data[number] assert number not in self._data
def function[_remove_by_number, parameter[self, number]]: constant[ Removes the data object from this collection with the given number. A `ValueError` will be raised if a data object with the given number does not exist. :param number: the number of the data object to remove ] if compare[name[number] <ast.NotIn object at 0x7da2590d7190> name[self]._data] begin[:] <ast.Raise object at 0x7da1b1470ca0> <ast.Delete object at 0x7da1b1473940> assert[compare[name[number] <ast.NotIn object at 0x7da2590d7190> name[self]._data]]
keyword[def] identifier[_remove_by_number] ( identifier[self] , identifier[number] : identifier[int] ): literal[string] keyword[if] identifier[number] keyword[not] keyword[in] identifier[self] . identifier[_data] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[number] ) keyword[del] identifier[self] . identifier[_data] [ identifier[number] ] keyword[assert] identifier[number] keyword[not] keyword[in] identifier[self] . identifier[_data]
def _remove_by_number(self, number: int): """ Removes the data object from this collection with the given number. A `ValueError` will be raised if a data object with the given number does not exist. :param number: the number of the data object to remove """ if number not in self._data: raise ValueError('Data object replica number %d is not in this collection' % number) # depends on [control=['if'], data=['number']] del self._data[number] assert number not in self._data
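Editor's note: a self-contained illustration of the contract above (unknown numbers raise ValueError); the ReplicaCollection class is a stand-in for whatever holds `_data` in the original.

class ReplicaCollection:
    """Stand-in container: `_data` maps replica numbers to objects."""
    def __init__(self):
        self._data = {}

    def _remove_by_number(self, number: int):
        if number not in self._data:
            raise ValueError(
                "Data object replica number %d is not in this collection" % number)
        del self._data[number]

c = ReplicaCollection()
c._data[3] = "replica-3"
c._remove_by_number(3)        # succeeds
try:
    c._remove_by_number(3)    # removing again raises
except ValueError as err:
    print(err)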
def remove_option(self, section, option, remove_default=True):
    """
    Remove an option if it exists in the config loaded from a file or in the
    default config. If both configs have the same option, this removes the
    option from both unless remove_default=False.
    """
    if super().has_option(section, option):
        super().remove_option(section, option)

    if self.airflow_defaults.has_option(section, option) and remove_default:
        self.airflow_defaults.remove_option(section, option)
def function[remove_option, parameter[self, section, option, remove_default]]: constant[ Remove an option if it exists in config from a file or default config. If both of config have the same option, this removes the option in both configs unless remove_default=False. ] if call[call[name[super], parameter[]].has_option, parameter[name[section], name[option]]] begin[:] call[call[name[super], parameter[]].remove_option, parameter[name[section], name[option]]] if <ast.BoolOp object at 0x7da20e9631f0> begin[:] call[name[self].airflow_defaults.remove_option, parameter[name[section], name[option]]]
keyword[def] identifier[remove_option] ( identifier[self] , identifier[section] , identifier[option] , identifier[remove_default] = keyword[True] ): literal[string] keyword[if] identifier[super] (). identifier[has_option] ( identifier[section] , identifier[option] ): identifier[super] (). identifier[remove_option] ( identifier[section] , identifier[option] ) keyword[if] identifier[self] . identifier[airflow_defaults] . identifier[has_option] ( identifier[section] , identifier[option] ) keyword[and] identifier[remove_default] : identifier[self] . identifier[airflow_defaults] . identifier[remove_option] ( identifier[section] , identifier[option] )
def remove_option(self, section, option, remove_default=True): """ Remove an option if it exists in the config loaded from a file or in the default config. If both configs have the same option, this removes the option from both unless remove_default=False. """ if super().has_option(section, option): super().remove_option(section, option) # depends on [control=['if'], data=[]] if self.airflow_defaults.has_option(section, option) and remove_default: self.airflow_defaults.remove_option(section, option) # depends on [control=['if'], data=[]]
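Editor's note: the two-layer removal above can be modelled with two stock ConfigParser objects; the section and option names below are invented for the demo.

import configparser

user_cfg = configparser.ConfigParser()
user_cfg.read_dict({'core': {'parallelism': '32'}})
default_cfg = configparser.ConfigParser()
default_cfg.read_dict({'core': {'parallelism': '16'}})

def remove_option(section, option, remove_default=True):
    # Remove from the user layer first, then from the defaults layer
    # unless the caller opts out.
    if user_cfg.has_option(section, option):
        user_cfg.remove_option(section, option)
    if default_cfg.has_option(section, option) and remove_default:
        default_cfg.remove_option(section, option)

remove_option('core', 'parallelism')
print(user_cfg.has_option('core', 'parallelism'),
      default_cfg.has_option('core', 'parallelism'))  # False False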
def draw_line(self, ax, line, force_trans=None): """Process a matplotlib line and call renderer.draw_line""" coordinates, data = self.process_transform(line.get_transform(), ax, line.get_xydata(), force_trans=force_trans) linestyle = utils.get_line_style(line) if linestyle['dasharray'] is None: linestyle = None markerstyle = utils.get_marker_style(line) if (markerstyle['marker'] in ['None', 'none', None] or markerstyle['markerpath'][0].size == 0): markerstyle = None label = line.get_label() if markerstyle or linestyle: self.renderer.draw_marked_line(data=data, coordinates=coordinates, linestyle=linestyle, markerstyle=markerstyle, label=label, mplobj=line)
def function[draw_line, parameter[self, ax, line, force_trans]]: constant[Process a matplotlib line and call renderer.draw_line] <ast.Tuple object at 0x7da1b0e7a200> assign[=] call[name[self].process_transform, parameter[call[name[line].get_transform, parameter[]], name[ax], call[name[line].get_xydata, parameter[]]]] variable[linestyle] assign[=] call[name[utils].get_line_style, parameter[name[line]]] if compare[call[name[linestyle]][constant[dasharray]] is constant[None]] begin[:] variable[linestyle] assign[=] constant[None] variable[markerstyle] assign[=] call[name[utils].get_marker_style, parameter[name[line]]] if <ast.BoolOp object at 0x7da1b0e7afb0> begin[:] variable[markerstyle] assign[=] constant[None] variable[label] assign[=] call[name[line].get_label, parameter[]] if <ast.BoolOp object at 0x7da1b0e7beb0> begin[:] call[name[self].renderer.draw_marked_line, parameter[]]
keyword[def] identifier[draw_line] ( identifier[self] , identifier[ax] , identifier[line] , identifier[force_trans] = keyword[None] ): literal[string] identifier[coordinates] , identifier[data] = identifier[self] . identifier[process_transform] ( identifier[line] . identifier[get_transform] (), identifier[ax] , identifier[line] . identifier[get_xydata] (), identifier[force_trans] = identifier[force_trans] ) identifier[linestyle] = identifier[utils] . identifier[get_line_style] ( identifier[line] ) keyword[if] identifier[linestyle] [ literal[string] ] keyword[is] keyword[None] : identifier[linestyle] = keyword[None] identifier[markerstyle] = identifier[utils] . identifier[get_marker_style] ( identifier[line] ) keyword[if] ( identifier[markerstyle] [ literal[string] ] keyword[in] [ literal[string] , literal[string] , keyword[None] ] keyword[or] identifier[markerstyle] [ literal[string] ][ literal[int] ]. identifier[size] == literal[int] ): identifier[markerstyle] = keyword[None] identifier[label] = identifier[line] . identifier[get_label] () keyword[if] identifier[markerstyle] keyword[or] identifier[linestyle] : identifier[self] . identifier[renderer] . identifier[draw_marked_line] ( identifier[data] = identifier[data] , identifier[coordinates] = identifier[coordinates] , identifier[linestyle] = identifier[linestyle] , identifier[markerstyle] = identifier[markerstyle] , identifier[label] = identifier[label] , identifier[mplobj] = identifier[line] )
def draw_line(self, ax, line, force_trans=None): """Process a matplotlib line and call renderer.draw_line""" (coordinates, data) = self.process_transform(line.get_transform(), ax, line.get_xydata(), force_trans=force_trans) linestyle = utils.get_line_style(line) if linestyle['dasharray'] is None: linestyle = None # depends on [control=['if'], data=[]] markerstyle = utils.get_marker_style(line) if markerstyle['marker'] in ['None', 'none', None] or markerstyle['markerpath'][0].size == 0: markerstyle = None # depends on [control=['if'], data=[]] label = line.get_label() if markerstyle or linestyle: self.renderer.draw_marked_line(data=data, coordinates=coordinates, linestyle=linestyle, markerstyle=markerstyle, label=label, mplobj=line) # depends on [control=['if'], data=[]]
def validate_password(entry, username, check_function, password=None, retries=1,
                      save_on_success=True, prompt=None, **check_args):
    """
    Validate a password with a check function & retry if the password is incorrect.

    Useful after a user has changed their password in LDAP, but their local keychain entry
    is then out of sync.

    :param str entry: The keychain entry to fetch a password from.
    :param str username: The username to authenticate
    :param func check_function: Check function to use. Should take (username, password, **check_args)
    :param str password: The password to validate. If `None`, the user will be prompted.
    :param int retries: Number of retries to prompt the user for.
    :param bool save_on_success: Save the password if the validation was successful.
    :param str prompt: Alternate prompt to use when asking for the user's password.
    :returns: `True` on successful authentication. `False` otherwise.
    :rtype: bool
    """
    if password is None:
        password = get_password(entry, username, prompt)

    for _ in xrange(retries + 1):
        if check_function(username, password, **check_args):
            if save_on_success:
                save_password(entry, password, username)
            return True

        log.error("Couldn't successfully authenticate your username & password.")
        password = get_password(entry, username, prompt, always_ask=True)

    return False
def function[validate_password, parameter[entry, username, check_function, password, retries, save_on_success, prompt]]: constant[ Validate a password with a check function & retry if the password is incorrect. Useful for after a user has changed their password in LDAP, but their local keychain entry is then out of sync. :param str entry: The keychain entry to fetch a password from. :param str username: The username to authenticate :param func check_function: Check function to use. Should take (username, password, **check_args) :param str password: The password to validate. If `None`, the user will be prompted. :param int retries: Number of retries to prompt the user for. :param bool save_on_success: Save the password if the validation was successful. :param str prompt: Alternate prompt to use when asking for the user's password. :returns: `True` on successful authentication. `False` otherwise. :rtype: bool ] if compare[name[password] is constant[None]] begin[:] variable[password] assign[=] call[name[get_password], parameter[name[entry], name[username], name[prompt]]] for taget[name[_]] in starred[call[name[xrange], parameter[binary_operation[name[retries] + constant[1]]]]] begin[:] if call[name[check_function], parameter[name[username], name[password]]] begin[:] if name[save_on_success] begin[:] call[name[save_password], parameter[name[entry], name[password], name[username]]] return[constant[True]] call[name[log].error, parameter[constant[Couldn't successfully authenticate your username & password..]]] variable[password] assign[=] call[name[get_password], parameter[name[entry], name[username], name[prompt]]] return[constant[False]]
keyword[def] identifier[validate_password] ( identifier[entry] , identifier[username] , identifier[check_function] , identifier[password] = keyword[None] , identifier[retries] = literal[int] , identifier[save_on_success] = keyword[True] , identifier[prompt] = keyword[None] ,** identifier[check_args] ): literal[string] keyword[if] identifier[password] keyword[is] keyword[None] : identifier[password] = identifier[get_password] ( identifier[entry] , identifier[username] , identifier[prompt] ) keyword[for] identifier[_] keyword[in] identifier[xrange] ( identifier[retries] + literal[int] ): keyword[if] identifier[check_function] ( identifier[username] , identifier[password] ,** identifier[check_args] ): keyword[if] identifier[save_on_success] : identifier[save_password] ( identifier[entry] , identifier[password] , identifier[username] ) keyword[return] keyword[True] identifier[log] . identifier[error] ( literal[string] ) identifier[password] = identifier[get_password] ( identifier[entry] , identifier[username] , identifier[prompt] , identifier[always_ask] = keyword[True] ) keyword[return] keyword[False]
def validate_password(entry, username, check_function, password=None, retries=1, save_on_success=True, prompt=None, **check_args): """ Validate a password with a check function & retry if the password is incorrect. Useful after a user has changed their password in LDAP, but their local keychain entry is then out of sync. :param str entry: The keychain entry to fetch a password from. :param str username: The username to authenticate :param func check_function: Check function to use. Should take (username, password, **check_args) :param str password: The password to validate. If `None`, the user will be prompted. :param int retries: Number of retries to prompt the user for. :param bool save_on_success: Save the password if the validation was successful. :param str prompt: Alternate prompt to use when asking for the user's password. :returns: `True` on successful authentication. `False` otherwise. :rtype: bool """ if password is None: password = get_password(entry, username, prompt) # depends on [control=['if'], data=['password']] for _ in xrange(retries + 1): if check_function(username, password, **check_args): if save_on_success: save_password(entry, password, username) # depends on [control=['if'], data=[]] return True # depends on [control=['if'], data=[]] log.error("Couldn't successfully authenticate your username & password.") password = get_password(entry, username, prompt, always_ask=True) # depends on [control=['for'], data=[]] return False
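Editor's note: a stand-alone sketch of the same prompt/check/retry loop with the keychain helpers stubbed out. It swaps xrange (Python 2 only) for range, and `fake_directory` plays the role of the LDAP check; none of these names come from the original module.

from getpass import getpass

fake_directory = {'alice': 'hunter2'}  # stand-in for the real LDAP check

def check(username, password):
    return fake_directory.get(username) == password

def validate_with_retries(username, retries=1):
    # Prompt, check, and retry up to `retries` extra times.
    for _ in range(retries + 1):
        password = getpass('Password for %s: ' % username)
        if check(username, password):
            return True  # the original saves to the keychain here
        print("Couldn't authenticate, try again.")
    return False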
def fit_tranform(self, raw_documents): """ Transform given list of raw_documents to document-term matrix in sparse CSR format (see scipy) """ X = self.transform(raw_documents, new_document=True) return X
def function[fit_tranform, parameter[self, raw_documents]]: constant[ Transform given list of raw_documents to document-term matrix in sparse CSR format (see scipy) ] variable[X] assign[=] call[name[self].transform, parameter[name[raw_documents]]] return[name[X]]
keyword[def] identifier[fit_tranform] ( identifier[self] , identifier[raw_documents] ): literal[string] identifier[X] = identifier[self] . identifier[transform] ( identifier[raw_documents] , identifier[new_document] = keyword[True] ) keyword[return] identifier[X]
def fit_tranform(self, raw_documents): """ Transform given list of raw_documents to document-term matrix in sparse CSR format (see scipy) """ X = self.transform(raw_documents, new_document=True) return X
def _report_error(self, legacy_message, new_message=None, schema_suffix=None): """ Report an error during validation. There are two error messages. The legacy message is used for backwards compatibility and usually contains the object (possibly very large) that failed to validate. The new message is much better as it contains just a short message on what went wrong. User code can inspect object_expr and schema_expr to see which part of the object failed to validate against which part of the schema. The schema_suffix, if provided, is appended to the schema_expr. This is quite handy to specify the bit that the validator looked at (such as the type or optional flag, etc). object_suffix serves the same purpose but is used for object expressions instead. """ object_expr = self._get_object_expression() schema_expr = self._get_schema_expression() if schema_suffix: schema_expr += schema_suffix raise ValidationError(legacy_message, new_message, object_expr, schema_expr)
def function[_report_error, parameter[self, legacy_message, new_message, schema_suffix]]: constant[ Report an error during validation. There are two error messages. The legacy message is used for backwards compatibility and usually contains the object (possibly very large) that failed to validate. The new message is much better as it contains just a short message on what went wrong. User code can inspect object_expr and schema_expr to see which part of the object failed to validate against which part of the schema. The schema_suffix, if provided, is appended to the schema_expr. This is quite handy to specify the bit that the validator looked at (such as the type or optional flag, etc). object_suffix serves the same purpose but is used for object expressions instead. ] variable[object_expr] assign[=] call[name[self]._get_object_expression, parameter[]] variable[schema_expr] assign[=] call[name[self]._get_schema_expression, parameter[]] if name[schema_suffix] begin[:] <ast.AugAssign object at 0x7da1b26ae1a0> <ast.Raise object at 0x7da1b26afa00>
keyword[def] identifier[_report_error] ( identifier[self] , identifier[legacy_message] , identifier[new_message] = keyword[None] , identifier[schema_suffix] = keyword[None] ): literal[string] identifier[object_expr] = identifier[self] . identifier[_get_object_expression] () identifier[schema_expr] = identifier[self] . identifier[_get_schema_expression] () keyword[if] identifier[schema_suffix] : identifier[schema_expr] += identifier[schema_suffix] keyword[raise] identifier[ValidationError] ( identifier[legacy_message] , identifier[new_message] , identifier[object_expr] , identifier[schema_expr] )
def _report_error(self, legacy_message, new_message=None, schema_suffix=None): """ Report an error during validation. There are two error messages. The legacy message is used for backwards compatibility and usually contains the object (possibly very large) that failed to validate. The new message is much better as it contains just a short message on what went wrong. User code can inspect object_expr and schema_expr to see which part of the object failed to validate against which part of the schema. The schema_suffix, if provided, is appended to the schema_expr. This is quite handy to specify the bit that the validator looked at (such as the type or optional flag, etc). object_suffix serves the same purpose but is used for object expressions instead. """ object_expr = self._get_object_expression() schema_expr = self._get_schema_expression() if schema_suffix: schema_expr += schema_suffix # depends on [control=['if'], data=[]] raise ValidationError(legacy_message, new_message, object_expr, schema_expr)
def GetValues(self): """Retrieves all values within the key. Returns: generator[WinRegistryValue]: Windows Registry value generator. """ if not self._registry_key and self._registry: self._GetKeyFromRegistry() if self._registry_key: return self._registry_key.GetValues() return iter([])
def function[GetValues, parameter[self]]: constant[Retrieves all values within the key. Returns: generator[WinRegistryValue]: Windows Registry value generator. ] if <ast.BoolOp object at 0x7da18dc05ba0> begin[:] call[name[self]._GetKeyFromRegistry, parameter[]] if name[self]._registry_key begin[:] return[call[name[self]._registry_key.GetValues, parameter[]]] return[call[name[iter], parameter[list[[]]]]]
keyword[def] identifier[GetValues] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_registry_key] keyword[and] identifier[self] . identifier[_registry] : identifier[self] . identifier[_GetKeyFromRegistry] () keyword[if] identifier[self] . identifier[_registry_key] : keyword[return] identifier[self] . identifier[_registry_key] . identifier[GetValues] () keyword[return] identifier[iter] ([])
def GetValues(self): """Retrieves all values within the key. Returns: generator[WinRegistryValue]: Windows Registry value generator. """ if not self._registry_key and self._registry: self._GetKeyFromRegistry() # depends on [control=['if'], data=[]] if self._registry_key: return self._registry_key.GetValues() # depends on [control=['if'], data=[]] return iter([])
def get_current_bios_settings(self, only_allowed_settings=True): """Get current BIOS settings. :param: only_allowed_settings: True when only allowed BIOS settings are to be returned. If False, All the BIOS settings supported by iLO are returned. :return: a dictionary of current BIOS settings is returned. Depending on the 'only_allowed_settings', either only the allowed settings are returned or all the supported settings are returned. :raises: IloError, on an error from iLO """ sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID) try: current_settings = sushy_system.bios_settings.json except sushy.exceptions.SushyError as e: msg = (self._('The current BIOS Settings were not found. Error ' '%(error)s') % {'error': str(e)}) LOG.debug(msg) raise exception.IloError(msg) attributes = current_settings.get("Attributes") if only_allowed_settings and attributes: return common_utils.apply_bios_properties_filter( attributes, ilo_cons.SUPPORTED_REDFISH_BIOS_PROPERTIES) return attributes
def function[get_current_bios_settings, parameter[self, only_allowed_settings]]: constant[Get current BIOS settings. :param: only_allowed_settings: True when only allowed BIOS settings are to be returned. If False, All the BIOS settings supported by iLO are returned. :return: a dictionary of current BIOS settings is returned. Depending on the 'only_allowed_settings', either only the allowed settings are returned or all the supported settings are returned. :raises: IloError, on an error from iLO ] variable[sushy_system] assign[=] call[name[self]._get_sushy_system, parameter[name[PROLIANT_SYSTEM_ID]]] <ast.Try object at 0x7da1b197f9d0> variable[attributes] assign[=] call[name[current_settings].get, parameter[constant[Attributes]]] if <ast.BoolOp object at 0x7da1b197eb00> begin[:] return[call[name[common_utils].apply_bios_properties_filter, parameter[name[attributes], name[ilo_cons].SUPPORTED_REDFISH_BIOS_PROPERTIES]]] return[name[attributes]]
keyword[def] identifier[get_current_bios_settings] ( identifier[self] , identifier[only_allowed_settings] = keyword[True] ): literal[string] identifier[sushy_system] = identifier[self] . identifier[_get_sushy_system] ( identifier[PROLIANT_SYSTEM_ID] ) keyword[try] : identifier[current_settings] = identifier[sushy_system] . identifier[bios_settings] . identifier[json] keyword[except] identifier[sushy] . identifier[exceptions] . identifier[SushyError] keyword[as] identifier[e] : identifier[msg] =( identifier[self] . identifier[_] ( literal[string] literal[string] )% { literal[string] : identifier[str] ( identifier[e] )}) identifier[LOG] . identifier[debug] ( identifier[msg] ) keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] ) identifier[attributes] = identifier[current_settings] . identifier[get] ( literal[string] ) keyword[if] identifier[only_allowed_settings] keyword[and] identifier[attributes] : keyword[return] identifier[common_utils] . identifier[apply_bios_properties_filter] ( identifier[attributes] , identifier[ilo_cons] . identifier[SUPPORTED_REDFISH_BIOS_PROPERTIES] ) keyword[return] identifier[attributes]
def get_current_bios_settings(self, only_allowed_settings=True): """Get current BIOS settings. :param: only_allowed_settings: True when only allowed BIOS settings are to be returned. If False, All the BIOS settings supported by iLO are returned. :return: a dictionary of current BIOS settings is returned. Depending on the 'only_allowed_settings', either only the allowed settings are returned or all the supported settings are returned. :raises: IloError, on an error from iLO """ sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID) try: current_settings = sushy_system.bios_settings.json # depends on [control=['try'], data=[]] except sushy.exceptions.SushyError as e: msg = self._('The current BIOS Settings were not found. Error %(error)s') % {'error': str(e)} LOG.debug(msg) raise exception.IloError(msg) # depends on [control=['except'], data=['e']] attributes = current_settings.get('Attributes') if only_allowed_settings and attributes: return common_utils.apply_bios_properties_filter(attributes, ilo_cons.SUPPORTED_REDFISH_BIOS_PROPERTIES) # depends on [control=['if'], data=[]] return attributes
def store(self, obj, distinct=False):
    '''
    Store an object in the table.

    :param obj: An object to store
    :param distinct: Store the object only if no identical object already exists.
                     If at least one field differs, the object is stored.
    :return:
    '''
    if distinct:
        fields = dict(zip(self._tables[obj._TABLE].keys(),
                          obj._serialize(self._tables[obj._TABLE])))
        db_obj = self.get(obj.__class__, eq=fields)
        if db_obj and distinct:
            raise Exception("Object already in the database.")

    with gzip.open(os.path.join(self.db_path, obj._TABLE), 'a') as table:
        csv.writer(table).writerow(self._validate_object(obj))
def function[store, parameter[self, obj, distinct]]: constant[ Store an object in the table. :param obj: An object to store :param distinct: Store object only if there is none identical of such. If at least one field is different, store it. :return: ] if name[distinct] begin[:] variable[fields] assign[=] call[name[dict], parameter[call[name[zip], parameter[call[call[name[self]._tables][name[obj]._TABLE].keys, parameter[]], call[name[obj]._serialize, parameter[call[name[self]._tables][name[obj]._TABLE]]]]]]] variable[db_obj] assign[=] call[name[self].get, parameter[name[obj].__class__]] if <ast.BoolOp object at 0x7da18f811e40> begin[:] <ast.Raise object at 0x7da18f810400> with call[name[gzip].open, parameter[call[name[os].path.join, parameter[name[self].db_path, name[obj]._TABLE]], constant[a]]] begin[:] call[call[name[csv].writer, parameter[name[table]]].writerow, parameter[call[name[self]._validate_object, parameter[name[obj]]]]]
keyword[def] identifier[store] ( identifier[self] , identifier[obj] , identifier[distinct] = keyword[False] ): literal[string] keyword[if] identifier[distinct] : identifier[fields] = identifier[dict] ( identifier[zip] ( identifier[self] . identifier[_tables] [ identifier[obj] . identifier[_TABLE] ]. identifier[keys] (), identifier[obj] . identifier[_serialize] ( identifier[self] . identifier[_tables] [ identifier[obj] . identifier[_TABLE] ]))) identifier[db_obj] = identifier[self] . identifier[get] ( identifier[obj] . identifier[__class__] , identifier[eq] = identifier[fields] ) keyword[if] identifier[db_obj] keyword[and] identifier[distinct] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[with] identifier[gzip] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[db_path] , identifier[obj] . identifier[_TABLE] ), literal[string] ) keyword[as] identifier[table] : identifier[csv] . identifier[writer] ( identifier[table] ). identifier[writerow] ( identifier[self] . identifier[_validate_object] ( identifier[obj] ))
def store(self, obj, distinct=False): """ Store an object in the table. :param obj: An object to store :param distinct: Store the object only if no identical object already exists. If at least one field differs, the object is stored. :return: """ if distinct: fields = dict(zip(self._tables[obj._TABLE].keys(), obj._serialize(self._tables[obj._TABLE]))) db_obj = self.get(obj.__class__, eq=fields) if db_obj and distinct: raise Exception('Object already in the database.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] with gzip.open(os.path.join(self.db_path, obj._TABLE), 'a') as table: csv.writer(table).writerow(self._validate_object(obj)) # depends on [control=['with'], data=['table']]
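Editor's note: one portability caveat with the final block above — on Python 3, gzip.open(path, 'a') opens the file in binary append mode while csv.writer writes text, so the writerow call raises a TypeError; text mode 'at' fixes it. A sketch of the append using a temporary directory (the helper name and paths are mine):

import csv
import gzip
import os
import tempfile

def append_row(db_path, table_name, row):
    # 'at' = text-mode append; newline='' is the csv-module convention.
    with gzip.open(os.path.join(db_path, table_name), 'at', newline='') as fh:
        csv.writer(fh).writerow(row)

db_path = tempfile.mkdtemp()
append_row(db_path, 'demo_table', ['id-1', 'some value'])
with gzip.open(os.path.join(db_path, 'demo_table'), 'rt', newline='') as fh:
    print(list(csv.reader(fh)))  # [['id-1', 'some value']]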
def find_element_by_jquery(step, browser, selector): """Find a single HTML element using jQuery-style selectors.""" elements = find_elements_by_jquery(browser, selector) assert_true(step, len(elements) > 0) return elements[0]
def function[find_element_by_jquery, parameter[step, browser, selector]]: constant[Find a single HTML element using jQuery-style selectors.] variable[elements] assign[=] call[name[find_elements_by_jquery], parameter[name[browser], name[selector]]] call[name[assert_true], parameter[name[step], compare[call[name[len], parameter[name[elements]]] greater[>] constant[0]]]] return[call[name[elements]][constant[0]]]
keyword[def] identifier[find_element_by_jquery] ( identifier[step] , identifier[browser] , identifier[selector] ): literal[string] identifier[elements] = identifier[find_elements_by_jquery] ( identifier[browser] , identifier[selector] ) identifier[assert_true] ( identifier[step] , identifier[len] ( identifier[elements] )> literal[int] ) keyword[return] identifier[elements] [ literal[int] ]
def find_element_by_jquery(step, browser, selector): """Find a single HTML element using jQuery-style selectors.""" elements = find_elements_by_jquery(browser, selector) assert_true(step, len(elements) > 0) return elements[0]
def scheduled_event_trigger(self, event_type): """Returns a callback that schedules events for the future. Returned callback function will add an event of type event_type to a queue which will be checked the next time an event is requested.""" def callback(when, **kwargs): self.queued_scheduled_events.append((when, event_type(when=when, **kwargs))) return callback
def function[scheduled_event_trigger, parameter[self, event_type]]: constant[Returns a callback that schedules events for the future. Returned callback function will add an event of type event_type to a queue which will be checked the next time an event is requested.] def function[callback, parameter[when]]: call[name[self].queued_scheduled_events.append, parameter[tuple[[<ast.Name object at 0x7da1b0fb1de0>, <ast.Call object at 0x7da1b0fb2320>]]]] return[name[callback]]
keyword[def] identifier[scheduled_event_trigger] ( identifier[self] , identifier[event_type] ): literal[string] keyword[def] identifier[callback] ( identifier[when] ,** identifier[kwargs] ): identifier[self] . identifier[queued_scheduled_events] . identifier[append] (( identifier[when] , identifier[event_type] ( identifier[when] = identifier[when] ,** identifier[kwargs] ))) keyword[return] identifier[callback]
def scheduled_event_trigger(self, event_type): """Returns a callback that schedules events for the future. Returned callback function will add an event of type event_type to a queue which will be checked the next time an event is requested.""" def callback(when, **kwargs): self.queued_scheduled_events.append((when, event_type(when=when, **kwargs))) return callback
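Editor's note: the trigger above is just a closure over self.queued_scheduled_events; a minimal runnable version with a namedtuple standing in for the event classes (`Event` is hypothetical).

from collections import namedtuple

Event = namedtuple('Event', ['when', 'payload'])  # hypothetical event type

class Scheduler:
    def __init__(self):
        self.queued_scheduled_events = []

    def scheduled_event_trigger(self, event_type):
        # The returned callback captures both the queue and the event type.
        def callback(when, **kwargs):
            self.queued_scheduled_events.append(
                (when, event_type(when=when, **kwargs)))
        return callback

s = Scheduler()
fire = s.scheduled_event_trigger(Event)
fire(10, payload='reminder')
print(s.queued_scheduled_events)  # [(10, Event(when=10, payload='reminder'))]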
def sync(self, rules: list): """ Synchronizes the given ruleset with the one on the server and adds the not yet existing rules to the server. :type rules: collections.Iterable[Rule] """ self.client = self.connect() try: server_rules = set(self.server_rules) rules = set(rules) to_remove_rules = server_rules.difference(rules) to_add_rules = rules.difference(server_rules) for to_remove_rule in to_remove_rules: stdin, stdout, stderr = self.client.exec_command( to_remove_rule.remove_command ) stdout.read() stderr.read() for to_add_rule in to_add_rules: stdin, stdout, stderr = self.client.exec_command( to_add_rule.add_command ) stdout.read() stderr.read() if len(to_remove_rules) or len(to_add_rules): self._write_to_server(rules) stdin, stdout, stderr = self.client.exec_command( 'ip route flush cache' ) stdout.read() stderr.read() finally: self.client.close()
def function[sync, parameter[self, rules]]: constant[ Synchronizes the given ruleset with the one on the server and adds the not yet existing rules to the server. :type rules: collections.Iterable[Rule] ] name[self].client assign[=] call[name[self].connect, parameter[]] <ast.Try object at 0x7da18bc72c20>
keyword[def] identifier[sync] ( identifier[self] , identifier[rules] : identifier[list] ): literal[string] identifier[self] . identifier[client] = identifier[self] . identifier[connect] () keyword[try] : identifier[server_rules] = identifier[set] ( identifier[self] . identifier[server_rules] ) identifier[rules] = identifier[set] ( identifier[rules] ) identifier[to_remove_rules] = identifier[server_rules] . identifier[difference] ( identifier[rules] ) identifier[to_add_rules] = identifier[rules] . identifier[difference] ( identifier[server_rules] ) keyword[for] identifier[to_remove_rule] keyword[in] identifier[to_remove_rules] : identifier[stdin] , identifier[stdout] , identifier[stderr] = identifier[self] . identifier[client] . identifier[exec_command] ( identifier[to_remove_rule] . identifier[remove_command] ) identifier[stdout] . identifier[read] () identifier[stderr] . identifier[read] () keyword[for] identifier[to_add_rule] keyword[in] identifier[to_add_rules] : identifier[stdin] , identifier[stdout] , identifier[stderr] = identifier[self] . identifier[client] . identifier[exec_command] ( identifier[to_add_rule] . identifier[add_command] ) identifier[stdout] . identifier[read] () identifier[stderr] . identifier[read] () keyword[if] identifier[len] ( identifier[to_remove_rules] ) keyword[or] identifier[len] ( identifier[to_add_rules] ): identifier[self] . identifier[_write_to_server] ( identifier[rules] ) identifier[stdin] , identifier[stdout] , identifier[stderr] = identifier[self] . identifier[client] . identifier[exec_command] ( literal[string] ) identifier[stdout] . identifier[read] () identifier[stderr] . identifier[read] () keyword[finally] : identifier[self] . identifier[client] . identifier[close] ()
def sync(self, rules: list): """ Synchronizes the given ruleset with the one on the server and adds the not yet existing rules to the server. :type rules: collections.Iterable[Rule] """ self.client = self.connect() try: server_rules = set(self.server_rules) rules = set(rules) to_remove_rules = server_rules.difference(rules) to_add_rules = rules.difference(server_rules) for to_remove_rule in to_remove_rules: (stdin, stdout, stderr) = self.client.exec_command(to_remove_rule.remove_command) stdout.read() stderr.read() # depends on [control=['for'], data=['to_remove_rule']] for to_add_rule in to_add_rules: (stdin, stdout, stderr) = self.client.exec_command(to_add_rule.add_command) stdout.read() stderr.read() # depends on [control=['for'], data=['to_add_rule']] if len(to_remove_rules) or len(to_add_rules): self._write_to_server(rules) (stdin, stdout, stderr) = self.client.exec_command('ip route flush cache') stdout.read() stderr.read() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] finally: self.client.close()
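Editor's note: stripped of the SSH plumbing, the reconciliation above is two set differences — which only works because Rule is presumably hashable and comparable. Plain strings stand in for Rule objects here.

desired = {'rule-a', 'rule-b', 'rule-c'}    # the ruleset passed in
on_server = {'rule-b', 'rule-d'}            # what the server reports

to_add = desired - on_server                # rules missing on the server
to_remove = on_server - desired             # rules the server should drop
print(sorted(to_add), sorted(to_remove))    # ['rule-a', 'rule-c'] ['rule-d']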
def roll_down_capture(returns, factor_returns, window=10, **kwargs): """ Computes the down capture measure over a rolling window. see documentation for :func:`~empyrical.stats.down_capture`. (pass all args, kwargs required) Parameters ---------- returns : pd.Series or np.ndarray Daily returns of the strategy, noncumulative. - See full explanation in :func:`~empyrical.stats.cum_returns`. factor_returns : pd.Series or np.ndarray Noncumulative returns of the factor to which beta is computed. Usually a benchmark such as the market. - This is in the same style as returns. window : int, required Size of the rolling window in terms of the periodicity of the data. - eg window = 60, periodicity=DAILY, represents a rolling 60 day window """ return roll(returns, factor_returns, window=window, function=down_capture, **kwargs)
def function[roll_down_capture, parameter[returns, factor_returns, window]]: constant[ Computes the down capture measure over a rolling window. see documentation for :func:`~empyrical.stats.down_capture`. (pass all args, kwargs required) Parameters ---------- returns : pd.Series or np.ndarray Daily returns of the strategy, noncumulative. - See full explanation in :func:`~empyrical.stats.cum_returns`. factor_returns : pd.Series or np.ndarray Noncumulative returns of the factor to which beta is computed. Usually a benchmark such as the market. - This is in the same style as returns. window : int, required Size of the rolling window in terms of the periodicity of the data. - eg window = 60, periodicity=DAILY, represents a rolling 60 day window ] return[call[name[roll], parameter[name[returns], name[factor_returns]]]]
keyword[def] identifier[roll_down_capture] ( identifier[returns] , identifier[factor_returns] , identifier[window] = literal[int] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[roll] ( identifier[returns] , identifier[factor_returns] , identifier[window] = identifier[window] , identifier[function] = identifier[down_capture] , ** identifier[kwargs] )
def roll_down_capture(returns, factor_returns, window=10, **kwargs): """ Computes the down capture measure over a rolling window. see documentation for :func:`~empyrical.stats.down_capture`. (pass all args, kwargs required) Parameters ---------- returns : pd.Series or np.ndarray Daily returns of the strategy, noncumulative. - See full explanation in :func:`~empyrical.stats.cum_returns`. factor_returns : pd.Series or np.ndarray Noncumulative returns of the factor to which beta is computed. Usually a benchmark such as the market. - This is in the same style as returns. window : int, required Size of the rolling window in terms of the periodicity of the data. - eg window = 60, periodicity=DAILY, represents a rolling 60 day window """ return roll(returns, factor_returns, window=window, function=down_capture, **kwargs)
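Editor's note: assuming this is empyrical's public roll_down_capture, a usage sketch with synthetic daily returns (the numbers are random, not meaningful).

import numpy as np
from empyrical import roll_down_capture  # assuming the empyrical package

rng = np.random.RandomState(42)
strategy = rng.normal(0.0005, 0.010, 250)   # synthetic daily strategy returns
benchmark = rng.normal(0.0004, 0.012, 250)  # synthetic benchmark returns

rolling = roll_down_capture(strategy, benchmark, window=60)
print(rolling.shape)  # one down-capture value per 60-observation window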
def retrieve_keras_weights(java_model):
    """For a previously imported Keras model, after training it with DL4J Spark, we want to set the resulting
    weights back to the original Keras model.

    :param java_model: DL4J model (MultiLayerNetwork or ComputationGraph)
    :return: list of numpy arrays in correct order for model.set_weights(...) of a corresponding Keras model
    """
    weights = []
    layers = java_model.getLayers()
    for layer in layers:
        params = layer.paramTable()
        keys = params.keySet()
        key_list = java_classes.ArrayList(keys)
        for key in key_list:
            weight = params.get(key)
            np_weight = np.squeeze(to_numpy(weight))
            weights.append(np_weight)
    return weights
def function[retrieve_keras_weights, parameter[java_model]]: constant[For a previously imported Keras model, after training it with DL4J Spark, we want to set the resulting weights back to the original Keras model. :param java_model: DL4J model (MultiLayerNetwork or ComputationGraph :return: list of numpy arrays in correct order for model.set_weights(...) of a corresponding Keras model ] variable[weights] assign[=] list[[]] variable[layers] assign[=] call[name[java_model].getLayers, parameter[]] for taget[name[layer]] in starred[name[layers]] begin[:] variable[params] assign[=] call[name[layer].paramTable, parameter[]] variable[keys] assign[=] call[name[params].keySet, parameter[]] variable[key_list] assign[=] call[name[java_classes].ArrayList, parameter[name[keys]]] for taget[name[key]] in starred[name[key_list]] begin[:] variable[weight] assign[=] call[name[params].get, parameter[name[key]]] variable[np_weight] assign[=] call[name[np].squeeze, parameter[call[name[to_numpy], parameter[name[weight]]]]] call[name[weights].append, parameter[name[np_weight]]] return[name[weights]]
keyword[def] identifier[retrieve_keras_weights] ( identifier[java_model] ): literal[string] identifier[weights] =[] identifier[layers] = identifier[java_model] . identifier[getLayers] () keyword[for] identifier[layer] keyword[in] identifier[layers] : identifier[params] = identifier[layer] . identifier[paramTable] () identifier[keys] = identifier[params] . identifier[keySet] () identifier[key_list] = identifier[java_classes] . identifier[ArrayList] ( identifier[keys] ) keyword[for] identifier[key] keyword[in] identifier[key_list] : identifier[weight] = identifier[params] . identifier[get] ( identifier[key] ) identifier[np_weight] = identifier[np] . identifier[squeeze] ( identifier[to_numpy] ( identifier[weight] )) identifier[weights] . identifier[append] ( identifier[np_weight] ) keyword[return] identifier[weights]
def retrieve_keras_weights(java_model): """For a previously imported Keras model, after training it with DL4J Spark, we want to set the resulting weights back to the original Keras model. :param java_model: DL4J model (MultiLayerNetwork or ComputationGraph) :return: list of numpy arrays in correct order for model.set_weights(...) of a corresponding Keras model """ weights = [] layers = java_model.getLayers() for layer in layers: params = layer.paramTable() keys = params.keySet() key_list = java_classes.ArrayList(keys) for key in key_list: weight = params.get(key) np_weight = np.squeeze(to_numpy(weight)) weights.append(np_weight) # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['layer']] return weights
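Editor's note: the returned list is ordered to match a Keras set_weights call. A hedged round-trip sketch, where `java_model` and `keras_model` are assumed to describe the same architecture and are placeholders here:

# java_model: the trained DL4J network; keras_model: the original Keras model.
weights = retrieve_keras_weights(java_model)
keras_model.set_weights(weights)  # Keras expects the arrays in exactly this order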
def setEnable(self, status, lanInterfaceId=1, timeout=1): """Set enable status for a LAN interface, be careful you don't cut yourself off. :param bool status: enable or disable the interface :param int lanInterfaceId: the id of the LAN interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Lan.getServiceType("setEnable") + str(lanInterfaceId) uri = self.getControlURL(namespace) if status: setStatus = 1 else: setStatus = 0 self.execute(uri, namespace, "SetEnable", timeout=timeout, NewEnable=setStatus)
def function[setEnable, parameter[self, status, lanInterfaceId, timeout]]: constant[Set enable status for a LAN interface, be careful you don't cut yourself off. :param bool status: enable or disable the interface :param int lanInterfaceId: the id of the LAN interface :param float timeout: the timeout to wait for the action to be executed ] variable[namespace] assign[=] binary_operation[call[name[Lan].getServiceType, parameter[constant[setEnable]]] + call[name[str], parameter[name[lanInterfaceId]]]] variable[uri] assign[=] call[name[self].getControlURL, parameter[name[namespace]]] if name[status] begin[:] variable[setStatus] assign[=] constant[1] call[name[self].execute, parameter[name[uri], name[namespace], constant[SetEnable]]]
keyword[def] identifier[setEnable] ( identifier[self] , identifier[status] , identifier[lanInterfaceId] = literal[int] , identifier[timeout] = literal[int] ): literal[string] identifier[namespace] = identifier[Lan] . identifier[getServiceType] ( literal[string] )+ identifier[str] ( identifier[lanInterfaceId] ) identifier[uri] = identifier[self] . identifier[getControlURL] ( identifier[namespace] ) keyword[if] identifier[status] : identifier[setStatus] = literal[int] keyword[else] : identifier[setStatus] = literal[int] identifier[self] . identifier[execute] ( identifier[uri] , identifier[namespace] , literal[string] , identifier[timeout] = identifier[timeout] , identifier[NewEnable] = identifier[setStatus] )
def setEnable(self, status, lanInterfaceId=1, timeout=1): """Set enable status for a LAN interface, be careful you don't cut yourself off. :param bool status: enable or disable the interface :param int lanInterfaceId: the id of the LAN interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Lan.getServiceType('setEnable') + str(lanInterfaceId) uri = self.getControlURL(namespace) if status: setStatus = 1 # depends on [control=['if'], data=[]] else: setStatus = 0 self.execute(uri, namespace, 'SetEnable', timeout=timeout, NewEnable=setStatus)
def in_out_ratio(self, node: BaseEntity) -> float: """Calculate the ratio of in-degree / out-degree of a node.""" return self.graph.in_degree(node) / float(self.graph.out_degree(node))
def function[in_out_ratio, parameter[self, node]]: constant[Calculate the ratio of in-degree / out-degree of a node.] return[binary_operation[call[name[self].graph.in_degree, parameter[name[node]]] / call[name[float], parameter[call[name[self].graph.out_degree, parameter[name[node]]]]]]]
keyword[def] identifier[in_out_ratio] ( identifier[self] , identifier[node] : identifier[BaseEntity] )-> identifier[float] : literal[string] keyword[return] identifier[self] . identifier[graph] . identifier[in_degree] ( identifier[node] )/ identifier[float] ( identifier[self] . identifier[graph] . identifier[out_degree] ( identifier[node] ))
def in_out_ratio(self, node: BaseEntity) -> float: """Calculate the ratio of in-degree / out-degree of a node.""" return self.graph.in_degree(node) / float(self.graph.out_degree(node))
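Editor's note: a quick networkx check of the ratio, which also shows the sink-node caveat — an out-degree of 0 divides by zero.

import networkx as nx

g = nx.DiGraph()
g.add_edges_from([('a', 'x'), ('b', 'x'), ('x', 'c')])
print(g.in_degree('x') / float(g.out_degree('x')))  # 2.0

# g.in_degree('c') / float(g.out_degree('c')) would raise ZeroDivisionError,
# since 'c' has no outgoing edges.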
def _gen_trend_graph(start, end, force_overwrite=False): """ Total trend graph for machine category. """ filename = graphs.get_trend_graph_filename(start, end) csv_filename = os.path.join(GRAPH_ROOT, filename + '.csv') png_filename = os.path.join(GRAPH_ROOT, filename + '.png') _check_directory_exists(csv_filename) _check_directory_exists(png_filename) if not settings.GRAPH_DEBUG or force_overwrite: if os.path.exists(csv_filename): if os.path.exists(png_filename): return query = CPUJob.objects.filter( date__range=(start, end) ) query = query.values('date').annotate(Sum('cpu_usage')) query = query.order_by('date') t_start = start t_end = end start_str = start.strftime('%Y-%m-%d') end_str = end.strftime('%Y-%m-%d') fig, ax = plt.subplots(figsize=(6, 4)) ax.set_xlim(start, end) ax.set_title('%s - %s' % (start_str, end_str)) ax.set_ylabel("CPU Time (hours)") ax.set_xlabel("Date") locator = mdates.AutoDateLocator() ax.xaxis.set_major_locator(locator) ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(locator)) ax.xaxis.set_minor_locator(mdates.DayLocator()) data = {} x_data = [] y_data = [] with open(csv_filename, 'w') as csv_file: csv_writer = csv.writer(csv_file) for row in query.iterator(): csv_writer.writerow([ row['date'], row['cpu_usage__sum'] / 3600.00 ]) date = row['date'] data[date] = row['cpu_usage__sum'] start = t_start end = t_end while start <= end: total = 0 if start in data: total = data[start] x_data.append(start) y_data.append(total / 3600.00) start = start + datetime.timedelta(days=1) del data ax.plot(x_data, y_data) del x_data del y_data fig.autofmt_xdate() plt.tight_layout() plt.savefig(png_filename) plt.close()
def function[_gen_trend_graph, parameter[start, end, force_overwrite]]: constant[ Total trend graph for machine category. ] variable[filename] assign[=] call[name[graphs].get_trend_graph_filename, parameter[name[start], name[end]]] variable[csv_filename] assign[=] call[name[os].path.join, parameter[name[GRAPH_ROOT], binary_operation[name[filename] + constant[.csv]]]] variable[png_filename] assign[=] call[name[os].path.join, parameter[name[GRAPH_ROOT], binary_operation[name[filename] + constant[.png]]]] call[name[_check_directory_exists], parameter[name[csv_filename]]] call[name[_check_directory_exists], parameter[name[png_filename]]] if <ast.BoolOp object at 0x7da18ede58a0> begin[:] if call[name[os].path.exists, parameter[name[csv_filename]]] begin[:] if call[name[os].path.exists, parameter[name[png_filename]]] begin[:] return[None] variable[query] assign[=] call[name[CPUJob].objects.filter, parameter[]] variable[query] assign[=] call[call[name[query].values, parameter[constant[date]]].annotate, parameter[call[name[Sum], parameter[constant[cpu_usage]]]]] variable[query] assign[=] call[name[query].order_by, parameter[constant[date]]] variable[t_start] assign[=] name[start] variable[t_end] assign[=] name[end] variable[start_str] assign[=] call[name[start].strftime, parameter[constant[%Y-%m-%d]]] variable[end_str] assign[=] call[name[end].strftime, parameter[constant[%Y-%m-%d]]] <ast.Tuple object at 0x7da18ede6ec0> assign[=] call[name[plt].subplots, parameter[]] call[name[ax].set_xlim, parameter[name[start], name[end]]] call[name[ax].set_title, parameter[binary_operation[constant[%s - %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18ede4640>, <ast.Name object at 0x7da18ede6020>]]]]] call[name[ax].set_ylabel, parameter[constant[CPU Time (hours)]]] call[name[ax].set_xlabel, parameter[constant[Date]]] variable[locator] assign[=] call[name[mdates].AutoDateLocator, parameter[]] call[name[ax].xaxis.set_major_locator, parameter[name[locator]]] call[name[ax].xaxis.set_major_formatter, parameter[call[name[mdates].AutoDateFormatter, parameter[name[locator]]]]] call[name[ax].xaxis.set_minor_locator, parameter[call[name[mdates].DayLocator, parameter[]]]] variable[data] assign[=] dictionary[[], []] variable[x_data] assign[=] list[[]] variable[y_data] assign[=] list[[]] with call[name[open], parameter[name[csv_filename], constant[w]]] begin[:] variable[csv_writer] assign[=] call[name[csv].writer, parameter[name[csv_file]]] for taget[name[row]] in starred[call[name[query].iterator, parameter[]]] begin[:] call[name[csv_writer].writerow, parameter[list[[<ast.Subscript object at 0x7da18ede60e0>, <ast.BinOp object at 0x7da18ede76d0>]]]] variable[date] assign[=] call[name[row]][constant[date]] call[name[data]][name[date]] assign[=] call[name[row]][constant[cpu_usage__sum]] variable[start] assign[=] name[t_start] variable[end] assign[=] name[t_end] while compare[name[start] less_or_equal[<=] name[end]] begin[:] variable[total] assign[=] constant[0] if compare[name[start] in name[data]] begin[:] variable[total] assign[=] call[name[data]][name[start]] call[name[x_data].append, parameter[name[start]]] call[name[y_data].append, parameter[binary_operation[name[total] / constant[3600.0]]]] variable[start] assign[=] binary_operation[name[start] + call[name[datetime].timedelta, parameter[]]] <ast.Delete object at 0x7da1b02406d0> call[name[ax].plot, parameter[name[x_data], name[y_data]]] <ast.Delete object at 0x7da1b0337eb0> <ast.Delete object at 0x7da1b0335810> call[name[fig].autofmt_xdate, parameter[]] call[name[plt].tight_layout, parameter[]] call[name[plt].savefig, parameter[name[png_filename]]] call[name[plt].close, parameter[]]
keyword[def] identifier[_gen_trend_graph] ( identifier[start] , identifier[end] , identifier[force_overwrite] = keyword[False] ): literal[string] identifier[filename] = identifier[graphs] . identifier[get_trend_graph_filename] ( identifier[start] , identifier[end] ) identifier[csv_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[GRAPH_ROOT] , identifier[filename] + literal[string] ) identifier[png_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[GRAPH_ROOT] , identifier[filename] + literal[string] ) identifier[_check_directory_exists] ( identifier[csv_filename] ) identifier[_check_directory_exists] ( identifier[png_filename] ) keyword[if] keyword[not] identifier[settings] . identifier[GRAPH_DEBUG] keyword[or] identifier[force_overwrite] : keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[csv_filename] ): keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[png_filename] ): keyword[return] identifier[query] = identifier[CPUJob] . identifier[objects] . identifier[filter] ( identifier[date__range] =( identifier[start] , identifier[end] ) ) identifier[query] = identifier[query] . identifier[values] ( literal[string] ). identifier[annotate] ( identifier[Sum] ( literal[string] )) identifier[query] = identifier[query] . identifier[order_by] ( literal[string] ) identifier[t_start] = identifier[start] identifier[t_end] = identifier[end] identifier[start_str] = identifier[start] . identifier[strftime] ( literal[string] ) identifier[end_str] = identifier[end] . identifier[strftime] ( literal[string] ) identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( identifier[figsize] =( literal[int] , literal[int] )) identifier[ax] . identifier[set_xlim] ( identifier[start] , identifier[end] ) identifier[ax] . identifier[set_title] ( literal[string] %( identifier[start_str] , identifier[end_str] )) identifier[ax] . identifier[set_ylabel] ( literal[string] ) identifier[ax] . identifier[set_xlabel] ( literal[string] ) identifier[locator] = identifier[mdates] . identifier[AutoDateLocator] () identifier[ax] . identifier[xaxis] . identifier[set_major_locator] ( identifier[locator] ) identifier[ax] . identifier[xaxis] . identifier[set_major_formatter] ( identifier[mdates] . identifier[AutoDateFormatter] ( identifier[locator] )) identifier[ax] . identifier[xaxis] . identifier[set_minor_locator] ( identifier[mdates] . identifier[DayLocator] ()) identifier[data] ={} identifier[x_data] =[] identifier[y_data] =[] keyword[with] identifier[open] ( identifier[csv_filename] , literal[string] ) keyword[as] identifier[csv_file] : identifier[csv_writer] = identifier[csv] . identifier[writer] ( identifier[csv_file] ) keyword[for] identifier[row] keyword[in] identifier[query] . identifier[iterator] (): identifier[csv_writer] . identifier[writerow] ([ identifier[row] [ literal[string] ], identifier[row] [ literal[string] ]/ literal[int] ]) identifier[date] = identifier[row] [ literal[string] ] identifier[data] [ identifier[date] ]= identifier[row] [ literal[string] ] identifier[start] = identifier[t_start] identifier[end] = identifier[t_end] keyword[while] identifier[start] <= identifier[end] : identifier[total] = literal[int] keyword[if] identifier[start] keyword[in] identifier[data] : identifier[total] = identifier[data] [ identifier[start] ] identifier[x_data] . identifier[append] ( identifier[start] ) identifier[y_data] . identifier[append] ( identifier[total] / literal[int] ) identifier[start] = identifier[start] + identifier[datetime] . identifier[timedelta] ( identifier[days] = literal[int] ) keyword[del] identifier[data] identifier[ax] . identifier[plot] ( identifier[x_data] , identifier[y_data] ) keyword[del] identifier[x_data] keyword[del] identifier[y_data] identifier[fig] . identifier[autofmt_xdate] () identifier[plt] . identifier[tight_layout] () identifier[plt] . identifier[savefig] ( identifier[png_filename] ) identifier[plt] . identifier[close] ()
def _gen_trend_graph(start, end, force_overwrite=False): """ Total trend graph for machine category. """ filename = graphs.get_trend_graph_filename(start, end) csv_filename = os.path.join(GRAPH_ROOT, filename + '.csv') png_filename = os.path.join(GRAPH_ROOT, filename + '.png') _check_directory_exists(csv_filename) _check_directory_exists(png_filename) if not settings.GRAPH_DEBUG or force_overwrite: if os.path.exists(csv_filename): if os.path.exists(png_filename): return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] query = CPUJob.objects.filter(date__range=(start, end)) query = query.values('date').annotate(Sum('cpu_usage')) query = query.order_by('date') t_start = start t_end = end start_str = start.strftime('%Y-%m-%d') end_str = end.strftime('%Y-%m-%d') (fig, ax) = plt.subplots(figsize=(6, 4)) ax.set_xlim(start, end) ax.set_title('%s - %s' % (start_str, end_str)) ax.set_ylabel('CPU Time (hours)') ax.set_xlabel('Date') locator = mdates.AutoDateLocator() ax.xaxis.set_major_locator(locator) ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(locator)) ax.xaxis.set_minor_locator(mdates.DayLocator()) data = {} x_data = [] y_data = [] with open(csv_filename, 'w') as csv_file: csv_writer = csv.writer(csv_file) for row in query.iterator(): csv_writer.writerow([row['date'], row['cpu_usage__sum'] / 3600.0]) date = row['date'] data[date] = row['cpu_usage__sum'] # depends on [control=['for'], data=['row']] # depends on [control=['with'], data=['csv_file']] start = t_start end = t_end while start <= end: total = 0 if start in data: total = data[start] # depends on [control=['if'], data=['start', 'data']] x_data.append(start) y_data.append(total / 3600.0) start = start + datetime.timedelta(days=1) # depends on [control=['while'], data=['start']] del data ax.plot(x_data, y_data) del x_data del y_data fig.autofmt_xdate() plt.tight_layout() plt.savefig(png_filename) plt.close()
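The day-by-day while loop in the record above is a standard way to densify a sparse date-keyed aggregate before plotting, substituting zero for days with no jobs. A minimal standalone sketch of just that gap-filling step, independent of the Django and matplotlib context (the usage_by_date dict and the sample dates are illustrative assumptions, not part of the record):

import datetime

def fill_daily_series(usage_by_date, start, end):
    """Walk every day from start to end inclusive, substituting 0 for missing days."""
    x_data, y_data = [], []
    day = start
    while day <= end:  # inclusive range, matching the record above
        seconds = usage_by_date.get(day, 0)  # 0 when no jobs ran that day
        x_data.append(day)
        y_data.append(seconds / 3600.0)  # convert CPU seconds to hours
        day += datetime.timedelta(days=1)
    return x_data, y_data

# Example: two recorded days with a gap between them
usage = {datetime.date(2023, 1, 1): 7200, datetime.date(2023, 1, 3): 3600}
xs, ys = fill_daily_series(usage, datetime.date(2023, 1, 1), datetime.date(2023, 1, 3))
print(ys)  # [2.0, 0.0, 1.0]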
def add_n_trend(trend_input, average_time, initial_trend, subs, subscript_dict): """Trend. Parameters ---------- trend_input: <string> average_time: <string> initial_trend: <string> subs: list of strings List of strings of subscript indices that correspond to the list of expressions, and collectively define the shape of the output See `builder.add_flaux` for more info Returns ------- reference: basestring reference to the trend object `__call__` method, which will return the output of the trend process new_structure: list list of element construction dictionaries for the builder to assemble """ stateful = { 'py_name': utils.make_python_identifier('_trend_%s_%s_%s' % (trend_input, average_time, initial_trend))[0], 'real_name': 'trend of %s' % trend_input, 'doc': 'Trend average time: %s \n Trend initial value %s' % ( average_time, initial_trend), 'py_expr': 'functions.Trend(lambda: %s, lambda: %s, lambda: %s)' % ( trend_input, average_time, initial_trend), 'unit': 'None', 'lims': 'None', 'eqn': 'None', 'subs': '', 'kind': 'stateful', 'arguments': '' } return "%s()" % stateful['py_name'], [stateful]
def function[add_n_trend, parameter[trend_input, average_time, initial_trend, subs, subscript_dict]]: constant[Trend. Parameters ---------- trend_input: <string> average_time: <string> initial_trend: <string> subs: list of strings List of strings of subscript indices that correspond to the list of expressions, and collectively define the shape of the output See `builder.add_flaux` for more info Returns ------- reference: basestring reference to the trend object `__call__` method, which will return the output of the trend process new_structure: list list of element construction dictionaries for the builder to assemble ] variable[stateful] assign[=] dictionary[[<ast.Constant object at 0x7da204344f40>, <ast.Constant object at 0x7da204346590>, <ast.Constant object at 0x7da2043475e0>, <ast.Constant object at 0x7da204346920>, <ast.Constant object at 0x7da204346ce0>, <ast.Constant object at 0x7da2043470d0>, <ast.Constant object at 0x7da204345930>, <ast.Constant object at 0x7da204345540>, <ast.Constant object at 0x7da204346110>, <ast.Constant object at 0x7da204345510>], [<ast.Subscript object at 0x7da2043477c0>, <ast.BinOp object at 0x7da2043451b0>, <ast.BinOp object at 0x7da204347790>, <ast.BinOp object at 0x7da204345870>, <ast.Constant object at 0x7da204346d40>, <ast.Constant object at 0x7da2043445e0>, <ast.Constant object at 0x7da204344040>, <ast.Constant object at 0x7da204347a00>, <ast.Constant object at 0x7da204345630>, <ast.Constant object at 0x7da204345a20>]] return[tuple[[<ast.BinOp object at 0x7da204345690>, <ast.List object at 0x7da2043440d0>]]]
keyword[def] identifier[add_n_trend] ( identifier[trend_input] , identifier[average_time] , identifier[initial_trend] , identifier[subs] , identifier[subscript_dict] ): literal[string] identifier[stateful] ={ literal[string] : identifier[utils] . identifier[make_python_identifier] ( literal[string] %( identifier[trend_input] , identifier[average_time] , identifier[initial_trend] ))[ literal[int] ], literal[string] : literal[string] % identifier[trend_input] , literal[string] : literal[string] %( identifier[average_time] , identifier[initial_trend] ), literal[string] : literal[string] %( identifier[trend_input] , identifier[average_time] , identifier[initial_trend] ), literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } keyword[return] literal[string] % identifier[stateful] [ literal[string] ],[ identifier[stateful] ]
def add_n_trend(trend_input, average_time, initial_trend, subs, subscript_dict): """Trend. Parameters ---------- trend_input: <string> average_time: <string> initial_trend: <string> subs: list of strings List of strings of subscript indices that correspond to the list of expressions, and collectively define the shape of the output See `builder.add_flaux` for more info Returns ------- reference: basestring reference to the trend object `__call__` method, which will return the output of the trend process new_structure: list list of element construction dictionaries for the builder to assemble """ stateful = {'py_name': utils.make_python_identifier('_trend_%s_%s_%s' % (trend_input, average_time, initial_trend))[0], 'real_name': 'trend of %s' % trend_input, 'doc': 'Trend average time: %s \n Trend initial value %s' % (average_time, initial_trend), 'py_expr': 'functions.Trend(lambda: %s, lambda: %s, lambda: %s)' % (trend_input, average_time, initial_trend), 'unit': 'None', 'lims': 'None', 'eqn': 'None', 'subs': '', 'kind': 'stateful', 'arguments': ''} return ('%s()' % stateful['py_name'], [stateful])
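For concrete arguments, the builder above only fills string templates. Rendering the py_expr template by hand shows what ends up in the element dictionary (the sample argument names below are illustrative, not drawn from any real model):

trend_input, average_time, initial_trend = 'sales', 'smoothing_time', '0'
py_expr = 'functions.Trend(lambda: %s, lambda: %s, lambda: %s)' % (
    trend_input, average_time, initial_trend)
print(py_expr)
# functions.Trend(lambda: sales, lambda: smoothing_time, lambda: 0)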
def parse_verilog(text): '''Parse a text buffer of Verilog code Args: text (str): Source code to parse Returns: List of parsed objects. ''' lex = VerilogLexer name = None kind = None saved_type = None mode = 'input' ptype = 'wire' metacomments = [] parameters = [] param_items = [] generics = [] ports = collections.OrderedDict() sections = [] port_param_index = 0 last_item = None array_range_start_pos = 0 objects = [] for pos, action, groups in lex.run(text): if action == 'metacomment': if last_item is None: metacomments.append(groups[0]) else: last_item.desc = groups[0] if action == 'section_meta': sections.append((port_param_index, groups[0])) elif action == 'module': kind = 'module' name = groups[0] generics = [] ports = collections.OrderedDict() param_items = [] sections = [] port_param_index = 0 elif action == 'parameter_start': net_type, vec_range = groups new_ptype = '' if net_type is not None: new_ptype += net_type if vec_range is not None: new_ptype += ' ' + vec_range ptype = new_ptype elif action == 'param_item': generics.append(VerilogParameter(groups[0], 'in', ptype)) elif action == 'module_port_start': new_mode, net_type, signed, vec_range = groups new_ptype = '' if net_type is not None: new_ptype += net_type if signed is not None: new_ptype += ' ' + signed if vec_range is not None: new_ptype += ' ' + vec_range # Complete pending items for i in param_items: ports[i] = VerilogParameter(i, mode, ptype) param_items = [] if len(ports) > 0: last_item = next(reversed(ports)) # Start with new mode mode = new_mode ptype = new_ptype elif action == 'port_param': ident = groups[0] param_items.append(ident) port_param_index += 1 elif action == 'end_module': # Finish any pending ports for i in param_items: ports[i] = VerilogParameter(i, mode, ptype) vobj = VerilogModule(name, ports.values(), generics, dict(sections), metacomments) objects.append(vobj) last_item = None metacomments = [] return objects
def function[parse_verilog, parameter[text]]: constant[Parse a text buffer of Verilog code Args: text (str): Source code to parse Returns: List of parsed objects. ] variable[lex] assign[=] name[VerilogLexer] variable[name] assign[=] constant[None] variable[kind] assign[=] constant[None] variable[saved_type] assign[=] constant[None] variable[mode] assign[=] constant[input] variable[ptype] assign[=] constant[wire] variable[metacomments] assign[=] list[[]] variable[parameters] assign[=] list[[]] variable[param_items] assign[=] list[[]] variable[generics] assign[=] list[[]] variable[ports] assign[=] call[name[collections].OrderedDict, parameter[]] variable[sections] assign[=] list[[]] variable[port_param_index] assign[=] constant[0] variable[last_item] assign[=] constant[None] variable[array_range_start_pos] assign[=] constant[0] variable[objects] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b11d94b0>, <ast.Name object at 0x7da1b11db6a0>, <ast.Name object at 0x7da1b11dba60>]]] in starred[call[name[lex].run, parameter[name[text]]]] begin[:] if compare[name[action] equal[==] constant[metacomment]] begin[:] if compare[name[last_item] is constant[None]] begin[:] call[name[metacomments].append, parameter[call[name[groups]][constant[0]]]] if compare[name[action] equal[==] constant[section_meta]] begin[:] call[name[sections].append, parameter[tuple[[<ast.Name object at 0x7da1b11155a0>, <ast.Subscript object at 0x7da1b11168c0>]]]] return[name[objects]]
keyword[def] identifier[parse_verilog] ( identifier[text] ): literal[string] identifier[lex] = identifier[VerilogLexer] identifier[name] = keyword[None] identifier[kind] = keyword[None] identifier[saved_type] = keyword[None] identifier[mode] = literal[string] identifier[ptype] = literal[string] identifier[metacomments] =[] identifier[parameters] =[] identifier[param_items] =[] identifier[generics] =[] identifier[ports] = identifier[collections] . identifier[OrderedDict] () identifier[sections] =[] identifier[port_param_index] = literal[int] identifier[last_item] = keyword[None] identifier[array_range_start_pos] = literal[int] identifier[objects] =[] keyword[for] identifier[pos] , identifier[action] , identifier[groups] keyword[in] identifier[lex] . identifier[run] ( identifier[text] ): keyword[if] identifier[action] == literal[string] : keyword[if] identifier[last_item] keyword[is] keyword[None] : identifier[metacomments] . identifier[append] ( identifier[groups] [ literal[int] ]) keyword[else] : identifier[last_item] . identifier[desc] = identifier[groups] [ literal[int] ] keyword[if] identifier[action] == literal[string] : identifier[sections] . identifier[append] (( identifier[port_param_index] , identifier[groups] [ literal[int] ])) keyword[elif] identifier[action] == literal[string] : identifier[kind] = literal[string] identifier[name] = identifier[groups] [ literal[int] ] identifier[generics] =[] identifier[ports] = identifier[collections] . identifier[OrderedDict] () identifier[param_items] =[] identifier[sections] =[] identifier[port_param_index] = literal[int] keyword[elif] identifier[action] == literal[string] : identifier[net_type] , identifier[vec_range] = identifier[groups] identifier[new_ptype] = literal[string] keyword[if] identifier[net_type] keyword[is] keyword[not] keyword[None] : identifier[new_ptype] += identifier[net_type] keyword[if] identifier[vec_range] keyword[is] keyword[not] keyword[None] : identifier[new_ptype] += literal[string] + identifier[vec_range] identifier[ptype] = identifier[new_ptype] keyword[elif] identifier[action] == literal[string] : identifier[generics] . identifier[append] ( identifier[VerilogParameter] ( identifier[groups] [ literal[int] ], literal[string] , identifier[ptype] )) keyword[elif] identifier[action] == literal[string] : identifier[new_mode] , identifier[net_type] , identifier[signed] , identifier[vec_range] = identifier[groups] identifier[new_ptype] = literal[string] keyword[if] identifier[net_type] keyword[is] keyword[not] keyword[None] : identifier[new_ptype] += identifier[net_type] keyword[if] identifier[signed] keyword[is] keyword[not] keyword[None] : identifier[new_ptype] += literal[string] + identifier[signed] keyword[if] identifier[vec_range] keyword[is] keyword[not] keyword[None] : identifier[new_ptype] += literal[string] + identifier[vec_range] keyword[for] identifier[i] keyword[in] identifier[param_items] : identifier[ports] [ identifier[i] ]= identifier[VerilogParameter] ( identifier[i] , identifier[mode] , identifier[ptype] ) identifier[param_items] =[] keyword[if] identifier[len] ( identifier[ports] )> literal[int] : identifier[last_item] = identifier[next] ( identifier[reversed] ( identifier[ports] )) identifier[mode] = identifier[new_mode] identifier[ptype] = identifier[new_ptype] keyword[elif] identifier[action] == literal[string] : identifier[ident] = identifier[groups] [ literal[int] ] identifier[param_items] . identifier[append] ( identifier[ident] ) identifier[port_param_index] += literal[int] keyword[elif] identifier[action] == literal[string] : keyword[for] identifier[i] keyword[in] identifier[param_items] : identifier[ports] [ identifier[i] ]= identifier[VerilogParameter] ( identifier[i] , identifier[mode] , identifier[ptype] ) identifier[vobj] = identifier[VerilogModule] ( identifier[name] , identifier[ports] . identifier[values] (), identifier[generics] , identifier[dict] ( identifier[sections] ), identifier[metacomments] ) identifier[objects] . identifier[append] ( identifier[vobj] ) identifier[last_item] = keyword[None] identifier[metacomments] =[] keyword[return] identifier[objects]
def parse_verilog(text): """Parse a text buffer of Verilog code Args: text (str): Source code to parse Returns: List of parsed objects. """ lex = VerilogLexer name = None kind = None saved_type = None mode = 'input' ptype = 'wire' metacomments = [] parameters = [] param_items = [] generics = [] ports = collections.OrderedDict() sections = [] port_param_index = 0 last_item = None array_range_start_pos = 0 objects = [] for (pos, action, groups) in lex.run(text): if action == 'metacomment': if last_item is None: metacomments.append(groups[0]) # depends on [control=['if'], data=[]] else: last_item.desc = groups[0] # depends on [control=['if'], data=[]] if action == 'section_meta': sections.append((port_param_index, groups[0])) # depends on [control=['if'], data=[]] elif action == 'module': kind = 'module' name = groups[0] generics = [] ports = collections.OrderedDict() param_items = [] sections = [] port_param_index = 0 # depends on [control=['if'], data=[]] elif action == 'parameter_start': (net_type, vec_range) = groups new_ptype = '' if net_type is not None: new_ptype += net_type # depends on [control=['if'], data=['net_type']] if vec_range is not None: new_ptype += ' ' + vec_range # depends on [control=['if'], data=['vec_range']] ptype = new_ptype # depends on [control=['if'], data=[]] elif action == 'param_item': generics.append(VerilogParameter(groups[0], 'in', ptype)) # depends on [control=['if'], data=[]] elif action == 'module_port_start': (new_mode, net_type, signed, vec_range) = groups new_ptype = '' if net_type is not None: new_ptype += net_type # depends on [control=['if'], data=['net_type']] if signed is not None: new_ptype += ' ' + signed # depends on [control=['if'], data=['signed']] if vec_range is not None: new_ptype += ' ' + vec_range # depends on [control=['if'], data=['vec_range']] # Complete pending items for i in param_items: ports[i] = VerilogParameter(i, mode, ptype) # depends on [control=['for'], data=['i']] param_items = [] if len(ports) > 0: last_item = next(reversed(ports)) # depends on [control=['if'], data=[]] # Start with new mode mode = new_mode ptype = new_ptype # depends on [control=['if'], data=[]] elif action == 'port_param': ident = groups[0] param_items.append(ident) port_param_index += 1 # depends on [control=['if'], data=[]] elif action == 'end_module': # Finish any pending ports for i in param_items: ports[i] = VerilogParameter(i, mode, ptype) # depends on [control=['for'], data=['i']] vobj = VerilogModule(name, ports.values(), generics, dict(sections), metacomments) objects.append(vobj) last_item = None metacomments = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return objects
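A hedged usage sketch for the parser above. The import path is an assumption (the function resembles hdlparse's Verilog parser), and the attribute access on the result follows the VerilogModule constructor arguments shown in the record rather than a confirmed API:

# Hypothetical import path; the record only shows the function itself.
from hdlparse.verilog_parser import parse_verilog

source = '''
module adder(input [7:0] a, b, output [8:0] sum);
endmodule
'''

for module in parse_verilog(source):
    # .name mirrors the first VerilogModule constructor argument above;
    # ports were built from VerilogParameter(name, mode, type) entries.
    print(module.name)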
def setAvailableLocales(self, locales): """ Sets the list of available locales to use when displaying locale information. This provides a way to filter the interface to only the locales that you care about. :param locales | [<str>, ..] """ try: expr = re.compile('[a-z]+_[A-Z]+') babel_locales = [] for locale in locales: if not expr.match(locale): continue try: babel_locale = babel.Locale.parse(str(locale)) except (babel.UnknownLocaleError, StandardError): continue if babel_locale.territory and babel_locale.language: babel_locales.append(babel_locale) babel_locales.sort(key=str) except ImportError: babel_locales = [] self._availableLocales = babel_locales self.setDirty()
def function[setAvailableLocales, parameter[self, locales]]: constant[ Sets the list of available locales to use when displaying locale information. This provides a way to filter the interface to only the locales that you care about. :param locales | [<str>, ..] ] <ast.Try object at 0x7da18f09ff10> name[self]._availableLocales assign[=] name[babel_locales] call[name[self].setDirty, parameter[]]
keyword[def] identifier[setAvailableLocales] ( identifier[self] , identifier[locales] ): literal[string] keyword[try] : identifier[expr] = identifier[re] . identifier[compile] ( literal[string] ) identifier[babel_locales] =[] keyword[for] identifier[locale] keyword[in] identifier[locales] : keyword[if] keyword[not] identifier[expr] . identifier[match] ( identifier[locale] ): keyword[continue] keyword[try] : identifier[babel_locale] = identifier[babel] . identifier[Locale] . identifier[parse] ( identifier[str] ( identifier[locale] )) keyword[except] ( identifier[babel] . identifier[UnknownLocaleError] , identifier[StandardError] ): keyword[continue] keyword[if] identifier[babel_locale] . identifier[territory] keyword[and] identifier[babel_locale] . identifier[language] : identifier[babel_locales] . identifier[append] ( identifier[babel_locale] ) identifier[babel_locales] . identifier[sort] ( identifier[key] = identifier[str] ) keyword[except] identifier[ImportError] : identifier[babel_locales] =[] identifier[self] . identifier[_availableLocales] = identifier[babel_locales] identifier[self] . identifier[setDirty] ()
def setAvailableLocales(self, locales): """ Sets the list of available locales to use when displaying locale information. This provides a way to filter the interface to only the locales that you care about. :param locales | [<str>, ..] """ try: expr = re.compile('[a-z]+_[A-Z]+') babel_locales = [] for locale in locales: if not expr.match(locale): continue # depends on [control=['if'], data=[]] try: babel_locale = babel.Locale.parse(str(locale)) # depends on [control=['try'], data=[]] except (babel.UnknownLocaleError, StandardError): continue # depends on [control=['except'], data=[]] if babel_locale.territory and babel_locale.language: babel_locales.append(babel_locale) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['locale']] babel_locales.sort(key=str) # depends on [control=['try'], data=[]] except ImportError: babel_locales = [] # depends on [control=['except'], data=[]] self._availableLocales = babel_locales self.setDirty()
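The regex gate plus babel.Locale.parse is the reusable core of the method above. A standalone Python 3 sketch of that filtering (note the record's StandardError is Python 2-only; ValueError is the closest modern stand-in for malformed input):

import re
import babel

def filter_locales(codes):
    """Keep only parseable ll_CC codes that carry both language and territory."""
    expr = re.compile('[a-z]+_[A-Z]+')
    out = []
    for code in codes:
        if not expr.match(code):
            continue
        try:
            loc = babel.Locale.parse(code)
        except (babel.UnknownLocaleError, ValueError):  # StandardError is Py2-only
            continue
        if loc.territory and loc.language:
            out.append(loc)
    return sorted(out, key=str)

print([str(l) for l in filter_locales(['en_US', 'de_DE', 'xx_YY', 'english'])])
# expected: ['de_DE', 'en_US'] — 'xx_YY' fails parsing, 'english' fails the regex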
def export_default_scripts(target_folder, source_folder = None, raise_errors = False, verbose=False): """ tries to instantiate all the scripts that are imported in /scripts/__init__.py and saves each script that could be instantiated into a .b26 file in the target folder Args: target_folder: target path for .b26 files source_folder: location of python script files """ scripts_to_load = get_classes_in_folder(source_folder, Script) if verbose: print(('attempt to load {:d} scripts: '.format(len(scripts_to_load)))) loaded_scripts, failed, loaded_instruments = Script.load_and_append(scripts_to_load, raise_errors=raise_errors) for name, value in loaded_scripts.items(): filename = os.path.join(target_folder, '{:s}.b26'.format(name)) value.save_b26(filename) if verbose: print('\n================================================') print('================================================') print(('saved {:d} scripts, {:d} failed'.format(len(loaded_scripts), len(failed)))) if failed != {}: for error_name, error in failed.items(): print(('failed to create script: ', error_name, error))
def function[export_default_scripts, parameter[target_folder, source_folder, raise_errors, verbose]]: constant[ tries to instantiate all the scripts that are imported in /scripts/__init__.py and saves each script that could be instantiated into a .b26 file in the target folder Args: target_folder: target path for .b26 files source_folder: location of python script files ] variable[scripts_to_load] assign[=] call[name[get_classes_in_folder], parameter[name[source_folder], name[Script]]] if name[verbose] begin[:] call[name[print], parameter[call[constant[attempt to load {:d} scripts: ].format, parameter[call[name[len], parameter[name[scripts_to_load]]]]]]] <ast.Tuple object at 0x7da18f00d900> assign[=] call[name[Script].load_and_append, parameter[name[scripts_to_load]]] for taget[tuple[[<ast.Name object at 0x7da18f00c130>, <ast.Name object at 0x7da18f00e2c0>]]] in starred[call[name[loaded_scripts].items, parameter[]]] begin[:] variable[filename] assign[=] call[name[os].path.join, parameter[name[target_folder], call[constant[{:s}.b26].format, parameter[name[name]]]]] call[name[value].save_b26, parameter[name[filename]]] if name[verbose] begin[:] call[name[print], parameter[constant[
================================================]]] call[name[print], parameter[constant[================================================]]] call[name[print], parameter[call[constant[saved {:d} scripts, {:d} failed].format, parameter[call[name[len], parameter[name[loaded_scripts]]], call[name[len], parameter[name[failed]]]]]]] if compare[name[failed] not_equal[!=] dictionary[[], []]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18f00e0e0>, <ast.Name object at 0x7da18f00cac0>]]] in starred[call[name[failed].items, parameter[]]] begin[:] call[name[print], parameter[tuple[[<ast.Constant object at 0x7da18f00da80>, <ast.Name object at 0x7da18f00ebf0>, <ast.Name object at 0x7da18f00e4d0>]]]]
keyword[def] identifier[export_default_scripts] ( identifier[target_folder] , identifier[source_folder] = keyword[None] , identifier[raise_errors] = keyword[False] , identifier[verbose] = keyword[False] ): literal[string] identifier[scripts_to_load] = identifier[get_classes_in_folder] ( identifier[source_folder] , identifier[Script] ) keyword[if] identifier[verbose] : identifier[print] (( literal[string] . identifier[format] ( identifier[len] ( identifier[scripts_to_load] )))) identifier[loaded_scripts] , identifier[failed] , identifier[loaded_instruments] = identifier[Script] . identifier[load_and_append] ( identifier[scripts_to_load] , identifier[raise_errors] = identifier[raise_errors] ) keyword[for] identifier[name] , identifier[value] keyword[in] identifier[loaded_scripts] . identifier[items] (): identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[target_folder] , literal[string] . identifier[format] ( identifier[name] )) identifier[value] . identifier[save_b26] ( identifier[filename] ) keyword[if] identifier[verbose] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] ) identifier[print] (( literal[string] . identifier[format] ( identifier[len] ( identifier[loaded_scripts] ), identifier[len] ( identifier[failed] )))) keyword[if] identifier[failed] !={}: keyword[for] identifier[error_name] , identifier[error] keyword[in] identifier[failed] . identifier[items] (): identifier[print] (( literal[string] , identifier[error_name] , identifier[error] ))
def export_default_scripts(target_folder, source_folder=None, raise_errors=False, verbose=False): """ tries to instantiate all the scripts that are imported in /scripts/__init__.py and saves each script that could be instantiated into a .b26 file in the target folder Args: target_folder: target path for .b26 files source_folder: location of python script files """ scripts_to_load = get_classes_in_folder(source_folder, Script) if verbose: print('attempt to load {:d} scripts: '.format(len(scripts_to_load))) # depends on [control=['if'], data=[]] (loaded_scripts, failed, loaded_instruments) = Script.load_and_append(scripts_to_load, raise_errors=raise_errors) for (name, value) in loaded_scripts.items(): filename = os.path.join(target_folder, '{:s}.b26'.format(name)) value.save_b26(filename) # depends on [control=['for'], data=[]] if verbose: print('\n================================================') print('================================================') print('saved {:d} scripts, {:d} failed'.format(len(loaded_scripts), len(failed))) if failed != {}: for (error_name, error) in failed.items(): print(('failed to create script: ', error_name, error)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['failed']] # depends on [control=['if'], data=[]]
def forms_invalid(self, form, inlines): """ If the form or formsets are invalid, re-render the context data with the data-filled form and formsets and errors. """ return self.render_to_response(self.get_context_data(form=form, inlines=inlines))
def function[forms_invalid, parameter[self, form, inlines]]: constant[ If the form or formsets are invalid, re-render the context data with the data-filled form and formsets and errors. ] return[call[name[self].render_to_response, parameter[call[name[self].get_context_data, parameter[]]]]]
keyword[def] identifier[forms_invalid] ( identifier[self] , identifier[form] , identifier[inlines] ): literal[string] keyword[return] identifier[self] . identifier[render_to_response] ( identifier[self] . identifier[get_context_data] ( identifier[form] = identifier[form] , identifier[inlines] = identifier[inlines] ))
def forms_invalid(self, form, inlines): """ If the form or formsets are invalid, re-render the context data with the data-filled form and formsets and errors. """ return self.render_to_response(self.get_context_data(form=form, inlines=inlines))
def _normal_prompt(self): """ Flushes the prompt before requesting the input :return: The command line """ sys.stdout.write(self.__get_ps1()) sys.stdout.flush() return safe_input()
def function[_normal_prompt, parameter[self]]: constant[ Flushes the prompt before requesting the input :return: The command line ] call[name[sys].stdout.write, parameter[call[name[self].__get_ps1, parameter[]]]] call[name[sys].stdout.flush, parameter[]] return[call[name[safe_input], parameter[]]]
keyword[def] identifier[_normal_prompt] ( identifier[self] ): literal[string] identifier[sys] . identifier[stdout] . identifier[write] ( identifier[self] . identifier[__get_ps1] ()) identifier[sys] . identifier[stdout] . identifier[flush] () keyword[return] identifier[safe_input] ()
def _normal_prompt(self): """ Flushes the prompt before requesting the input :return: The command line """ sys.stdout.write(self.__get_ps1()) sys.stdout.flush() return safe_input()
def STOS(cpu, dest, src): """ Stores String. Stores a byte, word, or doubleword from the AL, AX, or EAX register, respectively, into the destination operand. The destination operand is a memory location, the address of which is read from either the ES:EDI or the ES:DI registers (depending on the address-size attribute of the instruction, 32 or 16, respectively). The ES segment cannot be overridden with a segment override prefix. :param cpu: current CPU. :param dest: destination operand. :param src: source operand. """ size = src.size dest.write(src.read()) dest_reg = dest.mem.base increment = Operators.ITEBV({'RDI': 64, 'EDI': 32, 'DI': 16}[dest_reg], cpu.DF, -size // 8, size // 8) cpu.write_register(dest_reg, cpu.read_register(dest_reg) + increment)
def function[STOS, parameter[cpu, dest, src]]: constant[ Stores String. Stores a byte, word, or doubleword from the AL, AX, or EAX register, respectively, into the destination operand. The destination operand is a memory location, the address of which is read from either the ES:EDI or the ES:DI registers (depending on the address-size attribute of the instruction, 32 or 16, respectively). The ES segment cannot be overridden with a segment override prefix. :param cpu: current CPU. :param dest: destination operand. :param src: source operand. ] variable[size] assign[=] name[src].size call[name[dest].write, parameter[call[name[src].read, parameter[]]]] variable[dest_reg] assign[=] name[dest].mem.base variable[increment] assign[=] call[name[Operators].ITEBV, parameter[call[dictionary[[<ast.Constant object at 0x7da20c6e5150>, <ast.Constant object at 0x7da20c6e6050>, <ast.Constant object at 0x7da20c6e6e60>], [<ast.Constant object at 0x7da20c6e4c70>, <ast.Constant object at 0x7da20c6e5000>, <ast.Constant object at 0x7da20c6e5120>]]][name[dest_reg]], name[cpu].DF, binary_operation[<ast.UnaryOp object at 0x7da20c6e6440> <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]], binary_operation[name[size] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]]] call[name[cpu].write_register, parameter[name[dest_reg], binary_operation[call[name[cpu].read_register, parameter[name[dest_reg]]] + name[increment]]]]
keyword[def] identifier[STOS] ( identifier[cpu] , identifier[dest] , identifier[src] ): literal[string] identifier[size] = identifier[src] . identifier[size] identifier[dest] . identifier[write] ( identifier[src] . identifier[read] ()) identifier[dest_reg] = identifier[dest] . identifier[mem] . identifier[base] identifier[increment] = identifier[Operators] . identifier[ITEBV] ({ literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] }[ identifier[dest_reg] ], identifier[cpu] . identifier[DF] ,- identifier[size] // literal[int] , identifier[size] // literal[int] ) identifier[cpu] . identifier[write_register] ( identifier[dest_reg] , identifier[cpu] . identifier[read_register] ( identifier[dest_reg] )+ identifier[increment] )
def STOS(cpu, dest, src): """ Stores String. Stores a byte, word, or doubleword from the AL, AX, or EAX register, respectively, into the destination operand. The destination operand is a memory location, the address of which is read from either the ES:EDI or the ES:DI registers (depending on the address-size attribute of the instruction, 32 or 16, respectively). The ES segment cannot be overridden with a segment override prefix. :param cpu: current CPU. :param dest: destination operand. :param src: source operand. """ size = src.size dest.write(src.read()) dest_reg = dest.mem.base increment = Operators.ITEBV({'RDI': 64, 'EDI': 32, 'DI': 16}[dest_reg], cpu.DF, -size // 8, size // 8) cpu.write_register(dest_reg, cpu.read_register(dest_reg) + increment)
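A concrete-value sketch of the pointer update that the symbolic ITEBV in the record encodes: the destination index register moves by the operand width in bytes, forward when the direction flag is clear and backward when it is set (plain Python stand-in, not Manticore's symbolic API):

def stos_increment(size_bits, df):
    """Byte delta applied to RDI/EDI/DI after one STOS iteration."""
    step = size_bits // 8
    return -step if df else step

assert stos_increment(8, False) == 1    # STOSB moves DI forward one byte
assert stos_increment(32, False) == 4   # STOSD moves EDI forward four bytes
assert stos_increment(16, True) == -2   # STOSW with DF set walks backward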
def add(self, layer, items): """ Add items in model. """ for k in items.iterkeys(): if k in self.model[layer]: raise Exception('item %s is already in layer %s' % (k, layer)) self.model[layer].update(items) # this should also update Layer.layer, the layer data # same as calling layer constructor # so now just need to add items to the layer for k, v in items.iteritems(): getattr(self, layer).add(k, v['module'], v.get('package'))
def function[add, parameter[self, layer, items]]: constant[ Add items in model. ] for taget[name[k]] in starred[call[name[items].iterkeys, parameter[]]] begin[:] if compare[name[k] in call[name[self].model][name[layer]]] begin[:] <ast.Raise object at 0x7da20c7c8c10> call[call[name[self].model][name[layer]].update, parameter[name[items]]] for taget[tuple[[<ast.Name object at 0x7da20c7ca890>, <ast.Name object at 0x7da20c7c99c0>]]] in starred[call[name[items].iteritems, parameter[]]] begin[:] call[call[name[getattr], parameter[name[self], name[layer]]].add, parameter[name[k], call[name[v]][constant[module]], call[name[v].get, parameter[constant[package]]]]]
keyword[def] identifier[add] ( identifier[self] , identifier[layer] , identifier[items] ): literal[string] keyword[for] identifier[k] keyword[in] identifier[items] . identifier[iterkeys] (): keyword[if] identifier[k] keyword[in] identifier[self] . identifier[model] [ identifier[layer] ]: keyword[raise] identifier[Exception] ( literal[string] %( identifier[k] , identifier[layer] )) identifier[self] . identifier[model] [ identifier[layer] ]. identifier[update] ( identifier[items] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[items] . identifier[iteritems] (): identifier[getattr] ( identifier[self] , identifier[layer] ). identifier[add] ( identifier[k] , identifier[v] [ literal[string] ], identifier[v] . identifier[get] ( literal[string] ))
def add(self, layer, items): """ Add items in model. """ for k in items.iterkeys(): if k in self.model[layer]: raise Exception('item %s is already in layer %s' % (k, layer)) # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']] self.model[layer].update(items) # this should also update Layer.layer, the layer data # same as calling layer constructor # so now just need to add items to the layer for (k, v) in items.iteritems(): getattr(self, layer).add(k, v['module'], v.get('package')) # depends on [control=['for'], data=[]]
def str_to_dt(val): """ Return a datetime object if the string value represents one. An epoch integer or an ISO 8601 compatible string is supported. :param val: str :return: datetime :raise: ValueError """ if isinstance(val, dt): return val try: if val.isdigit(): return dt.utcfromtimestamp(float(val)) else: return dt.strptime(val, '%Y-%m-%dT%H:%M:%S.%f') except (AttributeError, TypeError): raise ValueError
def function[str_to_dt, parameter[val]]: constant[ Return a datetime object if the string value represents one. An epoch integer or an ISO 8601 compatible string is supported. :param val: str :return: datetime :raise: ValueError ] if call[name[isinstance], parameter[name[val], name[dt]]] begin[:] return[name[val]] <ast.Try object at 0x7da1b15f5b70>
keyword[def] identifier[str_to_dt] ( identifier[val] ): literal[string] keyword[if] identifier[isinstance] ( identifier[val] , identifier[dt] ): keyword[return] identifier[val] keyword[try] : keyword[if] identifier[val] . identifier[isdigit] (): keyword[return] identifier[dt] . identifier[utcfromtimestamp] ( identifier[float] ( identifier[val] )) keyword[else] : keyword[return] identifier[dt] . identifier[strptime] ( identifier[val] , literal[string] ) keyword[except] ( identifier[AttributeError] , identifier[TypeError] ): keyword[raise] identifier[ValueError]
def str_to_dt(val): """ Return a datetime object if the string value represents one. An epoch integer or an ISO 8601 compatible string is supported. :param val: str :return: datetime :raise: ValueError """ if isinstance(val, dt): return val # depends on [control=['if'], data=[]] try: if val.isdigit(): return dt.utcfromtimestamp(float(val)) # depends on [control=['if'], data=[]] else: return dt.strptime(val, '%Y-%m-%dT%H:%M:%S.%f') # depends on [control=['try'], data=[]] except (AttributeError, TypeError): raise ValueError # depends on [control=['except'], data=[]]
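A quick usage sketch for str_to_dt. It assumes the module-level alias from datetime import datetime as dt that the record's body implies:

from datetime import datetime as dt

# with str_to_dt defined as in the record:
print(str_to_dt('1500000000'))                  # epoch seconds -> 2017-07-14 02:40:00
print(str_to_dt('2017-07-14T02:40:00.000000'))  # ISO 8601; '%f' requires the fraction
print(str_to_dt(dt(2017, 7, 14)))               # datetime instances pass through unchanged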
def build_layers(self): """ Constructs layers. Returns: list: List of DrawElements. Raises: VisualizationError: When the drawing is, for some reason, impossible to produce. """ wire_names = self.wire_names(with_initial_value=True) if not wire_names: return [] layers = [InputWire.fillup_layer(wire_names)] for instruction_layer in self.instructions: layer = Layer(self.qregs, self.cregs) for instruction in instruction_layer: layer, current_connections, connection_label = \ self._instruction_to_gate(instruction, layer) layer.connections.append((connection_label, current_connections)) layer.connect_with("│") layers.append(layer.full_layer) return layers
def function[build_layers, parameter[self]]: constant[ Constructs layers. Returns: list: List of DrawElements. Raises: VisualizationError: When the drawing is, for some reason, impossible to produce. ] variable[wire_names] assign[=] call[name[self].wire_names, parameter[]] if <ast.UnaryOp object at 0x7da20c6c4b20> begin[:] return[list[[]]] variable[layers] assign[=] list[[<ast.Call object at 0x7da20c6c5ea0>]] for taget[name[instruction_layer]] in starred[name[self].instructions] begin[:] variable[layer] assign[=] call[name[Layer], parameter[name[self].qregs, name[self].cregs]] for taget[name[instruction]] in starred[name[instruction_layer]] begin[:] <ast.Tuple object at 0x7da20c6c7e20> assign[=] call[name[self]._instruction_to_gate, parameter[name[instruction], name[layer]]] call[name[layer].connections.append, parameter[tuple[[<ast.Name object at 0x7da20c992bf0>, <ast.Name object at 0x7da20c993370>]]]] call[name[layer].connect_with, parameter[constant[│]]] call[name[layers].append, parameter[name[layer].full_layer]] return[name[layers]]
keyword[def] identifier[build_layers] ( identifier[self] ): literal[string] identifier[wire_names] = identifier[self] . identifier[wire_names] ( identifier[with_initial_value] = keyword[True] ) keyword[if] keyword[not] identifier[wire_names] : keyword[return] [] identifier[layers] =[ identifier[InputWire] . identifier[fillup_layer] ( identifier[wire_names] )] keyword[for] identifier[instruction_layer] keyword[in] identifier[self] . identifier[instructions] : identifier[layer] = identifier[Layer] ( identifier[self] . identifier[qregs] , identifier[self] . identifier[cregs] ) keyword[for] identifier[instruction] keyword[in] identifier[instruction_layer] : identifier[layer] , identifier[current_connections] , identifier[connection_label] = identifier[self] . identifier[_instruction_to_gate] ( identifier[instruction] , identifier[layer] ) identifier[layer] . identifier[connections] . identifier[append] (( identifier[connection_label] , identifier[current_connections] )) identifier[layer] . identifier[connect_with] ( literal[string] ) identifier[layers] . identifier[append] ( identifier[layer] . identifier[full_layer] ) keyword[return] identifier[layers]
def build_layers(self): """ Constructs layers. Returns: list: List of DrawElements. Raises: VisualizationError: When the drawing is, for some reason, impossible to produce. """ wire_names = self.wire_names(with_initial_value=True) if not wire_names: return [] # depends on [control=['if'], data=[]] layers = [InputWire.fillup_layer(wire_names)] for instruction_layer in self.instructions: layer = Layer(self.qregs, self.cregs) for instruction in instruction_layer: (layer, current_connections, connection_label) = self._instruction_to_gate(instruction, layer) layer.connections.append((connection_label, current_connections)) layer.connect_with('│') # depends on [control=['for'], data=['instruction']] layers.append(layer.full_layer) # depends on [control=['for'], data=['instruction_layer']] return layers
def _trace_summary(self): """ Summarizes the trace of values used to update the DynamicArgs and the arguments subsequently returned. May be used to implement the summary method. """ for (i, (val, args)) in enumerate(self.trace): if args is StopIteration: info = "Terminated" else: pprint = ','.join('{' + ','.join('%s=%r' % (k,v) for (k,v) in arg.items()) + '}' for arg in args) info = ("exploring arguments [%s]" % pprint ) if i == 0: print("Step %d: Initially %s." % (i, info)) else: print("Step %d: %s after receiving input(s) %s." % (i, info.capitalize(), val))
def function[_trace_summary, parameter[self]]: constant[ Summarizes the trace of values used to update the DynamicArgs and the arguments subsequently returned. May be used to implement the summary method. ] for taget[tuple[[<ast.Name object at 0x7da1afe57b20>, <ast.Tuple object at 0x7da1afe57be0>]]] in starred[call[name[enumerate], parameter[name[self].trace]]] begin[:] if compare[name[args] is name[StopIteration]] begin[:] variable[info] assign[=] constant[Terminated] if compare[name[i] equal[==] constant[0]] begin[:] call[name[print], parameter[binary_operation[constant[Step %d: Initially %s.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1afe57d60>, <ast.Name object at 0x7da1afe570d0>]]]]]
keyword[def] identifier[_trace_summary] ( identifier[self] ): literal[string] keyword[for] ( identifier[i] ,( identifier[val] , identifier[args] )) keyword[in] identifier[enumerate] ( identifier[self] . identifier[trace] ): keyword[if] identifier[args] keyword[is] identifier[StopIteration] : identifier[info] = literal[string] keyword[else] : identifier[pprint] = literal[string] . identifier[join] ( literal[string] + literal[string] . identifier[join] ( literal[string] %( identifier[k] , identifier[v] ) keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[arg] . identifier[items] ())+ literal[string] keyword[for] identifier[arg] keyword[in] identifier[args] ) identifier[info] =( literal[string] % identifier[pprint] ) keyword[if] identifier[i] == literal[int] : identifier[print] ( literal[string] %( identifier[i] , identifier[info] )) keyword[else] : identifier[print] ( literal[string] %( identifier[i] , identifier[info] . identifier[capitalize] (), identifier[val] ))
def _trace_summary(self): """ Summarizes the trace of values used to update the DynamicArgs and the arguments subsequently returned. May be used to implement the summary method. """ for (i, (val, args)) in enumerate(self.trace): if args is StopIteration: info = 'Terminated' # depends on [control=['if'], data=[]] else: pprint = ','.join(('{' + ','.join(('%s=%r' % (k, v) for (k, v) in arg.items())) + '}' for arg in args)) info = 'exploring arguments [%s]' % pprint if i == 0: print('Step %d: Initially %s.' % (i, info)) # depends on [control=['if'], data=['i']] else: print('Step %d: %s after receiving input(s) %s.' % (i, info.capitalize(), val)) # depends on [control=['for'], data=[]]
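The nested join in the record compacts a list of argument dicts into one readable line; that formatting trick works standalone (the argument names below are illustrative):

args = [{'lr': 0.1, 'momentum': 0.9}, {'lr': 0.01, 'momentum': 0.9}]
pprint = ','.join('{' + ','.join('%s=%r' % (k, v) for (k, v) in arg.items()) + '}'
                  for arg in args)
print(pprint)  # {lr=0.1,momentum=0.9},{lr=0.01,momentum=0.9}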
def get_variation(self, experiment_key, user_id, attributes=None): """ Gets variation where user will be bucketed. Args: experiment_key: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('get_variation')) return None if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return None if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None experiment = self.config.get_experiment_from_key(experiment_key) variation_key = None if not experiment: self.logger.info('Experiment key "%s" is invalid. Not activating user "%s".' % ( experiment_key, user_id )) return None if not self._validate_user_inputs(attributes): return None variation = self.decision_service.get_variation(experiment, user_id, attributes) if variation: variation_key = variation.key if self.config.is_feature_experiment(experiment.id): decision_notification_type = enums.DecisionNotificationTypes.FEATURE_TEST else: decision_notification_type = enums.DecisionNotificationTypes.AB_TEST self.notification_center.send_notifications( enums.NotificationTypes.DECISION, decision_notification_type, user_id, attributes or {}, { 'experiment_key': experiment_key, 'variation_key': variation_key } ) return variation_key
def function[get_variation, parameter[self, experiment_key, user_id, attributes]]: constant[ Gets variation where user will be bucketed. Args: experiment_key: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running. ] if <ast.UnaryOp object at 0x7da1b11ab0d0> begin[:] call[name[self].logger.error, parameter[call[name[enums].Errors.INVALID_DATAFILE.format, parameter[constant[get_variation]]]]] return[constant[None]] if <ast.UnaryOp object at 0x7da1b11a9570> begin[:] call[name[self].logger.error, parameter[call[name[enums].Errors.INVALID_INPUT_ERROR.format, parameter[constant[experiment_key]]]]] return[constant[None]] if <ast.UnaryOp object at 0x7da1b11aac20> begin[:] call[name[self].logger.error, parameter[call[name[enums].Errors.INVALID_INPUT_ERROR.format, parameter[constant[user_id]]]]] return[constant[None]] variable[experiment] assign[=] call[name[self].config.get_experiment_from_key, parameter[name[experiment_key]]] variable[variation_key] assign[=] constant[None] if <ast.UnaryOp object at 0x7da1b11a9780> begin[:] call[name[self].logger.info, parameter[binary_operation[constant[Experiment key "%s" is invalid. Not activating user "%s".] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b11a96f0>, <ast.Name object at 0x7da1b11a8b20>]]]]] return[constant[None]] if <ast.UnaryOp object at 0x7da1b11ab430> begin[:] return[constant[None]] variable[variation] assign[=] call[name[self].decision_service.get_variation, parameter[name[experiment], name[user_id], name[attributes]]] if name[variation] begin[:] variable[variation_key] assign[=] name[variation].key if call[name[self].config.is_feature_experiment, parameter[name[experiment].id]] begin[:] variable[decision_notification_type] assign[=] name[enums].DecisionNotificationTypes.FEATURE_TEST call[name[self].notification_center.send_notifications, parameter[name[enums].NotificationTypes.DECISION, name[decision_notification_type], name[user_id], <ast.BoolOp object at 0x7da20c6a90c0>, dictionary[[<ast.Constant object at 0x7da20c6ab250>, <ast.Constant object at 0x7da20c6ab4c0>], [<ast.Name object at 0x7da20c6a95a0>, <ast.Name object at 0x7da20c6abdc0>]]]] return[name[variation_key]]
keyword[def] identifier[get_variation] ( identifier[self] , identifier[experiment_key] , identifier[user_id] , identifier[attributes] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[is_valid] : identifier[self] . identifier[logger] . identifier[error] ( identifier[enums] . identifier[Errors] . identifier[INVALID_DATAFILE] . identifier[format] ( literal[string] )) keyword[return] keyword[None] keyword[if] keyword[not] identifier[validator] . identifier[is_non_empty_string] ( identifier[experiment_key] ): identifier[self] . identifier[logger] . identifier[error] ( identifier[enums] . identifier[Errors] . identifier[INVALID_INPUT_ERROR] . identifier[format] ( literal[string] )) keyword[return] keyword[None] keyword[if] keyword[not] identifier[isinstance] ( identifier[user_id] , identifier[string_types] ): identifier[self] . identifier[logger] . identifier[error] ( identifier[enums] . identifier[Errors] . identifier[INVALID_INPUT_ERROR] . identifier[format] ( literal[string] )) keyword[return] keyword[None] identifier[experiment] = identifier[self] . identifier[config] . identifier[get_experiment_from_key] ( identifier[experiment_key] ) identifier[variation_key] = keyword[None] keyword[if] keyword[not] identifier[experiment] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[experiment_key] , identifier[user_id] )) keyword[return] keyword[None] keyword[if] keyword[not] identifier[self] . identifier[_validate_user_inputs] ( identifier[attributes] ): keyword[return] keyword[None] identifier[variation] = identifier[self] . identifier[decision_service] . identifier[get_variation] ( identifier[experiment] , identifier[user_id] , identifier[attributes] ) keyword[if] identifier[variation] : identifier[variation_key] = identifier[variation] . identifier[key] keyword[if] identifier[self] . identifier[config] . identifier[is_feature_experiment] ( identifier[experiment] . identifier[id] ): identifier[decision_notification_type] = identifier[enums] . identifier[DecisionNotificationTypes] . identifier[FEATURE_TEST] keyword[else] : identifier[decision_notification_type] = identifier[enums] . identifier[DecisionNotificationTypes] . identifier[AB_TEST] identifier[self] . identifier[notification_center] . identifier[send_notifications] ( identifier[enums] . identifier[NotificationTypes] . identifier[DECISION] , identifier[decision_notification_type] , identifier[user_id] , identifier[attributes] keyword[or] {}, { literal[string] : identifier[experiment_key] , literal[string] : identifier[variation_key] } ) keyword[return] identifier[variation_key]
def get_variation(self, experiment_key, user_id, attributes=None): """ Gets variation where user will be bucketed. Args: experiment_key: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('get_variation')) return None # depends on [control=['if'], data=[]] if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return None # depends on [control=['if'], data=[]] if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None # depends on [control=['if'], data=[]] experiment = self.config.get_experiment_from_key(experiment_key) variation_key = None if not experiment: self.logger.info('Experiment key "%s" is invalid. Not activating user "%s".' % (experiment_key, user_id)) return None # depends on [control=['if'], data=[]] if not self._validate_user_inputs(attributes): return None # depends on [control=['if'], data=[]] variation = self.decision_service.get_variation(experiment, user_id, attributes) if variation: variation_key = variation.key # depends on [control=['if'], data=[]] if self.config.is_feature_experiment(experiment.id): decision_notification_type = enums.DecisionNotificationTypes.FEATURE_TEST # depends on [control=['if'], data=[]] else: decision_notification_type = enums.DecisionNotificationTypes.AB_TEST self.notification_center.send_notifications(enums.NotificationTypes.DECISION, decision_notification_type, user_id, attributes or {}, {'experiment_key': experiment_key, 'variation_key': variation_key}) return variation_key
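A hedged call-site example for the get_variation method above. The experiment key, user id, and attribute names are placeholders, and optimizely_client stands for an already-constructed SDK client:

variation_key = optimizely_client.get_variation(
    'checkout_flow_test',            # experiment key defined in the datafile
    'user-42',                       # unique user id
    attributes={'plan': 'premium'},  # optional audience attributes
)
if variation_key is None:
    pass  # invalid input, unknown experiment, or user not bucketed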
def cmd_playtune(self, args): '''send PLAY_TUNE message''' if len(args) < 1: print("Usage: playtune TUNE") return tune = args[0] str1 = tune[0:30] str2 = tune[30:] if sys.version_info.major >= 3 and not isinstance(str1, bytes): str1 = bytes(str1, "ascii") if sys.version_info.major >= 3 and not isinstance(str2, bytes): str2 = bytes(str2, "ascii") self.master.mav.play_tune_send(self.settings.target_system, self.settings.target_component, str1, str2)
def function[cmd_playtune, parameter[self, args]]: constant[send PLAY_TUNE message] if compare[call[name[len], parameter[name[args]]] less[<] constant[1]] begin[:] call[name[print], parameter[constant[Usage: playtune TUNE]]] return[None] variable[tune] assign[=] call[name[args]][constant[0]] variable[str1] assign[=] call[name[tune]][<ast.Slice object at 0x7da18f58d7e0>] variable[str2] assign[=] call[name[tune]][<ast.Slice object at 0x7da18f58d870>] if <ast.BoolOp object at 0x7da18f58e7d0> begin[:] variable[str1] assign[=] call[name[bytes], parameter[name[str1], constant[ascii]]] if <ast.BoolOp object at 0x7da18f58e020> begin[:] variable[str2] assign[=] call[name[bytes], parameter[name[str2], constant[ascii]]] call[name[self].master.mav.play_tune_send, parameter[name[self].settings.target_system, name[self].settings.target_component, name[str1], name[str2]]]
keyword[def] identifier[cmd_playtune] ( identifier[self] , identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] )< literal[int] : identifier[print] ( literal[string] ) keyword[return] identifier[tune] = identifier[args] [ literal[int] ] identifier[str1] = identifier[tune] [ literal[int] : literal[int] ] identifier[str2] = identifier[tune] [ literal[int] :] keyword[if] identifier[sys] . identifier[version_info] . identifier[major] >= literal[int] keyword[and] keyword[not] identifier[isinstance] ( identifier[str1] , identifier[bytes] ): identifier[str1] = identifier[bytes] ( identifier[str1] , literal[string] ) keyword[if] identifier[sys] . identifier[version_info] . identifier[major] >= literal[int] keyword[and] keyword[not] identifier[isinstance] ( identifier[str2] , identifier[bytes] ): identifier[str2] = identifier[bytes] ( identifier[str2] , literal[string] ) identifier[self] . identifier[master] . identifier[mav] . identifier[play_tune_send] ( identifier[self] . identifier[settings] . identifier[target_system] , identifier[self] . identifier[settings] . identifier[target_component] , identifier[str1] , identifier[str2] )
def cmd_playtune(self, args): """send PLAY_TUNE message""" if len(args) < 1: print('Usage: playtune TUNE') return # depends on [control=['if'], data=[]] tune = args[0] str1 = tune[0:30] str2 = tune[30:] if sys.version_info.major >= 3 and (not isinstance(str1, bytes)): str1 = bytes(str1, 'ascii') # depends on [control=['if'], data=[]] if sys.version_info.major >= 3 and (not isinstance(str2, bytes)): str2 = bytes(str2, 'ascii') # depends on [control=['if'], data=[]] self.master.mav.play_tune_send(self.settings.target_system, self.settings.target_component, str1, str2)
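A standalone check of the 30-character split and bytes coercion performed in cmd_playtune above; the tune string is illustrative.

import sys

tune = "MFT200L8G>C3" * 4  # illustrative 48-character tune string
str1, str2 = tune[0:30], tune[30:]
if sys.version_info.major >= 3 and not isinstance(str1, bytes):
    str1 = bytes(str1, "ascii")
if sys.version_info.major >= 3 and not isinstance(str2, bytes):
    str2 = bytes(str2, "ascii")
print(len(str1), len(str2))  # 30 18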
def generate_corpus(self, text): """ Given a text string, returns a list of lists; that is, a list of "sentences," each of which is a list of words. Before splitting into words, the sentences are filtered through `self.test_sentence_input` """ if isinstance(text, str): sentences = self.sentence_split(text) else: sentences = [] for line in text: sentences += self.sentence_split(line) passing = filter(self.test_sentence_input, sentences) runs = map(self.word_split, passing) return runs
def function[generate_corpus, parameter[self, text]]: constant[ Given a text string, returns a list of lists; that is, a list of "sentences," each of which is a list of words. Before splitting into words, the sentences are filtered through `self.test_sentence_input` ] if call[name[isinstance], parameter[name[text], name[str]]] begin[:] variable[sentences] assign[=] call[name[self].sentence_split, parameter[name[text]]] variable[passing] assign[=] call[name[filter], parameter[name[self].test_sentence_input, name[sentences]]] variable[runs] assign[=] call[name[map], parameter[name[self].word_split, name[passing]]] return[name[runs]]
keyword[def] identifier[generate_corpus] ( identifier[self] , identifier[text] ): literal[string] keyword[if] identifier[isinstance] ( identifier[text] , identifier[str] ): identifier[sentences] = identifier[self] . identifier[sentence_split] ( identifier[text] ) keyword[else] : identifier[sentences] =[] keyword[for] identifier[line] keyword[in] identifier[text] : identifier[sentences] += identifier[self] . identifier[sentence_split] ( identifier[line] ) identifier[passing] = identifier[filter] ( identifier[self] . identifier[test_sentence_input] , identifier[sentences] ) identifier[runs] = identifier[map] ( identifier[self] . identifier[word_split] , identifier[passing] ) keyword[return] identifier[runs]
def generate_corpus(self, text): """ Given a text string, returns a list of lists; that is, a list of "sentences," each of which is a list of words. Before splitting into words, the sentences are filtered through `self.test_sentence_input` """ if isinstance(text, str): sentences = self.sentence_split(text) # depends on [control=['if'], data=[]] else: sentences = [] for line in text: sentences += self.sentence_split(line) # depends on [control=['for'], data=['line']] passing = filter(self.test_sentence_input, sentences) runs = map(self.word_split, passing) return runs
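A self-contained re-creation of the generate_corpus pipeline above; the three callables are hypothetical stand-ins for the class's own sentence_split, test_sentence_input, and word_split.

import re

sentence_split = lambda text: re.split(r'(?<=[.!?])\s+', text.strip())
test_sentence_input = lambda s: len(s) > 0   # keep only non-empty sentences
word_split = lambda s: s.split()

text = "Hello world. This is a test!"
passing = filter(test_sentence_input, sentence_split(text))
runs = list(map(word_split, passing))
print(runs)  # [['Hello', 'world.'], ['This', 'is', 'a', 'test!']]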
def probe_response(msg, arg):
    """Process responses from the query sent by genl_ctrl_probe_by_name().

    https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L203

    Process returned messages, filling out the missing information in the genl_family structure.

    Positional arguments:
    msg -- returned message (nl_msg class instance).
    arg -- genl_family class instance to fill out.

    Returns:
    Indicator to keep processing frames or not (NL_SKIP or NL_STOP).
    """
    tb = dict((i, None) for i in range(CTRL_ATTR_MAX + 1))
    nlh = nlmsg_hdr(msg)
    ret = arg

    if genlmsg_parse(nlh, 0, tb, CTRL_ATTR_MAX, ctrl_policy):
        return NL_SKIP

    if tb[CTRL_ATTR_FAMILY_ID]:
        genl_family_set_id(ret, nla_get_u16(tb[CTRL_ATTR_FAMILY_ID]))

    if tb[CTRL_ATTR_MCAST_GROUPS] and parse_mcast_grps(ret, tb[CTRL_ATTR_MCAST_GROUPS]) < 0:
        return NL_SKIP

    return NL_STOP
def function[probe_response, parameter[msg, arg]]: constant[Process responses from from the query sent by genl_ctrl_probe_by_name(). https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L203 Process returned messages, filling out the missing information in the genl_family structure. Positional arguments: msg -- returned message (nl_msg class instance). arg -- genl_family class instance to fill out. Returns: Indicator to keep processing frames or not (NL_SKIP or NL_STOP). ] variable[tb] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b26365f0>]] variable[nlh] assign[=] call[name[nlmsg_hdr], parameter[name[msg]]] variable[ret] assign[=] name[arg] if call[name[genlmsg_parse], parameter[name[nlh], constant[0], name[tb], name[CTRL_ATTR_MAX], name[ctrl_policy]]] begin[:] return[name[NL_SKIP]] if call[name[tb]][name[CTRL_ATTR_FAMILY_ID]] begin[:] call[name[genl_family_set_id], parameter[name[ret], call[name[nla_get_u16], parameter[call[name[tb]][name[CTRL_ATTR_FAMILY_ID]]]]]] if <ast.BoolOp object at 0x7da1b2634be0> begin[:] return[name[NL_SKIP]] return[name[NL_STOP]]
keyword[def] identifier[probe_response] ( identifier[msg] , identifier[arg] ): literal[string] identifier[tb] = identifier[dict] (( identifier[i] , keyword[None] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[CTRL_ATTR_MAX] + literal[int] )) identifier[nlh] = identifier[nlmsg_hdr] ( identifier[msg] ) identifier[ret] = identifier[arg] keyword[if] identifier[genlmsg_parse] ( identifier[nlh] , literal[int] , identifier[tb] , identifier[CTRL_ATTR_MAX] , identifier[ctrl_policy] ): keyword[return] identifier[NL_SKIP] keyword[if] identifier[tb] [ identifier[CTRL_ATTR_FAMILY_ID] ]: identifier[genl_family_set_id] ( identifier[ret] , identifier[nla_get_u16] ( identifier[tb] [ identifier[CTRL_ATTR_FAMILY_ID] ])) keyword[if] identifier[tb] [ identifier[CTRL_ATTR_MCAST_GROUPS] ] keyword[and] identifier[parse_mcast_grps] ( identifier[ret] , identifier[tb] [ identifier[CTRL_ATTR_MCAST_GROUPS] ])< literal[int] : keyword[return] identifier[NL_SKIP] keyword[return] identifier[NL_STOP]
def probe_response(msg, arg):
    """Process responses from the query sent by genl_ctrl_probe_by_name().

    https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L203

    Process returned messages, filling out the missing information in the genl_family structure.

    Positional arguments:
    msg -- returned message (nl_msg class instance).
    arg -- genl_family class instance to fill out.

    Returns:
    Indicator to keep processing frames or not (NL_SKIP or NL_STOP).
    """
    tb = dict(((i, None) for i in range(CTRL_ATTR_MAX + 1)))
    nlh = nlmsg_hdr(msg)
    ret = arg
    if genlmsg_parse(nlh, 0, tb, CTRL_ATTR_MAX, ctrl_policy):
        return NL_SKIP # depends on [control=['if'], data=[]]
    if tb[CTRL_ATTR_FAMILY_ID]:
        genl_family_set_id(ret, nla_get_u16(tb[CTRL_ATTR_FAMILY_ID])) # depends on [control=['if'], data=[]]
    if tb[CTRL_ATTR_MCAST_GROUPS] and parse_mcast_grps(ret, tb[CTRL_ATTR_MCAST_GROUPS]) < 0:
        return NL_SKIP # depends on [control=['if'], data=[]]
    return NL_STOP
def MapFields(function, key=True):
    """
    Transformation factory that maps `function` on the values of a row.

    It can be applied either to

    1. all columns (`key=True`),
    2. no column (`key=False`), or
    3. a subset of columns by passing a callable, which takes a column name and returns `bool` (same as the parameter `function` in `filter`).

    :param function: callable
    :param key: bool or callable
    :return: callable
    """
    @use_raw_input
    def _MapFields(bag):
        try:
            factory = type(bag)._make
        except AttributeError:
            factory = type(bag)

        if callable(key):
            try:
                fields = bag._fields
            except AttributeError as e:
                raise UnrecoverableAttributeError(
                    'This transformation works only on objects with named'
                    ' fields (namedtuple, BagType, ...).') from e
            return factory(
                function(value) if key(key_) else value
                for key_, value in zip(fields, bag)
            )
        elif key:
            return factory(function(value) for value in bag)
        else:
            return NOT_MODIFIED

    return _MapFields
def function[MapFields, parameter[function, key]]: constant[ Transformation factory that maps `function` on the values of a row. It can be applied either to 1. all columns (`key=True`), 2. no column (`key=False`), or 3. a subset of columns by passing a callable, which takes column name and returns `bool` (same as the parameter `function` in `filter`). :param function: callable :param key: bool or callable :return: callable ] def function[_MapFields, parameter[bag]]: <ast.Try object at 0x7da2041da860> if call[name[callable], parameter[name[key]]] begin[:] <ast.Try object at 0x7da2041da170> return[call[name[factory], parameter[<ast.GeneratorExp object at 0x7da2041d9150>]]] return[name[_MapFields]]
keyword[def] identifier[MapFields] ( identifier[function] , identifier[key] = keyword[True] ): literal[string] @ identifier[use_raw_input] keyword[def] identifier[_MapFields] ( identifier[bag] ): keyword[try] : identifier[factory] = identifier[type] ( identifier[bag] ). identifier[_make] keyword[except] identifier[AttributeError] : identifier[factory] = identifier[type] ( identifier[bag] ) keyword[if] identifier[callable] ( identifier[key] ): keyword[try] : identifier[fields] = identifier[bag] . identifier[_fields] keyword[except] identifier[AttributeError] keyword[as] identifier[e] : keyword[raise] identifier[UnrecoverableAttributeError] ( literal[string] literal[string] ) keyword[from] identifier[e] keyword[return] identifier[factory] ( identifier[function] ( identifier[value] ) keyword[if] identifier[key] ( identifier[key_] ) keyword[else] identifier[value] keyword[for] identifier[key_] , identifier[value] keyword[in] identifier[zip] ( identifier[fields] , identifier[bag] ) ) keyword[elif] identifier[key] : keyword[return] identifier[factory] ( identifier[function] ( identifier[value] ) keyword[for] identifier[value] keyword[in] identifier[bag] ) keyword[else] : keyword[return] identifier[NOT_MODIFIED] keyword[return] identifier[_MapFields]
def MapFields(function, key=True): """ Transformation factory that maps `function` on the values of a row. It can be applied either to 1. all columns (`key=True`), 2. no column (`key=False`), or 3. a subset of columns by passing a callable, which takes a column name and returns `bool` (same as the parameter `function` in `filter`). :param function: callable :param key: bool or callable :return: callable """ @use_raw_input def _MapFields(bag): try: factory = type(bag)._make # depends on [control=['try'], data=[]] except AttributeError: factory = type(bag) # depends on [control=['except'], data=[]] if callable(key): try: fields = bag._fields # depends on [control=['try'], data=[]] except AttributeError as e: raise UnrecoverableAttributeError('This transformation works only on objects with named fields (namedtuple, BagType, ...).') from e # depends on [control=['except'], data=['e']] return factory((function(value) if key(key_) else value for (key_, value) in zip(fields, bag))) # depends on [control=['if'], data=[]] elif key: return factory((function(value) for value in bag)) # depends on [control=['if'], data=[]] else: return NOT_MODIFIED return _MapFields
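A hedged usage sketch of MapFields on a namedtuple row; it assumes the factory above (and its use_raw_input decorator) is importable, and the Person type is illustrative.

from collections import namedtuple

Person = namedtuple('Person', ['name', 'age'])  # rows need namedtuple-style fields

# Uppercase only the 'name' column; every other column passes through.
upper_names = MapFields(str.upper, key=lambda field: field == 'name')
# Applied to a row, this would yield Person(name='ADA', age=36) from
# Person(name='ada', age=36); MapFields(str, key=False) would return NOT_MODIFIED.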
def sparse_mux(sel, vals):
    """ Mux that avoids instantiating unnecessary mux_2s when possible.

    :param WireVector sel: Select wire, determines what is selected on a given cycle
    :param dictionary vals: dictionary of values at mux inputs (of type `{int:WireVector}`)
    :return: WireVector that signifies the change

    This mux supports not having a full specification. Indices that are not
    specified are treated as don't-cares.

    It also supports a specified default value, SparseDefault
    """
    import numbers

    max_val = 2**len(sel) - 1
    if SparseDefault in vals:
        default_val = vals[SparseDefault]
        del vals[SparseDefault]
        for i in range(max_val + 1):
            if i not in vals:
                vals[i] = default_val
    for key in vals.keys():
        if not isinstance(key, numbers.Integral):
            raise pyrtl.PyrtlError("value %s must be either an integer or 'default'" % str(key))
        if key < 0 or key > max_val:
            raise pyrtl.PyrtlError("value %s is out of range of the sel wire" % str(key))

    return _sparse_mux(sel, vals)
def function[sparse_mux, parameter[sel, vals]]: constant[ Mux that avoids instantiating unnecessary mux_2s when possible. :param WireVector sel: Select wire, determines what is selected on a given cycle :param dictionary vals: dictionary of values at mux inputs (of type `{int:WireVector}`) :return: WireVector that signifies the change This mux supports not having a full specification. Indices that are not specified are treated as don't-cares It also supports a specified default value, SparseDefault ] import module[numbers] variable[max_val] assign[=] binary_operation[binary_operation[constant[2] ** call[name[len], parameter[name[sel]]]] - constant[1]] if compare[name[SparseDefault] in name[vals]] begin[:] variable[default_val] assign[=] call[name[vals]][name[SparseDefault]] <ast.Delete object at 0x7da20c6c58a0> for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[max_val] + constant[1]]]]] begin[:] if compare[name[i] <ast.NotIn object at 0x7da2590d7190> name[vals]] begin[:] call[name[vals]][name[i]] assign[=] name[default_val] for taget[name[key]] in starred[call[name[vals].keys, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da2047e8400> begin[:] <ast.Raise object at 0x7da20c6c4ca0> if <ast.BoolOp object at 0x7da20c6c4f70> begin[:] <ast.Raise object at 0x7da20c6c6500> return[call[name[_sparse_mux], parameter[name[sel], name[vals]]]]
keyword[def] identifier[sparse_mux] ( identifier[sel] , identifier[vals] ): literal[string] keyword[import] identifier[numbers] identifier[max_val] = literal[int] ** identifier[len] ( identifier[sel] )- literal[int] keyword[if] identifier[SparseDefault] keyword[in] identifier[vals] : identifier[default_val] = identifier[vals] [ identifier[SparseDefault] ] keyword[del] identifier[vals] [ identifier[SparseDefault] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[max_val] + literal[int] ): keyword[if] identifier[i] keyword[not] keyword[in] identifier[vals] : identifier[vals] [ identifier[i] ]= identifier[default_val] keyword[for] identifier[key] keyword[in] identifier[vals] . identifier[keys] (): keyword[if] keyword[not] identifier[isinstance] ( identifier[key] , identifier[numbers] . identifier[Integral] ): keyword[raise] identifier[pyrtl] . identifier[PyrtlError] ( literal[string] % identifier[str] ( identifier[key] )) keyword[if] identifier[key] < literal[int] keyword[or] identifier[key] > identifier[max_val] : keyword[raise] identifier[pyrtl] . identifier[PyrtlError] ( literal[string] % identifier[str] ( identifier[key] )) keyword[return] identifier[_sparse_mux] ( identifier[sel] , identifier[vals] )
def sparse_mux(sel, vals):
    """ Mux that avoids instantiating unnecessary mux_2s when possible.

    :param WireVector sel: Select wire, determines what is selected on a given cycle
    :param dictionary vals: dictionary of values at mux inputs (of type `{int:WireVector}`)
    :return: WireVector that signifies the change

    This mux supports not having a full specification. Indices that are not
    specified are treated as don't-cares.

    It also supports a specified default value, SparseDefault
    """
    import numbers
    max_val = 2 ** len(sel) - 1
    if SparseDefault in vals:
        default_val = vals[SparseDefault]
        del vals[SparseDefault]
        for i in range(max_val + 1):
            if i not in vals:
                vals[i] = default_val # depends on [control=['if'], data=['i', 'vals']] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['SparseDefault', 'vals']]
    for key in vals.keys():
        if not isinstance(key, numbers.Integral):
            raise pyrtl.PyrtlError("value %s must be either an integer or 'default'" % str(key)) # depends on [control=['if'], data=[]]
        if key < 0 or key > max_val:
            raise pyrtl.PyrtlError('value %s is out of range of the sel wire' % str(key)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
    return _sparse_mux(sel, vals)
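A minimal PyRTL sketch of calling sparse_mux with a default entry; wire names and constants are illustrative, and sparse_mux/SparseDefault are assumed importable from the module shown above.

import pyrtl

sel = pyrtl.Input(bitwidth=2, name='sel')   # 2-bit selector: indices 0..3
a = pyrtl.Const(3, bitwidth=4)
b = pyrtl.Const(9, bitwidth=4)
fallback = pyrtl.Const(0, bitwidth=4)

# Indices 2 and 3 are unspecified, so they take the SparseDefault value.
out = sparse_mux(sel, {0: a, 1: b, SparseDefault: fallback})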
def p_sigtypes(self, p): 'sigtypes : sigtypes sigtype' p[0] = p[1] + (p[2],) p.set_lineno(0, p.lineno(1))
def function[p_sigtypes, parameter[self, p]]: constant[sigtypes : sigtypes sigtype] call[name[p]][constant[0]] assign[=] binary_operation[call[name[p]][constant[1]] + tuple[[<ast.Subscript object at 0x7da1b26ada80>]]] call[name[p].set_lineno, parameter[constant[0], call[name[p].lineno, parameter[constant[1]]]]]
keyword[def] identifier[p_sigtypes] ( identifier[self] , identifier[p] ): literal[string] identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ]+( identifier[p] [ literal[int] ],) identifier[p] . identifier[set_lineno] ( literal[int] , identifier[p] . identifier[lineno] ( literal[int] ))
def p_sigtypes(self, p): """sigtypes : sigtypes sigtype""" p[0] = p[1] + (p[2],) p.set_lineno(0, p.lineno(1))
def transform(self, dstreams, transformFunc): """ Create a new DStream in which each RDD is generated by applying a function on RDDs of the DStreams. The order of the JavaRDDs in the transform function parameter will be the same as the order of corresponding DStreams in the list. """ jdstreams = [d._jdstream for d in dstreams] # change the final serializer to sc.serializer func = TransformFunction(self._sc, lambda t, *rdds: transformFunc(rdds), *[d._jrdd_deserializer for d in dstreams]) jfunc = self._jvm.TransformFunction(func) jdstream = self._jssc.transform(jdstreams, jfunc) return DStream(jdstream, self, self._sc.serializer)
def function[transform, parameter[self, dstreams, transformFunc]]: constant[ Create a new DStream in which each RDD is generated by applying a function on RDDs of the DStreams. The order of the JavaRDDs in the transform function parameter will be the same as the order of corresponding DStreams in the list. ] variable[jdstreams] assign[=] <ast.ListComp object at 0x7da20c795f00> variable[func] assign[=] call[name[TransformFunction], parameter[name[self]._sc, <ast.Lambda object at 0x7da20c796020>, <ast.Starred object at 0x7da20c794580>]] variable[jfunc] assign[=] call[name[self]._jvm.TransformFunction, parameter[name[func]]] variable[jdstream] assign[=] call[name[self]._jssc.transform, parameter[name[jdstreams], name[jfunc]]] return[call[name[DStream], parameter[name[jdstream], name[self], name[self]._sc.serializer]]]
keyword[def] identifier[transform] ( identifier[self] , identifier[dstreams] , identifier[transformFunc] ): literal[string] identifier[jdstreams] =[ identifier[d] . identifier[_jdstream] keyword[for] identifier[d] keyword[in] identifier[dstreams] ] identifier[func] = identifier[TransformFunction] ( identifier[self] . identifier[_sc] , keyword[lambda] identifier[t] ,* identifier[rdds] : identifier[transformFunc] ( identifier[rdds] ), *[ identifier[d] . identifier[_jrdd_deserializer] keyword[for] identifier[d] keyword[in] identifier[dstreams] ]) identifier[jfunc] = identifier[self] . identifier[_jvm] . identifier[TransformFunction] ( identifier[func] ) identifier[jdstream] = identifier[self] . identifier[_jssc] . identifier[transform] ( identifier[jdstreams] , identifier[jfunc] ) keyword[return] identifier[DStream] ( identifier[jdstream] , identifier[self] , identifier[self] . identifier[_sc] . identifier[serializer] )
def transform(self, dstreams, transformFunc): """ Create a new DStream in which each RDD is generated by applying a function on RDDs of the DStreams. The order of the JavaRDDs in the transform function parameter will be the same as the order of corresponding DStreams in the list. """ jdstreams = [d._jdstream for d in dstreams] # change the final serializer to sc.serializer func = TransformFunction(self._sc, lambda t, *rdds: transformFunc(rdds), *[d._jrdd_deserializer for d in dstreams]) jfunc = self._jvm.TransformFunction(func) jdstream = self._jssc.transform(jdstreams, jfunc) return DStream(jdstream, self, self._sc.serializer)
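A hedged sketch of calling the transform method above on a StreamingContext; ssc, ds1, and ds2 are assumed pre-built, and the union-style combiner is illustrative.

# The user function receives a tuple of RDDs in the same order as the
# DStream list, per the contract documented above.
combined = ssc.transform(
    [ds1, ds2],
    lambda rdds: rdds[0].union(rdds[1]),
)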
async def unlock(self, device): """ Unlock the device if not already unlocked. :param device: device object, block device path or mount path :returns: whether the device is unlocked """ device = self._find_device(device) if not self.is_handleable(device) or not device.is_crypto: self._log.warn(_('not unlocking {0}: unhandled device', device)) return False if device.is_unlocked: self._log.info(_('not unlocking {0}: already unlocked', device)) return True if not self._prompt: self._log.error(_('not unlocking {0}: no password prompt', device)) return False unlocked = await self._unlock_from_cache(device) if unlocked: return True unlocked = await self._unlock_from_keyfile(device) if unlocked: return True options = dict(allow_keyfile=self.udisks.keyfile_support, allow_cache=self._cache is not None, cache_hint=self._cache_hint) password = await self._prompt(device, options) # password can be: None, str, or udiskie.prompt.PasswordResult cache_hint = getattr(password, 'cache_hint', self._cache_hint) password = getattr(password, 'password', password) if password is None: self._log.debug(_('not unlocking {0}: cancelled by user', device)) return False if isinstance(password, bytes): self._log.debug(_('unlocking {0} using keyfile', device)) await device.unlock_keyfile(password) else: self._log.debug(_('unlocking {0}', device)) await device.unlock(password) self._update_cache(device, password, cache_hint) self._log.info(_('unlocked {0}', device)) return True
<ast.AsyncFunctionDef object at 0x7da20c7c8bb0>
keyword[async] keyword[def] identifier[unlock] ( identifier[self] , identifier[device] ): literal[string] identifier[device] = identifier[self] . identifier[_find_device] ( identifier[device] ) keyword[if] keyword[not] identifier[self] . identifier[is_handleable] ( identifier[device] ) keyword[or] keyword[not] identifier[device] . identifier[is_crypto] : identifier[self] . identifier[_log] . identifier[warn] ( identifier[_] ( literal[string] , identifier[device] )) keyword[return] keyword[False] keyword[if] identifier[device] . identifier[is_unlocked] : identifier[self] . identifier[_log] . identifier[info] ( identifier[_] ( literal[string] , identifier[device] )) keyword[return] keyword[True] keyword[if] keyword[not] identifier[self] . identifier[_prompt] : identifier[self] . identifier[_log] . identifier[error] ( identifier[_] ( literal[string] , identifier[device] )) keyword[return] keyword[False] identifier[unlocked] = keyword[await] identifier[self] . identifier[_unlock_from_cache] ( identifier[device] ) keyword[if] identifier[unlocked] : keyword[return] keyword[True] identifier[unlocked] = keyword[await] identifier[self] . identifier[_unlock_from_keyfile] ( identifier[device] ) keyword[if] identifier[unlocked] : keyword[return] keyword[True] identifier[options] = identifier[dict] ( identifier[allow_keyfile] = identifier[self] . identifier[udisks] . identifier[keyfile_support] , identifier[allow_cache] = identifier[self] . identifier[_cache] keyword[is] keyword[not] keyword[None] , identifier[cache_hint] = identifier[self] . identifier[_cache_hint] ) identifier[password] = keyword[await] identifier[self] . identifier[_prompt] ( identifier[device] , identifier[options] ) identifier[cache_hint] = identifier[getattr] ( identifier[password] , literal[string] , identifier[self] . identifier[_cache_hint] ) identifier[password] = identifier[getattr] ( identifier[password] , literal[string] , identifier[password] ) keyword[if] identifier[password] keyword[is] keyword[None] : identifier[self] . identifier[_log] . identifier[debug] ( identifier[_] ( literal[string] , identifier[device] )) keyword[return] keyword[False] keyword[if] identifier[isinstance] ( identifier[password] , identifier[bytes] ): identifier[self] . identifier[_log] . identifier[debug] ( identifier[_] ( literal[string] , identifier[device] )) keyword[await] identifier[device] . identifier[unlock_keyfile] ( identifier[password] ) keyword[else] : identifier[self] . identifier[_log] . identifier[debug] ( identifier[_] ( literal[string] , identifier[device] )) keyword[await] identifier[device] . identifier[unlock] ( identifier[password] ) identifier[self] . identifier[_update_cache] ( identifier[device] , identifier[password] , identifier[cache_hint] ) identifier[self] . identifier[_log] . identifier[info] ( identifier[_] ( literal[string] , identifier[device] )) keyword[return] keyword[True]
async def unlock(self, device): """ Unlock the device if not already unlocked. :param device: device object, block device path or mount path :returns: whether the device is unlocked """ device = self._find_device(device) if not self.is_handleable(device) or not device.is_crypto: self._log.warn(_('not unlocking {0}: unhandled device', device)) return False # depends on [control=['if'], data=[]] if device.is_unlocked: self._log.info(_('not unlocking {0}: already unlocked', device)) return True # depends on [control=['if'], data=[]] if not self._prompt: self._log.error(_('not unlocking {0}: no password prompt', device)) return False # depends on [control=['if'], data=[]] unlocked = await self._unlock_from_cache(device) if unlocked: return True # depends on [control=['if'], data=[]] unlocked = await self._unlock_from_keyfile(device) if unlocked: return True # depends on [control=['if'], data=[]] options = dict(allow_keyfile=self.udisks.keyfile_support, allow_cache=self._cache is not None, cache_hint=self._cache_hint) password = await self._prompt(device, options) # password can be: None, str, or udiskie.prompt.PasswordResult cache_hint = getattr(password, 'cache_hint', self._cache_hint) password = getattr(password, 'password', password) if password is None: self._log.debug(_('not unlocking {0}: cancelled by user', device)) return False # depends on [control=['if'], data=[]] if isinstance(password, bytes): self._log.debug(_('unlocking {0} using keyfile', device)) await device.unlock_keyfile(password) # depends on [control=['if'], data=[]] else: self._log.debug(_('unlocking {0}', device)) await device.unlock(password) self._update_cache(device, password, cache_hint) self._log.info(_('unlocked {0}', device)) return True
def set_level(self, val):
    """Set the device ON LEVEL."""
    if val == 0:
        self.off()
    else:
        setlevel = 255
        if val < 1:
            setlevel = val * 100
        elif val <= 0xff:
            setlevel = val
        set_command = StandardSend(
            self._address, COMMAND_LIGHT_ON_0X11_NONE, cmd2=setlevel)
        self._send_method(set_command, self._on_message_received)
def function[set_level, parameter[self, val]]: constant[Set the devive ON LEVEL.] if compare[name[val] equal[==] constant[0]] begin[:] call[name[self].off, parameter[]]
keyword[def] identifier[set_level] ( identifier[self] , identifier[val] ): literal[string] keyword[if] identifier[val] == literal[int] : identifier[self] . identifier[off] () keyword[else] : identifier[setlevel] = literal[int] keyword[if] identifier[val] < literal[int] : identifier[setlevel] = identifier[val] * literal[int] keyword[elif] identifier[val] <= literal[int] : identifier[setlevel] = identifier[val] identifier[set_command] = identifier[StandardSend] ( identifier[self] . identifier[_address] , identifier[COMMAND_LIGHT_ON_0X11_NONE] , identifier[cmd2] = identifier[setlevel] ) identifier[self] . identifier[_send_method] ( identifier[set_command] , identifier[self] . identifier[_on_message_received] )
def set_level(self, val): """Set the device ON LEVEL.""" if val == 0: self.off() # depends on [control=['if'], data=[]] else: setlevel = 255 if val < 1: setlevel = val * 100 # depends on [control=['if'], data=['val']] elif val <= 255: setlevel = val # depends on [control=['if'], data=['val']] set_command = StandardSend(self._address, COMMAND_LIGHT_ON_0X11_NONE, cmd2=setlevel) self._send_method(set_command, self._on_message_received)
def delete_assessment_taken(self, assessment_taken_id): """Deletes an ``AssessmentTaken``. arg: assessment_taken_id (osid.id.Id): the ``Id`` of the ``AssessmentTaken`` to remove raise: NotFound - ``assessment_taken_id`` not found raise: NullArgument - ``assessment_taken_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceAdminSession.delete_resource_template collection = JSONClientValidated('assessment', collection='AssessmentTaken', runtime=self._runtime) if not isinstance(assessment_taken_id, ABCId): raise errors.InvalidArgument('the argument is not a valid OSID Id') assessment_taken_map = collection.find_one( dict({'_id': ObjectId(assessment_taken_id.get_identifier())}, **self._view_filter())) objects.AssessmentTaken(osid_object_map=assessment_taken_map, runtime=self._runtime, proxy=self._proxy)._delete() collection.delete_one({'_id': ObjectId(assessment_taken_id.get_identifier())})
def function[delete_assessment_taken, parameter[self, assessment_taken_id]]: constant[Deletes an ``AssessmentTaken``. arg: assessment_taken_id (osid.id.Id): the ``Id`` of the ``AssessmentTaken`` to remove raise: NotFound - ``assessment_taken_id`` not found raise: NullArgument - ``assessment_taken_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.* ] variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[assessment]]] if <ast.UnaryOp object at 0x7da20c794850> begin[:] <ast.Raise object at 0x7da20c795330> variable[assessment_taken_map] assign[=] call[name[collection].find_one, parameter[call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da204962830>], [<ast.Call object at 0x7da204961cf0>]]]]]] call[call[name[objects].AssessmentTaken, parameter[]]._delete, parameter[]] call[name[collection].delete_one, parameter[dictionary[[<ast.Constant object at 0x7da1b26ac8e0>], [<ast.Call object at 0x7da1b26ad960>]]]]
keyword[def] identifier[delete_assessment_taken] ( identifier[self] , identifier[assessment_taken_id] ): literal[string] identifier[collection] = identifier[JSONClientValidated] ( literal[string] , identifier[collection] = literal[string] , identifier[runtime] = identifier[self] . identifier[_runtime] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[assessment_taken_id] , identifier[ABCId] ): keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] ) identifier[assessment_taken_map] = identifier[collection] . identifier[find_one] ( identifier[dict] ({ literal[string] : identifier[ObjectId] ( identifier[assessment_taken_id] . identifier[get_identifier] ())}, ** identifier[self] . identifier[_view_filter] ())) identifier[objects] . identifier[AssessmentTaken] ( identifier[osid_object_map] = identifier[assessment_taken_map] , identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] ). identifier[_delete] () identifier[collection] . identifier[delete_one] ({ literal[string] : identifier[ObjectId] ( identifier[assessment_taken_id] . identifier[get_identifier] ())})
def delete_assessment_taken(self, assessment_taken_id): """Deletes an ``AssessmentTaken``. arg: assessment_taken_id (osid.id.Id): the ``Id`` of the ``AssessmentTaken`` to remove raise: NotFound - ``assessment_taken_id`` not found raise: NullArgument - ``assessment_taken_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceAdminSession.delete_resource_template collection = JSONClientValidated('assessment', collection='AssessmentTaken', runtime=self._runtime) if not isinstance(assessment_taken_id, ABCId): raise errors.InvalidArgument('the argument is not a valid OSID Id') # depends on [control=['if'], data=[]] assessment_taken_map = collection.find_one(dict({'_id': ObjectId(assessment_taken_id.get_identifier())}, **self._view_filter())) objects.AssessmentTaken(osid_object_map=assessment_taken_map, runtime=self._runtime, proxy=self._proxy)._delete() collection.delete_one({'_id': ObjectId(assessment_taken_id.get_identifier())})
def __makeShowColumnFunction(self, column_idx): """ Creates a function that shows or hides a column.""" show_column = lambda checked: self.setColumnHidden(column_idx, not checked) return show_column
def function[__makeShowColumnFunction, parameter[self, column_idx]]: constant[ Creates a function that shows or hides a column.] variable[show_column] assign[=] <ast.Lambda object at 0x7da1b04cabf0> return[name[show_column]]
keyword[def] identifier[__makeShowColumnFunction] ( identifier[self] , identifier[column_idx] ): literal[string] identifier[show_column] = keyword[lambda] identifier[checked] : identifier[self] . identifier[setColumnHidden] ( identifier[column_idx] , keyword[not] identifier[checked] ) keyword[return] identifier[show_column]
def __makeShowColumnFunction(self, column_idx): """ Creates a function that shows or hides a column.""" show_column = lambda checked: self.setColumnHidden(column_idx, not checked) return show_column
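The factory above exists because a lambda created directly inside a loop would close over the loop variable late; a self-contained illustration of the pitfall and the fix:

# Late binding: every lambda sees the loop variable's final value.
late = [lambda: i for i in range(3)]
print([f() for f in late])    # [2, 2, 2]

# Factory pattern (as in __makeShowColumnFunction): each call binds its own index.
def make(idx):
    return lambda: idx

early = [make(i) for i in range(3)]
print([f() for f in early])   # [0, 1, 2]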
def index():
    """
    Renders the dashboard when the server is initially run.

    Usage description: The rendered HTML allows the user to select a project and the desired run.

    :return: Template to render, an object that is taken care of by Flask.
    """
    # Reset current index values when the page is refreshed
    for k, v in current_index.items():
        current_index[k] = 0
    logging.info("Dashboard refreshed")

    # render the template (below) that will use JavaScript to read the stream
    return render_template("crystal_dashboard.html")
def function[index, parameter[]]: constant[ Renders the dashboard when the server is initially run. Usage description: The rendered HTML allows the user to select a project and the desired run. :return: Template to render, Object that is taken care by flask. ] for taget[tuple[[<ast.Name object at 0x7da18fe93280>, <ast.Name object at 0x7da18fe906d0>]]] in starred[call[name[current_index].items, parameter[]]] begin[:] call[name[current_index]][name[k]] assign[=] constant[0] call[name[logging].info, parameter[constant[Dashboard refreshed]]] return[call[name[render_template], parameter[constant[crystal_dashboard.html]]]]
keyword[def] identifier[index] (): literal[string] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[current_index] . identifier[items] (): identifier[current_index] [ identifier[k] ]= literal[int] identifier[logging] . identifier[info] ( literal[string] ) keyword[return] identifier[render_template] ( literal[string] )
def index():
    """
    Renders the dashboard when the server is initially run.

    Usage description: The rendered HTML allows the user to select a project and the desired run.

    :return: Template to render, an object that is taken care of by Flask.
    """
    # Reset current index values when the page is refreshed
    for (k, v) in current_index.items():
        current_index[k] = 0 # depends on [control=['for'], data=[]]
    logging.info('Dashboard refreshed')
    # render the template (below) that will use JavaScript to read the stream
    return render_template('crystal_dashboard.html')
async def send(self, request: Request, **kwargs) -> Response: """Send request object according to configuration. :param Request request: The request object to be sent. """ if request.context is None: # Should not happen, but make mypy happy and does not hurt request.context = self.build_context() if request.context.session is not self.driver.session: kwargs['session'] = request.context.session return Response( request, await self.driver.send(request.http_request, **kwargs) )
<ast.AsyncFunctionDef object at 0x7da18c4cf6a0>
keyword[async] keyword[def] identifier[send] ( identifier[self] , identifier[request] : identifier[Request] ,** identifier[kwargs] )-> identifier[Response] : literal[string] keyword[if] identifier[request] . identifier[context] keyword[is] keyword[None] : identifier[request] . identifier[context] = identifier[self] . identifier[build_context] () keyword[if] identifier[request] . identifier[context] . identifier[session] keyword[is] keyword[not] identifier[self] . identifier[driver] . identifier[session] : identifier[kwargs] [ literal[string] ]= identifier[request] . identifier[context] . identifier[session] keyword[return] identifier[Response] ( identifier[request] , keyword[await] identifier[self] . identifier[driver] . identifier[send] ( identifier[request] . identifier[http_request] ,** identifier[kwargs] ) )
async def send(self, request: Request, **kwargs) -> Response: """Send request object according to configuration. :param Request request: The request object to be sent. """ if request.context is None: # Should not happen, but make mypy happy and does not hurt request.context = self.build_context() # depends on [control=['if'], data=[]] if request.context.session is not self.driver.session: kwargs['session'] = request.context.session # depends on [control=['if'], data=[]] return Response(request, await self.driver.send(request.http_request, **kwargs))
def do_import(token, account_uuid, bank_account, since=None): """Import data from teller.io Returns the created StatementImport """ response = requests.get( url="https://api.teller.io/accounts/{}/transactions".format(account_uuid), headers={"Authorization": "Bearer {}".format(token)}, ) response.raise_for_status() data = response.json() statement_import = StatementImport.objects.create( source="teller.io", extra={"account_uuid": account_uuid}, bank_account=bank_account ) for line_data in data: uuid = UUID(hex=line_data["id"]) if StatementLine.objects.filter(uuid=uuid): continue description = ", ".join(filter(bool, [line_data["counterparty"], line_data["description"]])) date = datetime.date(*map(int, line_data["date"].split("-"))) if not since or date >= since: StatementLine.objects.create( uuid=uuid, date=line_data["date"], statement_import=statement_import, amount=line_data["amount"], type=line_data["type"], description=description, source_data=line_data, )
def function[do_import, parameter[token, account_uuid, bank_account, since]]: constant[Import data from teller.io Returns the created StatementImport ] variable[response] assign[=] call[name[requests].get, parameter[]] call[name[response].raise_for_status, parameter[]] variable[data] assign[=] call[name[response].json, parameter[]] variable[statement_import] assign[=] call[name[StatementImport].objects.create, parameter[]] for taget[name[line_data]] in starred[name[data]] begin[:] variable[uuid] assign[=] call[name[UUID], parameter[]] if call[name[StatementLine].objects.filter, parameter[]] begin[:] continue variable[description] assign[=] call[constant[, ].join, parameter[call[name[filter], parameter[name[bool], list[[<ast.Subscript object at 0x7da20e957b80>, <ast.Subscript object at 0x7da20e955c60>]]]]]] variable[date] assign[=] call[name[datetime].date, parameter[<ast.Starred object at 0x7da20e9563e0>]] if <ast.BoolOp object at 0x7da20e9552a0> begin[:] call[name[StatementLine].objects.create, parameter[]]
keyword[def] identifier[do_import] ( identifier[token] , identifier[account_uuid] , identifier[bank_account] , identifier[since] = keyword[None] ): literal[string] identifier[response] = identifier[requests] . identifier[get] ( identifier[url] = literal[string] . identifier[format] ( identifier[account_uuid] ), identifier[headers] ={ literal[string] : literal[string] . identifier[format] ( identifier[token] )}, ) identifier[response] . identifier[raise_for_status] () identifier[data] = identifier[response] . identifier[json] () identifier[statement_import] = identifier[StatementImport] . identifier[objects] . identifier[create] ( identifier[source] = literal[string] , identifier[extra] ={ literal[string] : identifier[account_uuid] }, identifier[bank_account] = identifier[bank_account] ) keyword[for] identifier[line_data] keyword[in] identifier[data] : identifier[uuid] = identifier[UUID] ( identifier[hex] = identifier[line_data] [ literal[string] ]) keyword[if] identifier[StatementLine] . identifier[objects] . identifier[filter] ( identifier[uuid] = identifier[uuid] ): keyword[continue] identifier[description] = literal[string] . identifier[join] ( identifier[filter] ( identifier[bool] ,[ identifier[line_data] [ literal[string] ], identifier[line_data] [ literal[string] ]])) identifier[date] = identifier[datetime] . identifier[date] (* identifier[map] ( identifier[int] , identifier[line_data] [ literal[string] ]. identifier[split] ( literal[string] ))) keyword[if] keyword[not] identifier[since] keyword[or] identifier[date] >= identifier[since] : identifier[StatementLine] . identifier[objects] . identifier[create] ( identifier[uuid] = identifier[uuid] , identifier[date] = identifier[line_data] [ literal[string] ], identifier[statement_import] = identifier[statement_import] , identifier[amount] = identifier[line_data] [ literal[string] ], identifier[type] = identifier[line_data] [ literal[string] ], identifier[description] = identifier[description] , identifier[source_data] = identifier[line_data] , )
def do_import(token, account_uuid, bank_account, since=None): """Import data from teller.io Returns the created StatementImport """ response = requests.get(url='https://api.teller.io/accounts/{}/transactions'.format(account_uuid), headers={'Authorization': 'Bearer {}'.format(token)}) response.raise_for_status() data = response.json() statement_import = StatementImport.objects.create(source='teller.io', extra={'account_uuid': account_uuid}, bank_account=bank_account) for line_data in data: uuid = UUID(hex=line_data['id']) if StatementLine.objects.filter(uuid=uuid): continue # depends on [control=['if'], data=[]] description = ', '.join(filter(bool, [line_data['counterparty'], line_data['description']])) date = datetime.date(*map(int, line_data['date'].split('-'))) if not since or date >= since: StatementLine.objects.create(uuid=uuid, date=line_data['date'], statement_import=statement_import, amount=line_data['amount'], type=line_data['type'], description=description, source_data=line_data) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line_data']]
def initialize_delete_state_map(self): """This is a mapping of delete result message string to state. """ self.fabric_state_del_map = { fw_const.INIT_STATE_STR: fw_const.OS_IN_NETWORK_STATE, fw_const.OS_IN_NETWORK_DEL_FAIL: fw_const.OS_IN_NETWORK_STATE, fw_const.OS_IN_NETWORK_DEL_SUCCESS: fw_const.INIT_STATE, fw_const.OS_OUT_NETWORK_DEL_FAIL: fw_const.OS_OUT_NETWORK_STATE, fw_const.OS_OUT_NETWORK_DEL_SUCCESS: fw_const.OS_IN_NETWORK_STATE, fw_const.OS_DUMMY_RTR_DEL_FAIL: fw_const.OS_DUMMY_RTR_STATE, fw_const.OS_DUMMY_RTR_DEL_SUCCESS: fw_const.OS_OUT_NETWORK_STATE, fw_const.DCNM_IN_NETWORK_DEL_FAIL: fw_const.DCNM_IN_NETWORK_STATE, fw_const.DCNM_IN_NETWORK_DEL_SUCCESS: fw_const.OS_DUMMY_RTR_STATE, fw_const.DCNM_IN_PART_UPDDEL_FAIL: fw_const.DCNM_IN_PART_UPDATE_STATE, fw_const.DCNM_IN_PART_UPDDEL_SUCCESS: fw_const.DCNM_IN_NETWORK_STATE, fw_const.DCNM_OUT_PART_DEL_FAIL: fw_const.DCNM_OUT_PART_STATE, fw_const.DCNM_OUT_PART_DEL_SUCCESS: fw_const.DCNM_IN_PART_UPDATE_STATE, fw_const.DCNM_OUT_NETWORK_DEL_FAIL: fw_const.DCNM_OUT_NETWORK_STATE, fw_const.DCNM_OUT_NETWORK_DEL_SUCCESS: fw_const.DCNM_OUT_PART_STATE, fw_const.DCNM_OUT_PART_UPDDEL_FAIL: fw_const.DCNM_OUT_PART_UPDATE_STATE, fw_const.DCNM_OUT_PART_UPDDEL_SUCCESS: fw_const.DCNM_OUT_NETWORK_STATE}
def function[initialize_delete_state_map, parameter[self]]: constant[This is a mapping of delete result message string to state. ] name[self].fabric_state_del_map assign[=] dictionary[[<ast.Attribute object at 0x7da1b1c617e0>, <ast.Attribute object at 0x7da1b1c62170>, <ast.Attribute object at 0x7da1b1c62080>, <ast.Attribute object at 0x7da1b1c618d0>, <ast.Attribute object at 0x7da1b1c62230>, <ast.Attribute object at 0x7da1b1c605e0>, <ast.Attribute object at 0x7da1b1c60fd0>, <ast.Attribute object at 0x7da1b1c61780>, <ast.Attribute object at 0x7da1b1c614b0>, <ast.Attribute object at 0x7da1b1c61690>, <ast.Attribute object at 0x7da1b1c60610>, <ast.Attribute object at 0x7da1b1c61960>, <ast.Attribute object at 0x7da1b1c61cf0>, <ast.Attribute object at 0x7da1b1c613c0>, <ast.Attribute object at 0x7da1b1c62800>, <ast.Attribute object at 0x7da1b1c61510>, <ast.Attribute object at 0x7da1b1c61c30>], [<ast.Attribute object at 0x7da1b1c60520>, <ast.Attribute object at 0x7da1b1c60760>, <ast.Attribute object at 0x7da1b1c61000>, <ast.Attribute object at 0x7da1b1c612a0>, <ast.Attribute object at 0x7da1b1c60250>, <ast.Attribute object at 0x7da1b1c63280>, <ast.Attribute object at 0x7da1b1c61c00>, <ast.Attribute object at 0x7da1b1c61ff0>, <ast.Attribute object at 0x7da1b1c62ef0>, <ast.Attribute object at 0x7da1b1c60040>, <ast.Attribute object at 0x7da1b1c610f0>, <ast.Attribute object at 0x7da1b1c60d00>, <ast.Attribute object at 0x7da1b1c61720>, <ast.Attribute object at 0x7da1b1c62530>, <ast.Attribute object at 0x7da1b1c62a40>, <ast.Attribute object at 0x7da1b1c62410>, <ast.Attribute object at 0x7da1b1c61e70>]]
keyword[def] identifier[initialize_delete_state_map] ( identifier[self] ): literal[string] identifier[self] . identifier[fabric_state_del_map] ={ identifier[fw_const] . identifier[INIT_STATE_STR] : identifier[fw_const] . identifier[OS_IN_NETWORK_STATE] , identifier[fw_const] . identifier[OS_IN_NETWORK_DEL_FAIL] : identifier[fw_const] . identifier[OS_IN_NETWORK_STATE] , identifier[fw_const] . identifier[OS_IN_NETWORK_DEL_SUCCESS] : identifier[fw_const] . identifier[INIT_STATE] , identifier[fw_const] . identifier[OS_OUT_NETWORK_DEL_FAIL] : identifier[fw_const] . identifier[OS_OUT_NETWORK_STATE] , identifier[fw_const] . identifier[OS_OUT_NETWORK_DEL_SUCCESS] : identifier[fw_const] . identifier[OS_IN_NETWORK_STATE] , identifier[fw_const] . identifier[OS_DUMMY_RTR_DEL_FAIL] : identifier[fw_const] . identifier[OS_DUMMY_RTR_STATE] , identifier[fw_const] . identifier[OS_DUMMY_RTR_DEL_SUCCESS] : identifier[fw_const] . identifier[OS_OUT_NETWORK_STATE] , identifier[fw_const] . identifier[DCNM_IN_NETWORK_DEL_FAIL] : identifier[fw_const] . identifier[DCNM_IN_NETWORK_STATE] , identifier[fw_const] . identifier[DCNM_IN_NETWORK_DEL_SUCCESS] : identifier[fw_const] . identifier[OS_DUMMY_RTR_STATE] , identifier[fw_const] . identifier[DCNM_IN_PART_UPDDEL_FAIL] : identifier[fw_const] . identifier[DCNM_IN_PART_UPDATE_STATE] , identifier[fw_const] . identifier[DCNM_IN_PART_UPDDEL_SUCCESS] : identifier[fw_const] . identifier[DCNM_IN_NETWORK_STATE] , identifier[fw_const] . identifier[DCNM_OUT_PART_DEL_FAIL] : identifier[fw_const] . identifier[DCNM_OUT_PART_STATE] , identifier[fw_const] . identifier[DCNM_OUT_PART_DEL_SUCCESS] : identifier[fw_const] . identifier[DCNM_IN_PART_UPDATE_STATE] , identifier[fw_const] . identifier[DCNM_OUT_NETWORK_DEL_FAIL] : identifier[fw_const] . identifier[DCNM_OUT_NETWORK_STATE] , identifier[fw_const] . identifier[DCNM_OUT_NETWORK_DEL_SUCCESS] : identifier[fw_const] . identifier[DCNM_OUT_PART_STATE] , identifier[fw_const] . identifier[DCNM_OUT_PART_UPDDEL_FAIL] : identifier[fw_const] . identifier[DCNM_OUT_PART_UPDATE_STATE] , identifier[fw_const] . identifier[DCNM_OUT_PART_UPDDEL_SUCCESS] : identifier[fw_const] . identifier[DCNM_OUT_NETWORK_STATE] }
def initialize_delete_state_map(self): """This is a mapping of delete result message string to state. """ self.fabric_state_del_map = {fw_const.INIT_STATE_STR: fw_const.OS_IN_NETWORK_STATE, fw_const.OS_IN_NETWORK_DEL_FAIL: fw_const.OS_IN_NETWORK_STATE, fw_const.OS_IN_NETWORK_DEL_SUCCESS: fw_const.INIT_STATE, fw_const.OS_OUT_NETWORK_DEL_FAIL: fw_const.OS_OUT_NETWORK_STATE, fw_const.OS_OUT_NETWORK_DEL_SUCCESS: fw_const.OS_IN_NETWORK_STATE, fw_const.OS_DUMMY_RTR_DEL_FAIL: fw_const.OS_DUMMY_RTR_STATE, fw_const.OS_DUMMY_RTR_DEL_SUCCESS: fw_const.OS_OUT_NETWORK_STATE, fw_const.DCNM_IN_NETWORK_DEL_FAIL: fw_const.DCNM_IN_NETWORK_STATE, fw_const.DCNM_IN_NETWORK_DEL_SUCCESS: fw_const.OS_DUMMY_RTR_STATE, fw_const.DCNM_IN_PART_UPDDEL_FAIL: fw_const.DCNM_IN_PART_UPDATE_STATE, fw_const.DCNM_IN_PART_UPDDEL_SUCCESS: fw_const.DCNM_IN_NETWORK_STATE, fw_const.DCNM_OUT_PART_DEL_FAIL: fw_const.DCNM_OUT_PART_STATE, fw_const.DCNM_OUT_PART_DEL_SUCCESS: fw_const.DCNM_IN_PART_UPDATE_STATE, fw_const.DCNM_OUT_NETWORK_DEL_FAIL: fw_const.DCNM_OUT_NETWORK_STATE, fw_const.DCNM_OUT_NETWORK_DEL_SUCCESS: fw_const.DCNM_OUT_PART_STATE, fw_const.DCNM_OUT_PART_UPDDEL_FAIL: fw_const.DCNM_OUT_PART_UPDATE_STATE, fw_const.DCNM_OUT_PART_UPDDEL_SUCCESS: fw_const.DCNM_OUT_NETWORK_STATE}
def write_member(self, data): """Writes the given data as one gzip member. The data can be a string, an iterator that gives strings or a file-like object. """ if isinstance(data, basestring): self.write(data) else: for text in data: self.write(text) self.close_member()
def function[write_member, parameter[self, data]]: constant[Writes the given data as one gzip member. The data can be a string, an iterator that gives strings or a file-like object. ] if call[name[isinstance], parameter[name[data], name[basestring]]] begin[:] call[name[self].write, parameter[name[data]]] call[name[self].close_member, parameter[]]
keyword[def] identifier[write_member] ( identifier[self] , identifier[data] ): literal[string] keyword[if] identifier[isinstance] ( identifier[data] , identifier[basestring] ): identifier[self] . identifier[write] ( identifier[data] ) keyword[else] : keyword[for] identifier[text] keyword[in] identifier[data] : identifier[self] . identifier[write] ( identifier[text] ) identifier[self] . identifier[close_member] ()
def write_member(self, data): """Writes the given data as one gzip member. The data can be a string, an iterator that gives strings or a file-like object. """ if isinstance(data, basestring): self.write(data) # depends on [control=['if'], data=[]] else: for text in data: self.write(text) # depends on [control=['for'], data=['text']] self.close_member()
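A hedged usage sketch for write_member; MemberGzipFile is a hypothetical name for whatever class defines the method above, and close_member is assumed to flush one complete gzip member per call.

with open('archive.warc.gz', 'wb') as raw:
    writer = MemberGzipFile(fileobj=raw)              # assumed constructor
    writer.write_member('record one\n')               # a single string
    writer.write_member(iter(['record ', 'two\n']))   # or any iterator of strings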
def graphql_mutation_from_summary(summary):
    """
        This function returns a graphql mutation corresponding to the provided
        summary.
    """
    # get the name of the mutation from the summary
    mutation_name = summary['name']

    # print(summary)

    # treat the "type" strings as graphene input types
    input_name = mutation_name + "Input"
    input_fields = build_native_type_dictionary(summary['inputs'], name=input_name, respect_required=True)

    # the inputs for the mutation are defined by a class record
    inputs = type('Input', (object,), input_fields)

    # the outputs for the mutation are attributes to the class record
    output_name = mutation_name + "Output"
    outputs = build_native_type_dictionary(summary['outputs'], name=output_name)

    # a no-op in order to satisfy the introspection query
    mutate = classmethod(lambda *_, **__ : 'hello')

    # create the appropriate mutation class record
    mutation = type(mutation_name, (graphene.Mutation,), {
        'Input': inputs,
        'mutate': mutate,
        **outputs
    })

    # return the newly created mutation record
    return mutation
def function[graphql_mutation_from_summary, parameter[summary]]: constant[ This function returns a graphql mutation corresponding to the provided summary. ] variable[mutation_name] assign[=] call[name[summary]][constant[name]] variable[input_name] assign[=] binary_operation[name[mutation_name] + constant[Input]] variable[input_fields] assign[=] call[name[build_native_type_dictionary], parameter[call[name[summary]][constant[inputs]]]] variable[inputs] assign[=] call[name[type], parameter[constant[Input], tuple[[<ast.Name object at 0x7da1b0fdd630>]], name[input_fields]]] variable[output_name] assign[=] binary_operation[name[mutation_name] + constant[Output]] variable[outputs] assign[=] call[name[build_native_type_dictionary], parameter[call[name[summary]][constant[outputs]]]] variable[mutate] assign[=] call[name[classmethod], parameter[<ast.Lambda object at 0x7da1b0fdc280>]] variable[mutation] assign[=] call[name[type], parameter[name[mutation_name], tuple[[<ast.Attribute object at 0x7da1b0fdc130>]], dictionary[[<ast.Constant object at 0x7da1b0fdf430>, <ast.Constant object at 0x7da1b0fdd120>, None], [<ast.Name object at 0x7da1b0fdf3d0>, <ast.Name object at 0x7da1b0fde2c0>, <ast.Name object at 0x7da1b0fddcc0>]]]] return[name[mutation]]
keyword[def] identifier[graphql_mutation_from_summary] ( identifier[summary] ): literal[string] identifier[mutation_name] = identifier[summary] [ literal[string] ] identifier[input_name] = identifier[mutation_name] + literal[string] identifier[input_fields] = identifier[build_native_type_dictionary] ( identifier[summary] [ literal[string] ], identifier[name] = identifier[input_name] , identifier[respect_required] = keyword[True] ) identifier[inputs] = identifier[type] ( literal[string] ,( identifier[object] ,), identifier[input_fields] ) identifier[output_name] = identifier[mutation_name] + literal[string] identifier[outputs] = identifier[build_native_type_dictionary] ( identifier[summary] [ literal[string] ], identifier[name] = identifier[output_name] ) identifier[mutate] = identifier[classmethod] ( keyword[lambda] * identifier[_] ,** identifier[__] : literal[string] ) identifier[mutation] = identifier[type] ( identifier[mutation_name] ,( identifier[graphene] . identifier[Mutation] ,),{ literal[string] : identifier[inputs] , literal[string] : identifier[mutate] , ** identifier[outputs] }) keyword[return] identifier[mutation]
def graphql_mutation_from_summary(summary):
    """
        This function returns a graphql mutation corresponding to the provided
        summary.
    """
    # get the name of the mutation from the summary
    mutation_name = summary['name']
    # print(summary)
    # treat the "type" strings as graphene input types
    input_name = mutation_name + 'Input'
    input_fields = build_native_type_dictionary(summary['inputs'], name=input_name, respect_required=True)
    # the inputs for the mutation are defined by a class record
    inputs = type('Input', (object,), input_fields)
    # the outputs for the mutation are attributes to the class record
    output_name = mutation_name + 'Output'
    outputs = build_native_type_dictionary(summary['outputs'], name=output_name)
    # a no-op in order to satisfy the introspection query
    mutate = classmethod(lambda *_, **__: 'hello')
    # create the appropriate mutation class record
    mutation = type(mutation_name, (graphene.Mutation,), {'Input': inputs, 'mutate': mutate, **outputs})
    # return the newly created mutation record
    return mutation
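The row above builds Graphene mutation classes at runtime with type(); the underlying three-argument pattern is independent of Graphene and can be shown standalone:

# type(name, bases, namespace) builds a class exactly as a class statement would.
fields = {'greeting': 'hello',
          'mutate': classmethod(lambda cls: cls.greeting)}
Dynamic = type('Dynamic', (object,), fields)
print(Dynamic.mutate())  # hello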
def check(ctx, meta_model_file, model_file, ignore_case): """ Check validity of meta-model and optionally model. """ debug = ctx.obj['debug'] check_model(meta_model_file, model_file, debug, ignore_case)
def function[check, parameter[ctx, meta_model_file, model_file, ignore_case]]: constant[ Check validity of meta-model and optionally model. ] variable[debug] assign[=] call[name[ctx].obj][constant[debug]] call[name[check_model], parameter[name[meta_model_file], name[model_file], name[debug], name[ignore_case]]]
keyword[def] identifier[check] ( identifier[ctx] , identifier[meta_model_file] , identifier[model_file] , identifier[ignore_case] ): literal[string] identifier[debug] = identifier[ctx] . identifier[obj] [ literal[string] ] identifier[check_model] ( identifier[meta_model_file] , identifier[model_file] , identifier[debug] , identifier[ignore_case] )
def check(ctx, meta_model_file, model_file, ignore_case): """ Check validity of meta-model and optionally model. """ debug = ctx.obj['debug'] check_model(meta_model_file, model_file, debug, ignore_case)
async def delete_shade_from_scene(self, shade_id, scene_id): """Delete a shade from a scene.""" return await self.request.delete( self._base_path, params={ATTR_SCENE_ID: scene_id, ATTR_SHADE_ID: shade_id} )
<ast.AsyncFunctionDef object at 0x7da1b09ce7d0>
keyword[async] keyword[def] identifier[delete_shade_from_scene] ( identifier[self] , identifier[shade_id] , identifier[scene_id] ): literal[string] keyword[return] keyword[await] identifier[self] . identifier[request] . identifier[delete] ( identifier[self] . identifier[_base_path] , identifier[params] ={ identifier[ATTR_SCENE_ID] : identifier[scene_id] , identifier[ATTR_SHADE_ID] : identifier[shade_id] } )
async def delete_shade_from_scene(self, shade_id, scene_id): """Delete a shade from a scene.""" return await self.request.delete(self._base_path, params={ATTR_SCENE_ID: scene_id, ATTR_SHADE_ID: shade_id})
def interevent_time_recharges(recharges): """ Return the distribution of time between consecutive recharges of the user. """ time_pairs = pairwise(r.datetime for r in recharges) times = [(new - old).total_seconds() for old, new in time_pairs] return summary_stats(times)
def function[interevent_time_recharges, parameter[recharges]]: constant[ Return the distribution of time between consecutive recharges of the user. ] variable[time_pairs] assign[=] call[name[pairwise], parameter[<ast.GeneratorExp object at 0x7da1b0da10c0>]] variable[times] assign[=] <ast.ListComp object at 0x7da1b0da33d0> return[call[name[summary_stats], parameter[name[times]]]]
keyword[def] identifier[interevent_time_recharges] ( identifier[recharges] ): literal[string] identifier[time_pairs] = identifier[pairwise] ( identifier[r] . identifier[datetime] keyword[for] identifier[r] keyword[in] identifier[recharges] ) identifier[times] =[( identifier[new] - identifier[old] ). identifier[total_seconds] () keyword[for] identifier[old] , identifier[new] keyword[in] identifier[time_pairs] ] keyword[return] identifier[summary_stats] ( identifier[times] )
def interevent_time_recharges(recharges): """ Return the distribution of time between consecutive recharges of the user. """ time_pairs = pairwise((r.datetime for r in recharges)) times = [(new - old).total_seconds() for (old, new) in time_pairs] return summary_stats(times)
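`pairwise` and `summary_stats` are helpers defined elsewhere in that codebase. A minimal standalone sketch of the same interevent-time computation, using the itertools pairwise recipe and a plain mean in place of `summary_stats`:

from datetime import datetime
from itertools import tee

def pairwise(iterable):
    # (a, b, c) -> (a, b), (b, c), as in the itertools recipes
    a, b = tee(iterable)
    next(b, None)
    return zip(a, b)

recharge_times = [
    datetime(2019, 1, 1, 9, 0),
    datetime(2019, 1, 3, 18, 30),
    datetime(2019, 1, 7, 12, 0),
]
times = [(new - old).total_seconds() for old, new in pairwise(recharge_times)]
print(sum(times) / len(times))  # mean gap in seconds between recharges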
def model_definition_factory(base_model_definition, **kwargs):
    """
    Provides an iterator over passed-in configuration values, allowing for
    easy exploration of models.

    Parameters
    ----------

    base_model_definition: The base `ModelDefinition` to augment
    kwargs: Can be any keyword accepted by `ModelDefinition`. Values should be iterables.
    """
    if not kwargs:
        yield base_model_definition
    else:
        for param in kwargs:
            if not hasattr(base_model_definition, param):
                raise ValueError("'%s' is not a valid configuration parameter" % param)

        for raw_params in itertools.product(*kwargs.values()):
            new_definition = copy.copy(base_model_definition)
            new_definition.update(dict(zip(kwargs.keys(), raw_params)))
            yield new_definition
def function[model_definition_factory, parameter[base_model_definition]]: constant[ Provides an iterator over passed-in configuration values, allowing for easy exploration of models. Parameters: ___________ base_model_definition: The base `ModelDefinition` to augment kwargs: Can be any keyword accepted by `ModelDefinition`. Values should be iterables. ] if <ast.UnaryOp object at 0x7da1b0bae920> begin[:] <ast.Yield object at 0x7da1b0bae0e0>
keyword[def] identifier[model_definition_factory] ( identifier[base_model_definition] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[kwargs] : keyword[yield] identifier[base_model_definition] keyword[else] : keyword[for] identifier[param] keyword[in] identifier[kwargs] : keyword[if] keyword[not] identifier[hasattr] ( identifier[base_model_definition] , identifier[param] ): keyword[raise] identifier[ValueError] ( literal[string] % identifier[param] ) keyword[for] identifier[raw_params] keyword[in] identifier[itertools] . identifier[product] (* identifier[kwargs] . identifier[values] ()): identifier[new_definition] = identifier[copy] . identifier[copy] ( identifier[base_model_definition] ) identifier[new_definition] . identifier[update] ( identifier[dict] ( identifier[zip] ( identifier[kwargs] . identifier[keys] (), identifier[raw_params] ))) keyword[yield] identifier[new_definition]
def model_definition_factory(base_model_definition, **kwargs):
    """
    Provides an iterator over passed-in configuration values, allowing for
    easy exploration of models.

    Parameters
    ----------

    base_model_definition: The base `ModelDefinition` to augment
    kwargs: Can be any keyword accepted by `ModelDefinition`. Values should be iterables.
    """
    if not kwargs:
        yield base_model_definition # depends on [control=['if'], data=[]]
    else:
        for param in kwargs:
            if not hasattr(base_model_definition, param):
                raise ValueError("'%s' is not a valid configuration parameter" % param) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['param']]
        for raw_params in itertools.product(*kwargs.values()):
            new_definition = copy.copy(base_model_definition)
            new_definition.update(dict(zip(kwargs.keys(), raw_params)))
            yield new_definition # depends on [control=['for'], data=['raw_params']]
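A hedged usage sketch for the factory: one definition is yielded per combination of the supplied iterables. `ModelDefinition` here is a hypothetical stand-in, since the real class lives elsewhere:

import copy
import itertools

class ModelDefinition:  # hypothetical stand-in for the real class
    def __init__(self, **params):
        self.__dict__.update(params)

    def update(self, params):
        self.__dict__.update(params)

base = ModelDefinition(learning_rate=0.1, depth=2)
for definition in model_definition_factory(base, learning_rate=[0.01, 0.1], depth=[2, 4]):
    print(definition.learning_rate, definition.depth)  # 2 x 2 = 4 combinations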
def file_list(package, **kwargs): ''' List the files that belong to a package. CLI Examples: .. code-block:: bash salt '*' pkg.file_list nginx ''' ret = file_dict(package) files = [] for pkg_files in six.itervalues(ret['files']): files.extend(pkg_files) ret['files'] = files return ret
def function[file_list, parameter[package]]: constant[ List the files that belong to a package. CLI Examples: .. code-block:: bash salt '*' pkg.file_list nginx ] variable[ret] assign[=] call[name[file_dict], parameter[name[package]]] variable[files] assign[=] list[[]] for taget[name[pkg_files]] in starred[call[name[six].itervalues, parameter[call[name[ret]][constant[files]]]]] begin[:] call[name[files].extend, parameter[name[pkg_files]]] call[name[ret]][constant[files]] assign[=] name[files] return[name[ret]]
keyword[def] identifier[file_list] ( identifier[package] ,** identifier[kwargs] ): literal[string] identifier[ret] = identifier[file_dict] ( identifier[package] ) identifier[files] =[] keyword[for] identifier[pkg_files] keyword[in] identifier[six] . identifier[itervalues] ( identifier[ret] [ literal[string] ]): identifier[files] . identifier[extend] ( identifier[pkg_files] ) identifier[ret] [ literal[string] ]= identifier[files] keyword[return] identifier[ret]
def file_list(package, **kwargs): """ List the files that belong to a package. CLI Examples: .. code-block:: bash salt '*' pkg.file_list nginx """ ret = file_dict(package) files = [] for pkg_files in six.itervalues(ret['files']): files.extend(pkg_files) # depends on [control=['for'], data=['pkg_files']] ret['files'] = files return ret
def provider_login_url(parser, token): """ {% provider_login_url "facebook" next=bla %} {% provider_login_url "openid" openid="http://me.yahoo.com" next=bla %} """ bits = token.split_contents() provider_id = bits[1] params = token_kwargs(bits[2:], parser, support_legacy=False) return ProviderLoginURLNode(provider_id, params)
def function[provider_login_url, parameter[parser, token]]: constant[ {% provider_login_url "facebook" next=bla %} {% provider_login_url "openid" openid="http://me.yahoo.com" next=bla %} ] variable[bits] assign[=] call[name[token].split_contents, parameter[]] variable[provider_id] assign[=] call[name[bits]][constant[1]] variable[params] assign[=] call[name[token_kwargs], parameter[call[name[bits]][<ast.Slice object at 0x7da18bcc9960>], name[parser]]] return[call[name[ProviderLoginURLNode], parameter[name[provider_id], name[params]]]]
keyword[def] identifier[provider_login_url] ( identifier[parser] , identifier[token] ): literal[string] identifier[bits] = identifier[token] . identifier[split_contents] () identifier[provider_id] = identifier[bits] [ literal[int] ] identifier[params] = identifier[token_kwargs] ( identifier[bits] [ literal[int] :], identifier[parser] , identifier[support_legacy] = keyword[False] ) keyword[return] identifier[ProviderLoginURLNode] ( identifier[provider_id] , identifier[params] )
def provider_login_url(parser, token): """ {% provider_login_url "facebook" next=bla %} {% provider_login_url "openid" openid="http://me.yahoo.com" next=bla %} """ bits = token.split_contents() provider_id = bits[1] params = token_kwargs(bits[2:], parser, support_legacy=False) return ProviderLoginURLNode(provider_id, params)
def update_git_devstr(version, path=None): """ Updates the git revision string if and only if the path is being imported directly from a git working copy. This ensures that the revision number in the version string is accurate. """ try: # Quick way to determine if we're in git or not - returns '' if not devstr = get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return version if not devstr: # Probably not in git so just pass silently return version if 'dev' in version: # update to the current git revision version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev' + devstr else: # otherwise it's already the true/release version return version
def function[update_git_devstr, parameter[version, path]]: constant[ Updates the git revision string if and only if the path is being imported directly from a git working copy. This ensures that the revision number in the version string is accurate. ] <ast.Try object at 0x7da1b04edab0> if <ast.UnaryOp object at 0x7da1b04ee770> begin[:] return[name[version]] if compare[constant[dev] in name[version]] begin[:] variable[version_base] assign[=] call[call[name[version].split, parameter[constant[.dev], constant[1]]]][constant[0]] variable[devstr] assign[=] call[name[get_git_devstr], parameter[]] return[binary_operation[binary_operation[name[version_base] + constant[.dev]] + name[devstr]]]
keyword[def] identifier[update_git_devstr] ( identifier[version] , identifier[path] = keyword[None] ): literal[string] keyword[try] : identifier[devstr] = identifier[get_git_devstr] ( identifier[sha] = keyword[True] , identifier[show_warning] = keyword[False] , identifier[path] = identifier[path] ) keyword[except] identifier[OSError] : keyword[return] identifier[version] keyword[if] keyword[not] identifier[devstr] : keyword[return] identifier[version] keyword[if] literal[string] keyword[in] identifier[version] : identifier[version_base] = identifier[version] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ] identifier[devstr] = identifier[get_git_devstr] ( identifier[sha] = keyword[False] , identifier[show_warning] = keyword[False] , identifier[path] = identifier[path] ) keyword[return] identifier[version_base] + literal[string] + identifier[devstr] keyword[else] : keyword[return] identifier[version]
def update_git_devstr(version, path=None): """ Updates the git revision string if and only if the path is being imported directly from a git working copy. This ensures that the revision number in the version string is accurate. """ try: # Quick way to determine if we're in git or not - returns '' if not devstr = get_git_devstr(sha=True, show_warning=False, path=path) # depends on [control=['try'], data=[]] except OSError: return version # depends on [control=['except'], data=[]] if not devstr: # Probably not in git so just pass silently return version # depends on [control=['if'], data=[]] if 'dev' in version: # update to the current git revision version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev' + devstr # depends on [control=['if'], data=['version']] else: # otherwise it's already the true/release version return version
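The `.dev` branch above splits and re-suffixes the version string; a sketch of that handling in isolation, with made-up revision strings standing in for `get_git_devstr` output:

version = '3.1.dev1234'
version_base = version.split('.dev', 1)[0]   # -> '3.1'
refreshed = version_base + '.dev' + '1299'   # the revision count would come from git
print(refreshed)                             # 3.1.dev1299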
def mixture( val: Any, default: Any = RaiseTypeErrorIfNotProvided) -> Sequence[Tuple[float, Any]]: """Return a sequence of tuples representing a probabilistic combination. A mixture is described by an iterable of tuples of the form (probability of object, object) The probability components of the tuples must sum to 1.0 and be between 0 and 1 (inclusive). Args: val: The value whose mixture is being computed. default: A default value if val does not support mixture. Returns: An iterable of tuples of size 2. The first element of the tuple is a probability (between 0 and 1) and the second is the object that occurs with that probability in the mixture. The probabilities will sum to 1.0. """ getter = getattr(val, '_mixture_', None) result = NotImplemented if getter is None else getter() if result is not NotImplemented: return result if default is not RaiseTypeErrorIfNotProvided: return default if getter is None: raise TypeError( "object of type '{}' has no _mixture_ method.".format(type(val))) raise TypeError("object of type '{}' does have a _mixture_ method, " "but it returned NotImplemented.".format(type(val)))
def function[mixture, parameter[val, default]]: constant[Return a sequence of tuples representing a probabilistic combination. A mixture is described by an iterable of tuples of the form (probability of object, object) The probability components of the tuples must sum to 1.0 and be between 0 and 1 (inclusive). Args: val: The value whose mixture is being computed. default: A default value if val does not support mixture. Returns: An iterable of tuples of size 2. The first element of the tuple is a probability (between 0 and 1) and the second is the object that occurs with that probability in the mixture. The probabilities will sum to 1.0. ] variable[getter] assign[=] call[name[getattr], parameter[name[val], constant[_mixture_], constant[None]]] variable[result] assign[=] <ast.IfExp object at 0x7da1b1c3e110> if compare[name[result] is_not name[NotImplemented]] begin[:] return[name[result]] if compare[name[default] is_not name[RaiseTypeErrorIfNotProvided]] begin[:] return[name[default]] if compare[name[getter] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1ce5e10> <ast.Raise object at 0x7da1b1c633a0>
keyword[def] identifier[mixture] ( identifier[val] : identifier[Any] , identifier[default] : identifier[Any] = identifier[RaiseTypeErrorIfNotProvided] )-> identifier[Sequence] [ identifier[Tuple] [ identifier[float] , identifier[Any] ]]: literal[string] identifier[getter] = identifier[getattr] ( identifier[val] , literal[string] , keyword[None] ) identifier[result] = identifier[NotImplemented] keyword[if] identifier[getter] keyword[is] keyword[None] keyword[else] identifier[getter] () keyword[if] identifier[result] keyword[is] keyword[not] identifier[NotImplemented] : keyword[return] identifier[result] keyword[if] identifier[default] keyword[is] keyword[not] identifier[RaiseTypeErrorIfNotProvided] : keyword[return] identifier[default] keyword[if] identifier[getter] keyword[is] keyword[None] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[val] ))) keyword[raise] identifier[TypeError] ( literal[string] literal[string] . identifier[format] ( identifier[type] ( identifier[val] )))
def mixture(val: Any, default: Any=RaiseTypeErrorIfNotProvided) -> Sequence[Tuple[float, Any]]: """Return a sequence of tuples representing a probabilistic combination. A mixture is described by an iterable of tuples of the form (probability of object, object) The probability components of the tuples must sum to 1.0 and be between 0 and 1 (inclusive). Args: val: The value whose mixture is being computed. default: A default value if val does not support mixture. Returns: An iterable of tuples of size 2. The first element of the tuple is a probability (between 0 and 1) and the second is the object that occurs with that probability in the mixture. The probabilities will sum to 1.0. """ getter = getattr(val, '_mixture_', None) result = NotImplemented if getter is None else getter() if result is not NotImplemented: return result # depends on [control=['if'], data=['result']] if default is not RaiseTypeErrorIfNotProvided: return default # depends on [control=['if'], data=['default']] if getter is None: raise TypeError("object of type '{}' has no _mixture_ method.".format(type(val))) # depends on [control=['if'], data=[]] raise TypeError("object of type '{}' does have a _mixture_ method, but it returned NotImplemented.".format(type(val)))
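The `_mixture_` protocol described in the docstring can be exercised directly; a sketch assuming `mixture` above is in scope (the channel class is hypothetical):

class BitFlipChannel:
    """Identity with probability 0.9, a bit flip with probability 0.1."""

    def _mixture_(self):
        return ((0.9, 'I'), (0.1, 'X'))

for probability, op in mixture(BitFlipChannel()):
    print(probability, op)

# no _mixture_ method: the supplied default is returned instead of raising
print(mixture(object(), default=()))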
def compute(self, function): """Evaluate the passed function with the supplied data. Stores result in self.out. Parameters ---------- function: str Name of the :mod:`allantools` function to evaluate Returns ------- result: dict The results of the calculation. """ try: func = getattr(allantools, function) except AttributeError: raise AttributeError("function must be defined in allantools") whitelisted = ["theo1", "mtie", "tierms"] if function[-3:] != "dev" and function not in whitelisted: # this should probably raise a custom exception type so # it's easier to distinguish from other bad things raise RuntimeError("function must be one of the 'dev' functions") result = func(self.inp["data"], rate=self.inp["rate"], data_type=self.inp["data_type"], taus=self.inp["taus"]) keys = ["taus", "stat", "stat_err", "stat_n"] result = {key: result[i] for i, key in enumerate(keys)} self.out = result.copy() self.out["stat_id"] = function return result
def function[compute, parameter[self, function]]: constant[Evaluate the passed function with the supplied data. Stores result in self.out. Parameters ---------- function: str Name of the :mod:`allantools` function to evaluate Returns ------- result: dict The results of the calculation. ] <ast.Try object at 0x7da1b15463b0> variable[whitelisted] assign[=] list[[<ast.Constant object at 0x7da1b153c4f0>, <ast.Constant object at 0x7da1b153cdc0>, <ast.Constant object at 0x7da1b153eec0>]] if <ast.BoolOp object at 0x7da1b153c400> begin[:] <ast.Raise object at 0x7da1b153fd30> variable[result] assign[=] call[name[func], parameter[call[name[self].inp][constant[data]]]] variable[keys] assign[=] list[[<ast.Constant object at 0x7da1b153e530>, <ast.Constant object at 0x7da1b153e6b0>, <ast.Constant object at 0x7da1b153ccd0>, <ast.Constant object at 0x7da1b153c370>]] variable[result] assign[=] <ast.DictComp object at 0x7da1b153cf70> name[self].out assign[=] call[name[result].copy, parameter[]] call[name[self].out][constant[stat_id]] assign[=] name[function] return[name[result]]
keyword[def] identifier[compute] ( identifier[self] , identifier[function] ): literal[string] keyword[try] : identifier[func] = identifier[getattr] ( identifier[allantools] , identifier[function] ) keyword[except] identifier[AttributeError] : keyword[raise] identifier[AttributeError] ( literal[string] ) identifier[whitelisted] =[ literal[string] , literal[string] , literal[string] ] keyword[if] identifier[function] [- literal[int] :]!= literal[string] keyword[and] identifier[function] keyword[not] keyword[in] identifier[whitelisted] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[result] = identifier[func] ( identifier[self] . identifier[inp] [ literal[string] ], identifier[rate] = identifier[self] . identifier[inp] [ literal[string] ], identifier[data_type] = identifier[self] . identifier[inp] [ literal[string] ], identifier[taus] = identifier[self] . identifier[inp] [ literal[string] ]) identifier[keys] =[ literal[string] , literal[string] , literal[string] , literal[string] ] identifier[result] ={ identifier[key] : identifier[result] [ identifier[i] ] keyword[for] identifier[i] , identifier[key] keyword[in] identifier[enumerate] ( identifier[keys] )} identifier[self] . identifier[out] = identifier[result] . identifier[copy] () identifier[self] . identifier[out] [ literal[string] ]= identifier[function] keyword[return] identifier[result]
def compute(self, function): """Evaluate the passed function with the supplied data. Stores result in self.out. Parameters ---------- function: str Name of the :mod:`allantools` function to evaluate Returns ------- result: dict The results of the calculation. """ try: func = getattr(allantools, function) # depends on [control=['try'], data=[]] except AttributeError: raise AttributeError('function must be defined in allantools') # depends on [control=['except'], data=[]] whitelisted = ['theo1', 'mtie', 'tierms'] if function[-3:] != 'dev' and function not in whitelisted: # this should probably raise a custom exception type so # it's easier to distinguish from other bad things raise RuntimeError("function must be one of the 'dev' functions") # depends on [control=['if'], data=[]] result = func(self.inp['data'], rate=self.inp['rate'], data_type=self.inp['data_type'], taus=self.inp['taus']) keys = ['taus', 'stat', 'stat_err', 'stat_n'] result = {key: result[i] for (i, key) in enumerate(keys)} self.out = result.copy() self.out['stat_id'] = function return result
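A hedged usage sketch, assuming `allantools` and `numpy` are installed and `compute` above is available as a plain function; `PhaseData` is a hypothetical holder shaped the way the method reads `self.inp`:

import numpy as np

class PhaseData:  # minimal holder with the attributes compute() reads
    def __init__(self, data):
        self.inp = {'data': data, 'rate': 1.0,
                    'data_type': 'phase', 'taus': [1, 2, 4]}
        self.out = {}

holder = PhaseData(np.random.randn(1024))
stats = compute(holder, 'adev')       # call the function above with an explicit self
print(stats['taus'], stats['stat'])   # taus actually used and the Allan deviations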
def start(self, daemon = False): """Start the threads.""" self.daemon = daemon self.io_threads = [] self.event_thread = EventDispatcherThread(self.event_dispatcher, daemon = daemon, exc_queue = self.exc_queue) self.event_thread.start() for handler in self.io_handlers: self._run_io_threads(handler) for handler in self.timeout_handlers: self._run_timeout_threads(handler)
def function[start, parameter[self, daemon]]: constant[Start the threads.] name[self].daemon assign[=] name[daemon] name[self].io_threads assign[=] list[[]] name[self].event_thread assign[=] call[name[EventDispatcherThread], parameter[name[self].event_dispatcher]] call[name[self].event_thread.start, parameter[]] for taget[name[handler]] in starred[name[self].io_handlers] begin[:] call[name[self]._run_io_threads, parameter[name[handler]]] for taget[name[handler]] in starred[name[self].timeout_handlers] begin[:] call[name[self]._run_timeout_threads, parameter[name[handler]]]
keyword[def] identifier[start] ( identifier[self] , identifier[daemon] = keyword[False] ): literal[string] identifier[self] . identifier[daemon] = identifier[daemon] identifier[self] . identifier[io_threads] =[] identifier[self] . identifier[event_thread] = identifier[EventDispatcherThread] ( identifier[self] . identifier[event_dispatcher] , identifier[daemon] = identifier[daemon] , identifier[exc_queue] = identifier[self] . identifier[exc_queue] ) identifier[self] . identifier[event_thread] . identifier[start] () keyword[for] identifier[handler] keyword[in] identifier[self] . identifier[io_handlers] : identifier[self] . identifier[_run_io_threads] ( identifier[handler] ) keyword[for] identifier[handler] keyword[in] identifier[self] . identifier[timeout_handlers] : identifier[self] . identifier[_run_timeout_threads] ( identifier[handler] )
def start(self, daemon=False): """Start the threads.""" self.daemon = daemon self.io_threads = [] self.event_thread = EventDispatcherThread(self.event_dispatcher, daemon=daemon, exc_queue=self.exc_queue) self.event_thread.start() for handler in self.io_handlers: self._run_io_threads(handler) # depends on [control=['for'], data=['handler']] for handler in self.timeout_handlers: self._run_timeout_threads(handler) # depends on [control=['for'], data=['handler']]
def delete_modules(self): ''' Clean up after any modules created by this Document when its session is destroyed. ''' from gc import get_referrers from types import FrameType log.debug("Deleting %s modules for %s" % (len(self._modules), self)) for module in self._modules: # Modules created for a Document should have three referrers at this point: # # - sys.modules # - self._modules # - a frame object # # This function will take care of removing these expected references. # # If there are any additional referrers, this probably means the module will be # leaked. Here we perform a detailed check that the only referrers are expected # ones. Otherwise issue an error log message with details. referrers = get_referrers(module) referrers = [x for x in referrers if x is not sys.modules] referrers = [x for x in referrers if x is not self._modules] referrers = [x for x in referrers if not isinstance(x, FrameType)] if len(referrers) != 0: log.error("Module %r has extra unexpected referrers! This could indicate a serious memory leak. Extra referrers: %r" % (module, referrers)) # remove the reference from sys.modules if module.__name__ in sys.modules: del sys.modules[module.__name__] # remove the reference from self._modules self._modules = None
def function[delete_modules, parameter[self]]: constant[ Clean up after any modules created by this Document when its session is destroyed. ] from relative_module[gc] import module[get_referrers] from relative_module[types] import module[FrameType] call[name[log].debug, parameter[binary_operation[constant[Deleting %s modules for %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da204345d20>, <ast.Name object at 0x7da204345f60>]]]]] for taget[name[module]] in starred[name[self]._modules] begin[:] variable[referrers] assign[=] call[name[get_referrers], parameter[name[module]]] variable[referrers] assign[=] <ast.ListComp object at 0x7da204344520> variable[referrers] assign[=] <ast.ListComp object at 0x7da204345c90> variable[referrers] assign[=] <ast.ListComp object at 0x7da204344550> if compare[call[name[len], parameter[name[referrers]]] not_equal[!=] constant[0]] begin[:] call[name[log].error, parameter[binary_operation[constant[Module %r has extra unexpected referrers! This could indicate a serious memory leak. Extra referrers: %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204344070>, <ast.Name object at 0x7da204347160>]]]]] if compare[name[module].__name__ in name[sys].modules] begin[:] <ast.Delete object at 0x7da204344610> name[self]._modules assign[=] constant[None]
keyword[def] identifier[delete_modules] ( identifier[self] ): literal[string] keyword[from] identifier[gc] keyword[import] identifier[get_referrers] keyword[from] identifier[types] keyword[import] identifier[FrameType] identifier[log] . identifier[debug] ( literal[string] %( identifier[len] ( identifier[self] . identifier[_modules] ), identifier[self] )) keyword[for] identifier[module] keyword[in] identifier[self] . identifier[_modules] : identifier[referrers] = identifier[get_referrers] ( identifier[module] ) identifier[referrers] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[referrers] keyword[if] identifier[x] keyword[is] keyword[not] identifier[sys] . identifier[modules] ] identifier[referrers] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[referrers] keyword[if] identifier[x] keyword[is] keyword[not] identifier[self] . identifier[_modules] ] identifier[referrers] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[referrers] keyword[if] keyword[not] identifier[isinstance] ( identifier[x] , identifier[FrameType] )] keyword[if] identifier[len] ( identifier[referrers] )!= literal[int] : identifier[log] . identifier[error] ( literal[string] %( identifier[module] , identifier[referrers] )) keyword[if] identifier[module] . identifier[__name__] keyword[in] identifier[sys] . identifier[modules] : keyword[del] identifier[sys] . identifier[modules] [ identifier[module] . identifier[__name__] ] identifier[self] . identifier[_modules] = keyword[None]
def delete_modules(self): """ Clean up after any modules created by this Document when its session is destroyed. """ from gc import get_referrers from types import FrameType log.debug('Deleting %s modules for %s' % (len(self._modules), self)) for module in self._modules: # Modules created for a Document should have three referrers at this point: # # - sys.modules # - self._modules # - a frame object # # This function will take care of removing these expected references. # # If there are any additional referrers, this probably means the module will be # leaked. Here we perform a detailed check that the only referrers are expected # ones. Otherwise issue an error log message with details. referrers = get_referrers(module) referrers = [x for x in referrers if x is not sys.modules] referrers = [x for x in referrers if x is not self._modules] referrers = [x for x in referrers if not isinstance(x, FrameType)] if len(referrers) != 0: log.error('Module %r has extra unexpected referrers! This could indicate a serious memory leak. Extra referrers: %r' % (module, referrers)) # depends on [control=['if'], data=[]] # remove the reference from sys.modules if module.__name__ in sys.modules: del sys.modules[module.__name__] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['module']] # remove the reference from self._modules self._modules = None
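The referrer filtering above can be tried standalone with `gc.get_referrers`; a sketch that registers a throwaway module and checks what still points at it:

import gc
import sys
import types

module = types.ModuleType('scratch_module')
sys.modules[module.__name__] = module

referrers = gc.get_referrers(module)
referrers = [x for x in referrers if x is not sys.modules]
referrers = [x for x in referrers if not isinstance(x, types.FrameType)]
print(len(referrers))  # whatever is left would keep the module alive

del sys.modules[module.__name__]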
def score(self, test_X, test_Y): """Compute the mean accuracy over the test set. Parameters ---------- test_X : array_like, shape (n_samples, n_features) Test data. test_Y : array_like, shape (n_samples, n_features) Test labels. Returns ------- float : mean accuracy over the test set """ with self.tf_graph.as_default(): with tf.Session() as self.tf_session: self.tf_saver.restore(self.tf_session, self.model_path) feed = { self.input_data: test_X, self.input_labels: test_Y, self.keep_prob: 1 } return self.accuracy.eval(feed)
def function[score, parameter[self, test_X, test_Y]]: constant[Compute the mean accuracy over the test set. Parameters ---------- test_X : array_like, shape (n_samples, n_features) Test data. test_Y : array_like, shape (n_samples, n_features) Test labels. Returns ------- float : mean accuracy over the test set ] with call[name[self].tf_graph.as_default, parameter[]] begin[:] with call[name[tf].Session, parameter[]] begin[:] call[name[self].tf_saver.restore, parameter[name[self].tf_session, name[self].model_path]] variable[feed] assign[=] dictionary[[<ast.Attribute object at 0x7da1b0780910>, <ast.Attribute object at 0x7da1b0780a60>, <ast.Attribute object at 0x7da1b07806a0>], [<ast.Name object at 0x7da1b0782230>, <ast.Name object at 0x7da1b07822f0>, <ast.Constant object at 0x7da1b0781330>]] return[call[name[self].accuracy.eval, parameter[name[feed]]]]
keyword[def] identifier[score] ( identifier[self] , identifier[test_X] , identifier[test_Y] ): literal[string] keyword[with] identifier[self] . identifier[tf_graph] . identifier[as_default] (): keyword[with] identifier[tf] . identifier[Session] () keyword[as] identifier[self] . identifier[tf_session] : identifier[self] . identifier[tf_saver] . identifier[restore] ( identifier[self] . identifier[tf_session] , identifier[self] . identifier[model_path] ) identifier[feed] ={ identifier[self] . identifier[input_data] : identifier[test_X] , identifier[self] . identifier[input_labels] : identifier[test_Y] , identifier[self] . identifier[keep_prob] : literal[int] } keyword[return] identifier[self] . identifier[accuracy] . identifier[eval] ( identifier[feed] )
def score(self, test_X, test_Y): """Compute the mean accuracy over the test set. Parameters ---------- test_X : array_like, shape (n_samples, n_features) Test data. test_Y : array_like, shape (n_samples, n_features) Test labels. Returns ------- float : mean accuracy over the test set """ with self.tf_graph.as_default(): with tf.Session() as self.tf_session: self.tf_saver.restore(self.tf_session, self.model_path) feed = {self.input_data: test_X, self.input_labels: test_Y, self.keep_prob: 1} return self.accuracy.eval(feed) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]]
def interrupt(self, threadId=None): """ Interrupts the thread at the given id. :param threadId | <int> || None """ back = self.backend() if back: back.interrupt(threadId)
def function[interrupt, parameter[self, threadId]]: constant[ Interrupts the thread at the given id. :param threadId | <int> || None ] variable[back] assign[=] call[name[self].backend, parameter[]] if name[back] begin[:] call[name[back].interrupt, parameter[name[threadId]]]
keyword[def] identifier[interrupt] ( identifier[self] , identifier[threadId] = keyword[None] ): literal[string] identifier[back] = identifier[self] . identifier[backend] () keyword[if] identifier[back] : identifier[back] . identifier[interrupt] ( identifier[threadId] )
def interrupt(self, threadId=None): """ Interrupts the thread at the given id. :param threadId | <int> || None """ back = self.backend() if back: back.interrupt(threadId) # depends on [control=['if'], data=[]]
def parse_multiple_json(json_file, offset=None):
    """Parse multiple json records from the given file.

    Seek to the offset as the start point before parsing
    if offset is set. Returns an empty list if the json file
    does not exist or an exception occurs.

    Args:
        json_file (str): File path to be parsed.
        offset (int): Initial seek position of the file.

    Returns:
        A list of parsed json records.
        New offset after parsing.
    """
    json_info_list = []
    if offset is None:
        offset = 0
    if not os.path.exists(json_file):
        return json_info_list, offset

    try:
        with open(json_file, "r") as f:
            if offset:
                f.seek(offset)
            for line in f:
                if line[-1] != "\n":
                    # Incomplete line
                    break
                json_info = json.loads(line)
                json_info_list.append(json_info)
                offset += len(line)
    except BaseException as e:
        logging.error(e)

    return json_info_list, offset
def function[parse_multiple_json, parameter[json_file, offset]]: constant[Parse multiple json records from the given file. Seek to the offset as the start point before parsing if offset set. return empty list if the json file does not exists or exception occurs. Args: json_file (str): File path to be parsed. offset (int): Initial seek position of the file. Returns: A dict of json info. New offset after parsing. ] variable[json_info_list] assign[=] list[[]] if <ast.UnaryOp object at 0x7da207f990c0> begin[:] return[name[json_info_list]] <ast.Try object at 0x7da18f00dd80> return[tuple[[<ast.Name object at 0x7da18f00e950>, <ast.Name object at 0x7da18f00fca0>]]]
keyword[def] identifier[parse_multiple_json] ( identifier[json_file] , identifier[offset] = keyword[None] ): literal[string] identifier[json_info_list] =[] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[json_file] ): keyword[return] identifier[json_info_list] keyword[try] : keyword[with] identifier[open] ( identifier[json_file] , literal[string] ) keyword[as] identifier[f] : keyword[if] identifier[offset] : identifier[f] . identifier[seek] ( identifier[offset] ) keyword[for] identifier[line] keyword[in] identifier[f] : keyword[if] identifier[line] [- literal[int] ]!= literal[string] : keyword[break] identifier[json_info] = identifier[json] . identifier[loads] ( identifier[line] ) identifier[json_info_list] . identifier[append] ( identifier[json_info] ) identifier[offset] += identifier[len] ( identifier[line] ) keyword[except] identifier[BaseException] keyword[as] identifier[e] : identifier[logging] . identifier[error] ( identifier[e] . identifier[message] ) keyword[return] identifier[json_info_list] , identifier[offset]
def parse_multiple_json(json_file, offset=None):
    """Parse multiple json records from the given file.

    Seek to the offset as the start point before parsing
    if offset is set. Returns an empty list if the json file
    does not exist or an exception occurs.

    Args:
        json_file (str): File path to be parsed.
        offset (int): Initial seek position of the file.

    Returns:
        A list of parsed json records.
        New offset after parsing.
    """
    json_info_list = []
    if offset is None:
        offset = 0 # depends on [control=['if'], data=[]]
    if not os.path.exists(json_file):
        return (json_info_list, offset) # depends on [control=['if'], data=[]]
    try:
        with open(json_file, 'r') as f:
            if offset:
                f.seek(offset) # depends on [control=['if'], data=[]]
            for line in f:
                if line[-1] != '\n':
                    # Incomplete line
                    break # depends on [control=['if'], data=[]]
                json_info = json.loads(line)
                json_info_list.append(json_info)
                offset += len(line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
    except BaseException as e:
        logging.error(e) # depends on [control=['except'], data=['e']]
    return (json_info_list, offset)
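A usage sketch for the incremental parsing: records are newline-delimited json, and the returned offset lets a later call resume where the previous one stopped (the file name is illustrative, and the function's own `os`/`json`/`logging` imports are assumed):

import json

with open('events.log', 'w') as f:
    for step in (1, 2):
        f.write(json.dumps({'step': step}) + '\n')

records, offset = parse_multiple_json('events.log', offset=0)
print(records)   # [{'step': 1}, {'step': 2}]

with open('events.log', 'a') as f:
    f.write(json.dumps({'step': 3}) + '\n')

more, offset = parse_multiple_json('events.log', offset=offset)
print(more)      # [{'step': 3}]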
def is_os(name, version_id=None):
    '''Return True if OS name in /etc/lsb-release of host given by fabric param
    `-H` is the same as given by argument, False otherwise.

    If arg version_id is not None, only return True if it also matches the
    version in /etc/lsb-release.

    Args:
        name: 'Debian GNU/Linux', 'Ubuntu'
        version_id(None or str): None, '14.04' (Ubuntu), '16.04' (Ubuntu), '8' (Debian)
    '''
    result = False
    os_release_infos = _fetch_os_release_infos()

    if name == os_release_infos.get('name', None):

        if version_id is None:
            result = True

        elif version_id == os_release_infos.get('version_id', None):
            result = True

    return result
def function[is_os, parameter[name, version_id]]: constant[Return True if OS name in /etc/lsb-release of host given by fabric param `-H` is the same as given by argument, False else. If arg version_id is not None only return True if it is the same as in /etc/lsb-release, too. Args: name: 'Debian GNU/Linux', 'Ubuntu' version_id(None or str): None, '14.04', (Ubuntu) '16.04', (Ubuntu) '8', (Debian) ] variable[result] assign[=] constant[False] variable[os_release_infos] assign[=] call[name[_fetch_os_release_infos], parameter[]] if compare[name[name] equal[==] call[name[os_release_infos].get, parameter[constant[name], constant[None]]]] begin[:] if compare[name[version_id] is constant[None]] begin[:] variable[result] assign[=] constant[True] return[name[result]]
keyword[def] identifier[is_os] ( identifier[name] , identifier[version_id] = keyword[None] ): literal[string] identifier[result] = keyword[False] identifier[os_release_infos] = identifier[_fetch_os_release_infos] () keyword[if] identifier[name] == identifier[os_release_infos] . identifier[get] ( literal[string] , keyword[None] ): keyword[if] identifier[version_id] keyword[is] keyword[None] : identifier[result] = keyword[True] keyword[elif] identifier[version_id] == identifier[os_release_infos] . identifier[get] ( literal[string] , keyword[None] ): identifier[result] = keyword[True] keyword[return] identifier[result]
def is_os(name, version_id=None):
    """Return True if OS name in /etc/lsb-release of host given by fabric param
    `-H` is the same as given by argument, False otherwise.

    If arg version_id is not None, only return True if it also matches the
    version in /etc/lsb-release.

    Args:
        name: 'Debian GNU/Linux', 'Ubuntu'
        version_id(None or str): None, '14.04' (Ubuntu), '16.04' (Ubuntu), '8' (Debian)
    """
    result = False
    os_release_infos = _fetch_os_release_infos()
    if name == os_release_infos.get('name', None):
        if version_id is None:
            result = True # depends on [control=['if'], data=[]]
        elif version_id == os_release_infos.get('version_id', None):
            result = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    return result
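`_fetch_os_release_infos` is defined elsewhere; a sketch of the same comparison logic against a stubbed release-info mapping:

# stubbed release info in the shape the function expects
os_release_infos = {'name': 'Ubuntu', 'version_id': '16.04'}

def is_os_stub(name, version_id=None):
    if name != os_release_infos.get('name'):
        return False
    return version_id is None or version_id == os_release_infos.get('version_id')

print(is_os_stub('Ubuntu'))           # True
print(is_os_stub('Ubuntu', '16.04'))  # True
print(is_os_stub('Ubuntu', '14.04'))  # False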
def create_app(object_name):
    """
    A Flask application factory, as explained here:
    http://flask.pocoo.org/docs/patterns/appfactories/

    Arguments:
        object_name: the python path of the config object,
                     e.g. webapp.settings.ProdConfig
    """

    app = Flask(__name__)

    app.config.from_object(object_name)

    # initialize the cache
    cache.init_app(app)

    # initialize the debug tool bar
    debug_toolbar.init_app(app)

    # initialize SQLAlchemy
    db.init_app(app)

    return app
def function[create_app, parameter[object_name]]: constant[ An flask application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/ Arguments: object_name: the python path of the config object, e.g. webapp.settings.ProdConfig env: The name of the current environment, e.g. prod or dev ] variable[app] assign[=] call[name[Flask], parameter[name[__name__]]] call[name[app].config.from_object, parameter[name[object_name]]] call[name[cache].init_app, parameter[name[app]]] call[name[debug_toolbar].init_app, parameter[name[app]]] call[name[db].init_app, parameter[name[app]]] return[name[app]]
keyword[def] identifier[create_app] ( identifier[object_name] ): literal[string] identifier[app] = identifier[Flask] ( identifier[__name__] ) identifier[app] . identifier[config] . identifier[from_object] ( identifier[object_name] ) identifier[cache] . identifier[init_app] ( identifier[app] ) identifier[debug_toolbar] . identifier[init_app] ( identifier[app] ) identifier[db] . identifier[init_app] ( identifier[app] ) keyword[return] identifier[app]
def create_app(object_name):
    """
    A Flask application factory, as explained here:
    http://flask.pocoo.org/docs/patterns/appfactories/

    Arguments:
        object_name: the python path of the config object,
                     e.g. webapp.settings.ProdConfig
    """
    app = Flask(__name__)
    app.config.from_object(object_name)
    # initialize the cache
    cache.init_app(app)
    # initialize the debug tool bar
    debug_toolbar.init_app(app)
    # initialize SQLAlchemy
    db.init_app(app)
    return app
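Typical use of such a factory, sketched with an illustrative dotted config path:

# e.g. in a manage.py / wsgi entry point; the dotted path is illustrative
app = create_app('webapp.settings.DevConfig')

if __name__ == '__main__':
    app.run(debug=True)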
def trigger_on_off(request, trigger_id):
    """
        enable/disable the status of the trigger then go back home
        :param request: request object
        :param trigger_id: the trigger ID to switch the status to True or False
        :type request: HttpRequest object
        :type trigger_id: int
        :return: render
        :rtype: HttpResponse
    """
    now = arrow.utcnow().to(settings.TIME_ZONE).format('YYYY-MM-DD HH:mm:ssZZ')
    trigger = get_object_or_404(TriggerService, pk=trigger_id)
    if trigger.status:
        title = 'disabled'
        title_trigger = _('Set this trigger on')
        btn = 'success'
        trigger.status = False
    else:
        title = _('Edit your service')
        title_trigger = _('Set this trigger off')
        btn = 'primary'
        trigger.status = True
        # set the trigger to the current date when
        # the trigger is back online
        trigger.date_triggered = now
    trigger.save()

    return render(request, 'triggers/trigger_line.html',
                  {'trigger': trigger,
                   'title': title,
                   'title_trigger': title_trigger,
                   'btn': btn,
                   'fire': settings.DJANGO_TH.get('fire', False)
                   }
                  )
def function[trigger_on_off, parameter[request, trigger_id]]: constant[ enable/disable the status of the trigger then go back home :param request: request object :param trigger_id: the trigger ID to switch the status to True or False :type request: HttpRequest object :type trigger_id: int :return render :rtype HttpResponse ] variable[now] assign[=] call[call[call[name[arrow].utcnow, parameter[]].to, parameter[name[settings].TIME_ZONE]].format, parameter[constant[YYYY-MM-DD HH:mm:ssZZ]]] variable[trigger] assign[=] call[name[get_object_or_404], parameter[name[TriggerService]]] if name[trigger].status begin[:] variable[title] assign[=] constant[disabled] variable[title_trigger] assign[=] call[name[_], parameter[constant[Set this trigger on]]] variable[btn] assign[=] constant[success] name[trigger].status assign[=] constant[False] call[name[trigger].save, parameter[]] return[call[name[render], parameter[name[request], constant[triggers/trigger_line.html], dictionary[[<ast.Constant object at 0x7da20c6e4a00>, <ast.Constant object at 0x7da20c6e4850>, <ast.Constant object at 0x7da20c6e6380>, <ast.Constant object at 0x7da20c6e4820>, <ast.Constant object at 0x7da20c6e4bb0>], [<ast.Name object at 0x7da20c6e7550>, <ast.Name object at 0x7da20c6e7640>, <ast.Name object at 0x7da20c6e6020>, <ast.Name object at 0x7da20c6e4430>, <ast.Call object at 0x7da20c6e5e40>]]]]]
keyword[def] identifier[trigger_on_off] ( identifier[request] , identifier[trigger_id] ): literal[string] identifier[now] = identifier[arrow] . identifier[utcnow] (). identifier[to] ( identifier[settings] . identifier[TIME_ZONE] ). identifier[format] ( literal[string] ) identifier[trigger] = identifier[get_object_or_404] ( identifier[TriggerService] , identifier[pk] = identifier[trigger_id] ) keyword[if] identifier[trigger] . identifier[status] : identifier[title] = literal[string] identifier[title_trigger] = identifier[_] ( literal[string] ) identifier[btn] = literal[string] identifier[trigger] . identifier[status] = keyword[False] keyword[else] : identifier[title] = identifier[_] ( literal[string] ) identifier[title_trigger] = identifier[_] ( literal[string] ) identifier[btn] = literal[string] identifier[trigger] . identifier[status] = keyword[True] identifier[trigger] . identifier[date_triggered] = identifier[now] identifier[trigger] . identifier[save] () keyword[return] identifier[render] ( identifier[request] , literal[string] , { literal[string] : identifier[trigger] , literal[string] : identifier[title] , literal[string] : identifier[title_trigger] , literal[string] : identifier[btn] , literal[string] : identifier[settings] . identifier[DJANGO_TH] . identifier[get] ( literal[string] , keyword[False] ) } )
def trigger_on_off(request, trigger_id):
    """
        enable/disable the status of the trigger then go back home
        :param request: request object
        :param trigger_id: the trigger ID to switch the status to True or False
        :type request: HttpRequest object
        :type trigger_id: int
        :return: render
        :rtype: HttpResponse
    """
    now = arrow.utcnow().to(settings.TIME_ZONE).format('YYYY-MM-DD HH:mm:ssZZ')
    trigger = get_object_or_404(TriggerService, pk=trigger_id)
    if trigger.status:
        title = 'disabled'
        title_trigger = _('Set this trigger on')
        btn = 'success'
        trigger.status = False # depends on [control=['if'], data=[]]
    else:
        title = _('Edit your service')
        title_trigger = _('Set this trigger off')
        btn = 'primary'
        trigger.status = True
        # set the trigger to the current date when
        # the trigger is back online
        trigger.date_triggered = now
    trigger.save()
    return render(request, 'triggers/trigger_line.html', {'trigger': trigger, 'title': title, 'title_trigger': title_trigger, 'btn': btn, 'fire': settings.DJANGO_TH.get('fire', False)})
def up(self): """ Move this object up one position. """ self.swap(self.get_ordering_queryset().filter(order__lt=self.order).order_by('-order'))
def function[up, parameter[self]]: constant[ Move this object up one position. ] call[name[self].swap, parameter[call[call[call[name[self].get_ordering_queryset, parameter[]].filter, parameter[]].order_by, parameter[constant[-order]]]]]
keyword[def] identifier[up] ( identifier[self] ): literal[string] identifier[self] . identifier[swap] ( identifier[self] . identifier[get_ordering_queryset] (). identifier[filter] ( identifier[order__lt] = identifier[self] . identifier[order] ). identifier[order_by] ( literal[string] ))
def up(self): """ Move this object up one position. """ self.swap(self.get_ordering_queryset().filter(order__lt=self.order).order_by('-order'))
def ReleaseRecords(cls, ids, token): """Release records identified by subjects. Releases any claim on the records identified by ids. Args: ids: A list of ids provided by ClaimRecords. token: The database access token to write with. Raises: LockError: If the queue is not locked. """ with data_store.DB.GetMutationPool() as mutation_pool: mutation_pool.QueueReleaseRecords(ids)
def function[ReleaseRecords, parameter[cls, ids, token]]: constant[Release records identified by subjects. Releases any claim on the records identified by ids. Args: ids: A list of ids provided by ClaimRecords. token: The database access token to write with. Raises: LockError: If the queue is not locked. ] with call[name[data_store].DB.GetMutationPool, parameter[]] begin[:] call[name[mutation_pool].QueueReleaseRecords, parameter[name[ids]]]
keyword[def] identifier[ReleaseRecords] ( identifier[cls] , identifier[ids] , identifier[token] ): literal[string] keyword[with] identifier[data_store] . identifier[DB] . identifier[GetMutationPool] () keyword[as] identifier[mutation_pool] : identifier[mutation_pool] . identifier[QueueReleaseRecords] ( identifier[ids] )
def ReleaseRecords(cls, ids, token): """Release records identified by subjects. Releases any claim on the records identified by ids. Args: ids: A list of ids provided by ClaimRecords. token: The database access token to write with. Raises: LockError: If the queue is not locked. """ with data_store.DB.GetMutationPool() as mutation_pool: mutation_pool.QueueReleaseRecords(ids) # depends on [control=['with'], data=['mutation_pool']]
def get_nearest_points_dirty(self, center_point, radius, unit='km'):
    """
    Return an approximate list of points from the circle with the given
    center and radius. It uses geohash, so results carry some error
    (see GEO_HASH_ERRORS).

    :param center_point: center of search circle
    :param radius: radius of search circle
    :return: list of GeoPoints from given area
    """
    if unit == 'mi':
        radius = utils.mi_to_km(radius)

    grid_size = GEO_HASH_GRID_SIZE[self.precision]
    if radius > grid_size / 2:
        # radius is too big for current grid, we cannot use 9 neighbors
        # to cover all possible points
        suggested_precision = 0
        for precision, max_size in GEO_HASH_GRID_SIZE.items():
            if radius > max_size / 2:
                suggested_precision = precision - 1
                break
        raise ValueError(
            'Too large radius, please rebuild GeoHashGrid with '
            'precision={0}'.format(suggested_precision)
        )
    me_and_neighbors = geohash.expand(self.get_point_hash(center_point))

    return chain(*(self.data.get(key, []) for key in me_and_neighbors))
def function[get_nearest_points_dirty, parameter[self, center_point, radius, unit]]: constant[ return approx list of point from circle with given center and radius it uses geohash and return with some error (see GEO_HASH_ERRORS) :param center_point: center of search circle :param radius: radius of search circle :return: list of GeoPoints from given area ] if compare[name[unit] equal[==] constant[mi]] begin[:] variable[radius] assign[=] call[name[utils].mi_to_km, parameter[name[radius]]] variable[grid_size] assign[=] call[name[GEO_HASH_GRID_SIZE]][name[self].precision] if compare[name[radius] greater[>] binary_operation[name[grid_size] / constant[2]]] begin[:] variable[suggested_precision] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da18f00e3b0>, <ast.Name object at 0x7da18f00e440>]]] in starred[call[name[GEO_HASH_GRID_SIZE].items, parameter[]]] begin[:] if compare[name[radius] greater[>] binary_operation[name[max_size] / constant[2]]] begin[:] variable[suggested_precision] assign[=] binary_operation[name[precision] - constant[1]] break <ast.Raise object at 0x7da18f00cdc0> variable[me_and_neighbors] assign[=] call[name[geohash].expand, parameter[call[name[self].get_point_hash, parameter[name[center_point]]]]] return[call[name[chain], parameter[<ast.Starred object at 0x7da18f00f1c0>]]]
keyword[def] identifier[get_nearest_points_dirty] ( identifier[self] , identifier[center_point] , identifier[radius] , identifier[unit] = literal[string] ): literal[string] keyword[if] identifier[unit] == literal[string] : identifier[radius] = identifier[utils] . identifier[mi_to_km] ( identifier[radius] ) identifier[grid_size] = identifier[GEO_HASH_GRID_SIZE] [ identifier[self] . identifier[precision] ] keyword[if] identifier[radius] > identifier[grid_size] / literal[int] : identifier[suggested_precision] = literal[int] keyword[for] identifier[precision] , identifier[max_size] keyword[in] identifier[GEO_HASH_GRID_SIZE] . identifier[items] (): keyword[if] identifier[radius] > identifier[max_size] / literal[int] : identifier[suggested_precision] = identifier[precision] - literal[int] keyword[break] keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[suggested_precision] ) ) identifier[me_and_neighbors] = identifier[geohash] . identifier[expand] ( identifier[self] . identifier[get_point_hash] ( identifier[center_point] )) keyword[return] identifier[chain] (*( identifier[self] . identifier[data] . identifier[get] ( identifier[key] ,[]) keyword[for] identifier[key] keyword[in] identifier[me_and_neighbors] ))
def get_nearest_points_dirty(self, center_point, radius, unit='km'):
    """
    Return an approximate list of points from the circle with the given
    center and radius. It uses geohash, so results carry some error
    (see GEO_HASH_ERRORS).

    :param center_point: center of search circle
    :param radius: radius of search circle
    :return: list of GeoPoints from given area
    """
    if unit == 'mi':
        radius = utils.mi_to_km(radius) # depends on [control=['if'], data=[]]
    grid_size = GEO_HASH_GRID_SIZE[self.precision]
    if radius > grid_size / 2:
        # radius is too big for current grid, we cannot use 9 neighbors
        # to cover all possible points
        suggested_precision = 0
        for (precision, max_size) in GEO_HASH_GRID_SIZE.items():
            if radius > max_size / 2:
                suggested_precision = precision - 1
                break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
        raise ValueError('Too large radius, please rebuild GeoHashGrid with precision={0}'.format(suggested_precision)) # depends on [control=['if'], data=['radius']]
    me_and_neighbors = geohash.expand(self.get_point_hash(center_point))
    return chain(*(self.data.get(key, []) for key in me_and_neighbors))
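The final lookup expands the query point's geohash cell to itself plus its eight neighbors; a standalone sketch of that idea, assuming a `geohash` module exposing `encode` and `expand` as used above, with a plain dict standing in for `self.data`:

import geohash  # assumed to be the same package the method above imports

center = (40.75, -73.99)
cell = geohash.encode(center[0], center[1], precision=5)

grid = {cell: ['point-in-same-cell']}  # stand-in for self.data
candidates = []
for key in geohash.expand(cell):       # the cell itself plus its 8 neighbors
    candidates.extend(grid.get(key, []))
print(candidates)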
def rand_crop(*args, padding_mode='reflection', p:float=1.): "Randomized version of `crop_pad`." return crop_pad(*args, **rand_pos, padding_mode=padding_mode, p=p)
def function[rand_crop, parameter[]]: constant[Randomized version of `crop_pad`.] return[call[name[crop_pad], parameter[<ast.Starred object at 0x7da1b202a080>]]]
keyword[def] identifier[rand_crop] (* identifier[args] , identifier[padding_mode] = literal[string] , identifier[p] : identifier[float] = literal[int] ): literal[string] keyword[return] identifier[crop_pad] (* identifier[args] ,** identifier[rand_pos] , identifier[padding_mode] = identifier[padding_mode] , identifier[p] = identifier[p] )
def rand_crop(*args, padding_mode='reflection', p: float=1.0): """Randomized version of `crop_pad`.""" return crop_pad(*args, **rand_pos, padding_mode=padding_mode, p=p)
def start(self, hostname=None, port=None, templates_path=None):
    """
    Starts the web interface.

    Args:
        hostname (str, optional): host name to listen on. (Default value = None)
        port (int, optional): port to listen on. (Default value = None)
        templates_path (str, optional): path to look for templates. (Default value = None)
    """
    self.hostname = hostname if hostname else "localhost"
    if port:
        self.port = port
    elif not self.port:
        self.port = unused_port(self.hostname)
    if templates_path:
        self.loaders.insert(0, jinja2.FileSystemLoader(templates_path))
        self._set_loaders()

    self.setup_routes()
    self.runner = aioweb.AppRunner(self.app)

    return self.agent.submit(start_server_in_loop(self.runner, self.hostname, self.port, self.agent))
def function[start, parameter[self, hostname, port, templates_path]]: constant[ Starts the web interface. Args: hostname (str, optional): host name to listen from. (Default value = None) port (int, optional): port to listen from. (Default value = None) templates_path (str, optional): path to look for templates. (Default value = None) ] name[self].hostname assign[=] <ast.IfExp object at 0x7da1b0761fc0> if name[port] begin[:] name[self].port assign[=] name[port] if name[templates_path] begin[:] call[name[self].loaders.insert, parameter[constant[0], call[name[jinja2].FileSystemLoader, parameter[name[templates_path]]]]] call[name[self]._set_loaders, parameter[]] call[name[self].setup_routes, parameter[]] name[self].runner assign[=] call[name[aioweb].AppRunner, parameter[name[self].app]] return[call[name[self].agent.submit, parameter[call[name[start_server_in_loop], parameter[name[self].runner, name[self].hostname, name[self].port, name[self].agent]]]]]
keyword[def] identifier[start] ( identifier[self] , identifier[hostname] = keyword[None] , identifier[port] = keyword[None] , identifier[templates_path] = keyword[None] ): literal[string] identifier[self] . identifier[hostname] = identifier[hostname] keyword[if] identifier[hostname] keyword[else] literal[string] keyword[if] identifier[port] : identifier[self] . identifier[port] = identifier[port] keyword[elif] keyword[not] identifier[self] . identifier[port] : identifier[self] . identifier[port] = identifier[unused_port] ( identifier[self] . identifier[hostname] ) keyword[if] identifier[templates_path] : identifier[self] . identifier[loaders] . identifier[insert] ( literal[int] , identifier[jinja2] . identifier[FileSystemLoader] ( identifier[templates_path] )) identifier[self] . identifier[_set_loaders] () identifier[self] . identifier[setup_routes] () identifier[self] . identifier[runner] = identifier[aioweb] . identifier[AppRunner] ( identifier[self] . identifier[app] ) keyword[return] identifier[self] . identifier[agent] . identifier[submit] ( identifier[start_server_in_loop] ( identifier[self] . identifier[runner] , identifier[self] . identifier[hostname] , identifier[self] . identifier[port] , identifier[self] . identifier[agent] ))
def start(self, hostname=None, port=None, templates_path=None):
    """
    Starts the web interface.

    Args:
        hostname (str, optional): host name to listen on. (Default value = None)
        port (int, optional): port to listen on. (Default value = None)
        templates_path (str, optional): path to look for templates. (Default value = None)
    """
    self.hostname = hostname if hostname else 'localhost'
    if port:
        self.port = port # depends on [control=['if'], data=[]]
    elif not self.port:
        self.port = unused_port(self.hostname) # depends on [control=['if'], data=[]]
    if templates_path:
        self.loaders.insert(0, jinja2.FileSystemLoader(templates_path))
        self._set_loaders() # depends on [control=['if'], data=[]]
    self.setup_routes()
    self.runner = aioweb.AppRunner(self.app)
    return self.agent.submit(start_server_in_loop(self.runner, self.hostname, self.port, self.agent))
def qps(self, callback=None, errback=None): """ Return the current QPS for this zone :rtype: dict :return: QPS information """ stats = Stats(self.config) return stats.qps(zone=self.zone, callback=callback, errback=errback)
def function[qps, parameter[self, callback, errback]]: constant[ Return the current QPS for this zone :rtype: dict :return: QPS information ] variable[stats] assign[=] call[name[Stats], parameter[name[self].config]] return[call[name[stats].qps, parameter[]]]
keyword[def] identifier[qps] ( identifier[self] , identifier[callback] = keyword[None] , identifier[errback] = keyword[None] ): literal[string] identifier[stats] = identifier[Stats] ( identifier[self] . identifier[config] ) keyword[return] identifier[stats] . identifier[qps] ( identifier[zone] = identifier[self] . identifier[zone] , identifier[callback] = identifier[callback] , identifier[errback] = identifier[errback] )
def qps(self, callback=None, errback=None): """ Return the current QPS for this zone :rtype: dict :return: QPS information """ stats = Stats(self.config) return stats.qps(zone=self.zone, callback=callback, errback=errback)
def from_dict(cls, d): """ Create an instance from a dictionary. """ execution_request = super(ExecutionRequest, cls).from_dict(d) if isinstance(execution_request.simulation_group, dict): execution_request.simulation_group = SimulationGroup.from_dict(execution_request.simulation_group) return execution_request
def function[from_dict, parameter[cls, d]]: constant[ Create an instance from a dictionary. ] variable[execution_request] assign[=] call[call[name[super], parameter[name[ExecutionRequest], name[cls]]].from_dict, parameter[name[d]]] if call[name[isinstance], parameter[name[execution_request].simulation_group, name[dict]]] begin[:] name[execution_request].simulation_group assign[=] call[name[SimulationGroup].from_dict, parameter[name[execution_request].simulation_group]] return[name[execution_request]]
keyword[def] identifier[from_dict] ( identifier[cls] , identifier[d] ): literal[string] identifier[execution_request] = identifier[super] ( identifier[ExecutionRequest] , identifier[cls] ). identifier[from_dict] ( identifier[d] ) keyword[if] identifier[isinstance] ( identifier[execution_request] . identifier[simulation_group] , identifier[dict] ): identifier[execution_request] . identifier[simulation_group] = identifier[SimulationGroup] . identifier[from_dict] ( identifier[execution_request] . identifier[simulation_group] ) keyword[return] identifier[execution_request]
def from_dict(cls, d): """ Create an instance from a dictionary. """ execution_request = super(ExecutionRequest, cls).from_dict(d) if isinstance(execution_request.simulation_group, dict): execution_request.simulation_group = SimulationGroup.from_dict(execution_request.simulation_group) # depends on [control=['if'], data=[]] return execution_request
def get_hash_of_dirs(directory): """ Recursively hash the contents of the given directory. Args: directory (str): The root directory we want to hash. Returns: A hash of all the contents in the directory. """ import hashlib sha = hashlib.sha512() if not os.path.exists(directory): return -1 for root, _, files in os.walk(directory): for name in files: filepath = local.path(root) / name if filepath.exists(): with open(filepath, 'rb') as next_file: for line in next_file: sha.update(line) return sha.hexdigest()
def function[get_hash_of_dirs, parameter[directory]]: constant[ Recursively hash the contents of the given directory. Args: directory (str): The root directory we want to hash. Returns: A hash of all the contents in the directory. ] import module[hashlib] variable[sha] assign[=] call[name[hashlib].sha512, parameter[]] if <ast.UnaryOp object at 0x7da2041d86a0> begin[:] return[<ast.UnaryOp object at 0x7da2041d8400>] for taget[tuple[[<ast.Name object at 0x7da2041db7c0>, <ast.Name object at 0x7da2041d8f70>, <ast.Name object at 0x7da2041d87f0>]]] in starred[call[name[os].walk, parameter[name[directory]]]] begin[:] for taget[name[name]] in starred[name[files]] begin[:] variable[filepath] assign[=] binary_operation[call[name[local].path, parameter[name[root]]] / name[name]] if call[name[filepath].exists, parameter[]] begin[:] with call[name[open], parameter[name[filepath], constant[rb]]] begin[:] for taget[name[line]] in starred[name[next_file]] begin[:] call[name[sha].update, parameter[name[line]]] return[call[name[sha].hexdigest, parameter[]]]
keyword[def] identifier[get_hash_of_dirs] ( identifier[directory] ): literal[string] keyword[import] identifier[hashlib] identifier[sha] = identifier[hashlib] . identifier[sha512] () keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[directory] ): keyword[return] - literal[int] keyword[for] identifier[root] , identifier[_] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[directory] ): keyword[for] identifier[name] keyword[in] identifier[files] : identifier[filepath] = identifier[local] . identifier[path] ( identifier[root] )/ identifier[name] keyword[if] identifier[filepath] . identifier[exists] (): keyword[with] identifier[open] ( identifier[filepath] , literal[string] ) keyword[as] identifier[next_file] : keyword[for] identifier[line] keyword[in] identifier[next_file] : identifier[sha] . identifier[update] ( identifier[line] ) keyword[return] identifier[sha] . identifier[hexdigest] ()
def get_hash_of_dirs(directory): """ Recursively hash the contents of the given directory. Args: directory (str): The root directory we want to hash. Returns: A hash of all the contents in the directory. """ import hashlib sha = hashlib.sha512() if not os.path.exists(directory): return -1 # depends on [control=['if'], data=[]] for (root, _, files) in os.walk(directory): for name in files: filepath = local.path(root) / name if filepath.exists(): with open(filepath, 'rb') as next_file: for line in next_file: sha.update(line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['next_file']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] # depends on [control=['for'], data=[]] return sha.hexdigest()
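A usage sketch for the `get_hash_of_dirs` row above. The function as written leans on a module-level `os` import and a plumbum-style `local.path`; this standalone variant is an assumption that swaps in `os.path.join` and sorts filenames so the digest does not depend on the filesystem's walk order (a deliberate tweak, not the original behavior):

import hashlib
import os

def hash_directory(directory):
    # Standard-library variant of get_hash_of_dirs above.
    sha = hashlib.sha512()
    if not os.path.exists(directory):
        return -1
    for root, _, files in os.walk(directory):
        for name in sorted(files):  # stable order is a deliberate tweak
            filepath = os.path.join(root, name)
            if os.path.exists(filepath):
                with open(filepath, 'rb') as next_file:
                    for line in next_file:
                        sha.update(line)
    return sha.hexdigest()

print(hash_directory('.'))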
def normalize_likes(sql): """ Normalize and wrap LIKE statements :type sql str :rtype: str """ sql = sql.replace('%', '') # LIKE '%bot' sql = re.sub(r"LIKE '[^\']+'", 'LIKE X', sql) # or all_groups LIKE X or all_groups LIKE X matches = re.finditer(r'(or|and) [^\s]+ LIKE X', sql, flags=re.IGNORECASE) matches = [match.group(0) for match in matches] if matches else None if matches: for match in set(matches): sql = re.sub(r'(\s?' + re.escape(match) + ')+', ' ' + match + ' ...', sql) return sql
def function[normalize_likes, parameter[sql]]: constant[ Normalize and wrap LIKE statements :type sql str :rtype: str ] variable[sql] assign[=] call[name[sql].replace, parameter[constant[%], constant[]]] variable[sql] assign[=] call[name[re].sub, parameter[constant[LIKE '[^\']+'], constant[LIKE X], name[sql]]] variable[matches] assign[=] call[name[re].finditer, parameter[constant[(or|and) [^\s]+ LIKE X], name[sql]]] variable[matches] assign[=] <ast.IfExp object at 0x7da20e9b2740> if name[matches] begin[:] for taget[name[match]] in starred[call[name[set], parameter[name[matches]]]] begin[:] variable[sql] assign[=] call[name[re].sub, parameter[binary_operation[binary_operation[constant[(\s?] + call[name[re].escape, parameter[name[match]]]] + constant[)+]], binary_operation[binary_operation[constant[ ] + name[match]] + constant[ ...]], name[sql]]] return[name[sql]]
keyword[def] identifier[normalize_likes] ( identifier[sql] ): literal[string] identifier[sql] = identifier[sql] . identifier[replace] ( literal[string] , literal[string] ) identifier[sql] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[sql] ) identifier[matches] = identifier[re] . identifier[finditer] ( literal[string] , identifier[sql] , identifier[flags] = identifier[re] . identifier[IGNORECASE] ) identifier[matches] =[ identifier[match] . identifier[group] ( literal[int] ) keyword[for] identifier[match] keyword[in] identifier[matches] ] keyword[if] identifier[matches] keyword[else] keyword[None] keyword[if] identifier[matches] : keyword[for] identifier[match] keyword[in] identifier[set] ( identifier[matches] ): identifier[sql] = identifier[re] . identifier[sub] ( literal[string] + identifier[re] . identifier[escape] ( identifier[match] )+ literal[string] , literal[string] + identifier[match] + literal[string] , identifier[sql] ) keyword[return] identifier[sql]
def normalize_likes(sql): """ Normalize and wrap LIKE statements :type sql str :rtype: str """ sql = sql.replace('%', '') # LIKE '%bot' sql = re.sub("LIKE '[^\\']+'", 'LIKE X', sql) # or all_groups LIKE X or all_groups LIKE X matches = re.finditer('(or|and) [^\\s]+ LIKE X', sql, flags=re.IGNORECASE) matches = [match.group(0) for match in matches] if matches else None if matches: for match in set(matches): sql = re.sub('(\\s?' + re.escape(match) + ')+', ' ' + match + ' ...', sql) # depends on [control=['for'], data=['match']] # depends on [control=['if'], data=[]] return sql
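A quick demonstration of what `normalize_likes` above produces; `re` is assumed to be imported at module level in the source project, and the sample query is made up:

import re

# normalize_likes as defined in the row above, then:
sql = "name LIKE '%bot' or grp LIKE '%admin%' or grp LIKE '%ops%'"
print(normalize_likes(sql))
# should print: name LIKE X or grp LIKE X ...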
def clip_joint_velocities(self, velocities): """ Clips joint velocities into a valid range. """ for i in range(len(velocities)): if velocities[i] >= 1.0: velocities[i] = 1.0 elif velocities[i] <= -1.0: velocities[i] = -1.0 return velocities
def function[clip_joint_velocities, parameter[self, velocities]]: constant[ Clips joint velocities into a valid range. ] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[velocities]]]]]] begin[:] if compare[call[name[velocities]][name[i]] greater_or_equal[>=] constant[1.0]] begin[:] call[name[velocities]][name[i]] assign[=] constant[1.0] return[name[velocities]]
keyword[def] identifier[clip_joint_velocities] ( identifier[self] , identifier[velocities] ): literal[string] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[velocities] )): keyword[if] identifier[velocities] [ identifier[i] ]>= literal[int] : identifier[velocities] [ identifier[i] ]= literal[int] keyword[elif] identifier[velocities] [ identifier[i] ]<=- literal[int] : identifier[velocities] [ identifier[i] ]=- literal[int] keyword[return] identifier[velocities]
def clip_joint_velocities(self, velocities): """ Clips joint velocities into a valid range. """ for i in range(len(velocities)): if velocities[i] >= 1.0: velocities[i] = 1.0 # depends on [control=['if'], data=[]] elif velocities[i] <= -1.0: velocities[i] = -1.0 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] return velocities
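The loop in the row above is an element-wise clamp into [-1.0, 1.0]. Where NumPy is already a dependency, which is common in robotics code like this, the same behavior is a one-liner; this is an alternative sketch, not the project's API:

import numpy as np

def clip_joint_velocities(velocities):
    # Element-wise clamp into [-1.0, 1.0], matching the loop-based version,
    # returned as an ndarray rather than mutated in place.
    return np.clip(velocities, -1.0, 1.0)

print(clip_joint_velocities([0.5, 1.7, -2.3]))  # [ 0.5  1.  -1. ]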
def read(cls, proto): """ :param proto: capnp TwoGramModelProto message reader """ instance = object.__new__(cls) super(TwoGramModel, instance).__init__(proto=proto.modelBase) instance._logger = opf_utils.initLogger(instance) instance._reset = proto.reset instance._hashToValueDict = {x.hash: x.value for x in proto.hashToValueDict} instance._learningEnabled = proto.learningEnabled instance._encoder = encoders.MultiEncoder.read(proto.encoder) instance._fieldNames = instance._encoder.getScalarNames() instance._prevValues = list(proto.prevValues) instance._twoGramDicts = [dict() for _ in xrange(len(proto.twoGramDicts))] for idx, field in enumerate(proto.twoGramDicts): for entry in field: prev = None if entry.value == -1 else entry.value instance._twoGramDicts[idx][prev] = collections.defaultdict(int) for bucket in entry.buckets: instance._twoGramDicts[idx][prev][bucket.index] = bucket.count return instance
def function[read, parameter[cls, proto]]: constant[ :param proto: capnp TwoGramModelProto message reader ] variable[instance] assign[=] call[name[object].__new__, parameter[name[cls]]] call[call[name[super], parameter[name[TwoGramModel], name[instance]]].__init__, parameter[]] name[instance]._logger assign[=] call[name[opf_utils].initLogger, parameter[name[instance]]] name[instance]._reset assign[=] name[proto].reset name[instance]._hashToValueDict assign[=] <ast.DictComp object at 0x7da1b2346290> name[instance]._learningEnabled assign[=] name[proto].learningEnabled name[instance]._encoder assign[=] call[name[encoders].MultiEncoder.read, parameter[name[proto].encoder]] name[instance]._fieldNames assign[=] call[name[instance]._encoder.getScalarNames, parameter[]] name[instance]._prevValues assign[=] call[name[list], parameter[name[proto].prevValues]] name[instance]._twoGramDicts assign[=] <ast.ListComp object at 0x7da1b2346e60> for taget[tuple[[<ast.Name object at 0x7da1b2344550>, <ast.Name object at 0x7da1b23458a0>]]] in starred[call[name[enumerate], parameter[name[proto].twoGramDicts]]] begin[:] for taget[name[entry]] in starred[name[field]] begin[:] variable[prev] assign[=] <ast.IfExp object at 0x7da1b2346a70> call[call[name[instance]._twoGramDicts][name[idx]]][name[prev]] assign[=] call[name[collections].defaultdict, parameter[name[int]]] for taget[name[bucket]] in starred[name[entry].buckets] begin[:] call[call[call[name[instance]._twoGramDicts][name[idx]]][name[prev]]][name[bucket].index] assign[=] name[bucket].count return[name[instance]]
keyword[def] identifier[read] ( identifier[cls] , identifier[proto] ): literal[string] identifier[instance] = identifier[object] . identifier[__new__] ( identifier[cls] ) identifier[super] ( identifier[TwoGramModel] , identifier[instance] ). identifier[__init__] ( identifier[proto] = identifier[proto] . identifier[modelBase] ) identifier[instance] . identifier[_logger] = identifier[opf_utils] . identifier[initLogger] ( identifier[instance] ) identifier[instance] . identifier[_reset] = identifier[proto] . identifier[reset] identifier[instance] . identifier[_hashToValueDict] ={ identifier[x] . identifier[hash] : identifier[x] . identifier[value] keyword[for] identifier[x] keyword[in] identifier[proto] . identifier[hashToValueDict] } identifier[instance] . identifier[_learningEnabled] = identifier[proto] . identifier[learningEnabled] identifier[instance] . identifier[_encoder] = identifier[encoders] . identifier[MultiEncoder] . identifier[read] ( identifier[proto] . identifier[encoder] ) identifier[instance] . identifier[_fieldNames] = identifier[instance] . identifier[_encoder] . identifier[getScalarNames] () identifier[instance] . identifier[_prevValues] = identifier[list] ( identifier[proto] . identifier[prevValues] ) identifier[instance] . identifier[_twoGramDicts] =[ identifier[dict] () keyword[for] identifier[_] keyword[in] identifier[xrange] ( identifier[len] ( identifier[proto] . identifier[twoGramDicts] ))] keyword[for] identifier[idx] , identifier[field] keyword[in] identifier[enumerate] ( identifier[proto] . identifier[twoGramDicts] ): keyword[for] identifier[entry] keyword[in] identifier[field] : identifier[prev] = keyword[None] keyword[if] identifier[entry] . identifier[value] ==- literal[int] keyword[else] identifier[entry] . identifier[value] identifier[instance] . identifier[_twoGramDicts] [ identifier[idx] ][ identifier[prev] ]= identifier[collections] . identifier[defaultdict] ( identifier[int] ) keyword[for] identifier[bucket] keyword[in] identifier[entry] . identifier[buckets] : identifier[instance] . identifier[_twoGramDicts] [ identifier[idx] ][ identifier[prev] ][ identifier[bucket] . identifier[index] ]= identifier[bucket] . identifier[count] keyword[return] identifier[instance]
def read(cls, proto): """ :param proto: capnp TwoGramModelProto message reader """ instance = object.__new__(cls) super(TwoGramModel, instance).__init__(proto=proto.modelBase) instance._logger = opf_utils.initLogger(instance) instance._reset = proto.reset instance._hashToValueDict = {x.hash: x.value for x in proto.hashToValueDict} instance._learningEnabled = proto.learningEnabled instance._encoder = encoders.MultiEncoder.read(proto.encoder) instance._fieldNames = instance._encoder.getScalarNames() instance._prevValues = list(proto.prevValues) instance._twoGramDicts = [dict() for _ in xrange(len(proto.twoGramDicts))] for (idx, field) in enumerate(proto.twoGramDicts): for entry in field: prev = None if entry.value == -1 else entry.value instance._twoGramDicts[idx][prev] = collections.defaultdict(int) for bucket in entry.buckets: instance._twoGramDicts[idx][prev][bucket.index] = bucket.count # depends on [control=['for'], data=['bucket']] # depends on [control=['for'], data=['entry']] # depends on [control=['for'], data=[]] return instance
def title(self, title): """Prints the title""" title = " What's it like out side {0}? ".format(title) click.secho("{:=^62}".format(title), fg=self.colors.WHITE) click.echo()
def function[title, parameter[self, title]]: constant[Prints the title] variable[title] assign[=] call[constant[ What's it like out side {0}? ].format, parameter[name[title]]] call[name[click].secho, parameter[call[constant[{:=^62}].format, parameter[name[title]]]]] call[name[click].echo, parameter[]]
keyword[def] identifier[title] ( identifier[self] , identifier[title] ): literal[string] identifier[title] = literal[string] . identifier[format] ( identifier[title] ) identifier[click] . identifier[secho] ( literal[string] . identifier[format] ( identifier[title] ), identifier[fg] = identifier[self] . identifier[colors] . identifier[WHITE] ) identifier[click] . identifier[echo] ()
def title(self, title): """Prints the title""" title = " What's it like out side {0}? ".format(title) click.secho('{:=^62}'.format(title), fg=self.colors.WHITE) click.echo()
def gdsii_hash(filename, engine=None): """ Calculate a hash value for a GDSII file. The hash is generated based only on the contents of the cells in the GDSII library, ignoring any timestamp records present in the file structure. Parameters ---------- filename : string Full path to the GDSII file. engine : hashlib-like engine The engine that executes the hashing algorithm. It must provide the methods ``update`` and ``hexdigest`` as defined in the hashlib module. If ``None``, the default ``hashlib.sha1()`` is used. Returns ------- out : string The hash corresponding to the library contents in hex format. """ with open(filename, 'rb') as fin: data = fin.read() contents = [] start = pos = 0 while pos < len(data): size, rec = struct.unpack('>HH', data[pos:pos + 4]) if rec == 0x0502: start = pos + 28 elif rec == 0x0700: contents.append(data[start:pos]) pos += size h = hashlib.sha1() if engine is None else engine for x in sorted(contents): h.update(x) return h.hexdigest()
def function[gdsii_hash, parameter[filename, engine]]: constant[ Calculate a hash value for a GDSII file. The hash is generated based only on the contents of the cells in the GDSII library, ignoring any timestamp records present in the file structure. Parameters ---------- filename : string Full path to the GDSII file. engine : hashlib-like engine The engine that executes the hashing algorithm. It must provide the methods ``update`` and ``hexdigest`` as defined in the hashlib module. If ``None``, the default ``hashlib.sha1()`` is used. Returns ------- out : string The hash corresponding to the library contents in hex format. ] with call[name[open], parameter[name[filename], constant[rb]]] begin[:] variable[data] assign[=] call[name[fin].read, parameter[]] variable[contents] assign[=] list[[]] variable[start] assign[=] constant[0] while compare[name[pos] less[<] call[name[len], parameter[name[data]]]] begin[:] <ast.Tuple object at 0x7da20c6e7a30> assign[=] call[name[struct].unpack, parameter[constant[>HH], call[name[data]][<ast.Slice object at 0x7da20c6e7d90>]]] if compare[name[rec] equal[==] constant[1282]] begin[:] variable[start] assign[=] binary_operation[name[pos] + constant[28]] <ast.AugAssign object at 0x7da20c6e52a0> variable[h] assign[=] <ast.IfExp object at 0x7da20c6e6530> for taget[name[x]] in starred[call[name[sorted], parameter[name[contents]]]] begin[:] call[name[h].update, parameter[name[x]]] return[call[name[h].hexdigest, parameter[]]]
keyword[def] identifier[gdsii_hash] ( identifier[filename] , identifier[engine] = keyword[None] ): literal[string] keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fin] : identifier[data] = identifier[fin] . identifier[read] () identifier[contents] =[] identifier[start] = identifier[pos] = literal[int] keyword[while] identifier[pos] < identifier[len] ( identifier[data] ): identifier[size] , identifier[rec] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[data] [ identifier[pos] : identifier[pos] + literal[int] ]) keyword[if] identifier[rec] == literal[int] : identifier[start] = identifier[pos] + literal[int] keyword[elif] identifier[rec] == literal[int] : identifier[contents] . identifier[append] ( identifier[data] [ identifier[start] : identifier[pos] ]) identifier[pos] += identifier[size] identifier[h] = identifier[hashlib] . identifier[sha1] () keyword[if] identifier[engine] keyword[is] keyword[None] keyword[else] identifier[engine] keyword[for] identifier[x] keyword[in] identifier[sorted] ( identifier[contents] ): identifier[h] . identifier[update] ( identifier[x] ) keyword[return] identifier[h] . identifier[hexdigest] ()
def gdsii_hash(filename, engine=None): """ Calculate a hash value for a GDSII file. The hash is generated based only on the contents of the cells in the GDSII library, ignoring any timestamp records present in the file structure. Parameters ---------- filename : string Full path to the GDSII file. engine : hashlib-like engine The engine that executes the hashing algorithm. It must provide the methods ``update`` and ``hexdigest`` as defined in the hashlib module. If ``None``, the default ``hashlib.sha1()`` is used. Returns ------- out : string The hash corresponding to the library contents in hex format. """ with open(filename, 'rb') as fin: data = fin.read() # depends on [control=['with'], data=['fin']] contents = [] start = pos = 0 while pos < len(data): (size, rec) = struct.unpack('>HH', data[pos:pos + 4]) if rec == 1282: start = pos + 28 # depends on [control=['if'], data=[]] elif rec == 1792: contents.append(data[start:pos]) # depends on [control=['if'], data=[]] pos += size # depends on [control=['while'], data=['pos']] h = hashlib.sha1() if engine is None else engine for x in sorted(contents): h.update(x) # depends on [control=['for'], data=['x']] return h.hexdigest()
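For context on the magic numbers in the row above: a GDSII record header is a 2-byte size followed by a 2-byte record type, so 0x0502 is BGNSTR (the start of a cell, whose body is exactly 24 bytes of modification and access timestamps, hence `start = pos + 28`) and 0x0700 is ENDSTR. Hashing each span from just past the timestamps to the cell's end record is what makes two saves of the same layout compare equal. A usage sketch with placeholder filenames:

# Two GDSII files whose cell contents match should hash identically
# even if they were written at different times:
print(gdsii_hash('layout_a.gds') == gdsii_hash('layout_b.gds'))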
def read_histogram(self): """Read and reset the histogram. The expected return is a dictionary containing the counts per bin, MToF for bins 1, 3, 5, and 7, temperature, pressure, the sampling period, the checksum, PM1, PM2.5, and PM10. **NOTE:** The sampling period for the OPCN1 seems to be incorrect. :returns: dictionary """ resp = [] data = {} # command byte command = 0x30 # Send the command byte self.cnxn.xfer([command]) # Wait 10 ms sleep(10e-3) # read the histogram for i in range(62): r = self.cnxn.xfer([0x00])[0] resp.append(r) # convert to real things and store in dictionary! data['Bin 0'] = self._16bit_unsigned(resp[0], resp[1]) data['Bin 1'] = self._16bit_unsigned(resp[2], resp[3]) data['Bin 2'] = self._16bit_unsigned(resp[4], resp[5]) data['Bin 3'] = self._16bit_unsigned(resp[6], resp[7]) data['Bin 4'] = self._16bit_unsigned(resp[8], resp[9]) data['Bin 5'] = self._16bit_unsigned(resp[10], resp[11]) data['Bin 6'] = self._16bit_unsigned(resp[12], resp[13]) data['Bin 7'] = self._16bit_unsigned(resp[14], resp[15]) data['Bin 8'] = self._16bit_unsigned(resp[16], resp[17]) data['Bin 9'] = self._16bit_unsigned(resp[18], resp[19]) data['Bin 10'] = self._16bit_unsigned(resp[20], resp[21]) data['Bin 11'] = self._16bit_unsigned(resp[22], resp[23]) data['Bin 12'] = self._16bit_unsigned(resp[24], resp[25]) data['Bin 13'] = self._16bit_unsigned(resp[26], resp[27]) data['Bin 14'] = self._16bit_unsigned(resp[28], resp[29]) data['Bin 15'] = self._16bit_unsigned(resp[30], resp[31]) data['Bin1 MToF'] = self._calculate_mtof(resp[32]) data['Bin3 MToF'] = self._calculate_mtof(resp[33]) data['Bin5 MToF'] = self._calculate_mtof(resp[34]) data['Bin7 MToF'] = self._calculate_mtof(resp[35]) data['Temperature'] = self._calculate_temp(resp[36:40]) data['Pressure'] = self._calculate_pressure(resp[40:44]) data['Sampling Period'] = self._calculate_period(resp[44:48]) data['Checksum'] = self._16bit_unsigned(resp[48], resp[49]) data['PM1'] = self._calculate_float(resp[50:54]) data['PM2.5'] = self._calculate_float(resp[54:58]) data['PM10'] = self._calculate_float(resp[58:]) # Calculate the sum of the histogram bins histogram_sum = data['Bin 0'] + data['Bin 1'] + data['Bin 2'] + \ data['Bin 3'] + data['Bin 4'] + data['Bin 5'] + data['Bin 6'] + \ data['Bin 7'] + data['Bin 8'] + data['Bin 9'] + data['Bin 10'] + \ data['Bin 11'] + data['Bin 12'] + data['Bin 13'] + data['Bin 14'] + \ data['Bin 15'] return data
def function[read_histogram, parameter[self]]: constant[Read and reset the histogram. The expected return is a dictionary containing the counts per bin, MToF for bins 1, 3, 5, and 7, temperature, pressure, the sampling period, the checksum, PM1, PM2.5, and PM10. **NOTE:** The sampling period for the OPCN1 seems to be incorrect. :returns: dictionary ] variable[resp] assign[=] list[[]] variable[data] assign[=] dictionary[[], []] variable[command] assign[=] constant[48] call[name[self].cnxn.xfer, parameter[list[[<ast.Name object at 0x7da1aff8f760>]]]] call[name[sleep], parameter[constant[0.01]]] for taget[name[i]] in starred[call[name[range], parameter[constant[62]]]] begin[:] variable[r] assign[=] call[call[name[self].cnxn.xfer, parameter[list[[<ast.Constant object at 0x7da1aff8cb50>]]]]][constant[0]] call[name[resp].append, parameter[name[r]]] call[name[data]][constant[Bin 0]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[0]], call[name[resp]][constant[1]]]] call[name[data]][constant[Bin 1]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[2]], call[name[resp]][constant[3]]]] call[name[data]][constant[Bin 2]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[4]], call[name[resp]][constant[5]]]] call[name[data]][constant[Bin 3]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[6]], call[name[resp]][constant[7]]]] call[name[data]][constant[Bin 4]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[8]], call[name[resp]][constant[9]]]] call[name[data]][constant[Bin 5]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[10]], call[name[resp]][constant[11]]]] call[name[data]][constant[Bin 6]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[12]], call[name[resp]][constant[13]]]] call[name[data]][constant[Bin 7]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[14]], call[name[resp]][constant[15]]]] call[name[data]][constant[Bin 8]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[16]], call[name[resp]][constant[17]]]] call[name[data]][constant[Bin 9]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[18]], call[name[resp]][constant[19]]]] call[name[data]][constant[Bin 10]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[20]], call[name[resp]][constant[21]]]] call[name[data]][constant[Bin 11]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[22]], call[name[resp]][constant[23]]]] call[name[data]][constant[Bin 12]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[24]], call[name[resp]][constant[25]]]] call[name[data]][constant[Bin 13]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[26]], call[name[resp]][constant[27]]]] call[name[data]][constant[Bin 14]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[28]], call[name[resp]][constant[29]]]] call[name[data]][constant[Bin 15]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[30]], call[name[resp]][constant[31]]]] call[name[data]][constant[Bin1 MToF]] assign[=] call[name[self]._calculate_mtof, parameter[call[name[resp]][constant[32]]]] call[name[data]][constant[Bin3 MToF]] assign[=] call[name[self]._calculate_mtof, parameter[call[name[resp]][constant[33]]]] call[name[data]][constant[Bin5 MToF]] assign[=] call[name[self]._calculate_mtof, parameter[call[name[resp]][constant[34]]]] call[name[data]][constant[Bin7 MToF]] assign[=] call[name[self]._calculate_mtof, parameter[call[name[resp]][constant[35]]]] call[name[data]][constant[Temperature]] assign[=] call[name[self]._calculate_temp, parameter[call[name[resp]][<ast.Slice object at 0x7da1afe18280>]]] call[name[data]][constant[Pressure]] assign[=] call[name[self]._calculate_pressure, parameter[call[name[resp]][<ast.Slice object at 0x7da1aff00100>]]] call[name[data]][constant[Sampling Period]] assign[=] call[name[self]._calculate_period, parameter[call[name[resp]][<ast.Slice object at 0x7da1aff00d00>]]] call[name[data]][constant[Checksum]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[resp]][constant[48]], call[name[resp]][constant[49]]]] call[name[data]][constant[PM1]] assign[=] call[name[self]._calculate_float, parameter[call[name[resp]][<ast.Slice object at 0x7da1affc0520>]]] call[name[data]][constant[PM2.5]] assign[=] call[name[self]._calculate_float, parameter[call[name[resp]][<ast.Slice object at 0x7da1affc2830>]]] call[name[data]][constant[PM10]] assign[=] call[name[self]._calculate_float, parameter[call[name[resp]][<ast.Slice object at 0x7da1affc01c0>]]] variable[histogram_sum] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[data]][constant[Bin 0]] + call[name[data]][constant[Bin 1]]] + call[name[data]][constant[Bin 2]]] + call[name[data]][constant[Bin 3]]] + call[name[data]][constant[Bin 4]]] + call[name[data]][constant[Bin 5]]] + call[name[data]][constant[Bin 6]]] + call[name[data]][constant[Bin 7]]] + call[name[data]][constant[Bin 8]]] + call[name[data]][constant[Bin 9]]] + call[name[data]][constant[Bin 10]]] + call[name[data]][constant[Bin 11]]] + call[name[data]][constant[Bin 12]]] + call[name[data]][constant[Bin 13]]] + call[name[data]][constant[Bin 14]]] + call[name[data]][constant[Bin 15]]] return[name[data]]
keyword[def] identifier[read_histogram] ( identifier[self] ): literal[string] identifier[resp] =[] identifier[data] ={} identifier[command] = literal[int] identifier[self] . identifier[cnxn] . identifier[xfer] ([ identifier[command] ]) identifier[sleep] ( literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[r] = identifier[self] . identifier[cnxn] . identifier[xfer] ([ literal[int] ])[ literal[int] ] identifier[resp] . identifier[append] ( identifier[r] ) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_mtof] ( identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_mtof] ( identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_mtof] ( identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_mtof] ( identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_temp] ( identifier[resp] [ literal[int] : literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_pressure] ( identifier[resp] [ literal[int] : literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_period] ( identifier[resp] [ literal[int] : literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_16bit_unsigned] ( identifier[resp] [ literal[int] ], identifier[resp] [ literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_float] ( identifier[resp] [ literal[int] : literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_float] ( identifier[resp] [ literal[int] : literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_float] ( identifier[resp] [ literal[int] :]) identifier[histogram_sum] = identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ]+ identifier[data] [ literal[string] ] keyword[return] identifier[data]
def read_histogram(self): """Read and reset the histogram. The expected return is a dictionary containing the counts per bin, MToF for bins 1, 3, 5, and 7, temperature, pressure, the sampling period, the checksum, PM1, PM2.5, and PM10. **NOTE:** The sampling period for the OPCN1 seems to be incorrect. :returns: dictionary """ resp = [] data = {} # command byte command = 48 # Send the command byte self.cnxn.xfer([command]) # Wait 10 ms sleep(0.01) # read the histogram for i in range(62): r = self.cnxn.xfer([0])[0] resp.append(r) # depends on [control=['for'], data=[]] # convert to real things and store in dictionary! data['Bin 0'] = self._16bit_unsigned(resp[0], resp[1]) data['Bin 1'] = self._16bit_unsigned(resp[2], resp[3]) data['Bin 2'] = self._16bit_unsigned(resp[4], resp[5]) data['Bin 3'] = self._16bit_unsigned(resp[6], resp[7]) data['Bin 4'] = self._16bit_unsigned(resp[8], resp[9]) data['Bin 5'] = self._16bit_unsigned(resp[10], resp[11]) data['Bin 6'] = self._16bit_unsigned(resp[12], resp[13]) data['Bin 7'] = self._16bit_unsigned(resp[14], resp[15]) data['Bin 8'] = self._16bit_unsigned(resp[16], resp[17]) data['Bin 9'] = self._16bit_unsigned(resp[18], resp[19]) data['Bin 10'] = self._16bit_unsigned(resp[20], resp[21]) data['Bin 11'] = self._16bit_unsigned(resp[22], resp[23]) data['Bin 12'] = self._16bit_unsigned(resp[24], resp[25]) data['Bin 13'] = self._16bit_unsigned(resp[26], resp[27]) data['Bin 14'] = self._16bit_unsigned(resp[28], resp[29]) data['Bin 15'] = self._16bit_unsigned(resp[30], resp[31]) data['Bin1 MToF'] = self._calculate_mtof(resp[32]) data['Bin3 MToF'] = self._calculate_mtof(resp[33]) data['Bin5 MToF'] = self._calculate_mtof(resp[34]) data['Bin7 MToF'] = self._calculate_mtof(resp[35]) data['Temperature'] = self._calculate_temp(resp[36:40]) data['Pressure'] = self._calculate_pressure(resp[40:44]) data['Sampling Period'] = self._calculate_period(resp[44:48]) data['Checksum'] = self._16bit_unsigned(resp[48], resp[49]) data['PM1'] = self._calculate_float(resp[50:54]) data['PM2.5'] = self._calculate_float(resp[54:58]) data['PM10'] = self._calculate_float(resp[58:]) # Calculate the sum of the histogram bins histogram_sum = data['Bin 0'] + data['Bin 1'] + data['Bin 2'] + data['Bin 3'] + data['Bin 4'] + data['Bin 5'] + data['Bin 6'] + data['Bin 7'] + data['Bin 8'] + data['Bin 9'] + data['Bin 10'] + data['Bin 11'] + data['Bin 12'] + data['Bin 13'] + data['Bin 14'] + data['Bin 15'] return data
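One loose end in the row above: `histogram_sum` is computed but never returned or checked. In the Alphasense OPC family the reported checksum is typically the least-significant 16 bits of the summed bin counts, so a validation step along these lines may be what was intended; this is an assumption, not the project's documented behavior:

# Hypothetical integrity check using the otherwise-unused histogram_sum:
if (histogram_sum & 0xFFFF) != data['Checksum']:
    raise RuntimeError('OPC histogram checksum mismatch; discard sample')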
def _print_speed(self): '''Print the current speed.''' if self._bandwidth_meter.num_samples: speed = self._bandwidth_meter.speed() if self._human_format: file_size_str = wpull.string.format_size(speed) else: file_size_str = '{:.1f} b'.format(speed * 8) speed_str = _('{preformatted_file_size}/s').format( preformatted_file_size=file_size_str ) else: speed_str = _('-- B/s') self._print(speed_str)
def function[_print_speed, parameter[self]]: constant[Print the current speed.] if name[self]._bandwidth_meter.num_samples begin[:] variable[speed] assign[=] call[name[self]._bandwidth_meter.speed, parameter[]] if name[self]._human_format begin[:] variable[file_size_str] assign[=] call[name[wpull].string.format_size, parameter[name[speed]]] variable[speed_str] assign[=] call[call[name[_], parameter[constant[{preformatted_file_size}/s]]].format, parameter[]] call[name[self]._print, parameter[name[speed_str]]]
keyword[def] identifier[_print_speed] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_bandwidth_meter] . identifier[num_samples] : identifier[speed] = identifier[self] . identifier[_bandwidth_meter] . identifier[speed] () keyword[if] identifier[self] . identifier[_human_format] : identifier[file_size_str] = identifier[wpull] . identifier[string] . identifier[format_size] ( identifier[speed] ) keyword[else] : identifier[file_size_str] = literal[string] . identifier[format] ( identifier[speed] * literal[int] ) identifier[speed_str] = identifier[_] ( literal[string] ). identifier[format] ( identifier[preformatted_file_size] = identifier[file_size_str] ) keyword[else] : identifier[speed_str] = identifier[_] ( literal[string] ) identifier[self] . identifier[_print] ( identifier[speed_str] )
def _print_speed(self): """Print the current speed.""" if self._bandwidth_meter.num_samples: speed = self._bandwidth_meter.speed() if self._human_format: file_size_str = wpull.string.format_size(speed) # depends on [control=['if'], data=[]] else: file_size_str = '{:.1f} b'.format(speed * 8) speed_str = _('{preformatted_file_size}/s').format(preformatted_file_size=file_size_str) # depends on [control=['if'], data=[]] else: speed_str = _('-- B/s') self._print(speed_str)
def _recv_keys(self, keyids, keyserver=None): """Import keys from a keyserver. :param str keyids: A space-delimited string containing the keyids to request. :param str keyserver: The keyserver to request the ``keyids`` from; defaults to `gnupg.GPG.keyserver`. """ if not keyserver: keyserver = self.keyserver args = ['--keyserver {0}'.format(keyserver), '--recv-keys {0}'.format(keyids)] log.info('Requesting keys from %s: %s' % (keyserver, keyids)) result = self._result_map['import'](self) proc = self._open_subprocess(args) self._collect_output(proc, result) log.debug('recv_keys result: %r', result.__dict__) return result
def function[_recv_keys, parameter[self, keyids, keyserver]]: constant[Import keys from a keyserver. :param str keyids: A space-delimited string containing the keyids to request. :param str keyserver: The keyserver to request the ``keyids`` from; defaults to `gnupg.GPG.keyserver`. ] if <ast.UnaryOp object at 0x7da18bccb430> begin[:] variable[keyserver] assign[=] name[self].keyserver variable[args] assign[=] list[[<ast.Call object at 0x7da18bcc8ee0>, <ast.Call object at 0x7da18bcc8c40>]] call[name[log].info, parameter[binary_operation[constant[Requesting keys from %s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bcc8820>, <ast.Name object at 0x7da18bcc8c10>]]]]] variable[result] assign[=] call[call[name[self]._result_map][constant[import]], parameter[name[self]]] variable[proc] assign[=] call[name[self]._open_subprocess, parameter[name[args]]] call[name[self]._collect_output, parameter[name[proc], name[result]]] call[name[log].debug, parameter[constant[recv_keys result: %r], name[result].__dict__]] return[name[result]]
keyword[def] identifier[_recv_keys] ( identifier[self] , identifier[keyids] , identifier[keyserver] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[keyserver] : identifier[keyserver] = identifier[self] . identifier[keyserver] identifier[args] =[ literal[string] . identifier[format] ( identifier[keyserver] ), literal[string] . identifier[format] ( identifier[keyids] )] identifier[log] . identifier[info] ( literal[string] %( identifier[keyserver] , identifier[keyids] )) identifier[result] = identifier[self] . identifier[_result_map] [ literal[string] ]( identifier[self] ) identifier[proc] = identifier[self] . identifier[_open_subprocess] ( identifier[args] ) identifier[self] . identifier[_collect_output] ( identifier[proc] , identifier[result] ) identifier[log] . identifier[debug] ( literal[string] , identifier[result] . identifier[__dict__] ) keyword[return] identifier[result]
def _recv_keys(self, keyids, keyserver=None): """Import keys from a keyserver. :param str keyids: A space-delimited string containing the keyids to request. :param str keyserver: The keyserver to request the ``keyids`` from; defaults to `gnupg.GPG.keyserver`. """ if not keyserver: keyserver = self.keyserver # depends on [control=['if'], data=[]] args = ['--keyserver {0}'.format(keyserver), '--recv-keys {0}'.format(keyids)] log.info('Requesting keys from %s: %s' % (keyserver, keyids)) result = self._result_map['import'](self) proc = self._open_subprocess(args) self._collect_output(proc, result) log.debug('recv_keys result: %r', result.__dict__) return result
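A call-shape sketch for the row above. The leading underscore marks `_recv_keys` as internal, so real callers would normally go through whatever public wrapper the GPG class exposes; `gpg`, the key id, and the keyserver here are placeholders:

result = gpg._recv_keys('0xDEADBEEF', keyserver='hkp://keys.gnupg.net')
print(result.__dict__)  # the import-result object collected above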
def create_project(self, key, name=None, assignee=None, type="Software", template_name=None): """Create a project with the specified parameters. :param key: Mandatory. Must match JIRA project key requirements, usually only 2-10 uppercase characters. :type: str :param name: If not specified it will use the key value. :type name: Optional[str] :param assignee: If not specified it will use current user. :type assignee: Optional[str] :param type: Determines the type of project should be created. :type type: Optional[str] :param template_name: is used to create a project based on one of the existing project templates. If `template_name` is not specified, then it should use one of the default values. :type template_name: Optional[str] :return: Should evaluate to False if it fails otherwise it will be the new project id. :rtype: Union[bool,int] """ if assignee is None: assignee = self.current_user() if name is None: name = key possible_templates = ['Basic', 'JIRA Classic', 'JIRA Default Schemes', 'Basic software development'] if template_name is not None: possible_templates = [template_name] # https://confluence.atlassian.com/jirakb/creating-a-project-via-rest-based-on-jira-default-schemes-744325852.html templates = self.templates() # TODO(ssbarnea): find a better logic to pick a default fallback template template_key = list(templates.values())[0]['projectTemplateModuleCompleteKey'] for template_name, template_dic in templates.items(): if template_name in possible_templates: template_key = template_dic['projectTemplateModuleCompleteKey'] break payload = {'name': name, 'key': key, 'keyEdited': 'false', # 'projectTemplate': 'com.atlassian.jira-core-project-templates:jira-issuetracking', # 'permissionScheme': '', 'projectTemplateWebItemKey': template_key, 'projectTemplateModuleKey': template_key, 'lead': assignee, # 'assigneeType': '2', } if self._version[0] > 6: # JIRA versions before 7 will throw an error if we specify type parameter payload['type'] = type headers = CaseInsensitiveDict( {'Content-Type': 'application/x-www-form-urlencoded'}) url = self._options['server'] + \ '/rest/project-templates/latest/templates' r = self._session.post(url, data=payload, headers=headers) if r.status_code == 200: r_json = json_loads(r) return r_json f = tempfile.NamedTemporaryFile( suffix='.html', prefix='python-jira-error-create-project-', delete=False) f.write(r.text) if self.logging: logging.error( "Unexpected result while running create project. Server response saved in %s for further investigation [HTTP response=%s]." % ( f.name, r.status_code)) return False
def function[create_project, parameter[self, key, name, assignee, type, template_name]]: constant[Create a project with the specified parameters. :param key: Mandatory. Must match JIRA project key requirements, usually only 2-10 uppercase characters. :type: str :param name: If not specified it will use the key value. :type name: Optional[str] :param assignee: If not specified it will use current user. :type assignee: Optional[str] :param type: Determines the type of project should be created. :type type: Optional[str] :param template_name: is used to create a project based on one of the existing project templates. If `template_name` is not specified, then it should use one of the default values. :type template_name: Optional[str] :return: Should evaluate to False if it fails otherwise it will be the new project id. :rtype: Union[bool,int] ] if compare[name[assignee] is constant[None]] begin[:] variable[assignee] assign[=] call[name[self].current_user, parameter[]] if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] name[key] variable[possible_templates] assign[=] list[[<ast.Constant object at 0x7da1b1f96fe0>, <ast.Constant object at 0x7da1b1f97dc0>, <ast.Constant object at 0x7da1b1f96a40>, <ast.Constant object at 0x7da1b1f94370>]] if compare[name[template_name] is_not constant[None]] begin[:] variable[possible_templates] assign[=] list[[<ast.Name object at 0x7da1b1f957b0>]] variable[templates] assign[=] call[name[self].templates, parameter[]] variable[template_key] assign[=] call[call[call[name[list], parameter[call[name[templates].values, parameter[]]]]][constant[0]]][constant[projectTemplateModuleCompleteKey]] for taget[tuple[[<ast.Name object at 0x7da1b21d54b0>, <ast.Name object at 0x7da1b21d54e0>]]] in starred[call[name[templates].items, parameter[]]] begin[:] if compare[name[template_name] in name[possible_templates]] begin[:] variable[template_key] assign[=] call[name[template_dic]][constant[projectTemplateModuleCompleteKey]] break variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b21d51b0>, <ast.Constant object at 0x7da1b21d55a0>, <ast.Constant object at 0x7da1b21d62f0>, <ast.Constant object at 0x7da1b21d45b0>, <ast.Constant object at 0x7da1b21d5750>, <ast.Constant object at 0x7da1b21d6d40>], [<ast.Name object at 0x7da1b21d6620>, <ast.Name object at 0x7da1b21d7700>, <ast.Constant object at 0x7da1b21d7160>, <ast.Name object at 0x7da1b21d75b0>, <ast.Name object at 0x7da1b21d7250>, <ast.Name object at 0x7da1b21d43a0>]] if compare[call[name[self]._version][constant[0]] greater[>] constant[6]] begin[:] call[name[payload]][constant[type]] assign[=] name[type] variable[headers] assign[=] call[name[CaseInsensitiveDict], parameter[dictionary[[<ast.Constant object at 0x7da1b21d4fa0>], [<ast.Constant object at 0x7da1b21d7070>]]]] variable[url] assign[=] binary_operation[call[name[self]._options][constant[server]] + constant[/rest/project-templates/latest/templates]] variable[r] assign[=] call[name[self]._session.post, parameter[name[url]]] if compare[name[r].status_code equal[==] constant[200]] begin[:] variable[r_json] assign[=] call[name[json_loads], parameter[name[r]]] return[name[r_json]] variable[f] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]] call[name[f].write, parameter[name[r].text]] if name[self].logging begin[:] call[name[logging].error, parameter[binary_operation[constant[Unexpected result while running create project. Server response saved in %s for further investigation [HTTP response=%s].] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b21d49d0>, <ast.Attribute object at 0x7da1b21d53f0>]]]]] return[constant[False]]
keyword[def] identifier[create_project] ( identifier[self] , identifier[key] , identifier[name] = keyword[None] , identifier[assignee] = keyword[None] , identifier[type] = literal[string] , identifier[template_name] = keyword[None] ): literal[string] keyword[if] identifier[assignee] keyword[is] keyword[None] : identifier[assignee] = identifier[self] . identifier[current_user] () keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = identifier[key] identifier[possible_templates] =[ literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] identifier[template_name] keyword[is] keyword[not] keyword[None] : identifier[possible_templates] =[ identifier[template_name] ] identifier[templates] = identifier[self] . identifier[templates] () identifier[template_key] = identifier[list] ( identifier[templates] . identifier[values] ())[ literal[int] ][ literal[string] ] keyword[for] identifier[template_name] , identifier[template_dic] keyword[in] identifier[templates] . identifier[items] (): keyword[if] identifier[template_name] keyword[in] identifier[possible_templates] : identifier[template_key] = identifier[template_dic] [ literal[string] ] keyword[break] identifier[payload] ={ literal[string] : identifier[name] , literal[string] : identifier[key] , literal[string] : literal[string] , literal[string] : identifier[template_key] , literal[string] : identifier[template_key] , literal[string] : identifier[assignee] , } keyword[if] identifier[self] . identifier[_version] [ literal[int] ]> literal[int] : identifier[payload] [ literal[string] ]= identifier[type] identifier[headers] = identifier[CaseInsensitiveDict] ( { literal[string] : literal[string] }) identifier[url] = identifier[self] . identifier[_options] [ literal[string] ]+ literal[string] identifier[r] = identifier[self] . identifier[_session] . identifier[post] ( identifier[url] , identifier[data] = identifier[payload] , identifier[headers] = identifier[headers] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : identifier[r_json] = identifier[json_loads] ( identifier[r] ) keyword[return] identifier[r_json] identifier[f] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[suffix] = literal[string] , identifier[prefix] = literal[string] , identifier[delete] = keyword[False] ) identifier[f] . identifier[write] ( identifier[r] . identifier[text] ) keyword[if] identifier[self] . identifier[logging] : identifier[logging] . identifier[error] ( literal[string] %( identifier[f] . identifier[name] , identifier[r] . identifier[status_code] )) keyword[return] keyword[False]
def create_project(self, key, name=None, assignee=None, type='Software', template_name=None): """Create a project with the specified parameters. :param key: Mandatory. Must match JIRA project key requirements, usually only 2-10 uppercase characters. :type: str :param name: If not specified it will use the key value. :type name: Optional[str] :param assignee: If not specified it will use current user. :type assignee: Optional[str] :param type: Determines the type of project should be created. :type type: Optional[str] :param template_name: is used to create a project based on one of the existing project templates. If `template_name` is not specified, then it should use one of the default values. :type template_name: Optional[str] :return: Should evaluate to False if it fails otherwise it will be the new project id. :rtype: Union[bool,int] """ if assignee is None: assignee = self.current_user() # depends on [control=['if'], data=['assignee']] if name is None: name = key # depends on [control=['if'], data=['name']] possible_templates = ['Basic', 'JIRA Classic', 'JIRA Default Schemes', 'Basic software development'] if template_name is not None: possible_templates = [template_name] # depends on [control=['if'], data=['template_name']] # https://confluence.atlassian.com/jirakb/creating-a-project-via-rest-based-on-jira-default-schemes-744325852.html templates = self.templates() # TODO(ssbarnea): find a better logic to pick a default fallback template template_key = list(templates.values())[0]['projectTemplateModuleCompleteKey'] for (template_name, template_dic) in templates.items(): if template_name in possible_templates: template_key = template_dic['projectTemplateModuleCompleteKey'] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # 'projectTemplate': 'com.atlassian.jira-core-project-templates:jira-issuetracking', # 'permissionScheme': '', # 'assigneeType': '2', payload = {'name': name, 'key': key, 'keyEdited': 'false', 'projectTemplateWebItemKey': template_key, 'projectTemplateModuleKey': template_key, 'lead': assignee} if self._version[0] > 6: # JIRA versions before 7 will throw an error if we specify type parameter payload['type'] = type # depends on [control=['if'], data=[]] headers = CaseInsensitiveDict({'Content-Type': 'application/x-www-form-urlencoded'}) url = self._options['server'] + '/rest/project-templates/latest/templates' r = self._session.post(url, data=payload, headers=headers) if r.status_code == 200: r_json = json_loads(r) return r_json # depends on [control=['if'], data=[]] f = tempfile.NamedTemporaryFile(suffix='.html', prefix='python-jira-error-create-project-', delete=False) f.write(r.text) if self.logging: logging.error('Unexpected result while running create project. Server response saved in %s for further investigation [HTTP response=%s].' % (f.name, r.status_code)) # depends on [control=['if'], data=[]] return False
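The row above is recognizably a python-jira client method. A hedged usage sketch through a client instance; the server URL and credentials are placeholders:

from jira import JIRA

jira = JIRA(server='https://jira.example.com',
            basic_auth=('user', 'api-token'))
project = jira.create_project('DEMO', name='Demo Project')
if project is False:
    print('creation failed; see the saved HTML error report')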
def structure_attrs_fromtuple(self, obj, cl): # type: (Tuple, Type[T]) -> T """Load an attrs class from a sequence (tuple).""" conv_obj = [] # A list of converter parameters. for a, value in zip(cl.__attrs_attrs__, obj): # type: ignore # We detect the type by the metadata. converted = self._structure_attr_from_tuple(a, a.name, value) conv_obj.append(converted) return cl(*conv_obj)
def function[structure_attrs_fromtuple, parameter[self, obj, cl]]: constant[Load an attrs class from a sequence (tuple).] variable[conv_obj] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20c993310>, <ast.Name object at 0x7da20c9909a0>]]] in starred[call[name[zip], parameter[name[cl].__attrs_attrs__, name[obj]]]] begin[:] variable[converted] assign[=] call[name[self]._structure_attr_from_tuple, parameter[name[a], name[a].name, name[value]]] call[name[conv_obj].append, parameter[name[converted]]] return[call[name[cl], parameter[<ast.Starred object at 0x7da1b07bdbd0>]]]
keyword[def] identifier[structure_attrs_fromtuple] ( identifier[self] , identifier[obj] , identifier[cl] ): literal[string] identifier[conv_obj] =[] keyword[for] identifier[a] , identifier[value] keyword[in] identifier[zip] ( identifier[cl] . identifier[__attrs_attrs__] , identifier[obj] ): identifier[converted] = identifier[self] . identifier[_structure_attr_from_tuple] ( identifier[a] , identifier[a] . identifier[name] , identifier[value] ) identifier[conv_obj] . identifier[append] ( identifier[converted] ) keyword[return] identifier[cl] (* identifier[conv_obj] )
def structure_attrs_fromtuple(self, obj, cl): # type: (Tuple, Type[T]) -> T 'Load an attrs class from a sequence (tuple).' conv_obj = [] # A list of converter parameters. for (a, value) in zip(cl.__attrs_attrs__, obj): # type: ignore # We detect the type by the metadata. converted = self._structure_attr_from_tuple(a, a.name, value) conv_obj.append(converted) # depends on [control=['for'], data=[]] return cl(*conv_obj)
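The row above is recognizably the tuple-structuring hook of a cattrs-style converter. A minimal end-to-end sketch, assuming an attrs class and a cattrs version that exposes this method on `Converter`:

import attr
import cattr

@attr.s
class Point(object):
    x = attr.ib(type=int)
    y = attr.ib(type=int)

converter = cattr.Converter()
print(converter.structure_attrs_fromtuple((1, 2), Point))  # Point(x=1, y=2)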