code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def get_last_sequence(self, cur=None):
    """
    Get the highest sequence number in this db.

    :param cur: optional database cursor to reuse; when None, a new
        cursor is opened on self.conn.
    :return: the largest value of the ``sequence`` column as an int,
        or 0 when the table is empty.
    """
    sql = 'SELECT sequence FROM {} ORDER BY sequence DESC LIMIT 1;'.format(self.subdomain_table)

    cursor = self.conn.cursor() if cur is None else cur

    db_query_execute(cursor, sql, ())

    row = cursor.fetchone()
    # fetchone() returns None on an empty table; catch only the errors
    # that an empty/short row can produce instead of a bare `except:`
    # that would also swallow unrelated bugs.
    try:
        last_seq = row[0]
    except (TypeError, IndexError):
        last_seq = 0

    return int(last_seq)
def function[get_last_sequence, parameter[self, cur]]: constant[ Get the highest sequence number in this db ] variable[sql] assign[=] call[constant[SELECT sequence FROM {} ORDER BY sequence DESC LIMIT 1;].format, parameter[name[self].subdomain_table]] variable[cursor] assign[=] constant[None] if compare[name[cur] is constant[None]] begin[:] variable[cursor] assign[=] call[name[self].conn.cursor, parameter[]] call[name[db_query_execute], parameter[name[cursor], name[sql], tuple[[]]]] variable[last_seq] assign[=] constant[None] <ast.Try object at 0x7da20c6a97b0> return[call[name[int], parameter[name[last_seq]]]]
keyword[def] identifier[get_last_sequence] ( identifier[self] , identifier[cur] = keyword[None] ): literal[string] identifier[sql] = literal[string] . identifier[format] ( identifier[self] . identifier[subdomain_table] ) identifier[cursor] = keyword[None] keyword[if] identifier[cur] keyword[is] keyword[None] : identifier[cursor] = identifier[self] . identifier[conn] . identifier[cursor] () keyword[else] : identifier[cursor] = identifier[cur] identifier[db_query_execute] ( identifier[cursor] , identifier[sql] ,()) identifier[last_seq] = keyword[None] keyword[try] : identifier[last_seq] = identifier[cursor] . identifier[fetchone] ()[ literal[int] ] keyword[except] : identifier[last_seq] = literal[int] keyword[return] identifier[int] ( identifier[last_seq] )
def get_last_sequence(self, cur=None): """ Get the highest sequence number in this db """ sql = 'SELECT sequence FROM {} ORDER BY sequence DESC LIMIT 1;'.format(self.subdomain_table) cursor = None if cur is None: cursor = self.conn.cursor() # depends on [control=['if'], data=[]] else: cursor = cur db_query_execute(cursor, sql, ()) last_seq = None try: last_seq = cursor.fetchone()[0] # depends on [control=['try'], data=[]] except: last_seq = 0 # depends on [control=['except'], data=[]] return int(last_seq)
def to_nullable_map(value):
    """
    Converts JSON string into map object or returns None when conversion
    is not possible.

    :param value: the JSON string to convert.

    :return: Map object value or None when conversion is not supported.
    """
    if value is None:
        return None

    # Parse JSON. json.loads raises ValueError (JSONDecodeError) for
    # malformed JSON and TypeError for non-string input; anything else
    # is a real bug and should propagate.
    try:
        value = json.loads(value)
        return RecursiveMapConverter.to_nullable_map(value)
    except (ValueError, TypeError):
        return None
def function[to_nullable_map, parameter[value]]: constant[ Converts JSON string into map object or returns null when conversion is not possible. :param value: the JSON string to convert. :return: Map object value or null when conversion is not supported. ] if compare[name[value] equal[==] constant[None]] begin[:] return[constant[None]] <ast.Try object at 0x7da1b1496ec0>
keyword[def] identifier[to_nullable_map] ( identifier[value] ): literal[string] keyword[if] identifier[value] == keyword[None] : keyword[return] keyword[None] keyword[try] : identifier[value] = identifier[json] . identifier[loads] ( identifier[value] ) keyword[return] identifier[RecursiveMapConverter] . identifier[to_nullable_map] ( identifier[value] ) keyword[except] : keyword[return] keyword[None]
def to_nullable_map(value): """ Converts JSON string into map object or returns null when conversion is not possible. :param value: the JSON string to convert. :return: Map object value or null when conversion is not supported. """ if value == None: return None # depends on [control=['if'], data=[]] # Parse JSON try: value = json.loads(value) return RecursiveMapConverter.to_nullable_map(value) # depends on [control=['try'], data=[]] except: return None # depends on [control=['except'], data=[]]
def search(self, query):
    """
    Search the Skype Directory for a user.

    Args:
        query (str): name to search for

    Returns:
        SkypeUser list: collection of possible results
    """
    # query the directory endpoint and pull the result list out of the
    # JSON payload (empty list when the key is absent)
    resp = self.skype.conn("GET", SkypeConnection.API_DIRECTORY,
                           auth=SkypeConnection.Auth.SkypeToken,
                           params={"searchstring": query, "requestId": "0"})
    found = resp.json().get("results", [])

    users = []
    for entry in found:
        users.append(SkypeUser.fromRaw(self.skype, entry.get("nodeProfileData", {})))
    return users
def function[search, parameter[self, query]]: constant[ Search the Skype Directory for a user. Args: query (str): name to search for Returns: SkypeUser list: collection of possible results ] variable[results] assign[=] call[call[call[name[self].skype.conn, parameter[constant[GET], name[SkypeConnection].API_DIRECTORY]].json, parameter[]].get, parameter[constant[results], list[[]]]] return[<ast.ListComp object at 0x7da18fe92aa0>]
keyword[def] identifier[search] ( identifier[self] , identifier[query] ): literal[string] identifier[results] = identifier[self] . identifier[skype] . identifier[conn] ( literal[string] , identifier[SkypeConnection] . identifier[API_DIRECTORY] , identifier[auth] = identifier[SkypeConnection] . identifier[Auth] . identifier[SkypeToken] , identifier[params] ={ literal[string] : identifier[query] , literal[string] : literal[string] }). identifier[json] (). identifier[get] ( literal[string] ,[]) keyword[return] [ identifier[SkypeUser] . identifier[fromRaw] ( identifier[self] . identifier[skype] , identifier[json] . identifier[get] ( literal[string] ,{})) keyword[for] identifier[json] keyword[in] identifier[results] ]
def search(self, query): """ Search the Skype Directory for a user. Args: query (str): name to search for Returns: SkypeUser list: collection of possible results """ results = self.skype.conn('GET', SkypeConnection.API_DIRECTORY, auth=SkypeConnection.Auth.SkypeToken, params={'searchstring': query, 'requestId': '0'}).json().get('results', []) return [SkypeUser.fromRaw(self.skype, json.get('nodeProfileData', {})) for json in results]
def bool(self, state):
    """
    Returns the Boolean evaluation of the clause with respect to a given state

    Parameters
    ----------
    state : dict
        Key-value mapping describing a Boolean state or assignment

    Returns
    -------
    boolean
        The evaluation of the clause with respect to the given state or
        assignment
    """
    # conjunction over all (node, polarity) literals; stop as soon as
    # one literal makes the clause false
    result = 1
    for node, polarity in self:
        if polarity == 1:
            term = state[node]
        else:
            term = not state[node]
        result = result and term
        if not result:
            break
    return result
def function[bool, parameter[self, state]]: constant[ Returns the Boolean evaluation of the clause with respect to a given state Parameters ---------- state : dict Key-value mapping describing a Boolean state or assignment Returns ------- boolean The evaluation of the clause with respect to the given state or assignment ] variable[value] assign[=] constant[1] for taget[tuple[[<ast.Name object at 0x7da1b0a70430>, <ast.Name object at 0x7da1b0a71bd0>]]] in starred[name[self]] begin[:] variable[value] assign[=] <ast.BoolOp object at 0x7da1b0a71900> if <ast.UnaryOp object at 0x7da1b0a70370> begin[:] break return[name[value]]
keyword[def] identifier[bool] ( identifier[self] , identifier[state] ): literal[string] identifier[value] = literal[int] keyword[for] identifier[source] , identifier[sign] keyword[in] identifier[self] : identifier[value] = identifier[value] keyword[and] ( identifier[state] [ identifier[source] ] keyword[if] identifier[sign] == literal[int] keyword[else] keyword[not] identifier[state] [ identifier[source] ]) keyword[if] keyword[not] identifier[value] : keyword[break] keyword[return] identifier[value]
def bool(self, state): """ Returns the Boolean evaluation of the clause with respect to a given state Parameters ---------- state : dict Key-value mapping describing a Boolean state or assignment Returns ------- boolean The evaluation of the clause with respect to the given state or assignment """ value = 1 for (source, sign) in self: value = value and (state[source] if sign == 1 else not state[source]) if not value: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return value
def perfect_platonic_per_pixel(N, R, scale=11, pos=None, zscale=1.0, returnpix=None):
    """
    Create a perfect platonic sphere of a given radius R by supersampling
    by a factor scale on a grid of size N. Scale must be odd.

    We are able to perfectly position these particles up to 1/scale.
    Therefore, let's only allow those types of shifts for now, but return
    the actual position used for the placement.

    Parameters
    ----------
    N : int
        Edge length of the cubic output grid.
    R : float
        Sphere radius, in pixels.
    scale : int
        Supersampling factor per axis; coerced to the next odd integer
        when an even value is given.
    pos : ndarray or None
        Desired sphere center; defaults to the grid center. The value is
        snapped to the nearest multiple of 1/scale.
    zscale : float
        Anisotropy factor applied to the first-axis distance.
    returnpix : list or None
        When equal to a pixel coordinate [x, y, z], additionally return
        the supersampled occupancy mask of that pixel (None when the
        coordinate is never visited).

    Returns
    -------
    (image, pos) or (image, pos, outpix) when returnpix is given.
    """
    # enforce odd scale size
    if scale % 2 != 1:
        scale += 1

    if pos is None:
        # place the default position in the center of the grid
        pos = np.array([(N - 1) / 2.0] * 3)

    # limit positions to those that are exact on the size 1./scale
    # positions have the form (d = divisions):
    #    p = N + m/d
    s = 1.0 / scale
    f = zscale**2

    i = pos.astype('int')
    p = i + s * ((pos - i) / s).astype('int')
    pos = p + 1e-10  # unfortunately needed to break ties

    # make the output arrays
    image = np.zeros((N,) * 3)
    # range (not Py2-only xrange) so this runs on Python 3
    x, y, z = np.meshgrid(*(range(N),) * 3, indexing='ij')

    # pre-initialize so a non-matching returnpix yields None instead of
    # raising NameError at the return statement
    outpix = None

    # for each real pixel in the image, integrate a bunch of superres pixels
    for x0, y0, z0 in zip(x.flatten(), y.flatten(), z.flatten()):
        # short-circuit things that are just too far away!
        ddd = np.sqrt(f * (x0 - pos[0])**2 + (y0 - pos[1])**2 + (z0 - pos[2])**2)

        if ddd > R + 4:
            image[x0, y0, z0] = 0.0
            continue

        # otherwise, build the local mesh and count the volume
        xp, yp, zp = np.meshgrid(
            *(np.linspace(c - 0.5 + s / 2, c + 0.5 - s / 2, scale, endpoint=True)
              for c in (x0, y0, z0)),
            indexing='ij'
        )
        ddd = np.sqrt(f * (xp - pos[0])**2 + (yp - pos[1])**2 + (zp - pos[2])**2)

        if returnpix is not None and returnpix == [x0, y0, z0]:
            outpix = 1.0 * (ddd < R)

        vol = (1.0 * (ddd < R) + 0.0 * (ddd == R)).sum()
        image[x0, y0, z0] = vol / float(scale**3)

    # use the same `is not None` test as the in-loop check, so e.g. an
    # empty-list returnpix behaves consistently
    if returnpix is not None:
        return image, pos, outpix
    return image, pos
def function[perfect_platonic_per_pixel, parameter[N, R, scale, pos, zscale, returnpix]]: constant[ Create a perfect platonic sphere of a given radius R by supersampling by a factor scale on a grid of size N. Scale must be odd. We are able to perfectly position these particles up to 1/scale. Therefore, let's only allow those types of shifts for now, but return the actual position used for the placement. ] if compare[binary_operation[name[scale] <ast.Mod object at 0x7da2590d6920> constant[2]] not_equal[!=] constant[1]] begin[:] <ast.AugAssign object at 0x7da18dc050c0> if compare[name[pos] is constant[None]] begin[:] variable[pos] assign[=] call[name[np].array, parameter[binary_operation[list[[<ast.BinOp object at 0x7da18dc065f0>]] * constant[3]]]] variable[s] assign[=] binary_operation[constant[1.0] / name[scale]] variable[f] assign[=] binary_operation[name[zscale] ** constant[2]] variable[i] assign[=] call[name[pos].astype, parameter[constant[int]]] variable[p] assign[=] binary_operation[name[i] + binary_operation[name[s] * call[binary_operation[binary_operation[name[pos] - name[i]] / name[s]].astype, parameter[constant[int]]]]] variable[pos] assign[=] binary_operation[name[p] + constant[1e-10]] variable[image] assign[=] call[name[np].zeros, parameter[binary_operation[tuple[[<ast.Name object at 0x7da18dc06d10>]] * constant[3]]]] <ast.Tuple object at 0x7da18dc04100> assign[=] call[name[np].meshgrid, parameter[<ast.Starred object at 0x7da18dc058a0>]] for taget[tuple[[<ast.Name object at 0x7da18dc07e80>, <ast.Name object at 0x7da18dc05780>, <ast.Name object at 0x7da18dc07760>]]] in starred[call[name[zip], parameter[call[name[x].flatten, parameter[]], call[name[y].flatten, parameter[]], call[name[z].flatten, parameter[]]]]] begin[:] variable[ddd] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[name[f] * binary_operation[binary_operation[name[x0] - call[name[pos]][constant[0]]] ** constant[2]]] + 
binary_operation[binary_operation[name[y0] - call[name[pos]][constant[1]]] ** constant[2]]] + binary_operation[binary_operation[name[z0] - call[name[pos]][constant[2]]] ** constant[2]]]]] if compare[name[ddd] greater[>] binary_operation[name[R] + constant[4]]] begin[:] call[name[image]][tuple[[<ast.Name object at 0x7da1b00f51b0>, <ast.Name object at 0x7da1b00f7a90>, <ast.Name object at 0x7da1b00f49d0>]]] assign[=] constant[0.0] continue <ast.Tuple object at 0x7da1b00f6860> assign[=] call[name[np].meshgrid, parameter[<ast.Starred object at 0x7da1b00f6800>]] variable[ddd] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[name[f] * binary_operation[binary_operation[name[xp] - call[name[pos]][constant[0]]] ** constant[2]]] + binary_operation[binary_operation[name[yp] - call[name[pos]][constant[1]]] ** constant[2]]] + binary_operation[binary_operation[name[zp] - call[name[pos]][constant[2]]] ** constant[2]]]]] if <ast.BoolOp object at 0x7da1b00f4580> begin[:] variable[outpix] assign[=] binary_operation[constant[1.0] * compare[name[ddd] less[<] name[R]]] variable[vol] assign[=] call[binary_operation[binary_operation[constant[1.0] * compare[name[ddd] less[<] name[R]]] + binary_operation[constant[0.0] * compare[name[ddd] equal[==] name[R]]]].sum, parameter[]] call[name[image]][tuple[[<ast.Name object at 0x7da1b00f6da0>, <ast.Name object at 0x7da1b00f6ad0>, <ast.Name object at 0x7da1b00f4340>]]] assign[=] binary_operation[name[vol] / call[name[float], parameter[binary_operation[name[scale] ** constant[3]]]]] if name[returnpix] begin[:] return[tuple[[<ast.Name object at 0x7da1b00f5120>, <ast.Name object at 0x7da1b00f4dc0>, <ast.Name object at 0x7da1b00f4820>]]] return[tuple[[<ast.Name object at 0x7da1b00f6770>, <ast.Name object at 0x7da1b00f5330>]]]
keyword[def] identifier[perfect_platonic_per_pixel] ( identifier[N] , identifier[R] , identifier[scale] = literal[int] , identifier[pos] = keyword[None] , identifier[zscale] = literal[int] , identifier[returnpix] = keyword[None] ): literal[string] keyword[if] identifier[scale] % literal[int] != literal[int] : identifier[scale] += literal[int] keyword[if] identifier[pos] keyword[is] keyword[None] : identifier[pos] = identifier[np] . identifier[array] ([( identifier[N] - literal[int] )/ literal[int] ]* literal[int] ) identifier[s] = literal[int] / identifier[scale] identifier[f] = identifier[zscale] ** literal[int] identifier[i] = identifier[pos] . identifier[astype] ( literal[string] ) identifier[p] = identifier[i] + identifier[s] *(( identifier[pos] - identifier[i] )/ identifier[s] ). identifier[astype] ( literal[string] ) identifier[pos] = identifier[p] + literal[int] identifier[image] = identifier[np] . identifier[zeros] (( identifier[N] ,)* literal[int] ) identifier[x] , identifier[y] , identifier[z] = identifier[np] . identifier[meshgrid] (*( identifier[xrange] ( identifier[N] ),)* literal[int] , identifier[indexing] = literal[string] ) keyword[for] identifier[x0] , identifier[y0] , identifier[z0] keyword[in] identifier[zip] ( identifier[x] . identifier[flatten] (), identifier[y] . identifier[flatten] (), identifier[z] . identifier[flatten] ()): identifier[ddd] = identifier[np] . identifier[sqrt] ( identifier[f] *( identifier[x0] - identifier[pos] [ literal[int] ])** literal[int] +( identifier[y0] - identifier[pos] [ literal[int] ])** literal[int] +( identifier[z0] - identifier[pos] [ literal[int] ])** literal[int] ) keyword[if] identifier[ddd] > identifier[R] + literal[int] : identifier[image] [ identifier[x0] , identifier[y0] , identifier[z0] ]= literal[int] keyword[continue] identifier[xp] , identifier[yp] , identifier[zp] = identifier[np] . identifier[meshgrid] ( *( identifier[np] . 
identifier[linspace] ( identifier[i] - literal[int] + identifier[s] / literal[int] , identifier[i] + literal[int] - identifier[s] / literal[int] , identifier[scale] , identifier[endpoint] = keyword[True] ) keyword[for] identifier[i] keyword[in] ( identifier[x0] , identifier[y0] , identifier[z0] )), identifier[indexing] = literal[string] ) identifier[ddd] = identifier[np] . identifier[sqrt] ( identifier[f] *( identifier[xp] - identifier[pos] [ literal[int] ])** literal[int] +( identifier[yp] - identifier[pos] [ literal[int] ])** literal[int] +( identifier[zp] - identifier[pos] [ literal[int] ])** literal[int] ) keyword[if] identifier[returnpix] keyword[is] keyword[not] keyword[None] keyword[and] identifier[returnpix] ==[ identifier[x0] , identifier[y0] , identifier[z0] ]: identifier[outpix] = literal[int] *( identifier[ddd] < identifier[R] ) identifier[vol] =( literal[int] *( identifier[ddd] < identifier[R] )+ literal[int] *( identifier[ddd] == identifier[R] )). identifier[sum] () identifier[image] [ identifier[x0] , identifier[y0] , identifier[z0] ]= identifier[vol] / identifier[float] ( identifier[scale] ** literal[int] ) keyword[if] identifier[returnpix] : keyword[return] identifier[image] , identifier[pos] , identifier[outpix] keyword[return] identifier[image] , identifier[pos]
def perfect_platonic_per_pixel(N, R, scale=11, pos=None, zscale=1.0, returnpix=None): """ Create a perfect platonic sphere of a given radius R by supersampling by a factor scale on a grid of size N. Scale must be odd. We are able to perfectly position these particles up to 1/scale. Therefore, let's only allow those types of shifts for now, but return the actual position used for the placement. """ # enforce odd scale size if scale % 2 != 1: scale += 1 # depends on [control=['if'], data=[]] if pos is None: # place the default position in the center of the grid pos = np.array([(N - 1) / 2.0] * 3) # depends on [control=['if'], data=['pos']] # limit positions to those that are exact on the size 1./scale # positions have the form (d = divisions): # p = N + m/d s = 1.0 / scale f = zscale ** 2 i = pos.astype('int') p = i + s * ((pos - i) / s).astype('int') pos = p + 1e-10 # unfortunately needed to break ties # make the output arrays image = np.zeros((N,) * 3) (x, y, z) = np.meshgrid(*(xrange(N),) * 3, indexing='ij') # for each real pixel in the image, integrate a bunch of superres pixels for (x0, y0, z0) in zip(x.flatten(), y.flatten(), z.flatten()): # short-circuit things that are just too far away! 
ddd = np.sqrt(f * (x0 - pos[0]) ** 2 + (y0 - pos[1]) ** 2 + (z0 - pos[2]) ** 2) if ddd > R + 4: image[x0, y0, z0] = 0.0 continue # depends on [control=['if'], data=[]] # otherwise, build the local mesh and count the volume (xp, yp, zp) = np.meshgrid(*(np.linspace(i - 0.5 + s / 2, i + 0.5 - s / 2, scale, endpoint=True) for i in (x0, y0, z0)), indexing='ij') ddd = np.sqrt(f * (xp - pos[0]) ** 2 + (yp - pos[1]) ** 2 + (zp - pos[2]) ** 2) if returnpix is not None and returnpix == [x0, y0, z0]: outpix = 1.0 * (ddd < R) # depends on [control=['if'], data=[]] vol = (1.0 * (ddd < R) + 0.0 * (ddd == R)).sum() image[x0, y0, z0] = vol / float(scale ** 3) # depends on [control=['for'], data=[]] #vol_true = 4./3*np.pi*R**3 #vol_real = image.sum() #print vol_true, vol_real, (vol_true - vol_real)/vol_true if returnpix: return (image, pos, outpix) # depends on [control=['if'], data=[]] return (image, pos)
def emitRecordMiddleDoubleClicked(self, item):
    """
    Emits the record clicked signal for the given item, provided the
    signals are not currently blocked.

    :param      item | <QTreeWidgetItem>
    """
    # ignore anything that is not a record item
    if not isinstance(item, XOrbRecordItem):
        return
    # respect the blocked-signals flag
    if self.signalsBlocked():
        return
    # emit that the record has been double clicked
    self.recordMiddleDoubleClicked.emit(item.record())
def function[emitRecordMiddleDoubleClicked, parameter[self, item]]: constant[ Emits the record clicked signal for the given item, provided the signals are not currently blocked. :param item | <QTreeWidgetItem> ] if <ast.BoolOp object at 0x7da18f09db10> begin[:] call[name[self].recordMiddleDoubleClicked.emit, parameter[call[name[item].record, parameter[]]]]
keyword[def] identifier[emitRecordMiddleDoubleClicked] ( identifier[self] , identifier[item] ): literal[string] keyword[if] identifier[isinstance] ( identifier[item] , identifier[XOrbRecordItem] ) keyword[and] keyword[not] identifier[self] . identifier[signalsBlocked] (): identifier[self] . identifier[recordMiddleDoubleClicked] . identifier[emit] ( identifier[item] . identifier[record] ())
def emitRecordMiddleDoubleClicked(self, item): """ Emits the record clicked signal for the given item, provided the signals are not currently blocked. :param item | <QTreeWidgetItem> """ # emit that the record has been double clicked if isinstance(item, XOrbRecordItem) and (not self.signalsBlocked()): self.recordMiddleDoubleClicked.emit(item.record()) # depends on [control=['if'], data=[]]
def get_next_objective(self):
    """Gets the next Objective in this list.

    return: (osid.learning.Objective) - the next Objective in this
            list. The has_next() method should be used to test that a
            next Objective is available before calling this method.
    raise:  IllegalState - no more elements available in this list
    raise:  OperationFailed - unable to complete request
    compliance: mandatory - This method must be implemented.

    """
    try:
        next_object = next(self)
    except StopIteration:
        raise IllegalState('no more elements available in this list')
    except Exception as ex:  # Need to specify exceptions here!
        # chain the underlying error so the root cause is preserved in
        # the traceback instead of being silently discarded
        raise OperationFailed() from ex
    else:
        return next_object
def function[get_next_objective, parameter[self]]: constant[Gets the next Objective in this list. return: (osid.learning.Objective) - the next Objective in this list. The has_next() method should be used to test that a next Objective is available before calling this method. raise: IllegalState - no more elements available in this list raise: OperationFailed - unable to complete request compliance: mandatory - This method must be implemented. ] <ast.Try object at 0x7da1b0a66dd0>
keyword[def] identifier[get_next_objective] ( identifier[self] ): literal[string] keyword[try] : identifier[next_object] = identifier[next] ( identifier[self] ) keyword[except] identifier[StopIteration] : keyword[raise] identifier[IllegalState] ( literal[string] ) keyword[except] identifier[Exception] : keyword[raise] identifier[OperationFailed] () keyword[else] : keyword[return] identifier[next_object]
def get_next_objective(self): """Gets the next Objective in this list. return: (osid.learning.Objective) - the next Objective in this list. The has_next() method should be used to test that a next Objective is available before calling this method. raise: IllegalState - no more elements available in this list raise: OperationFailed - unable to complete request compliance: mandatory - This method must be implemented. """ try: next_object = next(self) # depends on [control=['try'], data=[]] except StopIteration: raise IllegalState('no more elements available in this list') # depends on [control=['except'], data=[]] except Exception: # Need to specify exceptions here! raise OperationFailed() # depends on [control=['except'], data=[]] else: return next_object
def password(self, password):
    """ Encode a string and set as password """
    from boiler.user.util.passlib import passlib_context
    # hash the stringified value; the plaintext is never stored
    hashed = passlib_context.encrypt(str(password))
    self._password = hashed
def function[password, parameter[self, password]]: constant[ Encode a string and set as password ] from relative_module[boiler.user.util.passlib] import module[passlib_context] variable[password] assign[=] call[name[str], parameter[name[password]]] variable[encrypted] assign[=] call[name[passlib_context].encrypt, parameter[name[password]]] name[self]._password assign[=] name[encrypted]
keyword[def] identifier[password] ( identifier[self] , identifier[password] ): literal[string] keyword[from] identifier[boiler] . identifier[user] . identifier[util] . identifier[passlib] keyword[import] identifier[passlib_context] identifier[password] = identifier[str] ( identifier[password] ) identifier[encrypted] = identifier[passlib_context] . identifier[encrypt] ( identifier[password] ) identifier[self] . identifier[_password] = identifier[encrypted]
def password(self, password): """ Encode a string and set as password """ from boiler.user.util.passlib import passlib_context password = str(password) encrypted = passlib_context.encrypt(password) self._password = encrypted
def get_resources(cls):
    """Returns Ext Resources."""
    # map each resource collection name onto its singular form
    plurals = resource_helper.build_plural_mappings({}, RESOURCE_ATTRIBUTE_MAP)
    # attr.PLURALS.update(plurals)
    resources = resource_helper.build_resource_info(
        plurals, RESOURCE_ATTRIBUTE_MAP, None, register_quota=True)
    return resources
def function[get_resources, parameter[cls]]: constant[Returns Ext Resources.] variable[plural_mappings] assign[=] call[name[resource_helper].build_plural_mappings, parameter[dictionary[[], []], name[RESOURCE_ATTRIBUTE_MAP]]] return[call[name[resource_helper].build_resource_info, parameter[name[plural_mappings], name[RESOURCE_ATTRIBUTE_MAP], constant[None]]]]
keyword[def] identifier[get_resources] ( identifier[cls] ): literal[string] identifier[plural_mappings] = identifier[resource_helper] . identifier[build_plural_mappings] ( {}, identifier[RESOURCE_ATTRIBUTE_MAP] ) keyword[return] identifier[resource_helper] . identifier[build_resource_info] ( identifier[plural_mappings] , identifier[RESOURCE_ATTRIBUTE_MAP] , keyword[None] , identifier[register_quota] = keyword[True] )
def get_resources(cls): """Returns Ext Resources.""" plural_mappings = resource_helper.build_plural_mappings({}, RESOURCE_ATTRIBUTE_MAP) # attr.PLURALS.update(plural_mappings) return resource_helper.build_resource_info(plural_mappings, RESOURCE_ATTRIBUTE_MAP, None, register_quota=True)
def get_entry_compact_text_repr(entry, entries):
    """If the entry has a text value, return that. If the entry has a
    source_from value, return the text value of the source. Otherwise,
    return None."""
    text = get_shortest_text_value(entry)
    if text is not None:
        return text

    sources = get_sourced_from(entry)
    if sources is None:
        return None

    # There are a lot of references to this entity, each of which refer
    # to it by a different text label. For the sake of visualization,
    # let's pick one of these labels (in this case, the shortest one)
    labels = [get_shortest_text_value(entries[src]) for src in sources]
    return get_shortest_string(labels)
def function[get_entry_compact_text_repr, parameter[entry, entries]]: constant[If the entry has a text value, return that. If the entry has a source_from value, return the text value of the source. Otherwise, return None.] variable[text] assign[=] call[name[get_shortest_text_value], parameter[name[entry]]] if compare[name[text] is_not constant[None]] begin[:] return[name[text]]
keyword[def] identifier[get_entry_compact_text_repr] ( identifier[entry] , identifier[entries] ): literal[string] identifier[text] = identifier[get_shortest_text_value] ( identifier[entry] ) keyword[if] identifier[text] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[text] keyword[else] : identifier[sources] = identifier[get_sourced_from] ( identifier[entry] ) keyword[if] identifier[sources] keyword[is] keyword[not] keyword[None] : identifier[texts] =[] keyword[for] identifier[source] keyword[in] identifier[sources] : identifier[source_entry] = identifier[entries] [ identifier[source] ] identifier[texts] . identifier[append] ( identifier[get_shortest_text_value] ( identifier[source_entry] )) keyword[return] identifier[get_shortest_string] ( identifier[texts] )
def get_entry_compact_text_repr(entry, entries): """If the entry has a text value, return that. If the entry has a source_from value, return the text value of the source. Otherwise, return None.""" text = get_shortest_text_value(entry) if text is not None: return text # depends on [control=['if'], data=['text']] else: sources = get_sourced_from(entry) # There are a lot of references to this entity, each of which refer # to it by a different text label. For the sake of visualization, # let's pick one of these labels (in this case, the shortest one) if sources is not None: texts = [] for source in sources: source_entry = entries[source] texts.append(get_shortest_text_value(source_entry)) # depends on [control=['for'], data=['source']] return get_shortest_string(texts) # depends on [control=['if'], data=['sources']]
def _get_weekly_date_range(self, metric_date, delta):
    """
    Gets the range of years that we need to use as keys to get metrics
    from redis.
    """
    end_date = metric_date + delta
    # for the weekly keys, we only care about the year: emit Jan 1 of
    # every additional calendar year the range spans beyond the first
    extra_years = range(metric_date.year + 1, end_date.year + 1)
    return [metric_date] + [datetime.date(year=y, month=1, day=1)
                            for y in extra_years]
def function[_get_weekly_date_range, parameter[self, metric_date, delta]]: constant[ Gets the range of years that we need to use as keys to get metrics from redis. ] variable[dates] assign[=] list[[<ast.Name object at 0x7da20c990ac0>]] variable[end_date] assign[=] binary_operation[name[metric_date] + name[delta]] variable[spanning_years] assign[=] binary_operation[name[end_date].year - name[metric_date].year] for taget[name[i]] in starred[call[name[range], parameter[name[spanning_years]]]] begin[:] call[name[dates].append, parameter[call[name[datetime].date, parameter[]]]] return[name[dates]]
keyword[def] identifier[_get_weekly_date_range] ( identifier[self] , identifier[metric_date] , identifier[delta] ): literal[string] identifier[dates] =[ identifier[metric_date] ] identifier[end_date] = identifier[metric_date] + identifier[delta] identifier[spanning_years] = identifier[end_date] . identifier[year] - identifier[metric_date] . identifier[year] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[spanning_years] ): identifier[dates] . identifier[append] ( identifier[datetime] . identifier[date] ( identifier[year] = identifier[metric_date] . identifier[year] +( identifier[i] + literal[int] ), identifier[month] = literal[int] , identifier[day] = literal[int] )) keyword[return] identifier[dates]
def _get_weekly_date_range(self, metric_date, delta): """ Gets the range of years that we need to use as keys to get metrics from redis. """ dates = [metric_date] end_date = metric_date + delta #Figure out how many years our metric range spans spanning_years = end_date.year - metric_date.year for i in range(spanning_years): #for the weekly keys, we only care about the year dates.append(datetime.date(year=metric_date.year + (i + 1), month=1, day=1)) # depends on [control=['for'], data=['i']] return dates
def citedReferences(self, uid, count=100, offset=1, retrieveParameters=None):
    """The citedReferences operation returns references cited by an
    article identified by a unique identifier. You may specify only one
    identifier per request.

    :uid: Thomson Reuters unique record identifier

    :count: Number of records to display in the result. Cannot be less
            than 0 and cannot be greater than 100. If count is 0 then
            only the summary information will be returned.

    :offset: First record in results to return. Must be greater than
             zero

    :retrieveParameters: Retrieve parameters. If omitted the result of
                         make_retrieveParameters(offset, count, 'RS',
                         'D') is used.
    """
    # fall back to the default retrieve parameters when none are given
    # (truthiness check kept so an empty value also triggers the default)
    params = retrieveParameters or self.make_retrieveParameters(offset, count)
    return self._search.service.citedReferences(
        databaseId='WOS',
        uid=uid,
        queryLanguage='en',
        retrieveParameters=params,
    )
def store(self, installed_stuff, metadata, interpreter, options): """Store the virtualenv metadata for the indicated installed_stuff.""" new_content = { 'timestamp': int(time.mktime(time.localtime())), 'installed': installed_stuff, 'metadata': metadata, 'interpreter': interpreter, 'options': options } logger.debug("Storing installed=%s metadata=%s interpreter=%s options=%s", installed_stuff, metadata, interpreter, options) with filelock(self.lockpath): self._write_cache([json.dumps(new_content)], append=True)
def function[store, parameter[self, installed_stuff, metadata, interpreter, options]]: constant[Store the virtualenv metadata for the indicated installed_stuff.] variable[new_content] assign[=] dictionary[[<ast.Constant object at 0x7da1b0d12920>, <ast.Constant object at 0x7da1b0d10f40>, <ast.Constant object at 0x7da1b0d10280>, <ast.Constant object at 0x7da1b0d10760>, <ast.Constant object at 0x7da1b0d104c0>], [<ast.Call object at 0x7da1b0d125c0>, <ast.Name object at 0x7da1b0d13250>, <ast.Name object at 0x7da1b0d114e0>, <ast.Name object at 0x7da1b0d10850>, <ast.Name object at 0x7da1b0d11990>]] call[name[logger].debug, parameter[constant[Storing installed=%s metadata=%s interpreter=%s options=%s], name[installed_stuff], name[metadata], name[interpreter], name[options]]] with call[name[filelock], parameter[name[self].lockpath]] begin[:] call[name[self]._write_cache, parameter[list[[<ast.Call object at 0x7da1b0d10b20>]]]]
keyword[def] identifier[store] ( identifier[self] , identifier[installed_stuff] , identifier[metadata] , identifier[interpreter] , identifier[options] ): literal[string] identifier[new_content] ={ literal[string] : identifier[int] ( identifier[time] . identifier[mktime] ( identifier[time] . identifier[localtime] ())), literal[string] : identifier[installed_stuff] , literal[string] : identifier[metadata] , literal[string] : identifier[interpreter] , literal[string] : identifier[options] } identifier[logger] . identifier[debug] ( literal[string] , identifier[installed_stuff] , identifier[metadata] , identifier[interpreter] , identifier[options] ) keyword[with] identifier[filelock] ( identifier[self] . identifier[lockpath] ): identifier[self] . identifier[_write_cache] ([ identifier[json] . identifier[dumps] ( identifier[new_content] )], identifier[append] = keyword[True] )
def store(self, installed_stuff, metadata, interpreter, options): """Store the virtualenv metadata for the indicated installed_stuff.""" new_content = {'timestamp': int(time.mktime(time.localtime())), 'installed': installed_stuff, 'metadata': metadata, 'interpreter': interpreter, 'options': options} logger.debug('Storing installed=%s metadata=%s interpreter=%s options=%s', installed_stuff, metadata, interpreter, options) with filelock(self.lockpath): self._write_cache([json.dumps(new_content)], append=True) # depends on [control=['with'], data=[]]
async def unsafe_ask_async(self, patch_stdout: bool = False) -> Any: """Ask the question using asyncio and return user response. Does not catch keyboard interrupts.""" if not utils.ACTIVATED_ASYNC_MODE: await utils.activate_prompt_toolkit_async_mode() if patch_stdout: # with prompt_toolkit.patch_stdout.patch_stdout(): return await self.application.run_async().to_asyncio_future() else: return await self.application.run_async().to_asyncio_future()
<ast.AsyncFunctionDef object at 0x7da1b07af310>
keyword[async] keyword[def] identifier[unsafe_ask_async] ( identifier[self] , identifier[patch_stdout] : identifier[bool] = keyword[False] )-> identifier[Any] : literal[string] keyword[if] keyword[not] identifier[utils] . identifier[ACTIVATED_ASYNC_MODE] : keyword[await] identifier[utils] . identifier[activate_prompt_toolkit_async_mode] () keyword[if] identifier[patch_stdout] : keyword[return] keyword[await] identifier[self] . identifier[application] . identifier[run_async] (). identifier[to_asyncio_future] () keyword[else] : keyword[return] keyword[await] identifier[self] . identifier[application] . identifier[run_async] (). identifier[to_asyncio_future] ()
async def unsafe_ask_async(self, patch_stdout: bool=False) -> Any: """Ask the question using asyncio and return user response. Does not catch keyboard interrupts.""" if not utils.ACTIVATED_ASYNC_MODE: await utils.activate_prompt_toolkit_async_mode() # depends on [control=['if'], data=[]] if patch_stdout: # with prompt_toolkit.patch_stdout.patch_stdout(): return await self.application.run_async().to_asyncio_future() # depends on [control=['if'], data=[]] else: return await self.application.run_async().to_asyncio_future()
def get_times(self): """ Return a list of occurrance times of the events :return: list of times """ if not self.n: return list() ret = list() for item in self._event_times: ret += list(self.__dict__[item]) return ret + list(matrix(ret) - 1e-6)
def function[get_times, parameter[self]]: constant[ Return a list of occurrance times of the events :return: list of times ] if <ast.UnaryOp object at 0x7da18eb56230> begin[:] return[call[name[list], parameter[]]] variable[ret] assign[=] call[name[list], parameter[]] for taget[name[item]] in starred[name[self]._event_times] begin[:] <ast.AugAssign object at 0x7da18eb55990> return[binary_operation[name[ret] + call[name[list], parameter[binary_operation[call[name[matrix], parameter[name[ret]]] - constant[1e-06]]]]]]
keyword[def] identifier[get_times] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[n] : keyword[return] identifier[list] () identifier[ret] = identifier[list] () keyword[for] identifier[item] keyword[in] identifier[self] . identifier[_event_times] : identifier[ret] += identifier[list] ( identifier[self] . identifier[__dict__] [ identifier[item] ]) keyword[return] identifier[ret] + identifier[list] ( identifier[matrix] ( identifier[ret] )- literal[int] )
def get_times(self): """ Return a list of occurrance times of the events :return: list of times """ if not self.n: return list() # depends on [control=['if'], data=[]] ret = list() for item in self._event_times: ret += list(self.__dict__[item]) # depends on [control=['for'], data=['item']] return ret + list(matrix(ret) - 1e-06)
def register(self, resource_class, content_type, configuration=None):
    """
    Registers a representer factory for the given combination of resource
    class and content type.

    :param resource_class: resource class to register a representer for;
      must be a subclass of :class:`Resource`.
    :param content_type: content type key; a representer class must
      already have been registered for it.
    :param configuration: representer configuration. A default instance
      will be created if this is not given.
    :type configuration:
      :class:`everest.representers.config.RepresenterConfiguration`
    :raises ValueError: if ``resource_class`` is not a resource class, or
      if no representer class is registered for ``content_type``.
    """
    if not issubclass(resource_class, Resource):
        raise ValueError('Representers can only be registered for '
                         'resource classes (got: %s).' % resource_class)
    if not content_type in self.__rpr_classes:
        raise ValueError('No representer class has been registered for '
                         'content type "%s".' % content_type)
    # Register a factory resource -> representer for the given combination
    # of resource class and content type.
    rpr_cls = self.__rpr_classes[content_type]
    self.__rpr_factories[(resource_class, content_type)] = \
        rpr_cls.create_from_resource_class
    if issubclass(rpr_cls, MappingResourceRepresenter):
        # Mapping-based representers also need an attribute mapping;
        # create or update one for this resource class.
        mp_reg = self.__mp_regs[content_type]
        mp = mp_reg.find_mapping(resource_class)
        if mp is None:
            # No mapping was registered yet for this resource class or any
            # of its base classes; create a new one on the fly.
            new_mp = mp_reg.create_mapping(resource_class, configuration)
        elif not configuration is None:
            if resource_class is mp.mapped_class:
                # We have additional configuration for an existing mapping.
                mp.configuration.update(configuration)
                new_mp = mp
            else:
                # We have a derived class with additional configuration:
                # clone the base mapping's configuration, then layer the
                # new settings on top of it.
                new_mp = mp_reg.create_mapping(
                    resource_class, configuration=mp.configuration)
                new_mp.configuration.update(configuration)
        elif not resource_class is mp.mapped_class:
            # We have a derived class without additional configuration.
            new_mp = mp_reg.create_mapping(resource_class,
                                           configuration=mp.configuration)
        else:
            # We found a dynamically created mapping for the right class
            # without additional configuration; do not create a new one.
            new_mp = None
        if not new_mp is None:
            # Store the new (or updated) mapping.
            mp_reg.set_mapping(new_mp)
def function[register, parameter[self, resource_class, content_type, configuration]]: constant[ Registers a representer factory for the given combination of resource class and content type. :param configuration: representer configuration. A default instance will be created if this is not given. :type configuration: :class:`everest.representers.config.RepresenterConfiguration` ] if <ast.UnaryOp object at 0x7da1b27f58a0> begin[:] <ast.Raise object at 0x7da1b27f7490> if <ast.UnaryOp object at 0x7da1b27f7130> begin[:] <ast.Raise object at 0x7da1b27f49d0> variable[rpr_cls] assign[=] call[name[self].__rpr_classes][name[content_type]] call[name[self].__rpr_factories][tuple[[<ast.Name object at 0x7da1b287f010>, <ast.Name object at 0x7da1b287de40>]]] assign[=] name[rpr_cls].create_from_resource_class if call[name[issubclass], parameter[name[rpr_cls], name[MappingResourceRepresenter]]] begin[:] variable[mp_reg] assign[=] call[name[self].__mp_regs][name[content_type]] variable[mp] assign[=] call[name[mp_reg].find_mapping, parameter[name[resource_class]]] if compare[name[mp] is constant[None]] begin[:] variable[new_mp] assign[=] call[name[mp_reg].create_mapping, parameter[name[resource_class], name[configuration]]] if <ast.UnaryOp object at 0x7da1b28beaa0> begin[:] call[name[mp_reg].set_mapping, parameter[name[new_mp]]]
keyword[def] identifier[register] ( identifier[self] , identifier[resource_class] , identifier[content_type] , identifier[configuration] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[issubclass] ( identifier[resource_class] , identifier[Resource] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] % identifier[resource_class] ) keyword[if] keyword[not] identifier[content_type] keyword[in] identifier[self] . identifier[__rpr_classes] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] % identifier[content_type] ) identifier[rpr_cls] = identifier[self] . identifier[__rpr_classes] [ identifier[content_type] ] identifier[self] . identifier[__rpr_factories] [( identifier[resource_class] , identifier[content_type] )]= identifier[rpr_cls] . identifier[create_from_resource_class] keyword[if] identifier[issubclass] ( identifier[rpr_cls] , identifier[MappingResourceRepresenter] ): identifier[mp_reg] = identifier[self] . identifier[__mp_regs] [ identifier[content_type] ] identifier[mp] = identifier[mp_reg] . identifier[find_mapping] ( identifier[resource_class] ) keyword[if] identifier[mp] keyword[is] keyword[None] : identifier[new_mp] = identifier[mp_reg] . identifier[create_mapping] ( identifier[resource_class] , identifier[configuration] ) keyword[elif] keyword[not] identifier[configuration] keyword[is] keyword[None] : keyword[if] identifier[resource_class] keyword[is] identifier[mp] . identifier[mapped_class] : identifier[mp] . identifier[configuration] . identifier[update] ( identifier[configuration] ) identifier[new_mp] = identifier[mp] keyword[else] : identifier[new_mp] = identifier[mp_reg] . identifier[create_mapping] ( identifier[resource_class] , identifier[configuration] = identifier[mp] . identifier[configuration] ) identifier[new_mp] . identifier[configuration] . identifier[update] ( identifier[configuration] ) keyword[elif] keyword[not] identifier[resource_class] keyword[is] identifier[mp] . 
identifier[mapped_class] : identifier[new_mp] = identifier[mp_reg] . identifier[create_mapping] ( identifier[resource_class] , identifier[configuration] = identifier[mp] . identifier[configuration] ) keyword[else] : identifier[new_mp] = keyword[None] keyword[if] keyword[not] identifier[new_mp] keyword[is] keyword[None] : identifier[mp_reg] . identifier[set_mapping] ( identifier[new_mp] )
def register(self, resource_class, content_type, configuration=None): """ Registers a representer factory for the given combination of resource class and content type. :param configuration: representer configuration. A default instance will be created if this is not given. :type configuration: :class:`everest.representers.config.RepresenterConfiguration` """ if not issubclass(resource_class, Resource): raise ValueError('Representers can only be registered for resource classes (got: %s).' % resource_class) # depends on [control=['if'], data=[]] if not content_type in self.__rpr_classes: raise ValueError('No representer class has been registered for content type "%s".' % content_type) # depends on [control=['if'], data=[]] # Register a factory resource -> representer for the given combination # of resource class and content type. rpr_cls = self.__rpr_classes[content_type] self.__rpr_factories[resource_class, content_type] = rpr_cls.create_from_resource_class if issubclass(rpr_cls, MappingResourceRepresenter): # Create or update an attribute mapping. mp_reg = self.__mp_regs[content_type] mp = mp_reg.find_mapping(resource_class) if mp is None: # No mapping was registered yet for this resource class or any # of its base classes; create a new one on the fly. new_mp = mp_reg.create_mapping(resource_class, configuration) # depends on [control=['if'], data=[]] elif not configuration is None: if resource_class is mp.mapped_class: # We have additional configuration for an existing mapping. mp.configuration.update(configuration) new_mp = mp # depends on [control=['if'], data=[]] else: # We have a derived class with additional configuration. new_mp = mp_reg.create_mapping(resource_class, configuration=mp.configuration) new_mp.configuration.update(configuration) # depends on [control=['if'], data=[]] elif not resource_class is mp.mapped_class: # We have a derived class without additional configuration. 
new_mp = mp_reg.create_mapping(resource_class, configuration=mp.configuration) # depends on [control=['if'], data=[]] else: # We found a dynamically created mapping for the right class # without additional configuration; do not create a new one. new_mp = None if not new_mp is None: # Store the new (or updated) mapping. mp_reg.set_mapping(new_mp) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def next_token(text): r"""Returns the next possible token, advancing the iterator to the next position to start processing from. :param Union[str,iterator,Buffer] text: LaTeX to process :return str: the token >>> b = Buffer(r'\textbf{Do play\textit{nice}.} $$\min_w \|w\|_2^2$$') >>> print(next_token(b), next_token(b), next_token(b), next_token(b)) \textbf { Do play \textit >>> print(next_token(b), next_token(b), next_token(b), next_token(b)) { nice } . >>> print(next_token(b)) } >>> print(next_token(Buffer('.}'))) . >>> next_token(b) ' ' >>> next_token(b) '$$' >>> b2 = Buffer(r'\gamma = \beta') >>> print(next_token(b2), next_token(b2), next_token(b2)) \gamma = \beta """ while text.hasNext(): for name, f in tokenizers: current_token = f(text) if current_token is not None: return current_token
def function[next_token, parameter[text]]: constant[Returns the next possible token, advancing the iterator to the next position to start processing from. :param Union[str,iterator,Buffer] text: LaTeX to process :return str: the token >>> b = Buffer(r'\textbf{Do play\textit{nice}.} $$\min_w \|w\|_2^2$$') >>> print(next_token(b), next_token(b), next_token(b), next_token(b)) \textbf { Do play \textit >>> print(next_token(b), next_token(b), next_token(b), next_token(b)) { nice } . >>> print(next_token(b)) } >>> print(next_token(Buffer('.}'))) . >>> next_token(b) ' ' >>> next_token(b) '$$' >>> b2 = Buffer(r'\gamma = \beta') >>> print(next_token(b2), next_token(b2), next_token(b2)) \gamma = \beta ] while call[name[text].hasNext, parameter[]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b0786a40>, <ast.Name object at 0x7da1b0786f50>]]] in starred[name[tokenizers]] begin[:] variable[current_token] assign[=] call[name[f], parameter[name[text]]] if compare[name[current_token] is_not constant[None]] begin[:] return[name[current_token]]
keyword[def] identifier[next_token] ( identifier[text] ): literal[string] keyword[while] identifier[text] . identifier[hasNext] (): keyword[for] identifier[name] , identifier[f] keyword[in] identifier[tokenizers] : identifier[current_token] = identifier[f] ( identifier[text] ) keyword[if] identifier[current_token] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[current_token]
def next_token(text): """Returns the next possible token, advancing the iterator to the next position to start processing from. :param Union[str,iterator,Buffer] text: LaTeX to process :return str: the token >>> b = Buffer(r'\\textbf{Do play\\textit{nice}.} $$\\min_w \\|w\\|_2^2$$') >>> print(next_token(b), next_token(b), next_token(b), next_token(b)) \\textbf { Do play \\textit >>> print(next_token(b), next_token(b), next_token(b), next_token(b)) { nice } . >>> print(next_token(b)) } >>> print(next_token(Buffer('.}'))) . >>> next_token(b) ' ' >>> next_token(b) '$$' >>> b2 = Buffer(r'\\gamma = \\beta') >>> print(next_token(b2), next_token(b2), next_token(b2)) \\gamma = \\beta """ while text.hasNext(): for (name, f) in tokenizers: current_token = f(text) if current_token is not None: return current_token # depends on [control=['if'], data=['current_token']] # depends on [control=['for'], data=[]] # depends on [control=['while'], data=[]]
def getOverlayTexture(self, ulOverlayHandle, pNativeTextureRef): """ Get the native texture handle/device for an overlay you have created. On windows this handle will be a ID3D11ShaderResourceView with a ID3D11Texture2D bound. * The texture will always be sized to match the backing texture you supplied in SetOverlayTexture above. * You MUST call ReleaseNativeOverlayHandle() with pNativeTextureHandle once you are done with this texture. * pNativeTextureHandle is an OUTPUT, it will be a pointer to a ID3D11ShaderResourceView *. pNativeTextureRef is an INPUT and should be a ID3D11Resource *. The device used by pNativeTextureRef will be used to bind pNativeTextureHandle. """ fn = self.function_table.getOverlayTexture pNativeTextureHandle = c_void_p() pWidth = c_uint32() pHeight = c_uint32() pNativeFormat = c_uint32() pAPIType = ETextureType() pColorSpace = EColorSpace() pTextureBounds = VRTextureBounds_t() result = fn(ulOverlayHandle, byref(pNativeTextureHandle), pNativeTextureRef, byref(pWidth), byref(pHeight), byref(pNativeFormat), byref(pAPIType), byref(pColorSpace), byref(pTextureBounds)) return result, pNativeTextureHandle.value, pWidth.value, pHeight.value, pNativeFormat.value, pAPIType, pColorSpace, pTextureBounds
def function[getOverlayTexture, parameter[self, ulOverlayHandle, pNativeTextureRef]]: constant[ Get the native texture handle/device for an overlay you have created. On windows this handle will be a ID3D11ShaderResourceView with a ID3D11Texture2D bound. * The texture will always be sized to match the backing texture you supplied in SetOverlayTexture above. * You MUST call ReleaseNativeOverlayHandle() with pNativeTextureHandle once you are done with this texture. * pNativeTextureHandle is an OUTPUT, it will be a pointer to a ID3D11ShaderResourceView *. pNativeTextureRef is an INPUT and should be a ID3D11Resource *. The device used by pNativeTextureRef will be used to bind pNativeTextureHandle. ] variable[fn] assign[=] name[self].function_table.getOverlayTexture variable[pNativeTextureHandle] assign[=] call[name[c_void_p], parameter[]] variable[pWidth] assign[=] call[name[c_uint32], parameter[]] variable[pHeight] assign[=] call[name[c_uint32], parameter[]] variable[pNativeFormat] assign[=] call[name[c_uint32], parameter[]] variable[pAPIType] assign[=] call[name[ETextureType], parameter[]] variable[pColorSpace] assign[=] call[name[EColorSpace], parameter[]] variable[pTextureBounds] assign[=] call[name[VRTextureBounds_t], parameter[]] variable[result] assign[=] call[name[fn], parameter[name[ulOverlayHandle], call[name[byref], parameter[name[pNativeTextureHandle]]], name[pNativeTextureRef], call[name[byref], parameter[name[pWidth]]], call[name[byref], parameter[name[pHeight]]], call[name[byref], parameter[name[pNativeFormat]]], call[name[byref], parameter[name[pAPIType]]], call[name[byref], parameter[name[pColorSpace]]], call[name[byref], parameter[name[pTextureBounds]]]]] return[tuple[[<ast.Name object at 0x7da2046226b0>, <ast.Attribute object at 0x7da204623b50>, <ast.Attribute object at 0x7da204622a40>, <ast.Attribute object at 0x7da204622e30>, <ast.Attribute object at 0x7da204622860>, <ast.Name object at 0x7da204620190>, <ast.Name object at 0x7da204623d60>, <ast.Name 
object at 0x7da204620c40>]]]
keyword[def] identifier[getOverlayTexture] ( identifier[self] , identifier[ulOverlayHandle] , identifier[pNativeTextureRef] ): literal[string] identifier[fn] = identifier[self] . identifier[function_table] . identifier[getOverlayTexture] identifier[pNativeTextureHandle] = identifier[c_void_p] () identifier[pWidth] = identifier[c_uint32] () identifier[pHeight] = identifier[c_uint32] () identifier[pNativeFormat] = identifier[c_uint32] () identifier[pAPIType] = identifier[ETextureType] () identifier[pColorSpace] = identifier[EColorSpace] () identifier[pTextureBounds] = identifier[VRTextureBounds_t] () identifier[result] = identifier[fn] ( identifier[ulOverlayHandle] , identifier[byref] ( identifier[pNativeTextureHandle] ), identifier[pNativeTextureRef] , identifier[byref] ( identifier[pWidth] ), identifier[byref] ( identifier[pHeight] ), identifier[byref] ( identifier[pNativeFormat] ), identifier[byref] ( identifier[pAPIType] ), identifier[byref] ( identifier[pColorSpace] ), identifier[byref] ( identifier[pTextureBounds] )) keyword[return] identifier[result] , identifier[pNativeTextureHandle] . identifier[value] , identifier[pWidth] . identifier[value] , identifier[pHeight] . identifier[value] , identifier[pNativeFormat] . identifier[value] , identifier[pAPIType] , identifier[pColorSpace] , identifier[pTextureBounds]
def getOverlayTexture(self, ulOverlayHandle, pNativeTextureRef): """ Get the native texture handle/device for an overlay you have created. On windows this handle will be a ID3D11ShaderResourceView with a ID3D11Texture2D bound. * The texture will always be sized to match the backing texture you supplied in SetOverlayTexture above. * You MUST call ReleaseNativeOverlayHandle() with pNativeTextureHandle once you are done with this texture. * pNativeTextureHandle is an OUTPUT, it will be a pointer to a ID3D11ShaderResourceView *. pNativeTextureRef is an INPUT and should be a ID3D11Resource *. The device used by pNativeTextureRef will be used to bind pNativeTextureHandle. """ fn = self.function_table.getOverlayTexture pNativeTextureHandle = c_void_p() pWidth = c_uint32() pHeight = c_uint32() pNativeFormat = c_uint32() pAPIType = ETextureType() pColorSpace = EColorSpace() pTextureBounds = VRTextureBounds_t() result = fn(ulOverlayHandle, byref(pNativeTextureHandle), pNativeTextureRef, byref(pWidth), byref(pHeight), byref(pNativeFormat), byref(pAPIType), byref(pColorSpace), byref(pTextureBounds)) return (result, pNativeTextureHandle.value, pWidth.value, pHeight.value, pNativeFormat.value, pAPIType, pColorSpace, pTextureBounds)
def relevant_rules_for_match(self, action, subject): """retrive match action and subject""" matches = [] for rule in self.rules: rule.expanded_actions = self.expand_actions(rule.actions) if rule.is_relevant(action, subject): matches.append(rule) return self.optimize(matches[::-1])
def function[relevant_rules_for_match, parameter[self, action, subject]]: constant[retrive match action and subject] variable[matches] assign[=] list[[]] for taget[name[rule]] in starred[name[self].rules] begin[:] name[rule].expanded_actions assign[=] call[name[self].expand_actions, parameter[name[rule].actions]] if call[name[rule].is_relevant, parameter[name[action], name[subject]]] begin[:] call[name[matches].append, parameter[name[rule]]] return[call[name[self].optimize, parameter[call[name[matches]][<ast.Slice object at 0x7da2054a4640>]]]]
keyword[def] identifier[relevant_rules_for_match] ( identifier[self] , identifier[action] , identifier[subject] ): literal[string] identifier[matches] =[] keyword[for] identifier[rule] keyword[in] identifier[self] . identifier[rules] : identifier[rule] . identifier[expanded_actions] = identifier[self] . identifier[expand_actions] ( identifier[rule] . identifier[actions] ) keyword[if] identifier[rule] . identifier[is_relevant] ( identifier[action] , identifier[subject] ): identifier[matches] . identifier[append] ( identifier[rule] ) keyword[return] identifier[self] . identifier[optimize] ( identifier[matches] [::- literal[int] ])
def relevant_rules_for_match(self, action, subject): """retrive match action and subject""" matches = [] for rule in self.rules: rule.expanded_actions = self.expand_actions(rule.actions) if rule.is_relevant(action, subject): matches.append(rule) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rule']] return self.optimize(matches[::-1])
def convert_datetext_to_dategui(datetext, ln=None, secs=False): """Convert: '2005-11-16 15:11:57' => '16 nov 2005, 15:11' Or optionally with seconds: '2005-11-16 15:11:57' => '16 nov 2005, 15:11:57' Month is internationalized """ assert ln is None, 'setting language is not supported' try: datestruct = convert_datetext_to_datestruct(datetext) if datestruct == datestruct_default: raise ValueError if secs: output_format = "d MMM Y, H:mm:ss" else: output_format = "d MMM Y, H:mm" dt = datetime.fromtimestamp(time.mktime(datestruct)) return babel_format_datetime(dt, output_format) except ValueError: return _("N/A")
def function[convert_datetext_to_dategui, parameter[datetext, ln, secs]]: constant[Convert: '2005-11-16 15:11:57' => '16 nov 2005, 15:11' Or optionally with seconds: '2005-11-16 15:11:57' => '16 nov 2005, 15:11:57' Month is internationalized ] assert[compare[name[ln] is constant[None]]] <ast.Try object at 0x7da1b276bb20>
keyword[def] identifier[convert_datetext_to_dategui] ( identifier[datetext] , identifier[ln] = keyword[None] , identifier[secs] = keyword[False] ): literal[string] keyword[assert] identifier[ln] keyword[is] keyword[None] , literal[string] keyword[try] : identifier[datestruct] = identifier[convert_datetext_to_datestruct] ( identifier[datetext] ) keyword[if] identifier[datestruct] == identifier[datestruct_default] : keyword[raise] identifier[ValueError] keyword[if] identifier[secs] : identifier[output_format] = literal[string] keyword[else] : identifier[output_format] = literal[string] identifier[dt] = identifier[datetime] . identifier[fromtimestamp] ( identifier[time] . identifier[mktime] ( identifier[datestruct] )) keyword[return] identifier[babel_format_datetime] ( identifier[dt] , identifier[output_format] ) keyword[except] identifier[ValueError] : keyword[return] identifier[_] ( literal[string] )
def convert_datetext_to_dategui(datetext, ln=None, secs=False): """Convert: '2005-11-16 15:11:57' => '16 nov 2005, 15:11' Or optionally with seconds: '2005-11-16 15:11:57' => '16 nov 2005, 15:11:57' Month is internationalized """ assert ln is None, 'setting language is not supported' try: datestruct = convert_datetext_to_datestruct(datetext) if datestruct == datestruct_default: raise ValueError # depends on [control=['if'], data=[]] if secs: output_format = 'd MMM Y, H:mm:ss' # depends on [control=['if'], data=[]] else: output_format = 'd MMM Y, H:mm' dt = datetime.fromtimestamp(time.mktime(datestruct)) return babel_format_datetime(dt, output_format) # depends on [control=['try'], data=[]] except ValueError: return _('N/A') # depends on [control=['except'], data=[]]
def get_config(): """ Return configuration for current session. When called for the first time, this will create a config object, using whatever is the default load path to find the config yaml """ if session.config is None: path = session.default_config_path if os.path.isfile(path): logging.info("LOADING FROM: {}".format(path)) session.config = load_config(path) else: session.config = Config() logging.info("using default session: {}, path does not exist: {}".format(session, path)) else: logging.info("Using pre-loaded object: {}".format(session.config)) return session.config
def function[get_config, parameter[]]: constant[ Return configuration for current session. When called for the first time, this will create a config object, using whatever is the default load path to find the config yaml ] if compare[name[session].config is constant[None]] begin[:] variable[path] assign[=] name[session].default_config_path if call[name[os].path.isfile, parameter[name[path]]] begin[:] call[name[logging].info, parameter[call[constant[LOADING FROM: {}].format, parameter[name[path]]]]] name[session].config assign[=] call[name[load_config], parameter[name[path]]] return[name[session].config]
keyword[def] identifier[get_config] (): literal[string] keyword[if] identifier[session] . identifier[config] keyword[is] keyword[None] : identifier[path] = identifier[session] . identifier[default_config_path] keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ): identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[path] )) identifier[session] . identifier[config] = identifier[load_config] ( identifier[path] ) keyword[else] : identifier[session] . identifier[config] = identifier[Config] () identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[session] , identifier[path] )) keyword[else] : identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[session] . identifier[config] )) keyword[return] identifier[session] . identifier[config]
def get_config(): """ Return configuration for current session. When called for the first time, this will create a config object, using whatever is the default load path to find the config yaml """ if session.config is None: path = session.default_config_path if os.path.isfile(path): logging.info('LOADING FROM: {}'.format(path)) session.config = load_config(path) # depends on [control=['if'], data=[]] else: session.config = Config() logging.info('using default session: {}, path does not exist: {}'.format(session, path)) # depends on [control=['if'], data=[]] else: logging.info('Using pre-loaded object: {}'.format(session.config)) return session.config
def hwpack_names():
    """return installed hardware package names."""
    names = [entry.name for entry in hwpack_dir().listdir()]
    arduino_included = 'arduino' in names
    # 'tools' is not a hardware package; 'arduino' is re-added below so the
    # stock platform always leads the (otherwise sorted) list.
    others = sorted(n for n in names if n not in ('tools', 'arduino'))
    if arduino_included:
        return ['arduino'] + others
    return others
def function[hwpack_names, parameter[]]: constant[return installed hardware package names.] variable[ls] assign[=] call[call[name[hwpack_dir], parameter[]].listdir, parameter[]] variable[ls] assign[=] <ast.ListComp object at 0x7da1b28c53c0> variable[ls] assign[=] <ast.ListComp object at 0x7da1b28c6740> variable[arduino_included] assign[=] compare[constant[arduino] in name[ls]] variable[ls] assign[=] <ast.ListComp object at 0x7da1b28c51b0> call[name[ls].sort, parameter[]] if name[arduino_included] begin[:] variable[ls] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b28c5510>]] + name[ls]] return[name[ls]]
keyword[def] identifier[hwpack_names] (): literal[string] identifier[ls] = identifier[hwpack_dir] (). identifier[listdir] () identifier[ls] =[ identifier[x] . identifier[name] keyword[for] identifier[x] keyword[in] identifier[ls] ] identifier[ls] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[ls] keyword[if] identifier[x] != literal[string] ] identifier[arduino_included] = literal[string] keyword[in] identifier[ls] identifier[ls] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[ls] keyword[if] identifier[x] != literal[string] ] identifier[ls] . identifier[sort] () keyword[if] identifier[arduino_included] : identifier[ls] =[ literal[string] ]+ identifier[ls] keyword[return] identifier[ls]
def hwpack_names(): """return installed hardware package names.""" ls = hwpack_dir().listdir() ls = [x.name for x in ls] ls = [x for x in ls if x != 'tools'] arduino_included = 'arduino' in ls ls = [x for x in ls if x != 'arduino'] ls.sort() if arduino_included: ls = ['arduino'] + ls # move to 1st pos # depends on [control=['if'], data=[]] return ls
def sg_densenet_layer(x, opt):
    r"""Applies basic architecture of densenet layer.

    Note that the fc layers in the original architecture will be replaced
    with fully convolutional layers. For convenience, We still call them
    fc layers, though.

    Args:
      x: A `Tensor`.
      opt:
        dim: An integer. Dimension for this resnet layer
        num: Number of times to repeat
        act: String. 'relu' (default). the activation function name
        trans: Boolean. If True(default), transition layer will be applied.
        reuse: Boolean(Optional). If True, all variables will be loaded from
          previous network.
        name: String. (optional) Used as convolution layer prefix

    Returns:
      A `Tensor`.
    """
    assert opt.dim is not None, 'dim is mandatory.'
    assert opt.num is not None, 'num is mandatory.'

    # default stride
    opt += tf.sg_opt(stride=1, act='relu', trans=True)

    # format convolutional layer name
    def cname(index):
        # With no prefix every layer keeps name=None; otherwise layers are
        # numbered '<name>_1', '<name>_2', ...
        return opt.name if opt.name is None else opt.name + '_%d' % index

    # dense layer
    with tf.sg_context(bias=False, reuse=opt.reuse):
        out = x
        for i in range(opt.num):
            # dense block: BN+activation bypass, 1x1 bottleneck conv at
            # dim//4 channels, then a 3x3 conv producing `dim` channels.
            out_new = (out
                       .sg_bypass(act=opt.act, bn=True, name=cname(3 * i + 1))
                       .sg_conv(dim=opt.dim // 4, size=1, act=opt.act, bn=True, name=cname(3 * i + 2))
                       .sg_conv(dim=opt.dim, size=3, name=cname(3 * i + 3)))
            # DenseNet concatenation: stack the new features onto the running
            # feature map along axis 3 (presumably NHWC channels -- verify
            # against callers).
            out = tf.concat([out_new, out], 3)

        # transition layer
        if opt.trans:
            # NOTE(review): reuses the loop variable `i` for layer naming,
            # so this implicitly assumes opt.num >= 1.
            out = (out
                   .sg_bypass(act=opt.act, bn=True, name=cname(3 * i + 4))
                   .sg_conv(size=1, name=cname(3 * i + 5))
                   .sg_pool(avg=True))

    return out
def function[sg_densenet_layer, parameter[x, opt]]: constant[Applies basic architecture of densenet layer. Note that the fc layers in the original architecture will be replaced with fully convolutional layers. For convenience, We still call them fc layers, though. Args: x: A `Tensor`. opt: dim: An integer. Dimension for this resnet layer num: Number of times to repeat act: String. 'relu' (default). the activation function name trans: Boolean. If True(default), transition layer will be applied. reuse: Boolean(Optional). If True, all variables will be loaded from previous network. name: String. (optional) Used as convolution layer prefix Returns: A `Tensor`. ] assert[compare[name[opt].dim is_not constant[None]]] assert[compare[name[opt].num is_not constant[None]]] <ast.AugAssign object at 0x7da1b1236da0> def function[cname, parameter[index]]: return[<ast.IfExp object at 0x7da1b1236770>] with call[name[tf].sg_context, parameter[]] begin[:] variable[out] assign[=] name[x] for taget[name[i]] in starred[call[name[range], parameter[name[opt].num]]] begin[:] variable[out_new] assign[=] call[call[call[name[out].sg_bypass, parameter[]].sg_conv, parameter[]].sg_conv, parameter[]] variable[out] assign[=] call[name[tf].concat, parameter[list[[<ast.Name object at 0x7da1b1236b00>, <ast.Name object at 0x7da1b12365f0>]], constant[3]]] if name[opt].trans begin[:] variable[out] assign[=] call[call[call[name[out].sg_bypass, parameter[]].sg_conv, parameter[]].sg_pool, parameter[]] return[name[out]]
keyword[def] identifier[sg_densenet_layer] ( identifier[x] , identifier[opt] ): literal[string] keyword[assert] identifier[opt] . identifier[dim] keyword[is] keyword[not] keyword[None] , literal[string] keyword[assert] identifier[opt] . identifier[num] keyword[is] keyword[not] keyword[None] , literal[string] identifier[opt] += identifier[tf] . identifier[sg_opt] ( identifier[stride] = literal[int] , identifier[act] = literal[string] , identifier[trans] = keyword[True] ) keyword[def] identifier[cname] ( identifier[index] ): keyword[return] identifier[opt] . identifier[name] keyword[if] identifier[opt] . identifier[name] keyword[is] keyword[None] keyword[else] identifier[opt] . identifier[name] + literal[string] % identifier[index] keyword[with] identifier[tf] . identifier[sg_context] ( identifier[bias] = keyword[False] , identifier[reuse] = identifier[opt] . identifier[reuse] ): identifier[out] = identifier[x] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[opt] . identifier[num] ): identifier[out_new] =( identifier[out] . identifier[sg_bypass] ( identifier[act] = identifier[opt] . identifier[act] , identifier[bn] = keyword[True] , identifier[name] = identifier[cname] ( literal[int] * identifier[i] + literal[int] )) . identifier[sg_conv] ( identifier[dim] = identifier[opt] . identifier[dim] // literal[int] , identifier[size] = literal[int] , identifier[act] = identifier[opt] . identifier[act] , identifier[bn] = keyword[True] , identifier[name] = identifier[cname] ( literal[int] * identifier[i] + literal[int] )) . identifier[sg_conv] ( identifier[dim] = identifier[opt] . identifier[dim] , identifier[size] = literal[int] , identifier[name] = identifier[cname] ( literal[int] * identifier[i] + literal[int] ))) identifier[out] = identifier[tf] . identifier[concat] ([ identifier[out_new] , identifier[out] ], literal[int] ) keyword[if] identifier[opt] . identifier[trans] : identifier[out] =( identifier[out] . 
identifier[sg_bypass] ( identifier[act] = identifier[opt] . identifier[act] , identifier[bn] = keyword[True] , identifier[name] = identifier[cname] ( literal[int] * identifier[i] + literal[int] )) . identifier[sg_conv] ( identifier[size] = literal[int] , identifier[name] = identifier[cname] ( literal[int] * identifier[i] + literal[int] )) . identifier[sg_pool] ( identifier[avg] = keyword[True] )) keyword[return] identifier[out]
def sg_densenet_layer(x, opt): """Applies basic architecture of densenet layer. Note that the fc layers in the original architecture will be replaced with fully convolutional layers. For convenience, We still call them fc layers, though. Args: x: A `Tensor`. opt: dim: An integer. Dimension for this resnet layer num: Number of times to repeat act: String. 'relu' (default). the activation function name trans: Boolean. If True(default), transition layer will be applied. reuse: Boolean(Optional). If True, all variables will be loaded from previous network. name: String. (optional) Used as convolution layer prefix Returns: A `Tensor`. """ assert opt.dim is not None, 'dim is mandatory.' assert opt.num is not None, 'num is mandatory.' # default stride opt += tf.sg_opt(stride=1, act='relu', trans=True) # format convolutional layer name def cname(index): return opt.name if opt.name is None else opt.name + '_%d' % index # dense layer with tf.sg_context(bias=False, reuse=opt.reuse): out = x for i in range(opt.num): # dense block out_new = out.sg_bypass(act=opt.act, bn=True, name=cname(3 * i + 1)).sg_conv(dim=opt.dim // 4, size=1, act=opt.act, bn=True, name=cname(3 * i + 2)).sg_conv(dim=opt.dim, size=3, name=cname(3 * i + 3)) out = tf.concat([out_new, out], 3) # depends on [control=['for'], data=['i']] # transition layer if opt.trans: out = out.sg_bypass(act=opt.act, bn=True, name=cname(3 * i + 4)).sg_conv(size=1, name=cname(3 * i + 5)).sg_pool(avg=True) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] return out
def dump(self, force=False):
    """
    Encodes the value using DER

    :param force:
        If the encoded contents already exist, clear them and regenerate
        to ensure they are in DER format instead of BER format

    :return:
        A byte string of the DER-encoded value
    """

    if force:
        self._set_contents(force=force)

    # Before serializing, ensure every mandatory field carries a value;
    # fields flagged 'optional' or carrying a 'default' may stay unset.
    if self._fields and self.children is not None:
        for position, field_spec in enumerate(self._fields):
            field_name = field_spec[0]
            params = field_spec[2]
            value_present = self.children[position] is not VOID
            if value_present or 'default' in params or 'optional' in params:
                continue
            raise ValueError(unwrap(
                '''
                Field "%s" is missing from structure
                ''',
                field_name
            ))

    return Asn1Value.dump(self)
def function[dump, parameter[self, force]]: constant[ Encodes the value using DER :param force: If the encoded contents already exist, clear them and regenerate to ensure they are in DER format instead of BER format :return: A byte string of the DER-encoded value ] if name[force] begin[:] call[name[self]._set_contents, parameter[]] if <ast.BoolOp object at 0x7da18f00f670> begin[:] for taget[tuple[[<ast.Name object at 0x7da18f00f5b0>, <ast.Tuple object at 0x7da18f00fc40>]]] in starred[call[name[enumerate], parameter[name[self]._fields]]] begin[:] if compare[call[name[self].children][name[index]] is_not name[VOID]] begin[:] continue if <ast.BoolOp object at 0x7da18f00e560> begin[:] continue <ast.Raise object at 0x7da18f00c7c0> return[call[name[Asn1Value].dump, parameter[name[self]]]]
keyword[def] identifier[dump] ( identifier[self] , identifier[force] = keyword[False] ): literal[string] keyword[if] identifier[force] : identifier[self] . identifier[_set_contents] ( identifier[force] = identifier[force] ) keyword[if] identifier[self] . identifier[_fields] keyword[and] identifier[self] . identifier[children] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[index] ,( identifier[field_name] , identifier[_] , identifier[params] ) keyword[in] identifier[enumerate] ( identifier[self] . identifier[_fields] ): keyword[if] identifier[self] . identifier[children] [ identifier[index] ] keyword[is] keyword[not] identifier[VOID] : keyword[continue] keyword[if] literal[string] keyword[in] identifier[params] keyword[or] literal[string] keyword[in] identifier[params] : keyword[continue] keyword[raise] identifier[ValueError] ( identifier[unwrap] ( literal[string] , identifier[field_name] )) keyword[return] identifier[Asn1Value] . identifier[dump] ( identifier[self] )
def dump(self, force=False): """ Encodes the value using DER :param force: If the encoded contents already exist, clear them and regenerate to ensure they are in DER format instead of BER format :return: A byte string of the DER-encoded value """ if force: self._set_contents(force=force) # depends on [control=['if'], data=[]] if self._fields and self.children is not None: for (index, (field_name, _, params)) in enumerate(self._fields): if self.children[index] is not VOID: continue # depends on [control=['if'], data=[]] if 'default' in params or 'optional' in params: continue # depends on [control=['if'], data=[]] raise ValueError(unwrap('\n Field "%s" is missing from structure\n ', field_name)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] return Asn1Value.dump(self)
def list(self, request, project):
    """
    GET method for list of ``push`` records with revisions

    Supported query parameters (all optional): revision range markers
    (``fromchange``/``tochange``), a date range (``startdate``/``enddate``),
    ``revision``/``commit_revision`` lookups, raw ``push_timestamp__*`` and
    ``id__*`` filters, ``author`` and ``count``.
    """
    # What is the upper limit on the number of pushes returned by the api
    MAX_PUSH_COUNT = 1000

    # make a mutable copy of these params
    filter_params = request.query_params.copy()

    # This will contain some meta data about the request and results
    meta = {}

    # support ranges for date as well as revisions(changes) like old tbpl
    # These params are pulled out of filter_params and handled specially
    # below; the remaining filter_params keys are applied directly.
    for param in ["fromchange", "tochange", "startdate", "enddate", "revision", "commit_revision"]:
        v = filter_params.get(param, None)
        if v:
            del filter_params[param]
            meta[param] = v

    try:
        repository = Repository.objects.get(name=project)
    except Repository.DoesNotExist:
        return Response({
            "detail": "No project with name {}".format(project)
        }, status=HTTP_404_NOT_FOUND)

    # Newest pushes first.
    pushes = Push.objects.filter(repository=repository).order_by('-time')

    for (param, value) in meta.items():
        if param == 'fromchange':
            # A value shorter than 40 chars is treated as a revision prefix.
            revision_field = 'revision__startswith' if len(value) < 40 else 'revision'
            filter_kwargs = {revision_field: value, 'repository': repository}
            frompush_time = Push.objects.values_list('time', flat=True).get(
                **filter_kwargs)
            pushes = pushes.filter(time__gte=frompush_time)
            # Mirror the time bound into filter_params so it is echoed back
            # to the client in the response meta.
            filter_params.update({
                "push_timestamp__gte": to_timestamp(frompush_time)
            })
            self.report_if_short_revision(param, value)

        elif param == 'tochange':
            revision_field = 'revision__startswith' if len(value) < 40 else 'revision'
            filter_kwargs = {revision_field: value, 'repository': repository}
            topush_time = Push.objects.values_list('time', flat=True).get(
                **filter_kwargs)
            pushes = pushes.filter(time__lte=topush_time)
            filter_params.update({
                "push_timestamp__lte": to_timestamp(topush_time)
            })
            self.report_if_short_revision(param, value)

        elif param == 'startdate':
            pushes = pushes.filter(time__gte=to_datetime(value))
            filter_params.update({
                "push_timestamp__gte": to_timestamp(to_datetime(value))
            })
        elif param == 'enddate':
            # enddate is inclusive: extend by one day and use a strict
            # upper bound on the timestamp.
            real_end_date = to_datetime(value) + datetime.timedelta(days=1)
            pushes = pushes.filter(time__lte=real_end_date)
            filter_params.update({
                "push_timestamp__lt": to_timestamp(real_end_date)
            })
        elif param == 'revision':
            # revision must be the tip revision of the push itself
            revision_field = 'revision__startswith' if len(value) < 40 else 'revision'
            filter_kwargs = {revision_field: value}
            pushes = pushes.filter(**filter_kwargs)
            rev_key = "revisions_long_revision" \
                if len(meta['revision']) == 40 else "revisions_short_revision"
            filter_params.update({rev_key: meta['revision']})
            self.report_if_short_revision(param, value)
        elif param == 'commit_revision':
            # revision can be either the revision of the push itself, or
            # any of the commits it refers to
            pushes = pushes.filter(commits__revision=value)
            self.report_if_short_revision(param, value)

    for param in ['push_timestamp__lt', 'push_timestamp__lte',
                  'push_timestamp__gt', 'push_timestamp__gte']:
        if filter_params.get(param):
            # translate push timestamp directly into a filter
            try:
                value = datetime.datetime.fromtimestamp(
                    float(filter_params.get(param)))
            except ValueError:
                return Response({
                    "detail": "Invalid timestamp specified for {}".format(
                        param)
                }, status=HTTP_400_BAD_REQUEST)
            pushes = pushes.filter(**{
                param.replace('push_timestamp', 'time'): value
            })

    for param in ['id__lt', 'id__lte', 'id__gt', 'id__gte', 'id']:
        try:
            value = int(filter_params.get(param, 0))
        except ValueError:
            # NOTE(review): this error message says "timestamp" but these
            # are id filters -- presumably a copy/paste; confirm before
            # changing a client-visible string.
            return Response({
                "detail": "Invalid timestamp specified for {}".format(
                    param)
            }, status=HTTP_400_BAD_REQUEST)
        if value:
            pushes = pushes.filter(**{param: value})

    id_in = filter_params.get("id__in")
    if id_in:
        try:
            # NOTE(review): `id` shadows the builtin inside this comprehension.
            id_in_list = [int(id) for id in id_in.split(',')]
        except ValueError:
            return Response({"detail": "Invalid id__in specification"},
                            status=HTTP_400_BAD_REQUEST)
        pushes = pushes.filter(id__in=id_in_list)

    author = filter_params.get("author")
    if author:
        pushes = pushes.filter(author=author)

    try:
        count = int(filter_params.get("count", 10))
    except ValueError:
        return Response({"detail": "Valid count value required"},
                        status=HTTP_400_BAD_REQUEST)

    if count > MAX_PUSH_COUNT:
        msg = "Specified count exceeds api limit: {}".format(MAX_PUSH_COUNT)
        return Response({"detail": msg}, status=HTTP_400_BAD_REQUEST)

    # we used to have a "full" parameter for this endpoint so you could
    # specify to not fetch the revision information if it was set to
    # false. however AFAIK no one ever used it (default was to fetch
    # everything), so let's just leave it out. it doesn't break
    # anything to send extra data when not required.
    pushes = pushes.select_related('repository').prefetch_related('commits')[:count]
    serializer = PushSerializer(pushes, many=True)

    meta['count'] = len(pushes)
    meta['repository'] = project
    meta['filter_params'] = filter_params

    resp = {
        'meta': meta,
        'results': serializer.data
    }

    return Response(resp)
def function[list, parameter[self, request, project]]: constant[ GET method for list of ``push`` records with revisions ] variable[MAX_PUSH_COUNT] assign[=] constant[1000] variable[filter_params] assign[=] call[name[request].query_params.copy, parameter[]] variable[meta] assign[=] dictionary[[], []] for taget[name[param]] in starred[list[[<ast.Constant object at 0x7da1b08cb700>, <ast.Constant object at 0x7da1b08cb4f0>, <ast.Constant object at 0x7da1b08cb100>, <ast.Constant object at 0x7da1b08c8670>, <ast.Constant object at 0x7da1b08c9f00>, <ast.Constant object at 0x7da1b08c8520>]]] begin[:] variable[v] assign[=] call[name[filter_params].get, parameter[name[param], constant[None]]] if name[v] begin[:] <ast.Delete object at 0x7da1b08ca350> call[name[meta]][name[param]] assign[=] name[v] <ast.Try object at 0x7da1b08c8a60> variable[pushes] assign[=] call[call[name[Push].objects.filter, parameter[]].order_by, parameter[constant[-time]]] for taget[tuple[[<ast.Name object at 0x7da1b08cb9a0>, <ast.Name object at 0x7da1b08ca7d0>]]] in starred[call[name[meta].items, parameter[]]] begin[:] if compare[name[param] equal[==] constant[fromchange]] begin[:] variable[revision_field] assign[=] <ast.IfExp object at 0x7da1b08c9030> variable[filter_kwargs] assign[=] dictionary[[<ast.Name object at 0x7da1b08cb310>, <ast.Constant object at 0x7da1b08c8e50>], [<ast.Name object at 0x7da1b08c91e0>, <ast.Name object at 0x7da1b08c80a0>]] variable[frompush_time] assign[=] call[call[name[Push].objects.values_list, parameter[constant[time]]].get, parameter[]] variable[pushes] assign[=] call[name[pushes].filter, parameter[]] call[name[filter_params].update, parameter[dictionary[[<ast.Constant object at 0x7da1b08f9810>], [<ast.Call object at 0x7da1b08f9540>]]]] call[name[self].report_if_short_revision, parameter[name[param], name[value]]] for taget[name[param]] in starred[list[[<ast.Constant object at 0x7da1b06326b0>, <ast.Constant object at 0x7da1b06318a0>, <ast.Constant object at 0x7da1b0633b20>, 
<ast.Constant object at 0x7da1b0630d30>]]] begin[:] if call[name[filter_params].get, parameter[name[param]]] begin[:] <ast.Try object at 0x7da1b0632470> variable[pushes] assign[=] call[name[pushes].filter, parameter[]] for taget[name[param]] in starred[list[[<ast.Constant object at 0x7da1b0631120>, <ast.Constant object at 0x7da1b06312a0>, <ast.Constant object at 0x7da1b0632b00>, <ast.Constant object at 0x7da1b0633040>, <ast.Constant object at 0x7da1b06331c0>]]] begin[:] <ast.Try object at 0x7da1b0631c60> if name[value] begin[:] variable[pushes] assign[=] call[name[pushes].filter, parameter[]] variable[id_in] assign[=] call[name[filter_params].get, parameter[constant[id__in]]] if name[id_in] begin[:] <ast.Try object at 0x7da1b0630130> variable[pushes] assign[=] call[name[pushes].filter, parameter[]] variable[author] assign[=] call[name[filter_params].get, parameter[constant[author]]] if name[author] begin[:] variable[pushes] assign[=] call[name[pushes].filter, parameter[]] <ast.Try object at 0x7da1b0630fd0> if compare[name[count] greater[>] name[MAX_PUSH_COUNT]] begin[:] variable[msg] assign[=] call[constant[Specified count exceeds api limit: {}].format, parameter[name[MAX_PUSH_COUNT]]] return[call[name[Response], parameter[dictionary[[<ast.Constant object at 0x7da1b0641ba0>], [<ast.Name object at 0x7da1b0641b40>]]]]] variable[pushes] assign[=] call[call[call[name[pushes].select_related, parameter[constant[repository]]].prefetch_related, parameter[constant[commits]]]][<ast.Slice object at 0x7da1b0642680>] variable[serializer] assign[=] call[name[PushSerializer], parameter[name[pushes]]] call[name[meta]][constant[count]] assign[=] call[name[len], parameter[name[pushes]]] call[name[meta]][constant[repository]] assign[=] name[project] call[name[meta]][constant[filter_params]] assign[=] name[filter_params] variable[resp] assign[=] dictionary[[<ast.Constant object at 0x7da1b0643280>, <ast.Constant object at 0x7da1b0641ed0>], [<ast.Name object at 0x7da1b0642a10>, 
<ast.Attribute object at 0x7da1b06401f0>]] return[call[name[Response], parameter[name[resp]]]]
keyword[def] identifier[list] ( identifier[self] , identifier[request] , identifier[project] ): literal[string] identifier[MAX_PUSH_COUNT] = literal[int] identifier[filter_params] = identifier[request] . identifier[query_params] . identifier[copy] () identifier[meta] ={} keyword[for] identifier[param] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[v] = identifier[filter_params] . identifier[get] ( identifier[param] , keyword[None] ) keyword[if] identifier[v] : keyword[del] identifier[filter_params] [ identifier[param] ] identifier[meta] [ identifier[param] ]= identifier[v] keyword[try] : identifier[repository] = identifier[Repository] . identifier[objects] . identifier[get] ( identifier[name] = identifier[project] ) keyword[except] identifier[Repository] . identifier[DoesNotExist] : keyword[return] identifier[Response] ({ literal[string] : literal[string] . identifier[format] ( identifier[project] ) }, identifier[status] = identifier[HTTP_404_NOT_FOUND] ) identifier[pushes] = identifier[Push] . identifier[objects] . identifier[filter] ( identifier[repository] = identifier[repository] ). identifier[order_by] ( literal[string] ) keyword[for] ( identifier[param] , identifier[value] ) keyword[in] identifier[meta] . identifier[items] (): keyword[if] identifier[param] == literal[string] : identifier[revision_field] = literal[string] keyword[if] identifier[len] ( identifier[value] )< literal[int] keyword[else] literal[string] identifier[filter_kwargs] ={ identifier[revision_field] : identifier[value] , literal[string] : identifier[repository] } identifier[frompush_time] = identifier[Push] . identifier[objects] . identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] ). identifier[get] ( ** identifier[filter_kwargs] ) identifier[pushes] = identifier[pushes] . identifier[filter] ( identifier[time__gte] = identifier[frompush_time] ) identifier[filter_params] . 
identifier[update] ({ literal[string] : identifier[to_timestamp] ( identifier[frompush_time] ) }) identifier[self] . identifier[report_if_short_revision] ( identifier[param] , identifier[value] ) keyword[elif] identifier[param] == literal[string] : identifier[revision_field] = literal[string] keyword[if] identifier[len] ( identifier[value] )< literal[int] keyword[else] literal[string] identifier[filter_kwargs] ={ identifier[revision_field] : identifier[value] , literal[string] : identifier[repository] } identifier[topush_time] = identifier[Push] . identifier[objects] . identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] ). identifier[get] ( ** identifier[filter_kwargs] ) identifier[pushes] = identifier[pushes] . identifier[filter] ( identifier[time__lte] = identifier[topush_time] ) identifier[filter_params] . identifier[update] ({ literal[string] : identifier[to_timestamp] ( identifier[topush_time] ) }) identifier[self] . identifier[report_if_short_revision] ( identifier[param] , identifier[value] ) keyword[elif] identifier[param] == literal[string] : identifier[pushes] = identifier[pushes] . identifier[filter] ( identifier[time__gte] = identifier[to_datetime] ( identifier[value] )) identifier[filter_params] . identifier[update] ({ literal[string] : identifier[to_timestamp] ( identifier[to_datetime] ( identifier[value] )) }) keyword[elif] identifier[param] == literal[string] : identifier[real_end_date] = identifier[to_datetime] ( identifier[value] )+ identifier[datetime] . identifier[timedelta] ( identifier[days] = literal[int] ) identifier[pushes] = identifier[pushes] . identifier[filter] ( identifier[time__lte] = identifier[real_end_date] ) identifier[filter_params] . 
identifier[update] ({ literal[string] : identifier[to_timestamp] ( identifier[real_end_date] ) }) keyword[elif] identifier[param] == literal[string] : identifier[revision_field] = literal[string] keyword[if] identifier[len] ( identifier[value] )< literal[int] keyword[else] literal[string] identifier[filter_kwargs] ={ identifier[revision_field] : identifier[value] } identifier[pushes] = identifier[pushes] . identifier[filter] (** identifier[filter_kwargs] ) identifier[rev_key] = literal[string] keyword[if] identifier[len] ( identifier[meta] [ literal[string] ])== literal[int] keyword[else] literal[string] identifier[filter_params] . identifier[update] ({ identifier[rev_key] : identifier[meta] [ literal[string] ]}) identifier[self] . identifier[report_if_short_revision] ( identifier[param] , identifier[value] ) keyword[elif] identifier[param] == literal[string] : identifier[pushes] = identifier[pushes] . identifier[filter] ( identifier[commits__revision] = identifier[value] ) identifier[self] . identifier[report_if_short_revision] ( identifier[param] , identifier[value] ) keyword[for] identifier[param] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]: keyword[if] identifier[filter_params] . identifier[get] ( identifier[param] ): keyword[try] : identifier[value] = identifier[datetime] . identifier[datetime] . identifier[fromtimestamp] ( identifier[float] ( identifier[filter_params] . identifier[get] ( identifier[param] ))) keyword[except] identifier[ValueError] : keyword[return] identifier[Response] ({ literal[string] : literal[string] . identifier[format] ( identifier[param] ) }, identifier[status] = identifier[HTTP_400_BAD_REQUEST] ) identifier[pushes] = identifier[pushes] . identifier[filter] (**{ identifier[param] . 
identifier[replace] ( literal[string] , literal[string] ): identifier[value] }) keyword[for] identifier[param] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: keyword[try] : identifier[value] = identifier[int] ( identifier[filter_params] . identifier[get] ( identifier[param] , literal[int] )) keyword[except] identifier[ValueError] : keyword[return] identifier[Response] ({ literal[string] : literal[string] . identifier[format] ( identifier[param] ) }, identifier[status] = identifier[HTTP_400_BAD_REQUEST] ) keyword[if] identifier[value] : identifier[pushes] = identifier[pushes] . identifier[filter] (**{ identifier[param] : identifier[value] }) identifier[id_in] = identifier[filter_params] . identifier[get] ( literal[string] ) keyword[if] identifier[id_in] : keyword[try] : identifier[id_in_list] =[ identifier[int] ( identifier[id] ) keyword[for] identifier[id] keyword[in] identifier[id_in] . identifier[split] ( literal[string] )] keyword[except] identifier[ValueError] : keyword[return] identifier[Response] ({ literal[string] : literal[string] }, identifier[status] = identifier[HTTP_400_BAD_REQUEST] ) identifier[pushes] = identifier[pushes] . identifier[filter] ( identifier[id__in] = identifier[id_in_list] ) identifier[author] = identifier[filter_params] . identifier[get] ( literal[string] ) keyword[if] identifier[author] : identifier[pushes] = identifier[pushes] . identifier[filter] ( identifier[author] = identifier[author] ) keyword[try] : identifier[count] = identifier[int] ( identifier[filter_params] . identifier[get] ( literal[string] , literal[int] )) keyword[except] identifier[ValueError] : keyword[return] identifier[Response] ({ literal[string] : literal[string] }, identifier[status] = identifier[HTTP_400_BAD_REQUEST] ) keyword[if] identifier[count] > identifier[MAX_PUSH_COUNT] : identifier[msg] = literal[string] . 
identifier[format] ( identifier[MAX_PUSH_COUNT] ) keyword[return] identifier[Response] ({ literal[string] : identifier[msg] }, identifier[status] = identifier[HTTP_400_BAD_REQUEST] ) identifier[pushes] = identifier[pushes] . identifier[select_related] ( literal[string] ). identifier[prefetch_related] ( literal[string] )[: identifier[count] ] identifier[serializer] = identifier[PushSerializer] ( identifier[pushes] , identifier[many] = keyword[True] ) identifier[meta] [ literal[string] ]= identifier[len] ( identifier[pushes] ) identifier[meta] [ literal[string] ]= identifier[project] identifier[meta] [ literal[string] ]= identifier[filter_params] identifier[resp] ={ literal[string] : identifier[meta] , literal[string] : identifier[serializer] . identifier[data] } keyword[return] identifier[Response] ( identifier[resp] )
def list(self, request, project): """ GET method for list of ``push`` records with revisions """ # What is the upper limit on the number of pushes returned by the api MAX_PUSH_COUNT = 1000 # make a mutable copy of these params filter_params = request.query_params.copy() # This will contain some meta data about the request and results meta = {} # support ranges for date as well as revisions(changes) like old tbpl for param in ['fromchange', 'tochange', 'startdate', 'enddate', 'revision', 'commit_revision']: v = filter_params.get(param, None) if v: del filter_params[param] meta[param] = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['param']] try: repository = Repository.objects.get(name=project) # depends on [control=['try'], data=[]] except Repository.DoesNotExist: return Response({'detail': 'No project with name {}'.format(project)}, status=HTTP_404_NOT_FOUND) # depends on [control=['except'], data=[]] pushes = Push.objects.filter(repository=repository).order_by('-time') for (param, value) in meta.items(): if param == 'fromchange': revision_field = 'revision__startswith' if len(value) < 40 else 'revision' filter_kwargs = {revision_field: value, 'repository': repository} frompush_time = Push.objects.values_list('time', flat=True).get(**filter_kwargs) pushes = pushes.filter(time__gte=frompush_time) filter_params.update({'push_timestamp__gte': to_timestamp(frompush_time)}) self.report_if_short_revision(param, value) # depends on [control=['if'], data=['param']] elif param == 'tochange': revision_field = 'revision__startswith' if len(value) < 40 else 'revision' filter_kwargs = {revision_field: value, 'repository': repository} topush_time = Push.objects.values_list('time', flat=True).get(**filter_kwargs) pushes = pushes.filter(time__lte=topush_time) filter_params.update({'push_timestamp__lte': to_timestamp(topush_time)}) self.report_if_short_revision(param, value) # depends on [control=['if'], data=['param']] elif param == 'startdate': 
pushes = pushes.filter(time__gte=to_datetime(value)) filter_params.update({'push_timestamp__gte': to_timestamp(to_datetime(value))}) # depends on [control=['if'], data=[]] elif param == 'enddate': real_end_date = to_datetime(value) + datetime.timedelta(days=1) pushes = pushes.filter(time__lte=real_end_date) filter_params.update({'push_timestamp__lt': to_timestamp(real_end_date)}) # depends on [control=['if'], data=[]] elif param == 'revision': # revision must be the tip revision of the push itself revision_field = 'revision__startswith' if len(value) < 40 else 'revision' filter_kwargs = {revision_field: value} pushes = pushes.filter(**filter_kwargs) rev_key = 'revisions_long_revision' if len(meta['revision']) == 40 else 'revisions_short_revision' filter_params.update({rev_key: meta['revision']}) self.report_if_short_revision(param, value) # depends on [control=['if'], data=['param']] elif param == 'commit_revision': # revision can be either the revision of the push itself, or # any of the commits it refers to pushes = pushes.filter(commits__revision=value) self.report_if_short_revision(param, value) # depends on [control=['if'], data=['param']] # depends on [control=['for'], data=[]] for param in ['push_timestamp__lt', 'push_timestamp__lte', 'push_timestamp__gt', 'push_timestamp__gte']: if filter_params.get(param): # translate push timestamp directly into a filter try: value = datetime.datetime.fromtimestamp(float(filter_params.get(param))) # depends on [control=['try'], data=[]] except ValueError: return Response({'detail': 'Invalid timestamp specified for {}'.format(param)}, status=HTTP_400_BAD_REQUEST) # depends on [control=['except'], data=[]] pushes = pushes.filter(**{param.replace('push_timestamp', 'time'): value}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['param']] for param in ['id__lt', 'id__lte', 'id__gt', 'id__gte', 'id']: try: value = int(filter_params.get(param, 0)) # depends on [control=['try'], data=[]] except 
ValueError: return Response({'detail': 'Invalid timestamp specified for {}'.format(param)}, status=HTTP_400_BAD_REQUEST) # depends on [control=['except'], data=[]] if value: pushes = pushes.filter(**{param: value}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['param']] id_in = filter_params.get('id__in') if id_in: try: id_in_list = [int(id) for id in id_in.split(',')] # depends on [control=['try'], data=[]] except ValueError: return Response({'detail': 'Invalid id__in specification'}, status=HTTP_400_BAD_REQUEST) # depends on [control=['except'], data=[]] pushes = pushes.filter(id__in=id_in_list) # depends on [control=['if'], data=[]] author = filter_params.get('author') if author: pushes = pushes.filter(author=author) # depends on [control=['if'], data=[]] try: count = int(filter_params.get('count', 10)) # depends on [control=['try'], data=[]] except ValueError: return Response({'detail': 'Valid count value required'}, status=HTTP_400_BAD_REQUEST) # depends on [control=['except'], data=[]] if count > MAX_PUSH_COUNT: msg = 'Specified count exceeds api limit: {}'.format(MAX_PUSH_COUNT) return Response({'detail': msg}, status=HTTP_400_BAD_REQUEST) # depends on [control=['if'], data=['MAX_PUSH_COUNT']] # we used to have a "full" parameter for this endpoint so you could # specify to not fetch the revision information if it was set to # false. however AFAIK no one ever used it (default was to fetch # everything), so let's just leave it out. it doesn't break # anything to send extra data when not required. pushes = pushes.select_related('repository').prefetch_related('commits')[:count] serializer = PushSerializer(pushes, many=True) meta['count'] = len(pushes) meta['repository'] = project meta['filter_params'] = filter_params resp = {'meta': meta, 'results': serializer.data} return Response(resp)
def set_if_missing(cfg, section, option, value):
    """Ensure *cfg* has *option* set in *section*, defaulting it to *value*.

    An existing value is left untouched; a missing section is created
    before the default is stored.
    """
    try:
        cfg.get(section, option)
    except (NoSectionError, NoOptionError) as missing:
        # the section itself may be absent -- create it before setting
        if isinstance(missing, NoSectionError):
            cfg.add_section(section)
        cfg.set(section, option, value)
def function[set_if_missing, parameter[cfg, section, option, value]]: constant[If the given option is missing, set to the given value.] <ast.Try object at 0x7da1b28b53c0>
keyword[def] identifier[set_if_missing] ( identifier[cfg] , identifier[section] , identifier[option] , identifier[value] ): literal[string] keyword[try] : identifier[cfg] . identifier[get] ( identifier[section] , identifier[option] ) keyword[except] identifier[NoSectionError] : identifier[cfg] . identifier[add_section] ( identifier[section] ) identifier[cfg] . identifier[set] ( identifier[section] , identifier[option] , identifier[value] ) keyword[except] identifier[NoOptionError] : identifier[cfg] . identifier[set] ( identifier[section] , identifier[option] , identifier[value] )
def set_if_missing(cfg, section, option, value): """If the given option is missing, set to the given value.""" try: cfg.get(section, option) # depends on [control=['try'], data=[]] except NoSectionError: cfg.add_section(section) cfg.set(section, option, value) # depends on [control=['except'], data=[]] except NoOptionError: cfg.set(section, option, value) # depends on [control=['except'], data=[]]
def holiday_description(self):
    """Return this date's holiday description (None when there is none).

    The Hebrew long form is used when ``self.hebrew`` is set, otherwise
    the English text.
    """
    description = self._holiday_entry().description
    if self.hebrew:
        return description.hebrew.long
    return description.english
def function[holiday_description, parameter[self]]: constant[ Return the holiday description. In case none exists will return None. ] variable[entry] assign[=] call[name[self]._holiday_entry, parameter[]] variable[desc] assign[=] name[entry].description return[<ast.IfExp object at 0x7da1b0aa75b0>]
keyword[def] identifier[holiday_description] ( identifier[self] ): literal[string] identifier[entry] = identifier[self] . identifier[_holiday_entry] () identifier[desc] = identifier[entry] . identifier[description] keyword[return] identifier[desc] . identifier[hebrew] . identifier[long] keyword[if] identifier[self] . identifier[hebrew] keyword[else] identifier[desc] . identifier[english]
def holiday_description(self): """ Return the holiday description. In case none exists will return None. """ entry = self._holiday_entry() desc = entry.description return desc.hebrew.long if self.hebrew else desc.english
def _copy(self, other, copy_func):
    """
    Replicate the state of another ParsableOctetString into this object

    :param other:
        Another instance of the same class

    :param copy_func:
        An reference of copy.copy() or copy.deepcopy() to use when copying
        lists, dicts and objects
    """
    super(ParsableOctetString, self)._copy(other, copy_func)
    # the parsed value goes through copy_func (shallow vs deep per caller);
    # the raw byte string is simply re-referenced
    self._parsed = copy_func(other._parsed)
    self._bytes = other._bytes
def function[_copy, parameter[self, other, copy_func]]: constant[ Copies the contents of another ParsableOctetString object to itself :param object: Another instance of the same class :param copy_func: An reference of copy.copy() or copy.deepcopy() to use when copying lists, dicts and objects ] call[call[name[super], parameter[name[ParsableOctetString], name[self]]]._copy, parameter[name[other], name[copy_func]]] name[self]._bytes assign[=] name[other]._bytes name[self]._parsed assign[=] call[name[copy_func], parameter[name[other]._parsed]]
keyword[def] identifier[_copy] ( identifier[self] , identifier[other] , identifier[copy_func] ): literal[string] identifier[super] ( identifier[ParsableOctetString] , identifier[self] ). identifier[_copy] ( identifier[other] , identifier[copy_func] ) identifier[self] . identifier[_bytes] = identifier[other] . identifier[_bytes] identifier[self] . identifier[_parsed] = identifier[copy_func] ( identifier[other] . identifier[_parsed] )
def _copy(self, other, copy_func): """ Copies the contents of another ParsableOctetString object to itself :param object: Another instance of the same class :param copy_func: An reference of copy.copy() or copy.deepcopy() to use when copying lists, dicts and objects """ super(ParsableOctetString, self)._copy(other, copy_func) self._bytes = other._bytes self._parsed = copy_func(other._parsed)
def _get_jamo_short_name(jamo):
    """Look up the Jamo_Short_Name property for a Jamo scalar value.

    See the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo
    Behavior, for the definition of this property:
    https://www.unicode.org/versions/latest/ch03.pdf

    :param jamo: Unicode scalar value representing a Jamo
    :return: string value of its Jamo_Short_Name property
    :raises ValueError: if *jamo* is not a Jamo scalar value
    """
    if _is_jamo(jamo):
        # the name table is loaded lazily on first use
        if not _jamo_short_names:
            _load_jamo_short_names()
        return _jamo_short_names[jamo]
    raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" % jamo)
def function[_get_jamo_short_name, parameter[jamo]]: constant[ Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property ] if <ast.UnaryOp object at 0x7da1b11e1720> begin[:] <ast.Raise object at 0x7da1b11e1bd0> if <ast.UnaryOp object at 0x7da1b11e1000> begin[:] call[name[_load_jamo_short_names], parameter[]] return[call[name[_jamo_short_names]][name[jamo]]]
keyword[def] identifier[_get_jamo_short_name] ( identifier[jamo] ): literal[string] keyword[if] keyword[not] identifier[_is_jamo] ( identifier[jamo] ): keyword[raise] identifier[ValueError] ( literal[string] % identifier[jamo] ) keyword[if] keyword[not] identifier[_jamo_short_names] : identifier[_load_jamo_short_names] () keyword[return] identifier[_jamo_short_names] [ identifier[jamo] ]
def _get_jamo_short_name(jamo): """ Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property """ if not _is_jamo(jamo): raise ValueError('Value 0x%0.4x passed in does not represent a Jamo!' % jamo) # depends on [control=['if'], data=[]] if not _jamo_short_names: _load_jamo_short_names() # depends on [control=['if'], data=[]] return _jamo_short_names[jamo]
def get_conn(self):
    """Return the cached Salesforce session, signing in on first use."""
    if self.conn:
        return self.conn
    connection = self.get_connection(self.conn_id)
    extras = connection.extra_dejson
    # build the client once and cache it on the hook instance
    self.conn = Salesforce(
        username=connection.login,
        password=connection.password,
        security_token=extras['security_token'],
        instance_url=connection.host,
        sandbox=extras.get('sandbox', False),
    )
    return self.conn
def function[get_conn, parameter[self]]: constant[ Sign into Salesforce, only if we are not already signed in. ] if <ast.UnaryOp object at 0x7da1b03a0e80> begin[:] variable[connection] assign[=] call[name[self].get_connection, parameter[name[self].conn_id]] variable[extras] assign[=] name[connection].extra_dejson name[self].conn assign[=] call[name[Salesforce], parameter[]] return[name[self].conn]
keyword[def] identifier[get_conn] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[conn] : identifier[connection] = identifier[self] . identifier[get_connection] ( identifier[self] . identifier[conn_id] ) identifier[extras] = identifier[connection] . identifier[extra_dejson] identifier[self] . identifier[conn] = identifier[Salesforce] ( identifier[username] = identifier[connection] . identifier[login] , identifier[password] = identifier[connection] . identifier[password] , identifier[security_token] = identifier[extras] [ literal[string] ], identifier[instance_url] = identifier[connection] . identifier[host] , identifier[sandbox] = identifier[extras] . identifier[get] ( literal[string] , keyword[False] ) ) keyword[return] identifier[self] . identifier[conn]
def get_conn(self): """ Sign into Salesforce, only if we are not already signed in. """ if not self.conn: connection = self.get_connection(self.conn_id) extras = connection.extra_dejson self.conn = Salesforce(username=connection.login, password=connection.password, security_token=extras['security_token'], instance_url=connection.host, sandbox=extras.get('sandbox', False)) # depends on [control=['if'], data=[]] return self.conn
def create_turnover_tear_sheet(factor_data, turnover_periods=None):
    """
    Creates a tear sheet for analyzing the turnover properties of a factor.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
        containing the values for a single alpha factor, forward returns for
        each period, the factor quantile/bin that factor value belongs to, and
        (optionally) the group the asset belongs to.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    turnover_periods : sequence[string], optional
        Periods to compute turnover analysis on. By default periods in
        'factor_data' are used but custom periods can provided instead. This
        can be useful when periods in 'factor_data' are not multiples of the
        frequency at which factor values are computed i.e. the periods
        are 2h and 4h and the factor is computed daily and so values like
        ['1D', '2D'] could be used instead
    """

    # default the analysis periods to the forward-returns columns present
    # in factor_data
    if turnover_periods is None:
        turnover_periods = utils.get_forward_returns_columns(
            factor_data.columns)

    quantile_factor = factor_data['factor_quantile']

    # one DataFrame per period, with a turnover column for each quantile
    # (1 .. max quantile)
    quantile_turnover = \
        {p: pd.concat([perf.quantile_turnover(quantile_factor, q, p)
                       for q in range(1, int(quantile_factor.max()) + 1)],
                      axis=1)
         for p in turnover_periods}

    # factor rank autocorrelation, one column per analysis period
    autocorrelation = pd.concat(
        [perf.factor_rank_autocorrelation(factor_data, period)
         for period in turnover_periods], axis=1)

    plotting.plot_turnover_table(autocorrelation, quantile_turnover)

    # grid sizing: rows for the turnover and autocorrelation plots below;
    # columns_wide is fixed at 1, so rows_when_wide reduces to fr_cols
    fr_cols = len(turnover_periods)
    columns_wide = 1
    rows_when_wide = (((fr_cols - 1) // 1) + 1)
    vertical_sections = fr_cols + 3 * rows_when_wide + 2 * fr_cols
    gf = GridFigure(rows=vertical_sections, cols=columns_wide)

    for period in turnover_periods:
        # skip periods whose turnover could not be computed (all NaN)
        if quantile_turnover[period].isnull().all().all():
            continue
        plotting.plot_top_bottom_quantile_turnover(quantile_turnover[period],
                                                   period=period,
                                                   ax=gf.next_row())

    for period in autocorrelation:
        # skip periods whose autocorrelation is entirely NaN
        if autocorrelation[period].isnull().all():
            continue
        plotting.plot_factor_rank_auto_correlation(autocorrelation[period],
                                                   period=period,
                                                   ax=gf.next_row())

    plt.show()
    gf.close()
def function[create_turnover_tear_sheet, parameter[factor_data, turnover_periods]]: constant[ Creates a tear sheet for analyzing the turnover properties of a factor. Parameters ---------- factor_data : pd.DataFrame - MultiIndex A MultiIndex DataFrame indexed by date (level 0) and asset (level 1), containing the values for a single alpha factor, forward returns for each period, the factor quantile/bin that factor value belongs to, and (optionally) the group the asset belongs to. - See full explanation in utils.get_clean_factor_and_forward_returns turnover_periods : sequence[string], optional Periods to compute turnover analysis on. By default periods in 'factor_data' are used but custom periods can provided instead. This can be useful when periods in 'factor_data' are not multiples of the frequency at which factor values are computed i.e. the periods are 2h and 4h and the factor is computed daily and so values like ['1D', '2D'] could be used instead ] if compare[name[turnover_periods] is constant[None]] begin[:] variable[turnover_periods] assign[=] call[name[utils].get_forward_returns_columns, parameter[name[factor_data].columns]] variable[quantile_factor] assign[=] call[name[factor_data]][constant[factor_quantile]] variable[quantile_turnover] assign[=] <ast.DictComp object at 0x7da20c7c9ae0> variable[autocorrelation] assign[=] call[name[pd].concat, parameter[<ast.ListComp object at 0x7da20c7c8100>]] call[name[plotting].plot_turnover_table, parameter[name[autocorrelation], name[quantile_turnover]]] variable[fr_cols] assign[=] call[name[len], parameter[name[turnover_periods]]] variable[columns_wide] assign[=] constant[1] variable[rows_when_wide] assign[=] binary_operation[binary_operation[binary_operation[name[fr_cols] - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[1]] + constant[1]] variable[vertical_sections] assign[=] binary_operation[binary_operation[name[fr_cols] + binary_operation[constant[3] * name[rows_when_wide]]] + 
binary_operation[constant[2] * name[fr_cols]]] variable[gf] assign[=] call[name[GridFigure], parameter[]] for taget[name[period]] in starred[name[turnover_periods]] begin[:] if call[call[call[call[name[quantile_turnover]][name[period]].isnull, parameter[]].all, parameter[]].all, parameter[]] begin[:] continue call[name[plotting].plot_top_bottom_quantile_turnover, parameter[call[name[quantile_turnover]][name[period]]]] for taget[name[period]] in starred[name[autocorrelation]] begin[:] if call[call[call[name[autocorrelation]][name[period]].isnull, parameter[]].all, parameter[]] begin[:] continue call[name[plotting].plot_factor_rank_auto_correlation, parameter[call[name[autocorrelation]][name[period]]]] call[name[plt].show, parameter[]] call[name[gf].close, parameter[]]
keyword[def] identifier[create_turnover_tear_sheet] ( identifier[factor_data] , identifier[turnover_periods] = keyword[None] ): literal[string] keyword[if] identifier[turnover_periods] keyword[is] keyword[None] : identifier[turnover_periods] = identifier[utils] . identifier[get_forward_returns_columns] ( identifier[factor_data] . identifier[columns] ) identifier[quantile_factor] = identifier[factor_data] [ literal[string] ] identifier[quantile_turnover] ={ identifier[p] : identifier[pd] . identifier[concat] ([ identifier[perf] . identifier[quantile_turnover] ( identifier[quantile_factor] , identifier[q] , identifier[p] ) keyword[for] identifier[q] keyword[in] identifier[range] ( literal[int] , identifier[int] ( identifier[quantile_factor] . identifier[max] ())+ literal[int] )], identifier[axis] = literal[int] ) keyword[for] identifier[p] keyword[in] identifier[turnover_periods] } identifier[autocorrelation] = identifier[pd] . identifier[concat] ( [ identifier[perf] . identifier[factor_rank_autocorrelation] ( identifier[factor_data] , identifier[period] ) keyword[for] identifier[period] keyword[in] identifier[turnover_periods] ], identifier[axis] = literal[int] ) identifier[plotting] . identifier[plot_turnover_table] ( identifier[autocorrelation] , identifier[quantile_turnover] ) identifier[fr_cols] = identifier[len] ( identifier[turnover_periods] ) identifier[columns_wide] = literal[int] identifier[rows_when_wide] =((( identifier[fr_cols] - literal[int] )// literal[int] )+ literal[int] ) identifier[vertical_sections] = identifier[fr_cols] + literal[int] * identifier[rows_when_wide] + literal[int] * identifier[fr_cols] identifier[gf] = identifier[GridFigure] ( identifier[rows] = identifier[vertical_sections] , identifier[cols] = identifier[columns_wide] ) keyword[for] identifier[period] keyword[in] identifier[turnover_periods] : keyword[if] identifier[quantile_turnover] [ identifier[period] ]. identifier[isnull] (). identifier[all] (). 
identifier[all] (): keyword[continue] identifier[plotting] . identifier[plot_top_bottom_quantile_turnover] ( identifier[quantile_turnover] [ identifier[period] ], identifier[period] = identifier[period] , identifier[ax] = identifier[gf] . identifier[next_row] ()) keyword[for] identifier[period] keyword[in] identifier[autocorrelation] : keyword[if] identifier[autocorrelation] [ identifier[period] ]. identifier[isnull] (). identifier[all] (): keyword[continue] identifier[plotting] . identifier[plot_factor_rank_auto_correlation] ( identifier[autocorrelation] [ identifier[period] ], identifier[period] = identifier[period] , identifier[ax] = identifier[gf] . identifier[next_row] ()) identifier[plt] . identifier[show] () identifier[gf] . identifier[close] ()
def create_turnover_tear_sheet(factor_data, turnover_periods=None): """ Creates a tear sheet for analyzing the turnover properties of a factor. Parameters ---------- factor_data : pd.DataFrame - MultiIndex A MultiIndex DataFrame indexed by date (level 0) and asset (level 1), containing the values for a single alpha factor, forward returns for each period, the factor quantile/bin that factor value belongs to, and (optionally) the group the asset belongs to. - See full explanation in utils.get_clean_factor_and_forward_returns turnover_periods : sequence[string], optional Periods to compute turnover analysis on. By default periods in 'factor_data' are used but custom periods can provided instead. This can be useful when periods in 'factor_data' are not multiples of the frequency at which factor values are computed i.e. the periods are 2h and 4h and the factor is computed daily and so values like ['1D', '2D'] could be used instead """ if turnover_periods is None: turnover_periods = utils.get_forward_returns_columns(factor_data.columns) # depends on [control=['if'], data=['turnover_periods']] quantile_factor = factor_data['factor_quantile'] quantile_turnover = {p: pd.concat([perf.quantile_turnover(quantile_factor, q, p) for q in range(1, int(quantile_factor.max()) + 1)], axis=1) for p in turnover_periods} autocorrelation = pd.concat([perf.factor_rank_autocorrelation(factor_data, period) for period in turnover_periods], axis=1) plotting.plot_turnover_table(autocorrelation, quantile_turnover) fr_cols = len(turnover_periods) columns_wide = 1 rows_when_wide = (fr_cols - 1) // 1 + 1 vertical_sections = fr_cols + 3 * rows_when_wide + 2 * fr_cols gf = GridFigure(rows=vertical_sections, cols=columns_wide) for period in turnover_periods: if quantile_turnover[period].isnull().all().all(): continue # depends on [control=['if'], data=[]] plotting.plot_top_bottom_quantile_turnover(quantile_turnover[period], period=period, ax=gf.next_row()) # depends on [control=['for'], 
data=['period']] for period in autocorrelation: if autocorrelation[period].isnull().all(): continue # depends on [control=['if'], data=[]] plotting.plot_factor_rank_auto_correlation(autocorrelation[period], period=period, ax=gf.next_row()) # depends on [control=['for'], data=['period']] plt.show() gf.close()
def create_widget(self, place, type, file=None, **kwargs):
    '''
    Build a widget instance of the given type.

    When a file object is given and any widget attribute is callable,
    those callables are resolved by calling them with the file as first
    parameter and using their return value instead.

    All extra `kwargs` parameters will be passed to widget constructor.

    :param place: place hint where widget should be shown.
    :type place: str
    :param type: widget type name as taken from :attr:`widget_types` dict
                 keys.
    :type type: str
    :param file: optional file object for widget attribute resolving
    :type type: browsepy.files.Node or None
    :returns: widget instance
    :rtype: object
    '''
    widget_class = self.widget_types.get(type, self.widget_types['base'])
    kwargs.update(place=place, type=type)
    try:
        element = widget_class(**kwargs)
    except TypeError as e:
        message = e.args[0] if e.args else ''
        # a signature mismatch means the caller passed bad widget params
        markers = ('unexpected keyword argument', 'required positional argument')
        if any(marker in message for marker in markers):
            raise WidgetParameterException(
                'type %s; %s; available: %r'
                % (type, message, widget_class._fields)
                )
        raise e
    if file and any(callable(attr) for attr in element):
        return self._resolve_widget(file, element)
    return element
def function[create_widget, parameter[self, place, type, file]]: constant[ Create a widget object based on given arguments. If file object is provided, callable arguments will be resolved: its return value will be used after calling them with file as first parameter. All extra `kwargs` parameters will be passed to widget constructor. :param place: place hint where widget should be shown. :type place: str :param type: widget type name as taken from :attr:`widget_types` dict keys. :type type: str :param file: optional file object for widget attribute resolving :type type: browsepy.files.Node or None :returns: widget instance :rtype: object ] variable[widget_class] assign[=] call[name[self].widget_types.get, parameter[name[type], call[name[self].widget_types][constant[base]]]] call[name[kwargs].update, parameter[]] <ast.Try object at 0x7da2046237c0> if <ast.BoolOp object at 0x7da204623160> begin[:] return[call[name[self]._resolve_widget, parameter[name[file], name[element]]]] return[name[element]]
keyword[def] identifier[create_widget] ( identifier[self] , identifier[place] , identifier[type] , identifier[file] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[widget_class] = identifier[self] . identifier[widget_types] . identifier[get] ( identifier[type] , identifier[self] . identifier[widget_types] [ literal[string] ]) identifier[kwargs] . identifier[update] ( identifier[place] = identifier[place] , identifier[type] = identifier[type] ) keyword[try] : identifier[element] = identifier[widget_class] (** identifier[kwargs] ) keyword[except] identifier[TypeError] keyword[as] identifier[e] : identifier[message] = identifier[e] . identifier[args] [ literal[int] ] keyword[if] identifier[e] . identifier[args] keyword[else] literal[string] keyword[if] ( literal[string] keyword[in] identifier[message] keyword[or] literal[string] keyword[in] identifier[message] ): keyword[raise] identifier[WidgetParameterException] ( literal[string] %( identifier[type] , identifier[message] , identifier[widget_class] . identifier[_fields] ) ) keyword[raise] identifier[e] keyword[if] identifier[file] keyword[and] identifier[any] ( identifier[map] ( identifier[callable] , identifier[element] )): keyword[return] identifier[self] . identifier[_resolve_widget] ( identifier[file] , identifier[element] ) keyword[return] identifier[element]
def create_widget(self, place, type, file=None, **kwargs): """ Create a widget object based on given arguments. If file object is provided, callable arguments will be resolved: its return value will be used after calling them with file as first parameter. All extra `kwargs` parameters will be passed to widget constructor. :param place: place hint where widget should be shown. :type place: str :param type: widget type name as taken from :attr:`widget_types` dict keys. :type type: str :param file: optional file object for widget attribute resolving :type type: browsepy.files.Node or None :returns: widget instance :rtype: object """ widget_class = self.widget_types.get(type, self.widget_types['base']) kwargs.update(place=place, type=type) try: element = widget_class(**kwargs) # depends on [control=['try'], data=[]] except TypeError as e: message = e.args[0] if e.args else '' if 'unexpected keyword argument' in message or 'required positional argument' in message: raise WidgetParameterException('type %s; %s; available: %r' % (type, message, widget_class._fields)) # depends on [control=['if'], data=[]] raise e # depends on [control=['except'], data=['e']] if file and any(map(callable, element)): return self._resolve_widget(file, element) # depends on [control=['if'], data=[]] return element
def load_config(self, filename):
    ''' Load values from an *.ini style config file.

        If the config file contains sections, their names are used as
        namespaces for the values within. The two special sections
        ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix).
    '''
    conf = ConfigParser()
    conf.read(filename)
    for section in conf.sections():
        # root-namespace sections get no prefix; others are dotted
        prefix = '' if section in ('DEFAULT', 'bottle') else section + '.'
        for key, value in conf.items(section):
            self[prefix + key] = value
    return self
def function[load_config, parameter[self, filename]]: constant[ Load values from an *.ini style config file. If the config file contains sections, their names are used as namespaces for the values within. The two special sections ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix). ] variable[conf] assign[=] call[name[ConfigParser], parameter[]] call[name[conf].read, parameter[name[filename]]] for taget[name[section]] in starred[call[name[conf].sections, parameter[]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18dc06bc0>, <ast.Name object at 0x7da18dc04b20>]]] in starred[call[name[conf].items, parameter[name[section]]]] begin[:] if compare[name[section] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20e956200>, <ast.Constant object at 0x7da20e954610>]]] begin[:] variable[key] assign[=] binary_operation[binary_operation[name[section] + constant[.]] + name[key]] call[name[self]][name[key]] assign[=] name[value] return[name[self]]
keyword[def] identifier[load_config] ( identifier[self] , identifier[filename] ): literal[string] identifier[conf] = identifier[ConfigParser] () identifier[conf] . identifier[read] ( identifier[filename] ) keyword[for] identifier[section] keyword[in] identifier[conf] . identifier[sections] (): keyword[for] identifier[key] , identifier[value] keyword[in] identifier[conf] . identifier[items] ( identifier[section] ): keyword[if] identifier[section] keyword[not] keyword[in] ( literal[string] , literal[string] ): identifier[key] = identifier[section] + literal[string] + identifier[key] identifier[self] [ identifier[key] ]= identifier[value] keyword[return] identifier[self]
def load_config(self, filename): """ Load values from an *.ini style config file. If the config file contains sections, their names are used as namespaces for the values within. The two special sections ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix). """ conf = ConfigParser() conf.read(filename) for section in conf.sections(): for (key, value) in conf.items(section): if section not in ('DEFAULT', 'bottle'): key = section + '.' + key # depends on [control=['if'], data=['section']] self[key] = value # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['section']] return self
def build_acl_port(self, port, enabled=True):
    """Build the ACL clause for an L4 port specification.

    :param port: port string -- either a single port (e.g. ``'80'``) or a
        colon-separated range (e.g. ``'100:200'``); may be ``None``
    :param enabled: when False, the clause is suffixed with ``inactive``
    :returns: the ACL clause string, or ``None`` when no port was given
    """
    # No port given: nothing to build (made explicit; the original fell
    # through and returned None implicitly).
    if port is None:
        return None
    if ':' in port:
        # "100:200" -> "range 100 200 " (renamed local so the builtin
        # `range` is no longer shadowed)
        port_range = port.replace(':', ' ')
        acl = "range %(range)s " % {'range': port_range}
    else:
        acl = "eq %(port)s " % {'port': port}
    if not enabled:
        acl += "inactive"
    return acl
def function[build_acl_port, parameter[self, port, enabled]]: constant[Build the acl for L4 Ports. ] if compare[name[port] is_not constant[None]] begin[:] if compare[constant[:] in name[port]] begin[:] variable[range] assign[=] call[name[port].replace, parameter[constant[:], constant[ ]]] variable[acl] assign[=] binary_operation[constant[range %(range)s ] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b1be4580>], [<ast.Name object at 0x7da1b1be5540>]]] return[name[acl]]
keyword[def] identifier[build_acl_port] ( identifier[self] , identifier[port] , identifier[enabled] = keyword[True] ): literal[string] keyword[if] identifier[port] keyword[is] keyword[not] keyword[None] : keyword[if] literal[string] keyword[in] identifier[port] : identifier[range] = identifier[port] . identifier[replace] ( literal[string] , literal[string] ) identifier[acl] = literal[string] %{ literal[string] : identifier[range] } keyword[else] : identifier[acl] = literal[string] %{ literal[string] : identifier[port] } keyword[if] keyword[not] identifier[enabled] : identifier[acl] += literal[string] keyword[return] identifier[acl]
def build_acl_port(self, port, enabled=True): """Build the acl for L4 Ports. """ if port is not None: if ':' in port: range = port.replace(':', ' ') acl = 'range %(range)s ' % {'range': range} # depends on [control=['if'], data=['port']] else: acl = 'eq %(port)s ' % {'port': port} if not enabled: acl += 'inactive' # depends on [control=['if'], data=[]] return acl # depends on [control=['if'], data=['port']]
def docutils_sucks(spec):
    """
    Yeah.

    It doesn't allow using a class because it does stupid stuff like try to set
    attributes on the callable object rather than just keeping a dict.
    """

    base_url = VALIDATION_SPEC
    ref_url = "https://json-schema.org/draft-04/json-schema-core.html#rfc.section.4.1"
    schema_url = "https://json-schema.org/draft-04/json-schema-core.html#rfc.section.6"

    # tokens that always resolve to a fixed URL, regardless of the spec
    special_uris = {"$ref": ref_url, "$schema": schema_url}

    def validator(name, raw_text, text, lineno, inliner):
        """
        Link to the JSON Schema documentation for a validator.

        Arguments:

            name (str):

                the name of the role in the document

            raw_source (str):

                the raw text (role with argument)

            text (str):

                the argument given to the role

            lineno (int):

                the line number

            inliner (docutils.parsers.rst.states.Inliner):

                the inliner

        Returns:

            tuple:

                a 2-tuple of nodes to insert into the document and an
                iterable of system messages, both possibly empty
        """
        fixed = special_uris.get(text)
        if fixed is not None:
            return [nodes.reference(raw_text, text, refuri=fixed)], []

        # look up the matching <h1> heading in the validation spec
        headers = spec.xpath("//h1[contains(text(), '{0}')]".format(text))
        if not headers:
            inliner.reporter.warning(
                "Didn't find a target for {0}".format(text),
            )
            uri = base_url
        else:
            if len(headers) > 1:
                inliner.reporter.info(
                    "Found multiple targets for {0}".format(text),
                )
            # anchor comes from the link inside the matched heading
            uri = base_url + headers[0].find('a').attrib["href"]

        return [nodes.reference(raw_text, text, refuri=uri)], []

    return validator
def function[docutils_sucks, parameter[spec]]: constant[ Yeah. It doesn't allow using a class because it does stupid stuff like try to set attributes on the callable object rather than just keeping a dict. ] variable[base_url] assign[=] name[VALIDATION_SPEC] variable[ref_url] assign[=] constant[https://json-schema.org/draft-04/json-schema-core.html#rfc.section.4.1] variable[schema_url] assign[=] constant[https://json-schema.org/draft-04/json-schema-core.html#rfc.section.6] def function[validator, parameter[name, raw_text, text, lineno, inliner]]: constant[ Link to the JSON Schema documentation for a validator. Arguments: name (str): the name of the role in the document raw_source (str): the raw text (role with argument) text (str): the argument given to the role lineno (int): the line number inliner (docutils.parsers.rst.states.Inliner): the inliner Returns: tuple: a 2-tuple of nodes to insert into the document and an iterable of system messages, both possibly empty ] if compare[name[text] equal[==] constant[$ref]] begin[:] return[tuple[[<ast.List object at 0x7da2054a6c80>, <ast.List object at 0x7da2054a5cc0>]]] variable[header] assign[=] call[name[spec].xpath, parameter[call[constant[//h1[contains(text(), '{0}')]].format, parameter[name[text]]]]] if compare[call[name[len], parameter[name[header]]] equal[==] constant[0]] begin[:] call[name[inliner].reporter.warning, parameter[call[constant[Didn't find a target for {0}].format, parameter[name[text]]]]] variable[uri] assign[=] name[base_url] variable[reference] assign[=] call[name[nodes].reference, parameter[name[raw_text], name[text]]] return[tuple[[<ast.List object at 0x7da20c6ab250>, <ast.List object at 0x7da20c6abbe0>]]] return[name[validator]]
keyword[def] identifier[docutils_sucks] ( identifier[spec] ): literal[string] identifier[base_url] = identifier[VALIDATION_SPEC] identifier[ref_url] = literal[string] identifier[schema_url] = literal[string] keyword[def] identifier[validator] ( identifier[name] , identifier[raw_text] , identifier[text] , identifier[lineno] , identifier[inliner] ): literal[string] keyword[if] identifier[text] == literal[string] : keyword[return] [ identifier[nodes] . identifier[reference] ( identifier[raw_text] , identifier[text] , identifier[refuri] = identifier[ref_url] )],[] keyword[elif] identifier[text] == literal[string] : keyword[return] [ identifier[nodes] . identifier[reference] ( identifier[raw_text] , identifier[text] , identifier[refuri] = identifier[schema_url] )],[] identifier[header] = identifier[spec] . identifier[xpath] ( literal[string] . identifier[format] ( identifier[text] )) keyword[if] identifier[len] ( identifier[header] )== literal[int] : identifier[inliner] . identifier[reporter] . identifier[warning] ( literal[string] . identifier[format] ( identifier[text] ), ) identifier[uri] = identifier[base_url] keyword[else] : keyword[if] identifier[len] ( identifier[header] )> literal[int] : identifier[inliner] . identifier[reporter] . identifier[info] ( literal[string] . identifier[format] ( identifier[text] ), ) identifier[uri] = identifier[base_url] + identifier[header] [ literal[int] ]. identifier[find] ( literal[string] ). identifier[attrib] [ literal[string] ] identifier[reference] = identifier[nodes] . identifier[reference] ( identifier[raw_text] , identifier[text] , identifier[refuri] = identifier[uri] ) keyword[return] [ identifier[reference] ],[] keyword[return] identifier[validator]
def docutils_sucks(spec): """ Yeah. It doesn't allow using a class because it does stupid stuff like try to set attributes on the callable object rather than just keeping a dict. """ base_url = VALIDATION_SPEC ref_url = 'https://json-schema.org/draft-04/json-schema-core.html#rfc.section.4.1' schema_url = 'https://json-schema.org/draft-04/json-schema-core.html#rfc.section.6' def validator(name, raw_text, text, lineno, inliner): """ Link to the JSON Schema documentation for a validator. Arguments: name (str): the name of the role in the document raw_source (str): the raw text (role with argument) text (str): the argument given to the role lineno (int): the line number inliner (docutils.parsers.rst.states.Inliner): the inliner Returns: tuple: a 2-tuple of nodes to insert into the document and an iterable of system messages, both possibly empty """ if text == '$ref': return ([nodes.reference(raw_text, text, refuri=ref_url)], []) # depends on [control=['if'], data=['text']] elif text == '$schema': return ([nodes.reference(raw_text, text, refuri=schema_url)], []) # depends on [control=['if'], data=['text']] # find the header in the validation spec containing matching text header = spec.xpath("//h1[contains(text(), '{0}')]".format(text)) if len(header) == 0: inliner.reporter.warning("Didn't find a target for {0}".format(text)) uri = base_url # depends on [control=['if'], data=[]] else: if len(header) > 1: inliner.reporter.info('Found multiple targets for {0}'.format(text)) # depends on [control=['if'], data=[]] # get the href from link in the header uri = base_url + header[0].find('a').attrib['href'] reference = nodes.reference(raw_text, text, refuri=uri) return ([reference], []) return validator
def dataset_status_cli(self, dataset, dataset_opt=None): """ wrapper for client for dataset_status, with additional dataset_opt to get the status of a dataset from the API Parameters ========== dataset_opt: an alternative to dataset """ dataset = dataset or dataset_opt return self.dataset_status(dataset)
def function[dataset_status_cli, parameter[self, dataset, dataset_opt]]: constant[ wrapper for client for dataset_status, with additional dataset_opt to get the status of a dataset from the API Parameters ========== dataset_opt: an alternative to dataset ] variable[dataset] assign[=] <ast.BoolOp object at 0x7da1b21a4040> return[call[name[self].dataset_status, parameter[name[dataset]]]]
keyword[def] identifier[dataset_status_cli] ( identifier[self] , identifier[dataset] , identifier[dataset_opt] = keyword[None] ): literal[string] identifier[dataset] = identifier[dataset] keyword[or] identifier[dataset_opt] keyword[return] identifier[self] . identifier[dataset_status] ( identifier[dataset] )
def dataset_status_cli(self, dataset, dataset_opt=None): """ wrapper for client for dataset_status, with additional dataset_opt to get the status of a dataset from the API Parameters ========== dataset_opt: an alternative to dataset """ dataset = dataset or dataset_opt return self.dataset_status(dataset)
def kind(path, user=None): """ Get the kind of item ("file" or "directory") that the path references. Return :obj:`None` if ``path`` doesn't exist. """ hostname, port, path = split(path, user=user) fs = hdfs_fs.hdfs(hostname, port) try: return fs.get_path_info(path)['kind'] except IOError: return None finally: fs.close()
def function[kind, parameter[path, user]]: constant[ Get the kind of item ("file" or "directory") that the path references. Return :obj:`None` if ``path`` doesn't exist. ] <ast.Tuple object at 0x7da18bc73400> assign[=] call[name[split], parameter[name[path]]] variable[fs] assign[=] call[name[hdfs_fs].hdfs, parameter[name[hostname], name[port]]] <ast.Try object at 0x7da1b120a200>
keyword[def] identifier[kind] ( identifier[path] , identifier[user] = keyword[None] ): literal[string] identifier[hostname] , identifier[port] , identifier[path] = identifier[split] ( identifier[path] , identifier[user] = identifier[user] ) identifier[fs] = identifier[hdfs_fs] . identifier[hdfs] ( identifier[hostname] , identifier[port] ) keyword[try] : keyword[return] identifier[fs] . identifier[get_path_info] ( identifier[path] )[ literal[string] ] keyword[except] identifier[IOError] : keyword[return] keyword[None] keyword[finally] : identifier[fs] . identifier[close] ()
def kind(path, user=None): """ Get the kind of item ("file" or "directory") that the path references. Return :obj:`None` if ``path`` doesn't exist. """ (hostname, port, path) = split(path, user=user) fs = hdfs_fs.hdfs(hostname, port) try: return fs.get_path_info(path)['kind'] # depends on [control=['try'], data=[]] except IOError: return None # depends on [control=['except'], data=[]] finally: fs.close()
def run_driz(imageObjectList,output_wcs,paramDict,single,build,wcsmap=None): """ Perform drizzle operation on input to create output. The input parameters originally was a list of dictionaries, one for each input, that matches the primary parameters for an ``IRAF`` `drizzle` task. This method would then loop over all the entries in the list and run `drizzle` for each entry. Parameters required for input in paramDict: build,single,units,wt_scl,pixfrac,kernel,fillval, rot,scale,xsh,ysh,blotnx,blotny,outnx,outny,data """ # Insure that input imageObject is a list if not isinstance(imageObjectList, list): imageObjectList = [imageObjectList] # # Setup the versions info dictionary for output to PRIMARY header # The keys will be used as the name reported in the header, as-is # _versions = {'AstroDrizzle':__version__, 'PyFITS':util.__fits_version__, 'Numpy':util.__numpy_version__} # Set sub-sampling rate for drizzling #stepsize = 2.0 log.info(' **Using sub-sampling value of %s for kernel %s' % (paramDict['stepsize'], paramDict['kernel'])) maskval = interpret_maskval(paramDict) outwcs = copy.deepcopy(output_wcs) # Check for existance of output file. if (not single and build and fileutil.findFile(imageObjectList[0].outputNames['outFinal'])): log.info('Removing previous output product...') os.remove(imageObjectList[0].outputNames['outFinal']) # print out parameters being used for drizzling log.info("Running Drizzle to create output frame with WCS of: ") output_wcs.printwcs() # Will we be running in parallel? pool_size = util.get_pool_size(paramDict.get('num_cores'), len(imageObjectList)) will_parallel = single and pool_size > 1 if will_parallel: log.info('Executing %d parallel workers' % pool_size) else: if single: # not yet an option for final drizzle, msg would confuse log.info('Executing serially') # Set parameters for each input and run drizzle on it here. # # Perform drizzling... 
numctx = 0 for img in imageObjectList: numctx += img._nmembers _numctx = {'all':numctx} # if single: # Determine how many chips make up each single image for img in imageObjectList: for chip in img.returnAllChips(extname=img.scienceExt): plsingle = chip.outputNames['outSingle'] if plsingle in _numctx: _numctx[plsingle] += 1 else: _numctx[plsingle] = 1 # Compute how many planes will be needed for the context image. _nplanes = int((_numctx['all']-1) / 32) + 1 # For single drizzling or when context is turned off, # minimize to 1 plane only... if single or imageObjectList[0][1].outputNames['outContext'] in [None,'',' ']: _nplanes = 1 # # An image buffer needs to be setup for converting the input # arrays (sci and wht) from FITS format to native format # with respect to byteorder and byteswapping. # This buffer should be reused for each input if possible. # _outsci = _outwht = _outctx = _hdrlist = None if (not single) or \ (single and (not will_parallel) and (not imageObjectList[0].inmemory)): # Note there are four cases/combinations for single drizzle alone here: # (not-inmem, serial), (not-inmem, parallel), (inmem, serial), (inmem, parallel) _outsci=np.empty(output_wcs.array_shape, dtype=np.float32) _outsci.fill(maskval) _outwht=np.zeros(output_wcs.array_shape, dtype=np.float32) # initialize context to 3-D array but only pass appropriate plane to drizzle as needed _outctx=np.zeros((_nplanes,) + output_wcs.array_shape, dtype=np.int32) _hdrlist = [] # Keep track of how many chips have been processed # For single case, this will determine when to close # one product and open the next. _chipIdx = 0 # Remember the name of the 1st image that goes into this particular product # Insure that the header reports the proper values for the start of the # exposure time used to make this; in particular, TIME-OBS and DATE-OBS. 
template = None # # Work on each image # subprocs = [] for img in imageObjectList: chiplist = img.returnAllChips(extname=img.scienceExt) # How many inputs should go into this product? num_in_prod = _numctx['all'] if single: num_in_prod = _numctx[chiplist[0].outputNames['outSingle']] # The name of the 1st image fnames = [] for chip in chiplist: fnames.append(chip.outputNames['data']) if _chipIdx == 0: template = fnames else: template.extend(fnames) # Work each image, possibly in parallel if will_parallel: # use multiprocessing.Manager only if in parallel and in memory if img.inmemory: manager = multiprocessing.Manager() dproxy = manager.dict(img.virtualOutputs) # copy & wrap it in proxy img.virtualOutputs = dproxy # parallelize run_driz_img (currently for separate drizzle only) p = multiprocessing.Process(target=run_driz_img, name='adrizzle.run_driz_img()', # for err msgs args=(img,chiplist,output_wcs,outwcs,template,paramDict, single,num_in_prod,build,_versions,_numctx,_nplanes, _chipIdx,None,None,None,None,wcsmap)) subprocs.append(p) else: # serial run_driz_img run (either separate drizzle or final drizzle) run_driz_img(img,chiplist,output_wcs,outwcs,template,paramDict, single,num_in_prod,build,_versions,_numctx,_nplanes, _chipIdx,_outsci,_outwht,_outctx,_hdrlist,wcsmap) # Increment/reset master chip counter _chipIdx += len(chiplist) if _chipIdx == num_in_prod: _chipIdx = 0 # do the join if we spawned tasks if will_parallel: mputil.launch_and_wait(subprocs, pool_size) # blocks till all done del _outsci,_outwht,_outctx,_hdrlist
def function[run_driz, parameter[imageObjectList, output_wcs, paramDict, single, build, wcsmap]]: constant[ Perform drizzle operation on input to create output. The input parameters originally was a list of dictionaries, one for each input, that matches the primary parameters for an ``IRAF`` `drizzle` task. This method would then loop over all the entries in the list and run `drizzle` for each entry. Parameters required for input in paramDict: build,single,units,wt_scl,pixfrac,kernel,fillval, rot,scale,xsh,ysh,blotnx,blotny,outnx,outny,data ] if <ast.UnaryOp object at 0x7da1b1c23fd0> begin[:] variable[imageObjectList] assign[=] list[[<ast.Name object at 0x7da1b1c23e50>]] variable[_versions] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c21b40>, <ast.Constant object at 0x7da1b1c21bd0>, <ast.Constant object at 0x7da1b1c23910>], [<ast.Name object at 0x7da1b1c22f50>, <ast.Attribute object at 0x7da1b1c22d70>, <ast.Attribute object at 0x7da1b1c22a70>]] call[name[log].info, parameter[binary_operation[constant[ **Using sub-sampling value of %s for kernel %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1c22e90>, <ast.Subscript object at 0x7da1b1c21630>]]]]] variable[maskval] assign[=] call[name[interpret_maskval], parameter[name[paramDict]]] variable[outwcs] assign[=] call[name[copy].deepcopy, parameter[name[output_wcs]]] if <ast.BoolOp object at 0x7da1b1bcac20> begin[:] call[name[log].info, parameter[constant[Removing previous output product...]]] call[name[os].remove, parameter[call[call[name[imageObjectList]][constant[0]].outputNames][constant[outFinal]]]] call[name[log].info, parameter[constant[Running Drizzle to create output frame with WCS of: ]]] call[name[output_wcs].printwcs, parameter[]] variable[pool_size] assign[=] call[name[util].get_pool_size, parameter[call[name[paramDict].get, parameter[constant[num_cores]]], call[name[len], parameter[name[imageObjectList]]]]] variable[will_parallel] assign[=] <ast.BoolOp object at 
0x7da1b1bca320> if name[will_parallel] begin[:] call[name[log].info, parameter[binary_operation[constant[Executing %d parallel workers] <ast.Mod object at 0x7da2590d6920> name[pool_size]]]] variable[numctx] assign[=] constant[0] for taget[name[img]] in starred[name[imageObjectList]] begin[:] <ast.AugAssign object at 0x7da1b1bcb340> variable[_numctx] assign[=] dictionary[[<ast.Constant object at 0x7da1b1bcaec0>], [<ast.Name object at 0x7da1b1bcba90>]] for taget[name[img]] in starred[name[imageObjectList]] begin[:] for taget[name[chip]] in starred[call[name[img].returnAllChips, parameter[]]] begin[:] variable[plsingle] assign[=] call[name[chip].outputNames][constant[outSingle]] if compare[name[plsingle] in name[_numctx]] begin[:] <ast.AugAssign object at 0x7da1b1bca740> variable[_nplanes] assign[=] binary_operation[call[name[int], parameter[binary_operation[binary_operation[call[name[_numctx]][constant[all]] - constant[1]] / constant[32]]]] + constant[1]] if <ast.BoolOp object at 0x7da1b1bcadd0> begin[:] variable[_nplanes] assign[=] constant[1] variable[_outsci] assign[=] constant[None] if <ast.BoolOp object at 0x7da1b1bcb280> begin[:] variable[_outsci] assign[=] call[name[np].empty, parameter[name[output_wcs].array_shape]] call[name[_outsci].fill, parameter[name[maskval]]] variable[_outwht] assign[=] call[name[np].zeros, parameter[name[output_wcs].array_shape]] variable[_outctx] assign[=] call[name[np].zeros, parameter[binary_operation[tuple[[<ast.Name object at 0x7da1b1baca90>]] + name[output_wcs].array_shape]]] variable[_hdrlist] assign[=] list[[]] variable[_chipIdx] assign[=] constant[0] variable[template] assign[=] constant[None] variable[subprocs] assign[=] list[[]] for taget[name[img]] in starred[name[imageObjectList]] begin[:] variable[chiplist] assign[=] call[name[img].returnAllChips, parameter[]] variable[num_in_prod] assign[=] call[name[_numctx]][constant[all]] if name[single] begin[:] variable[num_in_prod] assign[=] 
call[name[_numctx]][call[call[name[chiplist]][constant[0]].outputNames][constant[outSingle]]] variable[fnames] assign[=] list[[]] for taget[name[chip]] in starred[name[chiplist]] begin[:] call[name[fnames].append, parameter[call[name[chip].outputNames][constant[data]]]] if compare[name[_chipIdx] equal[==] constant[0]] begin[:] variable[template] assign[=] name[fnames] if name[will_parallel] begin[:] if name[img].inmemory begin[:] variable[manager] assign[=] call[name[multiprocessing].Manager, parameter[]] variable[dproxy] assign[=] call[name[manager].dict, parameter[name[img].virtualOutputs]] name[img].virtualOutputs assign[=] name[dproxy] variable[p] assign[=] call[name[multiprocessing].Process, parameter[]] call[name[subprocs].append, parameter[name[p]]] <ast.AugAssign object at 0x7da204623940> if compare[name[_chipIdx] equal[==] name[num_in_prod]] begin[:] variable[_chipIdx] assign[=] constant[0] if name[will_parallel] begin[:] call[name[mputil].launch_and_wait, parameter[name[subprocs], name[pool_size]]] <ast.Delete object at 0x7da204621180>
keyword[def] identifier[run_driz] ( identifier[imageObjectList] , identifier[output_wcs] , identifier[paramDict] , identifier[single] , identifier[build] , identifier[wcsmap] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[imageObjectList] , identifier[list] ): identifier[imageObjectList] =[ identifier[imageObjectList] ] identifier[_versions] ={ literal[string] : identifier[__version__] , literal[string] : identifier[util] . identifier[__fits_version__] , literal[string] : identifier[util] . identifier[__numpy_version__] } identifier[log] . identifier[info] ( literal[string] % ( identifier[paramDict] [ literal[string] ], identifier[paramDict] [ literal[string] ])) identifier[maskval] = identifier[interpret_maskval] ( identifier[paramDict] ) identifier[outwcs] = identifier[copy] . identifier[deepcopy] ( identifier[output_wcs] ) keyword[if] ( keyword[not] identifier[single] keyword[and] identifier[build] keyword[and] identifier[fileutil] . identifier[findFile] ( identifier[imageObjectList] [ literal[int] ]. identifier[outputNames] [ literal[string] ])): identifier[log] . identifier[info] ( literal[string] ) identifier[os] . identifier[remove] ( identifier[imageObjectList] [ literal[int] ]. identifier[outputNames] [ literal[string] ]) identifier[log] . identifier[info] ( literal[string] ) identifier[output_wcs] . identifier[printwcs] () identifier[pool_size] = identifier[util] . identifier[get_pool_size] ( identifier[paramDict] . identifier[get] ( literal[string] ), identifier[len] ( identifier[imageObjectList] )) identifier[will_parallel] = identifier[single] keyword[and] identifier[pool_size] > literal[int] keyword[if] identifier[will_parallel] : identifier[log] . identifier[info] ( literal[string] % identifier[pool_size] ) keyword[else] : keyword[if] identifier[single] : identifier[log] . 
identifier[info] ( literal[string] ) identifier[numctx] = literal[int] keyword[for] identifier[img] keyword[in] identifier[imageObjectList] : identifier[numctx] += identifier[img] . identifier[_nmembers] identifier[_numctx] ={ literal[string] : identifier[numctx] } keyword[for] identifier[img] keyword[in] identifier[imageObjectList] : keyword[for] identifier[chip] keyword[in] identifier[img] . identifier[returnAllChips] ( identifier[extname] = identifier[img] . identifier[scienceExt] ): identifier[plsingle] = identifier[chip] . identifier[outputNames] [ literal[string] ] keyword[if] identifier[plsingle] keyword[in] identifier[_numctx] : identifier[_numctx] [ identifier[plsingle] ]+= literal[int] keyword[else] : identifier[_numctx] [ identifier[plsingle] ]= literal[int] identifier[_nplanes] = identifier[int] (( identifier[_numctx] [ literal[string] ]- literal[int] )/ literal[int] )+ literal[int] keyword[if] identifier[single] keyword[or] identifier[imageObjectList] [ literal[int] ][ literal[int] ]. identifier[outputNames] [ literal[string] ] keyword[in] [ keyword[None] , literal[string] , literal[string] ]: identifier[_nplanes] = literal[int] identifier[_outsci] = identifier[_outwht] = identifier[_outctx] = identifier[_hdrlist] = keyword[None] keyword[if] ( keyword[not] identifier[single] ) keyword[or] ( identifier[single] keyword[and] ( keyword[not] identifier[will_parallel] ) keyword[and] ( keyword[not] identifier[imageObjectList] [ literal[int] ]. identifier[inmemory] )): identifier[_outsci] = identifier[np] . identifier[empty] ( identifier[output_wcs] . identifier[array_shape] , identifier[dtype] = identifier[np] . identifier[float32] ) identifier[_outsci] . identifier[fill] ( identifier[maskval] ) identifier[_outwht] = identifier[np] . identifier[zeros] ( identifier[output_wcs] . identifier[array_shape] , identifier[dtype] = identifier[np] . identifier[float32] ) identifier[_outctx] = identifier[np] . 
identifier[zeros] (( identifier[_nplanes] ,)+ identifier[output_wcs] . identifier[array_shape] , identifier[dtype] = identifier[np] . identifier[int32] ) identifier[_hdrlist] =[] identifier[_chipIdx] = literal[int] identifier[template] = keyword[None] identifier[subprocs] =[] keyword[for] identifier[img] keyword[in] identifier[imageObjectList] : identifier[chiplist] = identifier[img] . identifier[returnAllChips] ( identifier[extname] = identifier[img] . identifier[scienceExt] ) identifier[num_in_prod] = identifier[_numctx] [ literal[string] ] keyword[if] identifier[single] : identifier[num_in_prod] = identifier[_numctx] [ identifier[chiplist] [ literal[int] ]. identifier[outputNames] [ literal[string] ]] identifier[fnames] =[] keyword[for] identifier[chip] keyword[in] identifier[chiplist] : identifier[fnames] . identifier[append] ( identifier[chip] . identifier[outputNames] [ literal[string] ]) keyword[if] identifier[_chipIdx] == literal[int] : identifier[template] = identifier[fnames] keyword[else] : identifier[template] . identifier[extend] ( identifier[fnames] ) keyword[if] identifier[will_parallel] : keyword[if] identifier[img] . identifier[inmemory] : identifier[manager] = identifier[multiprocessing] . identifier[Manager] () identifier[dproxy] = identifier[manager] . identifier[dict] ( identifier[img] . identifier[virtualOutputs] ) identifier[img] . identifier[virtualOutputs] = identifier[dproxy] identifier[p] = identifier[multiprocessing] . identifier[Process] ( identifier[target] = identifier[run_driz_img] , identifier[name] = literal[string] , identifier[args] =( identifier[img] , identifier[chiplist] , identifier[output_wcs] , identifier[outwcs] , identifier[template] , identifier[paramDict] , identifier[single] , identifier[num_in_prod] , identifier[build] , identifier[_versions] , identifier[_numctx] , identifier[_nplanes] , identifier[_chipIdx] , keyword[None] , keyword[None] , keyword[None] , keyword[None] , identifier[wcsmap] )) identifier[subprocs] . 
identifier[append] ( identifier[p] ) keyword[else] : identifier[run_driz_img] ( identifier[img] , identifier[chiplist] , identifier[output_wcs] , identifier[outwcs] , identifier[template] , identifier[paramDict] , identifier[single] , identifier[num_in_prod] , identifier[build] , identifier[_versions] , identifier[_numctx] , identifier[_nplanes] , identifier[_chipIdx] , identifier[_outsci] , identifier[_outwht] , identifier[_outctx] , identifier[_hdrlist] , identifier[wcsmap] ) identifier[_chipIdx] += identifier[len] ( identifier[chiplist] ) keyword[if] identifier[_chipIdx] == identifier[num_in_prod] : identifier[_chipIdx] = literal[int] keyword[if] identifier[will_parallel] : identifier[mputil] . identifier[launch_and_wait] ( identifier[subprocs] , identifier[pool_size] ) keyword[del] identifier[_outsci] , identifier[_outwht] , identifier[_outctx] , identifier[_hdrlist]
def run_driz(imageObjectList, output_wcs, paramDict, single, build, wcsmap=None): """ Perform drizzle operation on input to create output. The input parameters originally was a list of dictionaries, one for each input, that matches the primary parameters for an ``IRAF`` `drizzle` task. This method would then loop over all the entries in the list and run `drizzle` for each entry. Parameters required for input in paramDict: build,single,units,wt_scl,pixfrac,kernel,fillval, rot,scale,xsh,ysh,blotnx,blotny,outnx,outny,data """ # Insure that input imageObject is a list if not isinstance(imageObjectList, list): imageObjectList = [imageObjectList] # depends on [control=['if'], data=[]] # # Setup the versions info dictionary for output to PRIMARY header # The keys will be used as the name reported in the header, as-is # _versions = {'AstroDrizzle': __version__, 'PyFITS': util.__fits_version__, 'Numpy': util.__numpy_version__} # Set sub-sampling rate for drizzling #stepsize = 2.0 log.info(' **Using sub-sampling value of %s for kernel %s' % (paramDict['stepsize'], paramDict['kernel'])) maskval = interpret_maskval(paramDict) outwcs = copy.deepcopy(output_wcs) # Check for existance of output file. if not single and build and fileutil.findFile(imageObjectList[0].outputNames['outFinal']): log.info('Removing previous output product...') os.remove(imageObjectList[0].outputNames['outFinal']) # depends on [control=['if'], data=[]] # print out parameters being used for drizzling log.info('Running Drizzle to create output frame with WCS of: ') output_wcs.printwcs() # Will we be running in parallel? 
pool_size = util.get_pool_size(paramDict.get('num_cores'), len(imageObjectList)) will_parallel = single and pool_size > 1 if will_parallel: log.info('Executing %d parallel workers' % pool_size) # depends on [control=['if'], data=[]] elif single: # not yet an option for final drizzle, msg would confuse log.info('Executing serially') # depends on [control=['if'], data=[]] # Set parameters for each input and run drizzle on it here. # # Perform drizzling... numctx = 0 for img in imageObjectList: numctx += img._nmembers # depends on [control=['for'], data=['img']] _numctx = {'all': numctx} # if single: # Determine how many chips make up each single image for img in imageObjectList: for chip in img.returnAllChips(extname=img.scienceExt): plsingle = chip.outputNames['outSingle'] if plsingle in _numctx: _numctx[plsingle] += 1 # depends on [control=['if'], data=['plsingle', '_numctx']] else: _numctx[plsingle] = 1 # depends on [control=['for'], data=['chip']] # depends on [control=['for'], data=['img']] # Compute how many planes will be needed for the context image. _nplanes = int((_numctx['all'] - 1) / 32) + 1 # For single drizzling or when context is turned off, # minimize to 1 plane only... if single or imageObjectList[0][1].outputNames['outContext'] in [None, '', ' ']: _nplanes = 1 # depends on [control=['if'], data=[]] # # An image buffer needs to be setup for converting the input # arrays (sci and wht) from FITS format to native format # with respect to byteorder and byteswapping. # This buffer should be reused for each input if possible. 
# _outsci = _outwht = _outctx = _hdrlist = None if not single or (single and (not will_parallel) and (not imageObjectList[0].inmemory)): # Note there are four cases/combinations for single drizzle alone here: # (not-inmem, serial), (not-inmem, parallel), (inmem, serial), (inmem, parallel) _outsci = np.empty(output_wcs.array_shape, dtype=np.float32) _outsci.fill(maskval) _outwht = np.zeros(output_wcs.array_shape, dtype=np.float32) # initialize context to 3-D array but only pass appropriate plane to drizzle as needed _outctx = np.zeros((_nplanes,) + output_wcs.array_shape, dtype=np.int32) _hdrlist = [] # depends on [control=['if'], data=[]] # Keep track of how many chips have been processed # For single case, this will determine when to close # one product and open the next. _chipIdx = 0 # Remember the name of the 1st image that goes into this particular product # Insure that the header reports the proper values for the start of the # exposure time used to make this; in particular, TIME-OBS and DATE-OBS. template = None # # Work on each image # subprocs = [] for img in imageObjectList: chiplist = img.returnAllChips(extname=img.scienceExt) # How many inputs should go into this product? 
num_in_prod = _numctx['all'] if single: num_in_prod = _numctx[chiplist[0].outputNames['outSingle']] # depends on [control=['if'], data=[]] # The name of the 1st image fnames = [] for chip in chiplist: fnames.append(chip.outputNames['data']) # depends on [control=['for'], data=['chip']] if _chipIdx == 0: template = fnames # depends on [control=['if'], data=[]] else: template.extend(fnames) # Work each image, possibly in parallel if will_parallel: # use multiprocessing.Manager only if in parallel and in memory if img.inmemory: manager = multiprocessing.Manager() dproxy = manager.dict(img.virtualOutputs) # copy & wrap it in proxy img.virtualOutputs = dproxy # depends on [control=['if'], data=[]] # parallelize run_driz_img (currently for separate drizzle only) # for err msgs p = multiprocessing.Process(target=run_driz_img, name='adrizzle.run_driz_img()', args=(img, chiplist, output_wcs, outwcs, template, paramDict, single, num_in_prod, build, _versions, _numctx, _nplanes, _chipIdx, None, None, None, None, wcsmap)) subprocs.append(p) # depends on [control=['if'], data=[]] else: # serial run_driz_img run (either separate drizzle or final drizzle) run_driz_img(img, chiplist, output_wcs, outwcs, template, paramDict, single, num_in_prod, build, _versions, _numctx, _nplanes, _chipIdx, _outsci, _outwht, _outctx, _hdrlist, wcsmap) # Increment/reset master chip counter _chipIdx += len(chiplist) if _chipIdx == num_in_prod: _chipIdx = 0 # depends on [control=['if'], data=['_chipIdx']] # depends on [control=['for'], data=['img']] # do the join if we spawned tasks if will_parallel: mputil.launch_and_wait(subprocs, pool_size) # blocks till all done # depends on [control=['if'], data=[]] del _outsci, _outwht, _outctx, _hdrlist
def SETO(cpu, dest): """ Sets byte if overflow. :param cpu: current CPU. :param dest: destination operand. """ dest.write(Operators.ITEBV(dest.size, cpu.OF, 1, 0))
def function[SETO, parameter[cpu, dest]]: constant[ Sets byte if overflow. :param cpu: current CPU. :param dest: destination operand. ] call[name[dest].write, parameter[call[name[Operators].ITEBV, parameter[name[dest].size, name[cpu].OF, constant[1], constant[0]]]]]
keyword[def] identifier[SETO] ( identifier[cpu] , identifier[dest] ): literal[string] identifier[dest] . identifier[write] ( identifier[Operators] . identifier[ITEBV] ( identifier[dest] . identifier[size] , identifier[cpu] . identifier[OF] , literal[int] , literal[int] ))
def SETO(cpu, dest): """ Sets byte if overflow. :param cpu: current CPU. :param dest: destination operand. """ dest.write(Operators.ITEBV(dest.size, cpu.OF, 1, 0))
def is_default(self): ''' Checks if the field is in its default form :return: True if field is in default form ''' for field in self._fields: if not field.is_default(): return False return super(Container, self).is_default()
def function[is_default, parameter[self]]: constant[ Checks if the field is in its default form :return: True if field is in default form ] for taget[name[field]] in starred[name[self]._fields] begin[:] if <ast.UnaryOp object at 0x7da18dc078e0> begin[:] return[constant[False]] return[call[call[name[super], parameter[name[Container], name[self]]].is_default, parameter[]]]
keyword[def] identifier[is_default] ( identifier[self] ): literal[string] keyword[for] identifier[field] keyword[in] identifier[self] . identifier[_fields] : keyword[if] keyword[not] identifier[field] . identifier[is_default] (): keyword[return] keyword[False] keyword[return] identifier[super] ( identifier[Container] , identifier[self] ). identifier[is_default] ()
def is_default(self): """ Checks if the field is in its default form :return: True if field is in default form """ for field in self._fields: if not field.is_default(): return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']] return super(Container, self).is_default()
def list_route(methods=None, **kwargs): """ Used to mark a method on a ViewSet that should be routed for list requests. Usage:: class UserViewSet(ModelCRUDViewSet): model = User schema = UserSchema @list_route(methods=['get'], url_path='active-users') def active_users(request, *args, **kwargs): ... :param methods: An iterable of strings representing the HTTP (GET, POST, etc.) methods accepted by the route. :param url_path: Replaces the route automatically generated by the ViewSetRouter for the decorated method with the value provided. """ methods = ['get'] if (methods is None) else methods def decorator(func): func.bind_to_methods = methods func.detail = False func.kwargs = kwargs return func return decorator
def function[list_route, parameter[methods]]: constant[ Used to mark a method on a ViewSet that should be routed for list requests. Usage:: class UserViewSet(ModelCRUDViewSet): model = User schema = UserSchema @list_route(methods=['get'], url_path='active-users') def active_users(request, *args, **kwargs): ... :param methods: An iterable of strings representing the HTTP (GET, POST, etc.) methods accepted by the route. :param url_path: Replaces the route automatically generated by the ViewSetRouter for the decorated method with the value provided. ] variable[methods] assign[=] <ast.IfExp object at 0x7da20c993f40> def function[decorator, parameter[func]]: name[func].bind_to_methods assign[=] name[methods] name[func].detail assign[=] constant[False] name[func].kwargs assign[=] name[kwargs] return[name[func]] return[name[decorator]]
keyword[def] identifier[list_route] ( identifier[methods] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[methods] =[ literal[string] ] keyword[if] ( identifier[methods] keyword[is] keyword[None] ) keyword[else] identifier[methods] keyword[def] identifier[decorator] ( identifier[func] ): identifier[func] . identifier[bind_to_methods] = identifier[methods] identifier[func] . identifier[detail] = keyword[False] identifier[func] . identifier[kwargs] = identifier[kwargs] keyword[return] identifier[func] keyword[return] identifier[decorator]
def list_route(methods=None, **kwargs): """ Used to mark a method on a ViewSet that should be routed for list requests. Usage:: class UserViewSet(ModelCRUDViewSet): model = User schema = UserSchema @list_route(methods=['get'], url_path='active-users') def active_users(request, *args, **kwargs): ... :param methods: An iterable of strings representing the HTTP (GET, POST, etc.) methods accepted by the route. :param url_path: Replaces the route automatically generated by the ViewSetRouter for the decorated method with the value provided. """ methods = ['get'] if methods is None else methods def decorator(func): func.bind_to_methods = methods func.detail = False func.kwargs = kwargs return func return decorator
async def AddRelation(self, endpoints):
    '''Ask the controller to relate the given application endpoints.

    endpoints : typing.Sequence[str]
    Returns -> typing.Mapping[str, ~CharmRelation]
    '''
    # Assemble the facade RPC envelope (Application facade, version 3)
    # with the endpoints as the sole request parameter.
    params = {'endpoints': endpoints}
    request = dict(type='Application',
                   request='AddRelation',
                   version=3,
                   params=params)
    return await self.rpc(request)
<ast.AsyncFunctionDef object at 0x7da1b0d1baf0>
keyword[async] keyword[def] identifier[AddRelation] ( identifier[self] , identifier[endpoints] ): literal[string] identifier[_params] = identifier[dict] () identifier[msg] = identifier[dict] ( identifier[type] = literal[string] , identifier[request] = literal[string] , identifier[version] = literal[int] , identifier[params] = identifier[_params] ) identifier[_params] [ literal[string] ]= identifier[endpoints] identifier[reply] = keyword[await] identifier[self] . identifier[rpc] ( identifier[msg] ) keyword[return] identifier[reply]
async def AddRelation(self, endpoints): """ endpoints : typing.Sequence[str] Returns -> typing.Mapping[str, ~CharmRelation] """ # map input types to rpc msg _params = dict() msg = dict(type='Application', request='AddRelation', version=3, params=_params) _params['endpoints'] = endpoints reply = await self.rpc(msg) return reply
def clean(self):
    """Return a copy of the resource dict without internal fields.

    Builds a new dict from ``self._resource``, excluding every key listed
    in ``self.internal_fields``.  If the document carries a Mongo-style
    ``_id`` and no ``id`` key survived filtering, the ``_id`` value is
    mirrored into ``id`` for callers.

    :return: the cleaned dictionary; ``self._resource`` is not mutated.
    """
    doc = self._resource
    # BUGFIX: use .items() instead of the Python-2-only .iteritems(), which
    # raises AttributeError on Python 3 dicts.
    result = {k: v for k, v in doc.items()
              if k not in self.internal_fields}
    if '_id' in doc and 'id' not in result:
        result['id'] = doc['_id']
    return result
def function[clean, parameter[self]]: constant[Remove internal fields] variable[doc] assign[=] name[self]._resource variable[result] assign[=] <ast.DictComp object at 0x7da20e9b1420> if <ast.BoolOp object at 0x7da207f00f10> begin[:] call[name[result]][constant[id]] assign[=] call[name[doc]][constant[_id]] return[name[result]]
keyword[def] identifier[clean] ( identifier[self] ): literal[string] identifier[doc] = identifier[self] . identifier[_resource] identifier[result] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[doc] . identifier[iteritems] () keyword[if] identifier[k] keyword[not] keyword[in] identifier[self] . identifier[internal_fields] } keyword[if] literal[string] keyword[in] identifier[doc] keyword[and] literal[string] keyword[not] keyword[in] identifier[result] : identifier[result] [ literal[string] ]= identifier[doc] [ literal[string] ] keyword[return] identifier[result]
def clean(self): """Remove internal fields""" doc = self._resource result = {k: v for (k, v) in doc.iteritems() if k not in self.internal_fields} if '_id' in doc and 'id' not in result: result['id'] = doc['_id'] # depends on [control=['if'], data=[]] return result
def set_xattr(self, path, xattr_name, xattr_value, flag, **kwargs):
    """Set an xattr of a file or directory.

    :param xattr_name: The name must be prefixed with the namespace
        followed by ``.``. For example, ``user.attr``.
    :param flag: ``CREATE`` or ``REPLACE``
    """
    # The SETXATTR operation carries the attribute as query parameters.
    kwargs.update({'xattr.name': xattr_name, 'xattr.value': xattr_value})
    response = self._put(path, 'SETXATTR', flag=flag, **kwargs)
    # A successful SETXATTR returns an empty body.
    assert not response.content
def function[set_xattr, parameter[self, path, xattr_name, xattr_value, flag]]: constant[Set an xattr of a file or directory. :param xattr_name: The name must be prefixed with the namespace followed by ``.``. For example, ``user.attr``. :param flag: ``CREATE`` or ``REPLACE`` ] call[name[kwargs]][constant[xattr.name]] assign[=] name[xattr_name] call[name[kwargs]][constant[xattr.value]] assign[=] name[xattr_value] variable[response] assign[=] call[name[self]._put, parameter[name[path], constant[SETXATTR]]] assert[<ast.UnaryOp object at 0x7da18fe92590>]
keyword[def] identifier[set_xattr] ( identifier[self] , identifier[path] , identifier[xattr_name] , identifier[xattr_value] , identifier[flag] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= identifier[xattr_name] identifier[kwargs] [ literal[string] ]= identifier[xattr_value] identifier[response] = identifier[self] . identifier[_put] ( identifier[path] , literal[string] , identifier[flag] = identifier[flag] ,** identifier[kwargs] ) keyword[assert] keyword[not] identifier[response] . identifier[content]
def set_xattr(self, path, xattr_name, xattr_value, flag, **kwargs): """Set an xattr of a file or directory. :param xattr_name: The name must be prefixed with the namespace followed by ``.``. For example, ``user.attr``. :param flag: ``CREATE`` or ``REPLACE`` """ kwargs['xattr.name'] = xattr_name kwargs['xattr.value'] = xattr_value response = self._put(path, 'SETXATTR', flag=flag, **kwargs) assert not response.content
def all_to_public(self):
    """Sets all members, types and executables in this module as public
    as long as it doesn't already have the 'private' modifier.
    """
    if "private" in self.modifiers:
        return

    def expose(attribute):
        # Register each item of the collection under its lowercased name;
        # the membership test intentionally uses the original-case key.
        for name in self.collection(attribute):
            if name not in self.publics:
                self.publics[name.lower()] = 1

    for attribute in ("members", "types", "executables"):
        expose(attribute)
def function[all_to_public, parameter[self]]: constant[Sets all members, types and executables in this module as public as long as it doesn't already have the 'private' modifier. ] if compare[constant[private] <ast.NotIn object at 0x7da2590d7190> name[self].modifiers] begin[:] def function[public_collection, parameter[attribute]]: for taget[name[key]] in starred[call[name[self].collection, parameter[name[attribute]]]] begin[:] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self].publics] begin[:] call[name[self].publics][call[name[key].lower, parameter[]]] assign[=] constant[1] call[name[public_collection], parameter[constant[members]]] call[name[public_collection], parameter[constant[types]]] call[name[public_collection], parameter[constant[executables]]]
keyword[def] identifier[all_to_public] ( identifier[self] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[modifiers] : keyword[def] identifier[public_collection] ( identifier[attribute] ): keyword[for] identifier[key] keyword[in] identifier[self] . identifier[collection] ( identifier[attribute] ): keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[publics] : identifier[self] . identifier[publics] [ identifier[key] . identifier[lower] ()]= literal[int] identifier[public_collection] ( literal[string] ) identifier[public_collection] ( literal[string] ) identifier[public_collection] ( literal[string] )
def all_to_public(self): """Sets all members, types and executables in this module as public as long as it doesn't already have the 'private' modifier. """ if 'private' not in self.modifiers: def public_collection(attribute): for key in self.collection(attribute): if key not in self.publics: self.publics[key.lower()] = 1 # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] public_collection('members') public_collection('types') public_collection('executables') # depends on [control=['if'], data=[]]
def vq_discrete_unbottleneck(x, hidden_size):
  """Simple undiscretization from vector quantized representation.

  Projects codes back into embedding space by multiplying with the shared
  VQ codebook means.

  Args:
    x: Tensor of codes; the last dimension is the codebook size.
    hidden_size: int, dimensionality of the returned embeddings.

  Returns:
    Tensor shaped like `x` except the last dimension becomes `hidden_size`.
  """
  original_shape = common_layers.shape_list(x)
  codes = tf.to_float(x)
  codebook_size = common_layers.shape_list(codes)[-1]
  means, _, _ = get_vq_codebook(codebook_size, hidden_size)
  flat_codes = tf.reshape(codes, [-1, original_shape[-1]])
  projected = tf.matmul(flat_codes, means)
  return tf.reshape(projected, original_shape[:-1] + [hidden_size])
def function[vq_discrete_unbottleneck, parameter[x, hidden_size]]: constant[Simple undiscretization from vector quantized representation.] variable[x_shape] assign[=] call[name[common_layers].shape_list, parameter[name[x]]] variable[x] assign[=] call[name[tf].to_float, parameter[name[x]]] variable[bottleneck_size] assign[=] call[call[name[common_layers].shape_list, parameter[name[x]]]][<ast.UnaryOp object at 0x7da1b209caf0>] <ast.Tuple object at 0x7da1b209ca60> assign[=] call[name[get_vq_codebook], parameter[name[bottleneck_size], name[hidden_size]]] variable[result] assign[=] call[name[tf].matmul, parameter[call[name[tf].reshape, parameter[name[x], list[[<ast.UnaryOp object at 0x7da1b209ddb0>, <ast.Subscript object at 0x7da1b209de10>]]]], name[means]]] return[call[name[tf].reshape, parameter[name[result], binary_operation[call[name[x_shape]][<ast.Slice object at 0x7da1b209e380>] + list[[<ast.Name object at 0x7da1b209e0e0>]]]]]]
keyword[def] identifier[vq_discrete_unbottleneck] ( identifier[x] , identifier[hidden_size] ): literal[string] identifier[x_shape] = identifier[common_layers] . identifier[shape_list] ( identifier[x] ) identifier[x] = identifier[tf] . identifier[to_float] ( identifier[x] ) identifier[bottleneck_size] = identifier[common_layers] . identifier[shape_list] ( identifier[x] )[- literal[int] ] identifier[means] , identifier[_] , identifier[_] = identifier[get_vq_codebook] ( identifier[bottleneck_size] , identifier[hidden_size] ) identifier[result] = identifier[tf] . identifier[matmul] ( identifier[tf] . identifier[reshape] ( identifier[x] ,[- literal[int] , identifier[x_shape] [- literal[int] ]]), identifier[means] ) keyword[return] identifier[tf] . identifier[reshape] ( identifier[result] , identifier[x_shape] [:- literal[int] ]+[ identifier[hidden_size] ])
def vq_discrete_unbottleneck(x, hidden_size): """Simple undiscretization from vector quantized representation.""" x_shape = common_layers.shape_list(x) x = tf.to_float(x) bottleneck_size = common_layers.shape_list(x)[-1] (means, _, _) = get_vq_codebook(bottleneck_size, hidden_size) result = tf.matmul(tf.reshape(x, [-1, x_shape[-1]]), means) return tf.reshape(result, x_shape[:-1] + [hidden_size])
def p_expression_noteql(self, p):
    'expression : expression NEL expression'
    # NOTE: the string above is a PLY/yacc grammar production, not an
    # ordinary docstring -- the parser generator reads it to build the
    # rule, so its text must not be changed.
    # Build a not-equal AST node from the left (p[1]) and right (p[3])
    # operand expressions.
    p[0] = NotEql(p[1], p[3], lineno=p.lineno(1))
    # Propagate the left operand's line number to the result symbol.
    p.set_lineno(0, p.lineno(1))
def function[p_expression_noteql, parameter[self, p]]: constant[expression : expression NEL expression] call[name[p]][constant[0]] assign[=] call[name[NotEql], parameter[call[name[p]][constant[1]], call[name[p]][constant[3]]]] call[name[p].set_lineno, parameter[constant[0], call[name[p].lineno, parameter[constant[1]]]]]
keyword[def] identifier[p_expression_noteql] ( identifier[self] , identifier[p] ): literal[string] identifier[p] [ literal[int] ]= identifier[NotEql] ( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] )) identifier[p] . identifier[set_lineno] ( literal[int] , identifier[p] . identifier[lineno] ( literal[int] ))
def p_expression_noteql(self, p): """expression : expression NEL expression""" p[0] = NotEql(p[1], p[3], lineno=p.lineno(1)) p.set_lineno(0, p.lineno(1))
def _store(self):
    """Returns a dictionary of formatted data understood by the storage service.

    The data is put into an :class:`~pypet.parameter.ObjectTable` named 'data'.
    If the parameter is explored, the exploration range is also put into
    another table named 'explored_data'.

    :return: Dictionary containing the data and optionally the exploration range.
    """
    if self._data is not None:
        store_dict = {'data': ObjectTable(data={'data': [self._data]})}
    # NOTE(review): if ``self._data`` is None, ``store_dict`` is never bound
    # and the return below raises UnboundLocalError -- presumably callers
    # only store parameters that hold data; confirm.
    if self.f_has_range():
        store_dict['explored_data'] = ObjectTable(data={'data': self._explored_range})
    # Lock the parameter so it cannot be modified after being stored.
    self._locked = True
    return store_dict
def function[_store, parameter[self]]: constant[Returns a dictionary of formatted data understood by the storage service. The data is put into an :class:`~pypet.parameter.ObjectTable` named 'data'. If the parameter is explored, the exploration range is also put into another table named 'explored_data'. :return: Dictionary containing the data and optionally the exploration range. ] if compare[name[self]._data is_not constant[None]] begin[:] variable[store_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b0349180>], [<ast.Call object at 0x7da1b03489a0>]] if call[name[self].f_has_range, parameter[]] begin[:] call[name[store_dict]][constant[explored_data]] assign[=] call[name[ObjectTable], parameter[]] name[self]._locked assign[=] constant[True] return[name[store_dict]]
keyword[def] identifier[_store] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_data] keyword[is] keyword[not] keyword[None] : identifier[store_dict] ={ literal[string] : identifier[ObjectTable] ( identifier[data] ={ literal[string] :[ identifier[self] . identifier[_data] ]})} keyword[if] identifier[self] . identifier[f_has_range] (): identifier[store_dict] [ literal[string] ]= identifier[ObjectTable] ( identifier[data] ={ literal[string] : identifier[self] . identifier[_explored_range] }) identifier[self] . identifier[_locked] = keyword[True] keyword[return] identifier[store_dict]
def _store(self): """Returns a dictionary of formatted data understood by the storage service. The data is put into an :class:`~pypet.parameter.ObjectTable` named 'data'. If the parameter is explored, the exploration range is also put into another table named 'explored_data'. :return: Dictionary containing the data and optionally the exploration range. """ if self._data is not None: store_dict = {'data': ObjectTable(data={'data': [self._data]})} # depends on [control=['if'], data=[]] if self.f_has_range(): store_dict['explored_data'] = ObjectTable(data={'data': self._explored_range}) # depends on [control=['if'], data=[]] self._locked = True return store_dict
def forward(self, X, training=False, device='cpu'):
    """Gather and concatenate the output from forward call with input data.

    Runs ``forward_iter`` over ``X``, collecting each batch's output on
    ``device``, and concatenates the pieces with :func:`torch.cat`.  When
    the module yields tuples (multiple outputs per batch), each tuple
    position is concatenated separately and a tuple is returned; every
    output must therefore support concatenation.

    Parameters
    ----------
    X : input data, compatible with skorch.dataset.Dataset
      By default, you should be able to pass:

        * numpy arrays
        * torch tensors
        * pandas DataFrame or Series
        * scipy sparse CSR matrices
        * a dictionary of the former three
        * a list/tuple of the former three
        * a Dataset

      If this doesn't work with your data, you have to pass a
      ``Dataset`` that can deal with the data.

    training : bool (default=False)
      Whether to set the module to train mode or not.

    device : string (default='cpu')
      The device to store each inference result on.
      This defaults to CPU memory since there is genereally
      more memory available there. For performance reasons
      this might be changed to a specific CUDA device,
      e.g. 'cuda:0'.

    Returns
    -------
    y_infer : torch tensor
      The result from the forward step.

    """
    batches = list(self.forward_iter(X, training=training, device=device))

    if batches and isinstance(batches[0], tuple):
        # Multiple outputs per batch: concatenate position-wise.
        return tuple(torch.cat(parts) for parts in zip(*batches))
    return torch.cat(batches)
def function[forward, parameter[self, X, training, device]]: constant[Gather and concatenate the output from forward call with input data. The outputs from ``self.module_.forward`` are gathered on the compute device specified by ``device`` and then concatenated using PyTorch :func:`~torch.cat`. If multiple outputs are returned by ``self.module_.forward``, each one of them must be able to be concatenated this way. Parameters ---------- X : input data, compatible with skorch.dataset.Dataset By default, you should be able to pass: * numpy arrays * torch tensors * pandas DataFrame or Series * scipy sparse CSR matrices * a dictionary of the former three * a list/tuple of the former three * a Dataset If this doesn't work with your data, you have to pass a ``Dataset`` that can deal with the data. training : bool (default=False) Whether to set the module to train mode or not. device : string (default='cpu') The device to store each inference result on. This defaults to CPU memory since there is genereally more memory available there. For performance reasons this might be changed to a specific CUDA device, e.g. 'cuda:0'. Returns ------- y_infer : torch tensor The result from the forward step. ] variable[y_infer] assign[=] call[name[list], parameter[call[name[self].forward_iter, parameter[name[X]]]]] variable[is_multioutput] assign[=] <ast.BoolOp object at 0x7da1b0ab9300> if name[is_multioutput] begin[:] return[call[name[tuple], parameter[call[name[map], parameter[name[torch].cat, call[name[zip], parameter[<ast.Starred object at 0x7da1b0abaa40>]]]]]]] return[call[name[torch].cat, parameter[name[y_infer]]]]
keyword[def] identifier[forward] ( identifier[self] , identifier[X] , identifier[training] = keyword[False] , identifier[device] = literal[string] ): literal[string] identifier[y_infer] = identifier[list] ( identifier[self] . identifier[forward_iter] ( identifier[X] , identifier[training] = identifier[training] , identifier[device] = identifier[device] )) identifier[is_multioutput] = identifier[len] ( identifier[y_infer] )> literal[int] keyword[and] identifier[isinstance] ( identifier[y_infer] [ literal[int] ], identifier[tuple] ) keyword[if] identifier[is_multioutput] : keyword[return] identifier[tuple] ( identifier[map] ( identifier[torch] . identifier[cat] , identifier[zip] (* identifier[y_infer] ))) keyword[return] identifier[torch] . identifier[cat] ( identifier[y_infer] )
def forward(self, X, training=False, device='cpu'): """Gather and concatenate the output from forward call with input data. The outputs from ``self.module_.forward`` are gathered on the compute device specified by ``device`` and then concatenated using PyTorch :func:`~torch.cat`. If multiple outputs are returned by ``self.module_.forward``, each one of them must be able to be concatenated this way. Parameters ---------- X : input data, compatible with skorch.dataset.Dataset By default, you should be able to pass: * numpy arrays * torch tensors * pandas DataFrame or Series * scipy sparse CSR matrices * a dictionary of the former three * a list/tuple of the former three * a Dataset If this doesn't work with your data, you have to pass a ``Dataset`` that can deal with the data. training : bool (default=False) Whether to set the module to train mode or not. device : string (default='cpu') The device to store each inference result on. This defaults to CPU memory since there is genereally more memory available there. For performance reasons this might be changed to a specific CUDA device, e.g. 'cuda:0'. Returns ------- y_infer : torch tensor The result from the forward step. """ y_infer = list(self.forward_iter(X, training=training, device=device)) is_multioutput = len(y_infer) > 0 and isinstance(y_infer[0], tuple) if is_multioutput: return tuple(map(torch.cat, zip(*y_infer))) # depends on [control=['if'], data=[]] return torch.cat(y_infer)
def is_ligolw(origin, filepath, fileobj, *args, **kwargs):
    """Identify a file object as LIGO_LW-format XML
    """
    # pylint: disable=unused-argument
    if fileobj is not None:
        # Sniff the first two lines for the XML and LIGO_LW signatures,
        # restoring the stream position afterwards.
        position = fileobj.tell()
        fileobj.seek(0)
        try:
            first = fileobj.readline().lower()
            second = fileobj.readline().lower()
            try:
                # bytes-mode stream
                return (first.startswith(XML_SIGNATURE)
                        and second.startswith((LIGOLW_SIGNATURE,
                                               LIGOLW_ELEMENT)))
            except TypeError:
                # text-mode stream: compare against decoded signatures
                return (first.startswith(XML_SIGNATURE.decode('utf-8'))
                        and second.startswith(
                            (LIGOLW_SIGNATURE.decode('utf-8'),
                             LIGOLW_ELEMENT.decode('utf-8'))))
        finally:
            fileobj.seek(position)

    # No file object: fall back to inspecting the first positional argument
    # for a ligolw Element instance (glue's Element accepted when present).
    try:
        from ligo.lw.ligolw import Element
    except ImportError:
        return False
    element_types = (Element,)
    try:
        from glue.ligolw.ligolw import Element as GlueElement
    except ImportError:
        pass
    else:
        element_types = (Element, GlueElement)
    return bool(args) and isinstance(args[0], element_types)
def function[is_ligolw, parameter[origin, filepath, fileobj]]: constant[Identify a file object as LIGO_LW-format XML ] if compare[name[fileobj] is_not constant[None]] begin[:] variable[loc] assign[=] call[name[fileobj].tell, parameter[]] call[name[fileobj].seek, parameter[constant[0]]] <ast.Try object at 0x7da18f09faf0> <ast.Try object at 0x7da18f09d660> <ast.Try object at 0x7da18f09f7c0> return[<ast.BoolOp object at 0x7da18f09ceb0>]
keyword[def] identifier[is_ligolw] ( identifier[origin] , identifier[filepath] , identifier[fileobj] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[fileobj] keyword[is] keyword[not] keyword[None] : identifier[loc] = identifier[fileobj] . identifier[tell] () identifier[fileobj] . identifier[seek] ( literal[int] ) keyword[try] : identifier[line1] = identifier[fileobj] . identifier[readline] (). identifier[lower] () identifier[line2] = identifier[fileobj] . identifier[readline] (). identifier[lower] () keyword[try] : keyword[return] ( identifier[line1] . identifier[startswith] ( identifier[XML_SIGNATURE] ) keyword[and] identifier[line2] . identifier[startswith] (( identifier[LIGOLW_SIGNATURE] , identifier[LIGOLW_ELEMENT] ))) keyword[except] identifier[TypeError] : keyword[return] ( identifier[line1] . identifier[startswith] ( identifier[XML_SIGNATURE] . identifier[decode] ( literal[string] )) keyword[and] identifier[line2] . identifier[startswith] (( identifier[LIGOLW_SIGNATURE] . identifier[decode] ( literal[string] ), identifier[LIGOLW_ELEMENT] . identifier[decode] ( literal[string] )))) keyword[finally] : identifier[fileobj] . identifier[seek] ( identifier[loc] ) keyword[try] : keyword[from] identifier[ligo] . identifier[lw] . identifier[ligolw] keyword[import] identifier[Element] keyword[except] identifier[ImportError] : keyword[return] keyword[False] keyword[try] : keyword[from] identifier[glue] . identifier[ligolw] . identifier[ligolw] keyword[import] identifier[Element] keyword[as] identifier[GlueElement] keyword[except] identifier[ImportError] : identifier[element_types] =( identifier[Element] ,) keyword[else] : identifier[element_types] =( identifier[Element] , identifier[GlueElement] ) keyword[return] identifier[len] ( identifier[args] )> literal[int] keyword[and] identifier[isinstance] ( identifier[args] [ literal[int] ], identifier[element_types] )
def is_ligolw(origin, filepath, fileobj, *args, **kwargs): """Identify a file object as LIGO_LW-format XML """ # pylint: disable=unused-argument if fileobj is not None: loc = fileobj.tell() fileobj.seek(0) try: line1 = fileobj.readline().lower() line2 = fileobj.readline().lower() try: return line1.startswith(XML_SIGNATURE) and line2.startswith((LIGOLW_SIGNATURE, LIGOLW_ELEMENT)) # depends on [control=['try'], data=[]] except TypeError: # bytes vs str return line1.startswith(XML_SIGNATURE.decode('utf-8')) and line2.startswith((LIGOLW_SIGNATURE.decode('utf-8'), LIGOLW_ELEMENT.decode('utf-8'))) # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]] finally: fileobj.seek(loc) # depends on [control=['if'], data=['fileobj']] try: from ligo.lw.ligolw import Element # depends on [control=['try'], data=[]] except ImportError: return False # depends on [control=['except'], data=[]] try: from glue.ligolw.ligolw import Element as GlueElement # depends on [control=['try'], data=[]] except ImportError: element_types = (Element,) # depends on [control=['except'], data=[]] else: element_types = (Element, GlueElement) return len(args) > 0 and isinstance(args[0], element_types)
async def verify(self, proofRequest: ProofRequest, proof: FullProof):
    """
    Verifies a proof from the prover.

    :param proofRequest: description of a proof to be presented (revealed
    attributes, predicates, timestamps for non-revocation)
    :param proof: a proof
    :return: True if verified successfully and false otherwise.
    """
    # The revealed attributes must match the requested ones exactly (same
    # key set); extra or missing attributes invalidate the proof up front.
    if proofRequest.verifiableAttributes.keys() != proof.requestedProof.revealed_attrs.keys():
        raise ValueError('Received attributes ={} do not correspond to requested={}'.format(
            proof.requestedProof.revealed_attrs.keys(), proofRequest.verifiableAttributes.keys()))

    # Same exact-match requirement for the requested predicates.
    if proofRequest.predicates.keys() != proof.requestedProof.predicates.keys():
        raise ValueError('Received predicates ={} do not correspond to requested={}'.format(
            proof.requestedProof.predicates.keys(), proofRequest.predicates.keys()))

    # Recompute the verifier-side tau values for every sub-proof.  The
    # accumulation order (non-revocation proof first, then primary proof,
    # per proof item) feeds the challenge hash below, so it must mirror
    # the prover's construction.
    TauList = []
    for (uuid, proofItem) in proof.proofs.items():
        if proofItem.proof.nonRevocProof:
            TauList += await self._nonRevocVerifier.verifyNonRevocation(
                proofRequest, proofItem.schema_seq_no, proof.aggregatedProof.cHash,
                proofItem.proof.nonRevocProof)
        if proofItem.proof.primaryProof:
            TauList += await self._primaryVerifier.verify(proofItem.schema_seq_no,
                                                          proof.aggregatedProof.cHash,
                                                          proofItem.proof.primaryProof)

    # The proof verifies iff the recomputed challenge hash over (CList,
    # TauList, nonce) equals the hash the prover supplied.
    CHver = self._get_hash(proof.aggregatedProof.CList, self._prepare_collection(TauList),
                           cmod.integer(proofRequest.nonce))

    return CHver == proof.aggregatedProof.cHash
<ast.AsyncFunctionDef object at 0x7da1b26af6a0>
keyword[async] keyword[def] identifier[verify] ( identifier[self] , identifier[proofRequest] : identifier[ProofRequest] , identifier[proof] : identifier[FullProof] ): literal[string] keyword[if] identifier[proofRequest] . identifier[verifiableAttributes] . identifier[keys] ()!= identifier[proof] . identifier[requestedProof] . identifier[revealed_attrs] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[proof] . identifier[requestedProof] . identifier[revealed_attrs] . identifier[keys] (), identifier[proofRequest] . identifier[verifiableAttributes] . identifier[keys] ())) keyword[if] identifier[proofRequest] . identifier[predicates] . identifier[keys] ()!= identifier[proof] . identifier[requestedProof] . identifier[predicates] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[proof] . identifier[requestedProof] . identifier[predicates] . identifier[keys] (), identifier[proofRequest] . identifier[predicates] . identifier[keys] ())) identifier[TauList] =[] keyword[for] ( identifier[uuid] , identifier[proofItem] ) keyword[in] identifier[proof] . identifier[proofs] . identifier[items] (): keyword[if] identifier[proofItem] . identifier[proof] . identifier[nonRevocProof] : identifier[TauList] += keyword[await] identifier[self] . identifier[_nonRevocVerifier] . identifier[verifyNonRevocation] ( identifier[proofRequest] , identifier[proofItem] . identifier[schema_seq_no] , identifier[proof] . identifier[aggregatedProof] . identifier[cHash] , identifier[proofItem] . identifier[proof] . identifier[nonRevocProof] ) keyword[if] identifier[proofItem] . identifier[proof] . identifier[primaryProof] : identifier[TauList] += keyword[await] identifier[self] . identifier[_primaryVerifier] . identifier[verify] ( identifier[proofItem] . identifier[schema_seq_no] , identifier[proof] . identifier[aggregatedProof] . identifier[cHash] , identifier[proofItem] . 
identifier[proof] . identifier[primaryProof] ) identifier[CHver] = identifier[self] . identifier[_get_hash] ( identifier[proof] . identifier[aggregatedProof] . identifier[CList] , identifier[self] . identifier[_prepare_collection] ( identifier[TauList] ), identifier[cmod] . identifier[integer] ( identifier[proofRequest] . identifier[nonce] )) keyword[return] identifier[CHver] == identifier[proof] . identifier[aggregatedProof] . identifier[cHash]
async def verify(self, proofRequest: ProofRequest, proof: FullProof): """ Verifies a proof from the prover. :param proofRequest: description of a proof to be presented (revealed attributes, predicates, timestamps for non-revocation) :param proof: a proof :return: True if verified successfully and false otherwise. """ if proofRequest.verifiableAttributes.keys() != proof.requestedProof.revealed_attrs.keys(): raise ValueError('Received attributes ={} do not correspond to requested={}'.format(proof.requestedProof.revealed_attrs.keys(), proofRequest.verifiableAttributes.keys())) # depends on [control=['if'], data=[]] if proofRequest.predicates.keys() != proof.requestedProof.predicates.keys(): raise ValueError('Received predicates ={} do not correspond to requested={}'.format(proof.requestedProof.predicates.keys(), proofRequest.predicates.keys())) # depends on [control=['if'], data=[]] TauList = [] for (uuid, proofItem) in proof.proofs.items(): if proofItem.proof.nonRevocProof: TauList += await self._nonRevocVerifier.verifyNonRevocation(proofRequest, proofItem.schema_seq_no, proof.aggregatedProof.cHash, proofItem.proof.nonRevocProof) # depends on [control=['if'], data=[]] if proofItem.proof.primaryProof: TauList += await self._primaryVerifier.verify(proofItem.schema_seq_no, proof.aggregatedProof.cHash, proofItem.proof.primaryProof) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] CHver = self._get_hash(proof.aggregatedProof.CList, self._prepare_collection(TauList), cmod.integer(proofRequest.nonce)) return CHver == proof.aggregatedProof.cHash
def randomUUIDField(self):
    """
    Return the unique uuid from uuid1, uuid3, uuid4, or uuid5.
    """
    # Generate one candidate per UUID flavour; uuid3/uuid5 hash a randomly
    # chosen word under the URL / DNS namespaces respectively.
    candidates = [
        uuid.uuid1().hex,
        uuid.uuid3(uuid.NAMESPACE_URL,
                   self.randomize(['python', 'django', 'awesome'])).hex,
        uuid.uuid4().hex,
        uuid.uuid5(uuid.NAMESPACE_DNS,
                   self.randomize(['python', 'django', 'awesome'])).hex,
    ]
    # Pick one of the four candidates at random.
    return self.randomize(candidates)
def function[randomUUIDField, parameter[self]]: constant[ Return the unique uuid from uuid1, uuid3, uuid4, or uuid5. ] variable[uuid1] assign[=] call[name[uuid].uuid1, parameter[]].hex variable[uuid3] assign[=] call[name[uuid].uuid3, parameter[name[uuid].NAMESPACE_URL, call[name[self].randomize, parameter[list[[<ast.Constant object at 0x7da1b0cfff10>, <ast.Constant object at 0x7da1b0cff490>, <ast.Constant object at 0x7da1b0cff8b0>]]]]]].hex variable[uuid4] assign[=] call[name[uuid].uuid4, parameter[]].hex variable[uuid5] assign[=] call[name[uuid].uuid5, parameter[name[uuid].NAMESPACE_DNS, call[name[self].randomize, parameter[list[[<ast.Constant object at 0x7da1b0cfc400>, <ast.Constant object at 0x7da1b0cfcbe0>, <ast.Constant object at 0x7da1b0cfe710>]]]]]].hex return[call[name[self].randomize, parameter[list[[<ast.Name object at 0x7da1b0cfc220>, <ast.Name object at 0x7da1b0cfe800>, <ast.Name object at 0x7da1b0cff3a0>, <ast.Name object at 0x7da1b0cfef50>]]]]]
keyword[def] identifier[randomUUIDField] ( identifier[self] ): literal[string] identifier[uuid1] = identifier[uuid] . identifier[uuid1] (). identifier[hex] identifier[uuid3] = identifier[uuid] . identifier[uuid3] ( identifier[uuid] . identifier[NAMESPACE_URL] , identifier[self] . identifier[randomize] ([ literal[string] , literal[string] , literal[string] ]) ). identifier[hex] identifier[uuid4] = identifier[uuid] . identifier[uuid4] (). identifier[hex] identifier[uuid5] = identifier[uuid] . identifier[uuid5] ( identifier[uuid] . identifier[NAMESPACE_DNS] , identifier[self] . identifier[randomize] ([ literal[string] , literal[string] , literal[string] ]) ). identifier[hex] keyword[return] identifier[self] . identifier[randomize] ([ identifier[uuid1] , identifier[uuid3] , identifier[uuid4] , identifier[uuid5] ])
def randomUUIDField(self): """ Return the unique uuid from uuid1, uuid3, uuid4, or uuid5. """ uuid1 = uuid.uuid1().hex uuid3 = uuid.uuid3(uuid.NAMESPACE_URL, self.randomize(['python', 'django', 'awesome'])).hex uuid4 = uuid.uuid4().hex uuid5 = uuid.uuid5(uuid.NAMESPACE_DNS, self.randomize(['python', 'django', 'awesome'])).hex return self.randomize([uuid1, uuid3, uuid4, uuid5])
def send_event(self, instance, message, event_type=None, time=None,
               severity='info', source=None, sequence_number=None):
    """Post a new event.

    :param str instance: A Yamcs instance name.
    :param str message: Event message.
    :param Optional[str] event_type: Type of event.

    :param severity: The severity level of the event. One of ``info``,
                     ``watch``, ``warning``, ``critical`` or ``severe``.
                     Defaults to ``info``.
    :type severity: Optional[str]

    :param time: Time of the event. If unspecified, defaults to mission time.
    :type time: Optional[~datetime.datetime]

    :param source: Source of the event. Useful for grouping events in the
                   archive. When unset this defaults to ``User``.
    :type source: Optional[str]

    :param sequence_number: Sequence number of this event. This is primarily
                            used to determine unicity of events coming from
                            the same source. If not set Yamcs will
                            automatically assign a sequential number as if
                            every submitted event is unique.
    :type sequence_number: Optional[int]
    """
    # Only the optional fields explicitly provided by the caller are
    # written to the protobuf request; the rest keep proto defaults.
    event = rest_pb2.CreateEventRequest()
    event.message = message
    event.severity = severity
    if event_type:
        event.type = event_type
    if time:
        event.time = to_isostring(time)
    if source:
        event.source = source
    if sequence_number is not None:
        event.sequence_number = sequence_number
    self.post_proto('/archive/{}/events'.format(instance),
                    data=event.SerializeToString())
def function[send_event, parameter[self, instance, message, event_type, time, severity, source, sequence_number]]: constant[ Post a new event. :param str instance: A Yamcs instance name. :param str message: Event message. :param Optional[str] event_type: Type of event. :param severity: The severity level of the event. One of ``info``, ``watch``, ``warning``, ``critical`` or ``severe``. Defaults to ``info``. :type severity: Optional[str] :param time: Time of the event. If unspecified, defaults to mission time. :type time: Optional[~datetime.datetime] :param source: Source of the event. Useful for grouping events in the archive. When unset this defaults to ``User``. :type source: Optional[str] :param sequence_number: Sequence number of this event. This is primarily used to determine unicity of events coming from the same source. If not set Yamcs will automatically assign a sequential number as if every submitted event is unique. :type sequence_number: Optional[int] ] variable[req] assign[=] call[name[rest_pb2].CreateEventRequest, parameter[]] name[req].message assign[=] name[message] name[req].severity assign[=] name[severity] if name[event_type] begin[:] name[req].type assign[=] name[event_type] if name[time] begin[:] name[req].time assign[=] call[name[to_isostring], parameter[name[time]]] if name[source] begin[:] name[req].source assign[=] name[source] if compare[name[sequence_number] is_not constant[None]] begin[:] name[req].sequence_number assign[=] name[sequence_number] variable[url] assign[=] call[constant[/archive/{}/events].format, parameter[name[instance]]] call[name[self].post_proto, parameter[name[url]]]
keyword[def] identifier[send_event] ( identifier[self] , identifier[instance] , identifier[message] , identifier[event_type] = keyword[None] , identifier[time] = keyword[None] , identifier[severity] = literal[string] , identifier[source] = keyword[None] , identifier[sequence_number] = keyword[None] ): literal[string] identifier[req] = identifier[rest_pb2] . identifier[CreateEventRequest] () identifier[req] . identifier[message] = identifier[message] identifier[req] . identifier[severity] = identifier[severity] keyword[if] identifier[event_type] : identifier[req] . identifier[type] = identifier[event_type] keyword[if] identifier[time] : identifier[req] . identifier[time] = identifier[to_isostring] ( identifier[time] ) keyword[if] identifier[source] : identifier[req] . identifier[source] = identifier[source] keyword[if] identifier[sequence_number] keyword[is] keyword[not] keyword[None] : identifier[req] . identifier[sequence_number] = identifier[sequence_number] identifier[url] = literal[string] . identifier[format] ( identifier[instance] ) identifier[self] . identifier[post_proto] ( identifier[url] , identifier[data] = identifier[req] . identifier[SerializeToString] ())
def send_event(self, instance, message, event_type=None, time=None, severity='info', source=None, sequence_number=None): """ Post a new event. :param str instance: A Yamcs instance name. :param str message: Event message. :param Optional[str] event_type: Type of event. :param severity: The severity level of the event. One of ``info``, ``watch``, ``warning``, ``critical`` or ``severe``. Defaults to ``info``. :type severity: Optional[str] :param time: Time of the event. If unspecified, defaults to mission time. :type time: Optional[~datetime.datetime] :param source: Source of the event. Useful for grouping events in the archive. When unset this defaults to ``User``. :type source: Optional[str] :param sequence_number: Sequence number of this event. This is primarily used to determine unicity of events coming from the same source. If not set Yamcs will automatically assign a sequential number as if every submitted event is unique. :type sequence_number: Optional[int] """ req = rest_pb2.CreateEventRequest() req.message = message req.severity = severity if event_type: req.type = event_type # depends on [control=['if'], data=[]] if time: req.time = to_isostring(time) # depends on [control=['if'], data=[]] if source: req.source = source # depends on [control=['if'], data=[]] if sequence_number is not None: req.sequence_number = sequence_number # depends on [control=['if'], data=['sequence_number']] url = '/archive/{}/events'.format(instance) self.post_proto(url, data=req.SerializeToString())
def process_tokens(self, tokens): """process tokens from the current module to search for module/block level options """ control_pragmas = {"disable", "enable"} for (tok_type, content, start, _, _) in tokens: if tok_type != tokenize.COMMENT: continue match = OPTION_RGX.search(content) if match is None: continue first_group = match.group(1) if ( first_group.strip() == "disable-all" or first_group.strip() == "skip-file" ): if first_group.strip() == "disable-all": self.add_message( "deprecated-pragma", line=start[0], args=("disable-all", "skip-file"), ) self.add_message("file-ignored", line=start[0]) self._ignore_file = True return try: opt, value = first_group.split("=", 1) except ValueError: self.add_message( "bad-inline-option", args=first_group.strip(), line=start[0] ) continue opt = opt.strip() if opt in self._options_methods or opt in self._bw_options_methods: try: meth = self._options_methods[opt] except KeyError: meth = self._bw_options_methods[opt] # found a "(dis|en)able-msg" pragma deprecated suppression self.add_message( "deprecated-pragma", line=start[0], args=(opt, opt.replace("-msg", "")), ) for msgid in utils._splitstrip(value): # Add the line where a control pragma was encountered. if opt in control_pragmas: self._pragma_lineno[msgid] = start[0] try: if (opt, msgid) == ("disable", "all"): self.add_message( "deprecated-pragma", line=start[0], args=("disable=all", "skip-file"), ) self.add_message("file-ignored", line=start[0]) self._ignore_file = True return meth(msgid, "module", start[0]) except exceptions.UnknownMessageError: self.add_message("bad-option-value", args=msgid, line=start[0]) else: self.add_message("unrecognized-inline-option", args=opt, line=start[0])
def function[process_tokens, parameter[self, tokens]]: constant[process tokens from the current module to search for module/block level options ] variable[control_pragmas] assign[=] <ast.Set object at 0x7da1b0351e10> for taget[tuple[[<ast.Name object at 0x7da1b0352cb0>, <ast.Name object at 0x7da1b0351030>, <ast.Name object at 0x7da1b0352500>, <ast.Name object at 0x7da1b03505e0>, <ast.Name object at 0x7da1b0351180>]]] in starred[name[tokens]] begin[:] if compare[name[tok_type] not_equal[!=] name[tokenize].COMMENT] begin[:] continue variable[match] assign[=] call[name[OPTION_RGX].search, parameter[name[content]]] if compare[name[match] is constant[None]] begin[:] continue variable[first_group] assign[=] call[name[match].group, parameter[constant[1]]] if <ast.BoolOp object at 0x7da1b03539a0> begin[:] if compare[call[name[first_group].strip, parameter[]] equal[==] constant[disable-all]] begin[:] call[name[self].add_message, parameter[constant[deprecated-pragma]]] call[name[self].add_message, parameter[constant[file-ignored]]] name[self]._ignore_file assign[=] constant[True] return[None] <ast.Try object at 0x7da1b03a4790> variable[opt] assign[=] call[name[opt].strip, parameter[]] if <ast.BoolOp object at 0x7da1b03a4ac0> begin[:] <ast.Try object at 0x7da1b0350f10> for taget[name[msgid]] in starred[call[name[utils]._splitstrip, parameter[name[value]]]] begin[:] if compare[name[opt] in name[control_pragmas]] begin[:] call[name[self]._pragma_lineno][name[msgid]] assign[=] call[name[start]][constant[0]] <ast.Try object at 0x7da1b03536a0>
keyword[def] identifier[process_tokens] ( identifier[self] , identifier[tokens] ): literal[string] identifier[control_pragmas] ={ literal[string] , literal[string] } keyword[for] ( identifier[tok_type] , identifier[content] , identifier[start] , identifier[_] , identifier[_] ) keyword[in] identifier[tokens] : keyword[if] identifier[tok_type] != identifier[tokenize] . identifier[COMMENT] : keyword[continue] identifier[match] = identifier[OPTION_RGX] . identifier[search] ( identifier[content] ) keyword[if] identifier[match] keyword[is] keyword[None] : keyword[continue] identifier[first_group] = identifier[match] . identifier[group] ( literal[int] ) keyword[if] ( identifier[first_group] . identifier[strip] ()== literal[string] keyword[or] identifier[first_group] . identifier[strip] ()== literal[string] ): keyword[if] identifier[first_group] . identifier[strip] ()== literal[string] : identifier[self] . identifier[add_message] ( literal[string] , identifier[line] = identifier[start] [ literal[int] ], identifier[args] =( literal[string] , literal[string] ), ) identifier[self] . identifier[add_message] ( literal[string] , identifier[line] = identifier[start] [ literal[int] ]) identifier[self] . identifier[_ignore_file] = keyword[True] keyword[return] keyword[try] : identifier[opt] , identifier[value] = identifier[first_group] . identifier[split] ( literal[string] , literal[int] ) keyword[except] identifier[ValueError] : identifier[self] . identifier[add_message] ( literal[string] , identifier[args] = identifier[first_group] . identifier[strip] (), identifier[line] = identifier[start] [ literal[int] ] ) keyword[continue] identifier[opt] = identifier[opt] . identifier[strip] () keyword[if] identifier[opt] keyword[in] identifier[self] . identifier[_options_methods] keyword[or] identifier[opt] keyword[in] identifier[self] . identifier[_bw_options_methods] : keyword[try] : identifier[meth] = identifier[self] . 
identifier[_options_methods] [ identifier[opt] ] keyword[except] identifier[KeyError] : identifier[meth] = identifier[self] . identifier[_bw_options_methods] [ identifier[opt] ] identifier[self] . identifier[add_message] ( literal[string] , identifier[line] = identifier[start] [ literal[int] ], identifier[args] =( identifier[opt] , identifier[opt] . identifier[replace] ( literal[string] , literal[string] )), ) keyword[for] identifier[msgid] keyword[in] identifier[utils] . identifier[_splitstrip] ( identifier[value] ): keyword[if] identifier[opt] keyword[in] identifier[control_pragmas] : identifier[self] . identifier[_pragma_lineno] [ identifier[msgid] ]= identifier[start] [ literal[int] ] keyword[try] : keyword[if] ( identifier[opt] , identifier[msgid] )==( literal[string] , literal[string] ): identifier[self] . identifier[add_message] ( literal[string] , identifier[line] = identifier[start] [ literal[int] ], identifier[args] =( literal[string] , literal[string] ), ) identifier[self] . identifier[add_message] ( literal[string] , identifier[line] = identifier[start] [ literal[int] ]) identifier[self] . identifier[_ignore_file] = keyword[True] keyword[return] identifier[meth] ( identifier[msgid] , literal[string] , identifier[start] [ literal[int] ]) keyword[except] identifier[exceptions] . identifier[UnknownMessageError] : identifier[self] . identifier[add_message] ( literal[string] , identifier[args] = identifier[msgid] , identifier[line] = identifier[start] [ literal[int] ]) keyword[else] : identifier[self] . identifier[add_message] ( literal[string] , identifier[args] = identifier[opt] , identifier[line] = identifier[start] [ literal[int] ])
def process_tokens(self, tokens): """process tokens from the current module to search for module/block level options """ control_pragmas = {'disable', 'enable'} for (tok_type, content, start, _, _) in tokens: if tok_type != tokenize.COMMENT: continue # depends on [control=['if'], data=[]] match = OPTION_RGX.search(content) if match is None: continue # depends on [control=['if'], data=[]] first_group = match.group(1) if first_group.strip() == 'disable-all' or first_group.strip() == 'skip-file': if first_group.strip() == 'disable-all': self.add_message('deprecated-pragma', line=start[0], args=('disable-all', 'skip-file')) # depends on [control=['if'], data=[]] self.add_message('file-ignored', line=start[0]) self._ignore_file = True return # depends on [control=['if'], data=[]] try: (opt, value) = first_group.split('=', 1) # depends on [control=['try'], data=[]] except ValueError: self.add_message('bad-inline-option', args=first_group.strip(), line=start[0]) continue # depends on [control=['except'], data=[]] opt = opt.strip() if opt in self._options_methods or opt in self._bw_options_methods: try: meth = self._options_methods[opt] # depends on [control=['try'], data=[]] except KeyError: meth = self._bw_options_methods[opt] # found a "(dis|en)able-msg" pragma deprecated suppression self.add_message('deprecated-pragma', line=start[0], args=(opt, opt.replace('-msg', ''))) # depends on [control=['except'], data=[]] for msgid in utils._splitstrip(value): # Add the line where a control pragma was encountered. 
if opt in control_pragmas: self._pragma_lineno[msgid] = start[0] # depends on [control=['if'], data=[]] try: if (opt, msgid) == ('disable', 'all'): self.add_message('deprecated-pragma', line=start[0], args=('disable=all', 'skip-file')) self.add_message('file-ignored', line=start[0]) self._ignore_file = True return # depends on [control=['if'], data=[]] meth(msgid, 'module', start[0]) # depends on [control=['try'], data=[]] except exceptions.UnknownMessageError: self.add_message('bad-option-value', args=msgid, line=start[0]) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['msgid']] # depends on [control=['if'], data=[]] else: self.add_message('unrecognized-inline-option', args=opt, line=start[0]) # depends on [control=['for'], data=[]]
def hacking_assert_greater_less(logical_line, noqa): r"""Check that self.assert{Greater,Less}[Equal] are used. Okay: self.assertGreater(x, y) Okay: self.assertGreaterEqual(x, y) Okay: self.assertLess(x, y) Okay: self.assertLessEqual(x, y) H205: self.assertTrue(x > y) H205: self.assertTrue(x >= y) H205: self.assertTrue(x < y) H205: self.assertTrue(x <= y) """ if noqa: return methods = ['assertTrue', 'assertFalse'] for method in methods: start = logical_line.find('.%s' % method) + 1 if start != 0: break else: return comparisons = [ast.Gt, ast.GtE, ast.Lt, ast.LtE] checker = AssertTrueFalseChecker(methods, comparisons) checker.visit(ast.parse(logical_line)) if checker.error: yield start, 'H205: Use assert{Greater,Less}[Equal]'
def function[hacking_assert_greater_less, parameter[logical_line, noqa]]: constant[Check that self.assert{Greater,Less}[Equal] are used. Okay: self.assertGreater(x, y) Okay: self.assertGreaterEqual(x, y) Okay: self.assertLess(x, y) Okay: self.assertLessEqual(x, y) H205: self.assertTrue(x > y) H205: self.assertTrue(x >= y) H205: self.assertTrue(x < y) H205: self.assertTrue(x <= y) ] if name[noqa] begin[:] return[None] variable[methods] assign[=] list[[<ast.Constant object at 0x7da1b04a4730>, <ast.Constant object at 0x7da1b04a7100>]] for taget[name[method]] in starred[name[methods]] begin[:] variable[start] assign[=] binary_operation[call[name[logical_line].find, parameter[binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> name[method]]]] + constant[1]] if compare[name[start] not_equal[!=] constant[0]] begin[:] break variable[comparisons] assign[=] list[[<ast.Attribute object at 0x7da1b04a4ee0>, <ast.Attribute object at 0x7da1b04a40d0>, <ast.Attribute object at 0x7da1b04a5e70>, <ast.Attribute object at 0x7da1b04a4ac0>]] variable[checker] assign[=] call[name[AssertTrueFalseChecker], parameter[name[methods], name[comparisons]]] call[name[checker].visit, parameter[call[name[ast].parse, parameter[name[logical_line]]]]] if name[checker].error begin[:] <ast.Yield object at 0x7da1b04a42b0>
keyword[def] identifier[hacking_assert_greater_less] ( identifier[logical_line] , identifier[noqa] ): literal[string] keyword[if] identifier[noqa] : keyword[return] identifier[methods] =[ literal[string] , literal[string] ] keyword[for] identifier[method] keyword[in] identifier[methods] : identifier[start] = identifier[logical_line] . identifier[find] ( literal[string] % identifier[method] )+ literal[int] keyword[if] identifier[start] != literal[int] : keyword[break] keyword[else] : keyword[return] identifier[comparisons] =[ identifier[ast] . identifier[Gt] , identifier[ast] . identifier[GtE] , identifier[ast] . identifier[Lt] , identifier[ast] . identifier[LtE] ] identifier[checker] = identifier[AssertTrueFalseChecker] ( identifier[methods] , identifier[comparisons] ) identifier[checker] . identifier[visit] ( identifier[ast] . identifier[parse] ( identifier[logical_line] )) keyword[if] identifier[checker] . identifier[error] : keyword[yield] identifier[start] , literal[string]
def hacking_assert_greater_less(logical_line, noqa): """Check that self.assert{Greater,Less}[Equal] are used. Okay: self.assertGreater(x, y) Okay: self.assertGreaterEqual(x, y) Okay: self.assertLess(x, y) Okay: self.assertLessEqual(x, y) H205: self.assertTrue(x > y) H205: self.assertTrue(x >= y) H205: self.assertTrue(x < y) H205: self.assertTrue(x <= y) """ if noqa: return # depends on [control=['if'], data=[]] methods = ['assertTrue', 'assertFalse'] for method in methods: start = logical_line.find('.%s' % method) + 1 if start != 0: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['method']] else: return comparisons = [ast.Gt, ast.GtE, ast.Lt, ast.LtE] checker = AssertTrueFalseChecker(methods, comparisons) checker.visit(ast.parse(logical_line)) if checker.error: yield (start, 'H205: Use assert{Greater,Less}[Equal]') # depends on [control=['if'], data=[]]
def _validate_auth_scheme(self, req): """ Check if the request has auth & the proper scheme Remember NOT to include the error related info in the WWW-Authenticate header for these conditions. :raise: AuthRequired """ if not req.auth: raise AuthRequired(**{ 'detail': 'You must first login to access the requested ' 'resource(s). Please retry your request using ' 'OAuth 2.0 Bearer Token Authentication as ' 'documented in RFC 6750. If you do not have an ' 'access_token then request one at the token ' 'endpdoint of: %s' % self.token_endpoint, 'headers': self._error_headers, 'links': 'tools.ietf.org/html/rfc6750#section-2.1', }) elif req.auth_scheme != 'bearer': raise AuthRequired(**{ 'detail': 'Your Authorization header is using an unsupported ' 'authentication scheme. Please modify your scheme ' 'to be a string of: "Bearer".', 'headers': self._error_headers, 'links': 'tools.ietf.org/html/rfc6750#section-2.1', })
def function[_validate_auth_scheme, parameter[self, req]]: constant[ Check if the request has auth & the proper scheme Remember NOT to include the error related info in the WWW-Authenticate header for these conditions. :raise: AuthRequired ] if <ast.UnaryOp object at 0x7da18dc99b70> begin[:] <ast.Raise object at 0x7da18dc9a0b0>
keyword[def] identifier[_validate_auth_scheme] ( identifier[self] , identifier[req] ): literal[string] keyword[if] keyword[not] identifier[req] . identifier[auth] : keyword[raise] identifier[AuthRequired] (**{ literal[string] : literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] % identifier[self] . identifier[token_endpoint] , literal[string] : identifier[self] . identifier[_error_headers] , literal[string] : literal[string] , }) keyword[elif] identifier[req] . identifier[auth_scheme] != literal[string] : keyword[raise] identifier[AuthRequired] (**{ literal[string] : literal[string] literal[string] literal[string] , literal[string] : identifier[self] . identifier[_error_headers] , literal[string] : literal[string] , })
def _validate_auth_scheme(self, req): """ Check if the request has auth & the proper scheme Remember NOT to include the error related info in the WWW-Authenticate header for these conditions. :raise: AuthRequired """ if not req.auth: raise AuthRequired(**{'detail': 'You must first login to access the requested resource(s). Please retry your request using OAuth 2.0 Bearer Token Authentication as documented in RFC 6750. If you do not have an access_token then request one at the token endpdoint of: %s' % self.token_endpoint, 'headers': self._error_headers, 'links': 'tools.ietf.org/html/rfc6750#section-2.1'}) # depends on [control=['if'], data=[]] elif req.auth_scheme != 'bearer': raise AuthRequired(**{'detail': 'Your Authorization header is using an unsupported authentication scheme. Please modify your scheme to be a string of: "Bearer".', 'headers': self._error_headers, 'links': 'tools.ietf.org/html/rfc6750#section-2.1'}) # depends on [control=['if'], data=[]]
def on_toml_dumps(self, toml, config, dictionary, **kwargs): """ The `toml <https://pypi.org/project/toml/>`_ dumps method. :param module toml: The ``toml`` module :param class config: The instance's config class :param dict dictionary: The dictionary to serialize :param list inline_tables: A list glob patterns to use for derminining which dictionaries should be rendered as inline tables, defaults to [], optional :returns: The TOML serialization :rtype: str Dumping inline tables uses :mod:`fnmatch` to compare ``.`` delimited dictionary path glob patterns to filter tables >>> config.dumps_toml(prefer="toml") name = "My Project" type = "personal-project" keywords = [ "example", "test",] [dependencies.a-dependency] name = "A Dependency" version = "v12" >>> config.dumps_toml(prefer="toml", inline_tables=["dependencies"]) name = "My Project" type = "personal-project" keywords = [ "example", "test",] dependencies = {a-dependency = {name = "A Dependency",version = "v12"} } >>> config.dumps_toml(prefer="toml", inline_tables=["dependencies.*"]) name = "My Project" type = "personal-project" keywords = [ "example", "test",] [dependencies] a-dependency = { name = "A Dependency", version = "v12" } """ inline_tables = set(kwargs.get("inline_tables", [])) def _dump_dict(dictionary, source, source_path=[]): for (key, value) in dictionary.items(): if isinstance(value, dict): is_inline = any( [ fnmatch.fnmatch(".".join(source_path + [key]), pattern) for pattern in inline_tables ] ) if is_inline: source[key] = toml.TomlDecoder().get_empty_inline_table() else: source[key] = {} source[key].update( _dump_dict(value, {}, source_path=source_path + [key]) ) else: source[key] = value return source encoder = toml.TomlEncoder(preserve=True) return toml.dumps(_dump_dict(dictionary, {}), encoder=encoder)
def function[on_toml_dumps, parameter[self, toml, config, dictionary]]: constant[ The `toml <https://pypi.org/project/toml/>`_ dumps method. :param module toml: The ``toml`` module :param class config: The instance's config class :param dict dictionary: The dictionary to serialize :param list inline_tables: A list glob patterns to use for derminining which dictionaries should be rendered as inline tables, defaults to [], optional :returns: The TOML serialization :rtype: str Dumping inline tables uses :mod:`fnmatch` to compare ``.`` delimited dictionary path glob patterns to filter tables >>> config.dumps_toml(prefer="toml") name = "My Project" type = "personal-project" keywords = [ "example", "test",] [dependencies.a-dependency] name = "A Dependency" version = "v12" >>> config.dumps_toml(prefer="toml", inline_tables=["dependencies"]) name = "My Project" type = "personal-project" keywords = [ "example", "test",] dependencies = {a-dependency = {name = "A Dependency",version = "v12"} } >>> config.dumps_toml(prefer="toml", inline_tables=["dependencies.*"]) name = "My Project" type = "personal-project" keywords = [ "example", "test",] [dependencies] a-dependency = { name = "A Dependency", version = "v12" } ] variable[inline_tables] assign[=] call[name[set], parameter[call[name[kwargs].get, parameter[constant[inline_tables], list[[]]]]]] def function[_dump_dict, parameter[dictionary, source, source_path]]: for taget[tuple[[<ast.Name object at 0x7da1b0b9cf40>, <ast.Name object at 0x7da1b0bcbb80>]]] in starred[call[name[dictionary].items, parameter[]]] begin[:] if call[name[isinstance], parameter[name[value], name[dict]]] begin[:] variable[is_inline] assign[=] call[name[any], parameter[<ast.ListComp object at 0x7da1b0bc9090>]] if name[is_inline] begin[:] call[name[source]][name[key]] assign[=] call[call[name[toml].TomlDecoder, parameter[]].get_empty_inline_table, parameter[]] call[call[name[source]][name[key]].update, parameter[call[name[_dump_dict], parameter[name[value], 
dictionary[[], []]]]]] return[name[source]] variable[encoder] assign[=] call[name[toml].TomlEncoder, parameter[]] return[call[name[toml].dumps, parameter[call[name[_dump_dict], parameter[name[dictionary], dictionary[[], []]]]]]]
keyword[def] identifier[on_toml_dumps] ( identifier[self] , identifier[toml] , identifier[config] , identifier[dictionary] ,** identifier[kwargs] ): literal[string] identifier[inline_tables] = identifier[set] ( identifier[kwargs] . identifier[get] ( literal[string] ,[])) keyword[def] identifier[_dump_dict] ( identifier[dictionary] , identifier[source] , identifier[source_path] =[]): keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[dictionary] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ): identifier[is_inline] = identifier[any] ( [ identifier[fnmatch] . identifier[fnmatch] ( literal[string] . identifier[join] ( identifier[source_path] +[ identifier[key] ]), identifier[pattern] ) keyword[for] identifier[pattern] keyword[in] identifier[inline_tables] ] ) keyword[if] identifier[is_inline] : identifier[source] [ identifier[key] ]= identifier[toml] . identifier[TomlDecoder] (). identifier[get_empty_inline_table] () keyword[else] : identifier[source] [ identifier[key] ]={} identifier[source] [ identifier[key] ]. identifier[update] ( identifier[_dump_dict] ( identifier[value] ,{}, identifier[source_path] = identifier[source_path] +[ identifier[key] ]) ) keyword[else] : identifier[source] [ identifier[key] ]= identifier[value] keyword[return] identifier[source] identifier[encoder] = identifier[toml] . identifier[TomlEncoder] ( identifier[preserve] = keyword[True] ) keyword[return] identifier[toml] . identifier[dumps] ( identifier[_dump_dict] ( identifier[dictionary] ,{}), identifier[encoder] = identifier[encoder] )
def on_toml_dumps(self, toml, config, dictionary, **kwargs): """ The `toml <https://pypi.org/project/toml/>`_ dumps method. :param module toml: The ``toml`` module :param class config: The instance's config class :param dict dictionary: The dictionary to serialize :param list inline_tables: A list glob patterns to use for derminining which dictionaries should be rendered as inline tables, defaults to [], optional :returns: The TOML serialization :rtype: str Dumping inline tables uses :mod:`fnmatch` to compare ``.`` delimited dictionary path glob patterns to filter tables >>> config.dumps_toml(prefer="toml") name = "My Project" type = "personal-project" keywords = [ "example", "test",] [dependencies.a-dependency] name = "A Dependency" version = "v12" >>> config.dumps_toml(prefer="toml", inline_tables=["dependencies"]) name = "My Project" type = "personal-project" keywords = [ "example", "test",] dependencies = {a-dependency = {name = "A Dependency",version = "v12"} } >>> config.dumps_toml(prefer="toml", inline_tables=["dependencies.*"]) name = "My Project" type = "personal-project" keywords = [ "example", "test",] [dependencies] a-dependency = { name = "A Dependency", version = "v12" } """ inline_tables = set(kwargs.get('inline_tables', [])) def _dump_dict(dictionary, source, source_path=[]): for (key, value) in dictionary.items(): if isinstance(value, dict): is_inline = any([fnmatch.fnmatch('.'.join(source_path + [key]), pattern) for pattern in inline_tables]) if is_inline: source[key] = toml.TomlDecoder().get_empty_inline_table() # depends on [control=['if'], data=[]] else: source[key] = {} source[key].update(_dump_dict(value, {}, source_path=source_path + [key])) # depends on [control=['if'], data=[]] else: source[key] = value # depends on [control=['for'], data=[]] return source encoder = toml.TomlEncoder(preserve=True) return toml.dumps(_dump_dict(dictionary, {}), encoder=encoder)
def add_metadata_defaults(md): """Central location for defaults for algorithm inputs. """ defaults = {"batch": None, "phenotype": ""} for k, v in defaults.items(): if k not in md: md[k] = v return md
def function[add_metadata_defaults, parameter[md]]: constant[Central location for defaults for algorithm inputs. ] variable[defaults] assign[=] dictionary[[<ast.Constant object at 0x7da1b178d2d0>, <ast.Constant object at 0x7da1b178e260>], [<ast.Constant object at 0x7da1b178e3e0>, <ast.Constant object at 0x7da1b178c0a0>]] for taget[tuple[[<ast.Name object at 0x7da1b178f640>, <ast.Name object at 0x7da1b178f580>]]] in starred[call[name[defaults].items, parameter[]]] begin[:] if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[md]] begin[:] call[name[md]][name[k]] assign[=] name[v] return[name[md]]
keyword[def] identifier[add_metadata_defaults] ( identifier[md] ): literal[string] identifier[defaults] ={ literal[string] : keyword[None] , literal[string] : literal[string] } keyword[for] identifier[k] , identifier[v] keyword[in] identifier[defaults] . identifier[items] (): keyword[if] identifier[k] keyword[not] keyword[in] identifier[md] : identifier[md] [ identifier[k] ]= identifier[v] keyword[return] identifier[md]
def add_metadata_defaults(md): """Central location for defaults for algorithm inputs. """ defaults = {'batch': None, 'phenotype': ''} for (k, v) in defaults.items(): if k not in md: md[k] = v # depends on [control=['if'], data=['k', 'md']] # depends on [control=['for'], data=[]] return md
def check_all_local(self): """Check or uncheck all local event parameters.""" all_local_chk = self.event['global']['all_local'].isChecked() for buttons in self.event['local'].values(): buttons[0].setChecked(all_local_chk) buttons[1].setEnabled(buttons[0].isChecked())
def function[check_all_local, parameter[self]]: constant[Check or uncheck all local event parameters.] variable[all_local_chk] assign[=] call[call[call[name[self].event][constant[global]]][constant[all_local]].isChecked, parameter[]] for taget[name[buttons]] in starred[call[call[name[self].event][constant[local]].values, parameter[]]] begin[:] call[call[name[buttons]][constant[0]].setChecked, parameter[name[all_local_chk]]] call[call[name[buttons]][constant[1]].setEnabled, parameter[call[call[name[buttons]][constant[0]].isChecked, parameter[]]]]
keyword[def] identifier[check_all_local] ( identifier[self] ): literal[string] identifier[all_local_chk] = identifier[self] . identifier[event] [ literal[string] ][ literal[string] ]. identifier[isChecked] () keyword[for] identifier[buttons] keyword[in] identifier[self] . identifier[event] [ literal[string] ]. identifier[values] (): identifier[buttons] [ literal[int] ]. identifier[setChecked] ( identifier[all_local_chk] ) identifier[buttons] [ literal[int] ]. identifier[setEnabled] ( identifier[buttons] [ literal[int] ]. identifier[isChecked] ())
def check_all_local(self): """Check or uncheck all local event parameters.""" all_local_chk = self.event['global']['all_local'].isChecked() for buttons in self.event['local'].values(): buttons[0].setChecked(all_local_chk) buttons[1].setEnabled(buttons[0].isChecked()) # depends on [control=['for'], data=['buttons']]
def calc_synch_snu_ujy(b, ne, delta, sinth, width, elongation, dist, ghz, E0=1.): """Calculate a flux density from pure gyrosynchrotron emission. This combines Dulk (1985) equations 40 and 41, which are fitting functions assuming a power-law electron population, with standard radiative transfer through a uniform medium. Arguments are: b Magnetic field strength in Gauss ne The density of electrons per cubic centimeter with energies greater than 10 keV. delta The power-law index defining the energy distribution of the electron population, with ``n(E) ~ E^(-delta)``. The equation is valid for ``2 <~ delta <~ 5``. sinth The sine of the angle between the line of sight and the magnetic field direction. It's not specified for what range of values the expressions work well. width The characteristic cross-sectional width of the emitting region, in cm. elongation The the elongation of the emitting region; ``depth = width * elongation``. dist The distance to the emitting region, in cm. ghz The frequencies at which to evaluate the spectrum, **in GHz**. E0 The minimum energy of electrons to consider, in MeV. Defaults to 1 so that these functions can be called identically to the gyrosynchrotron functions. The return value is the flux density **in μJy**. The arguments can be Numpy arrays. No complaints are raised if you attempt to use the equations outside of their range of validity. """ hz = ghz * 1e9 eta = calc_synch_eta(b, ne, delta, sinth, hz, E0=E0) kappa = calc_synch_kappa(b, ne, delta, sinth, hz, E0=E0) snu = calc_snu(eta, kappa, width, elongation, dist) ujy = snu * cgs.jypercgs * 1e6 return ujy
def function[calc_synch_snu_ujy, parameter[b, ne, delta, sinth, width, elongation, dist, ghz, E0]]: constant[Calculate a flux density from pure gyrosynchrotron emission. This combines Dulk (1985) equations 40 and 41, which are fitting functions assuming a power-law electron population, with standard radiative transfer through a uniform medium. Arguments are: b Magnetic field strength in Gauss ne The density of electrons per cubic centimeter with energies greater than 10 keV. delta The power-law index defining the energy distribution of the electron population, with ``n(E) ~ E^(-delta)``. The equation is valid for ``2 <~ delta <~ 5``. sinth The sine of the angle between the line of sight and the magnetic field direction. It's not specified for what range of values the expressions work well. width The characteristic cross-sectional width of the emitting region, in cm. elongation The the elongation of the emitting region; ``depth = width * elongation``. dist The distance to the emitting region, in cm. ghz The frequencies at which to evaluate the spectrum, **in GHz**. E0 The minimum energy of electrons to consider, in MeV. Defaults to 1 so that these functions can be called identically to the gyrosynchrotron functions. The return value is the flux density **in μJy**. The arguments can be Numpy arrays. No complaints are raised if you attempt to use the equations outside of their range of validity. ] variable[hz] assign[=] binary_operation[name[ghz] * constant[1000000000.0]] variable[eta] assign[=] call[name[calc_synch_eta], parameter[name[b], name[ne], name[delta], name[sinth], name[hz]]] variable[kappa] assign[=] call[name[calc_synch_kappa], parameter[name[b], name[ne], name[delta], name[sinth], name[hz]]] variable[snu] assign[=] call[name[calc_snu], parameter[name[eta], name[kappa], name[width], name[elongation], name[dist]]] variable[ujy] assign[=] binary_operation[binary_operation[name[snu] * name[cgs].jypercgs] * constant[1000000.0]] return[name[ujy]]
keyword[def] identifier[calc_synch_snu_ujy] ( identifier[b] , identifier[ne] , identifier[delta] , identifier[sinth] , identifier[width] , identifier[elongation] , identifier[dist] , identifier[ghz] , identifier[E0] = literal[int] ): literal[string] identifier[hz] = identifier[ghz] * literal[int] identifier[eta] = identifier[calc_synch_eta] ( identifier[b] , identifier[ne] , identifier[delta] , identifier[sinth] , identifier[hz] , identifier[E0] = identifier[E0] ) identifier[kappa] = identifier[calc_synch_kappa] ( identifier[b] , identifier[ne] , identifier[delta] , identifier[sinth] , identifier[hz] , identifier[E0] = identifier[E0] ) identifier[snu] = identifier[calc_snu] ( identifier[eta] , identifier[kappa] , identifier[width] , identifier[elongation] , identifier[dist] ) identifier[ujy] = identifier[snu] * identifier[cgs] . identifier[jypercgs] * literal[int] keyword[return] identifier[ujy]
def calc_synch_snu_ujy(b, ne, delta, sinth, width, elongation, dist, ghz, E0=1.0): """Calculate a flux density from pure gyrosynchrotron emission. This combines Dulk (1985) equations 40 and 41, which are fitting functions assuming a power-law electron population, with standard radiative transfer through a uniform medium. Arguments are: b Magnetic field strength in Gauss ne The density of electrons per cubic centimeter with energies greater than 10 keV. delta The power-law index defining the energy distribution of the electron population, with ``n(E) ~ E^(-delta)``. The equation is valid for ``2 <~ delta <~ 5``. sinth The sine of the angle between the line of sight and the magnetic field direction. It's not specified for what range of values the expressions work well. width The characteristic cross-sectional width of the emitting region, in cm. elongation The the elongation of the emitting region; ``depth = width * elongation``. dist The distance to the emitting region, in cm. ghz The frequencies at which to evaluate the spectrum, **in GHz**. E0 The minimum energy of electrons to consider, in MeV. Defaults to 1 so that these functions can be called identically to the gyrosynchrotron functions. The return value is the flux density **in μJy**. The arguments can be Numpy arrays. No complaints are raised if you attempt to use the equations outside of their range of validity. """ hz = ghz * 1000000000.0 eta = calc_synch_eta(b, ne, delta, sinth, hz, E0=E0) kappa = calc_synch_kappa(b, ne, delta, sinth, hz, E0=E0) snu = calc_snu(eta, kappa, width, elongation, dist) ujy = snu * cgs.jypercgs * 1000000.0 return ujy
def random_combination(iterable, r): "Random selection from itertools.combinations(iterable, r)" pool = tuple(iterable) n = len(pool) indices = sorted(random.sample(xrange(n), r)) return tuple(pool[i] for i in indices)
def function[random_combination, parameter[iterable, r]]: constant[Random selection from itertools.combinations(iterable, r)] variable[pool] assign[=] call[name[tuple], parameter[name[iterable]]] variable[n] assign[=] call[name[len], parameter[name[pool]]] variable[indices] assign[=] call[name[sorted], parameter[call[name[random].sample, parameter[call[name[xrange], parameter[name[n]]], name[r]]]]] return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da207f9ad40>]]]
keyword[def] identifier[random_combination] ( identifier[iterable] , identifier[r] ): literal[string] identifier[pool] = identifier[tuple] ( identifier[iterable] ) identifier[n] = identifier[len] ( identifier[pool] ) identifier[indices] = identifier[sorted] ( identifier[random] . identifier[sample] ( identifier[xrange] ( identifier[n] ), identifier[r] )) keyword[return] identifier[tuple] ( identifier[pool] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[indices] )
def random_combination(iterable, r): """Random selection from itertools.combinations(iterable, r)""" pool = tuple(iterable) n = len(pool) indices = sorted(random.sample(xrange(n), r)) return tuple((pool[i] for i in indices))
def _get_cpu_info_from_dmesg(): ''' Returns the CPU info gathered from dmesg. Returns {} if dmesg is not found or does not have the desired info. ''' # Just return {} if there is no dmesg if not DataSource.has_dmesg(): return {} # If dmesg fails return {} returncode, output = DataSource.dmesg_a() if output == None or returncode != 0: return {} return _parse_dmesg_output(output)
def function[_get_cpu_info_from_dmesg, parameter[]]: constant[ Returns the CPU info gathered from dmesg. Returns {} if dmesg is not found or does not have the desired info. ] if <ast.UnaryOp object at 0x7da204620550> begin[:] return[dictionary[[], []]] <ast.Tuple object at 0x7da204622cb0> assign[=] call[name[DataSource].dmesg_a, parameter[]] if <ast.BoolOp object at 0x7da204620280> begin[:] return[dictionary[[], []]] return[call[name[_parse_dmesg_output], parameter[name[output]]]]
keyword[def] identifier[_get_cpu_info_from_dmesg] (): literal[string] keyword[if] keyword[not] identifier[DataSource] . identifier[has_dmesg] (): keyword[return] {} identifier[returncode] , identifier[output] = identifier[DataSource] . identifier[dmesg_a] () keyword[if] identifier[output] == keyword[None] keyword[or] identifier[returncode] != literal[int] : keyword[return] {} keyword[return] identifier[_parse_dmesg_output] ( identifier[output] )
def _get_cpu_info_from_dmesg(): """ Returns the CPU info gathered from dmesg. Returns {} if dmesg is not found or does not have the desired info. """ # Just return {} if there is no dmesg if not DataSource.has_dmesg(): return {} # depends on [control=['if'], data=[]] # If dmesg fails return {} (returncode, output) = DataSource.dmesg_a() if output == None or returncode != 0: return {} # depends on [control=['if'], data=[]] return _parse_dmesg_output(output)
def _update_zone_tracker(self, message): """ Trigger an update of the :py:class:`~alarmdecoder.messages.Zonetracker`. :param message: message to update the zonetracker with :type message: :py:class:`~alarmdecoder.messages.Message`, :py:class:`~alarmdecoder.messages.ExpanderMessage`, :py:class:`~alarmdecoder.messages.LRRMessage`, or :py:class:`~alarmdecoder.messages.RFMessage` """ # Retrieve a list of faults. # NOTE: This only happens on first boot or after exiting programming mode. if isinstance(message, Message): if not message.ready and ("Hit * for faults" in message.text or "Press * to show faults" in message.text): if time.time() > self.last_fault_expansion + self.fault_expansion_time_limit: self.last_fault_expansion = time.time() self.send('*') return self._zonetracker.update(message)
def function[_update_zone_tracker, parameter[self, message]]: constant[ Trigger an update of the :py:class:`~alarmdecoder.messages.Zonetracker`. :param message: message to update the zonetracker with :type message: :py:class:`~alarmdecoder.messages.Message`, :py:class:`~alarmdecoder.messages.ExpanderMessage`, :py:class:`~alarmdecoder.messages.LRRMessage`, or :py:class:`~alarmdecoder.messages.RFMessage` ] if call[name[isinstance], parameter[name[message], name[Message]]] begin[:] if <ast.BoolOp object at 0x7da1b27a6e60> begin[:] if compare[call[name[time].time, parameter[]] greater[>] binary_operation[name[self].last_fault_expansion + name[self].fault_expansion_time_limit]] begin[:] name[self].last_fault_expansion assign[=] call[name[time].time, parameter[]] call[name[self].send, parameter[constant[*]]] return[None] call[name[self]._zonetracker.update, parameter[name[message]]]
keyword[def] identifier[_update_zone_tracker] ( identifier[self] , identifier[message] ): literal[string] keyword[if] identifier[isinstance] ( identifier[message] , identifier[Message] ): keyword[if] keyword[not] identifier[message] . identifier[ready] keyword[and] ( literal[string] keyword[in] identifier[message] . identifier[text] keyword[or] literal[string] keyword[in] identifier[message] . identifier[text] ): keyword[if] identifier[time] . identifier[time] ()> identifier[self] . identifier[last_fault_expansion] + identifier[self] . identifier[fault_expansion_time_limit] : identifier[self] . identifier[last_fault_expansion] = identifier[time] . identifier[time] () identifier[self] . identifier[send] ( literal[string] ) keyword[return] identifier[self] . identifier[_zonetracker] . identifier[update] ( identifier[message] )
def _update_zone_tracker(self, message): """ Trigger an update of the :py:class:`~alarmdecoder.messages.Zonetracker`. :param message: message to update the zonetracker with :type message: :py:class:`~alarmdecoder.messages.Message`, :py:class:`~alarmdecoder.messages.ExpanderMessage`, :py:class:`~alarmdecoder.messages.LRRMessage`, or :py:class:`~alarmdecoder.messages.RFMessage` """ # Retrieve a list of faults. # NOTE: This only happens on first boot or after exiting programming mode. if isinstance(message, Message): if not message.ready and ('Hit * for faults' in message.text or 'Press * to show faults' in message.text): if time.time() > self.last_fault_expansion + self.fault_expansion_time_limit: self.last_fault_expansion = time.time() self.send('*') return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self._zonetracker.update(message)
def centerdc_2_twosided(data): """Convert a center-dc PSD to a twosided PSD""" N = len(data) newpsd = np.concatenate((data[N//2:], (cshift(data[0:N//2], -1)))) return newpsd
def function[centerdc_2_twosided, parameter[data]]: constant[Convert a center-dc PSD to a twosided PSD] variable[N] assign[=] call[name[len], parameter[name[data]]] variable[newpsd] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Subscript object at 0x7da1b01e66e0>, <ast.Call object at 0x7da1b01e7370>]]]] return[name[newpsd]]
keyword[def] identifier[centerdc_2_twosided] ( identifier[data] ): literal[string] identifier[N] = identifier[len] ( identifier[data] ) identifier[newpsd] = identifier[np] . identifier[concatenate] (( identifier[data] [ identifier[N] // literal[int] :],( identifier[cshift] ( identifier[data] [ literal[int] : identifier[N] // literal[int] ],- literal[int] )))) keyword[return] identifier[newpsd]
def centerdc_2_twosided(data): """Convert a center-dc PSD to a twosided PSD""" N = len(data) newpsd = np.concatenate((data[N // 2:], cshift(data[0:N // 2], -1))) return newpsd
def palette( self ): """ Converts the current color data to a QPalette. :return <QPalette> """ palette = QPalette() for colorGroup, qColorGroup in self.GroupMapping.items(): for colorRole, qColorRole in self.RoleMapping.items(): color = self.color(colorRole, colorGroup) palette.setColor( qColorGroup, qColorRole, color ) return palette
def function[palette, parameter[self]]: constant[ Converts the current color data to a QPalette. :return <QPalette> ] variable[palette] assign[=] call[name[QPalette], parameter[]] for taget[tuple[[<ast.Name object at 0x7da18c4cdab0>, <ast.Name object at 0x7da18c4cf3d0>]]] in starred[call[name[self].GroupMapping.items, parameter[]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18c4cd1b0>, <ast.Name object at 0x7da18c4cd7e0>]]] in starred[call[name[self].RoleMapping.items, parameter[]]] begin[:] variable[color] assign[=] call[name[self].color, parameter[name[colorRole], name[colorGroup]]] call[name[palette].setColor, parameter[name[qColorGroup], name[qColorRole], name[color]]] return[name[palette]]
keyword[def] identifier[palette] ( identifier[self] ): literal[string] identifier[palette] = identifier[QPalette] () keyword[for] identifier[colorGroup] , identifier[qColorGroup] keyword[in] identifier[self] . identifier[GroupMapping] . identifier[items] (): keyword[for] identifier[colorRole] , identifier[qColorRole] keyword[in] identifier[self] . identifier[RoleMapping] . identifier[items] (): identifier[color] = identifier[self] . identifier[color] ( identifier[colorRole] , identifier[colorGroup] ) identifier[palette] . identifier[setColor] ( identifier[qColorGroup] , identifier[qColorRole] , identifier[color] ) keyword[return] identifier[palette]
def palette(self): """ Converts the current color data to a QPalette. :return <QPalette> """ palette = QPalette() for (colorGroup, qColorGroup) in self.GroupMapping.items(): for (colorRole, qColorRole) in self.RoleMapping.items(): color = self.color(colorRole, colorGroup) palette.setColor(qColorGroup, qColorRole, color) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return palette
def module_build(self, module, modname=None): """Build an astroid from a living module instance.""" node = None path = getattr(module, "__file__", None) if path is not None: path_, ext = os.path.splitext(modutils._path_from_filename(path)) if ext in (".py", ".pyc", ".pyo") and os.path.exists(path_ + ".py"): node = self.file_build(path_ + ".py", modname) if node is None: # this is a built-in module # get a partial representation by introspection node = self.inspect_build(module, modname=modname, path=path) if self._apply_transforms: # We have to handle transformation by ourselves since the # rebuilder isn't called for builtin nodes node = self._manager.visit_transforms(node) return node
def function[module_build, parameter[self, module, modname]]: constant[Build an astroid from a living module instance.] variable[node] assign[=] constant[None] variable[path] assign[=] call[name[getattr], parameter[name[module], constant[__file__], constant[None]]] if compare[name[path] is_not constant[None]] begin[:] <ast.Tuple object at 0x7da1b1e77550> assign[=] call[name[os].path.splitext, parameter[call[name[modutils]._path_from_filename, parameter[name[path]]]]] if <ast.BoolOp object at 0x7da1b1e772e0> begin[:] variable[node] assign[=] call[name[self].file_build, parameter[binary_operation[name[path_] + constant[.py]], name[modname]]] if compare[name[node] is constant[None]] begin[:] variable[node] assign[=] call[name[self].inspect_build, parameter[name[module]]] if name[self]._apply_transforms begin[:] variable[node] assign[=] call[name[self]._manager.visit_transforms, parameter[name[node]]] return[name[node]]
keyword[def] identifier[module_build] ( identifier[self] , identifier[module] , identifier[modname] = keyword[None] ): literal[string] identifier[node] = keyword[None] identifier[path] = identifier[getattr] ( identifier[module] , literal[string] , keyword[None] ) keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] : identifier[path_] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[modutils] . identifier[_path_from_filename] ( identifier[path] )) keyword[if] identifier[ext] keyword[in] ( literal[string] , literal[string] , literal[string] ) keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[path_] + literal[string] ): identifier[node] = identifier[self] . identifier[file_build] ( identifier[path_] + literal[string] , identifier[modname] ) keyword[if] identifier[node] keyword[is] keyword[None] : identifier[node] = identifier[self] . identifier[inspect_build] ( identifier[module] , identifier[modname] = identifier[modname] , identifier[path] = identifier[path] ) keyword[if] identifier[self] . identifier[_apply_transforms] : identifier[node] = identifier[self] . identifier[_manager] . identifier[visit_transforms] ( identifier[node] ) keyword[return] identifier[node]
def module_build(self, module, modname=None): """Build an astroid from a living module instance.""" node = None path = getattr(module, '__file__', None) if path is not None: (path_, ext) = os.path.splitext(modutils._path_from_filename(path)) if ext in ('.py', '.pyc', '.pyo') and os.path.exists(path_ + '.py'): node = self.file_build(path_ + '.py', modname) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['path']] if node is None: # this is a built-in module # get a partial representation by introspection node = self.inspect_build(module, modname=modname, path=path) if self._apply_transforms: # We have to handle transformation by ourselves since the # rebuilder isn't called for builtin nodes node = self._manager.visit_transforms(node) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['node']] return node
def _build_url(self, path=None): """ helper function to build a WFS 3.0 URL @type path: string @param path: path of WFS URL @returns: fully constructed URL path """ url = self.url if self.url_query_string is not None: LOGGER.debug('base URL has a query string') url = urljoin(url, path) url = '?'.join([url, self.url_query_string]) else: url = urljoin(url, path) LOGGER.debug('URL: {}'.format(url)) return url
def function[_build_url, parameter[self, path]]: constant[ helper function to build a WFS 3.0 URL @type path: string @param path: path of WFS URL @returns: fully constructed URL path ] variable[url] assign[=] name[self].url if compare[name[self].url_query_string is_not constant[None]] begin[:] call[name[LOGGER].debug, parameter[constant[base URL has a query string]]] variable[url] assign[=] call[name[urljoin], parameter[name[url], name[path]]] variable[url] assign[=] call[constant[?].join, parameter[list[[<ast.Name object at 0x7da1b020d750>, <ast.Attribute object at 0x7da1b020ebc0>]]]] call[name[LOGGER].debug, parameter[call[constant[URL: {}].format, parameter[name[url]]]]] return[name[url]]
keyword[def] identifier[_build_url] ( identifier[self] , identifier[path] = keyword[None] ): literal[string] identifier[url] = identifier[self] . identifier[url] keyword[if] identifier[self] . identifier[url_query_string] keyword[is] keyword[not] keyword[None] : identifier[LOGGER] . identifier[debug] ( literal[string] ) identifier[url] = identifier[urljoin] ( identifier[url] , identifier[path] ) identifier[url] = literal[string] . identifier[join] ([ identifier[url] , identifier[self] . identifier[url_query_string] ]) keyword[else] : identifier[url] = identifier[urljoin] ( identifier[url] , identifier[path] ) identifier[LOGGER] . identifier[debug] ( literal[string] . identifier[format] ( identifier[url] )) keyword[return] identifier[url]
def _build_url(self, path=None): """ helper function to build a WFS 3.0 URL @type path: string @param path: path of WFS URL @returns: fully constructed URL path """ url = self.url if self.url_query_string is not None: LOGGER.debug('base URL has a query string') url = urljoin(url, path) url = '?'.join([url, self.url_query_string]) # depends on [control=['if'], data=[]] else: url = urljoin(url, path) LOGGER.debug('URL: {}'.format(url)) return url
def groups_remove_owner(self, room_id, user_id, **kwargs): """Removes the role of owner from a user in the current Group.""" return self.__call_api_post('groups.removeOwner', roomId=room_id, userId=user_id, kwargs=kwargs)
def function[groups_remove_owner, parameter[self, room_id, user_id]]: constant[Removes the role of owner from a user in the current Group.] return[call[name[self].__call_api_post, parameter[constant[groups.removeOwner]]]]
keyword[def] identifier[groups_remove_owner] ( identifier[self] , identifier[room_id] , identifier[user_id] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[__call_api_post] ( literal[string] , identifier[roomId] = identifier[room_id] , identifier[userId] = identifier[user_id] , identifier[kwargs] = identifier[kwargs] )
def groups_remove_owner(self, room_id, user_id, **kwargs): """Removes the role of owner from a user in the current Group.""" return self.__call_api_post('groups.removeOwner', roomId=room_id, userId=user_id, kwargs=kwargs)
def present(name, Bucket, LocationConstraint=None, ACL=None, CORSRules=None, LifecycleConfiguration=None, Logging=None, NotificationConfiguration=None, Policy=None, Replication=None, RequestPayment=None, Tagging=None, Versioning=None, Website=None, region=None, key=None, keyid=None, profile=None): ''' Ensure bucket exists. name The name of the state definition Bucket Name of the bucket. LocationConstraint 'EU'|'eu-west-1'|'us-west-1'|'us-west-2'|'ap-southeast-1'|'ap-southeast-2'|'ap-northeast-1'|'sa-east-1'|'cn-north-1'|'eu-central-1' ACL The permissions on a bucket using access control lists (ACL). CORSRules The cors configuration for a bucket. LifecycleConfiguration Lifecycle configuration for your bucket Logging The logging parameters for a bucket and to specify permissions for who can view and modify the logging parameters. NotificationConfiguration notifications of specified events for a bucket Policy Policy on the bucket. As a special case, if the Policy is set to the string `external`, it will not be managed by this state, and can thus be safely set in other ways (e.g. by other state calls, or by hand if some unusual policy configuration is required). Replication Replication rules. You can add as many as 1,000 rules. Total replication configuration size can be up to 2 MB RequestPayment The request payment configuration for a bucket. By default, the bucket owner pays for downloads from the bucket. This configuration parameter enables the bucket owner (only) to specify that the person requesting the download will be charged for the download Tagging A dictionary of tags that should be set on the bucket Versioning The versioning state of the bucket Website The website configuration of the bucket region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. 
''' ret = {'name': Bucket, 'result': True, 'comment': '', 'changes': {} } if ACL is None: ACL = {'ACL': 'private'} if NotificationConfiguration is None: NotificationConfiguration = {} if RequestPayment is None: RequestPayment = {'Payer': 'BucketOwner'} if Policy: if isinstance(Policy, six.string_types) and Policy != 'external': Policy = salt.utils.json.loads(Policy) Policy = __utils__['boto3.ordered'](Policy) r = __salt__['boto_s3_bucket.exists'](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = 'Failed to create bucket: {0}.'.format(r['error']['message']) return ret if not r.get('exists'): if __opts__['test']: ret['comment'] = 'S3 bucket {0} is set to be created.'.format(Bucket) ret['result'] = None return ret r = __salt__['boto_s3_bucket.create'](Bucket=Bucket, LocationConstraint=LocationConstraint, region=region, key=key, keyid=keyid, profile=profile) if not r.get('created'): ret['result'] = False ret['comment'] = 'Failed to create bucket: {0}.'.format(r['error']['message']) return ret for setter, testval, funcargs in ( ('put_acl', ACL, ACL), ('put_cors', CORSRules, {"CORSRules": CORSRules}), ('put_lifecycle_configuration', LifecycleConfiguration, {"Rules": LifecycleConfiguration}), ('put_logging', Logging, Logging), ('put_notification_configuration', NotificationConfiguration, NotificationConfiguration), ('put_policy', Policy, {"Policy": Policy}), # versioning must be set before replication ('put_versioning', Versioning, Versioning), ('put_replication', Replication, Replication), ('put_request_payment', RequestPayment, RequestPayment), ('put_tagging', Tagging, Tagging), ('put_website', Website, Website), ): if testval is not None: r = __salt__['boto_s3_bucket.{0}'.format(setter)](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile, **funcargs) if not r.get('updated'): ret['result'] = False ret['comment'] = 'Failed to create bucket: {0}.'.format(r['error']['message']) 
return ret _describe = __salt__['boto_s3_bucket.describe'](Bucket, region=region, key=key, keyid=keyid, profile=profile) ret['changes']['old'] = {'bucket': None} ret['changes']['new'] = _describe ret['comment'] = 'S3 bucket {0} created.'.format(Bucket) return ret # bucket exists, ensure config matches ret['comment'] = ' '.join([ret['comment'], 'S3 bucket {0} is present.'.format(Bucket)]) ret['changes'] = {} _describe = __salt__['boto_s3_bucket.describe'](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile) if 'error' in _describe: ret['result'] = False ret['comment'] = 'Failed to update bucket: {0}.'.format(_describe['error']['message']) ret['changes'] = {} return ret _describe = _describe['bucket'] # Once versioning has been enabled, it can't completely go away, it can # only be suspended if not bool(Versioning) and bool(_describe.get('Versioning')): Versioning = {'Status': 'Suspended'} config_items = [ ('ACL', 'put_acl', _describe.get('ACL'), _compare_acl, ACL, None), ('CORS', 'put_cors', _describe.get('CORS'), _compare_json, {"CORSRules": CORSRules} if CORSRules else None, 'delete_cors'), ('LifecycleConfiguration', 'put_lifecycle_configuration', _describe.get('LifecycleConfiguration'), _compare_json, {"Rules": LifecycleConfiguration} if LifecycleConfiguration else None, 'delete_lifecycle_configuration'), ('Logging', 'put_logging', _describe.get('Logging', {}).get('LoggingEnabled'), _compare_json, Logging, None), ('NotificationConfiguration', 'put_notification_configuration', _describe.get('NotificationConfiguration'), _compare_json, NotificationConfiguration, None), ('Policy', 'put_policy', _describe.get('Policy'), _compare_policy, {"Policy": Policy} if Policy else None, 'delete_policy'), ('RequestPayment', 'put_request_payment', _describe.get('RequestPayment'), _compare_json, RequestPayment, None), ('Tagging', 'put_tagging', _describe.get('Tagging'), _compare_json, Tagging, 'delete_tagging'), ('Website', 'put_website', _describe.get('Website'), 
_compare_json, Website, 'delete_website'), ] versioning_item = ('Versioning', 'put_versioning', _describe.get('Versioning'), _compare_json, Versioning or {}, None) # Substitute full ARN into desired state for comparison replication_item = ('Replication', 'put_replication', _describe.get('Replication', {}).get('ReplicationConfiguration'), _compare_replication, Replication, 'delete_replication') # versioning must be turned on before replication can be on, thus replication # must be turned off before versioning can be off if Replication is not None: # replication will be on, must deal with versioning first config_items.append(versioning_item) config_items.append(replication_item) else: # replication will be off, deal with it first config_items.append(replication_item) config_items.append(versioning_item) update = False changes = {} for varname, setter, current, comparator, desired, deleter in config_items: if varname == 'Policy': if desired == {'Policy': 'external'}: # Short-circuit to allow external policy control. log.debug('S3 Policy set to `external`, skipping application.') continue if current is not None: temp = current.get('Policy') # Policy description is always returned as a JSON string. # Convert it to JSON now for ease of comparisons later. 
if isinstance(temp, six.string_types): current = __utils__['boto3.ordered']( {'Policy': salt.utils.json.loads(temp)} ) if not comparator(current, desired, region, key, keyid, profile): update = True if varname == 'ACL': changes.setdefault('new', {})[varname] = _acl_to_grant( desired, _get_canonical_id(region, key, keyid, profile)) else: changes.setdefault('new', {})[varname] = desired changes.setdefault('old', {})[varname] = current if not __opts__['test']: if deleter and desired is None: # Setting can be deleted, so use that to unset it r = __salt__['boto_s3_bucket.{0}'.format(deleter)](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile) if not r.get('deleted'): ret['result'] = False ret['comment'] = 'Failed to update bucket: {0}.'.format(r['error']['message']) return ret else: r = __salt__['boto_s3_bucket.{0}'.format(setter)](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile, **(desired or {})) if not r.get('updated'): ret['result'] = False ret['comment'] = 'Failed to update bucket: {0}.'.format(r['error']['message']) return ret if update and __opts__['test']: msg = 'S3 bucket {0} set to be modified.'.format(Bucket) ret['comment'] = msg ret['result'] = None ret['pchanges'] = changes return ret ret['changes'] = changes # Since location can't be changed, try that last so at least the rest of # the things are correct by the time we fail here. Fail so the user will # notice something mismatches their desired state. if _describe.get('Location', {}).get('LocationConstraint') != LocationConstraint: msg = 'Bucket {0} location does not match desired configuration, but cannot be changed'.format(LocationConstraint) log.warning(msg) ret['result'] = False ret['comment'] = 'Failed to update bucket: {0}.'.format(msg) return ret return ret
def function[present, parameter[name, Bucket, LocationConstraint, ACL, CORSRules, LifecycleConfiguration, Logging, NotificationConfiguration, Policy, Replication, RequestPayment, Tagging, Versioning, Website, region, key, keyid, profile]]: constant[ Ensure bucket exists. name The name of the state definition Bucket Name of the bucket. LocationConstraint 'EU'|'eu-west-1'|'us-west-1'|'us-west-2'|'ap-southeast-1'|'ap-southeast-2'|'ap-northeast-1'|'sa-east-1'|'cn-north-1'|'eu-central-1' ACL The permissions on a bucket using access control lists (ACL). CORSRules The cors configuration for a bucket. LifecycleConfiguration Lifecycle configuration for your bucket Logging The logging parameters for a bucket and to specify permissions for who can view and modify the logging parameters. NotificationConfiguration notifications of specified events for a bucket Policy Policy on the bucket. As a special case, if the Policy is set to the string `external`, it will not be managed by this state, and can thus be safely set in other ways (e.g. by other state calls, or by hand if some unusual policy configuration is required). Replication Replication rules. You can add as many as 1,000 rules. Total replication configuration size can be up to 2 MB RequestPayment The request payment configuration for a bucket. By default, the bucket owner pays for downloads from the bucket. This configuration parameter enables the bucket owner (only) to specify that the person requesting the download will be charged for the download Tagging A dictionary of tags that should be set on the bucket Versioning The versioning state of the bucket Website The website configuration of the bucket region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. 
] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da2043472b0>, <ast.Constant object at 0x7da2043477f0>, <ast.Constant object at 0x7da2043458a0>, <ast.Constant object at 0x7da204344490>], [<ast.Name object at 0x7da204347a60>, <ast.Constant object at 0x7da204345b10>, <ast.Constant object at 0x7da204346770>, <ast.Dict object at 0x7da204347550>]] if compare[name[ACL] is constant[None]] begin[:] variable[ACL] assign[=] dictionary[[<ast.Constant object at 0x7da204344af0>], [<ast.Constant object at 0x7da2043449a0>]] if compare[name[NotificationConfiguration] is constant[None]] begin[:] variable[NotificationConfiguration] assign[=] dictionary[[], []] if compare[name[RequestPayment] is constant[None]] begin[:] variable[RequestPayment] assign[=] dictionary[[<ast.Constant object at 0x7da204345c60>], [<ast.Constant object at 0x7da204345d20>]] if name[Policy] begin[:] if <ast.BoolOp object at 0x7da204347790> begin[:] variable[Policy] assign[=] call[name[salt].utils.json.loads, parameter[name[Policy]]] variable[Policy] assign[=] call[call[name[__utils__]][constant[boto3.ordered]], parameter[name[Policy]]] variable[r] assign[=] call[call[name[__salt__]][constant[boto_s3_bucket.exists]], parameter[]] if compare[constant[error] in name[r]] begin[:] call[name[ret]][constant[result]] assign[=] constant[False] call[name[ret]][constant[comment]] assign[=] call[constant[Failed to create bucket: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]] return[name[ret]] if <ast.UnaryOp object at 0x7da2043478e0> begin[:] if call[name[__opts__]][constant[test]] begin[:] call[name[ret]][constant[comment]] assign[=] call[constant[S3 bucket {0} is set to be created.].format, parameter[name[Bucket]]] call[name[ret]][constant[result]] assign[=] constant[None] return[name[ret]] variable[r] assign[=] call[call[name[__salt__]][constant[boto_s3_bucket.create]], parameter[]] if <ast.UnaryOp object at 0x7da204345f30> begin[:] call[name[ret]][constant[result]] 
assign[=] constant[False] call[name[ret]][constant[comment]] assign[=] call[constant[Failed to create bucket: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]] return[name[ret]] for taget[tuple[[<ast.Name object at 0x7da204345000>, <ast.Name object at 0x7da204345390>, <ast.Name object at 0x7da2043444f0>]]] in starred[tuple[[<ast.Tuple object at 0x7da204345570>, <ast.Tuple object at 0x7da204344f70>, <ast.Tuple object at 0x7da204345ff0>, <ast.Tuple object at 0x7da204344340>, <ast.Tuple object at 0x7da2043469e0>, <ast.Tuple object at 0x7da2043446a0>, <ast.Tuple object at 0x7da204346020>, <ast.Tuple object at 0x7da204345780>, <ast.Tuple object at 0x7da204347430>, <ast.Tuple object at 0x7da2043443d0>, <ast.Tuple object at 0x7da2043465c0>]]] begin[:] if compare[name[testval] is_not constant[None]] begin[:] variable[r] assign[=] call[call[name[__salt__]][call[constant[boto_s3_bucket.{0}].format, parameter[name[setter]]]], parameter[]] if <ast.UnaryOp object at 0x7da20c76d210> begin[:] call[name[ret]][constant[result]] assign[=] constant[False] call[name[ret]][constant[comment]] assign[=] call[constant[Failed to create bucket: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]] return[name[ret]] variable[_describe] assign[=] call[call[name[__salt__]][constant[boto_s3_bucket.describe]], parameter[name[Bucket]]] call[call[name[ret]][constant[changes]]][constant[old]] assign[=] dictionary[[<ast.Constant object at 0x7da20c76e110>], [<ast.Constant object at 0x7da20c76f220>]] call[call[name[ret]][constant[changes]]][constant[new]] assign[=] name[_describe] call[name[ret]][constant[comment]] assign[=] call[constant[S3 bucket {0} created.].format, parameter[name[Bucket]]] return[name[ret]] call[name[ret]][constant[comment]] assign[=] call[constant[ ].join, parameter[list[[<ast.Subscript object at 0x7da20c76e740>, <ast.Call object at 0x7da20c76d9c0>]]]] call[name[ret]][constant[changes]] assign[=] dictionary[[], []] 
variable[_describe] assign[=] call[call[name[__salt__]][constant[boto_s3_bucket.describe]], parameter[]] if compare[constant[error] in name[_describe]] begin[:] call[name[ret]][constant[result]] assign[=] constant[False] call[name[ret]][constant[comment]] assign[=] call[constant[Failed to update bucket: {0}.].format, parameter[call[call[name[_describe]][constant[error]]][constant[message]]]] call[name[ret]][constant[changes]] assign[=] dictionary[[], []] return[name[ret]] variable[_describe] assign[=] call[name[_describe]][constant[bucket]] if <ast.BoolOp object at 0x7da20c76df90> begin[:] variable[Versioning] assign[=] dictionary[[<ast.Constant object at 0x7da20c76c220>], [<ast.Constant object at 0x7da20c76c400>]] variable[config_items] assign[=] list[[<ast.Tuple object at 0x7da20c76fa30>, <ast.Tuple object at 0x7da20c76d1e0>, <ast.Tuple object at 0x7da20c76cbe0>, <ast.Tuple object at 0x7da20c76d6c0>, <ast.Tuple object at 0x7da20c76d540>, <ast.Tuple object at 0x7da20c76d900>, <ast.Tuple object at 0x7da20c76ef50>, <ast.Tuple object at 0x7da20c76f2e0>, <ast.Tuple object at 0x7da20c76d0f0>]] variable[versioning_item] assign[=] tuple[[<ast.Constant object at 0x7da20c76f610>, <ast.Constant object at 0x7da20c76c1c0>, <ast.Call object at 0x7da20c76d840>, <ast.Name object at 0x7da20c76ffd0>, <ast.BoolOp object at 0x7da20c76f910>, <ast.Constant object at 0x7da20c76e9b0>]] variable[replication_item] assign[=] tuple[[<ast.Constant object at 0x7da20c76c850>, <ast.Constant object at 0x7da20c76f640>, <ast.Call object at 0x7da20c76f1f0>, <ast.Name object at 0x7da20c76c070>, <ast.Name object at 0x7da20c76c5e0>, <ast.Constant object at 0x7da20c76eb00>]] if compare[name[Replication] is_not constant[None]] begin[:] call[name[config_items].append, parameter[name[versioning_item]]] call[name[config_items].append, parameter[name[replication_item]]] variable[update] assign[=] constant[False] variable[changes] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 
0x7da2047ead40>, <ast.Name object at 0x7da2047e9db0>, <ast.Name object at 0x7da2047e8d60>, <ast.Name object at 0x7da2047e8ac0>, <ast.Name object at 0x7da2047e80a0>, <ast.Name object at 0x7da2047e91e0>]]] in starred[name[config_items]] begin[:] if compare[name[varname] equal[==] constant[Policy]] begin[:] if compare[name[desired] equal[==] dictionary[[<ast.Constant object at 0x7da2047e9390>], [<ast.Constant object at 0x7da2047eabf0>]]] begin[:] call[name[log].debug, parameter[constant[S3 Policy set to `external`, skipping application.]]] continue if compare[name[current] is_not constant[None]] begin[:] variable[temp] assign[=] call[name[current].get, parameter[constant[Policy]]] if call[name[isinstance], parameter[name[temp], name[six].string_types]] begin[:] variable[current] assign[=] call[call[name[__utils__]][constant[boto3.ordered]], parameter[dictionary[[<ast.Constant object at 0x7da2047eb8e0>], [<ast.Call object at 0x7da2047e85b0>]]]] if <ast.UnaryOp object at 0x7da2047eb370> begin[:] variable[update] assign[=] constant[True] if compare[name[varname] equal[==] constant[ACL]] begin[:] call[call[name[changes].setdefault, parameter[constant[new], dictionary[[], []]]]][name[varname]] assign[=] call[name[_acl_to_grant], parameter[name[desired], call[name[_get_canonical_id], parameter[name[region], name[key], name[keyid], name[profile]]]]] call[call[name[changes].setdefault, parameter[constant[old], dictionary[[], []]]]][name[varname]] assign[=] name[current] if <ast.UnaryOp object at 0x7da2047e8fd0> begin[:] if <ast.BoolOp object at 0x7da2047ea1d0> begin[:] variable[r] assign[=] call[call[name[__salt__]][call[constant[boto_s3_bucket.{0}].format, parameter[name[deleter]]]], parameter[]] if <ast.UnaryOp object at 0x7da2047e8190> begin[:] call[name[ret]][constant[result]] assign[=] constant[False] call[name[ret]][constant[comment]] assign[=] call[constant[Failed to update bucket: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]] 
return[name[ret]] if <ast.BoolOp object at 0x7da204963f70> begin[:] variable[msg] assign[=] call[constant[S3 bucket {0} set to be modified.].format, parameter[name[Bucket]]] call[name[ret]][constant[comment]] assign[=] name[msg] call[name[ret]][constant[result]] assign[=] constant[None] call[name[ret]][constant[pchanges]] assign[=] name[changes] return[name[ret]] call[name[ret]][constant[changes]] assign[=] name[changes] if compare[call[call[name[_describe].get, parameter[constant[Location], dictionary[[], []]]].get, parameter[constant[LocationConstraint]]] not_equal[!=] name[LocationConstraint]] begin[:] variable[msg] assign[=] call[constant[Bucket {0} location does not match desired configuration, but cannot be changed].format, parameter[name[LocationConstraint]]] call[name[log].warning, parameter[name[msg]]] call[name[ret]][constant[result]] assign[=] constant[False] call[name[ret]][constant[comment]] assign[=] call[constant[Failed to update bucket: {0}.].format, parameter[name[msg]]] return[name[ret]] return[name[ret]]
keyword[def] identifier[present] ( identifier[name] , identifier[Bucket] , identifier[LocationConstraint] = keyword[None] , identifier[ACL] = keyword[None] , identifier[CORSRules] = keyword[None] , identifier[LifecycleConfiguration] = keyword[None] , identifier[Logging] = keyword[None] , identifier[NotificationConfiguration] = keyword[None] , identifier[Policy] = keyword[None] , identifier[Replication] = keyword[None] , identifier[RequestPayment] = keyword[None] , identifier[Tagging] = keyword[None] , identifier[Versioning] = keyword[None] , identifier[Website] = keyword[None] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] identifier[ret] ={ literal[string] : identifier[Bucket] , literal[string] : keyword[True] , literal[string] : literal[string] , literal[string] :{} } keyword[if] identifier[ACL] keyword[is] keyword[None] : identifier[ACL] ={ literal[string] : literal[string] } keyword[if] identifier[NotificationConfiguration] keyword[is] keyword[None] : identifier[NotificationConfiguration] ={} keyword[if] identifier[RequestPayment] keyword[is] keyword[None] : identifier[RequestPayment] ={ literal[string] : literal[string] } keyword[if] identifier[Policy] : keyword[if] identifier[isinstance] ( identifier[Policy] , identifier[six] . identifier[string_types] ) keyword[and] identifier[Policy] != literal[string] : identifier[Policy] = identifier[salt] . identifier[utils] . identifier[json] . 
identifier[loads] ( identifier[Policy] ) identifier[Policy] = identifier[__utils__] [ literal[string] ]( identifier[Policy] ) identifier[r] = identifier[__salt__] [ literal[string] ]( identifier[Bucket] = identifier[Bucket] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[if] literal[string] keyword[in] identifier[r] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ]) keyword[return] identifier[ret] keyword[if] keyword[not] identifier[r] . identifier[get] ( literal[string] ): keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[Bucket] ) identifier[ret] [ literal[string] ]= keyword[None] keyword[return] identifier[ret] identifier[r] = identifier[__salt__] [ literal[string] ]( identifier[Bucket] = identifier[Bucket] , identifier[LocationConstraint] = identifier[LocationConstraint] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[if] keyword[not] identifier[r] . identifier[get] ( literal[string] ): identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . 
identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ]) keyword[return] identifier[ret] keyword[for] identifier[setter] , identifier[testval] , identifier[funcargs] keyword[in] ( ( literal[string] , identifier[ACL] , identifier[ACL] ), ( literal[string] , identifier[CORSRules] ,{ literal[string] : identifier[CORSRules] }), ( literal[string] , identifier[LifecycleConfiguration] ,{ literal[string] : identifier[LifecycleConfiguration] }), ( literal[string] , identifier[Logging] , identifier[Logging] ), ( literal[string] , identifier[NotificationConfiguration] , identifier[NotificationConfiguration] ), ( literal[string] , identifier[Policy] ,{ literal[string] : identifier[Policy] }), ( literal[string] , identifier[Versioning] , identifier[Versioning] ), ( literal[string] , identifier[Replication] , identifier[Replication] ), ( literal[string] , identifier[RequestPayment] , identifier[RequestPayment] ), ( literal[string] , identifier[Tagging] , identifier[Tagging] ), ( literal[string] , identifier[Website] , identifier[Website] ), ): keyword[if] identifier[testval] keyword[is] keyword[not] keyword[None] : identifier[r] = identifier[__salt__] [ literal[string] . identifier[format] ( identifier[setter] )]( identifier[Bucket] = identifier[Bucket] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] , ** identifier[funcargs] ) keyword[if] keyword[not] identifier[r] . identifier[get] ( literal[string] ): identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . 
identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ]) keyword[return] identifier[ret] identifier[_describe] = identifier[__salt__] [ literal[string] ]( identifier[Bucket] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) identifier[ret] [ literal[string] ][ literal[string] ]={ literal[string] : keyword[None] } identifier[ret] [ literal[string] ][ literal[string] ]= identifier[_describe] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[Bucket] ) keyword[return] identifier[ret] identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ([ identifier[ret] [ literal[string] ], literal[string] . identifier[format] ( identifier[Bucket] )]) identifier[ret] [ literal[string] ]={} identifier[_describe] = identifier[__salt__] [ literal[string] ]( identifier[Bucket] = identifier[Bucket] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[if] literal[string] keyword[in] identifier[_describe] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[_describe] [ literal[string] ][ literal[string] ]) identifier[ret] [ literal[string] ]={} keyword[return] identifier[ret] identifier[_describe] = identifier[_describe] [ literal[string] ] keyword[if] keyword[not] identifier[bool] ( identifier[Versioning] ) keyword[and] identifier[bool] ( identifier[_describe] . identifier[get] ( literal[string] )): identifier[Versioning] ={ literal[string] : literal[string] } identifier[config_items] =[ ( literal[string] , literal[string] , identifier[_describe] . 
identifier[get] ( literal[string] ), identifier[_compare_acl] , identifier[ACL] , keyword[None] ), ( literal[string] , literal[string] , identifier[_describe] . identifier[get] ( literal[string] ), identifier[_compare_json] ,{ literal[string] : identifier[CORSRules] } keyword[if] identifier[CORSRules] keyword[else] keyword[None] , literal[string] ), ( literal[string] , literal[string] , identifier[_describe] . identifier[get] ( literal[string] ), identifier[_compare_json] ,{ literal[string] : identifier[LifecycleConfiguration] } keyword[if] identifier[LifecycleConfiguration] keyword[else] keyword[None] , literal[string] ), ( literal[string] , literal[string] , identifier[_describe] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ), identifier[_compare_json] , identifier[Logging] , keyword[None] ), ( literal[string] , literal[string] , identifier[_describe] . identifier[get] ( literal[string] ), identifier[_compare_json] , identifier[NotificationConfiguration] , keyword[None] ), ( literal[string] , literal[string] , identifier[_describe] . identifier[get] ( literal[string] ), identifier[_compare_policy] ,{ literal[string] : identifier[Policy] } keyword[if] identifier[Policy] keyword[else] keyword[None] , literal[string] ), ( literal[string] , literal[string] , identifier[_describe] . identifier[get] ( literal[string] ), identifier[_compare_json] , identifier[RequestPayment] , keyword[None] ), ( literal[string] , literal[string] , identifier[_describe] . identifier[get] ( literal[string] ), identifier[_compare_json] , identifier[Tagging] , literal[string] ), ( literal[string] , literal[string] , identifier[_describe] . identifier[get] ( literal[string] ), identifier[_compare_json] , identifier[Website] , literal[string] ), ] identifier[versioning_item] =( literal[string] , literal[string] , identifier[_describe] . 
identifier[get] ( literal[string] ), identifier[_compare_json] , identifier[Versioning] keyword[or] {}, keyword[None] ) identifier[replication_item] =( literal[string] , literal[string] , identifier[_describe] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ), identifier[_compare_replication] , identifier[Replication] , literal[string] ) keyword[if] identifier[Replication] keyword[is] keyword[not] keyword[None] : identifier[config_items] . identifier[append] ( identifier[versioning_item] ) identifier[config_items] . identifier[append] ( identifier[replication_item] ) keyword[else] : identifier[config_items] . identifier[append] ( identifier[replication_item] ) identifier[config_items] . identifier[append] ( identifier[versioning_item] ) identifier[update] = keyword[False] identifier[changes] ={} keyword[for] identifier[varname] , identifier[setter] , identifier[current] , identifier[comparator] , identifier[desired] , identifier[deleter] keyword[in] identifier[config_items] : keyword[if] identifier[varname] == literal[string] : keyword[if] identifier[desired] =={ literal[string] : literal[string] }: identifier[log] . identifier[debug] ( literal[string] ) keyword[continue] keyword[if] identifier[current] keyword[is] keyword[not] keyword[None] : identifier[temp] = identifier[current] . identifier[get] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[temp] , identifier[six] . identifier[string_types] ): identifier[current] = identifier[__utils__] [ literal[string] ]( { literal[string] : identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[temp] )} ) keyword[if] keyword[not] identifier[comparator] ( identifier[current] , identifier[desired] , identifier[region] , identifier[key] , identifier[keyid] , identifier[profile] ): identifier[update] = keyword[True] keyword[if] identifier[varname] == literal[string] : identifier[changes] . 
identifier[setdefault] ( literal[string] ,{})[ identifier[varname] ]= identifier[_acl_to_grant] ( identifier[desired] , identifier[_get_canonical_id] ( identifier[region] , identifier[key] , identifier[keyid] , identifier[profile] )) keyword[else] : identifier[changes] . identifier[setdefault] ( literal[string] ,{})[ identifier[varname] ]= identifier[desired] identifier[changes] . identifier[setdefault] ( literal[string] ,{})[ identifier[varname] ]= identifier[current] keyword[if] keyword[not] identifier[__opts__] [ literal[string] ]: keyword[if] identifier[deleter] keyword[and] identifier[desired] keyword[is] keyword[None] : identifier[r] = identifier[__salt__] [ literal[string] . identifier[format] ( identifier[deleter] )]( identifier[Bucket] = identifier[Bucket] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[if] keyword[not] identifier[r] . identifier[get] ( literal[string] ): identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ]) keyword[return] identifier[ret] keyword[else] : identifier[r] = identifier[__salt__] [ literal[string] . identifier[format] ( identifier[setter] )]( identifier[Bucket] = identifier[Bucket] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] , **( identifier[desired] keyword[or] {})) keyword[if] keyword[not] identifier[r] . identifier[get] ( literal[string] ): identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . 
identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ]) keyword[return] identifier[ret] keyword[if] identifier[update] keyword[and] identifier[__opts__] [ literal[string] ]: identifier[msg] = literal[string] . identifier[format] ( identifier[Bucket] ) identifier[ret] [ literal[string] ]= identifier[msg] identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]= identifier[changes] keyword[return] identifier[ret] identifier[ret] [ literal[string] ]= identifier[changes] keyword[if] identifier[_describe] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )!= identifier[LocationConstraint] : identifier[msg] = literal[string] . identifier[format] ( identifier[LocationConstraint] ) identifier[log] . identifier[warning] ( identifier[msg] ) identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[msg] ) keyword[return] identifier[ret] keyword[return] identifier[ret]
def present(name, Bucket, LocationConstraint=None, ACL=None, CORSRules=None, LifecycleConfiguration=None, Logging=None, NotificationConfiguration=None, Policy=None, Replication=None, RequestPayment=None, Tagging=None, Versioning=None, Website=None, region=None, key=None, keyid=None, profile=None): """ Ensure bucket exists. name The name of the state definition Bucket Name of the bucket. LocationConstraint 'EU'|'eu-west-1'|'us-west-1'|'us-west-2'|'ap-southeast-1'|'ap-southeast-2'|'ap-northeast-1'|'sa-east-1'|'cn-north-1'|'eu-central-1' ACL The permissions on a bucket using access control lists (ACL). CORSRules The cors configuration for a bucket. LifecycleConfiguration Lifecycle configuration for your bucket Logging The logging parameters for a bucket and to specify permissions for who can view and modify the logging parameters. NotificationConfiguration notifications of specified events for a bucket Policy Policy on the bucket. As a special case, if the Policy is set to the string `external`, it will not be managed by this state, and can thus be safely set in other ways (e.g. by other state calls, or by hand if some unusual policy configuration is required). Replication Replication rules. You can add as many as 1,000 rules. Total replication configuration size can be up to 2 MB RequestPayment The request payment configuration for a bucket. By default, the bucket owner pays for downloads from the bucket. This configuration parameter enables the bucket owner (only) to specify that the person requesting the download will be charged for the download Tagging A dictionary of tags that should be set on the bucket Versioning The versioning state of the bucket Website The website configuration of the bucket region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. 
""" ret = {'name': Bucket, 'result': True, 'comment': '', 'changes': {}} if ACL is None: ACL = {'ACL': 'private'} # depends on [control=['if'], data=['ACL']] if NotificationConfiguration is None: NotificationConfiguration = {} # depends on [control=['if'], data=['NotificationConfiguration']] if RequestPayment is None: RequestPayment = {'Payer': 'BucketOwner'} # depends on [control=['if'], data=['RequestPayment']] if Policy: if isinstance(Policy, six.string_types) and Policy != 'external': Policy = salt.utils.json.loads(Policy) # depends on [control=['if'], data=[]] Policy = __utils__['boto3.ordered'](Policy) # depends on [control=['if'], data=[]] r = __salt__['boto_s3_bucket.exists'](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile) if 'error' in r: ret['result'] = False ret['comment'] = 'Failed to create bucket: {0}.'.format(r['error']['message']) return ret # depends on [control=['if'], data=['r']] if not r.get('exists'): if __opts__['test']: ret['comment'] = 'S3 bucket {0} is set to be created.'.format(Bucket) ret['result'] = None return ret # depends on [control=['if'], data=[]] r = __salt__['boto_s3_bucket.create'](Bucket=Bucket, LocationConstraint=LocationConstraint, region=region, key=key, keyid=keyid, profile=profile) if not r.get('created'): ret['result'] = False ret['comment'] = 'Failed to create bucket: {0}.'.format(r['error']['message']) return ret # depends on [control=['if'], data=[]] for (setter, testval, funcargs) in (('put_acl', ACL, ACL), ('put_cors', CORSRules, {'CORSRules': CORSRules}), ('put_lifecycle_configuration', LifecycleConfiguration, {'Rules': LifecycleConfiguration}), ('put_logging', Logging, Logging), ('put_notification_configuration', NotificationConfiguration, NotificationConfiguration), ('put_policy', Policy, {'Policy': Policy}), ('put_versioning', Versioning, Versioning), ('put_replication', Replication, Replication), ('put_request_payment', RequestPayment, RequestPayment), ('put_tagging', Tagging, Tagging), 
('put_website', Website, Website)): # versioning must be set before replication if testval is not None: r = __salt__['boto_s3_bucket.{0}'.format(setter)](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile, **funcargs) if not r.get('updated'): ret['result'] = False ret['comment'] = 'Failed to create bucket: {0}.'.format(r['error']['message']) return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] _describe = __salt__['boto_s3_bucket.describe'](Bucket, region=region, key=key, keyid=keyid, profile=profile) ret['changes']['old'] = {'bucket': None} ret['changes']['new'] = _describe ret['comment'] = 'S3 bucket {0} created.'.format(Bucket) return ret # depends on [control=['if'], data=[]] # bucket exists, ensure config matches ret['comment'] = ' '.join([ret['comment'], 'S3 bucket {0} is present.'.format(Bucket)]) ret['changes'] = {} _describe = __salt__['boto_s3_bucket.describe'](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile) if 'error' in _describe: ret['result'] = False ret['comment'] = 'Failed to update bucket: {0}.'.format(_describe['error']['message']) ret['changes'] = {} return ret # depends on [control=['if'], data=['_describe']] _describe = _describe['bucket'] # Once versioning has been enabled, it can't completely go away, it can # only be suspended if not bool(Versioning) and bool(_describe.get('Versioning')): Versioning = {'Status': 'Suspended'} # depends on [control=['if'], data=[]] config_items = [('ACL', 'put_acl', _describe.get('ACL'), _compare_acl, ACL, None), ('CORS', 'put_cors', _describe.get('CORS'), _compare_json, {'CORSRules': CORSRules} if CORSRules else None, 'delete_cors'), ('LifecycleConfiguration', 'put_lifecycle_configuration', _describe.get('LifecycleConfiguration'), _compare_json, {'Rules': LifecycleConfiguration} if LifecycleConfiguration else None, 'delete_lifecycle_configuration'), ('Logging', 'put_logging', 
_describe.get('Logging', {}).get('LoggingEnabled'), _compare_json, Logging, None), ('NotificationConfiguration', 'put_notification_configuration', _describe.get('NotificationConfiguration'), _compare_json, NotificationConfiguration, None), ('Policy', 'put_policy', _describe.get('Policy'), _compare_policy, {'Policy': Policy} if Policy else None, 'delete_policy'), ('RequestPayment', 'put_request_payment', _describe.get('RequestPayment'), _compare_json, RequestPayment, None), ('Tagging', 'put_tagging', _describe.get('Tagging'), _compare_json, Tagging, 'delete_tagging'), ('Website', 'put_website', _describe.get('Website'), _compare_json, Website, 'delete_website')] versioning_item = ('Versioning', 'put_versioning', _describe.get('Versioning'), _compare_json, Versioning or {}, None) # Substitute full ARN into desired state for comparison replication_item = ('Replication', 'put_replication', _describe.get('Replication', {}).get('ReplicationConfiguration'), _compare_replication, Replication, 'delete_replication') # versioning must be turned on before replication can be on, thus replication # must be turned off before versioning can be off if Replication is not None: # replication will be on, must deal with versioning first config_items.append(versioning_item) config_items.append(replication_item) # depends on [control=['if'], data=[]] else: # replication will be off, deal with it first config_items.append(replication_item) config_items.append(versioning_item) update = False changes = {} for (varname, setter, current, comparator, desired, deleter) in config_items: if varname == 'Policy': if desired == {'Policy': 'external'}: # Short-circuit to allow external policy control. log.debug('S3 Policy set to `external`, skipping application.') continue # depends on [control=['if'], data=[]] if current is not None: temp = current.get('Policy') # Policy description is always returned as a JSON string. # Convert it to JSON now for ease of comparisons later. 
if isinstance(temp, six.string_types): current = __utils__['boto3.ordered']({'Policy': salt.utils.json.loads(temp)}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['current']] # depends on [control=['if'], data=[]] if not comparator(current, desired, region, key, keyid, profile): update = True if varname == 'ACL': changes.setdefault('new', {})[varname] = _acl_to_grant(desired, _get_canonical_id(region, key, keyid, profile)) # depends on [control=['if'], data=['varname']] else: changes.setdefault('new', {})[varname] = desired changes.setdefault('old', {})[varname] = current if not __opts__['test']: if deleter and desired is None: # Setting can be deleted, so use that to unset it r = __salt__['boto_s3_bucket.{0}'.format(deleter)](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile) if not r.get('deleted'): ret['result'] = False ret['comment'] = 'Failed to update bucket: {0}.'.format(r['error']['message']) return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: r = __salt__['boto_s3_bucket.{0}'.format(setter)](Bucket=Bucket, region=region, key=key, keyid=keyid, profile=profile, **desired or {}) if not r.get('updated'): ret['result'] = False ret['comment'] = 'Failed to update bucket: {0}.'.format(r['error']['message']) return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if update and __opts__['test']: msg = 'S3 bucket {0} set to be modified.'.format(Bucket) ret['comment'] = msg ret['result'] = None ret['pchanges'] = changes return ret # depends on [control=['if'], data=[]] ret['changes'] = changes # Since location can't be changed, try that last so at least the rest of # the things are correct by the time we fail here. Fail so the user will # notice something mismatches their desired state. 
if _describe.get('Location', {}).get('LocationConstraint') != LocationConstraint: msg = 'Bucket {0} location does not match desired configuration, but cannot be changed'.format(LocationConstraint) log.warning(msg) ret['result'] = False ret['comment'] = 'Failed to update bucket: {0}.'.format(msg) return ret # depends on [control=['if'], data=['LocationConstraint']] return ret
def netconf_session_end_killed_by(self, **kwargs):
    """Build a <netconf-session-end> notification carrying the id of the
    session that killed this one, and hand the XML tree to a callback.

    Auto-generated RPC helper.  ``kwargs`` must contain ``killed_by``; an
    optional ``callback`` overrides the instance default ``self._callback``.
    """
    # Root element expected by the device-side RPC machinery.
    root = ET.Element("config")
    session_end = ET.SubElement(
        root,
        "netconf-session-end",
        xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-notifications",
    )
    # NOTE: xmlns is attached as a plain attribute here, not as an ET
    # namespace — preserved exactly as the generator emitted it.
    ET.SubElement(session_end, "killed-by").text = kwargs.pop('killed_by')
    handler = kwargs.pop('callback', self._callback)
    return handler(root)
def function[netconf_session_end_killed_by, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[netconf_session_end] assign[=] call[name[ET].SubElement, parameter[name[config], constant[netconf-session-end]]] variable[killed_by] assign[=] call[name[ET].SubElement, parameter[name[netconf_session_end], constant[killed-by]]] name[killed_by].text assign[=] call[name[kwargs].pop, parameter[constant[killed_by]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[netconf_session_end_killed_by] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[netconf_session_end] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[killed_by] = identifier[ET] . identifier[SubElement] ( identifier[netconf_session_end] , literal[string] ) identifier[killed_by] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def netconf_session_end_killed_by(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') netconf_session_end = ET.SubElement(config, 'netconf-session-end', xmlns='urn:ietf:params:xml:ns:yang:ietf-netconf-notifications') killed_by = ET.SubElement(netconf_session_end, 'killed-by') killed_by.text = kwargs.pop('killed_by') callback = kwargs.pop('callback', self._callback) return callback(config)
def process(self, state, procedure, ret_to=None, inline=None, force_addr=None, **kwargs):
    """
    Perform execution with a state.

    :param state: The state with which to execute
    :param procedure: An instance of a SimProcedure to run
    :param ret_to: The address to return to when this procedure is finished
    :param inline: This is an inline execution. Do not bother copying the state.
    :param force_addr: Force execution to pretend that we're working at this concrete address
    :returns: A SimSuccessors object categorizing the execution's successor states
    """
    # Thin wrapper that forwards to the base engine's process().
    # NOTE(review): **kwargs is accepted but NOT forwarded to the parent
    # call — confirm that dropping extra keyword arguments is intentional.
    return super(SimEngineProcedure, self).process(state, procedure,
            ret_to=ret_to,
            inline=inline,
            force_addr=force_addr)
def function[process, parameter[self, state, procedure, ret_to, inline, force_addr]]: constant[ Perform execution with a state. :param state: The state with which to execute :param procedure: An instance of a SimProcedure to run :param ret_to: The address to return to when this procedure is finished :param inline: This is an inline execution. Do not bother copying the state. :param force_addr: Force execution to pretend that we're working at this concrete address :returns: A SimSuccessors object categorizing the execution's successor states ] return[call[call[name[super], parameter[name[SimEngineProcedure], name[self]]].process, parameter[name[state], name[procedure]]]]
keyword[def] identifier[process] ( identifier[self] , identifier[state] , identifier[procedure] , identifier[ret_to] = keyword[None] , identifier[inline] = keyword[None] , identifier[force_addr] = keyword[None] , ** identifier[kwargs] ): literal[string] keyword[return] identifier[super] ( identifier[SimEngineProcedure] , identifier[self] ). identifier[process] ( identifier[state] , identifier[procedure] , identifier[ret_to] = identifier[ret_to] , identifier[inline] = identifier[inline] , identifier[force_addr] = identifier[force_addr] )
def process(self, state, procedure, ret_to=None, inline=None, force_addr=None, **kwargs): """ Perform execution with a state. :param state: The state with which to execute :param procedure: An instance of a SimProcedure to run :param ret_to: The address to return to when this procedure is finished :param inline: This is an inline execution. Do not bother copying the state. :param force_addr: Force execution to pretend that we're working at this concrete address :returns: A SimSuccessors object categorizing the execution's successor states """ return super(SimEngineProcedure, self).process(state, procedure, ret_to=ret_to, inline=inline, force_addr=force_addr)
def validate(self, value=None, model=None, context=None):
    """
    Run every attached validator against *value* and gather their errors.

    A ``None`` value is only checked by ``Required`` validators; all other
    validators are skipped for missing values.  ``context`` is forwarded to
    each validator only when ``self.use_context`` is set.

    :param value: a value to validate
    :param model: parent entity
    :param context: validation context, usually parent entity
    :return: list of errors (if any)
    """
    collected = []
    skip_optional = value is None
    for check in self.validators:
        if skip_optional and not isinstance(check, Required):
            continue
        ctx = context if self.use_context else None
        failure = check.run(value=value, model=model, context=ctx)
        if failure:
            collected.append(failure)
    return collected
def function[validate, parameter[self, value, model, context]]: constant[ Sequentially apply each validator to value and collect errors. :param value: a value to validate :param model: parent entity :param context: validation context, usually parent entity :return: list of errors (if any) ] variable[errors] assign[=] list[[]] for taget[name[validator]] in starred[name[self].validators] begin[:] if <ast.BoolOp object at 0x7da18bcca410> begin[:] continue variable[error] assign[=] call[name[validator].run, parameter[]] if name[error] begin[:] call[name[errors].append, parameter[name[error]]] return[name[errors]]
keyword[def] identifier[validate] ( identifier[self] , identifier[value] = keyword[None] , identifier[model] = keyword[None] , identifier[context] = keyword[None] ): literal[string] identifier[errors] =[] keyword[for] identifier[validator] keyword[in] identifier[self] . identifier[validators] : keyword[if] identifier[value] keyword[is] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[validator] , identifier[Required] ): keyword[continue] identifier[error] = identifier[validator] . identifier[run] ( identifier[value] = identifier[value] , identifier[model] = identifier[model] , identifier[context] = identifier[context] keyword[if] identifier[self] . identifier[use_context] keyword[else] keyword[None] ) keyword[if] identifier[error] : identifier[errors] . identifier[append] ( identifier[error] ) keyword[return] identifier[errors]
def validate(self, value=None, model=None, context=None): """ Sequentially apply each validator to value and collect errors. :param value: a value to validate :param model: parent entity :param context: validation context, usually parent entity :return: list of errors (if any) """ errors = [] for validator in self.validators: if value is None and (not isinstance(validator, Required)): continue # depends on [control=['if'], data=[]] error = validator.run(value=value, model=model, context=context if self.use_context else None) if error: errors.append(error) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['validator']] return errors
def get_csv_rows_for_installed(
    old_csv_rows,  # type: Iterable[List[str]]
    installed,  # type: Dict[str, str]
    changed,  # type: set
    generated,  # type: List[str]
    lib_dir,  # type: str
):
    # type: (...) -> List[InstalledCSVRow]
    """Produce the RECORD rows for an installed wheel.

    :param old_csv_rows: rows parsed from the wheel's original RECORD.
    :param installed: A map from archive RECORD path to installation RECORD
        path.  NOTE: entries matched against ``old_csv_rows`` are popped from
        this dict (the caller's mapping is mutated); leftover entries are
        appended at the end with empty hash/size fields.
    :param changed: installed paths whose contents were rewritten during
        install and therefore need a fresh hash and size.
    :param generated: files created by the install (e.g. scripts) that were
        never in the original RECORD.
    :param lib_dir: directory used to relativize ``generated`` paths.
    """
    installed_rows = []  # type: List[InstalledCSVRow]
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning(
                'RECORD line has more than three elements: {}'.format(row)
            )
        # Make a copy because we are mutating the row.
        row = list(row)
        old_path = row[0]
        new_path = installed.pop(old_path, old_path)
        row[0] = new_path
        if new_path in changed:
            digest, length = rehash(new_path)
            row[1] = digest
            # Store the size as a string so every row element has the same
            # type as the rows built below (str(length) is a no-op if rehash
            # already returns a string).
            row[2] = str(length)
        installed_rows.append(tuple(row))
    for f in generated:
        digest, length = rehash(f)
        installed_rows.append((normpath(f, lib_dir), digest, str(length)))
    # Anything left in `installed` was never mentioned in the old RECORD;
    # record its installed path with empty hash/size.
    for f in installed:
        installed_rows.append((installed[f], '', ''))
    return installed_rows
def function[get_csv_rows_for_installed, parameter[old_csv_rows, installed, changed, generated, lib_dir]]: constant[ :param installed: A map from archive RECORD path to installation RECORD path. ] variable[installed_rows] assign[=] list[[]] for taget[name[row]] in starred[name[old_csv_rows]] begin[:] if compare[call[name[len], parameter[name[row]]] greater[>] constant[3]] begin[:] call[name[logger].warning, parameter[call[constant[RECORD line has more than three elements: {}].format, parameter[name[row]]]]] variable[row] assign[=] call[name[list], parameter[name[row]]] variable[old_path] assign[=] call[name[row]][constant[0]] variable[new_path] assign[=] call[name[installed].pop, parameter[name[old_path], name[old_path]]] call[name[row]][constant[0]] assign[=] name[new_path] if compare[name[new_path] in name[changed]] begin[:] <ast.Tuple object at 0x7da18bc70c40> assign[=] call[name[rehash], parameter[name[new_path]]] call[name[row]][constant[1]] assign[=] name[digest] call[name[row]][constant[2]] assign[=] name[length] call[name[installed_rows].append, parameter[call[name[tuple], parameter[name[row]]]]] for taget[name[f]] in starred[name[generated]] begin[:] <ast.Tuple object at 0x7da18f58d600> assign[=] call[name[rehash], parameter[name[f]]] call[name[installed_rows].append, parameter[tuple[[<ast.Call object at 0x7da18ede52d0>, <ast.Name object at 0x7da18ede5690>, <ast.Call object at 0x7da18ede6c50>]]]] for taget[name[f]] in starred[name[installed]] begin[:] call[name[installed_rows].append, parameter[tuple[[<ast.Subscript object at 0x7da18ede5f00>, <ast.Constant object at 0x7da18ede7eb0>, <ast.Constant object at 0x7da18ede5870>]]]] return[name[installed_rows]]
keyword[def] identifier[get_csv_rows_for_installed] ( identifier[old_csv_rows] , identifier[installed] , identifier[changed] , identifier[generated] , identifier[lib_dir] , ): literal[string] identifier[installed_rows] =[] keyword[for] identifier[row] keyword[in] identifier[old_csv_rows] : keyword[if] identifier[len] ( identifier[row] )> literal[int] : identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[row] ) ) identifier[row] = identifier[list] ( identifier[row] ) identifier[old_path] = identifier[row] [ literal[int] ] identifier[new_path] = identifier[installed] . identifier[pop] ( identifier[old_path] , identifier[old_path] ) identifier[row] [ literal[int] ]= identifier[new_path] keyword[if] identifier[new_path] keyword[in] identifier[changed] : identifier[digest] , identifier[length] = identifier[rehash] ( identifier[new_path] ) identifier[row] [ literal[int] ]= identifier[digest] identifier[row] [ literal[int] ]= identifier[length] identifier[installed_rows] . identifier[append] ( identifier[tuple] ( identifier[row] )) keyword[for] identifier[f] keyword[in] identifier[generated] : identifier[digest] , identifier[length] = identifier[rehash] ( identifier[f] ) identifier[installed_rows] . identifier[append] (( identifier[normpath] ( identifier[f] , identifier[lib_dir] ), identifier[digest] , identifier[str] ( identifier[length] ))) keyword[for] identifier[f] keyword[in] identifier[installed] : identifier[installed_rows] . identifier[append] (( identifier[installed] [ identifier[f] ], literal[string] , literal[string] )) keyword[return] identifier[installed_rows]
def get_csv_rows_for_installed(old_csv_rows, installed, changed, generated, lib_dir): # type: Iterable[List[str]] # type: Dict[str, str] # type: set # type: List[str] # type: str # type: (...) -> List[InstalledCSVRow] '\n :param installed: A map from archive RECORD path to installation RECORD\n path.\n ' installed_rows = [] # type: List[InstalledCSVRow] for row in old_csv_rows: if len(row) > 3: logger.warning('RECORD line has more than three elements: {}'.format(row)) # depends on [control=['if'], data=[]] # Make a copy because we are mutating the row. row = list(row) old_path = row[0] new_path = installed.pop(old_path, old_path) row[0] = new_path if new_path in changed: (digest, length) = rehash(new_path) row[1] = digest row[2] = length # depends on [control=['if'], data=['new_path']] installed_rows.append(tuple(row)) # depends on [control=['for'], data=['row']] for f in generated: (digest, length) = rehash(f) installed_rows.append((normpath(f, lib_dir), digest, str(length))) # depends on [control=['for'], data=['f']] for f in installed: installed_rows.append((installed[f], '', '')) # depends on [control=['for'], data=['f']] return installed_rows
def is_transport_reaction_annotations(rxn):
    """
    Return boolean if a reaction is a transport reaction (from annotations).

    Parameters
    ----------
    rxn: cobra.Reaction
        The metabolic reaction under investigation.

    """
    def annotation_pairs(mets):
        # Collect hashable (key, values) annotation pairs, skipping free
        # protons ("H"), SBO terms, and missing keys/values.
        return set(
            (k, tuple(v))
            for met in mets
            for k, v in iteritems(met.annotation)
            if met.id != "H" and k is not None and k != 'sbo' and v is not None
        )

    reactants = annotation_pairs(rxn.reactants)
    products = annotation_pairs(rxn.products)
    # Find intersection between reactant annotations and
    # product annotations to find common metabolites between them,
    # satisfying the requirements for a transport reaction. Reactions such
    # as those involving oxidoreductases (where no net transport of
    # Hydrogen is occurring, but rather just an exchange of electrons or
    # charges effecting a change in protonation) are excluded.
    # BUGFIX: the original returned None in the negative case despite the
    # docstring promising a boolean; return an actual bool.
    return len(reactants & products) > 0
def function[is_transport_reaction_annotations, parameter[rxn]]: constant[ Return boolean if a reaction is a transport reaction (from annotations). Parameters ---------- rxn: cobra.Reaction The metabolic reaction under investigation. ] variable[reactants] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b06cf0a0>]] variable[products] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b06cf5b0>]] variable[transported_mets] assign[=] binary_operation[name[reactants] <ast.BitAnd object at 0x7da2590d6b60> name[products]] if compare[call[name[len], parameter[name[transported_mets]]] greater[>] constant[0]] begin[:] return[constant[True]]
keyword[def] identifier[is_transport_reaction_annotations] ( identifier[rxn] ): literal[string] identifier[reactants] = identifier[set] ([( identifier[k] , identifier[tuple] ( identifier[v] )) keyword[for] identifier[met] keyword[in] identifier[rxn] . identifier[reactants] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[met] . identifier[annotation] ) keyword[if] identifier[met] . identifier[id] != literal[string] keyword[and] identifier[k] keyword[is] keyword[not] keyword[None] keyword[and] identifier[k] != literal[string] keyword[and] identifier[v] keyword[is] keyword[not] keyword[None] ]) identifier[products] = identifier[set] ([( identifier[k] , identifier[tuple] ( identifier[v] )) keyword[for] identifier[met] keyword[in] identifier[rxn] . identifier[products] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[met] . identifier[annotation] ) keyword[if] identifier[met] . identifier[id] != literal[string] keyword[and] identifier[k] keyword[is] keyword[not] keyword[None] keyword[and] identifier[k] != literal[string] keyword[and] identifier[v] keyword[is] keyword[not] keyword[None] ]) identifier[transported_mets] = identifier[reactants] & identifier[products] keyword[if] identifier[len] ( identifier[transported_mets] )> literal[int] : keyword[return] keyword[True]
def is_transport_reaction_annotations(rxn): """ Return boolean if a reaction is a transport reaction (from annotations). Parameters ---------- rxn: cobra.Reaction The metabolic reaction under investigation. """ reactants = set([(k, tuple(v)) for met in rxn.reactants for (k, v) in iteritems(met.annotation) if met.id != 'H' and k is not None and (k != 'sbo') and (v is not None)]) products = set([(k, tuple(v)) for met in rxn.products for (k, v) in iteritems(met.annotation) if met.id != 'H' and k is not None and (k != 'sbo') and (v is not None)]) # Find intersection between reactant annotations and # product annotations to find common metabolites between them, # satisfying the requirements for a transport reaction. Reactions such # as those involving oxidoreductases (where no net transport of # Hydrogen is occurring, but rather just an exchange of electrons or # charges effecting a change in protonation) are excluded. transported_mets = reactants & products if len(transported_mets) > 0: return True # depends on [control=['if'], data=[]]
def filter(self, table, volumes, filter_string):
    """Naive case-insensitive substring search over volume names."""
    needle = filter_string.lower()
    matches = []
    for vol in volumes:
        if needle in vol.name.lower():
            matches.append(vol)
    return matches
def function[filter, parameter[self, table, volumes, filter_string]]: constant[Naive case-insensitive search.] variable[q] assign[=] call[name[filter_string].lower, parameter[]] return[<ast.ListComp object at 0x7da1b1987400>]
keyword[def] identifier[filter] ( identifier[self] , identifier[table] , identifier[volumes] , identifier[filter_string] ): literal[string] identifier[q] = identifier[filter_string] . identifier[lower] () keyword[return] [ identifier[volume] keyword[for] identifier[volume] keyword[in] identifier[volumes] keyword[if] identifier[q] keyword[in] identifier[volume] . identifier[name] . identifier[lower] ()]
def filter(self, table, volumes, filter_string): """Naive case-insensitive search.""" q = filter_string.lower() return [volume for volume in volumes if q in volume.name.lower()]
def plot_footprints(gdf, fig=None, ax=None, figsize=None, color='#333333',
                    bgcolor='w', set_bounds=True, bbox=None, save=False,
                    show=True, close=False, filename='image',
                    file_format='png', dpi=600):
    """
    Plot a GeoDataFrame of footprints.

    Parameters
    ----------
    gdf : GeoDataFrame
        footprints
    fig : figure
    ax : axis
    figsize : tuple
    color : string
        the color of the footprints
    bgcolor : string
        the background color of the plot
    set_bounds : bool
        if True, set bounds from either passed-in bbox or the spatial extent
        of the gdf
    bbox : tuple
        if True and if set_bounds is True, set the display bounds to this
        bbox; unpacked as (top, bottom, right, left), i.e. north/south/east/west
    save : bool
        whether to save the figure to disk or not
    show : bool
        whether to display the figure or not
    close : bool
        close the figure (only if show equals False) to prevent display
    filename : string
        the name of the file to save
    file_format : string
        the format of the file to save (e.g., 'jpg', 'png', 'svg')
    dpi : int
        the resolution of the image file if saving

    Returns
    -------
    fig, ax : tuple
    """
    # Create a new figure only when the caller did not supply one.
    if fig is None or ax is None:
        fig, ax = plt.subplots(figsize=figsize, facecolor=bgcolor)
        ax.set_facecolor(bgcolor)

    # extract each polygon as a descartes patch, and add to a matplotlib patch
    # collection
    patches = []
    for geometry in gdf['geometry']:
        if isinstance(geometry, Polygon):
            patches.append(PolygonPatch(geometry))
        elif isinstance(geometry, MultiPolygon):
            for subpolygon in geometry:
                # if geometry is multipolygon, go through each constituent subpolygon
                patches.append(PolygonPatch(subpolygon))
    # One collection for all footprints is far cheaper than per-patch artists.
    pc = PatchCollection(patches, facecolor=color, edgecolor=color,
                         linewidth=0, alpha=1)
    ax.add_collection(pc)

    if set_bounds:
        if bbox is None:
            # set the figure bounds to the polygons' bounds
            left, bottom, right, top = gdf.total_bounds
        else:
            # note the differing unpack order: bbox is (top, bottom, right, left)
            top, bottom, right, left = bbox
        ax.set_xlim((left, right))
        ax.set_ylim((bottom, top))

    # turn off the axis display set the margins to zero and point the ticks in
    # so there's no space around the plot
    ax.axis('off')
    ax.margins(0)
    ax.tick_params(which='both', direction='in')
    fig.canvas.draw()

    # make everything square
    ax.set_aspect('equal')
    fig.canvas.draw()

    fig, ax = save_and_show(fig=fig, ax=ax, save=save, show=show, close=close,
                            filename=filename, file_format=file_format,
                            dpi=dpi, axis_off=True)

    return fig, ax
def function[plot_footprints, parameter[gdf, fig, ax, figsize, color, bgcolor, set_bounds, bbox, save, show, close, filename, file_format, dpi]]: constant[ Plot a GeoDataFrame of footprints. Parameters ---------- gdf : GeoDataFrame footprints fig : figure ax : axis figsize : tuple color : string the color of the footprints bgcolor : string the background color of the plot set_bounds : bool if True, set bounds from either passed-in bbox or the spatial extent of the gdf bbox : tuple if True and if set_bounds is True, set the display bounds to this bbox save : bool whether to save the figure to disk or not show : bool whether to display the figure or not close : bool close the figure (only if show equals False) to prevent display filename : string the name of the file to save file_format : string the format of the file to save (e.g., 'jpg', 'png', 'svg') dpi : int the resolution of the image file if saving Returns ------- fig, ax : tuple ] if <ast.BoolOp object at 0x7da1b1b7e710> begin[:] <ast.Tuple object at 0x7da1b1b7f010> assign[=] call[name[plt].subplots, parameter[]] call[name[ax].set_facecolor, parameter[name[bgcolor]]] variable[patches] assign[=] list[[]] for taget[name[geometry]] in starred[call[name[gdf]][constant[geometry]]] begin[:] if call[name[isinstance], parameter[name[geometry], name[Polygon]]] begin[:] call[name[patches].append, parameter[call[name[PolygonPatch], parameter[name[geometry]]]]] variable[pc] assign[=] call[name[PatchCollection], parameter[name[patches]]] call[name[ax].add_collection, parameter[name[pc]]] if name[set_bounds] begin[:] if compare[name[bbox] is constant[None]] begin[:] <ast.Tuple object at 0x7da1b1b7d900> assign[=] name[gdf].total_bounds call[name[ax].set_xlim, parameter[tuple[[<ast.Name object at 0x7da1b1b462c0>, <ast.Name object at 0x7da1b1b47b80>]]]] call[name[ax].set_ylim, parameter[tuple[[<ast.Name object at 0x7da1b1b45660>, <ast.Name object at 0x7da1b1b45b40>]]]] call[name[ax].axis, parameter[constant[off]]] 
call[name[ax].margins, parameter[constant[0]]] call[name[ax].tick_params, parameter[]] call[name[fig].canvas.draw, parameter[]] call[name[ax].set_aspect, parameter[constant[equal]]] call[name[fig].canvas.draw, parameter[]] <ast.Tuple object at 0x7da1b1b472e0> assign[=] call[name[save_and_show], parameter[]] return[tuple[[<ast.Name object at 0x7da1b1b46e60>, <ast.Name object at 0x7da1b1b46650>]]]
keyword[def] identifier[plot_footprints] ( identifier[gdf] , identifier[fig] = keyword[None] , identifier[ax] = keyword[None] , identifier[figsize] = keyword[None] , identifier[color] = literal[string] , identifier[bgcolor] = literal[string] , identifier[set_bounds] = keyword[True] , identifier[bbox] = keyword[None] , identifier[save] = keyword[False] , identifier[show] = keyword[True] , identifier[close] = keyword[False] , identifier[filename] = literal[string] , identifier[file_format] = literal[string] , identifier[dpi] = literal[int] ): literal[string] keyword[if] identifier[fig] keyword[is] keyword[None] keyword[or] identifier[ax] keyword[is] keyword[None] : identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( identifier[figsize] = identifier[figsize] , identifier[facecolor] = identifier[bgcolor] ) identifier[ax] . identifier[set_facecolor] ( identifier[bgcolor] ) identifier[patches] =[] keyword[for] identifier[geometry] keyword[in] identifier[gdf] [ literal[string] ]: keyword[if] identifier[isinstance] ( identifier[geometry] , identifier[Polygon] ): identifier[patches] . identifier[append] ( identifier[PolygonPatch] ( identifier[geometry] )) keyword[elif] identifier[isinstance] ( identifier[geometry] , identifier[MultiPolygon] ): keyword[for] identifier[subpolygon] keyword[in] identifier[geometry] : identifier[patches] . identifier[append] ( identifier[PolygonPatch] ( identifier[subpolygon] )) identifier[pc] = identifier[PatchCollection] ( identifier[patches] , identifier[facecolor] = identifier[color] , identifier[edgecolor] = identifier[color] , identifier[linewidth] = literal[int] , identifier[alpha] = literal[int] ) identifier[ax] . identifier[add_collection] ( identifier[pc] ) keyword[if] identifier[set_bounds] : keyword[if] identifier[bbox] keyword[is] keyword[None] : identifier[left] , identifier[bottom] , identifier[right] , identifier[top] = identifier[gdf] . 
identifier[total_bounds] keyword[else] : identifier[top] , identifier[bottom] , identifier[right] , identifier[left] = identifier[bbox] identifier[ax] . identifier[set_xlim] (( identifier[left] , identifier[right] )) identifier[ax] . identifier[set_ylim] (( identifier[bottom] , identifier[top] )) identifier[ax] . identifier[axis] ( literal[string] ) identifier[ax] . identifier[margins] ( literal[int] ) identifier[ax] . identifier[tick_params] ( identifier[which] = literal[string] , identifier[direction] = literal[string] ) identifier[fig] . identifier[canvas] . identifier[draw] () identifier[ax] . identifier[set_aspect] ( literal[string] ) identifier[fig] . identifier[canvas] . identifier[draw] () identifier[fig] , identifier[ax] = identifier[save_and_show] ( identifier[fig] = identifier[fig] , identifier[ax] = identifier[ax] , identifier[save] = identifier[save] , identifier[show] = identifier[show] , identifier[close] = identifier[close] , identifier[filename] = identifier[filename] , identifier[file_format] = identifier[file_format] , identifier[dpi] = identifier[dpi] , identifier[axis_off] = keyword[True] ) keyword[return] identifier[fig] , identifier[ax]
def plot_footprints(gdf, fig=None, ax=None, figsize=None, color='#333333', bgcolor='w', set_bounds=True, bbox=None, save=False, show=True, close=False, filename='image', file_format='png', dpi=600): """ Plot a GeoDataFrame of footprints. Parameters ---------- gdf : GeoDataFrame footprints fig : figure ax : axis figsize : tuple color : string the color of the footprints bgcolor : string the background color of the plot set_bounds : bool if True, set bounds from either passed-in bbox or the spatial extent of the gdf bbox : tuple if True and if set_bounds is True, set the display bounds to this bbox save : bool whether to save the figure to disk or not show : bool whether to display the figure or not close : bool close the figure (only if show equals False) to prevent display filename : string the name of the file to save file_format : string the format of the file to save (e.g., 'jpg', 'png', 'svg') dpi : int the resolution of the image file if saving Returns ------- fig, ax : tuple """ if fig is None or ax is None: (fig, ax) = plt.subplots(figsize=figsize, facecolor=bgcolor) ax.set_facecolor(bgcolor) # depends on [control=['if'], data=[]] # extract each polygon as a descartes patch, and add to a matplotlib patch # collection patches = [] for geometry in gdf['geometry']: if isinstance(geometry, Polygon): patches.append(PolygonPatch(geometry)) # depends on [control=['if'], data=[]] elif isinstance(geometry, MultiPolygon): for subpolygon in geometry: #if geometry is multipolygon, go through each constituent subpolygon patches.append(PolygonPatch(subpolygon)) # depends on [control=['for'], data=['subpolygon']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['geometry']] pc = PatchCollection(patches, facecolor=color, edgecolor=color, linewidth=0, alpha=1) ax.add_collection(pc) if set_bounds: if bbox is None: # set the figure bounds to the polygons' bounds (left, bottom, right, top) = gdf.total_bounds # depends on [control=['if'], data=[]] 
else: (top, bottom, right, left) = bbox ax.set_xlim((left, right)) ax.set_ylim((bottom, top)) # depends on [control=['if'], data=[]] # turn off the axis display set the margins to zero and point the ticks in # so there's no space around the plot ax.axis('off') ax.margins(0) ax.tick_params(which='both', direction='in') fig.canvas.draw() # make everything square ax.set_aspect('equal') fig.canvas.draw() (fig, ax) = save_and_show(fig=fig, ax=ax, save=save, show=show, close=close, filename=filename, file_format=file_format, dpi=dpi, axis_off=True) return (fig, ax)
def __downloadPage(factory, *args, **kwargs):
    """Start a HTTP download, returning a HTTPDownloader object"""
    # The Twisted API is weird:
    # 1) web.client.downloadPage() doesn't give us the HTTP headers
    # 2) there is no method that simply accepts a URL and gives you back
    #    a HTTPDownloader object
    #TODO: convert getPage() usage to something similar, too
    dl = factory(*args, **kwargs)
    if dl.scheme != 'https':
        reactor.connectTCP(dl.host, dl.port, dl)
    else:
        from twisted.internet import ssl
        reactor.connectSSL(dl.host, dl.port, dl, ssl.ClientContextFactory())
    return dl
def function[__downloadPage, parameter[factory]]: constant[Start a HTTP download, returning a HTTPDownloader object] variable[downloader] assign[=] call[name[factory], parameter[<ast.Starred object at 0x7da1b2347580>]] if compare[name[downloader].scheme equal[==] constant[https]] begin[:] from relative_module[twisted.internet] import module[ssl] variable[contextFactory] assign[=] call[name[ssl].ClientContextFactory, parameter[]] call[name[reactor].connectSSL, parameter[name[downloader].host, name[downloader].port, name[downloader], name[contextFactory]]] return[name[downloader]]
keyword[def] identifier[__downloadPage] ( identifier[factory] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[downloader] = identifier[factory] (* identifier[args] ,** identifier[kwargs] ) keyword[if] identifier[downloader] . identifier[scheme] == literal[string] : keyword[from] identifier[twisted] . identifier[internet] keyword[import] identifier[ssl] identifier[contextFactory] = identifier[ssl] . identifier[ClientContextFactory] () identifier[reactor] . identifier[connectSSL] ( identifier[downloader] . identifier[host] , identifier[downloader] . identifier[port] , identifier[downloader] , identifier[contextFactory] ) keyword[else] : identifier[reactor] . identifier[connectTCP] ( identifier[downloader] . identifier[host] , identifier[downloader] . identifier[port] , identifier[downloader] ) keyword[return] identifier[downloader]
def __downloadPage(factory, *args, **kwargs): """Start a HTTP download, returning a HTTPDownloader object""" # The Twisted API is weird: # 1) web.client.downloadPage() doesn't give us the HTTP headers # 2) there is no method that simply accepts a URL and gives you back # a HTTPDownloader object #TODO: convert getPage() usage to something similar, too downloader = factory(*args, **kwargs) if downloader.scheme == 'https': from twisted.internet import ssl contextFactory = ssl.ClientContextFactory() reactor.connectSSL(downloader.host, downloader.port, downloader, contextFactory) # depends on [control=['if'], data=[]] else: reactor.connectTCP(downloader.host, downloader.port, downloader) return downloader
def build_url(base, additional_params=None):
    """Construct a URL based off of base containing all parameters in the
    query portion of base plus any additional parameters.

    Additional parameters whose value is ``None`` are removed from the
    resulting query string entirely.

    :param base: Base URL
    :type base: str
    ::param additional_params: Additional query parameters to include.
    :type additional_params: dict
    :rtype: str
    """
    parts = urlparse.urlparse(base)
    # Keep blank values so "?a=" round-trips unchanged.
    params = dict(urlparse.parse_qsl(parts.query, True))
    if additional_params is not None:
        params.update(additional_params)
        # A None value means "drop this key from the final query".
        for key, val in additional_params.iteritems():
            if val is None:
                del params[key]
    return urlparse.urlunparse((parts.scheme, parts.netloc, parts.path,
                                parts.params, urllib.urlencode(params),
                                parts.fragment))
def function[build_url, parameter[base, additional_params]]: constant[Construct a URL based off of base containing all parameters in the query portion of base plus any additional parameters. :param base: Base URL :type base: str ::param additional_params: Additional query parameters to include. :type additional_params: dict :rtype: str ] variable[url] assign[=] call[name[urlparse].urlparse, parameter[name[base]]] variable[query_params] assign[=] dictionary[[], []] call[name[query_params].update, parameter[call[name[urlparse].parse_qsl, parameter[name[url].query, constant[True]]]]] if compare[name[additional_params] is_not constant[None]] begin[:] call[name[query_params].update, parameter[name[additional_params]]] for taget[tuple[[<ast.Name object at 0x7da1b0284700>, <ast.Name object at 0x7da1b02865c0>]]] in starred[call[name[additional_params].iteritems, parameter[]]] begin[:] if compare[name[v] is constant[None]] begin[:] call[name[query_params].pop, parameter[name[k]]] return[call[name[urlparse].urlunparse, parameter[tuple[[<ast.Attribute object at 0x7da1b0286980>, <ast.Attribute object at 0x7da1b02856c0>, <ast.Attribute object at 0x7da1b0285b70>, <ast.Attribute object at 0x7da1b0287b80>, <ast.Call object at 0x7da1b0287e50>, <ast.Attribute object at 0x7da20c6a8c10>]]]]]
keyword[def] identifier[build_url] ( identifier[base] , identifier[additional_params] = keyword[None] ): literal[string] identifier[url] = identifier[urlparse] . identifier[urlparse] ( identifier[base] ) identifier[query_params] ={} identifier[query_params] . identifier[update] ( identifier[urlparse] . identifier[parse_qsl] ( identifier[url] . identifier[query] , keyword[True] )) keyword[if] identifier[additional_params] keyword[is] keyword[not] keyword[None] : identifier[query_params] . identifier[update] ( identifier[additional_params] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[additional_params] . identifier[iteritems] (): keyword[if] identifier[v] keyword[is] keyword[None] : identifier[query_params] . identifier[pop] ( identifier[k] ) keyword[return] identifier[urlparse] . identifier[urlunparse] (( identifier[url] . identifier[scheme] , identifier[url] . identifier[netloc] , identifier[url] . identifier[path] , identifier[url] . identifier[params] , identifier[urllib] . identifier[urlencode] ( identifier[query_params] ), identifier[url] . identifier[fragment] ))
def build_url(base, additional_params=None): """Construct a URL based off of base containing all parameters in the query portion of base plus any additional parameters. :param base: Base URL :type base: str ::param additional_params: Additional query parameters to include. :type additional_params: dict :rtype: str """ url = urlparse.urlparse(base) query_params = {} query_params.update(urlparse.parse_qsl(url.query, True)) if additional_params is not None: query_params.update(additional_params) for (k, v) in additional_params.iteritems(): if v is None: query_params.pop(k) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['additional_params']] return urlparse.urlunparse((url.scheme, url.netloc, url.path, url.params, urllib.urlencode(query_params), url.fragment))
def Ge(self): """ Result of US from the SVD decomposition G = USVᵀ. """ from scipy.linalg import svd from numpy_sugar.linalg import ddot U, S, _ = svd(self._G, full_matrices=False, check_finite=False) if U.shape[1] < self._G.shape[1]: return ddot(U, S) return self._G
def function[Ge, parameter[self]]: constant[ Result of US from the SVD decomposition G = USVᵀ. ] from relative_module[scipy.linalg] import module[svd] from relative_module[numpy_sugar.linalg] import module[ddot] <ast.Tuple object at 0x7da1b2344c10> assign[=] call[name[svd], parameter[name[self]._G]] if compare[call[name[U].shape][constant[1]] less[<] call[name[self]._G.shape][constant[1]]] begin[:] return[call[name[ddot], parameter[name[U], name[S]]]] return[name[self]._G]
keyword[def] identifier[Ge] ( identifier[self] ): literal[string] keyword[from] identifier[scipy] . identifier[linalg] keyword[import] identifier[svd] keyword[from] identifier[numpy_sugar] . identifier[linalg] keyword[import] identifier[ddot] identifier[U] , identifier[S] , identifier[_] = identifier[svd] ( identifier[self] . identifier[_G] , identifier[full_matrices] = keyword[False] , identifier[check_finite] = keyword[False] ) keyword[if] identifier[U] . identifier[shape] [ literal[int] ]< identifier[self] . identifier[_G] . identifier[shape] [ literal[int] ]: keyword[return] identifier[ddot] ( identifier[U] , identifier[S] ) keyword[return] identifier[self] . identifier[_G]
def Ge(self): """ Result of US from the SVD decomposition G = USVᵀ. """ from scipy.linalg import svd from numpy_sugar.linalg import ddot (U, S, _) = svd(self._G, full_matrices=False, check_finite=False) if U.shape[1] < self._G.shape[1]: return ddot(U, S) # depends on [control=['if'], data=[]] return self._G
def weight(w, sparsity): """Weight-level magnitude pruning.""" w_shape = common_layers.shape_list(w) k = int(np.prod(w_shape[:-1])) count = tf.to_int32(k * sparsity) mask = common_layers.weight_targeting(w, count) return (1 - mask) * w
def function[weight, parameter[w, sparsity]]: constant[Weight-level magnitude pruning.] variable[w_shape] assign[=] call[name[common_layers].shape_list, parameter[name[w]]] variable[k] assign[=] call[name[int], parameter[call[name[np].prod, parameter[call[name[w_shape]][<ast.Slice object at 0x7da1b1e157e0>]]]]] variable[count] assign[=] call[name[tf].to_int32, parameter[binary_operation[name[k] * name[sparsity]]]] variable[mask] assign[=] call[name[common_layers].weight_targeting, parameter[name[w], name[count]]] return[binary_operation[binary_operation[constant[1] - name[mask]] * name[w]]]
keyword[def] identifier[weight] ( identifier[w] , identifier[sparsity] ): literal[string] identifier[w_shape] = identifier[common_layers] . identifier[shape_list] ( identifier[w] ) identifier[k] = identifier[int] ( identifier[np] . identifier[prod] ( identifier[w_shape] [:- literal[int] ])) identifier[count] = identifier[tf] . identifier[to_int32] ( identifier[k] * identifier[sparsity] ) identifier[mask] = identifier[common_layers] . identifier[weight_targeting] ( identifier[w] , identifier[count] ) keyword[return] ( literal[int] - identifier[mask] )* identifier[w]
def weight(w, sparsity): """Weight-level magnitude pruning.""" w_shape = common_layers.shape_list(w) k = int(np.prod(w_shape[:-1])) count = tf.to_int32(k * sparsity) mask = common_layers.weight_targeting(w, count) return (1 - mask) * w
def retry_when_service_down(func): """ Decorator that will retry a function while it fails with status code 503 Assumes the first argument to the fuction will be an object with a set_status_message method. :param func: function: will be called until it doesn't fail with DataServiceError status 503 :return: value returned by func """ def retry_function(*args, **kwds): showed_status_msg = False status_watcher = args[0] while True: try: result = func(*args, **kwds) if showed_status_msg: status_watcher.set_status_message('') return result except DataServiceError as dse: if dse.status_code == 503: if not showed_status_msg: message = SERVICE_DOWN_MESSAGE.format(datetime.datetime.utcnow()) status_watcher.set_status_message(message) showed_status_msg = True time.sleep(SERVICE_DOWN_RETRY_SECONDS) else: raise return retry_function
def function[retry_when_service_down, parameter[func]]: constant[ Decorator that will retry a function while it fails with status code 503 Assumes the first argument to the fuction will be an object with a set_status_message method. :param func: function: will be called until it doesn't fail with DataServiceError status 503 :return: value returned by func ] def function[retry_function, parameter[]]: variable[showed_status_msg] assign[=] constant[False] variable[status_watcher] assign[=] call[name[args]][constant[0]] while constant[True] begin[:] <ast.Try object at 0x7da18fe91600> return[name[retry_function]]
keyword[def] identifier[retry_when_service_down] ( identifier[func] ): literal[string] keyword[def] identifier[retry_function] (* identifier[args] ,** identifier[kwds] ): identifier[showed_status_msg] = keyword[False] identifier[status_watcher] = identifier[args] [ literal[int] ] keyword[while] keyword[True] : keyword[try] : identifier[result] = identifier[func] (* identifier[args] ,** identifier[kwds] ) keyword[if] identifier[showed_status_msg] : identifier[status_watcher] . identifier[set_status_message] ( literal[string] ) keyword[return] identifier[result] keyword[except] identifier[DataServiceError] keyword[as] identifier[dse] : keyword[if] identifier[dse] . identifier[status_code] == literal[int] : keyword[if] keyword[not] identifier[showed_status_msg] : identifier[message] = identifier[SERVICE_DOWN_MESSAGE] . identifier[format] ( identifier[datetime] . identifier[datetime] . identifier[utcnow] ()) identifier[status_watcher] . identifier[set_status_message] ( identifier[message] ) identifier[showed_status_msg] = keyword[True] identifier[time] . identifier[sleep] ( identifier[SERVICE_DOWN_RETRY_SECONDS] ) keyword[else] : keyword[raise] keyword[return] identifier[retry_function]
def retry_when_service_down(func): """ Decorator that will retry a function while it fails with status code 503 Assumes the first argument to the fuction will be an object with a set_status_message method. :param func: function: will be called until it doesn't fail with DataServiceError status 503 :return: value returned by func """ def retry_function(*args, **kwds): showed_status_msg = False status_watcher = args[0] while True: try: result = func(*args, **kwds) if showed_status_msg: status_watcher.set_status_message('') # depends on [control=['if'], data=[]] return result # depends on [control=['try'], data=[]] except DataServiceError as dse: if dse.status_code == 503: if not showed_status_msg: message = SERVICE_DOWN_MESSAGE.format(datetime.datetime.utcnow()) status_watcher.set_status_message(message) showed_status_msg = True # depends on [control=['if'], data=[]] time.sleep(SERVICE_DOWN_RETRY_SECONDS) # depends on [control=['if'], data=[]] else: raise # depends on [control=['except'], data=['dse']] # depends on [control=['while'], data=[]] return retry_function
def to_intermediate(self): """ Converts the NetJSON configuration dictionary (self.config) to the intermediate data structure (self.intermediate_data) that will be then used by the renderer class to generate the router configuration """ self.validate() self.intermediate_data = OrderedDict() for converter_class in self.converters: # skip unnecessary loop cycles if not converter_class.should_run_forward(self.config): continue converter = converter_class(self) value = converter.to_intermediate() # maintain backward compatibility with backends # that are currently in development by GSoC students # TODO for >= 0.6.2: remove once all backends have upgraded if value and isinstance(value, (tuple, list)): # pragma: nocover value = OrderedDict(value) if value: self.intermediate_data = merge_config(self.intermediate_data, value, list_identifiers=['.name'])
def function[to_intermediate, parameter[self]]: constant[ Converts the NetJSON configuration dictionary (self.config) to the intermediate data structure (self.intermediate_data) that will be then used by the renderer class to generate the router configuration ] call[name[self].validate, parameter[]] name[self].intermediate_data assign[=] call[name[OrderedDict], parameter[]] for taget[name[converter_class]] in starred[name[self].converters] begin[:] if <ast.UnaryOp object at 0x7da20c993c70> begin[:] continue variable[converter] assign[=] call[name[converter_class], parameter[name[self]]] variable[value] assign[=] call[name[converter].to_intermediate, parameter[]] if <ast.BoolOp object at 0x7da1b0141840> begin[:] variable[value] assign[=] call[name[OrderedDict], parameter[name[value]]] if name[value] begin[:] name[self].intermediate_data assign[=] call[name[merge_config], parameter[name[self].intermediate_data, name[value]]]
keyword[def] identifier[to_intermediate] ( identifier[self] ): literal[string] identifier[self] . identifier[validate] () identifier[self] . identifier[intermediate_data] = identifier[OrderedDict] () keyword[for] identifier[converter_class] keyword[in] identifier[self] . identifier[converters] : keyword[if] keyword[not] identifier[converter_class] . identifier[should_run_forward] ( identifier[self] . identifier[config] ): keyword[continue] identifier[converter] = identifier[converter_class] ( identifier[self] ) identifier[value] = identifier[converter] . identifier[to_intermediate] () keyword[if] identifier[value] keyword[and] identifier[isinstance] ( identifier[value] ,( identifier[tuple] , identifier[list] )): identifier[value] = identifier[OrderedDict] ( identifier[value] ) keyword[if] identifier[value] : identifier[self] . identifier[intermediate_data] = identifier[merge_config] ( identifier[self] . identifier[intermediate_data] , identifier[value] , identifier[list_identifiers] =[ literal[string] ])
def to_intermediate(self): """ Converts the NetJSON configuration dictionary (self.config) to the intermediate data structure (self.intermediate_data) that will be then used by the renderer class to generate the router configuration """ self.validate() self.intermediate_data = OrderedDict() for converter_class in self.converters: # skip unnecessary loop cycles if not converter_class.should_run_forward(self.config): continue # depends on [control=['if'], data=[]] converter = converter_class(self) value = converter.to_intermediate() # maintain backward compatibility with backends # that are currently in development by GSoC students # TODO for >= 0.6.2: remove once all backends have upgraded if value and isinstance(value, (tuple, list)): # pragma: nocover value = OrderedDict(value) # depends on [control=['if'], data=[]] if value: self.intermediate_data = merge_config(self.intermediate_data, value, list_identifiers=['.name']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['converter_class']]
def items( self ): """ Returns all the rollout items for this widget. :return [<XRolloutItem>, ..] """ layout = self.widget().layout() return [layout.itemAt(i).widget() for i in range(layout.count()-1)]
def function[items, parameter[self]]: constant[ Returns all the rollout items for this widget. :return [<XRolloutItem>, ..] ] variable[layout] assign[=] call[call[name[self].widget, parameter[]].layout, parameter[]] return[<ast.ListComp object at 0x7da1b24265f0>]
keyword[def] identifier[items] ( identifier[self] ): literal[string] identifier[layout] = identifier[self] . identifier[widget] (). identifier[layout] () keyword[return] [ identifier[layout] . identifier[itemAt] ( identifier[i] ). identifier[widget] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[layout] . identifier[count] ()- literal[int] )]
def items(self): """ Returns all the rollout items for this widget. :return [<XRolloutItem>, ..] """ layout = self.widget().layout() return [layout.itemAt(i).widget() for i in range(layout.count() - 1)]
def build_arg_parser(): """ Build an argument parser using argparse. Use it when python version is 2.7 or later. """ parser = argparse.ArgumentParser(description="Smatch calculator -- arguments") parser.add_argument('-f', nargs=2, required=True, type=argparse.FileType('r'), help='Two files containing AMR pairs. AMRs in each file are separated by a single blank line') parser.add_argument('-r', type=int, default=4, help='Restart number (Default:4)') parser.add_argument('--significant', type=int, default=2, help='significant digits to output (default: 2)') parser.add_argument('-v', action='store_true', help='Verbose output (Default:false)') parser.add_argument('--vv', action='store_true', help='Very Verbose output (Default:false)') parser.add_argument('--ms', action='store_true', default=False, help='Output multiple scores (one AMR pair a score)' 'instead of a single document-level smatch score (Default: false)') parser.add_argument('--pr', action='store_true', default=False, help="Output precision and recall as well as the f-score. Default: false") parser.add_argument('--justinstance', action='store_true', default=False, help="just pay attention to matching instances") parser.add_argument('--justattribute', action='store_true', default=False, help="just pay attention to matching attributes") parser.add_argument('--justrelation', action='store_true', default=False, help="just pay attention to matching relations") return parser
def function[build_arg_parser, parameter[]]: constant[ Build an argument parser using argparse. Use it when python version is 2.7 or later. ] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[-f]]] call[name[parser].add_argument, parameter[constant[-r]]] call[name[parser].add_argument, parameter[constant[--significant]]] call[name[parser].add_argument, parameter[constant[-v]]] call[name[parser].add_argument, parameter[constant[--vv]]] call[name[parser].add_argument, parameter[constant[--ms]]] call[name[parser].add_argument, parameter[constant[--pr]]] call[name[parser].add_argument, parameter[constant[--justinstance]]] call[name[parser].add_argument, parameter[constant[--justattribute]]] call[name[parser].add_argument, parameter[constant[--justrelation]]] return[name[parser]]
keyword[def] identifier[build_arg_parser] (): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[int] , identifier[required] = keyword[True] , identifier[type] = identifier[argparse] . identifier[FileType] ( literal[string] ), identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[default] = literal[int] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[default] = literal[int] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[parser] . 
identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) keyword[return] identifier[parser]
def build_arg_parser(): """ Build an argument parser using argparse. Use it when python version is 2.7 or later. """ parser = argparse.ArgumentParser(description='Smatch calculator -- arguments') parser.add_argument('-f', nargs=2, required=True, type=argparse.FileType('r'), help='Two files containing AMR pairs. AMRs in each file are separated by a single blank line') parser.add_argument('-r', type=int, default=4, help='Restart number (Default:4)') parser.add_argument('--significant', type=int, default=2, help='significant digits to output (default: 2)') parser.add_argument('-v', action='store_true', help='Verbose output (Default:false)') parser.add_argument('--vv', action='store_true', help='Very Verbose output (Default:false)') parser.add_argument('--ms', action='store_true', default=False, help='Output multiple scores (one AMR pair a score)instead of a single document-level smatch score (Default: false)') parser.add_argument('--pr', action='store_true', default=False, help='Output precision and recall as well as the f-score. Default: false') parser.add_argument('--justinstance', action='store_true', default=False, help='just pay attention to matching instances') parser.add_argument('--justattribute', action='store_true', default=False, help='just pay attention to matching attributes') parser.add_argument('--justrelation', action='store_true', default=False, help='just pay attention to matching relations') return parser
def insertSyntheticMemberAtBegin(self, synthesizedMember): """ :type synthesizedMember: SyntheticMember :raises DuplicateMemberNameError """ memberName = synthesizedMember.memberName() if memberName in [m.memberName() for m in self._syntheticMemberList]: raise DuplicateMemberNameError(memberName, self._class.__name__) self._syntheticMemberList.insert(0, synthesizedMember)
def function[insertSyntheticMemberAtBegin, parameter[self, synthesizedMember]]: constant[ :type synthesizedMember: SyntheticMember :raises DuplicateMemberNameError ] variable[memberName] assign[=] call[name[synthesizedMember].memberName, parameter[]] if compare[name[memberName] in <ast.ListComp object at 0x7da1b0a67eb0>] begin[:] <ast.Raise object at 0x7da1b0a644f0> call[name[self]._syntheticMemberList.insert, parameter[constant[0], name[synthesizedMember]]]
keyword[def] identifier[insertSyntheticMemberAtBegin] ( identifier[self] , identifier[synthesizedMember] ): literal[string] identifier[memberName] = identifier[synthesizedMember] . identifier[memberName] () keyword[if] identifier[memberName] keyword[in] [ identifier[m] . identifier[memberName] () keyword[for] identifier[m] keyword[in] identifier[self] . identifier[_syntheticMemberList] ]: keyword[raise] identifier[DuplicateMemberNameError] ( identifier[memberName] , identifier[self] . identifier[_class] . identifier[__name__] ) identifier[self] . identifier[_syntheticMemberList] . identifier[insert] ( literal[int] , identifier[synthesizedMember] )
def insertSyntheticMemberAtBegin(self, synthesizedMember): """ :type synthesizedMember: SyntheticMember :raises DuplicateMemberNameError """ memberName = synthesizedMember.memberName() if memberName in [m.memberName() for m in self._syntheticMemberList]: raise DuplicateMemberNameError(memberName, self._class.__name__) # depends on [control=['if'], data=['memberName']] self._syntheticMemberList.insert(0, synthesizedMember)
def move_cursor(cols=0, rows=0): """ Moves the cursor the given number of columns and rows The cursor is moved right when cols is positive and left when negative. The cursor is moved down when rows is positive and down when negative. """ if cols == 0 and rows == 0: return commands = "" commands += codes.cursor["up" if rows < 0 else "down"] * abs(rows) commands += codes.cursor["left" if cols < 0 else "right"] * abs(cols) if commands: print(commands, end="") stdout.flush()
def function[move_cursor, parameter[cols, rows]]: constant[ Moves the cursor the given number of columns and rows The cursor is moved right when cols is positive and left when negative. The cursor is moved down when rows is positive and down when negative. ] if <ast.BoolOp object at 0x7da1b16aba90> begin[:] return[None] variable[commands] assign[=] constant[] <ast.AugAssign object at 0x7da1b16a9840> <ast.AugAssign object at 0x7da1b16abca0> if name[commands] begin[:] call[name[print], parameter[name[commands]]] call[name[stdout].flush, parameter[]]
keyword[def] identifier[move_cursor] ( identifier[cols] = literal[int] , identifier[rows] = literal[int] ): literal[string] keyword[if] identifier[cols] == literal[int] keyword[and] identifier[rows] == literal[int] : keyword[return] identifier[commands] = literal[string] identifier[commands] += identifier[codes] . identifier[cursor] [ literal[string] keyword[if] identifier[rows] < literal[int] keyword[else] literal[string] ]* identifier[abs] ( identifier[rows] ) identifier[commands] += identifier[codes] . identifier[cursor] [ literal[string] keyword[if] identifier[cols] < literal[int] keyword[else] literal[string] ]* identifier[abs] ( identifier[cols] ) keyword[if] identifier[commands] : identifier[print] ( identifier[commands] , identifier[end] = literal[string] ) identifier[stdout] . identifier[flush] ()
def move_cursor(cols=0, rows=0): """ Moves the cursor the given number of columns and rows The cursor is moved right when cols is positive and left when negative. The cursor is moved down when rows is positive and down when negative. """ if cols == 0 and rows == 0: return # depends on [control=['if'], data=[]] commands = '' commands += codes.cursor['up' if rows < 0 else 'down'] * abs(rows) commands += codes.cursor['left' if cols < 0 else 'right'] * abs(cols) if commands: print(commands, end='') stdout.flush() # depends on [control=['if'], data=[]]
def get_registered(self): """Returns the user's registration date.""" doc = self._request(self.ws_prefix + ".getInfo", True) return _extract(doc, "registered")
def function[get_registered, parameter[self]]: constant[Returns the user's registration date.] variable[doc] assign[=] call[name[self]._request, parameter[binary_operation[name[self].ws_prefix + constant[.getInfo]], constant[True]]] return[call[name[_extract], parameter[name[doc], constant[registered]]]]
keyword[def] identifier[get_registered] ( identifier[self] ): literal[string] identifier[doc] = identifier[self] . identifier[_request] ( identifier[self] . identifier[ws_prefix] + literal[string] , keyword[True] ) keyword[return] identifier[_extract] ( identifier[doc] , literal[string] )
def get_registered(self): """Returns the user's registration date.""" doc = self._request(self.ws_prefix + '.getInfo', True) return _extract(doc, 'registered')
def add_tags(self, item, *tags): """ Add one or more tags to a retrieved item, then update it on the server Accepts a dict, and one or more tags to add to it Returns the updated item from the server """ # Make sure there's a tags field, or add one try: assert item["data"]["tags"] except AssertionError: item["data"]["tags"] = list() for tag in tags: item["data"]["tags"].append({"tag": "%s" % tag}) # make sure everything's OK assert self.check_items([item]) return self.update_item(item)
def function[add_tags, parameter[self, item]]: constant[ Add one or more tags to a retrieved item, then update it on the server Accepts a dict, and one or more tags to add to it Returns the updated item from the server ] <ast.Try object at 0x7da20c991090> for taget[name[tag]] in starred[name[tags]] begin[:] call[call[call[name[item]][constant[data]]][constant[tags]].append, parameter[dictionary[[<ast.Constant object at 0x7da18c4ccb50>], [<ast.BinOp object at 0x7da18c4cfeb0>]]]] assert[call[name[self].check_items, parameter[list[[<ast.Name object at 0x7da18c4cfc70>]]]]] return[call[name[self].update_item, parameter[name[item]]]]
keyword[def] identifier[add_tags] ( identifier[self] , identifier[item] ,* identifier[tags] ): literal[string] keyword[try] : keyword[assert] identifier[item] [ literal[string] ][ literal[string] ] keyword[except] identifier[AssertionError] : identifier[item] [ literal[string] ][ literal[string] ]= identifier[list] () keyword[for] identifier[tag] keyword[in] identifier[tags] : identifier[item] [ literal[string] ][ literal[string] ]. identifier[append] ({ literal[string] : literal[string] % identifier[tag] }) keyword[assert] identifier[self] . identifier[check_items] ([ identifier[item] ]) keyword[return] identifier[self] . identifier[update_item] ( identifier[item] )
def add_tags(self, item, *tags): """ Add one or more tags to a retrieved item, then update it on the server Accepts a dict, and one or more tags to add to it Returns the updated item from the server """ # Make sure there's a tags field, or add one try: assert item['data']['tags'] # depends on [control=['try'], data=[]] except AssertionError: item['data']['tags'] = list() # depends on [control=['except'], data=[]] for tag in tags: item['data']['tags'].append({'tag': '%s' % tag}) # depends on [control=['for'], data=['tag']] # make sure everything's OK assert self.check_items([item]) return self.update_item(item)
def init(storage_backend='dir', trust_password=None, network_address=None, network_port=None, storage_create_device=None, storage_create_loop=None, storage_pool=None): ''' Calls lxd init --auto -- opts storage_backend : Storage backend to use (zfs or dir, default: dir) trust_password : Password required to add new clients network_address : None Address to bind LXD to (default: none) network_port : None Port to bind LXD to (Default: 8443) storage_create_device : None Setup device based storage using this DEVICE storage_create_loop : None Setup loop based storage with this SIZE in GB storage_pool : None Storage pool to use or create CLI Examples: To listen on all IPv4/IPv6 Addresses: .. code-block:: bash salt '*' lxd.init dir PaSsW0rD [::] To not listen on Network: .. code-block:: bash salt '*' lxd.init ''' cmd = ('lxd init --auto' ' --storage-backend="{0}"').format( storage_backend ) if trust_password is not None: cmd = cmd + ' --trust-password="{0}"'.format(trust_password) if network_address is not None: cmd = cmd + ' --network-address="{0}"'.format(network_address) if network_port is not None: cmd = cmd + ' --network-port="{0}"'.format(network_port) if storage_create_device is not None: cmd = cmd + ' --storage-create-device="{0}"'.format( storage_create_device ) if storage_create_loop is not None: cmd = cmd + ' --storage-create-loop="{0}"'.format( storage_create_loop ) if storage_pool is not None: cmd = cmd + ' --storage-pool="{0}"'.format(storage_pool) try: output = __salt__['cmd.run'](cmd) except ValueError as e: raise CommandExecutionError( "Failed to call: '{0}', error was: {1}".format( cmd, six.text_type(e) ), ) if 'error:' in output: raise CommandExecutionError( output[output.index('error:') + 7:], ) return output
def function[init, parameter[storage_backend, trust_password, network_address, network_port, storage_create_device, storage_create_loop, storage_pool]]: constant[ Calls lxd init --auto -- opts storage_backend : Storage backend to use (zfs or dir, default: dir) trust_password : Password required to add new clients network_address : None Address to bind LXD to (default: none) network_port : None Port to bind LXD to (Default: 8443) storage_create_device : None Setup device based storage using this DEVICE storage_create_loop : None Setup loop based storage with this SIZE in GB storage_pool : None Storage pool to use or create CLI Examples: To listen on all IPv4/IPv6 Addresses: .. code-block:: bash salt '*' lxd.init dir PaSsW0rD [::] To not listen on Network: .. code-block:: bash salt '*' lxd.init ] variable[cmd] assign[=] call[constant[lxd init --auto --storage-backend="{0}"].format, parameter[name[storage_backend]]] if compare[name[trust_password] is_not constant[None]] begin[:] variable[cmd] assign[=] binary_operation[name[cmd] + call[constant[ --trust-password="{0}"].format, parameter[name[trust_password]]]] if compare[name[network_address] is_not constant[None]] begin[:] variable[cmd] assign[=] binary_operation[name[cmd] + call[constant[ --network-address="{0}"].format, parameter[name[network_address]]]] if compare[name[network_port] is_not constant[None]] begin[:] variable[cmd] assign[=] binary_operation[name[cmd] + call[constant[ --network-port="{0}"].format, parameter[name[network_port]]]] if compare[name[storage_create_device] is_not constant[None]] begin[:] variable[cmd] assign[=] binary_operation[name[cmd] + call[constant[ --storage-create-device="{0}"].format, parameter[name[storage_create_device]]]] if compare[name[storage_create_loop] is_not constant[None]] begin[:] variable[cmd] assign[=] binary_operation[name[cmd] + call[constant[ --storage-create-loop="{0}"].format, parameter[name[storage_create_loop]]]] if compare[name[storage_pool] is_not 
constant[None]] begin[:] variable[cmd] assign[=] binary_operation[name[cmd] + call[constant[ --storage-pool="{0}"].format, parameter[name[storage_pool]]]] <ast.Try object at 0x7da1b1c46860> if compare[constant[error:] in name[output]] begin[:] <ast.Raise object at 0x7da1b1c46770> return[name[output]]
keyword[def] identifier[init] ( identifier[storage_backend] = literal[string] , identifier[trust_password] = keyword[None] , identifier[network_address] = keyword[None] , identifier[network_port] = keyword[None] , identifier[storage_create_device] = keyword[None] , identifier[storage_create_loop] = keyword[None] , identifier[storage_pool] = keyword[None] ): literal[string] identifier[cmd] =( literal[string] literal[string] ). identifier[format] ( identifier[storage_backend] ) keyword[if] identifier[trust_password] keyword[is] keyword[not] keyword[None] : identifier[cmd] = identifier[cmd] + literal[string] . identifier[format] ( identifier[trust_password] ) keyword[if] identifier[network_address] keyword[is] keyword[not] keyword[None] : identifier[cmd] = identifier[cmd] + literal[string] . identifier[format] ( identifier[network_address] ) keyword[if] identifier[network_port] keyword[is] keyword[not] keyword[None] : identifier[cmd] = identifier[cmd] + literal[string] . identifier[format] ( identifier[network_port] ) keyword[if] identifier[storage_create_device] keyword[is] keyword[not] keyword[None] : identifier[cmd] = identifier[cmd] + literal[string] . identifier[format] ( identifier[storage_create_device] ) keyword[if] identifier[storage_create_loop] keyword[is] keyword[not] keyword[None] : identifier[cmd] = identifier[cmd] + literal[string] . identifier[format] ( identifier[storage_create_loop] ) keyword[if] identifier[storage_pool] keyword[is] keyword[not] keyword[None] : identifier[cmd] = identifier[cmd] + literal[string] . identifier[format] ( identifier[storage_pool] ) keyword[try] : identifier[output] = identifier[__salt__] [ literal[string] ]( identifier[cmd] ) keyword[except] identifier[ValueError] keyword[as] identifier[e] : keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[cmd] , identifier[six] . 
identifier[text_type] ( identifier[e] ) ), ) keyword[if] literal[string] keyword[in] identifier[output] : keyword[raise] identifier[CommandExecutionError] ( identifier[output] [ identifier[output] . identifier[index] ( literal[string] )+ literal[int] :], ) keyword[return] identifier[output]
def init(storage_backend='dir', trust_password=None, network_address=None, network_port=None, storage_create_device=None, storage_create_loop=None, storage_pool=None): """ Calls lxd init --auto -- opts storage_backend : Storage backend to use (zfs or dir, default: dir) trust_password : Password required to add new clients network_address : None Address to bind LXD to (default: none) network_port : None Port to bind LXD to (Default: 8443) storage_create_device : None Setup device based storage using this DEVICE storage_create_loop : None Setup loop based storage with this SIZE in GB storage_pool : None Storage pool to use or create CLI Examples: To listen on all IPv4/IPv6 Addresses: .. code-block:: bash salt '*' lxd.init dir PaSsW0rD [::] To not listen on Network: .. code-block:: bash salt '*' lxd.init """ cmd = 'lxd init --auto --storage-backend="{0}"'.format(storage_backend) if trust_password is not None: cmd = cmd + ' --trust-password="{0}"'.format(trust_password) # depends on [control=['if'], data=['trust_password']] if network_address is not None: cmd = cmd + ' --network-address="{0}"'.format(network_address) # depends on [control=['if'], data=['network_address']] if network_port is not None: cmd = cmd + ' --network-port="{0}"'.format(network_port) # depends on [control=['if'], data=['network_port']] if storage_create_device is not None: cmd = cmd + ' --storage-create-device="{0}"'.format(storage_create_device) # depends on [control=['if'], data=['storage_create_device']] if storage_create_loop is not None: cmd = cmd + ' --storage-create-loop="{0}"'.format(storage_create_loop) # depends on [control=['if'], data=['storage_create_loop']] if storage_pool is not None: cmd = cmd + ' --storage-pool="{0}"'.format(storage_pool) # depends on [control=['if'], data=['storage_pool']] try: output = __salt__['cmd.run'](cmd) # depends on [control=['try'], data=[]] except ValueError as e: raise CommandExecutionError("Failed to call: '{0}', error was: {1}".format(cmd, 
six.text_type(e))) # depends on [control=['except'], data=['e']] if 'error:' in output: raise CommandExecutionError(output[output.index('error:') + 7:]) # depends on [control=['if'], data=['output']] return output
def write_gtfs(gtfs, output):
    """
    Write out the database according to the GTFS format.

    Parameters
    ----------
    gtfs: gtfspy.GTFS
    output: str
        Path where to put the GTFS files
        if output ends with ".zip" a ZIP-file is created instead.

    Returns
    -------
    None
    """
    output = os.path.abspath(output)
    uuid_str = "tmp_" + str(uuid.uuid1())
    if output[-4:] == '.zip':
        zip = True
        out_basepath = os.path.dirname(os.path.abspath(output))
        if not os.path.exists(out_basepath):
            raise IOError(out_basepath + " does not exist, cannot write gtfs as a zip")
        # Write into a uniquely-named temp dir, then archive it below.
        tmp_dir = os.path.join(out_basepath, str(uuid_str))
        # zip_file_na,e = ../out_basedir + ".zip
    else:
        zip = False
        out_basepath = output
        tmp_dir = os.path.join(out_basepath + "_" + str(uuid_str))
    os.makedirs(tmp_dir, exist_ok=True)

    gtfs_table_to_writer = {
        "agency": _write_gtfs_agencies,
        "calendar": _write_gtfs_calendar,
        "calendar_dates": _write_gtfs_calendar_dates,
        # fare attributes and fare_rules omitted (seldomly used)
        "feed_info": _write_gtfs_feed_info,
        # "frequencies": not written, as they are incorporated into trips and routes,
        # Frequencies table is expanded into other tables on initial import. -> Thus frequencies.txt is not created
        "routes": _write_gtfs_routes,
        "shapes": _write_gtfs_shapes,
        "stops": _write_gtfs_stops,
        "stop_times": _write_gtfs_stop_times,
        "transfers": _write_gtfs_transfers,
        "trips": _write_gtfs_trips,
    }

    for table, writer in gtfs_table_to_writer.items():
        fname_to_write = os.path.join(tmp_dir, table + '.txt')
        print(fname_to_write)
        # Use a context manager so every handle is flushed and closed before
        # archiving/renaming (the original leaked the handles from open()).
        with open(fname_to_write, 'w') as out_file:
            writer(gtfs, out_file)

    if zip:
        shutil.make_archive(output[:-4], 'zip', tmp_dir)
        shutil.rmtree(tmp_dir)
    else:
        print("moving " + str(tmp_dir) + " to " + out_basepath)
        os.rename(tmp_dir, out_basepath)
def function[write_gtfs, parameter[gtfs, output]]: constant[ Write out the database according to the GTFS format. Parameters ---------- gtfs: gtfspy.GTFS output: str Path where to put the GTFS files if output ends with ".zip" a ZIP-file is created instead. Returns ------- None ] variable[output] assign[=] call[name[os].path.abspath, parameter[name[output]]] variable[uuid_str] assign[=] binary_operation[constant[tmp_] + call[name[str], parameter[call[name[uuid].uuid1, parameter[]]]]] if compare[call[name[output]][<ast.Slice object at 0x7da1b01375b0>] equal[==] constant[.zip]] begin[:] variable[zip] assign[=] constant[True] variable[out_basepath] assign[=] call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[output]]]]] if <ast.UnaryOp object at 0x7da1b01348b0> begin[:] <ast.Raise object at 0x7da1b0136260> variable[tmp_dir] assign[=] call[name[os].path.join, parameter[name[out_basepath], call[name[str], parameter[name[uuid_str]]]]] call[name[os].makedirs, parameter[name[tmp_dir]]] variable[gtfs_table_to_writer] assign[=] dictionary[[<ast.Constant object at 0x7da1b0135300>, <ast.Constant object at 0x7da1b01375e0>, <ast.Constant object at 0x7da1b01345b0>, <ast.Constant object at 0x7da1b0137580>, <ast.Constant object at 0x7da1b0137820>, <ast.Constant object at 0x7da1b0134e80>, <ast.Constant object at 0x7da1b00089d0>, <ast.Constant object at 0x7da1b0009240>, <ast.Constant object at 0x7da1b0008070>, <ast.Constant object at 0x7da1b0008790>], [<ast.Name object at 0x7da1b0009990>, <ast.Name object at 0x7da1b0008430>, <ast.Name object at 0x7da1b00084c0>, <ast.Name object at 0x7da1b0008730>, <ast.Name object at 0x7da1b0008160>, <ast.Name object at 0x7da1b0008400>, <ast.Name object at 0x7da1b0008460>, <ast.Name object at 0x7da1b0008490>, <ast.Name object at 0x7da1b0009930>, <ast.Name object at 0x7da1b0009720>]] for taget[tuple[[<ast.Name object at 0x7da1b0008850>, <ast.Name object at 0x7da1b00087f0>]]] in 
starred[call[name[gtfs_table_to_writer].items, parameter[]]] begin[:] variable[fname_to_write] assign[=] call[name[os].path.join, parameter[name[tmp_dir], binary_operation[name[table] + constant[.txt]]]] call[name[print], parameter[name[fname_to_write]]] call[name[writer], parameter[name[gtfs], call[name[open], parameter[call[name[os].path.join, parameter[name[tmp_dir], binary_operation[name[table] + constant[.txt]]]], constant[w]]]]] if name[zip] begin[:] call[name[shutil].make_archive, parameter[call[name[output]][<ast.Slice object at 0x7da1b0008e50>], constant[zip], name[tmp_dir]]] call[name[shutil].rmtree, parameter[name[tmp_dir]]]
keyword[def] identifier[write_gtfs] ( identifier[gtfs] , identifier[output] ): literal[string] identifier[output] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[output] ) identifier[uuid_str] = literal[string] + identifier[str] ( identifier[uuid] . identifier[uuid1] ()) keyword[if] identifier[output] [- literal[int] :]== literal[string] : identifier[zip] = keyword[True] identifier[out_basepath] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[output] )) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[out_basepath] ): keyword[raise] identifier[IOError] ( identifier[out_basepath] + literal[string] ) identifier[tmp_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_basepath] , identifier[str] ( identifier[uuid_str] )) keyword[else] : identifier[zip] = keyword[False] identifier[out_basepath] = identifier[output] identifier[tmp_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_basepath] + literal[string] + identifier[str] ( identifier[uuid_str] )) identifier[os] . identifier[makedirs] ( identifier[tmp_dir] , identifier[exist_ok] = keyword[True] ) identifier[gtfs_table_to_writer] ={ literal[string] : identifier[_write_gtfs_agencies] , literal[string] : identifier[_write_gtfs_calendar] , literal[string] : identifier[_write_gtfs_calendar_dates] , literal[string] : identifier[_write_gtfs_feed_info] , literal[string] : identifier[_write_gtfs_routes] , literal[string] : identifier[_write_gtfs_shapes] , literal[string] : identifier[_write_gtfs_stops] , literal[string] : identifier[_write_gtfs_stop_times] , literal[string] : identifier[_write_gtfs_transfers] , literal[string] : identifier[_write_gtfs_trips] , } keyword[for] identifier[table] , identifier[writer] keyword[in] identifier[gtfs_table_to_writer] . identifier[items] (): identifier[fname_to_write] = identifier[os] . 
identifier[path] . identifier[join] ( identifier[tmp_dir] , identifier[table] + literal[string] ) identifier[print] ( identifier[fname_to_write] ) identifier[writer] ( identifier[gtfs] , identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[tmp_dir] , identifier[table] + literal[string] ), literal[string] )) keyword[if] identifier[zip] : identifier[shutil] . identifier[make_archive] ( identifier[output] [:- literal[int] ], literal[string] , identifier[tmp_dir] ) identifier[shutil] . identifier[rmtree] ( identifier[tmp_dir] ) keyword[else] : identifier[print] ( literal[string] + identifier[str] ( identifier[tmp_dir] )+ literal[string] + identifier[out_basepath] ) identifier[os] . identifier[rename] ( identifier[tmp_dir] , identifier[out_basepath] )
def write_gtfs(gtfs, output): """ Write out the database according to the GTFS format. Parameters ---------- gtfs: gtfspy.GTFS output: str Path where to put the GTFS files if output ends with ".zip" a ZIP-file is created instead. Returns ------- None """ output = os.path.abspath(output) uuid_str = 'tmp_' + str(uuid.uuid1()) if output[-4:] == '.zip': zip = True out_basepath = os.path.dirname(os.path.abspath(output)) if not os.path.exists(out_basepath): raise IOError(out_basepath + ' does not exist, cannot write gtfs as a zip') # depends on [control=['if'], data=[]] tmp_dir = os.path.join(out_basepath, str(uuid_str)) # depends on [control=['if'], data=[]] else: # zip_file_na,e = ../out_basedir + ".zip zip = False out_basepath = output tmp_dir = os.path.join(out_basepath + '_' + str(uuid_str)) os.makedirs(tmp_dir, exist_ok=True) # fare attributes and fare_rules omitted (seldomly used) # "frequencies": not written, as they are incorporated into trips and routes, # Frequencies table is expanded into other tables on initial import. -> Thus frequencies.txt is not created gtfs_table_to_writer = {'agency': _write_gtfs_agencies, 'calendar': _write_gtfs_calendar, 'calendar_dates': _write_gtfs_calendar_dates, 'feed_info': _write_gtfs_feed_info, 'routes': _write_gtfs_routes, 'shapes': _write_gtfs_shapes, 'stops': _write_gtfs_stops, 'stop_times': _write_gtfs_stop_times, 'transfers': _write_gtfs_transfers, 'trips': _write_gtfs_trips} for (table, writer) in gtfs_table_to_writer.items(): fname_to_write = os.path.join(tmp_dir, table + '.txt') print(fname_to_write) writer(gtfs, open(os.path.join(tmp_dir, table + '.txt'), 'w')) # depends on [control=['for'], data=[]] if zip: shutil.make_archive(output[:-4], 'zip', tmp_dir) shutil.rmtree(tmp_dir) # depends on [control=['if'], data=[]] else: print('moving ' + str(tmp_dir) + ' to ' + out_basepath) os.rename(tmp_dir, out_basepath)
def compile_args(args, kwargs, sep, prefix):
    """ takes args and kwargs, as they were passed into the command instance
    being executed with __call__, and compose them into a flat list that
    will eventually be fed into exec.  example:

    with this call:

        sh.ls("-l", "/tmp", color="never")

    this function receives

        args = ['-l', '/tmp']
        kwargs = {'color': 'never'}

    and produces

        ['-l', '/tmp', '--color=never']

    """
    encode = encode_to_py3bytes_or_py2str
    flattened = []

    # flatten the positional arguments
    for arg in args:
        if isinstance(arg, (list, tuple)):
            # an empty glob result is passed through as its literal pattern
            if isinstance(arg, GlobResults) and not arg:
                arg = [arg.path]
            flattened.extend(encode(element) for element in arg)
        elif isinstance(arg, dict):
            flattened.extend(aggregate_keywords(arg, sep, prefix, raw=True))
        else:
            flattened.append(encode(arg))

    # then fold in the keyword arguments
    flattened.extend(aggregate_keywords(kwargs, sep, prefix))
    return flattened
def function[compile_args, parameter[args, kwargs, sep, prefix]]: constant[ takes args and kwargs, as they were passed into the command instance being executed with __call__, and compose them into a flat list that will eventually be fed into exec. example: with this call: sh.ls("-l", "/tmp", color="never") this function receives args = ['-l', '/tmp'] kwargs = {'color': 'never'} and produces ['-l', '/tmp', '--color=never'] ] variable[processed_args] assign[=] list[[]] variable[encode] assign[=] name[encode_to_py3bytes_or_py2str] for taget[name[arg]] in starred[name[args]] begin[:] if call[name[isinstance], parameter[name[arg], tuple[[<ast.Name object at 0x7da1b21daaa0>, <ast.Name object at 0x7da1b21da6b0>]]]] begin[:] if <ast.BoolOp object at 0x7da1b21da5c0> begin[:] variable[arg] assign[=] list[[<ast.Attribute object at 0x7da1b21da530>]] for taget[name[sub_arg]] in starred[name[arg]] begin[:] call[name[processed_args].append, parameter[call[name[encode], parameter[name[sub_arg]]]]] <ast.AugAssign object at 0x7da1b21dbd90> return[name[processed_args]]
keyword[def] identifier[compile_args] ( identifier[args] , identifier[kwargs] , identifier[sep] , identifier[prefix] ): literal[string] identifier[processed_args] =[] identifier[encode] = identifier[encode_to_py3bytes_or_py2str] keyword[for] identifier[arg] keyword[in] identifier[args] : keyword[if] identifier[isinstance] ( identifier[arg] ,( identifier[list] , identifier[tuple] )): keyword[if] identifier[isinstance] ( identifier[arg] , identifier[GlobResults] ) keyword[and] keyword[not] identifier[arg] : identifier[arg] =[ identifier[arg] . identifier[path] ] keyword[for] identifier[sub_arg] keyword[in] identifier[arg] : identifier[processed_args] . identifier[append] ( identifier[encode] ( identifier[sub_arg] )) keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[dict] ): identifier[processed_args] += identifier[aggregate_keywords] ( identifier[arg] , identifier[sep] , identifier[prefix] , identifier[raw] = keyword[True] ) keyword[else] : identifier[processed_args] . identifier[append] ( identifier[encode] ( identifier[arg] )) identifier[processed_args] += identifier[aggregate_keywords] ( identifier[kwargs] , identifier[sep] , identifier[prefix] ) keyword[return] identifier[processed_args]
def compile_args(args, kwargs, sep, prefix): """ takes args and kwargs, as they were passed into the command instance being executed with __call__, and compose them into a flat list that will eventually be fed into exec. example: with this call: sh.ls("-l", "/tmp", color="never") this function receives args = ['-l', '/tmp'] kwargs = {'color': 'never'} and produces ['-l', '/tmp', '--color=never'] """ processed_args = [] encode = encode_to_py3bytes_or_py2str # aggregate positional args for arg in args: if isinstance(arg, (list, tuple)): if isinstance(arg, GlobResults) and (not arg): arg = [arg.path] # depends on [control=['if'], data=[]] for sub_arg in arg: processed_args.append(encode(sub_arg)) # depends on [control=['for'], data=['sub_arg']] # depends on [control=['if'], data=[]] elif isinstance(arg, dict): processed_args += aggregate_keywords(arg, sep, prefix, raw=True) # depends on [control=['if'], data=[]] else: processed_args.append(encode(arg)) # depends on [control=['for'], data=['arg']] # aggregate the keyword arguments processed_args += aggregate_keywords(kwargs, sep, prefix) return processed_args
def accept_line(self, logevent):
    """Return True if the log line has the nominated yaxis field."""
    if not self.regex_mode:
        # plain mode: the field must be present and non-None on the event
        return getattr(logevent, self.field) is not None
    # regex mode: treat the field as a pattern over the raw line text
    return re.search(self.field, logevent.line_str) is not None
def function[accept_line, parameter[self, logevent]]: constant[Return True if the log line has the nominated yaxis field.] if name[self].regex_mode begin[:] return[call[name[bool], parameter[call[name[re].search, parameter[name[self].field, name[logevent].line_str]]]]]
keyword[def] identifier[accept_line] ( identifier[self] , identifier[logevent] ): literal[string] keyword[if] identifier[self] . identifier[regex_mode] : keyword[return] identifier[bool] ( identifier[re] . identifier[search] ( identifier[self] . identifier[field] , identifier[logevent] . identifier[line_str] )) keyword[else] : keyword[return] identifier[getattr] ( identifier[logevent] , identifier[self] . identifier[field] ) keyword[is] keyword[not] keyword[None]
def accept_line(self, logevent): """Return True if the log line has the nominated yaxis field.""" if self.regex_mode: return bool(re.search(self.field, logevent.line_str)) # depends on [control=['if'], data=[]] else: return getattr(logevent, self.field) is not None
def crval(self):
    """
    Get the world coordinate of the reference pixel.

    Falls back to the RA-DEG/DEC-DEG header keywords when the WCS object
    does not provide crval1/crval2.

    @rtype: float, float
    @raise KeyError: if the WCS fallback keywords are also missing.
    """
    try:
        return self.wcs.crval1, self.wcs.crval2
    except Exception as ex:
        logging.debug("Couldn't get CRVAL from WCS: {}".format(ex))
        logging.debug("Trying RA/DEC values")
        try:
            return (float(self['RA-DEG']), float(self['DEC-DEG']))
        except KeyError as ke:
            # BUG FIX: the original built this KeyError without raising it,
            # so the method silently returned None; re-raise so callers see
            # the missing keyword.
            raise KeyError("Can't build CRVAL1/2 missing keyword: {}".format(ke.args[0]))
def function[crval, parameter[self]]: constant[ Get the world coordinate of the reference pixel. @rtype: float, float ] <ast.Try object at 0x7da1b1a2d9f0> <ast.Try object at 0x7da1b198d300>
keyword[def] identifier[crval] ( identifier[self] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[wcs] . identifier[crval1] , identifier[self] . identifier[wcs] . identifier[crval2] keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[ex] )) identifier[logging] . identifier[debug] ( literal[string] ) keyword[try] : keyword[return] ( identifier[float] ( identifier[self] [ literal[string] ]), identifier[float] ( identifier[self] [ literal[string] ])) keyword[except] identifier[KeyError] keyword[as] identifier[ke] : identifier[KeyError] ( literal[string] . identifier[format] ( identifier[ke] . identifier[args] [ literal[int] ]))
def crval(self): """ Get the world coordinate of the reference pixel. @rtype: float, float """ try: return (self.wcs.crval1, self.wcs.crval2) # depends on [control=['try'], data=[]] except Exception as ex: logging.debug("Couldn't get CRVAL from WCS: {}".format(ex)) logging.debug('Trying RA/DEC values') # depends on [control=['except'], data=['ex']] try: return (float(self['RA-DEG']), float(self['DEC-DEG'])) # depends on [control=['try'], data=[]] except KeyError as ke: KeyError("Can't build CRVAL1/2 missing keyword: {}".format(ke.args[0])) # depends on [control=['except'], data=['ke']]
def from_config(config, **options):
    """Instantiate an `LogEventStore` from config.

    Parameters:
    _config   -- the configuration file options read from file(s).
    **options -- various options given to the specific event store. Shall
                 not be used with this event store. Warning will be logged
                 for every extra non-recognized option. The only required
                 key to this function is 'path'.

    returns -- a newly instantiated `LogEventStore`.
    """
    # 'path' is the sole recognized option; anything else is flagged here.
    rconfig.check_config_options("LogEventStore", ('path',), tuple(), options)
    return LogEventStore(options['path'])
def function[from_config, parameter[config]]: constant[Instantiate an `LogEventStore` from config. Parameters: _config -- the configuration file options read from file(s). **options -- various options given to the specific event store. Shall not be used with this event store. Warning will be logged for every extra non-recognized option. The only required key to this function is 'path'. returns -- a newly instantiated `LogEventStore`. ] variable[expected_args] assign[=] tuple[[<ast.Constant object at 0x7da1b244af20>]] call[name[rconfig].check_config_options, parameter[constant[LogEventStore], name[expected_args], call[name[tuple], parameter[]], name[options]]] return[call[name[LogEventStore], parameter[call[name[options]][constant[path]]]]]
keyword[def] identifier[from_config] ( identifier[config] ,** identifier[options] ): literal[string] identifier[expected_args] =( literal[string] ,) identifier[rconfig] . identifier[check_config_options] ( literal[string] , identifier[expected_args] , identifier[tuple] (), identifier[options] ) keyword[return] identifier[LogEventStore] ( identifier[options] [ literal[string] ])
def from_config(config, **options): """Instantiate an `LogEventStore` from config. Parameters: _config -- the configuration file options read from file(s). **options -- various options given to the specific event store. Shall not be used with this event store. Warning will be logged for every extra non-recognized option. The only required key to this function is 'path'. returns -- a newly instantiated `LogEventStore`. """ expected_args = ('path',) rconfig.check_config_options('LogEventStore', expected_args, tuple(), options) return LogEventStore(options['path'])
def parse_mapping(mapping_file: Optional[str]) -> configparser.ConfigParser:
    """
    Parse the file containing the mappings from hosts to pass entries.

    Args:
        mapping_file:
            Name of the file to parse. If ``None``, the default file from
            the XDG location is used.
    """
    LOGGER.debug('Parsing mapping file. Command line: %s', mapping_file)

    def _load(handle):
        parser = configparser.ConfigParser()
        parser.read_file(handle)
        return parser

    # A file given on the command line takes precedence over XDG lookup.
    if mapping_file is not None:
        LOGGER.debug('Parsing command line mapping file')
        return _load(mapping_file)

    xdg_config_dir = xdg.BaseDirectory.load_first_config('pass-git-helper')
    if xdg_config_dir is None:
        raise RuntimeError(
            'No mapping configured so far at any XDG config location. '
            'Please create {config_file}'.format(
                config_file=DEFAULT_CONFIG_FILE))

    default_path = os.path.join(xdg_config_dir, CONFIG_FILE_NAME)
    LOGGER.debug('Parsing mapping file %s', default_path)
    with open(default_path, 'r') as file_handle:
        return _load(file_handle)
def function[parse_mapping, parameter[mapping_file]]: constant[ Parse the file containing the mappings from hosts to pass entries. Args: mapping_file: Name of the file to parse. If ``None``, the default file from the XDG location is used. ] call[name[LOGGER].debug, parameter[constant[Parsing mapping file. Command line: %s], name[mapping_file]]] def function[parse, parameter[mapping_file]]: variable[config] assign[=] call[name[configparser].ConfigParser, parameter[]] call[name[config].read_file, parameter[name[mapping_file]]] return[name[config]] if compare[name[mapping_file] is_not constant[None]] begin[:] call[name[LOGGER].debug, parameter[constant[Parsing command line mapping file]]] return[call[name[parse], parameter[name[mapping_file]]]] variable[xdg_config_dir] assign[=] call[name[xdg].BaseDirectory.load_first_config, parameter[constant[pass-git-helper]]] if compare[name[xdg_config_dir] is constant[None]] begin[:] <ast.Raise object at 0x7da20c6c40d0> variable[mapping_file] assign[=] call[name[os].path.join, parameter[name[xdg_config_dir], name[CONFIG_FILE_NAME]]] call[name[LOGGER].debug, parameter[constant[Parsing mapping file %s], name[mapping_file]]] with call[name[open], parameter[name[mapping_file], constant[r]]] begin[:] return[call[name[parse], parameter[name[file_handle]]]]
keyword[def] identifier[parse_mapping] ( identifier[mapping_file] : identifier[Optional] [ identifier[str] ])-> identifier[configparser] . identifier[ConfigParser] : literal[string] identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[mapping_file] ) keyword[def] identifier[parse] ( identifier[mapping_file] ): identifier[config] = identifier[configparser] . identifier[ConfigParser] () identifier[config] . identifier[read_file] ( identifier[mapping_file] ) keyword[return] identifier[config] keyword[if] identifier[mapping_file] keyword[is] keyword[not] keyword[None] : identifier[LOGGER] . identifier[debug] ( literal[string] ) keyword[return] identifier[parse] ( identifier[mapping_file] ) identifier[xdg_config_dir] = identifier[xdg] . identifier[BaseDirectory] . identifier[load_first_config] ( literal[string] ) keyword[if] identifier[xdg_config_dir] keyword[is] keyword[None] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] . identifier[format] ( identifier[config_file] = identifier[DEFAULT_CONFIG_FILE] )) identifier[mapping_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[xdg_config_dir] , identifier[CONFIG_FILE_NAME] ) identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[mapping_file] ) keyword[with] identifier[open] ( identifier[mapping_file] , literal[string] ) keyword[as] identifier[file_handle] : keyword[return] identifier[parse] ( identifier[file_handle] )
def parse_mapping(mapping_file: Optional[str]) -> configparser.ConfigParser: """ Parse the file containing the mappings from hosts to pass entries. Args: mapping_file: Name of the file to parse. If ``None``, the default file from the XDG location is used. """ LOGGER.debug('Parsing mapping file. Command line: %s', mapping_file) def parse(mapping_file): config = configparser.ConfigParser() config.read_file(mapping_file) return config # give precedence to the user-specified file if mapping_file is not None: LOGGER.debug('Parsing command line mapping file') return parse(mapping_file) # depends on [control=['if'], data=['mapping_file']] # fall back on XDG config location xdg_config_dir = xdg.BaseDirectory.load_first_config('pass-git-helper') if xdg_config_dir is None: raise RuntimeError('No mapping configured so far at any XDG config location. Please create {config_file}'.format(config_file=DEFAULT_CONFIG_FILE)) # depends on [control=['if'], data=[]] mapping_file = os.path.join(xdg_config_dir, CONFIG_FILE_NAME) LOGGER.debug('Parsing mapping file %s', mapping_file) with open(mapping_file, 'r') as file_handle: return parse(file_handle) # depends on [control=['with'], data=['file_handle']]
def _read_preference(self, allow_secondary):
    """ Return the mongo read preference given an 'allow_secondary' argument
    """
    # None means "use the store's configured default".
    if allow_secondary is None:
        allow_secondary = self._allow_secondary
    if allow_secondary:
        return ReadPreference.NEAREST
    return ReadPreference.PRIMARY
def function[_read_preference, parameter[self, allow_secondary]]: constant[ Return the mongo read preference given an 'allow_secondary' argument ] variable[allow_secondary] assign[=] <ast.IfExp object at 0x7da20c76c550> return[<ast.IfExp object at 0x7da20c76ebc0>]
keyword[def] identifier[_read_preference] ( identifier[self] , identifier[allow_secondary] ): literal[string] identifier[allow_secondary] = identifier[self] . identifier[_allow_secondary] keyword[if] identifier[allow_secondary] keyword[is] keyword[None] keyword[else] identifier[allow_secondary] keyword[return] identifier[ReadPreference] . identifier[NEAREST] keyword[if] identifier[allow_secondary] keyword[else] identifier[ReadPreference] . identifier[PRIMARY]
def _read_preference(self, allow_secondary): """ Return the mongo read preference given an 'allow_secondary' argument """ allow_secondary = self._allow_secondary if allow_secondary is None else allow_secondary return ReadPreference.NEAREST if allow_secondary else ReadPreference.PRIMARY
def setHierarchyLookup(self, columnName, tableType=None):
    """
    Sets the hierarchy lookup for the inputed table type and column.

    :param      columnName | <str>
                tableType  | <subclass of Table> || None
                             (defaults to self.tableType() when omitted)
    """
    # BUG FIX: the original tested `if tableType:` and so clobbered any
    # explicitly supplied tableType with self.tableType() while leaving a
    # missing one as None; fall back only when no tableType was given.
    if tableType is None:
        tableType = self.tableType()

    self._hierarchyLookup[tableType] = (tableType, columnName)
def function[setHierarchyLookup, parameter[self, columnName, tableType]]: constant[ Sets the hierarchy lookup for the inputed table type and column. :param columnName | <str> tableType | <subclass of Table> ] if name[tableType] begin[:] variable[tableType] assign[=] call[name[self].tableType, parameter[]] call[name[self]._hierarchyLookup][name[tableType]] assign[=] tuple[[<ast.Name object at 0x7da1b2469b70>, <ast.Name object at 0x7da1b246b1c0>]]
keyword[def] identifier[setHierarchyLookup] ( identifier[self] , identifier[columnName] , identifier[tableType] = keyword[None] ): literal[string] keyword[if] identifier[tableType] : identifier[tableType] = identifier[self] . identifier[tableType] () identifier[self] . identifier[_hierarchyLookup] [ identifier[tableType] ]=( identifier[tableType] , identifier[columnName] )
def setHierarchyLookup(self, columnName, tableType=None): """ Sets the hierarchy lookup for the inputed table type and column. :param columnName | <str> tableType | <subclass of Table> """ if tableType: tableType = self.tableType() # depends on [control=['if'], data=[]] self._hierarchyLookup[tableType] = (tableType, columnName)
def _get_sorted_methods(self, methods): """Get a copy of 'methods' sorted the way they would be on the live server. Args: methods: JSON configuration of an API's methods. Returns: The same configuration with the methods sorted based on what order they'll be checked by the server. """ if not methods: return methods # Comparison function we'll use to sort the methods: def _sorted_methods_comparison(method_info1, method_info2): """Sort method info by path and http_method. Args: method_info1: Method name and info for the first method to compare. method_info2: Method name and info for the method to compare to. Returns: Negative if the first method should come first, positive if the first method should come after the second. Zero if they're equivalent. """ def _score_path(path): """Calculate the score for this path, used for comparisons. Higher scores have priority, and if scores are equal, the path text is sorted alphabetically. Scores are based on the number and location of the constant parts of the path. The server has some special handling for variables with regexes, which we don't handle here. Args: path: The request path that we're calculating a score for. Returns: The score for the given path. """ score = 0 parts = path.split('/') for part in parts: score <<= 1 if not part or part[0] != '{': # Found a constant. score += 1 # Shift by 31 instead of 32 because some (!) versions of Python like # to convert the int to a long if we shift by 32, and the sorted() # function that uses this blows up if it receives anything but an int. score <<= 31 - len(parts) return score # Higher path scores come first. path_score1 = _score_path(method_info1[1].get('path', '')) path_score2 = _score_path(method_info2[1].get('path', '')) if path_score1 != path_score2: return path_score2 - path_score1 # Compare by path text next, sorted alphabetically. 
path_result = cmp(method_info1[1].get('path', ''), method_info2[1].get('path', '')) if path_result != 0: return path_result # All else being equal, sort by HTTP method. method_result = cmp(method_info1[1].get('httpMethod', ''), method_info2[1].get('httpMethod', '')) return method_result return sorted(methods.items(), _sorted_methods_comparison)
def function[_get_sorted_methods, parameter[self, methods]]: constant[Get a copy of 'methods' sorted the way they would be on the live server. Args: methods: JSON configuration of an API's methods. Returns: The same configuration with the methods sorted based on what order they'll be checked by the server. ] if <ast.UnaryOp object at 0x7da1b0ec1d50> begin[:] return[name[methods]] def function[_sorted_methods_comparison, parameter[method_info1, method_info2]]: constant[Sort method info by path and http_method. Args: method_info1: Method name and info for the first method to compare. method_info2: Method name and info for the method to compare to. Returns: Negative if the first method should come first, positive if the first method should come after the second. Zero if they're equivalent. ] def function[_score_path, parameter[path]]: constant[Calculate the score for this path, used for comparisons. Higher scores have priority, and if scores are equal, the path text is sorted alphabetically. Scores are based on the number and location of the constant parts of the path. The server has some special handling for variables with regexes, which we don't handle here. Args: path: The request path that we're calculating a score for. Returns: The score for the given path. 
] variable[score] assign[=] constant[0] variable[parts] assign[=] call[name[path].split, parameter[constant[/]]] for taget[name[part]] in starred[name[parts]] begin[:] <ast.AugAssign object at 0x7da1b0ec2290> if <ast.BoolOp object at 0x7da1b0ec1c30> begin[:] <ast.AugAssign object at 0x7da1b0ec2800> <ast.AugAssign object at 0x7da1b0ec1360> return[name[score]] variable[path_score1] assign[=] call[name[_score_path], parameter[call[call[name[method_info1]][constant[1]].get, parameter[constant[path], constant[]]]]] variable[path_score2] assign[=] call[name[_score_path], parameter[call[call[name[method_info2]][constant[1]].get, parameter[constant[path], constant[]]]]] if compare[name[path_score1] not_equal[!=] name[path_score2]] begin[:] return[binary_operation[name[path_score2] - name[path_score1]]] variable[path_result] assign[=] call[name[cmp], parameter[call[call[name[method_info1]][constant[1]].get, parameter[constant[path], constant[]]], call[call[name[method_info2]][constant[1]].get, parameter[constant[path], constant[]]]]] if compare[name[path_result] not_equal[!=] constant[0]] begin[:] return[name[path_result]] variable[method_result] assign[=] call[name[cmp], parameter[call[call[name[method_info1]][constant[1]].get, parameter[constant[httpMethod], constant[]]], call[call[name[method_info2]][constant[1]].get, parameter[constant[httpMethod], constant[]]]]] return[name[method_result]] return[call[name[sorted], parameter[call[name[methods].items, parameter[]], name[_sorted_methods_comparison]]]]
keyword[def] identifier[_get_sorted_methods] ( identifier[self] , identifier[methods] ): literal[string] keyword[if] keyword[not] identifier[methods] : keyword[return] identifier[methods] keyword[def] identifier[_sorted_methods_comparison] ( identifier[method_info1] , identifier[method_info2] ): literal[string] keyword[def] identifier[_score_path] ( identifier[path] ): literal[string] identifier[score] = literal[int] identifier[parts] = identifier[path] . identifier[split] ( literal[string] ) keyword[for] identifier[part] keyword[in] identifier[parts] : identifier[score] <<= literal[int] keyword[if] keyword[not] identifier[part] keyword[or] identifier[part] [ literal[int] ]!= literal[string] : identifier[score] += literal[int] identifier[score] <<= literal[int] - identifier[len] ( identifier[parts] ) keyword[return] identifier[score] identifier[path_score1] = identifier[_score_path] ( identifier[method_info1] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] )) identifier[path_score2] = identifier[_score_path] ( identifier[method_info2] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] )) keyword[if] identifier[path_score1] != identifier[path_score2] : keyword[return] identifier[path_score2] - identifier[path_score1] identifier[path_result] = identifier[cmp] ( identifier[method_info1] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ), identifier[method_info2] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] )) keyword[if] identifier[path_result] != literal[int] : keyword[return] identifier[path_result] identifier[method_result] = identifier[cmp] ( identifier[method_info1] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ), identifier[method_info2] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] )) keyword[return] identifier[method_result] keyword[return] identifier[sorted] ( identifier[methods] . 
identifier[items] (), identifier[_sorted_methods_comparison] )
def _get_sorted_methods(self, methods): """Get a copy of 'methods' sorted the way they would be on the live server. Args: methods: JSON configuration of an API's methods. Returns: The same configuration with the methods sorted based on what order they'll be checked by the server. """ if not methods: return methods # depends on [control=['if'], data=[]] # Comparison function we'll use to sort the methods: def _sorted_methods_comparison(method_info1, method_info2): """Sort method info by path and http_method. Args: method_info1: Method name and info for the first method to compare. method_info2: Method name and info for the method to compare to. Returns: Negative if the first method should come first, positive if the first method should come after the second. Zero if they're equivalent. """ def _score_path(path): """Calculate the score for this path, used for comparisons. Higher scores have priority, and if scores are equal, the path text is sorted alphabetically. Scores are based on the number and location of the constant parts of the path. The server has some special handling for variables with regexes, which we don't handle here. Args: path: The request path that we're calculating a score for. Returns: The score for the given path. """ score = 0 parts = path.split('/') for part in parts: score <<= 1 if not part or part[0] != '{': # Found a constant. score += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['part']] # Shift by 31 instead of 32 because some (!) versions of Python like # to convert the int to a long if we shift by 32, and the sorted() # function that uses this blows up if it receives anything but an int. score <<= 31 - len(parts) return score # Higher path scores come first. 
path_score1 = _score_path(method_info1[1].get('path', '')) path_score2 = _score_path(method_info2[1].get('path', '')) if path_score1 != path_score2: return path_score2 - path_score1 # depends on [control=['if'], data=['path_score1', 'path_score2']] # Compare by path text next, sorted alphabetically. path_result = cmp(method_info1[1].get('path', ''), method_info2[1].get('path', '')) if path_result != 0: return path_result # depends on [control=['if'], data=['path_result']] # All else being equal, sort by HTTP method. method_result = cmp(method_info1[1].get('httpMethod', ''), method_info2[1].get('httpMethod', '')) return method_result return sorted(methods.items(), _sorted_methods_comparison)
def first_or_create(self, _attributes=None, **attributes):
    """
    Return the first related record matching the given attributes,
    creating it when no match exists.

    :param _attributes: Extra attributes merged into ``attributes``
    :type _attributes: dict

    :rtype: Model
    """
    if _attributes is not None:
        attributes.update(_attributes)
    record = self.where(attributes).first()
    return record if record is not None else self.create(**attributes)
def function[first_or_create, parameter[self, _attributes]]: constant[ Get the first related record matching the attributes or create it. :param attributes: The attributes :type attributes: dict :rtype: Model ] if compare[name[_attributes] is_not constant[None]] begin[:] call[name[attributes].update, parameter[name[_attributes]]] variable[instance] assign[=] call[call[name[self].where, parameter[name[attributes]]].first, parameter[]] if compare[name[instance] is constant[None]] begin[:] variable[instance] assign[=] call[name[self].create, parameter[]] return[name[instance]]
keyword[def] identifier[first_or_create] ( identifier[self] , identifier[_attributes] = keyword[None] ,** identifier[attributes] ): literal[string] keyword[if] identifier[_attributes] keyword[is] keyword[not] keyword[None] : identifier[attributes] . identifier[update] ( identifier[_attributes] ) identifier[instance] = identifier[self] . identifier[where] ( identifier[attributes] ). identifier[first] () keyword[if] identifier[instance] keyword[is] keyword[None] : identifier[instance] = identifier[self] . identifier[create] (** identifier[attributes] ) keyword[return] identifier[instance]
def first_or_create(self, _attributes=None, **attributes): """ Get the first related record matching the attributes or create it. :param attributes: The attributes :type attributes: dict :rtype: Model """ if _attributes is not None: attributes.update(_attributes) # depends on [control=['if'], data=['_attributes']] instance = self.where(attributes).first() if instance is None: instance = self.create(**attributes) # depends on [control=['if'], data=['instance']] return instance
def paths_to_polygons(paths, scale=None):
    """
    Given a sequence of connected points turn them into valid
    shapely Polygon objects.

    Parameters
    -----------
    paths : (n,) sequence
      Of (m,2) float, closed paths
    scale: float
      Approximate scale of drawing for precision

    Returns
    -----------
    polys: (p,) list
      shapely.geometry.Polygon
      None
    """
    # one slot per input path; entries that can't be turned into a
    # valid polygon are left as None
    result = [None] * len(paths)
    for index, vertices in enumerate(paths):
        # a closed loop repeats its first vertex, so anything shorter
        # than 4 points can never enclose a nonzero area
        if len(vertices) >= 4:
            try:
                result[index] = repair_invalid(Polygon(vertices), scale)
            except ValueError:
                # raised when a polygon is unrecoverable; leave it None
                pass
            except BaseException:
                # anything else is unexpected -- log it and keep going
                log.error('unrecoverable polygon', exc_info=True)
    return np.array(result)
def function[paths_to_polygons, parameter[paths, scale]]: constant[ Given a sequence of connected points turn them into valid shapely Polygon objects. Parameters ----------- paths : (n,) sequence Of (m,2) float, closed paths scale: float Approximate scale of drawing for precision Returns ----------- polys: (p,) list shapely.geometry.Polygon None ] variable[polygons] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b22bac80>]] * call[name[len], parameter[name[paths]]]] for taget[tuple[[<ast.Name object at 0x7da1b22b97e0>, <ast.Name object at 0x7da1b22b9de0>]]] in starred[call[name[enumerate], parameter[name[paths]]]] begin[:] if compare[call[name[len], parameter[name[path]]] less[<] constant[4]] begin[:] continue <ast.Try object at 0x7da1b22b8700> variable[polygons] assign[=] call[name[np].array, parameter[name[polygons]]] return[name[polygons]]
keyword[def] identifier[paths_to_polygons] ( identifier[paths] , identifier[scale] = keyword[None] ): literal[string] identifier[polygons] =[ keyword[None] ]* identifier[len] ( identifier[paths] ) keyword[for] identifier[i] , identifier[path] keyword[in] identifier[enumerate] ( identifier[paths] ): keyword[if] identifier[len] ( identifier[path] )< literal[int] : keyword[continue] keyword[try] : identifier[polygons] [ identifier[i] ]= identifier[repair_invalid] ( identifier[Polygon] ( identifier[path] ), identifier[scale] ) keyword[except] identifier[ValueError] : keyword[continue] keyword[except] identifier[BaseException] : identifier[log] . identifier[error] ( literal[string] , identifier[exc_info] = keyword[True] ) identifier[polygons] = identifier[np] . identifier[array] ( identifier[polygons] ) keyword[return] identifier[polygons]
def paths_to_polygons(paths, scale=None): """ Given a sequence of connected points turn them into valid shapely Polygon objects. Parameters ----------- paths : (n,) sequence Of (m,2) float, closed paths scale: float Approximate scale of drawing for precision Returns ----------- polys: (p,) list shapely.geometry.Polygon None """ polygons = [None] * len(paths) for (i, path) in enumerate(paths): if len(path) < 4: # since the first and last vertices are identical in # a closed loop a 4 vertex path is the minimum for # non-zero area continue # depends on [control=['if'], data=[]] try: polygons[i] = repair_invalid(Polygon(path), scale) # depends on [control=['try'], data=[]] except ValueError: # raised if a polygon is unrecoverable continue # depends on [control=['except'], data=[]] except BaseException: log.error('unrecoverable polygon', exc_info=True) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] polygons = np.array(polygons) return polygons