code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def create_admin_by_sis_id(self, sis_account_id, user_id, role): """ Flag an existing user as an admin within the account sis id. """ return self.create_admin(self._sis_id(sis_account_id), user_id, role)
def function[create_admin_by_sis_id, parameter[self, sis_account_id, user_id, role]]: constant[ Flag an existing user as an admin within the account sis id. ] return[call[name[self].create_admin, parameter[call[name[self]._sis_id, parameter[name[sis_account_id]]], name[user_id], name[role]]]]
keyword[def] identifier[create_admin_by_sis_id] ( identifier[self] , identifier[sis_account_id] , identifier[user_id] , identifier[role] ): literal[string] keyword[return] identifier[self] . identifier[create_admin] ( identifier[self] . identifier[_sis_id] ( identifier[sis_account_id] ), identifier[user_id] , identifier[role] )
def create_admin_by_sis_id(self, sis_account_id, user_id, role): """ Flag an existing user as an admin within the account sis id. """ return self.create_admin(self._sis_id(sis_account_id), user_id, role)
def remove_logger(self, cb_id): '''Remove a logger. @param cb_id The ID of the logger to remove. @raises NoLoggerError ''' if cb_id not in self._loggers: raise exceptions.NoLoggerError(cb_id, self.name) conf = self.object.get_configuration() res = conf.remove_service_profile(cb_id.get_bytes()) del self._loggers[cb_id]
def function[remove_logger, parameter[self, cb_id]]: constant[Remove a logger. @param cb_id The ID of the logger to remove. @raises NoLoggerError ] if compare[name[cb_id] <ast.NotIn object at 0x7da2590d7190> name[self]._loggers] begin[:] <ast.Raise object at 0x7da204347700> variable[conf] assign[=] call[name[self].object.get_configuration, parameter[]] variable[res] assign[=] call[name[conf].remove_service_profile, parameter[call[name[cb_id].get_bytes, parameter[]]]] <ast.Delete object at 0x7da204345bd0>
keyword[def] identifier[remove_logger] ( identifier[self] , identifier[cb_id] ): literal[string] keyword[if] identifier[cb_id] keyword[not] keyword[in] identifier[self] . identifier[_loggers] : keyword[raise] identifier[exceptions] . identifier[NoLoggerError] ( identifier[cb_id] , identifier[self] . identifier[name] ) identifier[conf] = identifier[self] . identifier[object] . identifier[get_configuration] () identifier[res] = identifier[conf] . identifier[remove_service_profile] ( identifier[cb_id] . identifier[get_bytes] ()) keyword[del] identifier[self] . identifier[_loggers] [ identifier[cb_id] ]
def remove_logger(self, cb_id): """Remove a logger. @param cb_id The ID of the logger to remove. @raises NoLoggerError """ if cb_id not in self._loggers: raise exceptions.NoLoggerError(cb_id, self.name) # depends on [control=['if'], data=['cb_id']] conf = self.object.get_configuration() res = conf.remove_service_profile(cb_id.get_bytes()) del self._loggers[cb_id]
def export_stl(surface, file_name, **kwargs): """ Exports surface(s) as a .stl file in plain text or binary format. Keyword Arguments: * ``binary``: flag to generate a binary STL file. *Default: True* * ``vertex_spacing``: size of the triangle edge in terms of points sampled on the surface. *Default: 1* * ``update_delta``: use multi-surface evaluation delta for all surfaces. *Default: True* :param surface: surface or surfaces to be saved :type surface: abstract.Surface or multi.SurfaceContainer :param file_name: name of the output file :type file_name: str :raises GeomdlException: an error occurred writing the file """ binary = kwargs.get('binary', True) if 'binary' in kwargs: kwargs.pop('binary') content = export_stl_str(surface, binary=binary, **kwargs) return exch.write_file(file_name, content, binary=binary)
def function[export_stl, parameter[surface, file_name]]: constant[ Exports surface(s) as a .stl file in plain text or binary format. Keyword Arguments: * ``binary``: flag to generate a binary STL file. *Default: True* * ``vertex_spacing``: size of the triangle edge in terms of points sampled on the surface. *Default: 1* * ``update_delta``: use multi-surface evaluation delta for all surfaces. *Default: True* :param surface: surface or surfaces to be saved :type surface: abstract.Surface or multi.SurfaceContainer :param file_name: name of the output file :type file_name: str :raises GeomdlException: an error occurred writing the file ] variable[binary] assign[=] call[name[kwargs].get, parameter[constant[binary], constant[True]]] if compare[constant[binary] in name[kwargs]] begin[:] call[name[kwargs].pop, parameter[constant[binary]]] variable[content] assign[=] call[name[export_stl_str], parameter[name[surface]]] return[call[name[exch].write_file, parameter[name[file_name], name[content]]]]
keyword[def] identifier[export_stl] ( identifier[surface] , identifier[file_name] ,** identifier[kwargs] ): literal[string] identifier[binary] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] ) keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[content] = identifier[export_stl_str] ( identifier[surface] , identifier[binary] = identifier[binary] ,** identifier[kwargs] ) keyword[return] identifier[exch] . identifier[write_file] ( identifier[file_name] , identifier[content] , identifier[binary] = identifier[binary] )
def export_stl(surface, file_name, **kwargs): """ Exports surface(s) as a .stl file in plain text or binary format. Keyword Arguments: * ``binary``: flag to generate a binary STL file. *Default: True* * ``vertex_spacing``: size of the triangle edge in terms of points sampled on the surface. *Default: 1* * ``update_delta``: use multi-surface evaluation delta for all surfaces. *Default: True* :param surface: surface or surfaces to be saved :type surface: abstract.Surface or multi.SurfaceContainer :param file_name: name of the output file :type file_name: str :raises GeomdlException: an error occurred writing the file """ binary = kwargs.get('binary', True) if 'binary' in kwargs: kwargs.pop('binary') # depends on [control=['if'], data=['kwargs']] content = export_stl_str(surface, binary=binary, **kwargs) return exch.write_file(file_name, content, binary=binary)
def parse_item(self, response): """ Get basic information about a page, so that it can be passed to the `pa11y` tool for further testing. @url https://www.google.com/ @returns items 1 1 @returns requests 0 0 @scrapes url request_headers accessed_at page_title """ # if we got redirected to a login page, then login if URLObject(response.url).path == LOGIN_HTML_PATH: reqs = self.handle_unexpected_redirect_to_login_page(response) for req in reqs: yield req title = response.xpath("//title/text()").extract_first() if title: title = title.strip() # `response.request.headers` is a dictionary where the key is the # header name, and the value is a *list*, containing one item, # which is the header value. We need to get rid of this list, and just # have key-value pairs. (This list probably exists in case the same # header is sent multiple times, but that's not happening in this case, # and the list construct is getting in the way.) # # We also need to convert bytes to ASCII. In practice, headers can # only contain ASCII characters: see # http://stackoverflow.com/questions/5423223/how-to-send-non-english-unicode-string-using-http-header request_headers = {key.decode('ascii'): value[0].decode('ascii') for key, value in response.request.headers.items()} item = A11yItem( url=response.url, request_headers=request_headers, accessed_at=datetime.utcnow(), page_title=title, ) yield item
def function[parse_item, parameter[self, response]]: constant[ Get basic information about a page, so that it can be passed to the `pa11y` tool for further testing. @url https://www.google.com/ @returns items 1 1 @returns requests 0 0 @scrapes url request_headers accessed_at page_title ] if compare[call[name[URLObject], parameter[name[response].url]].path equal[==] name[LOGIN_HTML_PATH]] begin[:] variable[reqs] assign[=] call[name[self].handle_unexpected_redirect_to_login_page, parameter[name[response]]] for taget[name[req]] in starred[name[reqs]] begin[:] <ast.Yield object at 0x7da2041d8c10> variable[title] assign[=] call[call[name[response].xpath, parameter[constant[//title/text()]]].extract_first, parameter[]] if name[title] begin[:] variable[title] assign[=] call[name[title].strip, parameter[]] variable[request_headers] assign[=] <ast.DictComp object at 0x7da2041dabc0> variable[item] assign[=] call[name[A11yItem], parameter[]] <ast.Yield object at 0x7da18bc73670>
keyword[def] identifier[parse_item] ( identifier[self] , identifier[response] ): literal[string] keyword[if] identifier[URLObject] ( identifier[response] . identifier[url] ). identifier[path] == identifier[LOGIN_HTML_PATH] : identifier[reqs] = identifier[self] . identifier[handle_unexpected_redirect_to_login_page] ( identifier[response] ) keyword[for] identifier[req] keyword[in] identifier[reqs] : keyword[yield] identifier[req] identifier[title] = identifier[response] . identifier[xpath] ( literal[string] ). identifier[extract_first] () keyword[if] identifier[title] : identifier[title] = identifier[title] . identifier[strip] () identifier[request_headers] ={ identifier[key] . identifier[decode] ( literal[string] ): identifier[value] [ literal[int] ]. identifier[decode] ( literal[string] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[response] . identifier[request] . identifier[headers] . identifier[items] ()} identifier[item] = identifier[A11yItem] ( identifier[url] = identifier[response] . identifier[url] , identifier[request_headers] = identifier[request_headers] , identifier[accessed_at] = identifier[datetime] . identifier[utcnow] (), identifier[page_title] = identifier[title] , ) keyword[yield] identifier[item]
def parse_item(self, response): """ Get basic information about a page, so that it can be passed to the `pa11y` tool for further testing. @url https://www.google.com/ @returns items 1 1 @returns requests 0 0 @scrapes url request_headers accessed_at page_title """ # if we got redirected to a login page, then login if URLObject(response.url).path == LOGIN_HTML_PATH: reqs = self.handle_unexpected_redirect_to_login_page(response) for req in reqs: yield req # depends on [control=['for'], data=['req']] # depends on [control=['if'], data=[]] title = response.xpath('//title/text()').extract_first() if title: title = title.strip() # depends on [control=['if'], data=[]] # `response.request.headers` is a dictionary where the key is the # header name, and the value is a *list*, containing one item, # which is the header value. We need to get rid of this list, and just # have key-value pairs. (This list probably exists in case the same # header is sent multiple times, but that's not happening in this case, # and the list construct is getting in the way.) # # We also need to convert bytes to ASCII. In practice, headers can # only contain ASCII characters: see # http://stackoverflow.com/questions/5423223/how-to-send-non-english-unicode-string-using-http-header request_headers = {key.decode('ascii'): value[0].decode('ascii') for (key, value) in response.request.headers.items()} item = A11yItem(url=response.url, request_headers=request_headers, accessed_at=datetime.utcnow(), page_title=title) yield item
def _parse_args(self,freqsAngles=True,_firstFlip=False,*args): """Helper function to parse the arguments to the __call__ and actionsFreqsAngles functions""" from galpy.orbit import Orbit RasOrbit= False integrated= True #whether the orbit was already integrated when given if len(args) == 5 or len(args) == 3: #pragma: no cover raise IOError("Must specify phi for actionAngleIsochroneApprox") if len(args) == 6 or len(args) == 4: if len(args) == 6: R,vR,vT, z, vz, phi= args else: R,vR,vT, phi= args z, vz= 0., 0. if isinstance(R,float): os= [Orbit([R,vR,vT,z,vz,phi])] RasOrbit= True integrated= False elif len(R.shape) == 1: #not integrated yet os= [Orbit([R[ii],vR[ii],vT[ii],z[ii],vz[ii],phi[ii]]) for ii in range(R.shape[0])] RasOrbit= True integrated= False if isinstance(args[0],Orbit) \ or (isinstance(args[0],list) and isinstance(args[0][0],Orbit)) \ or RasOrbit: if RasOrbit: pass elif not isinstance(args[0],list): os= [args[0]] if len(os[0]._orb.vxvv) == 3 or len(os[0]._orb.vxvv) == 5: #pragma: no cover raise IOError("Must specify phi for actionAngleIsochroneApprox") else: os= args[0] if len(os[0]._orb.vxvv) == 3 or len(os[0]._orb.vxvv) == 5: #pragma: no cover raise IOError("Must specify phi for actionAngleIsochroneApprox") self._check_consistent_units_orbitInput(os[0]) if not hasattr(os[0]._orb,'orbit'): #not integrated yet if _firstFlip: for o in os: o._orb.vxvv[1]= -o._orb.vxvv[1] o._orb.vxvv[2]= -o._orb.vxvv[2] o._orb.vxvv[4]= -o._orb.vxvv[4] [o.integrate(self._tsJ,pot=self._pot, method=self._integrate_method, dt=self._integrate_dt) for o in os] if _firstFlip: for o in os: o._orb.vxvv[1]= -o._orb.vxvv[1] o._orb.vxvv[2]= -o._orb.vxvv[2] o._orb.vxvv[4]= -o._orb.vxvv[4] o._orb.orbit[:,1]= -o._orb.orbit[:,1] o._orb.orbit[:,2]= -o._orb.orbit[:,2] o._orb.orbit[:,4]= -o._orb.orbit[:,4] integrated= False ntJ= os[0].getOrbit().shape[0] no= len(os) R= nu.empty((no,ntJ)) vR= nu.empty((no,ntJ)) vT= nu.empty((no,ntJ)) z= nu.zeros((no,ntJ))+10.**-7. #To avoid numpy warnings for vz= nu.zeros((no,ntJ))+10.**-7. #planarOrbits phi= nu.empty((no,ntJ)) for ii in range(len(os)): this_orbit= os[ii].getOrbit() R[ii,:]= this_orbit[:,0] vR[ii,:]= this_orbit[:,1] vT[ii,:]= this_orbit[:,2] if this_orbit.shape[1] == 6: z[ii,:]= this_orbit[:,3] vz[ii,:]= this_orbit[:,4] phi[ii,:]= this_orbit[:,5] else: phi[ii,:]= this_orbit[:,3] if freqsAngles and not integrated: #also integrate backwards in time, such that the requested point is not at the edge no= R.shape[0] nt= R.shape[1] oR= nu.empty((no,2*nt-1)) ovR= nu.empty((no,2*nt-1)) ovT= nu.empty((no,2*nt-1)) oz= nu.zeros((no,2*nt-1))+10.**-7. #To avoid numpy warnings for ovz= nu.zeros((no,2*nt-1))+10.**-7. #planarOrbits ophi= nu.empty((no,2*nt-1)) if _firstFlip: oR[:,:nt]= R[:,::-1] ovR[:,:nt]= vR[:,::-1] ovT[:,:nt]= vT[:,::-1] oz[:,:nt]= z[:,::-1] ovz[:,:nt]= vz[:,::-1] ophi[:,:nt]= phi[:,::-1] else: oR[:,nt-1:]= R ovR[:,nt-1:]= vR ovT[:,nt-1:]= vT oz[:,nt-1:]= z ovz[:,nt-1:]= vz ophi[:,nt-1:]= phi #load orbits if _firstFlip: os= [Orbit([R[ii,0],vR[ii,0],vT[ii,0],z[ii,0],vz[ii,0],phi[ii,0]]) for ii in range(R.shape[0])] else: os= [Orbit([R[ii,0],-vR[ii,0],-vT[ii,0],z[ii,0],-vz[ii,0],phi[ii,0]]) for ii in range(R.shape[0])] #integrate orbits [o.integrate(self._tsJ,pot=self._pot, method=self._integrate_method, dt=self._integrate_dt) for o in os] #extract phase-space points along the orbit ts= self._tsJ if _firstFlip: for ii in range(no): oR[ii,nt:]= os[ii].R(ts[1:]) #drop t=0, which we have ovR[ii,nt:]= os[ii].vR(ts[1:]) #already ovT[ii,nt:]= os[ii].vT(ts[1:]) # reverse, such that if os[ii].getOrbit().shape[1] == 6: oz[ii,nt:]= os[ii].z(ts[1:]) #everything is in the ovz[ii,nt:]= os[ii].vz(ts[1:]) #right order ophi[ii,nt:]= os[ii].phi(ts[1:]) #! else: for ii in range(no): oR[ii,:nt-1]= os[ii].R(ts[1:])[::-1] #drop t=0, which we have ovR[ii,:nt-1]= -os[ii].vR(ts[1:])[::-1] #already ovT[ii,:nt-1]= -os[ii].vT(ts[1:])[::-1] # reverse, such that if os[ii].getOrbit().shape[1] == 6: oz[ii,:nt-1]= os[ii].z(ts[1:])[::-1] #everything is in the ovz[ii,:nt-1]= -os[ii].vz(ts[1:])[::-1] #right order ophi[ii,:nt-1]= os[ii].phi(ts[1:])[::-1] #! return (oR,ovR,ovT,oz,ovz,ophi) else: return (R,vR,vT,z,vz,phi)
def function[_parse_args, parameter[self, freqsAngles, _firstFlip]]: constant[Helper function to parse the arguments to the __call__ and actionsFreqsAngles functions] from relative_module[galpy.orbit] import module[Orbit] variable[RasOrbit] assign[=] constant[False] variable[integrated] assign[=] constant[True] if <ast.BoolOp object at 0x7da1b0db9cf0> begin[:] <ast.Raise object at 0x7da1b0db9ae0> if <ast.BoolOp object at 0x7da1b0db99f0> begin[:] if compare[call[name[len], parameter[name[args]]] equal[==] constant[6]] begin[:] <ast.Tuple object at 0x7da1b0db9690> assign[=] name[args] if call[name[isinstance], parameter[name[R], name[float]]] begin[:] variable[os] assign[=] list[[<ast.Call object at 0x7da1b0db90c0>]] variable[RasOrbit] assign[=] constant[True] variable[integrated] assign[=] constant[False] if <ast.BoolOp object at 0x7da1b0db84c0> begin[:] if name[RasOrbit] begin[:] pass call[name[self]._check_consistent_units_orbitInput, parameter[call[name[os]][constant[0]]]] if <ast.UnaryOp object at 0x7da1b0da12d0> begin[:] if name[_firstFlip] begin[:] for taget[name[o]] in starred[name[os]] begin[:] call[name[o]._orb.vxvv][constant[1]] assign[=] <ast.UnaryOp object at 0x7da1b0da2920> call[name[o]._orb.vxvv][constant[2]] assign[=] <ast.UnaryOp object at 0x7da1b0da06a0> call[name[o]._orb.vxvv][constant[4]] assign[=] <ast.UnaryOp object at 0x7da1b0da0970> <ast.ListComp object at 0x7da1b0da0790> if name[_firstFlip] begin[:] for taget[name[o]] in starred[name[os]] begin[:] call[name[o]._orb.vxvv][constant[1]] assign[=] <ast.UnaryOp object at 0x7da1b0da08e0> call[name[o]._orb.vxvv][constant[2]] assign[=] <ast.UnaryOp object at 0x7da1b0da3d00> call[name[o]._orb.vxvv][constant[4]] assign[=] <ast.UnaryOp object at 0x7da1b0da1c90> call[name[o]._orb.orbit][tuple[[<ast.Slice object at 0x7da1b0ca64d0>, <ast.Constant object at 0x7da1b0ca6980>]]] assign[=] <ast.UnaryOp object at 0x7da1b0ca58a0> call[name[o]._orb.orbit][tuple[[<ast.Slice object at 0x7da1b0ca78e0>, <ast.Constant object at 0x7da1b0ca7bb0>]]] assign[=] <ast.UnaryOp object at 0x7da1b0ca6c50> call[name[o]._orb.orbit][tuple[[<ast.Slice object at 0x7da1b0ca5600>, <ast.Constant object at 0x7da1b0ca4e50>]]] assign[=] <ast.UnaryOp object at 0x7da1b0ca5b10> variable[integrated] assign[=] constant[False] variable[ntJ] assign[=] call[call[call[name[os]][constant[0]].getOrbit, parameter[]].shape][constant[0]] variable[no] assign[=] call[name[len], parameter[name[os]]] variable[R] assign[=] call[name[nu].empty, parameter[tuple[[<ast.Name object at 0x7da1b0ca7970>, <ast.Name object at 0x7da1b0ca5b70>]]]] variable[vR] assign[=] call[name[nu].empty, parameter[tuple[[<ast.Name object at 0x7da1b0ca7d60>, <ast.Name object at 0x7da1b0ca7160>]]]] variable[vT] assign[=] call[name[nu].empty, parameter[tuple[[<ast.Name object at 0x7da1b0ca40a0>, <ast.Name object at 0x7da1b0ca52a0>]]]] variable[z] assign[=] binary_operation[call[name[nu].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0ca6a40>, <ast.Name object at 0x7da1b0ca7130>]]]] + binary_operation[constant[10.0] ** <ast.UnaryOp object at 0x7da1b0ca70a0>]] variable[vz] assign[=] binary_operation[call[name[nu].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0ca79d0>, <ast.Name object at 0x7da1b0ca5870>]]]] + binary_operation[constant[10.0] ** <ast.UnaryOp object at 0x7da1b0ca7fa0>]] variable[phi] assign[=] call[name[nu].empty, parameter[tuple[[<ast.Name object at 0x7da1b0ca6530>, <ast.Name object at 0x7da1b0ca7d30>]]]] for taget[name[ii]] in starred[call[name[range], parameter[call[name[len], parameter[name[os]]]]]] begin[:] variable[this_orbit] assign[=] call[call[name[os]][name[ii]].getOrbit, parameter[]] call[name[R]][tuple[[<ast.Name object at 0x7da1b0ca6ad0>, <ast.Slice object at 0x7da1b0ca5900>]]] assign[=] call[name[this_orbit]][tuple[[<ast.Slice object at 0x7da1b0ca6ec0>, <ast.Constant object at 0x7da1b0ca6c80>]]] call[name[vR]][tuple[[<ast.Name object at 0x7da1b0ce34f0>, <ast.Slice object at 0x7da1b0ce0070>]]] assign[=] call[name[this_orbit]][tuple[[<ast.Slice object at 0x7da1b0ce18a0>, <ast.Constant object at 0x7da1b0ce3d60>]]] call[name[vT]][tuple[[<ast.Name object at 0x7da1b0ce1570>, <ast.Slice object at 0x7da1b0ce26b0>]]] assign[=] call[name[this_orbit]][tuple[[<ast.Slice object at 0x7da1b0ce0730>, <ast.Constant object at 0x7da1b0ce0400>]]] if compare[call[name[this_orbit].shape][constant[1]] equal[==] constant[6]] begin[:] call[name[z]][tuple[[<ast.Name object at 0x7da1b0ce1ed0>, <ast.Slice object at 0x7da1b0ce3940>]]] assign[=] call[name[this_orbit]][tuple[[<ast.Slice object at 0x7da1b0ce2bf0>, <ast.Constant object at 0x7da1b0ce08b0>]]] call[name[vz]][tuple[[<ast.Name object at 0x7da1b0ce3040>, <ast.Slice object at 0x7da1b0ce0760>]]] assign[=] call[name[this_orbit]][tuple[[<ast.Slice object at 0x7da1b0ce2ad0>, <ast.Constant object at 0x7da1b0ce37c0>]]] call[name[phi]][tuple[[<ast.Name object at 0x7da1b0ce1cf0>, <ast.Slice object at 0x7da1b0ce1e10>]]] assign[=] call[name[this_orbit]][tuple[[<ast.Slice object at 0x7da1b0ce2b30>, <ast.Constant object at 0x7da1b0ce1bd0>]]] if <ast.BoolOp object at 0x7da1b0ce1900> begin[:] variable[no] assign[=] call[name[R].shape][constant[0]] variable[nt] assign[=] call[name[R].shape][constant[1]] variable[oR] assign[=] call[name[nu].empty, parameter[tuple[[<ast.Name object at 0x7da1b0ce2320>, <ast.BinOp object at 0x7da1b0ce3d90>]]]] variable[ovR] assign[=] call[name[nu].empty, parameter[tuple[[<ast.Name object at 0x7da1b0ce1b70>, <ast.BinOp object at 0x7da1b0ce32b0>]]]] variable[ovT] assign[=] call[name[nu].empty, parameter[tuple[[<ast.Name object at 0x7da1b0ce2500>, <ast.BinOp object at 0x7da1b0ce34c0>]]]] variable[oz] assign[=] binary_operation[call[name[nu].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0ce1a80>, <ast.BinOp object at 0x7da1b0ce2e30>]]]] + binary_operation[constant[10.0] ** <ast.UnaryOp object at 0x7da1b0ce0970>]] variable[ovz] assign[=] binary_operation[call[name[nu].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0ce3100>, <ast.BinOp object at 0x7da1b0ce2e60>]]]] + binary_operation[constant[10.0] ** <ast.UnaryOp object at 0x7da1b0ce2470>]] variable[ophi] assign[=] call[name[nu].empty, parameter[tuple[[<ast.Name object at 0x7da1b0ce28f0>, <ast.BinOp object at 0x7da1b0ce1330>]]]] if name[_firstFlip] begin[:] call[name[oR]][tuple[[<ast.Slice object at 0x7da1b0ce2fb0>, <ast.Slice object at 0x7da1b0ce1690>]]] assign[=] call[name[R]][tuple[[<ast.Slice object at 0x7da1b0ce3a90>, <ast.Slice object at 0x7da1b0ce0190>]]] call[name[ovR]][tuple[[<ast.Slice object at 0x7da1b0ce2ec0>, <ast.Slice object at 0x7da1b0ce1630>]]] assign[=] call[name[vR]][tuple[[<ast.Slice object at 0x7da1b0ce31c0>, <ast.Slice object at 0x7da1b0ce0cd0>]]] call[name[ovT]][tuple[[<ast.Slice object at 0x7da1b0ce0ca0>, <ast.Slice object at 0x7da1b0ce3580>]]] assign[=] call[name[vT]][tuple[[<ast.Slice object at 0x7da1b0ce25f0>, <ast.Slice object at 0x7da1b0ce1c60>]]] call[name[oz]][tuple[[<ast.Slice object at 0x7da1b0ce3370>, <ast.Slice object at 0x7da1b0ce3af0>]]] assign[=] call[name[z]][tuple[[<ast.Slice object at 0x7da1b0ce1840>, <ast.Slice object at 0x7da1b0ce03a0>]]] call[name[ovz]][tuple[[<ast.Slice object at 0x7da1b0ce10f0>, <ast.Slice object at 0x7da1b0ce2950>]]] assign[=] call[name[vz]][tuple[[<ast.Slice object at 0x7da1b0ce0250>, <ast.Slice object at 0x7da1b0ce3cd0>]]] call[name[ophi]][tuple[[<ast.Slice object at 0x7da1b0ce1930>, <ast.Slice object at 0x7da1b0ce19c0>]]] assign[=] call[name[phi]][tuple[[<ast.Slice object at 0x7da1b0ce3970>, <ast.Slice object at 0x7da1b0ce16f0>]]] if name[_firstFlip] begin[:] variable[os] assign[=] <ast.ListComp object at 0x7da18bc73d00> <ast.ListComp object at 0x7da18bc710c0> variable[ts] assign[=] name[self]._tsJ if name[_firstFlip] begin[:] for taget[name[ii]] in starred[call[name[range], parameter[name[no]]]] begin[:] call[name[oR]][tuple[[<ast.Name object at 0x7da20c6c5690>, <ast.Slice object at 0x7da20c6c6aa0>]]] assign[=] call[call[name[os]][name[ii]].R, parameter[call[name[ts]][<ast.Slice object at 0x7da20c6c6680>]]] call[name[ovR]][tuple[[<ast.Name object at 0x7da20c6c5510>, <ast.Slice object at 0x7da20c6c56f0>]]] assign[=] call[call[name[os]][name[ii]].vR, parameter[call[name[ts]][<ast.Slice object at 0x7da20c6c79d0>]]] call[name[ovT]][tuple[[<ast.Name object at 0x7da20c6c5120>, <ast.Slice object at 0x7da20c6c5270>]]] assign[=] call[call[name[os]][name[ii]].vT, parameter[call[name[ts]][<ast.Slice object at 0x7da20c6c78e0>]]] if compare[call[call[call[name[os]][name[ii]].getOrbit, parameter[]].shape][constant[1]] equal[==] constant[6]] begin[:] call[name[oz]][tuple[[<ast.Name object at 0x7da1b0d40be0>, <ast.Slice object at 0x7da1b0d40c10>]]] assign[=] call[call[name[os]][name[ii]].z, parameter[call[name[ts]][<ast.Slice object at 0x7da1b0d40dc0>]]] call[name[ovz]][tuple[[<ast.Name object at 0x7da1b0d40ee0>, <ast.Slice object at 0x7da1b0d40f10>]]] assign[=] call[call[name[os]][name[ii]].vz, parameter[call[name[ts]][<ast.Slice object at 0x7da1b0d410c0>]]] call[name[ophi]][tuple[[<ast.Name object at 0x7da1b0d411e0>, <ast.Slice object at 0x7da1b0d41210>]]] assign[=] call[call[name[os]][name[ii]].phi, parameter[call[name[ts]][<ast.Slice object at 0x7da1b0d413c0>]]] return[tuple[[<ast.Name object at 0x7da1b0d43130>, <ast.Name object at 0x7da1b0d43160>, <ast.Name object at 0x7da1b0d43190>, <ast.Name object at 0x7da1b0d431c0>, <ast.Name object at 0x7da1b0d431f0>, <ast.Name object at 0x7da1b0d43220>]]]
keyword[def] identifier[_parse_args] ( identifier[self] , identifier[freqsAngles] = keyword[True] , identifier[_firstFlip] = keyword[False] ,* identifier[args] ): literal[string] keyword[from] identifier[galpy] . identifier[orbit] keyword[import] identifier[Orbit] identifier[RasOrbit] = keyword[False] identifier[integrated] = keyword[True] keyword[if] identifier[len] ( identifier[args] )== literal[int] keyword[or] identifier[len] ( identifier[args] )== literal[int] : keyword[raise] identifier[IOError] ( literal[string] ) keyword[if] identifier[len] ( identifier[args] )== literal[int] keyword[or] identifier[len] ( identifier[args] )== literal[int] : keyword[if] identifier[len] ( identifier[args] )== literal[int] : identifier[R] , identifier[vR] , identifier[vT] , identifier[z] , identifier[vz] , identifier[phi] = identifier[args] keyword[else] : identifier[R] , identifier[vR] , identifier[vT] , identifier[phi] = identifier[args] identifier[z] , identifier[vz] = literal[int] , literal[int] keyword[if] identifier[isinstance] ( identifier[R] , identifier[float] ): identifier[os] =[ identifier[Orbit] ([ identifier[R] , identifier[vR] , identifier[vT] , identifier[z] , identifier[vz] , identifier[phi] ])] identifier[RasOrbit] = keyword[True] identifier[integrated] = keyword[False] keyword[elif] identifier[len] ( identifier[R] . identifier[shape] )== literal[int] : identifier[os] =[ identifier[Orbit] ([ identifier[R] [ identifier[ii] ], identifier[vR] [ identifier[ii] ], identifier[vT] [ identifier[ii] ], identifier[z] [ identifier[ii] ], identifier[vz] [ identifier[ii] ], identifier[phi] [ identifier[ii] ]]) keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[R] . identifier[shape] [ literal[int] ])] identifier[RasOrbit] = keyword[True] identifier[integrated] = keyword[False] keyword[if] identifier[isinstance] ( identifier[args] [ literal[int] ], identifier[Orbit] ) keyword[or] ( identifier[isinstance] ( identifier[args] [ literal[int] ], identifier[list] ) keyword[and] identifier[isinstance] ( identifier[args] [ literal[int] ][ literal[int] ], identifier[Orbit] )) keyword[or] identifier[RasOrbit] : keyword[if] identifier[RasOrbit] : keyword[pass] keyword[elif] keyword[not] identifier[isinstance] ( identifier[args] [ literal[int] ], identifier[list] ): identifier[os] =[ identifier[args] [ literal[int] ]] keyword[if] identifier[len] ( identifier[os] [ literal[int] ]. identifier[_orb] . identifier[vxvv] )== literal[int] keyword[or] identifier[len] ( identifier[os] [ literal[int] ]. identifier[_orb] . identifier[vxvv] )== literal[int] : keyword[raise] identifier[IOError] ( literal[string] ) keyword[else] : identifier[os] = identifier[args] [ literal[int] ] keyword[if] identifier[len] ( identifier[os] [ literal[int] ]. identifier[_orb] . identifier[vxvv] )== literal[int] keyword[or] identifier[len] ( identifier[os] [ literal[int] ]. identifier[_orb] . identifier[vxvv] )== literal[int] : keyword[raise] identifier[IOError] ( literal[string] ) identifier[self] . identifier[_check_consistent_units_orbitInput] ( identifier[os] [ literal[int] ]) keyword[if] keyword[not] identifier[hasattr] ( identifier[os] [ literal[int] ]. identifier[_orb] , literal[string] ): keyword[if] identifier[_firstFlip] : keyword[for] identifier[o] keyword[in] identifier[os] : identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ]=- identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ] identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ]=- identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ] identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ]=- identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ] [ identifier[o] . identifier[integrate] ( identifier[self] . identifier[_tsJ] , identifier[pot] = identifier[self] . identifier[_pot] , identifier[method] = identifier[self] . identifier[_integrate_method] , identifier[dt] = identifier[self] . identifier[_integrate_dt] ) keyword[for] identifier[o] keyword[in] identifier[os] ] keyword[if] identifier[_firstFlip] : keyword[for] identifier[o] keyword[in] identifier[os] : identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ]=- identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ] identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ]=- identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ] identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ]=- identifier[o] . identifier[_orb] . identifier[vxvv] [ literal[int] ] identifier[o] . identifier[_orb] . identifier[orbit] [:, literal[int] ]=- identifier[o] . identifier[_orb] . identifier[orbit] [:, literal[int] ] identifier[o] . identifier[_orb] . identifier[orbit] [:, literal[int] ]=- identifier[o] . identifier[_orb] . identifier[orbit] [:, literal[int] ] identifier[o] . identifier[_orb] . identifier[orbit] [:, literal[int] ]=- identifier[o] . identifier[_orb] . identifier[orbit] [:, literal[int] ] identifier[integrated] = keyword[False] identifier[ntJ] = identifier[os] [ literal[int] ]. identifier[getOrbit] (). identifier[shape] [ literal[int] ] identifier[no] = identifier[len] ( identifier[os] ) identifier[R] = identifier[nu] . identifier[empty] (( identifier[no] , identifier[ntJ] )) identifier[vR] = identifier[nu] . identifier[empty] (( identifier[no] , identifier[ntJ] )) identifier[vT] = identifier[nu] . identifier[empty] (( identifier[no] , identifier[ntJ] )) identifier[z] = identifier[nu] . identifier[zeros] (( identifier[no] , identifier[ntJ] ))+ literal[int] **- literal[int] identifier[vz] = identifier[nu] . identifier[zeros] (( identifier[no] , identifier[ntJ] ))+ literal[int] **- literal[int] identifier[phi] = identifier[nu] . identifier[empty] (( identifier[no] , identifier[ntJ] )) keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[len] ( identifier[os] )): identifier[this_orbit] = identifier[os] [ identifier[ii] ]. identifier[getOrbit] () identifier[R] [ identifier[ii] ,:]= identifier[this_orbit] [:, literal[int] ] identifier[vR] [ identifier[ii] ,:]= identifier[this_orbit] [:, literal[int] ] identifier[vT] [ identifier[ii] ,:]= identifier[this_orbit] [:, literal[int] ] keyword[if] identifier[this_orbit] . identifier[shape] [ literal[int] ]== literal[int] : identifier[z] [ identifier[ii] ,:]= identifier[this_orbit] [:, literal[int] ] identifier[vz] [ identifier[ii] ,:]= identifier[this_orbit] [:, literal[int] ] identifier[phi] [ identifier[ii] ,:]= identifier[this_orbit] [:, literal[int] ] keyword[else] : identifier[phi] [ identifier[ii] ,:]= identifier[this_orbit] [:, literal[int] ] keyword[if] identifier[freqsAngles] keyword[and] keyword[not] identifier[integrated] : identifier[no] = identifier[R] . identifier[shape] [ literal[int] ] identifier[nt] = identifier[R] . identifier[shape] [ literal[int] ] identifier[oR] = identifier[nu] . identifier[empty] (( identifier[no] , literal[int] * identifier[nt] - literal[int] )) identifier[ovR] = identifier[nu] . identifier[empty] (( identifier[no] , literal[int] * identifier[nt] - literal[int] )) identifier[ovT] = identifier[nu] . identifier[empty] (( identifier[no] , literal[int] * identifier[nt] - literal[int] )) identifier[oz] = identifier[nu] . identifier[zeros] (( identifier[no] , literal[int] * identifier[nt] - literal[int] ))+ literal[int] **- literal[int] identifier[ovz] = identifier[nu] . identifier[zeros] (( identifier[no] , literal[int] * identifier[nt] - literal[int] ))+ literal[int] **- literal[int] identifier[ophi] = identifier[nu] . identifier[empty] (( identifier[no] , literal[int] * identifier[nt] - literal[int] )) keyword[if] identifier[_firstFlip] : identifier[oR] [:,: identifier[nt] ]= identifier[R] [:,::- literal[int] ] identifier[ovR] [:,: identifier[nt] ]= identifier[vR] [:,::- literal[int] ] identifier[ovT] [:,: identifier[nt] ]= identifier[vT] [:,::- literal[int] ] identifier[oz] [:,: identifier[nt] ]= identifier[z] [:,::- literal[int] ] identifier[ovz] [:,: identifier[nt] ]= identifier[vz] [:,::- literal[int] ] identifier[ophi] [:,: identifier[nt] ]= identifier[phi] [:,::- literal[int] ] keyword[else] : identifier[oR] [:, identifier[nt] - literal[int] :]= identifier[R] identifier[ovR] [:, identifier[nt] - literal[int] :]= identifier[vR] identifier[ovT] [:, identifier[nt] - literal[int] :]= identifier[vT] identifier[oz] [:, identifier[nt] - literal[int] :]= identifier[z] identifier[ovz] [:, identifier[nt] - literal[int] :]= identifier[vz] identifier[ophi] [:, identifier[nt] - literal[int] :]= identifier[phi] keyword[if] identifier[_firstFlip] : identifier[os] =[ identifier[Orbit] ([ identifier[R] [ identifier[ii] , literal[int] ], identifier[vR] [ identifier[ii] , literal[int] ], identifier[vT] [ identifier[ii] , literal[int] ], identifier[z] [ identifier[ii] , literal[int] ], identifier[vz] [ identifier[ii] , literal[int] ], identifier[phi] [ identifier[ii] , literal[int] ]]) keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[R] . identifier[shape] [ literal[int] ])] keyword[else] : identifier[os] =[ identifier[Orbit] ([ identifier[R] [ identifier[ii] , literal[int] ],- identifier[vR] [ identifier[ii] , literal[int] ],- identifier[vT] [ identifier[ii] , literal[int] ], identifier[z] [ identifier[ii] , literal[int] ],- identifier[vz] [ identifier[ii] , literal[int] ], identifier[phi] [ identifier[ii] , literal[int] ]]) keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[R] . identifier[shape] [ literal[int] ])] [ identifier[o] . identifier[integrate] ( identifier[self] . identifier[_tsJ] , identifier[pot] = identifier[self] . identifier[_pot] , identifier[method] = identifier[self] . identifier[_integrate_method] , identifier[dt] = identifier[self] . identifier[_integrate_dt] ) keyword[for] identifier[o] keyword[in] identifier[os] ] identifier[ts] = identifier[self] . identifier[_tsJ] keyword[if] identifier[_firstFlip] : keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[no] ): identifier[oR] [ identifier[ii] , identifier[nt] :]= identifier[os] [ identifier[ii] ]. identifier[R] ( identifier[ts] [ literal[int] :]) identifier[ovR] [ identifier[ii] , identifier[nt] :]= identifier[os] [ identifier[ii] ]. identifier[vR] ( identifier[ts] [ literal[int] :]) identifier[ovT] [ identifier[ii] , identifier[nt] :]= identifier[os] [ identifier[ii] ]. identifier[vT] ( identifier[ts] [ literal[int] :]) keyword[if] identifier[os] [ identifier[ii] ]. identifier[getOrbit] (). identifier[shape] [ literal[int] ]== literal[int] : identifier[oz] [ identifier[ii] , identifier[nt] :]= identifier[os] [ identifier[ii] ]. identifier[z] ( identifier[ts] [ literal[int] :]) identifier[ovz] [ identifier[ii] , identifier[nt] :]= identifier[os] [ identifier[ii] ]. identifier[vz] ( identifier[ts] [ literal[int] :]) identifier[ophi] [ identifier[ii] , identifier[nt] :]= identifier[os] [ identifier[ii] ]. identifier[phi] ( identifier[ts] [ literal[int] :]) keyword[else] : keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[no] ): identifier[oR] [ identifier[ii] ,: identifier[nt] - literal[int] ]= identifier[os] [ identifier[ii] ]. identifier[R] ( identifier[ts] [ literal[int] :])[::- literal[int] ] identifier[ovR] [ identifier[ii] ,: identifier[nt] - literal[int] ]=- identifier[os] [ identifier[ii] ]. identifier[vR] ( identifier[ts] [ literal[int] :])[::- literal[int] ] identifier[ovT] [ identifier[ii] ,: identifier[nt] - literal[int] ]=- identifier[os] [ identifier[ii] ]. identifier[vT] ( identifier[ts] [ literal[int] :])[::- literal[int] ] keyword[if] identifier[os] [ identifier[ii] ]. identifier[getOrbit] (). identifier[shape] [ literal[int] ]== literal[int] : identifier[oz] [ identifier[ii] ,: identifier[nt] - literal[int] ]= identifier[os] [ identifier[ii] ]. identifier[z] ( identifier[ts] [ literal[int] :])[::- literal[int] ] identifier[ovz] [ identifier[ii] ,: identifier[nt] - literal[int] ]=- identifier[os] [ identifier[ii] ]. identifier[vz] ( identifier[ts] [ literal[int] :])[::- literal[int] ] identifier[ophi] [ identifier[ii] ,: identifier[nt] - literal[int] ]= identifier[os] [ identifier[ii] ]. identifier[phi] ( identifier[ts] [ literal[int] :])[::- literal[int] ] keyword[return] ( identifier[oR] , identifier[ovR] , identifier[ovT] , identifier[oz] , identifier[ovz] , identifier[ophi] ) keyword[else] : keyword[return] ( identifier[R] , identifier[vR] , identifier[vT] , identifier[z] , identifier[vz] , identifier[phi] )
def _parse_args(self, freqsAngles=True, _firstFlip=False, *args): """Helper function to parse the arguments to the __call__ and actionsFreqsAngles functions""" from galpy.orbit import Orbit RasOrbit = False integrated = True #whether the orbit was already integrated when given if len(args) == 5 or len(args) == 3: #pragma: no cover raise IOError('Must specify phi for actionAngleIsochroneApprox') # depends on [control=['if'], data=[]] if len(args) == 6 or len(args) == 4: if len(args) == 6: (R, vR, vT, z, vz, phi) = args # depends on [control=['if'], data=[]] else: (R, vR, vT, phi) = args (z, vz) = (0.0, 0.0) if isinstance(R, float): os = [Orbit([R, vR, vT, z, vz, phi])] RasOrbit = True integrated = False # depends on [control=['if'], data=[]] elif len(R.shape) == 1: #not integrated yet os = [Orbit([R[ii], vR[ii], vT[ii], z[ii], vz[ii], phi[ii]]) for ii in range(R.shape[0])] RasOrbit = True integrated = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if isinstance(args[0], Orbit) or (isinstance(args[0], list) and isinstance(args[0][0], Orbit)) or RasOrbit: if RasOrbit: pass # depends on [control=['if'], data=[]] elif not isinstance(args[0], list): os = [args[0]] if len(os[0]._orb.vxvv) == 3 or len(os[0]._orb.vxvv) == 5: #pragma: no cover raise IOError('Must specify phi for actionAngleIsochroneApprox') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: os = args[0] if len(os[0]._orb.vxvv) == 3 or len(os[0]._orb.vxvv) == 5: #pragma: no cover raise IOError('Must specify phi for actionAngleIsochroneApprox') # depends on [control=['if'], data=[]] self._check_consistent_units_orbitInput(os[0]) if not hasattr(os[0]._orb, 'orbit'): #not integrated yet if _firstFlip: for o in os: o._orb.vxvv[1] = -o._orb.vxvv[1] o._orb.vxvv[2] = -o._orb.vxvv[2] o._orb.vxvv[4] = -o._orb.vxvv[4] # depends on [control=['for'], data=['o']] # depends on [control=['if'], data=[]] [o.integrate(self._tsJ, pot=self._pot, method=self._integrate_method, dt=self._integrate_dt) for o in os] if _firstFlip: for o in os: o._orb.vxvv[1] = -o._orb.vxvv[1] o._orb.vxvv[2] = -o._orb.vxvv[2] o._orb.vxvv[4] = -o._orb.vxvv[4] o._orb.orbit[:, 1] = -o._orb.orbit[:, 1] o._orb.orbit[:, 2] = -o._orb.orbit[:, 2] o._orb.orbit[:, 4] = -o._orb.orbit[:, 4] # depends on [control=['for'], data=['o']] # depends on [control=['if'], data=[]] integrated = False # depends on [control=['if'], data=[]] ntJ = os[0].getOrbit().shape[0] no = len(os) R = nu.empty((no, ntJ)) vR = nu.empty((no, ntJ)) vT = nu.empty((no, ntJ)) z = nu.zeros((no, ntJ)) + 10.0 ** (-7.0) #To avoid numpy warnings for vz = nu.zeros((no, ntJ)) + 10.0 ** (-7.0) #planarOrbits phi = nu.empty((no, ntJ)) for ii in range(len(os)): this_orbit = os[ii].getOrbit() R[ii, :] = this_orbit[:, 0] vR[ii, :] = this_orbit[:, 1] vT[ii, :] = this_orbit[:, 2] if this_orbit.shape[1] == 6: z[ii, :] = this_orbit[:, 3] vz[ii, :] = this_orbit[:, 4] phi[ii, :] = this_orbit[:, 5] # depends on [control=['if'], data=[]] else: phi[ii, :] = this_orbit[:, 3] # depends on [control=['for'], data=['ii']] # depends on [control=['if'], data=[]] if freqsAngles and (not integrated): #also integrate backwards in time, such that the requested point is not at the edge no = R.shape[0] nt = R.shape[1] oR = nu.empty((no, 2 * nt - 1)) ovR = nu.empty((no, 2 * nt - 1)) ovT = nu.empty((no, 2 * nt - 1)) oz = nu.zeros((no, 2 * nt - 1)) + 10.0 ** (-7.0) #To avoid numpy warnings for ovz = nu.zeros((no, 2 * nt - 1)) + 10.0 ** (-7.0) #planarOrbits ophi = nu.empty((no, 2 * nt - 1)) if _firstFlip: oR[:, :nt] = R[:, ::-1] ovR[:, :nt] = vR[:, ::-1] ovT[:, :nt] = vT[:, ::-1] oz[:, :nt] = z[:, ::-1] ovz[:, :nt] = vz[:, ::-1] ophi[:, :nt] = phi[:, ::-1] # depends on [control=['if'], data=[]] else: oR[:, nt - 1:] = R ovR[:, nt - 1:] = vR ovT[:, nt - 1:] = vT oz[:, nt - 1:] = z ovz[:, nt - 1:] = vz ophi[:, nt - 1:] = phi #load orbits if _firstFlip: os = [Orbit([R[ii, 0], vR[ii, 0], vT[ii, 0], z[ii, 0], vz[ii, 0], phi[ii, 0]]) for ii in range(R.shape[0])] # depends on [control=['if'], data=[]] else: os = [Orbit([R[ii, 0], -vR[ii, 0], -vT[ii, 0], z[ii, 0], -vz[ii, 0], phi[ii, 0]]) for ii in range(R.shape[0])] #integrate orbits [o.integrate(self._tsJ, pot=self._pot, method=self._integrate_method, dt=self._integrate_dt) for o in os] #extract phase-space points along the orbit ts = self._tsJ if _firstFlip: for ii in range(no): oR[ii, nt:] = os[ii].R(ts[1:]) #drop t=0, which we have ovR[ii, nt:] = os[ii].vR(ts[1:]) #already ovT[ii, nt:] = os[ii].vT(ts[1:]) # reverse, such that if os[ii].getOrbit().shape[1] == 6: oz[ii, nt:] = os[ii].z(ts[1:]) #everything is in the ovz[ii, nt:] = os[ii].vz(ts[1:]) #right order # depends on [control=['if'], data=[]] ophi[ii, nt:] = os[ii].phi(ts[1:]) #! # depends on [control=['for'], data=['ii']] # depends on [control=['if'], data=[]] else: for ii in range(no): oR[ii, :nt - 1] = os[ii].R(ts[1:])[::-1] #drop t=0, which we have ovR[ii, :nt - 1] = -os[ii].vR(ts[1:])[::-1] #already ovT[ii, :nt - 1] = -os[ii].vT(ts[1:])[::-1] # reverse, such that if os[ii].getOrbit().shape[1] == 6: oz[ii, :nt - 1] = os[ii].z(ts[1:])[::-1] #everything is in the ovz[ii, :nt - 1] = -os[ii].vz(ts[1:])[::-1] #right order # depends on [control=['if'], data=[]] ophi[ii, :nt - 1] = os[ii].phi(ts[1:])[::-1] #! # depends on [control=['for'], data=['ii']] return (oR, ovR, ovT, oz, ovz, ophi) # depends on [control=['if'], data=[]] else: return (R, vR, vT, z, vz, phi)
def pause(self, campaign_id): """ Pause an RSS-Driven campaign. :param campaign_id: The unique id for the campaign. :type campaign_id: :py:class:`str` """ self.campaign_id = campaign_id return self._mc_client._post(url=self._build_path(campaign_id, 'actions/pause'))
def function[pause, parameter[self, campaign_id]]: constant[ Pause an RSS-Driven campaign. :param campaign_id: The unique id for the campaign. :type campaign_id: :py:class:`str` ] name[self].campaign_id assign[=] name[campaign_id] return[call[name[self]._mc_client._post, parameter[]]]
keyword[def] identifier[pause] ( identifier[self] , identifier[campaign_id] ): literal[string] identifier[self] . identifier[campaign_id] = identifier[campaign_id] keyword[return] identifier[self] . identifier[_mc_client] . identifier[_post] ( identifier[url] = identifier[self] . identifier[_build_path] ( identifier[campaign_id] , literal[string] ))
def pause(self, campaign_id): """ Pause an RSS-Driven campaign. :param campaign_id: The unique id for the campaign. :type campaign_id: :py:class:`str` """ self.campaign_id = campaign_id return self._mc_client._post(url=self._build_path(campaign_id, 'actions/pause'))
def sha256(message, encoder=nacl.encoding.HexEncoder): """ Hashes ``message`` with SHA256. :param message: The message to hash. :type message: bytes :param encoder: A class that is able to encode the hashed message. :returns: The hashed message. :rtype: bytes """ return encoder.encode(nacl.bindings.crypto_hash_sha256(message))
def function[sha256, parameter[message, encoder]]: constant[ Hashes ``message`` with SHA256. :param message: The message to hash. :type message: bytes :param encoder: A class that is able to encode the hashed message. :returns: The hashed message. :rtype: bytes ] return[call[name[encoder].encode, parameter[call[name[nacl].bindings.crypto_hash_sha256, parameter[name[message]]]]]]
keyword[def] identifier[sha256] ( identifier[message] , identifier[encoder] = identifier[nacl] . identifier[encoding] . identifier[HexEncoder] ): literal[string] keyword[return] identifier[encoder] . identifier[encode] ( identifier[nacl] . identifier[bindings] . identifier[crypto_hash_sha256] ( identifier[message] ))
def sha256(message, encoder=nacl.encoding.HexEncoder): """ Hashes ``message`` with SHA256. :param message: The message to hash. :type message: bytes :param encoder: A class that is able to encode the hashed message. :returns: The hashed message. :rtype: bytes """ return encoder.encode(nacl.bindings.crypto_hash_sha256(message))
def _interp_kernel_ft(norm_freqs, interp): """Scaled FT of a one-dimensional interpolation kernel. For normalized frequencies ``-1/2 <= xi <= 1/2``, this function returns:: sinc(pi * xi)**k / sqrt(2 * pi) where ``k=1`` for 'nearest' and ``k=2`` for 'linear' interpolation. Parameters ---------- norm_freqs : `numpy.ndarray` Normalized frequencies between -1/2 and 1/2 interp : {'nearest', 'linear'} Type of interpolation kernel Returns ------- ker_ft : `numpy.ndarray` Values of the kernel FT at the given frequencies """ # Numpy's sinc(x) is equal to the 'math' sinc(pi * x) ker_ft = np.sinc(norm_freqs) interp_ = str(interp).lower() if interp_ == 'nearest': pass elif interp_ == 'linear': ker_ft *= ker_ft else: raise ValueError("`interp` '{}' not understood".format(interp)) ker_ft /= np.sqrt(2 * np.pi) return ker_ft
def function[_interp_kernel_ft, parameter[norm_freqs, interp]]: constant[Scaled FT of a one-dimensional interpolation kernel. For normalized frequencies ``-1/2 <= xi <= 1/2``, this function returns:: sinc(pi * xi)**k / sqrt(2 * pi) where ``k=1`` for 'nearest' and ``k=2`` for 'linear' interpolation. Parameters ---------- norm_freqs : `numpy.ndarray` Normalized frequencies between -1/2 and 1/2 interp : {'nearest', 'linear'} Type of interpolation kernel Returns ------- ker_ft : `numpy.ndarray` Values of the kernel FT at the given frequencies ] variable[ker_ft] assign[=] call[name[np].sinc, parameter[name[norm_freqs]]] variable[interp_] assign[=] call[call[name[str], parameter[name[interp]]].lower, parameter[]] if compare[name[interp_] equal[==] constant[nearest]] begin[:] pass <ast.AugAssign object at 0x7da1b1e5e500> return[name[ker_ft]]
keyword[def] identifier[_interp_kernel_ft] ( identifier[norm_freqs] , identifier[interp] ): literal[string] identifier[ker_ft] = identifier[np] . identifier[sinc] ( identifier[norm_freqs] ) identifier[interp_] = identifier[str] ( identifier[interp] ). identifier[lower] () keyword[if] identifier[interp_] == literal[string] : keyword[pass] keyword[elif] identifier[interp_] == literal[string] : identifier[ker_ft] *= identifier[ker_ft] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[interp] )) identifier[ker_ft] /= identifier[np] . identifier[sqrt] ( literal[int] * identifier[np] . identifier[pi] ) keyword[return] identifier[ker_ft]
def _interp_kernel_ft(norm_freqs, interp): """Scaled FT of a one-dimensional interpolation kernel. For normalized frequencies ``-1/2 <= xi <= 1/2``, this function returns:: sinc(pi * xi)**k / sqrt(2 * pi) where ``k=1`` for 'nearest' and ``k=2`` for 'linear' interpolation. Parameters ---------- norm_freqs : `numpy.ndarray` Normalized frequencies between -1/2 and 1/2 interp : {'nearest', 'linear'} Type of interpolation kernel Returns ------- ker_ft : `numpy.ndarray` Values of the kernel FT at the given frequencies """ # Numpy's sinc(x) is equal to the 'math' sinc(pi * x) ker_ft = np.sinc(norm_freqs) interp_ = str(interp).lower() if interp_ == 'nearest': pass # depends on [control=['if'], data=[]] elif interp_ == 'linear': ker_ft *= ker_ft # depends on [control=['if'], data=[]] else: raise ValueError("`interp` '{}' not understood".format(interp)) ker_ft /= np.sqrt(2 * np.pi) return ker_ft
def QRatio(s1, s2, force_ascii=True, full_process=True): """ Quick ratio comparison between two strings. Runs full_process from utils on both strings Short circuits if either of the strings is empty after processing. :param s1: :param s2: :param force_ascii: Allow only ASCII characters (Default: True) :full_process: Process inputs, used here to avoid double processing in extract functions (Default: True) :return: similarity ratio """ if full_process: p1 = utils.full_process(s1, force_ascii=force_ascii) p2 = utils.full_process(s2, force_ascii=force_ascii) else: p1 = s1 p2 = s2 if not utils.validate_string(p1): return 0 if not utils.validate_string(p2): return 0 return ratio(p1, p2)
def function[QRatio, parameter[s1, s2, force_ascii, full_process]]: constant[ Quick ratio comparison between two strings. Runs full_process from utils on both strings Short circuits if either of the strings is empty after processing. :param s1: :param s2: :param force_ascii: Allow only ASCII characters (Default: True) :full_process: Process inputs, used here to avoid double processing in extract functions (Default: True) :return: similarity ratio ] if name[full_process] begin[:] variable[p1] assign[=] call[name[utils].full_process, parameter[name[s1]]] variable[p2] assign[=] call[name[utils].full_process, parameter[name[s2]]] if <ast.UnaryOp object at 0x7da1b1c49750> begin[:] return[constant[0]] if <ast.UnaryOp object at 0x7da1b1c4a440> begin[:] return[constant[0]] return[call[name[ratio], parameter[name[p1], name[p2]]]]
keyword[def] identifier[QRatio] ( identifier[s1] , identifier[s2] , identifier[force_ascii] = keyword[True] , identifier[full_process] = keyword[True] ): literal[string] keyword[if] identifier[full_process] : identifier[p1] = identifier[utils] . identifier[full_process] ( identifier[s1] , identifier[force_ascii] = identifier[force_ascii] ) identifier[p2] = identifier[utils] . identifier[full_process] ( identifier[s2] , identifier[force_ascii] = identifier[force_ascii] ) keyword[else] : identifier[p1] = identifier[s1] identifier[p2] = identifier[s2] keyword[if] keyword[not] identifier[utils] . identifier[validate_string] ( identifier[p1] ): keyword[return] literal[int] keyword[if] keyword[not] identifier[utils] . identifier[validate_string] ( identifier[p2] ): keyword[return] literal[int] keyword[return] identifier[ratio] ( identifier[p1] , identifier[p2] )
def QRatio(s1, s2, force_ascii=True, full_process=True): """ Quick ratio comparison between two strings. Runs full_process from utils on both strings Short circuits if either of the strings is empty after processing. :param s1: :param s2: :param force_ascii: Allow only ASCII characters (Default: True) :full_process: Process inputs, used here to avoid double processing in extract functions (Default: True) :return: similarity ratio """ if full_process: p1 = utils.full_process(s1, force_ascii=force_ascii) p2 = utils.full_process(s2, force_ascii=force_ascii) # depends on [control=['if'], data=[]] else: p1 = s1 p2 = s2 if not utils.validate_string(p1): return 0 # depends on [control=['if'], data=[]] if not utils.validate_string(p2): return 0 # depends on [control=['if'], data=[]] return ratio(p1, p2)
def dump_json(obj): """Dump Python object as JSON string.""" return simplejson.dumps(obj, ignore_nan=True, default=json_util.default)
def function[dump_json, parameter[obj]]: constant[Dump Python object as JSON string.] return[call[name[simplejson].dumps, parameter[name[obj]]]]
keyword[def] identifier[dump_json] ( identifier[obj] ): literal[string] keyword[return] identifier[simplejson] . identifier[dumps] ( identifier[obj] , identifier[ignore_nan] = keyword[True] , identifier[default] = identifier[json_util] . identifier[default] )
def dump_json(obj): """Dump Python object as JSON string.""" return simplejson.dumps(obj, ignore_nan=True, default=json_util.default)
def domain_create(hypervisor, identifier, configuration, disk_path): """libvirt Domain definition. @raise: ConfigError, IOError, libvirt.libvirtError. """ with open(configuration['configuration']) as config_file: domain_config = config_file.read() xml = domain_xml(identifier, domain_config, disk_path) return hypervisor.defineXML(xml)
def function[domain_create, parameter[hypervisor, identifier, configuration, disk_path]]: constant[libvirt Domain definition. @raise: ConfigError, IOError, libvirt.libvirtError. ] with call[name[open], parameter[call[name[configuration]][constant[configuration]]]] begin[:] variable[domain_config] assign[=] call[name[config_file].read, parameter[]] variable[xml] assign[=] call[name[domain_xml], parameter[name[identifier], name[domain_config], name[disk_path]]] return[call[name[hypervisor].defineXML, parameter[name[xml]]]]
keyword[def] identifier[domain_create] ( identifier[hypervisor] , identifier[identifier] , identifier[configuration] , identifier[disk_path] ): literal[string] keyword[with] identifier[open] ( identifier[configuration] [ literal[string] ]) keyword[as] identifier[config_file] : identifier[domain_config] = identifier[config_file] . identifier[read] () identifier[xml] = identifier[domain_xml] ( identifier[identifier] , identifier[domain_config] , identifier[disk_path] ) keyword[return] identifier[hypervisor] . identifier[defineXML] ( identifier[xml] )
def domain_create(hypervisor, identifier, configuration, disk_path): """libvirt Domain definition. @raise: ConfigError, IOError, libvirt.libvirtError. """ with open(configuration['configuration']) as config_file: domain_config = config_file.read() # depends on [control=['with'], data=['config_file']] xml = domain_xml(identifier, domain_config, disk_path) return hypervisor.defineXML(xml)
def parse_binary_descriptor(bindata): """Convert a binary node descriptor into a string descriptor. Binary node descriptor are 20-byte binary structures that encode all information needed to create a graph node. They are used to communicate that information to an embedded device in an efficent format. This function exists to turn such a compressed node description back into an understandable string. Args: bindata (bytes): The raw binary structure that contains the node description. Returns: str: The corresponding string description of the same sensor_graph node """ func_names = {0: 'copy_latest_a', 1: 'average_a', 2: 'copy_all_a', 3: 'sum_a', 4: 'copy_count_a', 5: 'trigger_streamer', 6: 'call_rpc', 7: 'subtract_afromb'} if len(bindata) != 20: raise ArgumentError("Invalid binary node descriptor with incorrect size", size=len(bindata), expected=20, bindata=bindata) a_trig, b_trig, stream_id, a_id, b_id, proc, a_cond, b_cond, trig_combiner = struct.unpack("<LLHHHBBBB2x", bindata) node_stream = DataStream.FromEncoded(stream_id) if a_id == 0xFFFF: raise ArgumentError("Invalid binary node descriptor with invalid first input", input_selector=a_id) a_selector = DataStreamSelector.FromEncoded(a_id) a_trigger = _process_binary_trigger(a_trig, a_cond) b_selector = None b_trigger = None if b_id != 0xFFFF: b_selector = DataStreamSelector.FromEncoded(b_id) b_trigger = _process_binary_trigger(b_trig, b_cond) if trig_combiner == SGNode.AndTriggerCombiner: comb = '&&' elif trig_combiner == SGNode.OrTriggerCombiner: comb = '||' else: raise ArgumentError("Invalid trigger combiner in binary node descriptor", combiner=trig_combiner) if proc not in func_names: raise ArgumentError("Unknown processing function", function_id=proc, known_functions=func_names) func_name = func_names[proc] # Handle one input nodes if b_selector is None: return '({} {}) => {} using {}'.format(a_selector, a_trigger, node_stream, func_name) return '({} {} {} {} {}) => {} using {}'.format(a_selector, a_trigger, comb, b_selector, b_trigger, node_stream, func_name)
def function[parse_binary_descriptor, parameter[bindata]]: constant[Convert a binary node descriptor into a string descriptor. Binary node descriptor are 20-byte binary structures that encode all information needed to create a graph node. They are used to communicate that information to an embedded device in an efficent format. This function exists to turn such a compressed node description back into an understandable string. Args: bindata (bytes): The raw binary structure that contains the node description. Returns: str: The corresponding string description of the same sensor_graph node ] variable[func_names] assign[=] dictionary[[<ast.Constant object at 0x7da18f00ded0>, <ast.Constant object at 0x7da18f00df00>, <ast.Constant object at 0x7da18f00f010>, <ast.Constant object at 0x7da18f00eec0>, <ast.Constant object at 0x7da18f00ed10>, <ast.Constant object at 0x7da18f00d7e0>, <ast.Constant object at 0x7da18f00ecb0>, <ast.Constant object at 0x7da18f00c970>], [<ast.Constant object at 0x7da18f00d300>, <ast.Constant object at 0x7da18f00e950>, <ast.Constant object at 0x7da18f00c130>, <ast.Constant object at 0x7da18f00d7b0>, <ast.Constant object at 0x7da18f00fc70>, <ast.Constant object at 0x7da18f00d8a0>, <ast.Constant object at 0x7da18f00fb50>, <ast.Constant object at 0x7da18f00c730>]] if compare[call[name[len], parameter[name[bindata]]] not_equal[!=] constant[20]] begin[:] <ast.Raise object at 0x7da18f00cd30> <ast.Tuple object at 0x7da18f00c6a0> assign[=] call[name[struct].unpack, parameter[constant[<LLHHHBBBB2x], name[bindata]]] variable[node_stream] assign[=] call[name[DataStream].FromEncoded, parameter[name[stream_id]]] if compare[name[a_id] equal[==] constant[65535]] begin[:] <ast.Raise object at 0x7da18f00efe0> variable[a_selector] assign[=] call[name[DataStreamSelector].FromEncoded, parameter[name[a_id]]] variable[a_trigger] assign[=] call[name[_process_binary_trigger], parameter[name[a_trig], name[a_cond]]] variable[b_selector] assign[=] constant[None] variable[b_trigger] assign[=] constant[None] if compare[name[b_id] not_equal[!=] constant[65535]] begin[:] variable[b_selector] assign[=] call[name[DataStreamSelector].FromEncoded, parameter[name[b_id]]] variable[b_trigger] assign[=] call[name[_process_binary_trigger], parameter[name[b_trig], name[b_cond]]] if compare[name[trig_combiner] equal[==] name[SGNode].AndTriggerCombiner] begin[:] variable[comb] assign[=] constant[&&] if compare[name[proc] <ast.NotIn object at 0x7da2590d7190> name[func_names]] begin[:] <ast.Raise object at 0x7da18f00e4d0> variable[func_name] assign[=] call[name[func_names]][name[proc]] if compare[name[b_selector] is constant[None]] begin[:] return[call[constant[({} {}) => {} using {}].format, parameter[name[a_selector], name[a_trigger], name[node_stream], name[func_name]]]] return[call[constant[({} {} {} {} {}) => {} using {}].format, parameter[name[a_selector], name[a_trigger], name[comb], name[b_selector], name[b_trigger], name[node_stream], name[func_name]]]]
keyword[def] identifier[parse_binary_descriptor] ( identifier[bindata] ): literal[string] identifier[func_names] ={ literal[int] : literal[string] , literal[int] : literal[string] , literal[int] : literal[string] , literal[int] : literal[string] , literal[int] : literal[string] , literal[int] : literal[string] , literal[int] : literal[string] , literal[int] : literal[string] } keyword[if] identifier[len] ( identifier[bindata] )!= literal[int] : keyword[raise] identifier[ArgumentError] ( literal[string] , identifier[size] = identifier[len] ( identifier[bindata] ), identifier[expected] = literal[int] , identifier[bindata] = identifier[bindata] ) identifier[a_trig] , identifier[b_trig] , identifier[stream_id] , identifier[a_id] , identifier[b_id] , identifier[proc] , identifier[a_cond] , identifier[b_cond] , identifier[trig_combiner] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[bindata] ) identifier[node_stream] = identifier[DataStream] . identifier[FromEncoded] ( identifier[stream_id] ) keyword[if] identifier[a_id] == literal[int] : keyword[raise] identifier[ArgumentError] ( literal[string] , identifier[input_selector] = identifier[a_id] ) identifier[a_selector] = identifier[DataStreamSelector] . identifier[FromEncoded] ( identifier[a_id] ) identifier[a_trigger] = identifier[_process_binary_trigger] ( identifier[a_trig] , identifier[a_cond] ) identifier[b_selector] = keyword[None] identifier[b_trigger] = keyword[None] keyword[if] identifier[b_id] != literal[int] : identifier[b_selector] = identifier[DataStreamSelector] . identifier[FromEncoded] ( identifier[b_id] ) identifier[b_trigger] = identifier[_process_binary_trigger] ( identifier[b_trig] , identifier[b_cond] ) keyword[if] identifier[trig_combiner] == identifier[SGNode] . identifier[AndTriggerCombiner] : identifier[comb] = literal[string] keyword[elif] identifier[trig_combiner] == identifier[SGNode] . identifier[OrTriggerCombiner] : identifier[comb] = literal[string] keyword[else] : keyword[raise] identifier[ArgumentError] ( literal[string] , identifier[combiner] = identifier[trig_combiner] ) keyword[if] identifier[proc] keyword[not] keyword[in] identifier[func_names] : keyword[raise] identifier[ArgumentError] ( literal[string] , identifier[function_id] = identifier[proc] , identifier[known_functions] = identifier[func_names] ) identifier[func_name] = identifier[func_names] [ identifier[proc] ] keyword[if] identifier[b_selector] keyword[is] keyword[None] : keyword[return] literal[string] . identifier[format] ( identifier[a_selector] , identifier[a_trigger] , identifier[node_stream] , identifier[func_name] ) keyword[return] literal[string] . identifier[format] ( identifier[a_selector] , identifier[a_trigger] , identifier[comb] , identifier[b_selector] , identifier[b_trigger] , identifier[node_stream] , identifier[func_name] )
def parse_binary_descriptor(bindata): """Convert a binary node descriptor into a string descriptor. Binary node descriptor are 20-byte binary structures that encode all information needed to create a graph node. They are used to communicate that information to an embedded device in an efficent format. This function exists to turn such a compressed node description back into an understandable string. Args: bindata (bytes): The raw binary structure that contains the node description. Returns: str: The corresponding string description of the same sensor_graph node """ func_names = {0: 'copy_latest_a', 1: 'average_a', 2: 'copy_all_a', 3: 'sum_a', 4: 'copy_count_a', 5: 'trigger_streamer', 6: 'call_rpc', 7: 'subtract_afromb'} if len(bindata) != 20: raise ArgumentError('Invalid binary node descriptor with incorrect size', size=len(bindata), expected=20, bindata=bindata) # depends on [control=['if'], data=[]] (a_trig, b_trig, stream_id, a_id, b_id, proc, a_cond, b_cond, trig_combiner) = struct.unpack('<LLHHHBBBB2x', bindata) node_stream = DataStream.FromEncoded(stream_id) if a_id == 65535: raise ArgumentError('Invalid binary node descriptor with invalid first input', input_selector=a_id) # depends on [control=['if'], data=['a_id']] a_selector = DataStreamSelector.FromEncoded(a_id) a_trigger = _process_binary_trigger(a_trig, a_cond) b_selector = None b_trigger = None if b_id != 65535: b_selector = DataStreamSelector.FromEncoded(b_id) b_trigger = _process_binary_trigger(b_trig, b_cond) # depends on [control=['if'], data=['b_id']] if trig_combiner == SGNode.AndTriggerCombiner: comb = '&&' # depends on [control=['if'], data=[]] elif trig_combiner == SGNode.OrTriggerCombiner: comb = '||' # depends on [control=['if'], data=[]] else: raise ArgumentError('Invalid trigger combiner in binary node descriptor', combiner=trig_combiner) if proc not in func_names: raise ArgumentError('Unknown processing function', function_id=proc, known_functions=func_names) # depends on [control=['if'], data=['proc', 'func_names']] func_name = func_names[proc] # Handle one input nodes if b_selector is None: return '({} {}) => {} using {}'.format(a_selector, a_trigger, node_stream, func_name) # depends on [control=['if'], data=[]] return '({} {} {} {} {}) => {} using {}'.format(a_selector, a_trigger, comb, b_selector, b_trigger, node_stream, func_name)
def get_params(self): """Get parameters from this object """ params = super().get_params() params.update({'beta': self.beta, 'knn': self.knn, 'decay': self.decay, 'bandwidth': self.bandwidth, 'distance': self.distance, 'thresh': self.thresh, 'n_jobs': self.n_jobs}) return params
def function[get_params, parameter[self]]: constant[Get parameters from this object ] variable[params] assign[=] call[call[name[super], parameter[]].get_params, parameter[]] call[name[params].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0c8afe0>, <ast.Constant object at 0x7da1b0c89c00>, <ast.Constant object at 0x7da1b0c8bc40>, <ast.Constant object at 0x7da1b0c885e0>, <ast.Constant object at 0x7da1b0c8bfd0>, <ast.Constant object at 0x7da1b0c8b8b0>, <ast.Constant object at 0x7da1b0c8b790>], [<ast.Attribute object at 0x7da1b0c8b580>, <ast.Attribute object at 0x7da1b0c8bac0>, <ast.Attribute object at 0x7da1b0c8b6a0>, <ast.Attribute object at 0x7da1b0c8af20>, <ast.Attribute object at 0x7da1b0c89c60>, <ast.Attribute object at 0x7da1b0c8a710>, <ast.Attribute object at 0x7da1b0c8bb50>]]]] return[name[params]]
keyword[def] identifier[get_params] ( identifier[self] ): literal[string] identifier[params] = identifier[super] (). identifier[get_params] () identifier[params] . identifier[update] ({ literal[string] : identifier[self] . identifier[beta] , literal[string] : identifier[self] . identifier[knn] , literal[string] : identifier[self] . identifier[decay] , literal[string] : identifier[self] . identifier[bandwidth] , literal[string] : identifier[self] . identifier[distance] , literal[string] : identifier[self] . identifier[thresh] , literal[string] : identifier[self] . identifier[n_jobs] }) keyword[return] identifier[params]
def get_params(self): """Get parameters from this object """ params = super().get_params() params.update({'beta': self.beta, 'knn': self.knn, 'decay': self.decay, 'bandwidth': self.bandwidth, 'distance': self.distance, 'thresh': self.thresh, 'n_jobs': self.n_jobs}) return params
def run(wrapped): """ Special decorator encapsulating query method. """ @wraps(wrapped) def _run(self, query, bindings=None, *args, **kwargs): self._reconnect_if_missing_connection() start = time.time() try: result = wrapped(self, query, bindings, *args, **kwargs) except Exception as e: result = self._try_again_if_caused_by_lost_connection( e, query, bindings, wrapped ) t = self._get_elapsed_time(start) self.log_query(query, bindings, t) return result return _run
def function[run, parameter[wrapped]]: constant[ Special decorator encapsulating query method. ] def function[_run, parameter[self, query, bindings]]: call[name[self]._reconnect_if_missing_connection, parameter[]] variable[start] assign[=] call[name[time].time, parameter[]] <ast.Try object at 0x7da18eb575b0> variable[t] assign[=] call[name[self]._get_elapsed_time, parameter[name[start]]] call[name[self].log_query, parameter[name[query], name[bindings], name[t]]] return[name[result]] return[name[_run]]
keyword[def] identifier[run] ( identifier[wrapped] ): literal[string] @ identifier[wraps] ( identifier[wrapped] ) keyword[def] identifier[_run] ( identifier[self] , identifier[query] , identifier[bindings] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ): identifier[self] . identifier[_reconnect_if_missing_connection] () identifier[start] = identifier[time] . identifier[time] () keyword[try] : identifier[result] = identifier[wrapped] ( identifier[self] , identifier[query] , identifier[bindings] ,* identifier[args] ,** identifier[kwargs] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[result] = identifier[self] . identifier[_try_again_if_caused_by_lost_connection] ( identifier[e] , identifier[query] , identifier[bindings] , identifier[wrapped] ) identifier[t] = identifier[self] . identifier[_get_elapsed_time] ( identifier[start] ) identifier[self] . identifier[log_query] ( identifier[query] , identifier[bindings] , identifier[t] ) keyword[return] identifier[result] keyword[return] identifier[_run]
def run(wrapped): """ Special decorator encapsulating query method. """ @wraps(wrapped) def _run(self, query, bindings=None, *args, **kwargs): self._reconnect_if_missing_connection() start = time.time() try: result = wrapped(self, query, bindings, *args, **kwargs) # depends on [control=['try'], data=[]] except Exception as e: result = self._try_again_if_caused_by_lost_connection(e, query, bindings, wrapped) # depends on [control=['except'], data=['e']] t = self._get_elapsed_time(start) self.log_query(query, bindings, t) return result return _run
def GetModuleBaseNameFromWSDL(wsdl): """By default try to construct a reasonable base name for all generated modules. Otherwise return None. """ base_name = wsdl.name or wsdl.services[0].name base_name = SplitQName(base_name)[1] if base_name is None: return None return NCName_to_ModuleName(base_name)
def function[GetModuleBaseNameFromWSDL, parameter[wsdl]]: constant[By default try to construct a reasonable base name for all generated modules. Otherwise return None. ] variable[base_name] assign[=] <ast.BoolOp object at 0x7da1b1301ea0> variable[base_name] assign[=] call[call[name[SplitQName], parameter[name[base_name]]]][constant[1]] if compare[name[base_name] is constant[None]] begin[:] return[constant[None]] return[call[name[NCName_to_ModuleName], parameter[name[base_name]]]]
keyword[def] identifier[GetModuleBaseNameFromWSDL] ( identifier[wsdl] ): literal[string] identifier[base_name] = identifier[wsdl] . identifier[name] keyword[or] identifier[wsdl] . identifier[services] [ literal[int] ]. identifier[name] identifier[base_name] = identifier[SplitQName] ( identifier[base_name] )[ literal[int] ] keyword[if] identifier[base_name] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[return] identifier[NCName_to_ModuleName] ( identifier[base_name] )
def GetModuleBaseNameFromWSDL(wsdl): """By default try to construct a reasonable base name for all generated modules. Otherwise return None. """ base_name = wsdl.name or wsdl.services[0].name base_name = SplitQName(base_name)[1] if base_name is None: return None # depends on [control=['if'], data=[]] return NCName_to_ModuleName(base_name)
def diff(s1, s2): ''' --word-diff=porcelain clone''' delta = difflib.Differ().compare(s1.split(), s2.split()) difflist = [] fullline = '' for line in delta: if line[0] == '?': continue elif line[0] == ' ': fullline += line.strip() + ' ' else: if fullline: difflist.append(fullline[:-1]) fullline = '' difflist.append(line) if fullline: difflist.append(fullline[:-1]) return [l[:] for l in '\n'.join(difflist).splitlines() if l]
def function[diff, parameter[s1, s2]]: constant[ --word-diff=porcelain clone] variable[delta] assign[=] call[call[name[difflib].Differ, parameter[]].compare, parameter[call[name[s1].split, parameter[]], call[name[s2].split, parameter[]]]] variable[difflist] assign[=] list[[]] variable[fullline] assign[=] constant[] for taget[name[line]] in starred[name[delta]] begin[:] if compare[call[name[line]][constant[0]] equal[==] constant[?]] begin[:] continue if name[fullline] begin[:] call[name[difflist].append, parameter[call[name[fullline]][<ast.Slice object at 0x7da1b1ad9d50>]]] return[<ast.ListComp object at 0x7da1b1ad9e10>]
keyword[def] identifier[diff] ( identifier[s1] , identifier[s2] ): literal[string] identifier[delta] = identifier[difflib] . identifier[Differ] (). identifier[compare] ( identifier[s1] . identifier[split] (), identifier[s2] . identifier[split] ()) identifier[difflist] =[] identifier[fullline] = literal[string] keyword[for] identifier[line] keyword[in] identifier[delta] : keyword[if] identifier[line] [ literal[int] ]== literal[string] : keyword[continue] keyword[elif] identifier[line] [ literal[int] ]== literal[string] : identifier[fullline] += identifier[line] . identifier[strip] ()+ literal[string] keyword[else] : keyword[if] identifier[fullline] : identifier[difflist] . identifier[append] ( identifier[fullline] [:- literal[int] ]) identifier[fullline] = literal[string] identifier[difflist] . identifier[append] ( identifier[line] ) keyword[if] identifier[fullline] : identifier[difflist] . identifier[append] ( identifier[fullline] [:- literal[int] ]) keyword[return] [ identifier[l] [:] keyword[for] identifier[l] keyword[in] literal[string] . identifier[join] ( identifier[difflist] ). identifier[splitlines] () keyword[if] identifier[l] ]
def diff(s1, s2): """ --word-diff=porcelain clone""" delta = difflib.Differ().compare(s1.split(), s2.split()) difflist = [] fullline = '' for line in delta: if line[0] == '?': continue # depends on [control=['if'], data=[]] elif line[0] == ' ': fullline += line.strip() + ' ' # depends on [control=['if'], data=[]] else: if fullline: difflist.append(fullline[:-1]) fullline = '' # depends on [control=['if'], data=[]] difflist.append(line) # depends on [control=['for'], data=['line']] if fullline: difflist.append(fullline[:-1]) # depends on [control=['if'], data=[]] return [l[:] for l in '\n'.join(difflist).splitlines() if l]
def load_neuron(handle, reader=None): '''Build section trees from an h5 or swc file''' rdw = load_data(handle, reader) if isinstance(handle, StringType): name = os.path.splitext(os.path.basename(handle))[0] else: name = None return FstNeuron(rdw, name)
def function[load_neuron, parameter[handle, reader]]: constant[Build section trees from an h5 or swc file] variable[rdw] assign[=] call[name[load_data], parameter[name[handle], name[reader]]] if call[name[isinstance], parameter[name[handle], name[StringType]]] begin[:] variable[name] assign[=] call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[handle]]]]]][constant[0]] return[call[name[FstNeuron], parameter[name[rdw], name[name]]]]
keyword[def] identifier[load_neuron] ( identifier[handle] , identifier[reader] = keyword[None] ): literal[string] identifier[rdw] = identifier[load_data] ( identifier[handle] , identifier[reader] ) keyword[if] identifier[isinstance] ( identifier[handle] , identifier[StringType] ): identifier[name] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[handle] ))[ literal[int] ] keyword[else] : identifier[name] = keyword[None] keyword[return] identifier[FstNeuron] ( identifier[rdw] , identifier[name] )
def load_neuron(handle, reader=None): """Build section trees from an h5 or swc file""" rdw = load_data(handle, reader) if isinstance(handle, StringType): name = os.path.splitext(os.path.basename(handle))[0] # depends on [control=['if'], data=[]] else: name = None return FstNeuron(rdw, name)
def equality(self, indexes=None, value=None): """ Math helper method. Given a column and optional indexes will return a list of booleans on the equality of the value for that index in the DataFrame to the value parameter. :param indexes: list of index values or list of booleans. If a list of booleans then the list must be the same\ length as the DataFrame :param value: value to compare :return: list of booleans """ indexes = [True] * len(self._index) if indexes is None else indexes compare_list = self.get_rows(indexes, as_list=True) return [x == value for x in compare_list]
def function[equality, parameter[self, indexes, value]]: constant[ Math helper method. Given a column and optional indexes will return a list of booleans on the equality of the value for that index in the DataFrame to the value parameter. :param indexes: list of index values or list of booleans. If a list of booleans then the list must be the same length as the DataFrame :param value: value to compare :return: list of booleans ] variable[indexes] assign[=] <ast.IfExp object at 0x7da20c7ca800> variable[compare_list] assign[=] call[name[self].get_rows, parameter[name[indexes]]] return[<ast.ListComp object at 0x7da20e9540a0>]
keyword[def] identifier[equality] ( identifier[self] , identifier[indexes] = keyword[None] , identifier[value] = keyword[None] ): literal[string] identifier[indexes] =[ keyword[True] ]* identifier[len] ( identifier[self] . identifier[_index] ) keyword[if] identifier[indexes] keyword[is] keyword[None] keyword[else] identifier[indexes] identifier[compare_list] = identifier[self] . identifier[get_rows] ( identifier[indexes] , identifier[as_list] = keyword[True] ) keyword[return] [ identifier[x] == identifier[value] keyword[for] identifier[x] keyword[in] identifier[compare_list] ]
def equality(self, indexes=None, value=None): """ Math helper method. Given a column and optional indexes will return a list of booleans on the equality of the value for that index in the DataFrame to the value parameter. :param indexes: list of index values or list of booleans. If a list of booleans then the list must be the same length as the DataFrame :param value: value to compare :return: list of booleans """ indexes = [True] * len(self._index) if indexes is None else indexes compare_list = self.get_rows(indexes, as_list=True) return [x == value for x in compare_list]
def getPlannedFor(self, plannedfor_name, projectarea_id=None, projectarea_name=None, archived=False, returned_properties=None): """Get :class:`rtcclient.models.PlannedFor` object by its name :param plannedfor_name: the plannedfor name :param projectarea_id: the :class:`rtcclient.project_area.ProjectArea` id :param projectarea_name: the project area name :param archived: (default is False) whether the plannedfor is archived :param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations :return: the :class:`rtcclient.models.PlannedFor` object :rtype: rtcclient.models.PlannedFor """ if not isinstance(plannedfor_name, six.string_types) or not plannedfor_name: excp_msg = "Please specify a valid PlannedFor name" self.log.error(excp_msg) raise exception.BadValue(excp_msg) self.log.debug("Try to get <PlannedFor %s>", plannedfor_name) rp = returned_properties plannedfors = self._getPlannedFors(projectarea_id=projectarea_id, projectarea_name=projectarea_name, archived=archived, returned_properties=rp, plannedfor_name=plannedfor_name) if plannedfors is not None: plannedfor = plannedfors[0] self.log.info("Find <PlannedFor %s>", plannedfor) return plannedfor self.log.error("No PlannedFor named %s", plannedfor_name) raise exception.NotFound("No PlannedFor named %s" % plannedfor_name)
def function[getPlannedFor, parameter[self, plannedfor_name, projectarea_id, projectarea_name, archived, returned_properties]]: constant[Get :class:`rtcclient.models.PlannedFor` object by its name :param plannedfor_name: the plannedfor name :param projectarea_id: the :class:`rtcclient.project_area.ProjectArea` id :param projectarea_name: the project area name :param archived: (default is False) whether the plannedfor is archived :param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations :return: the :class:`rtcclient.models.PlannedFor` object :rtype: rtcclient.models.PlannedFor ] if <ast.BoolOp object at 0x7da20e9630d0> begin[:] variable[excp_msg] assign[=] constant[Please specify a valid PlannedFor name] call[name[self].log.error, parameter[name[excp_msg]]] <ast.Raise object at 0x7da20e961cc0> call[name[self].log.debug, parameter[constant[Try to get <PlannedFor %s>], name[plannedfor_name]]] variable[rp] assign[=] name[returned_properties] variable[plannedfors] assign[=] call[name[self]._getPlannedFors, parameter[]] if compare[name[plannedfors] is_not constant[None]] begin[:] variable[plannedfor] assign[=] call[name[plannedfors]][constant[0]] call[name[self].log.info, parameter[constant[Find <PlannedFor %s>], name[plannedfor]]] return[name[plannedfor]] call[name[self].log.error, parameter[constant[No PlannedFor named %s], name[plannedfor_name]]] <ast.Raise object at 0x7da20c76cf10>
keyword[def] identifier[getPlannedFor] ( identifier[self] , identifier[plannedfor_name] , identifier[projectarea_id] = keyword[None] , identifier[projectarea_name] = keyword[None] , identifier[archived] = keyword[False] , identifier[returned_properties] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[plannedfor_name] , identifier[six] . identifier[string_types] ) keyword[or] keyword[not] identifier[plannedfor_name] : identifier[excp_msg] = literal[string] identifier[self] . identifier[log] . identifier[error] ( identifier[excp_msg] ) keyword[raise] identifier[exception] . identifier[BadValue] ( identifier[excp_msg] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[plannedfor_name] ) identifier[rp] = identifier[returned_properties] identifier[plannedfors] = identifier[self] . identifier[_getPlannedFors] ( identifier[projectarea_id] = identifier[projectarea_id] , identifier[projectarea_name] = identifier[projectarea_name] , identifier[archived] = identifier[archived] , identifier[returned_properties] = identifier[rp] , identifier[plannedfor_name] = identifier[plannedfor_name] ) keyword[if] identifier[plannedfors] keyword[is] keyword[not] keyword[None] : identifier[plannedfor] = identifier[plannedfors] [ literal[int] ] identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[plannedfor] ) keyword[return] identifier[plannedfor] identifier[self] . identifier[log] . identifier[error] ( literal[string] , identifier[plannedfor_name] ) keyword[raise] identifier[exception] . identifier[NotFound] ( literal[string] % identifier[plannedfor_name] )
def getPlannedFor(self, plannedfor_name, projectarea_id=None, projectarea_name=None, archived=False, returned_properties=None): """Get :class:`rtcclient.models.PlannedFor` object by its name :param plannedfor_name: the plannedfor name :param projectarea_id: the :class:`rtcclient.project_area.ProjectArea` id :param projectarea_name: the project area name :param archived: (default is False) whether the plannedfor is archived :param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations :return: the :class:`rtcclient.models.PlannedFor` object :rtype: rtcclient.models.PlannedFor """ if not isinstance(plannedfor_name, six.string_types) or not plannedfor_name: excp_msg = 'Please specify a valid PlannedFor name' self.log.error(excp_msg) raise exception.BadValue(excp_msg) # depends on [control=['if'], data=[]] self.log.debug('Try to get <PlannedFor %s>', plannedfor_name) rp = returned_properties plannedfors = self._getPlannedFors(projectarea_id=projectarea_id, projectarea_name=projectarea_name, archived=archived, returned_properties=rp, plannedfor_name=plannedfor_name) if plannedfors is not None: plannedfor = plannedfors[0] self.log.info('Find <PlannedFor %s>', plannedfor) return plannedfor # depends on [control=['if'], data=['plannedfors']] self.log.error('No PlannedFor named %s', plannedfor_name) raise exception.NotFound('No PlannedFor named %s' % plannedfor_name)
def slice_clip(filename, start, stop, n_samples, sr, mono=True): '''Slice a fragment of audio from a file. This uses pysoundfile to efficiently seek without loading the entire stream. Parameters ---------- filename : str Path to the input file start : int The sample index of `filename` at which the audio fragment should start stop : int The sample index of `filename` at which the audio fragment should stop (e.g. y = audio[start:stop]) n_samples : int > 0 The number of samples to load sr : int > 0 The target sampling rate mono : bool Ensure monophonic audio Returns ------- y : np.ndarray [shape=(n_samples,)] A fragment of audio sampled from `filename` Raises ------ ValueError If the source file is shorter than the requested length ''' with psf.SoundFile(str(filename), mode='r') as soundf: n_target = stop - start soundf.seek(start) y = soundf.read(n_target).T if mono: y = librosa.to_mono(y) # Resample to initial sr y = librosa.resample(y, soundf.samplerate, sr) # Clip to the target length exactly y = librosa.util.fix_length(y, n_samples) return y
def function[slice_clip, parameter[filename, start, stop, n_samples, sr, mono]]: constant[Slice a fragment of audio from a file. This uses pysoundfile to efficiently seek without loading the entire stream. Parameters ---------- filename : str Path to the input file start : int The sample index of `filename` at which the audio fragment should start stop : int The sample index of `filename` at which the audio fragment should stop (e.g. y = audio[start:stop]) n_samples : int > 0 The number of samples to load sr : int > 0 The target sampling rate mono : bool Ensure monophonic audio Returns ------- y : np.ndarray [shape=(n_samples,)] A fragment of audio sampled from `filename` Raises ------ ValueError If the source file is shorter than the requested length ] with call[name[psf].SoundFile, parameter[call[name[str], parameter[name[filename]]]]] begin[:] variable[n_target] assign[=] binary_operation[name[stop] - name[start]] call[name[soundf].seek, parameter[name[start]]] variable[y] assign[=] call[name[soundf].read, parameter[name[n_target]]].T if name[mono] begin[:] variable[y] assign[=] call[name[librosa].to_mono, parameter[name[y]]] variable[y] assign[=] call[name[librosa].resample, parameter[name[y], name[soundf].samplerate, name[sr]]] variable[y] assign[=] call[name[librosa].util.fix_length, parameter[name[y], name[n_samples]]] return[name[y]]
keyword[def] identifier[slice_clip] ( identifier[filename] , identifier[start] , identifier[stop] , identifier[n_samples] , identifier[sr] , identifier[mono] = keyword[True] ): literal[string] keyword[with] identifier[psf] . identifier[SoundFile] ( identifier[str] ( identifier[filename] ), identifier[mode] = literal[string] ) keyword[as] identifier[soundf] : identifier[n_target] = identifier[stop] - identifier[start] identifier[soundf] . identifier[seek] ( identifier[start] ) identifier[y] = identifier[soundf] . identifier[read] ( identifier[n_target] ). identifier[T] keyword[if] identifier[mono] : identifier[y] = identifier[librosa] . identifier[to_mono] ( identifier[y] ) identifier[y] = identifier[librosa] . identifier[resample] ( identifier[y] , identifier[soundf] . identifier[samplerate] , identifier[sr] ) identifier[y] = identifier[librosa] . identifier[util] . identifier[fix_length] ( identifier[y] , identifier[n_samples] ) keyword[return] identifier[y]
def slice_clip(filename, start, stop, n_samples, sr, mono=True): """Slice a fragment of audio from a file. This uses pysoundfile to efficiently seek without loading the entire stream. Parameters ---------- filename : str Path to the input file start : int The sample index of `filename` at which the audio fragment should start stop : int The sample index of `filename` at which the audio fragment should stop (e.g. y = audio[start:stop]) n_samples : int > 0 The number of samples to load sr : int > 0 The target sampling rate mono : bool Ensure monophonic audio Returns ------- y : np.ndarray [shape=(n_samples,)] A fragment of audio sampled from `filename` Raises ------ ValueError If the source file is shorter than the requested length """ with psf.SoundFile(str(filename), mode='r') as soundf: n_target = stop - start soundf.seek(start) y = soundf.read(n_target).T if mono: y = librosa.to_mono(y) # depends on [control=['if'], data=[]] # Resample to initial sr y = librosa.resample(y, soundf.samplerate, sr) # Clip to the target length exactly y = librosa.util.fix_length(y, n_samples) return y # depends on [control=['with'], data=['soundf']]
def RightReleaseMouse(waitTime: float = OPERATION_WAIT_TIME) -> None: """ Release right mouse. waitTime: float. """ x, y = GetCursorPos() screenWidth, screenHeight = GetScreenSize() mouse_event(MouseEventFlag.RightUp | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) time.sleep(waitTime)
def function[RightReleaseMouse, parameter[waitTime]]: constant[ Release right mouse. waitTime: float. ] <ast.Tuple object at 0x7da20c6aa9b0> assign[=] call[name[GetCursorPos], parameter[]] <ast.Tuple object at 0x7da20c6c7ee0> assign[=] call[name[GetScreenSize], parameter[]] call[name[mouse_event], parameter[binary_operation[name[MouseEventFlag].RightUp <ast.BitOr object at 0x7da2590d6aa0> name[MouseEventFlag].Absolute], binary_operation[binary_operation[name[x] * constant[65535]] <ast.FloorDiv object at 0x7da2590d6bc0> name[screenWidth]], binary_operation[binary_operation[name[y] * constant[65535]] <ast.FloorDiv object at 0x7da2590d6bc0> name[screenHeight]], constant[0], constant[0]]] call[name[time].sleep, parameter[name[waitTime]]]
keyword[def] identifier[RightReleaseMouse] ( identifier[waitTime] : identifier[float] = identifier[OPERATION_WAIT_TIME] )-> keyword[None] : literal[string] identifier[x] , identifier[y] = identifier[GetCursorPos] () identifier[screenWidth] , identifier[screenHeight] = identifier[GetScreenSize] () identifier[mouse_event] ( identifier[MouseEventFlag] . identifier[RightUp] | identifier[MouseEventFlag] . identifier[Absolute] , identifier[x] * literal[int] // identifier[screenWidth] , identifier[y] * literal[int] // identifier[screenHeight] , literal[int] , literal[int] ) identifier[time] . identifier[sleep] ( identifier[waitTime] )
def RightReleaseMouse(waitTime: float=OPERATION_WAIT_TIME) -> None: """ Release right mouse. waitTime: float. """ (x, y) = GetCursorPos() (screenWidth, screenHeight) = GetScreenSize() mouse_event(MouseEventFlag.RightUp | MouseEventFlag.Absolute, x * 65535 // screenWidth, y * 65535 // screenHeight, 0, 0) time.sleep(waitTime)
def visit_and_get_function_nodes( self, definition, first_node ): """Visits the nodes of a user defined function. Args: definition(LocalModuleDefinition): Definition of the function being added. first_node(EntryOrExitNode or None or RestoreNode): Used to connect previous statements to this function. Returns: the_new_nodes(list[Node]): The nodes added while visiting the function. first_node(EntryOrExitNode or None or RestoreNode): Used to connect previous statements to this function. """ len_before_visiting_func = len(self.nodes) previous_node = self.nodes[-1] entry_node = self.append_node(EntryOrExitNode('Function Entry ' + definition.name)) if not first_node: first_node = entry_node self.connect_if_allowed(previous_node, entry_node) function_body_connect_statements = self.stmt_star_handler(definition.node.body) entry_node.connect(function_body_connect_statements.first_statement) exit_node = self.append_node(EntryOrExitNode('Exit ' + definition.name)) exit_node.connect_predecessors(function_body_connect_statements.last_statements) the_new_nodes = self.nodes[len_before_visiting_func:] return_connection_handler(the_new_nodes, exit_node) return (the_new_nodes, first_node)
def function[visit_and_get_function_nodes, parameter[self, definition, first_node]]: constant[Visits the nodes of a user defined function. Args: definition(LocalModuleDefinition): Definition of the function being added. first_node(EntryOrExitNode or None or RestoreNode): Used to connect previous statements to this function. Returns: the_new_nodes(list[Node]): The nodes added while visiting the function. first_node(EntryOrExitNode or None or RestoreNode): Used to connect previous statements to this function. ] variable[len_before_visiting_func] assign[=] call[name[len], parameter[name[self].nodes]] variable[previous_node] assign[=] call[name[self].nodes][<ast.UnaryOp object at 0x7da1b1db43a0>] variable[entry_node] assign[=] call[name[self].append_node, parameter[call[name[EntryOrExitNode], parameter[binary_operation[constant[Function Entry ] + name[definition].name]]]]] if <ast.UnaryOp object at 0x7da1b1db4520> begin[:] variable[first_node] assign[=] name[entry_node] call[name[self].connect_if_allowed, parameter[name[previous_node], name[entry_node]]] variable[function_body_connect_statements] assign[=] call[name[self].stmt_star_handler, parameter[name[definition].node.body]] call[name[entry_node].connect, parameter[name[function_body_connect_statements].first_statement]] variable[exit_node] assign[=] call[name[self].append_node, parameter[call[name[EntryOrExitNode], parameter[binary_operation[constant[Exit ] + name[definition].name]]]]] call[name[exit_node].connect_predecessors, parameter[name[function_body_connect_statements].last_statements]] variable[the_new_nodes] assign[=] call[name[self].nodes][<ast.Slice object at 0x7da1b1da2a40>] call[name[return_connection_handler], parameter[name[the_new_nodes], name[exit_node]]] return[tuple[[<ast.Name object at 0x7da1b1da1030>, <ast.Name object at 0x7da1b1da2da0>]]]
keyword[def] identifier[visit_and_get_function_nodes] ( identifier[self] , identifier[definition] , identifier[first_node] ): literal[string] identifier[len_before_visiting_func] = identifier[len] ( identifier[self] . identifier[nodes] ) identifier[previous_node] = identifier[self] . identifier[nodes] [- literal[int] ] identifier[entry_node] = identifier[self] . identifier[append_node] ( identifier[EntryOrExitNode] ( literal[string] + identifier[definition] . identifier[name] )) keyword[if] keyword[not] identifier[first_node] : identifier[first_node] = identifier[entry_node] identifier[self] . identifier[connect_if_allowed] ( identifier[previous_node] , identifier[entry_node] ) identifier[function_body_connect_statements] = identifier[self] . identifier[stmt_star_handler] ( identifier[definition] . identifier[node] . identifier[body] ) identifier[entry_node] . identifier[connect] ( identifier[function_body_connect_statements] . identifier[first_statement] ) identifier[exit_node] = identifier[self] . identifier[append_node] ( identifier[EntryOrExitNode] ( literal[string] + identifier[definition] . identifier[name] )) identifier[exit_node] . identifier[connect_predecessors] ( identifier[function_body_connect_statements] . identifier[last_statements] ) identifier[the_new_nodes] = identifier[self] . identifier[nodes] [ identifier[len_before_visiting_func] :] identifier[return_connection_handler] ( identifier[the_new_nodes] , identifier[exit_node] ) keyword[return] ( identifier[the_new_nodes] , identifier[first_node] )
def visit_and_get_function_nodes(self, definition, first_node): """Visits the nodes of a user defined function. Args: definition(LocalModuleDefinition): Definition of the function being added. first_node(EntryOrExitNode or None or RestoreNode): Used to connect previous statements to this function. Returns: the_new_nodes(list[Node]): The nodes added while visiting the function. first_node(EntryOrExitNode or None or RestoreNode): Used to connect previous statements to this function. """ len_before_visiting_func = len(self.nodes) previous_node = self.nodes[-1] entry_node = self.append_node(EntryOrExitNode('Function Entry ' + definition.name)) if not first_node: first_node = entry_node # depends on [control=['if'], data=[]] self.connect_if_allowed(previous_node, entry_node) function_body_connect_statements = self.stmt_star_handler(definition.node.body) entry_node.connect(function_body_connect_statements.first_statement) exit_node = self.append_node(EntryOrExitNode('Exit ' + definition.name)) exit_node.connect_predecessors(function_body_connect_statements.last_statements) the_new_nodes = self.nodes[len_before_visiting_func:] return_connection_handler(the_new_nodes, exit_node) return (the_new_nodes, first_node)
def update_local_rt_nlris(self): """Does book-keeping of local RT NLRIs based on all configured VRFs. Syncs all import RTs and RT NLRIs. The method should be called when any VRFs are added/removed/changed. """ current_conf_import_rts = set() for vrf in self._vrfs_conf.vrf_confs: current_conf_import_rts.update(vrf.import_rts) removed_rts = self._all_vrfs_import_rts_set - current_conf_import_rts new_rts = current_conf_import_rts - self._all_vrfs_import_rts_set self._all_vrfs_import_rts_set = current_conf_import_rts # Add new and withdraw removed local RtNlris for new_rt in new_rts: self.add_rt_nlri(new_rt) for removed_rt in removed_rts: self.add_rt_nlri(removed_rt, is_withdraw=True)
def function[update_local_rt_nlris, parameter[self]]: constant[Does book-keeping of local RT NLRIs based on all configured VRFs. Syncs all import RTs and RT NLRIs. The method should be called when any VRFs are added/removed/changed. ] variable[current_conf_import_rts] assign[=] call[name[set], parameter[]] for taget[name[vrf]] in starred[name[self]._vrfs_conf.vrf_confs] begin[:] call[name[current_conf_import_rts].update, parameter[name[vrf].import_rts]] variable[removed_rts] assign[=] binary_operation[name[self]._all_vrfs_import_rts_set - name[current_conf_import_rts]] variable[new_rts] assign[=] binary_operation[name[current_conf_import_rts] - name[self]._all_vrfs_import_rts_set] name[self]._all_vrfs_import_rts_set assign[=] name[current_conf_import_rts] for taget[name[new_rt]] in starred[name[new_rts]] begin[:] call[name[self].add_rt_nlri, parameter[name[new_rt]]] for taget[name[removed_rt]] in starred[name[removed_rts]] begin[:] call[name[self].add_rt_nlri, parameter[name[removed_rt]]]
keyword[def] identifier[update_local_rt_nlris] ( identifier[self] ): literal[string] identifier[current_conf_import_rts] = identifier[set] () keyword[for] identifier[vrf] keyword[in] identifier[self] . identifier[_vrfs_conf] . identifier[vrf_confs] : identifier[current_conf_import_rts] . identifier[update] ( identifier[vrf] . identifier[import_rts] ) identifier[removed_rts] = identifier[self] . identifier[_all_vrfs_import_rts_set] - identifier[current_conf_import_rts] identifier[new_rts] = identifier[current_conf_import_rts] - identifier[self] . identifier[_all_vrfs_import_rts_set] identifier[self] . identifier[_all_vrfs_import_rts_set] = identifier[current_conf_import_rts] keyword[for] identifier[new_rt] keyword[in] identifier[new_rts] : identifier[self] . identifier[add_rt_nlri] ( identifier[new_rt] ) keyword[for] identifier[removed_rt] keyword[in] identifier[removed_rts] : identifier[self] . identifier[add_rt_nlri] ( identifier[removed_rt] , identifier[is_withdraw] = keyword[True] )
def update_local_rt_nlris(self): """Does book-keeping of local RT NLRIs based on all configured VRFs. Syncs all import RTs and RT NLRIs. The method should be called when any VRFs are added/removed/changed. """ current_conf_import_rts = set() for vrf in self._vrfs_conf.vrf_confs: current_conf_import_rts.update(vrf.import_rts) # depends on [control=['for'], data=['vrf']] removed_rts = self._all_vrfs_import_rts_set - current_conf_import_rts new_rts = current_conf_import_rts - self._all_vrfs_import_rts_set self._all_vrfs_import_rts_set = current_conf_import_rts # Add new and withdraw removed local RtNlris for new_rt in new_rts: self.add_rt_nlri(new_rt) # depends on [control=['for'], data=['new_rt']] for removed_rt in removed_rts: self.add_rt_nlri(removed_rt, is_withdraw=True) # depends on [control=['for'], data=['removed_rt']]
def prepare_metadata(metadata, source_metadata=None, append=False, append_list=False): """Prepare a metadata dict for an :class:`S3PreparedRequest <S3PreparedRequest>` or :class:`MetadataPreparedRequest <MetadataPreparedRequest>` object. :type metadata: dict :param metadata: The metadata dict to be prepared. :type source_metadata: dict :param source_metadata: (optional) The source metadata for the item being modified. :rtype: dict :returns: A filtered metadata dict to be used for generating IA S3 and Metadata API requests. """ # Make a deepcopy of source_metadata if it exists. A deepcopy is # necessary to avoid modifying the original dict. source_metadata = {} if not source_metadata else copy.deepcopy(source_metadata) prepared_metadata = {} # Functions for dealing with metadata keys containing indexes. def get_index(key): match = re.search(r'(?<=\[)\d+(?=\])', key) if match is not None: return int(match.group()) def rm_index(key): return key.split('[')[0] # Create indexed_keys counter dict. i.e.: {'subject': 3} -- subject # (with the index removed) appears 3 times in the metadata dict. indexed_keys = {} for key in metadata: # Convert number values to strings! if isinstance(metadata[key], (six.integer_types, float, complex)): metadata[key] = str(metadata[key]) if get_index(key) is None: continue count = len([x for x in metadata if rm_index(x) == rm_index(key)]) indexed_keys[rm_index(key)] = count # Initialize the values for all indexed_keys. for key in indexed_keys: # Increment the counter so we know how many values the final # value in prepared_metadata should have. indexed_keys[key] += len(source_metadata.get(key, [])) # Intialize the value in the prepared_metadata dict. prepared_metadata[key] = source_metadata.get(key, []) if not isinstance(prepared_metadata[key], list): prepared_metadata[key] = [prepared_metadata[key]] # Fill the value of the prepared_metadata key with None values # so all indexed items can be indexed in order. while len(prepared_metadata[key]) < indexed_keys[key]: prepared_metadata[key].append(None) # Index all items which contain an index. for key in metadata: # Insert values from indexed keys into prepared_metadata dict. if (rm_index(key) in indexed_keys): try: prepared_metadata[rm_index(key)][get_index(key)] = metadata[key] except IndexError: prepared_metadata[rm_index(key)].append(metadata[key]) # If append is True, append value to source_metadata value. elif append_list and source_metadata.get(key): if not isinstance(metadata[key], list): metadata[key] = [metadata[key]] for v in metadata[key]: if not isinstance(source_metadata[key], list): if v in [source_metadata[key]]: continue else: if v in source_metadata[key]: continue if not isinstance(source_metadata[key], list): prepared_metadata[key] = [source_metadata[key]] else: prepared_metadata[key] = source_metadata[key] prepared_metadata[key].append(v) elif append and source_metadata.get(key): prepared_metadata[key] = '{0} {1}'.format( source_metadata[key], metadata[key]) else: prepared_metadata[key] = metadata[key] # Remove values from metadata if value is REMOVE_TAG. _done = [] for key in indexed_keys: # Filter None values from items with arrays as values prepared_metadata[key] = [v for v in prepared_metadata[key] if v] # Only filter the given indexed key if it has not already been # filtered. if key not in _done: indexes = [] for k in metadata: if not get_index(k): continue elif not rm_index(k) == key: continue elif not metadata[k] == 'REMOVE_TAG': continue else: indexes.append(get_index(k)) # Delete indexed values in reverse to not throw off the # subsequent indexes. for i in sorted(indexes, reverse=True): del prepared_metadata[key][i] _done.append(key) return prepared_metadata
def function[prepare_metadata, parameter[metadata, source_metadata, append, append_list]]: constant[Prepare a metadata dict for an :class:`S3PreparedRequest <S3PreparedRequest>` or :class:`MetadataPreparedRequest <MetadataPreparedRequest>` object. :type metadata: dict :param metadata: The metadata dict to be prepared. :type source_metadata: dict :param source_metadata: (optional) The source metadata for the item being modified. :rtype: dict :returns: A filtered metadata dict to be used for generating IA S3 and Metadata API requests. ] variable[source_metadata] assign[=] <ast.IfExp object at 0x7da20c7959c0> variable[prepared_metadata] assign[=] dictionary[[], []] def function[get_index, parameter[key]]: variable[match] assign[=] call[name[re].search, parameter[constant[(?<=\[)\d+(?=\])], name[key]]] if compare[name[match] is_not constant[None]] begin[:] return[call[name[int], parameter[call[name[match].group, parameter[]]]]] def function[rm_index, parameter[key]]: return[call[call[name[key].split, parameter[constant[[]]]][constant[0]]] variable[indexed_keys] assign[=] dictionary[[], []] for taget[name[key]] in starred[name[metadata]] begin[:] if call[name[isinstance], parameter[call[name[metadata]][name[key]], tuple[[<ast.Attribute object at 0x7da20c7942b0>, <ast.Name object at 0x7da20c795b10>, <ast.Name object at 0x7da20c796620>]]]] begin[:] call[name[metadata]][name[key]] assign[=] call[name[str], parameter[call[name[metadata]][name[key]]]] if compare[call[name[get_index], parameter[name[key]]] is constant[None]] begin[:] continue variable[count] assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da2054a77c0>]] call[name[indexed_keys]][call[name[rm_index], parameter[name[key]]]] assign[=] name[count] for taget[name[key]] in starred[name[indexed_keys]] begin[:] <ast.AugAssign object at 0x7da2054a5150> call[name[prepared_metadata]][name[key]] assign[=] call[name[source_metadata].get, parameter[name[key], list[[]]]] if <ast.UnaryOp object at 0x7da2054a52a0> begin[:] call[name[prepared_metadata]][name[key]] assign[=] list[[<ast.Subscript object at 0x7da1b1d56170>]] while compare[call[name[len], parameter[call[name[prepared_metadata]][name[key]]]] less[<] call[name[indexed_keys]][name[key]]] begin[:] call[call[name[prepared_metadata]][name[key]].append, parameter[constant[None]]] for taget[name[key]] in starred[name[metadata]] begin[:] if compare[call[name[rm_index], parameter[name[key]]] in name[indexed_keys]] begin[:] <ast.Try object at 0x7da1b1d54370> variable[_done] assign[=] list[[]] for taget[name[key]] in starred[name[indexed_keys]] begin[:] call[name[prepared_metadata]][name[key]] assign[=] <ast.ListComp object at 0x7da1b1d57850> if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[_done]] begin[:] variable[indexes] assign[=] list[[]] for taget[name[k]] in starred[name[metadata]] begin[:] if <ast.UnaryOp object at 0x7da2049615a0> begin[:] continue for taget[name[i]] in starred[call[name[sorted], parameter[name[indexes]]]] begin[:] <ast.Delete object at 0x7da204961b70> call[name[_done].append, parameter[name[key]]] return[name[prepared_metadata]]
keyword[def] identifier[prepare_metadata] ( identifier[metadata] , identifier[source_metadata] = keyword[None] , identifier[append] = keyword[False] , identifier[append_list] = keyword[False] ): literal[string] identifier[source_metadata] ={} keyword[if] keyword[not] identifier[source_metadata] keyword[else] identifier[copy] . identifier[deepcopy] ( identifier[source_metadata] ) identifier[prepared_metadata] ={} keyword[def] identifier[get_index] ( identifier[key] ): identifier[match] = identifier[re] . identifier[search] ( literal[string] , identifier[key] ) keyword[if] identifier[match] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[int] ( identifier[match] . identifier[group] ()) keyword[def] identifier[rm_index] ( identifier[key] ): keyword[return] identifier[key] . identifier[split] ( literal[string] )[ literal[int] ] identifier[indexed_keys] ={} keyword[for] identifier[key] keyword[in] identifier[metadata] : keyword[if] identifier[isinstance] ( identifier[metadata] [ identifier[key] ],( identifier[six] . identifier[integer_types] , identifier[float] , identifier[complex] )): identifier[metadata] [ identifier[key] ]= identifier[str] ( identifier[metadata] [ identifier[key] ]) keyword[if] identifier[get_index] ( identifier[key] ) keyword[is] keyword[None] : keyword[continue] identifier[count] = identifier[len] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[metadata] keyword[if] identifier[rm_index] ( identifier[x] )== identifier[rm_index] ( identifier[key] )]) identifier[indexed_keys] [ identifier[rm_index] ( identifier[key] )]= identifier[count] keyword[for] identifier[key] keyword[in] identifier[indexed_keys] : identifier[indexed_keys] [ identifier[key] ]+= identifier[len] ( identifier[source_metadata] . identifier[get] ( identifier[key] ,[])) identifier[prepared_metadata] [ identifier[key] ]= identifier[source_metadata] . identifier[get] ( identifier[key] ,[]) keyword[if] keyword[not] identifier[isinstance] ( identifier[prepared_metadata] [ identifier[key] ], identifier[list] ): identifier[prepared_metadata] [ identifier[key] ]=[ identifier[prepared_metadata] [ identifier[key] ]] keyword[while] identifier[len] ( identifier[prepared_metadata] [ identifier[key] ])< identifier[indexed_keys] [ identifier[key] ]: identifier[prepared_metadata] [ identifier[key] ]. identifier[append] ( keyword[None] ) keyword[for] identifier[key] keyword[in] identifier[metadata] : keyword[if] ( identifier[rm_index] ( identifier[key] ) keyword[in] identifier[indexed_keys] ): keyword[try] : identifier[prepared_metadata] [ identifier[rm_index] ( identifier[key] )][ identifier[get_index] ( identifier[key] )]= identifier[metadata] [ identifier[key] ] keyword[except] identifier[IndexError] : identifier[prepared_metadata] [ identifier[rm_index] ( identifier[key] )]. identifier[append] ( identifier[metadata] [ identifier[key] ]) keyword[elif] identifier[append_list] keyword[and] identifier[source_metadata] . identifier[get] ( identifier[key] ): keyword[if] keyword[not] identifier[isinstance] ( identifier[metadata] [ identifier[key] ], identifier[list] ): identifier[metadata] [ identifier[key] ]=[ identifier[metadata] [ identifier[key] ]] keyword[for] identifier[v] keyword[in] identifier[metadata] [ identifier[key] ]: keyword[if] keyword[not] identifier[isinstance] ( identifier[source_metadata] [ identifier[key] ], identifier[list] ): keyword[if] identifier[v] keyword[in] [ identifier[source_metadata] [ identifier[key] ]]: keyword[continue] keyword[else] : keyword[if] identifier[v] keyword[in] identifier[source_metadata] [ identifier[key] ]: keyword[continue] keyword[if] keyword[not] identifier[isinstance] ( identifier[source_metadata] [ identifier[key] ], identifier[list] ): identifier[prepared_metadata] [ identifier[key] ]=[ identifier[source_metadata] [ identifier[key] ]] keyword[else] : identifier[prepared_metadata] [ identifier[key] ]= identifier[source_metadata] [ identifier[key] ] identifier[prepared_metadata] [ identifier[key] ]. identifier[append] ( identifier[v] ) keyword[elif] identifier[append] keyword[and] identifier[source_metadata] . identifier[get] ( identifier[key] ): identifier[prepared_metadata] [ identifier[key] ]= literal[string] . identifier[format] ( identifier[source_metadata] [ identifier[key] ], identifier[metadata] [ identifier[key] ]) keyword[else] : identifier[prepared_metadata] [ identifier[key] ]= identifier[metadata] [ identifier[key] ] identifier[_done] =[] keyword[for] identifier[key] keyword[in] identifier[indexed_keys] : identifier[prepared_metadata] [ identifier[key] ]=[ identifier[v] keyword[for] identifier[v] keyword[in] identifier[prepared_metadata] [ identifier[key] ] keyword[if] identifier[v] ] keyword[if] identifier[key] keyword[not] keyword[in] identifier[_done] : identifier[indexes] =[] keyword[for] identifier[k] keyword[in] identifier[metadata] : keyword[if] keyword[not] identifier[get_index] ( identifier[k] ): keyword[continue] keyword[elif] keyword[not] identifier[rm_index] ( identifier[k] )== identifier[key] : keyword[continue] keyword[elif] keyword[not] identifier[metadata] [ identifier[k] ]== literal[string] : keyword[continue] keyword[else] : identifier[indexes] . identifier[append] ( identifier[get_index] ( identifier[k] )) keyword[for] identifier[i] keyword[in] identifier[sorted] ( identifier[indexes] , identifier[reverse] = keyword[True] ): keyword[del] identifier[prepared_metadata] [ identifier[key] ][ identifier[i] ] identifier[_done] . identifier[append] ( identifier[key] ) keyword[return] identifier[prepared_metadata]
def prepare_metadata(metadata, source_metadata=None, append=False, append_list=False): """Prepare a metadata dict for an :class:`S3PreparedRequest <S3PreparedRequest>` or :class:`MetadataPreparedRequest <MetadataPreparedRequest>` object. :type metadata: dict :param metadata: The metadata dict to be prepared. :type source_metadata: dict :param source_metadata: (optional) The source metadata for the item being modified. :rtype: dict :returns: A filtered metadata dict to be used for generating IA S3 and Metadata API requests. """ # Make a deepcopy of source_metadata if it exists. A deepcopy is # necessary to avoid modifying the original dict. source_metadata = {} if not source_metadata else copy.deepcopy(source_metadata) prepared_metadata = {} # Functions for dealing with metadata keys containing indexes. def get_index(key): match = re.search('(?<=\\[)\\d+(?=\\])', key) if match is not None: return int(match.group()) # depends on [control=['if'], data=['match']] def rm_index(key): return key.split('[')[0] # Create indexed_keys counter dict. i.e.: {'subject': 3} -- subject # (with the index removed) appears 3 times in the metadata dict. indexed_keys = {} for key in metadata: # Convert number values to strings! if isinstance(metadata[key], (six.integer_types, float, complex)): metadata[key] = str(metadata[key]) # depends on [control=['if'], data=[]] if get_index(key) is None: continue # depends on [control=['if'], data=[]] count = len([x for x in metadata if rm_index(x) == rm_index(key)]) indexed_keys[rm_index(key)] = count # depends on [control=['for'], data=['key']] # Initialize the values for all indexed_keys. for key in indexed_keys: # Increment the counter so we know how many values the final # value in prepared_metadata should have. indexed_keys[key] += len(source_metadata.get(key, [])) # Intialize the value in the prepared_metadata dict. prepared_metadata[key] = source_metadata.get(key, []) if not isinstance(prepared_metadata[key], list): prepared_metadata[key] = [prepared_metadata[key]] # depends on [control=['if'], data=[]] # Fill the value of the prepared_metadata key with None values # so all indexed items can be indexed in order. while len(prepared_metadata[key]) < indexed_keys[key]: prepared_metadata[key].append(None) # depends on [control=['while'], data=[]] # depends on [control=['for'], data=['key']] # Index all items which contain an index. for key in metadata: # Insert values from indexed keys into prepared_metadata dict. if rm_index(key) in indexed_keys: try: prepared_metadata[rm_index(key)][get_index(key)] = metadata[key] # depends on [control=['try'], data=[]] except IndexError: prepared_metadata[rm_index(key)].append(metadata[key]) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # If append is True, append value to source_metadata value. elif append_list and source_metadata.get(key): if not isinstance(metadata[key], list): metadata[key] = [metadata[key]] # depends on [control=['if'], data=[]] for v in metadata[key]: if not isinstance(source_metadata[key], list): if v in [source_metadata[key]]: continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif v in source_metadata[key]: continue # depends on [control=['if'], data=[]] if not isinstance(source_metadata[key], list): prepared_metadata[key] = [source_metadata[key]] # depends on [control=['if'], data=[]] else: prepared_metadata[key] = source_metadata[key] prepared_metadata[key].append(v) # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]] elif append and source_metadata.get(key): prepared_metadata[key] = '{0} {1}'.format(source_metadata[key], metadata[key]) # depends on [control=['if'], data=[]] else: prepared_metadata[key] = metadata[key] # depends on [control=['for'], data=['key']] # Remove values from metadata if value is REMOVE_TAG. _done = [] for key in indexed_keys: # Filter None values from items with arrays as values prepared_metadata[key] = [v for v in prepared_metadata[key] if v] # Only filter the given indexed key if it has not already been # filtered. if key not in _done: indexes = [] for k in metadata: if not get_index(k): continue # depends on [control=['if'], data=[]] elif not rm_index(k) == key: continue # depends on [control=['if'], data=[]] elif not metadata[k] == 'REMOVE_TAG': continue # depends on [control=['if'], data=[]] else: indexes.append(get_index(k)) # depends on [control=['for'], data=['k']] # Delete indexed values in reverse to not throw off the # subsequent indexes. for i in sorted(indexes, reverse=True): del prepared_metadata[key][i] # depends on [control=['for'], data=['i']] _done.append(key) # depends on [control=['if'], data=['key', '_done']] # depends on [control=['for'], data=['key']] return prepared_metadata
def get_hyperparameter_configurations(self, num, r, config_generator): """generate num hyperparameter configurations from search space using Bayesian optimization Parameters ---------- num: int the number of hyperparameter configurations Returns ------- list a list of hyperparameter configurations. Format: [[key1, value1], [key2, value2], ...] """ global _KEY assert self.i == 0 hyperparameter_configs = dict() for _ in range(num): params_id = create_bracket_parameter_id(self.s, self.i) params = config_generator.get_config(r) params[_KEY] = r hyperparameter_configs[params_id] = params self._record_hyper_configs(hyperparameter_configs) return [[key, value] for key, value in hyperparameter_configs.items()]
def function[get_hyperparameter_configurations, parameter[self, num, r, config_generator]]: constant[generate num hyperparameter configurations from search space using Bayesian optimization Parameters ---------- num: int the number of hyperparameter configurations Returns ------- list a list of hyperparameter configurations. Format: [[key1, value1], [key2, value2], ...] ] <ast.Global object at 0x7da18eb56d70> assert[compare[name[self].i equal[==] constant[0]]] variable[hyperparameter_configs] assign[=] call[name[dict], parameter[]] for taget[name[_]] in starred[call[name[range], parameter[name[num]]]] begin[:] variable[params_id] assign[=] call[name[create_bracket_parameter_id], parameter[name[self].s, name[self].i]] variable[params] assign[=] call[name[config_generator].get_config, parameter[name[r]]] call[name[params]][name[_KEY]] assign[=] name[r] call[name[hyperparameter_configs]][name[params_id]] assign[=] name[params] call[name[self]._record_hyper_configs, parameter[name[hyperparameter_configs]]] return[<ast.ListComp object at 0x7da18eb568c0>]
keyword[def] identifier[get_hyperparameter_configurations] ( identifier[self] , identifier[num] , identifier[r] , identifier[config_generator] ): literal[string] keyword[global] identifier[_KEY] keyword[assert] identifier[self] . identifier[i] == literal[int] identifier[hyperparameter_configs] = identifier[dict] () keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num] ): identifier[params_id] = identifier[create_bracket_parameter_id] ( identifier[self] . identifier[s] , identifier[self] . identifier[i] ) identifier[params] = identifier[config_generator] . identifier[get_config] ( identifier[r] ) identifier[params] [ identifier[_KEY] ]= identifier[r] identifier[hyperparameter_configs] [ identifier[params_id] ]= identifier[params] identifier[self] . identifier[_record_hyper_configs] ( identifier[hyperparameter_configs] ) keyword[return] [[ identifier[key] , identifier[value] ] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[hyperparameter_configs] . identifier[items] ()]
def get_hyperparameter_configurations(self, num, r, config_generator): """generate num hyperparameter configurations from search space using Bayesian optimization Parameters ---------- num: int the number of hyperparameter configurations Returns ------- list a list of hyperparameter configurations. Format: [[key1, value1], [key2, value2], ...] """ global _KEY assert self.i == 0 hyperparameter_configs = dict() for _ in range(num): params_id = create_bracket_parameter_id(self.s, self.i) params = config_generator.get_config(r) params[_KEY] = r hyperparameter_configs[params_id] = params # depends on [control=['for'], data=[]] self._record_hyper_configs(hyperparameter_configs) return [[key, value] for (key, value) in hyperparameter_configs.items()]
def countries(self) -> typing.Iterator['Country']: """ Returns: generator over all countries in this coalition """ for k in self._section_country: if k not in self._countries.keys(): country = Country(self.d, self.l10n, self.coa_color, k) self._countries[k] = country self._countries_by_id[country.country_id] = country self._countries_by_name[country.country_name] = country yield self._countries[k]
def function[countries, parameter[self]]: constant[ Returns: generator over all countries in this coalition ] for taget[name[k]] in starred[name[self]._section_country] begin[:] if compare[name[k] <ast.NotIn object at 0x7da2590d7190> call[name[self]._countries.keys, parameter[]]] begin[:] variable[country] assign[=] call[name[Country], parameter[name[self].d, name[self].l10n, name[self].coa_color, name[k]]] call[name[self]._countries][name[k]] assign[=] name[country] call[name[self]._countries_by_id][name[country].country_id] assign[=] name[country] call[name[self]._countries_by_name][name[country].country_name] assign[=] name[country] <ast.Yield object at 0x7da1b144ea70>
keyword[def] identifier[countries] ( identifier[self] )-> identifier[typing] . identifier[Iterator] [ literal[string] ]: literal[string] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_section_country] : keyword[if] identifier[k] keyword[not] keyword[in] identifier[self] . identifier[_countries] . identifier[keys] (): identifier[country] = identifier[Country] ( identifier[self] . identifier[d] , identifier[self] . identifier[l10n] , identifier[self] . identifier[coa_color] , identifier[k] ) identifier[self] . identifier[_countries] [ identifier[k] ]= identifier[country] identifier[self] . identifier[_countries_by_id] [ identifier[country] . identifier[country_id] ]= identifier[country] identifier[self] . identifier[_countries_by_name] [ identifier[country] . identifier[country_name] ]= identifier[country] keyword[yield] identifier[self] . identifier[_countries] [ identifier[k] ]
def countries(self) -> typing.Iterator['Country']: """ Returns: generator over all countries in this coalition """ for k in self._section_country: if k not in self._countries.keys(): country = Country(self.d, self.l10n, self.coa_color, k) self._countries[k] = country self._countries_by_id[country.country_id] = country self._countries_by_name[country.country_name] = country # depends on [control=['if'], data=['k']] yield self._countries[k] # depends on [control=['for'], data=['k']]
def init_wait_register(self): """ Initialize EventMatcher to wait for certain cli_ready_trigger to arrive from this Dut. :return: None """ app = self.config.get("application") if app: bef_init_cmds = app.get("cli_ready_trigger") if bef_init_cmds: self.init_done.clear() self.init_event_matcher = EventMatcher(EventTypes.DUT_LINE_RECEIVED, bef_init_cmds, self, self.init_done) self.init_wait_timeout = app.get("cli_ready_trigger_timeout", 30) return self.init_done.set() return
def function[init_wait_register, parameter[self]]: constant[ Initialize EventMatcher to wait for certain cli_ready_trigger to arrive from this Dut. :return: None ] variable[app] assign[=] call[name[self].config.get, parameter[constant[application]]] if name[app] begin[:] variable[bef_init_cmds] assign[=] call[name[app].get, parameter[constant[cli_ready_trigger]]] if name[bef_init_cmds] begin[:] call[name[self].init_done.clear, parameter[]] name[self].init_event_matcher assign[=] call[name[EventMatcher], parameter[name[EventTypes].DUT_LINE_RECEIVED, name[bef_init_cmds], name[self], name[self].init_done]] name[self].init_wait_timeout assign[=] call[name[app].get, parameter[constant[cli_ready_trigger_timeout], constant[30]]] return[None] call[name[self].init_done.set, parameter[]] return[None]
keyword[def] identifier[init_wait_register] ( identifier[self] ): literal[string] identifier[app] = identifier[self] . identifier[config] . identifier[get] ( literal[string] ) keyword[if] identifier[app] : identifier[bef_init_cmds] = identifier[app] . identifier[get] ( literal[string] ) keyword[if] identifier[bef_init_cmds] : identifier[self] . identifier[init_done] . identifier[clear] () identifier[self] . identifier[init_event_matcher] = identifier[EventMatcher] ( identifier[EventTypes] . identifier[DUT_LINE_RECEIVED] , identifier[bef_init_cmds] , identifier[self] , identifier[self] . identifier[init_done] ) identifier[self] . identifier[init_wait_timeout] = identifier[app] . identifier[get] ( literal[string] , literal[int] ) keyword[return] identifier[self] . identifier[init_done] . identifier[set] () keyword[return]
def init_wait_register(self): """ Initialize EventMatcher to wait for certain cli_ready_trigger to arrive from this Dut. :return: None """ app = self.config.get('application') if app: bef_init_cmds = app.get('cli_ready_trigger') if bef_init_cmds: self.init_done.clear() self.init_event_matcher = EventMatcher(EventTypes.DUT_LINE_RECEIVED, bef_init_cmds, self, self.init_done) self.init_wait_timeout = app.get('cli_ready_trigger_timeout', 30) return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self.init_done.set() return
def fromJobNode(cls, jobNode, jobStoreID, tryCount): """ Builds a job graph from a given job node :param toil.job.JobNode jobNode: a job node object to build into a job graph :param str jobStoreID: the job store ID to assign to the resulting job graph object :param int tryCount: the number of times the resulting job graph object can be retried after failure :return: The newly created job graph object :rtype: toil.jobGraph.JobGraph """ return cls(command=jobNode.command, jobStoreID=jobStoreID, remainingRetryCount=tryCount, predecessorNumber=jobNode.predecessorNumber, unitName=jobNode.unitName, jobName=jobNode.jobName, **jobNode._requirements)
def function[fromJobNode, parameter[cls, jobNode, jobStoreID, tryCount]]: constant[ Builds a job graph from a given job node :param toil.job.JobNode jobNode: a job node object to build into a job graph :param str jobStoreID: the job store ID to assign to the resulting job graph object :param int tryCount: the number of times the resulting job graph object can be retried after failure :return: The newly created job graph object :rtype: toil.jobGraph.JobGraph ] return[call[name[cls], parameter[]]]
keyword[def] identifier[fromJobNode] ( identifier[cls] , identifier[jobNode] , identifier[jobStoreID] , identifier[tryCount] ): literal[string] keyword[return] identifier[cls] ( identifier[command] = identifier[jobNode] . identifier[command] , identifier[jobStoreID] = identifier[jobStoreID] , identifier[remainingRetryCount] = identifier[tryCount] , identifier[predecessorNumber] = identifier[jobNode] . identifier[predecessorNumber] , identifier[unitName] = identifier[jobNode] . identifier[unitName] , identifier[jobName] = identifier[jobNode] . identifier[jobName] , ** identifier[jobNode] . identifier[_requirements] )
def fromJobNode(cls, jobNode, jobStoreID, tryCount): """ Builds a job graph from a given job node :param toil.job.JobNode jobNode: a job node object to build into a job graph :param str jobStoreID: the job store ID to assign to the resulting job graph object :param int tryCount: the number of times the resulting job graph object can be retried after failure :return: The newly created job graph object :rtype: toil.jobGraph.JobGraph """ return cls(command=jobNode.command, jobStoreID=jobStoreID, remainingRetryCount=tryCount, predecessorNumber=jobNode.predecessorNumber, unitName=jobNode.unitName, jobName=jobNode.jobName, **jobNode._requirements)
def lookup(*args, **kwargs): """ Use arguments to route constructor. Applies a series of checks on arguments to identify constructor, starting with known keyword arguments, and then applying constructor-specific checks """ if 'mode' in kwargs: mode = kwargs['mode'] if mode not in constructors: raise ValueError('Mode %s not supported' % mode) del kwargs['mode'] return constructors[mode] else: for mode, constructor in constructors: if constructor._argcheck(*args, **kwargs): return constructor return ConstructLocal
def function[lookup, parameter[]]: constant[ Use arguments to route constructor. Applies a series of checks on arguments to identify constructor, starting with known keyword arguments, and then applying constructor-specific checks ] if compare[constant[mode] in name[kwargs]] begin[:] variable[mode] assign[=] call[name[kwargs]][constant[mode]] if compare[name[mode] <ast.NotIn object at 0x7da2590d7190> name[constructors]] begin[:] <ast.Raise object at 0x7da1b26af8b0> <ast.Delete object at 0x7da1b26ae620> return[call[name[constructors]][name[mode]]] return[name[ConstructLocal]]
keyword[def] identifier[lookup] (* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[mode] = identifier[kwargs] [ literal[string] ] keyword[if] identifier[mode] keyword[not] keyword[in] identifier[constructors] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[mode] ) keyword[del] identifier[kwargs] [ literal[string] ] keyword[return] identifier[constructors] [ identifier[mode] ] keyword[else] : keyword[for] identifier[mode] , identifier[constructor] keyword[in] identifier[constructors] : keyword[if] identifier[constructor] . identifier[_argcheck] (* identifier[args] ,** identifier[kwargs] ): keyword[return] identifier[constructor] keyword[return] identifier[ConstructLocal]
def lookup(*args, **kwargs): """ Use arguments to route constructor. Applies a series of checks on arguments to identify constructor, starting with known keyword arguments, and then applying constructor-specific checks """ if 'mode' in kwargs: mode = kwargs['mode'] if mode not in constructors: raise ValueError('Mode %s not supported' % mode) # depends on [control=['if'], data=['mode']] del kwargs['mode'] return constructors[mode] # depends on [control=['if'], data=['kwargs']] else: for (mode, constructor) in constructors: if constructor._argcheck(*args, **kwargs): return constructor # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return ConstructLocal
def result_pretty(self, number_of_runs=0, time_str=None, fbestever=None): """pretty print result. Returns ``self.result()`` """ if fbestever is None: fbestever = self.best.f s = (' after %i restart' + ('s' if number_of_runs > 1 else '')) \ % number_of_runs if number_of_runs else '' for k, v in self.stop().items(): print('termination on %s=%s%s' % (k, str(v), s + (' (%s)' % time_str if time_str else ''))) print('final/bestever f-value = %e %e' % (self.best.last.f, fbestever)) if self.N < 9: print('incumbent solution: ' + str(list(self.gp.pheno(self.mean, into_bounds=self.boundary_handler.repair)))) print('std deviation: ' + str(list(self.sigma * self.sigma_vec * sqrt(self.dC) * self.gp.scales))) else: print('incumbent solution: %s ...]' % (str(self.gp.pheno(self.mean, into_bounds=self.boundary_handler.repair)[:8])[:-1])) print('std deviations: %s ...]' % (str((self.sigma * self.sigma_vec * sqrt(self.dC) * self.gp.scales)[:8])[:-1])) return self.result()
def function[result_pretty, parameter[self, number_of_runs, time_str, fbestever]]: constant[pretty print result. Returns ``self.result()`` ] if compare[name[fbestever] is constant[None]] begin[:] variable[fbestever] assign[=] name[self].best.f variable[s] assign[=] <ast.IfExp object at 0x7da1b0b5fa00> for taget[tuple[[<ast.Name object at 0x7da1b0b5c760>, <ast.Name object at 0x7da1b0b5c700>]]] in starred[call[call[name[self].stop, parameter[]].items, parameter[]]] begin[:] call[name[print], parameter[binary_operation[constant[termination on %s=%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0b5f010>, <ast.Call object at 0x7da1b0b5f100>, <ast.BinOp object at 0x7da1b0b5dd20>]]]]] call[name[print], parameter[binary_operation[constant[final/bestever f-value = %e %e] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0b5e9e0>, <ast.Name object at 0x7da1b0b5e950>]]]]] if compare[name[self].N less[<] constant[9]] begin[:] call[name[print], parameter[binary_operation[constant[incumbent solution: ] + call[name[str], parameter[call[name[list], parameter[call[name[self].gp.pheno, parameter[name[self].mean]]]]]]]]] call[name[print], parameter[binary_operation[constant[std deviation: ] + call[name[str], parameter[call[name[list], parameter[binary_operation[binary_operation[binary_operation[name[self].sigma * name[self].sigma_vec] * call[name[sqrt], parameter[name[self].dC]]] * name[self].gp.scales]]]]]]]] return[call[name[self].result, parameter[]]]
keyword[def] identifier[result_pretty] ( identifier[self] , identifier[number_of_runs] = literal[int] , identifier[time_str] = keyword[None] , identifier[fbestever] = keyword[None] ): literal[string] keyword[if] identifier[fbestever] keyword[is] keyword[None] : identifier[fbestever] = identifier[self] . identifier[best] . identifier[f] identifier[s] =( literal[string] +( literal[string] keyword[if] identifier[number_of_runs] > literal[int] keyword[else] literal[string] ))% identifier[number_of_runs] keyword[if] identifier[number_of_runs] keyword[else] literal[string] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[stop] (). identifier[items] (): identifier[print] ( literal[string] %( identifier[k] , identifier[str] ( identifier[v] ), identifier[s] + ( literal[string] % identifier[time_str] keyword[if] identifier[time_str] keyword[else] literal[string] ))) identifier[print] ( literal[string] %( identifier[self] . identifier[best] . identifier[last] . identifier[f] , identifier[fbestever] )) keyword[if] identifier[self] . identifier[N] < literal[int] : identifier[print] ( literal[string] + identifier[str] ( identifier[list] ( identifier[self] . identifier[gp] . identifier[pheno] ( identifier[self] . identifier[mean] , identifier[into_bounds] = identifier[self] . identifier[boundary_handler] . identifier[repair] )))) identifier[print] ( literal[string] + identifier[str] ( identifier[list] ( identifier[self] . identifier[sigma] * identifier[self] . identifier[sigma_vec] * identifier[sqrt] ( identifier[self] . identifier[dC] )* identifier[self] . identifier[gp] . identifier[scales] ))) keyword[else] : identifier[print] ( literal[string] %( identifier[str] ( identifier[self] . identifier[gp] . identifier[pheno] ( identifier[self] . identifier[mean] , identifier[into_bounds] = identifier[self] . identifier[boundary_handler] . identifier[repair] )[: literal[int] ])[:- literal[int] ])) identifier[print] ( literal[string] %( identifier[str] (( identifier[self] . identifier[sigma] * identifier[self] . identifier[sigma_vec] * identifier[sqrt] ( identifier[self] . identifier[dC] )* identifier[self] . identifier[gp] . identifier[scales] )[: literal[int] ])[:- literal[int] ])) keyword[return] identifier[self] . identifier[result] ()
def result_pretty(self, number_of_runs=0, time_str=None, fbestever=None): """pretty print result. Returns ``self.result()`` """ if fbestever is None: fbestever = self.best.f # depends on [control=['if'], data=['fbestever']] s = (' after %i restart' + ('s' if number_of_runs > 1 else '')) % number_of_runs if number_of_runs else '' for (k, v) in self.stop().items(): print('termination on %s=%s%s' % (k, str(v), s + (' (%s)' % time_str if time_str else ''))) # depends on [control=['for'], data=[]] print('final/bestever f-value = %e %e' % (self.best.last.f, fbestever)) if self.N < 9: print('incumbent solution: ' + str(list(self.gp.pheno(self.mean, into_bounds=self.boundary_handler.repair)))) print('std deviation: ' + str(list(self.sigma * self.sigma_vec * sqrt(self.dC) * self.gp.scales))) # depends on [control=['if'], data=[]] else: print('incumbent solution: %s ...]' % str(self.gp.pheno(self.mean, into_bounds=self.boundary_handler.repair)[:8])[:-1]) print('std deviations: %s ...]' % str((self.sigma * self.sigma_vec * sqrt(self.dC) * self.gp.scales)[:8])[:-1]) return self.result()
def is_tagged(required_tags, has_tags): """Checks if tags match""" if not required_tags and not has_tags: return True elif not required_tags: return False found_tags = [] for tag in required_tags: if tag in has_tags: found_tags.append(tag) return len(found_tags) == len(required_tags)
def function[is_tagged, parameter[required_tags, has_tags]]: constant[Checks if tags match] if <ast.BoolOp object at 0x7da1b1b69600> begin[:] return[constant[True]] variable[found_tags] assign[=] list[[]] for taget[name[tag]] in starred[name[required_tags]] begin[:] if compare[name[tag] in name[has_tags]] begin[:] call[name[found_tags].append, parameter[name[tag]]] return[compare[call[name[len], parameter[name[found_tags]]] equal[==] call[name[len], parameter[name[required_tags]]]]]
keyword[def] identifier[is_tagged] ( identifier[required_tags] , identifier[has_tags] ): literal[string] keyword[if] keyword[not] identifier[required_tags] keyword[and] keyword[not] identifier[has_tags] : keyword[return] keyword[True] keyword[elif] keyword[not] identifier[required_tags] : keyword[return] keyword[False] identifier[found_tags] =[] keyword[for] identifier[tag] keyword[in] identifier[required_tags] : keyword[if] identifier[tag] keyword[in] identifier[has_tags] : identifier[found_tags] . identifier[append] ( identifier[tag] ) keyword[return] identifier[len] ( identifier[found_tags] )== identifier[len] ( identifier[required_tags] )
def is_tagged(required_tags, has_tags): """Checks if tags match""" if not required_tags and (not has_tags): return True # depends on [control=['if'], data=[]] elif not required_tags: return False # depends on [control=['if'], data=[]] found_tags = [] for tag in required_tags: if tag in has_tags: found_tags.append(tag) # depends on [control=['if'], data=['tag']] # depends on [control=['for'], data=['tag']] return len(found_tags) == len(required_tags)
async def start(self): """Start the gateway.""" self._logger.info("Starting all device adapters") await self.device_manager.start() self._logger.info("Starting all servers") for server in self.servers: await server.start()
<ast.AsyncFunctionDef object at 0x7da18fe92ef0>
keyword[async] keyword[def] identifier[start] ( identifier[self] ): literal[string] identifier[self] . identifier[_logger] . identifier[info] ( literal[string] ) keyword[await] identifier[self] . identifier[device_manager] . identifier[start] () identifier[self] . identifier[_logger] . identifier[info] ( literal[string] ) keyword[for] identifier[server] keyword[in] identifier[self] . identifier[servers] : keyword[await] identifier[server] . identifier[start] ()
async def start(self): """Start the gateway.""" self._logger.info('Starting all device adapters') await self.device_manager.start() self._logger.info('Starting all servers') for server in self.servers: await server.start() # depends on [control=['for'], data=['server']]
def _run_sequence(self, sequence): ''' Run a single sequence ''' self._check_pause() self._pre_test() session_data = self.target.get_session_data() self._test_info() resp = None for edge in sequence: if edge.callback: edge.callback(self, edge, resp) session_data = self.target.get_session_data() node = edge.dst node.set_session_data(session_data) resp = self._transmit(node) return self._post_test()
def function[_run_sequence, parameter[self, sequence]]: constant[ Run a single sequence ] call[name[self]._check_pause, parameter[]] call[name[self]._pre_test, parameter[]] variable[session_data] assign[=] call[name[self].target.get_session_data, parameter[]] call[name[self]._test_info, parameter[]] variable[resp] assign[=] constant[None] for taget[name[edge]] in starred[name[sequence]] begin[:] if name[edge].callback begin[:] call[name[edge].callback, parameter[name[self], name[edge], name[resp]]] variable[session_data] assign[=] call[name[self].target.get_session_data, parameter[]] variable[node] assign[=] name[edge].dst call[name[node].set_session_data, parameter[name[session_data]]] variable[resp] assign[=] call[name[self]._transmit, parameter[name[node]]] return[call[name[self]._post_test, parameter[]]]
keyword[def] identifier[_run_sequence] ( identifier[self] , identifier[sequence] ): literal[string] identifier[self] . identifier[_check_pause] () identifier[self] . identifier[_pre_test] () identifier[session_data] = identifier[self] . identifier[target] . identifier[get_session_data] () identifier[self] . identifier[_test_info] () identifier[resp] = keyword[None] keyword[for] identifier[edge] keyword[in] identifier[sequence] : keyword[if] identifier[edge] . identifier[callback] : identifier[edge] . identifier[callback] ( identifier[self] , identifier[edge] , identifier[resp] ) identifier[session_data] = identifier[self] . identifier[target] . identifier[get_session_data] () identifier[node] = identifier[edge] . identifier[dst] identifier[node] . identifier[set_session_data] ( identifier[session_data] ) identifier[resp] = identifier[self] . identifier[_transmit] ( identifier[node] ) keyword[return] identifier[self] . identifier[_post_test] ()
def _run_sequence(self, sequence): """ Run a single sequence """ self._check_pause() self._pre_test() session_data = self.target.get_session_data() self._test_info() resp = None for edge in sequence: if edge.callback: edge.callback(self, edge, resp) # depends on [control=['if'], data=[]] session_data = self.target.get_session_data() node = edge.dst node.set_session_data(session_data) resp = self._transmit(node) # depends on [control=['for'], data=['edge']] return self._post_test()
def timezone(self, value=0.0): """Corresponds to IDD Field `timezone` Time relative to GMT. Args: value (float): value for IDD Field `timezone` Unit: hr - not on standard units list??? Default value: 0.0 value >= -12.0 value <= 12.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} need to be of type float ' 'for field `timezone`'.format(value)) if value < -12.0: raise ValueError('value need to be greater or equal -12.0 ' 'for field `timezone`') if value > 12.0: raise ValueError('value need to be smaller 12.0 ' 'for field `timezone`') self._timezone = value
def function[timezone, parameter[self, value]]: constant[Corresponds to IDD Field `timezone` Time relative to GMT. Args: value (float): value for IDD Field `timezone` Unit: hr - not on standard units list??? Default value: 0.0 value >= -12.0 value <= 12.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value ] if compare[name[value] is_not constant[None]] begin[:] <ast.Try object at 0x7da18dc9b5e0> if compare[name[value] less[<] <ast.UnaryOp object at 0x7da18dc98520>] begin[:] <ast.Raise object at 0x7da18dc9b280> if compare[name[value] greater[>] constant[12.0]] begin[:] <ast.Raise object at 0x7da18dc9bb50> name[self]._timezone assign[=] name[value]
keyword[def] identifier[timezone] ( identifier[self] , identifier[value] = literal[int] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[value] = identifier[float] ( identifier[value] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[value] )) keyword[if] identifier[value] <- literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) keyword[if] identifier[value] > literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[self] . identifier[_timezone] = identifier[value]
def timezone(self, value=0.0): """Corresponds to IDD Field `timezone` Time relative to GMT. Args: value (float): value for IDD Field `timezone` Unit: hr - not on standard units list??? Default value: 0.0 value >= -12.0 value <= 12.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) # depends on [control=['try'], data=[]] except ValueError: raise ValueError('value {} need to be of type float for field `timezone`'.format(value)) # depends on [control=['except'], data=[]] if value < -12.0: raise ValueError('value need to be greater or equal -12.0 for field `timezone`') # depends on [control=['if'], data=[]] if value > 12.0: raise ValueError('value need to be smaller 12.0 for field `timezone`') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']] self._timezone = value
def officialPrice(symbol=None, token='', version=''): '''The Official Price message is used to disseminate the IEX Official Opening and Closing Prices. These messages will be provided only for IEX Listed Securities. https://iexcloud.io/docs/api/#deep-official-price Args: symbol (string); Ticker to request token (string); Access token version (string); API version Returns: dict: result ''' _raiseIfNotStr(symbol) if symbol: return _getJson('deep/official-price?symbols=' + symbol, token, version) return _getJson('deep/official-price', token, version)
def function[officialPrice, parameter[symbol, token, version]]: constant[The Official Price message is used to disseminate the IEX Official Opening and Closing Prices. These messages will be provided only for IEX Listed Securities. https://iexcloud.io/docs/api/#deep-official-price Args: symbol (string); Ticker to request token (string); Access token version (string); API version Returns: dict: result ] call[name[_raiseIfNotStr], parameter[name[symbol]]] if name[symbol] begin[:] return[call[name[_getJson], parameter[binary_operation[constant[deep/official-price?symbols=] + name[symbol]], name[token], name[version]]]] return[call[name[_getJson], parameter[constant[deep/official-price], name[token], name[version]]]]
keyword[def] identifier[officialPrice] ( identifier[symbol] = keyword[None] , identifier[token] = literal[string] , identifier[version] = literal[string] ): literal[string] identifier[_raiseIfNotStr] ( identifier[symbol] ) keyword[if] identifier[symbol] : keyword[return] identifier[_getJson] ( literal[string] + identifier[symbol] , identifier[token] , identifier[version] ) keyword[return] identifier[_getJson] ( literal[string] , identifier[token] , identifier[version] )
def officialPrice(symbol=None, token='', version=''): """The Official Price message is used to disseminate the IEX Official Opening and Closing Prices. These messages will be provided only for IEX Listed Securities. https://iexcloud.io/docs/api/#deep-official-price Args: symbol (string); Ticker to request token (string); Access token version (string); API version Returns: dict: result """ _raiseIfNotStr(symbol) if symbol: return _getJson('deep/official-price?symbols=' + symbol, token, version) # depends on [control=['if'], data=[]] return _getJson('deep/official-price', token, version)
def percentile_index(a, q): """ Returns the index of the value at the Qth percentile in array a. """ return np.where( a==np.percentile(a, q, interpolation='nearest') )[0][0]
def function[percentile_index, parameter[a, q]]: constant[ Returns the index of the value at the Qth percentile in array a. ] return[call[call[call[name[np].where, parameter[compare[name[a] equal[==] call[name[np].percentile, parameter[name[a], name[q]]]]]]][constant[0]]][constant[0]]]
keyword[def] identifier[percentile_index] ( identifier[a] , identifier[q] ): literal[string] keyword[return] identifier[np] . identifier[where] ( identifier[a] == identifier[np] . identifier[percentile] ( identifier[a] , identifier[q] , identifier[interpolation] = literal[string] ) )[ literal[int] ][ literal[int] ]
def percentile_index(a, q): """ Returns the index of the value at the Qth percentile in array a. """ return np.where(a == np.percentile(a, q, interpolation='nearest'))[0][0]
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'url') and self.url is not None: _dict['url'] = self.url if hasattr(self, 'gender') and self.gender is not None: _dict['gender'] = self.gender if hasattr(self, 'name') and self.name is not None: _dict['name'] = self.name if hasattr(self, 'language') and self.language is not None: _dict['language'] = self.language if hasattr(self, 'description') and self.description is not None: _dict['description'] = self.description if hasattr(self, 'customizable') and self.customizable is not None: _dict['customizable'] = self.customizable if hasattr( self, 'supported_features') and self.supported_features is not None: _dict['supported_features'] = self.supported_features._to_dict() if hasattr(self, 'customization') and self.customization is not None: _dict['customization'] = self.customization._to_dict() return _dict
def function[_to_dict, parameter[self]]: constant[Return a json dictionary representing this model.] variable[_dict] assign[=] dictionary[[], []] if <ast.BoolOp object at 0x7da2044c3910> begin[:] call[name[_dict]][constant[url]] assign[=] name[self].url if <ast.BoolOp object at 0x7da18bccae60> begin[:] call[name[_dict]][constant[gender]] assign[=] name[self].gender if <ast.BoolOp object at 0x7da18bccbdf0> begin[:] call[name[_dict]][constant[name]] assign[=] name[self].name if <ast.BoolOp object at 0x7da18bcc9a20> begin[:] call[name[_dict]][constant[language]] assign[=] name[self].language if <ast.BoolOp object at 0x7da18bcc90c0> begin[:] call[name[_dict]][constant[description]] assign[=] name[self].description if <ast.BoolOp object at 0x7da2054a63b0> begin[:] call[name[_dict]][constant[customizable]] assign[=] name[self].customizable if <ast.BoolOp object at 0x7da2054a7f40> begin[:] call[name[_dict]][constant[supported_features]] assign[=] call[name[self].supported_features._to_dict, parameter[]] if <ast.BoolOp object at 0x7da2054a47f0> begin[:] call[name[_dict]][constant[customization]] assign[=] call[name[self].customization._to_dict, parameter[]] return[name[_dict]]
keyword[def] identifier[_to_dict] ( identifier[self] ): literal[string] identifier[_dict] ={} keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[url] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[url] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[gender] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[gender] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[name] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[name] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[language] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[language] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[description] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[description] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[customizable] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[customizable] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[supported_features] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[supported_features] . identifier[_to_dict] () keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[customization] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[customization] . identifier[_to_dict] () keyword[return] identifier[_dict]
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'url') and self.url is not None: _dict['url'] = self.url # depends on [control=['if'], data=[]] if hasattr(self, 'gender') and self.gender is not None: _dict['gender'] = self.gender # depends on [control=['if'], data=[]] if hasattr(self, 'name') and self.name is not None: _dict['name'] = self.name # depends on [control=['if'], data=[]] if hasattr(self, 'language') and self.language is not None: _dict['language'] = self.language # depends on [control=['if'], data=[]] if hasattr(self, 'description') and self.description is not None: _dict['description'] = self.description # depends on [control=['if'], data=[]] if hasattr(self, 'customizable') and self.customizable is not None: _dict['customizable'] = self.customizable # depends on [control=['if'], data=[]] if hasattr(self, 'supported_features') and self.supported_features is not None: _dict['supported_features'] = self.supported_features._to_dict() # depends on [control=['if'], data=[]] if hasattr(self, 'customization') and self.customization is not None: _dict['customization'] = self.customization._to_dict() # depends on [control=['if'], data=[]] return _dict
def encrypt_variable(variable, build_repo, *, tld='.org', public_key=None, travis_token=None, **login_kwargs): """ Encrypt an environment variable for ``build_repo`` for Travis ``variable`` should be a bytes object, of the form ``b'ENV=value'``. ``build_repo`` is the repo that ``doctr deploy`` will be run from. It should be like 'drdoctr/doctr'. ``tld`` should be ``'.org'`` for travis-ci.org and ``'.com'`` for travis-ci.com. ``public_key`` should be a pem format public key, obtained from Travis if not provided. If the repo is private, travis_token should be as returned by ``get_temporary_token(**login_kwargs)``. A token being present automatically implies ``tld='.com'``. """ if not isinstance(variable, bytes): raise TypeError("variable should be bytes") if not b"=" in variable: raise ValueError("variable should be of the form 'VARIABLE=value'") if not public_key: _headers = { 'Content-Type': 'application/json', 'User-Agent': 'MyClient/1.0.0', } headersv2 = {**_headers, **Travis_APIv2} headersv3 = {**_headers, **Travis_APIv3} if travis_token: headersv3['Authorization'] = 'token {}'.format(travis_token) res = requests.get('https://api.travis-ci.com/repo/{build_repo}/key_pair/generated'.format(build_repo=urllib.parse.quote(build_repo, safe='')), headers=headersv3) if res.json().get('file') == 'not found': raise RuntimeError("Could not find the Travis public key for %s" % build_repo) public_key = res.json()['public_key'] else: res = requests.get('https://api.travis-ci{tld}/repos/{build_repo}/key'.format(build_repo=build_repo, tld=tld), headers=headersv2) public_key = res.json()['key'] if res.status_code == requests.codes.not_found: raise RuntimeError('Could not find requested repo on Travis. Is Travis enabled?') res.raise_for_status() public_key = public_key.replace("RSA PUBLIC KEY", "PUBLIC KEY").encode('utf-8') key = serialization.load_pem_public_key(public_key, backend=default_backend()) pad = padding.PKCS1v15() return base64.b64encode(key.encrypt(variable, pad))
def function[encrypt_variable, parameter[variable, build_repo]]: constant[ Encrypt an environment variable for ``build_repo`` for Travis ``variable`` should be a bytes object, of the form ``b'ENV=value'``. ``build_repo`` is the repo that ``doctr deploy`` will be run from. It should be like 'drdoctr/doctr'. ``tld`` should be ``'.org'`` for travis-ci.org and ``'.com'`` for travis-ci.com. ``public_key`` should be a pem format public key, obtained from Travis if not provided. If the repo is private, travis_token should be as returned by ``get_temporary_token(**login_kwargs)``. A token being present automatically implies ``tld='.com'``. ] if <ast.UnaryOp object at 0x7da1b1042650> begin[:] <ast.Raise object at 0x7da1b10428c0> if <ast.UnaryOp object at 0x7da1b1041c00> begin[:] <ast.Raise object at 0x7da1b1040070> if <ast.UnaryOp object at 0x7da1b1042bc0> begin[:] variable[_headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b10430a0>, <ast.Constant object at 0x7da1b1043c40>], [<ast.Constant object at 0x7da1b1041060>, <ast.Constant object at 0x7da1b10403a0>]] variable[headersv2] assign[=] dictionary[[None, None], [<ast.Name object at 0x7da1b10413c0>, <ast.Name object at 0x7da1b1042c80>]] variable[headersv3] assign[=] dictionary[[None, None], [<ast.Name object at 0x7da1b1042290>, <ast.Name object at 0x7da1b10407c0>]] if name[travis_token] begin[:] call[name[headersv3]][constant[Authorization]] assign[=] call[constant[token {}].format, parameter[name[travis_token]]] variable[res] assign[=] call[name[requests].get, parameter[call[constant[https://api.travis-ci.com/repo/{build_repo}/key_pair/generated].format, parameter[]]]] if compare[call[call[name[res].json, parameter[]].get, parameter[constant[file]]] equal[==] constant[not found]] begin[:] <ast.Raise object at 0x7da1b1042980> variable[public_key] assign[=] call[call[name[res].json, parameter[]]][constant[public_key]] if compare[name[res].status_code equal[==] name[requests].codes.not_found] begin[:] <ast.Raise object at 0x7da1b1039600> call[name[res].raise_for_status, parameter[]] variable[public_key] assign[=] call[call[name[public_key].replace, parameter[constant[RSA PUBLIC KEY], constant[PUBLIC KEY]]].encode, parameter[constant[utf-8]]] variable[key] assign[=] call[name[serialization].load_pem_public_key, parameter[name[public_key]]] variable[pad] assign[=] call[name[padding].PKCS1v15, parameter[]] return[call[name[base64].b64encode, parameter[call[name[key].encrypt, parameter[name[variable], name[pad]]]]]]
keyword[def] identifier[encrypt_variable] ( identifier[variable] , identifier[build_repo] ,*, identifier[tld] = literal[string] , identifier[public_key] = keyword[None] , identifier[travis_token] = keyword[None] ,** identifier[login_kwargs] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[variable] , identifier[bytes] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] literal[string] keyword[in] identifier[variable] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[public_key] : identifier[_headers] ={ literal[string] : literal[string] , literal[string] : literal[string] , } identifier[headersv2] ={** identifier[_headers] ,** identifier[Travis_APIv2] } identifier[headersv3] ={** identifier[_headers] ,** identifier[Travis_APIv3] } keyword[if] identifier[travis_token] : identifier[headersv3] [ literal[string] ]= literal[string] . identifier[format] ( identifier[travis_token] ) identifier[res] = identifier[requests] . identifier[get] ( literal[string] . identifier[format] ( identifier[build_repo] = identifier[urllib] . identifier[parse] . identifier[quote] ( identifier[build_repo] , identifier[safe] = literal[string] )), identifier[headers] = identifier[headersv3] ) keyword[if] identifier[res] . identifier[json] (). identifier[get] ( literal[string] )== literal[string] : keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[build_repo] ) identifier[public_key] = identifier[res] . identifier[json] ()[ literal[string] ] keyword[else] : identifier[res] = identifier[requests] . identifier[get] ( literal[string] . identifier[format] ( identifier[build_repo] = identifier[build_repo] , identifier[tld] = identifier[tld] ), identifier[headers] = identifier[headersv2] ) identifier[public_key] = identifier[res] . identifier[json] ()[ literal[string] ] keyword[if] identifier[res] . identifier[status_code] == identifier[requests] . identifier[codes] . identifier[not_found] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[res] . identifier[raise_for_status] () identifier[public_key] = identifier[public_key] . identifier[replace] ( literal[string] , literal[string] ). identifier[encode] ( literal[string] ) identifier[key] = identifier[serialization] . identifier[load_pem_public_key] ( identifier[public_key] , identifier[backend] = identifier[default_backend] ()) identifier[pad] = identifier[padding] . identifier[PKCS1v15] () keyword[return] identifier[base64] . identifier[b64encode] ( identifier[key] . identifier[encrypt] ( identifier[variable] , identifier[pad] ))
def encrypt_variable(variable, build_repo, *, tld='.org', public_key=None, travis_token=None, **login_kwargs): """ Encrypt an environment variable for ``build_repo`` for Travis ``variable`` should be a bytes object, of the form ``b'ENV=value'``. ``build_repo`` is the repo that ``doctr deploy`` will be run from. It should be like 'drdoctr/doctr'. ``tld`` should be ``'.org'`` for travis-ci.org and ``'.com'`` for travis-ci.com. ``public_key`` should be a pem format public key, obtained from Travis if not provided. If the repo is private, travis_token should be as returned by ``get_temporary_token(**login_kwargs)``. A token being present automatically implies ``tld='.com'``. """ if not isinstance(variable, bytes): raise TypeError('variable should be bytes') # depends on [control=['if'], data=[]] if not b'=' in variable: raise ValueError("variable should be of the form 'VARIABLE=value'") # depends on [control=['if'], data=[]] if not public_key: _headers = {'Content-Type': 'application/json', 'User-Agent': 'MyClient/1.0.0'} headersv2 = {**_headers, **Travis_APIv2} headersv3 = {**_headers, **Travis_APIv3} if travis_token: headersv3['Authorization'] = 'token {}'.format(travis_token) res = requests.get('https://api.travis-ci.com/repo/{build_repo}/key_pair/generated'.format(build_repo=urllib.parse.quote(build_repo, safe='')), headers=headersv3) if res.json().get('file') == 'not found': raise RuntimeError('Could not find the Travis public key for %s' % build_repo) # depends on [control=['if'], data=[]] public_key = res.json()['public_key'] # depends on [control=['if'], data=[]] else: res = requests.get('https://api.travis-ci{tld}/repos/{build_repo}/key'.format(build_repo=build_repo, tld=tld), headers=headersv2) public_key = res.json()['key'] if res.status_code == requests.codes.not_found: raise RuntimeError('Could not find requested repo on Travis. Is Travis enabled?') res.raise_for_status() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] public_key = public_key.replace('RSA PUBLIC KEY', 'PUBLIC KEY').encode('utf-8') key = serialization.load_pem_public_key(public_key, backend=default_backend()) pad = padding.PKCS1v15() return base64.b64encode(key.encrypt(variable, pad))
def subject(self): """ Return a string to be used as the email subject line. """ if self.application_name and self.application_version: return 'Crash Report - {name} (v{version})'.format(name=self.application_name, version=self.application_version) else: return 'Crash Report'
def function[subject, parameter[self]]: constant[ Return a string to be used as the email subject line. ] if <ast.BoolOp object at 0x7da207f015d0> begin[:] return[call[constant[Crash Report - {name} (v{version})].format, parameter[]]]
keyword[def] identifier[subject] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[application_name] keyword[and] identifier[self] . identifier[application_version] : keyword[return] literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[application_name] , identifier[version] = identifier[self] . identifier[application_version] ) keyword[else] : keyword[return] literal[string]
def subject(self): """ Return a string to be used as the email subject line. """ if self.application_name and self.application_version: return 'Crash Report - {name} (v{version})'.format(name=self.application_name, version=self.application_version) # depends on [control=['if'], data=[]] else: return 'Crash Report'
def to_onehot(indices, num_classes): """Convert a tensor of indices of any shape `(N, ...)` to a tensor of one-hot indicators of shape `(N, num_classes, ...)`. """ onehot = torch.zeros(indices.shape[0], num_classes, *indices.shape[1:], device=indices.device) return onehot.scatter_(1, indices.unsqueeze(1), 1)
def function[to_onehot, parameter[indices, num_classes]]: constant[Convert a tensor of indices of any shape `(N, ...)` to a tensor of one-hot indicators of shape `(N, num_classes, ...)`. ] variable[onehot] assign[=] call[name[torch].zeros, parameter[call[name[indices].shape][constant[0]], name[num_classes], <ast.Starred object at 0x7da18f00dcc0>]] return[call[name[onehot].scatter_, parameter[constant[1], call[name[indices].unsqueeze, parameter[constant[1]]], constant[1]]]]
keyword[def] identifier[to_onehot] ( identifier[indices] , identifier[num_classes] ): literal[string] identifier[onehot] = identifier[torch] . identifier[zeros] ( identifier[indices] . identifier[shape] [ literal[int] ], identifier[num_classes] ,* identifier[indices] . identifier[shape] [ literal[int] :], identifier[device] = identifier[indices] . identifier[device] ) keyword[return] identifier[onehot] . identifier[scatter_] ( literal[int] , identifier[indices] . identifier[unsqueeze] ( literal[int] ), literal[int] )
def to_onehot(indices, num_classes): """Convert a tensor of indices of any shape `(N, ...)` to a tensor of one-hot indicators of shape `(N, num_classes, ...)`. """ onehot = torch.zeros(indices.shape[0], num_classes, *indices.shape[1:], device=indices.device) return onehot.scatter_(1, indices.unsqueeze(1), 1)
def do_create(self, line): "create {tablename} [-c rc,wc] {hkey}[:{type} {rkey}:{type}]" args = self.getargs(line) rc = wc = 5 name = args.pop(0) # tablename if args[0] == "-c": # capacity args.pop(0) # skyp -c capacity = args.pop(0).strip() rc, _, wc = capacity.partition(",") rc = int(rc) wc = int(wc) if wc != "" else rc schema = [] hkey, _, hkey_type = args.pop(0).partition(':') hkey_type = self.get_type(hkey_type or 'S') schema.append(boto.dynamodb2.fields.HashKey(hkey, hkey_type)) if args: rkey, _, rkey_type = args.pop(0).partition(':') rkey_type = self.get_type(rkey_type or 'S') schema.append(boto.dynamodb2.fields.RangeKey(rkey, rkey_type)) t = boto.dynamodb2.table.Table.create(name, schema=schema, throughput={'read': rc, 'write': wc}) self.pprint(t.describe())
def function[do_create, parameter[self, line]]: constant[create {tablename} [-c rc,wc] {hkey}[:{type} {rkey}:{type}]] variable[args] assign[=] call[name[self].getargs, parameter[name[line]]] variable[rc] assign[=] constant[5] variable[name] assign[=] call[name[args].pop, parameter[constant[0]]] if compare[call[name[args]][constant[0]] equal[==] constant[-c]] begin[:] call[name[args].pop, parameter[constant[0]]] variable[capacity] assign[=] call[call[name[args].pop, parameter[constant[0]]].strip, parameter[]] <ast.Tuple object at 0x7da1b0ba8940> assign[=] call[name[capacity].partition, parameter[constant[,]]] variable[rc] assign[=] call[name[int], parameter[name[rc]]] variable[wc] assign[=] <ast.IfExp object at 0x7da1b0ba9ff0> variable[schema] assign[=] list[[]] <ast.Tuple object at 0x7da1b0ba9030> assign[=] call[call[name[args].pop, parameter[constant[0]]].partition, parameter[constant[:]]] variable[hkey_type] assign[=] call[name[self].get_type, parameter[<ast.BoolOp object at 0x7da1b0baa3b0>]] call[name[schema].append, parameter[call[name[boto].dynamodb2.fields.HashKey, parameter[name[hkey], name[hkey_type]]]]] if name[args] begin[:] <ast.Tuple object at 0x7da1b0bab670> assign[=] call[call[name[args].pop, parameter[constant[0]]].partition, parameter[constant[:]]] variable[rkey_type] assign[=] call[name[self].get_type, parameter[<ast.BoolOp object at 0x7da1b0b73a00>]] call[name[schema].append, parameter[call[name[boto].dynamodb2.fields.RangeKey, parameter[name[rkey], name[rkey_type]]]]] variable[t] assign[=] call[name[boto].dynamodb2.table.Table.create, parameter[name[name]]] call[name[self].pprint, parameter[call[name[t].describe, parameter[]]]]
keyword[def] identifier[do_create] ( identifier[self] , identifier[line] ): literal[string] identifier[args] = identifier[self] . identifier[getargs] ( identifier[line] ) identifier[rc] = identifier[wc] = literal[int] identifier[name] = identifier[args] . identifier[pop] ( literal[int] ) keyword[if] identifier[args] [ literal[int] ]== literal[string] : identifier[args] . identifier[pop] ( literal[int] ) identifier[capacity] = identifier[args] . identifier[pop] ( literal[int] ). identifier[strip] () identifier[rc] , identifier[_] , identifier[wc] = identifier[capacity] . identifier[partition] ( literal[string] ) identifier[rc] = identifier[int] ( identifier[rc] ) identifier[wc] = identifier[int] ( identifier[wc] ) keyword[if] identifier[wc] != literal[string] keyword[else] identifier[rc] identifier[schema] =[] identifier[hkey] , identifier[_] , identifier[hkey_type] = identifier[args] . identifier[pop] ( literal[int] ). identifier[partition] ( literal[string] ) identifier[hkey_type] = identifier[self] . identifier[get_type] ( identifier[hkey_type] keyword[or] literal[string] ) identifier[schema] . identifier[append] ( identifier[boto] . identifier[dynamodb2] . identifier[fields] . identifier[HashKey] ( identifier[hkey] , identifier[hkey_type] )) keyword[if] identifier[args] : identifier[rkey] , identifier[_] , identifier[rkey_type] = identifier[args] . identifier[pop] ( literal[int] ). identifier[partition] ( literal[string] ) identifier[rkey_type] = identifier[self] . identifier[get_type] ( identifier[rkey_type] keyword[or] literal[string] ) identifier[schema] . identifier[append] ( identifier[boto] . identifier[dynamodb2] . identifier[fields] . identifier[RangeKey] ( identifier[rkey] , identifier[rkey_type] )) identifier[t] = identifier[boto] . identifier[dynamodb2] . identifier[table] . identifier[Table] . identifier[create] ( identifier[name] , identifier[schema] = identifier[schema] , identifier[throughput] ={ literal[string] : identifier[rc] , literal[string] : identifier[wc] }) identifier[self] . identifier[pprint] ( identifier[t] . identifier[describe] ())
def do_create(self, line): """create {tablename} [-c rc,wc] {hkey}[:{type} {rkey}:{type}]""" args = self.getargs(line) rc = wc = 5 name = args.pop(0) # tablename if args[0] == '-c': # capacity args.pop(0) # skyp -c capacity = args.pop(0).strip() (rc, _, wc) = capacity.partition(',') rc = int(rc) wc = int(wc) if wc != '' else rc # depends on [control=['if'], data=[]] schema = [] (hkey, _, hkey_type) = args.pop(0).partition(':') hkey_type = self.get_type(hkey_type or 'S') schema.append(boto.dynamodb2.fields.HashKey(hkey, hkey_type)) if args: (rkey, _, rkey_type) = args.pop(0).partition(':') rkey_type = self.get_type(rkey_type or 'S') schema.append(boto.dynamodb2.fields.RangeKey(rkey, rkey_type)) # depends on [control=['if'], data=[]] t = boto.dynamodb2.table.Table.create(name, schema=schema, throughput={'read': rc, 'write': wc}) self.pprint(t.describe())
def _validate_molecule_env_var(self, molecule_env_var, field, value): """ Readonly but with a custom error. The rule's arguments are validated against this schema: {'type': 'boolean'} """ # TODO(retr0h): This needs to be better handled. pattern = r'^[{$]+MOLECULE[_a-z0-9A-Z]+[}]*$' if molecule_env_var: if re.match(pattern, value): msg = ('cannot reference $MOLECULE special variables ' 'in this section') self._error(field, msg)
def function[_validate_molecule_env_var, parameter[self, molecule_env_var, field, value]]: constant[ Readonly but with a custom error. The rule's arguments are validated against this schema: {'type': 'boolean'} ] variable[pattern] assign[=] constant[^[{$]+MOLECULE[_a-z0-9A-Z]+[}]*$] if name[molecule_env_var] begin[:] if call[name[re].match, parameter[name[pattern], name[value]]] begin[:] variable[msg] assign[=] constant[cannot reference $MOLECULE special variables in this section] call[name[self]._error, parameter[name[field], name[msg]]]
keyword[def] identifier[_validate_molecule_env_var] ( identifier[self] , identifier[molecule_env_var] , identifier[field] , identifier[value] ): literal[string] identifier[pattern] = literal[string] keyword[if] identifier[molecule_env_var] : keyword[if] identifier[re] . identifier[match] ( identifier[pattern] , identifier[value] ): identifier[msg] =( literal[string] literal[string] ) identifier[self] . identifier[_error] ( identifier[field] , identifier[msg] )
def _validate_molecule_env_var(self, molecule_env_var, field, value): """ Readonly but with a custom error. The rule's arguments are validated against this schema: {'type': 'boolean'} """ # TODO(retr0h): This needs to be better handled. pattern = '^[{$]+MOLECULE[_a-z0-9A-Z]+[}]*$' if molecule_env_var: if re.match(pattern, value): msg = 'cannot reference $MOLECULE special variables in this section' self._error(field, msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def drop(self): """ Drop the table and all tables that reference it, recursively. User is prompted for confirmation if config['safemode'] is set to True. """ if self.restriction: raise DataJointError('A relation with an applied restriction condition cannot be dropped.' ' Call drop() on the unrestricted Table.') self.connection.dependencies.load() do_drop = True tables = [table for table in self.connection.dependencies.descendants(self.full_table_name) if not table.isdigit()] if config['safemode']: for table in tables: print(table, '(%d tuples)' % len(FreeTable(self.connection, table))) do_drop = user_choice("Proceed?", default='no') == 'yes' if do_drop: for table in reversed(tables): FreeTable(self.connection, table).drop_quick() print('Tables dropped. Restart kernel.')
def function[drop, parameter[self]]: constant[ Drop the table and all tables that reference it, recursively. User is prompted for confirmation if config['safemode'] is set to True. ] if name[self].restriction begin[:] <ast.Raise object at 0x7da18ede4520> call[name[self].connection.dependencies.load, parameter[]] variable[do_drop] assign[=] constant[True] variable[tables] assign[=] <ast.ListComp object at 0x7da18ede6830> if call[name[config]][constant[safemode]] begin[:] for taget[name[table]] in starred[name[tables]] begin[:] call[name[print], parameter[name[table], binary_operation[constant[(%d tuples)] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[call[name[FreeTable], parameter[name[self].connection, name[table]]]]]]]] variable[do_drop] assign[=] compare[call[name[user_choice], parameter[constant[Proceed?]]] equal[==] constant[yes]] if name[do_drop] begin[:] for taget[name[table]] in starred[call[name[reversed], parameter[name[tables]]]] begin[:] call[call[name[FreeTable], parameter[name[self].connection, name[table]]].drop_quick, parameter[]] call[name[print], parameter[constant[Tables dropped. Restart kernel.]]]
keyword[def] identifier[drop] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[restriction] : keyword[raise] identifier[DataJointError] ( literal[string] literal[string] ) identifier[self] . identifier[connection] . identifier[dependencies] . identifier[load] () identifier[do_drop] = keyword[True] identifier[tables] =[ identifier[table] keyword[for] identifier[table] keyword[in] identifier[self] . identifier[connection] . identifier[dependencies] . identifier[descendants] ( identifier[self] . identifier[full_table_name] ) keyword[if] keyword[not] identifier[table] . identifier[isdigit] ()] keyword[if] identifier[config] [ literal[string] ]: keyword[for] identifier[table] keyword[in] identifier[tables] : identifier[print] ( identifier[table] , literal[string] % identifier[len] ( identifier[FreeTable] ( identifier[self] . identifier[connection] , identifier[table] ))) identifier[do_drop] = identifier[user_choice] ( literal[string] , identifier[default] = literal[string] )== literal[string] keyword[if] identifier[do_drop] : keyword[for] identifier[table] keyword[in] identifier[reversed] ( identifier[tables] ): identifier[FreeTable] ( identifier[self] . identifier[connection] , identifier[table] ). identifier[drop_quick] () identifier[print] ( literal[string] )
def drop(self): """ Drop the table and all tables that reference it, recursively. User is prompted for confirmation if config['safemode'] is set to True. """ if self.restriction: raise DataJointError('A relation with an applied restriction condition cannot be dropped. Call drop() on the unrestricted Table.') # depends on [control=['if'], data=[]] self.connection.dependencies.load() do_drop = True tables = [table for table in self.connection.dependencies.descendants(self.full_table_name) if not table.isdigit()] if config['safemode']: for table in tables: print(table, '(%d tuples)' % len(FreeTable(self.connection, table))) # depends on [control=['for'], data=['table']] do_drop = user_choice('Proceed?', default='no') == 'yes' # depends on [control=['if'], data=[]] if do_drop: for table in reversed(tables): FreeTable(self.connection, table).drop_quick() # depends on [control=['for'], data=['table']] print('Tables dropped. Restart kernel.') # depends on [control=['if'], data=[]]
def arg_name(self): """ Returns the name of the parameter as a command line flag """ if self.constraint is bool and self.value: return '--no-%s' % self.name.replace('_', '-') return '--%s' % self.name.replace('_', '-')
def function[arg_name, parameter[self]]: constant[ Returns the name of the parameter as a command line flag ] if <ast.BoolOp object at 0x7da2044c3d30> begin[:] return[binary_operation[constant[--no-%s] <ast.Mod object at 0x7da2590d6920> call[name[self].name.replace, parameter[constant[_], constant[-]]]]] return[binary_operation[constant[--%s] <ast.Mod object at 0x7da2590d6920> call[name[self].name.replace, parameter[constant[_], constant[-]]]]]
keyword[def] identifier[arg_name] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[constraint] keyword[is] identifier[bool] keyword[and] identifier[self] . identifier[value] : keyword[return] literal[string] % identifier[self] . identifier[name] . identifier[replace] ( literal[string] , literal[string] ) keyword[return] literal[string] % identifier[self] . identifier[name] . identifier[replace] ( literal[string] , literal[string] )
def arg_name(self): """ Returns the name of the parameter as a command line flag """ if self.constraint is bool and self.value: return '--no-%s' % self.name.replace('_', '-') # depends on [control=['if'], data=[]] return '--%s' % self.name.replace('_', '-')
def init_blueprint(self, blueprint): """Initialize a Flask Blueprint, similar to init_app, but without the access to the application config. Keyword Arguments: blueprint {Flask Blueprint} -- Flask Blueprint instance to initialize (Default: {None}) """ if self._route is not None: raise TypeError("route cannot be set when using blueprints!") blueprint.rak = self blueprint.add_url_rule("", view_func=getattr(self, self._view_name), methods=['POST'])
def function[init_blueprint, parameter[self, blueprint]]: constant[Initialize a Flask Blueprint, similar to init_app, but without the access to the application config. Keyword Arguments: blueprint {Flask Blueprint} -- Flask Blueprint instance to initialize (Default: {None}) ] if compare[name[self]._route is_not constant[None]] begin[:] <ast.Raise object at 0x7da18eb55120> name[blueprint].rak assign[=] name[self] call[name[blueprint].add_url_rule, parameter[constant[]]]
keyword[def] identifier[init_blueprint] ( identifier[self] , identifier[blueprint] ): literal[string] keyword[if] identifier[self] . identifier[_route] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[blueprint] . identifier[rak] = identifier[self] identifier[blueprint] . identifier[add_url_rule] ( literal[string] , identifier[view_func] = identifier[getattr] ( identifier[self] , identifier[self] . identifier[_view_name] ), identifier[methods] =[ literal[string] ])
def init_blueprint(self, blueprint): """Initialize a Flask Blueprint, similar to init_app, but without the access to the application config. Keyword Arguments: blueprint {Flask Blueprint} -- Flask Blueprint instance to initialize (Default: {None}) """ if self._route is not None: raise TypeError('route cannot be set when using blueprints!') # depends on [control=['if'], data=[]] blueprint.rak = self blueprint.add_url_rule('', view_func=getattr(self, self._view_name), methods=['POST'])
def get_modules(folder, include_meta=False): """Finds modules (recursively) in folder :param folder: root folder :param include_meta: whether include meta files like (__init__ or __version__) :return: list of modules """ files = [ file for file in _get_modules(folder) if is_file(file) # just files ] if not include_meta: files = [ file for file in files if not Document(file).name.startswith("__") ] return files
def function[get_modules, parameter[folder, include_meta]]: constant[Finds modules (recursively) in folder :param folder: root folder :param include_meta: whether include meta files like (__init__ or __version__) :return: list of modules ] variable[files] assign[=] <ast.ListComp object at 0x7da207f9b4c0> if <ast.UnaryOp object at 0x7da207f99960> begin[:] variable[files] assign[=] <ast.ListComp object at 0x7da207f9bca0> return[name[files]]
keyword[def] identifier[get_modules] ( identifier[folder] , identifier[include_meta] = keyword[False] ): literal[string] identifier[files] =[ identifier[file] keyword[for] identifier[file] keyword[in] identifier[_get_modules] ( identifier[folder] ) keyword[if] identifier[is_file] ( identifier[file] ) ] keyword[if] keyword[not] identifier[include_meta] : identifier[files] =[ identifier[file] keyword[for] identifier[file] keyword[in] identifier[files] keyword[if] keyword[not] identifier[Document] ( identifier[file] ). identifier[name] . identifier[startswith] ( literal[string] ) ] keyword[return] identifier[files]
def get_modules(folder, include_meta=False): """Finds modules (recursively) in folder :param folder: root folder :param include_meta: whether include meta files like (__init__ or __version__) :return: list of modules """ # just files files = [file for file in _get_modules(folder) if is_file(file)] if not include_meta: files = [file for file in files if not Document(file).name.startswith('__')] # depends on [control=['if'], data=[]] return files
def process_transport_command(self, header, message): """Parse a command coming in through the transport command subscription""" if not isinstance(message, dict): return relevant = False if "host" in message: # Filter by host if message["host"] != self.__hostid: return relevant = True if "service" in message: # Filter by service if message["service"] != self._service_class_name: return relevant = True if not relevant: # Ignore message unless at least one filter matches return if message.get("command"): self.log.info( "Received command '%s' via transport layer", message["command"] ) if message["command"] == "shutdown": self.shutdown = True else: self.log.warning("Received invalid transport command message")
def function[process_transport_command, parameter[self, header, message]]: constant[Parse a command coming in through the transport command subscription] if <ast.UnaryOp object at 0x7da18c4cfe80> begin[:] return[None] variable[relevant] assign[=] constant[False] if compare[constant[host] in name[message]] begin[:] if compare[call[name[message]][constant[host]] not_equal[!=] name[self].__hostid] begin[:] return[None] variable[relevant] assign[=] constant[True] if compare[constant[service] in name[message]] begin[:] if compare[call[name[message]][constant[service]] not_equal[!=] name[self]._service_class_name] begin[:] return[None] variable[relevant] assign[=] constant[True] if <ast.UnaryOp object at 0x7da18c4ce140> begin[:] return[None] if call[name[message].get, parameter[constant[command]]] begin[:] call[name[self].log.info, parameter[constant[Received command '%s' via transport layer], call[name[message]][constant[command]]]] if compare[call[name[message]][constant[command]] equal[==] constant[shutdown]] begin[:] name[self].shutdown assign[=] constant[True]
keyword[def] identifier[process_transport_command] ( identifier[self] , identifier[header] , identifier[message] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[message] , identifier[dict] ): keyword[return] identifier[relevant] = keyword[False] keyword[if] literal[string] keyword[in] identifier[message] : keyword[if] identifier[message] [ literal[string] ]!= identifier[self] . identifier[__hostid] : keyword[return] identifier[relevant] = keyword[True] keyword[if] literal[string] keyword[in] identifier[message] : keyword[if] identifier[message] [ literal[string] ]!= identifier[self] . identifier[_service_class_name] : keyword[return] identifier[relevant] = keyword[True] keyword[if] keyword[not] identifier[relevant] : keyword[return] keyword[if] identifier[message] . identifier[get] ( literal[string] ): identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[message] [ literal[string] ] ) keyword[if] identifier[message] [ literal[string] ]== literal[string] : identifier[self] . identifier[shutdown] = keyword[True] keyword[else] : identifier[self] . identifier[log] . identifier[warning] ( literal[string] )
def process_transport_command(self, header, message): """Parse a command coming in through the transport command subscription""" if not isinstance(message, dict): return # depends on [control=['if'], data=[]] relevant = False if 'host' in message: # Filter by host if message['host'] != self.__hostid: return # depends on [control=['if'], data=[]] relevant = True # depends on [control=['if'], data=['message']] if 'service' in message: # Filter by service if message['service'] != self._service_class_name: return # depends on [control=['if'], data=[]] relevant = True # depends on [control=['if'], data=['message']] if not relevant: # Ignore message unless at least one filter matches return # depends on [control=['if'], data=[]] if message.get('command'): self.log.info("Received command '%s' via transport layer", message['command']) if message['command'] == 'shutdown': self.shutdown = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: self.log.warning('Received invalid transport command message')
def __sub_del_reference(self, req, key): """Blindly clear reference to pending subscription on failure.""" if not req.success: try: self.__new_subs.pop(key) except KeyError: logger.warning('No sub ref %s', key)
def function[__sub_del_reference, parameter[self, req, key]]: constant[Blindly clear reference to pending subscription on failure.] if <ast.UnaryOp object at 0x7da1b1baa410> begin[:] <ast.Try object at 0x7da1b1bab430>
keyword[def] identifier[__sub_del_reference] ( identifier[self] , identifier[req] , identifier[key] ): literal[string] keyword[if] keyword[not] identifier[req] . identifier[success] : keyword[try] : identifier[self] . identifier[__new_subs] . identifier[pop] ( identifier[key] ) keyword[except] identifier[KeyError] : identifier[logger] . identifier[warning] ( literal[string] , identifier[key] )
def __sub_del_reference(self, req, key): """Blindly clear reference to pending subscription on failure.""" if not req.success: try: self.__new_subs.pop(key) # depends on [control=['try'], data=[]] except KeyError: logger.warning('No sub ref %s', key) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
def generate_credentials(self, name, role_arn=None, ttl="3600s", endpoint='creds', mount_point=DEFAULT_MOUNT_POINT): """Generates credential based on the named role. This role must be created before queried. The /aws/creds and /aws/sts endpoints are almost identical. The exception is when retrieving credentials for a role that was specified with the legacy arn or policy parameter. In this case, credentials retrieved through /aws/sts must be of either the assumed_role or federation_token types, and credentials retrieved through /aws/creds must be of the iam_user type. :param name: Specifies the name of the role to generate credentials against. This is part of the request URL. :type name: str | unicode :param role_arn: The ARN of the role to assume if credential_type on the Vault role is assumed_role. Must match one of the allowed role ARNs in the Vault role. Optional if the Vault role only allows a single AWS role ARN; required otherwise. :type role_arn: str | unicode :param ttl: Specifies the TTL for the use of the STS token. This is specified as a string with a duration suffix. Valid only when credential_type is assumed_role or federation_token. When not specified, the default sts_ttl set for the role will be used. If that is also not set, then the default value of 3600s will be used. AWS places limits on the maximum TTL allowed. See the AWS documentation on the DurationSeconds parameter for AssumeRole (for assumed_role credential types) and GetFederationToken (for federation_token credential types) for more details. :type ttl: str | unicode :param endpoint: Supported endpoints: GET: /{mount_point}/creds/{name}. Produces: 200 application/json GET: /{mount_point}/sts/{name}. Produces: 200 application/json :type endpoint: str | unicode :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the request. :rtype: dict """ if endpoint not in ALLOWED_CREDS_ENDPOINTS: error_msg = 'invalid endpoint argument provided "{arg}", supported types: "{allowed_endpoints}"' raise exceptions.ParamValidationError(error_msg.format( arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS), )) params = { 'name': name, 'role_arn': role_arn, 'ttl': ttl, } api_path = '/v1/{mount_point}/{endpoint}/{name}'.format( mount_point=mount_point, endpoint=endpoint, name=name, ) response = self._adapter.post( url=api_path, json=params, ) return response.json()
def function[generate_credentials, parameter[self, name, role_arn, ttl, endpoint, mount_point]]: constant[Generates credential based on the named role. This role must be created before queried. The /aws/creds and /aws/sts endpoints are almost identical. The exception is when retrieving credentials for a role that was specified with the legacy arn or policy parameter. In this case, credentials retrieved through /aws/sts must be of either the assumed_role or federation_token types, and credentials retrieved through /aws/creds must be of the iam_user type. :param name: Specifies the name of the role to generate credentials against. This is part of the request URL. :type name: str | unicode :param role_arn: The ARN of the role to assume if credential_type on the Vault role is assumed_role. Must match one of the allowed role ARNs in the Vault role. Optional if the Vault role only allows a single AWS role ARN; required otherwise. :type role_arn: str | unicode :param ttl: Specifies the TTL for the use of the STS token. This is specified as a string with a duration suffix. Valid only when credential_type is assumed_role or federation_token. When not specified, the default sts_ttl set for the role will be used. If that is also not set, then the default value of 3600s will be used. AWS places limits on the maximum TTL allowed. See the AWS documentation on the DurationSeconds parameter for AssumeRole (for assumed_role credential types) and GetFederationToken (for federation_token credential types) for more details. :type ttl: str | unicode :param endpoint: Supported endpoints: GET: /{mount_point}/creds/{name}. Produces: 200 application/json GET: /{mount_point}/sts/{name}. Produces: 200 application/json :type endpoint: str | unicode :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the request. :rtype: dict ] if compare[name[endpoint] <ast.NotIn object at 0x7da2590d7190> name[ALLOWED_CREDS_ENDPOINTS]] begin[:] variable[error_msg] assign[=] constant[invalid endpoint argument provided "{arg}", supported types: "{allowed_endpoints}"] <ast.Raise object at 0x7da18f812e30> variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18f813b20>, <ast.Constant object at 0x7da18f8132e0>, <ast.Constant object at 0x7da18f810c40>], [<ast.Name object at 0x7da18f812950>, <ast.Name object at 0x7da18f811630>, <ast.Name object at 0x7da18f812710>]] variable[api_path] assign[=] call[constant[/v1/{mount_point}/{endpoint}/{name}].format, parameter[]] variable[response] assign[=] call[name[self]._adapter.post, parameter[]] return[call[name[response].json, parameter[]]]
keyword[def] identifier[generate_credentials] ( identifier[self] , identifier[name] , identifier[role_arn] = keyword[None] , identifier[ttl] = literal[string] , identifier[endpoint] = literal[string] , identifier[mount_point] = identifier[DEFAULT_MOUNT_POINT] ): literal[string] keyword[if] identifier[endpoint] keyword[not] keyword[in] identifier[ALLOWED_CREDS_ENDPOINTS] : identifier[error_msg] = literal[string] keyword[raise] identifier[exceptions] . identifier[ParamValidationError] ( identifier[error_msg] . identifier[format] ( identifier[arg] = identifier[endpoint] , identifier[allowed_endpoints] = literal[string] . identifier[join] ( identifier[ALLOWED_CREDS_ENDPOINTS] ), )) identifier[params] ={ literal[string] : identifier[name] , literal[string] : identifier[role_arn] , literal[string] : identifier[ttl] , } identifier[api_path] = literal[string] . identifier[format] ( identifier[mount_point] = identifier[mount_point] , identifier[endpoint] = identifier[endpoint] , identifier[name] = identifier[name] , ) identifier[response] = identifier[self] . identifier[_adapter] . identifier[post] ( identifier[url] = identifier[api_path] , identifier[json] = identifier[params] , ) keyword[return] identifier[response] . identifier[json] ()
def generate_credentials(self, name, role_arn=None, ttl='3600s', endpoint='creds', mount_point=DEFAULT_MOUNT_POINT): """Generates credential based on the named role. This role must be created before queried. The /aws/creds and /aws/sts endpoints are almost identical. The exception is when retrieving credentials for a role that was specified with the legacy arn or policy parameter. In this case, credentials retrieved through /aws/sts must be of either the assumed_role or federation_token types, and credentials retrieved through /aws/creds must be of the iam_user type. :param name: Specifies the name of the role to generate credentials against. This is part of the request URL. :type name: str | unicode :param role_arn: The ARN of the role to assume if credential_type on the Vault role is assumed_role. Must match one of the allowed role ARNs in the Vault role. Optional if the Vault role only allows a single AWS role ARN; required otherwise. :type role_arn: str | unicode :param ttl: Specifies the TTL for the use of the STS token. This is specified as a string with a duration suffix. Valid only when credential_type is assumed_role or federation_token. When not specified, the default sts_ttl set for the role will be used. If that is also not set, then the default value of 3600s will be used. AWS places limits on the maximum TTL allowed. See the AWS documentation on the DurationSeconds parameter for AssumeRole (for assumed_role credential types) and GetFederationToken (for federation_token credential types) for more details. :type ttl: str | unicode :param endpoint: Supported endpoints: GET: /{mount_point}/creds/{name}. Produces: 200 application/json GET: /{mount_point}/sts/{name}. Produces: 200 application/json :type endpoint: str | unicode :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the request. :rtype: dict """ if endpoint not in ALLOWED_CREDS_ENDPOINTS: error_msg = 'invalid endpoint argument provided "{arg}", supported types: "{allowed_endpoints}"' raise exceptions.ParamValidationError(error_msg.format(arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS))) # depends on [control=['if'], data=['endpoint', 'ALLOWED_CREDS_ENDPOINTS']] params = {'name': name, 'role_arn': role_arn, 'ttl': ttl} api_path = '/v1/{mount_point}/{endpoint}/{name}'.format(mount_point=mount_point, endpoint=endpoint, name=name) response = self._adapter.post(url=api_path, json=params) return response.json()
def _printInstances(self, hrlinetop=True): """ print(more informative stats about the object) """ if not self.currentEntity: # ==> ontology level return x = self.currentEntity['object'] if self.currentEntity['type'] == 'class': if hrlinetop: self._print("----------------") self._print("INSTANCES: [%d]" % len(x.instances, "IMPORTANT")) for i in x.instances: self._print(i.qname) self._print("----------------") return
def function[_printInstances, parameter[self, hrlinetop]]: constant[ print(more informative stats about the object) ] if <ast.UnaryOp object at 0x7da1b1013e50> begin[:] return[None] variable[x] assign[=] call[name[self].currentEntity][constant[object]] if compare[call[name[self].currentEntity][constant[type]] equal[==] constant[class]] begin[:] if name[hrlinetop] begin[:] call[name[self]._print, parameter[constant[----------------]]] call[name[self]._print, parameter[binary_operation[constant[INSTANCES: [%d]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[x].instances, constant[IMPORTANT]]]]]] for taget[name[i]] in starred[name[x].instances] begin[:] call[name[self]._print, parameter[name[i].qname]] call[name[self]._print, parameter[constant[----------------]]] return[None]
keyword[def] identifier[_printInstances] ( identifier[self] , identifier[hrlinetop] = keyword[True] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[currentEntity] : keyword[return] identifier[x] = identifier[self] . identifier[currentEntity] [ literal[string] ] keyword[if] identifier[self] . identifier[currentEntity] [ literal[string] ]== literal[string] : keyword[if] identifier[hrlinetop] : identifier[self] . identifier[_print] ( literal[string] ) identifier[self] . identifier[_print] ( literal[string] % identifier[len] ( identifier[x] . identifier[instances] , literal[string] )) keyword[for] identifier[i] keyword[in] identifier[x] . identifier[instances] : identifier[self] . identifier[_print] ( identifier[i] . identifier[qname] ) identifier[self] . identifier[_print] ( literal[string] ) keyword[return]
def _printInstances(self, hrlinetop=True): """ print(more informative stats about the object) """ if not self.currentEntity: # ==> ontology level return # depends on [control=['if'], data=[]] x = self.currentEntity['object'] if self.currentEntity['type'] == 'class': if hrlinetop: self._print('----------------') # depends on [control=['if'], data=[]] self._print('INSTANCES: [%d]' % len(x.instances, 'IMPORTANT')) for i in x.instances: self._print(i.qname) # depends on [control=['for'], data=['i']] self._print('----------------') # depends on [control=['if'], data=[]] return
def fine_tune_model_from_args(args: argparse.Namespace): """ Just converts from an ``argparse.Namespace`` object to string paths. """ fine_tune_model_from_file_paths(model_archive_path=args.model_archive, config_file=args.config_file, serialization_dir=args.serialization_dir, overrides=args.overrides, extend_vocab=args.extend_vocab, file_friendly_logging=args.file_friendly_logging, batch_weight_key=args.batch_weight_key, embedding_sources_mapping=args.embedding_sources_mapping)
def function[fine_tune_model_from_args, parameter[args]]: constant[ Just converts from an ``argparse.Namespace`` object to string paths. ] call[name[fine_tune_model_from_file_paths], parameter[]]
keyword[def] identifier[fine_tune_model_from_args] ( identifier[args] : identifier[argparse] . identifier[Namespace] ): literal[string] identifier[fine_tune_model_from_file_paths] ( identifier[model_archive_path] = identifier[args] . identifier[model_archive] , identifier[config_file] = identifier[args] . identifier[config_file] , identifier[serialization_dir] = identifier[args] . identifier[serialization_dir] , identifier[overrides] = identifier[args] . identifier[overrides] , identifier[extend_vocab] = identifier[args] . identifier[extend_vocab] , identifier[file_friendly_logging] = identifier[args] . identifier[file_friendly_logging] , identifier[batch_weight_key] = identifier[args] . identifier[batch_weight_key] , identifier[embedding_sources_mapping] = identifier[args] . identifier[embedding_sources_mapping] )
def fine_tune_model_from_args(args: argparse.Namespace): """ Just converts from an ``argparse.Namespace`` object to string paths. """ fine_tune_model_from_file_paths(model_archive_path=args.model_archive, config_file=args.config_file, serialization_dir=args.serialization_dir, overrides=args.overrides, extend_vocab=args.extend_vocab, file_friendly_logging=args.file_friendly_logging, batch_weight_key=args.batch_weight_key, embedding_sources_mapping=args.embedding_sources_mapping)
def get_all_pages_by_label(self, label, start=0, limit=50): """ Get all page by label :param label: :param start: OPTIONAL: The start point of the collection to return. Default: None (0). :param limit: OPTIONAL: The limit of the number of pages to return, this may be restricted by fixed system limits. Default: 50 :return: """ url = 'rest/api/content/search' params = {} if label: params['cql'] = 'type={type}%20AND%20label={label}'.format(type='page', label=label) if start: params['start'] = start if limit: params['limit'] = limit return (self.get(url, params=params) or {}).get('results')
def function[get_all_pages_by_label, parameter[self, label, start, limit]]: constant[ Get all page by label :param label: :param start: OPTIONAL: The start point of the collection to return. Default: None (0). :param limit: OPTIONAL: The limit of the number of pages to return, this may be restricted by fixed system limits. Default: 50 :return: ] variable[url] assign[=] constant[rest/api/content/search] variable[params] assign[=] dictionary[[], []] if name[label] begin[:] call[name[params]][constant[cql]] assign[=] call[constant[type={type}%20AND%20label={label}].format, parameter[]] if name[start] begin[:] call[name[params]][constant[start]] assign[=] name[start] if name[limit] begin[:] call[name[params]][constant[limit]] assign[=] name[limit] return[call[<ast.BoolOp object at 0x7da20e955c00>.get, parameter[constant[results]]]]
keyword[def] identifier[get_all_pages_by_label] ( identifier[self] , identifier[label] , identifier[start] = literal[int] , identifier[limit] = literal[int] ): literal[string] identifier[url] = literal[string] identifier[params] ={} keyword[if] identifier[label] : identifier[params] [ literal[string] ]= literal[string] . identifier[format] ( identifier[type] = literal[string] , identifier[label] = identifier[label] ) keyword[if] identifier[start] : identifier[params] [ literal[string] ]= identifier[start] keyword[if] identifier[limit] : identifier[params] [ literal[string] ]= identifier[limit] keyword[return] ( identifier[self] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] ) keyword[or] {}). identifier[get] ( literal[string] )
def get_all_pages_by_label(self, label, start=0, limit=50): """ Get all page by label :param label: :param start: OPTIONAL: The start point of the collection to return. Default: None (0). :param limit: OPTIONAL: The limit of the number of pages to return, this may be restricted by fixed system limits. Default: 50 :return: """ url = 'rest/api/content/search' params = {} if label: params['cql'] = 'type={type}%20AND%20label={label}'.format(type='page', label=label) # depends on [control=['if'], data=[]] if start: params['start'] = start # depends on [control=['if'], data=[]] if limit: params['limit'] = limit # depends on [control=['if'], data=[]] return (self.get(url, params=params) or {}).get('results')
def end(self): """End access to the SD interface and close the HDF file. Args:: no argument Returns:: None The instance should not be used afterwards. The 'end()' method is implicitly called when the SD instance is deleted. C library equivalent : SDend """ status = _C.SDend(self._id) _checkErr('end', status, "cannot execute") self._id = None
def function[end, parameter[self]]: constant[End access to the SD interface and close the HDF file. Args:: no argument Returns:: None The instance should not be used afterwards. The 'end()' method is implicitly called when the SD instance is deleted. C library equivalent : SDend ] variable[status] assign[=] call[name[_C].SDend, parameter[name[self]._id]] call[name[_checkErr], parameter[constant[end], name[status], constant[cannot execute]]] name[self]._id assign[=] constant[None]
keyword[def] identifier[end] ( identifier[self] ): literal[string] identifier[status] = identifier[_C] . identifier[SDend] ( identifier[self] . identifier[_id] ) identifier[_checkErr] ( literal[string] , identifier[status] , literal[string] ) identifier[self] . identifier[_id] = keyword[None]
def end(self): """End access to the SD interface and close the HDF file. Args:: no argument Returns:: None The instance should not be used afterwards. The 'end()' method is implicitly called when the SD instance is deleted. C library equivalent : SDend """ status = _C.SDend(self._id) _checkErr('end', status, 'cannot execute') self._id = None
def get_session_id(self): """ get a unique id (shortish string) to allow simple aggregation of log records from multiple sources. This id is used for the life of the running program to allow extraction from all logs. WARING - this can give duplicate sessions when 2 apps hit it at the same time. """ max_session = '0' try: with open(self.log_folder + os.sep + '_sessions.txt', 'r') as f: for _ in f: txt = f.readline() if txt.strip('\n') != '': max_session = txt except Exception: max_session = '1' this_session = str(int(max_session) + random.randint(9,100)).zfill(9) # not a great way to ensure uniqueness - TODO FIX with open(self.log_folder + os.sep + '_sessions.txt', 'a') as f2: f2.write(this_session + '\n') return this_session
def function[get_session_id, parameter[self]]: constant[ get a unique id (shortish string) to allow simple aggregation of log records from multiple sources. This id is used for the life of the running program to allow extraction from all logs. WARING - this can give duplicate sessions when 2 apps hit it at the same time. ] variable[max_session] assign[=] constant[0] <ast.Try object at 0x7da20e9b30d0> variable[this_session] assign[=] call[call[name[str], parameter[binary_operation[call[name[int], parameter[name[max_session]]] + call[name[random].randint, parameter[constant[9], constant[100]]]]]].zfill, parameter[constant[9]]] with call[name[open], parameter[binary_operation[binary_operation[name[self].log_folder + name[os].sep] + constant[_sessions.txt]], constant[a]]] begin[:] call[name[f2].write, parameter[binary_operation[name[this_session] + constant[ ]]]] return[name[this_session]]
keyword[def] identifier[get_session_id] ( identifier[self] ): literal[string] identifier[max_session] = literal[string] keyword[try] : keyword[with] identifier[open] ( identifier[self] . identifier[log_folder] + identifier[os] . identifier[sep] + literal[string] , literal[string] ) keyword[as] identifier[f] : keyword[for] identifier[_] keyword[in] identifier[f] : identifier[txt] = identifier[f] . identifier[readline] () keyword[if] identifier[txt] . identifier[strip] ( literal[string] )!= literal[string] : identifier[max_session] = identifier[txt] keyword[except] identifier[Exception] : identifier[max_session] = literal[string] identifier[this_session] = identifier[str] ( identifier[int] ( identifier[max_session] )+ identifier[random] . identifier[randint] ( literal[int] , literal[int] )). identifier[zfill] ( literal[int] ) keyword[with] identifier[open] ( identifier[self] . identifier[log_folder] + identifier[os] . identifier[sep] + literal[string] , literal[string] ) keyword[as] identifier[f2] : identifier[f2] . identifier[write] ( identifier[this_session] + literal[string] ) keyword[return] identifier[this_session]
def get_session_id(self): """ get a unique id (shortish string) to allow simple aggregation of log records from multiple sources. This id is used for the life of the running program to allow extraction from all logs. WARING - this can give duplicate sessions when 2 apps hit it at the same time. """ max_session = '0' try: with open(self.log_folder + os.sep + '_sessions.txt', 'r') as f: for _ in f: txt = f.readline() if txt.strip('\n') != '': max_session = txt # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]] except Exception: max_session = '1' # depends on [control=['except'], data=[]] this_session = str(int(max_session) + random.randint(9, 100)).zfill(9) # not a great way to ensure uniqueness - TODO FIX with open(self.log_folder + os.sep + '_sessions.txt', 'a') as f2: f2.write(this_session + '\n') # depends on [control=['with'], data=['f2']] return this_session
def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin """ Format a tokenlist ``tokens`` with the formatter ``formatter``. If ``outfile`` is given and a valid file object (an object with a ``write`` method), the result will be written to it, otherwise it is returned as a string. """ try: if not outfile: realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO() formatter.format(tokens, realoutfile) return realoutfile.getvalue() else: formatter.format(tokens, outfile) except TypeError as err: if (isinstance(err.args[0], str) and ('unbound method format' in err.args[0] or 'missing 1 required positional argument' in err.args[0])): raise TypeError('format() argument must be a formatter instance, ' 'not a class') raise
def function[format, parameter[tokens, formatter, outfile]]: constant[ Format a tokenlist ``tokens`` with the formatter ``formatter``. If ``outfile`` is given and a valid file object (an object with a ``write`` method), the result will be written to it, otherwise it is returned as a string. ] <ast.Try object at 0x7da18bc71d80>
keyword[def] identifier[format] ( identifier[tokens] , identifier[formatter] , identifier[outfile] = keyword[None] ): literal[string] keyword[try] : keyword[if] keyword[not] identifier[outfile] : identifier[realoutfile] = identifier[getattr] ( identifier[formatter] , literal[string] , keyword[None] ) keyword[and] identifier[BytesIO] () keyword[or] identifier[StringIO] () identifier[formatter] . identifier[format] ( identifier[tokens] , identifier[realoutfile] ) keyword[return] identifier[realoutfile] . identifier[getvalue] () keyword[else] : identifier[formatter] . identifier[format] ( identifier[tokens] , identifier[outfile] ) keyword[except] identifier[TypeError] keyword[as] identifier[err] : keyword[if] ( identifier[isinstance] ( identifier[err] . identifier[args] [ literal[int] ], identifier[str] ) keyword[and] ( literal[string] keyword[in] identifier[err] . identifier[args] [ literal[int] ] keyword[or] literal[string] keyword[in] identifier[err] . identifier[args] [ literal[int] ])): keyword[raise] identifier[TypeError] ( literal[string] literal[string] ) keyword[raise]
def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin '\n Format a tokenlist ``tokens`` with the formatter ``formatter``.\n\n If ``outfile`` is given and a valid file object (an object\n with a ``write`` method), the result will be written to it, otherwise\n it is returned as a string.\n ' try: if not outfile: realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO() formatter.format(tokens, realoutfile) return realoutfile.getvalue() # depends on [control=['if'], data=[]] else: formatter.format(tokens, outfile) # depends on [control=['try'], data=[]] except TypeError as err: if isinstance(err.args[0], str) and ('unbound method format' in err.args[0] or 'missing 1 required positional argument' in err.args[0]): raise TypeError('format() argument must be a formatter instance, not a class') # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=['err']]
def op_match_funcdef_handle(self, original, loc, tokens): """Process infix match defs. Result must be passed to insert_docstring_handle.""" if len(tokens) == 3: func, args = get_infix_items(tokens) cond = None elif len(tokens) == 4: func, args = get_infix_items(tokens[:-1]) cond = tokens[-1] else: raise CoconutInternalException("invalid infix match function definition tokens", tokens) name_tokens = [func, args] if cond is not None: name_tokens.append(cond) return self.name_match_funcdef_handle(original, loc, name_tokens)
def function[op_match_funcdef_handle, parameter[self, original, loc, tokens]]: constant[Process infix match defs. Result must be passed to insert_docstring_handle.] if compare[call[name[len], parameter[name[tokens]]] equal[==] constant[3]] begin[:] <ast.Tuple object at 0x7da20c7c8760> assign[=] call[name[get_infix_items], parameter[name[tokens]]] variable[cond] assign[=] constant[None] variable[name_tokens] assign[=] list[[<ast.Name object at 0x7da1b0abbbe0>, <ast.Name object at 0x7da1b0abba00>]] if compare[name[cond] is_not constant[None]] begin[:] call[name[name_tokens].append, parameter[name[cond]]] return[call[name[self].name_match_funcdef_handle, parameter[name[original], name[loc], name[name_tokens]]]]
keyword[def] identifier[op_match_funcdef_handle] ( identifier[self] , identifier[original] , identifier[loc] , identifier[tokens] ): literal[string] keyword[if] identifier[len] ( identifier[tokens] )== literal[int] : identifier[func] , identifier[args] = identifier[get_infix_items] ( identifier[tokens] ) identifier[cond] = keyword[None] keyword[elif] identifier[len] ( identifier[tokens] )== literal[int] : identifier[func] , identifier[args] = identifier[get_infix_items] ( identifier[tokens] [:- literal[int] ]) identifier[cond] = identifier[tokens] [- literal[int] ] keyword[else] : keyword[raise] identifier[CoconutInternalException] ( literal[string] , identifier[tokens] ) identifier[name_tokens] =[ identifier[func] , identifier[args] ] keyword[if] identifier[cond] keyword[is] keyword[not] keyword[None] : identifier[name_tokens] . identifier[append] ( identifier[cond] ) keyword[return] identifier[self] . identifier[name_match_funcdef_handle] ( identifier[original] , identifier[loc] , identifier[name_tokens] )
def op_match_funcdef_handle(self, original, loc, tokens): """Process infix match defs. Result must be passed to insert_docstring_handle.""" if len(tokens) == 3: (func, args) = get_infix_items(tokens) cond = None # depends on [control=['if'], data=[]] elif len(tokens) == 4: (func, args) = get_infix_items(tokens[:-1]) cond = tokens[-1] # depends on [control=['if'], data=[]] else: raise CoconutInternalException('invalid infix match function definition tokens', tokens) name_tokens = [func, args] if cond is not None: name_tokens.append(cond) # depends on [control=['if'], data=['cond']] return self.name_match_funcdef_handle(original, loc, name_tokens)
def _diff_replication_group(current, desired): ''' If you need to enhance what modify_replication_group() considers when deciding what is to be (or can be) updated, add it to 'modifiable' below. It's a dict mapping the param as used in modify_replication_group() to that in describe_replication_groups(). Any data fiddlery that needs to be done to make the mappings meaningful should be done in the munging section below as well. This function will ONLY touch settings that are explicitly called out in 'desired' - any settings which might have previously been changed from their 'default' values will not be changed back simply by leaving them out of 'desired'. This is both intentional, and much, much easier to code :) ''' if current.get('AutomaticFailover') is not None: current['AutomaticFailoverEnabled'] = True if current['AutomaticFailover'] in ('enabled', 'enabling') else False modifiable = { # Amazingly, the AWS API provides NO WAY to query the current state of most repl group # settings! All we can do is send a modify op with the desired value, just in case it's # different. And THEN, we can't determine if it's been changed! Stupid? YOU BET! 'AutomaticFailoverEnabled': 'AutomaticFailoverEnabled', 'AutoMinorVersionUpgrade': None, 'CacheNodeType': None, 'CacheParameterGroupName': None, 'CacheSecurityGroupNames': None, 'EngineVersion': None, 'NotificationTopicArn': None, 'NotificationTopicStatus': None, 'PreferredMaintenanceWindow': None, 'PrimaryClusterId': None, 'ReplicationGroupDescription': 'Description', 'SecurityGroupIds': None, 'SnapshotRetentionLimit': 'SnapshotRetentionLimit', 'SnapshottingClusterId': 'SnapshottingClusterId', 'SnapshotWindow': 'SnapshotWindow' } need_update = {} for m, o in modifiable.items(): if m in desired: if not o: # Always pass these through - let AWS do the math... need_update[m] = desired[m] else: if m in current: # Equivalence testing works fine for current simple type comparisons # This might need enhancement if more complex structures enter the picture if current[m] != desired[m]: need_update[m] = desired[m] return need_update
def function[_diff_replication_group, parameter[current, desired]]: constant[ If you need to enhance what modify_replication_group() considers when deciding what is to be (or can be) updated, add it to 'modifiable' below. It's a dict mapping the param as used in modify_replication_group() to that in describe_replication_groups(). Any data fiddlery that needs to be done to make the mappings meaningful should be done in the munging section below as well. This function will ONLY touch settings that are explicitly called out in 'desired' - any settings which might have previously been changed from their 'default' values will not be changed back simply by leaving them out of 'desired'. This is both intentional, and much, much easier to code :) ] if compare[call[name[current].get, parameter[constant[AutomaticFailover]]] is_not constant[None]] begin[:] call[name[current]][constant[AutomaticFailoverEnabled]] assign[=] <ast.IfExp object at 0x7da207f9a8f0> variable[modifiable] assign[=] dictionary[[<ast.Constant object at 0x7da207f9a7a0>, <ast.Constant object at 0x7da207f9b400>, <ast.Constant object at 0x7da207f9a9b0>, <ast.Constant object at 0x7da207f98790>, <ast.Constant object at 0x7da207f9a4d0>, <ast.Constant object at 0x7da207f9a2c0>, <ast.Constant object at 0x7da207f98520>, <ast.Constant object at 0x7da207f9b700>, <ast.Constant object at 0x7da207f9b9a0>, <ast.Constant object at 0x7da207f98f10>, <ast.Constant object at 0x7da207f98df0>, <ast.Constant object at 0x7da207f98940>, <ast.Constant object at 0x7da207f9b040>, <ast.Constant object at 0x7da207f9ad70>, <ast.Constant object at 0x7da207f9baf0>], [<ast.Constant object at 0x7da207f98af0>, <ast.Constant object at 0x7da207f9ae90>, <ast.Constant object at 0x7da207f99810>, <ast.Constant object at 0x7da207f99780>, <ast.Constant object at 0x7da207f98ac0>, <ast.Constant object at 0x7da207f990c0>, <ast.Constant object at 0x7da207f99c00>, <ast.Constant object at 0x7da207f98fa0>, <ast.Constant object at 0x7da207f997e0>, <ast.Constant object at 0x7da207f9ab00>, <ast.Constant object at 0x7da207f98250>, <ast.Constant object at 0x7da207f992a0>, <ast.Constant object at 0x7da207f99150>, <ast.Constant object at 0x7da207f9bb20>, <ast.Constant object at 0x7da207f98a90>]] variable[need_update] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da207f98bb0>, <ast.Name object at 0x7da207f9b4f0>]]] in starred[call[name[modifiable].items, parameter[]]] begin[:] if compare[name[m] in name[desired]] begin[:] if <ast.UnaryOp object at 0x7da207f9b0a0> begin[:] call[name[need_update]][name[m]] assign[=] call[name[desired]][name[m]] return[name[need_update]]
keyword[def] identifier[_diff_replication_group] ( identifier[current] , identifier[desired] ): literal[string] keyword[if] identifier[current] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[current] [ literal[string] ]= keyword[True] keyword[if] identifier[current] [ literal[string] ] keyword[in] ( literal[string] , literal[string] ) keyword[else] keyword[False] identifier[modifiable] ={ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } identifier[need_update] ={} keyword[for] identifier[m] , identifier[o] keyword[in] identifier[modifiable] . identifier[items] (): keyword[if] identifier[m] keyword[in] identifier[desired] : keyword[if] keyword[not] identifier[o] : identifier[need_update] [ identifier[m] ]= identifier[desired] [ identifier[m] ] keyword[else] : keyword[if] identifier[m] keyword[in] identifier[current] : keyword[if] identifier[current] [ identifier[m] ]!= identifier[desired] [ identifier[m] ]: identifier[need_update] [ identifier[m] ]= identifier[desired] [ identifier[m] ] keyword[return] identifier[need_update]
def _diff_replication_group(current, desired): """ If you need to enhance what modify_replication_group() considers when deciding what is to be (or can be) updated, add it to 'modifiable' below. It's a dict mapping the param as used in modify_replication_group() to that in describe_replication_groups(). Any data fiddlery that needs to be done to make the mappings meaningful should be done in the munging section below as well. This function will ONLY touch settings that are explicitly called out in 'desired' - any settings which might have previously been changed from their 'default' values will not be changed back simply by leaving them out of 'desired'. This is both intentional, and much, much easier to code :) """ if current.get('AutomaticFailover') is not None: current['AutomaticFailoverEnabled'] = True if current['AutomaticFailover'] in ('enabled', 'enabling') else False # depends on [control=['if'], data=[]] # Amazingly, the AWS API provides NO WAY to query the current state of most repl group # settings! All we can do is send a modify op with the desired value, just in case it's # different. And THEN, we can't determine if it's been changed! Stupid? YOU BET! modifiable = {'AutomaticFailoverEnabled': 'AutomaticFailoverEnabled', 'AutoMinorVersionUpgrade': None, 'CacheNodeType': None, 'CacheParameterGroupName': None, 'CacheSecurityGroupNames': None, 'EngineVersion': None, 'NotificationTopicArn': None, 'NotificationTopicStatus': None, 'PreferredMaintenanceWindow': None, 'PrimaryClusterId': None, 'ReplicationGroupDescription': 'Description', 'SecurityGroupIds': None, 'SnapshotRetentionLimit': 'SnapshotRetentionLimit', 'SnapshottingClusterId': 'SnapshottingClusterId', 'SnapshotWindow': 'SnapshotWindow'} need_update = {} for (m, o) in modifiable.items(): if m in desired: if not o: # Always pass these through - let AWS do the math... need_update[m] = desired[m] # depends on [control=['if'], data=[]] elif m in current: # Equivalence testing works fine for current simple type comparisons # This might need enhancement if more complex structures enter the picture if current[m] != desired[m]: need_update[m] = desired[m] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['m', 'current']] # depends on [control=['if'], data=['m', 'desired']] # depends on [control=['for'], data=[]] return need_update
def get_all_subdomains(offset=None, count=None, min_sequence=None, db_path=None, zonefiles_dir=None): """ Static method for getting the list of all subdomains """ opts = get_blockstack_opts() if not is_subdomains_enabled(opts): return [] if db_path is None: db_path = opts['subdomaindb_path'] if zonefiles_dir is None: zonefiles_dir = opts['zonefiles'] db = SubdomainDB(db_path, zonefiles_dir) return db.get_all_subdomains(offset=offset, count=count, min_sequence=None)
def function[get_all_subdomains, parameter[offset, count, min_sequence, db_path, zonefiles_dir]]: constant[ Static method for getting the list of all subdomains ] variable[opts] assign[=] call[name[get_blockstack_opts], parameter[]] if <ast.UnaryOp object at 0x7da18bcc8190> begin[:] return[list[[]]] if compare[name[db_path] is constant[None]] begin[:] variable[db_path] assign[=] call[name[opts]][constant[subdomaindb_path]] if compare[name[zonefiles_dir] is constant[None]] begin[:] variable[zonefiles_dir] assign[=] call[name[opts]][constant[zonefiles]] variable[db] assign[=] call[name[SubdomainDB], parameter[name[db_path], name[zonefiles_dir]]] return[call[name[db].get_all_subdomains, parameter[]]]
keyword[def] identifier[get_all_subdomains] ( identifier[offset] = keyword[None] , identifier[count] = keyword[None] , identifier[min_sequence] = keyword[None] , identifier[db_path] = keyword[None] , identifier[zonefiles_dir] = keyword[None] ): literal[string] identifier[opts] = identifier[get_blockstack_opts] () keyword[if] keyword[not] identifier[is_subdomains_enabled] ( identifier[opts] ): keyword[return] [] keyword[if] identifier[db_path] keyword[is] keyword[None] : identifier[db_path] = identifier[opts] [ literal[string] ] keyword[if] identifier[zonefiles_dir] keyword[is] keyword[None] : identifier[zonefiles_dir] = identifier[opts] [ literal[string] ] identifier[db] = identifier[SubdomainDB] ( identifier[db_path] , identifier[zonefiles_dir] ) keyword[return] identifier[db] . identifier[get_all_subdomains] ( identifier[offset] = identifier[offset] , identifier[count] = identifier[count] , identifier[min_sequence] = keyword[None] )
def get_all_subdomains(offset=None, count=None, min_sequence=None, db_path=None, zonefiles_dir=None): """ Static method for getting the list of all subdomains """ opts = get_blockstack_opts() if not is_subdomains_enabled(opts): return [] # depends on [control=['if'], data=[]] if db_path is None: db_path = opts['subdomaindb_path'] # depends on [control=['if'], data=['db_path']] if zonefiles_dir is None: zonefiles_dir = opts['zonefiles'] # depends on [control=['if'], data=['zonefiles_dir']] db = SubdomainDB(db_path, zonefiles_dir) return db.get_all_subdomains(offset=offset, count=count, min_sequence=None)
def num_trees(n): """ :type n: int :rtype: int """ dp = [0] * (n+1) dp[0] = 1 dp[1] = 1 for i in range(2, n+1): for j in range(i+1): dp[i] += dp[i-j] * dp[j-1] return dp[-1]
def function[num_trees, parameter[n]]: constant[ :type n: int :rtype: int ] variable[dp] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b1e10190>]] * binary_operation[name[n] + constant[1]]] call[name[dp]][constant[0]] assign[=] constant[1] call[name[dp]][constant[1]] assign[=] constant[1] for taget[name[i]] in starred[call[name[range], parameter[constant[2], binary_operation[name[n] + constant[1]]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] + constant[1]]]]] begin[:] <ast.AugAssign object at 0x7da1b1efa140> return[call[name[dp]][<ast.UnaryOp object at 0x7da1b1ef90c0>]]
keyword[def] identifier[num_trees] ( identifier[n] ): literal[string] identifier[dp] =[ literal[int] ]*( identifier[n] + literal[int] ) identifier[dp] [ literal[int] ]= literal[int] identifier[dp] [ literal[int] ]= literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[n] + literal[int] ): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] + literal[int] ): identifier[dp] [ identifier[i] ]+= identifier[dp] [ identifier[i] - identifier[j] ]* identifier[dp] [ identifier[j] - literal[int] ] keyword[return] identifier[dp] [- literal[int] ]
def num_trees(n): """ :type n: int :rtype: int """ dp = [0] * (n + 1) dp[0] = 1 dp[1] = 1 for i in range(2, n + 1): for j in range(i + 1): dp[i] += dp[i - j] * dp[j - 1] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] return dp[-1]
def feed(self, aBuf, aCharLen): """feed a character with known length""" if aCharLen == 2: # we only care about 2-bytes character in our distribution analysis order = self.get_order(aBuf) else: order = -1 if order >= 0: self._mTotalChars += 1 # order is valid if order < self._mTableSize: if 512 > self._mCharToFreqOrder[order]: self._mFreqChars += 1
def function[feed, parameter[self, aBuf, aCharLen]]: constant[feed a character with known length] if compare[name[aCharLen] equal[==] constant[2]] begin[:] variable[order] assign[=] call[name[self].get_order, parameter[name[aBuf]]] if compare[name[order] greater_or_equal[>=] constant[0]] begin[:] <ast.AugAssign object at 0x7da1b0fddf60> if compare[name[order] less[<] name[self]._mTableSize] begin[:] if compare[constant[512] greater[>] call[name[self]._mCharToFreqOrder][name[order]]] begin[:] <ast.AugAssign object at 0x7da1b0fdca60>
keyword[def] identifier[feed] ( identifier[self] , identifier[aBuf] , identifier[aCharLen] ): literal[string] keyword[if] identifier[aCharLen] == literal[int] : identifier[order] = identifier[self] . identifier[get_order] ( identifier[aBuf] ) keyword[else] : identifier[order] =- literal[int] keyword[if] identifier[order] >= literal[int] : identifier[self] . identifier[_mTotalChars] += literal[int] keyword[if] identifier[order] < identifier[self] . identifier[_mTableSize] : keyword[if] literal[int] > identifier[self] . identifier[_mCharToFreqOrder] [ identifier[order] ]: identifier[self] . identifier[_mFreqChars] += literal[int]
def feed(self, aBuf, aCharLen): """feed a character with known length""" if aCharLen == 2: # we only care about 2-bytes character in our distribution analysis order = self.get_order(aBuf) # depends on [control=['if'], data=[]] else: order = -1 if order >= 0: self._mTotalChars += 1 # order is valid if order < self._mTableSize: if 512 > self._mCharToFreqOrder[order]: self._mFreqChars += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['order']] # depends on [control=['if'], data=['order']]
def deploy_script(host, port=22, timeout=900, username='root', password=None, key_filename=None, script=None, name=None, sock_dir=None, provider=None, conf_file=None, start_action=None, make_master=False, master_pub=None, master_pem=None, master_conf=None, minion_pub=None, minion_pem=None, minion_conf=None, keep_tmp=False, script_args=None, script_env=None, ssh_timeout=15, maxtries=15, make_syndic=False, make_minion=True, display_ssh_output=True, preseed_minion_keys=None, parallel=False, sudo_password=None, sudo=False, tty=None, vm_=None, opts=None, tmp_dir='/tmp/.saltcloud', file_map=None, master_sign_pub_file=None, cloud_grains=None, force_minion_config=False, **kwargs): ''' Copy a deploy script to a remote server, execute it, and remove it ''' if not isinstance(opts, dict): opts = {} vm_ = vm_ or {} # if None, default to empty dict cloud_grains = cloud_grains or {} tmp_dir = '{0}-{1}'.format(tmp_dir.rstrip('/'), uuid.uuid4()) deploy_command = salt.config.get_cloud_config_value( 'deploy_command', vm_, opts, default=os.path.join(tmp_dir, 'deploy.sh')) if key_filename is not None and not os.path.isfile(key_filename): raise SaltCloudConfigError( 'The defined key_filename \'{0}\' does not exist'.format( key_filename ) ) gateway = None if 'gateway' in kwargs: gateway = kwargs['gateway'] starttime = time.localtime() log.debug( 'Deploying %s at %s', host, time.strftime('%Y-%m-%d %H:%M:%S', starttime) ) known_hosts_file = kwargs.get('known_hosts_file', '/dev/null') hard_timeout = opts.get('hard_timeout', None) if wait_for_port(host=host, port=port, gateway=gateway): log.debug('SSH port %s on %s is available', port, host) if wait_for_passwd(host, port=port, username=username, password=password, key_filename=key_filename, ssh_timeout=ssh_timeout, display_ssh_output=display_ssh_output, gateway=gateway, known_hosts_file=known_hosts_file, maxtries=maxtries, hard_timeout=hard_timeout): log.debug('Logging into %s:%s as %s', host, port, username) ssh_kwargs = { 'hostname': host, 'port': port, 'username': username, 'timeout': ssh_timeout, 'display_ssh_output': display_ssh_output, 'sudo_password': sudo_password, 'sftp': opts.get('use_sftp', False) } ssh_kwargs.update(__ssh_gateway_config_dict(gateway)) if key_filename: log.debug('Using %s as the key_filename', key_filename) ssh_kwargs['key_filename'] = key_filename elif password and kwargs.get('has_ssh_agent', False) is False: ssh_kwargs['password'] = password if root_cmd('test -e \'{0}\''.format(tmp_dir), tty, sudo, allow_failure=True, **ssh_kwargs): ret = root_cmd(('sh -c "( mkdir -p -m 700 \'{0}\' )"').format(tmp_dir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit( 'Can\'t create temporary ' 'directory in {0} !'.format(tmp_dir) ) if sudo: comps = tmp_dir.lstrip('/').rstrip('/').split('/') if comps: if len(comps) > 1 or comps[0] != 'tmp': ret = root_cmd( 'chown {0} "{1}"'.format(username, tmp_dir), tty, sudo, **ssh_kwargs ) if ret: raise SaltCloudSystemExit( 'Cant set {0} ownership on {1}'.format( username, tmp_dir)) if not isinstance(file_map, dict): file_map = {} # Copy an arbitrary group of files to the target system remote_dirs = [] file_map_success = [] file_map_fail = [] for map_item in file_map: local_file = map_item remote_file = file_map[map_item] if not os.path.exists(map_item): log.error( 'The local file "%s" does not exist, and will not be ' 'copied to "%s" on the target system', local_file, remote_file ) file_map_fail.append({local_file: remote_file}) continue if os.path.isdir(local_file): dir_name = os.path.basename(local_file) remote_dir = os.path.join(os.path.dirname(remote_file), dir_name) else: remote_dir = os.path.dirname(remote_file) if remote_dir not in remote_dirs: root_cmd('mkdir -p \'{0}\''.format(remote_dir), tty, sudo, **ssh_kwargs) if ssh_kwargs['username'] != 'root': root_cmd( 'chown {0} \'{1}\''.format( ssh_kwargs['username'], remote_dir ), tty, sudo, **ssh_kwargs ) remote_dirs.append(remote_dir) ssh_file( opts, remote_file, kwargs=ssh_kwargs, local_file=local_file ) file_map_success.append({local_file: remote_file}) # Minion configuration if minion_pem: ssh_file(opts, '{0}/minion.pem'.format(tmp_dir), minion_pem, ssh_kwargs) ret = root_cmd('chmod 600 \'{0}/minion.pem\''.format(tmp_dir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit( 'Can\'t set perms on {0}/minion.pem'.format(tmp_dir)) if minion_pub: ssh_file(opts, '{0}/minion.pub'.format(tmp_dir), minion_pub, ssh_kwargs) if master_sign_pub_file: ssh_file(opts, '{0}/master_sign.pub'.format(tmp_dir), kwargs=ssh_kwargs, local_file=master_sign_pub_file) if minion_conf: if not isinstance(minion_conf, dict): # Let's not just fail regarding this change, specially # since we can handle it raise DeprecationWarning( '`salt.utils.cloud.deploy_script now only accepts ' 'dictionaries for it\'s `minion_conf` parameter. ' 'Loading YAML...' ) minion_grains = minion_conf.pop('grains', {}) if minion_grains: ssh_file( opts, '{0}/grains'.format(tmp_dir), salt_config_to_yaml(minion_grains), ssh_kwargs ) if cloud_grains and opts.get('enable_cloud_grains', True): minion_conf['grains'] = {'salt-cloud': cloud_grains} ssh_file( opts, '{0}/minion'.format(tmp_dir), salt_config_to_yaml(minion_conf), ssh_kwargs ) # Master configuration if master_pem: ssh_file(opts, '{0}/master.pem'.format(tmp_dir), master_pem, ssh_kwargs) ret = root_cmd('chmod 600 \'{0}/master.pem\''.format(tmp_dir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit( 'Cant set perms on {0}/master.pem'.format(tmp_dir)) if master_pub: ssh_file(opts, '{0}/master.pub'.format(tmp_dir), master_pub, ssh_kwargs) if master_conf: if not isinstance(master_conf, dict): # Let's not just fail regarding this change, specially # since we can handle it raise DeprecationWarning( '`salt.utils.cloud.deploy_script now only accepts ' 'dictionaries for it\'s `master_conf` parameter. ' 'Loading from YAML ...' ) ssh_file( opts, '{0}/master'.format(tmp_dir), salt_config_to_yaml(master_conf), ssh_kwargs ) # XXX: We need to make these paths configurable preseed_minion_keys_tempdir = '{0}/preseed-minion-keys'.format( tmp_dir) if preseed_minion_keys is not None: # Create remote temp dir ret = root_cmd( 'mkdir \'{0}\''.format(preseed_minion_keys_tempdir), tty, sudo, **ssh_kwargs ) if ret: raise SaltCloudSystemExit( 'Cant create {0}'.format(preseed_minion_keys_tempdir)) ret = root_cmd( 'chmod 700 \'{0}\''.format(preseed_minion_keys_tempdir), tty, sudo, **ssh_kwargs ) if ret: raise SaltCloudSystemExit( 'Can\'t set perms on {0}'.format( preseed_minion_keys_tempdir)) if ssh_kwargs['username'] != 'root': root_cmd( 'chown {0} \'{1}\''.format( ssh_kwargs['username'], preseed_minion_keys_tempdir ), tty, sudo, **ssh_kwargs ) # Copy pre-seed minion keys for minion_id, minion_key in six.iteritems(preseed_minion_keys): rpath = os.path.join( preseed_minion_keys_tempdir, minion_id ) ssh_file(opts, rpath, minion_key, ssh_kwargs) if ssh_kwargs['username'] != 'root': root_cmd( 'chown -R root \'{0}\''.format( preseed_minion_keys_tempdir ), tty, sudo, **ssh_kwargs ) if ret: raise SaltCloudSystemExit( 'Can\'t set ownership for {0}'.format( preseed_minion_keys_tempdir)) # Run any pre-flight commands before running deploy scripts preflight_cmds = kwargs.get('preflight_cmds', []) for command in preflight_cmds: cmd_ret = root_cmd(command, tty, sudo, **ssh_kwargs) if cmd_ret: raise SaltCloudSystemExit( 'Pre-flight command failed: \'{0}\''.format(command) ) # The actual deploy script if script: # got strange escaping issues with sudoer, going onto a # subshell fixes that ssh_file(opts, '{0}/deploy.sh'.format(tmp_dir), script, ssh_kwargs) ret = root_cmd( ('sh -c "( chmod +x \'{0}/deploy.sh\' )";' 'exit $?').format(tmp_dir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit( 'Can\'t set perms on {0}/deploy.sh'.format(tmp_dir)) time_used = time.mktime(time.localtime()) - time.mktime(starttime) newtimeout = timeout - time_used queue = None process = None # Consider this code experimental. It causes Salt Cloud to wait # for the minion to check in, and then fire a startup event. # Disabled if parallel because it doesn't work! if start_action and not parallel: queue = multiprocessing.Queue() process = multiprocessing.Process( target=check_auth, kwargs=dict( name=name, sock_dir=sock_dir, timeout=newtimeout, queue=queue ) ) log.debug('Starting new process to wait for salt-minion') process.start() # Run the deploy script if script: if 'bootstrap-salt' in script: deploy_command += ' -c \'{0}\''.format(tmp_dir) if force_minion_config: deploy_command += ' -F' if make_syndic is True: deploy_command += ' -S' if make_master is True: deploy_command += ' -M' if make_minion is False: deploy_command += ' -N' if keep_tmp is True: deploy_command += ' -K' if preseed_minion_keys is not None: deploy_command += ' -k \'{0}\''.format( preseed_minion_keys_tempdir ) if script_args: deploy_command += ' {0}'.format(script_args) if script_env: if not isinstance(script_env, dict): raise SaltCloudSystemExit( 'The \'script_env\' configuration setting NEEDS ' 'to be a dictionary not a {0}'.format( type(script_env) ) ) environ_script_contents = ['#!/bin/sh'] for key, value in six.iteritems(script_env): environ_script_contents.append( 'setenv {0} \'{1}\' >/dev/null 2>&1 || ' 'export {0}=\'{1}\''.format(key, value) ) environ_script_contents.append(deploy_command) # Upload our environ setter wrapper ssh_file( opts, '{0}/environ-deploy-wrapper.sh'.format(tmp_dir), '\n'.join(environ_script_contents), ssh_kwargs ) root_cmd( 'chmod +x \'{0}/environ-deploy-wrapper.sh\''.format(tmp_dir), tty, sudo, **ssh_kwargs ) # The deploy command is now our wrapper deploy_command = '\'{0}/environ-deploy-wrapper.sh\''.format( tmp_dir, ) if root_cmd(deploy_command, tty, sudo, **ssh_kwargs) != 0: raise SaltCloudSystemExit( 'Executing the command \'{0}\' failed'.format( deploy_command ) ) log.debug('Executed command \'%s\'', deploy_command) # Remove the deploy script if not keep_tmp: root_cmd('rm -f \'{0}/deploy.sh\''.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/deploy.sh', tmp_dir) if script_env: root_cmd( 'rm -f \'{0}/environ-deploy-wrapper.sh\''.format( tmp_dir ), tty, sudo, **ssh_kwargs ) log.debug('Removed %s/environ-deploy-wrapper.sh', tmp_dir) if keep_tmp: log.debug('Not removing deployment files from %s/', tmp_dir) else: # Remove minion configuration if minion_pub: root_cmd('rm -f \'{0}/minion.pub\''.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/minion.pub', tmp_dir) if minion_pem: root_cmd('rm -f \'{0}/minion.pem\''.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/minion.pem', tmp_dir) if minion_conf: root_cmd('rm -f \'{0}/grains\''.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/grains', tmp_dir) root_cmd('rm -f \'{0}/minion\''.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/minion', tmp_dir) if master_sign_pub_file: root_cmd('rm -f {0}/master_sign.pub'.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/master_sign.pub', tmp_dir) # Remove master configuration if master_pub: root_cmd('rm -f \'{0}/master.pub\''.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/master.pub', tmp_dir) if master_pem: root_cmd('rm -f \'{0}/master.pem\''.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/master.pem', tmp_dir) if master_conf: root_cmd('rm -f \'{0}/master\''.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/master', tmp_dir) # Remove pre-seed keys directory if preseed_minion_keys is not None: root_cmd( 'rm -rf \'{0}\''.format( preseed_minion_keys_tempdir ), tty, sudo, **ssh_kwargs ) log.debug('Removed %s', preseed_minion_keys_tempdir) if start_action and not parallel: queuereturn = queue.get() process.join() if queuereturn and start_action: # client = salt.client.LocalClient(conf_file) # output = client.cmd_iter( # host, 'state.highstate', timeout=timeout # ) # for line in output: # print(line) log.info('Executing %s on the salt-minion', start_action) root_cmd( 'salt-call {0}'.format(start_action), tty, sudo, **ssh_kwargs ) log.info( 'Finished executing %s on the salt-minion', start_action ) # Fire deploy action fire_event( 'event', '{0} has been deployed at {1}'.format(name, host), 'salt/cloud/{0}/deploy_script'.format(name), args={ 'name': name, 'host': host }, sock_dir=opts.get( 'sock_dir', os.path.join(__opts__['sock_dir'], 'master')), transport=opts.get('transport', 'zeromq') ) if file_map_fail or file_map_success: return { 'File Upload Success': file_map_success, 'File Upload Failure': file_map_fail, } return True return False
def function[deploy_script, parameter[host, port, timeout, username, password, key_filename, script, name, sock_dir, provider, conf_file, start_action, make_master, master_pub, master_pem, master_conf, minion_pub, minion_pem, minion_conf, keep_tmp, script_args, script_env, ssh_timeout, maxtries, make_syndic, make_minion, display_ssh_output, preseed_minion_keys, parallel, sudo_password, sudo, tty, vm_, opts, tmp_dir, file_map, master_sign_pub_file, cloud_grains, force_minion_config]]: constant[ Copy a deploy script to a remote server, execute it, and remove it ] if <ast.UnaryOp object at 0x7da1b1ff1d50> begin[:] variable[opts] assign[=] dictionary[[], []] variable[vm_] assign[=] <ast.BoolOp object at 0x7da1b1ff2410> variable[cloud_grains] assign[=] <ast.BoolOp object at 0x7da1b1ff2740> variable[tmp_dir] assign[=] call[constant[{0}-{1}].format, parameter[call[name[tmp_dir].rstrip, parameter[constant[/]]], call[name[uuid].uuid4, parameter[]]]] variable[deploy_command] assign[=] call[name[salt].config.get_cloud_config_value, parameter[constant[deploy_command], name[vm_], name[opts]]] if <ast.BoolOp object at 0x7da1b1ff2b30> begin[:] <ast.Raise object at 0x7da1b1ff2dd0> variable[gateway] assign[=] constant[None] if compare[constant[gateway] in name[kwargs]] begin[:] variable[gateway] assign[=] call[name[kwargs]][constant[gateway]] variable[starttime] assign[=] call[name[time].localtime, parameter[]] call[name[log].debug, parameter[constant[Deploying %s at %s], name[host], call[name[time].strftime, parameter[constant[%Y-%m-%d %H:%M:%S], name[starttime]]]]] variable[known_hosts_file] assign[=] call[name[kwargs].get, parameter[constant[known_hosts_file], constant[/dev/null]]] variable[hard_timeout] assign[=] call[name[opts].get, parameter[constant[hard_timeout], constant[None]]] if call[name[wait_for_port], parameter[]] begin[:] call[name[log].debug, parameter[constant[SSH port %s on %s is available], name[port], name[host]]] if call[name[wait_for_passwd], parameter[name[host]]] begin[:] call[name[log].debug, parameter[constant[Logging into %s:%s as %s], name[host], name[port], name[username]]] variable[ssh_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b1ff0a90>, <ast.Constant object at 0x7da1b1ff0310>, <ast.Constant object at 0x7da1b1ff0160>, <ast.Constant object at 0x7da1b1ff00d0>, <ast.Constant object at 0x7da1b1ff0730>, <ast.Constant object at 0x7da1b1ff0910>, <ast.Constant object at 0x7da1b1ff0250>], [<ast.Name object at 0x7da1b1ff0700>, <ast.Name object at 0x7da1b1ff03a0>, <ast.Name object at 0x7da1b1ff0670>, <ast.Name object at 0x7da1b1ff0400>, <ast.Name object at 0x7da1b1ff0b20>, <ast.Name object at 0x7da1b1ff0070>, <ast.Call object at 0x7da1b1ff0550>]] call[name[ssh_kwargs].update, parameter[call[name[__ssh_gateway_config_dict], parameter[name[gateway]]]]] if name[key_filename] begin[:] call[name[log].debug, parameter[constant[Using %s as the key_filename], name[key_filename]]] call[name[ssh_kwargs]][constant[key_filename]] assign[=] name[key_filename] if call[name[root_cmd], parameter[call[constant[test -e '{0}'].format, parameter[name[tmp_dir]]], name[tty], name[sudo]]] begin[:] variable[ret] assign[=] call[name[root_cmd], parameter[call[constant[sh -c "( mkdir -p -m 700 '{0}' )"].format, parameter[name[tmp_dir]]], name[tty], name[sudo]]] if name[ret] begin[:] <ast.Raise object at 0x7da1b2097a30> if name[sudo] begin[:] variable[comps] assign[=] call[call[call[name[tmp_dir].lstrip, parameter[constant[/]]].rstrip, parameter[constant[/]]].split, parameter[constant[/]]] if name[comps] begin[:] if <ast.BoolOp object at 0x7da1b2006cb0> begin[:] variable[ret] assign[=] call[name[root_cmd], parameter[call[constant[chown {0} "{1}"].format, parameter[name[username], name[tmp_dir]]], name[tty], name[sudo]]] if name[ret] begin[:] <ast.Raise object at 0x7da1b20067d0> if <ast.UnaryOp object at 0x7da1b2006620> begin[:] variable[file_map] assign[=] dictionary[[], []] variable[remote_dirs] assign[=] list[[]] variable[file_map_success] assign[=] list[[]] variable[file_map_fail] assign[=] list[[]] for taget[name[map_item]] in starred[name[file_map]] begin[:] variable[local_file] assign[=] name[map_item] variable[remote_file] assign[=] call[name[file_map]][name[map_item]] if <ast.UnaryOp object at 0x7da1b20060b0> begin[:] call[name[log].error, parameter[constant[The local file "%s" does not exist, and will not be copied to "%s" on the target system], name[local_file], name[remote_file]]] call[name[file_map_fail].append, parameter[dictionary[[<ast.Name object at 0x7da1b2005d20>], [<ast.Name object at 0x7da1b2005cf0>]]]] continue if call[name[os].path.isdir, parameter[name[local_file]]] begin[:] variable[dir_name] assign[=] call[name[os].path.basename, parameter[name[local_file]]] variable[remote_dir] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[remote_file]]], name[dir_name]]] if compare[name[remote_dir] <ast.NotIn object at 0x7da2590d7190> name[remote_dirs]] begin[:] call[name[root_cmd], parameter[call[constant[mkdir -p '{0}'].format, parameter[name[remote_dir]]], name[tty], name[sudo]]] if compare[call[name[ssh_kwargs]][constant[username]] not_equal[!=] constant[root]] begin[:] call[name[root_cmd], parameter[call[constant[chown {0} '{1}'].format, parameter[call[name[ssh_kwargs]][constant[username]], name[remote_dir]]], name[tty], name[sudo]]] call[name[remote_dirs].append, parameter[name[remote_dir]]] call[name[ssh_file], parameter[name[opts], name[remote_file]]] call[name[file_map_success].append, parameter[dictionary[[<ast.Name object at 0x7da1b2004b50>], [<ast.Name object at 0x7da1b2004b20>]]]] if name[minion_pem] begin[:] call[name[ssh_file], parameter[name[opts], call[constant[{0}/minion.pem].format, parameter[name[tmp_dir]]], name[minion_pem], name[ssh_kwargs]]] variable[ret] assign[=] call[name[root_cmd], parameter[call[constant[chmod 600 '{0}/minion.pem'].format, parameter[name[tmp_dir]]], name[tty], name[sudo]]] if name[ret] begin[:] <ast.Raise object at 0x7da1b20045b0> if name[minion_pub] begin[:] call[name[ssh_file], parameter[name[opts], call[constant[{0}/minion.pub].format, parameter[name[tmp_dir]]], name[minion_pub], name[ssh_kwargs]]] if name[master_sign_pub_file] begin[:] call[name[ssh_file], parameter[name[opts], call[constant[{0}/master_sign.pub].format, parameter[name[tmp_dir]]]]] if name[minion_conf] begin[:] if <ast.UnaryOp object at 0x7da207f00790> begin[:] <ast.Raise object at 0x7da207f016f0> variable[minion_grains] assign[=] call[name[minion_conf].pop, parameter[constant[grains], dictionary[[], []]]] if name[minion_grains] begin[:] call[name[ssh_file], parameter[name[opts], call[constant[{0}/grains].format, parameter[name[tmp_dir]]], call[name[salt_config_to_yaml], parameter[name[minion_grains]]], name[ssh_kwargs]]] if <ast.BoolOp object at 0x7da207f02ef0> begin[:] call[name[minion_conf]][constant[grains]] assign[=] dictionary[[<ast.Constant object at 0x7da207f00910>], [<ast.Name object at 0x7da207f00070>]] call[name[ssh_file], parameter[name[opts], call[constant[{0}/minion].format, parameter[name[tmp_dir]]], call[name[salt_config_to_yaml], parameter[name[minion_conf]]], name[ssh_kwargs]]] if name[master_pem] begin[:] call[name[ssh_file], parameter[name[opts], call[constant[{0}/master.pem].format, parameter[name[tmp_dir]]], name[master_pem], name[ssh_kwargs]]] variable[ret] assign[=] call[name[root_cmd], parameter[call[constant[chmod 600 '{0}/master.pem'].format, parameter[name[tmp_dir]]], name[tty], name[sudo]]] if name[ret] begin[:] <ast.Raise object at 0x7da207f00fd0> if name[master_pub] begin[:] call[name[ssh_file], parameter[name[opts], call[constant[{0}/master.pub].format, parameter[name[tmp_dir]]], name[master_pub], name[ssh_kwargs]]] if name[master_conf] begin[:] if <ast.UnaryOp object at 0x7da1b1f3c2b0> begin[:] <ast.Raise object at 0x7da1b1f3c250> call[name[ssh_file], parameter[name[opts], call[constant[{0}/master].format, parameter[name[tmp_dir]]], call[name[salt_config_to_yaml], parameter[name[master_conf]]], name[ssh_kwargs]]] variable[preseed_minion_keys_tempdir] assign[=] call[constant[{0}/preseed-minion-keys].format, parameter[name[tmp_dir]]] if compare[name[preseed_minion_keys] is_not constant[None]] begin[:] variable[ret] assign[=] call[name[root_cmd], parameter[call[constant[mkdir '{0}'].format, parameter[name[preseed_minion_keys_tempdir]]], name[tty], name[sudo]]] if name[ret] begin[:] <ast.Raise object at 0x7da1b1c15ae0> variable[ret] assign[=] call[name[root_cmd], parameter[call[constant[chmod 700 '{0}'].format, parameter[name[preseed_minion_keys_tempdir]]], name[tty], name[sudo]]] if name[ret] begin[:] <ast.Raise object at 0x7da1b1c172e0> if compare[call[name[ssh_kwargs]][constant[username]] not_equal[!=] constant[root]] begin[:] call[name[root_cmd], parameter[call[constant[chown {0} '{1}'].format, parameter[call[name[ssh_kwargs]][constant[username]], name[preseed_minion_keys_tempdir]]], name[tty], name[sudo]]] for taget[tuple[[<ast.Name object at 0x7da1b1c16170>, <ast.Name object at 0x7da1b1c15a50>]]] in starred[call[name[six].iteritems, parameter[name[preseed_minion_keys]]]] begin[:] variable[rpath] assign[=] call[name[os].path.join, parameter[name[preseed_minion_keys_tempdir], name[minion_id]]] call[name[ssh_file], parameter[name[opts], name[rpath], name[minion_key], name[ssh_kwargs]]] if compare[call[name[ssh_kwargs]][constant[username]] not_equal[!=] constant[root]] begin[:] call[name[root_cmd], parameter[call[constant[chown -R root '{0}'].format, parameter[name[preseed_minion_keys_tempdir]]], name[tty], name[sudo]]] if name[ret] begin[:] <ast.Raise object at 0x7da1b1c17100> variable[preflight_cmds] assign[=] call[name[kwargs].get, parameter[constant[preflight_cmds], list[[]]]] for taget[name[command]] in starred[name[preflight_cmds]] begin[:] variable[cmd_ret] assign[=] call[name[root_cmd], parameter[name[command], name[tty], name[sudo]]] if name[cmd_ret] begin[:] <ast.Raise object at 0x7da1b1c16e30> if name[script] begin[:] call[name[ssh_file], parameter[name[opts], call[constant[{0}/deploy.sh].format, parameter[name[tmp_dir]]], name[script], name[ssh_kwargs]]] variable[ret] assign[=] call[name[root_cmd], parameter[call[constant[sh -c "( chmod +x '{0}/deploy.sh' )";exit $?].format, parameter[name[tmp_dir]]], name[tty], name[sudo]]] if name[ret] begin[:] <ast.Raise object at 0x7da1b1c16c50> variable[time_used] assign[=] binary_operation[call[name[time].mktime, parameter[call[name[time].localtime, parameter[]]]] - call[name[time].mktime, parameter[name[starttime]]]] variable[newtimeout] assign[=] binary_operation[name[timeout] - name[time_used]] variable[queue] assign[=] constant[None] variable[process] assign[=] constant[None] if <ast.BoolOp object at 0x7da1b1c17190> begin[:] variable[queue] assign[=] call[name[multiprocessing].Queue, parameter[]] variable[process] assign[=] call[name[multiprocessing].Process, parameter[]] call[name[log].debug, parameter[constant[Starting new process to wait for salt-minion]]] call[name[process].start, parameter[]] if name[script] begin[:] if compare[constant[bootstrap-salt] in name[script]] begin[:] <ast.AugAssign object at 0x7da1b1c169e0> if name[force_minion_config] begin[:] <ast.AugAssign object at 0x7da1b1c15b70> if compare[name[make_syndic] is constant[True]] begin[:] <ast.AugAssign object at 0x7da1b1c14f10> if compare[name[make_master] is constant[True]] begin[:] <ast.AugAssign object at 0x7da1b1c17bb0> if compare[name[make_minion] is constant[False]] begin[:] <ast.AugAssign object at 0x7da1b1c14fa0> if compare[name[keep_tmp] is constant[True]] begin[:] <ast.AugAssign object at 0x7da1b1c17d30> if compare[name[preseed_minion_keys] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b1c141c0> if name[script_args] begin[:] <ast.AugAssign object at 0x7da1b2026410> if name[script_env] begin[:] if <ast.UnaryOp object at 0x7da1b2025840> begin[:] <ast.Raise object at 0x7da1b2026260> variable[environ_script_contents] assign[=] list[[<ast.Constant object at 0x7da1b2025300>]] for taget[tuple[[<ast.Name object at 0x7da1b2025150>, <ast.Name object at 0x7da1b2027310>]]] in starred[call[name[six].iteritems, parameter[name[script_env]]]] begin[:] call[name[environ_script_contents].append, parameter[call[constant[setenv {0} '{1}' >/dev/null 2>&1 || export {0}='{1}'].format, parameter[name[key], name[value]]]]] call[name[environ_script_contents].append, parameter[name[deploy_command]]] call[name[ssh_file], parameter[name[opts], call[constant[{0}/environ-deploy-wrapper.sh].format, parameter[name[tmp_dir]]], call[constant[ ].join, parameter[name[environ_script_contents]]], name[ssh_kwargs]]] call[name[root_cmd], parameter[call[constant[chmod +x '{0}/environ-deploy-wrapper.sh'].format, parameter[name[tmp_dir]]], name[tty], name[sudo]]] variable[deploy_command] assign[=] call[constant['{0}/environ-deploy-wrapper.sh'].format, parameter[name[tmp_dir]]] if compare[call[name[root_cmd], parameter[name[deploy_command], name[tty], name[sudo]]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b20277c0> call[name[log].debug, parameter[constant[Executed command '%s'], name[deploy_command]]] if <ast.UnaryOp object at 0x7da1b20273d0> begin[:] call[name[root_cmd], parameter[call[constant[rm -f '{0}/deploy.sh'].format, parameter[name[tmp_dir]]], name[tty], name[sudo]]] call[name[log].debug, parameter[constant[Removed %s/deploy.sh], name[tmp_dir]]] if name[script_env] begin[:] call[name[root_cmd], parameter[call[constant[rm -f '{0}/environ-deploy-wrapper.sh'].format, parameter[name[tmp_dir]]], name[tty], name[sudo]]] call[name[log].debug, parameter[constant[Removed %s/environ-deploy-wrapper.sh], name[tmp_dir]]] if name[keep_tmp] begin[:] call[name[log].debug, parameter[constant[Not removing deployment files from %s/], name[tmp_dir]]] if <ast.BoolOp object at 0x7da1b1f4ae90> begin[:] variable[queuereturn] assign[=] call[name[queue].get, parameter[]] call[name[process].join, parameter[]] if <ast.BoolOp object at 0x7da1b1f4a9e0> begin[:] call[name[log].info, parameter[constant[Executing %s on the salt-minion], name[start_action]]] call[name[root_cmd], parameter[call[constant[salt-call {0}].format, parameter[name[start_action]]], name[tty], name[sudo]]] call[name[log].info, parameter[constant[Finished executing %s on the salt-minion], name[start_action]]] call[name[fire_event], parameter[constant[event], call[constant[{0} has been deployed at {1}].format, parameter[name[name], name[host]]], call[constant[salt/cloud/{0}/deploy_script].format, parameter[name[name]]]]] if <ast.BoolOp object at 0x7da1b1f48100> begin[:] return[dictionary[[<ast.Constant object at 0x7da1b1f486d0>, <ast.Constant object at 0x7da1b1f48700>], [<ast.Name object at 0x7da1b1f48730>, <ast.Name object at 0x7da1b1f48760>]]] return[constant[True]] return[constant[False]]
keyword[def] identifier[deploy_script] ( identifier[host] , identifier[port] = literal[int] , identifier[timeout] = literal[int] , identifier[username] = literal[string] , identifier[password] = keyword[None] , identifier[key_filename] = keyword[None] , identifier[script] = keyword[None] , identifier[name] = keyword[None] , identifier[sock_dir] = keyword[None] , identifier[provider] = keyword[None] , identifier[conf_file] = keyword[None] , identifier[start_action] = keyword[None] , identifier[make_master] = keyword[False] , identifier[master_pub] = keyword[None] , identifier[master_pem] = keyword[None] , identifier[master_conf] = keyword[None] , identifier[minion_pub] = keyword[None] , identifier[minion_pem] = keyword[None] , identifier[minion_conf] = keyword[None] , identifier[keep_tmp] = keyword[False] , identifier[script_args] = keyword[None] , identifier[script_env] = keyword[None] , identifier[ssh_timeout] = literal[int] , identifier[maxtries] = literal[int] , identifier[make_syndic] = keyword[False] , identifier[make_minion] = keyword[True] , identifier[display_ssh_output] = keyword[True] , identifier[preseed_minion_keys] = keyword[None] , identifier[parallel] = keyword[False] , identifier[sudo_password] = keyword[None] , identifier[sudo] = keyword[False] , identifier[tty] = keyword[None] , identifier[vm_] = keyword[None] , identifier[opts] = keyword[None] , identifier[tmp_dir] = literal[string] , identifier[file_map] = keyword[None] , identifier[master_sign_pub_file] = keyword[None] , identifier[cloud_grains] = keyword[None] , identifier[force_minion_config] = keyword[False] , ** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[opts] , identifier[dict] ): identifier[opts] ={} identifier[vm_] = identifier[vm_] keyword[or] {} identifier[cloud_grains] = identifier[cloud_grains] keyword[or] {} identifier[tmp_dir] = literal[string] . identifier[format] ( identifier[tmp_dir] . identifier[rstrip] ( literal[string] ), identifier[uuid] . identifier[uuid4] ()) identifier[deploy_command] = identifier[salt] . identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[opts] , identifier[default] = identifier[os] . identifier[path] . identifier[join] ( identifier[tmp_dir] , literal[string] )) keyword[if] identifier[key_filename] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[key_filename] ): keyword[raise] identifier[SaltCloudConfigError] ( literal[string] . identifier[format] ( identifier[key_filename] ) ) identifier[gateway] = keyword[None] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[gateway] = identifier[kwargs] [ literal[string] ] identifier[starttime] = identifier[time] . identifier[localtime] () identifier[log] . identifier[debug] ( literal[string] , identifier[host] , identifier[time] . identifier[strftime] ( literal[string] , identifier[starttime] ) ) identifier[known_hosts_file] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ) identifier[hard_timeout] = identifier[opts] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[wait_for_port] ( identifier[host] = identifier[host] , identifier[port] = identifier[port] , identifier[gateway] = identifier[gateway] ): identifier[log] . identifier[debug] ( literal[string] , identifier[port] , identifier[host] ) keyword[if] identifier[wait_for_passwd] ( identifier[host] , identifier[port] = identifier[port] , identifier[username] = identifier[username] , identifier[password] = identifier[password] , identifier[key_filename] = identifier[key_filename] , identifier[ssh_timeout] = identifier[ssh_timeout] , identifier[display_ssh_output] = identifier[display_ssh_output] , identifier[gateway] = identifier[gateway] , identifier[known_hosts_file] = identifier[known_hosts_file] , identifier[maxtries] = identifier[maxtries] , identifier[hard_timeout] = identifier[hard_timeout] ): identifier[log] . identifier[debug] ( literal[string] , identifier[host] , identifier[port] , identifier[username] ) identifier[ssh_kwargs] ={ literal[string] : identifier[host] , literal[string] : identifier[port] , literal[string] : identifier[username] , literal[string] : identifier[ssh_timeout] , literal[string] : identifier[display_ssh_output] , literal[string] : identifier[sudo_password] , literal[string] : identifier[opts] . identifier[get] ( literal[string] , keyword[False] ) } identifier[ssh_kwargs] . identifier[update] ( identifier[__ssh_gateway_config_dict] ( identifier[gateway] )) keyword[if] identifier[key_filename] : identifier[log] . identifier[debug] ( literal[string] , identifier[key_filename] ) identifier[ssh_kwargs] [ literal[string] ]= identifier[key_filename] keyword[elif] identifier[password] keyword[and] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ) keyword[is] keyword[False] : identifier[ssh_kwargs] [ literal[string] ]= identifier[password] keyword[if] identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] , identifier[allow_failure] = keyword[True] ,** identifier[ssh_kwargs] ): identifier[ret] = identifier[root_cmd] (( literal[string] ). identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[ret] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] literal[string] . identifier[format] ( identifier[tmp_dir] ) ) keyword[if] identifier[sudo] : identifier[comps] = identifier[tmp_dir] . identifier[lstrip] ( literal[string] ). identifier[rstrip] ( literal[string] ). identifier[split] ( literal[string] ) keyword[if] identifier[comps] : keyword[if] identifier[len] ( identifier[comps] )> literal[int] keyword[or] identifier[comps] [ literal[int] ]!= literal[string] : identifier[ret] = identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[username] , identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[ret] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[username] , identifier[tmp_dir] )) keyword[if] keyword[not] identifier[isinstance] ( identifier[file_map] , identifier[dict] ): identifier[file_map] ={} identifier[remote_dirs] =[] identifier[file_map_success] =[] identifier[file_map_fail] =[] keyword[for] identifier[map_item] keyword[in] identifier[file_map] : identifier[local_file] = identifier[map_item] identifier[remote_file] = identifier[file_map] [ identifier[map_item] ] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[map_item] ): identifier[log] . identifier[error] ( literal[string] literal[string] , identifier[local_file] , identifier[remote_file] ) identifier[file_map_fail] . identifier[append] ({ identifier[local_file] : identifier[remote_file] }) keyword[continue] keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[local_file] ): identifier[dir_name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[local_file] ) identifier[remote_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[remote_file] ), identifier[dir_name] ) keyword[else] : identifier[remote_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[remote_file] ) keyword[if] identifier[remote_dir] keyword[not] keyword[in] identifier[remote_dirs] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[remote_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[ssh_kwargs] [ literal[string] ]!= literal[string] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[ssh_kwargs] [ literal[string] ], identifier[remote_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[remote_dirs] . identifier[append] ( identifier[remote_dir] ) identifier[ssh_file] ( identifier[opts] , identifier[remote_file] , identifier[kwargs] = identifier[ssh_kwargs] , identifier[local_file] = identifier[local_file] ) identifier[file_map_success] . identifier[append] ({ identifier[local_file] : identifier[remote_file] }) keyword[if] identifier[minion_pem] : identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[minion_pem] , identifier[ssh_kwargs] ) identifier[ret] = identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[ret] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[tmp_dir] )) keyword[if] identifier[minion_pub] : identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[minion_pub] , identifier[ssh_kwargs] ) keyword[if] identifier[master_sign_pub_file] : identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[kwargs] = identifier[ssh_kwargs] , identifier[local_file] = identifier[master_sign_pub_file] ) keyword[if] identifier[minion_conf] : keyword[if] keyword[not] identifier[isinstance] ( identifier[minion_conf] , identifier[dict] ): keyword[raise] identifier[DeprecationWarning] ( literal[string] literal[string] literal[string] ) identifier[minion_grains] = identifier[minion_conf] . identifier[pop] ( literal[string] ,{}) keyword[if] identifier[minion_grains] : identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[salt_config_to_yaml] ( identifier[minion_grains] ), identifier[ssh_kwargs] ) keyword[if] identifier[cloud_grains] keyword[and] identifier[opts] . identifier[get] ( literal[string] , keyword[True] ): identifier[minion_conf] [ literal[string] ]={ literal[string] : identifier[cloud_grains] } identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[salt_config_to_yaml] ( identifier[minion_conf] ), identifier[ssh_kwargs] ) keyword[if] identifier[master_pem] : identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[master_pem] , identifier[ssh_kwargs] ) identifier[ret] = identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[ret] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[tmp_dir] )) keyword[if] identifier[master_pub] : identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[master_pub] , identifier[ssh_kwargs] ) keyword[if] identifier[master_conf] : keyword[if] keyword[not] identifier[isinstance] ( identifier[master_conf] , identifier[dict] ): keyword[raise] identifier[DeprecationWarning] ( literal[string] literal[string] literal[string] ) identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[salt_config_to_yaml] ( identifier[master_conf] ), identifier[ssh_kwargs] ) identifier[preseed_minion_keys_tempdir] = literal[string] . identifier[format] ( identifier[tmp_dir] ) keyword[if] identifier[preseed_minion_keys] keyword[is] keyword[not] keyword[None] : identifier[ret] = identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[preseed_minion_keys_tempdir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[ret] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[preseed_minion_keys_tempdir] )) identifier[ret] = identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[preseed_minion_keys_tempdir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[ret] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[preseed_minion_keys_tempdir] )) keyword[if] identifier[ssh_kwargs] [ literal[string] ]!= literal[string] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[ssh_kwargs] [ literal[string] ], identifier[preseed_minion_keys_tempdir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[for] identifier[minion_id] , identifier[minion_key] keyword[in] identifier[six] . identifier[iteritems] ( identifier[preseed_minion_keys] ): identifier[rpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[preseed_minion_keys_tempdir] , identifier[minion_id] ) identifier[ssh_file] ( identifier[opts] , identifier[rpath] , identifier[minion_key] , identifier[ssh_kwargs] ) keyword[if] identifier[ssh_kwargs] [ literal[string] ]!= literal[string] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[preseed_minion_keys_tempdir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[ret] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[preseed_minion_keys_tempdir] )) identifier[preflight_cmds] = identifier[kwargs] . identifier[get] ( literal[string] ,[]) keyword[for] identifier[command] keyword[in] identifier[preflight_cmds] : identifier[cmd_ret] = identifier[root_cmd] ( identifier[command] , identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[cmd_ret] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[command] ) ) keyword[if] identifier[script] : identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[script] , identifier[ssh_kwargs] ) identifier[ret] = identifier[root_cmd] ( ( literal[string] literal[string] ). identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) keyword[if] identifier[ret] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[tmp_dir] )) identifier[time_used] = identifier[time] . identifier[mktime] ( identifier[time] . identifier[localtime] ())- identifier[time] . identifier[mktime] ( identifier[starttime] ) identifier[newtimeout] = identifier[timeout] - identifier[time_used] identifier[queue] = keyword[None] identifier[process] = keyword[None] keyword[if] identifier[start_action] keyword[and] keyword[not] identifier[parallel] : identifier[queue] = identifier[multiprocessing] . identifier[Queue] () identifier[process] = identifier[multiprocessing] . identifier[Process] ( identifier[target] = identifier[check_auth] , identifier[kwargs] = identifier[dict] ( identifier[name] = identifier[name] , identifier[sock_dir] = identifier[sock_dir] , identifier[timeout] = identifier[newtimeout] , identifier[queue] = identifier[queue] ) ) identifier[log] . identifier[debug] ( literal[string] ) identifier[process] . identifier[start] () keyword[if] identifier[script] : keyword[if] literal[string] keyword[in] identifier[script] : identifier[deploy_command] += literal[string] . identifier[format] ( identifier[tmp_dir] ) keyword[if] identifier[force_minion_config] : identifier[deploy_command] += literal[string] keyword[if] identifier[make_syndic] keyword[is] keyword[True] : identifier[deploy_command] += literal[string] keyword[if] identifier[make_master] keyword[is] keyword[True] : identifier[deploy_command] += literal[string] keyword[if] identifier[make_minion] keyword[is] keyword[False] : identifier[deploy_command] += literal[string] keyword[if] identifier[keep_tmp] keyword[is] keyword[True] : identifier[deploy_command] += literal[string] keyword[if] identifier[preseed_minion_keys] keyword[is] keyword[not] keyword[None] : identifier[deploy_command] += literal[string] . identifier[format] ( identifier[preseed_minion_keys_tempdir] ) keyword[if] identifier[script_args] : identifier[deploy_command] += literal[string] . identifier[format] ( identifier[script_args] ) keyword[if] identifier[script_env] : keyword[if] keyword[not] identifier[isinstance] ( identifier[script_env] , identifier[dict] ): keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] literal[string] . identifier[format] ( identifier[type] ( identifier[script_env] ) ) ) identifier[environ_script_contents] =[ literal[string] ] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[script_env] ): identifier[environ_script_contents] . identifier[append] ( literal[string] literal[string] . identifier[format] ( identifier[key] , identifier[value] ) ) identifier[environ_script_contents] . identifier[append] ( identifier[deploy_command] ) identifier[ssh_file] ( identifier[opts] , literal[string] . identifier[format] ( identifier[tmp_dir] ), literal[string] . identifier[join] ( identifier[environ_script_contents] ), identifier[ssh_kwargs] ) identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[deploy_command] = literal[string] . identifier[format] ( identifier[tmp_dir] , ) keyword[if] identifier[root_cmd] ( identifier[deploy_command] , identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] )!= literal[int] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[deploy_command] ) ) identifier[log] . identifier[debug] ( literal[string] , identifier[deploy_command] ) keyword[if] keyword[not] identifier[keep_tmp] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[if] identifier[script_env] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[if] identifier[keep_tmp] : identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[else] : keyword[if] identifier[minion_pub] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[if] identifier[minion_pem] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[if] identifier[minion_conf] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[if] identifier[master_sign_pub_file] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[if] identifier[master_pub] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[if] identifier[master_pem] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[if] identifier[master_conf] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[tmp_dir] ) keyword[if] identifier[preseed_minion_keys] keyword[is] keyword[not] keyword[None] : identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[preseed_minion_keys_tempdir] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[debug] ( literal[string] , identifier[preseed_minion_keys_tempdir] ) keyword[if] identifier[start_action] keyword[and] keyword[not] identifier[parallel] : identifier[queuereturn] = identifier[queue] . identifier[get] () identifier[process] . identifier[join] () keyword[if] identifier[queuereturn] keyword[and] identifier[start_action] : identifier[log] . identifier[info] ( literal[string] , identifier[start_action] ) identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[start_action] ), identifier[tty] , identifier[sudo] ,** identifier[ssh_kwargs] ) identifier[log] . identifier[info] ( literal[string] , identifier[start_action] ) identifier[fire_event] ( literal[string] , literal[string] . identifier[format] ( identifier[name] , identifier[host] ), literal[string] . identifier[format] ( identifier[name] ), identifier[args] ={ literal[string] : identifier[name] , literal[string] : identifier[host] }, identifier[sock_dir] = identifier[opts] . identifier[get] ( literal[string] , identifier[os] . identifier[path] . identifier[join] ( identifier[__opts__] [ literal[string] ], literal[string] )), identifier[transport] = identifier[opts] . identifier[get] ( literal[string] , literal[string] ) ) keyword[if] identifier[file_map_fail] keyword[or] identifier[file_map_success] : keyword[return] { literal[string] : identifier[file_map_success] , literal[string] : identifier[file_map_fail] , } keyword[return] keyword[True] keyword[return] keyword[False]
def deploy_script(host, port=22, timeout=900, username='root', password=None, key_filename=None, script=None, name=None, sock_dir=None, provider=None, conf_file=None, start_action=None, make_master=False, master_pub=None, master_pem=None, master_conf=None, minion_pub=None, minion_pem=None, minion_conf=None, keep_tmp=False, script_args=None, script_env=None, ssh_timeout=15, maxtries=15, make_syndic=False, make_minion=True, display_ssh_output=True, preseed_minion_keys=None, parallel=False, sudo_password=None, sudo=False, tty=None, vm_=None, opts=None, tmp_dir='/tmp/.saltcloud', file_map=None, master_sign_pub_file=None, cloud_grains=None, force_minion_config=False, **kwargs): """ Copy a deploy script to a remote server, execute it, and remove it """ if not isinstance(opts, dict): opts = {} # depends on [control=['if'], data=[]] vm_ = vm_ or {} # if None, default to empty dict cloud_grains = cloud_grains or {} tmp_dir = '{0}-{1}'.format(tmp_dir.rstrip('/'), uuid.uuid4()) deploy_command = salt.config.get_cloud_config_value('deploy_command', vm_, opts, default=os.path.join(tmp_dir, 'deploy.sh')) if key_filename is not None and (not os.path.isfile(key_filename)): raise SaltCloudConfigError("The defined key_filename '{0}' does not exist".format(key_filename)) # depends on [control=['if'], data=[]] gateway = None if 'gateway' in kwargs: gateway = kwargs['gateway'] # depends on [control=['if'], data=['kwargs']] starttime = time.localtime() log.debug('Deploying %s at %s', host, time.strftime('%Y-%m-%d %H:%M:%S', starttime)) known_hosts_file = kwargs.get('known_hosts_file', '/dev/null') hard_timeout = opts.get('hard_timeout', None) if wait_for_port(host=host, port=port, gateway=gateway): log.debug('SSH port %s on %s is available', port, host) if wait_for_passwd(host, port=port, username=username, password=password, key_filename=key_filename, ssh_timeout=ssh_timeout, display_ssh_output=display_ssh_output, gateway=gateway, known_hosts_file=known_hosts_file, maxtries=maxtries, hard_timeout=hard_timeout): log.debug('Logging into %s:%s as %s', host, port, username) ssh_kwargs = {'hostname': host, 'port': port, 'username': username, 'timeout': ssh_timeout, 'display_ssh_output': display_ssh_output, 'sudo_password': sudo_password, 'sftp': opts.get('use_sftp', False)} ssh_kwargs.update(__ssh_gateway_config_dict(gateway)) if key_filename: log.debug('Using %s as the key_filename', key_filename) ssh_kwargs['key_filename'] = key_filename # depends on [control=['if'], data=[]] elif password and kwargs.get('has_ssh_agent', False) is False: ssh_kwargs['password'] = password # depends on [control=['if'], data=[]] if root_cmd("test -e '{0}'".format(tmp_dir), tty, sudo, allow_failure=True, **ssh_kwargs): ret = root_cmd('sh -c "( mkdir -p -m 700 \'{0}\' )"'.format(tmp_dir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit("Can't create temporary directory in {0} !".format(tmp_dir)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if sudo: comps = tmp_dir.lstrip('/').rstrip('/').split('/') if comps: if len(comps) > 1 or comps[0] != 'tmp': ret = root_cmd('chown {0} "{1}"'.format(username, tmp_dir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit('Cant set {0} ownership on {1}'.format(username, tmp_dir)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if not isinstance(file_map, dict): file_map = {} # depends on [control=['if'], data=[]] # Copy an arbitrary group of files to the target system remote_dirs = [] file_map_success = [] file_map_fail = [] for map_item in file_map: local_file = map_item remote_file = file_map[map_item] if not os.path.exists(map_item): log.error('The local file "%s" does not exist, and will not be copied to "%s" on the target system', local_file, remote_file) file_map_fail.append({local_file: remote_file}) continue # depends on [control=['if'], data=[]] if os.path.isdir(local_file): dir_name = os.path.basename(local_file) remote_dir = os.path.join(os.path.dirname(remote_file), dir_name) # depends on [control=['if'], data=[]] else: remote_dir = os.path.dirname(remote_file) if remote_dir not in remote_dirs: root_cmd("mkdir -p '{0}'".format(remote_dir), tty, sudo, **ssh_kwargs) if ssh_kwargs['username'] != 'root': root_cmd("chown {0} '{1}'".format(ssh_kwargs['username'], remote_dir), tty, sudo, **ssh_kwargs) # depends on [control=['if'], data=[]] remote_dirs.append(remote_dir) # depends on [control=['if'], data=['remote_dir', 'remote_dirs']] ssh_file(opts, remote_file, kwargs=ssh_kwargs, local_file=local_file) file_map_success.append({local_file: remote_file}) # depends on [control=['for'], data=['map_item']] # Minion configuration if minion_pem: ssh_file(opts, '{0}/minion.pem'.format(tmp_dir), minion_pem, ssh_kwargs) ret = root_cmd("chmod 600 '{0}/minion.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit("Can't set perms on {0}/minion.pem".format(tmp_dir)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if minion_pub: ssh_file(opts, '{0}/minion.pub'.format(tmp_dir), minion_pub, ssh_kwargs) # depends on [control=['if'], data=[]] if master_sign_pub_file: ssh_file(opts, '{0}/master_sign.pub'.format(tmp_dir), kwargs=ssh_kwargs, local_file=master_sign_pub_file) # depends on [control=['if'], data=[]] if minion_conf: if not isinstance(minion_conf, dict): # Let's not just fail regarding this change, specially # since we can handle it raise DeprecationWarning("`salt.utils.cloud.deploy_script now only accepts dictionaries for it's `minion_conf` parameter. Loading YAML...") # depends on [control=['if'], data=[]] minion_grains = minion_conf.pop('grains', {}) if minion_grains: ssh_file(opts, '{0}/grains'.format(tmp_dir), salt_config_to_yaml(minion_grains), ssh_kwargs) # depends on [control=['if'], data=[]] if cloud_grains and opts.get('enable_cloud_grains', True): minion_conf['grains'] = {'salt-cloud': cloud_grains} # depends on [control=['if'], data=[]] ssh_file(opts, '{0}/minion'.format(tmp_dir), salt_config_to_yaml(minion_conf), ssh_kwargs) # depends on [control=['if'], data=[]] # Master configuration if master_pem: ssh_file(opts, '{0}/master.pem'.format(tmp_dir), master_pem, ssh_kwargs) ret = root_cmd("chmod 600 '{0}/master.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit('Cant set perms on {0}/master.pem'.format(tmp_dir)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if master_pub: ssh_file(opts, '{0}/master.pub'.format(tmp_dir), master_pub, ssh_kwargs) # depends on [control=['if'], data=[]] if master_conf: if not isinstance(master_conf, dict): # Let's not just fail regarding this change, specially # since we can handle it raise DeprecationWarning("`salt.utils.cloud.deploy_script now only accepts dictionaries for it's `master_conf` parameter. Loading from YAML ...") # depends on [control=['if'], data=[]] ssh_file(opts, '{0}/master'.format(tmp_dir), salt_config_to_yaml(master_conf), ssh_kwargs) # depends on [control=['if'], data=[]] # XXX: We need to make these paths configurable preseed_minion_keys_tempdir = '{0}/preseed-minion-keys'.format(tmp_dir) if preseed_minion_keys is not None: # Create remote temp dir ret = root_cmd("mkdir '{0}'".format(preseed_minion_keys_tempdir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit('Cant create {0}'.format(preseed_minion_keys_tempdir)) # depends on [control=['if'], data=[]] ret = root_cmd("chmod 700 '{0}'".format(preseed_minion_keys_tempdir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit("Can't set perms on {0}".format(preseed_minion_keys_tempdir)) # depends on [control=['if'], data=[]] if ssh_kwargs['username'] != 'root': root_cmd("chown {0} '{1}'".format(ssh_kwargs['username'], preseed_minion_keys_tempdir), tty, sudo, **ssh_kwargs) # depends on [control=['if'], data=[]] # Copy pre-seed minion keys for (minion_id, minion_key) in six.iteritems(preseed_minion_keys): rpath = os.path.join(preseed_minion_keys_tempdir, minion_id) ssh_file(opts, rpath, minion_key, ssh_kwargs) # depends on [control=['for'], data=[]] if ssh_kwargs['username'] != 'root': root_cmd("chown -R root '{0}'".format(preseed_minion_keys_tempdir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit("Can't set ownership for {0}".format(preseed_minion_keys_tempdir)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['preseed_minion_keys']] # Run any pre-flight commands before running deploy scripts preflight_cmds = kwargs.get('preflight_cmds', []) for command in preflight_cmds: cmd_ret = root_cmd(command, tty, sudo, **ssh_kwargs) if cmd_ret: raise SaltCloudSystemExit("Pre-flight command failed: '{0}'".format(command)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['command']] # The actual deploy script if script: # got strange escaping issues with sudoer, going onto a # subshell fixes that ssh_file(opts, '{0}/deploy.sh'.format(tmp_dir), script, ssh_kwargs) ret = root_cmd('sh -c "( chmod +x \'{0}/deploy.sh\' )";exit $?'.format(tmp_dir), tty, sudo, **ssh_kwargs) if ret: raise SaltCloudSystemExit("Can't set perms on {0}/deploy.sh".format(tmp_dir)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] time_used = time.mktime(time.localtime()) - time.mktime(starttime) newtimeout = timeout - time_used queue = None process = None # Consider this code experimental. It causes Salt Cloud to wait # for the minion to check in, and then fire a startup event. # Disabled if parallel because it doesn't work! if start_action and (not parallel): queue = multiprocessing.Queue() process = multiprocessing.Process(target=check_auth, kwargs=dict(name=name, sock_dir=sock_dir, timeout=newtimeout, queue=queue)) log.debug('Starting new process to wait for salt-minion') process.start() # depends on [control=['if'], data=[]] # Run the deploy script if script: if 'bootstrap-salt' in script: deploy_command += " -c '{0}'".format(tmp_dir) if force_minion_config: deploy_command += ' -F' # depends on [control=['if'], data=[]] if make_syndic is True: deploy_command += ' -S' # depends on [control=['if'], data=[]] if make_master is True: deploy_command += ' -M' # depends on [control=['if'], data=[]] if make_minion is False: deploy_command += ' -N' # depends on [control=['if'], data=[]] if keep_tmp is True: deploy_command += ' -K' # depends on [control=['if'], data=[]] if preseed_minion_keys is not None: deploy_command += " -k '{0}'".format(preseed_minion_keys_tempdir) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if script_args: deploy_command += ' {0}'.format(script_args) # depends on [control=['if'], data=[]] if script_env: if not isinstance(script_env, dict): raise SaltCloudSystemExit("The 'script_env' configuration setting NEEDS to be a dictionary not a {0}".format(type(script_env))) # depends on [control=['if'], data=[]] environ_script_contents = ['#!/bin/sh'] for (key, value) in six.iteritems(script_env): environ_script_contents.append("setenv {0} '{1}' >/dev/null 2>&1 || export {0}='{1}'".format(key, value)) # depends on [control=['for'], data=[]] environ_script_contents.append(deploy_command) # Upload our environ setter wrapper ssh_file(opts, '{0}/environ-deploy-wrapper.sh'.format(tmp_dir), '\n'.join(environ_script_contents), ssh_kwargs) root_cmd("chmod +x '{0}/environ-deploy-wrapper.sh'".format(tmp_dir), tty, sudo, **ssh_kwargs) # The deploy command is now our wrapper deploy_command = "'{0}/environ-deploy-wrapper.sh'".format(tmp_dir) # depends on [control=['if'], data=[]] if root_cmd(deploy_command, tty, sudo, **ssh_kwargs) != 0: raise SaltCloudSystemExit("Executing the command '{0}' failed".format(deploy_command)) # depends on [control=['if'], data=[]] log.debug("Executed command '%s'", deploy_command) # Remove the deploy script if not keep_tmp: root_cmd("rm -f '{0}/deploy.sh'".format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/deploy.sh', tmp_dir) if script_env: root_cmd("rm -f '{0}/environ-deploy-wrapper.sh'".format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/environ-deploy-wrapper.sh', tmp_dir) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if keep_tmp: log.debug('Not removing deployment files from %s/', tmp_dir) # depends on [control=['if'], data=[]] else: # Remove minion configuration if minion_pub: root_cmd("rm -f '{0}/minion.pub'".format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/minion.pub', tmp_dir) # depends on [control=['if'], data=[]] if minion_pem: root_cmd("rm -f '{0}/minion.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/minion.pem', tmp_dir) # depends on [control=['if'], data=[]] if minion_conf: root_cmd("rm -f '{0}/grains'".format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/grains', tmp_dir) root_cmd("rm -f '{0}/minion'".format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/minion', tmp_dir) # depends on [control=['if'], data=[]] if master_sign_pub_file: root_cmd('rm -f {0}/master_sign.pub'.format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/master_sign.pub', tmp_dir) # depends on [control=['if'], data=[]] # Remove master configuration if master_pub: root_cmd("rm -f '{0}/master.pub'".format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/master.pub', tmp_dir) # depends on [control=['if'], data=[]] if master_pem: root_cmd("rm -f '{0}/master.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/master.pem', tmp_dir) # depends on [control=['if'], data=[]] if master_conf: root_cmd("rm -f '{0}/master'".format(tmp_dir), tty, sudo, **ssh_kwargs) log.debug('Removed %s/master', tmp_dir) # depends on [control=['if'], data=[]] # Remove pre-seed keys directory if preseed_minion_keys is not None: root_cmd("rm -rf '{0}'".format(preseed_minion_keys_tempdir), tty, sudo, **ssh_kwargs) log.debug('Removed %s', preseed_minion_keys_tempdir) # depends on [control=['if'], data=[]] if start_action and (not parallel): queuereturn = queue.get() process.join() if queuereturn and start_action: # client = salt.client.LocalClient(conf_file) # output = client.cmd_iter( # host, 'state.highstate', timeout=timeout # ) # for line in output: # print(line) log.info('Executing %s on the salt-minion', start_action) root_cmd('salt-call {0}'.format(start_action), tty, sudo, **ssh_kwargs) log.info('Finished executing %s on the salt-minion', start_action) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Fire deploy action fire_event('event', '{0} has been deployed at {1}'.format(name, host), 'salt/cloud/{0}/deploy_script'.format(name), args={'name': name, 'host': host}, sock_dir=opts.get('sock_dir', os.path.join(__opts__['sock_dir'], 'master')), transport=opts.get('transport', 'zeromq')) if file_map_fail or file_map_success: return {'File Upload Success': file_map_success, 'File Upload Failure': file_map_fail} # depends on [control=['if'], data=[]] return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return False
def tilt(poly): """Tilt of a polygon poly""" num = len(poly) - 1 vec = unit_normal(poly[0], poly[1], poly[num]) vec_alt = np.array([vec[0], vec[1], vec[2]]) vec_z = np.array([0, 0, 1]) # return (90 - angle2vecs(vec_alt, vec_z)) # update by Santosh return angle2vecs(vec_alt, vec_z)
def function[tilt, parameter[poly]]: constant[Tilt of a polygon poly] variable[num] assign[=] binary_operation[call[name[len], parameter[name[poly]]] - constant[1]] variable[vec] assign[=] call[name[unit_normal], parameter[call[name[poly]][constant[0]], call[name[poly]][constant[1]], call[name[poly]][name[num]]]] variable[vec_alt] assign[=] call[name[np].array, parameter[list[[<ast.Subscript object at 0x7da20c6e6290>, <ast.Subscript object at 0x7da20c6e65c0>, <ast.Subscript object at 0x7da20c6e6f50>]]]] variable[vec_z] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da20c6e5a80>, <ast.Constant object at 0x7da20c6e4940>, <ast.Constant object at 0x7da20c6e61a0>]]]] return[call[name[angle2vecs], parameter[name[vec_alt], name[vec_z]]]]
keyword[def] identifier[tilt] ( identifier[poly] ): literal[string] identifier[num] = identifier[len] ( identifier[poly] )- literal[int] identifier[vec] = identifier[unit_normal] ( identifier[poly] [ literal[int] ], identifier[poly] [ literal[int] ], identifier[poly] [ identifier[num] ]) identifier[vec_alt] = identifier[np] . identifier[array] ([ identifier[vec] [ literal[int] ], identifier[vec] [ literal[int] ], identifier[vec] [ literal[int] ]]) identifier[vec_z] = identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ]) keyword[return] identifier[angle2vecs] ( identifier[vec_alt] , identifier[vec_z] )
def tilt(poly): """Tilt of a polygon poly""" num = len(poly) - 1 vec = unit_normal(poly[0], poly[1], poly[num]) vec_alt = np.array([vec[0], vec[1], vec[2]]) vec_z = np.array([0, 0, 1]) # return (90 - angle2vecs(vec_alt, vec_z)) # update by Santosh return angle2vecs(vec_alt, vec_z)
def check_time_extents(self, ds): """ Check that the values of time_coverage_start/time_coverage_end approximately match the data. """ if not (hasattr(ds, 'time_coverage_start') and hasattr(ds, 'time_coverage_end')): return # Parse the ISO 8601 formatted dates try: t_min = dateparse(ds.time_coverage_start) t_max = dateparse(ds.time_coverage_end) except: return Result(BaseCheck.MEDIUM, False, 'time_coverage_extents_match', ['time_coverage attributes are not formatted properly. Use the ISO 8601:2004 date format, preferably the extended format.']) timevar = cfutil.get_time_variable(ds) if not timevar: return Result(BaseCheck.MEDIUM, False, 'time_coverage_extents_match', ['Could not find time variable to test extent of time_coverage_start/time_coverage_end, see CF-1.6 spec chapter 4.4']) # Time should be monotonically increasing, so we make that assumption here so we don't have to download THE ENTIRE ARRAY try: # num2date returns as naive date, but with time adjusted to UTC # we need to attach timezone information here, or the date # subtraction from t_min/t_max will assume that a naive timestamp is # in the same time zone and cause erroneous results. # Pendulum uses UTC by default, but we are being explicit here time0 = pendulum.instance(num2date(ds.variables[timevar][0], ds.variables[timevar].units), 'UTC') time1 = pendulum.instance(num2date(ds.variables[timevar][-1], ds.variables[timevar].units), 'UTC') except: return Result(BaseCheck.MEDIUM, False, 'time_coverage_extents_match', ['Failed to retrieve and convert times for variables %s.' % timevar]) start_dt = abs(time0 - t_min) end_dt = abs(time1 - t_max) score = 2 msgs = [] if start_dt > timedelta(hours=1): msgs.append("Date time mismatch between time_coverage_start and actual " "time values %s (time_coverage_start) != %s (time[0])" % (t_min.isoformat(), time0.isoformat())) score -= 1 if end_dt > timedelta(hours=1): msgs.append("Date time mismatch between time_coverage_end and actual " "time values %s (time_coverage_end) != %s (time[N])" % (t_max.isoformat(), time1.isoformat())) score -= 1 return Result(BaseCheck.MEDIUM, (score, 2), 'time_coverage_extents_match', msgs)
def function[check_time_extents, parameter[self, ds]]: constant[ Check that the values of time_coverage_start/time_coverage_end approximately match the data. ] if <ast.UnaryOp object at 0x7da18f00cdf0> begin[:] return[None] <ast.Try object at 0x7da18f00e920> variable[timevar] assign[=] call[name[cfutil].get_time_variable, parameter[name[ds]]] if <ast.UnaryOp object at 0x7da18f00e020> begin[:] return[call[name[Result], parameter[name[BaseCheck].MEDIUM, constant[False], constant[time_coverage_extents_match], list[[<ast.Constant object at 0x7da18f00f8b0>]]]]] <ast.Try object at 0x7da18f00fc10> variable[start_dt] assign[=] call[name[abs], parameter[binary_operation[name[time0] - name[t_min]]]] variable[end_dt] assign[=] call[name[abs], parameter[binary_operation[name[time1] - name[t_max]]]] variable[score] assign[=] constant[2] variable[msgs] assign[=] list[[]] if compare[name[start_dt] greater[>] call[name[timedelta], parameter[]]] begin[:] call[name[msgs].append, parameter[binary_operation[constant[Date time mismatch between time_coverage_start and actual time values %s (time_coverage_start) != %s (time[0])] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f812080>, <ast.Call object at 0x7da18f813760>]]]]] <ast.AugAssign object at 0x7da18f813670> if compare[name[end_dt] greater[>] call[name[timedelta], parameter[]]] begin[:] call[name[msgs].append, parameter[binary_operation[constant[Date time mismatch between time_coverage_end and actual time values %s (time_coverage_end) != %s (time[N])] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f8109d0>, <ast.Call object at 0x7da18f811690>]]]]] <ast.AugAssign object at 0x7da18f810b20> return[call[name[Result], parameter[name[BaseCheck].MEDIUM, tuple[[<ast.Name object at 0x7da18f721ea0>, <ast.Constant object at 0x7da18f720970>]], constant[time_coverage_extents_match], name[msgs]]]]
keyword[def] identifier[check_time_extents] ( identifier[self] , identifier[ds] ): literal[string] keyword[if] keyword[not] ( identifier[hasattr] ( identifier[ds] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[ds] , literal[string] )): keyword[return] keyword[try] : identifier[t_min] = identifier[dateparse] ( identifier[ds] . identifier[time_coverage_start] ) identifier[t_max] = identifier[dateparse] ( identifier[ds] . identifier[time_coverage_end] ) keyword[except] : keyword[return] identifier[Result] ( identifier[BaseCheck] . identifier[MEDIUM] , keyword[False] , literal[string] , [ literal[string] ]) identifier[timevar] = identifier[cfutil] . identifier[get_time_variable] ( identifier[ds] ) keyword[if] keyword[not] identifier[timevar] : keyword[return] identifier[Result] ( identifier[BaseCheck] . identifier[MEDIUM] , keyword[False] , literal[string] , [ literal[string] ]) keyword[try] : identifier[time0] = identifier[pendulum] . identifier[instance] ( identifier[num2date] ( identifier[ds] . identifier[variables] [ identifier[timevar] ][ literal[int] ], identifier[ds] . identifier[variables] [ identifier[timevar] ]. identifier[units] ), literal[string] ) identifier[time1] = identifier[pendulum] . identifier[instance] ( identifier[num2date] ( identifier[ds] . identifier[variables] [ identifier[timevar] ][- literal[int] ], identifier[ds] . identifier[variables] [ identifier[timevar] ]. identifier[units] ), literal[string] ) keyword[except] : keyword[return] identifier[Result] ( identifier[BaseCheck] . identifier[MEDIUM] , keyword[False] , literal[string] , [ literal[string] % identifier[timevar] ]) identifier[start_dt] = identifier[abs] ( identifier[time0] - identifier[t_min] ) identifier[end_dt] = identifier[abs] ( identifier[time1] - identifier[t_max] ) identifier[score] = literal[int] identifier[msgs] =[] keyword[if] identifier[start_dt] > identifier[timedelta] ( identifier[hours] = literal[int] ): identifier[msgs] . identifier[append] ( literal[string] literal[string] %( identifier[t_min] . identifier[isoformat] (), identifier[time0] . identifier[isoformat] ())) identifier[score] -= literal[int] keyword[if] identifier[end_dt] > identifier[timedelta] ( identifier[hours] = literal[int] ): identifier[msgs] . identifier[append] ( literal[string] literal[string] %( identifier[t_max] . identifier[isoformat] (), identifier[time1] . identifier[isoformat] ())) identifier[score] -= literal[int] keyword[return] identifier[Result] ( identifier[BaseCheck] . identifier[MEDIUM] , ( identifier[score] , literal[int] ), literal[string] , identifier[msgs] )
def check_time_extents(self, ds): """ Check that the values of time_coverage_start/time_coverage_end approximately match the data. """ if not (hasattr(ds, 'time_coverage_start') and hasattr(ds, 'time_coverage_end')): return # depends on [control=['if'], data=[]] # Parse the ISO 8601 formatted dates try: t_min = dateparse(ds.time_coverage_start) t_max = dateparse(ds.time_coverage_end) # depends on [control=['try'], data=[]] except: return Result(BaseCheck.MEDIUM, False, 'time_coverage_extents_match', ['time_coverage attributes are not formatted properly. Use the ISO 8601:2004 date format, preferably the extended format.']) # depends on [control=['except'], data=[]] timevar = cfutil.get_time_variable(ds) if not timevar: return Result(BaseCheck.MEDIUM, False, 'time_coverage_extents_match', ['Could not find time variable to test extent of time_coverage_start/time_coverage_end, see CF-1.6 spec chapter 4.4']) # depends on [control=['if'], data=[]] # Time should be monotonically increasing, so we make that assumption here so we don't have to download THE ENTIRE ARRAY try: # num2date returns as naive date, but with time adjusted to UTC # we need to attach timezone information here, or the date # subtraction from t_min/t_max will assume that a naive timestamp is # in the same time zone and cause erroneous results. # Pendulum uses UTC by default, but we are being explicit here time0 = pendulum.instance(num2date(ds.variables[timevar][0], ds.variables[timevar].units), 'UTC') time1 = pendulum.instance(num2date(ds.variables[timevar][-1], ds.variables[timevar].units), 'UTC') # depends on [control=['try'], data=[]] except: return Result(BaseCheck.MEDIUM, False, 'time_coverage_extents_match', ['Failed to retrieve and convert times for variables %s.' % timevar]) # depends on [control=['except'], data=[]] start_dt = abs(time0 - t_min) end_dt = abs(time1 - t_max) score = 2 msgs = [] if start_dt > timedelta(hours=1): msgs.append('Date time mismatch between time_coverage_start and actual time values %s (time_coverage_start) != %s (time[0])' % (t_min.isoformat(), time0.isoformat())) score -= 1 # depends on [control=['if'], data=[]] if end_dt > timedelta(hours=1): msgs.append('Date time mismatch between time_coverage_end and actual time values %s (time_coverage_end) != %s (time[N])' % (t_max.isoformat(), time1.isoformat())) score -= 1 # depends on [control=['if'], data=[]] return Result(BaseCheck.MEDIUM, (score, 2), 'time_coverage_extents_match', msgs)
def satisfy_all(self, **params): """Iterate through all satisfying input points.""" verbosity = params.get('verbosity', 0) default_phase = params.get('default_phase', 2) propagation_limit = params.get('propagation_limit', -1) decision_limit = params.get('decision_limit', -1) seed = params.get('seed', 1) yield from picosat.satisfy_all(self.nvars, self.clauses, verbosity, default_phase, propagation_limit, decision_limit, seed)
def function[satisfy_all, parameter[self]]: constant[Iterate through all satisfying input points.] variable[verbosity] assign[=] call[name[params].get, parameter[constant[verbosity], constant[0]]] variable[default_phase] assign[=] call[name[params].get, parameter[constant[default_phase], constant[2]]] variable[propagation_limit] assign[=] call[name[params].get, parameter[constant[propagation_limit], <ast.UnaryOp object at 0x7da1b0c34eb0>]] variable[decision_limit] assign[=] call[name[params].get, parameter[constant[decision_limit], <ast.UnaryOp object at 0x7da1b0c35660>]] variable[seed] assign[=] call[name[params].get, parameter[constant[seed], constant[1]]] <ast.YieldFrom object at 0x7da1b0c35e10>
keyword[def] identifier[satisfy_all] ( identifier[self] ,** identifier[params] ): literal[string] identifier[verbosity] = identifier[params] . identifier[get] ( literal[string] , literal[int] ) identifier[default_phase] = identifier[params] . identifier[get] ( literal[string] , literal[int] ) identifier[propagation_limit] = identifier[params] . identifier[get] ( literal[string] ,- literal[int] ) identifier[decision_limit] = identifier[params] . identifier[get] ( literal[string] ,- literal[int] ) identifier[seed] = identifier[params] . identifier[get] ( literal[string] , literal[int] ) keyword[yield] keyword[from] identifier[picosat] . identifier[satisfy_all] ( identifier[self] . identifier[nvars] , identifier[self] . identifier[clauses] , identifier[verbosity] , identifier[default_phase] , identifier[propagation_limit] , identifier[decision_limit] , identifier[seed] )
def satisfy_all(self, **params): """Iterate through all satisfying input points.""" verbosity = params.get('verbosity', 0) default_phase = params.get('default_phase', 2) propagation_limit = params.get('propagation_limit', -1) decision_limit = params.get('decision_limit', -1) seed = params.get('seed', 1) yield from picosat.satisfy_all(self.nvars, self.clauses, verbosity, default_phase, propagation_limit, decision_limit, seed)
def translate_sites(self, indices=None, vector=None): """ Translate specific sites by some vector, keeping the sites within the unit cell. Args: indices (list): List of site indices on which to perform the translation. vector (3x1 array): Translation vector for sites. """ if indices is None: indices = range(len(self)) if vector is None: vector == [0, 0, 0] for i in indices: site = self._sites[i] new_site = Site(site.species, site.coords + vector, properties=site.properties) self._sites[i] = new_site
def function[translate_sites, parameter[self, indices, vector]]: constant[ Translate specific sites by some vector, keeping the sites within the unit cell. Args: indices (list): List of site indices on which to perform the translation. vector (3x1 array): Translation vector for sites. ] if compare[name[indices] is constant[None]] begin[:] variable[indices] assign[=] call[name[range], parameter[call[name[len], parameter[name[self]]]]] if compare[name[vector] is constant[None]] begin[:] compare[name[vector] equal[==] list[[<ast.Constant object at 0x7da204566d40>, <ast.Constant object at 0x7da2045668c0>, <ast.Constant object at 0x7da204566bf0>]]] for taget[name[i]] in starred[name[indices]] begin[:] variable[site] assign[=] call[name[self]._sites][name[i]] variable[new_site] assign[=] call[name[Site], parameter[name[site].species, binary_operation[name[site].coords + name[vector]]]] call[name[self]._sites][name[i]] assign[=] name[new_site]
keyword[def] identifier[translate_sites] ( identifier[self] , identifier[indices] = keyword[None] , identifier[vector] = keyword[None] ): literal[string] keyword[if] identifier[indices] keyword[is] keyword[None] : identifier[indices] = identifier[range] ( identifier[len] ( identifier[self] )) keyword[if] identifier[vector] keyword[is] keyword[None] : identifier[vector] ==[ literal[int] , literal[int] , literal[int] ] keyword[for] identifier[i] keyword[in] identifier[indices] : identifier[site] = identifier[self] . identifier[_sites] [ identifier[i] ] identifier[new_site] = identifier[Site] ( identifier[site] . identifier[species] , identifier[site] . identifier[coords] + identifier[vector] , identifier[properties] = identifier[site] . identifier[properties] ) identifier[self] . identifier[_sites] [ identifier[i] ]= identifier[new_site]
def translate_sites(self, indices=None, vector=None): """ Translate specific sites by some vector, keeping the sites within the unit cell. Args: indices (list): List of site indices on which to perform the translation. vector (3x1 array): Translation vector for sites. """ if indices is None: indices = range(len(self)) # depends on [control=['if'], data=['indices']] if vector is None: vector == [0, 0, 0] # depends on [control=['if'], data=['vector']] for i in indices: site = self._sites[i] new_site = Site(site.species, site.coords + vector, properties=site.properties) self._sites[i] = new_site # depends on [control=['for'], data=['i']]
def compare_layers_from_nets(caffe_net, arg_params, aux_params, exe, layer_name_to_record, top_to_layers, mean_diff_allowed, max_diff_allowed): """ Compare layer by layer of a caffe network with mxnet network :param caffe_net: loaded caffe network :param arg_params: arguments :param aux_params: auxiliary parameters :param exe: mxnet model :param layer_name_to_record: map between caffe layer and information record :param top_to_layers: map between caffe blob name to layers which outputs it (including inplace) :param mean_diff_allowed: mean difference allowed between caffe blob and mxnet blob :param max_diff_allowed: max difference allowed between caffe blob and mxnet blob """ import re log_format = ' {0:<40} {1:<40} {2:<8} {3:>10} {4:>10} {5:<1}' compare_layers_from_nets.is_first_convolution = True def _compare_blob(caf_blob, mx_blob, caf_name, mx_name, blob_type, note): diff = np.abs(mx_blob - caf_blob) diff_mean = diff.mean() diff_max = diff.max() logging.info(log_format.format(caf_name, mx_name, blob_type, '%4.5f' % diff_mean, '%4.5f' % diff_max, note)) assert diff_mean < mean_diff_allowed assert diff_max < max_diff_allowed def _process_layer_parameters(layer): logging.debug('processing layer %s of type %s', layer.name, layer.type) normalized_layer_name = re.sub('[-/]', '_', layer.name) # handle weight and bias of convolution and fully-connected layers if layer.name in caffe_net.params and layer.type in ['Convolution', 'InnerProduct', 'Deconvolution']: has_bias = len(caffe_net.params[layer.name]) > 1 mx_name_weight = '{}_weight'.format(normalized_layer_name) mx_beta = arg_params[mx_name_weight].asnumpy() # first convolution should change from BGR to RGB if layer.type == 'Convolution' and compare_layers_from_nets.is_first_convolution: compare_layers_from_nets.is_first_convolution = False # if RGB or RGBA if mx_beta.shape[1] == 3 or mx_beta.shape[1] == 4: # Swapping BGR of caffe into RGB in mxnet mx_beta[:, [0, 2], :, :] = mx_beta[:, [2, 0], :, :] caf_beta = caffe_net.params[layer.name][0].data _compare_blob(caf_beta, mx_beta, layer.name, mx_name_weight, 'weight', '') if has_bias: mx_name_bias = '{}_bias'.format(normalized_layer_name) mx_gamma = arg_params[mx_name_bias].asnumpy() caf_gamma = caffe_net.params[layer.name][1].data _compare_blob(caf_gamma, mx_gamma, layer.name, mx_name_bias, 'bias', '') elif layer.name in caffe_net.params and layer.type == 'Scale': if 'scale' in normalized_layer_name: bn_name = normalized_layer_name.replace('scale', 'bn') elif 'sc' in normalized_layer_name: bn_name = normalized_layer_name.replace('sc', 'bn') else: assert False, 'Unknown name convention for bn/scale' beta_name = '{}_beta'.format(bn_name) gamma_name = '{}_gamma'.format(bn_name) mx_beta = arg_params[beta_name].asnumpy() caf_beta = caffe_net.params[layer.name][1].data _compare_blob(caf_beta, mx_beta, layer.name, beta_name, 'mov_mean', '') mx_gamma = arg_params[gamma_name].asnumpy() caf_gamma = caffe_net.params[layer.name][0].data _compare_blob(caf_gamma, mx_gamma, layer.name, gamma_name, 'mov_var', '') elif layer.name in caffe_net.params and layer.type == 'BatchNorm': mean_name = '{}_moving_mean'.format(normalized_layer_name) var_name = '{}_moving_var'.format(normalized_layer_name) caf_rescale_factor = caffe_net.params[layer.name][2].data mx_mean = aux_params[mean_name].asnumpy() caf_mean = caffe_net.params[layer.name][0].data / caf_rescale_factor _compare_blob(caf_mean, mx_mean, layer.name, mean_name, 'mean', '') mx_var = aux_params[var_name].asnumpy() caf_var = caffe_net.params[layer.name][1].data / caf_rescale_factor _compare_blob(caf_var, mx_var, layer.name, var_name, 'var', 'expect 1e-04 change due to cudnn eps') elif layer.type in ['Input', 'Pooling', 'ReLU', 'Eltwise', 'Softmax', 'LRN', 'Concat', 'Dropout', 'Crop']: # no parameters to check for these layers pass else: warnings.warn('No handling for layer %s of type %s, should we ignore it?', layer.name, layer.type) return def _process_layer_output(caffe_blob_name): logging.debug('processing blob %s', caffe_blob_name) # skip blobs not originating from actual layers, e.g. artificial split layers added by caffe if caffe_blob_name not in top_to_layers: return caf_blob = caffe_net.blobs[caffe_blob_name].data # data should change from BGR to RGB if caffe_blob_name == 'data': # if RGB or RGBA if caf_blob.shape[1] == 3 or caf_blob.shape[1] == 4: # Swapping BGR of caffe into RGB in mxnet caf_blob[:, [0, 2], :, :] = caf_blob[:, [2, 0], :, :] mx_name = 'data' else: # get last layer name which outputs this blob name last_layer_name = top_to_layers[caffe_blob_name][-1] normalized_last_layer_name = re.sub('[-/]', '_', last_layer_name) mx_name = '{}_output'.format(normalized_last_layer_name) if 'scale' in mx_name: mx_name = mx_name.replace('scale', 'bn') elif 'sc' in mx_name: mx_name = mx_name.replace('sc', 'bn') if mx_name not in exe.output_dict: logging.error('mxnet blob %s is missing, time to extend the compare tool..', mx_name) return mx_blob = exe.output_dict[mx_name].asnumpy() _compare_blob(caf_blob, mx_blob, caffe_blob_name, mx_name, 'output', '') return # check layer parameters logging.info('\n***** Network Parameters '.ljust(140, '*')) logging.info(log_format.format('CAFFE', 'MXNET', 'Type', 'Mean(diff)', 'Max(diff)', 'Note')) first_layer_name = layer_name_to_record.keys()[0] _bfs(layer_name_to_record[first_layer_name], _process_layer_parameters) # check layer output logging.info('\n***** Network Outputs '.ljust(140, '*')) logging.info(log_format.format('CAFFE', 'MXNET', 'Type', 'Mean(diff)', 'Max(diff)', 'Note')) for caffe_blob_name in caffe_net.blobs.keys(): _process_layer_output(caffe_blob_name) return
def function[compare_layers_from_nets, parameter[caffe_net, arg_params, aux_params, exe, layer_name_to_record, top_to_layers, mean_diff_allowed, max_diff_allowed]]: constant[ Compare layer by layer of a caffe network with mxnet network :param caffe_net: loaded caffe network :param arg_params: arguments :param aux_params: auxiliary parameters :param exe: mxnet model :param layer_name_to_record: map between caffe layer and information record :param top_to_layers: map between caffe blob name to layers which outputs it (including inplace) :param mean_diff_allowed: mean difference allowed between caffe blob and mxnet blob :param max_diff_allowed: max difference allowed between caffe blob and mxnet blob ] import module[re] variable[log_format] assign[=] constant[ {0:<40} {1:<40} {2:<8} {3:>10} {4:>10} {5:<1}] name[compare_layers_from_nets].is_first_convolution assign[=] constant[True] def function[_compare_blob, parameter[caf_blob, mx_blob, caf_name, mx_name, blob_type, note]]: variable[diff] assign[=] call[name[np].abs, parameter[binary_operation[name[mx_blob] - name[caf_blob]]]] variable[diff_mean] assign[=] call[name[diff].mean, parameter[]] variable[diff_max] assign[=] call[name[diff].max, parameter[]] call[name[logging].info, parameter[call[name[log_format].format, parameter[name[caf_name], name[mx_name], name[blob_type], binary_operation[constant[%4.5f] <ast.Mod object at 0x7da2590d6920> name[diff_mean]], binary_operation[constant[%4.5f] <ast.Mod object at 0x7da2590d6920> name[diff_max]], name[note]]]]] assert[compare[name[diff_mean] less[<] name[mean_diff_allowed]]] assert[compare[name[diff_max] less[<] name[max_diff_allowed]]] def function[_process_layer_parameters, parameter[layer]]: call[name[logging].debug, parameter[constant[processing layer %s of type %s], name[layer].name, name[layer].type]] variable[normalized_layer_name] assign[=] call[name[re].sub, parameter[constant[[-/]], constant[_], name[layer].name]] if <ast.BoolOp object at 0x7da1b2025240> begin[:] variable[has_bias] assign[=] compare[call[name[len], parameter[call[name[caffe_net].params][name[layer].name]]] greater[>] constant[1]] variable[mx_name_weight] assign[=] call[constant[{}_weight].format, parameter[name[normalized_layer_name]]] variable[mx_beta] assign[=] call[call[name[arg_params]][name[mx_name_weight]].asnumpy, parameter[]] if <ast.BoolOp object at 0x7da2054a6da0> begin[:] name[compare_layers_from_nets].is_first_convolution assign[=] constant[False] if <ast.BoolOp object at 0x7da2054a6890> begin[:] call[name[mx_beta]][tuple[[<ast.Slice object at 0x7da2054a6170>, <ast.List object at 0x7da2054a4910>, <ast.Slice object at 0x7da2054a7c10>, <ast.Slice object at 0x7da2054a4a30>]]] assign[=] call[name[mx_beta]][tuple[[<ast.Slice object at 0x7da2054a59c0>, <ast.List object at 0x7da2054a7cd0>, <ast.Slice object at 0x7da2054a57e0>, <ast.Slice object at 0x7da2054a4eb0>]]] variable[caf_beta] assign[=] call[call[name[caffe_net].params][name[layer].name]][constant[0]].data call[name[_compare_blob], parameter[name[caf_beta], name[mx_beta], name[layer].name, name[mx_name_weight], constant[weight], constant[]]] if name[has_bias] begin[:] variable[mx_name_bias] assign[=] call[constant[{}_bias].format, parameter[name[normalized_layer_name]]] variable[mx_gamma] assign[=] call[call[name[arg_params]][name[mx_name_bias]].asnumpy, parameter[]] variable[caf_gamma] assign[=] call[call[name[caffe_net].params][name[layer].name]][constant[1]].data call[name[_compare_blob], parameter[name[caf_gamma], name[mx_gamma], name[layer].name, name[mx_name_bias], constant[bias], constant[]]] return[None] def function[_process_layer_output, parameter[caffe_blob_name]]: call[name[logging].debug, parameter[constant[processing blob %s], name[caffe_blob_name]]] if compare[name[caffe_blob_name] <ast.NotIn object at 0x7da2590d7190> name[top_to_layers]] begin[:] return[None] variable[caf_blob] assign[=] call[name[caffe_net].blobs][name[caffe_blob_name]].data if compare[name[caffe_blob_name] equal[==] constant[data]] begin[:] if <ast.BoolOp object at 0x7da1b2065270> begin[:] call[name[caf_blob]][tuple[[<ast.Slice object at 0x7da1b2067490>, <ast.List object at 0x7da1b2064c10>, <ast.Slice object at 0x7da1b2064a30>, <ast.Slice object at 0x7da1b2064130>]]] assign[=] call[name[caf_blob]][tuple[[<ast.Slice object at 0x7da1b2067fd0>, <ast.List object at 0x7da1b20661d0>, <ast.Slice object at 0x7da1b2067f40>, <ast.Slice object at 0x7da1b2064b50>]]] variable[mx_name] assign[=] constant[data] if compare[name[mx_name] <ast.NotIn object at 0x7da2590d7190> name[exe].output_dict] begin[:] call[name[logging].error, parameter[constant[mxnet blob %s is missing, time to extend the compare tool..], name[mx_name]]] return[None] variable[mx_blob] assign[=] call[call[name[exe].output_dict][name[mx_name]].asnumpy, parameter[]] call[name[_compare_blob], parameter[name[caf_blob], name[mx_blob], name[caffe_blob_name], name[mx_name], constant[output], constant[]]] return[None] call[name[logging].info, parameter[call[constant[ ***** Network Parameters ].ljust, parameter[constant[140], constant[*]]]]] call[name[logging].info, parameter[call[name[log_format].format, parameter[constant[CAFFE], constant[MXNET], constant[Type], constant[Mean(diff)], constant[Max(diff)], constant[Note]]]]] variable[first_layer_name] assign[=] call[call[name[layer_name_to_record].keys, parameter[]]][constant[0]] call[name[_bfs], parameter[call[name[layer_name_to_record]][name[first_layer_name]], name[_process_layer_parameters]]] call[name[logging].info, parameter[call[constant[ ***** Network Outputs ].ljust, parameter[constant[140], constant[*]]]]] call[name[logging].info, parameter[call[name[log_format].format, parameter[constant[CAFFE], constant[MXNET], constant[Type], constant[Mean(diff)], constant[Max(diff)], constant[Note]]]]] for taget[name[caffe_blob_name]] in starred[call[name[caffe_net].blobs.keys, parameter[]]] begin[:] call[name[_process_layer_output], parameter[name[caffe_blob_name]]] return[None]
keyword[def] identifier[compare_layers_from_nets] ( identifier[caffe_net] , identifier[arg_params] , identifier[aux_params] , identifier[exe] , identifier[layer_name_to_record] , identifier[top_to_layers] , identifier[mean_diff_allowed] , identifier[max_diff_allowed] ): literal[string] keyword[import] identifier[re] identifier[log_format] = literal[string] identifier[compare_layers_from_nets] . identifier[is_first_convolution] = keyword[True] keyword[def] identifier[_compare_blob] ( identifier[caf_blob] , identifier[mx_blob] , identifier[caf_name] , identifier[mx_name] , identifier[blob_type] , identifier[note] ): identifier[diff] = identifier[np] . identifier[abs] ( identifier[mx_blob] - identifier[caf_blob] ) identifier[diff_mean] = identifier[diff] . identifier[mean] () identifier[diff_max] = identifier[diff] . identifier[max] () identifier[logging] . identifier[info] ( identifier[log_format] . identifier[format] ( identifier[caf_name] , identifier[mx_name] , identifier[blob_type] , literal[string] % identifier[diff_mean] , literal[string] % identifier[diff_max] , identifier[note] )) keyword[assert] identifier[diff_mean] < identifier[mean_diff_allowed] keyword[assert] identifier[diff_max] < identifier[max_diff_allowed] keyword[def] identifier[_process_layer_parameters] ( identifier[layer] ): identifier[logging] . identifier[debug] ( literal[string] , identifier[layer] . identifier[name] , identifier[layer] . identifier[type] ) identifier[normalized_layer_name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[layer] . identifier[name] ) keyword[if] identifier[layer] . identifier[name] keyword[in] identifier[caffe_net] . identifier[params] keyword[and] identifier[layer] . identifier[type] keyword[in] [ literal[string] , literal[string] , literal[string] ]: identifier[has_bias] = identifier[len] ( identifier[caffe_net] . identifier[params] [ identifier[layer] . identifier[name] ])> literal[int] identifier[mx_name_weight] = literal[string] . identifier[format] ( identifier[normalized_layer_name] ) identifier[mx_beta] = identifier[arg_params] [ identifier[mx_name_weight] ]. identifier[asnumpy] () keyword[if] identifier[layer] . identifier[type] == literal[string] keyword[and] identifier[compare_layers_from_nets] . identifier[is_first_convolution] : identifier[compare_layers_from_nets] . identifier[is_first_convolution] = keyword[False] keyword[if] identifier[mx_beta] . identifier[shape] [ literal[int] ]== literal[int] keyword[or] identifier[mx_beta] . identifier[shape] [ literal[int] ]== literal[int] : identifier[mx_beta] [:,[ literal[int] , literal[int] ],:,:]= identifier[mx_beta] [:,[ literal[int] , literal[int] ],:,:] identifier[caf_beta] = identifier[caffe_net] . identifier[params] [ identifier[layer] . identifier[name] ][ literal[int] ]. identifier[data] identifier[_compare_blob] ( identifier[caf_beta] , identifier[mx_beta] , identifier[layer] . identifier[name] , identifier[mx_name_weight] , literal[string] , literal[string] ) keyword[if] identifier[has_bias] : identifier[mx_name_bias] = literal[string] . identifier[format] ( identifier[normalized_layer_name] ) identifier[mx_gamma] = identifier[arg_params] [ identifier[mx_name_bias] ]. identifier[asnumpy] () identifier[caf_gamma] = identifier[caffe_net] . identifier[params] [ identifier[layer] . identifier[name] ][ literal[int] ]. identifier[data] identifier[_compare_blob] ( identifier[caf_gamma] , identifier[mx_gamma] , identifier[layer] . identifier[name] , identifier[mx_name_bias] , literal[string] , literal[string] ) keyword[elif] identifier[layer] . identifier[name] keyword[in] identifier[caffe_net] . identifier[params] keyword[and] identifier[layer] . identifier[type] == literal[string] : keyword[if] literal[string] keyword[in] identifier[normalized_layer_name] : identifier[bn_name] = identifier[normalized_layer_name] . identifier[replace] ( literal[string] , literal[string] ) keyword[elif] literal[string] keyword[in] identifier[normalized_layer_name] : identifier[bn_name] = identifier[normalized_layer_name] . identifier[replace] ( literal[string] , literal[string] ) keyword[else] : keyword[assert] keyword[False] , literal[string] identifier[beta_name] = literal[string] . identifier[format] ( identifier[bn_name] ) identifier[gamma_name] = literal[string] . identifier[format] ( identifier[bn_name] ) identifier[mx_beta] = identifier[arg_params] [ identifier[beta_name] ]. identifier[asnumpy] () identifier[caf_beta] = identifier[caffe_net] . identifier[params] [ identifier[layer] . identifier[name] ][ literal[int] ]. identifier[data] identifier[_compare_blob] ( identifier[caf_beta] , identifier[mx_beta] , identifier[layer] . identifier[name] , identifier[beta_name] , literal[string] , literal[string] ) identifier[mx_gamma] = identifier[arg_params] [ identifier[gamma_name] ]. identifier[asnumpy] () identifier[caf_gamma] = identifier[caffe_net] . identifier[params] [ identifier[layer] . identifier[name] ][ literal[int] ]. identifier[data] identifier[_compare_blob] ( identifier[caf_gamma] , identifier[mx_gamma] , identifier[layer] . identifier[name] , identifier[gamma_name] , literal[string] , literal[string] ) keyword[elif] identifier[layer] . identifier[name] keyword[in] identifier[caffe_net] . identifier[params] keyword[and] identifier[layer] . identifier[type] == literal[string] : identifier[mean_name] = literal[string] . identifier[format] ( identifier[normalized_layer_name] ) identifier[var_name] = literal[string] . identifier[format] ( identifier[normalized_layer_name] ) identifier[caf_rescale_factor] = identifier[caffe_net] . identifier[params] [ identifier[layer] . identifier[name] ][ literal[int] ]. identifier[data] identifier[mx_mean] = identifier[aux_params] [ identifier[mean_name] ]. identifier[asnumpy] () identifier[caf_mean] = identifier[caffe_net] . identifier[params] [ identifier[layer] . identifier[name] ][ literal[int] ]. identifier[data] / identifier[caf_rescale_factor] identifier[_compare_blob] ( identifier[caf_mean] , identifier[mx_mean] , identifier[layer] . identifier[name] , identifier[mean_name] , literal[string] , literal[string] ) identifier[mx_var] = identifier[aux_params] [ identifier[var_name] ]. identifier[asnumpy] () identifier[caf_var] = identifier[caffe_net] . identifier[params] [ identifier[layer] . identifier[name] ][ literal[int] ]. identifier[data] / identifier[caf_rescale_factor] identifier[_compare_blob] ( identifier[caf_var] , identifier[mx_var] , identifier[layer] . identifier[name] , identifier[var_name] , literal[string] , literal[string] ) keyword[elif] identifier[layer] . identifier[type] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: keyword[pass] keyword[else] : identifier[warnings] . identifier[warn] ( literal[string] , identifier[layer] . identifier[name] , identifier[layer] . identifier[type] ) keyword[return] keyword[def] identifier[_process_layer_output] ( identifier[caffe_blob_name] ): identifier[logging] . identifier[debug] ( literal[string] , identifier[caffe_blob_name] ) keyword[if] identifier[caffe_blob_name] keyword[not] keyword[in] identifier[top_to_layers] : keyword[return] identifier[caf_blob] = identifier[caffe_net] . identifier[blobs] [ identifier[caffe_blob_name] ]. identifier[data] keyword[if] identifier[caffe_blob_name] == literal[string] : keyword[if] identifier[caf_blob] . identifier[shape] [ literal[int] ]== literal[int] keyword[or] identifier[caf_blob] . identifier[shape] [ literal[int] ]== literal[int] : identifier[caf_blob] [:,[ literal[int] , literal[int] ],:,:]= identifier[caf_blob] [:,[ literal[int] , literal[int] ],:,:] identifier[mx_name] = literal[string] keyword[else] : identifier[last_layer_name] = identifier[top_to_layers] [ identifier[caffe_blob_name] ][- literal[int] ] identifier[normalized_last_layer_name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[last_layer_name] ) identifier[mx_name] = literal[string] . identifier[format] ( identifier[normalized_last_layer_name] ) keyword[if] literal[string] keyword[in] identifier[mx_name] : identifier[mx_name] = identifier[mx_name] . identifier[replace] ( literal[string] , literal[string] ) keyword[elif] literal[string] keyword[in] identifier[mx_name] : identifier[mx_name] = identifier[mx_name] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[mx_name] keyword[not] keyword[in] identifier[exe] . identifier[output_dict] : identifier[logging] . identifier[error] ( literal[string] , identifier[mx_name] ) keyword[return] identifier[mx_blob] = identifier[exe] . identifier[output_dict] [ identifier[mx_name] ]. identifier[asnumpy] () identifier[_compare_blob] ( identifier[caf_blob] , identifier[mx_blob] , identifier[caffe_blob_name] , identifier[mx_name] , literal[string] , literal[string] ) keyword[return] identifier[logging] . identifier[info] ( literal[string] . identifier[ljust] ( literal[int] , literal[string] )) identifier[logging] . identifier[info] ( identifier[log_format] . identifier[format] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )) identifier[first_layer_name] = identifier[layer_name_to_record] . identifier[keys] ()[ literal[int] ] identifier[_bfs] ( identifier[layer_name_to_record] [ identifier[first_layer_name] ], identifier[_process_layer_parameters] ) identifier[logging] . identifier[info] ( literal[string] . identifier[ljust] ( literal[int] , literal[string] )) identifier[logging] . identifier[info] ( identifier[log_format] . identifier[format] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )) keyword[for] identifier[caffe_blob_name] keyword[in] identifier[caffe_net] . identifier[blobs] . identifier[keys] (): identifier[_process_layer_output] ( identifier[caffe_blob_name] ) keyword[return]
def compare_layers_from_nets(caffe_net, arg_params, aux_params, exe, layer_name_to_record, top_to_layers, mean_diff_allowed, max_diff_allowed): """ Compare layer by layer of a caffe network with mxnet network :param caffe_net: loaded caffe network :param arg_params: arguments :param aux_params: auxiliary parameters :param exe: mxnet model :param layer_name_to_record: map between caffe layer and information record :param top_to_layers: map between caffe blob name to layers which outputs it (including inplace) :param mean_diff_allowed: mean difference allowed between caffe blob and mxnet blob :param max_diff_allowed: max difference allowed between caffe blob and mxnet blob """ import re log_format = ' {0:<40} {1:<40} {2:<8} {3:>10} {4:>10} {5:<1}' compare_layers_from_nets.is_first_convolution = True def _compare_blob(caf_blob, mx_blob, caf_name, mx_name, blob_type, note): diff = np.abs(mx_blob - caf_blob) diff_mean = diff.mean() diff_max = diff.max() logging.info(log_format.format(caf_name, mx_name, blob_type, '%4.5f' % diff_mean, '%4.5f' % diff_max, note)) assert diff_mean < mean_diff_allowed assert diff_max < max_diff_allowed def _process_layer_parameters(layer): logging.debug('processing layer %s of type %s', layer.name, layer.type) normalized_layer_name = re.sub('[-/]', '_', layer.name) # handle weight and bias of convolution and fully-connected layers if layer.name in caffe_net.params and layer.type in ['Convolution', 'InnerProduct', 'Deconvolution']: has_bias = len(caffe_net.params[layer.name]) > 1 mx_name_weight = '{}_weight'.format(normalized_layer_name) mx_beta = arg_params[mx_name_weight].asnumpy() # first convolution should change from BGR to RGB if layer.type == 'Convolution' and compare_layers_from_nets.is_first_convolution: compare_layers_from_nets.is_first_convolution = False # if RGB or RGBA if mx_beta.shape[1] == 3 or mx_beta.shape[1] == 4: # Swapping BGR of caffe into RGB in mxnet mx_beta[:, [0, 2], :, :] = mx_beta[:, [2, 0], :, :] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] caf_beta = caffe_net.params[layer.name][0].data _compare_blob(caf_beta, mx_beta, layer.name, mx_name_weight, 'weight', '') if has_bias: mx_name_bias = '{}_bias'.format(normalized_layer_name) mx_gamma = arg_params[mx_name_bias].asnumpy() caf_gamma = caffe_net.params[layer.name][1].data _compare_blob(caf_gamma, mx_gamma, layer.name, mx_name_bias, 'bias', '') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif layer.name in caffe_net.params and layer.type == 'Scale': if 'scale' in normalized_layer_name: bn_name = normalized_layer_name.replace('scale', 'bn') # depends on [control=['if'], data=['normalized_layer_name']] elif 'sc' in normalized_layer_name: bn_name = normalized_layer_name.replace('sc', 'bn') # depends on [control=['if'], data=['normalized_layer_name']] else: assert False, 'Unknown name convention for bn/scale' beta_name = '{}_beta'.format(bn_name) gamma_name = '{}_gamma'.format(bn_name) mx_beta = arg_params[beta_name].asnumpy() caf_beta = caffe_net.params[layer.name][1].data _compare_blob(caf_beta, mx_beta, layer.name, beta_name, 'mov_mean', '') mx_gamma = arg_params[gamma_name].asnumpy() caf_gamma = caffe_net.params[layer.name][0].data _compare_blob(caf_gamma, mx_gamma, layer.name, gamma_name, 'mov_var', '') # depends on [control=['if'], data=[]] elif layer.name in caffe_net.params and layer.type == 'BatchNorm': mean_name = '{}_moving_mean'.format(normalized_layer_name) var_name = '{}_moving_var'.format(normalized_layer_name) caf_rescale_factor = caffe_net.params[layer.name][2].data mx_mean = aux_params[mean_name].asnumpy() caf_mean = caffe_net.params[layer.name][0].data / caf_rescale_factor _compare_blob(caf_mean, mx_mean, layer.name, mean_name, 'mean', '') mx_var = aux_params[var_name].asnumpy() caf_var = caffe_net.params[layer.name][1].data / caf_rescale_factor _compare_blob(caf_var, mx_var, layer.name, var_name, 'var', 'expect 1e-04 change due to cudnn eps') # depends on [control=['if'], data=[]] elif layer.type in ['Input', 'Pooling', 'ReLU', 'Eltwise', 'Softmax', 'LRN', 'Concat', 'Dropout', 'Crop']: # no parameters to check for these layers pass # depends on [control=['if'], data=[]] else: warnings.warn('No handling for layer %s of type %s, should we ignore it?', layer.name, layer.type) return def _process_layer_output(caffe_blob_name): logging.debug('processing blob %s', caffe_blob_name) # skip blobs not originating from actual layers, e.g. artificial split layers added by caffe if caffe_blob_name not in top_to_layers: return # depends on [control=['if'], data=[]] caf_blob = caffe_net.blobs[caffe_blob_name].data # data should change from BGR to RGB if caffe_blob_name == 'data': # if RGB or RGBA if caf_blob.shape[1] == 3 or caf_blob.shape[1] == 4: # Swapping BGR of caffe into RGB in mxnet caf_blob[:, [0, 2], :, :] = caf_blob[:, [2, 0], :, :] # depends on [control=['if'], data=[]] mx_name = 'data' # depends on [control=['if'], data=[]] else: # get last layer name which outputs this blob name last_layer_name = top_to_layers[caffe_blob_name][-1] normalized_last_layer_name = re.sub('[-/]', '_', last_layer_name) mx_name = '{}_output'.format(normalized_last_layer_name) if 'scale' in mx_name: mx_name = mx_name.replace('scale', 'bn') # depends on [control=['if'], data=['mx_name']] elif 'sc' in mx_name: mx_name = mx_name.replace('sc', 'bn') # depends on [control=['if'], data=['mx_name']] if mx_name not in exe.output_dict: logging.error('mxnet blob %s is missing, time to extend the compare tool..', mx_name) return # depends on [control=['if'], data=['mx_name']] mx_blob = exe.output_dict[mx_name].asnumpy() _compare_blob(caf_blob, mx_blob, caffe_blob_name, mx_name, 'output', '') return # check layer parameters logging.info('\n***** Network Parameters '.ljust(140, '*')) logging.info(log_format.format('CAFFE', 'MXNET', 'Type', 'Mean(diff)', 'Max(diff)', 'Note')) first_layer_name = layer_name_to_record.keys()[0] _bfs(layer_name_to_record[first_layer_name], _process_layer_parameters) # check layer output logging.info('\n***** Network Outputs '.ljust(140, '*')) logging.info(log_format.format('CAFFE', 'MXNET', 'Type', 'Mean(diff)', 'Max(diff)', 'Note')) for caffe_blob_name in caffe_net.blobs.keys(): _process_layer_output(caffe_blob_name) # depends on [control=['for'], data=['caffe_blob_name']] return
def deregister_webhook(self, **kwargs): # noqa: E501 """Delete callback URL # noqa: E501 Deletes the callback URL. **Example usage:** curl -X DELETE https://api.us-east-1.mbedcloud.com/v2/notification/callback -H 'authorization: Bearer {api-key}' # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.deregister_webhook(asynchronous=True) >>> result = thread.get() :param asynchronous bool :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.deregister_webhook_with_http_info(**kwargs) # noqa: E501 else: (data) = self.deregister_webhook_with_http_info(**kwargs) # noqa: E501 return data
def function[deregister_webhook, parameter[self]]: constant[Delete callback URL # noqa: E501 Deletes the callback URL. **Example usage:** curl -X DELETE https://api.us-east-1.mbedcloud.com/v2/notification/callback -H 'authorization: Bearer {api-key}' # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.deregister_webhook(asynchronous=True) >>> result = thread.get() :param asynchronous bool :return: None If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[asynchronous]]] begin[:] return[call[name[self].deregister_webhook_with_http_info, parameter[]]]
keyword[def] identifier[deregister_webhook] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[deregister_webhook_with_http_info] (** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[deregister_webhook_with_http_info] (** identifier[kwargs] ) keyword[return] identifier[data]
def deregister_webhook(self, **kwargs): # noqa: E501 "Delete callback URL # noqa: E501\n\n Deletes the callback URL. **Example usage:** curl -X DELETE https://api.us-east-1.mbedcloud.com/v2/notification/callback -H 'authorization: Bearer {api-key}' # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass asynchronous=True\n >>> thread = api.deregister_webhook(asynchronous=True)\n >>> result = thread.get()\n\n :param asynchronous bool\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.deregister_webhook_with_http_info(**kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.deregister_webhook_with_http_info(**kwargs) # noqa: E501 return data
def put(self, content_bytes): ''' Save the `bytes` under a key derived from `path` in Memcache. :return: A string representing the content path if it is stored. :rettype: string or None ''' derived_path = self.context.request.url over_max, content_size = self.content_size_exceeded_max(content_bytes) logger.debug('[{log_prefix}] content size in bytes: {size}' ' | is over max? {over_max} | skip storage? {skip}'.format( log_prefix=LOG_PREFIX, size=content_size, over_max=over_max, skip=self.skip_storage())) if (over_max and self.skip_storage()): # Short-circuit the storage when configured to skip large items logger.debug('[{log_prefix}] skipping storage: {content_size} ' 'exceeds item_size_max of {max_size}'.format( log_prefix=LOG_PREFIX, content_size=content_size, max_size=self.item_size_max())) return None self.storage.set( self.timestamp_key_for(derived_path), datetime.utcnow(), time=self.context.config.RESULT_STORAGE_EXPIRATION_SECONDS ) self.storage.set( self.result_key_for(derived_path), content_bytes, time=self.context.config.RESULT_STORAGE_EXPIRATION_SECONDS ) return derived_path
def function[put, parameter[self, content_bytes]]: constant[ Save the `bytes` under a key derived from `path` in Memcache. :return: A string representing the content path if it is stored. :rettype: string or None ] variable[derived_path] assign[=] name[self].context.request.url <ast.Tuple object at 0x7da20c794a00> assign[=] call[name[self].content_size_exceeded_max, parameter[name[content_bytes]]] call[name[logger].debug, parameter[call[constant[[{log_prefix}] content size in bytes: {size} | is over max? {over_max} | skip storage? {skip}].format, parameter[]]]] if <ast.BoolOp object at 0x7da20c796860> begin[:] call[name[logger].debug, parameter[call[constant[[{log_prefix}] skipping storage: {content_size} exceeds item_size_max of {max_size}].format, parameter[]]]] return[constant[None]] call[name[self].storage.set, parameter[call[name[self].timestamp_key_for, parameter[name[derived_path]]], call[name[datetime].utcnow, parameter[]]]] call[name[self].storage.set, parameter[call[name[self].result_key_for, parameter[name[derived_path]]], name[content_bytes]]] return[name[derived_path]]
keyword[def] identifier[put] ( identifier[self] , identifier[content_bytes] ): literal[string] identifier[derived_path] = identifier[self] . identifier[context] . identifier[request] . identifier[url] identifier[over_max] , identifier[content_size] = identifier[self] . identifier[content_size_exceeded_max] ( identifier[content_bytes] ) identifier[logger] . identifier[debug] ( literal[string] literal[string] . identifier[format] ( identifier[log_prefix] = identifier[LOG_PREFIX] , identifier[size] = identifier[content_size] , identifier[over_max] = identifier[over_max] , identifier[skip] = identifier[self] . identifier[skip_storage] ())) keyword[if] ( identifier[over_max] keyword[and] identifier[self] . identifier[skip_storage] ()): identifier[logger] . identifier[debug] ( literal[string] literal[string] . identifier[format] ( identifier[log_prefix] = identifier[LOG_PREFIX] , identifier[content_size] = identifier[content_size] , identifier[max_size] = identifier[self] . identifier[item_size_max] ())) keyword[return] keyword[None] identifier[self] . identifier[storage] . identifier[set] ( identifier[self] . identifier[timestamp_key_for] ( identifier[derived_path] ), identifier[datetime] . identifier[utcnow] (), identifier[time] = identifier[self] . identifier[context] . identifier[config] . identifier[RESULT_STORAGE_EXPIRATION_SECONDS] ) identifier[self] . identifier[storage] . identifier[set] ( identifier[self] . identifier[result_key_for] ( identifier[derived_path] ), identifier[content_bytes] , identifier[time] = identifier[self] . identifier[context] . identifier[config] . identifier[RESULT_STORAGE_EXPIRATION_SECONDS] ) keyword[return] identifier[derived_path]
def put(self, content_bytes): """ Save the `bytes` under a key derived from `path` in Memcache. :return: A string representing the content path if it is stored. :rettype: string or None """ derived_path = self.context.request.url (over_max, content_size) = self.content_size_exceeded_max(content_bytes) logger.debug('[{log_prefix}] content size in bytes: {size} | is over max? {over_max} | skip storage? {skip}'.format(log_prefix=LOG_PREFIX, size=content_size, over_max=over_max, skip=self.skip_storage())) if over_max and self.skip_storage(): # Short-circuit the storage when configured to skip large items logger.debug('[{log_prefix}] skipping storage: {content_size} exceeds item_size_max of {max_size}'.format(log_prefix=LOG_PREFIX, content_size=content_size, max_size=self.item_size_max())) return None # depends on [control=['if'], data=[]] self.storage.set(self.timestamp_key_for(derived_path), datetime.utcnow(), time=self.context.config.RESULT_STORAGE_EXPIRATION_SECONDS) self.storage.set(self.result_key_for(derived_path), content_bytes, time=self.context.config.RESULT_STORAGE_EXPIRATION_SECONDS) return derived_path
def serialize(self, datas): """ Serialize datas to manifest structure with metas and references. Only references are returned, metas are assigned to attribute ``ManifestSerializer._metas``. Arguments: datas (dict): Data where to search for reference declarations. This is commonly the fully parsed manifest. Returns: collections.OrderedDict: Serialized enabled references datas. """ self._metas = OrderedDict({ 'references': self.get_meta_references(datas), }) return self.get_enabled_references(datas, self._metas['references'])
def function[serialize, parameter[self, datas]]: constant[ Serialize datas to manifest structure with metas and references. Only references are returned, metas are assigned to attribute ``ManifestSerializer._metas``. Arguments: datas (dict): Data where to search for reference declarations. This is commonly the fully parsed manifest. Returns: collections.OrderedDict: Serialized enabled references datas. ] name[self]._metas assign[=] call[name[OrderedDict], parameter[dictionary[[<ast.Constant object at 0x7da2054a5420>], [<ast.Call object at 0x7da2054a4040>]]]] return[call[name[self].get_enabled_references, parameter[name[datas], call[name[self]._metas][constant[references]]]]]
keyword[def] identifier[serialize] ( identifier[self] , identifier[datas] ): literal[string] identifier[self] . identifier[_metas] = identifier[OrderedDict] ({ literal[string] : identifier[self] . identifier[get_meta_references] ( identifier[datas] ), }) keyword[return] identifier[self] . identifier[get_enabled_references] ( identifier[datas] , identifier[self] . identifier[_metas] [ literal[string] ])
def serialize(self, datas): """ Serialize datas to manifest structure with metas and references. Only references are returned, metas are assigned to attribute ``ManifestSerializer._metas``. Arguments: datas (dict): Data where to search for reference declarations. This is commonly the fully parsed manifest. Returns: collections.OrderedDict: Serialized enabled references datas. """ self._metas = OrderedDict({'references': self.get_meta_references(datas)}) return self.get_enabled_references(datas, self._metas['references'])
def extract_expression(dirty_string, language): """ Give a string such as: "What is 4 + 4?" Return the string "4 + 4" """ tokens = tokenize(dirty_string, language) start_index = 0 end_index = len(tokens) for part in tokens: if is_symbol(part) or is_word(part, language): break else: start_index += 1 for part in reversed(tokens): if is_symbol(part) or is_word(part, language): break else: end_index -= 1 return ' '.join(tokens[start_index:end_index])
def function[extract_expression, parameter[dirty_string, language]]: constant[ Give a string such as: "What is 4 + 4?" Return the string "4 + 4" ] variable[tokens] assign[=] call[name[tokenize], parameter[name[dirty_string], name[language]]] variable[start_index] assign[=] constant[0] variable[end_index] assign[=] call[name[len], parameter[name[tokens]]] for taget[name[part]] in starred[name[tokens]] begin[:] if <ast.BoolOp object at 0x7da20e955270> begin[:] break for taget[name[part]] in starred[call[name[reversed], parameter[name[tokens]]]] begin[:] if <ast.BoolOp object at 0x7da20cabe170> begin[:] break return[call[constant[ ].join, parameter[call[name[tokens]][<ast.Slice object at 0x7da20c6e75b0>]]]]
keyword[def] identifier[extract_expression] ( identifier[dirty_string] , identifier[language] ): literal[string] identifier[tokens] = identifier[tokenize] ( identifier[dirty_string] , identifier[language] ) identifier[start_index] = literal[int] identifier[end_index] = identifier[len] ( identifier[tokens] ) keyword[for] identifier[part] keyword[in] identifier[tokens] : keyword[if] identifier[is_symbol] ( identifier[part] ) keyword[or] identifier[is_word] ( identifier[part] , identifier[language] ): keyword[break] keyword[else] : identifier[start_index] += literal[int] keyword[for] identifier[part] keyword[in] identifier[reversed] ( identifier[tokens] ): keyword[if] identifier[is_symbol] ( identifier[part] ) keyword[or] identifier[is_word] ( identifier[part] , identifier[language] ): keyword[break] keyword[else] : identifier[end_index] -= literal[int] keyword[return] literal[string] . identifier[join] ( identifier[tokens] [ identifier[start_index] : identifier[end_index] ])
def extract_expression(dirty_string, language): """ Give a string such as: "What is 4 + 4?" Return the string "4 + 4" """ tokens = tokenize(dirty_string, language) start_index = 0 end_index = len(tokens) for part in tokens: if is_symbol(part) or is_word(part, language): break # depends on [control=['if'], data=[]] else: start_index += 1 # depends on [control=['for'], data=['part']] for part in reversed(tokens): if is_symbol(part) or is_word(part, language): break # depends on [control=['if'], data=[]] else: end_index -= 1 # depends on [control=['for'], data=['part']] return ' '.join(tokens[start_index:end_index])
def main(): """ Example application that watches for an event from a specific RF device. This feature allows you to watch for events from RF devices if you have an RF receiver. This is useful in the case of internal sensors, which don't emit a FAULT if the sensor is tripped and the panel is armed STAY. It also will monitor sensors that aren't configured. NOTE: You must have an RF receiver installed and enabled in your panel for RFX messages to be seen. """ try: # Retrieve the first USB device device = AlarmDecoder(SerialDevice(interface=SERIAL_DEVICE)) # Set up an event handler and open the device device.on_rfx_message += handle_rfx with device.open(baudrate=BAUDRATE): while True: time.sleep(1) except Exception as ex: print('Exception:', ex)
def function[main, parameter[]]: constant[ Example application that watches for an event from a specific RF device. This feature allows you to watch for events from RF devices if you have an RF receiver. This is useful in the case of internal sensors, which don't emit a FAULT if the sensor is tripped and the panel is armed STAY. It also will monitor sensors that aren't configured. NOTE: You must have an RF receiver installed and enabled in your panel for RFX messages to be seen. ] <ast.Try object at 0x7da1b2727070>
keyword[def] identifier[main] (): literal[string] keyword[try] : identifier[device] = identifier[AlarmDecoder] ( identifier[SerialDevice] ( identifier[interface] = identifier[SERIAL_DEVICE] )) identifier[device] . identifier[on_rfx_message] += identifier[handle_rfx] keyword[with] identifier[device] . identifier[open] ( identifier[baudrate] = identifier[BAUDRATE] ): keyword[while] keyword[True] : identifier[time] . identifier[sleep] ( literal[int] ) keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[print] ( literal[string] , identifier[ex] )
def main(): """ Example application that watches for an event from a specific RF device. This feature allows you to watch for events from RF devices if you have an RF receiver. This is useful in the case of internal sensors, which don't emit a FAULT if the sensor is tripped and the panel is armed STAY. It also will monitor sensors that aren't configured. NOTE: You must have an RF receiver installed and enabled in your panel for RFX messages to be seen. """ try: # Retrieve the first USB device device = AlarmDecoder(SerialDevice(interface=SERIAL_DEVICE)) # Set up an event handler and open the device device.on_rfx_message += handle_rfx with device.open(baudrate=BAUDRATE): while True: time.sleep(1) # depends on [control=['while'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]] except Exception as ex: print('Exception:', ex) # depends on [control=['except'], data=['ex']]
def captures(self, uuid, withTitles=False): """Return the captures for a given uuid optional value withTitles=yes""" picker = lambda x: x.get('capture', []) return self._get((uuid,), picker, withTitles='yes' if withTitles else 'no')
def function[captures, parameter[self, uuid, withTitles]]: constant[Return the captures for a given uuid optional value withTitles=yes] variable[picker] assign[=] <ast.Lambda object at 0x7da1b28fc280> return[call[name[self]._get, parameter[tuple[[<ast.Name object at 0x7da1b28ffbe0>]], name[picker]]]]
keyword[def] identifier[captures] ( identifier[self] , identifier[uuid] , identifier[withTitles] = keyword[False] ): literal[string] identifier[picker] = keyword[lambda] identifier[x] : identifier[x] . identifier[get] ( literal[string] ,[]) keyword[return] identifier[self] . identifier[_get] (( identifier[uuid] ,), identifier[picker] , identifier[withTitles] = literal[string] keyword[if] identifier[withTitles] keyword[else] literal[string] )
def captures(self, uuid, withTitles=False): """Return the captures for a given uuid optional value withTitles=yes""" picker = lambda x: x.get('capture', []) return self._get((uuid,), picker, withTitles='yes' if withTitles else 'no')
def add_controls(self, env, target_name='control', file_name='control.json', encoder_cls=SConsEncoder): """ Adds a target to build a control file at each of the current leaves. :param env: SCons Environment object :param target_name: Name for target in nest :param file_name: Name for output file. """ if not HAS_SCONS: raise ImportError('SCons not available') @self.add_target(name=target_name) def control(outdir, c): return env.Command(os.path.join(outdir, file_name), [], action=_create_control_file, control_dict=c, encoder_cls=encoder_cls)
def function[add_controls, parameter[self, env, target_name, file_name, encoder_cls]]: constant[ Adds a target to build a control file at each of the current leaves. :param env: SCons Environment object :param target_name: Name for target in nest :param file_name: Name for output file. ] if <ast.UnaryOp object at 0x7da204621d50> begin[:] <ast.Raise object at 0x7da204623ca0> def function[control, parameter[outdir, c]]: return[call[name[env].Command, parameter[call[name[os].path.join, parameter[name[outdir], name[file_name]]], list[[]]]]]
keyword[def] identifier[add_controls] ( identifier[self] , identifier[env] , identifier[target_name] = literal[string] , identifier[file_name] = literal[string] , identifier[encoder_cls] = identifier[SConsEncoder] ): literal[string] keyword[if] keyword[not] identifier[HAS_SCONS] : keyword[raise] identifier[ImportError] ( literal[string] ) @ identifier[self] . identifier[add_target] ( identifier[name] = identifier[target_name] ) keyword[def] identifier[control] ( identifier[outdir] , identifier[c] ): keyword[return] identifier[env] . identifier[Command] ( identifier[os] . identifier[path] . identifier[join] ( identifier[outdir] , identifier[file_name] ), [], identifier[action] = identifier[_create_control_file] , identifier[control_dict] = identifier[c] , identifier[encoder_cls] = identifier[encoder_cls] )
def add_controls(self, env, target_name='control', file_name='control.json', encoder_cls=SConsEncoder): """ Adds a target to build a control file at each of the current leaves. :param env: SCons Environment object :param target_name: Name for target in nest :param file_name: Name for output file. """ if not HAS_SCONS: raise ImportError('SCons not available') # depends on [control=['if'], data=[]] @self.add_target(name=target_name) def control(outdir, c): return env.Command(os.path.join(outdir, file_name), [], action=_create_control_file, control_dict=c, encoder_cls=encoder_cls)
def payload(self): """The payload property automatically decodes the encapsulated data.""" if self.args_rdf_name: # Now try to create the correct RDFValue. result_cls = self.classes.get(self.args_rdf_name, rdfvalue.RDFString) result = result_cls.FromSerializedString( self.Get("args"), age=self.args_age) return result
def function[payload, parameter[self]]: constant[The payload property automatically decodes the encapsulated data.] if name[self].args_rdf_name begin[:] variable[result_cls] assign[=] call[name[self].classes.get, parameter[name[self].args_rdf_name, name[rdfvalue].RDFString]] variable[result] assign[=] call[name[result_cls].FromSerializedString, parameter[call[name[self].Get, parameter[constant[args]]]]] return[name[result]]
keyword[def] identifier[payload] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[args_rdf_name] : identifier[result_cls] = identifier[self] . identifier[classes] . identifier[get] ( identifier[self] . identifier[args_rdf_name] , identifier[rdfvalue] . identifier[RDFString] ) identifier[result] = identifier[result_cls] . identifier[FromSerializedString] ( identifier[self] . identifier[Get] ( literal[string] ), identifier[age] = identifier[self] . identifier[args_age] ) keyword[return] identifier[result]
def payload(self): """The payload property automatically decodes the encapsulated data.""" if self.args_rdf_name: # Now try to create the correct RDFValue. result_cls = self.classes.get(self.args_rdf_name, rdfvalue.RDFString) result = result_cls.FromSerializedString(self.Get('args'), age=self.args_age) return result # depends on [control=['if'], data=[]]
def get_in_srvc_node_ip_addr(cls, tenant_id): """Retrieves the IN service node IP address. """ if tenant_id not in cls.serv_obj_dict: LOG.error("Fabric not prepared for tenant %s", tenant_id) return tenant_obj = cls.serv_obj_dict.get(tenant_id) in_subnet_dict = tenant_obj.get_in_ip_addr() next_hop = str(netaddr.IPAddress(in_subnet_dict.get('subnet')) + 2) return next_hop
def function[get_in_srvc_node_ip_addr, parameter[cls, tenant_id]]: constant[Retrieves the IN service node IP address. ] if compare[name[tenant_id] <ast.NotIn object at 0x7da2590d7190> name[cls].serv_obj_dict] begin[:] call[name[LOG].error, parameter[constant[Fabric not prepared for tenant %s], name[tenant_id]]] return[None] variable[tenant_obj] assign[=] call[name[cls].serv_obj_dict.get, parameter[name[tenant_id]]] variable[in_subnet_dict] assign[=] call[name[tenant_obj].get_in_ip_addr, parameter[]] variable[next_hop] assign[=] call[name[str], parameter[binary_operation[call[name[netaddr].IPAddress, parameter[call[name[in_subnet_dict].get, parameter[constant[subnet]]]]] + constant[2]]]] return[name[next_hop]]
keyword[def] identifier[get_in_srvc_node_ip_addr] ( identifier[cls] , identifier[tenant_id] ): literal[string] keyword[if] identifier[tenant_id] keyword[not] keyword[in] identifier[cls] . identifier[serv_obj_dict] : identifier[LOG] . identifier[error] ( literal[string] , identifier[tenant_id] ) keyword[return] identifier[tenant_obj] = identifier[cls] . identifier[serv_obj_dict] . identifier[get] ( identifier[tenant_id] ) identifier[in_subnet_dict] = identifier[tenant_obj] . identifier[get_in_ip_addr] () identifier[next_hop] = identifier[str] ( identifier[netaddr] . identifier[IPAddress] ( identifier[in_subnet_dict] . identifier[get] ( literal[string] ))+ literal[int] ) keyword[return] identifier[next_hop]
def get_in_srvc_node_ip_addr(cls, tenant_id): """Retrieves the IN service node IP address. """ if tenant_id not in cls.serv_obj_dict: LOG.error('Fabric not prepared for tenant %s', tenant_id) return # depends on [control=['if'], data=['tenant_id']] tenant_obj = cls.serv_obj_dict.get(tenant_id) in_subnet_dict = tenant_obj.get_in_ip_addr() next_hop = str(netaddr.IPAddress(in_subnet_dict.get('subnet')) + 2) return next_hop
def scramble_string(s, key): """ s is the puzzle's solution in column-major order, omitting black squares: i.e. if the puzzle is: C A T # # A # # R solution is CATAR Key is a 4-digit number in the range 1000 <= key <= 9999 """ key = key_digits(key) for k in key: # foreach digit in the key s = shift(s, key) # for each char by each digit in the key in sequence s = s[k:] + s[:k] # cut the sequence around the key digit s = shuffle(s) # do a 1:1 shuffle of the 'deck' return s
def function[scramble_string, parameter[s, key]]: constant[ s is the puzzle's solution in column-major order, omitting black squares: i.e. if the puzzle is: C A T # # A # # R solution is CATAR Key is a 4-digit number in the range 1000 <= key <= 9999 ] variable[key] assign[=] call[name[key_digits], parameter[name[key]]] for taget[name[k]] in starred[name[key]] begin[:] variable[s] assign[=] call[name[shift], parameter[name[s], name[key]]] variable[s] assign[=] binary_operation[call[name[s]][<ast.Slice object at 0x7da20c76fd30>] + call[name[s]][<ast.Slice object at 0x7da20c76ce80>]] variable[s] assign[=] call[name[shuffle], parameter[name[s]]] return[name[s]]
keyword[def] identifier[scramble_string] ( identifier[s] , identifier[key] ): literal[string] identifier[key] = identifier[key_digits] ( identifier[key] ) keyword[for] identifier[k] keyword[in] identifier[key] : identifier[s] = identifier[shift] ( identifier[s] , identifier[key] ) identifier[s] = identifier[s] [ identifier[k] :]+ identifier[s] [: identifier[k] ] identifier[s] = identifier[shuffle] ( identifier[s] ) keyword[return] identifier[s]
def scramble_string(s, key): """ s is the puzzle's solution in column-major order, omitting black squares: i.e. if the puzzle is: C A T # # A # # R solution is CATAR Key is a 4-digit number in the range 1000 <= key <= 9999 """ key = key_digits(key) for k in key: # foreach digit in the key s = shift(s, key) # for each char by each digit in the key in sequence s = s[k:] + s[:k] # cut the sequence around the key digit s = shuffle(s) # do a 1:1 shuffle of the 'deck' # depends on [control=['for'], data=['k']] return s
def altz_to_utctz_str(altz): """As above, but inverses the operation, returning a string that can be used in commit objects""" utci = -1 * int((float(altz) / 3600) * 100) utcs = str(abs(utci)) utcs = "0" * (4 - len(utcs)) + utcs prefix = (utci < 0 and '-') or '+' return prefix + utcs
def function[altz_to_utctz_str, parameter[altz]]: constant[As above, but inverses the operation, returning a string that can be used in commit objects] variable[utci] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b224bac0> * call[name[int], parameter[binary_operation[binary_operation[call[name[float], parameter[name[altz]]] / constant[3600]] * constant[100]]]]] variable[utcs] assign[=] call[name[str], parameter[call[name[abs], parameter[name[utci]]]]] variable[utcs] assign[=] binary_operation[binary_operation[constant[0] * binary_operation[constant[4] - call[name[len], parameter[name[utcs]]]]] + name[utcs]] variable[prefix] assign[=] <ast.BoolOp object at 0x7da1b2248940> return[binary_operation[name[prefix] + name[utcs]]]
keyword[def] identifier[altz_to_utctz_str] ( identifier[altz] ): literal[string] identifier[utci] =- literal[int] * identifier[int] (( identifier[float] ( identifier[altz] )/ literal[int] )* literal[int] ) identifier[utcs] = identifier[str] ( identifier[abs] ( identifier[utci] )) identifier[utcs] = literal[string] *( literal[int] - identifier[len] ( identifier[utcs] ))+ identifier[utcs] identifier[prefix] =( identifier[utci] < literal[int] keyword[and] literal[string] ) keyword[or] literal[string] keyword[return] identifier[prefix] + identifier[utcs]
def altz_to_utctz_str(altz): """As above, but inverses the operation, returning a string that can be used in commit objects""" utci = -1 * int(float(altz) / 3600 * 100) utcs = str(abs(utci)) utcs = '0' * (4 - len(utcs)) + utcs prefix = utci < 0 and '-' or '+' return prefix + utcs
def install_script(self, dist, script_name, script_text, dev_path=None): """Generate a legacy script wrapper and install it""" spec = str(dist.as_requirement()) is_script = is_python_script(script_text, script_name) if is_script: script_text = (ScriptWriter.get_header(script_text) + self._load_template(dev_path) % locals()) self.write_script(script_name, _to_ascii(script_text), 'b')
def function[install_script, parameter[self, dist, script_name, script_text, dev_path]]: constant[Generate a legacy script wrapper and install it] variable[spec] assign[=] call[name[str], parameter[call[name[dist].as_requirement, parameter[]]]] variable[is_script] assign[=] call[name[is_python_script], parameter[name[script_text], name[script_name]]] if name[is_script] begin[:] variable[script_text] assign[=] binary_operation[call[name[ScriptWriter].get_header, parameter[name[script_text]]] + binary_operation[call[name[self]._load_template, parameter[name[dev_path]]] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]]] call[name[self].write_script, parameter[name[script_name], call[name[_to_ascii], parameter[name[script_text]]], constant[b]]]
keyword[def] identifier[install_script] ( identifier[self] , identifier[dist] , identifier[script_name] , identifier[script_text] , identifier[dev_path] = keyword[None] ): literal[string] identifier[spec] = identifier[str] ( identifier[dist] . identifier[as_requirement] ()) identifier[is_script] = identifier[is_python_script] ( identifier[script_text] , identifier[script_name] ) keyword[if] identifier[is_script] : identifier[script_text] =( identifier[ScriptWriter] . identifier[get_header] ( identifier[script_text] )+ identifier[self] . identifier[_load_template] ( identifier[dev_path] )% identifier[locals] ()) identifier[self] . identifier[write_script] ( identifier[script_name] , identifier[_to_ascii] ( identifier[script_text] ), literal[string] )
def install_script(self, dist, script_name, script_text, dev_path=None): """Generate a legacy script wrapper and install it""" spec = str(dist.as_requirement()) is_script = is_python_script(script_text, script_name) if is_script: script_text = ScriptWriter.get_header(script_text) + self._load_template(dev_path) % locals() # depends on [control=['if'], data=[]] self.write_script(script_name, _to_ascii(script_text), 'b')
def check_patch_size(patch_size): """Validation and typcasting""" patch_size = np.array(patch_size) if patch_size.size == 1: patch_size = np.repeat(patch_size, 2).astype('int16') return patch_size
def function[check_patch_size, parameter[patch_size]]: constant[Validation and typcasting] variable[patch_size] assign[=] call[name[np].array, parameter[name[patch_size]]] if compare[name[patch_size].size equal[==] constant[1]] begin[:] variable[patch_size] assign[=] call[call[name[np].repeat, parameter[name[patch_size], constant[2]]].astype, parameter[constant[int16]]] return[name[patch_size]]
keyword[def] identifier[check_patch_size] ( identifier[patch_size] ): literal[string] identifier[patch_size] = identifier[np] . identifier[array] ( identifier[patch_size] ) keyword[if] identifier[patch_size] . identifier[size] == literal[int] : identifier[patch_size] = identifier[np] . identifier[repeat] ( identifier[patch_size] , literal[int] ). identifier[astype] ( literal[string] ) keyword[return] identifier[patch_size]
def check_patch_size(patch_size): """Validation and typcasting""" patch_size = np.array(patch_size) if patch_size.size == 1: patch_size = np.repeat(patch_size, 2).astype('int16') # depends on [control=['if'], data=[]] return patch_size
def predict_map(interface, state, label, inp): """Determine the closest cluster for the datapoint `e`.""" out = interface.output(0) for row in inp: if len(row) > 1: row = row.strip().split(state["delimiter"]) x_id = "" if state["id_index"] == -1 else row[state["id_index"]] x = [(0 if row[i] in state["missing_vals"] else float(row[i])) for i in state["X_indices"]] out.add(x_id, min([(i, state["dist"](c, x)) for i, c in state["centers"]], key=lambda t: t[1]))
def function[predict_map, parameter[interface, state, label, inp]]: constant[Determine the closest cluster for the datapoint `e`.] variable[out] assign[=] call[name[interface].output, parameter[constant[0]]] for taget[name[row]] in starred[name[inp]] begin[:] if compare[call[name[len], parameter[name[row]]] greater[>] constant[1]] begin[:] variable[row] assign[=] call[call[name[row].strip, parameter[]].split, parameter[call[name[state]][constant[delimiter]]]] variable[x_id] assign[=] <ast.IfExp object at 0x7da20c6a8400> variable[x] assign[=] <ast.ListComp object at 0x7da20c6a8be0> call[name[out].add, parameter[name[x_id], call[name[min], parameter[<ast.ListComp object at 0x7da20c6a8550>]]]]
keyword[def] identifier[predict_map] ( identifier[interface] , identifier[state] , identifier[label] , identifier[inp] ): literal[string] identifier[out] = identifier[interface] . identifier[output] ( literal[int] ) keyword[for] identifier[row] keyword[in] identifier[inp] : keyword[if] identifier[len] ( identifier[row] )> literal[int] : identifier[row] = identifier[row] . identifier[strip] (). identifier[split] ( identifier[state] [ literal[string] ]) identifier[x_id] = literal[string] keyword[if] identifier[state] [ literal[string] ]==- literal[int] keyword[else] identifier[row] [ identifier[state] [ literal[string] ]] identifier[x] =[( literal[int] keyword[if] identifier[row] [ identifier[i] ] keyword[in] identifier[state] [ literal[string] ] keyword[else] identifier[float] ( identifier[row] [ identifier[i] ])) keyword[for] identifier[i] keyword[in] identifier[state] [ literal[string] ]] identifier[out] . identifier[add] ( identifier[x_id] , identifier[min] ([( identifier[i] , identifier[state] [ literal[string] ]( identifier[c] , identifier[x] )) keyword[for] identifier[i] , identifier[c] keyword[in] identifier[state] [ literal[string] ]], identifier[key] = keyword[lambda] identifier[t] : identifier[t] [ literal[int] ]))
def predict_map(interface, state, label, inp): """Determine the closest cluster for the datapoint `e`.""" out = interface.output(0) for row in inp: if len(row) > 1: row = row.strip().split(state['delimiter']) x_id = '' if state['id_index'] == -1 else row[state['id_index']] x = [0 if row[i] in state['missing_vals'] else float(row[i]) for i in state['X_indices']] out.add(x_id, min([(i, state['dist'](c, x)) for (i, c) in state['centers']], key=lambda t: t[1])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']]
def integers(start, count): '''Generates in sequence the integral numbers within a range. Note: This method uses deferred execution. Args: start: The first integer in the sequence. count: The number of sequential integers to generate. Returns: A Queryable over the specified range of integers. Raises: ValueError: If count is negative. ''' if count < 0: raise ValueError("integers() count cannot be negative") return query(irange(start, start + count))
def function[integers, parameter[start, count]]: constant[Generates in sequence the integral numbers within a range. Note: This method uses deferred execution. Args: start: The first integer in the sequence. count: The number of sequential integers to generate. Returns: A Queryable over the specified range of integers. Raises: ValueError: If count is negative. ] if compare[name[count] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da1b1b0dea0> return[call[name[query], parameter[call[name[irange], parameter[name[start], binary_operation[name[start] + name[count]]]]]]]
keyword[def] identifier[integers] ( identifier[start] , identifier[count] ): literal[string] keyword[if] identifier[count] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[query] ( identifier[irange] ( identifier[start] , identifier[start] + identifier[count] ))
def integers(start, count): """Generates in sequence the integral numbers within a range. Note: This method uses deferred execution. Args: start: The first integer in the sequence. count: The number of sequential integers to generate. Returns: A Queryable over the specified range of integers. Raises: ValueError: If count is negative. """ if count < 0: raise ValueError('integers() count cannot be negative') # depends on [control=['if'], data=[]] return query(irange(start, start + count))
async def get_departures(self): """Get departure info from stopid.""" from .common import CommonFunctions common = CommonFunctions(self.loop, self.session) departures = [] endpoint = '{}/StopVisit/GetDepartures/{}'.format(BASE_URL, str(self.stopid)) data = await common.api_call(endpoint) for entries in data or []: try: data = entries['MonitoredVehicleJourney'] if self.destination is not None: if data['DestinationName'] == self.destination: data = entries['MonitoredVehicleJourney'] line = data['LineRef'] destinationname = data['DestinationName'] monitored = data['MonitoredCall'] time = monitored['ExpectedDepartureTime'] departures.append({"time": time, "line": line, "destination": destinationname}) else: data = entries['MonitoredVehicleJourney'] line = data['LineRef'] destinationname = data['DestinationName'] monitored = data['MonitoredCall'] time = monitored['ExpectedDepartureTime'] departures.append({"time": time, "line": line, "destination": destinationname}) except (TypeError, KeyError, IndexError) as error: LOGGER.error('Error connecting to Ruter, %s', error) self._departures = await common.sort_data(departures, 'time')
<ast.AsyncFunctionDef object at 0x7da1b14c6650>
keyword[async] keyword[def] identifier[get_departures] ( identifier[self] ): literal[string] keyword[from] . identifier[common] keyword[import] identifier[CommonFunctions] identifier[common] = identifier[CommonFunctions] ( identifier[self] . identifier[loop] , identifier[self] . identifier[session] ) identifier[departures] =[] identifier[endpoint] = literal[string] . identifier[format] ( identifier[BASE_URL] , identifier[str] ( identifier[self] . identifier[stopid] )) identifier[data] = keyword[await] identifier[common] . identifier[api_call] ( identifier[endpoint] ) keyword[for] identifier[entries] keyword[in] identifier[data] keyword[or] []: keyword[try] : identifier[data] = identifier[entries] [ literal[string] ] keyword[if] identifier[self] . identifier[destination] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[data] [ literal[string] ]== identifier[self] . identifier[destination] : identifier[data] = identifier[entries] [ literal[string] ] identifier[line] = identifier[data] [ literal[string] ] identifier[destinationname] = identifier[data] [ literal[string] ] identifier[monitored] = identifier[data] [ literal[string] ] identifier[time] = identifier[monitored] [ literal[string] ] identifier[departures] . identifier[append] ({ literal[string] : identifier[time] , literal[string] : identifier[line] , literal[string] : identifier[destinationname] }) keyword[else] : identifier[data] = identifier[entries] [ literal[string] ] identifier[line] = identifier[data] [ literal[string] ] identifier[destinationname] = identifier[data] [ literal[string] ] identifier[monitored] = identifier[data] [ literal[string] ] identifier[time] = identifier[monitored] [ literal[string] ] identifier[departures] . identifier[append] ({ literal[string] : identifier[time] , literal[string] : identifier[line] , literal[string] : identifier[destinationname] }) keyword[except] ( identifier[TypeError] , identifier[KeyError] , identifier[IndexError] ) keyword[as] identifier[error] : identifier[LOGGER] . identifier[error] ( literal[string] , identifier[error] ) identifier[self] . identifier[_departures] = keyword[await] identifier[common] . identifier[sort_data] ( identifier[departures] , literal[string] )
async def get_departures(self): """Get departure info from stopid.""" from .common import CommonFunctions common = CommonFunctions(self.loop, self.session) departures = [] endpoint = '{}/StopVisit/GetDepartures/{}'.format(BASE_URL, str(self.stopid)) data = await common.api_call(endpoint) for entries in data or []: try: data = entries['MonitoredVehicleJourney'] if self.destination is not None: if data['DestinationName'] == self.destination: data = entries['MonitoredVehicleJourney'] line = data['LineRef'] destinationname = data['DestinationName'] monitored = data['MonitoredCall'] time = monitored['ExpectedDepartureTime'] departures.append({'time': time, 'line': line, 'destination': destinationname}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: data = entries['MonitoredVehicleJourney'] line = data['LineRef'] destinationname = data['DestinationName'] monitored = data['MonitoredCall'] time = monitored['ExpectedDepartureTime'] departures.append({'time': time, 'line': line, 'destination': destinationname}) # depends on [control=['try'], data=[]] except (TypeError, KeyError, IndexError) as error: LOGGER.error('Error connecting to Ruter, %s', error) # depends on [control=['except'], data=['error']] # depends on [control=['for'], data=['entries']] self._departures = await common.sort_data(departures, 'time')
def load(file,line=None,options={},block=None): ''' Create a new template for the given file using the file's extension to determine the the template mapping. ''' template_class = Lean.get_template(file) if template_class: return template_class(file,line,options,block) else: raise LookupError('No template engine registered for ' + os.path.basename(file))
def function[load, parameter[file, line, options, block]]: constant[ Create a new template for the given file using the file's extension to determine the the template mapping. ] variable[template_class] assign[=] call[name[Lean].get_template, parameter[name[file]]] if name[template_class] begin[:] return[call[name[template_class], parameter[name[file], name[line], name[options], name[block]]]]
keyword[def] identifier[load] ( identifier[file] , identifier[line] = keyword[None] , identifier[options] ={}, identifier[block] = keyword[None] ): literal[string] identifier[template_class] = identifier[Lean] . identifier[get_template] ( identifier[file] ) keyword[if] identifier[template_class] : keyword[return] identifier[template_class] ( identifier[file] , identifier[line] , identifier[options] , identifier[block] ) keyword[else] : keyword[raise] identifier[LookupError] ( literal[string] + identifier[os] . identifier[path] . identifier[basename] ( identifier[file] ))
def load(file, line=None, options={}, block=None): """ Create a new template for the given file using the file's extension to determine the the template mapping. """ template_class = Lean.get_template(file) if template_class: return template_class(file, line, options, block) # depends on [control=['if'], data=[]] else: raise LookupError('No template engine registered for ' + os.path.basename(file))
def can_allocate(self, nodes, pos=None): # TODO: check docstring """Returns True if this route can allocate nodes in `nodes` list Parameters ---------- nodes : type Desc pos : type, defaults to None Desc Returns ------- bool True if this route can allocate nodes in `nodes` list """ # clone route and nodes new_route = self.clone() new_nodes = [node.clone() for node in nodes] if pos is None: pos = len(self._nodes) new_route._nodes = new_route._nodes[:pos] + new_nodes + new_route._nodes[pos:] new_route._demand = sum([node.demand() for node in new_route._nodes]) if new_route.tech_constraints_satisfied(): return True return False
def function[can_allocate, parameter[self, nodes, pos]]: constant[Returns True if this route can allocate nodes in `nodes` list Parameters ---------- nodes : type Desc pos : type, defaults to None Desc Returns ------- bool True if this route can allocate nodes in `nodes` list ] variable[new_route] assign[=] call[name[self].clone, parameter[]] variable[new_nodes] assign[=] <ast.ListComp object at 0x7da1b2345240> if compare[name[pos] is constant[None]] begin[:] variable[pos] assign[=] call[name[len], parameter[name[self]._nodes]] name[new_route]._nodes assign[=] binary_operation[binary_operation[call[name[new_route]._nodes][<ast.Slice object at 0x7da1b2347af0>] + name[new_nodes]] + call[name[new_route]._nodes][<ast.Slice object at 0x7da1b2346f50>]] name[new_route]._demand assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b23476d0>]] if call[name[new_route].tech_constraints_satisfied, parameter[]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[can_allocate] ( identifier[self] , identifier[nodes] , identifier[pos] = keyword[None] ): literal[string] identifier[new_route] = identifier[self] . identifier[clone] () identifier[new_nodes] =[ identifier[node] . identifier[clone] () keyword[for] identifier[node] keyword[in] identifier[nodes] ] keyword[if] identifier[pos] keyword[is] keyword[None] : identifier[pos] = identifier[len] ( identifier[self] . identifier[_nodes] ) identifier[new_route] . identifier[_nodes] = identifier[new_route] . identifier[_nodes] [: identifier[pos] ]+ identifier[new_nodes] + identifier[new_route] . identifier[_nodes] [ identifier[pos] :] identifier[new_route] . identifier[_demand] = identifier[sum] ([ identifier[node] . identifier[demand] () keyword[for] identifier[node] keyword[in] identifier[new_route] . identifier[_nodes] ]) keyword[if] identifier[new_route] . identifier[tech_constraints_satisfied] (): keyword[return] keyword[True] keyword[return] keyword[False]
def can_allocate(self, nodes, pos=None): # TODO: check docstring 'Returns True if this route can allocate nodes in `nodes` list\n \n Parameters\n ----------\n nodes : type\n Desc\n pos : type, defaults to None\n Desc\n \n Returns\n -------\n bool\n True if this route can allocate nodes in `nodes` list\n ' # clone route and nodes new_route = self.clone() new_nodes = [node.clone() for node in nodes] if pos is None: pos = len(self._nodes) # depends on [control=['if'], data=['pos']] new_route._nodes = new_route._nodes[:pos] + new_nodes + new_route._nodes[pos:] new_route._demand = sum([node.demand() for node in new_route._nodes]) if new_route.tech_constraints_satisfied(): return True # depends on [control=['if'], data=[]] return False
def description(self): """Provides a 7-item tuple compatible with the Python PEP249 DB Spec.""" return ( self.name, self.type_code, None, # TODO: display_length; should this be self.length? self.get_column_length(), # 'internal_size' self.get_column_length(), # 'precision' # TODO: why!?!? self.scale, self.flags % 2 == 0)
def function[description, parameter[self]]: constant[Provides a 7-item tuple compatible with the Python PEP249 DB Spec.] return[tuple[[<ast.Attribute object at 0x7da18f00c1c0>, <ast.Attribute object at 0x7da18f00e0e0>, <ast.Constant object at 0x7da18f00e6b0>, <ast.Call object at 0x7da18f00c6d0>, <ast.Call object at 0x7da18f58eb00>, <ast.Attribute object at 0x7da18f58dff0>, <ast.Compare object at 0x7da18f58ec50>]]]
keyword[def] identifier[description] ( identifier[self] ): literal[string] keyword[return] ( identifier[self] . identifier[name] , identifier[self] . identifier[type_code] , keyword[None] , identifier[self] . identifier[get_column_length] (), identifier[self] . identifier[get_column_length] (), identifier[self] . identifier[scale] , identifier[self] . identifier[flags] % literal[int] == literal[int] )
def description(self): """Provides a 7-item tuple compatible with the Python PEP249 DB Spec.""" # TODO: display_length; should this be self.length? # 'internal_size' # 'precision' # TODO: why!?!? return (self.name, self.type_code, None, self.get_column_length(), self.get_column_length(), self.scale, self.flags % 2 == 0)
async def _send_request(self, request_type, payload): """Uses an executor to send an asynchronous ZMQ request to the validator with the handler's Connection """ try: return await self._connection.send( message_type=request_type, message_content=payload, timeout=self._timeout) except DisconnectError: LOGGER.warning('Validator disconnected while waiting for response') raise errors.ValidatorDisconnected() except asyncio.TimeoutError: LOGGER.warning('Timed out while waiting for validator response') raise errors.ValidatorTimedOut() except SendBackoffTimeoutError: LOGGER.warning('Failed sending message - Backoff timed out') raise errors.SendBackoffTimeout()
<ast.AsyncFunctionDef object at 0x7da204565de0>
keyword[async] keyword[def] identifier[_send_request] ( identifier[self] , identifier[request_type] , identifier[payload] ): literal[string] keyword[try] : keyword[return] keyword[await] identifier[self] . identifier[_connection] . identifier[send] ( identifier[message_type] = identifier[request_type] , identifier[message_content] = identifier[payload] , identifier[timeout] = identifier[self] . identifier[_timeout] ) keyword[except] identifier[DisconnectError] : identifier[LOGGER] . identifier[warning] ( literal[string] ) keyword[raise] identifier[errors] . identifier[ValidatorDisconnected] () keyword[except] identifier[asyncio] . identifier[TimeoutError] : identifier[LOGGER] . identifier[warning] ( literal[string] ) keyword[raise] identifier[errors] . identifier[ValidatorTimedOut] () keyword[except] identifier[SendBackoffTimeoutError] : identifier[LOGGER] . identifier[warning] ( literal[string] ) keyword[raise] identifier[errors] . identifier[SendBackoffTimeout] ()
async def _send_request(self, request_type, payload): """Uses an executor to send an asynchronous ZMQ request to the validator with the handler's Connection """ try: return await self._connection.send(message_type=request_type, message_content=payload, timeout=self._timeout) # depends on [control=['try'], data=[]] except DisconnectError: LOGGER.warning('Validator disconnected while waiting for response') raise errors.ValidatorDisconnected() # depends on [control=['except'], data=[]] except asyncio.TimeoutError: LOGGER.warning('Timed out while waiting for validator response') raise errors.ValidatorTimedOut() # depends on [control=['except'], data=[]] except SendBackoffTimeoutError: LOGGER.warning('Failed sending message - Backoff timed out') raise errors.SendBackoffTimeout() # depends on [control=['except'], data=[]]
def make_connection(transport, **kwargs): """ Creates a connection instance based on the transport This function creates the EapiConnection object based on the desired transport. It looks up the transport class in the TRANSPORTS global dictionary. Args: transport (string): The transport to use to create the instance. **kwargs: Arbitrary keyword arguments. Returns: An instance of a connection object based on the transport Raises: TypeError: A TypeError is raised if the transport keyword is not found in the list (keys) of available transports. """ if transport not in TRANSPORTS: raise TypeError('invalid transport specified') klass = TRANSPORTS[transport] return klass(**kwargs)
def function[make_connection, parameter[transport]]: constant[ Creates a connection instance based on the transport This function creates the EapiConnection object based on the desired transport. It looks up the transport class in the TRANSPORTS global dictionary. Args: transport (string): The transport to use to create the instance. **kwargs: Arbitrary keyword arguments. Returns: An instance of a connection object based on the transport Raises: TypeError: A TypeError is raised if the transport keyword is not found in the list (keys) of available transports. ] if compare[name[transport] <ast.NotIn object at 0x7da2590d7190> name[TRANSPORTS]] begin[:] <ast.Raise object at 0x7da2041d9900> variable[klass] assign[=] call[name[TRANSPORTS]][name[transport]] return[call[name[klass], parameter[]]]
keyword[def] identifier[make_connection] ( identifier[transport] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[transport] keyword[not] keyword[in] identifier[TRANSPORTS] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[klass] = identifier[TRANSPORTS] [ identifier[transport] ] keyword[return] identifier[klass] (** identifier[kwargs] )
def make_connection(transport, **kwargs): """ Creates a connection instance based on the transport This function creates the EapiConnection object based on the desired transport. It looks up the transport class in the TRANSPORTS global dictionary. Args: transport (string): The transport to use to create the instance. **kwargs: Arbitrary keyword arguments. Returns: An instance of a connection object based on the transport Raises: TypeError: A TypeError is raised if the transport keyword is not found in the list (keys) of available transports. """ if transport not in TRANSPORTS: raise TypeError('invalid transport specified') # depends on [control=['if'], data=[]] klass = TRANSPORTS[transport] return klass(**kwargs)
def getChildElementsByTagName(self, tagName): """ Return child elements of type tagName if found, else [] """ result = [] for child in self.childNodes: if isinstance(child, Element): if child.tagName == tagName: result.append(child) return result
def function[getChildElementsByTagName, parameter[self, tagName]]: constant[ Return child elements of type tagName if found, else [] ] variable[result] assign[=] list[[]] for taget[name[child]] in starred[name[self].childNodes] begin[:] if call[name[isinstance], parameter[name[child], name[Element]]] begin[:] if compare[name[child].tagName equal[==] name[tagName]] begin[:] call[name[result].append, parameter[name[child]]] return[name[result]]
keyword[def] identifier[getChildElementsByTagName] ( identifier[self] , identifier[tagName] ): literal[string] identifier[result] =[] keyword[for] identifier[child] keyword[in] identifier[self] . identifier[childNodes] : keyword[if] identifier[isinstance] ( identifier[child] , identifier[Element] ): keyword[if] identifier[child] . identifier[tagName] == identifier[tagName] : identifier[result] . identifier[append] ( identifier[child] ) keyword[return] identifier[result]
def getChildElementsByTagName(self, tagName): """ Return child elements of type tagName if found, else [] """ result = [] for child in self.childNodes: if isinstance(child, Element): if child.tagName == tagName: result.append(child) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child']] return result
def convert_sent_to_conll(sent_ls: List[Extraction]): """ Given a list of extractions for a single sentence - convert it to conll representation. """ # Sanity check - make sure all extractions are on the same sentence assert(len(set([ex.sent for ex in sent_ls])) == 1) toks = sent_ls[0].sent.split(' ') return safe_zip(*[range(len(toks)), toks] + \ [extraction_to_conll(ex) for ex in sent_ls])
def function[convert_sent_to_conll, parameter[sent_ls]]: constant[ Given a list of extractions for a single sentence - convert it to conll representation. ] assert[compare[call[name[len], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da204963ca0>]]]] equal[==] constant[1]]] variable[toks] assign[=] call[call[name[sent_ls]][constant[0]].sent.split, parameter[constant[ ]]] return[call[name[safe_zip], parameter[<ast.Starred object at 0x7da2049601c0>]]]
keyword[def] identifier[convert_sent_to_conll] ( identifier[sent_ls] : identifier[List] [ identifier[Extraction] ]): literal[string] keyword[assert] ( identifier[len] ( identifier[set] ([ identifier[ex] . identifier[sent] keyword[for] identifier[ex] keyword[in] identifier[sent_ls] ]))== literal[int] ) identifier[toks] = identifier[sent_ls] [ literal[int] ]. identifier[sent] . identifier[split] ( literal[string] ) keyword[return] identifier[safe_zip] (*[ identifier[range] ( identifier[len] ( identifier[toks] )), identifier[toks] ]+[ identifier[extraction_to_conll] ( identifier[ex] ) keyword[for] identifier[ex] keyword[in] identifier[sent_ls] ])
def convert_sent_to_conll(sent_ls: List[Extraction]): """ Given a list of extractions for a single sentence - convert it to conll representation. """ # Sanity check - make sure all extractions are on the same sentence assert len(set([ex.sent for ex in sent_ls])) == 1 toks = sent_ls[0].sent.split(' ') return safe_zip(*[range(len(toks)), toks] + [extraction_to_conll(ex) for ex in sent_ls])
def findspans(self, type,set=None): """Find span annotation of the specified type that include this word""" if issubclass(type, AbstractAnnotationLayer): layerclass = type else: layerclass = ANNOTATIONTYPE2LAYERCLASS[type.ANNOTATIONTYPE] e = self while True: if not e.parent: break e = e.parent for layer in e.select(layerclass,set,False): for e2 in layer: if isinstance(e2, AbstractSpanAnnotation): if self in e2.wrefs(): yield e2
def function[findspans, parameter[self, type, set]]: constant[Find span annotation of the specified type that include this word] if call[name[issubclass], parameter[name[type], name[AbstractAnnotationLayer]]] begin[:] variable[layerclass] assign[=] name[type] variable[e] assign[=] name[self] while constant[True] begin[:] if <ast.UnaryOp object at 0x7da204344280> begin[:] break variable[e] assign[=] name[e].parent for taget[name[layer]] in starred[call[name[e].select, parameter[name[layerclass], name[set], constant[False]]]] begin[:] for taget[name[e2]] in starred[name[layer]] begin[:] if call[name[isinstance], parameter[name[e2], name[AbstractSpanAnnotation]]] begin[:] if compare[name[self] in call[name[e2].wrefs, parameter[]]] begin[:] <ast.Yield object at 0x7da204346680>
keyword[def] identifier[findspans] ( identifier[self] , identifier[type] , identifier[set] = keyword[None] ): literal[string] keyword[if] identifier[issubclass] ( identifier[type] , identifier[AbstractAnnotationLayer] ): identifier[layerclass] = identifier[type] keyword[else] : identifier[layerclass] = identifier[ANNOTATIONTYPE2LAYERCLASS] [ identifier[type] . identifier[ANNOTATIONTYPE] ] identifier[e] = identifier[self] keyword[while] keyword[True] : keyword[if] keyword[not] identifier[e] . identifier[parent] : keyword[break] identifier[e] = identifier[e] . identifier[parent] keyword[for] identifier[layer] keyword[in] identifier[e] . identifier[select] ( identifier[layerclass] , identifier[set] , keyword[False] ): keyword[for] identifier[e2] keyword[in] identifier[layer] : keyword[if] identifier[isinstance] ( identifier[e2] , identifier[AbstractSpanAnnotation] ): keyword[if] identifier[self] keyword[in] identifier[e2] . identifier[wrefs] (): keyword[yield] identifier[e2]
def findspans(self, type, set=None): """Find span annotation of the specified type that include this word""" if issubclass(type, AbstractAnnotationLayer): layerclass = type # depends on [control=['if'], data=[]] else: layerclass = ANNOTATIONTYPE2LAYERCLASS[type.ANNOTATIONTYPE] e = self while True: if not e.parent: break # depends on [control=['if'], data=[]] e = e.parent for layer in e.select(layerclass, set, False): for e2 in layer: if isinstance(e2, AbstractSpanAnnotation): if self in e2.wrefs(): yield e2 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e2']] # depends on [control=['for'], data=['layer']] # depends on [control=['while'], data=[]]
def _builtin_help(self, args): """Return help information for a context or function.""" if len(args) == 0: return self.list_dir(self.contexts[-1]) if len(args) == 1: func = self.find_function(self.contexts[-1], args[0]) return annotate.get_help(func) help_text = "Too many arguments: " + str(args) + "\n" help_text += "Usage: help [function]" return help_text
def function[_builtin_help, parameter[self, args]]: constant[Return help information for a context or function.] if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:] return[call[name[self].list_dir, parameter[call[name[self].contexts][<ast.UnaryOp object at 0x7da1b0211fc0>]]]] if compare[call[name[len], parameter[name[args]]] equal[==] constant[1]] begin[:] variable[func] assign[=] call[name[self].find_function, parameter[call[name[self].contexts][<ast.UnaryOp object at 0x7da1b025f070>], call[name[args]][constant[0]]]] return[call[name[annotate].get_help, parameter[name[func]]]] variable[help_text] assign[=] binary_operation[binary_operation[constant[Too many arguments: ] + call[name[str], parameter[name[args]]]] + constant[ ]] <ast.AugAssign object at 0x7da1b025d510> return[name[help_text]]
keyword[def] identifier[_builtin_help] ( identifier[self] , identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] )== literal[int] : keyword[return] identifier[self] . identifier[list_dir] ( identifier[self] . identifier[contexts] [- literal[int] ]) keyword[if] identifier[len] ( identifier[args] )== literal[int] : identifier[func] = identifier[self] . identifier[find_function] ( identifier[self] . identifier[contexts] [- literal[int] ], identifier[args] [ literal[int] ]) keyword[return] identifier[annotate] . identifier[get_help] ( identifier[func] ) identifier[help_text] = literal[string] + identifier[str] ( identifier[args] )+ literal[string] identifier[help_text] += literal[string] keyword[return] identifier[help_text]
def _builtin_help(self, args): """Return help information for a context or function.""" if len(args) == 0: return self.list_dir(self.contexts[-1]) # depends on [control=['if'], data=[]] if len(args) == 1: func = self.find_function(self.contexts[-1], args[0]) return annotate.get_help(func) # depends on [control=['if'], data=[]] help_text = 'Too many arguments: ' + str(args) + '\n' help_text += 'Usage: help [function]' return help_text
def create_content(self, cli, width, height): """ Create a UIContent. """ buffer = self._buffer(cli) # Get the document to be shown. If we are currently searching (the # search buffer has focus, and the preview_search filter is enabled), # then use the search document, which has possibly a different # text/cursor position.) def preview_now(): """ True when we should preview a search. """ return bool(self.preview_search(cli) and cli.buffers[self.search_buffer_name].text) if preview_now(): if self.get_search_state: ss = self.get_search_state(cli) else: ss = cli.search_state document = buffer.document_for_search(SearchState( text=cli.current_buffer.text, direction=ss.direction, ignore_case=ss.ignore_case)) else: document = buffer.document get_processed_line = self._create_get_processed_line_func(cli, document) self._last_get_processed_line = get_processed_line def translate_rowcol(row, col): " Return the content column for this coordinate. " return Point(y=row, x=get_processed_line(row).source_to_display(col)) def get_line(i): " Return the tokens for a given line number. " tokens = get_processed_line(i).tokens # Add a space at the end, because that is a possible cursor # position. (When inserting after the input.) We should do this on # all the lines, not just the line containing the cursor. (Because # otherwise, line wrapping/scrolling could change when moving the # cursor around.) tokens = tokens + [(self.default_char.token, ' ')] return tokens content = UIContent( get_line=get_line, line_count=document.line_count, cursor_position=translate_rowcol(document.cursor_position_row, document.cursor_position_col), default_char=self.default_char) # If there is an auto completion going on, use that start point for a # pop-up menu position. (But only when this buffer has the focus -- # there is only one place for a menu, determined by the focussed buffer.) if cli.current_buffer_name == self.buffer_name: menu_position = self.menu_position(cli) if self.menu_position else None if menu_position is not None: assert isinstance(menu_position, int) menu_row, menu_col = buffer.document.translate_index_to_position(menu_position) content.menu_position = translate_rowcol(menu_row, menu_col) elif buffer.complete_state: # Position for completion menu. # Note: We use 'min', because the original cursor position could be # behind the input string when the actual completion is for # some reason shorter than the text we had before. (A completion # can change and shorten the input.) menu_row, menu_col = buffer.document.translate_index_to_position( min(buffer.cursor_position, buffer.complete_state.original_document.cursor_position)) content.menu_position = translate_rowcol(menu_row, menu_col) else: content.menu_position = None return content
def function[create_content, parameter[self, cli, width, height]]: constant[ Create a UIContent. ] variable[buffer] assign[=] call[name[self]._buffer, parameter[name[cli]]] def function[preview_now, parameter[]]: constant[ True when we should preview a search. ] return[call[name[bool], parameter[<ast.BoolOp object at 0x7da20c6abe20>]]] if call[name[preview_now], parameter[]] begin[:] if name[self].get_search_state begin[:] variable[ss] assign[=] call[name[self].get_search_state, parameter[name[cli]]] variable[document] assign[=] call[name[buffer].document_for_search, parameter[call[name[SearchState], parameter[]]]] variable[get_processed_line] assign[=] call[name[self]._create_get_processed_line_func, parameter[name[cli], name[document]]] name[self]._last_get_processed_line assign[=] name[get_processed_line] def function[translate_rowcol, parameter[row, col]]: constant[ Return the content column for this coordinate. ] return[call[name[Point], parameter[]]] def function[get_line, parameter[i]]: constant[ Return the tokens for a given line number. ] variable[tokens] assign[=] call[name[get_processed_line], parameter[name[i]]].tokens variable[tokens] assign[=] binary_operation[name[tokens] + list[[<ast.Tuple object at 0x7da18f58c100>]]] return[name[tokens]] variable[content] assign[=] call[name[UIContent], parameter[]] if compare[name[cli].current_buffer_name equal[==] name[self].buffer_name] begin[:] variable[menu_position] assign[=] <ast.IfExp object at 0x7da18f58ecb0> if compare[name[menu_position] is_not constant[None]] begin[:] assert[call[name[isinstance], parameter[name[menu_position], name[int]]]] <ast.Tuple object at 0x7da204567160> assign[=] call[name[buffer].document.translate_index_to_position, parameter[name[menu_position]]] name[content].menu_position assign[=] call[name[translate_rowcol], parameter[name[menu_row], name[menu_col]]] return[name[content]]
keyword[def] identifier[create_content] ( identifier[self] , identifier[cli] , identifier[width] , identifier[height] ): literal[string] identifier[buffer] = identifier[self] . identifier[_buffer] ( identifier[cli] ) keyword[def] identifier[preview_now] (): literal[string] keyword[return] identifier[bool] ( identifier[self] . identifier[preview_search] ( identifier[cli] ) keyword[and] identifier[cli] . identifier[buffers] [ identifier[self] . identifier[search_buffer_name] ]. identifier[text] ) keyword[if] identifier[preview_now] (): keyword[if] identifier[self] . identifier[get_search_state] : identifier[ss] = identifier[self] . identifier[get_search_state] ( identifier[cli] ) keyword[else] : identifier[ss] = identifier[cli] . identifier[search_state] identifier[document] = identifier[buffer] . identifier[document_for_search] ( identifier[SearchState] ( identifier[text] = identifier[cli] . identifier[current_buffer] . identifier[text] , identifier[direction] = identifier[ss] . identifier[direction] , identifier[ignore_case] = identifier[ss] . identifier[ignore_case] )) keyword[else] : identifier[document] = identifier[buffer] . identifier[document] identifier[get_processed_line] = identifier[self] . identifier[_create_get_processed_line_func] ( identifier[cli] , identifier[document] ) identifier[self] . identifier[_last_get_processed_line] = identifier[get_processed_line] keyword[def] identifier[translate_rowcol] ( identifier[row] , identifier[col] ): literal[string] keyword[return] identifier[Point] ( identifier[y] = identifier[row] , identifier[x] = identifier[get_processed_line] ( identifier[row] ). identifier[source_to_display] ( identifier[col] )) keyword[def] identifier[get_line] ( identifier[i] ): literal[string] identifier[tokens] = identifier[get_processed_line] ( identifier[i] ). identifier[tokens] identifier[tokens] = identifier[tokens] +[( identifier[self] . identifier[default_char] . identifier[token] , literal[string] )] keyword[return] identifier[tokens] identifier[content] = identifier[UIContent] ( identifier[get_line] = identifier[get_line] , identifier[line_count] = identifier[document] . identifier[line_count] , identifier[cursor_position] = identifier[translate_rowcol] ( identifier[document] . identifier[cursor_position_row] , identifier[document] . identifier[cursor_position_col] ), identifier[default_char] = identifier[self] . identifier[default_char] ) keyword[if] identifier[cli] . identifier[current_buffer_name] == identifier[self] . identifier[buffer_name] : identifier[menu_position] = identifier[self] . identifier[menu_position] ( identifier[cli] ) keyword[if] identifier[self] . identifier[menu_position] keyword[else] keyword[None] keyword[if] identifier[menu_position] keyword[is] keyword[not] keyword[None] : keyword[assert] identifier[isinstance] ( identifier[menu_position] , identifier[int] ) identifier[menu_row] , identifier[menu_col] = identifier[buffer] . identifier[document] . identifier[translate_index_to_position] ( identifier[menu_position] ) identifier[content] . identifier[menu_position] = identifier[translate_rowcol] ( identifier[menu_row] , identifier[menu_col] ) keyword[elif] identifier[buffer] . identifier[complete_state] : identifier[menu_row] , identifier[menu_col] = identifier[buffer] . identifier[document] . identifier[translate_index_to_position] ( identifier[min] ( identifier[buffer] . identifier[cursor_position] , identifier[buffer] . identifier[complete_state] . identifier[original_document] . identifier[cursor_position] )) identifier[content] . identifier[menu_position] = identifier[translate_rowcol] ( identifier[menu_row] , identifier[menu_col] ) keyword[else] : identifier[content] . identifier[menu_position] = keyword[None] keyword[return] identifier[content]
def create_content(self, cli, width, height): """ Create a UIContent. """ buffer = self._buffer(cli) # Get the document to be shown. If we are currently searching (the # search buffer has focus, and the preview_search filter is enabled), # then use the search document, which has possibly a different # text/cursor position.) def preview_now(): """ True when we should preview a search. """ return bool(self.preview_search(cli) and cli.buffers[self.search_buffer_name].text) if preview_now(): if self.get_search_state: ss = self.get_search_state(cli) # depends on [control=['if'], data=[]] else: ss = cli.search_state document = buffer.document_for_search(SearchState(text=cli.current_buffer.text, direction=ss.direction, ignore_case=ss.ignore_case)) # depends on [control=['if'], data=[]] else: document = buffer.document get_processed_line = self._create_get_processed_line_func(cli, document) self._last_get_processed_line = get_processed_line def translate_rowcol(row, col): """ Return the content column for this coordinate. """ return Point(y=row, x=get_processed_line(row).source_to_display(col)) def get_line(i): """ Return the tokens for a given line number. """ tokens = get_processed_line(i).tokens # Add a space at the end, because that is a possible cursor # position. (When inserting after the input.) We should do this on # all the lines, not just the line containing the cursor. (Because # otherwise, line wrapping/scrolling could change when moving the # cursor around.) tokens = tokens + [(self.default_char.token, ' ')] return tokens content = UIContent(get_line=get_line, line_count=document.line_count, cursor_position=translate_rowcol(document.cursor_position_row, document.cursor_position_col), default_char=self.default_char) # If there is an auto completion going on, use that start point for a # pop-up menu position. (But only when this buffer has the focus -- # there is only one place for a menu, determined by the focussed buffer.) if cli.current_buffer_name == self.buffer_name: menu_position = self.menu_position(cli) if self.menu_position else None if menu_position is not None: assert isinstance(menu_position, int) (menu_row, menu_col) = buffer.document.translate_index_to_position(menu_position) content.menu_position = translate_rowcol(menu_row, menu_col) # depends on [control=['if'], data=['menu_position']] elif buffer.complete_state: # Position for completion menu. # Note: We use 'min', because the original cursor position could be # behind the input string when the actual completion is for # some reason shorter than the text we had before. (A completion # can change and shorten the input.) (menu_row, menu_col) = buffer.document.translate_index_to_position(min(buffer.cursor_position, buffer.complete_state.original_document.cursor_position)) content.menu_position = translate_rowcol(menu_row, menu_col) # depends on [control=['if'], data=[]] else: content.menu_position = None # depends on [control=['if'], data=[]] return content
def qteUnbindAllFromApplet(self, applet: (QtmacsApplet, str)): """ Restore the global key-map for all widgets inside ``applet``. This method effectively resets the key map of all widgets to the state they would be in if the widgets were newly instantiated right now. The ``applet`` parameter can either be an instance of ``QtmacsApplet`` or a string denoting an applet ID. In the latter case the ``qteGetAppletHandle`` method is used to fetch the respective applet instance. If ``applet`` does not refer to an existing applet then nothing happens. |Args| * ``applet`` (**QtmacsApplet**, **str**): only widgets in this applet are affected. |Returns| * **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ # If ``applet`` was specified by its ID (ie. a string) then # fetch the associated ``QtmacsApplet`` instance. If # ``applet`` is already an instance of ``QtmacsApplet`` then # use it directly. if isinstance(applet, str): appletObj = self.qteGetAppletHandle(applet) else: appletObj = applet # Return immediately if the appletObj is invalid. if appletObj is None: return # Remove the key sequence from the applet window itself. appletObj._qteAdmin.keyMap = self.qteCopyGlobalKeyMap() # Restore the global key-map for every widget. for wid in appletObj._qteAdmin.widgetList: wid._qteAdmin.keyMap = self.qteCopyGlobalKeyMap()
def function[qteUnbindAllFromApplet, parameter[self, applet]]: constant[ Restore the global key-map for all widgets inside ``applet``. This method effectively resets the key map of all widgets to the state they would be in if the widgets were newly instantiated right now. The ``applet`` parameter can either be an instance of ``QtmacsApplet`` or a string denoting an applet ID. In the latter case the ``qteGetAppletHandle`` method is used to fetch the respective applet instance. If ``applet`` does not refer to an existing applet then nothing happens. |Args| * ``applet`` (**QtmacsApplet**, **str**): only widgets in this applet are affected. |Returns| * **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. ] if call[name[isinstance], parameter[name[applet], name[str]]] begin[:] variable[appletObj] assign[=] call[name[self].qteGetAppletHandle, parameter[name[applet]]] if compare[name[appletObj] is constant[None]] begin[:] return[None] name[appletObj]._qteAdmin.keyMap assign[=] call[name[self].qteCopyGlobalKeyMap, parameter[]] for taget[name[wid]] in starred[name[appletObj]._qteAdmin.widgetList] begin[:] name[wid]._qteAdmin.keyMap assign[=] call[name[self].qteCopyGlobalKeyMap, parameter[]]
keyword[def] identifier[qteUnbindAllFromApplet] ( identifier[self] , identifier[applet] :( identifier[QtmacsApplet] , identifier[str] )): literal[string] keyword[if] identifier[isinstance] ( identifier[applet] , identifier[str] ): identifier[appletObj] = identifier[self] . identifier[qteGetAppletHandle] ( identifier[applet] ) keyword[else] : identifier[appletObj] = identifier[applet] keyword[if] identifier[appletObj] keyword[is] keyword[None] : keyword[return] identifier[appletObj] . identifier[_qteAdmin] . identifier[keyMap] = identifier[self] . identifier[qteCopyGlobalKeyMap] () keyword[for] identifier[wid] keyword[in] identifier[appletObj] . identifier[_qteAdmin] . identifier[widgetList] : identifier[wid] . identifier[_qteAdmin] . identifier[keyMap] = identifier[self] . identifier[qteCopyGlobalKeyMap] ()
def qteUnbindAllFromApplet(self, applet: (QtmacsApplet, str)): """ Restore the global key-map for all widgets inside ``applet``. This method effectively resets the key map of all widgets to the state they would be in if the widgets were newly instantiated right now. The ``applet`` parameter can either be an instance of ``QtmacsApplet`` or a string denoting an applet ID. In the latter case the ``qteGetAppletHandle`` method is used to fetch the respective applet instance. If ``applet`` does not refer to an existing applet then nothing happens. |Args| * ``applet`` (**QtmacsApplet**, **str**): only widgets in this applet are affected. |Returns| * **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ # If ``applet`` was specified by its ID (ie. a string) then # fetch the associated ``QtmacsApplet`` instance. If # ``applet`` is already an instance of ``QtmacsApplet`` then # use it directly. if isinstance(applet, str): appletObj = self.qteGetAppletHandle(applet) # depends on [control=['if'], data=[]] else: appletObj = applet # Return immediately if the appletObj is invalid. if appletObj is None: return # depends on [control=['if'], data=[]] # Remove the key sequence from the applet window itself. appletObj._qteAdmin.keyMap = self.qteCopyGlobalKeyMap() # Restore the global key-map for every widget. for wid in appletObj._qteAdmin.widgetList: wid._qteAdmin.keyMap = self.qteCopyGlobalKeyMap() # depends on [control=['for'], data=['wid']]
def assert_stmt(self, assert_loc, test, msg): """assert_stmt: 'assert' test [',' test]""" loc = assert_loc.join(test.loc) if msg: loc = loc.join(msg.loc) return ast.Assert(test=test, msg=msg, loc=loc, keyword_loc=assert_loc)
def function[assert_stmt, parameter[self, assert_loc, test, msg]]: constant[assert_stmt: 'assert' test [',' test]] variable[loc] assign[=] call[name[assert_loc].join, parameter[name[test].loc]] if name[msg] begin[:] variable[loc] assign[=] call[name[loc].join, parameter[name[msg].loc]] return[call[name[ast].Assert, parameter[]]]
keyword[def] identifier[assert_stmt] ( identifier[self] , identifier[assert_loc] , identifier[test] , identifier[msg] ): literal[string] identifier[loc] = identifier[assert_loc] . identifier[join] ( identifier[test] . identifier[loc] ) keyword[if] identifier[msg] : identifier[loc] = identifier[loc] . identifier[join] ( identifier[msg] . identifier[loc] ) keyword[return] identifier[ast] . identifier[Assert] ( identifier[test] = identifier[test] , identifier[msg] = identifier[msg] , identifier[loc] = identifier[loc] , identifier[keyword_loc] = identifier[assert_loc] )
def assert_stmt(self, assert_loc, test, msg): """assert_stmt: 'assert' test [',' test]""" loc = assert_loc.join(test.loc) if msg: loc = loc.join(msg.loc) # depends on [control=['if'], data=[]] return ast.Assert(test=test, msg=msg, loc=loc, keyword_loc=assert_loc)
def unescape(field): """ Decode escape sequences in a ``.properties`` key or value. The following escape sequences are recognized:: \\t \\n \\f \\r \\uXXXX \\\\ If a backslash is followed by any other character, the backslash is dropped. In addition, any valid UTF-16 surrogate pairs in the string after escape-decoding are further decoded into the non-BMP characters they represent. (Invalid & isolated surrogate code points are left as-is.) .. versionchanged:: 0.5.0 Invalid ``\\uXXXX`` escape sequences will now cause an `InvalidUEscapeError` to be raised :param field: the string to decode :type field: text string :rtype: text string :raises InvalidUEscapeError: if an invalid ``\\uXXXX`` escape sequence occurs in the input """ return re.sub(r'[\uD800-\uDBFF][\uDC00-\uDFFF]', _unsurrogate, re.sub(r'\\(u.{0,4}|.)', _unesc, field))
def function[unescape, parameter[field]]: constant[ Decode escape sequences in a ``.properties`` key or value. The following escape sequences are recognized:: \t \n \f \r \uXXXX \\ If a backslash is followed by any other character, the backslash is dropped. In addition, any valid UTF-16 surrogate pairs in the string after escape-decoding are further decoded into the non-BMP characters they represent. (Invalid & isolated surrogate code points are left as-is.) .. versionchanged:: 0.5.0 Invalid ``\uXXXX`` escape sequences will now cause an `InvalidUEscapeError` to be raised :param field: the string to decode :type field: text string :rtype: text string :raises InvalidUEscapeError: if an invalid ``\uXXXX`` escape sequence occurs in the input ] return[call[name[re].sub, parameter[constant[[\uD800-\uDBFF][\uDC00-\uDFFF]], name[_unsurrogate], call[name[re].sub, parameter[constant[\\(u.{0,4}|.)], name[_unesc], name[field]]]]]]
keyword[def] identifier[unescape] ( identifier[field] ): literal[string] keyword[return] identifier[re] . identifier[sub] ( literal[string] , identifier[_unsurrogate] , identifier[re] . identifier[sub] ( literal[string] , identifier[_unesc] , identifier[field] ))
def unescape(field): """ Decode escape sequences in a ``.properties`` key or value. The following escape sequences are recognized:: \\t \\n \\f \\r \\uXXXX \\\\ If a backslash is followed by any other character, the backslash is dropped. In addition, any valid UTF-16 surrogate pairs in the string after escape-decoding are further decoded into the non-BMP characters they represent. (Invalid & isolated surrogate code points are left as-is.) .. versionchanged:: 0.5.0 Invalid ``\\uXXXX`` escape sequences will now cause an `InvalidUEscapeError` to be raised :param field: the string to decode :type field: text string :rtype: text string :raises InvalidUEscapeError: if an invalid ``\\uXXXX`` escape sequence occurs in the input """ return re.sub('[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]', _unsurrogate, re.sub('\\\\(u.{0,4}|.)', _unesc, field))
def own(self) -> 'PrettyDir': """Returns attributes that are not inhterited from parent classes. Now we only use a simple judgement, it is expected that many attributes not get returned, especially invoked on a module. For instance, there's no way to distinguish between properties that are initialized in instance class's __init__ and parent class's __init__(assuming super() is called). So we'll just leave it. """ return PrettyDir( self.obj, [ pattr for pattr in self.pattrs if pattr.name in type(self.obj).__dict__ or pattr.name in self.obj.__dict__ ], )
def function[own, parameter[self]]: constant[Returns attributes that are not inhterited from parent classes. Now we only use a simple judgement, it is expected that many attributes not get returned, especially invoked on a module. For instance, there's no way to distinguish between properties that are initialized in instance class's __init__ and parent class's __init__(assuming super() is called). So we'll just leave it. ] return[call[name[PrettyDir], parameter[name[self].obj, <ast.ListComp object at 0x7da2054a7520>]]]
keyword[def] identifier[own] ( identifier[self] )-> literal[string] : literal[string] keyword[return] identifier[PrettyDir] ( identifier[self] . identifier[obj] , [ identifier[pattr] keyword[for] identifier[pattr] keyword[in] identifier[self] . identifier[pattrs] keyword[if] identifier[pattr] . identifier[name] keyword[in] identifier[type] ( identifier[self] . identifier[obj] ). identifier[__dict__] keyword[or] identifier[pattr] . identifier[name] keyword[in] identifier[self] . identifier[obj] . identifier[__dict__] ], )
def own(self) -> 'PrettyDir': """Returns attributes that are not inhterited from parent classes. Now we only use a simple judgement, it is expected that many attributes not get returned, especially invoked on a module. For instance, there's no way to distinguish between properties that are initialized in instance class's __init__ and parent class's __init__(assuming super() is called). So we'll just leave it. """ return PrettyDir(self.obj, [pattr for pattr in self.pattrs if pattr.name in type(self.obj).__dict__ or pattr.name in self.obj.__dict__])
def find(self, instance_ids=None, filters=None): """Flatten list of reservations to a list of instances. :param instance_ids: A list of instance ids to filter by :type instance_ids: list :param filters: A dict of Filter.N values defined in http://goo.gl/jYNej9 :type filters: dict :return: A flattened list of filtered instances. :rtype: list """ instances = [] reservations = self.retry_on_ec2_error(self.ec2.get_all_instances, instance_ids=instance_ids, filters=filters) for reservation in reservations: instances.extend(reservation.instances) return instances
def function[find, parameter[self, instance_ids, filters]]: constant[Flatten list of reservations to a list of instances. :param instance_ids: A list of instance ids to filter by :type instance_ids: list :param filters: A dict of Filter.N values defined in http://goo.gl/jYNej9 :type filters: dict :return: A flattened list of filtered instances. :rtype: list ] variable[instances] assign[=] list[[]] variable[reservations] assign[=] call[name[self].retry_on_ec2_error, parameter[name[self].ec2.get_all_instances]] for taget[name[reservation]] in starred[name[reservations]] begin[:] call[name[instances].extend, parameter[name[reservation].instances]] return[name[instances]]
keyword[def] identifier[find] ( identifier[self] , identifier[instance_ids] = keyword[None] , identifier[filters] = keyword[None] ): literal[string] identifier[instances] =[] identifier[reservations] = identifier[self] . identifier[retry_on_ec2_error] ( identifier[self] . identifier[ec2] . identifier[get_all_instances] , identifier[instance_ids] = identifier[instance_ids] , identifier[filters] = identifier[filters] ) keyword[for] identifier[reservation] keyword[in] identifier[reservations] : identifier[instances] . identifier[extend] ( identifier[reservation] . identifier[instances] ) keyword[return] identifier[instances]
def find(self, instance_ids=None, filters=None): """Flatten list of reservations to a list of instances. :param instance_ids: A list of instance ids to filter by :type instance_ids: list :param filters: A dict of Filter.N values defined in http://goo.gl/jYNej9 :type filters: dict :return: A flattened list of filtered instances. :rtype: list """ instances = [] reservations = self.retry_on_ec2_error(self.ec2.get_all_instances, instance_ids=instance_ids, filters=filters) for reservation in reservations: instances.extend(reservation.instances) # depends on [control=['for'], data=['reservation']] return instances
def _ValidateCacheFileMetadataHeader(self, cache_file_metadata_header): """Determines whether the cache file metadata header is valid. Args: cache_file_metadata_header (firefox_cache2_file_metadata_header): cache file metadata header. Returns: bool: True if the cache file metadata header is valid. """ # TODO: add support for format version 2 and 3 return ( cache_file_metadata_header.key_size > 0 and cache_file_metadata_header.key_size < self._MAXIMUM_URL_LENGTH and cache_file_metadata_header.format_version == 1 and cache_file_metadata_header.last_fetched_time > 0 and cache_file_metadata_header.fetch_count > 0)
def function[_ValidateCacheFileMetadataHeader, parameter[self, cache_file_metadata_header]]: constant[Determines whether the cache file metadata header is valid. Args: cache_file_metadata_header (firefox_cache2_file_metadata_header): cache file metadata header. Returns: bool: True if the cache file metadata header is valid. ] return[<ast.BoolOp object at 0x7da2044c17b0>]
keyword[def] identifier[_ValidateCacheFileMetadataHeader] ( identifier[self] , identifier[cache_file_metadata_header] ): literal[string] keyword[return] ( identifier[cache_file_metadata_header] . identifier[key_size] > literal[int] keyword[and] identifier[cache_file_metadata_header] . identifier[key_size] < identifier[self] . identifier[_MAXIMUM_URL_LENGTH] keyword[and] identifier[cache_file_metadata_header] . identifier[format_version] == literal[int] keyword[and] identifier[cache_file_metadata_header] . identifier[last_fetched_time] > literal[int] keyword[and] identifier[cache_file_metadata_header] . identifier[fetch_count] > literal[int] )
def _ValidateCacheFileMetadataHeader(self, cache_file_metadata_header): """Determines whether the cache file metadata header is valid. Args: cache_file_metadata_header (firefox_cache2_file_metadata_header): cache file metadata header. Returns: bool: True if the cache file metadata header is valid. """ # TODO: add support for format version 2 and 3 return cache_file_metadata_header.key_size > 0 and cache_file_metadata_header.key_size < self._MAXIMUM_URL_LENGTH and (cache_file_metadata_header.format_version == 1) and (cache_file_metadata_header.last_fetched_time > 0) and (cache_file_metadata_header.fetch_count > 0)
def discover(glob_pattern): """ Find all files matching given glob_pattern, parse them, and return list of environments: >>> envs = discover("requirements/*.in") >>> # print(envs) >>> envs == [ ... {'name': 'base', 'refs': set()}, ... {'name': 'py27', 'refs': set()}, ... {'name': 'test', 'refs': {'base'}}, ... {'name': 'local', 'refs': {'test'}}, ... {'name': 'local27', 'refs': {'test', 'py27'}}, ... {'name': 'testwin', 'refs': {'test'}}, ... ] True """ in_paths = glob.glob(glob_pattern) names = { extract_env_name(path): path for path in in_paths } return order_by_refs([ {'name': name, 'refs': Environment.parse_references(in_path)} for name, in_path in names.items() ])
def function[discover, parameter[glob_pattern]]: constant[ Find all files matching given glob_pattern, parse them, and return list of environments: >>> envs = discover("requirements/*.in") >>> # print(envs) >>> envs == [ ... {'name': 'base', 'refs': set()}, ... {'name': 'py27', 'refs': set()}, ... {'name': 'test', 'refs': {'base'}}, ... {'name': 'local', 'refs': {'test'}}, ... {'name': 'local27', 'refs': {'test', 'py27'}}, ... {'name': 'testwin', 'refs': {'test'}}, ... ] True ] variable[in_paths] assign[=] call[name[glob].glob, parameter[name[glob_pattern]]] variable[names] assign[=] <ast.DictComp object at 0x7da20e9625c0> return[call[name[order_by_refs], parameter[<ast.ListComp object at 0x7da20e960ee0>]]]
keyword[def] identifier[discover] ( identifier[glob_pattern] ): literal[string] identifier[in_paths] = identifier[glob] . identifier[glob] ( identifier[glob_pattern] ) identifier[names] ={ identifier[extract_env_name] ( identifier[path] ): identifier[path] keyword[for] identifier[path] keyword[in] identifier[in_paths] } keyword[return] identifier[order_by_refs] ([ { literal[string] : identifier[name] , literal[string] : identifier[Environment] . identifier[parse_references] ( identifier[in_path] )} keyword[for] identifier[name] , identifier[in_path] keyword[in] identifier[names] . identifier[items] () ])
def discover(glob_pattern): """ Find all files matching given glob_pattern, parse them, and return list of environments: >>> envs = discover("requirements/*.in") >>> # print(envs) >>> envs == [ ... {'name': 'base', 'refs': set()}, ... {'name': 'py27', 'refs': set()}, ... {'name': 'test', 'refs': {'base'}}, ... {'name': 'local', 'refs': {'test'}}, ... {'name': 'local27', 'refs': {'test', 'py27'}}, ... {'name': 'testwin', 'refs': {'test'}}, ... ] True """ in_paths = glob.glob(glob_pattern) names = {extract_env_name(path): path for path in in_paths} return order_by_refs([{'name': name, 'refs': Environment.parse_references(in_path)} for (name, in_path) in names.items()])
def _port_postfix(self): """ Returns empty string for the default port and ':port' otherwise """ port = self.real_connection.port default_port = {'https': 443, 'http': 80}[self._protocol] return ':{}'.format(port) if port != default_port else ''
def function[_port_postfix, parameter[self]]: constant[ Returns empty string for the default port and ':port' otherwise ] variable[port] assign[=] name[self].real_connection.port variable[default_port] assign[=] call[dictionary[[<ast.Constant object at 0x7da1b26ae200>, <ast.Constant object at 0x7da1b26acdf0>], [<ast.Constant object at 0x7da1b26af010>, <ast.Constant object at 0x7da1b26ad270>]]][name[self]._protocol] return[<ast.IfExp object at 0x7da1b26af370>]
keyword[def] identifier[_port_postfix] ( identifier[self] ): literal[string] identifier[port] = identifier[self] . identifier[real_connection] . identifier[port] identifier[default_port] ={ literal[string] : literal[int] , literal[string] : literal[int] }[ identifier[self] . identifier[_protocol] ] keyword[return] literal[string] . identifier[format] ( identifier[port] ) keyword[if] identifier[port] != identifier[default_port] keyword[else] literal[string]
def _port_postfix(self): """ Returns empty string for the default port and ':port' otherwise """ port = self.real_connection.port default_port = {'https': 443, 'http': 80}[self._protocol] return ':{}'.format(port) if port != default_port else ''