code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def display(self, tool):
    """Show *tool* above the current layer and set the title to its name.

    The tool is recorded on the internal tool stack before being rendered,
    so it can later be popped off in LIFO order.
    """
    self._tools.append(tool)
    self._justDisplay(tool)
def function[display, parameter[self, tool]]: constant[Displays the given tool above the current layer, and sets the title to its name. ] call[name[self]._tools.append, parameter[name[tool]]] call[name[self]._justDisplay, parameter[name[tool]]]
keyword[def] identifier[display] ( identifier[self] , identifier[tool] ): literal[string] identifier[self] . identifier[_tools] . identifier[append] ( identifier[tool] ) identifier[self] . identifier[_justDisplay] ( identifier[tool] )
def display(self, tool): """Displays the given tool above the current layer, and sets the title to its name. """ self._tools.append(tool) self._justDisplay(tool)
def _check_match(self, name, version_string) -> bool:
    """
    Check whether ``name`` at ``version_string`` is blacklisted.

    The release is compared against every blacklist requirement whose
    project name matches; a hit on any matching requirement's version
    specifier counts as a match.

    Parameters
    ==========
    name: str
        Package name
    version: str
        Package version

    Returns
    =======
    bool:
        True if it matches, False otherwise.
    """
    # Nothing to compare against -> cannot match.
    if not name or not version_string:
        return False

    try:
        version = Version(version_string)
    except InvalidVersion:
        logger.debug(f"Package {name}=={version_string} has an invalid version")
        return False

    for requirement in self.blacklist_release_requirements:
        # Only requirements for this exact project name are relevant.
        if requirement.name != name:
            continue
        if version in requirement.specifier:
            logger.debug(
                f"MATCH: Release {name}=={version} matches specifier "
                f"{requirement.specifier}"
            )
            return True
    return False
def function[_check_match, parameter[self, name, version_string]]: constant[ Check if the package name and version matches against a blacklisted package version specifier. Parameters ========== name: str Package name version: str Package version Returns ======= bool: True if it matches, False otherwise. ] if <ast.BoolOp object at 0x7da20e9b1c60> begin[:] return[constant[False]] <ast.Try object at 0x7da20e9b0fa0> for taget[name[requirement]] in starred[name[self].blacklist_release_requirements] begin[:] if compare[name[name] not_equal[!=] name[requirement].name] begin[:] continue if compare[name[version] in name[requirement].specifier] begin[:] call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da20e9b3a90>]] return[constant[True]] return[constant[False]]
keyword[def] identifier[_check_match] ( identifier[self] , identifier[name] , identifier[version_string] )-> identifier[bool] : literal[string] keyword[if] keyword[not] identifier[name] keyword[or] keyword[not] identifier[version_string] : keyword[return] keyword[False] keyword[try] : identifier[version] = identifier[Version] ( identifier[version_string] ) keyword[except] identifier[InvalidVersion] : identifier[logger] . identifier[debug] ( literal[string] ) keyword[return] keyword[False] keyword[for] identifier[requirement] keyword[in] identifier[self] . identifier[blacklist_release_requirements] : keyword[if] identifier[name] != identifier[requirement] . identifier[name] : keyword[continue] keyword[if] identifier[version] keyword[in] identifier[requirement] . identifier[specifier] : identifier[logger] . identifier[debug] ( literal[string] literal[string] ) keyword[return] keyword[True] keyword[return] keyword[False]
def _check_match(self, name, version_string) -> bool: """ Check if the package name and version matches against a blacklisted package version specifier. Parameters ========== name: str Package name version: str Package version Returns ======= bool: True if it matches, False otherwise. """ if not name or not version_string: return False # depends on [control=['if'], data=[]] try: version = Version(version_string) # depends on [control=['try'], data=[]] except InvalidVersion: logger.debug(f'Package {name}=={version_string} has an invalid version') return False # depends on [control=['except'], data=[]] for requirement in self.blacklist_release_requirements: if name != requirement.name: continue # depends on [control=['if'], data=[]] if version in requirement.specifier: logger.debug(f'MATCH: Release {name}=={version} matches specifier {requirement.specifier}') return True # depends on [control=['if'], data=['version']] # depends on [control=['for'], data=['requirement']] return False
def deserialize_data(self, workflow, start_node):
    """
    Reads a "data" or "define" tag from the given node.

    start_node -- the xml node (xml.dom.minidom.Node)
    """
    # Pull the two attributes that define the (name, value) pair.
    read_attr = start_node.getAttribute
    return read_attr('name'), read_attr('value')
def function[deserialize_data, parameter[self, workflow, start_node]]: constant[ Reads a "data" or "define" tag from the given node. start_node -- the xml node (xml.dom.minidom.Node) ] variable[name] assign[=] call[name[start_node].getAttribute, parameter[constant[name]]] variable[value] assign[=] call[name[start_node].getAttribute, parameter[constant[value]]] return[tuple[[<ast.Name object at 0x7da1b016c850>, <ast.Name object at 0x7da1b016f2e0>]]]
keyword[def] identifier[deserialize_data] ( identifier[self] , identifier[workflow] , identifier[start_node] ): literal[string] identifier[name] = identifier[start_node] . identifier[getAttribute] ( literal[string] ) identifier[value] = identifier[start_node] . identifier[getAttribute] ( literal[string] ) keyword[return] identifier[name] , identifier[value]
def deserialize_data(self, workflow, start_node): """ Reads a "data" or "define" tag from the given node. start_node -- the xml node (xml.dom.minidom.Node) """ name = start_node.getAttribute('name') value = start_node.getAttribute('value') return (name, value)
def cudaPointerGetAttributes(ptr):
    """
    Get memory pointer attributes.

    Returns attributes of the specified pointer.

    Parameters
    ----------
    ptr : ctypes pointer
        Memory pointer to examine.

    Returns
    -------
    memory_type : int
        Memory type; 1 indicates host memory, 2 indicates device memory.
    device : int
        Number of device associated with pointer.

    Notes
    -----
    This function only works with CUDA 4.0 and later.
    """
    attrs = cudaPointerAttributes()
    # Query the runtime, raising via cudaCheckStatus on any CUDA error.
    cudaCheckStatus(
        _libcudart.cudaPointerGetAttributes(ctypes.byref(attrs), ptr)
    )
    return attrs.memoryType, attrs.device
def function[cudaPointerGetAttributes, parameter[ptr]]: constant[ Get memory pointer attributes. Returns attributes of the specified pointer. Parameters ---------- ptr : ctypes pointer Memory pointer to examine. Returns ------- memory_type : int Memory type; 1 indicates host memory, 2 indicates device memory. device : int Number of device associated with pointer. Notes ----- This function only works with CUDA 4.0 and later. ] variable[attributes] assign[=] call[name[cudaPointerAttributes], parameter[]] variable[status] assign[=] call[name[_libcudart].cudaPointerGetAttributes, parameter[call[name[ctypes].byref, parameter[name[attributes]]], name[ptr]]] call[name[cudaCheckStatus], parameter[name[status]]] return[tuple[[<ast.Attribute object at 0x7da20e957d60>, <ast.Attribute object at 0x7da20e956ce0>]]]
keyword[def] identifier[cudaPointerGetAttributes] ( identifier[ptr] ): literal[string] identifier[attributes] = identifier[cudaPointerAttributes] () identifier[status] = identifier[_libcudart] . identifier[cudaPointerGetAttributes] ( identifier[ctypes] . identifier[byref] ( identifier[attributes] ), identifier[ptr] ) identifier[cudaCheckStatus] ( identifier[status] ) keyword[return] identifier[attributes] . identifier[memoryType] , identifier[attributes] . identifier[device]
def cudaPointerGetAttributes(ptr): """ Get memory pointer attributes. Returns attributes of the specified pointer. Parameters ---------- ptr : ctypes pointer Memory pointer to examine. Returns ------- memory_type : int Memory type; 1 indicates host memory, 2 indicates device memory. device : int Number of device associated with pointer. Notes ----- This function only works with CUDA 4.0 and later. """ attributes = cudaPointerAttributes() status = _libcudart.cudaPointerGetAttributes(ctypes.byref(attributes), ptr) cudaCheckStatus(status) return (attributes.memoryType, attributes.device)
def touch_project():
    """
    Touches the project to trigger refreshing its cauldron.json state.
    """
    response = Response()
    project = cd.project.get_internal_project()
    if not project:
        # No project is open, so there is nothing to refresh.
        response.fail(
            code='NO_PROJECT',
            message='No open project to refresh'
        )
    else:
        project.refresh()
    return response.update(
        sync_time=sync_status.get('time', 0)
    ).flask_serialize()
def function[touch_project, parameter[]]: constant[ Touches the project to trigger refreshing its cauldron.json state. ] variable[r] assign[=] call[name[Response], parameter[]] variable[project] assign[=] call[name[cd].project.get_internal_project, parameter[]] if name[project] begin[:] call[name[project].refresh, parameter[]] return[call[call[name[r].update, parameter[]].flask_serialize, parameter[]]]
keyword[def] identifier[touch_project] (): literal[string] identifier[r] = identifier[Response] () identifier[project] = identifier[cd] . identifier[project] . identifier[get_internal_project] () keyword[if] identifier[project] : identifier[project] . identifier[refresh] () keyword[else] : identifier[r] . identifier[fail] ( identifier[code] = literal[string] , identifier[message] = literal[string] ) keyword[return] identifier[r] . identifier[update] ( identifier[sync_time] = identifier[sync_status] . identifier[get] ( literal[string] , literal[int] ) ). identifier[flask_serialize] ()
def touch_project(): """ Touches the project to trigger refreshing its cauldron.json state. """ r = Response() project = cd.project.get_internal_project() if project: project.refresh() # depends on [control=['if'], data=[]] else: r.fail(code='NO_PROJECT', message='No open project to refresh') return r.update(sync_time=sync_status.get('time', 0)).flask_serialize()
def sphbear(lat1, lon1, lat2, lon2, tol=1e-15):
    """Calculate the bearing between two locations on a sphere.

    lat1, lon1
      Latitude and longitude of the first location, in radians.
    lat2, lon2
      Latitude and longitude of the second location, in radians.
    tol
      Tolerance used to detect proximity to the poles and to round
      near-zero bearings to exactly zero.

    The bearing (AKA the position angle, PA) is the orientation of point 2
    with regards to point 1 relative to the longitudinal axis. Returns the
    bearing in radians; all arguments are in radians as well, and may be
    vectors.

    Note that the argument ordering maps to the nonstandard ordering
    (Dec, RA) in equatorial coordinates; in a spherical projection it maps
    to (Y, X), which may also be unexpected.

    The sign convention is astronomical: bearings range from -pi to pi,
    negative when point 2 is in the western hemisphere with regards to
    point 1, positive when it is in the eastern ("east from north").

    If point 1 is very near a pole the bearing is undefined and NaN is
    returned.

    Derived from ``bear()`` in angles.py by Prasanth Nair
    (https://github.com/phn/angles, BSD licensed). This implementation is
    sufficiently different that it counts as separate.
    """
    def cross0(u, w):
        # Cross product with the 3-vector components along axis 0.
        return np.cross(u, w, axisa=0, axisb=0, axisc=0)

    # Unit vectors toward each point; shape (3,) + shape of the args.
    p1 = np.asarray([np.cos(lat1) * np.cos(lon1),
                     np.cos(lat1) * np.sin(lon1),
                     np.sin(lat1)])
    p2 = np.asarray([np.cos(lat2) * np.cos(lon2),
                     np.cos(lat2) * np.sin(lon2),
                     np.sin(lat2)])

    # Point 1 essentially at a pole -> bearing undefined.
    near_pole = (p1[0]**2 + p1[1]**2) < tol

    # Normal of the great circle containing both points.
    gc_normal = cross0(p1, p2)
    # Vector perpendicular to p1 and to the Z axis.
    perp_z = np.asarray([p1[1], -p1[0], np.zeros_like(lat1)])

    # Angle between the two vectors via atan2(|cross|, dot).
    sin_term = np.sqrt((cross0(gc_normal, perp_z)**2).sum(axis=0))
    bearing = np.arctan2(sin_term, np.sum(gc_normal * perp_z, axis=0))
    # Westward bearings get a negative sign, giving the [-pi, pi] range.
    bearing = np.where(gc_normal[2] < 0, -bearing, bearing)
    # Snap numerically tiny bearings to exactly zero.
    bearing = np.where(np.abs(bearing) < tol, 0, bearing)
    bearing[np.where(near_pole)] = np.nan
    return bearing
def function[sphbear, parameter[lat1, lon1, lat2, lon2, tol]]: constant[Calculate the bearing between two locations on a sphere. lat1 The latitude of the first location. lon1 The longitude of the first location. lat2 The latitude of the second location. lon2 The longitude of the second location. tol Tolerance for checking proximity to poles and rounding to zero. The bearing (AKA the position angle, PA) is the orientation of point 2 with regards to point 1 relative to the longitudinal axis. Returns the bearing in radians. All arguments are in radians as well. The arguments may be vectors. Note that the ordering of the arguments maps to the nonstandard ordering ``(Dec, RA)`` in equatorial coordinates. In a spherical projection it maps to ``(Y, X)`` which may also be unexpected. The sign convention is astronomical: bearings range from -π to π, with negative values if point 2 is in the western hemisphere with regards to point 1, positive if it is in the eastern. (That is, “east from north”.) If point 1 is very near the pole, the bearing is undefined and the result is NaN. The *tol* argument is used for checking proximity to the poles and for rounding the bearing to precisely zero if it's extremely small. Derived from ``bear()`` in `angles.py from Prasanth Nair <https://github.com/phn/angles>`_. His version is BSD licensed. This one is sufficiently different that I think it counts as a separate implementation. 
] variable[ocross] assign[=] <ast.Lambda object at 0x7da2054a4430> variable[v1] assign[=] call[name[np].asarray, parameter[list[[<ast.BinOp object at 0x7da2054a7730>, <ast.BinOp object at 0x7da2054a4a00>, <ast.Call object at 0x7da2054a5cf0>]]]] variable[v2] assign[=] call[name[np].asarray, parameter[list[[<ast.BinOp object at 0x7da2054a66e0>, <ast.BinOp object at 0x7da1b27ab100>, <ast.Call object at 0x7da1b27abee0>]]]] variable[is_bad] assign[=] compare[binary_operation[binary_operation[call[name[v1]][constant[0]] ** constant[2]] + binary_operation[call[name[v1]][constant[1]] ** constant[2]]] less[<] name[tol]] variable[p12] assign[=] call[name[ocross], parameter[name[v1], name[v2]]] variable[p1z] assign[=] call[name[np].asarray, parameter[list[[<ast.Subscript object at 0x7da1b2767ca0>, <ast.UnaryOp object at 0x7da1b2767610>, <ast.Call object at 0x7da1b27668c0>]]]] variable[cm] assign[=] call[name[np].sqrt, parameter[call[binary_operation[call[name[ocross], parameter[name[p12], name[p1z]]] ** constant[2]].sum, parameter[]]]] variable[bearing] assign[=] call[name[np].arctan2, parameter[name[cm], call[name[np].sum, parameter[binary_operation[name[p12] * name[p1z]]]]]] variable[bearing] assign[=] call[name[np].where, parameter[compare[call[name[p12]][constant[2]] less[<] constant[0]], <ast.UnaryOp object at 0x7da1b27b8b80>, name[bearing]]] variable[bearing] assign[=] call[name[np].where, parameter[compare[call[name[np].abs, parameter[name[bearing]]] less[<] name[tol]], constant[0], name[bearing]]] call[name[bearing]][call[name[np].where, parameter[name[is_bad]]]] assign[=] name[np].nan return[name[bearing]]
keyword[def] identifier[sphbear] ( identifier[lat1] , identifier[lon1] , identifier[lat2] , identifier[lon2] , identifier[tol] = literal[int] ): literal[string] identifier[ocross] = keyword[lambda] identifier[a] , identifier[b] : identifier[np] . identifier[cross] ( identifier[a] , identifier[b] , identifier[axisa] = literal[int] , identifier[axisb] = literal[int] , identifier[axisc] = literal[int] ) identifier[v1] = identifier[np] . identifier[asarray] ([ identifier[np] . identifier[cos] ( identifier[lat1] )* identifier[np] . identifier[cos] ( identifier[lon1] ), identifier[np] . identifier[cos] ( identifier[lat1] )* identifier[np] . identifier[sin] ( identifier[lon1] ), identifier[np] . identifier[sin] ( identifier[lat1] )]) identifier[v2] = identifier[np] . identifier[asarray] ([ identifier[np] . identifier[cos] ( identifier[lat2] )* identifier[np] . identifier[cos] ( identifier[lon2] ), identifier[np] . identifier[cos] ( identifier[lat2] )* identifier[np] . identifier[sin] ( identifier[lon2] ), identifier[np] . identifier[sin] ( identifier[lat2] )]) identifier[is_bad] =( identifier[v1] [ literal[int] ]** literal[int] + identifier[v1] [ literal[int] ]** literal[int] )< identifier[tol] identifier[p12] = identifier[ocross] ( identifier[v1] , identifier[v2] ) identifier[p1z] = identifier[np] . identifier[asarray] ([ identifier[v1] [ literal[int] ],- identifier[v1] [ literal[int] ], identifier[np] . identifier[zeros_like] ( identifier[lat1] )]) identifier[cm] = identifier[np] . identifier[sqrt] (( identifier[ocross] ( identifier[p12] , identifier[p1z] )** literal[int] ). identifier[sum] ( identifier[axis] = literal[int] )) identifier[bearing] = identifier[np] . identifier[arctan2] ( identifier[cm] , identifier[np] . identifier[sum] ( identifier[p12] * identifier[p1z] , identifier[axis] = literal[int] )) identifier[bearing] = identifier[np] . 
identifier[where] ( identifier[p12] [ literal[int] ]< literal[int] ,- identifier[bearing] , identifier[bearing] ) identifier[bearing] = identifier[np] . identifier[where] ( identifier[np] . identifier[abs] ( identifier[bearing] )< identifier[tol] , literal[int] , identifier[bearing] ) identifier[bearing] [ identifier[np] . identifier[where] ( identifier[is_bad] )]= identifier[np] . identifier[nan] keyword[return] identifier[bearing]
def sphbear(lat1, lon1, lat2, lon2, tol=1e-15): """Calculate the bearing between two locations on a sphere. lat1 The latitude of the first location. lon1 The longitude of the first location. lat2 The latitude of the second location. lon2 The longitude of the second location. tol Tolerance for checking proximity to poles and rounding to zero. The bearing (AKA the position angle, PA) is the orientation of point 2 with regards to point 1 relative to the longitudinal axis. Returns the bearing in radians. All arguments are in radians as well. The arguments may be vectors. Note that the ordering of the arguments maps to the nonstandard ordering ``(Dec, RA)`` in equatorial coordinates. In a spherical projection it maps to ``(Y, X)`` which may also be unexpected. The sign convention is astronomical: bearings range from -π to π, with negative values if point 2 is in the western hemisphere with regards to point 1, positive if it is in the eastern. (That is, “east from north”.) If point 1 is very near the pole, the bearing is undefined and the result is NaN. The *tol* argument is used for checking proximity to the poles and for rounding the bearing to precisely zero if it's extremely small. Derived from ``bear()`` in `angles.py from Prasanth Nair <https://github.com/phn/angles>`_. His version is BSD licensed. This one is sufficiently different that I think it counts as a separate implementation. 
""" # cross product on outer axis: ocross = lambda a, b: np.cross(a, b, axisa=0, axisb=0, axisc=0) # if args have shape S, this has shape (3, S) v1 = np.asarray([np.cos(lat1) * np.cos(lon1), np.cos(lat1) * np.sin(lon1), np.sin(lat1)]) v2 = np.asarray([np.cos(lat2) * np.cos(lon2), np.cos(lat2) * np.sin(lon2), np.sin(lat2)]) is_bad = v1[0] ** 2 + v1[1] ** 2 < tol p12 = ocross(v1, v2) # ~"perpendicular to great circle containing points" p1z = np.asarray([v1[1], -v1[0], np.zeros_like(lat1)]) # ~"perp to base and Z axis" cm = np.sqrt((ocross(p12, p1z) ** 2).sum(axis=0)) # ~"angle between the vectors" bearing = np.arctan2(cm, np.sum(p12 * p1z, axis=0)) bearing = np.where(p12[2] < 0, -bearing, bearing) # convert to [-pi/2, pi/2] bearing = np.where(np.abs(bearing) < tol, 0, bearing) # clamp bearing[np.where(is_bad)] = np.nan return bearing
def pop_all(self):
    """
    NON-BLOCKING POP ALL IN QUEUE, IF ANY
    """
    with self.lock:
        if self.please_stop:
            # Signal shutdown to the consumer.
            return [THREAD_STOP]
        if self.db.status.end == self.start:
            # Nothing queued since the last pop.
            return []

        drained = [
            self.db[str(i)]
            for i in range(self.start, self.db.status.end)
        ]
        self.start = self.db.status.end
        return drained
def function[pop_all, parameter[self]]: constant[ NON-BLOCKING POP ALL IN QUEUE, IF ANY ] with name[self].lock begin[:] if name[self].please_stop begin[:] return[list[[<ast.Name object at 0x7da18bccbcd0>]]] if compare[name[self].db.status.end equal[==] name[self].start] begin[:] return[list[[]]] variable[output] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[name[self].start, name[self].db.status.end]]] begin[:] call[name[output].append, parameter[call[name[self].db][call[name[str], parameter[name[i]]]]]] name[self].start assign[=] name[self].db.status.end return[name[output]]
keyword[def] identifier[pop_all] ( identifier[self] ): literal[string] keyword[with] identifier[self] . identifier[lock] : keyword[if] identifier[self] . identifier[please_stop] : keyword[return] [ identifier[THREAD_STOP] ] keyword[if] identifier[self] . identifier[db] . identifier[status] . identifier[end] == identifier[self] . identifier[start] : keyword[return] [] identifier[output] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[start] , identifier[self] . identifier[db] . identifier[status] . identifier[end] ): identifier[output] . identifier[append] ( identifier[self] . identifier[db] [ identifier[str] ( identifier[i] )]) identifier[self] . identifier[start] = identifier[self] . identifier[db] . identifier[status] . identifier[end] keyword[return] identifier[output]
def pop_all(self): """ NON-BLOCKING POP ALL IN QUEUE, IF ANY """ with self.lock: if self.please_stop: return [THREAD_STOP] # depends on [control=['if'], data=[]] if self.db.status.end == self.start: return [] # depends on [control=['if'], data=[]] output = [] for i in range(self.start, self.db.status.end): output.append(self.db[str(i)]) # depends on [control=['for'], data=['i']] self.start = self.db.status.end return output # depends on [control=['with'], data=[]]
def url_equals(a: str, b: str) -> bool:
    """
    Compares two URLs/paths and returns True if they point to same URI.
    For example, querystring parameters can be in a different order but the
    URLs are still equal.

    Repeated query parameters are compared as multisets, so
    ``?x=1&x=2`` equals ``?x=2&x=1`` but differs from ``?x=1&x=1``.

    :param a: URL/path
    :param b: URL/path
    :return: True if URLs/paths are equal
    """
    from urllib.parse import urlparse, parse_qsl
    a2 = list(urlparse(a))
    b2 = list(urlparse(b))
    # Compare query strings order-insensitively WITHOUT collapsing
    # duplicate keys: dict(parse_qsl(...)) keeps only the last value per
    # key, so e.g. "?x=1&x=2" and "?x=3&x=2" would wrongly compare equal.
    a2[4] = sorted(parse_qsl(a2[4]))
    b2[4] = sorted(parse_qsl(b2[4]))
    return a2 == b2
def function[url_equals, parameter[a, b]]: constant[ Compares two URLs/paths and returns True if they point to same URI. For example, querystring parameters can be different order but URLs are still equal. :param a: URL/path :param b: URL/path :return: True if URLs/paths are equal ] from relative_module[urllib.parse] import module[urlparse], module[parse_qsl] variable[a2] assign[=] call[name[list], parameter[call[name[urlparse], parameter[name[a]]]]] variable[b2] assign[=] call[name[list], parameter[call[name[urlparse], parameter[name[b]]]]] call[name[a2]][constant[4]] assign[=] call[name[dict], parameter[call[name[parse_qsl], parameter[call[name[a2]][constant[4]]]]]] call[name[b2]][constant[4]] assign[=] call[name[dict], parameter[call[name[parse_qsl], parameter[call[name[b2]][constant[4]]]]]] return[compare[name[a2] equal[==] name[b2]]]
keyword[def] identifier[url_equals] ( identifier[a] : identifier[str] , identifier[b] : identifier[str] )-> identifier[bool] : literal[string] keyword[from] identifier[urllib] . identifier[parse] keyword[import] identifier[urlparse] , identifier[parse_qsl] identifier[a2] = identifier[list] ( identifier[urlparse] ( identifier[a] )) identifier[b2] = identifier[list] ( identifier[urlparse] ( identifier[b] )) identifier[a2] [ literal[int] ]= identifier[dict] ( identifier[parse_qsl] ( identifier[a2] [ literal[int] ])) identifier[b2] [ literal[int] ]= identifier[dict] ( identifier[parse_qsl] ( identifier[b2] [ literal[int] ])) keyword[return] identifier[a2] == identifier[b2]
def url_equals(a: str, b: str) -> bool: """ Compares two URLs/paths and returns True if they point to same URI. For example, querystring parameters can be different order but URLs are still equal. :param a: URL/path :param b: URL/path :return: True if URLs/paths are equal """ from urllib.parse import urlparse, parse_qsl a2 = list(urlparse(a)) b2 = list(urlparse(b)) a2[4] = dict(parse_qsl(a2[4])) b2[4] = dict(parse_qsl(b2[4])) return a2 == b2
def _buffered_generation_process(source_gen, buffer_, sentinal): """ helper for buffered_generator """ for data in source_gen: buffer_.put(data, block=True) # sentinel: signal the end of the iterator buffer_.put(sentinal) # unfortunately this does not suffice as a signal: if buffer_.get() was # called and subsequently the buffer_ is closed, it will block forever. buffer_.close()
def function[_buffered_generation_process, parameter[source_gen, buffer_, sentinal]]: constant[ helper for buffered_generator ] for taget[name[data]] in starred[name[source_gen]] begin[:] call[name[buffer_].put, parameter[name[data]]] call[name[buffer_].put, parameter[name[sentinal]]] call[name[buffer_].close, parameter[]]
keyword[def] identifier[_buffered_generation_process] ( identifier[source_gen] , identifier[buffer_] , identifier[sentinal] ): literal[string] keyword[for] identifier[data] keyword[in] identifier[source_gen] : identifier[buffer_] . identifier[put] ( identifier[data] , identifier[block] = keyword[True] ) identifier[buffer_] . identifier[put] ( identifier[sentinal] ) identifier[buffer_] . identifier[close] ()
def _buffered_generation_process(source_gen, buffer_, sentinal): """ helper for buffered_generator """ for data in source_gen: buffer_.put(data, block=True) # depends on [control=['for'], data=['data']] # sentinel: signal the end of the iterator buffer_.put(sentinal) # unfortunately this does not suffice as a signal: if buffer_.get() was # called and subsequently the buffer_ is closed, it will block forever. buffer_.close()
def _write_cron_lines(user, lines):
    '''
    Takes a list of lines to be committed to a user's crontab and writes it
    '''
    lines = [salt.utils.stringutils.to_str(line) for line in lines]
    path = salt.utils.files.mkstemp()

    run_as_user = (
        _check_instance_uid_match(user)
        or __grains__.get('os_family') in ('Solaris', 'AIX')
    )
    if run_as_user:
        # On these platforms the crontab command must be executed as the
        # target user rather than root, and the temp file owned by them.
        with salt.utils.files.fpopen(path, 'w+',
                                     uid=__salt__['file.user_to_uid'](user),
                                     mode=0o600) as tmp:
            tmp.writelines(lines)
        ret = __salt__['cmd.run_all'](_get_cron_cmdstr(path),
                                      runas=user,
                                      python_shell=False)
    else:
        with salt.utils.files.fpopen(path, 'w+', mode=0o600) as tmp:
            tmp.writelines(lines)
        ret = __salt__['cmd.run_all'](_get_cron_cmdstr(path, user),
                                      python_shell=False)

    os.remove(path)
    return ret
def function[_write_cron_lines, parameter[user, lines]]: constant[ Takes a list of lines to be committed to a user's crontab and writes it ] variable[lines] assign[=] <ast.ListComp object at 0x7da18dc99720> variable[path] assign[=] call[name[salt].utils.files.mkstemp, parameter[]] if <ast.BoolOp object at 0x7da18dc07d30> begin[:] with call[name[salt].utils.files.fpopen, parameter[name[path], constant[w+]]] begin[:] call[name[fp_].writelines, parameter[name[lines]]] variable[ret] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[name[_get_cron_cmdstr], parameter[name[path]]]]] call[name[os].remove, parameter[name[path]]] return[name[ret]]
keyword[def] identifier[_write_cron_lines] ( identifier[user] , identifier[lines] ): literal[string] identifier[lines] =[ identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[_l] ) keyword[for] identifier[_l] keyword[in] identifier[lines] ] identifier[path] = identifier[salt] . identifier[utils] . identifier[files] . identifier[mkstemp] () keyword[if] identifier[_check_instance_uid_match] ( identifier[user] ) keyword[or] identifier[__grains__] . identifier[get] ( literal[string] ) keyword[in] ( literal[string] , literal[string] ): keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fpopen] ( identifier[path] , literal[string] , identifier[uid] = identifier[__salt__] [ literal[string] ]( identifier[user] ), identifier[mode] = literal[int] ) keyword[as] identifier[fp_] : identifier[fp_] . identifier[writelines] ( identifier[lines] ) identifier[ret] = identifier[__salt__] [ literal[string] ]( identifier[_get_cron_cmdstr] ( identifier[path] ), identifier[runas] = identifier[user] , identifier[python_shell] = keyword[False] ) keyword[else] : keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fpopen] ( identifier[path] , literal[string] , identifier[mode] = literal[int] ) keyword[as] identifier[fp_] : identifier[fp_] . identifier[writelines] ( identifier[lines] ) identifier[ret] = identifier[__salt__] [ literal[string] ]( identifier[_get_cron_cmdstr] ( identifier[path] , identifier[user] ), identifier[python_shell] = keyword[False] ) identifier[os] . identifier[remove] ( identifier[path] ) keyword[return] identifier[ret]
def _write_cron_lines(user, lines): """ Takes a list of lines to be committed to a user's crontab and writes it """ lines = [salt.utils.stringutils.to_str(_l) for _l in lines] path = salt.utils.files.mkstemp() if _check_instance_uid_match(user) or __grains__.get('os_family') in ('Solaris', 'AIX'): # In some cases crontab command should be executed as user rather than root with salt.utils.files.fpopen(path, 'w+', uid=__salt__['file.user_to_uid'](user), mode=384) as fp_: fp_.writelines(lines) # depends on [control=['with'], data=['fp_']] ret = __salt__['cmd.run_all'](_get_cron_cmdstr(path), runas=user, python_shell=False) # depends on [control=['if'], data=[]] else: with salt.utils.files.fpopen(path, 'w+', mode=384) as fp_: fp_.writelines(lines) # depends on [control=['with'], data=['fp_']] ret = __salt__['cmd.run_all'](_get_cron_cmdstr(path, user), python_shell=False) os.remove(path) return ret
def drop():
    """ Drop the current table if it exists """
    # Make sure the connection is live before touching the schema.
    _State.connection()

    current = _State.table
    current.drop(checkfirst=True)
    _State.metadata.remove(current)
    _State.table = None
    _State.new_transaction()
def function[drop, parameter[]]: constant[ Drop the current table if it exists ] call[name[_State].connection, parameter[]] call[name[_State].table.drop, parameter[]] call[name[_State].metadata.remove, parameter[name[_State].table]] name[_State].table assign[=] constant[None] call[name[_State].new_transaction, parameter[]]
keyword[def] identifier[drop] (): literal[string] identifier[_State] . identifier[connection] () identifier[_State] . identifier[table] . identifier[drop] ( identifier[checkfirst] = keyword[True] ) identifier[_State] . identifier[metadata] . identifier[remove] ( identifier[_State] . identifier[table] ) identifier[_State] . identifier[table] = keyword[None] identifier[_State] . identifier[new_transaction] ()
def drop(): """ Drop the current table if it exists """ # Ensure the connection is up _State.connection() _State.table.drop(checkfirst=True) _State.metadata.remove(_State.table) _State.table = None _State.new_transaction()
def _determine_uses(self, included_files, forward_declarations):
    """Set up the use type of each symbol.

    Walks ``self.ast_list`` and records, for every included file and every
    forward declaration, how it is used: not at all (UNUSED), only by
    reference (USES_REFERENCE, a declaration suffices), or in a way that
    needs the full definition (USES_DECLARATION).

    Args:
        included_files: iterable of included filenames; each starts UNUSED.
        forward_declarations: mapping of qualified name -> declaration node.

    Returns:
        (file_uses, decl_uses): two dicts mapping name -> OR'd bitmask of
        USES_REFERENCE / USES_DECLARATION flags.
    """
    file_uses = dict.fromkeys(included_files, UNUSED)
    decl_uses = dict.fromkeys(forward_declarations, UNUSED)
    symbol_table = self.symbol_table

    # Register each forward declaration in the symbol table.  If the symbol
    # is already known, the declaration duplicates an existing one and is at
    # most a reference.
    for name, node in forward_declarations.items():
        try:
            symbol_table.lookup_symbol(node.name, node.namespace)
            decl_uses[name] |= USES_REFERENCE
        except symbols.Error:
            module = Module(name, None)
            symbol_table.add_symbol(node.name, node.namespace, node, module)

    def _do_lookup(name, namespace):
        """Mark the file declaring *name* as used via its declaration."""
        try:
            file_use_node = symbol_table.lookup_symbol(name, namespace)
        except symbols.Error:
            return
        name = file_use_node[1].filename
        file_uses[name] = file_uses.get(name, 0) | USES_DECLARATION

    def _add_declaration(name, namespace):
        """Flag the (namespace-qualified) forward declaration as used."""
        if not name:
            # Ignore anonymous struct. It is not standard, but we might as
            # well avoid crashing if it is easy.
            return
        names = [n for n in namespace if n is not None]
        if names:
            name = '::'.join(names) + '::' + name
        if name in decl_uses:
            decl_uses[name] |= USES_DECLARATION

    def _add_reference(name, namespace):
        """Record a by-reference use; a forward declaration may suffice."""
        try:
            file_use_node = symbol_table.lookup_symbol(name, namespace)
        except symbols.Error:
            return
        name = file_use_node[1].filename
        if file_use_node[1].ast_list is None:
            decl_uses[name] |= USES_REFERENCE
        elif name in file_uses:
            # enum and typedef can't be forward declared
            if isinstance(file_use_node[0], (ast.Enum, ast.Typedef)):
                file_uses[name] |= USES_DECLARATION
            else:
                file_uses[name] |= USES_REFERENCE

    def _add_use(node, namespace, name=''):
        """Record a full (definition-requiring) use of *node*."""
        # NOTE: Python 2 code — basestring covers both str and unicode.
        if isinstance(node, basestring):
            name = node
        elif isinstance(node, list):
            # name contains a list of tokens.
            name = '::'.join([n.name for n in name])
        # node is a Type so look for its symbol immediately.
        if name:
            _do_lookup(name, namespace)
            return
        # Try to search for the value of the variable declaration for any
        # symbols, such as `#define` values or other variable names which
        # may be included in other files.
        obj = getattr(node, 'initial_value', None)
        if obj:
            _do_lookup(obj, namespace)
        # If node is a VariableDeclaration, check if the variable type is
        # a symbol used in other includes.
        obj = getattr(node, 'type', None)
        if obj and isinstance(obj.name, basestring):
            _do_lookup(obj.name, namespace)
        if not isinstance(node, basestring):
            # Happens when variables are defined with inlined types, e.g.:
            # enum {...} variable;
            return

    def _add_variable(node, namespace, reference=False):
        """Record the use of a variable, choosing reference vs full use."""
        obj = node.type if isinstance(
            node, ast.VariableDeclaration) else node
        if obj.reference or obj.pointer or reference:
            _add_reference(obj.name, namespace)
        else:
            # Add a use for the variable declaration type as well as the
            # variable value.
            _add_use(obj.name, namespace)
            _add_use(node, namespace)
        # This needs to recurse when the node is a templated type.
        _add_template_use(obj.name, obj.templated_types, namespace,
                          reference)

    def _process_function(function, namespace):
        """Record uses from a function's signature (return type, params)."""
        # A declaration-only function (no body) needs at most references.
        reference = function.body is None
        if function.return_type:
            return_type = function.return_type
            _add_variable(return_type, namespace, reference)

        for s in function.specializations:
            _add_variable(s, namespace, not function.body)

        templated_types = function.templated_types or ()
        for p in function.parameters:
            node = p.type
            if node.name not in templated_types:
                if function.body and p.name:
                    # Assume that if the function has a body and a name
                    # the parameter type is really used.
                    # NOTE(nnorwitz): this is over-aggressive. It would be
                    # better to iterate through the body and determine
                    # actual uses based on local vars and data members
                    # used.
                    _add_use(node.name, namespace)
                elif (
                    p.default and
                    p.default[0].name != '0' and
                    p.default[0].name != 'NULL' and
                    p.default[0].name != 'nullptr'
                ):
                    # A non-null default value requires the full type.
                    _add_use(node.name, namespace)
                elif node.reference or node.pointer or reference:
                    _add_reference(node.name, namespace)
                else:
                    _add_use(node.name, namespace)
                _add_template_use(node.name, node.templated_types,
                                  namespace, reference)

    def _process_function_body(function, namespace):
        """Scan a function body's token stream for symbol uses."""
        previous = None
        save = namespace[:]
        for t in function.body:
            if t.token_type == tokenize.NAME:
                previous = t
                if not keywords.is_keyword(t.name):
                    # TODO(nnorwitz): handle static function calls.
                    # TODO(nnorwitz): handle using statements in file.
                    # TODO(nnorwitz): handle using statements in function.
                    # TODO(nnorwitz): handle namespace assignment in file.
                    _add_use(t.name, namespace)
            elif t.name == '::' and previous is not None:
                # Qualified name: descend into the spelled-out namespace.
                namespace.append(previous.name)
            elif t.name in (':', ';'):
                # Statement boundary: reset to the saved namespace.
                namespace = save[:]

    def _add_template_use(name, types, namespace, reference=False):
        """Record uses of a template's type arguments (recursively)."""
        for cls in types or ():
            if cls.pointer or cls.reference or reference:
                _add_reference(cls.name, namespace)
            elif name.endswith('_ptr'):
                # Special case templated classes that end w/_ptr.
                # These are things like auto_ptr which do
                # not require the class definition, only decl.
                _add_reference(cls.name, namespace)
            elif name.startswith('Q') and name.endswith('Pointer'):
                # Special case templated classes from the Qt framework.
                _add_reference(cls.name, namespace)
            else:
                _add_use(cls.name, namespace)
            _add_template_use(cls.name, cls.templated_types,
                              namespace, reference)

    def _process_types(nodes, namespace):
        """Record a variable-style use for every ast.Type in *nodes*."""
        for node in nodes:
            if isinstance(node, ast.Type):
                _add_variable(node, namespace)

    # Iterate through the source AST/tokens, marking each symbols use.
    # ast_seq is a worklist of node sequences; class/union bodies are pushed
    # onto it so nested declarations are visited too.
    ast_seq = [self.ast_list]
    namespace_stack = []
    while ast_seq:
        for node in ast_seq.pop():
            if isinstance(node, ast.VariableDeclaration):
                namespace = namespace_stack + node.namespace
                _add_variable(node, namespace)
            elif isinstance(node, ast.Function):
                namespace = namespace_stack + node.namespace
                _process_function(node, namespace)
                if node.body:
                    _process_function_body(node, namespace)
            elif isinstance(node, ast.Typedef):
                namespace = namespace_stack + node.namespace
                _process_types(node.alias, namespace)
            elif isinstance(node, ast.Friend):
                expr = node.expr
                namespace = namespace_stack + node.namespace
                if isinstance(expr, ast.Type):
                    # A friend declaration only needs the type's declaration.
                    _add_reference(expr.name, namespace)
                elif isinstance(expr, ast.Function):
                    _process_function(expr, namespace)
            elif isinstance(node, ast.Union) and node.body is not None:
                ast_seq.append(node.body)
            elif isinstance(node, ast.Class) and node.body is not None:
                _add_declaration(node.name, node.namespace)
                namespace = namespace_stack + node.namespace
                # Base classes are full uses: inheriting needs definitions.
                _add_template_use('', node.bases, namespace)
                ast_seq.append(node.body)
            elif isinstance(node, ast.Using):
                if node.names[0].name == 'namespace':
                    namespace_stack.append(node.names[1].name)

    return file_uses, decl_uses
def function[_determine_uses, parameter[self, included_files, forward_declarations]]: constant[Set up the use type of each symbol.] variable[file_uses] assign[=] call[name[dict].fromkeys, parameter[name[included_files], name[UNUSED]]] variable[decl_uses] assign[=] call[name[dict].fromkeys, parameter[name[forward_declarations], name[UNUSED]]] variable[symbol_table] assign[=] name[self].symbol_table for taget[tuple[[<ast.Name object at 0x7da1b0be2500>, <ast.Name object at 0x7da1b0be05b0>]]] in starred[call[name[forward_declarations].items, parameter[]]] begin[:] <ast.Try object at 0x7da1b0be0490> def function[_do_lookup, parameter[name, namespace]]: <ast.Try object at 0x7da1b0be2650> variable[name] assign[=] call[name[file_use_node]][constant[1]].filename call[name[file_uses]][name[name]] assign[=] binary_operation[call[name[file_uses].get, parameter[name[name], constant[0]]] <ast.BitOr object at 0x7da2590d6aa0> name[USES_DECLARATION]] def function[_add_declaration, parameter[name, namespace]]: if <ast.UnaryOp object at 0x7da1b0be1ae0> begin[:] return[None] variable[names] assign[=] <ast.ListComp object at 0x7da1b0be18a0> if name[names] begin[:] variable[name] assign[=] binary_operation[binary_operation[call[constant[::].join, parameter[name[names]]] + constant[::]] + name[name]] if compare[name[name] in name[decl_uses]] begin[:] <ast.AugAssign object at 0x7da1b0be0e50> def function[_add_reference, parameter[name, namespace]]: <ast.Try object at 0x7da1b0be1a20> variable[name] assign[=] call[name[file_use_node]][constant[1]].filename if compare[call[name[file_use_node]][constant[1]].ast_list is constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0be0b50> def function[_add_use, parameter[node, namespace, name]]: if call[name[isinstance], parameter[name[node], name[basestring]]] begin[:] variable[name] assign[=] name[node] if name[name] begin[:] call[name[_do_lookup], parameter[name[name], name[namespace]]] return[None] variable[obj] assign[=] call[name[getattr], 
parameter[name[node], constant[initial_value], constant[None]]] if name[obj] begin[:] call[name[_do_lookup], parameter[name[obj], name[namespace]]] variable[obj] assign[=] call[name[getattr], parameter[name[node], constant[type], constant[None]]] if <ast.BoolOp object at 0x7da1b0be1c00> begin[:] call[name[_do_lookup], parameter[name[obj].name, name[namespace]]] if <ast.UnaryOp object at 0x7da1b0be1bd0> begin[:] return[None] def function[_add_variable, parameter[node, namespace, reference]]: variable[obj] assign[=] <ast.IfExp object at 0x7da1b0ca4400> if <ast.BoolOp object at 0x7da1b0ca51e0> begin[:] call[name[_add_reference], parameter[name[obj].name, name[namespace]]] call[name[_add_template_use], parameter[name[obj].name, name[obj].templated_types, name[namespace], name[reference]]] def function[_process_function, parameter[function, namespace]]: variable[reference] assign[=] compare[name[function].body is constant[None]] if name[function].return_type begin[:] variable[return_type] assign[=] name[function].return_type call[name[_add_variable], parameter[name[return_type], name[namespace], name[reference]]] for taget[name[s]] in starred[name[function].specializations] begin[:] call[name[_add_variable], parameter[name[s], name[namespace], <ast.UnaryOp object at 0x7da1b0bc8c70>]] variable[templated_types] assign[=] <ast.BoolOp object at 0x7da1b0bc8910> for taget[name[p]] in starred[name[function].parameters] begin[:] variable[node] assign[=] name[p].type if compare[name[node].name <ast.NotIn object at 0x7da2590d7190> name[templated_types]] begin[:] if <ast.BoolOp object at 0x7da1b0bcaa10> begin[:] call[name[_add_use], parameter[name[node].name, name[namespace]]] call[name[_add_template_use], parameter[name[node].name, name[node].templated_types, name[namespace], name[reference]]] def function[_process_function_body, parameter[function, namespace]]: variable[previous] assign[=] constant[None] variable[save] assign[=] call[name[namespace]][<ast.Slice object at 
0x7da1b0bc9d50>] for taget[name[t]] in starred[name[function].body] begin[:] if compare[name[t].token_type equal[==] name[tokenize].NAME] begin[:] variable[previous] assign[=] name[t] if <ast.UnaryOp object at 0x7da1b0bca200> begin[:] call[name[_add_use], parameter[name[t].name, name[namespace]]] def function[_add_template_use, parameter[name, types, namespace, reference]]: for taget[name[cls]] in starred[<ast.BoolOp object at 0x7da1b0bcbb50>] begin[:] if <ast.BoolOp object at 0x7da1b0b59300> begin[:] call[name[_add_reference], parameter[name[cls].name, name[namespace]]] call[name[_add_template_use], parameter[name[cls].name, name[cls].templated_types, name[namespace], name[reference]]] def function[_process_types, parameter[nodes, namespace]]: for taget[name[node]] in starred[name[nodes]] begin[:] if call[name[isinstance], parameter[name[node], name[ast].Type]] begin[:] call[name[_add_variable], parameter[name[node], name[namespace]]] variable[ast_seq] assign[=] list[[<ast.Attribute object at 0x7da1b0b58610>]] variable[namespace_stack] assign[=] list[[]] while name[ast_seq] begin[:] for taget[name[node]] in starred[call[name[ast_seq].pop, parameter[]]] begin[:] if call[name[isinstance], parameter[name[node], name[ast].VariableDeclaration]] begin[:] variable[namespace] assign[=] binary_operation[name[namespace_stack] + name[node].namespace] call[name[_add_variable], parameter[name[node], name[namespace]]] return[tuple[[<ast.Name object at 0x7da1b0bd9c60>, <ast.Name object at 0x7da1b0bdac20>]]]
keyword[def] identifier[_determine_uses] ( identifier[self] , identifier[included_files] , identifier[forward_declarations] ): literal[string] identifier[file_uses] = identifier[dict] . identifier[fromkeys] ( identifier[included_files] , identifier[UNUSED] ) identifier[decl_uses] = identifier[dict] . identifier[fromkeys] ( identifier[forward_declarations] , identifier[UNUSED] ) identifier[symbol_table] = identifier[self] . identifier[symbol_table] keyword[for] identifier[name] , identifier[node] keyword[in] identifier[forward_declarations] . identifier[items] (): keyword[try] : identifier[symbol_table] . identifier[lookup_symbol] ( identifier[node] . identifier[name] , identifier[node] . identifier[namespace] ) identifier[decl_uses] [ identifier[name] ]|= identifier[USES_REFERENCE] keyword[except] identifier[symbols] . identifier[Error] : identifier[module] = identifier[Module] ( identifier[name] , keyword[None] ) identifier[symbol_table] . identifier[add_symbol] ( identifier[node] . identifier[name] , identifier[node] . identifier[namespace] , identifier[node] , identifier[module] ) keyword[def] identifier[_do_lookup] ( identifier[name] , identifier[namespace] ): keyword[try] : identifier[file_use_node] = identifier[symbol_table] . identifier[lookup_symbol] ( identifier[name] , identifier[namespace] ) keyword[except] identifier[symbols] . identifier[Error] : keyword[return] identifier[name] = identifier[file_use_node] [ literal[int] ]. identifier[filename] identifier[file_uses] [ identifier[name] ]= identifier[file_uses] . 
identifier[get] ( identifier[name] , literal[int] )| identifier[USES_DECLARATION] keyword[def] identifier[_add_declaration] ( identifier[name] , identifier[namespace] ): keyword[if] keyword[not] identifier[name] : keyword[return] identifier[names] =[ identifier[n] keyword[for] identifier[n] keyword[in] identifier[namespace] keyword[if] identifier[n] keyword[is] keyword[not] keyword[None] ] keyword[if] identifier[names] : identifier[name] = literal[string] . identifier[join] ( identifier[names] )+ literal[string] + identifier[name] keyword[if] identifier[name] keyword[in] identifier[decl_uses] : identifier[decl_uses] [ identifier[name] ]|= identifier[USES_DECLARATION] keyword[def] identifier[_add_reference] ( identifier[name] , identifier[namespace] ): keyword[try] : identifier[file_use_node] = identifier[symbol_table] . identifier[lookup_symbol] ( identifier[name] , identifier[namespace] ) keyword[except] identifier[symbols] . identifier[Error] : keyword[return] identifier[name] = identifier[file_use_node] [ literal[int] ]. identifier[filename] keyword[if] identifier[file_use_node] [ literal[int] ]. identifier[ast_list] keyword[is] keyword[None] : identifier[decl_uses] [ identifier[name] ]|= identifier[USES_REFERENCE] keyword[elif] identifier[name] keyword[in] identifier[file_uses] : keyword[if] identifier[isinstance] ( identifier[file_use_node] [ literal[int] ],( identifier[ast] . identifier[Enum] , identifier[ast] . identifier[Typedef] )): identifier[file_uses] [ identifier[name] ]|= identifier[USES_DECLARATION] keyword[else] : identifier[file_uses] [ identifier[name] ]|= identifier[USES_REFERENCE] keyword[def] identifier[_add_use] ( identifier[node] , identifier[namespace] , identifier[name] = literal[string] ): keyword[if] identifier[isinstance] ( identifier[node] , identifier[basestring] ): identifier[name] = identifier[node] keyword[elif] identifier[isinstance] ( identifier[node] , identifier[list] ): identifier[name] = literal[string] . 
identifier[join] ([ identifier[n] . identifier[name] keyword[for] identifier[n] keyword[in] identifier[name] ]) keyword[if] identifier[name] : identifier[_do_lookup] ( identifier[name] , identifier[namespace] ) keyword[return] identifier[obj] = identifier[getattr] ( identifier[node] , literal[string] , keyword[None] ) keyword[if] identifier[obj] : identifier[_do_lookup] ( identifier[obj] , identifier[namespace] ) identifier[obj] = identifier[getattr] ( identifier[node] , literal[string] , keyword[None] ) keyword[if] identifier[obj] keyword[and] identifier[isinstance] ( identifier[obj] . identifier[name] , identifier[basestring] ): identifier[_do_lookup] ( identifier[obj] . identifier[name] , identifier[namespace] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[node] , identifier[basestring] ): keyword[return] keyword[def] identifier[_add_variable] ( identifier[node] , identifier[namespace] , identifier[reference] = keyword[False] ): identifier[obj] = identifier[node] . identifier[type] keyword[if] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[VariableDeclaration] ) keyword[else] identifier[node] keyword[if] identifier[obj] . identifier[reference] keyword[or] identifier[obj] . identifier[pointer] keyword[or] identifier[reference] : identifier[_add_reference] ( identifier[obj] . identifier[name] , identifier[namespace] ) keyword[else] : identifier[_add_use] ( identifier[obj] . identifier[name] , identifier[namespace] ) identifier[_add_use] ( identifier[node] , identifier[namespace] ) identifier[_add_template_use] ( identifier[obj] . identifier[name] , identifier[obj] . identifier[templated_types] , identifier[namespace] , identifier[reference] ) keyword[def] identifier[_process_function] ( identifier[function] , identifier[namespace] ): identifier[reference] = identifier[function] . identifier[body] keyword[is] keyword[None] keyword[if] identifier[function] . 
identifier[return_type] : identifier[return_type] = identifier[function] . identifier[return_type] identifier[_add_variable] ( identifier[return_type] , identifier[namespace] , identifier[reference] ) keyword[for] identifier[s] keyword[in] identifier[function] . identifier[specializations] : identifier[_add_variable] ( identifier[s] , identifier[namespace] , keyword[not] identifier[function] . identifier[body] ) identifier[templated_types] = identifier[function] . identifier[templated_types] keyword[or] () keyword[for] identifier[p] keyword[in] identifier[function] . identifier[parameters] : identifier[node] = identifier[p] . identifier[type] keyword[if] identifier[node] . identifier[name] keyword[not] keyword[in] identifier[templated_types] : keyword[if] identifier[function] . identifier[body] keyword[and] identifier[p] . identifier[name] : identifier[_add_use] ( identifier[node] . identifier[name] , identifier[namespace] ) keyword[elif] ( identifier[p] . identifier[default] keyword[and] identifier[p] . identifier[default] [ literal[int] ]. identifier[name] != literal[string] keyword[and] identifier[p] . identifier[default] [ literal[int] ]. identifier[name] != literal[string] keyword[and] identifier[p] . identifier[default] [ literal[int] ]. identifier[name] != literal[string] ): identifier[_add_use] ( identifier[node] . identifier[name] , identifier[namespace] ) keyword[elif] identifier[node] . identifier[reference] keyword[or] identifier[node] . identifier[pointer] keyword[or] identifier[reference] : identifier[_add_reference] ( identifier[node] . identifier[name] , identifier[namespace] ) keyword[else] : identifier[_add_use] ( identifier[node] . identifier[name] , identifier[namespace] ) identifier[_add_template_use] ( identifier[node] . identifier[name] , identifier[node] . 
identifier[templated_types] , identifier[namespace] , identifier[reference] ) keyword[def] identifier[_process_function_body] ( identifier[function] , identifier[namespace] ): identifier[previous] = keyword[None] identifier[save] = identifier[namespace] [:] keyword[for] identifier[t] keyword[in] identifier[function] . identifier[body] : keyword[if] identifier[t] . identifier[token_type] == identifier[tokenize] . identifier[NAME] : identifier[previous] = identifier[t] keyword[if] keyword[not] identifier[keywords] . identifier[is_keyword] ( identifier[t] . identifier[name] ): identifier[_add_use] ( identifier[t] . identifier[name] , identifier[namespace] ) keyword[elif] identifier[t] . identifier[name] == literal[string] keyword[and] identifier[previous] keyword[is] keyword[not] keyword[None] : identifier[namespace] . identifier[append] ( identifier[previous] . identifier[name] ) keyword[elif] identifier[t] . identifier[name] keyword[in] ( literal[string] , literal[string] ): identifier[namespace] = identifier[save] [:] keyword[def] identifier[_add_template_use] ( identifier[name] , identifier[types] , identifier[namespace] , identifier[reference] = keyword[False] ): keyword[for] identifier[cls] keyword[in] identifier[types] keyword[or] (): keyword[if] identifier[cls] . identifier[pointer] keyword[or] identifier[cls] . identifier[reference] keyword[or] identifier[reference] : identifier[_add_reference] ( identifier[cls] . identifier[name] , identifier[namespace] ) keyword[elif] identifier[name] . identifier[endswith] ( literal[string] ): identifier[_add_reference] ( identifier[cls] . identifier[name] , identifier[namespace] ) keyword[elif] identifier[name] . identifier[startswith] ( literal[string] ) keyword[and] identifier[name] . identifier[endswith] ( literal[string] ): identifier[_add_reference] ( identifier[cls] . identifier[name] , identifier[namespace] ) keyword[else] : identifier[_add_use] ( identifier[cls] . 
identifier[name] , identifier[namespace] ) identifier[_add_template_use] ( identifier[cls] . identifier[name] , identifier[cls] . identifier[templated_types] , identifier[namespace] , identifier[reference] ) keyword[def] identifier[_process_types] ( identifier[nodes] , identifier[namespace] ): keyword[for] identifier[node] keyword[in] identifier[nodes] : keyword[if] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Type] ): identifier[_add_variable] ( identifier[node] , identifier[namespace] ) identifier[ast_seq] =[ identifier[self] . identifier[ast_list] ] identifier[namespace_stack] =[] keyword[while] identifier[ast_seq] : keyword[for] identifier[node] keyword[in] identifier[ast_seq] . identifier[pop] (): keyword[if] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[VariableDeclaration] ): identifier[namespace] = identifier[namespace_stack] + identifier[node] . identifier[namespace] identifier[_add_variable] ( identifier[node] , identifier[namespace] ) keyword[elif] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Function] ): identifier[namespace] = identifier[namespace_stack] + identifier[node] . identifier[namespace] identifier[_process_function] ( identifier[node] , identifier[namespace] ) keyword[if] identifier[node] . identifier[body] : identifier[_process_function_body] ( identifier[node] , identifier[namespace] ) keyword[elif] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Typedef] ): identifier[namespace] = identifier[namespace_stack] + identifier[node] . identifier[namespace] identifier[_process_types] ( identifier[node] . identifier[alias] , identifier[namespace] ) keyword[elif] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Friend] ): identifier[expr] = identifier[node] . identifier[expr] identifier[namespace] = identifier[namespace_stack] + identifier[node] . 
identifier[namespace] keyword[if] identifier[isinstance] ( identifier[expr] , identifier[ast] . identifier[Type] ): identifier[_add_reference] ( identifier[expr] . identifier[name] , identifier[namespace] ) keyword[elif] identifier[isinstance] ( identifier[expr] , identifier[ast] . identifier[Function] ): identifier[_process_function] ( identifier[expr] , identifier[namespace] ) keyword[elif] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Union] ) keyword[and] identifier[node] . identifier[body] keyword[is] keyword[not] keyword[None] : identifier[ast_seq] . identifier[append] ( identifier[node] . identifier[body] ) keyword[elif] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Class] ) keyword[and] identifier[node] . identifier[body] keyword[is] keyword[not] keyword[None] : identifier[_add_declaration] ( identifier[node] . identifier[name] , identifier[node] . identifier[namespace] ) identifier[namespace] = identifier[namespace_stack] + identifier[node] . identifier[namespace] identifier[_add_template_use] ( literal[string] , identifier[node] . identifier[bases] , identifier[namespace] ) identifier[ast_seq] . identifier[append] ( identifier[node] . identifier[body] ) keyword[elif] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Using] ): keyword[if] identifier[node] . identifier[names] [ literal[int] ]. identifier[name] == literal[string] : identifier[namespace_stack] . identifier[append] ( identifier[node] . identifier[names] [ literal[int] ]. identifier[name] ) keyword[return] identifier[file_uses] , identifier[decl_uses]
def _determine_uses(self, included_files, forward_declarations): """Set up the use type of each symbol.""" file_uses = dict.fromkeys(included_files, UNUSED) decl_uses = dict.fromkeys(forward_declarations, UNUSED) symbol_table = self.symbol_table for (name, node) in forward_declarations.items(): try: symbol_table.lookup_symbol(node.name, node.namespace) decl_uses[name] |= USES_REFERENCE # depends on [control=['try'], data=[]] except symbols.Error: module = Module(name, None) symbol_table.add_symbol(node.name, node.namespace, node, module) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] def _do_lookup(name, namespace): try: file_use_node = symbol_table.lookup_symbol(name, namespace) # depends on [control=['try'], data=[]] except symbols.Error: return # depends on [control=['except'], data=[]] name = file_use_node[1].filename file_uses[name] = file_uses.get(name, 0) | USES_DECLARATION def _add_declaration(name, namespace): if not name: # Ignore anonymous struct. It is not standard, but we might as # well avoid crashing if it is easy. 
return # depends on [control=['if'], data=[]] names = [n for n in namespace if n is not None] if names: name = '::'.join(names) + '::' + name # depends on [control=['if'], data=[]] if name in decl_uses: decl_uses[name] |= USES_DECLARATION # depends on [control=['if'], data=['name', 'decl_uses']] def _add_reference(name, namespace): try: file_use_node = symbol_table.lookup_symbol(name, namespace) # depends on [control=['try'], data=[]] except symbols.Error: return # depends on [control=['except'], data=[]] name = file_use_node[1].filename if file_use_node[1].ast_list is None: decl_uses[name] |= USES_REFERENCE # depends on [control=['if'], data=[]] elif name in file_uses: # enum and typedef can't be forward declared if isinstance(file_use_node[0], (ast.Enum, ast.Typedef)): file_uses[name] |= USES_DECLARATION # depends on [control=['if'], data=[]] else: file_uses[name] |= USES_REFERENCE # depends on [control=['if'], data=['name', 'file_uses']] def _add_use(node, namespace, name=''): if isinstance(node, basestring): name = node # depends on [control=['if'], data=[]] elif isinstance(node, list): # name contains a list of tokens. name = '::'.join([n.name for n in name]) # depends on [control=['if'], data=[]] # node is a Type so look for its symbol immediately. if name: _do_lookup(name, namespace) return # depends on [control=['if'], data=[]] # Try to search for the value of the variable declaration for any # symbols, such as `#define` values or other variable names which # may be included in other files. obj = getattr(node, 'initial_value', None) if obj: _do_lookup(obj, namespace) # depends on [control=['if'], data=[]] # If node is a VariableDeclaration, check if the variable type is # a symbol used in other includes. 
obj = getattr(node, 'type', None) if obj and isinstance(obj.name, basestring): _do_lookup(obj.name, namespace) # depends on [control=['if'], data=[]] if not isinstance(node, basestring): # Happens when variables are defined with inlined types, e.g.: # enum {...} variable; return # depends on [control=['if'], data=[]] def _add_variable(node, namespace, reference=False): obj = node.type if isinstance(node, ast.VariableDeclaration) else node if obj.reference or obj.pointer or reference: _add_reference(obj.name, namespace) # depends on [control=['if'], data=[]] else: # Add a use for the variable declaration type as well as the # variable value. _add_use(obj.name, namespace) _add_use(node, namespace) # This needs to recurse when the node is a templated type. _add_template_use(obj.name, obj.templated_types, namespace, reference) def _process_function(function, namespace): reference = function.body is None if function.return_type: return_type = function.return_type _add_variable(return_type, namespace, reference) # depends on [control=['if'], data=[]] for s in function.specializations: _add_variable(s, namespace, not function.body) # depends on [control=['for'], data=['s']] templated_types = function.templated_types or () for p in function.parameters: node = p.type if node.name not in templated_types: if function.body and p.name: # Assume that if the function has a body and a name # the parameter type is really used. # NOTE(nnorwitz): this is over-aggressive. It would be # better to iterate through the body and determine # actual uses based on local vars and data members # used. 
_add_use(node.name, namespace) # depends on [control=['if'], data=[]] elif p.default and p.default[0].name != '0' and (p.default[0].name != 'NULL') and (p.default[0].name != 'nullptr'): _add_use(node.name, namespace) # depends on [control=['if'], data=[]] elif node.reference or node.pointer or reference: _add_reference(node.name, namespace) # depends on [control=['if'], data=[]] else: _add_use(node.name, namespace) _add_template_use(node.name, node.templated_types, namespace, reference) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']] def _process_function_body(function, namespace): previous = None save = namespace[:] for t in function.body: if t.token_type == tokenize.NAME: previous = t if not keywords.is_keyword(t.name): # TODO(nnorwitz): handle static function calls. # TODO(nnorwitz): handle using statements in file. # TODO(nnorwitz): handle using statements in function. # TODO(nnorwitz): handle namespace assignment in file. _add_use(t.name, namespace) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif t.name == '::' and previous is not None: namespace.append(previous.name) # depends on [control=['if'], data=[]] elif t.name in (':', ';'): namespace = save[:] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t']] def _add_template_use(name, types, namespace, reference=False): for cls in types or (): if cls.pointer or cls.reference or reference: _add_reference(cls.name, namespace) # depends on [control=['if'], data=[]] elif name.endswith('_ptr'): # Special case templated classes that end w/_ptr. # These are things like auto_ptr which do # not require the class definition, only decl. _add_reference(cls.name, namespace) # depends on [control=['if'], data=[]] elif name.startswith('Q') and name.endswith('Pointer'): # Special case templated classes from the Qt framework. 
_add_reference(cls.name, namespace) # depends on [control=['if'], data=[]] else: _add_use(cls.name, namespace) _add_template_use(cls.name, cls.templated_types, namespace, reference) # depends on [control=['for'], data=['cls']] def _process_types(nodes, namespace): for node in nodes: if isinstance(node, ast.Type): _add_variable(node, namespace) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] # Iterate through the source AST/tokens, marking each symbols use. ast_seq = [self.ast_list] namespace_stack = [] while ast_seq: for node in ast_seq.pop(): if isinstance(node, ast.VariableDeclaration): namespace = namespace_stack + node.namespace _add_variable(node, namespace) # depends on [control=['if'], data=[]] elif isinstance(node, ast.Function): namespace = namespace_stack + node.namespace _process_function(node, namespace) if node.body: _process_function_body(node, namespace) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(node, ast.Typedef): namespace = namespace_stack + node.namespace _process_types(node.alias, namespace) # depends on [control=['if'], data=[]] elif isinstance(node, ast.Friend): expr = node.expr namespace = namespace_stack + node.namespace if isinstance(expr, ast.Type): _add_reference(expr.name, namespace) # depends on [control=['if'], data=[]] elif isinstance(expr, ast.Function): _process_function(expr, namespace) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(node, ast.Union) and node.body is not None: ast_seq.append(node.body) # depends on [control=['if'], data=[]] elif isinstance(node, ast.Class) and node.body is not None: _add_declaration(node.name, node.namespace) namespace = namespace_stack + node.namespace _add_template_use('', node.bases, namespace) ast_seq.append(node.body) # depends on [control=['if'], data=[]] elif isinstance(node, ast.Using): if node.names[0].name == 'namespace': 
namespace_stack.append(node.names[1].name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] # depends on [control=['while'], data=[]] return (file_uses, decl_uses)
def _set_stats_group(self, v, load=False):
  """
  Setter method for stats_group, mapped from YANG variable /mpls_state/ldp/ldp_session/session_ldp_stats/protocol_errors/stats_group (mpls-ldp-stats-error-group)
  If this variable is read-only (config: false) in the source YANG file, then _set_stats_group is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_stats_group() directly.

  YANG Description: Describes if the stats are for a given session,
global-all or global-since-last-clear
  """
  # stats-group is a list key: once this entry belongs to an instantiated
  # list (it has a parent), the key may only be written during load.
  parent = getattr(self, "_parent", None)
  if parent is not None and load is False:
    raise AttributeError("Cannot set keys directly when" +
                           " within an instantiated list")

  # Values wrapped by pyangbind carry a _utype coercion helper; unwrap
  # to the underlying YANG type before validation.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Coerce v into the generated enumeration type; valid values are
    # restricted to the three dict keys below (per-session-stats,
    # global-stats-all, global-stats-since-last-clear).
    t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'global-stats-all': {'value': 1}, u'per-session-stats': {'value': 0}, u'global-stats-since-last-clear': {'value': 2}},), is_leaf=True, yang_name="stats-group", rest_name="stats-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='mpls-ldp-stats-error-group', is_config=False)
  except (TypeError, ValueError):
    # Re-raise as a ValueError carrying a structured description of the
    # expected generated type, matching pyangbind's error convention.
    raise ValueError({
        'error-string': """stats_group must be of a type compatible with mpls-ldp-stats-error-group""",
        'defined-type': "brocade-mpls-operational:mpls-ldp-stats-error-group",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'global-stats-all': {'value': 1}, u'per-session-stats': {'value': 0}, u'global-stats-since-last-clear': {'value': 2}},), is_leaf=True, yang_name="stats-group", rest_name="stats-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='mpls-ldp-stats-error-group', is_config=False)""",
      })

  # Store in the name-mangled private slot read by the generated getter.
  self.__stats_group = t
  if hasattr(self, '_set'):
    self._set()
def function[_set_stats_group, parameter[self, v, load]]: constant[ Setter method for stats_group, mapped from YANG variable /mpls_state/ldp/ldp_session/session_ldp_stats/protocol_errors/stats_group (mpls-ldp-stats-error-group) If this variable is read-only (config: false) in the source YANG file, then _set_stats_group is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_stats_group() directly. YANG Description: Describes if the stats are for a given session, global-all or global-since-last-clear ] variable[parent] assign[=] call[name[getattr], parameter[name[self], constant[_parent], constant[None]]] if <ast.BoolOp object at 0x7da207f03d30> begin[:] <ast.Raise object at 0x7da207f00c40> if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da207f03f40> name[self].__stats_group assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_stats_group] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] identifier[parent] = identifier[getattr] ( identifier[self] , literal[string] , keyword[None] ) keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] keyword[and] identifier[load] keyword[is] keyword[False] : keyword[raise] identifier[AttributeError] ( literal[string] + literal[string] ) keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }},), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[is_keyval] = keyword[True] , identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[False] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__stats_group] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_stats_group(self, v, load=False): """ Setter method for stats_group, mapped from YANG variable /mpls_state/ldp/ldp_session/session_ldp_stats/protocol_errors/stats_group (mpls-ldp-stats-error-group) If this variable is read-only (config: false) in the source YANG file, then _set_stats_group is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_stats_group() directly. YANG Description: Describes if the stats are for a given session, global-all or global-since-last-clear """ parent = getattr(self, '_parent', None) if parent is not None and load is False: raise AttributeError('Cannot set keys directly when' + ' within an instantiated list') # depends on [control=['if'], data=[]] if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'global-stats-all': {'value': 1}, u'per-session-stats': {'value': 0}, u'global-stats-since-last-clear': {'value': 2}}), is_leaf=True, yang_name='stats-group', rest_name='stats-group', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='mpls-ldp-stats-error-group', is_config=False) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'stats_group must be of a type compatible with mpls-ldp-stats-error-group', 'defined-type': 'brocade-mpls-operational:mpls-ldp-stats-error-group', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'global-stats-all\': {\'value\': 1}, u\'per-session-stats\': {\'value\': 0}, u\'global-stats-since-last-clear\': {\'value\': 2}},), is_leaf=True, yang_name="stats-group", rest_name="stats-group", parent=self, 
path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace=\'urn:brocade.com:mgmt:brocade-mpls-operational\', defining_module=\'brocade-mpls-operational\', yang_type=\'mpls-ldp-stats-error-group\', is_config=False)'}) # depends on [control=['except'], data=[]] self.__stats_group = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def _modified_shepp_logan_ellipsoids(ellipsoids): """Modify ellipsoids to give the modified Shepp-Logan phantom. Works for both 2d and 3d. """ intensities = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] # Add minimal numbers to ensure that the result is nowhere negative. # This is needed due to numerical issues. intensities[2] += 5e-17 intensities[3] += 5e-17 assert len(ellipsoids) == len(intensities) for ellipsoid, intensity in zip(ellipsoids, intensities): ellipsoid[0] = intensity
def function[_modified_shepp_logan_ellipsoids, parameter[ellipsoids]]: constant[Modify ellipsoids to give the modified Shepp-Logan phantom. Works for both 2d and 3d. ] variable[intensities] assign[=] list[[<ast.Constant object at 0x7da1b1e5d480>, <ast.UnaryOp object at 0x7da1b1e5d210>, <ast.UnaryOp object at 0x7da1b1e5e1d0>, <ast.UnaryOp object at 0x7da1b1e5e170>, <ast.Constant object at 0x7da1b1ea0eb0>, <ast.Constant object at 0x7da1b1ea2140>, <ast.Constant object at 0x7da1b1ea3490>, <ast.Constant object at 0x7da1b1ea1b40>, <ast.Constant object at 0x7da1b1ea21d0>, <ast.Constant object at 0x7da1b1ea32e0>]] <ast.AugAssign object at 0x7da1b1ea2ad0> <ast.AugAssign object at 0x7da1b1ea1990> assert[compare[call[name[len], parameter[name[ellipsoids]]] equal[==] call[name[len], parameter[name[intensities]]]]] for taget[tuple[[<ast.Name object at 0x7da1b1ea3850>, <ast.Name object at 0x7da1b1ea2620>]]] in starred[call[name[zip], parameter[name[ellipsoids], name[intensities]]]] begin[:] call[name[ellipsoid]][constant[0]] assign[=] name[intensity]
keyword[def] identifier[_modified_shepp_logan_ellipsoids] ( identifier[ellipsoids] ): literal[string] identifier[intensities] =[ literal[int] ,- literal[int] ,- literal[int] ,- literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ] identifier[intensities] [ literal[int] ]+= literal[int] identifier[intensities] [ literal[int] ]+= literal[int] keyword[assert] identifier[len] ( identifier[ellipsoids] )== identifier[len] ( identifier[intensities] ) keyword[for] identifier[ellipsoid] , identifier[intensity] keyword[in] identifier[zip] ( identifier[ellipsoids] , identifier[intensities] ): identifier[ellipsoid] [ literal[int] ]= identifier[intensity]
def _modified_shepp_logan_ellipsoids(ellipsoids): """Modify ellipsoids to give the modified Shepp-Logan phantom. Works for both 2d and 3d. """ intensities = [1.0, -0.8, -0.2, -0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] # Add minimal numbers to ensure that the result is nowhere negative. # This is needed due to numerical issues. intensities[2] += 5e-17 intensities[3] += 5e-17 assert len(ellipsoids) == len(intensities) for (ellipsoid, intensity) in zip(ellipsoids, intensities): ellipsoid[0] = intensity # depends on [control=['for'], data=[]]
def log_time(logger):
    """Decorator factory: time each call of the decorated function.

    Parameters:
        logger: logger object handed through to ``_log_time``.

    Returns:
        A decorator whose wrapper records wall-clock start/end timestamps
        around the call and reports them via ``_log_time``.
    """
    def decorator(func):
        @wraps(func)
        def timed_call(*args, **kwargs):
            began = time.time()
            outcome = func(*args, **kwargs)
            finished = time.time()
            # Report timing only after a successful call, mirroring the
            # original contract (exceptions propagate without logging).
            _log_time(logger, func.__name__, began, finished)
            return outcome
        return timed_call
    return decorator
def function[log_time, parameter[logger]]: constant[ Decorator to log the execution time of a function ] def function[decorator, parameter[func]]: def function[wrapper, parameter[]]: variable[start] assign[=] call[name[time].time, parameter[]] variable[result] assign[=] call[name[func], parameter[<ast.Starred object at 0x7da1b032f100>]] variable[end] assign[=] call[name[time].time, parameter[]] call[name[_log_time], parameter[name[logger], name[func].__name__, name[start], name[end]]] return[name[result]] return[name[wrapper]] return[name[decorator]]
keyword[def] identifier[log_time] ( identifier[logger] ): literal[string] keyword[def] identifier[decorator] ( identifier[func] ): @ identifier[wraps] ( identifier[func] ) keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): identifier[start] = identifier[time] . identifier[time] () identifier[result] = identifier[func] (* identifier[args] ,** identifier[kwargs] ) identifier[end] = identifier[time] . identifier[time] () identifier[_log_time] ( identifier[logger] , identifier[func] . identifier[__name__] , identifier[start] , identifier[end] ) keyword[return] identifier[result] keyword[return] identifier[wrapper] keyword[return] identifier[decorator]
def log_time(logger): """ Decorator to log the execution time of a function """ def decorator(func): @wraps(func) def wrapper(*args, **kwargs): start = time.time() result = func(*args, **kwargs) end = time.time() _log_time(logger, func.__name__, start, end) return result return wrapper return decorator
def list_spiders(self, project):
    """
    Lists all known spiders for a specific project.
    First class, maps to Scrapyd's list spiders endpoint.

    :param project: name of the Scrapyd project to query.
    :return: list of spider names reported by the endpoint.
    """
    endpoint = self._build_url(constants.LIST_SPIDERS_ENDPOINT)
    response = self.client.get(
        endpoint, params={'project': project}, timeout=self.timeout)
    return response['spiders']
def function[list_spiders, parameter[self, project]]: constant[ Lists all known spiders for a specific project. First class, maps to Scrapyd's list spiders endpoint. ] variable[url] assign[=] call[name[self]._build_url, parameter[name[constants].LIST_SPIDERS_ENDPOINT]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b063ce80>], [<ast.Name object at 0x7da1b063ee00>]] variable[json] assign[=] call[name[self].client.get, parameter[name[url]]] return[call[name[json]][constant[spiders]]]
keyword[def] identifier[list_spiders] ( identifier[self] , identifier[project] ): literal[string] identifier[url] = identifier[self] . identifier[_build_url] ( identifier[constants] . identifier[LIST_SPIDERS_ENDPOINT] ) identifier[params] ={ literal[string] : identifier[project] } identifier[json] = identifier[self] . identifier[client] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] , identifier[timeout] = identifier[self] . identifier[timeout] ) keyword[return] identifier[json] [ literal[string] ]
def list_spiders(self, project): """ Lists all known spiders for a specific project. First class, maps to Scrapyd's list spiders endpoint. """ url = self._build_url(constants.LIST_SPIDERS_ENDPOINT) params = {'project': project} json = self.client.get(url, params=params, timeout=self.timeout) return json['spiders']
def bootstrap(self, address, port):
    """
    Initialize bootstrap to specific **IP address** and **port**

    :param address: Ip address to bootstrap
    :type address: str
    :param port: Port to bootstrap
    :type port: int
    :raises: :py:exc:`nano.rpc.RPCException`

    >>> rpc.bootstrap(address="::ffff:138.201.94.249", port="7075")
    True
    """
    # Validate/normalize both arguments before building the RPC payload.
    payload = {
        "address": self._process_value(address, 'ipaddr'),
        "port": self._process_value(port, 'int'),
    }
    return 'success' in self.call('bootstrap', payload)
def function[bootstrap, parameter[self, address, port]]: constant[ Initialize bootstrap to specific **IP address** and **port** :param address: Ip address to bootstrap :type address: str :param port: Port to bootstrap :type port: int :raises: :py:exc:`nano.rpc.RPCException` >>> rpc.bootstrap(address="::ffff:138.201.94.249", port="7075") True ] variable[address] assign[=] call[name[self]._process_value, parameter[name[address], constant[ipaddr]]] variable[port] assign[=] call[name[self]._process_value, parameter[name[port], constant[int]]] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b25391e0>, <ast.Constant object at 0x7da1b2539300>], [<ast.Name object at 0x7da1b2539c00>, <ast.Name object at 0x7da1b25d2bc0>]] variable[resp] assign[=] call[name[self].call, parameter[constant[bootstrap], name[payload]]] return[compare[constant[success] in name[resp]]]
keyword[def] identifier[bootstrap] ( identifier[self] , identifier[address] , identifier[port] ): literal[string] identifier[address] = identifier[self] . identifier[_process_value] ( identifier[address] , literal[string] ) identifier[port] = identifier[self] . identifier[_process_value] ( identifier[port] , literal[string] ) identifier[payload] ={ literal[string] : identifier[address] , literal[string] : identifier[port] } identifier[resp] = identifier[self] . identifier[call] ( literal[string] , identifier[payload] ) keyword[return] literal[string] keyword[in] identifier[resp]
def bootstrap(self, address, port): """ Initialize bootstrap to specific **IP address** and **port** :param address: Ip address to bootstrap :type address: str :param port: Port to bootstrap :type port: int :raises: :py:exc:`nano.rpc.RPCException` >>> rpc.bootstrap(address="::ffff:138.201.94.249", port="7075") True """ address = self._process_value(address, 'ipaddr') port = self._process_value(port, 'int') payload = {'address': address, 'port': port} resp = self.call('bootstrap', payload) return 'success' in resp
def upgrade_plan(self, subid, vpsplanid, params=None):
    '''/v1/server/upgrade_plan
    POST - account
    Upgrade the plan of a virtual machine. The virtual
    machine will be rebooted upon a successful upgrade.

    Link: https://www.vultr.com/api/#server_upgrade_plan
    '''
    # Fold the required identifiers into any caller-supplied params.
    merged = update_params(params, {
        'SUBID': subid,
        'VPSPLANID': vpsplanid,
    })
    return self.request('/v1/server/upgrade_plan', merged, 'POST')
def function[upgrade_plan, parameter[self, subid, vpsplanid, params]]: constant[ /v1/server/upgrade_plan POST - account Upgrade the plan of a virtual machine. The virtual machine will be rebooted upon a successful upgrade. Link: https://www.vultr.com/api/#server_upgrade_plan ] variable[params] assign[=] call[name[update_params], parameter[name[params], dictionary[[<ast.Constant object at 0x7da1b1392bf0>, <ast.Constant object at 0x7da1b1391b40>], [<ast.Name object at 0x7da1b13904f0>, <ast.Name object at 0x7da1b13914b0>]]]] return[call[name[self].request, parameter[constant[/v1/server/upgrade_plan], name[params], constant[POST]]]]
keyword[def] identifier[upgrade_plan] ( identifier[self] , identifier[subid] , identifier[vpsplanid] , identifier[params] = keyword[None] ): literal[string] identifier[params] = identifier[update_params] ( identifier[params] ,{ literal[string] : identifier[subid] , literal[string] : identifier[vpsplanid] }) keyword[return] identifier[self] . identifier[request] ( literal[string] , identifier[params] , literal[string] )
def upgrade_plan(self, subid, vpsplanid, params=None): """ /v1/server/upgrade_plan POST - account Upgrade the plan of a virtual machine. The virtual machine will be rebooted upon a successful upgrade. Link: https://www.vultr.com/api/#server_upgrade_plan """ params = update_params(params, {'SUBID': subid, 'VPSPLANID': vpsplanid}) return self.request('/v1/server/upgrade_plan', params, 'POST')
def add(self, dic):
    '''adds a dict as pair

    Args:
        dic (dict): key and value; each key is validated against
            ``self.keyWord`` before its value is wrapped and stored.
    '''
    for key, value in dic.items():
        checkKey(key, self.keyWord)
        self._add([Pair(key, StringSingle(value))], self.d)
def function[add, parameter[self, dic]]: constant[adds a dict as pair Args: dic (dict): key and value ] for taget[name[kw]] in starred[name[dic]] begin[:] call[name[checkKey], parameter[name[kw], name[self].keyWord]] call[name[self]._add, parameter[list[[<ast.Call object at 0x7da20c6a9ed0>]], name[self].d]]
keyword[def] identifier[add] ( identifier[self] , identifier[dic] ): literal[string] keyword[for] identifier[kw] keyword[in] identifier[dic] : identifier[checkKey] ( identifier[kw] , identifier[self] . identifier[keyWord] ) identifier[self] . identifier[_add] ([ identifier[Pair] ( identifier[kw] , identifier[StringSingle] ( identifier[dic] [ identifier[kw] ]))], identifier[self] . identifier[d] )
def add(self, dic): """adds a dict as pair Args: dic (dict): key and value """ for kw in dic: checkKey(kw, self.keyWord) self._add([Pair(kw, StringSingle(dic[kw]))], self.d) # depends on [control=['for'], data=['kw']]
def generate_module_table_header(modules):
    """ Generate header with module table entries for builtin modules.

    :param List[(module_name, obj_module, enabled_define)] modules: module defs
    :return: None
    """
    # One conditional extern/#define stanza per registered module.
    entry_template = (
        "#if ({enabled_define})\n"
        " extern const struct _mp_obj_module_t {obj_module};\n"
        " #define {mod_def} {{ MP_ROM_QSTR({module_name}), MP_ROM_PTR(&{obj_module}) }},\n"
        "#else\n"
        " #define {mod_def}\n"
        "#endif\n"
    )

    print("// Automatically generated by makemoduledefs.py.\n")
    mod_defs = []
    for module_name, obj_module, enabled_define in modules:
        mod_def = "MODULE_DEF_{}".format(module_name.upper())
        mod_defs.append(mod_def)
        print(entry_template.format(module_name=module_name,
                                    obj_module=obj_module,
                                    enabled_define=enabled_define,
                                    mod_def=mod_def))

    # Collect every entry behind a single registration macro.
    print("\n#define MICROPY_REGISTERED_MODULES \\")
    for mod_def in mod_defs:
        print(" {mod_def} \\".format(mod_def=mod_def))
    print("// MICROPY_REGISTERED_MODULES")
def function[generate_module_table_header, parameter[modules]]: constant[ Generate header with module table entries for builtin modules. :param List[(module_name, obj_module, enabled_define)] modules: module defs :return: None ] variable[mod_defs] assign[=] list[[]] call[name[print], parameter[constant[// Automatically generated by makemoduledefs.py. ]]] for taget[tuple[[<ast.Name object at 0x7da1b1ef2e90>, <ast.Name object at 0x7da1b1ef0220>, <ast.Name object at 0x7da1b1ef0e80>]]] in starred[name[modules]] begin[:] variable[mod_def] assign[=] call[constant[MODULE_DEF_{}].format, parameter[call[name[module_name].upper, parameter[]]]] call[name[mod_defs].append, parameter[name[mod_def]]] call[name[print], parameter[call[constant[#if ({enabled_define}) extern const struct _mp_obj_module_t {obj_module}; #define {mod_def} {{ MP_ROM_QSTR({module_name}), MP_ROM_PTR(&{obj_module}) }}, #else #define {mod_def} #endif ].format, parameter[]]]] call[name[print], parameter[constant[ #define MICROPY_REGISTERED_MODULES \]]] for taget[name[mod_def]] in starred[name[mod_defs]] begin[:] call[name[print], parameter[call[constant[ {mod_def} \].format, parameter[]]]] call[name[print], parameter[constant[// MICROPY_REGISTERED_MODULES]]]
keyword[def] identifier[generate_module_table_header] ( identifier[modules] ): literal[string] identifier[mod_defs] =[] identifier[print] ( literal[string] ) keyword[for] identifier[module_name] , identifier[obj_module] , identifier[enabled_define] keyword[in] identifier[modules] : identifier[mod_def] = literal[string] . identifier[format] ( identifier[module_name] . identifier[upper] ()) identifier[mod_defs] . identifier[append] ( identifier[mod_def] ) identifier[print] (( literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] ). identifier[format] ( identifier[module_name] = identifier[module_name] , identifier[obj_module] = identifier[obj_module] , identifier[enabled_define] = identifier[enabled_define] , identifier[mod_def] = identifier[mod_def] ) ) identifier[print] ( literal[string] ) keyword[for] identifier[mod_def] keyword[in] identifier[mod_defs] : identifier[print] ( literal[string] . identifier[format] ( identifier[mod_def] = identifier[mod_def] )) identifier[print] ( literal[string] )
def generate_module_table_header(modules): """ Generate header with module table entries for builtin modules. :param List[(module_name, obj_module, enabled_define)] modules: module defs :return: None """ # Print header file for all external modules. mod_defs = [] print('// Automatically generated by makemoduledefs.py.\n') for (module_name, obj_module, enabled_define) in modules: mod_def = 'MODULE_DEF_{}'.format(module_name.upper()) mod_defs.append(mod_def) print('#if ({enabled_define})\n extern const struct _mp_obj_module_t {obj_module};\n #define {mod_def} {{ MP_ROM_QSTR({module_name}), MP_ROM_PTR(&{obj_module}) }},\n#else\n #define {mod_def}\n#endif\n'.format(module_name=module_name, obj_module=obj_module, enabled_define=enabled_define, mod_def=mod_def)) # depends on [control=['for'], data=[]] print('\n#define MICROPY_REGISTERED_MODULES \\') for mod_def in mod_defs: print(' {mod_def} \\'.format(mod_def=mod_def)) # depends on [control=['for'], data=['mod_def']] print('// MICROPY_REGISTERED_MODULES')
def mouseMoveEvent(self, event):
    """
    Overloads the mouse move event to ignore the event when \
    the scene is in view mode.

    :param event <QMouseMoveEvent>
    """
    event.setAccepted(False)

    # A pressed hotspot swallows the move outright.
    if self._hotspotPressed:
        event.accept()
        return

    # Suppress dragging while locked, while already suppressing, or
    # while the scene is panning/connecting.
    scene = self.scene()
    scene_busy = bool(scene and (scene.inViewMode() or scene.isConnecting()))
    if self.isLocked() or self._ignoreMouseEvents or scene_busy:
        event.ignore()
        self._ignoreMouseEvents = True
        return

    # Normal case: accept and defer to the base implementation.
    event.accept()
    super(XNode, self).mouseMoveEvent(event)
def function[mouseMoveEvent, parameter[self, event]]: constant[ Overloads the mouse move event to ignore the event when the scene is in view mode. :param event <QMouseMoveEvent> ] call[name[event].setAccepted, parameter[constant[False]]] if name[self]._hotspotPressed begin[:] call[name[event].accept, parameter[]] return[None] variable[scene] assign[=] call[name[self].scene, parameter[]] if <ast.BoolOp object at 0x7da18f09cfd0> begin[:] call[name[event].ignore, parameter[]] name[self]._ignoreMouseEvents assign[=] constant[True] return[None] call[name[event].accept, parameter[]] call[call[name[super], parameter[name[XNode], name[self]]].mouseMoveEvent, parameter[name[event]]]
keyword[def] identifier[mouseMoveEvent] ( identifier[self] , identifier[event] ): literal[string] identifier[event] . identifier[setAccepted] ( keyword[False] ) keyword[if] identifier[self] . identifier[_hotspotPressed] : identifier[event] . identifier[accept] () keyword[return] identifier[scene] = identifier[self] . identifier[scene] () keyword[if] ( identifier[self] . identifier[isLocked] () keyword[or] identifier[self] . identifier[_ignoreMouseEvents] keyword[or] ( identifier[scene] keyword[and] ( identifier[scene] . identifier[inViewMode] () keyword[or] identifier[scene] . identifier[isConnecting] ()))): identifier[event] . identifier[ignore] () identifier[self] . identifier[_ignoreMouseEvents] = keyword[True] keyword[return] identifier[event] . identifier[accept] () identifier[super] ( identifier[XNode] , identifier[self] ). identifier[mouseMoveEvent] ( identifier[event] )
def mouseMoveEvent(self, event): """ Overloads the mouse move event to ignore the event when the scene is in view mode. :param event <QMouseMoveEvent> """ event.setAccepted(False) if self._hotspotPressed: event.accept() return # depends on [control=['if'], data=[]] # ignore events when the scene is in view mode scene = self.scene() if self.isLocked() or self._ignoreMouseEvents or (scene and (scene.inViewMode() or scene.isConnecting())): event.ignore() self._ignoreMouseEvents = True return # depends on [control=['if'], data=[]] # call the base method event.accept() super(XNode, self).mouseMoveEvent(event)
def zero_pad(fp, data_size, pad_size):
    # type: (BinaryIO, int, int) -> None
    '''
    A function to write padding out from data_size up to pad_size
    efficiently.

    Parameters:
     fp - The file object to use to write padding out to.
     data_size - The current size of the data.
     pad_size - The boundary size of data to pad out to.
    Returns:
     Nothing.
    '''
    remainder = data_size % pad_size
    if remainder == 0:
        # Already on a pad boundary; nothing to write.
        return
    gap = pad_size - remainder
    # Seek past all but the last pad byte and write a single zero; the
    # file layer fills the skipped range with zeros (sparse on file
    # systems that support it), avoiding a large in-memory buffer.
    fp.seek(gap - 1, os.SEEK_CUR)
    fp.write(b'\x00')
def function[zero_pad, parameter[fp, data_size, pad_size]]: constant[ A function to write padding out from data_size up to pad_size efficiently. Parameters: fp - The file object to use to write padding out to. data_size - The current size of the data. pad_size - The boundary size of data to pad out to. Returns: Nothing. ] variable[padbytes] assign[=] binary_operation[name[pad_size] - binary_operation[name[data_size] <ast.Mod object at 0x7da2590d6920> name[pad_size]]] if compare[name[padbytes] equal[==] name[pad_size]] begin[:] return[None] call[name[fp].seek, parameter[binary_operation[name[padbytes] - constant[1]], name[os].SEEK_CUR]] call[name[fp].write, parameter[constant[b'\x00']]]
keyword[def] identifier[zero_pad] ( identifier[fp] , identifier[data_size] , identifier[pad_size] ): literal[string] identifier[padbytes] = identifier[pad_size] -( identifier[data_size] % identifier[pad_size] ) keyword[if] identifier[padbytes] == identifier[pad_size] : keyword[return] identifier[fp] . identifier[seek] ( identifier[padbytes] - literal[int] , identifier[os] . identifier[SEEK_CUR] ) identifier[fp] . identifier[write] ( literal[string] )
def zero_pad(fp, data_size, pad_size): # type: (BinaryIO, int, int) -> None '\n A function to write padding out from data_size up to pad_size\n efficiently.\n\n Parameters:\n fp - The file object to use to write padding out to.\n data_size - The current size of the data.\n pad_size - The boundary size of data to pad out to.\n Returns:\n Nothing.\n ' padbytes = pad_size - data_size % pad_size if padbytes == pad_size: # Nothing to pad, get out. return # depends on [control=['if'], data=[]] fp.seek(padbytes - 1, os.SEEK_CUR) fp.write(b'\x00')
def _base_request(self, method):
    """Factory method for generating the base XML requests.

    Builds an element named after *method*, stamps the AnetApi schema
    namespace on it and attaches the stored authentication node.
    """
    root = E.Element(method)
    root.append(self.client_auth)
    root.set('xmlns', 'AnetApi/xml/v1/schema/AnetApiSchema.xsd')
    return root
def function[_base_request, parameter[self, method]]: constant[Factory method for generating the base XML requests.] variable[request] assign[=] call[name[E].Element, parameter[name[method]]] call[name[request].set, parameter[constant[xmlns], constant[AnetApi/xml/v1/schema/AnetApiSchema.xsd]]] call[name[request].append, parameter[name[self].client_auth]] return[name[request]]
keyword[def] identifier[_base_request] ( identifier[self] , identifier[method] ): literal[string] identifier[request] = identifier[E] . identifier[Element] ( identifier[method] ) identifier[request] . identifier[set] ( literal[string] , literal[string] ) identifier[request] . identifier[append] ( identifier[self] . identifier[client_auth] ) keyword[return] identifier[request]
def _base_request(self, method): """Factory method for generating the base XML requests.""" request = E.Element(method) request.set('xmlns', 'AnetApi/xml/v1/schema/AnetApiSchema.xsd') request.append(self.client_auth) return request
def _request_reports(self, domains):
    """Sends one ranking request per domain to the base endpoint.

    The previous docstring was copied from a different helper and
    described parameters (``resource_param_name``, ``resources``,
    ``endpoint_name``) that this method does not have; it is corrected
    here to match the actual signature.

    Args:
        domains: iterable of domain name strings to query.

    Returns:
        A list of raw (non-JSON-decoded) responses, one per domain.
    """
    # Each domain becomes its own query-parameter dict for multi_get.
    params = [{'url': domain} for domain in domains]
    return self._requests.multi_get(
        self.BASE_URL, query_params=params, to_json=False)
def function[_request_reports, parameter[self, domains]]: constant[Sends multiples requests for the resources to a particular endpoint. Args: resource_param_name: a string name of the resource parameter. resources: list of of the resources. endpoint_name: AlexaRankingApi endpoint URL suffix. Returns: A list of the responses. ] variable[params] assign[=] <ast.ListComp object at 0x7da20e74ba30> variable[responses] assign[=] call[name[self]._requests.multi_get, parameter[name[self].BASE_URL]] return[name[responses]]
keyword[def] identifier[_request_reports] ( identifier[self] , identifier[domains] ): literal[string] identifier[params] =[{ literal[string] : identifier[domain] } keyword[for] identifier[domain] keyword[in] identifier[domains] ] identifier[responses] = identifier[self] . identifier[_requests] . identifier[multi_get] ( identifier[self] . identifier[BASE_URL] , identifier[query_params] = identifier[params] , identifier[to_json] = keyword[False] ) keyword[return] identifier[responses]
def _request_reports(self, domains): """Sends multiples requests for the resources to a particular endpoint. Args: resource_param_name: a string name of the resource parameter. resources: list of of the resources. endpoint_name: AlexaRankingApi endpoint URL suffix. Returns: A list of the responses. """ params = [{'url': domain} for domain in domains] responses = self._requests.multi_get(self.BASE_URL, query_params=params, to_json=False) return responses
def make_path(*path_or_str_or_segments):
    """
    Normalize the arguments into a single Path.

    :param path_or_str_or_segments: nothing (returns ROOT_PATH), a single
        Path (returned unchanged), a single path string (parsed), or
        several such segments, each converted and concatenated onto
        ROOT_PATH.
    :return:
    :rtype: cifparser.path.Path
    :raises ValueError: if a string segment cannot be parsed as a path.
    :raises TypeError: if a single argument is neither a Path nor a str.
    """
    if len(path_or_str_or_segments) == 0:
        return ROOT_PATH
    elif len(path_or_str_or_segments) == 1:
        single_item = path_or_str_or_segments[0]
        if isinstance(single_item, Path):
            return single_item
        if isinstance(single_item, str):
            try:
                # parseAll=True: the whole string must be a valid path.
                return path_parser.parseString(single_item, True).asList()[0]
            except Exception as e:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit are no longer swallowed; still raises
                # ValueError for backwards compatibility, now with context.
                raise ValueError(
                    "cannot parse path {0!r}: {1}".format(single_item, e))
        raise TypeError(
            "cannot make a path from {0!r}".format(single_item))
    else:
        # Multiple segments: convert each recursively and fold them
        # together starting from the root path.
        segments = path_or_str_or_segments
        return sum((make_path(segment) for segment in segments), ROOT_PATH)
def function[make_path, parameter[]]: constant[ :param path_or_str_or_segments: :return: :rtype: cifparser.path.Path ] if compare[call[name[len], parameter[name[path_or_str_or_segments]]] equal[==] constant[0]] begin[:] return[name[ROOT_PATH]]
keyword[def] identifier[make_path] (* identifier[path_or_str_or_segments] ): literal[string] keyword[if] identifier[len] ( identifier[path_or_str_or_segments] )== literal[int] : keyword[return] identifier[ROOT_PATH] keyword[elif] identifier[len] ( identifier[path_or_str_or_segments] )== literal[int] : identifier[single_item] = identifier[path_or_str_or_segments] [ literal[int] ] keyword[if] identifier[isinstance] ( identifier[single_item] , identifier[Path] ): keyword[return] identifier[single_item] keyword[if] identifier[isinstance] ( identifier[single_item] , identifier[str] ): keyword[try] : keyword[return] identifier[path_parser] . identifier[parseString] ( identifier[single_item] , keyword[True] ). identifier[asList] ()[ literal[int] ] keyword[except] : keyword[raise] identifier[ValueError] () keyword[raise] identifier[TypeError] () keyword[else] : identifier[segments] = identifier[path_or_str_or_segments] keyword[return] identifier[sum] ( identifier[map] ( keyword[lambda] identifier[x] : identifier[make_path] ( identifier[x] ), identifier[segments] ), identifier[ROOT_PATH] )
def make_path(*path_or_str_or_segments): """ :param path_or_str_or_segments: :return: :rtype: cifparser.path.Path """ if len(path_or_str_or_segments) == 0: return ROOT_PATH # depends on [control=['if'], data=[]] elif len(path_or_str_or_segments) == 1: single_item = path_or_str_or_segments[0] if isinstance(single_item, Path): return single_item # depends on [control=['if'], data=[]] if isinstance(single_item, str): try: return path_parser.parseString(single_item, True).asList()[0] # depends on [control=['try'], data=[]] except: raise ValueError() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] raise TypeError() # depends on [control=['if'], data=[]] else: segments = path_or_str_or_segments return sum(map(lambda x: make_path(x), segments), ROOT_PATH)
def add_ti_txt(self, lines, overwrite=False):
    """Add given TI-TXT string `lines`. Set `overwrite` to ``True`` to allow
    already added data to be overwritten.

    The TI-TXT format consists of ``@address`` section headers (hex),
    lines of space-separated hex byte pairs, and a final ``q``
    terminator.  Raises :class:`Error` on any malformed line, on data
    appearing before a section header or after the terminator, and on a
    missing terminator.
    """
    # Load address for the next data line.  ``None`` until an
    # ``@address`` header is seen, and reset to ``None`` after a short
    # data line (which must be the last line of its section).
    address = None
    eof_found = False

    for line in StringIO(lines):
        # Abort if data is found after end of file.
        if eof_found:
            raise Error("bad file terminator")

        line = line.strip()

        if len(line) < 1:
            raise Error("bad line length")

        if line[0] == 'q':
            # File terminator; any subsequent line triggers the
            # "bad file terminator" check above.
            eof_found = True
        elif line[0] == '@':
            # Section header: the rest of the line is a hex load address.
            try:
                address = int(line[1:], 16)
            except ValueError:
                raise Error("bad section address")
        else:
            # Try to decode the data.
            try:
                data = bytearray(binascii.unhexlify(line.replace(' ', '')))
            except (TypeError, binascii.Error):
                raise Error("bad data")

            size = len(data)

            # Check that there are correct number of bytes per
            # line. There should TI_TXT_BYTES_PER_LINE. Only
            # exception is last line of section which may be
            # shorter.
            if size > TI_TXT_BYTES_PER_LINE:
                raise Error("bad line length")

            if address is None:
                raise Error("missing section address")

            self._segments.add(_Segment(address,
                                        address + size,
                                        data,
                                        self.word_size_bytes),
                               overwrite)

            if size == TI_TXT_BYTES_PER_LINE:
                # Full line: the next data line continues contiguously.
                address += size
            else:
                # Short line ends the section; a new ``@`` header is
                # required before any further data is accepted.
                address = None

    if not eof_found:
        raise Error("missing file terminator")
def function[add_ti_txt, parameter[self, lines, overwrite]]: constant[Add given TI-TXT string `lines`. Set `overwrite` to ``True`` to allow already added data to be overwritten. ] variable[address] assign[=] constant[None] variable[eof_found] assign[=] constant[False] for taget[name[line]] in starred[call[name[StringIO], parameter[name[lines]]]] begin[:] if name[eof_found] begin[:] <ast.Raise object at 0x7da18f810fd0> variable[line] assign[=] call[name[line].strip, parameter[]] if compare[call[name[len], parameter[name[line]]] less[<] constant[1]] begin[:] <ast.Raise object at 0x7da18f811cf0> if compare[call[name[line]][constant[0]] equal[==] constant[q]] begin[:] variable[eof_found] assign[=] constant[True] if <ast.UnaryOp object at 0x7da18f811a80> begin[:] <ast.Raise object at 0x7da18f811ba0>
keyword[def] identifier[add_ti_txt] ( identifier[self] , identifier[lines] , identifier[overwrite] = keyword[False] ): literal[string] identifier[address] = keyword[None] identifier[eof_found] = keyword[False] keyword[for] identifier[line] keyword[in] identifier[StringIO] ( identifier[lines] ): keyword[if] identifier[eof_found] : keyword[raise] identifier[Error] ( literal[string] ) identifier[line] = identifier[line] . identifier[strip] () keyword[if] identifier[len] ( identifier[line] )< literal[int] : keyword[raise] identifier[Error] ( literal[string] ) keyword[if] identifier[line] [ literal[int] ]== literal[string] : identifier[eof_found] = keyword[True] keyword[elif] identifier[line] [ literal[int] ]== literal[string] : keyword[try] : identifier[address] = identifier[int] ( identifier[line] [ literal[int] :], literal[int] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[Error] ( literal[string] ) keyword[else] : keyword[try] : identifier[data] = identifier[bytearray] ( identifier[binascii] . identifier[unhexlify] ( identifier[line] . identifier[replace] ( literal[string] , literal[string] ))) keyword[except] ( identifier[TypeError] , identifier[binascii] . identifier[Error] ): keyword[raise] identifier[Error] ( literal[string] ) identifier[size] = identifier[len] ( identifier[data] ) keyword[if] identifier[size] > identifier[TI_TXT_BYTES_PER_LINE] : keyword[raise] identifier[Error] ( literal[string] ) keyword[if] identifier[address] keyword[is] keyword[None] : keyword[raise] identifier[Error] ( literal[string] ) identifier[self] . identifier[_segments] . identifier[add] ( identifier[_Segment] ( identifier[address] , identifier[address] + identifier[size] , identifier[data] , identifier[self] . 
identifier[word_size_bytes] ), identifier[overwrite] ) keyword[if] identifier[size] == identifier[TI_TXT_BYTES_PER_LINE] : identifier[address] += identifier[size] keyword[else] : identifier[address] = keyword[None] keyword[if] keyword[not] identifier[eof_found] : keyword[raise] identifier[Error] ( literal[string] )
def add_ti_txt(self, lines, overwrite=False): """Add given TI-TXT string `lines`. Set `overwrite` to ``True`` to allow already added data to be overwritten. """ address = None eof_found = False for line in StringIO(lines): # Abort if data is found after end of file. if eof_found: raise Error('bad file terminator') # depends on [control=['if'], data=[]] line = line.strip() if len(line) < 1: raise Error('bad line length') # depends on [control=['if'], data=[]] if line[0] == 'q': eof_found = True # depends on [control=['if'], data=[]] elif line[0] == '@': try: address = int(line[1:], 16) # depends on [control=['try'], data=[]] except ValueError: raise Error('bad section address') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: # Try to decode the data. try: data = bytearray(binascii.unhexlify(line.replace(' ', ''))) # depends on [control=['try'], data=[]] except (TypeError, binascii.Error): raise Error('bad data') # depends on [control=['except'], data=[]] size = len(data) # Check that there are correct number of bytes per # line. There should TI_TXT_BYTES_PER_LINE. Only # exception is last line of section which may be # shorter. if size > TI_TXT_BYTES_PER_LINE: raise Error('bad line length') # depends on [control=['if'], data=[]] if address is None: raise Error('missing section address') # depends on [control=['if'], data=[]] self._segments.add(_Segment(address, address + size, data, self.word_size_bytes), overwrite) if size == TI_TXT_BYTES_PER_LINE: address += size # depends on [control=['if'], data=['size']] else: address = None # depends on [control=['for'], data=['line']] if not eof_found: raise Error('missing file terminator') # depends on [control=['if'], data=[]]
def FindMessageTypeByName(self, full_name):
    """Return the descriptor for the message type named *full_name*.

    Args:
      full_name: The full name of the descriptor to load.

    Returns:
      The descriptor registered under the normalized name, loading the
      file containing the symbol first on a cache miss.
    """
    normalized = _NormalizeFullyQualifiedName(full_name)
    try:
        return self._descriptors[normalized]
    except KeyError:
        # Cache miss: load the file defining the symbol, which should
        # register the descriptor, then look it up again.
        self.FindFileContainingSymbol(normalized)
        return self._descriptors[normalized]
def function[FindMessageTypeByName, parameter[self, full_name]]: constant[Loads the named descriptor from the pool. Args: full_name: The full name of the descriptor to load. Returns: The descriptor for the named type. ] variable[full_name] assign[=] call[name[_NormalizeFullyQualifiedName], parameter[name[full_name]]] if compare[name[full_name] <ast.NotIn object at 0x7da2590d7190> name[self]._descriptors] begin[:] call[name[self].FindFileContainingSymbol, parameter[name[full_name]]] return[call[name[self]._descriptors][name[full_name]]]
keyword[def] identifier[FindMessageTypeByName] ( identifier[self] , identifier[full_name] ): literal[string] identifier[full_name] = identifier[_NormalizeFullyQualifiedName] ( identifier[full_name] ) keyword[if] identifier[full_name] keyword[not] keyword[in] identifier[self] . identifier[_descriptors] : identifier[self] . identifier[FindFileContainingSymbol] ( identifier[full_name] ) keyword[return] identifier[self] . identifier[_descriptors] [ identifier[full_name] ]
def FindMessageTypeByName(self, full_name): """Loads the named descriptor from the pool. Args: full_name: The full name of the descriptor to load. Returns: The descriptor for the named type. """ full_name = _NormalizeFullyQualifiedName(full_name) if full_name not in self._descriptors: self.FindFileContainingSymbol(full_name) # depends on [control=['if'], data=['full_name']] return self._descriptors[full_name]
def process(c, request, name=None):
    """
    process uses the current request to determine which menus should be
    visible, which are selected, etc.
    """
    # menus must be loaded & ordered before any processing happens
    c.load_menus()
    c.sort_menus()

    if name is None:
        # special case: no menu named, so process every registered menu
        return {menu_name: c.process(request, menu_name)
                for menu_name in c.items}

    if name not in c.items:
        return []

    # work on a deep copy so per-request state never leaks into c.items
    items = copy.deepcopy(c.items[name])

    best_match = None
    for item in items:
        item.process(request)
        if not item.visible:
            continue
        item.selected = False
        # among visible items matching the URL, keep the longest match
        if item.match_url(request) and (
                best_match is None or len(best_match.url) < len(item.url)):
            best_match = item

    if best_match is not None:
        best_match.selected = True

    # only visible items are returned
    visible = [item for item in items if item.visible]

    # optionally propagate 'selected' up to parents whose descendant
    # is the selected menu item
    if getattr(settings, 'MENU_SELECT_PARENTS', False):
        def has_selected_descendant(item):
            return any(child.selected or has_selected_descendant(child)
                       for child in item.children)

        for item in visible:
            if has_selected_descendant(item):
                item.selected = True

    return visible
def function[process, parameter[c, request, name]]: constant[ process uses the current request to determine which menus should be visible, which are selected, etc. ] call[name[c].load_menus, parameter[]] call[name[c].sort_menus, parameter[]] if compare[name[name] is constant[None]] begin[:] variable[items] assign[=] dictionary[[], []] for taget[name[name]] in starred[name[c].items] begin[:] call[name[items]][name[name]] assign[=] call[name[c].process, parameter[name[request], name[name]]] return[name[items]] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[c].items] begin[:] return[list[[]]] variable[items] assign[=] call[name[copy].deepcopy, parameter[call[name[c].items][name[name]]]] variable[curitem] assign[=] constant[None] for taget[name[item]] in starred[name[items]] begin[:] call[name[item].process, parameter[name[request]]] if name[item].visible begin[:] name[item].selected assign[=] constant[False] if call[name[item].match_url, parameter[name[request]]] begin[:] if <ast.BoolOp object at 0x7da18bcca530> begin[:] variable[curitem] assign[=] name[item] if compare[name[curitem] is_not constant[None]] begin[:] name[curitem].selected assign[=] constant[True] variable[visible] assign[=] <ast.ListComp object at 0x7da18bcc8c70> if call[name[getattr], parameter[name[settings], constant[MENU_SELECT_PARENTS], constant[False]]] begin[:] def function[is_child_selected, parameter[item]]: for taget[name[child]] in starred[name[item].children] begin[:] if <ast.BoolOp object at 0x7da18bccada0> begin[:] return[constant[True]] for taget[name[item]] in starred[name[visible]] begin[:] if call[name[is_child_selected], parameter[name[item]]] begin[:] name[item].selected assign[=] constant[True] return[name[visible]]
keyword[def] identifier[process] ( identifier[c] , identifier[request] , identifier[name] = keyword[None] ): literal[string] identifier[c] . identifier[load_menus] () identifier[c] . identifier[sort_menus] () keyword[if] identifier[name] keyword[is] keyword[None] : identifier[items] ={} keyword[for] identifier[name] keyword[in] identifier[c] . identifier[items] : identifier[items] [ identifier[name] ]= identifier[c] . identifier[process] ( identifier[request] , identifier[name] ) keyword[return] identifier[items] keyword[if] identifier[name] keyword[not] keyword[in] identifier[c] . identifier[items] : keyword[return] [] identifier[items] = identifier[copy] . identifier[deepcopy] ( identifier[c] . identifier[items] [ identifier[name] ]) identifier[curitem] = keyword[None] keyword[for] identifier[item] keyword[in] identifier[items] : identifier[item] . identifier[process] ( identifier[request] ) keyword[if] identifier[item] . identifier[visible] : identifier[item] . identifier[selected] = keyword[False] keyword[if] identifier[item] . identifier[match_url] ( identifier[request] ): keyword[if] identifier[curitem] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[curitem] . identifier[url] )< identifier[len] ( identifier[item] . identifier[url] ): identifier[curitem] = identifier[item] keyword[if] identifier[curitem] keyword[is] keyword[not] keyword[None] : identifier[curitem] . identifier[selected] = keyword[True] identifier[visible] =[ identifier[item] keyword[for] identifier[item] keyword[in] identifier[items] keyword[if] identifier[item] . identifier[visible] ] keyword[if] identifier[getattr] ( identifier[settings] , literal[string] , keyword[False] ): keyword[def] identifier[is_child_selected] ( identifier[item] ): keyword[for] identifier[child] keyword[in] identifier[item] . identifier[children] : keyword[if] identifier[child] . 
identifier[selected] keyword[or] identifier[is_child_selected] ( identifier[child] ): keyword[return] keyword[True] keyword[for] identifier[item] keyword[in] identifier[visible] : keyword[if] identifier[is_child_selected] ( identifier[item] ): identifier[item] . identifier[selected] = keyword[True] keyword[return] identifier[visible]
def process(c, request, name=None): """ process uses the current request to determine which menus should be visible, which are selected, etc. """ # make sure we're loaded & sorted c.load_menus() c.sort_menus() if name is None: # special case, process all menus items = {} for name in c.items: items[name] = c.process(request, name) # depends on [control=['for'], data=['name']] return items # depends on [control=['if'], data=['name']] if name not in c.items: return [] # depends on [control=['if'], data=[]] items = copy.deepcopy(c.items[name]) curitem = None for item in items: item.process(request) if item.visible: item.selected = False if item.match_url(request): if curitem is None or len(curitem.url) < len(item.url): curitem = item # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] if curitem is not None: curitem.selected = True # depends on [control=['if'], data=['curitem']] # return only visible items visible = [item for item in items if item.visible] # determine if we should apply 'selected' to parents when one of their # children is the 'selected' menu if getattr(settings, 'MENU_SELECT_PARENTS', False): def is_child_selected(item): for child in item.children: if child.selected or is_child_selected(child): return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child']] for item in visible: if is_child_selected(item): item.selected = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]] return visible
def get_list_privileges(self, username):
    """Get the list of all privileges granted to given user.

    :param username: the username to get privileges of
    :type username: str
    :returns: all privileges granted to given user
    :rtype: list of dictionaries

    :Example:

    ::

        >> privileges = client.get_list_privileges('user1')
        >> privileges
        [{u'privilege': u'WRITE', u'database': u'db1'},
         {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'},
         {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}]
    """
    # quote the identifier so unusual usernames survive the query
    query_text = "SHOW GRANTS FOR {0}".format(quote_ident(username))
    result = self.query(query_text)
    return list(result.get_points())
def function[get_list_privileges, parameter[self, username]]: constant[Get the list of all privileges granted to given user. :param username: the username to get privileges of :type username: str :returns: all privileges granted to given user :rtype: list of dictionaries :Example: :: >> privileges = client.get_list_privileges('user1') >> privileges [{u'privilege': u'WRITE', u'database': u'db1'}, {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'}, {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}] ] variable[text] assign[=] call[constant[SHOW GRANTS FOR {0}].format, parameter[call[name[quote_ident], parameter[name[username]]]]] return[call[name[list], parameter[call[call[name[self].query, parameter[name[text]]].get_points, parameter[]]]]]
keyword[def] identifier[get_list_privileges] ( identifier[self] , identifier[username] ): literal[string] identifier[text] = literal[string] . identifier[format] ( identifier[quote_ident] ( identifier[username] )) keyword[return] identifier[list] ( identifier[self] . identifier[query] ( identifier[text] ). identifier[get_points] ())
def get_list_privileges(self, username): """Get the list of all privileges granted to given user. :param username: the username to get privileges of :type username: str :returns: all privileges granted to given user :rtype: list of dictionaries :Example: :: >> privileges = client.get_list_privileges('user1') >> privileges [{u'privilege': u'WRITE', u'database': u'db1'}, {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'}, {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}] """ text = 'SHOW GRANTS FOR {0}'.format(quote_ident(username)) return list(self.query(text).get_points())
def endGroup(self):
    """
    Closes the currently open group of xml data, delegating to the
    custom format when one is configured and otherwise falling back
    to the inherited behavior.
    """
    custom = self._customFormat
    if custom:
        custom.endGroup()
    else:
        super(XSettings, self).endGroup()
def function[endGroup, parameter[self]]: constant[ Ends the current group of xml data. ] if name[self]._customFormat begin[:] call[name[self]._customFormat.endGroup, parameter[]]
keyword[def] identifier[endGroup] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_customFormat] : identifier[self] . identifier[_customFormat] . identifier[endGroup] () keyword[else] : identifier[super] ( identifier[XSettings] , identifier[self] ). identifier[endGroup] ()
def endGroup(self): """ Ends the current group of xml data. """ if self._customFormat: self._customFormat.endGroup() # depends on [control=['if'], data=[]] else: super(XSettings, self).endGroup()
def _encode_long(self, val): ''' encodes an integer of 8*self.chunklen[0] bits using the specified alphabet ''' return ''.join([ self.alphabet[(val//len(self.alphabet)**i) % len(self.alphabet)] for i in reversed(range(self.chunklen[1])) ])
def function[_encode_long, parameter[self, val]]: constant[ encodes an integer of 8*self.chunklen[0] bits using the specified alphabet ] return[call[constant[].join, parameter[<ast.ListComp object at 0x7da1b021da20>]]]
keyword[def] identifier[_encode_long] ( identifier[self] , identifier[val] ): literal[string] keyword[return] literal[string] . identifier[join] ([ identifier[self] . identifier[alphabet] [( identifier[val] // identifier[len] ( identifier[self] . identifier[alphabet] )** identifier[i] )% identifier[len] ( identifier[self] . identifier[alphabet] )] keyword[for] identifier[i] keyword[in] identifier[reversed] ( identifier[range] ( identifier[self] . identifier[chunklen] [ literal[int] ])) ])
def _encode_long(self, val): """ encodes an integer of 8*self.chunklen[0] bits using the specified alphabet """ return ''.join([self.alphabet[val // len(self.alphabet) ** i % len(self.alphabet)] for i in reversed(range(self.chunklen[1]))])
def remove_files_from_dict(d):
    """Return the provided dict with any file objects removed.

    >>> remove_files_from_dict({
    ...     'oauth_token': 'foo',
    ...     'track': {
    ...         'title': 'bar',
    ...         'asset_data': open('setup.py', 'rb')
    ...     }
    ... }) == {'track': {'title': 'bar'}, 'oauth_token': 'foo'}
    ... # doctest:+ELLIPSIS
    True
    """
    cleaned = {}
    for key, value in six.iteritems(d):
        if isinstance(value, dict):
            # recurse into nested dicts
            cleaned[key] = remove_files_from_dict(value)
            continue
        if is_file_like(value):
            # drop file-like values entirely
            continue
        if hasattr(value, '__iter__'):
            # iterables are kept as-is
            cleaned[key] = value
        elif hasattr(value, 'encode'):
            cleaned[key] = value.encode('utf-8')
        else:
            cleaned[key] = str(value)
    return cleaned
def function[remove_files_from_dict, parameter[d]]: constant[Return the provided dict with any file objects removed. >>> remove_files_from_dict({ ... 'oauth_token': 'foo', ... 'track': { ... 'title': 'bar', ... 'asset_data': open('setup.py', 'rb') ... } ... }) == {'track': {'title': 'bar'}, 'oauth_token': 'foo'} ... # doctest:+ELLIPSIS True ] variable[file_free] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b26ac1c0>, <ast.Name object at 0x7da1b26aff40>]]] in starred[call[name[six].iteritems, parameter[name[d]]]] begin[:] if call[name[isinstance], parameter[name[value], name[dict]]] begin[:] call[name[file_free]][name[key]] assign[=] call[name[remove_files_from_dict], parameter[name[value]]] return[name[file_free]]
keyword[def] identifier[remove_files_from_dict] ( identifier[d] ): literal[string] identifier[file_free] ={} keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[d] ): keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ): identifier[file_free] [ identifier[key] ]= identifier[remove_files_from_dict] ( identifier[value] ) keyword[elif] keyword[not] identifier[is_file_like] ( identifier[value] ): keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ): identifier[file_free] [ identifier[key] ]= identifier[value] keyword[else] : keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ): identifier[file_free] [ identifier[key] ]= identifier[value] . identifier[encode] ( literal[string] ) keyword[else] : identifier[file_free] [ identifier[key] ]= identifier[str] ( identifier[value] ) keyword[return] identifier[file_free]
def remove_files_from_dict(d): """Return the provided dict with any file objects removed. >>> remove_files_from_dict({ ... 'oauth_token': 'foo', ... 'track': { ... 'title': 'bar', ... 'asset_data': open('setup.py', 'rb') ... } ... }) == {'track': {'title': 'bar'}, 'oauth_token': 'foo'} ... # doctest:+ELLIPSIS True """ file_free = {} for (key, value) in six.iteritems(d): if isinstance(value, dict): file_free[key] = remove_files_from_dict(value) # depends on [control=['if'], data=[]] elif not is_file_like(value): if hasattr(value, '__iter__'): file_free[key] = value # depends on [control=['if'], data=[]] elif hasattr(value, 'encode'): file_free[key] = value.encode('utf-8') # depends on [control=['if'], data=[]] else: file_free[key] = str(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return file_free
def get_member(row, key):
    """Return the value found for *key* by crawling *row*, or None.

    Properly detects whether the attribute exists: the value is only
    returned when ``dict_crawl`` reports a hit.
    """
    found, _found_key, found_value = dict_crawl(row, key)
    if not found:
        return None
    return found_value
def function[get_member, parameter[row, key]]: constant[ properly detects if a an attribute exists ] <ast.Tuple object at 0x7da20cabd180> assign[=] call[name[dict_crawl], parameter[name[row], name[key]]] if name[target] begin[:] return[name[tvalue]] return[constant[None]]
keyword[def] identifier[get_member] ( identifier[row] , identifier[key] ): literal[string] ( identifier[target] , identifier[tkey] , identifier[tvalue] )= identifier[dict_crawl] ( identifier[row] , identifier[key] ) keyword[if] identifier[target] : keyword[return] identifier[tvalue] keyword[return] keyword[None]
def get_member(row, key): """ properly detects if a an attribute exists """ (target, tkey, tvalue) = dict_crawl(row, key) if target: return tvalue # depends on [control=['if'], data=[]] return None
def track(self, event_key, user_id, attributes=None, event_tags=None):
    """ Send conversion event to Optimizely.

    Validates the datafile and inputs, builds a conversion event,
    dispatches it, and notifies TRACK listeners.  Dispatch failures are
    logged but never raised to the caller.

    Args:
      event_key: Event key representing the event which needs to be recorded.
      user_id: ID for user.
      attributes: Dict representing visitor attributes and values which need to be recorded.
      event_tags: Dict representing metadata associated with the event.
    """

    if not self.is_valid:
        self.logger.error(enums.Errors.INVALID_DATAFILE.format('track'))
        return

    if not validator.is_non_empty_string(event_key):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('event_key'))
        return

    if not isinstance(user_id, string_types):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id'))
        return

    if not self._validate_user_inputs(attributes, event_tags):
        return

    event = self.config.get_event(event_key)
    if not event:
        # Event key is not in the datafile; nothing to record.
        self.logger.info('Not tracking user "%s" for event "%s".' % (user_id, event_key))
        return

    conversion_event = self.event_builder.create_conversion_event(event_key, user_id, attributes, event_tags)
    self.logger.info('Tracking event "%s" for user "%s".' % (event_key, user_id))
    self.logger.debug('Dispatching conversion event to URL %s with params %s.' % (
        conversion_event.url,
        conversion_event.params
    ))
    try:
        self.event_dispatcher.dispatch_event(conversion_event)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt and
        # SystemExit propagate; dispatch failures are logged and
        # intentionally do not break the caller.
        self.logger.exception('Unable to dispatch conversion event!')

    # Listeners are notified regardless of dispatch success.
    self.notification_center.send_notifications(enums.NotificationTypes.TRACK, event_key, user_id,
                                                attributes, event_tags, conversion_event)
def function[track, parameter[self, event_key, user_id, attributes, event_tags]]: constant[ Send conversion event to Optimizely. Args: event_key: Event key representing the event which needs to be recorded. user_id: ID for user. attributes: Dict representing visitor attributes and values which need to be recorded. event_tags: Dict representing metadata associated with the event. ] if <ast.UnaryOp object at 0x7da18f00c370> begin[:] call[name[self].logger.error, parameter[call[name[enums].Errors.INVALID_DATAFILE.format, parameter[constant[track]]]]] return[None] if <ast.UnaryOp object at 0x7da18f00c1c0> begin[:] call[name[self].logger.error, parameter[call[name[enums].Errors.INVALID_INPUT_ERROR.format, parameter[constant[event_key]]]]] return[None] if <ast.UnaryOp object at 0x7da18f00e590> begin[:] call[name[self].logger.error, parameter[call[name[enums].Errors.INVALID_INPUT_ERROR.format, parameter[constant[user_id]]]]] return[None] if <ast.UnaryOp object at 0x7da18f00f250> begin[:] return[None] variable[event] assign[=] call[name[self].config.get_event, parameter[name[event_key]]] if <ast.UnaryOp object at 0x7da18f00fc10> begin[:] call[name[self].logger.info, parameter[binary_operation[constant[Not tracking user "%s" for event "%s".] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f00eaa0>, <ast.Name object at 0x7da18f00d1e0>]]]]] return[None] variable[conversion_event] assign[=] call[name[self].event_builder.create_conversion_event, parameter[name[event_key], name[user_id], name[attributes], name[event_tags]]] call[name[self].logger.info, parameter[binary_operation[constant[Tracking event "%s" for user "%s".] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f00ef80>, <ast.Name object at 0x7da18f00e170>]]]]] call[name[self].logger.debug, parameter[binary_operation[constant[Dispatching conversion event to URL %s with params %s.] 
<ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18f00e7d0>, <ast.Attribute object at 0x7da18f00d3f0>]]]]] <ast.Try object at 0x7da18f00fbe0> call[name[self].notification_center.send_notifications, parameter[name[enums].NotificationTypes.TRACK, name[event_key], name[user_id], name[attributes], name[event_tags], name[conversion_event]]]
keyword[def] identifier[track] ( identifier[self] , identifier[event_key] , identifier[user_id] , identifier[attributes] = keyword[None] , identifier[event_tags] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[is_valid] : identifier[self] . identifier[logger] . identifier[error] ( identifier[enums] . identifier[Errors] . identifier[INVALID_DATAFILE] . identifier[format] ( literal[string] )) keyword[return] keyword[if] keyword[not] identifier[validator] . identifier[is_non_empty_string] ( identifier[event_key] ): identifier[self] . identifier[logger] . identifier[error] ( identifier[enums] . identifier[Errors] . identifier[INVALID_INPUT_ERROR] . identifier[format] ( literal[string] )) keyword[return] keyword[if] keyword[not] identifier[isinstance] ( identifier[user_id] , identifier[string_types] ): identifier[self] . identifier[logger] . identifier[error] ( identifier[enums] . identifier[Errors] . identifier[INVALID_INPUT_ERROR] . identifier[format] ( literal[string] )) keyword[return] keyword[if] keyword[not] identifier[self] . identifier[_validate_user_inputs] ( identifier[attributes] , identifier[event_tags] ): keyword[return] identifier[event] = identifier[self] . identifier[config] . identifier[get_event] ( identifier[event_key] ) keyword[if] keyword[not] identifier[event] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[user_id] , identifier[event_key] )) keyword[return] identifier[conversion_event] = identifier[self] . identifier[event_builder] . identifier[create_conversion_event] ( identifier[event_key] , identifier[user_id] , identifier[attributes] , identifier[event_tags] ) identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[event_key] , identifier[user_id] )) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] %( identifier[conversion_event] . identifier[url] , identifier[conversion_event] . 
identifier[params] )) keyword[try] : identifier[self] . identifier[event_dispatcher] . identifier[dispatch_event] ( identifier[conversion_event] ) keyword[except] : identifier[self] . identifier[logger] . identifier[exception] ( literal[string] ) identifier[self] . identifier[notification_center] . identifier[send_notifications] ( identifier[enums] . identifier[NotificationTypes] . identifier[TRACK] , identifier[event_key] , identifier[user_id] , identifier[attributes] , identifier[event_tags] , identifier[conversion_event] )
def track(self, event_key, user_id, attributes=None, event_tags=None): """ Send conversion event to Optimizely. Args: event_key: Event key representing the event which needs to be recorded. user_id: ID for user. attributes: Dict representing visitor attributes and values which need to be recorded. event_tags: Dict representing metadata associated with the event. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('track')) return # depends on [control=['if'], data=[]] if not validator.is_non_empty_string(event_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('event_key')) return # depends on [control=['if'], data=[]] if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return # depends on [control=['if'], data=[]] if not self._validate_user_inputs(attributes, event_tags): return # depends on [control=['if'], data=[]] event = self.config.get_event(event_key) if not event: self.logger.info('Not tracking user "%s" for event "%s".' % (user_id, event_key)) return # depends on [control=['if'], data=[]] conversion_event = self.event_builder.create_conversion_event(event_key, user_id, attributes, event_tags) self.logger.info('Tracking event "%s" for user "%s".' % (event_key, user_id)) self.logger.debug('Dispatching conversion event to URL %s with params %s.' % (conversion_event.url, conversion_event.params)) try: self.event_dispatcher.dispatch_event(conversion_event) # depends on [control=['try'], data=[]] except: self.logger.exception('Unable to dispatch conversion event!') # depends on [control=['except'], data=[]] self.notification_center.send_notifications(enums.NotificationTypes.TRACK, event_key, user_id, attributes, event_tags, conversion_event)
def get_queryset(self, request):
    """
    Return the QuerySet of every model instance editable through the
    admin site; used by ``changelist_view``.
    """
    queryset = self.model._default_manager.get_queryset()
    # Re-type the queryset in place so the changelist iterates it with
    # the tree-aware behavior of TreeEditorQuerySet.
    queryset.__class__ = TreeEditorQuerySet
    return queryset
def function[get_queryset, parameter[self, request]]: constant[ Returns a QuerySet of all model instances that can be edited by the admin site. This is used by changelist_view. ] variable[qs] assign[=] call[name[self].model._default_manager.get_queryset, parameter[]] name[qs].__class__ assign[=] name[TreeEditorQuerySet] return[name[qs]]
keyword[def] identifier[get_queryset] ( identifier[self] , identifier[request] ): literal[string] identifier[qs] = identifier[self] . identifier[model] . identifier[_default_manager] . identifier[get_queryset] () identifier[qs] . identifier[__class__] = identifier[TreeEditorQuerySet] keyword[return] identifier[qs]
def get_queryset(self, request): """ Returns a QuerySet of all model instances that can be edited by the admin site. This is used by changelist_view. """ qs = self.model._default_manager.get_queryset() qs.__class__ = TreeEditorQuerySet return qs
def _compare(self, dir1, dir2):
    """Compare the contents of two directory trees.

    Recursively walks ``dir1`` and ``dir2`` and collects relative paths
    that pass the instance's pattern filters (``_only``, ``_include``,
    ``_exclude``, ``_ignore`` — regular expressions matched with
    ``re.match`` against forward-slash-normalized paths).

    Side effect: increments ``self._numdirs`` while walking.

    Returns a ``DCMP`` of (paths only in dir1, paths only in dir2,
    paths common to both).
    """
    left = set()
    right = set()
    self._numdirs += 1
    # On the left-hand side, ignore patterns act as additional excludes.
    excl_patterns = set(self._exclude).union(self._ignore)
    for cwd, dirs, files in os.walk(dir1):
        self._numdirs += len(dirs)
        for f in dirs + files:
            # Path relative to the tree root, normalized to '/' so the
            # same regex patterns behave identically on Windows and POSIX.
            path = os.path.relpath(os.path.join(cwd, f), dir1)
            re_path = path.replace('\\', '/')
            if self._only:
                for pattern in self._only:
                    if re.match(pattern, re_path):
                        # matches an _only pattern:
                        # go to exclude and ignore filtering
                        break
                else:
                    # next item, this one does not match any pattern
                    # in the _only list
                    continue
            add_path = False
            for pattern in self._include:
                if re.match(pattern, re_path):
                    add_path = True
                    break
            else:
                # path was not in includes
                # test if it is in excludes
                for pattern in excl_patterns:
                    if re.match(pattern, re_path):
                        # path is in excludes, do not add it
                        break
                else:
                    # path was not in excludes
                    # it should be added
                    add_path = True
            if add_path:
                left.add(path)
                # Also record every ancestor directory so a matched file
                # keeps its parent chain in the comparison.
                # NOTE(review): re_path[:-1] presumably trims a trailing
                # slash — TODO confirm behavior for paths without one.
                anc_dirs = re_path[:-1].split('/')
                for i in range(1, len(anc_dirs)):
                    left.add('/'.join(anc_dirs[:i]))
    for cwd, dirs, files in os.walk(dir2):
        for f in dirs + files:
            path = os.path.relpath(os.path.join(cwd, f), dir2)
            re_path = path.replace('\\', '/')
            for pattern in self._ignore:
                if re.match(pattern, re_path):
                    if f in dirs:
                        # Prune the ignored directory from the walk so
                        # its children are never visited.
                        dirs.remove(f)
                    break
            else:
                right.add(path)
                # no need to add the parent dirs here,
                # as there is no _only pattern detection
                if f in dirs and path not in left:
                    self._numdirs += 1
    common = left.intersection(right)
    left.difference_update(common)
    right.difference_update(common)
    return DCMP(left, right, common)
def function[_compare, parameter[self, dir1, dir2]]: constant[ Compare contents of two directories ] variable[left] assign[=] call[name[set], parameter[]] variable[right] assign[=] call[name[set], parameter[]] <ast.AugAssign object at 0x7da1b2530df0> variable[excl_patterns] assign[=] call[call[name[set], parameter[name[self]._exclude]].union, parameter[name[self]._ignore]] for taget[tuple[[<ast.Name object at 0x7da1b2547a30>, <ast.Name object at 0x7da1b2545d80>, <ast.Name object at 0x7da1b2546fb0>]]] in starred[call[name[os].walk, parameter[name[dir1]]]] begin[:] <ast.AugAssign object at 0x7da1b2544100> for taget[name[f]] in starred[binary_operation[name[dirs] + name[files]]] begin[:] variable[path] assign[=] call[name[os].path.relpath, parameter[call[name[os].path.join, parameter[name[cwd], name[f]]], name[dir1]]] variable[re_path] assign[=] call[name[path].replace, parameter[constant[\], constant[/]]] if name[self]._only begin[:] for taget[name[pattern]] in starred[name[self]._only] begin[:] if call[name[re].match, parameter[name[pattern], name[re_path]]] begin[:] break variable[add_path] assign[=] constant[False] for taget[name[pattern]] in starred[name[self]._include] begin[:] if call[name[re].match, parameter[name[pattern], name[re_path]]] begin[:] variable[add_path] assign[=] constant[True] break if name[add_path] begin[:] call[name[left].add, parameter[name[path]]] variable[anc_dirs] assign[=] call[call[name[re_path]][<ast.Slice object at 0x7da1b2544340>].split, parameter[constant[/]]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[anc_dirs]]]]]] begin[:] call[name[left].add, parameter[call[constant[/].join, parameter[call[name[anc_dirs]][<ast.Slice object at 0x7da1b2544160>]]]]] for taget[tuple[[<ast.Name object at 0x7da1b25454e0>, <ast.Name object at 0x7da1b2547280>, <ast.Name object at 0x7da1b2546f50>]]] in starred[call[name[os].walk, parameter[name[dir2]]]] begin[:] for taget[name[f]] in 
starred[binary_operation[name[dirs] + name[files]]] begin[:] variable[path] assign[=] call[name[os].path.relpath, parameter[call[name[os].path.join, parameter[name[cwd], name[f]]], name[dir2]]] variable[re_path] assign[=] call[name[path].replace, parameter[constant[\], constant[/]]] for taget[name[pattern]] in starred[name[self]._ignore] begin[:] if call[name[re].match, parameter[name[pattern], name[re_path]]] begin[:] if compare[name[f] in name[dirs]] begin[:] call[name[dirs].remove, parameter[name[f]]] break variable[common] assign[=] call[name[left].intersection, parameter[name[right]]] call[name[left].difference_update, parameter[name[common]]] call[name[right].difference_update, parameter[name[common]]] return[call[name[DCMP], parameter[name[left], name[right], name[common]]]]
keyword[def] identifier[_compare] ( identifier[self] , identifier[dir1] , identifier[dir2] ): literal[string] identifier[left] = identifier[set] () identifier[right] = identifier[set] () identifier[self] . identifier[_numdirs] += literal[int] identifier[excl_patterns] = identifier[set] ( identifier[self] . identifier[_exclude] ). identifier[union] ( identifier[self] . identifier[_ignore] ) keyword[for] identifier[cwd] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[dir1] ): identifier[self] . identifier[_numdirs] += identifier[len] ( identifier[dirs] ) keyword[for] identifier[f] keyword[in] identifier[dirs] + identifier[files] : identifier[path] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[cwd] , identifier[f] ), identifier[dir1] ) identifier[re_path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[self] . identifier[_only] : keyword[for] identifier[pattern] keyword[in] identifier[self] . identifier[_only] : keyword[if] identifier[re] . identifier[match] ( identifier[pattern] , identifier[re_path] ): keyword[break] keyword[else] : keyword[continue] identifier[add_path] = keyword[False] keyword[for] identifier[pattern] keyword[in] identifier[self] . identifier[_include] : keyword[if] identifier[re] . identifier[match] ( identifier[pattern] , identifier[re_path] ): identifier[add_path] = keyword[True] keyword[break] keyword[else] : keyword[for] identifier[pattern] keyword[in] identifier[excl_patterns] : keyword[if] identifier[re] . identifier[match] ( identifier[pattern] , identifier[re_path] ): keyword[break] keyword[else] : identifier[add_path] = keyword[True] keyword[if] identifier[add_path] : identifier[left] . identifier[add] ( identifier[path] ) identifier[anc_dirs] = identifier[re_path] [:- literal[int] ]. 
identifier[split] ( literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[anc_dirs] )): identifier[left] . identifier[add] ( literal[string] . identifier[join] ( identifier[anc_dirs] [: identifier[i] ])) keyword[for] identifier[cwd] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[dir2] ): keyword[for] identifier[f] keyword[in] identifier[dirs] + identifier[files] : identifier[path] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[cwd] , identifier[f] ), identifier[dir2] ) identifier[re_path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[pattern] keyword[in] identifier[self] . identifier[_ignore] : keyword[if] identifier[re] . identifier[match] ( identifier[pattern] , identifier[re_path] ): keyword[if] identifier[f] keyword[in] identifier[dirs] : identifier[dirs] . identifier[remove] ( identifier[f] ) keyword[break] keyword[else] : identifier[right] . identifier[add] ( identifier[path] ) keyword[if] identifier[f] keyword[in] identifier[dirs] keyword[and] identifier[path] keyword[not] keyword[in] identifier[left] : identifier[self] . identifier[_numdirs] += literal[int] identifier[common] = identifier[left] . identifier[intersection] ( identifier[right] ) identifier[left] . identifier[difference_update] ( identifier[common] ) identifier[right] . identifier[difference_update] ( identifier[common] ) keyword[return] identifier[DCMP] ( identifier[left] , identifier[right] , identifier[common] )
def _compare(self, dir1, dir2): """ Compare contents of two directories """ left = set() right = set() self._numdirs += 1 excl_patterns = set(self._exclude).union(self._ignore) for (cwd, dirs, files) in os.walk(dir1): self._numdirs += len(dirs) for f in dirs + files: path = os.path.relpath(os.path.join(cwd, f), dir1) re_path = path.replace('\\', '/') if self._only: for pattern in self._only: if re.match(pattern, re_path): # go to exclude and ignore filtering break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pattern']] else: # next item, this one does not match any pattern # in the _only list continue # depends on [control=['if'], data=[]] add_path = False for pattern in self._include: if re.match(pattern, re_path): add_path = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pattern']] else: # path was not in includes # test if it is in excludes for pattern in excl_patterns: if re.match(pattern, re_path): # path is in excludes, do not add it break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pattern']] else: # path was not in excludes # it should be added add_path = True if add_path: left.add(path) anc_dirs = re_path[:-1].split('/') for i in range(1, len(anc_dirs)): left.add('/'.join(anc_dirs[:i])) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=[]] for (cwd, dirs, files) in os.walk(dir2): for f in dirs + files: path = os.path.relpath(os.path.join(cwd, f), dir2) re_path = path.replace('\\', '/') for pattern in self._ignore: if re.match(pattern, re_path): if f in dirs: dirs.remove(f) # depends on [control=['if'], data=['f', 'dirs']] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pattern']] else: right.add(path) # no need to add the parent dirs here, # as there is no _only pattern detection if f in dirs and path not in 
left: self._numdirs += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=[]] common = left.intersection(right) left.difference_update(common) right.difference_update(common) return DCMP(left, right, common)
def _ttr(self, k, dist, cache):
    """Return the three-terms-recurrence coefficients for order ``k``.

    Delegates to ``evaluation.evaluate_recurrence_coefficients`` and
    negates the first coefficient before returning the pair.
    """
    coeffs = evaluation.evaluate_recurrence_coefficients(dist, k)
    return -coeffs[0], coeffs[1]
def function[_ttr, parameter[self, k, dist, cache]]: constant[Three terms recursion coefficients.] <ast.Tuple object at 0x7da20c7cada0> assign[=] call[name[evaluation].evaluate_recurrence_coefficients, parameter[name[dist], name[k]]] return[tuple[[<ast.UnaryOp object at 0x7da20c7c8130>, <ast.Name object at 0x7da20c7c8f40>]]]
keyword[def] identifier[_ttr] ( identifier[self] , identifier[k] , identifier[dist] , identifier[cache] ): literal[string] identifier[a] , identifier[b] = identifier[evaluation] . identifier[evaluate_recurrence_coefficients] ( identifier[dist] , identifier[k] ) keyword[return] - identifier[a] , identifier[b]
def _ttr(self, k, dist, cache): """Three terms recursion coefficients.""" (a, b) = evaluation.evaluate_recurrence_coefficients(dist, k) return (-a, b)
def apps_notify_create(self, data, **kwargs):
    """https://developer.zendesk.com/rest_api/docs/core/apps#send-notification-to-app"""
    # Forward the notification payload as a POST to the apps endpoint.
    return self.call(
        "/api/v2/apps/notify.json", method="POST", data=data, **kwargs)
def function[apps_notify_create, parameter[self, data]]: constant[https://developer.zendesk.com/rest_api/docs/core/apps#send-notification-to-app] variable[api_path] assign[=] constant[/api/v2/apps/notify.json] return[call[name[self].call, parameter[name[api_path]]]]
keyword[def] identifier[apps_notify_create] ( identifier[self] , identifier[data] ,** identifier[kwargs] ): literal[string] identifier[api_path] = literal[string] keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[method] = literal[string] , identifier[data] = identifier[data] ,** identifier[kwargs] )
def apps_notify_create(self, data, **kwargs): """https://developer.zendesk.com/rest_api/docs/core/apps#send-notification-to-app""" api_path = '/api/v2/apps/notify.json' return self.call(api_path, method='POST', data=data, **kwargs)
def manage_view(request, semester, profile=None):
    """
    View all members' preferences. This view also includes forms to create an
    entire semester's worth of weekly workshifts.

    Managers without full management rights only see the pools they manage;
    non-managers are redirected away with an error message.
    """
    page_name = "Manage Workshift"
    pools = WorkshiftPool.objects.filter(semester=semester)
    full_management = utils.can_manage(request.user, semester=semester)
    edit_semester_form = None
    close_semester_form = None
    open_semester_form = None

    if not full_management:
        # Restrict to pools the requesting user actually manages; if none
        # remain, the user has no business on this page.
        pools = pools.filter(managers__incumbent__user=request.user)
        if not pools.count():
            messages.add_message(request, messages.ERROR, MESSAGES["ADMINS_ONLY"])
            return HttpResponseRedirect(semester.get_view_url())
    else:
        # Full managers get the semester-level forms. Each form is only
        # bound to POST data when its own submit button was used.
        edit_semester_form = FullSemesterForm(
            data=request.POST if "edit_semester" in request.POST else None,
            instance=semester,
        )
        if semester.current:
            close_semester_form = CloseSemesterForm(
                data=request.POST if "close_semester" in request.POST else None,
                semester=semester,
            )
        else:
            open_semester_form = OpenSemesterForm(
                data=request.POST if "open_semester" in request.POST else None,
                semester=semester
            )

    # Handle whichever semester form was submitted; each saves and then
    # redirects back to this manage page (post/redirect/get).
    if edit_semester_form and edit_semester_form.is_valid():
        semester = edit_semester_form.save()
        messages.add_message(
            request,
            messages.INFO,
            "Semester successfully updated.",
        )
        return HttpResponseRedirect(wurl(
            "workshift:manage",
            sem_url=semester.sem_url,
        ))

    if close_semester_form and close_semester_form.is_valid():
        close_semester_form.save()
        messages.add_message(request, messages.INFO, "Semester closed.")
        return HttpResponseRedirect(wurl(
            "workshift:manage",
            sem_url=semester.sem_url,
        ))

    if open_semester_form and open_semester_form.is_valid():
        open_semester_form.save()
        messages.add_message(request, messages.INFO, "Semester reopened.")
        return HttpResponseRedirect(wurl(
            "workshift:manage",
            sem_url=semester.sem_url,
        ))

    pools = pools.order_by("-is_primary", "title")
    workshifters = WorkshiftProfile.objects.filter(semester=semester)
    # One row per workshifter: their pool-hours record for each visible pool,
    # in the same order as ``pools`` so the template can zip them.
    pool_hours = [
        [
            workshifter.pool_hours.get(pool=pool)
            for pool in pools
        ]
        for workshifter in workshifters
    ]

    # NOTE(review): render_to_response with context_instance is removed in
    # modern Django (use render(request, ...)) — confirm the target version.
    return render_to_response("manage.html", {
        "page_name": page_name,
        "pools": pools,
        "full_management": full_management,
        "edit_semester_form": edit_semester_form,
        "close_semester_form": close_semester_form,
        "open_semester_form": open_semester_form,
        "workshifters": zip(workshifters, pool_hours),
    }, context_instance=RequestContext(request))
def function[manage_view, parameter[request, semester, profile]]: constant[ View all members' preferences. This view also includes forms to create an entire semester's worth of weekly workshifts. ] variable[page_name] assign[=] constant[Manage Workshift] variable[pools] assign[=] call[name[WorkshiftPool].objects.filter, parameter[]] variable[full_management] assign[=] call[name[utils].can_manage, parameter[name[request].user]] variable[edit_semester_form] assign[=] constant[None] variable[close_semester_form] assign[=] constant[None] variable[open_semester_form] assign[=] constant[None] if <ast.UnaryOp object at 0x7da1b14a3850> begin[:] variable[pools] assign[=] call[name[pools].filter, parameter[]] if <ast.UnaryOp object at 0x7da1b14a3640> begin[:] call[name[messages].add_message, parameter[name[request], name[messages].ERROR, call[name[MESSAGES]][constant[ADMINS_ONLY]]]] return[call[name[HttpResponseRedirect], parameter[call[name[semester].get_view_url, parameter[]]]]] if <ast.BoolOp object at 0x7da1b14a28f0> begin[:] variable[semester] assign[=] call[name[edit_semester_form].save, parameter[]] call[name[messages].add_message, parameter[name[request], name[messages].INFO, constant[Semester successfully updated.]]] return[call[name[HttpResponseRedirect], parameter[call[name[wurl], parameter[constant[workshift:manage]]]]]] if <ast.BoolOp object at 0x7da1b14a2350> begin[:] call[name[close_semester_form].save, parameter[]] call[name[messages].add_message, parameter[name[request], name[messages].INFO, constant[Semester closed.]]] return[call[name[HttpResponseRedirect], parameter[call[name[wurl], parameter[constant[workshift:manage]]]]]] if <ast.BoolOp object at 0x7da1b14a1db0> begin[:] call[name[open_semester_form].save, parameter[]] call[name[messages].add_message, parameter[name[request], name[messages].INFO, constant[Semester reopened.]]] return[call[name[HttpResponseRedirect], parameter[call[name[wurl], parameter[constant[workshift:manage]]]]]] variable[pools] 
assign[=] call[name[pools].order_by, parameter[constant[-is_primary], constant[title]]] variable[workshifters] assign[=] call[name[WorkshiftProfile].objects.filter, parameter[]] variable[pool_hours] assign[=] <ast.ListComp object at 0x7da1b14a0b50> return[call[name[render_to_response], parameter[constant[manage.html], dictionary[[<ast.Constant object at 0x7da1b14a0760>, <ast.Constant object at 0x7da1b14a0730>, <ast.Constant object at 0x7da1b14a0700>, <ast.Constant object at 0x7da1b14a06d0>, <ast.Constant object at 0x7da1b14a06a0>, <ast.Constant object at 0x7da1b14a0670>, <ast.Constant object at 0x7da1b14a0640>], [<ast.Name object at 0x7da1b14a0610>, <ast.Name object at 0x7da1b14a05e0>, <ast.Name object at 0x7da1b14a05b0>, <ast.Name object at 0x7da1b14a0580>, <ast.Name object at 0x7da1b14a0550>, <ast.Name object at 0x7da1b14a0520>, <ast.Call object at 0x7da1b14a04f0>]]]]]
keyword[def] identifier[manage_view] ( identifier[request] , identifier[semester] , identifier[profile] = keyword[None] ): literal[string] identifier[page_name] = literal[string] identifier[pools] = identifier[WorkshiftPool] . identifier[objects] . identifier[filter] ( identifier[semester] = identifier[semester] ) identifier[full_management] = identifier[utils] . identifier[can_manage] ( identifier[request] . identifier[user] , identifier[semester] = identifier[semester] ) identifier[edit_semester_form] = keyword[None] identifier[close_semester_form] = keyword[None] identifier[open_semester_form] = keyword[None] keyword[if] keyword[not] identifier[full_management] : identifier[pools] = identifier[pools] . identifier[filter] ( identifier[managers__incumbent__user] = identifier[request] . identifier[user] ) keyword[if] keyword[not] identifier[pools] . identifier[count] (): identifier[messages] . identifier[add_message] ( identifier[request] , identifier[messages] . identifier[ERROR] , identifier[MESSAGES] [ literal[string] ]) keyword[return] identifier[HttpResponseRedirect] ( identifier[semester] . identifier[get_view_url] ()) keyword[else] : identifier[edit_semester_form] = identifier[FullSemesterForm] ( identifier[data] = identifier[request] . identifier[POST] keyword[if] literal[string] keyword[in] identifier[request] . identifier[POST] keyword[else] keyword[None] , identifier[instance] = identifier[semester] , ) keyword[if] identifier[semester] . identifier[current] : identifier[close_semester_form] = identifier[CloseSemesterForm] ( identifier[data] = identifier[request] . identifier[POST] keyword[if] literal[string] keyword[in] identifier[request] . identifier[POST] keyword[else] keyword[None] , identifier[semester] = identifier[semester] , ) keyword[else] : identifier[open_semester_form] = identifier[OpenSemesterForm] ( identifier[data] = identifier[request] . identifier[POST] keyword[if] literal[string] keyword[in] identifier[request] . 
identifier[POST] keyword[else] keyword[None] , identifier[semester] = identifier[semester] ) keyword[if] identifier[edit_semester_form] keyword[and] identifier[edit_semester_form] . identifier[is_valid] (): identifier[semester] = identifier[edit_semester_form] . identifier[save] () identifier[messages] . identifier[add_message] ( identifier[request] , identifier[messages] . identifier[INFO] , literal[string] , ) keyword[return] identifier[HttpResponseRedirect] ( identifier[wurl] ( literal[string] , identifier[sem_url] = identifier[semester] . identifier[sem_url] , )) keyword[if] identifier[close_semester_form] keyword[and] identifier[close_semester_form] . identifier[is_valid] (): identifier[close_semester_form] . identifier[save] () identifier[messages] . identifier[add_message] ( identifier[request] , identifier[messages] . identifier[INFO] , literal[string] ) keyword[return] identifier[HttpResponseRedirect] ( identifier[wurl] ( literal[string] , identifier[sem_url] = identifier[semester] . identifier[sem_url] , )) keyword[if] identifier[open_semester_form] keyword[and] identifier[open_semester_form] . identifier[is_valid] (): identifier[open_semester_form] . identifier[save] () identifier[messages] . identifier[add_message] ( identifier[request] , identifier[messages] . identifier[INFO] , literal[string] ) keyword[return] identifier[HttpResponseRedirect] ( identifier[wurl] ( literal[string] , identifier[sem_url] = identifier[semester] . identifier[sem_url] , )) identifier[pools] = identifier[pools] . identifier[order_by] ( literal[string] , literal[string] ) identifier[workshifters] = identifier[WorkshiftProfile] . identifier[objects] . identifier[filter] ( identifier[semester] = identifier[semester] ) identifier[pool_hours] =[ [ identifier[workshifter] . identifier[pool_hours] . 
identifier[get] ( identifier[pool] = identifier[pool] ) keyword[for] identifier[pool] keyword[in] identifier[pools] ] keyword[for] identifier[workshifter] keyword[in] identifier[workshifters] ] keyword[return] identifier[render_to_response] ( literal[string] ,{ literal[string] : identifier[page_name] , literal[string] : identifier[pools] , literal[string] : identifier[full_management] , literal[string] : identifier[edit_semester_form] , literal[string] : identifier[close_semester_form] , literal[string] : identifier[open_semester_form] , literal[string] : identifier[zip] ( identifier[workshifters] , identifier[pool_hours] ), }, identifier[context_instance] = identifier[RequestContext] ( identifier[request] ))
def manage_view(request, semester, profile=None): """ View all members' preferences. This view also includes forms to create an entire semester's worth of weekly workshifts. """ page_name = 'Manage Workshift' pools = WorkshiftPool.objects.filter(semester=semester) full_management = utils.can_manage(request.user, semester=semester) edit_semester_form = None close_semester_form = None open_semester_form = None if not full_management: pools = pools.filter(managers__incumbent__user=request.user) if not pools.count(): messages.add_message(request, messages.ERROR, MESSAGES['ADMINS_ONLY']) return HttpResponseRedirect(semester.get_view_url()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: edit_semester_form = FullSemesterForm(data=request.POST if 'edit_semester' in request.POST else None, instance=semester) if semester.current: close_semester_form = CloseSemesterForm(data=request.POST if 'close_semester' in request.POST else None, semester=semester) # depends on [control=['if'], data=[]] else: open_semester_form = OpenSemesterForm(data=request.POST if 'open_semester' in request.POST else None, semester=semester) if edit_semester_form and edit_semester_form.is_valid(): semester = edit_semester_form.save() messages.add_message(request, messages.INFO, 'Semester successfully updated.') return HttpResponseRedirect(wurl('workshift:manage', sem_url=semester.sem_url)) # depends on [control=['if'], data=[]] if close_semester_form and close_semester_form.is_valid(): close_semester_form.save() messages.add_message(request, messages.INFO, 'Semester closed.') return HttpResponseRedirect(wurl('workshift:manage', sem_url=semester.sem_url)) # depends on [control=['if'], data=[]] if open_semester_form and open_semester_form.is_valid(): open_semester_form.save() messages.add_message(request, messages.INFO, 'Semester reopened.') return HttpResponseRedirect(wurl('workshift:manage', sem_url=semester.sem_url)) # depends on [control=['if'], data=[]] pools = 
pools.order_by('-is_primary', 'title') workshifters = WorkshiftProfile.objects.filter(semester=semester) pool_hours = [[workshifter.pool_hours.get(pool=pool) for pool in pools] for workshifter in workshifters] return render_to_response('manage.html', {'page_name': page_name, 'pools': pools, 'full_management': full_management, 'edit_semester_form': edit_semester_form, 'close_semester_form': close_semester_form, 'open_semester_form': open_semester_form, 'workshifters': zip(workshifters, pool_hours)}, context_instance=RequestContext(request))
def init(name,                  # Salt_id for created VM
         cwd=None,              # path to find Vagrantfile
         machine='',            # name of machine in Vagrantfile
         runas=None,            # username who owns Vagrant box
         start=False,           # start the machine when initialized
         vagrant_provider='',   # vagrant provider (default=virtualbox)
         vm=None,               # a dictionary of VM configuration settings
         ):
    '''
    Initialize a new Vagrant VM.

    All the information needed to start, control, and query a Vagrant
    guest VM is recorded (keyed by the unique ``name`` salt_id) via
    ``_update_vm_info`` for later use by the other module functions.

    :param name: The salt_id name you will use to control this VM
    :param cwd: The path to the directory where the Vagrantfile is located
    :param machine: The machine name in the Vagrantfile. If blank, the primary machine will be used.
    :param runas: The username on the host who owns the Vagrant work files.
    :param start: (default: False) Start the virtual machine now.
    :param vagrant_provider: The name of a Vagrant VM provider (if not the default).
    :param vm: Optionally, all the above information may be supplied in this dictionary.
    :return: A string indicating success, or False.

    CLI Example:

    .. code-block:: bash

        salt <host> vagrant.init <salt_id> /path/to/Vagrantfile
        salt my_laptop vagrant.init x1 /projects/bevy_master machine=quail1
    '''
    # Start from the supplied configuration dictionary (if any); the
    # explicit keyword arguments below override what it carries.
    settings = vm.copy() if vm is not None else {}
    settings['name'] = name
    settings['cwd'] = cwd or settings.get('cwd')
    if not settings['cwd']:
        raise SaltInvocationError('Path to Vagrantfile must be defined by "cwd" argument')
    settings['machine'] = machine or settings.get('machine', machine)
    settings['runas'] = runas or settings.get('runas', runas)
    settings['vagrant_provider'] = vagrant_provider or settings.get('vagrant_provider', '')
    _update_vm_info(name, settings)

    if not start:
        return 'Name {} defined using VM {}'.format(name, settings['machine'] or '(default)')
    log.debug('Starting VM %s', name)
    return _start(name, settings)
def function[init, parameter[name, cwd, machine, runas, start, vagrant_provider, vm]]: constant[ Initialize a new Vagrant VM. This inputs all the information needed to start a Vagrant VM. These settings are stored in a Salt sdb database on the Vagrant host minion and used to start, control, and query the guest VMs. The salt_id assigned here is the key field for that database and must be unique. :param name: The salt_id name you will use to control this VM :param cwd: The path to the directory where the Vagrantfile is located :param machine: The machine name in the Vagrantfile. If blank, the primary machine will be used. :param runas: The username on the host who owns the Vagrant work files. :param start: (default: False) Start the virtual machine now. :param vagrant_provider: The name of a Vagrant VM provider (if not the default). :param vm: Optionally, all the above information may be supplied in this dictionary. :return: A string indicating success, or False. CLI Example: .. code-block:: bash salt <host> vagrant.init <salt_id> /path/to/Vagrantfile salt my_laptop vagrant.init x1 /projects/bevy_master machine=quail1 ] variable[vm_] assign[=] <ast.IfExp object at 0x7da1b1c70f10> call[name[vm_]][constant[name]] assign[=] name[name] call[name[vm_]][constant[cwd]] assign[=] <ast.BoolOp object at 0x7da1b1c738e0> if <ast.UnaryOp object at 0x7da1b1c709a0> begin[:] <ast.Raise object at 0x7da1b1c719c0> call[name[vm_]][constant[machine]] assign[=] <ast.BoolOp object at 0x7da1b1c72650> call[name[vm_]][constant[runas]] assign[=] <ast.BoolOp object at 0x7da1b2046f20> call[name[vm_]][constant[vagrant_provider]] assign[=] <ast.BoolOp object at 0x7da1b2044130> call[name[_update_vm_info], parameter[name[name], name[vm_]]] if name[start] begin[:] call[name[log].debug, parameter[constant[Starting VM %s], name[name]]] variable[ret] assign[=] call[name[_start], parameter[name[name], name[vm_]]] return[name[ret]]
keyword[def] identifier[init] ( identifier[name] , identifier[cwd] = keyword[None] , identifier[machine] = literal[string] , identifier[runas] = keyword[None] , identifier[start] = keyword[False] , identifier[vagrant_provider] = literal[string] , identifier[vm] = keyword[None] , ): literal[string] identifier[vm_] ={} keyword[if] identifier[vm] keyword[is] keyword[None] keyword[else] identifier[vm] . identifier[copy] () identifier[vm_] [ literal[string] ]= identifier[name] identifier[vm_] [ literal[string] ]= identifier[cwd] keyword[or] identifier[vm_] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[vm_] [ literal[string] ]: keyword[raise] identifier[SaltInvocationError] ( literal[string] ) identifier[vm_] [ literal[string] ]= identifier[machine] keyword[or] identifier[vm_] . identifier[get] ( literal[string] , identifier[machine] ) identifier[vm_] [ literal[string] ]= identifier[runas] keyword[or] identifier[vm_] . identifier[get] ( literal[string] , identifier[runas] ) identifier[vm_] [ literal[string] ]= identifier[vagrant_provider] keyword[or] identifier[vm_] . identifier[get] ( literal[string] , literal[string] ) identifier[_update_vm_info] ( identifier[name] , identifier[vm_] ) keyword[if] identifier[start] : identifier[log] . identifier[debug] ( literal[string] , identifier[name] ) identifier[ret] = identifier[_start] ( identifier[name] , identifier[vm_] ) keyword[else] : identifier[ret] = literal[string] . identifier[format] ( identifier[name] , identifier[vm_] [ literal[string] ] keyword[or] literal[string] ) keyword[return] identifier[ret]
def init(name, cwd=None, machine='', runas=None, start=False, vagrant_provider='', vm=None): # Salt_id for created VM # path to find Vagrantfile # name of machine in Vagrantfile # username who owns Vagrant box # start the machine when initialized # vagrant provider (default=virtualbox) # a dictionary of VM configuration settings '\n Initialize a new Vagrant VM.\n\n This inputs all the information needed to start a Vagrant VM. These settings are stored in\n a Salt sdb database on the Vagrant host minion and used to start, control, and query the\n guest VMs. The salt_id assigned here is the key field for that database and must be unique.\n\n :param name: The salt_id name you will use to control this VM\n :param cwd: The path to the directory where the Vagrantfile is located\n :param machine: The machine name in the Vagrantfile. If blank, the primary machine will be used.\n :param runas: The username on the host who owns the Vagrant work files.\n :param start: (default: False) Start the virtual machine now.\n :param vagrant_provider: The name of a Vagrant VM provider (if not the default).\n :param vm: Optionally, all the above information may be supplied in this dictionary.\n :return: A string indicating success, or False.\n\n CLI Example:\n\n .. 
code-block:: bash\n\n salt <host> vagrant.init <salt_id> /path/to/Vagrantfile\n salt my_laptop vagrant.init x1 /projects/bevy_master machine=quail1\n ' vm_ = {} if vm is None else vm.copy() # passed configuration data vm_['name'] = name # passed-in keyword arguments overwrite vm dictionary values vm_['cwd'] = cwd or vm_.get('cwd') if not vm_['cwd']: raise SaltInvocationError('Path to Vagrantfile must be defined by "cwd" argument') # depends on [control=['if'], data=[]] vm_['machine'] = machine or vm_.get('machine', machine) vm_['runas'] = runas or vm_.get('runas', runas) vm_['vagrant_provider'] = vagrant_provider or vm_.get('vagrant_provider', '') _update_vm_info(name, vm_) if start: log.debug('Starting VM %s', name) ret = _start(name, vm_) # depends on [control=['if'], data=[]] else: ret = 'Name {} defined using VM {}'.format(name, vm_['machine'] or '(default)') return ret
def _eval_unaryop(self, node): """ Evaluate a unary operator node (ie. -2, +3) Currently just supports positive and negative :param node: Node to eval :return: Result of node """ return self.operators[type(node.op)](self._eval(node.operand))
def function[_eval_unaryop, parameter[self, node]]: constant[ Evaluate a unary operator node (ie. -2, +3) Currently just supports positive and negative :param node: Node to eval :return: Result of node ] return[call[call[name[self].operators][call[name[type], parameter[name[node].op]]], parameter[call[name[self]._eval, parameter[name[node].operand]]]]]
keyword[def] identifier[_eval_unaryop] ( identifier[self] , identifier[node] ): literal[string] keyword[return] identifier[self] . identifier[operators] [ identifier[type] ( identifier[node] . identifier[op] )]( identifier[self] . identifier[_eval] ( identifier[node] . identifier[operand] ))
def _eval_unaryop(self, node): """ Evaluate a unary operator node (ie. -2, +3) Currently just supports positive and negative :param node: Node to eval :return: Result of node """ return self.operators[type(node.op)](self._eval(node.operand))
def reference(self): """Return the Reference object for this Key. This is a entity_pb.Reference instance -- a protocol buffer class used by the lower-level API to the datastore. NOTE: The caller should not mutate the return value. """ if self.__reference is None: self.__reference = _ConstructReference(self.__class__, pairs=self.__pairs, app=self.__app, namespace=self.__namespace) return self.__reference
def function[reference, parameter[self]]: constant[Return the Reference object for this Key. This is a entity_pb.Reference instance -- a protocol buffer class used by the lower-level API to the datastore. NOTE: The caller should not mutate the return value. ] if compare[name[self].__reference is constant[None]] begin[:] name[self].__reference assign[=] call[name[_ConstructReference], parameter[name[self].__class__]] return[name[self].__reference]
keyword[def] identifier[reference] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[__reference] keyword[is] keyword[None] : identifier[self] . identifier[__reference] = identifier[_ConstructReference] ( identifier[self] . identifier[__class__] , identifier[pairs] = identifier[self] . identifier[__pairs] , identifier[app] = identifier[self] . identifier[__app] , identifier[namespace] = identifier[self] . identifier[__namespace] ) keyword[return] identifier[self] . identifier[__reference]
def reference(self): """Return the Reference object for this Key. This is a entity_pb.Reference instance -- a protocol buffer class used by the lower-level API to the datastore. NOTE: The caller should not mutate the return value. """ if self.__reference is None: self.__reference = _ConstructReference(self.__class__, pairs=self.__pairs, app=self.__app, namespace=self.__namespace) # depends on [control=['if'], data=[]] return self.__reference
def _update_partition_srvc_node_ip(self, tenant_name, srvc_ip, vrf_prof=None, part_name=None): """Function to update srvc_node address of partition. """ self.dcnm_obj.update_project(tenant_name, part_name, service_node_ip=srvc_ip, vrf_prof=vrf_prof, desc="Service Partition")
def function[_update_partition_srvc_node_ip, parameter[self, tenant_name, srvc_ip, vrf_prof, part_name]]: constant[Function to update srvc_node address of partition. ] call[name[self].dcnm_obj.update_project, parameter[name[tenant_name], name[part_name]]]
keyword[def] identifier[_update_partition_srvc_node_ip] ( identifier[self] , identifier[tenant_name] , identifier[srvc_ip] , identifier[vrf_prof] = keyword[None] , identifier[part_name] = keyword[None] ): literal[string] identifier[self] . identifier[dcnm_obj] . identifier[update_project] ( identifier[tenant_name] , identifier[part_name] , identifier[service_node_ip] = identifier[srvc_ip] , identifier[vrf_prof] = identifier[vrf_prof] , identifier[desc] = literal[string] )
def _update_partition_srvc_node_ip(self, tenant_name, srvc_ip, vrf_prof=None, part_name=None): """Function to update srvc_node address of partition. """ self.dcnm_obj.update_project(tenant_name, part_name, service_node_ip=srvc_ip, vrf_prof=vrf_prof, desc='Service Partition')
def ndimage_to_list(image): """ Split a n dimensional ANTsImage into a list of n-1 dimensional ANTsImages Arguments --------- image : ANTsImage n-dimensional image to split Returns ------- list of ANTsImage types Example ------- >>> import ants >>> image = ants.image_read(ants.get_ants_data('r16')) >>> image2 = ants.image_read(ants.get_ants_data('r16')) >>> imageTar = ants.make_image( ( *image2.shape, 2 ) ) >>> image3 = ants.list_to_ndimage( imageTar, [image,image2]) >>> image3.dimension == 3 >>> images_unmerged = ants.ndimage_to_list( image3 ) >>> len(images_unmerged) == 2 >>> images_unmerged[0].dimension == 2 """ inpixeltype = image.pixeltype dimension = image.dimension components = 1 imageShape = image.shape nSections = imageShape[ dimension - 1 ] subdimension = dimension - 1 suborigin = iio.get_origin( image )[0:subdimension] subspacing = iio.get_spacing( image )[0:subdimension] subdirection = np.eye( subdimension ) for i in range( subdimension ): subdirection[i,:] = iio.get_direction( image )[i,0:subdimension] subdim = image.shape[ 0:subdimension ] imagelist = [] for i in range( nSections ): img = utils.slice_image( image, axis = subdimension, idx = i ) iio.set_spacing( img, subspacing ) iio.set_origin( img, suborigin ) iio.set_direction( img, subdirection ) imagelist.append( img ) return imagelist
def function[ndimage_to_list, parameter[image]]: constant[ Split a n dimensional ANTsImage into a list of n-1 dimensional ANTsImages Arguments --------- image : ANTsImage n-dimensional image to split Returns ------- list of ANTsImage types Example ------- >>> import ants >>> image = ants.image_read(ants.get_ants_data('r16')) >>> image2 = ants.image_read(ants.get_ants_data('r16')) >>> imageTar = ants.make_image( ( *image2.shape, 2 ) ) >>> image3 = ants.list_to_ndimage( imageTar, [image,image2]) >>> image3.dimension == 3 >>> images_unmerged = ants.ndimage_to_list( image3 ) >>> len(images_unmerged) == 2 >>> images_unmerged[0].dimension == 2 ] variable[inpixeltype] assign[=] name[image].pixeltype variable[dimension] assign[=] name[image].dimension variable[components] assign[=] constant[1] variable[imageShape] assign[=] name[image].shape variable[nSections] assign[=] call[name[imageShape]][binary_operation[name[dimension] - constant[1]]] variable[subdimension] assign[=] binary_operation[name[dimension] - constant[1]] variable[suborigin] assign[=] call[call[name[iio].get_origin, parameter[name[image]]]][<ast.Slice object at 0x7da2041d9240>] variable[subspacing] assign[=] call[call[name[iio].get_spacing, parameter[name[image]]]][<ast.Slice object at 0x7da2041dbcd0>] variable[subdirection] assign[=] call[name[np].eye, parameter[name[subdimension]]] for taget[name[i]] in starred[call[name[range], parameter[name[subdimension]]]] begin[:] call[name[subdirection]][tuple[[<ast.Name object at 0x7da1b16befb0>, <ast.Slice object at 0x7da1b16bcd60>]]] assign[=] call[call[name[iio].get_direction, parameter[name[image]]]][tuple[[<ast.Name object at 0x7da1b16beef0>, <ast.Slice object at 0x7da1b16be440>]]] variable[subdim] assign[=] call[name[image].shape][<ast.Slice object at 0x7da1b16bec80>] variable[imagelist] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[name[nSections]]]] begin[:] variable[img] assign[=] call[name[utils].slice_image, 
parameter[name[image]]] call[name[iio].set_spacing, parameter[name[img], name[subspacing]]] call[name[iio].set_origin, parameter[name[img], name[suborigin]]] call[name[iio].set_direction, parameter[name[img], name[subdirection]]] call[name[imagelist].append, parameter[name[img]]] return[name[imagelist]]
keyword[def] identifier[ndimage_to_list] ( identifier[image] ): literal[string] identifier[inpixeltype] = identifier[image] . identifier[pixeltype] identifier[dimension] = identifier[image] . identifier[dimension] identifier[components] = literal[int] identifier[imageShape] = identifier[image] . identifier[shape] identifier[nSections] = identifier[imageShape] [ identifier[dimension] - literal[int] ] identifier[subdimension] = identifier[dimension] - literal[int] identifier[suborigin] = identifier[iio] . identifier[get_origin] ( identifier[image] )[ literal[int] : identifier[subdimension] ] identifier[subspacing] = identifier[iio] . identifier[get_spacing] ( identifier[image] )[ literal[int] : identifier[subdimension] ] identifier[subdirection] = identifier[np] . identifier[eye] ( identifier[subdimension] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[subdimension] ): identifier[subdirection] [ identifier[i] ,:]= identifier[iio] . identifier[get_direction] ( identifier[image] )[ identifier[i] , literal[int] : identifier[subdimension] ] identifier[subdim] = identifier[image] . identifier[shape] [ literal[int] : identifier[subdimension] ] identifier[imagelist] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nSections] ): identifier[img] = identifier[utils] . identifier[slice_image] ( identifier[image] , identifier[axis] = identifier[subdimension] , identifier[idx] = identifier[i] ) identifier[iio] . identifier[set_spacing] ( identifier[img] , identifier[subspacing] ) identifier[iio] . identifier[set_origin] ( identifier[img] , identifier[suborigin] ) identifier[iio] . identifier[set_direction] ( identifier[img] , identifier[subdirection] ) identifier[imagelist] . identifier[append] ( identifier[img] ) keyword[return] identifier[imagelist]
def ndimage_to_list(image): """ Split a n dimensional ANTsImage into a list of n-1 dimensional ANTsImages Arguments --------- image : ANTsImage n-dimensional image to split Returns ------- list of ANTsImage types Example ------- >>> import ants >>> image = ants.image_read(ants.get_ants_data('r16')) >>> image2 = ants.image_read(ants.get_ants_data('r16')) >>> imageTar = ants.make_image( ( *image2.shape, 2 ) ) >>> image3 = ants.list_to_ndimage( imageTar, [image,image2]) >>> image3.dimension == 3 >>> images_unmerged = ants.ndimage_to_list( image3 ) >>> len(images_unmerged) == 2 >>> images_unmerged[0].dimension == 2 """ inpixeltype = image.pixeltype dimension = image.dimension components = 1 imageShape = image.shape nSections = imageShape[dimension - 1] subdimension = dimension - 1 suborigin = iio.get_origin(image)[0:subdimension] subspacing = iio.get_spacing(image)[0:subdimension] subdirection = np.eye(subdimension) for i in range(subdimension): subdirection[i, :] = iio.get_direction(image)[i, 0:subdimension] # depends on [control=['for'], data=['i']] subdim = image.shape[0:subdimension] imagelist = [] for i in range(nSections): img = utils.slice_image(image, axis=subdimension, idx=i) iio.set_spacing(img, subspacing) iio.set_origin(img, suborigin) iio.set_direction(img, subdirection) imagelist.append(img) # depends on [control=['for'], data=['i']] return imagelist
def detach(self, force=False): """ Detach this EBS volume from an EC2 instance. :type force: bool :param force: Forces detachment if the previous detachment attempt did not occur cleanly. This option can lead to data loss or a corrupted file system. Use this option only as a last resort to detach a volume from a failed instance. The instance will not have an opportunity to flush file system caches nor file system meta data. If you use this option, you must perform file system check and repair procedures. :rtype: bool :return: True if successful """ instance_id = None if self.attach_data: instance_id = self.attach_data.instance_id device = None if self.attach_data: device = self.attach_data.device return self.connection.detach_volume(self.id, instance_id, device, force)
def function[detach, parameter[self, force]]: constant[ Detach this EBS volume from an EC2 instance. :type force: bool :param force: Forces detachment if the previous detachment attempt did not occur cleanly. This option can lead to data loss or a corrupted file system. Use this option only as a last resort to detach a volume from a failed instance. The instance will not have an opportunity to flush file system caches nor file system meta data. If you use this option, you must perform file system check and repair procedures. :rtype: bool :return: True if successful ] variable[instance_id] assign[=] constant[None] if name[self].attach_data begin[:] variable[instance_id] assign[=] name[self].attach_data.instance_id variable[device] assign[=] constant[None] if name[self].attach_data begin[:] variable[device] assign[=] name[self].attach_data.device return[call[name[self].connection.detach_volume, parameter[name[self].id, name[instance_id], name[device], name[force]]]]
keyword[def] identifier[detach] ( identifier[self] , identifier[force] = keyword[False] ): literal[string] identifier[instance_id] = keyword[None] keyword[if] identifier[self] . identifier[attach_data] : identifier[instance_id] = identifier[self] . identifier[attach_data] . identifier[instance_id] identifier[device] = keyword[None] keyword[if] identifier[self] . identifier[attach_data] : identifier[device] = identifier[self] . identifier[attach_data] . identifier[device] keyword[return] identifier[self] . identifier[connection] . identifier[detach_volume] ( identifier[self] . identifier[id] , identifier[instance_id] , identifier[device] , identifier[force] )
def detach(self, force=False): """ Detach this EBS volume from an EC2 instance. :type force: bool :param force: Forces detachment if the previous detachment attempt did not occur cleanly. This option can lead to data loss or a corrupted file system. Use this option only as a last resort to detach a volume from a failed instance. The instance will not have an opportunity to flush file system caches nor file system meta data. If you use this option, you must perform file system check and repair procedures. :rtype: bool :return: True if successful """ instance_id = None if self.attach_data: instance_id = self.attach_data.instance_id # depends on [control=['if'], data=[]] device = None if self.attach_data: device = self.attach_data.device # depends on [control=['if'], data=[]] return self.connection.detach_volume(self.id, instance_id, device, force)
def log_likelihood_top1(data, params): """Compute the log-likelihood of model parameters.""" loglik = 0 params = np.asarray(params) for winner, losers in data: idx = np.append(winner, losers) loglik -= logsumexp(params.take(idx) - params[winner]) return loglik
def function[log_likelihood_top1, parameter[data, params]]: constant[Compute the log-likelihood of model parameters.] variable[loglik] assign[=] constant[0] variable[params] assign[=] call[name[np].asarray, parameter[name[params]]] for taget[tuple[[<ast.Name object at 0x7da1b18877f0>, <ast.Name object at 0x7da1b18852a0>]]] in starred[name[data]] begin[:] variable[idx] assign[=] call[name[np].append, parameter[name[winner], name[losers]]] <ast.AugAssign object at 0x7da1b18862c0> return[name[loglik]]
keyword[def] identifier[log_likelihood_top1] ( identifier[data] , identifier[params] ): literal[string] identifier[loglik] = literal[int] identifier[params] = identifier[np] . identifier[asarray] ( identifier[params] ) keyword[for] identifier[winner] , identifier[losers] keyword[in] identifier[data] : identifier[idx] = identifier[np] . identifier[append] ( identifier[winner] , identifier[losers] ) identifier[loglik] -= identifier[logsumexp] ( identifier[params] . identifier[take] ( identifier[idx] )- identifier[params] [ identifier[winner] ]) keyword[return] identifier[loglik]
def log_likelihood_top1(data, params): """Compute the log-likelihood of model parameters.""" loglik = 0 params = np.asarray(params) for (winner, losers) in data: idx = np.append(winner, losers) loglik -= logsumexp(params.take(idx) - params[winner]) # depends on [control=['for'], data=[]] return loglik
def slice(self, start, size): """Summary Args: start (TYPE): Description size (TYPE): Description Returns: TYPE: Description """ return SeriesWeld( grizzly_impl.slice( self.expr, start, size, self.weld_type ), self.weld_type, self.df, self.column_name )
def function[slice, parameter[self, start, size]]: constant[Summary Args: start (TYPE): Description size (TYPE): Description Returns: TYPE: Description ] return[call[name[SeriesWeld], parameter[call[name[grizzly_impl].slice, parameter[name[self].expr, name[start], name[size], name[self].weld_type]], name[self].weld_type, name[self].df, name[self].column_name]]]
keyword[def] identifier[slice] ( identifier[self] , identifier[start] , identifier[size] ): literal[string] keyword[return] identifier[SeriesWeld] ( identifier[grizzly_impl] . identifier[slice] ( identifier[self] . identifier[expr] , identifier[start] , identifier[size] , identifier[self] . identifier[weld_type] ), identifier[self] . identifier[weld_type] , identifier[self] . identifier[df] , identifier[self] . identifier[column_name] )
def slice(self, start, size): """Summary Args: start (TYPE): Description size (TYPE): Description Returns: TYPE: Description """ return SeriesWeld(grizzly_impl.slice(self.expr, start, size, self.weld_type), self.weld_type, self.df, self.column_name)
def stats(local=False, remote=False, jail=None, chroot=None, root=None): ''' Return pkgng stats. CLI Example: .. code-block:: bash salt '*' pkg.stats local Display stats only for the local package database. CLI Example: .. code-block:: bash salt '*' pkg.stats local=True remote Display stats only for the remote package database(s). CLI Example: .. code-block:: bash salt '*' pkg.stats remote=True jail Retrieve stats from the specified jail. CLI Example: .. code-block:: bash salt '*' pkg.stats jail=<jail name or id> salt '*' pkg.stats jail=<jail name or id> local=True salt '*' pkg.stats jail=<jail name or id> remote=True chroot Retrieve stats from the specified chroot (ignored if ``jail`` is specified). root Retrieve stats from the specified root (ignored if ``jail`` is specified). CLI Example: .. code-block:: bash salt '*' pkg.stats chroot=/path/to/chroot salt '*' pkg.stats chroot=/path/to/chroot local=True salt '*' pkg.stats chroot=/path/to/chroot remote=True ''' opts = '' if local: opts += 'l' if remote: opts += 'r' cmd = _pkg(jail, chroot, root) cmd.append('stats') if opts: cmd.append('-' + opts) out = __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False) return [x.strip('\t') for x in salt.utils.itertools.split(out, '\n')]
def function[stats, parameter[local, remote, jail, chroot, root]]: constant[ Return pkgng stats. CLI Example: .. code-block:: bash salt '*' pkg.stats local Display stats only for the local package database. CLI Example: .. code-block:: bash salt '*' pkg.stats local=True remote Display stats only for the remote package database(s). CLI Example: .. code-block:: bash salt '*' pkg.stats remote=True jail Retrieve stats from the specified jail. CLI Example: .. code-block:: bash salt '*' pkg.stats jail=<jail name or id> salt '*' pkg.stats jail=<jail name or id> local=True salt '*' pkg.stats jail=<jail name or id> remote=True chroot Retrieve stats from the specified chroot (ignored if ``jail`` is specified). root Retrieve stats from the specified root (ignored if ``jail`` is specified). CLI Example: .. code-block:: bash salt '*' pkg.stats chroot=/path/to/chroot salt '*' pkg.stats chroot=/path/to/chroot local=True salt '*' pkg.stats chroot=/path/to/chroot remote=True ] variable[opts] assign[=] constant[] if name[local] begin[:] <ast.AugAssign object at 0x7da20e960550> if name[remote] begin[:] <ast.AugAssign object at 0x7da20e962320> variable[cmd] assign[=] call[name[_pkg], parameter[name[jail], name[chroot], name[root]]] call[name[cmd].append, parameter[constant[stats]]] if name[opts] begin[:] call[name[cmd].append, parameter[binary_operation[constant[-] + name[opts]]]] variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]] return[<ast.ListComp object at 0x7da18ede57e0>]
keyword[def] identifier[stats] ( identifier[local] = keyword[False] , identifier[remote] = keyword[False] , identifier[jail] = keyword[None] , identifier[chroot] = keyword[None] , identifier[root] = keyword[None] ): literal[string] identifier[opts] = literal[string] keyword[if] identifier[local] : identifier[opts] += literal[string] keyword[if] identifier[remote] : identifier[opts] += literal[string] identifier[cmd] = identifier[_pkg] ( identifier[jail] , identifier[chroot] , identifier[root] ) identifier[cmd] . identifier[append] ( literal[string] ) keyword[if] identifier[opts] : identifier[cmd] . identifier[append] ( literal[string] + identifier[opts] ) identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[output_loglevel] = literal[string] , identifier[python_shell] = keyword[False] ) keyword[return] [ identifier[x] . identifier[strip] ( literal[string] ) keyword[for] identifier[x] keyword[in] identifier[salt] . identifier[utils] . identifier[itertools] . identifier[split] ( identifier[out] , literal[string] )]
def stats(local=False, remote=False, jail=None, chroot=None, root=None): """ Return pkgng stats. CLI Example: .. code-block:: bash salt '*' pkg.stats local Display stats only for the local package database. CLI Example: .. code-block:: bash salt '*' pkg.stats local=True remote Display stats only for the remote package database(s). CLI Example: .. code-block:: bash salt '*' pkg.stats remote=True jail Retrieve stats from the specified jail. CLI Example: .. code-block:: bash salt '*' pkg.stats jail=<jail name or id> salt '*' pkg.stats jail=<jail name or id> local=True salt '*' pkg.stats jail=<jail name or id> remote=True chroot Retrieve stats from the specified chroot (ignored if ``jail`` is specified). root Retrieve stats from the specified root (ignored if ``jail`` is specified). CLI Example: .. code-block:: bash salt '*' pkg.stats chroot=/path/to/chroot salt '*' pkg.stats chroot=/path/to/chroot local=True salt '*' pkg.stats chroot=/path/to/chroot remote=True """ opts = '' if local: opts += 'l' # depends on [control=['if'], data=[]] if remote: opts += 'r' # depends on [control=['if'], data=[]] cmd = _pkg(jail, chroot, root) cmd.append('stats') if opts: cmd.append('-' + opts) # depends on [control=['if'], data=[]] out = __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False) return [x.strip('\t') for x in salt.utils.itertools.split(out, '\n')]
def run(self): """Run command.""" self.announce( 'Building thunks', level=distutils.log.INFO) # run short circuit logic here srcDir = os.path.join("build","lib") destBody = os.path.join("build","src","jp_thunk.cpp") destHeader = os.path.join("build","src","jp_thunk.h") if os.path.isfile(destBody): t1=os.path.getctime(destBody) update =False for filename in _glob(srcDir, "*.class"): if t1<os.path.getctime(filename): update=True if not update: self.announce( 'Skip build thunks', level=distutils.log.INFO) return # do the build createThunks( srcDir, destBody, destHeader, namespace="JPThunk")
def function[run, parameter[self]]: constant[Run command.] call[name[self].announce, parameter[constant[Building thunks]]] variable[srcDir] assign[=] call[name[os].path.join, parameter[constant[build], constant[lib]]] variable[destBody] assign[=] call[name[os].path.join, parameter[constant[build], constant[src], constant[jp_thunk.cpp]]] variable[destHeader] assign[=] call[name[os].path.join, parameter[constant[build], constant[src], constant[jp_thunk.h]]] if call[name[os].path.isfile, parameter[name[destBody]]] begin[:] variable[t1] assign[=] call[name[os].path.getctime, parameter[name[destBody]]] variable[update] assign[=] constant[False] for taget[name[filename]] in starred[call[name[_glob], parameter[name[srcDir], constant[*.class]]]] begin[:] if compare[name[t1] less[<] call[name[os].path.getctime, parameter[name[filename]]]] begin[:] variable[update] assign[=] constant[True] if <ast.UnaryOp object at 0x7da2047e9a20> begin[:] call[name[self].announce, parameter[constant[Skip build thunks]]] return[None] call[name[createThunks], parameter[name[srcDir], name[destBody], name[destHeader]]]
keyword[def] identifier[run] ( identifier[self] ): literal[string] identifier[self] . identifier[announce] ( literal[string] , identifier[level] = identifier[distutils] . identifier[log] . identifier[INFO] ) identifier[srcDir] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ) identifier[destBody] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ) identifier[destHeader] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[destBody] ): identifier[t1] = identifier[os] . identifier[path] . identifier[getctime] ( identifier[destBody] ) identifier[update] = keyword[False] keyword[for] identifier[filename] keyword[in] identifier[_glob] ( identifier[srcDir] , literal[string] ): keyword[if] identifier[t1] < identifier[os] . identifier[path] . identifier[getctime] ( identifier[filename] ): identifier[update] = keyword[True] keyword[if] keyword[not] identifier[update] : identifier[self] . identifier[announce] ( literal[string] , identifier[level] = identifier[distutils] . identifier[log] . identifier[INFO] ) keyword[return] identifier[createThunks] ( identifier[srcDir] , identifier[destBody] , identifier[destHeader] , identifier[namespace] = literal[string] )
def run(self): """Run command.""" self.announce('Building thunks', level=distutils.log.INFO) # run short circuit logic here srcDir = os.path.join('build', 'lib') destBody = os.path.join('build', 'src', 'jp_thunk.cpp') destHeader = os.path.join('build', 'src', 'jp_thunk.h') if os.path.isfile(destBody): t1 = os.path.getctime(destBody) update = False for filename in _glob(srcDir, '*.class'): if t1 < os.path.getctime(filename): update = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] if not update: self.announce('Skip build thunks', level=distutils.log.INFO) return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # do the build createThunks(srcDir, destBody, destHeader, namespace='JPThunk')
def partial_distance_correlation(x, y, z): # pylint:disable=too-many-locals """ Partial distance correlation estimator. Compute the estimator for the partial distance correlation of the random vectors corresponding to :math:`x` and :math:`y` with respect to the random variable corresponding to :math:`z`. Parameters ---------- x: array_like First random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. y: array_like Second random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. z: array_like Random vector with respect to which the partial distance correlation is computed. The columns correspond with the individual random variables while the rows are individual instances of the random vector. Returns ------- numpy scalar Value of the estimator of the partial distance correlation. See Also -------- partial_distance_covariance Examples -------- >>> import numpy as np >>> import dcor >>> a = np.array([[1], [1], [2], [2], [3]]) >>> b = np.array([[1], [2], [1], [2], [1]]) >>> c = np.array([[1], [2], [2], [1], [2]]) >>> dcor.partial_distance_correlation(a, a, c) 1.0 >>> dcor.partial_distance_correlation(a, b, c) # doctest: +ELLIPSIS -0.5... 
>>> dcor.partial_distance_correlation(b, b, c) 1.0 >>> dcor.partial_distance_correlation(a, c, c) 0.0 """ a = _u_distance_matrix(x) b = _u_distance_matrix(y) c = _u_distance_matrix(z) aa = u_product(a, a) bb = u_product(b, b) cc = u_product(c, c) ab = u_product(a, b) ac = u_product(a, c) bc = u_product(b, c) denom_sqr = aa * bb r_xy = ab / _sqrt(denom_sqr) if denom_sqr != 0 else denom_sqr r_xy = np.clip(r_xy, -1, 1) denom_sqr = aa * cc r_xz = ac / _sqrt(denom_sqr) if denom_sqr != 0 else denom_sqr r_xz = np.clip(r_xz, -1, 1) denom_sqr = bb * cc r_yz = bc / _sqrt(denom_sqr) if denom_sqr != 0 else denom_sqr r_yz = np.clip(r_yz, -1, 1) denom = _sqrt(1 - r_xz ** 2) * _sqrt(1 - r_yz ** 2) return (r_xy - r_xz * r_yz) / denom if denom != 0 else denom
def function[partial_distance_correlation, parameter[x, y, z]]: constant[ Partial distance correlation estimator. Compute the estimator for the partial distance correlation of the random vectors corresponding to :math:`x` and :math:`y` with respect to the random variable corresponding to :math:`z`. Parameters ---------- x: array_like First random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. y: array_like Second random vector. The columns correspond with the individual random variables while the rows are individual instances of the random vector. z: array_like Random vector with respect to which the partial distance correlation is computed. The columns correspond with the individual random variables while the rows are individual instances of the random vector. Returns ------- numpy scalar Value of the estimator of the partial distance correlation. See Also -------- partial_distance_covariance Examples -------- >>> import numpy as np >>> import dcor >>> a = np.array([[1], [1], [2], [2], [3]]) >>> b = np.array([[1], [2], [1], [2], [1]]) >>> c = np.array([[1], [2], [2], [1], [2]]) >>> dcor.partial_distance_correlation(a, a, c) 1.0 >>> dcor.partial_distance_correlation(a, b, c) # doctest: +ELLIPSIS -0.5... 
>>> dcor.partial_distance_correlation(b, b, c) 1.0 >>> dcor.partial_distance_correlation(a, c, c) 0.0 ] variable[a] assign[=] call[name[_u_distance_matrix], parameter[name[x]]] variable[b] assign[=] call[name[_u_distance_matrix], parameter[name[y]]] variable[c] assign[=] call[name[_u_distance_matrix], parameter[name[z]]] variable[aa] assign[=] call[name[u_product], parameter[name[a], name[a]]] variable[bb] assign[=] call[name[u_product], parameter[name[b], name[b]]] variable[cc] assign[=] call[name[u_product], parameter[name[c], name[c]]] variable[ab] assign[=] call[name[u_product], parameter[name[a], name[b]]] variable[ac] assign[=] call[name[u_product], parameter[name[a], name[c]]] variable[bc] assign[=] call[name[u_product], parameter[name[b], name[c]]] variable[denom_sqr] assign[=] binary_operation[name[aa] * name[bb]] variable[r_xy] assign[=] <ast.IfExp object at 0x7da20c6e4370> variable[r_xy] assign[=] call[name[np].clip, parameter[name[r_xy], <ast.UnaryOp object at 0x7da20c6e78b0>, constant[1]]] variable[denom_sqr] assign[=] binary_operation[name[aa] * name[cc]] variable[r_xz] assign[=] <ast.IfExp object at 0x7da20c6e5000> variable[r_xz] assign[=] call[name[np].clip, parameter[name[r_xz], <ast.UnaryOp object at 0x7da1b2347580>, constant[1]]] variable[denom_sqr] assign[=] binary_operation[name[bb] * name[cc]] variable[r_yz] assign[=] <ast.IfExp object at 0x7da1b2344a90> variable[r_yz] assign[=] call[name[np].clip, parameter[name[r_yz], <ast.UnaryOp object at 0x7da1b2345690>, constant[1]]] variable[denom] assign[=] binary_operation[call[name[_sqrt], parameter[binary_operation[constant[1] - binary_operation[name[r_xz] ** constant[2]]]]] * call[name[_sqrt], parameter[binary_operation[constant[1] - binary_operation[name[r_yz] ** constant[2]]]]]] return[<ast.IfExp object at 0x7da20c991030>]
keyword[def] identifier[partial_distance_correlation] ( identifier[x] , identifier[y] , identifier[z] ): literal[string] identifier[a] = identifier[_u_distance_matrix] ( identifier[x] ) identifier[b] = identifier[_u_distance_matrix] ( identifier[y] ) identifier[c] = identifier[_u_distance_matrix] ( identifier[z] ) identifier[aa] = identifier[u_product] ( identifier[a] , identifier[a] ) identifier[bb] = identifier[u_product] ( identifier[b] , identifier[b] ) identifier[cc] = identifier[u_product] ( identifier[c] , identifier[c] ) identifier[ab] = identifier[u_product] ( identifier[a] , identifier[b] ) identifier[ac] = identifier[u_product] ( identifier[a] , identifier[c] ) identifier[bc] = identifier[u_product] ( identifier[b] , identifier[c] ) identifier[denom_sqr] = identifier[aa] * identifier[bb] identifier[r_xy] = identifier[ab] / identifier[_sqrt] ( identifier[denom_sqr] ) keyword[if] identifier[denom_sqr] != literal[int] keyword[else] identifier[denom_sqr] identifier[r_xy] = identifier[np] . identifier[clip] ( identifier[r_xy] ,- literal[int] , literal[int] ) identifier[denom_sqr] = identifier[aa] * identifier[cc] identifier[r_xz] = identifier[ac] / identifier[_sqrt] ( identifier[denom_sqr] ) keyword[if] identifier[denom_sqr] != literal[int] keyword[else] identifier[denom_sqr] identifier[r_xz] = identifier[np] . identifier[clip] ( identifier[r_xz] ,- literal[int] , literal[int] ) identifier[denom_sqr] = identifier[bb] * identifier[cc] identifier[r_yz] = identifier[bc] / identifier[_sqrt] ( identifier[denom_sqr] ) keyword[if] identifier[denom_sqr] != literal[int] keyword[else] identifier[denom_sqr] identifier[r_yz] = identifier[np] . 
identifier[clip] ( identifier[r_yz] ,- literal[int] , literal[int] ) identifier[denom] = identifier[_sqrt] ( literal[int] - identifier[r_xz] ** literal[int] )* identifier[_sqrt] ( literal[int] - identifier[r_yz] ** literal[int] ) keyword[return] ( identifier[r_xy] - identifier[r_xz] * identifier[r_yz] )/ identifier[denom] keyword[if] identifier[denom] != literal[int] keyword[else] identifier[denom]
def partial_distance_correlation(x, y, z): # pylint:disable=too-many-locals '\n Partial distance correlation estimator.\n\n Compute the estimator for the partial distance correlation of the\n random vectors corresponding to :math:`x` and :math:`y` with respect\n to the random variable corresponding to :math:`z`.\n\n Parameters\n ----------\n x: array_like\n First random vector. The columns correspond with the individual random\n variables while the rows are individual instances of the random vector.\n y: array_like\n Second random vector. The columns correspond with the individual random\n variables while the rows are individual instances of the random vector.\n z: array_like\n Random vector with respect to which the partial distance correlation\n is computed. The columns correspond with the individual random\n variables while the rows are individual instances of the random vector.\n\n Returns\n -------\n numpy scalar\n Value of the estimator of the partial distance correlation.\n\n See Also\n --------\n partial_distance_covariance\n\n Examples\n --------\n >>> import numpy as np\n >>> import dcor\n >>> a = np.array([[1], [1], [2], [2], [3]])\n >>> b = np.array([[1], [2], [1], [2], [1]])\n >>> c = np.array([[1], [2], [2], [1], [2]])\n >>> dcor.partial_distance_correlation(a, a, c)\n 1.0\n >>> dcor.partial_distance_correlation(a, b, c) # doctest: +ELLIPSIS\n -0.5...\n >>> dcor.partial_distance_correlation(b, b, c)\n 1.0\n >>> dcor.partial_distance_correlation(a, c, c)\n 0.0\n\n ' a = _u_distance_matrix(x) b = _u_distance_matrix(y) c = _u_distance_matrix(z) aa = u_product(a, a) bb = u_product(b, b) cc = u_product(c, c) ab = u_product(a, b) ac = u_product(a, c) bc = u_product(b, c) denom_sqr = aa * bb r_xy = ab / _sqrt(denom_sqr) if denom_sqr != 0 else denom_sqr r_xy = np.clip(r_xy, -1, 1) denom_sqr = aa * cc r_xz = ac / _sqrt(denom_sqr) if denom_sqr != 0 else denom_sqr r_xz = np.clip(r_xz, -1, 1) denom_sqr = bb * cc r_yz = bc / _sqrt(denom_sqr) if denom_sqr != 0 else 
denom_sqr r_yz = np.clip(r_yz, -1, 1) denom = _sqrt(1 - r_xz ** 2) * _sqrt(1 - r_yz ** 2) return (r_xy - r_xz * r_yz) / denom if denom != 0 else denom
def authorized_request(self, method, url, **kwargs):
    """Issue an HTTP request carrying this account's Authorization header.

    A ``Bearer`` token built from ``self.access_token`` is injected into
    the request headers.  A caller-supplied ``Authorization`` header or
    ``auth`` keyword would be silently overridden, so both are rejected.

    Args:
        method (str): HTTP method of this request, like GET or POST.
        url (str): URL of this request (one of Google APIs).
        **kwargs: Extra keyword arguments forwarded to ``requests.request``.

    Raises:
        ValueError: If an ``Authorization`` header or an ``auth`` keyword
            argument is supplied by the caller.

    Returns:
        requests.Response
    """
    headers = kwargs.pop('headers', {})
    caller_supplied_auth = bool(headers.get('Authorization')) or bool(kwargs.get('auth'))
    if caller_supplied_auth:
        raise ValueError("Found custom Authorization header, "
                         "method call would override it.")
    headers['Authorization'] = 'Bearer ' + self.access_token
    return requests.request(method, url, headers=headers, **kwargs)
def function[authorized_request, parameter[self, method, url]]: constant[Shortcut for requests.request with proper Authorization header. Note: If you put auth keyword argument or Authorization in headers keyword argument, this will raise an exception. Decide what you want to do! Args: method (str) - HTTP method of this request, like GET or POST. url (str) - URL of this request (one of Google APIs). Examples: >>> scope = 'https://www.googleapis.com/auth/plus.login' >>> url = 'https://www.googleapis.com/plus/v1/people' >>> '?query=Guuido+van+Rossum' >>> key = json.load(open('/path/to/credentials.json')) >>> auth = ServiceAccount.from_json(key=key, scopes=scope) >>> auth.authorized_request(method='get', url=url) Returns: requests.Response ] variable[headers] assign[=] call[name[kwargs].pop, parameter[constant[headers], dictionary[[], []]]] if <ast.BoolOp object at 0x7da20c76f940> begin[:] <ast.Raise object at 0x7da20c76c250> call[name[headers]][constant[Authorization]] assign[=] binary_operation[constant[Bearer ] + name[self].access_token] return[call[name[requests].request, parameter[name[method], name[url]]]]
keyword[def] identifier[authorized_request] ( identifier[self] , identifier[method] , identifier[url] ,** identifier[kwargs] ): literal[string] identifier[headers] = identifier[kwargs] . identifier[pop] ( literal[string] ,{}) keyword[if] identifier[headers] . identifier[get] ( literal[string] ) keyword[or] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[headers] [ literal[string] ]= literal[string] + identifier[self] . identifier[access_token] keyword[return] identifier[requests] . identifier[request] ( identifier[method] , identifier[url] , identifier[headers] = identifier[headers] ,** identifier[kwargs] )
def authorized_request(self, method, url, **kwargs): """Shortcut for requests.request with proper Authorization header. Note: If you put auth keyword argument or Authorization in headers keyword argument, this will raise an exception. Decide what you want to do! Args: method (str) - HTTP method of this request, like GET or POST. url (str) - URL of this request (one of Google APIs). Examples: >>> scope = 'https://www.googleapis.com/auth/plus.login' >>> url = 'https://www.googleapis.com/plus/v1/people' >>> '?query=Guuido+van+Rossum' >>> key = json.load(open('/path/to/credentials.json')) >>> auth = ServiceAccount.from_json(key=key, scopes=scope) >>> auth.authorized_request(method='get', url=url) Returns: requests.Response """ headers = kwargs.pop('headers', {}) if headers.get('Authorization') or kwargs.get('auth'): raise ValueError('Found custom Authorization header, method call would override it.') # depends on [control=['if'], data=[]] headers['Authorization'] = 'Bearer ' + self.access_token return requests.request(method, url, headers=headers, **kwargs)
def ancestors(self):
    """
    Collect every ancestor of this element, nearest first.
    @return: The chain of parents, from the immediate parent to the root.
    @rtype: [L{Element},..]
    """
    chain = []
    node = self.parent
    while node is not None:
        chain.append(node)
        node = node.parent
    return chain
def function[ancestors, parameter[self]]: constant[ Get a list of ancestors. @return: A list of ancestors. @rtype: [L{Element},..] ] variable[ancestors] assign[=] list[[]] variable[p] assign[=] name[self].parent while compare[name[p] is_not constant[None]] begin[:] call[name[ancestors].append, parameter[name[p]]] variable[p] assign[=] name[p].parent return[name[ancestors]]
keyword[def] identifier[ancestors] ( identifier[self] ): literal[string] identifier[ancestors] =[] identifier[p] = identifier[self] . identifier[parent] keyword[while] identifier[p] keyword[is] keyword[not] keyword[None] : identifier[ancestors] . identifier[append] ( identifier[p] ) identifier[p] = identifier[p] . identifier[parent] keyword[return] identifier[ancestors]
def ancestors(self): """ Get a list of ancestors. @return: A list of ancestors. @rtype: [L{Element},..] """ ancestors = [] p = self.parent while p is not None: ancestors.append(p) p = p.parent # depends on [control=['while'], data=['p']] return ancestors
def build_franklin_graph():
    """Makes a new Franklin graph.

    Ref: http://mathworld.wolfram.com/FranklinGraph.html"""
    # Start from the 12-cycle C12; the Franklin graph is C12 plus six
    # additional "chord" edges across the cycle.
    graph = build_cycle_graph(12)
    for tail, head in ((1, 8), (2, 7), (3, 10), (4, 9), (5, 12), (6, 11)):
        graph.new_edge(tail, head)
    return graph
def function[build_franklin_graph, parameter[]]: constant[Makes a new Franklin graph. Ref: http://mathworld.wolfram.com/FranklinGraph.html] variable[graph] assign[=] call[name[build_cycle_graph], parameter[constant[12]]] variable[edge_tpls] assign[=] list[[<ast.Tuple object at 0x7da1b28de560>, <ast.Tuple object at 0x7da1b28ddab0>, <ast.Tuple object at 0x7da1b28de4d0>, <ast.Tuple object at 0x7da1b28dd000>, <ast.Tuple object at 0x7da1b28dd8a0>, <ast.Tuple object at 0x7da1b28dc100>]] for taget[tuple[[<ast.Name object at 0x7da1b28dc0d0>, <ast.Name object at 0x7da1b28ddcf0>]]] in starred[name[edge_tpls]] begin[:] call[name[graph].new_edge, parameter[name[i], name[j]]] return[name[graph]]
keyword[def] identifier[build_franklin_graph] (): literal[string] identifier[graph] = identifier[build_cycle_graph] ( literal[int] ) identifier[edge_tpls] =[ ( literal[int] , literal[int] ), ( literal[int] , literal[int] ), ( literal[int] , literal[int] ), ( literal[int] , literal[int] ), ( literal[int] , literal[int] ), ( literal[int] , literal[int] ) ] keyword[for] identifier[i] , identifier[j] keyword[in] identifier[edge_tpls] : identifier[graph] . identifier[new_edge] ( identifier[i] , identifier[j] ) keyword[return] identifier[graph]
def build_franklin_graph(): """Makes a new Franklin graph. Ref: http://mathworld.wolfram.com/FranklinGraph.html""" # The easiest way to build the Franklin graph is to start # with C12 and add the additional 6 edges graph = build_cycle_graph(12) edge_tpls = [(1, 8), (2, 7), (3, 10), (4, 9), (5, 12), (6, 11)] for (i, j) in edge_tpls: graph.new_edge(i, j) # depends on [control=['for'], data=[]] return graph
def find_one_and_update(self, filter, update,
                        projection=None, sort=None, upsert=False,
                        return_document=ReturnDocument.BEFORE, **kwargs):
    """Update a single matching document and return one version of it.

    By default the document is returned as it looked *before* the update
    was applied (``ReturnDocument.BEFORE``); pass
    ``return_document=ReturnDocument.AFTER`` to get the updated (or
    upserted) document instead.  Returns ``None`` when no document matches
    and ``upsert`` is ``False``.

    :Parameters:
      - `filter`: A query that matches the document to update.
      - `update`: The update operations to apply.
      - `projection` (optional): A list of field names that should be
        returned in the result document or a mapping specifying the fields
        to include or exclude. If `projection` is a list "_id" will
        always be returned. Use a dict to exclude fields from
        the result (e.g. projection={'_id': False}).
      - `sort` (optional): a list of (key, direction) pairs
        specifying the sort order for the query. If multiple documents
        match the query, they are sorted and the first is updated.
      - `upsert` (optional): When ``True``, inserts a new document if no
        document matches the query. Defaults to ``False``.
      - `return_document`: If :attr:`ReturnDocument.BEFORE` (the default),
        returns the original document before it was updated, or ``None``
        if no document matches. If :attr:`ReturnDocument.AFTER`, returns
        the updated or inserted document.
      - `**kwargs` (optional): additional command arguments can be passed
        as keyword arguments (for example maxTimeMS can be used with
        recent server versions).

    .. versionchanged:: 3.4
       Added the `collation` option.
    .. versionchanged:: 3.2
       Respects write concern.

    .. warning:: Starting in PyMongo 3.2, this command uses the
       :class:`~pymongo.write_concern.WriteConcern` of this
       :class:`~pymongo.collection.Collection` when connected to MongoDB >=
       3.2. Note that using an elevated write concern with this command may
       be slower compared to using the default write concern.

    .. versionadded:: 3.0
    """
    common.validate_ok_for_update(update)
    # Merge the update spec into the command options before delegating to
    # the shared findAndModify implementation.
    options = dict(kwargs, update=update)
    return self.__find_and_modify(filter, projection, sort, upsert,
                                  return_document, **options)
def function[find_one_and_update, parameter[self, filter, update, projection, sort, upsert, return_document]]: constant[Finds a single document and updates it, returning either the original or the updated document. >>> db.test.find_one_and_update( ... {'_id': 665}, {'$inc': {'count': 1}, '$set': {'done': True}}) {u'_id': 665, u'done': False, u'count': 25}} By default :meth:`find_one_and_update` returns the original version of the document before the update was applied. To return the updated version of the document instead, use the *return_document* option. >>> from pymongo import ReturnDocument >>> db.example.find_one_and_update( ... {'_id': 'userid'}, ... {'$inc': {'seq': 1}}, ... return_document=ReturnDocument.AFTER) {u'_id': u'userid', u'seq': 1} You can limit the fields returned with the *projection* option. >>> db.example.find_one_and_update( ... {'_id': 'userid'}, ... {'$inc': {'seq': 1}}, ... projection={'seq': True, '_id': False}, ... return_document=ReturnDocument.AFTER) {u'seq': 2} The *upsert* option can be used to create the document if it doesn't already exist. >>> db.example.delete_many({}).deleted_count 1 >>> db.example.find_one_and_update( ... {'_id': 'userid'}, ... {'$inc': {'seq': 1}}, ... projection={'seq': True, '_id': False}, ... upsert=True, ... return_document=ReturnDocument.AFTER) {u'seq': 1} If multiple documents match *filter*, a *sort* can be applied. >>> for doc in db.test.find({'done': True}): ... print(doc) ... {u'_id': 665, u'done': True, u'result': {u'count': 26}} {u'_id': 701, u'done': True, u'result': {u'count': 17}} >>> db.test.find_one_and_update( ... {'done': True}, ... {'$set': {'final': True}}, ... sort=[('_id', pymongo.DESCENDING)]) {u'_id': 701, u'done': True, u'result': {u'count': 17}} :Parameters: - `filter`: A query that matches the document to update. - `update`: The update operations to apply. 
- `projection` (optional): A list of field names that should be returned in the result document or a mapping specifying the fields to include or exclude. If `projection` is a list "_id" will always be returned. Use a dict to exclude fields from the result (e.g. projection={'_id': False}). - `sort` (optional): a list of (key, direction) pairs specifying the sort order for the query. If multiple documents match the query, they are sorted and the first is updated. - `upsert` (optional): When ``True``, inserts a new document if no document matches the query. Defaults to ``False``. - `return_document`: If :attr:`ReturnDocument.BEFORE` (the default), returns the original document before it was updated, or ``None`` if no document matches. If :attr:`ReturnDocument.AFTER`, returns the updated or inserted document. - `**kwargs` (optional): additional command arguments can be passed as keyword arguments (for example maxTimeMS can be used with recent server versions). .. versionchanged:: 3.4 Added the `collation` option. .. versionchanged:: 3.2 Respects write concern. .. warning:: Starting in PyMongo 3.2, this command uses the :class:`~pymongo.write_concern.WriteConcern` of this :class:`~pymongo.collection.Collection` when connected to MongoDB >= 3.2. Note that using an elevated write concern with this command may be slower compared to using the default write concern. .. versionadded:: 3.0 ] call[name[common].validate_ok_for_update, parameter[name[update]]] call[name[kwargs]][constant[update]] assign[=] name[update] return[call[name[self].__find_and_modify, parameter[name[filter], name[projection], name[sort], name[upsert], name[return_document]]]]
keyword[def] identifier[find_one_and_update] ( identifier[self] , identifier[filter] , identifier[update] , identifier[projection] = keyword[None] , identifier[sort] = keyword[None] , identifier[upsert] = keyword[False] , identifier[return_document] = identifier[ReturnDocument] . identifier[BEFORE] ,** identifier[kwargs] ): literal[string] identifier[common] . identifier[validate_ok_for_update] ( identifier[update] ) identifier[kwargs] [ literal[string] ]= identifier[update] keyword[return] identifier[self] . identifier[__find_and_modify] ( identifier[filter] , identifier[projection] , identifier[sort] , identifier[upsert] , identifier[return_document] ,** identifier[kwargs] )
def find_one_and_update(self, filter, update, projection=None, sort=None, upsert=False, return_document=ReturnDocument.BEFORE, **kwargs): """Finds a single document and updates it, returning either the original or the updated document. >>> db.test.find_one_and_update( ... {'_id': 665}, {'$inc': {'count': 1}, '$set': {'done': True}}) {u'_id': 665, u'done': False, u'count': 25}} By default :meth:`find_one_and_update` returns the original version of the document before the update was applied. To return the updated version of the document instead, use the *return_document* option. >>> from pymongo import ReturnDocument >>> db.example.find_one_and_update( ... {'_id': 'userid'}, ... {'$inc': {'seq': 1}}, ... return_document=ReturnDocument.AFTER) {u'_id': u'userid', u'seq': 1} You can limit the fields returned with the *projection* option. >>> db.example.find_one_and_update( ... {'_id': 'userid'}, ... {'$inc': {'seq': 1}}, ... projection={'seq': True, '_id': False}, ... return_document=ReturnDocument.AFTER) {u'seq': 2} The *upsert* option can be used to create the document if it doesn't already exist. >>> db.example.delete_many({}).deleted_count 1 >>> db.example.find_one_and_update( ... {'_id': 'userid'}, ... {'$inc': {'seq': 1}}, ... projection={'seq': True, '_id': False}, ... upsert=True, ... return_document=ReturnDocument.AFTER) {u'seq': 1} If multiple documents match *filter*, a *sort* can be applied. >>> for doc in db.test.find({'done': True}): ... print(doc) ... {u'_id': 665, u'done': True, u'result': {u'count': 26}} {u'_id': 701, u'done': True, u'result': {u'count': 17}} >>> db.test.find_one_and_update( ... {'done': True}, ... {'$set': {'final': True}}, ... sort=[('_id', pymongo.DESCENDING)]) {u'_id': 701, u'done': True, u'result': {u'count': 17}} :Parameters: - `filter`: A query that matches the document to update. - `update`: The update operations to apply. 
- `projection` (optional): A list of field names that should be returned in the result document or a mapping specifying the fields to include or exclude. If `projection` is a list "_id" will always be returned. Use a dict to exclude fields from the result (e.g. projection={'_id': False}). - `sort` (optional): a list of (key, direction) pairs specifying the sort order for the query. If multiple documents match the query, they are sorted and the first is updated. - `upsert` (optional): When ``True``, inserts a new document if no document matches the query. Defaults to ``False``. - `return_document`: If :attr:`ReturnDocument.BEFORE` (the default), returns the original document before it was updated, or ``None`` if no document matches. If :attr:`ReturnDocument.AFTER`, returns the updated or inserted document. - `**kwargs` (optional): additional command arguments can be passed as keyword arguments (for example maxTimeMS can be used with recent server versions). .. versionchanged:: 3.4 Added the `collation` option. .. versionchanged:: 3.2 Respects write concern. .. warning:: Starting in PyMongo 3.2, this command uses the :class:`~pymongo.write_concern.WriteConcern` of this :class:`~pymongo.collection.Collection` when connected to MongoDB >= 3.2. Note that using an elevated write concern with this command may be slower compared to using the default write concern. .. versionadded:: 3.0 """ common.validate_ok_for_update(update) kwargs['update'] = update return self.__find_and_modify(filter, projection, sort, upsert, return_document, **kwargs)
def unit(session):
    """Run the unit test suite.

    On a Python 2.7 interpreter the suite is run once per supported Django
    release; on any other interpreter it runs once with the default
    dependencies.
    """
    # Testing multiple versions of django.
    # See https://www.djangoproject.com/download/ for supported versions.
    django_deps_27 = [
        ('django==1.8.19',),
        ('django >= 1.11.0, < 2.0.0dev',),
    ]
    if session.virtualenv.interpreter == '2.7':
        # Use a plain loop: ``default`` is called for its side effects, so
        # building a throwaway list via a comprehension was misleading.
        for django in django_deps_27:
            default(session, django_dep=django)
    else:
        default(session)
def function[unit, parameter[session]]: constant[Run the unit test suite.] variable[django_deps_27] assign[=] list[[<ast.Tuple object at 0x7da18bcc9c60>, <ast.Tuple object at 0x7da18bccb220>]] if compare[name[session].virtualenv.interpreter equal[==] constant[2.7]] begin[:] <ast.ListComp object at 0x7da207f03850>
keyword[def] identifier[unit] ( identifier[session] ): literal[string] identifier[django_deps_27] =[ ( literal[string] ,), ( literal[string] ,), ] keyword[if] identifier[session] . identifier[virtualenv] . identifier[interpreter] == literal[string] : [ identifier[default] ( identifier[session] , identifier[django_dep] = identifier[django] ) keyword[for] identifier[django] keyword[in] identifier[django_deps_27] ] keyword[else] : identifier[default] ( identifier[session] )
def unit(session): """Run the unit test suite.""" # Testing multiple version of django # See https://www.djangoproject.com/download/ for supported version django_deps_27 = [('django==1.8.19',), ('django >= 1.11.0, < 2.0.0dev',)] if session.virtualenv.interpreter == '2.7': [default(session, django_dep=django) for django in django_deps_27] # depends on [control=['if'], data=[]] else: default(session)
def basename(path, extension_marker="."):
    """
    :param str|None path: Path to consider
    :param str|None extension_marker: Trim file extension based on specified character
    :return str: Basename part of path, without extension (if 'extension_marker' provided)
    """
    name = os.path.basename(path or "")
    if not extension_marker:
        # No marker given: return the basename untouched.
        return name
    stem, _, remainder = name.rpartition(extension_marker)
    # When the marker is absent, rpartition leaves everything in 'remainder'.
    return stem if stem else remainder
def function[basename, parameter[path, extension_marker]]: constant[ :param str|None path: Path to consider :param str|None extension_marker: Trim file extension based on specified character :return str: Basename part of path, without extension (if 'extension_marker' provided) ] variable[result] assign[=] call[name[os].path.basename, parameter[<ast.BoolOp object at 0x7da1b237cfa0>]] if name[extension_marker] begin[:] <ast.Tuple object at 0x7da1b237d540> assign[=] call[name[result].rpartition, parameter[name[extension_marker]]] return[<ast.BoolOp object at 0x7da1b237e320>] return[name[result]]
keyword[def] identifier[basename] ( identifier[path] , identifier[extension_marker] = literal[string] ): literal[string] identifier[result] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] keyword[or] literal[string] ) keyword[if] identifier[extension_marker] : identifier[pre] , identifier[_] , identifier[post] = identifier[result] . identifier[rpartition] ( identifier[extension_marker] ) keyword[return] identifier[pre] keyword[or] identifier[post] keyword[return] identifier[result]
def basename(path, extension_marker='.'): """ :param str|None path: Path to consider :param str|None extension_marker: Trim file extension based on specified character :return str: Basename part of path, without extension (if 'extension_marker' provided) """ result = os.path.basename(path or '') if extension_marker: (pre, _, post) = result.rpartition(extension_marker) return pre or post # depends on [control=['if'], data=[]] return result
def complete_abstract_value(
    exe_context,  # type: ExecutionContext
    return_type,  # type: Union[GraphQLInterfaceType, GraphQLUnionType]
    field_asts,  # type: List[Field]
    info,  # type: ResolveInfo
    path,  # type: List[Union[int, str]]
    result,  # type: Any
):
    # type: (...) -> Dict[str, Any]
    """Complete a value of an abstract (interface/union) type.

    The concrete runtime object type of ``result`` is resolved first --
    either through the abstract type's own ``resolve_type`` hook or the
    default resolver -- and completion then proceeds as for an ordinary
    object value.  Raises :class:`GraphQLError` when the resolved type is
    not an object type or is not a possible type of ``return_type``.
    """
    runtime_type = None  # type: Union[str, GraphQLObjectType, None]

    # Field type must be Object, Interface or Union and expect sub-selections.
    if isinstance(return_type, (GraphQLInterfaceType, GraphQLUnionType)):
        resolver = return_type.resolve_type
        if resolver:
            runtime_type = resolver(result, info)
        else:
            runtime_type = get_default_resolve_type_fn(result, info, return_type)

    # The resolver may return a type name instead of a type object.
    if isinstance(runtime_type, string_types):
        runtime_type = info.schema.get_type(runtime_type)  # type: ignore

    if not isinstance(runtime_type, GraphQLObjectType):
        message = (
            "Abstract type {} must resolve to an Object type at runtime "
            'for field {}.{} with value "{}", received "{}".'
        ).format(return_type, info.parent_type, info.field_name, result, runtime_type)
        raise GraphQLError(message, field_asts)

    if not exe_context.schema.is_possible_type(return_type, runtime_type):
        raise GraphQLError(
            u'Runtime Object type "{}" is not a possible type for "{}".'.format(
                runtime_type, return_type
            ),
            field_asts,
        )

    return complete_object_value(
        exe_context, runtime_type, field_asts, info, path, result
    )
def function[complete_abstract_value, parameter[exe_context, return_type, field_asts, info, path, result]]: constant[ Complete an value of an abstract type by determining the runtime type of that value, then completing based on that type. ] variable[runtime_type] assign[=] constant[None] if call[name[isinstance], parameter[name[return_type], tuple[[<ast.Name object at 0x7da18bcca2c0>, <ast.Name object at 0x7da18bcc8550>]]]] begin[:] if name[return_type].resolve_type begin[:] variable[runtime_type] assign[=] call[name[return_type].resolve_type, parameter[name[result], name[info]]] if call[name[isinstance], parameter[name[runtime_type], name[string_types]]] begin[:] variable[runtime_type] assign[=] call[name[info].schema.get_type, parameter[name[runtime_type]]] if <ast.UnaryOp object at 0x7da18bccaf20> begin[:] <ast.Raise object at 0x7da18bccb0a0> if <ast.UnaryOp object at 0x7da18bccb730> begin[:] <ast.Raise object at 0x7da18bccbc40> return[call[name[complete_object_value], parameter[name[exe_context], name[runtime_type], name[field_asts], name[info], name[path], name[result]]]]
keyword[def] identifier[complete_abstract_value] ( identifier[exe_context] , identifier[return_type] , identifier[field_asts] , identifier[info] , identifier[path] , identifier[result] , ): literal[string] identifier[runtime_type] = keyword[None] keyword[if] identifier[isinstance] ( identifier[return_type] ,( identifier[GraphQLInterfaceType] , identifier[GraphQLUnionType] )): keyword[if] identifier[return_type] . identifier[resolve_type] : identifier[runtime_type] = identifier[return_type] . identifier[resolve_type] ( identifier[result] , identifier[info] ) keyword[else] : identifier[runtime_type] = identifier[get_default_resolve_type_fn] ( identifier[result] , identifier[info] , identifier[return_type] ) keyword[if] identifier[isinstance] ( identifier[runtime_type] , identifier[string_types] ): identifier[runtime_type] = identifier[info] . identifier[schema] . identifier[get_type] ( identifier[runtime_type] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[runtime_type] , identifier[GraphQLObjectType] ): keyword[raise] identifier[GraphQLError] ( ( literal[string] + literal[string] ). identifier[format] ( identifier[return_type] , identifier[info] . identifier[parent_type] , identifier[info] . identifier[field_name] , identifier[result] , identifier[runtime_type] ), identifier[field_asts] , ) keyword[if] keyword[not] identifier[exe_context] . identifier[schema] . identifier[is_possible_type] ( identifier[return_type] , identifier[runtime_type] ): keyword[raise] identifier[GraphQLError] ( literal[string] . identifier[format] ( identifier[runtime_type] , identifier[return_type] ), identifier[field_asts] , ) keyword[return] identifier[complete_object_value] ( identifier[exe_context] , identifier[runtime_type] , identifier[field_asts] , identifier[info] , identifier[path] , identifier[result] )
def complete_abstract_value(exe_context, return_type, field_asts, info, path, result): # type: ExecutionContext # type: Union[GraphQLInterfaceType, GraphQLUnionType] # type: List[Field] # type: ResolveInfo # type: List[Union[int, str]] # type: Any # type: (...) -> Dict[str, Any] '\n Complete an value of an abstract type by determining the runtime type of that value, then completing based\n on that type.\n ' runtime_type = None # type: Union[str, GraphQLObjectType, None] # Field type must be Object, Interface or Union and expect sub-selections. if isinstance(return_type, (GraphQLInterfaceType, GraphQLUnionType)): if return_type.resolve_type: runtime_type = return_type.resolve_type(result, info) # depends on [control=['if'], data=[]] else: runtime_type = get_default_resolve_type_fn(result, info, return_type) # depends on [control=['if'], data=[]] if isinstance(runtime_type, string_types): runtime_type = info.schema.get_type(runtime_type) # type: ignore # depends on [control=['if'], data=[]] if not isinstance(runtime_type, GraphQLObjectType): raise GraphQLError(('Abstract type {} must resolve to an Object type at runtime ' + 'for field {}.{} with value "{}", received "{}".').format(return_type, info.parent_type, info.field_name, result, runtime_type), field_asts) # depends on [control=['if'], data=[]] if not exe_context.schema.is_possible_type(return_type, runtime_type): raise GraphQLError(u'Runtime Object type "{}" is not a possible type for "{}".'.format(runtime_type, return_type), field_asts) # depends on [control=['if'], data=[]] return complete_object_value(exe_context, runtime_type, field_asts, info, path, result)
def get_port_at(self, tile_id, direction): """ If no port is found, a new none port is made and added to self.ports. Returns the port. :param tile_id: :param direction: :return: Port """ for port in self.ports: if port.tile_id == tile_id and port.direction == direction: return port port = Port(tile_id, direction, PortType.none) self.ports.append(port) return port
def function[get_port_at, parameter[self, tile_id, direction]]: constant[ If no port is found, a new none port is made and added to self.ports. Returns the port. :param tile_id: :param direction: :return: Port ] for taget[name[port]] in starred[name[self].ports] begin[:] if <ast.BoolOp object at 0x7da1b2472f50> begin[:] return[name[port]] variable[port] assign[=] call[name[Port], parameter[name[tile_id], name[direction], name[PortType].none]] call[name[self].ports.append, parameter[name[port]]] return[name[port]]
keyword[def] identifier[get_port_at] ( identifier[self] , identifier[tile_id] , identifier[direction] ): literal[string] keyword[for] identifier[port] keyword[in] identifier[self] . identifier[ports] : keyword[if] identifier[port] . identifier[tile_id] == identifier[tile_id] keyword[and] identifier[port] . identifier[direction] == identifier[direction] : keyword[return] identifier[port] identifier[port] = identifier[Port] ( identifier[tile_id] , identifier[direction] , identifier[PortType] . identifier[none] ) identifier[self] . identifier[ports] . identifier[append] ( identifier[port] ) keyword[return] identifier[port]
def get_port_at(self, tile_id, direction): """ If no port is found, a new none port is made and added to self.ports. Returns the port. :param tile_id: :param direction: :return: Port """ for port in self.ports: if port.tile_id == tile_id and port.direction == direction: return port # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['port']] port = Port(tile_id, direction, PortType.none) self.ports.append(port) return port
def pretty_spaces(level): """ Return spaces and new line. :type level: int or None :param level: deep level :rtype: unicode :return: string with new line and spaces """ if level is None: return u'' return (os.linesep if level >= 0 else u'') + (u' ' * (INDENT * level))
def function[pretty_spaces, parameter[level]]: constant[ Return spaces and new line. :type level: int or None :param level: deep level :rtype: unicode :return: string with new line and spaces ] if compare[name[level] is constant[None]] begin[:] return[constant[]] return[binary_operation[<ast.IfExp object at 0x7da1b140dab0> + binary_operation[constant[ ] * binary_operation[name[INDENT] * name[level]]]]]
keyword[def] identifier[pretty_spaces] ( identifier[level] ): literal[string] keyword[if] identifier[level] keyword[is] keyword[None] : keyword[return] literal[string] keyword[return] ( identifier[os] . identifier[linesep] keyword[if] identifier[level] >= literal[int] keyword[else] literal[string] )+( literal[string] *( identifier[INDENT] * identifier[level] ))
def pretty_spaces(level): """ Return spaces and new line. :type level: int or None :param level: deep level :rtype: unicode :return: string with new line and spaces """ if level is None: return u'' # depends on [control=['if'], data=[]] return (os.linesep if level >= 0 else u'') + u' ' * (INDENT * level)
def quit(self): """Remove this user from all channels and reinitialize the user's list of joined channels. """ for c in self.channels: c.users.remove(self.nick) self.channels = []
def function[quit, parameter[self]]: constant[Remove this user from all channels and reinitialize the user's list of joined channels. ] for taget[name[c]] in starred[name[self].channels] begin[:] call[name[c].users.remove, parameter[name[self].nick]] name[self].channels assign[=] list[[]]
keyword[def] identifier[quit] ( identifier[self] ): literal[string] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[channels] : identifier[c] . identifier[users] . identifier[remove] ( identifier[self] . identifier[nick] ) identifier[self] . identifier[channels] =[]
def quit(self): """Remove this user from all channels and reinitialize the user's list of joined channels. """ for c in self.channels: c.users.remove(self.nick) # depends on [control=['for'], data=['c']] self.channels = []
def _set_cos_traffic_class(self, v, load=False): """ Setter method for cos_traffic_class, mapped from YANG variable /qos/map/cos_traffic_class (list) If this variable is read-only (config: false) in the source YANG file, then _set_cos_traffic_class is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_cos_traffic_class() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("name",cos_traffic_class.cos_traffic_class, yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}), is_container='list', yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """cos_traffic_class must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("name",cos_traffic_class.cos_traffic_class, yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, is_container='list', user_ordered=False, 
path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}), is_container='list', yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True)""", }) self.__cos_traffic_class = t if hasattr(self, '_set'): self._set()
def function[_set_cos_traffic_class, parameter[self, v, load]]: constant[ Setter method for cos_traffic_class, mapped from YANG variable /qos/map/cos_traffic_class (list) If this variable is read-only (config: false) in the source YANG file, then _set_cos_traffic_class is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_cos_traffic_class() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da18f00d960> name[self].__cos_traffic_class assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_cos_traffic_class] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[cos_traffic_class] . identifier[cos_traffic_class] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . 
identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__cos_traffic_class] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_cos_traffic_class(self, v, load=False): """ Setter method for cos_traffic_class, mapped from YANG variable /qos/map/cos_traffic_class (list) If this variable is read-only (config: false) in the source YANG file, then _set_cos_traffic_class is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_cos_traffic_class() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('name', cos_traffic_class.cos_traffic_class, yang_name='cos-traffic-class', rest_name='cos-traffic-class', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}), is_container='list', yang_name='cos-traffic-class', rest_name='cos-traffic-class', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CoS-to-Traffic-Class map', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'cos_traffic_class', u'cli-mode-name': u'cos-traffic-class-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos-mls', defining_module='brocade-qos-mls', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'cos_traffic_class must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("name",cos_traffic_class.cos_traffic_class, yang_name="cos-traffic-class", 
rest_name="cos-traffic-class", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'name\', extensions={u\'tailf-common\': {u\'info\': u\'Configure CoS-to-Traffic-Class map\', u\'cli-no-key-completion\': None, u\'cli-full-no\': None, u\'cli-suppress-list-no\': None, u\'cli-suppress-key-abbreviation\': None, u\'cli-full-command\': None, u\'callpoint\': u\'cos_traffic_class\', u\'cli-mode-name\': u\'cos-traffic-class-$(name)\'}}), is_container=\'list\', yang_name="cos-traffic-class", rest_name="cos-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure CoS-to-Traffic-Class map\', u\'cli-no-key-completion\': None, u\'cli-full-no\': None, u\'cli-suppress-list-no\': None, u\'cli-suppress-key-abbreviation\': None, u\'cli-full-command\': None, u\'callpoint\': u\'cos_traffic_class\', u\'cli-mode-name\': u\'cos-traffic-class-$(name)\'}}, namespace=\'urn:brocade.com:mgmt:brocade-qos-mls\', defining_module=\'brocade-qos-mls\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__cos_traffic_class = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def main(): """ Commandline interface to rerank nbest lists. """ log.setup_main_logger(console=True, file_logging=False) log.log_sockeye_version(logger) params = argparse.ArgumentParser(description="Rerank nbest lists of translations." " Reranking sorts a list of hypotheses according" " to their score compared to a common reference.") arguments.add_rerank_args(params) args = params.parse_args() logger.info(args) rerank(args)
def function[main, parameter[]]: constant[ Commandline interface to rerank nbest lists. ] call[name[log].setup_main_logger, parameter[]] call[name[log].log_sockeye_version, parameter[name[logger]]] variable[params] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[arguments].add_rerank_args, parameter[name[params]]] variable[args] assign[=] call[name[params].parse_args, parameter[]] call[name[logger].info, parameter[name[args]]] call[name[rerank], parameter[name[args]]]
keyword[def] identifier[main] (): literal[string] identifier[log] . identifier[setup_main_logger] ( identifier[console] = keyword[True] , identifier[file_logging] = keyword[False] ) identifier[log] . identifier[log_sockeye_version] ( identifier[logger] ) identifier[params] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = literal[string] literal[string] literal[string] ) identifier[arguments] . identifier[add_rerank_args] ( identifier[params] ) identifier[args] = identifier[params] . identifier[parse_args] () identifier[logger] . identifier[info] ( identifier[args] ) identifier[rerank] ( identifier[args] )
def main(): """ Commandline interface to rerank nbest lists. """ log.setup_main_logger(console=True, file_logging=False) log.log_sockeye_version(logger) params = argparse.ArgumentParser(description='Rerank nbest lists of translations. Reranking sorts a list of hypotheses according to their score compared to a common reference.') arguments.add_rerank_args(params) args = params.parse_args() logger.info(args) rerank(args)
def load(self, game_json=None, mode=None): """ Load a game from a serialized JSON representation. The game expects a well defined structure as follows (Note JSON string format): '{ "guesses_made": int, "key": "str:a 4 word", "status": "str: one of playing, won, lost", "mode": { "digits": int, "digit_type": DigitWord.DIGIT | DigitWord.HEXDIGIT, "mode": GameMode(), "priority": int, "help_text": str, "instruction_text": str, "guesses_allowed": int }, "ttl": int, "answer": [int|str0, int|str1, ..., int|strN] }' * "mode" will be cast to a GameMode object * "answer" will be cast to a DigitWord object :param game_json: The source JSON - MUST be a string :param mode: A mode (str or GameMode) for the game being loaded :return: A game object """ if game_json is None: # New game_json if mode is not None: if isinstance(mode, str): _game_object = GameObject(mode=self._match_mode(mode=mode)) elif isinstance(mode, GameMode): _game_object = GameObject(mode=mode) else: raise TypeError("Game mode must be a GameMode or string") else: _game_object = GameObject(mode=self._game_modes[0]) _game_object.status = self.GAME_PLAYING else: if not isinstance(game_json, str): raise TypeError("Game must be passed as a serialized JSON string.") game_dict = json.loads(game_json) if not 'mode' in game_dict: raise ValueError("Mode is not provided in JSON; game_json cannot be loaded!") _mode = GameMode(**game_dict["mode"]) _game_object = GameObject(mode=_mode, source_game=game_dict) self.game = copy.deepcopy(_game_object)
def function[load, parameter[self, game_json, mode]]: constant[ Load a game from a serialized JSON representation. The game expects a well defined structure as follows (Note JSON string format): '{ "guesses_made": int, "key": "str:a 4 word", "status": "str: one of playing, won, lost", "mode": { "digits": int, "digit_type": DigitWord.DIGIT | DigitWord.HEXDIGIT, "mode": GameMode(), "priority": int, "help_text": str, "instruction_text": str, "guesses_allowed": int }, "ttl": int, "answer": [int|str0, int|str1, ..., int|strN] }' * "mode" will be cast to a GameMode object * "answer" will be cast to a DigitWord object :param game_json: The source JSON - MUST be a string :param mode: A mode (str or GameMode) for the game being loaded :return: A game object ] if compare[name[game_json] is constant[None]] begin[:] if compare[name[mode] is_not constant[None]] begin[:] if call[name[isinstance], parameter[name[mode], name[str]]] begin[:] variable[_game_object] assign[=] call[name[GameObject], parameter[]] name[_game_object].status assign[=] name[self].GAME_PLAYING name[self].game assign[=] call[name[copy].deepcopy, parameter[name[_game_object]]]
keyword[def] identifier[load] ( identifier[self] , identifier[game_json] = keyword[None] , identifier[mode] = keyword[None] ): literal[string] keyword[if] identifier[game_json] keyword[is] keyword[None] : keyword[if] identifier[mode] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[isinstance] ( identifier[mode] , identifier[str] ): identifier[_game_object] = identifier[GameObject] ( identifier[mode] = identifier[self] . identifier[_match_mode] ( identifier[mode] = identifier[mode] )) keyword[elif] identifier[isinstance] ( identifier[mode] , identifier[GameMode] ): identifier[_game_object] = identifier[GameObject] ( identifier[mode] = identifier[mode] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[else] : identifier[_game_object] = identifier[GameObject] ( identifier[mode] = identifier[self] . identifier[_game_modes] [ literal[int] ]) identifier[_game_object] . identifier[status] = identifier[self] . identifier[GAME_PLAYING] keyword[else] : keyword[if] keyword[not] identifier[isinstance] ( identifier[game_json] , identifier[str] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[game_dict] = identifier[json] . identifier[loads] ( identifier[game_json] ) keyword[if] keyword[not] literal[string] keyword[in] identifier[game_dict] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[_mode] = identifier[GameMode] (** identifier[game_dict] [ literal[string] ]) identifier[_game_object] = identifier[GameObject] ( identifier[mode] = identifier[_mode] , identifier[source_game] = identifier[game_dict] ) identifier[self] . identifier[game] = identifier[copy] . identifier[deepcopy] ( identifier[_game_object] )
def load(self, game_json=None, mode=None): """ Load a game from a serialized JSON representation. The game expects a well defined structure as follows (Note JSON string format): '{ "guesses_made": int, "key": "str:a 4 word", "status": "str: one of playing, won, lost", "mode": { "digits": int, "digit_type": DigitWord.DIGIT | DigitWord.HEXDIGIT, "mode": GameMode(), "priority": int, "help_text": str, "instruction_text": str, "guesses_allowed": int }, "ttl": int, "answer": [int|str0, int|str1, ..., int|strN] }' * "mode" will be cast to a GameMode object * "answer" will be cast to a DigitWord object :param game_json: The source JSON - MUST be a string :param mode: A mode (str or GameMode) for the game being loaded :return: A game object """ if game_json is None: # New game_json if mode is not None: if isinstance(mode, str): _game_object = GameObject(mode=self._match_mode(mode=mode)) # depends on [control=['if'], data=[]] elif isinstance(mode, GameMode): _game_object = GameObject(mode=mode) # depends on [control=['if'], data=[]] else: raise TypeError('Game mode must be a GameMode or string') # depends on [control=['if'], data=['mode']] else: _game_object = GameObject(mode=self._game_modes[0]) _game_object.status = self.GAME_PLAYING # depends on [control=['if'], data=[]] else: if not isinstance(game_json, str): raise TypeError('Game must be passed as a serialized JSON string.') # depends on [control=['if'], data=[]] game_dict = json.loads(game_json) if not 'mode' in game_dict: raise ValueError('Mode is not provided in JSON; game_json cannot be loaded!') # depends on [control=['if'], data=[]] _mode = GameMode(**game_dict['mode']) _game_object = GameObject(mode=_mode, source_game=game_dict) self.game = copy.deepcopy(_game_object)
def _chk_fld(self, ntd, name, qty_min=0, qty_max=None): """Further split a GAF value within a single field.""" vals = getattr(ntd, name) num_vals = len(vals) if num_vals < qty_min: self.illegal_lines['MIN QTY'].append( (-1, "FIELD({F}): MIN QUANTITY({Q}) WASN'T MET: {V}".format(F=name, Q=qty_min, V=vals))) if qty_max is not None: if num_vals > qty_max: self.illegal_lines['MAX QTY'].append( (-1, "FIELD({F}): MAX QUANTITY({Q}) EXCEEDED: {V}\n{NT}".format( F=name, Q=qty_max, V=vals, NT=ntd)))
def function[_chk_fld, parameter[self, ntd, name, qty_min, qty_max]]: constant[Further split a GAF value within a single field.] variable[vals] assign[=] call[name[getattr], parameter[name[ntd], name[name]]] variable[num_vals] assign[=] call[name[len], parameter[name[vals]]] if compare[name[num_vals] less[<] name[qty_min]] begin[:] call[call[name[self].illegal_lines][constant[MIN QTY]].append, parameter[tuple[[<ast.UnaryOp object at 0x7da18bc71510>, <ast.Call object at 0x7da18bc70400>]]]] if compare[name[qty_max] is_not constant[None]] begin[:] if compare[name[num_vals] greater[>] name[qty_max]] begin[:] call[call[name[self].illegal_lines][constant[MAX QTY]].append, parameter[tuple[[<ast.UnaryOp object at 0x7da18bc72470>, <ast.Call object at 0x7da18bc71db0>]]]]
keyword[def] identifier[_chk_fld] ( identifier[self] , identifier[ntd] , identifier[name] , identifier[qty_min] = literal[int] , identifier[qty_max] = keyword[None] ): literal[string] identifier[vals] = identifier[getattr] ( identifier[ntd] , identifier[name] ) identifier[num_vals] = identifier[len] ( identifier[vals] ) keyword[if] identifier[num_vals] < identifier[qty_min] : identifier[self] . identifier[illegal_lines] [ literal[string] ]. identifier[append] ( (- literal[int] , literal[string] . identifier[format] ( identifier[F] = identifier[name] , identifier[Q] = identifier[qty_min] , identifier[V] = identifier[vals] ))) keyword[if] identifier[qty_max] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[num_vals] > identifier[qty_max] : identifier[self] . identifier[illegal_lines] [ literal[string] ]. identifier[append] ( (- literal[int] , literal[string] . identifier[format] ( identifier[F] = identifier[name] , identifier[Q] = identifier[qty_max] , identifier[V] = identifier[vals] , identifier[NT] = identifier[ntd] )))
def _chk_fld(self, ntd, name, qty_min=0, qty_max=None): """Further split a GAF value within a single field.""" vals = getattr(ntd, name) num_vals = len(vals) if num_vals < qty_min: self.illegal_lines['MIN QTY'].append((-1, "FIELD({F}): MIN QUANTITY({Q}) WASN'T MET: {V}".format(F=name, Q=qty_min, V=vals))) # depends on [control=['if'], data=['qty_min']] if qty_max is not None: if num_vals > qty_max: self.illegal_lines['MAX QTY'].append((-1, 'FIELD({F}): MAX QUANTITY({Q}) EXCEEDED: {V}\n{NT}'.format(F=name, Q=qty_max, V=vals, NT=ntd))) # depends on [control=['if'], data=['qty_max']] # depends on [control=['if'], data=['qty_max']]
def on_unselect(self, item, action): """Add an action to make when an object is unfocused.""" if not isinstance(item, int): item = self.items.index(item) self._on_unselect[item] = action
def function[on_unselect, parameter[self, item, action]]: constant[Add an action to make when an object is unfocused.] if <ast.UnaryOp object at 0x7da20c990dc0> begin[:] variable[item] assign[=] call[name[self].items.index, parameter[name[item]]] call[name[self]._on_unselect][name[item]] assign[=] name[action]
keyword[def] identifier[on_unselect] ( identifier[self] , identifier[item] , identifier[action] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[item] , identifier[int] ): identifier[item] = identifier[self] . identifier[items] . identifier[index] ( identifier[item] ) identifier[self] . identifier[_on_unselect] [ identifier[item] ]= identifier[action]
def on_unselect(self, item, action): """Add an action to make when an object is unfocused.""" if not isinstance(item, int): item = self.items.index(item) # depends on [control=['if'], data=[]] self._on_unselect[item] = action
def group_systems(self, group_name, systems): """ Adds an array of systems to specified group Args: group_name: Display name of group systems: Array of {'machine_id': machine_id} """ api_group_id = None headers = {'Content-Type': 'application/json'} group_path = self.api_url + '/v1/groups' group_get_path = group_path + ('?display_name=%s' % quote(group_name)) logger.debug("GET group: %s", group_get_path) net_logger.info("GET %s", group_get_path) get_group = self.session.get(group_get_path) logger.debug("GET group status: %s", get_group.status_code) if get_group.status_code == 200: api_group_id = get_group.json()['id'] if get_group.status_code == 404: # Group does not exist, POST to create logger.debug("POST group") data = json.dumps({'display_name': group_name}) net_logger.info("POST", group_path) post_group = self.session.post(group_path, headers=headers, data=data) logger.debug("POST group status: %s", post_group.status_code) logger.debug("POST Group: %s", post_group.json()) self.handle_fail_rcs(post_group) api_group_id = post_group.json()['id'] logger.debug("PUT group") data = json.dumps(systems) net_logger.info("PUT %s", group_path + ('/%s/systems' % api_group_id)) put_group = self.session.put(group_path + ('/%s/systems' % api_group_id), headers=headers, data=data) logger.debug("PUT group status: %d", put_group.status_code) logger.debug("PUT Group: %s", put_group.json())
def function[group_systems, parameter[self, group_name, systems]]: constant[ Adds an array of systems to specified group Args: group_name: Display name of group systems: Array of {'machine_id': machine_id} ] variable[api_group_id] assign[=] constant[None] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cd690>], [<ast.Constant object at 0x7da18c4cd7b0>]] variable[group_path] assign[=] binary_operation[name[self].api_url + constant[/v1/groups]] variable[group_get_path] assign[=] binary_operation[name[group_path] + binary_operation[constant[?display_name=%s] <ast.Mod object at 0x7da2590d6920> call[name[quote], parameter[name[group_name]]]]] call[name[logger].debug, parameter[constant[GET group: %s], name[group_get_path]]] call[name[net_logger].info, parameter[constant[GET %s], name[group_get_path]]] variable[get_group] assign[=] call[name[self].session.get, parameter[name[group_get_path]]] call[name[logger].debug, parameter[constant[GET group status: %s], name[get_group].status_code]] if compare[name[get_group].status_code equal[==] constant[200]] begin[:] variable[api_group_id] assign[=] call[call[name[get_group].json, parameter[]]][constant[id]] if compare[name[get_group].status_code equal[==] constant[404]] begin[:] call[name[logger].debug, parameter[constant[POST group]]] variable[data] assign[=] call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da20c990910>], [<ast.Name object at 0x7da20c992470>]]]] call[name[net_logger].info, parameter[constant[POST], name[group_path]]] variable[post_group] assign[=] call[name[self].session.post, parameter[name[group_path]]] call[name[logger].debug, parameter[constant[POST group status: %s], name[post_group].status_code]] call[name[logger].debug, parameter[constant[POST Group: %s], call[name[post_group].json, parameter[]]]] call[name[self].handle_fail_rcs, parameter[name[post_group]]] variable[api_group_id] assign[=] call[call[name[post_group].json, parameter[]]][constant[id]] 
call[name[logger].debug, parameter[constant[PUT group]]] variable[data] assign[=] call[name[json].dumps, parameter[name[systems]]] call[name[net_logger].info, parameter[constant[PUT %s], binary_operation[name[group_path] + binary_operation[constant[/%s/systems] <ast.Mod object at 0x7da2590d6920> name[api_group_id]]]]] variable[put_group] assign[=] call[name[self].session.put, parameter[binary_operation[name[group_path] + binary_operation[constant[/%s/systems] <ast.Mod object at 0x7da2590d6920> name[api_group_id]]]]] call[name[logger].debug, parameter[constant[PUT group status: %d], name[put_group].status_code]] call[name[logger].debug, parameter[constant[PUT Group: %s], call[name[put_group].json, parameter[]]]]
keyword[def] identifier[group_systems] ( identifier[self] , identifier[group_name] , identifier[systems] ): literal[string] identifier[api_group_id] = keyword[None] identifier[headers] ={ literal[string] : literal[string] } identifier[group_path] = identifier[self] . identifier[api_url] + literal[string] identifier[group_get_path] = identifier[group_path] +( literal[string] % identifier[quote] ( identifier[group_name] )) identifier[logger] . identifier[debug] ( literal[string] , identifier[group_get_path] ) identifier[net_logger] . identifier[info] ( literal[string] , identifier[group_get_path] ) identifier[get_group] = identifier[self] . identifier[session] . identifier[get] ( identifier[group_get_path] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[get_group] . identifier[status_code] ) keyword[if] identifier[get_group] . identifier[status_code] == literal[int] : identifier[api_group_id] = identifier[get_group] . identifier[json] ()[ literal[string] ] keyword[if] identifier[get_group] . identifier[status_code] == literal[int] : identifier[logger] . identifier[debug] ( literal[string] ) identifier[data] = identifier[json] . identifier[dumps] ({ literal[string] : identifier[group_name] }) identifier[net_logger] . identifier[info] ( literal[string] , identifier[group_path] ) identifier[post_group] = identifier[self] . identifier[session] . identifier[post] ( identifier[group_path] , identifier[headers] = identifier[headers] , identifier[data] = identifier[data] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[post_group] . identifier[status_code] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[post_group] . identifier[json] ()) identifier[self] . identifier[handle_fail_rcs] ( identifier[post_group] ) identifier[api_group_id] = identifier[post_group] . identifier[json] ()[ literal[string] ] identifier[logger] . identifier[debug] ( literal[string] ) identifier[data] = identifier[json] . 
identifier[dumps] ( identifier[systems] ) identifier[net_logger] . identifier[info] ( literal[string] , identifier[group_path] +( literal[string] % identifier[api_group_id] )) identifier[put_group] = identifier[self] . identifier[session] . identifier[put] ( identifier[group_path] + ( literal[string] % identifier[api_group_id] ), identifier[headers] = identifier[headers] , identifier[data] = identifier[data] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[put_group] . identifier[status_code] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[put_group] . identifier[json] ())
def group_systems(self, group_name, systems): """ Adds an array of systems to specified group Args: group_name: Display name of group systems: Array of {'machine_id': machine_id} """ api_group_id = None headers = {'Content-Type': 'application/json'} group_path = self.api_url + '/v1/groups' group_get_path = group_path + '?display_name=%s' % quote(group_name) logger.debug('GET group: %s', group_get_path) net_logger.info('GET %s', group_get_path) get_group = self.session.get(group_get_path) logger.debug('GET group status: %s', get_group.status_code) if get_group.status_code == 200: api_group_id = get_group.json()['id'] # depends on [control=['if'], data=[]] if get_group.status_code == 404: # Group does not exist, POST to create logger.debug('POST group') data = json.dumps({'display_name': group_name}) net_logger.info('POST', group_path) post_group = self.session.post(group_path, headers=headers, data=data) logger.debug('POST group status: %s', post_group.status_code) logger.debug('POST Group: %s', post_group.json()) self.handle_fail_rcs(post_group) api_group_id = post_group.json()['id'] # depends on [control=['if'], data=[]] logger.debug('PUT group') data = json.dumps(systems) net_logger.info('PUT %s', group_path + '/%s/systems' % api_group_id) put_group = self.session.put(group_path + '/%s/systems' % api_group_id, headers=headers, data=data) logger.debug('PUT group status: %d', put_group.status_code) logger.debug('PUT Group: %s', put_group.json())
def encrypt(self, data, unique_identifier=None, cryptographic_parameters=None, iv_counter_nonce=None, credential=None): """ Encrypt data using the specified encryption key and parameters. Args: data (bytes): The bytes to encrypt. Required. unique_identifier (string): The unique ID of the encryption key to use. Optional, defaults to None. cryptographic_parameters (CryptographicParameters): A structure containing various cryptographic settings to be used for the encryption. Optional, defaults to None. iv_counter_nonce (bytes): The bytes to use for the IV/counter/ nonce, if needed by the encryption algorithm and/or cipher mode. Optional, defaults to None. credential (Credential): A credential object containing a set of authorization parameters for the operation. Optional, defaults to None. Returns: dict: The results of the encrypt operation, containing the following key/value pairs: Key | Value --------------------|----------------------------------------- 'unique_identifier' | (string) The unique ID of the encryption | key used to encrypt the data. 'data' | (bytes) The encrypted data. 'iv_counter_nonce' | (bytes) The IV/counter/nonce used for | the encryption, if autogenerated. 'result_status' | (ResultStatus) An enumeration indicating | the status of the operation result. 'result_reason' | (ResultReason) An enumeration providing | context for the result status. 'result_message' | (string) A message providing additional | context for the operation result. 
""" operation = Operation(OperationEnum.ENCRYPT) request_payload = payloads.EncryptRequestPayload( unique_identifier=unique_identifier, data=data, cryptographic_parameters=cryptographic_parameters, iv_counter_nonce=iv_counter_nonce ) batch_item = messages.RequestBatchItem( operation=operation, request_payload=request_payload ) request = self._build_request_message(credential, [batch_item]) response = self._send_and_receive_message(request) batch_item = response.batch_items[0] payload = batch_item.response_payload result = {} if payload: result['unique_identifier'] = payload.unique_identifier result['data'] = payload.data result['iv_counter_nonce'] = payload.iv_counter_nonce result['result_status'] = batch_item.result_status.value try: result['result_reason'] = batch_item.result_reason.value except Exception: result['result_reason'] = batch_item.result_reason try: result['result_message'] = batch_item.result_message.value except Exception: result['result_message'] = batch_item.result_message return result
def function[encrypt, parameter[self, data, unique_identifier, cryptographic_parameters, iv_counter_nonce, credential]]: constant[ Encrypt data using the specified encryption key and parameters. Args: data (bytes): The bytes to encrypt. Required. unique_identifier (string): The unique ID of the encryption key to use. Optional, defaults to None. cryptographic_parameters (CryptographicParameters): A structure containing various cryptographic settings to be used for the encryption. Optional, defaults to None. iv_counter_nonce (bytes): The bytes to use for the IV/counter/ nonce, if needed by the encryption algorithm and/or cipher mode. Optional, defaults to None. credential (Credential): A credential object containing a set of authorization parameters for the operation. Optional, defaults to None. Returns: dict: The results of the encrypt operation, containing the following key/value pairs: Key | Value --------------------|----------------------------------------- 'unique_identifier' | (string) The unique ID of the encryption | key used to encrypt the data. 'data' | (bytes) The encrypted data. 'iv_counter_nonce' | (bytes) The IV/counter/nonce used for | the encryption, if autogenerated. 'result_status' | (ResultStatus) An enumeration indicating | the status of the operation result. 'result_reason' | (ResultReason) An enumeration providing | context for the result status. 'result_message' | (string) A message providing additional | context for the operation result. 
] variable[operation] assign[=] call[name[Operation], parameter[name[OperationEnum].ENCRYPT]] variable[request_payload] assign[=] call[name[payloads].EncryptRequestPayload, parameter[]] variable[batch_item] assign[=] call[name[messages].RequestBatchItem, parameter[]] variable[request] assign[=] call[name[self]._build_request_message, parameter[name[credential], list[[<ast.Name object at 0x7da20c7ca3e0>]]]] variable[response] assign[=] call[name[self]._send_and_receive_message, parameter[name[request]]] variable[batch_item] assign[=] call[name[response].batch_items][constant[0]] variable[payload] assign[=] name[batch_item].response_payload variable[result] assign[=] dictionary[[], []] if name[payload] begin[:] call[name[result]][constant[unique_identifier]] assign[=] name[payload].unique_identifier call[name[result]][constant[data]] assign[=] name[payload].data call[name[result]][constant[iv_counter_nonce]] assign[=] name[payload].iv_counter_nonce call[name[result]][constant[result_status]] assign[=] name[batch_item].result_status.value <ast.Try object at 0x7da1b0297760> <ast.Try object at 0x7da1b02956f0> return[name[result]]
keyword[def] identifier[encrypt] ( identifier[self] , identifier[data] , identifier[unique_identifier] = keyword[None] , identifier[cryptographic_parameters] = keyword[None] , identifier[iv_counter_nonce] = keyword[None] , identifier[credential] = keyword[None] ): literal[string] identifier[operation] = identifier[Operation] ( identifier[OperationEnum] . identifier[ENCRYPT] ) identifier[request_payload] = identifier[payloads] . identifier[EncryptRequestPayload] ( identifier[unique_identifier] = identifier[unique_identifier] , identifier[data] = identifier[data] , identifier[cryptographic_parameters] = identifier[cryptographic_parameters] , identifier[iv_counter_nonce] = identifier[iv_counter_nonce] ) identifier[batch_item] = identifier[messages] . identifier[RequestBatchItem] ( identifier[operation] = identifier[operation] , identifier[request_payload] = identifier[request_payload] ) identifier[request] = identifier[self] . identifier[_build_request_message] ( identifier[credential] ,[ identifier[batch_item] ]) identifier[response] = identifier[self] . identifier[_send_and_receive_message] ( identifier[request] ) identifier[batch_item] = identifier[response] . identifier[batch_items] [ literal[int] ] identifier[payload] = identifier[batch_item] . identifier[response_payload] identifier[result] ={} keyword[if] identifier[payload] : identifier[result] [ literal[string] ]= identifier[payload] . identifier[unique_identifier] identifier[result] [ literal[string] ]= identifier[payload] . identifier[data] identifier[result] [ literal[string] ]= identifier[payload] . identifier[iv_counter_nonce] identifier[result] [ literal[string] ]= identifier[batch_item] . identifier[result_status] . identifier[value] keyword[try] : identifier[result] [ literal[string] ]= identifier[batch_item] . identifier[result_reason] . identifier[value] keyword[except] identifier[Exception] : identifier[result] [ literal[string] ]= identifier[batch_item] . 
identifier[result_reason] keyword[try] : identifier[result] [ literal[string] ]= identifier[batch_item] . identifier[result_message] . identifier[value] keyword[except] identifier[Exception] : identifier[result] [ literal[string] ]= identifier[batch_item] . identifier[result_message] keyword[return] identifier[result]
def encrypt(self, data, unique_identifier=None, cryptographic_parameters=None, iv_counter_nonce=None, credential=None): """ Encrypt data using the specified encryption key and parameters. Args: data (bytes): The bytes to encrypt. Required. unique_identifier (string): The unique ID of the encryption key to use. Optional, defaults to None. cryptographic_parameters (CryptographicParameters): A structure containing various cryptographic settings to be used for the encryption. Optional, defaults to None. iv_counter_nonce (bytes): The bytes to use for the IV/counter/ nonce, if needed by the encryption algorithm and/or cipher mode. Optional, defaults to None. credential (Credential): A credential object containing a set of authorization parameters for the operation. Optional, defaults to None. Returns: dict: The results of the encrypt operation, containing the following key/value pairs: Key | Value --------------------|----------------------------------------- 'unique_identifier' | (string) The unique ID of the encryption | key used to encrypt the data. 'data' | (bytes) The encrypted data. 'iv_counter_nonce' | (bytes) The IV/counter/nonce used for | the encryption, if autogenerated. 'result_status' | (ResultStatus) An enumeration indicating | the status of the operation result. 'result_reason' | (ResultReason) An enumeration providing | context for the result status. 'result_message' | (string) A message providing additional | context for the operation result. 
""" operation = Operation(OperationEnum.ENCRYPT) request_payload = payloads.EncryptRequestPayload(unique_identifier=unique_identifier, data=data, cryptographic_parameters=cryptographic_parameters, iv_counter_nonce=iv_counter_nonce) batch_item = messages.RequestBatchItem(operation=operation, request_payload=request_payload) request = self._build_request_message(credential, [batch_item]) response = self._send_and_receive_message(request) batch_item = response.batch_items[0] payload = batch_item.response_payload result = {} if payload: result['unique_identifier'] = payload.unique_identifier result['data'] = payload.data result['iv_counter_nonce'] = payload.iv_counter_nonce # depends on [control=['if'], data=[]] result['result_status'] = batch_item.result_status.value try: result['result_reason'] = batch_item.result_reason.value # depends on [control=['try'], data=[]] except Exception: result['result_reason'] = batch_item.result_reason # depends on [control=['except'], data=[]] try: result['result_message'] = batch_item.result_message.value # depends on [control=['try'], data=[]] except Exception: result['result_message'] = batch_item.result_message # depends on [control=['except'], data=[]] return result
def cleanup_on_delete(self, sender, document, **kwargs): ''' Clean up slug redirections on object deletion ''' if not self.follow or sender is not self.owner_document: return slug = getattr(document, self.db_field) namespace = self.owner_document.__name__ SlugFollow.objects(namespace=namespace, new_slug=slug).delete()
def function[cleanup_on_delete, parameter[self, sender, document]]: constant[ Clean up slug redirections on object deletion ] if <ast.BoolOp object at 0x7da20c6ab700> begin[:] return[None] variable[slug] assign[=] call[name[getattr], parameter[name[document], name[self].db_field]] variable[namespace] assign[=] name[self].owner_document.__name__ call[call[name[SlugFollow].objects, parameter[]].delete, parameter[]]
keyword[def] identifier[cleanup_on_delete] ( identifier[self] , identifier[sender] , identifier[document] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[follow] keyword[or] identifier[sender] keyword[is] keyword[not] identifier[self] . identifier[owner_document] : keyword[return] identifier[slug] = identifier[getattr] ( identifier[document] , identifier[self] . identifier[db_field] ) identifier[namespace] = identifier[self] . identifier[owner_document] . identifier[__name__] identifier[SlugFollow] . identifier[objects] ( identifier[namespace] = identifier[namespace] , identifier[new_slug] = identifier[slug] ). identifier[delete] ()
def cleanup_on_delete(self, sender, document, **kwargs): """ Clean up slug redirections on object deletion """ if not self.follow or sender is not self.owner_document: return # depends on [control=['if'], data=[]] slug = getattr(document, self.db_field) namespace = self.owner_document.__name__ SlugFollow.objects(namespace=namespace, new_slug=slug).delete()
def untrace_class(cls): """ Untraces given class. :param cls: Class to untrace. :type cls: object :return: Definition success. :rtype: bool """ for name, method in inspect.getmembers(cls, inspect.ismethod): untrace_method(cls, method) for name, function in inspect.getmembers(cls, inspect.isfunction): untrace_method(cls, function) for name, accessor in inspect.getmembers(cls, lambda x: type(x) is property): untrace_property(cls, accessor) set_untraced(cls) return True
def function[untrace_class, parameter[cls]]: constant[ Untraces given class. :param cls: Class to untrace. :type cls: object :return: Definition success. :rtype: bool ] for taget[tuple[[<ast.Name object at 0x7da18c4ce140>, <ast.Name object at 0x7da18c4cf670>]]] in starred[call[name[inspect].getmembers, parameter[name[cls], name[inspect].ismethod]]] begin[:] call[name[untrace_method], parameter[name[cls], name[method]]] for taget[tuple[[<ast.Name object at 0x7da2044c26e0>, <ast.Name object at 0x7da2044c2650>]]] in starred[call[name[inspect].getmembers, parameter[name[cls], name[inspect].isfunction]]] begin[:] call[name[untrace_method], parameter[name[cls], name[function]]] for taget[tuple[[<ast.Name object at 0x7da2044c1990>, <ast.Name object at 0x7da2044c3be0>]]] in starred[call[name[inspect].getmembers, parameter[name[cls], <ast.Lambda object at 0x7da2044c0970>]]] begin[:] call[name[untrace_property], parameter[name[cls], name[accessor]]] call[name[set_untraced], parameter[name[cls]]] return[constant[True]]
keyword[def] identifier[untrace_class] ( identifier[cls] ): literal[string] keyword[for] identifier[name] , identifier[method] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[cls] , identifier[inspect] . identifier[ismethod] ): identifier[untrace_method] ( identifier[cls] , identifier[method] ) keyword[for] identifier[name] , identifier[function] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[cls] , identifier[inspect] . identifier[isfunction] ): identifier[untrace_method] ( identifier[cls] , identifier[function] ) keyword[for] identifier[name] , identifier[accessor] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[cls] , keyword[lambda] identifier[x] : identifier[type] ( identifier[x] ) keyword[is] identifier[property] ): identifier[untrace_property] ( identifier[cls] , identifier[accessor] ) identifier[set_untraced] ( identifier[cls] ) keyword[return] keyword[True]
def untrace_class(cls): """ Untraces given class. :param cls: Class to untrace. :type cls: object :return: Definition success. :rtype: bool """ for (name, method) in inspect.getmembers(cls, inspect.ismethod): untrace_method(cls, method) # depends on [control=['for'], data=[]] for (name, function) in inspect.getmembers(cls, inspect.isfunction): untrace_method(cls, function) # depends on [control=['for'], data=[]] for (name, accessor) in inspect.getmembers(cls, lambda x: type(x) is property): untrace_property(cls, accessor) # depends on [control=['for'], data=[]] set_untraced(cls) return True
def __firewall_cmd(cmd): ''' Return the firewall-cmd location ''' firewall_cmd = '{0} {1}'.format(salt.utils.path.which('firewall-cmd'), cmd) out = __salt__['cmd.run_all'](firewall_cmd) if out['retcode'] != 0: if not out['stderr']: msg = out['stdout'] else: msg = out['stderr'] raise CommandExecutionError( 'firewall-cmd failed: {0}'.format(msg) ) return out['stdout']
def function[__firewall_cmd, parameter[cmd]]: constant[ Return the firewall-cmd location ] variable[firewall_cmd] assign[=] call[constant[{0} {1}].format, parameter[call[name[salt].utils.path.which, parameter[constant[firewall-cmd]]], name[cmd]]] variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[firewall_cmd]]] if compare[call[name[out]][constant[retcode]] not_equal[!=] constant[0]] begin[:] if <ast.UnaryOp object at 0x7da1b2347cd0> begin[:] variable[msg] assign[=] call[name[out]][constant[stdout]] <ast.Raise object at 0x7da1b2345810> return[call[name[out]][constant[stdout]]]
keyword[def] identifier[__firewall_cmd] ( identifier[cmd] ): literal[string] identifier[firewall_cmd] = literal[string] . identifier[format] ( identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ), identifier[cmd] ) identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[firewall_cmd] ) keyword[if] identifier[out] [ literal[string] ]!= literal[int] : keyword[if] keyword[not] identifier[out] [ literal[string] ]: identifier[msg] = identifier[out] [ literal[string] ] keyword[else] : identifier[msg] = identifier[out] [ literal[string] ] keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[msg] ) ) keyword[return] identifier[out] [ literal[string] ]
def __firewall_cmd(cmd): """ Return the firewall-cmd location """ firewall_cmd = '{0} {1}'.format(salt.utils.path.which('firewall-cmd'), cmd) out = __salt__['cmd.run_all'](firewall_cmd) if out['retcode'] != 0: if not out['stderr']: msg = out['stdout'] # depends on [control=['if'], data=[]] else: msg = out['stderr'] raise CommandExecutionError('firewall-cmd failed: {0}'.format(msg)) # depends on [control=['if'], data=[]] return out['stdout']
def add(self, new_results): """ Add new benchmark results. """ for result in new_results: result.update(self.context) self.results = self.results.append(result, ignore_index=True)
def function[add, parameter[self, new_results]]: constant[ Add new benchmark results. ] for taget[name[result]] in starred[name[new_results]] begin[:] call[name[result].update, parameter[name[self].context]] name[self].results assign[=] call[name[self].results.append, parameter[name[result]]]
keyword[def] identifier[add] ( identifier[self] , identifier[new_results] ): literal[string] keyword[for] identifier[result] keyword[in] identifier[new_results] : identifier[result] . identifier[update] ( identifier[self] . identifier[context] ) identifier[self] . identifier[results] = identifier[self] . identifier[results] . identifier[append] ( identifier[result] , identifier[ignore_index] = keyword[True] )
def add(self, new_results): """ Add new benchmark results. """ for result in new_results: result.update(self.context) self.results = self.results.append(result, ignore_index=True) # depends on [control=['for'], data=['result']]
def plot_two_columns(self, reset_xlimits=False, reset_ylimits=False): """Simple line plot for two selected columns.""" self.clear_plot() if self.tab is None: # No table data to plot return plt_kw = { 'lw': self.settings.get('linewidth', 1), 'ls': self.settings.get('linestyle', '-'), 'color': self.settings.get('linecolor', 'blue'), 'ms': self.settings.get('markersize', 6), 'mew': self.settings.get('markerwidth', 0.5), 'mfc': self.settings.get('markercolor', 'red')} plt_kw['mec'] = plt_kw['mfc'] try: x_data, y_data, marker = self._get_plot_data() self.tab_plot.plot( x_data, y_data, xtitle=self._get_label('x'), ytitle=self._get_label('y'), marker=marker, **plt_kw) if reset_xlimits: self.set_ylim_cb() self.set_xlimits_widgets() if reset_ylimits: self.set_xlim_cb() self.set_ylimits_widgets() if not (reset_xlimits or reset_ylimits): self.set_xlim_cb(redraw=False) self.set_ylim_cb() except Exception as e: self.logger.error(str(e)) else: self.save_plot.set_enabled(True)
def function[plot_two_columns, parameter[self, reset_xlimits, reset_ylimits]]: constant[Simple line plot for two selected columns.] call[name[self].clear_plot, parameter[]] if compare[name[self].tab is constant[None]] begin[:] return[None] variable[plt_kw] assign[=] dictionary[[<ast.Constant object at 0x7da18dc07910>, <ast.Constant object at 0x7da18dc06e30>, <ast.Constant object at 0x7da18dc07940>, <ast.Constant object at 0x7da18dc06bf0>, <ast.Constant object at 0x7da18dc06080>, <ast.Constant object at 0x7da18dc05a20>], [<ast.Call object at 0x7da18dc05240>, <ast.Call object at 0x7da18dc06890>, <ast.Call object at 0x7da207f98c10>, <ast.Call object at 0x7da207f9a9e0>, <ast.Call object at 0x7da207f9a1d0>, <ast.Call object at 0x7da207f9a860>]] call[name[plt_kw]][constant[mec]] assign[=] call[name[plt_kw]][constant[mfc]] <ast.Try object at 0x7da2041da860>
keyword[def] identifier[plot_two_columns] ( identifier[self] , identifier[reset_xlimits] = keyword[False] , identifier[reset_ylimits] = keyword[False] ): literal[string] identifier[self] . identifier[clear_plot] () keyword[if] identifier[self] . identifier[tab] keyword[is] keyword[None] : keyword[return] identifier[plt_kw] ={ literal[string] : identifier[self] . identifier[settings] . identifier[get] ( literal[string] , literal[int] ), literal[string] : identifier[self] . identifier[settings] . identifier[get] ( literal[string] , literal[string] ), literal[string] : identifier[self] . identifier[settings] . identifier[get] ( literal[string] , literal[string] ), literal[string] : identifier[self] . identifier[settings] . identifier[get] ( literal[string] , literal[int] ), literal[string] : identifier[self] . identifier[settings] . identifier[get] ( literal[string] , literal[int] ), literal[string] : identifier[self] . identifier[settings] . identifier[get] ( literal[string] , literal[string] )} identifier[plt_kw] [ literal[string] ]= identifier[plt_kw] [ literal[string] ] keyword[try] : identifier[x_data] , identifier[y_data] , identifier[marker] = identifier[self] . identifier[_get_plot_data] () identifier[self] . identifier[tab_plot] . identifier[plot] ( identifier[x_data] , identifier[y_data] , identifier[xtitle] = identifier[self] . identifier[_get_label] ( literal[string] ), identifier[ytitle] = identifier[self] . identifier[_get_label] ( literal[string] ), identifier[marker] = identifier[marker] ,** identifier[plt_kw] ) keyword[if] identifier[reset_xlimits] : identifier[self] . identifier[set_ylim_cb] () identifier[self] . identifier[set_xlimits_widgets] () keyword[if] identifier[reset_ylimits] : identifier[self] . identifier[set_xlim_cb] () identifier[self] . identifier[set_ylimits_widgets] () keyword[if] keyword[not] ( identifier[reset_xlimits] keyword[or] identifier[reset_ylimits] ): identifier[self] . 
identifier[set_xlim_cb] ( identifier[redraw] = keyword[False] ) identifier[self] . identifier[set_ylim_cb] () keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[logger] . identifier[error] ( identifier[str] ( identifier[e] )) keyword[else] : identifier[self] . identifier[save_plot] . identifier[set_enabled] ( keyword[True] )
def plot_two_columns(self, reset_xlimits=False, reset_ylimits=False): """Simple line plot for two selected columns.""" self.clear_plot() if self.tab is None: # No table data to plot return # depends on [control=['if'], data=[]] plt_kw = {'lw': self.settings.get('linewidth', 1), 'ls': self.settings.get('linestyle', '-'), 'color': self.settings.get('linecolor', 'blue'), 'ms': self.settings.get('markersize', 6), 'mew': self.settings.get('markerwidth', 0.5), 'mfc': self.settings.get('markercolor', 'red')} plt_kw['mec'] = plt_kw['mfc'] try: (x_data, y_data, marker) = self._get_plot_data() self.tab_plot.plot(x_data, y_data, xtitle=self._get_label('x'), ytitle=self._get_label('y'), marker=marker, **plt_kw) if reset_xlimits: self.set_ylim_cb() self.set_xlimits_widgets() # depends on [control=['if'], data=[]] if reset_ylimits: self.set_xlim_cb() self.set_ylimits_widgets() # depends on [control=['if'], data=[]] if not (reset_xlimits or reset_ylimits): self.set_xlim_cb(redraw=False) self.set_ylim_cb() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception as e: self.logger.error(str(e)) # depends on [control=['except'], data=['e']] else: self.save_plot.set_enabled(True)
def GetClientOs(client_id, token=None): """Returns last known operating system name that the client used.""" if data_store.RelationalDBEnabled(): kb = data_store.REL_DB.ReadClientSnapshot(client_id).knowledge_base else: with aff4.FACTORY.Open(client_id, token=token) as client: kb = client.Get(client.Schema.KNOWLEDGE_BASE) return kb.os
def function[GetClientOs, parameter[client_id, token]]: constant[Returns last known operating system name that the client used.] if call[name[data_store].RelationalDBEnabled, parameter[]] begin[:] variable[kb] assign[=] call[name[data_store].REL_DB.ReadClientSnapshot, parameter[name[client_id]]].knowledge_base return[name[kb].os]
keyword[def] identifier[GetClientOs] ( identifier[client_id] , identifier[token] = keyword[None] ): literal[string] keyword[if] identifier[data_store] . identifier[RelationalDBEnabled] (): identifier[kb] = identifier[data_store] . identifier[REL_DB] . identifier[ReadClientSnapshot] ( identifier[client_id] ). identifier[knowledge_base] keyword[else] : keyword[with] identifier[aff4] . identifier[FACTORY] . identifier[Open] ( identifier[client_id] , identifier[token] = identifier[token] ) keyword[as] identifier[client] : identifier[kb] = identifier[client] . identifier[Get] ( identifier[client] . identifier[Schema] . identifier[KNOWLEDGE_BASE] ) keyword[return] identifier[kb] . identifier[os]
def GetClientOs(client_id, token=None): """Returns last known operating system name that the client used.""" if data_store.RelationalDBEnabled(): kb = data_store.REL_DB.ReadClientSnapshot(client_id).knowledge_base # depends on [control=['if'], data=[]] else: with aff4.FACTORY.Open(client_id, token=token) as client: kb = client.Get(client.Schema.KNOWLEDGE_BASE) # depends on [control=['with'], data=['client']] return kb.os
def _create_user(self, username, password, mail, method, uuid): """Create a new user and all initial data""" try: if method == 'Invited': config_role = self.config.group_accept_invited else: config_role = self.config.group_accept_enrolled roles = [] if ',' in config_role: for item in config_role.split(','): roles.append(item.lstrip().rstrip()) else: roles = [config_role] newuser = objectmodels['user']({ 'name': username, 'passhash': std_hash(password, self.salt), 'mail': mail, 'uuid': std_uuid(), 'roles': roles, 'created': std_now() }) if method == 'Invited': newuser.needs_password_change = True newuser.save() except Exception as e: self.log("Problem creating new user: ", type(e), e, lvl=error) return try: newprofile = objectmodels['profile']({ 'uuid': std_uuid(), 'owner': newuser.uuid }) self.log("New profile uuid: ", newprofile.uuid, lvl=verbose) newprofile.save() packet = { 'component': 'hfos.enrol.enrolmanager', 'action': 'enrol', 'data': [True, mail] } self.fireEvent(send(uuid, packet)) # TODO: Notify crew-admins except Exception as e: self.log("Problem creating new profile: ", type(e), e, lvl=error)
def function[_create_user, parameter[self, username, password, mail, method, uuid]]: constant[Create a new user and all initial data] <ast.Try object at 0x7da1b0f060b0> <ast.Try object at 0x7da1b0e609a0>
keyword[def] identifier[_create_user] ( identifier[self] , identifier[username] , identifier[password] , identifier[mail] , identifier[method] , identifier[uuid] ): literal[string] keyword[try] : keyword[if] identifier[method] == literal[string] : identifier[config_role] = identifier[self] . identifier[config] . identifier[group_accept_invited] keyword[else] : identifier[config_role] = identifier[self] . identifier[config] . identifier[group_accept_enrolled] identifier[roles] =[] keyword[if] literal[string] keyword[in] identifier[config_role] : keyword[for] identifier[item] keyword[in] identifier[config_role] . identifier[split] ( literal[string] ): identifier[roles] . identifier[append] ( identifier[item] . identifier[lstrip] (). identifier[rstrip] ()) keyword[else] : identifier[roles] =[ identifier[config_role] ] identifier[newuser] = identifier[objectmodels] [ literal[string] ]({ literal[string] : identifier[username] , literal[string] : identifier[std_hash] ( identifier[password] , identifier[self] . identifier[salt] ), literal[string] : identifier[mail] , literal[string] : identifier[std_uuid] (), literal[string] : identifier[roles] , literal[string] : identifier[std_now] () }) keyword[if] identifier[method] == literal[string] : identifier[newuser] . identifier[needs_password_change] = keyword[True] identifier[newuser] . identifier[save] () keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[log] ( literal[string] , identifier[type] ( identifier[e] ), identifier[e] , identifier[lvl] = identifier[error] ) keyword[return] keyword[try] : identifier[newprofile] = identifier[objectmodels] [ literal[string] ]({ literal[string] : identifier[std_uuid] (), literal[string] : identifier[newuser] . identifier[uuid] }) identifier[self] . identifier[log] ( literal[string] , identifier[newprofile] . identifier[uuid] , identifier[lvl] = identifier[verbose] ) identifier[newprofile] . 
identifier[save] () identifier[packet] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] :[ keyword[True] , identifier[mail] ] } identifier[self] . identifier[fireEvent] ( identifier[send] ( identifier[uuid] , identifier[packet] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[log] ( literal[string] , identifier[type] ( identifier[e] ), identifier[e] , identifier[lvl] = identifier[error] )
def _create_user(self, username, password, mail, method, uuid): """Create a new user and all initial data""" try: if method == 'Invited': config_role = self.config.group_accept_invited # depends on [control=['if'], data=[]] else: config_role = self.config.group_accept_enrolled roles = [] if ',' in config_role: for item in config_role.split(','): roles.append(item.lstrip().rstrip()) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=['config_role']] else: roles = [config_role] newuser = objectmodels['user']({'name': username, 'passhash': std_hash(password, self.salt), 'mail': mail, 'uuid': std_uuid(), 'roles': roles, 'created': std_now()}) if method == 'Invited': newuser.needs_password_change = True # depends on [control=['if'], data=[]] newuser.save() # depends on [control=['try'], data=[]] except Exception as e: self.log('Problem creating new user: ', type(e), e, lvl=error) return # depends on [control=['except'], data=['e']] try: newprofile = objectmodels['profile']({'uuid': std_uuid(), 'owner': newuser.uuid}) self.log('New profile uuid: ', newprofile.uuid, lvl=verbose) newprofile.save() packet = {'component': 'hfos.enrol.enrolmanager', 'action': 'enrol', 'data': [True, mail]} self.fireEvent(send(uuid, packet)) # depends on [control=['try'], data=[]] # TODO: Notify crew-admins except Exception as e: self.log('Problem creating new profile: ', type(e), e, lvl=error) # depends on [control=['except'], data=['e']]
def clone(src, **kwargs): """Clones object with optionally overridden fields""" obj = object.__new__(type(src)) obj.__dict__.update(src.__dict__) obj.__dict__.update(kwargs) return obj
def function[clone, parameter[src]]: constant[Clones object with optionally overridden fields] variable[obj] assign[=] call[name[object].__new__, parameter[call[name[type], parameter[name[src]]]]] call[name[obj].__dict__.update, parameter[name[src].__dict__]] call[name[obj].__dict__.update, parameter[name[kwargs]]] return[name[obj]]
keyword[def] identifier[clone] ( identifier[src] ,** identifier[kwargs] ): literal[string] identifier[obj] = identifier[object] . identifier[__new__] ( identifier[type] ( identifier[src] )) identifier[obj] . identifier[__dict__] . identifier[update] ( identifier[src] . identifier[__dict__] ) identifier[obj] . identifier[__dict__] . identifier[update] ( identifier[kwargs] ) keyword[return] identifier[obj]
def clone(src, **kwargs): """Clones object with optionally overridden fields""" obj = object.__new__(type(src)) obj.__dict__.update(src.__dict__) obj.__dict__.update(kwargs) return obj
def data_received(self, data): """Add incoming data to buffer.""" data = data.decode() log.debug('received data: %s', data.strip()) self.buffer += data self.handle_lines()
def function[data_received, parameter[self, data]]: constant[Add incoming data to buffer.] variable[data] assign[=] call[name[data].decode, parameter[]] call[name[log].debug, parameter[constant[received data: %s], call[name[data].strip, parameter[]]]] <ast.AugAssign object at 0x7da1b04d9630> call[name[self].handle_lines, parameter[]]
keyword[def] identifier[data_received] ( identifier[self] , identifier[data] ): literal[string] identifier[data] = identifier[data] . identifier[decode] () identifier[log] . identifier[debug] ( literal[string] , identifier[data] . identifier[strip] ()) identifier[self] . identifier[buffer] += identifier[data] identifier[self] . identifier[handle_lines] ()
def data_received(self, data): """Add incoming data to buffer.""" data = data.decode() log.debug('received data: %s', data.strip()) self.buffer += data self.handle_lines()
def _make_serverproxy_handler(name, command, environment, timeout, absolute_url, port): """ Create a SuperviseAndProxyHandler subclass with given parameters """ # FIXME: Set 'name' properly class _Proxy(SuperviseAndProxyHandler): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.name = name self.proxy_base = name self.absolute_url = absolute_url self.requested_port = port @property def process_args(self): return { 'port': self.port, 'base_url': self.base_url, } def _render_template(self, value): args = self.process_args if type(value) is str: return value.format(**args) elif type(value) is list: return [self._render_template(v) for v in value] elif type(value) is dict: return { self._render_template(k): self._render_template(v) for k, v in value.items() } else: raise ValueError('Value of unrecognized type {}'.format(type(value))) def get_cmd(self): if callable(command): return self._render_template(call_with_asked_args(command, self.process_args)) else: return self._render_template(command) def get_env(self): if callable(environment): return self._render_template(call_with_asked_args(environment, self.process_args)) else: return self._render_template(environment) def get_timeout(self): return timeout return _Proxy
def function[_make_serverproxy_handler, parameter[name, command, environment, timeout, absolute_url, port]]: constant[ Create a SuperviseAndProxyHandler subclass with given parameters ] class class[_Proxy, parameter[]] begin[:] def function[__init__, parameter[self]]: call[call[name[super], parameter[]].__init__, parameter[<ast.Starred object at 0x7da1b26ae6b0>]] name[self].name assign[=] name[name] name[self].proxy_base assign[=] name[name] name[self].absolute_url assign[=] name[absolute_url] name[self].requested_port assign[=] name[port] def function[process_args, parameter[self]]: return[dictionary[[<ast.Constant object at 0x7da1b26aee90>, <ast.Constant object at 0x7da1b26af910>], [<ast.Attribute object at 0x7da1b26acca0>, <ast.Attribute object at 0x7da1b26aef20>]]] def function[_render_template, parameter[self, value]]: variable[args] assign[=] name[self].process_args if compare[call[name[type], parameter[name[value]]] is name[str]] begin[:] return[call[name[value].format, parameter[]]] def function[get_cmd, parameter[self]]: if call[name[callable], parameter[name[command]]] begin[:] return[call[name[self]._render_template, parameter[call[name[call_with_asked_args], parameter[name[command], name[self].process_args]]]]] def function[get_env, parameter[self]]: if call[name[callable], parameter[name[environment]]] begin[:] return[call[name[self]._render_template, parameter[call[name[call_with_asked_args], parameter[name[environment], name[self].process_args]]]]] def function[get_timeout, parameter[self]]: return[name[timeout]] return[name[_Proxy]]
keyword[def] identifier[_make_serverproxy_handler] ( identifier[name] , identifier[command] , identifier[environment] , identifier[timeout] , identifier[absolute_url] , identifier[port] ): literal[string] keyword[class] identifier[_Proxy] ( identifier[SuperviseAndProxyHandler] ): keyword[def] identifier[__init__] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): identifier[super] (). identifier[__init__] (* identifier[args] ,** identifier[kwargs] ) identifier[self] . identifier[name] = identifier[name] identifier[self] . identifier[proxy_base] = identifier[name] identifier[self] . identifier[absolute_url] = identifier[absolute_url] identifier[self] . identifier[requested_port] = identifier[port] @ identifier[property] keyword[def] identifier[process_args] ( identifier[self] ): keyword[return] { literal[string] : identifier[self] . identifier[port] , literal[string] : identifier[self] . identifier[base_url] , } keyword[def] identifier[_render_template] ( identifier[self] , identifier[value] ): identifier[args] = identifier[self] . identifier[process_args] keyword[if] identifier[type] ( identifier[value] ) keyword[is] identifier[str] : keyword[return] identifier[value] . identifier[format] (** identifier[args] ) keyword[elif] identifier[type] ( identifier[value] ) keyword[is] identifier[list] : keyword[return] [ identifier[self] . identifier[_render_template] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[value] ] keyword[elif] identifier[type] ( identifier[value] ) keyword[is] identifier[dict] : keyword[return] { identifier[self] . identifier[_render_template] ( identifier[k] ): identifier[self] . identifier[_render_template] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[value] . identifier[items] () } keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . 
identifier[format] ( identifier[type] ( identifier[value] ))) keyword[def] identifier[get_cmd] ( identifier[self] ): keyword[if] identifier[callable] ( identifier[command] ): keyword[return] identifier[self] . identifier[_render_template] ( identifier[call_with_asked_args] ( identifier[command] , identifier[self] . identifier[process_args] )) keyword[else] : keyword[return] identifier[self] . identifier[_render_template] ( identifier[command] ) keyword[def] identifier[get_env] ( identifier[self] ): keyword[if] identifier[callable] ( identifier[environment] ): keyword[return] identifier[self] . identifier[_render_template] ( identifier[call_with_asked_args] ( identifier[environment] , identifier[self] . identifier[process_args] )) keyword[else] : keyword[return] identifier[self] . identifier[_render_template] ( identifier[environment] ) keyword[def] identifier[get_timeout] ( identifier[self] ): keyword[return] identifier[timeout] keyword[return] identifier[_Proxy]
def _make_serverproxy_handler(name, command, environment, timeout, absolute_url, port): """ Create a SuperviseAndProxyHandler subclass with given parameters """ # FIXME: Set 'name' properly class _Proxy(SuperviseAndProxyHandler): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.name = name self.proxy_base = name self.absolute_url = absolute_url self.requested_port = port @property def process_args(self): return {'port': self.port, 'base_url': self.base_url} def _render_template(self, value): args = self.process_args if type(value) is str: return value.format(**args) # depends on [control=['if'], data=[]] elif type(value) is list: return [self._render_template(v) for v in value] # depends on [control=['if'], data=[]] elif type(value) is dict: return {self._render_template(k): self._render_template(v) for (k, v) in value.items()} # depends on [control=['if'], data=[]] else: raise ValueError('Value of unrecognized type {}'.format(type(value))) def get_cmd(self): if callable(command): return self._render_template(call_with_asked_args(command, self.process_args)) # depends on [control=['if'], data=[]] else: return self._render_template(command) def get_env(self): if callable(environment): return self._render_template(call_with_asked_args(environment, self.process_args)) # depends on [control=['if'], data=[]] else: return self._render_template(environment) def get_timeout(self): return timeout return _Proxy
def download_profile(self, profile_name: Union[str, Profile], profile_pic: bool = True, profile_pic_only: bool = False, fast_update: bool = False, download_stories: bool = False, download_stories_only: bool = False, download_tagged: bool = False, download_tagged_only: bool = False, post_filter: Optional[Callable[[Post], bool]] = None, storyitem_filter: Optional[Callable[[StoryItem], bool]] = None) -> None: """Download one profile .. deprecated:: 4.1 Use :meth:`Instaloader.download_profiles`. """ # Get profile main page json # check if profile does exist or name has changed since last download # and update name and json data if necessary if isinstance(profile_name, str): profile = self.check_profile_id(profile_name.lower()) else: profile = profile_name profile_name = profile.username # Save metadata as JSON if desired. if self.save_metadata is not False: json_filename = '{0}/{1}_{2}'.format(self.dirname_pattern.format(profile=profile_name, target=profile_name), profile_name, profile.userid) self.save_metadata_json(json_filename, profile) if self.context.is_logged_in and profile.has_blocked_viewer and not profile.is_private: # raising ProfileNotExistsException invokes "trying again anonymously" logic raise ProfileNotExistsException("Profile {} has blocked you".format(profile_name)) # Download profile picture if profile_pic or profile_pic_only: with self.context.error_catcher('Download profile picture of {}'.format(profile_name)): self.download_profilepic(profile) if profile_pic_only: return # Catch some errors if profile.is_private: if not self.context.is_logged_in: raise LoginRequiredException("profile %s requires login" % profile_name) if not profile.followed_by_viewer and \ self.context.username != profile.username: raise PrivateProfileNotFollowedException("Profile %s: private but not followed." % profile_name) else: if self.context.is_logged_in and not (download_stories or download_stories_only): self.context.log("profile %s could also be downloaded anonymously." 
% profile_name) # Download stories, if requested if download_stories or download_stories_only: if profile.has_viewable_story: with self.context.error_catcher("Download stories of {}".format(profile_name)): self.download_stories(userids=[profile.userid], filename_target=profile_name, fast_update=fast_update, storyitem_filter=storyitem_filter) else: self.context.log("{} does not have any stories.".format(profile_name)) if download_stories_only: return # Download tagged, if requested if download_tagged or download_tagged_only: with self.context.error_catcher('Download tagged of {}'.format(profile_name)): self.download_tagged(profile, fast_update=fast_update, post_filter=post_filter) if download_tagged_only: return # Iterate over pictures and download them self.context.log("Retrieving posts from profile {}.".format(profile_name)) totalcount = profile.mediacount count = 1 for post in profile.get_posts(): self.context.log("[%3i/%3i] " % (count, totalcount), end="", flush=True) count += 1 if post_filter is not None and not post_filter(post): self.context.log('<skipped>') continue with self.context.error_catcher('Download profile {}'.format(profile_name)): downloaded = self.download_post(post, target=profile_name) if fast_update and not downloaded: break
def function[download_profile, parameter[self, profile_name, profile_pic, profile_pic_only, fast_update, download_stories, download_stories_only, download_tagged, download_tagged_only, post_filter, storyitem_filter]]: constant[Download one profile .. deprecated:: 4.1 Use :meth:`Instaloader.download_profiles`. ] if call[name[isinstance], parameter[name[profile_name], name[str]]] begin[:] variable[profile] assign[=] call[name[self].check_profile_id, parameter[call[name[profile_name].lower, parameter[]]]] variable[profile_name] assign[=] name[profile].username if compare[name[self].save_metadata is_not constant[False]] begin[:] variable[json_filename] assign[=] call[constant[{0}/{1}_{2}].format, parameter[call[name[self].dirname_pattern.format, parameter[]], name[profile_name], name[profile].userid]] call[name[self].save_metadata_json, parameter[name[json_filename], name[profile]]] if <ast.BoolOp object at 0x7da20c796740> begin[:] <ast.Raise object at 0x7da20c796200> if <ast.BoolOp object at 0x7da20c796140> begin[:] with call[name[self].context.error_catcher, parameter[call[constant[Download profile picture of {}].format, parameter[name[profile_name]]]]] begin[:] call[name[self].download_profilepic, parameter[name[profile]]] if name[profile_pic_only] begin[:] return[None] if name[profile].is_private begin[:] if <ast.UnaryOp object at 0x7da20c796770> begin[:] <ast.Raise object at 0x7da20c795ed0> if <ast.BoolOp object at 0x7da20c794610> begin[:] <ast.Raise object at 0x7da20c794370> if <ast.BoolOp object at 0x7da207f03e80> begin[:] if name[profile].has_viewable_story begin[:] with call[name[self].context.error_catcher, parameter[call[constant[Download stories of {}].format, parameter[name[profile_name]]]]] begin[:] call[name[self].download_stories, parameter[]] if name[download_stories_only] begin[:] return[None] if <ast.BoolOp object at 0x7da207f00a00> begin[:] with call[name[self].context.error_catcher, parameter[call[constant[Download tagged of {}].format, 
parameter[name[profile_name]]]]] begin[:] call[name[self].download_tagged, parameter[name[profile]]] if name[download_tagged_only] begin[:] return[None] call[name[self].context.log, parameter[call[constant[Retrieving posts from profile {}.].format, parameter[name[profile_name]]]]] variable[totalcount] assign[=] name[profile].mediacount variable[count] assign[=] constant[1] for taget[name[post]] in starred[call[name[profile].get_posts, parameter[]]] begin[:] call[name[self].context.log, parameter[binary_operation[constant[[%3i/%3i] ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f00460>, <ast.Name object at 0x7da207f030d0>]]]]] <ast.AugAssign object at 0x7da207f01180> if <ast.BoolOp object at 0x7da207f02620> begin[:] call[name[self].context.log, parameter[constant[<skipped>]]] continue with call[name[self].context.error_catcher, parameter[call[constant[Download profile {}].format, parameter[name[profile_name]]]]] begin[:] variable[downloaded] assign[=] call[name[self].download_post, parameter[name[post]]] if <ast.BoolOp object at 0x7da207f009d0> begin[:] break
keyword[def] identifier[download_profile] ( identifier[self] , identifier[profile_name] : identifier[Union] [ identifier[str] , identifier[Profile] ], identifier[profile_pic] : identifier[bool] = keyword[True] , identifier[profile_pic_only] : identifier[bool] = keyword[False] , identifier[fast_update] : identifier[bool] = keyword[False] , identifier[download_stories] : identifier[bool] = keyword[False] , identifier[download_stories_only] : identifier[bool] = keyword[False] , identifier[download_tagged] : identifier[bool] = keyword[False] , identifier[download_tagged_only] : identifier[bool] = keyword[False] , identifier[post_filter] : identifier[Optional] [ identifier[Callable] [[ identifier[Post] ], identifier[bool] ]]= keyword[None] , identifier[storyitem_filter] : identifier[Optional] [ identifier[Callable] [[ identifier[StoryItem] ], identifier[bool] ]]= keyword[None] )-> keyword[None] : literal[string] keyword[if] identifier[isinstance] ( identifier[profile_name] , identifier[str] ): identifier[profile] = identifier[self] . identifier[check_profile_id] ( identifier[profile_name] . identifier[lower] ()) keyword[else] : identifier[profile] = identifier[profile_name] identifier[profile_name] = identifier[profile] . identifier[username] keyword[if] identifier[self] . identifier[save_metadata] keyword[is] keyword[not] keyword[False] : identifier[json_filename] = literal[string] . identifier[format] ( identifier[self] . identifier[dirname_pattern] . identifier[format] ( identifier[profile] = identifier[profile_name] , identifier[target] = identifier[profile_name] ), identifier[profile_name] , identifier[profile] . identifier[userid] ) identifier[self] . identifier[save_metadata_json] ( identifier[json_filename] , identifier[profile] ) keyword[if] identifier[self] . identifier[context] . identifier[is_logged_in] keyword[and] identifier[profile] . identifier[has_blocked_viewer] keyword[and] keyword[not] identifier[profile] . 
identifier[is_private] : keyword[raise] identifier[ProfileNotExistsException] ( literal[string] . identifier[format] ( identifier[profile_name] )) keyword[if] identifier[profile_pic] keyword[or] identifier[profile_pic_only] : keyword[with] identifier[self] . identifier[context] . identifier[error_catcher] ( literal[string] . identifier[format] ( identifier[profile_name] )): identifier[self] . identifier[download_profilepic] ( identifier[profile] ) keyword[if] identifier[profile_pic_only] : keyword[return] keyword[if] identifier[profile] . identifier[is_private] : keyword[if] keyword[not] identifier[self] . identifier[context] . identifier[is_logged_in] : keyword[raise] identifier[LoginRequiredException] ( literal[string] % identifier[profile_name] ) keyword[if] keyword[not] identifier[profile] . identifier[followed_by_viewer] keyword[and] identifier[self] . identifier[context] . identifier[username] != identifier[profile] . identifier[username] : keyword[raise] identifier[PrivateProfileNotFollowedException] ( literal[string] % identifier[profile_name] ) keyword[else] : keyword[if] identifier[self] . identifier[context] . identifier[is_logged_in] keyword[and] keyword[not] ( identifier[download_stories] keyword[or] identifier[download_stories_only] ): identifier[self] . identifier[context] . identifier[log] ( literal[string] % identifier[profile_name] ) keyword[if] identifier[download_stories] keyword[or] identifier[download_stories_only] : keyword[if] identifier[profile] . identifier[has_viewable_story] : keyword[with] identifier[self] . identifier[context] . identifier[error_catcher] ( literal[string] . identifier[format] ( identifier[profile_name] )): identifier[self] . identifier[download_stories] ( identifier[userids] =[ identifier[profile] . 
identifier[userid] ], identifier[filename_target] = identifier[profile_name] , identifier[fast_update] = identifier[fast_update] , identifier[storyitem_filter] = identifier[storyitem_filter] ) keyword[else] : identifier[self] . identifier[context] . identifier[log] ( literal[string] . identifier[format] ( identifier[profile_name] )) keyword[if] identifier[download_stories_only] : keyword[return] keyword[if] identifier[download_tagged] keyword[or] identifier[download_tagged_only] : keyword[with] identifier[self] . identifier[context] . identifier[error_catcher] ( literal[string] . identifier[format] ( identifier[profile_name] )): identifier[self] . identifier[download_tagged] ( identifier[profile] , identifier[fast_update] = identifier[fast_update] , identifier[post_filter] = identifier[post_filter] ) keyword[if] identifier[download_tagged_only] : keyword[return] identifier[self] . identifier[context] . identifier[log] ( literal[string] . identifier[format] ( identifier[profile_name] )) identifier[totalcount] = identifier[profile] . identifier[mediacount] identifier[count] = literal[int] keyword[for] identifier[post] keyword[in] identifier[profile] . identifier[get_posts] (): identifier[self] . identifier[context] . identifier[log] ( literal[string] %( identifier[count] , identifier[totalcount] ), identifier[end] = literal[string] , identifier[flush] = keyword[True] ) identifier[count] += literal[int] keyword[if] identifier[post_filter] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[post_filter] ( identifier[post] ): identifier[self] . identifier[context] . identifier[log] ( literal[string] ) keyword[continue] keyword[with] identifier[self] . identifier[context] . identifier[error_catcher] ( literal[string] . identifier[format] ( identifier[profile_name] )): identifier[downloaded] = identifier[self] . 
identifier[download_post] ( identifier[post] , identifier[target] = identifier[profile_name] ) keyword[if] identifier[fast_update] keyword[and] keyword[not] identifier[downloaded] : keyword[break]
def download_profile(self, profile_name: Union[str, Profile], profile_pic: bool=True, profile_pic_only: bool=False, fast_update: bool=False, download_stories: bool=False, download_stories_only: bool=False, download_tagged: bool=False, download_tagged_only: bool=False, post_filter: Optional[Callable[[Post], bool]]=None, storyitem_filter: Optional[Callable[[StoryItem], bool]]=None) -> None: """Download one profile .. deprecated:: 4.1 Use :meth:`Instaloader.download_profiles`. """ # Get profile main page json # check if profile does exist or name has changed since last download # and update name and json data if necessary if isinstance(profile_name, str): profile = self.check_profile_id(profile_name.lower()) # depends on [control=['if'], data=[]] else: profile = profile_name profile_name = profile.username # Save metadata as JSON if desired. if self.save_metadata is not False: json_filename = '{0}/{1}_{2}'.format(self.dirname_pattern.format(profile=profile_name, target=profile_name), profile_name, profile.userid) self.save_metadata_json(json_filename, profile) # depends on [control=['if'], data=[]] if self.context.is_logged_in and profile.has_blocked_viewer and (not profile.is_private): # raising ProfileNotExistsException invokes "trying again anonymously" logic raise ProfileNotExistsException('Profile {} has blocked you'.format(profile_name)) # depends on [control=['if'], data=[]] # Download profile picture if profile_pic or profile_pic_only: with self.context.error_catcher('Download profile picture of {}'.format(profile_name)): self.download_profilepic(profile) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] if profile_pic_only: return # depends on [control=['if'], data=[]] # Catch some errors if profile.is_private: if not self.context.is_logged_in: raise LoginRequiredException('profile %s requires login' % profile_name) # depends on [control=['if'], data=[]] if not profile.followed_by_viewer and self.context.username != 
profile.username: raise PrivateProfileNotFollowedException('Profile %s: private but not followed.' % profile_name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif self.context.is_logged_in and (not (download_stories or download_stories_only)): self.context.log('profile %s could also be downloaded anonymously.' % profile_name) # depends on [control=['if'], data=[]] # Download stories, if requested if download_stories or download_stories_only: if profile.has_viewable_story: with self.context.error_catcher('Download stories of {}'.format(profile_name)): self.download_stories(userids=[profile.userid], filename_target=profile_name, fast_update=fast_update, storyitem_filter=storyitem_filter) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] else: self.context.log('{} does not have any stories.'.format(profile_name)) # depends on [control=['if'], data=[]] if download_stories_only: return # depends on [control=['if'], data=[]] # Download tagged, if requested if download_tagged or download_tagged_only: with self.context.error_catcher('Download tagged of {}'.format(profile_name)): self.download_tagged(profile, fast_update=fast_update, post_filter=post_filter) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] if download_tagged_only: return # depends on [control=['if'], data=[]] # Iterate over pictures and download them self.context.log('Retrieving posts from profile {}.'.format(profile_name)) totalcount = profile.mediacount count = 1 for post in profile.get_posts(): self.context.log('[%3i/%3i] ' % (count, totalcount), end='', flush=True) count += 1 if post_filter is not None and (not post_filter(post)): self.context.log('<skipped>') continue # depends on [control=['if'], data=[]] with self.context.error_catcher('Download profile {}'.format(profile_name)): downloaded = self.download_post(post, target=profile_name) if fast_update and (not downloaded): break # depends on 
[control=['if'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['post']]
def upload_process_reach_files(output_dir, pmid_info_dict, reader_version, num_cores): # At this point, we have a directory full of JSON files # Collect all the prefixes into a set, then iterate over the prefixes # Collect prefixes json_files = glob.glob(os.path.join(output_dir, '*.json')) json_prefixes = set([]) for json_file in json_files: filename = os.path.basename(json_file) prefix = filename.split('.')[0] json_prefixes.add(prefix) # Make a list with PMID and source_text info logger.info("Uploading reading results for reach.") pmid_json_tuples = [] for json_prefix in json_prefixes: try: full_json = upload_reach_readings( json_prefix, pmid_info_dict[json_prefix].get('content_source'), reader_version, output_dir ) pmid_json_tuples.append((json_prefix, full_json)) except Exception as e: logger.error("Caught an exception while trying to upload reach " "reading results onto s3 for %s." % json_prefix) logger.exception(e) # Create a multiprocessing pool logger.info('Creating a multiprocessing pool with %d cores' % num_cores) # Get a multiprocessing pool. pool = mp.Pool(num_cores) logger.info('Processing local REACH JSON files') res = pool.map(upload_process_pmid, pmid_json_tuples) stmts_by_pmid = { pmid: stmts for res_dict in res for pmid, stmts in res_dict.items() } pool.close() logger.info('Multiprocessing pool closed.') pool.join() logger.info('Multiprocessing pool joined.') """ logger.info('Uploaded REACH JSON for %d files to S3 (%d failures)' % (num_uploaded, num_failures)) failures_file = os.path.join(output_dir, 'failures.txt') with open(failures_file, 'wt') as f: for fail in failures: f.write('%s\n' % fail) """ return stmts_by_pmid
def function[upload_process_reach_files, parameter[output_dir, pmid_info_dict, reader_version, num_cores]]: variable[json_files] assign[=] call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[output_dir], constant[*.json]]]]] variable[json_prefixes] assign[=] call[name[set], parameter[list[[]]]] for taget[name[json_file]] in starred[name[json_files]] begin[:] variable[filename] assign[=] call[name[os].path.basename, parameter[name[json_file]]] variable[prefix] assign[=] call[call[name[filename].split, parameter[constant[.]]]][constant[0]] call[name[json_prefixes].add, parameter[name[prefix]]] call[name[logger].info, parameter[constant[Uploading reading results for reach.]]] variable[pmid_json_tuples] assign[=] list[[]] for taget[name[json_prefix]] in starred[name[json_prefixes]] begin[:] <ast.Try object at 0x7da1b0d1b3a0> call[name[logger].info, parameter[binary_operation[constant[Creating a multiprocessing pool with %d cores] <ast.Mod object at 0x7da2590d6920> name[num_cores]]]] variable[pool] assign[=] call[name[mp].Pool, parameter[name[num_cores]]] call[name[logger].info, parameter[constant[Processing local REACH JSON files]]] variable[res] assign[=] call[name[pool].map, parameter[name[upload_process_pmid], name[pmid_json_tuples]]] variable[stmts_by_pmid] assign[=] <ast.DictComp object at 0x7da1b0d18370> call[name[pool].close, parameter[]] call[name[logger].info, parameter[constant[Multiprocessing pool closed.]]] call[name[pool].join, parameter[]] call[name[logger].info, parameter[constant[Multiprocessing pool joined.]]] constant[ logger.info('Uploaded REACH JSON for %d files to S3 (%d failures)' % (num_uploaded, num_failures)) failures_file = os.path.join(output_dir, 'failures.txt') with open(failures_file, 'wt') as f: for fail in failures: f.write('%s ' % fail) ] return[name[stmts_by_pmid]]
keyword[def] identifier[upload_process_reach_files] ( identifier[output_dir] , identifier[pmid_info_dict] , identifier[reader_version] , identifier[num_cores] ): identifier[json_files] = identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[output_dir] , literal[string] )) identifier[json_prefixes] = identifier[set] ([]) keyword[for] identifier[json_file] keyword[in] identifier[json_files] : identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[json_file] ) identifier[prefix] = identifier[filename] . identifier[split] ( literal[string] )[ literal[int] ] identifier[json_prefixes] . identifier[add] ( identifier[prefix] ) identifier[logger] . identifier[info] ( literal[string] ) identifier[pmid_json_tuples] =[] keyword[for] identifier[json_prefix] keyword[in] identifier[json_prefixes] : keyword[try] : identifier[full_json] = identifier[upload_reach_readings] ( identifier[json_prefix] , identifier[pmid_info_dict] [ identifier[json_prefix] ]. identifier[get] ( literal[string] ), identifier[reader_version] , identifier[output_dir] ) identifier[pmid_json_tuples] . identifier[append] (( identifier[json_prefix] , identifier[full_json] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[logger] . identifier[error] ( literal[string] literal[string] % identifier[json_prefix] ) identifier[logger] . identifier[exception] ( identifier[e] ) identifier[logger] . identifier[info] ( literal[string] % identifier[num_cores] ) identifier[pool] = identifier[mp] . identifier[Pool] ( identifier[num_cores] ) identifier[logger] . identifier[info] ( literal[string] ) identifier[res] = identifier[pool] . 
identifier[map] ( identifier[upload_process_pmid] , identifier[pmid_json_tuples] ) identifier[stmts_by_pmid] ={ identifier[pmid] : identifier[stmts] keyword[for] identifier[res_dict] keyword[in] identifier[res] keyword[for] identifier[pmid] , identifier[stmts] keyword[in] identifier[res_dict] . identifier[items] () } identifier[pool] . identifier[close] () identifier[logger] . identifier[info] ( literal[string] ) identifier[pool] . identifier[join] () identifier[logger] . identifier[info] ( literal[string] ) literal[string] keyword[return] identifier[stmts_by_pmid]
def upload_process_reach_files(output_dir, pmid_info_dict, reader_version, num_cores): # At this point, we have a directory full of JSON files # Collect all the prefixes into a set, then iterate over the prefixes # Collect prefixes json_files = glob.glob(os.path.join(output_dir, '*.json')) json_prefixes = set([]) for json_file in json_files: filename = os.path.basename(json_file) prefix = filename.split('.')[0] json_prefixes.add(prefix) # depends on [control=['for'], data=['json_file']] # Make a list with PMID and source_text info logger.info('Uploading reading results for reach.') pmid_json_tuples = [] for json_prefix in json_prefixes: try: full_json = upload_reach_readings(json_prefix, pmid_info_dict[json_prefix].get('content_source'), reader_version, output_dir) pmid_json_tuples.append((json_prefix, full_json)) # depends on [control=['try'], data=[]] except Exception as e: logger.error('Caught an exception while trying to upload reach reading results onto s3 for %s.' % json_prefix) logger.exception(e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['json_prefix']] # Create a multiprocessing pool logger.info('Creating a multiprocessing pool with %d cores' % num_cores) # Get a multiprocessing pool. pool = mp.Pool(num_cores) logger.info('Processing local REACH JSON files') res = pool.map(upload_process_pmid, pmid_json_tuples) stmts_by_pmid = {pmid: stmts for res_dict in res for (pmid, stmts) in res_dict.items()} pool.close() logger.info('Multiprocessing pool closed.') pool.join() logger.info('Multiprocessing pool joined.') "\n logger.info('Uploaded REACH JSON for %d files to S3 (%d failures)' %\n (num_uploaded, num_failures))\n failures_file = os.path.join(output_dir, 'failures.txt')\n with open(failures_file, 'wt') as f:\n for fail in failures:\n f.write('%s\n' % fail)\n " return stmts_by_pmid
def get(cls, parent=None, id=None, data=None): """Inherit info from parent and return new object""" # TODO - allow fetching of parent based on child? if parent is not None: route = copy(parent.route) else: route = {} if id is not None and cls.ID_NAME is not None: route[cls.ID_NAME] = id obj = cls(key=parent.key, route=route, config=parent.config) if data: # This is used in "get all" queries obj.data = data else: obj.fetch() return obj
def function[get, parameter[cls, parent, id, data]]: constant[Inherit info from parent and return new object] if compare[name[parent] is_not constant[None]] begin[:] variable[route] assign[=] call[name[copy], parameter[name[parent].route]] if <ast.BoolOp object at 0x7da1b1a5efb0> begin[:] call[name[route]][name[cls].ID_NAME] assign[=] name[id] variable[obj] assign[=] call[name[cls], parameter[]] if name[data] begin[:] name[obj].data assign[=] name[data] return[name[obj]]
keyword[def] identifier[get] ( identifier[cls] , identifier[parent] = keyword[None] , identifier[id] = keyword[None] , identifier[data] = keyword[None] ): literal[string] keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] : identifier[route] = identifier[copy] ( identifier[parent] . identifier[route] ) keyword[else] : identifier[route] ={} keyword[if] identifier[id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[cls] . identifier[ID_NAME] keyword[is] keyword[not] keyword[None] : identifier[route] [ identifier[cls] . identifier[ID_NAME] ]= identifier[id] identifier[obj] = identifier[cls] ( identifier[key] = identifier[parent] . identifier[key] , identifier[route] = identifier[route] , identifier[config] = identifier[parent] . identifier[config] ) keyword[if] identifier[data] : identifier[obj] . identifier[data] = identifier[data] keyword[else] : identifier[obj] . identifier[fetch] () keyword[return] identifier[obj]
def get(cls, parent=None, id=None, data=None): """Inherit info from parent and return new object""" # TODO - allow fetching of parent based on child? if parent is not None: route = copy(parent.route) # depends on [control=['if'], data=['parent']] else: route = {} if id is not None and cls.ID_NAME is not None: route[cls.ID_NAME] = id # depends on [control=['if'], data=[]] obj = cls(key=parent.key, route=route, config=parent.config) if data: # This is used in "get all" queries obj.data = data # depends on [control=['if'], data=[]] else: obj.fetch() return obj
def read_multiple( self, points_list, *, points_per_request=25, discover_request=(None, 6), force_single=False ): """ Read points from a device using a ReadPropertyMultiple request. [ReadProperty requests are very slow in comparison]. :param points_list: (list) a list of all point_name as str :param points_per_request: (int) number of points in the request Requesting many points results big requests that need segmentation. Aim to request just the 'right amount' so segmentation can be avoided. Determining the 'right amount' is often trial-&-error. :Example: device.read_multiple(['point1', 'point2', 'point3'], points_per_request = 10) """ if not self.properties.pss["readPropertyMultiple"] or force_single: self._log.warning("Read property Multiple Not supported") self.read_single( points_list, points_per_request=1, discover_request=discover_request ) else: if not self.properties.segmentation_supported: points_per_request = 1 if discover_request[0]: values = [] info_length = discover_request[1] big_request = discover_request[0] for request in self._batches(big_request, points_per_request): try: request = "{} {}".format( self.properties.address, "".join(request) ) self._log.debug("RPM_Request: %s " % request) val = self.properties.network.readMultiple(request) # print('val : ', val, len(val), type(val)) if val == None: self.properties.segmentation_supported = False raise SegmentationNotSupported except KeyError as error: raise Exception("Unknown point name : %s" % error) except SegmentationNotSupported as error: self.properties.segmentation_supported = False # self.read_multiple(points_list,points_per_request=1, discover_request=discover_request) self._log.warning("Segmentation not supported") self._log.warning("Request too big...will reduce it") if points_per_request == 1: raise self.read_multiple( points_list, points_per_request=1, discover_request=discover_request, ) else: for points_info in self._batches(val, info_length): values.append(points_info) return values 
else: big_request = self._rpm_request_by_name(points_list) i = 0 for request in self._batches(big_request[0], points_per_request): try: request = "{} {}".format( self.properties.address, "".join(request) ) val = self.properties.network.readMultiple(request) except SegmentationNotSupported as error: self.properties.segmentation_supported = False self.read_multiple( points_list, points_per_request=1, discover_request=discover_request, ) except KeyError as error: raise Exception("Unknown point name : %s" % error) else: points_values = zip(big_request[1][i : i + len(val)], val) i += len(val) for each in points_values: each[0]._trend(each[1])
def function[read_multiple, parameter[self, points_list]]: constant[ Read points from a device using a ReadPropertyMultiple request. [ReadProperty requests are very slow in comparison]. :param points_list: (list) a list of all point_name as str :param points_per_request: (int) number of points in the request Requesting many points results big requests that need segmentation. Aim to request just the 'right amount' so segmentation can be avoided. Determining the 'right amount' is often trial-&-error. :Example: device.read_multiple(['point1', 'point2', 'point3'], points_per_request = 10) ] if <ast.BoolOp object at 0x7da1b0407f70> begin[:] call[name[self]._log.warning, parameter[constant[Read property Multiple Not supported]]] call[name[self].read_single, parameter[name[points_list]]]
keyword[def] identifier[read_multiple] ( identifier[self] , identifier[points_list] , *, identifier[points_per_request] = literal[int] , identifier[discover_request] =( keyword[None] , literal[int] ), identifier[force_single] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[properties] . identifier[pss] [ literal[string] ] keyword[or] identifier[force_single] : identifier[self] . identifier[_log] . identifier[warning] ( literal[string] ) identifier[self] . identifier[read_single] ( identifier[points_list] , identifier[points_per_request] = literal[int] , identifier[discover_request] = identifier[discover_request] ) keyword[else] : keyword[if] keyword[not] identifier[self] . identifier[properties] . identifier[segmentation_supported] : identifier[points_per_request] = literal[int] keyword[if] identifier[discover_request] [ literal[int] ]: identifier[values] =[] identifier[info_length] = identifier[discover_request] [ literal[int] ] identifier[big_request] = identifier[discover_request] [ literal[int] ] keyword[for] identifier[request] keyword[in] identifier[self] . identifier[_batches] ( identifier[big_request] , identifier[points_per_request] ): keyword[try] : identifier[request] = literal[string] . identifier[format] ( identifier[self] . identifier[properties] . identifier[address] , literal[string] . identifier[join] ( identifier[request] ) ) identifier[self] . identifier[_log] . identifier[debug] ( literal[string] % identifier[request] ) identifier[val] = identifier[self] . identifier[properties] . identifier[network] . identifier[readMultiple] ( identifier[request] ) keyword[if] identifier[val] == keyword[None] : identifier[self] . identifier[properties] . 
identifier[segmentation_supported] = keyword[False] keyword[raise] identifier[SegmentationNotSupported] keyword[except] identifier[KeyError] keyword[as] identifier[error] : keyword[raise] identifier[Exception] ( literal[string] % identifier[error] ) keyword[except] identifier[SegmentationNotSupported] keyword[as] identifier[error] : identifier[self] . identifier[properties] . identifier[segmentation_supported] = keyword[False] identifier[self] . identifier[_log] . identifier[warning] ( literal[string] ) identifier[self] . identifier[_log] . identifier[warning] ( literal[string] ) keyword[if] identifier[points_per_request] == literal[int] : keyword[raise] identifier[self] . identifier[read_multiple] ( identifier[points_list] , identifier[points_per_request] = literal[int] , identifier[discover_request] = identifier[discover_request] , ) keyword[else] : keyword[for] identifier[points_info] keyword[in] identifier[self] . identifier[_batches] ( identifier[val] , identifier[info_length] ): identifier[values] . identifier[append] ( identifier[points_info] ) keyword[return] identifier[values] keyword[else] : identifier[big_request] = identifier[self] . identifier[_rpm_request_by_name] ( identifier[points_list] ) identifier[i] = literal[int] keyword[for] identifier[request] keyword[in] identifier[self] . identifier[_batches] ( identifier[big_request] [ literal[int] ], identifier[points_per_request] ): keyword[try] : identifier[request] = literal[string] . identifier[format] ( identifier[self] . identifier[properties] . identifier[address] , literal[string] . identifier[join] ( identifier[request] ) ) identifier[val] = identifier[self] . identifier[properties] . identifier[network] . identifier[readMultiple] ( identifier[request] ) keyword[except] identifier[SegmentationNotSupported] keyword[as] identifier[error] : identifier[self] . identifier[properties] . identifier[segmentation_supported] = keyword[False] identifier[self] . 
identifier[read_multiple] ( identifier[points_list] , identifier[points_per_request] = literal[int] , identifier[discover_request] = identifier[discover_request] , ) keyword[except] identifier[KeyError] keyword[as] identifier[error] : keyword[raise] identifier[Exception] ( literal[string] % identifier[error] ) keyword[else] : identifier[points_values] = identifier[zip] ( identifier[big_request] [ literal[int] ][ identifier[i] : identifier[i] + identifier[len] ( identifier[val] )], identifier[val] ) identifier[i] += identifier[len] ( identifier[val] ) keyword[for] identifier[each] keyword[in] identifier[points_values] : identifier[each] [ literal[int] ]. identifier[_trend] ( identifier[each] [ literal[int] ])
def read_multiple(self, points_list, *, points_per_request=25, discover_request=(None, 6), force_single=False): """ Read points from a device using a ReadPropertyMultiple request. [ReadProperty requests are very slow in comparison]. :param points_list: (list) a list of all point_name as str :param points_per_request: (int) number of points in the request Requesting many points results big requests that need segmentation. Aim to request just the 'right amount' so segmentation can be avoided. Determining the 'right amount' is often trial-&-error. :Example: device.read_multiple(['point1', 'point2', 'point3'], points_per_request = 10) """ if not self.properties.pss['readPropertyMultiple'] or force_single: self._log.warning('Read property Multiple Not supported') self.read_single(points_list, points_per_request=1, discover_request=discover_request) # depends on [control=['if'], data=[]] else: if not self.properties.segmentation_supported: points_per_request = 1 # depends on [control=['if'], data=[]] if discover_request[0]: values = [] info_length = discover_request[1] big_request = discover_request[0] for request in self._batches(big_request, points_per_request): try: request = '{} {}'.format(self.properties.address, ''.join(request)) self._log.debug('RPM_Request: %s ' % request) val = self.properties.network.readMultiple(request) # print('val : ', val, len(val), type(val)) if val == None: self.properties.segmentation_supported = False raise SegmentationNotSupported # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except KeyError as error: raise Exception('Unknown point name : %s' % error) # depends on [control=['except'], data=['error']] except SegmentationNotSupported as error: self.properties.segmentation_supported = False # self.read_multiple(points_list,points_per_request=1, discover_request=discover_request) self._log.warning('Segmentation not supported') self._log.warning('Request too big...will reduce it') if points_per_request == 
1: raise # depends on [control=['if'], data=[]] self.read_multiple(points_list, points_per_request=1, discover_request=discover_request) # depends on [control=['except'], data=[]] else: for points_info in self._batches(val, info_length): values.append(points_info) # depends on [control=['for'], data=['points_info']] # depends on [control=['for'], data=['request']] return values # depends on [control=['if'], data=[]] else: big_request = self._rpm_request_by_name(points_list) i = 0 for request in self._batches(big_request[0], points_per_request): try: request = '{} {}'.format(self.properties.address, ''.join(request)) val = self.properties.network.readMultiple(request) # depends on [control=['try'], data=[]] except SegmentationNotSupported as error: self.properties.segmentation_supported = False self.read_multiple(points_list, points_per_request=1, discover_request=discover_request) # depends on [control=['except'], data=[]] except KeyError as error: raise Exception('Unknown point name : %s' % error) # depends on [control=['except'], data=['error']] else: points_values = zip(big_request[1][i:i + len(val)], val) i += len(val) for each in points_values: each[0]._trend(each[1]) # depends on [control=['for'], data=['each']] # depends on [control=['for'], data=['request']]
def subst(self, string, raw=0, target=None, source=None, conv=None, executor=None): """Recursively interpolates construction variables from the Environment into the specified string, returning the expanded result. Construction variables are specified by a $ prefix in the string and begin with an initial underscore or alphabetic character followed by any number of underscores or alphanumeric characters. The construction variable names may be surrounded by curly braces to separate the name from trailing characters. """ gvars = self.gvars() lvars = self.lvars() lvars['__env__'] = self if executor: lvars.update(executor.get_lvars()) return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv)
def function[subst, parameter[self, string, raw, target, source, conv, executor]]: constant[Recursively interpolates construction variables from the Environment into the specified string, returning the expanded result. Construction variables are specified by a $ prefix in the string and begin with an initial underscore or alphabetic character followed by any number of underscores or alphanumeric characters. The construction variable names may be surrounded by curly braces to separate the name from trailing characters. ] variable[gvars] assign[=] call[name[self].gvars, parameter[]] variable[lvars] assign[=] call[name[self].lvars, parameter[]] call[name[lvars]][constant[__env__]] assign[=] name[self] if name[executor] begin[:] call[name[lvars].update, parameter[call[name[executor].get_lvars, parameter[]]]] return[call[name[SCons].Subst.scons_subst, parameter[name[string], name[self], name[raw], name[target], name[source], name[gvars], name[lvars], name[conv]]]]
keyword[def] identifier[subst] ( identifier[self] , identifier[string] , identifier[raw] = literal[int] , identifier[target] = keyword[None] , identifier[source] = keyword[None] , identifier[conv] = keyword[None] , identifier[executor] = keyword[None] ): literal[string] identifier[gvars] = identifier[self] . identifier[gvars] () identifier[lvars] = identifier[self] . identifier[lvars] () identifier[lvars] [ literal[string] ]= identifier[self] keyword[if] identifier[executor] : identifier[lvars] . identifier[update] ( identifier[executor] . identifier[get_lvars] ()) keyword[return] identifier[SCons] . identifier[Subst] . identifier[scons_subst] ( identifier[string] , identifier[self] , identifier[raw] , identifier[target] , identifier[source] , identifier[gvars] , identifier[lvars] , identifier[conv] )
def subst(self, string, raw=0, target=None, source=None, conv=None, executor=None): """Recursively interpolates construction variables from the Environment into the specified string, returning the expanded result. Construction variables are specified by a $ prefix in the string and begin with an initial underscore or alphabetic character followed by any number of underscores or alphanumeric characters. The construction variable names may be surrounded by curly braces to separate the name from trailing characters. """ gvars = self.gvars() lvars = self.lvars() lvars['__env__'] = self if executor: lvars.update(executor.get_lvars()) # depends on [control=['if'], data=[]] return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv)
def run(self, default=None): """Parse the command line arguments. default: Name of default command to run if no arguments are passed. """ parent, *sys_args = sys.argv self.parent = Path(parent).stem cmd_name = default if sys_args: cmd_name, *sys_args = sys_args if cmd_name is None or cmd_name.lstrip("-") in HELP_COMMANDS: self.show_help_root() return command = self.commands.get(cmd_name) if command is None: self.show_error(f"command `{cmd_name}` not found") self.show_help_root() return args, opts = parse_args(sys_args) return command.run(*args, **opts)
def function[run, parameter[self, default]]: constant[Parse the command line arguments. default: Name of default command to run if no arguments are passed. ] <ast.Tuple object at 0x7da1b0baf3a0> assign[=] name[sys].argv name[self].parent assign[=] call[name[Path], parameter[name[parent]]].stem variable[cmd_name] assign[=] name[default] if name[sys_args] begin[:] <ast.Tuple object at 0x7da18ede5540> assign[=] name[sys_args] if <ast.BoolOp object at 0x7da18ede4a60> begin[:] call[name[self].show_help_root, parameter[]] return[None] variable[command] assign[=] call[name[self].commands.get, parameter[name[cmd_name]]] if compare[name[command] is constant[None]] begin[:] call[name[self].show_error, parameter[<ast.JoinedStr object at 0x7da18ede4fa0>]] call[name[self].show_help_root, parameter[]] return[None] <ast.Tuple object at 0x7da18ede5d80> assign[=] call[name[parse_args], parameter[name[sys_args]]] return[call[name[command].run, parameter[<ast.Starred object at 0x7da18ede4100>]]]
keyword[def] identifier[run] ( identifier[self] , identifier[default] = keyword[None] ): literal[string] identifier[parent] ,* identifier[sys_args] = identifier[sys] . identifier[argv] identifier[self] . identifier[parent] = identifier[Path] ( identifier[parent] ). identifier[stem] identifier[cmd_name] = identifier[default] keyword[if] identifier[sys_args] : identifier[cmd_name] ,* identifier[sys_args] = identifier[sys_args] keyword[if] identifier[cmd_name] keyword[is] keyword[None] keyword[or] identifier[cmd_name] . identifier[lstrip] ( literal[string] ) keyword[in] identifier[HELP_COMMANDS] : identifier[self] . identifier[show_help_root] () keyword[return] identifier[command] = identifier[self] . identifier[commands] . identifier[get] ( identifier[cmd_name] ) keyword[if] identifier[command] keyword[is] keyword[None] : identifier[self] . identifier[show_error] ( literal[string] ) identifier[self] . identifier[show_help_root] () keyword[return] identifier[args] , identifier[opts] = identifier[parse_args] ( identifier[sys_args] ) keyword[return] identifier[command] . identifier[run] (* identifier[args] ,** identifier[opts] )
def run(self, default=None): """Parse the command line arguments. default: Name of default command to run if no arguments are passed. """ (parent, *sys_args) = sys.argv self.parent = Path(parent).stem cmd_name = default if sys_args: (cmd_name, *sys_args) = sys_args # depends on [control=['if'], data=[]] if cmd_name is None or cmd_name.lstrip('-') in HELP_COMMANDS: self.show_help_root() return # depends on [control=['if'], data=[]] command = self.commands.get(cmd_name) if command is None: self.show_error(f'command `{cmd_name}` not found') self.show_help_root() return # depends on [control=['if'], data=[]] (args, opts) = parse_args(sys_args) return command.run(*args, **opts)
def append_skipped_rules(pyyaml_data, file_text, file_type): """ Uses ruamel.yaml to parse comments then adds a skipped_rules list to the task (or meta yaml block) """ yaml = ruamel.yaml.YAML() ruamel_data = yaml.load(file_text) if file_type in ('tasks', 'handlers'): ruamel_tasks = ruamel_data pyyaml_tasks = pyyaml_data elif file_type == 'playbook': try: ruamel_tasks = [] pyyaml_tasks = [] for ruamel_play, pyyaml_play in zip(ruamel_data, pyyaml_data): ruamel_tasks.extend(ruamel_play.get('tasks')) pyyaml_tasks.extend(pyyaml_play.get('tasks')) except (AttributeError, TypeError): return pyyaml_data elif file_type == 'meta': if not isinstance(pyyaml_data, list): return pyyaml_data ruamel_tasks = [ruamel_data] pyyaml_tasks = pyyaml_data else: return pyyaml_data if len(ruamel_tasks) != len(pyyaml_tasks): return pyyaml_data for ruamel_task, pyyaml_task in zip(ruamel_tasks, pyyaml_tasks): skipped_rules = _get_rule_skips_from_task(ruamel_task) if skipped_rules: pyyaml_task['skipped_rules'] = skipped_rules return pyyaml_data
def function[append_skipped_rules, parameter[pyyaml_data, file_text, file_type]]: constant[ Uses ruamel.yaml to parse comments then adds a skipped_rules list to the task (or meta yaml block) ] variable[yaml] assign[=] call[name[ruamel].yaml.YAML, parameter[]] variable[ruamel_data] assign[=] call[name[yaml].load, parameter[name[file_text]]] if compare[name[file_type] in tuple[[<ast.Constant object at 0x7da1b2346020>, <ast.Constant object at 0x7da1b23461d0>]]] begin[:] variable[ruamel_tasks] assign[=] name[ruamel_data] variable[pyyaml_tasks] assign[=] name[pyyaml_data] if compare[call[name[len], parameter[name[ruamel_tasks]]] not_equal[!=] call[name[len], parameter[name[pyyaml_tasks]]]] begin[:] return[name[pyyaml_data]] for taget[tuple[[<ast.Name object at 0x7da18f810940>, <ast.Name object at 0x7da18f8118a0>]]] in starred[call[name[zip], parameter[name[ruamel_tasks], name[pyyaml_tasks]]]] begin[:] variable[skipped_rules] assign[=] call[name[_get_rule_skips_from_task], parameter[name[ruamel_task]]] if name[skipped_rules] begin[:] call[name[pyyaml_task]][constant[skipped_rules]] assign[=] name[skipped_rules] return[name[pyyaml_data]]
keyword[def] identifier[append_skipped_rules] ( identifier[pyyaml_data] , identifier[file_text] , identifier[file_type] ): literal[string] identifier[yaml] = identifier[ruamel] . identifier[yaml] . identifier[YAML] () identifier[ruamel_data] = identifier[yaml] . identifier[load] ( identifier[file_text] ) keyword[if] identifier[file_type] keyword[in] ( literal[string] , literal[string] ): identifier[ruamel_tasks] = identifier[ruamel_data] identifier[pyyaml_tasks] = identifier[pyyaml_data] keyword[elif] identifier[file_type] == literal[string] : keyword[try] : identifier[ruamel_tasks] =[] identifier[pyyaml_tasks] =[] keyword[for] identifier[ruamel_play] , identifier[pyyaml_play] keyword[in] identifier[zip] ( identifier[ruamel_data] , identifier[pyyaml_data] ): identifier[ruamel_tasks] . identifier[extend] ( identifier[ruamel_play] . identifier[get] ( literal[string] )) identifier[pyyaml_tasks] . identifier[extend] ( identifier[pyyaml_play] . identifier[get] ( literal[string] )) keyword[except] ( identifier[AttributeError] , identifier[TypeError] ): keyword[return] identifier[pyyaml_data] keyword[elif] identifier[file_type] == literal[string] : keyword[if] keyword[not] identifier[isinstance] ( identifier[pyyaml_data] , identifier[list] ): keyword[return] identifier[pyyaml_data] identifier[ruamel_tasks] =[ identifier[ruamel_data] ] identifier[pyyaml_tasks] = identifier[pyyaml_data] keyword[else] : keyword[return] identifier[pyyaml_data] keyword[if] identifier[len] ( identifier[ruamel_tasks] )!= identifier[len] ( identifier[pyyaml_tasks] ): keyword[return] identifier[pyyaml_data] keyword[for] identifier[ruamel_task] , identifier[pyyaml_task] keyword[in] identifier[zip] ( identifier[ruamel_tasks] , identifier[pyyaml_tasks] ): identifier[skipped_rules] = identifier[_get_rule_skips_from_task] ( identifier[ruamel_task] ) keyword[if] identifier[skipped_rules] : identifier[pyyaml_task] [ literal[string] ]= identifier[skipped_rules] keyword[return] identifier[pyyaml_data]
def append_skipped_rules(pyyaml_data, file_text, file_type): """ Uses ruamel.yaml to parse comments then adds a skipped_rules list to the task (or meta yaml block) """ yaml = ruamel.yaml.YAML() ruamel_data = yaml.load(file_text) if file_type in ('tasks', 'handlers'): ruamel_tasks = ruamel_data pyyaml_tasks = pyyaml_data # depends on [control=['if'], data=[]] elif file_type == 'playbook': try: ruamel_tasks = [] pyyaml_tasks = [] for (ruamel_play, pyyaml_play) in zip(ruamel_data, pyyaml_data): ruamel_tasks.extend(ruamel_play.get('tasks')) pyyaml_tasks.extend(pyyaml_play.get('tasks')) # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]] except (AttributeError, TypeError): return pyyaml_data # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif file_type == 'meta': if not isinstance(pyyaml_data, list): return pyyaml_data # depends on [control=['if'], data=[]] ruamel_tasks = [ruamel_data] pyyaml_tasks = pyyaml_data # depends on [control=['if'], data=[]] else: return pyyaml_data if len(ruamel_tasks) != len(pyyaml_tasks): return pyyaml_data # depends on [control=['if'], data=[]] for (ruamel_task, pyyaml_task) in zip(ruamel_tasks, pyyaml_tasks): skipped_rules = _get_rule_skips_from_task(ruamel_task) if skipped_rules: pyyaml_task['skipped_rules'] = skipped_rules # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return pyyaml_data
def get_account_statement( self, locale=None, from_record=None, record_count=None, item_date_range=None, include_item=None, wallet=None): """Get account statement. :param str locale: The language to be used where applicable :param int from_record: Specifies the first record that will be returned :param int record_count: Specifies the maximum number of records to be returned :param TimeRange item_date_range: Return items with an itemDate within this date range :param IncludeItem include_item: Which items to include :param Wallet wallte: Which wallet to return statementItems for """ return self.make_api_request( 'Account', 'getAccountStatement', utils.get_kwargs(locals()), model=models.AccountStatementReport, )
def function[get_account_statement, parameter[self, locale, from_record, record_count, item_date_range, include_item, wallet]]: constant[Get account statement. :param str locale: The language to be used where applicable :param int from_record: Specifies the first record that will be returned :param int record_count: Specifies the maximum number of records to be returned :param TimeRange item_date_range: Return items with an itemDate within this date range :param IncludeItem include_item: Which items to include :param Wallet wallte: Which wallet to return statementItems for ] return[call[name[self].make_api_request, parameter[constant[Account], constant[getAccountStatement], call[name[utils].get_kwargs, parameter[call[name[locals], parameter[]]]]]]]
keyword[def] identifier[get_account_statement] ( identifier[self] , identifier[locale] = keyword[None] , identifier[from_record] = keyword[None] , identifier[record_count] = keyword[None] , identifier[item_date_range] = keyword[None] , identifier[include_item] = keyword[None] , identifier[wallet] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[make_api_request] ( literal[string] , literal[string] , identifier[utils] . identifier[get_kwargs] ( identifier[locals] ()), identifier[model] = identifier[models] . identifier[AccountStatementReport] , )
def get_account_statement(self, locale=None, from_record=None, record_count=None, item_date_range=None, include_item=None, wallet=None): """Get account statement. :param str locale: The language to be used where applicable :param int from_record: Specifies the first record that will be returned :param int record_count: Specifies the maximum number of records to be returned :param TimeRange item_date_range: Return items with an itemDate within this date range :param IncludeItem include_item: Which items to include :param Wallet wallte: Which wallet to return statementItems for """ return self.make_api_request('Account', 'getAccountStatement', utils.get_kwargs(locals()), model=models.AccountStatementReport)
def wrap_query_in_nested_if_field_is_nested(query, field, nested_fields): """Helper for wrapping a query into a nested if the fields within the query are nested Args: query : The query to be wrapped. field : The field that is being queried. nested_fields : List of fields which are nested. Returns: (dict): The nested query """ for element in nested_fields: match_pattern = r'^{}.'.format(element) if re.match(match_pattern, field): return generate_nested_query(element, query) return query
def function[wrap_query_in_nested_if_field_is_nested, parameter[query, field, nested_fields]]: constant[Helper for wrapping a query into a nested if the fields within the query are nested Args: query : The query to be wrapped. field : The field that is being queried. nested_fields : List of fields which are nested. Returns: (dict): The nested query ] for taget[name[element]] in starred[name[nested_fields]] begin[:] variable[match_pattern] assign[=] call[constant[^{}.].format, parameter[name[element]]] if call[name[re].match, parameter[name[match_pattern], name[field]]] begin[:] return[call[name[generate_nested_query], parameter[name[element], name[query]]]] return[name[query]]
keyword[def] identifier[wrap_query_in_nested_if_field_is_nested] ( identifier[query] , identifier[field] , identifier[nested_fields] ): literal[string] keyword[for] identifier[element] keyword[in] identifier[nested_fields] : identifier[match_pattern] = literal[string] . identifier[format] ( identifier[element] ) keyword[if] identifier[re] . identifier[match] ( identifier[match_pattern] , identifier[field] ): keyword[return] identifier[generate_nested_query] ( identifier[element] , identifier[query] ) keyword[return] identifier[query]
def wrap_query_in_nested_if_field_is_nested(query, field, nested_fields): """Helper for wrapping a query into a nested if the fields within the query are nested Args: query : The query to be wrapped. field : The field that is being queried. nested_fields : List of fields which are nested. Returns: (dict): The nested query """ for element in nested_fields: match_pattern = '^{}.'.format(element) if re.match(match_pattern, field): return generate_nested_query(element, query) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['element']] return query
def IsProcess64Bit(processId: int) -> bool: """ Return True if process is 64 bit. Return False if process is 32 bit. Return None if unknown, maybe caused by having no acess right to the process. """ try: func = ctypes.windll.ntdll.ZwWow64ReadVirtualMemory64 #only 64 bit OS has this function except Exception as ex: return False try: IsWow64Process = ctypes.windll.kernel32.IsWow64Process IsWow64Process.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int)) except Exception as ex: return False hProcess = ctypes.windll.kernel32.OpenProcess(0x1000, 0, processId) #PROCESS_QUERY_INFORMATION=0x0400,PROCESS_QUERY_LIMITED_INFORMATION=0x1000 if hProcess: is64Bit = ctypes.c_int32() if IsWow64Process(hProcess, ctypes.byref(is64Bit)): ctypes.windll.kernel32.CloseHandle(ctypes.c_void_p(hProcess)) return False if is64Bit.value else True else: ctypes.windll.kernel32.CloseHandle(ctypes.c_void_p(hProcess))
def function[IsProcess64Bit, parameter[processId]]: constant[ Return True if process is 64 bit. Return False if process is 32 bit. Return None if unknown, maybe caused by having no acess right to the process. ] <ast.Try object at 0x7da207f01630> <ast.Try object at 0x7da207f00400> variable[hProcess] assign[=] call[name[ctypes].windll.kernel32.OpenProcess, parameter[constant[4096], constant[0], name[processId]]] if name[hProcess] begin[:] variable[is64Bit] assign[=] call[name[ctypes].c_int32, parameter[]] if call[name[IsWow64Process], parameter[name[hProcess], call[name[ctypes].byref, parameter[name[is64Bit]]]]] begin[:] call[name[ctypes].windll.kernel32.CloseHandle, parameter[call[name[ctypes].c_void_p, parameter[name[hProcess]]]]] return[<ast.IfExp object at 0x7da207f030a0>]
keyword[def] identifier[IsProcess64Bit] ( identifier[processId] : identifier[int] )-> identifier[bool] : literal[string] keyword[try] : identifier[func] = identifier[ctypes] . identifier[windll] . identifier[ntdll] . identifier[ZwWow64ReadVirtualMemory64] keyword[except] identifier[Exception] keyword[as] identifier[ex] : keyword[return] keyword[False] keyword[try] : identifier[IsWow64Process] = identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[IsWow64Process] identifier[IsWow64Process] . identifier[argtypes] =( identifier[ctypes] . identifier[c_void_p] , identifier[ctypes] . identifier[POINTER] ( identifier[ctypes] . identifier[c_int] )) keyword[except] identifier[Exception] keyword[as] identifier[ex] : keyword[return] keyword[False] identifier[hProcess] = identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[OpenProcess] ( literal[int] , literal[int] , identifier[processId] ) keyword[if] identifier[hProcess] : identifier[is64Bit] = identifier[ctypes] . identifier[c_int32] () keyword[if] identifier[IsWow64Process] ( identifier[hProcess] , identifier[ctypes] . identifier[byref] ( identifier[is64Bit] )): identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[CloseHandle] ( identifier[ctypes] . identifier[c_void_p] ( identifier[hProcess] )) keyword[return] keyword[False] keyword[if] identifier[is64Bit] . identifier[value] keyword[else] keyword[True] keyword[else] : identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[CloseHandle] ( identifier[ctypes] . identifier[c_void_p] ( identifier[hProcess] ))
def IsProcess64Bit(processId: int) -> bool: """ Return True if process is 64 bit. Return False if process is 32 bit. Return None if unknown, maybe caused by having no acess right to the process. """ try: func = ctypes.windll.ntdll.ZwWow64ReadVirtualMemory64 #only 64 bit OS has this function # depends on [control=['try'], data=[]] except Exception as ex: return False # depends on [control=['except'], data=[]] try: IsWow64Process = ctypes.windll.kernel32.IsWow64Process IsWow64Process.argtypes = (ctypes.c_void_p, ctypes.POINTER(ctypes.c_int)) # depends on [control=['try'], data=[]] except Exception as ex: return False # depends on [control=['except'], data=[]] hProcess = ctypes.windll.kernel32.OpenProcess(4096, 0, processId) #PROCESS_QUERY_INFORMATION=0x0400,PROCESS_QUERY_LIMITED_INFORMATION=0x1000 if hProcess: is64Bit = ctypes.c_int32() if IsWow64Process(hProcess, ctypes.byref(is64Bit)): ctypes.windll.kernel32.CloseHandle(ctypes.c_void_p(hProcess)) return False if is64Bit.value else True # depends on [control=['if'], data=[]] else: ctypes.windll.kernel32.CloseHandle(ctypes.c_void_p(hProcess)) # depends on [control=['if'], data=[]]
def register_namespace(self, namespace_handler): """Register a namespace handler object. :param namespace_handler: An instance of a :class:`Namespace` subclass that handles all the event traffic for a namespace. """ if not isinstance(namespace_handler, namespace.Namespace): raise ValueError('Not a namespace instance') if self.is_asyncio_based() != namespace_handler.is_asyncio_based(): raise ValueError('Not a valid namespace class for this server') namespace_handler._set_server(self) self.namespace_handlers[namespace_handler.namespace] = \ namespace_handler
def function[register_namespace, parameter[self, namespace_handler]]: constant[Register a namespace handler object. :param namespace_handler: An instance of a :class:`Namespace` subclass that handles all the event traffic for a namespace. ] if <ast.UnaryOp object at 0x7da1b21bba30> begin[:] <ast.Raise object at 0x7da1b21d4b80> if compare[call[name[self].is_asyncio_based, parameter[]] not_equal[!=] call[name[namespace_handler].is_asyncio_based, parameter[]]] begin[:] <ast.Raise object at 0x7da1b21d60e0> call[name[namespace_handler]._set_server, parameter[name[self]]] call[name[self].namespace_handlers][name[namespace_handler].namespace] assign[=] name[namespace_handler]
keyword[def] identifier[register_namespace] ( identifier[self] , identifier[namespace_handler] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[namespace_handler] , identifier[namespace] . identifier[Namespace] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[self] . identifier[is_asyncio_based] ()!= identifier[namespace_handler] . identifier[is_asyncio_based] (): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[namespace_handler] . identifier[_set_server] ( identifier[self] ) identifier[self] . identifier[namespace_handlers] [ identifier[namespace_handler] . identifier[namespace] ]= identifier[namespace_handler]
def register_namespace(self, namespace_handler): """Register a namespace handler object. :param namespace_handler: An instance of a :class:`Namespace` subclass that handles all the event traffic for a namespace. """ if not isinstance(namespace_handler, namespace.Namespace): raise ValueError('Not a namespace instance') # depends on [control=['if'], data=[]] if self.is_asyncio_based() != namespace_handler.is_asyncio_based(): raise ValueError('Not a valid namespace class for this server') # depends on [control=['if'], data=[]] namespace_handler._set_server(self) self.namespace_handlers[namespace_handler.namespace] = namespace_handler
def flatten(root: ast.Tree, class_name: ast.ComponentRef) -> ast.Class: """ This function takes a Tree and flattens it so that all subclasses instances are replaced by the their equations and symbols with name mangling of the instance name passed. :param root: The Tree to flatten :param class_name: The class that we want to create a flat model for :return: flat_class, a Class containing the flattened class """ orig_class = root.find_class(class_name, copy=False) flat_class = flatten_class(orig_class) # expand connectors expand_connectors(flat_class) # add equations for state symbol values add_state_value_equations(flat_class) for func in flat_class.functions.values(): add_variable_value_statements(func) # annotate states annotate_states(flat_class) # Put class in root root = ast.Tree() root.classes[orig_class.name] = flat_class # pull functions to the top level, # putting them prior to the model class so that they are visited # first by the tree walker. functions_and_classes = flat_class.functions flat_class.functions = OrderedDict() functions_and_classes.update(root.classes) root.classes = functions_and_classes return root
def function[flatten, parameter[root, class_name]]: constant[ This function takes a Tree and flattens it so that all subclasses instances are replaced by the their equations and symbols with name mangling of the instance name passed. :param root: The Tree to flatten :param class_name: The class that we want to create a flat model for :return: flat_class, a Class containing the flattened class ] variable[orig_class] assign[=] call[name[root].find_class, parameter[name[class_name]]] variable[flat_class] assign[=] call[name[flatten_class], parameter[name[orig_class]]] call[name[expand_connectors], parameter[name[flat_class]]] call[name[add_state_value_equations], parameter[name[flat_class]]] for taget[name[func]] in starred[call[name[flat_class].functions.values, parameter[]]] begin[:] call[name[add_variable_value_statements], parameter[name[func]]] call[name[annotate_states], parameter[name[flat_class]]] variable[root] assign[=] call[name[ast].Tree, parameter[]] call[name[root].classes][name[orig_class].name] assign[=] name[flat_class] variable[functions_and_classes] assign[=] name[flat_class].functions name[flat_class].functions assign[=] call[name[OrderedDict], parameter[]] call[name[functions_and_classes].update, parameter[name[root].classes]] name[root].classes assign[=] name[functions_and_classes] return[name[root]]
keyword[def] identifier[flatten] ( identifier[root] : identifier[ast] . identifier[Tree] , identifier[class_name] : identifier[ast] . identifier[ComponentRef] )-> identifier[ast] . identifier[Class] : literal[string] identifier[orig_class] = identifier[root] . identifier[find_class] ( identifier[class_name] , identifier[copy] = keyword[False] ) identifier[flat_class] = identifier[flatten_class] ( identifier[orig_class] ) identifier[expand_connectors] ( identifier[flat_class] ) identifier[add_state_value_equations] ( identifier[flat_class] ) keyword[for] identifier[func] keyword[in] identifier[flat_class] . identifier[functions] . identifier[values] (): identifier[add_variable_value_statements] ( identifier[func] ) identifier[annotate_states] ( identifier[flat_class] ) identifier[root] = identifier[ast] . identifier[Tree] () identifier[root] . identifier[classes] [ identifier[orig_class] . identifier[name] ]= identifier[flat_class] identifier[functions_and_classes] = identifier[flat_class] . identifier[functions] identifier[flat_class] . identifier[functions] = identifier[OrderedDict] () identifier[functions_and_classes] . identifier[update] ( identifier[root] . identifier[classes] ) identifier[root] . identifier[classes] = identifier[functions_and_classes] keyword[return] identifier[root]
def flatten(root: ast.Tree, class_name: ast.ComponentRef) -> ast.Class: """ This function takes a Tree and flattens it so that all subclasses instances are replaced by the their equations and symbols with name mangling of the instance name passed. :param root: The Tree to flatten :param class_name: The class that we want to create a flat model for :return: flat_class, a Class containing the flattened class """ orig_class = root.find_class(class_name, copy=False) flat_class = flatten_class(orig_class) # expand connectors expand_connectors(flat_class) # add equations for state symbol values add_state_value_equations(flat_class) for func in flat_class.functions.values(): add_variable_value_statements(func) # depends on [control=['for'], data=['func']] # annotate states annotate_states(flat_class) # Put class in root root = ast.Tree() root.classes[orig_class.name] = flat_class # pull functions to the top level, # putting them prior to the model class so that they are visited # first by the tree walker. functions_and_classes = flat_class.functions flat_class.functions = OrderedDict() functions_and_classes.update(root.classes) root.classes = functions_and_classes return root
def is_parent_of_repository(self, id_, repository_id): """Tests if an ``Id`` is a direct parent of a repository. arg: id (osid.id.Id): an ``Id`` arg: repository_id (osid.id.Id): the ``Id`` of a repository return: (boolean) - ``true`` if this ``id`` is a parent of ``repository_id,`` ``false`` otherwise raise: NotFound - ``repository_id`` is not found raise: NullArgument - ``id`` or ``repository_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* *implementation notes*: If ``id`` not found return ``false``. """ # Implemented from template for # osid.resource.BinHierarchySession.is_parent_of_bin if self._catalog_session is not None: return self._catalog_session.is_parent_of_catalog(id_=id_, catalog_id=repository_id) return self._hierarchy_session.is_parent(id_=repository_id, parent_id=id_)
def function[is_parent_of_repository, parameter[self, id_, repository_id]]: constant[Tests if an ``Id`` is a direct parent of a repository. arg: id (osid.id.Id): an ``Id`` arg: repository_id (osid.id.Id): the ``Id`` of a repository return: (boolean) - ``true`` if this ``id`` is a parent of ``repository_id,`` ``false`` otherwise raise: NotFound - ``repository_id`` is not found raise: NullArgument - ``id`` or ``repository_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* *implementation notes*: If ``id`` not found return ``false``. ] if compare[name[self]._catalog_session is_not constant[None]] begin[:] return[call[name[self]._catalog_session.is_parent_of_catalog, parameter[]]] return[call[name[self]._hierarchy_session.is_parent, parameter[]]]
keyword[def] identifier[is_parent_of_repository] ( identifier[self] , identifier[id_] , identifier[repository_id] ): literal[string] keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[_catalog_session] . identifier[is_parent_of_catalog] ( identifier[id_] = identifier[id_] , identifier[catalog_id] = identifier[repository_id] ) keyword[return] identifier[self] . identifier[_hierarchy_session] . identifier[is_parent] ( identifier[id_] = identifier[repository_id] , identifier[parent_id] = identifier[id_] )
def is_parent_of_repository(self, id_, repository_id): """Tests if an ``Id`` is a direct parent of a repository. arg: id (osid.id.Id): an ``Id`` arg: repository_id (osid.id.Id): the ``Id`` of a repository return: (boolean) - ``true`` if this ``id`` is a parent of ``repository_id,`` ``false`` otherwise raise: NotFound - ``repository_id`` is not found raise: NullArgument - ``id`` or ``repository_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* *implementation notes*: If ``id`` not found return ``false``. """ # Implemented from template for # osid.resource.BinHierarchySession.is_parent_of_bin if self._catalog_session is not None: return self._catalog_session.is_parent_of_catalog(id_=id_, catalog_id=repository_id) # depends on [control=['if'], data=[]] return self._hierarchy_session.is_parent(id_=repository_id, parent_id=id_)
def _split_constraints(constraints, concrete=True): """ Returns independent constraints, split from this Frontend's `constraints`. """ splitted = [ ] for i in constraints: splitted.extend(i.split(['And'])) l.debug("... splitted of size %d", len(splitted)) concrete_constraints = [ ] variable_connections = { } constraint_connections = { } for n,s in enumerate(splitted): l.debug("... processing constraint with %d variables", len(s.variables)) connected_variables = set(s.variables) connected_constraints = { n } if len(connected_variables) == 0: concrete_constraints.append(s) for v in s.variables: if v in variable_connections: connected_variables |= variable_connections[v] if v in constraint_connections: connected_constraints |= constraint_connections[v] for v in connected_variables: variable_connections[v] = connected_variables constraint_connections[v] = connected_constraints unique_constraint_sets = set() for v in variable_connections: unique_constraint_sets.add((frozenset(variable_connections[v]), frozenset(constraint_connections[v]))) results = [ ] for v,c_indexes in unique_constraint_sets: results.append((set(v), [ splitted[c] for c in c_indexes ])) if concrete and len(concrete_constraints) > 0: results.append(({ 'CONCRETE' }, concrete_constraints)) return results
def function[_split_constraints, parameter[constraints, concrete]]: constant[ Returns independent constraints, split from this Frontend's `constraints`. ] variable[splitted] assign[=] list[[]] for taget[name[i]] in starred[name[constraints]] begin[:] call[name[splitted].extend, parameter[call[name[i].split, parameter[list[[<ast.Constant object at 0x7da20c76ca90>]]]]]] call[name[l].debug, parameter[constant[... splitted of size %d], call[name[len], parameter[name[splitted]]]]] variable[concrete_constraints] assign[=] list[[]] variable[variable_connections] assign[=] dictionary[[], []] variable[constraint_connections] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da20c76db10>, <ast.Name object at 0x7da20c76f2e0>]]] in starred[call[name[enumerate], parameter[name[splitted]]]] begin[:] call[name[l].debug, parameter[constant[... processing constraint with %d variables], call[name[len], parameter[name[s].variables]]]] variable[connected_variables] assign[=] call[name[set], parameter[name[s].variables]] variable[connected_constraints] assign[=] <ast.Set object at 0x7da20c76d870> if compare[call[name[len], parameter[name[connected_variables]]] equal[==] constant[0]] begin[:] call[name[concrete_constraints].append, parameter[name[s]]] for taget[name[v]] in starred[name[s].variables] begin[:] if compare[name[v] in name[variable_connections]] begin[:] <ast.AugAssign object at 0x7da20c76c280> if compare[name[v] in name[constraint_connections]] begin[:] <ast.AugAssign object at 0x7da20c76dea0> for taget[name[v]] in starred[name[connected_variables]] begin[:] call[name[variable_connections]][name[v]] assign[=] name[connected_variables] call[name[constraint_connections]][name[v]] assign[=] name[connected_constraints] variable[unique_constraint_sets] assign[=] call[name[set], parameter[]] for taget[name[v]] in starred[name[variable_connections]] begin[:] call[name[unique_constraint_sets].add, parameter[tuple[[<ast.Call object at 0x7da18eb56890>, <ast.Call 
object at 0x7da18eb57310>]]]] variable[results] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18eb558d0>, <ast.Name object at 0x7da18eb574f0>]]] in starred[name[unique_constraint_sets]] begin[:] call[name[results].append, parameter[tuple[[<ast.Call object at 0x7da20c76c5e0>, <ast.ListComp object at 0x7da20c76d570>]]]] if <ast.BoolOp object at 0x7da20c76c130> begin[:] call[name[results].append, parameter[tuple[[<ast.Set object at 0x7da20c76f820>, <ast.Name object at 0x7da20c76c640>]]]] return[name[results]]
keyword[def] identifier[_split_constraints] ( identifier[constraints] , identifier[concrete] = keyword[True] ): literal[string] identifier[splitted] =[] keyword[for] identifier[i] keyword[in] identifier[constraints] : identifier[splitted] . identifier[extend] ( identifier[i] . identifier[split] ([ literal[string] ])) identifier[l] . identifier[debug] ( literal[string] , identifier[len] ( identifier[splitted] )) identifier[concrete_constraints] =[] identifier[variable_connections] ={} identifier[constraint_connections] ={} keyword[for] identifier[n] , identifier[s] keyword[in] identifier[enumerate] ( identifier[splitted] ): identifier[l] . identifier[debug] ( literal[string] , identifier[len] ( identifier[s] . identifier[variables] )) identifier[connected_variables] = identifier[set] ( identifier[s] . identifier[variables] ) identifier[connected_constraints] ={ identifier[n] } keyword[if] identifier[len] ( identifier[connected_variables] )== literal[int] : identifier[concrete_constraints] . identifier[append] ( identifier[s] ) keyword[for] identifier[v] keyword[in] identifier[s] . identifier[variables] : keyword[if] identifier[v] keyword[in] identifier[variable_connections] : identifier[connected_variables] |= identifier[variable_connections] [ identifier[v] ] keyword[if] identifier[v] keyword[in] identifier[constraint_connections] : identifier[connected_constraints] |= identifier[constraint_connections] [ identifier[v] ] keyword[for] identifier[v] keyword[in] identifier[connected_variables] : identifier[variable_connections] [ identifier[v] ]= identifier[connected_variables] identifier[constraint_connections] [ identifier[v] ]= identifier[connected_constraints] identifier[unique_constraint_sets] = identifier[set] () keyword[for] identifier[v] keyword[in] identifier[variable_connections] : identifier[unique_constraint_sets] . 
identifier[add] (( identifier[frozenset] ( identifier[variable_connections] [ identifier[v] ]), identifier[frozenset] ( identifier[constraint_connections] [ identifier[v] ]))) identifier[results] =[] keyword[for] identifier[v] , identifier[c_indexes] keyword[in] identifier[unique_constraint_sets] : identifier[results] . identifier[append] (( identifier[set] ( identifier[v] ),[ identifier[splitted] [ identifier[c] ] keyword[for] identifier[c] keyword[in] identifier[c_indexes] ])) keyword[if] identifier[concrete] keyword[and] identifier[len] ( identifier[concrete_constraints] )> literal[int] : identifier[results] . identifier[append] (({ literal[string] }, identifier[concrete_constraints] )) keyword[return] identifier[results]
def _split_constraints(constraints, concrete=True): """ Returns independent constraints, split from this Frontend's `constraints`. """ splitted = [] for i in constraints: splitted.extend(i.split(['And'])) # depends on [control=['for'], data=['i']] l.debug('... splitted of size %d', len(splitted)) concrete_constraints = [] variable_connections = {} constraint_connections = {} for (n, s) in enumerate(splitted): l.debug('... processing constraint with %d variables', len(s.variables)) connected_variables = set(s.variables) connected_constraints = {n} if len(connected_variables) == 0: concrete_constraints.append(s) # depends on [control=['if'], data=[]] for v in s.variables: if v in variable_connections: connected_variables |= variable_connections[v] # depends on [control=['if'], data=['v', 'variable_connections']] if v in constraint_connections: connected_constraints |= constraint_connections[v] # depends on [control=['if'], data=['v', 'constraint_connections']] # depends on [control=['for'], data=['v']] for v in connected_variables: variable_connections[v] = connected_variables constraint_connections[v] = connected_constraints # depends on [control=['for'], data=['v']] # depends on [control=['for'], data=[]] unique_constraint_sets = set() for v in variable_connections: unique_constraint_sets.add((frozenset(variable_connections[v]), frozenset(constraint_connections[v]))) # depends on [control=['for'], data=['v']] results = [] for (v, c_indexes) in unique_constraint_sets: results.append((set(v), [splitted[c] for c in c_indexes])) # depends on [control=['for'], data=[]] if concrete and len(concrete_constraints) > 0: results.append(({'CONCRETE'}, concrete_constraints)) # depends on [control=['if'], data=[]] return results
def parse_references(content, year, reference_id=None, canonicalize=True): """\ Returns the references to other cables as (maybe empty) list. `content` The content of the cable. `year` The year when the cable was created. `reference_id` The reference identifier of the cable. `canonicalize` Indicates if the cable reference origin should be canonicalized. (enabled by default) """ from cablemap.core.models import Reference def format_year(y): y = str(y) if not y: y = str(year) if len(y) == 4: return y[2:] elif len(y) == 3 and y[0] == '0': return y[1:] return y offset = 0 m_offset = _REF_OFFSET_PATTERN.search(content) if m_offset: offset = m_offset.end() # 1. Try to find "Classified By:" m_stop = _REF_STOP_PATTERN.search(content, offset) # If found, use it as maximum index to search for references, otherwise use a constant max_idx = m_stop and m_stop.start() or _MAX_HEADER_IDX # 2. Find references m_start = _REF_START_PATTERN.search(content, offset, max_idx) # 3. Check if we have a paragraph in the references m_stop = _REF_NOT_REF_PATTERN.search(content, m_start and m_start.end() or 0, max_idx) last_end = m_start and m_start.end() or 0 # 4. 
Find the next max_idx max_idx = min(m_stop and m_stop.start() or _MAX_HEADER_IDX, max_idx) m_end = _REF_LAST_REF_PATTERN.search(content, last_end, max_idx) while m_end: last_end = m_end.end() m_end = _REF_LAST_REF_PATTERN.search(content, last_end, max_idx) res = [] if m_end and not m_start: logger.warn('Found ref end but no start in "%s", content: "%s"' % (reference_id, content)) if m_start and last_end: start = m_start.start(1) end = last_end or m_start.end() refs = content[start:end].replace('\n', ' ') refs = _CLEAN_REFS_PATTERN.sub('', refs) for enum, y, origin, sn, alt_year in _REF_PATTERN.findall(refs): if alt_year and not y: y = alt_year y = format_year(y) origin = origin.replace(' ', '').replace(u"'", u'').upper() if origin == 'AND' and res and res[-1].is_cable(): last_origin = _REF_ORIGIN_PATTERN.match(res[-1].value).group(1) origin = last_origin enum = enum or res[-1].value elif origin.startswith('AND') and res and res[-1].is_cable(): # for references like 09 FOO 1234 AND BAR 1234 origin = origin[3:] enum = enum or res[-1].value reference = u'%s%s%d' % (y, origin, int(sn)) if canonicalize: reference = canonicalize_id(reference) length = len(reference) if length < 7 or length > 25: # constants.MIN_ORIGIN_LENGTH + constants.MIN_SERIAL_LENGTH + length of year or constants.MAX_ORIGIN_LENGTH + constants.MAX_SERIAL_LENGTH + 2 (for the year) continue if not REFERENCE_ID_PATTERN.match(reference): if 'CORRUPTION' not in reference and 'ECRET' not in reference and 'PARISPOINT' not in reference and 'TELCON' not in reference and 'FORTHE' not in reference and 'ZOCT' not in reference and 'ZSEP' not in reference and 'ZMAY' not in reference and 'ZNOV' not in reference and 'ZAUG' not in reference and 'PRIORITY' not in reference and 'ZJAN' not in reference and 'ZFEB' not in reference and 'ZJUN' not in reference and'ZJUL' not in reference and 'PREVIO' not in reference and 'SEPTEMBER' not in reference and 'ZAPR' not in reference and 'ZFEB' not in reference and 'PART' not in 
reference and 'ONFIDENTIAL' not in reference and 'SECRET' not in reference and 'SECTION' not in reference and 'TODAY' not in reference and 'DAILY' not in reference and 'OUTOF' not in reference and 'PROVIDING' not in reference and 'NUMBER' not in reference and 'APRIL' not in reference and 'OCTOBER' not in reference and 'MAIL' not in reference and 'DECEMBER' not in reference and 'FEBRUAY' not in reference and 'AUGUST' not in reference and 'MARCH' not in reference and 'JULY' not in reference and 'JUNE' not in reference and 'MAIL' not in reference and 'JANUARY' not in reference and '--' not in reference and 'PARAGRAPH' not in reference and 'ANDPREVIOUS' not in reference and 'UNCLAS' not in reference and 'ONMARCH' not in reference and 'ONAPRIL' not in reference and 'FEBRUARY' not in reference and 'ONMAY' not in reference and 'ONJULY' not in reference and 'ONJUNE' not in reference and 'NOVEMBER' not in reference and not 'CONFIDENTIAL' in reference: logger.debug('Ignore "%s". Not a valid reference identifier (%s)' % (reference, reference_id)) continue if reference != reference_id: reference = Reference(reference, consts.REF_KIND_CABLE, enum) if reference not in res: res.append(reference) return res
def function[parse_references, parameter[content, year, reference_id, canonicalize]]: constant[ Returns the references to other cables as (maybe empty) list. `content` The content of the cable. `year` The year when the cable was created. `reference_id` The reference identifier of the cable. `canonicalize` Indicates if the cable reference origin should be canonicalized. (enabled by default) ] from relative_module[cablemap.core.models] import module[Reference] def function[format_year, parameter[y]]: variable[y] assign[=] call[name[str], parameter[name[y]]] if <ast.UnaryOp object at 0x7da20c990850> begin[:] variable[y] assign[=] call[name[str], parameter[name[year]]] if compare[call[name[len], parameter[name[y]]] equal[==] constant[4]] begin[:] return[call[name[y]][<ast.Slice object at 0x7da20c990280>]] return[name[y]] variable[offset] assign[=] constant[0] variable[m_offset] assign[=] call[name[_REF_OFFSET_PATTERN].search, parameter[name[content]]] if name[m_offset] begin[:] variable[offset] assign[=] call[name[m_offset].end, parameter[]] variable[m_stop] assign[=] call[name[_REF_STOP_PATTERN].search, parameter[name[content], name[offset]]] variable[max_idx] assign[=] <ast.BoolOp object at 0x7da20c993610> variable[m_start] assign[=] call[name[_REF_START_PATTERN].search, parameter[name[content], name[offset], name[max_idx]]] variable[m_stop] assign[=] call[name[_REF_NOT_REF_PATTERN].search, parameter[name[content], <ast.BoolOp object at 0x7da20c9913c0>, name[max_idx]]] variable[last_end] assign[=] <ast.BoolOp object at 0x7da20c991870> variable[max_idx] assign[=] call[name[min], parameter[<ast.BoolOp object at 0x7da20c993790>, name[max_idx]]] variable[m_end] assign[=] call[name[_REF_LAST_REF_PATTERN].search, parameter[name[content], name[last_end], name[max_idx]]] while name[m_end] begin[:] variable[last_end] assign[=] call[name[m_end].end, parameter[]] variable[m_end] assign[=] call[name[_REF_LAST_REF_PATTERN].search, parameter[name[content], name[last_end], 
name[max_idx]]] variable[res] assign[=] list[[]] if <ast.BoolOp object at 0x7da2041da650> begin[:] call[name[logger].warn, parameter[binary_operation[constant[Found ref end but no start in "%s", content: "%s"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2041d9330>, <ast.Name object at 0x7da2041daf20>]]]]] if <ast.BoolOp object at 0x7da2041db730> begin[:] variable[start] assign[=] call[name[m_start].start, parameter[constant[1]]] variable[end] assign[=] <ast.BoolOp object at 0x7da2041d8610> variable[refs] assign[=] call[call[name[content]][<ast.Slice object at 0x7da2041daa40>].replace, parameter[constant[ ], constant[ ]]] variable[refs] assign[=] call[name[_CLEAN_REFS_PATTERN].sub, parameter[constant[], name[refs]]] for taget[tuple[[<ast.Name object at 0x7da2041da230>, <ast.Name object at 0x7da2041d8190>, <ast.Name object at 0x7da2041da3b0>, <ast.Name object at 0x7da2041d9c60>, <ast.Name object at 0x7da2041daaa0>]]] in starred[call[name[_REF_PATTERN].findall, parameter[name[refs]]]] begin[:] if <ast.BoolOp object at 0x7da2041da0b0> begin[:] variable[y] assign[=] name[alt_year] variable[y] assign[=] call[name[format_year], parameter[name[y]]] variable[origin] assign[=] call[call[call[name[origin].replace, parameter[constant[ ], constant[]]].replace, parameter[constant['], constant[]]].upper, parameter[]] if <ast.BoolOp object at 0x7da2041dbbe0> begin[:] variable[last_origin] assign[=] call[call[name[_REF_ORIGIN_PATTERN].match, parameter[call[name[res]][<ast.UnaryOp object at 0x7da2041d9b70>].value]].group, parameter[constant[1]]] variable[origin] assign[=] name[last_origin] variable[enum] assign[=] <ast.BoolOp object at 0x7da2041dadd0> variable[reference] assign[=] binary_operation[constant[%s%s%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2041db580>, <ast.Name object at 0x7da2041dbd30>, <ast.Call object at 0x7da2041d8d60>]]] if name[canonicalize] begin[:] variable[reference] assign[=] call[name[canonicalize_id], 
parameter[name[reference]]] variable[length] assign[=] call[name[len], parameter[name[reference]]] if <ast.BoolOp object at 0x7da2041da0e0> begin[:] continue if <ast.UnaryOp object at 0x7da2041db3d0> begin[:] if <ast.BoolOp object at 0x7da2041d9bd0> begin[:] call[name[logger].debug, parameter[binary_operation[constant[Ignore "%s". Not a valid reference identifier (%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204962bf0>, <ast.Name object at 0x7da204962c50>]]]]] continue if compare[name[reference] not_equal[!=] name[reference_id]] begin[:] variable[reference] assign[=] call[name[Reference], parameter[name[reference], name[consts].REF_KIND_CABLE, name[enum]]] if compare[name[reference] <ast.NotIn object at 0x7da2590d7190> name[res]] begin[:] call[name[res].append, parameter[name[reference]]] return[name[res]]
keyword[def] identifier[parse_references] ( identifier[content] , identifier[year] , identifier[reference_id] = keyword[None] , identifier[canonicalize] = keyword[True] ): literal[string] keyword[from] identifier[cablemap] . identifier[core] . identifier[models] keyword[import] identifier[Reference] keyword[def] identifier[format_year] ( identifier[y] ): identifier[y] = identifier[str] ( identifier[y] ) keyword[if] keyword[not] identifier[y] : identifier[y] = identifier[str] ( identifier[year] ) keyword[if] identifier[len] ( identifier[y] )== literal[int] : keyword[return] identifier[y] [ literal[int] :] keyword[elif] identifier[len] ( identifier[y] )== literal[int] keyword[and] identifier[y] [ literal[int] ]== literal[string] : keyword[return] identifier[y] [ literal[int] :] keyword[return] identifier[y] identifier[offset] = literal[int] identifier[m_offset] = identifier[_REF_OFFSET_PATTERN] . identifier[search] ( identifier[content] ) keyword[if] identifier[m_offset] : identifier[offset] = identifier[m_offset] . identifier[end] () identifier[m_stop] = identifier[_REF_STOP_PATTERN] . identifier[search] ( identifier[content] , identifier[offset] ) identifier[max_idx] = identifier[m_stop] keyword[and] identifier[m_stop] . identifier[start] () keyword[or] identifier[_MAX_HEADER_IDX] identifier[m_start] = identifier[_REF_START_PATTERN] . identifier[search] ( identifier[content] , identifier[offset] , identifier[max_idx] ) identifier[m_stop] = identifier[_REF_NOT_REF_PATTERN] . identifier[search] ( identifier[content] , identifier[m_start] keyword[and] identifier[m_start] . identifier[end] () keyword[or] literal[int] , identifier[max_idx] ) identifier[last_end] = identifier[m_start] keyword[and] identifier[m_start] . identifier[end] () keyword[or] literal[int] identifier[max_idx] = identifier[min] ( identifier[m_stop] keyword[and] identifier[m_stop] . 
identifier[start] () keyword[or] identifier[_MAX_HEADER_IDX] , identifier[max_idx] ) identifier[m_end] = identifier[_REF_LAST_REF_PATTERN] . identifier[search] ( identifier[content] , identifier[last_end] , identifier[max_idx] ) keyword[while] identifier[m_end] : identifier[last_end] = identifier[m_end] . identifier[end] () identifier[m_end] = identifier[_REF_LAST_REF_PATTERN] . identifier[search] ( identifier[content] , identifier[last_end] , identifier[max_idx] ) identifier[res] =[] keyword[if] identifier[m_end] keyword[and] keyword[not] identifier[m_start] : identifier[logger] . identifier[warn] ( literal[string] %( identifier[reference_id] , identifier[content] )) keyword[if] identifier[m_start] keyword[and] identifier[last_end] : identifier[start] = identifier[m_start] . identifier[start] ( literal[int] ) identifier[end] = identifier[last_end] keyword[or] identifier[m_start] . identifier[end] () identifier[refs] = identifier[content] [ identifier[start] : identifier[end] ]. identifier[replace] ( literal[string] , literal[string] ) identifier[refs] = identifier[_CLEAN_REFS_PATTERN] . identifier[sub] ( literal[string] , identifier[refs] ) keyword[for] identifier[enum] , identifier[y] , identifier[origin] , identifier[sn] , identifier[alt_year] keyword[in] identifier[_REF_PATTERN] . identifier[findall] ( identifier[refs] ): keyword[if] identifier[alt_year] keyword[and] keyword[not] identifier[y] : identifier[y] = identifier[alt_year] identifier[y] = identifier[format_year] ( identifier[y] ) identifier[origin] = identifier[origin] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[upper] () keyword[if] identifier[origin] == literal[string] keyword[and] identifier[res] keyword[and] identifier[res] [- literal[int] ]. identifier[is_cable] (): identifier[last_origin] = identifier[_REF_ORIGIN_PATTERN] . identifier[match] ( identifier[res] [- literal[int] ]. identifier[value] ). 
identifier[group] ( literal[int] ) identifier[origin] = identifier[last_origin] identifier[enum] = identifier[enum] keyword[or] identifier[res] [- literal[int] ]. identifier[value] keyword[elif] identifier[origin] . identifier[startswith] ( literal[string] ) keyword[and] identifier[res] keyword[and] identifier[res] [- literal[int] ]. identifier[is_cable] (): identifier[origin] = identifier[origin] [ literal[int] :] identifier[enum] = identifier[enum] keyword[or] identifier[res] [- literal[int] ]. identifier[value] identifier[reference] = literal[string] %( identifier[y] , identifier[origin] , identifier[int] ( identifier[sn] )) keyword[if] identifier[canonicalize] : identifier[reference] = identifier[canonicalize_id] ( identifier[reference] ) identifier[length] = identifier[len] ( identifier[reference] ) keyword[if] identifier[length] < literal[int] keyword[or] identifier[length] > literal[int] : keyword[continue] keyword[if] keyword[not] identifier[REFERENCE_ID_PATTERN] . identifier[match] ( identifier[reference] ): keyword[if] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] 
identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] 
keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] literal[string] keyword[not] keyword[in] identifier[reference] keyword[and] keyword[not] literal[string] keyword[in] identifier[reference] : identifier[logger] . identifier[debug] ( literal[string] %( identifier[reference] , identifier[reference_id] )) keyword[continue] keyword[if] identifier[reference] != identifier[reference_id] : identifier[reference] = identifier[Reference] ( identifier[reference] , identifier[consts] . identifier[REF_KIND_CABLE] , identifier[enum] ) keyword[if] identifier[reference] keyword[not] keyword[in] identifier[res] : identifier[res] . identifier[append] ( identifier[reference] ) keyword[return] identifier[res]
def parse_references(content, year, reference_id=None, canonicalize=True): """ Returns the references to other cables as (maybe empty) list. `content` The content of the cable. `year` The year when the cable was created. `reference_id` The reference identifier of the cable. `canonicalize` Indicates if the cable reference origin should be canonicalized. (enabled by default) """ from cablemap.core.models import Reference def format_year(y): y = str(y) if not y: y = str(year) # depends on [control=['if'], data=[]] if len(y) == 4: return y[2:] # depends on [control=['if'], data=[]] elif len(y) == 3 and y[0] == '0': return y[1:] # depends on [control=['if'], data=[]] return y offset = 0 m_offset = _REF_OFFSET_PATTERN.search(content) if m_offset: offset = m_offset.end() # depends on [control=['if'], data=[]] # 1. Try to find "Classified By:" m_stop = _REF_STOP_PATTERN.search(content, offset) # If found, use it as maximum index to search for references, otherwise use a constant max_idx = m_stop and m_stop.start() or _MAX_HEADER_IDX # 2. Find references m_start = _REF_START_PATTERN.search(content, offset, max_idx) # 3. Check if we have a paragraph in the references m_stop = _REF_NOT_REF_PATTERN.search(content, m_start and m_start.end() or 0, max_idx) last_end = m_start and m_start.end() or 0 # 4. 
Find the next max_idx max_idx = min(m_stop and m_stop.start() or _MAX_HEADER_IDX, max_idx) m_end = _REF_LAST_REF_PATTERN.search(content, last_end, max_idx) while m_end: last_end = m_end.end() m_end = _REF_LAST_REF_PATTERN.search(content, last_end, max_idx) # depends on [control=['while'], data=[]] res = [] if m_end and (not m_start): logger.warn('Found ref end but no start in "%s", content: "%s"' % (reference_id, content)) # depends on [control=['if'], data=[]] if m_start and last_end: start = m_start.start(1) end = last_end or m_start.end() refs = content[start:end].replace('\n', ' ') refs = _CLEAN_REFS_PATTERN.sub('', refs) for (enum, y, origin, sn, alt_year) in _REF_PATTERN.findall(refs): if alt_year and (not y): y = alt_year # depends on [control=['if'], data=[]] y = format_year(y) origin = origin.replace(' ', '').replace(u"'", u'').upper() if origin == 'AND' and res and res[-1].is_cable(): last_origin = _REF_ORIGIN_PATTERN.match(res[-1].value).group(1) origin = last_origin enum = enum or res[-1].value # depends on [control=['if'], data=[]] elif origin.startswith('AND') and res and res[-1].is_cable(): # for references like 09 FOO 1234 AND BAR 1234 origin = origin[3:] enum = enum or res[-1].value # depends on [control=['if'], data=[]] reference = u'%s%s%d' % (y, origin, int(sn)) if canonicalize: reference = canonicalize_id(reference) # depends on [control=['if'], data=[]] length = len(reference) if length < 7 or length > 25: # constants.MIN_ORIGIN_LENGTH + constants.MIN_SERIAL_LENGTH + length of year or constants.MAX_ORIGIN_LENGTH + constants.MAX_SERIAL_LENGTH + 2 (for the year) continue # depends on [control=['if'], data=[]] if not REFERENCE_ID_PATTERN.match(reference): if 'CORRUPTION' not in reference and 'ECRET' not in reference and ('PARISPOINT' not in reference) and ('TELCON' not in reference) and ('FORTHE' not in reference) and ('ZOCT' not in reference) and ('ZSEP' not in reference) and ('ZMAY' not in reference) and ('ZNOV' not in reference) and ('ZAUG' 
not in reference) and ('PRIORITY' not in reference) and ('ZJAN' not in reference) and ('ZFEB' not in reference) and ('ZJUN' not in reference) and ('ZJUL' not in reference) and ('PREVIO' not in reference) and ('SEPTEMBER' not in reference) and ('ZAPR' not in reference) and ('ZFEB' not in reference) and ('PART' not in reference) and ('ONFIDENTIAL' not in reference) and ('SECRET' not in reference) and ('SECTION' not in reference) and ('TODAY' not in reference) and ('DAILY' not in reference) and ('OUTOF' not in reference) and ('PROVIDING' not in reference) and ('NUMBER' not in reference) and ('APRIL' not in reference) and ('OCTOBER' not in reference) and ('MAIL' not in reference) and ('DECEMBER' not in reference) and ('FEBRUAY' not in reference) and ('AUGUST' not in reference) and ('MARCH' not in reference) and ('JULY' not in reference) and ('JUNE' not in reference) and ('MAIL' not in reference) and ('JANUARY' not in reference) and ('--' not in reference) and ('PARAGRAPH' not in reference) and ('ANDPREVIOUS' not in reference) and ('UNCLAS' not in reference) and ('ONMARCH' not in reference) and ('ONAPRIL' not in reference) and ('FEBRUARY' not in reference) and ('ONMAY' not in reference) and ('ONJULY' not in reference) and ('ONJUNE' not in reference) and ('NOVEMBER' not in reference) and (not 'CONFIDENTIAL' in reference): logger.debug('Ignore "%s". Not a valid reference identifier (%s)' % (reference, reference_id)) # depends on [control=['if'], data=[]] continue # depends on [control=['if'], data=[]] if reference != reference_id: reference = Reference(reference, consts.REF_KIND_CABLE, enum) if reference not in res: res.append(reference) # depends on [control=['if'], data=['reference', 'res']] # depends on [control=['if'], data=['reference']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] return res
def _deprecation_notice(cls): """Warn about deprecation of this class.""" _deprecation_msg = ( '{name} {type} is deprecated. ' 'It will be removed in the next version. ' 'Use saml2.cryptography.symmetric instead.' ).format(name=cls.__name__, type=type(cls).__name__) _warnings.warn(_deprecation_msg, DeprecationWarning)
def function[_deprecation_notice, parameter[cls]]: constant[Warn about deprecation of this class.] variable[_deprecation_msg] assign[=] call[constant[{name} {type} is deprecated. It will be removed in the next version. Use saml2.cryptography.symmetric instead.].format, parameter[]] call[name[_warnings].warn, parameter[name[_deprecation_msg], name[DeprecationWarning]]]
keyword[def] identifier[_deprecation_notice] ( identifier[cls] ): literal[string] identifier[_deprecation_msg] =( literal[string] literal[string] literal[string] ). identifier[format] ( identifier[name] = identifier[cls] . identifier[__name__] , identifier[type] = identifier[type] ( identifier[cls] ). identifier[__name__] ) identifier[_warnings] . identifier[warn] ( identifier[_deprecation_msg] , identifier[DeprecationWarning] )
def _deprecation_notice(cls): """Warn about deprecation of this class.""" _deprecation_msg = '{name} {type} is deprecated. It will be removed in the next version. Use saml2.cryptography.symmetric instead.'.format(name=cls.__name__, type=type(cls).__name__) _warnings.warn(_deprecation_msg, DeprecationWarning)
def main(self, x): """ Transposed FIR structure """ self.acc[0] = x * self.TAPS[-1] for i in range(1, len(self.acc)): self.acc[i] = self.acc[i - 1] + x * self.TAPS[len(self.TAPS) - 1 - i] self.out = self.acc[-1] return self.out
def function[main, parameter[self, x]]: constant[ Transposed FIR structure ] call[name[self].acc][constant[0]] assign[=] binary_operation[name[x] * call[name[self].TAPS][<ast.UnaryOp object at 0x7da1b0a4f910>]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[self].acc]]]]] begin[:] call[name[self].acc][name[i]] assign[=] binary_operation[call[name[self].acc][binary_operation[name[i] - constant[1]]] + binary_operation[name[x] * call[name[self].TAPS][binary_operation[binary_operation[call[name[len], parameter[name[self].TAPS]] - constant[1]] - name[i]]]]] name[self].out assign[=] call[name[self].acc][<ast.UnaryOp object at 0x7da1b0adbf40>] return[name[self].out]
keyword[def] identifier[main] ( identifier[self] , identifier[x] ): literal[string] identifier[self] . identifier[acc] [ literal[int] ]= identifier[x] * identifier[self] . identifier[TAPS] [- literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[self] . identifier[acc] )): identifier[self] . identifier[acc] [ identifier[i] ]= identifier[self] . identifier[acc] [ identifier[i] - literal[int] ]+ identifier[x] * identifier[self] . identifier[TAPS] [ identifier[len] ( identifier[self] . identifier[TAPS] )- literal[int] - identifier[i] ] identifier[self] . identifier[out] = identifier[self] . identifier[acc] [- literal[int] ] keyword[return] identifier[self] . identifier[out]
def main(self, x): """ Transposed FIR structure """ self.acc[0] = x * self.TAPS[-1] for i in range(1, len(self.acc)): self.acc[i] = self.acc[i - 1] + x * self.TAPS[len(self.TAPS) - 1 - i] # depends on [control=['for'], data=['i']] self.out = self.acc[-1] return self.out
def _parse_tag(self, name): """Parse a tag command.""" from_ = self._get_from(b'tag') tagger = self._get_user_info(b'tag', b'tagger', accept_just_who=True) message = self._get_data(b'tag', b'message') return commands.TagCommand(name, from_, tagger, message)
def function[_parse_tag, parameter[self, name]]: constant[Parse a tag command.] variable[from_] assign[=] call[name[self]._get_from, parameter[constant[b'tag']]] variable[tagger] assign[=] call[name[self]._get_user_info, parameter[constant[b'tag'], constant[b'tagger']]] variable[message] assign[=] call[name[self]._get_data, parameter[constant[b'tag'], constant[b'message']]] return[call[name[commands].TagCommand, parameter[name[name], name[from_], name[tagger], name[message]]]]
keyword[def] identifier[_parse_tag] ( identifier[self] , identifier[name] ): literal[string] identifier[from_] = identifier[self] . identifier[_get_from] ( literal[string] ) identifier[tagger] = identifier[self] . identifier[_get_user_info] ( literal[string] , literal[string] , identifier[accept_just_who] = keyword[True] ) identifier[message] = identifier[self] . identifier[_get_data] ( literal[string] , literal[string] ) keyword[return] identifier[commands] . identifier[TagCommand] ( identifier[name] , identifier[from_] , identifier[tagger] , identifier[message] )
def _parse_tag(self, name): """Parse a tag command.""" from_ = self._get_from(b'tag') tagger = self._get_user_info(b'tag', b'tagger', accept_just_who=True) message = self._get_data(b'tag', b'message') return commands.TagCommand(name, from_, tagger, message)
def log(self, message, level=logging.DEBUG): """ Logs the message in the root logger with the log level @param message: Message to be logged @type message: string @param level: Log level, defaul DEBUG @type level: integer @return: 1 on success and 0 on error @rtype: integer """ if _ldtp_debug: print(message) self.logger.log(level, str(message)) return 1
def function[log, parameter[self, message, level]]: constant[ Logs the message in the root logger with the log level @param message: Message to be logged @type message: string @param level: Log level, defaul DEBUG @type level: integer @return: 1 on success and 0 on error @rtype: integer ] if name[_ldtp_debug] begin[:] call[name[print], parameter[name[message]]] call[name[self].logger.log, parameter[name[level], call[name[str], parameter[name[message]]]]] return[constant[1]]
keyword[def] identifier[log] ( identifier[self] , identifier[message] , identifier[level] = identifier[logging] . identifier[DEBUG] ): literal[string] keyword[if] identifier[_ldtp_debug] : identifier[print] ( identifier[message] ) identifier[self] . identifier[logger] . identifier[log] ( identifier[level] , identifier[str] ( identifier[message] )) keyword[return] literal[int]
def log(self, message, level=logging.DEBUG): """ Logs the message in the root logger with the log level @param message: Message to be logged @type message: string @param level: Log level, defaul DEBUG @type level: integer @return: 1 on success and 0 on error @rtype: integer """ if _ldtp_debug: print(message) # depends on [control=['if'], data=[]] self.logger.log(level, str(message)) return 1
def get_timeout(self): "setup a timeout for waiting for a proposal" if self.timeout_time is not None or self.proposal: return now = self.cm.chainservice.now round_timeout = ConsensusManager.round_timeout round_timeout_factor = ConsensusManager.round_timeout_factor delay = round_timeout * round_timeout_factor ** self.round self.timeout_time = now + delay return delay
def function[get_timeout, parameter[self]]: constant[setup a timeout for waiting for a proposal] if <ast.BoolOp object at 0x7da2054a77f0> begin[:] return[None] variable[now] assign[=] name[self].cm.chainservice.now variable[round_timeout] assign[=] name[ConsensusManager].round_timeout variable[round_timeout_factor] assign[=] name[ConsensusManager].round_timeout_factor variable[delay] assign[=] binary_operation[name[round_timeout] * binary_operation[name[round_timeout_factor] ** name[self].round]] name[self].timeout_time assign[=] binary_operation[name[now] + name[delay]] return[name[delay]]
keyword[def] identifier[get_timeout] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[timeout_time] keyword[is] keyword[not] keyword[None] keyword[or] identifier[self] . identifier[proposal] : keyword[return] identifier[now] = identifier[self] . identifier[cm] . identifier[chainservice] . identifier[now] identifier[round_timeout] = identifier[ConsensusManager] . identifier[round_timeout] identifier[round_timeout_factor] = identifier[ConsensusManager] . identifier[round_timeout_factor] identifier[delay] = identifier[round_timeout] * identifier[round_timeout_factor] ** identifier[self] . identifier[round] identifier[self] . identifier[timeout_time] = identifier[now] + identifier[delay] keyword[return] identifier[delay]
def get_timeout(self): """setup a timeout for waiting for a proposal""" if self.timeout_time is not None or self.proposal: return # depends on [control=['if'], data=[]] now = self.cm.chainservice.now round_timeout = ConsensusManager.round_timeout round_timeout_factor = ConsensusManager.round_timeout_factor delay = round_timeout * round_timeout_factor ** self.round self.timeout_time = now + delay return delay
def _encode_list(name, value, check_keys, opts): """Encode a list/tuple.""" lname = gen_list_name() data = b"".join([_name_value_to_bson(next(lname), item, check_keys, opts) for item in value]) return b"\x04" + name + _PACK_INT(len(data) + 5) + data + b"\x00"
def function[_encode_list, parameter[name, value, check_keys, opts]]: constant[Encode a list/tuple.] variable[lname] assign[=] call[name[gen_list_name], parameter[]] variable[data] assign[=] call[constant[b''].join, parameter[<ast.ListComp object at 0x7da1b21ba050>]] return[binary_operation[binary_operation[binary_operation[binary_operation[constant[b'\x04'] + name[name]] + call[name[_PACK_INT], parameter[binary_operation[call[name[len], parameter[name[data]]] + constant[5]]]]] + name[data]] + constant[b'\x00']]]
keyword[def] identifier[_encode_list] ( identifier[name] , identifier[value] , identifier[check_keys] , identifier[opts] ): literal[string] identifier[lname] = identifier[gen_list_name] () identifier[data] = literal[string] . identifier[join] ([ identifier[_name_value_to_bson] ( identifier[next] ( identifier[lname] ), identifier[item] , identifier[check_keys] , identifier[opts] ) keyword[for] identifier[item] keyword[in] identifier[value] ]) keyword[return] literal[string] + identifier[name] + identifier[_PACK_INT] ( identifier[len] ( identifier[data] )+ literal[int] )+ identifier[data] + literal[string]
def _encode_list(name, value, check_keys, opts): """Encode a list/tuple.""" lname = gen_list_name() data = b''.join([_name_value_to_bson(next(lname), item, check_keys, opts) for item in value]) return b'\x04' + name + _PACK_INT(len(data) + 5) + data + b'\x00'
def bulk_history_create(self, objs, batch_size=None): """Bulk create the history for the objects specified by objs""" historical_instances = [ self.model( history_date=getattr(instance, "_history_date", now()), history_user=getattr(instance, "_history_user", None), history_change_reason=getattr(instance, "changeReason", ""), history_type="+", **{ field.attname: getattr(instance, field.attname) for field in instance._meta.fields if field.name not in self.model._history_excluded_fields } ) for instance in objs ] return self.model.objects.bulk_create( historical_instances, batch_size=batch_size )
def function[bulk_history_create, parameter[self, objs, batch_size]]: constant[Bulk create the history for the objects specified by objs] variable[historical_instances] assign[=] <ast.ListComp object at 0x7da20c6a9990> return[call[name[self].model.objects.bulk_create, parameter[name[historical_instances]]]]
keyword[def] identifier[bulk_history_create] ( identifier[self] , identifier[objs] , identifier[batch_size] = keyword[None] ): literal[string] identifier[historical_instances] =[ identifier[self] . identifier[model] ( identifier[history_date] = identifier[getattr] ( identifier[instance] , literal[string] , identifier[now] ()), identifier[history_user] = identifier[getattr] ( identifier[instance] , literal[string] , keyword[None] ), identifier[history_change_reason] = identifier[getattr] ( identifier[instance] , literal[string] , literal[string] ), identifier[history_type] = literal[string] , **{ identifier[field] . identifier[attname] : identifier[getattr] ( identifier[instance] , identifier[field] . identifier[attname] ) keyword[for] identifier[field] keyword[in] identifier[instance] . identifier[_meta] . identifier[fields] keyword[if] identifier[field] . identifier[name] keyword[not] keyword[in] identifier[self] . identifier[model] . identifier[_history_excluded_fields] } ) keyword[for] identifier[instance] keyword[in] identifier[objs] ] keyword[return] identifier[self] . identifier[model] . identifier[objects] . identifier[bulk_create] ( identifier[historical_instances] , identifier[batch_size] = identifier[batch_size] )
def bulk_history_create(self, objs, batch_size=None): """Bulk create the history for the objects specified by objs""" historical_instances = [self.model(history_date=getattr(instance, '_history_date', now()), history_user=getattr(instance, '_history_user', None), history_change_reason=getattr(instance, 'changeReason', ''), history_type='+', **{field.attname: getattr(instance, field.attname) for field in instance._meta.fields if field.name not in self.model._history_excluded_fields}) for instance in objs] return self.model.objects.bulk_create(historical_instances, batch_size=batch_size)
def draw(self): ''' Draws samples from the `fake` distribution. Returns: `np.ndarray` of samples. ''' observed_arr = self.extract_conditions() conv_arr = self.inference(observed_arr) if self.__conditon_noise_sampler is not None: self.__conditon_noise_sampler.output_shape = conv_arr.shape noise_arr = self.__conditon_noise_sampler.generate() conv_arr += noise_arr deconv_arr = self.__deconvolution_model.inference(conv_arr) return np.concatenate((deconv_arr, observed_arr), axis=1)
def function[draw, parameter[self]]: constant[ Draws samples from the `fake` distribution. Returns: `np.ndarray` of samples. ] variable[observed_arr] assign[=] call[name[self].extract_conditions, parameter[]] variable[conv_arr] assign[=] call[name[self].inference, parameter[name[observed_arr]]] if compare[name[self].__conditon_noise_sampler is_not constant[None]] begin[:] name[self].__conditon_noise_sampler.output_shape assign[=] name[conv_arr].shape variable[noise_arr] assign[=] call[name[self].__conditon_noise_sampler.generate, parameter[]] <ast.AugAssign object at 0x7da1b07afc70> variable[deconv_arr] assign[=] call[name[self].__deconvolution_model.inference, parameter[name[conv_arr]]] return[call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da1b07ae770>, <ast.Name object at 0x7da1b07ad510>]]]]]
keyword[def] identifier[draw] ( identifier[self] ): literal[string] identifier[observed_arr] = identifier[self] . identifier[extract_conditions] () identifier[conv_arr] = identifier[self] . identifier[inference] ( identifier[observed_arr] ) keyword[if] identifier[self] . identifier[__conditon_noise_sampler] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[__conditon_noise_sampler] . identifier[output_shape] = identifier[conv_arr] . identifier[shape] identifier[noise_arr] = identifier[self] . identifier[__conditon_noise_sampler] . identifier[generate] () identifier[conv_arr] += identifier[noise_arr] identifier[deconv_arr] = identifier[self] . identifier[__deconvolution_model] . identifier[inference] ( identifier[conv_arr] ) keyword[return] identifier[np] . identifier[concatenate] (( identifier[deconv_arr] , identifier[observed_arr] ), identifier[axis] = literal[int] )
def draw(self): """ Draws samples from the `fake` distribution. Returns: `np.ndarray` of samples. """ observed_arr = self.extract_conditions() conv_arr = self.inference(observed_arr) if self.__conditon_noise_sampler is not None: self.__conditon_noise_sampler.output_shape = conv_arr.shape noise_arr = self.__conditon_noise_sampler.generate() conv_arr += noise_arr # depends on [control=['if'], data=[]] deconv_arr = self.__deconvolution_model.inference(conv_arr) return np.concatenate((deconv_arr, observed_arr), axis=1)